problem_id
stringlengths 18–22
| source
stringclasses 1 value
| task_type
stringclasses 1 value
| in_source_id
stringlengths 13–58
| prompt
stringlengths 1.1k–10.2k
| golden_diff
stringlengths 151–4.94k
| verification_info
stringlengths 582–21k
| num_tokens
int64 271–2.05k
| num_tokens_diff
int64 47–1.02k
|
---|---|---|---|---|---|---|---|---|
gh_patches_debug_51262
|
rasdani/github-patches
|
git_diff
|
conda__conda-5426
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Launching navigator via prompt warnings appear
_From @RidaZubair on May 24, 2017 9:47_
**OS:** Windows
**Anaconda: 4.4.0**
**Actual:**
On launching navigator via prompt following warning appears on prompt

_Copied from original issue: ContinuumIO/navigator#1189_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conda/common/platform.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 from __future__ import absolute_import, division, print_function, unicode_literals
3
4 from collections import OrderedDict
5 from genericpath import exists
6 from glob import glob
7 from logging import getLogger
8 import sys
9
10 from .compat import iteritems, on_win
11 from .._vendor.auxlib.decorators import memoize
12
13 log = getLogger(__name__)
14
15
16 def is_admin_on_windows(): # pragma: unix no cover
17 # http://stackoverflow.com/a/1026626/2127762
18 if not on_win: # pragma: no cover
19 return False
20 try:
21 from ctypes import windll
22 return windll.shell32.IsUserAnAdmin()() != 0
23 except ImportError as e:
24 log.debug('%r', e)
25 return 'unknown'
26 except Exception as e:
27 log.warn('%r', e)
28 return 'unknown'
29
30
31 @memoize
32 def linux_get_libc_version():
33 """
34 If on linux, returns (libc_family, version), otherwise (None, None)
35 """
36
37 if not sys.platform.startswith('linux'):
38 return None, None
39
40 from os import confstr, confstr_names, readlink
41
42 # Python 2.7 does not have either of these keys in confstr_names, so provide
43 # hard-coded defaults and assert if the key is in confstr_names but differs.
44 # These are defined by POSIX anyway so should never change.
45 confstr_names_fallback = OrderedDict([('CS_GNU_LIBC_VERSION', 2),
46 ('CS_GNU_LIBPTHREAD_VERSION', 3)])
47
48 val = None
49 for k, v in iteritems(confstr_names_fallback):
50 assert k not in confstr_names or confstr_names[k] == v, (
51 "confstr_names_fallback for %s is %s yet in confstr_names it is %s"
52 "" % (k, confstr_names_fallback[k], confstr_names[k])
53 )
54 try:
55 val = str(confstr(v))
56 except:
57 pass
58 else:
59 if val:
60 break
61
62 if not val:
63 # Weird, play it safe and assume glibc 2.5
64 family, version = 'glibc', '2.5'
65 log.warning("Failed to detect libc family and version, assuming %s/%s", family, version)
66 return family, version
67 family, version = val.split(' ')
68
69 # NPTL is just the name of the threading library, even though the
70 # version refers to that of uClibc. readlink() can help to try to
71 # figure out a better name instead.
72 if family == 'NPTL':
73 clibs = glob('/lib/libc.so*')
74 for clib in clibs:
75 clib = readlink(clib)
76 if exists(clib):
77 if clib.startswith('libuClibc'):
78 if version.startswith('0.'):
79 family = 'uClibc'
80 else:
81 family = 'uClibc-ng'
82 return family, version
83 # This could be some other C library; it is unlikely though.
84 family = 'uClibc'
85 log.warning("Failed to detect non-glibc family, assuming %s (%s)", family, version)
86 return family, version
87 return family, version
88
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/conda/common/platform.py b/conda/common/platform.py
--- a/conda/common/platform.py
+++ b/conda/common/platform.py
@@ -19,12 +19,12 @@
return False
try:
from ctypes import windll
- return windll.shell32.IsUserAnAdmin()() != 0
+ return windll.shell32.IsUserAnAdmin() != 0
except ImportError as e:
log.debug('%r', e)
return 'unknown'
except Exception as e:
- log.warn('%r', e)
+ log.info('%r', e)
return 'unknown'
|
{"golden_diff": "diff --git a/conda/common/platform.py b/conda/common/platform.py\n--- a/conda/common/platform.py\n+++ b/conda/common/platform.py\n@@ -19,12 +19,12 @@\n return False\n try:\n from ctypes import windll\n- return windll.shell32.IsUserAnAdmin()() != 0\n+ return windll.shell32.IsUserAnAdmin() != 0\n except ImportError as e:\n log.debug('%r', e)\n return 'unknown'\n except Exception as e:\n- log.warn('%r', e)\n+ log.info('%r', e)\n return 'unknown'\n", "issue": "Launching navigator via prompt warnings appear\n_From @RidaZubair on May 24, 2017 9:47_\n\n**OS:** Windows\r\n**Anaconda: 4.4.0**\r\n\r\n**Actual:**\r\nOn launching navigator via prompt following warning appears on prompt\r\n\r\n\r\n\n\n_Copied from original issue: ContinuumIO/navigator#1189_\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom collections import OrderedDict\nfrom genericpath import exists\nfrom glob import glob\nfrom logging import getLogger\nimport sys\n\nfrom .compat import iteritems, on_win\nfrom .._vendor.auxlib.decorators import memoize\n\nlog = getLogger(__name__)\n\n\ndef is_admin_on_windows(): # pragma: unix no cover\n # http://stackoverflow.com/a/1026626/2127762\n if not on_win: # pragma: no cover\n return False\n try:\n from ctypes import windll\n return windll.shell32.IsUserAnAdmin()() != 0\n except ImportError as e:\n log.debug('%r', e)\n return 'unknown'\n except Exception as e:\n log.warn('%r', e)\n return 'unknown'\n\n\n@memoize\ndef linux_get_libc_version():\n \"\"\"\n If on linux, returns (libc_family, version), otherwise (None, None)\n \"\"\"\n\n if not sys.platform.startswith('linux'):\n return None, None\n\n from os import confstr, confstr_names, readlink\n\n # Python 2.7 does not have either of these keys in confstr_names, so provide\n # hard-coded defaults and assert if the key is in confstr_names but differs.\n # These are defined by POSIX anyway so should never change.\n confstr_names_fallback = OrderedDict([('CS_GNU_LIBC_VERSION', 2),\n ('CS_GNU_LIBPTHREAD_VERSION', 3)])\n\n val = None\n for k, v in iteritems(confstr_names_fallback):\n assert k not in confstr_names or confstr_names[k] == v, (\n \"confstr_names_fallback for %s is %s yet in confstr_names it is %s\"\n \"\" % (k, confstr_names_fallback[k], confstr_names[k])\n )\n try:\n val = str(confstr(v))\n except:\n pass\n else:\n if val:\n break\n\n if not val:\n # Weird, play it safe and assume glibc 2.5\n family, version = 'glibc', '2.5'\n log.warning(\"Failed to detect libc family and version, assuming %s/%s\", family, version)\n return family, version\n family, version = val.split(' ')\n\n # NPTL is just the name of the threading library, even though the\n # version refers to that of uClibc. 
readlink() can help to try to\n # figure out a better name instead.\n if family == 'NPTL':\n clibs = glob('/lib/libc.so*')\n for clib in clibs:\n clib = readlink(clib)\n if exists(clib):\n if clib.startswith('libuClibc'):\n if version.startswith('0.'):\n family = 'uClibc'\n else:\n family = 'uClibc-ng'\n return family, version\n # This could be some other C library; it is unlikely though.\n family = 'uClibc'\n log.warning(\"Failed to detect non-glibc family, assuming %s (%s)\", family, version)\n return family, version\n return family, version\n", "path": "conda/common/platform.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom collections import OrderedDict\nfrom genericpath import exists\nfrom glob import glob\nfrom logging import getLogger\nimport sys\n\nfrom .compat import iteritems, on_win\nfrom .._vendor.auxlib.decorators import memoize\n\nlog = getLogger(__name__)\n\n\ndef is_admin_on_windows(): # pragma: unix no cover\n # http://stackoverflow.com/a/1026626/2127762\n if not on_win: # pragma: no cover\n return False\n try:\n from ctypes import windll\n return windll.shell32.IsUserAnAdmin() != 0\n except ImportError as e:\n log.debug('%r', e)\n return 'unknown'\n except Exception as e:\n log.info('%r', e)\n return 'unknown'\n\n\n@memoize\ndef linux_get_libc_version():\n \"\"\"\n If on linux, returns (libc_family, version), otherwise (None, None)\n \"\"\"\n\n if not sys.platform.startswith('linux'):\n return None, None\n\n from os import confstr, confstr_names, readlink\n\n # Python 2.7 does not have either of these keys in confstr_names, so provide\n # hard-coded defaults and assert if the key is in confstr_names but differs.\n # These are defined by POSIX anyway so should never change.\n confstr_names_fallback = OrderedDict([('CS_GNU_LIBC_VERSION', 2),\n ('CS_GNU_LIBPTHREAD_VERSION', 3)])\n\n val = None\n for k, v in iteritems(confstr_names_fallback):\n assert k not in confstr_names or confstr_names[k] == v, (\n \"confstr_names_fallback for %s is %s yet in confstr_names it is %s\"\n \"\" % (k, confstr_names_fallback[k], confstr_names[k])\n )\n try:\n val = str(confstr(v))\n except:\n pass\n else:\n if val:\n break\n\n if not val:\n # Weird, play it safe and assume glibc 2.5\n family, version = 'glibc', '2.5'\n log.warning(\"Failed to detect libc family and version, assuming %s/%s\", family, version)\n return family, version\n family, version = val.split(' ')\n\n # NPTL is just the name of the threading library, even though the\n # version refers to that of uClibc. readlink() can help to try to\n # figure out a better name instead.\n if family == 'NPTL':\n clibs = glob('/lib/libc.so*')\n for clib in clibs:\n clib = readlink(clib)\n if exists(clib):\n if clib.startswith('libuClibc'):\n if version.startswith('0.'):\n family = 'uClibc'\n else:\n family = 'uClibc-ng'\n return family, version\n # This could be some other C library; it is unlikely though.\n family = 'uClibc'\n log.warning(\"Failed to detect non-glibc family, assuming %s (%s)\", family, version)\n return family, version\n return family, version\n", "path": "conda/common/platform.py"}]}
| 1,295 | 143 |
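
A note on the record above: the warning in the conda issue comes from `windll.shell32.IsUserAnAdmin()()`, which calls the integer returned by `IsUserAnAdmin()` a second time and raises `TypeError: 'int' object is not callable`; the broad `except Exception` then logs it with the deprecated `log.warn`, which is what appeared on the prompt. The sketch below is a minimal standalone version of the patched function for illustration only, not the full conda module, and it assumes a Windows host where `ctypes.windll` exists.

```python
from logging import getLogger

log = getLogger(__name__)


def is_admin_on_windows():
    """Return True/False, or 'unknown' when the check itself fails."""
    try:
        from ctypes import windll  # only importable on Windows
        # IsUserAnAdmin() already returns an int; the buggy version's
        # extra () tried to call that int and raised TypeError.
        return windll.shell32.IsUserAnAdmin() != 0
    except ImportError as e:
        log.debug('%r', e)
        return 'unknown'
    except Exception as e:
        # The patch also downgrades log.warn (deprecated) to log.info,
        # so a failure here no longer prints a warning at the prompt.
        log.info('%r', e)
        return 'unknown'
```
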
gh_patches_debug_24678
|
rasdani/github-patches
|
git_diff
|
cloudtools__troposphere-1693
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
implement AWS::DMS changes from May 14, 2020 update
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `troposphere/dms.py`
Content:
```
1 # Copyright (c) 2012-2019, Mark Peek <[email protected]>
2 # All rights reserved.
3 #
4 # See LICENSE file for full license.
5
6 from . import AWSObject, AWSProperty, Tags
7 from .validators import boolean, integer, network_port, positive_integer
8
9
10 CDC = "cdc"
11 FULL_LOAD = "full-load"
12 FULL_LOAD_AND_CDC = "full-load-and-cdc"
13
14
15 class Certificate(AWSObject):
16 resource_type = "AWS::DMS::Certificate"
17
18 props = {
19 'CertificateIdentifier': (basestring, False),
20 'CertificatePem': (basestring, False),
21 'CertificateWallet': (basestring, False),
22 }
23
24
25 class DynamoDbSettings(AWSProperty):
26 props = {
27 'ServiceAccessRoleArn': (basestring, False),
28 }
29
30
31 class ElasticsearchSettings(AWSProperty):
32 props = {
33 'EndpointUri': (basestring, False),
34 'ErrorRetryDuration': (integer, False),
35 'FullLoadErrorPercentage': (integer, False),
36 'ServiceAccessRoleArn': (basestring, False),
37 }
38
39
40 class KinesisSettings(AWSProperty):
41 props = {
42 'MessageFormat': (basestring, False),
43 'ServiceAccessRoleArn': (basestring, False),
44 'StreamArn': (basestring, False),
45 }
46
47
48 class MongoDbSettings(AWSProperty):
49 props = {
50 'AuthMechanism': (basestring, False),
51 'AuthSource': (basestring, False),
52 'AuthType': (basestring, False),
53 'DatabaseName': (basestring, False),
54 'DocsToInvestigate': (basestring, False),
55 'ExtractDocId': (basestring, False),
56 'NestingLevel': (basestring, False),
57 'Password': (basestring, False),
58 'Port': (network_port, False),
59 'ServerName': (basestring, False),
60 'Username': (basestring, False),
61 }
62
63
64 class S3Settings(AWSProperty):
65 props = {
66 'BucketFolder': (basestring, False),
67 'BucketName': (basestring, False),
68 'CompressionType': (basestring, False),
69 'CsvDelimiter': (basestring, False),
70 'CsvRowDelimiter': (basestring, False),
71 'ExternalTableDefinition': (basestring, False),
72 'ServiceAccessRoleArn': (basestring, False),
73 }
74
75
76 class KafkaSettings(AWSProperty):
77 props = {
78 'Broker': (basestring, False),
79 'Topic': (basestring, False),
80 }
81
82
83 class Endpoint(AWSObject):
84 resource_type = "AWS::DMS::Endpoint"
85
86 props = {
87 'CertificateArn': (basestring, False),
88 'DatabaseName': (basestring, False),
89 'DynamoDbSettings': (DynamoDbSettings, False),
90 'ElasticsearchSettings': (ElasticsearchSettings, False),
91 'EndpointIdentifier': (basestring, False),
92 'EndpointType': (basestring, True),
93 'EngineName': (basestring, True),
94 'ExtraConnectionAttributes': (basestring, False),
95 'KafkaSettings': (KafkaSettings, False),
96 'KinesisSettings': (KinesisSettings, False),
97 'KmsKeyId': (basestring, False),
98 'MongoDbSettings': (MongoDbSettings, False),
99 'Password': (basestring, False),
100 'Port': (network_port, False),
101 'S3Settings': (S3Settings, False),
102 'ServerName': (basestring, False),
103 'SslMode': (basestring, False),
104 'Tags': (Tags, False),
105 'Username': (basestring, False),
106 }
107
108
109 class EventSubscription(AWSObject):
110 resource_type = "AWS::DMS::EventSubscription"
111
112 props = {
113 'Enabled': (boolean, False),
114 'EventCategories': ([basestring], False),
115 'SnsTopicArn': (basestring, True),
116 'SourceIds': ([basestring], False),
117 'SourceType': (basestring, False),
118 'SubscriptionName': (basestring, False),
119 'Tags': (Tags, False),
120 }
121
122
123 class ReplicationInstance(AWSObject):
124 resource_type = "AWS::DMS::ReplicationInstance"
125
126 props = {
127 'AllocatedStorage': (integer, False),
128 'AllowMajorVersionUpgrade': (boolean, False),
129 'AutoMinorVersionUpgrade': (boolean, False),
130 'AvailabilityZone': (basestring, False),
131 'EngineVersion': (basestring, False),
132 'KmsKeyId': (basestring, False),
133 'MultiAZ': (boolean, False),
134 'PreferredMaintenanceWindow': (basestring, False),
135 'PubliclyAccessible': (boolean, False),
136 'ReplicationInstanceClass': (basestring, True),
137 'ReplicationInstanceIdentifier': (basestring, False),
138 'ReplicationSubnetGroupIdentifier': (basestring, False),
139 'Tags': (Tags, False),
140 'VpcSecurityGroupIds': ([basestring], False),
141 }
142
143
144 class ReplicationSubnetGroup(AWSObject):
145 resource_type = "AWS::DMS::ReplicationSubnetGroup"
146
147 props = {
148 'ReplicationSubnetGroupDescription': (basestring, True),
149 'ReplicationSubnetGroupIdentifier': (basestring, False),
150 'SubnetIds': ([basestring], True),
151 'Tags': (Tags, False),
152 }
153
154
155 class ReplicationTask(AWSObject):
156 resource_type = "AWS::DMS::ReplicationTask"
157
158 props = {
159 'CdcStartPosition': (basestring, False),
160 'CdcStartTime': (positive_integer, False),
161 'CdcStopPosition': (basestring, False),
162 'MigrationType': (basestring, True),
163 'ReplicationInstanceArn': (basestring, True),
164 'ReplicationTaskIdentifier': (basestring, False),
165 'ReplicationTaskSettings': (basestring, False),
166 'SourceEndpointArn': (basestring, True),
167 'TableMappings': (basestring, True),
168 'Tags': (Tags, False),
169 'TargetEndpointArn': (basestring, True),
170 }
171
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/troposphere/dms.py b/troposphere/dms.py
--- a/troposphere/dms.py
+++ b/troposphere/dms.py
@@ -80,6 +80,18 @@
}
+class NeptuneSettings(AWSProperty):
+ props = {
+ 'ErrorRetryDuration': (integer, False),
+ 'IamAuthEnabled': (boolean, False),
+ 'MaxFileSize': (integer, False),
+ 'MaxRetryCount': (integer, False),
+ 'S3BucketFolder': (basestring, False),
+ 'S3BucketName': (basestring, False),
+ 'ServiceAccessRoleArn': (basestring, False),
+ }
+
+
class Endpoint(AWSObject):
resource_type = "AWS::DMS::Endpoint"
@@ -96,6 +108,7 @@
'KinesisSettings': (KinesisSettings, False),
'KmsKeyId': (basestring, False),
'MongoDbSettings': (MongoDbSettings, False),
+ 'NeptuneSettings': (NeptuneSettings, False),
'Password': (basestring, False),
'Port': (network_port, False),
'S3Settings': (S3Settings, False),
@@ -167,4 +180,5 @@
'TableMappings': (basestring, True),
'Tags': (Tags, False),
'TargetEndpointArn': (basestring, True),
+ 'TaskData': (basestring, True),
}
|
{"golden_diff": "diff --git a/troposphere/dms.py b/troposphere/dms.py\n--- a/troposphere/dms.py\n+++ b/troposphere/dms.py\n@@ -80,6 +80,18 @@\n }\n \n \n+class NeptuneSettings(AWSProperty):\n+ props = {\n+ 'ErrorRetryDuration': (integer, False),\n+ 'IamAuthEnabled': (boolean, False),\n+ 'MaxFileSize': (integer, False),\n+ 'MaxRetryCount': (integer, False),\n+ 'S3BucketFolder': (basestring, False),\n+ 'S3BucketName': (basestring, False),\n+ 'ServiceAccessRoleArn': (basestring, False),\n+ }\n+\n+\n class Endpoint(AWSObject):\n resource_type = \"AWS::DMS::Endpoint\"\n \n@@ -96,6 +108,7 @@\n 'KinesisSettings': (KinesisSettings, False),\n 'KmsKeyId': (basestring, False),\n 'MongoDbSettings': (MongoDbSettings, False),\n+ 'NeptuneSettings': (NeptuneSettings, False),\n 'Password': (basestring, False),\n 'Port': (network_port, False),\n 'S3Settings': (S3Settings, False),\n@@ -167,4 +180,5 @@\n 'TableMappings': (basestring, True),\n 'Tags': (Tags, False),\n 'TargetEndpointArn': (basestring, True),\n+ 'TaskData': (basestring, True),\n }\n", "issue": "implement AWS::DMS changes from May 14, 2020 update\n\n", "before_files": [{"content": "# Copyright (c) 2012-2019, Mark Peek <[email protected]>\n# All rights reserved.\n#\n# See LICENSE file for full license.\n\nfrom . import AWSObject, AWSProperty, Tags\nfrom .validators import boolean, integer, network_port, positive_integer\n\n\nCDC = \"cdc\"\nFULL_LOAD = \"full-load\"\nFULL_LOAD_AND_CDC = \"full-load-and-cdc\"\n\n\nclass Certificate(AWSObject):\n resource_type = \"AWS::DMS::Certificate\"\n\n props = {\n 'CertificateIdentifier': (basestring, False),\n 'CertificatePem': (basestring, False),\n 'CertificateWallet': (basestring, False),\n }\n\n\nclass DynamoDbSettings(AWSProperty):\n props = {\n 'ServiceAccessRoleArn': (basestring, False),\n }\n\n\nclass ElasticsearchSettings(AWSProperty):\n props = {\n 'EndpointUri': (basestring, False),\n 'ErrorRetryDuration': (integer, False),\n 'FullLoadErrorPercentage': (integer, False),\n 'ServiceAccessRoleArn': (basestring, False),\n }\n\n\nclass KinesisSettings(AWSProperty):\n props = {\n 'MessageFormat': (basestring, False),\n 'ServiceAccessRoleArn': (basestring, False),\n 'StreamArn': (basestring, False),\n }\n\n\nclass MongoDbSettings(AWSProperty):\n props = {\n 'AuthMechanism': (basestring, False),\n 'AuthSource': (basestring, False),\n 'AuthType': (basestring, False),\n 'DatabaseName': (basestring, False),\n 'DocsToInvestigate': (basestring, False),\n 'ExtractDocId': (basestring, False),\n 'NestingLevel': (basestring, False),\n 'Password': (basestring, False),\n 'Port': (network_port, False),\n 'ServerName': (basestring, False),\n 'Username': (basestring, False),\n }\n\n\nclass S3Settings(AWSProperty):\n props = {\n 'BucketFolder': (basestring, False),\n 'BucketName': (basestring, False),\n 'CompressionType': (basestring, False),\n 'CsvDelimiter': (basestring, False),\n 'CsvRowDelimiter': (basestring, False),\n 'ExternalTableDefinition': (basestring, False),\n 'ServiceAccessRoleArn': (basestring, False),\n }\n\n\nclass KafkaSettings(AWSProperty):\n props = {\n 'Broker': (basestring, False),\n 'Topic': (basestring, False),\n }\n\n\nclass Endpoint(AWSObject):\n resource_type = \"AWS::DMS::Endpoint\"\n\n props = {\n 'CertificateArn': (basestring, False),\n 'DatabaseName': (basestring, False),\n 'DynamoDbSettings': (DynamoDbSettings, False),\n 'ElasticsearchSettings': (ElasticsearchSettings, False),\n 'EndpointIdentifier': (basestring, False),\n 'EndpointType': (basestring, True),\n 'EngineName': (basestring, 
True),\n 'ExtraConnectionAttributes': (basestring, False),\n 'KafkaSettings': (KafkaSettings, False),\n 'KinesisSettings': (KinesisSettings, False),\n 'KmsKeyId': (basestring, False),\n 'MongoDbSettings': (MongoDbSettings, False),\n 'Password': (basestring, False),\n 'Port': (network_port, False),\n 'S3Settings': (S3Settings, False),\n 'ServerName': (basestring, False),\n 'SslMode': (basestring, False),\n 'Tags': (Tags, False),\n 'Username': (basestring, False),\n }\n\n\nclass EventSubscription(AWSObject):\n resource_type = \"AWS::DMS::EventSubscription\"\n\n props = {\n 'Enabled': (boolean, False),\n 'EventCategories': ([basestring], False),\n 'SnsTopicArn': (basestring, True),\n 'SourceIds': ([basestring], False),\n 'SourceType': (basestring, False),\n 'SubscriptionName': (basestring, False),\n 'Tags': (Tags, False),\n }\n\n\nclass ReplicationInstance(AWSObject):\n resource_type = \"AWS::DMS::ReplicationInstance\"\n\n props = {\n 'AllocatedStorage': (integer, False),\n 'AllowMajorVersionUpgrade': (boolean, False),\n 'AutoMinorVersionUpgrade': (boolean, False),\n 'AvailabilityZone': (basestring, False),\n 'EngineVersion': (basestring, False),\n 'KmsKeyId': (basestring, False),\n 'MultiAZ': (boolean, False),\n 'PreferredMaintenanceWindow': (basestring, False),\n 'PubliclyAccessible': (boolean, False),\n 'ReplicationInstanceClass': (basestring, True),\n 'ReplicationInstanceIdentifier': (basestring, False),\n 'ReplicationSubnetGroupIdentifier': (basestring, False),\n 'Tags': (Tags, False),\n 'VpcSecurityGroupIds': ([basestring], False),\n }\n\n\nclass ReplicationSubnetGroup(AWSObject):\n resource_type = \"AWS::DMS::ReplicationSubnetGroup\"\n\n props = {\n 'ReplicationSubnetGroupDescription': (basestring, True),\n 'ReplicationSubnetGroupIdentifier': (basestring, False),\n 'SubnetIds': ([basestring], True),\n 'Tags': (Tags, False),\n }\n\n\nclass ReplicationTask(AWSObject):\n resource_type = \"AWS::DMS::ReplicationTask\"\n\n props = {\n 'CdcStartPosition': (basestring, False),\n 'CdcStartTime': (positive_integer, False),\n 'CdcStopPosition': (basestring, False),\n 'MigrationType': (basestring, True),\n 'ReplicationInstanceArn': (basestring, True),\n 'ReplicationTaskIdentifier': (basestring, False),\n 'ReplicationTaskSettings': (basestring, False),\n 'SourceEndpointArn': (basestring, True),\n 'TableMappings': (basestring, True),\n 'Tags': (Tags, False),\n 'TargetEndpointArn': (basestring, True),\n }\n", "path": "troposphere/dms.py"}], "after_files": [{"content": "# Copyright (c) 2012-2019, Mark Peek <[email protected]>\n# All rights reserved.\n#\n# See LICENSE file for full license.\n\nfrom . 
import AWSObject, AWSProperty, Tags\nfrom .validators import boolean, integer, network_port, positive_integer\n\n\nCDC = \"cdc\"\nFULL_LOAD = \"full-load\"\nFULL_LOAD_AND_CDC = \"full-load-and-cdc\"\n\n\nclass Certificate(AWSObject):\n resource_type = \"AWS::DMS::Certificate\"\n\n props = {\n 'CertificateIdentifier': (basestring, False),\n 'CertificatePem': (basestring, False),\n 'CertificateWallet': (basestring, False),\n }\n\n\nclass DynamoDbSettings(AWSProperty):\n props = {\n 'ServiceAccessRoleArn': (basestring, False),\n }\n\n\nclass ElasticsearchSettings(AWSProperty):\n props = {\n 'EndpointUri': (basestring, False),\n 'ErrorRetryDuration': (integer, False),\n 'FullLoadErrorPercentage': (integer, False),\n 'ServiceAccessRoleArn': (basestring, False),\n }\n\n\nclass KinesisSettings(AWSProperty):\n props = {\n 'MessageFormat': (basestring, False),\n 'ServiceAccessRoleArn': (basestring, False),\n 'StreamArn': (basestring, False),\n }\n\n\nclass MongoDbSettings(AWSProperty):\n props = {\n 'AuthMechanism': (basestring, False),\n 'AuthSource': (basestring, False),\n 'AuthType': (basestring, False),\n 'DatabaseName': (basestring, False),\n 'DocsToInvestigate': (basestring, False),\n 'ExtractDocId': (basestring, False),\n 'NestingLevel': (basestring, False),\n 'Password': (basestring, False),\n 'Port': (network_port, False),\n 'ServerName': (basestring, False),\n 'Username': (basestring, False),\n }\n\n\nclass S3Settings(AWSProperty):\n props = {\n 'BucketFolder': (basestring, False),\n 'BucketName': (basestring, False),\n 'CompressionType': (basestring, False),\n 'CsvDelimiter': (basestring, False),\n 'CsvRowDelimiter': (basestring, False),\n 'ExternalTableDefinition': (basestring, False),\n 'ServiceAccessRoleArn': (basestring, False),\n }\n\n\nclass KafkaSettings(AWSProperty):\n props = {\n 'Broker': (basestring, False),\n 'Topic': (basestring, False),\n }\n\n\nclass NeptuneSettings(AWSProperty):\n props = {\n 'ErrorRetryDuration': (integer, False),\n 'IamAuthEnabled': (boolean, False),\n 'MaxFileSize': (integer, False),\n 'MaxRetryCount': (integer, False),\n 'S3BucketFolder': (basestring, False),\n 'S3BucketName': (basestring, False),\n 'ServiceAccessRoleArn': (basestring, False),\n }\n\n\nclass Endpoint(AWSObject):\n resource_type = \"AWS::DMS::Endpoint\"\n\n props = {\n 'CertificateArn': (basestring, False),\n 'DatabaseName': (basestring, False),\n 'DynamoDbSettings': (DynamoDbSettings, False),\n 'ElasticsearchSettings': (ElasticsearchSettings, False),\n 'EndpointIdentifier': (basestring, False),\n 'EndpointType': (basestring, True),\n 'EngineName': (basestring, True),\n 'ExtraConnectionAttributes': (basestring, False),\n 'KafkaSettings': (KafkaSettings, False),\n 'KinesisSettings': (KinesisSettings, False),\n 'KmsKeyId': (basestring, False),\n 'MongoDbSettings': (MongoDbSettings, False),\n 'NeptuneSettings': (NeptuneSettings, False),\n 'Password': (basestring, False),\n 'Port': (network_port, False),\n 'S3Settings': (S3Settings, False),\n 'ServerName': (basestring, False),\n 'SslMode': (basestring, False),\n 'Tags': (Tags, False),\n 'Username': (basestring, False),\n }\n\n\nclass EventSubscription(AWSObject):\n resource_type = \"AWS::DMS::EventSubscription\"\n\n props = {\n 'Enabled': (boolean, False),\n 'EventCategories': ([basestring], False),\n 'SnsTopicArn': (basestring, True),\n 'SourceIds': ([basestring], False),\n 'SourceType': (basestring, False),\n 'SubscriptionName': (basestring, False),\n 'Tags': (Tags, False),\n }\n\n\nclass ReplicationInstance(AWSObject):\n resource_type = 
\"AWS::DMS::ReplicationInstance\"\n\n props = {\n 'AllocatedStorage': (integer, False),\n 'AllowMajorVersionUpgrade': (boolean, False),\n 'AutoMinorVersionUpgrade': (boolean, False),\n 'AvailabilityZone': (basestring, False),\n 'EngineVersion': (basestring, False),\n 'KmsKeyId': (basestring, False),\n 'MultiAZ': (boolean, False),\n 'PreferredMaintenanceWindow': (basestring, False),\n 'PubliclyAccessible': (boolean, False),\n 'ReplicationInstanceClass': (basestring, True),\n 'ReplicationInstanceIdentifier': (basestring, False),\n 'ReplicationSubnetGroupIdentifier': (basestring, False),\n 'Tags': (Tags, False),\n 'VpcSecurityGroupIds': ([basestring], False),\n }\n\n\nclass ReplicationSubnetGroup(AWSObject):\n resource_type = \"AWS::DMS::ReplicationSubnetGroup\"\n\n props = {\n 'ReplicationSubnetGroupDescription': (basestring, True),\n 'ReplicationSubnetGroupIdentifier': (basestring, False),\n 'SubnetIds': ([basestring], True),\n 'Tags': (Tags, False),\n }\n\n\nclass ReplicationTask(AWSObject):\n resource_type = \"AWS::DMS::ReplicationTask\"\n\n props = {\n 'CdcStartPosition': (basestring, False),\n 'CdcStartTime': (positive_integer, False),\n 'CdcStopPosition': (basestring, False),\n 'MigrationType': (basestring, True),\n 'ReplicationInstanceArn': (basestring, True),\n 'ReplicationTaskIdentifier': (basestring, False),\n 'ReplicationTaskSettings': (basestring, False),\n 'SourceEndpointArn': (basestring, True),\n 'TableMappings': (basestring, True),\n 'Tags': (Tags, False),\n 'TargetEndpointArn': (basestring, True),\n 'TaskData': (basestring, True),\n }\n", "path": "troposphere/dms.py"}]}
| 2,047 | 337 |
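
The patch above only declares the new `NeptuneSettings` property class and the `TaskData` field; the record never shows them in use. Below is a possible usage sketch, assuming a troposphere release that includes this change. The logical resource name, bucket name, and role ARN are made up for the example.

```python
from troposphere import Template
from troposphere.dms import Endpoint, NeptuneSettings

t = Template()
t.add_resource(Endpoint(
    "NeptuneTarget",  # hypothetical logical name
    EndpointType="target",
    EngineName="neptune",
    NeptuneSettings=NeptuneSettings(
        S3BucketName="my-dms-staging-bucket",  # hypothetical bucket
        S3BucketFolder="neptune-load",
        ServiceAccessRoleArn="arn:aws:iam::123456789012:role/dms-neptune",
        IamAuthEnabled=True,
        MaxFileSize=1048576,
    ),
))

print(t.to_json())
```

`EndpointType` and `EngineName` are the two required Endpoint properties in the file shown above, so the sketch sets both explicitly.
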
gh_patches_debug_2668
|
rasdani/github-patches
|
git_diff
|
facebookresearch__ParlAI-1821
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Obselete download link for CLEVR Dataset
Apparently, the current link to CLEVR in the source code is "https://s3-us-west-1.amazonaws.com/clevr/CLEVR_v1.0.zip" that returns the message "All access to this object has been disabled"
When I try to execute the following line of code
`!python ~/ParlAI/examples/display_data.py -t clevr`
I obtain
```
[creating task(s): clevr]
[building data: /root/ParlAI/data/CLEVR]
[ downloading: https://s3-us-west-1.amazonaws.com/clevr/CLEVR_v1.0.zip to /root/ParlAI/data/CLEVR/CLEVR_v1.0.zip ]
Downloading CLEVR_v1.0.zip: 0.00B [00:00, ?B/s]
unpacking CLEVR_v1.0.zip
Traceback (most recent call last):
File "/root/ParlAI/parlai/core/agents.py", line 819, in _create_task_agents
task_agents = my_module.create_agent(opt)
AttributeError: module 'parlai.tasks.clevr.agents' has no attribute 'create_agent'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/root/ParlAI/examples/display_data.py", line 22, in <module>
display_data(opt)
File "/root/ParlAI/parlai/scripts/display_data.py", line 42, in display_data
world = create_task(opt, agent)
File "/root/ParlAI/parlai/core/worlds.py", line 1151, in create_task
world = create_task_world(opt, user_agents, default_world=default_world)
File "/root/ParlAI/parlai/core/worlds.py", line 1108, in create_task_world
opt, user_agents, default_world=default_world
File "/root/ParlAI/parlai/core/worlds.py", line 1068, in _get_task_world
task_agents = _create_task_agents(opt)
File "/root/ParlAI/parlai/core/agents.py", line 822, in _create_task_agents
return create_task_agent_from_taskname(opt)
File "/root/ParlAI/parlai/core/agents.py", line 776, in create_task_agent_from_taskname
task_agents = teacher_class(opt)
File "/root/ParlAI/parlai/tasks/clevr/agents.py", line 45, in __init__
data_path, self.images_path = _path(opt)
File "/root/ParlAI/parlai/tasks/clevr/agents.py", line 15, in _path
build(opt)
File "/root/ParlAI/parlai/tasks/clevr/build.py", line 28, in build
build_data.untar(dpath, fname)
File "/root/ParlAI/parlai/core/build_data.py", line 180, in untar
shutil.unpack_archive(fullpath, path)
File "/usr/lib/python3.6/shutil.py", line 983, in unpack_archive
func(filename, extract_dir, **kwargs)
File "/usr/lib/python3.6/shutil.py", line 883, in _unpack_zipfile
raise ReadError("%s is not a zip file" % filename)
shutil.ReadError: /root/ParlAI/data/CLEVR/CLEVR_v1.0.zip is not a zip file
```
I found the following working link on CLEVR webpage (https://cs.stanford.edu/people/jcjohns/clevr/):
https://dl.fbaipublicfiles.com/clevr/CLEVR_v1.0.zip
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `parlai/tasks/clevr/build.py`
Content:
```
1 #!/usr/bin/env python3
2
3 # Copyright (c) Facebook, Inc. and its affiliates.
4 # This source code is licensed under the MIT license found in the
5 # LICENSE file in the root directory of this source tree.
6 # Download and build the data if it does not exist.
7
8 import parlai.core.build_data as build_data
9 import os
10
11
12 def build(opt):
13 dpath = os.path.join(opt['datapath'], 'CLEVR')
14 version = 'v1.0'
15
16 if not build_data.built(dpath, version_string=version):
17 print('[building data: ' + dpath + ']')
18 # An older version exists, so remove these outdated files.
19 if build_data.built(dpath):
20 build_data.remove_dir(dpath)
21 build_data.make_dir(dpath)
22
23 # Download the data.
24 fname = 'CLEVR_v1.0.zip'
25 url = 'https://s3-us-west-1.amazonaws.com/clevr/'
26
27 build_data.download(url + fname, dpath, fname)
28 build_data.untar(dpath, fname)
29
30 # Mark the data as built.
31 build_data.mark_done(dpath, version_string=version)
32
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/parlai/tasks/clevr/build.py b/parlai/tasks/clevr/build.py
--- a/parlai/tasks/clevr/build.py
+++ b/parlai/tasks/clevr/build.py
@@ -22,7 +22,7 @@
# Download the data.
fname = 'CLEVR_v1.0.zip'
- url = 'https://s3-us-west-1.amazonaws.com/clevr/'
+ url = 'https://dl.fbaipublicfiles.com/clevr/'
build_data.download(url + fname, dpath, fname)
build_data.untar(dpath, fname)
|
{"golden_diff": "diff --git a/parlai/tasks/clevr/build.py b/parlai/tasks/clevr/build.py\n--- a/parlai/tasks/clevr/build.py\n+++ b/parlai/tasks/clevr/build.py\n@@ -22,7 +22,7 @@\n \n # Download the data.\n fname = 'CLEVR_v1.0.zip'\n- url = 'https://s3-us-west-1.amazonaws.com/clevr/'\n+ url = 'https://dl.fbaipublicfiles.com/clevr/'\n \n build_data.download(url + fname, dpath, fname)\n build_data.untar(dpath, fname)\n", "issue": "Obselete download link for CLEVR Dataset\nApparently, the current link to CLEVR in the source code is \"https://s3-us-west-1.amazonaws.com/clevr/CLEVR_v1.0.zip\" that returns the message \"All access to this object has been disabled\"\r\n\r\nWhen I try to execute the following line of code\r\n\r\n`!python ~/ParlAI/examples/display_data.py -t clevr`\r\n\r\nI obtain\r\n\r\n```\r\n[creating task(s): clevr]\r\n[building data: /root/ParlAI/data/CLEVR]\r\n[ downloading: https://s3-us-west-1.amazonaws.com/clevr/CLEVR_v1.0.zip to /root/ParlAI/data/CLEVR/CLEVR_v1.0.zip ]\r\nDownloading CLEVR_v1.0.zip: 0.00B [00:00, ?B/s]\r\nunpacking CLEVR_v1.0.zip\r\nTraceback (most recent call last):\r\n File \"/root/ParlAI/parlai/core/agents.py\", line 819, in _create_task_agents\r\n task_agents = my_module.create_agent(opt)\r\nAttributeError: module 'parlai.tasks.clevr.agents' has no attribute 'create_agent'\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"/root/ParlAI/examples/display_data.py\", line 22, in <module>\r\n display_data(opt)\r\n File \"/root/ParlAI/parlai/scripts/display_data.py\", line 42, in display_data\r\n world = create_task(opt, agent)\r\n File \"/root/ParlAI/parlai/core/worlds.py\", line 1151, in create_task\r\n world = create_task_world(opt, user_agents, default_world=default_world)\r\n File \"/root/ParlAI/parlai/core/worlds.py\", line 1108, in create_task_world\r\n opt, user_agents, default_world=default_world\r\n File \"/root/ParlAI/parlai/core/worlds.py\", line 1068, in _get_task_world\r\n task_agents = _create_task_agents(opt)\r\n File \"/root/ParlAI/parlai/core/agents.py\", line 822, in _create_task_agents\r\n return create_task_agent_from_taskname(opt)\r\n File \"/root/ParlAI/parlai/core/agents.py\", line 776, in create_task_agent_from_taskname\r\n task_agents = teacher_class(opt)\r\n File \"/root/ParlAI/parlai/tasks/clevr/agents.py\", line 45, in __init__\r\n data_path, self.images_path = _path(opt)\r\n File \"/root/ParlAI/parlai/tasks/clevr/agents.py\", line 15, in _path\r\n build(opt)\r\n File \"/root/ParlAI/parlai/tasks/clevr/build.py\", line 28, in build\r\n build_data.untar(dpath, fname)\r\n File \"/root/ParlAI/parlai/core/build_data.py\", line 180, in untar\r\n shutil.unpack_archive(fullpath, path)\r\n File \"/usr/lib/python3.6/shutil.py\", line 983, in unpack_archive\r\n func(filename, extract_dir, **kwargs)\r\n File \"/usr/lib/python3.6/shutil.py\", line 883, in _unpack_zipfile\r\n raise ReadError(\"%s is not a zip file\" % filename)\r\nshutil.ReadError: /root/ParlAI/data/CLEVR/CLEVR_v1.0.zip is not a zip file\r\n```\r\n\r\nI found the following working link on CLEVR webpage (https://cs.stanford.edu/people/jcjohns/clevr/):\r\n\r\nhttps://dl.fbaipublicfiles.com/clevr/CLEVR_v1.0.zip\n", "before_files": [{"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. 
and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n# Download and build the data if it does not exist.\n\nimport parlai.core.build_data as build_data\nimport os\n\n\ndef build(opt):\n dpath = os.path.join(opt['datapath'], 'CLEVR')\n version = 'v1.0'\n\n if not build_data.built(dpath, version_string=version):\n print('[building data: ' + dpath + ']')\n # An older version exists, so remove these outdated files.\n if build_data.built(dpath):\n build_data.remove_dir(dpath)\n build_data.make_dir(dpath)\n\n # Download the data.\n fname = 'CLEVR_v1.0.zip'\n url = 'https://s3-us-west-1.amazonaws.com/clevr/'\n\n build_data.download(url + fname, dpath, fname)\n build_data.untar(dpath, fname)\n\n # Mark the data as built.\n build_data.mark_done(dpath, version_string=version)\n", "path": "parlai/tasks/clevr/build.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n# Download and build the data if it does not exist.\n\nimport parlai.core.build_data as build_data\nimport os\n\n\ndef build(opt):\n dpath = os.path.join(opt['datapath'], 'CLEVR')\n version = 'v1.0'\n\n if not build_data.built(dpath, version_string=version):\n print('[building data: ' + dpath + ']')\n # An older version exists, so remove these outdated files.\n if build_data.built(dpath):\n build_data.remove_dir(dpath)\n build_data.make_dir(dpath)\n\n # Download the data.\n fname = 'CLEVR_v1.0.zip'\n url = 'https://dl.fbaipublicfiles.com/clevr/'\n\n build_data.download(url + fname, dpath, fname)\n build_data.untar(dpath, fname)\n\n # Mark the data as built.\n build_data.mark_done(dpath, version_string=version)\n", "path": "parlai/tasks/clevr/build.py"}]}
| 1,429 | 145 |
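
The CLEVR fix above is just a URL swap, but the traceback shows how a stale link surfaces: the bucket returns an error page, and the failure only appears later when `shutil.unpack_archive` rejects the file as "not a zip file". The helper below is a small defensive variant sketched for illustration, not taken from ParlAI, that would turn that late failure into an immediate, clearer error.

```python
import os
import shutil
import zipfile


def untar(dpath, fname):
    """Unpack dpath/fname, failing fast if a .zip download is not a zip."""
    fullpath = os.path.join(dpath, fname)
    if fname.endswith('.zip') and not zipfile.is_zipfile(fullpath):
        raise RuntimeError(
            '%s is not a valid zip archive; the download URL may be stale '
            '(the old CLEVR S3 bucket in this record returned an error page).'
            % fullpath
        )
    shutil.unpack_archive(fullpath, dpath)
```
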
gh_patches_debug_33339
|
rasdani/github-patches
|
git_diff
|
pyinstaller__pyinstaller-2111
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
New numpy hook for intel mkl libraries
Hello - anaconda and winpython build numpy against Intel's mkl libraries. Building someone that uses numpy will need to have those libraries.
Here is a winpython hook:
http://stackoverflow.com/a/35853001
and I adapted that for anaconda & python3:
https://github.com/maqifrnswa/scimpy/blob/master/pyinstaller-hooks/hook-numpy.py
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `PyInstaller/hooks/hook-numpy.core.py`
Content:
```
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2013-2016, PyInstaller Development Team.
3 #
4 # Distributed under the terms of the GNU General Public License with exception
5 # for distributing bootloader.
6 #
7 # The full license is in the file COPYING.txt, distributed with this software.
8 #-----------------------------------------------------------------------------
9 # On Windows, numpy depends on a set of dynamically-detemined DLLs, which means
10 # that PyInstaller's static analysis can't find them. See https://github.com/pyinstaller/pyinstaller/issues/1969
11 # for more information. The typical error message: ``Intel MKL FATAL ERROR:
12 # Cannot load mkl_intel_thread.dll.``
13 #
14 # So, include them manually.
15 import os
16 import os.path
17 from PyInstaller.utils.hooks import get_package_paths
18
19 pkg_base, pkg_dir = get_package_paths('numpy.core')
20 # Walk through all files in ``numpy.core``, looking for DLLs.
21 datas = []
22 for f in os.listdir(pkg_dir):
23 extension = os.path.splitext(f)[1]
24 if extension == '.dll':
25 # Produce the tuple ('/abs/path/to/libs/numpy/core/file.dll', '')
26 source = os.path.join(pkg_dir, f)
27 datas.append((source, ''))
28
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/PyInstaller/hooks/hook-numpy.core.py b/PyInstaller/hooks/hook-numpy.core.py
--- a/PyInstaller/hooks/hook-numpy.core.py
+++ b/PyInstaller/hooks/hook-numpy.core.py
@@ -6,22 +6,41 @@
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
-# On Windows, numpy depends on a set of dynamically-detemined DLLs, which means
-# that PyInstaller's static analysis can't find them. See https://github.com/pyinstaller/pyinstaller/issues/1969
-# for more information. The typical error message: ``Intel MKL FATAL ERROR:
-# Cannot load mkl_intel_thread.dll.``
+# If numpy is built with MKL support it depends on a set of libraries loaded
+# at runtime. Since PyInstaller's static analysis can't find them they must be
+# included manually.
#
-# So, include them manually.
+# See
+# https://github.com/pyinstaller/pyinstaller/issues/1881
+# https://github.com/pyinstaller/pyinstaller/issues/1969
+# for more information
import os
import os.path
+import re
from PyInstaller.utils.hooks import get_package_paths
+from PyInstaller import log as logging
+from PyInstaller import compat
+binaries = []
+
+# look for libraries in numpy package path
pkg_base, pkg_dir = get_package_paths('numpy.core')
-# Walk through all files in ``numpy.core``, looking for DLLs.
-datas = []
-for f in os.listdir(pkg_dir):
- extension = os.path.splitext(f)[1]
- if extension == '.dll':
- # Produce the tuple ('/abs/path/to/libs/numpy/core/file.dll', '')
- source = os.path.join(pkg_dir, f)
- datas.append((source, ''))
+re_anylib = re.compile(r'\w+\.(?:dll|so)', re.IGNORECASE)
+dlls_pkg = [f for f in os.listdir(pkg_dir) if re_anylib.match(f)]
+binaries += [(os.path.join(pkg_dir, f), '') for f in dlls_pkg]
+
+# look for MKL libraries in pythons lib directory
+# TODO: check numpy.__config__ if numpy is actually depending on MKL
+# TODO: determine which directories are searched by the os linker
+if compat.is_win:
+ lib_dir = os.path.join(compat.base_prefix, "Library", "bin")
+else:
+ lib_dir = os.path.join(compat.base_prefix, "lib")
+if os.path.isdir(lib_dir):
+ re_mkllib = re.compile(r'^(?:lib)?mkl\w+\.(?:dll|so)', re.IGNORECASE)
+ dlls_mkl = [f for f in os.listdir(lib_dir) if re_mkllib.match(f)]
+ if dlls_mkl:
+ logger = logging.getLogger(__name__)
+ logger.info("MKL libraries found when importing numpy. Adding MKL to binaries")
+ binaries += [(os.path.join(lib_dir, f), '') for f in dlls_mkl]
+
|
{"golden_diff": "diff --git a/PyInstaller/hooks/hook-numpy.core.py b/PyInstaller/hooks/hook-numpy.core.py\n--- a/PyInstaller/hooks/hook-numpy.core.py\n+++ b/PyInstaller/hooks/hook-numpy.core.py\n@@ -6,22 +6,41 @@\n #\n # The full license is in the file COPYING.txt, distributed with this software.\n #-----------------------------------------------------------------------------\n-# On Windows, numpy depends on a set of dynamically-detemined DLLs, which means\n-# that PyInstaller's static analysis can't find them. See https://github.com/pyinstaller/pyinstaller/issues/1969\n-# for more information. The typical error message: ``Intel MKL FATAL ERROR:\n-# Cannot load mkl_intel_thread.dll.``\n+# If numpy is built with MKL support it depends on a set of libraries loaded\n+# at runtime. Since PyInstaller's static analysis can't find them they must be\n+# included manually.\n #\n-# So, include them manually.\n+# See\n+# https://github.com/pyinstaller/pyinstaller/issues/1881\n+# https://github.com/pyinstaller/pyinstaller/issues/1969\n+# for more information\n import os\n import os.path\n+import re\n from PyInstaller.utils.hooks import get_package_paths\n+from PyInstaller import log as logging \n+from PyInstaller import compat\n \n+binaries = []\n+\n+# look for libraries in numpy package path\n pkg_base, pkg_dir = get_package_paths('numpy.core')\n-# Walk through all files in ``numpy.core``, looking for DLLs.\n-datas = []\n-for f in os.listdir(pkg_dir):\n- extension = os.path.splitext(f)[1]\n- if extension == '.dll':\n- # Produce the tuple ('/abs/path/to/libs/numpy/core/file.dll', '')\n- source = os.path.join(pkg_dir, f)\n- datas.append((source, ''))\n+re_anylib = re.compile(r'\\w+\\.(?:dll|so)', re.IGNORECASE)\n+dlls_pkg = [f for f in os.listdir(pkg_dir) if re_anylib.match(f)]\n+binaries += [(os.path.join(pkg_dir, f), '') for f in dlls_pkg]\n+\n+# look for MKL libraries in pythons lib directory\n+# TODO: check numpy.__config__ if numpy is actually depending on MKL\n+# TODO: determine which directories are searched by the os linker\n+if compat.is_win:\n+ lib_dir = os.path.join(compat.base_prefix, \"Library\", \"bin\")\n+else:\n+ lib_dir = os.path.join(compat.base_prefix, \"lib\")\n+if os.path.isdir(lib_dir):\n+ re_mkllib = re.compile(r'^(?:lib)?mkl\\w+\\.(?:dll|so)', re.IGNORECASE)\n+ dlls_mkl = [f for f in os.listdir(lib_dir) if re_mkllib.match(f)]\n+ if dlls_mkl:\n+ logger = logging.getLogger(__name__)\n+ logger.info(\"MKL libraries found when importing numpy. Adding MKL to binaries\")\n+ binaries += [(os.path.join(lib_dir, f), '') for f in dlls_mkl]\n+\n", "issue": "New numpy hook for intel mkl libraries\nHello - anaconda and winpython build numpy against Intel's mkl libraries. 
Building someone that uses numpy will need to have those libraries.\n\nHere is a winpython hook:\nhttp://stackoverflow.com/a/35853001\n\nand I adapted that for anaconda & python3:\nhttps://github.com/maqifrnswa/scimpy/blob/master/pyinstaller-hooks/hook-numpy.py\n\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2013-2016, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n# On Windows, numpy depends on a set of dynamically-detemined DLLs, which means\n# that PyInstaller's static analysis can't find them. See https://github.com/pyinstaller/pyinstaller/issues/1969\n# for more information. The typical error message: ``Intel MKL FATAL ERROR:\n# Cannot load mkl_intel_thread.dll.``\n#\n# So, include them manually.\nimport os\nimport os.path\nfrom PyInstaller.utils.hooks import get_package_paths\n\npkg_base, pkg_dir = get_package_paths('numpy.core')\n# Walk through all files in ``numpy.core``, looking for DLLs.\ndatas = []\nfor f in os.listdir(pkg_dir):\n extension = os.path.splitext(f)[1]\n if extension == '.dll':\n # Produce the tuple ('/abs/path/to/libs/numpy/core/file.dll', '')\n source = os.path.join(pkg_dir, f)\n datas.append((source, ''))\n", "path": "PyInstaller/hooks/hook-numpy.core.py"}], "after_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2013-2016, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n# If numpy is built with MKL support it depends on a set of libraries loaded\n# at runtime. Since PyInstaller's static analysis can't find them they must be\n# included manually.\n#\n# See\n# https://github.com/pyinstaller/pyinstaller/issues/1881\n# https://github.com/pyinstaller/pyinstaller/issues/1969\n# for more information\nimport os\nimport os.path\nimport re\nfrom PyInstaller.utils.hooks import get_package_paths\nfrom PyInstaller import log as logging \nfrom PyInstaller import compat\n\nbinaries = []\n\n# look for libraries in numpy package path\npkg_base, pkg_dir = get_package_paths('numpy.core')\nre_anylib = re.compile(r'\\w+\\.(?:dll|so)', re.IGNORECASE)\ndlls_pkg = [f for f in os.listdir(pkg_dir) if re_anylib.match(f)]\nbinaries += [(os.path.join(pkg_dir, f), '') for f in dlls_pkg]\n\n# look for MKL libraries in pythons lib directory\n# TODO: check numpy.__config__ if numpy is actually depending on MKL\n# TODO: determine which directories are searched by the os linker\nif compat.is_win:\n lib_dir = os.path.join(compat.base_prefix, \"Library\", \"bin\")\nelse:\n lib_dir = os.path.join(compat.base_prefix, \"lib\")\nif os.path.isdir(lib_dir):\n re_mkllib = re.compile(r'^(?:lib)?mkl\\w+\\.(?:dll|so)', re.IGNORECASE)\n dlls_mkl = [f for f in os.listdir(lib_dir) if re_mkllib.match(f)]\n if dlls_mkl:\n logger = logging.getLogger(__name__)\n logger.info(\"MKL libraries found when importing numpy. 
Adding MKL to binaries\")\n binaries += [(os.path.join(lib_dir, f), '') for f in dlls_mkl]\n\n", "path": "PyInstaller/hooks/hook-numpy.core.py"}]}
| 666 | 679 |
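
The patched hook above leaves a TODO: "check numpy.__config__ if numpy is actually depending on MKL". One way that check might look is sketched below. It is a best-effort heuristic and not part of PyInstaller: `numpy.__config__.get_info` exists in older numpy releases, but the module's layout varies across versions, so a negative result here is not conclusive.

```python
import numpy


def numpy_uses_mkl():
    """Heuristic: True if the installed numpy appears to link against MKL."""
    cfg = getattr(numpy, '__config__', None)
    if cfg is None:
        return False
    get_info = getattr(cfg, 'get_info', None)
    if not callable(get_info):
        return False
    # Look for 'mkl' among the libraries recorded for the BLAS/LAPACK builds.
    for section in ('blas_mkl_info', 'blas_opt_info', 'lapack_opt_info'):
        info = get_info(section) or {}
        if any('mkl' in lib for lib in info.get('libraries', [])):
            return True
    return False
```
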
gh_patches_debug_40264
|
rasdani/github-patches
|
git_diff
|
tensorflow__addons-2381
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Ideally the bins in histogram equalization is variable rather than limited to 256
https://github.com/tensorflow/addons/blob/d26e2ed5f68092aed57016a7005ce534b1be3dce/tensorflow_addons/image/color_ops.py#L36
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tensorflow_addons/image/color_ops.py`
Content:
```
1 # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 # ==============================================================================
15 """Color operations.
16 equalize: Equalizes image histogram
17 sharpness: Sharpen image
18 """
19
20 import tensorflow as tf
21
22 from tensorflow_addons.utils.types import TensorLike, Number
23 from tensorflow_addons.image.utils import to_4D_image, from_4D_image
24 from tensorflow_addons.image.compose_ops import blend
25
26 from typing import Optional
27 from functools import partial
28
29
30 def _scale_channel(image: TensorLike, channel: int) -> tf.Tensor:
31 """Scale the data in the channel to implement equalize."""
32 image_dtype = image.dtype
33 image = tf.cast(image[:, :, channel], tf.int32)
34
35 # Compute the histogram of the image channel.
36 histo = tf.histogram_fixed_width(image, [0, 255], nbins=256)
37
38 # For the purposes of computing the step, filter out the nonzeros.
39 nonzero_histo = tf.boolean_mask(histo, histo != 0)
40 step = (tf.reduce_sum(nonzero_histo) - nonzero_histo[-1]) // 255
41
42 # If step is zero, return the original image. Otherwise, build
43 # lut from the full histogram and step and then index from it.
44 if step == 0:
45 result = image
46 else:
47 lut_values = (tf.cumsum(histo, exclusive=True) + (step // 2)) // step
48 lut_values = tf.clip_by_value(lut_values, 0, 255)
49 result = tf.gather(lut_values, image)
50
51 return tf.cast(result, image_dtype)
52
53
54 def _equalize_image(image: TensorLike) -> tf.Tensor:
55 """Implements Equalize function from PIL using TF ops."""
56 image = tf.stack([_scale_channel(image, c) for c in range(image.shape[-1])], -1)
57 return image
58
59
60 @tf.function
61 def equalize(image: TensorLike, name: Optional[str] = None) -> tf.Tensor:
62 """Equalize image(s)
63
64 Args:
65 images: A tensor of shape
66 `(num_images, num_rows, num_columns, num_channels)` (NHWC), or
67 `(num_rows, num_columns, num_channels)` (HWC), or
68 `(num_rows, num_columns)` (HW). The rank must be statically known (the
69 shape is not `TensorShape(None)`).
70 name: The name of the op.
71 Returns:
72 Image(s) with the same type and shape as `images`, equalized.
73 """
74 with tf.name_scope(name or "equalize"):
75 image_dims = tf.rank(image)
76 image = to_4D_image(image)
77 fn = partial(_equalize_image)
78 image = tf.map_fn(fn, image)
79 return from_4D_image(image, image_dims)
80
81
82 def _sharpness_image(image: TensorLike, factor: Number) -> tf.Tensor:
83 """Implements Sharpness function from PIL using TF ops."""
84 orig_image = image
85 image_dtype = image.dtype
86 image_channels = image.shape[-1]
87 image = tf.cast(image, tf.float32)
88
89 # SMOOTH PIL Kernel.
90 kernel = (
91 tf.constant(
92 [[1, 1, 1], [1, 5, 1], [1, 1, 1]], dtype=tf.float32, shape=[3, 3, 1, 1]
93 )
94 / 13.0
95 )
96 kernel = tf.tile(kernel, [1, 1, image_channels, 1])
97
98 # Apply kernel channel-wise.
99 degenerate = tf.nn.depthwise_conv2d(
100 image, kernel, strides=[1, 1, 1, 1], padding="VALID", dilations=[1, 1]
101 )
102 degenerate = tf.cast(degenerate, image_dtype)
103
104 # For the borders of the resulting image, fill in the values of the original image.
105 mask = tf.ones_like(degenerate)
106 padded_mask = tf.pad(mask, [[0, 0], [1, 1], [1, 1], [0, 0]])
107 padded_degenerate = tf.pad(degenerate, [[0, 0], [1, 1], [1, 1], [0, 0]])
108 result = tf.where(tf.equal(padded_mask, 1), padded_degenerate, orig_image)
109
110 # Blend the final result.
111 blended = blend(result, orig_image, factor)
112 return tf.cast(blended, image_dtype)
113
114
115 @tf.function
116 def sharpness(
117 image: TensorLike, factor: Number, name: Optional[str] = None
118 ) -> tf.Tensor:
119 """Change sharpness of image(s).
120
121 Args:
122 image: A tensor of shape
123 `(num_images, num_rows, num_columns, num_channels)` (NHWC), or
124 `(num_rows, num_columns, num_channels)` (HWC)
125 factor: A floating point value or Tensor above 0.0.
126 name: The name of the op.
127 Returns:
128 Image(s) with the same type and shape as `images`, sharper.
129 """
130 with tf.name_scope(name or "sharpness"):
131 image_dims = tf.rank(image)
132 image = to_4D_image(image)
133 image = _sharpness_image(image, factor=factor)
134 return from_4D_image(image, image_dims)
135
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/tensorflow_addons/image/color_ops.py b/tensorflow_addons/image/color_ops.py
--- a/tensorflow_addons/image/color_ops.py
+++ b/tensorflow_addons/image/color_ops.py
@@ -27,17 +27,17 @@
from functools import partial
-def _scale_channel(image: TensorLike, channel: int) -> tf.Tensor:
+def _scale_channel(image: TensorLike, channel: int, bins: int = 256) -> tf.Tensor:
"""Scale the data in the channel to implement equalize."""
image_dtype = image.dtype
image = tf.cast(image[:, :, channel], tf.int32)
# Compute the histogram of the image channel.
- histo = tf.histogram_fixed_width(image, [0, 255], nbins=256)
+ histo = tf.histogram_fixed_width(image, [0, bins - 1], nbins=bins)
# For the purposes of computing the step, filter out the nonzeros.
nonzero_histo = tf.boolean_mask(histo, histo != 0)
- step = (tf.reduce_sum(nonzero_histo) - nonzero_histo[-1]) // 255
+ step = (tf.reduce_sum(nonzero_histo) - nonzero_histo[-1]) // (bins - 1)
# If step is zero, return the original image. Otherwise, build
# lut from the full histogram and step and then index from it.
@@ -45,20 +45,24 @@
result = image
else:
lut_values = (tf.cumsum(histo, exclusive=True) + (step // 2)) // step
- lut_values = tf.clip_by_value(lut_values, 0, 255)
+ lut_values = tf.clip_by_value(lut_values, 0, bins - 1)
result = tf.gather(lut_values, image)
return tf.cast(result, image_dtype)
-def _equalize_image(image: TensorLike) -> tf.Tensor:
+def _equalize_image(image: TensorLike, bins: int = 256) -> tf.Tensor:
"""Implements Equalize function from PIL using TF ops."""
- image = tf.stack([_scale_channel(image, c) for c in range(image.shape[-1])], -1)
+ image = tf.stack(
+ [_scale_channel(image, c, bins) for c in range(image.shape[-1])], -1
+ )
return image
@tf.function
-def equalize(image: TensorLike, name: Optional[str] = None) -> tf.Tensor:
+def equalize(
+ image: TensorLike, bins: int = 256, name: Optional[str] = None
+) -> tf.Tensor:
"""Equalize image(s)
Args:
@@ -67,6 +71,7 @@
`(num_rows, num_columns, num_channels)` (HWC), or
`(num_rows, num_columns)` (HW). The rank must be statically known (the
shape is not `TensorShape(None)`).
+ bins: The number of bins in the histogram.
name: The name of the op.
Returns:
Image(s) with the same type and shape as `images`, equalized.
@@ -75,7 +80,7 @@
image_dims = tf.rank(image)
image = to_4D_image(image)
fn = partial(_equalize_image)
- image = tf.map_fn(fn, image)
+ image = tf.map_fn(lambda x: fn(x, bins), image)
return from_4D_image(image, image_dims)
|
{"golden_diff": "diff --git a/tensorflow_addons/image/color_ops.py b/tensorflow_addons/image/color_ops.py\n--- a/tensorflow_addons/image/color_ops.py\n+++ b/tensorflow_addons/image/color_ops.py\n@@ -27,17 +27,17 @@\n from functools import partial\n \n \n-def _scale_channel(image: TensorLike, channel: int) -> tf.Tensor:\n+def _scale_channel(image: TensorLike, channel: int, bins: int = 256) -> tf.Tensor:\n \"\"\"Scale the data in the channel to implement equalize.\"\"\"\n image_dtype = image.dtype\n image = tf.cast(image[:, :, channel], tf.int32)\n \n # Compute the histogram of the image channel.\n- histo = tf.histogram_fixed_width(image, [0, 255], nbins=256)\n+ histo = tf.histogram_fixed_width(image, [0, bins - 1], nbins=bins)\n \n # For the purposes of computing the step, filter out the nonzeros.\n nonzero_histo = tf.boolean_mask(histo, histo != 0)\n- step = (tf.reduce_sum(nonzero_histo) - nonzero_histo[-1]) // 255\n+ step = (tf.reduce_sum(nonzero_histo) - nonzero_histo[-1]) // (bins - 1)\n \n # If step is zero, return the original image. Otherwise, build\n # lut from the full histogram and step and then index from it.\n@@ -45,20 +45,24 @@\n result = image\n else:\n lut_values = (tf.cumsum(histo, exclusive=True) + (step // 2)) // step\n- lut_values = tf.clip_by_value(lut_values, 0, 255)\n+ lut_values = tf.clip_by_value(lut_values, 0, bins - 1)\n result = tf.gather(lut_values, image)\n \n return tf.cast(result, image_dtype)\n \n \n-def _equalize_image(image: TensorLike) -> tf.Tensor:\n+def _equalize_image(image: TensorLike, bins: int = 256) -> tf.Tensor:\n \"\"\"Implements Equalize function from PIL using TF ops.\"\"\"\n- image = tf.stack([_scale_channel(image, c) for c in range(image.shape[-1])], -1)\n+ image = tf.stack(\n+ [_scale_channel(image, c, bins) for c in range(image.shape[-1])], -1\n+ )\n return image\n \n \n @tf.function\n-def equalize(image: TensorLike, name: Optional[str] = None) -> tf.Tensor:\n+def equalize(\n+ image: TensorLike, bins: int = 256, name: Optional[str] = None\n+) -> tf.Tensor:\n \"\"\"Equalize image(s)\n \n Args:\n@@ -67,6 +71,7 @@\n `(num_rows, num_columns, num_channels)` (HWC), or\n `(num_rows, num_columns)` (HW). The rank must be statically known (the\n shape is not `TensorShape(None)`).\n+ bins: The number of bins in the histogram.\n name: The name of the op.\n Returns:\n Image(s) with the same type and shape as `images`, equalized.\n@@ -75,7 +80,7 @@\n image_dims = tf.rank(image)\n image = to_4D_image(image)\n fn = partial(_equalize_image)\n- image = tf.map_fn(fn, image)\n+ image = tf.map_fn(lambda x: fn(x, bins), image)\n return from_4D_image(image, image_dims)\n", "issue": "Ideally the bins in histogram equalization is variable rather than limited to 256 \nhttps://github.com/tensorflow/addons/blob/d26e2ed5f68092aed57016a7005ce534b1be3dce/tensorflow_addons/image/color_ops.py#L36\n", "before_files": [{"content": "# Copyright 2020 The TensorFlow Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Color operations.\n equalize: Equalizes image histogram\n sharpness: Sharpen image\n\"\"\"\n\nimport tensorflow as tf\n\nfrom tensorflow_addons.utils.types import TensorLike, Number\nfrom tensorflow_addons.image.utils import to_4D_image, from_4D_image\nfrom tensorflow_addons.image.compose_ops import blend\n\nfrom typing import Optional\nfrom functools import partial\n\n\ndef _scale_channel(image: TensorLike, channel: int) -> tf.Tensor:\n \"\"\"Scale the data in the channel to implement equalize.\"\"\"\n image_dtype = image.dtype\n image = tf.cast(image[:, :, channel], tf.int32)\n\n # Compute the histogram of the image channel.\n histo = tf.histogram_fixed_width(image, [0, 255], nbins=256)\n\n # For the purposes of computing the step, filter out the nonzeros.\n nonzero_histo = tf.boolean_mask(histo, histo != 0)\n step = (tf.reduce_sum(nonzero_histo) - nonzero_histo[-1]) // 255\n\n # If step is zero, return the original image. Otherwise, build\n # lut from the full histogram and step and then index from it.\n if step == 0:\n result = image\n else:\n lut_values = (tf.cumsum(histo, exclusive=True) + (step // 2)) // step\n lut_values = tf.clip_by_value(lut_values, 0, 255)\n result = tf.gather(lut_values, image)\n\n return tf.cast(result, image_dtype)\n\n\ndef _equalize_image(image: TensorLike) -> tf.Tensor:\n \"\"\"Implements Equalize function from PIL using TF ops.\"\"\"\n image = tf.stack([_scale_channel(image, c) for c in range(image.shape[-1])], -1)\n return image\n\n\[email protected]\ndef equalize(image: TensorLike, name: Optional[str] = None) -> tf.Tensor:\n \"\"\"Equalize image(s)\n\n Args:\n images: A tensor of shape\n `(num_images, num_rows, num_columns, num_channels)` (NHWC), or\n `(num_rows, num_columns, num_channels)` (HWC), or\n `(num_rows, num_columns)` (HW). 
The rank must be statically known (the\n shape is not `TensorShape(None)`).\n name: The name of the op.\n Returns:\n Image(s) with the same type and shape as `images`, equalized.\n \"\"\"\n with tf.name_scope(name or \"equalize\"):\n image_dims = tf.rank(image)\n image = to_4D_image(image)\n fn = partial(_equalize_image)\n image = tf.map_fn(fn, image)\n return from_4D_image(image, image_dims)\n\n\ndef _sharpness_image(image: TensorLike, factor: Number) -> tf.Tensor:\n \"\"\"Implements Sharpness function from PIL using TF ops.\"\"\"\n orig_image = image\n image_dtype = image.dtype\n image_channels = image.shape[-1]\n image = tf.cast(image, tf.float32)\n\n # SMOOTH PIL Kernel.\n kernel = (\n tf.constant(\n [[1, 1, 1], [1, 5, 1], [1, 1, 1]], dtype=tf.float32, shape=[3, 3, 1, 1]\n )\n / 13.0\n )\n kernel = tf.tile(kernel, [1, 1, image_channels, 1])\n\n # Apply kernel channel-wise.\n degenerate = tf.nn.depthwise_conv2d(\n image, kernel, strides=[1, 1, 1, 1], padding=\"VALID\", dilations=[1, 1]\n )\n degenerate = tf.cast(degenerate, image_dtype)\n\n # For the borders of the resulting image, fill in the values of the original image.\n mask = tf.ones_like(degenerate)\n padded_mask = tf.pad(mask, [[0, 0], [1, 1], [1, 1], [0, 0]])\n padded_degenerate = tf.pad(degenerate, [[0, 0], [1, 1], [1, 1], [0, 0]])\n result = tf.where(tf.equal(padded_mask, 1), padded_degenerate, orig_image)\n\n # Blend the final result.\n blended = blend(result, orig_image, factor)\n return tf.cast(blended, image_dtype)\n\n\[email protected]\ndef sharpness(\n image: TensorLike, factor: Number, name: Optional[str] = None\n) -> tf.Tensor:\n \"\"\"Change sharpness of image(s).\n\n Args:\n image: A tensor of shape\n `(num_images, num_rows, num_columns, num_channels)` (NHWC), or\n `(num_rows, num_columns, num_channels)` (HWC)\n factor: A floating point value or Tensor above 0.0.\n name: The name of the op.\n Returns:\n Image(s) with the same type and shape as `images`, sharper.\n \"\"\"\n with tf.name_scope(name or \"sharpness\"):\n image_dims = tf.rank(image)\n image = to_4D_image(image)\n image = _sharpness_image(image, factor=factor)\n return from_4D_image(image, image_dims)\n", "path": "tensorflow_addons/image/color_ops.py"}], "after_files": [{"content": "# Copyright 2020 The TensorFlow Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Color operations.\n equalize: Equalizes image histogram\n sharpness: Sharpen image\n\"\"\"\n\nimport tensorflow as tf\n\nfrom tensorflow_addons.utils.types import TensorLike, Number\nfrom tensorflow_addons.image.utils import to_4D_image, from_4D_image\nfrom tensorflow_addons.image.compose_ops import blend\n\nfrom typing import Optional\nfrom functools import partial\n\n\ndef _scale_channel(image: TensorLike, channel: int, bins: int = 256) -> tf.Tensor:\n \"\"\"Scale the data in the channel to implement equalize.\"\"\"\n image_dtype = image.dtype\n image = tf.cast(image[:, :, channel], tf.int32)\n\n # Compute the histogram of the image channel.\n histo = tf.histogram_fixed_width(image, [0, bins - 1], nbins=bins)\n\n # For the purposes of computing the step, filter out the nonzeros.\n nonzero_histo = tf.boolean_mask(histo, histo != 0)\n step = (tf.reduce_sum(nonzero_histo) - nonzero_histo[-1]) // (bins - 1)\n\n # If step is zero, return the original image. Otherwise, build\n # lut from the full histogram and step and then index from it.\n if step == 0:\n result = image\n else:\n lut_values = (tf.cumsum(histo, exclusive=True) + (step // 2)) // step\n lut_values = tf.clip_by_value(lut_values, 0, bins - 1)\n result = tf.gather(lut_values, image)\n\n return tf.cast(result, image_dtype)\n\n\ndef _equalize_image(image: TensorLike, bins: int = 256) -> tf.Tensor:\n \"\"\"Implements Equalize function from PIL using TF ops.\"\"\"\n image = tf.stack(\n [_scale_channel(image, c, bins) for c in range(image.shape[-1])], -1\n )\n return image\n\n\[email protected]\ndef equalize(\n image: TensorLike, bins: int = 256, name: Optional[str] = None\n) -> tf.Tensor:\n \"\"\"Equalize image(s)\n\n Args:\n images: A tensor of shape\n `(num_images, num_rows, num_columns, num_channels)` (NHWC), or\n `(num_rows, num_columns, num_channels)` (HWC), or\n `(num_rows, num_columns)` (HW). 
The rank must be statically known (the\n shape is not `TensorShape(None)`).\n bins: The number of bins in the histogram.\n name: The name of the op.\n Returns:\n Image(s) with the same type and shape as `images`, equalized.\n \"\"\"\n with tf.name_scope(name or \"equalize\"):\n image_dims = tf.rank(image)\n image = to_4D_image(image)\n fn = partial(_equalize_image)\n image = tf.map_fn(lambda x: fn(x, bins), image)\n return from_4D_image(image, image_dims)\n\n\ndef _sharpness_image(image: TensorLike, factor: Number) -> tf.Tensor:\n \"\"\"Implements Sharpness function from PIL using TF ops.\"\"\"\n orig_image = image\n image_dtype = image.dtype\n image_channels = image.shape[-1]\n image = tf.cast(image, tf.float32)\n\n # SMOOTH PIL Kernel.\n kernel = (\n tf.constant(\n [[1, 1, 1], [1, 5, 1], [1, 1, 1]], dtype=tf.float32, shape=[3, 3, 1, 1]\n )\n / 13.0\n )\n kernel = tf.tile(kernel, [1, 1, image_channels, 1])\n\n # Apply kernel channel-wise.\n degenerate = tf.nn.depthwise_conv2d(\n image, kernel, strides=[1, 1, 1, 1], padding=\"VALID\", dilations=[1, 1]\n )\n degenerate = tf.cast(degenerate, image_dtype)\n\n # For the borders of the resulting image, fill in the values of the original image.\n mask = tf.ones_like(degenerate)\n padded_mask = tf.pad(mask, [[0, 0], [1, 1], [1, 1], [0, 0]])\n padded_degenerate = tf.pad(degenerate, [[0, 0], [1, 1], [1, 1], [0, 0]])\n result = tf.where(tf.equal(padded_mask, 1), padded_degenerate, orig_image)\n\n # Blend the final result.\n blended = blend(result, orig_image, factor)\n return tf.cast(blended, image_dtype)\n\n\[email protected]\ndef sharpness(\n image: TensorLike, factor: Number, name: Optional[str] = None\n) -> tf.Tensor:\n \"\"\"Change sharpness of image(s).\n\n Args:\n image: A tensor of shape\n `(num_images, num_rows, num_columns, num_channels)` (NHWC), or\n `(num_rows, num_columns, num_channels)` (HWC)\n factor: A floating point value or Tensor above 0.0.\n name: The name of the op.\n Returns:\n Image(s) with the same type and shape as `images`, sharper.\n \"\"\"\n with tf.name_scope(name or \"sharpness\"):\n image_dims = tf.rank(image)\n image = to_4D_image(image)\n image = _sharpness_image(image, factor=factor)\n return from_4D_image(image, image_dims)\n", "path": "tensorflow_addons/image/color_ops.py"}]}
| 1,935 | 801 |
gh_patches_debug_9770
|
rasdani/github-patches
|
git_diff
|
spyder-ide__spyder-14543
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
DeprecationWarning: implicit conversion to integers in spyder/widgets/colors.py:78
When running the test suite, I received the following warning:
```python
<<<PACKAGEDIR>>>/spyder/widgets/colors.py:78: DeprecationWarning: an integer is required (got type float). Implicit conversion to integers using __int__ is deprecated, and may be removed in a future version of Python.
self.lineedit.setMinimumWidth(fm.width(color.name()) * 1.2)
```
Changing this to say
```python
self.lineedit.setMinimumWidth(int(fm.width(color.name()) * 1.2))
```
should be sufficient to fix this.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `spyder/widgets/colors.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright © Spyder Project Contributors
4 # Licensed under the terms of the MIT License
5 # (see spyder/__init__.py for details)
6
7 # Third party imports
8 from qtpy.QtCore import Property, QSize, Signal, Slot
9 from qtpy.QtGui import QColor, QIcon, QPixmap
10 from qtpy.QtWidgets import QColorDialog, QHBoxLayout, QLineEdit, QToolButton
11
12 # Local imports
13 from spyder.py3compat import is_text_string
14
15
16 class ColorButton(QToolButton):
17 """
18 Color choosing push button
19 """
20 colorChanged = Signal(QColor)
21
22 def __init__(self, parent=None):
23 QToolButton.__init__(self, parent)
24 self.setFixedSize(20, 20)
25 self.setIconSize(QSize(12, 12))
26 self.clicked.connect(self.choose_color)
27 self._color = QColor()
28
29 def choose_color(self):
30 color = QColorDialog.getColor(self._color, self.parentWidget(),
31 'Select Color',
32 QColorDialog.ShowAlphaChannel)
33 if color.isValid():
34 self.set_color(color)
35
36 def get_color(self):
37 return self._color
38
39 @Slot(QColor)
40 def set_color(self, color):
41 if color != self._color:
42 self._color = color
43 self.colorChanged.emit(self._color)
44 pixmap = QPixmap(self.iconSize())
45 pixmap.fill(color)
46 self.setIcon(QIcon(pixmap))
47
48 color = Property("QColor", get_color, set_color)
49
50
51 def text_to_qcolor(text):
52 """
53 Create a QColor from specified string
54 Avoid warning from Qt when an invalid QColor is instantiated
55 """
56 color = QColor()
57 text = str(text)
58 if not is_text_string(text):
59 return color
60 if text.startswith('#') and len(text)==7:
61 correct = '#0123456789abcdef'
62 for char in text:
63 if char.lower() not in correct:
64 return color
65 elif text not in list(QColor.colorNames()):
66 return color
67 color.setNamedColor(text)
68 return color
69
70
71 class ColorLayout(QHBoxLayout):
72 """Color-specialized QLineEdit layout"""
73 def __init__(self, color, parent=None):
74 QHBoxLayout.__init__(self)
75 assert isinstance(color, QColor)
76 self.lineedit = QLineEdit(color.name(), parent)
77 fm = self.lineedit.fontMetrics()
78 self.lineedit.setMinimumWidth(fm.width(color.name()) * 1.2)
79 self.lineedit.textChanged.connect(self.update_color)
80 self.addWidget(self.lineedit)
81 self.colorbtn = ColorButton(parent)
82 self.colorbtn.color = color
83 self.colorbtn.colorChanged.connect(self.update_text)
84 self.addWidget(self.colorbtn)
85
86 def update_color(self, text):
87 color = text_to_qcolor(text)
88 if color.isValid():
89 self.colorbtn.color = color
90
91 def update_text(self, color):
92 self.lineedit.setText(color.name())
93
94 def text(self):
95 return self.lineedit.text()
96
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/spyder/widgets/colors.py b/spyder/widgets/colors.py
--- a/spyder/widgets/colors.py
+++ b/spyder/widgets/colors.py
@@ -75,7 +75,7 @@
assert isinstance(color, QColor)
self.lineedit = QLineEdit(color.name(), parent)
fm = self.lineedit.fontMetrics()
- self.lineedit.setMinimumWidth(fm.width(color.name()) * 1.2)
+ self.lineedit.setMinimumWidth(int(fm.width(color.name()) * 1.2))
self.lineedit.textChanged.connect(self.update_color)
self.addWidget(self.lineedit)
self.colorbtn = ColorButton(parent)
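The root cause is a Qt-binding change rather than anything Spyder-specific: newer Python/Qt combinations no longer coerce floats to the `int` that `setMinimumWidth()` expects. A standalone sketch of the pattern, with hypothetical widget contents and independent of Spyder:

```python
from qtpy.QtWidgets import QApplication, QLineEdit

app = QApplication([])
lineedit = QLineEdit("#ff00ff")
fm = lineedit.fontMetrics()
# fm.width() returns an int (Qt5 API; Qt6 renamed it to horizontalAdvance()),
# but the 1.2 scale factor produces a float, which recent bindings reject
# for setMinimumWidth(). Hence the explicit int() cast.
lineedit.setMinimumWidth(int(fm.width(lineedit.text()) * 1.2))
```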
|
{"golden_diff": "diff --git a/spyder/widgets/colors.py b/spyder/widgets/colors.py\n--- a/spyder/widgets/colors.py\n+++ b/spyder/widgets/colors.py\n@@ -75,7 +75,7 @@\n assert isinstance(color, QColor)\r\n self.lineedit = QLineEdit(color.name(), parent)\r\n fm = self.lineedit.fontMetrics()\r\n- self.lineedit.setMinimumWidth(fm.width(color.name()) * 1.2)\r\n+ self.lineedit.setMinimumWidth(int(fm.width(color.name()) * 1.2))\r\n self.lineedit.textChanged.connect(self.update_color)\r\n self.addWidget(self.lineedit)\r\n self.colorbtn = ColorButton(parent)\n", "issue": "DeprecationWarning: implicit conversion to integers in spyder/widgets/colors.py:78\nWhen running the test suite, I received the following warning:\r\n\r\n```python\r\n <<<PACKAGEDIR>>>/spyder/widgets/colors.py:78: DeprecationWarning: an integer is required (got type float). Implicit conversion to integers using __int__ is deprecated, and may be removed in a future version of Python.\r\n self.lineedit.setMinimumWidth(fm.width(color.name()) * 1.2)\r\n```\r\n\r\nChanging this to say\r\n```python\r\n self.lineedit.setMinimumWidth(int(fm.width(color.name()) * 1.2))\r\n```\r\nshould be sufficient to fix this.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\r\n#\r\n# Copyright \u00a9 Spyder Project Contributors\r\n# Licensed under the terms of the MIT License\r\n# (see spyder/__init__.py for details)\r\n\r\n# Third party imports\r\nfrom qtpy.QtCore import Property, QSize, Signal, Slot\r\nfrom qtpy.QtGui import QColor, QIcon, QPixmap\r\nfrom qtpy.QtWidgets import QColorDialog, QHBoxLayout, QLineEdit, QToolButton\r\n\r\n# Local imports\r\nfrom spyder.py3compat import is_text_string\r\n\r\n\r\nclass ColorButton(QToolButton):\r\n \"\"\"\r\n Color choosing push button\r\n \"\"\"\r\n colorChanged = Signal(QColor)\r\n\r\n def __init__(self, parent=None):\r\n QToolButton.__init__(self, parent)\r\n self.setFixedSize(20, 20)\r\n self.setIconSize(QSize(12, 12))\r\n self.clicked.connect(self.choose_color)\r\n self._color = QColor()\r\n\r\n def choose_color(self):\r\n color = QColorDialog.getColor(self._color, self.parentWidget(),\r\n 'Select Color',\r\n QColorDialog.ShowAlphaChannel)\r\n if color.isValid():\r\n self.set_color(color)\r\n\r\n def get_color(self):\r\n return self._color\r\n\r\n @Slot(QColor)\r\n def set_color(self, color):\r\n if color != self._color:\r\n self._color = color\r\n self.colorChanged.emit(self._color)\r\n pixmap = QPixmap(self.iconSize())\r\n pixmap.fill(color)\r\n self.setIcon(QIcon(pixmap))\r\n\r\n color = Property(\"QColor\", get_color, set_color)\r\n\r\n\r\ndef text_to_qcolor(text):\r\n \"\"\"\r\n Create a QColor from specified string\r\n Avoid warning from Qt when an invalid QColor is instantiated\r\n \"\"\"\r\n color = QColor()\r\n text = str(text)\r\n if not is_text_string(text):\r\n return color\r\n if text.startswith('#') and len(text)==7:\r\n correct = '#0123456789abcdef'\r\n for char in text:\r\n if char.lower() not in correct:\r\n return color\r\n elif text not in list(QColor.colorNames()):\r\n return color\r\n color.setNamedColor(text)\r\n return color\r\n\r\n\r\nclass ColorLayout(QHBoxLayout):\r\n \"\"\"Color-specialized QLineEdit layout\"\"\"\r\n def __init__(self, color, parent=None):\r\n QHBoxLayout.__init__(self)\r\n assert isinstance(color, QColor)\r\n self.lineedit = QLineEdit(color.name(), parent)\r\n fm = self.lineedit.fontMetrics()\r\n self.lineedit.setMinimumWidth(fm.width(color.name()) * 1.2)\r\n self.lineedit.textChanged.connect(self.update_color)\r\n self.addWidget(self.lineedit)\r\n 
self.colorbtn = ColorButton(parent)\r\n self.colorbtn.color = color\r\n self.colorbtn.colorChanged.connect(self.update_text)\r\n self.addWidget(self.colorbtn)\r\n\r\n def update_color(self, text):\r\n color = text_to_qcolor(text)\r\n if color.isValid():\r\n self.colorbtn.color = color\r\n\r\n def update_text(self, color):\r\n self.lineedit.setText(color.name())\r\n\r\n def text(self):\r\n return self.lineedit.text()\r\n", "path": "spyder/widgets/colors.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\r\n#\r\n# Copyright \u00a9 Spyder Project Contributors\r\n# Licensed under the terms of the MIT License\r\n# (see spyder/__init__.py for details)\r\n\r\n# Third party imports\r\nfrom qtpy.QtCore import Property, QSize, Signal, Slot\r\nfrom qtpy.QtGui import QColor, QIcon, QPixmap\r\nfrom qtpy.QtWidgets import QColorDialog, QHBoxLayout, QLineEdit, QToolButton\r\n\r\n# Local imports\r\nfrom spyder.py3compat import is_text_string\r\n\r\n\r\nclass ColorButton(QToolButton):\r\n \"\"\"\r\n Color choosing push button\r\n \"\"\"\r\n colorChanged = Signal(QColor)\r\n\r\n def __init__(self, parent=None):\r\n QToolButton.__init__(self, parent)\r\n self.setFixedSize(20, 20)\r\n self.setIconSize(QSize(12, 12))\r\n self.clicked.connect(self.choose_color)\r\n self._color = QColor()\r\n\r\n def choose_color(self):\r\n color = QColorDialog.getColor(self._color, self.parentWidget(),\r\n 'Select Color',\r\n QColorDialog.ShowAlphaChannel)\r\n if color.isValid():\r\n self.set_color(color)\r\n\r\n def get_color(self):\r\n return self._color\r\n\r\n @Slot(QColor)\r\n def set_color(self, color):\r\n if color != self._color:\r\n self._color = color\r\n self.colorChanged.emit(self._color)\r\n pixmap = QPixmap(self.iconSize())\r\n pixmap.fill(color)\r\n self.setIcon(QIcon(pixmap))\r\n\r\n color = Property(\"QColor\", get_color, set_color)\r\n\r\n\r\ndef text_to_qcolor(text):\r\n \"\"\"\r\n Create a QColor from specified string\r\n Avoid warning from Qt when an invalid QColor is instantiated\r\n \"\"\"\r\n color = QColor()\r\n text = str(text)\r\n if not is_text_string(text):\r\n return color\r\n if text.startswith('#') and len(text)==7:\r\n correct = '#0123456789abcdef'\r\n for char in text:\r\n if char.lower() not in correct:\r\n return color\r\n elif text not in list(QColor.colorNames()):\r\n return color\r\n color.setNamedColor(text)\r\n return color\r\n\r\n\r\nclass ColorLayout(QHBoxLayout):\r\n \"\"\"Color-specialized QLineEdit layout\"\"\"\r\n def __init__(self, color, parent=None):\r\n QHBoxLayout.__init__(self)\r\n assert isinstance(color, QColor)\r\n self.lineedit = QLineEdit(color.name(), parent)\r\n fm = self.lineedit.fontMetrics()\r\n self.lineedit.setMinimumWidth(int(fm.width(color.name()) * 1.2))\r\n self.lineedit.textChanged.connect(self.update_color)\r\n self.addWidget(self.lineedit)\r\n self.colorbtn = ColorButton(parent)\r\n self.colorbtn.color = color\r\n self.colorbtn.colorChanged.connect(self.update_text)\r\n self.addWidget(self.colorbtn)\r\n\r\n def update_color(self, text):\r\n color = text_to_qcolor(text)\r\n if color.isValid():\r\n self.colorbtn.color = color\r\n\r\n def update_text(self, color):\r\n self.lineedit.setText(color.name())\r\n\r\n def text(self):\r\n return self.lineedit.text()\r\n", "path": "spyder/widgets/colors.py"}]}
| 1,234 | 139 |
gh_patches_debug_123
|
rasdani/github-patches
|
git_diff
|
ResonantGeoData__ResonantGeoData-455
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Improve VTK.js 3D Viewer
After #406 is merged, we should improve the 3D viewer. Basically, use [this example](https://kitware.github.io/vtk-js/examples/GeometryViewer.html)
Things we should have:
- [x] drop-down menu to change the scalar array
- [x] Scalar bar
- [x] Representation style
- [x] Better background color choice (likely black)
- [x] Point size slider
- [x] Support RGB colors
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `example_project/rgd_example/settings.py`
Content:
```
1 from rgd_testing_utils.settings import * # noqa
2
3 INSTALLED_APPS += [ # noqa
4 'rgd_3d',
5 'rgd_fmv',
6 'rgd_geometry',
7 'rgd_imagery',
8 # Swagger
9 'drf_yasg',
10 'django_extensions',
11 ]
12
13 ROOT_URLCONF = 'rgd_example.urls'
14 WSGI_APPLICATION = 'rgd_example.wsgi.application'
15
16
17 # Swagger
18 REFETCH_SCHEMA_WITH_AUTH = True
19 REFETCH_SCHEMA_ON_LOGOUT = True
20 OPERATIONS_SORTER = 'alpha'
21 DEEP_LINKING = True
22
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/example_project/rgd_example/settings.py b/example_project/rgd_example/settings.py
--- a/example_project/rgd_example/settings.py
+++ b/example_project/rgd_example/settings.py
@@ -19,3 +19,5 @@
REFETCH_SCHEMA_ON_LOGOUT = True
OPERATIONS_SORTER = 'alpha'
DEEP_LINKING = True
+
+STATIC_URL = '/static/'
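The connection to the 3D viewer is that the improved VTK.js frontend needs static assets, and the example project's settings never defined a static-URL prefix. With `STATIC_URL` in place, Django can resolve asset URLs in the usual way. A minimal sketch, where the asset path is hypothetical:

```python
import django
from django.conf import settings

settings.configure(STATIC_URL='/static/')  # stand-in for the project settings
django.setup()

from django.templatetags.static import static
print(static('rgd_3d/GeometryViewer.js'))  # -> /static/rgd_3d/GeometryViewer.js
```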
|
{"golden_diff": "diff --git a/example_project/rgd_example/settings.py b/example_project/rgd_example/settings.py\n--- a/example_project/rgd_example/settings.py\n+++ b/example_project/rgd_example/settings.py\n@@ -19,3 +19,5 @@\n REFETCH_SCHEMA_ON_LOGOUT = True\n OPERATIONS_SORTER = 'alpha'\n DEEP_LINKING = True\n+\n+STATIC_URL = '/static/'\n", "issue": "Improve VTK.js 3D Viewer\nAfter #406 is merged, we should improve the 3D viewer. Basically, use [this example](https://kitware.github.io/vtk-js/examples/GeometryViewer.html)\r\n\r\nThings we should have:\r\n\r\n- [x] drop-down menu to change the scalar array\r\n- [x] Scalar bar\r\n- [x] Representation style\r\n- [x] Better background color choice (likely black)\r\n- [x] Point size slider\r\n- [x] Support RGB colors\n", "before_files": [{"content": "from rgd_testing_utils.settings import * # noqa\n\nINSTALLED_APPS += [ # noqa\n 'rgd_3d',\n 'rgd_fmv',\n 'rgd_geometry',\n 'rgd_imagery',\n # Swagger\n 'drf_yasg',\n 'django_extensions',\n]\n\nROOT_URLCONF = 'rgd_example.urls'\nWSGI_APPLICATION = 'rgd_example.wsgi.application'\n\n\n# Swagger\nREFETCH_SCHEMA_WITH_AUTH = True\nREFETCH_SCHEMA_ON_LOGOUT = True\nOPERATIONS_SORTER = 'alpha'\nDEEP_LINKING = True\n", "path": "example_project/rgd_example/settings.py"}], "after_files": [{"content": "from rgd_testing_utils.settings import * # noqa\n\nINSTALLED_APPS += [ # noqa\n 'rgd_3d',\n 'rgd_fmv',\n 'rgd_geometry',\n 'rgd_imagery',\n # Swagger\n 'drf_yasg',\n 'django_extensions',\n]\n\nROOT_URLCONF = 'rgd_example.urls'\nWSGI_APPLICATION = 'rgd_example.wsgi.application'\n\n\n# Swagger\nREFETCH_SCHEMA_WITH_AUTH = True\nREFETCH_SCHEMA_ON_LOGOUT = True\nOPERATIONS_SORTER = 'alpha'\nDEEP_LINKING = True\n\nSTATIC_URL = '/static/'\n", "path": "example_project/rgd_example/settings.py"}]}
| 535 | 88 |
gh_patches_debug_6251
|
rasdani/github-patches
|
git_diff
|
searxng__searxng-83
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug: engine "archive is" reports HTTP 404 / Not found
**Version of SearXNG, commit number if you are using on master branch and stipulate if you forked SearXNG**
1970d28a
**Technical report**
Error
* Error: httpx.HTTPStatusError
* Percentage: 100
* Parameters: `('404', 'Not Found', 'archive.is')`
* File name: `searx/search/processors/online.py:99`
* Function: `_send_http_request`
* Code: `response = req(params['url'], **request_args)`
--- END ISSUE ---
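For context on the traceback: SearXNG's online processor raises `httpx.HTTPStatusError` via `raise_for_status()` when an engine's response is not OK, and archive.is appears to answer the engine's request with a redirect or 404 instead of a result page. A standalone sketch of that failure mode, assuming a recent `httpx`; the URL is hypothetical:

```python
import httpx

response = httpx.get("https://archive.ph/some-missing-page", follow_redirects=False)
try:
    response.raise_for_status()        # the online processor performs this check
except httpx.HTTPStatusError as exc:   # reported as ('404', 'Not Found', 'archive.is')
    print(exc.response.status_code, exc.response.reason_phrase)
```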
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `searx/engines/xpath.py`
Content:
```
1 # SPDX-License-Identifier: AGPL-3.0-or-later
2
3 from lxml import html
4 from urllib.parse import urlencode
5 from searx.utils import extract_text, extract_url, eval_xpath, eval_xpath_list
6
7 search_url = None
8 url_xpath = None
9 content_xpath = None
10 title_xpath = None
11 thumbnail_xpath = False
12 paging = False
13 suggestion_xpath = ''
14 results_xpath = ''
15 cached_xpath = ''
16 cached_url = ''
17
18 # parameters for engines with paging support
19 #
20 # number of results on each page
21 # (only needed if the site requires not a page number, but an offset)
22 page_size = 1
23 # number of the first page (usually 0 or 1)
24 first_page_num = 1
25
26
27 def request(query, params):
28 query = urlencode({'q': query})[2:]
29
30 fp = {'query': query}
31 if paging and search_url.find('{pageno}') >= 0:
32 fp['pageno'] = (params['pageno'] - 1) * page_size + first_page_num
33
34 params['url'] = search_url.format(**fp)
35 params['query'] = query
36
37 return params
38
39
40 def response(resp):
41 results = []
42 dom = html.fromstring(resp.text)
43 is_onion = True if 'onions' in categories else False # pylint: disable=undefined-variable
44
45 if results_xpath:
46 for result in eval_xpath_list(dom, results_xpath):
47 url = extract_url(eval_xpath_list(result, url_xpath, min_len=1), search_url)
48 title = extract_text(eval_xpath_list(result, title_xpath, min_len=1))
49 content = extract_text(eval_xpath_list(result, content_xpath, min_len=1))
50 tmp_result = {'url': url, 'title': title, 'content': content}
51
52 # add thumbnail if available
53 if thumbnail_xpath:
54 thumbnail_xpath_result = eval_xpath_list(result, thumbnail_xpath)
55 if len(thumbnail_xpath_result) > 0:
56 tmp_result['img_src'] = extract_url(thumbnail_xpath_result, search_url)
57
58 # add alternative cached url if available
59 if cached_xpath:
60 tmp_result['cached_url'] = cached_url\
61 + extract_text(eval_xpath_list(result, cached_xpath, min_len=1))
62
63 if is_onion:
64 tmp_result['is_onion'] = True
65
66 results.append(tmp_result)
67 else:
68 if cached_xpath:
69 for url, title, content, cached in zip(
70 (extract_url(x, search_url) for
71 x in eval_xpath_list(dom, url_xpath)),
72 map(extract_text, eval_xpath_list(dom, title_xpath)),
73 map(extract_text, eval_xpath_list(dom, content_xpath)),
74 map(extract_text, eval_xpath_list(dom, cached_xpath))
75 ):
76 results.append({'url': url, 'title': title, 'content': content,
77 'cached_url': cached_url + cached, 'is_onion': is_onion})
78 else:
79 for url, title, content in zip(
80 (extract_url(x, search_url) for
81 x in eval_xpath_list(dom, url_xpath)),
82 map(extract_text, eval_xpath_list(dom, title_xpath)),
83 map(extract_text, eval_xpath_list(dom, content_xpath))
84 ):
85 results.append({'url': url, 'title': title, 'content': content, 'is_onion': is_onion})
86
87 if not suggestion_xpath:
88 return results
89 for suggestion in eval_xpath(dom, suggestion_xpath):
90 results.append({'suggestion': extract_text(suggestion)})
91 return results
92
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/searx/engines/xpath.py b/searx/engines/xpath.py
--- a/searx/engines/xpath.py
+++ b/searx/engines/xpath.py
@@ -14,6 +14,7 @@
results_xpath = ''
cached_xpath = ''
cached_url = ''
+soft_max_redirects = 0
# parameters for engines with paging support
#
@@ -33,6 +34,7 @@
params['url'] = search_url.format(**fp)
params['query'] = query
+ params['soft_max_redirects'] = soft_max_redirects
return params
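With the patch applied, any xpath-based engine can declare a redirect budget at module level and have `request()` forward it to the network layer. A minimal sketch; the search URL is a placeholder and a SearXNG checkout is assumed on `sys.path`:

```python
import searx.engines.xpath as xpath_engine

xpath_engine.search_url = 'https://archive.example/{query}'  # placeholder
xpath_engine.soft_max_redirects = 1  # tolerate one redirect before erroring

params = xpath_engine.request('test query', {'pageno': 1})
print(params['url'])                 # https://archive.example/test+query
print(params['soft_max_redirects'])  # 1
```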
|
{"golden_diff": "diff --git a/searx/engines/xpath.py b/searx/engines/xpath.py\n--- a/searx/engines/xpath.py\n+++ b/searx/engines/xpath.py\n@@ -14,6 +14,7 @@\n results_xpath = ''\n cached_xpath = ''\n cached_url = ''\n+soft_max_redirects = 0\n \n # parameters for engines with paging support\n #\n@@ -33,6 +34,7 @@\n \n params['url'] = search_url.format(**fp)\n params['query'] = query\n+ params['soft_max_redirects'] = soft_max_redirects\n \n return params\n", "issue": "Bug: engine \"archive is\" reports HTTP 404 / Not found\n**Version of SearXNG, commit number if you are using on master branch and stipulate if you forked SearXNG**\r\n\r\n1970d28a\r\n\r\n**Technical report**\r\n\r\nError\r\n * Error: httpx.HTTPStatusError\r\n * Percentage: 100\r\n * Parameters: `('404', 'Not Found', 'archive.is')`\r\n * File name: `searx/search/processors/online.py:99`\r\n * Function: `_send_http_request`\r\n * Code: `response = req(params['url'], **request_args)`\r\n\r\n\n", "before_files": [{"content": "# SPDX-License-Identifier: AGPL-3.0-or-later\n\nfrom lxml import html\nfrom urllib.parse import urlencode\nfrom searx.utils import extract_text, extract_url, eval_xpath, eval_xpath_list\n\nsearch_url = None\nurl_xpath = None\ncontent_xpath = None\ntitle_xpath = None\nthumbnail_xpath = False\npaging = False\nsuggestion_xpath = ''\nresults_xpath = ''\ncached_xpath = ''\ncached_url = ''\n\n# parameters for engines with paging support\n#\n# number of results on each page\n# (only needed if the site requires not a page number, but an offset)\npage_size = 1\n# number of the first page (usually 0 or 1)\nfirst_page_num = 1\n\n\ndef request(query, params):\n query = urlencode({'q': query})[2:]\n\n fp = {'query': query}\n if paging and search_url.find('{pageno}') >= 0:\n fp['pageno'] = (params['pageno'] - 1) * page_size + first_page_num\n\n params['url'] = search_url.format(**fp)\n params['query'] = query\n\n return params\n\n\ndef response(resp):\n results = []\n dom = html.fromstring(resp.text)\n is_onion = True if 'onions' in categories else False # pylint: disable=undefined-variable\n\n if results_xpath:\n for result in eval_xpath_list(dom, results_xpath):\n url = extract_url(eval_xpath_list(result, url_xpath, min_len=1), search_url)\n title = extract_text(eval_xpath_list(result, title_xpath, min_len=1))\n content = extract_text(eval_xpath_list(result, content_xpath, min_len=1))\n tmp_result = {'url': url, 'title': title, 'content': content}\n\n # add thumbnail if available\n if thumbnail_xpath:\n thumbnail_xpath_result = eval_xpath_list(result, thumbnail_xpath)\n if len(thumbnail_xpath_result) > 0:\n tmp_result['img_src'] = extract_url(thumbnail_xpath_result, search_url)\n\n # add alternative cached url if available\n if cached_xpath:\n tmp_result['cached_url'] = cached_url\\\n + extract_text(eval_xpath_list(result, cached_xpath, min_len=1))\n\n if is_onion:\n tmp_result['is_onion'] = True\n\n results.append(tmp_result)\n else:\n if cached_xpath:\n for url, title, content, cached in zip(\n (extract_url(x, search_url) for\n x in eval_xpath_list(dom, url_xpath)),\n map(extract_text, eval_xpath_list(dom, title_xpath)),\n map(extract_text, eval_xpath_list(dom, content_xpath)),\n map(extract_text, eval_xpath_list(dom, cached_xpath))\n ):\n results.append({'url': url, 'title': title, 'content': content,\n 'cached_url': cached_url + cached, 'is_onion': is_onion})\n else:\n for url, title, content in zip(\n (extract_url(x, search_url) for\n x in eval_xpath_list(dom, url_xpath)),\n map(extract_text, eval_xpath_list(dom, 
title_xpath)),\n map(extract_text, eval_xpath_list(dom, content_xpath))\n ):\n results.append({'url': url, 'title': title, 'content': content, 'is_onion': is_onion})\n\n if not suggestion_xpath:\n return results\n for suggestion in eval_xpath(dom, suggestion_xpath):\n results.append({'suggestion': extract_text(suggestion)})\n return results\n", "path": "searx/engines/xpath.py"}], "after_files": [{"content": "# SPDX-License-Identifier: AGPL-3.0-or-later\n\nfrom lxml import html\nfrom urllib.parse import urlencode\nfrom searx.utils import extract_text, extract_url, eval_xpath, eval_xpath_list\n\nsearch_url = None\nurl_xpath = None\ncontent_xpath = None\ntitle_xpath = None\nthumbnail_xpath = False\npaging = False\nsuggestion_xpath = ''\nresults_xpath = ''\ncached_xpath = ''\ncached_url = ''\nsoft_max_redirects = 0\n\n# parameters for engines with paging support\n#\n# number of results on each page\n# (only needed if the site requires not a page number, but an offset)\npage_size = 1\n# number of the first page (usually 0 or 1)\nfirst_page_num = 1\n\n\ndef request(query, params):\n query = urlencode({'q': query})[2:]\n\n fp = {'query': query}\n if paging and search_url.find('{pageno}') >= 0:\n fp['pageno'] = (params['pageno'] - 1) * page_size + first_page_num\n\n params['url'] = search_url.format(**fp)\n params['query'] = query\n params['soft_max_redirects'] = soft_max_redirects\n\n return params\n\n\ndef response(resp):\n results = []\n dom = html.fromstring(resp.text)\n is_onion = True if 'onions' in categories else False # pylint: disable=undefined-variable\n\n if results_xpath:\n for result in eval_xpath_list(dom, results_xpath):\n url = extract_url(eval_xpath_list(result, url_xpath, min_len=1), search_url)\n title = extract_text(eval_xpath_list(result, title_xpath, min_len=1))\n content = extract_text(eval_xpath_list(result, content_xpath, min_len=1))\n tmp_result = {'url': url, 'title': title, 'content': content}\n\n # add thumbnail if available\n if thumbnail_xpath:\n thumbnail_xpath_result = eval_xpath_list(result, thumbnail_xpath)\n if len(thumbnail_xpath_result) > 0:\n tmp_result['img_src'] = extract_url(thumbnail_xpath_result, search_url)\n\n # add alternative cached url if available\n if cached_xpath:\n tmp_result['cached_url'] = cached_url\\\n + extract_text(eval_xpath_list(result, cached_xpath, min_len=1))\n\n if is_onion:\n tmp_result['is_onion'] = True\n\n results.append(tmp_result)\n else:\n if cached_xpath:\n for url, title, content, cached in zip(\n (extract_url(x, search_url) for\n x in eval_xpath_list(dom, url_xpath)),\n map(extract_text, eval_xpath_list(dom, title_xpath)),\n map(extract_text, eval_xpath_list(dom, content_xpath)),\n map(extract_text, eval_xpath_list(dom, cached_xpath))\n ):\n results.append({'url': url, 'title': title, 'content': content,\n 'cached_url': cached_url + cached, 'is_onion': is_onion})\n else:\n for url, title, content in zip(\n (extract_url(x, search_url) for\n x in eval_xpath_list(dom, url_xpath)),\n map(extract_text, eval_xpath_list(dom, title_xpath)),\n map(extract_text, eval_xpath_list(dom, content_xpath))\n ):\n results.append({'url': url, 'title': title, 'content': content, 'is_onion': is_onion})\n\n if not suggestion_xpath:\n return results\n for suggestion in eval_xpath(dom, suggestion_xpath):\n results.append({'suggestion': extract_text(suggestion)})\n return results\n", "path": "searx/engines/xpath.py"}]}
| 1,349 | 142 |
gh_patches_debug_40813
|
rasdani/github-patches
|
git_diff
|
sublimelsp__LSP-1982
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
side_by_side should preview files in a side view
**Describe the bug**
When using the `side_by_side: true` option for commands that support it, if the command opens a quick panel with multiple items, changing the selection previews each item in the main view; only pressing Enter to select an item opens it in the side view. I would expect it to also show previews in the side view, like native ST functionality does (for example `shift+primary+f12`).
**To Reproduce**
Steps to reproduce the behavior:
1. Set up keybinding like:
```
{
"command": "lsp_symbol_type_definition",
"keys": ["f13"],
"args": {"side_by_side": true},
"context": [
{
"key": "lsp.session_with_capability",
"operator": "equal",
"operand": "typeDefinitionProvider"
},
{
"key": "auto_complete_visible",
"operator": "equal",
"operand": false
}
]
},
```
2. Press F13 on some symbol that is referenced from multiple places
**Expected behavior**
Changing selection in quick panel should preview the file in a side by side view.
**Environment (please complete the following information):**
- OS: macOS
- Sublime Text version: 4134
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `plugin/locationpicker.py`
Content:
```
1 from .core.logging import debug
2 from .core.protocol import DocumentUri, Location, Position
3 from .core.protocol import LocationLink
4 from .core.sessions import Session
5 from .core.typing import Union, List, Optional, Tuple
6 from .core.views import get_uri_and_position_from_location
7 from .core.views import location_to_human_readable
8 from .core.views import to_encoded_filename
9 import functools
10 import sublime
11 import weakref
12
13
14 def open_location_async(session: Session, location: Union[Location, LocationLink], side_by_side: bool) -> None:
15 flags = sublime.ENCODED_POSITION
16 if side_by_side:
17 flags |= sublime.ADD_TO_SELECTION | sublime.SEMI_TRANSIENT
18
19 def check_success_async(view: Optional[sublime.View]) -> None:
20 if not view:
21 sublime.error_message("Unable to open URI")
22
23 session.open_location_async(location, flags).then(check_success_async)
24
25
26 def open_basic_file(
27 session: Session,
28 uri: str,
29 position: Position,
30 flags: int = 0,
31 group: Optional[int] = None
32 ) -> None:
33 filename = session.config.map_server_uri_to_client_path(uri)
34 if group is None:
35 group = session.window.active_group()
36 session.window.open_file(to_encoded_filename(filename, position), flags=flags, group=group)
37
38
39 class LocationPicker:
40
41 def __init__(
42 self,
43 view: sublime.View,
44 session: Session,
45 locations: Union[List[Location], List[LocationLink]],
46 side_by_side: bool
47 ) -> None:
48 self._view = view
49 window = view.window()
50 if not window:
51 raise ValueError("missing window")
52 self._window = window
53 self._weaksession = weakref.ref(session)
54 self._side_by_side = side_by_side
55 self._items = locations
56 manager = session.manager()
57 base_dir = manager.get_project_path(view.file_name() or "") if manager else None
58 self._window.show_quick_panel(
59 items=[location_to_human_readable(session.config, base_dir, location) for location in locations],
60 on_select=self._select_entry,
61 on_highlight=self._highlight_entry,
62 flags=sublime.KEEP_OPEN_ON_FOCUS_LOST
63 )
64
65 def _unpack(self, index: int) -> Tuple[Optional[Session], Union[Location, LocationLink], DocumentUri, Position]:
66 location = self._items[index]
67 uri, position = get_uri_and_position_from_location(location)
68 return self._weaksession(), location, uri, position
69
70 def _select_entry(self, index: int) -> None:
71 if index >= 0 and self._view.is_valid():
72 session, location, uri, position = self._unpack(index)
73 if not session:
74 return
75 # Note: this has to run on the main thread (and not via open_location_async)
76 # otherwise the bevior feels weird. It's the only reason why open_basic_file exists.
77 if uri.startswith("file:"):
78 flags = sublime.ENCODED_POSITION
79 if self._side_by_side:
80 flags |= sublime.ADD_TO_SELECTION | sublime.SEMI_TRANSIENT
81 open_basic_file(session, uri, position, flags)
82 else:
83 sublime.set_timeout_async(functools.partial(open_location_async, session, location, self._side_by_side))
84 else:
85 self._window.focus_view(self._view)
86
87 def _highlight_entry(self, index: int) -> None:
88 session, _, uri, position = self._unpack(index)
89 if not session:
90 return
91 if uri.startswith("file:"):
92 open_basic_file(session, uri, position, sublime.TRANSIENT | sublime.ENCODED_POSITION)
93 else:
94 # TODO: Preview non-file uris?
95 debug("no preview for", uri)
96
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/plugin/locationpicker.py b/plugin/locationpicker.py
--- a/plugin/locationpicker.py
+++ b/plugin/locationpicker.py
@@ -29,11 +29,11 @@
position: Position,
flags: int = 0,
group: Optional[int] = None
-) -> None:
+) -> sublime.View:
filename = session.config.map_server_uri_to_client_path(uri)
if group is None:
group = session.window.active_group()
- session.window.open_file(to_encoded_filename(filename, position), flags=flags, group=group)
+ return session.window.open_file(to_encoded_filename(filename, position), flags=flags, group=group)
class LocationPicker:
@@ -53,6 +53,7 @@
self._weaksession = weakref.ref(session)
self._side_by_side = side_by_side
self._items = locations
+ self._highlighted_view = None # type: Optional[sublime.View]
manager = session.manager()
base_dir = manager.get_project_path(view.file_name() or "") if manager else None
self._window.show_quick_panel(
@@ -76,20 +77,35 @@
# otherwise the bevior feels weird. It's the only reason why open_basic_file exists.
if uri.startswith("file:"):
flags = sublime.ENCODED_POSITION
- if self._side_by_side:
- flags |= sublime.ADD_TO_SELECTION | sublime.SEMI_TRANSIENT
- open_basic_file(session, uri, position, flags)
+ if not self._side_by_side:
+ open_basic_file(session, uri, position, flags)
else:
sublime.set_timeout_async(functools.partial(open_location_async, session, location, self._side_by_side))
else:
self._window.focus_view(self._view)
+ # When in side-by-side mode close the current highlighted
+ # sheet upon canceling if the sheet is semi-transient
+ if self._side_by_side and self._highlighted_view:
+ sheet = self._highlighted_view.sheet()
+ if sheet and sheet.is_semi_transient():
+ self._highlighted_view.close()
def _highlight_entry(self, index: int) -> None:
session, _, uri, position = self._unpack(index)
if not session:
return
if uri.startswith("file:"):
- open_basic_file(session, uri, position, sublime.TRANSIENT | sublime.ENCODED_POSITION)
+ flags = sublime.ENCODED_POSITION | sublime.FORCE_GROUP
+ if self._side_by_side:
+ if self._highlighted_view and self._highlighted_view.is_valid():
+ # Replacing the MRU is done relative to the current highlighted sheet
+ self._window.focus_view(self._highlighted_view)
+ flags |= sublime.REPLACE_MRU | sublime.SEMI_TRANSIENT
+ else:
+ flags |= sublime.ADD_TO_SELECTION | sublime.SEMI_TRANSIENT
+ else:
+ flags |= sublime.TRANSIENT
+ self._highlighted_view = open_basic_file(session, uri, position, flags, self._window.active_group())
else:
# TODO: Preview non-file uris?
debug("no preview for", uri)
|
{"golden_diff": "diff --git a/plugin/locationpicker.py b/plugin/locationpicker.py\n--- a/plugin/locationpicker.py\n+++ b/plugin/locationpicker.py\n@@ -29,11 +29,11 @@\n position: Position,\n flags: int = 0,\n group: Optional[int] = None\n-) -> None:\n+) -> sublime.View:\n filename = session.config.map_server_uri_to_client_path(uri)\n if group is None:\n group = session.window.active_group()\n- session.window.open_file(to_encoded_filename(filename, position), flags=flags, group=group)\n+ return session.window.open_file(to_encoded_filename(filename, position), flags=flags, group=group)\n \n \n class LocationPicker:\n@@ -53,6 +53,7 @@\n self._weaksession = weakref.ref(session)\n self._side_by_side = side_by_side\n self._items = locations\n+ self._highlighted_view = None # type: Optional[sublime.View]\n manager = session.manager()\n base_dir = manager.get_project_path(view.file_name() or \"\") if manager else None\n self._window.show_quick_panel(\n@@ -76,20 +77,35 @@\n # otherwise the bevior feels weird. It's the only reason why open_basic_file exists.\n if uri.startswith(\"file:\"):\n flags = sublime.ENCODED_POSITION\n- if self._side_by_side:\n- flags |= sublime.ADD_TO_SELECTION | sublime.SEMI_TRANSIENT\n- open_basic_file(session, uri, position, flags)\n+ if not self._side_by_side:\n+ open_basic_file(session, uri, position, flags)\n else:\n sublime.set_timeout_async(functools.partial(open_location_async, session, location, self._side_by_side))\n else:\n self._window.focus_view(self._view)\n+ # When in side-by-side mode close the current highlighted\n+ # sheet upon canceling if the sheet is semi-transient\n+ if self._side_by_side and self._highlighted_view:\n+ sheet = self._highlighted_view.sheet()\n+ if sheet and sheet.is_semi_transient():\n+ self._highlighted_view.close()\n \n def _highlight_entry(self, index: int) -> None:\n session, _, uri, position = self._unpack(index)\n if not session:\n return\n if uri.startswith(\"file:\"):\n- open_basic_file(session, uri, position, sublime.TRANSIENT | sublime.ENCODED_POSITION)\n+ flags = sublime.ENCODED_POSITION | sublime.FORCE_GROUP\n+ if self._side_by_side:\n+ if self._highlighted_view and self._highlighted_view.is_valid():\n+ # Replacing the MRU is done relative to the current highlighted sheet\n+ self._window.focus_view(self._highlighted_view)\n+ flags |= sublime.REPLACE_MRU | sublime.SEMI_TRANSIENT\n+ else:\n+ flags |= sublime.ADD_TO_SELECTION | sublime.SEMI_TRANSIENT\n+ else:\n+ flags |= sublime.TRANSIENT\n+ self._highlighted_view = open_basic_file(session, uri, position, flags, self._window.active_group())\n else:\n # TODO: Preview non-file uris?\n debug(\"no preview for\", uri)\n", "issue": "side_by_side should preview files in a side view\n**Describe the bug**\r\nWhen using `side_by_side: true` option for commands that support it, if the command opens a quick panel with multiple items, the items open in the main view on changing selection and only on pressing Enter to select an item it opens in the side view. I suppose that it should also show previews in side view like native ST functionality does (for example `shift+primary+f12`).\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. 
Set up keybinding like:\r\n```\r\n {\r\n \"command\": \"lsp_symbol_type_definition\",\r\n \"keys\": [\"f13\"],\r\n \"args\": {\"side_by_side\": true},\r\n \"context\": [\r\n {\r\n \"key\": \"lsp.session_with_capability\",\r\n \"operator\": \"equal\",\r\n \"operand\": \"typeDefinitionProvider\"\r\n },\r\n {\r\n \"key\": \"auto_complete_visible\",\r\n \"operator\": \"equal\",\r\n \"operand\": false\r\n }\r\n ]\r\n },\r\n```\r\n3. Press F13 on some symbol that is referenced from multiple places\r\n\r\n**Expected behavior**\r\nChanging selection in quick panel should preview the file in a side by side view.\r\n\r\n**Environment (please complete the following information):**\r\n- OS: macOS\r\n- Sublime Text version: 4134\r\n\n", "before_files": [{"content": "from .core.logging import debug\nfrom .core.protocol import DocumentUri, Location, Position\nfrom .core.protocol import LocationLink\nfrom .core.sessions import Session\nfrom .core.typing import Union, List, Optional, Tuple\nfrom .core.views import get_uri_and_position_from_location\nfrom .core.views import location_to_human_readable\nfrom .core.views import to_encoded_filename\nimport functools\nimport sublime\nimport weakref\n\n\ndef open_location_async(session: Session, location: Union[Location, LocationLink], side_by_side: bool) -> None:\n flags = sublime.ENCODED_POSITION\n if side_by_side:\n flags |= sublime.ADD_TO_SELECTION | sublime.SEMI_TRANSIENT\n\n def check_success_async(view: Optional[sublime.View]) -> None:\n if not view:\n sublime.error_message(\"Unable to open URI\")\n\n session.open_location_async(location, flags).then(check_success_async)\n\n\ndef open_basic_file(\n session: Session,\n uri: str,\n position: Position,\n flags: int = 0,\n group: Optional[int] = None\n) -> None:\n filename = session.config.map_server_uri_to_client_path(uri)\n if group is None:\n group = session.window.active_group()\n session.window.open_file(to_encoded_filename(filename, position), flags=flags, group=group)\n\n\nclass LocationPicker:\n\n def __init__(\n self,\n view: sublime.View,\n session: Session,\n locations: Union[List[Location], List[LocationLink]],\n side_by_side: bool\n ) -> None:\n self._view = view\n window = view.window()\n if not window:\n raise ValueError(\"missing window\")\n self._window = window\n self._weaksession = weakref.ref(session)\n self._side_by_side = side_by_side\n self._items = locations\n manager = session.manager()\n base_dir = manager.get_project_path(view.file_name() or \"\") if manager else None\n self._window.show_quick_panel(\n items=[location_to_human_readable(session.config, base_dir, location) for location in locations],\n on_select=self._select_entry,\n on_highlight=self._highlight_entry,\n flags=sublime.KEEP_OPEN_ON_FOCUS_LOST\n )\n\n def _unpack(self, index: int) -> Tuple[Optional[Session], Union[Location, LocationLink], DocumentUri, Position]:\n location = self._items[index]\n uri, position = get_uri_and_position_from_location(location)\n return self._weaksession(), location, uri, position\n\n def _select_entry(self, index: int) -> None:\n if index >= 0 and self._view.is_valid():\n session, location, uri, position = self._unpack(index)\n if not session:\n return\n # Note: this has to run on the main thread (and not via open_location_async)\n # otherwise the bevior feels weird. 
It's the only reason why open_basic_file exists.\n if uri.startswith(\"file:\"):\n flags = sublime.ENCODED_POSITION\n if self._side_by_side:\n flags |= sublime.ADD_TO_SELECTION | sublime.SEMI_TRANSIENT\n open_basic_file(session, uri, position, flags)\n else:\n sublime.set_timeout_async(functools.partial(open_location_async, session, location, self._side_by_side))\n else:\n self._window.focus_view(self._view)\n\n def _highlight_entry(self, index: int) -> None:\n session, _, uri, position = self._unpack(index)\n if not session:\n return\n if uri.startswith(\"file:\"):\n open_basic_file(session, uri, position, sublime.TRANSIENT | sublime.ENCODED_POSITION)\n else:\n # TODO: Preview non-file uris?\n debug(\"no preview for\", uri)\n", "path": "plugin/locationpicker.py"}], "after_files": [{"content": "from .core.logging import debug\nfrom .core.protocol import DocumentUri, Location, Position\nfrom .core.protocol import LocationLink\nfrom .core.sessions import Session\nfrom .core.typing import Union, List, Optional, Tuple\nfrom .core.views import get_uri_and_position_from_location\nfrom .core.views import location_to_human_readable\nfrom .core.views import to_encoded_filename\nimport functools\nimport sublime\nimport weakref\n\n\ndef open_location_async(session: Session, location: Union[Location, LocationLink], side_by_side: bool) -> None:\n flags = sublime.ENCODED_POSITION\n if side_by_side:\n flags |= sublime.ADD_TO_SELECTION | sublime.SEMI_TRANSIENT\n\n def check_success_async(view: Optional[sublime.View]) -> None:\n if not view:\n sublime.error_message(\"Unable to open URI\")\n\n session.open_location_async(location, flags).then(check_success_async)\n\n\ndef open_basic_file(\n session: Session,\n uri: str,\n position: Position,\n flags: int = 0,\n group: Optional[int] = None\n) -> sublime.View:\n filename = session.config.map_server_uri_to_client_path(uri)\n if group is None:\n group = session.window.active_group()\n return session.window.open_file(to_encoded_filename(filename, position), flags=flags, group=group)\n\n\nclass LocationPicker:\n\n def __init__(\n self,\n view: sublime.View,\n session: Session,\n locations: Union[List[Location], List[LocationLink]],\n side_by_side: bool\n ) -> None:\n self._view = view\n window = view.window()\n if not window:\n raise ValueError(\"missing window\")\n self._window = window\n self._weaksession = weakref.ref(session)\n self._side_by_side = side_by_side\n self._items = locations\n self._highlighted_view = None # type: Optional[sublime.View]\n manager = session.manager()\n base_dir = manager.get_project_path(view.file_name() or \"\") if manager else None\n self._window.show_quick_panel(\n items=[location_to_human_readable(session.config, base_dir, location) for location in locations],\n on_select=self._select_entry,\n on_highlight=self._highlight_entry,\n flags=sublime.KEEP_OPEN_ON_FOCUS_LOST\n )\n\n def _unpack(self, index: int) -> Tuple[Optional[Session], Union[Location, LocationLink], DocumentUri, Position]:\n location = self._items[index]\n uri, position = get_uri_and_position_from_location(location)\n return self._weaksession(), location, uri, position\n\n def _select_entry(self, index: int) -> None:\n if index >= 0 and self._view.is_valid():\n session, location, uri, position = self._unpack(index)\n if not session:\n return\n # Note: this has to run on the main thread (and not via open_location_async)\n # otherwise the bevior feels weird. 
It's the only reason why open_basic_file exists.\n if uri.startswith(\"file:\"):\n flags = sublime.ENCODED_POSITION\n if not self._side_by_side:\n open_basic_file(session, uri, position, flags)\n else:\n sublime.set_timeout_async(functools.partial(open_location_async, session, location, self._side_by_side))\n else:\n self._window.focus_view(self._view)\n # When in side-by-side mode close the current highlighted\n # sheet upon canceling if the sheet is semi-transient\n if self._side_by_side and self._highlighted_view:\n sheet = self._highlighted_view.sheet()\n if sheet and sheet.is_semi_transient():\n self._highlighted_view.close()\n\n def _highlight_entry(self, index: int) -> None:\n session, _, uri, position = self._unpack(index)\n if not session:\n return\n if uri.startswith(\"file:\"):\n flags = sublime.ENCODED_POSITION | sublime.FORCE_GROUP\n if self._side_by_side:\n if self._highlighted_view and self._highlighted_view.is_valid():\n # Replacing the MRU is done relative to the current highlighted sheet\n self._window.focus_view(self._highlighted_view)\n flags |= sublime.REPLACE_MRU | sublime.SEMI_TRANSIENT\n else:\n flags |= sublime.ADD_TO_SELECTION | sublime.SEMI_TRANSIENT\n else:\n flags |= sublime.TRANSIENT\n self._highlighted_view = open_basic_file(session, uri, position, flags, self._window.active_group())\n else:\n # TODO: Preview non-file uris?\n debug(\"no preview for\", uri)\n", "path": "plugin/locationpicker.py"}]}
| 1,534 | 701 |
gh_patches_debug_18206
|
rasdani/github-patches
|
git_diff
|
mindsdb__lightwood-698
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Make `column_importances` optional
If the column importance module gets automatically disabled (see #681), an issue may arise where the information required for a `ModelAnalysis` object is not available. Example stacktrace:
```python
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-10-b3f165881113> in <module>
16
17 # Analyze the ensembles
---> 18 predictor.analyze_ensemble(enc_data)
/tmp/b380bd15a0ec89f57a82f719f514e67f0cae00fe7d0085d816353697296992059.py in analyze_ensemble(self, enc_data)
1439 # --------------- #
1440 log.info("Analyzing the ensemble of mixers")
-> 1441 self.model_analysis, self.runtime_analyzer = model_analyzer(
1442 data=encoded_test_data,
1443 train_data=encoded_train_data,
~/Documents/lightwood/lightwood/analysis/analyze.py in model_analyzer(predictor, data, train_data, stats_info, target, ts_cfg, dtype_dict, accuracy_functions, analysis_blocks)
91 test_sample_size=len(encoded_val_data),
92 confusion_matrix=runtime_analyzer['cm'],
---> 93 column_importances=runtime_analyzer['column_importances'],
94 histograms=stats_info.histograms,
95 dtypes=dtype_dict
KeyError: 'column_importances'
```
--- END ISSUE ---
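(Editorial note, not part of the original report: the fix amounts to tolerating missing keys in `runtime_analyzer`, which the golden diff below does with `dict.get`. A minimal, illustrative sketch of that defaulting pattern on a plain dict:)

```python
# Illustrative only: default a possibly-missing analysis key instead of indexing it.
runtime_analyzer = {}  # e.g. the column-importance block was auto-disabled and never ran

# dict.get() returns the fallback instead of raising KeyError:
column_importances = runtime_analyzer.get('column_importances', {})
print(column_importances)  # -> {}
```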
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lightwood/analysis/analyze.py`
Content:
```
1 from typing import Dict, List, Tuple, Optional
2
3 from lightwood.helpers.log import log
4 from lightwood.api import dtype
5 from lightwood.ensemble import BaseEnsemble
6 from lightwood.analysis.base import BaseAnalysisBlock
7 from lightwood.data.encoded_ds import EncodedDs
8 from lightwood.encoder.text.pretrained import PretrainedLangEncoder
9 from lightwood.api.types import ModelAnalysis, StatisticalAnalysis, TimeseriesSettings, PredictionArguments
10
11
12 def model_analyzer(
13 predictor: BaseEnsemble,
14 data: EncodedDs,
15 train_data: EncodedDs,
16 stats_info: StatisticalAnalysis,
17 target: str,
18 ts_cfg: TimeseriesSettings,
19 dtype_dict: Dict[str, str],
20 accuracy_functions,
21 analysis_blocks: Optional[List[BaseAnalysisBlock]] = []
22 ) -> Tuple[ModelAnalysis, Dict[str, object]]:
23 """
24 Analyses model on a validation subset to evaluate accuracy, estimate feature importance and generate a
25 calibration model to estimating confidence in future predictions.
26
27 Additionally, any user-specified analysis blocks (see class `BaseAnalysisBlock`) are also called here.
28
29 :return:
30 runtime_analyzer: This dictionary object gets populated in a sequential fashion with data generated from
31 any `.analyze()` block call. This dictionary object is stored in the predictor itself, and used when
32 calling the `.explain()` method of all analysis blocks when generating predictions.
33
34 model_analysis: `ModelAnalysis` object that contains core analysis metrics, not necessarily needed when predicting.
35 """
36
37 runtime_analyzer = {}
38 data_type = dtype_dict[target]
39
40 # retrieve encoded data representations
41 encoded_train_data = train_data
42 encoded_val_data = data
43 data = encoded_val_data.data_frame
44 input_cols = list([col for col in data.columns if col != target])
45
46 # predictive task
47 is_numerical = data_type in (dtype.integer, dtype.float, dtype.array, dtype.tsarray, dtype.quantity)
48 is_classification = data_type in (dtype.categorical, dtype.binary)
49 is_multi_ts = ts_cfg.is_timeseries and ts_cfg.nr_predictions > 1
50 has_pretrained_text_enc = any([isinstance(enc, PretrainedLangEncoder)
51 for enc in encoded_train_data.encoders.values()])
52
53 # raw predictions for validation dataset
54 args = {} if not is_classification else {"predict_proba": True}
55 normal_predictions = predictor(encoded_val_data, args=PredictionArguments.from_dict(args))
56 normal_predictions = normal_predictions.set_index(data.index)
57
58 # ------------------------- #
59 # Run analysis blocks, both core and user-defined
60 # ------------------------- #
61 kwargs = {
62 'predictor': predictor,
63 'target': target,
64 'input_cols': input_cols,
65 'dtype_dict': dtype_dict,
66 'normal_predictions': normal_predictions,
67 'data': data,
68 'train_data': train_data,
69 'encoded_val_data': encoded_val_data,
70 'is_classification': is_classification,
71 'is_numerical': is_numerical,
72 'is_multi_ts': is_multi_ts,
73 'stats_info': stats_info,
74 'ts_cfg': ts_cfg,
75 'accuracy_functions': accuracy_functions,
76 'has_pretrained_text_enc': has_pretrained_text_enc
77 }
78
79 for block in analysis_blocks:
80 log.info("The block %s is now running its analyze() method", block.__class__.__name__)
81 runtime_analyzer = block.analyze(runtime_analyzer, **kwargs)
82
83 # ------------------------- #
84 # Populate ModelAnalysis object
85 # ------------------------- #
86 model_analysis = ModelAnalysis(
87 accuracies=runtime_analyzer['score_dict'],
88 accuracy_histogram=runtime_analyzer['acc_histogram'],
89 accuracy_samples=runtime_analyzer['acc_samples'],
90 train_sample_size=len(encoded_train_data),
91 test_sample_size=len(encoded_val_data),
92 confusion_matrix=runtime_analyzer['cm'],
93 column_importances=runtime_analyzer['column_importances'],
94 histograms=stats_info.histograms,
95 dtypes=dtype_dict
96 )
97
98 return model_analysis, runtime_analyzer
99
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/lightwood/analysis/analyze.py b/lightwood/analysis/analyze.py
--- a/lightwood/analysis/analyze.py
+++ b/lightwood/analysis/analyze.py
@@ -84,13 +84,13 @@
# Populate ModelAnalysis object
# ------------------------- #
model_analysis = ModelAnalysis(
- accuracies=runtime_analyzer['score_dict'],
- accuracy_histogram=runtime_analyzer['acc_histogram'],
- accuracy_samples=runtime_analyzer['acc_samples'],
+ accuracies=runtime_analyzer.get('score_dict', {}),
+ accuracy_histogram=runtime_analyzer.get('acc_histogram', {}),
+ accuracy_samples=runtime_analyzer.get('acc_samples', {}),
train_sample_size=len(encoded_train_data),
test_sample_size=len(encoded_val_data),
confusion_matrix=runtime_analyzer['cm'],
- column_importances=runtime_analyzer['column_importances'],
+ column_importances=runtime_analyzer.get('column_importances', {}),
histograms=stats_info.histograms,
dtypes=dtype_dict
)
|
{"golden_diff": "diff --git a/lightwood/analysis/analyze.py b/lightwood/analysis/analyze.py\n--- a/lightwood/analysis/analyze.py\n+++ b/lightwood/analysis/analyze.py\n@@ -84,13 +84,13 @@\n # Populate ModelAnalysis object\n # ------------------------- #\n model_analysis = ModelAnalysis(\n- accuracies=runtime_analyzer['score_dict'],\n- accuracy_histogram=runtime_analyzer['acc_histogram'],\n- accuracy_samples=runtime_analyzer['acc_samples'],\n+ accuracies=runtime_analyzer.get('score_dict', {}),\n+ accuracy_histogram=runtime_analyzer.get('acc_histogram', {}),\n+ accuracy_samples=runtime_analyzer.get('acc_samples', {}),\n train_sample_size=len(encoded_train_data),\n test_sample_size=len(encoded_val_data),\n confusion_matrix=runtime_analyzer['cm'],\n- column_importances=runtime_analyzer['column_importances'],\n+ column_importances=runtime_analyzer.get('column_importances', {}),\n histograms=stats_info.histograms,\n dtypes=dtype_dict\n )\n", "issue": "Make `column_importances` optional\nIf the column importance module gets automatically disabled (see #681) an issue may arise where the information required for a `ModelAnalysis` object is not available. Example stacktrace:\r\n\r\n```python\r\n---------------------------------------------------------------------------\r\nKeyError Traceback (most recent call last)\r\n<ipython-input-10-b3f165881113> in <module>\r\n 16 \r\n 17 # Analyze the ensembles\r\n---> 18 predictor.analyze_ensemble(enc_data)\r\n\r\n/tmp/b380bd15a0ec89f57a82f719f514e67f0cae00fe7d0085d816353697296992059.py in analyze_ensemble(self, enc_data)\r\n 1439 # --------------- #\r\n 1440 log.info(\"Analyzing the ensemble of mixers\")\r\n-> 1441 self.model_analysis, self.runtime_analyzer = model_analyzer(\r\n 1442 data=encoded_test_data,\r\n 1443 train_data=encoded_train_data,\r\n\r\n~/Documents/lightwood/lightwood/analysis/analyze.py in model_analyzer(predictor, data, train_data, stats_info, target, ts_cfg, dtype_dict, accuracy_functions, analysis_blocks)\r\n 91 test_sample_size=len(encoded_val_data),\r\n 92 confusion_matrix=runtime_analyzer['cm'],\r\n---> 93 column_importances=runtime_analyzer['column_importances'],\r\n 94 histograms=stats_info.histograms,\r\n 95 dtypes=dtype_dict\r\n\r\nKeyError: 'column_importances'\r\n```\r\n\n", "before_files": [{"content": "from typing import Dict, List, Tuple, Optional\n\nfrom lightwood.helpers.log import log\nfrom lightwood.api import dtype\nfrom lightwood.ensemble import BaseEnsemble\nfrom lightwood.analysis.base import BaseAnalysisBlock\nfrom lightwood.data.encoded_ds import EncodedDs\nfrom lightwood.encoder.text.pretrained import PretrainedLangEncoder\nfrom lightwood.api.types import ModelAnalysis, StatisticalAnalysis, TimeseriesSettings, PredictionArguments\n\n\ndef model_analyzer(\n predictor: BaseEnsemble,\n data: EncodedDs,\n train_data: EncodedDs,\n stats_info: StatisticalAnalysis,\n target: str,\n ts_cfg: TimeseriesSettings,\n dtype_dict: Dict[str, str],\n accuracy_functions,\n analysis_blocks: Optional[List[BaseAnalysisBlock]] = []\n) -> Tuple[ModelAnalysis, Dict[str, object]]:\n \"\"\"\n Analyses model on a validation subset to evaluate accuracy, estimate feature importance and generate a\n calibration model to estimating confidence in future predictions.\n\n Additionally, any user-specified analysis blocks (see class `BaseAnalysisBlock`) are also called here.\n\n :return:\n runtime_analyzer: This dictionary object gets populated in a sequential fashion with data generated from\n any `.analyze()` block call. 
This dictionary object is stored in the predictor itself, and used when\n calling the `.explain()` method of all analysis blocks when generating predictions.\n\n model_analysis: `ModelAnalysis` object that contains core analysis metrics, not necessarily needed when predicting.\n \"\"\"\n\n runtime_analyzer = {}\n data_type = dtype_dict[target]\n\n # retrieve encoded data representations\n encoded_train_data = train_data\n encoded_val_data = data\n data = encoded_val_data.data_frame\n input_cols = list([col for col in data.columns if col != target])\n\n # predictive task\n is_numerical = data_type in (dtype.integer, dtype.float, dtype.array, dtype.tsarray, dtype.quantity)\n is_classification = data_type in (dtype.categorical, dtype.binary)\n is_multi_ts = ts_cfg.is_timeseries and ts_cfg.nr_predictions > 1\n has_pretrained_text_enc = any([isinstance(enc, PretrainedLangEncoder)\n for enc in encoded_train_data.encoders.values()])\n\n # raw predictions for validation dataset\n args = {} if not is_classification else {\"predict_proba\": True}\n normal_predictions = predictor(encoded_val_data, args=PredictionArguments.from_dict(args))\n normal_predictions = normal_predictions.set_index(data.index)\n\n # ------------------------- #\n # Run analysis blocks, both core and user-defined\n # ------------------------- #\n kwargs = {\n 'predictor': predictor,\n 'target': target,\n 'input_cols': input_cols,\n 'dtype_dict': dtype_dict,\n 'normal_predictions': normal_predictions,\n 'data': data,\n 'train_data': train_data,\n 'encoded_val_data': encoded_val_data,\n 'is_classification': is_classification,\n 'is_numerical': is_numerical,\n 'is_multi_ts': is_multi_ts,\n 'stats_info': stats_info,\n 'ts_cfg': ts_cfg,\n 'accuracy_functions': accuracy_functions,\n 'has_pretrained_text_enc': has_pretrained_text_enc\n }\n\n for block in analysis_blocks:\n log.info(\"The block %s is now running its analyze() method\", block.__class__.__name__)\n runtime_analyzer = block.analyze(runtime_analyzer, **kwargs)\n\n # ------------------------- #\n # Populate ModelAnalysis object\n # ------------------------- #\n model_analysis = ModelAnalysis(\n accuracies=runtime_analyzer['score_dict'],\n accuracy_histogram=runtime_analyzer['acc_histogram'],\n accuracy_samples=runtime_analyzer['acc_samples'],\n train_sample_size=len(encoded_train_data),\n test_sample_size=len(encoded_val_data),\n confusion_matrix=runtime_analyzer['cm'],\n column_importances=runtime_analyzer['column_importances'],\n histograms=stats_info.histograms,\n dtypes=dtype_dict\n )\n\n return model_analysis, runtime_analyzer\n", "path": "lightwood/analysis/analyze.py"}], "after_files": [{"content": "from typing import Dict, List, Tuple, Optional\n\nfrom lightwood.helpers.log import log\nfrom lightwood.api import dtype\nfrom lightwood.ensemble import BaseEnsemble\nfrom lightwood.analysis.base import BaseAnalysisBlock\nfrom lightwood.data.encoded_ds import EncodedDs\nfrom lightwood.encoder.text.pretrained import PretrainedLangEncoder\nfrom lightwood.api.types import ModelAnalysis, StatisticalAnalysis, TimeseriesSettings, PredictionArguments\n\n\ndef model_analyzer(\n predictor: BaseEnsemble,\n data: EncodedDs,\n train_data: EncodedDs,\n stats_info: StatisticalAnalysis,\n target: str,\n ts_cfg: TimeseriesSettings,\n dtype_dict: Dict[str, str],\n accuracy_functions,\n analysis_blocks: Optional[List[BaseAnalysisBlock]] = []\n) -> Tuple[ModelAnalysis, Dict[str, object]]:\n \"\"\"\n Analyses model on a validation subset to evaluate accuracy, estimate feature importance and 
generate a\n calibration model to estimating confidence in future predictions.\n\n Additionally, any user-specified analysis blocks (see class `BaseAnalysisBlock`) are also called here.\n\n :return:\n runtime_analyzer: This dictionary object gets populated in a sequential fashion with data generated from\n any `.analyze()` block call. This dictionary object is stored in the predictor itself, and used when\n calling the `.explain()` method of all analysis blocks when generating predictions.\n\n model_analysis: `ModelAnalysis` object that contains core analysis metrics, not necessarily needed when predicting.\n \"\"\"\n\n runtime_analyzer = {}\n data_type = dtype_dict[target]\n\n # retrieve encoded data representations\n encoded_train_data = train_data\n encoded_val_data = data\n data = encoded_val_data.data_frame\n input_cols = list([col for col in data.columns if col != target])\n\n # predictive task\n is_numerical = data_type in (dtype.integer, dtype.float, dtype.array, dtype.tsarray, dtype.quantity)\n is_classification = data_type in (dtype.categorical, dtype.binary)\n is_multi_ts = ts_cfg.is_timeseries and ts_cfg.nr_predictions > 1\n has_pretrained_text_enc = any([isinstance(enc, PretrainedLangEncoder)\n for enc in encoded_train_data.encoders.values()])\n\n # raw predictions for validation dataset\n args = {} if not is_classification else {\"predict_proba\": True}\n normal_predictions = predictor(encoded_val_data, args=PredictionArguments.from_dict(args))\n normal_predictions = normal_predictions.set_index(data.index)\n\n # ------------------------- #\n # Run analysis blocks, both core and user-defined\n # ------------------------- #\n kwargs = {\n 'predictor': predictor,\n 'target': target,\n 'input_cols': input_cols,\n 'dtype_dict': dtype_dict,\n 'normal_predictions': normal_predictions,\n 'data': data,\n 'train_data': train_data,\n 'encoded_val_data': encoded_val_data,\n 'is_classification': is_classification,\n 'is_numerical': is_numerical,\n 'is_multi_ts': is_multi_ts,\n 'stats_info': stats_info,\n 'ts_cfg': ts_cfg,\n 'accuracy_functions': accuracy_functions,\n 'has_pretrained_text_enc': has_pretrained_text_enc\n }\n\n for block in analysis_blocks:\n log.info(\"The block %s is now running its analyze() method\", block.__class__.__name__)\n runtime_analyzer = block.analyze(runtime_analyzer, **kwargs)\n\n # ------------------------- #\n # Populate ModelAnalysis object\n # ------------------------- #\n model_analysis = ModelAnalysis(\n accuracies=runtime_analyzer.get('score_dict', {}),\n accuracy_histogram=runtime_analyzer.get('acc_histogram', {}),\n accuracy_samples=runtime_analyzer.get('acc_samples', {}),\n train_sample_size=len(encoded_train_data),\n test_sample_size=len(encoded_val_data),\n confusion_matrix=runtime_analyzer['cm'],\n column_importances=runtime_analyzer.get('column_importances', {}),\n histograms=stats_info.histograms,\n dtypes=dtype_dict\n )\n\n return model_analysis, runtime_analyzer\n", "path": "lightwood/analysis/analyze.py"}]}
| 1,685 | 226 |
gh_patches_debug_14927
|
rasdani/github-patches
|
git_diff
|
googleapis__python-bigquery-30
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Refactor logic in synth.py for excluding noxfile
As explained in a [comment](https://github.com/googleapis/python-bigquery/pull/1#discussion_r375560206), preventing synthtool from overriding the customized `noxfile.py` can be achieved in a more straightforward way than the one currently used.
--- END ISSUE ---
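(Editorial note: the more straightforward way the linked comment points at is synthtool's `excludes` argument to `s.move`, which the golden diff below adopts. A sketch, assuming the `s` and `common` objects already set up in this `synth.py`:)

```python
templated_files = common.py_library(cov_level=100)

# Instead of deleting the generated noxfile by hand before the move:
#   os.remove(os.path.join(templated_files, "noxfile.py"))
#   s.move(templated_files)
# tell synthtool to skip it while copying the templates:
s.move(templated_files, excludes=["noxfile.py"])
```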
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `synth.py`
Content:
```
1 # Copyright 2018 Google LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """This script is used to synthesize generated parts of this library."""
16
17 import os
18
19 import synthtool as s
20 from synthtool import gcp
21
22 gapic = gcp.GAPICGenerator()
23 common = gcp.CommonTemplates()
24 version = 'v2'
25
26 library = gapic.py_library(
27 'bigquery',
28 version,
29 config_path='/google/cloud/bigquery/'
30 'artman_bigquery_v2.yaml',
31 artman_output_name='bigquery-v2',
32 include_protos=True,
33 )
34
35 s.move(
36 [
37 library / "google/cloud/bigquery_v2/gapic/enums.py",
38 library / "google/cloud/bigquery_v2/types.py",
39 library / "google/cloud/bigquery_v2/proto/location*",
40 library / "google/cloud/bigquery_v2/proto/encryption_config*",
41 library / "google/cloud/bigquery_v2/proto/model*",
42 library / "google/cloud/bigquery_v2/proto/standard_sql*",
43 ],
44 )
45
46 # Fix up proto docs that are missing summary line.
47 s.replace(
48 "google/cloud/bigquery_v2/proto/model_pb2.py",
49 '"""Attributes:',
50 '"""Protocol buffer.\n\n Attributes:',
51 )
52 s.replace(
53 "google/cloud/bigquery_v2/proto/encryption_config_pb2.py",
54 '"""Attributes:',
55 '"""Encryption configuration.\n\n Attributes:',
56 )
57
58 # Remove non-ascii characters from docstrings for Python 2.7.
59 # Format quoted strings as plain text.
60 s.replace("google/cloud/bigquery_v2/proto/*.py", "[โโ]", '``')
61
62 # ----------------------------------------------------------------------------
63 # Add templated files
64 # ----------------------------------------------------------------------------
65 templated_files = common.py_library(cov_level=100)
66 # we do not want to override the custom noxfile with the generated one
67 os.remove(os.path.join(templated_files, "noxfile.py"))
68 s.move(templated_files)
69
70 s.shell.run(["nox", "-s", "blacken"], hide_output=False)
71
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/synth.py b/synth.py
--- a/synth.py
+++ b/synth.py
@@ -14,8 +14,6 @@
"""This script is used to synthesize generated parts of this library."""
-import os
-
import synthtool as s
from synthtool import gcp
@@ -63,8 +61,6 @@
# Add templated files
# ----------------------------------------------------------------------------
templated_files = common.py_library(cov_level=100)
-# we do not want to override the custom noxfile with the generated one
-os.remove(os.path.join(templated_files, "noxfile.py"))
-s.move(templated_files)
+s.move(templated_files, excludes=["noxfile.py"])
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
|
{"golden_diff": "diff --git a/synth.py b/synth.py\n--- a/synth.py\n+++ b/synth.py\n@@ -14,8 +14,6 @@\n \n \"\"\"This script is used to synthesize generated parts of this library.\"\"\"\n \n-import os\n-\n import synthtool as s\n from synthtool import gcp\n \n@@ -63,8 +61,6 @@\n # Add templated files\n # ----------------------------------------------------------------------------\n templated_files = common.py_library(cov_level=100)\n-# we do not want to override the custom noxfile with the generated one\n-os.remove(os.path.join(templated_files, \"noxfile.py\"))\n-s.move(templated_files)\n+s.move(templated_files, excludes=[\"noxfile.py\"])\n \n s.shell.run([\"nox\", \"-s\", \"blacken\"], hide_output=False)\n", "issue": "Refactor logic in synth.py for excluding noxfile\nAs explained in a [comment](https://github.com/googleapis/python-bigquery/pull/1#discussion_r375560206), preventing the synthtool for overriding the customized `noxfile.py` can be achieved in a more straightforward way than currently used.\r\n\n", "before_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"This script is used to synthesize generated parts of this library.\"\"\"\n\nimport os\n\nimport synthtool as s\nfrom synthtool import gcp\n\ngapic = gcp.GAPICGenerator()\ncommon = gcp.CommonTemplates()\nversion = 'v2'\n\nlibrary = gapic.py_library(\n 'bigquery',\n version,\n config_path='/google/cloud/bigquery/'\n 'artman_bigquery_v2.yaml',\n artman_output_name='bigquery-v2',\n include_protos=True,\n)\n\ns.move(\n [\n library / \"google/cloud/bigquery_v2/gapic/enums.py\",\n library / \"google/cloud/bigquery_v2/types.py\",\n library / \"google/cloud/bigquery_v2/proto/location*\",\n library / \"google/cloud/bigquery_v2/proto/encryption_config*\",\n library / \"google/cloud/bigquery_v2/proto/model*\",\n library / \"google/cloud/bigquery_v2/proto/standard_sql*\",\n ],\n)\n\n# Fix up proto docs that are missing summary line.\ns.replace(\n \"google/cloud/bigquery_v2/proto/model_pb2.py\",\n '\"\"\"Attributes:',\n '\"\"\"Protocol buffer.\\n\\n Attributes:',\n)\ns.replace(\n \"google/cloud/bigquery_v2/proto/encryption_config_pb2.py\",\n '\"\"\"Attributes:',\n '\"\"\"Encryption configuration.\\n\\n Attributes:',\n)\n\n# Remove non-ascii characters from docstrings for Python 2.7.\n# Format quoted strings as plain text.\ns.replace(\"google/cloud/bigquery_v2/proto/*.py\", \"[\u201c\u201d]\", '``')\n\n# ----------------------------------------------------------------------------\n# Add templated files\n# ----------------------------------------------------------------------------\ntemplated_files = common.py_library(cov_level=100)\n# we do not want to override the custom noxfile with the generated one\nos.remove(os.path.join(templated_files, \"noxfile.py\"))\ns.move(templated_files)\n\ns.shell.run([\"nox\", \"-s\", \"blacken\"], hide_output=False)\n", "path": "synth.py"}], "after_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 
(the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"This script is used to synthesize generated parts of this library.\"\"\"\n\nimport synthtool as s\nfrom synthtool import gcp\n\ngapic = gcp.GAPICGenerator()\ncommon = gcp.CommonTemplates()\nversion = 'v2'\n\nlibrary = gapic.py_library(\n 'bigquery',\n version,\n config_path='/google/cloud/bigquery/'\n 'artman_bigquery_v2.yaml',\n artman_output_name='bigquery-v2',\n include_protos=True,\n)\n\ns.move(\n [\n library / \"google/cloud/bigquery_v2/gapic/enums.py\",\n library / \"google/cloud/bigquery_v2/types.py\",\n library / \"google/cloud/bigquery_v2/proto/location*\",\n library / \"google/cloud/bigquery_v2/proto/encryption_config*\",\n library / \"google/cloud/bigquery_v2/proto/model*\",\n library / \"google/cloud/bigquery_v2/proto/standard_sql*\",\n ],\n)\n\n# Fix up proto docs that are missing summary line.\ns.replace(\n \"google/cloud/bigquery_v2/proto/model_pb2.py\",\n '\"\"\"Attributes:',\n '\"\"\"Protocol buffer.\\n\\n Attributes:',\n)\ns.replace(\n \"google/cloud/bigquery_v2/proto/encryption_config_pb2.py\",\n '\"\"\"Attributes:',\n '\"\"\"Encryption configuration.\\n\\n Attributes:',\n)\n\n# Remove non-ascii characters from docstrings for Python 2.7.\n# Format quoted strings as plain text.\ns.replace(\"google/cloud/bigquery_v2/proto/*.py\", \"[\u201c\u201d]\", '``')\n\n# ----------------------------------------------------------------------------\n# Add templated files\n# ----------------------------------------------------------------------------\ntemplated_files = common.py_library(cov_level=100)\ns.move(templated_files, excludes=[\"noxfile.py\"])\n\ns.shell.run([\"nox\", \"-s\", \"blacken\"], hide_output=False)\n", "path": "synth.py"}]}
| 1,011 | 175 |
gh_patches_debug_17991
|
rasdani/github-patches
|
git_diff
|
searx__searx-1464
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Seems that startpage and ixquick engines are not working anymore on Searx
Hello, and thanks for your work.
I tried on my searx and on other instances like searx.me... but it's not working.
How could we fix that?
Thanks.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `searx/engines/startpage.py`
Content:
```
1 # Startpage (Web)
2 #
3 # @website https://startpage.com
4 # @provide-api no (nothing found)
5 #
6 # @using-api no
7 # @results HTML
8 # @stable no (HTML can change)
9 # @parse url, title, content
10 #
11 # @todo paging
12
13 from lxml import html
14 from dateutil import parser
15 from datetime import datetime, timedelta
16 import re
17 from searx.engines.xpath import extract_text
18
19 # engine dependent config
20 categories = ['general']
21 # there is a mechanism to block "bot" search
22 # (probably the parameter qid), require
23 # storing of qid's between mulitble search-calls
24
25 # paging = False
26 language_support = True
27
28 # search-url
29 base_url = 'https://startpage.com/'
30 search_url = base_url + 'do/search'
31
32 # specific xpath variables
33 # ads xpath //div[@id="results"]/div[@id="sponsored"]//div[@class="result"]
34 # not ads: div[@class="result"] are the direct childs of div[@id="results"]
35 results_xpath = '//div[@class="result"]'
36 link_xpath = './/h3/a'
37
38
39 # do search-request
40 def request(query, params):
41 offset = (params['pageno'] - 1) * 10
42
43 params['url'] = search_url
44 params['method'] = 'POST'
45 params['data'] = {'query': query,
46 'startat': offset}
47
48 # set language
49 params['data']['with_language'] = ('lang_' + params['language'].split('-')[0])
50
51 return params
52
53
54 # get response from search-request
55 def response(resp):
56 results = []
57
58 dom = html.fromstring(resp.text)
59
60 # parse results
61 for result in dom.xpath(results_xpath):
62 links = result.xpath(link_xpath)
63 if not links:
64 continue
65 link = links[0]
66 url = link.attrib.get('href')
67
68 # block google-ad url's
69 if re.match(r"^http(s|)://(www\.)?google\.[a-z]+/aclk.*$", url):
70 continue
71
72 # block startpage search url's
73 if re.match(r"^http(s|)://(www\.)?startpage\.com/do/search\?.*$", url):
74 continue
75
76 # block ixquick search url's
77 if re.match(r"^http(s|)://(www\.)?ixquick\.com/do/search\?.*$", url):
78 continue
79
80 title = extract_text(link)
81
82 if result.xpath('./p[@class="desc clk"]'):
83 content = extract_text(result.xpath('./p[@class="desc clk"]'))
84 else:
85 content = ''
86
87 published_date = None
88
89 # check if search result starts with something like: "2 Sep 2014 ... "
90 if re.match(r"^([1-9]|[1-2][0-9]|3[0-1]) [A-Z][a-z]{2} [0-9]{4} \.\.\. ", content):
91 date_pos = content.find('...') + 4
92 date_string = content[0:date_pos - 5]
93 published_date = parser.parse(date_string, dayfirst=True)
94
95 # fix content string
96 content = content[date_pos:]
97
98 # check if search result starts with something like: "5 days ago ... "
99 elif re.match(r"^[0-9]+ days? ago \.\.\. ", content):
100 date_pos = content.find('...') + 4
101 date_string = content[0:date_pos - 5]
102
103 # calculate datetime
104 published_date = datetime.now() - timedelta(days=int(re.match(r'\d+', date_string).group()))
105
106 # fix content string
107 content = content[date_pos:]
108
109 if published_date:
110 # append result
111 results.append({'url': url,
112 'title': title,
113 'content': content,
114 'publishedDate': published_date})
115 else:
116 # append result
117 results.append({'url': url,
118 'title': title,
119 'content': content})
120
121 # return results
122 return results
123
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/searx/engines/startpage.py b/searx/engines/startpage.py
--- a/searx/engines/startpage.py
+++ b/searx/engines/startpage.py
@@ -32,8 +32,9 @@
# specific xpath variables
# ads xpath //div[@id="results"]/div[@id="sponsored"]//div[@class="result"]
# not ads: div[@class="result"] are the direct childs of div[@id="results"]
-results_xpath = '//div[@class="result"]'
+results_xpath = '//li[contains(@class, "search-result") and contains(@class, "search-item")]'
link_xpath = './/h3/a'
+content_xpath = './p[@class="search-item__body"]'
# do search-request
@@ -79,8 +80,8 @@
title = extract_text(link)
- if result.xpath('./p[@class="desc clk"]'):
- content = extract_text(result.xpath('./p[@class="desc clk"]'))
+ if result.xpath(content_xpath):
+ content = extract_text(result.xpath(content_xpath))
else:
content = ''
|
{"golden_diff": "diff --git a/searx/engines/startpage.py b/searx/engines/startpage.py\n--- a/searx/engines/startpage.py\n+++ b/searx/engines/startpage.py\n@@ -32,8 +32,9 @@\n # specific xpath variables\n # ads xpath //div[@id=\"results\"]/div[@id=\"sponsored\"]//div[@class=\"result\"]\n # not ads: div[@class=\"result\"] are the direct childs of div[@id=\"results\"]\n-results_xpath = '//div[@class=\"result\"]'\n+results_xpath = '//li[contains(@class, \"search-result\") and contains(@class, \"search-item\")]'\n link_xpath = './/h3/a'\n+content_xpath = './p[@class=\"search-item__body\"]'\n \n \n # do search-request\n@@ -79,8 +80,8 @@\n \n title = extract_text(link)\n \n- if result.xpath('./p[@class=\"desc clk\"]'):\n- content = extract_text(result.xpath('./p[@class=\"desc clk\"]'))\n+ if result.xpath(content_xpath):\n+ content = extract_text(result.xpath(content_xpath))\n else:\n content = ''\n", "issue": "Seems that startpage and ixquick enignes are not working anymore on Searx\nHello and thanks for the works.\r\n\r\nI tried on my searx and on other instance like searx.me... but It's not working.\r\n\r\nHow could we fix that?\r\n\r\nThanks.\r\n\r\n\n", "before_files": [{"content": "# Startpage (Web)\n#\n# @website https://startpage.com\n# @provide-api no (nothing found)\n#\n# @using-api no\n# @results HTML\n# @stable no (HTML can change)\n# @parse url, title, content\n#\n# @todo paging\n\nfrom lxml import html\nfrom dateutil import parser\nfrom datetime import datetime, timedelta\nimport re\nfrom searx.engines.xpath import extract_text\n\n# engine dependent config\ncategories = ['general']\n# there is a mechanism to block \"bot\" search\n# (probably the parameter qid), require\n# storing of qid's between mulitble search-calls\n\n# paging = False\nlanguage_support = True\n\n# search-url\nbase_url = 'https://startpage.com/'\nsearch_url = base_url + 'do/search'\n\n# specific xpath variables\n# ads xpath //div[@id=\"results\"]/div[@id=\"sponsored\"]//div[@class=\"result\"]\n# not ads: div[@class=\"result\"] are the direct childs of div[@id=\"results\"]\nresults_xpath = '//div[@class=\"result\"]'\nlink_xpath = './/h3/a'\n\n\n# do search-request\ndef request(query, params):\n offset = (params['pageno'] - 1) * 10\n\n params['url'] = search_url\n params['method'] = 'POST'\n params['data'] = {'query': query,\n 'startat': offset}\n\n # set language\n params['data']['with_language'] = ('lang_' + params['language'].split('-')[0])\n\n return params\n\n\n# get response from search-request\ndef response(resp):\n results = []\n\n dom = html.fromstring(resp.text)\n\n # parse results\n for result in dom.xpath(results_xpath):\n links = result.xpath(link_xpath)\n if not links:\n continue\n link = links[0]\n url = link.attrib.get('href')\n\n # block google-ad url's\n if re.match(r\"^http(s|)://(www\\.)?google\\.[a-z]+/aclk.*$\", url):\n continue\n\n # block startpage search url's\n if re.match(r\"^http(s|)://(www\\.)?startpage\\.com/do/search\\?.*$\", url):\n continue\n\n # block ixquick search url's\n if re.match(r\"^http(s|)://(www\\.)?ixquick\\.com/do/search\\?.*$\", url):\n continue\n\n title = extract_text(link)\n\n if result.xpath('./p[@class=\"desc clk\"]'):\n content = extract_text(result.xpath('./p[@class=\"desc clk\"]'))\n else:\n content = ''\n\n published_date = None\n\n # check if search result starts with something like: \"2 Sep 2014 ... \"\n if re.match(r\"^([1-9]|[1-2][0-9]|3[0-1]) [A-Z][a-z]{2} [0-9]{4} \\.\\.\\. 
\", content):\n date_pos = content.find('...') + 4\n date_string = content[0:date_pos - 5]\n published_date = parser.parse(date_string, dayfirst=True)\n\n # fix content string\n content = content[date_pos:]\n\n # check if search result starts with something like: \"5 days ago ... \"\n elif re.match(r\"^[0-9]+ days? ago \\.\\.\\. \", content):\n date_pos = content.find('...') + 4\n date_string = content[0:date_pos - 5]\n\n # calculate datetime\n published_date = datetime.now() - timedelta(days=int(re.match(r'\\d+', date_string).group()))\n\n # fix content string\n content = content[date_pos:]\n\n if published_date:\n # append result\n results.append({'url': url,\n 'title': title,\n 'content': content,\n 'publishedDate': published_date})\n else:\n # append result\n results.append({'url': url,\n 'title': title,\n 'content': content})\n\n # return results\n return results\n", "path": "searx/engines/startpage.py"}], "after_files": [{"content": "# Startpage (Web)\n#\n# @website https://startpage.com\n# @provide-api no (nothing found)\n#\n# @using-api no\n# @results HTML\n# @stable no (HTML can change)\n# @parse url, title, content\n#\n# @todo paging\n\nfrom lxml import html\nfrom dateutil import parser\nfrom datetime import datetime, timedelta\nimport re\nfrom searx.engines.xpath import extract_text\n\n# engine dependent config\ncategories = ['general']\n# there is a mechanism to block \"bot\" search\n# (probably the parameter qid), require\n# storing of qid's between mulitble search-calls\n\n# paging = False\nlanguage_support = True\n\n# search-url\nbase_url = 'https://startpage.com/'\nsearch_url = base_url + 'do/search'\n\n# specific xpath variables\n# ads xpath //div[@id=\"results\"]/div[@id=\"sponsored\"]//div[@class=\"result\"]\n# not ads: div[@class=\"result\"] are the direct childs of div[@id=\"results\"]\nresults_xpath = '//li[contains(@class, \"search-result\") and contains(@class, \"search-item\")]'\nlink_xpath = './/h3/a'\ncontent_xpath = './p[@class=\"search-item__body\"]'\n\n\n# do search-request\ndef request(query, params):\n offset = (params['pageno'] - 1) * 10\n\n params['url'] = search_url\n params['method'] = 'POST'\n params['data'] = {'query': query,\n 'startat': offset}\n\n # set language\n params['data']['with_language'] = ('lang_' + params['language'].split('-')[0])\n\n return params\n\n\n# get response from search-request\ndef response(resp):\n results = []\n\n dom = html.fromstring(resp.text)\n\n # parse results\n for result in dom.xpath(results_xpath):\n links = result.xpath(link_xpath)\n if not links:\n continue\n link = links[0]\n url = link.attrib.get('href')\n\n # block google-ad url's\n if re.match(r\"^http(s|)://(www\\.)?google\\.[a-z]+/aclk.*$\", url):\n continue\n\n # block startpage search url's\n if re.match(r\"^http(s|)://(www\\.)?startpage\\.com/do/search\\?.*$\", url):\n continue\n\n # block ixquick search url's\n if re.match(r\"^http(s|)://(www\\.)?ixquick\\.com/do/search\\?.*$\", url):\n continue\n\n title = extract_text(link)\n\n if result.xpath(content_xpath):\n content = extract_text(result.xpath(content_xpath))\n else:\n content = ''\n\n published_date = None\n\n # check if search result starts with something like: \"2 Sep 2014 ... \"\n if re.match(r\"^([1-9]|[1-2][0-9]|3[0-1]) [A-Z][a-z]{2} [0-9]{4} \\.\\.\\. 
\", content):\n date_pos = content.find('...') + 4\n date_string = content[0:date_pos - 5]\n published_date = parser.parse(date_string, dayfirst=True)\n\n # fix content string\n content = content[date_pos:]\n\n # check if search result starts with something like: \"5 days ago ... \"\n elif re.match(r\"^[0-9]+ days? ago \\.\\.\\. \", content):\n date_pos = content.find('...') + 4\n date_string = content[0:date_pos - 5]\n\n # calculate datetime\n published_date = datetime.now() - timedelta(days=int(re.match(r'\\d+', date_string).group()))\n\n # fix content string\n content = content[date_pos:]\n\n if published_date:\n # append result\n results.append({'url': url,\n 'title': title,\n 'content': content,\n 'publishedDate': published_date})\n else:\n # append result\n results.append({'url': url,\n 'title': title,\n 'content': content})\n\n # return results\n return results\n", "path": "searx/engines/startpage.py"}]}
| 1,498 | 253 |
gh_patches_debug_19196
|
rasdani/github-patches
|
git_diff
|
pymedusa__Medusa-6867
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Free Mobile SMS notification failed
Hi,
I recently updated Medusa, and since then I haven't received any SMS for started/finished downloads.
In the error log file I have this:
```
2019-06-22 15:42:51 ERROR Thread_2 :: [74c3f12] Exception generated: can't concat str to bytes
Traceback (most recent call last):
File "C:\Medusa\Medusa\medusa\server\web\core\base.py", line 261, in async_call
result = function(**kwargs)
File "C:\Medusa\Medusa\medusa\server\web\home\handler.py", line 300, in testFreeMobile
result, message = notifiers.freemobile_notifier.test_notify(freemobile_id, freemobile_apikey)
File "C:\Medusa\Medusa\medusa\notifiers\freemobile.py", line 30, in test_notify
return self._notifyFreeMobile('Test', 'This is a test notification from Medusa', cust_id, apiKey, force=True)
File "C:\Medusa\Medusa\medusa\notifiers\freemobile.py", line 120, in _notifyFreeMobile
return self._sendFreeMobileSMS(title, message, cust_id, apiKey)
File "C:\Medusa\Medusa\medusa\notifiers\freemobile.py", line 51, in _sendFreeMobileSMS
msg_quoted = quote(title.encode('utf-8') + ': ' + msg.encode('utf-8'))
TypeError: can't concat str to bytes
```
My Free Mobile customer ID and my Free Mobile API Key are still the same.
Thanks in advance for any help you may provide.
Regards.
--- END ISSUE ---
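(Editorial note: the traceback is the Python 3 bytes/str split; `bytes + str` concatenation raises exactly this `TypeError`. Below is a minimal reproduction plus the format-then-encode fix used by the golden diff, shown here with the standard-library `quote`, which is effectively the same callable as `requests.compat.quote` on Python 3:)

```python
from urllib.parse import quote

title, msg = 'Test', 'This is a test notification from Medusa'

try:
    title.encode('utf-8') + ': ' + msg.encode('utf-8')  # bytes + str
except TypeError as e:
    print(e)  # can't concat str to bytes

# Build the full string first, then encode once before percent-encoding:
msg_quoted = quote('{0}: {1}'.format(title, msg.strip()).encode('utf-8'))
print(msg_quoted)  # Test%3A%20This%20is%20a%20test%20notification%20from%20Medusa
```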
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `medusa/notifiers/freemobile.py`
Content:
```
1 # coding=utf-8
2
3 from __future__ import unicode_literals
4
5 import logging
6 from builtins import object
7
8 from medusa import app
9 from medusa.common import (
10 NOTIFY_DOWNLOAD,
11 NOTIFY_GIT_UPDATE,
12 NOTIFY_GIT_UPDATE_TEXT,
13 NOTIFY_LOGIN,
14 NOTIFY_LOGIN_TEXT,
15 NOTIFY_SUBTITLE_DOWNLOAD,
16 notifyStrings,
17 )
18 from medusa.logger.adapters.style import BraceAdapter
19
20 from requests.compat import quote
21
22 from six.moves.urllib.request import Request, urlopen
23
24 log = BraceAdapter(logging.getLogger(__name__))
25 log.logger.addHandler(logging.NullHandler())
26
27
28 class Notifier(object):
29 def test_notify(self, cust_id=None, apiKey=None):
30 return self._notifyFreeMobile('Test', 'This is a test notification from Medusa', cust_id, apiKey, force=True)
31
32 def _sendFreeMobileSMS(self, title, msg, cust_id=None, apiKey=None):
33 """
34 Send a SMS notification
35
36 msg: The message to send (unicode)
37 title: The title of the message
38 userKey: The pushover user id to send the message to (or to subscribe with)
39
40 return: True if the message succeeded, False otherwise
41 """
42 if cust_id is None:
43 cust_id = app.FREEMOBILE_ID
44 if apiKey is None:
45 apiKey = app.FREEMOBILE_APIKEY
46
47 log.debug(u'Free Mobile in use with API KEY: {0}', apiKey)
48
49 # build up the URL and parameters
50 msg = msg.strip()
51 msg_quoted = quote(title.encode('utf-8') + ': ' + msg.encode('utf-8'))
52 URL = 'https://smsapi.free-mobile.fr/sendmsg?user=' + cust_id + '&pass=' + apiKey + '&msg=' + msg_quoted
53
54 req = Request(URL)
55 # send the request to Free Mobile
56 try:
57 urlopen(req)
58 except IOError as e:
59 if hasattr(e, 'code'):
60 error_message = {
61 400: 'Missing parameter(s).',
62 402: 'Too much SMS sent in a short time.',
63 403: 'API service is not enabled in your account or ID / API key is incorrect.',
64 500: 'Server error. Please retry in few moment.',
65 }
66 message = error_message.get(e.code)
67 if message:
68 log.error(message)
69 return False, message
70 except Exception as e:
71 message = u'Error while sending SMS: {0}'.format(e)
72 log.error(message)
73 return False, message
74
75 message = 'Free Mobile SMS successful.'
76 log.info(message)
77 return True, message
78
79 def notify_snatch(self, title, message):
80 if app.FREEMOBILE_NOTIFY_ONSNATCH:
81 self._notifyFreeMobile(title, message)
82
83 def notify_download(self, ep_obj, title=notifyStrings[NOTIFY_DOWNLOAD]):
84 if app.FREEMOBILE_NOTIFY_ONDOWNLOAD:
85 self._notifyFreeMobile(title, ep_obj.pretty_name_with_quality())
86
87 def notify_subtitle_download(self, ep_obj, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]):
88 if app.FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD:
89 self._notifyFreeMobile(title, ep_obj.pretty_name() + ': ' + lang)
90
91 def notify_git_update(self, new_version='??'):
92 if app.USE_FREEMOBILE:
93 update_text = notifyStrings[NOTIFY_GIT_UPDATE_TEXT]
94 title = notifyStrings[NOTIFY_GIT_UPDATE]
95 self._notifyFreeMobile(title, update_text + new_version)
96
97 def notify_login(self, ipaddress=''):
98 if app.USE_FREEMOBILE:
99 update_text = notifyStrings[NOTIFY_LOGIN_TEXT]
100 title = notifyStrings[NOTIFY_LOGIN]
101 self._notifyFreeMobile(title, update_text.format(ipaddress))
102
103 def _notifyFreeMobile(self, title, message, cust_id=None, apiKey=None, force=False): # pylint: disable=too-many-arguments
104 """
105 Sends a SMS notification
106
107 title: The title of the notification to send
108 message: The message string to send
109 cust_id: Your Free Mobile customer ID
110 apikey: Your Free Mobile API key
111 force: Enforce sending, for instance for testing
112 """
113
114 if not app.USE_FREEMOBILE and not force:
115 log.debug(u'Notification for Free Mobile not enabled, skipping this notification')
116 return False, 'Disabled'
117
118 log.debug(u'Sending a SMS for {0}', message)
119
120 return self._sendFreeMobileSMS(title, message, cust_id, apiKey)
121
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/medusa/notifiers/freemobile.py b/medusa/notifiers/freemobile.py
--- a/medusa/notifiers/freemobile.py
+++ b/medusa/notifiers/freemobile.py
@@ -3,7 +3,6 @@
from __future__ import unicode_literals
import logging
-from builtins import object
from medusa import app
from medusa.common import (
@@ -47,9 +46,13 @@
log.debug(u'Free Mobile in use with API KEY: {0}', apiKey)
# build up the URL and parameters
- msg = msg.strip()
- msg_quoted = quote(title.encode('utf-8') + ': ' + msg.encode('utf-8'))
- URL = 'https://smsapi.free-mobile.fr/sendmsg?user=' + cust_id + '&pass=' + apiKey + '&msg=' + msg_quoted
+ msg = '{0}: {1}'.format(title, msg.strip())
+ msg_quoted = quote(msg.encode('utf-8'))
+ URL = 'https://smsapi.free-mobile.fr/sendmsg?user={user}&pass={api_key}&msg={msg}'.format(
+ user=cust_id,
+ api_key=apiKey,
+ msg=msg_quoted,
+ )
req = Request(URL)
# send the request to Free Mobile
|
{"golden_diff": "diff --git a/medusa/notifiers/freemobile.py b/medusa/notifiers/freemobile.py\n--- a/medusa/notifiers/freemobile.py\n+++ b/medusa/notifiers/freemobile.py\n@@ -3,7 +3,6 @@\n from __future__ import unicode_literals\n \n import logging\n-from builtins import object\n \n from medusa import app\n from medusa.common import (\n@@ -47,9 +46,13 @@\n log.debug(u'Free Mobile in use with API KEY: {0}', apiKey)\n \n # build up the URL and parameters\n- msg = msg.strip()\n- msg_quoted = quote(title.encode('utf-8') + ': ' + msg.encode('utf-8'))\n- URL = 'https://smsapi.free-mobile.fr/sendmsg?user=' + cust_id + '&pass=' + apiKey + '&msg=' + msg_quoted\n+ msg = '{0}: {1}'.format(title, msg.strip())\n+ msg_quoted = quote(msg.encode('utf-8'))\n+ URL = 'https://smsapi.free-mobile.fr/sendmsg?user={user}&pass={api_key}&msg={msg}'.format(\n+ user=cust_id,\n+ api_key=apiKey,\n+ msg=msg_quoted,\n+ )\n \n req = Request(URL)\n # send the request to Free Mobile\n", "issue": "Free Mobile SMS notification failed\nHi, \r\nI recently update Medusa, and since then, I didn't receive any SMS for started/finished downloads\r\n\r\nOn the error log file I have this:\r\n```\r\n2019-06-22 15:42:51 ERROR Thread_2 :: [74c3f12] Exception generated: can't concat str to bytes\r\nTraceback (most recent call last):\r\n File \"C:\\Medusa\\Medusa\\medusa\\server\\web\\core\\base.py\", line 261, in async_call\r\n result = function(**kwargs)\r\n File \"C:\\Medusa\\Medusa\\medusa\\server\\web\\home\\handler.py\", line 300, in testFreeMobile\r\n result, message = notifiers.freemobile_notifier.test_notify(freemobile_id, freemobile_apikey)\r\n File \"C:\\Medusa\\Medusa\\medusa\\notifiers\\freemobile.py\", line 30, in test_notify\r\n return self._notifyFreeMobile('Test', 'This is a test notification from Medusa', cust_id, apiKey, force=True)\r\n File \"C:\\Medusa\\Medusa\\medusa\\notifiers\\freemobile.py\", line 120, in _notifyFreeMobile\r\n return self._sendFreeMobileSMS(title, message, cust_id, apiKey)\r\n File \"C:\\Medusa\\Medusa\\medusa\\notifiers\\freemobile.py\", line 51, in _sendFreeMobileSMS\r\n msg_quoted = quote(title.encode('utf-8') + ': ' + msg.encode('utf-8'))\r\nTypeError: can't concat str to bytes\r\n```\r\nMy Free Mobile customer ID and my Free Mobile API Key are still the same..\r\n\r\nThanks in advance for any help you may provide.\r\nRegards.\n", "before_files": [{"content": "# coding=utf-8\n\nfrom __future__ import unicode_literals\n\nimport logging\nfrom builtins import object\n\nfrom medusa import app\nfrom medusa.common import (\n NOTIFY_DOWNLOAD,\n NOTIFY_GIT_UPDATE,\n NOTIFY_GIT_UPDATE_TEXT,\n NOTIFY_LOGIN,\n NOTIFY_LOGIN_TEXT,\n NOTIFY_SUBTITLE_DOWNLOAD,\n notifyStrings,\n)\nfrom medusa.logger.adapters.style import BraceAdapter\n\nfrom requests.compat import quote\n\nfrom six.moves.urllib.request import Request, urlopen\n\nlog = BraceAdapter(logging.getLogger(__name__))\nlog.logger.addHandler(logging.NullHandler())\n\n\nclass Notifier(object):\n def test_notify(self, cust_id=None, apiKey=None):\n return self._notifyFreeMobile('Test', 'This is a test notification from Medusa', cust_id, apiKey, force=True)\n\n def _sendFreeMobileSMS(self, title, msg, cust_id=None, apiKey=None):\n \"\"\"\n Send a SMS notification\n\n msg: The message to send (unicode)\n title: The title of the message\n userKey: The pushover user id to send the message to (or to subscribe with)\n\n return: True if the message succeeded, False otherwise\n \"\"\"\n if cust_id is None:\n cust_id = app.FREEMOBILE_ID\n if apiKey is None:\n 
apiKey = app.FREEMOBILE_APIKEY\n\n log.debug(u'Free Mobile in use with API KEY: {0}', apiKey)\n\n # build up the URL and parameters\n msg = msg.strip()\n msg_quoted = quote(title.encode('utf-8') + ': ' + msg.encode('utf-8'))\n URL = 'https://smsapi.free-mobile.fr/sendmsg?user=' + cust_id + '&pass=' + apiKey + '&msg=' + msg_quoted\n\n req = Request(URL)\n # send the request to Free Mobile\n try:\n urlopen(req)\n except IOError as e:\n if hasattr(e, 'code'):\n error_message = {\n 400: 'Missing parameter(s).',\n 402: 'Too much SMS sent in a short time.',\n 403: 'API service is not enabled in your account or ID / API key is incorrect.',\n 500: 'Server error. Please retry in few moment.',\n }\n message = error_message.get(e.code)\n if message:\n log.error(message)\n return False, message\n except Exception as e:\n message = u'Error while sending SMS: {0}'.format(e)\n log.error(message)\n return False, message\n\n message = 'Free Mobile SMS successful.'\n log.info(message)\n return True, message\n\n def notify_snatch(self, title, message):\n if app.FREEMOBILE_NOTIFY_ONSNATCH:\n self._notifyFreeMobile(title, message)\n\n def notify_download(self, ep_obj, title=notifyStrings[NOTIFY_DOWNLOAD]):\n if app.FREEMOBILE_NOTIFY_ONDOWNLOAD:\n self._notifyFreeMobile(title, ep_obj.pretty_name_with_quality())\n\n def notify_subtitle_download(self, ep_obj, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]):\n if app.FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD:\n self._notifyFreeMobile(title, ep_obj.pretty_name() + ': ' + lang)\n\n def notify_git_update(self, new_version='??'):\n if app.USE_FREEMOBILE:\n update_text = notifyStrings[NOTIFY_GIT_UPDATE_TEXT]\n title = notifyStrings[NOTIFY_GIT_UPDATE]\n self._notifyFreeMobile(title, update_text + new_version)\n\n def notify_login(self, ipaddress=''):\n if app.USE_FREEMOBILE:\n update_text = notifyStrings[NOTIFY_LOGIN_TEXT]\n title = notifyStrings[NOTIFY_LOGIN]\n self._notifyFreeMobile(title, update_text.format(ipaddress))\n\n def _notifyFreeMobile(self, title, message, cust_id=None, apiKey=None, force=False): # pylint: disable=too-many-arguments\n \"\"\"\n Sends a SMS notification\n\n title: The title of the notification to send\n message: The message string to send\n cust_id: Your Free Mobile customer ID\n apikey: Your Free Mobile API key\n force: Enforce sending, for instance for testing\n \"\"\"\n\n if not app.USE_FREEMOBILE and not force:\n log.debug(u'Notification for Free Mobile not enabled, skipping this notification')\n return False, 'Disabled'\n\n log.debug(u'Sending a SMS for {0}', message)\n\n return self._sendFreeMobileSMS(title, message, cust_id, apiKey)\n", "path": "medusa/notifiers/freemobile.py"}], "after_files": [{"content": "# coding=utf-8\n\nfrom __future__ import unicode_literals\n\nimport logging\n\nfrom medusa import app\nfrom medusa.common import (\n NOTIFY_DOWNLOAD,\n NOTIFY_GIT_UPDATE,\n NOTIFY_GIT_UPDATE_TEXT,\n NOTIFY_LOGIN,\n NOTIFY_LOGIN_TEXT,\n NOTIFY_SUBTITLE_DOWNLOAD,\n notifyStrings,\n)\nfrom medusa.logger.adapters.style import BraceAdapter\n\nfrom requests.compat import quote\n\nfrom six.moves.urllib.request import Request, urlopen\n\nlog = BraceAdapter(logging.getLogger(__name__))\nlog.logger.addHandler(logging.NullHandler())\n\n\nclass Notifier(object):\n def test_notify(self, cust_id=None, apiKey=None):\n return self._notifyFreeMobile('Test', 'This is a test notification from Medusa', cust_id, apiKey, force=True)\n\n def _sendFreeMobileSMS(self, title, msg, cust_id=None, apiKey=None):\n \"\"\"\n Send a SMS notification\n\n msg: The 
message to send (unicode)\n title: The title of the message\n userKey: The pushover user id to send the message to (or to subscribe with)\n\n return: True if the message succeeded, False otherwise\n \"\"\"\n if cust_id is None:\n cust_id = app.FREEMOBILE_ID\n if apiKey is None:\n apiKey = app.FREEMOBILE_APIKEY\n\n log.debug(u'Free Mobile in use with API KEY: {0}', apiKey)\n\n # build up the URL and parameters\n msg = '{0}: {1}'.format(title, msg.strip())\n msg_quoted = quote(msg.encode('utf-8'))\n URL = 'https://smsapi.free-mobile.fr/sendmsg?user={user}&pass={api_key}&msg={msg}'.format(\n user=cust_id,\n api_key=apiKey,\n msg=msg_quoted,\n )\n\n req = Request(URL)\n # send the request to Free Mobile\n try:\n urlopen(req)\n except IOError as e:\n if hasattr(e, 'code'):\n error_message = {\n 400: 'Missing parameter(s).',\n 402: 'Too much SMS sent in a short time.',\n 403: 'API service is not enabled in your account or ID / API key is incorrect.',\n 500: 'Server error. Please retry in few moment.',\n }\n message = error_message.get(e.code)\n if message:\n log.error(message)\n return False, message\n except Exception as e:\n message = u'Error while sending SMS: {0}'.format(e)\n log.error(message)\n return False, message\n\n message = 'Free Mobile SMS successful.'\n log.info(message)\n return True, message\n\n def notify_snatch(self, title, message):\n if app.FREEMOBILE_NOTIFY_ONSNATCH:\n self._notifyFreeMobile(title, message)\n\n def notify_download(self, ep_obj, title=notifyStrings[NOTIFY_DOWNLOAD]):\n if app.FREEMOBILE_NOTIFY_ONDOWNLOAD:\n self._notifyFreeMobile(title, ep_obj.pretty_name_with_quality())\n\n def notify_subtitle_download(self, ep_obj, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]):\n if app.FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD:\n self._notifyFreeMobile(title, ep_obj.pretty_name() + ': ' + lang)\n\n def notify_git_update(self, new_version='??'):\n if app.USE_FREEMOBILE:\n update_text = notifyStrings[NOTIFY_GIT_UPDATE_TEXT]\n title = notifyStrings[NOTIFY_GIT_UPDATE]\n self._notifyFreeMobile(title, update_text + new_version)\n\n def notify_login(self, ipaddress=''):\n if app.USE_FREEMOBILE:\n update_text = notifyStrings[NOTIFY_LOGIN_TEXT]\n title = notifyStrings[NOTIFY_LOGIN]\n self._notifyFreeMobile(title, update_text.format(ipaddress))\n\n def _notifyFreeMobile(self, title, message, cust_id=None, apiKey=None, force=False): # pylint: disable=too-many-arguments\n \"\"\"\n Sends a SMS notification\n\n title: The title of the notification to send\n message: The message string to send\n cust_id: Your Free Mobile customer ID\n apikey: Your Free Mobile API key\n force: Enforce sending, for instance for testing\n \"\"\"\n\n if not app.USE_FREEMOBILE and not force:\n log.debug(u'Notification for Free Mobile not enabled, skipping this notification')\n return False, 'Disabled'\n\n log.debug(u'Sending a SMS for {0}', message)\n\n return self._sendFreeMobileSMS(title, message, cust_id, apiKey)\n", "path": "medusa/notifiers/freemobile.py"}]}
| 1,917 | 299 |
gh_patches_debug_22529
|
rasdani/github-patches
|
git_diff
|
lutris__lutris-1227
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Vulkan check will not work on certain distributions
Note that this probably won't work on certain distributions, where this file, for instance, is actually called `/usr/lib/x86_64-linux-gnu/libvulkan.so.1` or similar. Confirmed not working on Linux Mint 19.
_Originally posted by @Vexatos in https://github.com/lutris/lutris/pull/1186_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lutris/util/vulkan.py`
Content:
```
1 """Vulkan helper module"""
2 import os
3 from enum import Enum
4
5 class vulkan_available(Enum):
6 NONE = 0
7 THIRTY_TWO = 1
8 SIXTY_FOUR = 2
9 ALL = 3
10
11 def vulkan_check():
12 vulkan_lib = os.path.isfile("/usr/lib/libvulkan.so")
13 vulkan_lib32 = os.path.isfile("/usr/lib32/libvulkan.so")
14 vulkan_lib_multi = os.path.isfile("/usr/lib/x86_64-linux-gnu/libvulkan.so")
15 vulkan_lib32_multi = os.path.isfile("/usr/lib32/i386-linux-gnu/libvulkan.so")
16 has_32_bit = vulkan_lib32 or vulkan_lib32_multi
17 has_64_bit = vulkan_lib or vulkan_lib_multi
18
19 if not (has_64_bit or has_32_bit):
20 return vulkan_available.NONE
21 if has_64_bit and not has_32_bit:
22 return vulkan_available.SIXTY_FOUR
23 if not has_64_bit and has_32_bit:
24 return vulkan_available.THIRTY_TWO
25 return vulkan_available.ALL
26
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
|
diff --git a/lutris/util/vulkan.py b/lutris/util/vulkan.py
--- a/lutris/util/vulkan.py
+++ b/lutris/util/vulkan.py
@@ -1,5 +1,6 @@
 """Vulkan helper module"""
 import os
+import re
 from enum import Enum
 
 class vulkan_available(Enum):
@@ -8,11 +9,20 @@
     SIXTY_FOUR = 2
     ALL = 3
 
+def search_for_file(directory):
+    if os.path.isdir(directory):
+        pattern = re.compile(r'^libvulkan\.so')
+        files = [f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))]
+        files = [os.path.join(directory, f) for f in files if pattern.search(f)]
+        if files:
+            return True
+    return False
+
 def vulkan_check():
-    vulkan_lib = os.path.isfile("/usr/lib/libvulkan.so")
-    vulkan_lib32 = os.path.isfile("/usr/lib32/libvulkan.so")
-    vulkan_lib_multi = os.path.isfile("/usr/lib/x86_64-linux-gnu/libvulkan.so")
-    vulkan_lib32_multi = os.path.isfile("/usr/lib32/i386-linux-gnu/libvulkan.so")
+    vulkan_lib = search_for_file("/usr/lib")
+    vulkan_lib32 = search_for_file("/usr/lib32")
+    vulkan_lib_multi = search_for_file("/usr/lib/x86_64-linux-gnu")
+    vulkan_lib32_multi = search_for_file("/usr/lib32/i386-linux-gnu")
    has_32_bit = vulkan_lib32 or vulkan_lib32_multi
    has_64_bit = vulkan_lib or vulkan_lib_multi
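For readers skimming the patch: the essential change is matching any `libvulkan.so*` filename instead of only the bare `libvulkan.so`. A minimal standalone sketch of that detection logic, assuming a POSIX filesystem (the directories probed here are illustrative):

``` python
import os
import re

def search_for_file(directory):
    """Return True if `directory` holds any file named libvulkan.so*."""
    if os.path.isdir(directory):
        # A ^-anchored `search` also accepts versioned names such as
        # libvulkan.so.1 or libvulkan.so.1.1.70.
        pattern = re.compile(r'^libvulkan\.so')
        return any(
            pattern.search(f)
            for f in os.listdir(directory)
            if os.path.isfile(os.path.join(directory, f))
        )
    return False

if __name__ == '__main__':
    for directory in ('/usr/lib', '/usr/lib32', '/usr/lib/x86_64-linux-gnu'):
        print(directory, search_for_file(directory))
```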
|
{"golden_diff": "diff --git a/lutris/util/vulkan.py b/lutris/util/vulkan.py\n--- a/lutris/util/vulkan.py\n+++ b/lutris/util/vulkan.py\n@@ -1,5 +1,6 @@\n \"\"\"Vulkan helper module\"\"\"\n import os\n+import re\n from enum import Enum\n \n class vulkan_available(Enum):\n@@ -8,11 +9,20 @@\n SIXTY_FOUR = 2\n ALL = 3\n \n+def search_for_file(directory):\n+ if os.path.isdir(directory):\n+ pattern = re.compile(r'^libvulkan\\.so')\n+ files = [f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))]\n+ files = [os.path.join(directory, f) for f in files if pattern.search(f)]\n+ if files:\n+ return True\n+ return False\n+\n def vulkan_check():\n- vulkan_lib = os.path.isfile(\"/usr/lib/libvulkan.so\")\n- vulkan_lib32 = os.path.isfile(\"/usr/lib32/libvulkan.so\")\n- vulkan_lib_multi = os.path.isfile(\"/usr/lib/x86_64-linux-gnu/libvulkan.so\")\n- vulkan_lib32_multi = os.path.isfile(\"/usr/lib32/i386-linux-gnu/libvulkan.so\")\n+ vulkan_lib = search_for_file(\"/usr/lib\")\n+ vulkan_lib32 = search_for_file(\"/usr/lib32\")\n+ vulkan_lib_multi = search_for_file(\"/usr/lib/x86_64-linux-gnu\")\n+ vulkan_lib32_multi = search_for_file(\"/usr/lib32/i386-linux-gnu\")\n has_32_bit = vulkan_lib32 or vulkan_lib32_multi\n has_64_bit = vulkan_lib or vulkan_lib_multi\n", "issue": "Vulkan check will not work on certain distributions\nNote that this probably won't work on certain distributions, where this file, for instance, is actually called `/usr/lib/x86_64-linux-gnu/libvulkan.so.1` or similar. Confirmed not working on Linux Mint 19.\r\n\r\n_Originally posted by @Vexatos in https://github.com/lutris/lutris/pull/1186_\n", "before_files": [{"content": "\"\"\"Vulkan helper module\"\"\"\nimport os\nfrom enum import Enum\n\nclass vulkan_available(Enum):\n NONE = 0\n THIRTY_TWO = 1\n SIXTY_FOUR = 2\n ALL = 3\n\ndef vulkan_check():\n vulkan_lib = os.path.isfile(\"/usr/lib/libvulkan.so\")\n vulkan_lib32 = os.path.isfile(\"/usr/lib32/libvulkan.so\")\n vulkan_lib_multi = os.path.isfile(\"/usr/lib/x86_64-linux-gnu/libvulkan.so\")\n vulkan_lib32_multi = os.path.isfile(\"/usr/lib32/i386-linux-gnu/libvulkan.so\")\n has_32_bit = vulkan_lib32 or vulkan_lib32_multi\n has_64_bit = vulkan_lib or vulkan_lib_multi\n\n if not (has_64_bit or has_32_bit):\n return vulkan_available.NONE\n if has_64_bit and not has_32_bit:\n return vulkan_available.SIXTY_FOUR\n if not has_64_bit and has_32_bit:\n return vulkan_available.THIRTY_TWO\n return vulkan_available.ALL\n", "path": "lutris/util/vulkan.py"}], "after_files": [{"content": "\"\"\"Vulkan helper module\"\"\"\nimport os\nimport re\nfrom enum import Enum\n\nclass vulkan_available(Enum):\n NONE = 0\n THIRTY_TWO = 1\n SIXTY_FOUR = 2\n ALL = 3\n\ndef search_for_file(directory):\n if os.path.isdir(directory):\n pattern = re.compile(r'^libvulkan\\.so')\n files = [f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))]\n files = [os.path.join(directory, f) for f in files if pattern.search(f)]\n if files:\n return True\n return False\n\ndef vulkan_check():\n vulkan_lib = search_for_file(\"/usr/lib\")\n vulkan_lib32 = search_for_file(\"/usr/lib32\")\n vulkan_lib_multi = search_for_file(\"/usr/lib/x86_64-linux-gnu\")\n vulkan_lib32_multi = search_for_file(\"/usr/lib32/i386-linux-gnu\")\n has_32_bit = vulkan_lib32 or vulkan_lib32_multi\n has_64_bit = vulkan_lib or vulkan_lib_multi\n\n if not (has_64_bit or has_32_bit):\n return vulkan_available.NONE\n if has_64_bit and not has_32_bit:\n return vulkan_available.SIXTY_FOUR\n if not has_64_bit and has_32_bit:\n 
return vulkan_available.THIRTY_TWO\n return vulkan_available.ALL\n", "path": "lutris/util/vulkan.py"}]}
| 656 | 401 |
gh_patches_debug_16786
|
rasdani/github-patches
|
git_diff
|
blaze__blaze-431
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Individual columns should be able to repr if not passed in CSV
This issue title is possibly the worst ever, so here's an example:
``` python
import tempfile
import pandas as pd
from blaze import *
```
This works:
``` python
with tempfile.NamedTemporaryFile(delete=False) as f:
    df = pd.DataFrame(np.random.randn(10, 2))
    df.to_csv(f.name, index=False, header=False)
    csv = CSV(f.name, columns=list('ab'))  # passing columns to CSV
    t = Table(csv)
    assert t.a.isidentical(t['a'])
```
But this:
``` python
with tempfile.NamedTemporaryFile(delete=False) as f:
    df = pd.DataFrame(np.random.randn(10, 2))
    df.to_csv(f.name, index=False, header=False)
    csv = CSV(f.name)
    t = Table(csv, columns=list('ab'))  # passing columns to Table
    assert t.a.isidentical(t['a'])
```
yields a `t` where `t.a` throws an error in the interpreter when I try to `repr` it.
The difference is that the first propagates the columns to the `Table` (or maybe it doesn't propagate, but it uses the correct names), while in the second the columns are still represented by their "anonymous" names `_0`, `_1`, etc.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `blaze/api/table.py`
Content:
```
1 
2 from datashape import discover, Tuple, Record, dshape, Fixed
3 import itertools
4 
5 from ..expr.core import Expr
6 from ..expr.table import TableSymbol, TableExpr
7 from ..data.python import Python
8 from ..dispatch import dispatch
9 from ..data.core import DataDescriptor, discover
10 from ..data.pandas import into, DataFrame
11 from .into import into
12 
13 names = ('_%d' % i for i in itertools.count(1))
14 
15 class Table(TableSymbol):
16     """ Interactive Table
17 
18     Parameters
19     ----------
20 
21     data: DataDescriptor, tuple, DataFrame, RDD, SQL Table, ...
22         Anything that ``compute`` knows how to work with
23 
24     Optional
25     --------
26 
27     name: string
28         A name for the table
29     columns: iterable of strings
30         Column names, will be inferred from datasource if possible
31     schema: string or DataShape
32         Explitit Record containing datatypes and column names
33     """
34     __slots__ = 'data', 'schema', '_name', 'iscolumn'
35 
36     def __init__(self, data, name=None, columns=None, schema=None,
37                  iscolumn=False):
38         if not schema:
39             schema = discover(data).subshape[0]
40             types = None
41             if isinstance(schema[0], Tuple):
42                 columns = columns or list(range(len(schema[0].dshapes)))
43                 types = schema[0].dshapes
44             if isinstance(schema[0], Record):
45                 columns = columns or schema[0].names
46                 types = schema[0].types
47             if isinstance(schema[0], Fixed):
48                 types = (schema[1],) * int(schema[0])
49             if not columns:
50                 raise TypeError("Could not infer column names from data. "
51                                 "Please specify column names with `column=` "
52                                 "keyword")
53             if not types:
54                 raise TypeError("Could not infer data types from data. "
55                                 "Please specify schema with `schema=` keyword")
56 
57             schema = dshape(Record(list(zip(columns, types))))
58         self.schema = dshape(schema)
59 
60         self.data = data
61         self._name = name or next(names)
62         self.iscolumn = iscolumn
63 
64     def resources(self):
65         return {self: self.data}
66 
67     @property
68     def args(self):
69         return (id(self.data), self.schema, self._name, self.iscolumn)
70 
71 
72 @dispatch(Table, dict)
73 def _subs(o, d):
74     return o
75 
76 
77 @dispatch(Expr)
78 def compute(expr):
79     resources = expr.resources()
80     if not resources:
81         raise ValueError("No data resources found")
82     else:
83         return compute(expr, resources)
84 
85 
86 def table_repr(expr, n=10):
87     if not expr.resources():
88         return str(expr)
89     if isinstance(expr, TableExpr):
90         head = expr.head(n + 1)
91         result = compute(head)
92 
93         if expr.columns:
94             df = into(DataFrame(columns=expr.columns), result)
95         else:
96             df = into(DataFrame, result)
97         s = repr(df)
98         if len(df) > 10:
99             df = df[:10]
100             s = '\n'.join(s.split('\n')[:-1]) + '\n...'
101         return s
102 
103     else:
104         return repr(compute(expr))
105 
106 
107 @dispatch((type, object), TableExpr)
108 def into(a, b):
109     return into(a, compute(b))
110 
111 
112 @dispatch(DataFrame, TableExpr)
113 def into(a, b):
114     columns = b.columns
115     return into(DataFrame(columns=columns), compute(b))
116 
117 
118 Expr.__repr__ = table_repr
119 
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
|
diff --git a/blaze/api/table.py b/blaze/api/table.py
--- a/blaze/api/table.py
+++ b/blaze/api/table.py
@@ -4,9 +4,7 @@
 
 from ..expr.core import Expr
 from ..expr.table import TableSymbol, TableExpr
-from ..data.python import Python
 from ..dispatch import dispatch
-from ..data.core import DataDescriptor, discover
 from ..data.pandas import into, DataFrame
 from .into import into
 
@@ -58,6 +56,12 @@
         self.schema = dshape(schema)
 
         self.data = data
+
+        if hasattr(data, 'schema') and self.schema != data.schema:
+            raise TypeError('%s schema %s does not match %s schema %s' %
+                            (type(data).__name__, data.schema,
+                             type(self).__name__, self.schema))
+
         self._name = name or next(names)
         self.iscolumn = iscolumn
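The added guard is a fail-fast consistency check: when both the wrapper and its data source carry a schema, a mismatch is rejected at construction time instead of surfacing later as a confusing `repr` error. A self-contained sketch of the pattern, deliberately independent of blaze (the class names here are illustrative):

``` python
class Source:
    def __init__(self, schema):
        self.schema = schema

class Wrapper:
    def __init__(self, data, schema):
        # Reject mismatched schemas up front, mirroring the patch above.
        if hasattr(data, 'schema') and schema != data.schema:
            raise TypeError('%s schema %s does not match %s schema %s' %
                            (type(data).__name__, data.schema,
                             type(self).__name__, schema))
        self.data = data
        self.schema = schema

Wrapper(Source('{a: int32}'), '{a: int32}')       # accepted
try:
    Wrapper(Source('{_0: int32}'), '{a: int32}')  # raises TypeError
except TypeError as exc:
    print(exc)
```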
|
{"golden_diff": "diff --git a/blaze/api/table.py b/blaze/api/table.py\n--- a/blaze/api/table.py\n+++ b/blaze/api/table.py\n@@ -4,9 +4,7 @@\n \n from ..expr.core import Expr\n from ..expr.table import TableSymbol, TableExpr\n-from ..data.python import Python\n from ..dispatch import dispatch\n-from ..data.core import DataDescriptor, discover\n from ..data.pandas import into, DataFrame\n from .into import into\n \n@@ -58,6 +56,12 @@\n self.schema = dshape(schema)\n \n self.data = data\n+\n+ if hasattr(data, 'schema') and self.schema != data.schema:\n+ raise TypeError('%s schema %s does not match %s schema %s' %\n+ (type(data).__name__, data.schema,\n+ type(self).__name__, self.schema))\n+\n self._name = name or next(names)\n self.iscolumn = iscolumn\n", "issue": "Individual columns should be able to repr if not passed in CSV\nThis issue title is possibly the worst ever, so here's an example:\n\n``` python\nimport tempfile\nimport pandas as pd\nfrom blaze import *\n```\n\nThis works:\n\n``` python\nwith tempfile.NamedTemporaryFile(delete=False) as f:\n df = pd.DataFrame(np.random.randn(10, 2))\n df.to_csv(f.name, index=False, header=False)\n csv = CSV(f.name, columns=list('ab')) # passing columns to CSV\n t = Table(csv)\n assert t.a.isidentical(t['a'])\n```\n\nBut this:\n\n``` python\nwith tempfile.NamedTemporaryFile(delete=False) as f:\n df = pd.DataFrame(np.random.randn(10, 2))\n df.to_csv(f.name, index=False, header=False)\n csv = CSV(f.name)\n t = Table(csv, columns=list('ab')) # passing columns to Table\n assert t.a.isidentical(t['a'])\n```\n\nyield a `t` where `t.a` throws an error in the interpreter when I try to `repr` it.\n\nThe difference is that the first propagates the columns to the `Table` (or maybe it doesn't propagate, but it uses the correct names), while in the second the columns are still represented by their \"anonymous\" names `_0`, `_1`, etc.\n\n", "before_files": [{"content": "\nfrom datashape import discover, Tuple, Record, dshape, Fixed\nimport itertools\n\nfrom ..expr.core import Expr\nfrom ..expr.table import TableSymbol, TableExpr\nfrom ..data.python import Python\nfrom ..dispatch import dispatch\nfrom ..data.core import DataDescriptor, discover\nfrom ..data.pandas import into, DataFrame\nfrom .into import into\n\nnames = ('_%d' % i for i in itertools.count(1))\n\nclass Table(TableSymbol):\n \"\"\" Interactive Table\n\n Parameters\n ----------\n\n data: DataDescriptor, tuple, DataFrame, RDD, SQL Table, ...\n Anything that ``compute`` knows how to work with\n\n Optional\n --------\n\n name: string\n A name for the table\n columns: iterable of strings\n Column names, will be inferred from datasource if possible\n schema: string or DataShape\n Explitit Record containing datatypes and column names\n \"\"\"\n __slots__ = 'data', 'schema', '_name', 'iscolumn'\n\n def __init__(self, data, name=None, columns=None, schema=None,\n iscolumn=False):\n if not schema:\n schema = discover(data).subshape[0]\n types = None\n if isinstance(schema[0], Tuple):\n columns = columns or list(range(len(schema[0].dshapes)))\n types = schema[0].dshapes\n if isinstance(schema[0], Record):\n columns = columns or schema[0].names\n types = schema[0].types\n if isinstance(schema[0], Fixed):\n types = (schema[1],) * int(schema[0])\n if not columns:\n raise TypeError(\"Could not infer column names from data. \"\n \"Please specify column names with `column=` \"\n \"keyword\")\n if not types:\n raise TypeError(\"Could not infer data types from data. 
\"\n \"Please specify schema with `schema=` keyword\")\n\n schema = dshape(Record(list(zip(columns, types))))\n self.schema = dshape(schema)\n\n self.data = data\n self._name = name or next(names)\n self.iscolumn = iscolumn\n\n def resources(self):\n return {self: self.data}\n\n @property\n def args(self):\n return (id(self.data), self.schema, self._name, self.iscolumn)\n\n\n@dispatch(Table, dict)\ndef _subs(o, d):\n return o\n\n\n@dispatch(Expr)\ndef compute(expr):\n resources = expr.resources()\n if not resources:\n raise ValueError(\"No data resources found\")\n else:\n return compute(expr, resources)\n\n\ndef table_repr(expr, n=10):\n if not expr.resources():\n return str(expr)\n if isinstance(expr, TableExpr):\n head = expr.head(n + 1)\n result = compute(head)\n\n if expr.columns:\n df = into(DataFrame(columns=expr.columns), result)\n else:\n df = into(DataFrame, result)\n s = repr(df)\n if len(df) > 10:\n df = df[:10]\n s = '\\n'.join(s.split('\\n')[:-1]) + '\\n...'\n return s\n\n else:\n return repr(compute(expr))\n\n\n@dispatch((type, object), TableExpr)\ndef into(a, b):\n return into(a, compute(b))\n\n\n@dispatch(DataFrame, TableExpr)\ndef into(a, b):\n columns = b.columns\n return into(DataFrame(columns=columns), compute(b))\n\n\nExpr.__repr__ = table_repr\n", "path": "blaze/api/table.py"}], "after_files": [{"content": "\nfrom datashape import discover, Tuple, Record, dshape, Fixed\nimport itertools\n\nfrom ..expr.core import Expr\nfrom ..expr.table import TableSymbol, TableExpr\nfrom ..dispatch import dispatch\nfrom ..data.pandas import into, DataFrame\nfrom .into import into\n\nnames = ('_%d' % i for i in itertools.count(1))\n\nclass Table(TableSymbol):\n \"\"\" Interactive Table\n\n Parameters\n ----------\n\n data: DataDescriptor, tuple, DataFrame, RDD, SQL Table, ...\n Anything that ``compute`` knows how to work with\n\n Optional\n --------\n\n name: string\n A name for the table\n columns: iterable of strings\n Column names, will be inferred from datasource if possible\n schema: string or DataShape\n Explitit Record containing datatypes and column names\n \"\"\"\n __slots__ = 'data', 'schema', '_name', 'iscolumn'\n\n def __init__(self, data, name=None, columns=None, schema=None,\n iscolumn=False):\n if not schema:\n schema = discover(data).subshape[0]\n types = None\n if isinstance(schema[0], Tuple):\n columns = columns or list(range(len(schema[0].dshapes)))\n types = schema[0].dshapes\n if isinstance(schema[0], Record):\n columns = columns or schema[0].names\n types = schema[0].types\n if isinstance(schema[0], Fixed):\n types = (schema[1],) * int(schema[0])\n if not columns:\n raise TypeError(\"Could not infer column names from data. \"\n \"Please specify column names with `column=` \"\n \"keyword\")\n if not types:\n raise TypeError(\"Could not infer data types from data. 
\"\n \"Please specify schema with `schema=` keyword\")\n\n schema = dshape(Record(list(zip(columns, types))))\n self.schema = dshape(schema)\n\n self.data = data\n\n if hasattr(data, 'schema') and self.schema != data.schema:\n raise TypeError('%s schema %s does not match %s schema %s' %\n (type(data).__name__, data.schema,\n type(self).__name__, self.schema))\n\n self._name = name or next(names)\n self.iscolumn = iscolumn\n\n def resources(self):\n return {self: self.data}\n\n @property\n def args(self):\n return (id(self.data), self.schema, self._name, self.iscolumn)\n\n\n@dispatch(Table, dict)\ndef _subs(o, d):\n return o\n\n\n@dispatch(Expr)\ndef compute(expr):\n resources = expr.resources()\n if not resources:\n raise ValueError(\"No data resources found\")\n else:\n return compute(expr, resources)\n\n\ndef table_repr(expr, n=10):\n if not expr.resources():\n return str(expr)\n if isinstance(expr, TableExpr):\n head = expr.head(n + 1)\n result = compute(head)\n\n if expr.columns:\n df = into(DataFrame(columns=expr.columns), result)\n else:\n df = into(DataFrame, result)\n s = repr(df)\n if len(df) > 10:\n df = df[:10]\n s = '\\n'.join(s.split('\\n')[:-1]) + '\\n...'\n return s\n\n else:\n return repr(compute(expr))\n\n\n@dispatch((type, object), TableExpr)\ndef into(a, b):\n return into(a, compute(b))\n\n\n@dispatch(DataFrame, TableExpr)\ndef into(a, b):\n columns = b.columns\n return into(DataFrame(columns=columns), compute(b))\n\n\nExpr.__repr__ = table_repr\n", "path": "blaze/api/table.py"}]}
| 1,565 | 204 |
gh_patches_debug_28013
|
rasdani/github-patches
|
git_diff
|
CTFd__CTFd-2344
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Challenge Preview Improvements
Challenge Preview should probably render in the context of a full page, primarily because that makes it easier to theme.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `CTFd/admin/challenges.py`
Content:
```
1 from flask import abort, render_template, request, url_for
2 
3 from CTFd.admin import admin
4 from CTFd.models import Challenges, Flags, Solves
5 from CTFd.plugins.challenges import CHALLENGE_CLASSES, get_chal_class
6 from CTFd.utils.decorators import admins_only
7 
8 
9 @admin.route("/admin/challenges")
10 @admins_only
11 def challenges_listing():
12     q = request.args.get("q")
13     field = request.args.get("field")
14     filters = []
15 
16     if q:
17         # The field exists as an exposed column
18         if Challenges.__mapper__.has_property(field):
19             filters.append(getattr(Challenges, field).like("%{}%".format(q)))
20 
21     query = Challenges.query.filter(*filters).order_by(Challenges.id.asc())
22     challenges = query.all()
23     total = query.count()
24 
25     return render_template(
26         "admin/challenges/challenges.html",
27         challenges=challenges,
28         total=total,
29         q=q,
30         field=field,
31     )
32 
33 
34 @admin.route("/admin/challenges/<int:challenge_id>")
35 @admins_only
36 def challenges_detail(challenge_id):
37     challenges = dict(
38         Challenges.query.with_entities(Challenges.id, Challenges.name).all()
39     )
40     challenge = Challenges.query.filter_by(id=challenge_id).first_or_404()
41     solves = (
42         Solves.query.filter_by(challenge_id=challenge.id)
43         .order_by(Solves.date.asc())
44         .all()
45     )
46     flags = Flags.query.filter_by(challenge_id=challenge.id).all()
47 
48     try:
49         challenge_class = get_chal_class(challenge.type)
50     except KeyError:
51         abort(
52             500,
53             f"The underlying challenge type ({challenge.type}) is not installed. This challenge can not be loaded.",
54         )
55 
56     update_j2 = render_template(
57         challenge_class.templates["update"].lstrip("/"), challenge=challenge
58     )
59 
60     update_script = url_for(
61         "views.static_html", route=challenge_class.scripts["update"].lstrip("/")
62     )
63     return render_template(
64         "admin/challenges/challenge.html",
65         update_template=update_j2,
66         update_script=update_script,
67         challenge=challenge,
68         challenges=challenges,
69         solves=solves,
70         flags=flags,
71     )
72 
73 
74 @admin.route("/admin/challenges/new")
75 @admins_only
76 def challenges_new():
77     types = CHALLENGE_CLASSES.keys()
78     return render_template("admin/challenges/new.html", types=types)
79 
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
|
diff --git a/CTFd/admin/challenges.py b/CTFd/admin/challenges.py
--- a/CTFd/admin/challenges.py
+++ b/CTFd/admin/challenges.py
@@ -3,7 +3,10 @@
 from CTFd.admin import admin
 from CTFd.models import Challenges, Flags, Solves
 from CTFd.plugins.challenges import CHALLENGE_CLASSES, get_chal_class
+from CTFd.schemas.tags import TagSchema
 from CTFd.utils.decorators import admins_only
+from CTFd.utils.security.signing import serialize
+from CTFd.utils.user import get_current_team, get_current_user
 
 
 @admin.route("/admin/challenges")
@@ -71,6 +74,43 @@
     )
 
 
[email protected]("/admin/challenges/preview/<int:challenge_id>")
+@admins_only
+def challenges_preview(challenge_id):
+    challenge = Challenges.query.filter_by(id=challenge_id).first_or_404()
+    chal_class = get_chal_class(challenge.type)
+    user = get_current_user()
+    team = get_current_team()
+
+    files = []
+    for f in challenge.files:
+        token = {
+            "user_id": user.id,
+            "team_id": team.id if team else None,
+            "file_id": f.id,
+        }
+        files.append(url_for("views.files", path=f.location, token=serialize(token)))
+
+    tags = [
+        tag["value"] for tag in TagSchema("user", many=True).dump(challenge.tags).data
+    ]
+
+    content = render_template(
+        chal_class.templates["view"].lstrip("/"),
+        solves=None,
+        solved_by_me=False,
+        files=files,
+        tags=tags,
+        hints=challenge.hints,
+        max_attempts=challenge.max_attempts,
+        attempts=0,
+        challenge=challenge,
+    )
+    return render_template(
+        "admin/challenges/preview.html", content=content, challenge=challenge
+    )
+
+
 @admin.route("/admin/challenges/new")
 @admins_only
 def challenges_new():
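The fix follows a two-step Flask rendering pattern: render the challenge-specific fragment first, then embed the result in a full-page wrapper so the theme applies. A minimal self-contained sketch of that pattern, assuming Flask is installed (the inline template strings stand in for the real theme files):

``` python
from flask import Flask, render_template_string

app = Flask(__name__)

INNER = "<div class='challenge'>{{ name }}</div>"
PAGE = "<html><body><h1>Preview</h1>{{ content | safe }}</body></html>"

@app.route("/preview/<name>")
def preview(name):
    # Step 1: render the challenge fragment on its own.
    content = render_template_string(INNER, name=name)
    # Step 2: hand the rendered fragment to a full-page template.
    return render_template_string(PAGE, content=content)

if __name__ == "__main__":
    with app.test_request_context():
        print(preview("example"))
```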
|
{"golden_diff": "diff --git a/CTFd/admin/challenges.py b/CTFd/admin/challenges.py\n--- a/CTFd/admin/challenges.py\n+++ b/CTFd/admin/challenges.py\n@@ -3,7 +3,10 @@\n from CTFd.admin import admin\n from CTFd.models import Challenges, Flags, Solves\n from CTFd.plugins.challenges import CHALLENGE_CLASSES, get_chal_class\n+from CTFd.schemas.tags import TagSchema\n from CTFd.utils.decorators import admins_only\n+from CTFd.utils.security.signing import serialize\n+from CTFd.utils.user import get_current_team, get_current_user\n \n \n @admin.route(\"/admin/challenges\")\n@@ -71,6 +74,43 @@\n )\n \n \[email protected](\"/admin/challenges/preview/<int:challenge_id>\")\n+@admins_only\n+def challenges_preview(challenge_id):\n+ challenge = Challenges.query.filter_by(id=challenge_id).first_or_404()\n+ chal_class = get_chal_class(challenge.type)\n+ user = get_current_user()\n+ team = get_current_team()\n+\n+ files = []\n+ for f in challenge.files:\n+ token = {\n+ \"user_id\": user.id,\n+ \"team_id\": team.id if team else None,\n+ \"file_id\": f.id,\n+ }\n+ files.append(url_for(\"views.files\", path=f.location, token=serialize(token)))\n+\n+ tags = [\n+ tag[\"value\"] for tag in TagSchema(\"user\", many=True).dump(challenge.tags).data\n+ ]\n+\n+ content = render_template(\n+ chal_class.templates[\"view\"].lstrip(\"/\"),\n+ solves=None,\n+ solved_by_me=False,\n+ files=files,\n+ tags=tags,\n+ hints=challenge.hints,\n+ max_attempts=challenge.max_attempts,\n+ attempts=0,\n+ challenge=challenge,\n+ )\n+ return render_template(\n+ \"admin/challenges/preview.html\", content=content, challenge=challenge\n+ )\n+\n+\n @admin.route(\"/admin/challenges/new\")\n @admins_only\n def challenges_new():\n", "issue": "Challenge Preview Improvements\nChallenge Preview should probably preview in the context of a full page. Primarily because it's easier to theme this. \n", "before_files": [{"content": "from flask import abort, render_template, request, url_for\n\nfrom CTFd.admin import admin\nfrom CTFd.models import Challenges, Flags, Solves\nfrom CTFd.plugins.challenges import CHALLENGE_CLASSES, get_chal_class\nfrom CTFd.utils.decorators import admins_only\n\n\[email protected](\"/admin/challenges\")\n@admins_only\ndef challenges_listing():\n q = request.args.get(\"q\")\n field = request.args.get(\"field\")\n filters = []\n\n if q:\n # The field exists as an exposed column\n if Challenges.__mapper__.has_property(field):\n filters.append(getattr(Challenges, field).like(\"%{}%\".format(q)))\n\n query = Challenges.query.filter(*filters).order_by(Challenges.id.asc())\n challenges = query.all()\n total = query.count()\n\n return render_template(\n \"admin/challenges/challenges.html\",\n challenges=challenges,\n total=total,\n q=q,\n field=field,\n )\n\n\[email protected](\"/admin/challenges/<int:challenge_id>\")\n@admins_only\ndef challenges_detail(challenge_id):\n challenges = dict(\n Challenges.query.with_entities(Challenges.id, Challenges.name).all()\n )\n challenge = Challenges.query.filter_by(id=challenge_id).first_or_404()\n solves = (\n Solves.query.filter_by(challenge_id=challenge.id)\n .order_by(Solves.date.asc())\n .all()\n )\n flags = Flags.query.filter_by(challenge_id=challenge.id).all()\n\n try:\n challenge_class = get_chal_class(challenge.type)\n except KeyError:\n abort(\n 500,\n f\"The underlying challenge type ({challenge.type}) is not installed. 
This challenge can not be loaded.\",\n )\n\n update_j2 = render_template(\n challenge_class.templates[\"update\"].lstrip(\"/\"), challenge=challenge\n )\n\n update_script = url_for(\n \"views.static_html\", route=challenge_class.scripts[\"update\"].lstrip(\"/\")\n )\n return render_template(\n \"admin/challenges/challenge.html\",\n update_template=update_j2,\n update_script=update_script,\n challenge=challenge,\n challenges=challenges,\n solves=solves,\n flags=flags,\n )\n\n\[email protected](\"/admin/challenges/new\")\n@admins_only\ndef challenges_new():\n types = CHALLENGE_CLASSES.keys()\n return render_template(\"admin/challenges/new.html\", types=types)\n", "path": "CTFd/admin/challenges.py"}], "after_files": [{"content": "from flask import abort, render_template, request, url_for\n\nfrom CTFd.admin import admin\nfrom CTFd.models import Challenges, Flags, Solves\nfrom CTFd.plugins.challenges import CHALLENGE_CLASSES, get_chal_class\nfrom CTFd.schemas.tags import TagSchema\nfrom CTFd.utils.decorators import admins_only\nfrom CTFd.utils.security.signing import serialize\nfrom CTFd.utils.user import get_current_team, get_current_user\n\n\[email protected](\"/admin/challenges\")\n@admins_only\ndef challenges_listing():\n q = request.args.get(\"q\")\n field = request.args.get(\"field\")\n filters = []\n\n if q:\n # The field exists as an exposed column\n if Challenges.__mapper__.has_property(field):\n filters.append(getattr(Challenges, field).like(\"%{}%\".format(q)))\n\n query = Challenges.query.filter(*filters).order_by(Challenges.id.asc())\n challenges = query.all()\n total = query.count()\n\n return render_template(\n \"admin/challenges/challenges.html\",\n challenges=challenges,\n total=total,\n q=q,\n field=field,\n )\n\n\[email protected](\"/admin/challenges/<int:challenge_id>\")\n@admins_only\ndef challenges_detail(challenge_id):\n challenges = dict(\n Challenges.query.with_entities(Challenges.id, Challenges.name).all()\n )\n challenge = Challenges.query.filter_by(id=challenge_id).first_or_404()\n solves = (\n Solves.query.filter_by(challenge_id=challenge.id)\n .order_by(Solves.date.asc())\n .all()\n )\n flags = Flags.query.filter_by(challenge_id=challenge.id).all()\n\n try:\n challenge_class = get_chal_class(challenge.type)\n except KeyError:\n abort(\n 500,\n f\"The underlying challenge type ({challenge.type}) is not installed. 
This challenge can not be loaded.\",\n )\n\n update_j2 = render_template(\n challenge_class.templates[\"update\"].lstrip(\"/\"), challenge=challenge\n )\n\n update_script = url_for(\n \"views.static_html\", route=challenge_class.scripts[\"update\"].lstrip(\"/\")\n )\n return render_template(\n \"admin/challenges/challenge.html\",\n update_template=update_j2,\n update_script=update_script,\n challenge=challenge,\n challenges=challenges,\n solves=solves,\n flags=flags,\n )\n\n\[email protected](\"/admin/challenges/preview/<int:challenge_id>\")\n@admins_only\ndef challenges_preview(challenge_id):\n challenge = Challenges.query.filter_by(id=challenge_id).first_or_404()\n chal_class = get_chal_class(challenge.type)\n user = get_current_user()\n team = get_current_team()\n\n files = []\n for f in challenge.files:\n token = {\n \"user_id\": user.id,\n \"team_id\": team.id if team else None,\n \"file_id\": f.id,\n }\n files.append(url_for(\"views.files\", path=f.location, token=serialize(token)))\n\n tags = [\n tag[\"value\"] for tag in TagSchema(\"user\", many=True).dump(challenge.tags).data\n ]\n\n content = render_template(\n chal_class.templates[\"view\"].lstrip(\"/\"),\n solves=None,\n solved_by_me=False,\n files=files,\n tags=tags,\n hints=challenge.hints,\n max_attempts=challenge.max_attempts,\n attempts=0,\n challenge=challenge,\n )\n return render_template(\n \"admin/challenges/preview.html\", content=content, challenge=challenge\n )\n\n\[email protected](\"/admin/challenges/new\")\n@admins_only\ndef challenges_new():\n types = CHALLENGE_CLASSES.keys()\n return render_template(\"admin/challenges/new.html\", types=types)\n", "path": "CTFd/admin/challenges.py"}]}
| 968 | 472 |
gh_patches_debug_19819
|
rasdani/github-patches
|
git_diff
|
chainer__chainer-3327
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Document about initializers
Criteria for initializer arguments of links are complicated (https://github.com/chainer/chainer/pull/3259#issuecomment-325562538). We should write some dedicated documentation about that, and let each link's documentation point to it. Maybe we can write on [this page](https://docs.chainer.org/en/v2.0.2/reference/initializers.html).
Also we should describe *the default initializer* (which is `LeCunNormal`).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `chainer/links/connection/linear.py`
Content:
```
1 from chainer.functions.connection import linear
2 from chainer import initializers
3 from chainer import link
4 from chainer import variable
5 
6 
7 class Linear(link.Link):
8 
9     """Linear layer (a.k.a.\\ fully-connected layer).
10 
11     This is a link that wraps the :func:`~chainer.functions.linear` function,
12     and holds a weight matrix ``W`` and optionally a bias vector ``b`` as
13     parameters.
14 
15     The weight matrix ``W`` is initialized with i.i.d. Gaussian samples, each
16     of which has zero mean and deviation :math:`\\sqrt{1/\\text{in_size}}`. The
17     bias vector ``b`` is of size ``out_size``. Each element is initialized with
18     the ``bias`` value. If ``nobias`` argument is set to ``True``, then this
19     link does not hold a bias vector.
20 
21     Args:
22         in_size (int or None): Dimension of input vectors. If ``None``,
23             parameter initialization will be deferred until the first forward
24             data pass at which time the size will be determined.
25         out_size (int): Dimension of output vectors.
26         nobias (bool): If ``True``, then this function does not use the bias.
27         initialW (2-D array): Initial weight value. If ``None``, then the
28             default initializer is used.
29             May also be a callable that takes ``numpy.ndarray`` or
30             ``cupy.ndarray`` and edits its value.
31         initial_bias (1-D array): Initial bias value. If ``None``, the bias
32             vector is initialized to zero.
33             May also be a callable that takes ``numpy.ndarray`` or
34             ``cupy.ndarray`` and edits its value.
35     .. seealso:: :func:`~chainer.functions.linear`
36 
37     Attributes:
38         W (~chainer.Variable): Weight parameter.
39         b (~chainer.Variable): Bias parameter.
40 
41     .. admonition:: Example
42 
43         There are several ways to make a Linear link.
44 
45         Define an input vector ``x`` as:
46 
47         >>> x = np.array([[0, 1, 2, 3, 4]], 'f')
48 
49         1. Give the first two arguments explicitly:
50 
51             Those numbers are considered as the input size and the output size.
52 
53             >>> l = L.Linear(5, 10)
54             >>> y = l(x)
55             >>> y.shape
56             (1, 10)
57 
58         2. Omit ``in_size`` (give the output size only as the first argument)
59             or fill it with ``None``:
60 
61             In this case, the size of second axis of ``x`` is used as the
62             input size. So the below two cases are the same.
63 
64             >>> l = L.Linear(10)
65             >>> y = l(x)
66             >>> y.shape
67             (1, 10)
68 
69             >>> l = L.Linear(None, 10)
70             >>> y = l(x)
71             >>> y.shape
72             (1, 10)
73 
74             When you omit the first argument, you need to specify the other
75             subsequent arguments from ``nobias`` as keyword arguments. So the
76             below two cases are the same.
77 
78             >>> l = L.Linear(None, 10, False, None, 0)
79             >>> y = l(x)
80             >>> y.shape
81             (1, 10)
82 
83             >>> l = L.Linear(10, nobias=False, initialW=None, initial_bias=0)
84             >>> y = l(x)
85             >>> y.shape
86             (1, 10)
87 
88     """
89 
90     def __init__(self, in_size, out_size=None, nobias=False,
91                  initialW=None, initial_bias=None):
92         super(Linear, self).__init__()
93 
94         if out_size is None:
95             in_size, out_size = None, in_size
96         self.out_size = out_size
97 
98         with self.init_scope():
99             W_initializer = initializers._get_initializer(initialW)
100             self.W = variable.Parameter(W_initializer)
101             if in_size is not None:
102                 self._initialize_params(in_size)
103 
104             if nobias:
105                 self.b = None
106             else:
107                 if initial_bias is None:
108                     initial_bias = 0
109                 bias_initializer = initializers._get_initializer(initial_bias)
110                 self.b = variable.Parameter(bias_initializer, out_size)
111 
112     def _initialize_params(self, in_size):
113         self.W.initialize((self.out_size, in_size))
114 
115     def __call__(self, x):
116         """Applies the linear layer.
117 
118         Args:
119             x (~chainer.Variable): Batch of input vectors.
120 
121         Returns:
122             ~chainer.Variable: Output of the linear layer.
123 
124         """
125         if self.W.data is None:
126             self._initialize_params(x.size // x.shape[0])
127         return linear.linear(x, self.W, self.b)
128 
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
|
diff --git a/chainer/links/connection/linear.py b/chainer/links/connection/linear.py
--- a/chainer/links/connection/linear.py
+++ b/chainer/links/connection/linear.py
@@ -24,14 +24,11 @@
             data pass at which time the size will be determined.
         out_size (int): Dimension of output vectors.
         nobias (bool): If ``True``, then this function does not use the bias.
-        initialW (2-D array): Initial weight value. If ``None``, then the
-            default initializer is used.
-            May also be a callable that takes ``numpy.ndarray`` or
-            ``cupy.ndarray`` and edits its value.
-        initial_bias (1-D array): Initial bias value. If ``None``, the bias
-            vector is initialized to zero.
-            May also be a callable that takes ``numpy.ndarray`` or
-            ``cupy.ndarray`` and edits its value.
+        initialW (:ref:`initializer <initializer>`): Initializer to initialize
+            the weight.
+        initial_bias (:ref:`initializer <initializer>`): Initializer to
+            initialize the bias. If ``None``, the bias will be initialized to
+            zero.
     .. seealso:: :func:`~chainer.functions.linear`
 
     Attributes:
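Since the patch only rewrites the docstring, the behavior it documents can be shown directly: `initialW` accepts ``None`` (falling back to the default ``LeCunNormal`` initializer mentioned in the issue), an explicit array, or an initializer object. A short usage sketch, assuming chainer and numpy are installed:

``` python
import numpy as np
import chainer
import chainer.links as L

# The three accepted forms for `initialW`:
l1 = L.Linear(5, 10)                                            # None -> default initializer
l2 = L.Linear(5, 10, initialW=np.zeros((10, 5), 'f'))           # explicit 2-D array
l3 = L.Linear(5, 10, initialW=chainer.initializers.HeNormal())  # initializer object

x = np.zeros((1, 5), 'f')
print(l1(x).shape, l2(x).shape, l3(x).shape)  # (1, 10) three times
```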
|
{"golden_diff": "diff --git a/chainer/links/connection/linear.py b/chainer/links/connection/linear.py\n--- a/chainer/links/connection/linear.py\n+++ b/chainer/links/connection/linear.py\n@@ -24,14 +24,11 @@\n data pass at which time the size will be determined.\n out_size (int): Dimension of output vectors.\n nobias (bool): If ``True``, then this function does not use the bias.\n- initialW (2-D array): Initial weight value. If ``None``, then the\n- default initializer is used.\n- May also be a callable that takes ``numpy.ndarray`` or\n- ``cupy.ndarray`` and edits its value.\n- initial_bias (1-D array): Initial bias value. If ``None``, the bias\n- vector is initialized to zero.\n- May also be a callable that takes ``numpy.ndarray`` or\n- ``cupy.ndarray`` and edits its value.\n+ initialW (:ref:`initializer <initializer>`): Initializer to initialize\n+ the weight.\n+ initial_bias (:ref:`initializer <initializer>`): Initializer to\n+ initialize the bias. If ``None``, the bias will be initialized to\n+ zero.\n .. seealso:: :func:`~chainer.functions.linear`\n \n Attributes:\n", "issue": "Document about initializers\nCriteria for initializer arguments of links is complicated (https://github.com/chainer/chainer/pull/3259#issuecomment-325562538). We should write some dedicated documentation about that, and let each link documentation point to it. Maybe we can write on [this page](https://docs.chainer.org/en/v2.0.2/reference/initializers.html).\r\n\r\nAlso we should describe *the default initializer* (which is `LeCunNormal`).\n", "before_files": [{"content": "from chainer.functions.connection import linear\nfrom chainer import initializers\nfrom chainer import link\nfrom chainer import variable\n\n\nclass Linear(link.Link):\n\n \"\"\"Linear layer (a.k.a.\\\\ fully-connected layer).\n\n This is a link that wraps the :func:`~chainer.functions.linear` function,\n and holds a weight matrix ``W`` and optionally a bias vector ``b`` as\n parameters.\n\n The weight matrix ``W`` is initialized with i.i.d. Gaussian samples, each\n of which has zero mean and deviation :math:`\\\\sqrt{1/\\\\text{in_size}}`. The\n bias vector ``b`` is of size ``out_size``. Each element is initialized with\n the ``bias`` value. If ``nobias`` argument is set to ``True``, then this\n link does not hold a bias vector.\n\n Args:\n in_size (int or None): Dimension of input vectors. If ``None``,\n parameter initialization will be deferred until the first forward\n data pass at which time the size will be determined.\n out_size (int): Dimension of output vectors.\n nobias (bool): If ``True``, then this function does not use the bias.\n initialW (2-D array): Initial weight value. If ``None``, then the\n default initializer is used.\n May also be a callable that takes ``numpy.ndarray`` or\n ``cupy.ndarray`` and edits its value.\n initial_bias (1-D array): Initial bias value. If ``None``, the bias\n vector is initialized to zero.\n May also be a callable that takes ``numpy.ndarray`` or\n ``cupy.ndarray`` and edits its value.\n .. seealso:: :func:`~chainer.functions.linear`\n\n Attributes:\n W (~chainer.Variable): Weight parameter.\n b (~chainer.Variable): Bias parameter.\n\n .. admonition:: Example\n\n There are several ways to make a Linear link.\n\n Define an input vector ``x`` as:\n\n >>> x = np.array([[0, 1, 2, 3, 4]], 'f')\n\n 1. Give the first two arguments explicitly:\n\n Those numbers are considered as the input size and the output size.\n\n >>> l = L.Linear(5, 10)\n >>> y = l(x)\n >>> y.shape\n (1, 10)\n\n 2. 
Omit ``in_size`` (give the output size only as the first argument)\n or fill it with ``None``:\n\n In this case, the size of second axis of ``x`` is used as the\n input size. So the below two cases are the same.\n\n >>> l = L.Linear(10)\n >>> y = l(x)\n >>> y.shape\n (1, 10)\n\n >>> l = L.Linear(None, 10)\n >>> y = l(x)\n >>> y.shape\n (1, 10)\n\n When you omit the first argument, you need to specify the other\n subsequent arguments from ``nobias`` as keyword arguments. So the\n below two cases are the same.\n\n >>> l = L.Linear(None, 10, False, None, 0)\n >>> y = l(x)\n >>> y.shape\n (1, 10)\n\n >>> l = L.Linear(10, nobias=False, initialW=None, initial_bias=0)\n >>> y = l(x)\n >>> y.shape\n (1, 10)\n\n \"\"\"\n\n def __init__(self, in_size, out_size=None, nobias=False,\n initialW=None, initial_bias=None):\n super(Linear, self).__init__()\n\n if out_size is None:\n in_size, out_size = None, in_size\n self.out_size = out_size\n\n with self.init_scope():\n W_initializer = initializers._get_initializer(initialW)\n self.W = variable.Parameter(W_initializer)\n if in_size is not None:\n self._initialize_params(in_size)\n\n if nobias:\n self.b = None\n else:\n if initial_bias is None:\n initial_bias = 0\n bias_initializer = initializers._get_initializer(initial_bias)\n self.b = variable.Parameter(bias_initializer, out_size)\n\n def _initialize_params(self, in_size):\n self.W.initialize((self.out_size, in_size))\n\n def __call__(self, x):\n \"\"\"Applies the linear layer.\n\n Args:\n x (~chainer.Variable): Batch of input vectors.\n\n Returns:\n ~chainer.Variable: Output of the linear layer.\n\n \"\"\"\n if self.W.data is None:\n self._initialize_params(x.size // x.shape[0])\n return linear.linear(x, self.W, self.b)\n", "path": "chainer/links/connection/linear.py"}], "after_files": [{"content": "from chainer.functions.connection import linear\nfrom chainer import initializers\nfrom chainer import link\nfrom chainer import variable\n\n\nclass Linear(link.Link):\n\n \"\"\"Linear layer (a.k.a.\\\\ fully-connected layer).\n\n This is a link that wraps the :func:`~chainer.functions.linear` function,\n and holds a weight matrix ``W`` and optionally a bias vector ``b`` as\n parameters.\n\n The weight matrix ``W`` is initialized with i.i.d. Gaussian samples, each\n of which has zero mean and deviation :math:`\\\\sqrt{1/\\\\text{in_size}}`. The\n bias vector ``b`` is of size ``out_size``. Each element is initialized with\n the ``bias`` value. If ``nobias`` argument is set to ``True``, then this\n link does not hold a bias vector.\n\n Args:\n in_size (int or None): Dimension of input vectors. If ``None``,\n parameter initialization will be deferred until the first forward\n data pass at which time the size will be determined.\n out_size (int): Dimension of output vectors.\n nobias (bool): If ``True``, then this function does not use the bias.\n initialW (:ref:`initializer <initializer>`): Initializer to initialize\n the weight.\n initial_bias (:ref:`initializer <initializer>`): Initializer to\n initialize the bias. If ``None``, the bias will be initialized to\n zero.\n .. seealso:: :func:`~chainer.functions.linear`\n\n Attributes:\n W (~chainer.Variable): Weight parameter.\n b (~chainer.Variable): Bias parameter.\n\n .. admonition:: Example\n\n There are several ways to make a Linear link.\n\n Define an input vector ``x`` as:\n\n >>> x = np.array([[0, 1, 2, 3, 4]], 'f')\n\n 1. 
Give the first two arguments explicitly:\n\n Those numbers are considered as the input size and the output size.\n\n >>> l = L.Linear(5, 10)\n >>> y = l(x)\n >>> y.shape\n (1, 10)\n\n 2. Omit ``in_size`` (give the output size only as the first argument)\n or fill it with ``None``:\n\n In this case, the size of second axis of ``x`` is used as the\n input size. So the below two cases are the same.\n\n >>> l = L.Linear(10)\n >>> y = l(x)\n >>> y.shape\n (1, 10)\n\n >>> l = L.Linear(None, 10)\n >>> y = l(x)\n >>> y.shape\n (1, 10)\n\n When you omit the first argument, you need to specify the other\n subsequent arguments from ``nobias`` as keyword arguments. So the\n below two cases are the same.\n\n >>> l = L.Linear(None, 10, False, None, 0)\n >>> y = l(x)\n >>> y.shape\n (1, 10)\n\n >>> l = L.Linear(10, nobias=False, initialW=None, initial_bias=0)\n >>> y = l(x)\n >>> y.shape\n (1, 10)\n\n \"\"\"\n\n def __init__(self, in_size, out_size=None, nobias=False,\n initialW=None, initial_bias=None):\n super(Linear, self).__init__()\n\n if out_size is None:\n in_size, out_size = None, in_size\n self.out_size = out_size\n\n with self.init_scope():\n W_initializer = initializers._get_initializer(initialW)\n self.W = variable.Parameter(W_initializer)\n if in_size is not None:\n self._initialize_params(in_size)\n\n if nobias:\n self.b = None\n else:\n if initial_bias is None:\n initial_bias = 0\n bias_initializer = initializers._get_initializer(initial_bias)\n self.b = variable.Parameter(bias_initializer, out_size)\n\n def _initialize_params(self, in_size):\n self.W.initialize((self.out_size, in_size))\n\n def __call__(self, x):\n \"\"\"Applies the linear layer.\n\n Args:\n x (~chainer.Variable): Batch of input vectors.\n\n Returns:\n ~chainer.Variable: Output of the linear layer.\n\n \"\"\"\n if self.W.data is None:\n self._initialize_params(x.size // x.shape[0])\n return linear.linear(x, self.W, self.b)\n", "path": "chainer/links/connection/linear.py"}]}
| 1,690 | 285 |
gh_patches_debug_8039
|
rasdani/github-patches
|
git_diff
|
docker__docker-py-1399
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
docker-py installation breaks docker-compose
I'm not quite sure if this is correct, but trying to install `docker-py` through pip after I've installed `docker-compose` breaks `docker-compose` with
```
Traceback (most recent call last):
  File "/usr/local/bin/docker-compose", line 7, in <module>
    from compose.cli.main import main
  File "/usr/local/lib/python2.7/site-packages/compose/cli/main.py", line 20, in <module>
    from ..bundle import get_image_digests
  File "/usr/local/lib/python2.7/site-packages/compose/bundle.py", line 13, in <module>
    from .network import get_network_defs_for_service
  File "/usr/local/lib/python2.7/site-packages/compose/network.py", line 7, in <module>
    from docker.types import IPAMConfig
ImportError: cannot import name IPAMConfig
```
To fix that error, I just need to do the installations in this order:
```
pip install docker-py
pip install docker-compose
```
gist:
https://gist.github.com/serialdoom/3a443c420aa29f9422f8c5fc73f46602
python/pip versions tried:
```
docker run -it python:2.7.13 bash -c 'pip --version'
pip 9.0.1 from /usr/local/lib/python2.7/site-packages (python 2.7)
docker run -it python:2.7.12 bash -c 'pip --version'
pip 8.1.2 from /usr/local/lib/python2.7/site-packages (python 2.7)
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2 import codecs
3 import os
4 import sys
5 
6 from setuptools import setup, find_packages
7 
8 
9 ROOT_DIR = os.path.dirname(__file__)
10 SOURCE_DIR = os.path.join(ROOT_DIR)
11 
12 requirements = [
13     'requests >= 2.5.2, != 2.11.0, != 2.12.2',
14     'six >= 1.4.0',
15     'websocket-client >= 0.32.0',
16     'docker-pycreds >= 0.2.1'
17 ]
18 
19 if sys.platform == 'win32':
20     requirements.append('pypiwin32 >= 219')
21 
22 extras_require = {
23     ':python_version < "3.5"': 'backports.ssl_match_hostname >= 3.5',
24     # While not imported explicitly, the ipaddress module is required for
25     # ssl_match_hostname to verify hosts match with certificates via
26     # ServerAltname: https://pypi.python.org/pypi/backports.ssl_match_hostname
27     ':python_version < "3.3"': 'ipaddress >= 1.0.16',
28 }
29 
30 version = None
31 exec(open('docker/version.py').read())
32 
33 with open('./test-requirements.txt') as test_reqs_txt:
34     test_requirements = [line for line in test_reqs_txt]
35 
36 
37 long_description = ''
38 try:
39     with codecs.open('./README.rst', encoding='utf-8') as readme_rst:
40         long_description = readme_rst.read()
41 except IOError:
42     # README.rst is only generated on release. Its absence should not prevent
43     # setup.py from working properly.
44     pass
45 
46 setup(
47     name="docker",
48     version=version,
49     description="A Python library for the Docker Engine API.",
50     long_description=long_description,
51     url='https://github.com/docker/docker-py',
52     packages=find_packages(exclude=["tests.*", "tests"]),
53     install_requires=requirements,
54     tests_require=test_requirements,
55     extras_require=extras_require,
56     zip_safe=False,
57     test_suite='tests',
58     classifiers=[
59         'Development Status :: 5 - Production/Stable',
60         'Environment :: Other Environment',
61         'Intended Audience :: Developers',
62         'Operating System :: OS Independent',
63         'Programming Language :: Python',
64         'Programming Language :: Python :: 2',
65         'Programming Language :: Python :: 2.7',
66         'Programming Language :: Python :: 3',
67         'Programming Language :: Python :: 3.3',
68         'Programming Language :: Python :: 3.4',
69         'Programming Language :: Python :: 3.5',
70         'Topic :: Utilities',
71         'License :: OSI Approved :: Apache Software License',
72     ],
73     maintainer='Joffrey F',
74     maintainer_email='[email protected]',
75 )
76 
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -1,10 +1,20 @@
 #!/usr/bin/env python
+from __future__ import print_function
+
 import codecs
 import os
 import sys
 
+import pip
+
 from setuptools import setup, find_packages
 
+if 'docker-py' in [x.project_name for x in pip.get_installed_distributions()]:
+    print(
+        'ERROR: "docker-py" needs to be uninstalled before installing this'
+        ' package:\npip uninstall docker-py', file=sys.stderr
+    )
+    sys.exit(1)
 
 ROOT_DIR = os.path.dirname(__file__)
 SOURCE_DIR = os.path.join(ROOT_DIR)
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -1,10 +1,20 @@\n #!/usr/bin/env python\n+from __future__ import print_function\n+\n import codecs\n import os\n import sys\n \n+import pip\n+\n from setuptools import setup, find_packages\n \n+if 'docker-py' in [x.project_name for x in pip.get_installed_distributions()]:\n+ print(\n+ 'ERROR: \"docker-py\" needs to be uninstalled before installing this'\n+ ' package:\\npip uninstall docker-py', file=sys.stderr\n+ )\n+ sys.exit(1)\n \n ROOT_DIR = os.path.dirname(__file__)\n SOURCE_DIR = os.path.join(ROOT_DIR)\n", "issue": "docker-py installation breaks docker-compose\nim not quite sure if this is correct, but trying to install `docker-py` through pip after i've installed `docker-compose` breaks `docker-compose` with\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"/usr/local/bin/docker-compose\", line 7, in <module>\r\n from compose.cli.main import main\r\n File \"/usr/local/lib/python2.7/site-packages/compose/cli/main.py\", line 20, in <module>\r\n from ..bundle import get_image_digests\r\n File \"/usr/local/lib/python2.7/site-packages/compose/bundle.py\", line 13, in <module>\r\n from .network import get_network_defs_for_service\r\n File \"/usr/local/lib/python2.7/site-packages/compose/network.py\", line 7, in <module>\r\n from docker.types import IPAMConfig\r\nImportError: cannot import name IPAMConfig\r\n```\r\n\r\nTo fix that error, i just need to do the installations in this order:\r\n```\r\npip install docker-py\r\npip install docker-compose\r\n```\r\n\r\n\r\ngist:\r\nhttps://gist.github.com/serialdoom/3a443c420aa29f9422f8c5fc73f46602\r\n\r\npython/pip versions tried:\r\n```\r\ndocker run -it python:2.7.13 bash -c 'pip --version'\r\npip 9.0.1 from /usr/local/lib/python2.7/site-packages (python 2.7)\r\ndocker run -it python:2.7.12 bash -c 'pip --version'\r\npip 8.1.2 from /usr/local/lib/python2.7/site-packages (python 2.7)\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python\nimport codecs\nimport os\nimport sys\n\nfrom setuptools import setup, find_packages\n\n\nROOT_DIR = os.path.dirname(__file__)\nSOURCE_DIR = os.path.join(ROOT_DIR)\n\nrequirements = [\n 'requests >= 2.5.2, != 2.11.0, != 2.12.2',\n 'six >= 1.4.0',\n 'websocket-client >= 0.32.0',\n 'docker-pycreds >= 0.2.1'\n]\n\nif sys.platform == 'win32':\n requirements.append('pypiwin32 >= 219')\n\nextras_require = {\n ':python_version < \"3.5\"': 'backports.ssl_match_hostname >= 3.5',\n # While not imported explicitly, the ipaddress module is required for\n # ssl_match_hostname to verify hosts match with certificates via\n # ServerAltname: https://pypi.python.org/pypi/backports.ssl_match_hostname\n ':python_version < \"3.3\"': 'ipaddress >= 1.0.16',\n}\n\nversion = None\nexec(open('docker/version.py').read())\n\nwith open('./test-requirements.txt') as test_reqs_txt:\n test_requirements = [line for line in test_reqs_txt]\n\n\nlong_description = ''\ntry:\n with codecs.open('./README.rst', encoding='utf-8') as readme_rst:\n long_description = readme_rst.read()\nexcept IOError:\n # README.rst is only generated on release. 
Its absence should not prevent\n # setup.py from working properly.\n pass\n\nsetup(\n name=\"docker\",\n version=version,\n description=\"A Python library for the Docker Engine API.\",\n long_description=long_description,\n url='https://github.com/docker/docker-py',\n packages=find_packages(exclude=[\"tests.*\", \"tests\"]),\n install_requires=requirements,\n tests_require=test_requirements,\n extras_require=extras_require,\n zip_safe=False,\n test_suite='tests',\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Other Environment',\n 'Intended Audience :: Developers',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Topic :: Utilities',\n 'License :: OSI Approved :: Apache Software License',\n ],\n maintainer='Joffrey F',\n maintainer_email='[email protected]',\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nfrom __future__ import print_function\n\nimport codecs\nimport os\nimport sys\n\nimport pip\n\nfrom setuptools import setup, find_packages\n\nif 'docker-py' in [x.project_name for x in pip.get_installed_distributions()]:\n print(\n 'ERROR: \"docker-py\" needs to be uninstalled before installing this'\n ' package:\\npip uninstall docker-py', file=sys.stderr\n )\n sys.exit(1)\n\nROOT_DIR = os.path.dirname(__file__)\nSOURCE_DIR = os.path.join(ROOT_DIR)\n\nrequirements = [\n 'requests >= 2.5.2, != 2.11.0, != 2.12.2',\n 'six >= 1.4.0',\n 'websocket-client >= 0.32.0',\n 'docker-pycreds >= 0.2.1'\n]\n\nif sys.platform == 'win32':\n requirements.append('pypiwin32 >= 219')\n\nextras_require = {\n ':python_version < \"3.5\"': 'backports.ssl_match_hostname >= 3.5',\n # While not imported explicitly, the ipaddress module is required for\n # ssl_match_hostname to verify hosts match with certificates via\n # ServerAltname: https://pypi.python.org/pypi/backports.ssl_match_hostname\n ':python_version < \"3.3\"': 'ipaddress >= 1.0.16',\n}\n\nversion = None\nexec(open('docker/version.py').read())\n\nwith open('./test-requirements.txt') as test_reqs_txt:\n test_requirements = [line for line in test_reqs_txt]\n\n\nlong_description = ''\ntry:\n with codecs.open('./README.rst', encoding='utf-8') as readme_rst:\n long_description = readme_rst.read()\nexcept IOError:\n # README.rst is only generated on release. 
Its absence should not prevent\n # setup.py from working properly.\n pass\n\nsetup(\n name=\"docker\",\n version=version,\n description=\"A Python library for the Docker Engine API.\",\n long_description=long_description,\n url='https://github.com/docker/docker-py',\n packages=find_packages(exclude=[\"tests.*\", \"tests\"]),\n install_requires=requirements,\n tests_require=test_requirements,\n extras_require=extras_require,\n zip_safe=False,\n test_suite='tests',\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Other Environment',\n 'Intended Audience :: Developers',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Topic :: Utilities',\n 'License :: OSI Approved :: Apache Software License',\n ],\n maintainer='Joffrey F',\n maintainer_email='[email protected]',\n)\n", "path": "setup.py"}]}
| 1,366 | 161 |
gh_patches_debug_54050
|
rasdani/github-patches
|
git_diff
|
python-discord__bot-1404
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add an `attachments` rule
# Abstract
We should have an antispam rule filtering small bursts of images. 
# Rationale
Currently, when a user posts 4 images in less than 10 seconds without any comment, the `duplicates` rule will trigger. While we still want to be informed when many images are posted, having the `duplicates` rule trigger doesn't make much sense. Besides, if different message content is given for each image, it will only trigger `burst` if more than 9 messages are sent in 10 seconds.
# Specification
- [ ] Make sure that the `duplicates` filter won't be triggered by messages with images. We can safely skip empty messages with attachments.
- [ ] Create an `images` filter based on `duplicates` that will trigger when more than 3 images are posted in less than 10 seconds. It should ignore the message content.
--- END ISSUE ---
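For context, here is a minimal sketch of what the rule in the second checklist item could look like, modeled on the `duplicates` rule quoted below. The `apply` signature and the `max`/`interval` config keys are assumptions carried over from the existing rules, and the threshold semantics (total attachments in the window) are illustrative — note that the golden diff for this record only covers the first checklist item:

```python
from typing import Dict, Iterable, List, Optional, Tuple

from discord import Member, Message


async def apply(
    last_message: Message, recent_messages: List[Message], config: Dict[str, int]
) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
    """Detects bursts of attachments sent by a single user, ignoring content."""
    relevant_messages = tuple(
        msg
        for msg in recent_messages
        if msg.author == last_message.author and msg.attachments
    )
    # Count attachments rather than messages, so multi-image posts are caught too.
    total_attachments = sum(len(msg.attachments) for msg in relevant_messages)

    if total_attachments > config['max']:
        return (
            f"sent {total_attachments} attachments in {config['interval']}s",
            (last_message.author,),
            relevant_messages,
        )
    return None
```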
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bot/rules/duplicates.py`
Content:
```
1 from typing import Dict, Iterable, List, Optional, Tuple
2
3 from discord import Member, Message
4
5
6 async def apply(
7 last_message: Message, recent_messages: List[Message], config: Dict[str, int]
8 ) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
9 """Detects duplicated messages sent by a single user."""
10 relevant_messages = tuple(
11 msg
12 for msg in recent_messages
13 if (
14 msg.author == last_message.author
15 and msg.content == last_message.content
16 )
17 )
18
19 total_duplicated = len(relevant_messages)
20
21 if total_duplicated > config['max']:
22 return (
23 f"sent {total_duplicated} duplicated messages in {config['interval']}s",
24 (last_message.author,),
25 relevant_messages
26 )
27 return None
28
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/bot/rules/duplicates.py b/bot/rules/duplicates.py
--- a/bot/rules/duplicates.py
+++ b/bot/rules/duplicates.py
@@ -13,6 +13,7 @@
if (
msg.author == last_message.author
and msg.content == last_message.content
+ and msg.content
)
)
|
{"golden_diff": "diff --git a/bot/rules/duplicates.py b/bot/rules/duplicates.py\n--- a/bot/rules/duplicates.py\n+++ b/bot/rules/duplicates.py\n@@ -13,6 +13,7 @@\n if (\n msg.author == last_message.author\n and msg.content == last_message.content\n+ and msg.content\n )\n )\n", "issue": "Add an `attachments` rule\n# Abstract \r\n\r\nWe should have an antispam rule filtering small burst of images. \r\n\r\n# Rationale\r\n\r\nCurrently, when a user posts 4 images in less than 10 seconds without any comment, the `duplicates` rule will trigger. While we still want to be informed when many images are posted, having the `duplicates` rule trigger doesn't make much sense. Besides, if different message content is given for each image, it will only trigger `burst` if more than 9 messages are sent in 10 seconds. \r\n\r\n# Specification\r\n\r\n- [ ] Make sure that the `duplicates` filter won't be triggered by messages with images. We can safely skip empty messages with attachments.\r\n- [ ] Create an `images` filter based on `duplicates` that will trigger when more than 3 images are posted in less than 10 seconds. It should ignore the message content. \n", "before_files": [{"content": "from typing import Dict, Iterable, List, Optional, Tuple\n\nfrom discord import Member, Message\n\n\nasync def apply(\n last_message: Message, recent_messages: List[Message], config: Dict[str, int]\n) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:\n \"\"\"Detects duplicated messages sent by a single user.\"\"\"\n relevant_messages = tuple(\n msg\n for msg in recent_messages\n if (\n msg.author == last_message.author\n and msg.content == last_message.content\n )\n )\n\n total_duplicated = len(relevant_messages)\n\n if total_duplicated > config['max']:\n return (\n f\"sent {total_duplicated} duplicated messages in {config['interval']}s\",\n (last_message.author,),\n relevant_messages\n )\n return None\n", "path": "bot/rules/duplicates.py"}], "after_files": [{"content": "from typing import Dict, Iterable, List, Optional, Tuple\n\nfrom discord import Member, Message\n\n\nasync def apply(\n last_message: Message, recent_messages: List[Message], config: Dict[str, int]\n) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:\n \"\"\"Detects duplicated messages sent by a single user.\"\"\"\n relevant_messages = tuple(\n msg\n for msg in recent_messages\n if (\n msg.author == last_message.author\n and msg.content == last_message.content\n and msg.content\n )\n )\n\n total_duplicated = len(relevant_messages)\n\n if total_duplicated > config['max']:\n return (\n f\"sent {total_duplicated} duplicated messages in {config['interval']}s\",\n (last_message.author,),\n relevant_messages\n )\n return None\n", "path": "bot/rules/duplicates.py"}]}
| 675 | 78 |
gh_patches_debug_21398
|
rasdani/github-patches
|
git_diff
|
vispy__vispy-751
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
vispy.plot.image fails on float64 textures
--- END ISSUE ---
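A minimal reproduction sketch; the `vispy.plot.image` call style follows the vispy API current at the time of the issue and is an assumption. NumPy allocates `float64` by default, a dtype OpenGL textures cannot store, which is why the visual fails until the data is downcast to `float32`:

```python
import numpy as np
from vispy import app
import vispy.plot as vp

data = np.random.rand(64, 64)  # dtype is np.float64 by default

# Before the patch this raises, because gloo.Texture2D has no 64-bit float format:
# canvas = vp.image(data)

canvas = vp.image(data.astype(np.float32))  # an explicit downcast always works
app.run()
```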
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `vispy/visuals/image.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # Copyright (c) 2014, Vispy Development Team.
3 # Distributed under the (new) BSD License. See LICENSE.txt for more info.
4
5 from __future__ import division
6
7 import numpy as np
8
9 from .. import gloo
10 from .transforms import STTransform, NullTransform
11 from .modular_mesh import ModularMesh
12 from .components import (TextureComponent, VertexTextureCoordinateComponent,
13 TextureCoordinateComponent)
14
15
16 class ImageVisual(ModularMesh):
17 """Visual subclass displaying an image.
18
19 Parameters
20 ----------
21 data : (height, width, 4) ubyte array
22 ImageVisual data.
23 method : str
24 Selects method of rendering image in case of non-linear transforms.
25 Each method produces similar results, but may trade efficiency
26 and accuracy. If the transform is linear, this parameter is ignored
27 and a single quad is drawn around the area of the image.
28
29 * 'subdivide': ImageVisual is represented as a grid of triangles
30 with texture coordinates linearly mapped.
31 * 'impostor': ImageVisual is represented as a quad covering the
32 entire view, with texture coordinates determined by the
33 transform. This produces the best transformation results, but may
34 be slow.
35
36 grid: tuple (rows, cols)
37 If method='subdivide', this tuple determines the number of rows and
38 columns in the image grid.
39 """
40 def __init__(self, data, method='subdivide', grid=(10, 10), **kwargs):
41 super(ImageVisual, self).__init__(**kwargs)
42
43 self._data = None
44
45 # maps from quad coordinates to texture coordinates
46 self._tex_transform = STTransform()
47
48 self._texture = None
49 self._interpolation = 'nearest'
50 self.set_data(data)
51 self.set_gl_options(cull_face=('front_and_back',))
52
53 self.method = method
54 self.grid = grid
55
56 def set_data(self, image=None, **kwds):
57 if image is not None:
58 self._data = image
59 self._texture = None
60 super(ImageVisual, self).set_data(**kwds)
61
62 @property
63 def interpolation(self):
64 return self._interpolation
65
66 @interpolation.setter
67 def interpolation(self, interp):
68 self._interpolation = interp
69 self.update()
70
71 @property
72 def size(self):
73 return self._data.shape[:2][::-1]
74
75 def _build_data(self, transforms):
76 # Construct complete data array with position and optionally color
77 if transforms.get_full_transform().Linear:
78 method = 'subdivide'
79 grid = (1, 1)
80 else:
81 method = self.method
82 grid = self.grid
83
84 # TODO: subdivision and impostor modes should be handled by new
85 # components?
86 if method == 'subdivide':
87 # quads cover area of image as closely as possible
88 w = 1.0 / grid[1]
89 h = 1.0 / grid[0]
90
91 quad = np.array([[0, 0, 0], [w, 0, 0], [w, h, 0],
92 [0, 0, 0], [w, h, 0], [0, h, 0]],
93 dtype=np.float32)
94 quads = np.empty((grid[1], grid[0], 6, 3), dtype=np.float32)
95 quads[:] = quad
96
97 mgrid = np.mgrid[0.:grid[1], 0.:grid[0]].transpose(1, 2, 0)
98 mgrid = mgrid[:, :, np.newaxis, :]
99 mgrid[..., 0] *= w
100 mgrid[..., 1] *= h
101
102 quads[..., :2] += mgrid
103 tex_coords = quads.reshape(grid[1]*grid[0]*6, 3)
104 vertices = tex_coords.copy()
105 vertices[..., 0] *= self._data.shape[1]
106 vertices[..., 1] *= self._data.shape[0]
107 ModularMesh.set_data(self, pos=vertices)
108 coords = np.ascontiguousarray(tex_coords[:, :2])
109 tex_coord_comp = TextureCoordinateComponent(coords)
110 elif method == 'impostor':
111 # quad covers entire view; frag. shader will deal with image shape
112 quad = np.array([[-1, -1, 0], [1, -1, 0], [1, 1, 0],
113 [-1, -1, 0], [1, 1, 0], [-1, 1, 0]],
114 dtype=np.float32)
115 ModularMesh.set_data(self, pos=quad)
116
117 self._tex_transform.scale = (1./self._data.shape[0],
118 1./self._data.shape[1])
119 ctr = transforms.get_full_transform().inverse
120 total_transform = self._tex_transform * ctr
121 tex_coord_comp = VertexTextureCoordinateComponent(total_transform)
122 tr = NullTransform()
123 self._program.vert['map_local_to_nd'] = tr
124 else:
125 raise ValueError("Unknown image draw method '%s'" % method)
126
127 data = self._data
128 self._texture = gloo.Texture2D(data)
129 self._texture.interpolation = self._interpolation
130
131 self.color_components = [TextureComponent(self._texture,
132 tex_coord_comp)]
133
134 def _activate_transform(self, transforms=None):
135 # this is handled in _build_data instead.
136 pass
137
138 def bounds(self, mode, axis):
139 if axis > 1:
140 return (0, 0)
141 else:
142 return (0, self.size[axis])
143
144 def draw(self, transforms):
145 if self._data is None:
146 return
147
148 if transforms.get_full_transform().Linear:
149 method = 'subdivide'
150 else:
151 method = self.method
152
153 # always have to rebuild for impostor, only first for subdivide
154 if self._texture is None:
155 self._build_data(transforms)
156 if method == 'subdivide':
157 tr = transforms.get_full_transform()
158 self._program.vert['map_local_to_nd'] = tr
159
160 super(ImageVisual, self).draw(transforms)
161
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/vispy/visuals/image.py b/vispy/visuals/image.py
--- a/vispy/visuals/image.py
+++ b/vispy/visuals/image.py
@@ -28,8 +28,8 @@
* 'subdivide': ImageVisual is represented as a grid of triangles
with texture coordinates linearly mapped.
- * 'impostor': ImageVisual is represented as a quad covering the
- entire view, with texture coordinates determined by the
+ * 'impostor': ImageVisual is represented as a quad covering the
+ entire view, with texture coordinates determined by the
transform. This produces the best transformation results, but may
be slow.
@@ -55,6 +55,9 @@
def set_data(self, image=None, **kwds):
if image is not None:
+ image = np.array(image, copy=False)
+ if image.dtype == np.float64:
+ image = image.astype(np.float32)
self._data = image
self._texture = None
super(ImageVisual, self).set_data(**kwds)
|
{"golden_diff": "diff --git a/vispy/visuals/image.py b/vispy/visuals/image.py\n--- a/vispy/visuals/image.py\n+++ b/vispy/visuals/image.py\n@@ -28,8 +28,8 @@\n \n * 'subdivide': ImageVisual is represented as a grid of triangles\n with texture coordinates linearly mapped.\n- * 'impostor': ImageVisual is represented as a quad covering the \n- entire view, with texture coordinates determined by the \n+ * 'impostor': ImageVisual is represented as a quad covering the\n+ entire view, with texture coordinates determined by the\n transform. This produces the best transformation results, but may\n be slow.\n \n@@ -55,6 +55,9 @@\n \n def set_data(self, image=None, **kwds):\n if image is not None:\n+ image = np.array(image, copy=False)\n+ if image.dtype == np.float64:\n+ image = image.astype(np.float32)\n self._data = image\n self._texture = None\n super(ImageVisual, self).set_data(**kwds)\n", "issue": "vispy.plot.image fails on float64 textures\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright (c) 2014, Vispy Development Team.\n# Distributed under the (new) BSD License. See LICENSE.txt for more info.\n\nfrom __future__ import division\n\nimport numpy as np\n\nfrom .. import gloo\nfrom .transforms import STTransform, NullTransform\nfrom .modular_mesh import ModularMesh\nfrom .components import (TextureComponent, VertexTextureCoordinateComponent,\n TextureCoordinateComponent)\n\n\nclass ImageVisual(ModularMesh):\n \"\"\"Visual subclass displaying an image.\n\n Parameters\n ----------\n data : (height, width, 4) ubyte array\n ImageVisual data.\n method : str\n Selects method of rendering image in case of non-linear transforms.\n Each method produces similar results, but may trade efficiency\n and accuracy. If the transform is linear, this parameter is ignored\n and a single quad is drawn around the area of the image.\n\n * 'subdivide': ImageVisual is represented as a grid of triangles\n with texture coordinates linearly mapped.\n * 'impostor': ImageVisual is represented as a quad covering the \n entire view, with texture coordinates determined by the \n transform. 
This produces the best transformation results, but may\n be slow.\n\n grid: tuple (rows, cols)\n If method='subdivide', this tuple determines the number of rows and\n columns in the image grid.\n \"\"\"\n def __init__(self, data, method='subdivide', grid=(10, 10), **kwargs):\n super(ImageVisual, self).__init__(**kwargs)\n\n self._data = None\n\n # maps from quad coordinates to texture coordinates\n self._tex_transform = STTransform()\n\n self._texture = None\n self._interpolation = 'nearest'\n self.set_data(data)\n self.set_gl_options(cull_face=('front_and_back',))\n\n self.method = method\n self.grid = grid\n\n def set_data(self, image=None, **kwds):\n if image is not None:\n self._data = image\n self._texture = None\n super(ImageVisual, self).set_data(**kwds)\n\n @property\n def interpolation(self):\n return self._interpolation\n\n @interpolation.setter\n def interpolation(self, interp):\n self._interpolation = interp\n self.update()\n\n @property\n def size(self):\n return self._data.shape[:2][::-1]\n\n def _build_data(self, transforms):\n # Construct complete data array with position and optionally color\n if transforms.get_full_transform().Linear:\n method = 'subdivide'\n grid = (1, 1)\n else:\n method = self.method\n grid = self.grid\n\n # TODO: subdivision and impostor modes should be handled by new\n # components?\n if method == 'subdivide':\n # quads cover area of image as closely as possible\n w = 1.0 / grid[1]\n h = 1.0 / grid[0]\n\n quad = np.array([[0, 0, 0], [w, 0, 0], [w, h, 0],\n [0, 0, 0], [w, h, 0], [0, h, 0]],\n dtype=np.float32)\n quads = np.empty((grid[1], grid[0], 6, 3), dtype=np.float32)\n quads[:] = quad\n\n mgrid = np.mgrid[0.:grid[1], 0.:grid[0]].transpose(1, 2, 0)\n mgrid = mgrid[:, :, np.newaxis, :]\n mgrid[..., 0] *= w\n mgrid[..., 1] *= h\n\n quads[..., :2] += mgrid\n tex_coords = quads.reshape(grid[1]*grid[0]*6, 3)\n vertices = tex_coords.copy()\n vertices[..., 0] *= self._data.shape[1]\n vertices[..., 1] *= self._data.shape[0]\n ModularMesh.set_data(self, pos=vertices)\n coords = np.ascontiguousarray(tex_coords[:, :2])\n tex_coord_comp = TextureCoordinateComponent(coords)\n elif method == 'impostor':\n # quad covers entire view; frag. 
shader will deal with image shape\n quad = np.array([[-1, -1, 0], [1, -1, 0], [1, 1, 0],\n [-1, -1, 0], [1, 1, 0], [-1, 1, 0]],\n dtype=np.float32)\n ModularMesh.set_data(self, pos=quad)\n\n self._tex_transform.scale = (1./self._data.shape[0],\n 1./self._data.shape[1])\n ctr = transforms.get_full_transform().inverse\n total_transform = self._tex_transform * ctr\n tex_coord_comp = VertexTextureCoordinateComponent(total_transform)\n tr = NullTransform()\n self._program.vert['map_local_to_nd'] = tr\n else:\n raise ValueError(\"Unknown image draw method '%s'\" % method)\n\n data = self._data\n self._texture = gloo.Texture2D(data)\n self._texture.interpolation = self._interpolation\n\n self.color_components = [TextureComponent(self._texture,\n tex_coord_comp)]\n\n def _activate_transform(self, transforms=None):\n # this is handled in _build_data instead.\n pass\n\n def bounds(self, mode, axis):\n if axis > 1:\n return (0, 0)\n else:\n return (0, self.size[axis])\n\n def draw(self, transforms):\n if self._data is None:\n return\n\n if transforms.get_full_transform().Linear:\n method = 'subdivide'\n else:\n method = self.method\n\n # always have to rebuild for impostor, only first for subdivide\n if self._texture is None:\n self._build_data(transforms)\n if method == 'subdivide':\n tr = transforms.get_full_transform()\n self._program.vert['map_local_to_nd'] = tr\n\n super(ImageVisual, self).draw(transforms)\n", "path": "vispy/visuals/image.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright (c) 2014, Vispy Development Team.\n# Distributed under the (new) BSD License. See LICENSE.txt for more info.\n\nfrom __future__ import division\n\nimport numpy as np\n\nfrom .. import gloo\nfrom .transforms import STTransform, NullTransform\nfrom .modular_mesh import ModularMesh\nfrom .components import (TextureComponent, VertexTextureCoordinateComponent,\n TextureCoordinateComponent)\n\n\nclass ImageVisual(ModularMesh):\n \"\"\"Visual subclass displaying an image.\n\n Parameters\n ----------\n data : (height, width, 4) ubyte array\n ImageVisual data.\n method : str\n Selects method of rendering image in case of non-linear transforms.\n Each method produces similar results, but may trade efficiency\n and accuracy. If the transform is linear, this parameter is ignored\n and a single quad is drawn around the area of the image.\n\n * 'subdivide': ImageVisual is represented as a grid of triangles\n with texture coordinates linearly mapped.\n * 'impostor': ImageVisual is represented as a quad covering the\n entire view, with texture coordinates determined by the\n transform. 
This produces the best transformation results, but may\n be slow.\n\n grid: tuple (rows, cols)\n If method='subdivide', this tuple determines the number of rows and\n columns in the image grid.\n \"\"\"\n def __init__(self, data, method='subdivide', grid=(10, 10), **kwargs):\n super(ImageVisual, self).__init__(**kwargs)\n\n self._data = None\n\n # maps from quad coordinates to texture coordinates\n self._tex_transform = STTransform()\n\n self._texture = None\n self._interpolation = 'nearest'\n self.set_data(data)\n self.set_gl_options(cull_face=('front_and_back',))\n\n self.method = method\n self.grid = grid\n\n def set_data(self, image=None, **kwds):\n if image is not None:\n image = np.array(image, copy=False)\n if image.dtype == np.float64:\n image = image.astype(np.float32)\n self._data = image\n self._texture = None\n super(ImageVisual, self).set_data(**kwds)\n\n @property\n def interpolation(self):\n return self._interpolation\n\n @interpolation.setter\n def interpolation(self, interp):\n self._interpolation = interp\n self.update()\n\n @property\n def size(self):\n return self._data.shape[:2][::-1]\n\n def _build_data(self, transforms):\n # Construct complete data array with position and optionally color\n if transforms.get_full_transform().Linear:\n method = 'subdivide'\n grid = (1, 1)\n else:\n method = self.method\n grid = self.grid\n\n # TODO: subdivision and impostor modes should be handled by new\n # components?\n if method == 'subdivide':\n # quads cover area of image as closely as possible\n w = 1.0 / grid[1]\n h = 1.0 / grid[0]\n\n quad = np.array([[0, 0, 0], [w, 0, 0], [w, h, 0],\n [0, 0, 0], [w, h, 0], [0, h, 0]],\n dtype=np.float32)\n quads = np.empty((grid[1], grid[0], 6, 3), dtype=np.float32)\n quads[:] = quad\n\n mgrid = np.mgrid[0.:grid[1], 0.:grid[0]].transpose(1, 2, 0)\n mgrid = mgrid[:, :, np.newaxis, :]\n mgrid[..., 0] *= w\n mgrid[..., 1] *= h\n\n quads[..., :2] += mgrid\n tex_coords = quads.reshape(grid[1]*grid[0]*6, 3)\n vertices = tex_coords.copy()\n vertices[..., 0] *= self._data.shape[1]\n vertices[..., 1] *= self._data.shape[0]\n ModularMesh.set_data(self, pos=vertices)\n coords = np.ascontiguousarray(tex_coords[:, :2])\n tex_coord_comp = TextureCoordinateComponent(coords)\n elif method == 'impostor':\n # quad covers entire view; frag. 
shader will deal with image shape\n quad = np.array([[-1, -1, 0], [1, -1, 0], [1, 1, 0],\n [-1, -1, 0], [1, 1, 0], [-1, 1, 0]],\n dtype=np.float32)\n ModularMesh.set_data(self, pos=quad)\n\n self._tex_transform.scale = (1./self._data.shape[0],\n 1./self._data.shape[1])\n ctr = transforms.get_full_transform().inverse\n total_transform = self._tex_transform * ctr\n tex_coord_comp = VertexTextureCoordinateComponent(total_transform)\n tr = NullTransform()\n self._program.vert['map_local_to_nd'] = tr\n else:\n raise ValueError(\"Unknown image draw method '%s'\" % method)\n\n data = self._data\n self._texture = gloo.Texture2D(data)\n self._texture.interpolation = self._interpolation\n\n self.color_components = [TextureComponent(self._texture,\n tex_coord_comp)]\n\n def _activate_transform(self, transforms=None):\n # this is handled in _build_data instead.\n pass\n\n def bounds(self, mode, axis):\n if axis > 1:\n return (0, 0)\n else:\n return (0, self.size[axis])\n\n def draw(self, transforms):\n if self._data is None:\n return\n\n if transforms.get_full_transform().Linear:\n method = 'subdivide'\n else:\n method = self.method\n\n # always have to rebuild for impostor, only first for subdivide\n if self._texture is None:\n self._build_data(transforms)\n if method == 'subdivide':\n tr = transforms.get_full_transform()\n self._program.vert['map_local_to_nd'] = tr\n\n super(ImageVisual, self).draw(transforms)\n", "path": "vispy/visuals/image.py"}]}
| 2,010 | 252 |
gh_patches_debug_39491
|
rasdani/github-patches
|
git_diff
|
akvo__akvo-rsr-3403
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
IATI export error when result has no quantitative indicators
When running the IATI export, a result that has only qualitative indicators is reported as an error.
This seems to be a bug introduced when we started including qualitative indicators in the export, but didn't change the validation to allow for qualitative-only results.
--- END ISSUE ---
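The result-level check demands at least one *quantitative* indicator, so a qualitative-only result can never validate. Below is a sketch of the relaxed per-result check; the model fields (`indicators`, `type`, `measure`) and the `QUANTITATIVE` constant come from the file quoted below, while the function wrapper is purely illustrative:

```python
from akvo.rsr.models.result.utils import QUANTITATIVE


def check_result_indicators(result):
    """Accept qualitative-only results; gate measure checks on indicator type."""
    checks = []
    if not result.indicators.exists():
        checks.append((u'error', u'result (id: %s) has no indicator(s)' % str(result.pk)))

    for indicator in result.indicators.all():
        # 'measure' only applies to quantitative indicators, so qualitative
        # indicators without one no longer produce an error.
        if indicator.type == QUANTITATIVE and not indicator.measure:
            checks.append((u'error', u'indicator (id: %s) has no measure specified'
                           % str(indicator.pk)))
    return checks
```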
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `akvo/iati/checks/fields/results.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 # Akvo RSR is covered by the GNU Affero General Public License.
4 # See more details in the license.txt file located at the root folder of the Akvo RSR module.
5 # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
6
7 from akvo.rsr.models.result.utils import QUANTITATIVE
8
9 DGIS_VALIDATION_SET_NAME = u"DGIS IATI"
10
11
12 def results(project):
13 """
14 :param project: Project object
15 :return: All checks passed boolean, [Check results]
16 """
17 checks = []
18 all_checks_passed = True
19
20 DGIS_PROJECT = project.validations.filter(name=DGIS_VALIDATION_SET_NAME).count() == 1
21
22 for result in project.results.all():
23 if not result.type:
24 all_checks_passed = False
25 checks.append((u'error', u'result (id: %s) has no type specified' % str(result.pk)))
26
27 if not result.title:
28 all_checks_passed = False
29 checks.append((u'error', u'result (id: %s) has no title specified' % str(result.pk)))
30
31 if not result.indicators.filter(type=QUANTITATIVE):
32 all_checks_passed = False
33 checks.append(
34 (u'error', u'result (id: %s) has no quantitative indicator(s)' % str(result.pk))
35 )
36
37 for indicator in result.indicators.filter(type=QUANTITATIVE):
38 if not indicator.measure:
39 all_checks_passed = False
40 checks.append((u'error', u'indicator (id: %s) has no measure specified' %
41 str(indicator.pk)))
42
43 if not indicator.title:
44 all_checks_passed = False
45 checks.append((u'error', u'indicator (id: %s) has no title specified' %
46 str(indicator.pk)))
47
48 if not indicator.baseline_value:
49 if DGIS_PROJECT:
50 all_checks_passed = False
51 checks.append((u'warning', u'indicator (id: %s) baseline has no value '
52 u'specified, however the value of "N/A" has been '
53 u'set for the attribute' % str(indicator.pk)))
54 elif indicator.baseline_year or indicator.baseline_comment:
55 all_checks_passed = False
56 checks.append((u'error', u'indicator (id: %s) baseline has no value specified' %
57 str(indicator.pk)))
58
59 if not indicator.baseline_year:
60 if DGIS_PROJECT:
61 all_checks_passed = False
62 checks.append((u'warning', u'indicator (id: %s) baseline has no year '
63 u'specified, however the value of "1" has been '
64 u'set for the attribute' % str(indicator.pk)))
65 elif indicator.baseline_value or indicator.baseline_comment:
66 all_checks_passed = False
67 checks.append((u'error', u'indicator (id: %s) baseline has no year specified' %
68 str(indicator.pk)))
69
70 for reference in indicator.references.all():
71 if not reference.reference:
72 all_checks_passed = False
73 checks.append((u'error', u'indicator reference (id: %s) has no code '
74 u'specified' % str(reference.pk)))
75
76 if not reference.vocabulary:
77 all_checks_passed = False
78 checks.append((u'error', u'indicator reference (id: %s) has no vocabulary '
79 u'specified' % str(reference.pk)))
80
81 if reference.vocabulary == '99' and not reference.vocabulary_uri:
82 all_checks_passed = False
83 checks.append((u'error', u'indicator reference (id: %s) has vocabulary 99 '
84 u'(reporting organisation) but no vocabulary URI '
85 u'specified' % str(reference.pk)))
86
87 for period in indicator.periods.all():
88 if not period.period_start:
89 all_checks_passed = False
90 checks.append((u'error', u'indicator period (id: %s) has no start date '
91 u'specified' % str(period.pk)))
92
93 if not period.period_end:
94 all_checks_passed = False
95 checks.append((u'error', u'indicator period (id: %s) has no end date '
96 u'specified' % str(period.pk)))
97
98 if period.period_start and period.period_end and \
99 period.period_start > period.period_end:
100 all_checks_passed = False
101 checks.append((u'error', u'indicator period (id: %s) has a start date '
102 u'later than the end date' % str(period.pk)))
103
104 if not period.target_value:
105 if DGIS_PROJECT:
106 all_checks_passed = False
107 checks.append((u'warning', u'indicator period (id: %s) has no target value '
108 u'specified. The value "N/A" has been set for '
109 u'the target value attribute' % str(period.pk)))
110 elif (period.target_comment or period.target_locations.all() or
111 period.target_dimensions.all()):
112 all_checks_passed = False
113 checks.append((u'error', u'indicator period (id: %s) has no target value, '
114 u'but does have a target comment, target '
115 u'location(s) or target dimension(s)' %
116 str(period.pk)))
117
118 if not period.actual_value:
119 if DGIS_PROJECT:
120 all_checks_passed = False
121 checks.append((u'warning', u'indicator period (id: %s) has no actual value '
122 u'specified. The value "N/A" has been set for '
123 u'the actual value attribute' % str(period.pk)))
124 elif (period.actual_comment or period.actual_locations.all() or
125 period.actual_dimensions.all()):
126 all_checks_passed = False
127 checks.append((u'error', u'indicator period (id: %s) has no actual value, '
128 u'but does have a actual comment, actual '
129 u'location(s) or actual dimension(s)' %
130 str(period.pk)))
131
132 if project.results.all() and all_checks_passed:
133 checks.append((u'success', u'has valid result(s)'))
134
135 return all_checks_passed, checks
136
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/akvo/iati/checks/fields/results.py b/akvo/iati/checks/fields/results.py
--- a/akvo/iati/checks/fields/results.py
+++ b/akvo/iati/checks/fields/results.py
@@ -4,7 +4,7 @@
# See more details in the license.txt file located at the root folder of the Akvo RSR module.
# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
-from akvo.rsr.models.result.utils import QUANTITATIVE
+from akvo.rsr.models.result.utils import QUANTITATIVE, QUALITATIVE
DGIS_VALIDATION_SET_NAME = u"DGIS IATI"
@@ -28,14 +28,14 @@
all_checks_passed = False
checks.append((u'error', u'result (id: %s) has no title specified' % str(result.pk)))
- if not result.indicators.filter(type=QUANTITATIVE):
+ if not result.indicators.exists():
all_checks_passed = False
checks.append(
- (u'error', u'result (id: %s) has no quantitative indicator(s)' % str(result.pk))
+ (u'error', u'result (id: %s) has no indicator(s)' % str(result.pk))
)
- for indicator in result.indicators.filter(type=QUANTITATIVE):
- if not indicator.measure:
+ for indicator in result.indicators.all():
+ if indicator.type == QUANTITATIVE and not indicator.measure:
all_checks_passed = False
checks.append((u'error', u'indicator (id: %s) has no measure specified' %
str(indicator.pk)))
@@ -101,7 +101,7 @@
checks.append((u'error', u'indicator period (id: %s) has a start date '
u'later than the end date' % str(period.pk)))
- if not period.target_value:
+ if indicator.type == QUANTITATIVE and not period.target_value:
if DGIS_PROJECT:
all_checks_passed = False
checks.append((u'warning', u'indicator period (id: %s) has no target value '
@@ -115,7 +115,7 @@
u'location(s) or target dimension(s)' %
str(period.pk)))
- if not period.actual_value:
+ if indicator.type == QUANTITATIVE and not period.actual_value:
if DGIS_PROJECT:
all_checks_passed = False
checks.append((u'warning', u'indicator period (id: %s) has no actual value '
|
{"golden_diff": "diff --git a/akvo/iati/checks/fields/results.py b/akvo/iati/checks/fields/results.py\n--- a/akvo/iati/checks/fields/results.py\n+++ b/akvo/iati/checks/fields/results.py\n@@ -4,7 +4,7 @@\n # See more details in the license.txt file located at the root folder of the Akvo RSR module.\n # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n \n-from akvo.rsr.models.result.utils import QUANTITATIVE\n+from akvo.rsr.models.result.utils import QUANTITATIVE, QUALITATIVE\n \n DGIS_VALIDATION_SET_NAME = u\"DGIS IATI\"\n \n@@ -28,14 +28,14 @@\n all_checks_passed = False\n checks.append((u'error', u'result (id: %s) has no title specified' % str(result.pk)))\n \n- if not result.indicators.filter(type=QUANTITATIVE):\n+ if not result.indicators.exists():\n all_checks_passed = False\n checks.append(\n- (u'error', u'result (id: %s) has no quantitative indicator(s)' % str(result.pk))\n+ (u'error', u'result (id: %s) has no indicator(s)' % str(result.pk))\n )\n \n- for indicator in result.indicators.filter(type=QUANTITATIVE):\n- if not indicator.measure:\n+ for indicator in result.indicators.all():\n+ if indicator.type == QUANTITATIVE and not indicator.measure:\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) has no measure specified' %\n str(indicator.pk)))\n@@ -101,7 +101,7 @@\n checks.append((u'error', u'indicator period (id: %s) has a start date '\n u'later than the end date' % str(period.pk)))\n \n- if not period.target_value:\n+ if indicator.type == QUANTITATIVE and not period.target_value:\n if DGIS_PROJECT:\n all_checks_passed = False\n checks.append((u'warning', u'indicator period (id: %s) has no target value '\n@@ -115,7 +115,7 @@\n u'location(s) or target dimension(s)' %\n str(period.pk)))\n \n- if not period.actual_value:\n+ if indicator.type == QUANTITATIVE and not period.actual_value:\n if DGIS_PROJECT:\n all_checks_passed = False\n checks.append((u'warning', u'indicator period (id: %s) has no actual value '\n", "issue": "IATI export error when result has no quantitative indicators\nWhen running the IATI export, a result that has only qualitative indicators is reported as an error.\r\n\r\nThis seems to be a bug introduced when we started including qualitative indicators in the export, but didn't change the validation to allow for qualitative-only results.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\nfrom akvo.rsr.models.result.utils import QUANTITATIVE\n\nDGIS_VALIDATION_SET_NAME = u\"DGIS IATI\"\n\n\ndef results(project):\n \"\"\"\n :param project: Project object\n :return: All checks passed boolean, [Check results]\n \"\"\"\n checks = []\n all_checks_passed = True\n\n DGIS_PROJECT = project.validations.filter(name=DGIS_VALIDATION_SET_NAME).count() == 1\n\n for result in project.results.all():\n if not result.type:\n all_checks_passed = False\n checks.append((u'error', u'result (id: %s) has no type specified' % str(result.pk)))\n\n if not result.title:\n all_checks_passed = False\n checks.append((u'error', u'result (id: %s) has no title specified' % str(result.pk)))\n\n if not result.indicators.filter(type=QUANTITATIVE):\n all_checks_passed = False\n checks.append(\n (u'error', u'result (id: %s) has no quantitative indicator(s)' % 
str(result.pk))\n )\n\n for indicator in result.indicators.filter(type=QUANTITATIVE):\n if not indicator.measure:\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) has no measure specified' %\n str(indicator.pk)))\n\n if not indicator.title:\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) has no title specified' %\n str(indicator.pk)))\n\n if not indicator.baseline_value:\n if DGIS_PROJECT:\n all_checks_passed = False\n checks.append((u'warning', u'indicator (id: %s) baseline has no value '\n u'specified, however the value of \"N/A\" has been '\n u'set for the attribute' % str(indicator.pk)))\n elif indicator.baseline_year or indicator.baseline_comment:\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) baseline has no value specified' %\n str(indicator.pk)))\n\n if not indicator.baseline_year:\n if DGIS_PROJECT:\n all_checks_passed = False\n checks.append((u'warning', u'indicator (id: %s) baseline has no year '\n u'specified, however the value of \"1\" has been '\n u'set for the attribute' % str(indicator.pk)))\n elif indicator.baseline_value or indicator.baseline_comment:\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) baseline has no year specified' %\n str(indicator.pk)))\n\n for reference in indicator.references.all():\n if not reference.reference:\n all_checks_passed = False\n checks.append((u'error', u'indicator reference (id: %s) has no code '\n u'specified' % str(reference.pk)))\n\n if not reference.vocabulary:\n all_checks_passed = False\n checks.append((u'error', u'indicator reference (id: %s) has no vocabulary '\n u'specified' % str(reference.pk)))\n\n if reference.vocabulary == '99' and not reference.vocabulary_uri:\n all_checks_passed = False\n checks.append((u'error', u'indicator reference (id: %s) has vocabulary 99 '\n u'(reporting organisation) but no vocabulary URI '\n u'specified' % str(reference.pk)))\n\n for period in indicator.periods.all():\n if not period.period_start:\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has no start date '\n u'specified' % str(period.pk)))\n\n if not period.period_end:\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has no end date '\n u'specified' % str(period.pk)))\n\n if period.period_start and period.period_end and \\\n period.period_start > period.period_end:\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has a start date '\n u'later than the end date' % str(period.pk)))\n\n if not period.target_value:\n if DGIS_PROJECT:\n all_checks_passed = False\n checks.append((u'warning', u'indicator period (id: %s) has no target value '\n u'specified. The value \"N/A\" has been set for '\n u'the target value attribute' % str(period.pk)))\n elif (period.target_comment or period.target_locations.all() or\n period.target_dimensions.all()):\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has no target value, '\n u'but does have a target comment, target '\n u'location(s) or target dimension(s)' %\n str(period.pk)))\n\n if not period.actual_value:\n if DGIS_PROJECT:\n all_checks_passed = False\n checks.append((u'warning', u'indicator period (id: %s) has no actual value '\n u'specified. 
The value \"N/A\" has been set for '\n u'the actual value attribute' % str(period.pk)))\n elif (period.actual_comment or period.actual_locations.all() or\n period.actual_dimensions.all()):\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has no actual value, '\n u'but does have a actual comment, actual '\n u'location(s) or actual dimension(s)' %\n str(period.pk)))\n\n if project.results.all() and all_checks_passed:\n checks.append((u'success', u'has valid result(s)'))\n\n return all_checks_passed, checks\n", "path": "akvo/iati/checks/fields/results.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\nfrom akvo.rsr.models.result.utils import QUANTITATIVE, QUALITATIVE\n\nDGIS_VALIDATION_SET_NAME = u\"DGIS IATI\"\n\n\ndef results(project):\n \"\"\"\n :param project: Project object\n :return: All checks passed boolean, [Check results]\n \"\"\"\n checks = []\n all_checks_passed = True\n\n DGIS_PROJECT = project.validations.filter(name=DGIS_VALIDATION_SET_NAME).count() == 1\n\n for result in project.results.all():\n if not result.type:\n all_checks_passed = False\n checks.append((u'error', u'result (id: %s) has no type specified' % str(result.pk)))\n\n if not result.title:\n all_checks_passed = False\n checks.append((u'error', u'result (id: %s) has no title specified' % str(result.pk)))\n\n if not result.indicators.exists():\n all_checks_passed = False\n checks.append(\n (u'error', u'result (id: %s) has no indicator(s)' % str(result.pk))\n )\n\n for indicator in result.indicators.all():\n if indicator.type == QUANTITATIVE and not indicator.measure:\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) has no measure specified' %\n str(indicator.pk)))\n\n if not indicator.title:\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) has no title specified' %\n str(indicator.pk)))\n\n if not indicator.baseline_value:\n if DGIS_PROJECT:\n all_checks_passed = False\n checks.append((u'warning', u'indicator (id: %s) baseline has no value '\n u'specified, however the value of \"N/A\" has been '\n u'set for the attribute' % str(indicator.pk)))\n elif indicator.baseline_year or indicator.baseline_comment:\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) baseline has no value specified' %\n str(indicator.pk)))\n\n if not indicator.baseline_year:\n if DGIS_PROJECT:\n all_checks_passed = False\n checks.append((u'warning', u'indicator (id: %s) baseline has no year '\n u'specified, however the value of \"1\" has been '\n u'set for the attribute' % str(indicator.pk)))\n elif indicator.baseline_value or indicator.baseline_comment:\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) baseline has no year specified' %\n str(indicator.pk)))\n\n for reference in indicator.references.all():\n if not reference.reference:\n all_checks_passed = False\n checks.append((u'error', u'indicator reference (id: %s) has no code '\n u'specified' % str(reference.pk)))\n\n if not reference.vocabulary:\n all_checks_passed = False\n checks.append((u'error', u'indicator reference (id: %s) has no vocabulary '\n u'specified' % str(reference.pk)))\n\n if reference.vocabulary == '99' and not reference.vocabulary_uri:\n all_checks_passed = False\n 
checks.append((u'error', u'indicator reference (id: %s) has vocabulary 99 '\n u'(reporting organisation) but no vocabulary URI '\n u'specified' % str(reference.pk)))\n\n for period in indicator.periods.all():\n if not period.period_start:\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has no start date '\n u'specified' % str(period.pk)))\n\n if not period.period_end:\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has no end date '\n u'specified' % str(period.pk)))\n\n if period.period_start and period.period_end and \\\n period.period_start > period.period_end:\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has a start date '\n u'later than the end date' % str(period.pk)))\n\n if indicator.type == QUANTITATIVE and not period.target_value:\n if DGIS_PROJECT:\n all_checks_passed = False\n checks.append((u'warning', u'indicator period (id: %s) has no target value '\n u'specified. The value \"N/A\" has been set for '\n u'the target value attribute' % str(period.pk)))\n elif (period.target_comment or period.target_locations.all() or\n period.target_dimensions.all()):\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has no target value, '\n u'but does have a target comment, target '\n u'location(s) or target dimension(s)' %\n str(period.pk)))\n\n if indicator.type == QUANTITATIVE and not period.actual_value:\n if DGIS_PROJECT:\n all_checks_passed = False\n checks.append((u'warning', u'indicator period (id: %s) has no actual value '\n u'specified. The value \"N/A\" has been set for '\n u'the actual value attribute' % str(period.pk)))\n elif (period.actual_comment or period.actual_locations.all() or\n period.actual_dimensions.all()):\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has no actual value, '\n u'but does have a actual comment, actual '\n u'location(s) or actual dimension(s)' %\n str(period.pk)))\n\n if project.results.all() and all_checks_passed:\n checks.append((u'success', u'has valid result(s)'))\n\n return all_checks_passed, checks\n", "path": "akvo/iati/checks/fields/results.py"}]}
| 1,970 | 591 |
gh_patches_debug_14027
|
rasdani/github-patches
|
git_diff
|
cal-itp__benefits-451
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove all paragraph content from the viewmodel in `core:index`
From https://github.com/cal-itp/benefits/issues/366
<img width="832" alt="image" src="https://user-images.githubusercontent.com/3673236/163299119-7b8c7696-05b5-4ade-9bc2-c5b784707a66.png">
--- END ISSUE ---
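Since the paragraphs come from a dedicated helper, the fix amounts to deleting `_index_paragraphs()` and dropping the `paragraphs=` keyword from the `viewmodels.Page` call. A sketch of the trimmed view, reusing the imports and names (`_index_content_title`, `PageTemplateResponse`) from the file quoted below:

```python
from django.utils.translation import gettext as _

from . import middleware, models, session, viewmodels


@middleware.pageview_decorator
def index(request):
    """View handler for the main entry page, minus the paragraph content."""
    session.reset(request)

    # generate a button to the landing page for each active agency
    agencies = models.TransitAgency.all_active()
    buttons = [viewmodels.Button.outline_primary(text=a.short_name, url=a.index_url)
               for a in agencies]
    buttons[0].classes.append("mt-3")
    buttons[0].label = _("core.pages.index.chooseprovider")

    # paragraphs=_index_paragraphs() is dropped; the helper itself can be deleted.
    page = viewmodels.Page(
        content_title=_index_content_title(),
        buttons=buttons,
        classes="home",
    )
    return PageTemplateResponse(request, page)
```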
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `benefits/core/views.py`
Content:
```
1 """
2 The core application: view definition for the root of the webapp.
3 """
4 from django.http import HttpResponseBadRequest, HttpResponseNotFound, HttpResponseServerError
5 from django.template import loader
6 from django.template.response import TemplateResponse
7 from django.urls import reverse
8 from django.utils.translation import gettext as _
9
10 from . import middleware, models, session, viewmodels
11
12
13 def PageTemplateResponse(request, page_vm):
14 """Helper returns a TemplateResponse using the common page template."""
15 return TemplateResponse(request, "core/page.html", page_vm.context_dict())
16
17
18 def _index_content_title():
19 """Helper returns the content title for the common index page."""
20 return _("core.pages.index.content_title")
21
22
23 def _index_paragraphs():
24 """Helper returns the content paragraphs for the common index page."""
25 return [_("core.pages.index.p[0]"), _("core.pages.index.p[1]"), _("core.pages.index.p[2]")]
26
27
28 def _index_url():
29 """Helper computes the index url path."""
30 return reverse("core:index")
31
32
33 @middleware.pageview_decorator
34 def index(request):
35 """View handler for the main entry page."""
36 session.reset(request)
37
38 # generate a button to the landing page for each active agency
39 agencies = models.TransitAgency.all_active()
40 buttons = [viewmodels.Button.outline_primary(text=a.short_name, url=a.index_url) for a in agencies]
41 buttons[0].classes.append("mt-3")
42 buttons[0].label = _("core.pages.index.chooseprovider")
43
44 page = viewmodels.Page(
45 content_title=_index_content_title(),
46 paragraphs=_index_paragraphs(),
47 buttons=buttons,
48 classes="home",
49 )
50
51 return PageTemplateResponse(request, page)
52
53
54 @middleware.pageview_decorator
55 def agency_index(request, agency):
56 """View handler for an agency entry page."""
57 session.reset(request)
58 session.update(request, agency=agency, origin=agency.index_url)
59
60 button = viewmodels.Button.primary(text=_("core.pages.index.continue"), url=reverse("eligibility:index"))
61 button.label = _("core.pages.agency_index.button.label")
62
63 page = viewmodels.Page(
64 content_title=_("core.pages.agency_index.content_title"),
65 button=button,
66 classes="home",
67 )
68
69 help_page = reverse("core:help")
70 context_dict = {**page.context_dict(), **{"info_link": f"{help_page}#about"}}
71
72 return TemplateResponse(request, "core/agency_index.html", context_dict)
73
74
75 @middleware.pageview_decorator
76 def help(request):
77 """View handler for the help page."""
78 if session.active_agency(request):
79 agency = session.agency(request)
80 buttons = viewmodels.Button.agency_contact_links(agency)
81 else:
82 buttons = [btn for a in models.TransitAgency.all_active() for btn in viewmodels.Button.agency_contact_links(a)]
83
84 buttons.append(viewmodels.Button.home(request, _("core.buttons.back")))
85
86 page = viewmodels.Page(
87 title=_("core.buttons.help"),
88 content_title=_("core.buttons.help"),
89 buttons=buttons,
90 classes="text-lg-center",
91 noimage=True,
92 )
93
94 return TemplateResponse(request, "core/help.html", page.context_dict())
95
96
97 @middleware.pageview_decorator
98 def bad_request(request, exception, template_name="400.html"):
99 """View handler for HTTP 400 Bad Request responses."""
100 if session.active_agency(request):
101 session.update(request, origin=session.agency(request).index_url)
102 else:
103 session.update(request, origin=_index_url())
104
105 home = viewmodels.Button.home(request)
106 page = viewmodels.ErrorPage.error(button=home)
107 t = loader.get_template(template_name)
108
109 return HttpResponseBadRequest(t.render(page.context_dict()))
110
111
112 @middleware.pageview_decorator
113 def csrf_failure(request, reason):
114 """
115 View handler for CSRF_FAILURE_VIEW with custom data.
116 """
117 if session.active_agency(request):
118 session.update(request, origin=session.agency(request).index_url)
119 else:
120 session.update(request, origin=_index_url())
121
122 home = viewmodels.Button.home(request)
123 page = viewmodels.ErrorPage.not_found(button=home, path=request.path)
124 t = loader.get_template("400.html")
125
126 return HttpResponseNotFound(t.render(page.context_dict()))
127
128
129 @middleware.pageview_decorator
130 def page_not_found(request, exception, template_name="404.html"):
131 """View handler for HTTP 404 Not Found responses."""
132 if session.active_agency(request):
133 session.update(request, origin=session.agency(request).index_url)
134 else:
135 session.update(request, origin=_index_url())
136
137 home = viewmodels.Button.home(request)
138 page = viewmodels.ErrorPage.not_found(button=home, path=request.path)
139 t = loader.get_template(template_name)
140
141 return HttpResponseNotFound(t.render(page.context_dict()))
142
143
144 @middleware.pageview_decorator
145 def server_error(request, template_name="500.html"):
146 """View handler for HTTP 500 Server Error responses."""
147 if session.active_agency(request):
148 session.update(request, origin=session.agency(request).index_url)
149 else:
150 session.update(request, origin=_index_url())
151
152 home = viewmodels.Button.home(request)
153 page = viewmodels.ErrorPage.error(button=home)
154 t = loader.get_template(template_name)
155
156 return HttpResponseServerError(t.render(page.context_dict()))
157
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/benefits/core/views.py b/benefits/core/views.py
--- a/benefits/core/views.py
+++ b/benefits/core/views.py
@@ -20,11 +20,6 @@
return _("core.pages.index.content_title")
-def _index_paragraphs():
- """Helper returns the content paragraphs for the common index page."""
- return [_("core.pages.index.p[0]"), _("core.pages.index.p[1]"), _("core.pages.index.p[2]")]
-
-
def _index_url():
"""Helper computes the index url path."""
return reverse("core:index")
@@ -43,7 +38,6 @@
page = viewmodels.Page(
content_title=_index_content_title(),
- paragraphs=_index_paragraphs(),
buttons=buttons,
classes="home",
)
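
A minimal regression sketch for this fix, assuming Django's test client and that `core:index` is a registered route; the asserted translation key comes from the removed `_index_paragraphs` helper and is illustrative:

```python
# a minimal sketch, assuming Django's test client and the "core:index" route;
# the asserted key comes from the removed _index_paragraphs helper
from django.test import Client
from django.urls import reverse


def test_index_renders_without_paragraphs():
    response = Client().get(reverse("core:index"))
    html = response.content.decode()
    # after the patch, no paragraph content should reach the template
    assert "core.pages.index.p[0]" not in html
```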
|
{"golden_diff": "diff --git a/benefits/core/views.py b/benefits/core/views.py\n--- a/benefits/core/views.py\n+++ b/benefits/core/views.py\n@@ -20,11 +20,6 @@\n return _(\"core.pages.index.content_title\")\n \n \n-def _index_paragraphs():\n- \"\"\"Helper returns the content paragraphs for the common index page.\"\"\"\n- return [_(\"core.pages.index.p[0]\"), _(\"core.pages.index.p[1]\"), _(\"core.pages.index.p[2]\")]\n-\n-\n def _index_url():\n \"\"\"Helper computes the index url path.\"\"\"\n return reverse(\"core:index\")\n@@ -43,7 +38,6 @@\n \n page = viewmodels.Page(\n content_title=_index_content_title(),\n- paragraphs=_index_paragraphs(),\n buttons=buttons,\n classes=\"home\",\n )\n", "issue": "Remove all paragraph content from the viewmodel in `core:index`\nFrom https://github.com/cal-itp/benefits/issues/366\r\n<img width=\"832\" alt=\"image\" src=\"https://user-images.githubusercontent.com/3673236/163299119-7b8c7696-05b5-4ade-9bc2-c5b784707a66.png\">\r\n\r\n\n", "before_files": [{"content": "\"\"\"\nThe core application: view definition for the root of the webapp.\n\"\"\"\nfrom django.http import HttpResponseBadRequest, HttpResponseNotFound, HttpResponseServerError\nfrom django.template import loader\nfrom django.template.response import TemplateResponse\nfrom django.urls import reverse\nfrom django.utils.translation import gettext as _\n\nfrom . import middleware, models, session, viewmodels\n\n\ndef PageTemplateResponse(request, page_vm):\n \"\"\"Helper returns a TemplateResponse using the common page template.\"\"\"\n return TemplateResponse(request, \"core/page.html\", page_vm.context_dict())\n\n\ndef _index_content_title():\n \"\"\"Helper returns the content title for the common index page.\"\"\"\n return _(\"core.pages.index.content_title\")\n\n\ndef _index_paragraphs():\n \"\"\"Helper returns the content paragraphs for the common index page.\"\"\"\n return [_(\"core.pages.index.p[0]\"), _(\"core.pages.index.p[1]\"), _(\"core.pages.index.p[2]\")]\n\n\ndef _index_url():\n \"\"\"Helper computes the index url path.\"\"\"\n return reverse(\"core:index\")\n\n\[email protected]_decorator\ndef index(request):\n \"\"\"View handler for the main entry page.\"\"\"\n session.reset(request)\n\n # generate a button to the landing page for each active agency\n agencies = models.TransitAgency.all_active()\n buttons = [viewmodels.Button.outline_primary(text=a.short_name, url=a.index_url) for a in agencies]\n buttons[0].classes.append(\"mt-3\")\n buttons[0].label = _(\"core.pages.index.chooseprovider\")\n\n page = viewmodels.Page(\n content_title=_index_content_title(),\n paragraphs=_index_paragraphs(),\n buttons=buttons,\n classes=\"home\",\n )\n\n return PageTemplateResponse(request, page)\n\n\[email protected]_decorator\ndef agency_index(request, agency):\n \"\"\"View handler for an agency entry page.\"\"\"\n session.reset(request)\n session.update(request, agency=agency, origin=agency.index_url)\n\n button = viewmodels.Button.primary(text=_(\"core.pages.index.continue\"), url=reverse(\"eligibility:index\"))\n button.label = _(\"core.pages.agency_index.button.label\")\n\n page = viewmodels.Page(\n content_title=_(\"core.pages.agency_index.content_title\"),\n button=button,\n classes=\"home\",\n )\n\n help_page = reverse(\"core:help\")\n context_dict = {**page.context_dict(), **{\"info_link\": f\"{help_page}#about\"}}\n\n return TemplateResponse(request, \"core/agency_index.html\", context_dict)\n\n\[email protected]_decorator\ndef help(request):\n \"\"\"View handler for the help page.\"\"\"\n 
if session.active_agency(request):\n agency = session.agency(request)\n buttons = viewmodels.Button.agency_contact_links(agency)\n else:\n buttons = [btn for a in models.TransitAgency.all_active() for btn in viewmodels.Button.agency_contact_links(a)]\n\n buttons.append(viewmodels.Button.home(request, _(\"core.buttons.back\")))\n\n page = viewmodels.Page(\n title=_(\"core.buttons.help\"),\n content_title=_(\"core.buttons.help\"),\n buttons=buttons,\n classes=\"text-lg-center\",\n noimage=True,\n )\n\n return TemplateResponse(request, \"core/help.html\", page.context_dict())\n\n\[email protected]_decorator\ndef bad_request(request, exception, template_name=\"400.html\"):\n \"\"\"View handler for HTTP 400 Bad Request responses.\"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=_index_url())\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.error(button=home)\n t = loader.get_template(template_name)\n\n return HttpResponseBadRequest(t.render(page.context_dict()))\n\n\[email protected]_decorator\ndef csrf_failure(request, reason):\n \"\"\"\n View handler for CSRF_FAILURE_VIEW with custom data.\n \"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=_index_url())\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.not_found(button=home, path=request.path)\n t = loader.get_template(\"400.html\")\n\n return HttpResponseNotFound(t.render(page.context_dict()))\n\n\[email protected]_decorator\ndef page_not_found(request, exception, template_name=\"404.html\"):\n \"\"\"View handler for HTTP 404 Not Found responses.\"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=_index_url())\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.not_found(button=home, path=request.path)\n t = loader.get_template(template_name)\n\n return HttpResponseNotFound(t.render(page.context_dict()))\n\n\[email protected]_decorator\ndef server_error(request, template_name=\"500.html\"):\n \"\"\"View handler for HTTP 500 Server Error responses.\"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=_index_url())\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.error(button=home)\n t = loader.get_template(template_name)\n\n return HttpResponseServerError(t.render(page.context_dict()))\n", "path": "benefits/core/views.py"}], "after_files": [{"content": "\"\"\"\nThe core application: view definition for the root of the webapp.\n\"\"\"\nfrom django.http import HttpResponseBadRequest, HttpResponseNotFound, HttpResponseServerError\nfrom django.template import loader\nfrom django.template.response import TemplateResponse\nfrom django.urls import reverse\nfrom django.utils.translation import gettext as _\n\nfrom . 
import middleware, models, session, viewmodels\n\n\ndef PageTemplateResponse(request, page_vm):\n \"\"\"Helper returns a TemplateResponse using the common page template.\"\"\"\n return TemplateResponse(request, \"core/page.html\", page_vm.context_dict())\n\n\ndef _index_content_title():\n \"\"\"Helper returns the content title for the common index page.\"\"\"\n return _(\"core.pages.index.content_title\")\n\n\ndef _index_url():\n \"\"\"Helper computes the index url path.\"\"\"\n return reverse(\"core:index\")\n\n\[email protected]_decorator\ndef index(request):\n \"\"\"View handler for the main entry page.\"\"\"\n session.reset(request)\n\n # generate a button to the landing page for each active agency\n agencies = models.TransitAgency.all_active()\n buttons = [viewmodels.Button.outline_primary(text=a.short_name, url=a.index_url) for a in agencies]\n buttons[0].classes.append(\"mt-3\")\n buttons[0].label = _(\"core.pages.index.chooseprovider\")\n\n page = viewmodels.Page(\n content_title=_index_content_title(),\n buttons=buttons,\n classes=\"home\",\n )\n\n return PageTemplateResponse(request, page)\n\n\[email protected]_decorator\ndef agency_index(request, agency):\n \"\"\"View handler for an agency entry page.\"\"\"\n session.reset(request)\n session.update(request, agency=agency, origin=agency.index_url)\n\n button = viewmodels.Button.primary(text=_(\"core.pages.index.continue\"), url=reverse(\"eligibility:index\"))\n button.label = _(\"core.pages.agency_index.button.label\")\n\n page = viewmodels.Page(\n content_title=_(\"core.pages.agency_index.content_title\"),\n button=button,\n classes=\"home\",\n )\n\n help_page = reverse(\"core:help\")\n context_dict = {**page.context_dict(), **{\"info_link\": f\"{help_page}#about\"}}\n\n return TemplateResponse(request, \"core/agency_index.html\", context_dict)\n\n\[email protected]_decorator\ndef help(request):\n \"\"\"View handler for the help page.\"\"\"\n if session.active_agency(request):\n agency = session.agency(request)\n buttons = viewmodels.Button.agency_contact_links(agency)\n else:\n buttons = [btn for a in models.TransitAgency.all_active() for btn in viewmodels.Button.agency_contact_links(a)]\n\n buttons.append(viewmodels.Button.home(request, _(\"core.buttons.back\")))\n\n page = viewmodels.Page(\n title=_(\"core.buttons.help\"),\n content_title=_(\"core.buttons.help\"),\n buttons=buttons,\n classes=\"text-lg-center\",\n noimage=True,\n )\n\n return TemplateResponse(request, \"core/help.html\", page.context_dict())\n\n\[email protected]_decorator\ndef bad_request(request, exception, template_name=\"400.html\"):\n \"\"\"View handler for HTTP 400 Bad Request responses.\"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=_index_url())\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.error(button=home)\n t = loader.get_template(template_name)\n\n return HttpResponseBadRequest(t.render(page.context_dict()))\n\n\[email protected]_decorator\ndef csrf_failure(request, reason):\n \"\"\"\n View handler for CSRF_FAILURE_VIEW with custom data.\n \"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=_index_url())\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.not_found(button=home, path=request.path)\n t = loader.get_template(\"400.html\")\n\n return 
HttpResponseNotFound(t.render(page.context_dict()))\n\n\[email protected]_decorator\ndef page_not_found(request, exception, template_name=\"404.html\"):\n \"\"\"View handler for HTTP 404 Not Found responses.\"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=_index_url())\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.not_found(button=home, path=request.path)\n t = loader.get_template(template_name)\n\n return HttpResponseNotFound(t.render(page.context_dict()))\n\n\[email protected]_decorator\ndef server_error(request, template_name=\"500.html\"):\n \"\"\"View handler for HTTP 500 Server Error responses.\"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=_index_url())\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.error(button=home)\n t = loader.get_template(template_name)\n\n return HttpResponseServerError(t.render(page.context_dict()))\n", "path": "benefits/core/views.py"}]}
| 1,866 | 180 |
gh_patches_debug_7083
|
rasdani/github-patches
|
git_diff
|
bridgecrewio__checkov-5222
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
CKV_GIT_4 always fails with terraform_plan
**Describe the issue**
**Describe the issue** Once a GitHub org/repo secret is created and stored in the Terraform state, the check CKV_GIT_4 will always fail when scanning Terraform plans, even if the value was created from an encrypted value.
It seems the check considers `"plaintext_value": ""` to be a hard-coded secret, because if I remove that line from the plan or change it to `"plaintext_value": null`, the check passes.
```
"resources": [
{
"address": "github_actions_organization_secret.my_github_secret",
"mode": "managed",
"type": "github_actions_organization_secret",
"name": "my_github_secret",
"provider_name": "registry.terraform.io/integrations/github",
"schema_version": 0,
"values": {
"created_at": "2023-05-17 13:54:59 +0000 UTC",
"encrypted_value": "MIr5c6eSzTJeGW/uyB0u...",
"id": "MY_GITHUB_SECRET",
"plaintext_value": "",
"secret_name": "MY_GITHUB_SECRET",
"selected_repository_ids": [],
"updated_at": "2023-05-17 13:54:59 +0000 UTC",
"visibility": "all"
},
"sensitive_values": {
"selected_repository_ids": []
}
}
```
**Version:** Checkov 2.3.223
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `checkov/terraform/checks/resource/github/SecretsEncrypted.py`
Content:
```
1 from typing import List, Any, Dict
2
3 from checkov.common.models.enums import CheckCategories, CheckResult
4 from checkov.terraform.checks.resource.base_resource_negative_value_check import BaseResourceNegativeValueCheck
5 from checkov.common.models.consts import ANY_VALUE
6
7
8 class SecretsEncrypted(BaseResourceNegativeValueCheck):
9 def __init__(self) -> None:
10 # -from github docs "It is also advised that you do not store plaintext values in your code but rather populate
11 # the encrypted_value using fields from a resource, data source or variable as,
12 # while encrypted in state, these will be easily accessible in your code"
13 name = "Ensure GitHub Actions secrets are encrypted"
14 id = "CKV_GIT_4"
15 supported_resources = (
16 "github_actions_environment_secret",
17 "github_actions_organization_secret",
18 "github_actions_secret",
19 )
20 categories = (CheckCategories.ENCRYPTION,)
21 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
22
23 def scan_resource_conf(self, conf: Dict[str, List[Any]]) -> CheckResult:
24 plaintext = conf.get("plaintext_value")
25 if plaintext and self._is_variable_dependant(plaintext[0]):
26 return CheckResult.UNKNOWN
27
28 return super().scan_resource_conf(conf)
29
30 def get_inspected_key(self) -> str:
31 return "plaintext_value"
32
33 def get_forbidden_values(self) -> List[Any]:
34 return [ANY_VALUE]
35
36
37 check = SecretsEncrypted()
38
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/checkov/terraform/checks/resource/github/SecretsEncrypted.py b/checkov/terraform/checks/resource/github/SecretsEncrypted.py
--- a/checkov/terraform/checks/resource/github/SecretsEncrypted.py
+++ b/checkov/terraform/checks/resource/github/SecretsEncrypted.py
@@ -25,6 +25,10 @@
if plaintext and self._is_variable_dependant(plaintext[0]):
return CheckResult.UNKNOWN
+ if isinstance(plaintext, list) and not plaintext[0]:
+ # this happens mainly in TF plan files, because the value is just an empty string
+ return CheckResult.PASSED
+
return super().scan_resource_conf(conf)
def get_inspected_key(self) -> str:
|
{"golden_diff": "diff --git a/checkov/terraform/checks/resource/github/SecretsEncrypted.py b/checkov/terraform/checks/resource/github/SecretsEncrypted.py\n--- a/checkov/terraform/checks/resource/github/SecretsEncrypted.py\n+++ b/checkov/terraform/checks/resource/github/SecretsEncrypted.py\n@@ -25,6 +25,10 @@\n if plaintext and self._is_variable_dependant(plaintext[0]):\n return CheckResult.UNKNOWN\n \n+ if isinstance(plaintext, list) and not plaintext[0]:\n+ # this happens mainly in TF plan files, because the value is just an empty string\n+ return CheckResult.PASSED\n+\n return super().scan_resource_conf(conf)\n \n def get_inspected_key(self) -> str:\n", "issue": "CKV_GIT_4 always fail with terraform_plan\n**Describe the issue**\r\nOnce a GitHub org/repo secret is created and stored in the terraform state, the check CKV_GIT_4 will always fail when scanning terraform plans even if the value was created using an encrypted value.\r\n\r\nIt seems like the check consider `\"plaintext_text\": \"\"` to be a hard-coded secret because if I remove that line from the plan or change it to `\"plaintext_text\": null`, the check passes.\r\n\r\n```\r\n \"resources\": [\r\n {\r\n \"address\": \"github_actions_organization_secret.my_github_secret\",\r\n \"mode\": \"managed\",\r\n \"type\": \"github_actions_organization_secret\",\r\n \"name\": \"my_github_secret\",\r\n \"provider_name\": \"registry.terraform.io/integrations/github\",\r\n \"schema_version\": 0,\r\n \"values\": {\r\n \"created_at\": \"2023-05-17 13:54:59 +0000 UTC\",\r\n \"encrypted_value\": \"MIr5c6eSzTJeGW/uyB0u...\",\r\n \"id\": \"MY_GITHUB_SECRET\",\r\n \"plaintext_value\": \"\",\r\n \"secret_name\": \"MY_GITHUB_SECRET\",\r\n \"selected_repository_ids\": [],\r\n \"updated_at\": \"2023-05-17 13:54:59 +0000 UTC\",\r\n \"visibility\": \"all\"\r\n },\r\n \"sensitive_values\": {\r\n \"selected_repository_ids\": []\r\n }\r\n }\r\n```\r\n\r\n**Examples**\r\n\r\n\r\n**Version (please complete the following information):**\r\n - Checkov Version 2.3.223\r\n\r\n**Additional context**\r\nAdd any other context about the problem here.\r\n\n", "before_files": [{"content": "from typing import List, Any, Dict\n\nfrom checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_negative_value_check import BaseResourceNegativeValueCheck\nfrom checkov.common.models.consts import ANY_VALUE\n\n\nclass SecretsEncrypted(BaseResourceNegativeValueCheck):\n def __init__(self) -> None:\n # -from github docs \"It is also advised that you do not store plaintext values in your code but rather populate\n # the encrypted_value using fields from a resource, data source or variable as,\n # while encrypted in state, these will be easily accessible in your code\"\n name = \"Ensure GitHub Actions secrets are encrypted\"\n id = \"CKV_GIT_4\"\n supported_resources = (\n \"github_actions_environment_secret\",\n \"github_actions_organization_secret\",\n \"github_actions_secret\",\n )\n categories = (CheckCategories.ENCRYPTION,)\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf: Dict[str, List[Any]]) -> CheckResult:\n plaintext = conf.get(\"plaintext_value\")\n if plaintext and self._is_variable_dependant(plaintext[0]):\n return CheckResult.UNKNOWN\n\n return super().scan_resource_conf(conf)\n\n def get_inspected_key(self) -> str:\n return \"plaintext_value\"\n\n def get_forbidden_values(self) -> List[Any]:\n return 
[ANY_VALUE]\n\n\ncheck = SecretsEncrypted()\n", "path": "checkov/terraform/checks/resource/github/SecretsEncrypted.py"}], "after_files": [{"content": "from typing import List, Any, Dict\n\nfrom checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_negative_value_check import BaseResourceNegativeValueCheck\nfrom checkov.common.models.consts import ANY_VALUE\n\n\nclass SecretsEncrypted(BaseResourceNegativeValueCheck):\n def __init__(self) -> None:\n # -from github docs \"It is also advised that you do not store plaintext values in your code but rather populate\n # the encrypted_value using fields from a resource, data source or variable as,\n # while encrypted in state, these will be easily accessible in your code\"\n name = \"Ensure GitHub Actions secrets are encrypted\"\n id = \"CKV_GIT_4\"\n supported_resources = (\n \"github_actions_environment_secret\",\n \"github_actions_organization_secret\",\n \"github_actions_secret\",\n )\n categories = (CheckCategories.ENCRYPTION,)\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf: Dict[str, List[Any]]) -> CheckResult:\n plaintext = conf.get(\"plaintext_value\")\n if plaintext and self._is_variable_dependant(plaintext[0]):\n return CheckResult.UNKNOWN\n\n if isinstance(plaintext, list) and not plaintext[0]:\n # this happens mainly in TF plan files, because the value is just an empty string\n return CheckResult.PASSED\n\n return super().scan_resource_conf(conf)\n\n def get_inspected_key(self) -> str:\n return \"plaintext_value\"\n\n def get_forbidden_values(self) -> List[Any]:\n return [ANY_VALUE]\n\n\ncheck = SecretsEncrypted()\n", "path": "checkov/terraform/checks/resource/github/SecretsEncrypted.py"}]}
| 1,030 | 168 |
gh_patches_debug_2251
|
rasdani/github-patches
|
git_diff
|
mirumee__ariadne-232
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Update GraphQL Core Next & Starlette
Issue for me to remember to update our core dependencies to latest versions before release.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #! /usr/bin/env python
2 import os
3 from setuptools import setup
4
5 CLASSIFIERS = [
6 "Development Status :: 4 - Beta",
7 "Intended Audience :: Developers",
8 "License :: OSI Approved :: BSD License",
9 "Operating System :: OS Independent",
10 "Programming Language :: Python",
11 "Programming Language :: Python :: 3.6",
12 "Programming Language :: Python :: 3.7",
13 "Topic :: Software Development :: Libraries :: Python Modules",
14 ]
15
16 README_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "README.md")
17 with open(README_PATH, "r") as f:
18 README = f.read()
19
20 setup(
21 name="ariadne",
22 author="Mirumee Software",
23 author_email="[email protected]",
24 description="Ariadne is a Python library for implementing GraphQL servers.",
25 long_description=README,
26 long_description_content_type="text/markdown",
27 license="BSD",
28 version="0.5.0",
29 url="https://github.com/mirumee/ariadne",
30 packages=["ariadne"],
31 include_package_data=True,
32 install_requires=[
33 "graphql-core-next>=1.0.4",
34 "python-multipart>=0.0.5",
35 "starlette<0.13",
36 "typing_extensions>=3.6.0",
37 ],
38 classifiers=CLASSIFIERS,
39 platforms=["any"],
40 zip_safe=False,
41 )
42
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,7 @@
packages=["ariadne"],
include_package_data=True,
install_requires=[
- "graphql-core-next>=1.0.4",
+ "graphql-core-next<3.0.0",
"python-multipart>=0.0.5",
"starlette<0.13",
"typing_extensions>=3.6.0",
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -30,7 +30,7 @@\n packages=[\"ariadne\"],\n include_package_data=True,\n install_requires=[\n- \"graphql-core-next>=1.0.4\",\n+ \"graphql-core-next<3.0.0\",\n \"python-multipart>=0.0.5\",\n \"starlette<0.13\",\n \"typing_extensions>=3.6.0\",\n", "issue": "Update GraphQL Core Next & Starlette\nIssue for me to remember to update our core dependencies to latest versions before release.\n", "before_files": [{"content": "#! /usr/bin/env python\nimport os\nfrom setuptools import setup\n\nCLASSIFIERS = [\n \"Development Status :: 4 - Beta\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n]\n\nREADME_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\")\nwith open(README_PATH, \"r\") as f:\n README = f.read()\n\nsetup(\n name=\"ariadne\",\n author=\"Mirumee Software\",\n author_email=\"[email protected]\",\n description=\"Ariadne is a Python library for implementing GraphQL servers.\",\n long_description=README,\n long_description_content_type=\"text/markdown\",\n license=\"BSD\",\n version=\"0.5.0\",\n url=\"https://github.com/mirumee/ariadne\",\n packages=[\"ariadne\"],\n include_package_data=True,\n install_requires=[\n \"graphql-core-next>=1.0.4\",\n \"python-multipart>=0.0.5\",\n \"starlette<0.13\",\n \"typing_extensions>=3.6.0\",\n ],\n classifiers=CLASSIFIERS,\n platforms=[\"any\"],\n zip_safe=False,\n)\n", "path": "setup.py"}], "after_files": [{"content": "#! /usr/bin/env python\nimport os\nfrom setuptools import setup\n\nCLASSIFIERS = [\n \"Development Status :: 4 - Beta\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n]\n\nREADME_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\")\nwith open(README_PATH, \"r\") as f:\n README = f.read()\n\nsetup(\n name=\"ariadne\",\n author=\"Mirumee Software\",\n author_email=\"[email protected]\",\n description=\"Ariadne is a Python library for implementing GraphQL servers.\",\n long_description=README,\n long_description_content_type=\"text/markdown\",\n license=\"BSD\",\n version=\"0.5.0\",\n url=\"https://github.com/mirumee/ariadne\",\n packages=[\"ariadne\"],\n include_package_data=True,\n install_requires=[\n \"graphql-core-next<3.0.0\",\n \"python-multipart>=0.0.5\",\n \"starlette<0.13\",\n \"typing_extensions>=3.6.0\",\n ],\n classifiers=CLASSIFIERS,\n platforms=[\"any\"],\n zip_safe=False,\n)\n", "path": "setup.py"}]}
| 668 | 108 |
gh_patches_debug_56857
|
rasdani/github-patches
|
git_diff
|
spesmilo__electrum-1112
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Hardcoded icon path in launcher for GNU/Linux systems
Hi,
Currently, the icon path specified in the created _.desktop_ launcher file is set to: 
_Icon=/usr/share/app-install/icons/electrum.png_
(see https://github.com/spesmilo/electrum/blob/bc3013caf0d3d6a35290e9cc9e51125b7d03d14c/electrum.desktop)
This prevents icon theming without editing the launcher file.
I'd like to ask you to change the icon location as well as the icon path line in the launcher according to the freedesktop.org standards.
Please find more info here:
http://standards.freedesktop.org/icon-theme-spec/icon-theme-spec-latest.html
https://github.com/Foggalong/hardcode-fixer/wiki/What,-Why-&-How
---
According to the above resources standard icon locations are:
`/usr/share/icons/hicolor/[size]/apps/[icon name]`
`~/.local/share/icons/hicolor/[size]/apps/[icon name]`
`/usr/share/pixmaps/[icon name]`

The standard icon line in the .desktop launcher is:
`Icon=[icon name]` (without full path, without file extension)
for instance
**Icon=electrum**
(for an icon _electrum.svg_ or _electrum.png_ stored in standard location)
Could you please move the icon to one of those locations and adjust the icon path in the .desktop file accordingly?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/python
2
3 # python setup.py sdist --format=zip,gztar
4
5 from setuptools import setup
6 import os
7 import sys
8 import platform
9 import imp
10
11
12 version = imp.load_source('version', 'lib/version.py')
13
14 if sys.version_info[:3] < (2, 7, 0):
15 sys.exit("Error: Electrum requires Python version >= 2.7.0...")
16
17
18
19 data_files = []
20 if platform.system() == 'Linux' or platform.system() == 'FreeBSD':
21 usr_share = os.path.join(sys.prefix, "share")
22 data_files += [
23 (os.path.join(usr_share, 'applications/'), ['electrum.desktop']),
24 (os.path.join(usr_share, 'app-install', 'icons/'), ['icons/electrum.png'])
25 ]
26
27
28 setup(
29 name="Electrum",
30 version=version.ELECTRUM_VERSION,
31 install_requires=[
32 'slowaes>=0.1a1',
33 'ecdsa>=0.9',
34 'pbkdf2',
35 'requests',
36 'pyasn1-modules',
37 'pyasn1',
38 'qrcode',
39 'protobuf',
40 'tlslite',
41 'dnspython',
42 ],
43 package_dir={
44 'electrum': 'lib',
45 'electrum_gui': 'gui',
46 'electrum_plugins': 'plugins',
47 },
48 packages=['electrum','electrum_gui','electrum_gui.qt','electrum_plugins'],
49 package_data={
50 'electrum': [
51 'wordlist/*.txt',
52 'locale/*/LC_MESSAGES/electrum.mo',
53 ],
54 'electrum_gui': [
55 "qt/themes/cleanlook/name.cfg",
56 "qt/themes/cleanlook/style.css",
57 "qt/themes/sahara/name.cfg",
58 "qt/themes/sahara/style.css",
59 "qt/themes/dark/name.cfg",
60 "qt/themes/dark/style.css",
61 ]
62 },
63 scripts=['electrum'],
64 data_files=data_files,
65 description="Lightweight Bitcoin Wallet",
66 author="Thomas Voegtlin",
67 author_email="[email protected]",
68 license="GNU GPLv3",
69 url="https://electrum.org",
70 long_description="""Lightweight Bitcoin Wallet"""
71 )
72
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@
usr_share = os.path.join(sys.prefix, "share")
data_files += [
(os.path.join(usr_share, 'applications/'), ['electrum.desktop']),
- (os.path.join(usr_share, 'app-install', 'icons/'), ['icons/electrum.png'])
+ (os.path.join(usr_share, 'pixmaps/'), ['icons/electrum.png'])
]
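
For context, a sketch of the freedesktop-style locations the patch targets, mirroring the `os.path.join` style of setup.py; the hicolor path is an additional standard location mentioned in the issue, not part of the patch:

```python
# illustrative freedesktop icon locations, mirroring setup.py's path style
import os
import sys

usr_share = os.path.join(sys.prefix, "share")
pixmaps_icon = os.path.join(usr_share, "pixmaps", "electrum.png")
hicolor_icon = os.path.join(usr_share, "icons", "hicolor", "128x128", "apps", "electrum.png")

# the launcher then references the icon by name only, e.g. "Icon=electrum"
```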
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -21,7 +21,7 @@\n usr_share = os.path.join(sys.prefix, \"share\")\n data_files += [\n (os.path.join(usr_share, 'applications/'), ['electrum.desktop']),\n- (os.path.join(usr_share, 'app-install', 'icons/'), ['icons/electrum.png'])\n+ (os.path.join(usr_share, 'pixmaps/'), ['icons/electrum.png'])\n ]\n", "issue": "Hardcoded icon path in launcher for GNU/Linux systems\nHi,\n\ncurrently the icon path specified in the created _.desktop_ launcher file is set to: \n_Icon=/usr/share/app-install/icons/electrum.png_ \n\n(see https://github.com/spesmilo/electrum/blob/bc3013caf0d3d6a35290e9cc9e51125b7d03d14c/electrum.desktop)\n\nThis prevents icon theming without editing the launcher file.\n\nI'd like to ask you to change the icon location as well as the icon path line in the launcher according to the freedesktop.org standards.\n\nPlease find more info here:\nhttp://standards.freedesktop.org/icon-theme-spec/icon-theme-spec-latest.html\nhttps://github.com/Foggalong/hardcode-fixer/wiki/What,-Why-&-How\n\n---\n\nAccording to the above resources standard icon locations are:\n\n_/usr/share/icons/hicolor/[size]/apps/[icon name]_\n_~/.local/share/icons/hicolor/[size]/apps/[icon name]_\n_/usr/share/pixmaps/[icon name]_\n\nThe standard icon line in the .desktop launcher is:\n_Icon=[icon name]_ (without full path, without file extension)\n\nfor instance\n**Icon=electrum** \n(for an icon _electrum.svg_ or _electrum.png_ stored in standard location)\n\nCould you please move the icon to one of those locations and adjust the icon path in the .desktop file accordingly?\n\n", "before_files": [{"content": "#!/usr/bin/python\n\n# python setup.py sdist --format=zip,gztar\n\nfrom setuptools import setup\nimport os\nimport sys\nimport platform\nimport imp\n\n\nversion = imp.load_source('version', 'lib/version.py')\n\nif sys.version_info[:3] < (2, 7, 0):\n sys.exit(\"Error: Electrum requires Python version >= 2.7.0...\")\n\n\n\ndata_files = []\nif platform.system() == 'Linux' or platform.system() == 'FreeBSD':\n usr_share = os.path.join(sys.prefix, \"share\")\n data_files += [\n (os.path.join(usr_share, 'applications/'), ['electrum.desktop']),\n (os.path.join(usr_share, 'app-install', 'icons/'), ['icons/electrum.png'])\n ]\n\n\nsetup(\n name=\"Electrum\",\n version=version.ELECTRUM_VERSION,\n install_requires=[\n 'slowaes>=0.1a1',\n 'ecdsa>=0.9',\n 'pbkdf2',\n 'requests',\n 'pyasn1-modules',\n 'pyasn1',\n 'qrcode',\n 'protobuf',\n 'tlslite',\n 'dnspython',\n ],\n package_dir={\n 'electrum': 'lib',\n 'electrum_gui': 'gui',\n 'electrum_plugins': 'plugins',\n },\n packages=['electrum','electrum_gui','electrum_gui.qt','electrum_plugins'],\n package_data={\n 'electrum': [\n 'wordlist/*.txt',\n 'locale/*/LC_MESSAGES/electrum.mo',\n ],\n 'electrum_gui': [\n \"qt/themes/cleanlook/name.cfg\",\n \"qt/themes/cleanlook/style.css\",\n \"qt/themes/sahara/name.cfg\",\n \"qt/themes/sahara/style.css\",\n \"qt/themes/dark/name.cfg\",\n \"qt/themes/dark/style.css\",\n ]\n },\n scripts=['electrum'],\n data_files=data_files,\n description=\"Lightweight Bitcoin Wallet\",\n author=\"Thomas Voegtlin\",\n author_email=\"[email protected]\",\n license=\"GNU GPLv3\",\n url=\"https://electrum.org\",\n long_description=\"\"\"Lightweight Bitcoin Wallet\"\"\"\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/python\n\n# python setup.py sdist --format=zip,gztar\n\nfrom setuptools import setup\nimport os\nimport sys\nimport platform\nimport 
imp\n\n\nversion = imp.load_source('version', 'lib/version.py')\n\nif sys.version_info[:3] < (2, 7, 0):\n sys.exit(\"Error: Electrum requires Python version >= 2.7.0...\")\n\n\n\ndata_files = []\nif platform.system() == 'Linux' or platform.system() == 'FreeBSD':\n usr_share = os.path.join(sys.prefix, \"share\")\n data_files += [\n (os.path.join(usr_share, 'applications/'), ['electrum.desktop']),\n (os.path.join(usr_share, 'pixmaps/'), ['icons/electrum.png'])\n ]\n\n\nsetup(\n name=\"Electrum\",\n version=version.ELECTRUM_VERSION,\n install_requires=[\n 'slowaes>=0.1a1',\n 'ecdsa>=0.9',\n 'pbkdf2',\n 'requests',\n 'pyasn1-modules',\n 'pyasn1',\n 'qrcode',\n 'protobuf',\n 'tlslite',\n 'dnspython',\n ],\n package_dir={\n 'electrum': 'lib',\n 'electrum_gui': 'gui',\n 'electrum_plugins': 'plugins',\n },\n packages=['electrum','electrum_gui','electrum_gui.qt','electrum_plugins'],\n package_data={\n 'electrum': [\n 'wordlist/*.txt',\n 'locale/*/LC_MESSAGES/electrum.mo',\n ],\n 'electrum_gui': [\n \"qt/themes/cleanlook/name.cfg\",\n \"qt/themes/cleanlook/style.css\",\n \"qt/themes/sahara/name.cfg\",\n \"qt/themes/sahara/style.css\",\n \"qt/themes/dark/name.cfg\",\n \"qt/themes/dark/style.css\",\n ]\n },\n scripts=['electrum'],\n data_files=data_files,\n description=\"Lightweight Bitcoin Wallet\",\n author=\"Thomas Voegtlin\",\n author_email=\"[email protected]\",\n license=\"GNU GPLv3\",\n url=\"https://electrum.org\",\n long_description=\"\"\"Lightweight Bitcoin Wallet\"\"\"\n)\n", "path": "setup.py"}]}
| 1,196 | 117 |
gh_patches_debug_2600
|
rasdani/github-patches
|
git_diff
|
dmlc__dgl-2505
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
jtnn example error
Running `NOCUDA=1 python3 vaetrain_dgl.py` fails with `NameError: name 'tensor' is not defined` at dgl/examples/pytorch/jtnn/jtnn/nnutils.py, line 11, in `cuda` (`return tensor`).

Environment:
dgl 0.5.3
torch 1.7.1
macOS
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/pytorch/jtnn/jtnn/nnutils.py`
Content:
```
1 import torch
2 import torch.nn as nn
3 import os
4 import dgl
5
6
7 def cuda(x):
8 if torch.cuda.is_available() and not os.getenv('NOCUDA', None):
9 return x.to(torch.device('cuda')) # works for both DGLGraph and tensor
10 else:
11 return tensor
12
13
14 class GRUUpdate(nn.Module):
15 def __init__(self, hidden_size):
16 nn.Module.__init__(self)
17 self.hidden_size = hidden_size
18
19 self.W_z = nn.Linear(2 * hidden_size, hidden_size)
20 self.W_r = nn.Linear(hidden_size, hidden_size, bias=False)
21 self.U_r = nn.Linear(hidden_size, hidden_size)
22 self.W_h = nn.Linear(2 * hidden_size, hidden_size)
23
24 def update_zm(self, node):
25 src_x = node.data['src_x']
26 s = node.data['s']
27 rm = node.data['accum_rm']
28 z = torch.sigmoid(self.W_z(torch.cat([src_x, s], 1)))
29 m = torch.tanh(self.W_h(torch.cat([src_x, rm], 1)))
30 m = (1 - z) * s + z * m
31 return {'m': m, 'z': z}
32
33 def update_r(self, node, zm=None):
34 dst_x = node.data['dst_x']
35 m = node.data['m'] if zm is None else zm['m']
36 r_1 = self.W_r(dst_x)
37 r_2 = self.U_r(m)
38 r = torch.sigmoid(r_1 + r_2)
39 return {'r': r, 'rm': r * m}
40
41 def forward(self, node):
42 dic = self.update_zm(node)
43 dic.update(self.update_r(node, zm=dic))
44 return dic
45
46 def tocpu(g):
47 src, dst = g.edges()
48 src = src.cpu()
49 dst = dst.cpu()
50 return dgl.graph((src, dst), num_nodes=g.number_of_nodes())
51
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/examples/pytorch/jtnn/jtnn/nnutils.py b/examples/pytorch/jtnn/jtnn/nnutils.py
--- a/examples/pytorch/jtnn/jtnn/nnutils.py
+++ b/examples/pytorch/jtnn/jtnn/nnutils.py
@@ -8,7 +8,7 @@
if torch.cuda.is_available() and not os.getenv('NOCUDA', None):
return x.to(torch.device('cuda')) # works for both DGLGraph and tensor
else:
- return tensor
+ return x
class GRUUpdate(nn.Module):
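
A minimal sketch exercising the fixed fallback path; the import path assumes the jtnn example directory is on `sys.path`:

```python
# exercising the NOCUDA fallback of the patched cuda(); the import path is
# an assumption about how the example package is laid out
import os

os.environ["NOCUDA"] = "1"

import torch
from jtnn.nnutils import cuda

x = torch.zeros(3)
assert cuda(x) is x  # previously raised NameError: name 'tensor' is not defined
```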
|
{"golden_diff": "diff --git a/examples/pytorch/jtnn/jtnn/nnutils.py b/examples/pytorch/jtnn/jtnn/nnutils.py\n--- a/examples/pytorch/jtnn/jtnn/nnutils.py\n+++ b/examples/pytorch/jtnn/jtnn/nnutils.py\n@@ -8,7 +8,7 @@\n if torch.cuda.is_available() and not os.getenv('NOCUDA', None):\n return x.to(torch.device('cuda')) # works for both DGLGraph and tensor\n else:\n- return tensor\n+ return x\n \n \n class GRUUpdate(nn.Module):\n", "issue": "jtnn example error\nNOCUDA=1 python3 vaetrain_dgl.py\r\nit shows NameError: name 'tensor' is not defined in dgl/examples/pytorch/jtnn/jtnn/nnutils.py\", line 11, in cuda\r\n return tensor\r\n\r\nenv: \r\ndgl 0.5.3\r\ntorch 1.7.1\r\nmac os\r\n\r\n\r\n\n", "before_files": [{"content": "import torch\nimport torch.nn as nn\nimport os\nimport dgl\n\n\ndef cuda(x):\n if torch.cuda.is_available() and not os.getenv('NOCUDA', None):\n return x.to(torch.device('cuda')) # works for both DGLGraph and tensor\n else:\n return tensor\n\n\nclass GRUUpdate(nn.Module):\n def __init__(self, hidden_size):\n nn.Module.__init__(self)\n self.hidden_size = hidden_size\n\n self.W_z = nn.Linear(2 * hidden_size, hidden_size)\n self.W_r = nn.Linear(hidden_size, hidden_size, bias=False)\n self.U_r = nn.Linear(hidden_size, hidden_size)\n self.W_h = nn.Linear(2 * hidden_size, hidden_size)\n\n def update_zm(self, node):\n src_x = node.data['src_x']\n s = node.data['s']\n rm = node.data['accum_rm']\n z = torch.sigmoid(self.W_z(torch.cat([src_x, s], 1)))\n m = torch.tanh(self.W_h(torch.cat([src_x, rm], 1)))\n m = (1 - z) * s + z * m\n return {'m': m, 'z': z}\n\n def update_r(self, node, zm=None):\n dst_x = node.data['dst_x']\n m = node.data['m'] if zm is None else zm['m']\n r_1 = self.W_r(dst_x)\n r_2 = self.U_r(m)\n r = torch.sigmoid(r_1 + r_2)\n return {'r': r, 'rm': r * m}\n\n def forward(self, node):\n dic = self.update_zm(node)\n dic.update(self.update_r(node, zm=dic))\n return dic\n\ndef tocpu(g):\n src, dst = g.edges()\n src = src.cpu()\n dst = dst.cpu()\n return dgl.graph((src, dst), num_nodes=g.number_of_nodes())\n", "path": "examples/pytorch/jtnn/jtnn/nnutils.py"}], "after_files": [{"content": "import torch\nimport torch.nn as nn\nimport os\nimport dgl\n\n\ndef cuda(x):\n if torch.cuda.is_available() and not os.getenv('NOCUDA', None):\n return x.to(torch.device('cuda')) # works for both DGLGraph and tensor\n else:\n return x\n\n\nclass GRUUpdate(nn.Module):\n def __init__(self, hidden_size):\n nn.Module.__init__(self)\n self.hidden_size = hidden_size\n\n self.W_z = nn.Linear(2 * hidden_size, hidden_size)\n self.W_r = nn.Linear(hidden_size, hidden_size, bias=False)\n self.U_r = nn.Linear(hidden_size, hidden_size)\n self.W_h = nn.Linear(2 * hidden_size, hidden_size)\n\n def update_zm(self, node):\n src_x = node.data['src_x']\n s = node.data['s']\n rm = node.data['accum_rm']\n z = torch.sigmoid(self.W_z(torch.cat([src_x, s], 1)))\n m = torch.tanh(self.W_h(torch.cat([src_x, rm], 1)))\n m = (1 - z) * s + z * m\n return {'m': m, 'z': z}\n\n def update_r(self, node, zm=None):\n dst_x = node.data['dst_x']\n m = node.data['m'] if zm is None else zm['m']\n r_1 = self.W_r(dst_x)\n r_2 = self.U_r(m)\n r = torch.sigmoid(r_1 + r_2)\n return {'r': r, 'rm': r * m}\n\n def forward(self, node):\n dic = self.update_zm(node)\n dic.update(self.update_r(node, zm=dic))\n return dic\n\ndef tocpu(g):\n src, dst = g.edges()\n src = src.cpu()\n dst = dst.cpu()\n return dgl.graph((src, dst), num_nodes=g.number_of_nodes())\n", "path": "examples/pytorch/jtnn/jtnn/nnutils.py"}]}
| 875 | 133 |
gh_patches_debug_178
|
rasdani/github-patches
|
git_diff
|
napalm-automation__napalm-904
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`get_lldp_neighbors_detail()` fails on Arista 7150S
```python
In [1]: from napalm.eos import EOSDriver
In [2]: from getpass import getpass
In [3]: with EOSDriver("arista", "bewing", getpass()) as d:
...: print(d.get_lldp_neighbors_detail())
...:
Password:
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-3-85f875e30fe3> in <module>
1 with EOSDriver("arista", "bewing", getpass()) as d:
----> 2 print(d.get_lldp_neighbors_detail())
3
/mnt/c/Users/bewing/PycharmProjects/napalm/napalm/eos/eos.py in get_lldp_neighbors_detail(self, interface)
647 lldp_neighbors_out[interface] = []
648 capabilities = neighbor.get("systemCapabilities", {})
--> 649 available_capabilities = self._transform_lldp_capab(capabilities.keys())
650 enabled_capabilities = self._transform_lldp_capab(
651 [capab for capab, enabled in capabilities.items() if enabled]
/mnt/c/Users/bewing/PycharmProjects/napalm/napalm/eos/eos.py in _transform_lldp_capab(self, capabilities)
616
617 def _transform_lldp_capab(self, capabilities):
--> 618 return sorted([LLDP_CAPAB_TRANFORM_TABLE[c.lower()] for c in capabilities])
619
620 def get_lldp_neighbors_detail(self, interface=""):
/mnt/c/Users/bewing/PycharmProjects/napalm/napalm/eos/eos.py in <listcomp>(.0)
616
617 def _transform_lldp_capab(self, capabilities):
--> 618 return sorted([LLDP_CAPAB_TRANFORM_TABLE[c.lower()] for c in capabilities])
619
620 def get_lldp_neighbors_detail(self, interface=""):
KeyError: 'stationonly'
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `napalm/eos/constants.py`
Content:
```
1 # Based on:
2 # https://code.getnoc.com/noc/noc/blob/6f3db2a6e4b1ece77aaf4c4c98413e35ff64643a/sa/profiles/Arista/EOS/get_lldp_neighbors.py#L76-79
3 LLDP_CAPAB_TRANFORM_TABLE = {
4 "other": "other",
5 "repeater": "repeater",
6 "bridge": "bridge",
7 "wlanaccesspoint": "wlan-access-point",
8 "router": "router",
9 "telephone": "telephone",
10 "docsis": "docsis-cable-device",
11 "station": "station",
12 }
13
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/napalm/eos/constants.py b/napalm/eos/constants.py
--- a/napalm/eos/constants.py
+++ b/napalm/eos/constants.py
@@ -9,4 +9,5 @@
"telephone": "telephone",
"docsis": "docsis-cable-device",
"station": "station",
+ "stationonly": "station",
}
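
A sketch of the lookup that previously raised `KeyError: 'stationonly'`, using the updated table and mirroring `_transform_lldp_capab`:

```python
# the capability transform that used to raise KeyError: 'stationonly'
from napalm.eos.constants import LLDP_CAPAB_TRANFORM_TABLE

capabilities = ["stationOnly", "bridge"]
assert sorted(LLDP_CAPAB_TRANFORM_TABLE[c.lower()] for c in capabilities) == [
    "bridge",
    "station",
]
```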
|
{"golden_diff": "diff --git a/napalm/eos/constants.py b/napalm/eos/constants.py\n--- a/napalm/eos/constants.py\n+++ b/napalm/eos/constants.py\n@@ -9,4 +9,5 @@\n \"telephone\": \"telephone\",\n \"docsis\": \"docsis-cable-device\",\n \"station\": \"station\",\n+ \"stationonly\": \"station\",\n }\n", "issue": "`get_lldp_neighbors_detail()` fails on Arista 7150S\n```python\r\nIn [1]: from napalm.eos import EOSDriver\r\n\r\nIn [2]: from getpass import getpass\r\n\r\nIn [3]: with EOSDriver(\"arista\", \"bewing\", getpass()) as d:\r\n ...: print(d.get_lldp_neighbors_detail())\r\n ...:\r\nPassword:\r\n---------------------------------------------------------------------------\r\nKeyError Traceback (most recent call last)\r\n<ipython-input-3-85f875e30fe3> in <module>\r\n 1 with EOSDriver(\"arista\", \"bewing\", getpass()) as d:\r\n----> 2 print(d.get_lldp_neighbors_detail())\r\n 3\r\n\r\n/mnt/c/Users/bewing/PycharmProjects/napalm/napalm/eos/eos.py in get_lldp_neighbors_detail(self, interface)\r\n 647 lldp_neighbors_out[interface] = []\r\n 648 capabilities = neighbor.get(\"systemCapabilities\", {})\r\n--> 649 available_capabilities = self._transform_lldp_capab(capabilities.keys())\r\n 650 enabled_capabilities = self._transform_lldp_capab(\r\n 651 [capab for capab, enabled in capabilities.items() if enabled]\r\n\r\n/mnt/c/Users/bewing/PycharmProjects/napalm/napalm/eos/eos.py in _transform_lldp_capab(self, capabilities)\r\n 616\r\n 617 def _transform_lldp_capab(self, capabilities):\r\n--> 618 return sorted([LLDP_CAPAB_TRANFORM_TABLE[c.lower()] for c in capabilities])\r\n 619\r\n 620 def get_lldp_neighbors_detail(self, interface=\"\"):\r\n\r\n/mnt/c/Users/bewing/PycharmProjects/napalm/napalm/eos/eos.py in <listcomp>(.0)\r\n 616\r\n 617 def _transform_lldp_capab(self, capabilities):\r\n--> 618 return sorted([LLDP_CAPAB_TRANFORM_TABLE[c.lower()] for c in capabilities])\r\n 619\r\n 620 def get_lldp_neighbors_detail(self, interface=\"\"):\r\n\r\nKeyError: 'stationonly'\r\n```\n", "before_files": [{"content": "# Based on:\n# https://code.getnoc.com/noc/noc/blob/6f3db2a6e4b1ece77aaf4c4c98413e35ff64643a/sa/profiles/Arista/EOS/get_lldp_neighbors.py#L76-79\nLLDP_CAPAB_TRANFORM_TABLE = {\n \"other\": \"other\",\n \"repeater\": \"repeater\",\n \"bridge\": \"bridge\",\n \"wlanaccesspoint\": \"wlan-access-point\",\n \"router\": \"router\",\n \"telephone\": \"telephone\",\n \"docsis\": \"docsis-cable-device\",\n \"station\": \"station\",\n}\n", "path": "napalm/eos/constants.py"}], "after_files": [{"content": "# Based on:\n# https://code.getnoc.com/noc/noc/blob/6f3db2a6e4b1ece77aaf4c4c98413e35ff64643a/sa/profiles/Arista/EOS/get_lldp_neighbors.py#L76-79\nLLDP_CAPAB_TRANFORM_TABLE = {\n \"other\": \"other\",\n \"repeater\": \"repeater\",\n \"bridge\": \"bridge\",\n \"wlanaccesspoint\": \"wlan-access-point\",\n \"router\": \"router\",\n \"telephone\": \"telephone\",\n \"docsis\": \"docsis-cable-device\",\n \"station\": \"station\",\n \"stationonly\": \"station\",\n}\n", "path": "napalm/eos/constants.py"}]}
| 931 | 86 |
gh_patches_debug_34642
|
rasdani/github-patches
|
git_diff
|
open-telemetry__opentelemetry-python-1571
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Tracer and Meter provider lack environment variables
Now that `Configuration` has been removed, both providers should use consistent environment variables.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `opentelemetry-api/src/opentelemetry/environment_variables/__init__.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 OTEL_PROPAGATORS = "OTEL_PROPAGATORS"
16 OTEL_PYTHON_CONTEXT = "OTEL_PYTHON_CONTEXT"
17 OTEL_PYTHON_DISABLED_INSTRUMENTATIONS = "OTEL_PYTHON_DISABLED_INSTRUMENTATIONS"
18 OTEL_PYTHON_IDS_GENERATOR = "OTEL_PYTHON_IDS_GENERATOR"
19 OTEL_PYTHON_SERVICE_NAME = "OTEL_PYTHON_SERVICE_NAME"
20 OTEL_TRACE_EXPORTER = "OTEL_TRACE_EXPORTER"
21 OTEL_METRICS_EXPORTER = "OTEL_METRICS_EXPORTER"
22
```
Path: `opentelemetry-api/src/opentelemetry/util/__init__.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import time
16 from logging import getLogger
17 from os import environ
18 from typing import TYPE_CHECKING, Union, cast
19
20 from pkg_resources import iter_entry_points
21
22 if TYPE_CHECKING:
23 from opentelemetry.metrics import MeterProvider
24 from opentelemetry.trace import TracerProvider
25
26 Provider = Union["TracerProvider", "MeterProvider"]
27
28 logger = getLogger(__name__)
29
30 # Since we want API users to be able to provide timestamps,
31 # this needs to be in the API.
32
33 try:
34 time_ns = time.time_ns
35 # Python versions < 3.7
36 except AttributeError:
37
38 def time_ns() -> int:
39 return int(time.time() * 1e9)
40
41
42 def _load_provider(provider: str) -> Provider:
43 try:
44 entry_point = next(
45 iter_entry_points(
46 "opentelemetry_{}".format(provider),
47 name=cast(
48 str,
49 environ.get(
50 provider.upper(), "default_{}".format(provider),
51 ),
52 ),
53 )
54 )
55 return cast(Provider, entry_point.load()(),)
56 except Exception: # pylint: disable=broad-except
57 logger.error("Failed to load configured provider %s", provider)
58 raise
59
60
61 def _load_meter_provider(provider: str) -> "MeterProvider":
62 return cast("MeterProvider", _load_provider(provider))
63
64
65 def _load_trace_provider(provider: str) -> "TracerProvider":
66 return cast("TracerProvider", _load_provider(provider))
67
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/opentelemetry-api/src/opentelemetry/environment_variables/__init__.py b/opentelemetry-api/src/opentelemetry/environment_variables/__init__.py
--- a/opentelemetry-api/src/opentelemetry/environment_variables/__init__.py
+++ b/opentelemetry-api/src/opentelemetry/environment_variables/__init__.py
@@ -19,3 +19,5 @@
OTEL_PYTHON_SERVICE_NAME = "OTEL_PYTHON_SERVICE_NAME"
OTEL_TRACE_EXPORTER = "OTEL_TRACE_EXPORTER"
OTEL_METRICS_EXPORTER = "OTEL_METRICS_EXPORTER"
+OTEL_PYTHON_TRACER_PROVIDER = "OTEL_PYTHON_TRACER_PROVIDER"
+OTEL_PYTHON_METER_PROVIDER = "OTEL_PYTHON_METER_PROVIDER"
diff --git a/opentelemetry-api/src/opentelemetry/util/__init__.py b/opentelemetry-api/src/opentelemetry/util/__init__.py
--- a/opentelemetry-api/src/opentelemetry/util/__init__.py
+++ b/opentelemetry-api/src/opentelemetry/util/__init__.py
@@ -19,6 +19,11 @@
from pkg_resources import iter_entry_points
+from opentelemetry.environment_variables import (
+ OTEL_PYTHON_METER_PROVIDER,
+ OTEL_PYTHON_TRACER_PROVIDER,
+)
+
if TYPE_CHECKING:
from opentelemetry.metrics import MeterProvider
from opentelemetry.trace import TracerProvider
@@ -39,7 +44,9 @@
return int(time.time() * 1e9)
-def _load_provider(provider: str) -> Provider:
+def _load_provider(
+ provider_environment_variable: str, provider: str
+) -> Provider:
try:
entry_point = next(
iter_entry_points(
@@ -47,7 +54,8 @@
name=cast(
str,
environ.get(
- provider.upper(), "default_{}".format(provider),
+ provider_environment_variable,
+ "default_{}".format(provider),
),
),
)
@@ -59,8 +67,13 @@
def _load_meter_provider(provider: str) -> "MeterProvider":
- return cast("MeterProvider", _load_provider(provider))
+ return cast(
+ "MeterProvider", _load_provider(OTEL_PYTHON_METER_PROVIDER, provider),
+ )
def _load_trace_provider(provider: str) -> "TracerProvider":
- return cast("TracerProvider", _load_provider(provider))
+ return cast(
+ "TracerProvider",
+ _load_provider(OTEL_PYTHON_TRACER_PROVIDER, provider),
+ )
|
{"golden_diff": "diff --git a/opentelemetry-api/src/opentelemetry/environment_variables/__init__.py b/opentelemetry-api/src/opentelemetry/environment_variables/__init__.py\n--- a/opentelemetry-api/src/opentelemetry/environment_variables/__init__.py\n+++ b/opentelemetry-api/src/opentelemetry/environment_variables/__init__.py\n@@ -19,3 +19,5 @@\n OTEL_PYTHON_SERVICE_NAME = \"OTEL_PYTHON_SERVICE_NAME\"\n OTEL_TRACE_EXPORTER = \"OTEL_TRACE_EXPORTER\"\n OTEL_METRICS_EXPORTER = \"OTEL_METRICS_EXPORTER\"\n+OTEL_PYTHON_TRACER_PROVIDER = \"OTEL_PYTHON_TRACER_PROVIDER\"\n+OTEL_PYTHON_METER_PROVIDER = \"OTEL_PYTHON_METER_PROVIDER\"\ndiff --git a/opentelemetry-api/src/opentelemetry/util/__init__.py b/opentelemetry-api/src/opentelemetry/util/__init__.py\n--- a/opentelemetry-api/src/opentelemetry/util/__init__.py\n+++ b/opentelemetry-api/src/opentelemetry/util/__init__.py\n@@ -19,6 +19,11 @@\n \n from pkg_resources import iter_entry_points\n \n+from opentelemetry.environment_variables import (\n+ OTEL_PYTHON_METER_PROVIDER,\n+ OTEL_PYTHON_TRACER_PROVIDER,\n+)\n+\n if TYPE_CHECKING:\n from opentelemetry.metrics import MeterProvider\n from opentelemetry.trace import TracerProvider\n@@ -39,7 +44,9 @@\n return int(time.time() * 1e9)\n \n \n-def _load_provider(provider: str) -> Provider:\n+def _load_provider(\n+ provider_environment_variable: str, provider: str\n+) -> Provider:\n try:\n entry_point = next(\n iter_entry_points(\n@@ -47,7 +54,8 @@\n name=cast(\n str,\n environ.get(\n- provider.upper(), \"default_{}\".format(provider),\n+ provider_environment_variable,\n+ \"default_{}\".format(provider),\n ),\n ),\n )\n@@ -59,8 +67,13 @@\n \n \n def _load_meter_provider(provider: str) -> \"MeterProvider\":\n- return cast(\"MeterProvider\", _load_provider(provider))\n+ return cast(\n+ \"MeterProvider\", _load_provider(OTEL_PYTHON_METER_PROVIDER, provider),\n+ )\n \n \n def _load_trace_provider(provider: str) -> \"TracerProvider\":\n- return cast(\"TracerProvider\", _load_provider(provider))\n+ return cast(\n+ \"TracerProvider\",\n+ _load_provider(OTEL_PYTHON_TRACER_PROVIDER, provider),\n+ )\n", "issue": "Tracer and Meter provider lack environment variables\nNow that `Configuration` has been removed, both providers should use consistent environment variables.\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nOTEL_PROPAGATORS = \"OTEL_PROPAGATORS\"\nOTEL_PYTHON_CONTEXT = \"OTEL_PYTHON_CONTEXT\"\nOTEL_PYTHON_DISABLED_INSTRUMENTATIONS = \"OTEL_PYTHON_DISABLED_INSTRUMENTATIONS\"\nOTEL_PYTHON_IDS_GENERATOR = \"OTEL_PYTHON_IDS_GENERATOR\"\nOTEL_PYTHON_SERVICE_NAME = \"OTEL_PYTHON_SERVICE_NAME\"\nOTEL_TRACE_EXPORTER = \"OTEL_TRACE_EXPORTER\"\nOTEL_METRICS_EXPORTER = \"OTEL_METRICS_EXPORTER\"\n", "path": "opentelemetry-api/src/opentelemetry/environment_variables/__init__.py"}, {"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance 
with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport time\nfrom logging import getLogger\nfrom os import environ\nfrom typing import TYPE_CHECKING, Union, cast\n\nfrom pkg_resources import iter_entry_points\n\nif TYPE_CHECKING:\n from opentelemetry.metrics import MeterProvider\n from opentelemetry.trace import TracerProvider\n\nProvider = Union[\"TracerProvider\", \"MeterProvider\"]\n\nlogger = getLogger(__name__)\n\n# Since we want API users to be able to provide timestamps,\n# this needs to be in the API.\n\ntry:\n time_ns = time.time_ns\n# Python versions < 3.7\nexcept AttributeError:\n\n def time_ns() -> int:\n return int(time.time() * 1e9)\n\n\ndef _load_provider(provider: str) -> Provider:\n try:\n entry_point = next(\n iter_entry_points(\n \"opentelemetry_{}\".format(provider),\n name=cast(\n str,\n environ.get(\n provider.upper(), \"default_{}\".format(provider),\n ),\n ),\n )\n )\n return cast(Provider, entry_point.load()(),)\n except Exception: # pylint: disable=broad-except\n logger.error(\"Failed to load configured provider %s\", provider)\n raise\n\n\ndef _load_meter_provider(provider: str) -> \"MeterProvider\":\n return cast(\"MeterProvider\", _load_provider(provider))\n\n\ndef _load_trace_provider(provider: str) -> \"TracerProvider\":\n return cast(\"TracerProvider\", _load_provider(provider))\n", "path": "opentelemetry-api/src/opentelemetry/util/__init__.py"}], "after_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nOTEL_PROPAGATORS = \"OTEL_PROPAGATORS\"\nOTEL_PYTHON_CONTEXT = \"OTEL_PYTHON_CONTEXT\"\nOTEL_PYTHON_DISABLED_INSTRUMENTATIONS = \"OTEL_PYTHON_DISABLED_INSTRUMENTATIONS\"\nOTEL_PYTHON_IDS_GENERATOR = \"OTEL_PYTHON_IDS_GENERATOR\"\nOTEL_PYTHON_SERVICE_NAME = \"OTEL_PYTHON_SERVICE_NAME\"\nOTEL_TRACE_EXPORTER = \"OTEL_TRACE_EXPORTER\"\nOTEL_METRICS_EXPORTER = \"OTEL_METRICS_EXPORTER\"\nOTEL_PYTHON_TRACER_PROVIDER = \"OTEL_PYTHON_TRACER_PROVIDER\"\nOTEL_PYTHON_METER_PROVIDER = \"OTEL_PYTHON_METER_PROVIDER\"\n", "path": "opentelemetry-api/src/opentelemetry/environment_variables/__init__.py"}, {"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific 
language governing permissions and\n# limitations under the License.\n\nimport time\nfrom logging import getLogger\nfrom os import environ\nfrom typing import TYPE_CHECKING, Union, cast\n\nfrom pkg_resources import iter_entry_points\n\nfrom opentelemetry.environment_variables import (\n OTEL_PYTHON_METER_PROVIDER,\n OTEL_PYTHON_TRACER_PROVIDER,\n)\n\nif TYPE_CHECKING:\n from opentelemetry.metrics import MeterProvider\n from opentelemetry.trace import TracerProvider\n\nProvider = Union[\"TracerProvider\", \"MeterProvider\"]\n\nlogger = getLogger(__name__)\n\n# Since we want API users to be able to provide timestamps,\n# this needs to be in the API.\n\ntry:\n time_ns = time.time_ns\n# Python versions < 3.7\nexcept AttributeError:\n\n def time_ns() -> int:\n return int(time.time() * 1e9)\n\n\ndef _load_provider(\n provider_environment_variable: str, provider: str\n) -> Provider:\n try:\n entry_point = next(\n iter_entry_points(\n \"opentelemetry_{}\".format(provider),\n name=cast(\n str,\n environ.get(\n provider_environment_variable,\n \"default_{}\".format(provider),\n ),\n ),\n )\n )\n return cast(Provider, entry_point.load()(),)\n except Exception: # pylint: disable=broad-except\n logger.error(\"Failed to load configured provider %s\", provider)\n raise\n\n\ndef _load_meter_provider(provider: str) -> \"MeterProvider\":\n return cast(\n \"MeterProvider\", _load_provider(OTEL_PYTHON_METER_PROVIDER, provider),\n )\n\n\ndef _load_trace_provider(provider: str) -> \"TracerProvider\":\n return cast(\n \"TracerProvider\",\n _load_provider(OTEL_PYTHON_TRACER_PROVIDER, provider),\n )\n", "path": "opentelemetry-api/src/opentelemetry/util/__init__.py"}]}
| 1,159 | 566 |
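The fix this record captures boils down to one reusable pattern: select a pluggable implementation through a dedicated, explicitly named environment variable instead of deriving the variable name from the provider string. The sketch below is a minimal, self-contained illustration of that pattern; the registry names (`PROVIDER_ENV_VARS`, `IMPLEMENTATIONS`) and the `MYLIB_*` variables are invented placeholders, not part of the OpenTelemetry API.

```python
import os
from typing import Callable, Dict

# Hypothetical mapping from a provider kind to the environment variable
# that selects a concrete implementation for it.
PROVIDER_ENV_VARS: Dict[str, str] = {
    "tracer_provider": "MYLIB_TRACER_PROVIDER",
    "meter_provider": "MYLIB_METER_PROVIDER",
}

# Hypothetical registry of available implementations, keyed by name.
IMPLEMENTATIONS: Dict[str, Callable[[], object]] = {
    "default_tracer_provider": object,
    "default_meter_provider": object,
}


def load_provider(kind: str) -> object:
    # Read the selector from the variable dedicated to this provider
    # kind, falling back to the library default when it is unset.
    name = os.environ.get(PROVIDER_ENV_VARS[kind], "default_{}".format(kind))
    return IMPLEMENTATIONS[name]()


if __name__ == "__main__":
    os.environ["MYLIB_TRACER_PROVIDER"] = "default_tracer_provider"
    print(load_provider("tracer_provider"))
```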
gh_patches_debug_3448
|
rasdani/github-patches
|
git_diff
|
SciTools__cartopy-1245
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
SlippyImageArtist cannot be composited
For example, take the WMTS example and add a second layer. Then attempt to save as a PDF.
``` python
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
url = 'http://map1c.vis.earthdata.nasa.gov/wmts-geo/wmts.cgi'
layer1 = 'VIIRS_CityLights_2012'
layer2 = 'ASTER_GDEM_Color_Index'
ax = plt.axes(projection=ccrs.PlateCarree())
ax.add_wmts(url, layer1)
ax.add_wmts(url, layer2)
ax.set_extent((-15, 25, 35, 60))
plt.title('Suomi NPP Earth at night April/October 2012')
plt.savefig('test.pdf')
plt.show()
```
which results in:
``` python
Traceback (most recent call last):
File "wmts.py", line 33, in main
plt.savefig('test.pdf')
File "/usr/lib64/python2.7/site-packages/matplotlib/pyplot.py", line 577, in savefig
res = fig.savefig(*args, **kwargs)
File "/usr/lib64/python2.7/site-packages/matplotlib/figure.py", line 1476, in savefig
self.canvas.print_figure(*args, **kwargs)
File "/usr/lib64/python2.7/site-packages/matplotlib/backends/backend_qt5agg.py", line 161, in print_figure
FigureCanvasAgg.print_figure(self, *args, **kwargs)
File "/usr/lib64/python2.7/site-packages/matplotlib/backend_bases.py", line 2211, in print_figure
**kwargs)
File "/usr/lib64/python2.7/site-packages/matplotlib/backends/backend_pdf.py", line 2485, in print_pdf
self.figure.draw(renderer)
File "/usr/lib64/python2.7/site-packages/matplotlib/artist.py", line 59, in draw_wrapper
draw(artist, renderer, *args, **kwargs)
File "/usr/lib64/python2.7/site-packages/matplotlib/figure.py", line 1085, in draw
func(*args)
File "/usr/lib64/python2.7/site-packages/matplotlib/artist.py", line 59, in draw_wrapper
draw(artist, renderer, *args, **kwargs)
File "/usr/lib64/python2.7/site-packages/cartopy/mpl/geoaxes.py", line 359, in draw
inframe=inframe)
File "/usr/lib64/python2.7/site-packages/matplotlib/artist.py", line 59, in draw_wrapper
draw(artist, renderer, *args, **kwargs)
File "/usr/lib64/python2.7/site-packages/matplotlib/axes/_base.py", line 2081, in draw
for z, im in zorder_images]
File "/usr/lib64/python2.7/site-packages/matplotlib/image.py", line 580, in make_image
raise RuntimeError('You must first set the image'
RuntimeError: You must first set the image array or the image attribute
```
I think maybe `SlippyImageArtist` should be overriding `make_image`, too.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lib/cartopy/mpl/slippy_image_artist.py`
Content:
```
1 # (C) British Crown Copyright 2014 - 2018, Met Office
2 #
3 # This file is part of cartopy.
4 #
5 # cartopy is free software: you can redistribute it and/or modify it under
6 # the terms of the GNU Lesser General Public License as published by the
7 # Free Software Foundation, either version 3 of the License, or
8 # (at your option) any later version.
9 #
10 # cartopy is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public License
16 # along with cartopy. If not, see <https://www.gnu.org/licenses/>.
17 """
18 Define the SlippyImageArtist class, which interfaces with
19 :class:`cartopy.io.RasterSource` instances at draw time, for interactive
20 dragging and zooming of raster data.
21
22 """
23
24 from __future__ import (absolute_import, division, print_function)
25
26 from matplotlib.image import AxesImage
27 import matplotlib.artist
28
29
30 class SlippyImageArtist(AxesImage):
31
32 """
33 A subclass of :class:`~matplotlib.image.AxesImage` which provides an
34 interface for getting a raster from the given object with interactive
35 slippy map type functionality.
36
37 Kwargs are passed to the AxesImage constructor.
38
39 """
40 def __init__(self, ax, raster_source, **kwargs):
41 self.raster_source = raster_source
42 super(SlippyImageArtist, self).__init__(ax, **kwargs)
43 self.set_clip_path(ax.background_patch)
44 self.cache = []
45
46 ax.figure.canvas.mpl_connect('button_press_event', self.on_press)
47 ax.figure.canvas.mpl_connect('button_release_event', self.on_release)
48
49 self.on_release()
50
51 def on_press(self, event=None):
52 self.user_is_interacting = True
53
54 def on_release(self, event=None):
55 self.user_is_interacting = False
56 self.stale = True
57
58 @matplotlib.artist.allow_rasterization
59 def draw(self, renderer, *args, **kwargs):
60 if not self.get_visible():
61 return
62
63 ax = self.axes
64 window_extent = ax.get_window_extent()
65 [x1, y1], [x2, y2] = ax.viewLim.get_points()
66 if not self.user_is_interacting:
67 located_images = self.raster_source.fetch_raster(
68 ax.projection, extent=[x1, x2, y1, y2],
69 target_resolution=(window_extent.width, window_extent.height))
70 self.cache = located_images
71
72 for img, extent in self.cache:
73 self.set_array(img)
74 with ax.hold_limits():
75 self.set_extent(extent)
76 super(SlippyImageArtist, self).draw(renderer, *args, **kwargs)
77
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/lib/cartopy/mpl/slippy_image_artist.py b/lib/cartopy/mpl/slippy_image_artist.py
--- a/lib/cartopy/mpl/slippy_image_artist.py
+++ b/lib/cartopy/mpl/slippy_image_artist.py
@@ -74,3 +74,8 @@
with ax.hold_limits():
self.set_extent(extent)
super(SlippyImageArtist, self).draw(renderer, *args, **kwargs)
+
+ def can_composite(self):
+ # As per https://github.com/SciTools/cartopy/issues/689, disable
+ # compositing multiple raster sources.
+ return False
|
{"golden_diff": "diff --git a/lib/cartopy/mpl/slippy_image_artist.py b/lib/cartopy/mpl/slippy_image_artist.py\n--- a/lib/cartopy/mpl/slippy_image_artist.py\n+++ b/lib/cartopy/mpl/slippy_image_artist.py\n@@ -74,3 +74,8 @@\n with ax.hold_limits():\n self.set_extent(extent)\n super(SlippyImageArtist, self).draw(renderer, *args, **kwargs)\n+\n+ def can_composite(self):\n+ # As per https://github.com/SciTools/cartopy/issues/689, disable\n+ # compositing multiple raster sources.\n+ return False\n", "issue": "SlippyImageArtist cannot be composited\nFor example, take the WMTS example and add a second layer. Then attempt to save as a PDF.\n\n``` python\nimport cartopy.crs as ccrs\nimport matplotlib.pyplot as plt\n\nurl = 'http://map1c.vis.earthdata.nasa.gov/wmts-geo/wmts.cgi'\nlayer1 = 'VIIRS_CityLights_2012'\nlayer2 = 'ASTER_GDEM_Color_Index'\n\nax = plt.axes(projection=ccrs.PlateCarree())\nax.add_wmts(url, layer1)\nax.add_wmts(url, layer2)\nax.set_extent((-15, 25, 35, 60))\n\nplt.title('Suomi NPP Earth at night April/October 2012')\nplt.savefig('test.pdf')\nplt.show()\n```\n\nwhich results in:\n\n``` python\nTraceback (most recent call last):\n File \"wmts.py\", line 33, in main\n plt.savefig('test.pdf')\n File \"/usr/lib64/python2.7/site-packages/matplotlib/pyplot.py\", line 577, in savefig\n res = fig.savefig(*args, **kwargs)\n File \"/usr/lib64/python2.7/site-packages/matplotlib/figure.py\", line 1476, in savefig\n self.canvas.print_figure(*args, **kwargs)\n File \"/usr/lib64/python2.7/site-packages/matplotlib/backends/backend_qt5agg.py\", line 161, in print_figure\n FigureCanvasAgg.print_figure(self, *args, **kwargs)\n File \"/usr/lib64/python2.7/site-packages/matplotlib/backend_bases.py\", line 2211, in print_figure\n **kwargs)\n File \"/usr/lib64/python2.7/site-packages/matplotlib/backends/backend_pdf.py\", line 2485, in print_pdf\n self.figure.draw(renderer)\n File \"/usr/lib64/python2.7/site-packages/matplotlib/artist.py\", line 59, in draw_wrapper\n draw(artist, renderer, *args, **kwargs)\n File \"/usr/lib64/python2.7/site-packages/matplotlib/figure.py\", line 1085, in draw\n func(*args)\n File \"/usr/lib64/python2.7/site-packages/matplotlib/artist.py\", line 59, in draw_wrapper\n draw(artist, renderer, *args, **kwargs)\n File \"/usr/lib64/python2.7/site-packages/cartopy/mpl/geoaxes.py\", line 359, in draw\n inframe=inframe)\n File \"/usr/lib64/python2.7/site-packages/matplotlib/artist.py\", line 59, in draw_wrapper\n draw(artist, renderer, *args, **kwargs)\n File \"/usr/lib64/python2.7/site-packages/matplotlib/axes/_base.py\", line 2081, in draw\n for z, im in zorder_images]\n File \"/usr/lib64/python2.7/site-packages/matplotlib/image.py\", line 580, in make_image\n raise RuntimeError('You must first set the image'\nRuntimeError: You must first set the image array or the image attribute\n```\n\nI think maybe `SlippyImageArtist` should be overriding `make_image`, too.\n\n", "before_files": [{"content": "# (C) British Crown Copyright 2014 - 2018, Met Office\n#\n# This file is part of cartopy.\n#\n# cartopy is free software: you can redistribute it and/or modify it under\n# the terms of the GNU Lesser General Public License as published by the\n# Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# cartopy is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with cartopy. If not, see <https://www.gnu.org/licenses/>.\n\"\"\"\nDefine the SlippyImageArtist class, which interfaces with\n:class:`cartopy.io.RasterSource` instances at draw time, for interactive\ndragging and zooming of raster data.\n\n\"\"\"\n\nfrom __future__ import (absolute_import, division, print_function)\n\nfrom matplotlib.image import AxesImage\nimport matplotlib.artist\n\n\nclass SlippyImageArtist(AxesImage):\n\n \"\"\"\n A subclass of :class:`~matplotlib.image.AxesImage` which provides an\n interface for getting a raster from the given object with interactive\n slippy map type functionality.\n\n Kwargs are passed to the AxesImage constructor.\n\n \"\"\"\n def __init__(self, ax, raster_source, **kwargs):\n self.raster_source = raster_source\n super(SlippyImageArtist, self).__init__(ax, **kwargs)\n self.set_clip_path(ax.background_patch)\n self.cache = []\n\n ax.figure.canvas.mpl_connect('button_press_event', self.on_press)\n ax.figure.canvas.mpl_connect('button_release_event', self.on_release)\n\n self.on_release()\n\n def on_press(self, event=None):\n self.user_is_interacting = True\n\n def on_release(self, event=None):\n self.user_is_interacting = False\n self.stale = True\n\n @matplotlib.artist.allow_rasterization\n def draw(self, renderer, *args, **kwargs):\n if not self.get_visible():\n return\n\n ax = self.axes\n window_extent = ax.get_window_extent()\n [x1, y1], [x2, y2] = ax.viewLim.get_points()\n if not self.user_is_interacting:\n located_images = self.raster_source.fetch_raster(\n ax.projection, extent=[x1, x2, y1, y2],\n target_resolution=(window_extent.width, window_extent.height))\n self.cache = located_images\n\n for img, extent in self.cache:\n self.set_array(img)\n with ax.hold_limits():\n self.set_extent(extent)\n super(SlippyImageArtist, self).draw(renderer, *args, **kwargs)\n", "path": "lib/cartopy/mpl/slippy_image_artist.py"}], "after_files": [{"content": "# (C) British Crown Copyright 2014 - 2018, Met Office\n#\n# This file is part of cartopy.\n#\n# cartopy is free software: you can redistribute it and/or modify it under\n# the terms of the GNU Lesser General Public License as published by the\n# Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# cartopy is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with cartopy. 
If not, see <https://www.gnu.org/licenses/>.\n\"\"\"\nDefine the SlippyImageArtist class, which interfaces with\n:class:`cartopy.io.RasterSource` instances at draw time, for interactive\ndragging and zooming of raster data.\n\n\"\"\"\n\nfrom __future__ import (absolute_import, division, print_function)\n\nfrom matplotlib.image import AxesImage\nimport matplotlib.artist\n\n\nclass SlippyImageArtist(AxesImage):\n\n \"\"\"\n A subclass of :class:`~matplotlib.image.AxesImage` which provides an\n interface for getting a raster from the given object with interactive\n slippy map type functionality.\n\n Kwargs are passed to the AxesImage constructor.\n\n \"\"\"\n def __init__(self, ax, raster_source, **kwargs):\n self.raster_source = raster_source\n super(SlippyImageArtist, self).__init__(ax, **kwargs)\n self.set_clip_path(ax.background_patch)\n self.cache = []\n\n ax.figure.canvas.mpl_connect('button_press_event', self.on_press)\n ax.figure.canvas.mpl_connect('button_release_event', self.on_release)\n\n self.on_release()\n\n def on_press(self, event=None):\n self.user_is_interacting = True\n\n def on_release(self, event=None):\n self.user_is_interacting = False\n self.stale = True\n\n @matplotlib.artist.allow_rasterization\n def draw(self, renderer, *args, **kwargs):\n if not self.get_visible():\n return\n\n ax = self.axes\n window_extent = ax.get_window_extent()\n [x1, y1], [x2, y2] = ax.viewLim.get_points()\n if not self.user_is_interacting:\n located_images = self.raster_source.fetch_raster(\n ax.projection, extent=[x1, x2, y1, y2],\n target_resolution=(window_extent.width, window_extent.height))\n self.cache = located_images\n\n for img, extent in self.cache:\n self.set_array(img)\n with ax.hold_limits():\n self.set_extent(extent)\n super(SlippyImageArtist, self).draw(renderer, *args, **kwargs)\n\n def can_composite(self):\n # As per https://github.com/SciTools/cartopy/issues/689, disable\n # compositing multiple raster sources.\n return False\n", "path": "lib/cartopy/mpl/slippy_image_artist.py"}]}
| 1,762 | 143 |
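The golden diff above leans on matplotlib's image-compositing hook: when saving to a vector backend, adjacent images on an axes may be merged into a single raster via `make_image()`, which fails for an artist whose data array is only populated during `draw()`. A minimal sketch of the opt-out follows; the subclass name is invented for illustration, and the `can_composite` override mirrors the hook used in the record's diff.

```python
from matplotlib.image import AxesImage


class LazyImage(AxesImage):
    """Illustrative AxesImage whose data only exists at draw time."""

    def can_composite(self):
        # Tell the backend not to merge this artist with neighbouring
        # images, since its array may not be set outside draw().
        return False
```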
gh_patches_debug_19761
|
rasdani/github-patches
|
git_diff
|
sopel-irc__sopel-1381
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Ctrl-C doesn't run shutdown routines
When pressing <kbd>Ctrl</kbd>-<kbd>C</kbd> to interrupt a Sopel instance running in the foreground of an active shell, it simply prints `KeyboardInterrupt` and exits seemingly without calling any of the shutdown routines.
Pressing <kbd>Ctrl</kbd>-<kbd>C</kbd> should behave more or less the same as `sopel --quit`.
Discovered while testing for #1369.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sopel/__init__.py`
Content:
```
1 # coding=utf-8
2 # ASCII ONLY IN THIS FILE THOUGH!!!!!!!
3 # Python does some stupid bullshit of respecting LC_ALL over the encoding on the
4 # file, so in order to undo Python's ridiculous fucking idiocy, we have to have
5 # our own check.
6
7 # Copyright 2008, Sean B. Palmer, inamidst.com
8 # Copyright 2012, Elsie Powell, http://embolalia.com
9 # Copyright 2012, Elad Alfassa <[email protected]>
10 #
11 # Licensed under the Eiffel Forum License 2.
12
13 from __future__ import unicode_literals, absolute_import, print_function, division
14
15 import locale
16 import sys
17 loc = locale.getlocale()
18 if sys.version_info.major > 2:
19 if not loc[1] or 'UTF-8' not in loc[1]:
20 print('WARNING!!! You are running with a non-UTF8 locale environment '
21 'variables (e.g. LC_ALL is set to "C"), which makes Python 3 do '
22 'stupid things. If you get strange errors, please set it to '
23 'something like "en_US.UTF-8".', file=sys.stderr)
24
25
26 from collections import namedtuple
27 import os
28 import re
29 import time
30 import traceback
31 import signal
32
33 __version__ = '6.5.3'
34
35
36 def _version_info(version=__version__):
37 regex = re.compile(r'(\d+)\.(\d+)\.(\d+)(?:(a|b|rc)(\d+))?.*')
38 version_groups = regex.match(__version__).groups()
39 major, minor, micro = (int(piece) for piece in version_groups[0:3])
40 level = version_groups[3]
41 serial = int(version_groups[4] or 0)
42 if level == 'a':
43 level = 'alpha'
44 elif level == 'b':
45 level = 'beta'
46 elif level == 'rc':
47 level = 'candidate'
48 elif not level and version_groups[4] is None:
49 level = 'final'
50 else:
51 level = 'alpha'
52 version_type = namedtuple('version_info',
53 'major, minor, micro, releaselevel, serial')
54 return version_type(major, minor, micro, level, serial)
55
56
57 version_info = _version_info()
58
59
60 def run(config, pid_file, daemon=False):
61 import sopel.bot as bot
62 import sopel.logger
63 from sopel.tools import stderr
64 delay = 20
65 # Inject ca_certs from config to web for SSL validation of web requests
66 if not config.core.ca_certs:
67 stderr('Could not open CA certificates file. SSL will not '
68 'work properly.')
69
70 def signal_handler(sig, frame):
71 if sig == signal.SIGUSR1 or sig == signal.SIGTERM:
72 stderr('Got quit signal, shutting down.')
73 p.quit('Closing')
74 while True:
75 try:
76 p = bot.Sopel(config, daemon=daemon)
77 if hasattr(signal, 'SIGUSR1'):
78 signal.signal(signal.SIGUSR1, signal_handler)
79 if hasattr(signal, 'SIGTERM'):
80 signal.signal(signal.SIGTERM, signal_handler)
81 sopel.logger.setup_logging(p)
82 p.run(config.core.host, int(config.core.port))
83 except KeyboardInterrupt:
84 break
85 except Exception: # TODO: Be specific
86 trace = traceback.format_exc()
87 try:
88 stderr(trace)
89 except Exception: # TODO: Be specific
90 pass
91 logfile = open(os.path.join(config.core.logdir, 'exceptions.log'), 'a')
92 logfile.write('Critical exception in core')
93 logfile.write(trace)
94 logfile.write('----------------------------------------\n\n')
95 logfile.close()
96 os.unlink(pid_file)
97 os._exit(1)
98
99 if not isinstance(delay, int):
100 break
101 if p.hasquit:
102 break
103 stderr('Warning: Disconnected. Reconnecting in %s seconds...' % delay)
104 time.sleep(delay)
105 os.unlink(pid_file)
106 os._exit(0)
107
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sopel/__init__.py b/sopel/__init__.py
--- a/sopel/__init__.py
+++ b/sopel/__init__.py
@@ -68,7 +68,7 @@
'work properly.')
def signal_handler(sig, frame):
- if sig == signal.SIGUSR1 or sig == signal.SIGTERM:
+ if sig == signal.SIGUSR1 or sig == signal.SIGTERM or sig == signal.SIGINT:
stderr('Got quit signal, shutting down.')
p.quit('Closing')
while True:
@@ -78,6 +78,8 @@
signal.signal(signal.SIGUSR1, signal_handler)
if hasattr(signal, 'SIGTERM'):
signal.signal(signal.SIGTERM, signal_handler)
+ if hasattr(signal, 'SIGINT'):
+ signal.signal(signal.SIGINT, signal_handler)
sopel.logger.setup_logging(p)
p.run(config.core.host, int(config.core.port))
except KeyboardInterrupt:
|
{"golden_diff": "diff --git a/sopel/__init__.py b/sopel/__init__.py\n--- a/sopel/__init__.py\n+++ b/sopel/__init__.py\n@@ -68,7 +68,7 @@\n 'work properly.')\n \n def signal_handler(sig, frame):\n- if sig == signal.SIGUSR1 or sig == signal.SIGTERM:\n+ if sig == signal.SIGUSR1 or sig == signal.SIGTERM or sig == signal.SIGINT:\n stderr('Got quit signal, shutting down.')\n p.quit('Closing')\n while True:\n@@ -78,6 +78,8 @@\n signal.signal(signal.SIGUSR1, signal_handler)\n if hasattr(signal, 'SIGTERM'):\n signal.signal(signal.SIGTERM, signal_handler)\n+ if hasattr(signal, 'SIGINT'):\n+ signal.signal(signal.SIGINT, signal_handler)\n sopel.logger.setup_logging(p)\n p.run(config.core.host, int(config.core.port))\n except KeyboardInterrupt:\n", "issue": "Ctrl-C doesn't run shutdown routines\nWhen pressing <kbd>Ctrl</kbd>-<kbd>C</kbd> to interrupt a Sopel instance running in the foreground of an active shell, it simply prints `KeyboardInterrupt` and exits seemingly without calling any of the shutdown routines.\r\n\r\nPressing <kbd>Ctrl</kbd>-<kbd>C</kbd> should behave more or less the same as `sopel --quit`.\r\n\r\nDiscovered while testing for #1369.\n", "before_files": [{"content": "# coding=utf-8\n# ASCII ONLY IN THIS FILE THOUGH!!!!!!!\n# Python does some stupid bullshit of respecting LC_ALL over the encoding on the\n# file, so in order to undo Python's ridiculous fucking idiocy, we have to have\n# our own check.\n\n# Copyright 2008, Sean B. Palmer, inamidst.com\n# Copyright 2012, Elsie Powell, http://embolalia.com\n# Copyright 2012, Elad Alfassa <[email protected]>\n#\n# Licensed under the Eiffel Forum License 2.\n\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nimport locale\nimport sys\nloc = locale.getlocale()\nif sys.version_info.major > 2:\n if not loc[1] or 'UTF-8' not in loc[1]:\n print('WARNING!!! You are running with a non-UTF8 locale environment '\n 'variables (e.g. LC_ALL is set to \"C\"), which makes Python 3 do '\n 'stupid things. If you get strange errors, please set it to '\n 'something like \"en_US.UTF-8\".', file=sys.stderr)\n\n\nfrom collections import namedtuple\nimport os\nimport re\nimport time\nimport traceback\nimport signal\n\n__version__ = '6.5.3'\n\n\ndef _version_info(version=__version__):\n regex = re.compile(r'(\\d+)\\.(\\d+)\\.(\\d+)(?:(a|b|rc)(\\d+))?.*')\n version_groups = regex.match(__version__).groups()\n major, minor, micro = (int(piece) for piece in version_groups[0:3])\n level = version_groups[3]\n serial = int(version_groups[4] or 0)\n if level == 'a':\n level = 'alpha'\n elif level == 'b':\n level = 'beta'\n elif level == 'rc':\n level = 'candidate'\n elif not level and version_groups[4] is None:\n level = 'final'\n else:\n level = 'alpha'\n version_type = namedtuple('version_info',\n 'major, minor, micro, releaselevel, serial')\n return version_type(major, minor, micro, level, serial)\n\n\nversion_info = _version_info()\n\n\ndef run(config, pid_file, daemon=False):\n import sopel.bot as bot\n import sopel.logger\n from sopel.tools import stderr\n delay = 20\n # Inject ca_certs from config to web for SSL validation of web requests\n if not config.core.ca_certs:\n stderr('Could not open CA certificates file. 
SSL will not '\n 'work properly.')\n\n def signal_handler(sig, frame):\n if sig == signal.SIGUSR1 or sig == signal.SIGTERM:\n stderr('Got quit signal, shutting down.')\n p.quit('Closing')\n while True:\n try:\n p = bot.Sopel(config, daemon=daemon)\n if hasattr(signal, 'SIGUSR1'):\n signal.signal(signal.SIGUSR1, signal_handler)\n if hasattr(signal, 'SIGTERM'):\n signal.signal(signal.SIGTERM, signal_handler)\n sopel.logger.setup_logging(p)\n p.run(config.core.host, int(config.core.port))\n except KeyboardInterrupt:\n break\n except Exception: # TODO: Be specific\n trace = traceback.format_exc()\n try:\n stderr(trace)\n except Exception: # TODO: Be specific\n pass\n logfile = open(os.path.join(config.core.logdir, 'exceptions.log'), 'a')\n logfile.write('Critical exception in core')\n logfile.write(trace)\n logfile.write('----------------------------------------\\n\\n')\n logfile.close()\n os.unlink(pid_file)\n os._exit(1)\n\n if not isinstance(delay, int):\n break\n if p.hasquit:\n break\n stderr('Warning: Disconnected. Reconnecting in %s seconds...' % delay)\n time.sleep(delay)\n os.unlink(pid_file)\n os._exit(0)\n", "path": "sopel/__init__.py"}], "after_files": [{"content": "# coding=utf-8\n# ASCII ONLY IN THIS FILE THOUGH!!!!!!!\n# Python does some stupid bullshit of respecting LC_ALL over the encoding on the\n# file, so in order to undo Python's ridiculous fucking idiocy, we have to have\n# our own check.\n\n# Copyright 2008, Sean B. Palmer, inamidst.com\n# Copyright 2012, Elsie Powell, http://embolalia.com\n# Copyright 2012, Elad Alfassa <[email protected]>\n#\n# Licensed under the Eiffel Forum License 2.\n\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nimport locale\nimport sys\nloc = locale.getlocale()\nif sys.version_info.major > 2:\n if not loc[1] or 'UTF-8' not in loc[1]:\n print('WARNING!!! You are running with a non-UTF8 locale environment '\n 'variables (e.g. LC_ALL is set to \"C\"), which makes Python 3 do '\n 'stupid things. If you get strange errors, please set it to '\n 'something like \"en_US.UTF-8\".', file=sys.stderr)\n\n\nfrom collections import namedtuple\nimport os\nimport re\nimport time\nimport traceback\nimport signal\n\n__version__ = '6.5.3'\n\n\ndef _version_info(version=__version__):\n regex = re.compile(r'(\\d+)\\.(\\d+)\\.(\\d+)(?:(a|b|rc)(\\d+))?.*')\n version_groups = regex.match(__version__).groups()\n major, minor, micro = (int(piece) for piece in version_groups[0:3])\n level = version_groups[3]\n serial = int(version_groups[4] or 0)\n if level == 'a':\n level = 'alpha'\n elif level == 'b':\n level = 'beta'\n elif level == 'rc':\n level = 'candidate'\n elif not level and version_groups[4] is None:\n level = 'final'\n else:\n level = 'alpha'\n version_type = namedtuple('version_info',\n 'major, minor, micro, releaselevel, serial')\n return version_type(major, minor, micro, level, serial)\n\n\nversion_info = _version_info()\n\n\ndef run(config, pid_file, daemon=False):\n import sopel.bot as bot\n import sopel.logger\n from sopel.tools import stderr\n delay = 20\n # Inject ca_certs from config to web for SSL validation of web requests\n if not config.core.ca_certs:\n stderr('Could not open CA certificates file. 
SSL will not '\n 'work properly.')\n\n def signal_handler(sig, frame):\n if sig == signal.SIGUSR1 or sig == signal.SIGTERM or sig == signal.SIGINT:\n stderr('Got quit signal, shutting down.')\n p.quit('Closing')\n while True:\n try:\n p = bot.Sopel(config, daemon=daemon)\n if hasattr(signal, 'SIGUSR1'):\n signal.signal(signal.SIGUSR1, signal_handler)\n if hasattr(signal, 'SIGTERM'):\n signal.signal(signal.SIGTERM, signal_handler)\n if hasattr(signal, 'SIGINT'):\n signal.signal(signal.SIGINT, signal_handler)\n sopel.logger.setup_logging(p)\n p.run(config.core.host, int(config.core.port))\n except KeyboardInterrupt:\n break\n except Exception: # TODO: Be specific\n trace = traceback.format_exc()\n try:\n stderr(trace)\n except Exception: # TODO: Be specific\n pass\n logfile = open(os.path.join(config.core.logdir, 'exceptions.log'), 'a')\n logfile.write('Critical exception in core')\n logfile.write(trace)\n logfile.write('----------------------------------------\\n\\n')\n logfile.close()\n os.unlink(pid_file)\n os._exit(1)\n\n if not isinstance(delay, int):\n break\n if p.hasquit:\n break\n stderr('Warning: Disconnected. Reconnecting in %s seconds...' % delay)\n time.sleep(delay)\n os.unlink(pid_file)\n os._exit(0)\n", "path": "sopel/__init__.py"}]}
| 1,448 | 211 |
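The sopel fix is an instance of a general rule: a foreground process that owns cleanup logic should route SIGINT through the same handler as SIGTERM instead of relying on the default `KeyboardInterrupt` behaviour. A minimal standalone sketch follows, with the cleanup body reduced to a print; the `hasattr()` guard matters because SIGUSR1 does not exist on Windows.

```python
import signal
import sys
import time


def quit_handler(signum, frame):
    # Shared shutdown path for SIGUSR1, SIGTERM and SIGINT: whatever
    # cleanup the program needs runs here before exiting.
    print("Got quit signal %d, shutting down." % signum)
    sys.exit(0)


for name in ("SIGUSR1", "SIGTERM", "SIGINT"):
    # Guard each lookup, since not every signal exists on every platform.
    if hasattr(signal, name):
        signal.signal(getattr(signal, name), quit_handler)

while True:
    time.sleep(1)  # stand-in for the program's main loop
```

With this in place, pressing Ctrl-C delivers SIGINT to `quit_handler` and the cleanup runs, rather than the interpreter unwinding with a bare `KeyboardInterrupt` traceback.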
gh_patches_debug_40194
|
rasdani/github-patches
|
git_diff
|
getsentry__sentry-python-2033
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Do not add redis SET data in span description when `set_default_pii=False`
### Problem Statement
Currently the redis integration records a span for all redis commands. This is good.
But when `send_default_pii=False` the value of the redis SET command (https://redis.io/commands/set/) should be redacted.
### Solution Brainstorm
do it
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sentry_sdk/integrations/redis.py`
Content:
```
1 from __future__ import absolute_import
2
3 from sentry_sdk import Hub
4 from sentry_sdk.consts import OP
5 from sentry_sdk.utils import capture_internal_exceptions, logger
6 from sentry_sdk.integrations import Integration, DidNotEnable
7
8 from sentry_sdk._types import TYPE_CHECKING
9
10 if TYPE_CHECKING:
11 from typing import Any, Sequence
12
13 _SINGLE_KEY_COMMANDS = frozenset(
14 ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"]
15 )
16 _MULTI_KEY_COMMANDS = frozenset(["del", "touch", "unlink"])
17
18 #: Trim argument lists to this many values
19 _MAX_NUM_ARGS = 10
20
21
22 def patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn):
23 # type: (Any, bool, Any) -> None
24 old_execute = pipeline_cls.execute
25
26 def sentry_patched_execute(self, *args, **kwargs):
27 # type: (Any, *Any, **Any) -> Any
28 hub = Hub.current
29
30 if hub.get_integration(RedisIntegration) is None:
31 return old_execute(self, *args, **kwargs)
32
33 with hub.start_span(
34 op=OP.DB_REDIS, description="redis.pipeline.execute"
35 ) as span:
36 with capture_internal_exceptions():
37 span.set_tag("redis.is_cluster", is_cluster)
38 transaction = self.transaction if not is_cluster else False
39 span.set_tag("redis.transaction", transaction)
40
41 commands = []
42 for i, arg in enumerate(self.command_stack):
43 if i > _MAX_NUM_ARGS:
44 break
45 command_args = []
46 for j, command_arg in enumerate(get_command_args_fn(arg)):
47 if j > 0:
48 command_arg = repr(command_arg)
49 command_args.append(command_arg)
50 commands.append(" ".join(command_args))
51
52 span.set_data(
53 "redis.commands",
54 {"count": len(self.command_stack), "first_ten": commands},
55 )
56
57 return old_execute(self, *args, **kwargs)
58
59 pipeline_cls.execute = sentry_patched_execute
60
61
62 def _get_redis_command_args(command):
63 # type: (Any) -> Sequence[Any]
64 return command[0]
65
66
67 def _parse_rediscluster_command(command):
68 # type: (Any) -> Sequence[Any]
69 return command.args
70
71
72 def _patch_rediscluster():
73 # type: () -> None
74 try:
75 import rediscluster # type: ignore
76 except ImportError:
77 return
78
79 patch_redis_client(rediscluster.RedisCluster, is_cluster=True)
80
81 # up to v1.3.6, __version__ attribute is a tuple
82 # from v2.0.0, __version__ is a string and VERSION a tuple
83 version = getattr(rediscluster, "VERSION", rediscluster.__version__)
84
85 # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
86 # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
87 if (0, 2, 0) < version < (2, 0, 0):
88 pipeline_cls = rediscluster.pipeline.StrictClusterPipeline
89 patch_redis_client(rediscluster.StrictRedisCluster, is_cluster=True)
90 else:
91 pipeline_cls = rediscluster.pipeline.ClusterPipeline
92
93 patch_redis_pipeline(pipeline_cls, True, _parse_rediscluster_command)
94
95
96 class RedisIntegration(Integration):
97 identifier = "redis"
98
99 @staticmethod
100 def setup_once():
101 # type: () -> None
102 try:
103 import redis
104 except ImportError:
105 raise DidNotEnable("Redis client not installed")
106
107 patch_redis_client(redis.StrictRedis, is_cluster=False)
108 patch_redis_pipeline(redis.client.Pipeline, False, _get_redis_command_args)
109 try:
110 strict_pipeline = redis.client.StrictPipeline # type: ignore
111 except AttributeError:
112 pass
113 else:
114 patch_redis_pipeline(strict_pipeline, False, _get_redis_command_args)
115
116 try:
117 import rb.clients # type: ignore
118 except ImportError:
119 pass
120 else:
121 patch_redis_client(rb.clients.FanoutClient, is_cluster=False)
122 patch_redis_client(rb.clients.MappingClient, is_cluster=False)
123 patch_redis_client(rb.clients.RoutingClient, is_cluster=False)
124
125 try:
126 _patch_rediscluster()
127 except Exception:
128 logger.exception("Error occurred while patching `rediscluster` library")
129
130
131 def patch_redis_client(cls, is_cluster):
132 # type: (Any, bool) -> None
133 """
134 This function can be used to instrument custom redis client classes or
135 subclasses.
136 """
137 old_execute_command = cls.execute_command
138
139 def sentry_patched_execute_command(self, name, *args, **kwargs):
140 # type: (Any, str, *Any, **Any) -> Any
141 hub = Hub.current
142
143 if hub.get_integration(RedisIntegration) is None:
144 return old_execute_command(self, name, *args, **kwargs)
145
146 description = name
147
148 with capture_internal_exceptions():
149 description_parts = [name]
150 for i, arg in enumerate(args):
151 if i > _MAX_NUM_ARGS:
152 break
153
154 description_parts.append(repr(arg))
155
156 description = " ".join(description_parts)
157
158 with hub.start_span(op=OP.DB_REDIS, description=description) as span:
159 span.set_tag("redis.is_cluster", is_cluster)
160 if name:
161 span.set_tag("redis.command", name)
162
163 if name and args:
164 name_low = name.lower()
165 if (name_low in _SINGLE_KEY_COMMANDS) or (
166 name_low in _MULTI_KEY_COMMANDS and len(args) == 1
167 ):
168 span.set_tag("redis.key", args[0])
169
170 return old_execute_command(self, name, *args, **kwargs)
171
172 cls.execute_command = sentry_patched_execute_command
173
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py
--- a/sentry_sdk/integrations/redis.py
+++ b/sentry_sdk/integrations/redis.py
@@ -2,7 +2,12 @@
from sentry_sdk import Hub
from sentry_sdk.consts import OP
-from sentry_sdk.utils import capture_internal_exceptions, logger
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.utils import (
+ SENSITIVE_DATA_SUBSTITUTE,
+ capture_internal_exceptions,
+ logger,
+)
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk._types import TYPE_CHECKING
@@ -15,8 +20,13 @@
)
_MULTI_KEY_COMMANDS = frozenset(["del", "touch", "unlink"])
-#: Trim argument lists to this many values
-_MAX_NUM_ARGS = 10
+_COMMANDS_INCLUDING_SENSITIVE_DATA = [
+ "auth",
+]
+
+_MAX_NUM_ARGS = 10 # Trim argument lists to this many values
+
+_DEFAULT_MAX_DATA_SIZE = 1024
def patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn):
@@ -96,6 +106,10 @@
class RedisIntegration(Integration):
identifier = "redis"
+ def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE):
+ # type: (int) -> None
+ self.max_data_size = max_data_size
+
@staticmethod
def setup_once():
# type: () -> None
@@ -139,8 +153,9 @@
def sentry_patched_execute_command(self, name, *args, **kwargs):
# type: (Any, str, *Any, **Any) -> Any
hub = Hub.current
+ integration = hub.get_integration(RedisIntegration)
- if hub.get_integration(RedisIntegration) is None:
+ if integration is None:
return old_execute_command(self, name, *args, **kwargs)
description = name
@@ -151,12 +166,33 @@
if i > _MAX_NUM_ARGS:
break
- description_parts.append(repr(arg))
+ name_low = name.lower()
+
+ if name_low in _COMMANDS_INCLUDING_SENSITIVE_DATA:
+ description_parts.append(SENSITIVE_DATA_SUBSTITUTE)
+ continue
+
+ arg_is_the_key = i == 0
+ if arg_is_the_key:
+ description_parts.append(repr(arg))
+
+ else:
+ if _should_send_default_pii():
+ description_parts.append(repr(arg))
+ else:
+ description_parts.append(SENSITIVE_DATA_SUBSTITUTE)
description = " ".join(description_parts)
+ data_should_be_truncated = (
+ integration.max_data_size and len(description) > integration.max_data_size
+ )
+ if data_should_be_truncated:
+ description = description[: integration.max_data_size - len("...")] + "..."
+
with hub.start_span(op=OP.DB_REDIS, description=description) as span:
span.set_tag("redis.is_cluster", is_cluster)
+
if name:
span.set_tag("redis.command", name)
|
{"golden_diff": "diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py\n--- a/sentry_sdk/integrations/redis.py\n+++ b/sentry_sdk/integrations/redis.py\n@@ -2,7 +2,12 @@\n \n from sentry_sdk import Hub\n from sentry_sdk.consts import OP\n-from sentry_sdk.utils import capture_internal_exceptions, logger\n+from sentry_sdk.hub import _should_send_default_pii\n+from sentry_sdk.utils import (\n+ SENSITIVE_DATA_SUBSTITUTE,\n+ capture_internal_exceptions,\n+ logger,\n+)\n from sentry_sdk.integrations import Integration, DidNotEnable\n \n from sentry_sdk._types import TYPE_CHECKING\n@@ -15,8 +20,13 @@\n )\n _MULTI_KEY_COMMANDS = frozenset([\"del\", \"touch\", \"unlink\"])\n \n-#: Trim argument lists to this many values\n-_MAX_NUM_ARGS = 10\n+_COMMANDS_INCLUDING_SENSITIVE_DATA = [\n+ \"auth\",\n+]\n+\n+_MAX_NUM_ARGS = 10 # Trim argument lists to this many values\n+\n+_DEFAULT_MAX_DATA_SIZE = 1024\n \n \n def patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn):\n@@ -96,6 +106,10 @@\n class RedisIntegration(Integration):\n identifier = \"redis\"\n \n+ def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE):\n+ # type: (int) -> None\n+ self.max_data_size = max_data_size\n+\n @staticmethod\n def setup_once():\n # type: () -> None\n@@ -139,8 +153,9 @@\n def sentry_patched_execute_command(self, name, *args, **kwargs):\n # type: (Any, str, *Any, **Any) -> Any\n hub = Hub.current\n+ integration = hub.get_integration(RedisIntegration)\n \n- if hub.get_integration(RedisIntegration) is None:\n+ if integration is None:\n return old_execute_command(self, name, *args, **kwargs)\n \n description = name\n@@ -151,12 +166,33 @@\n if i > _MAX_NUM_ARGS:\n break\n \n- description_parts.append(repr(arg))\n+ name_low = name.lower()\n+\n+ if name_low in _COMMANDS_INCLUDING_SENSITIVE_DATA:\n+ description_parts.append(SENSITIVE_DATA_SUBSTITUTE)\n+ continue\n+\n+ arg_is_the_key = i == 0\n+ if arg_is_the_key:\n+ description_parts.append(repr(arg))\n+\n+ else:\n+ if _should_send_default_pii():\n+ description_parts.append(repr(arg))\n+ else:\n+ description_parts.append(SENSITIVE_DATA_SUBSTITUTE)\n \n description = \" \".join(description_parts)\n \n+ data_should_be_truncated = (\n+ integration.max_data_size and len(description) > integration.max_data_size\n+ )\n+ if data_should_be_truncated:\n+ description = description[: integration.max_data_size - len(\"...\")] + \"...\"\n+\n with hub.start_span(op=OP.DB_REDIS, description=description) as span:\n span.set_tag(\"redis.is_cluster\", is_cluster)\n+\n if name:\n span.set_tag(\"redis.command\", name)\n", "issue": "Do not add redis SET data in span description when `set_default_pii=False`\n### Problem Statement\n\nCurrently the redis integration records a span for all redis commands. This is good. 
\r\nBut when `send_default_pii=False` the value of the redis SET command (https://redis.io/commands/set/) should be redacted.\n\n### Solution Brainstorm\n\ndo it\n", "before_files": [{"content": "from __future__ import absolute_import\n\nfrom sentry_sdk import Hub\nfrom sentry_sdk.consts import OP\nfrom sentry_sdk.utils import capture_internal_exceptions, logger\nfrom sentry_sdk.integrations import Integration, DidNotEnable\n\nfrom sentry_sdk._types import TYPE_CHECKING\n\nif TYPE_CHECKING:\n from typing import Any, Sequence\n\n_SINGLE_KEY_COMMANDS = frozenset(\n [\"decr\", \"decrby\", \"get\", \"incr\", \"incrby\", \"pttl\", \"set\", \"setex\", \"setnx\", \"ttl\"]\n)\n_MULTI_KEY_COMMANDS = frozenset([\"del\", \"touch\", \"unlink\"])\n\n#: Trim argument lists to this many values\n_MAX_NUM_ARGS = 10\n\n\ndef patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn):\n # type: (Any, bool, Any) -> None\n old_execute = pipeline_cls.execute\n\n def sentry_patched_execute(self, *args, **kwargs):\n # type: (Any, *Any, **Any) -> Any\n hub = Hub.current\n\n if hub.get_integration(RedisIntegration) is None:\n return old_execute(self, *args, **kwargs)\n\n with hub.start_span(\n op=OP.DB_REDIS, description=\"redis.pipeline.execute\"\n ) as span:\n with capture_internal_exceptions():\n span.set_tag(\"redis.is_cluster\", is_cluster)\n transaction = self.transaction if not is_cluster else False\n span.set_tag(\"redis.transaction\", transaction)\n\n commands = []\n for i, arg in enumerate(self.command_stack):\n if i > _MAX_NUM_ARGS:\n break\n command_args = []\n for j, command_arg in enumerate(get_command_args_fn(arg)):\n if j > 0:\n command_arg = repr(command_arg)\n command_args.append(command_arg)\n commands.append(\" \".join(command_args))\n\n span.set_data(\n \"redis.commands\",\n {\"count\": len(self.command_stack), \"first_ten\": commands},\n )\n\n return old_execute(self, *args, **kwargs)\n\n pipeline_cls.execute = sentry_patched_execute\n\n\ndef _get_redis_command_args(command):\n # type: (Any) -> Sequence[Any]\n return command[0]\n\n\ndef _parse_rediscluster_command(command):\n # type: (Any) -> Sequence[Any]\n return command.args\n\n\ndef _patch_rediscluster():\n # type: () -> None\n try:\n import rediscluster # type: ignore\n except ImportError:\n return\n\n patch_redis_client(rediscluster.RedisCluster, is_cluster=True)\n\n # up to v1.3.6, __version__ attribute is a tuple\n # from v2.0.0, __version__ is a string and VERSION a tuple\n version = getattr(rediscluster, \"VERSION\", rediscluster.__version__)\n\n # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0\n # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst\n if (0, 2, 0) < version < (2, 0, 0):\n pipeline_cls = rediscluster.pipeline.StrictClusterPipeline\n patch_redis_client(rediscluster.StrictRedisCluster, is_cluster=True)\n else:\n pipeline_cls = rediscluster.pipeline.ClusterPipeline\n\n patch_redis_pipeline(pipeline_cls, True, _parse_rediscluster_command)\n\n\nclass RedisIntegration(Integration):\n identifier = \"redis\"\n\n @staticmethod\n def setup_once():\n # type: () -> None\n try:\n import redis\n except ImportError:\n raise DidNotEnable(\"Redis client not installed\")\n\n patch_redis_client(redis.StrictRedis, is_cluster=False)\n patch_redis_pipeline(redis.client.Pipeline, False, _get_redis_command_args)\n try:\n strict_pipeline = redis.client.StrictPipeline # type: ignore\n except AttributeError:\n pass\n else:\n patch_redis_pipeline(strict_pipeline, False, 
_get_redis_command_args)\n\n try:\n import rb.clients # type: ignore\n except ImportError:\n pass\n else:\n patch_redis_client(rb.clients.FanoutClient, is_cluster=False)\n patch_redis_client(rb.clients.MappingClient, is_cluster=False)\n patch_redis_client(rb.clients.RoutingClient, is_cluster=False)\n\n try:\n _patch_rediscluster()\n except Exception:\n logger.exception(\"Error occurred while patching `rediscluster` library\")\n\n\ndef patch_redis_client(cls, is_cluster):\n # type: (Any, bool) -> None\n \"\"\"\n This function can be used to instrument custom redis client classes or\n subclasses.\n \"\"\"\n old_execute_command = cls.execute_command\n\n def sentry_patched_execute_command(self, name, *args, **kwargs):\n # type: (Any, str, *Any, **Any) -> Any\n hub = Hub.current\n\n if hub.get_integration(RedisIntegration) is None:\n return old_execute_command(self, name, *args, **kwargs)\n\n description = name\n\n with capture_internal_exceptions():\n description_parts = [name]\n for i, arg in enumerate(args):\n if i > _MAX_NUM_ARGS:\n break\n\n description_parts.append(repr(arg))\n\n description = \" \".join(description_parts)\n\n with hub.start_span(op=OP.DB_REDIS, description=description) as span:\n span.set_tag(\"redis.is_cluster\", is_cluster)\n if name:\n span.set_tag(\"redis.command\", name)\n\n if name and args:\n name_low = name.lower()\n if (name_low in _SINGLE_KEY_COMMANDS) or (\n name_low in _MULTI_KEY_COMMANDS and len(args) == 1\n ):\n span.set_tag(\"redis.key\", args[0])\n\n return old_execute_command(self, name, *args, **kwargs)\n\n cls.execute_command = sentry_patched_execute_command\n", "path": "sentry_sdk/integrations/redis.py"}], "after_files": [{"content": "from __future__ import absolute_import\n\nfrom sentry_sdk import Hub\nfrom sentry_sdk.consts import OP\nfrom sentry_sdk.hub import _should_send_default_pii\nfrom sentry_sdk.utils import (\n SENSITIVE_DATA_SUBSTITUTE,\n capture_internal_exceptions,\n logger,\n)\nfrom sentry_sdk.integrations import Integration, DidNotEnable\n\nfrom sentry_sdk._types import TYPE_CHECKING\n\nif TYPE_CHECKING:\n from typing import Any, Sequence\n\n_SINGLE_KEY_COMMANDS = frozenset(\n [\"decr\", \"decrby\", \"get\", \"incr\", \"incrby\", \"pttl\", \"set\", \"setex\", \"setnx\", \"ttl\"]\n)\n_MULTI_KEY_COMMANDS = frozenset([\"del\", \"touch\", \"unlink\"])\n\n_COMMANDS_INCLUDING_SENSITIVE_DATA = [\n \"auth\",\n]\n\n_MAX_NUM_ARGS = 10 # Trim argument lists to this many values\n\n_DEFAULT_MAX_DATA_SIZE = 1024\n\n\ndef patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn):\n # type: (Any, bool, Any) -> None\n old_execute = pipeline_cls.execute\n\n def sentry_patched_execute(self, *args, **kwargs):\n # type: (Any, *Any, **Any) -> Any\n hub = Hub.current\n\n if hub.get_integration(RedisIntegration) is None:\n return old_execute(self, *args, **kwargs)\n\n with hub.start_span(\n op=OP.DB_REDIS, description=\"redis.pipeline.execute\"\n ) as span:\n with capture_internal_exceptions():\n span.set_tag(\"redis.is_cluster\", is_cluster)\n transaction = self.transaction if not is_cluster else False\n span.set_tag(\"redis.transaction\", transaction)\n\n commands = []\n for i, arg in enumerate(self.command_stack):\n if i > _MAX_NUM_ARGS:\n break\n command_args = []\n for j, command_arg in enumerate(get_command_args_fn(arg)):\n if j > 0:\n command_arg = repr(command_arg)\n command_args.append(command_arg)\n commands.append(\" \".join(command_args))\n\n span.set_data(\n \"redis.commands\",\n {\"count\": len(self.command_stack), \"first_ten\": 
commands},\n )\n\n return old_execute(self, *args, **kwargs)\n\n pipeline_cls.execute = sentry_patched_execute\n\n\ndef _get_redis_command_args(command):\n # type: (Any) -> Sequence[Any]\n return command[0]\n\n\ndef _parse_rediscluster_command(command):\n # type: (Any) -> Sequence[Any]\n return command.args\n\n\ndef _patch_rediscluster():\n # type: () -> None\n try:\n import rediscluster # type: ignore\n except ImportError:\n return\n\n patch_redis_client(rediscluster.RedisCluster, is_cluster=True)\n\n # up to v1.3.6, __version__ attribute is a tuple\n # from v2.0.0, __version__ is a string and VERSION a tuple\n version = getattr(rediscluster, \"VERSION\", rediscluster.__version__)\n\n # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0\n # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst\n if (0, 2, 0) < version < (2, 0, 0):\n pipeline_cls = rediscluster.pipeline.StrictClusterPipeline\n patch_redis_client(rediscluster.StrictRedisCluster, is_cluster=True)\n else:\n pipeline_cls = rediscluster.pipeline.ClusterPipeline\n\n patch_redis_pipeline(pipeline_cls, True, _parse_rediscluster_command)\n\n\nclass RedisIntegration(Integration):\n identifier = \"redis\"\n\n def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE):\n # type: (int) -> None\n self.max_data_size = max_data_size\n\n @staticmethod\n def setup_once():\n # type: () -> None\n try:\n import redis\n except ImportError:\n raise DidNotEnable(\"Redis client not installed\")\n\n patch_redis_client(redis.StrictRedis, is_cluster=False)\n patch_redis_pipeline(redis.client.Pipeline, False, _get_redis_command_args)\n try:\n strict_pipeline = redis.client.StrictPipeline # type: ignore\n except AttributeError:\n pass\n else:\n patch_redis_pipeline(strict_pipeline, False, _get_redis_command_args)\n\n try:\n import rb.clients # type: ignore\n except ImportError:\n pass\n else:\n patch_redis_client(rb.clients.FanoutClient, is_cluster=False)\n patch_redis_client(rb.clients.MappingClient, is_cluster=False)\n patch_redis_client(rb.clients.RoutingClient, is_cluster=False)\n\n try:\n _patch_rediscluster()\n except Exception:\n logger.exception(\"Error occurred while patching `rediscluster` library\")\n\n\ndef patch_redis_client(cls, is_cluster):\n # type: (Any, bool) -> None\n \"\"\"\n This function can be used to instrument custom redis client classes or\n subclasses.\n \"\"\"\n old_execute_command = cls.execute_command\n\n def sentry_patched_execute_command(self, name, *args, **kwargs):\n # type: (Any, str, *Any, **Any) -> Any\n hub = Hub.current\n integration = hub.get_integration(RedisIntegration)\n\n if integration is None:\n return old_execute_command(self, name, *args, **kwargs)\n\n description = name\n\n with capture_internal_exceptions():\n description_parts = [name]\n for i, arg in enumerate(args):\n if i > _MAX_NUM_ARGS:\n break\n\n name_low = name.lower()\n\n if name_low in _COMMANDS_INCLUDING_SENSITIVE_DATA:\n description_parts.append(SENSITIVE_DATA_SUBSTITUTE)\n continue\n\n arg_is_the_key = i == 0\n if arg_is_the_key:\n description_parts.append(repr(arg))\n\n else:\n if _should_send_default_pii():\n description_parts.append(repr(arg))\n else:\n description_parts.append(SENSITIVE_DATA_SUBSTITUTE)\n\n description = \" \".join(description_parts)\n\n data_should_be_truncated = (\n integration.max_data_size and len(description) > integration.max_data_size\n )\n if data_should_be_truncated:\n description = description[: integration.max_data_size - len(\"...\")] + \"...\"\n\n with 
hub.start_span(op=OP.DB_REDIS, description=description) as span:\n span.set_tag(\"redis.is_cluster\", is_cluster)\n\n if name:\n span.set_tag(\"redis.command\", name)\n\n if name and args:\n name_low = name.lower()\n if (name_low in _SINGLE_KEY_COMMANDS) or (\n name_low in _MULTI_KEY_COMMANDS and len(args) == 1\n ):\n span.set_tag(\"redis.key\", args[0])\n\n return old_execute_command(self, name, *args, **kwargs)\n\n cls.execute_command = sentry_patched_execute_command\n", "path": "sentry_sdk/integrations/redis.py"}]}
| 2,041 | 729 |
gh_patches_debug_6195
|
rasdani/github-patches
|
git_diff
|
pystiche__pystiche-132
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Create encoding preprocessors not until runtime
Right now the preprocessors are created at import time:
https://github.com/pmeier/pystiche/blob/cad5ab6e9485680f2543cf4397d0d21e72a88b9e/pystiche/enc/preprocessing.py#L1-L4
We should create them only when they are needed, to speed up the import.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pystiche/enc/preprocessing.py`
Content:
```
1 from torch import nn
2 from pystiche.image import TorchPreprocessing, CaffePreprocessing
3
4 PREPROCESSORS = {"torch": TorchPreprocessing(), "caffe": CaffePreprocessing()}
5
6 __all__ = ["get_preprocessor"]
7
8
9 def get_preprocessor(framework: str) -> nn.Module:
10 return PREPROCESSORS[framework]
11
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pystiche/enc/preprocessing.py b/pystiche/enc/preprocessing.py
--- a/pystiche/enc/preprocessing.py
+++ b/pystiche/enc/preprocessing.py
@@ -1,10 +1,10 @@
from torch import nn
from pystiche.image import TorchPreprocessing, CaffePreprocessing
-PREPROCESSORS = {"torch": TorchPreprocessing(), "caffe": CaffePreprocessing()}
+PREPROCESSORS = {"torch": TorchPreprocessing, "caffe": CaffePreprocessing}
__all__ = ["get_preprocessor"]
def get_preprocessor(framework: str) -> nn.Module:
- return PREPROCESSORS[framework]
+ return PREPROCESSORS[framework]()
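A standalone sketch of the registry-of-classes pattern the patch adopts; the two preprocessing classes below are placeholders for pystiche's real ones, and the point is that nothing is instantiated at import time:

```python
from typing import Callable, Dict

# Stand-ins for pystiche's TorchPreprocessing / CaffePreprocessing.
class TorchPreprocessing:
    def __call__(self, image):
        return image  # a real implementation would normalize the input

class CaffePreprocessing:
    def __call__(self, image):
        return image

# Store the classes (cheap to import), not instances (built eagerly).
PREPROCESSORS: Dict[str, Callable] = {
    "torch": TorchPreprocessing,
    "caffe": CaffePreprocessing,
}

def get_preprocessor(framework: str):
    return PREPROCESSORS[framework]()  # construct only on demand
```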
|
{"golden_diff": "diff --git a/pystiche/enc/preprocessing.py b/pystiche/enc/preprocessing.py\n--- a/pystiche/enc/preprocessing.py\n+++ b/pystiche/enc/preprocessing.py\n@@ -1,10 +1,10 @@\n from torch import nn\n from pystiche.image import TorchPreprocessing, CaffePreprocessing\n \n-PREPROCESSORS = {\"torch\": TorchPreprocessing(), \"caffe\": CaffePreprocessing()}\n+PREPROCESSORS = {\"torch\": TorchPreprocessing, \"caffe\": CaffePreprocessing}\n \n __all__ = [\"get_preprocessor\"]\n \n \n def get_preprocessor(framework: str) -> nn.Module:\n- return PREPROCESSORS[framework]\n+ return PREPROCESSORS[framework]()\n", "issue": "Create encoding preprocessors not until runtime\nRight now the preprocessors are created at import\r\n\r\nhttps://github.com/pmeier/pystiche/blob/cad5ab6e9485680f2543cf4397d0d21e72a88b9e/pystiche/enc/preprocessing.py#L1-L4\r\n\r\nWe should only create them if they are needed to speed up the import.\n", "before_files": [{"content": "from torch import nn\nfrom pystiche.image import TorchPreprocessing, CaffePreprocessing\n\nPREPROCESSORS = {\"torch\": TorchPreprocessing(), \"caffe\": CaffePreprocessing()}\n\n__all__ = [\"get_preprocessor\"]\n\n\ndef get_preprocessor(framework: str) -> nn.Module:\n return PREPROCESSORS[framework]\n", "path": "pystiche/enc/preprocessing.py"}], "after_files": [{"content": "from torch import nn\nfrom pystiche.image import TorchPreprocessing, CaffePreprocessing\n\nPREPROCESSORS = {\"torch\": TorchPreprocessing, \"caffe\": CaffePreprocessing}\n\n__all__ = [\"get_preprocessor\"]\n\n\ndef get_preprocessor(framework: str) -> nn.Module:\n return PREPROCESSORS[framework]()\n", "path": "pystiche/enc/preprocessing.py"}]}
| 443 | 158 |
gh_patches_debug_1042
|
rasdani/github-patches
|
git_diff
|
fossasia__open-event-server-395
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
list_events url is inconsistent in API v2
The URL is `/events/`, whereas it should be `/events` to be consistent with the other URLs.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `open_event/api/events.py`
Content:
```
1 from flask.ext.restplus import Resource, Namespace, fields
2
3 from open_event.models.event import Event as EventModel
4 from .helpers import get_object_list, get_object_or_404
5
6 api = Namespace('events', description='Events')
7
8 EVENT = api.model('Event', {
9 'id': fields.Integer(required=True),
10 'name': fields.String,
11 'email': fields.String,
12 'color': fields.String,
13 'logo': fields.String,
14 'start_time': fields.DateTime,
15 'end_time': fields.DateTime,
16 'latitude': fields.Float,
17 'longitude': fields.Float,
18 'slogan': fields.String,
19 'url': fields.String,
20 'location_name': fields.String,
21 })
22
23
24 @api.route('/<int:event_id>')
25 @api.param('event_id')
26 @api.response(404, 'Event not found')
27 class Event(Resource):
28 @api.doc('get_event')
29 @api.marshal_with(EVENT)
30 def get(self, event_id):
31 """Fetch an event given its id"""
32 return get_object_or_404(EventModel, event_id)
33
34
35 @api.route('/')
36 class EventList(Resource):
37 @api.doc('list_events')
38 @api.marshal_list_with(EVENT)
39 def get(self):
40 """List all events"""
41 return get_object_list(EventModel)
42
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/open_event/api/events.py b/open_event/api/events.py
--- a/open_event/api/events.py
+++ b/open_event/api/events.py
@@ -32,7 +32,7 @@
return get_object_or_404(EventModel, event_id)
[email protected]('/')
[email protected]('')
class EventList(Resource):
@api.doc('list_events')
@api.marshal_list_with(EVENT)
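The fix works because the `events` namespace already contributes the `/events` prefix, so an empty route string avoids the trailing slash. A minimal plain-Flask sketch of the trailing-slash distinction (route names here are illustrative):

```python
from flask import Flask

app = Flask(__name__)

# Registered without a trailing slash: GET /events works,
# while GET /events/ returns 404 under the default strict_slashes.
@app.route('/events')
def list_events():
    return 'events'

# Registered with a trailing slash: GET /items/ works,
# and GET /items is redirected to /items/.
@app.route('/items/')
def list_items():
    return 'items'
```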
|
{"golden_diff": "diff --git a/open_event/api/events.py b/open_event/api/events.py\n--- a/open_event/api/events.py\n+++ b/open_event/api/events.py\n@@ -32,7 +32,7 @@\n return get_object_or_404(EventModel, event_id)\n \n \[email protected]('/')\[email protected]('')\n class EventList(Resource):\n @api.doc('list_events')\n @api.marshal_list_with(EVENT)\n", "issue": "list_events url is inconsistent in API v2\nThe url is `/events/` whereas it should be `/events` to be consistent with other urls. \n\n", "before_files": [{"content": "from flask.ext.restplus import Resource, Namespace, fields\n\nfrom open_event.models.event import Event as EventModel\nfrom .helpers import get_object_list, get_object_or_404\n\napi = Namespace('events', description='Events')\n\nEVENT = api.model('Event', {\n 'id': fields.Integer(required=True),\n 'name': fields.String,\n 'email': fields.String,\n 'color': fields.String,\n 'logo': fields.String,\n 'start_time': fields.DateTime,\n 'end_time': fields.DateTime,\n 'latitude': fields.Float,\n 'longitude': fields.Float,\n 'slogan': fields.String,\n 'url': fields.String,\n 'location_name': fields.String,\n})\n\n\[email protected]('/<int:event_id>')\[email protected]('event_id')\[email protected](404, 'Event not found')\nclass Event(Resource):\n @api.doc('get_event')\n @api.marshal_with(EVENT)\n def get(self, event_id):\n \"\"\"Fetch an event given its id\"\"\"\n return get_object_or_404(EventModel, event_id)\n\n\[email protected]('/')\nclass EventList(Resource):\n @api.doc('list_events')\n @api.marshal_list_with(EVENT)\n def get(self):\n \"\"\"List all events\"\"\"\n return get_object_list(EventModel)\n", "path": "open_event/api/events.py"}], "after_files": [{"content": "from flask.ext.restplus import Resource, Namespace, fields\n\nfrom open_event.models.event import Event as EventModel\nfrom .helpers import get_object_list, get_object_or_404\n\napi = Namespace('events', description='Events')\n\nEVENT = api.model('Event', {\n 'id': fields.Integer(required=True),\n 'name': fields.String,\n 'email': fields.String,\n 'color': fields.String,\n 'logo': fields.String,\n 'start_time': fields.DateTime,\n 'end_time': fields.DateTime,\n 'latitude': fields.Float,\n 'longitude': fields.Float,\n 'slogan': fields.String,\n 'url': fields.String,\n 'location_name': fields.String,\n})\n\n\[email protected]('/<int:event_id>')\[email protected]('event_id')\[email protected](404, 'Event not found')\nclass Event(Resource):\n @api.doc('get_event')\n @api.marshal_with(EVENT)\n def get(self, event_id):\n \"\"\"Fetch an event given its id\"\"\"\n return get_object_or_404(EventModel, event_id)\n\n\[email protected]('')\nclass EventList(Resource):\n @api.doc('list_events')\n @api.marshal_list_with(EVENT)\n def get(self):\n \"\"\"List all events\"\"\"\n return get_object_list(EventModel)\n", "path": "open_event/api/events.py"}]}
| 647 | 92 |
gh_patches_debug_6894
|
rasdani/github-patches
|
git_diff
|
ray-project__ray-1662
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[DataFrame] Error checking on Pandas version
We need better reporting for issues with the Pandas version on a user's system.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `python/ray/dataframe/__init__.py`
Content:
```
1 from __future__ import absolute_import
2 from __future__ import division
3 from __future__ import print_function
4 import threading
5
6 DEFAULT_NPARTITIONS = 4
7
8
9 def set_npartition_default(n):
10 global DEFAULT_NPARTITIONS
11 DEFAULT_NPARTITIONS = n
12
13
14 def get_npartitions():
15 return DEFAULT_NPARTITIONS
16
17
18 # We import these file after above two function
19 # because they depend on npartitions.
20 from .dataframe import DataFrame # noqa: 402
21 from .dataframe import from_pandas # noqa: 402
22 from .dataframe import to_pandas # noqa: 402
23 from .series import Series # noqa: 402
24 from .io import (read_csv, read_parquet) # noqa: 402
25
26 __all__ = [
27 "DataFrame", "from_pandas", "to_pandas", "Series", "read_csv",
28 "read_parquet"
29 ]
30
31 try:
32 if threading.current_thread().name == "MainThread":
33 import ray
34 ray.init()
35 except AssertionError:
36 pass
37
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/python/ray/dataframe/__init__.py b/python/ray/dataframe/__init__.py
--- a/python/ray/dataframe/__init__.py
+++ b/python/ray/dataframe/__init__.py
@@ -1,8 +1,18 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+
+import pandas as pd
import threading
+pd_version = pd.__version__
+pd_major = int(pd_version.split(".")[0])
+pd_minor = int(pd_version.split(".")[1])
+
+if pd_major == 0 and pd_minor < 22:
+ raise Exception("In order to use Pandas on Ray, please upgrade your Pandas"
+ " version to >= 0.22.")
+
DEFAULT_NPARTITIONS = 4
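One caveat worth noting about the manual `split(".")` parsing in the patch: a pre-release Pandas version string such as `0.22rc1` would still raise `ValueError` on the `int()` call. A hedged alternative sketch, assuming the third-party `packaging` distribution (a common pip/setuptools dependency) is available:

```python
import pandas as pd
from packaging.version import Version

# Version() parses pre-release strings such as '0.22rc1', which would make
# int(pd.__version__.split(".")[1]) raise ValueError.
if Version(pd.__version__) < Version("0.22"):
    raise Exception(
        "In order to use Pandas on Ray, please upgrade your Pandas"
        " version to >= 0.22."
    )
```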
|
{"golden_diff": "diff --git a/python/ray/dataframe/__init__.py b/python/ray/dataframe/__init__.py\n--- a/python/ray/dataframe/__init__.py\n+++ b/python/ray/dataframe/__init__.py\n@@ -1,8 +1,18 @@\n from __future__ import absolute_import\n from __future__ import division\n from __future__ import print_function\n+\n+import pandas as pd\n import threading\n \n+pd_version = pd.__version__\n+pd_major = int(pd_version.split(\".\")[0])\n+pd_minor = int(pd_version.split(\".\")[1])\n+\n+if pd_major == 0 and pd_minor < 22:\n+ raise Exception(\"In order to use Pandas on Ray, please upgrade your Pandas\"\n+ \" version to >= 0.22.\")\n+\n DEFAULT_NPARTITIONS = 4\n", "issue": "[DataFrame] Error checking on Pandas version\nWe need better reporting for issues with the Pandas version on a user's system. \n", "before_files": [{"content": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nimport threading\n\nDEFAULT_NPARTITIONS = 4\n\n\ndef set_npartition_default(n):\n global DEFAULT_NPARTITIONS\n DEFAULT_NPARTITIONS = n\n\n\ndef get_npartitions():\n return DEFAULT_NPARTITIONS\n\n\n# We import these file after above two function\n# because they depend on npartitions.\nfrom .dataframe import DataFrame # noqa: 402\nfrom .dataframe import from_pandas # noqa: 402\nfrom .dataframe import to_pandas # noqa: 402\nfrom .series import Series # noqa: 402\nfrom .io import (read_csv, read_parquet) # noqa: 402\n\n__all__ = [\n \"DataFrame\", \"from_pandas\", \"to_pandas\", \"Series\", \"read_csv\",\n \"read_parquet\"\n]\n\ntry:\n if threading.current_thread().name == \"MainThread\":\n import ray\n ray.init()\nexcept AssertionError:\n pass\n", "path": "python/ray/dataframe/__init__.py"}], "after_files": [{"content": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport pandas as pd\nimport threading\n\npd_version = pd.__version__\npd_major = int(pd_version.split(\".\")[0])\npd_minor = int(pd_version.split(\".\")[1])\n\nif pd_major == 0 and pd_minor < 22:\n raise Exception(\"In order to use Pandas on Ray, please upgrade your Pandas\"\n \" version to >= 0.22.\")\n\nDEFAULT_NPARTITIONS = 4\n\n\ndef set_npartition_default(n):\n global DEFAULT_NPARTITIONS\n DEFAULT_NPARTITIONS = n\n\n\ndef get_npartitions():\n return DEFAULT_NPARTITIONS\n\n\n# We import these file after above two function\n# because they depend on npartitions.\nfrom .dataframe import DataFrame # noqa: 402\nfrom .dataframe import from_pandas # noqa: 402\nfrom .dataframe import to_pandas # noqa: 402\nfrom .series import Series # noqa: 402\nfrom .io import (read_csv, read_parquet) # noqa: 402\n\n__all__ = [\n \"DataFrame\", \"from_pandas\", \"to_pandas\", \"Series\", \"read_csv\",\n \"read_parquet\"\n]\n\ntry:\n if threading.current_thread().name == \"MainThread\":\n import ray\n ray.init()\nexcept AssertionError:\n pass\n", "path": "python/ray/dataframe/__init__.py"}]}
| 597 | 181 |
gh_patches_debug_10727
|
rasdani/github-patches
|
git_diff
|
akvo__akvo-rsr-2712
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
react-clickoutside doesn't close the date-picker on clicking outside
Created via Reamaze:
Link: https://akvoo.reamaze.com/admin/conversations/bug-10
Assignee: Anthony Gonzalez
React-clickoutside needs to load before the date-picker loads
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `akvo/rsr/context_processors.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 """
3 Akvo RSR is covered by the GNU Affero General Public License.
4
5 See more details in the license.txt file located at the root folder of the
6 Akvo RSR module. For additional details on the GNU license please see
7 < http://www.gnu.org/licenses/agpl.html >.
8 """
9
10 import django
11
12 from django.conf import settings
13 from django.core.exceptions import DisallowedHost
14 from django.contrib.sites.models import get_current_site
15
16
17 def extra_context(request, protocol="http"):
18 """Add information to the request context."""
19 try:
20 current_site = get_current_site(request)
21 except DisallowedHost:
22 current_site = None
23
24 django_version = django.get_version()
25 debug = getattr(settings, 'DEBUG', False)
26 deploy_tag = getattr(settings, 'DEPLOY_TAG', 'Unknown')
27 deploy_branch = getattr(settings, 'DEPLOY_BRANCH', 'Unknown')
28 deploy_commit_id = getattr(settings, 'DEPLOY_COMMIT_ID', 'Unknown')
29 deploy_commit_full_id = getattr(settings, 'DEPLOY_COMMIT_FULL_ID', 'Unknown')
30
31 return dict(
32 current_site=current_site,
33 django_version=django_version,
34 debug=debug,
35 deploy_tag=deploy_tag,
36 deploy_branch=deploy_branch,
37 deploy_commit_id=deploy_commit_id,
38 deploy_commit_full_id=deploy_commit_full_id
39 )
40
41
42 def get_current_path_without_lang(request):
43 """Return current path without lang."""
44 path = request.get_full_path()
45 path_bits = path.split('/')
46 path = '/'.join(path_bits[2:])
47 return {'current_path_without_lang': path}
48
49
50 def extra_pages_context(request):
51 """Add context information of an RSR Page."""
52 if request.rsr_page:
53 page = request.rsr_page
54 return {
55 'rsr_page': page,
56 'favicon': page.favicon,
57 'logo': page.logo,
58 'organisation': page.organisation,
59 'return_url': page.return_url,
60 'return_url_text': page.custom_return_url_text,
61 'stylesheet': page.stylesheet,
62 'akvoapp_root_url': '//{}'.format(settings.AKVOAPP_DOMAIN),
63 'domain_url': '//{}'.format(settings.RSR_DOMAIN),
64 'no_facebook': not page.facebook_button,
65 'facebook_app_id': page.facebook_app_id,
66 'no_twitter': not page.twitter_button,
67 }
68
69 return {}
70
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/akvo/rsr/context_processors.py b/akvo/rsr/context_processors.py
--- a/akvo/rsr/context_processors.py
+++ b/akvo/rsr/context_processors.py
@@ -58,7 +58,7 @@
'organisation': page.organisation,
'return_url': page.return_url,
'return_url_text': page.custom_return_url_text,
- 'stylesheet': page.stylesheet,
+ 'page_stylesheet': page.stylesheet,
'akvoapp_root_url': '//{}'.format(settings.AKVOAPP_DOMAIN),
'domain_url': '//{}'.format(settings.RSR_DOMAIN),
'no_facebook': not page.facebook_button,
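The renamed key matters because Django merges context-processor output into the template context of every request, so a generic name like `stylesheet` can collide with variables set elsewhere. An illustrative wiring of these processors in the modern `TEMPLATES` setting (the project itself may use the older `TEMPLATE_CONTEXT_PROCESSORS` list):

```python
# settings.py (illustrative): each processor's returned dict is merged into
# the context of every rendered template, hence the need for specific keys.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.request',
                'akvo.rsr.context_processors.extra_context',
                'akvo.rsr.context_processors.extra_pages_context',
            ],
        },
    },
]
```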
|
{"golden_diff": "diff --git a/akvo/rsr/context_processors.py b/akvo/rsr/context_processors.py\n--- a/akvo/rsr/context_processors.py\n+++ b/akvo/rsr/context_processors.py\n@@ -58,7 +58,7 @@\n 'organisation': page.organisation,\n 'return_url': page.return_url,\n 'return_url_text': page.custom_return_url_text,\n- 'stylesheet': page.stylesheet,\n+ 'page_stylesheet': page.stylesheet,\n 'akvoapp_root_url': '//{}'.format(settings.AKVOAPP_DOMAIN),\n 'domain_url': '//{}'.format(settings.RSR_DOMAIN),\n 'no_facebook': not page.facebook_button,\n", "issue": "react-clickoutside doesn't close the date-picker on clicking outside\nCreated via Reamaze:\r\n\r\nLink: https://akvoo.reamaze.com/admin/conversations/bug-10\r\nAssignee: Anthony Gonzalez\r\n\r\nReact-clickoutside needs to load before the date-picker loads\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nAkvo RSR is covered by the GNU Affero General Public License.\n\nSee more details in the license.txt file located at the root folder of the\nAkvo RSR module. For additional details on the GNU license please see\n< http://www.gnu.org/licenses/agpl.html >.\n\"\"\"\n\nimport django\n\nfrom django.conf import settings\nfrom django.core.exceptions import DisallowedHost\nfrom django.contrib.sites.models import get_current_site\n\n\ndef extra_context(request, protocol=\"http\"):\n \"\"\"Add information to the request context.\"\"\"\n try:\n current_site = get_current_site(request)\n except DisallowedHost:\n current_site = None\n\n django_version = django.get_version()\n debug = getattr(settings, 'DEBUG', False)\n deploy_tag = getattr(settings, 'DEPLOY_TAG', 'Unknown')\n deploy_branch = getattr(settings, 'DEPLOY_BRANCH', 'Unknown')\n deploy_commit_id = getattr(settings, 'DEPLOY_COMMIT_ID', 'Unknown')\n deploy_commit_full_id = getattr(settings, 'DEPLOY_COMMIT_FULL_ID', 'Unknown')\n\n return dict(\n current_site=current_site,\n django_version=django_version,\n debug=debug,\n deploy_tag=deploy_tag,\n deploy_branch=deploy_branch,\n deploy_commit_id=deploy_commit_id,\n deploy_commit_full_id=deploy_commit_full_id\n )\n\n\ndef get_current_path_without_lang(request):\n \"\"\"Return current path without lang.\"\"\"\n path = request.get_full_path()\n path_bits = path.split('/')\n path = '/'.join(path_bits[2:])\n return {'current_path_without_lang': path}\n\n\ndef extra_pages_context(request):\n \"\"\"Add context information of an RSR Page.\"\"\"\n if request.rsr_page:\n page = request.rsr_page\n return {\n 'rsr_page': page,\n 'favicon': page.favicon,\n 'logo': page.logo,\n 'organisation': page.organisation,\n 'return_url': page.return_url,\n 'return_url_text': page.custom_return_url_text,\n 'stylesheet': page.stylesheet,\n 'akvoapp_root_url': '//{}'.format(settings.AKVOAPP_DOMAIN),\n 'domain_url': '//{}'.format(settings.RSR_DOMAIN),\n 'no_facebook': not page.facebook_button,\n 'facebook_app_id': page.facebook_app_id,\n 'no_twitter': not page.twitter_button,\n }\n\n return {}\n", "path": "akvo/rsr/context_processors.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nAkvo RSR is covered by the GNU Affero General Public License.\n\nSee more details in the license.txt file located at the root folder of the\nAkvo RSR module. 
For additional details on the GNU license please see\n< http://www.gnu.org/licenses/agpl.html >.\n\"\"\"\n\nimport django\n\nfrom django.conf import settings\nfrom django.core.exceptions import DisallowedHost\nfrom django.contrib.sites.models import get_current_site\n\n\ndef extra_context(request, protocol=\"http\"):\n \"\"\"Add information to the request context.\"\"\"\n try:\n current_site = get_current_site(request)\n except DisallowedHost:\n current_site = None\n\n django_version = django.get_version()\n debug = getattr(settings, 'DEBUG', False)\n deploy_tag = getattr(settings, 'DEPLOY_TAG', 'Unknown')\n deploy_branch = getattr(settings, 'DEPLOY_BRANCH', 'Unknown')\n deploy_commit_id = getattr(settings, 'DEPLOY_COMMIT_ID', 'Unknown')\n deploy_commit_full_id = getattr(settings, 'DEPLOY_COMMIT_FULL_ID', 'Unknown')\n\n return dict(\n current_site=current_site,\n django_version=django_version,\n debug=debug,\n deploy_tag=deploy_tag,\n deploy_branch=deploy_branch,\n deploy_commit_id=deploy_commit_id,\n deploy_commit_full_id=deploy_commit_full_id\n )\n\n\ndef get_current_path_without_lang(request):\n \"\"\"Return current path without lang.\"\"\"\n path = request.get_full_path()\n path_bits = path.split('/')\n path = '/'.join(path_bits[2:])\n return {'current_path_without_lang': path}\n\n\ndef extra_pages_context(request):\n \"\"\"Add context information of an RSR Page.\"\"\"\n if request.rsr_page:\n page = request.rsr_page\n return {\n 'rsr_page': page,\n 'favicon': page.favicon,\n 'logo': page.logo,\n 'organisation': page.organisation,\n 'return_url': page.return_url,\n 'return_url_text': page.custom_return_url_text,\n 'page_stylesheet': page.stylesheet,\n 'akvoapp_root_url': '//{}'.format(settings.AKVOAPP_DOMAIN),\n 'domain_url': '//{}'.format(settings.RSR_DOMAIN),\n 'no_facebook': not page.facebook_button,\n 'facebook_app_id': page.facebook_app_id,\n 'no_twitter': not page.twitter_button,\n }\n\n return {}\n", "path": "akvo/rsr/context_processors.py"}]}
| 958 | 145 |
gh_patches_debug_9837
|
rasdani/github-patches
|
git_diff
|
StackStorm__st2-2489
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
action alias regex fails to evaluate null optional arguments when type is not string
For example, something like: `update {{hostname}}( {{count}} times)?`
Works for `!update myhost 2 times`
Fails for `!update myhost`
Error: `(invalid literal for int() with base 10: '')`
So the workaround is to change the type of `count` from integer to string.
Alternatively, @emedvedev suggests another workaround:
```
- update {{ hostname }} {{ count }} times
- update {{ hostname }}
```
Start from the most explicit.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `st2common/st2common/models/utils/action_alias_utils.py`
Content:
```
1 # Licensed to the StackStorm, Inc ('StackStorm') under one or more
2 # contributor license agreements. See the NOTICE file distributed with
3 # this work for additional information regarding copyright ownership.
4 # The ASF licenses this file to You under the Apache License, Version 2.0
5 # (the "License"); you may not use this file except in compliance with
6 # the License. You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import re
17 from st2common.exceptions import content
18
19 __all__ = [
20 'ActionAliasFormatParser'
21 ]
22
23
24 class ActionAliasFormatParser(object):
25
26 def __init__(self, alias_format=None, param_stream=None):
27 self._format = alias_format or ''
28 self._param_stream = param_stream or ''
29
30 def get_extracted_param_value(self):
31
32 result = {}
33
34 # As there's a lot of questions about using regular expressions,
35 # I'll try to be thorough when documenting this code.
36
37 # We're parsing the arbitrary key-value pairs at the end of the stream
38 # to support passing of parameters not specified in the format string,
39 # and cutting them from the stream as they're no longer needed.
40 # Possible values are quoted strings, a word, or anything inside "{}".
41 pairs_match = r'(?:^|\s+)(\S+)=("(.*?)"|\'(.*?)\'|({.*?})|(\S+))'
42 extra = re.match(r'.*?((' + pairs_match + r'\s*)*)$',
43 self._param_stream, re.DOTALL)
44 if extra:
45 kv_pairs = re.findall(pairs_match,
46 extra.group(1), re.DOTALL)
47 self._param_stream = self._param_stream.replace(extra.group(1), '')
48 self._param_stream = " %s " % self._param_stream
49
50 # Now we'll match parameters with default values in form of
51 # {{ value = parameter }} (and all possible permutations of spaces),
52 # compiling them into a list.
53 # "test {{ url = http://google.com }} {{ extra = Test }}" will become
54 # [ ["url", "http://google.com"], ["extra", "Test"] ]
55 params = re.findall(r'{{\s*(.+?)\s*(?:=\s*[\'"]?({.+?}|.+?)[\'"]?)?\s*}}',
56 self._format, re.DOTALL)
57
58 # Now we're transforming our format string into a regular expression,
59 # substituting {{ ... }} with regex named groups, so that param_stream
60 # matched against this expression yields a dict of params with values.
61 param_match = r'["\']?(?P<\2>(?:(?<=\').+?(?=\')|(?<=").+?(?=")|{.+?}|.+?))["\']?'
62 reg = re.sub(r'(\s*){{\s*([^=}]+?)\s*}}(?![\'"]?\s+}})',
63 r'\1' + param_match,
64 self._format)
65 reg = re.sub(r'(\s*){{\s*(\S+)\s*=\s*(?:{.+?}|.+?)\s*}}',
66 r'(?:\1' + param_match + r')?',
67 reg)
68 reg = re.sub(r'(\s*){{\s*(.+?)\s*}}',
69 r'\1' + param_match,
70 reg)
71 reg = '^\s*' + reg + r'\s*$'
72
73 # Now we're matching param_stream against our format string regex,
74 # getting a dict of values. We'll also get default values from
75 # "params" list if something is not present.
76 # Priority, from lowest to highest:
77 # 1. Default parameters
78 # 2. Matched parameters
79 # 3. Extra parameters
80 matched_stream = re.match(reg, self._param_stream, re.DOTALL)
81 if matched_stream:
82 values = matched_stream.groupdict()
83 for param in params:
84 matched_value = values[param[0]] if matched_stream else None
85 result[param[0]] = matched_value or param[1]
86 if extra:
87 for pair in kv_pairs:
88 result[pair[0]] = ''.join(pair[2:])
89
90 if self._format and not (self._param_stream.strip() or any(result.values())):
91 raise content.ParseException('No value supplied and no default value found.')
92
93 return result
94
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/st2common/st2common/models/utils/action_alias_utils.py b/st2common/st2common/models/utils/action_alias_utils.py
--- a/st2common/st2common/models/utils/action_alias_utils.py
+++ b/st2common/st2common/models/utils/action_alias_utils.py
@@ -82,7 +82,9 @@
values = matched_stream.groupdict()
for param in params:
matched_value = values[param[0]] if matched_stream else None
- result[param[0]] = matched_value or param[1]
+ matched_result = matched_value or param[1]
+ if matched_result:
+ result[param[0]] = matched_result
if extra:
for pair in kv_pairs:
result[pair[0]] = ''.join(pair[2:])
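A standalone sketch of why the guard is needed: an optional named group that does not participate in the match contributes no value, and coercing the resulting empty string later reproduces the reported `int()` error:

```python
import re

# Same shape as the alias 'update {{hostname}}( {{count}} times)?':
# the 'count' group is optional.
reg = r'^update\s+(?P<hostname>\S+)(?:\s+(?P<count>\S+)\s+times)?\s*$'

values = re.match(reg, 'update myhost').groupdict()
print(values)  # {'hostname': 'myhost', 'count': None}

# Before the fix, the unmatched parameter flowed into the result as '' and a
# later integer coercion failed with the reported message:
try:
    int(values['count'] or '')
except ValueError as e:
    print(e)  # invalid literal for int() with base 10: ''
```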
|
{"golden_diff": "diff --git a/st2common/st2common/models/utils/action_alias_utils.py b/st2common/st2common/models/utils/action_alias_utils.py\n--- a/st2common/st2common/models/utils/action_alias_utils.py\n+++ b/st2common/st2common/models/utils/action_alias_utils.py\n@@ -82,7 +82,9 @@\n values = matched_stream.groupdict()\n for param in params:\n matched_value = values[param[0]] if matched_stream else None\n- result[param[0]] = matched_value or param[1]\n+ matched_result = matched_value or param[1]\n+ if matched_result:\n+ result[param[0]] = matched_result\n if extra:\n for pair in kv_pairs:\n result[pair[0]] = ''.join(pair[2:])\n", "issue": "action alias regex fails to evaluate null optional arguments when type is not string\nFor example, something like: `update {{hostname}}( {{count}} times)?` \nWorks for `!update myhost 2 times`\nFails for `!update myhost`\nError: `(invalid literal for int() with base 10: '')`\nSo the workaround is to change the type of `count` from integer to string.\nOr @emedvedev provides another workaround:\n\n```\n- update {{ hostname }} {{ count }} times\n- update {{ hostname }}\n```\n\nStart from the most explicit.\n\n", "before_files": [{"content": "# Licensed to the StackStorm, Inc ('StackStorm') under one or more\n# contributor license agreements. See the NOTICE file distributed with\n# this work for additional information regarding copyright ownership.\n# The ASF licenses this file to You under the Apache License, Version 2.0\n# (the \"License\"); you may not use this file except in compliance with\n# the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport re\nfrom st2common.exceptions import content\n\n__all__ = [\n 'ActionAliasFormatParser'\n]\n\n\nclass ActionAliasFormatParser(object):\n\n def __init__(self, alias_format=None, param_stream=None):\n self._format = alias_format or ''\n self._param_stream = param_stream or ''\n\n def get_extracted_param_value(self):\n\n result = {}\n\n # As there's a lot of questions about using regular expressions,\n # I'll try to be thorough when documenting this code.\n\n # We're parsing the arbitrary key-value pairs at the end of the stream\n # to support passing of parameters not specified in the format string,\n # and cutting them from the stream as they're no longer needed.\n # Possible values are quoted strings, a word, or anything inside \"{}\".\n pairs_match = r'(?:^|\\s+)(\\S+)=(\"(.*?)\"|\\'(.*?)\\'|({.*?})|(\\S+))'\n extra = re.match(r'.*?((' + pairs_match + r'\\s*)*)$',\n self._param_stream, re.DOTALL)\n if extra:\n kv_pairs = re.findall(pairs_match,\n extra.group(1), re.DOTALL)\n self._param_stream = self._param_stream.replace(extra.group(1), '')\n self._param_stream = \" %s \" % self._param_stream\n\n # Now we'll match parameters with default values in form of\n # {{ value = parameter }} (and all possible permutations of spaces),\n # compiling them into a list.\n # \"test {{ url = http://google.com }} {{ extra = Test }}\" will become\n # [ [\"url\", \"http://google.com\"], [\"extra\", \"Test\"] ]\n params = re.findall(r'{{\\s*(.+?)\\s*(?:=\\s*[\\'\"]?({.+?}|.+?)[\\'\"]?)?\\s*}}',\n self._format, re.DOTALL)\n\n # Now we're transforming 
our format string into a regular expression,\n # substituting {{ ... }} with regex named groups, so that param_stream\n # matched against this expression yields a dict of params with values.\n param_match = r'[\"\\']?(?P<\\2>(?:(?<=\\').+?(?=\\')|(?<=\").+?(?=\")|{.+?}|.+?))[\"\\']?'\n reg = re.sub(r'(\\s*){{\\s*([^=}]+?)\\s*}}(?![\\'\"]?\\s+}})',\n r'\\1' + param_match,\n self._format)\n reg = re.sub(r'(\\s*){{\\s*(\\S+)\\s*=\\s*(?:{.+?}|.+?)\\s*}}',\n r'(?:\\1' + param_match + r')?',\n reg)\n reg = re.sub(r'(\\s*){{\\s*(.+?)\\s*}}',\n r'\\1' + param_match,\n reg)\n reg = '^\\s*' + reg + r'\\s*$'\n\n # Now we're matching param_stream against our format string regex,\n # getting a dict of values. We'll also get default values from\n # \"params\" list if something is not present.\n # Priority, from lowest to highest:\n # 1. Default parameters\n # 2. Matched parameters\n # 3. Extra parameters\n matched_stream = re.match(reg, self._param_stream, re.DOTALL)\n if matched_stream:\n values = matched_stream.groupdict()\n for param in params:\n matched_value = values[param[0]] if matched_stream else None\n result[param[0]] = matched_value or param[1]\n if extra:\n for pair in kv_pairs:\n result[pair[0]] = ''.join(pair[2:])\n\n if self._format and not (self._param_stream.strip() or any(result.values())):\n raise content.ParseException('No value supplied and no default value found.')\n\n return result\n", "path": "st2common/st2common/models/utils/action_alias_utils.py"}], "after_files": [{"content": "# Licensed to the StackStorm, Inc ('StackStorm') under one or more\n# contributor license agreements. See the NOTICE file distributed with\n# this work for additional information regarding copyright ownership.\n# The ASF licenses this file to You under the Apache License, Version 2.0\n# (the \"License\"); you may not use this file except in compliance with\n# the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport re\nfrom st2common.exceptions import content\n\n__all__ = [\n 'ActionAliasFormatParser'\n]\n\n\nclass ActionAliasFormatParser(object):\n\n def __init__(self, alias_format=None, param_stream=None):\n self._format = alias_format or ''\n self._param_stream = param_stream or ''\n\n def get_extracted_param_value(self):\n\n result = {}\n\n # As there's a lot of questions about using regular expressions,\n # I'll try to be thorough when documenting this code.\n\n # We're parsing the arbitrary key-value pairs at the end of the stream\n # to support passing of parameters not specified in the format string,\n # and cutting them from the stream as they're no longer needed.\n # Possible values are quoted strings, a word, or anything inside \"{}\".\n pairs_match = r'(?:^|\\s+)(\\S+)=(\"(.*?)\"|\\'(.*?)\\'|({.*?})|(\\S+))'\n extra = re.match(r'.*?((' + pairs_match + r'\\s*)*)$',\n self._param_stream, re.DOTALL)\n if extra:\n kv_pairs = re.findall(pairs_match,\n extra.group(1), re.DOTALL)\n self._param_stream = self._param_stream.replace(extra.group(1), '')\n self._param_stream = \" %s \" % self._param_stream\n\n # Now we'll match parameters with default values in form of\n # {{ value = parameter }} (and all possible permutations of spaces),\n # compiling them into a list.\n # \"test {{ url = http://google.com }} {{ extra = Test }}\" will become\n # [ [\"url\", \"http://google.com\"], [\"extra\", \"Test\"] ]\n params = re.findall(r'{{\\s*(.+?)\\s*(?:=\\s*[\\'\"]?({.+?}|.+?)[\\'\"]?)?\\s*}}',\n self._format, re.DOTALL)\n\n # Now we're transforming our format string into a regular expression,\n # substituting {{ ... }} with regex named groups, so that param_stream\n # matched against this expression yields a dict of params with values.\n param_match = r'[\"\\']?(?P<\\2>(?:(?<=\\').+?(?=\\')|(?<=\").+?(?=\")|{.+?}|.+?))[\"\\']?'\n reg = re.sub(r'(\\s*){{\\s*([^=}]+?)\\s*}}(?![\\'\"]?\\s+}})',\n r'\\1' + param_match,\n self._format)\n reg = re.sub(r'(\\s*){{\\s*(\\S+)\\s*=\\s*(?:{.+?}|.+?)\\s*}}',\n r'(?:\\1' + param_match + r')?',\n reg)\n reg = re.sub(r'(\\s*){{\\s*(.+?)\\s*}}',\n r'\\1' + param_match,\n reg)\n reg = '^\\s*' + reg + r'\\s*$'\n\n # Now we're matching param_stream against our format string regex,\n # getting a dict of values. We'll also get default values from\n # \"params\" list if something is not present.\n # Priority, from lowest to highest:\n # 1. Default parameters\n # 2. Matched parameters\n # 3. Extra parameters\n matched_stream = re.match(reg, self._param_stream, re.DOTALL)\n if matched_stream:\n values = matched_stream.groupdict()\n for param in params:\n matched_value = values[param[0]] if matched_stream else None\n matched_result = matched_value or param[1]\n if matched_result:\n result[param[0]] = matched_result\n if extra:\n for pair in kv_pairs:\n result[pair[0]] = ''.join(pair[2:])\n\n if self._format and not (self._param_stream.strip() or any(result.values())):\n raise content.ParseException('No value supplied and no default value found.')\n\n return result\n", "path": "st2common/st2common/models/utils/action_alias_utils.py"}]}
| 1,608 | 170 |
gh_patches_debug_31347
|
rasdani/github-patches
|
git_diff
|
pre-commit__pre-commit-2641
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
dotnet install fails for prefixed packages
### search you tried in the issue tracker
dotnet tool
### describe your issue
A bit of an oversight when constructing `tool_name` here:
https://github.com/pre-commit/pre-commit/blob/cb0bcfd67fc35e91f7b2eca7e33bceda459dca77/pre_commit/languages/dotnet.py#L60-L63
E.g.
```console
$ pre-commit try-repo https://github.com/rkm/sample-dotnet-tool
[INFO] Initializing environment for https://github.com/rkm/sample-dotnet-tool.
===============================================================================
Using config:
===============================================================================
repos:
- repo: https://github.com/rkm/sample-dotnet-tool
rev: e53a3601bc06bb038dac30da813572291dd8d58f
hooks:
- id: sample-dotnet-tool
===============================================================================
[INFO] Installing environment for https://github.com/rkm/sample-dotnet-tool.
[INFO] Once installed this environment will be reused.
[INFO] This may take a few minutes...
An unexpected error has occurred: CalledProcessError: command: ('/home/rkm/bin/dotnet', 'tool', 'install', '--tool-path', '/tmp/tmp6bk4v26x/repotefhurdg/dotnetenv-default/bin', '--add-source', 'pre-commit-build', 'Rkm')
return code: 1
expected return code: 0
stdout:
/tmp/1873db78-d0a7-48ba-bbff-10a7ef85a2a6/restore.csproj : error NU1101: Unable to find package rkm. No packages exist with this id in source(s): /tmp/tmp6bk4v26x/repotefhurdg/pre-commit-build, nuget.org
stderr:
The tool package could not be restored.
Tool 'rkm' failed to install. This failure may have been caused by:
* You are attempting to install a preview release and did not use the --version option to specify the version.
* A package by this name was found, but it was not a .NET tool.
* The required NuGet feed cannot be accessed, perhaps because of an Internet connection problem.
* You mistyped the name of the tool.
For more reasons, including package naming enforcement, visit https://aka.ms/failure-installing-tool
Check the log at /home/rkm/.cache/pre-commit/pre-commit.log
```
### pre-commit --version
pre-commit 2.20.0
### .pre-commit-config.yaml
```yaml
repos:
- repo: https://github.com/rkm/sample-dotnet-tool
rev: e53a3601bc06bb038dac30da813572291dd8d58f
hooks:
- id: sample-dotnet-tool
```
### ~/.cache/pre-commit/pre-commit.log (if present)
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pre_commit/languages/dotnet.py`
Content:
```
1 from __future__ import annotations
2
3 import contextlib
4 import os.path
5 from typing import Generator
6 from typing import Sequence
7
8 import pre_commit.constants as C
9 from pre_commit.envcontext import envcontext
10 from pre_commit.envcontext import PatchesT
11 from pre_commit.envcontext import Var
12 from pre_commit.hook import Hook
13 from pre_commit.languages import helpers
14 from pre_commit.prefix import Prefix
15 from pre_commit.util import clean_path_on_failure
16
17 ENVIRONMENT_DIR = 'dotnetenv'
18 BIN_DIR = 'bin'
19
20 get_default_version = helpers.basic_get_default_version
21 health_check = helpers.basic_health_check
22
23
24 def get_env_patch(venv: str) -> PatchesT:
25 return (
26 ('PATH', (os.path.join(venv, BIN_DIR), os.pathsep, Var('PATH'))),
27 )
28
29
30 @contextlib.contextmanager
31 def in_env(prefix: Prefix) -> Generator[None, None, None]:
32 directory = helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT)
33 envdir = prefix.path(directory)
34 with envcontext(get_env_patch(envdir)):
35 yield
36
37
38 def install_environment(
39 prefix: Prefix,
40 version: str,
41 additional_dependencies: Sequence[str],
42 ) -> None:
43 helpers.assert_version_default('dotnet', version)
44 helpers.assert_no_additional_deps('dotnet', additional_dependencies)
45
46 envdir = prefix.path(helpers.environment_dir(ENVIRONMENT_DIR, version))
47 with clean_path_on_failure(envdir):
48 build_dir = 'pre-commit-build'
49
50 # Build & pack nupkg file
51 helpers.run_setup_cmd(
52 prefix,
53 (
54 'dotnet', 'pack',
55 '--configuration', 'Release',
56 '--output', build_dir,
57 ),
58 )
59
60 # Determine tool from the packaged file <tool_name>.<version>.nupkg
61 build_outputs = os.listdir(os.path.join(prefix.prefix_dir, build_dir))
62 for output in build_outputs:
63 tool_name = output.split('.')[0]
64
65 # Install to bin dir
66 helpers.run_setup_cmd(
67 prefix,
68 (
69 'dotnet', 'tool', 'install',
70 '--tool-path', os.path.join(envdir, BIN_DIR),
71 '--add-source', build_dir,
72 tool_name,
73 ),
74 )
75
76 # Clean the git dir, ignoring the environment dir
77 clean_cmd = ('git', 'clean', '-ffxd', '-e', f'{ENVIRONMENT_DIR}-*')
78 helpers.run_setup_cmd(prefix, clean_cmd)
79
80
81 def run_hook(
82 hook: Hook,
83 file_args: Sequence[str],
84 color: bool,
85 ) -> tuple[int, bytes]:
86 with in_env(hook.prefix):
87 return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
88
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pre_commit/languages/dotnet.py b/pre_commit/languages/dotnet.py
--- a/pre_commit/languages/dotnet.py
+++ b/pre_commit/languages/dotnet.py
@@ -2,6 +2,9 @@
import contextlib
import os.path
+import re
+import xml.etree.ElementTree
+import zipfile
from typing import Generator
from typing import Sequence
@@ -57,10 +60,29 @@
),
)
- # Determine tool from the packaged file <tool_name>.<version>.nupkg
- build_outputs = os.listdir(os.path.join(prefix.prefix_dir, build_dir))
- for output in build_outputs:
- tool_name = output.split('.')[0]
+ nupkg_dir = prefix.path(build_dir)
+ nupkgs = [x for x in os.listdir(nupkg_dir) if x.endswith('.nupkg')]
+
+ if not nupkgs:
+ raise AssertionError('could not find any build outputs to install')
+
+ for nupkg in nupkgs:
+ with zipfile.ZipFile(os.path.join(nupkg_dir, nupkg)) as f:
+ nuspec, = (x for x in f.namelist() if x.endswith('.nuspec'))
+ with f.open(nuspec) as spec:
+ tree = xml.etree.ElementTree.parse(spec)
+
+ namespace = re.match(r'{.*}', tree.getroot().tag)
+ if not namespace:
+ raise AssertionError('could not parse namespace from nuspec')
+
+ tool_id_element = tree.find(f'.//{namespace[0]}id')
+ if tool_id_element is None:
+ raise AssertionError('expected to find an "id" element')
+
+ tool_id = tool_id_element.text
+ if not tool_id:
+ raise AssertionError('"id" element missing tool name')
# Install to bin dir
helpers.run_setup_cmd(
@@ -69,7 +91,7 @@
'dotnet', 'tool', 'install',
'--tool-path', os.path.join(envdir, BIN_DIR),
'--add-source', build_dir,
- tool_name,
+ tool_id,
),
)
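A trimmed standalone sketch of the extraction the patch performs, reading the authoritative package id from the `.nuspec` manifest inside a `.nupkg` (the file path is illustrative, and a namespaced manifest is assumed, which NuGet emits):

```python
import re
import xml.etree.ElementTree
import zipfile

def nupkg_tool_id(nupkg_path: str) -> str:
    # A .nupkg is a zip archive that contains exactly one .nuspec manifest.
    with zipfile.ZipFile(nupkg_path) as f:
        nuspec, = (x for x in f.namelist() if x.endswith('.nuspec'))
        with f.open(nuspec) as spec:
            tree = xml.etree.ElementTree.parse(spec)

    # The root tag embeds the XML namespace, e.g. '{http://...}package';
    # reuse it to locate the <id> element.
    namespace = re.match(r'{.*}', tree.getroot().tag)
    return tree.find(f'.//{namespace[0]}id').text  # e.g. 'Rkm.SampleDotnetTool'
```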
|
{"golden_diff": "diff --git a/pre_commit/languages/dotnet.py b/pre_commit/languages/dotnet.py\n--- a/pre_commit/languages/dotnet.py\n+++ b/pre_commit/languages/dotnet.py\n@@ -2,6 +2,9 @@\n \n import contextlib\n import os.path\n+import re\n+import xml.etree.ElementTree\n+import zipfile\n from typing import Generator\n from typing import Sequence\n \n@@ -57,10 +60,29 @@\n ),\n )\n \n- # Determine tool from the packaged file <tool_name>.<version>.nupkg\n- build_outputs = os.listdir(os.path.join(prefix.prefix_dir, build_dir))\n- for output in build_outputs:\n- tool_name = output.split('.')[0]\n+ nupkg_dir = prefix.path(build_dir)\n+ nupkgs = [x for x in os.listdir(nupkg_dir) if x.endswith('.nupkg')]\n+\n+ if not nupkgs:\n+ raise AssertionError('could not find any build outputs to install')\n+\n+ for nupkg in nupkgs:\n+ with zipfile.ZipFile(os.path.join(nupkg_dir, nupkg)) as f:\n+ nuspec, = (x for x in f.namelist() if x.endswith('.nuspec'))\n+ with f.open(nuspec) as spec:\n+ tree = xml.etree.ElementTree.parse(spec)\n+\n+ namespace = re.match(r'{.*}', tree.getroot().tag)\n+ if not namespace:\n+ raise AssertionError('could not parse namespace from nuspec')\n+\n+ tool_id_element = tree.find(f'.//{namespace[0]}id')\n+ if tool_id_element is None:\n+ raise AssertionError('expected to find an \"id\" element')\n+\n+ tool_id = tool_id_element.text\n+ if not tool_id:\n+ raise AssertionError('\"id\" element missing tool name')\n \n # Install to bin dir\n helpers.run_setup_cmd(\n@@ -69,7 +91,7 @@\n 'dotnet', 'tool', 'install',\n '--tool-path', os.path.join(envdir, BIN_DIR),\n '--add-source', build_dir,\n- tool_name,\n+ tool_id,\n ),\n )\n", "issue": "dotnet install fails for prefixed packages\n### search you tried in the issue tracker\n\ndotnet tool\n\n### describe your issue\n\nA bit of an oversight when constructing `tool_name` here:\r\n\r\nhttps://github.com/pre-commit/pre-commit/blob/cb0bcfd67fc35e91f7b2eca7e33bceda459dca77/pre_commit/languages/dotnet.py#L60-L63\r\n\r\nE.g.\r\n\r\n```console\r\n$ pre-commit try-repo https://github.com/rkm/sample-dotnet-tool\r\n[INFO] Initializing environment for https://github.com/rkm/sample-dotnet-tool.\r\n===============================================================================\r\nUsing config:\r\n===============================================================================\r\nrepos:\r\n- repo: https://github.com/rkm/sample-dotnet-tool\r\n rev: e53a3601bc06bb038dac30da813572291dd8d58f\r\n hooks:\r\n - id: sample-dotnet-tool\r\n===============================================================================\r\n[INFO] Installing environment for https://github.com/rkm/sample-dotnet-tool.\r\n[INFO] Once installed this environment will be reused.\r\n[INFO] This may take a few minutes...\r\nAn unexpected error has occurred: CalledProcessError: command: ('/home/rkm/bin/dotnet', 'tool', 'install', '--tool-path', '/tmp/tmp6bk4v26x/repotefhurdg/dotnetenv-default/bin', '--add-source', 'pre-commit-build', 'Rkm')\r\nreturn code: 1\r\nexpected return code: 0\r\nstdout:\r\n /tmp/1873db78-d0a7-48ba-bbff-10a7ef85a2a6/restore.csproj : error NU1101: Unable to find package rkm. No packages exist with this id in source(s): /tmp/tmp6bk4v26x/repotefhurdg/pre-commit-build, nuget.org\r\n\r\nstderr:\r\n The tool package could not be restored.\r\n Tool 'rkm' failed to install. 
This failure may have been caused by:\r\n\r\n * You are attempting to install a preview release and did not use the --version option to specify the version.\r\n * A package by this name was found, but it was not a .NET tool.\r\n * The required NuGet feed cannot be accessed, perhaps because of an Internet connection problem.\r\n * You mistyped the name of the tool.\r\n\r\n For more reasons, including package naming enforcement, visit https://aka.ms/failure-installing-tool\r\n\r\nCheck the log at /home/rkm/.cache/pre-commit/pre-commit.log\r\n```\n\n### pre-commit --version\n\npre-commit 2.20.0\n\n### .pre-commit-config.yaml\n\n```yaml\nrepos:\r\n- repo: https://github.com/rkm/sample-dotnet-tool\r\n rev: e53a3601bc06bb038dac30da813572291dd8d58f\r\n hooks:\r\n - id: sample-dotnet-tool\n```\n\n\n### ~/.cache/pre-commit/pre-commit.log (if present)\n\n_No response_\n", "before_files": [{"content": "from __future__ import annotations\n\nimport contextlib\nimport os.path\nfrom typing import Generator\nfrom typing import Sequence\n\nimport pre_commit.constants as C\nfrom pre_commit.envcontext import envcontext\nfrom pre_commit.envcontext import PatchesT\nfrom pre_commit.envcontext import Var\nfrom pre_commit.hook import Hook\nfrom pre_commit.languages import helpers\nfrom pre_commit.prefix import Prefix\nfrom pre_commit.util import clean_path_on_failure\n\nENVIRONMENT_DIR = 'dotnetenv'\nBIN_DIR = 'bin'\n\nget_default_version = helpers.basic_get_default_version\nhealth_check = helpers.basic_health_check\n\n\ndef get_env_patch(venv: str) -> PatchesT:\n return (\n ('PATH', (os.path.join(venv, BIN_DIR), os.pathsep, Var('PATH'))),\n )\n\n\[email protected]\ndef in_env(prefix: Prefix) -> Generator[None, None, None]:\n directory = helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT)\n envdir = prefix.path(directory)\n with envcontext(get_env_patch(envdir)):\n yield\n\n\ndef install_environment(\n prefix: Prefix,\n version: str,\n additional_dependencies: Sequence[str],\n) -> None:\n helpers.assert_version_default('dotnet', version)\n helpers.assert_no_additional_deps('dotnet', additional_dependencies)\n\n envdir = prefix.path(helpers.environment_dir(ENVIRONMENT_DIR, version))\n with clean_path_on_failure(envdir):\n build_dir = 'pre-commit-build'\n\n # Build & pack nupkg file\n helpers.run_setup_cmd(\n prefix,\n (\n 'dotnet', 'pack',\n '--configuration', 'Release',\n '--output', build_dir,\n ),\n )\n\n # Determine tool from the packaged file <tool_name>.<version>.nupkg\n build_outputs = os.listdir(os.path.join(prefix.prefix_dir, build_dir))\n for output in build_outputs:\n tool_name = output.split('.')[0]\n\n # Install to bin dir\n helpers.run_setup_cmd(\n prefix,\n (\n 'dotnet', 'tool', 'install',\n '--tool-path', os.path.join(envdir, BIN_DIR),\n '--add-source', build_dir,\n tool_name,\n ),\n )\n\n # Clean the git dir, ignoring the environment dir\n clean_cmd = ('git', 'clean', '-ffxd', '-e', f'{ENVIRONMENT_DIR}-*')\n helpers.run_setup_cmd(prefix, clean_cmd)\n\n\ndef run_hook(\n hook: Hook,\n file_args: Sequence[str],\n color: bool,\n) -> tuple[int, bytes]:\n with in_env(hook.prefix):\n return helpers.run_xargs(hook, hook.cmd, file_args, color=color)\n", "path": "pre_commit/languages/dotnet.py"}], "after_files": [{"content": "from __future__ import annotations\n\nimport contextlib\nimport os.path\nimport re\nimport xml.etree.ElementTree\nimport zipfile\nfrom typing import Generator\nfrom typing import Sequence\n\nimport pre_commit.constants as C\nfrom pre_commit.envcontext import envcontext\nfrom 
pre_commit.envcontext import PatchesT\nfrom pre_commit.envcontext import Var\nfrom pre_commit.hook import Hook\nfrom pre_commit.languages import helpers\nfrom pre_commit.prefix import Prefix\nfrom pre_commit.util import clean_path_on_failure\n\nENVIRONMENT_DIR = 'dotnetenv'\nBIN_DIR = 'bin'\n\nget_default_version = helpers.basic_get_default_version\nhealth_check = helpers.basic_health_check\n\n\ndef get_env_patch(venv: str) -> PatchesT:\n return (\n ('PATH', (os.path.join(venv, BIN_DIR), os.pathsep, Var('PATH'))),\n )\n\n\[email protected]\ndef in_env(prefix: Prefix) -> Generator[None, None, None]:\n directory = helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT)\n envdir = prefix.path(directory)\n with envcontext(get_env_patch(envdir)):\n yield\n\n\ndef install_environment(\n prefix: Prefix,\n version: str,\n additional_dependencies: Sequence[str],\n) -> None:\n helpers.assert_version_default('dotnet', version)\n helpers.assert_no_additional_deps('dotnet', additional_dependencies)\n\n envdir = prefix.path(helpers.environment_dir(ENVIRONMENT_DIR, version))\n with clean_path_on_failure(envdir):\n build_dir = 'pre-commit-build'\n\n # Build & pack nupkg file\n helpers.run_setup_cmd(\n prefix,\n (\n 'dotnet', 'pack',\n '--configuration', 'Release',\n '--output', build_dir,\n ),\n )\n\n nupkg_dir = prefix.path(build_dir)\n nupkgs = [x for x in os.listdir(nupkg_dir) if x.endswith('.nupkg')]\n\n if not nupkgs:\n raise AssertionError('could not find any build outputs to install')\n\n for nupkg in nupkgs:\n with zipfile.ZipFile(os.path.join(nupkg_dir, nupkg)) as f:\n nuspec, = (x for x in f.namelist() if x.endswith('.nuspec'))\n with f.open(nuspec) as spec:\n tree = xml.etree.ElementTree.parse(spec)\n\n namespace = re.match(r'{.*}', tree.getroot().tag)\n if not namespace:\n raise AssertionError('could not parse namespace from nuspec')\n\n tool_id_element = tree.find(f'.//{namespace[0]}id')\n if tool_id_element is None:\n raise AssertionError('expected to find an \"id\" element')\n\n tool_id = tool_id_element.text\n if not tool_id:\n raise AssertionError('\"id\" element missing tool name')\n\n # Install to bin dir\n helpers.run_setup_cmd(\n prefix,\n (\n 'dotnet', 'tool', 'install',\n '--tool-path', os.path.join(envdir, BIN_DIR),\n '--add-source', build_dir,\n tool_id,\n ),\n )\n\n # Clean the git dir, ignoring the environment dir\n clean_cmd = ('git', 'clean', '-ffxd', '-e', f'{ENVIRONMENT_DIR}-*')\n helpers.run_setup_cmd(prefix, clean_cmd)\n\n\ndef run_hook(\n hook: Hook,\n file_args: Sequence[str],\n color: bool,\n) -> tuple[int, bytes]:\n with in_env(hook.prefix):\n return helpers.run_xargs(hook, hook.cmd, file_args, color=color)\n", "path": "pre_commit/languages/dotnet.py"}]}
| 1,689 | 491 |
gh_patches_debug_15047
|
rasdani/github-patches
|
git_diff
|
freedomofpress__securedrop-2694
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Tor2Web warning does not display well
# Bug
## Description
The `<strong>` tags are set incorrectly, so the words around them run together.
## Steps to Reproduce
firefox https://zdf4nikyuswdzbt6.onion.to/
## Expected Behavior
Spaces between words
## Actual Behavior
Spaces between words are missing, and the text alternates oddly between bold and regular weight.

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `securedrop/source_app/__init__.py`
Content:
```
1 from datetime import datetime, timedelta
2 from flask import (Flask, render_template, flash, Markup, request, g, session,
3 url_for, redirect)
4 from flask_babel import gettext
5 from flask_assets import Environment
6 from flask_wtf.csrf import CSRFProtect, CSRFError
7 from jinja2 import evalcontextfilter
8 from os import path
9 from sqlalchemy.orm.exc import NoResultFound
10
11 import crypto_util
12 import i18n
13 import store
14 import template_filters
15 import version
16
17 from db import Source, db_session
18 from request_that_secures_file_uploads import RequestThatSecuresFileUploads
19 from source_app import main, info, api
20 from source_app.decorators import ignore_static
21 from source_app.utils import logged_in
22
23
24 def create_app(config):
25 app = Flask(__name__,
26 template_folder=config.SOURCE_TEMPLATES_DIR,
27 static_folder=path.join(config.SECUREDROP_ROOT, 'static'))
28 app.request_class = RequestThatSecuresFileUploads
29 app.config.from_object(config.SourceInterfaceFlaskConfig)
30
31 # The default CSRF token expiration is 1 hour. Since large uploads can
32 # take longer than an hour over Tor, we increase the valid window to 24h.
33 app.config['WTF_CSRF_TIME_LIMIT'] = 60 * 60 * 24
34
35 CSRFProtect(app)
36
37 @app.errorhandler(CSRFError)
38 def handle_csrf_error(e):
39 msg = render_template('session_timeout.html')
40 session.clear()
41 flash(Markup(msg), "important")
42 return redirect(url_for('main.index'))
43
44 assets = Environment(app)
45 app.config['assets'] = assets
46
47 i18n.setup_app(app)
48
49 app.jinja_env.trim_blocks = True
50 app.jinja_env.lstrip_blocks = True
51 app.jinja_env.globals['version'] = version.__version__
52 if getattr(config, 'CUSTOM_HEADER_IMAGE', None):
53 app.jinja_env.globals['header_image'] = config.CUSTOM_HEADER_IMAGE
54 app.jinja_env.globals['use_custom_header_image'] = True
55 else:
56 app.jinja_env.globals['header_image'] = 'logo.png'
57 app.jinja_env.globals['use_custom_header_image'] = False
58
59 app.jinja_env.filters['rel_datetime_format'] = \
60 template_filters.rel_datetime_format
61 app.jinja_env.filters['nl2br'] = evalcontextfilter(template_filters.nl2br)
62 app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat
63
64 for module in [main, info, api]:
65 app.register_blueprint(module.make_blueprint(config))
66
67 @app.before_request
68 @ignore_static
69 def check_tor2web():
70 # ignore_static here so we only flash a single message warning
71 # about Tor2Web, corresponding to the initial page load.
72 if 'X-tor2web' in request.headers:
73 flash(Markup(gettext(
74 '<strong>WARNING:</strong> You appear to be using Tor2Web. '
75 'This <strong>does not</strong> provide anonymity. '
76 '<a href="{url}">Why is this dangerous?</a>')
77 .format(url=url_for('info.tor2web_warning'))),
78 "banner-warning")
79
80 @app.before_request
81 @ignore_static
82 def setup_g():
83 """Store commonly used values in Flask's special g object"""
84 g.locale = i18n.get_locale()
85 g.text_direction = i18n.get_text_direction(g.locale)
86 g.html_lang = i18n.locale_to_rfc_5646(g.locale)
87 g.locales = i18n.get_locale2name()
88
89 if 'expires' in session and datetime.utcnow() >= session['expires']:
90 msg = render_template('session_timeout.html')
91
92 # clear the session after we render the message so it's localized
93 session.clear()
94
95 flash(Markup(msg), "important")
96
97 session['expires'] = datetime.utcnow() + \
98 timedelta(minutes=getattr(config,
99 'SESSION_EXPIRATION_MINUTES',
100 120))
101
102 # ignore_static here because `crypto_util.hash_codename` is scrypt
103 # (very time consuming), and we don't need to waste time running if
104 # we're just serving a static resource that won't need to access
105 # these common values.
106 if logged_in():
107 g.codename = session['codename']
108 g.filesystem_id = crypto_util.hash_codename(g.codename)
109 try:
110 g.source = Source.query \
111 .filter(Source.filesystem_id == g.filesystem_id) \
112 .one()
113 except NoResultFound as e:
114 app.logger.error(
115 "Found no Sources when one was expected: %s" %
116 (e,))
117 del session['logged_in']
118 del session['codename']
119 return redirect(url_for('main.index'))
120 g.loc = store.path(g.filesystem_id)
121
122 @app.teardown_appcontext
123 def shutdown_session(exception=None):
124 """Automatically remove database sessions at the end of the request, or
125 when the application shuts down"""
126 db_session.remove()
127
128 @app.errorhandler(404)
129 def page_not_found(error):
130 return render_template('notfound.html'), 404
131
132 @app.errorhandler(500)
133 def internal_error(error):
134 return render_template('error.html'), 500
135
136 return app
137
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py
--- a/securedrop/source_app/__init__.py
+++ b/securedrop/source_app/__init__.py
@@ -71,8 +71,10 @@
# about Tor2Web, corresponding to the initial page load.
if 'X-tor2web' in request.headers:
flash(Markup(gettext(
- '<strong>WARNING:</strong> You appear to be using Tor2Web. '
- 'This <strong>does not</strong> provide anonymity. '
+ '<strong>WARNING: </strong> '
+ 'You appear to be using Tor2Web. '
+ 'This <strong> does not </strong> '
+ 'provide anonymity. '
'<a href="{url}">Why is this dangerous?</a>')
.format(url=url_for('info.tor2web_warning'))),
"banner-warning")
|
{"golden_diff": "diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py\n--- a/securedrop/source_app/__init__.py\n+++ b/securedrop/source_app/__init__.py\n@@ -71,8 +71,10 @@\n # about Tor2Web, corresponding to the initial page load.\n if 'X-tor2web' in request.headers:\n flash(Markup(gettext(\n- '<strong>WARNING:</strong> You appear to be using Tor2Web. '\n- 'This <strong>does not</strong> provide anonymity. '\n+ '<strong>WARNING: </strong> '\n+ 'You appear to be using Tor2Web. '\n+ 'This <strong> does not </strong> '\n+ 'provide anonymity. '\n '<a href=\"{url}\">Why is this dangerous?</a>')\n .format(url=url_for('info.tor2web_warning'))),\n \"banner-warning\")\n", "issue": "Tor2Web warning does not display well\n# Bug\r\n\r\n## Description\r\n\r\nThe <strong> tags are incorrectly set.\r\n\r\n## Steps to Reproduce\r\n\r\nfirefox https://zdf4nikyuswdzbt6.onion.to/\r\n\r\n## Expected Behavior\r\n\r\nSpaces between words\r\n\r\n## Actual Behavior\r\n\r\nMissing spaces and weird bold / regular changes.\r\n\r\n\r\n\r\n\n", "before_files": [{"content": "from datetime import datetime, timedelta\nfrom flask import (Flask, render_template, flash, Markup, request, g, session,\n url_for, redirect)\nfrom flask_babel import gettext\nfrom flask_assets import Environment\nfrom flask_wtf.csrf import CSRFProtect, CSRFError\nfrom jinja2 import evalcontextfilter\nfrom os import path\nfrom sqlalchemy.orm.exc import NoResultFound\n\nimport crypto_util\nimport i18n\nimport store\nimport template_filters\nimport version\n\nfrom db import Source, db_session\nfrom request_that_secures_file_uploads import RequestThatSecuresFileUploads\nfrom source_app import main, info, api\nfrom source_app.decorators import ignore_static\nfrom source_app.utils import logged_in\n\n\ndef create_app(config):\n app = Flask(__name__,\n template_folder=config.SOURCE_TEMPLATES_DIR,\n static_folder=path.join(config.SECUREDROP_ROOT, 'static'))\n app.request_class = RequestThatSecuresFileUploads\n app.config.from_object(config.SourceInterfaceFlaskConfig)\n\n # The default CSRF token expiration is 1 hour. 
Since large uploads can\n # take longer than an hour over Tor, we increase the valid window to 24h.\n app.config['WTF_CSRF_TIME_LIMIT'] = 60 * 60 * 24\n\n CSRFProtect(app)\n\n @app.errorhandler(CSRFError)\n def handle_csrf_error(e):\n msg = render_template('session_timeout.html')\n session.clear()\n flash(Markup(msg), \"important\")\n return redirect(url_for('main.index'))\n\n assets = Environment(app)\n app.config['assets'] = assets\n\n i18n.setup_app(app)\n\n app.jinja_env.trim_blocks = True\n app.jinja_env.lstrip_blocks = True\n app.jinja_env.globals['version'] = version.__version__\n if getattr(config, 'CUSTOM_HEADER_IMAGE', None):\n app.jinja_env.globals['header_image'] = config.CUSTOM_HEADER_IMAGE\n app.jinja_env.globals['use_custom_header_image'] = True\n else:\n app.jinja_env.globals['header_image'] = 'logo.png'\n app.jinja_env.globals['use_custom_header_image'] = False\n\n app.jinja_env.filters['rel_datetime_format'] = \\\n template_filters.rel_datetime_format\n app.jinja_env.filters['nl2br'] = evalcontextfilter(template_filters.nl2br)\n app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat\n\n for module in [main, info, api]:\n app.register_blueprint(module.make_blueprint(config))\n\n @app.before_request\n @ignore_static\n def check_tor2web():\n # ignore_static here so we only flash a single message warning\n # about Tor2Web, corresponding to the initial page load.\n if 'X-tor2web' in request.headers:\n flash(Markup(gettext(\n '<strong>WARNING:</strong> You appear to be using Tor2Web. '\n 'This <strong>does not</strong> provide anonymity. '\n '<a href=\"{url}\">Why is this dangerous?</a>')\n .format(url=url_for('info.tor2web_warning'))),\n \"banner-warning\")\n\n @app.before_request\n @ignore_static\n def setup_g():\n \"\"\"Store commonly used values in Flask's special g object\"\"\"\n g.locale = i18n.get_locale()\n g.text_direction = i18n.get_text_direction(g.locale)\n g.html_lang = i18n.locale_to_rfc_5646(g.locale)\n g.locales = i18n.get_locale2name()\n\n if 'expires' in session and datetime.utcnow() >= session['expires']:\n msg = render_template('session_timeout.html')\n\n # clear the session after we render the message so it's localized\n session.clear()\n\n flash(Markup(msg), \"important\")\n\n session['expires'] = datetime.utcnow() + \\\n timedelta(minutes=getattr(config,\n 'SESSION_EXPIRATION_MINUTES',\n 120))\n\n # ignore_static here because `crypto_util.hash_codename` is scrypt\n # (very time consuming), and we don't need to waste time running if\n # we're just serving a static resource that won't need to access\n # these common values.\n if logged_in():\n g.codename = session['codename']\n g.filesystem_id = crypto_util.hash_codename(g.codename)\n try:\n g.source = Source.query \\\n .filter(Source.filesystem_id == g.filesystem_id) \\\n .one()\n except NoResultFound as e:\n app.logger.error(\n \"Found no Sources when one was expected: %s\" %\n (e,))\n del session['logged_in']\n del session['codename']\n return redirect(url_for('main.index'))\n g.loc = store.path(g.filesystem_id)\n\n @app.teardown_appcontext\n def shutdown_session(exception=None):\n \"\"\"Automatically remove database sessions at the end of the request, or\n when the application shuts down\"\"\"\n db_session.remove()\n\n @app.errorhandler(404)\n def page_not_found(error):\n return render_template('notfound.html'), 404\n\n @app.errorhandler(500)\n def internal_error(error):\n return render_template('error.html'), 500\n\n return app\n", "path": "securedrop/source_app/__init__.py"}], 
"after_files": [{"content": "from datetime import datetime, timedelta\nfrom flask import (Flask, render_template, flash, Markup, request, g, session,\n url_for, redirect)\nfrom flask_babel import gettext\nfrom flask_assets import Environment\nfrom flask_wtf.csrf import CSRFProtect, CSRFError\nfrom jinja2 import evalcontextfilter\nfrom os import path\nfrom sqlalchemy.orm.exc import NoResultFound\n\nimport crypto_util\nimport i18n\nimport store\nimport template_filters\nimport version\n\nfrom db import Source, db_session\nfrom request_that_secures_file_uploads import RequestThatSecuresFileUploads\nfrom source_app import main, info, api\nfrom source_app.decorators import ignore_static\nfrom source_app.utils import logged_in\n\n\ndef create_app(config):\n app = Flask(__name__,\n template_folder=config.SOURCE_TEMPLATES_DIR,\n static_folder=path.join(config.SECUREDROP_ROOT, 'static'))\n app.request_class = RequestThatSecuresFileUploads\n app.config.from_object(config.SourceInterfaceFlaskConfig)\n\n # The default CSRF token expiration is 1 hour. Since large uploads can\n # take longer than an hour over Tor, we increase the valid window to 24h.\n app.config['WTF_CSRF_TIME_LIMIT'] = 60 * 60 * 24\n\n CSRFProtect(app)\n\n @app.errorhandler(CSRFError)\n def handle_csrf_error(e):\n msg = render_template('session_timeout.html')\n session.clear()\n flash(Markup(msg), \"important\")\n return redirect(url_for('main.index'))\n\n assets = Environment(app)\n app.config['assets'] = assets\n\n i18n.setup_app(app)\n\n app.jinja_env.trim_blocks = True\n app.jinja_env.lstrip_blocks = True\n app.jinja_env.globals['version'] = version.__version__\n if getattr(config, 'CUSTOM_HEADER_IMAGE', None):\n app.jinja_env.globals['header_image'] = config.CUSTOM_HEADER_IMAGE\n app.jinja_env.globals['use_custom_header_image'] = True\n else:\n app.jinja_env.globals['header_image'] = 'logo.png'\n app.jinja_env.globals['use_custom_header_image'] = False\n\n app.jinja_env.filters['rel_datetime_format'] = \\\n template_filters.rel_datetime_format\n app.jinja_env.filters['nl2br'] = evalcontextfilter(template_filters.nl2br)\n app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat\n\n for module in [main, info, api]:\n app.register_blueprint(module.make_blueprint(config))\n\n @app.before_request\n @ignore_static\n def check_tor2web():\n # ignore_static here so we only flash a single message warning\n # about Tor2Web, corresponding to the initial page load.\n if 'X-tor2web' in request.headers:\n flash(Markup(gettext(\n '<strong>WARNING: </strong> '\n 'You appear to be using Tor2Web. '\n 'This <strong> does not </strong> '\n 'provide anonymity. 
'\n '<a href=\"{url}\">Why is this dangerous?</a>')\n .format(url=url_for('info.tor2web_warning'))),\n \"banner-warning\")\n\n @app.before_request\n @ignore_static\n def setup_g():\n \"\"\"Store commonly used values in Flask's special g object\"\"\"\n g.locale = i18n.get_locale()\n g.text_direction = i18n.get_text_direction(g.locale)\n g.html_lang = i18n.locale_to_rfc_5646(g.locale)\n g.locales = i18n.get_locale2name()\n\n if 'expires' in session and datetime.utcnow() >= session['expires']:\n msg = render_template('session_timeout.html')\n\n # clear the session after we render the message so it's localized\n session.clear()\n\n flash(Markup(msg), \"important\")\n\n session['expires'] = datetime.utcnow() + \\\n timedelta(minutes=getattr(config,\n 'SESSION_EXPIRATION_MINUTES',\n 120))\n\n # ignore_static here because `crypto_util.hash_codename` is scrypt\n # (very time consuming), and we don't need to waste time running if\n # we're just serving a static resource that won't need to access\n # these common values.\n if logged_in():\n g.codename = session['codename']\n g.filesystem_id = crypto_util.hash_codename(g.codename)\n try:\n g.source = Source.query \\\n .filter(Source.filesystem_id == g.filesystem_id) \\\n .one()\n except NoResultFound as e:\n app.logger.error(\n \"Found no Sources when one was expected: %s\" %\n (e,))\n del session['logged_in']\n del session['codename']\n return redirect(url_for('main.index'))\n g.loc = store.path(g.filesystem_id)\n\n @app.teardown_appcontext\n def shutdown_session(exception=None):\n \"\"\"Automatically remove database sessions at the end of the request, or\n when the application shuts down\"\"\"\n db_session.remove()\n\n @app.errorhandler(404)\n def page_not_found(error):\n return render_template('notfound.html'), 404\n\n @app.errorhandler(500)\n def internal_error(error):\n return render_template('error.html'), 500\n\n return app\n", "path": "securedrop/source_app/__init__.py"}]}
| 1,871 | 214 |
gh_patches_debug_32731
|
rasdani/github-patches
|
git_diff
|
microsoft__ptvsd-1454
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Stop on entry hangs the debugger
## Environment data
- PTVSD version: master
- OS and version: windows
- Python version (& distribution if applicable, e.g. Anaconda): 3.6
- Using VS Code or Visual Studio: VS or VSC
## Actual behavior
The debugger hangs. (Note that to observe this you have to change the default for `STOP_ON_ENTRY` in wrapper.py#1198 to `True`.)
Repros almost always on VS, but only infrequently on VSC.
## Expected behavior
Should run and break on entry
## Steps to reproduce:
For VS:
1. Change wrapper.py:1198 default for STOP_ON_ENTRY to True.
2. Use F5 to start the debugger.
For VSC:
1. Set `stopOnEntry` to `true` in launch.json.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/ptvsd/debugger.py`
Content:
```
1 # Copyright (c) Microsoft Corporation. All rights reserved.
2 # Licensed under the MIT License. See LICENSE in the project root
3 # for license information.
4
5 import sys
6
7 import ptvsd.log
8 from ptvsd._local import run_module, run_file, run_main
9
10
11 # TODO: not needed?
12 DONT_DEBUG = []
13
14 LOCALHOST = 'localhost'
15
16 RUNNERS = {
17 'module': run_module, # python -m spam
18 'script': run_file, # python spam.py
19 'code': run_file, # python -c 'print("spam")'
20 None: run_file, # catchall
21 }
22
23
24 def debug(filename, port_num, debug_id, debug_options, run_as,
25 _runners=RUNNERS, _extra=None, *args, **kwargs):
26
27 ptvsd.log.to_file()
28 ptvsd.log.info('debug{0!r}', (filename, port_num, debug_id, debug_options, run_as))
29
30 if _extra is None:
31 _extra = sys.argv[1:]
32 address = (LOCALHOST, port_num)
33 try:
34 run = _runners[run_as]
35 except KeyError:
36 # TODO: fail?
37 run = _runners[None]
38 if _extra:
39 args = _extra + list(args)
40 kwargs.setdefault('singlesession', True)
41 run(address, filename, *args, **kwargs)
42
43
44 def run(filename, port_num, run_as,
45 *args, **kwargs):
46
47 ptvsd.log.to_file()
48 ptvsd.log.info('run{0!r}', (filename, port_num, run_as))
49
50 address = (LOCALHOST, port_num)
51 run_main(address, filename, run_as, *args, **kwargs)
52
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/ptvsd/debugger.py b/src/ptvsd/debugger.py
--- a/src/ptvsd/debugger.py
+++ b/src/ptvsd/debugger.py
@@ -2,50 +2,38 @@
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
-import sys
-
import ptvsd.log
-from ptvsd._local import run_module, run_file, run_main
-
-
-# TODO: not needed?
-DONT_DEBUG = []
+import ptvsd.options
+from ptvsd.__main__ import run_file, run_module, run_code
-LOCALHOST = 'localhost'
RUNNERS = {
- 'module': run_module, # python -m spam
- 'script': run_file, # python spam.py
- 'code': run_file, # python -c 'print("spam")'
- None: run_file, # catchall
+ 'module': run_module,
+ 'script': run_file,
+ 'code': run_code,
}
+# Not actually used, but VS will try to add entries to it.
+DONT_DEBUG = []
-def debug(filename, port_num, debug_id, debug_options, run_as,
- _runners=RUNNERS, _extra=None, *args, **kwargs):
+# A legacy entrypoint for Visual Studio, to allow older versions to work with new ptvsd.
+# All new code should use the entrypoints in __main__ directly.
+def debug(filename, port_num, debug_id, debug_options, run_as):
ptvsd.log.to_file()
ptvsd.log.info('debug{0!r}', (filename, port_num, debug_id, debug_options, run_as))
- if _extra is None:
- _extra = sys.argv[1:]
- address = (LOCALHOST, port_num)
try:
- run = _runners[run_as]
+ run = RUNNERS[run_as]
except KeyError:
- # TODO: fail?
- run = _runners[None]
- if _extra:
- args = _extra + list(args)
- kwargs.setdefault('singlesession', True)
- run(address, filename, *args, **kwargs)
-
+ raise ValueError('run_as must be one of: {0!r}'.format(tuple(RUNNERS.keys())))
-def run(filename, port_num, run_as,
- *args, **kwargs):
+ ptvsd.options.target_kind = 'file' if run_as == 'script' else run_as
+ ptvsd.options.target = filename
+ ptvsd.options.port = port_num
+ ptvsd.options.client = True
- ptvsd.log.to_file()
- ptvsd.log.info('run{0!r}', (filename, port_num, run_as))
+ # debug_id is ignored because it has no meaning in DAP.
+ # debug_options are ignored, because they will be passed later via DAP "launch" request.
- address = (LOCALHOST, port_num)
- run_main(address, filename, run_as, *args, **kwargs)
+ run()
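
For context, this is roughly how the legacy Visual Studio entrypoint would be invoked after the patch (a hypothetical call; the GUID and options string are placeholders, and with the new code both of those arguments are simply ignored). Note that an unknown `run_as` now raises `ValueError` instead of silently falling back to `run_file`:

```python
import ptvsd.debugger

# Mirrors how older Visual Studio versions call the shim; values are illustrative.
ptvsd.debugger.debug(
    filename='spam.py',
    port_num=5678,
    debug_id='00000000-0000-0000-0000-000000000000',  # ignored: no meaning in DAP
    debug_options='RedirectOutput',  # ignored: passed later via the DAP "launch" request
    run_as='script',
)
```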
|
{"golden_diff": "diff --git a/src/ptvsd/debugger.py b/src/ptvsd/debugger.py\n--- a/src/ptvsd/debugger.py\n+++ b/src/ptvsd/debugger.py\n@@ -2,50 +2,38 @@\n # Licensed under the MIT License. See LICENSE in the project root\n # for license information.\n \n-import sys\n-\n import ptvsd.log\n-from ptvsd._local import run_module, run_file, run_main\n-\n-\n-# TODO: not needed?\n-DONT_DEBUG = []\n+import ptvsd.options\n+from ptvsd.__main__ import run_file, run_module, run_code\n \n-LOCALHOST = 'localhost'\n \n RUNNERS = {\n- 'module': run_module, # python -m spam\n- 'script': run_file, # python spam.py\n- 'code': run_file, # python -c 'print(\"spam\")'\n- None: run_file, # catchall\n+ 'module': run_module,\n+ 'script': run_file,\n+ 'code': run_code,\n }\n \n+# Not actually used, but VS will try to add entries to it.\n+DONT_DEBUG = []\n \n-def debug(filename, port_num, debug_id, debug_options, run_as,\n- _runners=RUNNERS, _extra=None, *args, **kwargs):\n \n+# A legacy entrypoint for Visual Studio, to allow older versions to work with new ptvsd.\n+# All new code should use the entrypoints in __main__ directly.\n+def debug(filename, port_num, debug_id, debug_options, run_as):\n ptvsd.log.to_file()\n ptvsd.log.info('debug{0!r}', (filename, port_num, debug_id, debug_options, run_as))\n \n- if _extra is None:\n- _extra = sys.argv[1:]\n- address = (LOCALHOST, port_num)\n try:\n- run = _runners[run_as]\n+ run = RUNNERS[run_as]\n except KeyError:\n- # TODO: fail?\n- run = _runners[None]\n- if _extra:\n- args = _extra + list(args)\n- kwargs.setdefault('singlesession', True)\n- run(address, filename, *args, **kwargs)\n-\n+ raise ValueError('run_as must be one of: {0!r}'.format(tuple(RUNNERS.keys())))\n \n-def run(filename, port_num, run_as,\n- *args, **kwargs):\n+ ptvsd.options.target_kind = 'file' if run_as == 'script' else run_as\n+ ptvsd.options.target = filename\n+ ptvsd.options.port = port_num\n+ ptvsd.options.client = True\n \n- ptvsd.log.to_file()\n- ptvsd.log.info('run{0!r}', (filename, port_num, run_as))\n+ # debug_id is ignored because it has no meaning in DAP.\n+ # debug_options are ignored, because they will be passed later via DAP \"launch\" request.\n \n- address = (LOCALHOST, port_num)\n- run_main(address, filename, run_as, *args, **kwargs)\n+ run()\n", "issue": "Stop on entry hangs the debugger\n## Environment data\r\n\r\n- PTVSD version: master\r\n- OS and version: windows\r\n- Python version (& distribution if applicable, e.g. Anaconda): 3.6\r\n- Using VS Code or Visual Studio: VS or VSC\r\n\r\n## Actual behavior\r\n\r\nNote that you have to change the default for STOP_ON_ENTRY in wrapper.py#1198 to True.\r\nRepros almost always on VS, but infrequently on VSC.\r\n\r\n## Expected behavior\r\n\r\nShould run and break on entry\r\n\r\n## Steps to reproduce:\r\nFor VS:\r\n1. Change wrapper.py:1198 default for STOP_ON_ENTRY to True.\r\n2. Use F5 to start the debugger.\r\n\r\nFor VSC:\r\n1. Set `stopOnEntry` in launch json.\r\n\r\n\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. 
See LICENSE in the project root\n# for license information.\n\nimport sys\n\nimport ptvsd.log\nfrom ptvsd._local import run_module, run_file, run_main\n\n\n# TODO: not needed?\nDONT_DEBUG = []\n\nLOCALHOST = 'localhost'\n\nRUNNERS = {\n 'module': run_module, # python -m spam\n 'script': run_file, # python spam.py\n 'code': run_file, # python -c 'print(\"spam\")'\n None: run_file, # catchall\n}\n\n\ndef debug(filename, port_num, debug_id, debug_options, run_as,\n _runners=RUNNERS, _extra=None, *args, **kwargs):\n\n ptvsd.log.to_file()\n ptvsd.log.info('debug{0!r}', (filename, port_num, debug_id, debug_options, run_as))\n\n if _extra is None:\n _extra = sys.argv[1:]\n address = (LOCALHOST, port_num)\n try:\n run = _runners[run_as]\n except KeyError:\n # TODO: fail?\n run = _runners[None]\n if _extra:\n args = _extra + list(args)\n kwargs.setdefault('singlesession', True)\n run(address, filename, *args, **kwargs)\n\n\ndef run(filename, port_num, run_as,\n *args, **kwargs):\n\n ptvsd.log.to_file()\n ptvsd.log.info('run{0!r}', (filename, port_num, run_as))\n\n address = (LOCALHOST, port_num)\n run_main(address, filename, run_as, *args, **kwargs)\n", "path": "src/ptvsd/debugger.py"}], "after_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nimport ptvsd.log\nimport ptvsd.options\nfrom ptvsd.__main__ import run_file, run_module, run_code\n\n\nRUNNERS = {\n 'module': run_module,\n 'script': run_file,\n 'code': run_code,\n}\n\n# Not actually used, but VS will try to add entries to it.\nDONT_DEBUG = []\n\n\n# A legacy entrypoint for Visual Studio, to allow older versions to work with new ptvsd.\n# All new code should use the entrypoints in __main__ directly.\ndef debug(filename, port_num, debug_id, debug_options, run_as):\n ptvsd.log.to_file()\n ptvsd.log.info('debug{0!r}', (filename, port_num, debug_id, debug_options, run_as))\n\n try:\n run = RUNNERS[run_as]\n except KeyError:\n raise ValueError('run_as must be one of: {0!r}'.format(tuple(RUNNERS.keys())))\n\n ptvsd.options.target_kind = 'file' if run_as == 'script' else run_as\n ptvsd.options.target = filename\n ptvsd.options.port = port_num\n ptvsd.options.client = True\n\n # debug_id is ignored because it has no meaning in DAP.\n # debug_options are ignored, because they will be passed later via DAP \"launch\" request.\n\n run()\n", "path": "src/ptvsd/debugger.py"}]}
| 911 | 706 |
gh_patches_debug_32318
|
rasdani/github-patches
|
git_diff
|
opsdroid__opsdroid-210
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add case-insensitive kwarg to the regex matcher
It would be nice to be able to specify case-insensitive matching in the regex matcher, e.g.:
```python
@match_regex(r'myregex', case_sensitive=False)
async def myskill(opsdroid, config, message):
pass
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `opsdroid/parsers/regex.py`
Content:
```
1 """A helper function for parsing and executing regex skills."""
2
3 import logging
4 import re
5
6
7 _LOGGER = logging.getLogger(__name__)
8
9
10 async def parse_regex(opsdroid, message):
11 """Parse a message against all regex skills."""
12 # pylint: disable=broad-except
13 # We want to catch all exceptions coming from a skill module and not
14 # halt the application. If a skill throws an exception it just doesn't
15 # give a response to the user, so an error response should be given.
16 for skill in opsdroid.skills:
17 if "regex" in skill:
18 regex = re.match(skill["regex"], message.text)
19 if regex:
20 message.regex = regex
21 try:
22 await skill["skill"](opsdroid, skill["config"], message)
23 except Exception:
24 await message.respond(
25 "Whoops there has been an error")
26 await message.respond(
27 "Check the log for details")
28 _LOGGER.exception("Exception when parsing '" +
29 message.text +
30 "' against skill '" +
31 skill["regex"] + "'")
32
```
Path: `opsdroid/matchers.py`
Content:
```
1 """Decorator functions to use when creating skill modules."""
2
3 import logging
4
5 from opsdroid.helper import get_opsdroid
6 from opsdroid.web import Web
7
8
9 _LOGGER = logging.getLogger(__name__)
10
11
12 def match_regex(regex):
13 """Return regex match decorator."""
14 def matcher(func):
15 """Add decorated function to skills list for regex matching."""
16 opsdroid = get_opsdroid()
17 opsdroid.skills.append({"regex": regex, "skill": func,
18 "config":
19 opsdroid.loader.current_import_config})
20 return func
21 return matcher
22
23
24 def match_apiai_action(action):
25 """Return apiai action match decorator."""
26 def matcher(func):
27 """Add decorated function to skills list for apiai matching."""
28 opsdroid = get_opsdroid()
29 opsdroid.skills.append({"apiai_action": action, "skill": func,
30 "config":
31 opsdroid.loader.current_import_config})
32 return func
33 return matcher
34
35
36 def match_apiai_intent(intent):
37 """Return apiai intent match decorator."""
38 def matcher(func):
39 """Add decorated function to skills list for apiai matching."""
40 opsdroid = get_opsdroid()
41 opsdroid.skills.append({"apiai_intent": intent, "skill": func,
42 "config":
43 opsdroid.loader.current_import_config})
44 return func
45 return matcher
46
47
48 def match_crontab(crontab, timezone=None):
49 """Return crontab match decorator."""
50 def matcher(func):
51 """Add decorated function to skills list for crontab matching."""
52 opsdroid = get_opsdroid()
53 config = opsdroid.loader.current_import_config
54 opsdroid.skills.append({"crontab": crontab, "skill": func,
55 "config": config, "timezone": timezone})
56 return func
57 return matcher
58
59
60 def match_webhook(webhook):
61 """Return webhook match decorator."""
62 def matcher(func):
63 """Add decorated function to skills list for webhook matching."""
64 opsdroid = get_opsdroid()
65 config = opsdroid.loader.current_import_config
66 opsdroid.skills.append({"webhook": webhook, "skill": func,
67 "config": config})
68
69 async def wrapper(req, opsdroid=opsdroid, config=config):
70 """Wrap up the aiohttp handler."""
71 _LOGGER.info("Running skill %s via webhook", webhook)
72 opsdroid.stats["webhooks_called"] = \
73 opsdroid.stats["webhooks_called"] + 1
74 await func(opsdroid, config, req)
75 return Web.build_response(200, {"called_skill": webhook})
76
77 opsdroid.web_server.web_app.router.add_post(
78 "/skill/{}/{}".format(config["name"], webhook), wrapper)
79 opsdroid.web_server.web_app.router.add_post(
80 "/skill/{}/{}/".format(config["name"], webhook), wrapper)
81
82 return func
83 return matcher
84
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/opsdroid/matchers.py b/opsdroid/matchers.py
--- a/opsdroid/matchers.py
+++ b/opsdroid/matchers.py
@@ -9,12 +9,14 @@
_LOGGER = logging.getLogger(__name__)
-def match_regex(regex):
+def match_regex(regex, case_sensitive=True):
"""Return regex match decorator."""
def matcher(func):
"""Add decorated function to skills list for regex matching."""
opsdroid = get_opsdroid()
- opsdroid.skills.append({"regex": regex, "skill": func,
+ opsdroid.skills.append({"regex": {"expression": regex,
+ "case_sensitive": case_sensitive},
+ "skill": func,
"config":
opsdroid.loader.current_import_config})
return func
diff --git a/opsdroid/parsers/regex.py b/opsdroid/parsers/regex.py
--- a/opsdroid/parsers/regex.py
+++ b/opsdroid/parsers/regex.py
@@ -15,7 +15,12 @@
# give a response to the user, so an error response should be given.
for skill in opsdroid.skills:
if "regex" in skill:
- regex = re.match(skill["regex"], message.text)
+ if skill["regex"]["case_sensitive"]:
+ regex = re.match(skill["regex"]["expression"],
+ message.text)
+ else:
+ regex = re.match(skill["regex"]["expression"],
+ message.text, re.IGNORECASE)
if regex:
message.regex = regex
try:
@@ -28,4 +33,4 @@
_LOGGER.exception("Exception when parsing '" +
message.text +
"' against skill '" +
- skill["regex"] + "'")
+ skill["regex"]["expression"] + "'")
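
The behavioural difference introduced by the patch can be checked with the standard library alone (a standalone sketch, not part of the opsdroid test suite):

```python
import re

expression = r'hello opsdroid'

# Case-sensitive path (case_sensitive=True, the default):
assert re.match(expression, 'hello opsdroid')
assert re.match(expression, 'Hello Opsdroid') is None

# Case-insensitive path (case_sensitive=False):
assert re.match(expression, 'Hello Opsdroid', re.IGNORECASE)
```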
|
{"golden_diff": "diff --git a/opsdroid/matchers.py b/opsdroid/matchers.py\n--- a/opsdroid/matchers.py\n+++ b/opsdroid/matchers.py\n@@ -9,12 +9,14 @@\n _LOGGER = logging.getLogger(__name__)\n \n \n-def match_regex(regex):\n+def match_regex(regex, case_sensitive=True):\n \"\"\"Return regex match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for regex matching.\"\"\"\n opsdroid = get_opsdroid()\n- opsdroid.skills.append({\"regex\": regex, \"skill\": func,\n+ opsdroid.skills.append({\"regex\": {\"expression\": regex,\n+ \"case_sensitive\": case_sensitive},\n+ \"skill\": func,\n \"config\":\n opsdroid.loader.current_import_config})\n return func\ndiff --git a/opsdroid/parsers/regex.py b/opsdroid/parsers/regex.py\n--- a/opsdroid/parsers/regex.py\n+++ b/opsdroid/parsers/regex.py\n@@ -15,7 +15,12 @@\n # give a response to the user, so an error response should be given.\n for skill in opsdroid.skills:\n if \"regex\" in skill:\n- regex = re.match(skill[\"regex\"], message.text)\n+ if skill[\"regex\"][\"case_sensitive\"]:\n+ regex = re.match(skill[\"regex\"][\"expression\"],\n+ message.text)\n+ else:\n+ regex = re.match(skill[\"regex\"][\"expression\"],\n+ message.text, re.IGNORECASE)\n if regex:\n message.regex = regex\n try:\n@@ -28,4 +33,4 @@\n _LOGGER.exception(\"Exception when parsing '\" +\n message.text +\n \"' against skill '\" +\n- skill[\"regex\"] + \"'\")\n+ skill[\"regex\"][\"expression\"] + \"'\")\n", "issue": "Add case-insensitive kwarg to the regex matcher\nIt would be nice to specify case insensitive matching in the regex matcher.\r\n\r\ne.g\r\n```python\r\n@match_regex(r'myregex', case_sensitive=False)\r\nasync def myskill(opsdroid, config, message):\r\n pass\r\n```\n", "before_files": [{"content": "\"\"\"A helper function for parsing and executing regex skills.\"\"\"\n\nimport logging\nimport re\n\n\n_LOGGER = logging.getLogger(__name__)\n\n\nasync def parse_regex(opsdroid, message):\n \"\"\"Parse a message against all regex skills.\"\"\"\n # pylint: disable=broad-except\n # We want to catch all exceptions coming from a skill module and not\n # halt the application. 
If a skill throws an exception it just doesn't\n # give a response to the user, so an error response should be given.\n for skill in opsdroid.skills:\n if \"regex\" in skill:\n regex = re.match(skill[\"regex\"], message.text)\n if regex:\n message.regex = regex\n try:\n await skill[\"skill\"](opsdroid, skill[\"config\"], message)\n except Exception:\n await message.respond(\n \"Whoops there has been an error\")\n await message.respond(\n \"Check the log for details\")\n _LOGGER.exception(\"Exception when parsing '\" +\n message.text +\n \"' against skill '\" +\n skill[\"regex\"] + \"'\")\n", "path": "opsdroid/parsers/regex.py"}, {"content": "\"\"\"Decorator functions to use when creating skill modules.\"\"\"\n\nimport logging\n\nfrom opsdroid.helper import get_opsdroid\nfrom opsdroid.web import Web\n\n\n_LOGGER = logging.getLogger(__name__)\n\n\ndef match_regex(regex):\n \"\"\"Return regex match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for regex matching.\"\"\"\n opsdroid = get_opsdroid()\n opsdroid.skills.append({\"regex\": regex, \"skill\": func,\n \"config\":\n opsdroid.loader.current_import_config})\n return func\n return matcher\n\n\ndef match_apiai_action(action):\n \"\"\"Return apiai action match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for apiai matching.\"\"\"\n opsdroid = get_opsdroid()\n opsdroid.skills.append({\"apiai_action\": action, \"skill\": func,\n \"config\":\n opsdroid.loader.current_import_config})\n return func\n return matcher\n\n\ndef match_apiai_intent(intent):\n \"\"\"Return apiai intent match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for apiai matching.\"\"\"\n opsdroid = get_opsdroid()\n opsdroid.skills.append({\"apiai_intent\": intent, \"skill\": func,\n \"config\":\n opsdroid.loader.current_import_config})\n return func\n return matcher\n\n\ndef match_crontab(crontab, timezone=None):\n \"\"\"Return crontab match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for crontab matching.\"\"\"\n opsdroid = get_opsdroid()\n config = opsdroid.loader.current_import_config\n opsdroid.skills.append({\"crontab\": crontab, \"skill\": func,\n \"config\": config, \"timezone\": timezone})\n return func\n return matcher\n\n\ndef match_webhook(webhook):\n \"\"\"Return webhook match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for webhook matching.\"\"\"\n opsdroid = get_opsdroid()\n config = opsdroid.loader.current_import_config\n opsdroid.skills.append({\"webhook\": webhook, \"skill\": func,\n \"config\": config})\n\n async def wrapper(req, opsdroid=opsdroid, config=config):\n \"\"\"Wrap up the aiohttp handler.\"\"\"\n _LOGGER.info(\"Running skill %s via webhook\", webhook)\n opsdroid.stats[\"webhooks_called\"] = \\\n opsdroid.stats[\"webhooks_called\"] + 1\n await func(opsdroid, config, req)\n return Web.build_response(200, {\"called_skill\": webhook})\n\n opsdroid.web_server.web_app.router.add_post(\n \"/skill/{}/{}\".format(config[\"name\"], webhook), wrapper)\n opsdroid.web_server.web_app.router.add_post(\n \"/skill/{}/{}/\".format(config[\"name\"], webhook), wrapper)\n\n return func\n return matcher\n", "path": "opsdroid/matchers.py"}], "after_files": [{"content": "\"\"\"A helper function for parsing and executing regex skills.\"\"\"\n\nimport logging\nimport re\n\n\n_LOGGER = logging.getLogger(__name__)\n\n\nasync def parse_regex(opsdroid, message):\n \"\"\"Parse a message 
against all regex skills.\"\"\"\n # pylint: disable=broad-except\n # We want to catch all exceptions coming from a skill module and not\n # halt the application. If a skill throws an exception it just doesn't\n # give a response to the user, so an error response should be given.\n for skill in opsdroid.skills:\n if \"regex\" in skill:\n if skill[\"regex\"][\"case_sensitive\"]:\n regex = re.match(skill[\"regex\"][\"expression\"],\n message.text)\n else:\n regex = re.match(skill[\"regex\"][\"expression\"],\n message.text, re.IGNORECASE)\n if regex:\n message.regex = regex\n try:\n await skill[\"skill\"](opsdroid, skill[\"config\"], message)\n except Exception:\n await message.respond(\n \"Whoops there has been an error\")\n await message.respond(\n \"Check the log for details\")\n _LOGGER.exception(\"Exception when parsing '\" +\n message.text +\n \"' against skill '\" +\n skill[\"regex\"][\"expression\"] + \"'\")\n", "path": "opsdroid/parsers/regex.py"}, {"content": "\"\"\"Decorator functions to use when creating skill modules.\"\"\"\n\nimport logging\n\nfrom opsdroid.helper import get_opsdroid\nfrom opsdroid.web import Web\n\n\n_LOGGER = logging.getLogger(__name__)\n\n\ndef match_regex(regex, case_sensitive=True):\n \"\"\"Return regex match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for regex matching.\"\"\"\n opsdroid = get_opsdroid()\n opsdroid.skills.append({\"regex\": {\"expression\": regex,\n \"case_sensitive\": case_sensitive},\n \"skill\": func,\n \"config\":\n opsdroid.loader.current_import_config})\n return func\n return matcher\n\n\ndef match_apiai_action(action):\n \"\"\"Return apiai action match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for apiai matching.\"\"\"\n opsdroid = get_opsdroid()\n opsdroid.skills.append({\"apiai_action\": action, \"skill\": func,\n \"config\":\n opsdroid.loader.current_import_config})\n return func\n return matcher\n\n\ndef match_apiai_intent(intent):\n \"\"\"Return apiai intent match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for apiai matching.\"\"\"\n opsdroid = get_opsdroid()\n opsdroid.skills.append({\"apiai_intent\": intent, \"skill\": func,\n \"config\":\n opsdroid.loader.current_import_config})\n return func\n return matcher\n\n\ndef match_crontab(crontab, timezone=None):\n \"\"\"Return crontab match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for crontab matching.\"\"\"\n opsdroid = get_opsdroid()\n config = opsdroid.loader.current_import_config\n opsdroid.skills.append({\"crontab\": crontab, \"skill\": func,\n \"config\": config, \"timezone\": timezone})\n return func\n return matcher\n\n\ndef match_webhook(webhook):\n \"\"\"Return webhook match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for webhook matching.\"\"\"\n opsdroid = get_opsdroid()\n config = opsdroid.loader.current_import_config\n opsdroid.skills.append({\"webhook\": webhook, \"skill\": func,\n \"config\": config})\n\n async def wrapper(req, opsdroid=opsdroid, config=config):\n \"\"\"Wrap up the aiohttp handler.\"\"\"\n _LOGGER.info(\"Running skill %s via webhook\", webhook)\n opsdroid.stats[\"webhooks_called\"] = \\\n opsdroid.stats[\"webhooks_called\"] + 1\n await func(opsdroid, config, req)\n return Web.build_response(200, {\"called_skill\": webhook})\n\n opsdroid.web_server.web_app.router.add_post(\n \"/skill/{}/{}\".format(config[\"name\"], webhook), wrapper)\n 
opsdroid.web_server.web_app.router.add_post(\n \"/skill/{}/{}/\".format(config[\"name\"], webhook), wrapper)\n\n return func\n return matcher\n", "path": "opsdroid/matchers.py"}]}
| 1,420 | 404 |
gh_patches_debug_16469
|
rasdani/github-patches
|
git_diff
|
google-deepmind__optax-465
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Better tests for utils.
Optax tests did not catch a problem with one of the type annotations in #367. This is due to `utils` not having good test coverage.
I'm marking this as "good first issue". Any tests for `utils` would be very welcome! No need to write tests for all of them at once, PRs with only a single test at a time are very welcome.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `optax/_src/utils.py`
Content:
```
1 # Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 # ==============================================================================
15 """Utility functions for testing."""
16
17 from typing import Optional, Tuple, Sequence
18
19 import chex
20 import jax
21 import jax.numpy as jnp
22 import jax.scipy.stats.norm as multivariate_normal
23
24 from optax._src import linear_algebra
25 from optax._src import numerics
26
27
28 def tile_second_to_last_dim(a: chex.Array) -> chex.Array:
29 ones = jnp.ones_like(a)
30 a = jnp.expand_dims(a, axis=-1)
31 return jnp.expand_dims(ones, axis=-2) * a
32
33
34 def canonicalize_dtype(
35 dtype: Optional[chex.ArrayDType]) -> Optional[chex.ArrayDType]:
36 """Canonicalise a dtype, skip if None."""
37 if dtype is not None:
38 return jax.dtypes.canonicalize_dtype(dtype)
39 return dtype
40
41
42 def cast_tree(tree: chex.ArrayTree,
43 dtype: Optional[chex.ArrayDType]) -> chex.ArrayTree:
44 """Cast tree to given dtype, skip if None."""
45 if dtype is not None:
46 return jax.tree_util.tree_map(lambda t: t.astype(dtype), tree)
47 else:
48 return tree
49
50
51 def set_diags(a: chex.Array, new_diags: chex.Array) -> chex.Array:
52 """Set the diagonals of every DxD matrix in an input of shape NxDxD.
53
54 Args:
55 a: rank 3, tensor NxDxD.
56 new_diags: NxD matrix, the new diagonals of each DxD matrix.
57
58 Returns:
59 NxDxD tensor, with the same contents as `a` but with the diagonal
60 changed to `new_diags`.
61 """
62 n, d, d1 = a.shape
63 assert d == d1
64
65 indices1 = jnp.repeat(jnp.arange(n), d)
66 indices2 = jnp.tile(jnp.arange(d), n)
67 indices3 = indices2
68
69 # Use numpy array setting
70 a = a.at[indices1, indices2, indices3].set(new_diags.flatten())
71 return a
72
73
74 class MultiNormalDiagFromLogScale():
75 """MultiNormalDiag which directly exposes its input parameters."""
76
77 def __init__(self, loc: chex.Array, log_scale: chex.Array):
78 self._log_scale = log_scale
79 self._scale = jnp.exp(log_scale)
80 self._mean = loc
81 self._param_shape = jax.lax.broadcast_shapes(
82 self._mean.shape, self._scale.shape)
83
84 def sample(self, shape: Sequence[int],
85 seed: chex.PRNGKey) -> chex.Array:
86 sample_shape = tuple(shape) + self._param_shape
87 return jax.random.normal(
88 seed, shape=sample_shape) * self._scale + self._mean
89
90 def log_prob(self, x: chex.Array) -> chex.Array:
91 log_prob = multivariate_normal.logpdf(x, loc=self._mean, scale=self._scale)
92 # Sum over parameter axes.
93 sum_axis = [-(i + 1) for i in range(len(self._param_shape))]
94 return jnp.sum(log_prob, axis=sum_axis)
95
96 @property
97 def log_scale(self) -> chex.Array:
98 return self._log_scale
99
100 @property
101 def params(self) -> Sequence[chex.Array]:
102 return [self._mean, self._log_scale]
103
104
105 def multi_normal(loc: chex.Array,
106 log_scale: chex.Array) -> MultiNormalDiagFromLogScale:
107 return MultiNormalDiagFromLogScale(loc=loc, log_scale=log_scale)
108
109
110 @jax.custom_vjp
111 def _scale_gradient(inputs: chex.ArrayTree, scale: float) -> chex.ArrayTree:
112 """Internal gradient scaling implementation."""
113 del scale # Only used for the backward pass defined in _scale_gradient_bwd.
114 return inputs
115
116
117 def _scale_gradient_fwd(inputs: chex.ArrayTree,
118 scale: float) -> Tuple[chex.ArrayTree, float]:
119 return _scale_gradient(inputs, scale), scale
120
121
122 def _scale_gradient_bwd(scale: float,
123 g: chex.ArrayTree) -> Tuple[chex.ArrayTree, None]:
124 return (jax.tree_util.tree_map(lambda g_: g_ * scale, g), None)
125
126
127 _scale_gradient.defvjp(_scale_gradient_fwd, _scale_gradient_bwd)
128
129
130 def scale_gradient(inputs: chex.ArrayTree, scale: float) -> chex.ArrayTree:
131 """Scales gradients for the backwards pass.
132
133 Args:
134 inputs: A nested array.
135 scale: The scale factor for the gradient on the backwards pass.
136
137 Returns:
138 An array of the same structure as `inputs`, with scaled backward gradient.
139 """
140 # Special case scales of 1. and 0. for more efficiency.
141 if scale == 1.:
142 return inputs
143 elif scale == 0.:
144 return jax.lax.stop_gradient(inputs)
145 else:
146 return _scale_gradient(inputs, scale)
147
148
149 # TODO(b/183800387): remove legacy aliases.
150 safe_norm = numerics.safe_norm
151 safe_int32_increment = numerics.safe_int32_increment
152 global_norm = linear_algebra.global_norm
153
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/optax/_src/utils.py b/optax/_src/utils.py
--- a/optax/_src/utils.py
+++ b/optax/_src/utils.py
@@ -59,8 +59,22 @@
NxDxD tensor, with the same contents as `a` but with the diagonal
changed to `new_diags`.
"""
+ a_dim, new_diags_dim = len(a.shape), len(new_diags.shape)
+ if a_dim != 3:
+ raise ValueError(f'Expected `a` to be a 3D tensor, got {a_dim}D instead')
+ if new_diags_dim != 2:
+ raise ValueError(
+ f'Expected `new_diags` to be a 2D array, got {new_diags_dim}D instead')
n, d, d1 = a.shape
- assert d == d1
+ n_diags, d_diags = new_diags.shape
+ if d != d1:
+ raise ValueError(
+ f'Shape mismatch: expected `a.shape` to be {(n, d, d)}, '
+ f'got {(n, d, d1)} instead')
+ if d_diags != d or n_diags != n:
+ raise ValueError(
+ f'Shape mismatch: expected `new_diags.shape` to be {(n, d)}, '
+ f'got {(n_diags, d_diags)} instead')
indices1 = jnp.repeat(jnp.arange(n), d)
indices2 = jnp.tile(jnp.arange(d), n)
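
Following up on the issue's request, a first test for the new `set_diags` validation might look like this (a sketch only: the test names are invented, and it assumes `absl-py` and `jax` are importable, as elsewhere in the repo):

```python
from absl.testing import absltest
import jax.numpy as jnp

from optax._src import utils


class SetDiagsTest(absltest.TestCase):

  def test_rejects_non_3d_input(self):
    with self.assertRaises(ValueError):
      utils.set_diags(jnp.zeros((2, 2)), jnp.zeros((2, 2)))

  def test_rejects_mismatched_diagonals(self):
    with self.assertRaises(ValueError):
      utils.set_diags(jnp.zeros((3, 2, 2)), jnp.zeros((3, 5)))

  def test_sets_diagonals(self):
    out = utils.set_diags(jnp.zeros((1, 2, 2)), jnp.array([[7.0, 8.0]]))
    self.assertEqual(float(out[0, 0, 0]), 7.0)
    self.assertEqual(float(out[0, 1, 1]), 8.0)


if __name__ == '__main__':
  absltest.main()
```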
|
{"golden_diff": "diff --git a/optax/_src/utils.py b/optax/_src/utils.py\n--- a/optax/_src/utils.py\n+++ b/optax/_src/utils.py\n@@ -59,8 +59,22 @@\n NxDxD tensor, with the same contents as `a` but with the diagonal\n changed to `new_diags`.\n \"\"\"\n+ a_dim, new_diags_dim = len(a.shape), len(new_diags.shape)\n+ if a_dim != 3:\n+ raise ValueError(f'Expected `a` to be a 3D tensor, got {a_dim}D instead')\n+ if new_diags_dim != 2:\n+ raise ValueError(\n+ f'Expected `new_diags` to be a 2D array, got {new_diags_dim}D instead')\n n, d, d1 = a.shape\n- assert d == d1\n+ n_diags, d_diags = new_diags.shape\n+ if d != d1:\n+ raise ValueError(\n+ f'Shape mismatch: expected `a.shape` to be {(n, d, d)}, '\n+ f'got {(n, d, d1)} instead')\n+ if d_diags != d or n_diags != n:\n+ raise ValueError(\n+ f'Shape mismatch: expected `new_diags.shape` to be {(n, d)}, '\n+ f'got {(n_diags, d_diags)} instead')\n \n indices1 = jnp.repeat(jnp.arange(n), d)\n indices2 = jnp.tile(jnp.arange(d), n)\n", "issue": "Better tests for utils.\nOptax tests did not catch a problem with one of the type annotations in #367. This is due to `utils` not having good test coverage. \r\n\r\nI'm marking this as \"good first issue\". Any tests for `utils` would be very welcome! No need to write tests for all of them at once, PRs with only a single test at a time are very welcome.\n", "before_files": [{"content": "# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Utility functions for testing.\"\"\"\n\nfrom typing import Optional, Tuple, Sequence\n\nimport chex\nimport jax\nimport jax.numpy as jnp\nimport jax.scipy.stats.norm as multivariate_normal\n\nfrom optax._src import linear_algebra\nfrom optax._src import numerics\n\n\ndef tile_second_to_last_dim(a: chex.Array) -> chex.Array:\n ones = jnp.ones_like(a)\n a = jnp.expand_dims(a, axis=-1)\n return jnp.expand_dims(ones, axis=-2) * a\n\n\ndef canonicalize_dtype(\n dtype: Optional[chex.ArrayDType]) -> Optional[chex.ArrayDType]:\n \"\"\"Canonicalise a dtype, skip if None.\"\"\"\n if dtype is not None:\n return jax.dtypes.canonicalize_dtype(dtype)\n return dtype\n\n\ndef cast_tree(tree: chex.ArrayTree,\n dtype: Optional[chex.ArrayDType]) -> chex.ArrayTree:\n \"\"\"Cast tree to given dtype, skip if None.\"\"\"\n if dtype is not None:\n return jax.tree_util.tree_map(lambda t: t.astype(dtype), tree)\n else:\n return tree\n\n\ndef set_diags(a: chex.Array, new_diags: chex.Array) -> chex.Array:\n \"\"\"Set the diagonals of every DxD matrix in an input of shape NxDxD.\n\n Args:\n a: rank 3, tensor NxDxD.\n new_diags: NxD matrix, the new diagonals of each DxD matrix.\n\n Returns:\n NxDxD tensor, with the same contents as `a` but with the diagonal\n changed to `new_diags`.\n \"\"\"\n n, d, d1 = a.shape\n assert d == d1\n\n indices1 = jnp.repeat(jnp.arange(n), d)\n indices2 = jnp.tile(jnp.arange(d), n)\n indices3 = indices2\n\n # Use numpy array 
setting\n a = a.at[indices1, indices2, indices3].set(new_diags.flatten())\n return a\n\n\nclass MultiNormalDiagFromLogScale():\n \"\"\"MultiNormalDiag which directly exposes its input parameters.\"\"\"\n\n def __init__(self, loc: chex.Array, log_scale: chex.Array):\n self._log_scale = log_scale\n self._scale = jnp.exp(log_scale)\n self._mean = loc\n self._param_shape = jax.lax.broadcast_shapes(\n self._mean.shape, self._scale.shape)\n\n def sample(self, shape: Sequence[int],\n seed: chex.PRNGKey) -> chex.Array:\n sample_shape = tuple(shape) + self._param_shape\n return jax.random.normal(\n seed, shape=sample_shape) * self._scale + self._mean\n\n def log_prob(self, x: chex.Array) -> chex.Array:\n log_prob = multivariate_normal.logpdf(x, loc=self._mean, scale=self._scale)\n # Sum over parameter axes.\n sum_axis = [-(i + 1) for i in range(len(self._param_shape))]\n return jnp.sum(log_prob, axis=sum_axis)\n\n @property\n def log_scale(self) -> chex.Array:\n return self._log_scale\n\n @property\n def params(self) -> Sequence[chex.Array]:\n return [self._mean, self._log_scale]\n\n\ndef multi_normal(loc: chex.Array,\n log_scale: chex.Array) -> MultiNormalDiagFromLogScale:\n return MultiNormalDiagFromLogScale(loc=loc, log_scale=log_scale)\n\n\[email protected]_vjp\ndef _scale_gradient(inputs: chex.ArrayTree, scale: float) -> chex.ArrayTree:\n \"\"\"Internal gradient scaling implementation.\"\"\"\n del scale # Only used for the backward pass defined in _scale_gradient_bwd.\n return inputs\n\n\ndef _scale_gradient_fwd(inputs: chex.ArrayTree,\n scale: float) -> Tuple[chex.ArrayTree, float]:\n return _scale_gradient(inputs, scale), scale\n\n\ndef _scale_gradient_bwd(scale: float,\n g: chex.ArrayTree) -> Tuple[chex.ArrayTree, None]:\n return (jax.tree_util.tree_map(lambda g_: g_ * scale, g), None)\n\n\n_scale_gradient.defvjp(_scale_gradient_fwd, _scale_gradient_bwd)\n\n\ndef scale_gradient(inputs: chex.ArrayTree, scale: float) -> chex.ArrayTree:\n \"\"\"Scales gradients for the backwards pass.\n\n Args:\n inputs: A nested array.\n scale: The scale factor for the gradient on the backwards pass.\n\n Returns:\n An array of the same structure as `inputs`, with scaled backward gradient.\n \"\"\"\n # Special case scales of 1. and 0. for more efficiency.\n if scale == 1.:\n return inputs\n elif scale == 0.:\n return jax.lax.stop_gradient(inputs)\n else:\n return _scale_gradient(inputs, scale)\n\n\n# TODO(b/183800387): remove legacy aliases.\nsafe_norm = numerics.safe_norm\nsafe_int32_increment = numerics.safe_int32_increment\nglobal_norm = linear_algebra.global_norm\n", "path": "optax/_src/utils.py"}], "after_files": [{"content": "# Copyright 2019 DeepMind Technologies Limited. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Utility functions for testing.\"\"\"\n\nfrom typing import Optional, Tuple, Sequence\n\nimport chex\nimport jax\nimport jax.numpy as jnp\nimport jax.scipy.stats.norm as multivariate_normal\n\nfrom optax._src import linear_algebra\nfrom optax._src import numerics\n\n\ndef tile_second_to_last_dim(a: chex.Array) -> chex.Array:\n ones = jnp.ones_like(a)\n a = jnp.expand_dims(a, axis=-1)\n return jnp.expand_dims(ones, axis=-2) * a\n\n\ndef canonicalize_dtype(\n dtype: Optional[chex.ArrayDType]) -> Optional[chex.ArrayDType]:\n \"\"\"Canonicalise a dtype, skip if None.\"\"\"\n if dtype is not None:\n return jax.dtypes.canonicalize_dtype(dtype)\n return dtype\n\n\ndef cast_tree(tree: chex.ArrayTree,\n dtype: Optional[chex.ArrayDType]) -> chex.ArrayTree:\n \"\"\"Cast tree to given dtype, skip if None.\"\"\"\n if dtype is not None:\n return jax.tree_util.tree_map(lambda t: t.astype(dtype), tree)\n else:\n return tree\n\n\ndef set_diags(a: chex.Array, new_diags: chex.Array) -> chex.Array:\n \"\"\"Set the diagonals of every DxD matrix in an input of shape NxDxD.\n\n Args:\n a: rank 3, tensor NxDxD.\n new_diags: NxD matrix, the new diagonals of each DxD matrix.\n\n Returns:\n NxDxD tensor, with the same contents as `a` but with the diagonal\n changed to `new_diags`.\n \"\"\"\n a_dim, new_diags_dim = len(a.shape), len(new_diags.shape)\n if a_dim != 3:\n raise ValueError(f'Expected `a` to be a 3D tensor, got {a_dim}D instead')\n if new_diags_dim != 2:\n raise ValueError(\n f'Expected `new_diags` to be a 2D array, got {new_diags_dim}D instead')\n n, d, d1 = a.shape\n n_diags, d_diags = new_diags.shape\n if d != d1:\n raise ValueError(\n f'Shape mismatch: expected `a.shape` to be {(n, d, d)}, '\n f'got {(n, d, d1)} instead')\n if d_diags != d or n_diags != n:\n raise ValueError(\n f'Shape mismatch: expected `new_diags.shape` to be {(n, d)}, '\n f'got {(n_diags, d_diags)} instead')\n\n indices1 = jnp.repeat(jnp.arange(n), d)\n indices2 = jnp.tile(jnp.arange(d), n)\n indices3 = indices2\n\n # Use numpy array setting\n a = a.at[indices1, indices2, indices3].set(new_diags.flatten())\n return a\n\n\nclass MultiNormalDiagFromLogScale():\n \"\"\"MultiNormalDiag which directly exposes its input parameters.\"\"\"\n\n def __init__(self, loc: chex.Array, log_scale: chex.Array):\n self._log_scale = log_scale\n self._scale = jnp.exp(log_scale)\n self._mean = loc\n self._param_shape = jax.lax.broadcast_shapes(\n self._mean.shape, self._scale.shape)\n\n def sample(self, shape: Sequence[int],\n seed: chex.PRNGKey) -> chex.Array:\n sample_shape = tuple(shape) + self._param_shape\n return jax.random.normal(\n seed, shape=sample_shape) * self._scale + self._mean\n\n def log_prob(self, x: chex.Array) -> chex.Array:\n log_prob = multivariate_normal.logpdf(x, loc=self._mean, scale=self._scale)\n # Sum over parameter axes.\n sum_axis = [-(i + 1) for i in 
range(len(self._param_shape))]\n return jnp.sum(log_prob, axis=sum_axis)\n\n @property\n def log_scale(self) -> chex.Array:\n return self._log_scale\n\n @property\n def params(self) -> Sequence[chex.Array]:\n return [self._mean, self._log_scale]\n\n\ndef multi_normal(loc: chex.Array,\n log_scale: chex.Array) -> MultiNormalDiagFromLogScale:\n return MultiNormalDiagFromLogScale(loc=loc, log_scale=log_scale)\n\n\[email protected]_vjp\ndef _scale_gradient(inputs: chex.ArrayTree, scale: float) -> chex.ArrayTree:\n \"\"\"Internal gradient scaling implementation.\"\"\"\n del scale # Only used for the backward pass defined in _scale_gradient_bwd.\n return inputs\n\n\ndef _scale_gradient_fwd(inputs: chex.ArrayTree,\n scale: float) -> Tuple[chex.ArrayTree, float]:\n return _scale_gradient(inputs, scale), scale\n\n\ndef _scale_gradient_bwd(scale: float,\n g: chex.ArrayTree) -> Tuple[chex.ArrayTree, None]:\n return (jax.tree_util.tree_map(lambda g_: g_ * scale, g), None)\n\n\n_scale_gradient.defvjp(_scale_gradient_fwd, _scale_gradient_bwd)\n\n\ndef scale_gradient(inputs: chex.ArrayTree, scale: float) -> chex.ArrayTree:\n \"\"\"Scales gradients for the backwards pass.\n\n Args:\n inputs: A nested array.\n scale: The scale factor for the gradient on the backwards pass.\n\n Returns:\n An array of the same structure as `inputs`, with scaled backward gradient.\n \"\"\"\n # Special case scales of 1. and 0. for more efficiency.\n if scale == 1.:\n return inputs\n elif scale == 0.:\n return jax.lax.stop_gradient(inputs)\n else:\n return _scale_gradient(inputs, scale)\n\n\n# TODO(b/183800387): remove legacy aliases.\nsafe_norm = numerics.safe_norm\nsafe_int32_increment = numerics.safe_int32_increment\nglobal_norm = linear_algebra.global_norm\n", "path": "optax/_src/utils.py"}]}
| 1,978 | 348 |
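The optax patch above replaces a bare `assert` in `set_diags` with explicit `ValueError` checks, and the linked issue asks for unit tests covering `utils`. A minimal sketch of the kind of test that would exercise the patched behaviour, assuming the post-patch `set_diags` from `optax._src.utils` (the test names and sample shapes are illustrative):

```python
import jax.numpy as jnp
import pytest

from optax._src import utils


def test_set_diags_rejects_wrong_ranks():
    a = jnp.zeros((2, 3, 3))
    with pytest.raises(ValueError):
        utils.set_diags(a, jnp.ones((3,)))  # new_diags must be 2D, shape (N, D)
    with pytest.raises(ValueError):
        utils.set_diags(jnp.zeros((3, 3)), jnp.ones((2, 3)))  # `a` must be 3D


def test_set_diags_overwrites_diagonal():
    a = jnp.zeros((1, 2, 2))
    out = utils.set_diags(a, jnp.array([[5.0, 7.0]]))
    assert out[0, 0, 0] == 5.0 and out[0, 1, 1] == 7.0
    assert out[0, 0, 1] == 0.0  # off-diagonal entries are untouched
```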
gh_patches_debug_14244 | rasdani/github-patches | git_diff | scrapy__scrapy-1188 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Change the scrapy short description
The Scrapy short description says:
> Scrapy, a fast high-level screen scraping and web crawling framework.
I think this would be better:
> Scrapy, a fast high-level web crawling and screen scraping framework.
Because it first highlights its difference from simple screen scraping tools (e.g., Nokogiri, Mechanize, etc.).
Screen scraping can be done even with curl and grep, but I don't think you could do web crawling with such simple tools.
Perhaps this can be an alternative:
> Scrapy, a fast and scalable web crawling and screen scraping framework.
Also the term "web data mining" can be a good fit for Scrapy (along with Scrapely and similar tools) and help to shape its roadmap.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from os.path import dirname, join
2 from setuptools import setup, find_packages
3
4
5 with open(join(dirname(__file__), 'scrapy/VERSION'), 'rb') as f:
6 version = f.read().decode('ascii').strip()
7
8
9 setup(
10 name='Scrapy',
11 version=version,
12 url='http://scrapy.org',
13 description='A high-level Web Crawling and Screen Scraping framework',
14 long_description=open('README.rst').read(),
15 author='Scrapy developers',
16 maintainer='Pablo Hoffman',
17 maintainer_email='[email protected]',
18 license='BSD',
19 packages=find_packages(exclude=('tests', 'tests.*')),
20 include_package_data=True,
21 zip_safe=False,
22 entry_points={
23 'console_scripts': ['scrapy = scrapy.cmdline:execute']
24 },
25 classifiers=[
26 'Framework :: Scrapy',
27 'Development Status :: 5 - Production/Stable',
28 'Environment :: Console',
29 'Intended Audience :: Developers',
30 'License :: OSI Approved :: BSD License',
31 'Operating System :: OS Independent',
32 'Programming Language :: Python',
33 'Programming Language :: Python :: 2',
34 'Programming Language :: Python :: 2.7',
35 'Topic :: Internet :: WWW/HTTP',
36 'Topic :: Software Development :: Libraries :: Application Frameworks',
37 'Topic :: Software Development :: Libraries :: Python Modules',
38 ],
39 install_requires=[
40 'Twisted>=10.0.0',
41 'w3lib>=1.8.0',
42 'queuelib',
43 'lxml',
44 'pyOpenSSL',
45 'cssselect>=0.9',
46 'six>=1.5.2',
47 ],
48 )
49
```
Path: `scrapy/__init__.py`
Content:
```
1 """
2 Scrapy - a web crawling and screen scraping framework written for Python
3 """
4
5 __all__ = ['__version__', 'version_info', 'optional_features', 'twisted_version',
6 'Spider', 'Request', 'FormRequest', 'Selector', 'Item', 'Field']
7
8 # Scrapy version
9 import pkgutil
10 __version__ = pkgutil.get_data(__package__, 'VERSION').decode('ascii').strip()
11 version_info = tuple(int(v) if v.isdigit() else v
12 for v in __version__.split('.'))
13 del pkgutil
14
15 # Check minimum required Python version
16 import sys
17 if sys.version_info < (2, 7):
18 print("Scrapy %s requires Python 2.7" % __version__)
19 sys.exit(1)
20
21 # Ignore noisy twisted deprecation warnings
22 import warnings
23 warnings.filterwarnings('ignore', category=DeprecationWarning, module='twisted')
24 del warnings
25
26 # Apply monkey patches to fix issues in external libraries
27 from . import _monkeypatches
28 del _monkeypatches
29
30 # WARNING: optional_features set is deprecated and will be removed soon. Do not use.
31 optional_features = set()
32 # TODO: backwards compatibility, remove for Scrapy 0.20
33 optional_features.add('ssl')
34 try:
35 import boto
36 del boto
37 except ImportError:
38 pass
39 else:
40 optional_features.add('boto')
41
42 from twisted import version as _txv
43 twisted_version = (_txv.major, _txv.minor, _txv.micro)
44 if twisted_version >= (11, 1, 0):
45 optional_features.add('http11')
46
47 # Declare top-level shortcuts
48 from scrapy.spider import Spider
49 from scrapy.http import Request, FormRequest
50 from scrapy.selector import Selector
51 from scrapy.item import Item, Field
52
53 del sys
54
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/scrapy/__init__.py b/scrapy/__init__.py
--- a/scrapy/__init__.py
+++ b/scrapy/__init__.py
@@ -1,5 +1,5 @@
"""
-Scrapy - a web crawling and screen scraping framework written for Python
+Scrapy - a web crawling and web scraping framework written for Python
"""
__all__ = ['__version__', 'version_info', 'optional_features', 'twisted_version',
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@
name='Scrapy',
version=version,
url='http://scrapy.org',
- description='A high-level Web Crawling and Screen Scraping framework',
+ description='A high-level Web Crawling and Web Scraping framework',
long_description=open('README.rst').read(),
author='Scrapy developers',
maintainer='Pablo Hoffman',
|
{"golden_diff": "diff --git a/scrapy/__init__.py b/scrapy/__init__.py\n--- a/scrapy/__init__.py\n+++ b/scrapy/__init__.py\n@@ -1,5 +1,5 @@\n \"\"\"\n-Scrapy - a web crawling and screen scraping framework written for Python\n+Scrapy - a web crawling and web scraping framework written for Python\n \"\"\"\n \n __all__ = ['__version__', 'version_info', 'optional_features', 'twisted_version',\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -10,7 +10,7 @@\n name='Scrapy',\n version=version,\n url='http://scrapy.org',\n- description='A high-level Web Crawling and Screen Scraping framework',\n+ description='A high-level Web Crawling and Web Scraping framework',\n long_description=open('README.rst').read(),\n author='Scrapy developers',\n maintainer='Pablo Hoffman',\n", "issue": "Change the scrapy short description\nThe Scrapy short description says:\n\n> Scrapy, a fast high-level screen scraping and web crawling framework.\n\nI think would be better:\n\n> Scrapy, a fast high-level web crawling and screen scraping framework.\n\nBecause it highlights first its difference with simple screen scraping tools (i.e. Nokogiri. Mechanize, etc).\n\nScreen scraping can be done even with curl and grep, but I don't think you could do web crawling with such simple tools.\n\nPerhaps this can be an alternative:\n\n> Scrapy, a fast and scalable web crawling and screen scraping framework.\n\nAlso the term \"web data mining\" can be a good fit for Scrapy (along with Scrapely and similar tools) and help to shape its roadmap.\n\n", "before_files": [{"content": "from os.path import dirname, join\nfrom setuptools import setup, find_packages\n\n\nwith open(join(dirname(__file__), 'scrapy/VERSION'), 'rb') as f:\n version = f.read().decode('ascii').strip()\n\n\nsetup(\n name='Scrapy',\n version=version,\n url='http://scrapy.org',\n description='A high-level Web Crawling and Screen Scraping framework',\n long_description=open('README.rst').read(),\n author='Scrapy developers',\n maintainer='Pablo Hoffman',\n maintainer_email='[email protected]',\n license='BSD',\n packages=find_packages(exclude=('tests', 'tests.*')),\n include_package_data=True,\n zip_safe=False,\n entry_points={\n 'console_scripts': ['scrapy = scrapy.cmdline:execute']\n },\n classifiers=[\n 'Framework :: Scrapy',\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Topic :: Internet :: WWW/HTTP',\n 'Topic :: Software Development :: Libraries :: Application Frameworks',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n install_requires=[\n 'Twisted>=10.0.0',\n 'w3lib>=1.8.0',\n 'queuelib',\n 'lxml',\n 'pyOpenSSL',\n 'cssselect>=0.9',\n 'six>=1.5.2',\n ],\n)\n", "path": "setup.py"}, {"content": "\"\"\"\nScrapy - a web crawling and screen scraping framework written for Python\n\"\"\"\n\n__all__ = ['__version__', 'version_info', 'optional_features', 'twisted_version',\n 'Spider', 'Request', 'FormRequest', 'Selector', 'Item', 'Field']\n\n# Scrapy version\nimport pkgutil\n__version__ = pkgutil.get_data(__package__, 'VERSION').decode('ascii').strip()\nversion_info = tuple(int(v) if v.isdigit() else v\n for v in __version__.split('.'))\ndel pkgutil\n\n# Check minimum required Python version\nimport sys\nif sys.version_info < (2, 7):\n print(\"Scrapy %s 
requires Python 2.7\" % __version__)\n sys.exit(1)\n\n# Ignore noisy twisted deprecation warnings\nimport warnings\nwarnings.filterwarnings('ignore', category=DeprecationWarning, module='twisted')\ndel warnings\n\n# Apply monkey patches to fix issues in external libraries\nfrom . import _monkeypatches\ndel _monkeypatches\n\n# WARNING: optional_features set is deprecated and will be removed soon. Do not use.\noptional_features = set()\n# TODO: backwards compatibility, remove for Scrapy 0.20\noptional_features.add('ssl')\ntry:\n import boto\n del boto\nexcept ImportError:\n pass\nelse:\n optional_features.add('boto')\n\nfrom twisted import version as _txv\ntwisted_version = (_txv.major, _txv.minor, _txv.micro)\nif twisted_version >= (11, 1, 0):\n optional_features.add('http11')\n\n# Declare top-level shortcuts\nfrom scrapy.spider import Spider\nfrom scrapy.http import Request, FormRequest\nfrom scrapy.selector import Selector\nfrom scrapy.item import Item, Field\n\ndel sys\n", "path": "scrapy/__init__.py"}], "after_files": [{"content": "from os.path import dirname, join\nfrom setuptools import setup, find_packages\n\n\nwith open(join(dirname(__file__), 'scrapy/VERSION'), 'rb') as f:\n version = f.read().decode('ascii').strip()\n\n\nsetup(\n name='Scrapy',\n version=version,\n url='http://scrapy.org',\n description='A high-level Web Crawling and Web Scraping framework',\n long_description=open('README.rst').read(),\n author='Scrapy developers',\n maintainer='Pablo Hoffman',\n maintainer_email='[email protected]',\n license='BSD',\n packages=find_packages(exclude=('tests', 'tests.*')),\n include_package_data=True,\n zip_safe=False,\n entry_points={\n 'console_scripts': ['scrapy = scrapy.cmdline:execute']\n },\n classifiers=[\n 'Framework :: Scrapy',\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Topic :: Internet :: WWW/HTTP',\n 'Topic :: Software Development :: Libraries :: Application Frameworks',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n install_requires=[\n 'Twisted>=10.0.0',\n 'w3lib>=1.8.0',\n 'queuelib',\n 'lxml',\n 'pyOpenSSL',\n 'cssselect>=0.9',\n 'six>=1.5.2',\n ],\n)\n", "path": "setup.py"}, {"content": "\"\"\"\nScrapy - a web crawling and web scraping framework written for Python\n\"\"\"\n\n__all__ = ['__version__', 'version_info', 'optional_features', 'twisted_version',\n 'Spider', 'Request', 'FormRequest', 'Selector', 'Item', 'Field']\n\n# Scrapy version\nimport pkgutil\n__version__ = pkgutil.get_data(__package__, 'VERSION').decode('ascii').strip()\nversion_info = tuple(int(v) if v.isdigit() else v\n for v in __version__.split('.'))\ndel pkgutil\n\n# Check minimum required Python version\nimport sys\nif sys.version_info < (2, 7):\n print(\"Scrapy %s requires Python 2.7\" % __version__)\n sys.exit(1)\n\n# Ignore noisy twisted deprecation warnings\nimport warnings\nwarnings.filterwarnings('ignore', category=DeprecationWarning, module='twisted')\ndel warnings\n\n# Apply monkey patches to fix issues in external libraries\nfrom . import _monkeypatches\ndel _monkeypatches\n\n# WARNING: optional_features set is deprecated and will be removed soon. 
Do not use.\noptional_features = set()\n# TODO: backwards compatibility, remove for Scrapy 0.20\noptional_features.add('ssl')\ntry:\n import boto\n del boto\nexcept ImportError:\n pass\nelse:\n optional_features.add('boto')\n\nfrom twisted import version as _txv\ntwisted_version = (_txv.major, _txv.minor, _txv.micro)\nif twisted_version >= (11, 1, 0):\n optional_features.add('http11')\n\n# Declare top-level shortcuts\nfrom scrapy.spider import Spider\nfrom scrapy.http import Request, FormRequest\nfrom scrapy.selector import Selector\nfrom scrapy.item import Item, Field\n\ndel sys\n", "path": "scrapy/__init__.py"}]}
| 1,364 | 210 |
gh_patches_debug_59247 | rasdani/github-patches | git_diff | projectmesa__mesa-1860 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
mesa.visualization.chartmodule doesn't work
As shown in the picture, I run the boltzmann_wealth_model in the mesa example, but the line chart is not displayed normally. Can anyone help me?
<img width="788" alt="ๅฑๅนๆชๅพ 2023-11-04 183542" src="https://github.com/projectmesa/mesa/assets/75169342/89ba1b20-4011-471b-909e-5fea97da6b73">
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2 import re
3 from codecs import open
4
5 from setuptools import find_packages, setup
6
7 requires = [
8 "click",
9 "cookiecutter",
10 "matplotlib",
11 "mesa_viz_tornado",
12 "networkx",
13 "numpy",
14 "pandas",
15 "solara",
16 "tqdm",
17 ]
18
19 extras_require = {
20 "dev": [
21 "black",
22 "ruff~=0.1.1", # Update periodically
23 "coverage",
24 "pytest >= 4.6",
25 "pytest-cov",
26 "sphinx",
27 ],
28 # Explicitly install ipykernel for Python 3.8.
29 # See https://stackoverflow.com/questions/28831854/how-do-i-add-python3-kernel-to-jupyter-ipython
30 # Could be removed in the future
31 "docs": [
32 "sphinx",
33 "ipython",
34 "ipykernel",
35 "pydata_sphinx_theme",
36 "seaborn",
37 "myst-nb",
38 ],
39 }
40
41 version = ""
42 with open("mesa/__init__.py") as fd:
43 version = re.search(
44 r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE
45 ).group(1)
46
47 with open("README.rst", "rb", encoding="utf-8") as f:
48 readme = f.read()
49
50
51 setup(
52 name="Mesa",
53 version=version,
54 description="Agent-based modeling (ABM) in Python 3+",
55 long_description=readme,
56 author="Project Mesa Team",
57 author_email="[email protected]",
58 url="https://github.com/projectmesa/mesa",
59 packages=find_packages(),
60 package_data={
61 "cookiecutter-mesa": ["cookiecutter-mesa/*"],
62 },
63 include_package_data=True,
64 install_requires=requires,
65 extras_require=extras_require,
66 keywords="agent based modeling model ABM simulation multi-agent",
67 license="Apache 2.0",
68 zip_safe=False,
69 classifiers=[
70 "Topic :: Scientific/Engineering",
71 "Topic :: Scientific/Engineering :: Artificial Life",
72 "Topic :: Scientific/Engineering :: Artificial Intelligence",
73 "Intended Audience :: Science/Research",
74 "Programming Language :: Python :: 3 :: Only",
75 "Programming Language :: Python :: 3.8",
76 "Programming Language :: Python :: 3.9",
77 "Programming Language :: Python :: 3.10",
78 "License :: OSI Approved :: Apache Software License",
79 "Operating System :: OS Independent",
80 "Development Status :: 3 - Alpha",
81 "Natural Language :: English",
82 ],
83 entry_points="""
84 [console_scripts]
85 mesa=mesa.main:cli
86 """,
87 python_requires=">=3.8",
88 )
89
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@
"click",
"cookiecutter",
"matplotlib",
- "mesa_viz_tornado",
+ "mesa_viz_tornado~=0.1.0,>=0.1.2",
"networkx",
"numpy",
"pandas",
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -8,7 +8,7 @@\n \"click\",\n \"cookiecutter\",\n \"matplotlib\",\n- \"mesa_viz_tornado\",\n+ \"mesa_viz_tornado~=0.1.0,>=0.1.2\",\n \"networkx\",\n \"numpy\",\n \"pandas\",\n", "issue": "mesa.visualization.chartmodule doesn't work\nAs shown in the picture, I run the boltzmann_wealth_model in the mesa example, but the line chart is not displayed normally. Can anyone help me?\r\n<img width=\"788\" alt=\"\u5c4f\u5e55\u622a\u56fe 2023-11-04 183542\" src=\"https://github.com/projectmesa/mesa/assets/75169342/89ba1b20-4011-471b-909e-5fea97da6b73\">\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\nimport re\nfrom codecs import open\n\nfrom setuptools import find_packages, setup\n\nrequires = [\n \"click\",\n \"cookiecutter\",\n \"matplotlib\",\n \"mesa_viz_tornado\",\n \"networkx\",\n \"numpy\",\n \"pandas\",\n \"solara\",\n \"tqdm\",\n]\n\nextras_require = {\n \"dev\": [\n \"black\",\n \"ruff~=0.1.1\", # Update periodically\n \"coverage\",\n \"pytest >= 4.6\",\n \"pytest-cov\",\n \"sphinx\",\n ],\n # Explicitly install ipykernel for Python 3.8.\n # See https://stackoverflow.com/questions/28831854/how-do-i-add-python3-kernel-to-jupyter-ipython\n # Could be removed in the future\n \"docs\": [\n \"sphinx\",\n \"ipython\",\n \"ipykernel\",\n \"pydata_sphinx_theme\",\n \"seaborn\",\n \"myst-nb\",\n ],\n}\n\nversion = \"\"\nwith open(\"mesa/__init__.py\") as fd:\n version = re.search(\n r'^__version__\\s*=\\s*[\\'\"]([^\\'\"]*)[\\'\"]', fd.read(), re.MULTILINE\n ).group(1)\n\nwith open(\"README.rst\", \"rb\", encoding=\"utf-8\") as f:\n readme = f.read()\n\n\nsetup(\n name=\"Mesa\",\n version=version,\n description=\"Agent-based modeling (ABM) in Python 3+\",\n long_description=readme,\n author=\"Project Mesa Team\",\n author_email=\"[email protected]\",\n url=\"https://github.com/projectmesa/mesa\",\n packages=find_packages(),\n package_data={\n \"cookiecutter-mesa\": [\"cookiecutter-mesa/*\"],\n },\n include_package_data=True,\n install_requires=requires,\n extras_require=extras_require,\n keywords=\"agent based modeling model ABM simulation multi-agent\",\n license=\"Apache 2.0\",\n zip_safe=False,\n classifiers=[\n \"Topic :: Scientific/Engineering\",\n \"Topic :: Scientific/Engineering :: Artificial Life\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n \"Intended Audience :: Science/Research\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n \"Development Status :: 3 - Alpha\",\n \"Natural Language :: English\",\n ],\n entry_points=\"\"\"\n [console_scripts]\n mesa=mesa.main:cli\n \"\"\",\n python_requires=\">=3.8\",\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport re\nfrom codecs import open\n\nfrom setuptools import find_packages, setup\n\nrequires = [\n \"click\",\n \"cookiecutter\",\n \"matplotlib\",\n \"mesa_viz_tornado~=0.1.0,>=0.1.2\",\n \"networkx\",\n \"numpy\",\n \"pandas\",\n \"solara\",\n \"tqdm\",\n]\n\nextras_require = {\n \"dev\": [\n \"black\",\n \"ruff~=0.1.1\", # Update periodically\n \"coverage\",\n \"pytest >= 4.6\",\n \"pytest-cov\",\n \"sphinx\",\n ],\n # Explicitly install ipykernel for Python 3.8.\n # See 
https://stackoverflow.com/questions/28831854/how-do-i-add-python3-kernel-to-jupyter-ipython\n # Could be removed in the future\n \"docs\": [\n \"sphinx\",\n \"ipython\",\n \"ipykernel\",\n \"pydata_sphinx_theme\",\n \"seaborn\",\n \"myst-nb\",\n ],\n}\n\nversion = \"\"\nwith open(\"mesa/__init__.py\") as fd:\n version = re.search(\n r'^__version__\\s*=\\s*[\\'\"]([^\\'\"]*)[\\'\"]', fd.read(), re.MULTILINE\n ).group(1)\n\nwith open(\"README.rst\", \"rb\", encoding=\"utf-8\") as f:\n readme = f.read()\n\n\nsetup(\n name=\"Mesa\",\n version=version,\n description=\"Agent-based modeling (ABM) in Python 3+\",\n long_description=readme,\n author=\"Project Mesa Team\",\n author_email=\"[email protected]\",\n url=\"https://github.com/projectmesa/mesa\",\n packages=find_packages(),\n package_data={\n \"cookiecutter-mesa\": [\"cookiecutter-mesa/*\"],\n },\n include_package_data=True,\n install_requires=requires,\n extras_require=extras_require,\n keywords=\"agent based modeling model ABM simulation multi-agent\",\n license=\"Apache 2.0\",\n zip_safe=False,\n classifiers=[\n \"Topic :: Scientific/Engineering\",\n \"Topic :: Scientific/Engineering :: Artificial Life\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n \"Intended Audience :: Science/Research\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n \"Development Status :: 3 - Alpha\",\n \"Natural Language :: English\",\n ],\n entry_points=\"\"\"\n [console_scripts]\n mesa=mesa.main:cli\n \"\"\",\n python_requires=\">=3.8\",\n)\n", "path": "setup.py"}]}
| 1,177 | 91 |
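The mesa fix above is purely a dependency pin: `mesa_viz_tornado~=0.1.0,>=0.1.2` combines a compatible-release clause with a minimum version, admitting only the 0.1.x line from 0.1.2 upward. A quick way to verify what a specifier set like this accepts, using the third-party `packaging` library (the version strings tested are illustrative):

```python
from packaging.specifiers import SpecifierSet

spec = SpecifierSet("~=0.1.0,>=0.1.2")
for v in ("0.1.1", "0.1.2", "0.1.9", "0.2.0"):
    print(v, v in spec)
# 0.1.1 False  (below the >=0.1.2 floor)
# 0.1.2 True
# 0.1.9 True
# 0.2.0 False  (~=0.1.0 caps the release at <0.2.0)
```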
gh_patches_debug_11847 | rasdani/github-patches | git_diff | CTFd__CTFd-2030 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
SubmissionSchema needs more nested fields
I'm having trouble accessing a user's name from a SubmissionSchema dump. This is probably because we need more Nested Fields on the Schema in addition to just the nested challenge schema.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `CTFd/schemas/submissions.py`
Content:
```
1 from marshmallow import fields
2
3 from CTFd.models import Submissions, ma
4 from CTFd.schemas.challenges import ChallengeSchema
5 from CTFd.utils import string_types
6
7
8 class SubmissionSchema(ma.ModelSchema):
9 challenge = fields.Nested(ChallengeSchema, only=["name", "category", "value"])
10
11 class Meta:
12 model = Submissions
13 include_fk = True
14 dump_only = ("id",)
15
16 views = {
17 "admin": [
18 "provided",
19 "ip",
20 "challenge_id",
21 "challenge",
22 "user",
23 "team",
24 "date",
25 "type",
26 "id",
27 ],
28 "user": ["challenge_id", "challenge", "user", "team", "date", "type", "id"],
29 }
30
31 def __init__(self, view=None, *args, **kwargs):
32 if view:
33 if isinstance(view, string_types):
34 kwargs["only"] = self.views[view]
35 elif isinstance(view, list):
36 kwargs["only"] = view
37
38 super(SubmissionSchema, self).__init__(*args, **kwargs)
39
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/CTFd/schemas/submissions.py b/CTFd/schemas/submissions.py
--- a/CTFd/schemas/submissions.py
+++ b/CTFd/schemas/submissions.py
@@ -2,11 +2,15 @@
from CTFd.models import Submissions, ma
from CTFd.schemas.challenges import ChallengeSchema
+from CTFd.schemas.teams import TeamSchema
+from CTFd.schemas.users import UserSchema
from CTFd.utils import string_types
class SubmissionSchema(ma.ModelSchema):
- challenge = fields.Nested(ChallengeSchema, only=["name", "category", "value"])
+ challenge = fields.Nested(ChallengeSchema, only=["id", "name", "category", "value"])
+ user = fields.Nested(UserSchema, only=["id", "name"])
+ team = fields.Nested(TeamSchema, only=["id", "name"])
class Meta:
model = Submissions
|
{"golden_diff": "diff --git a/CTFd/schemas/submissions.py b/CTFd/schemas/submissions.py\n--- a/CTFd/schemas/submissions.py\n+++ b/CTFd/schemas/submissions.py\n@@ -2,11 +2,15 @@\n \n from CTFd.models import Submissions, ma\n from CTFd.schemas.challenges import ChallengeSchema\n+from CTFd.schemas.teams import TeamSchema\n+from CTFd.schemas.users import UserSchema\n from CTFd.utils import string_types\n \n \n class SubmissionSchema(ma.ModelSchema):\n- challenge = fields.Nested(ChallengeSchema, only=[\"name\", \"category\", \"value\"])\n+ challenge = fields.Nested(ChallengeSchema, only=[\"id\", \"name\", \"category\", \"value\"])\n+ user = fields.Nested(UserSchema, only=[\"id\", \"name\"])\n+ team = fields.Nested(TeamSchema, only=[\"id\", \"name\"])\n \n class Meta:\n model = Submissions\n", "issue": "SubmissionSchema needs more nested fields\nI'm having trouble accessing a user's name from a SubmissionSchema dump. This is probably because we need more Nested Fields on the Schema in addition to just the nested challenge schema. \n", "before_files": [{"content": "from marshmallow import fields\n\nfrom CTFd.models import Submissions, ma\nfrom CTFd.schemas.challenges import ChallengeSchema\nfrom CTFd.utils import string_types\n\n\nclass SubmissionSchema(ma.ModelSchema):\n challenge = fields.Nested(ChallengeSchema, only=[\"name\", \"category\", \"value\"])\n\n class Meta:\n model = Submissions\n include_fk = True\n dump_only = (\"id\",)\n\n views = {\n \"admin\": [\n \"provided\",\n \"ip\",\n \"challenge_id\",\n \"challenge\",\n \"user\",\n \"team\",\n \"date\",\n \"type\",\n \"id\",\n ],\n \"user\": [\"challenge_id\", \"challenge\", \"user\", \"team\", \"date\", \"type\", \"id\"],\n }\n\n def __init__(self, view=None, *args, **kwargs):\n if view:\n if isinstance(view, string_types):\n kwargs[\"only\"] = self.views[view]\n elif isinstance(view, list):\n kwargs[\"only\"] = view\n\n super(SubmissionSchema, self).__init__(*args, **kwargs)\n", "path": "CTFd/schemas/submissions.py"}], "after_files": [{"content": "from marshmallow import fields\n\nfrom CTFd.models import Submissions, ma\nfrom CTFd.schemas.challenges import ChallengeSchema\nfrom CTFd.schemas.teams import TeamSchema\nfrom CTFd.schemas.users import UserSchema\nfrom CTFd.utils import string_types\n\n\nclass SubmissionSchema(ma.ModelSchema):\n challenge = fields.Nested(ChallengeSchema, only=[\"id\", \"name\", \"category\", \"value\"])\n user = fields.Nested(UserSchema, only=[\"id\", \"name\"])\n team = fields.Nested(TeamSchema, only=[\"id\", \"name\"])\n\n class Meta:\n model = Submissions\n include_fk = True\n dump_only = (\"id\",)\n\n views = {\n \"admin\": [\n \"provided\",\n \"ip\",\n \"challenge_id\",\n \"challenge\",\n \"user\",\n \"team\",\n \"date\",\n \"type\",\n \"id\",\n ],\n \"user\": [\"challenge_id\", \"challenge\", \"user\", \"team\", \"date\", \"type\", \"id\"],\n }\n\n def __init__(self, view=None, *args, **kwargs):\n if view:\n if isinstance(view, string_types):\n kwargs[\"only\"] = self.views[view]\n elif isinstance(view, list):\n kwargs[\"only\"] = view\n\n super(SubmissionSchema, self).__init__(*args, **kwargs)\n", "path": "CTFd/schemas/submissions.py"}]}
| 616 | 214 |
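With the CTFd patch above, a submission dump carries nested `user` and `team` objects rather than bare foreign keys, so the reporter's original goal — reading a user's name from a dump — works directly. A hedged usage sketch (`submission` is a placeholder for a `Submissions` row; CTFd at the time used marshmallow 2.x, where `dump()` returns a result object whose dict lives in `.data`, so the fallback below covers both 2.x and 3.x return types):

```python
from CTFd.schemas.submissions import SubmissionSchema

schema = SubmissionSchema(view="admin")
result = schema.dump(submission)            # `submission`: a Submissions model row
payload = getattr(result, "data", result)   # MarshalResult on 2.x, plain dict on 3.x
print(payload["user"]["name"], payload["challenge"]["name"])
```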
gh_patches_debug_24458 | rasdani/github-patches | git_diff | chainer__chainer-4108 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support optimization for rsqrt
CUDA provides `rsqrt` function, which is faster than doing `1.0 / sqrt(x)`.
It is better to provide custom kernel in Chainer or directly support in CuPy.
(Note that NumPy does not provide `rsqrt`)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `chainer/functions/math/sqrt.py`
Content:
```
1 from chainer.backends import cuda
2 from chainer import function_node
3 from chainer import utils
4 from chainer.utils import type_check
5
6
7 class Sqrt(function_node.FunctionNode):
8
9 @property
10 def label(self):
11 return 'sqrt'
12
13 def check_type_forward(self, in_types):
14 type_check.expect(
15 in_types.size() == 1,
16 in_types[0].dtype.kind == 'f',
17 )
18
19 def forward(self, x):
20 self.retain_outputs((0,))
21 xp = cuda.get_array_module(*x)
22 return utils.force_array(xp.sqrt(x[0], dtype=x[0].dtype)),
23
24 def backward(self, indexes, grad_outputs):
25 gx = self.get_retained_outputs()[0]
26 gy = grad_outputs[0]
27 return gy / (gx * 2.0),
28
29
30 def sqrt(x):
31 """Elementwise square root function.
32
33 .. math::
34 y_i = \\sqrt x_i.
35
36 If the value of :math:`x_i` is negative, it returns ``Nan`` for :math:`y_i`
37 respect to underlying numpy and cupy specification.
38
39 Args:
40 x (~chainer.Variable): Input variable.
41
42 Returns:
43 ~chainer.Variable: Output variable.
44 """
45 return Sqrt().apply((x,))[0]
46
47
48 def rsqrt(x):
49 """Computes elementwise reciprocal of square root of input :math:`x_i`.
50
51 .. math::
52 y_i = {1 \\over \\sqrt x_i}.
53
54 Args:
55 x (~chainer.Variable): Input variable.
56
57 Returns:
58 ~chainer.Variable: Output variable.
59
60 .. seealso:: :func:`~chainer.functions.sqrt`
61 """
62 return 1.0 / sqrt(x)
63
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/chainer/functions/math/sqrt.py b/chainer/functions/math/sqrt.py
--- a/chainer/functions/math/sqrt.py
+++ b/chainer/functions/math/sqrt.py
@@ -1,3 +1,5 @@
+import numpy
+
from chainer.backends import cuda
from chainer import function_node
from chainer import utils
@@ -27,6 +29,36 @@
return gy / (gx * 2.0),
+class Rsqrt(function_node.FunctionNode):
+
+ @property
+ def label(self):
+ return 'rsqrt'
+
+ def check_type_forward(self, in_types):
+ type_check.expect(
+ in_types.size() == 1,
+ in_types[0].dtype.kind == 'f',
+ )
+
+ def forward(self, inputs):
+ self.retain_inputs((0,))
+ x, = inputs
+ xp = cuda.get_array_module(x)
+ dtype = x.dtype
+ if xp is numpy:
+ out = xp.reciprocal(xp.sqrt(x, dtype=dtype), dtype=dtype)
+ else:
+ # CuPy provides `rsqrt` which is faster than `1.0 / sqrt(x)`.
+ out = cuda.cupyx.rsqrt(x, dtype=dtype)
+ return utils.force_array(out),
+
+ def backward(self, indexes, grad_outputs):
+ x, = self.get_retained_inputs()
+ gy, = grad_outputs
+ return gy * (x ** -1.5) * -0.5,
+
+
def sqrt(x):
"""Elementwise square root function.
@@ -59,4 +91,4 @@
.. seealso:: :func:`~chainer.functions.sqrt`
"""
- return 1.0 / sqrt(x)
+ return Rsqrt().apply((x,))[0]
|
{"golden_diff": "diff --git a/chainer/functions/math/sqrt.py b/chainer/functions/math/sqrt.py\n--- a/chainer/functions/math/sqrt.py\n+++ b/chainer/functions/math/sqrt.py\n@@ -1,3 +1,5 @@\n+import numpy\n+\n from chainer.backends import cuda\n from chainer import function_node\n from chainer import utils\n@@ -27,6 +29,36 @@\n return gy / (gx * 2.0),\n \n \n+class Rsqrt(function_node.FunctionNode):\n+\n+ @property\n+ def label(self):\n+ return 'rsqrt'\n+\n+ def check_type_forward(self, in_types):\n+ type_check.expect(\n+ in_types.size() == 1,\n+ in_types[0].dtype.kind == 'f',\n+ )\n+\n+ def forward(self, inputs):\n+ self.retain_inputs((0,))\n+ x, = inputs\n+ xp = cuda.get_array_module(x)\n+ dtype = x.dtype\n+ if xp is numpy:\n+ out = xp.reciprocal(xp.sqrt(x, dtype=dtype), dtype=dtype)\n+ else:\n+ # CuPy provides `rsqrt` which is faster than `1.0 / sqrt(x)`.\n+ out = cuda.cupyx.rsqrt(x, dtype=dtype)\n+ return utils.force_array(out),\n+\n+ def backward(self, indexes, grad_outputs):\n+ x, = self.get_retained_inputs()\n+ gy, = grad_outputs\n+ return gy * (x ** -1.5) * -0.5,\n+\n+\n def sqrt(x):\n \"\"\"Elementwise square root function.\n \n@@ -59,4 +91,4 @@\n \n .. seealso:: :func:`~chainer.functions.sqrt`\n \"\"\"\n- return 1.0 / sqrt(x)\n+ return Rsqrt().apply((x,))[0]\n", "issue": "Support optimization for rsqrt\nCUDA provides `rsqrt` function, which is faster than doing `1.0 / sqrt(x)`.\r\nIt is better to provide custom kernel in Chainer or directly support in CuPy.\r\n(Note that NumPy does not provide `rsqrt`)\n", "before_files": [{"content": "from chainer.backends import cuda\nfrom chainer import function_node\nfrom chainer import utils\nfrom chainer.utils import type_check\n\n\nclass Sqrt(function_node.FunctionNode):\n\n @property\n def label(self):\n return 'sqrt'\n\n def check_type_forward(self, in_types):\n type_check.expect(\n in_types.size() == 1,\n in_types[0].dtype.kind == 'f',\n )\n\n def forward(self, x):\n self.retain_outputs((0,))\n xp = cuda.get_array_module(*x)\n return utils.force_array(xp.sqrt(x[0], dtype=x[0].dtype)),\n\n def backward(self, indexes, grad_outputs):\n gx = self.get_retained_outputs()[0]\n gy = grad_outputs[0]\n return gy / (gx * 2.0),\n\n\ndef sqrt(x):\n \"\"\"Elementwise square root function.\n\n .. math::\n y_i = \\\\sqrt x_i.\n\n If the value of :math:`x_i` is negative, it returns ``Nan`` for :math:`y_i`\n respect to underlying numpy and cupy specification.\n\n Args:\n x (~chainer.Variable): Input variable.\n\n Returns:\n ~chainer.Variable: Output variable.\n \"\"\"\n return Sqrt().apply((x,))[0]\n\n\ndef rsqrt(x):\n \"\"\"Computes elementwise reciprocal of square root of input :math:`x_i`.\n\n .. math::\n y_i = {1 \\\\over \\\\sqrt x_i}.\n\n Args:\n x (~chainer.Variable): Input variable.\n\n Returns:\n ~chainer.Variable: Output variable.\n\n .. 
seealso:: :func:`~chainer.functions.sqrt`\n \"\"\"\n return 1.0 / sqrt(x)\n", "path": "chainer/functions/math/sqrt.py"}], "after_files": [{"content": "import numpy\n\nfrom chainer.backends import cuda\nfrom chainer import function_node\nfrom chainer import utils\nfrom chainer.utils import type_check\n\n\nclass Sqrt(function_node.FunctionNode):\n\n @property\n def label(self):\n return 'sqrt'\n\n def check_type_forward(self, in_types):\n type_check.expect(\n in_types.size() == 1,\n in_types[0].dtype.kind == 'f',\n )\n\n def forward(self, x):\n self.retain_outputs((0,))\n xp = cuda.get_array_module(*x)\n return utils.force_array(xp.sqrt(x[0], dtype=x[0].dtype)),\n\n def backward(self, indexes, grad_outputs):\n gx = self.get_retained_outputs()[0]\n gy = grad_outputs[0]\n return gy / (gx * 2.0),\n\n\nclass Rsqrt(function_node.FunctionNode):\n\n @property\n def label(self):\n return 'rsqrt'\n\n def check_type_forward(self, in_types):\n type_check.expect(\n in_types.size() == 1,\n in_types[0].dtype.kind == 'f',\n )\n\n def forward(self, inputs):\n self.retain_inputs((0,))\n x, = inputs\n xp = cuda.get_array_module(x)\n dtype = x.dtype\n if xp is numpy:\n out = xp.reciprocal(xp.sqrt(x, dtype=dtype), dtype=dtype)\n else:\n # CuPy provides `rsqrt` which is faster than `1.0 / sqrt(x)`.\n out = cuda.cupyx.rsqrt(x, dtype=dtype)\n return utils.force_array(out),\n\n def backward(self, indexes, grad_outputs):\n x, = self.get_retained_inputs()\n gy, = grad_outputs\n return gy * (x ** -1.5) * -0.5,\n\n\ndef sqrt(x):\n \"\"\"Elementwise square root function.\n\n .. math::\n y_i = \\\\sqrt x_i.\n\n If the value of :math:`x_i` is negative, it returns ``Nan`` for :math:`y_i`\n respect to underlying numpy and cupy specification.\n\n Args:\n x (~chainer.Variable): Input variable.\n\n Returns:\n ~chainer.Variable: Output variable.\n \"\"\"\n return Sqrt().apply((x,))[0]\n\n\ndef rsqrt(x):\n \"\"\"Computes elementwise reciprocal of square root of input :math:`x_i`.\n\n .. math::\n y_i = {1 \\\\over \\\\sqrt x_i}.\n\n Args:\n x (~chainer.Variable): Input variable.\n\n Returns:\n ~chainer.Variable: Output variable.\n\n .. seealso:: :func:`~chainer.functions.sqrt`\n \"\"\"\n return Rsqrt().apply((x,))[0]\n", "path": "chainer/functions/math/sqrt.py"}]}
| 817 | 411 |
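The chainer patch dispatches on the array module: on CPU it composes `reciprocal(sqrt(x))`, since NumPy has no `rsqrt`, while on GPU CuPy's fused `cupyx.rsqrt` does both steps in a single kernel launch. A CPU-only sketch of the numerical equivalence (the GPU lines are commented out and assume a CUDA-enabled CuPy install):

```python
import numpy as np

x = np.array([0.25, 1.0, 4.0], dtype=np.float32)
composed = np.reciprocal(np.sqrt(x, dtype=x.dtype), dtype=x.dtype)
assert np.allclose(composed, 1.0 / np.sqrt(x))  # -> [2.0, 1.0, 0.5]

# On GPU the same values come from one fused ufunc:
# import cupy, cupyx
# assert cupy.allclose(cupyx.rsqrt(cupy.asarray(x)), cupy.asarray(composed))
```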
gh_patches_debug_34864 | rasdani/github-patches | git_diff | RedHatInsights__insights-core-1741 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
SMDA* instances are not business SAP instances
~~~
# cat insights_commands/usr.sap.hostctrl.exe.saphostctrl_-function_GetCIMObject_-enuminstances_SAPInstance
*********************************************************
CreationClassName , String , SAPInstance
SID , String , SMA
SystemNumber , String , 98
InstanceName , String , SMDA98
Hostname , String , li-ld-1846
FullQualifiedHostname , String , li-ld-1846.hag.hilti.com
SapVersionInfo , String , 749, patch 200, changelist 1746260
~~~
From Rolf:
> the reported instance SMDA98 is the Solution Manager agent, which the customer wants to use to monitor that system [1]. With newer systems, that can alternatively also be the diagnostic agent (instance name DAA*98).
* here is a typo, DAA should be SID name
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `insights/combiners/sap.py`
Content:
```
1 """
2 Sap - Combiner
3 ==============
4
5 This combiner gets the running SAP instances on the system based on below
6 logic::
7
8 if (SAPLOCALHOST = 'hostname') && InstanceType = D## ) then
9 on this system runs SAP Netweaver Application Server version
10
11 if (SAPLOCALHOST = 'hostname') && InstanceType = ASCS## ) then
12 on this system runs SAP Netweaver Application Server Central Instance
13 version
14
15 if (SAPLOCALHOST = 'hostname') && InstanceType = HDB## ) then
16 on this system runs SAP HANA database version
17
18 Check settings according SAP Notes compiled here:
19 https://wiki.scn.sap.com/wiki/x/rDK7Gg
20
21 """
22
23 from collections import namedtuple
24 from insights import LegacyItemAccess
25 from insights.parsers import SkipException
26 from insights.core.plugins import combiner
27 from insights.combiners.hostname import hostname
28 from insights.parsers.lssap import Lssap
29 from insights.parsers.saphostctrl import SAPHostCtrlInstances
30
31
32 SAPInstances = namedtuple("SAPInstances",
33 field_names=["name", "hostname", "sid", "type", "number", "version"])
34 """namedtuple: Type for storing the SAP instance."""
35
36
37 @combiner(hostname, optional=[SAPHostCtrlInstances, Lssap])
38 class Sap(LegacyItemAccess):
39 """
40 Combiner for analyzing the SAP instances running on the system.
41
42 Prefer SAPHostCtrlInstances to Lssap.
43
44 Examples:
45 >>> type(saps)
46 <class 'insights.combiners.sap.Sap'>
47 >>> saps['D16'].number
48 '16'
49 >>> saps.sid('HDB16')
50 'HA2'
51 >>> saps.hostname('HDB16')
52 'lu0417'
53 >>> 'D22' in saps.local_instances
54 False
55 >>> saps.is_hana
56 True
57 >>> saps.is_netweaver
58 True
59 >>> saps.is_ascs
60 False
61
62 Attributes:
63 all_instances (list): List all the SAP instances listed by the command.
64 local_instances (list): List SAP instances which are running on the system.
65 """
66
67 def __init__(self, hostname, insts, lssap):
68 hn = hostname.hostname
69 self.data = {}
70 self.local_instances = []
71 self.all_instances = []
72 self._types = set()
73 if insts:
74 for inst in insts.data:
75 k = inst['InstanceName']
76 self.all_instances.append(k)
77 if hn == inst['Hostname']:
78 self.local_instances.append(k)
79 self._types.add(inst['InstanceType'])
80 self.data[k] = SAPInstances(k,
81 inst['Hostname'],
82 inst['SID'],
83 inst['InstanceType'],
84 inst['SystemNumber'],
85 inst['SapVersionInfo'])
86 elif lssap:
87 for inst in lssap.data:
88 k = inst['Instance']
89 t = k.rstrip('1234567890')
90 self.all_instances.append(k)
91 if hn == inst['SAPLOCALHOST']:
92 self.local_instances.append(k)
93 self._types.add(t)
94 self.data[k] = SAPInstances(k,
95 inst['SAPLOCALHOST'],
96 inst['SID'],
97 t,
98 inst['Nr'],
99 inst['Version'])
100 else:
101 raise SkipException('No SAP instance.')
102
103 def version(self, instance):
104 """str: Returns the version of the ``instance``."""
105 return self.data[instance].version if instance in self.data else None
106
107 def sid(self, instance):
108 """str: Returns the sid of the ``instance``."""
109 return self.data[instance].sid if instance in self.data else None
110
111 def type(self, instance):
112 """str: Returns the type code of the ``instance``."""
113 return self.data[instance].type if instance in self.data else None
114
115 def hostname(self, instance):
116 """str: Returns the hostname of the ``instance``."""
117 return self.data[instance].hostname if instance in self.data else None
118
119 def number(self, instance):
120 """str: Returns the systeme number of the ``instance``."""
121 return self.data[instance].number if instance in self.data else None
122
123 @property
124 def is_netweaver(self):
125 """bool: SAP Netweaver is running on the system."""
126 return 'D' in self._types
127
128 @property
129 def is_hana(self):
130 """bool: SAP Hana is running on the system."""
131 return 'HDB' in self._types
132
133 @property
134 def is_ascs(self):
135 """bool: SAP System Central Services is running on the system."""
136 return 'ASCS' in self._types
137
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/insights/combiners/sap.py b/insights/combiners/sap.py
--- a/insights/combiners/sap.py
+++ b/insights/combiners/sap.py
@@ -52,6 +52,8 @@
'lu0417'
>>> 'D22' in saps.local_instances
False
+ >>> len(saps.business_instances)
+ 2
>>> saps.is_hana
True
>>> saps.is_netweaver
@@ -60,14 +62,19 @@
False
Attributes:
- all_instances (list): List all the SAP instances listed by the command.
- local_instances (list): List SAP instances which are running on the system.
+ all_instances (list): List of all the SAP instances listed by the command.
+ local_instances (list): List of SAP instances which are running on the system.
+ function_instances (list): List of function SAP instances running on the system.
+ E.g. Diagnostics Agents SMDA97/SMDA98
+ business_instances (list): List of business SAP instances running on the system.
+ E.g. HANA, NetWeaver, ASCS, or others
"""
-
def __init__(self, hostname, insts, lssap):
hn = hostname.hostname
self.data = {}
self.local_instances = []
+ self.business_instances = []
+ self.function_instances = []
self.all_instances = []
self._types = set()
if insts:
@@ -100,6 +107,10 @@
else:
raise SkipException('No SAP instance.')
+ FUNC_INSTS = ('SMDA')
+ for i in self.local_instances:
+ (self.function_instances if i.startswith(FUNC_INSTS) else self.business_instances).append(i)
+
def version(self, instance):
"""str: Returns the version of the ``instance``."""
return self.data[instance].version if instance in self.data else None
@@ -122,7 +133,7 @@
@property
def is_netweaver(self):
- """bool: SAP Netweaver is running on the system."""
+ """bool: SAP NetWeaver is running on the system."""
return 'D' in self._types
@property
|
{"golden_diff": "diff --git a/insights/combiners/sap.py b/insights/combiners/sap.py\n--- a/insights/combiners/sap.py\n+++ b/insights/combiners/sap.py\n@@ -52,6 +52,8 @@\n 'lu0417'\n >>> 'D22' in saps.local_instances\n False\n+ >>> len(saps.business_instances)\n+ 2\n >>> saps.is_hana\n True\n >>> saps.is_netweaver\n@@ -60,14 +62,19 @@\n False\n \n Attributes:\n- all_instances (list): List all the SAP instances listed by the command.\n- local_instances (list): List SAP instances which are running on the system.\n+ all_instances (list): List of all the SAP instances listed by the command.\n+ local_instances (list): List of SAP instances which are running on the system.\n+ function_instances (list): List of function SAP instances running on the system.\n+ E.g. Diagnostics Agents SMDA97/SMDA98\n+ business_instances (list): List of business SAP instances running on the system.\n+ E.g. HANA, NetWeaver, ASCS, or others\n \"\"\"\n-\n def __init__(self, hostname, insts, lssap):\n hn = hostname.hostname\n self.data = {}\n self.local_instances = []\n+ self.business_instances = []\n+ self.function_instances = []\n self.all_instances = []\n self._types = set()\n if insts:\n@@ -100,6 +107,10 @@\n else:\n raise SkipException('No SAP instance.')\n \n+ FUNC_INSTS = ('SMDA')\n+ for i in self.local_instances:\n+ (self.function_instances if i.startswith(FUNC_INSTS) else self.business_instances).append(i)\n+\n def version(self, instance):\n \"\"\"str: Returns the version of the ``instance``.\"\"\"\n return self.data[instance].version if instance in self.data else None\n@@ -122,7 +133,7 @@\n \n @property\n def is_netweaver(self):\n- \"\"\"bool: SAP Netweaver is running on the system.\"\"\"\n+ \"\"\"bool: SAP NetWeaver is running on the system.\"\"\"\n return 'D' in self._types\n \n @property\n", "issue": "SMDA* is not business SAP instances\n~~~\r\n# cat insights_commands/usr.sap.hostctrl.exe.saphostctrl_-function_GetCIMObject_-enuminstances_SAPInstance\r\n*********************************************************\r\n CreationClassName , String , SAPInstance\r\n SID , String , SMA\r\n SystemNumber , String , 98\r\n InstanceName , String , SMDA98\r\n Hostname , String , li-ld-1846\r\n FullQualifiedHostname , String , li-ld-1846.hag.hilti.com\r\n SapVersionInfo , String , 749, patch 200, changelist 1746260\r\n~~~\r\n\r\nFrom Rolf:\r\n> the reported instance SMDA98 is the Solution Manager agent, which the customer wants use to monitor that system [1]. 
With newer systems, that can alternatively also be the diagnostic agent (instance name DAA*98).\r\n* here is a typo, DAA should be SID name\n", "before_files": [{"content": "\"\"\"\nSap - Combiner\n==============\n\nThis combiner gets the running SAP instances on the system based on below\nlogic::\n\n if (SAPLOCALHOST = 'hostname') && InstanceType = D## ) then\n on this system runs SAP Netweaver Application Server version\n\n if (SAPLOCALHOST = 'hostname') && InstanceType = ASCS## ) then\n on this system runs SAP Netweaver Application Server Central Instance\n version\n\n if (SAPLOCALHOST = 'hostname') && InstanceType = HDB## ) then\n on this system runs SAP HANA database version\n\nCheck settings according SAP Notes compiled here:\nhttps://wiki.scn.sap.com/wiki/x/rDK7Gg\n\n\"\"\"\n\nfrom collections import namedtuple\nfrom insights import LegacyItemAccess\nfrom insights.parsers import SkipException\nfrom insights.core.plugins import combiner\nfrom insights.combiners.hostname import hostname\nfrom insights.parsers.lssap import Lssap\nfrom insights.parsers.saphostctrl import SAPHostCtrlInstances\n\n\nSAPInstances = namedtuple(\"SAPInstances\",\n field_names=[\"name\", \"hostname\", \"sid\", \"type\", \"number\", \"version\"])\n\"\"\"namedtuple: Type for storing the SAP instance.\"\"\"\n\n\n@combiner(hostname, optional=[SAPHostCtrlInstances, Lssap])\nclass Sap(LegacyItemAccess):\n \"\"\"\n Combiner for analyzing the SAP instances running on the system.\n\n Prefer SAPHostCtrlInstances to Lssap.\n\n Examples:\n >>> type(saps)\n <class 'insights.combiners.sap.Sap'>\n >>> saps['D16'].number\n '16'\n >>> saps.sid('HDB16')\n 'HA2'\n >>> saps.hostname('HDB16')\n 'lu0417'\n >>> 'D22' in saps.local_instances\n False\n >>> saps.is_hana\n True\n >>> saps.is_netweaver\n True\n >>> saps.is_ascs\n False\n\n Attributes:\n all_instances (list): List all the SAP instances listed by the command.\n local_instances (list): List SAP instances which are running on the system.\n \"\"\"\n\n def __init__(self, hostname, insts, lssap):\n hn = hostname.hostname\n self.data = {}\n self.local_instances = []\n self.all_instances = []\n self._types = set()\n if insts:\n for inst in insts.data:\n k = inst['InstanceName']\n self.all_instances.append(k)\n if hn == inst['Hostname']:\n self.local_instances.append(k)\n self._types.add(inst['InstanceType'])\n self.data[k] = SAPInstances(k,\n inst['Hostname'],\n inst['SID'],\n inst['InstanceType'],\n inst['SystemNumber'],\n inst['SapVersionInfo'])\n elif lssap:\n for inst in lssap.data:\n k = inst['Instance']\n t = k.rstrip('1234567890')\n self.all_instances.append(k)\n if hn == inst['SAPLOCALHOST']:\n self.local_instances.append(k)\n self._types.add(t)\n self.data[k] = SAPInstances(k,\n inst['SAPLOCALHOST'],\n inst['SID'],\n t,\n inst['Nr'],\n inst['Version'])\n else:\n raise SkipException('No SAP instance.')\n\n def version(self, instance):\n \"\"\"str: Returns the version of the ``instance``.\"\"\"\n return self.data[instance].version if instance in self.data else None\n\n def sid(self, instance):\n \"\"\"str: Returns the sid of the ``instance``.\"\"\"\n return self.data[instance].sid if instance in self.data else None\n\n def type(self, instance):\n \"\"\"str: Returns the type code of the ``instance``.\"\"\"\n return self.data[instance].type if instance in self.data else None\n\n def hostname(self, instance):\n \"\"\"str: Returns the hostname of the ``instance``.\"\"\"\n return self.data[instance].hostname if instance in self.data else None\n\n def number(self, instance):\n 
\"\"\"str: Returns the systeme number of the ``instance``.\"\"\"\n return self.data[instance].number if instance in self.data else None\n\n @property\n def is_netweaver(self):\n \"\"\"bool: SAP Netweaver is running on the system.\"\"\"\n return 'D' in self._types\n\n @property\n def is_hana(self):\n \"\"\"bool: SAP Hana is running on the system.\"\"\"\n return 'HDB' in self._types\n\n @property\n def is_ascs(self):\n \"\"\"bool: SAP System Central Services is running on the system.\"\"\"\n return 'ASCS' in self._types\n", "path": "insights/combiners/sap.py"}], "after_files": [{"content": "\"\"\"\nSap - Combiner\n==============\n\nThis combiner gets the running SAP instances on the system based on below\nlogic::\n\n if (SAPLOCALHOST = 'hostname') && InstanceType = D## ) then\n on this system runs SAP Netweaver Application Server version\n\n if (SAPLOCALHOST = 'hostname') && InstanceType = ASCS## ) then\n on this system runs SAP Netweaver Application Server Central Instance\n version\n\n if (SAPLOCALHOST = 'hostname') && InstanceType = HDB## ) then\n on this system runs SAP HANA database version\n\nCheck settings according SAP Notes compiled here:\nhttps://wiki.scn.sap.com/wiki/x/rDK7Gg\n\n\"\"\"\n\nfrom collections import namedtuple\nfrom insights import LegacyItemAccess\nfrom insights.parsers import SkipException\nfrom insights.core.plugins import combiner\nfrom insights.combiners.hostname import hostname\nfrom insights.parsers.lssap import Lssap\nfrom insights.parsers.saphostctrl import SAPHostCtrlInstances\n\n\nSAPInstances = namedtuple(\"SAPInstances\",\n field_names=[\"name\", \"hostname\", \"sid\", \"type\", \"number\", \"version\"])\n\"\"\"namedtuple: Type for storing the SAP instance.\"\"\"\n\n\n@combiner(hostname, optional=[SAPHostCtrlInstances, Lssap])\nclass Sap(LegacyItemAccess):\n \"\"\"\n Combiner for analyzing the SAP instances running on the system.\n\n Prefer SAPHostCtrlInstances to Lssap.\n\n Examples:\n >>> type(saps)\n <class 'insights.combiners.sap.Sap'>\n >>> saps['D16'].number\n '16'\n >>> saps.sid('HDB16')\n 'HA2'\n >>> saps.hostname('HDB16')\n 'lu0417'\n >>> 'D22' in saps.local_instances\n False\n >>> len(saps.business_instances)\n 2\n >>> saps.is_hana\n True\n >>> saps.is_netweaver\n True\n >>> saps.is_ascs\n False\n\n Attributes:\n all_instances (list): List of all the SAP instances listed by the command.\n local_instances (list): List of SAP instances which are running on the system.\n function_instances (list): List of function SAP instances running on the system.\n E.g. Diagnostics Agents SMDA97/SMDA98\n business_instances (list): List of business SAP instances running on the system.\n E.g. 
HANA, NetWeaver, ASCS, or others\n \"\"\"\n def __init__(self, hostname, insts, lssap):\n hn = hostname.hostname\n self.data = {}\n self.local_instances = []\n self.business_instances = []\n self.function_instances = []\n self.all_instances = []\n self._types = set()\n if insts:\n for inst in insts.data:\n k = inst['InstanceName']\n self.all_instances.append(k)\n if hn == inst['Hostname']:\n self.local_instances.append(k)\n self._types.add(inst['InstanceType'])\n self.data[k] = SAPInstances(k,\n inst['Hostname'],\n inst['SID'],\n inst['InstanceType'],\n inst['SystemNumber'],\n inst['SapVersionInfo'])\n elif lssap:\n for inst in lssap.data:\n k = inst['Instance']\n t = k.rstrip('1234567890')\n self.all_instances.append(k)\n if hn == inst['SAPLOCALHOST']:\n self.local_instances.append(k)\n self._types.add(t)\n self.data[k] = SAPInstances(k,\n inst['SAPLOCALHOST'],\n inst['SID'],\n t,\n inst['Nr'],\n inst['Version'])\n else:\n raise SkipException('No SAP instance.')\n\n FUNC_INSTS = ('SMDA')\n for i in self.local_instances:\n (self.function_instances if i.startswith(FUNC_INSTS) else self.business_instances).append(i)\n\n def version(self, instance):\n \"\"\"str: Returns the version of the ``instance``.\"\"\"\n return self.data[instance].version if instance in self.data else None\n\n def sid(self, instance):\n \"\"\"str: Returns the sid of the ``instance``.\"\"\"\n return self.data[instance].sid if instance in self.data else None\n\n def type(self, instance):\n \"\"\"str: Returns the type code of the ``instance``.\"\"\"\n return self.data[instance].type if instance in self.data else None\n\n def hostname(self, instance):\n \"\"\"str: Returns the hostname of the ``instance``.\"\"\"\n return self.data[instance].hostname if instance in self.data else None\n\n def number(self, instance):\n \"\"\"str: Returns the systeme number of the ``instance``.\"\"\"\n return self.data[instance].number if instance in self.data else None\n\n @property\n def is_netweaver(self):\n \"\"\"bool: SAP NetWeaver is running on the system.\"\"\"\n return 'D' in self._types\n\n @property\n def is_hana(self):\n \"\"\"bool: SAP Hana is running on the system.\"\"\"\n return 'HDB' in self._types\n\n @property\n def is_ascs(self):\n \"\"\"bool: SAP System Central Services is running on the system.\"\"\"\n return 'ASCS' in self._types\n", "path": "insights/combiners/sap.py"}]}
| 1,809 | 525 |
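Note on the record above: the fix partitions the local SAP instances into "function" instances (SAP Diagnostics Agents, instance names starting with `SMDA`) and "business" instances (everything else, e.g. `D##`, `ASCS##`, `HDB##`). Below is a minimal standalone sketch of that partition logic; the instance names are illustrative, not taken from a real system.

```python
# Sketch of the partition added in the patch above. Note that
# str.startswith() accepts a plain string, so the patch's
# FUNC_INSTS = ('SMDA') (a string, not a one-element tuple) still works.
def split_instances(local_instances):
    """Return (business, function) instance-name lists."""
    business, function = [], []
    for name in local_instances:
        (function if name.startswith('SMDA') else business).append(name)
    return business, function

print(split_instances(['D16', 'HDB16', 'SMDA98']))
# -> (['D16', 'HDB16'], ['SMDA98'])
```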
gh_patches_debug_16856
|
rasdani/github-patches
|
git_diff
|
googleapis__google-cloud-python-3348
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error reporting system tests needed
Follow up to #3263.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `error_reporting/nox.py`
Content:
```
1 # Copyright 2016 Google Inc.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from __future__ import absolute_import
16
17 import os
18
19 import nox
20
21
22 LOCAL_DEPS = ('../core/', '../logging/')
23
24
25 @nox.session
26 @nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6'])
27 def unit_tests(session, python_version):
28 """Run the unit test suite."""
29
30 # Run unit tests against all supported versions of Python.
31 session.interpreter = 'python{}'.format(python_version)
32
33 # Install all test dependencies, then install this package in-place.
34 session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS)
35 session.install('-e', '.')
36
37 # Run py.test against the unit tests.
38 session.run(
39 'py.test', '--quiet', '--cov=google.cloud.error_reporting',
40 '--cov=tests.unit', '--cov-append', '--cov-config=.coveragerc',
41 '--cov-report=', '--cov-fail-under=97', 'tests/unit',
42 )
43
44
45 @nox.session
46 def lint(session):
47 """Run flake8.
48
49 Returns a failure if flake8 finds linting errors or sufficiently
50 serious code quality issues.
51 """
52 session.interpreter = 'python3.6'
53 session.install('flake8', *LOCAL_DEPS)
54 session.install('.')
55 session.run('flake8', 'google/cloud/error_reporting')
56
57
58 @nox.session
59 def lint_setup_py(session):
60 """Verify that setup.py is valid (including RST check)."""
61 session.interpreter = 'python3.6'
62 session.install('docutils', 'Pygments')
63 session.run(
64 'python', 'setup.py', 'check', '--restructuredtext', '--strict')
65
66
67 @nox.session
68 def cover(session):
69 """Run the final coverage report.
70
71 This outputs the coverage report aggregating coverage from the unit
72 test runs (not system test runs), and then erases coverage data.
73 """
74 session.interpreter = 'python3.6'
75 session.install('coverage', 'pytest-cov')
76 session.run('coverage', 'report', '--show-missing', '--fail-under=100')
77 session.run('coverage', 'erase')
78
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/error_reporting/nox.py b/error_reporting/nox.py
--- a/error_reporting/nox.py
+++ b/error_reporting/nox.py
@@ -64,6 +64,28 @@
'python', 'setup.py', 'check', '--restructuredtext', '--strict')
+@nox.session
+@nox.parametrize('python_version', ['2.7', '3.6'])
+def system_tests(session, python_version):
+ """Run the system test suite."""
+
+ # Sanity check: Only run system tests if the environment variable is set.
+ if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):
+ return
+
+ # Run the system tests against latest Python 2 and Python 3 only.
+ session.interpreter = 'python{}'.format(python_version)
+
+ # Install all test dependencies, then install this package into the
+ # virtualenv's dist-packages.
+ session.install('mock', 'pytest', *LOCAL_DEPS)
+ session.install('../test_utils/')
+ session.install('.')
+
+ # Run py.test against the system tests.
+ session.run('py.test', '-vvv', 'tests/system.py')
+
+
@nox.session
def cover(session):
"""Run the final coverage report.
|
{"golden_diff": "diff --git a/error_reporting/nox.py b/error_reporting/nox.py\n--- a/error_reporting/nox.py\n+++ b/error_reporting/nox.py\n@@ -64,6 +64,28 @@\n 'python', 'setup.py', 'check', '--restructuredtext', '--strict')\n \n \[email protected]\[email protected]('python_version', ['2.7', '3.6'])\n+def system_tests(session, python_version):\n+ \"\"\"Run the system test suite.\"\"\"\n+\n+ # Sanity check: Only run system tests if the environment variable is set.\n+ if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):\n+ return\n+\n+ # Run the system tests against latest Python 2 and Python 3 only.\n+ session.interpreter = 'python{}'.format(python_version)\n+\n+ # Install all test dependencies, then install this package into the\n+ # virtualenv's dist-packages.\n+ session.install('mock', 'pytest', *LOCAL_DEPS)\n+ session.install('../test_utils/')\n+ session.install('.')\n+\n+ # Run py.test against the system tests.\n+ session.run('py.test', '-vvv', 'tests/system.py')\n+\n+\n @nox.session\n def cover(session):\n \"\"\"Run the final coverage report.\n", "issue": "Error reporting system tests needed\nFollow up to #3263.\n", "before_files": [{"content": "# Copyright 2016 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\n\nimport os\n\nimport nox\n\n\nLOCAL_DEPS = ('../core/', '../logging/')\n\n\[email protected]\[email protected]('python_version', ['2.7', '3.4', '3.5', '3.6'])\ndef unit_tests(session, python_version):\n \"\"\"Run the unit test suite.\"\"\"\n\n # Run unit tests against all supported versions of Python.\n session.interpreter = 'python{}'.format(python_version)\n\n # Install all test dependencies, then install this package in-place.\n session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS)\n session.install('-e', '.')\n\n # Run py.test against the unit tests.\n session.run(\n 'py.test', '--quiet', '--cov=google.cloud.error_reporting',\n '--cov=tests.unit', '--cov-append', '--cov-config=.coveragerc',\n '--cov-report=', '--cov-fail-under=97', 'tests/unit',\n )\n\n\[email protected]\ndef lint(session):\n \"\"\"Run flake8.\n\n Returns a failure if flake8 finds linting errors or sufficiently\n serious code quality issues.\n \"\"\"\n session.interpreter = 'python3.6'\n session.install('flake8', *LOCAL_DEPS)\n session.install('.')\n session.run('flake8', 'google/cloud/error_reporting')\n\n\[email protected]\ndef lint_setup_py(session):\n \"\"\"Verify that setup.py is valid (including RST check).\"\"\"\n session.interpreter = 'python3.6'\n session.install('docutils', 'Pygments')\n session.run(\n 'python', 'setup.py', 'check', '--restructuredtext', '--strict')\n\n\[email protected]\ndef cover(session):\n \"\"\"Run the final coverage report.\n\n This outputs the coverage report aggregating coverage from the unit\n test runs (not system test runs), and then erases coverage data.\n \"\"\"\n session.interpreter = 'python3.6'\n session.install('coverage', 'pytest-cov')\n session.run('coverage', 'report', '--show-missing', 
'--fail-under=100')\n session.run('coverage', 'erase')\n", "path": "error_reporting/nox.py"}], "after_files": [{"content": "# Copyright 2016 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\n\nimport os\n\nimport nox\n\n\nLOCAL_DEPS = ('../core/', '../logging/')\n\n\[email protected]\[email protected]('python_version', ['2.7', '3.4', '3.5', '3.6'])\ndef unit_tests(session, python_version):\n \"\"\"Run the unit test suite.\"\"\"\n\n # Run unit tests against all supported versions of Python.\n session.interpreter = 'python{}'.format(python_version)\n\n # Install all test dependencies, then install this package in-place.\n session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS)\n session.install('-e', '.')\n\n # Run py.test against the unit tests.\n session.run(\n 'py.test', '--quiet', '--cov=google.cloud.error_reporting',\n '--cov=tests.unit', '--cov-append', '--cov-config=.coveragerc',\n '--cov-report=', '--cov-fail-under=97', 'tests/unit',\n )\n\n\[email protected]\ndef lint(session):\n \"\"\"Run flake8.\n\n Returns a failure if flake8 finds linting errors or sufficiently\n serious code quality issues.\n \"\"\"\n session.interpreter = 'python3.6'\n session.install('flake8', *LOCAL_DEPS)\n session.install('.')\n session.run('flake8', 'google/cloud/error_reporting')\n\n\[email protected]\ndef lint_setup_py(session):\n \"\"\"Verify that setup.py is valid (including RST check).\"\"\"\n session.interpreter = 'python3.6'\n session.install('docutils', 'Pygments')\n session.run(\n 'python', 'setup.py', 'check', '--restructuredtext', '--strict')\n\n\[email protected]\[email protected]('python_version', ['2.7', '3.6'])\ndef system_tests(session, python_version):\n \"\"\"Run the system test suite.\"\"\"\n\n # Sanity check: Only run system tests if the environment variable is set.\n if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):\n return\n\n # Run the system tests against latest Python 2 and Python 3 only.\n session.interpreter = 'python{}'.format(python_version)\n\n # Install all test dependencies, then install this package into the\n # virtualenv's dist-packages.\n session.install('mock', 'pytest', *LOCAL_DEPS)\n session.install('../test_utils/')\n session.install('.')\n\n # Run py.test against the system tests.\n session.run('py.test', '-vvv', 'tests/system.py')\n\n\[email protected]\ndef cover(session):\n \"\"\"Run the final coverage report.\n\n This outputs the coverage report aggregating coverage from the unit\n test runs (not system test runs), and then erases coverage data.\n \"\"\"\n session.interpreter = 'python3.6'\n session.install('coverage', 'pytest-cov')\n session.run('coverage', 'report', '--show-missing', '--fail-under=100')\n session.run('coverage', 'erase')\n", "path": "error_reporting/nox.py"}]}
| 1,025 | 278 |
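Note on the record above: the added `system_tests` session mirrors the existing `unit_tests` session but exits early unless credentials are configured, so CI runs without a service account simply skip it. A minimal sketch of that guard is below; only the environment-variable name comes from the diff, the rest is illustrative.

```python
# Sketch of the credentials guard used by the new nox session.
import os

def have_system_test_credentials():
    # System tests hit real Google Cloud APIs, so they are skipped unless
    # service-account credentials are configured in the environment.
    return bool(os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''))

if __name__ == '__main__':
    print('run system tests:', have_system_test_credentials())
```

With nox installed, the parametrized session would typically be selected with something like `nox -s system_tests`, which expands to one run per configured Python version.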
gh_patches_debug_12060
|
rasdani/github-patches
|
git_diff
|
ivy-llc__ivy-21310
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
adaptive_avg_pool2d
Close #20804
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ivy/functional/frontends/mindspore/ops/function/nn_func.py`
Content:
```
1 """Includes Mindspore Frontend functions listed in the TODO list
2 https://github.com/unifyai/ivy/issues/14951."""
3
4 # local
5 import ivy
6 from ivy.func_wrapper import with_supported_dtypes
7 from ivy.functional.frontends.paddle.func_wrapper import to_ivy_arrays_and_back
8
9
10 @with_supported_dtypes(
11 {
12 "2.0.0 and below": (
13 "int8",
14 "int16",
15 "int32",
16 "int64",
17 "float16",
18 "float32",
19 "float64",
20 )
21 },
22 "mindspore",
23 )
24 @to_ivy_arrays_and_back
25 def dropout2d(input, p=0.5, training=True):
26 return ivy.dropout2d(input, p, training=training, data_format="NCHW")
27
28
29 @with_supported_dtypes({"2.0.0 and below": ("float16", "float32")}, "mindspore")
30 @to_ivy_arrays_and_back
31 def selu(input_x):
32 return ivy.selu(input_x)
33
34
35 @with_supported_dtypes({"2.0 and below": ("float16", "float32")}, "mindspore")
36 @to_ivy_arrays_and_back
37 def softsign(x):
38 return ivy.divide(x, ivy.add(1, ivy.abs(x)))
39
40 @with_supported_dtypes({"2.0 and below": ("int8", "int16", "int32", "int64", "float16", "float32", "float64")}, "mindspore")
41 @to_ivy_arrays_and_back
42 def pad(input, pad_width, mode='constant', constant_values=0):
43 return ivy.pad(input, pad_width, mode=mode, constant_values=constant_values)
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ivy/functional/frontends/mindspore/ops/function/nn_func.py b/ivy/functional/frontends/mindspore/ops/function/nn_func.py
--- a/ivy/functional/frontends/mindspore/ops/function/nn_func.py
+++ b/ivy/functional/frontends/mindspore/ops/function/nn_func.py
@@ -40,4 +40,10 @@
@with_supported_dtypes({"2.0 and below": ("int8", "int16", "int32", "int64", "float16", "float32", "float64")}, "mindspore")
@to_ivy_arrays_and_back
def pad(input, pad_width, mode='constant', constant_values=0):
- return ivy.pad(input, pad_width, mode=mode, constant_values=constant_values)
\ No newline at end of file
+ return ivy.pad(input, pad_width, mode=mode, constant_values=constant_values)
+
+
+@with_supported_dtypes({"2.0.0 and below": ("float16", "float32", "float64")}, "mindspore")
+@to_ivy_arrays_and_back
+def adaptive_avg_pool2d(input, output_size):
+ return ivy.adaptive_avg_pool2d(input, output_size)
|
{"golden_diff": "diff --git a/ivy/functional/frontends/mindspore/ops/function/nn_func.py b/ivy/functional/frontends/mindspore/ops/function/nn_func.py\n--- a/ivy/functional/frontends/mindspore/ops/function/nn_func.py\n+++ b/ivy/functional/frontends/mindspore/ops/function/nn_func.py\n@@ -40,4 +40,10 @@\n @with_supported_dtypes({\"2.0 and below\": (\"int8\", \"int16\", \"int32\", \"int64\", \"float16\", \"float32\", \"float64\")}, \"mindspore\")\n @to_ivy_arrays_and_back\n def pad(input, pad_width, mode='constant', constant_values=0):\n- return ivy.pad(input, pad_width, mode=mode, constant_values=constant_values)\n\\ No newline at end of file\n+ return ivy.pad(input, pad_width, mode=mode, constant_values=constant_values)\n+\n+\n+@with_supported_dtypes({\"2.0.0 and below\": (\"float16\", \"float32\", \"float64\")}, \"mindspore\")\n+@to_ivy_arrays_and_back\n+def adaptive_avg_pool2d(input, output_size):\n+ return ivy.adaptive_avg_pool2d(input, output_size)\n", "issue": "adaptive_avg_pool2d\nClose #20804\n", "before_files": [{"content": "\"\"\"Includes Mindspore Frontend functions listed in the TODO list\nhttps://github.com/unifyai/ivy/issues/14951.\"\"\"\n\n# local\nimport ivy\nfrom ivy.func_wrapper import with_supported_dtypes\nfrom ivy.functional.frontends.paddle.func_wrapper import to_ivy_arrays_and_back\n\n\n@with_supported_dtypes(\n {\n \"2.0.0 and below\": (\n \"int8\",\n \"int16\",\n \"int32\",\n \"int64\",\n \"float16\",\n \"float32\",\n \"float64\",\n )\n },\n \"mindspore\",\n)\n@to_ivy_arrays_and_back\ndef dropout2d(input, p=0.5, training=True):\n return ivy.dropout2d(input, p, training=training, data_format=\"NCHW\")\n\n\n@with_supported_dtypes({\"2.0.0 and below\": (\"float16\", \"float32\")}, \"mindspore\")\n@to_ivy_arrays_and_back\ndef selu(input_x):\n return ivy.selu(input_x)\n\n\n@with_supported_dtypes({\"2.0 and below\": (\"float16\", \"float32\")}, \"mindspore\")\n@to_ivy_arrays_and_back\ndef softsign(x):\n return ivy.divide(x, ivy.add(1, ivy.abs(x)))\n\n@with_supported_dtypes({\"2.0 and below\": (\"int8\", \"int16\", \"int32\", \"int64\", \"float16\", \"float32\", \"float64\")}, \"mindspore\")\n@to_ivy_arrays_and_back\ndef pad(input, pad_width, mode='constant', constant_values=0):\n return ivy.pad(input, pad_width, mode=mode, constant_values=constant_values)", "path": "ivy/functional/frontends/mindspore/ops/function/nn_func.py"}], "after_files": [{"content": "\"\"\"Includes Mindspore Frontend functions listed in the TODO list\nhttps://github.com/unifyai/ivy/issues/14951.\"\"\"\n\n# local\nimport ivy\nfrom ivy.func_wrapper import with_supported_dtypes\nfrom ivy.functional.frontends.paddle.func_wrapper import to_ivy_arrays_and_back\n\n\n@with_supported_dtypes(\n {\n \"2.0.0 and below\": (\n \"int8\",\n \"int16\",\n \"int32\",\n \"int64\",\n \"float16\",\n \"float32\",\n \"float64\",\n )\n },\n \"mindspore\",\n)\n@to_ivy_arrays_and_back\ndef dropout2d(input, p=0.5, training=True):\n return ivy.dropout2d(input, p, training=training, data_format=\"NCHW\")\n\n\n@with_supported_dtypes({\"2.0.0 and below\": (\"float16\", \"float32\")}, \"mindspore\")\n@to_ivy_arrays_and_back\ndef selu(input_x):\n return ivy.selu(input_x)\n\n\n@with_supported_dtypes({\"2.0 and below\": (\"float16\", \"float32\")}, \"mindspore\")\n@to_ivy_arrays_and_back\ndef softsign(x):\n return ivy.divide(x, ivy.add(1, ivy.abs(x)))\n\n@with_supported_dtypes({\"2.0 and below\": (\"int8\", \"int16\", \"int32\", \"int64\", \"float16\", \"float32\", \"float64\")}, \"mindspore\")\n@to_ivy_arrays_and_back\ndef pad(input, 
pad_width, mode='constant', constant_values=0):\n return ivy.pad(input, pad_width, mode=mode, constant_values=constant_values)\n\n\n@with_supported_dtypes({\"2.0.0 and below\": (\"float16\", \"float32\", \"float64\")}, \"mindspore\")\n@to_ivy_arrays_and_back\ndef adaptive_avg_pool2d(input, output_size):\n return ivy.adaptive_avg_pool2d(input, output_size)\n", "path": "ivy/functional/frontends/mindspore/ops/function/nn_func.py"}]}
| 759 | 294 |
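Note on the record above: adaptive average pooling averages over input windows whose boundaries are derived from the requested output size, so any input resolution maps to a fixed output resolution. Below is a reference NumPy sketch of the computation for NCHW inputs; it is for illustration only and is not the ivy implementation.

```python
import numpy as np

def adaptive_avg_pool2d(x, output_size):
    """Reference adaptive average pooling for an NCHW array."""
    out_h, out_w = output_size
    n, c, h, w = x.shape
    out = np.empty((n, c, out_h, out_w), dtype=x.dtype)
    for i in range(out_h):
        # Window boundaries: floor(i*h/out_h) .. ceil((i+1)*h/out_h)
        h0, h1 = (i * h) // out_h, -((-(i + 1) * h) // out_h)
        for j in range(out_w):
            w0, w1 = (j * w) // out_w, -((-(j + 1) * w) // out_w)
            out[:, :, i, j] = x[:, :, h0:h1, w0:w1].mean(axis=(2, 3))
    return out

x = np.arange(16, dtype=float).reshape(1, 1, 4, 4)
print(adaptive_avg_pool2d(x, (2, 2)))  # each output cell averages a 2x2 block
```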
gh_patches_debug_10346
|
rasdani/github-patches
|
git_diff
|
liberapay__liberapay.com-180
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fix connecting a GitHub org
I tried connecting the GitHub liberapay org to the [LiberapayOrg](https://liberapay.com/LiberapayOrg/) account but I ended up on the take-over confirmation page asking me to transfer my personal account.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `liberapay/elsewhere/github.py`
Content:
```
1 from __future__ import absolute_import, division, print_function, unicode_literals
2
3 from liberapay.elsewhere import PlatformOAuth2
4 from liberapay.elsewhere._extractors import key
5 from liberapay.elsewhere._paginators import header_links_paginator
6
7
8 class GitHub(PlatformOAuth2):
9
10 # Platform attributes
11 name = 'github'
12 display_name = 'GitHub'
13 account_url = 'https://github.com/{user_name}'
14 allows_team_connect = True
15
16 # Auth attributes
17 auth_url = 'https://github.com/login/oauth/authorize'
18 access_token_url = 'https://github.com/login/oauth/access_token'
19 oauth_email_scope = 'user:email'
20 oauth_default_scope = ['read:org']
21
22 # API attributes
23 api_format = 'json'
24 api_paginator = header_links_paginator()
25 api_url = 'https://api.github.com'
26 api_user_info_path = '/user/{user_id}'
27 api_user_name_info_path = '/users/{user_name}'
28 api_user_self_info_path = '/user'
29 api_team_members_path = '/orgs/{user_name}/public_members'
30 api_friends_path = '/users/{user_name}/following'
31 ratelimit_headers_prefix = 'x-ratelimit-'
32
33 # User info extractors
34 x_user_id = key('id')
35 x_user_name = key('login')
36 x_display_name = key('name')
37 x_email = key('email')
38 x_gravatar_id = key('gravatar_id')
39 x_avatar_url = key('avatar_url')
40 x_is_team = key('type', clean=lambda t: t.lower() == 'organization')
41
42 def is_team_admin(self, team_name, sess):
43 user_teams = self.api_parser(self.api_get('/user/teams', sess=sess))
44 return any(team.get('organization', {}).get('login') == team_name and
45 team.get('permission') == 'admin'
46 for team in user_teams)
47
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/liberapay/elsewhere/github.py b/liberapay/elsewhere/github.py
--- a/liberapay/elsewhere/github.py
+++ b/liberapay/elsewhere/github.py
@@ -39,8 +39,7 @@
x_avatar_url = key('avatar_url')
x_is_team = key('type', clean=lambda t: t.lower() == 'organization')
- def is_team_admin(self, team_name, sess):
- user_teams = self.api_parser(self.api_get('/user/teams', sess=sess))
- return any(team.get('organization', {}).get('login') == team_name and
- team.get('permission') == 'admin'
- for team in user_teams)
+ def is_team_member(self, org_name, sess):
+ org_name = org_name.lower()
+ user_orgs = self.api_parser(self.api_get('/user/orgs', sess=sess))
+ return any(org.get('login') == org_name for org in user_orgs)
|
{"golden_diff": "diff --git a/liberapay/elsewhere/github.py b/liberapay/elsewhere/github.py\n--- a/liberapay/elsewhere/github.py\n+++ b/liberapay/elsewhere/github.py\n@@ -39,8 +39,7 @@\n x_avatar_url = key('avatar_url')\n x_is_team = key('type', clean=lambda t: t.lower() == 'organization')\n \n- def is_team_admin(self, team_name, sess):\n- user_teams = self.api_parser(self.api_get('/user/teams', sess=sess))\n- return any(team.get('organization', {}).get('login') == team_name and\n- team.get('permission') == 'admin'\n- for team in user_teams)\n+ def is_team_member(self, org_name, sess):\n+ org_name = org_name.lower()\n+ user_orgs = self.api_parser(self.api_get('/user/orgs', sess=sess))\n+ return any(org.get('login') == org_name for org in user_orgs)\n", "issue": "Fix connecting a GitHub org\nI tried connecting the GitHub liberapay org to the [LiberapayOrg](https://liberapay.com/LiberapayOrg/) account but I ended up on the take-over confirmation page asking me to transfer my personal account.\n\n", "before_files": [{"content": "from __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom liberapay.elsewhere import PlatformOAuth2\nfrom liberapay.elsewhere._extractors import key\nfrom liberapay.elsewhere._paginators import header_links_paginator\n\n\nclass GitHub(PlatformOAuth2):\n\n # Platform attributes\n name = 'github'\n display_name = 'GitHub'\n account_url = 'https://github.com/{user_name}'\n allows_team_connect = True\n\n # Auth attributes\n auth_url = 'https://github.com/login/oauth/authorize'\n access_token_url = 'https://github.com/login/oauth/access_token'\n oauth_email_scope = 'user:email'\n oauth_default_scope = ['read:org']\n\n # API attributes\n api_format = 'json'\n api_paginator = header_links_paginator()\n api_url = 'https://api.github.com'\n api_user_info_path = '/user/{user_id}'\n api_user_name_info_path = '/users/{user_name}'\n api_user_self_info_path = '/user'\n api_team_members_path = '/orgs/{user_name}/public_members'\n api_friends_path = '/users/{user_name}/following'\n ratelimit_headers_prefix = 'x-ratelimit-'\n\n # User info extractors\n x_user_id = key('id')\n x_user_name = key('login')\n x_display_name = key('name')\n x_email = key('email')\n x_gravatar_id = key('gravatar_id')\n x_avatar_url = key('avatar_url')\n x_is_team = key('type', clean=lambda t: t.lower() == 'organization')\n\n def is_team_admin(self, team_name, sess):\n user_teams = self.api_parser(self.api_get('/user/teams', sess=sess))\n return any(team.get('organization', {}).get('login') == team_name and\n team.get('permission') == 'admin'\n for team in user_teams)\n", "path": "liberapay/elsewhere/github.py"}], "after_files": [{"content": "from __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom liberapay.elsewhere import PlatformOAuth2\nfrom liberapay.elsewhere._extractors import key\nfrom liberapay.elsewhere._paginators import header_links_paginator\n\n\nclass GitHub(PlatformOAuth2):\n\n # Platform attributes\n name = 'github'\n display_name = 'GitHub'\n account_url = 'https://github.com/{user_name}'\n allows_team_connect = True\n\n # Auth attributes\n auth_url = 'https://github.com/login/oauth/authorize'\n access_token_url = 'https://github.com/login/oauth/access_token'\n oauth_email_scope = 'user:email'\n oauth_default_scope = ['read:org']\n\n # API attributes\n api_format = 'json'\n api_paginator = header_links_paginator()\n api_url = 'https://api.github.com'\n api_user_info_path = '/user/{user_id}'\n api_user_name_info_path = 
'/users/{user_name}'\n api_user_self_info_path = '/user'\n api_team_members_path = '/orgs/{user_name}/public_members'\n api_friends_path = '/users/{user_name}/following'\n ratelimit_headers_prefix = 'x-ratelimit-'\n\n # User info extractors\n x_user_id = key('id')\n x_user_name = key('login')\n x_display_name = key('name')\n x_email = key('email')\n x_gravatar_id = key('gravatar_id')\n x_avatar_url = key('avatar_url')\n x_is_team = key('type', clean=lambda t: t.lower() == 'organization')\n\n def is_team_member(self, org_name, sess):\n org_name = org_name.lower()\n user_orgs = self.api_parser(self.api_get('/user/orgs', sess=sess))\n return any(org.get('login') == org_name for org in user_orgs)\n", "path": "liberapay/elsewhere/github.py"}]}
| 830 | 224 |
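Note on the record above: the fix replaces the team-admin check (GET /user/teams, which only lists team memberships) with an organization-membership check (GET /user/orgs), so connecting an org no longer falls through to the personal-account take-over flow. A small standalone sketch of the membership test follows; lowercasing both sides is an extra assumption for robustness, since the patch lowercases only the requested name.

```python
# Sketch of the membership check pattern; `user_orgs` stands in for the
# parsed response of GitHub's GET /user/orgs endpoint.
def is_team_member(org_name, user_orgs):
    org_name = org_name.lower()
    return any((org.get('login') or '').lower() == org_name for org in user_orgs)

print(is_team_member('LiberapayOrg', [{'login': 'liberapayorg'}]))  # True
```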
gh_patches_debug_9374
|
rasdani/github-patches
|
git_diff
|
pyca__cryptography-1422
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Key versioning/key rotation support (wishlist)
It would be nice if Fernet had built-in support for key versioning. I don't know exactly what the API would look like, but the idea would be to allow encrypting new data under a new key while still retaining the ability to decrypt old data that was encrypted under older keys.
If Fernet supported this natively, then I could tell developers to "just use Fernet" when they first write their applications, and later when they get a real security engineer, that person could start doing key rotation without having to change application-level code or data structures.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cryptography/fernet.py`
Content:
```
1 # Licensed under the Apache License, Version 2.0 (the "License");
2 # you may not use this file except in compliance with the License.
3 # You may obtain a copy of the License at
4 #
5 # http://www.apache.org/licenses/LICENSE-2.0
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
10 # implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
13
14 from __future__ import absolute_import, division, print_function
15
16 import base64
17 import binascii
18 import os
19 import struct
20 import time
21
22 import six
23
24 from cryptography.exceptions import InvalidSignature
25 from cryptography.hazmat.backends import default_backend
26 from cryptography.hazmat.primitives import hashes, padding
27 from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
28 from cryptography.hazmat.primitives.hmac import HMAC
29
30
31 class InvalidToken(Exception):
32 pass
33
34
35 _MAX_CLOCK_SKEW = 60
36
37
38 class Fernet(object):
39 def __init__(self, key, backend=None):
40 if backend is None:
41 backend = default_backend()
42
43 key = base64.urlsafe_b64decode(key)
44 if len(key) != 32:
45 raise ValueError(
46 "Fernet key must be 32 url-safe base64-encoded bytes."
47 )
48
49 self._signing_key = key[:16]
50 self._encryption_key = key[16:]
51 self._backend = backend
52
53 @classmethod
54 def generate_key(cls):
55 return base64.urlsafe_b64encode(os.urandom(32))
56
57 def encrypt(self, data):
58 current_time = int(time.time())
59 iv = os.urandom(16)
60 return self._encrypt_from_parts(data, current_time, iv)
61
62 def _encrypt_from_parts(self, data, current_time, iv):
63 if not isinstance(data, bytes):
64 raise TypeError("data must be bytes.")
65
66 padder = padding.PKCS7(algorithms.AES.block_size).padder()
67 padded_data = padder.update(data) + padder.finalize()
68 encryptor = Cipher(
69 algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
70 ).encryptor()
71 ciphertext = encryptor.update(padded_data) + encryptor.finalize()
72
73 basic_parts = (
74 b"\x80" + struct.pack(">Q", current_time) + iv + ciphertext
75 )
76
77 h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
78 h.update(basic_parts)
79 hmac = h.finalize()
80 return base64.urlsafe_b64encode(basic_parts + hmac)
81
82 def decrypt(self, token, ttl=None):
83 if not isinstance(token, bytes):
84 raise TypeError("token must be bytes.")
85
86 current_time = int(time.time())
87
88 try:
89 data = base64.urlsafe_b64decode(token)
90 except (TypeError, binascii.Error):
91 raise InvalidToken
92
93 if not data or six.indexbytes(data, 0) != 0x80:
94 raise InvalidToken
95
96 try:
97 timestamp, = struct.unpack(">Q", data[1:9])
98 except struct.error:
99 raise InvalidToken
100 if ttl is not None:
101 if timestamp + ttl < current_time:
102 raise InvalidToken
103 if current_time + _MAX_CLOCK_SKEW < timestamp:
104 raise InvalidToken
105 h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
106 h.update(data[:-32])
107 try:
108 h.verify(data[-32:])
109 except InvalidSignature:
110 raise InvalidToken
111
112 iv = data[9:25]
113 ciphertext = data[25:-32]
114 decryptor = Cipher(
115 algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
116 ).decryptor()
117 plaintext_padded = decryptor.update(ciphertext)
118 try:
119 plaintext_padded += decryptor.finalize()
120 except ValueError:
121 raise InvalidToken
122 unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
123
124 unpadded = unpadder.update(plaintext_padded)
125 try:
126 unpadded += unpadder.finalize()
127 except ValueError:
128 raise InvalidToken
129 return unpadded
130
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/cryptography/fernet.py b/cryptography/fernet.py
--- a/cryptography/fernet.py
+++ b/cryptography/fernet.py
@@ -127,3 +127,24 @@
except ValueError:
raise InvalidToken
return unpadded
+
+
+class MultiFernet(object):
+ def __init__(self, fernets):
+ fernets = list(fernets)
+ if not fernets:
+ raise ValueError(
+ "MultiFernet requires at least one Fernet instance"
+ )
+ self._fernets = fernets
+
+ def encrypt(self, msg):
+ return self._fernets[0].encrypt(msg)
+
+ def decrypt(self, msg, ttl=None):
+ for f in self._fernets:
+ try:
+ return f.decrypt(msg, ttl)
+ except InvalidToken:
+ pass
+ raise InvalidToken
|
{"golden_diff": "diff --git a/cryptography/fernet.py b/cryptography/fernet.py\n--- a/cryptography/fernet.py\n+++ b/cryptography/fernet.py\n@@ -127,3 +127,24 @@\n except ValueError:\n raise InvalidToken\n return unpadded\n+\n+\n+class MultiFernet(object):\n+ def __init__(self, fernets):\n+ fernets = list(fernets)\n+ if not fernets:\n+ raise ValueError(\n+ \"MultiFernet requires at least one Fernet instance\"\n+ )\n+ self._fernets = fernets\n+\n+ def encrypt(self, msg):\n+ return self._fernets[0].encrypt(msg)\n+\n+ def decrypt(self, msg, ttl=None):\n+ for f in self._fernets:\n+ try:\n+ return f.decrypt(msg, ttl)\n+ except InvalidToken:\n+ pass\n+ raise InvalidToken\n", "issue": "Key versioning/key rotation support (wishlist)\nIt would be nice if Fernet had built-in support for key versioning. I don't know exactly what the API would look like, but the idea would be to allow encrypting new data under a new key while still retaining the ability to decrypt old data that was encrypted under older keys.\n\nIf Fernet supported this natively, then I could tell developers to \"just use Fernet\" when they first write their applications, and later when they get a real security engineer, that person could start doing key rotation without having to change application-level code or data structures.\n\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n# implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport base64\nimport binascii\nimport os\nimport struct\nimport time\n\nimport six\n\nfrom cryptography.exceptions import InvalidSignature\nfrom cryptography.hazmat.backends import default_backend\nfrom cryptography.hazmat.primitives import hashes, padding\nfrom cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\nfrom cryptography.hazmat.primitives.hmac import HMAC\n\n\nclass InvalidToken(Exception):\n pass\n\n\n_MAX_CLOCK_SKEW = 60\n\n\nclass Fernet(object):\n def __init__(self, key, backend=None):\n if backend is None:\n backend = default_backend()\n\n key = base64.urlsafe_b64decode(key)\n if len(key) != 32:\n raise ValueError(\n \"Fernet key must be 32 url-safe base64-encoded bytes.\"\n )\n\n self._signing_key = key[:16]\n self._encryption_key = key[16:]\n self._backend = backend\n\n @classmethod\n def generate_key(cls):\n return base64.urlsafe_b64encode(os.urandom(32))\n\n def encrypt(self, data):\n current_time = int(time.time())\n iv = os.urandom(16)\n return self._encrypt_from_parts(data, current_time, iv)\n\n def _encrypt_from_parts(self, data, current_time, iv):\n if not isinstance(data, bytes):\n raise TypeError(\"data must be bytes.\")\n\n padder = padding.PKCS7(algorithms.AES.block_size).padder()\n padded_data = padder.update(data) + padder.finalize()\n encryptor = Cipher(\n algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend\n ).encryptor()\n ciphertext = encryptor.update(padded_data) + encryptor.finalize()\n\n basic_parts = (\n b\"\\x80\" + struct.pack(\">Q\", current_time) + iv + ciphertext\n )\n\n h = 
HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)\n h.update(basic_parts)\n hmac = h.finalize()\n return base64.urlsafe_b64encode(basic_parts + hmac)\n\n def decrypt(self, token, ttl=None):\n if not isinstance(token, bytes):\n raise TypeError(\"token must be bytes.\")\n\n current_time = int(time.time())\n\n try:\n data = base64.urlsafe_b64decode(token)\n except (TypeError, binascii.Error):\n raise InvalidToken\n\n if not data or six.indexbytes(data, 0) != 0x80:\n raise InvalidToken\n\n try:\n timestamp, = struct.unpack(\">Q\", data[1:9])\n except struct.error:\n raise InvalidToken\n if ttl is not None:\n if timestamp + ttl < current_time:\n raise InvalidToken\n if current_time + _MAX_CLOCK_SKEW < timestamp:\n raise InvalidToken\n h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)\n h.update(data[:-32])\n try:\n h.verify(data[-32:])\n except InvalidSignature:\n raise InvalidToken\n\n iv = data[9:25]\n ciphertext = data[25:-32]\n decryptor = Cipher(\n algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend\n ).decryptor()\n plaintext_padded = decryptor.update(ciphertext)\n try:\n plaintext_padded += decryptor.finalize()\n except ValueError:\n raise InvalidToken\n unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()\n\n unpadded = unpadder.update(plaintext_padded)\n try:\n unpadded += unpadder.finalize()\n except ValueError:\n raise InvalidToken\n return unpadded\n", "path": "cryptography/fernet.py"}], "after_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n# implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport base64\nimport binascii\nimport os\nimport struct\nimport time\n\nimport six\n\nfrom cryptography.exceptions import InvalidSignature\nfrom cryptography.hazmat.backends import default_backend\nfrom cryptography.hazmat.primitives import hashes, padding\nfrom cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\nfrom cryptography.hazmat.primitives.hmac import HMAC\n\n\nclass InvalidToken(Exception):\n pass\n\n\n_MAX_CLOCK_SKEW = 60\n\n\nclass Fernet(object):\n def __init__(self, key, backend=None):\n if backend is None:\n backend = default_backend()\n\n key = base64.urlsafe_b64decode(key)\n if len(key) != 32:\n raise ValueError(\n \"Fernet key must be 32 url-safe base64-encoded bytes.\"\n )\n\n self._signing_key = key[:16]\n self._encryption_key = key[16:]\n self._backend = backend\n\n @classmethod\n def generate_key(cls):\n return base64.urlsafe_b64encode(os.urandom(32))\n\n def encrypt(self, data):\n current_time = int(time.time())\n iv = os.urandom(16)\n return self._encrypt_from_parts(data, current_time, iv)\n\n def _encrypt_from_parts(self, data, current_time, iv):\n if not isinstance(data, bytes):\n raise TypeError(\"data must be bytes.\")\n\n padder = padding.PKCS7(algorithms.AES.block_size).padder()\n padded_data = padder.update(data) + padder.finalize()\n encryptor = Cipher(\n algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend\n ).encryptor()\n 
ciphertext = encryptor.update(padded_data) + encryptor.finalize()\n\n basic_parts = (\n b\"\\x80\" + struct.pack(\">Q\", current_time) + iv + ciphertext\n )\n\n h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)\n h.update(basic_parts)\n hmac = h.finalize()\n return base64.urlsafe_b64encode(basic_parts + hmac)\n\n def decrypt(self, token, ttl=None):\n if not isinstance(token, bytes):\n raise TypeError(\"token must be bytes.\")\n\n current_time = int(time.time())\n\n try:\n data = base64.urlsafe_b64decode(token)\n except (TypeError, binascii.Error):\n raise InvalidToken\n\n if not data or six.indexbytes(data, 0) != 0x80:\n raise InvalidToken\n\n try:\n timestamp, = struct.unpack(\">Q\", data[1:9])\n except struct.error:\n raise InvalidToken\n if ttl is not None:\n if timestamp + ttl < current_time:\n raise InvalidToken\n if current_time + _MAX_CLOCK_SKEW < timestamp:\n raise InvalidToken\n h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)\n h.update(data[:-32])\n try:\n h.verify(data[-32:])\n except InvalidSignature:\n raise InvalidToken\n\n iv = data[9:25]\n ciphertext = data[25:-32]\n decryptor = Cipher(\n algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend\n ).decryptor()\n plaintext_padded = decryptor.update(ciphertext)\n try:\n plaintext_padded += decryptor.finalize()\n except ValueError:\n raise InvalidToken\n unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()\n\n unpadded = unpadder.update(plaintext_padded)\n try:\n unpadded += unpadder.finalize()\n except ValueError:\n raise InvalidToken\n return unpadded\n\n\nclass MultiFernet(object):\n def __init__(self, fernets):\n fernets = list(fernets)\n if not fernets:\n raise ValueError(\n \"MultiFernet requires at least one Fernet instance\"\n )\n self._fernets = fernets\n\n def encrypt(self, msg):\n return self._fernets[0].encrypt(msg)\n\n def decrypt(self, msg, ttl=None):\n for f in self._fernets:\n try:\n return f.decrypt(msg, ttl)\n except InvalidToken:\n pass\n raise InvalidToken\n", "path": "cryptography/fernet.py"}]}
| 1,639 | 207 |
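Note on the record above: `MultiFernet` answers the key-rotation wish directly, encrypting with the first (newest) key and trying each key in order on decrypt. Illustrative usage below; the class and import path are taken from the diff and the cryptography package.

```python
from cryptography.fernet import Fernet, MultiFernet

old_key = Fernet(Fernet.generate_key())
token = old_key.encrypt(b'secret')           # data written before rotation

new_key = Fernet(Fernet.generate_key())
f = MultiFernet([new_key, old_key])          # newest key first: used by encrypt()

assert f.decrypt(token) == b'secret'               # old tokens still readable
assert new_key.decrypt(f.encrypt(b'x')) == b'x'    # new tokens use the new key
print('rotation works')
```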
gh_patches_debug_24010
|
rasdani/github-patches
|
git_diff
|
jazzband__pip-tools-798
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
--cert option for pip-sync
pip-sync is missing the `--cert` and `--client-cert` options, that are (somehow, see #712) implemented for pip-compile.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `piptools/scripts/sync.py`
Content:
```
1 # coding: utf-8
2 from __future__ import absolute_import, division, print_function, unicode_literals
3
4 import os
5 import sys
6
7 from .. import click, sync
8 from .._compat import get_installed_distributions, parse_requirements
9 from ..exceptions import PipToolsError
10 from ..logging import log
11 from ..utils import flat_map
12
13 DEFAULT_REQUIREMENTS_FILE = "requirements.txt"
14
15
16 @click.command()
17 @click.version_option()
18 @click.option(
19 "-n",
20 "--dry-run",
21 is_flag=True,
22 help="Only show what would happen, don't change anything",
23 )
24 @click.option("--force", is_flag=True, help="Proceed even if conflicts are found")
25 @click.option(
26 "-f",
27 "--find-links",
28 multiple=True,
29 help="Look for archives in this directory or on this HTML page",
30 envvar="PIP_FIND_LINKS",
31 )
32 @click.option(
33 "-i",
34 "--index-url",
35 help="Change index URL (defaults to PyPI)",
36 envvar="PIP_INDEX_URL",
37 )
38 @click.option(
39 "--extra-index-url",
40 multiple=True,
41 help="Add additional index URL to search",
42 envvar="PIP_EXTRA_INDEX_URL",
43 )
44 @click.option(
45 "--trusted-host",
46 multiple=True,
47 help="Mark this host as trusted, even though it does not have valid or any HTTPS.",
48 )
49 @click.option(
50 "--no-index",
51 is_flag=True,
52 help="Ignore package index (only looking at --find-links URLs instead)",
53 )
54 @click.option("-q", "--quiet", default=False, is_flag=True, help="Give less output")
55 @click.option(
56 "--user", "user_only", is_flag=True, help="Restrict attention to user directory"
57 )
58 @click.argument("src_files", required=False, type=click.Path(exists=True), nargs=-1)
59 def cli(
60 dry_run,
61 force,
62 find_links,
63 index_url,
64 extra_index_url,
65 trusted_host,
66 no_index,
67 quiet,
68 user_only,
69 src_files,
70 ):
71 """Synchronize virtual environment with requirements.txt."""
72 if not src_files:
73 if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
74 src_files = (DEFAULT_REQUIREMENTS_FILE,)
75 else:
76 msg = "No requirement files given and no {} found in the current directory"
77 log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))
78 sys.exit(2)
79
80 if any(src_file.endswith(".in") for src_file in src_files):
81 msg = (
82 "Some input files have the .in extension, which is most likely an error "
83 "and can cause weird behaviour. You probably meant to use "
84 "the corresponding *.txt file?"
85 )
86 if force:
87 log.warning("WARNING: " + msg)
88 else:
89 log.error("ERROR: " + msg)
90 sys.exit(2)
91
92 requirements = flat_map(
93 lambda src: parse_requirements(src, session=True), src_files
94 )
95
96 try:
97 requirements = sync.merge(requirements, ignore_conflicts=force)
98 except PipToolsError as e:
99 log.error(str(e))
100 sys.exit(2)
101
102 installed_dists = get_installed_distributions(skip=[], user_only=user_only)
103 to_install, to_uninstall = sync.diff(requirements, installed_dists)
104
105 install_flags = []
106 for link in find_links or []:
107 install_flags.extend(["-f", link])
108 if no_index:
109 install_flags.append("--no-index")
110 if index_url:
111 install_flags.extend(["-i", index_url])
112 if extra_index_url:
113 for extra_index in extra_index_url:
114 install_flags.extend(["--extra-index-url", extra_index])
115 if trusted_host:
116 for host in trusted_host:
117 install_flags.extend(["--trusted-host", host])
118 if user_only:
119 install_flags.append("--user")
120
121 sys.exit(
122 sync.sync(
123 to_install,
124 to_uninstall,
125 verbose=(not quiet),
126 dry_run=dry_run,
127 install_flags=install_flags,
128 )
129 )
130
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/piptools/scripts/sync.py b/piptools/scripts/sync.py
--- a/piptools/scripts/sync.py
+++ b/piptools/scripts/sync.py
@@ -55,6 +55,12 @@
@click.option(
"--user", "user_only", is_flag=True, help="Restrict attention to user directory"
)
+@click.option("--cert", help="Path to alternate CA bundle.")
+@click.option(
+ "--client-cert",
+ help="Path to SSL client certificate, a single file containing "
+ "the private key and the certificate in PEM format.",
+)
@click.argument("src_files", required=False, type=click.Path(exists=True), nargs=-1)
def cli(
dry_run,
@@ -66,6 +72,8 @@
no_index,
quiet,
user_only,
+ cert,
+ client_cert,
src_files,
):
"""Synchronize virtual environment with requirements.txt."""
@@ -117,6 +125,10 @@
install_flags.extend(["--trusted-host", host])
if user_only:
install_flags.append("--user")
+ if cert:
+ install_flags.extend(["--cert", cert])
+ if client_cert:
+ install_flags.extend(["--client-cert", client_cert])
sys.exit(
sync.sync(
|
{"golden_diff": "diff --git a/piptools/scripts/sync.py b/piptools/scripts/sync.py\n--- a/piptools/scripts/sync.py\n+++ b/piptools/scripts/sync.py\n@@ -55,6 +55,12 @@\n @click.option(\n \"--user\", \"user_only\", is_flag=True, help=\"Restrict attention to user directory\"\n )\[email protected](\"--cert\", help=\"Path to alternate CA bundle.\")\[email protected](\n+ \"--client-cert\",\n+ help=\"Path to SSL client certificate, a single file containing \"\n+ \"the private key and the certificate in PEM format.\",\n+)\n @click.argument(\"src_files\", required=False, type=click.Path(exists=True), nargs=-1)\n def cli(\n dry_run,\n@@ -66,6 +72,8 @@\n no_index,\n quiet,\n user_only,\n+ cert,\n+ client_cert,\n src_files,\n ):\n \"\"\"Synchronize virtual environment with requirements.txt.\"\"\"\n@@ -117,6 +125,10 @@\n install_flags.extend([\"--trusted-host\", host])\n if user_only:\n install_flags.append(\"--user\")\n+ if cert:\n+ install_flags.extend([\"--cert\", cert])\n+ if client_cert:\n+ install_flags.extend([\"--client-cert\", client_cert])\n \n sys.exit(\n sync.sync(\n", "issue": "--cert option for pip-sync\npip-sync is missing the `--cert` and `--client-cert` options, that are (somehow, see #712) implemented for pip-compile.\n", "before_files": [{"content": "# coding: utf-8\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport os\nimport sys\n\nfrom .. import click, sync\nfrom .._compat import get_installed_distributions, parse_requirements\nfrom ..exceptions import PipToolsError\nfrom ..logging import log\nfrom ..utils import flat_map\n\nDEFAULT_REQUIREMENTS_FILE = \"requirements.txt\"\n\n\[email protected]()\[email protected]_option()\[email protected](\n \"-n\",\n \"--dry-run\",\n is_flag=True,\n help=\"Only show what would happen, don't change anything\",\n)\[email protected](\"--force\", is_flag=True, help=\"Proceed even if conflicts are found\")\[email protected](\n \"-f\",\n \"--find-links\",\n multiple=True,\n help=\"Look for archives in this directory or on this HTML page\",\n envvar=\"PIP_FIND_LINKS\",\n)\[email protected](\n \"-i\",\n \"--index-url\",\n help=\"Change index URL (defaults to PyPI)\",\n envvar=\"PIP_INDEX_URL\",\n)\[email protected](\n \"--extra-index-url\",\n multiple=True,\n help=\"Add additional index URL to search\",\n envvar=\"PIP_EXTRA_INDEX_URL\",\n)\[email protected](\n \"--trusted-host\",\n multiple=True,\n help=\"Mark this host as trusted, even though it does not have valid or any HTTPS.\",\n)\[email protected](\n \"--no-index\",\n is_flag=True,\n help=\"Ignore package index (only looking at --find-links URLs instead)\",\n)\[email protected](\"-q\", \"--quiet\", default=False, is_flag=True, help=\"Give less output\")\[email protected](\n \"--user\", \"user_only\", is_flag=True, help=\"Restrict attention to user directory\"\n)\[email protected](\"src_files\", required=False, type=click.Path(exists=True), nargs=-1)\ndef cli(\n dry_run,\n force,\n find_links,\n index_url,\n extra_index_url,\n trusted_host,\n no_index,\n quiet,\n user_only,\n src_files,\n):\n \"\"\"Synchronize virtual environment with requirements.txt.\"\"\"\n if not src_files:\n if os.path.exists(DEFAULT_REQUIREMENTS_FILE):\n src_files = (DEFAULT_REQUIREMENTS_FILE,)\n else:\n msg = \"No requirement files given and no {} found in the current directory\"\n log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))\n sys.exit(2)\n\n if any(src_file.endswith(\".in\") for src_file in src_files):\n msg = (\n \"Some input files have the .in extension, which is most likely 
an error \"\n \"and can cause weird behaviour. You probably meant to use \"\n \"the corresponding *.txt file?\"\n )\n if force:\n log.warning(\"WARNING: \" + msg)\n else:\n log.error(\"ERROR: \" + msg)\n sys.exit(2)\n\n requirements = flat_map(\n lambda src: parse_requirements(src, session=True), src_files\n )\n\n try:\n requirements = sync.merge(requirements, ignore_conflicts=force)\n except PipToolsError as e:\n log.error(str(e))\n sys.exit(2)\n\n installed_dists = get_installed_distributions(skip=[], user_only=user_only)\n to_install, to_uninstall = sync.diff(requirements, installed_dists)\n\n install_flags = []\n for link in find_links or []:\n install_flags.extend([\"-f\", link])\n if no_index:\n install_flags.append(\"--no-index\")\n if index_url:\n install_flags.extend([\"-i\", index_url])\n if extra_index_url:\n for extra_index in extra_index_url:\n install_flags.extend([\"--extra-index-url\", extra_index])\n if trusted_host:\n for host in trusted_host:\n install_flags.extend([\"--trusted-host\", host])\n if user_only:\n install_flags.append(\"--user\")\n\n sys.exit(\n sync.sync(\n to_install,\n to_uninstall,\n verbose=(not quiet),\n dry_run=dry_run,\n install_flags=install_flags,\n )\n )\n", "path": "piptools/scripts/sync.py"}], "after_files": [{"content": "# coding: utf-8\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport os\nimport sys\n\nfrom .. import click, sync\nfrom .._compat import get_installed_distributions, parse_requirements\nfrom ..exceptions import PipToolsError\nfrom ..logging import log\nfrom ..utils import flat_map\n\nDEFAULT_REQUIREMENTS_FILE = \"requirements.txt\"\n\n\[email protected]()\[email protected]_option()\[email protected](\n \"-n\",\n \"--dry-run\",\n is_flag=True,\n help=\"Only show what would happen, don't change anything\",\n)\[email protected](\"--force\", is_flag=True, help=\"Proceed even if conflicts are found\")\[email protected](\n \"-f\",\n \"--find-links\",\n multiple=True,\n help=\"Look for archives in this directory or on this HTML page\",\n envvar=\"PIP_FIND_LINKS\",\n)\[email protected](\n \"-i\",\n \"--index-url\",\n help=\"Change index URL (defaults to PyPI)\",\n envvar=\"PIP_INDEX_URL\",\n)\[email protected](\n \"--extra-index-url\",\n multiple=True,\n help=\"Add additional index URL to search\",\n envvar=\"PIP_EXTRA_INDEX_URL\",\n)\[email protected](\n \"--trusted-host\",\n multiple=True,\n help=\"Mark this host as trusted, even though it does not have valid or any HTTPS.\",\n)\[email protected](\n \"--no-index\",\n is_flag=True,\n help=\"Ignore package index (only looking at --find-links URLs instead)\",\n)\[email protected](\"-q\", \"--quiet\", default=False, is_flag=True, help=\"Give less output\")\[email protected](\n \"--user\", \"user_only\", is_flag=True, help=\"Restrict attention to user directory\"\n)\[email protected](\"--cert\", help=\"Path to alternate CA bundle.\")\[email protected](\n \"--client-cert\",\n help=\"Path to SSL client certificate, a single file containing \"\n \"the private key and the certificate in PEM format.\",\n)\[email protected](\"src_files\", required=False, type=click.Path(exists=True), nargs=-1)\ndef cli(\n dry_run,\n force,\n find_links,\n index_url,\n extra_index_url,\n trusted_host,\n no_index,\n quiet,\n user_only,\n cert,\n client_cert,\n src_files,\n):\n \"\"\"Synchronize virtual environment with requirements.txt.\"\"\"\n if not src_files:\n if os.path.exists(DEFAULT_REQUIREMENTS_FILE):\n src_files = (DEFAULT_REQUIREMENTS_FILE,)\n else:\n msg = \"No 
requirement files given and no {} found in the current directory\"\n log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))\n sys.exit(2)\n\n if any(src_file.endswith(\".in\") for src_file in src_files):\n msg = (\n \"Some input files have the .in extension, which is most likely an error \"\n \"and can cause weird behaviour. You probably meant to use \"\n \"the corresponding *.txt file?\"\n )\n if force:\n log.warning(\"WARNING: \" + msg)\n else:\n log.error(\"ERROR: \" + msg)\n sys.exit(2)\n\n requirements = flat_map(\n lambda src: parse_requirements(src, session=True), src_files\n )\n\n try:\n requirements = sync.merge(requirements, ignore_conflicts=force)\n except PipToolsError as e:\n log.error(str(e))\n sys.exit(2)\n\n installed_dists = get_installed_distributions(skip=[], user_only=user_only)\n to_install, to_uninstall = sync.diff(requirements, installed_dists)\n\n install_flags = []\n for link in find_links or []:\n install_flags.extend([\"-f\", link])\n if no_index:\n install_flags.append(\"--no-index\")\n if index_url:\n install_flags.extend([\"-i\", index_url])\n if extra_index_url:\n for extra_index in extra_index_url:\n install_flags.extend([\"--extra-index-url\", extra_index])\n if trusted_host:\n for host in trusted_host:\n install_flags.extend([\"--trusted-host\", host])\n if user_only:\n install_flags.append(\"--user\")\n if cert:\n install_flags.extend([\"--cert\", cert])\n if client_cert:\n install_flags.extend([\"--client-cert\", client_cert])\n\n sys.exit(\n sync.sync(\n to_install,\n to_uninstall,\n verbose=(not quiet),\n dry_run=dry_run,\n install_flags=install_flags,\n )\n )\n", "path": "piptools/scripts/sync.py"}]}
| 1,439 | 293 |
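A minimal, standalone sketch of the flag pass-through visible in the after_files above — the `build_install_flags` helper is hypothetical; only the `--cert`/`--client-cert` option names come from the patch:

```python
# Hedged sketch of the cert pass-through pattern from the entry above.
# build_install_flags is a made-up helper; pip-sync itself builds the list inline.
def build_install_flags(cert=None, client_cert=None):
    flags = []
    if cert:
        flags.extend(["--cert", cert])
    if client_cert:
        flags.extend(["--client-cert", client_cert])
    return flags


assert build_install_flags(cert="/etc/ssl/ca.pem") == ["--cert", "/etc/ssl/ca.pem"]
assert build_install_flags() == []
```

The resulting flags are appended to the pip invocation that performs the actual install/uninstall, which is why they mirror pip's own option names.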
gh_patches_debug_11925
|
rasdani/github-patches
|
git_diff
|
elastic__apm-agent-python-1155
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
RemovedInDjango41Warning
**Describe the bug**: ...
```shell
../../../../.pyenv/versions/3.8.2/lib/python3.8/site-packages/django/apps/registry.py:91
/Users/mingyu.wu/.pyenv/versions/3.8.2/lib/python3.8/site-packages/django/apps/registry.py:91: RemovedInDjango41Warning: 'elasticapm.contrib.django' defines default_app_config = 'elasticapm.contrib.django.apps.ElasticAPMConfig'. Django now detects this configuration automatically. You can remove default_app_config.
app_config = AppConfig.create(entry)
-- Docs: https://docs.pytest.org/en/stable/warnings.html
```
**To Reproduce**
1. Run the unit tests or start the server
**Environment (please complete the following information)**
- OS: Linux/MacOS
- Python version:3.8.2
- Framework and version: Django 3.2.2
- APM Server version: *
- Agent version: 6.1.3
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `elasticapm/contrib/django/__init__.py`
Content:
```
1 # BSD 3-Clause License
2 #
3 # Copyright (c) 2019, Elasticsearch BV
4 # All rights reserved.
5 #
6 # Redistribution and use in source and binary forms, with or without
7 # modification, are permitted provided that the following conditions are met:
8 #
9 # * Redistributions of source code must retain the above copyright notice, this
10 # list of conditions and the following disclaimer.
11 #
12 # * Redistributions in binary form must reproduce the above copyright notice,
13 # this list of conditions and the following disclaimer in the documentation
14 # and/or other materials provided with the distribution.
15 #
16 # * Neither the name of the copyright holder nor the names of its
17 # contributors may be used to endorse or promote products derived from
18 # this software without specific prior written permission.
19 #
20 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
31 from elasticapm.contrib.django.client import * # noqa E401
32
33 default_app_config = "elasticapm.contrib.django.apps.ElasticAPMConfig"
34
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/elasticapm/contrib/django/__init__.py b/elasticapm/contrib/django/__init__.py
--- a/elasticapm/contrib/django/__init__.py
+++ b/elasticapm/contrib/django/__init__.py
@@ -27,7 +27,9 @@
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+from django import VERSION as DJANGO_VERSION
from elasticapm.contrib.django.client import * # noqa E401
-default_app_config = "elasticapm.contrib.django.apps.ElasticAPMConfig"
+if DJANGO_VERSION < (3, 2):
+ default_app_config = "elasticapm.contrib.django.apps.ElasticAPMConfig"
|
{"golden_diff": "diff --git a/elasticapm/contrib/django/__init__.py b/elasticapm/contrib/django/__init__.py\n--- a/elasticapm/contrib/django/__init__.py\n+++ b/elasticapm/contrib/django/__init__.py\n@@ -27,7 +27,9 @@\n # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n+from django import VERSION as DJANGO_VERSION\n \n from elasticapm.contrib.django.client import * # noqa E401\n \n-default_app_config = \"elasticapm.contrib.django.apps.ElasticAPMConfig\"\n+if DJANGO_VERSION < (3, 2):\n+ default_app_config = \"elasticapm.contrib.django.apps.ElasticAPMConfig\"\n", "issue": "RemovedInDjango41Warning\n**Describe the bug**: ...\r\n\r\n```shell\r\n../../../../.pyenv/versions/3.8.2/lib/python3.8/site-packages/django/apps/registry.py:91\r\n /Users/mingyu.wu/.pyenv/versions/3.8.2/lib/python3.8/site-packages/django/apps/registry.py:91: RemovedInDjango41Warning: 'elasticapm.contrib.django' defines default_app_config = 'elasticapm.contrib.django.apps.ElasticAPMConfig'. Django now detects this configuration automatically. You can remove default_app_config.\r\n app_config = AppConfig.create(entry)\r\n\r\n-- Docs: https://docs.pytest.org/en/stable/warnings.html\r\n```\r\n\r\n**To Reproduce**\r\n\r\n1. run unit test or start server\r\n\r\n**Environment (please complete the following information)**\r\n- OS: Linux/MacOS\r\n- Python version:3.8.2\r\n- Framework and version: Django 3.2.2\r\n- APM Server version: *\r\n- Agent version: 6.1.3\r\n\n", "before_files": [{"content": "# BSD 3-Clause License\n#\n# Copyright (c) 2019, Elasticsearch BV\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holder nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\nfrom elasticapm.contrib.django.client import * # noqa E401\n\ndefault_app_config = \"elasticapm.contrib.django.apps.ElasticAPMConfig\"\n", "path": "elasticapm/contrib/django/__init__.py"}], "after_files": [{"content": "# BSD 3-Clause License\n#\n# Copyright (c) 2019, Elasticsearch BV\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holder nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\nfrom django import VERSION as DJANGO_VERSION\n\nfrom elasticapm.contrib.django.client import * # noqa E401\n\nif DJANGO_VERSION < (3, 2):\n default_app_config = \"elasticapm.contrib.django.apps.ElasticAPMConfig\"\n", "path": "elasticapm/contrib/django/__init__.py"}]}
| 933 | 202 |
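For quick reference, the core of the elasticapm patch above is a two-line version gate; a sketch that mirrors the golden diff (it requires Django to import):

```python
# Django >= 3.2 auto-detects the AppConfig, so the legacy attribute is only
# exported on older versions; mirrors the golden diff above.
from django import VERSION as DJANGO_VERSION

if DJANGO_VERSION < (3, 2):
    default_app_config = "elasticapm.contrib.django.apps.ElasticAPMConfig"
```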
gh_patches_debug_11882
|
rasdani/github-patches
|
git_diff
|
translate__pootle-6471
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
AttributeError: 'QuerySet' object has no attribute 'keys'
The following appears when you first go to `/` and Pootle tries to redirect you to your language based on your Accept-Lang headers. Note that this sets a cookie, so you need to clear cookies to reproduce.
```python
AttributeError: 'QuerySet' object has no attribute 'keys'
File "pootle_app/views/index/index.py", line 94, in dispatch
else self.languages).values_list('code', flat=True))
File "pootle/i18n/override.py", line 81, in get_lang_from_http_header
for lang in supported.keys():
AttributeError: 'QuerySet' object has no attribute 'keys'
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pootle/apps/pootle_app/views/index/index.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) Pootle contributors.
4 #
5 # This file is a part of the Pootle project. It is distributed under the GPL3
6 # or later license. See the LICENSE file for a copy of the license and the
7 # AUTHORS file for copyright and authorship information.
8
9 from django.contrib.auth import REDIRECT_FIELD_NAME
10 from django.shortcuts import redirect
11 from django.urls import reverse
12 from django.utils.functional import cached_property
13 from django.utils.translation import get_language
14 from django.views.generic import TemplateView, View
15
16 from pootle.core.decorators import persistent_property
17 from pootle.core.delegate import revision, scores
18 from pootle.i18n.override import get_lang_from_http_header
19 from pootle_language.models import Language
20 from pootle_project.models import Project, ProjectSet
21
22
23 COOKIE_NAME = 'pootle-language'
24
25
26 class WelcomeView(TemplateView):
27 ns = "pootle.web.welcome"
28 template_name = "welcome.html"
29
30 @property
31 def revision(self):
32 return revision.get(self.project_set.directory.__class__)(
33 self.project_set.directory).get(key="stats")
34
35 @property
36 def cache_key(self):
37 return (
38 "%s.%s.%s"
39 % (self.request.user.username,
40 self.revision,
41 self.request_lang))
42
43 @cached_property
44 def project_set(self):
45 user_projects = Project.accessible_by_user(self.request.user)
46 user_projects = (
47 Project.objects.for_user(self.request.user)
48 .filter(code__in=user_projects))
49 return ProjectSet(user_projects)
50
51 @property
52 def request_lang(self):
53 return get_language()
54
55 @persistent_property
56 def score_data(self):
57 return scores.get(ProjectSet)(
58 self.project_set).display(language=self.request_lang)
59
60 def get_context_data(self, **kwargs):
61 context = super(WelcomeView, self).get_context_data(**kwargs)
62 context.update(dict(score_data=self.score_data))
63 return context
64
65
66 class IndexView(View):
67
68 @property
69 def active_languages(self):
70 return Language.objects.filter(
71 translationproject__isnull=False,
72 translationproject__directory__obsolete=False)
73
74 @property
75 def all_languages(self):
76 return self.active_languages
77
78 @property
79 def languages(self):
80 return self.active_languages.filter(
81 translationproject__project__disabled=False)
82
83 def dispatch(self, request, *args, **kwargs):
84 if not request.user.is_authenticated:
85 ctx = {
86 'next': request.GET.get(REDIRECT_FIELD_NAME, '')}
87 return WelcomeView.as_view()(request, ctx)
88 lang = request.COOKIES.get(COOKIE_NAME, None)
89 if lang is None:
90 lang = get_lang_from_http_header(
91 request,
92 (self.all_languages
93 if request.user.is_superuser
94 else self.languages).values_list('code', flat=True))
95 if lang is not None and lang not in ('projects', ''):
96 url = reverse('pootle-language-browse', args=[lang])
97 else:
98 url = reverse('pootle-projects-browse')
99 # Preserve query strings
100 args = request.GET.urlencode()
101 qs = '?%s' % args if args else ''
102 redirect_url = '%s%s' % (url, qs)
103 return redirect(redirect_url)
104
105
106 class AboutView(TemplateView):
107 template_name = 'about.html'
108
109 def get_context_data(self, **kwargs):
110 from translate.__version__ import sver as toolkit_version
111 from pootle import __version__
112
113 return {
114 'pootle_version': __version__,
115 'toolkit_version': toolkit_version,
116 }
117
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pootle/apps/pootle_app/views/index/index.py b/pootle/apps/pootle_app/views/index/index.py
--- a/pootle/apps/pootle_app/views/index/index.py
+++ b/pootle/apps/pootle_app/views/index/index.py
@@ -89,9 +89,9 @@
if lang is None:
lang = get_lang_from_http_header(
request,
- (self.all_languages
- if request.user.is_superuser
- else self.languages).values_list('code', flat=True))
+ dict((self.all_languages
+ if request.user.is_superuser
+ else self.languages).values_list('code', 'fullname')))
if lang is not None and lang not in ('projects', ''):
url = reverse('pootle-language-browse', args=[lang])
else:
|
{"golden_diff": "diff --git a/pootle/apps/pootle_app/views/index/index.py b/pootle/apps/pootle_app/views/index/index.py\n--- a/pootle/apps/pootle_app/views/index/index.py\n+++ b/pootle/apps/pootle_app/views/index/index.py\n@@ -89,9 +89,9 @@\n if lang is None:\n lang = get_lang_from_http_header(\n request,\n- (self.all_languages\n- if request.user.is_superuser\n- else self.languages).values_list('code', flat=True))\n+ dict((self.all_languages\n+ if request.user.is_superuser\n+ else self.languages).values_list('code', 'fullname')))\n if lang is not None and lang not in ('projects', ''):\n url = reverse('pootle-language-browse', args=[lang])\n else:\n", "issue": "AttributeError: 'QuerySet' object has no attribute 'keys'\nThe following appears when you first go to `/` and Pootle tries to redirect you to your language based on your Accept-Lang headers. Note this will set a cookie so you need to have cookies reset to replicate.\r\n\r\n```python\r\nAttributeError: 'QuerySet' object has no attribute 'keys'\r\n File \"pootle_app/views/index/index.py\", line 94, in dispatch\r\n else self.languages).values_list('code', flat=True))\r\n File \"pootle/i18n/override.py\", line 81, in get_lang_from_http_header\r\n for lang in supported.keys():\r\n\r\nAttributeError: 'QuerySet' object has no attribute 'keys'\r\n```\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django.contrib.auth import REDIRECT_FIELD_NAME\nfrom django.shortcuts import redirect\nfrom django.urls import reverse\nfrom django.utils.functional import cached_property\nfrom django.utils.translation import get_language\nfrom django.views.generic import TemplateView, View\n\nfrom pootle.core.decorators import persistent_property\nfrom pootle.core.delegate import revision, scores\nfrom pootle.i18n.override import get_lang_from_http_header\nfrom pootle_language.models import Language\nfrom pootle_project.models import Project, ProjectSet\n\n\nCOOKIE_NAME = 'pootle-language'\n\n\nclass WelcomeView(TemplateView):\n ns = \"pootle.web.welcome\"\n template_name = \"welcome.html\"\n\n @property\n def revision(self):\n return revision.get(self.project_set.directory.__class__)(\n self.project_set.directory).get(key=\"stats\")\n\n @property\n def cache_key(self):\n return (\n \"%s.%s.%s\"\n % (self.request.user.username,\n self.revision,\n self.request_lang))\n\n @cached_property\n def project_set(self):\n user_projects = Project.accessible_by_user(self.request.user)\n user_projects = (\n Project.objects.for_user(self.request.user)\n .filter(code__in=user_projects))\n return ProjectSet(user_projects)\n\n @property\n def request_lang(self):\n return get_language()\n\n @persistent_property\n def score_data(self):\n return scores.get(ProjectSet)(\n self.project_set).display(language=self.request_lang)\n\n def get_context_data(self, **kwargs):\n context = super(WelcomeView, self).get_context_data(**kwargs)\n context.update(dict(score_data=self.score_data))\n return context\n\n\nclass IndexView(View):\n\n @property\n def active_languages(self):\n return Language.objects.filter(\n translationproject__isnull=False,\n translationproject__directory__obsolete=False)\n\n @property\n def all_languages(self):\n return self.active_languages\n\n @property\n def languages(self):\n return 
self.active_languages.filter(\n translationproject__project__disabled=False)\n\n def dispatch(self, request, *args, **kwargs):\n if not request.user.is_authenticated:\n ctx = {\n 'next': request.GET.get(REDIRECT_FIELD_NAME, '')}\n return WelcomeView.as_view()(request, ctx)\n lang = request.COOKIES.get(COOKIE_NAME, None)\n if lang is None:\n lang = get_lang_from_http_header(\n request,\n (self.all_languages\n if request.user.is_superuser\n else self.languages).values_list('code', flat=True))\n if lang is not None and lang not in ('projects', ''):\n url = reverse('pootle-language-browse', args=[lang])\n else:\n url = reverse('pootle-projects-browse')\n # Preserve query strings\n args = request.GET.urlencode()\n qs = '?%s' % args if args else ''\n redirect_url = '%s%s' % (url, qs)\n return redirect(redirect_url)\n\n\nclass AboutView(TemplateView):\n template_name = 'about.html'\n\n def get_context_data(self, **kwargs):\n from translate.__version__ import sver as toolkit_version\n from pootle import __version__\n\n return {\n 'pootle_version': __version__,\n 'toolkit_version': toolkit_version,\n }\n", "path": "pootle/apps/pootle_app/views/index/index.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django.contrib.auth import REDIRECT_FIELD_NAME\nfrom django.shortcuts import redirect\nfrom django.urls import reverse\nfrom django.utils.functional import cached_property\nfrom django.utils.translation import get_language\nfrom django.views.generic import TemplateView, View\n\nfrom pootle.core.decorators import persistent_property\nfrom pootle.core.delegate import revision, scores\nfrom pootle.i18n.override import get_lang_from_http_header\nfrom pootle_language.models import Language\nfrom pootle_project.models import Project, ProjectSet\n\n\nCOOKIE_NAME = 'pootle-language'\n\n\nclass WelcomeView(TemplateView):\n ns = \"pootle.web.welcome\"\n template_name = \"welcome.html\"\n\n @property\n def revision(self):\n return revision.get(self.project_set.directory.__class__)(\n self.project_set.directory).get(key=\"stats\")\n\n @property\n def cache_key(self):\n return (\n \"%s.%s.%s\"\n % (self.request.user.username,\n self.revision,\n self.request_lang))\n\n @cached_property\n def project_set(self):\n user_projects = Project.accessible_by_user(self.request.user)\n user_projects = (\n Project.objects.for_user(self.request.user)\n .filter(code__in=user_projects))\n return ProjectSet(user_projects)\n\n @property\n def request_lang(self):\n return get_language()\n\n @persistent_property\n def score_data(self):\n return scores.get(ProjectSet)(\n self.project_set).display(language=self.request_lang)\n\n def get_context_data(self, **kwargs):\n context = super(WelcomeView, self).get_context_data(**kwargs)\n context.update(dict(score_data=self.score_data))\n return context\n\n\nclass IndexView(View):\n\n @property\n def active_languages(self):\n return Language.objects.filter(\n translationproject__isnull=False,\n translationproject__directory__obsolete=False)\n\n @property\n def all_languages(self):\n return self.active_languages\n\n @property\n def languages(self):\n return self.active_languages.filter(\n translationproject__project__disabled=False)\n\n def dispatch(self, request, *args, **kwargs):\n if not 
request.user.is_authenticated:\n ctx = {\n 'next': request.GET.get(REDIRECT_FIELD_NAME, '')}\n return WelcomeView.as_view()(request, ctx)\n lang = request.COOKIES.get(COOKIE_NAME, None)\n if lang is None:\n lang = get_lang_from_http_header(\n request,\n dict((self.all_languages\n if request.user.is_superuser\n else self.languages).values_list('code', 'fullname')))\n if lang is not None and lang not in ('projects', ''):\n url = reverse('pootle-language-browse', args=[lang])\n else:\n url = reverse('pootle-projects-browse')\n # Preserve query strings\n args = request.GET.urlencode()\n qs = '?%s' % args if args else ''\n redirect_url = '%s%s' % (url, qs)\n return redirect(redirect_url)\n\n\nclass AboutView(TemplateView):\n template_name = 'about.html'\n\n def get_context_data(self, **kwargs):\n from translate.__version__ import sver as toolkit_version\n from pootle import __version__\n\n return {\n 'pootle_version': __version__,\n 'toolkit_version': toolkit_version,\n }\n", "path": "pootle/apps/pootle_app/views/index/index.py"}]}
| 1,465 | 183 |
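A framework-free illustration of the pootle failure mode and its fix — the hard-coded pairs below are hypothetical stand-ins for `values_list('code', 'fullname')`:

```python
# values_list() returns a QuerySet, which has no .keys(); wrapping the
# two-field variant in dict() restores the mapping the caller expected.
pairs = [("de", "German"), ("fr", "French")]  # stand-in for the QuerySet rows
supported = dict(pairs)
for lang in supported.keys():  # previously raised AttributeError on a QuerySet
    pass
assert list(supported) == ["de", "fr"]
```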
gh_patches_debug_10221
|
rasdani/github-patches
|
git_diff
|
data-for-change__anyway-164
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Move to postgres
That's the database of choice that hasadna uses
<!---
@huboard:{"order":125.5,"milestone_order":81,"custom_state":""}
-->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `config.py`
Content:
```
1 import os
2
3 #
4 # This is the configuration file of the application
5 #
6 # Please make sure you don't store here any secret information and use environment
7 # variables
8 #
9
10
11 SQLALCHEMY_DATABASE_URI = os.environ.get('CLEARDB_DATABASE_URL')
12 SQLALCHEMY_POOL_RECYCLE = 60
13
14
15 SECRET_KEY = 'aiosdjsaodjoidjioewnioewfnoeijfoisdjf'
16
17 FACEBOOK_KEY = "157028231131213"
18 FACEBOOK_SECRET = "0437ee70207dca46609219b990be0614"
19
```
Path: `mysqlshell.py`
Content:
```
1 #!/usr/bin/env python
2 import sys
3 import os
4 import urlparse
5
6 def main():
7 DATABASE_URI = os.getenv('CLEARDB_DATABASE_URL')
8
9 if not DATABASE_URI:
10 print >>sys.stderr, 'Environment CLEARDB_DATABASE_URL not set'
11 sys.exit(1)
12
13 db = urlparse.urlparse(DATABASE_URI)
14 os.execlp('mysql', 'mysql', '-u', db.username, '-p' + db.password, '-h', db.hostname, db.path[1:])
15
16 if __name__ == '__main__':
17 main()
18
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/config.py b/config.py
--- a/config.py
+++ b/config.py
@@ -8,7 +8,7 @@
#
-SQLALCHEMY_DATABASE_URI = os.environ.get('CLEARDB_DATABASE_URL')
+SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
SQLALCHEMY_POOL_RECYCLE = 60
diff --git a/mysqlshell.py b/mysqlshell.py
deleted file mode 100755
--- a/mysqlshell.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-import sys
-import os
-import urlparse
-
-def main():
- DATABASE_URI = os.getenv('CLEARDB_DATABASE_URL')
-
- if not DATABASE_URI:
- print >>sys.stderr, 'Environment CLEARDB_DATABASE_URL not set'
- sys.exit(1)
-
- db = urlparse.urlparse(DATABASE_URI)
- os.execlp('mysql', 'mysql', '-u', db.username, '-p' + db.password, '-h', db.hostname, db.path[1:])
-
-if __name__ == '__main__':
- main()
|
{"golden_diff": "diff --git a/config.py b/config.py\n--- a/config.py\n+++ b/config.py\n@@ -8,7 +8,7 @@\n #\n \n \n-SQLALCHEMY_DATABASE_URI = os.environ.get('CLEARDB_DATABASE_URL')\n+SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')\n SQLALCHEMY_POOL_RECYCLE = 60\n \n \ndiff --git a/mysqlshell.py b/mysqlshell.py\ndeleted file mode 100755\n--- a/mysqlshell.py\n+++ /dev/null\n@@ -1,17 +0,0 @@\n-#!/usr/bin/env python\n-import sys\n-import os\n-import urlparse\n-\n-def main():\n- DATABASE_URI = os.getenv('CLEARDB_DATABASE_URL')\n-\n- if not DATABASE_URI:\n- print >>sys.stderr, 'Environment CLEARDB_DATABASE_URL not set'\n- sys.exit(1)\n-\n- db = urlparse.urlparse(DATABASE_URI)\n- os.execlp('mysql', 'mysql', '-u', db.username, '-p' + db.password, '-h', db.hostname, db.path[1:])\n-\n-if __name__ == '__main__':\n- main()\n", "issue": "Move to postgres\nThat's the database of choice hasadna uses\n\n<!---\n@huboard:{\"order\":125.5,\"milestone_order\":81,\"custom_state\":\"\"}\n-->\n\n", "before_files": [{"content": "import os\n\n#\n# This is the configuration file of the application\n#\n# Please make sure you don't store here any secret information and use environment\n# variables\n#\n\n\nSQLALCHEMY_DATABASE_URI = os.environ.get('CLEARDB_DATABASE_URL')\nSQLALCHEMY_POOL_RECYCLE = 60\n\n\nSECRET_KEY = 'aiosdjsaodjoidjioewnioewfnoeijfoisdjf'\n\nFACEBOOK_KEY = \"157028231131213\"\nFACEBOOK_SECRET = \"0437ee70207dca46609219b990be0614\"\n", "path": "config.py"}, {"content": "#!/usr/bin/env python\nimport sys\nimport os\nimport urlparse\n\ndef main():\n DATABASE_URI = os.getenv('CLEARDB_DATABASE_URL')\n\n if not DATABASE_URI:\n print >>sys.stderr, 'Environment CLEARDB_DATABASE_URL not set'\n sys.exit(1)\n\n db = urlparse.urlparse(DATABASE_URI)\n os.execlp('mysql', 'mysql', '-u', db.username, '-p' + db.password, '-h', db.hostname, db.path[1:])\n\nif __name__ == '__main__':\n main()\n", "path": "mysqlshell.py"}], "after_files": [{"content": "import os\n\n#\n# This is the configuration file of the application\n#\n# Please make sure you don't store here any secret information and use environment\n# variables\n#\n\n\nSQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')\nSQLALCHEMY_POOL_RECYCLE = 60\n\n\nSECRET_KEY = 'aiosdjsaodjoidjioewnioewfnoeijfoisdjf'\n\nFACEBOOK_KEY = \"157028231131213\"\nFACEBOOK_SECRET = \"0437ee70207dca46609219b990be0614\"\n", "path": "config.py"}, {"content": null, "path": "mysqlshell.py"}]}
| 634 | 242 |
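A hedged sanity check for the Postgres switch above: the deleted `mysqlshell.py` parsed its URL with `urlparse`, and the same field extraction works for a Postgres-style `DATABASE_URL` (the URL below is made up; Python 3's `urllib.parse` stands in for the Python 2 `urlparse` module the helper used):

```python
# Hypothetical DATABASE_URL; shows the same field extraction the deleted
# MySQL helper relied on, applied to a postgres scheme.
from urllib.parse import urlparse

db = urlparse("postgres://user:secret@localhost:5432/anyway")
assert (db.scheme, db.username, db.hostname, db.path[1:]) == (
    "postgres", "user", "localhost", "anyway",
)
```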
gh_patches_debug_38347
|
rasdani/github-patches
|
git_diff
|
NVIDIA-Merlin__NVTabular-568
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fill Missing Op option to create a new binary column indicating the value was replaced.
Fill Missing Op should have the option to create a new binary column indicating whether the column was filled or not for continuous variables.
This is a common feature used when dealing with missing values of categoricals.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nvtabular/ops/fill.py`
Content:
```
1 #
2 # Copyright (c) 2020, NVIDIA CORPORATION.
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15 #
16 import cudf
17 import dask_cudf
18 from nvtx import annotate
19
20 from .operator import ColumnNames, Operator
21 from .stat_operator import StatOperator
22
23
24 class FillMissing(Operator):
25 """
26 This operation replaces missing values with a constant pre-defined value
27
28 Example usage::
29
30 # Use FillMissing to define a workflow for continuous columns and specify the fill value
31 # Default is 0
32 cont_features = ['cont1', 'cont2', 'cont3'] >> ops.FillMissing() >> ...
33 processor = nvtabular.Workflow(cont_features)
34
35 Parameters
36 -----------
37 fill_val : float, default 0
38 The constant value to replace missing values with.
39 """
40
41 def __init__(self, fill_val=0):
42 super().__init__()
43 self.fill_val = fill_val
44
45 @annotate("FillMissing_op", color="darkgreen", domain="nvt_python")
46 def transform(self, columns, gdf: cudf.DataFrame) -> cudf.DataFrame:
47 return gdf[columns].fillna(self.fill_val)
48
49 transform.__doc__ = Operator.transform.__doc__
50
51
52 class FillMedian(StatOperator):
53 """
54 This operation replaces missing values with the median value for the column.
55
56 Example usage::
57
58 # Initialize the workflow
59 proc = nvt.Workflow(
60 cat_names=CATEGORICAL_COLUMNS,
61 cont_names=CONTINUOUS_COLUMNS,
62 label_name=LABEL_COLUMNS
63 )
64
65 # Add FillMedian to the workflow for continuous columns
66 proc.add_cont_feature(nvt.ops.FillMedian())
67 """
68
69 def __init__(self):
70 super().__init__()
71 self.medians = {}
72
73 @annotate("FillMedian_transform", color="darkgreen", domain="nvt_python")
74 def transform(self, columns: ColumnNames, gdf: cudf.DataFrame) -> cudf.DataFrame:
75 if not self.medians:
76 raise RuntimeError("need to call 'fit' before running transform")
77
78 for col in columns:
79 gdf[col] = gdf[col].fillna(self.medians[col])
80 return gdf
81
82 @annotate("FillMedian_fit", color="green", domain="nvt_python")
83 def fit(self, columns: ColumnNames, ddf: dask_cudf.DataFrame):
84 # TODO: Use `method="tidigest"` when crick supports device
85 dask_stats = ddf[columns].quantile(q=0.5, method="dask")
86 return dask_stats
87
88 @annotate("FillMedian_finalize", color="green", domain="nvt_python")
89 def fit_finalize(self, dask_stats):
90 for col in dask_stats.index.values_host:
91 self.medians[col] = float(dask_stats[col])
92
93 transform.__doc__ = Operator.transform.__doc__
94 fit.__doc__ = StatOperator.fit.__doc__
95 fit_finalize.__doc__ = StatOperator.fit_finalize.__doc__
96
97 def clear(self):
98 self.medians = {}
99
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/nvtabular/ops/fill.py b/nvtabular/ops/fill.py
--- a/nvtabular/ops/fill.py
+++ b/nvtabular/ops/fill.py
@@ -36,18 +36,34 @@
-----------
fill_val : float, default 0
The constant value to replace missing values with.
+ add_binary_cols : boolean, default False
+ When True, adds binary columns that indicate whether cells in each column were filled
"""
- def __init__(self, fill_val=0):
+ def __init__(self, fill_val=0, add_binary_cols=False):
super().__init__()
self.fill_val = fill_val
+ self.add_binary_cols = add_binary_cols
@annotate("FillMissing_op", color="darkgreen", domain="nvt_python")
def transform(self, columns, gdf: cudf.DataFrame) -> cudf.DataFrame:
- return gdf[columns].fillna(self.fill_val)
+ if self.add_binary_cols:
+ for col in columns:
+ gdf[f"{col}_filled"] = gdf[col].isna()
+ gdf[col] = gdf[col].fillna(self.fill_val)
+ else:
+ gdf[columns] = gdf[columns].fillna(self.fill_val)
+
+ return gdf
transform.__doc__ = Operator.transform.__doc__
+ def output_column_names(self, columns: ColumnNames) -> ColumnNames:
+ output_cols = columns[:]
+ if self.add_binary_cols:
+ output_cols.extend([f"{col}_filled" for col in columns])
+ return output_cols
+
class FillMedian(StatOperator):
"""
@@ -64,10 +80,16 @@
# Add FillMedian to the workflow for continuous columns
proc.add_cont_feature(nvt.ops.FillMedian())
+
+ Parameters
+ -----------
+ add_binary_cols : boolean, default False
+ When True, adds binary columns that indicate whether cells in each column were filled
"""
- def __init__(self):
+ def __init__(self, add_binary_cols=False):
super().__init__()
+ self.add_binary_cols = add_binary_cols
self.medians = {}
@annotate("FillMedian_transform", color="darkgreen", domain="nvt_python")
@@ -76,6 +98,8 @@
raise RuntimeError("need to call 'fit' before running transform")
for col in columns:
+ if self.add_binary_cols:
+ gdf[f"{col}_filled"] = gdf[col].isna()
gdf[col] = gdf[col].fillna(self.medians[col])
return gdf
@@ -96,3 +120,9 @@
def clear(self):
self.medians = {}
+
+ def output_column_names(self, columns: ColumnNames) -> ColumnNames:
+ output_cols = columns[:]
+ if self.add_binary_cols:
+ output_cols.extend([f"{col}_filled" for col in columns])
+ return output_cols
|
{"golden_diff": "diff --git a/nvtabular/ops/fill.py b/nvtabular/ops/fill.py\n--- a/nvtabular/ops/fill.py\n+++ b/nvtabular/ops/fill.py\n@@ -36,18 +36,34 @@\n -----------\n fill_val : float, default 0\n The constant value to replace missing values with.\n+ add_binary_cols : boolean, default False\n+ When True, adds binary columns that indicate whether cells in each column were filled\n \"\"\"\n \n- def __init__(self, fill_val=0):\n+ def __init__(self, fill_val=0, add_binary_cols=False):\n super().__init__()\n self.fill_val = fill_val\n+ self.add_binary_cols = add_binary_cols\n \n @annotate(\"FillMissing_op\", color=\"darkgreen\", domain=\"nvt_python\")\n def transform(self, columns, gdf: cudf.DataFrame) -> cudf.DataFrame:\n- return gdf[columns].fillna(self.fill_val)\n+ if self.add_binary_cols:\n+ for col in columns:\n+ gdf[f\"{col}_filled\"] = gdf[col].isna()\n+ gdf[col] = gdf[col].fillna(self.fill_val)\n+ else:\n+ gdf[columns] = gdf[columns].fillna(self.fill_val)\n+\n+ return gdf\n \n transform.__doc__ = Operator.transform.__doc__\n \n+ def output_column_names(self, columns: ColumnNames) -> ColumnNames:\n+ output_cols = columns[:]\n+ if self.add_binary_cols:\n+ output_cols.extend([f\"{col}_filled\" for col in columns])\n+ return output_cols\n+\n \n class FillMedian(StatOperator):\n \"\"\"\n@@ -64,10 +80,16 @@\n \n # Add FillMedian to the workflow for continuous columns\n proc.add_cont_feature(nvt.ops.FillMedian())\n+\n+ Parameters\n+ -----------\n+ add_binary_cols : boolean, default False\n+ When True, adds binary columns that indicate whether cells in each column were filled\n \"\"\"\n \n- def __init__(self):\n+ def __init__(self, add_binary_cols=False):\n super().__init__()\n+ self.add_binary_cols = add_binary_cols\n self.medians = {}\n \n @annotate(\"FillMedian_transform\", color=\"darkgreen\", domain=\"nvt_python\")\n@@ -76,6 +98,8 @@\n raise RuntimeError(\"need to call 'fit' before running transform\")\n \n for col in columns:\n+ if self.add_binary_cols:\n+ gdf[f\"{col}_filled\"] = gdf[col].isna()\n gdf[col] = gdf[col].fillna(self.medians[col])\n return gdf\n \n@@ -96,3 +120,9 @@\n \n def clear(self):\n self.medians = {}\n+\n+ def output_column_names(self, columns: ColumnNames) -> ColumnNames:\n+ output_cols = columns[:]\n+ if self.add_binary_cols:\n+ output_cols.extend([f\"{col}_filled\" for col in columns])\n+ return output_cols\n", "issue": "Fill Missing Op option to create a new binary column indicating the value was replaced.\nFill Missing Op should have the option to create a new binary column indicating whether the column was filled or not for continuous variables.\r\n\r\nThis is a common feature used when dealing with missing values of categoricals.\n", "before_files": [{"content": "#\n# Copyright (c) 2020, NVIDIA CORPORATION.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport cudf\nimport dask_cudf\nfrom nvtx import annotate\n\nfrom .operator import ColumnNames, Operator\nfrom .stat_operator import StatOperator\n\n\nclass FillMissing(Operator):\n \"\"\"\n This 
operation replaces missing values with a constant pre-defined value\n\n Example usage::\n\n # Use FillMissing to define a workflow for continuous columns and specify the fill value\n # Default is 0\n cont_features = ['cont1', 'cont2', 'cont3'] >> ops.FillMissing() >> ...\n processor = nvtabular.Workflow(cont_features)\n\n Parameters\n -----------\n fill_val : float, default 0\n The constant value to replace missing values with.\n \"\"\"\n\n def __init__(self, fill_val=0):\n super().__init__()\n self.fill_val = fill_val\n\n @annotate(\"FillMissing_op\", color=\"darkgreen\", domain=\"nvt_python\")\n def transform(self, columns, gdf: cudf.DataFrame) -> cudf.DataFrame:\n return gdf[columns].fillna(self.fill_val)\n\n transform.__doc__ = Operator.transform.__doc__\n\n\nclass FillMedian(StatOperator):\n \"\"\"\n This operation replaces missing values with the median value for the column.\n\n Example usage::\n\n # Initialize the workflow\n proc = nvt.Workflow(\n cat_names=CATEGORICAL_COLUMNS,\n cont_names=CONTINUOUS_COLUMNS,\n label_name=LABEL_COLUMNS\n )\n\n # Add FillMedian to the workflow for continuous columns\n proc.add_cont_feature(nvt.ops.FillMedian())\n \"\"\"\n\n def __init__(self):\n super().__init__()\n self.medians = {}\n\n @annotate(\"FillMedian_transform\", color=\"darkgreen\", domain=\"nvt_python\")\n def transform(self, columns: ColumnNames, gdf: cudf.DataFrame) -> cudf.DataFrame:\n if not self.medians:\n raise RuntimeError(\"need to call 'fit' before running transform\")\n\n for col in columns:\n gdf[col] = gdf[col].fillna(self.medians[col])\n return gdf\n\n @annotate(\"FillMedian_fit\", color=\"green\", domain=\"nvt_python\")\n def fit(self, columns: ColumnNames, ddf: dask_cudf.DataFrame):\n # TODO: Use `method=\"tidigest\"` when crick supports device\n dask_stats = ddf[columns].quantile(q=0.5, method=\"dask\")\n return dask_stats\n\n @annotate(\"FillMedian_finalize\", color=\"green\", domain=\"nvt_python\")\n def fit_finalize(self, dask_stats):\n for col in dask_stats.index.values_host:\n self.medians[col] = float(dask_stats[col])\n\n transform.__doc__ = Operator.transform.__doc__\n fit.__doc__ = StatOperator.fit.__doc__\n fit_finalize.__doc__ = StatOperator.fit_finalize.__doc__\n\n def clear(self):\n self.medians = {}\n", "path": "nvtabular/ops/fill.py"}], "after_files": [{"content": "#\n# Copyright (c) 2020, NVIDIA CORPORATION.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport cudf\nimport dask_cudf\nfrom nvtx import annotate\n\nfrom .operator import ColumnNames, Operator\nfrom .stat_operator import StatOperator\n\n\nclass FillMissing(Operator):\n \"\"\"\n This operation replaces missing values with a constant pre-defined value\n\n Example usage::\n\n # Use FillMissing to define a workflow for continuous columns and specify the fill value\n # Default is 0\n cont_features = ['cont1', 'cont2', 'cont3'] >> ops.FillMissing() >> ...\n processor = nvtabular.Workflow(cont_features)\n\n Parameters\n -----------\n fill_val : float, default 0\n The constant value to replace 
missing values with.\n add_binary_cols : boolean, default False\n When True, adds binary columns that indicate whether cells in each column were filled\n \"\"\"\n\n def __init__(self, fill_val=0, add_binary_cols=False):\n super().__init__()\n self.fill_val = fill_val\n self.add_binary_cols = add_binary_cols\n\n @annotate(\"FillMissing_op\", color=\"darkgreen\", domain=\"nvt_python\")\n def transform(self, columns, gdf: cudf.DataFrame) -> cudf.DataFrame:\n if self.add_binary_cols:\n for col in columns:\n gdf[f\"{col}_filled\"] = gdf[col].isna()\n gdf[col] = gdf[col].fillna(self.fill_val)\n else:\n gdf[columns] = gdf[columns].fillna(self.fill_val)\n\n return gdf\n\n transform.__doc__ = Operator.transform.__doc__\n\n def output_column_names(self, columns: ColumnNames) -> ColumnNames:\n output_cols = columns[:]\n if self.add_binary_cols:\n output_cols.extend([f\"{col}_filled\" for col in columns])\n return output_cols\n\n\nclass FillMedian(StatOperator):\n \"\"\"\n This operation replaces missing values with the median value for the column.\n\n Example usage::\n\n # Initialize the workflow\n proc = nvt.Workflow(\n cat_names=CATEGORICAL_COLUMNS,\n cont_names=CONTINUOUS_COLUMNS,\n label_name=LABEL_COLUMNS\n )\n\n # Add FillMedian to the workflow for continuous columns\n proc.add_cont_feature(nvt.ops.FillMedian())\n\n Parameters\n -----------\n add_binary_cols : boolean, default False\n When True, adds binary columns that indicate whether cells in each column were filled\n \"\"\"\n\n def __init__(self, add_binary_cols=False):\n super().__init__()\n self.add_binary_cols = add_binary_cols\n self.medians = {}\n\n @annotate(\"FillMedian_transform\", color=\"darkgreen\", domain=\"nvt_python\")\n def transform(self, columns: ColumnNames, gdf: cudf.DataFrame) -> cudf.DataFrame:\n if not self.medians:\n raise RuntimeError(\"need to call 'fit' before running transform\")\n\n for col in columns:\n if self.add_binary_cols:\n gdf[f\"{col}_filled\"] = gdf[col].isna()\n gdf[col] = gdf[col].fillna(self.medians[col])\n return gdf\n\n @annotate(\"FillMedian_fit\", color=\"green\", domain=\"nvt_python\")\n def fit(self, columns: ColumnNames, ddf: dask_cudf.DataFrame):\n # TODO: Use `method=\"tidigest\"` when crick supports device\n dask_stats = ddf[columns].quantile(q=0.5, method=\"dask\")\n return dask_stats\n\n @annotate(\"FillMedian_finalize\", color=\"green\", domain=\"nvt_python\")\n def fit_finalize(self, dask_stats):\n for col in dask_stats.index.values_host:\n self.medians[col] = float(dask_stats[col])\n\n transform.__doc__ = Operator.transform.__doc__\n fit.__doc__ = StatOperator.fit.__doc__\n fit_finalize.__doc__ = StatOperator.fit_finalize.__doc__\n\n def clear(self):\n self.medians = {}\n\n def output_column_names(self, columns: ColumnNames) -> ColumnNames:\n output_cols = columns[:]\n if self.add_binary_cols:\n output_cols.extend([f\"{col}_filled\" for col in columns])\n return output_cols\n", "path": "nvtabular/ops/fill.py"}]}
| 1,280 | 682 |
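The behavioural core of the `add_binary_cols` option above, demonstrated with pandas as a stand-in for cudf (both expose the same `isna`/`fillna` API used in the patch):

```python
# Record which cells were missing *before* filling them, as the patched
# FillMissing.transform does; pandas stands in for cudf here.
import pandas as pd

gdf = pd.DataFrame({"cont1": [1.0, None, 3.0]})
gdf["cont1_filled"] = gdf["cont1"].isna()
gdf["cont1"] = gdf["cont1"].fillna(0)

assert gdf["cont1_filled"].tolist() == [False, True, False]
assert gdf["cont1"].tolist() == [1.0, 0.0, 3.0]
```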
gh_patches_debug_10704
|
rasdani/github-patches
|
git_diff
|
facebookresearch__fairseq-4805
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[fairseq] Guard call to `shape_as_tensor` with `is_in_onnx_export()`
This is a no-op in eager and in ONNX export, but it's better for other
tracers if this is preserved as shapes directly instead of converted to
a tensor.
There is a little annoying code duplication with
`torch.jit.is_scripting()`, which is unfortunately necessary because we
didn't implement compile-time short circuiting correctly in TorchScript
lol.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `fairseq/modules/sinusoidal_positional_embedding.py`
Content:
```
1 # Copyright (c) Facebook, Inc. and its affiliates.
2 #
3 # This source code is licensed under the MIT license found in the
4 # LICENSE file in the root directory of this source tree.
5
6 import math
7 from typing import Any, Optional
8
9 import torch
10 import torch.onnx.operators
11 from fairseq import utils
12 from torch import Tensor, nn
13
14
15 class SinusoidalPositionalEmbedding(nn.Module):
16 """This module produces sinusoidal positional embeddings of any length.
17
18 Padding symbols are ignored.
19 """
20
21 def __init__(self, embedding_dim, padding_idx, init_size=1024):
22 super().__init__()
23 self.embedding_dim = embedding_dim
24 self.padding_idx = padding_idx if padding_idx is not None else 0
25 self.weights = SinusoidalPositionalEmbedding.get_embedding(
26 init_size, embedding_dim, padding_idx
27 )
28 self.onnx_trace = False
29 self.register_buffer("_float_tensor", torch.FloatTensor(1))
30 self.max_positions = int(1e5)
31
32 def prepare_for_onnx_export_(self):
33 self.onnx_trace = True
34
35 @staticmethod
36 def get_embedding(
37 num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None
38 ):
39 """Build sinusoidal embeddings.
40
41 This matches the implementation in tensor2tensor, but differs slightly
42 from the description in Section 3.5 of "Attention Is All You Need".
43 """
44 half_dim = embedding_dim // 2
45 emb = math.log(10000) / (half_dim - 1)
46 emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb)
47 emb = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(
48 1
49 ) * emb.unsqueeze(0)
50 emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(
51 num_embeddings, -1
52 )
53 if embedding_dim % 2 == 1:
54 # zero pad
55 emb = torch.cat([emb, torch.zeros(num_embeddings, 1)], dim=1)
56 if padding_idx is not None:
57 emb[padding_idx, :] = 0
58 return emb
59
60 def forward(
61 self,
62 input,
63 incremental_state: Optional[Any] = None,
64 timestep: Optional[Tensor] = None,
65 positions: Optional[Any] = None,
66 ):
67 """Input is expected to be of size [bsz x seqlen]."""
68 if torch.jit.is_scripting():
69 bspair = torch.onnx.operators.shape_as_tensor(input)
70 elif torch.onnx.is_in_onnx_export():
71 bspair = torch.onnx.operators.shape_as_tensor(input)
72 else:
73 bspair = input.size()
74 bsz, seq_len = bspair[0], bspair[1]
75 max_pos = self.padding_idx + 1 + seq_len
76 if self.weights is None or max_pos > self.weights.size(0):
77 # recompute/expand embeddings if needed
78 self.weights = SinusoidalPositionalEmbedding.get_embedding(
79 max_pos, self.embedding_dim, self.padding_idx
80 )
81 self.weights = self.weights.to(self._float_tensor)
82
83 if incremental_state is not None:
84 # positions is the same for every token when decoding a single step
85 pos = timestep.view(-1)[0] + 1 if timestep is not None else seq_len
86 if self.onnx_trace:
87 return (
88 self.weights.index_select(index=self.padding_idx + pos, dim=0)
89 .unsqueeze(1)
90 .repeat(bsz, 1, 1)
91 )
92 return self.weights[self.padding_idx + pos, :].expand(bsz, 1, -1)
93
94 positions = utils.make_positions(
95 input, self.padding_idx, onnx_trace=self.onnx_trace
96 )
97 if self.onnx_trace:
98 flat_embeddings = self.weights.detach().index_select(0, positions.view(-1))
99 embedding_shape = torch.cat(
100 (bsz.view(1), seq_len.view(1), torch.tensor([-1], dtype=torch.long))
101 )
102 embeddings = torch.onnx.operators.reshape_from_tensor_shape(
103 flat_embeddings, embedding_shape
104 )
105 return embeddings
106 return (
107 self.weights.index_select(0, positions.view(-1))
108 .view(bsz, seq_len, -1)
109 .detach()
110 )
111
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/fairseq/modules/sinusoidal_positional_embedding.py b/fairseq/modules/sinusoidal_positional_embedding.py
--- a/fairseq/modules/sinusoidal_positional_embedding.py
+++ b/fairseq/modules/sinusoidal_positional_embedding.py
@@ -97,7 +97,7 @@
if self.onnx_trace:
flat_embeddings = self.weights.detach().index_select(0, positions.view(-1))
embedding_shape = torch.cat(
- (bsz.view(1), seq_len.view(1), torch.tensor([-1], dtype=torch.long))
+ (bsz, seq_len, torch.tensor([-1], dtype=torch.long))
)
embeddings = torch.onnx.operators.reshape_from_tensor_shape(
flat_embeddings, embedding_shape
|
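Separately from the golden diff, the issue text asks for guarding the ONNX-only shape op; a minimal sketch of that guard (the `shape_of` helper is hypothetical, and the two identical branches are kept deliberately because, per the issue, TorchScript lacks correct compile-time short-circuiting):

```python
# Keep plain .size() in eager mode; only lower to shape_as_tensor while
# scripting or exporting, so other tracers see real shapes.
import torch
import torch.onnx.operators


def shape_of(x: torch.Tensor):
    if torch.jit.is_scripting():
        return torch.onnx.operators.shape_as_tensor(x)
    if torch.onnx.is_in_onnx_export():
        return torch.onnx.operators.shape_as_tensor(x)
    return x.size()
```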
{"golden_diff": "diff --git a/fairseq/modules/sinusoidal_positional_embedding.py b/fairseq/modules/sinusoidal_positional_embedding.py\n--- a/fairseq/modules/sinusoidal_positional_embedding.py\n+++ b/fairseq/modules/sinusoidal_positional_embedding.py\n@@ -97,7 +97,7 @@\n if self.onnx_trace:\n flat_embeddings = self.weights.detach().index_select(0, positions.view(-1))\n embedding_shape = torch.cat(\n- (bsz.view(1), seq_len.view(1), torch.tensor([-1], dtype=torch.long))\n+ (bsz, seq_len, torch.tensor([-1], dtype=torch.long))\n )\n embeddings = torch.onnx.operators.reshape_from_tensor_shape(\n flat_embeddings, embedding_shape\n", "issue": "[fairseq] Guard call to `shape_as_tensor` with `is_in_onnx_export()`\nThis is a no-op in eager and in ONNX export, but it's better for other\ntracers if this is preserved as shapes directly instead of converted to\na tensor.\n\nThere is a little annoying code duplication with\n`torch.jit.is_scripting()`, which is unforunately necessary because we\ndidn't implement compile-time short circuiting correctly in TorchScript\nlol.\n\n", "before_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nimport math\nfrom typing import Any, Optional\n\nimport torch\nimport torch.onnx.operators\nfrom fairseq import utils\nfrom torch import Tensor, nn\n\n\nclass SinusoidalPositionalEmbedding(nn.Module):\n \"\"\"This module produces sinusoidal positional embeddings of any length.\n\n Padding symbols are ignored.\n \"\"\"\n\n def __init__(self, embedding_dim, padding_idx, init_size=1024):\n super().__init__()\n self.embedding_dim = embedding_dim\n self.padding_idx = padding_idx if padding_idx is not None else 0\n self.weights = SinusoidalPositionalEmbedding.get_embedding(\n init_size, embedding_dim, padding_idx\n )\n self.onnx_trace = False\n self.register_buffer(\"_float_tensor\", torch.FloatTensor(1))\n self.max_positions = int(1e5)\n\n def prepare_for_onnx_export_(self):\n self.onnx_trace = True\n\n @staticmethod\n def get_embedding(\n num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None\n ):\n \"\"\"Build sinusoidal embeddings.\n\n This matches the implementation in tensor2tensor, but differs slightly\n from the description in Section 3.5 of \"Attention Is All You Need\".\n \"\"\"\n half_dim = embedding_dim // 2\n emb = math.log(10000) / (half_dim - 1)\n emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb)\n emb = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(\n 1\n ) * emb.unsqueeze(0)\n emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(\n num_embeddings, -1\n )\n if embedding_dim % 2 == 1:\n # zero pad\n emb = torch.cat([emb, torch.zeros(num_embeddings, 1)], dim=1)\n if padding_idx is not None:\n emb[padding_idx, :] = 0\n return emb\n\n def forward(\n self,\n input,\n incremental_state: Optional[Any] = None,\n timestep: Optional[Tensor] = None,\n positions: Optional[Any] = None,\n ):\n \"\"\"Input is expected to be of size [bsz x seqlen].\"\"\"\n if torch.jit.is_scripting():\n bspair = torch.onnx.operators.shape_as_tensor(input)\n elif torch.onnx.is_in_onnx_export():\n bspair = torch.onnx.operators.shape_as_tensor(input)\n else:\n bspair = input.size()\n bsz, seq_len = bspair[0], bspair[1]\n max_pos = self.padding_idx + 1 + seq_len\n if self.weights is None or max_pos > self.weights.size(0):\n # recompute/expand embeddings if needed\n self.weights = 
SinusoidalPositionalEmbedding.get_embedding(\n max_pos, self.embedding_dim, self.padding_idx\n )\n self.weights = self.weights.to(self._float_tensor)\n\n if incremental_state is not None:\n # positions is the same for every token when decoding a single step\n pos = timestep.view(-1)[0] + 1 if timestep is not None else seq_len\n if self.onnx_trace:\n return (\n self.weights.index_select(index=self.padding_idx + pos, dim=0)\n .unsqueeze(1)\n .repeat(bsz, 1, 1)\n )\n return self.weights[self.padding_idx + pos, :].expand(bsz, 1, -1)\n\n positions = utils.make_positions(\n input, self.padding_idx, onnx_trace=self.onnx_trace\n )\n if self.onnx_trace:\n flat_embeddings = self.weights.detach().index_select(0, positions.view(-1))\n embedding_shape = torch.cat(\n (bsz.view(1), seq_len.view(1), torch.tensor([-1], dtype=torch.long))\n )\n embeddings = torch.onnx.operators.reshape_from_tensor_shape(\n flat_embeddings, embedding_shape\n )\n return embeddings\n return (\n self.weights.index_select(0, positions.view(-1))\n .view(bsz, seq_len, -1)\n .detach()\n )\n", "path": "fairseq/modules/sinusoidal_positional_embedding.py"}], "after_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nimport math\nfrom typing import Any, Optional\n\nimport torch\nimport torch.onnx.operators\nfrom fairseq import utils\nfrom torch import Tensor, nn\n\n\nclass SinusoidalPositionalEmbedding(nn.Module):\n \"\"\"This module produces sinusoidal positional embeddings of any length.\n\n Padding symbols are ignored.\n \"\"\"\n\n def __init__(self, embedding_dim, padding_idx, init_size=1024):\n super().__init__()\n self.embedding_dim = embedding_dim\n self.padding_idx = padding_idx if padding_idx is not None else 0\n self.weights = SinusoidalPositionalEmbedding.get_embedding(\n init_size, embedding_dim, padding_idx\n )\n self.onnx_trace = False\n self.register_buffer(\"_float_tensor\", torch.FloatTensor(1))\n self.max_positions = int(1e5)\n\n def prepare_for_onnx_export_(self):\n self.onnx_trace = True\n\n @staticmethod\n def get_embedding(\n num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None\n ):\n \"\"\"Build sinusoidal embeddings.\n\n This matches the implementation in tensor2tensor, but differs slightly\n from the description in Section 3.5 of \"Attention Is All You Need\".\n \"\"\"\n half_dim = embedding_dim // 2\n emb = math.log(10000) / (half_dim - 1)\n emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb)\n emb = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(\n 1\n ) * emb.unsqueeze(0)\n emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(\n num_embeddings, -1\n )\n if embedding_dim % 2 == 1:\n # zero pad\n emb = torch.cat([emb, torch.zeros(num_embeddings, 1)], dim=1)\n if padding_idx is not None:\n emb[padding_idx, :] = 0\n return emb\n\n def forward(\n self,\n input,\n incremental_state: Optional[Any] = None,\n timestep: Optional[Tensor] = None,\n positions: Optional[Any] = None,\n ):\n \"\"\"Input is expected to be of size [bsz x seqlen].\"\"\"\n if torch.jit.is_scripting():\n bspair = torch.onnx.operators.shape_as_tensor(input)\n elif torch.onnx.is_in_onnx_export():\n bspair = torch.onnx.operators.shape_as_tensor(input)\n else:\n bspair = input.size()\n bsz, seq_len = bspair[0], bspair[1]\n max_pos = self.padding_idx + 1 + seq_len\n if self.weights is None or max_pos > self.weights.size(0):\n # recompute/expand 
embeddings if needed\n self.weights = SinusoidalPositionalEmbedding.get_embedding(\n max_pos, self.embedding_dim, self.padding_idx\n )\n self.weights = self.weights.to(self._float_tensor)\n\n if incremental_state is not None:\n # positions is the same for every token when decoding a single step\n pos = timestep.view(-1)[0] + 1 if timestep is not None else seq_len\n if self.onnx_trace:\n return (\n self.weights.index_select(index=self.padding_idx + pos, dim=0)\n .unsqueeze(1)\n .repeat(bsz, 1, 1)\n )\n return self.weights[self.padding_idx + pos, :].expand(bsz, 1, -1)\n\n positions = utils.make_positions(\n input, self.padding_idx, onnx_trace=self.onnx_trace\n )\n if self.onnx_trace:\n flat_embeddings = self.weights.detach().index_select(0, positions.view(-1))\n embedding_shape = torch.cat(\n (bsz, seq_len, torch.tensor([-1], dtype=torch.long))\n )\n embeddings = torch.onnx.operators.reshape_from_tensor_shape(\n flat_embeddings, embedding_shape\n )\n return embeddings\n return (\n self.weights.index_select(0, positions.view(-1))\n .view(bsz, seq_len, -1)\n .detach()\n )\n", "path": "fairseq/modules/sinusoidal_positional_embedding.py"}]}
| 1,543 | 164 |
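A minimal sketch of why the patched `torch.cat` call can drop the `.view(1)` reshapes: `shape_as_tensor` already yields a 1-D int64 tensor, so 1-D slices of it concatenate directly. This is an illustration only — the variable names and the slicing here are assumptions for eager-mode demonstration, not the exact traced values inside fairseq's ONNX export path.

```python
import torch
import torch.onnx.operators

x = torch.zeros(4, 7)                                   # stand-in input batch
shape = torch.onnx.operators.shape_as_tensor(x)         # tensor([4, 7]), dtype=int64
bsz, seq_len = shape[0:1], shape[1:2]                   # 1-D slices, each of shape (1,)
embedding_shape = torch.cat((bsz, seq_len, torch.tensor([-1], dtype=torch.long)))
print(embedding_shape)                                  # tensor([ 4,  7, -1])
```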
gh_patches_debug_8031
|
rasdani/github-patches
|
git_diff
|
ansible__ansible-22664
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
syslog_json callback doesn't cast SYSLOG_PORT value from environment as integer
##### ISSUE TYPE
<!--- Pick one below and delete the rest: -->
- Bug Report
##### COMPONENT NAME
<!--- Name of the module/plugin/task/feature -->
syslog_json callback plugin
##### ANSIBLE VERSION
<!--- Paste verbatim output from “ansible --version” between quotes below -->
```
ansible 2.2.1.0
```
##### CONFIGURATION
Specifying SYSLOG_PORT environment variable to target a non-default port:
SYSLOG_PORT=1514
##### OS / ENVIRONMENT
Redhat 6 (but this should affect any UNIX platform)
##### SUMMARY
The syslog_json callback plugin allows configuration of its target port by specifying the SYSLOG_PORT environment variable; however, it doesn't cast the value it obtains from get_env as int, so specifying a port number this way results in errors on each task and no syslog output:
Traceback (most recent call last):
File "/usr/lib64/python2.6/logging/handlers.py", line 806, in emit
self.socket.sendto(msg, self.address)
TypeError: an integer is required
##### STEPS TO REPRODUCE
For any playbook, specify the syslog_json callback plugin and the syslog port environment variable:
<!--- Paste example playbooks or commands between quotes below -->
```yaml
ANSIBLE_STDOUT_CALLBACK=syslog_json SYSLOG_PORT=1514 ansible-playbook playbook.yml
```
<!--- You can also paste gist.github.com links for larger files -->
##### EXPECTED RESULTS
<!--- What did you expect to happen when running the steps above? -->
No output to stdout, JSON output directed to syslog for each task.
##### ACTUAL RESULTS
<!--- What actually happened? If possible run with extra verbosity (-vvvv) -->
<!--- Paste verbatim command output between quotes below -->
```
Traceback (most recent call last):
File "/usr/lib64/python2.6/logging/handlers.py", line 806, in emit
self.socket.sendto(msg, self.address)
TypeError: an integer is required
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lib/ansible/plugins/callback/syslog_json.py`
Content:
```
1 # Make coding more python3-ish
2 from __future__ import (absolute_import, division, print_function)
3 __metaclass__ = type
4
5 import os
6 import json
7
8 import logging
9 import logging.handlers
10
11 import socket
12
13 from ansible.plugins.callback import CallbackBase
14
15 class CallbackModule(CallbackBase):
16 """
17 logs ansible-playbook and ansible runs to a syslog server in json format
18 make sure you have in ansible.cfg:
19 callback_plugins = <path_to_callback_plugins_folder>
20 and put the plugin in <path_to_callback_plugins_folder>
21
22 This plugin makes use of the following environment variables:
23 SYSLOG_SERVER (optional): defaults to localhost
24 SYSLOG_PORT (optional): defaults to 514
25 SYSLOG_FACILITY (optional): defaults to user
26 """
27 CALLBACK_VERSION = 2.0
28 CALLBACK_TYPE = 'aggregate'
29 CALLBACK_NAME = 'syslog_json'
30 CALLBACK_NEEDS_WHITELIST = True
31
32 def __init__(self):
33
34 super(CallbackModule, self).__init__()
35
36 self.logger = logging.getLogger('ansible logger')
37 self.logger.setLevel(logging.DEBUG)
38
39 self.handler = logging.handlers.SysLogHandler(
40 address = (os.getenv('SYSLOG_SERVER','localhost'),
41 os.getenv('SYSLOG_PORT',514)),
42 facility= os.getenv('SYSLOG_FACILITY',logging.handlers.SysLogHandler.LOG_USER)
43 )
44 self.logger.addHandler(self.handler)
45 self.hostname = socket.gethostname()
46
47
48 def runner_on_failed(self, host, res, ignore_errors=False):
49 self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))
50
51 def runner_on_ok(self, host, res):
52 self.logger.info('%s ansible-command: task execution OK; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))
53
54 def runner_on_skipped(self, host, item=None):
55 self.logger.info('%s ansible-command: task execution SKIPPED; host: %s; message: %s' % (self.hostname,host, 'skipped'))
56
57 def runner_on_unreachable(self, host, res):
58 self.logger.error('%s ansible-command: task execution UNREACHABLE; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))
59
60 def runner_on_async_failed(self, host, res, jid):
61 self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))
62
63 def playbook_on_import_for_host(self, host, imported_file):
64 self.logger.info('%s ansible-command: playbook IMPORTED; host: %s; message: imported file %s' % (self.hostname,host,imported_file))
65
66 def playbook_on_not_import_for_host(self, host, missing_file):
67 self.logger.info('%s ansible-command: playbook NOT IMPORTED; host: %s; message: missing file %s' % (self.hostname,host,missing_file))
68
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/lib/ansible/plugins/callback/syslog_json.py b/lib/ansible/plugins/callback/syslog_json.py
--- a/lib/ansible/plugins/callback/syslog_json.py
+++ b/lib/ansible/plugins/callback/syslog_json.py
@@ -38,7 +38,7 @@
self.handler = logging.handlers.SysLogHandler(
address = (os.getenv('SYSLOG_SERVER','localhost'),
- os.getenv('SYSLOG_PORT',514)),
+ int(os.getenv('SYSLOG_PORT',514))),
facility= os.getenv('SYSLOG_FACILITY',logging.handlers.SysLogHandler.LOG_USER)
)
self.logger.addHandler(self.handler)
|
{"golden_diff": "diff --git a/lib/ansible/plugins/callback/syslog_json.py b/lib/ansible/plugins/callback/syslog_json.py\n--- a/lib/ansible/plugins/callback/syslog_json.py\n+++ b/lib/ansible/plugins/callback/syslog_json.py\n@@ -38,7 +38,7 @@\n \n self.handler = logging.handlers.SysLogHandler(\n address = (os.getenv('SYSLOG_SERVER','localhost'),\n- os.getenv('SYSLOG_PORT',514)),\n+ int(os.getenv('SYSLOG_PORT',514))),\n facility= os.getenv('SYSLOG_FACILITY',logging.handlers.SysLogHandler.LOG_USER)\n )\n self.logger.addHandler(self.handler)\n", "issue": "syslog_json callback doesn't cast SYSLOG_PORT value from environment as integer\n##### ISSUE TYPE\r\n<!--- Pick one below and delete the rest: -->\r\n - Bug Report\r\n\r\n##### COMPONENT NAME\r\n<!--- Name of the module/plugin/task/feature -->\r\nsyslog_json callback plugin\r\n\r\n##### ANSIBLE VERSION\r\n<!--- Paste verbatim output from \u201cansible --version\u201d between quotes below -->\r\n```\r\nansible 2.2.1.0\r\n```\r\n\r\n##### CONFIGURATION\r\nSpecifying SYSLOG_PORT environment variable to target a non-default port:\r\nSYSLOG_PORT=1514\r\n\r\n##### OS / ENVIRONMENT\r\nRedhat 6 (but this should affect any UNIX platform)\r\n\r\n##### SUMMARY\r\nThe syslog_json callback plugin allows configuration of its target port by specifying the SYSLOG_PORT environment variable; however, it doesn't cast the value it obtains from get_env as int, so specifying a port number this way results in errors on each task and no syslog output:\r\n\r\nTraceback (most recent call last):\r\n File \"/usr/lib64/python2.6/logging/handlers.py\", line 806, in emit\r\n self.socket.sendto(msg, self.address)\r\nTypeError: an integer is required\r\n\r\n##### STEPS TO REPRODUCE\r\nFor any playbook, specify the syslog_json callback plugin and the syslog port environment variable:\r\n\r\n<!--- Paste example playbooks or commands between quotes below -->\r\n```yaml\r\nANSIBLE_STDOUT_CALLBACK=syslog_json SYSLOG_PORT=1514 ansible-playbook playbook.yml\r\n```\r\n\r\n<!--- You can also paste gist.github.com links for larger files -->\r\n\r\n##### EXPECTED RESULTS\r\n<!--- What did you expect to happen when running the steps above? -->\r\nNo output to stdout, JSON output directed to syslog for each task.\r\n\r\n##### ACTUAL RESULTS\r\n<!--- What actually happened? 
If possible run with extra verbosity (-vvvv) -->\r\n\r\n<!--- Paste verbatim command output between quotes below -->\r\n```\r\nTraceback (most recent call last):\r\n File \"/usr/lib64/python2.6/logging/handlers.py\", line 806, in emit\r\n self.socket.sendto(msg, self.address)\r\nTypeError: an integer is required\r\n```\r\n\n", "before_files": [{"content": "# Make coding more python3-ish\nfrom __future__ import (absolute_import, division, print_function)\n__metaclass__ = type\n\nimport os\nimport json\n\nimport logging\nimport logging.handlers\n\nimport socket\n\nfrom ansible.plugins.callback import CallbackBase\n\nclass CallbackModule(CallbackBase):\n \"\"\"\n logs ansible-playbook and ansible runs to a syslog server in json format\n make sure you have in ansible.cfg:\n callback_plugins = <path_to_callback_plugins_folder>\n and put the plugin in <path_to_callback_plugins_folder>\n\n This plugin makes use of the following environment variables:\n SYSLOG_SERVER (optional): defaults to localhost\n SYSLOG_PORT (optional): defaults to 514\n SYSLOG_FACILITY (optional): defaults to user\n \"\"\"\n CALLBACK_VERSION = 2.0\n CALLBACK_TYPE = 'aggregate'\n CALLBACK_NAME = 'syslog_json'\n CALLBACK_NEEDS_WHITELIST = True\n\n def __init__(self):\n\n super(CallbackModule, self).__init__()\n\n self.logger = logging.getLogger('ansible logger')\n self.logger.setLevel(logging.DEBUG)\n\n self.handler = logging.handlers.SysLogHandler(\n address = (os.getenv('SYSLOG_SERVER','localhost'),\n os.getenv('SYSLOG_PORT',514)),\n facility= os.getenv('SYSLOG_FACILITY',logging.handlers.SysLogHandler.LOG_USER)\n )\n self.logger.addHandler(self.handler)\n self.hostname = socket.gethostname()\n\n\n def runner_on_failed(self, host, res, ignore_errors=False):\n self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))\n\n def runner_on_ok(self, host, res):\n self.logger.info('%s ansible-command: task execution OK; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))\n\n def runner_on_skipped(self, host, item=None):\n self.logger.info('%s ansible-command: task execution SKIPPED; host: %s; message: %s' % (self.hostname,host, 'skipped'))\n\n def runner_on_unreachable(self, host, res):\n self.logger.error('%s ansible-command: task execution UNREACHABLE; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))\n\n def runner_on_async_failed(self, host, res, jid):\n self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))\n\n def playbook_on_import_for_host(self, host, imported_file):\n self.logger.info('%s ansible-command: playbook IMPORTED; host: %s; message: imported file %s' % (self.hostname,host,imported_file))\n\n def playbook_on_not_import_for_host(self, host, missing_file):\n self.logger.info('%s ansible-command: playbook NOT IMPORTED; host: %s; message: missing file %s' % (self.hostname,host,missing_file))\n", "path": "lib/ansible/plugins/callback/syslog_json.py"}], "after_files": [{"content": "# Make coding more python3-ish\nfrom __future__ import (absolute_import, division, print_function)\n__metaclass__ = type\n\nimport os\nimport json\n\nimport logging\nimport logging.handlers\n\nimport socket\n\nfrom ansible.plugins.callback import CallbackBase\n\nclass CallbackModule(CallbackBase):\n \"\"\"\n logs ansible-playbook and ansible runs to a syslog server in json format\n make sure you have in ansible.cfg:\n callback_plugins = 
<path_to_callback_plugins_folder>\n and put the plugin in <path_to_callback_plugins_folder>\n\n This plugin makes use of the following environment variables:\n SYSLOG_SERVER (optional): defaults to localhost\n SYSLOG_PORT (optional): defaults to 514\n SYSLOG_FACILITY (optional): defaults to user\n \"\"\"\n CALLBACK_VERSION = 2.0\n CALLBACK_TYPE = 'aggregate'\n CALLBACK_NAME = 'syslog_json'\n CALLBACK_NEEDS_WHITELIST = True\n\n def __init__(self):\n\n super(CallbackModule, self).__init__()\n\n self.logger = logging.getLogger('ansible logger')\n self.logger.setLevel(logging.DEBUG)\n\n self.handler = logging.handlers.SysLogHandler(\n address = (os.getenv('SYSLOG_SERVER','localhost'),\n int(os.getenv('SYSLOG_PORT',514))),\n facility= os.getenv('SYSLOG_FACILITY',logging.handlers.SysLogHandler.LOG_USER)\n )\n self.logger.addHandler(self.handler)\n self.hostname = socket.gethostname()\n\n\n def runner_on_failed(self, host, res, ignore_errors=False):\n self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))\n\n def runner_on_ok(self, host, res):\n self.logger.info('%s ansible-command: task execution OK; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))\n\n def runner_on_skipped(self, host, item=None):\n self.logger.info('%s ansible-command: task execution SKIPPED; host: %s; message: %s' % (self.hostname,host, 'skipped'))\n\n def runner_on_unreachable(self, host, res):\n self.logger.error('%s ansible-command: task execution UNREACHABLE; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))\n\n def runner_on_async_failed(self, host, res, jid):\n self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))\n\n def playbook_on_import_for_host(self, host, imported_file):\n self.logger.info('%s ansible-command: playbook IMPORTED; host: %s; message: imported file %s' % (self.hostname,host,imported_file))\n\n def playbook_on_not_import_for_host(self, host, missing_file):\n self.logger.info('%s ansible-command: playbook NOT IMPORTED; host: %s; message: missing file %s' % (self.hostname,host,missing_file))\n", "path": "lib/ansible/plugins/callback/syslog_json.py"}]}
| 1,511 | 140 |
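A standard-library sketch of the root cause and the one-line remedy: environment variables always arrive as strings, and `SysLogHandler` hands its address tuple straight to `socket.sendto`, which requires an integer port. The port value below is hypothetical; constructing the handler is enough to show where the cast belongs.

```python
import os
import logging.handlers

os.environ.setdefault("SYSLOG_PORT", "1514")        # env values are always str
port = int(os.getenv("SYSLOG_PORT", 514))           # the fix: cast before use
handler = logging.handlers.SysLogHandler(address=("localhost", port))
print(type(port))                                   # <class 'int'>
```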
gh_patches_debug_26977
|
rasdani/github-patches
|
git_diff
|
dotkom__onlineweb4-781
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Registration form needs help text
To be a valid online user the user must register with their student emails. This is not indicated in the registration form.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `apps/authentication/forms.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 import datetime
4 import re
5
6 from django import forms
7 from django.contrib import auth
8 from django.utils.translation import ugettext as _
9
10 from apps.authentication.models import OnlineUser as User, Email
11
12 class LoginForm(forms.Form):
13 username = forms.CharField(widget=forms.TextInput(), label=_("Brukernavn"), max_length=50)
14 password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(u"Passord"))
15 user = None
16
17 def clean(self):
18 if self._errors:
19 return
20
21 user = auth.authenticate(username=self.cleaned_data['username'], password=self.cleaned_data['password'])
22
23 if user:
24 if user.is_active:
25 self.user = user
26 else:
27 self._errors['username'] = self.error_class([_(u"Din konto er ikke aktiv. Forsøk gjenoppretning av passord.")])
28 else:
29 self._errors['username'] = self.error_class([_(u"Kontoen eksisterer ikke, eller kombinasjonen av brukernavn og passord er feil.")])
30 return self.cleaned_data
31
32 def login(self, request):
33 try:
34 User.objects.get(username=request.POST['username'])
35 except:
36 return False
37 if self.is_valid():
38 auth.login(request, self.user)
39 return True
40 return False
41
42 class RegisterForm(forms.Form):
43 username = forms.CharField(label=_("Brukernavn"), max_length=20)
44 first_name = forms.CharField(label=_("Fornavn"), max_length=50)
45 last_name = forms.CharField(label=_("Etternavn"), max_length=50)
46 email = forms.EmailField(label=_("Epost"), max_length=50)
47 password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_("Passord"))
48 repeat_password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_("Gjenta passord"))
49 address = forms.CharField(label=_("Adresse"), max_length=100, required=False)
50 zip_code = forms.CharField(label=_("Postnummer"), max_length=4, required=False)
51 phone = forms.CharField(label=_("Telefon"), max_length=20, required=False)
52
53 def clean(self):
54 super(RegisterForm, self).clean()
55 if self.is_valid():
56 cleaned_data = self.cleaned_data
57
58 # Check passwords
59 if cleaned_data['password'] != cleaned_data['repeat_password']:
60 self._errors['repeat_password'] = self.error_class([_(u"Passordene er ikke like.")])
61
62 # Check username
63 username = cleaned_data['username']
64 if User.objects.filter(username=username).count() > 0:
65 self._errors['username'] = self.error_class([_(u"Brukernavnet er allerede registrert.")])
66 if not re.match("^[a-zA-Z0-9_-]+$", username):
67 self._errors['username'] = self.error_class([_(u"Ditt brukernavn inneholdt ulovlige tegn. Lovlige tegn: a-Z 0-9 - _")])
68
69 # Check email
70 email = cleaned_data['email'].lower()
71 if Email.objects.filter(email=email).count() > 0:
72 self._errors['email'] = self.error_class([_(u"Det fins allerede en bruker med denne epostadressen.")])
73
74 # ZIP code digits only
75 zip_code = cleaned_data['zip_code']
76 if len(zip_code) != 0:
77 if len(zip_code) != 4 or not zip_code.isdigit():
78 self._errors['zip_code'] = self.error_class([_(u"Postnummer må bestå av fire siffer.")])
79
80 return cleaned_data
81
82 class RecoveryForm(forms.Form):
83 email = forms.EmailField(label="Email", max_length=50)
84
85 class ChangePasswordForm(forms.Form):
86 new_password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(u"Nytt passord"))
87 repeat_password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(u"Gjenta passord"))
88
89 def clean(self):
90 super(ChangePasswordForm, self).clean()
91 if self.is_valid():
92 cleaned_data = self.cleaned_data
93
94 # Check passwords
95 if cleaned_data['new_password'] != cleaned_data['repeat_password']:
96 self._errors['repeat_password'] = self.error_class([_(u"Passordene er ikke like.")])
97
98 return cleaned_data
99
100
101 class NewEmailForm(forms.Form):
102 new_email = forms.EmailField(label=_(u"ny epostadresse"))
103
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/apps/authentication/forms.py b/apps/authentication/forms.py
--- a/apps/authentication/forms.py
+++ b/apps/authentication/forms.py
@@ -40,14 +40,14 @@
return False
class RegisterForm(forms.Form):
- username = forms.CharField(label=_("Brukernavn"), max_length=20)
- first_name = forms.CharField(label=_("Fornavn"), max_length=50)
+ username = forms.CharField(label=_("Brukernavn"), max_length=20, help_text=u'Valgfritt brukernavn')
+ first_name = forms.CharField(label=_("Fornavn"), max_length=50, help_text=u'Mellomnavn inkluderer du etter fornavnet ditt')
last_name = forms.CharField(label=_("Etternavn"), max_length=50)
- email = forms.EmailField(label=_("Epost"), max_length=50)
+ email = forms.EmailField(label=_("Epost"), max_length=50, help_text=u'Du kan legge til flere epostadresser senere i din profil.')
password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_("Passord"))
repeat_password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_("Gjenta passord"))
- address = forms.CharField(label=_("Adresse"), max_length=100, required=False)
- zip_code = forms.CharField(label=_("Postnummer"), max_length=4, required=False)
+ address = forms.CharField(label=_("Adresse"), max_length=100, required=False, widget=forms.Textarea(attrs={'rows':3}))
+ zip_code = forms.CharField(label=_("Postnummer"), max_length=4, required=False, help_text=u'Vi henter by basert på postnummer')
phone = forms.CharField(label=_("Telefon"), max_length=20, required=False)
def clean(self):
|
{"golden_diff": "diff --git a/apps/authentication/forms.py b/apps/authentication/forms.py\n--- a/apps/authentication/forms.py\n+++ b/apps/authentication/forms.py\n@@ -40,14 +40,14 @@\n return False\n \n class RegisterForm(forms.Form):\n- username = forms.CharField(label=_(\"Brukernavn\"), max_length=20)\n- first_name = forms.CharField(label=_(\"Fornavn\"), max_length=50)\n+ username = forms.CharField(label=_(\"Brukernavn\"), max_length=20, help_text=u'Valgfritt brukernavn')\n+ first_name = forms.CharField(label=_(\"Fornavn\"), max_length=50, help_text=u'Mellomnavn inkluderer du etter fornavnet ditt')\n last_name = forms.CharField(label=_(\"Etternavn\"), max_length=50)\n- email = forms.EmailField(label=_(\"Epost\"), max_length=50)\n+ email = forms.EmailField(label=_(\"Epost\"), max_length=50, help_text=u'Du kan legge til flere epostadresser senere i din profil.')\n password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(\"Passord\"))\n repeat_password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(\"Gjenta passord\"))\n- address = forms.CharField(label=_(\"Adresse\"), max_length=100, required=False)\n- zip_code = forms.CharField(label=_(\"Postnummer\"), max_length=4, required=False)\n+ address = forms.CharField(label=_(\"Adresse\"), max_length=100, required=False, widget=forms.Textarea(attrs={'rows':3}))\n+ zip_code = forms.CharField(label=_(\"Postnummer\"), max_length=4, required=False, help_text=u'Vi henter by basert p\u00e5 postnummer')\n phone = forms.CharField(label=_(\"Telefon\"), max_length=20, required=False)\n \n def clean(self):\n", "issue": "Registration form needs help text\nTo be a valid online user the user must register with their student emails. This is not indicated in the registration form.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nimport datetime\nimport re\n\nfrom django import forms\nfrom django.contrib import auth\nfrom django.utils.translation import ugettext as _\n\nfrom apps.authentication.models import OnlineUser as User, Email\n\nclass LoginForm(forms.Form):\n username = forms.CharField(widget=forms.TextInput(), label=_(\"Brukernavn\"), max_length=50)\n password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(u\"Passord\"))\n user = None\n\n def clean(self):\n if self._errors:\n return\n \n user = auth.authenticate(username=self.cleaned_data['username'], password=self.cleaned_data['password'])\n\n if user:\n if user.is_active:\n self.user = user\n else:\n self._errors['username'] = self.error_class([_(u\"Din konto er ikke aktiv. 
Fors\u00f8k gjenoppretning av passord.\")])\n else:\n self._errors['username'] = self.error_class([_(u\"Kontoen eksisterer ikke, eller kombinasjonen av brukernavn og passord er feil.\")])\n return self.cleaned_data\n\n def login(self, request):\n try:\n User.objects.get(username=request.POST['username'])\n except:\n return False\n if self.is_valid():\n auth.login(request, self.user)\n return True\n return False\n\nclass RegisterForm(forms.Form):\n username = forms.CharField(label=_(\"Brukernavn\"), max_length=20)\n first_name = forms.CharField(label=_(\"Fornavn\"), max_length=50)\n last_name = forms.CharField(label=_(\"Etternavn\"), max_length=50)\n email = forms.EmailField(label=_(\"Epost\"), max_length=50)\n password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(\"Passord\"))\n repeat_password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(\"Gjenta passord\"))\n address = forms.CharField(label=_(\"Adresse\"), max_length=100, required=False)\n zip_code = forms.CharField(label=_(\"Postnummer\"), max_length=4, required=False)\n phone = forms.CharField(label=_(\"Telefon\"), max_length=20, required=False)\n \n def clean(self):\n super(RegisterForm, self).clean()\n if self.is_valid():\n cleaned_data = self.cleaned_data\n\n # Check passwords\n if cleaned_data['password'] != cleaned_data['repeat_password']:\n self._errors['repeat_password'] = self.error_class([_(u\"Passordene er ikke like.\")])\n\n # Check username\n username = cleaned_data['username']\n if User.objects.filter(username=username).count() > 0:\n self._errors['username'] = self.error_class([_(u\"Brukernavnet er allerede registrert.\")])\n if not re.match(\"^[a-zA-Z0-9_-]+$\", username):\n self._errors['username'] = self.error_class([_(u\"Ditt brukernavn inneholdt ulovlige tegn. 
Lovlige tegn: a-Z 0-9 - _\")])\n\n # Check email\n email = cleaned_data['email'].lower()\n if Email.objects.filter(email=email).count() > 0:\n self._errors['email'] = self.error_class([_(u\"Det fins allerede en bruker med denne epostadressen.\")])\n\n # ZIP code digits only\n zip_code = cleaned_data['zip_code']\n if len(zip_code) != 0:\n if len(zip_code) != 4 or not zip_code.isdigit():\n self._errors['zip_code'] = self.error_class([_(u\"Postnummer m\u00e5 best\u00e5 av fire siffer.\")])\n\n return cleaned_data \n\nclass RecoveryForm(forms.Form):\n email = forms.EmailField(label=\"Email\", max_length=50)\n\nclass ChangePasswordForm(forms.Form):\n new_password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(u\"Nytt passord\"))\n repeat_password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(u\"Gjenta passord\"))\n\n def clean(self):\n super(ChangePasswordForm, self).clean()\n if self.is_valid():\n cleaned_data = self.cleaned_data\n\n # Check passwords\n if cleaned_data['new_password'] != cleaned_data['repeat_password']:\n self._errors['repeat_password'] = self.error_class([_(u\"Passordene er ikke like.\")])\n\n return cleaned_data\n\n\nclass NewEmailForm(forms.Form):\n new_email = forms.EmailField(label=_(u\"ny epostadresse\"))\n", "path": "apps/authentication/forms.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\nimport datetime\nimport re\n\nfrom django import forms\nfrom django.contrib import auth\nfrom django.utils.translation import ugettext as _\n\nfrom apps.authentication.models import OnlineUser as User, Email\n\nclass LoginForm(forms.Form):\n username = forms.CharField(widget=forms.TextInput(), label=_(\"Brukernavn\"), max_length=50)\n password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(u\"Passord\"))\n user = None\n\n def clean(self):\n if self._errors:\n return\n \n user = auth.authenticate(username=self.cleaned_data['username'], password=self.cleaned_data['password'])\n\n if user:\n if user.is_active:\n self.user = user\n else:\n self._errors['username'] = self.error_class([_(u\"Din konto er ikke aktiv. 
Fors\u00f8k gjenoppretning av passord.\")])\n else:\n self._errors['username'] = self.error_class([_(u\"Kontoen eksisterer ikke, eller kombinasjonen av brukernavn og passord er feil.\")])\n return self.cleaned_data\n\n def login(self, request):\n try:\n User.objects.get(username=request.POST['username'])\n except:\n return False\n if self.is_valid():\n auth.login(request, self.user)\n return True\n return False\n\nclass RegisterForm(forms.Form):\n username = forms.CharField(label=_(\"Brukernavn\"), max_length=20, help_text=u'Valgfritt brukernavn')\n first_name = forms.CharField(label=_(\"Fornavn\"), max_length=50, help_text=u'Mellomnavn inkluderer du etter fornavnet ditt')\n last_name = forms.CharField(label=_(\"Etternavn\"), max_length=50)\n email = forms.EmailField(label=_(\"Epost\"), max_length=50, help_text=u'Du kan legge til flere epostadresser senere i din profil.')\n password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(\"Passord\"))\n repeat_password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(\"Gjenta passord\"))\n address = forms.CharField(label=_(\"Adresse\"), max_length=100, required=False, widget=forms.Textarea(attrs={'rows':3}))\n zip_code = forms.CharField(label=_(\"Postnummer\"), max_length=4, required=False, help_text=u'Vi henter by basert p\u00e5 postnummer')\n phone = forms.CharField(label=_(\"Telefon\"), max_length=20, required=False)\n \n def clean(self):\n super(RegisterForm, self).clean()\n if self.is_valid():\n cleaned_data = self.cleaned_data\n\n # Check passwords\n if cleaned_data['password'] != cleaned_data['repeat_password']:\n self._errors['repeat_password'] = self.error_class([_(u\"Passordene er ikke like.\")])\n\n # Check username\n username = cleaned_data['username']\n if User.objects.filter(username=username).count() > 0:\n self._errors['username'] = self.error_class([_(u\"Brukernavnet er allerede registrert.\")])\n if not re.match(\"^[a-zA-Z0-9_-]+$\", username):\n self._errors['username'] = self.error_class([_(u\"Ditt brukernavn inneholdt ulovlige tegn. Lovlige tegn: a-Z 0-9 - _\")])\n\n # Check email\n email = cleaned_data['email'].lower()\n if Email.objects.filter(email=email).count() > 0:\n self._errors['email'] = self.error_class([_(u\"Det fins allerede en bruker med denne epostadressen.\")])\n\n # ZIP code digits only\n zip_code = cleaned_data['zip_code']\n if len(zip_code) != 0:\n if len(zip_code) != 4 or not zip_code.isdigit():\n self._errors['zip_code'] = self.error_class([_(u\"Postnummer m\u00e5 best\u00e5 av fire siffer.\")])\n\n return cleaned_data \n\nclass RecoveryForm(forms.Form):\n email = forms.EmailField(label=\"Email\", max_length=50)\n\nclass ChangePasswordForm(forms.Form):\n new_password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(u\"Nytt passord\"))\n repeat_password = forms.CharField(widget=forms.PasswordInput(render_value=False), label=_(u\"Gjenta passord\"))\n\n def clean(self):\n super(ChangePasswordForm, self).clean()\n if self.is_valid():\n cleaned_data = self.cleaned_data\n\n # Check passwords\n if cleaned_data['new_password'] != cleaned_data['repeat_password']:\n self._errors['repeat_password'] = self.error_class([_(u\"Passordene er ikke like.\")])\n\n return cleaned_data\n\n\nclass NewEmailForm(forms.Form):\n new_email = forms.EmailField(label=_(u\"ny epostadresse\"))\n", "path": "apps/authentication/forms.py"}]}
| 1,469 | 402 |
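For the issue's actual ask — telling users to register with their student email — the same `help_text` mechanism carries that hint. A minimal Django sketch (assumes Django is installed; the field wording is illustrative, not part of the merged patch):

```python
from django import forms

class RegisterForm(forms.Form):
    # help_text is rendered next to the input by form renderers such as as_p()
    email = forms.EmailField(
        max_length=50,
        help_text="Register with your student email address to be validated as an online user.",
    )
```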
gh_patches_debug_32213
|
rasdani/github-patches
|
git_diff
|
mesonbuild__meson-840
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
setup.py: Install scripts without extensions on UNIX-like platforms
Because of issue #394 , meson install scripts as `xxx.py` , but in linux, install script with a extension name is no a good practice. And change the installed script name also break some package depend on meson.
Could you deal with it for different platform?
thx.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python3
2
3 # Copyright 2016 The Meson development team
4
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
8
9 # http://www.apache.org/licenses/LICENSE-2.0
10
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
16
17 import sys
18
19 if sys.version_info[0] < 3:
20 print('Tried to install with Python 2, Meson only supports Python 3.')
21 sys.exit(1)
22
23 # We need to support Python installations that have nothing but the basic
24 # Python installation. Use setuptools when possible and fall back to
25 # plain distutils when setuptools is not available.
26 try:
27 from setuptools import setup
28 except ImportError:
29 from distutils.core import setup
30
31 from mesonbuild.coredata import version
32
33 setup(name='meson',
34 version=version,
35 description='A high performance build system',
36 author='Jussi Pakkanen',
37 author_email='[email protected]',
38 url='http://mesonbuild.com',
39 license=' Apache License, Version 2.0',
40 packages=['mesonbuild',
41 'mesonbuild.modules',
42 'mesonbuild.scripts',
43 'mesonbuild.backend',
44 'mesonbuild.wrap'],
45 scripts=['meson.py',
46 'mesonconf.py',
47 'mesonintrospect.py',
48 'wraptool.py'],
49 data_files=[('share/man/man1', ['man/meson.1',
50 'man/mesonconf.1',
51 'man/mesonintrospect.1',
52 'man/wraptool.1'])],
53 classifiers=['Development Status :: 5 - Production/Stable',
54 'Environment :: Console',
55 'Intended Audience :: Developers',
56 'License :: OSI Approved :: Apache Software License',
57 'Natural Language :: English',
58 'Operating System :: MacOS :: MacOS X',
59 'Operating System :: Microsoft :: Windows',
60 'Operating System :: POSIX :: BSD',
61 'Operating System :: POSIX :: Linux',
62 'Programming Language :: Python :: 3 :: Only',
63 'Topic :: Software Development :: Build Tools',
64 ],
65 long_description='''Meson is a cross-platform build system designed to be both as
66 fast and as user friendly as possible. It supports many languages and compilers, including
67 GCC, Clang and Visual Studio. Its build definitions are written in a simple non-turing
68 complete DSL.''')
69
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import os
import sys
+from os import path
if sys.version_info[0] < 3:
print('Tried to install with Python 2, Meson only supports Python 3.')
@@ -25,8 +27,32 @@
# plain distutils when setuptools is not available.
try:
from setuptools import setup
+ from setuptools.command.install_scripts import install_scripts as orig
except ImportError:
from distutils.core import setup
+ from distutils.command.install_scripts import install_scripts as orig
+
+from distutils.file_util import copy_file
+from distutils.dir_util import mkpath
+from stat import ST_MODE
+
+class install_scripts(orig):
+ def run(self):
+ if sys.platform == 'win32':
+ super().run()
+ return
+
+ self.outfiles = []
+ if not self.dry_run:
+ mkpath(self.install_dir)
+
+ # We want the files to be installed without a suffix on Unix
+ for infile in self.get_inputs():
+ in_stripped = infile[:-3] if infile.endswith('.py') else infile
+ outfile = path.join(self.install_dir, in_stripped)
+ # NOTE: Mode is preserved by default
+ copy_file(infile, outfile, dry_run=self.dry_run)
+ self.outfiles.append(outfile)
from mesonbuild.coredata import version
@@ -46,6 +72,7 @@
'mesonconf.py',
'mesonintrospect.py',
'wraptool.py'],
+ cmdclass={'install_scripts': install_scripts},
data_files=[('share/man/man1', ['man/meson.1',
'man/mesonconf.1',
'man/mesonintrospect.1',
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -14,7 +14,9 @@\n # See the License for the specific language governing permissions and\n # limitations under the License.\n \n+import os\n import sys\n+from os import path\n \n if sys.version_info[0] < 3:\n print('Tried to install with Python 2, Meson only supports Python 3.')\n@@ -25,8 +27,32 @@\n # plain distutils when setuptools is not available.\n try:\n from setuptools import setup\n+ from setuptools.command.install_scripts import install_scripts as orig\n except ImportError:\n from distutils.core import setup\n+ from distutils.command.install_scripts import install_scripts as orig\n+\n+from distutils.file_util import copy_file\n+from distutils.dir_util import mkpath\n+from stat import ST_MODE\n+\n+class install_scripts(orig):\n+ def run(self):\n+ if sys.platform == 'win32':\n+ super().run()\n+ return\n+\n+ self.outfiles = []\n+ if not self.dry_run:\n+ mkpath(self.install_dir)\n+\n+ # We want the files to be installed without a suffix on Unix\n+ for infile in self.get_inputs():\n+ in_stripped = infile[:-3] if infile.endswith('.py') else infile\n+ outfile = path.join(self.install_dir, in_stripped)\n+ # NOTE: Mode is preserved by default\n+ copy_file(infile, outfile, dry_run=self.dry_run)\n+ self.outfiles.append(outfile)\n \n from mesonbuild.coredata import version\n \n@@ -46,6 +72,7 @@\n 'mesonconf.py',\n 'mesonintrospect.py',\n 'wraptool.py'],\n+ cmdclass={'install_scripts': install_scripts},\n data_files=[('share/man/man1', ['man/meson.1',\n 'man/mesonconf.1',\n 'man/mesonintrospect.1',\n", "issue": "setup.py: Install scripts without extensions on UNIX-like platforms\nBecause of issue #394 , meson install scripts as `xxx.py` , but in linux, install script with a extension name is no a good practice. And change the installed script name also break some package depend on meson.\n\nCould you deal with it for different platform?\n\nthx.\n\n", "before_files": [{"content": "#!/usr/bin/env python3\n\n# Copyright 2016 The Meson development team\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport sys\n\nif sys.version_info[0] < 3:\n print('Tried to install with Python 2, Meson only supports Python 3.')\n sys.exit(1)\n\n# We need to support Python installations that have nothing but the basic\n# Python installation. 
Use setuptools when possible and fall back to\n# plain distutils when setuptools is not available.\ntry:\n from setuptools import setup\nexcept ImportError:\n from distutils.core import setup\n\nfrom mesonbuild.coredata import version\n\nsetup(name='meson',\n version=version,\n description='A high performance build system',\n author='Jussi Pakkanen',\n author_email='[email protected]',\n url='http://mesonbuild.com',\n license=' Apache License, Version 2.0',\n packages=['mesonbuild',\n 'mesonbuild.modules',\n 'mesonbuild.scripts',\n 'mesonbuild.backend',\n 'mesonbuild.wrap'],\n scripts=['meson.py',\n 'mesonconf.py',\n 'mesonintrospect.py',\n 'wraptool.py'],\n data_files=[('share/man/man1', ['man/meson.1',\n 'man/mesonconf.1',\n 'man/mesonintrospect.1',\n 'man/wraptool.1'])],\n classifiers=['Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Natural Language :: English',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: Microsoft :: Windows',\n 'Operating System :: POSIX :: BSD',\n 'Operating System :: POSIX :: Linux',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Software Development :: Build Tools',\n ],\n long_description='''Meson is a cross-platform build system designed to be both as\nfast and as user friendly as possible. It supports many languages and compilers, including\nGCC, Clang and Visual Studio. Its build definitions are written in a simple non-turing\ncomplete DSL.''')\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\n# Copyright 2016 The Meson development team\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport sys\nfrom os import path\n\nif sys.version_info[0] < 3:\n print('Tried to install with Python 2, Meson only supports Python 3.')\n sys.exit(1)\n\n# We need to support Python installations that have nothing but the basic\n# Python installation. 
Use setuptools when possible and fall back to\n# plain distutils when setuptools is not available.\ntry:\n from setuptools import setup\n from setuptools.command.install_scripts import install_scripts as orig\nexcept ImportError:\n from distutils.core import setup\n from distutils.command.install_scripts import install_scripts as orig\n\nfrom distutils.file_util import copy_file\nfrom distutils.dir_util import mkpath\nfrom stat import ST_MODE\n\nclass install_scripts(orig):\n def run(self):\n if sys.platform == 'win32':\n super().run()\n return\n\n self.outfiles = []\n if not self.dry_run:\n mkpath(self.install_dir)\n\n # We want the files to be installed without a suffix on Unix\n for infile in self.get_inputs():\n in_stripped = infile[:-3] if infile.endswith('.py') else infile\n outfile = path.join(self.install_dir, in_stripped)\n # NOTE: Mode is preserved by default\n copy_file(infile, outfile, dry_run=self.dry_run)\n self.outfiles.append(outfile)\n\nfrom mesonbuild.coredata import version\n\nsetup(name='meson',\n version=version,\n description='A high performance build system',\n author='Jussi Pakkanen',\n author_email='[email protected]',\n url='http://mesonbuild.com',\n license=' Apache License, Version 2.0',\n packages=['mesonbuild',\n 'mesonbuild.modules',\n 'mesonbuild.scripts',\n 'mesonbuild.backend',\n 'mesonbuild.wrap'],\n scripts=['meson.py',\n 'mesonconf.py',\n 'mesonintrospect.py',\n 'wraptool.py'],\n cmdclass={'install_scripts': install_scripts},\n data_files=[('share/man/man1', ['man/meson.1',\n 'man/mesonconf.1',\n 'man/mesonintrospect.1',\n 'man/wraptool.1'])],\n classifiers=['Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Natural Language :: English',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: Microsoft :: Windows',\n 'Operating System :: POSIX :: BSD',\n 'Operating System :: POSIX :: Linux',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Software Development :: Build Tools',\n ],\n long_description='''Meson is a cross-platform build system designed to be both as\nfast and as user friendly as possible. It supports many languages and compilers, including\nGCC, Clang and Visual Studio. Its build definitions are written in a simple non-turing\ncomplete DSL.''')\n", "path": "setup.py"}]}
| 1,048 | 436 |
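The renaming rule at the heart of the patch, isolated as a pure function for clarity (stdlib only; the function name is illustrative, not from the patch):

```python
from os import path

def unix_script_path(install_dir: str, infile: str) -> str:
    """Map 'meson.py' to '<install_dir>/meson' on Unix-like targets."""
    stripped = infile[:-3] if infile.endswith(".py") else infile
    return path.join(install_dir, stripped)

assert unix_script_path("/usr/local/bin", "meson.py") == "/usr/local/bin/meson"
assert unix_script_path("/usr/local/bin", "wraptool") == "/usr/local/bin/wraptool"
```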
gh_patches_debug_16211
|
rasdani/github-patches
|
git_diff
|
google__jax-326
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
jax missing scipy.special.expit
Would be possible to add gradients for `expit` and `logit`?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `jax/scipy/special.py`
Content:
```
1 # Copyright 2018 Google LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from __future__ import absolute_import
16 from __future__ import division
17 from __future__ import print_function
18
19 import scipy.special as osp_special
20
21 from .. import lax
22 from ..numpy.lax_numpy import _wraps
23
24
25 # need to create new functions because _wraps sets the __name__ attribute
26 gammaln = _wraps(osp_special.gammaln)(lambda x: lax.lgamma(x))
27 digamma = _wraps(osp_special.digamma)(lambda x: lax.digamma(x))
28 erf = _wraps(osp_special.erf)(lambda x: lax.erf(x))
29 erfc = _wraps(osp_special.erfc)(lambda x: lax.erfc(x))
30 erfinv = _wraps(osp_special.erfinv)(lambda x: lax.erf_inv(x))
31
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/jax/scipy/special.py b/jax/scipy/special.py
--- a/jax/scipy/special.py
+++ b/jax/scipy/special.py
@@ -19,7 +19,7 @@
import scipy.special as osp_special
from .. import lax
-from ..numpy.lax_numpy import _wraps
+from ..numpy.lax_numpy import _wraps, asarray
# need to create new functions because _wraps sets the __name__ attribute
@@ -28,3 +28,16 @@
erf = _wraps(osp_special.erf)(lambda x: lax.erf(x))
erfc = _wraps(osp_special.erfc)(lambda x: lax.erfc(x))
erfinv = _wraps(osp_special.erfinv)(lambda x: lax.erf_inv(x))
+
+
+@_wraps(osp_special.logit)
+def logit(x):
+ x = asarray(x)
+ return lax.log(lax.div(x, lax.sub(lax._const(x, 1), x)))
+
+
+@_wraps(osp_special.expit)
+def expit(x):
+ x = asarray(x)
+ one = lax._const(x, 1)
+ return lax.div(one, lax.add(one, lax.exp(lax.neg(x))))
|
{"golden_diff": "diff --git a/jax/scipy/special.py b/jax/scipy/special.py\n--- a/jax/scipy/special.py\n+++ b/jax/scipy/special.py\n@@ -19,7 +19,7 @@\n import scipy.special as osp_special\n \n from .. import lax\n-from ..numpy.lax_numpy import _wraps\n+from ..numpy.lax_numpy import _wraps, asarray\n \n \n # need to create new functions because _wraps sets the __name__ attribute\n@@ -28,3 +28,16 @@\n erf = _wraps(osp_special.erf)(lambda x: lax.erf(x))\n erfc = _wraps(osp_special.erfc)(lambda x: lax.erfc(x))\n erfinv = _wraps(osp_special.erfinv)(lambda x: lax.erf_inv(x))\n+\n+\n+@_wraps(osp_special.logit)\n+def logit(x):\n+ x = asarray(x)\n+ return lax.log(lax.div(x, lax.sub(lax._const(x, 1), x)))\n+\n+\n+@_wraps(osp_special.expit)\n+def expit(x):\n+ x = asarray(x)\n+ one = lax._const(x, 1)\n+ return lax.div(one, lax.add(one, lax.exp(lax.neg(x))))\n", "issue": "jax missing scipy.special.expit\nWould be possible to add gradients for `expit` and `logit`?\n", "before_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport scipy.special as osp_special\n\nfrom .. import lax\nfrom ..numpy.lax_numpy import _wraps\n\n\n# need to create new functions because _wraps sets the __name__ attribute\ngammaln = _wraps(osp_special.gammaln)(lambda x: lax.lgamma(x))\ndigamma = _wraps(osp_special.digamma)(lambda x: lax.digamma(x))\nerf = _wraps(osp_special.erf)(lambda x: lax.erf(x))\nerfc = _wraps(osp_special.erfc)(lambda x: lax.erfc(x))\nerfinv = _wraps(osp_special.erfinv)(lambda x: lax.erf_inv(x))\n", "path": "jax/scipy/special.py"}], "after_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport scipy.special as osp_special\n\nfrom .. 
import lax\nfrom ..numpy.lax_numpy import _wraps, asarray\n\n\n# need to create new functions because _wraps sets the __name__ attribute\ngammaln = _wraps(osp_special.gammaln)(lambda x: lax.lgamma(x))\ndigamma = _wraps(osp_special.digamma)(lambda x: lax.digamma(x))\nerf = _wraps(osp_special.erf)(lambda x: lax.erf(x))\nerfc = _wraps(osp_special.erfc)(lambda x: lax.erfc(x))\nerfinv = _wraps(osp_special.erfinv)(lambda x: lax.erf_inv(x))\n\n\n@_wraps(osp_special.logit)\ndef logit(x):\n x = asarray(x)\n return lax.log(lax.div(x, lax.sub(lax._const(x, 1), x)))\n\n\n@_wraps(osp_special.expit)\ndef expit(x):\n x = asarray(x)\n one = lax._const(x, 1)\n return lax.div(one, lax.add(one, lax.exp(lax.neg(x))))\n", "path": "jax/scipy/special.py"}]}
| 642 | 290 |
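A quick check that the issue's request — gradients — falls out for free once `expit` is composed from differentiable `lax` primitives (assumes a current JAX installation where `jax.scipy.special` exposes `expit`):

```python
import jax
import jax.scipy.special as jsp

dexpit = jax.grad(jsp.expit)
print(jsp.expit(0.0))   # 0.5
print(dexpit(0.0))      # 0.25 == sigmoid(0) * (1 - sigmoid(0))
```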
gh_patches_debug_516
|
rasdani/github-patches
|
git_diff
|
meltano__meltano-7210
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
feature: Python 3.11 support
### Feature scope
Other
### Description
Python 3.11.0 is planned to be officially released as of 2022-10-24. We should add it to our test matrix, and build Docker images for it for each release.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `noxfile.py`
Content:
```
1 """Nox configuration."""
2
3 from __future__ import annotations
4
5 import os
6 import sys
7 from pathlib import Path
8 from random import randint
9 from textwrap import dedent
10
11 try:
12 from nox_poetry import Session
13 from nox_poetry import session as nox_session
14 except ImportError:
15 message = f"""\
16 Nox failed to import the 'nox-poetry' package.
17 Please install it using the following command:
18 {sys.executable} -m pip install nox-poetry"""
19 raise SystemExit(dedent(message)) from None
20
21
22 package = "meltano"
23 python_versions = ["3.10", "3.9", "3.8", "3.7"]
24 main_python_version = "3.9"
25 locations = "src", "tests", "noxfile.py"
26
27
28 @nox_session(python=python_versions)
29 def tests(session: Session) -> None:
30 """Execute pytest tests and compute coverage.
31
32 Args:
33 session: Nox session.
34 """
35 backend_db = os.environ.get("PYTEST_BACKEND", "sqlite")
36
37 if backend_db == "mssql":
38 session.install(".[mssql,azure,gcs,s3]")
39
40 else:
41 session.install(".[azure,gcs,s3]")
42
43 session.install(
44 "colorama", # colored output in Windows
45 "freezegun",
46 "mock",
47 "pytest",
48 "pytest-asyncio",
49 "pytest-cov",
50 "pytest-docker",
51 "pytest-order",
52 "pytest-randomly",
53 "pytest-xdist",
54 "requests-mock",
55 )
56
57 try:
58 session.run(
59 "pytest",
60 f"--randomly-seed={randint(0, 2**32-1)}", # noqa: S311, WPS432
61 *session.posargs,
62 env={"NOX_CURRENT_SESSION": "tests"},
63 )
64 finally:
65 if session.interactive:
66 session.notify("coverage", posargs=[])
67
68
69 @nox_session(python=main_python_version)
70 def coverage(session: Session) -> None:
71 """Upload coverage data.
72
73 Args:
74 session: Nox session.
75 """
76 args = session.posargs or ["report"]
77
78 session.install("coverage[toml]")
79
80 if not session.posargs and any(Path().glob(".coverage.*")):
81 session.run("coverage", "combine")
82
83 session.run("coverage", *args)
84
85
86 @nox_session(python=main_python_version)
87 def mypy(session: Session) -> None:
88 """Run mypy type checking.
89
90 Args:
91 session: Nox session.
92 """
93 args = session.posargs or ["src/meltano", "--exclude", "src/meltano/migrations/"]
94
95 session.install(".")
96 session.install(
97 "mypy",
98 "sqlalchemy2-stubs",
99 "types-croniter",
100 "types-psutil",
101 "types-requests",
102 "boto3-stubs[essential]",
103 )
104 session.run("mypy", *args)
105
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/noxfile.py b/noxfile.py
--- a/noxfile.py
+++ b/noxfile.py
@@ -20,7 +20,7 @@
package = "meltano"
-python_versions = ["3.10", "3.9", "3.8", "3.7"]
+python_versions = ["3.11", "3.10", "3.9", "3.8", "3.7"]
main_python_version = "3.9"
locations = "src", "tests", "noxfile.py"
|
{"golden_diff": "diff --git a/noxfile.py b/noxfile.py\n--- a/noxfile.py\n+++ b/noxfile.py\n@@ -20,7 +20,7 @@\n \n \n package = \"meltano\"\n-python_versions = [\"3.10\", \"3.9\", \"3.8\", \"3.7\"]\n+python_versions = [\"3.11\", \"3.10\", \"3.9\", \"3.8\", \"3.7\"]\n main_python_version = \"3.9\"\n locations = \"src\", \"tests\", \"noxfile.py\"\n", "issue": "feature: Python 3.11 support\n### Feature scope\n\nOther\n\n### Description\n\nPython 3.11.0 is planned to be officially released as of 2022-10-24. We should add it to our test matrix, and build Docker images for it for each release.\n", "before_files": [{"content": "\"\"\"Nox configuration.\"\"\"\n\nfrom __future__ import annotations\n\nimport os\nimport sys\nfrom pathlib import Path\nfrom random import randint\nfrom textwrap import dedent\n\ntry:\n from nox_poetry import Session\n from nox_poetry import session as nox_session\nexcept ImportError:\n message = f\"\"\"\\\n Nox failed to import the 'nox-poetry' package.\n Please install it using the following command:\n {sys.executable} -m pip install nox-poetry\"\"\"\n raise SystemExit(dedent(message)) from None\n\n\npackage = \"meltano\"\npython_versions = [\"3.10\", \"3.9\", \"3.8\", \"3.7\"]\nmain_python_version = \"3.9\"\nlocations = \"src\", \"tests\", \"noxfile.py\"\n\n\n@nox_session(python=python_versions)\ndef tests(session: Session) -> None:\n \"\"\"Execute pytest tests and compute coverage.\n\n Args:\n session: Nox session.\n \"\"\"\n backend_db = os.environ.get(\"PYTEST_BACKEND\", \"sqlite\")\n\n if backend_db == \"mssql\":\n session.install(\".[mssql,azure,gcs,s3]\")\n\n else:\n session.install(\".[azure,gcs,s3]\")\n\n session.install(\n \"colorama\", # colored output in Windows\n \"freezegun\",\n \"mock\",\n \"pytest\",\n \"pytest-asyncio\",\n \"pytest-cov\",\n \"pytest-docker\",\n \"pytest-order\",\n \"pytest-randomly\",\n \"pytest-xdist\",\n \"requests-mock\",\n )\n\n try:\n session.run(\n \"pytest\",\n f\"--randomly-seed={randint(0, 2**32-1)}\", # noqa: S311, WPS432\n *session.posargs,\n env={\"NOX_CURRENT_SESSION\": \"tests\"},\n )\n finally:\n if session.interactive:\n session.notify(\"coverage\", posargs=[])\n\n\n@nox_session(python=main_python_version)\ndef coverage(session: Session) -> None:\n \"\"\"Upload coverage data.\n\n Args:\n session: Nox session.\n \"\"\"\n args = session.posargs or [\"report\"]\n\n session.install(\"coverage[toml]\")\n\n if not session.posargs and any(Path().glob(\".coverage.*\")):\n session.run(\"coverage\", \"combine\")\n\n session.run(\"coverage\", *args)\n\n\n@nox_session(python=main_python_version)\ndef mypy(session: Session) -> None:\n \"\"\"Run mypy type checking.\n\n Args:\n session: Nox session.\n \"\"\"\n args = session.posargs or [\"src/meltano\", \"--exclude\", \"src/meltano/migrations/\"]\n\n session.install(\".\")\n session.install(\n \"mypy\",\n \"sqlalchemy2-stubs\",\n \"types-croniter\",\n \"types-psutil\",\n \"types-requests\",\n \"boto3-stubs[essential]\",\n )\n session.run(\"mypy\", *args)\n", "path": "noxfile.py"}], "after_files": [{"content": "\"\"\"Nox configuration.\"\"\"\n\nfrom __future__ import annotations\n\nimport os\nimport sys\nfrom pathlib import Path\nfrom random import randint\nfrom textwrap import dedent\n\ntry:\n from nox_poetry import Session\n from nox_poetry import session as nox_session\nexcept ImportError:\n message = f\"\"\"\\\n Nox failed to import the 'nox-poetry' package.\n Please install it using the following command:\n {sys.executable} -m pip install nox-poetry\"\"\"\n raise 
SystemExit(dedent(message)) from None\n\n\npackage = \"meltano\"\npython_versions = [\"3.11\", \"3.10\", \"3.9\", \"3.8\", \"3.7\"]\nmain_python_version = \"3.9\"\nlocations = \"src\", \"tests\", \"noxfile.py\"\n\n\n@nox_session(python=python_versions)\ndef tests(session: Session) -> None:\n \"\"\"Execute pytest tests and compute coverage.\n\n Args:\n session: Nox session.\n \"\"\"\n backend_db = os.environ.get(\"PYTEST_BACKEND\", \"sqlite\")\n\n if backend_db == \"mssql\":\n session.install(\".[mssql,azure,gcs,s3]\")\n\n else:\n session.install(\".[azure,gcs,s3]\")\n\n session.install(\n \"colorama\", # colored output in Windows\n \"freezegun\",\n \"mock\",\n \"pytest\",\n \"pytest-asyncio\",\n \"pytest-cov\",\n \"pytest-docker\",\n \"pytest-order\",\n \"pytest-randomly\",\n \"pytest-xdist\",\n \"requests-mock\",\n )\n\n try:\n session.run(\n \"pytest\",\n f\"--randomly-seed={randint(0, 2**32-1)}\", # noqa: S311, WPS432\n *session.posargs,\n env={\"NOX_CURRENT_SESSION\": \"tests\"},\n )\n finally:\n if session.interactive:\n session.notify(\"coverage\", posargs=[])\n\n\n@nox_session(python=main_python_version)\ndef coverage(session: Session) -> None:\n \"\"\"Upload coverage data.\n\n Args:\n session: Nox session.\n \"\"\"\n args = session.posargs or [\"report\"]\n\n session.install(\"coverage[toml]\")\n\n if not session.posargs and any(Path().glob(\".coverage.*\")):\n session.run(\"coverage\", \"combine\")\n\n session.run(\"coverage\", *args)\n\n\n@nox_session(python=main_python_version)\ndef mypy(session: Session) -> None:\n \"\"\"Run mypy type checking.\n\n Args:\n session: Nox session.\n \"\"\"\n args = session.posargs or [\"src/meltano\", \"--exclude\", \"src/meltano/migrations/\"]\n\n session.install(\".\")\n session.install(\n \"mypy\",\n \"sqlalchemy2-stubs\",\n \"types-croniter\",\n \"types-psutil\",\n \"types-requests\",\n \"boto3-stubs[essential]\",\n )\n session.run(\"mypy\", *args)\n", "path": "noxfile.py"}]}
| 1,193 | 125 |
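The fix above only extends the version matrix in `noxfile.py`. A hypothetical smoke check that the new sessions are generated (assuming `nox` and `nox-poetry` are installed in the environment):

```python
# Confirm nox now advertises Python 3.11 sessions.
import subprocess

listing = subprocess.run(["nox", "--list"], capture_output=True, text=True).stdout
assert "tests-3.11" in listing  # nox names parametrized sessions like "tests-3.11"
```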
gh_patches_debug_11609
|
rasdani/github-patches
|
git_diff
|
google-research__text-to-text-transfer-transformer-39
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Do we support GPU distributed training?
Hi, thanks for the awesome project!
Does the code base support distributed training? If not, is it possible to support it after some code modifications?
By the way, how do I set the batch size and the number of GPUs if I want to train the model on GPUs?
Thank you for your kind attention.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # Copyright 2019 The T5 Authors.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Install T5."""
16
17 import setuptools
18
19 # Get the long description from the README file.
20 with open('README.md') as fp:
21 _LONG_DESCRIPTION = fp.read()
22
23 setuptools.setup(
24 name='t5',
25 version='0.1.7',
26 description='Text-to-text transfer transformer',
27 long_description=_LONG_DESCRIPTION,
28 long_description_content_type='text/markdown',
29 author='Google Inc.',
30 author_email='[email protected]',
31 url='http://github.com/google-research/text-to-text-transfer-transformer',
32 license='Apache 2.0',
33 packages=setuptools.find_packages(),
34 package_data={
35 '': ['*.gin'],
36 },
37 scripts=[],
38 install_requires=[
39 'absl-py',
40 'allennlp',
41 'babel',
42 'future',
43 'gin-config',
44 'mesh-tensorflow[transformer]>=0.1.8',
45 'nltk',
46 'numpy',
47 'pandas',
48 'rouge-score',
49 'sacrebleu',
50 'scikit-learn',
51 'scipy',
52 'sentencepiece',
53 'six',
54 'tensorflow-datasets>=1.3.2',
55 'tensorflow-text==1.15.0rc0',
56 ],
57 extras_require={
58 'tensorflow': ['tensorflow==1.15'],
59 'gcp': ['gevent', 'google-api-python-client', 'google-compute-engine',
60 'google-cloud-storage', 'oauth2client'],
61 },
62 entry_points={
63 'console_scripts': [
64 't5_mesh_transformer = '
65 't5.models.mesh_transformer_main:console_entry_point',
66 ],
67 },
68 classifiers=[
69 'Development Status :: 4 - Beta',
70 'Intended Audience :: Developers',
71 'Intended Audience :: Science/Research',
72 'License :: OSI Approved :: Apache Software License',
73 'Topic :: Scientific/Engineering :: Artificial Intelligence',
74 ],
75 keywords='text nlp machinelearning',
76 )
77
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
setuptools.setup(
name='t5',
- version='0.1.7',
+ version='0.1.8',
description='Text-to-text transfer transformer',
long_description=_LONG_DESCRIPTION,
long_description_content_type='text/markdown',
@@ -41,7 +41,7 @@
'babel',
'future',
'gin-config',
- 'mesh-tensorflow[transformer]>=0.1.8',
+ 'mesh-tensorflow[transformer]>=0.1.9',
'nltk',
'numpy',
'pandas',
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -22,7 +22,7 @@\n \n setuptools.setup(\n name='t5',\n- version='0.1.7',\n+ version='0.1.8',\n description='Text-to-text transfer transformer',\n long_description=_LONG_DESCRIPTION,\n long_description_content_type='text/markdown',\n@@ -41,7 +41,7 @@\n 'babel',\n 'future',\n 'gin-config',\n- 'mesh-tensorflow[transformer]>=0.1.8',\n+ 'mesh-tensorflow[transformer]>=0.1.9',\n 'nltk',\n 'numpy',\n 'pandas',\n", "issue": "Do we support GPU distributed training?\nHi, thanks for the awesome project!\r\n\r\nDoes the code base support distributed training? If not, is it possible to support it after some code modifications?\r\n\r\nBy the way, what is the way to set batch size and gpu number if I want to use GPU to train the model?\r\n\r\nThank you for your kind attention.\n", "before_files": [{"content": "# Copyright 2019 The T5 Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Install T5.\"\"\"\n\nimport setuptools\n\n# Get the long description from the README file.\nwith open('README.md') as fp:\n _LONG_DESCRIPTION = fp.read()\n\nsetuptools.setup(\n name='t5',\n version='0.1.7',\n description='Text-to-text transfer transformer',\n long_description=_LONG_DESCRIPTION,\n long_description_content_type='text/markdown',\n author='Google Inc.',\n author_email='[email protected]',\n url='http://github.com/google-research/text-to-text-transfer-transformer',\n license='Apache 2.0',\n packages=setuptools.find_packages(),\n package_data={\n '': ['*.gin'],\n },\n scripts=[],\n install_requires=[\n 'absl-py',\n 'allennlp',\n 'babel',\n 'future',\n 'gin-config',\n 'mesh-tensorflow[transformer]>=0.1.8',\n 'nltk',\n 'numpy',\n 'pandas',\n 'rouge-score',\n 'sacrebleu',\n 'scikit-learn',\n 'scipy',\n 'sentencepiece',\n 'six',\n 'tensorflow-datasets>=1.3.2',\n 'tensorflow-text==1.15.0rc0',\n ],\n extras_require={\n 'tensorflow': ['tensorflow==1.15'],\n 'gcp': ['gevent', 'google-api-python-client', 'google-compute-engine',\n 'google-cloud-storage', 'oauth2client'],\n },\n entry_points={\n 'console_scripts': [\n 't5_mesh_transformer = '\n 't5.models.mesh_transformer_main:console_entry_point',\n ],\n },\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n ],\n keywords='text nlp machinelearning',\n)\n", "path": "setup.py"}], "after_files": [{"content": "# Copyright 2019 The T5 Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 
implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Install T5.\"\"\"\n\nimport setuptools\n\n# Get the long description from the README file.\nwith open('README.md') as fp:\n _LONG_DESCRIPTION = fp.read()\n\nsetuptools.setup(\n name='t5',\n version='0.1.8',\n description='Text-to-text transfer transformer',\n long_description=_LONG_DESCRIPTION,\n long_description_content_type='text/markdown',\n author='Google Inc.',\n author_email='[email protected]',\n url='http://github.com/google-research/text-to-text-transfer-transformer',\n license='Apache 2.0',\n packages=setuptools.find_packages(),\n package_data={\n '': ['*.gin'],\n },\n scripts=[],\n install_requires=[\n 'absl-py',\n 'allennlp',\n 'babel',\n 'future',\n 'gin-config',\n 'mesh-tensorflow[transformer]>=0.1.9',\n 'nltk',\n 'numpy',\n 'pandas',\n 'rouge-score',\n 'sacrebleu',\n 'scikit-learn',\n 'scipy',\n 'sentencepiece',\n 'six',\n 'tensorflow-datasets>=1.3.2',\n 'tensorflow-text==1.15.0rc0',\n ],\n extras_require={\n 'tensorflow': ['tensorflow==1.15'],\n 'gcp': ['gevent', 'google-api-python-client', 'google-compute-engine',\n 'google-cloud-storage', 'oauth2client'],\n },\n entry_points={\n 'console_scripts': [\n 't5_mesh_transformer = '\n 't5.models.mesh_transformer_main:console_entry_point',\n ],\n },\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n ],\n keywords='text nlp machinelearning',\n)\n", "path": "setup.py"}]}
| 1,029 | 161 |
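The patch here is a dependency pin rather than a code change. A small sketch of verifying such a pin with the `packaging` library (an assumption for illustration; the repo itself does not do this):

```python
from packaging.requirements import Requirement
from packaging.version import Version

req = Requirement("mesh-tensorflow[transformer]>=0.1.9")
assert Version("0.1.9") in req.specifier   # new minimum satisfies the pin
assert Version("0.1.8") not in req.specifier  # old version is now excluded
```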
gh_patches_debug_43005
|
rasdani/github-patches
|
git_diff
|
deepset-ai__haystack-6304
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`HTMLToDocument` to add `ByteStream` metadata to Document
The `HTMLToDocument` converter, when receiving a `ByteStream` from the `LinkContentFetcher`, does not add the URL to the metadata of the resulting Document, even though the URL is present in the metadata of the `ByteStream`.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `haystack/preview/components/file_converters/html.py`
Content:
```
1 import logging
2 from typing import List, Union
3 from pathlib import Path
4
5 from haystack.preview import Document, component
6 from haystack.preview.dataclasses import ByteStream
7 from haystack.preview.lazy_imports import LazyImport
8
9 logger = logging.getLogger(__name__)
10
11 with LazyImport("Run 'pip install boilerpy3'") as boilerpy3_import:
12 from boilerpy3 import extractors
13
14
15 @component
16 class HTMLToDocument:
17 """
18 Converts an HTML file to a Document.
19 """
20
21 def __init__(self):
22 """
23 Initializes the HTMLToDocument component.
24 """
25 boilerpy3_import.check()
26
27 @component.output_types(documents=List[Document])
28 def run(self, sources: List[Union[str, Path, ByteStream]]):
29 """
30 Converts a list of HTML files to Documents.
31
32 :param sources: List of HTML file paths or ByteStream objects.
33 :return: List of converted Documents.
34 """
35 documents = []
36 extractor = extractors.ArticleExtractor(raise_on_failure=False)
37 for source in sources:
38 try:
39 file_content = self._extract_content(source)
40 except Exception as e:
41 logger.warning("Could not read %s. Skipping it. Error: %s", source, e)
42 continue
43 try:
44 text = extractor.get_content(file_content)
45 except Exception as conversion_e: # Consider specifying the expected exception type(s) here
46 logger.warning("Failed to extract text from %s. Skipping it. Error: %s", source, conversion_e)
47 continue
48
49 document = Document(content=text)
50 documents.append(document)
51
52 return {"documents": documents}
53
54 def _extract_content(self, source: Union[str, Path, ByteStream]) -> str:
55 """
56 Extracts content from the given data source
57 :param source: The data source to extract content from.
58 :return: The extracted content.
59 """
60 if isinstance(source, (str, Path)):
61 with open(source) as text_file:
62 return text_file.read()
63 if isinstance(source, ByteStream):
64 return source.data.decode("utf-8")
65
66 raise ValueError(f"Unsupported source type: {type(source)}")
67
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/haystack/preview/components/file_converters/html.py b/haystack/preview/components/file_converters/html.py
--- a/haystack/preview/components/file_converters/html.py
+++ b/haystack/preview/components/file_converters/html.py
@@ -1,6 +1,6 @@
import logging
-from typing import List, Union
from pathlib import Path
+from typing import Any, Dict, List, Optional, Union
from haystack.preview import Document, component
from haystack.preview.dataclasses import ByteStream
@@ -16,6 +16,18 @@
class HTMLToDocument:
"""
Converts an HTML file to a Document.
+
+ Usage example:
+ ```python
+ from haystack.preview.components.file_converters.html import HTMLToDocument
+
+ converter = HTMLToDocument()
+ results = converter.run(sources=["sample.html"])
+ documents = results["documents"]
+ print(documents[0].content)
+ # 'This is a text from the HTML file.'
+ ```
+
"""
def __init__(self):
@@ -25,18 +37,30 @@
boilerpy3_import.check()
@component.output_types(documents=List[Document])
- def run(self, sources: List[Union[str, Path, ByteStream]]):
+ def run(self, sources: List[Union[str, Path, ByteStream]], meta: Optional[List[Dict[str, Any]]] = None):
"""
Converts a list of HTML files to Documents.
:param sources: List of HTML file paths or ByteStream objects.
+ :param meta: Optional list of metadata to attach to the Documents.
+ The length of the list must match the number of sources. Defaults to `None`.
:return: List of converted Documents.
"""
+
documents = []
+
+ # Create metadata placeholders if not provided
+ if meta:
+ if len(sources) != len(meta):
+ raise ValueError("The length of the metadata list must match the number of sources.")
+ else:
+ meta = [{}] * len(sources)
+
extractor = extractors.ArticleExtractor(raise_on_failure=False)
- for source in sources:
+
+ for source, metadata in zip(sources, meta):
try:
- file_content = self._extract_content(source)
+ file_content, extracted_meta = self._extract_content(source)
except Exception as e:
logger.warning("Could not read %s. Skipping it. Error: %s", source, e)
continue
@@ -46,21 +70,25 @@
logger.warning("Failed to extract text from %s. Skipping it. Error: %s", source, conversion_e)
continue
- document = Document(content=text)
+ # Merge metadata received from ByteStream with supplied metadata
+ if extracted_meta:
+ # Supplied metadata overwrites metadata from ByteStream for overlapping keys.
+ metadata = {**extracted_meta, **metadata}
+ document = Document(content=text, meta=metadata)
documents.append(document)
return {"documents": documents}
- def _extract_content(self, source: Union[str, Path, ByteStream]) -> str:
+ def _extract_content(self, source: Union[str, Path, ByteStream]) -> tuple:
"""
Extracts content from the given data source
:param source: The data source to extract content from.
- :return: The extracted content.
+ :return: The extracted content and metadata.
"""
if isinstance(source, (str, Path)):
with open(source) as text_file:
- return text_file.read()
+ return (text_file.read(), None)
if isinstance(source, ByteStream):
- return source.data.decode("utf-8")
+ return (source.data.decode("utf-8"), source.metadata)
raise ValueError(f"Unsupported source type: {type(source)}")
|
{"golden_diff": "diff --git a/haystack/preview/components/file_converters/html.py b/haystack/preview/components/file_converters/html.py\n--- a/haystack/preview/components/file_converters/html.py\n+++ b/haystack/preview/components/file_converters/html.py\n@@ -1,6 +1,6 @@\n import logging\n-from typing import List, Union\n from pathlib import Path\n+from typing import Any, Dict, List, Optional, Union\n \n from haystack.preview import Document, component\n from haystack.preview.dataclasses import ByteStream\n@@ -16,6 +16,18 @@\n class HTMLToDocument:\n \"\"\"\n Converts an HTML file to a Document.\n+\n+ Usage example:\n+ ```python\n+ from haystack.preview.components.file_converters.html import HTMLToDocument\n+\n+ converter = HTMLToDocument()\n+ results = converter.run(sources=[\"sample.html\"])\n+ documents = results[\"documents\"]\n+ print(documents[0].content)\n+ # 'This is a text from the HTML file.'\n+ ```\n+\n \"\"\"\n \n def __init__(self):\n@@ -25,18 +37,30 @@\n boilerpy3_import.check()\n \n @component.output_types(documents=List[Document])\n- def run(self, sources: List[Union[str, Path, ByteStream]]):\n+ def run(self, sources: List[Union[str, Path, ByteStream]], meta: Optional[List[Dict[str, Any]]] = None):\n \"\"\"\n Converts a list of HTML files to Documents.\n \n :param sources: List of HTML file paths or ByteStream objects.\n+ :param meta: Optional list of metadata to attach to the Documents.\n+ The length of the list must match the number of sources. Defaults to `None`.\n :return: List of converted Documents.\n \"\"\"\n+\n documents = []\n+\n+ # Create metadata placeholders if not provided\n+ if meta:\n+ if len(sources) != len(meta):\n+ raise ValueError(\"The length of the metadata list must match the number of sources.\")\n+ else:\n+ meta = [{}] * len(sources)\n+\n extractor = extractors.ArticleExtractor(raise_on_failure=False)\n- for source in sources:\n+\n+ for source, metadata in zip(sources, meta):\n try:\n- file_content = self._extract_content(source)\n+ file_content, extracted_meta = self._extract_content(source)\n except Exception as e:\n logger.warning(\"Could not read %s. Skipping it. Error: %s\", source, e)\n continue\n@@ -46,21 +70,25 @@\n logger.warning(\"Failed to extract text from %s. Skipping it. 
Error: %s\", source, conversion_e)\n continue\n \n- document = Document(content=text)\n+ # Merge metadata received from ByteStream with supplied metadata\n+ if extracted_meta:\n+ # Supplied metadata overwrites metadata from ByteStream for overlapping keys.\n+ metadata = {**extracted_meta, **metadata}\n+ document = Document(content=text, meta=metadata)\n documents.append(document)\n \n return {\"documents\": documents}\n \n- def _extract_content(self, source: Union[str, Path, ByteStream]) -> str:\n+ def _extract_content(self, source: Union[str, Path, ByteStream]) -> tuple:\n \"\"\"\n Extracts content from the given data source\n :param source: The data source to extract content from.\n- :return: The extracted content.\n+ :return: The extracted content and metadata.\n \"\"\"\n if isinstance(source, (str, Path)):\n with open(source) as text_file:\n- return text_file.read()\n+ return (text_file.read(), None)\n if isinstance(source, ByteStream):\n- return source.data.decode(\"utf-8\")\n+ return (source.data.decode(\"utf-8\"), source.metadata)\n \n raise ValueError(f\"Unsupported source type: {type(source)}\")\n", "issue": "`HTMLToDocument` to add `ByteStream` metadata to Document \n`HTMLToDocument` converter, when receiving a `ByteStream` from the `LinkContentFetcher` does not add the url to the metadata of the Document. The URL is in the metadata of the ByteStream\r\n\n", "before_files": [{"content": "import logging\nfrom typing import List, Union\nfrom pathlib import Path\n\nfrom haystack.preview import Document, component\nfrom haystack.preview.dataclasses import ByteStream\nfrom haystack.preview.lazy_imports import LazyImport\n\nlogger = logging.getLogger(__name__)\n\nwith LazyImport(\"Run 'pip install boilerpy3'\") as boilerpy3_import:\n from boilerpy3 import extractors\n\n\n@component\nclass HTMLToDocument:\n \"\"\"\n Converts an HTML file to a Document.\n \"\"\"\n\n def __init__(self):\n \"\"\"\n Initializes the HTMLToDocument component.\n \"\"\"\n boilerpy3_import.check()\n\n @component.output_types(documents=List[Document])\n def run(self, sources: List[Union[str, Path, ByteStream]]):\n \"\"\"\n Converts a list of HTML files to Documents.\n\n :param sources: List of HTML file paths or ByteStream objects.\n :return: List of converted Documents.\n \"\"\"\n documents = []\n extractor = extractors.ArticleExtractor(raise_on_failure=False)\n for source in sources:\n try:\n file_content = self._extract_content(source)\n except Exception as e:\n logger.warning(\"Could not read %s. Skipping it. Error: %s\", source, e)\n continue\n try:\n text = extractor.get_content(file_content)\n except Exception as conversion_e: # Consider specifying the expected exception type(s) here\n logger.warning(\"Failed to extract text from %s. Skipping it. 
Error: %s\", source, conversion_e)\n continue\n\n document = Document(content=text)\n documents.append(document)\n\n return {\"documents\": documents}\n\n def _extract_content(self, source: Union[str, Path, ByteStream]) -> str:\n \"\"\"\n Extracts content from the given data source\n :param source: The data source to extract content from.\n :return: The extracted content.\n \"\"\"\n if isinstance(source, (str, Path)):\n with open(source) as text_file:\n return text_file.read()\n if isinstance(source, ByteStream):\n return source.data.decode(\"utf-8\")\n\n raise ValueError(f\"Unsupported source type: {type(source)}\")\n", "path": "haystack/preview/components/file_converters/html.py"}], "after_files": [{"content": "import logging\nfrom pathlib import Path\nfrom typing import Any, Dict, List, Optional, Union\n\nfrom haystack.preview import Document, component\nfrom haystack.preview.dataclasses import ByteStream\nfrom haystack.preview.lazy_imports import LazyImport\n\nlogger = logging.getLogger(__name__)\n\nwith LazyImport(\"Run 'pip install boilerpy3'\") as boilerpy3_import:\n from boilerpy3 import extractors\n\n\n@component\nclass HTMLToDocument:\n \"\"\"\n Converts an HTML file to a Document.\n\n Usage example:\n ```python\n from haystack.preview.components.file_converters.html import HTMLToDocument\n\n converter = HTMLToDocument()\n results = converter.run(sources=[\"sample.html\"])\n documents = results[\"documents\"]\n print(documents[0].content)\n # 'This is a text from the HTML file.'\n ```\n\n \"\"\"\n\n def __init__(self):\n \"\"\"\n Initializes the HTMLToDocument component.\n \"\"\"\n boilerpy3_import.check()\n\n @component.output_types(documents=List[Document])\n def run(self, sources: List[Union[str, Path, ByteStream]], meta: Optional[List[Dict[str, Any]]] = None):\n \"\"\"\n Converts a list of HTML files to Documents.\n\n :param sources: List of HTML file paths or ByteStream objects.\n :param meta: Optional list of metadata to attach to the Documents.\n The length of the list must match the number of sources. Defaults to `None`.\n :return: List of converted Documents.\n \"\"\"\n\n documents = []\n\n # Create metadata placeholders if not provided\n if meta:\n if len(sources) != len(meta):\n raise ValueError(\"The length of the metadata list must match the number of sources.\")\n else:\n meta = [{}] * len(sources)\n\n extractor = extractors.ArticleExtractor(raise_on_failure=False)\n\n for source, metadata in zip(sources, meta):\n try:\n file_content, extracted_meta = self._extract_content(source)\n except Exception as e:\n logger.warning(\"Could not read %s. Skipping it. Error: %s\", source, e)\n continue\n try:\n text = extractor.get_content(file_content)\n except Exception as conversion_e: # Consider specifying the expected exception type(s) here\n logger.warning(\"Failed to extract text from %s. Skipping it. 
Error: %s\", source, conversion_e)\n continue\n\n # Merge metadata received from ByteStream with supplied metadata\n if extracted_meta:\n # Supplied metadata overwrites metadata from ByteStream for overlapping keys.\n metadata = {**extracted_meta, **metadata}\n document = Document(content=text, meta=metadata)\n documents.append(document)\n\n return {\"documents\": documents}\n\n def _extract_content(self, source: Union[str, Path, ByteStream]) -> tuple:\n \"\"\"\n Extracts content from the given data source\n :param source: The data source to extract content from.\n :return: The extracted content and metadata.\n \"\"\"\n if isinstance(source, (str, Path)):\n with open(source) as text_file:\n return (text_file.read(), None)\n if isinstance(source, ByteStream):\n return (source.data.decode(\"utf-8\"), source.metadata)\n\n raise ValueError(f\"Unsupported source type: {type(source)}\")\n", "path": "haystack/preview/components/file_converters/html.py"}]}
| 910 | 849 |
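The core of the fix above is the metadata merge, where user-supplied keys win over `ByteStream` keys on overlap. The same merge in isolation (values are illustrative, not from a real crawl):

```python
extracted_meta = {"url": "https://example.com/page.html"}           # e.g. ByteStream.metadata
supplied_meta = {"url": "https://override.example", "lang": "en"}   # passed via run(..., meta=...)

# Supplied metadata overwrites ByteStream metadata on overlapping keys.
merged = {**extracted_meta, **supplied_meta}
assert merged == {"url": "https://override.example", "lang": "en"}
```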
gh_patches_debug_23254
|
rasdani/github-patches
|
git_diff
|
facebookresearch__ParlAI-1939
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
AttributeError: 'dict' object has no attribute 'force_set'
**Bug description**
When I try the pretrained model of the Self-feeding Chatbot via `python projects/self_feeding/interactive.py --model-file zoo:self_feeding/hh131k_hb60k_fb60k_st1k/model --no-cuda`, the following error occurs: AttributeError: 'dict' object has no attribute 'force_set'.
**Logs**
Please paste the command line output:
```
Enter Your Message: hello
Traceback (most recent call last):
File "projects/self_feeding/interactive.py", line 87, in <module>
interactive(parser.parse_args(print_args=False), print_parser=parser)
File "projects/self_feeding/interactive.py", line 78, in interactive
world.parley()
File "/home/han/Github/ParlAI/parlai/core/worlds.py", line 273, in parley
agents[1].observe(validate(acts[0]))
File "/home/han/Github/ParlAI/projects/self_feeding/self_feeding_agent.py", line 370, in observe
observation.force_set(
AttributeError: 'dict' object has no attribute 'force_set'
```
**Additional context**
Add any other context about the problem here. (like proxy settings, network setup, overall goals, etc.)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `parlai/agents/local_human/local_human.py`
Content:
```
1 #!/usr/bin/env python3
2
3 # Copyright (c) Facebook, Inc. and its affiliates.
4 # This source code is licensed under the MIT license found in the
5 # LICENSE file in the root directory of this source tree.
6 """Agent does gets the local keyboard input in the act() function.
7 Example: python examples/eval_model.py -m local_human -t babi:Task1k:1 -dt valid
8 """
9
10 from parlai.core.agents import Agent
11 from parlai.core.utils import display_messages, load_cands
12
13
14 class LocalHumanAgent(Agent):
15 def add_cmdline_args(argparser):
16 """Add command-line arguments specifically for this agent."""
17 agent = argparser.add_argument_group('Local Human Arguments')
18 agent.add_argument(
19 '-fixedCands',
20 '--local-human-candidates-file',
21 default=None,
22 type=str,
23 help='File of label_candidates to send to other agent',
24 )
25 agent.add_argument(
26 '--single_turn',
27 type='bool',
28 default=False,
29 help='If on, assumes single turn episodes.',
30 )
31
32 def __init__(self, opt, shared=None):
33 super().__init__(opt)
34 self.id = 'localHuman'
35 self.episodeDone = False
36 self.fixedCands_txt = load_cands(self.opt.get('local_human_candidates_file'))
37 print("Enter [DONE] if you want to end the episode.\n")
38
39 def observe(self, msg):
40 print(
41 display_messages(
42 [msg],
43 ignore_fields=self.opt.get('display_ignore_fields', ''),
44 prettify=self.opt.get('display_prettify', False),
45 )
46 )
47
48 def act(self):
49 reply = {}
50 reply['id'] = self.getID()
51 reply_text = input("Enter Your Message: ")
52 reply_text = reply_text.replace('\\n', '\n')
53 if self.opt.get('single_turn', False):
54 reply_text += '[DONE]'
55 reply['episode_done'] = False
56 reply['label_candidates'] = self.fixedCands_txt
57 if '[DONE]' in reply_text:
58 reply['episode_done'] = True
59 self.episodeDone = True
60 reply_text = reply_text.replace('[DONE]', '')
61 reply['text'] = reply_text
62 return reply
63
64 def episode_done(self):
65 return self.episodeDone
66
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/parlai/agents/local_human/local_human.py b/parlai/agents/local_human/local_human.py
--- a/parlai/agents/local_human/local_human.py
+++ b/parlai/agents/local_human/local_human.py
@@ -8,6 +8,7 @@
"""
from parlai.core.agents import Agent
+from parlai.core.message import Message
from parlai.core.utils import display_messages, load_cands
@@ -46,7 +47,7 @@
)
def act(self):
- reply = {}
+ reply = Message()
reply['id'] = self.getID()
reply_text = input("Enter Your Message: ")
reply_text = reply_text.replace('\\n', '\n')
@@ -55,7 +56,7 @@
reply['episode_done'] = False
reply['label_candidates'] = self.fixedCands_txt
if '[DONE]' in reply_text:
- reply['episode_done'] = True
+ reply.force_set('episode_done', True)
self.episodeDone = True
reply_text = reply_text.replace('[DONE]', '')
reply['text'] = reply_text
|
{"golden_diff": "diff --git a/parlai/agents/local_human/local_human.py b/parlai/agents/local_human/local_human.py\n--- a/parlai/agents/local_human/local_human.py\n+++ b/parlai/agents/local_human/local_human.py\n@@ -8,6 +8,7 @@\n \"\"\"\n \n from parlai.core.agents import Agent\n+from parlai.core.message import Message\n from parlai.core.utils import display_messages, load_cands\n \n \n@@ -46,7 +47,7 @@\n )\n \n def act(self):\n- reply = {}\n+ reply = Message()\n reply['id'] = self.getID()\n reply_text = input(\"Enter Your Message: \")\n reply_text = reply_text.replace('\\\\n', '\\n')\n@@ -55,7 +56,7 @@\n reply['episode_done'] = False\n reply['label_candidates'] = self.fixedCands_txt\n if '[DONE]' in reply_text:\n- reply['episode_done'] = True\n+ reply.force_set('episode_done', True)\n self.episodeDone = True\n reply_text = reply_text.replace('[DONE]', '')\n reply['text'] = reply_text\n", "issue": "AttributeError: 'dict' object has no attribute 'force_set'\n**Bug description**\r\nWhen I try the pretrained model of Self-feeding Chatbot, by `python projects/self_feeding/interactive.py --model-file zoo:self_feeding/hh131k_hb60k_fb60k_st1k/model --no-cuda`, error occurs: AttributeError: 'dict' object has no attribute 'force_set'\r\n\r\n**Logs**\r\nPlease paste the command line output:\r\n\r\n```\r\nEnter Your Message: hello\r\nTraceback (most recent call last):\r\n File \"projects/self_feeding/interactive.py\", line 87, in <module>\r\n interactive(parser.parse_args(print_args=False), print_parser=parser)\r\n File \"projects/self_feeding/interactive.py\", line 78, in interactive\r\n world.parley()\r\n File \"/home/han/Github/ParlAI/parlai/core/worlds.py\", line 273, in parley\r\n agents[1].observe(validate(acts[0]))\r\n File \"/home/han/Github/ParlAI/projects/self_feeding/self_feeding_agent.py\", line 370, in observe\r\n observation.force_set(\r\nAttributeError: 'dict' object has no attribute 'force_set'\r\n```\r\n\r\n**Additional context**\r\nAdd any other context about the problem here. (like proxy settings, network setup, overall goals, etc.)\r\n\n", "before_files": [{"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. 
and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\"\"\"Agent does gets the local keyboard input in the act() function.\n Example: python examples/eval_model.py -m local_human -t babi:Task1k:1 -dt valid\n\"\"\"\n\nfrom parlai.core.agents import Agent\nfrom parlai.core.utils import display_messages, load_cands\n\n\nclass LocalHumanAgent(Agent):\n def add_cmdline_args(argparser):\n \"\"\"Add command-line arguments specifically for this agent.\"\"\"\n agent = argparser.add_argument_group('Local Human Arguments')\n agent.add_argument(\n '-fixedCands',\n '--local-human-candidates-file',\n default=None,\n type=str,\n help='File of label_candidates to send to other agent',\n )\n agent.add_argument(\n '--single_turn',\n type='bool',\n default=False,\n help='If on, assumes single turn episodes.',\n )\n\n def __init__(self, opt, shared=None):\n super().__init__(opt)\n self.id = 'localHuman'\n self.episodeDone = False\n self.fixedCands_txt = load_cands(self.opt.get('local_human_candidates_file'))\n print(\"Enter [DONE] if you want to end the episode.\\n\")\n\n def observe(self, msg):\n print(\n display_messages(\n [msg],\n ignore_fields=self.opt.get('display_ignore_fields', ''),\n prettify=self.opt.get('display_prettify', False),\n )\n )\n\n def act(self):\n reply = {}\n reply['id'] = self.getID()\n reply_text = input(\"Enter Your Message: \")\n reply_text = reply_text.replace('\\\\n', '\\n')\n if self.opt.get('single_turn', False):\n reply_text += '[DONE]'\n reply['episode_done'] = False\n reply['label_candidates'] = self.fixedCands_txt\n if '[DONE]' in reply_text:\n reply['episode_done'] = True\n self.episodeDone = True\n reply_text = reply_text.replace('[DONE]', '')\n reply['text'] = reply_text\n return reply\n\n def episode_done(self):\n return self.episodeDone\n", "path": "parlai/agents/local_human/local_human.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. 
and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\"\"\"Agent does gets the local keyboard input in the act() function.\n Example: python examples/eval_model.py -m local_human -t babi:Task1k:1 -dt valid\n\"\"\"\n\nfrom parlai.core.agents import Agent\nfrom parlai.core.message import Message\nfrom parlai.core.utils import display_messages, load_cands\n\n\nclass LocalHumanAgent(Agent):\n def add_cmdline_args(argparser):\n \"\"\"Add command-line arguments specifically for this agent.\"\"\"\n agent = argparser.add_argument_group('Local Human Arguments')\n agent.add_argument(\n '-fixedCands',\n '--local-human-candidates-file',\n default=None,\n type=str,\n help='File of label_candidates to send to other agent',\n )\n agent.add_argument(\n '--single_turn',\n type='bool',\n default=False,\n help='If on, assumes single turn episodes.',\n )\n\n def __init__(self, opt, shared=None):\n super().__init__(opt)\n self.id = 'localHuman'\n self.episodeDone = False\n self.fixedCands_txt = load_cands(self.opt.get('local_human_candidates_file'))\n print(\"Enter [DONE] if you want to end the episode.\\n\")\n\n def observe(self, msg):\n print(\n display_messages(\n [msg],\n ignore_fields=self.opt.get('display_ignore_fields', ''),\n prettify=self.opt.get('display_prettify', False),\n )\n )\n\n def act(self):\n reply = Message()\n reply['id'] = self.getID()\n reply_text = input(\"Enter Your Message: \")\n reply_text = reply_text.replace('\\\\n', '\\n')\n if self.opt.get('single_turn', False):\n reply_text += '[DONE]'\n reply['episode_done'] = False\n reply['label_candidates'] = self.fixedCands_txt\n if '[DONE]' in reply_text:\n reply.force_set('episode_done', True)\n self.episodeDone = True\n reply_text = reply_text.replace('[DONE]', '')\n reply['text'] = reply_text\n return reply\n\n def episode_done(self):\n return self.episodeDone\n", "path": "parlai/agents/local_human/local_human.py"}]}
| 1,180 | 256 |
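Why the plain dict failed: `force_set` exists only on ParlAI's `Message`. A simplified stand-in (a sketch, not the real `parlai.core.message.Message`) shows the contract the fix relies on:

```python
class Message(dict):
    """Simplified stand-in: forbids silent overwrites, allows explicit ones."""

    def __setitem__(self, key, value):
        if key in self:
            raise RuntimeError(f"Key '{key}' already set; use force_set() to override.")
        super().__setitem__(key, value)

    def force_set(self, key, value):
        super().__setitem__(key, value)


reply = Message()
reply["episode_done"] = False
reply.force_set("episode_done", True)  # works on Message
plain = {"episode_done": False}
# plain.force_set(...) would raise AttributeError, exactly as in the issue
```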
gh_patches_debug_16688
|
rasdani/github-patches
|
git_diff
|
bids-standard__pybids-21
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
get_fieldmaps not compatible with multiple 'intendedfor'
Currently `if path.endswith(metadata["IntendedFor"]):` assumes a string, but the current spec allows a list as well.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bids/grabbids/bids_layout.py`
Content:
```
1 import os
2 import re
3 import json
4
5 from itertools import combinations
6 from os.path import dirname
7 from os.path import realpath
8 from os.path import join as pathjoin
9 from os.path import split as pathsplit
10
11 from grabbit import Layout
12
13 __all__ = ['BIDSLayout']
14
15
16 class BIDSLayout(Layout):
17 def __init__(self, path, config=None):
18 if config is None:
19 root = dirname(realpath(__file__))
20 config = pathjoin(root, 'config', 'bids.json')
21 super(BIDSLayout, self).__init__(path, config, dynamic_getters=True)
22
23 def get_metadata(self, path):
24 sidecarJSON = path.replace(".nii.gz", ".json").replace(".nii", ".json")
25 path_components = pathsplit(sidecarJSON)
26 filename_components = path_components[-1].split("_")
27 ses = None
28 suffix = filename_components[-1]
29
30 sub = filename_components[0]
31 keyword_components = filename_components[1:-1]
32 if filename_components[1][:3] == "ses":
33 ses = filename_components[1]
34 keyword_components = filename_components[2:-1]
35
36 potentialJSONs = []
37 for prefixes, midlayer, conditional in ( # Levels
38 (tuple(), tuple(), True), # top
39 ((sub,), tuple(), True), # subject
40 ((sub, ), (pathsplit(path_components[-2])[-1],), True),
41 ((sub, ses), tuple(), ses), # session
42 ((sub, ses), (pathsplit(path_components[-2])[-1],), ses)
43 ):
44 if not conditional:
45 continue
46 for k in range(len(keyword_components) + 1):
47 for components in combinations(keyword_components, k):
48 potentialJSONs.append(
49 pathjoin(
50 self.root,
51 *(prefixes + midlayer +
52 ("_".join(prefixes + components + (suffix,)),))))
53
54 merged_param_dict = {}
55 for json_file_path in potentialJSONs:
56 if os.path.exists(json_file_path):
57 param_dict = json.load(open(json_file_path, "r"))
58 merged_param_dict.update(param_dict)
59
60 return merged_param_dict
61
62 def get_fieldmap(self, path):
63 sub = os.path.split(path)[1].split("_")[0].split("sub-")[1]
64 fieldmap_set = {}
65 for file in self.get(subject=sub,
66 type='(phase1|phase2|phasediff|epi|fieldmap)',
67 extensions=['nii.gz', 'nii']):
68 metadata = self.get_metadata(file.filename)
69 if metadata and "IntendedFor" in metadata.keys():
70 if path.endswith(metadata["IntendedFor"]):
71 if file.type == "phasediff":
72 fieldmap_set = {"phasediff": file.filename,
73 "magnitude1": file.filename.replace(
74 "phasediff", "magnitude1"),
75 "magnitude2": file.filename.replace(
76 "phasediff", "magnitude2"),
77 "type": "phasediff"}
78 break
79 elif file.type == "phase1":
80 fieldmap_set["phase1"] = file.filename
81 fieldmap_set["magnitude1"] = \
82 file.filename.replace("phase1", "magnitude1")
83 fieldmap_set["type"] = "phase"
84 elif file.type == "phase2":
85 fieldmap_set["phase2"] = file.filename
86 fieldmap_set["magnitude2"] = \
87 file.filename.replace("phase2", "magnitude2")
88 fieldmap_set["type"] = "phase"
89 elif file.type == "epi":
90 if "epi" not in fieldmap_set.keys():
91 fieldmap_set["epi"] = []
92 fieldmap_set["epi"].append(file.filename)
93 fieldmap_set["type"] = "epi"
94 elif file.type == "fieldmap":
95 fieldmap_set["fieldmap"] = file.filename
96 fieldmap_set["magnitude"] = \
97 file.filename.replace("fieldmap", "magnitude")
98 fieldmap_set["type"] = "fieldmap"
99 return fieldmap_set
100
101 def find_match(self, target, source=None):
102
103 # Try to take the easy way out
104 if source is not None:
105 _target = source.split('.')[0] + '.' + target
106 if os.path.exists(_target):
107 return target
108
109 if target in list(self.entities.keys()):
110 candidates = list(self.entities[target].files.keys())
111 else:
112 candidates = []
113
114 for root, directories, filenames in os.walk(self.root):
115 for f in filenames:
116 if re.search(target + '$', f):
117 if os.path.sep == "\\":
118 f = f.replace("\\", "\\\\")
119 candidates.append(f)
120
121 if source is None:
122 return candidates
123
124 # Walk up the file hierarchy from source, find first match
125 if not os.path.exists(source):
126 raise OSError("The file '%s' doesn't exist." % source)
127 elif not source.startswith(self.root):
128 raise ValueError("The file '%s' is not contained "
129 "within the current project "
130 "directory (%s)." % (source, self.root))
131 rel = os.path.relpath(dirname(source), self.root)
132 sep = os.path.sep
133 chunks = rel.split(sep)
134 n_chunks = len(chunks)
135 for i in range(n_chunks, -1, -1):
136 path = pathjoin(self.root, *chunks[:i])
137 patt = path + '\%s[^\%s]+$' % (sep, sep)
138 if sep == "\\":
139 patt = path + '\\[^\\]+$'
140 patt = patt.replace("\\", "\\\\")
141 matches = [x for x in candidates if re.search(patt, x)]
142 if matches:
143 if len(matches) == 1:
144 return matches[0]
145 else:
146 raise ValueError("Ambiguous target: more than one "
147 "candidate file found in "
148 "directory '%s'." % path)
149 return None
150
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/bids/grabbids/bids_layout.py b/bids/grabbids/bids_layout.py
--- a/bids/grabbids/bids_layout.py
+++ b/bids/grabbids/bids_layout.py
@@ -67,7 +67,11 @@
extensions=['nii.gz', 'nii']):
metadata = self.get_metadata(file.filename)
if metadata and "IntendedFor" in metadata.keys():
- if path.endswith(metadata["IntendedFor"]):
+ if isinstance(metadata["IntendedFor"], list):
+ intended_for = metadata["IntendedFor"]
+ else:
+ intended_for = [metadata["IntendedFor"]]
+ if any([path.endswith(suffix) for suffix in intended_for]):
if file.type == "phasediff":
fieldmap_set = {"phasediff": file.filename,
"magnitude1": file.filename.replace(
|
{"golden_diff": "diff --git a/bids/grabbids/bids_layout.py b/bids/grabbids/bids_layout.py\n--- a/bids/grabbids/bids_layout.py\n+++ b/bids/grabbids/bids_layout.py\n@@ -67,7 +67,11 @@\n extensions=['nii.gz', 'nii']):\n metadata = self.get_metadata(file.filename)\n if metadata and \"IntendedFor\" in metadata.keys():\n- if path.endswith(metadata[\"IntendedFor\"]):\n+ if isinstance(metadata[\"IntendedFor\"], list):\n+ intended_for = metadata[\"IntendedFor\"]\n+ else:\n+ intended_for = [metadata[\"IntendedFor\"]]\n+ if any([path.endswith(suffix) for suffix in intended_for]):\n if file.type == \"phasediff\":\n fieldmap_set = {\"phasediff\": file.filename,\n \"magnitude1\": file.filename.replace(\n", "issue": "get_fieldmaps not compatible with multiple 'intendedfor'\ncurrently `if path.endswith(metadata[\"IntendedFor\"]):` assumes string, but current specs allow list as well\n\nget_fieldmaps not compatible with multiple 'intendedfor'\ncurrently `if path.endswith(metadata[\"IntendedFor\"]):` assumes string, but current specs allow list as well\n\n", "before_files": [{"content": "import os\nimport re\nimport json\n\nfrom itertools import combinations\nfrom os.path import dirname\nfrom os.path import realpath\nfrom os.path import join as pathjoin\nfrom os.path import split as pathsplit\n\nfrom grabbit import Layout\n\n__all__ = ['BIDSLayout']\n\n\nclass BIDSLayout(Layout):\n def __init__(self, path, config=None):\n if config is None:\n root = dirname(realpath(__file__))\n config = pathjoin(root, 'config', 'bids.json')\n super(BIDSLayout, self).__init__(path, config, dynamic_getters=True)\n\n def get_metadata(self, path):\n sidecarJSON = path.replace(\".nii.gz\", \".json\").replace(\".nii\", \".json\")\n path_components = pathsplit(sidecarJSON)\n filename_components = path_components[-1].split(\"_\")\n ses = None\n suffix = filename_components[-1]\n\n sub = filename_components[0]\n keyword_components = filename_components[1:-1]\n if filename_components[1][:3] == \"ses\":\n ses = filename_components[1]\n keyword_components = filename_components[2:-1]\n\n potentialJSONs = []\n for prefixes, midlayer, conditional in ( # Levels\n (tuple(), tuple(), True), # top\n ((sub,), tuple(), True), # subject\n ((sub, ), (pathsplit(path_components[-2])[-1],), True),\n ((sub, ses), tuple(), ses), # session\n ((sub, ses), (pathsplit(path_components[-2])[-1],), ses)\n ):\n if not conditional:\n continue\n for k in range(len(keyword_components) + 1):\n for components in combinations(keyword_components, k):\n potentialJSONs.append(\n pathjoin(\n self.root,\n *(prefixes + midlayer +\n (\"_\".join(prefixes + components + (suffix,)),))))\n\n merged_param_dict = {}\n for json_file_path in potentialJSONs:\n if os.path.exists(json_file_path):\n param_dict = json.load(open(json_file_path, \"r\"))\n merged_param_dict.update(param_dict)\n\n return merged_param_dict\n\n def get_fieldmap(self, path):\n sub = os.path.split(path)[1].split(\"_\")[0].split(\"sub-\")[1]\n fieldmap_set = {}\n for file in self.get(subject=sub,\n type='(phase1|phase2|phasediff|epi|fieldmap)',\n extensions=['nii.gz', 'nii']):\n metadata = self.get_metadata(file.filename)\n if metadata and \"IntendedFor\" in metadata.keys():\n if path.endswith(metadata[\"IntendedFor\"]):\n if file.type == \"phasediff\":\n fieldmap_set = {\"phasediff\": file.filename,\n \"magnitude1\": file.filename.replace(\n \"phasediff\", \"magnitude1\"),\n \"magnitude2\": file.filename.replace(\n \"phasediff\", \"magnitude2\"),\n \"type\": \"phasediff\"}\n break\n elif file.type == 
\"phase1\":\n fieldmap_set[\"phase1\"] = file.filename\n fieldmap_set[\"magnitude1\"] = \\\n file.filename.replace(\"phase1\", \"magnitude1\")\n fieldmap_set[\"type\"] = \"phase\"\n elif file.type == \"phase2\":\n fieldmap_set[\"phase2\"] = file.filename\n fieldmap_set[\"magnitude2\"] = \\\n file.filename.replace(\"phase2\", \"magnitude2\")\n fieldmap_set[\"type\"] = \"phase\"\n elif file.type == \"epi\":\n if \"epi\" not in fieldmap_set.keys():\n fieldmap_set[\"epi\"] = []\n fieldmap_set[\"epi\"].append(file.filename)\n fieldmap_set[\"type\"] = \"epi\"\n elif file.type == \"fieldmap\":\n fieldmap_set[\"fieldmap\"] = file.filename\n fieldmap_set[\"magnitude\"] = \\\n file.filename.replace(\"fieldmap\", \"magnitude\")\n fieldmap_set[\"type\"] = \"fieldmap\"\n return fieldmap_set\n\n def find_match(self, target, source=None):\n\n # Try to take the easy way out\n if source is not None:\n _target = source.split('.')[0] + '.' + target\n if os.path.exists(_target):\n return target\n\n if target in list(self.entities.keys()):\n candidates = list(self.entities[target].files.keys())\n else:\n candidates = []\n\n for root, directories, filenames in os.walk(self.root):\n for f in filenames:\n if re.search(target + '$', f):\n if os.path.sep == \"\\\\\":\n f = f.replace(\"\\\\\", \"\\\\\\\\\")\n candidates.append(f)\n\n if source is None:\n return candidates\n\n # Walk up the file hierarchy from source, find first match\n if not os.path.exists(source):\n raise OSError(\"The file '%s' doesn't exist.\" % source)\n elif not source.startswith(self.root):\n raise ValueError(\"The file '%s' is not contained \"\n \"within the current project \"\n \"directory (%s).\" % (source, self.root))\n rel = os.path.relpath(dirname(source), self.root)\n sep = os.path.sep\n chunks = rel.split(sep)\n n_chunks = len(chunks)\n for i in range(n_chunks, -1, -1):\n path = pathjoin(self.root, *chunks[:i])\n patt = path + '\\%s[^\\%s]+$' % (sep, sep)\n if sep == \"\\\\\":\n patt = path + '\\\\[^\\\\]+$'\n patt = patt.replace(\"\\\\\", \"\\\\\\\\\")\n matches = [x for x in candidates if re.search(patt, x)]\n if matches:\n if len(matches) == 1:\n return matches[0]\n else:\n raise ValueError(\"Ambiguous target: more than one \"\n \"candidate file found in \"\n \"directory '%s'.\" % path)\n return None\n", "path": "bids/grabbids/bids_layout.py"}], "after_files": [{"content": "import os\nimport re\nimport json\n\nfrom itertools import combinations\nfrom os.path import dirname\nfrom os.path import realpath\nfrom os.path import join as pathjoin\nfrom os.path import split as pathsplit\n\nfrom grabbit import Layout\n\n__all__ = ['BIDSLayout']\n\n\nclass BIDSLayout(Layout):\n def __init__(self, path, config=None):\n if config is None:\n root = dirname(realpath(__file__))\n config = pathjoin(root, 'config', 'bids.json')\n super(BIDSLayout, self).__init__(path, config, dynamic_getters=True)\n\n def get_metadata(self, path):\n sidecarJSON = path.replace(\".nii.gz\", \".json\").replace(\".nii\", \".json\")\n path_components = pathsplit(sidecarJSON)\n filename_components = path_components[-1].split(\"_\")\n ses = None\n suffix = filename_components[-1]\n\n sub = filename_components[0]\n keyword_components = filename_components[1:-1]\n if filename_components[1][:3] == \"ses\":\n ses = filename_components[1]\n keyword_components = filename_components[2:-1]\n\n potentialJSONs = []\n for prefixes, midlayer, conditional in ( # Levels\n (tuple(), tuple(), True), # top\n ((sub,), tuple(), True), # subject\n ((sub, ), 
(pathsplit(path_components[-2])[-1],), True),\n ((sub, ses), tuple(), ses), # session\n ((sub, ses), (pathsplit(path_components[-2])[-1],), ses)\n ):\n if not conditional:\n continue\n for k in range(len(keyword_components) + 1):\n for components in combinations(keyword_components, k):\n potentialJSONs.append(\n pathjoin(\n self.root,\n *(prefixes + midlayer +\n (\"_\".join(prefixes + components + (suffix,)),))))\n\n merged_param_dict = {}\n for json_file_path in potentialJSONs:\n if os.path.exists(json_file_path):\n param_dict = json.load(open(json_file_path, \"r\"))\n merged_param_dict.update(param_dict)\n\n return merged_param_dict\n\n def get_fieldmap(self, path):\n sub = os.path.split(path)[1].split(\"_\")[0].split(\"sub-\")[1]\n fieldmap_set = {}\n for file in self.get(subject=sub,\n type='(phase1|phase2|phasediff|epi|fieldmap)',\n extensions=['nii.gz', 'nii']):\n metadata = self.get_metadata(file.filename)\n if metadata and \"IntendedFor\" in metadata.keys():\n if isinstance(metadata[\"IntendedFor\"], list):\n intended_for = metadata[\"IntendedFor\"]\n else:\n intended_for = [metadata[\"IntendedFor\"]]\n if any([path.endswith(suffix) for suffix in intended_for]):\n if file.type == \"phasediff\":\n fieldmap_set = {\"phasediff\": file.filename,\n \"magnitude1\": file.filename.replace(\n \"phasediff\", \"magnitude1\"),\n \"magnitude2\": file.filename.replace(\n \"phasediff\", \"magnitude2\"),\n \"type\": \"phasediff\"}\n break\n elif file.type == \"phase1\":\n fieldmap_set[\"phase1\"] = file.filename\n fieldmap_set[\"magnitude1\"] = \\\n file.filename.replace(\"phase1\", \"magnitude1\")\n fieldmap_set[\"type\"] = \"phase\"\n elif file.type == \"phase2\":\n fieldmap_set[\"phase2\"] = file.filename\n fieldmap_set[\"magnitude2\"] = \\\n file.filename.replace(\"phase2\", \"magnitude2\")\n fieldmap_set[\"type\"] = \"phase\"\n elif file.type == \"epi\":\n if \"epi\" not in fieldmap_set.keys():\n fieldmap_set[\"epi\"] = []\n fieldmap_set[\"epi\"].append(file.filename)\n fieldmap_set[\"type\"] = \"epi\"\n elif file.type == \"fieldmap\":\n fieldmap_set[\"fieldmap\"] = file.filename\n fieldmap_set[\"magnitude\"] = \\\n file.filename.replace(\"fieldmap\", \"magnitude\")\n fieldmap_set[\"type\"] = \"fieldmap\"\n return fieldmap_set\n\n def find_match(self, target, source=None):\n\n # Try to take the easy way out\n if source is not None:\n _target = source.split('.')[0] + '.' 
+ target\n if os.path.exists(_target):\n return target\n\n if target in list(self.entities.keys()):\n candidates = list(self.entities[target].files.keys())\n else:\n candidates = []\n\n for root, directories, filenames in os.walk(self.root):\n for f in filenames:\n if re.search(target + '$', f):\n if os.path.sep == \"\\\\\":\n f = f.replace(\"\\\\\", \"\\\\\\\\\")\n candidates.append(f)\n\n if source is None:\n return candidates\n\n # Walk up the file hierarchy from source, find first match\n if not os.path.exists(source):\n raise OSError(\"The file '%s' doesn't exist.\" % source)\n elif not source.startswith(self.root):\n raise ValueError(\"The file '%s' is not contained \"\n \"within the current project \"\n \"directory (%s).\" % (source, self.root))\n rel = os.path.relpath(dirname(source), self.root)\n sep = os.path.sep\n chunks = rel.split(sep)\n n_chunks = len(chunks)\n for i in range(n_chunks, -1, -1):\n path = pathjoin(self.root, *chunks[:i])\n patt = path + '\\%s[^\\%s]+$' % (sep, sep)\n if sep == \"\\\\\":\n patt = path + '\\\\[^\\\\]+$'\n patt = patt.replace(\"\\\\\", \"\\\\\\\\\")\n matches = [x for x in candidates if re.search(patt, x)]\n if matches:\n if len(matches) == 1:\n return matches[0]\n else:\n raise ValueError(\"Ambiguous target: more than one \"\n \"candidate file found in \"\n \"directory '%s'.\" % path)\n return None\n", "path": "bids/grabbids/bids_layout.py"}]}
| 1,966 | 192 |
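A minimal standalone sketch of the normalization used in the pybids patch above, assuming only that `IntendedFor` may hold either a string or a list of strings (both shapes are allowed by the BIDS fieldmap spec); the helper name is illustrative, not part of the library:

```python
def matches_intended_for(path, intended_for):
    # Wrap a bare string in a one-element list so both shapes share one code path.
    targets = intended_for if isinstance(intended_for, list) else [intended_for]
    return any(path.endswith(suffix) for suffix in targets)

# Both metadata shapes now resolve the same way:
assert matches_intended_for("sub-01/func/bold.nii.gz", "func/bold.nii.gz")
assert matches_intended_for("sub-01/func/bold.nii.gz", ["dwi.nii.gz", "func/bold.nii.gz"])
```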
gh_patches_debug_628
|
rasdani/github-patches
|
git_diff
|
litestar-org__litestar-1633
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tools/build_docs.py`
Content:
```
1 from __future__ import annotations
2
3 import argparse
4 import importlib.metadata
5 import json
6 import os
7 import shutil
8 import subprocess
9 from contextlib import contextmanager
10 from pathlib import Path
11 from typing import TypedDict
12
13 REDIRECT_TEMPLATE = """
14 <!DOCTYPE HTML>
15 <html lang="en-US">
16 <head>
17 <title>Page Redirection</title>
18 <meta charset="UTF-8">
19 <meta http-equiv="refresh" content="0; url={target}">
20 <script type="text/javascript">window.location.href = "{target}"</script>
21 </head>
22 <body>
23 You are being redirected. If this does not work, click <a href='{target}'>this link</a>
24 </body>
25 </html>
26 """
27
28 parser = argparse.ArgumentParser()
29 parser.add_argument("--version", required=False)
30 parser.add_argument("--ignore-missing-examples-output", action="store_true", default=False)
31 parser.add_argument("output")
32
33
34 class VersionSpec(TypedDict):
35 versions: list[str]
36 latest: str
37
38
39 @contextmanager
40 def checkout(branch: str) -> None:
41 subprocess.run(["git", "checkout", branch], check=True) # noqa: S603 S607
42 yield
43 subprocess.run(["git", "checkout", "-"], check=True) # noqa: S603 S607
44
45
46 def load_version_spec() -> VersionSpec:
47 versions_file = Path("docs/_static/versions.json")
48 if versions_file.exists():
49 return json.loads(versions_file.read_text())
50 return {"versions": [], "latest": ""}
51
52
53 def build(output_dir: str, version: str | None, ignore_missing_output: bool) -> None:
54 if version is None:
55 version = importlib.metadata.version("litestar").rsplit(".")[0]
56 else:
57 os.environ["_LITESTAR_DOCS_BUILD_VERSION"] = version
58
59 if ignore_missing_output:
60 os.environ["_LITESTAR_DOCS_IGNORE_MISSING_EXAMPLE_OUTPUT"] = "1"
61
62 subprocess.run(["make", "docs"], check=True) # noqa: S603 S607
63
64 output_dir = Path(output_dir)
65 output_dir.mkdir()
66 output_dir.joinpath(".nojekyll").touch(exist_ok=True)
67
68 version_spec = load_version_spec()
69 is_latest = version == version_spec["latest"]
70
71 docs_src_path = Path("docs/_build/html")
72
73 output_dir.joinpath("index.html").write_text(REDIRECT_TEMPLATE.format(target="latest"))
74
75 if is_latest:
76 shutil.copytree(docs_src_path, output_dir / "latest", dirs_exist_ok=True)
77 shutil.copytree(docs_src_path, output_dir / version, dirs_exist_ok=True)
78
79 # copy existing versions into our output dir to preserve them when cleaning the branch
80 with checkout("gh-pages"):
81 for other_version in [*version_spec["versions"], "latest"]:
82 other_version_path = Path(other_version)
83 other_version_target_path = output_dir / other_version
84 if other_version_path.exists() and not other_version_target_path.exists():
85 shutil.copytree(other_version_path, other_version_target_path)
86
87
88 def main() -> None:
89 args = parser.parse_args()
90 build(
91 output_dir=args.output,
92 version=args.version,
93 ignore_missing_output=args.ignore_missing_output,
94 )
95
96
97 if __name__ == "__main__":
98 main()
99
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/tools/build_docs.py b/tools/build_docs.py
--- a/tools/build_docs.py
+++ b/tools/build_docs.py
@@ -90,7 +90,7 @@
build(
output_dir=args.output,
version=args.version,
- ignore_missing_output=args.ignore_missing_output,
+ ignore_missing_output=args.ignore_missing_examples_output,
)
|
{"golden_diff": "diff --git a/tools/build_docs.py b/tools/build_docs.py\n--- a/tools/build_docs.py\n+++ b/tools/build_docs.py\n@@ -90,7 +90,7 @@\n build(\n output_dir=args.output,\n version=args.version,\n- ignore_missing_output=args.ignore_missing_output,\n+ ignore_missing_output=args.ignore_missing_examples_output,\n )\n", "issue": "StaticFilesConfig and virtual directories\nI'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem. \r\n\r\nThis is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.\r\n\r\nhttps://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32\n", "before_files": [{"content": "from __future__ import annotations\n\nimport argparse\nimport importlib.metadata\nimport json\nimport os\nimport shutil\nimport subprocess\nfrom contextlib import contextmanager\nfrom pathlib import Path\nfrom typing import TypedDict\n\nREDIRECT_TEMPLATE = \"\"\"\n<!DOCTYPE HTML>\n<html lang=\"en-US\">\n <head>\n <title>Page Redirection</title>\n <meta charset=\"UTF-8\">\n <meta http-equiv=\"refresh\" content=\"0; url={target}\">\n <script type=\"text/javascript\">window.location.href = \"{target}\"</script>\n </head>\n <body>\n You are being redirected. If this does not work, click <a href='{target}'>this link</a>\n </body>\n</html>\n\"\"\"\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--version\", required=False)\nparser.add_argument(\"--ignore-missing-examples-output\", action=\"store_true\", default=False)\nparser.add_argument(\"output\")\n\n\nclass VersionSpec(TypedDict):\n versions: list[str]\n latest: str\n\n\n@contextmanager\ndef checkout(branch: str) -> None:\n subprocess.run([\"git\", \"checkout\", branch], check=True) # noqa: S603 S607\n yield\n subprocess.run([\"git\", \"checkout\", \"-\"], check=True) # noqa: S603 S607\n\n\ndef load_version_spec() -> VersionSpec:\n versions_file = Path(\"docs/_static/versions.json\")\n if versions_file.exists():\n return json.loads(versions_file.read_text())\n return {\"versions\": [], \"latest\": \"\"}\n\n\ndef build(output_dir: str, version: str | None, ignore_missing_output: bool) -> None:\n if version is None:\n version = importlib.metadata.version(\"litestar\").rsplit(\".\")[0]\n else:\n os.environ[\"_LITESTAR_DOCS_BUILD_VERSION\"] = version\n\n if ignore_missing_output:\n os.environ[\"_LITESTAR_DOCS_IGNORE_MISSING_EXAMPLE_OUTPUT\"] = \"1\"\n\n subprocess.run([\"make\", \"docs\"], check=True) # noqa: S603 S607\n\n output_dir = Path(output_dir)\n output_dir.mkdir()\n output_dir.joinpath(\".nojekyll\").touch(exist_ok=True)\n\n version_spec = load_version_spec()\n is_latest = version == version_spec[\"latest\"]\n\n docs_src_path = Path(\"docs/_build/html\")\n\n output_dir.joinpath(\"index.html\").write_text(REDIRECT_TEMPLATE.format(target=\"latest\"))\n\n if is_latest:\n shutil.copytree(docs_src_path, output_dir / \"latest\", dirs_exist_ok=True)\n shutil.copytree(docs_src_path, output_dir / version, dirs_exist_ok=True)\n\n # copy existing versions into our output dir to preserve them when cleaning the branch\n with checkout(\"gh-pages\"):\n for other_version in [*version_spec[\"versions\"], \"latest\"]:\n 
other_version_path = Path(other_version)\n other_version_target_path = output_dir / other_version\n if other_version_path.exists() and not other_version_target_path.exists():\n shutil.copytree(other_version_path, other_version_target_path)\n\n\ndef main() -> None:\n args = parser.parse_args()\n build(\n output_dir=args.output,\n version=args.version,\n ignore_missing_output=args.ignore_missing_output,\n )\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "tools/build_docs.py"}], "after_files": [{"content": "from __future__ import annotations\n\nimport argparse\nimport importlib.metadata\nimport json\nimport os\nimport shutil\nimport subprocess\nfrom contextlib import contextmanager\nfrom pathlib import Path\nfrom typing import TypedDict\n\nREDIRECT_TEMPLATE = \"\"\"\n<!DOCTYPE HTML>\n<html lang=\"en-US\">\n <head>\n <title>Page Redirection</title>\n <meta charset=\"UTF-8\">\n <meta http-equiv=\"refresh\" content=\"0; url={target}\">\n <script type=\"text/javascript\">window.location.href = \"{target}\"</script>\n </head>\n <body>\n You are being redirected. If this does not work, click <a href='{target}'>this link</a>\n </body>\n</html>\n\"\"\"\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--version\", required=False)\nparser.add_argument(\"--ignore-missing-examples-output\", action=\"store_true\", default=False)\nparser.add_argument(\"output\")\n\n\nclass VersionSpec(TypedDict):\n versions: list[str]\n latest: str\n\n\n@contextmanager\ndef checkout(branch: str) -> None:\n subprocess.run([\"git\", \"checkout\", branch], check=True) # noqa: S603 S607\n yield\n subprocess.run([\"git\", \"checkout\", \"-\"], check=True) # noqa: S603 S607\n\n\ndef load_version_spec() -> VersionSpec:\n versions_file = Path(\"docs/_static/versions.json\")\n if versions_file.exists():\n return json.loads(versions_file.read_text())\n return {\"versions\": [], \"latest\": \"\"}\n\n\ndef build(output_dir: str, version: str | None, ignore_missing_output: bool) -> None:\n if version is None:\n version = importlib.metadata.version(\"litestar\").rsplit(\".\")[0]\n else:\n os.environ[\"_LITESTAR_DOCS_BUILD_VERSION\"] = version\n\n if ignore_missing_output:\n os.environ[\"_LITESTAR_DOCS_IGNORE_MISSING_EXAMPLE_OUTPUT\"] = \"1\"\n\n subprocess.run([\"make\", \"docs\"], check=True) # noqa: S603 S607\n\n output_dir = Path(output_dir)\n output_dir.mkdir()\n output_dir.joinpath(\".nojekyll\").touch(exist_ok=True)\n\n version_spec = load_version_spec()\n is_latest = version == version_spec[\"latest\"]\n\n docs_src_path = Path(\"docs/_build/html\")\n\n output_dir.joinpath(\"index.html\").write_text(REDIRECT_TEMPLATE.format(target=\"latest\"))\n\n if is_latest:\n shutil.copytree(docs_src_path, output_dir / \"latest\", dirs_exist_ok=True)\n shutil.copytree(docs_src_path, output_dir / version, dirs_exist_ok=True)\n\n # copy existing versions into our output dir to preserve them when cleaning the branch\n with checkout(\"gh-pages\"):\n for other_version in [*version_spec[\"versions\"], \"latest\"]:\n other_version_path = Path(other_version)\n other_version_target_path = output_dir / other_version\n if other_version_path.exists() and not other_version_target_path.exists():\n shutil.copytree(other_version_path, other_version_target_path)\n\n\ndef main() -> None:\n args = parser.parse_args()\n build(\n output_dir=args.output,\n version=args.version,\n ignore_missing_output=args.ignore_missing_examples_output,\n )\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "tools/build_docs.py"}]}
| 1,344 | 77 |
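The one-line fix above comes down to how `argparse` derives attribute names from long options; a small sketch of that behavior, using only the standard library:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--ignore-missing-examples-output", action="store_true", default=False)
args = parser.parse_args([])

# argparse turns the dashes in a long option into underscores, so the flag is
# exposed on the namespace as `ignore_missing_examples_output`:
print(args.ignore_missing_examples_output)  # False

# The buggy call site read `args.ignore_missing_output` instead, which raises
# AttributeError because no option with that destination was ever defined.
```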
gh_patches_debug_22748
|
rasdani/github-patches
|
git_diff
|
dotkom__onlineweb4-2246
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Membership application uses email object instead of email-address
Same as #2219
https://sentry.io/organizations/dotkom/issues/890887549/?project=204971&referrer=github_plugin
```
TypeError: object of type 'Email' has no len()
(16 additional frame(s) were not displayed)
...
File "guardian/decorators.py", line 126, in _wrapped_view
return view_func(request, *args, **kwargs)
File "apps/approval/dashboard/views.py", line 105, in approve_application
app.save()
File "apps/approval/signals.py", line 50, in notify_membership_applicant_handler
send_approval_status_update(instance)
File "apps/approval/tasks.py", line 45, in send_approval_status_update
[approval.applicant.get_email()],
TypeError: object of type 'Email' has no len()
```
Membership application uses email object instead of email-address
Same as #2219
https://sentry.io/organizations/dotkom/issues/890887549/?project=204971&referrer=github_plugin
```
TypeError: object of type 'Email' has no len()
(16 additional frame(s) were not displayed)
...
File "guardian/decorators.py", line 126, in _wrapped_view
return view_func(request, *args, **kwargs)
File "apps/approval/dashboard/views.py", line 105, in approve_application
app.save()
File "apps/approval/signals.py", line 50, in notify_membership_applicant_handler
send_approval_status_update(instance)
File "apps/approval/tasks.py", line 45, in send_approval_status_update
[approval.applicant.get_email()],
TypeError: object of type 'Email' has no len()
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `apps/approval/signals.py`
Content:
```
1 from django.conf import settings
2 from django.db.models.signals import post_save
3 from django.dispatch import receiver
4
5 from apps.approval.models import CommitteeApplication, MembershipApproval
6
7 from .tasks import (send_approval_notification, send_approval_status_update,
8 send_committee_application_notification)
9
10
11 @receiver(post_save, sender=MembershipApproval)
12 def new_membership_approval_handler(sender, instance, created, **kwargs):
13 """
14
15 :param sender: The sending model.
16 :type sender: MembershipApproval
17 :param instance: The MembershipApproval instance
18 :type instance: MembershipApproval
19 :param created: True or False, whether this instance is new or not.
20 :type created: bool
21 :param kwargs: Other parameters.
22 :type kwargs: dict
23 :return: Nothing
24 :rtype: None
25 """
26
27 if created and not instance.processed:
28 if settings.APPROVAL_SETTINGS.get('SEND_APPROVER_NOTIFICATION_EMAIL', False):
29 send_approval_notification(instance)
30
31
32 @receiver(post_save, sender=MembershipApproval)
33 def notify_membership_applicant_handler(sender, instance, created, **kwargs):
34 """
35
36 :param sender: The sending model.
37 :type sender: Approval
38 :param instance: The Approval instance
39 :type instance: Approval
40 :param approved: True or False, whether this instance is new or not.
41 :type created: bool
42 :param kwargs: Other parameters.
43 :type kwargs: dict
44 :return: Nothing
45 :rtype: None
46 """
47
48 if not created and instance.processed and instance.applicant.get_email():
49 if settings.APPROVAL_SETTINGS.get('SEND_APPLICANT_NOTIFICATION_EMAIL', False):
50 send_approval_status_update(instance)
51
52
53 @receiver(post_save, sender=CommitteeApplication)
54 def notify_new_committee_application(sender, instance, created, **kwargs):
55 if created:
56 send_committee_application_notification(instance, [settings.EMAIL_OPPTAK], link_to_admin=True)
57 if settings.APPROVAL_SETTINGS.get('SEND_COMMITTEEAPPLICATION_APPLICANT_EMAIL', False):
58 send_committee_application_notification(instance, [instance.get_email()], link_to_admin=False)
59
```
Path: `apps/approval/tasks.py`
Content:
```
1 import logging
2
3 from django.conf import settings
4 from django.core.exceptions import ImproperlyConfigured
5 from django.core.mail import EmailMessage, send_mail
6 from django.template.loader import render_to_string
7 from django.urls import reverse
8
9
10 def send_approval_notification(approval):
11 logger = logging.getLogger(__name__)
12 d = {
13 'approval': approval,
14 'approval_url': settings.BASE_URL + reverse('approvals')
15 }
16
17 to_emails = [settings.EMAIL_HS]
18 content = render_to_string('approval/email/approval_notification.txt', d)
19
20 try:
21 EmailMessage("[Medlemskapssøknad] %s" % approval.applicant.get_full_name(),
22 content, settings.DEFAULT_FROM_EMAIL, to_emails).send()
23 except ImproperlyConfigured:
24 logger.warn('Failed to send approval approver notification email for approval#{pk}.'.format(
25 {'pk': approval.pk}))
26
27
28 def send_approval_status_update(approval):
29 logger = logging.getLogger(__name__)
30
31 accepted = approval.approved
32 message = "Ditt medlemskap i Online er "
33 if accepted:
34 message += "godkjent."
35 else:
36 message += "ikke godkjent."
37 if len(approval.message) == 0:
38 message += " Ta kontakt med Online for begrunnelse."
39 else:
40 message += approval.message
41 try:
42 EmailMessage("Soknad om medlemskap i Online er vurdert",
43 message,
44 settings.DEFAULT_FROM_EMAIL,
45 [approval.applicant.get_email()],
46 ).send()
47 except ImproperlyConfigured:
48 logger.warn('Failed to notify applicant about updated status on membership for approval#{pk}.'.format(
49 {'pk': approval.pk}))
50
51
52 def send_committee_application_notification(application, to_emails, link_to_admin=False):
53 context = {
54 'link_to_admin': link_to_admin,
55 'absolute_url': settings.BASE_URL + application.get_absolute_url(),
56 'applicant_name': application.get_name(),
57 }
58 message = render_to_string('approval/email/committeeapplication_notification.txt', context)
59 send_mail('[opptak] Bekreftelse på komitesøknad', message, settings.DEFAULT_FROM_EMAIL, to_emails)
60
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/apps/approval/signals.py b/apps/approval/signals.py
--- a/apps/approval/signals.py
+++ b/apps/approval/signals.py
@@ -55,4 +55,4 @@
if created:
send_committee_application_notification(instance, [settings.EMAIL_OPPTAK], link_to_admin=True)
if settings.APPROVAL_SETTINGS.get('SEND_COMMITTEEAPPLICATION_APPLICANT_EMAIL', False):
- send_committee_application_notification(instance, [instance.get_email()], link_to_admin=False)
+ send_committee_application_notification(instance, [instance.get_email().email], link_to_admin=False)
diff --git a/apps/approval/tasks.py b/apps/approval/tasks.py
--- a/apps/approval/tasks.py
+++ b/apps/approval/tasks.py
@@ -42,7 +42,7 @@
EmailMessage("Soknad om medlemskap i Online er vurdert",
message,
settings.DEFAULT_FROM_EMAIL,
- [approval.applicant.get_email()],
+ [approval.applicant.get_email().email],
).send()
except ImproperlyConfigured:
logger.warn('Failed to notify applicant about updated status on membership for approval#{pk}.'.format(
|
{"golden_diff": "diff --git a/apps/approval/signals.py b/apps/approval/signals.py\n--- a/apps/approval/signals.py\n+++ b/apps/approval/signals.py\n@@ -55,4 +55,4 @@\n if created:\n send_committee_application_notification(instance, [settings.EMAIL_OPPTAK], link_to_admin=True)\n if settings.APPROVAL_SETTINGS.get('SEND_COMMITTEEAPPLICATION_APPLICANT_EMAIL', False):\n- send_committee_application_notification(instance, [instance.get_email()], link_to_admin=False)\n+ send_committee_application_notification(instance, [instance.get_email().email], link_to_admin=False)\ndiff --git a/apps/approval/tasks.py b/apps/approval/tasks.py\n--- a/apps/approval/tasks.py\n+++ b/apps/approval/tasks.py\n@@ -42,7 +42,7 @@\n EmailMessage(\"Soknad om medlemskap i Online er vurdert\",\n message,\n settings.DEFAULT_FROM_EMAIL,\n- [approval.applicant.get_email()],\n+ [approval.applicant.get_email().email],\n ).send()\n except ImproperlyConfigured:\n logger.warn('Failed to notify applicant about updated status on membership for approval#{pk}.'.format(\n", "issue": "Membership application uses email object instead of email-address\nSame as #2219\n\nhttps://sentry.io/organizations/dotkom/issues/890887549/?project=204971&referrer=github_plugin\n\n```\nTypeError: object of type 'Email' has no len()\n(16 additional frame(s) were not displayed)\n...\n File \"guardian/decorators.py\", line 126, in _wrapped_view\n return view_func(request, *args, **kwargs)\n File \"apps/approval/dashboard/views.py\", line 105, in approve_application\n app.save()\n File \"apps/approval/signals.py\", line 50, in notify_membership_applicant_handler\n send_approval_status_update(instance)\n File \"apps/approval/tasks.py\", line 45, in send_approval_status_update\n [approval.applicant.get_email()],\n\nTypeError: object of type 'Email' has no len()\n```\nMembership application uses email object instead of email-address\nSame as #2219\n\nhttps://sentry.io/organizations/dotkom/issues/890887549/?project=204971&referrer=github_plugin\n\n```\nTypeError: object of type 'Email' has no len()\n(16 additional frame(s) were not displayed)\n...\n File \"guardian/decorators.py\", line 126, in _wrapped_view\n return view_func(request, *args, **kwargs)\n File \"apps/approval/dashboard/views.py\", line 105, in approve_application\n app.save()\n File \"apps/approval/signals.py\", line 50, in notify_membership_applicant_handler\n send_approval_status_update(instance)\n File \"apps/approval/tasks.py\", line 45, in send_approval_status_update\n [approval.applicant.get_email()],\n\nTypeError: object of type 'Email' has no len()\n```\n", "before_files": [{"content": "from django.conf import settings\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\n\nfrom apps.approval.models import CommitteeApplication, MembershipApproval\n\nfrom .tasks import (send_approval_notification, send_approval_status_update,\n send_committee_application_notification)\n\n\n@receiver(post_save, sender=MembershipApproval)\ndef new_membership_approval_handler(sender, instance, created, **kwargs):\n \"\"\"\n\n :param sender: The sending model.\n :type sender: MembershipApproval\n :param instance: The MembershipApproval instance\n :type instance: MembershipApproval\n :param created: True or False, whether this instance is new or not.\n :type created: bool\n :param kwargs: Other parameters.\n :type kwargs: dict\n :return: Nothing\n :rtype: None\n \"\"\"\n\n if created and not instance.processed:\n if 
settings.APPROVAL_SETTINGS.get('SEND_APPROVER_NOTIFICATION_EMAIL', False):\n send_approval_notification(instance)\n\n\n@receiver(post_save, sender=MembershipApproval)\ndef notify_membership_applicant_handler(sender, instance, created, **kwargs):\n \"\"\"\n\n :param sender: The sending model.\n :type sender: Approval\n :param instance: The Approval instance\n :type instance: Approval\n :param approved: True or False, whether this instance is new or not.\n :type created: bool\n :param kwargs: Other parameters.\n :type kwargs: dict\n :return: Nothing\n :rtype: None\n \"\"\"\n\n if not created and instance.processed and instance.applicant.get_email():\n if settings.APPROVAL_SETTINGS.get('SEND_APPLICANT_NOTIFICATION_EMAIL', False):\n send_approval_status_update(instance)\n\n\n@receiver(post_save, sender=CommitteeApplication)\ndef notify_new_committee_application(sender, instance, created, **kwargs):\n if created:\n send_committee_application_notification(instance, [settings.EMAIL_OPPTAK], link_to_admin=True)\n if settings.APPROVAL_SETTINGS.get('SEND_COMMITTEEAPPLICATION_APPLICANT_EMAIL', False):\n send_committee_application_notification(instance, [instance.get_email()], link_to_admin=False)\n", "path": "apps/approval/signals.py"}, {"content": "import logging\n\nfrom django.conf import settings\nfrom django.core.exceptions import ImproperlyConfigured\nfrom django.core.mail import EmailMessage, send_mail\nfrom django.template.loader import render_to_string\nfrom django.urls import reverse\n\n\ndef send_approval_notification(approval):\n logger = logging.getLogger(__name__)\n d = {\n 'approval': approval,\n 'approval_url': settings.BASE_URL + reverse('approvals')\n }\n\n to_emails = [settings.EMAIL_HS]\n content = render_to_string('approval/email/approval_notification.txt', d)\n\n try:\n EmailMessage(\"[Medlemskapss\u00f8knad] %s\" % approval.applicant.get_full_name(),\n content, settings.DEFAULT_FROM_EMAIL, to_emails).send()\n except ImproperlyConfigured:\n logger.warn('Failed to send approval approver notification email for approval#{pk}.'.format(\n {'pk': approval.pk}))\n\n\ndef send_approval_status_update(approval):\n logger = logging.getLogger(__name__)\n\n accepted = approval.approved\n message = \"Ditt medlemskap i Online er \"\n if accepted:\n message += \"godkjent.\"\n else:\n message += \"ikke godkjent.\"\n if len(approval.message) == 0:\n message += \" Ta kontakt med Online for begrunnelse.\"\n else:\n message += approval.message\n try:\n EmailMessage(\"Soknad om medlemskap i Online er vurdert\",\n message,\n settings.DEFAULT_FROM_EMAIL,\n [approval.applicant.get_email()],\n ).send()\n except ImproperlyConfigured:\n logger.warn('Failed to notify applicant about updated status on membership for approval#{pk}.'.format(\n {'pk': approval.pk}))\n\n\ndef send_committee_application_notification(application, to_emails, link_to_admin=False):\n context = {\n 'link_to_admin': link_to_admin,\n 'absolute_url': settings.BASE_URL + application.get_absolute_url(),\n 'applicant_name': application.get_name(),\n }\n message = render_to_string('approval/email/committeeapplication_notification.txt', context)\n send_mail('[opptak] Bekreftelse p\u00e5 komites\u00f8knad', message, settings.DEFAULT_FROM_EMAIL, to_emails)\n", "path": "apps/approval/tasks.py"}], "after_files": [{"content": "from django.conf import settings\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\n\nfrom apps.approval.models import CommitteeApplication, MembershipApproval\n\nfrom .tasks import 
(send_approval_notification, send_approval_status_update,\n send_committee_application_notification)\n\n\n@receiver(post_save, sender=MembershipApproval)\ndef new_membership_approval_handler(sender, instance, created, **kwargs):\n \"\"\"\n\n :param sender: The sending model.\n :type sender: MembershipApproval\n :param instance: The MembershipApproval instance\n :type instance: MembershipApproval\n :param created: True or False, whether this instance is new or not.\n :type created: bool\n :param kwargs: Other parameters.\n :type kwargs: dict\n :return: Nothing\n :rtype: None\n \"\"\"\n\n if created and not instance.processed:\n if settings.APPROVAL_SETTINGS.get('SEND_APPROVER_NOTIFICATION_EMAIL', False):\n send_approval_notification(instance)\n\n\n@receiver(post_save, sender=MembershipApproval)\ndef notify_membership_applicant_handler(sender, instance, created, **kwargs):\n \"\"\"\n\n :param sender: The sending model.\n :type sender: Approval\n :param instance: The Approval instance\n :type instance: Approval\n :param approved: True or False, whether this instance is new or not.\n :type created: bool\n :param kwargs: Other parameters.\n :type kwargs: dict\n :return: Nothing\n :rtype: None\n \"\"\"\n\n if not created and instance.processed and instance.applicant.get_email():\n if settings.APPROVAL_SETTINGS.get('SEND_APPLICANT_NOTIFICATION_EMAIL', False):\n send_approval_status_update(instance)\n\n\n@receiver(post_save, sender=CommitteeApplication)\ndef notify_new_committee_application(sender, instance, created, **kwargs):\n if created:\n send_committee_application_notification(instance, [settings.EMAIL_OPPTAK], link_to_admin=True)\n if settings.APPROVAL_SETTINGS.get('SEND_COMMITTEEAPPLICATION_APPLICANT_EMAIL', False):\n send_committee_application_notification(instance, [instance.get_email().email], link_to_admin=False)\n", "path": "apps/approval/signals.py"}, {"content": "import logging\n\nfrom django.conf import settings\nfrom django.core.exceptions import ImproperlyConfigured\nfrom django.core.mail import EmailMessage, send_mail\nfrom django.template.loader import render_to_string\nfrom django.urls import reverse\n\n\ndef send_approval_notification(approval):\n logger = logging.getLogger(__name__)\n d = {\n 'approval': approval,\n 'approval_url': settings.BASE_URL + reverse('approvals')\n }\n\n to_emails = [settings.EMAIL_HS]\n content = render_to_string('approval/email/approval_notification.txt', d)\n\n try:\n EmailMessage(\"[Medlemskapss\u00f8knad] %s\" % approval.applicant.get_full_name(),\n content, settings.DEFAULT_FROM_EMAIL, to_emails).send()\n except ImproperlyConfigured:\n logger.warn('Failed to send approval approver notification email for approval#{pk}.'.format(\n {'pk': approval.pk}))\n\n\ndef send_approval_status_update(approval):\n logger = logging.getLogger(__name__)\n\n accepted = approval.approved\n message = \"Ditt medlemskap i Online er \"\n if accepted:\n message += \"godkjent.\"\n else:\n message += \"ikke godkjent.\"\n if len(approval.message) == 0:\n message += \" Ta kontakt med Online for begrunnelse.\"\n else:\n message += approval.message\n try:\n EmailMessage(\"Soknad om medlemskap i Online er vurdert\",\n message,\n settings.DEFAULT_FROM_EMAIL,\n [approval.applicant.get_email().email],\n ).send()\n except ImproperlyConfigured:\n logger.warn('Failed to notify applicant about updated status on membership for approval#{pk}.'.format(\n {'pk': approval.pk}))\n\n\ndef send_committee_application_notification(application, to_emails, link_to_admin=False):\n context = {\n 
'link_to_admin': link_to_admin,\n 'absolute_url': settings.BASE_URL + application.get_absolute_url(),\n 'applicant_name': application.get_name(),\n }\n message = render_to_string('approval/email/committeeapplication_notification.txt', context)\n send_mail('[opptak] Bekreftelse p\u00e5 komites\u00f8knad', message, settings.DEFAULT_FROM_EMAIL, to_emails)\n", "path": "apps/approval/tasks.py"}]}
| 1,849 | 264 |
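A rough illustration of the failure mode in the record above, using a hypothetical stand-in for the project's `Email` model (the real model lives in onlineweb4's accounts app):

```python
class Email:  # hypothetical stand-in for the onlineweb4 Email model
    def __init__(self, email):
        self.email = email  # the plain address string lives on this attribute

recipient = Email("applicant@example.org")

# Django's mail machinery ultimately applies string operations, len() among
# them, to each recipient, so passing the wrapper object fails:
try:
    len(recipient)
except TypeError as exc:
    print(exc)  # object of type 'Email' has no len()

# The patch unwraps the address first, which is a plain string:
print(len(recipient.email))  # 21
```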
gh_patches_debug_32292
|
rasdani/github-patches
|
git_diff
|
CiviWiki__OpenCiviWiki-1088
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Migrate threads urls to path
in `threads` app, we need to change `url()` function with `path()` function as discussed in #1066
https://github.com/CiviWiki/OpenCiviWiki/blob/d7f24fad7c0a2565da3bf2cd54e89a847d7479dd/project/threads/urls.py#L19-L41
Conversion to [path](https://github.com/CiviWiki/OpenCiviWiki/blob/d7f24fad7c0a2565da3bf2cd54e89a847d7479dd/project/threads/urls.py#L19-L41) is simple.
For example,
```python
url(r"^thread_data/(?P<thread_id>\w+)/$", get_thread, name="get thread"),
```
should become
```python
path("thread_data/(<int:thread_id>/", get_thread, name="get thread"),
```
We need to be changed all usages of `url()` function in `threads` app.
Migrate threads urls to path
in `threads` app, we need to change `url()` function with `path()` function as discussed in #1066
https://github.com/CiviWiki/OpenCiviWiki/blob/d7f24fad7c0a2565da3bf2cd54e89a847d7479dd/project/threads/urls.py#L19-L41
Conversion to [path](https://github.com/CiviWiki/OpenCiviWiki/blob/d7f24fad7c0a2565da3bf2cd54e89a847d7479dd/project/threads/urls.py#L19-L41) is simple.
For example,
```python
url(r"^thread_data/(?P<thread_id>\w+)/$", get_thread, name="get thread"),
```
should become
```python
path("thread_data/(<int:thread_id>/", get_thread, name="get thread"),
```
We need to be changed all usages of `url()` function in `threads` app.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `project/threads/urls.py`
Content:
```
1 from django.conf.urls import include, url
2 from rest_framework.routers import DefaultRouter
3
4 from .api import (create_civi, delete_civi, edit_civi, edit_thread, get_civi,
5 get_thread, rate_civi, upload_civi_image, new_thread, get_civis,
6 get_responses, upload_thread_image)
7
8 from .views import (
9 ThreadViewSet, CategoryViewSet,
10 CiviViewSet
11 )
12 from accounts.api import ProfileViewSet
13
14 router = DefaultRouter(trailing_slash=False)
15 router.register(r"threads", ThreadViewSet)
16 router.register(r"categories", CategoryViewSet)
17 router.register(r"civis", CiviViewSet)
18 router.register(r"accounts", ProfileViewSet)
19
20 urlpatterns = [
21 url(r"^v1/", include(router.urls)),
22 ]
23
24 urlpatterns += [
25 url(r"^thread_data/(?P<thread_id>\w+)/$", get_thread, name="get thread"),
26 url(r"^civi_data/(?P<civi_id>\w+)$", get_civi, name="get civi"),
27 url(r"^threads/(?P<thread_id>\w+)/civis$", get_civis, name="get civis"),
28 url(
29 r"^response_data/(?P<thread_id>\w+)/(?P<civi_id>\w+)/$",
30 get_responses,
31 name="get responses",
32 ),
33 url(r"^new_thread/$", new_thread, name="new thread"),
34 url(r"^edit_thread/$", edit_thread, name="edit thread"),
35 url(r"^new_civi/$", create_civi, name="new civi"),
36 url(r"^rate_civi/$", rate_civi, name="rate civi"),
37 url(r"^edit_civi/$", edit_civi, name="edit civi"),
38 url(r"^delete_civi/$", delete_civi, name="delete civi"),
39 url(r"^upload_images/$", upload_civi_image, name="upload images"),
40 url(r"^upload_image/$", upload_thread_image, name="upload image"),
41 ]
42
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/project/threads/urls.py b/project/threads/urls.py
--- a/project/threads/urls.py
+++ b/project/threads/urls.py
@@ -1,4 +1,5 @@
-from django.conf.urls import include, url
+from django.conf.urls import include
+from django.urls import path
from rest_framework.routers import DefaultRouter
from .api import (create_civi, delete_civi, edit_civi, edit_thread, get_civi,
@@ -18,24 +19,24 @@
router.register(r"accounts", ProfileViewSet)
urlpatterns = [
- url(r"^v1/", include(router.urls)),
+ path("v1/", include(router.urls)),
]
urlpatterns += [
- url(r"^thread_data/(?P<thread_id>\w+)/$", get_thread, name="get thread"),
- url(r"^civi_data/(?P<civi_id>\w+)$", get_civi, name="get civi"),
- url(r"^threads/(?P<thread_id>\w+)/civis$", get_civis, name="get civis"),
- url(
- r"^response_data/(?P<thread_id>\w+)/(?P<civi_id>\w+)/$",
+ path("thread_data/<int:thread_id>/", get_thread, name="get thread"),
+ path("civi_data/<int:civi_id>/", get_civi, name="get civi"),
+ path("threads/<int:thread_id>/civis", get_civis, name="get civis"),
+ path(
+ "response_data/<int:thread_id>/<int:civi_id>/",
get_responses,
name="get responses",
),
- url(r"^new_thread/$", new_thread, name="new thread"),
- url(r"^edit_thread/$", edit_thread, name="edit thread"),
- url(r"^new_civi/$", create_civi, name="new civi"),
- url(r"^rate_civi/$", rate_civi, name="rate civi"),
- url(r"^edit_civi/$", edit_civi, name="edit civi"),
- url(r"^delete_civi/$", delete_civi, name="delete civi"),
- url(r"^upload_images/$", upload_civi_image, name="upload images"),
- url(r"^upload_image/$", upload_thread_image, name="upload image"),
+ path("new_thread/", new_thread, name="new thread"),
+ path("edit_thread/", edit_thread, name="edit thread"),
+ path("new_civi/", create_civi, name="new civi"),
+ path("rate_civi/", rate_civi, name="rate civi"),
+ path("edit_civi/", edit_civi, name="edit civi"),
+ path("delete_civi/", delete_civi, name="delete civi"),
+ path("upload_images/", upload_civi_image, name="upload images"),
+ path("upload_image/", upload_thread_image, name="upload image"),
]
|
{"golden_diff": "diff --git a/project/threads/urls.py b/project/threads/urls.py\n--- a/project/threads/urls.py\n+++ b/project/threads/urls.py\n@@ -1,4 +1,5 @@\n-from django.conf.urls import include, url\r\n+from django.conf.urls import include\r\n+from django.urls import path\r\n from rest_framework.routers import DefaultRouter\r\n \r\n from .api import (create_civi, delete_civi, edit_civi, edit_thread, get_civi,\r\n@@ -18,24 +19,24 @@\n router.register(r\"accounts\", ProfileViewSet)\r\n \r\n urlpatterns = [\r\n- url(r\"^v1/\", include(router.urls)),\r\n+ path(\"v1/\", include(router.urls)),\r\n ]\r\n \r\n urlpatterns += [\r\n- url(r\"^thread_data/(?P<thread_id>\\w+)/$\", get_thread, name=\"get thread\"),\r\n- url(r\"^civi_data/(?P<civi_id>\\w+)$\", get_civi, name=\"get civi\"),\r\n- url(r\"^threads/(?P<thread_id>\\w+)/civis$\", get_civis, name=\"get civis\"),\r\n- url(\r\n- r\"^response_data/(?P<thread_id>\\w+)/(?P<civi_id>\\w+)/$\",\r\n+ path(\"thread_data/<int:thread_id>/\", get_thread, name=\"get thread\"),\r\n+ path(\"civi_data/<int:civi_id>/\", get_civi, name=\"get civi\"),\r\n+ path(\"threads/<int:thread_id>/civis\", get_civis, name=\"get civis\"),\r\n+ path(\r\n+ \"response_data/<int:thread_id>/<int:civi_id>/\",\r\n get_responses,\r\n name=\"get responses\",\r\n ),\r\n- url(r\"^new_thread/$\", new_thread, name=\"new thread\"),\r\n- url(r\"^edit_thread/$\", edit_thread, name=\"edit thread\"),\r\n- url(r\"^new_civi/$\", create_civi, name=\"new civi\"),\r\n- url(r\"^rate_civi/$\", rate_civi, name=\"rate civi\"),\r\n- url(r\"^edit_civi/$\", edit_civi, name=\"edit civi\"),\r\n- url(r\"^delete_civi/$\", delete_civi, name=\"delete civi\"),\r\n- url(r\"^upload_images/$\", upload_civi_image, name=\"upload images\"),\r\n- url(r\"^upload_image/$\", upload_thread_image, name=\"upload image\"),\r\n+ path(\"new_thread/\", new_thread, name=\"new thread\"),\r\n+ path(\"edit_thread/\", edit_thread, name=\"edit thread\"),\r\n+ path(\"new_civi/\", create_civi, name=\"new civi\"),\r\n+ path(\"rate_civi/\", rate_civi, name=\"rate civi\"),\r\n+ path(\"edit_civi/\", edit_civi, name=\"edit civi\"),\r\n+ path(\"delete_civi/\", delete_civi, name=\"delete civi\"),\r\n+ path(\"upload_images/\", upload_civi_image, name=\"upload images\"),\r\n+ path(\"upload_image/\", upload_thread_image, name=\"upload image\"),\r\n ]\n", "issue": "Migrate threads urls to path\nin `threads` app, we need to change `url()` function with `path()` function as discussed in #1066\r\n\r\nhttps://github.com/CiviWiki/OpenCiviWiki/blob/d7f24fad7c0a2565da3bf2cd54e89a847d7479dd/project/threads/urls.py#L19-L41\r\n\r\nConversion to [path](https://github.com/CiviWiki/OpenCiviWiki/blob/d7f24fad7c0a2565da3bf2cd54e89a847d7479dd/project/threads/urls.py#L19-L41) is simple.\r\n\r\nFor example,\r\n\r\n```python\r\nurl(r\"^thread_data/(?P<thread_id>\\w+)/$\", get_thread, name=\"get thread\"),\r\n```\r\n\r\nshould become\r\n\r\n```python\r\npath(\"thread_data/(<int:thread_id>/\", get_thread, name=\"get thread\"),\r\n```\r\n\r\nWe need to be changed all usages of `url()` function in `threads` app.\nMigrate threads urls to path\nin `threads` app, we need to change `url()` function with `path()` function as discussed in #1066\r\n\r\nhttps://github.com/CiviWiki/OpenCiviWiki/blob/d7f24fad7c0a2565da3bf2cd54e89a847d7479dd/project/threads/urls.py#L19-L41\r\n\r\nConversion to [path](https://github.com/CiviWiki/OpenCiviWiki/blob/d7f24fad7c0a2565da3bf2cd54e89a847d7479dd/project/threads/urls.py#L19-L41) is simple.\r\n\r\nFor 
example,\r\n\r\n```python\r\nurl(r\"^thread_data/(?P<thread_id>\\w+)/$\", get_thread, name=\"get thread\"),\r\n```\r\n\r\nshould become\r\n\r\n```python\r\npath(\"thread_data/(<int:thread_id>/\", get_thread, name=\"get thread\"),\r\n```\r\n\r\nWe need to be changed all usages of `url()` function in `threads` app.\n", "before_files": [{"content": "from django.conf.urls import include, url\r\nfrom rest_framework.routers import DefaultRouter\r\n\r\nfrom .api import (create_civi, delete_civi, edit_civi, edit_thread, get_civi,\r\n get_thread, rate_civi, upload_civi_image, new_thread, get_civis,\r\n get_responses, upload_thread_image)\r\n\r\nfrom .views import (\r\n ThreadViewSet, CategoryViewSet,\r\n CiviViewSet\r\n)\r\nfrom accounts.api import ProfileViewSet\r\n\r\nrouter = DefaultRouter(trailing_slash=False)\r\nrouter.register(r\"threads\", ThreadViewSet)\r\nrouter.register(r\"categories\", CategoryViewSet)\r\nrouter.register(r\"civis\", CiviViewSet)\r\nrouter.register(r\"accounts\", ProfileViewSet)\r\n\r\nurlpatterns = [\r\n url(r\"^v1/\", include(router.urls)),\r\n]\r\n\r\nurlpatterns += [\r\n url(r\"^thread_data/(?P<thread_id>\\w+)/$\", get_thread, name=\"get thread\"),\r\n url(r\"^civi_data/(?P<civi_id>\\w+)$\", get_civi, name=\"get civi\"),\r\n url(r\"^threads/(?P<thread_id>\\w+)/civis$\", get_civis, name=\"get civis\"),\r\n url(\r\n r\"^response_data/(?P<thread_id>\\w+)/(?P<civi_id>\\w+)/$\",\r\n get_responses,\r\n name=\"get responses\",\r\n ),\r\n url(r\"^new_thread/$\", new_thread, name=\"new thread\"),\r\n url(r\"^edit_thread/$\", edit_thread, name=\"edit thread\"),\r\n url(r\"^new_civi/$\", create_civi, name=\"new civi\"),\r\n url(r\"^rate_civi/$\", rate_civi, name=\"rate civi\"),\r\n url(r\"^edit_civi/$\", edit_civi, name=\"edit civi\"),\r\n url(r\"^delete_civi/$\", delete_civi, name=\"delete civi\"),\r\n url(r\"^upload_images/$\", upload_civi_image, name=\"upload images\"),\r\n url(r\"^upload_image/$\", upload_thread_image, name=\"upload image\"),\r\n]\r\n", "path": "project/threads/urls.py"}], "after_files": [{"content": "from django.conf.urls import include\r\nfrom django.urls import path\r\nfrom rest_framework.routers import DefaultRouter\r\n\r\nfrom .api import (create_civi, delete_civi, edit_civi, edit_thread, get_civi,\r\n get_thread, rate_civi, upload_civi_image, new_thread, get_civis,\r\n get_responses, upload_thread_image)\r\n\r\nfrom .views import (\r\n ThreadViewSet, CategoryViewSet,\r\n CiviViewSet\r\n)\r\nfrom accounts.api import ProfileViewSet\r\n\r\nrouter = DefaultRouter(trailing_slash=False)\r\nrouter.register(r\"threads\", ThreadViewSet)\r\nrouter.register(r\"categories\", CategoryViewSet)\r\nrouter.register(r\"civis\", CiviViewSet)\r\nrouter.register(r\"accounts\", ProfileViewSet)\r\n\r\nurlpatterns = [\r\n path(\"v1/\", include(router.urls)),\r\n]\r\n\r\nurlpatterns += [\r\n path(\"thread_data/<int:thread_id>/\", get_thread, name=\"get thread\"),\r\n path(\"civi_data/<int:civi_id>/\", get_civi, name=\"get civi\"),\r\n path(\"threads/<int:thread_id>/civis\", get_civis, name=\"get civis\"),\r\n path(\r\n \"response_data/<int:thread_id>/<int:civi_id>/\",\r\n get_responses,\r\n name=\"get responses\",\r\n ),\r\n path(\"new_thread/\", new_thread, name=\"new thread\"),\r\n path(\"edit_thread/\", edit_thread, name=\"edit thread\"),\r\n path(\"new_civi/\", create_civi, name=\"new civi\"),\r\n path(\"rate_civi/\", rate_civi, name=\"rate civi\"),\r\n path(\"edit_civi/\", edit_civi, name=\"edit civi\"),\r\n path(\"delete_civi/\", delete_civi, name=\"delete civi\"),\r\n 
path(\"upload_images/\", upload_civi_image, name=\"upload images\"),\r\n path(\"upload_image/\", upload_thread_image, name=\"upload image\"),\r\n]\r\n", "path": "project/threads/urls.py"}]}
| 1,245 | 660 |
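For context on the migration above, the two routing styles are equivalent for this pattern except that `path()` converters also cast the captured value; the sketch below shows both side by side for comparison (the view is illustrative and assumes Django 2.0 or later is installed):

```python
from django.http import HttpResponse
from django.urls import path, re_path  # url() is a deprecated alias of re_path()

def get_thread(request, thread_id):
    return HttpResponse(f"thread {thread_id}")

urlpatterns = [
    # Old style: regex with a named group; \w+ hands the view a string.
    re_path(r"^thread_data/(?P<thread_id>\w+)/$", get_thread, name="get thread"),
    # New style: the <int:thread_id> converter matches digits and passes an int.
    path("thread_data/<int:thread_id>/", get_thread, name="get thread"),
]
```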
gh_patches_debug_19561
|
rasdani/github-patches
|
git_diff
|
networkx__networkx-6503
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Join operation in trees---not handling label_attribute
<!-- If you have a general question about NetworkX, please use the discussions tab to create a new discussion -->
<!--- Provide a general summary of the issue in the Title above -->
[https://github.com/networkx/networkx/blob/main/networkx/algorithms/tree/operations.py](https://github.com/networkx/networkx/blob/main/networkx/algorithms/tree/operations.py)
1. The resulting graph of join operation in trees isn't including the old labels of inputs.
2. Not handling the cases where label_attribute is passed as an argument.
### Current Behavior

<!--- Tell us what happens instead of the expected behavior -->
### Expected Behavior

<!--- Tell us what should happen -->
### Steps to Reproduce
As shown above
<!--- Provide a minimal example that reproduces the bug -->
### Environment
<!--- Please provide details about your local environment -->
Python version: 3.10.6
NetworkX version: 3.0
### Additional context
[https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.tree.operations.join.html](https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.tree.operations.join.html)
<!--- Add any other context about the problem here, screenshots, etc. -->
Improve test coverage for operations.py (join)
<!-- If you have a general question about NetworkX, please use the discussions tab to create a new discussion -->
<!--- Provide a general summary of the issue in the Title above -->
### Current Behavior
https://app.codecov.io/gh/networkx/networkx/blob/main/networkx/algorithms/tree/operations.py the current test coverage is 92.8%. There are still some cases needed to be handled.
<!--- Tell us what happens instead of the expected behavior -->
### Expected Behavior
<!--- Tell us what should happen -->
https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.tree.operations.join.html
1. Test case to check label_attribute should be added
2. In the documentation its written that the inputs must be tree. But this function works for graphs too. Could you tell me if its for trees or graphs as well?
### Steps to Reproduce
<!--- Provide a minimal example that reproduces the bug -->
### Environment
<!--- Please provide details about your local environment -->
Python version:3.10.6
NetworkX version:3.0
### Additional context
<!--- Add any other context about the problem here, screenshots, etc. -->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `networkx/algorithms/tree/operations.py`
Content:
```
1 """Operations on trees."""
2 from functools import partial
3 from itertools import accumulate, chain
4
5 import networkx as nx
6
7 __all__ = ["join"]
8
9
10 def join(rooted_trees, label_attribute=None):
11 """Returns a new rooted tree with a root node joined with the roots
12 of each of the given rooted trees.
13
14 Parameters
15 ----------
16 rooted_trees : list
17 A list of pairs in which each left element is a NetworkX graph
18 object representing a tree and each right element is the root
19 node of that tree. The nodes of these trees will be relabeled to
20 integers.
21
22 label_attribute : str
23 If provided, the old node labels will be stored in the new tree
24 under this node attribute. If not provided, the node attribute
25 ``'_old'`` will store the original label of the node in the
26 rooted trees given in the input.
27
28 Returns
29 -------
30 NetworkX graph
31 The rooted tree whose subtrees are the given rooted trees. The
32 new root node is labeled 0. Each non-root node has an attribute,
33 as described under the keyword argument ``label_attribute``,
34 that indicates the label of the original node in the input tree.
35
36 Notes
37 -----
38 Graph, edge, and node attributes are propagated from the given
39 rooted trees to the created tree. If there are any overlapping graph
40 attributes, those from later trees will overwrite those from earlier
41 trees in the tuple of positional arguments.
42
43 Examples
44 --------
45 Join two full balanced binary trees of height *h* to get a full
46 balanced binary tree of depth *h* + 1::
47
48 >>> h = 4
49 >>> left = nx.balanced_tree(2, h)
50 >>> right = nx.balanced_tree(2, h)
51 >>> joined_tree = nx.join([(left, 0), (right, 0)])
52 >>> nx.is_isomorphic(joined_tree, nx.balanced_tree(2, h + 1))
53 True
54
55 """
56 if len(rooted_trees) == 0:
57 return nx.empty_graph(1)
58
59 # Unzip the zipped list of (tree, root) pairs.
60 trees, roots = zip(*rooted_trees)
61
62 # The join of the trees has the same type as the type of the first
63 # tree.
64 R = type(trees[0])()
65
66 # Relabel the nodes so that their union is the integers starting at 1.
67 if label_attribute is None:
68 label_attribute = "_old"
69 relabel = partial(
70 nx.convert_node_labels_to_integers, label_attribute=label_attribute
71 )
72 lengths = (len(tree) for tree in trees[:-1])
73 first_labels = chain([0], accumulate(lengths))
74 trees = [
75 relabel(tree, first_label=first_label + 1)
76 for tree, first_label in zip(trees, first_labels)
77 ]
78
79 # Get the relabeled roots.
80 roots = [
81 next(v for v, d in tree.nodes(data=True) if d.get("_old") == root)
82 for tree, root in zip(trees, roots)
83 ]
84
85 # Remove the old node labels.
86 for tree in trees:
87 for v in tree:
88 tree.nodes[v].pop("_old")
89
90 # Add all sets of nodes and edges, with data.
91 nodes = (tree.nodes(data=True) for tree in trees)
92 edges = (tree.edges(data=True) for tree in trees)
93 R.add_nodes_from(chain.from_iterable(nodes))
94 R.add_edges_from(chain.from_iterable(edges))
95
96 # Add graph attributes; later attributes take precedent over earlier
97 # attributes.
98 for tree in trees:
99 R.graph.update(tree.graph)
100
101 # Finally, join the subtrees at the root. We know 0 is unused by the
102 # way we relabeled the subtrees.
103 R.add_node(0)
104 R.add_edges_from((0, root) for root in roots)
105
106 return R
107
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/networkx/algorithms/tree/operations.py b/networkx/algorithms/tree/operations.py
--- a/networkx/algorithms/tree/operations.py
+++ b/networkx/algorithms/tree/operations.py
@@ -78,25 +78,13 @@
# Get the relabeled roots.
roots = [
- next(v for v, d in tree.nodes(data=True) if d.get("_old") == root)
+ next(v for v, d in tree.nodes(data=True) if d.get(label_attribute) == root)
for tree, root in zip(trees, roots)
]
- # Remove the old node labels.
+ # Add all sets of nodes and edges, attributes
for tree in trees:
- for v in tree:
- tree.nodes[v].pop("_old")
-
- # Add all sets of nodes and edges, with data.
- nodes = (tree.nodes(data=True) for tree in trees)
- edges = (tree.edges(data=True) for tree in trees)
- R.add_nodes_from(chain.from_iterable(nodes))
- R.add_edges_from(chain.from_iterable(edges))
-
- # Add graph attributes; later attributes take precedent over earlier
- # attributes.
- for tree in trees:
- R.graph.update(tree.graph)
+ R.update(tree)
# Finally, join the subtrees at the root. We know 0 is unused by the
# way we relabeled the subtrees.
|
{"golden_diff": "diff --git a/networkx/algorithms/tree/operations.py b/networkx/algorithms/tree/operations.py\n--- a/networkx/algorithms/tree/operations.py\n+++ b/networkx/algorithms/tree/operations.py\n@@ -78,25 +78,13 @@\n \n # Get the relabeled roots.\n roots = [\n- next(v for v, d in tree.nodes(data=True) if d.get(\"_old\") == root)\n+ next(v for v, d in tree.nodes(data=True) if d.get(label_attribute) == root)\n for tree, root in zip(trees, roots)\n ]\n \n- # Remove the old node labels.\n+ # Add all sets of nodes and edges, attributes\n for tree in trees:\n- for v in tree:\n- tree.nodes[v].pop(\"_old\")\n-\n- # Add all sets of nodes and edges, with data.\n- nodes = (tree.nodes(data=True) for tree in trees)\n- edges = (tree.edges(data=True) for tree in trees)\n- R.add_nodes_from(chain.from_iterable(nodes))\n- R.add_edges_from(chain.from_iterable(edges))\n-\n- # Add graph attributes; later attributes take precedent over earlier\n- # attributes.\n- for tree in trees:\n- R.graph.update(tree.graph)\n+ R.update(tree)\n \n # Finally, join the subtrees at the root. We know 0 is unused by the\n # way we relabeled the subtrees.\n", "issue": "Join operation in trees---not handling label_attribute\n<!-- If you have a general question about NetworkX, please use the discussions tab to create a new discussion -->\r\n\r\n<!--- Provide a general summary of the issue in the Title above -->\r\n[https://github.com/networkx/networkx/blob/main/networkx/algorithms/tree/operations.py](https://github.com/networkx/networkx/blob/main/networkx/algorithms/tree/operations.py)\r\n1. The resulting graph of join operation in trees isn't including the old labels of inputs.\r\n2. Not handling the cases where label_attribute is passed as an argument.\r\n\r\n### Current Behavior\r\n\r\n\r\n<!--- Tell us what happens instead of the expected behavior -->\r\n\r\n### Expected Behavior\r\n\r\n\r\n\r\n\r\n<!--- Tell us what should happen -->\r\n\r\n### Steps to Reproduce\r\nAs shown above\r\n<!--- Provide a minimal example that reproduces the bug -->\r\n\r\n### Environment\r\n\r\n<!--- Please provide details about your local environment -->\r\n\r\nPython version: 3.10.6\r\nNetworkX version: 3.0\r\n\r\n### Additional context\r\n[https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.tree.operations.join.html](https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.tree.operations.join.html)\r\n\r\n<!--- Add any other context about the problem here, screenshots, etc. -->\nImprove test coverage for operations.py (join)\n<!-- If you have a general question about NetworkX, please use the discussions tab to create a new discussion -->\r\n\r\n<!--- Provide a general summary of the issue in the Title above -->\r\n\r\n### Current Behavior\r\nhttps://app.codecov.io/gh/networkx/networkx/blob/main/networkx/algorithms/tree/operations.py the current test coverage is 92.8%. There are still some cases needed to be handled.\r\n<!--- Tell us what happens instead of the expected behavior -->\r\n\r\n### Expected Behavior\r\n\r\n<!--- Tell us what should happen -->\r\nhttps://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.tree.operations.join.html\r\n1. Test case to check label_attribute should be added \r\n2. In the documentation its written that the inputs must be tree. But this function works for graphs too. 
Could you tell me if its for trees or graphs as well?\r\n### Steps to Reproduce\r\n\r\n<!--- Provide a minimal example that reproduces the bug -->\r\n\r\n### Environment\r\n\r\n<!--- Please provide details about your local environment -->\r\n\r\nPython version:3.10.6\r\nNetworkX version:3.0\r\n\r\n### Additional context\r\n\r\n<!--- Add any other context about the problem here, screenshots, etc. -->\r\n\n", "before_files": [{"content": "\"\"\"Operations on trees.\"\"\"\nfrom functools import partial\nfrom itertools import accumulate, chain\n\nimport networkx as nx\n\n__all__ = [\"join\"]\n\n\ndef join(rooted_trees, label_attribute=None):\n \"\"\"Returns a new rooted tree with a root node joined with the roots\n of each of the given rooted trees.\n\n Parameters\n ----------\n rooted_trees : list\n A list of pairs in which each left element is a NetworkX graph\n object representing a tree and each right element is the root\n node of that tree. The nodes of these trees will be relabeled to\n integers.\n\n label_attribute : str\n If provided, the old node labels will be stored in the new tree\n under this node attribute. If not provided, the node attribute\n ``'_old'`` will store the original label of the node in the\n rooted trees given in the input.\n\n Returns\n -------\n NetworkX graph\n The rooted tree whose subtrees are the given rooted trees. The\n new root node is labeled 0. Each non-root node has an attribute,\n as described under the keyword argument ``label_attribute``,\n that indicates the label of the original node in the input tree.\n\n Notes\n -----\n Graph, edge, and node attributes are propagated from the given\n rooted trees to the created tree. If there are any overlapping graph\n attributes, those from later trees will overwrite those from earlier\n trees in the tuple of positional arguments.\n\n Examples\n --------\n Join two full balanced binary trees of height *h* to get a full\n balanced binary tree of depth *h* + 1::\n\n >>> h = 4\n >>> left = nx.balanced_tree(2, h)\n >>> right = nx.balanced_tree(2, h)\n >>> joined_tree = nx.join([(left, 0), (right, 0)])\n >>> nx.is_isomorphic(joined_tree, nx.balanced_tree(2, h + 1))\n True\n\n \"\"\"\n if len(rooted_trees) == 0:\n return nx.empty_graph(1)\n\n # Unzip the zipped list of (tree, root) pairs.\n trees, roots = zip(*rooted_trees)\n\n # The join of the trees has the same type as the type of the first\n # tree.\n R = type(trees[0])()\n\n # Relabel the nodes so that their union is the integers starting at 1.\n if label_attribute is None:\n label_attribute = \"_old\"\n relabel = partial(\n nx.convert_node_labels_to_integers, label_attribute=label_attribute\n )\n lengths = (len(tree) for tree in trees[:-1])\n first_labels = chain([0], accumulate(lengths))\n trees = [\n relabel(tree, first_label=first_label + 1)\n for tree, first_label in zip(trees, first_labels)\n ]\n\n # Get the relabeled roots.\n roots = [\n next(v for v, d in tree.nodes(data=True) if d.get(\"_old\") == root)\n for tree, root in zip(trees, roots)\n ]\n\n # Remove the old node labels.\n for tree in trees:\n for v in tree:\n tree.nodes[v].pop(\"_old\")\n\n # Add all sets of nodes and edges, with data.\n nodes = (tree.nodes(data=True) for tree in trees)\n edges = (tree.edges(data=True) for tree in trees)\n R.add_nodes_from(chain.from_iterable(nodes))\n R.add_edges_from(chain.from_iterable(edges))\n\n # Add graph attributes; later attributes take precedent over earlier\n # attributes.\n for tree in trees:\n R.graph.update(tree.graph)\n\n # Finally, join the 
subtrees at the root. We know 0 is unused by the\n # way we relabeled the subtrees.\n R.add_node(0)\n R.add_edges_from((0, root) for root in roots)\n\n return R\n", "path": "networkx/algorithms/tree/operations.py"}], "after_files": [{"content": "\"\"\"Operations on trees.\"\"\"\nfrom functools import partial\nfrom itertools import accumulate, chain\n\nimport networkx as nx\n\n__all__ = [\"join\"]\n\n\ndef join(rooted_trees, label_attribute=None):\n \"\"\"Returns a new rooted tree with a root node joined with the roots\n of each of the given rooted trees.\n\n Parameters\n ----------\n rooted_trees : list\n A list of pairs in which each left element is a NetworkX graph\n object representing a tree and each right element is the root\n node of that tree. The nodes of these trees will be relabeled to\n integers.\n\n label_attribute : str\n If provided, the old node labels will be stored in the new tree\n under this node attribute. If not provided, the node attribute\n ``'_old'`` will store the original label of the node in the\n rooted trees given in the input.\n\n Returns\n -------\n NetworkX graph\n The rooted tree whose subtrees are the given rooted trees. The\n new root node is labeled 0. Each non-root node has an attribute,\n as described under the keyword argument ``label_attribute``,\n that indicates the label of the original node in the input tree.\n\n Notes\n -----\n Graph, edge, and node attributes are propagated from the given\n rooted trees to the created tree. If there are any overlapping graph\n attributes, those from later trees will overwrite those from earlier\n trees in the tuple of positional arguments.\n\n Examples\n --------\n Join two full balanced binary trees of height *h* to get a full\n balanced binary tree of depth *h* + 1::\n\n >>> h = 4\n >>> left = nx.balanced_tree(2, h)\n >>> right = nx.balanced_tree(2, h)\n >>> joined_tree = nx.join([(left, 0), (right, 0)])\n >>> nx.is_isomorphic(joined_tree, nx.balanced_tree(2, h + 1))\n True\n\n \"\"\"\n if len(rooted_trees) == 0:\n return nx.empty_graph(1)\n\n # Unzip the zipped list of (tree, root) pairs.\n trees, roots = zip(*rooted_trees)\n\n # The join of the trees has the same type as the type of the first\n # tree.\n R = type(trees[0])()\n\n # Relabel the nodes so that their union is the integers starting at 1.\n if label_attribute is None:\n label_attribute = \"_old\"\n relabel = partial(\n nx.convert_node_labels_to_integers, label_attribute=label_attribute\n )\n lengths = (len(tree) for tree in trees[:-1])\n first_labels = chain([0], accumulate(lengths))\n trees = [\n relabel(tree, first_label=first_label + 1)\n for tree, first_label in zip(trees, first_labels)\n ]\n\n # Get the relabeled roots.\n roots = [\n next(v for v, d in tree.nodes(data=True) if d.get(label_attribute) == root)\n for tree, root in zip(trees, roots)\n ]\n\n # Add all sets of nodes and edges, attributes\n for tree in trees:\n R.update(tree)\n\n # Finally, join the subtrees at the root. We know 0 is unused by the\n # way we relabeled the subtrees.\n R.add_node(0)\n R.add_edges_from((0, root) for root in roots)\n\n return R\n", "path": "networkx/algorithms/tree/operations.py"}]}
| 2,009 | 319 |
gh_patches_debug_26658
|
rasdani/github-patches
|
git_diff
|
pulp__pulpcore-2779
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Append of chunked upload processes raw data
**Version**
Please provide the versions of the pulpcore and plugin packages in use, and how they are installed. If you are using Pulp via Katello, please provide the Katello version.
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
**Expected behavior**
A clear and concise description of what you expected to happen.
**Additional context**
Add any other context about the problem here. Please provide links to any previous discussions via Discourse or Bugzilla.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pulpcore/app/models/upload.py`
Content:
```
1 import hashlib
2 import os
3
4 from django.core.files.base import ContentFile
5 from django.db import models
6 from django.db.models.signals import post_delete
7 from django.dispatch import receiver
8 from rest_framework import serializers
9
10 from pulpcore.app.models import BaseModel, fields, storage
11
12
13 class Upload(BaseModel):
14 """
15 A chunked upload. Stores chunks until used to create an artifact, etc.
16
17 Fields:
18
19 size (models.BigIntegerField): The size of the file in bytes.
20 """
21
22 size = models.BigIntegerField()
23
24 def append(self, chunk, offset, sha256=None):
25 """
26 Append a chunk to an upload.
27
28 Args:
29 chunk (File): Binary file to append to the upload file.
30 offset (int): First byte position to write chunk to.
31 """
32 chunk_read = chunk.read()
33 current_sha256 = hashlib.sha256(chunk_read).hexdigest()
34 if sha256 and sha256 != current_sha256:
35 raise serializers.ValidationError("Checksum does not match chunk upload.")
36
37 upload_chunk = UploadChunk(upload=self, offset=offset, size=len(chunk))
38 filename = os.path.basename(upload_chunk.storage_path(""))
39 upload_chunk.file.save(filename, ContentFile(chunk_read))
40
41
42 class UploadChunk(BaseModel):
43 """
44 A chunk for an uploaded file.
45
46 Fields:
47
48 file (fields.FileField): A file where the uploaded chunk is stored.
49 upload (models.ForeignKey): Upload this chunk belongs to.
50 offset (models.BigIntegerField): Start of the chunk in bytes.
51 size (models.BigIntegerField): Size of the chunk in bytes.
52 """
53
54 def storage_path(self, name):
55 """
56 Callable used by FileField to determine where the uploaded file should be stored.
57
58 Args:
59 name (str): Original name of uploaded file. It is ignored by this method because the
60 pulp_id is used to determine a file path instead.
61 """
62 return storage.get_upload_chunk_file_path(self.pulp_id)
63
64 file = fields.FileField(null=False, upload_to=storage_path, max_length=255)
65 upload = models.ForeignKey(Upload, on_delete=models.CASCADE, related_name="chunks")
66 offset = models.BigIntegerField()
67 size = models.BigIntegerField()
68
69
70 @receiver(post_delete, sender=UploadChunk)
71 def upload_chunk_delete(instance, **kwargs):
72 instance.file.delete(save=False)
73
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pulpcore/app/models/upload.py b/pulpcore/app/models/upload.py
--- a/pulpcore/app/models/upload.py
+++ b/pulpcore/app/models/upload.py
@@ -1,6 +1,8 @@
import hashlib
import os
+from gettext import gettext as _
+
from django.core.files.base import ContentFile
from django.db import models
from django.db.models.signals import post_delete
@@ -26,17 +28,18 @@
Append a chunk to an upload.
Args:
- chunk (File): Binary file to append to the upload file.
+ chunk (File): Binary data to append to the upload file.
offset (int): First byte position to write chunk to.
"""
- chunk_read = chunk.read()
- current_sha256 = hashlib.sha256(chunk_read).hexdigest()
- if sha256 and sha256 != current_sha256:
- raise serializers.ValidationError("Checksum does not match chunk upload.")
+ chunk = chunk.read()
+ if sha256:
+ current_sha256 = hashlib.sha256(chunk).hexdigest()
+ if sha256 != current_sha256:
+ raise serializers.ValidationError(_("Checksum does not match chunk upload."))
upload_chunk = UploadChunk(upload=self, offset=offset, size=len(chunk))
filename = os.path.basename(upload_chunk.storage_path(""))
- upload_chunk.file.save(filename, ContentFile(chunk_read))
+ upload_chunk.file.save(filename, ContentFile(chunk))
class UploadChunk(BaseModel):
|
{"golden_diff": "diff --git a/pulpcore/app/models/upload.py b/pulpcore/app/models/upload.py\n--- a/pulpcore/app/models/upload.py\n+++ b/pulpcore/app/models/upload.py\n@@ -1,6 +1,8 @@\n import hashlib\n import os\n \n+from gettext import gettext as _\n+\n from django.core.files.base import ContentFile\n from django.db import models\n from django.db.models.signals import post_delete\n@@ -26,17 +28,18 @@\n Append a chunk to an upload.\n \n Args:\n- chunk (File): Binary file to append to the upload file.\n+ chunk (File): Binary data to append to the upload file.\n offset (int): First byte position to write chunk to.\n \"\"\"\n- chunk_read = chunk.read()\n- current_sha256 = hashlib.sha256(chunk_read).hexdigest()\n- if sha256 and sha256 != current_sha256:\n- raise serializers.ValidationError(\"Checksum does not match chunk upload.\")\n+ chunk = chunk.read()\n+ if sha256:\n+ current_sha256 = hashlib.sha256(chunk).hexdigest()\n+ if sha256 != current_sha256:\n+ raise serializers.ValidationError(_(\"Checksum does not match chunk upload.\"))\n \n upload_chunk = UploadChunk(upload=self, offset=offset, size=len(chunk))\n filename = os.path.basename(upload_chunk.storage_path(\"\"))\n- upload_chunk.file.save(filename, ContentFile(chunk_read))\n+ upload_chunk.file.save(filename, ContentFile(chunk))\n \n \n class UploadChunk(BaseModel):\n", "issue": "Append of chunked upload processes raw data\n**Version**\r\nPlease provide the versions of the pulpcore and plugin packages in use, and how they are installed. If you are using Pulp via Katello, please provide the Katello version.\r\n\r\n**Describe the bug**\r\nA clear and concise description of what the bug is.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n\r\n**Expected behavior**\r\nA clear and concise description of what you expected to happen.\r\n\r\n**Additional context**\r\nAdd any other context about the problem here. Please provide links to any previous discussions via Discourse or Bugzilla.\r\n\n", "before_files": [{"content": "import hashlib\nimport os\n\nfrom django.core.files.base import ContentFile\nfrom django.db import models\nfrom django.db.models.signals import post_delete\nfrom django.dispatch import receiver\nfrom rest_framework import serializers\n\nfrom pulpcore.app.models import BaseModel, fields, storage\n\n\nclass Upload(BaseModel):\n \"\"\"\n A chunked upload. 
Stores chunks until used to create an artifact, etc.\n\n Fields:\n\n size (models.BigIntegerField): The size of the file in bytes.\n \"\"\"\n\n size = models.BigIntegerField()\n\n def append(self, chunk, offset, sha256=None):\n \"\"\"\n Append a chunk to an upload.\n\n Args:\n chunk (File): Binary file to append to the upload file.\n offset (int): First byte position to write chunk to.\n \"\"\"\n chunk_read = chunk.read()\n current_sha256 = hashlib.sha256(chunk_read).hexdigest()\n if sha256 and sha256 != current_sha256:\n raise serializers.ValidationError(\"Checksum does not match chunk upload.\")\n\n upload_chunk = UploadChunk(upload=self, offset=offset, size=len(chunk))\n filename = os.path.basename(upload_chunk.storage_path(\"\"))\n upload_chunk.file.save(filename, ContentFile(chunk_read))\n\n\nclass UploadChunk(BaseModel):\n \"\"\"\n A chunk for an uploaded file.\n\n Fields:\n\n file (fields.FileField): A file where the uploaded chunk is stored.\n upload (models.ForeignKey): Upload this chunk belongs to.\n offset (models.BigIntegerField): Start of the chunk in bytes.\n size (models.BigIntegerField): Size of the chunk in bytes.\n \"\"\"\n\n def storage_path(self, name):\n \"\"\"\n Callable used by FileField to determine where the uploaded file should be stored.\n\n Args:\n name (str): Original name of uploaded file. It is ignored by this method because the\n pulp_id is used to determine a file path instead.\n \"\"\"\n return storage.get_upload_chunk_file_path(self.pulp_id)\n\n file = fields.FileField(null=False, upload_to=storage_path, max_length=255)\n upload = models.ForeignKey(Upload, on_delete=models.CASCADE, related_name=\"chunks\")\n offset = models.BigIntegerField()\n size = models.BigIntegerField()\n\n\n@receiver(post_delete, sender=UploadChunk)\ndef upload_chunk_delete(instance, **kwargs):\n instance.file.delete(save=False)\n", "path": "pulpcore/app/models/upload.py"}], "after_files": [{"content": "import hashlib\nimport os\n\nfrom gettext import gettext as _\n\nfrom django.core.files.base import ContentFile\nfrom django.db import models\nfrom django.db.models.signals import post_delete\nfrom django.dispatch import receiver\nfrom rest_framework import serializers\n\nfrom pulpcore.app.models import BaseModel, fields, storage\n\n\nclass Upload(BaseModel):\n \"\"\"\n A chunked upload. 
Stores chunks until used to create an artifact, etc.\n\n Fields:\n\n size (models.BigIntegerField): The size of the file in bytes.\n \"\"\"\n\n size = models.BigIntegerField()\n\n def append(self, chunk, offset, sha256=None):\n \"\"\"\n Append a chunk to an upload.\n\n Args:\n chunk (File): Binary data to append to the upload file.\n offset (int): First byte position to write chunk to.\n \"\"\"\n chunk = chunk.read()\n if sha256:\n current_sha256 = hashlib.sha256(chunk).hexdigest()\n if sha256 != current_sha256:\n raise serializers.ValidationError(_(\"Checksum does not match chunk upload.\"))\n\n upload_chunk = UploadChunk(upload=self, offset=offset, size=len(chunk))\n filename = os.path.basename(upload_chunk.storage_path(\"\"))\n upload_chunk.file.save(filename, ContentFile(chunk))\n\n\nclass UploadChunk(BaseModel):\n \"\"\"\n A chunk for an uploaded file.\n\n Fields:\n\n file (fields.FileField): A file where the uploaded chunk is stored.\n upload (models.ForeignKey): Upload this chunk belongs to.\n offset (models.BigIntegerField): Start of the chunk in bytes.\n size (models.BigIntegerField): Size of the chunk in bytes.\n \"\"\"\n\n def storage_path(self, name):\n \"\"\"\n Callable used by FileField to determine where the uploaded file should be stored.\n\n Args:\n name (str): Original name of uploaded file. It is ignored by this method because the\n pulp_id is used to determine a file path instead.\n \"\"\"\n return storage.get_upload_chunk_file_path(self.pulp_id)\n\n file = fields.FileField(null=False, upload_to=storage_path, max_length=255)\n upload = models.ForeignKey(Upload, on_delete=models.CASCADE, related_name=\"chunks\")\n offset = models.BigIntegerField()\n size = models.BigIntegerField()\n\n\n@receiver(post_delete, sender=UploadChunk)\ndef upload_chunk_delete(instance, **kwargs):\n instance.file.delete(save=False)\n", "path": "pulpcore/app/models/upload.py"}]}
| 1,029 | 338 |
gh_patches_debug_16493
|
rasdani/github-patches
|
git_diff
|
svthalia__concrexit-2808
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Invited from queue email missing some stuff
### Describe the bug

### How to reproduce
Steps to reproduce the behaviour:
1. Be in queue
2. Get invited (by people deregistering)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/events/emails.py`
Content:
```
1 """The emails defined by the events package."""
2 from django.conf import settings
3 from django.core.mail import EmailMessage
4 from django.template.loader import get_template
5 from django.utils.translation import gettext_lazy as _
6
7
8 def notify_first_waiting(event):
9 """Send an email to the first person on the waiting list when someone cancels their registration.
10
11 :param event: the event
12 """
13 if (
14 event.max_participants is not None
15 and event.eventregistration_set.filter(date_cancelled=None).count()
16 > event.max_participants
17 ):
18 # Prepare email to send to the first person on the waiting list
19 first_waiting = event.eventregistration_set.filter(
20 date_cancelled=None
21 ).order_by("date")[event.max_participants]
22
23 text_template = get_template("events/member_email.txt")
24
25 subject = _("[THALIA] Notification about your registration for '{}'").format(
26 event.title
27 )
28 text_message = text_template.render(
29 {
30 "event": event,
31 "registration": first_waiting,
32 "name": first_waiting.name or first_waiting.member.first_name,
33 "base_url": settings.BASE_URL,
34 }
35 )
36
37 EmailMessage(subject, text_message, to=[first_waiting.email]).send()
38
39
40 def notify_organiser(event, registration):
41 """Send an email to the organiser of the event if someone cancels their registration.
42
43 :param event: the event
44 :param registration: the registration that was cancelled
45 """
46 if not event.organisers.exists():
47 return
48
49 text_template = get_template("events/organiser_email.txt")
50 subject = f"Registration for {event.title} cancelled by member"
51 text_message = text_template.render({"event": event, "registration": registration})
52
53 EmailMessage(
54 subject,
55 text_message,
56 to=[
57 organiser.contact_mailinglist.name + "@" + settings.SITE_DOMAIN
58 for organiser in event.organisers.all()
59 ],
60 ).send()
61
62
63 def notify_waiting(event, registration):
64 text_template = get_template("events/more_places_email.txt")
65 subject = _("[THALIA] Notification about your registration for '{}'").format(
66 event.title
67 )
68 text_message = text_template.render(
69 {
70 "event": event,
71 "registration": registration,
72 "name": registration.name or registration.member.first_name,
73 "base_url": settings.BASE_URL,
74 }
75 )
76 EmailMessage(subject, text_message, to=[registration.email]).send()
77
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/website/events/emails.py b/website/events/emails.py
--- a/website/events/emails.py
+++ b/website/events/emails.py
@@ -25,12 +25,19 @@
subject = _("[THALIA] Notification about your registration for '{}'").format(
event.title
)
+
+ organiser_emails = [
+ organiser.contact_address
+ for organiser in event.organisers.all()
+ if organiser.contact_address is not None
+ ]
text_message = text_template.render(
{
"event": event,
"registration": first_waiting,
"name": first_waiting.name or first_waiting.member.first_name,
"base_url": settings.BASE_URL,
+ "organisers": organiser_emails,
}
)
|
{"golden_diff": "diff --git a/website/events/emails.py b/website/events/emails.py\n--- a/website/events/emails.py\n+++ b/website/events/emails.py\n@@ -25,12 +25,19 @@\n subject = _(\"[THALIA] Notification about your registration for '{}'\").format(\n event.title\n )\n+\n+ organiser_emails = [\n+ organiser.contact_address\n+ for organiser in event.organisers.all()\n+ if organiser.contact_address is not None\n+ ]\n text_message = text_template.render(\n {\n \"event\": event,\n \"registration\": first_waiting,\n \"name\": first_waiting.name or first_waiting.member.first_name,\n \"base_url\": settings.BASE_URL,\n+ \"organisers\": organiser_emails,\n }\n )\n", "issue": "Invited from queue email missing some stuff\n### Describe the bug\n\n\n\n### How to reproduce\nSteps to reproduce the behaviour:\n1. Be in queue\n2. Get invited (by people deregistering)\n", "before_files": [{"content": "\"\"\"The emails defined by the events package.\"\"\"\nfrom django.conf import settings\nfrom django.core.mail import EmailMessage\nfrom django.template.loader import get_template\nfrom django.utils.translation import gettext_lazy as _\n\n\ndef notify_first_waiting(event):\n \"\"\"Send an email to the first person on the waiting list when someone cancels their registration.\n\n :param event: the event\n \"\"\"\n if (\n event.max_participants is not None\n and event.eventregistration_set.filter(date_cancelled=None).count()\n > event.max_participants\n ):\n # Prepare email to send to the first person on the waiting list\n first_waiting = event.eventregistration_set.filter(\n date_cancelled=None\n ).order_by(\"date\")[event.max_participants]\n\n text_template = get_template(\"events/member_email.txt\")\n\n subject = _(\"[THALIA] Notification about your registration for '{}'\").format(\n event.title\n )\n text_message = text_template.render(\n {\n \"event\": event,\n \"registration\": first_waiting,\n \"name\": first_waiting.name or first_waiting.member.first_name,\n \"base_url\": settings.BASE_URL,\n }\n )\n\n EmailMessage(subject, text_message, to=[first_waiting.email]).send()\n\n\ndef notify_organiser(event, registration):\n \"\"\"Send an email to the organiser of the event if someone cancels their registration.\n\n :param event: the event\n :param registration: the registration that was cancelled\n \"\"\"\n if not event.organisers.exists():\n return\n\n text_template = get_template(\"events/organiser_email.txt\")\n subject = f\"Registration for {event.title} cancelled by member\"\n text_message = text_template.render({\"event\": event, \"registration\": registration})\n\n EmailMessage(\n subject,\n text_message,\n to=[\n organiser.contact_mailinglist.name + \"@\" + settings.SITE_DOMAIN\n for organiser in event.organisers.all()\n ],\n ).send()\n\n\ndef notify_waiting(event, registration):\n text_template = get_template(\"events/more_places_email.txt\")\n subject = _(\"[THALIA] Notification about your registration for '{}'\").format(\n event.title\n )\n text_message = text_template.render(\n {\n \"event\": event,\n \"registration\": registration,\n \"name\": registration.name or registration.member.first_name,\n \"base_url\": settings.BASE_URL,\n }\n )\n EmailMessage(subject, text_message, to=[registration.email]).send()\n", "path": "website/events/emails.py"}], "after_files": [{"content": "\"\"\"The emails defined by the events package.\"\"\"\nfrom django.conf import settings\nfrom django.core.mail import EmailMessage\nfrom django.template.loader import get_template\nfrom django.utils.translation import 
gettext_lazy as _\n\n\ndef notify_first_waiting(event):\n \"\"\"Send an email to the first person on the waiting list when someone cancels their registration.\n\n :param event: the event\n \"\"\"\n if (\n event.max_participants is not None\n and event.eventregistration_set.filter(date_cancelled=None).count()\n > event.max_participants\n ):\n # Prepare email to send to the first person on the waiting list\n first_waiting = event.eventregistration_set.filter(\n date_cancelled=None\n ).order_by(\"date\")[event.max_participants]\n\n text_template = get_template(\"events/member_email.txt\")\n\n subject = _(\"[THALIA] Notification about your registration for '{}'\").format(\n event.title\n )\n\n organiser_emails = [\n organiser.contact_address\n for organiser in event.organisers.all()\n if organiser.contact_address is not None\n ]\n text_message = text_template.render(\n {\n \"event\": event,\n \"registration\": first_waiting,\n \"name\": first_waiting.name or first_waiting.member.first_name,\n \"base_url\": settings.BASE_URL,\n \"organisers\": organiser_emails,\n }\n )\n\n EmailMessage(subject, text_message, to=[first_waiting.email]).send()\n\n\ndef notify_organiser(event, registration):\n \"\"\"Send an email to the organiser of the event if someone cancels their registration.\n\n :param event: the event\n :param registration: the registration that was cancelled\n \"\"\"\n if not event.organisers.exists():\n return\n\n text_template = get_template(\"events/organiser_email.txt\")\n subject = f\"Registration for {event.title} cancelled by member\"\n text_message = text_template.render({\"event\": event, \"registration\": registration})\n\n EmailMessage(\n subject,\n text_message,\n to=[\n organiser.contact_mailinglist.name + \"@\" + settings.SITE_DOMAIN\n for organiser in event.organisers.all()\n ],\n ).send()\n\n\ndef notify_waiting(event, registration):\n text_template = get_template(\"events/more_places_email.txt\")\n subject = _(\"[THALIA] Notification about your registration for '{}'\").format(\n event.title\n )\n text_message = text_template.render(\n {\n \"event\": event,\n \"registration\": registration,\n \"name\": registration.name or registration.member.first_name,\n \"base_url\": settings.BASE_URL,\n }\n )\n EmailMessage(subject, text_message, to=[registration.email]).send()\n", "path": "website/events/emails.py"}]}
| 1,035 | 174 |
gh_patches_debug_7108
|
rasdani/github-patches
|
git_diff
|
vas3k__vas3k.club-858
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug: it is possible to mute yourself
## Checklist
- [x] I searched the tracker for similar issues, including closed Issues
- [x] The bug reproduces consistently and I know how to trigger it
## Bug description
Technically, a user can mute themselves, after which they can no longer see their own posts and comments.
A user can also unmute themselves on their own (this works in both directions).
## Expected result
- When a user tries to mute themselves, the system shows a stub template: you cannot mute yourself.
- Muting and unmuting yourself is impossible.
## Steps to reproduce
1. **Mute**: go to the URL `vas3k.club/user/%USERNAME%/mute/`, where `%USERNAME%` is the user's handle (slug)
2. Follow the mute wizard.
3. **Unmute**: go to the URL `vas3k.club/user/%USERNAME%/mute/`, where `%USERNAME%` is the user's handle (slug)
4. Follow the mute wizard.
Screenshot of the mute template page (production):

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `users/views/muted.py`
Content:
```
1 from django.conf import settings
2 from django.http import HttpResponseForbidden
3 from django.shortcuts import get_object_or_404, render
4
5 from auth.helpers import auth_required
6 from club.exceptions import AccessDenied
7 from notifications.telegram.users import notify_admin_user_on_mute
8 from users.models.mute import Muted
9 from users.models.user import User
10
11
12 @auth_required
13 def toggle_mute(request, user_slug):
14 user_to = get_object_or_404(User, slug=user_slug)
15 if user_to.is_curator or user_to.is_moderator:
16 raise AccessDenied(title="У этого юзера иммунитет от мьюта")
17
18 total_user_muted_count = Muted.objects.filter(user_from=request.me).count()
19
20 # show form on GET
21 if request.method != "POST":
22 is_muted = Muted.is_muted(
23 user_from=request.me,
24 user_to=user_to,
25 )
26 if is_muted:
27 return render(request, "users/mute/unmute.html", {
28 "user": user_to,
29 })
30 else:
31 return render(request, "users/mute/mute.html", {
32 "user": user_to,
33 "mutes_left": settings.MAX_MUTE_COUNT - total_user_muted_count,
34 })
35
36 # else โ process POST
37 if total_user_muted_count > settings.MAX_MUTE_COUNT:
38 raise AccessDenied(
39 title="Вы замьютили слишком много людей",
40 message="Рекомендуем притормозить и поговорить с кем-нибудь..."
41 )
42
43 comment = request.POST.get("comment") or ""
44 mute, is_created = Muted.mute(
45 user_from=request.me,
46 user_to=user_to,
47 comment=comment,
48 )
49
50 if is_created:
51 # notify admins
52 notify_admin_user_on_mute(
53 user_from=request.me,
54 user_to=user_to,
55 comment=comment,
56 )
57
58 return render(request, "users/messages/muted.html", {
59 "user": user_to,
60 })
61 else:
62 # unmute this user
63 Muted.unmute(
64 user_from=request.me,
65 user_to=user_to,
66 )
67
68 return render(request, "users/messages/unmuted.html", {
69 "user": user_to,
70 })
71
72
73 @auth_required
74 def muted(request, user_slug):
75 if request.me.slug != user_slug:
76 return HttpResponseForbidden()
77
78 user = get_object_or_404(User, slug=user_slug)
79 muted_users = Muted.muted_by_user(user)
80
81 return render(request, "users/mute/index.html", {
82 "user": user,
83 "muted": muted_users,
84 })
85
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/users/views/muted.py b/users/views/muted.py
--- a/users/views/muted.py
+++ b/users/views/muted.py
@@ -15,6 +15,10 @@
if user_to.is_curator or user_to.is_moderator:
raise AccessDenied(title="У этого юзера иммунитет от мьюта")
+ if user_to == request.me:
+ raise AccessDenied(title='Хорошая попытка, но мьютить себя нельзя. Кожаный мешок, ты прекрасен!',
+ message='')
+
total_user_muted_count = Muted.objects.filter(user_from=request.me).count()
# show form on GET
|
{"golden_diff": "diff --git a/users/views/muted.py b/users/views/muted.py\n--- a/users/views/muted.py\n+++ b/users/views/muted.py\n@@ -15,6 +15,10 @@\n if user_to.is_curator or user_to.is_moderator:\n raise AccessDenied(title=\"\u0423 \u044d\u0442\u043e\u0433\u043e \u044e\u0437\u0435\u0440\u0430 \u0438\u043c\u043c\u0443\u043d\u0438\u0442\u0435\u0442 \u043e\u0442 \u043c\u044c\u044e\u0442\u0430\")\n \n+ if user_to == request.me:\n+ raise AccessDenied(title='\u0425\u043e\u0440\u043e\u0448\u0430\u044f \u043f\u043e\u043f\u044b\u0442\u043a\u0430, \u043d\u043e \u043c\u044c\u044e\u0442\u0438\u0442\u044c \u0441\u0435\u0431\u044f \u043d\u0435\u043b\u044c\u0437\u044f. \u041a\u043e\u0436\u0430\u043d\u044b\u0439 \u043c\u0435\u0448\u043e\u043a, \u0442\u044b \u043f\u0440\u0435\u043a\u0440\u0430\u0441\u0435\u043d!',\n+ message='')\n+\n total_user_muted_count = Muted.objects.filter(user_from=request.me).count()\n \n # show form on GET\n", "issue": "Bug: \u0432\u043e\u0437\u043c\u043e\u0436\u043d\u043e\u0441\u0442\u044c \u0437\u0430\u043c\u044c\u044e\u0442\u0438\u0442\u044c \u0441\u0430\u043c\u043e\u0433\u043e \u0441\u0435\u0431\u044f\n## \u0427\u0435\u043a\u043b\u0438\u0441\u0442\r\n\r\n- [x] \u042f \u043f\u043e\u0438\u0441\u043a\u0430\u043b \u043f\u043e\u0438\u0441\u043a\u043e\u043c \u043f\u043e \u0442\u0440\u0435\u043a\u0435\u0440\u0443 \u043f\u043e\u0445\u043e\u0436\u0438\u0435 \u043f\u0440\u043e\u0431\u043b\u0435\u043c\u044b, \u0432 \u0442\u043e\u043c \u0447\u0438\u0441\u043b\u0435 \u0432 \u0437\u0430\u043a\u0440\u044b\u0442\u044b\u0445 Issues\r\n- [x] \u0411\u0430\u0433 \u0441\u0442\u0430\u0431\u0438\u043b\u044c\u043d\u043e \u0432\u043e\u0441\u043f\u0440\u043e\u0438\u0437\u0432\u043e\u0434\u0438\u0442\u0441\u044f \u0438 \u044f \u0437\u043d\u0430\u044e \u043a\u0430\u043a \u044d\u0442\u043e \u0441\u0434\u0435\u043b\u0430\u0442\u044c\r\n\r\n## \u041e\u043f\u0438\u0441\u0430\u043d\u0438\u0435 \u0431\u0430\u0433\u0430\r\n\r\n\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u0442\u0435\u0445\u043d\u0438\u0447\u0435\u0441\u043a\u0438 \u043c\u043e\u0436\u0435\u0442 \u0437\u0430\u043c\u044c\u044e\u0438\u0442\u044c \u0441\u0430\u043c\u043e\u0433\u043e \u0441\u0435\u0431\u044f, \u043f\u043e\u0441\u043b\u0435 \u0447\u0435\u0433\u043e \u043d\u0435 \u0441\u043c\u043e\u0436\u0435\u0442 \u0443\u0432\u0438\u0434\u0435\u0442\u044c \u0441\u0432\u043e\u0438 \u043f\u043e\u0441\u0442\u044b \u0438 \u043a\u043e\u043c\u043c\u0435\u043d\u0442\u0430\u0440\u0438\u0438.\r\n\u041e\u0442\u043c\u044c\u044e\u0442\u0438\u0442\u044c \u0441\u0435\u0431\u044f \u043f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u0442\u043e\u0436\u0435 \u043c\u043e\u0436\u0435\u0442 \u0441\u0430\u043c\u043e\u0441\u0442\u043e\u044f\u0442\u0435\u043b\u044c\u043d\u043e (\u044d\u0442\u043e \u0440\u0430\u0431\u043e\u0442\u0430\u0435\u0442 \u0432 \u043e\u0431\u0435 \u0441\u0442\u043e\u0440\u043e\u043d\u044b).\r\n\r\n## \u041e\u0436\u0438\u0434\u0430\u0435\u043c\u044b\u0439 \u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442\r\n\r\n- \u041f\u0440\u0438 \u0432\u044b\u0437\u043e\u0432\u0435 \u043c\u044c\u044e\u0442\u0430 \u0441\u0430\u043c\u043e\u0433\u043e \u0441\u0435\u0431\u044f \u0441\u0438\u0441\u0442\u0435\u043c\u0430 \u043f\u043e\u043a\u0430\u0437\u044b\u0432\u0430\u0435\u0442 \u0448\u0430\u0431\u043b\u043e\u043d \u0441 \u0437\u0430\u0433\u043b\u0443\u0448\u043a\u043e\u0439: \u043d\u0435\u043b\u044c\u0437\u044f \u043c\u044c\u044e\u0442\u0438\u0442\u044c \u0441\u0435\u0431\u044f.\r\n- 
\u041d\u0435\u0432\u043e\u0437\u043c\u043e\u0436\u043d\u043e\u0441\u0442\u044c \u043c\u044c\u044e\u0442\u0430 \u0438 \u0440\u0430\u0437\u043c\u044c\u044e\u0442\u0430 \u0441\u0430\u043c\u043e\u0433\u043e \u0441\u0435\u0431\u044f.\r\n\r\n## \u0428\u0430\u0433\u0438 \u043a \u0432\u043e\u0441\u043f\u0440\u043e\u0438\u0437\u0432\u0435\u0434\u0435\u043d\u0438\u044e\r\n\r\n1. **Mute**: \u043f\u0435\u0440\u0435\u0439\u0442\u0438 \u043d\u0430 URL `vas3k.club/user/%USERNAME%/mute/`, \u0433\u0434\u0435 `%USERNAME%` \u2014 \u043f\u0441\u0435\u0432\u0434\u043e\u043d\u0438\u043c \u043f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044f (slug) \r\n2. \u0421\u043b\u0435\u0434\u043e\u0432\u0430\u0442\u044c \u043c\u0430\u0441\u0442\u0435\u0440\u0443 \u043c\u044c\u044e\u0442\u0430.\r\n3. **Unmute**: \u043f\u0435\u0440\u0435\u0439\u0442\u0438 \u043d\u0430 URL `vas3k.club/user/%USERNAME%/mute/`, \u0433\u0434\u0435 `%USERNAME%` \u2014 \u043f\u0441\u0435\u0432\u0434\u043e\u043d\u0438\u043c \u043f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044f (slug) \r\n4. \u0421\u043b\u0435\u0434\u043e\u0432\u0430\u0442\u044c \u043c\u0430\u0441\u0442\u0435\u0440\u0443 \u043c\u044c\u044e\u0442\u0430.\r\n\r\n\u0421\u043a\u0440\u0438\u043d\u0448\u043e\u0442 \u0441\u043e \u0441\u0442\u0440\u0430\u043d\u0438\u0446\u044b \u0448\u0430\u0431\u043b\u043e\u043d\u0430 \u043c\u044c\u044e\u0442\u0430 (\u043f\u0440\u043e\u0434\u0430\u043a\u0448\u043d):\r\n\n", "before_files": [{"content": "from django.conf import settings\nfrom django.http import HttpResponseForbidden\nfrom django.shortcuts import get_object_or_404, render\n\nfrom auth.helpers import auth_required\nfrom club.exceptions import AccessDenied\nfrom notifications.telegram.users import notify_admin_user_on_mute\nfrom users.models.mute import Muted\nfrom users.models.user import User\n\n\n@auth_required\ndef toggle_mute(request, user_slug):\n user_to = get_object_or_404(User, slug=user_slug)\n if user_to.is_curator or user_to.is_moderator:\n raise AccessDenied(title=\"\u0423 \u044d\u0442\u043e\u0433\u043e \u044e\u0437\u0435\u0440\u0430 \u0438\u043c\u043c\u0443\u043d\u0438\u0442\u0435\u0442 \u043e\u0442 \u043c\u044c\u044e\u0442\u0430\")\n\n total_user_muted_count = Muted.objects.filter(user_from=request.me).count()\n\n # show form on GET\n if request.method != \"POST\":\n is_muted = Muted.is_muted(\n user_from=request.me,\n user_to=user_to,\n )\n if is_muted:\n return render(request, \"users/mute/unmute.html\", {\n \"user\": user_to,\n })\n else:\n return render(request, \"users/mute/mute.html\", {\n \"user\": user_to,\n \"mutes_left\": settings.MAX_MUTE_COUNT - total_user_muted_count,\n })\n\n # else \u2014 process POST\n if total_user_muted_count > settings.MAX_MUTE_COUNT:\n raise AccessDenied(\n title=\"\u0412\u044b \u0437\u0430\u043c\u044c\u044e\u0442\u0438\u043b\u0438 \u0441\u043b\u0438\u0448\u043a\u043e\u043c \u043c\u043d\u043e\u0433\u043e \u043b\u044e\u0434\u0435\u0439\",\n message=\"\u0420\u0435\u043a\u043e\u043c\u0435\u043d\u0434\u0443\u0435\u043c \u043f\u0440\u0438\u0442\u043e\u0440\u043c\u043e\u0437\u0438\u0442\u044c \u0438 \u043f\u043e\u0433\u043e\u0432\u043e\u0440\u0438\u0442\u044c \u0441 \u043a\u0435\u043c-\u043d\u0438\u0431\u0443\u0434\u044c...\"\n )\n\n comment = request.POST.get(\"comment\") or \"\"\n mute, is_created = Muted.mute(\n user_from=request.me,\n user_to=user_to,\n comment=comment,\n )\n\n if is_created:\n # notify admins\n notify_admin_user_on_mute(\n user_from=request.me,\n user_to=user_to,\n comment=comment,\n )\n\n 
return render(request, \"users/messages/muted.html\", {\n \"user\": user_to,\n })\n else:\n # unmute this user\n Muted.unmute(\n user_from=request.me,\n user_to=user_to,\n )\n\n return render(request, \"users/messages/unmuted.html\", {\n \"user\": user_to,\n })\n\n\n@auth_required\ndef muted(request, user_slug):\n if request.me.slug != user_slug:\n return HttpResponseForbidden()\n\n user = get_object_or_404(User, slug=user_slug)\n muted_users = Muted.muted_by_user(user)\n\n return render(request, \"users/mute/index.html\", {\n \"user\": user,\n \"muted\": muted_users,\n })\n", "path": "users/views/muted.py"}], "after_files": [{"content": "from django.conf import settings\nfrom django.http import HttpResponseForbidden\nfrom django.shortcuts import get_object_or_404, render\n\nfrom auth.helpers import auth_required\nfrom club.exceptions import AccessDenied\nfrom notifications.telegram.users import notify_admin_user_on_mute\nfrom users.models.mute import Muted\nfrom users.models.user import User\n\n\n@auth_required\ndef toggle_mute(request, user_slug):\n user_to = get_object_or_404(User, slug=user_slug)\n if user_to.is_curator or user_to.is_moderator:\n raise AccessDenied(title=\"\u0423 \u044d\u0442\u043e\u0433\u043e \u044e\u0437\u0435\u0440\u0430 \u0438\u043c\u043c\u0443\u043d\u0438\u0442\u0435\u0442 \u043e\u0442 \u043c\u044c\u044e\u0442\u0430\")\n\n if user_to == request.me:\n raise AccessDenied(title='\u0425\u043e\u0440\u043e\u0448\u0430\u044f \u043f\u043e\u043f\u044b\u0442\u043a\u0430, \u043d\u043e \u043c\u044c\u044e\u0442\u0438\u0442\u044c \u0441\u0435\u0431\u044f \u043d\u0435\u043b\u044c\u0437\u044f. \u041a\u043e\u0436\u0430\u043d\u044b\u0439 \u043c\u0435\u0448\u043e\u043a, \u0442\u044b \u043f\u0440\u0435\u043a\u0440\u0430\u0441\u0435\u043d!',\n message='')\n\n total_user_muted_count = Muted.objects.filter(user_from=request.me).count()\n\n # show form on GET\n if request.method != \"POST\":\n is_muted = Muted.is_muted(\n user_from=request.me,\n user_to=user_to,\n )\n if is_muted:\n return render(request, \"users/mute/unmute.html\", {\n \"user\": user_to,\n })\n else:\n return render(request, \"users/mute/mute.html\", {\n \"user\": user_to,\n \"mutes_left\": settings.MAX_MUTE_COUNT - total_user_muted_count,\n })\n\n # else \u2014 process POST\n if total_user_muted_count > settings.MAX_MUTE_COUNT:\n raise AccessDenied(\n title=\"\u0412\u044b \u0437\u0430\u043c\u044c\u044e\u0442\u0438\u043b\u0438 \u0441\u043b\u0438\u0448\u043a\u043e\u043c \u043c\u043d\u043e\u0433\u043e \u043b\u044e\u0434\u0435\u0439\",\n message=\"\u0420\u0435\u043a\u043e\u043c\u0435\u043d\u0434\u0443\u0435\u043c \u043f\u0440\u0438\u0442\u043e\u0440\u043c\u043e\u0437\u0438\u0442\u044c \u0438 \u043f\u043e\u0433\u043e\u0432\u043e\u0440\u0438\u0442\u044c \u0441 \u043a\u0435\u043c-\u043d\u0438\u0431\u0443\u0434\u044c...\"\n )\n\n comment = request.POST.get(\"comment\") or \"\"\n mute, is_created = Muted.mute(\n user_from=request.me,\n user_to=user_to,\n comment=comment,\n )\n\n if is_created:\n # notify admins\n notify_admin_user_on_mute(\n user_from=request.me,\n user_to=user_to,\n comment=comment,\n )\n\n return render(request, \"users/messages/muted.html\", {\n \"user\": user_to,\n })\n else:\n # unmute this user\n Muted.unmute(\n user_from=request.me,\n user_to=user_to,\n )\n\n return render(request, \"users/messages/unmuted.html\", {\n \"user\": user_to,\n })\n\n\n@auth_required\ndef muted(request, user_slug):\n if request.me.slug != user_slug:\n return HttpResponseForbidden()\n\n user = get_object_or_404(User, 
slug=user_slug)\n muted_users = Muted.muted_by_user(user)\n\n return render(request, \"users/mute/index.html\", {\n \"user\": user,\n \"muted\": muted_users,\n })\n", "path": "users/views/muted.py"}]}
| 1,369 | 157 |
gh_patches_debug_15840
|
rasdani/github-patches
|
git_diff
|
napari__napari-1494
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
CTRL-C should exit napari gracefully
## 🐛 Bug
After #1476 napari just prints `KeyboardInterrupt` when CTRL-C is pressed in the system terminal window that used to launch napari. Prior to 1476 it exited with a crash, which got the job done but was not great.
Ideally napari would exit gracefully when you hit CTRL-C in the system terminal window.
## To Reproduce
Steps to reproduce the behavior:
1. From a system terminal (e.g. Terminal program on mac)
2. Run "napari" or a script that uses `napari.gui_qt()`
3. Switch back to the terminal window and type CTRL-C
## Expected behavior
Napari exits gracefully.
## Environment
```
napari: not-installed
Platform: macOS-10.15.3-x86_64-i386-64bit
Python: 3.8.1 (default, Jan 8 2020, 16:15:59) [Clang 4.0.1 (tags/RELEASE_401/final)]
Qt: 5.14.2
PyQt5: 5.14.2
NumPy: 1.18.4
SciPy: 1.4.1
Dask: 2.17.2
VisPy: 0.6.5.dev111+g8387ea1a.d20200424
GL version: 2.1 ATI-3.5.5
MAX_TEXTURE_SIZE: 16384
Plugins:
- napari-plugin-engine: 0.1.6
- svg: 0.1.3
```
## Additional context
This is low priority since you can exit with the Quit command, or from the system terminal hit CTRL-Z and `kill %1` the app if necessary. However it seems like exiting gracefully is the right behavior long term.
I tried adding this to our new `ExceptionHandler` class:
```
# Interpret CTRL-C as a request to quit.
if isinstance(value, KeyboardInterrupt):
QApplication.instance().quit()
return
```
but while it exits cleanly sometimes, sometimes it bus errors or seg faults.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `napari/_qt/exceptions.py`
Content:
```
1 import logging
2 import os
3 import traceback
4 from types import TracebackType
5 from typing import Optional, Type
6
7 from qtpy.QtCore import QObject, Signal
8
9 from .qt_error_notification import NapariNotification
10
11
12 class ExceptionHandler(QObject):
13 """General class to handle all uncaught exceptions in the Qt event loop.
14
15 Parameters
16 ----------
17 parent : QObject, optional
18 parent object, by default None
19 gui_exceptions : bool, optional
20 Whether to show exceptions as, by default True. May be overriden by
21 environment variable: ``NAPARI_CATCH_ERRORS=1`
22 Note: this ``False`` by default in ``gui_qt()`` (the main
23 instantiator of this class), but it is ``True`` in ``napari.__main__``.
24 As a result, exceptions will be shown in the GUI only (mostly) when
25 running napari as ``napari`` or ``python -m napari`` from the command
26 line.
27 """
28
29 error = Signal(tuple)
30 message: Optional[NapariNotification] = None
31
32 def __init__(self, parent=None, *, gui_exceptions=True):
33 super().__init__(parent)
34 if os.getenv("NAPARI_CATCH_ERRORS") in ('0', 'False'):
35 self.gui_exceptions = False
36 else:
37 self.gui_exceptions = gui_exceptions
38
39 def handle(
40 self,
41 etype: Type[BaseException],
42 value: BaseException,
43 tb: TracebackType,
44 ):
45 """Our sys.excepthook override.
46
47 This function handles uncaught exceptions and can delegate to a
48 secondary handler, whether it be a GUI dialog, or an IPython traceback
49 printout. The override to ``sys.excepthook`` happens in
50 :func:`napari.gui_qt`, and therefore this is only active when the qt
51 event loop has been started by napari.
52
53 The three parameters here are what would be returned from
54 :func:`sys.exc_info()`.
55
56 Parameters
57 ----------
58 etype : Type[BaseException]
59 The type of error raised
60 value : BaseException
61 The error instance
62 tb : TracebackType
63 The traceback object associated with the error.
64 """
65 if self.gui_exceptions:
66 self._show_error_dialog(value)
67 else:
68 text = "".join(traceback.format_exception(etype, value, tb))
69 logging.error("Unhandled exception:\n%s", text)
70 self.error.emit((etype, value, tb))
71
72 def _show_error_dialog(self, exception: BaseException):
73 self.message = NapariNotification.from_exception(exception)
74 self.message.show()
75
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/napari/_qt/exceptions.py b/napari/_qt/exceptions.py
--- a/napari/_qt/exceptions.py
+++ b/napari/_qt/exceptions.py
@@ -1,5 +1,6 @@
import logging
import os
+import sys
import traceback
from types import TracebackType
from typing import Optional, Type
@@ -62,6 +63,12 @@
tb : TracebackType
The traceback object associated with the error.
"""
+ # etype.__module__ contains the module raising the error
+ # Custom exception classes can have different behavior
+ # can add custom exception handlers here ...
+ if isinstance(value, KeyboardInterrupt):
+ print("Closed by KeyboardInterrupt", file=sys.stderr)
+ sys.exit(1)
if self.gui_exceptions:
self._show_error_dialog(value)
else:
|
{"golden_diff": "diff --git a/napari/_qt/exceptions.py b/napari/_qt/exceptions.py\n--- a/napari/_qt/exceptions.py\n+++ b/napari/_qt/exceptions.py\n@@ -1,5 +1,6 @@\n import logging\n import os\n+import sys\n import traceback\n from types import TracebackType\n from typing import Optional, Type\n@@ -62,6 +63,12 @@\n tb : TracebackType\n The traceback object associated with the error.\n \"\"\"\n+ # etype.__module__ contains the module raising the error\n+ # Custom exception classes can have different behavior\n+ # can add custom exception handlers here ...\n+ if isinstance(value, KeyboardInterrupt):\n+ print(\"Closed by KeyboardInterrupt\", file=sys.stderr)\n+ sys.exit(1)\n if self.gui_exceptions:\n self._show_error_dialog(value)\n else:\n", "issue": "CTRL-C should exit napari gracefully\n## \ud83d\udc1b Bug\r\n\r\nAfter #1476 napari just prints `KeyboardInterrupt` when CTRL-C is pressed in the system terminal window that used to launch napari. Prior to 1476 it exited with a crash, which got the job done but was not great. \r\n\r\nIdeally napari would exit gracefully when you hit CTRL-C in the system terminal window.\r\n\r\n\r\n## To Reproduce\r\n\r\nSteps to reproduce the behavior:\r\n\r\n1. From a system terminal (e.g. Terminal program on mac)\r\n2. Run \"napari\" or a script that uses `napari.gui_qt()`\r\n3. Switch back to the terminal window and type CTRL-C\r\n\r\n## Expected behavior\r\n\r\nNapari exits gracefully.\r\n\r\n## Environment\r\n\r\n```\r\nnapari: not-installed\r\nPlatform: macOS-10.15.3-x86_64-i386-64bit\r\nPython: 3.8.1 (default, Jan 8 2020, 16:15:59) [Clang 4.0.1 (tags/RELEASE_401/final)]\r\nQt: 5.14.2\r\nPyQt5: 5.14.2\r\nNumPy: 1.18.4\r\nSciPy: 1.4.1\r\nDask: 2.17.2\r\nVisPy: 0.6.5.dev111+g8387ea1a.d20200424\r\n\r\nGL version: 2.1 ATI-3.5.5\r\nMAX_TEXTURE_SIZE: 16384\r\n\r\nPlugins:\r\n- napari-plugin-engine: 0.1.6\r\n- svg: 0.1.3\r\n```\r\n\r\n## Additional context\r\n\r\nThis is low priority since you can exit with the Quit command, or from the system terminal hit CTRL-Z and `kill %1` the app if necessary. However it seems like exiting gracefully is the right behavior long term.\r\n\r\nI tried adding this to our new `ExceptionHandler` class:\r\n```\r\n # Interpret CTRL-C as a request to quit.\r\n if isinstance(value, KeyboardInterrupt):\r\n QApplication.instance().quit()\r\n return\r\n```\r\nbut while it exits cleanly sometimes, sometimes it bus errors or seg faults.\n", "before_files": [{"content": "import logging\nimport os\nimport traceback\nfrom types import TracebackType\nfrom typing import Optional, Type\n\nfrom qtpy.QtCore import QObject, Signal\n\nfrom .qt_error_notification import NapariNotification\n\n\nclass ExceptionHandler(QObject):\n \"\"\"General class to handle all uncaught exceptions in the Qt event loop.\n\n Parameters\n ----------\n parent : QObject, optional\n parent object, by default None\n gui_exceptions : bool, optional\n Whether to show exceptions as, by default True. 
May be overriden by\n environment variable: ``NAPARI_CATCH_ERRORS=1`\n Note: this ``False`` by default in ``gui_qt()`` (the main\n instantiator of this class), but it is ``True`` in ``napari.__main__``.\n As a result, exceptions will be shown in the GUI only (mostly) when\n running napari as ``napari`` or ``python -m napari`` from the command\n line.\n \"\"\"\n\n error = Signal(tuple)\n message: Optional[NapariNotification] = None\n\n def __init__(self, parent=None, *, gui_exceptions=True):\n super().__init__(parent)\n if os.getenv(\"NAPARI_CATCH_ERRORS\") in ('0', 'False'):\n self.gui_exceptions = False\n else:\n self.gui_exceptions = gui_exceptions\n\n def handle(\n self,\n etype: Type[BaseException],\n value: BaseException,\n tb: TracebackType,\n ):\n \"\"\"Our sys.excepthook override.\n\n This function handles uncaught exceptions and can delegate to a\n secondary handler, whether it be a GUI dialog, or an IPython traceback\n printout. The override to ``sys.excepthook`` happens in\n :func:`napari.gui_qt`, and therefore this is only active when the qt\n event loop has been started by napari.\n\n The three parameters here are what would be returned from\n :func:`sys.exc_info()`.\n\n Parameters\n ----------\n etype : Type[BaseException]\n The type of error raised\n value : BaseException\n The error instance\n tb : TracebackType\n The traceback object associated with the error.\n \"\"\"\n if self.gui_exceptions:\n self._show_error_dialog(value)\n else:\n text = \"\".join(traceback.format_exception(etype, value, tb))\n logging.error(\"Unhandled exception:\\n%s\", text)\n self.error.emit((etype, value, tb))\n\n def _show_error_dialog(self, exception: BaseException):\n self.message = NapariNotification.from_exception(exception)\n self.message.show()\n", "path": "napari/_qt/exceptions.py"}], "after_files": [{"content": "import logging\nimport os\nimport sys\nimport traceback\nfrom types import TracebackType\nfrom typing import Optional, Type\n\nfrom qtpy.QtCore import QObject, Signal\n\nfrom .qt_error_notification import NapariNotification\n\n\nclass ExceptionHandler(QObject):\n \"\"\"General class to handle all uncaught exceptions in the Qt event loop.\n\n Parameters\n ----------\n parent : QObject, optional\n parent object, by default None\n gui_exceptions : bool, optional\n Whether to show exceptions as, by default True. May be overriden by\n environment variable: ``NAPARI_CATCH_ERRORS=1`\n Note: this ``False`` by default in ``gui_qt()`` (the main\n instantiator of this class), but it is ``True`` in ``napari.__main__``.\n As a result, exceptions will be shown in the GUI only (mostly) when\n running napari as ``napari`` or ``python -m napari`` from the command\n line.\n \"\"\"\n\n error = Signal(tuple)\n message: Optional[NapariNotification] = None\n\n def __init__(self, parent=None, *, gui_exceptions=True):\n super().__init__(parent)\n if os.getenv(\"NAPARI_CATCH_ERRORS\") in ('0', 'False'):\n self.gui_exceptions = False\n else:\n self.gui_exceptions = gui_exceptions\n\n def handle(\n self,\n etype: Type[BaseException],\n value: BaseException,\n tb: TracebackType,\n ):\n \"\"\"Our sys.excepthook override.\n\n This function handles uncaught exceptions and can delegate to a\n secondary handler, whether it be a GUI dialog, or an IPython traceback\n printout. 
The override to ``sys.excepthook`` happens in\n :func:`napari.gui_qt`, and therefore this is only active when the qt\n event loop has been started by napari.\n\n The three parameters here are what would be returned from\n :func:`sys.exc_info()`.\n\n Parameters\n ----------\n etype : Type[BaseException]\n The type of error raised\n value : BaseException\n The error instance\n tb : TracebackType\n The traceback object associated with the error.\n \"\"\"\n # etype.__module__ contains the module raising the error\n # Custom exception classes can have different behavior\n # can add custom exception handlers here ...\n if isinstance(value, KeyboardInterrupt):\n print(\"Closed by KeyboardInterrupt\", file=sys.stderr)\n sys.exit(1)\n if self.gui_exceptions:\n self._show_error_dialog(value)\n else:\n text = \"\".join(traceback.format_exception(etype, value, tb))\n logging.error(\"Unhandled exception:\\n%s\", text)\n self.error.emit((etype, value, tb))\n\n def _show_error_dialog(self, exception: BaseException):\n self.message = NapariNotification.from_exception(exception)\n self.message.show()\n", "path": "napari/_qt/exceptions.py"}]}
| 1,470 | 192 |
gh_patches_debug_10139
|
rasdani/github-patches
|
git_diff
|
zestedesavoir__zds-site-3807
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[beta][v20] L'api des notifications renvoie que les notifs non lues
Serveur : Beta
Version : v20/6bb2f75
Système : Mac OS X
Navigateur : 52.0.2743.116 (64-bit)
---
1. Récupérez vos notifications depuis l'API
2. Constatez que le serveur renvoie uniquement les notifs non lues.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `zds/notification/api/views.py`
Content:
```
1 # coding: utf-8
2 from dry_rest_permissions.generics import DRYPermissions
3 from rest_framework import filters
4 from rest_framework.generics import ListAPIView
5 from rest_framework.permissions import IsAuthenticated
6 from rest_framework_extensions.cache.decorators import cache_response
7 from rest_framework_extensions.etag.decorators import etag
8 from rest_framework_extensions.key_constructor import bits
9 from rest_framework_extensions.key_constructor.constructors import DefaultKeyConstructor
10
11 from zds.api.bits import DJRF3xPaginationKeyBit
12 from zds.notification.api.serializers import NotificationSerializer
13 from zds.notification.models import Notification
14
15
16 class PagingNotificationListKeyConstructor(DefaultKeyConstructor):
17 pagination = DJRF3xPaginationKeyBit()
18 search = bits.QueryParamsKeyBit(['search', 'ordering', 'type'])
19 list_sql_query = bits.ListSqlQueryKeyBit()
20 unique_view_id = bits.UniqueViewIdKeyBit()
21 user = bits.UserKeyBit()
22
23
24 class NotificationListAPI(ListAPIView):
25 """
26 List of notification.
27 """
28
29 filter_backends = (filters.SearchFilter, filters.OrderingFilter)
30 search_fields = ('title',)
31 ordering_fields = ('pubdate', 'title',)
32 list_key_func = PagingNotificationListKeyConstructor()
33 serializer_class = NotificationSerializer
34 permission_classes = (IsAuthenticated, DRYPermissions,)
35
36 @etag(list_key_func)
37 @cache_response(key_func=list_key_func)
38 def get(self, request, *args, **kwargs):
39 """
40 Lists all notifications of a user.
41 ---
42
43 parameters:
44 - name: Authorization
45 description: Bearer token to make an authenticated request.
46 required: true
47 paramType: header
48 - name: page
49 description: Restricts output to the given page number.
50 required: false
51 paramType: query
52 - name: page_size
53 description: Sets the number of notifications per page.
54 required: false
55 paramType: query
56 - name: search
57 description: Filters by title.
58 required: false
59 paramType: query
60 - name: ordering
61 description: Sorts the results. You can order by (-)pubdate or (-)title.
62 paramType: query
63 - name: type
64 description: Filters by notification type.
65 paramType: query
66 - name: subscription_type
67 description: Filters by subscription type.
68 paramType: query
69 - name: expand
70 description: Returns an object instead of an identifier representing the given field.
71 required: false
72 paramType: query
73 responseMessages:
74 - code: 401
75 message: Not Authenticated
76 - code: 404
77 message: Not Found
78 """
79 return self.list(request, *args, **kwargs)
80
81 def get_queryset(self):
82 queryset = Notification.objects.get_unread_notifications_of(self.request.user)
83 subscription_type = self.request.query_params.get('subscription_type', None)
84 if subscription_type:
85 queryset = queryset.filter(subscription__content_type__model=subscription_type)
86 _type = self.request.query_params.get('type', None)
87 if _type:
88 queryset = queryset.filter(content_type__model=_type)
89 return queryset
90
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/zds/notification/api/views.py b/zds/notification/api/views.py
--- a/zds/notification/api/views.py
+++ b/zds/notification/api/views.py
@@ -79,7 +79,7 @@
return self.list(request, *args, **kwargs)
def get_queryset(self):
- queryset = Notification.objects.get_unread_notifications_of(self.request.user)
+ queryset = Notification.objects.get_notifications_of(self.request.user)
subscription_type = self.request.query_params.get('subscription_type', None)
if subscription_type:
queryset = queryset.filter(subscription__content_type__model=subscription_type)
|
{"golden_diff": "diff --git a/zds/notification/api/views.py b/zds/notification/api/views.py\n--- a/zds/notification/api/views.py\n+++ b/zds/notification/api/views.py\n@@ -79,7 +79,7 @@\n return self.list(request, *args, **kwargs)\n \n def get_queryset(self):\n- queryset = Notification.objects.get_unread_notifications_of(self.request.user)\n+ queryset = Notification.objects.get_notifications_of(self.request.user)\n subscription_type = self.request.query_params.get('subscription_type', None)\n if subscription_type:\n queryset = queryset.filter(subscription__content_type__model=subscription_type)\n", "issue": "[beta][v20] L'api des notifications renvoie que les notifs non lues\nServeur : Beta\nVersion : v20/6bb2f75\nSyst\u00e8me : Mac OS X\nNavigateur : 52.0.2743.116 (64-bit)\n\n---\n1. R\u00e9cup\u00e9rez vos notifications depuis l'API\n2. Constatez que le serveur renvoie uniquement les notifs non lues.\n\n", "before_files": [{"content": "# coding: utf-8\nfrom dry_rest_permissions.generics import DRYPermissions\nfrom rest_framework import filters\nfrom rest_framework.generics import ListAPIView\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework_extensions.cache.decorators import cache_response\nfrom rest_framework_extensions.etag.decorators import etag\nfrom rest_framework_extensions.key_constructor import bits\nfrom rest_framework_extensions.key_constructor.constructors import DefaultKeyConstructor\n\nfrom zds.api.bits import DJRF3xPaginationKeyBit\nfrom zds.notification.api.serializers import NotificationSerializer\nfrom zds.notification.models import Notification\n\n\nclass PagingNotificationListKeyConstructor(DefaultKeyConstructor):\n pagination = DJRF3xPaginationKeyBit()\n search = bits.QueryParamsKeyBit(['search', 'ordering', 'type'])\n list_sql_query = bits.ListSqlQueryKeyBit()\n unique_view_id = bits.UniqueViewIdKeyBit()\n user = bits.UserKeyBit()\n\n\nclass NotificationListAPI(ListAPIView):\n \"\"\"\n List of notification.\n \"\"\"\n\n filter_backends = (filters.SearchFilter, filters.OrderingFilter)\n search_fields = ('title',)\n ordering_fields = ('pubdate', 'title',)\n list_key_func = PagingNotificationListKeyConstructor()\n serializer_class = NotificationSerializer\n permission_classes = (IsAuthenticated, DRYPermissions,)\n\n @etag(list_key_func)\n @cache_response(key_func=list_key_func)\n def get(self, request, *args, **kwargs):\n \"\"\"\n Lists all notifications of a user.\n ---\n\n parameters:\n - name: Authorization\n description: Bearer token to make an authenticated request.\n required: true\n paramType: header\n - name: page\n description: Restricts output to the given page number.\n required: false\n paramType: query\n - name: page_size\n description: Sets the number of notifications per page.\n required: false\n paramType: query\n - name: search\n description: Filters by title.\n required: false\n paramType: query\n - name: ordering\n description: Sorts the results. 
You can order by (-)pubdate or (-)title.\n paramType: query\n - name: type\n description: Filters by notification type.\n paramType: query\n - name: subscription_type\n description: Filters by subscription type.\n paramType: query\n - name: expand\n description: Returns an object instead of an identifier representing the given field.\n required: false\n paramType: query\n responseMessages:\n - code: 401\n message: Not Authenticated\n - code: 404\n message: Not Found\n \"\"\"\n return self.list(request, *args, **kwargs)\n\n def get_queryset(self):\n queryset = Notification.objects.get_unread_notifications_of(self.request.user)\n subscription_type = self.request.query_params.get('subscription_type', None)\n if subscription_type:\n queryset = queryset.filter(subscription__content_type__model=subscription_type)\n _type = self.request.query_params.get('type', None)\n if _type:\n queryset = queryset.filter(content_type__model=_type)\n return queryset\n", "path": "zds/notification/api/views.py"}], "after_files": [{"content": "# coding: utf-8\nfrom dry_rest_permissions.generics import DRYPermissions\nfrom rest_framework import filters\nfrom rest_framework.generics import ListAPIView\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework_extensions.cache.decorators import cache_response\nfrom rest_framework_extensions.etag.decorators import etag\nfrom rest_framework_extensions.key_constructor import bits\nfrom rest_framework_extensions.key_constructor.constructors import DefaultKeyConstructor\n\nfrom zds.api.bits import DJRF3xPaginationKeyBit\nfrom zds.notification.api.serializers import NotificationSerializer\nfrom zds.notification.models import Notification\n\n\nclass PagingNotificationListKeyConstructor(DefaultKeyConstructor):\n pagination = DJRF3xPaginationKeyBit()\n search = bits.QueryParamsKeyBit(['search', 'ordering', 'type'])\n list_sql_query = bits.ListSqlQueryKeyBit()\n unique_view_id = bits.UniqueViewIdKeyBit()\n user = bits.UserKeyBit()\n\n\nclass NotificationListAPI(ListAPIView):\n \"\"\"\n List of notification.\n \"\"\"\n\n filter_backends = (filters.SearchFilter, filters.OrderingFilter)\n search_fields = ('title',)\n ordering_fields = ('pubdate', 'title',)\n list_key_func = PagingNotificationListKeyConstructor()\n serializer_class = NotificationSerializer\n permission_classes = (IsAuthenticated, DRYPermissions,)\n\n @etag(list_key_func)\n @cache_response(key_func=list_key_func)\n def get(self, request, *args, **kwargs):\n \"\"\"\n Lists all notifications of a user.\n ---\n\n parameters:\n - name: Authorization\n description: Bearer token to make an authenticated request.\n required: true\n paramType: header\n - name: page\n description: Restricts output to the given page number.\n required: false\n paramType: query\n - name: page_size\n description: Sets the number of notifications per page.\n required: false\n paramType: query\n - name: search\n description: Filters by title.\n required: false\n paramType: query\n - name: ordering\n description: Sorts the results. 
You can order by (-)pubdate or (-)title.\n paramType: query\n - name: type\n description: Filters by notification type.\n paramType: query\n - name: subscription_type\n description: Filters by subscription type.\n paramType: query\n - name: expand\n description: Returns an object instead of an identifier representing the given field.\n required: false\n paramType: query\n responseMessages:\n - code: 401\n message: Not Authenticated\n - code: 404\n message: Not Found\n \"\"\"\n return self.list(request, *args, **kwargs)\n\n def get_queryset(self):\n queryset = Notification.objects.get_notifications_of(self.request.user)\n subscription_type = self.request.query_params.get('subscription_type', None)\n if subscription_type:\n queryset = queryset.filter(subscription__content_type__model=subscription_type)\n _type = self.request.query_params.get('type', None)\n if _type:\n queryset = queryset.filter(content_type__model=_type)\n return queryset\n", "path": "zds/notification/api/views.py"}]}
| 1,213 | 131 |
gh_patches_debug_20469
|
rasdani/github-patches
|
git_diff
|
privacyidea__privacyidea-3091
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bad error handling in /ttype/<type> endpoint
We observed a bad error handling when requesting the `/ttype/<type>` endpoint.
In specific, we faced the following error:
```
[ERROR][privacyidea.app:1892] Exception on /ttype/push"}. [GET]
...
AttributeError: 'NoneType' object has no attribute 'api_endpoint'
```
Actually, we could fix the problem but it would be nice to fix this upstream right away.
### Top-level intent
Access the `/ttype/<type>` endpoint.
### Steps to reproduce
1. Query `/ttype/test` endpoint
2. There will be a NoneType error in the logs.
### Expected outcome
Proper error handling
### Actual outcome
NoneType exception.
### Configuration
* **privacyIDEA version**: v3.6.3
* **Installation method**: (from Ubuntu packages, github, PyPI, ...)
* **Python version**: 3
* **Operating system**: linux
* **Webserver**: apache
### Log file
**Set PI_LOGLEVEL = logging.DEBUG in pi.cfg and take a look at the privacyidea.log!**
**If appropriate, attach the log file or paste relevant portions.**
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `privacyidea/api/ttype.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 #
3 # http://www.privacyidea.org
4 # (c) Cornelius Kölbel, privacyidea.org
5 #
6 # 2015-09-01 Cornelius Kölbel, <[email protected]>
7 # Initial writeup
8 #
9 # This code is free software; you can redistribute it and/or
10 # modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
11 # License as published by the Free Software Foundation; either
12 # version 3 of the License, or any later version.
13 #
14 # This code is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU AFFERO GENERAL PUBLIC LICENSE for more details.
18 #
19 # You should have received a copy of the GNU Affero General Public
20 # License along with this program. If not, see <http://www.gnu.org/licenses/>.
21 #
22 """
23 This API endpoint is a generic endpoint that can be used by any token
24 type.
25
26 The tokentype needs to implement a classmethod *api_endpoint* and can then be
27 called by /ttype/<tokentype>.
28 This way, each tokentype can create its own API without the need to change
29 the core API.
30
31 The TiQR Token uses this API to implement its special functionalities. See
32 :ref:`code_tiqr_token`.
33 """
34 from flask import (Blueprint,
35 request)
36 from .lib.utils import getParam
37 from ..lib.log import log_with
38 from flask import g, jsonify, current_app
39 import logging
40 from privacyidea.api.lib.utils import get_all_params
41 from privacyidea.lib.policy import PolicyClass
42 from privacyidea.lib.audit import getAudit
43 from privacyidea.lib.config import (get_token_class, get_from_config,
44 SYSCONF, ensure_no_config_object)
45 from privacyidea.lib.user import get_user_from_param
46 from privacyidea.lib.utils import get_client_ip
47 import json
48
49 log = logging.getLogger(__name__)
50
51 ttype_blueprint = Blueprint('ttype_blueprint', __name__)
52
53
54 @ttype_blueprint.before_request
55 def before_request():
56 """
57 This is executed before the request
58 """
59 ensure_no_config_object()
60 request.all_data = get_all_params(request)
61 privacyidea_server = current_app.config.get("PI_AUDIT_SERVERNAME") or \
62 request.host
63 # Create a policy_object, that reads the database audit settings
64 # and contains the complete policy definition during the request.
65 # This audit_object can be used in the postpolicy and prepolicy and it
66 # can be passed to the innerpolicies.
67 g.policy_object = PolicyClass()
68 g.audit_object = getAudit(current_app.config)
69 # access_route contains the ip adresses of all clients, hops and proxies.
70 g.client_ip = get_client_ip(request,
71 get_from_config(SYSCONF.OVERRIDECLIENT))
72 g.serial = getParam(request.all_data, "serial") or None
73 g.audit_object.log({"success": False,
74 "action_detail": "",
75 "client": g.client_ip,
76 "client_user_agent": request.user_agent.browser,
77 "privacyidea_server": privacyidea_server,
78 "action": "{0!s} {1!s}".format(request.method, request.url_rule),
79 "info": ""})
80
81
82 @ttype_blueprint.route('/<ttype>', methods=['POST', 'GET'])
83 @log_with(log)
84 def token(ttype=None):
85 """
86 This is a special token function. Each token type can define an
87 additional API call, that does not need authentication on the REST API
88 level.
89
90 :return: Token Type dependent
91 """
92 tokenc = get_token_class(ttype)
93 res = tokenc.api_endpoint(request, g)
94 serial = getParam(request.all_data, "serial")
95 user = get_user_from_param(request.all_data)
96 g.audit_object.log({"success": 1,
97 "user": user.login,
98 "realm": user.realm,
99 "serial": serial,
100 "token_type": ttype})
101 if res[0] == "json":
102 return jsonify(res[1])
103 elif res[0] in ["html", "plain"]:
104 return current_app.response_class(res[1], mimetype="text/{0!s}".format(res[0]))
105 elif len(res) == 2:
106 return current_app.response_class(json.dumps(res[1]),
107 mimetype="application/{0!s}".format(res[0]))
108 else:
109 return current_app.response_class(res[1], mimetype="application/octet-binary",
110 headers=res[2])
111
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/privacyidea/api/ttype.py b/privacyidea/api/ttype.py
--- a/privacyidea/api/ttype.py
+++ b/privacyidea/api/ttype.py
@@ -38,6 +38,7 @@
from flask import g, jsonify, current_app
import logging
from privacyidea.api.lib.utils import get_all_params
+from privacyidea.lib.error import ParameterError
from privacyidea.lib.policy import PolicyClass
from privacyidea.lib.audit import getAudit
from privacyidea.lib.config import (get_token_class, get_from_config,
@@ -90,6 +91,9 @@
:return: Token Type dependent
"""
tokenc = get_token_class(ttype)
+ if tokenc is None:
+ log.error(u"Invalid tokentype provided. ttype: {}".format(ttype.lower()))
+ raise ParameterError(u"Invalid tokentype provided. ttype: {}".format(ttype.lower()))
res = tokenc.api_endpoint(request, g)
serial = getParam(request.all_data, "serial")
user = get_user_from_param(request.all_data)
|
{"golden_diff": "diff --git a/privacyidea/api/ttype.py b/privacyidea/api/ttype.py\n--- a/privacyidea/api/ttype.py\n+++ b/privacyidea/api/ttype.py\n@@ -38,6 +38,7 @@\n from flask import g, jsonify, current_app\n import logging\n from privacyidea.api.lib.utils import get_all_params\n+from privacyidea.lib.error import ParameterError\n from privacyidea.lib.policy import PolicyClass\n from privacyidea.lib.audit import getAudit\n from privacyidea.lib.config import (get_token_class, get_from_config,\n@@ -90,6 +91,9 @@\n :return: Token Type dependent\n \"\"\"\n tokenc = get_token_class(ttype)\n+ if tokenc is None:\n+ log.error(u\"Invalid tokentype provided. ttype: {}\".format(ttype.lower()))\n+ raise ParameterError(u\"Invalid tokentype provided. ttype: {}\".format(ttype.lower()))\n res = tokenc.api_endpoint(request, g)\n serial = getParam(request.all_data, \"serial\")\n user = get_user_from_param(request.all_data)\n", "issue": "Bad error handling in /ttype/<type> endpoint\nWe observed a bad error handling when requesting the `/ttype/<type>` endpoint.\r\n\r\nIn specific, we faced the following error:\r\n```\r\n[ERROR][privacyidea.app:1892] Exception on /ttype/push\"}. [GET]\r\n...\r\nAttributeError: 'NoneType' object has no attribute 'api_endpoint'\r\n```\r\nActually, we could fix the problem but it would be nice to fix this upstream right away.\r\n\r\n### Top-level intent\r\n\r\nAccess the `/ttype/<type>` endpoint.\r\n\r\n### Steps to reproduce\r\n\r\n1. Query `/ttype/test` endpoint\r\n2. There will be a NoneType error in the logs.\r\n\r\n### Expected outcome\r\n\r\nProper error handling\r\n\r\n### Actual outcome\r\n\r\nNoneType exception.\r\n\r\n### Configuration\r\n\r\n* **privacyIDEA version**: v3.6.3\r\n* **Installation method**: (from Ubuntu packages, github, PyPI, ...)\r\n* **Python version**: 3\r\n* **Operating system**: linux\r\n* **Webserver**: apache\r\n\r\n\r\n### Log file\r\n\r\n**Set PI_LOGLEVEL = logging.DEBUG in pi.cfg and take a look at the privacyidea.log!**\r\n**If appropriate, attach the log file or paste relevant portions.**\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# http://www.privacyidea.org\n# (c) Cornelius K\u00f6lbel, privacyidea.org\n#\n# 2015-09-01 Cornelius K\u00f6lbel, <[email protected]>\n# Initial writeup\n#\n# This code is free software; you can redistribute it and/or\n# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE\n# License as published by the Free Software Foundation; either\n# version 3 of the License, or any later version.\n#\n# This code is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU AFFERO GENERAL PUBLIC LICENSE for more details.\n#\n# You should have received a copy of the GNU Affero General Public\n# License along with this program. If not, see <http://www.gnu.org/licenses/>.\n#\n\"\"\"\nThis API endpoint is a generic endpoint that can be used by any token\ntype.\n\nThe tokentype needs to implement a classmethod *api_endpoint* and can then be\ncalled by /ttype/<tokentype>.\nThis way, each tokentype can create its own API without the need to change\nthe core API.\n\nThe TiQR Token uses this API to implement its special functionalities. 
See\n:ref:`code_tiqr_token`.\n\"\"\"\nfrom flask import (Blueprint,\n request)\nfrom .lib.utils import getParam\nfrom ..lib.log import log_with\nfrom flask import g, jsonify, current_app\nimport logging\nfrom privacyidea.api.lib.utils import get_all_params\nfrom privacyidea.lib.policy import PolicyClass\nfrom privacyidea.lib.audit import getAudit\nfrom privacyidea.lib.config import (get_token_class, get_from_config,\n SYSCONF, ensure_no_config_object)\nfrom privacyidea.lib.user import get_user_from_param\nfrom privacyidea.lib.utils import get_client_ip\nimport json\n\nlog = logging.getLogger(__name__)\n\nttype_blueprint = Blueprint('ttype_blueprint', __name__)\n\n\n@ttype_blueprint.before_request\ndef before_request():\n \"\"\"\n This is executed before the request\n \"\"\"\n ensure_no_config_object()\n request.all_data = get_all_params(request)\n privacyidea_server = current_app.config.get(\"PI_AUDIT_SERVERNAME\") or \\\n request.host\n # Create a policy_object, that reads the database audit settings\n # and contains the complete policy definition during the request.\n # This audit_object can be used in the postpolicy and prepolicy and it\n # can be passed to the innerpolicies.\n g.policy_object = PolicyClass()\n g.audit_object = getAudit(current_app.config)\n # access_route contains the ip adresses of all clients, hops and proxies.\n g.client_ip = get_client_ip(request,\n get_from_config(SYSCONF.OVERRIDECLIENT))\n g.serial = getParam(request.all_data, \"serial\") or None\n g.audit_object.log({\"success\": False,\n \"action_detail\": \"\",\n \"client\": g.client_ip,\n \"client_user_agent\": request.user_agent.browser,\n \"privacyidea_server\": privacyidea_server,\n \"action\": \"{0!s} {1!s}\".format(request.method, request.url_rule),\n \"info\": \"\"})\n\n\n@ttype_blueprint.route('/<ttype>', methods=['POST', 'GET'])\n@log_with(log)\ndef token(ttype=None):\n \"\"\"\n This is a special token function. Each token type can define an\n additional API call, that does not need authentication on the REST API\n level.\n\n :return: Token Type dependent\n \"\"\"\n tokenc = get_token_class(ttype)\n res = tokenc.api_endpoint(request, g)\n serial = getParam(request.all_data, \"serial\")\n user = get_user_from_param(request.all_data)\n g.audit_object.log({\"success\": 1,\n \"user\": user.login,\n \"realm\": user.realm,\n \"serial\": serial,\n \"token_type\": ttype})\n if res[0] == \"json\":\n return jsonify(res[1])\n elif res[0] in [\"html\", \"plain\"]:\n return current_app.response_class(res[1], mimetype=\"text/{0!s}\".format(res[0]))\n elif len(res) == 2:\n return current_app.response_class(json.dumps(res[1]),\n mimetype=\"application/{0!s}\".format(res[0]))\n else:\n return current_app.response_class(res[1], mimetype=\"application/octet-binary\",\n headers=res[2])\n", "path": "privacyidea/api/ttype.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# http://www.privacyidea.org\n# (c) Cornelius K\u00f6lbel, privacyidea.org\n#\n# 2015-09-01 Cornelius K\u00f6lbel, <[email protected]>\n# Initial writeup\n#\n# This code is free software; you can redistribute it and/or\n# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE\n# License as published by the Free Software Foundation; either\n# version 3 of the License, or any later version.\n#\n# This code is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n# GNU AFFERO GENERAL PUBLIC LICENSE for more details.\n#\n# You should have received a copy of the GNU Affero General Public\n# License along with this program. If not, see <http://www.gnu.org/licenses/>.\n#\n\"\"\"\nThis API endpoint is a generic endpoint that can be used by any token\ntype.\n\nThe tokentype needs to implement a classmethod *api_endpoint* and can then be\ncalled by /ttype/<tokentype>.\nThis way, each tokentype can create its own API without the need to change\nthe core API.\n\nThe TiQR Token uses this API to implement its special functionalities. See\n:ref:`code_tiqr_token`.\n\"\"\"\nfrom flask import (Blueprint,\n request)\nfrom .lib.utils import getParam\nfrom ..lib.log import log_with\nfrom flask import g, jsonify, current_app\nimport logging\nfrom privacyidea.api.lib.utils import get_all_params\nfrom privacyidea.lib.error import ParameterError\nfrom privacyidea.lib.policy import PolicyClass\nfrom privacyidea.lib.audit import getAudit\nfrom privacyidea.lib.config import (get_token_class, get_from_config,\n SYSCONF, ensure_no_config_object)\nfrom privacyidea.lib.user import get_user_from_param\nfrom privacyidea.lib.utils import get_client_ip\nimport json\n\nlog = logging.getLogger(__name__)\n\nttype_blueprint = Blueprint('ttype_blueprint', __name__)\n\n\n@ttype_blueprint.before_request\ndef before_request():\n \"\"\"\n This is executed before the request\n \"\"\"\n ensure_no_config_object()\n request.all_data = get_all_params(request)\n privacyidea_server = current_app.config.get(\"PI_AUDIT_SERVERNAME\") or \\\n request.host\n # Create a policy_object, that reads the database audit settings\n # and contains the complete policy definition during the request.\n # This audit_object can be used in the postpolicy and prepolicy and it\n # can be passed to the innerpolicies.\n g.policy_object = PolicyClass()\n g.audit_object = getAudit(current_app.config)\n # access_route contains the ip adresses of all clients, hops and proxies.\n g.client_ip = get_client_ip(request,\n get_from_config(SYSCONF.OVERRIDECLIENT))\n g.serial = getParam(request.all_data, \"serial\") or None\n g.audit_object.log({\"success\": False,\n \"action_detail\": \"\",\n \"client\": g.client_ip,\n \"client_user_agent\": request.user_agent.browser,\n \"privacyidea_server\": privacyidea_server,\n \"action\": \"{0!s} {1!s}\".format(request.method, request.url_rule),\n \"info\": \"\"})\n\n\n@ttype_blueprint.route('/<ttype>', methods=['POST', 'GET'])\n@log_with(log)\ndef token(ttype=None):\n \"\"\"\n This is a special token function. Each token type can define an\n additional API call, that does not need authentication on the REST API\n level.\n\n :return: Token Type dependent\n \"\"\"\n tokenc = get_token_class(ttype)\n if tokenc is None:\n log.error(u\"Invalid tokentype provided. ttype: {}\".format(ttype.lower()))\n raise ParameterError(u\"Invalid tokentype provided. 
ttype: {}\".format(ttype.lower()))\n res = tokenc.api_endpoint(request, g)\n serial = getParam(request.all_data, \"serial\")\n user = get_user_from_param(request.all_data)\n g.audit_object.log({\"success\": 1,\n \"user\": user.login,\n \"realm\": user.realm,\n \"serial\": serial,\n \"token_type\": ttype})\n if res[0] == \"json\":\n return jsonify(res[1])\n elif res[0] in [\"html\", \"plain\"]:\n return current_app.response_class(res[1], mimetype=\"text/{0!s}\".format(res[0]))\n elif len(res) == 2:\n return current_app.response_class(json.dumps(res[1]),\n mimetype=\"application/{0!s}\".format(res[0]))\n else:\n return current_app.response_class(res[1], mimetype=\"application/octet-binary\",\n headers=res[2])\n", "path": "privacyidea/api/ttype.py"}]}
| 1,729 | 237 |
gh_patches_debug_790
|
rasdani/github-patches
|
git_diff
|
ibis-project__ibis-8364
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
bug: `Scalar.isin(Column)` returns a Column, not a Scalar
### What happened?
```python
import ibis
needle = ibis.literal(2)
haystack = ibis.memtable({"x": [1, 2, 3]}).x
type(needle.isin(haystack))
# ibis.expr.types.logical.BooleanColumn
```
### What version of ibis are you using?
main
### What backend(s) are you using, if any?
_No response_
### Relevant log output
_No response_
### Code of Conduct
- [X] I agree to follow this project's Code of Conduct
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ibis/expr/operations/subqueries.py`
Content:
```
1 from __future__ import annotations
2
3 from public import public
4
5 import ibis.expr.datashape as ds
6 import ibis.expr.datatypes as dt
7 import ibis.expr.rules as rlz
8 from ibis.common.annotations import attribute
9 from ibis.common.exceptions import IntegrityError
10 from ibis.expr.operations.core import Value
11 from ibis.expr.operations.relations import Relation # noqa: TCH001
12
13
14 @public
15 class Subquery(Value):
16 rel: Relation
17
18 @attribute
19 def relations(self):
20 return frozenset()
21
22
23 @public
24 class ExistsSubquery(Subquery):
25 dtype = dt.boolean
26 shape = ds.columnar
27
28
29 @public
30 class ScalarSubquery(Subquery):
31 shape = ds.scalar
32
33 def __init__(self, rel):
34 if len(rel.schema) != 1:
35 raise IntegrityError(
36 "Relation passed to ScalarSubquery() must have exactly one "
37 f"column, got {len(rel.schema)}"
38 )
39 super().__init__(rel=rel)
40
41 @attribute
42 def value(self):
43 (value,) = self.rel.values.values()
44 return value
45
46 @attribute
47 def dtype(self):
48 return self.value.dtype
49
50
51 @public
52 class InSubquery(Subquery):
53 needle: Value
54
55 dtype = dt.boolean
56 shape = ds.columnar
57
58 def __init__(self, rel, needle):
59 if len(rel.schema) != 1:
60 raise IntegrityError(
61 "Relation passed to InSubquery() must have exactly one "
62 f"column, got {len(rel.schema)}"
63 )
64 (value,) = rel.values.values()
65 if not rlz.comparable(value, needle):
66 raise IntegrityError(f"{needle!r} is not comparable to {value!r}")
67 super().__init__(rel=rel, needle=needle)
68
69 @attribute
70 def value(self):
71 (value,) = self.rel.values.values()
72 return value
73
74 @attribute
75 def relations(self):
76 return self.needle.relations
77
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ibis/expr/operations/subqueries.py b/ibis/expr/operations/subqueries.py
--- a/ibis/expr/operations/subqueries.py
+++ b/ibis/expr/operations/subqueries.py
@@ -53,7 +53,7 @@
needle: Value
dtype = dt.boolean
- shape = ds.columnar
+ shape = rlz.shape_like("needle")
def __init__(self, rel, needle):
if len(rel.schema) != 1:
|
{"golden_diff": "diff --git a/ibis/expr/operations/subqueries.py b/ibis/expr/operations/subqueries.py\n--- a/ibis/expr/operations/subqueries.py\n+++ b/ibis/expr/operations/subqueries.py\n@@ -53,7 +53,7 @@\n needle: Value\n \n dtype = dt.boolean\n- shape = ds.columnar\n+ shape = rlz.shape_like(\"needle\")\n \n def __init__(self, rel, needle):\n if len(rel.schema) != 1:\n", "issue": "bug: `Scalar.isin(Column)` returns a Column, not a Scalar\n### What happened?\n\n```python\r\nimport ibis\r\n\r\nneedle = ibis.literal(2)\r\nhaystack = ibis.memtable({\"x\": [1, 2, 3]}).x\r\ntype(needle.isin(haystack))\r\n# ibis.expr.types.logical.BooleanColumn\r\n```\n\n### What version of ibis are you using?\n\nmain\n\n### What backend(s) are you using, if any?\n\n_No response_\n\n### Relevant log output\n\n_No response_\n\n### Code of Conduct\n\n- [X] I agree to follow this project's Code of Conduct\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom public import public\n\nimport ibis.expr.datashape as ds\nimport ibis.expr.datatypes as dt\nimport ibis.expr.rules as rlz\nfrom ibis.common.annotations import attribute\nfrom ibis.common.exceptions import IntegrityError\nfrom ibis.expr.operations.core import Value\nfrom ibis.expr.operations.relations import Relation # noqa: TCH001\n\n\n@public\nclass Subquery(Value):\n rel: Relation\n\n @attribute\n def relations(self):\n return frozenset()\n\n\n@public\nclass ExistsSubquery(Subquery):\n dtype = dt.boolean\n shape = ds.columnar\n\n\n@public\nclass ScalarSubquery(Subquery):\n shape = ds.scalar\n\n def __init__(self, rel):\n if len(rel.schema) != 1:\n raise IntegrityError(\n \"Relation passed to ScalarSubquery() must have exactly one \"\n f\"column, got {len(rel.schema)}\"\n )\n super().__init__(rel=rel)\n\n @attribute\n def value(self):\n (value,) = self.rel.values.values()\n return value\n\n @attribute\n def dtype(self):\n return self.value.dtype\n\n\n@public\nclass InSubquery(Subquery):\n needle: Value\n\n dtype = dt.boolean\n shape = ds.columnar\n\n def __init__(self, rel, needle):\n if len(rel.schema) != 1:\n raise IntegrityError(\n \"Relation passed to InSubquery() must have exactly one \"\n f\"column, got {len(rel.schema)}\"\n )\n (value,) = rel.values.values()\n if not rlz.comparable(value, needle):\n raise IntegrityError(f\"{needle!r} is not comparable to {value!r}\")\n super().__init__(rel=rel, needle=needle)\n\n @attribute\n def value(self):\n (value,) = self.rel.values.values()\n return value\n\n @attribute\n def relations(self):\n return self.needle.relations\n", "path": "ibis/expr/operations/subqueries.py"}], "after_files": [{"content": "from __future__ import annotations\n\nfrom public import public\n\nimport ibis.expr.datashape as ds\nimport ibis.expr.datatypes as dt\nimport ibis.expr.rules as rlz\nfrom ibis.common.annotations import attribute\nfrom ibis.common.exceptions import IntegrityError\nfrom ibis.expr.operations.core import Value\nfrom ibis.expr.operations.relations import Relation # noqa: TCH001\n\n\n@public\nclass Subquery(Value):\n rel: Relation\n\n @attribute\n def relations(self):\n return frozenset()\n\n\n@public\nclass ExistsSubquery(Subquery):\n dtype = dt.boolean\n shape = ds.columnar\n\n\n@public\nclass ScalarSubquery(Subquery):\n shape = ds.scalar\n\n def __init__(self, rel):\n if len(rel.schema) != 1:\n raise IntegrityError(\n \"Relation passed to ScalarSubquery() must have exactly one \"\n f\"column, got {len(rel.schema)}\"\n )\n super().__init__(rel=rel)\n\n @attribute\n def value(self):\n (value,) = 
self.rel.values.values()\n return value\n\n @attribute\n def dtype(self):\n return self.value.dtype\n\n\n@public\nclass InSubquery(Subquery):\n needle: Value\n\n dtype = dt.boolean\n shape = rlz.shape_like(\"needle\")\n\n def __init__(self, rel, needle):\n if len(rel.schema) != 1:\n raise IntegrityError(\n \"Relation passed to InSubquery() must have exactly one \"\n f\"column, got {len(rel.schema)}\"\n )\n (value,) = rel.values.values()\n if not rlz.comparable(value, needle):\n raise IntegrityError(f\"{needle!r} is not comparable to {value!r}\")\n super().__init__(rel=rel, needle=needle)\n\n @attribute\n def value(self):\n (value,) = self.rel.values.values()\n return value\n\n @attribute\n def relations(self):\n return self.needle.relations\n", "path": "ibis/expr/operations/subqueries.py"}]}
| 981 | 116 |
gh_patches_debug_22414
|
rasdani/github-patches
|
git_diff
|
translate__pootle-6485
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Languages in languages drop down menu are messed
Hi,
the languages in the languages drop down menu are in a pretty mess now. It seems that they are not sorted anymore now, neither by language name nor by locale.
Regards,
Michael
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pootle/core/views/base.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) Pootle contributors.
4 #
5 # This file is a part of the Pootle project. It is distributed under the GPL3
6 # or later license. See the LICENSE file for a copy of the license and the
7 # AUTHORS file for copyright and authorship information.
8
9 from django.urls import reverse
10 from django.utils.decorators import method_decorator
11 from django.utils.functional import cached_property
12 from django.utils.translation import get_language
13 from django.views.decorators.cache import never_cache
14 from django.views.generic import DetailView
15
16 from pootle.core.delegate import site_languages
17 from pootle.core.url_helpers import get_path_parts
18 from pootle.i18n.gettext import ugettext as _
19 from pootle_app.models.permissions import check_permission
20 from pootle_misc.util import ajax_required
21
22 from .decorators import requires_permission, set_permissions
23 from .mixins import GatherContextMixin, PootleJSONMixin
24
25
26 class PootleDetailView(GatherContextMixin, DetailView):
27 translate_url_path = ""
28 browse_url_path = ""
29 resource_path = ""
30 view_name = ""
31 sw_version = 0
32 ns = "pootle.core"
33
34 @property
35 def browse_url(self):
36 return reverse(
37 self.browse_url_path,
38 kwargs=self.url_kwargs)
39
40 @property
41 def cache_key(self):
42 return (
43 "%s.%s.%s.%s"
44 % (self.page_name,
45 self.view_name,
46 self.object.data_tool.cache_key,
47 self.request_lang))
48
49 @property
50 def request_lang(self):
51 return get_language()
52
53 @cached_property
54 def has_admin_access(self):
55 return check_permission('administrate', self.request)
56
57 @property
58 def language(self):
59 if self.tp:
60 return self.tp.language
61
62 @property
63 def permission_context(self):
64 return self.get_object()
65
66 @property
67 def pootle_path(self):
68 return self.object.pootle_path
69
70 @property
71 def project(self):
72 if self.tp:
73 return self.tp.project
74
75 @property
76 def tp(self):
77 return None
78
79 @property
80 def translate_url(self):
81 return reverse(
82 self.translate_url_path,
83 kwargs=self.url_kwargs)
84
85 @set_permissions
86 @requires_permission("view")
87 def dispatch(self, request, *args, **kwargs):
88 # get funky with the request 8/
89 return super(PootleDetailView, self).dispatch(request, *args, **kwargs)
90
91 @property
92 def languages(self):
93 languages = site_languages.get()
94 return (
95 languages.all_languages
96 if self.has_admin_access
97 else languages.languages)
98
99 def get_context_data(self, *args, **kwargs):
100 return {
101 'object': self.object,
102 'pootle_path': self.pootle_path,
103 'project': self.project,
104 'language': self.language,
105 "all_languages": self.languages,
106 'translation_project': self.tp,
107 'has_admin_access': self.has_admin_access,
108 'resource_path': self.resource_path,
109 'resource_path_parts': get_path_parts(self.resource_path),
110 'translate_url': self.translate_url,
111 'browse_url': self.browse_url,
112 'paths_placeholder': _("Entire Project"),
113 'unit_api_root': "/xhr/units/"}
114
115
116 class PootleJSON(PootleJSONMixin, PootleDetailView):
117
118 @never_cache
119 @method_decorator(ajax_required)
120 @set_permissions
121 @requires_permission("view")
122 def dispatch(self, request, *args, **kwargs):
123 return super(PootleJSON, self).dispatch(request, *args, **kwargs)
124
125
126 class PootleAdminView(DetailView):
127
128 @set_permissions
129 @requires_permission("administrate")
130 def dispatch(self, request, *args, **kwargs):
131 return super(PootleAdminView, self).dispatch(request, *args, **kwargs)
132
133 @property
134 def permission_context(self):
135 return self.get_object().directory
136
137 def post(self, *args, **kwargs):
138 return self.get(*args, **kwargs)
139
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pootle/core/views/base.py b/pootle/core/views/base.py
--- a/pootle/core/views/base.py
+++ b/pootle/core/views/base.py
@@ -6,6 +6,8 @@
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
+from collections import OrderedDict
+
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.functional import cached_property
@@ -91,10 +93,18 @@
@property
def languages(self):
languages = site_languages.get()
- return (
+ languages = (
languages.all_languages
if self.has_admin_access
else languages.languages)
+ lang_map = {
+ v: k
+ for k, v
+ in languages.items()}
+ return OrderedDict(
+ (lang_map[v], v)
+ for v
+ in sorted(languages.values()))
def get_context_data(self, *args, **kwargs):
return {
|
{"golden_diff": "diff --git a/pootle/core/views/base.py b/pootle/core/views/base.py\n--- a/pootle/core/views/base.py\n+++ b/pootle/core/views/base.py\n@@ -6,6 +6,8 @@\n # or later license. See the LICENSE file for a copy of the license and the\n # AUTHORS file for copyright and authorship information.\n \n+from collections import OrderedDict\n+\n from django.urls import reverse\n from django.utils.decorators import method_decorator\n from django.utils.functional import cached_property\n@@ -91,10 +93,18 @@\n @property\n def languages(self):\n languages = site_languages.get()\n- return (\n+ languages = (\n languages.all_languages\n if self.has_admin_access\n else languages.languages)\n+ lang_map = {\n+ v: k\n+ for k, v\n+ in languages.items()}\n+ return OrderedDict(\n+ (lang_map[v], v)\n+ for v\n+ in sorted(languages.values()))\n \n def get_context_data(self, *args, **kwargs):\n return {\n", "issue": "Languages in languages drop down menu are messed\nHi,\r\n\r\nthe languages in the languages drop down menu are in a pretty mess now. It seems that they are not sorted anymore now, neither by language name nor by locale.\r\n\r\nRegards,\r\nMichael\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django.urls import reverse\nfrom django.utils.decorators import method_decorator\nfrom django.utils.functional import cached_property\nfrom django.utils.translation import get_language\nfrom django.views.decorators.cache import never_cache\nfrom django.views.generic import DetailView\n\nfrom pootle.core.delegate import site_languages\nfrom pootle.core.url_helpers import get_path_parts\nfrom pootle.i18n.gettext import ugettext as _\nfrom pootle_app.models.permissions import check_permission\nfrom pootle_misc.util import ajax_required\n\nfrom .decorators import requires_permission, set_permissions\nfrom .mixins import GatherContextMixin, PootleJSONMixin\n\n\nclass PootleDetailView(GatherContextMixin, DetailView):\n translate_url_path = \"\"\n browse_url_path = \"\"\n resource_path = \"\"\n view_name = \"\"\n sw_version = 0\n ns = \"pootle.core\"\n\n @property\n def browse_url(self):\n return reverse(\n self.browse_url_path,\n kwargs=self.url_kwargs)\n\n @property\n def cache_key(self):\n return (\n \"%s.%s.%s.%s\"\n % (self.page_name,\n self.view_name,\n self.object.data_tool.cache_key,\n self.request_lang))\n\n @property\n def request_lang(self):\n return get_language()\n\n @cached_property\n def has_admin_access(self):\n return check_permission('administrate', self.request)\n\n @property\n def language(self):\n if self.tp:\n return self.tp.language\n\n @property\n def permission_context(self):\n return self.get_object()\n\n @property\n def pootle_path(self):\n return self.object.pootle_path\n\n @property\n def project(self):\n if self.tp:\n return self.tp.project\n\n @property\n def tp(self):\n return None\n\n @property\n def translate_url(self):\n return reverse(\n self.translate_url_path,\n kwargs=self.url_kwargs)\n\n @set_permissions\n @requires_permission(\"view\")\n def dispatch(self, request, *args, **kwargs):\n # get funky with the request 8/\n return super(PootleDetailView, self).dispatch(request, *args, **kwargs)\n\n @property\n def languages(self):\n languages = site_languages.get()\n return (\n languages.all_languages\n if 
self.has_admin_access\n else languages.languages)\n\n def get_context_data(self, *args, **kwargs):\n return {\n 'object': self.object,\n 'pootle_path': self.pootle_path,\n 'project': self.project,\n 'language': self.language,\n \"all_languages\": self.languages,\n 'translation_project': self.tp,\n 'has_admin_access': self.has_admin_access,\n 'resource_path': self.resource_path,\n 'resource_path_parts': get_path_parts(self.resource_path),\n 'translate_url': self.translate_url,\n 'browse_url': self.browse_url,\n 'paths_placeholder': _(\"Entire Project\"),\n 'unit_api_root': \"/xhr/units/\"}\n\n\nclass PootleJSON(PootleJSONMixin, PootleDetailView):\n\n @never_cache\n @method_decorator(ajax_required)\n @set_permissions\n @requires_permission(\"view\")\n def dispatch(self, request, *args, **kwargs):\n return super(PootleJSON, self).dispatch(request, *args, **kwargs)\n\n\nclass PootleAdminView(DetailView):\n\n @set_permissions\n @requires_permission(\"administrate\")\n def dispatch(self, request, *args, **kwargs):\n return super(PootleAdminView, self).dispatch(request, *args, **kwargs)\n\n @property\n def permission_context(self):\n return self.get_object().directory\n\n def post(self, *args, **kwargs):\n return self.get(*args, **kwargs)\n", "path": "pootle/core/views/base.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom collections import OrderedDict\n\nfrom django.urls import reverse\nfrom django.utils.decorators import method_decorator\nfrom django.utils.functional import cached_property\nfrom django.utils.translation import get_language\nfrom django.views.decorators.cache import never_cache\nfrom django.views.generic import DetailView\n\nfrom pootle.core.delegate import site_languages\nfrom pootle.core.url_helpers import get_path_parts\nfrom pootle.i18n.gettext import ugettext as _\nfrom pootle_app.models.permissions import check_permission\nfrom pootle_misc.util import ajax_required\n\nfrom .decorators import requires_permission, set_permissions\nfrom .mixins import GatherContextMixin, PootleJSONMixin\n\n\nclass PootleDetailView(GatherContextMixin, DetailView):\n translate_url_path = \"\"\n browse_url_path = \"\"\n resource_path = \"\"\n view_name = \"\"\n sw_version = 0\n ns = \"pootle.core\"\n\n @property\n def browse_url(self):\n return reverse(\n self.browse_url_path,\n kwargs=self.url_kwargs)\n\n @property\n def cache_key(self):\n return (\n \"%s.%s.%s.%s\"\n % (self.page_name,\n self.view_name,\n self.object.data_tool.cache_key,\n self.request_lang))\n\n @property\n def request_lang(self):\n return get_language()\n\n @cached_property\n def has_admin_access(self):\n return check_permission('administrate', self.request)\n\n @property\n def language(self):\n if self.tp:\n return self.tp.language\n\n @property\n def permission_context(self):\n return self.get_object()\n\n @property\n def pootle_path(self):\n return self.object.pootle_path\n\n @property\n def project(self):\n if self.tp:\n return self.tp.project\n\n @property\n def tp(self):\n return None\n\n @property\n def translate_url(self):\n return reverse(\n self.translate_url_path,\n kwargs=self.url_kwargs)\n\n @set_permissions\n @requires_permission(\"view\")\n def dispatch(self, request, *args, **kwargs):\n # get funky with the request 8/\n return 
super(PootleDetailView, self).dispatch(request, *args, **kwargs)\n\n @property\n def languages(self):\n languages = site_languages.get()\n languages = (\n languages.all_languages\n if self.has_admin_access\n else languages.languages)\n lang_map = {\n v: k\n for k, v\n in languages.items()}\n return OrderedDict(\n (lang_map[v], v)\n for v\n in sorted(languages.values()))\n\n def get_context_data(self, *args, **kwargs):\n return {\n 'object': self.object,\n 'pootle_path': self.pootle_path,\n 'project': self.project,\n 'language': self.language,\n \"all_languages\": self.languages,\n 'translation_project': self.tp,\n 'has_admin_access': self.has_admin_access,\n 'resource_path': self.resource_path,\n 'resource_path_parts': get_path_parts(self.resource_path),\n 'translate_url': self.translate_url,\n 'browse_url': self.browse_url,\n 'paths_placeholder': _(\"Entire Project\"),\n 'unit_api_root': \"/xhr/units/\"}\n\n\nclass PootleJSON(PootleJSONMixin, PootleDetailView):\n\n @never_cache\n @method_decorator(ajax_required)\n @set_permissions\n @requires_permission(\"view\")\n def dispatch(self, request, *args, **kwargs):\n return super(PootleJSON, self).dispatch(request, *args, **kwargs)\n\n\nclass PootleAdminView(DetailView):\n\n @set_permissions\n @requires_permission(\"administrate\")\n def dispatch(self, request, *args, **kwargs):\n return super(PootleAdminView, self).dispatch(request, *args, **kwargs)\n\n @property\n def permission_context(self):\n return self.get_object().directory\n\n def post(self, *args, **kwargs):\n return self.get(*args, **kwargs)\n", "path": "pootle/core/views/base.py"}]}
| 1,521 | 233 |
gh_patches_debug_27678
|
rasdani/github-patches
|
git_diff
|
mozilla__bugbug-854
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add an option to the bug_classifier script to download the model when it doesn't exist
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scripts/bug_classifier.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 import argparse
4 import os
5
6 import numpy as np
7
8 from bugbug import bugzilla
9 from bugbug.models import get_model_class
10
11 MODELS_WITH_TYPE = ("component",)
12
13
14 def classify_bugs(model_name, classifier):
15 if classifier != "default":
16 assert (
17 model_name in MODELS_WITH_TYPE
18 ), f"{classifier} is not a valid classifier type for {model_name}"
19
20 model_file_name = f"{model_name}{classifier}model"
21 model_name = f"{model_name}_{classifier}"
22 else:
23 model_file_name = f"{model_name}model"
24
25 assert os.path.exists(
26 model_file_name
27 ), f"{model_file_name} does not exist. Train the model with trainer.py first."
28
29 model_class = get_model_class(model_name)
30 model = model_class.load(model_file_name)
31
32 for bug in bugzilla.get_bugs():
33 print(
34 f'https://bugzilla.mozilla.org/show_bug.cgi?id={bug["id"]} - {bug["summary"]} '
35 )
36
37 if model.calculate_importance:
38 probas, importance = model.classify(
39 bug, probabilities=True, importances=True
40 )
41
42 feature_names = model.get_human_readable_feature_names()
43
44 model.print_feature_importances(
45 importance["importances"], feature_names, class_probabilities=probas
46 )
47 else:
48 probas = model.classify(bug, probabilities=True, importances=False)
49
50 if np.argmax(probas) == 1:
51 print(f"Positive! {probas}")
52 else:
53 print(f"Negative! {probas}")
54 input()
55
56
57 def main():
58 description = "Perform evaluation on bugs using the specified model"
59 parser = argparse.ArgumentParser(description=description)
60
61 parser.add_argument("model", help="Which model to use for evaluation")
62 parser.add_argument(
63 "--classifier",
64 help="Type of the classifier. Only used for component classification.",
65 choices=["default", "nn"],
66 default="default",
67 )
68
69 args = parser.parse_args()
70
71 classify_bugs(args.model, args.classifier)
72
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/scripts/bug_classifier.py b/scripts/bug_classifier.py
--- a/scripts/bug_classifier.py
+++ b/scripts/bug_classifier.py
@@ -2,14 +2,20 @@
import argparse
import os
+from logging import INFO, basicConfig, getLogger
import numpy as np
+import requests
from bugbug import bugzilla
from bugbug.models import get_model_class
+from bugbug.utils import download_check_etag, zstd_decompress
MODELS_WITH_TYPE = ("component",)
+basicConfig(level=INFO)
+logger = getLogger(__name__)
+
def classify_bugs(model_name, classifier):
if classifier != "default":
@@ -22,9 +28,21 @@
else:
model_file_name = f"{model_name}model"
- assert os.path.exists(
- model_file_name
- ), f"{model_file_name} does not exist. Train the model with trainer.py first."
+ if not os.path.exists(model_file_name):
+ logger.info(f"{model_file_name} does not exist. Downloading the model....")
+ try:
+ download_check_etag(
+ f"https://index.taskcluster.net/v1/task/project.relman.bugbug.train_{model_name}.latest/artifacts/public/{model_file_name}.zst",
+ f"{model_file_name}.zst",
+ )
+ except requests.HTTPError:
+ logger.error(
+ f"A pre-trained model is not available, you will need to train it yourself using the trainer script"
+ )
+ raise SystemExit(1)
+
+ zstd_decompress(model_file_name)
+ assert os.path.exists(model_file_name), "Decompressed file doesn't exist"
model_class = get_model_class(model_name)
model = model_class.load(model_file_name)
|
{"golden_diff": "diff --git a/scripts/bug_classifier.py b/scripts/bug_classifier.py\n--- a/scripts/bug_classifier.py\n+++ b/scripts/bug_classifier.py\n@@ -2,14 +2,20 @@\n \n import argparse\n import os\n+from logging import INFO, basicConfig, getLogger\n \n import numpy as np\n+import requests\n \n from bugbug import bugzilla\n from bugbug.models import get_model_class\n+from bugbug.utils import download_check_etag, zstd_decompress\n \n MODELS_WITH_TYPE = (\"component\",)\n \n+basicConfig(level=INFO)\n+logger = getLogger(__name__)\n+\n \n def classify_bugs(model_name, classifier):\n if classifier != \"default\":\n@@ -22,9 +28,21 @@\n else:\n model_file_name = f\"{model_name}model\"\n \n- assert os.path.exists(\n- model_file_name\n- ), f\"{model_file_name} does not exist. Train the model with trainer.py first.\"\n+ if not os.path.exists(model_file_name):\n+ logger.info(f\"{model_file_name} does not exist. Downloading the model....\")\n+ try:\n+ download_check_etag(\n+ f\"https://index.taskcluster.net/v1/task/project.relman.bugbug.train_{model_name}.latest/artifacts/public/{model_file_name}.zst\",\n+ f\"{model_file_name}.zst\",\n+ )\n+ except requests.HTTPError:\n+ logger.error(\n+ f\"A pre-trained model is not available, you will need to train it yourself using the trainer script\"\n+ )\n+ raise SystemExit(1)\n+\n+ zstd_decompress(model_file_name)\n+ assert os.path.exists(model_file_name), \"Decompressed file doesn't exist\"\n \n model_class = get_model_class(model_name)\n model = model_class.load(model_file_name)\n", "issue": "Add an option to the bug_classifier script to download the model when it doesn't exist\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nimport argparse\nimport os\n\nimport numpy as np\n\nfrom bugbug import bugzilla\nfrom bugbug.models import get_model_class\n\nMODELS_WITH_TYPE = (\"component\",)\n\n\ndef classify_bugs(model_name, classifier):\n if classifier != \"default\":\n assert (\n model_name in MODELS_WITH_TYPE\n ), f\"{classifier} is not a valid classifier type for {model_name}\"\n\n model_file_name = f\"{model_name}{classifier}model\"\n model_name = f\"{model_name}_{classifier}\"\n else:\n model_file_name = f\"{model_name}model\"\n\n assert os.path.exists(\n model_file_name\n ), f\"{model_file_name} does not exist. Train the model with trainer.py first.\"\n\n model_class = get_model_class(model_name)\n model = model_class.load(model_file_name)\n\n for bug in bugzilla.get_bugs():\n print(\n f'https://bugzilla.mozilla.org/show_bug.cgi?id={bug[\"id\"]} - {bug[\"summary\"]} '\n )\n\n if model.calculate_importance:\n probas, importance = model.classify(\n bug, probabilities=True, importances=True\n )\n\n feature_names = model.get_human_readable_feature_names()\n\n model.print_feature_importances(\n importance[\"importances\"], feature_names, class_probabilities=probas\n )\n else:\n probas = model.classify(bug, probabilities=True, importances=False)\n\n if np.argmax(probas) == 1:\n print(f\"Positive! {probas}\")\n else:\n print(f\"Negative! {probas}\")\n input()\n\n\ndef main():\n description = \"Perform evaluation on bugs using the specified model\"\n parser = argparse.ArgumentParser(description=description)\n\n parser.add_argument(\"model\", help=\"Which model to use for evaluation\")\n parser.add_argument(\n \"--classifier\",\n help=\"Type of the classifier. 
Only used for component classification.\",\n choices=[\"default\", \"nn\"],\n default=\"default\",\n )\n\n args = parser.parse_args()\n\n classify_bugs(args.model, args.classifier)\n", "path": "scripts/bug_classifier.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\nimport argparse\nimport os\nfrom logging import INFO, basicConfig, getLogger\n\nimport numpy as np\nimport requests\n\nfrom bugbug import bugzilla\nfrom bugbug.models import get_model_class\nfrom bugbug.utils import download_check_etag, zstd_decompress\n\nMODELS_WITH_TYPE = (\"component\",)\n\nbasicConfig(level=INFO)\nlogger = getLogger(__name__)\n\n\ndef classify_bugs(model_name, classifier):\n if classifier != \"default\":\n assert (\n model_name in MODELS_WITH_TYPE\n ), f\"{classifier} is not a valid classifier type for {model_name}\"\n\n model_file_name = f\"{model_name}{classifier}model\"\n model_name = f\"{model_name}_{classifier}\"\n else:\n model_file_name = f\"{model_name}model\"\n\n if not os.path.exists(model_file_name):\n logger.info(f\"{model_file_name} does not exist. Downloading the model....\")\n try:\n download_check_etag(\n f\"https://index.taskcluster.net/v1/task/project.relman.bugbug.train_{model_name}.latest/artifacts/public/{model_file_name}.zst\",\n f\"{model_file_name}.zst\",\n )\n except requests.HTTPError:\n logger.error(\n f\"A pre-trained model is not available, you will need to train it yourself using the trainer script\"\n )\n raise SystemExit(1)\n\n zstd_decompress(model_file_name)\n assert os.path.exists(model_file_name), \"Decompressed file doesn't exist\"\n\n model_class = get_model_class(model_name)\n model = model_class.load(model_file_name)\n\n for bug in bugzilla.get_bugs():\n print(\n f'https://bugzilla.mozilla.org/show_bug.cgi?id={bug[\"id\"]} - {bug[\"summary\"]} '\n )\n\n if model.calculate_importance:\n probas, importance = model.classify(\n bug, probabilities=True, importances=True\n )\n\n feature_names = model.get_human_readable_feature_names()\n\n model.print_feature_importances(\n importance[\"importances\"], feature_names, class_probabilities=probas\n )\n else:\n probas = model.classify(bug, probabilities=True, importances=False)\n\n if np.argmax(probas) == 1:\n print(f\"Positive! {probas}\")\n else:\n print(f\"Negative! {probas}\")\n input()\n\n\ndef main():\n description = \"Perform evaluation on bugs using the specified model\"\n parser = argparse.ArgumentParser(description=description)\n\n parser.add_argument(\"model\", help=\"Which model to use for evaluation\")\n parser.add_argument(\n \"--classifier\",\n help=\"Type of the classifier. Only used for component classification.\",\n choices=[\"default\", \"nn\"],\n default=\"default\",\n )\n\n args = parser.parse_args()\n\n classify_bugs(args.model, args.classifier)\n", "path": "scripts/bug_classifier.py"}]}
| 869 | 396 |
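For context, the download-if-missing flow introduced by the golden diff above (an ETag-checked fetch of a `.zst` artifact followed by decompression via `bugbug.utils`) can be approximated without those helpers. A minimal sketch, assuming the `requests` and `zstandard` packages are available; `fetch_model` is a hypothetical name, not a bugbug API:

```python
import os

import requests
import zstandard


def fetch_model(url: str, dest: str) -> None:
    # Download the compressed artifact; raise_for_status() surfaces
    # requests.HTTPError, the exception the patched script catches.
    compressed = f"{dest}.zst"
    response = requests.get(url, stream=True)
    response.raise_for_status()
    with open(compressed, "wb") as fh:
        for chunk in response.iter_content(chunk_size=1 << 20):
            fh.write(chunk)

    # Stream-decompress next to the .zst file, mirroring zstd_decompress.
    dctx = zstandard.ZstdDecompressor()
    with open(compressed, "rb") as ifh, open(dest, "wb") as ofh:
        dctx.copy_stream(ifh, ofh)
    assert os.path.exists(dest), "Decompressed file doesn't exist"
```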
gh_patches_debug_8135
|
rasdani/github-patches
|
git_diff
|
GeotrekCE__Geotrek-admin-1047
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Show land module in left menus
As we said it would be interesting to move it to its own menu, we should take opportunity to rename some elements :
Module name :
"Gestion fonciรจre" TO "Gestionnaires"
AND ALSO :
"Zone de compรฉtence" TO "Compรฉtence sentiers"
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `geotrek/land/urls.py`
Content:
```
1 from mapentity import registry
2
3 from . import models
4
5
6 urlpatterns = registry.register(models.PhysicalEdge, menu=False)
7 urlpatterns += registry.register(models.LandEdge, menu=False)
8 urlpatterns += registry.register(models.CompetenceEdge, menu=False)
9 urlpatterns += registry.register(models.WorkManagementEdge, menu=False)
10 urlpatterns += registry.register(models.SignageManagementEdge, menu=False)
11
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/geotrek/land/urls.py b/geotrek/land/urls.py
--- a/geotrek/land/urls.py
+++ b/geotrek/land/urls.py
@@ -4,7 +4,7 @@
urlpatterns = registry.register(models.PhysicalEdge, menu=False)
-urlpatterns += registry.register(models.LandEdge, menu=False)
+urlpatterns += registry.register(models.LandEdge)
urlpatterns += registry.register(models.CompetenceEdge, menu=False)
urlpatterns += registry.register(models.WorkManagementEdge, menu=False)
urlpatterns += registry.register(models.SignageManagementEdge, menu=False)
|
{"golden_diff": "diff --git a/geotrek/land/urls.py b/geotrek/land/urls.py\n--- a/geotrek/land/urls.py\n+++ b/geotrek/land/urls.py\n@@ -4,7 +4,7 @@\n \n \n urlpatterns = registry.register(models.PhysicalEdge, menu=False)\n-urlpatterns += registry.register(models.LandEdge, menu=False)\n+urlpatterns += registry.register(models.LandEdge)\n urlpatterns += registry.register(models.CompetenceEdge, menu=False)\n urlpatterns += registry.register(models.WorkManagementEdge, menu=False)\n urlpatterns += registry.register(models.SignageManagementEdge, menu=False)\n", "issue": "Show land module in left menus\nAs we said it would be interesting to move it to its own menu, we should take opportunity to rename some elements : \n\nModule name : \n\"Gestion fonci\u00e8re\" TO \"Gestionnaires\"\nAND ALSO : \n\"Zone de comp\u00e9tence\" TO \"Comp\u00e9tence sentiers\"\n\n", "before_files": [{"content": "from mapentity import registry\n\nfrom . import models\n\n\nurlpatterns = registry.register(models.PhysicalEdge, menu=False)\nurlpatterns += registry.register(models.LandEdge, menu=False)\nurlpatterns += registry.register(models.CompetenceEdge, menu=False)\nurlpatterns += registry.register(models.WorkManagementEdge, menu=False)\nurlpatterns += registry.register(models.SignageManagementEdge, menu=False)\n", "path": "geotrek/land/urls.py"}], "after_files": [{"content": "from mapentity import registry\n\nfrom . import models\n\n\nurlpatterns = registry.register(models.PhysicalEdge, menu=False)\nurlpatterns += registry.register(models.LandEdge)\nurlpatterns += registry.register(models.CompetenceEdge, menu=False)\nurlpatterns += registry.register(models.WorkManagementEdge, menu=False)\nurlpatterns += registry.register(models.SignageManagementEdge, menu=False)\n", "path": "geotrek/land/urls.py"}]}
| 417 | 128 |
gh_patches_debug_23375
|
rasdani/github-patches
|
git_diff
|
pypa__setuptools-2863
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[FR] Add integration tests to catch breaking changes in the API
### What's the problem this feature will solve?
It would be nice to have integration tests focusing on the usage of setuptools "public API" by some popular packages in the community.
This way we can catch breaking changes in the API before publishing new releases
### Describe the solution you'd like
According to the discussion in https://github.com/pypa/setuptools/pull/2844, if adding a new "integration test suite", the following characteristics are desirable:
1. It should run separated from the main test suite (integration tests are resource intensive and time consuming, so the best is to avoid always running them and postponing until a new release is ready).
2. It should test how setuptools' API is being used by popular packages in the community to catch rare errors.
### Alternative Solutions
_No response_
### Additional context
_No response_
### Code of Conduct
- [X] I agree to follow the PSF Code of Conduct
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conftest.py`
Content:
```
1 import sys
2
3
4 pytest_plugins = 'setuptools.tests.fixtures'
5
6
7 def pytest_addoption(parser):
8 parser.addoption(
9 "--package_name", action="append", default=[],
10 help="list of package_name to pass to test functions",
11 )
12
13
14 collect_ignore = [
15 'tests/manual_test.py',
16 'setuptools/tests/mod_with_constant.py',
17 'setuptools/_distutils',
18 '_distutils_hack',
19 'setuptools/extern',
20 'pkg_resources/extern',
21 'pkg_resources/tests/data',
22 'setuptools/_vendor',
23 'pkg_resources/_vendor',
24 ]
25
26
27 if sys.version_info < (3, 6):
28 collect_ignore.append('docs/conf.py') # uses f-strings
29 collect_ignore.append('pavement.py')
30
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/conftest.py b/conftest.py
--- a/conftest.py
+++ b/conftest.py
@@ -1,5 +1,7 @@
import sys
+import pytest
+
pytest_plugins = 'setuptools.tests.fixtures'
@@ -9,6 +11,14 @@
"--package_name", action="append", default=[],
help="list of package_name to pass to test functions",
)
+ parser.addoption(
+ "--integration", action="store_true", default=False,
+ help="run integration tests (only)"
+ )
+
+
+def pytest_configure(config):
+ config.addinivalue_line("markers", "integration: integration tests")
collect_ignore = [
@@ -27,3 +37,13 @@
if sys.version_info < (3, 6):
collect_ignore.append('docs/conf.py') # uses f-strings
collect_ignore.append('pavement.py')
+
+
[email protected](autouse=True)
+def _skip_integration(request):
+ running_integration_tests = request.config.getoption("--integration")
+ is_integration_test = request.node.get_closest_marker("integration")
+ if running_integration_tests and not is_integration_test:
+ pytest.skip("running integration tests only")
+ if not running_integration_tests and is_integration_test:
+ pytest.skip("skipping integration tests")
|
{"golden_diff": "diff --git a/conftest.py b/conftest.py\n--- a/conftest.py\n+++ b/conftest.py\n@@ -1,5 +1,7 @@\n import sys\n \n+import pytest\n+\n \n pytest_plugins = 'setuptools.tests.fixtures'\n \n@@ -9,6 +11,14 @@\n \"--package_name\", action=\"append\", default=[],\n help=\"list of package_name to pass to test functions\",\n )\n+ parser.addoption(\n+ \"--integration\", action=\"store_true\", default=False,\n+ help=\"run integration tests (only)\"\n+ )\n+\n+\n+def pytest_configure(config):\n+ config.addinivalue_line(\"markers\", \"integration: integration tests\")\n \n \n collect_ignore = [\n@@ -27,3 +37,13 @@\n if sys.version_info < (3, 6):\n collect_ignore.append('docs/conf.py') # uses f-strings\n collect_ignore.append('pavement.py')\n+\n+\[email protected](autouse=True)\n+def _skip_integration(request):\n+ running_integration_tests = request.config.getoption(\"--integration\")\n+ is_integration_test = request.node.get_closest_marker(\"integration\")\n+ if running_integration_tests and not is_integration_test:\n+ pytest.skip(\"running integration tests only\")\n+ if not running_integration_tests and is_integration_test:\n+ pytest.skip(\"skipping integration tests\")\n", "issue": "[FR] Add integration tests to catch breaking changes in the API\n### What's the problem this feature will solve?\n\nIt would be nice to have integration tests focusing on the usage of setuptools \"public API\" by some popular packages in the community.\r\n\r\nThis way we can catch breaking changes in the API before publishing new releases\n\n### Describe the solution you'd like\n\nAccording to the discussion in https://github.com/pypa/setuptools/pull/2844, if adding a new \"integration test suite\", the following characteristics are desirable:\r\n\r\n1. It should run separated from the main test suite (integration tests are resource intensive and time consuming, so the best is to avoid always running them and postponing until a new release is ready).\r\n2. 
It should test how setuptools' API is being used by popular packages in the community to catch rare errors.\n\n### Alternative Solutions\n\n_No response_\n\n### Additional context\n\n_No response_\n\n### Code of Conduct\n\n- [X] I agree to follow the PSF Code of Conduct\n", "before_files": [{"content": "import sys\n\n\npytest_plugins = 'setuptools.tests.fixtures'\n\n\ndef pytest_addoption(parser):\n parser.addoption(\n \"--package_name\", action=\"append\", default=[],\n help=\"list of package_name to pass to test functions\",\n )\n\n\ncollect_ignore = [\n 'tests/manual_test.py',\n 'setuptools/tests/mod_with_constant.py',\n 'setuptools/_distutils',\n '_distutils_hack',\n 'setuptools/extern',\n 'pkg_resources/extern',\n 'pkg_resources/tests/data',\n 'setuptools/_vendor',\n 'pkg_resources/_vendor',\n]\n\n\nif sys.version_info < (3, 6):\n collect_ignore.append('docs/conf.py') # uses f-strings\n collect_ignore.append('pavement.py')\n", "path": "conftest.py"}], "after_files": [{"content": "import sys\n\nimport pytest\n\n\npytest_plugins = 'setuptools.tests.fixtures'\n\n\ndef pytest_addoption(parser):\n parser.addoption(\n \"--package_name\", action=\"append\", default=[],\n help=\"list of package_name to pass to test functions\",\n )\n parser.addoption(\n \"--integration\", action=\"store_true\", default=False,\n help=\"run integration tests (only)\"\n )\n\n\ndef pytest_configure(config):\n config.addinivalue_line(\"markers\", \"integration: integration tests\")\n\n\ncollect_ignore = [\n 'tests/manual_test.py',\n 'setuptools/tests/mod_with_constant.py',\n 'setuptools/_distutils',\n '_distutils_hack',\n 'setuptools/extern',\n 'pkg_resources/extern',\n 'pkg_resources/tests/data',\n 'setuptools/_vendor',\n 'pkg_resources/_vendor',\n]\n\n\nif sys.version_info < (3, 6):\n collect_ignore.append('docs/conf.py') # uses f-strings\n collect_ignore.append('pavement.py')\n\n\[email protected](autouse=True)\ndef _skip_integration(request):\n running_integration_tests = request.config.getoption(\"--integration\")\n is_integration_test = request.node.get_closest_marker(\"integration\")\n if running_integration_tests and not is_integration_test:\n pytest.skip(\"running integration tests only\")\n if not running_integration_tests and is_integration_test:\n pytest.skip(\"skipping integration tests\")\n", "path": "conftest.py"}]}
| 684 | 299 |
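The conftest patch above pairs a `--integration` command-line flag with an autouse fixture, so unit tests and integration tests never run in the same pytest invocation. A minimal sketch of how a test module would opt in (hypothetical test names, not part of the setuptools suite):

```python
import pytest


@pytest.mark.integration
def test_install_popular_package():
    # Collected always, but skipped unless pytest is run with
    # --integration, via the autouse _skip_integration fixture.
    ...


def test_regular_unit_behavior():
    # Runs by default; skipped when --integration is passed.
    ...
```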
gh_patches_debug_26462
|
rasdani/github-patches
|
git_diff
|
litestar-org__litestar-2269
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `litestar/contrib/sqlalchemy/plugins/__init__.py`
Content:
```
1 from __future__ import annotations
2
3 from .init import (
4 AsyncSessionConfig,
5 EngineConfig,
6 GenericSessionConfig,
7 GenericSQLAlchemyConfig,
8 SQLAlchemyAsyncConfig,
9 SQLAlchemyInitPlugin,
10 SQLAlchemySyncConfig,
11 SyncSessionConfig,
12 )
13 from .serialization import SQLAlchemySerializationPlugin
14
15
16 class SQLAlchemyPlugin(SQLAlchemyInitPlugin, SQLAlchemySerializationPlugin):
17 """A plugin that provides SQLAlchemy integration."""
18
19 def __init__(self, config: SQLAlchemyAsyncConfig | SQLAlchemySyncConfig) -> None:
20 SQLAlchemyInitPlugin.__init__(self, config=config)
21 SQLAlchemySerializationPlugin.__init__(self)
22
23
24 __all__ = (
25 "AsyncSessionConfig",
26 "EngineConfig",
27 "GenericSQLAlchemyConfig",
28 "GenericSessionConfig",
29 "SQLAlchemyAsyncConfig",
30 "SQLAlchemyInitPlugin",
31 "SQLAlchemyPlugin",
32 "SQLAlchemySerializationPlugin",
33 "SQLAlchemySyncConfig",
34 "SyncSessionConfig",
35 )
36
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/litestar/contrib/sqlalchemy/plugins/__init__.py b/litestar/contrib/sqlalchemy/plugins/__init__.py
--- a/litestar/contrib/sqlalchemy/plugins/__init__.py
+++ b/litestar/contrib/sqlalchemy/plugins/__init__.py
@@ -1,5 +1,10 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
+from litestar.contrib.sqlalchemy.plugins import _slots_base
+from litestar.plugins import InitPluginProtocol
+
from .init import (
AsyncSessionConfig,
EngineConfig,
@@ -12,13 +17,29 @@
)
from .serialization import SQLAlchemySerializationPlugin
+if TYPE_CHECKING:
+ from litestar.config.app import AppConfig
+
-class SQLAlchemyPlugin(SQLAlchemyInitPlugin, SQLAlchemySerializationPlugin):
+class SQLAlchemyPlugin(InitPluginProtocol, _slots_base.SlotsBase):
"""A plugin that provides SQLAlchemy integration."""
def __init__(self, config: SQLAlchemyAsyncConfig | SQLAlchemySyncConfig) -> None:
- SQLAlchemyInitPlugin.__init__(self, config=config)
- SQLAlchemySerializationPlugin.__init__(self)
+ """Initialize ``SQLAlchemyPlugin``.
+
+ Args:
+ config: configure DB connection and hook handlers and dependencies.
+ """
+ self._config = config
+
+ def on_app_init(self, app_config: AppConfig) -> AppConfig:
+ """Configure application for use with SQLAlchemy.
+
+ Args:
+ app_config: The :class:`AppConfig <.config.app.AppConfig>` instance.
+ """
+ app_config.plugins.extend([SQLAlchemyInitPlugin(config=self._config), SQLAlchemySerializationPlugin()])
+ return app_config
__all__ = (
|
{"golden_diff": "diff --git a/litestar/contrib/sqlalchemy/plugins/__init__.py b/litestar/contrib/sqlalchemy/plugins/__init__.py\n--- a/litestar/contrib/sqlalchemy/plugins/__init__.py\n+++ b/litestar/contrib/sqlalchemy/plugins/__init__.py\n@@ -1,5 +1,10 @@\n from __future__ import annotations\n \n+from typing import TYPE_CHECKING\n+\n+from litestar.contrib.sqlalchemy.plugins import _slots_base\n+from litestar.plugins import InitPluginProtocol\n+\n from .init import (\n AsyncSessionConfig,\n EngineConfig,\n@@ -12,13 +17,29 @@\n )\n from .serialization import SQLAlchemySerializationPlugin\n \n+if TYPE_CHECKING:\n+ from litestar.config.app import AppConfig\n+\n \n-class SQLAlchemyPlugin(SQLAlchemyInitPlugin, SQLAlchemySerializationPlugin):\n+class SQLAlchemyPlugin(InitPluginProtocol, _slots_base.SlotsBase):\n \"\"\"A plugin that provides SQLAlchemy integration.\"\"\"\n \n def __init__(self, config: SQLAlchemyAsyncConfig | SQLAlchemySyncConfig) -> None:\n- SQLAlchemyInitPlugin.__init__(self, config=config)\n- SQLAlchemySerializationPlugin.__init__(self)\n+ \"\"\"Initialize ``SQLAlchemyPlugin``.\n+\n+ Args:\n+ config: configure DB connection and hook handlers and dependencies.\n+ \"\"\"\n+ self._config = config\n+\n+ def on_app_init(self, app_config: AppConfig) -> AppConfig:\n+ \"\"\"Configure application for use with SQLAlchemy.\n+\n+ Args:\n+ app_config: The :class:`AppConfig <.config.app.AppConfig>` instance.\n+ \"\"\"\n+ app_config.plugins.extend([SQLAlchemyInitPlugin(config=self._config), SQLAlchemySerializationPlugin()])\n+ return app_config\n \n \n __all__ = (\n", "issue": "StaticFilesConfig and virtual directories\nI'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem. \r\n\r\nThis is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). 
I think this condition should be relaxed to support virtual filesystems.\r\n\r\nhttps://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom .init import (\n AsyncSessionConfig,\n EngineConfig,\n GenericSessionConfig,\n GenericSQLAlchemyConfig,\n SQLAlchemyAsyncConfig,\n SQLAlchemyInitPlugin,\n SQLAlchemySyncConfig,\n SyncSessionConfig,\n)\nfrom .serialization import SQLAlchemySerializationPlugin\n\n\nclass SQLAlchemyPlugin(SQLAlchemyInitPlugin, SQLAlchemySerializationPlugin):\n \"\"\"A plugin that provides SQLAlchemy integration.\"\"\"\n\n def __init__(self, config: SQLAlchemyAsyncConfig | SQLAlchemySyncConfig) -> None:\n SQLAlchemyInitPlugin.__init__(self, config=config)\n SQLAlchemySerializationPlugin.__init__(self)\n\n\n__all__ = (\n \"AsyncSessionConfig\",\n \"EngineConfig\",\n \"GenericSQLAlchemyConfig\",\n \"GenericSessionConfig\",\n \"SQLAlchemyAsyncConfig\",\n \"SQLAlchemyInitPlugin\",\n \"SQLAlchemyPlugin\",\n \"SQLAlchemySerializationPlugin\",\n \"SQLAlchemySyncConfig\",\n \"SyncSessionConfig\",\n)\n", "path": "litestar/contrib/sqlalchemy/plugins/__init__.py"}], "after_files": [{"content": "from __future__ import annotations\n\nfrom typing import TYPE_CHECKING\n\nfrom litestar.contrib.sqlalchemy.plugins import _slots_base\nfrom litestar.plugins import InitPluginProtocol\n\nfrom .init import (\n AsyncSessionConfig,\n EngineConfig,\n GenericSessionConfig,\n GenericSQLAlchemyConfig,\n SQLAlchemyAsyncConfig,\n SQLAlchemyInitPlugin,\n SQLAlchemySyncConfig,\n SyncSessionConfig,\n)\nfrom .serialization import SQLAlchemySerializationPlugin\n\nif TYPE_CHECKING:\n from litestar.config.app import AppConfig\n\n\nclass SQLAlchemyPlugin(InitPluginProtocol, _slots_base.SlotsBase):\n \"\"\"A plugin that provides SQLAlchemy integration.\"\"\"\n\n def __init__(self, config: SQLAlchemyAsyncConfig | SQLAlchemySyncConfig) -> None:\n \"\"\"Initialize ``SQLAlchemyPlugin``.\n\n Args:\n config: configure DB connection and hook handlers and dependencies.\n \"\"\"\n self._config = config\n\n def on_app_init(self, app_config: AppConfig) -> AppConfig:\n \"\"\"Configure application for use with SQLAlchemy.\n\n Args:\n app_config: The :class:`AppConfig <.config.app.AppConfig>` instance.\n \"\"\"\n app_config.plugins.extend([SQLAlchemyInitPlugin(config=self._config), SQLAlchemySerializationPlugin()])\n return app_config\n\n\n__all__ = (\n \"AsyncSessionConfig\",\n \"EngineConfig\",\n \"GenericSQLAlchemyConfig\",\n \"GenericSessionConfig\",\n \"SQLAlchemyAsyncConfig\",\n \"SQLAlchemyInitPlugin\",\n \"SQLAlchemyPlugin\",\n \"SQLAlchemySerializationPlugin\",\n \"SQLAlchemySyncConfig\",\n \"SyncSessionConfig\",\n)\n", "path": "litestar/contrib/sqlalchemy/plugins/__init__.py"}]}
| 694 | 373 |
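The refactor above turns `SQLAlchemyPlugin` into a thin `InitPluginProtocol` implementation that appends the init and serialization plugins during `on_app_init`, instead of multiply inheriting from both. From the caller's side the wiring is unchanged. A sketch, assuming an async SQLAlchemy config; the connection string is illustrative only:

```python
from litestar import Litestar
from litestar.contrib.sqlalchemy.plugins import SQLAlchemyAsyncConfig, SQLAlchemyPlugin

# One plugin instance fans out into SQLAlchemyInitPlugin plus
# SQLAlchemySerializationPlugin when the app config is built.
config = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///app.sqlite")
app = Litestar(route_handlers=[], plugins=[SQLAlchemyPlugin(config=config)])
```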
gh_patches_debug_3229
|
rasdani/github-patches
|
git_diff
|
CTFd__CTFd-2371
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Test Translations & Support Spanish
We need to test translations before release and make sure we support Spanish
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `CTFd/constants/languages.py`
Content:
```
1 from CTFd.constants import RawEnum
2
3
4 class Languages(str, RawEnum):
5 ENGLISH = "en"
6 GERMAN = "de"
7 POLISH = "pl"
8
9
10 LANGUAGE_NAMES = {
11 "en": "English",
12 "de": "Deutsch",
13 "pl": "Polski",
14 }
15
16 SELECT_LANGUAGE_LIST = [("", "")] + [
17 (str(lang), LANGUAGE_NAMES.get(str(lang))) for lang in Languages
18 ]
19
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/CTFd/constants/languages.py b/CTFd/constants/languages.py
--- a/CTFd/constants/languages.py
+++ b/CTFd/constants/languages.py
@@ -5,12 +5,16 @@
ENGLISH = "en"
GERMAN = "de"
POLISH = "pl"
+ SPANISH = "es"
+ CHINESE = "zh"
LANGUAGE_NAMES = {
"en": "English",
"de": "Deutsch",
"pl": "Polski",
+ "es": "Espaรฑol",
+ "zh": "ไธญๆ",
}
SELECT_LANGUAGE_LIST = [("", "")] + [
|
{"golden_diff": "diff --git a/CTFd/constants/languages.py b/CTFd/constants/languages.py\n--- a/CTFd/constants/languages.py\n+++ b/CTFd/constants/languages.py\n@@ -5,12 +5,16 @@\n ENGLISH = \"en\"\n GERMAN = \"de\"\n POLISH = \"pl\"\n+ SPANISH = \"es\"\n+ CHINESE = \"zh\"\n \n \n LANGUAGE_NAMES = {\n \"en\": \"English\",\n \"de\": \"Deutsch\",\n \"pl\": \"Polski\",\n+ \"es\": \"Espa\u00f1ol\",\n+ \"zh\": \"\u4e2d\u6587\",\n }\n \n SELECT_LANGUAGE_LIST = [(\"\", \"\")] + [\n", "issue": "Test Translations & Support Spanish\nWe need to test translations before release and make sure we support Spanish\n", "before_files": [{"content": "from CTFd.constants import RawEnum\n\n\nclass Languages(str, RawEnum):\n ENGLISH = \"en\"\n GERMAN = \"de\"\n POLISH = \"pl\"\n\n\nLANGUAGE_NAMES = {\n \"en\": \"English\",\n \"de\": \"Deutsch\",\n \"pl\": \"Polski\",\n}\n\nSELECT_LANGUAGE_LIST = [(\"\", \"\")] + [\n (str(lang), LANGUAGE_NAMES.get(str(lang))) for lang in Languages\n]\n", "path": "CTFd/constants/languages.py"}], "after_files": [{"content": "from CTFd.constants import RawEnum\n\n\nclass Languages(str, RawEnum):\n ENGLISH = \"en\"\n GERMAN = \"de\"\n POLISH = \"pl\"\n SPANISH = \"es\"\n CHINESE = \"zh\"\n\n\nLANGUAGE_NAMES = {\n \"en\": \"English\",\n \"de\": \"Deutsch\",\n \"pl\": \"Polski\",\n \"es\": \"Espa\u00f1ol\",\n \"zh\": \"\u4e2d\u6587\",\n}\n\nSELECT_LANGUAGE_LIST = [(\"\", \"\")] + [\n (str(lang), LANGUAGE_NAMES.get(str(lang))) for lang in Languages\n]\n", "path": "CTFd/constants/languages.py"}]}
| 408 | 150 |
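With the two added members, the comprehension at the bottom of `languages.py` expands to one `(code, display name)` pair per language plus the blank sentinel. Assuming CTFd's `RawEnum` members stringify to their values, as the existing entries imply, the resulting list is equivalent to:

```python
SELECT_LANGUAGE_LIST = [
    ("", ""),
    ("en", "English"),
    ("de", "Deutsch"),
    ("pl", "Polski"),
    ("es", "Español"),
    ("zh", "中文"),
]
```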
gh_patches_debug_15602
|
rasdani/github-patches
|
git_diff
|
yt-dlp__yt-dlp-7108
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
cc.com
### Checklist
- [X] I'm reporting a broken site
- [X] I've verified that I'm running yt-dlp version **2021.12.01**. ([update instructions](https://github.com/yt-dlp/yt-dlp#update))
- [X] I've checked that all provided URLs are alive and playable in a browser
- [X] I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
- [X] I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
- [X] I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- [X] I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
### Region
United States
### Description
https://www.cc.com/topic/a-clusterfunke-christmas
TV episodes work fine, but the movie comes back Unsupported URL
### Verbose log
```shell
C:\Users\Kevin\Downloads\yt>ytdl.exe -Uv https://www.cc.com/movies/tkp406/a-clue
sterfuenke-christmas
[debug] Command-line config: ['-Uv', 'https://www.cc.com/movies/tkp406/a-clueste
rfuenke-christmas']
[debug] Encodings: locale cp1252, fs utf-8, out utf-8 (No ANSI), err utf-8 (No A
NSI), pref cp1252
[debug] yt-dlp version 2021.12.01 [91f071a] (win_exe)
[debug] Python version 3.8.10 (CPython 64bit) - Windows-7-6.1.7601-SP1
[debug] exe versions: ffmpeg 4.4-full_build-www.gyan.dev (setts), ffprobe 4.4-fu
ll_build-www.gyan.dev
[debug] Optional libraries: Cryptodome, mutagen, sqlite, websockets
[debug] Proxy map: {}
Latest version: 2021.12.01, Current version: 2021.12.01
yt-dlp is up to date (2021.12.01)
[debug] [generic] Extracting URL: https://www.cc.com/movies/tkp406/a-cluesterfue
nke-christmas
[generic] a-cluesterfuenke-christmas: Requesting header
WARNING: [generic] Falling back on generic information extractor.
[generic] a-cluesterfuenke-christmas: Downloading webpage
[generic] a-cluesterfuenke-christmas: Extracting information
[debug] Looking for video embeds
ERROR: Unsupported URL: https://www.cc.com/movies/tkp406/a-cluesterfuenke-christ
mas
Traceback (most recent call last):
File "yt_dlp\YoutubeDL.py", line 1329, in wrapper
File "yt_dlp\YoutubeDL.py", line 1398, in __extract_info
File "yt_dlp\extractor\common.py", line 597, in extract
File "yt_dlp\extractor\generic.py", line 3813, in _real_extract
yt_dlp.utils.UnsupportedError: Unsupported URL: https://www.cc.com/movies/tkp406
/a-cluesterfuenke-christmas
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `yt_dlp/extractor/comedycentral.py`
Content:
```
1 from .mtv import MTVServicesInfoExtractor
2
3
4 class ComedyCentralIE(MTVServicesInfoExtractor):
5 _VALID_URL = r'https?://(?:www\.)?cc\.com/(?:episodes|video(?:-clips)?|collection-playlist)/(?P<id>[0-9a-z]{6})'
6 _FEED_URL = 'http://comedycentral.com/feeds/mrss/'
7
8 _TESTS = [{
9 'url': 'http://www.cc.com/video-clips/5ke9v2/the-daily-show-with-trevor-noah-doc-rivers-and-steve-ballmer---the-nba-player-strike',
10 'md5': 'b8acb347177c680ff18a292aa2166f80',
11 'info_dict': {
12 'id': '89ccc86e-1b02-4f83-b0c9-1d9592ecd025',
13 'ext': 'mp4',
14 'title': 'The Daily Show with Trevor Noah|August 28, 2020|25|25149|Doc Rivers and Steve Ballmer - The NBA Player Strike',
15 'description': 'md5:5334307c433892b85f4f5e5ac9ef7498',
16 'timestamp': 1598670000,
17 'upload_date': '20200829',
18 },
19 }, {
20 'url': 'http://www.cc.com/episodes/pnzzci/drawn-together--american-idol--parody-clip-show-season-3-ep-314',
21 'only_matching': True,
22 }, {
23 'url': 'https://www.cc.com/video/k3sdvm/the-daily-show-with-jon-stewart-exclusive-the-fourth-estate',
24 'only_matching': True,
25 }, {
26 'url': 'https://www.cc.com/collection-playlist/cosnej/stand-up-specials/t6vtjb',
27 'only_matching': True,
28 }]
29
30
31 class ComedyCentralTVIE(MTVServicesInfoExtractor):
32 _VALID_URL = r'https?://(?:www\.)?comedycentral\.tv/folgen/(?P<id>[0-9a-z]{6})'
33 _TESTS = [{
34 'url': 'https://www.comedycentral.tv/folgen/pxdpec/josh-investigates-klimawandel-staffel-1-ep-1',
35 'info_dict': {
36 'id': '15907dc3-ec3c-11e8-a442-0e40cf2fc285',
37 'ext': 'mp4',
38 'title': 'Josh Investigates',
39 'description': 'Steht uns das Ende der Welt bevor?',
40 },
41 }]
42 _FEED_URL = 'http://feeds.mtvnservices.com/od/feed/intl-mrss-player-feed'
43 _GEO_COUNTRIES = ['DE']
44
45 def _get_feed_query(self, uri):
46 return {
47 'accountOverride': 'intl.mtvi.com',
48 'arcEp': 'web.cc.tv',
49 'ep': 'b9032c3a',
50 'imageEp': 'web.cc.tv',
51 'mgid': uri,
52 }
53
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/yt_dlp/extractor/comedycentral.py b/yt_dlp/extractor/comedycentral.py
--- a/yt_dlp/extractor/comedycentral.py
+++ b/yt_dlp/extractor/comedycentral.py
@@ -2,7 +2,7 @@
class ComedyCentralIE(MTVServicesInfoExtractor):
- _VALID_URL = r'https?://(?:www\.)?cc\.com/(?:episodes|video(?:-clips)?|collection-playlist)/(?P<id>[0-9a-z]{6})'
+ _VALID_URL = r'https?://(?:www\.)?cc\.com/(?:episodes|video(?:-clips)?|collection-playlist|movies)/(?P<id>[0-9a-z]{6})'
_FEED_URL = 'http://comedycentral.com/feeds/mrss/'
_TESTS = [{
@@ -25,6 +25,9 @@
}, {
'url': 'https://www.cc.com/collection-playlist/cosnej/stand-up-specials/t6vtjb',
'only_matching': True,
+ }, {
+ 'url': 'https://www.cc.com/movies/tkp406/a-cluesterfuenke-christmas',
+ 'only_matching': True,
}]
|
{"golden_diff": "diff --git a/yt_dlp/extractor/comedycentral.py b/yt_dlp/extractor/comedycentral.py\n--- a/yt_dlp/extractor/comedycentral.py\n+++ b/yt_dlp/extractor/comedycentral.py\n@@ -2,7 +2,7 @@\n \n \n class ComedyCentralIE(MTVServicesInfoExtractor):\n- _VALID_URL = r'https?://(?:www\\.)?cc\\.com/(?:episodes|video(?:-clips)?|collection-playlist)/(?P<id>[0-9a-z]{6})'\n+ _VALID_URL = r'https?://(?:www\\.)?cc\\.com/(?:episodes|video(?:-clips)?|collection-playlist|movies)/(?P<id>[0-9a-z]{6})'\n _FEED_URL = 'http://comedycentral.com/feeds/mrss/'\n \n _TESTS = [{\n@@ -25,6 +25,9 @@\n }, {\n 'url': 'https://www.cc.com/collection-playlist/cosnej/stand-up-specials/t6vtjb',\n 'only_matching': True,\n+ }, {\n+ 'url': 'https://www.cc.com/movies/tkp406/a-cluesterfuenke-christmas',\n+ 'only_matching': True,\n }]\n", "issue": "cc.com\n### Checklist\n\n- [X] I'm reporting a broken site\n- [X] I've verified that I'm running yt-dlp version **2021.12.01**. ([update instructions](https://github.com/yt-dlp/yt-dlp#update))\n- [X] I've checked that all provided URLs are alive and playable in a browser\n- [X] I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)\n- [X] I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates\n- [X] I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)\n- [X] I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required\n\n### Region\n\nUnited States\n\n### Description\n\nhttps://www.cc.com/topic/a-clusterfunke-christmas\r\n\r\nTV episodes work fine, but the movie comes back Unsupported URL\n\n### Verbose log\n\n```shell\nC:\\Users\\Kevin\\Downloads\\yt>ytdl.exe -Uv https://www.cc.com/movies/tkp406/a-clue\r\nsterfuenke-christmas\r\n[debug] Command-line config: ['-Uv', 'https://www.cc.com/movies/tkp406/a-clueste\r\nrfuenke-christmas']\r\n[debug] Encodings: locale cp1252, fs utf-8, out utf-8 (No ANSI), err utf-8 (No A\r\nNSI), pref cp1252\r\n[debug] yt-dlp version 2021.12.01 [91f071a] (win_exe)\r\n[debug] Python version 3.8.10 (CPython 64bit) - Windows-7-6.1.7601-SP1\r\n[debug] exe versions: ffmpeg 4.4-full_build-www.gyan.dev (setts), ffprobe 4.4-fu\r\nll_build-www.gyan.dev\r\n[debug] Optional libraries: Cryptodome, mutagen, sqlite, websockets\r\n[debug] Proxy map: {}\r\nLatest version: 2021.12.01, Current version: 2021.12.01\r\nyt-dlp is up to date (2021.12.01)\r\n[debug] [generic] Extracting URL: https://www.cc.com/movies/tkp406/a-cluesterfue\r\nnke-christmas\r\n[generic] a-cluesterfuenke-christmas: Requesting header\r\nWARNING: [generic] Falling back on generic information extractor.\r\n[generic] a-cluesterfuenke-christmas: Downloading webpage\r\n[generic] a-cluesterfuenke-christmas: Extracting information\r\n[debug] Looking for video embeds\r\nERROR: Unsupported URL: https://www.cc.com/movies/tkp406/a-cluesterfuenke-christ\r\nmas\r\nTraceback (most recent call last):\r\n File \"yt_dlp\\YoutubeDL.py\", line 1329, in wrapper\r\n File \"yt_dlp\\YoutubeDL.py\", line 1398, in __extract_info\r\n File \"yt_dlp\\extractor\\common.py\", line 597, in extract\r\n File 
\"yt_dlp\\extractor\\generic.py\", line 3813, in _real_extract\r\nyt_dlp.utils.UnsupportedError: Unsupported URL: https://www.cc.com/movies/tkp406\r\n/a-cluesterfuenke-christmas\n```\n\n", "before_files": [{"content": "from .mtv import MTVServicesInfoExtractor\n\n\nclass ComedyCentralIE(MTVServicesInfoExtractor):\n _VALID_URL = r'https?://(?:www\\.)?cc\\.com/(?:episodes|video(?:-clips)?|collection-playlist)/(?P<id>[0-9a-z]{6})'\n _FEED_URL = 'http://comedycentral.com/feeds/mrss/'\n\n _TESTS = [{\n 'url': 'http://www.cc.com/video-clips/5ke9v2/the-daily-show-with-trevor-noah-doc-rivers-and-steve-ballmer---the-nba-player-strike',\n 'md5': 'b8acb347177c680ff18a292aa2166f80',\n 'info_dict': {\n 'id': '89ccc86e-1b02-4f83-b0c9-1d9592ecd025',\n 'ext': 'mp4',\n 'title': 'The Daily Show with Trevor Noah|August 28, 2020|25|25149|Doc Rivers and Steve Ballmer - The NBA Player Strike',\n 'description': 'md5:5334307c433892b85f4f5e5ac9ef7498',\n 'timestamp': 1598670000,\n 'upload_date': '20200829',\n },\n }, {\n 'url': 'http://www.cc.com/episodes/pnzzci/drawn-together--american-idol--parody-clip-show-season-3-ep-314',\n 'only_matching': True,\n }, {\n 'url': 'https://www.cc.com/video/k3sdvm/the-daily-show-with-jon-stewart-exclusive-the-fourth-estate',\n 'only_matching': True,\n }, {\n 'url': 'https://www.cc.com/collection-playlist/cosnej/stand-up-specials/t6vtjb',\n 'only_matching': True,\n }]\n\n\nclass ComedyCentralTVIE(MTVServicesInfoExtractor):\n _VALID_URL = r'https?://(?:www\\.)?comedycentral\\.tv/folgen/(?P<id>[0-9a-z]{6})'\n _TESTS = [{\n 'url': 'https://www.comedycentral.tv/folgen/pxdpec/josh-investigates-klimawandel-staffel-1-ep-1',\n 'info_dict': {\n 'id': '15907dc3-ec3c-11e8-a442-0e40cf2fc285',\n 'ext': 'mp4',\n 'title': 'Josh Investigates',\n 'description': 'Steht uns das Ende der Welt bevor?',\n },\n }]\n _FEED_URL = 'http://feeds.mtvnservices.com/od/feed/intl-mrss-player-feed'\n _GEO_COUNTRIES = ['DE']\n\n def _get_feed_query(self, uri):\n return {\n 'accountOverride': 'intl.mtvi.com',\n 'arcEp': 'web.cc.tv',\n 'ep': 'b9032c3a',\n 'imageEp': 'web.cc.tv',\n 'mgid': uri,\n }\n", "path": "yt_dlp/extractor/comedycentral.py"}], "after_files": [{"content": "from .mtv import MTVServicesInfoExtractor\n\n\nclass ComedyCentralIE(MTVServicesInfoExtractor):\n _VALID_URL = r'https?://(?:www\\.)?cc\\.com/(?:episodes|video(?:-clips)?|collection-playlist|movies)/(?P<id>[0-9a-z]{6})'\n _FEED_URL = 'http://comedycentral.com/feeds/mrss/'\n\n _TESTS = [{\n 'url': 'http://www.cc.com/video-clips/5ke9v2/the-daily-show-with-trevor-noah-doc-rivers-and-steve-ballmer---the-nba-player-strike',\n 'md5': 'b8acb347177c680ff18a292aa2166f80',\n 'info_dict': {\n 'id': '89ccc86e-1b02-4f83-b0c9-1d9592ecd025',\n 'ext': 'mp4',\n 'title': 'The Daily Show with Trevor Noah|August 28, 2020|25|25149|Doc Rivers and Steve Ballmer - The NBA Player Strike',\n 'description': 'md5:5334307c433892b85f4f5e5ac9ef7498',\n 'timestamp': 1598670000,\n 'upload_date': '20200829',\n },\n }, {\n 'url': 'http://www.cc.com/episodes/pnzzci/drawn-together--american-idol--parody-clip-show-season-3-ep-314',\n 'only_matching': True,\n }, {\n 'url': 'https://www.cc.com/video/k3sdvm/the-daily-show-with-jon-stewart-exclusive-the-fourth-estate',\n 'only_matching': True,\n }, {\n 'url': 'https://www.cc.com/collection-playlist/cosnej/stand-up-specials/t6vtjb',\n 'only_matching': True,\n }, {\n 'url': 'https://www.cc.com/movies/tkp406/a-cluesterfuenke-christmas',\n 'only_matching': True,\n }]\n\n\nclass ComedyCentralTVIE(MTVServicesInfoExtractor):\n _VALID_URL = 
r'https?://(?:www\\.)?comedycentral\\.tv/folgen/(?P<id>[0-9a-z]{6})'\n _TESTS = [{\n 'url': 'https://www.comedycentral.tv/folgen/pxdpec/josh-investigates-klimawandel-staffel-1-ep-1',\n 'info_dict': {\n 'id': '15907dc3-ec3c-11e8-a442-0e40cf2fc285',\n 'ext': 'mp4',\n 'title': 'Josh Investigates',\n 'description': 'Steht uns das Ende der Welt bevor?',\n },\n }]\n _FEED_URL = 'http://feeds.mtvnservices.com/od/feed/intl-mrss-player-feed'\n _GEO_COUNTRIES = ['DE']\n\n def _get_feed_query(self, uri):\n return {\n 'accountOverride': 'intl.mtvi.com',\n 'arcEp': 'web.cc.tv',\n 'ep': 'b9032c3a',\n 'imageEp': 'web.cc.tv',\n 'mgid': uri,\n }\n", "path": "yt_dlp/extractor/comedycentral.py"}]}
| 2,021 | 292 |
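The one-token change to `_VALID_URL` above can be sanity-checked in isolation. A quick sketch confirming the widened pattern accepts the previously unsupported movie URL:

```python
import re

_VALID_URL = r"https?://(?:www\.)?cc\.com/(?:episodes|video(?:-clips)?|collection-playlist|movies)/(?P<id>[0-9a-z]{6})"

# The movie URL from the issue now matches and yields the expected id.
m = re.match(_VALID_URL, "https://www.cc.com/movies/tkp406/a-cluesterfuenke-christmas")
assert m is not None and m.group("id") == "tkp406"
```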
gh_patches_debug_38746
|
rasdani/github-patches
|
git_diff
|
alltheplaces__alltheplaces-3627
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Spider teavana is broken
During the global build at 2021-05-26-14-42-23, spider **teavana** failed with **0 features** and **2 errors**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/teavana.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/teavana.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/teavana.geojson))
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `locations/spiders/teavana.py`
Content:
```
1 import scrapy
2 import re
3 from locations.items import GeojsonPointItem
4
5
6 class ExpressSpider(scrapy.Spider):
7
8 name = "teavana"
9 item_attributes = {"brand": "Teavana"}
10 allowed_domains = ["locations.teavana.com"]
11 download_delay = 0.5
12 start_urls = ("https://locations.teavana.com/",)
13
14 def parse_stores(self, response):
15 ref = re.findall(r"[^(\/)]+$", response.url)
16 if len(ref) > 0:
17 ref = ref[0].split(".")[0]
18 properties = {
19 "addr_full": " ".join(
20 response.xpath(
21 '//span[@itemprop="streetAddress"]/span/text()'
22 ).extract()
23 ),
24 "phone": response.xpath(
25 'normalize-space(//span[@itemprop="telephone"]/text())'
26 ).extract_first(),
27 "city": response.xpath(
28 'normalize-space(//span[@itemprop="addressLocality"]/text())'
29 ).extract_first(),
30 "state": response.xpath(
31 'normalize-space(//abbr[@itemprop="addressRegion"]/text())'
32 ).extract_first(),
33 "postcode": response.xpath(
34 'normalize-space(//span[@itemprop="postalCode"]/text())'
35 ).extract_first(),
36 "ref": ref,
37 "website": response.url,
38 "lat": float(
39 response.xpath(
40 'normalize-space(//meta[@itemprop="latitude"]/@content)'
41 ).extract_first()
42 ),
43 "lon": float(
44 response.xpath(
45 'normalize-space(//meta[@itemprop="longitude"]/@content)'
46 ).extract_first()
47 ),
48 }
49 hours = response.xpath('//div[@itemprop="openingHours"]/@content').extract()
50 if hours != []:
51 hours = "; ".join(hours)
52 properties["opening_hours"] = hours
53 yield GeojsonPointItem(**properties)
54
55 def parse_city_stores(self, response):
56 stores = response.xpath(
57 '//h3[@class="Teaser-title Link Link--teaser Heading--h5"]/a/@href'
58 ).extract()
59 for store in stores:
60 yield scrapy.Request(response.urljoin(store), callback=self.parse_stores)
61
62 def parse_state(self, response):
63 urls = response.xpath(
64 '//div[@class="c-directory-list-content-wrapper"]/ul/li/a/@href'
65 ).extract()
66 for path in urls:
67 pattern = re.compile(r"..\/[a-z]{2}\/[a-z]{2}\/[^()]+\/[^()]+.html$")
68 if pattern.match(path.strip()):
69 yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)
70 else:
71 yield scrapy.Request(
72 response.urljoin(path), callback=self.parse_city_stores
73 )
74
75 def parse(self, response):
76 urls = response.xpath(
77 '//div[@class="c-directory-list-content-wrapper"]/ul/li/a/@href'
78 ).extract()
79 for path in urls:
80 pattern = re.compile(r"^[a-z]{2}\/[a-z]{2}.html$")
81 pattern1 = re.compile(r"^[a-z]{2}\/[a-z]{2}\/[^()]+\/[^()]+.html$")
82 if pattern.match(path.strip()):
83 yield scrapy.Request(response.urljoin(path), callback=self.parse_state)
84 elif pattern1.match(path.strip()):
85 yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)
86 else:
87 yield scrapy.Request(
88 response.urljoin(path), callback=self.parse_city_stores
89 )
90
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/locations/spiders/teavana.py b/locations/spiders/teavana.py
deleted file mode 100644
--- a/locations/spiders/teavana.py
+++ /dev/null
@@ -1,89 +0,0 @@
-import scrapy
-import re
-from locations.items import GeojsonPointItem
-
-
-class ExpressSpider(scrapy.Spider):
-
- name = "teavana"
- item_attributes = {"brand": "Teavana"}
- allowed_domains = ["locations.teavana.com"]
- download_delay = 0.5
- start_urls = ("https://locations.teavana.com/",)
-
- def parse_stores(self, response):
- ref = re.findall(r"[^(\/)]+$", response.url)
- if len(ref) > 0:
- ref = ref[0].split(".")[0]
- properties = {
- "addr_full": " ".join(
- response.xpath(
- '//span[@itemprop="streetAddress"]/span/text()'
- ).extract()
- ),
- "phone": response.xpath(
- 'normalize-space(//span[@itemprop="telephone"]/text())'
- ).extract_first(),
- "city": response.xpath(
- 'normalize-space(//span[@itemprop="addressLocality"]/text())'
- ).extract_first(),
- "state": response.xpath(
- 'normalize-space(//abbr[@itemprop="addressRegion"]/text())'
- ).extract_first(),
- "postcode": response.xpath(
- 'normalize-space(//span[@itemprop="postalCode"]/text())'
- ).extract_first(),
- "ref": ref,
- "website": response.url,
- "lat": float(
- response.xpath(
- 'normalize-space(//meta[@itemprop="latitude"]/@content)'
- ).extract_first()
- ),
- "lon": float(
- response.xpath(
- 'normalize-space(//meta[@itemprop="longitude"]/@content)'
- ).extract_first()
- ),
- }
- hours = response.xpath('//div[@itemprop="openingHours"]/@content').extract()
- if hours != []:
- hours = "; ".join(hours)
- properties["opening_hours"] = hours
- yield GeojsonPointItem(**properties)
-
- def parse_city_stores(self, response):
- stores = response.xpath(
- '//h3[@class="Teaser-title Link Link--teaser Heading--h5"]/a/@href'
- ).extract()
- for store in stores:
- yield scrapy.Request(response.urljoin(store), callback=self.parse_stores)
-
- def parse_state(self, response):
- urls = response.xpath(
- '//div[@class="c-directory-list-content-wrapper"]/ul/li/a/@href'
- ).extract()
- for path in urls:
- pattern = re.compile(r"..\/[a-z]{2}\/[a-z]{2}\/[^()]+\/[^()]+.html$")
- if pattern.match(path.strip()):
- yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)
- else:
- yield scrapy.Request(
- response.urljoin(path), callback=self.parse_city_stores
- )
-
- def parse(self, response):
- urls = response.xpath(
- '//div[@class="c-directory-list-content-wrapper"]/ul/li/a/@href'
- ).extract()
- for path in urls:
- pattern = re.compile(r"^[a-z]{2}\/[a-z]{2}.html$")
- pattern1 = re.compile(r"^[a-z]{2}\/[a-z]{2}\/[^()]+\/[^()]+.html$")
- if pattern.match(path.strip()):
- yield scrapy.Request(response.urljoin(path), callback=self.parse_state)
- elif pattern1.match(path.strip()):
- yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)
- else:
- yield scrapy.Request(
- response.urljoin(path), callback=self.parse_city_stores
- )
|
{"golden_diff": "diff --git a/locations/spiders/teavana.py b/locations/spiders/teavana.py\ndeleted file mode 100644\n--- a/locations/spiders/teavana.py\n+++ /dev/null\n@@ -1,89 +0,0 @@\n-import scrapy\n-import re\n-from locations.items import GeojsonPointItem\n-\n-\n-class ExpressSpider(scrapy.Spider):\n-\n- name = \"teavana\"\n- item_attributes = {\"brand\": \"Teavana\"}\n- allowed_domains = [\"locations.teavana.com\"]\n- download_delay = 0.5\n- start_urls = (\"https://locations.teavana.com/\",)\n-\n- def parse_stores(self, response):\n- ref = re.findall(r\"[^(\\/)]+$\", response.url)\n- if len(ref) > 0:\n- ref = ref[0].split(\".\")[0]\n- properties = {\n- \"addr_full\": \" \".join(\n- response.xpath(\n- '//span[@itemprop=\"streetAddress\"]/span/text()'\n- ).extract()\n- ),\n- \"phone\": response.xpath(\n- 'normalize-space(//span[@itemprop=\"telephone\"]/text())'\n- ).extract_first(),\n- \"city\": response.xpath(\n- 'normalize-space(//span[@itemprop=\"addressLocality\"]/text())'\n- ).extract_first(),\n- \"state\": response.xpath(\n- 'normalize-space(//abbr[@itemprop=\"addressRegion\"]/text())'\n- ).extract_first(),\n- \"postcode\": response.xpath(\n- 'normalize-space(//span[@itemprop=\"postalCode\"]/text())'\n- ).extract_first(),\n- \"ref\": ref,\n- \"website\": response.url,\n- \"lat\": float(\n- response.xpath(\n- 'normalize-space(//meta[@itemprop=\"latitude\"]/@content)'\n- ).extract_first()\n- ),\n- \"lon\": float(\n- response.xpath(\n- 'normalize-space(//meta[@itemprop=\"longitude\"]/@content)'\n- ).extract_first()\n- ),\n- }\n- hours = response.xpath('//div[@itemprop=\"openingHours\"]/@content').extract()\n- if hours != []:\n- hours = \"; \".join(hours)\n- properties[\"opening_hours\"] = hours\n- yield GeojsonPointItem(**properties)\n-\n- def parse_city_stores(self, response):\n- stores = response.xpath(\n- '//h3[@class=\"Teaser-title Link Link--teaser Heading--h5\"]/a/@href'\n- ).extract()\n- for store in stores:\n- yield scrapy.Request(response.urljoin(store), callback=self.parse_stores)\n-\n- def parse_state(self, response):\n- urls = response.xpath(\n- '//div[@class=\"c-directory-list-content-wrapper\"]/ul/li/a/@href'\n- ).extract()\n- for path in urls:\n- pattern = re.compile(r\"..\\/[a-z]{2}\\/[a-z]{2}\\/[^()]+\\/[^()]+.html$\")\n- if pattern.match(path.strip()):\n- yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)\n- else:\n- yield scrapy.Request(\n- response.urljoin(path), callback=self.parse_city_stores\n- )\n-\n- def parse(self, response):\n- urls = response.xpath(\n- '//div[@class=\"c-directory-list-content-wrapper\"]/ul/li/a/@href'\n- ).extract()\n- for path in urls:\n- pattern = re.compile(r\"^[a-z]{2}\\/[a-z]{2}.html$\")\n- pattern1 = re.compile(r\"^[a-z]{2}\\/[a-z]{2}\\/[^()]+\\/[^()]+.html$\")\n- if pattern.match(path.strip()):\n- yield scrapy.Request(response.urljoin(path), callback=self.parse_state)\n- elif pattern1.match(path.strip()):\n- yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)\n- else:\n- yield scrapy.Request(\n- response.urljoin(path), callback=self.parse_city_stores\n- )\n", "issue": "Spider teavana is broken\nDuring the global build at 2021-05-26-14-42-23, spider **teavana** failed with **0 features** and **2 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/teavana.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/teavana.geojson) ([on a 
map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/teavana.geojson))\n", "before_files": [{"content": "import scrapy\nimport re\nfrom locations.items import GeojsonPointItem\n\n\nclass ExpressSpider(scrapy.Spider):\n\n name = \"teavana\"\n item_attributes = {\"brand\": \"Teavana\"}\n allowed_domains = [\"locations.teavana.com\"]\n download_delay = 0.5\n start_urls = (\"https://locations.teavana.com/\",)\n\n def parse_stores(self, response):\n ref = re.findall(r\"[^(\\/)]+$\", response.url)\n if len(ref) > 0:\n ref = ref[0].split(\".\")[0]\n properties = {\n \"addr_full\": \" \".join(\n response.xpath(\n '//span[@itemprop=\"streetAddress\"]/span/text()'\n ).extract()\n ),\n \"phone\": response.xpath(\n 'normalize-space(//span[@itemprop=\"telephone\"]/text())'\n ).extract_first(),\n \"city\": response.xpath(\n 'normalize-space(//span[@itemprop=\"addressLocality\"]/text())'\n ).extract_first(),\n \"state\": response.xpath(\n 'normalize-space(//abbr[@itemprop=\"addressRegion\"]/text())'\n ).extract_first(),\n \"postcode\": response.xpath(\n 'normalize-space(//span[@itemprop=\"postalCode\"]/text())'\n ).extract_first(),\n \"ref\": ref,\n \"website\": response.url,\n \"lat\": float(\n response.xpath(\n 'normalize-space(//meta[@itemprop=\"latitude\"]/@content)'\n ).extract_first()\n ),\n \"lon\": float(\n response.xpath(\n 'normalize-space(//meta[@itemprop=\"longitude\"]/@content)'\n ).extract_first()\n ),\n }\n hours = response.xpath('//div[@itemprop=\"openingHours\"]/@content').extract()\n if hours != []:\n hours = \"; \".join(hours)\n properties[\"opening_hours\"] = hours\n yield GeojsonPointItem(**properties)\n\n def parse_city_stores(self, response):\n stores = response.xpath(\n '//h3[@class=\"Teaser-title Link Link--teaser Heading--h5\"]/a/@href'\n ).extract()\n for store in stores:\n yield scrapy.Request(response.urljoin(store), callback=self.parse_stores)\n\n def parse_state(self, response):\n urls = response.xpath(\n '//div[@class=\"c-directory-list-content-wrapper\"]/ul/li/a/@href'\n ).extract()\n for path in urls:\n pattern = re.compile(r\"..\\/[a-z]{2}\\/[a-z]{2}\\/[^()]+\\/[^()]+.html$\")\n if pattern.match(path.strip()):\n yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)\n else:\n yield scrapy.Request(\n response.urljoin(path), callback=self.parse_city_stores\n )\n\n def parse(self, response):\n urls = response.xpath(\n '//div[@class=\"c-directory-list-content-wrapper\"]/ul/li/a/@href'\n ).extract()\n for path in urls:\n pattern = re.compile(r\"^[a-z]{2}\\/[a-z]{2}.html$\")\n pattern1 = re.compile(r\"^[a-z]{2}\\/[a-z]{2}\\/[^()]+\\/[^()]+.html$\")\n if pattern.match(path.strip()):\n yield scrapy.Request(response.urljoin(path), callback=self.parse_state)\n elif pattern1.match(path.strip()):\n yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)\n else:\n yield scrapy.Request(\n response.urljoin(path), callback=self.parse_city_stores\n )\n", "path": "locations/spiders/teavana.py"}], "after_files": [{"content": null, "path": "locations/spiders/teavana.py"}]}
| 1,370 | 893 |
gh_patches_debug_2128
|
rasdani/github-patches
|
git_diff
|
projectmesa__mesa-891
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cookiecutter doesn't work on 0.8.7 release
**Describe the bug**
`mesa startproject` fails after `pipenv install mesa`
```
A valid repository for "/home/neil/.local/share/virtualenvs/baseline-economy-6fg_iky1/lib/python3.8/site-packages/mesa/cookiecutter-mesa" could not be found in the following locations:
...
```
**Expected behavior**
Generate the project layout
**To Reproduce**
- pipenv install mesa
- mesa startproject
**Additional context**
The cookiecutter directory from the repo is missing from the installation.
Additionally there is no help message for `startproject` when you run `mesa --help`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 import re
4
5 from setuptools import setup, find_packages
6 from codecs import open
7
8 requires = ["click", "cookiecutter", "networkx", "numpy", "pandas", "tornado", "tqdm"]
9
10 extras_require = {
11 "dev": ["coverage", "flake8", "pytest >= 3.6", "pytest-cov", "sphinx"],
12 "docs": ["sphinx"],
13 }
14
15 version = ""
16 with open("mesa/__init__.py", "r") as fd:
17 version = re.search(
18 r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE
19 ).group(1)
20
21 with open("README.rst", "rb", encoding="utf-8") as f:
22 readme = f.read()
23
24 setup(
25 name="Mesa",
26 version=version,
27 description="Agent-based modeling (ABM) in Python 3+",
28 long_description=readme,
29 author="Project Mesa Team",
30 author_email="[email protected]",
31 url="https://github.com/projectmesa/mesa",
32 packages=find_packages(),
33 package_data={
34 "mesa": [
35 "visualization/templates/*.html",
36 "visualization/templates/css/*",
37 "visualization/templates/fonts/*",
38 "visualization/templates/js/*",
39 ],
40 "cookiecutter-mesa": ["cookiecutter-mesa/*"],
41 },
42 include_package_data=True,
43 install_requires=requires,
44 extras_require=extras_require,
45 keywords="agent based modeling model ABM simulation multi-agent",
46 license="Apache 2.0",
47 zip_safe=False,
48 classifiers=[
49 "Topic :: Scientific/Engineering",
50 "Topic :: Scientific/Engineering :: Artificial Life",
51 "Topic :: Scientific/Engineering :: Artificial Intelligence",
52 "Intended Audience :: Science/Research",
53 "Programming Language :: Python :: 3 :: Only",
54 "License :: OSI Approved :: Apache Software License",
55 "Operating System :: OS Independent",
56 "Development Status :: 3 - Alpha",
57 "Natural Language :: English",
58 ],
59 entry_points="""
60 [console_scripts]
61 mesa=mesa.main:cli
62 """,
63 )
64
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@
requires = ["click", "cookiecutter", "networkx", "numpy", "pandas", "tornado", "tqdm"]
extras_require = {
- "dev": ["coverage", "flake8", "pytest >= 3.6", "pytest-cov", "sphinx"],
+ "dev": ["coverage", "flake8", "pytest >= 4.6", "pytest-cov", "sphinx"],
"docs": ["sphinx"],
}
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -8,7 +8,7 @@\n requires = [\"click\", \"cookiecutter\", \"networkx\", \"numpy\", \"pandas\", \"tornado\", \"tqdm\"]\n \n extras_require = {\n- \"dev\": [\"coverage\", \"flake8\", \"pytest >= 3.6\", \"pytest-cov\", \"sphinx\"],\n+ \"dev\": [\"coverage\", \"flake8\", \"pytest >= 4.6\", \"pytest-cov\", \"sphinx\"],\n \"docs\": [\"sphinx\"],\n }\n", "issue": "Cookiecutter doesn't work on 0.8.7 release\n**Describe the bug**\r\n`mesa startproject` fails after `pipenv install mesa`\r\n```\r\nA valid repository for \"/home/neil/.local/share/virtualenvs/baseline-economy-6fg_iky1/lib/python3.8/site-packages/mesa/cookiecutter-mesa\" could not be found in the following locations:\r\n...\r\n```\r\n\r\n**Expected behavior**\r\nGenerate the project layout\r\n\r\n**To Reproduce**\r\n- pipenv install mesa\r\n- mesa startproject\r\n\r\n**Additional context**\r\nThe cookiecutter directory from the repo is missing from the installation.\r\nAdditionally there is no help message for `startproject` when you run `mesa --help`\r\n\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport re\n\nfrom setuptools import setup, find_packages\nfrom codecs import open\n\nrequires = [\"click\", \"cookiecutter\", \"networkx\", \"numpy\", \"pandas\", \"tornado\", \"tqdm\"]\n\nextras_require = {\n \"dev\": [\"coverage\", \"flake8\", \"pytest >= 3.6\", \"pytest-cov\", \"sphinx\"],\n \"docs\": [\"sphinx\"],\n}\n\nversion = \"\"\nwith open(\"mesa/__init__.py\", \"r\") as fd:\n version = re.search(\n r'^__version__\\s*=\\s*[\\'\"]([^\\'\"]*)[\\'\"]', fd.read(), re.MULTILINE\n ).group(1)\n\nwith open(\"README.rst\", \"rb\", encoding=\"utf-8\") as f:\n readme = f.read()\n\nsetup(\n name=\"Mesa\",\n version=version,\n description=\"Agent-based modeling (ABM) in Python 3+\",\n long_description=readme,\n author=\"Project Mesa Team\",\n author_email=\"[email protected]\",\n url=\"https://github.com/projectmesa/mesa\",\n packages=find_packages(),\n package_data={\n \"mesa\": [\n \"visualization/templates/*.html\",\n \"visualization/templates/css/*\",\n \"visualization/templates/fonts/*\",\n \"visualization/templates/js/*\",\n ],\n \"cookiecutter-mesa\": [\"cookiecutter-mesa/*\"],\n },\n include_package_data=True,\n install_requires=requires,\n extras_require=extras_require,\n keywords=\"agent based modeling model ABM simulation multi-agent\",\n license=\"Apache 2.0\",\n zip_safe=False,\n classifiers=[\n \"Topic :: Scientific/Engineering\",\n \"Topic :: Scientific/Engineering :: Artificial Life\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n \"Intended Audience :: Science/Research\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n \"Development Status :: 3 - Alpha\",\n \"Natural Language :: English\",\n ],\n entry_points=\"\"\"\n [console_scripts]\n mesa=mesa.main:cli\n \"\"\",\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport re\n\nfrom setuptools import setup, find_packages\nfrom codecs import open\n\nrequires = [\"click\", \"cookiecutter\", \"networkx\", \"numpy\", \"pandas\", \"tornado\", \"tqdm\"]\n\nextras_require = {\n \"dev\": [\"coverage\", \"flake8\", \"pytest >= 4.6\", \"pytest-cov\", \"sphinx\"],\n \"docs\": [\"sphinx\"],\n}\n\nversion = \"\"\nwith open(\"mesa/__init__.py\", \"r\") as fd:\n version = re.search(\n 
r'^__version__\\s*=\\s*[\\'\"]([^\\'\"]*)[\\'\"]', fd.read(), re.MULTILINE\n ).group(1)\n\nwith open(\"README.rst\", \"rb\", encoding=\"utf-8\") as f:\n readme = f.read()\n\nsetup(\n name=\"Mesa\",\n version=version,\n description=\"Agent-based modeling (ABM) in Python 3+\",\n long_description=readme,\n author=\"Project Mesa Team\",\n author_email=\"[email protected]\",\n url=\"https://github.com/projectmesa/mesa\",\n packages=find_packages(),\n package_data={\n \"mesa\": [\n \"visualization/templates/*.html\",\n \"visualization/templates/css/*\",\n \"visualization/templates/fonts/*\",\n \"visualization/templates/js/*\",\n ],\n \"cookiecutter-mesa\": [\"cookiecutter-mesa/*\"],\n },\n include_package_data=True,\n install_requires=requires,\n extras_require=extras_require,\n keywords=\"agent based modeling model ABM simulation multi-agent\",\n license=\"Apache 2.0\",\n zip_safe=False,\n classifiers=[\n \"Topic :: Scientific/Engineering\",\n \"Topic :: Scientific/Engineering :: Artificial Life\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n \"Intended Audience :: Science/Research\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n \"Development Status :: 3 - Alpha\",\n \"Natural Language :: English\",\n ],\n entry_points=\"\"\"\n [console_scripts]\n mesa=mesa.main:cli\n \"\"\",\n)\n", "path": "setup.py"}]}
| 1,016 | 132 |
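A note on the packaging pitfall in the record above: `package_data` keys must name actual Python packages, so an entry keyed on a plain directory such as `cookiecutter-mesa` is silently ignored and its files never reach the sdist/wheel. A minimal sketch of the general setuptools mechanism, with hypothetical project names (an illustration, not the project's actual layout):

```python
# Sketch: shipping a non-code template directory inside a package.
# Assumed layout (hypothetical):
#   mypkg/__init__.py
#   mypkg/cookiecutter-template/cookiecutter.json
#   mypkg/cookiecutter-template/{{cookiecutter.name}}/...
from setuptools import find_packages, setup

setup(
    name="mypkg",
    packages=find_packages(),
    # Key must be an importable package; globs are relative to that package.
    package_data={"mypkg": ["cookiecutter-template/*", "cookiecutter-template/*/*"]},
    # With include_package_data=True, files matched by MANIFEST.in are also
    # bundled, e.g. a MANIFEST.in line such as:
    #   recursive-include mypkg/cookiecutter-template *
    include_package_data=True,
)
```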
gh_patches_debug_1092
|
rasdani/github-patches
|
git_diff
|
psychopy__psychopy-2333
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Demos -> Hardware -> testSoundLatency.py not working in v3.0.6
Running Demo -> Hardware -> testSoundLatency.py results in the following error message:
```
##### Running: C:\Program Files (x86)\PsychoPy3\lib\site-packages\psychopy\demos\coder\hardware\testSoundLatency.py #####
pygame 1.9.4
Hello from the pygame community. https://www.pygame.org/contribute.html
Traceback (most recent call last):
File "C:\Program Files (x86)\PsychoPy3\lib\site-packages\psychopy\demos\coder\hardware\testSoundLatency.py", line 16, in <module>
from labjack import u3
ModuleNotFoundError: No module named 'labjack'
```
Windows 7, 64 bit, PsychoPy 3.0.6 64 bit standalone
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `psychopy/demos/coder/hardware/labjack_u3.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3
4 """
5 Demo for using labjack DAC devices
6
7 See also
8 http: //labjack.com/support/labjackpython
9 but note that the version shipped with standalone PsychoPy
10 has u3 (and others below an umbrella called labjack) so the import
11 line is slightly different to the documentation on LabJack's website
12 """
13
14 from __future__ import absolute_import, division, print_function
15
16 from builtins import range
17 from psychopy import visual, core, event, sound
18 from labjack import u3
19
20 # sound.setAudioAPI('pyaudio')
21
22 win = visual.Window([800, 800])
23 stim = visual.GratingStim(win, color=-1, sf=0)
24 snd = sound.Sound(880)
25 print(snd)
26 # setup labjack U3
27 ports = u3.U3()
28 FIO4 = 6004 # the address of line FIO4
29
30 while True:
31 # do this repeatedly for timing tests
32 ports.writeRegister(FIO4, 0) # start low
33
34 # draw black square
35 stim.draw()
36 win.flip()
37
38 # wait for a key press
39 if 'q' in event.waitKeys():
40 break
41
42 # set to white, flip window and raise level port FIO4
43 stim.setColor(1)
44 stim.draw()
45 win.flip()
46 ports.writeRegister(FIO4, 1)
47 snd.play()
48 for frameN in range(4):
49 stim.draw()
50 win.flip()
51
52 # set color back to black and set FIO4 to low again
53 stim.setColor(-1)
54 stim.draw()
55 win.flip()
56 ports.writeRegister(FIO4, 0)
57
58 win.close()
59 core.quit()
60
61 # The contents of this file are in the public domain.
62
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/psychopy/demos/coder/hardware/labjack_u3.py b/psychopy/demos/coder/hardware/labjack_u3.py
--- a/psychopy/demos/coder/hardware/labjack_u3.py
+++ b/psychopy/demos/coder/hardware/labjack_u3.py
@@ -15,7 +15,10 @@
from builtins import range
from psychopy import visual, core, event, sound
-from labjack import u3
+try:
+ from labjack import u3
+except ImportError:
+ import u3
# sound.setAudioAPI('pyaudio')
|
{"golden_diff": "diff --git a/psychopy/demos/coder/hardware/labjack_u3.py b/psychopy/demos/coder/hardware/labjack_u3.py\n--- a/psychopy/demos/coder/hardware/labjack_u3.py\n+++ b/psychopy/demos/coder/hardware/labjack_u3.py\n@@ -15,7 +15,10 @@\n \n from builtins import range\n from psychopy import visual, core, event, sound\n-from labjack import u3\n+try:\n+ from labjack import u3\n+except ImportError:\n+ import u3\n \n # sound.setAudioAPI('pyaudio')\n", "issue": "Demos -> Hardware -> testSoundLatency.py not working in v3.0.6\nRunning Demo -> Hardware -> testSoundLatency.py results in the following error message:\r\n```\r\n##### Running: C:\\Program Files (x86)\\PsychoPy3\\lib\\site-packages\\psychopy\\demos\\coder\\hardware\\testSoundLatency.py #####\r\npygame 1.9.4\r\nHello from the pygame community. https://www.pygame.org/contribute.html\r\nTraceback (most recent call last):\r\n File \"C:\\Program Files (x86)\\PsychoPy3\\lib\\site-packages\\psychopy\\demos\\coder\\hardware\\testSoundLatency.py\", line 16, in <module>\r\n from labjack import u3\r\nModuleNotFoundError: No module named 'labjack'\r\n```\r\nWindows 7, 64 bit, PsychoPy 3.0.6 64 bit standalone\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nDemo for using labjack DAC devices\n\nSee also\n http: //labjack.com/support/labjackpython\nbut note that the version shipped with standalone PsychoPy\nhas u3 (and others below an umbrella called labjack) so the import\nline is slightly different to the documentation on LabJack's website\n\"\"\"\n\nfrom __future__ import absolute_import, division, print_function\n\nfrom builtins import range\nfrom psychopy import visual, core, event, sound\nfrom labjack import u3\n\n# sound.setAudioAPI('pyaudio')\n\nwin = visual.Window([800, 800])\nstim = visual.GratingStim(win, color=-1, sf=0)\nsnd = sound.Sound(880)\nprint(snd)\n# setup labjack U3\nports = u3.U3()\nFIO4 = 6004 # the address of line FIO4\n\nwhile True:\n # do this repeatedly for timing tests\n ports.writeRegister(FIO4, 0) # start low\n\n # draw black square\n stim.draw()\n win.flip()\n\n # wait for a key press\n if 'q' in event.waitKeys():\n break\n\n # set to white, flip window and raise level port FIO4\n stim.setColor(1)\n stim.draw()\n win.flip()\n ports.writeRegister(FIO4, 1)\n snd.play()\n for frameN in range(4):\n stim.draw()\n win.flip()\n\n # set color back to black and set FIO4 to low again\n stim.setColor(-1)\n stim.draw()\n win.flip()\n ports.writeRegister(FIO4, 0)\n\nwin.close()\ncore.quit()\n\n# The contents of this file are in the public domain.\n", "path": "psychopy/demos/coder/hardware/labjack_u3.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nDemo for using labjack DAC devices\n\nSee also\n http: //labjack.com/support/labjackpython\nbut note that the version shipped with standalone PsychoPy\nhas u3 (and others below an umbrella called labjack) so the import\nline is slightly different to the documentation on LabJack's website\n\"\"\"\n\nfrom __future__ import absolute_import, division, print_function\n\nfrom builtins import range\nfrom psychopy import visual, core, event, sound\ntry:\n from labjack import u3\nexcept ImportError:\n import u3\n\n# sound.setAudioAPI('pyaudio')\n\nwin = visual.Window([800, 800])\nstim = visual.GratingStim(win, color=-1, sf=0)\nsnd = sound.Sound(880)\nprint(snd)\n# setup labjack U3\nports = u3.U3()\nFIO4 = 6004 # the address of line FIO4\n\nwhile True:\n # do this repeatedly for timing tests\n 
ports.writeRegister(FIO4, 0) # start low\n\n # draw black square\n stim.draw()\n win.flip()\n\n # wait for a key press\n if 'q' in event.waitKeys():\n break\n\n # set to white, flip window and raise level port FIO4\n stim.setColor(1)\n stim.draw()\n win.flip()\n ports.writeRegister(FIO4, 1)\n snd.play()\n for frameN in range(4):\n stim.draw()\n win.flip()\n\n # set color back to black and set FIO4 to low again\n stim.setColor(-1)\n stim.draw()\n win.flip()\n ports.writeRegister(FIO4, 0)\n\nwin.close()\ncore.quit()\n\n# The contents of this file are in the public domain.\n", "path": "psychopy/demos/coder/hardware/labjack_u3.py"}]}
| 982 | 138 |
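The one-line fix above is an instance of a common optional-import idiom. A generic, self-contained sketch of the pattern (module names follow the record; the final fallback to `None` is an added convention, not part of the patch):

```python
# Try the namespaced layout first, then the flat layout, then degrade gracefully.
try:
    from labjack import u3  # layout bundled with Standalone PsychoPy
except ImportError:
    try:
        import u3  # layout from a plain `pip install LabJackPython`
    except ImportError:
        u3 = None  # no LabJack support available on this machine

if u3 is None:
    print("LabJack driver not installed; hardware triggering disabled.")
else:
    ports = u3.U3()  # same API object regardless of which import succeeded
```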
gh_patches_debug_33778
|
rasdani/github-patches
|
git_diff
|
praw-dev__praw-1957
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Docs: Font color of method names is unreasonably white on a white background when using dark theme
### Describe the Documentation Issue
Hey Praw maintainers, thanks for the great work.
I'm about to use this API and I'm really happy with what I've found so far.
The only sad part is I'll have to read the documentation on light theme. This is because of the issue in the title, pictured below, or [directly in the site but turn on **dark mode**](https://praw.readthedocs.io/en/stable/code_overview/reddit_instance.html#praw.Reddit.request):

### Attributes
- [X] Yes
### Location of the issue
https://praw.readthedocs.io/en/stable/code_overview/reddit_instance.html#praw.Reddit.request
### What did you expect to see?
method names a bit easier to read
### What did you actually see?
method names hard to read
### Proposed Fix
Gotta be a code color somewhere or a css rule to fix it
### Operating System/Web Browser
_No response_
### Anything else?
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 """praw setup.py"""
2
3 import re
4 from codecs import open
5 from os import path
6
7 from setuptools import find_packages, setup
8
9 PACKAGE_NAME = "praw"
10 HERE = path.abspath(path.dirname(__file__))
11 with open(path.join(HERE, "README.rst"), encoding="utf-8") as fp:
12 README = fp.read()
13 with open(path.join(HERE, PACKAGE_NAME, "const.py"), encoding="utf-8") as fp:
14 VERSION = re.search('__version__ = "([^"]+)"', fp.read()).group(1)
15
16 extras = {
17 "ci": ["coveralls"],
18 "dev": ["packaging"],
19 "lint": ["pre-commit"],
20 "readthedocs": ["sphinx", "sphinx-rtd-dark-mode", "sphinx_rtd_theme"],
21 "test": [
22 "betamax >=0.8, <0.9",
23 "betamax-matchers >=0.3.0, <0.5",
24 "pytest >=2.7.3",
25 "requests >=2.20.1, <3",
26 "urllib3 ==1.26.*, <2",
27 ],
28 }
29 extras["lint"] += extras["readthedocs"]
30 extras["dev"] += extras["lint"] + extras["test"]
31
32 setup(
33 name=PACKAGE_NAME,
34 author="Bryce Boe",
35 author_email="[email protected]",
36 python_requires="~=3.7",
37 classifiers=[
38 "Development Status :: 5 - Production/Stable",
39 "Environment :: Console",
40 "Intended Audience :: Developers",
41 "License :: OSI Approved :: BSD License",
42 "Natural Language :: English",
43 "Operating System :: OS Independent",
44 "Programming Language :: Python",
45 "Programming Language :: Python :: 3",
46 "Programming Language :: Python :: 3.7",
47 "Programming Language :: Python :: 3.8",
48 "Programming Language :: Python :: 3.9",
49 "Programming Language :: Python :: 3.10",
50 "Programming Language :: Python :: 3.11",
51 "Topic :: Utilities",
52 ],
53 description=(
54 'PRAW, an acronym for "Python Reddit API Wrapper", is a python package that'
55 " allows for simple access to Reddit's API."
56 ),
57 extras_require=extras,
58 install_requires=[
59 "prawcore >=2.1, <3",
60 "update_checker >=0.18",
61 "websocket-client >=0.54.0",
62 ],
63 keywords="reddit api wrapper",
64 license="Simplified BSD License",
65 long_description=README,
66 package_data={"": ["LICENSE.txt"], PACKAGE_NAME: ["*.ini", "images/*.png"]},
67 packages=find_packages(exclude=["tests", "tests.*", "tools", "tools.*"]),
68 project_urls={
69 "Change Log": "https://praw.readthedocs.io/en/latest/package_info/change_log.html",
70 "Documentation": "https://praw.readthedocs.io/",
71 "Issue Tracker": "https://github.com/praw-dev/praw/issues",
72 "Source Code": "https://github.com/praw-dev/praw",
73 },
74 version=VERSION,
75 )
76
```
Path: `docs/conf.py`
Content:
```
1 import os
2 import sys
3 from datetime import datetime
4
5 # Do not touch these. They use the local PRAW over the global PRAW.
6 sys.path.insert(0, ".")
7 sys.path.insert(1, "..")
8
9 from praw import __version__ # noqa: E402
10
11 copyright = datetime.today().strftime("%Y, Bryce Boe")
12 exclude_patterns = ["_build"]
13 extensions = [
14 "sphinx.ext.autodoc",
15 "sphinx.ext.intersphinx",
16 "sphinx_rtd_dark_mode",
17 "sphinx_rtd_theme",
18 ]
19 html_static_path = ["_static"]
20 html_theme = "sphinx_rtd_theme"
21 html_theme_options = {"collapse_navigation": True}
22 htmlhelp_basename = "PRAW"
23 intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
24 master_doc = "index"
25 nitpick_ignore = [
26 ("py:class", "IO"),
27 ("py:class", "prawcore.requestor.Requestor"),
28 ("py:class", "praw.models.redditors.PartialRedditor"),
29 ]
30 nitpicky = True
31 project = "PRAW"
32 pygments_style = "sphinx"
33 release = __version__
34 source_suffix = ".rst"
35 suppress_warnings = ["image.nonlocal_uri"]
36 version = ".".join(__version__.split(".", 2)[:2])
37
38 # Use RTD theme locally
39 if not os.environ.get("READTHEDOCS"):
40 import sphinx_rtd_theme
41
42 html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
43
44
45 def skip(app, what, name, obj, skip, options):
46 if name in {
47 "__call__",
48 "__contains__",
49 "__getitem__",
50 "__init__",
51 "__iter__",
52 "__len__",
53 }:
54 return False
55 return skip
56
57
58 def setup(app):
59 app.connect("autodoc-skip-member", skip)
60 app.add_css_file("theme_override.css")
61
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,4 +1,3 @@
-import os
import sys
from datetime import datetime
@@ -13,12 +12,8 @@
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
- "sphinx_rtd_dark_mode",
- "sphinx_rtd_theme",
]
-html_static_path = ["_static"]
-html_theme = "sphinx_rtd_theme"
-html_theme_options = {"collapse_navigation": True}
+html_theme = "furo"
htmlhelp_basename = "PRAW"
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
master_doc = "index"
@@ -35,12 +30,6 @@
suppress_warnings = ["image.nonlocal_uri"]
version = ".".join(__version__.split(".", 2)[:2])
-# Use RTD theme locally
-if not os.environ.get("READTHEDOCS"):
- import sphinx_rtd_theme
-
- html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
-
def skip(app, what, name, obj, skip, options):
if name in {
@@ -57,4 +46,3 @@
def setup(app):
app.connect("autodoc-skip-member", skip)
- app.add_css_file("theme_override.css")
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,7 @@
"ci": ["coveralls"],
"dev": ["packaging"],
"lint": ["pre-commit"],
- "readthedocs": ["sphinx", "sphinx-rtd-dark-mode", "sphinx_rtd_theme"],
+ "readthedocs": ["furo", "sphinx"],
"test": [
"betamax >=0.8, <0.9",
"betamax-matchers >=0.3.0, <0.5",
@@ -51,7 +51,7 @@
"Topic :: Utilities",
],
description=(
- 'PRAW, an acronym for "Python Reddit API Wrapper", is a python package that'
+ 'PRAW, an acronym for "Python Reddit API Wrapper", is a Python package that'
" allows for simple access to Reddit's API."
),
extras_require=extras,
|
{"golden_diff": "diff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -1,4 +1,3 @@\n-import os\n import sys\n from datetime import datetime\n \n@@ -13,12 +12,8 @@\n extensions = [\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.intersphinx\",\n- \"sphinx_rtd_dark_mode\",\n- \"sphinx_rtd_theme\",\n ]\n-html_static_path = [\"_static\"]\n-html_theme = \"sphinx_rtd_theme\"\n-html_theme_options = {\"collapse_navigation\": True}\n+html_theme = \"furo\"\n htmlhelp_basename = \"PRAW\"\n intersphinx_mapping = {\"python\": (\"https://docs.python.org/3\", None)}\n master_doc = \"index\"\n@@ -35,12 +30,6 @@\n suppress_warnings = [\"image.nonlocal_uri\"]\n version = \".\".join(__version__.split(\".\", 2)[:2])\n \n-# Use RTD theme locally\n-if not os.environ.get(\"READTHEDOCS\"):\n- import sphinx_rtd_theme\n-\n- html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n-\n \n def skip(app, what, name, obj, skip, options):\n if name in {\n@@ -57,4 +46,3 @@\n \n def setup(app):\n app.connect(\"autodoc-skip-member\", skip)\n- app.add_css_file(\"theme_override.css\")\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -17,7 +17,7 @@\n \"ci\": [\"coveralls\"],\n \"dev\": [\"packaging\"],\n \"lint\": [\"pre-commit\"],\n- \"readthedocs\": [\"sphinx\", \"sphinx-rtd-dark-mode\", \"sphinx_rtd_theme\"],\n+ \"readthedocs\": [\"furo\", \"sphinx\"],\n \"test\": [\n \"betamax >=0.8, <0.9\",\n \"betamax-matchers >=0.3.0, <0.5\",\n@@ -51,7 +51,7 @@\n \"Topic :: Utilities\",\n ],\n description=(\n- 'PRAW, an acronym for \"Python Reddit API Wrapper\", is a python package that'\n+ 'PRAW, an acronym for \"Python Reddit API Wrapper\", is a Python package that'\n \" allows for simple access to Reddit's API.\"\n ),\n extras_require=extras,\n", "issue": "Docs: Font color of method names is unreasonably white on a white background when using dark theme\n### Describe the Documentation Issue\n\nHey Praw maintainers, thanks for the great work.\r\nI'm about to use this API and I'm really happy with what I've found so far.\r\nThe only sad part is I'll have to read the documentation on light theme. 
This is because of the issue in the title, pictured below, or [directly in the site but turn on **dark mode**](https://praw.readthedocs.io/en/stable/code_overview/reddit_instance.html#praw.Reddit.request):\r\n\n\n### Attributes\n\n- [X] Yes\n\n### Location of the issue\n\nhttps://praw.readthedocs.io/en/stable/code_overview/reddit_instance.html#praw.Reddit.request\n\n### What did you expect to see?\n\nmethod names a bit easier to read\n\n### What did you actually see?\n\nmethod names hard to read\n\n### Proposed Fix\n\nGotta be a code color somewhere or a css rule to fix it\n\n### Operating System/Web Browser\n\n_No response_\n\n### Anything else?\n\n_No response_\n", "before_files": [{"content": "\"\"\"praw setup.py\"\"\"\n\nimport re\nfrom codecs import open\nfrom os import path\n\nfrom setuptools import find_packages, setup\n\nPACKAGE_NAME = \"praw\"\nHERE = path.abspath(path.dirname(__file__))\nwith open(path.join(HERE, \"README.rst\"), encoding=\"utf-8\") as fp:\n README = fp.read()\nwith open(path.join(HERE, PACKAGE_NAME, \"const.py\"), encoding=\"utf-8\") as fp:\n VERSION = re.search('__version__ = \"([^\"]+)\"', fp.read()).group(1)\n\nextras = {\n \"ci\": [\"coveralls\"],\n \"dev\": [\"packaging\"],\n \"lint\": [\"pre-commit\"],\n \"readthedocs\": [\"sphinx\", \"sphinx-rtd-dark-mode\", \"sphinx_rtd_theme\"],\n \"test\": [\n \"betamax >=0.8, <0.9\",\n \"betamax-matchers >=0.3.0, <0.5\",\n \"pytest >=2.7.3\",\n \"requests >=2.20.1, <3\",\n \"urllib3 ==1.26.*, <2\",\n ],\n}\nextras[\"lint\"] += extras[\"readthedocs\"]\nextras[\"dev\"] += extras[\"lint\"] + extras[\"test\"]\n\nsetup(\n name=PACKAGE_NAME,\n author=\"Bryce Boe\",\n author_email=\"[email protected]\",\n python_requires=\"~=3.7\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Console\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3.11\",\n \"Topic :: Utilities\",\n ],\n description=(\n 'PRAW, an acronym for \"Python Reddit API Wrapper\", is a python package that'\n \" allows for simple access to Reddit's API.\"\n ),\n extras_require=extras,\n install_requires=[\n \"prawcore >=2.1, <3\",\n \"update_checker >=0.18\",\n \"websocket-client >=0.54.0\",\n ],\n keywords=\"reddit api wrapper\",\n license=\"Simplified BSD License\",\n long_description=README,\n package_data={\"\": [\"LICENSE.txt\"], PACKAGE_NAME: [\"*.ini\", \"images/*.png\"]},\n packages=find_packages(exclude=[\"tests\", \"tests.*\", \"tools\", \"tools.*\"]),\n project_urls={\n \"Change Log\": \"https://praw.readthedocs.io/en/latest/package_info/change_log.html\",\n \"Documentation\": \"https://praw.readthedocs.io/\",\n \"Issue Tracker\": \"https://github.com/praw-dev/praw/issues\",\n \"Source Code\": \"https://github.com/praw-dev/praw\",\n },\n version=VERSION,\n)\n", "path": "setup.py"}, {"content": "import os\nimport sys\nfrom datetime import datetime\n\n# Do not touch these. 
They use the local PRAW over the global PRAW.\nsys.path.insert(0, \".\")\nsys.path.insert(1, \"..\")\n\nfrom praw import __version__ # noqa: E402\n\ncopyright = datetime.today().strftime(\"%Y, Bryce Boe\")\nexclude_patterns = [\"_build\"]\nextensions = [\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.intersphinx\",\n \"sphinx_rtd_dark_mode\",\n \"sphinx_rtd_theme\",\n]\nhtml_static_path = [\"_static\"]\nhtml_theme = \"sphinx_rtd_theme\"\nhtml_theme_options = {\"collapse_navigation\": True}\nhtmlhelp_basename = \"PRAW\"\nintersphinx_mapping = {\"python\": (\"https://docs.python.org/3\", None)}\nmaster_doc = \"index\"\nnitpick_ignore = [\n (\"py:class\", \"IO\"),\n (\"py:class\", \"prawcore.requestor.Requestor\"),\n (\"py:class\", \"praw.models.redditors.PartialRedditor\"),\n]\nnitpicky = True\nproject = \"PRAW\"\npygments_style = \"sphinx\"\nrelease = __version__\nsource_suffix = \".rst\"\nsuppress_warnings = [\"image.nonlocal_uri\"]\nversion = \".\".join(__version__.split(\".\", 2)[:2])\n\n# Use RTD theme locally\nif not os.environ.get(\"READTHEDOCS\"):\n import sphinx_rtd_theme\n\n html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n\n\ndef skip(app, what, name, obj, skip, options):\n if name in {\n \"__call__\",\n \"__contains__\",\n \"__getitem__\",\n \"__init__\",\n \"__iter__\",\n \"__len__\",\n }:\n return False\n return skip\n\n\ndef setup(app):\n app.connect(\"autodoc-skip-member\", skip)\n app.add_css_file(\"theme_override.css\")\n", "path": "docs/conf.py"}], "after_files": [{"content": "\"\"\"praw setup.py\"\"\"\n\nimport re\nfrom codecs import open\nfrom os import path\n\nfrom setuptools import find_packages, setup\n\nPACKAGE_NAME = \"praw\"\nHERE = path.abspath(path.dirname(__file__))\nwith open(path.join(HERE, \"README.rst\"), encoding=\"utf-8\") as fp:\n README = fp.read()\nwith open(path.join(HERE, PACKAGE_NAME, \"const.py\"), encoding=\"utf-8\") as fp:\n VERSION = re.search('__version__ = \"([^\"]+)\"', fp.read()).group(1)\n\nextras = {\n \"ci\": [\"coveralls\"],\n \"dev\": [\"packaging\"],\n \"lint\": [\"pre-commit\"],\n \"readthedocs\": [\"furo\", \"sphinx\"],\n \"test\": [\n \"betamax >=0.8, <0.9\",\n \"betamax-matchers >=0.3.0, <0.5\",\n \"pytest >=2.7.3\",\n \"requests >=2.20.1, <3\",\n \"urllib3 ==1.26.*, <2\",\n ],\n}\nextras[\"lint\"] += extras[\"readthedocs\"]\nextras[\"dev\"] += extras[\"lint\"] + extras[\"test\"]\n\nsetup(\n name=PACKAGE_NAME,\n author=\"Bryce Boe\",\n author_email=\"[email protected]\",\n python_requires=\"~=3.7\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Console\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3.11\",\n \"Topic :: Utilities\",\n ],\n description=(\n 'PRAW, an acronym for \"Python Reddit API Wrapper\", is a Python package that'\n \" allows for simple access to Reddit's API.\"\n ),\n extras_require=extras,\n install_requires=[\n \"prawcore >=2.1, <3\",\n \"update_checker >=0.18\",\n \"websocket-client >=0.54.0\",\n ],\n keywords=\"reddit api wrapper\",\n license=\"Simplified BSD License\",\n long_description=README,\n package_data={\"\": [\"LICENSE.txt\"], PACKAGE_NAME: 
[\"*.ini\", \"images/*.png\"]},\n packages=find_packages(exclude=[\"tests\", \"tests.*\", \"tools\", \"tools.*\"]),\n project_urls={\n \"Change Log\": \"https://praw.readthedocs.io/en/latest/package_info/change_log.html\",\n \"Documentation\": \"https://praw.readthedocs.io/\",\n \"Issue Tracker\": \"https://github.com/praw-dev/praw/issues\",\n \"Source Code\": \"https://github.com/praw-dev/praw\",\n },\n version=VERSION,\n)\n", "path": "setup.py"}, {"content": "import sys\nfrom datetime import datetime\n\n# Do not touch these. They use the local PRAW over the global PRAW.\nsys.path.insert(0, \".\")\nsys.path.insert(1, \"..\")\n\nfrom praw import __version__ # noqa: E402\n\ncopyright = datetime.today().strftime(\"%Y, Bryce Boe\")\nexclude_patterns = [\"_build\"]\nextensions = [\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.intersphinx\",\n]\nhtml_theme = \"furo\"\nhtmlhelp_basename = \"PRAW\"\nintersphinx_mapping = {\"python\": (\"https://docs.python.org/3\", None)}\nmaster_doc = \"index\"\nnitpick_ignore = [\n (\"py:class\", \"IO\"),\n (\"py:class\", \"prawcore.requestor.Requestor\"),\n (\"py:class\", \"praw.models.redditors.PartialRedditor\"),\n]\nnitpicky = True\nproject = \"PRAW\"\npygments_style = \"sphinx\"\nrelease = __version__\nsource_suffix = \".rst\"\nsuppress_warnings = [\"image.nonlocal_uri\"]\nversion = \".\".join(__version__.split(\".\", 2)[:2])\n\n\ndef skip(app, what, name, obj, skip, options):\n if name in {\n \"__call__\",\n \"__contains__\",\n \"__getitem__\",\n \"__init__\",\n \"__iter__\",\n \"__len__\",\n }:\n return False\n return skip\n\n\ndef setup(app):\n app.connect(\"autodoc-skip-member\", skip)\n", "path": "docs/conf.py"}]}
| 1,939 | 533 |
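Context for the theme swap above: furo is a standalone pip-installable Sphinx theme with built-in light/dark palettes, so the separate dark-mode extension, the static CSS override, and the local `html_theme_path` handling all become unnecessary. A minimal `conf.py` sketch (project metadata is illustrative):

```python
# docs/conf.py — smallest useful furo configuration
project = "example-project"
extensions = [
    "sphinx.ext.autodoc",      # furo is a theme, not an extension,
    "sphinx.ext.intersphinx",  # so it is never listed here
]
html_theme = "furo"  # `pip install furo`; no theme_path or CSS override needed
```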
gh_patches_debug_7880
|
rasdani/github-patches
|
git_diff
|
locustio__locust-841
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Don't rely on obsolete msgpack-python
msgpack-python looks obsolete -> https://pypi.org/project/msgpack-python/
"This package is deprecated. Install msgpack instead."
but msgpack doesn't provide pythonegg(msgpack-python).
Please consider switching to msgpack directly instead.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 import ast
3 import os
4 import re
5
6 from setuptools import find_packages, setup
7
8 # parse version from locust/__init__.py
9 _version_re = re.compile(r'__version__\s+=\s+(.*)')
10 _init_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), "locust", "__init__.py")
11 with open(_init_file, 'rb') as f:
12 version = str(ast.literal_eval(_version_re.search(
13 f.read().decode('utf-8')).group(1)))
14
15 setup(
16 name='locustio',
17 version=version,
18 description="Website load testing framework",
19 long_description="""Locust is a python utility for doing easy, distributed load testing of a web site""",
20 classifiers=[
21 "Topic :: Software Development :: Testing :: Traffic Generation",
22 "Development Status :: 4 - Beta",
23 "License :: OSI Approved :: MIT License",
24 "Operating System :: OS Independent",
25 "Programming Language :: Python",
26 "Programming Language :: Python :: 2",
27 "Programming Language :: Python :: 2.7",
28 "Programming Language :: Python :: 3",
29 "Programming Language :: Python :: 3.4",
30 "Programming Language :: Python :: 3.5",
31 "Programming Language :: Python :: 3.6",
32 "Intended Audience :: Developers",
33 "Intended Audience :: System Administrators",
34 ],
35 keywords='',
36     author='Jonatan Heyman, Carl Bystrom, Joakim Hamrén, Hugo Heyman',
37 author_email='',
38 url='https://locust.io/',
39 license='MIT',
40 packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
41 include_package_data=True,
42 zip_safe=False,
43 install_requires=["gevent>=1.2.2", "flask>=0.10.1", "requests>=2.9.1", "msgpack-python>=0.4.2", "six>=1.10.0", "pyzmq>=16.0.2"],
44 test_suite="locust.test",
45 tests_require=['mock'],
46 entry_points={
47 'console_scripts': [
48 'locust = locust.main:main',
49 ]
50 },
51 )
52
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -40,7 +40,7 @@
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
- install_requires=["gevent>=1.2.2", "flask>=0.10.1", "requests>=2.9.1", "msgpack-python>=0.4.2", "six>=1.10.0", "pyzmq>=16.0.2"],
+ install_requires=["gevent>=1.2.2", "flask>=0.10.1", "requests>=2.9.1", "msgpack>=0.4.2", "six>=1.10.0", "pyzmq>=16.0.2"],
test_suite="locust.test",
tests_require=['mock'],
entry_points={
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -40,7 +40,7 @@\n packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),\n include_package_data=True,\n zip_safe=False,\n- install_requires=[\"gevent>=1.2.2\", \"flask>=0.10.1\", \"requests>=2.9.1\", \"msgpack-python>=0.4.2\", \"six>=1.10.0\", \"pyzmq>=16.0.2\"],\n+ install_requires=[\"gevent>=1.2.2\", \"flask>=0.10.1\", \"requests>=2.9.1\", \"msgpack>=0.4.2\", \"six>=1.10.0\", \"pyzmq>=16.0.2\"],\n test_suite=\"locust.test\",\n tests_require=['mock'],\n entry_points={\n", "issue": "Don't rely on obsolete msgpack-python\n\r\nmsgpack-python looks obsolete -> https://pypi.org/project/msgpack-python/\r\n\"This package is deprecated. Install msgpack instead.\"\r\n\r\nbut msgpack doesn't provide pythonegg(msgpack-python).\r\n\r\nPlease consider switching to msgpack directly instead.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nimport ast\nimport os\nimport re\n\nfrom setuptools import find_packages, setup\n\n# parse version from locust/__init__.py\n_version_re = re.compile(r'__version__\\s+=\\s+(.*)')\n_init_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), \"locust\", \"__init__.py\")\nwith open(_init_file, 'rb') as f:\n version = str(ast.literal_eval(_version_re.search(\n f.read().decode('utf-8')).group(1)))\n\nsetup(\n name='locustio',\n version=version,\n description=\"Website load testing framework\",\n long_description=\"\"\"Locust is a python utility for doing easy, distributed load testing of a web site\"\"\",\n classifiers=[\n \"Topic :: Software Development :: Testing :: Traffic Generation\",\n \"Development Status :: 4 - Beta\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: System Administrators\",\n ],\n keywords='',\n author='Jonatan Heyman, Carl Bystrom, Joakim Hamr\u00e9n, Hugo Heyman',\n author_email='',\n url='https://locust.io/',\n license='MIT',\n packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),\n include_package_data=True,\n zip_safe=False,\n install_requires=[\"gevent>=1.2.2\", \"flask>=0.10.1\", \"requests>=2.9.1\", \"msgpack-python>=0.4.2\", \"six>=1.10.0\", \"pyzmq>=16.0.2\"],\n test_suite=\"locust.test\",\n tests_require=['mock'],\n entry_points={\n 'console_scripts': [\n 'locust = locust.main:main',\n ]\n },\n)\n", "path": "setup.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nimport ast\nimport os\nimport re\n\nfrom setuptools import find_packages, setup\n\n# parse version from locust/__init__.py\n_version_re = re.compile(r'__version__\\s+=\\s+(.*)')\n_init_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), \"locust\", \"__init__.py\")\nwith open(_init_file, 'rb') as f:\n version = str(ast.literal_eval(_version_re.search(\n f.read().decode('utf-8')).group(1)))\n\nsetup(\n name='locustio',\n version=version,\n description=\"Website load testing framework\",\n long_description=\"\"\"Locust is a python utility for doing easy, distributed load testing of a web site\"\"\",\n classifiers=[\n \"Topic :: Software Development :: Testing :: Traffic Generation\",\n \"Development Status :: 4 - Beta\",\n 
\"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: System Administrators\",\n ],\n keywords='',\n author='Jonatan Heyman, Carl Bystrom, Joakim Hamr\u00e9n, Hugo Heyman',\n author_email='',\n url='https://locust.io/',\n license='MIT',\n packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),\n include_package_data=True,\n zip_safe=False,\n install_requires=[\"gevent>=1.2.2\", \"flask>=0.10.1\", \"requests>=2.9.1\", \"msgpack>=0.4.2\", \"six>=1.10.0\", \"pyzmq>=16.0.2\"],\n test_suite=\"locust.test\",\n tests_require=['mock'],\n entry_points={\n 'console_scripts': [\n 'locust = locust.main:main',\n ]\n },\n)\n", "path": "setup.py"}]}
| 894 | 210 |
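One practical detail behind the dependency rename above: both the deprecated `msgpack-python` distribution and its replacement `msgpack` install the same importable module, so only the install-time metadata changes. A quick round-trip check (values illustrative):

```python
import msgpack  # provided by the "msgpack" distribution on PyPI

payload = {"users": 10, "spawn_rate": 2.5}
packed = msgpack.packb(payload, use_bin_type=True)    # bytes on the wire
assert msgpack.unpackb(packed, raw=False) == payload  # lossless round trip
```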
gh_patches_debug_28502
|
rasdani/github-patches
|
git_diff
|
CTFd__CTFd-1560
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Colon in CTF name breaks emails
This is because of:
https://tools.ietf.org/html/rfc5322#section-2.2
This can probably be fixed with `"HE:tech" <[email protected]>`.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `CTFd/utils/email/smtp.py`
Content:
```
1 import smtplib
2 from email.message import EmailMessage
3 from socket import timeout
4
5 from CTFd.utils import get_app_config, get_config
6
7
8 def get_smtp(host, port, username=None, password=None, TLS=None, SSL=None, auth=None):
9 if SSL is None:
10 smtp = smtplib.SMTP(host, port, timeout=3)
11 else:
12 smtp = smtplib.SMTP_SSL(host, port, timeout=3)
13
14 if TLS:
15 smtp.starttls()
16
17 if auth:
18 smtp.login(username, password)
19 return smtp
20
21
22 def sendmail(addr, text, subject):
23 ctf_name = get_config("ctf_name")
24 mailfrom_addr = get_config("mailfrom_addr") or get_app_config("MAILFROM_ADDR")
25 mailfrom_addr = "{} <{}>".format(ctf_name, mailfrom_addr)
26
27 data = {
28 "host": get_config("mail_server") or get_app_config("MAIL_SERVER"),
29 "port": int(get_config("mail_port") or get_app_config("MAIL_PORT")),
30 }
31 username = get_config("mail_username") or get_app_config("MAIL_USERNAME")
32 password = get_config("mail_password") or get_app_config("MAIL_PASSWORD")
33 TLS = get_config("mail_tls") or get_app_config("MAIL_TLS")
34 SSL = get_config("mail_ssl") or get_app_config("MAIL_SSL")
35 auth = get_config("mail_useauth") or get_app_config("MAIL_USEAUTH")
36
37 if username:
38 data["username"] = username
39 if password:
40 data["password"] = password
41 if TLS:
42 data["TLS"] = TLS
43 if SSL:
44 data["SSL"] = SSL
45 if auth:
46 data["auth"] = auth
47
48 try:
49 smtp = get_smtp(**data)
50
51 msg = EmailMessage()
52 msg.set_content(text)
53
54 msg["Subject"] = subject
55 msg["From"] = mailfrom_addr
56 msg["To"] = addr
57
58 smtp.send_message(msg)
59
60 smtp.quit()
61 return True, "Email sent"
62 except smtplib.SMTPException as e:
63 return False, str(e)
64 except timeout:
65 return False, "SMTP server connection timed out"
66 except Exception as e:
67 return False, str(e)
68
```
Path: `CTFd/utils/email/mailgun.py`
Content:
```
1 import requests
2
3 from CTFd.utils import get_app_config, get_config
4
5
6 def sendmail(addr, text, subject):
7 ctf_name = get_config("ctf_name")
8 mailfrom_addr = get_config("mailfrom_addr") or get_app_config("MAILFROM_ADDR")
9 mailfrom_addr = "{} <{}>".format(ctf_name, mailfrom_addr)
10
11 mailgun_base_url = get_config("mailgun_base_url") or get_app_config(
12 "MAILGUN_BASE_URL"
13 )
14 mailgun_api_key = get_config("mailgun_api_key") or get_app_config("MAILGUN_API_KEY")
15 try:
16 r = requests.post(
17 mailgun_base_url + "/messages",
18 auth=("api", mailgun_api_key),
19 data={
20 "from": mailfrom_addr,
21 "to": [addr],
22 "subject": subject,
23 "text": text,
24 },
25 timeout=1.0,
26 )
27 except requests.RequestException as e:
28 return (
29 False,
30 "{error} exception occured while handling your request".format(
31 error=type(e).__name__
32 ),
33 )
34
35 if r.status_code == 200:
36 return True, "Email sent"
37 else:
38 return False, "Mailgun settings are incorrect"
39
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/CTFd/utils/email/mailgun.py b/CTFd/utils/email/mailgun.py
--- a/CTFd/utils/email/mailgun.py
+++ b/CTFd/utils/email/mailgun.py
@@ -1,3 +1,5 @@
+from email.utils import formataddr
+
import requests
from CTFd.utils import get_app_config, get_config
@@ -6,7 +8,7 @@
def sendmail(addr, text, subject):
ctf_name = get_config("ctf_name")
mailfrom_addr = get_config("mailfrom_addr") or get_app_config("MAILFROM_ADDR")
- mailfrom_addr = "{} <{}>".format(ctf_name, mailfrom_addr)
+ mailfrom_addr = formataddr((ctf_name, mailfrom_addr))
mailgun_base_url = get_config("mailgun_base_url") or get_app_config(
"MAILGUN_BASE_URL"
diff --git a/CTFd/utils/email/smtp.py b/CTFd/utils/email/smtp.py
--- a/CTFd/utils/email/smtp.py
+++ b/CTFd/utils/email/smtp.py
@@ -1,5 +1,6 @@
import smtplib
from email.message import EmailMessage
+from email.utils import formataddr
from socket import timeout
from CTFd.utils import get_app_config, get_config
@@ -22,7 +23,7 @@
def sendmail(addr, text, subject):
ctf_name = get_config("ctf_name")
mailfrom_addr = get_config("mailfrom_addr") or get_app_config("MAILFROM_ADDR")
- mailfrom_addr = "{} <{}>".format(ctf_name, mailfrom_addr)
+ mailfrom_addr = formataddr((ctf_name, mailfrom_addr))
data = {
"host": get_config("mail_server") or get_app_config("MAIL_SERVER"),
|
{"golden_diff": "diff --git a/CTFd/utils/email/mailgun.py b/CTFd/utils/email/mailgun.py\n--- a/CTFd/utils/email/mailgun.py\n+++ b/CTFd/utils/email/mailgun.py\n@@ -1,3 +1,5 @@\n+from email.utils import formataddr\n+\n import requests\n \n from CTFd.utils import get_app_config, get_config\n@@ -6,7 +8,7 @@\n def sendmail(addr, text, subject):\n ctf_name = get_config(\"ctf_name\")\n mailfrom_addr = get_config(\"mailfrom_addr\") or get_app_config(\"MAILFROM_ADDR\")\n- mailfrom_addr = \"{} <{}>\".format(ctf_name, mailfrom_addr)\n+ mailfrom_addr = formataddr((ctf_name, mailfrom_addr))\n \n mailgun_base_url = get_config(\"mailgun_base_url\") or get_app_config(\n \"MAILGUN_BASE_URL\"\ndiff --git a/CTFd/utils/email/smtp.py b/CTFd/utils/email/smtp.py\n--- a/CTFd/utils/email/smtp.py\n+++ b/CTFd/utils/email/smtp.py\n@@ -1,5 +1,6 @@\n import smtplib\n from email.message import EmailMessage\n+from email.utils import formataddr\n from socket import timeout\n \n from CTFd.utils import get_app_config, get_config\n@@ -22,7 +23,7 @@\n def sendmail(addr, text, subject):\n ctf_name = get_config(\"ctf_name\")\n mailfrom_addr = get_config(\"mailfrom_addr\") or get_app_config(\"MAILFROM_ADDR\")\n- mailfrom_addr = \"{} <{}>\".format(ctf_name, mailfrom_addr)\n+ mailfrom_addr = formataddr((ctf_name, mailfrom_addr))\n \n data = {\n \"host\": get_config(\"mail_server\") or get_app_config(\"MAIL_SERVER\"),\n", "issue": "Colon in CTF name breaks emails\nThis is because of:\r\n\r\nhttps://tools.ietf.org/html/rfc5322#section-2.2\r\n\r\nThis can probably be fixed with `\"HE:tech\" <[email protected]>`.\n", "before_files": [{"content": "import smtplib\nfrom email.message import EmailMessage\nfrom socket import timeout\n\nfrom CTFd.utils import get_app_config, get_config\n\n\ndef get_smtp(host, port, username=None, password=None, TLS=None, SSL=None, auth=None):\n if SSL is None:\n smtp = smtplib.SMTP(host, port, timeout=3)\n else:\n smtp = smtplib.SMTP_SSL(host, port, timeout=3)\n\n if TLS:\n smtp.starttls()\n\n if auth:\n smtp.login(username, password)\n return smtp\n\n\ndef sendmail(addr, text, subject):\n ctf_name = get_config(\"ctf_name\")\n mailfrom_addr = get_config(\"mailfrom_addr\") or get_app_config(\"MAILFROM_ADDR\")\n mailfrom_addr = \"{} <{}>\".format(ctf_name, mailfrom_addr)\n\n data = {\n \"host\": get_config(\"mail_server\") or get_app_config(\"MAIL_SERVER\"),\n \"port\": int(get_config(\"mail_port\") or get_app_config(\"MAIL_PORT\")),\n }\n username = get_config(\"mail_username\") or get_app_config(\"MAIL_USERNAME\")\n password = get_config(\"mail_password\") or get_app_config(\"MAIL_PASSWORD\")\n TLS = get_config(\"mail_tls\") or get_app_config(\"MAIL_TLS\")\n SSL = get_config(\"mail_ssl\") or get_app_config(\"MAIL_SSL\")\n auth = get_config(\"mail_useauth\") or get_app_config(\"MAIL_USEAUTH\")\n\n if username:\n data[\"username\"] = username\n if password:\n data[\"password\"] = password\n if TLS:\n data[\"TLS\"] = TLS\n if SSL:\n data[\"SSL\"] = SSL\n if auth:\n data[\"auth\"] = auth\n\n try:\n smtp = get_smtp(**data)\n\n msg = EmailMessage()\n msg.set_content(text)\n\n msg[\"Subject\"] = subject\n msg[\"From\"] = mailfrom_addr\n msg[\"To\"] = addr\n\n smtp.send_message(msg)\n\n smtp.quit()\n return True, \"Email sent\"\n except smtplib.SMTPException as e:\n return False, str(e)\n except timeout:\n return False, \"SMTP server connection timed out\"\n except Exception as e:\n return False, str(e)\n", "path": "CTFd/utils/email/smtp.py"}, {"content": "import requests\n\nfrom CTFd.utils import get_app_config, 
get_config\n\n\ndef sendmail(addr, text, subject):\n ctf_name = get_config(\"ctf_name\")\n mailfrom_addr = get_config(\"mailfrom_addr\") or get_app_config(\"MAILFROM_ADDR\")\n mailfrom_addr = \"{} <{}>\".format(ctf_name, mailfrom_addr)\n\n mailgun_base_url = get_config(\"mailgun_base_url\") or get_app_config(\n \"MAILGUN_BASE_URL\"\n )\n mailgun_api_key = get_config(\"mailgun_api_key\") or get_app_config(\"MAILGUN_API_KEY\")\n try:\n r = requests.post(\n mailgun_base_url + \"/messages\",\n auth=(\"api\", mailgun_api_key),\n data={\n \"from\": mailfrom_addr,\n \"to\": [addr],\n \"subject\": subject,\n \"text\": text,\n },\n timeout=1.0,\n )\n except requests.RequestException as e:\n return (\n False,\n \"{error} exception occured while handling your request\".format(\n error=type(e).__name__\n ),\n )\n\n if r.status_code == 200:\n return True, \"Email sent\"\n else:\n return False, \"Mailgun settings are incorrect\"\n", "path": "CTFd/utils/email/mailgun.py"}], "after_files": [{"content": "import smtplib\nfrom email.message import EmailMessage\nfrom email.utils import formataddr\nfrom socket import timeout\n\nfrom CTFd.utils import get_app_config, get_config\n\n\ndef get_smtp(host, port, username=None, password=None, TLS=None, SSL=None, auth=None):\n if SSL is None:\n smtp = smtplib.SMTP(host, port, timeout=3)\n else:\n smtp = smtplib.SMTP_SSL(host, port, timeout=3)\n\n if TLS:\n smtp.starttls()\n\n if auth:\n smtp.login(username, password)\n return smtp\n\n\ndef sendmail(addr, text, subject):\n ctf_name = get_config(\"ctf_name\")\n mailfrom_addr = get_config(\"mailfrom_addr\") or get_app_config(\"MAILFROM_ADDR\")\n mailfrom_addr = formataddr((ctf_name, mailfrom_addr))\n\n data = {\n \"host\": get_config(\"mail_server\") or get_app_config(\"MAIL_SERVER\"),\n \"port\": int(get_config(\"mail_port\") or get_app_config(\"MAIL_PORT\")),\n }\n username = get_config(\"mail_username\") or get_app_config(\"MAIL_USERNAME\")\n password = get_config(\"mail_password\") or get_app_config(\"MAIL_PASSWORD\")\n TLS = get_config(\"mail_tls\") or get_app_config(\"MAIL_TLS\")\n SSL = get_config(\"mail_ssl\") or get_app_config(\"MAIL_SSL\")\n auth = get_config(\"mail_useauth\") or get_app_config(\"MAIL_USEAUTH\")\n\n if username:\n data[\"username\"] = username\n if password:\n data[\"password\"] = password\n if TLS:\n data[\"TLS\"] = TLS\n if SSL:\n data[\"SSL\"] = SSL\n if auth:\n data[\"auth\"] = auth\n\n try:\n smtp = get_smtp(**data)\n\n msg = EmailMessage()\n msg.set_content(text)\n\n msg[\"Subject\"] = subject\n msg[\"From\"] = mailfrom_addr\n msg[\"To\"] = addr\n\n smtp.send_message(msg)\n\n smtp.quit()\n return True, \"Email sent\"\n except smtplib.SMTPException as e:\n return False, str(e)\n except timeout:\n return False, \"SMTP server connection timed out\"\n except Exception as e:\n return False, str(e)\n", "path": "CTFd/utils/email/smtp.py"}, {"content": "from email.utils import formataddr\n\nimport requests\n\nfrom CTFd.utils import get_app_config, get_config\n\n\ndef sendmail(addr, text, subject):\n ctf_name = get_config(\"ctf_name\")\n mailfrom_addr = get_config(\"mailfrom_addr\") or get_app_config(\"MAILFROM_ADDR\")\n mailfrom_addr = formataddr((ctf_name, mailfrom_addr))\n\n mailgun_base_url = get_config(\"mailgun_base_url\") or get_app_config(\n \"MAILGUN_BASE_URL\"\n )\n mailgun_api_key = get_config(\"mailgun_api_key\") or get_app_config(\"MAILGUN_API_KEY\")\n try:\n r = requests.post(\n mailgun_base_url + \"/messages\",\n auth=(\"api\", mailgun_api_key),\n data={\n \"from\": mailfrom_addr,\n 
\"to\": [addr],\n \"subject\": subject,\n \"text\": text,\n },\n timeout=1.0,\n )\n except requests.RequestException as e:\n return (\n False,\n \"{error} exception occured while handling your request\".format(\n error=type(e).__name__\n ),\n )\n\n if r.status_code == 200:\n return True, \"Email sent\"\n else:\n return False, \"Mailgun settings are incorrect\"\n", "path": "CTFd/utils/email/mailgun.py"}]}
| 1,293 | 399 |
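The heart of the fix above is `email.utils.formataddr`, which quotes display names containing RFC 5322 "specials" (the colon among them) instead of emitting a malformed `From:` header. A quick demonstration:

```python
from email.utils import formataddr

# Colons are "specials" under RFC 5322, so the name gets double-quoted:
print(formataddr(("HE:tech", "[email protected]")))    # "HE:tech" <[email protected]>

# Names made only of ordinary atoms pass through unquoted:
print(formataddr(("PlainCTF", "[email protected]")))  # PlainCTF <[email protected]>
```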
gh_patches_debug_29426
|
rasdani/github-patches
|
git_diff
|
jupyterhub__jupyterhub-1820
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Running jupyterhub upgrade-db with PostgreSQL database fails
**How to reproduce the issue**
Run `jupyterhub upgrade-db` with a PostgreSQL database to upgrade to 99a28a4418e1.
**What you expected to happen**
Successful schema update.
**What actually happens**
It fails with an sqlalchemy `ProgrammingError` message that originates here:
https://github.com/jupyterhub/jupyterhub/blob/master/jupyterhub/alembic/versions/99a28a4418e1_user_created.py#L40
in particular I think that should be `IS NOT NULL` not just `NOT NULL`. I substituted this live and it allowed the upgrade to proceed.
**Share what version of JupyterHub you are using**
Latest master.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `jupyterhub/alembic/versions/99a28a4418e1_user_created.py`
Content:
```
1 """user.created and spawner.started
2
3 Revision ID: 99a28a4418e1
4 Revises: 56cc5a70207e
5 Create Date: 2018-03-21 14:27:17.466841
6
7 """
8
9 # revision identifiers, used by Alembic.
10 revision = '99a28a4418e1'
11 down_revision = '56cc5a70207e'
12 branch_labels = None
13 depends_on = None
14
15
16 from alembic import op
17 import sqlalchemy as sa
18
19 from datetime import datetime
20
21 def upgrade():
22 op.add_column('users', sa.Column('created', sa.DateTime, nullable=True))
23 c = op.get_bind()
24 # fill created date with current time
25 now = datetime.utcnow()
26 c.execute("""
27 UPDATE users
28 SET created='%s'
29 """ % (now,)
30 )
31
32 tables = c.engine.table_names()
33
34 if 'spawners' in tables:
35 op.add_column('spawners', sa.Column('started', sa.DateTime, nullable=True))
36 # fill started value with now for running servers
37 c.execute("""
38 UPDATE spawners
39 SET started='%s'
40 WHERE server_id NOT NULL
41 """ % (now,)
42 )
43
44
45 def downgrade():
46 op.drop_column('users', 'created')
47 op.drop_column('spawners', 'started')
48
```
Path: `jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py`
Content:
```
1 """token tracking
2
3 Revision ID: 56cc5a70207e
4 Revises: 1cebaf56856c
5 Create Date: 2017-12-19 15:21:09.300513
6
7 """
8
9 # revision identifiers, used by Alembic.
10 revision = '56cc5a70207e'
11 down_revision = '1cebaf56856c'
12 branch_labels = None
13 depends_on = None
14
15 from alembic import op
16 import sqlalchemy as sa
17
18
19 def upgrade():
20 tables = op.get_bind().engine.table_names()
21 op.add_column('api_tokens', sa.Column('created', sa.DateTime(), nullable=True))
22 op.add_column('api_tokens', sa.Column('last_activity', sa.DateTime(), nullable=True))
23 op.add_column('api_tokens', sa.Column('note', sa.Unicode(length=1023), nullable=True))
24 if 'oauth_access_tokens' in tables:
25 op.add_column('oauth_access_tokens', sa.Column('created', sa.DateTime(), nullable=True))
26 op.add_column('oauth_access_tokens', sa.Column('last_activity', sa.DateTime(), nullable=True))
27 op.create_foreign_key(None, 'oauth_access_tokens', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')
28 op.create_foreign_key(None, 'oauth_codes', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')
29
30
31 def downgrade():
32 op.drop_constraint(None, 'oauth_codes', type_='foreignkey')
33 op.drop_constraint(None, 'oauth_access_tokens', type_='foreignkey')
34 op.drop_column('oauth_access_tokens', 'last_activity')
35 op.drop_column('oauth_access_tokens', 'created')
36 op.drop_column('api_tokens', 'note')
37 op.drop_column('api_tokens', 'last_activity')
38 op.drop_column('api_tokens', 'created')
39
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py b/jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py
--- a/jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py
+++ b/jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py
@@ -15,6 +15,9 @@
from alembic import op
import sqlalchemy as sa
+import logging
+logger = logging.getLogger('alembic')
+
def upgrade():
tables = op.get_bind().engine.table_names()
@@ -24,8 +27,11 @@
if 'oauth_access_tokens' in tables:
op.add_column('oauth_access_tokens', sa.Column('created', sa.DateTime(), nullable=True))
op.add_column('oauth_access_tokens', sa.Column('last_activity', sa.DateTime(), nullable=True))
- op.create_foreign_key(None, 'oauth_access_tokens', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')
- op.create_foreign_key(None, 'oauth_codes', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')
+ if op.get_context().dialect.name == 'sqlite':
+ logger.warning("sqlite cannot use ALTER TABLE to create foreign keys. Upgrade will be incomplete.")
+ else:
+ op.create_foreign_key(None, 'oauth_access_tokens', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')
+ op.create_foreign_key(None, 'oauth_codes', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')
def downgrade():
diff --git a/jupyterhub/alembic/versions/99a28a4418e1_user_created.py b/jupyterhub/alembic/versions/99a28a4418e1_user_created.py
--- a/jupyterhub/alembic/versions/99a28a4418e1_user_created.py
+++ b/jupyterhub/alembic/versions/99a28a4418e1_user_created.py
@@ -37,7 +37,7 @@
c.execute("""
UPDATE spawners
SET started='%s'
- WHERE server_id NOT NULL
+ WHERE server_id IS NOT NULL
""" % (now,)
)
|
{"golden_diff": "diff --git a/jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py b/jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py\n--- a/jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py\n+++ b/jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py\n@@ -15,6 +15,9 @@\n from alembic import op\n import sqlalchemy as sa\n \n+import logging\n+logger = logging.getLogger('alembic')\n+\n \n def upgrade():\n tables = op.get_bind().engine.table_names()\n@@ -24,8 +27,11 @@\n if 'oauth_access_tokens' in tables:\n op.add_column('oauth_access_tokens', sa.Column('created', sa.DateTime(), nullable=True))\n op.add_column('oauth_access_tokens', sa.Column('last_activity', sa.DateTime(), nullable=True))\n- op.create_foreign_key(None, 'oauth_access_tokens', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')\n- op.create_foreign_key(None, 'oauth_codes', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')\n+ if op.get_context().dialect.name == 'sqlite':\n+ logger.warning(\"sqlite cannot use ALTER TABLE to create foreign keys. Upgrade will be incomplete.\")\n+ else:\n+ op.create_foreign_key(None, 'oauth_access_tokens', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')\n+ op.create_foreign_key(None, 'oauth_codes', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')\n \n \n def downgrade():\ndiff --git a/jupyterhub/alembic/versions/99a28a4418e1_user_created.py b/jupyterhub/alembic/versions/99a28a4418e1_user_created.py\n--- a/jupyterhub/alembic/versions/99a28a4418e1_user_created.py\n+++ b/jupyterhub/alembic/versions/99a28a4418e1_user_created.py\n@@ -37,7 +37,7 @@\n c.execute(\"\"\"\n UPDATE spawners\n SET started='%s'\n- WHERE server_id NOT NULL\n+ WHERE server_id IS NOT NULL\n \"\"\" % (now,)\n )\n", "issue": "Running jupyterhub upgrade-db with PostgreSQL database fails\n**How to reproduce the issue**\r\n\r\nRun `jupyterhub upgrade-db` with a PostgreSQL database to upgrade to 99a28a4418e1.\r\n\r\n**What you expected to happen**\r\n\r\nSuccessful schema update.\r\n\r\n**What actually happens**\r\n\r\nIt fails with an sqlalchemy `ProgrammingError` message that originates here:\r\n\r\nhttps://github.com/jupyterhub/jupyterhub/blob/master/jupyterhub/alembic/versions/99a28a4418e1_user_created.py#L40\r\n\r\nin particular I think that should be `IS NOT NULL` not just `NOT NULL`. 
I substituted this live and it allowed the upgrade to proceed.\r\n\r\n**Share what version of JupyterHub you are using**\r\n\r\nLatest master.\n", "before_files": [{"content": "\"\"\"user.created and spawner.started\n\nRevision ID: 99a28a4418e1\nRevises: 56cc5a70207e\nCreate Date: 2018-03-21 14:27:17.466841\n\n\"\"\"\n\n# revision identifiers, used by Alembic.\nrevision = '99a28a4418e1'\ndown_revision = '56cc5a70207e'\nbranch_labels = None\ndepends_on = None\n\n\nfrom alembic import op\nimport sqlalchemy as sa\n\nfrom datetime import datetime\n\ndef upgrade():\n op.add_column('users', sa.Column('created', sa.DateTime, nullable=True))\n c = op.get_bind()\n # fill created date with current time\n now = datetime.utcnow()\n c.execute(\"\"\"\n UPDATE users\n SET created='%s'\n \"\"\" % (now,)\n )\n\n tables = c.engine.table_names()\n\n if 'spawners' in tables:\n op.add_column('spawners', sa.Column('started', sa.DateTime, nullable=True))\n # fill started value with now for running servers\n c.execute(\"\"\"\n UPDATE spawners\n SET started='%s'\n WHERE server_id NOT NULL\n \"\"\" % (now,)\n )\n\n\ndef downgrade():\n op.drop_column('users', 'created')\n op.drop_column('spawners', 'started')\n", "path": "jupyterhub/alembic/versions/99a28a4418e1_user_created.py"}, {"content": "\"\"\"token tracking\n\nRevision ID: 56cc5a70207e\nRevises: 1cebaf56856c\nCreate Date: 2017-12-19 15:21:09.300513\n\n\"\"\"\n\n# revision identifiers, used by Alembic.\nrevision = '56cc5a70207e'\ndown_revision = '1cebaf56856c'\nbranch_labels = None\ndepends_on = None\n\nfrom alembic import op\nimport sqlalchemy as sa\n\n\ndef upgrade():\n tables = op.get_bind().engine.table_names()\n op.add_column('api_tokens', sa.Column('created', sa.DateTime(), nullable=True))\n op.add_column('api_tokens', sa.Column('last_activity', sa.DateTime(), nullable=True))\n op.add_column('api_tokens', sa.Column('note', sa.Unicode(length=1023), nullable=True))\n if 'oauth_access_tokens' in tables:\n op.add_column('oauth_access_tokens', sa.Column('created', sa.DateTime(), nullable=True))\n op.add_column('oauth_access_tokens', sa.Column('last_activity', sa.DateTime(), nullable=True))\n op.create_foreign_key(None, 'oauth_access_tokens', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')\n op.create_foreign_key(None, 'oauth_codes', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')\n\n\ndef downgrade():\n op.drop_constraint(None, 'oauth_codes', type_='foreignkey')\n op.drop_constraint(None, 'oauth_access_tokens', type_='foreignkey')\n op.drop_column('oauth_access_tokens', 'last_activity')\n op.drop_column('oauth_access_tokens', 'created')\n op.drop_column('api_tokens', 'note')\n op.drop_column('api_tokens', 'last_activity')\n op.drop_column('api_tokens', 'created')\n", "path": "jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py"}], "after_files": [{"content": "\"\"\"user.created and spawner.started\n\nRevision ID: 99a28a4418e1\nRevises: 56cc5a70207e\nCreate Date: 2018-03-21 14:27:17.466841\n\n\"\"\"\n\n# revision identifiers, used by Alembic.\nrevision = '99a28a4418e1'\ndown_revision = '56cc5a70207e'\nbranch_labels = None\ndepends_on = None\n\n\nfrom alembic import op\nimport sqlalchemy as sa\n\nfrom datetime import datetime\n\ndef upgrade():\n op.add_column('users', sa.Column('created', sa.DateTime, nullable=True))\n c = op.get_bind()\n # fill created date with current time\n now = datetime.utcnow()\n c.execute(\"\"\"\n UPDATE users\n SET created='%s'\n \"\"\" % (now,)\n )\n\n tables = c.engine.table_names()\n\n 
if 'spawners' in tables:\n op.add_column('spawners', sa.Column('started', sa.DateTime, nullable=True))\n # fill started value with now for running servers\n c.execute(\"\"\"\n UPDATE spawners\n SET started='%s'\n WHERE server_id IS NOT NULL\n \"\"\" % (now,)\n )\n\n\ndef downgrade():\n op.drop_column('users', 'created')\n op.drop_column('spawners', 'started')\n", "path": "jupyterhub/alembic/versions/99a28a4418e1_user_created.py"}, {"content": "\"\"\"token tracking\n\nRevision ID: 56cc5a70207e\nRevises: 1cebaf56856c\nCreate Date: 2017-12-19 15:21:09.300513\n\n\"\"\"\n\n# revision identifiers, used by Alembic.\nrevision = '56cc5a70207e'\ndown_revision = '1cebaf56856c'\nbranch_labels = None\ndepends_on = None\n\nfrom alembic import op\nimport sqlalchemy as sa\n\nimport logging\nlogger = logging.getLogger('alembic')\n\n\ndef upgrade():\n tables = op.get_bind().engine.table_names()\n op.add_column('api_tokens', sa.Column('created', sa.DateTime(), nullable=True))\n op.add_column('api_tokens', sa.Column('last_activity', sa.DateTime(), nullable=True))\n op.add_column('api_tokens', sa.Column('note', sa.Unicode(length=1023), nullable=True))\n if 'oauth_access_tokens' in tables:\n op.add_column('oauth_access_tokens', sa.Column('created', sa.DateTime(), nullable=True))\n op.add_column('oauth_access_tokens', sa.Column('last_activity', sa.DateTime(), nullable=True))\n if op.get_context().dialect.name == 'sqlite':\n logger.warning(\"sqlite cannot use ALTER TABLE to create foreign keys. Upgrade will be incomplete.\")\n else:\n op.create_foreign_key(None, 'oauth_access_tokens', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')\n op.create_foreign_key(None, 'oauth_codes', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')\n\n\ndef downgrade():\n op.drop_constraint(None, 'oauth_codes', type_='foreignkey')\n op.drop_constraint(None, 'oauth_access_tokens', type_='foreignkey')\n op.drop_column('oauth_access_tokens', 'last_activity')\n op.drop_column('oauth_access_tokens', 'created')\n op.drop_column('api_tokens', 'note')\n op.drop_column('api_tokens', 'last_activity')\n op.drop_column('api_tokens', 'created')\n", "path": "jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py"}]}
| 1,376 | 547 |
gh_patches_debug_10663
|
rasdani/github-patches
|
git_diff
|
shuup__shuup-2095
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Xtheme: there is no warning for unsaved changes when switching between plugins
To reproduce:
1. Edit some content in Xtheme editor
2. Select another plugin without saving
3. See your changes disappear
There probably should be a warning before switching plugins when you have unsaved information.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `shuup/campaigns/admin_module/forms/_basket.py`
Content:
```
1 # This file is part of Shuup.
2 #
3 # Copyright (c) 2012-2019, Shoop Commerce Ltd. All rights reserved.
4 #
5 # This source code is licensed under the OSL-3.0 license found in the
6 # LICENSE file in the root directory of this source tree.
7 from django import forms
8 from django.db.models import Q
9 from django.utils.translation import ugettext_lazy as _
10
11 from shuup.admin.shop_provider import get_shop
12 from shuup.admin.supplier_provider import get_supplier
13 from shuup.campaigns.models import BasketCampaign, Coupon
14
15 from ._base import BaseCampaignForm, QuickAddCouponSelect
16
17
18 class BasketCampaignForm(BaseCampaignForm):
19 class Meta(BaseCampaignForm.Meta):
20 model = BasketCampaign
21
22 def __init__(self, *args, **kwargs):
23 super(BasketCampaignForm, self).__init__(*args, **kwargs)
24
25 coupons = Coupon.objects.filter(
26 Q(active=True, shop=get_shop(self.request)),
27 Q(campaign=None) | Q(campaign=self.instance),
28 )
29 supplier = get_supplier(self.request)
30 if supplier:
31 coupons = coupons.filter(supplier=supplier)
32
33 coupon_code_choices = [('', '')] + list(coupons.values_list("pk", "code"))
34 field_kwargs = dict(choices=coupon_code_choices, required=False)
35 field_kwargs["help_text"] = _("Define the required coupon for this campaign.")
36 field_kwargs["label"] = _("Coupon")
37 field_kwargs["widget"] = QuickAddCouponSelect(editable_model="campaigns.Coupon")
38 if self.instance.pk and self.instance.coupon:
39 field_kwargs["initial"] = self.instance.coupon.pk
40
41 self.fields["coupon"] = forms.ChoiceField(**field_kwargs)
42
43 # the supplier will be, by default, the current one
44 if supplier:
45 self.fields["supplier"].widget = forms.HiddenInput()
46
47 def clean_coupon(self):
48 coupon = self.cleaned_data.get("coupon")
49 if coupon:
50 coupon = Coupon.objects.get(pk=coupon)
51 return coupon or None
52
53 def clean_supplier(self):
54 return self.cleaned_data.get("supplier") or get_supplier(self.request)
55
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/shuup/campaigns/admin_module/forms/_basket.py b/shuup/campaigns/admin_module/forms/_basket.py
--- a/shuup/campaigns/admin_module/forms/_basket.py
+++ b/shuup/campaigns/admin_module/forms/_basket.py
@@ -30,7 +30,7 @@
if supplier:
coupons = coupons.filter(supplier=supplier)
- coupon_code_choices = [('', '')] + list(coupons.values_list("pk", "code"))
+ coupon_code_choices = [('', '---------')] + list(coupons.values_list("pk", "code"))
field_kwargs = dict(choices=coupon_code_choices, required=False)
field_kwargs["help_text"] = _("Define the required coupon for this campaign.")
field_kwargs["label"] = _("Coupon")
|
{"golden_diff": "diff --git a/shuup/campaigns/admin_module/forms/_basket.py b/shuup/campaigns/admin_module/forms/_basket.py\n--- a/shuup/campaigns/admin_module/forms/_basket.py\n+++ b/shuup/campaigns/admin_module/forms/_basket.py\n@@ -30,7 +30,7 @@\n if supplier:\n coupons = coupons.filter(supplier=supplier)\n \n- coupon_code_choices = [('', '')] + list(coupons.values_list(\"pk\", \"code\"))\n+ coupon_code_choices = [('', '---------')] + list(coupons.values_list(\"pk\", \"code\"))\n field_kwargs = dict(choices=coupon_code_choices, required=False)\n field_kwargs[\"help_text\"] = _(\"Define the required coupon for this campaign.\")\n field_kwargs[\"label\"] = _(\"Coupon\")\n", "issue": "Xtheme: there is no warning for usaved changes when switching between plugins\nTo reproduce:\r\n1. Edit some content in Xtheme editor\r\n2. Select another plugin without saving\r\n3. See your changes to disappear\r\n\r\nThere probably should be warning before switching plugins when you have unsaved information.\n", "before_files": [{"content": "# This file is part of Shuup.\n#\n# Copyright (c) 2012-2019, Shoop Commerce Ltd. All rights reserved.\n#\n# This source code is licensed under the OSL-3.0 license found in the\n# LICENSE file in the root directory of this source tree.\nfrom django import forms\nfrom django.db.models import Q\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom shuup.admin.shop_provider import get_shop\nfrom shuup.admin.supplier_provider import get_supplier\nfrom shuup.campaigns.models import BasketCampaign, Coupon\n\nfrom ._base import BaseCampaignForm, QuickAddCouponSelect\n\n\nclass BasketCampaignForm(BaseCampaignForm):\n class Meta(BaseCampaignForm.Meta):\n model = BasketCampaign\n\n def __init__(self, *args, **kwargs):\n super(BasketCampaignForm, self).__init__(*args, **kwargs)\n\n coupons = Coupon.objects.filter(\n Q(active=True, shop=get_shop(self.request)),\n Q(campaign=None) | Q(campaign=self.instance),\n )\n supplier = get_supplier(self.request)\n if supplier:\n coupons = coupons.filter(supplier=supplier)\n\n coupon_code_choices = [('', '')] + list(coupons.values_list(\"pk\", \"code\"))\n field_kwargs = dict(choices=coupon_code_choices, required=False)\n field_kwargs[\"help_text\"] = _(\"Define the required coupon for this campaign.\")\n field_kwargs[\"label\"] = _(\"Coupon\")\n field_kwargs[\"widget\"] = QuickAddCouponSelect(editable_model=\"campaigns.Coupon\")\n if self.instance.pk and self.instance.coupon:\n field_kwargs[\"initial\"] = self.instance.coupon.pk\n\n self.fields[\"coupon\"] = forms.ChoiceField(**field_kwargs)\n\n # the supplier will be, by default, the current one\n if supplier:\n self.fields[\"supplier\"].widget = forms.HiddenInput()\n\n def clean_coupon(self):\n coupon = self.cleaned_data.get(\"coupon\")\n if coupon:\n coupon = Coupon.objects.get(pk=coupon)\n return coupon or None\n\n def clean_supplier(self):\n return self.cleaned_data.get(\"supplier\") or get_supplier(self.request)\n", "path": "shuup/campaigns/admin_module/forms/_basket.py"}], "after_files": [{"content": "# This file is part of Shuup.\n#\n# Copyright (c) 2012-2019, Shoop Commerce Ltd. 
All rights reserved.\n#\n# This source code is licensed under the OSL-3.0 license found in the\n# LICENSE file in the root directory of this source tree.\nfrom django import forms\nfrom django.db.models import Q\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom shuup.admin.shop_provider import get_shop\nfrom shuup.admin.supplier_provider import get_supplier\nfrom shuup.campaigns.models import BasketCampaign, Coupon\n\nfrom ._base import BaseCampaignForm, QuickAddCouponSelect\n\n\nclass BasketCampaignForm(BaseCampaignForm):\n class Meta(BaseCampaignForm.Meta):\n model = BasketCampaign\n\n def __init__(self, *args, **kwargs):\n super(BasketCampaignForm, self).__init__(*args, **kwargs)\n\n coupons = Coupon.objects.filter(\n Q(active=True, shop=get_shop(self.request)),\n Q(campaign=None) | Q(campaign=self.instance),\n )\n supplier = get_supplier(self.request)\n if supplier:\n coupons = coupons.filter(supplier=supplier)\n\n coupon_code_choices = [('', '---------')] + list(coupons.values_list(\"pk\", \"code\"))\n field_kwargs = dict(choices=coupon_code_choices, required=False)\n field_kwargs[\"help_text\"] = _(\"Define the required coupon for this campaign.\")\n field_kwargs[\"label\"] = _(\"Coupon\")\n field_kwargs[\"widget\"] = QuickAddCouponSelect(editable_model=\"campaigns.Coupon\")\n if self.instance.pk and self.instance.coupon:\n field_kwargs[\"initial\"] = self.instance.coupon.pk\n\n self.fields[\"coupon\"] = forms.ChoiceField(**field_kwargs)\n\n # the supplier will be, by default, the current one\n if supplier:\n self.fields[\"supplier\"].widget = forms.HiddenInput()\n\n def clean_coupon(self):\n coupon = self.cleaned_data.get(\"coupon\")\n if coupon:\n coupon = Coupon.objects.get(pk=coupon)\n return coupon or None\n\n def clean_supplier(self):\n return self.cleaned_data.get(\"supplier\") or get_supplier(self.request)\n", "path": "shuup/campaigns/admin_module/forms/_basket.py"}]}
| 893 | 175 |
gh_patches_debug_30949
|
rasdani/github-patches
|
git_diff
|
apache__airflow-24496
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
db+ string in result backend but not metadata secret
### Official Helm Chart version
1.1.0 (latest released)
### Apache Airflow version
2.1.3 (latest released)
### Kubernetes Version
1.21
### Helm Chart configuration
data:
metadataSecretName: "airflow-metadata"
resultBackendSecretName: "airflow-result-backend"
### Docker Image customisations
_No response_
### What happened
If we only supply 1 secret with
```
connection: postgresql://airflow:[email protected]:5432/airflow?sslmode=disable
```
To use for both metadata and resultBackendConnection then we end up with a connection error because
resultBackendConnection expects the string to be formatted like
```
connection: db+postgresql://airflow:[email protected]:5432/airflow?sslmode=disable
```
from what I can tell
### What you expected to happen
I'd expect to be able to use the same secret for both using the same format if they are using the same connection.
### How to reproduce
Make a secret structured like above to look like the metadataConnection auto-generated secret.
use that same secret for the result backend.
deploy.
### Anything else
Occurs always.
To get around it currently, we make 2 secrets, one with just the db+ prepended. 
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://github.com/apache/airflow/blob/main/CODE_OF_CONDUCT.md)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `airflow/config_templates/default_celery.py`
Content:
```
1 #
2 # Licensed to the Apache Software Foundation (ASF) under one
3 # or more contributor license agreements. See the NOTICE file
4 # distributed with this work for additional information
5 # regarding copyright ownership. The ASF licenses this file
6 # to you under the Apache License, Version 2.0 (the
7 # "License"); you may not use this file except in compliance
8 # with the License. You may obtain a copy of the License at
9 #
10 # http://www.apache.org/licenses/LICENSE-2.0
11 #
12 # Unless required by applicable law or agreed to in writing,
13 # software distributed under the License is distributed on an
14 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 # KIND, either express or implied. See the License for the
16 # specific language governing permissions and limitations
17 # under the License.
18 """Default celery configuration."""
19 import logging
20 import ssl
21
22 from airflow.configuration import conf
23 from airflow.exceptions import AirflowConfigException, AirflowException
24
25
26 def _broker_supports_visibility_timeout(url):
27 return url.startswith("redis://") or url.startswith("sqs://")
28
29
30 log = logging.getLogger(__name__)
31
32 broker_url = conf.get('celery', 'BROKER_URL')
33
34 broker_transport_options = conf.getsection('celery_broker_transport_options') or {}
35 if 'visibility_timeout' not in broker_transport_options:
36 if _broker_supports_visibility_timeout(broker_url):
37 broker_transport_options['visibility_timeout'] = 21600
38
39 DEFAULT_CELERY_CONFIG = {
40 'accept_content': ['json'],
41 'event_serializer': 'json',
42 'worker_prefetch_multiplier': conf.getint('celery', 'worker_prefetch_multiplier'),
43 'task_acks_late': True,
44 'task_default_queue': conf.get('operators', 'DEFAULT_QUEUE'),
45 'task_default_exchange': conf.get('operators', 'DEFAULT_QUEUE'),
46 'task_track_started': conf.getboolean('celery', 'task_track_started'),
47 'broker_url': broker_url,
48 'broker_transport_options': broker_transport_options,
49 'result_backend': conf.get('celery', 'RESULT_BACKEND'),
50 'worker_concurrency': conf.getint('celery', 'WORKER_CONCURRENCY'),
51 'worker_enable_remote_control': conf.getboolean('celery', 'worker_enable_remote_control'),
52 }
53
54 celery_ssl_active = False
55 try:
56 celery_ssl_active = conf.getboolean('celery', 'SSL_ACTIVE')
57 except AirflowConfigException:
58 log.warning("Celery Executor will run without SSL")
59
60 try:
61 if celery_ssl_active:
62 if broker_url and 'amqp://' in broker_url:
63 broker_use_ssl = {
64 'keyfile': conf.get('celery', 'SSL_KEY'),
65 'certfile': conf.get('celery', 'SSL_CERT'),
66 'ca_certs': conf.get('celery', 'SSL_CACERT'),
67 'cert_reqs': ssl.CERT_REQUIRED,
68 }
69 elif broker_url and 'redis://' in broker_url:
70 broker_use_ssl = {
71 'ssl_keyfile': conf.get('celery', 'SSL_KEY'),
72 'ssl_certfile': conf.get('celery', 'SSL_CERT'),
73 'ssl_ca_certs': conf.get('celery', 'SSL_CACERT'),
74 'ssl_cert_reqs': ssl.CERT_REQUIRED,
75 }
76 else:
77 raise AirflowException(
78 'The broker you configured does not support SSL_ACTIVE to be True. '
79 'Please use RabbitMQ or Redis if you would like to use SSL for broker.'
80 )
81
82 DEFAULT_CELERY_CONFIG['broker_use_ssl'] = broker_use_ssl
83 except AirflowConfigException:
84 raise AirflowException(
85 'AirflowConfigException: SSL_ACTIVE is True, '
86 'please ensure SSL_KEY, '
87 'SSL_CERT and SSL_CACERT are set'
88 )
89 except Exception as e:
90 raise AirflowException(
91 f'Exception: There was an unknown Celery SSL Error. Please ensure you want to use SSL and/or have '
92 f'all necessary certs and key ({e}).'
93 )
94
95 result_backend = str(DEFAULT_CELERY_CONFIG['result_backend'])
96 if 'amqp://' in result_backend or 'redis://' in result_backend or 'rpc://' in result_backend:
97 log.warning(
98 "You have configured a result_backend of %s, it is highly recommended "
99 "to use an alternative result_backend (i.e. a database).",
100 result_backend,
101 )
102
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/airflow/config_templates/default_celery.py b/airflow/config_templates/default_celery.py
--- a/airflow/config_templates/default_celery.py
+++ b/airflow/config_templates/default_celery.py
@@ -36,6 +36,12 @@
if _broker_supports_visibility_timeout(broker_url):
broker_transport_options['visibility_timeout'] = 21600
+if conf.has_option("celery", 'RESULT_BACKEND'):
+ result_backend = conf.get_mandatory_value('celery', 'RESULT_BACKEND')
+else:
+ log.debug("Value for celery result_backend not found. Using sql_alchemy_conn with db+ prefix.")
+ result_backend = f'db+{conf.get("database", "SQL_ALCHEMY_CONN")}'
+
DEFAULT_CELERY_CONFIG = {
'accept_content': ['json'],
'event_serializer': 'json',
@@ -46,7 +52,7 @@
'task_track_started': conf.getboolean('celery', 'task_track_started'),
'broker_url': broker_url,
'broker_transport_options': broker_transport_options,
- 'result_backend': conf.get('celery', 'RESULT_BACKEND'),
+ 'result_backend': result_backend,
'worker_concurrency': conf.getint('celery', 'WORKER_CONCURRENCY'),
'worker_enable_remote_control': conf.getboolean('celery', 'worker_enable_remote_control'),
}
@@ -92,7 +98,6 @@
f'all necessary certs and key ({e}).'
)
-result_backend = str(DEFAULT_CELERY_CONFIG['result_backend'])
if 'amqp://' in result_backend or 'redis://' in result_backend or 'rpc://' in result_backend:
log.warning(
"You have configured a result_backend of %s, it is highly recommended "
|
{"golden_diff": "diff --git a/airflow/config_templates/default_celery.py b/airflow/config_templates/default_celery.py\n--- a/airflow/config_templates/default_celery.py\n+++ b/airflow/config_templates/default_celery.py\n@@ -36,6 +36,12 @@\n if _broker_supports_visibility_timeout(broker_url):\n broker_transport_options['visibility_timeout'] = 21600\n \n+if conf.has_option(\"celery\", 'RESULT_BACKEND'):\n+ result_backend = conf.get_mandatory_value('celery', 'RESULT_BACKEND')\n+else:\n+ log.debug(\"Value for celery result_backend not found. Using sql_alchemy_conn with db+ prefix.\")\n+ result_backend = f'db+{conf.get(\"database\", \"SQL_ALCHEMY_CONN\")}'\n+\n DEFAULT_CELERY_CONFIG = {\n 'accept_content': ['json'],\n 'event_serializer': 'json',\n@@ -46,7 +52,7 @@\n 'task_track_started': conf.getboolean('celery', 'task_track_started'),\n 'broker_url': broker_url,\n 'broker_transport_options': broker_transport_options,\n- 'result_backend': conf.get('celery', 'RESULT_BACKEND'),\n+ 'result_backend': result_backend,\n 'worker_concurrency': conf.getint('celery', 'WORKER_CONCURRENCY'),\n 'worker_enable_remote_control': conf.getboolean('celery', 'worker_enable_remote_control'),\n }\n@@ -92,7 +98,6 @@\n f'all necessary certs and key ({e}).'\n )\n \n-result_backend = str(DEFAULT_CELERY_CONFIG['result_backend'])\n if 'amqp://' in result_backend or 'redis://' in result_backend or 'rpc://' in result_backend:\n log.warning(\n \"You have configured a result_backend of %s, it is highly recommended \"\n", "issue": "db+ string in result backend but not metadata secret\n### Official Helm Chart version\n\n1.1.0 (latest released)\n\n### Apache Airflow version\n\n2.1.3 (latest released)\n\n### Kubernetes Version\n\n1.21\n\n### Helm Chart configuration\n\n data:\r\n metadataSecretName: \"airflow-metadata\"\r\n resultBackendSecretName: \"airflow-result-backend\"\n\n### Docker Image customisations\n\n_No response_\n\n### What happened\n\nIf we only supply 1 secret with \r\n```\r\nconnection: postgresql://airflow:[email protected]:5432/airflow?sslmode=disable\r\n```\r\nTo use for both metadata and resultBackendConnection then we end up with a connection error because\r\nresultBackendConnection expects the string to be formatted like \r\n```\r\nconnection: db+postgresql://airflow:[email protected]:5432/airflow?sslmode=disable\r\n```\r\nfrom what i can tell\n\n### What you expected to happen\n\nI'd expect to be able to use the same secret for both using the same format if they are using the same connection. \n\n### How to reproduce\n\nMake a secret structured like above to look like the metadataConnection auto-generated secret.\r\nuse that same secret for the result backend.\r\ndeploy.\r\n\n\n### Anything else\n\nOccurs always. \r\nTo get around currently we make 2 secrets one with just the db+ prepended. \n\n### Are you willing to submit PR?\n\n- [ ] Yes I am willing to submit a PR!\n\n### Code of Conduct\n\n- [X] I agree to follow this project's [Code of Conduct](https://github.com/apache/airflow/blob/main/CODE_OF_CONDUCT.md)\n\n", "before_files": [{"content": "#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\"\"\"Default celery configuration.\"\"\"\nimport logging\nimport ssl\n\nfrom airflow.configuration import conf\nfrom airflow.exceptions import AirflowConfigException, AirflowException\n\n\ndef _broker_supports_visibility_timeout(url):\n return url.startswith(\"redis://\") or url.startswith(\"sqs://\")\n\n\nlog = logging.getLogger(__name__)\n\nbroker_url = conf.get('celery', 'BROKER_URL')\n\nbroker_transport_options = conf.getsection('celery_broker_transport_options') or {}\nif 'visibility_timeout' not in broker_transport_options:\n if _broker_supports_visibility_timeout(broker_url):\n broker_transport_options['visibility_timeout'] = 21600\n\nDEFAULT_CELERY_CONFIG = {\n 'accept_content': ['json'],\n 'event_serializer': 'json',\n 'worker_prefetch_multiplier': conf.getint('celery', 'worker_prefetch_multiplier'),\n 'task_acks_late': True,\n 'task_default_queue': conf.get('operators', 'DEFAULT_QUEUE'),\n 'task_default_exchange': conf.get('operators', 'DEFAULT_QUEUE'),\n 'task_track_started': conf.getboolean('celery', 'task_track_started'),\n 'broker_url': broker_url,\n 'broker_transport_options': broker_transport_options,\n 'result_backend': conf.get('celery', 'RESULT_BACKEND'),\n 'worker_concurrency': conf.getint('celery', 'WORKER_CONCURRENCY'),\n 'worker_enable_remote_control': conf.getboolean('celery', 'worker_enable_remote_control'),\n}\n\ncelery_ssl_active = False\ntry:\n celery_ssl_active = conf.getboolean('celery', 'SSL_ACTIVE')\nexcept AirflowConfigException:\n log.warning(\"Celery Executor will run without SSL\")\n\ntry:\n if celery_ssl_active:\n if broker_url and 'amqp://' in broker_url:\n broker_use_ssl = {\n 'keyfile': conf.get('celery', 'SSL_KEY'),\n 'certfile': conf.get('celery', 'SSL_CERT'),\n 'ca_certs': conf.get('celery', 'SSL_CACERT'),\n 'cert_reqs': ssl.CERT_REQUIRED,\n }\n elif broker_url and 'redis://' in broker_url:\n broker_use_ssl = {\n 'ssl_keyfile': conf.get('celery', 'SSL_KEY'),\n 'ssl_certfile': conf.get('celery', 'SSL_CERT'),\n 'ssl_ca_certs': conf.get('celery', 'SSL_CACERT'),\n 'ssl_cert_reqs': ssl.CERT_REQUIRED,\n }\n else:\n raise AirflowException(\n 'The broker you configured does not support SSL_ACTIVE to be True. '\n 'Please use RabbitMQ or Redis if you would like to use SSL for broker.'\n )\n\n DEFAULT_CELERY_CONFIG['broker_use_ssl'] = broker_use_ssl\nexcept AirflowConfigException:\n raise AirflowException(\n 'AirflowConfigException: SSL_ACTIVE is True, '\n 'please ensure SSL_KEY, '\n 'SSL_CERT and SSL_CACERT are set'\n )\nexcept Exception as e:\n raise AirflowException(\n f'Exception: There was an unknown Celery SSL Error. Please ensure you want to use SSL and/or have '\n f'all necessary certs and key ({e}).'\n )\n\nresult_backend = str(DEFAULT_CELERY_CONFIG['result_backend'])\nif 'amqp://' in result_backend or 'redis://' in result_backend or 'rpc://' in result_backend:\n log.warning(\n \"You have configured a result_backend of %s, it is highly recommended \"\n \"to use an alternative result_backend (i.e. 
a database).\",\n result_backend,\n )\n", "path": "airflow/config_templates/default_celery.py"}], "after_files": [{"content": "#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\"\"\"Default celery configuration.\"\"\"\nimport logging\nimport ssl\n\nfrom airflow.configuration import conf\nfrom airflow.exceptions import AirflowConfigException, AirflowException\n\n\ndef _broker_supports_visibility_timeout(url):\n return url.startswith(\"redis://\") or url.startswith(\"sqs://\")\n\n\nlog = logging.getLogger(__name__)\n\nbroker_url = conf.get('celery', 'BROKER_URL')\n\nbroker_transport_options = conf.getsection('celery_broker_transport_options') or {}\nif 'visibility_timeout' not in broker_transport_options:\n if _broker_supports_visibility_timeout(broker_url):\n broker_transport_options['visibility_timeout'] = 21600\n\nif conf.has_option(\"celery\", 'RESULT_BACKEND'):\n result_backend = conf.get_mandatory_value('celery', 'RESULT_BACKEND')\nelse:\n log.debug(\"Value for celery result_backend not found. Using sql_alchemy_conn with db+ prefix.\")\n result_backend = f'db+{conf.get(\"database\", \"SQL_ALCHEMY_CONN\")}'\n\nDEFAULT_CELERY_CONFIG = {\n 'accept_content': ['json'],\n 'event_serializer': 'json',\n 'worker_prefetch_multiplier': conf.getint('celery', 'worker_prefetch_multiplier'),\n 'task_acks_late': True,\n 'task_default_queue': conf.get('operators', 'DEFAULT_QUEUE'),\n 'task_default_exchange': conf.get('operators', 'DEFAULT_QUEUE'),\n 'task_track_started': conf.getboolean('celery', 'task_track_started'),\n 'broker_url': broker_url,\n 'broker_transport_options': broker_transport_options,\n 'result_backend': result_backend,\n 'worker_concurrency': conf.getint('celery', 'WORKER_CONCURRENCY'),\n 'worker_enable_remote_control': conf.getboolean('celery', 'worker_enable_remote_control'),\n}\n\ncelery_ssl_active = False\ntry:\n celery_ssl_active = conf.getboolean('celery', 'SSL_ACTIVE')\nexcept AirflowConfigException:\n log.warning(\"Celery Executor will run without SSL\")\n\ntry:\n if celery_ssl_active:\n if broker_url and 'amqp://' in broker_url:\n broker_use_ssl = {\n 'keyfile': conf.get('celery', 'SSL_KEY'),\n 'certfile': conf.get('celery', 'SSL_CERT'),\n 'ca_certs': conf.get('celery', 'SSL_CACERT'),\n 'cert_reqs': ssl.CERT_REQUIRED,\n }\n elif broker_url and 'redis://' in broker_url:\n broker_use_ssl = {\n 'ssl_keyfile': conf.get('celery', 'SSL_KEY'),\n 'ssl_certfile': conf.get('celery', 'SSL_CERT'),\n 'ssl_ca_certs': conf.get('celery', 'SSL_CACERT'),\n 'ssl_cert_reqs': ssl.CERT_REQUIRED,\n }\n else:\n raise AirflowException(\n 'The broker you configured does not support SSL_ACTIVE to be True. 
'\n 'Please use RabbitMQ or Redis if you would like to use SSL for broker.'\n )\n\n DEFAULT_CELERY_CONFIG['broker_use_ssl'] = broker_use_ssl\nexcept AirflowConfigException:\n raise AirflowException(\n 'AirflowConfigException: SSL_ACTIVE is True, '\n 'please ensure SSL_KEY, '\n 'SSL_CERT and SSL_CACERT are set'\n )\nexcept Exception as e:\n raise AirflowException(\n f'Exception: There was an unknown Celery SSL Error. Please ensure you want to use SSL and/or have '\n f'all necessary certs and key ({e}).'\n )\n\nif 'amqp://' in result_backend or 'redis://' in result_backend or 'rpc://' in result_backend:\n log.warning(\n \"You have configured a result_backend of %s, it is highly recommended \"\n \"to use an alternative result_backend (i.e. a database).\",\n result_backend,\n )\n", "path": "airflow/config_templates/default_celery.py"}]}
| 1,758 | 393 |
gh_patches_debug_18040
|
rasdani/github-patches
|
git_diff
|
liqd__a4-meinberlin-1652
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
make it possible to detach plan from project
now that a connection of a project with a plan is no longer obligatory, initiators need to be able to undo a connection. Once I click on a plan I can no longer NOT connect to a plan, which should be possible.

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `meinberlin/apps/plans/forms.py`
Content:
```
1 from django import forms
2 from django.conf import settings
3 from django.db.models import Q
4 from django.utils.translation import ugettext_lazy as _
5
6 from adhocracy4.dashboard.components.forms import ProjectDashboardForm
7 from adhocracy4.maps import widgets as maps_widgets
8 from adhocracy4.projects import models as project_models
9 from meinberlin.apps.contrib import widgets as contrib_widgets
10
11 from . import models
12
13
14 def get_theme_options():
15 return models.Plan.objects\
16 .filter(~Q(theme=''))\
17 .order_by('theme')\
18 .values_list('theme', flat=True)\
19 .distinct()
20
21
22 class PlanForm(forms.ModelForm):
23
24 class Meta:
25 model = models.Plan
26 fields = [
27 'title',
28 'description_image',
29 'contact',
30 'point',
31 'point_label',
32 'district',
33 'cost',
34 'description',
35 'theme',
36 'status',
37 'participation']
38 widgets = {
39 'point': maps_widgets.MapChoosePointWidget(
40 polygon=settings.BERLIN_POLYGON),
41 'theme': contrib_widgets.TextWithDatalistWidget(attrs={
42 'options': get_theme_options
43 })
44 }
45 error_messages = {
46 'point': {
47 'required': _('Please locate the plan on the map.')
48 }
49 }
50
51 def __init__(self, *args, **kwargs):
52 super().__init__(*args, **kwargs)
53 self.fields['district'].empty_label = _('City wide')
54
55
56 class CustomMultipleChoiceField(forms.ModelMultipleChoiceField):
57
58 widget = forms.RadioSelect
59
60 def clean(self, value):
61 if value is None:
62 return super().clean([])
63 return super().clean([value])
64
65
66 class ProjectPlansDashboardForm(ProjectDashboardForm):
67 plans = CustomMultipleChoiceField(queryset=None,
68 label=_('Plans'))
69
70 class Meta:
71 model = project_models.Project
72 fields = ['plans']
73 required = False
74
75 def save(self, commit=False):
76 plans = self.cleaned_data['plans']
77 self.instance.plans.set(plans)
78
79 def __init__(self, *args, **kwargs):
80 super().__init__(*args, **kwargs)
81 self.initial['plans'] = self.instance.plans.all()
82 self.fields['plans'
83 ].queryset = self.instance.organisation.plan_set.all()
84
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/meinberlin/apps/plans/forms.py b/meinberlin/apps/plans/forms.py
--- a/meinberlin/apps/plans/forms.py
+++ b/meinberlin/apps/plans/forms.py
@@ -55,10 +55,10 @@
class CustomMultipleChoiceField(forms.ModelMultipleChoiceField):
- widget = forms.RadioSelect
+ widget = forms.Select
def clean(self, value):
- if value is None:
+ if not value:
return super().clean([])
return super().clean([value])
@@ -79,5 +79,7 @@
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.initial['plans'] = self.instance.plans.all()
- self.fields['plans'
- ].queryset = self.instance.organisation.plan_set.all()
+ self.fields['plans'].required = False
+ self.fields['plans'].empty_label = '----------'
+ self.fields['plans'].queryset = \
+ self.instance.organisation.plan_set.all()
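Illustrative sketch (plain Python, not the Django field itself): an empty `<select>` submission arrives as `''`, not `None`, so the old `is None` guard never took the "detach" branch, while `not value` does:
```python
def old_guard(value):
    return value is None   # '' slips through and then fails pk validation

def new_guard(value):
    return not value       # catches both None and ''

for submitted in (None, '', '42'):
    print(repr(submitted), old_guard(submitted), new_guard(submitted))
# None True True
# ''   False True   <- the case that made detaching a plan impossible
# '42' False False
```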
|
{"golden_diff": "diff --git a/meinberlin/apps/plans/forms.py b/meinberlin/apps/plans/forms.py\n--- a/meinberlin/apps/plans/forms.py\n+++ b/meinberlin/apps/plans/forms.py\n@@ -55,10 +55,10 @@\n \n class CustomMultipleChoiceField(forms.ModelMultipleChoiceField):\n \n- widget = forms.RadioSelect\n+ widget = forms.Select\n \n def clean(self, value):\n- if value is None:\n+ if not value:\n return super().clean([])\n return super().clean([value])\n \n@@ -79,5 +79,7 @@\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.initial['plans'] = self.instance.plans.all()\n- self.fields['plans'\n- ].queryset = self.instance.organisation.plan_set.all()\n+ self.fields['plans'].required = False\n+ self.fields['plans'].empty_label = '----------'\n+ self.fields['plans'].queryset = \\\n+ self.instance.organisation.plan_set.all()\n", "issue": "make it possible to detach plan from project\nnow that a connection of a project with a plan is no longer obligatory, initiators need to be able to undo a connection. Once I click on a plan I can no longer NOT connect to a plan, which should be possible.\r\n\r\n\r\n\n", "before_files": [{"content": "from django import forms\nfrom django.conf import settings\nfrom django.db.models import Q\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom adhocracy4.dashboard.components.forms import ProjectDashboardForm\nfrom adhocracy4.maps import widgets as maps_widgets\nfrom adhocracy4.projects import models as project_models\nfrom meinberlin.apps.contrib import widgets as contrib_widgets\n\nfrom . import models\n\n\ndef get_theme_options():\n return models.Plan.objects\\\n .filter(~Q(theme=''))\\\n .order_by('theme')\\\n .values_list('theme', flat=True)\\\n .distinct()\n\n\nclass PlanForm(forms.ModelForm):\n\n class Meta:\n model = models.Plan\n fields = [\n 'title',\n 'description_image',\n 'contact',\n 'point',\n 'point_label',\n 'district',\n 'cost',\n 'description',\n 'theme',\n 'status',\n 'participation']\n widgets = {\n 'point': maps_widgets.MapChoosePointWidget(\n polygon=settings.BERLIN_POLYGON),\n 'theme': contrib_widgets.TextWithDatalistWidget(attrs={\n 'options': get_theme_options\n })\n }\n error_messages = {\n 'point': {\n 'required': _('Please locate the plan on the map.')\n }\n }\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.fields['district'].empty_label = _('City wide')\n\n\nclass CustomMultipleChoiceField(forms.ModelMultipleChoiceField):\n\n widget = forms.RadioSelect\n\n def clean(self, value):\n if value is None:\n return super().clean([])\n return super().clean([value])\n\n\nclass ProjectPlansDashboardForm(ProjectDashboardForm):\n plans = CustomMultipleChoiceField(queryset=None,\n label=_('Plans'))\n\n class Meta:\n model = project_models.Project\n fields = ['plans']\n required = False\n\n def save(self, commit=False):\n plans = self.cleaned_data['plans']\n self.instance.plans.set(plans)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.initial['plans'] = self.instance.plans.all()\n self.fields['plans'\n ].queryset = self.instance.organisation.plan_set.all()\n", "path": "meinberlin/apps/plans/forms.py"}], "after_files": [{"content": "from django import forms\nfrom django.conf import settings\nfrom django.db.models import Q\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom adhocracy4.dashboard.components.forms import ProjectDashboardForm\nfrom adhocracy4.maps import widgets as maps_widgets\nfrom adhocracy4.projects import models as 
project_models\nfrom meinberlin.apps.contrib import widgets as contrib_widgets\n\nfrom . import models\n\n\ndef get_theme_options():\n return models.Plan.objects\\\n .filter(~Q(theme=''))\\\n .order_by('theme')\\\n .values_list('theme', flat=True)\\\n .distinct()\n\n\nclass PlanForm(forms.ModelForm):\n\n class Meta:\n model = models.Plan\n fields = [\n 'title',\n 'description_image',\n 'contact',\n 'point',\n 'point_label',\n 'district',\n 'cost',\n 'description',\n 'theme',\n 'status',\n 'participation']\n widgets = {\n 'point': maps_widgets.MapChoosePointWidget(\n polygon=settings.BERLIN_POLYGON),\n 'theme': contrib_widgets.TextWithDatalistWidget(attrs={\n 'options': get_theme_options\n })\n }\n error_messages = {\n 'point': {\n 'required': _('Please locate the plan on the map.')\n }\n }\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.fields['district'].empty_label = _('City wide')\n\n\nclass CustomMultipleChoiceField(forms.ModelMultipleChoiceField):\n\n widget = forms.Select\n\n def clean(self, value):\n if not value:\n return super().clean([])\n return super().clean([value])\n\n\nclass ProjectPlansDashboardForm(ProjectDashboardForm):\n plans = CustomMultipleChoiceField(queryset=None,\n label=_('Plans'))\n\n class Meta:\n model = project_models.Project\n fields = ['plans']\n required = False\n\n def save(self, commit=False):\n plans = self.cleaned_data['plans']\n self.instance.plans.set(plans)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.initial['plans'] = self.instance.plans.all()\n self.fields['plans'].required = False\n self.fields['plans'].empty_label = '----------'\n self.fields['plans'].queryset = \\\n self.instance.organisation.plan_set.all()\n", "path": "meinberlin/apps/plans/forms.py"}]}
| 1,038 | 240 |
gh_patches_debug_1022
|
rasdani/github-patches
|
git_diff
|
searxng__searxng-437
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug: microsoft academic engine
**Version of SearXNG, commit number if you are using on master branch and stipulate if you forked SearXNG**
Repository: https://github.com/tiekoetter/searxng
Branch: master
Version: 1.0.0-972-93548243
<!-- Check if these values are correct -->
**How did you install SearXNG?**
<!-- Did you install SearXNG using the official wiki or using searxng-docker
or manually by executing the searx/webapp.py file? -->
**What happened?**
<!-- A clear and concise description of what the bug is. -->
**How To Reproduce**
<!-- How can we reproduce this issue? (as minimally and as precisely as possible) -->
**Expected behavior**
<!-- A clear and concise description of what you expected to happen. -->
**Screenshots & Logs**
<!-- If applicable, add screenshots, logs to help explain your problem. -->
**Additional context**
<!-- Add any other context about the problem here. -->
**Technical report**
Error
* Error: httpx.TimeoutException
* Percentage: 50
* Parameters: `(None, None, None)`
* File name: `searx/search/processors/online.py:97`
* Function: `_send_http_request`
* Code: `response = req(params['url'], **request_args)`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `searx/engines/microsoft_academic.py`
Content:
```
1 # SPDX-License-Identifier: AGPL-3.0-or-later
2 """
3 Microsoft Academic (Science)
4 """
5
6 from json import dumps, loads
7 from searx.utils import html_to_text
8
9 # about
10 about = {
11 "website": 'https://academic.microsoft.com',
12 "wikidata_id": 'Q28136779',
13 "official_api_documentation": 'http://ma-graph.org/',
14 "use_official_api": False,
15 "require_api_key": False,
16 "results": 'JSON',
17 }
18
19 categories = ['images']
20 paging = True
21 search_url = 'https://academic.microsoft.com/api/search'
22 _paper_url = 'https://academic.microsoft.com/paper/{id}/reference'
23
24
25 def request(query, params):
26 params['url'] = search_url
27 params['method'] = 'POST'
28 params['headers']['content-type'] = 'application/json; charset=utf-8'
29 params['data'] = dumps({
30 'query': query,
31 'queryExpression': '',
32 'filters': [],
33 'orderBy': 0,
34 'skip': (params['pageno'] - 1) * 10,
35 'sortAscending': True,
36 'take': 10,
37 'includeCitationContexts': False,
38 'profileId': '',
39 })
40
41 return params
42
43
44 def response(resp):
45 results = []
46 response_data = loads(resp.text)
47 if not response_data:
48 return results
49
50 for result in response_data['pr']:
51 if 'dn' not in result['paper']:
52 continue
53
54 title = result['paper']['dn']
55 content = _get_content(result['paper'])
56 url = _paper_url.format(id=result['paper']['id'])
57 results.append({
58 'url': url,
59 'title': html_to_text(title),
60 'content': html_to_text(content),
61 })
62
63 return results
64
65
66 def _get_content(result):
67 if 'd' in result:
68 content = result['d']
69 if len(content) > 300:
70 return content[:300] + '...'
71 return content
72
73 return ''
74
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/searx/engines/microsoft_academic.py b/searx/engines/microsoft_academic.py
--- a/searx/engines/microsoft_academic.py
+++ b/searx/engines/microsoft_academic.py
@@ -47,7 +47,7 @@
if not response_data:
return results
- for result in response_data['pr']:
+ for result in response_data.get('pr', {}):
if 'dn' not in result['paper']:
continue
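Illustrative only: `response_data['pr']` raises `KeyError` on payloads that omit the key (for example, the timeout-shaped responses reported above), whereas `.get('pr', {})` simply yields nothing to iterate:
```python
response_data = {}  # e.g. an empty or error-shaped JSON payload

results = []
for result in response_data.get('pr', {}):  # safe: zero iterations
    results.append(result)
print(results)  # -> []
```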
|
{"golden_diff": "diff --git a/searx/engines/microsoft_academic.py b/searx/engines/microsoft_academic.py\n--- a/searx/engines/microsoft_academic.py\n+++ b/searx/engines/microsoft_academic.py\n@@ -47,7 +47,7 @@\n if not response_data:\n return results\n \n- for result in response_data['pr']:\n+ for result in response_data.get('pr', {}):\n if 'dn' not in result['paper']:\n continue\n", "issue": "Bug: microsoft academic engine\n**Version of SearXNG, commit number if you are using on master branch and stipulate if you forked SearXNG**\r\nRepository: https://github.com/tiekoetter/searxng\r\nBranch: master\r\nVersion: 1.0.0-972-93548243\r\n<!-- Check if these values are correct -->\r\n\r\n**How did you install SearXNG?**\r\n<!-- Did you install SearXNG using the official wiki or using searxng-docker\r\nor manually by executing the searx/webapp.py file? -->\r\n**What happened?**\r\n<!-- A clear and concise description of what the bug is. -->\r\n\r\n**How To Reproduce**\r\n<!-- How can we reproduce this issue? (as minimally and as precisely as possible) -->\r\n\r\n**Expected behavior**\r\n<!-- A clear and concise description of what you expected to happen. -->\r\n\r\n**Screenshots & Logs**\r\n<!-- If applicable, add screenshots, logs to help explain your problem. -->\r\n\r\n**Additional context**\r\n<!-- Add any other context about the problem here. -->\r\n\r\n**Technical report**\r\n\r\nError\r\n * Error: httpx.TimeoutException\r\n * Percentage: 50\r\n * Parameters: `(None, None, None)`\r\n * File name: `searx/search/processors/online.py:97`\r\n * Function: `_send_http_request`\r\n * Code: `response = req(params['url'], **request_args)`\r\n\r\n\n", "before_files": [{"content": "# SPDX-License-Identifier: AGPL-3.0-or-later\n\"\"\"\n Microsoft Academic (Science)\n\"\"\"\n\nfrom json import dumps, loads\nfrom searx.utils import html_to_text\n\n# about\nabout = {\n \"website\": 'https://academic.microsoft.com',\n \"wikidata_id\": 'Q28136779',\n \"official_api_documentation\": 'http://ma-graph.org/',\n \"use_official_api\": False,\n \"require_api_key\": False,\n \"results\": 'JSON',\n}\n\ncategories = ['images']\npaging = True\nsearch_url = 'https://academic.microsoft.com/api/search'\n_paper_url = 'https://academic.microsoft.com/paper/{id}/reference'\n\n\ndef request(query, params):\n params['url'] = search_url\n params['method'] = 'POST'\n params['headers']['content-type'] = 'application/json; charset=utf-8'\n params['data'] = dumps({\n 'query': query,\n 'queryExpression': '',\n 'filters': [],\n 'orderBy': 0,\n 'skip': (params['pageno'] - 1) * 10,\n 'sortAscending': True,\n 'take': 10,\n 'includeCitationContexts': False,\n 'profileId': '',\n })\n\n return params\n\n\ndef response(resp):\n results = []\n response_data = loads(resp.text)\n if not response_data:\n return results\n\n for result in response_data['pr']:\n if 'dn' not in result['paper']:\n continue\n\n title = result['paper']['dn']\n content = _get_content(result['paper'])\n url = _paper_url.format(id=result['paper']['id'])\n results.append({\n 'url': url,\n 'title': html_to_text(title),\n 'content': html_to_text(content),\n })\n\n return results\n\n\ndef _get_content(result):\n if 'd' in result:\n content = result['d']\n if len(content) > 300:\n return content[:300] + '...'\n return content\n\n return ''\n", "path": "searx/engines/microsoft_academic.py"}], "after_files": [{"content": "# SPDX-License-Identifier: AGPL-3.0-or-later\n\"\"\"\n Microsoft Academic (Science)\n\"\"\"\n\nfrom json import dumps, loads\nfrom searx.utils import html_to_text\n\n# about\nabout = {\n \"website\": 'https://academic.microsoft.com',\n \"wikidata_id\": 'Q28136779',\n \"official_api_documentation\": 'http://ma-graph.org/',\n \"use_official_api\": False,\n \"require_api_key\": False,\n \"results\": 'JSON',\n}\n\ncategories = ['images']\npaging = True\nsearch_url = 'https://academic.microsoft.com/api/search'\n_paper_url = 'https://academic.microsoft.com/paper/{id}/reference'\n\n\ndef request(query, params):\n params['url'] = search_url\n params['method'] = 'POST'\n params['headers']['content-type'] = 'application/json; charset=utf-8'\n params['data'] = dumps({\n 'query': query,\n 'queryExpression': '',\n 'filters': [],\n 'orderBy': 0,\n 'skip': (params['pageno'] - 1) * 10,\n 'sortAscending': True,\n 'take': 10,\n 'includeCitationContexts': False,\n 'profileId': '',\n })\n\n return params\n\n\ndef response(resp):\n results = []\n response_data = loads(resp.text)\n if not response_data:\n return results\n\n for result in response_data.get('pr', {}):\n if 'dn' not in result['paper']:\n continue\n\n title = result['paper']['dn']\n content = _get_content(result['paper'])\n url = _paper_url.format(id=result['paper']['id'])\n results.append({\n 'url': url,\n 'title': html_to_text(title),\n 'content': html_to_text(content),\n })\n\n return results\n\n\ndef _get_content(result):\n if 'd' in result:\n content = result['d']\n if len(content) > 300:\n return content[:300] + '...'\n return content\n\n return ''\n", "path": "searx/engines/microsoft_academic.py"}]}
| 1,175 | 115 |
gh_patches_debug_552
|
rasdani/github-patches
|
git_diff
|
pex-tool__pex-880
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.1
On the docket:
+ [x] PEX 2.1.0 regression: pex file won't build inside a running docker image as user #850
+ [x] Fully pin vendored requirements. #853
+ [x] Fix `tox -epackage` to create pex supporting 3.8. #843
+ [x] Pex erroneously warns about needing to use vendored `pkg_resources` for distributions with empty `namespace_packages.txt` metadata files. #840
+ [x] Interpreter discovery and pyenv don't interact well #782
+ [x] ensure_python_interpreter() bootstrapping broken on pypy shard #477
+ [x] Resolve error checking does not account for environment markers. #851
+ [x] Ensure Pex PEX contraints match pex wheel / sdist. #863
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = '2.1.0'
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = '2.1.0'
+__version__ = '2.1.1'
|
{"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = '2.1.0'\n+__version__ = '2.1.1'\n", "issue": "Release 2.1.1\nOn the docket:\r\n+ [x] PEX 2.1.0 regression: pex file won't build inside a running docker image as user #850\r\n+ [x] Fully pin vendored requirements. #853\r\n+ [x] Fix `tox -epackage` to create pex supporting 3.8. #843\r\n+ [x] Pex erroneously warns about needing to use vendored `pkg_resources` for distributions with empty `namespace_packages.txt` metadata files. #840\r\n+ [x] Interpreter discovery and pyenv don't interact well #782\r\n+ [x] ensure_python_interpreter() bootstrapping broken on pypy shard #477\r\n+ [x] Resolve error checking does not account for environment markers. #851\r\n+ [x] Ensure Pex PEX contraints match pex wheel / sdist. #863\r\n\r\n\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '2.1.0'\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '2.1.1'\n", "path": "pex/version.py"}]}
| 515 | 94 |
gh_patches_debug_11105
|
rasdani/github-patches
|
git_diff
|
pwndbg__pwndbg-130
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error in the `search` command
The `search` command appears to have some issues while searching signed integer data
For example while debugging a x86 program:
```
pwndbg> search -4 0xf7eebf83
Traceback (most recent call last):
File "/home/bruce30262/pwndbg/pwndbg/commands/__init__.py", line 57, in __call__
return self.function(*args, **kwargs)
File "/home/bruce30262/pwndbg/pwndbg/commands/__init__.py", line 162, in _ArgparsedCommand
return function(**vars(args))
File "/home/bruce30262/pwndbg/pwndbg/commands/__init__.py", line 115, in _OnlyWhenRunning
return function(*a, **kw)
File "/home/bruce30262/pwndbg/pwndbg/commands/search.py", line 112, in search
value = struct.pack(fmt, value)
struct.error: argument out of range
```
This is because `pwndbg.commands.fix_int(value)` returns `-135348349` instead of an unsigned integer, and the packing format in [search.py](https://github.com/pwndbg/pwndbg/blob/b1892b27741a478cd5361061b0b4dda9ef46d02e/pwndbg/commands/search.py#L106) only support the unsigned data type, causing the error.
Commands like `search -4 -- -100` will also cause the same error.
Kind of curious why it only support the unsigned data type though. It might need some additional check for the appropriate packing format.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pwndbg/commands/search.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 from __future__ import absolute_import
4 from __future__ import division
5 from __future__ import print_function
6 from __future__ import unicode_literals
7
8 import argparse
9 import codecs
10 import os
11 import struct
12
13 import gdb
14
15 import pwndbg.color.memory as M
16 import pwndbg.commands
17 import pwndbg.config
18 import pwndbg.enhance
19 import pwndbg.search
20 import pwndbg.vmmap
21
22 saved = set()
23
24 def print_search_hit(address):
25 """Prints out a single search hit.
26
27 Arguments:
28 address(int): Address to print
29 """
30 if not address:
31 return
32
33 vmmap = pwndbg.vmmap.find(address)
34 if vmmap:
35 region = os.path.basename(vmmap.objfile)
36 else:
37 region = '[mapped]'
38
39 region = region.ljust(15)
40
41 region = M.get(address, region)
42 addr = M.get(address)
43 display = pwndbg.enhance.enhance(address)
44 print(region,addr,display)
45
46 auto_save = pwndbg.config.Parameter('auto-save-search', False,
47 'automatically pass --save to "search" command')
48
49 parser = argparse.ArgumentParser(description='''
50 Search memory for byte sequences, strings, pointers, and integer values
51 ''')
52 parser.add_argument('-t', '--type', choices=['byte','short','dword','qword','pointer','string','bytes'],
53 help='Size of search target', default='bytes', type=str)
54 parser.add_argument('-1', '--byte', dest='type', action='store_const', const='byte',
55 help='Search for a 1-byte integer')
56 parser.add_argument('-2', '--word', dest='type', action='store_const', const='word',
57 help='Search for a 2-byte integer')
58 parser.add_argument('-4', '--dword', dest='type', action='store_const', const='dword',
59 help='Search for a 4-byte integer')
60 parser.add_argument('-8', '--qword', dest='type', action='store_const', const='qword',
61 help='Search for an 8-byte integer')
62 parser.add_argument('-p', '--pointer', dest='type', action='store_const', const='pointer',
63 help='Search for a pointer-width integer')
64 parser.add_argument('-x', '--hex', action='store_true',
65 help='Target is a hex-encoded (for bytes/strings)')
66 parser.add_argument('-s', '--string', action='store_true',
67 help='Target is a raw string')
68 parser.add_argument('-e', '--executable', action='store_true',
69 help='Search executable segments only')
70 parser.add_argument('-w', '--writable', action='store_true',
71 help='Search writable segments only')
72 parser.add_argument('value', type=str,
73 help='Value to search for')
74 parser.add_argument('mapping', type=str, nargs='?', default=None,
75 help='Mapping to search [e.g. libc]')
76 parser.add_argument('--save', action='store_true', default=None,
77 help='Save results for --resume. Default comes from config %r' % auto_save.name)
78 parser.add_argument('--no-save', action='store_false', default=None, dest='save',
79 help='Invert --save')
80 parser.add_argument('-n', '--next', action='store_true',
81 help='Search only locations returned by previous search with --save')
82
83 @pwndbg.commands.ArgparsedCommand(parser)
84 @pwndbg.commands.OnlyWhenRunning
85 def search(type, hex, string, executable, writable, value, mapping, save, next):
86 # Adjust pointer sizes to the local architecture
87 if type == 'pointer':
88 type = {
89 4: 'dword',
90 8: 'qword'
91 }[pwndbg.arch.ptrsize]
92
93 if save is None:
94 save = bool(pwndbg.config.auto_save_search)
95
96 if hex:
97 value = codecs.decode(value, 'hex')
98
99 # Convert to an integer if needed, and pack to bytes
100 if type not in ('string', 'bytes'):
101 value = pwndbg.commands.fix_int(value)
102 fmt = {
103 'little': '<',
104 'big': '>'
105 }[pwndbg.arch.endian] + {
106 'byte': 'B',
107 'short': 'H',
108 'dword': 'L',
109 'qword': 'Q'
110 }[type]
111
112 value = struct.pack(fmt, value)
113
114 # Null-terminate strings
115 elif type == 'string':
116 value += b'\x00'
117
118 # Prep the saved set if necessary
119 global saved
120 if save:
121 saved = set()
122
123 # Perform the search
124 for address in pwndbg.search.search(value,
125 mapping=mapping,
126 executable=executable,
127 writable=writable):
128
129 if next and address not in saved:
130 continue
131
132 if save:
133 saved.add(address)
134
135 print_search_hit(address)
136
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pwndbg/commands/search.py b/pwndbg/commands/search.py
--- a/pwndbg/commands/search.py
+++ b/pwndbg/commands/search.py
@@ -12,6 +12,7 @@
import gdb
+import pwndbg.arch
import pwndbg.color.memory as M
import pwndbg.commands
import pwndbg.config
@@ -99,6 +100,7 @@
# Convert to an integer if needed, and pack to bytes
if type not in ('string', 'bytes'):
value = pwndbg.commands.fix_int(value)
+ value &= pwndbg.arch.ptrmask
fmt = {
'little': '<',
'big': '>'
|
{"golden_diff": "diff --git a/pwndbg/commands/search.py b/pwndbg/commands/search.py\n--- a/pwndbg/commands/search.py\n+++ b/pwndbg/commands/search.py\n@@ -12,6 +12,7 @@\n \n import gdb\n \n+import pwndbg.arch\n import pwndbg.color.memory as M\n import pwndbg.commands\n import pwndbg.config\n@@ -99,6 +100,7 @@\n # Convert to an integer if needed, and pack to bytes\n if type not in ('string', 'bytes'):\n value = pwndbg.commands.fix_int(value)\n+ value &= pwndbg.arch.ptrmask\n fmt = {\n 'little': '<',\n 'big': '>'\n", "issue": "Error in the `search` command\nThe `search` command appears to have some issues while searching signed integer data \nFor example while debugging a x86 program:\n\n```\npwndbg> search -4 0xf7eebf83\nTraceback (most recent call last):\n File \"/home/bruce30262/pwndbg/pwndbg/commands/__init__.py\", line 57, in __call__\n return self.function(*args, **kwargs)\n File \"/home/bruce30262/pwndbg/pwndbg/commands/__init__.py\", line 162, in _ArgparsedCommand\n return function(**vars(args))\n File \"/home/bruce30262/pwndbg/pwndbg/commands/__init__.py\", line 115, in _OnlyWhenRunning\n return function(*a, **kw)\n File \"/home/bruce30262/pwndbg/pwndbg/commands/search.py\", line 112, in search\n value = struct.pack(fmt, value)\nstruct.error: argument out of range\n```\n\nThis is because `pwndbg.commands.fix_int(value)` returns `-135348349` instead of an unsigned integer, and the packing format in [search.py](https://github.com/pwndbg/pwndbg/blob/b1892b27741a478cd5361061b0b4dda9ef46d02e/pwndbg/commands/search.py#L106) only support the unsigned data type, causing the error. \nCommands like `search -4 -- -100` will also cause the same error. \nKind of curious why it only support the unsigned data type though. It might need some additional check for the appropriate packing format.\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport argparse\nimport codecs\nimport os\nimport struct\n\nimport gdb\n\nimport pwndbg.color.memory as M\nimport pwndbg.commands\nimport pwndbg.config\nimport pwndbg.enhance\nimport pwndbg.search\nimport pwndbg.vmmap\n\nsaved = set()\n\ndef print_search_hit(address):\n \"\"\"Prints out a single search hit.\n\n Arguments:\n address(int): Address to print\n \"\"\"\n if not address:\n return\n\n vmmap = pwndbg.vmmap.find(address)\n if vmmap:\n region = os.path.basename(vmmap.objfile)\n else:\n region = '[mapped]'\n\n region = region.ljust(15)\n\n region = M.get(address, region)\n addr = M.get(address)\n display = pwndbg.enhance.enhance(address)\n print(region,addr,display)\n\nauto_save = pwndbg.config.Parameter('auto-save-search', False,\n 'automatically pass --save to \"search\" command')\n\nparser = argparse.ArgumentParser(description='''\nSearch memory for byte sequences, strings, pointers, and integer values\n''')\nparser.add_argument('-t', '--type', choices=['byte','short','dword','qword','pointer','string','bytes'],\n help='Size of search target', default='bytes', type=str)\nparser.add_argument('-1', '--byte', dest='type', action='store_const', const='byte',\n help='Search for a 1-byte integer')\nparser.add_argument('-2', '--word', dest='type', action='store_const', const='word',\n help='Search for a 2-byte integer')\nparser.add_argument('-4', '--dword', dest='type', action='store_const', const='dword',\n help='Search for a 4-byte integer')\nparser.add_argument('-8', '--qword', dest='type', action='store_const', const='qword',\n help='Search for an 8-byte integer')\nparser.add_argument('-p', '--pointer', dest='type', action='store_const', const='pointer',\n help='Search for a pointer-width integer')\nparser.add_argument('-x', '--hex', action='store_true',\n help='Target is a hex-encoded (for bytes/strings)')\nparser.add_argument('-s', '--string', action='store_true',\n help='Target is a raw string')\nparser.add_argument('-e', '--executable', action='store_true',\n help='Search executable segments only')\nparser.add_argument('-w', '--writable', action='store_true',\n help='Search writable segments only')\nparser.add_argument('value', type=str,\n help='Value to search for')\nparser.add_argument('mapping', type=str, nargs='?', default=None,\n help='Mapping to search [e.g. libc]')\nparser.add_argument('--save', action='store_true', default=None,\n help='Save results for --resume. Default comes from config %r' % auto_save.name)\nparser.add_argument('--no-save', action='store_false', default=None, dest='save',\n help='Invert --save')\nparser.add_argument('-n', '--next', action='store_true',\n help='Search only locations returned by previous search with --save')\n\[email protected](parser)\[email protected]\ndef search(type, hex, string, executable, writable, value, mapping, save, next):\n # Adjust pointer sizes to the local architecture\n if type == 'pointer':\n type = {\n 4: 'dword',\n 8: 'qword'\n }[pwndbg.arch.ptrsize]\n\n if save is None:\n save = bool(pwndbg.config.auto_save_search)\n\n if hex:\n value = codecs.decode(value, 'hex')\n\n # Convert to an integer if needed, and pack to bytes\n if type not in ('string', 'bytes'):\n value = pwndbg.commands.fix_int(value)\n fmt = {\n 'little': '<',\n 'big': '>'\n }[pwndbg.arch.endian] + {\n 'byte': 'B',\n 'short': 'H',\n 'dword': 'L',\n 'qword': 'Q'\n }[type]\n\n value = struct.pack(fmt, value)\n\n # Null-terminate strings\n elif type == 'string':\n value += b'\\x00'\n\n # Prep the saved set if necessary\n global saved\n if save:\n saved = set()\n\n # Perform the search\n for address in pwndbg.search.search(value,\n mapping=mapping,\n executable=executable,\n writable=writable):\n\n if next and address not in saved:\n continue\n\n if save:\n saved.add(address)\n\n print_search_hit(address)\n", "path": "pwndbg/commands/search.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport argparse\nimport codecs\nimport os\nimport struct\n\nimport gdb\n\nimport pwndbg.arch\nimport pwndbg.color.memory as M\nimport pwndbg.commands\nimport pwndbg.config\nimport pwndbg.enhance\nimport pwndbg.search\nimport pwndbg.vmmap\n\nsaved = set()\n\ndef print_search_hit(address):\n \"\"\"Prints out a single search hit.\n\n Arguments:\n address(int): Address to print\n \"\"\"\n if not address:\n return\n\n vmmap = pwndbg.vmmap.find(address)\n if vmmap:\n region = os.path.basename(vmmap.objfile)\n else:\n region = '[mapped]'\n\n region = region.ljust(15)\n\n region = M.get(address, region)\n addr = M.get(address)\n display = pwndbg.enhance.enhance(address)\n print(region,addr,display)\n\nauto_save = pwndbg.config.Parameter('auto-save-search', False,\n 'automatically pass --save to \"search\" command')\n\nparser = argparse.ArgumentParser(description='''\nSearch memory for byte sequences, strings, pointers, and integer values\n''')\nparser.add_argument('-t', '--type', choices=['byte','short','dword','qword','pointer','string','bytes'],\n help='Size of search target', default='bytes', type=str)\nparser.add_argument('-1', '--byte', dest='type', action='store_const', const='byte',\n help='Search for a 1-byte integer')\nparser.add_argument('-2', '--word', dest='type', action='store_const', const='word',\n help='Search for a 2-byte integer')\nparser.add_argument('-4', '--dword', dest='type', action='store_const', const='dword',\n help='Search for a 4-byte integer')\nparser.add_argument('-8', '--qword', dest='type', action='store_const', const='qword',\n help='Search for an 8-byte integer')\nparser.add_argument('-p', '--pointer', dest='type', action='store_const', const='pointer',\n help='Search for a pointer-width integer')\nparser.add_argument('-x', '--hex', action='store_true',\n help='Target is a hex-encoded (for bytes/strings)')\nparser.add_argument('-s', '--string', action='store_true',\n help='Target is a raw string')\nparser.add_argument('-e', '--executable', action='store_true',\n help='Search executable segments only')\nparser.add_argument('-w', '--writable', action='store_true',\n help='Search writable segments only')\nparser.add_argument('value', type=str,\n help='Value to search for')\nparser.add_argument('mapping', type=str, nargs='?', default=None,\n help='Mapping to search [e.g. libc]')\nparser.add_argument('--save', action='store_true', default=None,\n help='Save results for --resume. Default comes from config %r' % auto_save.name)\nparser.add_argument('--no-save', action='store_false', default=None, dest='save',\n help='Invert --save')\nparser.add_argument('-n', '--next', action='store_true',\n help='Search only locations returned by previous search with --save')\n\[email protected](parser)\[email protected]\ndef search(type, hex, string, executable, writable, value, mapping, save, next):\n # Adjust pointer sizes to the local architecture\n if type == 'pointer':\n type = {\n 4: 'dword',\n 8: 'qword'\n }[pwndbg.arch.ptrsize]\n\n if save is None:\n save = bool(pwndbg.config.auto_save_search)\n\n if hex:\n value = codecs.decode(value, 'hex')\n\n # Convert to an integer if needed, and pack to bytes\n if type not in ('string', 'bytes'):\n value = pwndbg.commands.fix_int(value)\n value &= pwndbg.arch.ptrmask\n fmt = {\n 'little': '<',\n 'big': '>'\n }[pwndbg.arch.endian] + {\n 'byte': 'B',\n 'short': 'H',\n 'dword': 'L',\n 'qword': 'Q'\n }[type]\n\n value = struct.pack(fmt, value)\n\n # Null-terminate strings\n elif type == 'string':\n value += b'\\x00'\n\n # Prep the saved set if necessary\n global saved\n if save:\n saved = set()\n\n # Perform the search\n for address in pwndbg.search.search(value,\n mapping=mapping,\n executable=executable,\n writable=writable):\n\n if next and address not in saved:\n continue\n\n if save:\n saved.add(address)\n\n print_search_hit(address)\n", "path": "pwndbg/commands/search.py"}]}
| 2,037 | 160 |
gh_patches_debug_24
|
rasdani/github-patches
|
git_diff
|
hpcaitech__ColossalAI-2007
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG]: ModuleNotFoundError: No module named 'colossalai.nn.optimizer.zero_optimizer'
### 🐛 Describe the bug
I install colossalAI with the command `pip install colossalai==0.1.11rc3+torch1.12cu11.3 -f https://release.colossalai.org`
But I get an error when follow https://github.com/hpcaitech/ColossalAI/tree/main/examples/tutorial#-run-opt-finetuning-and-inference, I just run `bash ./run_clm_synthetic.sh` and get an error as follows:
```shell
╭─────────────────────────────── Traceback (most recent call last) ────────────────────────────────╮
│ /home/he.yan/ColossalAI/examples/tutorial/opt/opt/run_clm.py:46 in <module> │
│ │
│ 43 from colossalai.core import global_context as gpc │
│ 44 from colossalai.logging import disable_existing_loggers, get_dist_logger │
│ 45 from colossalai.nn.optimizer import HybridAdam │
│ ❱ 46 from colossalai.nn.optimizer.zero_optimizer import ZeroOptimizer │
│ 47 from colossalai.nn.parallel import ZeroDDP │
│ 48 from colossalai.tensor import ProcessGroup │
│ 49 from colossalai.utils import get_current_device, get_dataloader │
╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
ModuleNotFoundError: No module named 'colossalai.nn.optimizer.zero_optimizer'
```
### Environment
Python 3.8.15
torch1.12cu11.3
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `colossalai/__init__.py`
Content:
```
1 from .initialize import (
2 get_default_parser,
3 initialize,
4 launch,
5 launch_from_openmpi,
6 launch_from_slurm,
7 launch_from_torch,
8 )
9
10 __version__ = '0.1.11rc2'
11
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/colossalai/__init__.py b/colossalai/__init__.py
--- a/colossalai/__init__.py
+++ b/colossalai/__init__.py
@@ -7,4 +7,4 @@
launch_from_torch,
)
-__version__ = '0.1.11rc2'
+__version__ = '0.1.11rc4'
|
{"golden_diff": "diff --git a/colossalai/__init__.py b/colossalai/__init__.py\n--- a/colossalai/__init__.py\n+++ b/colossalai/__init__.py\n@@ -7,4 +7,4 @@\n launch_from_torch,\n )\n \n-__version__ = '0.1.11rc2'\n+__version__ = '0.1.11rc4'\n", "issue": "[BUG]: ModuleNotFoundError: No module named 'colossalai.nn.optimizer.zero_optimizer'\n### \ud83d\udc1b Describe the bug\r\n\r\nI install colossalAI with the command `pip install colossalai==0.1.11rc3+torch1.12cu11.3 -f https://release.colossalai.org`\r\nBut I get an error when follow https://github.com/hpcaitech/ColossalAI/tree/main/examples/tutorial#-run-opt-finetuning-and-inference, I just run `bash ./run_clm_synthetic.sh` and get an error as follows:\r\n\r\n```shell\r\n\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 Traceback (most recent call last) \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\r\n\u2502 /home/he.yan/ColossalAI/examples/tutorial/opt/opt/run_clm.py:46 in <module> \u2502\r\n\u2502 \u2502\r\n\u2502 43 from colossalai.core import global_context as gpc \u2502\r\n\u2502 44 from colossalai.logging import disable_existing_loggers, get_dist_logger \u2502\r\n\u2502 45 from colossalai.nn.optimizer import HybridAdam \u2502\r\n\u2502 \u2771 46 from colossalai.nn.optimizer.zero_optimizer import ZeroOptimizer \u2502\r\n\u2502 47 from colossalai.nn.parallel import ZeroDDP \u2502\r\n\u2502 48 from colossalai.tensor import ProcessGroup \u2502\r\n\u2502 49 from colossalai.utils import get_current_device, get_dataloader \u2502\r\n\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\r\nModuleNotFoundError: No module named 'colossalai.nn.optimizer.zero_optimizer'\r\n```\r\n\r\n### Environment\r\n\r\nPython 3.8.15\r\ntorch1.12cu11.3\n", "before_files": [{"content": "from .initialize import (\n get_default_parser,\n initialize,\n launch,\n launch_from_openmpi,\n launch_from_slurm,\n launch_from_torch,\n)\n\n__version__ = '0.1.11rc2'\n", "path": "colossalai/__init__.py"}], "after_files": [{"content": "from .initialize import (\n get_default_parser,\n initialize,\n launch,\n launch_from_openmpi,\n launch_from_slurm,\n launch_from_torch,\n)\n\n__version__ = '0.1.11rc4'\n", "path": "colossalai/__init__.py"}]}
| 690 | 92 |
gh_patches_debug_2151
|
rasdani/github-patches
|
git_diff
|
WeblateOrg__weblate-4665
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
migrations fail for database name containing "-"
**Describe the bug**
Applying memory.0007_use_trigram...Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/django/db/backends/utils.py", line 84, in _execute
return self.cursor.execute(sql, params)
psycopg2.errors.SyntaxError: syntax error at or near "-"
LINE 1: ALTER DATABASE weblate-staging SET pg_trgm.similarity_thresh...
^
**To Reproduce**
Set the database name to "weblate-staging"
I worked around this by changing of
ALTER DATABASE {} SET
to
ALTER DATABASE \"{}\" SET
in 0007_use_trigram.py and 0008_adjust_similarity.py.
weblate-4.1.1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `weblate/memory/migrations/0008_adjust_similarity.py`
Content:
```
1 # Generated by Django 3.0.5 on 2020-05-12 11:44
2
3 from django.db import migrations
4
5
6 def update_index(apps, schema_editor):
7 if schema_editor.connection.vendor != "postgresql":
8 return
9 # This ensures that extensions are loaded into the session. Without that
10 # the next ALTER database fails unless we're running as superuser (which
11 # is allowed to set non existing parameters, so missing extension doesn't
12 # matter)
13 # See https://www.postgresql.org/message-id/6376.1533675236%40sss.pgh.pa.us
14 schema_editor.execute("SELECT show_limit()")
15
16 schema_editor.execute(
17 "ALTER ROLE {} SET pg_trgm.similarity_threshold = 0.5".format(
18 schema_editor.connection.settings_dict["USER"]
19 )
20 )
21
22
23 class Migration(migrations.Migration):
24
25 dependencies = [
26 ("memory", "0007_use_trigram"),
27 ]
28
29 operations = [
30 migrations.RunPython(
31 update_index, migrations.RunPython.noop, elidable=False, atomic=False
32 )
33 ]
34
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/weblate/memory/migrations/0008_adjust_similarity.py b/weblate/memory/migrations/0008_adjust_similarity.py
--- a/weblate/memory/migrations/0008_adjust_similarity.py
+++ b/weblate/memory/migrations/0008_adjust_similarity.py
@@ -15,7 +15,7 @@
schema_editor.execute(
"ALTER ROLE {} SET pg_trgm.similarity_threshold = 0.5".format(
- schema_editor.connection.settings_dict["USER"]
+ schema_editor.quote_name(schema_editor.connection.settings_dict["USER"])
)
)
|
{"golden_diff": "diff --git a/weblate/memory/migrations/0008_adjust_similarity.py b/weblate/memory/migrations/0008_adjust_similarity.py\n--- a/weblate/memory/migrations/0008_adjust_similarity.py\n+++ b/weblate/memory/migrations/0008_adjust_similarity.py\n@@ -15,7 +15,7 @@\n \n schema_editor.execute(\n \"ALTER ROLE {} SET pg_trgm.similarity_threshold = 0.5\".format(\n- schema_editor.connection.settings_dict[\"USER\"]\n+ schema_editor.quote_name(schema_editor.connection.settings_dict[\"USER\"])\n )\n )\n", "issue": "migrations fail for database name containing \"-\"\n**Describe the bug**\r\n Applying memory.0007_use_trigram...Traceback (most recent call last):\r\n File \"/usr/lib/python3.6/site-packages/django/db/backends/utils.py\", line 84, in _execute\r\n return self.cursor.execute(sql, params)\r\npsycopg2.errors.SyntaxError: syntax error at or near \"-\"\r\nLINE 1: ALTER DATABASE weblate-staging SET pg_trgm.similarity_thresh...\r\n ^\r\n**To Reproduce**\r\nSet the database name to \"weblate-staging\"\r\n\r\nI worked around this by changing of\r\nALTER DATABASE {} SET\r\nto\r\nALTER DATABASE \\\"{}\\\" SET\r\nin 0007_use_trigram.py and 0008_adjust_similarity.py.\r\n\r\nweblate-4.1.1\n", "before_files": [{"content": "# Generated by Django 3.0.5 on 2020-05-12 11:44\n\nfrom django.db import migrations\n\n\ndef update_index(apps, schema_editor):\n if schema_editor.connection.vendor != \"postgresql\":\n return\n # This ensures that extensions are loaded into the session. Without that\n # the next ALTER database fails unless we're running as superuser (which\n # is allowed to set non existing parameters, so missing extension doesn't\n # matter)\n # See https://www.postgresql.org/message-id/6376.1533675236%40sss.pgh.pa.us\n schema_editor.execute(\"SELECT show_limit()\")\n\n schema_editor.execute(\n \"ALTER ROLE {} SET pg_trgm.similarity_threshold = 0.5\".format(\n schema_editor.connection.settings_dict[\"USER\"]\n )\n )\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n (\"memory\", \"0007_use_trigram\"),\n ]\n\n operations = [\n migrations.RunPython(\n update_index, migrations.RunPython.noop, elidable=False, atomic=False\n )\n ]\n", "path": "weblate/memory/migrations/0008_adjust_similarity.py"}], "after_files": [{"content": "# Generated by Django 3.0.5 on 2020-05-12 11:44\n\nfrom django.db import migrations\n\n\ndef update_index(apps, schema_editor):\n if schema_editor.connection.vendor != \"postgresql\":\n return\n # This ensures that extensions are loaded into the session. Without that\n # the next ALTER database fails unless we're running as superuser (which\n # is allowed to set non existing parameters, so missing extension doesn't\n # matter)\n # See https://www.postgresql.org/message-id/6376.1533675236%40sss.pgh.pa.us\n schema_editor.execute(\"SELECT show_limit()\")\n\n schema_editor.execute(\n \"ALTER ROLE {} SET pg_trgm.similarity_threshold = 0.5\".format(\n schema_editor.quote_name(schema_editor.connection.settings_dict[\"USER\"])\n )\n )\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n (\"memory\", \"0007_use_trigram\"),\n ]\n\n operations = [\n migrations.RunPython(\n update_index, migrations.RunPython.noop, elidable=False, atomic=False\n )\n ]\n", "path": "weblate/memory/migrations/0008_adjust_similarity.py"}]}
| 758 | 134 |
gh_patches_debug_2298
|
rasdani/github-patches
|
git_diff
|
iterative__dvc-1436
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
repro: --all-pipelines doesn't do anything.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dvc/command/repro.py`
Content:
```
1 import os
2
3 from dvc.command.base import CmdBase
4 from dvc.command.status import CmdDataStatus
5 from dvc.exceptions import DvcException
6
7
8 class CmdRepro(CmdBase):
9 def run(self):
10 recursive = not self.args.single_item
11 saved_dir = os.path.realpath(os.curdir)
12 if self.args.cwd:
13 os.chdir(self.args.cwd)
14
15 ret = 0
16 for target in self.args.targets:
17 try:
18 stages = self.project.reproduce(
19 target,
20 recursive=recursive,
21 force=self.args.force,
22 dry=self.args.dry,
23 interactive=self.args.interactive,
24 pipeline=self.args.pipeline,
25 all_pipelines=self.args.all_pipelines,
26 ignore_build_cache=self.args.ignore_build_cache)
27
28 if len(stages) == 0:
29 self.project.logger.info(CmdDataStatus.UP_TO_DATE_MSG)
30
31 if self.args.metrics:
32 self.project.metrics_show()
33 except DvcException as ex:
34 msg = 'Failed to reproduce \'{}\''.format(target)
35 self.project.logger.error(msg, ex)
36 ret = 1
37 break
38
39 os.chdir(saved_dir)
40 return ret
41
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/dvc/command/repro.py b/dvc/command/repro.py
--- a/dvc/command/repro.py
+++ b/dvc/command/repro.py
@@ -12,6 +12,10 @@
if self.args.cwd:
os.chdir(self.args.cwd)
+ # Dirty hack so the for loop below can at least enter once
+ if self.args.all_pipelines:
+ self.args.targets = [None]
+
ret = 0
for target in self.args.targets:
try:
|
{"golden_diff": "diff --git a/dvc/command/repro.py b/dvc/command/repro.py\n--- a/dvc/command/repro.py\n+++ b/dvc/command/repro.py\n@@ -12,6 +12,10 @@\n if self.args.cwd:\n os.chdir(self.args.cwd)\n \n+ # Dirty hack so the for loop below can at least enter once\n+ if self.args.all_pipelines:\n+ self.args.targets = [None]\n+\n ret = 0\n for target in self.args.targets:\n try:\n", "issue": "repro: --all-pipelines doesn't do anything.\n\n", "before_files": [{"content": "import os\n\nfrom dvc.command.base import CmdBase\nfrom dvc.command.status import CmdDataStatus\nfrom dvc.exceptions import DvcException\n\n\nclass CmdRepro(CmdBase):\n def run(self):\n recursive = not self.args.single_item\n saved_dir = os.path.realpath(os.curdir)\n if self.args.cwd:\n os.chdir(self.args.cwd)\n\n ret = 0\n for target in self.args.targets:\n try:\n stages = self.project.reproduce(\n target,\n recursive=recursive,\n force=self.args.force,\n dry=self.args.dry,\n interactive=self.args.interactive,\n pipeline=self.args.pipeline,\n all_pipelines=self.args.all_pipelines,\n ignore_build_cache=self.args.ignore_build_cache)\n\n if len(stages) == 0:\n self.project.logger.info(CmdDataStatus.UP_TO_DATE_MSG)\n\n if self.args.metrics:\n self.project.metrics_show()\n except DvcException as ex:\n msg = 'Failed to reproduce \\'{}\\''.format(target)\n self.project.logger.error(msg, ex)\n ret = 1\n break\n\n os.chdir(saved_dir)\n return ret\n", "path": "dvc/command/repro.py"}], "after_files": [{"content": "import os\n\nfrom dvc.command.base import CmdBase\nfrom dvc.command.status import CmdDataStatus\nfrom dvc.exceptions import DvcException\n\n\nclass CmdRepro(CmdBase):\n def run(self):\n recursive = not self.args.single_item\n saved_dir = os.path.realpath(os.curdir)\n if self.args.cwd:\n os.chdir(self.args.cwd)\n\n # Dirty hack so the for loop below can at least enter once\n if self.args.all_pipelines:\n self.args.targets = [None]\n\n ret = 0\n for target in self.args.targets:\n try:\n stages = self.project.reproduce(\n target,\n recursive=recursive,\n force=self.args.force,\n dry=self.args.dry,\n interactive=self.args.interactive,\n pipeline=self.args.pipeline,\n all_pipelines=self.args.all_pipelines,\n ignore_build_cache=self.args.ignore_build_cache)\n\n if len(stages) == 0:\n self.project.logger.info(CmdDataStatus.UP_TO_DATE_MSG)\n\n if self.args.metrics:\n self.project.metrics_show()\n except DvcException as ex:\n msg = 'Failed to reproduce \\'{}\\''.format(target)\n self.project.logger.error(msg, ex)\n ret = 1\n break\n\n os.chdir(saved_dir)\n return ret\n", "path": "dvc/command/repro.py"}]}
| 594 | 115 |
gh_patches_debug_3798
|
rasdani/github-patches
|
git_diff
|
Parsl__parsl-618
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error building docs with new env
```
Running Sphinx v1.8.1
loading pickled environment... failed: build environment version not current
Theme error:
sphinx_rtd_theme is no longer a hard dependency since version 1.4.0. Please install it manually.(pip install sphinx_rtd_theme)
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import setup, find_packages
2
3 with open('parsl/version.py') as f:
4 exec(f.read())
5
6 with open('requirements.txt') as f:
7 install_requires = f.readlines()
8
9 setup(
10 name='parsl',
11 version=VERSION,
12 description='Simple data dependent workflows in Python',
13 long_description='Simple parallel workflows system for Python',
14 url='https://github.com/Parsl/parsl',
15 author='The Parsl Team',
16 author_email='[email protected]',
17 license='Apache 2.0',
18 download_url='https://github.com/Parsl/parsl/archive/{}.tar.gz'.format(VERSION),
19 package_data={'': ['LICENSE']},
20 packages=find_packages(),
21 install_requires=install_requires,
22 scripts = ['parsl/executors/high_throughput/process_worker_pool.py',
23 'parsl/executors/extreme_scale/mpi_worker_pool.py'],
24 extras_require = {
25 'db_logging' : ['CMRESHandler', 'psutil', 'sqlalchemy'],
26 'aws' : ['boto3'],
27 'jetstream' : ['python-novaclient'],
28 'extreme_scale' : ['mpi4py'],
29 'docs' : ['nbsphinx'],
30 'google_cloud' : ['google-auth', 'google-api-python-client']
31 },
32 classifiers = [
33 # Maturity
34 'Development Status :: 3 - Alpha',
35 # Intended audience
36 'Intended Audience :: Developers',
37 # Licence, must match with licence above
38 'License :: OSI Approved :: Apache Software License',
39 # Python versions supported
40 'Programming Language :: Python :: 3.5',
41 'Programming Language :: Python :: 3.6',
42 ],
43 keywords=['Workflows', 'Scientific computing'],
44 )
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,7 @@
'aws' : ['boto3'],
'jetstream' : ['python-novaclient'],
'extreme_scale' : ['mpi4py'],
- 'docs' : ['nbsphinx'],
+ 'docs' : ['nbsphinx', 'sphinx_rtd_theme'],
'google_cloud' : ['google-auth', 'google-api-python-client']
},
classifiers = [
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -26,7 +26,7 @@\n 'aws' : ['boto3'],\n 'jetstream' : ['python-novaclient'],\n 'extreme_scale' : ['mpi4py'],\n- 'docs' : ['nbsphinx'],\n+ 'docs' : ['nbsphinx', 'sphinx_rtd_theme'],\n 'google_cloud' : ['google-auth', 'google-api-python-client']\n },\n classifiers = [\n", "issue": "Error building docs with new env\n\r\n```\r\nRunning Sphinx v1.8.1\r\nloading pickled environment... failed: build environment version not current\r\n\r\nTheme error:\r\nsphinx_rtd_theme is no longer a hard dependency since version 1.4.0. Please install it manually.(pip install sphinx_rtd_theme)\r\n```\n", "before_files": [{"content": "from setuptools import setup, find_packages\n\nwith open('parsl/version.py') as f:\n exec(f.read())\n\nwith open('requirements.txt') as f:\n install_requires = f.readlines()\n\nsetup(\n name='parsl',\n version=VERSION,\n description='Simple data dependent workflows in Python',\n long_description='Simple parallel workflows system for Python',\n url='https://github.com/Parsl/parsl',\n author='The Parsl Team',\n author_email='[email protected]',\n license='Apache 2.0',\n download_url='https://github.com/Parsl/parsl/archive/{}.tar.gz'.format(VERSION),\n package_data={'': ['LICENSE']},\n packages=find_packages(),\n install_requires=install_requires,\n scripts = ['parsl/executors/high_throughput/process_worker_pool.py',\n 'parsl/executors/extreme_scale/mpi_worker_pool.py'],\n extras_require = {\n 'db_logging' : ['CMRESHandler', 'psutil', 'sqlalchemy'],\n 'aws' : ['boto3'],\n 'jetstream' : ['python-novaclient'],\n 'extreme_scale' : ['mpi4py'],\n 'docs' : ['nbsphinx'],\n 'google_cloud' : ['google-auth', 'google-api-python-client']\n },\n classifiers = [\n # Maturity\n 'Development Status :: 3 - Alpha',\n # Intended audience\n 'Intended Audience :: Developers',\n # Licence, must match with licence above\n 'License :: OSI Approved :: Apache Software License',\n # Python versions supported\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n ],\n keywords=['Workflows', 'Scientific computing'],\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import setup, find_packages\n\nwith open('parsl/version.py') as f:\n exec(f.read())\n\nwith open('requirements.txt') as f:\n install_requires = f.readlines()\n\nsetup(\n name='parsl',\n version=VERSION,\n description='Simple data dependent workflows in Python',\n long_description='Simple parallel workflows system for Python',\n url='https://github.com/Parsl/parsl',\n author='The Parsl Team',\n author_email='[email protected]',\n license='Apache 2.0',\n download_url='https://github.com/Parsl/parsl/archive/{}.tar.gz'.format(VERSION),\n package_data={'': ['LICENSE']},\n packages=find_packages(),\n install_requires=install_requires,\n scripts = ['parsl/executors/high_throughput/process_worker_pool.py',\n 'parsl/executors/extreme_scale/mpi_worker_pool.py'],\n extras_require = {\n 'db_logging' : ['CMRESHandler', 'psutil', 'sqlalchemy'],\n 'aws' : ['boto3'],\n 'jetstream' : ['python-novaclient'],\n 'extreme_scale' : ['mpi4py'],\n 'docs' : ['nbsphinx', 'sphinx_rtd_theme'],\n 'google_cloud' : ['google-auth', 'google-api-python-client']\n },\n classifiers = [\n # Maturity\n 'Development Status :: 3 - Alpha',\n # Intended audience\n 'Intended Audience :: Developers',\n # Licence, must match with licence above\n 'License :: OSI Approved :: Apache Software License',\n # Python versions supported\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n ],\n keywords=['Workflows', 'Scientific computing'],\n)\n", "path": "setup.py"}]}
| 786 | 117 |