problem_id (string, 18-22 chars) | source (string, 1 class) | task_type (string, 1 class) | in_source_id (string, 13-58 chars) | prompt (string, 1.1k-10.2k chars) | golden_diff (string, 151-4.94k chars) | verification_info (string, 582-21k chars) | num_tokens (int64, 271-2.05k) | num_tokens_diff (int64, 47-1.02k)
---|---|---|---|---|---|---|---|---|
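Each record below is one flattened row of this table: the nine cells appear in the header order, separated by lines containing only `|`, and the long `prompt`, `golden_diff`, and `verification_info` cells span many lines. The data is easier to work with programmatically; a minimal sketch using the `datasets` library follows. The Hub id `rasdani/github-patches` is assumed from the `source` column (it may differ from the actual dataset id), the split name is assumed, and the `verification_info` keys are taken from the records shown further down.

```python
# Minimal sketch, not a verified loader: assumes the rows shown below are
# hosted on the Hugging Face Hub under "rasdani/github-patches" (the value of
# the `source` column) with a "train" split; adjust both if they differ.
import json

from datasets import load_dataset

ds = load_dataset("rasdani/github-patches", split="train")

row = ds[0]
print(row["problem_id"])            # e.g. "gh_patches_debug_33176"
print(row["in_source_id"])          # upstream reference, e.g. "coala__coala-5814"
print(row["num_tokens"], row["num_tokens_diff"])

# `prompt` holds the issue text plus the relevant file segments;
# `golden_diff` holds the reference patch in `git diff` format.
print(row["prompt"][:300])
print(row["golden_diff"][:300])

# `verification_info` is a JSON string carrying the golden diff, the issue
# text, and the before/after file contents (keys seen in the records below).
info = json.loads(row["verification_info"])
print(sorted(info.keys()))          # e.g. ['after_files', 'before_files', 'golden_diff', 'issue']
```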
gh_patches_debug_33176
|
rasdani/github-patches
|
git_diff
|
coala__coala-5814
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Imported classes should be arranged lexicographically
Classes being imported in `coala/coalib/bearlib/languages/__init__.py` file should be arranged alphabetically.
A good newcomer issue.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `coalib/bearlib/languages/__init__.py`
Content:
```
1 """
2 This directory holds means to get generic information for specific languages.
3 """
4
5 # Start ignoring PyUnusedCodeBear
6 from .Language import Language
7 from .Language import Languages
8
9 from .definitions.Unknown import Unknown
10 from .definitions.antlr import antlr
11 from .definitions.Bash import Bash
12 from .definitions.C import C
13 from .definitions.CPP import CPP
14 from .definitions.CSharp import CSharp
15 from .definitions.CSS import CSS
16 from .definitions.D import D
17 from .definitions.Fortran import Fortran
18 from .definitions.Golang import Golang
19 from .definitions.GraphQL import GraphQL
20 from .definitions.html import HTML
21 from .definitions.Java import Java
22 from .definitions.JavaScript import JavaScript
23 from .definitions.JSON import JSON
24 from .definitions.JSP import JSP
25 from .definitions.KornShell import KornShell
26 from .definitions.m4 import m4
27 from .definitions.Matlab import Matlab
28 from .definitions.Markdown import Markdown
29 from .definitions.ObjectiveC import ObjectiveC
30 from .definitions.PHP import PHP
31 from .definitions.PLSQL import PLSQL
32 from .definitions.PowerShell import PowerShell
33 from .definitions.Python import Python
34 from .definitions.Ruby import Ruby
35 from .definitions.Scala import Scala
36 from .definitions.Swift import Swift
37 from .definitions.Tcl import Tcl
38 from .definitions.TinyBasic import TinyBasic
39 from .definitions.Vala import Vala
40 from .definitions.TypeScript import TypeScript
41 from .definitions.Shell import Shell
42 from .definitions.Jinja2 import Jinja2
43 from .definitions.VisualBasic import VisualBasic
44 from .definitions.XML import XML
45 from.definitions.ZShell import ZShell
46 # Stop ignoring PyUnusedCodeBear
47
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/coalib/bearlib/languages/__init__.py b/coalib/bearlib/languages/__init__.py
--- a/coalib/bearlib/languages/__init__.py
+++ b/coalib/bearlib/languages/__init__.py
@@ -7,6 +7,7 @@
from .Language import Languages
from .definitions.Unknown import Unknown
+
from .definitions.antlr import antlr
from .definitions.Bash import Bash
from .definitions.C import C
@@ -20,12 +21,13 @@
from .definitions.html import HTML
from .definitions.Java import Java
from .definitions.JavaScript import JavaScript
+from .definitions.Jinja2 import Jinja2
from .definitions.JSON import JSON
from .definitions.JSP import JSP
from .definitions.KornShell import KornShell
from .definitions.m4 import m4
-from .definitions.Matlab import Matlab
from .definitions.Markdown import Markdown
+from .definitions.Matlab import Matlab
from .definitions.ObjectiveC import ObjectiveC
from .definitions.PHP import PHP
from .definitions.PLSQL import PLSQL
@@ -33,14 +35,14 @@
from .definitions.Python import Python
from .definitions.Ruby import Ruby
from .definitions.Scala import Scala
+from .definitions.Shell import Shell
from .definitions.Swift import Swift
from .definitions.Tcl import Tcl
from .definitions.TinyBasic import TinyBasic
-from .definitions.Vala import Vala
from .definitions.TypeScript import TypeScript
-from .definitions.Shell import Shell
-from .definitions.Jinja2 import Jinja2
+from .definitions.Vala import Vala
from .definitions.VisualBasic import VisualBasic
from .definitions.XML import XML
-from.definitions.ZShell import ZShell
+from .definitions.ZShell import ZShell
+
# Stop ignoring PyUnusedCodeBear
|
{"golden_diff": "diff --git a/coalib/bearlib/languages/__init__.py b/coalib/bearlib/languages/__init__.py\n--- a/coalib/bearlib/languages/__init__.py\n+++ b/coalib/bearlib/languages/__init__.py\n@@ -7,6 +7,7 @@\n from .Language import Languages\n \n from .definitions.Unknown import Unknown\n+\n from .definitions.antlr import antlr\n from .definitions.Bash import Bash\n from .definitions.C import C\n@@ -20,12 +21,13 @@\n from .definitions.html import HTML\n from .definitions.Java import Java\n from .definitions.JavaScript import JavaScript\n+from .definitions.Jinja2 import Jinja2\n from .definitions.JSON import JSON\n from .definitions.JSP import JSP\n from .definitions.KornShell import KornShell\n from .definitions.m4 import m4\n-from .definitions.Matlab import Matlab\n from .definitions.Markdown import Markdown\n+from .definitions.Matlab import Matlab\n from .definitions.ObjectiveC import ObjectiveC\n from .definitions.PHP import PHP\n from .definitions.PLSQL import PLSQL\n@@ -33,14 +35,14 @@\n from .definitions.Python import Python\n from .definitions.Ruby import Ruby\n from .definitions.Scala import Scala\n+from .definitions.Shell import Shell\n from .definitions.Swift import Swift\n from .definitions.Tcl import Tcl\n from .definitions.TinyBasic import TinyBasic\n-from .definitions.Vala import Vala\n from .definitions.TypeScript import TypeScript\n-from .definitions.Shell import Shell\n-from .definitions.Jinja2 import Jinja2\n+from .definitions.Vala import Vala\n from .definitions.VisualBasic import VisualBasic\n from .definitions.XML import XML\n-from.definitions.ZShell import ZShell\n+from .definitions.ZShell import ZShell\n+\n # Stop ignoring PyUnusedCodeBear\n", "issue": "Imported classes should be arranged lexicographically\nClasses being imported in `coala/coalib/bearlib/languages/__init__.py` file should be arranged alphabetically.\r\n\r\nA good newcomer issue. 
\n", "before_files": [{"content": "\"\"\"\nThis directory holds means to get generic information for specific languages.\n\"\"\"\n\n# Start ignoring PyUnusedCodeBear\nfrom .Language import Language\nfrom .Language import Languages\n\nfrom .definitions.Unknown import Unknown\nfrom .definitions.antlr import antlr\nfrom .definitions.Bash import Bash\nfrom .definitions.C import C\nfrom .definitions.CPP import CPP\nfrom .definitions.CSharp import CSharp\nfrom .definitions.CSS import CSS\nfrom .definitions.D import D\nfrom .definitions.Fortran import Fortran\nfrom .definitions.Golang import Golang\nfrom .definitions.GraphQL import GraphQL\nfrom .definitions.html import HTML\nfrom .definitions.Java import Java\nfrom .definitions.JavaScript import JavaScript\nfrom .definitions.JSON import JSON\nfrom .definitions.JSP import JSP\nfrom .definitions.KornShell import KornShell\nfrom .definitions.m4 import m4\nfrom .definitions.Matlab import Matlab\nfrom .definitions.Markdown import Markdown\nfrom .definitions.ObjectiveC import ObjectiveC\nfrom .definitions.PHP import PHP\nfrom .definitions.PLSQL import PLSQL\nfrom .definitions.PowerShell import PowerShell\nfrom .definitions.Python import Python\nfrom .definitions.Ruby import Ruby\nfrom .definitions.Scala import Scala\nfrom .definitions.Swift import Swift\nfrom .definitions.Tcl import Tcl\nfrom .definitions.TinyBasic import TinyBasic\nfrom .definitions.Vala import Vala\nfrom .definitions.TypeScript import TypeScript\nfrom .definitions.Shell import Shell\nfrom .definitions.Jinja2 import Jinja2\nfrom .definitions.VisualBasic import VisualBasic\nfrom .definitions.XML import XML\nfrom.definitions.ZShell import ZShell\n# Stop ignoring PyUnusedCodeBear\n", "path": "coalib/bearlib/languages/__init__.py"}], "after_files": [{"content": "\"\"\"\nThis directory holds means to get generic information for specific languages.\n\"\"\"\n\n# Start ignoring PyUnusedCodeBear\nfrom .Language import Language\nfrom .Language import Languages\n\nfrom .definitions.Unknown import Unknown\n\nfrom .definitions.antlr import antlr\nfrom .definitions.Bash import Bash\nfrom .definitions.C import C\nfrom .definitions.CPP import CPP\nfrom .definitions.CSharp import CSharp\nfrom .definitions.CSS import CSS\nfrom .definitions.D import D\nfrom .definitions.Fortran import Fortran\nfrom .definitions.Golang import Golang\nfrom .definitions.GraphQL import GraphQL\nfrom .definitions.html import HTML\nfrom .definitions.Java import Java\nfrom .definitions.JavaScript import JavaScript\nfrom .definitions.Jinja2 import Jinja2\nfrom .definitions.JSON import JSON\nfrom .definitions.JSP import JSP\nfrom .definitions.KornShell import KornShell\nfrom .definitions.m4 import m4\nfrom .definitions.Markdown import Markdown\nfrom .definitions.Matlab import Matlab\nfrom .definitions.ObjectiveC import ObjectiveC\nfrom .definitions.PHP import PHP\nfrom .definitions.PLSQL import PLSQL\nfrom .definitions.PowerShell import PowerShell\nfrom .definitions.Python import Python\nfrom .definitions.Ruby import Ruby\nfrom .definitions.Scala import Scala\nfrom .definitions.Shell import Shell\nfrom .definitions.Swift import Swift\nfrom .definitions.Tcl import Tcl\nfrom .definitions.TinyBasic import TinyBasic\nfrom .definitions.TypeScript import TypeScript\nfrom .definitions.Vala import Vala\nfrom .definitions.VisualBasic import VisualBasic\nfrom .definitions.XML import XML\nfrom .definitions.ZShell import ZShell\n\n# Stop ignoring PyUnusedCodeBear\n", "path": "coalib/bearlib/languages/__init__.py"}]}
| 746 | 408 |
gh_patches_debug_232
|
rasdani/github-patches
|
git_diff
|
yt-project__yt-1532
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
AHF answer tests are flaky
We're seeing random failures from the AHF answer tests on some PRs.
See e.g. https://tests.yt-project.org/job/yt_py3_git/414/.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `yt/frontends/ahf/data_structures.py`
Content:
```
1 """
2 AHF data structures
3
4
5
6 """
7
8 #-----------------------------------------------------------------------------
9 # Copyright (c) 2017, yt Development Team.
10 #
11 # Distributed under the terms of the Modified BSD License.
12 #
13 # The full license is in the file COPYING.txt, distributed with this software.
14 #-----------------------------------------------------------------------------
15
16 import glob
17 import os
18 import stat
19
20 import numpy as np
21
22 from yt.data_objects.static_output import \
23 Dataset, \
24 ParticleFile
25 from yt.funcs import \
26 setdefaultattr
27 from yt.geometry.particle_geometry_handler import \
28 ParticleIndex
29 from yt.utilities.cosmology import \
30 Cosmology
31
32 from .fields import AHFHalosFieldInfo
33
34
35 class AHFHalosFile(ParticleFile):
36 def __init__(self, ds, io, filename, file_id):
37 root, _ = os.path.splitext(filename)
38 candidates = glob.glob(root + '*.AHF_halos')
39 if len(candidates) == 1:
40 filename = candidates[0]
41 else:
42 raise ValueError('Too many AHF_halos files.')
43 self.col_names = self._read_column_names(filename)
44 super(AHFHalosFile, self).__init__(ds, io, filename, file_id)
45
46 def read_data(self, usecols=None):
47 return np.genfromtxt(self.filename, names=self.col_names,
48 usecols=usecols)
49
50 def _read_column_names(self, filename):
51 with open(filename) as f:
52 line = f.readline()
53 # Remove leading '#'
54 line = line[1:]
55 names = line.split()
56 # Remove trailing '()'
57 names = [name.split('(')[0] for name in names]
58 return names
59
60
61 class AHFHalosDataset(Dataset):
62 _index_class = ParticleIndex
63 _file_class = AHFHalosFile
64 _field_info_class = AHFHalosFieldInfo
65
66 def __init__(self, filename, dataset_type='ahf',
67 n_ref=16, over_refine_factor=1,
68 units_override=None, unit_system='cgs',
69 hubble_constant=1.0):
70 root, _ = os.path.splitext(filename)
71 self.log_filename = root + '.log'
72 self.hubble_constant = hubble_constant
73
74 self.n_ref = n_ref
75 self.over_refine_factor = over_refine_factor
76 super(AHFHalosDataset, self).__init__(
77 filename, dataset_type=dataset_type,
78 units_override=units_override, unit_system=unit_system
79 )
80
81 def _set_code_unit_attributes(self):
82 setdefaultattr(self, 'length_unit', self.quan(1.0, 'kpccm/h'))
83 setdefaultattr(self, 'mass_unit', self.quan(1.0, 'Msun/h'))
84 setdefaultattr(self, 'time_unit', self.quan(1.0, 's'))
85 setdefaultattr(self, 'velocity_unit', self.quan(1.0, 'km/s'))
86
87 def _parse_parameter_file(self):
88 # Read all parameters.
89 simu = self._read_log_simu()
90 param = self._read_parameter()
91
92 # Set up general information.
93 self.filename_template = self.parameter_filename
94 self.file_count = 1
95 self.parameters.update(param)
96 self.particle_types = ('halos')
97 self.particle_types_raw = ('halos')
98 self.unique_identifier = \
99 int(os.stat(self.parameter_filename)[stat.ST_CTIME])
100
101 # Set up geometrical information.
102 self.refine_by = 2
103 self.dimensionality = 3
104 nz = 1 << self.over_refine_factor
105 self.domain_dimensions = np.ones(self.dimensionality, "int32") * nz
106 self.domain_left_edge = np.array([0.0, 0.0, 0.0])
107 # Note that boxsize is in Mpc but particle positions are in kpc.
108 self.domain_right_edge = np.array([simu['boxsize']] * 3) * 1000
109 self.periodicity = (True, True, True)
110
111 # Set up cosmological information.
112 self.cosmological_simulation = 1
113 self.current_redshift = param['z']
114 self.omega_lambda = simu['lambda0']
115 self.omega_matter = simu['omega0']
116 cosmo = Cosmology(self.hubble_constant,
117 self.omega_matter, self.omega_lambda)
118 self.current_time = cosmo.hubble_time(param['z']).in_units('s')
119
120 @classmethod
121 def _is_valid(self, *args, **kwargs):
122 filename = args[0]
123 if not filename.endswith('.parameter'):
124 return False
125 with open(filename, 'r') as f:
126 if f.readlines()[11].startswith('AHF'):
127 return True
128 return False
129
130 # Helper methods
131
132 def _read_log_simu(self):
133 simu = {}
134 with open(self.log_filename) as f:
135 for l in f:
136 if l.startswith('simu.'):
137 name, val = l.split(':')
138 key = name.strip().split('.')[1]
139 try:
140 val = float(val)
141 except:
142 val = float.fromhex(val)
143 simu[key] = val
144 return simu
145
146 def _read_parameter(self):
147 param = {}
148 with open(self.parameter_filename) as f:
149 for l in f:
150 words = l.split()
151 if len(words) == 2:
152 key, val = words
153 try:
154 val = float(val)
155 param[key] = val
156 except:
157 pass
158 return param
159
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/yt/frontends/ahf/data_structures.py b/yt/frontends/ahf/data_structures.py
--- a/yt/frontends/ahf/data_structures.py
+++ b/yt/frontends/ahf/data_structures.py
@@ -156,3 +156,7 @@
except:
pass
return param
+
+ @property
+ def _skip_cache(self):
+ return True
|
{"golden_diff": "diff --git a/yt/frontends/ahf/data_structures.py b/yt/frontends/ahf/data_structures.py\n--- a/yt/frontends/ahf/data_structures.py\n+++ b/yt/frontends/ahf/data_structures.py\n@@ -156,3 +156,7 @@\n except:\n pass\n return param\n+\n+ @property\n+ def _skip_cache(self):\n+ return True\n", "issue": "AHF answer tests are flaky\nWe're seeing random failures from the AHF answer tests on some PRs.\r\n\r\nSee e.g. https://tests.yt-project.org/job/yt_py3_git/414/.\r\n\r\n\n", "before_files": [{"content": "\"\"\"\nAHF data structures\n\n\n\n\"\"\"\n\n#-----------------------------------------------------------------------------\n# Copyright (c) 2017, yt Development Team.\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\nimport glob\nimport os\nimport stat\n\nimport numpy as np\n\nfrom yt.data_objects.static_output import \\\n Dataset, \\\n ParticleFile\nfrom yt.funcs import \\\n setdefaultattr\nfrom yt.geometry.particle_geometry_handler import \\\n ParticleIndex\nfrom yt.utilities.cosmology import \\\n Cosmology\n\nfrom .fields import AHFHalosFieldInfo\n\n\nclass AHFHalosFile(ParticleFile):\n def __init__(self, ds, io, filename, file_id):\n root, _ = os.path.splitext(filename)\n candidates = glob.glob(root + '*.AHF_halos')\n if len(candidates) == 1:\n filename = candidates[0]\n else:\n raise ValueError('Too many AHF_halos files.')\n self.col_names = self._read_column_names(filename)\n super(AHFHalosFile, self).__init__(ds, io, filename, file_id)\n\n def read_data(self, usecols=None):\n return np.genfromtxt(self.filename, names=self.col_names,\n usecols=usecols)\n\n def _read_column_names(self, filename):\n with open(filename) as f:\n line = f.readline()\n # Remove leading '#'\n line = line[1:]\n names = line.split()\n # Remove trailing '()'\n names = [name.split('(')[0] for name in names]\n return names\n\n\nclass AHFHalosDataset(Dataset):\n _index_class = ParticleIndex\n _file_class = AHFHalosFile\n _field_info_class = AHFHalosFieldInfo\n\n def __init__(self, filename, dataset_type='ahf',\n n_ref=16, over_refine_factor=1,\n units_override=None, unit_system='cgs',\n hubble_constant=1.0):\n root, _ = os.path.splitext(filename)\n self.log_filename = root + '.log'\n self.hubble_constant = hubble_constant\n\n self.n_ref = n_ref\n self.over_refine_factor = over_refine_factor\n super(AHFHalosDataset, self).__init__(\n filename, dataset_type=dataset_type,\n units_override=units_override, unit_system=unit_system\n )\n\n def _set_code_unit_attributes(self):\n setdefaultattr(self, 'length_unit', self.quan(1.0, 'kpccm/h'))\n setdefaultattr(self, 'mass_unit', self.quan(1.0, 'Msun/h'))\n setdefaultattr(self, 'time_unit', self.quan(1.0, 's'))\n setdefaultattr(self, 'velocity_unit', self.quan(1.0, 'km/s'))\n\n def _parse_parameter_file(self):\n # Read all parameters.\n simu = self._read_log_simu()\n param = self._read_parameter()\n\n # Set up general information.\n self.filename_template = self.parameter_filename\n self.file_count = 1\n self.parameters.update(param)\n self.particle_types = ('halos')\n self.particle_types_raw = ('halos')\n self.unique_identifier = \\\n int(os.stat(self.parameter_filename)[stat.ST_CTIME])\n\n # Set up geometrical information.\n self.refine_by = 2\n self.dimensionality = 3\n nz = 1 << self.over_refine_factor\n self.domain_dimensions = np.ones(self.dimensionality, \"int32\") * nz\n 
self.domain_left_edge = np.array([0.0, 0.0, 0.0])\n # Note that boxsize is in Mpc but particle positions are in kpc.\n self.domain_right_edge = np.array([simu['boxsize']] * 3) * 1000\n self.periodicity = (True, True, True)\n\n # Set up cosmological information.\n self.cosmological_simulation = 1\n self.current_redshift = param['z']\n self.omega_lambda = simu['lambda0']\n self.omega_matter = simu['omega0']\n cosmo = Cosmology(self.hubble_constant,\n self.omega_matter, self.omega_lambda)\n self.current_time = cosmo.hubble_time(param['z']).in_units('s')\n\n @classmethod\n def _is_valid(self, *args, **kwargs):\n filename = args[0]\n if not filename.endswith('.parameter'):\n return False\n with open(filename, 'r') as f:\n if f.readlines()[11].startswith('AHF'):\n return True\n return False\n\n # Helper methods\n\n def _read_log_simu(self):\n simu = {}\n with open(self.log_filename) as f:\n for l in f:\n if l.startswith('simu.'):\n name, val = l.split(':')\n key = name.strip().split('.')[1]\n try:\n val = float(val)\n except:\n val = float.fromhex(val)\n simu[key] = val\n return simu\n\n def _read_parameter(self):\n param = {}\n with open(self.parameter_filename) as f:\n for l in f:\n words = l.split()\n if len(words) == 2:\n key, val = words\n try:\n val = float(val)\n param[key] = val\n except:\n pass\n return param\n", "path": "yt/frontends/ahf/data_structures.py"}], "after_files": [{"content": "\"\"\"\nAHF data structures\n\n\n\n\"\"\"\n\n#-----------------------------------------------------------------------------\n# Copyright (c) 2017, yt Development Team.\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\nimport glob\nimport os\nimport stat\n\nimport numpy as np\n\nfrom yt.data_objects.static_output import \\\n Dataset, \\\n ParticleFile\nfrom yt.funcs import \\\n setdefaultattr\nfrom yt.geometry.particle_geometry_handler import \\\n ParticleIndex\nfrom yt.utilities.cosmology import \\\n Cosmology\n\nfrom .fields import AHFHalosFieldInfo\n\n\nclass AHFHalosFile(ParticleFile):\n def __init__(self, ds, io, filename, file_id):\n root, _ = os.path.splitext(filename)\n candidates = glob.glob(root + '*.AHF_halos')\n if len(candidates) == 1:\n filename = candidates[0]\n else:\n raise ValueError('Too many AHF_halos files.')\n self.col_names = self._read_column_names(filename)\n super(AHFHalosFile, self).__init__(ds, io, filename, file_id)\n\n def read_data(self, usecols=None):\n return np.genfromtxt(self.filename, names=self.col_names,\n usecols=usecols)\n\n def _read_column_names(self, filename):\n with open(filename) as f:\n line = f.readline()\n # Remove leading '#'\n line = line[1:]\n names = line.split()\n # Remove trailing '()'\n names = [name.split('(')[0] for name in names]\n return names\n\n\nclass AHFHalosDataset(Dataset):\n _index_class = ParticleIndex\n _file_class = AHFHalosFile\n _field_info_class = AHFHalosFieldInfo\n\n def __init__(self, filename, dataset_type='ahf',\n n_ref=16, over_refine_factor=1,\n units_override=None, unit_system='cgs',\n hubble_constant=1.0):\n root, _ = os.path.splitext(filename)\n self.log_filename = root + '.log'\n self.hubble_constant = hubble_constant\n\n self.n_ref = n_ref\n self.over_refine_factor = over_refine_factor\n super(AHFHalosDataset, self).__init__(\n filename, dataset_type=dataset_type,\n units_override=units_override, unit_system=unit_system\n )\n\n def 
_set_code_unit_attributes(self):\n setdefaultattr(self, 'length_unit', self.quan(1.0, 'kpccm/h'))\n setdefaultattr(self, 'mass_unit', self.quan(1.0, 'Msun/h'))\n setdefaultattr(self, 'time_unit', self.quan(1.0, 's'))\n setdefaultattr(self, 'velocity_unit', self.quan(1.0, 'km/s'))\n\n def _parse_parameter_file(self):\n # Read all parameters.\n simu = self._read_log_simu()\n param = self._read_parameter()\n\n # Set up general information.\n self.filename_template = self.parameter_filename\n self.file_count = 1\n self.parameters.update(param)\n self.particle_types = ('halos')\n self.particle_types_raw = ('halos')\n self.unique_identifier = \\\n int(os.stat(self.parameter_filename)[stat.ST_CTIME])\n\n # Set up geometrical information.\n self.refine_by = 2\n self.dimensionality = 3\n nz = 1 << self.over_refine_factor\n self.domain_dimensions = np.ones(self.dimensionality, \"int32\") * nz\n self.domain_left_edge = np.array([0.0, 0.0, 0.0])\n # Note that boxsize is in Mpc but particle positions are in kpc.\n self.domain_right_edge = np.array([simu['boxsize']] * 3) * 1000\n self.periodicity = (True, True, True)\n\n # Set up cosmological information.\n self.cosmological_simulation = 1\n self.current_redshift = param['z']\n self.omega_lambda = simu['lambda0']\n self.omega_matter = simu['omega0']\n cosmo = Cosmology(self.hubble_constant,\n self.omega_matter, self.omega_lambda)\n self.current_time = cosmo.hubble_time(param['z']).in_units('s')\n\n @classmethod\n def _is_valid(self, *args, **kwargs):\n filename = args[0]\n if not filename.endswith('.parameter'):\n return False\n with open(filename, 'r') as f:\n if f.readlines()[11].startswith('AHF'):\n return True\n return False\n\n # Helper methods\n\n def _read_log_simu(self):\n simu = {}\n with open(self.log_filename) as f:\n for l in f:\n if l.startswith('simu.'):\n name, val = l.split(':')\n key = name.strip().split('.')[1]\n try:\n val = float(val)\n except:\n val = float.fromhex(val)\n simu[key] = val\n return simu\n\n def _read_parameter(self):\n param = {}\n with open(self.parameter_filename) as f:\n for l in f:\n words = l.split()\n if len(words) == 2:\n key, val = words\n try:\n val = float(val)\n param[key] = val\n except:\n pass\n return param\n\n @property\n def _skip_cache(self):\n return True\n", "path": "yt/frontends/ahf/data_structures.py"}]}
| 1,904 | 99 |
gh_patches_debug_5195
|
rasdani/github-patches
|
git_diff
|
DataDog__dd-trace-py-384
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Incorrect render patching in pyramid.
https://github.com/DataDog/dd-trace-py/blob/261136e112b23862a78308a2423e15364ae4aaa6/ddtrace/contrib/pyramid/trace.py#L31
Here we're removing request from kwargs but pyramid's render has a request kwarg so we need to keep it.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ddtrace/contrib/pyramid/trace.py`
Content:
```
1
2 # 3p
3 import logging
4 import pyramid.renderers
5 from pyramid.settings import asbool
6 import wrapt
7
8 # project
9 import ddtrace
10 from ...ext import http, AppTypes
11 from .constants import SETTINGS_SERVICE, SETTINGS_TRACE_ENABLED, SETTINGS_TRACER
12
13 log = logging.getLogger(__name__)
14
15 DD_TWEEN_NAME = 'ddtrace.contrib.pyramid:trace_tween_factory'
16 DD_SPAN = '_datadog_span'
17
18 def trace_pyramid(config):
19 config.include('ddtrace.contrib.pyramid')
20
21 def includeme(config):
22 # Add our tween just before the default exception handler
23 config.add_tween(DD_TWEEN_NAME, over=pyramid.tweens.EXCVIEW)
24 # ensure we only patch the renderer once.
25 if not isinstance(pyramid.renderers.RendererHelper.render, wrapt.ObjectProxy):
26 wrapt.wrap_function_wrapper('pyramid.renderers', 'RendererHelper.render', trace_render)
27
28
29 def trace_render(func, instance, args, kwargs):
30 # If the request is not traced, we do not trace
31 request = kwargs.pop('request', {})
32 if not request:
33 log.debug("No request passed to render, will not be traced")
34 return func(*args, **kwargs)
35 span = getattr(request, DD_SPAN, None)
36 if not span:
37 log.debug("No span found in request, will not be traced")
38 return func(*args, **kwargs)
39
40 tracer = span.tracer()
41 with tracer.trace('pyramid.render') as span:
42 span.span_type = http.TEMPLATE
43 return func(*args, **kwargs)
44
45 def trace_tween_factory(handler, registry):
46 # configuration
47 settings = registry.settings
48 service = settings.get(SETTINGS_SERVICE) or 'pyramid'
49 tracer = settings.get(SETTINGS_TRACER) or ddtrace.tracer
50 enabled = asbool(settings.get(SETTINGS_TRACE_ENABLED, tracer.enabled))
51
52 # set the service info
53 tracer.set_service_info(
54 service=service,
55 app="pyramid",
56 app_type=AppTypes.web)
57
58 if enabled:
59 # make a request tracing function
60 def trace_tween(request):
61 with tracer.trace('pyramid.request', service=service, resource='404') as span:
62 setattr(request, DD_SPAN, span) # used to find the tracer in templates
63 response = None
64 try:
65 response = handler(request)
66 except BaseException:
67 span.set_tag(http.STATUS_CODE, 500)
68 raise
69 finally:
70 span.span_type = http.TYPE
71 # set request tags
72 span.set_tag(http.URL, request.path)
73 span.set_tag(http.METHOD, request.method)
74 if request.matched_route:
75 span.resource = '{} {}'.format(request.method, request.matched_route.name)
76 span.set_tag('pyramid.route.name', request.matched_route.name)
77 # set response tags
78 if response:
79 span.set_tag(http.STATUS_CODE, response.status_code)
80 if 500 <= response.status_code < 600:
81 span.error = 1
82 return response
83 return trace_tween
84
85 # if timing support is not enabled, return the original handler
86 return handler
87
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ddtrace/contrib/pyramid/trace.py b/ddtrace/contrib/pyramid/trace.py
--- a/ddtrace/contrib/pyramid/trace.py
+++ b/ddtrace/contrib/pyramid/trace.py
@@ -28,7 +28,7 @@
def trace_render(func, instance, args, kwargs):
# If the request is not traced, we do not trace
- request = kwargs.pop('request', {})
+ request = kwargs.get('request', {})
if not request:
log.debug("No request passed to render, will not be traced")
return func(*args, **kwargs)
|
{"golden_diff": "diff --git a/ddtrace/contrib/pyramid/trace.py b/ddtrace/contrib/pyramid/trace.py\n--- a/ddtrace/contrib/pyramid/trace.py\n+++ b/ddtrace/contrib/pyramid/trace.py\n@@ -28,7 +28,7 @@\n \n def trace_render(func, instance, args, kwargs):\n # If the request is not traced, we do not trace\n- request = kwargs.pop('request', {})\n+ request = kwargs.get('request', {})\n if not request:\n log.debug(\"No request passed to render, will not be traced\")\n return func(*args, **kwargs)\n", "issue": "Incorrect render patching in pyramid.\nhttps://github.com/DataDog/dd-trace-py/blob/261136e112b23862a78308a2423e15364ae4aaa6/ddtrace/contrib/pyramid/trace.py#L31\r\n\r\nHere we're removing request from kwargs but pyramid's render has a request kwarg so we need to keep it.\n", "before_files": [{"content": "\n# 3p\nimport logging\nimport pyramid.renderers\nfrom pyramid.settings import asbool\nimport wrapt\n\n# project\nimport ddtrace\nfrom ...ext import http, AppTypes\nfrom .constants import SETTINGS_SERVICE, SETTINGS_TRACE_ENABLED, SETTINGS_TRACER\n\nlog = logging.getLogger(__name__)\n\nDD_TWEEN_NAME = 'ddtrace.contrib.pyramid:trace_tween_factory'\nDD_SPAN = '_datadog_span'\n\ndef trace_pyramid(config):\n config.include('ddtrace.contrib.pyramid')\n\ndef includeme(config):\n # Add our tween just before the default exception handler\n config.add_tween(DD_TWEEN_NAME, over=pyramid.tweens.EXCVIEW)\n # ensure we only patch the renderer once.\n if not isinstance(pyramid.renderers.RendererHelper.render, wrapt.ObjectProxy):\n wrapt.wrap_function_wrapper('pyramid.renderers', 'RendererHelper.render', trace_render)\n\n\ndef trace_render(func, instance, args, kwargs):\n # If the request is not traced, we do not trace\n request = kwargs.pop('request', {})\n if not request:\n log.debug(\"No request passed to render, will not be traced\")\n return func(*args, **kwargs)\n span = getattr(request, DD_SPAN, None)\n if not span:\n log.debug(\"No span found in request, will not be traced\")\n return func(*args, **kwargs)\n\n tracer = span.tracer()\n with tracer.trace('pyramid.render') as span:\n span.span_type = http.TEMPLATE\n return func(*args, **kwargs)\n\ndef trace_tween_factory(handler, registry):\n # configuration\n settings = registry.settings\n service = settings.get(SETTINGS_SERVICE) or 'pyramid'\n tracer = settings.get(SETTINGS_TRACER) or ddtrace.tracer\n enabled = asbool(settings.get(SETTINGS_TRACE_ENABLED, tracer.enabled))\n\n # set the service info\n tracer.set_service_info(\n service=service,\n app=\"pyramid\",\n app_type=AppTypes.web)\n\n if enabled:\n # make a request tracing function\n def trace_tween(request):\n with tracer.trace('pyramid.request', service=service, resource='404') as span:\n setattr(request, DD_SPAN, span) # used to find the tracer in templates\n response = None\n try:\n response = handler(request)\n except BaseException:\n span.set_tag(http.STATUS_CODE, 500)\n raise\n finally:\n span.span_type = http.TYPE\n # set request tags\n span.set_tag(http.URL, request.path)\n span.set_tag(http.METHOD, request.method)\n if request.matched_route:\n span.resource = '{} {}'.format(request.method, request.matched_route.name)\n span.set_tag('pyramid.route.name', request.matched_route.name)\n # set response tags\n if response:\n span.set_tag(http.STATUS_CODE, response.status_code)\n if 500 <= response.status_code < 600:\n span.error = 1\n return response\n return trace_tween\n\n # if timing support is not enabled, return the original handler\n return handler\n", "path": 
"ddtrace/contrib/pyramid/trace.py"}], "after_files": [{"content": "\n# 3p\nimport logging\nimport pyramid.renderers\nfrom pyramid.settings import asbool\nimport wrapt\n\n# project\nimport ddtrace\nfrom ...ext import http, AppTypes\nfrom .constants import SETTINGS_SERVICE, SETTINGS_TRACE_ENABLED, SETTINGS_TRACER\n\nlog = logging.getLogger(__name__)\n\nDD_TWEEN_NAME = 'ddtrace.contrib.pyramid:trace_tween_factory'\nDD_SPAN = '_datadog_span'\n\ndef trace_pyramid(config):\n config.include('ddtrace.contrib.pyramid')\n\ndef includeme(config):\n # Add our tween just before the default exception handler\n config.add_tween(DD_TWEEN_NAME, over=pyramid.tweens.EXCVIEW)\n # ensure we only patch the renderer once.\n if not isinstance(pyramid.renderers.RendererHelper.render, wrapt.ObjectProxy):\n wrapt.wrap_function_wrapper('pyramid.renderers', 'RendererHelper.render', trace_render)\n\n\ndef trace_render(func, instance, args, kwargs):\n # If the request is not traced, we do not trace\n request = kwargs.get('request', {})\n if not request:\n log.debug(\"No request passed to render, will not be traced\")\n return func(*args, **kwargs)\n span = getattr(request, DD_SPAN, None)\n if not span:\n log.debug(\"No span found in request, will not be traced\")\n return func(*args, **kwargs)\n\n tracer = span.tracer()\n with tracer.trace('pyramid.render') as span:\n span.span_type = http.TEMPLATE\n return func(*args, **kwargs)\n\ndef trace_tween_factory(handler, registry):\n # configuration\n settings = registry.settings\n service = settings.get(SETTINGS_SERVICE) or 'pyramid'\n tracer = settings.get(SETTINGS_TRACER) or ddtrace.tracer\n enabled = asbool(settings.get(SETTINGS_TRACE_ENABLED, tracer.enabled))\n\n # set the service info\n tracer.set_service_info(\n service=service,\n app=\"pyramid\",\n app_type=AppTypes.web)\n\n if enabled:\n # make a request tracing function\n def trace_tween(request):\n with tracer.trace('pyramid.request', service=service, resource='404') as span:\n setattr(request, DD_SPAN, span) # used to find the tracer in templates\n response = None\n try:\n response = handler(request)\n except BaseException:\n span.set_tag(http.STATUS_CODE, 500)\n raise\n finally:\n span.span_type = http.TYPE\n # set request tags\n span.set_tag(http.URL, request.path)\n span.set_tag(http.METHOD, request.method)\n if request.matched_route:\n span.resource = '{} {}'.format(request.method, request.matched_route.name)\n span.set_tag('pyramid.route.name', request.matched_route.name)\n # set response tags\n if response:\n span.set_tag(http.STATUS_CODE, response.status_code)\n if 500 <= response.status_code < 600:\n span.error = 1\n return response\n return trace_tween\n\n # if timing support is not enabled, return the original handler\n return handler\n", "path": "ddtrace/contrib/pyramid/trace.py"}]}
| 1,212 | 135 |
gh_patches_debug_540
|
rasdani/github-patches
|
git_diff
|
mlcommons__GaNDLF-628
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Port to Pandas 2.0
**Describe the bug**
when running `gandlf_run`, I am encountering:
`ERROR: 'DataFrame' object has no attribute 'append'`
**To Reproduce**
Train a model using `gandlf_run`.
I trained using `2d_rad_segmentation` data from `https://upenn.box.com/shared/static/y8162xkq1zz5555ye3pwadry2m2e39bs.zip` and the config file from samples in the repo `config_classification.yaml`
**Additional context**
- check the changelog of pandas [here](https://pandas.pydata.org/pandas-docs/stable/whatsnew/v2.0.0.html#removal-of-prior-version-deprecations-changes:~:text=Removed%20deprecated%20Series.append()%2C%20DataFrame.append()%2C%20use%20concat()%20instead%20(GH35407))
- The training runs successfully when downgrading `pandas` to `1.5.3`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 """The setup script."""
4
5
6 import sys, re
7 from setuptools import setup, find_packages
8 from setuptools.command.install import install
9 from setuptools.command.develop import develop
10 from setuptools.command.egg_info import egg_info
11
12 try:
13 with open("README.md") as readme_file:
14 readme = readme_file.read()
15 except Exception as error:
16 readme = "No README information found."
17 sys.stderr.write("Warning: Could not open '%s' due %s\n" % ("README.md", error))
18
19
20 class CustomInstallCommand(install):
21 def run(self):
22 install.run(self)
23
24
25 class CustomDevelopCommand(develop):
26 def run(self):
27 develop.run(self)
28
29
30 class CustomEggInfoCommand(egg_info):
31 def run(self):
32 egg_info.run(self)
33
34
35 try:
36 filepath = "GANDLF/version.py"
37 version_file = open(filepath)
38 (__version__,) = re.findall('__version__ = "(.*)"', version_file.read())
39
40 except Exception as error:
41 __version__ = "0.0.1"
42 sys.stderr.write("Warning: Could not open '%s' due %s\n" % (filepath, error))
43
44 requirements = [
45 "torch==1.13.1",
46 "black",
47 "numpy==1.22.0",
48 "scipy",
49 "SimpleITK!=2.0.*",
50 "SimpleITK!=2.2.1", # https://github.com/mlcommons/GaNDLF/issues/536
51 "torchvision",
52 "tqdm",
53 "torchio==0.18.75",
54 "pandas",
55 "scikit-learn>=0.23.2",
56 "scikit-image>=0.19.1",
57 "setuptools",
58 "seaborn",
59 "pyyaml",
60 "tiffslide",
61 "matplotlib",
62 "requests>=2.25.0",
63 "pytest",
64 "coverage",
65 "pytest-cov",
66 "psutil",
67 "medcam",
68 "opencv-python",
69 "torchmetrics==0.5.1", # newer versions have changed api for f1 invocation
70 "OpenPatchMiner==0.1.8",
71 "zarr==2.10.3",
72 "pydicom",
73 "onnx",
74 "torchinfo==1.7.0",
75 "segmentation-models-pytorch==0.3.2",
76 "ACSConv==0.1.1",
77 "docker",
78 "dicom-anonymizer",
79 "twine",
80 "zarr",
81 "keyring",
82 ]
83
84 if __name__ == "__main__":
85 setup(
86 name="GANDLF",
87 version=__version__,
88 author="MLCommons",
89 author_email="[email protected]",
90 python_requires=">=3.8",
91 packages=find_packages(),
92 cmdclass={
93 "install": CustomInstallCommand,
94 "develop": CustomDevelopCommand,
95 "egg_info": CustomEggInfoCommand,
96 },
97 scripts=[
98 "gandlf_run",
99 "gandlf_constructCSV",
100 "gandlf_collectStats",
101 "gandlf_patchMiner",
102 "gandlf_preprocess",
103 "gandlf_anonymizer",
104 "gandlf_verifyInstall",
105 "gandlf_configGenerator",
106 "gandlf_recoverConfig",
107 "gandlf_deploy",
108 "gandlf_optimizeModel",
109 ],
110 classifiers=[
111 "Development Status :: 3 - Alpha",
112 "Intended Audience :: Science/Research",
113 "License :: OSI Approved :: Apache Software License",
114 "Natural Language :: English",
115 "Operating System :: OS Independent",
116 "Programming Language :: Python :: 3.8",
117 "Programming Language :: Python :: 3.9",
118 "Programming Language :: Python :: 3.10",
119 "Topic :: Scientific/Engineering :: Medical Science Apps.",
120 ],
121 description=(
122 "PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging."
123 ),
124 install_requires=requirements,
125 license="Apache-2.0",
126 long_description=readme,
127 long_description_content_type="text/markdown",
128 include_package_data=True,
129 keywords="semantic, segmentation, regression, classification, data-augmentation, medical-imaging, clinical-workflows, deep-learning, pytorch",
130 zip_safe=False,
131 )
132
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -51,7 +51,7 @@
"torchvision",
"tqdm",
"torchio==0.18.75",
- "pandas",
+ "pandas<2.0.0",
"scikit-learn>=0.23.2",
"scikit-image>=0.19.1",
"setuptools",
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -51,7 +51,7 @@\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.18.75\",\n- \"pandas\",\n+ \"pandas<2.0.0\",\n \"scikit-learn>=0.23.2\",\n \"scikit-image>=0.19.1\",\n \"setuptools\",\n", "issue": "Port to Pandas 2.0\n**Describe the bug**\r\nwhen running `gandlf_run`, I am encountering:\r\n\r\n`ERROR: 'DataFrame' object has no attribute 'append'`\r\n\r\n**To Reproduce**\r\n\r\nTrain a model using `gandlf_run`.\r\n\r\nI trained using `2d_rad_segmentation` data from `https://upenn.box.com/shared/static/y8162xkq1zz5555ye3pwadry2m2e39bs.zip` and the config file from samples in the repo `config_classification.yaml`\r\n\r\n**Additional context**\r\n- check the changelog of pandas [here](https://pandas.pydata.org/pandas-docs/stable/whatsnew/v2.0.0.html#removal-of-prior-version-deprecations-changes:~:text=Removed%20deprecated%20Series.append()%2C%20DataFrame.append()%2C%20use%20concat()%20instead%20(GH35407))\r\n- The training runs successfully when downgrading `pandas` to `1.5.3`\r\n\r\n\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport sys, re\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\ntry:\n with open(\"README.md\") as readme_file:\n readme = readme_file.read()\nexcept Exception as error:\n readme = \"No README information found.\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (\"README.md\", error))\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (filepath, error))\n\nrequirements = [\n \"torch==1.13.1\",\n \"black\",\n \"numpy==1.22.0\",\n \"scipy\",\n \"SimpleITK!=2.0.*\",\n \"SimpleITK!=2.2.1\", # https://github.com/mlcommons/GaNDLF/issues/536\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.18.75\",\n \"pandas\",\n \"scikit-learn>=0.23.2\",\n \"scikit-image>=0.19.1\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"tiffslide\",\n \"matplotlib\",\n \"requests>=2.25.0\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics==0.5.1\", # newer versions have changed api for f1 invocation\n \"OpenPatchMiner==0.1.8\",\n \"zarr==2.10.3\",\n \"pydicom\",\n \"onnx\",\n \"torchinfo==1.7.0\",\n \"segmentation-models-pytorch==0.3.2\",\n \"ACSConv==0.1.1\",\n \"docker\",\n \"dicom-anonymizer\",\n \"twine\",\n \"zarr\",\n \"keyring\",\n]\n\nif __name__ == \"__main__\":\n setup(\n name=\"GANDLF\",\n version=__version__,\n author=\"MLCommons\",\n author_email=\"[email protected]\",\n python_requires=\">=3.8\",\n packages=find_packages(),\n cmdclass={\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n \"gandlf_verifyInstall\",\n \"gandlf_configGenerator\",\n 
\"gandlf_recoverConfig\",\n \"gandlf_deploy\",\n \"gandlf_optimizeModel\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps.\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"Apache-2.0\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging, clinical-workflows, deep-learning, pytorch\",\n zip_safe=False,\n )\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport sys, re\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\ntry:\n with open(\"README.md\") as readme_file:\n readme = readme_file.read()\nexcept Exception as error:\n readme = \"No README information found.\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (\"README.md\", error))\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (filepath, error))\n\nrequirements = [\n \"torch==1.13.1\",\n \"black\",\n \"numpy==1.22.0\",\n \"scipy\",\n \"SimpleITK!=2.0.*\",\n \"SimpleITK!=2.2.1\", # https://github.com/mlcommons/GaNDLF/issues/536\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.18.75\",\n \"pandas<2.0.0\",\n \"scikit-learn>=0.23.2\",\n \"scikit-image>=0.19.1\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"tiffslide\",\n \"matplotlib\",\n \"requests>=2.25.0\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics==0.5.1\", # newer versions have changed api for f1 invocation\n \"OpenPatchMiner==0.1.8\",\n \"zarr==2.10.3\",\n \"pydicom\",\n \"onnx\",\n \"torchinfo==1.7.0\",\n \"segmentation-models-pytorch==0.3.2\",\n \"ACSConv==0.1.1\",\n \"docker\",\n \"dicom-anonymizer\",\n \"twine\",\n \"zarr\",\n \"keyring\",\n]\n\nif __name__ == \"__main__\":\n setup(\n name=\"GANDLF\",\n version=__version__,\n author=\"MLCommons\",\n author_email=\"[email protected]\",\n python_requires=\">=3.8\",\n packages=find_packages(),\n cmdclass={\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n \"gandlf_verifyInstall\",\n \"gandlf_configGenerator\",\n \"gandlf_recoverConfig\",\n \"gandlf_deploy\",\n 
\"gandlf_optimizeModel\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps.\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"Apache-2.0\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging, clinical-workflows, deep-learning, pytorch\",\n zip_safe=False,\n )\n", "path": "setup.py"}]}
| 1,753 | 106 |
gh_patches_debug_16396
|
rasdani/github-patches
|
git_diff
|
bokeh__bokeh-2099
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
decimal.Decimal cannot be sent across session
When trying to give data of type decimal.Decimal as data sources to plots, the BokehJSONEncoder does tries to serialise the Decimal object with the standard built-in JSON encoder. This causes an exception "Decimal('...') not JSON serializable". The solution is to edit BokehJSONEncoder.trasnform_python_types to account for this possibility. I have tested the solution and it works.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bokeh/protocol.py`
Content:
```
1 from __future__ import absolute_import
2
3 import json
4 import logging
5 import time
6 import datetime as dt
7 import calendar
8
9 import numpy as np
10 from six.moves import cPickle as pickle
11
12 try:
13 import pandas as pd
14 is_pandas = True
15 except ImportError:
16 is_pandas = False
17
18 try:
19 from dateutil.relativedelta import relativedelta
20 is_dateutil = True
21 except ImportError:
22 is_dateutil = False
23
24 from .settings import settings
25
26 log = logging.getLogger(__name__)
27
28 millifactor = 10**6.0
29
30 class BokehJSONEncoder(json.JSONEncoder):
31 def transform_series(self, obj):
32 """transform series
33 """
34 vals = obj.values
35 return self.transform_array(vals)
36
37 # Check for astype failures (putative Numpy < 1.7)
38 dt2001 = np.datetime64('2001')
39 legacy_datetime64 = (dt2001.astype('int64') ==
40 dt2001.astype('datetime64[ms]').astype('int64'))
41 def transform_array(self, obj):
42 """Transform arrays into lists of json safe types
43 also handles pandas series, and replacing
44 nans and infs with strings
45 """
46 ## not quite correct, truncates to ms..
47 if obj.dtype.kind == 'M':
48 if self.legacy_datetime64:
49 if obj.dtype == np.dtype('datetime64[ns]'):
50 return (obj.astype('int64') / millifactor).tolist()
51 # else punt.
52 else:
53 return obj.astype('datetime64[ms]').astype('int64').tolist()
54 elif obj.dtype.kind in ('u', 'i', 'f'):
55 return self.transform_numerical_array(obj)
56 return obj.tolist()
57
58 def transform_numerical_array(self, obj):
59 """handles nans/inf conversion
60 """
61 if isinstance(obj, np.ma.MaskedArray):
62 obj = obj.filled(np.nan) # Set masked values to nan
63 if not np.isnan(obj).any() and not np.isinf(obj).any():
64 return obj.tolist()
65 else:
66 transformed = obj.astype('object')
67 transformed[np.isnan(obj)] = 'NaN'
68 transformed[np.isposinf(obj)] = 'Infinity'
69 transformed[np.isneginf(obj)] = '-Infinity'
70 return transformed.tolist()
71
72 def transform_python_types(self, obj):
73 """handle special scalars, default to default json encoder
74 """
75 # Pandas Timestamp
76 if is_pandas and isinstance(obj, pd.tslib.Timestamp):
77 return obj.value / millifactor #nanosecond to millisecond
78 elif np.issubdtype(type(obj), np.float):
79 return float(obj)
80 elif np.issubdtype(type(obj), np.int):
81 return int(obj)
82 elif np.issubdtype(type(obj), np.bool_):
83 return bool(obj)
84 # Datetime, Date
85 elif isinstance(obj, (dt.datetime, dt.date)):
86 return calendar.timegm(obj.timetuple()) * 1000.
87 # Numpy datetime64
88 elif isinstance(obj, np.datetime64):
89 epoch_delta = obj - np.datetime64('1970-01-01T00:00:00Z')
90 return (epoch_delta / np.timedelta64(1, 'ms'))
91 # Time
92 elif isinstance(obj, dt.time):
93 return (obj.hour*3600 + obj.minute*60 + obj.second)*1000 + obj.microsecond / 1000.
94 elif is_dateutil and isinstance(obj, relativedelta):
95 return dict(years=obj.years, months=obj.months, days=obj.days, hours=obj.hours,
96 minutes=obj.minutes, seconds=obj.seconds, microseconds=obj.microseconds)
97 else:
98 return super(BokehJSONEncoder, self).default(obj)
99
100 def default(self, obj):
101 #argh! local import!
102 from .plot_object import PlotObject
103 from .properties import HasProps
104 from .colors import Color
105 ## array types
106 if is_pandas and isinstance(obj, (pd.Series, pd.Index)):
107 return self.transform_series(obj)
108 elif isinstance(obj, np.ndarray):
109 return self.transform_array(obj)
110 elif isinstance(obj, PlotObject):
111 return obj.ref
112 elif isinstance(obj, HasProps):
113 return obj.changed_properties_with_values()
114 elif isinstance(obj, Color):
115 return obj.to_css()
116 else:
117 return self.transform_python_types(obj)
118
119 def serialize_json(obj, encoder=BokehJSONEncoder, **kwargs):
120 if settings.pretty(False):
121 kwargs["indent"] = 4
122 return json.dumps(obj, cls=encoder, **kwargs)
123
124 deserialize_json = json.loads
125
126 serialize_web = serialize_json
127
128 deserialize_web = deserialize_json
129
130 def status_obj(status):
131 return {'msgtype': 'status',
132 'status': status}
133
134 def error_obj(error_msg):
135 return {
136 'msgtype': 'error',
137 'error_msg': error_msg}
138
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/bokeh/protocol.py b/bokeh/protocol.py
--- a/bokeh/protocol.py
+++ b/bokeh/protocol.py
@@ -5,6 +5,7 @@
import time
import datetime as dt
import calendar
+import decimal
import numpy as np
from six.moves import cPickle as pickle
@@ -94,6 +95,9 @@
elif is_dateutil and isinstance(obj, relativedelta):
return dict(years=obj.years, months=obj.months, days=obj.days, hours=obj.hours,
minutes=obj.minutes, seconds=obj.seconds, microseconds=obj.microseconds)
+ # Decimal
+ elif isinstance(obj, decimal.Decimal):
+ return float(obj)
else:
return super(BokehJSONEncoder, self).default(obj)
|
{"golden_diff": "diff --git a/bokeh/protocol.py b/bokeh/protocol.py\n--- a/bokeh/protocol.py\n+++ b/bokeh/protocol.py\n@@ -5,6 +5,7 @@\n import time\n import datetime as dt\n import calendar\n+import decimal\n \n import numpy as np\n from six.moves import cPickle as pickle\n@@ -94,6 +95,9 @@\n elif is_dateutil and isinstance(obj, relativedelta):\n return dict(years=obj.years, months=obj.months, days=obj.days, hours=obj.hours,\n minutes=obj.minutes, seconds=obj.seconds, microseconds=obj.microseconds)\n+ # Decimal\n+ elif isinstance(obj, decimal.Decimal):\n+ return float(obj)\n else:\n return super(BokehJSONEncoder, self).default(obj)\n", "issue": "decimal.Decimal cannot be sent across session\nWhen trying to give data of type decimal.Decimal as data sources to plots, the BokehJSONEncoder does tries to serialise the Decimal object with the standard built-in JSON encoder. This causes an exception \"Decimal('...') not JSON serializable\". The solution is to edit BokehJSONEncoder.trasnform_python_types to account for this possibility. I have tested the solution and it works.\n\n", "before_files": [{"content": "from __future__ import absolute_import\n\nimport json\nimport logging\nimport time\nimport datetime as dt\nimport calendar\n\nimport numpy as np\nfrom six.moves import cPickle as pickle\n\ntry:\n import pandas as pd\n is_pandas = True\nexcept ImportError:\n is_pandas = False\n\ntry:\n from dateutil.relativedelta import relativedelta\n is_dateutil = True\nexcept ImportError:\n is_dateutil = False\n\nfrom .settings import settings\n\nlog = logging.getLogger(__name__)\n\nmillifactor = 10**6.0\n\nclass BokehJSONEncoder(json.JSONEncoder):\n def transform_series(self, obj):\n \"\"\"transform series\n \"\"\"\n vals = obj.values\n return self.transform_array(vals)\n\n # Check for astype failures (putative Numpy < 1.7)\n dt2001 = np.datetime64('2001')\n legacy_datetime64 = (dt2001.astype('int64') ==\n dt2001.astype('datetime64[ms]').astype('int64'))\n def transform_array(self, obj):\n \"\"\"Transform arrays into lists of json safe types\n also handles pandas series, and replacing\n nans and infs with strings\n \"\"\"\n ## not quite correct, truncates to ms..\n if obj.dtype.kind == 'M':\n if self.legacy_datetime64:\n if obj.dtype == np.dtype('datetime64[ns]'):\n return (obj.astype('int64') / millifactor).tolist()\n # else punt.\n else:\n return obj.astype('datetime64[ms]').astype('int64').tolist()\n elif obj.dtype.kind in ('u', 'i', 'f'):\n return self.transform_numerical_array(obj)\n return obj.tolist()\n\n def transform_numerical_array(self, obj):\n \"\"\"handles nans/inf conversion\n \"\"\"\n if isinstance(obj, np.ma.MaskedArray):\n obj = obj.filled(np.nan) # Set masked values to nan\n if not np.isnan(obj).any() and not np.isinf(obj).any():\n return obj.tolist()\n else:\n transformed = obj.astype('object')\n transformed[np.isnan(obj)] = 'NaN'\n transformed[np.isposinf(obj)] = 'Infinity'\n transformed[np.isneginf(obj)] = '-Infinity'\n return transformed.tolist()\n\n def transform_python_types(self, obj):\n \"\"\"handle special scalars, default to default json encoder\n \"\"\"\n # Pandas Timestamp\n if is_pandas and isinstance(obj, pd.tslib.Timestamp):\n return obj.value / millifactor #nanosecond to millisecond\n elif np.issubdtype(type(obj), np.float):\n return float(obj)\n elif np.issubdtype(type(obj), np.int):\n return int(obj)\n elif np.issubdtype(type(obj), np.bool_):\n return bool(obj)\n # Datetime, Date\n elif isinstance(obj, (dt.datetime, dt.date)):\n return 
calendar.timegm(obj.timetuple()) * 1000.\n # Numpy datetime64\n elif isinstance(obj, np.datetime64):\n epoch_delta = obj - np.datetime64('1970-01-01T00:00:00Z')\n return (epoch_delta / np.timedelta64(1, 'ms'))\n # Time\n elif isinstance(obj, dt.time):\n return (obj.hour*3600 + obj.minute*60 + obj.second)*1000 + obj.microsecond / 1000.\n elif is_dateutil and isinstance(obj, relativedelta):\n return dict(years=obj.years, months=obj.months, days=obj.days, hours=obj.hours,\n minutes=obj.minutes, seconds=obj.seconds, microseconds=obj.microseconds)\n else:\n return super(BokehJSONEncoder, self).default(obj)\n\n def default(self, obj):\n #argh! local import!\n from .plot_object import PlotObject\n from .properties import HasProps\n from .colors import Color\n ## array types\n if is_pandas and isinstance(obj, (pd.Series, pd.Index)):\n return self.transform_series(obj)\n elif isinstance(obj, np.ndarray):\n return self.transform_array(obj)\n elif isinstance(obj, PlotObject):\n return obj.ref\n elif isinstance(obj, HasProps):\n return obj.changed_properties_with_values()\n elif isinstance(obj, Color):\n return obj.to_css()\n else:\n return self.transform_python_types(obj)\n\ndef serialize_json(obj, encoder=BokehJSONEncoder, **kwargs):\n if settings.pretty(False):\n kwargs[\"indent\"] = 4\n return json.dumps(obj, cls=encoder, **kwargs)\n\ndeserialize_json = json.loads\n\nserialize_web = serialize_json\n\ndeserialize_web = deserialize_json\n\ndef status_obj(status):\n return {'msgtype': 'status',\n 'status': status}\n\ndef error_obj(error_msg):\n return {\n 'msgtype': 'error',\n 'error_msg': error_msg}\n", "path": "bokeh/protocol.py"}], "after_files": [{"content": "from __future__ import absolute_import\n\nimport json\nimport logging\nimport time\nimport datetime as dt\nimport calendar\nimport decimal\n\nimport numpy as np\nfrom six.moves import cPickle as pickle\n\ntry:\n import pandas as pd\n is_pandas = True\nexcept ImportError:\n is_pandas = False\n\ntry:\n from dateutil.relativedelta import relativedelta\n is_dateutil = True\nexcept ImportError:\n is_dateutil = False\n\nfrom .settings import settings\n\nlog = logging.getLogger(__name__)\n\nmillifactor = 10**6.0\n\nclass BokehJSONEncoder(json.JSONEncoder):\n def transform_series(self, obj):\n \"\"\"transform series\n \"\"\"\n vals = obj.values\n return self.transform_array(vals)\n\n # Check for astype failures (putative Numpy < 1.7)\n dt2001 = np.datetime64('2001')\n legacy_datetime64 = (dt2001.astype('int64') ==\n dt2001.astype('datetime64[ms]').astype('int64'))\n def transform_array(self, obj):\n \"\"\"Transform arrays into lists of json safe types\n also handles pandas series, and replacing\n nans and infs with strings\n \"\"\"\n ## not quite correct, truncates to ms..\n if obj.dtype.kind == 'M':\n if self.legacy_datetime64:\n if obj.dtype == np.dtype('datetime64[ns]'):\n return (obj.astype('int64') / millifactor).tolist()\n # else punt.\n else:\n return obj.astype('datetime64[ms]').astype('int64').tolist()\n elif obj.dtype.kind in ('u', 'i', 'f'):\n return self.transform_numerical_array(obj)\n return obj.tolist()\n\n def transform_numerical_array(self, obj):\n \"\"\"handles nans/inf conversion\n \"\"\"\n if isinstance(obj, np.ma.MaskedArray):\n obj = obj.filled(np.nan) # Set masked values to nan\n if not np.isnan(obj).any() and not np.isinf(obj).any():\n return obj.tolist()\n else:\n transformed = obj.astype('object')\n transformed[np.isnan(obj)] = 'NaN'\n transformed[np.isposinf(obj)] = 'Infinity'\n transformed[np.isneginf(obj)] = 
'-Infinity'\n return transformed.tolist()\n\n def transform_python_types(self, obj):\n \"\"\"handle special scalars, default to default json encoder\n \"\"\"\n # Pandas Timestamp\n if is_pandas and isinstance(obj, pd.tslib.Timestamp):\n return obj.value / millifactor #nanosecond to millisecond\n elif np.issubdtype(type(obj), np.float):\n return float(obj)\n elif np.issubdtype(type(obj), np.int):\n return int(obj)\n elif np.issubdtype(type(obj), np.bool_):\n return bool(obj)\n # Datetime, Date\n elif isinstance(obj, (dt.datetime, dt.date)):\n return calendar.timegm(obj.timetuple()) * 1000.\n # Numpy datetime64\n elif isinstance(obj, np.datetime64):\n epoch_delta = obj - np.datetime64('1970-01-01T00:00:00Z')\n return (epoch_delta / np.timedelta64(1, 'ms'))\n # Time\n elif isinstance(obj, dt.time):\n return (obj.hour*3600 + obj.minute*60 + obj.second)*1000 + obj.microsecond / 1000.\n elif is_dateutil and isinstance(obj, relativedelta):\n return dict(years=obj.years, months=obj.months, days=obj.days, hours=obj.hours,\n minutes=obj.minutes, seconds=obj.seconds, microseconds=obj.microseconds)\n # Decimal\n elif isinstance(obj, decimal.Decimal):\n return float(obj)\n else:\n return super(BokehJSONEncoder, self).default(obj)\n\n def default(self, obj):\n #argh! local import!\n from .plot_object import PlotObject\n from .properties import HasProps\n from .colors import Color\n ## array types\n if is_pandas and isinstance(obj, (pd.Series, pd.Index)):\n return self.transform_series(obj)\n elif isinstance(obj, np.ndarray):\n return self.transform_array(obj)\n elif isinstance(obj, PlotObject):\n return obj.ref\n elif isinstance(obj, HasProps):\n return obj.changed_properties_with_values()\n elif isinstance(obj, Color):\n return obj.to_css()\n else:\n return self.transform_python_types(obj)\n\ndef serialize_json(obj, encoder=BokehJSONEncoder, **kwargs):\n if settings.pretty(False):\n kwargs[\"indent\"] = 4\n return json.dumps(obj, cls=encoder, **kwargs)\n\ndeserialize_json = json.loads\n\nserialize_web = serialize_json\n\ndeserialize_web = deserialize_json\n\ndef status_obj(status):\n return {'msgtype': 'status',\n 'status': status}\n\ndef error_obj(error_msg):\n return {\n 'msgtype': 'error',\n 'error_msg': error_msg}\n", "path": "bokeh/protocol.py"}]}
| 1,754 | 174 |
gh_patches_debug_25059
|
rasdani/github-patches
|
git_diff
|
pytorch__ignite-294
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[feature request] Changing how the Loss metric gets the shape information
Recently I was working with multi-output models, using a pattern I believe to be common: the model returns a tuple, for instance `y_pred = (ypred_1, ypred_2)`, with the corresponding target of the form `y = (y_1, y_2)`, and the `loss_fn` accepting `ypred` and `y` as arguments (`loss_fn(ypred, y)`).
However, I ran into problems when using the Loss metric, because its `update` function derives the batch size by reading the `shape` attribute of `y` directly, assuming it is a `torch.Tensor`.
My suggestion is to let the Loss metric class accept an extra function, for instance `batch_size = lambda x: x.shape[0]`, which preserves the current behavior while also allowing the batch size to be read from an element of a tuple/list.
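To make the request concrete, here is a minimal sketch (illustrative only; the `batch_size` keyword is the proposed addition, not an existing argument) of how the callable would be supplied when `y` is a tuple:

```python
import torch.nn.functional as F
from ignite.metrics import Loss

def loss_fn(y_pred, y):
    # Toy multi-output loss: average the MSE of the two heads.
    return 0.5 * (F.mse_loss(y_pred[0], y[0]) + F.mse_loss(y_pred[1], y[1]))

# Proposed (hypothetical) usage: tell the metric how to read the batch size
# when y is a tuple of tensors rather than a single tensor.
metric = Loss(loss_fn, batch_size=lambda y: y[0].shape[0])
```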
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ignite/metrics/loss.py`
Content:
```
1 from __future__ import division
2
3 from ignite.exceptions import NotComputableError
4 from ignite.metrics.metric import Metric
5
6
7 class Loss(Metric):
8 """
9 Calculates the average loss according to the passed loss_fn.
10
11 Args:
12 loss_fn (callable): a callable taking a prediction tensor, a target
13 tensor, optionally other arguments, and returns the average loss
14 over all observations in the batch.
15 output_transform (callable): a callable that is used to transform the
16 :class:`ignite.engine.Engine`'s `process_function`'s output into the
17 form expected by the metric.
18 This can be useful if, for example, you have a multi-output model and
19 you want to compute the metric with respect to one of the outputs.
20 The output is is expected to be a tuple (prediction, target) or
21 (prediction, target, kwargs) where kwargs is a dictionary of extra
22 keywords arguments.
23
24 """
25
26 def __init__(self, loss_fn, output_transform=lambda x: x):
27 super(Loss, self).__init__(output_transform)
28 self._loss_fn = loss_fn
29
30 def reset(self):
31 self._sum = 0
32 self._num_examples = 0
33
34 def update(self, output):
35 if len(output) == 2:
36 y_pred, y = output
37 kwargs = {}
38 else:
39 y_pred, y, kwargs = output
40 average_loss = self._loss_fn(y_pred, y, **kwargs)
41
42 if len(average_loss.shape) != 0:
43 raise ValueError('loss_fn did not return the average loss')
44
45 self._sum += average_loss.item() * y.shape[0]
46 self._num_examples += y.shape[0]
47
48 def compute(self):
49 if self._num_examples == 0:
50 raise NotComputableError(
51 'Loss must have at least one example before it can be computed')
52 return self._sum / self._num_examples
53
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ignite/metrics/loss.py b/ignite/metrics/loss.py
--- a/ignite/metrics/loss.py
+++ b/ignite/metrics/loss.py
@@ -20,12 +20,16 @@
The output is is expected to be a tuple (prediction, target) or
(prediction, target, kwargs) where kwargs is a dictionary of extra
keywords arguments.
+ batch_size (callable): a callable taking a target tensor that returns the
+ first dimension size (usually the batch size).
"""
- def __init__(self, loss_fn, output_transform=lambda x: x):
+ def __init__(self, loss_fn, output_transform=lambda x: x,
+ batch_size=lambda x: x.shape[0]):
super(Loss, self).__init__(output_transform)
self._loss_fn = loss_fn
+ self._batch_size = batch_size
def reset(self):
self._sum = 0
@@ -42,8 +46,9 @@
if len(average_loss.shape) != 0:
raise ValueError('loss_fn did not return the average loss')
- self._sum += average_loss.item() * y.shape[0]
- self._num_examples += y.shape[0]
+ N = self._batch_size(y)
+ self._sum += average_loss.item() * N
+ self._num_examples += N
def compute(self):
if self._num_examples == 0:
|
{"golden_diff": "diff --git a/ignite/metrics/loss.py b/ignite/metrics/loss.py\n--- a/ignite/metrics/loss.py\n+++ b/ignite/metrics/loss.py\n@@ -20,12 +20,16 @@\n The output is is expected to be a tuple (prediction, target) or\n (prediction, target, kwargs) where kwargs is a dictionary of extra\n keywords arguments.\n+ batch_size (callable): a callable taking a target tensor that returns the\n+ first dimension size (usually the batch size).\n \n \"\"\"\n \n- def __init__(self, loss_fn, output_transform=lambda x: x):\n+ def __init__(self, loss_fn, output_transform=lambda x: x,\n+ batch_size=lambda x: x.shape[0]):\n super(Loss, self).__init__(output_transform)\n self._loss_fn = loss_fn\n+ self._batch_size = batch_size\n \n def reset(self):\n self._sum = 0\n@@ -42,8 +46,9 @@\n if len(average_loss.shape) != 0:\n raise ValueError('loss_fn did not return the average loss')\n \n- self._sum += average_loss.item() * y.shape[0]\n- self._num_examples += y.shape[0]\n+ N = self._batch_size(y)\n+ self._sum += average_loss.item() * N\n+ self._num_examples += N\n \n def compute(self):\n if self._num_examples == 0:\n", "issue": "[feature request]Changing how the loss metric get the shape information\nRecently I was working with multi-output models and I was using a pattern that I believe to\r\nbe common. The model returns a tuple, for instance `y_pred = (ypred_1, ypred_2)` with the correspondent y of the form `y = (y_1, y_2)` with the `loss_fn` accepting as arguments `ypred` and `y` (`loss_fn(ypred, y)`).\r\n\r\nHowever I have run into problems when using the Loss metric due to the batch size used on the update function of this metric calling the attribute shape of the, presumed, torch.Tensor directly.\r\n\r\nI have as suggestion to change the Loss metric class to accept an extra function, for instance `batch_size = lambda x: x.shape[0]` to recover the current behavior and to permit to access the shape attribute from a tuple/list.\r\n\n", "before_files": [{"content": "from __future__ import division\n\nfrom ignite.exceptions import NotComputableError\nfrom ignite.metrics.metric import Metric\n\n\nclass Loss(Metric):\n \"\"\"\n Calculates the average loss according to the passed loss_fn.\n\n Args:\n loss_fn (callable): a callable taking a prediction tensor, a target\n tensor, optionally other arguments, and returns the average loss\n over all observations in the batch.\n output_transform (callable): a callable that is used to transform the\n :class:`ignite.engine.Engine`'s `process_function`'s output into the\n form expected by the metric.\n This can be useful if, for example, you have a multi-output model and\n you want to compute the metric with respect to one of the outputs.\n The output is is expected to be a tuple (prediction, target) or\n (prediction, target, kwargs) where kwargs is a dictionary of extra\n keywords arguments.\n\n \"\"\"\n\n def __init__(self, loss_fn, output_transform=lambda x: x):\n super(Loss, self).__init__(output_transform)\n self._loss_fn = loss_fn\n\n def reset(self):\n self._sum = 0\n self._num_examples = 0\n\n def update(self, output):\n if len(output) == 2:\n y_pred, y = output\n kwargs = {}\n else:\n y_pred, y, kwargs = output\n average_loss = self._loss_fn(y_pred, y, **kwargs)\n\n if len(average_loss.shape) != 0:\n raise ValueError('loss_fn did not return the average loss')\n\n self._sum += average_loss.item() * y.shape[0]\n self._num_examples += y.shape[0]\n\n def compute(self):\n if self._num_examples == 0:\n raise NotComputableError(\n 'Loss must have at least one example 
before it can be computed')\n return self._sum / self._num_examples\n", "path": "ignite/metrics/loss.py"}], "after_files": [{"content": "from __future__ import division\n\nfrom ignite.exceptions import NotComputableError\nfrom ignite.metrics.metric import Metric\n\n\nclass Loss(Metric):\n \"\"\"\n Calculates the average loss according to the passed loss_fn.\n\n Args:\n loss_fn (callable): a callable taking a prediction tensor, a target\n tensor, optionally other arguments, and returns the average loss\n over all observations in the batch.\n output_transform (callable): a callable that is used to transform the\n :class:`ignite.engine.Engine`'s `process_function`'s output into the\n form expected by the metric.\n This can be useful if, for example, you have a multi-output model and\n you want to compute the metric with respect to one of the outputs.\n The output is is expected to be a tuple (prediction, target) or\n (prediction, target, kwargs) where kwargs is a dictionary of extra\n keywords arguments.\n batch_size (callable): a callable taking a target tensor that returns the\n first dimension size (usually the batch size).\n\n \"\"\"\n\n def __init__(self, loss_fn, output_transform=lambda x: x,\n batch_size=lambda x: x.shape[0]):\n super(Loss, self).__init__(output_transform)\n self._loss_fn = loss_fn\n self._batch_size = batch_size\n\n def reset(self):\n self._sum = 0\n self._num_examples = 0\n\n def update(self, output):\n if len(output) == 2:\n y_pred, y = output\n kwargs = {}\n else:\n y_pred, y, kwargs = output\n average_loss = self._loss_fn(y_pred, y, **kwargs)\n\n if len(average_loss.shape) != 0:\n raise ValueError('loss_fn did not return the average loss')\n\n N = self._batch_size(y)\n self._sum += average_loss.item() * N\n self._num_examples += N\n\n def compute(self):\n if self._num_examples == 0:\n raise NotComputableError(\n 'Loss must have at least one example before it can be computed')\n return self._sum / self._num_examples\n", "path": "ignite/metrics/loss.py"}]}
| 976 | 328 |
gh_patches_debug_2294
|
rasdani/github-patches
|
git_diff
|
aws__aws-cli-4760
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
upgrade to PyYAML 5.2
PyYAML 5.2 is out, with more security fixes. aws-cli pins to an older version, preventing upgrades. Please update the pin.
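For context, the pin lives in `setup.py`'s `install_requires`; a hedged sketch of the kind of relaxation being requested (the exact new upper bound is a maintainer decision) could look like:

```python
# Hypothetical sketch, not the actual patch: relax the PyYAML cap so the
# 5.2 security release can be installed on modern Python versions.
install_requires = []
install_requires.append('PyYAML>=3.10,<5.3')  # previously capped at <5.2
```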
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2 import codecs
3 import os.path
4 import re
5 import sys
6
7 from setuptools import setup, find_packages
8
9
10 here = os.path.abspath(os.path.dirname(__file__))
11
12
13 def read(*parts):
14 return codecs.open(os.path.join(here, *parts), 'r').read()
15
16
17 def find_version(*file_paths):
18 version_file = read(*file_paths)
19 version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
20 version_file, re.M)
21 if version_match:
22 return version_match.group(1)
23 raise RuntimeError("Unable to find version string.")
24
25
26 install_requires = ['botocore==1.13.38',
27 'docutils>=0.10,<0.16',
28 'rsa>=3.1.2,<=3.5.0',
29 's3transfer>=0.2.0,<0.3.0']
30
31
32 if sys.version_info[:2] == (2, 6):
33 # For python2.6 we have to require argparse since it
34 # was not in stdlib until 2.7.
35 install_requires.append('argparse>=1.1')
36
37 # For Python 2.6, we have to require a different verion of PyYAML since the latest
38 # versions dropped support for Python 2.6.
39 install_requires.append('PyYAML>=3.10,<=3.13')
40
41 # Colorama removed support for EOL pythons.
42 install_requires.append('colorama>=0.2.5,<=0.3.9')
43 elif sys.version_info[:2] == (3, 3):
44 install_requires.append('PyYAML>=3.10,<=3.13')
45 # Colorama removed support for EOL pythons.
46 install_requires.append('colorama>=0.2.5,<=0.3.9')
47 else:
48 install_requires.append('PyYAML>=3.10,<5.2')
49 install_requires.append('colorama>=0.2.5,<0.4.2')
50
51
52 setup_options = dict(
53 name='awscli',
54 version=find_version("awscli", "__init__.py"),
55 description='Universal Command Line Environment for AWS.',
56 long_description=read('README.rst'),
57 author='Amazon Web Services',
58 url='http://aws.amazon.com/cli/',
59 scripts=['bin/aws', 'bin/aws.cmd',
60 'bin/aws_completer', 'bin/aws_zsh_completer.sh',
61 'bin/aws_bash_completer'],
62 packages=find_packages(exclude=['tests*']),
63 package_data={'awscli': ['data/*.json', 'examples/*/*.rst',
64 'examples/*/*.txt', 'examples/*/*/*.txt',
65 'examples/*/*/*.rst', 'topics/*.rst',
66 'topics/*.json']},
67 install_requires=install_requires,
68 extras_require={
69 ':python_version=="2.6"': [
70 'argparse>=1.1',
71 ]
72 },
73 license="Apache License 2.0",
74 classifiers=[
75 'Development Status :: 5 - Production/Stable',
76 'Intended Audience :: Developers',
77 'Intended Audience :: System Administrators',
78 'Natural Language :: English',
79 'License :: OSI Approved :: Apache Software License',
80 'Programming Language :: Python',
81 'Programming Language :: Python :: 2',
82 'Programming Language :: Python :: 2.6',
83 'Programming Language :: Python :: 2.7',
84 'Programming Language :: Python :: 3',
85 'Programming Language :: Python :: 3.3',
86 'Programming Language :: Python :: 3.4',
87 'Programming Language :: Python :: 3.5',
88 'Programming Language :: Python :: 3.6',
89 'Programming Language :: Python :: 3.7',
90 ],
91 )
92
93 if 'py2exe' in sys.argv:
94 # This will actually give us a py2exe command.
95 import py2exe
96 # And we have some py2exe specific options.
97 setup_options['options'] = {
98 'py2exe': {
99 'optimize': 0,
100 'skip_archive': True,
101 'dll_excludes': ['crypt32.dll'],
102 'packages': ['docutils', 'urllib', 'httplib', 'HTMLParser',
103 'awscli', 'ConfigParser', 'xml.etree', 'pipes'],
104 }
105 }
106 setup_options['console'] = ['bin/aws']
107
108
109 setup(**setup_options)
110
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -45,7 +45,7 @@
# Colorama removed support for EOL pythons.
install_requires.append('colorama>=0.2.5,<=0.3.9')
else:
- install_requires.append('PyYAML>=3.10,<5.2')
+ install_requires.append('PyYAML>=3.10,<5.3')
install_requires.append('colorama>=0.2.5,<0.4.2')
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -45,7 +45,7 @@\n # Colorama removed support for EOL pythons.\n install_requires.append('colorama>=0.2.5,<=0.3.9')\n else:\n- install_requires.append('PyYAML>=3.10,<5.2')\n+ install_requires.append('PyYAML>=3.10,<5.3')\n install_requires.append('colorama>=0.2.5,<0.4.2')\n", "issue": "upgrade to PyYAML 5.2\nPyYAML 5.2 is out, with more security fixes. aws-cli pins to an older version, preventing upgrades. Please update the pin.\n", "before_files": [{"content": "#!/usr/bin/env python\nimport codecs\nimport os.path\nimport re\nimport sys\n\nfrom setuptools import setup, find_packages\n\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\ndef read(*parts):\n return codecs.open(os.path.join(here, *parts), 'r').read()\n\n\ndef find_version(*file_paths):\n version_file = read(*file_paths)\n version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file, re.M)\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Unable to find version string.\")\n\n\ninstall_requires = ['botocore==1.13.38',\n 'docutils>=0.10,<0.16',\n 'rsa>=3.1.2,<=3.5.0',\n 's3transfer>=0.2.0,<0.3.0']\n\n\nif sys.version_info[:2] == (2, 6):\n # For python2.6 we have to require argparse since it\n # was not in stdlib until 2.7.\n install_requires.append('argparse>=1.1')\n\n # For Python 2.6, we have to require a different verion of PyYAML since the latest\n # versions dropped support for Python 2.6.\n install_requires.append('PyYAML>=3.10,<=3.13')\n\n # Colorama removed support for EOL pythons.\n install_requires.append('colorama>=0.2.5,<=0.3.9')\nelif sys.version_info[:2] == (3, 3):\n install_requires.append('PyYAML>=3.10,<=3.13')\n # Colorama removed support for EOL pythons.\n install_requires.append('colorama>=0.2.5,<=0.3.9')\nelse:\n install_requires.append('PyYAML>=3.10,<5.2')\n install_requires.append('colorama>=0.2.5,<0.4.2')\n\n\nsetup_options = dict(\n name='awscli',\n version=find_version(\"awscli\", \"__init__.py\"),\n description='Universal Command Line Environment for AWS.',\n long_description=read('README.rst'),\n author='Amazon Web Services',\n url='http://aws.amazon.com/cli/',\n scripts=['bin/aws', 'bin/aws.cmd',\n 'bin/aws_completer', 'bin/aws_zsh_completer.sh',\n 'bin/aws_bash_completer'],\n packages=find_packages(exclude=['tests*']),\n package_data={'awscli': ['data/*.json', 'examples/*/*.rst',\n 'examples/*/*.txt', 'examples/*/*/*.txt',\n 'examples/*/*/*.rst', 'topics/*.rst',\n 'topics/*.json']},\n install_requires=install_requires,\n extras_require={\n ':python_version==\"2.6\"': [\n 'argparse>=1.1',\n ]\n },\n license=\"Apache License 2.0\",\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n ],\n)\n\nif 'py2exe' in sys.argv:\n # This will actually give us a py2exe command.\n import py2exe\n # And we have some py2exe specific options.\n setup_options['options'] = {\n 'py2exe': 
{\n 'optimize': 0,\n 'skip_archive': True,\n 'dll_excludes': ['crypt32.dll'],\n 'packages': ['docutils', 'urllib', 'httplib', 'HTMLParser',\n 'awscli', 'ConfigParser', 'xml.etree', 'pipes'],\n }\n }\n setup_options['console'] = ['bin/aws']\n\n\nsetup(**setup_options)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport codecs\nimport os.path\nimport re\nimport sys\n\nfrom setuptools import setup, find_packages\n\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\ndef read(*parts):\n return codecs.open(os.path.join(here, *parts), 'r').read()\n\n\ndef find_version(*file_paths):\n version_file = read(*file_paths)\n version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file, re.M)\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Unable to find version string.\")\n\n\ninstall_requires = ['botocore==1.13.38',\n 'docutils>=0.10,<0.16',\n 'rsa>=3.1.2,<=3.5.0',\n 's3transfer>=0.2.0,<0.3.0']\n\n\nif sys.version_info[:2] == (2, 6):\n # For python2.6 we have to require argparse since it\n # was not in stdlib until 2.7.\n install_requires.append('argparse>=1.1')\n\n # For Python 2.6, we have to require a different verion of PyYAML since the latest\n # versions dropped support for Python 2.6.\n install_requires.append('PyYAML>=3.10,<=3.13')\n\n # Colorama removed support for EOL pythons.\n install_requires.append('colorama>=0.2.5,<=0.3.9')\nelif sys.version_info[:2] == (3, 3):\n install_requires.append('PyYAML>=3.10,<=3.13')\n # Colorama removed support for EOL pythons.\n install_requires.append('colorama>=0.2.5,<=0.3.9')\nelse:\n install_requires.append('PyYAML>=3.10,<5.3')\n install_requires.append('colorama>=0.2.5,<0.4.2')\n\n\nsetup_options = dict(\n name='awscli',\n version=find_version(\"awscli\", \"__init__.py\"),\n description='Universal Command Line Environment for AWS.',\n long_description=read('README.rst'),\n author='Amazon Web Services',\n url='http://aws.amazon.com/cli/',\n scripts=['bin/aws', 'bin/aws.cmd',\n 'bin/aws_completer', 'bin/aws_zsh_completer.sh',\n 'bin/aws_bash_completer'],\n packages=find_packages(exclude=['tests*']),\n package_data={'awscli': ['data/*.json', 'examples/*/*.rst',\n 'examples/*/*.txt', 'examples/*/*/*.txt',\n 'examples/*/*/*.rst', 'topics/*.rst',\n 'topics/*.json']},\n install_requires=install_requires,\n extras_require={\n ':python_version==\"2.6\"': [\n 'argparse>=1.1',\n ]\n },\n license=\"Apache License 2.0\",\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n ],\n)\n\nif 'py2exe' in sys.argv:\n # This will actually give us a py2exe command.\n import py2exe\n # And we have some py2exe specific options.\n setup_options['options'] = {\n 'py2exe': {\n 'optimize': 0,\n 'skip_archive': True,\n 'dll_excludes': ['crypt32.dll'],\n 'packages': ['docutils', 'urllib', 'httplib', 'HTMLParser',\n 'awscli', 'ConfigParser', 'xml.etree', 'pipes'],\n }\n }\n setup_options['console'] = 
['bin/aws']\n\n\nsetup(**setup_options)\n", "path": "setup.py"}]}
| 1,496 | 126 |
gh_patches_debug_33553
|
rasdani/github-patches
|
git_diff
|
microsoft__ptvsd-1098
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Build wheels for manylinux1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 # Copyright (c) Microsoft Corporation. All rights reserved.
4 # Licensed under the MIT License. See LICENSE in the project root
5 # for license information.
6
7 import os
8 import os.path
9 import subprocess
10 import sys
11
12 from setuptools import setup
13
14 import versioneer
15
16 sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'src'))
17 import ptvsd
18 import ptvsd._vendored
19 del sys.path[0]
20
21
22 PYDEVD_ROOT = ptvsd._vendored.project_root('pydevd')
23 PTVSD_ROOT = os.path.dirname(os.path.abspath(ptvsd.__file__))
24
25
26 def cython_build():
27 print('Compiling extension modules (set SKIP_CYTHON_BUILD=1 to omit)')
28 subprocess.call([
29 sys.executable,
30 os.path.join(PYDEVD_ROOT, 'setup_cython.py'),
31 'build_ext',
32 '-i',
33 ])
34
35
36 def iter_vendored_files():
37 # Add pydevd files as data files for this package. They are not
38 # treated as a package of their own, because we don't actually
39 # want to provide pydevd - just use our own copy internally.
40 for project in ptvsd._vendored.list_all():
41 for filename in ptvsd._vendored.iter_packaging_files(project):
42 yield filename
43
44
45 with open('DESCRIPTION.md', 'r') as fh:
46 long_description = fh.read()
47
48
49 if __name__ == '__main__':
50 if not os.getenv('SKIP_CYTHON_BUILD'):
51 cython_build()
52
53 setup(
54 name='ptvsd',
55 version=versioneer.get_version(),
56 description='Remote debugging server for Python support in Visual Studio and Visual Studio Code', # noqa
57 long_description=long_description,
58 long_description_content_type='text/markdown',
59 license='MIT',
60 author='Microsoft Corporation',
61 author_email='[email protected]',
62 url='https://aka.ms/ptvs',
63 python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',
64 classifiers=[
65 'Development Status :: 5 - Production/Stable',
66 'Programming Language :: Python :: 2.7',
67 'Programming Language :: Python :: 3.4',
68 'Programming Language :: Python :: 3.5',
69 'Programming Language :: Python :: 3.6',
70 'Programming Language :: Python :: 3.7',
71 'Topic :: Software Development :: Debuggers',
72 'Operating System :: OS Independent',
73 'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)',
74 'License :: OSI Approved :: MIT License',
75 ],
76 package_dir={'': 'src'},
77 packages=[
78 'ptvsd',
79 'ptvsd._vendored',
80 ],
81 package_data={
82 'ptvsd': ['ThirdPartyNotices.txt'],
83 'ptvsd._vendored': list(iter_vendored_files()),
84 },
85 cmdclass=versioneer.get_cmdclass(),
86 )
87
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -9,13 +9,23 @@
import subprocess
import sys
-from setuptools import setup
+pure = None
+if '--pure' in sys.argv:
+ pure = True
+ sys.argv.remove('--pure')
+elif '--universal' in sys.argv:
+ pure = True
+elif '--abi' in sys.argv:
+ pure = False
+ sys.argv.remove('--abi')
-import versioneer
+
+from setuptools import setup # noqa
+import versioneer # noqa
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'src'))
-import ptvsd
-import ptvsd._vendored
+import ptvsd # noqa
+import ptvsd._vendored # noqa
del sys.path[0]
@@ -23,6 +33,11 @@
PTVSD_ROOT = os.path.dirname(os.path.abspath(ptvsd.__file__))
+def get_buildplatform():
+ if '-p' in sys.argv:
+ return sys.argv[sys.argv.index('-p') + 1]
+ return None
+
def cython_build():
print('Compiling extension modules (set SKIP_CYTHON_BUILD=1 to omit)')
subprocess.call([
@@ -46,10 +61,29 @@
long_description = fh.read()
+try:
+ from wheel.bdist_wheel import bdist_wheel as _bdist_wheel
+
+ class bdist_wheel(_bdist_wheel):
+ def finalize_options(self):
+ _bdist_wheel.finalize_options(self)
+ self.root_is_pure = pure
+
+except ImportError:
+ bdist_wheel = None
+
if __name__ == '__main__':
if not os.getenv('SKIP_CYTHON_BUILD'):
cython_build()
+ cmds = versioneer.get_cmdclass()
+ cmds['bdist_wheel'] = bdist_wheel
+
+ extras = {}
+ platforms = get_buildplatform()
+ if platforms is not None:
+ extras['platforms'] = platforms
+
setup(
name='ptvsd',
version=versioneer.get_version(),
@@ -82,5 +116,6 @@
'ptvsd': ['ThirdPartyNotices.txt'],
'ptvsd._vendored': list(iter_vendored_files()),
},
- cmdclass=versioneer.get_cmdclass(),
+ cmdclass=cmds,
+ **extras
)
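A hedged sketch of how these new options might be exercised to produce a platform-tagged (e.g. manylinux1) wheel; the invocation below is an assumption for illustration, not taken from the project's build scripts:

```python
# Hypothetical build driver, run from the ptvsd source root.
import subprocess
import sys

subprocess.check_call([
    sys.executable, "setup.py",
    "--abi",                    # new flag: mark the wheel as non-pure
    "bdist_wheel",
    "-p", "manylinux1_x86_64",  # also picked up by get_buildplatform()
])
```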
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -9,13 +9,23 @@\n import subprocess\n import sys\n \n-from setuptools import setup\n+pure = None\n+if '--pure' in sys.argv:\n+ pure = True\n+ sys.argv.remove('--pure')\n+elif '--universal' in sys.argv:\n+ pure = True\n+elif '--abi' in sys.argv:\n+ pure = False\n+ sys.argv.remove('--abi')\n \n-import versioneer\n+\n+from setuptools import setup # noqa\n+import versioneer # noqa\n \n sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'src'))\n-import ptvsd\n-import ptvsd._vendored\n+import ptvsd # noqa\n+import ptvsd._vendored # noqa\n del sys.path[0]\n \n \n@@ -23,6 +33,11 @@\n PTVSD_ROOT = os.path.dirname(os.path.abspath(ptvsd.__file__))\n \n \n+def get_buildplatform():\n+ if '-p' in sys.argv:\n+ return sys.argv[sys.argv.index('-p') + 1]\n+ return None\n+\n def cython_build():\n print('Compiling extension modules (set SKIP_CYTHON_BUILD=1 to omit)')\n subprocess.call([\n@@ -46,10 +61,29 @@\n long_description = fh.read()\n \n \n+try:\n+ from wheel.bdist_wheel import bdist_wheel as _bdist_wheel\n+\n+ class bdist_wheel(_bdist_wheel):\n+ def finalize_options(self):\n+ _bdist_wheel.finalize_options(self)\n+ self.root_is_pure = pure\n+\n+except ImportError:\n+ bdist_wheel = None\n+\n if __name__ == '__main__':\n if not os.getenv('SKIP_CYTHON_BUILD'):\n cython_build()\n \n+ cmds = versioneer.get_cmdclass()\n+ cmds['bdist_wheel'] = bdist_wheel\n+\n+ extras = {}\n+ platforms = get_buildplatform()\n+ if platforms is not None:\n+ extras['platforms'] = platforms\n+\n setup(\n name='ptvsd',\n version=versioneer.get_version(),\n@@ -82,5 +116,6 @@\n 'ptvsd': ['ThirdPartyNotices.txt'],\n 'ptvsd._vendored': list(iter_vendored_files()),\n },\n- cmdclass=versioneer.get_cmdclass(),\n+ cmdclass=cmds,\n+ **extras\n )\n", "issue": "Build wheels for manylinux1\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nimport os\nimport os.path\nimport subprocess\nimport sys\n\nfrom setuptools import setup\n\nimport versioneer\n\nsys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'src'))\nimport ptvsd\nimport ptvsd._vendored\ndel sys.path[0]\n\n\nPYDEVD_ROOT = ptvsd._vendored.project_root('pydevd')\nPTVSD_ROOT = os.path.dirname(os.path.abspath(ptvsd.__file__))\n\n\ndef cython_build():\n print('Compiling extension modules (set SKIP_CYTHON_BUILD=1 to omit)')\n subprocess.call([\n sys.executable,\n os.path.join(PYDEVD_ROOT, 'setup_cython.py'),\n 'build_ext',\n '-i',\n ])\n\n\ndef iter_vendored_files():\n # Add pydevd files as data files for this package. 
They are not\n # treated as a package of their own, because we don't actually\n # want to provide pydevd - just use our own copy internally.\n for project in ptvsd._vendored.list_all():\n for filename in ptvsd._vendored.iter_packaging_files(project):\n yield filename\n\n\nwith open('DESCRIPTION.md', 'r') as fh:\n long_description = fh.read()\n\n\nif __name__ == '__main__':\n if not os.getenv('SKIP_CYTHON_BUILD'):\n cython_build()\n\n setup(\n name='ptvsd',\n version=versioneer.get_version(),\n description='Remote debugging server for Python support in Visual Studio and Visual Studio Code', # noqa\n long_description=long_description,\n long_description_content_type='text/markdown',\n license='MIT',\n author='Microsoft Corporation',\n author_email='[email protected]',\n url='https://aka.ms/ptvs',\n python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Software Development :: Debuggers',\n 'Operating System :: OS Independent',\n 'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)',\n 'License :: OSI Approved :: MIT License',\n ],\n package_dir={'': 'src'},\n packages=[\n 'ptvsd',\n 'ptvsd._vendored',\n ],\n package_data={\n 'ptvsd': ['ThirdPartyNotices.txt'],\n 'ptvsd._vendored': list(iter_vendored_files()),\n },\n cmdclass=versioneer.get_cmdclass(),\n )\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nimport os\nimport os.path\nimport subprocess\nimport sys\n\npure = None\nif '--pure' in sys.argv:\n pure = True\n sys.argv.remove('--pure')\nelif '--universal' in sys.argv:\n pure = True\nelif '--abi' in sys.argv:\n pure = False\n sys.argv.remove('--abi')\n\n\nfrom setuptools import setup # noqa\nimport versioneer # noqa\n\nsys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'src'))\nimport ptvsd # noqa\nimport ptvsd._vendored # noqa\ndel sys.path[0]\n\n\nPYDEVD_ROOT = ptvsd._vendored.project_root('pydevd')\nPTVSD_ROOT = os.path.dirname(os.path.abspath(ptvsd.__file__))\n\n\ndef get_buildplatform():\n if '-p' in sys.argv:\n return sys.argv[sys.argv.index('-p') + 1]\n return None\n\ndef cython_build():\n print('Compiling extension modules (set SKIP_CYTHON_BUILD=1 to omit)')\n subprocess.call([\n sys.executable,\n os.path.join(PYDEVD_ROOT, 'setup_cython.py'),\n 'build_ext',\n '-i',\n ])\n\n\ndef iter_vendored_files():\n # Add pydevd files as data files for this package. 
They are not\n # treated as a package of their own, because we don't actually\n # want to provide pydevd - just use our own copy internally.\n for project in ptvsd._vendored.list_all():\n for filename in ptvsd._vendored.iter_packaging_files(project):\n yield filename\n\n\nwith open('DESCRIPTION.md', 'r') as fh:\n long_description = fh.read()\n\n\ntry:\n from wheel.bdist_wheel import bdist_wheel as _bdist_wheel\n\n class bdist_wheel(_bdist_wheel):\n def finalize_options(self):\n _bdist_wheel.finalize_options(self)\n self.root_is_pure = pure\n\nexcept ImportError:\n bdist_wheel = None\n\nif __name__ == '__main__':\n if not os.getenv('SKIP_CYTHON_BUILD'):\n cython_build()\n\n cmds = versioneer.get_cmdclass()\n cmds['bdist_wheel'] = bdist_wheel\n\n extras = {}\n platforms = get_buildplatform()\n if platforms is not None:\n extras['platforms'] = platforms\n\n setup(\n name='ptvsd',\n version=versioneer.get_version(),\n description='Remote debugging server for Python support in Visual Studio and Visual Studio Code', # noqa\n long_description=long_description,\n long_description_content_type='text/markdown',\n license='MIT',\n author='Microsoft Corporation',\n author_email='[email protected]',\n url='https://aka.ms/ptvs',\n python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Software Development :: Debuggers',\n 'Operating System :: OS Independent',\n 'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)',\n 'License :: OSI Approved :: MIT License',\n ],\n package_dir={'': 'src'},\n packages=[\n 'ptvsd',\n 'ptvsd._vendored',\n ],\n package_data={\n 'ptvsd': ['ThirdPartyNotices.txt'],\n 'ptvsd._vendored': list(iter_vendored_files()),\n },\n cmdclass=cmds,\n **extras\n )\n", "path": "setup.py"}]}
| 1,099 | 558 |
gh_patches_debug_6568
|
rasdani/github-patches
|
git_diff
|
vllm-project__vllm-3638
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[CI] Test examples in CI
### Anything you want to discuss about vllm.
Current scripts in the `examples/` directory are not tested in CI. We should run them in CI to make sure they keep passing.
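A minimal sketch of what such a check could look like (a hypothetical helper, not the project's actual CI configuration): run each example script with the current interpreter and fail the job if any of them exits non-zero.

```python
# Hypothetical CI helper: execute every example script and report failures.
import subprocess
import sys
from pathlib import Path

failures = []
for script in sorted(Path("examples").glob("*.py")):
    print(f"Running {script} ...")
    if subprocess.run([sys.executable, str(script)]).returncode != 0:
        failures.append(script.name)

if failures:
    sys.exit(f"Example scripts failed: {', '.join(failures)}")
```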
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/llava_example.py`
Content:
```
1 import argparse
2 import os
3 import subprocess
4
5 import torch
6
7 from vllm import LLM
8 from vllm.sequence import MultiModalData
9
10 # The assets are located at `s3://air-example-data-2/vllm_opensource_llava/`.
11
12
13 def run_llava_pixel_values():
14 llm = LLM(
15 model="llava-hf/llava-1.5-7b-hf",
16 image_input_type="pixel_values",
17 image_token_id=32000,
18 image_input_shape="1,3,336,336",
19 image_feature_size=576,
20 )
21
22 prompt = "<image>" * 576 + (
23 "\nUSER: What is the content of this image?\nASSISTANT:")
24
25 # This should be provided by another online or offline component.
26 images = torch.load("images/stop_sign_pixel_values.pt")
27
28 outputs = llm.generate(prompt,
29 multi_modal_data=MultiModalData(
30 type=MultiModalData.Type.IMAGE, data=images))
31 for o in outputs:
32 generated_text = o.outputs[0].text
33 print(generated_text)
34
35
36 def run_llava_image_features():
37 llm = LLM(
38 model="llava-hf/llava-1.5-7b-hf",
39 image_input_type="image_features",
40 image_token_id=32000,
41 image_input_shape="1,576,1024",
42 image_feature_size=576,
43 )
44
45 prompt = "<image>" * 576 + (
46 "\nUSER: What is the content of this image?\nASSISTANT:")
47
48 # This should be provided by another online or offline component.
49 images = torch.load("images/stop_sign_image_features.pt")
50
51 outputs = llm.generate(prompt,
52 multi_modal_data=MultiModalData(
53 type=MultiModalData.Type.IMAGE, data=images))
54 for o in outputs:
55 generated_text = o.outputs[0].text
56 print(generated_text)
57
58
59 def main(args):
60 if args.type == "pixel_values":
61 run_llava_pixel_values()
62 else:
63 run_llava_image_features()
64
65
66 if __name__ == "__main__":
67 parser = argparse.ArgumentParser(description="Demo on Llava")
68 parser.add_argument("--type",
69 type=str,
70 choices=["pixel_values", "image_features"],
71 default="pixel_values",
72 help="image input type")
73 args = parser.parse_args()
74 # Download from s3
75 s3_bucket_path = "s3://air-example-data-2/vllm_opensource_llava/"
76 local_directory = "images"
77
78 # Make sure the local directory exists or create it
79 os.makedirs(local_directory, exist_ok=True)
80
81 # Use AWS CLI to sync the directory
82 subprocess.check_call(
83 ["aws", "s3", "sync", s3_bucket_path, local_directory])
84 main(args)
85
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/examples/llava_example.py b/examples/llava_example.py
--- a/examples/llava_example.py
+++ b/examples/llava_example.py
@@ -78,7 +78,13 @@
# Make sure the local directory exists or create it
os.makedirs(local_directory, exist_ok=True)
- # Use AWS CLI to sync the directory
- subprocess.check_call(
- ["aws", "s3", "sync", s3_bucket_path, local_directory])
+ # Use AWS CLI to sync the directory, assume anonymous access
+ subprocess.check_call([
+ "aws",
+ "s3",
+ "sync",
+ s3_bucket_path,
+ local_directory,
+ "--no-sign-request",
+ ])
main(args)
|
{"golden_diff": "diff --git a/examples/llava_example.py b/examples/llava_example.py\n--- a/examples/llava_example.py\n+++ b/examples/llava_example.py\n@@ -78,7 +78,13 @@\n # Make sure the local directory exists or create it\n os.makedirs(local_directory, exist_ok=True)\n \n- # Use AWS CLI to sync the directory\n- subprocess.check_call(\n- [\"aws\", \"s3\", \"sync\", s3_bucket_path, local_directory])\n+ # Use AWS CLI to sync the directory, assume anonymous access\n+ subprocess.check_call([\n+ \"aws\",\n+ \"s3\",\n+ \"sync\",\n+ s3_bucket_path,\n+ local_directory,\n+ \"--no-sign-request\",\n+ ])\n main(args)\n", "issue": "[CI] Test examples in CI\n### Anything you want to discuss about vllm.\n\nCurrent scripts in `examples/` directory are not tested in CI. We should run them to ensure passing \n", "before_files": [{"content": "import argparse\nimport os\nimport subprocess\n\nimport torch\n\nfrom vllm import LLM\nfrom vllm.sequence import MultiModalData\n\n# The assets are located at `s3://air-example-data-2/vllm_opensource_llava/`.\n\n\ndef run_llava_pixel_values():\n llm = LLM(\n model=\"llava-hf/llava-1.5-7b-hf\",\n image_input_type=\"pixel_values\",\n image_token_id=32000,\n image_input_shape=\"1,3,336,336\",\n image_feature_size=576,\n )\n\n prompt = \"<image>\" * 576 + (\n \"\\nUSER: What is the content of this image?\\nASSISTANT:\")\n\n # This should be provided by another online or offline component.\n images = torch.load(\"images/stop_sign_pixel_values.pt\")\n\n outputs = llm.generate(prompt,\n multi_modal_data=MultiModalData(\n type=MultiModalData.Type.IMAGE, data=images))\n for o in outputs:\n generated_text = o.outputs[0].text\n print(generated_text)\n\n\ndef run_llava_image_features():\n llm = LLM(\n model=\"llava-hf/llava-1.5-7b-hf\",\n image_input_type=\"image_features\",\n image_token_id=32000,\n image_input_shape=\"1,576,1024\",\n image_feature_size=576,\n )\n\n prompt = \"<image>\" * 576 + (\n \"\\nUSER: What is the content of this image?\\nASSISTANT:\")\n\n # This should be provided by another online or offline component.\n images = torch.load(\"images/stop_sign_image_features.pt\")\n\n outputs = llm.generate(prompt,\n multi_modal_data=MultiModalData(\n type=MultiModalData.Type.IMAGE, data=images))\n for o in outputs:\n generated_text = o.outputs[0].text\n print(generated_text)\n\n\ndef main(args):\n if args.type == \"pixel_values\":\n run_llava_pixel_values()\n else:\n run_llava_image_features()\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser(description=\"Demo on Llava\")\n parser.add_argument(\"--type\",\n type=str,\n choices=[\"pixel_values\", \"image_features\"],\n default=\"pixel_values\",\n help=\"image input type\")\n args = parser.parse_args()\n # Download from s3\n s3_bucket_path = \"s3://air-example-data-2/vllm_opensource_llava/\"\n local_directory = \"images\"\n\n # Make sure the local directory exists or create it\n os.makedirs(local_directory, exist_ok=True)\n\n # Use AWS CLI to sync the directory\n subprocess.check_call(\n [\"aws\", \"s3\", \"sync\", s3_bucket_path, local_directory])\n main(args)\n", "path": "examples/llava_example.py"}], "after_files": [{"content": "import argparse\nimport os\nimport subprocess\n\nimport torch\n\nfrom vllm import LLM\nfrom vllm.sequence import MultiModalData\n\n# The assets are located at `s3://air-example-data-2/vllm_opensource_llava/`.\n\n\ndef run_llava_pixel_values():\n llm = LLM(\n model=\"llava-hf/llava-1.5-7b-hf\",\n image_input_type=\"pixel_values\",\n image_token_id=32000,\n 
image_input_shape=\"1,3,336,336\",\n image_feature_size=576,\n )\n\n prompt = \"<image>\" * 576 + (\n \"\\nUSER: What is the content of this image?\\nASSISTANT:\")\n\n # This should be provided by another online or offline component.\n images = torch.load(\"images/stop_sign_pixel_values.pt\")\n\n outputs = llm.generate(prompt,\n multi_modal_data=MultiModalData(\n type=MultiModalData.Type.IMAGE, data=images))\n for o in outputs:\n generated_text = o.outputs[0].text\n print(generated_text)\n\n\ndef run_llava_image_features():\n llm = LLM(\n model=\"llava-hf/llava-1.5-7b-hf\",\n image_input_type=\"image_features\",\n image_token_id=32000,\n image_input_shape=\"1,576,1024\",\n image_feature_size=576,\n )\n\n prompt = \"<image>\" * 576 + (\n \"\\nUSER: What is the content of this image?\\nASSISTANT:\")\n\n # This should be provided by another online or offline component.\n images = torch.load(\"images/stop_sign_image_features.pt\")\n\n outputs = llm.generate(prompt,\n multi_modal_data=MultiModalData(\n type=MultiModalData.Type.IMAGE, data=images))\n for o in outputs:\n generated_text = o.outputs[0].text\n print(generated_text)\n\n\ndef main(args):\n if args.type == \"pixel_values\":\n run_llava_pixel_values()\n else:\n run_llava_image_features()\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser(description=\"Demo on Llava\")\n parser.add_argument(\"--type\",\n type=str,\n choices=[\"pixel_values\", \"image_features\"],\n default=\"pixel_values\",\n help=\"image input type\")\n args = parser.parse_args()\n # Download from s3\n s3_bucket_path = \"s3://air-example-data-2/vllm_opensource_llava/\"\n local_directory = \"images\"\n\n # Make sure the local directory exists or create it\n os.makedirs(local_directory, exist_ok=True)\n\n # Use AWS CLI to sync the directory, assume anonymous access\n subprocess.check_call([\n \"aws\",\n \"s3\",\n \"sync\",\n s3_bucket_path,\n local_directory,\n \"--no-sign-request\",\n ])\n main(args)\n", "path": "examples/llava_example.py"}]}
| 1,109 | 172 |
gh_patches_debug_17874
|
rasdani/github-patches
|
git_diff
|
beeware__toga-1605
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error on Android when converting selection value to String
Sample app:
```
import toga
from toga.style import Pack
from toga.style.pack import COLUMN, ROW
class AFV(toga.App):
def startup(self):
self.main_window = toga.MainWindow(title=self.formal_name)
box_test = toga.Box(style=Pack(direction=COLUMN, padding=5))
self.label_1 = toga.Label('TESTE 1')
self.comboBox_1 = toga.Selection(items=["ITEM 1", "ITEM 2", "ITEM 3"])
self.lineEdit_1 = toga.TextInput()
self.pushButton_1 = toga.Button('TESTE')
box_test.add(self.label_1, self.comboBox_1, self.lineEdit_1, self.pushButton_1)
self.pushButton_1.on_press = self.print_combo
self.main_window.content = box_test
self.main_window.show()
def print_combo(self, widget):
name_combo = self.comboBox_1.value
print(name_combo)
def main():
return AFV()
```
When the button is pressed, the error:
com.chaquo.python.PyException: AttributeError: 'str' object has no attribute 'toString'
is raised.
Using Briefcase 0.3.10; worked previously on Briefcase 0.3.9.
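For reference, a hedged sketch of the kind of guard that avoids the Java-only `toString()` call (illustrative only; the real change belongs in the Android backend's `Selection` implementation):

```python
# Hypothetical helper mirroring the eventual fix: normalise whatever the
# Spinner returns (a Java object proxy or already a Python str) to a string.
def selected_item_to_text(selected):
    if selected is None:
        return None
    return str(selected)  # safe for both Java string proxies and plain str
```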
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/android/toga_android/widgets/selection.py`
Content:
```
1 from travertino.size import at_least
2
3 from ..libs.android import R__layout
4 from ..libs.android.view import Gravity, View__MeasureSpec
5 from ..libs.android.widget import ArrayAdapter, OnItemSelectedListener, Spinner
6 from .base import Widget, align
7
8
9 class TogaOnItemSelectedListener(OnItemSelectedListener):
10 def __init__(self, impl):
11 super().__init__()
12 self._impl = impl
13
14 def onItemSelected(self, _parent, _view, _position, _id):
15 if self._impl.interface.on_select:
16 self._impl.interface.on_select(widget=self._impl.interface)
17
18
19 class Selection(Widget):
20 def create(self):
21 self.native = Spinner(self._native_activity, Spinner.MODE_DROPDOWN)
22 self.native.setOnItemSelectedListener(TogaOnItemSelectedListener(
23 impl=self
24 ))
25 # On Android, the list of options is provided to the `Spinner` wrapped in
26 # an `ArrayAdapter`. We store `self.adapter` to avoid having to typecast it
27 # in `add_item()`.
28 self.adapter = ArrayAdapter(
29 self._native_activity,
30 R__layout.simple_spinner_item
31 )
32 self.adapter.setDropDownViewResource(R__layout.simple_spinner_dropdown_item)
33 self.native.setAdapter(self.adapter)
34 # Create a mapping from text to numeric index to support `select_item()`.
35 self._indexByItem = {}
36
37 def add_item(self, item):
38 new_index = self.adapter.getCount()
39 self.adapter.add(str(item))
40 self._indexByItem[item] = new_index
41
42 def select_item(self, item):
43 self.native.setSelection(self._indexByItem[item])
44
45 def get_selected_item(self):
46 selected = self.native.getSelectedItem()
47 if selected:
48 return selected.toString()
49 else:
50 return None
51
52 def remove_all_items(self):
53 self.adapter.clear()
54
55 def rehint(self):
56 self.native.measure(
57 View__MeasureSpec.UNSPECIFIED, View__MeasureSpec.UNSPECIFIED
58 )
59 self.interface.intrinsic.width = at_least(self.native.getMeasuredWidth())
60 self.interface.intrinsic.height = self.native.getMeasuredHeight()
61
62 def set_alignment(self, value):
63 self.native.setGravity(Gravity.CENTER_VERTICAL | align(value))
64
65 def set_on_select(self, handler):
66 # No special handling is required.
67 pass
68
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/android/toga_android/widgets/selection.py b/src/android/toga_android/widgets/selection.py
--- a/src/android/toga_android/widgets/selection.py
+++ b/src/android/toga_android/widgets/selection.py
@@ -22,9 +22,6 @@
self.native.setOnItemSelectedListener(TogaOnItemSelectedListener(
impl=self
))
- # On Android, the list of options is provided to the `Spinner` wrapped in
- # an `ArrayAdapter`. We store `self.adapter` to avoid having to typecast it
- # in `add_item()`.
self.adapter = ArrayAdapter(
self._native_activity,
R__layout.simple_spinner_item
@@ -45,7 +42,7 @@
def get_selected_item(self):
selected = self.native.getSelectedItem()
if selected:
- return selected.toString()
+ return str(selected)
else:
return None
|
{"golden_diff": "diff --git a/src/android/toga_android/widgets/selection.py b/src/android/toga_android/widgets/selection.py\n--- a/src/android/toga_android/widgets/selection.py\n+++ b/src/android/toga_android/widgets/selection.py\n@@ -22,9 +22,6 @@\n self.native.setOnItemSelectedListener(TogaOnItemSelectedListener(\n impl=self\n ))\n- # On Android, the list of options is provided to the `Spinner` wrapped in\n- # an `ArrayAdapter`. We store `self.adapter` to avoid having to typecast it\n- # in `add_item()`.\n self.adapter = ArrayAdapter(\n self._native_activity,\n R__layout.simple_spinner_item\n@@ -45,7 +42,7 @@\n def get_selected_item(self):\n selected = self.native.getSelectedItem()\n if selected:\n- return selected.toString()\n+ return str(selected)\n else:\n return None\n", "issue": "Error on Android when converting selection value to String\nSample app:\r\n```\r\nimport toga\r\nfrom toga.style import Pack\r\nfrom toga.style.pack import COLUMN, ROW\r\n\r\nclass AFV(toga.App):\r\n\r\n def startup(self):\r\n self.main_window = toga.MainWindow(title=self.formal_name)\r\n\r\n box_test = toga.Box(style=Pack(direction=COLUMN, padding=5))\r\n self.label_1 = toga.Label('TESTE 1')\r\n self.comboBox_1 = toga.Selection(items=[\"ITEM 1\", \"ITEM 2\", \"ITEM 3\"])\r\n self.lineEdit_1 = toga.TextInput()\r\n self.pushButton_1 = toga.Button('TESTE')\r\n\r\n box_test.add(self.label_1, self.comboBox_1, self.lineEdit_1, self.pushButton_1)\r\n\r\n self.pushButton_1.on_press = self.print_combo\r\n\r\n self.main_window.content = box_test\r\n self.main_window.show()\r\n\r\n def print_combo(self, widget):\r\n name_combo = self.comboBox_1.value\r\n print(name_combo)\r\n\r\n\r\n\r\ndef main():\r\n return AFV()\r\n```\r\n\r\nWhen the button is pressed, the error:\r\n\r\n com.chaquo.python.PyException: AttributeError: 'str' object has no attribute 'toString'\r\n\r\nis raised.\r\n\r\nUsing Briefcase 0.3.10; worked previously on Briefcase 0.3.9.\nError on Android when converting selection value to String\nSample app:\r\n```\r\nimport toga\r\nfrom toga.style import Pack\r\nfrom toga.style.pack import COLUMN, ROW\r\n\r\nclass AFV(toga.App):\r\n\r\n def startup(self):\r\n self.main_window = toga.MainWindow(title=self.formal_name)\r\n\r\n box_test = toga.Box(style=Pack(direction=COLUMN, padding=5))\r\n self.label_1 = toga.Label('TESTE 1')\r\n self.comboBox_1 = toga.Selection(items=[\"ITEM 1\", \"ITEM 2\", \"ITEM 3\"])\r\n self.lineEdit_1 = toga.TextInput()\r\n self.pushButton_1 = toga.Button('TESTE')\r\n\r\n box_test.add(self.label_1, self.comboBox_1, self.lineEdit_1, self.pushButton_1)\r\n\r\n self.pushButton_1.on_press = self.print_combo\r\n\r\n self.main_window.content = box_test\r\n self.main_window.show()\r\n\r\n def print_combo(self, widget):\r\n name_combo = self.comboBox_1.value\r\n print(name_combo)\r\n\r\n\r\n\r\ndef main():\r\n return AFV()\r\n```\r\n\r\nWhen the button is pressed, the error:\r\n\r\n com.chaquo.python.PyException: AttributeError: 'str' object has no attribute 'toString'\r\n\r\nis raised.\r\n\r\nUsing Briefcase 0.3.10; worked previously on Briefcase 0.3.9.\n", "before_files": [{"content": "from travertino.size import at_least\n\nfrom ..libs.android import R__layout\nfrom ..libs.android.view import Gravity, View__MeasureSpec\nfrom ..libs.android.widget import ArrayAdapter, OnItemSelectedListener, Spinner\nfrom .base import Widget, align\n\n\nclass TogaOnItemSelectedListener(OnItemSelectedListener):\n def __init__(self, impl):\n super().__init__()\n self._impl = impl\n\n def onItemSelected(self, 
_parent, _view, _position, _id):\n if self._impl.interface.on_select:\n self._impl.interface.on_select(widget=self._impl.interface)\n\n\nclass Selection(Widget):\n def create(self):\n self.native = Spinner(self._native_activity, Spinner.MODE_DROPDOWN)\n self.native.setOnItemSelectedListener(TogaOnItemSelectedListener(\n impl=self\n ))\n # On Android, the list of options is provided to the `Spinner` wrapped in\n # an `ArrayAdapter`. We store `self.adapter` to avoid having to typecast it\n # in `add_item()`.\n self.adapter = ArrayAdapter(\n self._native_activity,\n R__layout.simple_spinner_item\n )\n self.adapter.setDropDownViewResource(R__layout.simple_spinner_dropdown_item)\n self.native.setAdapter(self.adapter)\n # Create a mapping from text to numeric index to support `select_item()`.\n self._indexByItem = {}\n\n def add_item(self, item):\n new_index = self.adapter.getCount()\n self.adapter.add(str(item))\n self._indexByItem[item] = new_index\n\n def select_item(self, item):\n self.native.setSelection(self._indexByItem[item])\n\n def get_selected_item(self):\n selected = self.native.getSelectedItem()\n if selected:\n return selected.toString()\n else:\n return None\n\n def remove_all_items(self):\n self.adapter.clear()\n\n def rehint(self):\n self.native.measure(\n View__MeasureSpec.UNSPECIFIED, View__MeasureSpec.UNSPECIFIED\n )\n self.interface.intrinsic.width = at_least(self.native.getMeasuredWidth())\n self.interface.intrinsic.height = self.native.getMeasuredHeight()\n\n def set_alignment(self, value):\n self.native.setGravity(Gravity.CENTER_VERTICAL | align(value))\n\n def set_on_select(self, handler):\n # No special handling is required.\n pass\n", "path": "src/android/toga_android/widgets/selection.py"}], "after_files": [{"content": "from travertino.size import at_least\n\nfrom ..libs.android import R__layout\nfrom ..libs.android.view import Gravity, View__MeasureSpec\nfrom ..libs.android.widget import ArrayAdapter, OnItemSelectedListener, Spinner\nfrom .base import Widget, align\n\n\nclass TogaOnItemSelectedListener(OnItemSelectedListener):\n def __init__(self, impl):\n super().__init__()\n self._impl = impl\n\n def onItemSelected(self, _parent, _view, _position, _id):\n if self._impl.interface.on_select:\n self._impl.interface.on_select(widget=self._impl.interface)\n\n\nclass Selection(Widget):\n def create(self):\n self.native = Spinner(self._native_activity, Spinner.MODE_DROPDOWN)\n self.native.setOnItemSelectedListener(TogaOnItemSelectedListener(\n impl=self\n ))\n self.adapter = ArrayAdapter(\n self._native_activity,\n R__layout.simple_spinner_item\n )\n self.adapter.setDropDownViewResource(R__layout.simple_spinner_dropdown_item)\n self.native.setAdapter(self.adapter)\n # Create a mapping from text to numeric index to support `select_item()`.\n self._indexByItem = {}\n\n def add_item(self, item):\n new_index = self.adapter.getCount()\n self.adapter.add(str(item))\n self._indexByItem[item] = new_index\n\n def select_item(self, item):\n self.native.setSelection(self._indexByItem[item])\n\n def get_selected_item(self):\n selected = self.native.getSelectedItem()\n if selected:\n return str(selected)\n else:\n return None\n\n def remove_all_items(self):\n self.adapter.clear()\n\n def rehint(self):\n self.native.measure(\n View__MeasureSpec.UNSPECIFIED, View__MeasureSpec.UNSPECIFIED\n )\n self.interface.intrinsic.width = at_least(self.native.getMeasuredWidth())\n self.interface.intrinsic.height = self.native.getMeasuredHeight()\n\n def set_alignment(self, value):\n 
self.native.setGravity(Gravity.CENTER_VERTICAL | align(value))\n\n def set_on_select(self, handler):\n # No special handling is required.\n pass\n", "path": "src/android/toga_android/widgets/selection.py"}]}
| 1,445 | 196 |
gh_patches_debug_11906
|
rasdani/github-patches
|
git_diff
|
holoviz__panel-4441
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Panel 1.0: Divider takes 100% of window height
I'm on the current `main` branch of Panel. When I use the `Divider` it takes up 100% of the window height.
```python
import panel as pn
pn.extension(sizing_mode="stretch_width")
pn.panel("Header", styles={"background": "lightgray"}).servable()
pn.layout.Divider(styles={"background": "salmon"}).servable()
pn.panel("Footer", styles={"background": "lightgray"}).servable()
```

I don't know if it is on purpose. But the `styles` seem not to apply to the `Divider` either.
--- END ISSUE ---
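Both symptoms point at the same spot in `Divider._get_model` (shown in the file below): the model's `styles` dict is overwritten wholesale with a hard-coded 100% width/height, so user-supplied `styles` never reach the browser and the rule's container is told to fill its parent. A simplified stand-alone sketch of that overwrite (function and variable names are made up, not Panel's code):
```python
def build_divider_properties(user_styles):
    properties = {"styles": dict(user_styles)}                   # e.g. {"background": "salmon"}
    properties["styles"] = {"width": "100%", "height": "100%"}   # unconditional overwrite
    return properties

print(build_divider_properties({"background": "salmon"}))
# {'styles': {'width': '100%', 'height': '100%'}} -> the salmon background is silently dropped
```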
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `panel/layout/spacer.py`
Content:
```
1 """
2 Spacer components to add horizontal or vertical space to a layout.
3 """
4
5 import param
6
7 from bokeh.models import Div as BkDiv, Spacer as BkSpacer
8
9 from ..reactive import Reactive
10
11
12 class Spacer(Reactive):
13 """
14 The `Spacer` layout is a very versatile component which makes it easy to
15 put fixed or responsive spacing between objects.
16
17 Like all other components spacers support both absolute and responsive
18 sizing modes.
19
20 Reference: https://panel.holoviz.org/user_guide/Customization.html#spacers
21
22 :Example:
23
24 >>> pn.Row(
25 ... 1, pn.Spacer(width=200),
26 ... 2, pn.Spacer(width=100),
27 ... 3
28 ... )
29 """
30
31 _bokeh_model = BkSpacer
32
33 def _get_model(self, doc, root=None, parent=None, comm=None):
34 properties = self._process_param_change(self._init_params())
35 model = self._bokeh_model(**properties)
36 if root is None:
37 root = model
38 self._models[root.ref['id']] = (model, parent)
39 return model
40
41
42 class VSpacer(Spacer):
43 """
44 The `VSpacer` layout provides responsive vertical spacing.
45
46 Using this component we can space objects equidistantly in a layout and
47 allow the empty space to shrink when the browser is resized.
48
49 Reference: https://panel.holoviz.org/user_guide/Customization.html#spacers
50
51 :Example:
52
53 >>> pn.Column(
54 ... pn.layout.VSpacer(), 'Item 1',
55 ... pn.layout.VSpacer(), 'Item 2',
56 ... pn.layout.VSpacer()
57 ... )
58 """
59
60 sizing_mode = param.Parameter(default='stretch_height', readonly=True)
61
62
63 class HSpacer(Spacer):
64 """
65 The `HSpacer` layout provides responsive vertical spacing.
66
67 Using this component we can space objects equidistantly in a layout and
68 allow the empty space to shrink when the browser is resized.
69
70 Reference: https://panel.holoviz.org/user_guide/Customization.html#spacers
71
72 :Example:
73
74 >>> pn.Row(
75 ... pn.layout.HSpacer(), 'Item 1',
76 ... pn.layout.HSpacer(), 'Item 2',
77 ... pn.layout.HSpacer()
78 ... )
79 """
80
81 sizing_mode = param.Parameter(default='stretch_width', readonly=True)
82
83
84 class Divider(Reactive):
85 """
86 A `Divider` draws a horizontal rule (a `<hr>` tag in HTML) to separate
87 multiple components in a layout. It automatically spans the full width of
88 the container.
89
90 Reference: https://panel.holoviz.org/reference/layouts/Divider.html
91
92 :Example:
93
94 >>> pn.Column(
95 ... '# Lorem Ipsum',
96 ... pn.layout.Divider(),
97 ... 'A very long text... '
98 >>> )
99 """
100
101 width_policy = param.ObjectSelector(default="fit", readonly=True)
102
103 _bokeh_model = BkDiv
104
105 def _get_model(self, doc, root=None, parent=None, comm=None):
106 properties = self._process_param_change(self._init_params())
107 properties['styles'] = {'width': '100%', 'height': '100%'}
108 model = self._bokeh_model(text='<hr style="margin: 0px">', **properties)
109 if root is None:
110 root = model
111 self._models[root.ref['id']] = (model, parent)
112 return model
113
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/panel/layout/spacer.py b/panel/layout/spacer.py
--- a/panel/layout/spacer.py
+++ b/panel/layout/spacer.py
@@ -102,10 +102,11 @@
_bokeh_model = BkDiv
+ _stylesheets = ["css/divider.css"]
+
def _get_model(self, doc, root=None, parent=None, comm=None):
properties = self._process_param_change(self._init_params())
- properties['styles'] = {'width': '100%', 'height': '100%'}
- model = self._bokeh_model(text='<hr style="margin: 0px">', **properties)
+ model = self._bokeh_model(text='<hr>', **properties)
if root is None:
root = model
self._models[root.ref['id']] = (model, parent)
|
{"golden_diff": "diff --git a/panel/layout/spacer.py b/panel/layout/spacer.py\n--- a/panel/layout/spacer.py\n+++ b/panel/layout/spacer.py\n@@ -102,10 +102,11 @@\n \n _bokeh_model = BkDiv\n \n+ _stylesheets = [\"css/divider.css\"]\n+\n def _get_model(self, doc, root=None, parent=None, comm=None):\n properties = self._process_param_change(self._init_params())\n- properties['styles'] = {'width': '100%', 'height': '100%'}\n- model = self._bokeh_model(text='<hr style=\"margin: 0px\">', **properties)\n+ model = self._bokeh_model(text='<hr>', **properties)\n if root is None:\n root = model\n self._models[root.ref['id']] = (model, parent)\n", "issue": "Panel 1.0: Divider takes 100% of window height\nI'm on the current `main` branch of Panel. When I use the `Divider` it takes up 100% of the window height.\r\n\r\n```python\r\nimport panel as pn\r\n\r\npn.extension(sizing_mode=\"stretch_width\")\r\n\r\npn.panel(\"Header\", styles={\"background\": \"lightgray\"}).servable()\r\npn.layout.Divider(styles={\"background\": \"salmon\"}).servable()\r\npn.panel(\"Footer\", styles={\"background\": \"lightgray\"}).servable()\r\n```\r\n\r\n\r\n\r\nI don't know if it is on purpose. But the `styles` seem not to apply to the `Divider` either.\n", "before_files": [{"content": "\"\"\"\nSpacer components to add horizontal or vertical space to a layout.\n\"\"\"\n\nimport param\n\nfrom bokeh.models import Div as BkDiv, Spacer as BkSpacer\n\nfrom ..reactive import Reactive\n\n\nclass Spacer(Reactive):\n \"\"\"\n The `Spacer` layout is a very versatile component which makes it easy to\n put fixed or responsive spacing between objects.\n\n Like all other components spacers support both absolute and responsive\n sizing modes.\n\n Reference: https://panel.holoviz.org/user_guide/Customization.html#spacers\n\n :Example:\n\n >>> pn.Row(\n ... 1, pn.Spacer(width=200),\n ... 2, pn.Spacer(width=100),\n ... 3\n ... )\n \"\"\"\n\n _bokeh_model = BkSpacer\n\n def _get_model(self, doc, root=None, parent=None, comm=None):\n properties = self._process_param_change(self._init_params())\n model = self._bokeh_model(**properties)\n if root is None:\n root = model\n self._models[root.ref['id']] = (model, parent)\n return model\n\n\nclass VSpacer(Spacer):\n \"\"\"\n The `VSpacer` layout provides responsive vertical spacing.\n\n Using this component we can space objects equidistantly in a layout and\n allow the empty space to shrink when the browser is resized.\n\n Reference: https://panel.holoviz.org/user_guide/Customization.html#spacers\n\n :Example:\n\n >>> pn.Column(\n ... pn.layout.VSpacer(), 'Item 1',\n ... pn.layout.VSpacer(), 'Item 2',\n ... pn.layout.VSpacer()\n ... )\n \"\"\"\n\n sizing_mode = param.Parameter(default='stretch_height', readonly=True)\n\n\nclass HSpacer(Spacer):\n \"\"\"\n The `HSpacer` layout provides responsive vertical spacing.\n\n Using this component we can space objects equidistantly in a layout and\n allow the empty space to shrink when the browser is resized.\n\n Reference: https://panel.holoviz.org/user_guide/Customization.html#spacers\n\n :Example:\n\n >>> pn.Row(\n ... pn.layout.HSpacer(), 'Item 1',\n ... pn.layout.HSpacer(), 'Item 2',\n ... pn.layout.HSpacer()\n ... )\n \"\"\"\n\n sizing_mode = param.Parameter(default='stretch_width', readonly=True)\n\n\nclass Divider(Reactive):\n \"\"\"\n A `Divider` draws a horizontal rule (a `<hr>` tag in HTML) to separate\n multiple components in a layout. 
It automatically spans the full width of\n the container.\n\n Reference: https://panel.holoviz.org/reference/layouts/Divider.html\n\n :Example:\n\n >>> pn.Column(\n ... '# Lorem Ipsum',\n ... pn.layout.Divider(),\n ... 'A very long text... '\n >>> )\n \"\"\"\n\n width_policy = param.ObjectSelector(default=\"fit\", readonly=True)\n\n _bokeh_model = BkDiv\n\n def _get_model(self, doc, root=None, parent=None, comm=None):\n properties = self._process_param_change(self._init_params())\n properties['styles'] = {'width': '100%', 'height': '100%'}\n model = self._bokeh_model(text='<hr style=\"margin: 0px\">', **properties)\n if root is None:\n root = model\n self._models[root.ref['id']] = (model, parent)\n return model\n", "path": "panel/layout/spacer.py"}], "after_files": [{"content": "\"\"\"\nSpacer components to add horizontal or vertical space to a layout.\n\"\"\"\n\nimport param\n\nfrom bokeh.models import Div as BkDiv, Spacer as BkSpacer\n\nfrom ..reactive import Reactive\n\n\nclass Spacer(Reactive):\n \"\"\"\n The `Spacer` layout is a very versatile component which makes it easy to\n put fixed or responsive spacing between objects.\n\n Like all other components spacers support both absolute and responsive\n sizing modes.\n\n Reference: https://panel.holoviz.org/user_guide/Customization.html#spacers\n\n :Example:\n\n >>> pn.Row(\n ... 1, pn.Spacer(width=200),\n ... 2, pn.Spacer(width=100),\n ... 3\n ... )\n \"\"\"\n\n _bokeh_model = BkSpacer\n\n def _get_model(self, doc, root=None, parent=None, comm=None):\n properties = self._process_param_change(self._init_params())\n model = self._bokeh_model(**properties)\n if root is None:\n root = model\n self._models[root.ref['id']] = (model, parent)\n return model\n\n\nclass VSpacer(Spacer):\n \"\"\"\n The `VSpacer` layout provides responsive vertical spacing.\n\n Using this component we can space objects equidistantly in a layout and\n allow the empty space to shrink when the browser is resized.\n\n Reference: https://panel.holoviz.org/user_guide/Customization.html#spacers\n\n :Example:\n\n >>> pn.Column(\n ... pn.layout.VSpacer(), 'Item 1',\n ... pn.layout.VSpacer(), 'Item 2',\n ... pn.layout.VSpacer()\n ... )\n \"\"\"\n\n sizing_mode = param.Parameter(default='stretch_height', readonly=True)\n\n\nclass HSpacer(Spacer):\n \"\"\"\n The `HSpacer` layout provides responsive vertical spacing.\n\n Using this component we can space objects equidistantly in a layout and\n allow the empty space to shrink when the browser is resized.\n\n Reference: https://panel.holoviz.org/user_guide/Customization.html#spacers\n\n :Example:\n\n >>> pn.Row(\n ... pn.layout.HSpacer(), 'Item 1',\n ... pn.layout.HSpacer(), 'Item 2',\n ... pn.layout.HSpacer()\n ... )\n \"\"\"\n\n sizing_mode = param.Parameter(default='stretch_width', readonly=True)\n\n\nclass Divider(Reactive):\n \"\"\"\n A `Divider` draws a horizontal rule (a `<hr>` tag in HTML) to separate\n multiple components in a layout. It automatically spans the full width of\n the container.\n\n Reference: https://panel.holoviz.org/reference/layouts/Divider.html\n\n :Example:\n\n >>> pn.Column(\n ... '# Lorem Ipsum',\n ... pn.layout.Divider(),\n ... 'A very long text... 
'\n >>> )\n \"\"\"\n\n width_policy = param.ObjectSelector(default=\"fit\", readonly=True)\n\n _bokeh_model = BkDiv\n\n _stylesheets = [\"css/divider.css\"]\n\n def _get_model(self, doc, root=None, parent=None, comm=None):\n properties = self._process_param_change(self._init_params())\n model = self._bokeh_model(text='<hr>', **properties)\n if root is None:\n root = model\n self._models[root.ref['id']] = (model, parent)\n return model\n", "path": "panel/layout/spacer.py"}]}
| 1,476 | 196 |
gh_patches_debug_17973
|
rasdani/github-patches
|
git_diff
|
Kinto__kinto-1164
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Batch requests accept additional attributes
We should probably forbid them.
```
> echo '{"requests": [], "foo": {}}' | http post http://localhost:8888/v1/batch
HTTP/1.1 200 OK
Access-Control-Expose-Headers: Retry-After, Alert, Content-Length, Backoff
Content-Length: 16
Content-Type: application/json
Date: Thu, 16 Mar 2017 23:01:01 GMT
Server: waitress
X-Content-Type-Options: nosniff
{
"responses": []
}
```
Edit: I just noticed we currently test for batch ignoring additional attributes. Is this a feature?
--- END ISSUE ---
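The behaviour follows from colander's defaults rather than anything batch-specific: `colander.Mapping` uses `unknown='ignore'` unless told otherwise, so unrecognised top-level keys are silently dropped during deserialization. A minimal stand-alone sketch (schema name made up; assumes the `colander` package is installed):
```python
import colander


class BatchPayload(colander.MappingSchema):
    requests = colander.SchemaNode(colander.Sequence(),
                                   colander.SchemaNode(colander.String()),
                                   missing=colander.drop)


# unknown='ignore' is the default, so the stray "foo" key is dropped, not rejected
print(BatchPayload().deserialize({"requests": [], "foo": {}}))
# -> {'requests': []}
```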
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kinto/core/views/batch.py`
Content:
```
1 import logging
2
3 import colander
4 from cornice.validators import colander_validator
5 from pyramid import httpexceptions
6 from pyramid.security import NO_PERMISSION_REQUIRED
7
8 from kinto.core import errors
9 from kinto.core import Service
10 from kinto.core.errors import ErrorSchema
11 from kinto.core.utils import merge_dicts, build_request, build_response
12
13
14 subrequest_logger = logging.getLogger("subrequest.summary")
15
16 valid_http_method = colander.OneOf(('GET', 'HEAD', 'DELETE', 'TRACE',
17 'POST', 'PUT', 'PATCH'))
18
19
20 def string_values(node, cstruct):
21 """Validate that a ``colander.Mapping`` only has strings in its values.
22
23 .. warning::
24
25 Should be associated to a ``colander.Mapping`` schema node.
26 """
27 are_strings = [isinstance(v, str) for v in cstruct.values()]
28 if not all(are_strings):
29 error_msg = '{} contains non string value'.format(cstruct)
30 raise colander.Invalid(node, error_msg)
31
32
33 class BatchRequestSchema(colander.MappingSchema):
34 method = colander.SchemaNode(colander.String(),
35 validator=valid_http_method,
36 missing=colander.drop)
37 path = colander.SchemaNode(colander.String(),
38 validator=colander.Regex('^/'))
39 headers = colander.SchemaNode(colander.Mapping(unknown='preserve'),
40 validator=string_values,
41 missing=colander.drop)
42 body = colander.SchemaNode(colander.Mapping(unknown='preserve'),
43 missing=colander.drop)
44
45
46 class BatchPayloadSchema(colander.MappingSchema):
47 defaults = BatchRequestSchema(missing=colander.drop).clone()
48 requests = colander.SchemaNode(colander.Sequence(),
49 BatchRequestSchema())
50
51 def __init__(self, *args, **kwargs):
52 super().__init__(*args, **kwargs)
53 # On defaults, path is not mandatory.
54 self.get('defaults').get('path').missing = colander.drop
55
56 def deserialize(self, cstruct=colander.null):
57 """Preprocess received data to carefully merge defaults.
58 """
59 if cstruct is not colander.null:
60 defaults = cstruct.get('defaults')
61 requests = cstruct.get('requests')
62 if isinstance(defaults, dict) and isinstance(requests, list):
63 for request in requests:
64 if isinstance(request, dict):
65 merge_dicts(request, defaults)
66 return super().deserialize(cstruct)
67
68
69 class BatchRequest(colander.MappingSchema):
70 body = BatchPayloadSchema()
71
72
73 class BatchResponseSchema(colander.MappingSchema):
74 status = colander.SchemaNode(colander.Integer())
75 path = colander.SchemaNode(colander.String())
76 headers = colander.SchemaNode(colander.Mapping(unknown='preserve'),
77 validator=string_values,
78 missing=colander.drop)
79 body = colander.SchemaNode(colander.Mapping(unknown='preserve'),
80 missing=colander.drop)
81
82
83 class BatchResponseBodySchema(colander.MappingSchema):
84 responses = colander.SequenceSchema(BatchResponseSchema(missing=colander.drop))
85
86
87 class BatchResponse(colander.MappingSchema):
88 body = BatchResponseBodySchema()
89
90
91 class ErrorResponseSchema(colander.MappingSchema):
92 body = ErrorSchema()
93
94
95 batch_responses = {
96 '200': BatchResponse(description='Return a list of operation responses.'),
97 '400': ErrorResponseSchema(description='The request was badly formatted.'),
98 'default': ErrorResponseSchema(description='an unknown error occurred.')
99 }
100
101 batch = Service(name="batch", path='/batch',
102 description="Batch operations")
103
104
105 @batch.post(schema=BatchRequest,
106 validators=(colander_validator,),
107 permission=NO_PERMISSION_REQUIRED,
108 tags=['Batch'], operation_id='batch',
109 response_schemas=batch_responses)
110 def post_batch(request):
111 requests = request.validated['body']['requests']
112
113 request.log_context(batch_size=len(requests))
114
115 limit = request.registry.settings['batch_max_requests']
116 if limit and len(requests) > int(limit):
117 error_msg = 'Number of requests is limited to {}'.format(limit)
118 request.errors.add('body', 'requests', error_msg)
119 return
120
121 if any([batch.path in req['path'] for req in requests]):
122 error_msg = 'Recursive call on {} endpoint is forbidden.'.format(batch.path)
123 request.errors.add('body', 'requests', error_msg)
124 return
125
126 responses = []
127
128 for subrequest_spec in requests:
129 subrequest = build_request(request, subrequest_spec)
130
131 log_context = {'path': subrequest.path,
132 'method': subrequest.method,
133 **request.log_context()}
134 try:
135 # Invoke subrequest without individual transaction.
136 resp, subrequest = request.follow_subrequest(subrequest,
137 use_tweens=False)
138 except httpexceptions.HTTPException as e:
139 if e.content_type == 'application/json':
140 resp = e
141 else:
142 # JSONify raw Pyramid errors.
143 resp = errors.http_error(e)
144
145 subrequest_logger.info('subrequest.summary', extra=log_context)
146
147 dict_resp = build_response(resp, subrequest)
148 responses.append(dict_resp)
149
150 return {
151 'responses': responses
152 }
153
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/kinto/core/views/batch.py b/kinto/core/views/batch.py
--- a/kinto/core/views/batch.py
+++ b/kinto/core/views/batch.py
@@ -42,12 +42,20 @@
body = colander.SchemaNode(colander.Mapping(unknown='preserve'),
missing=colander.drop)
+ @staticmethod
+ def schema_type():
+ return colander.Mapping(unknown='raise')
+
class BatchPayloadSchema(colander.MappingSchema):
defaults = BatchRequestSchema(missing=colander.drop).clone()
requests = colander.SchemaNode(colander.Sequence(),
BatchRequestSchema())
+ @staticmethod
+ def schema_type():
+ return colander.Mapping(unknown='raise')
+
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# On defaults, path is not mandatory.
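A rough sketch (not a test from the repo) of what the added `schema_type()` override changes for the payload from the issue:
```python
import colander


class StrictPayload(colander.MappingSchema):
    @staticmethod
    def schema_type():
        return colander.Mapping(unknown='raise')   # mirrors the override added above

    requests = colander.SchemaNode(colander.Sequence(),
                                   colander.SchemaNode(colander.String(), missing=colander.drop),
                                   missing=colander.drop)


try:
    StrictPayload().deserialize({"requests": [], "foo": {}})
except colander.Invalid as exc:
    print(exc.asdict())   # the unrecognised "foo" key is now rejected instead of ignored
```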
|
{"golden_diff": "diff --git a/kinto/core/views/batch.py b/kinto/core/views/batch.py\n--- a/kinto/core/views/batch.py\n+++ b/kinto/core/views/batch.py\n@@ -42,12 +42,20 @@\n body = colander.SchemaNode(colander.Mapping(unknown='preserve'),\n missing=colander.drop)\n \n+ @staticmethod\n+ def schema_type():\n+ return colander.Mapping(unknown='raise')\n+\n \n class BatchPayloadSchema(colander.MappingSchema):\n defaults = BatchRequestSchema(missing=colander.drop).clone()\n requests = colander.SchemaNode(colander.Sequence(),\n BatchRequestSchema())\n \n+ @staticmethod\n+ def schema_type():\n+ return colander.Mapping(unknown='raise')\n+\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n # On defaults, path is not mandatory.\n", "issue": "Batch requests accept additional attributes\nWe should probably forbid them.\r\n\r\n```\r\n> echo '{\"requests\": [], \"foo\": {}}' | http post http://localhost:8888/v1/batch \r\n\r\nHTTP/1.1 200 OK\r\nAccess-Control-Expose-Headers: Retry-After, Alert, Content-Length, Backoff\r\nContent-Length: 16\r\nContent-Type: application/json\r\nDate: Thu, 16 Mar 2017 23:01:01 GMT\r\nServer: waitress\r\nX-Content-Type-Options: nosniff\r\n\r\n{\r\n \"responses\": []\r\n}\r\n```\r\n\r\nEdit: I just notice we currently test for batch ignoring additional attributes. Is this a feature?\n", "before_files": [{"content": "import logging\n\nimport colander\nfrom cornice.validators import colander_validator\nfrom pyramid import httpexceptions\nfrom pyramid.security import NO_PERMISSION_REQUIRED\n\nfrom kinto.core import errors\nfrom kinto.core import Service\nfrom kinto.core.errors import ErrorSchema\nfrom kinto.core.utils import merge_dicts, build_request, build_response\n\n\nsubrequest_logger = logging.getLogger(\"subrequest.summary\")\n\nvalid_http_method = colander.OneOf(('GET', 'HEAD', 'DELETE', 'TRACE',\n 'POST', 'PUT', 'PATCH'))\n\n\ndef string_values(node, cstruct):\n \"\"\"Validate that a ``colander.Mapping`` only has strings in its values.\n\n .. 
warning::\n\n Should be associated to a ``colander.Mapping`` schema node.\n \"\"\"\n are_strings = [isinstance(v, str) for v in cstruct.values()]\n if not all(are_strings):\n error_msg = '{} contains non string value'.format(cstruct)\n raise colander.Invalid(node, error_msg)\n\n\nclass BatchRequestSchema(colander.MappingSchema):\n method = colander.SchemaNode(colander.String(),\n validator=valid_http_method,\n missing=colander.drop)\n path = colander.SchemaNode(colander.String(),\n validator=colander.Regex('^/'))\n headers = colander.SchemaNode(colander.Mapping(unknown='preserve'),\n validator=string_values,\n missing=colander.drop)\n body = colander.SchemaNode(colander.Mapping(unknown='preserve'),\n missing=colander.drop)\n\n\nclass BatchPayloadSchema(colander.MappingSchema):\n defaults = BatchRequestSchema(missing=colander.drop).clone()\n requests = colander.SchemaNode(colander.Sequence(),\n BatchRequestSchema())\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n # On defaults, path is not mandatory.\n self.get('defaults').get('path').missing = colander.drop\n\n def deserialize(self, cstruct=colander.null):\n \"\"\"Preprocess received data to carefully merge defaults.\n \"\"\"\n if cstruct is not colander.null:\n defaults = cstruct.get('defaults')\n requests = cstruct.get('requests')\n if isinstance(defaults, dict) and isinstance(requests, list):\n for request in requests:\n if isinstance(request, dict):\n merge_dicts(request, defaults)\n return super().deserialize(cstruct)\n\n\nclass BatchRequest(colander.MappingSchema):\n body = BatchPayloadSchema()\n\n\nclass BatchResponseSchema(colander.MappingSchema):\n status = colander.SchemaNode(colander.Integer())\n path = colander.SchemaNode(colander.String())\n headers = colander.SchemaNode(colander.Mapping(unknown='preserve'),\n validator=string_values,\n missing=colander.drop)\n body = colander.SchemaNode(colander.Mapping(unknown='preserve'),\n missing=colander.drop)\n\n\nclass BatchResponseBodySchema(colander.MappingSchema):\n responses = colander.SequenceSchema(BatchResponseSchema(missing=colander.drop))\n\n\nclass BatchResponse(colander.MappingSchema):\n body = BatchResponseBodySchema()\n\n\nclass ErrorResponseSchema(colander.MappingSchema):\n body = ErrorSchema()\n\n\nbatch_responses = {\n '200': BatchResponse(description='Return a list of operation responses.'),\n '400': ErrorResponseSchema(description='The request was badly formatted.'),\n 'default': ErrorResponseSchema(description='an unknown error occurred.')\n}\n\nbatch = Service(name=\"batch\", path='/batch',\n description=\"Batch operations\")\n\n\[email protected](schema=BatchRequest,\n validators=(colander_validator,),\n permission=NO_PERMISSION_REQUIRED,\n tags=['Batch'], operation_id='batch',\n response_schemas=batch_responses)\ndef post_batch(request):\n requests = request.validated['body']['requests']\n\n request.log_context(batch_size=len(requests))\n\n limit = request.registry.settings['batch_max_requests']\n if limit and len(requests) > int(limit):\n error_msg = 'Number of requests is limited to {}'.format(limit)\n request.errors.add('body', 'requests', error_msg)\n return\n\n if any([batch.path in req['path'] for req in requests]):\n error_msg = 'Recursive call on {} endpoint is forbidden.'.format(batch.path)\n request.errors.add('body', 'requests', error_msg)\n return\n\n responses = []\n\n for subrequest_spec in requests:\n subrequest = build_request(request, subrequest_spec)\n\n log_context = {'path': subrequest.path,\n 'method': 
subrequest.method,\n **request.log_context()}\n try:\n # Invoke subrequest without individual transaction.\n resp, subrequest = request.follow_subrequest(subrequest,\n use_tweens=False)\n except httpexceptions.HTTPException as e:\n if e.content_type == 'application/json':\n resp = e\n else:\n # JSONify raw Pyramid errors.\n resp = errors.http_error(e)\n\n subrequest_logger.info('subrequest.summary', extra=log_context)\n\n dict_resp = build_response(resp, subrequest)\n responses.append(dict_resp)\n\n return {\n 'responses': responses\n }\n", "path": "kinto/core/views/batch.py"}], "after_files": [{"content": "import logging\n\nimport colander\nfrom cornice.validators import colander_validator\nfrom pyramid import httpexceptions\nfrom pyramid.security import NO_PERMISSION_REQUIRED\n\nfrom kinto.core import errors\nfrom kinto.core import Service\nfrom kinto.core.errors import ErrorSchema\nfrom kinto.core.utils import merge_dicts, build_request, build_response\n\n\nsubrequest_logger = logging.getLogger(\"subrequest.summary\")\n\nvalid_http_method = colander.OneOf(('GET', 'HEAD', 'DELETE', 'TRACE',\n 'POST', 'PUT', 'PATCH'))\n\n\ndef string_values(node, cstruct):\n \"\"\"Validate that a ``colander.Mapping`` only has strings in its values.\n\n .. warning::\n\n Should be associated to a ``colander.Mapping`` schema node.\n \"\"\"\n are_strings = [isinstance(v, str) for v in cstruct.values()]\n if not all(are_strings):\n error_msg = '{} contains non string value'.format(cstruct)\n raise colander.Invalid(node, error_msg)\n\n\nclass BatchRequestSchema(colander.MappingSchema):\n method = colander.SchemaNode(colander.String(),\n validator=valid_http_method,\n missing=colander.drop)\n path = colander.SchemaNode(colander.String(),\n validator=colander.Regex('^/'))\n headers = colander.SchemaNode(colander.Mapping(unknown='preserve'),\n validator=string_values,\n missing=colander.drop)\n body = colander.SchemaNode(colander.Mapping(unknown='preserve'),\n missing=colander.drop)\n\n @staticmethod\n def schema_type():\n return colander.Mapping(unknown='raise')\n\n\nclass BatchPayloadSchema(colander.MappingSchema):\n defaults = BatchRequestSchema(missing=colander.drop).clone()\n requests = colander.SchemaNode(colander.Sequence(),\n BatchRequestSchema())\n\n @staticmethod\n def schema_type():\n return colander.Mapping(unknown='raise')\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n # On defaults, path is not mandatory.\n self.get('defaults').get('path').missing = colander.drop\n\n def deserialize(self, cstruct=colander.null):\n \"\"\"Preprocess received data to carefully merge defaults.\n \"\"\"\n if cstruct is not colander.null:\n defaults = cstruct.get('defaults')\n requests = cstruct.get('requests')\n if isinstance(defaults, dict) and isinstance(requests, list):\n for request in requests:\n if isinstance(request, dict):\n merge_dicts(request, defaults)\n return super().deserialize(cstruct)\n\n\nclass BatchRequest(colander.MappingSchema):\n body = BatchPayloadSchema()\n\n\nclass BatchResponseSchema(colander.MappingSchema):\n status = colander.SchemaNode(colander.Integer())\n path = colander.SchemaNode(colander.String())\n headers = colander.SchemaNode(colander.Mapping(unknown='preserve'),\n validator=string_values,\n missing=colander.drop)\n body = colander.SchemaNode(colander.Mapping(unknown='preserve'),\n missing=colander.drop)\n\n\nclass BatchResponseBodySchema(colander.MappingSchema):\n responses = colander.SequenceSchema(BatchResponseSchema(missing=colander.drop))\n\n\nclass 
BatchResponse(colander.MappingSchema):\n body = BatchResponseBodySchema()\n\n\nclass ErrorResponseSchema(colander.MappingSchema):\n body = ErrorSchema()\n\n\nbatch_responses = {\n '200': BatchResponse(description='Return a list of operation responses.'),\n '400': ErrorResponseSchema(description='The request was badly formatted.'),\n 'default': ErrorResponseSchema(description='an unknown error occurred.')\n}\n\nbatch = Service(name=\"batch\", path='/batch',\n description=\"Batch operations\")\n\n\[email protected](schema=BatchRequest,\n validators=(colander_validator,),\n permission=NO_PERMISSION_REQUIRED,\n tags=['Batch'], operation_id='batch',\n response_schemas=batch_responses)\ndef post_batch(request):\n requests = request.validated['body']['requests']\n\n request.log_context(batch_size=len(requests))\n\n limit = request.registry.settings['batch_max_requests']\n if limit and len(requests) > int(limit):\n error_msg = 'Number of requests is limited to {}'.format(limit)\n request.errors.add('body', 'requests', error_msg)\n return\n\n if any([batch.path in req['path'] for req in requests]):\n error_msg = 'Recursive call on {} endpoint is forbidden.'.format(batch.path)\n request.errors.add('body', 'requests', error_msg)\n return\n\n responses = []\n\n for subrequest_spec in requests:\n subrequest = build_request(request, subrequest_spec)\n\n log_context = {'path': subrequest.path,\n 'method': subrequest.method,\n **request.log_context()}\n try:\n # Invoke subrequest without individual transaction.\n resp, subrequest = request.follow_subrequest(subrequest,\n use_tweens=False)\n except httpexceptions.HTTPException as e:\n if e.content_type == 'application/json':\n resp = e\n else:\n # JSONify raw Pyramid errors.\n resp = errors.http_error(e)\n\n subrequest_logger.info('subrequest.summary', extra=log_context)\n\n dict_resp = build_response(resp, subrequest)\n responses.append(dict_resp)\n\n return {\n 'responses': responses\n }\n", "path": "kinto/core/views/batch.py"}]}
| 1,850 | 198 |
gh_patches_debug_30318
|
rasdani/github-patches
|
git_diff
|
aws-cloudformation__cfn-lint-1647
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
AWS::Logs::MetricFilter MetricValues permits invalid strings
`cfn-lint 0.30.1`
AWS::Logs::MetricFilter.Properties.MetricTransformations[*].MetricValue allows a bare string not starting with '$', which it appears is never actually valid, i.e.
"MetricValue: length" vs "MetricValue: $length"
Assuming I'm reading the documentation correctly MetricValue must always either be a number OR start with a '$' character.
The following fragment lints, but is rejected by CloudFormation at runtime without a $ at the start of the named MetricValue field
```
QueueLengthMetricFilter:
Type: AWS::Logs::MetricFilter
Properties:
LogGroupName: !Ref LogGroup
FilterPattern: '[date, time, tag="rh-sched*", x01=throttling, x02="jobs.", ..., x10=Len, x11=of, x12=job, x13="queue*", length]'
MetricTransformations:
- MetricValue: length
MetricNamespace: !Sub '${EnvironmentName}'
MetricName: 'JobsQueued'
```
Note: I believe that this is also missed by the AWS ValidateTemplate API
--- END ISSUE ---
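The reported gap is presumably closed in two parts: the resource spec gains an `AllowedPattern` for `MetricValue`, and the generic E3031 check below is taught to tolerate plain numeric values. An illustrative sketch of such a pattern (the regex is an assumption, not copied from the spec):
```python
import re

# Assumed shape of the constraint: either a number or a $-prefixed field name.
METRIC_VALUE = re.compile(r"^(\$[\w.]+|-?[0-9.]+)$")

for candidate in ("length", "$length", "1", "0.5"):
    print(candidate, "valid" if METRIC_VALUE.match(candidate) else "invalid")
# length invalid   <- the bare name from the issue; rejected by CloudFormation at runtime
# $length valid
# 1 valid
# 0.5 valid
```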
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cfnlint/rules/resources/properties/AllowedPattern.py`
Content:
```
1 """
2 Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 SPDX-License-Identifier: MIT-0
4 """
5 import re
6 from cfnlint.rules import CloudFormationLintRule
7 from cfnlint.rules import RuleMatch
8
9 from cfnlint.helpers import RESOURCE_SPECS
10
11
12 class AllowedPattern(CloudFormationLintRule):
13 """Check if properties have a valid value"""
14 id = 'E3031'
15 shortdesc = 'Check if property values adhere to a specific pattern'
16 description = 'Check if properties have a valid value in case of a pattern (Regular Expression)'
17 source_url = 'https://github.com/awslabs/cfn-python-lint/blob/master/docs/cfn-resource-specification.md#allowedpattern'
18 tags = ['resources', 'property', 'allowed pattern', 'regex']
19
20 def initialize(self, cfn):
21 """Initialize the rule"""
22 for resource_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get('ResourceTypes'):
23 self.resource_property_types.append(resource_type_spec)
24 for property_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get('PropertyTypes'):
25 self.resource_sub_property_types.append(property_type_spec)
26
27 def check_value(self, value, path, property_name, **kwargs):
28 """Check Value"""
29 matches = []
30
31 # Get the Allowed Pattern Regex
32 value_pattern_regex = kwargs.get('value_specs', {}).get('AllowedPatternRegex', {})
33 # Get the "Human Readable" version for the error message. Optional, if not specified,
34 # the RegEx itself is used.
35 value_pattern = kwargs.get('value_specs', {}).get('AllowedPattern', value_pattern_regex)
36
37 if value_pattern_regex:
38 regex = re.compile(value_pattern_regex)
39
40 # Ignore values with dynamic references. Simple check to prevent false-positives
41 # See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/dynamic-references.html
42 if '{{resolve:' not in value:
43 if not regex.match(value):
44 full_path = ('/'.join(str(x) for x in path))
45
46 message = '{} contains invalid characters (Pattern: {}) at {}'
47 matches.append(RuleMatch(path, message.format(
48 property_name, value_pattern, full_path)))
49
50 return matches
51
52 def check(self, cfn, properties, value_specs, property_specs, path):
53 """Check itself"""
54 matches = list()
55 for p_value, p_path in properties.items_safe(path[:]):
56 for prop in p_value:
57 if prop in value_specs:
58 value = value_specs.get(prop).get('Value', {})
59 if value:
60 value_type = value.get('ValueType', '')
61 property_type = property_specs.get('Properties').get(prop).get('Type')
62 matches.extend(
63 cfn.check_value(
64 p_value, prop, p_path,
65 check_value=self.check_value,
66 value_specs=RESOURCE_SPECS.get(cfn.regions[0]).get(
67 'ValueTypes').get(value_type, {}),
68 cfn=cfn, property_type=property_type, property_name=prop
69 )
70 )
71 return matches
72
73 def match_resource_sub_properties(self, properties, property_type, path, cfn):
74 """Match for sub properties"""
75 matches = list()
76
77 specs = RESOURCE_SPECS.get(cfn.regions[0]).get(
78 'PropertyTypes').get(property_type, {}).get('Properties', {})
79 property_specs = RESOURCE_SPECS.get(cfn.regions[0]).get('PropertyTypes').get(property_type)
80 matches.extend(self.check(cfn, properties, specs, property_specs, path))
81
82 return matches
83
84 def match_resource_properties(self, properties, resource_type, path, cfn):
85 """Check CloudFormation Properties"""
86 matches = list()
87
88 specs = RESOURCE_SPECS.get(cfn.regions[0]).get(
89 'ResourceTypes').get(resource_type, {}).get('Properties', {})
90 resource_specs = RESOURCE_SPECS.get(cfn.regions[0]).get('ResourceTypes').get(resource_type)
91 matches.extend(self.check(cfn, properties, specs, resource_specs, path))
92
93 return matches
94
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/cfnlint/rules/resources/properties/AllowedPattern.py b/src/cfnlint/rules/resources/properties/AllowedPattern.py
--- a/src/cfnlint/rules/resources/properties/AllowedPattern.py
+++ b/src/cfnlint/rules/resources/properties/AllowedPattern.py
@@ -3,6 +3,7 @@
SPDX-License-Identifier: MIT-0
"""
import re
+import six
from cfnlint.rules import CloudFormationLintRule
from cfnlint.rules import RuleMatch
@@ -34,18 +35,22 @@
# the RegEx itself is used.
value_pattern = kwargs.get('value_specs', {}).get('AllowedPattern', value_pattern_regex)
- if value_pattern_regex:
- regex = re.compile(value_pattern_regex)
+ if isinstance(value, (int, float)):
+ value = str(value)
- # Ignore values with dynamic references. Simple check to prevent false-positives
- # See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/dynamic-references.html
- if '{{resolve:' not in value:
- if not regex.match(value):
- full_path = ('/'.join(str(x) for x in path))
+ if isinstance(value, six.string_types):
+ if value_pattern_regex:
+ regex = re.compile(value_pattern_regex)
- message = '{} contains invalid characters (Pattern: {}) at {}'
- matches.append(RuleMatch(path, message.format(
- property_name, value_pattern, full_path)))
+ # Ignore values with dynamic references. Simple check to prevent false-positives
+ # See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/dynamic-references.html
+ if '{{resolve:' not in value:
+ if not regex.match(value):
+ full_path = ('/'.join(str(x) for x in path))
+
+ message = '{} contains invalid characters (Pattern: {}) at {}'
+ matches.append(RuleMatch(path, message.format(
+ property_name, value_pattern, full_path)))
return matches
|
{"golden_diff": "diff --git a/src/cfnlint/rules/resources/properties/AllowedPattern.py b/src/cfnlint/rules/resources/properties/AllowedPattern.py\n--- a/src/cfnlint/rules/resources/properties/AllowedPattern.py\n+++ b/src/cfnlint/rules/resources/properties/AllowedPattern.py\n@@ -3,6 +3,7 @@\n SPDX-License-Identifier: MIT-0\n \"\"\"\n import re\n+import six\n from cfnlint.rules import CloudFormationLintRule\n from cfnlint.rules import RuleMatch\n \n@@ -34,18 +35,22 @@\n # the RegEx itself is used.\n value_pattern = kwargs.get('value_specs', {}).get('AllowedPattern', value_pattern_regex)\n \n- if value_pattern_regex:\n- regex = re.compile(value_pattern_regex)\n+ if isinstance(value, (int, float)):\n+ value = str(value)\n \n- # Ignore values with dynamic references. Simple check to prevent false-positives\n- # See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/dynamic-references.html\n- if '{{resolve:' not in value:\n- if not regex.match(value):\n- full_path = ('/'.join(str(x) for x in path))\n+ if isinstance(value, six.string_types):\n+ if value_pattern_regex:\n+ regex = re.compile(value_pattern_regex)\n \n- message = '{} contains invalid characters (Pattern: {}) at {}'\n- matches.append(RuleMatch(path, message.format(\n- property_name, value_pattern, full_path)))\n+ # Ignore values with dynamic references. Simple check to prevent false-positives\n+ # See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/dynamic-references.html\n+ if '{{resolve:' not in value:\n+ if not regex.match(value):\n+ full_path = ('/'.join(str(x) for x in path))\n+\n+ message = '{} contains invalid characters (Pattern: {}) at {}'\n+ matches.append(RuleMatch(path, message.format(\n+ property_name, value_pattern, full_path)))\n \n return matches\n", "issue": "AWS::Logs::MetricFilter MetricValues permits invalid strings\n`cfn-lint 0.30.1`\r\n\r\nAWS::Logs::MetricFilter.Properties.MetricTransformations[*].MetricValue allows a bare string not starting with '$' which it appears is never actually valid, ie \r\n\"MetricValue: length\" vs \"MetricValue: $length\"\r\n\r\nAssuming I'm reading the documentation correctly MetricValue must always either be a number OR start with a '$' character.\r\n\r\nThe following fragment lints, but is rejected by CloudFormation at runtime without a $ at the start of the named MetricValue field\r\n```\r\n QueueLengthMetricFilter:\r\n Type: AWS::Logs::MetricFilter\r\n Properties:\r\n LogGroupName: !Ref LogGroup\r\n FilterPattern: '[date, time, tag=\"rh-sched*\", x01=throttling, x02=\"jobs.\", ..., x10=Len, x11=of, x12=job, x13=\"queue*\", length]'\r\n MetricTransformations:\r\n - MetricValue: length\r\n MetricNamespace: !Sub '${EnvironmentName}'\r\n MetricName: 'JobsQueued'\r\n```\r\n\r\n\r\nNote: I believe that this is also missed by the AWS ValidateTemplate API\n", "before_files": [{"content": "\"\"\"\nCopyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved.\nSPDX-License-Identifier: MIT-0\n\"\"\"\nimport re\nfrom cfnlint.rules import CloudFormationLintRule\nfrom cfnlint.rules import RuleMatch\n\nfrom cfnlint.helpers import RESOURCE_SPECS\n\n\nclass AllowedPattern(CloudFormationLintRule):\n \"\"\"Check if properties have a valid value\"\"\"\n id = 'E3031'\n shortdesc = 'Check if property values adhere to a specific pattern'\n description = 'Check if properties have a valid value in case of a pattern (Regular Expression)'\n source_url = 'https://github.com/awslabs/cfn-python-lint/blob/master/docs/cfn-resource-specification.md#allowedpattern'\n tags = ['resources', 'property', 'allowed pattern', 'regex']\n\n def initialize(self, cfn):\n \"\"\"Initialize the rule\"\"\"\n for resource_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get('ResourceTypes'):\n self.resource_property_types.append(resource_type_spec)\n for property_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get('PropertyTypes'):\n self.resource_sub_property_types.append(property_type_spec)\n\n def check_value(self, value, path, property_name, **kwargs):\n \"\"\"Check Value\"\"\"\n matches = []\n\n # Get the Allowed Pattern Regex\n value_pattern_regex = kwargs.get('value_specs', {}).get('AllowedPatternRegex', {})\n # Get the \"Human Readable\" version for the error message. Optional, if not specified,\n # the RegEx itself is used.\n value_pattern = kwargs.get('value_specs', {}).get('AllowedPattern', value_pattern_regex)\n\n if value_pattern_regex:\n regex = re.compile(value_pattern_regex)\n\n # Ignore values with dynamic references. Simple check to prevent false-positives\n # See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/dynamic-references.html\n if '{{resolve:' not in value:\n if not regex.match(value):\n full_path = ('/'.join(str(x) for x in path))\n\n message = '{} contains invalid characters (Pattern: {}) at {}'\n matches.append(RuleMatch(path, message.format(\n property_name, value_pattern, full_path)))\n\n return matches\n\n def check(self, cfn, properties, value_specs, property_specs, path):\n \"\"\"Check itself\"\"\"\n matches = list()\n for p_value, p_path in properties.items_safe(path[:]):\n for prop in p_value:\n if prop in value_specs:\n value = value_specs.get(prop).get('Value', {})\n if value:\n value_type = value.get('ValueType', '')\n property_type = property_specs.get('Properties').get(prop).get('Type')\n matches.extend(\n cfn.check_value(\n p_value, prop, p_path,\n check_value=self.check_value,\n value_specs=RESOURCE_SPECS.get(cfn.regions[0]).get(\n 'ValueTypes').get(value_type, {}),\n cfn=cfn, property_type=property_type, property_name=prop\n )\n )\n return matches\n\n def match_resource_sub_properties(self, properties, property_type, path, cfn):\n \"\"\"Match for sub properties\"\"\"\n matches = list()\n\n specs = RESOURCE_SPECS.get(cfn.regions[0]).get(\n 'PropertyTypes').get(property_type, {}).get('Properties', {})\n property_specs = RESOURCE_SPECS.get(cfn.regions[0]).get('PropertyTypes').get(property_type)\n matches.extend(self.check(cfn, properties, specs, property_specs, path))\n\n return matches\n\n def match_resource_properties(self, properties, resource_type, path, cfn):\n \"\"\"Check CloudFormation Properties\"\"\"\n matches = list()\n\n specs = RESOURCE_SPECS.get(cfn.regions[0]).get(\n 'ResourceTypes').get(resource_type, {}).get('Properties', {})\n resource_specs = RESOURCE_SPECS.get(cfn.regions[0]).get('ResourceTypes').get(resource_type)\n matches.extend(self.check(cfn, properties, specs, resource_specs, 
path))\n\n return matches\n", "path": "src/cfnlint/rules/resources/properties/AllowedPattern.py"}], "after_files": [{"content": "\"\"\"\nCopyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\nSPDX-License-Identifier: MIT-0\n\"\"\"\nimport re\nimport six\nfrom cfnlint.rules import CloudFormationLintRule\nfrom cfnlint.rules import RuleMatch\n\nfrom cfnlint.helpers import RESOURCE_SPECS\n\n\nclass AllowedPattern(CloudFormationLintRule):\n \"\"\"Check if properties have a valid value\"\"\"\n id = 'E3031'\n shortdesc = 'Check if property values adhere to a specific pattern'\n description = 'Check if properties have a valid value in case of a pattern (Regular Expression)'\n source_url = 'https://github.com/awslabs/cfn-python-lint/blob/master/docs/cfn-resource-specification.md#allowedpattern'\n tags = ['resources', 'property', 'allowed pattern', 'regex']\n\n def initialize(self, cfn):\n \"\"\"Initialize the rule\"\"\"\n for resource_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get('ResourceTypes'):\n self.resource_property_types.append(resource_type_spec)\n for property_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get('PropertyTypes'):\n self.resource_sub_property_types.append(property_type_spec)\n\n def check_value(self, value, path, property_name, **kwargs):\n \"\"\"Check Value\"\"\"\n matches = []\n\n # Get the Allowed Pattern Regex\n value_pattern_regex = kwargs.get('value_specs', {}).get('AllowedPatternRegex', {})\n # Get the \"Human Readable\" version for the error message. Optional, if not specified,\n # the RegEx itself is used.\n value_pattern = kwargs.get('value_specs', {}).get('AllowedPattern', value_pattern_regex)\n\n if isinstance(value, (int, float)):\n value = str(value)\n\n if isinstance(value, six.string_types):\n if value_pattern_regex:\n regex = re.compile(value_pattern_regex)\n\n # Ignore values with dynamic references. 
Simple check to prevent false-positives\n # See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/dynamic-references.html\n if '{{resolve:' not in value:\n if not regex.match(value):\n full_path = ('/'.join(str(x) for x in path))\n\n message = '{} contains invalid characters (Pattern: {}) at {}'\n matches.append(RuleMatch(path, message.format(\n property_name, value_pattern, full_path)))\n\n return matches\n\n def check(self, cfn, properties, value_specs, property_specs, path):\n \"\"\"Check itself\"\"\"\n matches = list()\n for p_value, p_path in properties.items_safe(path[:]):\n for prop in p_value:\n if prop in value_specs:\n value = value_specs.get(prop).get('Value', {})\n if value:\n value_type = value.get('ValueType', '')\n property_type = property_specs.get('Properties').get(prop).get('Type')\n matches.extend(\n cfn.check_value(\n p_value, prop, p_path,\n check_value=self.check_value,\n value_specs=RESOURCE_SPECS.get(cfn.regions[0]).get(\n 'ValueTypes').get(value_type, {}),\n cfn=cfn, property_type=property_type, property_name=prop\n )\n )\n return matches\n\n def match_resource_sub_properties(self, properties, property_type, path, cfn):\n \"\"\"Match for sub properties\"\"\"\n matches = list()\n\n specs = RESOURCE_SPECS.get(cfn.regions[0]).get(\n 'PropertyTypes').get(property_type, {}).get('Properties', {})\n property_specs = RESOURCE_SPECS.get(cfn.regions[0]).get('PropertyTypes').get(property_type)\n matches.extend(self.check(cfn, properties, specs, property_specs, path))\n\n return matches\n\n def match_resource_properties(self, properties, resource_type, path, cfn):\n \"\"\"Check CloudFormation Properties\"\"\"\n matches = list()\n\n specs = RESOURCE_SPECS.get(cfn.regions[0]).get(\n 'ResourceTypes').get(resource_type, {}).get('Properties', {})\n resource_specs = RESOURCE_SPECS.get(cfn.regions[0]).get('ResourceTypes').get(resource_type)\n matches.extend(self.check(cfn, properties, specs, resource_specs, path))\n\n return matches\n", "path": "src/cfnlint/rules/resources/properties/AllowedPattern.py"}]}
| 1,588 | 448 |
gh_patches_debug_38111
|
rasdani/github-patches
|
git_diff
|
bridgecrewio__checkov-4781
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
BC_K8S_14/CKV_K8S_15 "Image Pull Policy should be Always" should not be reported when using a digest
**Describe the issue**
[BC_K8S_14/CKV_K8S_15 "Image Pull Policy should be Always"](https://docs.bridgecrew.io/docs/bc_k8s_14) should not be reported when using a digest.
Per the documentation:
> When the imagePullPolicy is set to Always, you ensure the latest version of the image is deployed every time the pod is started.
By specifying the digest, the same image is always used, so there is no concern about the latest version (since the version cannot change), negating the need to always pull the image.
**Examples**
Save the following as `deployment.yaml`:
```yaml
apiVersion: v1
kind: Pod
metadata:
name: <name>
spec:
containers:
- name: nginx
image: 'nginx@sha256:b0ad43f7ee5edbc0effbc14645ae7055e21bc1973aee5150745632a24a752661'
imagePullPolicy: IfNotPresent
```
Run: `checkov --framework kubernetes -f deployment.yaml -c CKV_K8S_15`
Actual result:
FAILED
Expected result:
Passed
**Version (please complete the following information):**
- Checkov Version 2.3.126
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `checkov/terraform/checks/resource/kubernetes/ImagePullPolicyAlways.py`
Content:
```
1 from checkov.common.models.enums import CheckCategories, CheckResult
2 from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
3
4
5 class ImagePullPolicyAlways(BaseResourceCheck):
6
7 def __init__(self):
8 """
9 Image pull policy should be set to always to ensure you get the correct image and imagePullSecrets are correct
10 Default is 'IfNotPresent' unless image tag is omitted or :latest
11 https://kubernetes.io/docs/concepts/configuration/overview/#container-images
12
13 An admission controller could be used to enforce imagePullPolicy
14 """
15 name = "Image Pull Policy should be Always"
16 id = "CKV_K8S_15"
17 supported_resources = ["kubernetes_pod", "kubernetes_pod_v1",
18 "kubernetes_deployment", "kubernetes_deployment_v1"]
19 categories = [CheckCategories.GENERAL_SECURITY]
20 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
21
22 def scan_resource_conf(self, conf) -> CheckResult:
23 spec = conf.get('spec', [None])[0]
24 if isinstance(spec, dict) and spec:
25 evaluated_keys_path = "spec"
26
27 template = spec.get("template")
28 if template and isinstance(template, list):
29 template = template[0]
30 template_spec = template.get("spec")
31 if template_spec and isinstance(template_spec, list):
32 spec = template_spec[0]
33 evaluated_keys_path = f'{evaluated_keys_path}/[0]/template/[0]/spec'
34
35 containers = spec.get("container")
36 if containers is None:
37 return CheckResult.UNKNOWN
38 for idx, container in enumerate(containers):
39 if not isinstance(container, dict):
40 return CheckResult.UNKNOWN
41
42 if container.get("image_pull_policy"):
43 if container.get("image_pull_policy")[0] == "Always":
44 break
45 else:
46 if container.get("image"):
47 name = container.get("image")[0]
48 if "latest" in name:
49 break
50 self.evaluated_keys = [f'{evaluated_keys_path}/[0]/container/[{idx}]']
51 return CheckResult.FAILED
52 return CheckResult.PASSED
53 return CheckResult.FAILED
54
55
56 check = ImagePullPolicyAlways()
57
```
Path: `checkov/kubernetes/checks/resource/k8s/ImagePullPolicyAlways.py`
Content:
```
1 import re
2 from typing import Any, Dict
3
4 from checkov.common.models.consts import DOCKER_IMAGE_REGEX
5 from checkov.common.models.enums import CheckResult
6 from checkov.kubernetes.checks.resource.base_container_check import BaseK8sContainerCheck
7
8
9 class ImagePullPolicyAlways(BaseK8sContainerCheck):
10 def __init__(self) -> None:
11 """
12 Image pull policy should be set to always to ensure you get the correct image and imagePullSecrets are correct
13 Default is 'IfNotPresent' unless image tag is omitted or :latest
14 https://kubernetes.io/docs/concepts/configuration/overview/#container-images
15
16 An admission controller could be used to enforce imagePullPolicy
17 """
18 name = "Image Pull Policy should be Always"
19 id = "CKV_K8S_15"
20 # Location: container .imagePullPolicy
21 super().__init__(name=name, id=id)
22
23 def scan_container_conf(self, metadata: Dict[str, Any], conf: Dict[str, Any]) -> CheckResult:
24 self.evaluated_container_keys = ["image", "imagePullPolicy"]
25 if conf.get("image"):
26 # Remove the digest, if present
27 image_val = conf["image"]
28 if not isinstance(image_val, str) or image_val.strip() == "":
29 return CheckResult.UNKNOWN
30 if "@" in image_val:
31 image_val = image_val[0 : image_val.index("@")]
32 if "imagePullPolicy" not in conf:
33 image_tag_match = re.findall(DOCKER_IMAGE_REGEX, image_val)
34 if len(image_tag_match) != 1:
35 # If image name is invalid, check result can not be determined
36 return CheckResult.UNKNOWN
37 (image, tag) = image_tag_match[0]
38 if tag == "latest" or tag == "":
39 # Default imagePullPolicy = Always
40 return CheckResult.PASSED
41 else:
42 # Default imagePullPolicy = IfNotPresent
43 return CheckResult.FAILED
44 else:
45 if conf["imagePullPolicy"] != "Always":
46 return CheckResult.FAILED
47
48 else:
49 return CheckResult.FAILED
50 return CheckResult.PASSED
51
52
53 check = ImagePullPolicyAlways()
54
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/checkov/kubernetes/checks/resource/k8s/ImagePullPolicyAlways.py b/checkov/kubernetes/checks/resource/k8s/ImagePullPolicyAlways.py
--- a/checkov/kubernetes/checks/resource/k8s/ImagePullPolicyAlways.py
+++ b/checkov/kubernetes/checks/resource/k8s/ImagePullPolicyAlways.py
@@ -29,6 +29,9 @@
return CheckResult.UNKNOWN
if "@" in image_val:
image_val = image_val[0 : image_val.index("@")]
+ has_digest = True
+ else:
+ has_digest = False
if "imagePullPolicy" not in conf:
image_tag_match = re.findall(DOCKER_IMAGE_REGEX, image_val)
if len(image_tag_match) != 1:
@@ -38,11 +41,13 @@
if tag == "latest" or tag == "":
# Default imagePullPolicy = Always
return CheckResult.PASSED
+ elif has_digest:
+ return CheckResult.PASSED
else:
# Default imagePullPolicy = IfNotPresent
return CheckResult.FAILED
else:
- if conf["imagePullPolicy"] != "Always":
+ if not has_digest and conf["imagePullPolicy"] != "Always":
return CheckResult.FAILED
else:
diff --git a/checkov/terraform/checks/resource/kubernetes/ImagePullPolicyAlways.py b/checkov/terraform/checks/resource/kubernetes/ImagePullPolicyAlways.py
--- a/checkov/terraform/checks/resource/kubernetes/ImagePullPolicyAlways.py
+++ b/checkov/terraform/checks/resource/kubernetes/ImagePullPolicyAlways.py
@@ -7,7 +7,7 @@
def __init__(self):
"""
Image pull policy should be set to always to ensure you get the correct image and imagePullSecrets are correct
- Default is 'IfNotPresent' unless image tag is omitted or :latest
+ Default is 'IfNotPresent' unless image tag/digest is omitted or :latest
https://kubernetes.io/docs/concepts/configuration/overview/#container-images
An admission controller could be used to enforce imagePullPolicy
@@ -47,6 +47,8 @@
name = container.get("image")[0]
if "latest" in name:
break
+ if "@" in name:
+ break
self.evaluated_keys = [f'{evaluated_keys_path}/[0]/container/[{idx}]']
return CheckResult.FAILED
return CheckResult.PASSED
|
{"golden_diff": "diff --git a/checkov/kubernetes/checks/resource/k8s/ImagePullPolicyAlways.py b/checkov/kubernetes/checks/resource/k8s/ImagePullPolicyAlways.py\n--- a/checkov/kubernetes/checks/resource/k8s/ImagePullPolicyAlways.py\n+++ b/checkov/kubernetes/checks/resource/k8s/ImagePullPolicyAlways.py\n@@ -29,6 +29,9 @@\n return CheckResult.UNKNOWN\n if \"@\" in image_val:\n image_val = image_val[0 : image_val.index(\"@\")]\n+ has_digest = True\n+ else:\n+ has_digest = False\n if \"imagePullPolicy\" not in conf:\n image_tag_match = re.findall(DOCKER_IMAGE_REGEX, image_val)\n if len(image_tag_match) != 1:\n@@ -38,11 +41,13 @@\n if tag == \"latest\" or tag == \"\":\n # Default imagePullPolicy = Always\n return CheckResult.PASSED\n+ elif has_digest:\n+ return CheckResult.PASSED\n else:\n # Default imagePullPolicy = IfNotPresent\n return CheckResult.FAILED\n else:\n- if conf[\"imagePullPolicy\"] != \"Always\":\n+ if not has_digest and conf[\"imagePullPolicy\"] != \"Always\":\n return CheckResult.FAILED\n \n else:\ndiff --git a/checkov/terraform/checks/resource/kubernetes/ImagePullPolicyAlways.py b/checkov/terraform/checks/resource/kubernetes/ImagePullPolicyAlways.py\n--- a/checkov/terraform/checks/resource/kubernetes/ImagePullPolicyAlways.py\n+++ b/checkov/terraform/checks/resource/kubernetes/ImagePullPolicyAlways.py\n@@ -7,7 +7,7 @@\n def __init__(self):\n \"\"\"\n Image pull policy should be set to always to ensure you get the correct image and imagePullSecrets are correct\n- Default is 'IfNotPresent' unless image tag is omitted or :latest\n+ Default is 'IfNotPresent' unless image tag/digest is omitted or :latest\n https://kubernetes.io/docs/concepts/configuration/overview/#container-images\n \n An admission controller could be used to enforce imagePullPolicy\n@@ -47,6 +47,8 @@\n name = container.get(\"image\")[0]\n if \"latest\" in name:\n break\n+ if \"@\" in name:\n+ break\n self.evaluated_keys = [f'{evaluated_keys_path}/[0]/container/[{idx}]']\n return CheckResult.FAILED\n return CheckResult.PASSED\n", "issue": "BC_K8S_14/CKV_K8S_15 \"Image Pull Policy should be Always\" should not be reported when using a digest\n**Describe the issue**\r\n[BC_K8S_14/CKV_K8S_15 \"Image Pull Policy should be Always\"](https://docs.bridgecrew.io/docs/bc_k8s_14) should not be reported when using a digest.\r\n\r\nPer the documentation:\r\n> When the imagePullPolicy is set to Always, you ensure the latest version of the image is deployed every time the pod is started.\r\n\r\nBy specifying the digest, the same image is always used, so there is no concern about the latest version (since the the version cannot change), negating the need to always pull the image.\r\n\r\n**Examples**\r\nSave the following as `deployment.yaml`:\r\n```yaml\r\napiVersion: v1\r\nkind: Pod\r\nmetadata:\r\n name: <name>\r\nspec:\r\n containers:\r\n - name: nginx\r\n image: 'nginx@sha256:b0ad43f7ee5edbc0effbc14645ae7055e21bc1973aee5150745632a24a752661'\r\n imagePullPolicy: IfNotPresent\r\n```\r\nRun: `checkov --framework kubernetes -f deployment.yaml -c CKV_K8S_15`\r\n\r\nActual result:\r\nFAILED\r\n\r\nExpected result:\r\nPassed\r\n\r\n**Version (please complete the following information):**\r\n - Checkov Version 2.3.126\n", "before_files": [{"content": "from checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck\n\n\nclass ImagePullPolicyAlways(BaseResourceCheck):\n\n def __init__(self):\n \"\"\"\n Image pull policy should be set to always to 
ensure you get the correct image and imagePullSecrets are correct\n Default is 'IfNotPresent' unless image tag is omitted or :latest\n https://kubernetes.io/docs/concepts/configuration/overview/#container-images\n\n An admission controller could be used to enforce imagePullPolicy\n \"\"\"\n name = \"Image Pull Policy should be Always\"\n id = \"CKV_K8S_15\"\n supported_resources = [\"kubernetes_pod\", \"kubernetes_pod_v1\",\n \"kubernetes_deployment\", \"kubernetes_deployment_v1\"]\n categories = [CheckCategories.GENERAL_SECURITY]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf) -> CheckResult:\n spec = conf.get('spec', [None])[0]\n if isinstance(spec, dict) and spec:\n evaluated_keys_path = \"spec\"\n\n template = spec.get(\"template\")\n if template and isinstance(template, list):\n template = template[0]\n template_spec = template.get(\"spec\")\n if template_spec and isinstance(template_spec, list):\n spec = template_spec[0]\n evaluated_keys_path = f'{evaluated_keys_path}/[0]/template/[0]/spec'\n\n containers = spec.get(\"container\")\n if containers is None:\n return CheckResult.UNKNOWN\n for idx, container in enumerate(containers):\n if not isinstance(container, dict):\n return CheckResult.UNKNOWN\n\n if container.get(\"image_pull_policy\"):\n if container.get(\"image_pull_policy\")[0] == \"Always\":\n break\n else:\n if container.get(\"image\"):\n name = container.get(\"image\")[0]\n if \"latest\" in name:\n break\n self.evaluated_keys = [f'{evaluated_keys_path}/[0]/container/[{idx}]']\n return CheckResult.FAILED\n return CheckResult.PASSED\n return CheckResult.FAILED\n\n\ncheck = ImagePullPolicyAlways()\n", "path": "checkov/terraform/checks/resource/kubernetes/ImagePullPolicyAlways.py"}, {"content": "import re\nfrom typing import Any, Dict\n\nfrom checkov.common.models.consts import DOCKER_IMAGE_REGEX\nfrom checkov.common.models.enums import CheckResult\nfrom checkov.kubernetes.checks.resource.base_container_check import BaseK8sContainerCheck\n\n\nclass ImagePullPolicyAlways(BaseK8sContainerCheck):\n def __init__(self) -> None:\n \"\"\"\n Image pull policy should be set to always to ensure you get the correct image and imagePullSecrets are correct\n Default is 'IfNotPresent' unless image tag is omitted or :latest\n https://kubernetes.io/docs/concepts/configuration/overview/#container-images\n\n An admission controller could be used to enforce imagePullPolicy\n \"\"\"\n name = \"Image Pull Policy should be Always\"\n id = \"CKV_K8S_15\"\n # Location: container .imagePullPolicy\n super().__init__(name=name, id=id)\n\n def scan_container_conf(self, metadata: Dict[str, Any], conf: Dict[str, Any]) -> CheckResult:\n self.evaluated_container_keys = [\"image\", \"imagePullPolicy\"]\n if conf.get(\"image\"):\n # Remove the digest, if present\n image_val = conf[\"image\"]\n if not isinstance(image_val, str) or image_val.strip() == \"\":\n return CheckResult.UNKNOWN\n if \"@\" in image_val:\n image_val = image_val[0 : image_val.index(\"@\")]\n if \"imagePullPolicy\" not in conf:\n image_tag_match = re.findall(DOCKER_IMAGE_REGEX, image_val)\n if len(image_tag_match) != 1:\n # If image name is invalid, check result can not be determined\n return CheckResult.UNKNOWN\n (image, tag) = image_tag_match[0]\n if tag == \"latest\" or tag == \"\":\n # Default imagePullPolicy = Always\n return CheckResult.PASSED\n else:\n # Default imagePullPolicy = IfNotPresent\n return CheckResult.FAILED\n else:\n if 
conf[\"imagePullPolicy\"] != \"Always\":\n return CheckResult.FAILED\n\n else:\n return CheckResult.FAILED\n return CheckResult.PASSED\n\n\ncheck = ImagePullPolicyAlways()\n", "path": "checkov/kubernetes/checks/resource/k8s/ImagePullPolicyAlways.py"}], "after_files": [{"content": "from checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck\n\n\nclass ImagePullPolicyAlways(BaseResourceCheck):\n\n def __init__(self):\n \"\"\"\n Image pull policy should be set to always to ensure you get the correct image and imagePullSecrets are correct\n Default is 'IfNotPresent' unless image tag/digest is omitted or :latest\n https://kubernetes.io/docs/concepts/configuration/overview/#container-images\n\n An admission controller could be used to enforce imagePullPolicy\n \"\"\"\n name = \"Image Pull Policy should be Always\"\n id = \"CKV_K8S_15\"\n supported_resources = [\"kubernetes_pod\", \"kubernetes_pod_v1\",\n \"kubernetes_deployment\", \"kubernetes_deployment_v1\"]\n categories = [CheckCategories.GENERAL_SECURITY]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf) -> CheckResult:\n spec = conf.get('spec', [None])[0]\n if isinstance(spec, dict) and spec:\n evaluated_keys_path = \"spec\"\n\n template = spec.get(\"template\")\n if template and isinstance(template, list):\n template = template[0]\n template_spec = template.get(\"spec\")\n if template_spec and isinstance(template_spec, list):\n spec = template_spec[0]\n evaluated_keys_path = f'{evaluated_keys_path}/[0]/template/[0]/spec'\n\n containers = spec.get(\"container\")\n if containers is None:\n return CheckResult.UNKNOWN\n for idx, container in enumerate(containers):\n if not isinstance(container, dict):\n return CheckResult.UNKNOWN\n\n if container.get(\"image_pull_policy\"):\n if container.get(\"image_pull_policy\")[0] == \"Always\":\n break\n else:\n if container.get(\"image\"):\n name = container.get(\"image\")[0]\n if \"latest\" in name:\n break\n if \"@\" in name:\n break\n self.evaluated_keys = [f'{evaluated_keys_path}/[0]/container/[{idx}]']\n return CheckResult.FAILED\n return CheckResult.PASSED\n return CheckResult.FAILED\n\n\ncheck = ImagePullPolicyAlways()\n", "path": "checkov/terraform/checks/resource/kubernetes/ImagePullPolicyAlways.py"}, {"content": "import re\nfrom typing import Any, Dict\n\nfrom checkov.common.models.consts import DOCKER_IMAGE_REGEX\nfrom checkov.common.models.enums import CheckResult\nfrom checkov.kubernetes.checks.resource.base_container_check import BaseK8sContainerCheck\n\n\nclass ImagePullPolicyAlways(BaseK8sContainerCheck):\n def __init__(self) -> None:\n \"\"\"\n Image pull policy should be set to always to ensure you get the correct image and imagePullSecrets are correct\n Default is 'IfNotPresent' unless image tag is omitted or :latest\n https://kubernetes.io/docs/concepts/configuration/overview/#container-images\n\n An admission controller could be used to enforce imagePullPolicy\n \"\"\"\n name = \"Image Pull Policy should be Always\"\n id = \"CKV_K8S_15\"\n # Location: container .imagePullPolicy\n super().__init__(name=name, id=id)\n\n def scan_container_conf(self, metadata: Dict[str, Any], conf: Dict[str, Any]) -> CheckResult:\n self.evaluated_container_keys = [\"image\", \"imagePullPolicy\"]\n if conf.get(\"image\"):\n # Remove the digest, if present\n image_val = conf[\"image\"]\n if not isinstance(image_val, str) or 
image_val.strip() == \"\":\n return CheckResult.UNKNOWN\n if \"@\" in image_val:\n image_val = image_val[0 : image_val.index(\"@\")]\n has_digest = True\n else:\n has_digest = False\n if \"imagePullPolicy\" not in conf:\n image_tag_match = re.findall(DOCKER_IMAGE_REGEX, image_val)\n if len(image_tag_match) != 1:\n # If image name is invalid, check result can not be determined\n return CheckResult.UNKNOWN\n (image, tag) = image_tag_match[0]\n if tag == \"latest\" or tag == \"\":\n # Default imagePullPolicy = Always\n return CheckResult.PASSED\n elif has_digest:\n return CheckResult.PASSED\n else:\n # Default imagePullPolicy = IfNotPresent\n return CheckResult.FAILED\n else:\n if not has_digest and conf[\"imagePullPolicy\"] != \"Always\":\n return CheckResult.FAILED\n\n else:\n return CheckResult.FAILED\n return CheckResult.PASSED\n\n\ncheck = ImagePullPolicyAlways()\n", "path": "checkov/kubernetes/checks/resource/k8s/ImagePullPolicyAlways.py"}]}
| 1,802 | 543 |
gh_patches_debug_64689
|
rasdani/github-patches
|
git_diff
|
chainer__chainer-2992
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Install bug: Mock required for gradient_check
#2972 Install bug
Chainer installed with `pip install chainer`
`from chainer import gradient_check` fails due to unable to find mock to import
Fixed by `conda install mock`
`gradient_check` is included in the block declarations in the tutorial, so it should either be removed from there or mock should be added to default install so that people doing the tutorial do not get an error during the import commands.
```
from chainer import gradient_check
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
<ipython-input-1-0ba4708b632d> in <module>()
1 import numpy as np
2 import chainer
----> 3 from chainer import gradient_check
4 from chainer import datasets, iterators, optimizers, serializers
5 from chainer import Link, Chain, ChainList
/home/crissman/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/chainer/gradient_check.py in <module>()
7 from chainer import cuda
8 from chainer.functions.math import identity
----> 9 from chainer import testing
10 from chainer import variable
11
/home/crissman/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/chainer/testing/__init__.py in <module>()
5 from chainer.testing import parameterized # NOQA
6 from chainer.testing import serializer # NOQA
----> 7 from chainer.testing import training # NOQA
8 from chainer.testing import unary_math_function_test # NOQA
9
/home/crissman/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/chainer/testing/training.py in <module>()
1 from __future__ import division
2
----> 3 import mock
4
5 from chainer import training
ImportError: No module named 'mock'
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 import os
4 import pkg_resources
5 import sys
6
7 from setuptools import setup
8
9
10 if sys.version_info[:3] == (3, 5, 0):
11 if not int(os.getenv('CHAINER_PYTHON_350_FORCE', '0')):
12 msg = """
13 Chainer does not work with Python 3.5.0.
14
15 We strongly recommend to use another version of Python.
16 If you want to use Chainer with Python 3.5.0 at your own risk,
17 set CHAINER_PYTHON_350_FORCE environment variable to 1."""
18 print(msg)
19 sys.exit(1)
20
21
22 setup_requires = []
23 install_requires = [
24 'filelock',
25 'nose',
26 'numpy>=1.9.0',
27 'protobuf>=2.6.0',
28 'six>=1.9.0',
29 ]
30 cupy_require = 'cupy==2.0.0a1'
31
32 cupy_pkg = None
33 try:
34 cupy_pkg = pkg_resources.get_distribution('cupy')
35 except pkg_resources.DistributionNotFound:
36 pass
37
38 if cupy_pkg is not None:
39 install_requires.append(cupy_require)
40 print('Use %s' % cupy_require)
41
42 setup(
43 name='chainer',
44 version='3.0.0a1',
45 description='A flexible framework of neural networks',
46 author='Seiya Tokui',
47 author_email='[email protected]',
48 url='https://chainer.org/',
49 license='MIT License',
50 packages=['chainer',
51 'chainer.dataset',
52 'chainer.datasets',
53 'chainer.functions',
54 'chainer.functions.activation',
55 'chainer.functions.array',
56 'chainer.functions.connection',
57 'chainer.functions.evaluation',
58 'chainer.functions.loss',
59 'chainer.functions.math',
60 'chainer.functions.noise',
61 'chainer.functions.normalization',
62 'chainer.functions.pooling',
63 'chainer.functions.theano',
64 'chainer.functions.util',
65 'chainer.function_hooks',
66 'chainer.iterators',
67 'chainer.initializers',
68 'chainer.links',
69 'chainer.links.activation',
70 'chainer.links.caffe',
71 'chainer.links.caffe.protobuf2',
72 'chainer.links.caffe.protobuf3',
73 'chainer.links.connection',
74 'chainer.links.loss',
75 'chainer.links.model',
76 'chainer.links.model.vision',
77 'chainer.links.normalization',
78 'chainer.links.theano',
79 'chainer.optimizers',
80 'chainer.serializers',
81 'chainer.testing',
82 'chainer.training',
83 'chainer.training.extensions',
84 'chainer.training.triggers',
85 'chainer.training.updaters',
86 'chainer.utils'],
87 zip_safe=False,
88 setup_requires=setup_requires,
89 install_requires=install_requires,
90 tests_require=['mock',
91 'nose'],
92 )
93
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -22,6 +22,7 @@
setup_requires = []
install_requires = [
'filelock',
+ 'mock',
'nose',
'numpy>=1.9.0',
'protobuf>=2.6.0',
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -22,6 +22,7 @@\n setup_requires = []\n install_requires = [\n 'filelock',\n+ 'mock',\n 'nose',\n 'numpy>=1.9.0',\n 'protobuf>=2.6.0',\n", "issue": "Install bug: Mock required for gradient_check\n#2972 Install bug\r\n\r\nChainer installed with `pip install chainer`\r\n`from chainer import gradient_check` fails due to unable to find mock to import\r\nFixed by `conda install mock`\r\n\r\n`gradient_check` is included in the block declarations in the tutorial, so it should either be removed from there or mock should be added to default install so that people doing the tutorial do not get an error during the import commands.\r\n\r\n```\r\nfrom chainer import gradient_check\r\n\r\n---------------------------------------------------------------------------\r\nImportError Traceback (most recent call last)\r\n<ipython-input-1-0ba4708b632d> in <module>()\r\n 1 import numpy as np\r\n 2 import chainer\r\n----> 3 from chainer import gradient_check\r\n 4 from chainer import datasets, iterators, optimizers, serializers\r\n 5 from chainer import Link, Chain, ChainList\r\n\r\n/home/crissman/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/chainer/gradient_check.py in <module>()\r\n 7 from chainer import cuda\r\n 8 from chainer.functions.math import identity\r\n----> 9 from chainer import testing\r\n 10 from chainer import variable\r\n 11 \r\n\r\n/home/crissman/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/chainer/testing/__init__.py in <module>()\r\n 5 from chainer.testing import parameterized # NOQA\r\n 6 from chainer.testing import serializer # NOQA\r\n----> 7 from chainer.testing import training # NOQA\r\n 8 from chainer.testing import unary_math_function_test # NOQA\r\n 9 \r\n\r\n/home/crissman/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/chainer/testing/training.py in <module>()\r\n 1 from __future__ import division\r\n 2 \r\n----> 3 import mock\r\n 4 \r\n 5 from chainer import training\r\n\r\nImportError: No module named 'mock'\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python\n\nimport os\nimport pkg_resources\nimport sys\n\nfrom setuptools import setup\n\n\nif sys.version_info[:3] == (3, 5, 0):\n if not int(os.getenv('CHAINER_PYTHON_350_FORCE', '0')):\n msg = \"\"\"\nChainer does not work with Python 3.5.0.\n\nWe strongly recommend to use another version of Python.\nIf you want to use Chainer with Python 3.5.0 at your own risk,\nset CHAINER_PYTHON_350_FORCE environment variable to 1.\"\"\"\n print(msg)\n sys.exit(1)\n\n\nsetup_requires = []\ninstall_requires = [\n 'filelock',\n 'nose',\n 'numpy>=1.9.0',\n 'protobuf>=2.6.0',\n 'six>=1.9.0',\n]\ncupy_require = 'cupy==2.0.0a1'\n\ncupy_pkg = None\ntry:\n cupy_pkg = pkg_resources.get_distribution('cupy')\nexcept pkg_resources.DistributionNotFound:\n pass\n\nif cupy_pkg is not None:\n install_requires.append(cupy_require)\n print('Use %s' % cupy_require)\n\nsetup(\n name='chainer',\n version='3.0.0a1',\n description='A flexible framework of neural networks',\n author='Seiya Tokui',\n author_email='[email protected]',\n url='https://chainer.org/',\n license='MIT License',\n packages=['chainer',\n 'chainer.dataset',\n 'chainer.datasets',\n 'chainer.functions',\n 'chainer.functions.activation',\n 'chainer.functions.array',\n 'chainer.functions.connection',\n 'chainer.functions.evaluation',\n 'chainer.functions.loss',\n 'chainer.functions.math',\n 'chainer.functions.noise',\n 'chainer.functions.normalization',\n 
'chainer.functions.pooling',\n 'chainer.functions.theano',\n 'chainer.functions.util',\n 'chainer.function_hooks',\n 'chainer.iterators',\n 'chainer.initializers',\n 'chainer.links',\n 'chainer.links.activation',\n 'chainer.links.caffe',\n 'chainer.links.caffe.protobuf2',\n 'chainer.links.caffe.protobuf3',\n 'chainer.links.connection',\n 'chainer.links.loss',\n 'chainer.links.model',\n 'chainer.links.model.vision',\n 'chainer.links.normalization',\n 'chainer.links.theano',\n 'chainer.optimizers',\n 'chainer.serializers',\n 'chainer.testing',\n 'chainer.training',\n 'chainer.training.extensions',\n 'chainer.training.triggers',\n 'chainer.training.updaters',\n 'chainer.utils'],\n zip_safe=False,\n setup_requires=setup_requires,\n install_requires=install_requires,\n tests_require=['mock',\n 'nose'],\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\nimport os\nimport pkg_resources\nimport sys\n\nfrom setuptools import setup\n\n\nif sys.version_info[:3] == (3, 5, 0):\n if not int(os.getenv('CHAINER_PYTHON_350_FORCE', '0')):\n msg = \"\"\"\nChainer does not work with Python 3.5.0.\n\nWe strongly recommend to use another version of Python.\nIf you want to use Chainer with Python 3.5.0 at your own risk,\nset CHAINER_PYTHON_350_FORCE environment variable to 1.\"\"\"\n print(msg)\n sys.exit(1)\n\n\nsetup_requires = []\ninstall_requires = [\n 'filelock',\n 'mock',\n 'nose',\n 'numpy>=1.9.0',\n 'protobuf>=2.6.0',\n 'six>=1.9.0',\n]\ncupy_require = 'cupy==2.0.0a1'\n\ncupy_pkg = None\ntry:\n cupy_pkg = pkg_resources.get_distribution('cupy')\nexcept pkg_resources.DistributionNotFound:\n pass\n\nif cupy_pkg is not None:\n install_requires.append(cupy_require)\n print('Use %s' % cupy_require)\n\nsetup(\n name='chainer',\n version='3.0.0a1',\n description='A flexible framework of neural networks',\n author='Seiya Tokui',\n author_email='[email protected]',\n url='https://chainer.org/',\n license='MIT License',\n packages=['chainer',\n 'chainer.dataset',\n 'chainer.datasets',\n 'chainer.functions',\n 'chainer.functions.activation',\n 'chainer.functions.array',\n 'chainer.functions.connection',\n 'chainer.functions.evaluation',\n 'chainer.functions.loss',\n 'chainer.functions.math',\n 'chainer.functions.noise',\n 'chainer.functions.normalization',\n 'chainer.functions.pooling',\n 'chainer.functions.theano',\n 'chainer.functions.util',\n 'chainer.function_hooks',\n 'chainer.iterators',\n 'chainer.initializers',\n 'chainer.links',\n 'chainer.links.activation',\n 'chainer.links.caffe',\n 'chainer.links.caffe.protobuf2',\n 'chainer.links.caffe.protobuf3',\n 'chainer.links.connection',\n 'chainer.links.loss',\n 'chainer.links.model',\n 'chainer.links.model.vision',\n 'chainer.links.normalization',\n 'chainer.links.theano',\n 'chainer.optimizers',\n 'chainer.serializers',\n 'chainer.testing',\n 'chainer.training',\n 'chainer.training.extensions',\n 'chainer.training.triggers',\n 'chainer.training.updaters',\n 'chainer.utils'],\n zip_safe=False,\n setup_requires=setup_requires,\n install_requires=install_requires,\n tests_require=['mock',\n 'nose'],\n)\n", "path": "setup.py"}]}
| 1,534 | 75 |
gh_patches_debug_6471
|
rasdani/github-patches
|
git_diff
|
nonebot__nonebot2-1757
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug: `Adapter.bot_disconnect` 不应允许关闭其他适配器创建的 bot
**描述问题:**
当前的 `bot_disconnect` 只会根据 `bot.self_id` 关闭对应 bot。如果 `OneBot V12` 适配器调用 `bot_disconnect` 也能将 `OneBot V11` 适配器创建的 bot 移除。
**如何复现?**
<https://github.com/nonebot/adapter-onebot/pull/45>
<https://github.com/he0119/CoolQBot/issues/264>
**期望的结果**
如果关闭的 bot 不属于当前适配器,则跳过或者报错。
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nonebot/internal/adapter/adapter.py`
Content:
```
1 import abc
2 from contextlib import asynccontextmanager
3 from typing import Any, Dict, AsyncGenerator
4
5 from nonebot.config import Config
6 from nonebot.internal.driver import (
7 Driver,
8 Request,
9 Response,
10 WebSocket,
11 ForwardDriver,
12 ReverseDriver,
13 HTTPServerSetup,
14 WebSocketServerSetup,
15 )
16
17 from .bot import Bot
18
19
20 class Adapter(abc.ABC):
21 """协议适配器基类。
22
23 通常,在 Adapter 中编写协议通信相关代码,如: 建立通信连接、处理接收与发送 data 等。
24
25 参数:
26 driver: {ref}`nonebot.drivers.Driver` 实例
27 kwargs: 其他由 {ref}`nonebot.drivers.Driver.register_adapter` 传入的额外参数
28 """
29
30 def __init__(self, driver: Driver, **kwargs: Any):
31 self.driver: Driver = driver
32 """{ref}`nonebot.drivers.Driver` 实例"""
33 self.bots: Dict[str, Bot] = {}
34 """本协议适配器已建立连接的 {ref}`nonebot.adapters.Bot` 实例"""
35
36 def __repr__(self) -> str:
37 return f"Adapter(name={self.get_name()!r})"
38
39 @classmethod
40 @abc.abstractmethod
41 def get_name(cls) -> str:
42 """当前协议适配器的名称"""
43 raise NotImplementedError
44
45 @property
46 def config(self) -> Config:
47 """全局 NoneBot 配置"""
48 return self.driver.config
49
50 def bot_connect(self, bot: Bot) -> None:
51 """告知 NoneBot 建立了一个新的 {ref}`nonebot.adapters.Bot` 连接。
52
53 当有新的 {ref}`nonebot.adapters.Bot` 实例连接建立成功时调用。
54
55 参数:
56 bot: {ref}`nonebot.adapters.Bot` 实例
57 """
58 self.driver._bot_connect(bot)
59 self.bots[bot.self_id] = bot
60
61 def bot_disconnect(self, bot: Bot) -> None:
62 """告知 NoneBot {ref}`nonebot.adapters.Bot` 连接已断开。
63
64 当有 {ref}`nonebot.adapters.Bot` 实例连接断开时调用。
65
66 参数:
67 bot: {ref}`nonebot.adapters.Bot` 实例
68 """
69 self.driver._bot_disconnect(bot)
70 self.bots.pop(bot.self_id, None)
71
72 def setup_http_server(self, setup: HTTPServerSetup):
73 """设置一个 HTTP 服务器路由配置"""
74 if not isinstance(self.driver, ReverseDriver):
75 raise TypeError("Current driver does not support http server")
76 self.driver.setup_http_server(setup)
77
78 def setup_websocket_server(self, setup: WebSocketServerSetup):
79 """设置一个 WebSocket 服务器路由配置"""
80 if not isinstance(self.driver, ReverseDriver):
81 raise TypeError("Current driver does not support websocket server")
82 self.driver.setup_websocket_server(setup)
83
84 async def request(self, setup: Request) -> Response:
85 """进行一个 HTTP 客户端请求"""
86 if not isinstance(self.driver, ForwardDriver):
87 raise TypeError("Current driver does not support http client")
88 return await self.driver.request(setup)
89
90 @asynccontextmanager
91 async def websocket(self, setup: Request) -> AsyncGenerator[WebSocket, None]:
92 """建立一个 WebSocket 客户端连接请求"""
93 if not isinstance(self.driver, ForwardDriver):
94 raise TypeError("Current driver does not support websocket client")
95 async with self.driver.websocket(setup) as ws:
96 yield ws
97
98 @abc.abstractmethod
99 async def _call_api(self, bot: Bot, api: str, **data: Any) -> Any:
100 """`Adapter` 实际调用 api 的逻辑实现函数,实现该方法以调用 api。
101
102 参数:
103 api: API 名称
104 data: API 数据
105 """
106 raise NotImplementedError
107
108
109 __autodoc__ = {"Adapter._call_api": True}
110
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/nonebot/internal/adapter/adapter.py b/nonebot/internal/adapter/adapter.py
--- a/nonebot/internal/adapter/adapter.py
+++ b/nonebot/internal/adapter/adapter.py
@@ -66,8 +66,9 @@
参数:
bot: {ref}`nonebot.adapters.Bot` 实例
"""
+ if self.bots.pop(bot.self_id, None) is None:
+ raise RuntimeError(f"{bot} not found in adapter {self.get_name()}")
self.driver._bot_disconnect(bot)
- self.bots.pop(bot.self_id, None)
def setup_http_server(self, setup: HTTPServerSetup):
"""设置一个 HTTP 服务器路由配置"""
|
{"golden_diff": "diff --git a/nonebot/internal/adapter/adapter.py b/nonebot/internal/adapter/adapter.py\n--- a/nonebot/internal/adapter/adapter.py\n+++ b/nonebot/internal/adapter/adapter.py\n@@ -66,8 +66,9 @@\n \u53c2\u6570:\n bot: {ref}`nonebot.adapters.Bot` \u5b9e\u4f8b\n \"\"\"\n+ if self.bots.pop(bot.self_id, None) is None:\n+ raise RuntimeError(f\"{bot} not found in adapter {self.get_name()}\")\n self.driver._bot_disconnect(bot)\n- self.bots.pop(bot.self_id, None)\n \n def setup_http_server(self, setup: HTTPServerSetup):\n \"\"\"\u8bbe\u7f6e\u4e00\u4e2a HTTP \u670d\u52a1\u5668\u8def\u7531\u914d\u7f6e\"\"\"\n", "issue": "Bug: `Adapter.bot_disconnect` \u4e0d\u5e94\u5141\u8bb8\u5173\u95ed\u5176\u4ed6\u9002\u914d\u5668\u521b\u5efa\u7684 bot\n**\u63cf\u8ff0\u95ee\u9898\uff1a**\r\n\r\n\u5f53\u524d\u7684 `bot_disconnect` \u53ea\u4f1a\u6839\u636e `bot.self_id` \u5173\u95ed\u5bf9\u5e94 bot\u3002\u5982\u679c `OneBot V12` \u9002\u914d\u5668\u8c03\u7528 `bot_disconnect` \u4e5f\u80fd\u5c06 `OneBot V11` \u9002\u914d\u5668\u521b\u5efa\u7684 bot \u79fb\u9664\u3002\r\n\r\n**\u5982\u4f55\u590d\u73b0\uff1f**\r\n\r\n<https://github.com/nonebot/adapter-onebot/pull/45>\r\n<https://github.com/he0119/CoolQBot/issues/264>\r\n\r\n**\u671f\u671b\u7684\u7ed3\u679c**\r\n\r\n\u5982\u679c\u5173\u95ed\u7684 bot \u4e0d\u5c5e\u4e8e\u5f53\u524d\u9002\u914d\u5668\uff0c\u5219\u8df3\u8fc7\u6216\u8005\u62a5\u9519\u3002\r\n\n", "before_files": [{"content": "import abc\nfrom contextlib import asynccontextmanager\nfrom typing import Any, Dict, AsyncGenerator\n\nfrom nonebot.config import Config\nfrom nonebot.internal.driver import (\n Driver,\n Request,\n Response,\n WebSocket,\n ForwardDriver,\n ReverseDriver,\n HTTPServerSetup,\n WebSocketServerSetup,\n)\n\nfrom .bot import Bot\n\n\nclass Adapter(abc.ABC):\n \"\"\"\u534f\u8bae\u9002\u914d\u5668\u57fa\u7c7b\u3002\n\n \u901a\u5e38\uff0c\u5728 Adapter \u4e2d\u7f16\u5199\u534f\u8bae\u901a\u4fe1\u76f8\u5173\u4ee3\u7801\uff0c\u5982: \u5efa\u7acb\u901a\u4fe1\u8fde\u63a5\u3001\u5904\u7406\u63a5\u6536\u4e0e\u53d1\u9001 data \u7b49\u3002\n\n \u53c2\u6570:\n driver: {ref}`nonebot.drivers.Driver` \u5b9e\u4f8b\n kwargs: \u5176\u4ed6\u7531 {ref}`nonebot.drivers.Driver.register_adapter` \u4f20\u5165\u7684\u989d\u5916\u53c2\u6570\n \"\"\"\n\n def __init__(self, driver: Driver, **kwargs: Any):\n self.driver: Driver = driver\n \"\"\"{ref}`nonebot.drivers.Driver` \u5b9e\u4f8b\"\"\"\n self.bots: Dict[str, Bot] = {}\n \"\"\"\u672c\u534f\u8bae\u9002\u914d\u5668\u5df2\u5efa\u7acb\u8fde\u63a5\u7684 {ref}`nonebot.adapters.Bot` \u5b9e\u4f8b\"\"\"\n\n def __repr__(self) -> str:\n return f\"Adapter(name={self.get_name()!r})\"\n\n @classmethod\n @abc.abstractmethod\n def get_name(cls) -> str:\n \"\"\"\u5f53\u524d\u534f\u8bae\u9002\u914d\u5668\u7684\u540d\u79f0\"\"\"\n raise NotImplementedError\n\n @property\n def config(self) -> Config:\n \"\"\"\u5168\u5c40 NoneBot \u914d\u7f6e\"\"\"\n return self.driver.config\n\n def bot_connect(self, bot: Bot) -> None:\n \"\"\"\u544a\u77e5 NoneBot \u5efa\u7acb\u4e86\u4e00\u4e2a\u65b0\u7684 {ref}`nonebot.adapters.Bot` \u8fde\u63a5\u3002\n\n \u5f53\u6709\u65b0\u7684 {ref}`nonebot.adapters.Bot` \u5b9e\u4f8b\u8fde\u63a5\u5efa\u7acb\u6210\u529f\u65f6\u8c03\u7528\u3002\n\n \u53c2\u6570:\n bot: {ref}`nonebot.adapters.Bot` \u5b9e\u4f8b\n \"\"\"\n self.driver._bot_connect(bot)\n self.bots[bot.self_id] = bot\n\n def bot_disconnect(self, bot: Bot) -> None:\n \"\"\"\u544a\u77e5 NoneBot {ref}`nonebot.adapters.Bot` \u8fde\u63a5\u5df2\u65ad\u5f00\u3002\n\n \u5f53\u6709 
{ref}`nonebot.adapters.Bot` \u5b9e\u4f8b\u8fde\u63a5\u65ad\u5f00\u65f6\u8c03\u7528\u3002\n\n \u53c2\u6570:\n bot: {ref}`nonebot.adapters.Bot` \u5b9e\u4f8b\n \"\"\"\n self.driver._bot_disconnect(bot)\n self.bots.pop(bot.self_id, None)\n\n def setup_http_server(self, setup: HTTPServerSetup):\n \"\"\"\u8bbe\u7f6e\u4e00\u4e2a HTTP \u670d\u52a1\u5668\u8def\u7531\u914d\u7f6e\"\"\"\n if not isinstance(self.driver, ReverseDriver):\n raise TypeError(\"Current driver does not support http server\")\n self.driver.setup_http_server(setup)\n\n def setup_websocket_server(self, setup: WebSocketServerSetup):\n \"\"\"\u8bbe\u7f6e\u4e00\u4e2a WebSocket \u670d\u52a1\u5668\u8def\u7531\u914d\u7f6e\"\"\"\n if not isinstance(self.driver, ReverseDriver):\n raise TypeError(\"Current driver does not support websocket server\")\n self.driver.setup_websocket_server(setup)\n\n async def request(self, setup: Request) -> Response:\n \"\"\"\u8fdb\u884c\u4e00\u4e2a HTTP \u5ba2\u6237\u7aef\u8bf7\u6c42\"\"\"\n if not isinstance(self.driver, ForwardDriver):\n raise TypeError(\"Current driver does not support http client\")\n return await self.driver.request(setup)\n\n @asynccontextmanager\n async def websocket(self, setup: Request) -> AsyncGenerator[WebSocket, None]:\n \"\"\"\u5efa\u7acb\u4e00\u4e2a WebSocket \u5ba2\u6237\u7aef\u8fde\u63a5\u8bf7\u6c42\"\"\"\n if not isinstance(self.driver, ForwardDriver):\n raise TypeError(\"Current driver does not support websocket client\")\n async with self.driver.websocket(setup) as ws:\n yield ws\n\n @abc.abstractmethod\n async def _call_api(self, bot: Bot, api: str, **data: Any) -> Any:\n \"\"\"`Adapter` \u5b9e\u9645\u8c03\u7528 api \u7684\u903b\u8f91\u5b9e\u73b0\u51fd\u6570\uff0c\u5b9e\u73b0\u8be5\u65b9\u6cd5\u4ee5\u8c03\u7528 api\u3002\n\n \u53c2\u6570:\n api: API \u540d\u79f0\n data: API \u6570\u636e\n \"\"\"\n raise NotImplementedError\n\n\n__autodoc__ = {\"Adapter._call_api\": True}\n", "path": "nonebot/internal/adapter/adapter.py"}], "after_files": [{"content": "import abc\nfrom contextlib import asynccontextmanager\nfrom typing import Any, Dict, AsyncGenerator\n\nfrom nonebot.config import Config\nfrom nonebot.internal.driver import (\n Driver,\n Request,\n Response,\n WebSocket,\n ForwardDriver,\n ReverseDriver,\n HTTPServerSetup,\n WebSocketServerSetup,\n)\n\nfrom .bot import Bot\n\n\nclass Adapter(abc.ABC):\n \"\"\"\u534f\u8bae\u9002\u914d\u5668\u57fa\u7c7b\u3002\n\n \u901a\u5e38\uff0c\u5728 Adapter \u4e2d\u7f16\u5199\u534f\u8bae\u901a\u4fe1\u76f8\u5173\u4ee3\u7801\uff0c\u5982: \u5efa\u7acb\u901a\u4fe1\u8fde\u63a5\u3001\u5904\u7406\u63a5\u6536\u4e0e\u53d1\u9001 data \u7b49\u3002\n\n \u53c2\u6570:\n driver: {ref}`nonebot.drivers.Driver` \u5b9e\u4f8b\n kwargs: \u5176\u4ed6\u7531 {ref}`nonebot.drivers.Driver.register_adapter` \u4f20\u5165\u7684\u989d\u5916\u53c2\u6570\n \"\"\"\n\n def __init__(self, driver: Driver, **kwargs: Any):\n self.driver: Driver = driver\n \"\"\"{ref}`nonebot.drivers.Driver` \u5b9e\u4f8b\"\"\"\n self.bots: Dict[str, Bot] = {}\n \"\"\"\u672c\u534f\u8bae\u9002\u914d\u5668\u5df2\u5efa\u7acb\u8fde\u63a5\u7684 {ref}`nonebot.adapters.Bot` \u5b9e\u4f8b\"\"\"\n\n def __repr__(self) -> str:\n return f\"Adapter(name={self.get_name()!r})\"\n\n @classmethod\n @abc.abstractmethod\n def get_name(cls) -> str:\n \"\"\"\u5f53\u524d\u534f\u8bae\u9002\u914d\u5668\u7684\u540d\u79f0\"\"\"\n raise NotImplementedError\n\n @property\n def config(self) -> Config:\n \"\"\"\u5168\u5c40 NoneBot \u914d\u7f6e\"\"\"\n return self.driver.config\n\n def bot_connect(self, bot: Bot) -> 
None:\n \"\"\"\u544a\u77e5 NoneBot \u5efa\u7acb\u4e86\u4e00\u4e2a\u65b0\u7684 {ref}`nonebot.adapters.Bot` \u8fde\u63a5\u3002\n\n \u5f53\u6709\u65b0\u7684 {ref}`nonebot.adapters.Bot` \u5b9e\u4f8b\u8fde\u63a5\u5efa\u7acb\u6210\u529f\u65f6\u8c03\u7528\u3002\n\n \u53c2\u6570:\n bot: {ref}`nonebot.adapters.Bot` \u5b9e\u4f8b\n \"\"\"\n self.driver._bot_connect(bot)\n self.bots[bot.self_id] = bot\n\n def bot_disconnect(self, bot: Bot) -> None:\n \"\"\"\u544a\u77e5 NoneBot {ref}`nonebot.adapters.Bot` \u8fde\u63a5\u5df2\u65ad\u5f00\u3002\n\n \u5f53\u6709 {ref}`nonebot.adapters.Bot` \u5b9e\u4f8b\u8fde\u63a5\u65ad\u5f00\u65f6\u8c03\u7528\u3002\n\n \u53c2\u6570:\n bot: {ref}`nonebot.adapters.Bot` \u5b9e\u4f8b\n \"\"\"\n if self.bots.pop(bot.self_id, None) is None:\n raise RuntimeError(f\"{bot} not found in adapter {self.get_name()}\")\n self.driver._bot_disconnect(bot)\n\n def setup_http_server(self, setup: HTTPServerSetup):\n \"\"\"\u8bbe\u7f6e\u4e00\u4e2a HTTP \u670d\u52a1\u5668\u8def\u7531\u914d\u7f6e\"\"\"\n if not isinstance(self.driver, ReverseDriver):\n raise TypeError(\"Current driver does not support http server\")\n self.driver.setup_http_server(setup)\n\n def setup_websocket_server(self, setup: WebSocketServerSetup):\n \"\"\"\u8bbe\u7f6e\u4e00\u4e2a WebSocket \u670d\u52a1\u5668\u8def\u7531\u914d\u7f6e\"\"\"\n if not isinstance(self.driver, ReverseDriver):\n raise TypeError(\"Current driver does not support websocket server\")\n self.driver.setup_websocket_server(setup)\n\n async def request(self, setup: Request) -> Response:\n \"\"\"\u8fdb\u884c\u4e00\u4e2a HTTP \u5ba2\u6237\u7aef\u8bf7\u6c42\"\"\"\n if not isinstance(self.driver, ForwardDriver):\n raise TypeError(\"Current driver does not support http client\")\n return await self.driver.request(setup)\n\n @asynccontextmanager\n async def websocket(self, setup: Request) -> AsyncGenerator[WebSocket, None]:\n \"\"\"\u5efa\u7acb\u4e00\u4e2a WebSocket \u5ba2\u6237\u7aef\u8fde\u63a5\u8bf7\u6c42\"\"\"\n if not isinstance(self.driver, ForwardDriver):\n raise TypeError(\"Current driver does not support websocket client\")\n async with self.driver.websocket(setup) as ws:\n yield ws\n\n @abc.abstractmethod\n async def _call_api(self, bot: Bot, api: str, **data: Any) -> Any:\n \"\"\"`Adapter` \u5b9e\u9645\u8c03\u7528 api \u7684\u903b\u8f91\u5b9e\u73b0\u51fd\u6570\uff0c\u5b9e\u73b0\u8be5\u65b9\u6cd5\u4ee5\u8c03\u7528 api\u3002\n\n \u53c2\u6570:\n api: API \u540d\u79f0\n data: API \u6570\u636e\n \"\"\"\n raise NotImplementedError\n\n\n__autodoc__ = {\"Adapter._call_api\": True}\n", "path": "nonebot/internal/adapter/adapter.py"}]}
| 1,496 | 157 |
gh_patches_debug_9201
|
rasdani/github-patches
|
git_diff
|
pytorch__vision-6154
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
PIL version check for enum change appears to break SIMD versions
### 🐛 Describe the bug
This change appears to break current Pillow-SIMD version #5898
```
if tuple(int(part) for part in PIL.__version__.split(".")) >= (9, 1):
File "/home/.../lib/python3.10/site-packages/torchvision/transforms/_pil_constants.py", line 7, in <genexpr>
if tuple(int(part) for part in PIL.__version__.split(".")) >= (9, 1):
ValueError: invalid literal for int() with base 10: 'post1'
```
Amusingly enough, I warned against this approach in a user's PR in `timm` https://github.com/rwightman/pytorch-image-models/pull/1256
Would be nice to have it fixed before 1.12 is finalized, I just hit this trying out the RC
### Versions
PT 1.12 RC, TV 0.13.0
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `torchvision/transforms/_pil_constants.py`
Content:
```
1 import PIL
2 from PIL import Image
3
4 # See https://pillow.readthedocs.io/en/stable/releasenotes/9.1.0.html#deprecations
5 # TODO: Remove this file once PIL minimal version is >= 9.1
6
7 if tuple(int(part) for part in PIL.__version__.split(".")) >= (9, 1):
8 BICUBIC = Image.Resampling.BICUBIC
9 BILINEAR = Image.Resampling.BILINEAR
10 LINEAR = Image.Resampling.BILINEAR
11 NEAREST = Image.Resampling.NEAREST
12
13 AFFINE = Image.Transform.AFFINE
14 FLIP_LEFT_RIGHT = Image.Transpose.FLIP_LEFT_RIGHT
15 FLIP_TOP_BOTTOM = Image.Transpose.FLIP_TOP_BOTTOM
16 PERSPECTIVE = Image.Transform.PERSPECTIVE
17 else:
18 BICUBIC = Image.BICUBIC
19 BILINEAR = Image.BILINEAR
20 NEAREST = Image.NEAREST
21 LINEAR = Image.LINEAR
22
23 AFFINE = Image.AFFINE
24 FLIP_LEFT_RIGHT = Image.FLIP_LEFT_RIGHT
25 FLIP_TOP_BOTTOM = Image.FLIP_TOP_BOTTOM
26 PERSPECTIVE = Image.PERSPECTIVE
27
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/torchvision/transforms/_pil_constants.py b/torchvision/transforms/_pil_constants.py
--- a/torchvision/transforms/_pil_constants.py
+++ b/torchvision/transforms/_pil_constants.py
@@ -1,10 +1,9 @@
-import PIL
from PIL import Image
# See https://pillow.readthedocs.io/en/stable/releasenotes/9.1.0.html#deprecations
# TODO: Remove this file once PIL minimal version is >= 9.1
-if tuple(int(part) for part in PIL.__version__.split(".")) >= (9, 1):
+if hasattr(Image, "Resampling"):
BICUBIC = Image.Resampling.BICUBIC
BILINEAR = Image.Resampling.BILINEAR
LINEAR = Image.Resampling.BILINEAR
|
{"golden_diff": "diff --git a/torchvision/transforms/_pil_constants.py b/torchvision/transforms/_pil_constants.py\n--- a/torchvision/transforms/_pil_constants.py\n+++ b/torchvision/transforms/_pil_constants.py\n@@ -1,10 +1,9 @@\n-import PIL\n from PIL import Image\n \n # See https://pillow.readthedocs.io/en/stable/releasenotes/9.1.0.html#deprecations\n # TODO: Remove this file once PIL minimal version is >= 9.1\n \n-if tuple(int(part) for part in PIL.__version__.split(\".\")) >= (9, 1):\n+if hasattr(Image, \"Resampling\"):\n BICUBIC = Image.Resampling.BICUBIC\n BILINEAR = Image.Resampling.BILINEAR\n LINEAR = Image.Resampling.BILINEAR\n", "issue": "PIL version check for enum change appears to break SIMD versions\n### \ud83d\udc1b Describe the bug\n\nThis change appears to break current Pillow-SIMD version #5898 \r\n\r\n```\r\n if tuple(int(part) for part in PIL.__version__.split(\".\")) >= (9, 1):\r\n File \"/home/.../lib/python3.10/site-packages/torchvision/transforms/_pil_constants.py\", line 7, in <genexpr>\r\n if tuple(int(part) for part in PIL.__version__.split(\".\")) >= (9, 1):\r\nValueError: invalid literal for int() with base 10: 'post1'\r\n```\r\n\r\nAmusingly enough, I warned against this approach in a users PR in `timm` https://github.com/rwightman/pytorch-image-models/pull/1256\r\n\r\nWould be nice to have it fixed before 1.12 is finalized, I just hit this trying out the RC\n\n### Versions\n\nPT 1.12 RC, TV 0.13.0\n", "before_files": [{"content": "import PIL\nfrom PIL import Image\n\n# See https://pillow.readthedocs.io/en/stable/releasenotes/9.1.0.html#deprecations\n# TODO: Remove this file once PIL minimal version is >= 9.1\n\nif tuple(int(part) for part in PIL.__version__.split(\".\")) >= (9, 1):\n BICUBIC = Image.Resampling.BICUBIC\n BILINEAR = Image.Resampling.BILINEAR\n LINEAR = Image.Resampling.BILINEAR\n NEAREST = Image.Resampling.NEAREST\n\n AFFINE = Image.Transform.AFFINE\n FLIP_LEFT_RIGHT = Image.Transpose.FLIP_LEFT_RIGHT\n FLIP_TOP_BOTTOM = Image.Transpose.FLIP_TOP_BOTTOM\n PERSPECTIVE = Image.Transform.PERSPECTIVE\nelse:\n BICUBIC = Image.BICUBIC\n BILINEAR = Image.BILINEAR\n NEAREST = Image.NEAREST\n LINEAR = Image.LINEAR\n\n AFFINE = Image.AFFINE\n FLIP_LEFT_RIGHT = Image.FLIP_LEFT_RIGHT\n FLIP_TOP_BOTTOM = Image.FLIP_TOP_BOTTOM\n PERSPECTIVE = Image.PERSPECTIVE\n", "path": "torchvision/transforms/_pil_constants.py"}], "after_files": [{"content": "from PIL import Image\n\n# See https://pillow.readthedocs.io/en/stable/releasenotes/9.1.0.html#deprecations\n# TODO: Remove this file once PIL minimal version is >= 9.1\n\nif hasattr(Image, \"Resampling\"):\n BICUBIC = Image.Resampling.BICUBIC\n BILINEAR = Image.Resampling.BILINEAR\n LINEAR = Image.Resampling.BILINEAR\n NEAREST = Image.Resampling.NEAREST\n\n AFFINE = Image.Transform.AFFINE\n FLIP_LEFT_RIGHT = Image.Transpose.FLIP_LEFT_RIGHT\n FLIP_TOP_BOTTOM = Image.Transpose.FLIP_TOP_BOTTOM\n PERSPECTIVE = Image.Transform.PERSPECTIVE\nelse:\n BICUBIC = Image.BICUBIC\n BILINEAR = Image.BILINEAR\n NEAREST = Image.NEAREST\n LINEAR = Image.LINEAR\n\n AFFINE = Image.AFFINE\n FLIP_LEFT_RIGHT = Image.FLIP_LEFT_RIGHT\n FLIP_TOP_BOTTOM = Image.FLIP_TOP_BOTTOM\n PERSPECTIVE = Image.PERSPECTIVE\n", "path": "torchvision/transforms/_pil_constants.py"}]}
| 804 | 186 |
gh_patches_debug_16865
|
rasdani/github-patches
|
git_diff
|
internetarchive__openlibrary-9228
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Updating Slack "Staff PRs" notifications to be more specific
<!-- IMPORTANT: Before posting, be sure to redact or remove sensitive data, such as passwords, secret keys, session cookies, etc. -->
When our daily slack bot runs to tell us of new staff PRs we want to ignore:
- `needs:submitter`
- `draft`
- `blocked`
<!-- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] -->
### Describe the problem that you'd like solved
<!-- A clear and concise description of what you want to happen. -->
### Proposal & Constraints
<!-- What is the proposed solution / implementation? Is there a precedent of this approach succeeding elsewhere? -->
<!-- Which suggestions or requirements should be considered for how feature needs to appear or be implemented? -->
### Additional context
<!-- Add any other context or screenshots about the feature request here. -->
### Stakeholders
<!-- @ tag stakeholders of this bug -->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scripts/pr_slack_digest.py`
Content:
```
1 from datetime import datetime
2 import requests
3 import os
4
5
6 def send_slack_message(message: str):
7 response = requests.post(
8 'https://slack.com/api/chat.postMessage',
9 headers={
10 'Authorization': f"Bearer {os.environ.get('SLACK_TOKEN')}",
11 'Content-Type': 'application/json; charset=utf-8',
12 },
13 json={
14 'channel': '#team-abc-plus',
15 'text': message,
16 },
17 )
18 if response.status_code != 200:
19 print(f"Failed to send message to Slack. Status code: {response.status_code}")
20 else:
21 print("Message sent to Slack successfully!")
22 print(response.content)
23
24
25 if __name__ == "__main__":
26 GH_LOGIN_TO_SLACK = {
27 'cdrini': '<@cdrini>',
28 'jimchamp': '<@U01ARTHG9EV>',
29 'mekarpeles': '<@mek>',
30 'scottbarnes': '<@U03MNR6T7FH>',
31 }
32 LABEL_EMOJI = {
33 'Priority: 0': '🚨 ',
34 'Priority: 1': '❗️ ',
35 }
36 # apparently `author` acts like an OR in this API and only this API -_-
37 query = "repo:internetarchive/openlibrary is:open is:pr author:cdrini author:jimchamp author:mekarpeles author:scottbarnes -is:draft"
38 prs = requests.get(
39 "https://api.github.com/search/issues",
40 params={
41 "q": query,
42 },
43 ).json()["items"]
44
45 message = f"{len(prs)} open staff PRs:\n\n"
46 for pr in prs:
47 pr_url = pr['html_url']
48 pr_age_days = (
49 datetime.now() - datetime.strptime(pr['created_at'], '%Y-%m-%dT%H:%M:%SZ')
50 ).days
51 message += f"<{pr_url}|*#{pr['number']}* | {pr['title']}>\n"
52 message += ' | '.join(
53 [
54 f"by {pr['user']['login']} {pr_age_days} days ago",
55 f"Assigned: {GH_LOGIN_TO_SLACK[pr['assignee']['login']] if pr['assignee'] else '⚠️ None'}",
56 f"{', '.join(LABEL_EMOJI.get(label['name'], '') + label['name'] for label in pr['labels'])}\n\n",
57 ]
58 )
59
60 send_slack_message(message)
61
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/scripts/pr_slack_digest.py b/scripts/pr_slack_digest.py
--- a/scripts/pr_slack_digest.py
+++ b/scripts/pr_slack_digest.py
@@ -33,8 +33,18 @@
'Priority: 0': '🚨 ',
'Priority: 1': '❗️ ',
}
+
+ INCLUDE_AUTHORS = ['mekarpeles', 'cdrini', 'scottbarnes', 'jimchamp']
+ EXCLUDE_LABELS = [
+ 'Needs: Submitter Input',
+ 'State: Blocked',
+ ]
+ query = 'repo:internetarchive/openlibrary is:open is:pr -is:draft'
# apparently `author` acts like an OR in this API and only this API -_-
- query = "repo:internetarchive/openlibrary is:open is:pr author:cdrini author:jimchamp author:mekarpeles author:scottbarnes -is:draft"
+ included_authors = " ".join([f"author:{author}" for author in INCLUDE_AUTHORS])
+ excluded_labels = " ".join([f'-label:"{label}"' for label in EXCLUDE_LABELS])
+ query = f'{query} {included_authors} {excluded_labels}'
+
prs = requests.get(
"https://api.github.com/search/issues",
params={
|
{"golden_diff": "diff --git a/scripts/pr_slack_digest.py b/scripts/pr_slack_digest.py\n--- a/scripts/pr_slack_digest.py\n+++ b/scripts/pr_slack_digest.py\n@@ -33,8 +33,18 @@\n 'Priority: 0': '\ud83d\udea8 ',\n 'Priority: 1': '\u2757\ufe0f ',\n }\n+\n+ INCLUDE_AUTHORS = ['mekarpeles', 'cdrini', 'scottbarnes', 'jimchamp']\n+ EXCLUDE_LABELS = [\n+ 'Needs: Submitter Input',\n+ 'State: Blocked',\n+ ]\n+ query = 'repo:internetarchive/openlibrary is:open is:pr -is:draft'\n # apparently `author` acts like an OR in this API and only this API -_-\n- query = \"repo:internetarchive/openlibrary is:open is:pr author:cdrini author:jimchamp author:mekarpeles author:scottbarnes -is:draft\"\n+ included_authors = \" \".join([f\"author:{author}\" for author in INCLUDE_AUTHORS])\n+ excluded_labels = \" \".join([f'-label:\"{label}\"' for label in EXCLUDE_LABELS])\n+ query = f'{query} {included_authors} {excluded_labels}'\n+\n prs = requests.get(\n \"https://api.github.com/search/issues\",\n params={\n", "issue": "Updating Slack \"Staff PRs\" notifications to be more specific\n<!-- IMPORTANT: Before posting, be sure to redact or remove sensitive data, such as passwords, secret keys, session cookies, etc. -->\r\n\r\nWhen our daily slack bot runs to tell us of new staff PRs we want to ignore: \r\n- `needs:submitter`\r\n- `draft`\r\n- `blocked`\r\n\r\n<!-- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] -->\r\n\r\n### Describe the problem that you'd like solved\r\n<!-- A clear and concise description of what you want to happen. -->\r\n\r\n### Proposal & Constraints\r\n<!-- What is the proposed solution / implementation? Is there a precedent of this approach succeeding elsewhere? -->\r\n\r\n<!-- Which suggestions or requirements should be considered for how feature needs to appear or be implemented? -->\r\n\r\n### Additional context\r\n<!-- Add any other context or screenshots about the feature request here. -->\r\n\r\n### Stakeholders\r\n<!-- @ tag stakeholders of this bug -->\r\n\r\n\r\n\n", "before_files": [{"content": "from datetime import datetime\nimport requests\nimport os\n\n\ndef send_slack_message(message: str):\n response = requests.post(\n 'https://slack.com/api/chat.postMessage',\n headers={\n 'Authorization': f\"Bearer {os.environ.get('SLACK_TOKEN')}\",\n 'Content-Type': 'application/json; charset=utf-8',\n },\n json={\n 'channel': '#team-abc-plus',\n 'text': message,\n },\n )\n if response.status_code != 200:\n print(f\"Failed to send message to Slack. 
Status code: {response.status_code}\")\n else:\n print(\"Message sent to Slack successfully!\")\n print(response.content)\n\n\nif __name__ == \"__main__\":\n GH_LOGIN_TO_SLACK = {\n 'cdrini': '<@cdrini>',\n 'jimchamp': '<@U01ARTHG9EV>',\n 'mekarpeles': '<@mek>',\n 'scottbarnes': '<@U03MNR6T7FH>',\n }\n LABEL_EMOJI = {\n 'Priority: 0': '\ud83d\udea8 ',\n 'Priority: 1': '\u2757\ufe0f ',\n }\n # apparently `author` acts like an OR in this API and only this API -_-\n query = \"repo:internetarchive/openlibrary is:open is:pr author:cdrini author:jimchamp author:mekarpeles author:scottbarnes -is:draft\"\n prs = requests.get(\n \"https://api.github.com/search/issues\",\n params={\n \"q\": query,\n },\n ).json()[\"items\"]\n\n message = f\"{len(prs)} open staff PRs:\\n\\n\"\n for pr in prs:\n pr_url = pr['html_url']\n pr_age_days = (\n datetime.now() - datetime.strptime(pr['created_at'], '%Y-%m-%dT%H:%M:%SZ')\n ).days\n message += f\"<{pr_url}|*#{pr['number']}* | {pr['title']}>\\n\"\n message += ' | '.join(\n [\n f\"by {pr['user']['login']} {pr_age_days} days ago\",\n f\"Assigned: {GH_LOGIN_TO_SLACK[pr['assignee']['login']] if pr['assignee'] else '\u26a0\ufe0f None'}\",\n f\"{', '.join(LABEL_EMOJI.get(label['name'], '') + label['name'] for label in pr['labels'])}\\n\\n\",\n ]\n )\n\n send_slack_message(message)\n", "path": "scripts/pr_slack_digest.py"}], "after_files": [{"content": "from datetime import datetime\nimport requests\nimport os\n\n\ndef send_slack_message(message: str):\n response = requests.post(\n 'https://slack.com/api/chat.postMessage',\n headers={\n 'Authorization': f\"Bearer {os.environ.get('SLACK_TOKEN')}\",\n 'Content-Type': 'application/json; charset=utf-8',\n },\n json={\n 'channel': '#team-abc-plus',\n 'text': message,\n },\n )\n if response.status_code != 200:\n print(f\"Failed to send message to Slack. Status code: {response.status_code}\")\n else:\n print(\"Message sent to Slack successfully!\")\n print(response.content)\n\n\nif __name__ == \"__main__\":\n GH_LOGIN_TO_SLACK = {\n 'cdrini': '<@cdrini>',\n 'jimchamp': '<@U01ARTHG9EV>',\n 'mekarpeles': '<@mek>',\n 'scottbarnes': '<@U03MNR6T7FH>',\n }\n LABEL_EMOJI = {\n 'Priority: 0': '\ud83d\udea8 ',\n 'Priority: 1': '\u2757\ufe0f ',\n }\n\n INCLUDE_AUTHORS = ['mekarpeles', 'cdrini', 'scottbarnes', 'jimchamp']\n EXCLUDE_LABELS = [\n 'Needs: Submitter Input',\n 'State: Blocked',\n ]\n query = 'repo:internetarchive/openlibrary is:open is:pr -is:draft'\n # apparently `author` acts like an OR in this API and only this API -_-\n included_authors = \" \".join([f\"author:{author}\" for author in INCLUDE_AUTHORS])\n excluded_labels = \" \".join([f'-label:\"{label}\"' for label in EXCLUDE_LABELS])\n query = f'{query} {included_authors} {excluded_labels}'\n\n prs = requests.get(\n \"https://api.github.com/search/issues\",\n params={\n \"q\": query,\n },\n ).json()[\"items\"]\n\n message = f\"{len(prs)} open staff PRs:\\n\\n\"\n for pr in prs:\n pr_url = pr['html_url']\n pr_age_days = (\n datetime.now() - datetime.strptime(pr['created_at'], '%Y-%m-%dT%H:%M:%SZ')\n ).days\n message += f\"<{pr_url}|*#{pr['number']}* | {pr['title']}>\\n\"\n message += ' | '.join(\n [\n f\"by {pr['user']['login']} {pr_age_days} days ago\",\n f\"Assigned: {GH_LOGIN_TO_SLACK[pr['assignee']['login']] if pr['assignee'] else '\u26a0\ufe0f None'}\",\n f\"{', '.join(LABEL_EMOJI.get(label['name'], '') + label['name'] for label in pr['labels'])}\\n\\n\",\n ]\n )\n\n send_slack_message(message)\n", "path": "scripts/pr_slack_digest.py"}]}
| 1,121 | 302 |
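A hedged, standalone sketch of the query-building technique used in the openlibrary record above: compose a GitHub search query from included authors and excluded labels. The repository, author and label values are illustrative, not the real ones.

```python
import requests

# Repository, authors and labels here are illustrative, not the real ones.
INCLUDE_AUTHORS = ["alice", "bob"]
EXCLUDE_LABELS = ["Needs: Submitter Input", "State: Blocked"]

query = "repo:example/repo is:open is:pr -is:draft"
query += " " + " ".join(f"author:{author}" for author in INCLUDE_AUTHORS)
query += " " + " ".join(f'-label:"{label}"' for label in EXCLUDE_LABELS)

# Repeated author: qualifiers act as OR in GitHub's search API, so this returns
# open, non-draft PRs by any listed author carrying none of the excluded labels.
response = requests.get("https://api.github.com/search/issues", params={"q": query})
print(response.json().get("total_count"))
```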
gh_patches_debug_9867
|
rasdani/github-patches
|
git_diff
|
mirumee__ariadne-357
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Exception in default_literal_parser() when ValueNode has no "value" member
The [default_literal_parser() function](https://github.com/mirumee/ariadne/blob/master/ariadne/scalars.py#L90) in ariadne.scalars expects nodes to have a "value" member. However, this is not the case with, for example, `ObjectValueNode` or `ListValueNode`. This causes an exception when trying to pass such nodes.
My suggestion is to use `graphql.utilities.value_from_ast_untyped` instead:
```
return value_parser(value_from_ast_untyped(ast))
```
I'm happy to do a PR if you guys like this change.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ariadne/scalars.py`
Content:
```
1 from typing import Optional, cast
2
3 from graphql.language.ast import (
4 BooleanValueNode,
5 FloatValueNode,
6 IntValueNode,
7 StringValueNode,
8 )
9 from graphql.type import (
10 GraphQLNamedType,
11 GraphQLScalarLiteralParser,
12 GraphQLScalarSerializer,
13 GraphQLScalarType,
14 GraphQLScalarValueParser,
15 GraphQLSchema,
16 )
17
18 from .types import SchemaBindable
19
20
21 class ScalarType(SchemaBindable):
22 _serialize: Optional[GraphQLScalarSerializer]
23 _parse_value: Optional[GraphQLScalarValueParser]
24 _parse_literal: Optional[GraphQLScalarLiteralParser]
25
26 def __init__(
27 self,
28 name: str,
29 *,
30 serializer: GraphQLScalarSerializer = None,
31 value_parser: GraphQLScalarValueParser = None,
32 literal_parser: GraphQLScalarLiteralParser = None,
33 ) -> None:
34 self.name = name
35 self._serialize = serializer
36 self._parse_value = value_parser
37 self._parse_literal = literal_parser
38
39 def set_serializer(self, f: GraphQLScalarSerializer) -> GraphQLScalarSerializer:
40 self._serialize = f
41 return f
42
43 def set_value_parser(self, f: GraphQLScalarValueParser) -> GraphQLScalarValueParser:
44 self._parse_value = f
45 if not self._parse_literal:
46 self._parse_literal = create_default_literal_parser(f)
47 return f
48
49 def set_literal_parser(
50 self, f: GraphQLScalarLiteralParser
51 ) -> GraphQLScalarLiteralParser:
52 self._parse_literal = f
53 return f
54
55 # Alias above setters for consistent decorator API
56 serializer = set_serializer
57 value_parser = set_value_parser
58 literal_parser = set_literal_parser
59
60 def bind_to_schema(self, schema: GraphQLSchema) -> None:
61 graphql_type = schema.type_map.get(self.name)
62 self.validate_graphql_type(graphql_type)
63 graphql_type = cast(GraphQLScalarType, graphql_type)
64
65 if self._serialize:
66 # See mypy bug https://github.com/python/mypy/issues/2427
67 graphql_type.serialize = self._serialize # type: ignore
68 if self._parse_value:
69 graphql_type.parse_value = self._parse_value # type: ignore
70 if self._parse_literal:
71 graphql_type.parse_literal = self._parse_literal # type: ignore
72
73 def validate_graphql_type(self, graphql_type: Optional[GraphQLNamedType]) -> None:
74 if not graphql_type:
75 raise ValueError("Scalar %s is not defined in the schema" % self.name)
76 if not isinstance(graphql_type, GraphQLScalarType):
77 raise ValueError(
78 "%s is defined in the schema, but it is instance of %s (expected %s)"
79 % (self.name, type(graphql_type).__name__, GraphQLScalarType.__name__)
80 )
81
82
83 SCALAR_AST_NODES = (BooleanValueNode, FloatValueNode, IntValueNode, StringValueNode)
84
85
86 def create_default_literal_parser(
87 value_parser: GraphQLScalarValueParser,
88 ) -> GraphQLScalarLiteralParser:
89 def default_literal_parser(ast):
90 return value_parser(ast.value)
91
92 return default_literal_parser
93
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ariadne/scalars.py b/ariadne/scalars.py
--- a/ariadne/scalars.py
+++ b/ariadne/scalars.py
@@ -14,6 +14,7 @@
GraphQLScalarValueParser,
GraphQLSchema,
)
+from graphql.utilities import value_from_ast_untyped
from .types import SchemaBindable
@@ -87,6 +88,6 @@
value_parser: GraphQLScalarValueParser,
) -> GraphQLScalarLiteralParser:
def default_literal_parser(ast):
- return value_parser(ast.value)
+ return value_parser(value_from_ast_untyped(ast))
return default_literal_parser
|
{"golden_diff": "diff --git a/ariadne/scalars.py b/ariadne/scalars.py\n--- a/ariadne/scalars.py\n+++ b/ariadne/scalars.py\n@@ -14,6 +14,7 @@\n GraphQLScalarValueParser,\n GraphQLSchema,\n )\n+from graphql.utilities import value_from_ast_untyped\n \n from .types import SchemaBindable\n \n@@ -87,6 +88,6 @@\n value_parser: GraphQLScalarValueParser,\n ) -> GraphQLScalarLiteralParser:\n def default_literal_parser(ast):\n- return value_parser(ast.value)\n+ return value_parser(value_from_ast_untyped(ast))\n \n return default_literal_parser\n", "issue": "Exception in default_literal_parser() when ValueNode has no \"value\" member\nThe [default_literal_parser() function](https://github.com/mirumee/ariadne/blob/master/ariadne/scalars.py#L90) in ariadne.scalars expects nodes to have a \"value\" member. However, this is not the case with, for example, `ObjectValueNode` or `ListValueNode`. This causes an exception when trying to pass such nodes.\r\n\r\nMy suggestion is to use `graphql.utilities.value_from_ast_untyped` instead:\r\n```\r\nreturn value_parser(value_from_ast_untyped(ast))\r\n``` \r\n\r\nI'm happy to do a PR if you guys like this change.\n", "before_files": [{"content": "from typing import Optional, cast\n\nfrom graphql.language.ast import (\n BooleanValueNode,\n FloatValueNode,\n IntValueNode,\n StringValueNode,\n)\nfrom graphql.type import (\n GraphQLNamedType,\n GraphQLScalarLiteralParser,\n GraphQLScalarSerializer,\n GraphQLScalarType,\n GraphQLScalarValueParser,\n GraphQLSchema,\n)\n\nfrom .types import SchemaBindable\n\n\nclass ScalarType(SchemaBindable):\n _serialize: Optional[GraphQLScalarSerializer]\n _parse_value: Optional[GraphQLScalarValueParser]\n _parse_literal: Optional[GraphQLScalarLiteralParser]\n\n def __init__(\n self,\n name: str,\n *,\n serializer: GraphQLScalarSerializer = None,\n value_parser: GraphQLScalarValueParser = None,\n literal_parser: GraphQLScalarLiteralParser = None,\n ) -> None:\n self.name = name\n self._serialize = serializer\n self._parse_value = value_parser\n self._parse_literal = literal_parser\n\n def set_serializer(self, f: GraphQLScalarSerializer) -> GraphQLScalarSerializer:\n self._serialize = f\n return f\n\n def set_value_parser(self, f: GraphQLScalarValueParser) -> GraphQLScalarValueParser:\n self._parse_value = f\n if not self._parse_literal:\n self._parse_literal = create_default_literal_parser(f)\n return f\n\n def set_literal_parser(\n self, f: GraphQLScalarLiteralParser\n ) -> GraphQLScalarLiteralParser:\n self._parse_literal = f\n return f\n\n # Alias above setters for consistent decorator API\n serializer = set_serializer\n value_parser = set_value_parser\n literal_parser = set_literal_parser\n\n def bind_to_schema(self, schema: GraphQLSchema) -> None:\n graphql_type = schema.type_map.get(self.name)\n self.validate_graphql_type(graphql_type)\n graphql_type = cast(GraphQLScalarType, graphql_type)\n\n if self._serialize:\n # See mypy bug https://github.com/python/mypy/issues/2427\n graphql_type.serialize = self._serialize # type: ignore\n if self._parse_value:\n graphql_type.parse_value = self._parse_value # type: ignore\n if self._parse_literal:\n graphql_type.parse_literal = self._parse_literal # type: ignore\n\n def validate_graphql_type(self, graphql_type: Optional[GraphQLNamedType]) -> None:\n if not graphql_type:\n raise ValueError(\"Scalar %s is not defined in the schema\" % self.name)\n if not isinstance(graphql_type, GraphQLScalarType):\n raise ValueError(\n \"%s is defined in the schema, but it is instance of %s (expected 
%s)\"\n % (self.name, type(graphql_type).__name__, GraphQLScalarType.__name__)\n )\n\n\nSCALAR_AST_NODES = (BooleanValueNode, FloatValueNode, IntValueNode, StringValueNode)\n\n\ndef create_default_literal_parser(\n value_parser: GraphQLScalarValueParser,\n) -> GraphQLScalarLiteralParser:\n def default_literal_parser(ast):\n return value_parser(ast.value)\n\n return default_literal_parser\n", "path": "ariadne/scalars.py"}], "after_files": [{"content": "from typing import Optional, cast\n\nfrom graphql.language.ast import (\n BooleanValueNode,\n FloatValueNode,\n IntValueNode,\n StringValueNode,\n)\nfrom graphql.type import (\n GraphQLNamedType,\n GraphQLScalarLiteralParser,\n GraphQLScalarSerializer,\n GraphQLScalarType,\n GraphQLScalarValueParser,\n GraphQLSchema,\n)\nfrom graphql.utilities import value_from_ast_untyped\n\nfrom .types import SchemaBindable\n\n\nclass ScalarType(SchemaBindable):\n _serialize: Optional[GraphQLScalarSerializer]\n _parse_value: Optional[GraphQLScalarValueParser]\n _parse_literal: Optional[GraphQLScalarLiteralParser]\n\n def __init__(\n self,\n name: str,\n *,\n serializer: GraphQLScalarSerializer = None,\n value_parser: GraphQLScalarValueParser = None,\n literal_parser: GraphQLScalarLiteralParser = None,\n ) -> None:\n self.name = name\n self._serialize = serializer\n self._parse_value = value_parser\n self._parse_literal = literal_parser\n\n def set_serializer(self, f: GraphQLScalarSerializer) -> GraphQLScalarSerializer:\n self._serialize = f\n return f\n\n def set_value_parser(self, f: GraphQLScalarValueParser) -> GraphQLScalarValueParser:\n self._parse_value = f\n if not self._parse_literal:\n self._parse_literal = create_default_literal_parser(f)\n return f\n\n def set_literal_parser(\n self, f: GraphQLScalarLiteralParser\n ) -> GraphQLScalarLiteralParser:\n self._parse_literal = f\n return f\n\n # Alias above setters for consistent decorator API\n serializer = set_serializer\n value_parser = set_value_parser\n literal_parser = set_literal_parser\n\n def bind_to_schema(self, schema: GraphQLSchema) -> None:\n graphql_type = schema.type_map.get(self.name)\n self.validate_graphql_type(graphql_type)\n graphql_type = cast(GraphQLScalarType, graphql_type)\n\n if self._serialize:\n # See mypy bug https://github.com/python/mypy/issues/2427\n graphql_type.serialize = self._serialize # type: ignore\n if self._parse_value:\n graphql_type.parse_value = self._parse_value # type: ignore\n if self._parse_literal:\n graphql_type.parse_literal = self._parse_literal # type: ignore\n\n def validate_graphql_type(self, graphql_type: Optional[GraphQLNamedType]) -> None:\n if not graphql_type:\n raise ValueError(\"Scalar %s is not defined in the schema\" % self.name)\n if not isinstance(graphql_type, GraphQLScalarType):\n raise ValueError(\n \"%s is defined in the schema, but it is instance of %s (expected %s)\"\n % (self.name, type(graphql_type).__name__, GraphQLScalarType.__name__)\n )\n\n\nSCALAR_AST_NODES = (BooleanValueNode, FloatValueNode, IntValueNode, StringValueNode)\n\n\ndef create_default_literal_parser(\n value_parser: GraphQLScalarValueParser,\n) -> GraphQLScalarLiteralParser:\n def default_literal_parser(ast):\n return value_parser(value_from_ast_untyped(ast))\n\n return default_literal_parser\n", "path": "ariadne/scalars.py"}]}
| 1,250 | 143 |
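A small sketch, assuming graphql-core 3, of why the ariadne fix above works: `value_from_ast_untyped` handles list and object value nodes that have no `.value` attribute.

```python
from graphql import parse
from graphql.utilities import value_from_ast_untyped

# A ListValueNode (or ObjectValueNode) has no ".value" attribute, so a literal
# parser that reads ast.value raises AttributeError on it; value_from_ast_untyped
# converts any value node kind into a plain Python value instead.
document = parse("{ field(ids: [1, 2, 3]) }")
field = document.definitions[0].selection_set.selections[0]
ast_node = field.arguments[0].value  # ListValueNode

print(value_from_ast_untyped(ast_node))  # [1, 2, 3]
```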
gh_patches_debug_33234
|
rasdani/github-patches
|
git_diff
|
modin-project__modin-1373
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
pip install modin[all] should choose what to install based on the OS
When a Windows user runs `pip install modin[all]` it will not work because Ray does not have any Windows releases. We should still support `pip install modin[all]` in Windows.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import setup, find_packages
2 import versioneer
3
4 with open("README.md", "r") as fh:
5 long_description = fh.read()
6
7 dask_deps = ["dask>=2.1.0", "distributed>=2.3.2"]
8 ray_deps = ["ray==0.8.3"]
9
10 setup(
11 name="modin",
12 version=versioneer.get_version(),
13 cmdclass=versioneer.get_cmdclass(),
14 description="Modin: Make your pandas code run faster by changing one line of code.",
15 packages=find_packages(),
16 url="https://github.com/modin-project/modin",
17 long_description=long_description,
18 long_description_content_type="text/markdown",
19 install_requires=["pandas==1.0.3", "packaging"],
20 extras_require={
21 # can be installed by pip install modin[dask]
22 "dask": dask_deps,
23 "ray": ray_deps,
24 "all": dask_deps + ray_deps,
25 },
26 python_requires=">=3.5",
27 )
28
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -1,18 +1,60 @@
from setuptools import setup, find_packages
import versioneer
+import os
+from setuptools.dist import Distribution
+
+try:
+ from wheel.bdist_wheel import bdist_wheel
+
+ HAS_WHEEL = True
+except ImportError:
+ HAS_WHEEL = False
with open("README.md", "r") as fh:
long_description = fh.read()
+if HAS_WHEEL:
+
+ class ModinWheel(bdist_wheel):
+ def finalize_options(self):
+ bdist_wheel.finalize_options(self)
+ self.root_is_pure = False
+
+ def get_tag(self):
+ _, _, plat = bdist_wheel.get_tag(self)
+ py = "py3"
+ abi = "none"
+ return py, abi, plat
+
+
+class ModinDistribution(Distribution):
+ def __init__(self, *attrs):
+ Distribution.__init__(self, *attrs)
+ if HAS_WHEEL:
+ self.cmdclass["bdist_wheel"] = ModinWheel
+
+ def is_pure(self):
+ return False
+
+
dask_deps = ["dask>=2.1.0", "distributed>=2.3.2"]
ray_deps = ["ray==0.8.3"]
+if "SETUP_PLAT_NAME" in os.environ:
+ if "win" in os.environ["SETUP_PLAT_NAME"]:
+ all_deps = dask_deps
+ else:
+ all_deps = dask_deps + ray_deps
+else:
+ all_deps = dask_deps if os.name == "nt" else dask_deps + ray_deps
setup(
name="modin",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
+ distclass=ModinDistribution,
description="Modin: Make your pandas code run faster by changing one line of code.",
packages=find_packages(),
+ license="Apache 2",
url="https://github.com/modin-project/modin",
long_description=long_description,
long_description_content_type="text/markdown",
@@ -21,7 +63,7 @@
# can be installed by pip install modin[dask]
"dask": dask_deps,
"ray": ray_deps,
- "all": dask_deps + ray_deps,
+ "all": all_deps,
},
python_requires=">=3.5",
)
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -1,18 +1,60 @@\n from setuptools import setup, find_packages\n import versioneer\n+import os\n+from setuptools.dist import Distribution\n+\n+try:\n+ from wheel.bdist_wheel import bdist_wheel\n+\n+ HAS_WHEEL = True\n+except ImportError:\n+ HAS_WHEEL = False\n \n with open(\"README.md\", \"r\") as fh:\n long_description = fh.read()\n \n+if HAS_WHEEL:\n+\n+ class ModinWheel(bdist_wheel):\n+ def finalize_options(self):\n+ bdist_wheel.finalize_options(self)\n+ self.root_is_pure = False\n+\n+ def get_tag(self):\n+ _, _, plat = bdist_wheel.get_tag(self)\n+ py = \"py3\"\n+ abi = \"none\"\n+ return py, abi, plat\n+\n+\n+class ModinDistribution(Distribution):\n+ def __init__(self, *attrs):\n+ Distribution.__init__(self, *attrs)\n+ if HAS_WHEEL:\n+ self.cmdclass[\"bdist_wheel\"] = ModinWheel\n+\n+ def is_pure(self):\n+ return False\n+\n+\n dask_deps = [\"dask>=2.1.0\", \"distributed>=2.3.2\"]\n ray_deps = [\"ray==0.8.3\"]\n+if \"SETUP_PLAT_NAME\" in os.environ:\n+ if \"win\" in os.environ[\"SETUP_PLAT_NAME\"]:\n+ all_deps = dask_deps\n+ else:\n+ all_deps = dask_deps + ray_deps\n+else:\n+ all_deps = dask_deps if os.name == \"nt\" else dask_deps + ray_deps\n \n setup(\n name=\"modin\",\n version=versioneer.get_version(),\n cmdclass=versioneer.get_cmdclass(),\n+ distclass=ModinDistribution,\n description=\"Modin: Make your pandas code run faster by changing one line of code.\",\n packages=find_packages(),\n+ license=\"Apache 2\",\n url=\"https://github.com/modin-project/modin\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n@@ -21,7 +63,7 @@\n # can be installed by pip install modin[dask]\n \"dask\": dask_deps,\n \"ray\": ray_deps,\n- \"all\": dask_deps + ray_deps,\n+ \"all\": all_deps,\n },\n python_requires=\">=3.5\",\n )\n", "issue": "pip install modin[all] should choose what to install based on the OS\nWhen a Windows user runs `pip install modin[all]` it will not work because Ray does not have any Windows releases. 
We should still support `pip install modin[all]` in Windows.\r\n\n", "before_files": [{"content": "from setuptools import setup, find_packages\nimport versioneer\n\nwith open(\"README.md\", \"r\") as fh:\n long_description = fh.read()\n\ndask_deps = [\"dask>=2.1.0\", \"distributed>=2.3.2\"]\nray_deps = [\"ray==0.8.3\"]\n\nsetup(\n name=\"modin\",\n version=versioneer.get_version(),\n cmdclass=versioneer.get_cmdclass(),\n description=\"Modin: Make your pandas code run faster by changing one line of code.\",\n packages=find_packages(),\n url=\"https://github.com/modin-project/modin\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n install_requires=[\"pandas==1.0.3\", \"packaging\"],\n extras_require={\n # can be installed by pip install modin[dask]\n \"dask\": dask_deps,\n \"ray\": ray_deps,\n \"all\": dask_deps + ray_deps,\n },\n python_requires=\">=3.5\",\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import setup, find_packages\nimport versioneer\nimport os\nfrom setuptools.dist import Distribution\n\ntry:\n from wheel.bdist_wheel import bdist_wheel\n\n HAS_WHEEL = True\nexcept ImportError:\n HAS_WHEEL = False\n\nwith open(\"README.md\", \"r\") as fh:\n long_description = fh.read()\n\nif HAS_WHEEL:\n\n class ModinWheel(bdist_wheel):\n def finalize_options(self):\n bdist_wheel.finalize_options(self)\n self.root_is_pure = False\n\n def get_tag(self):\n _, _, plat = bdist_wheel.get_tag(self)\n py = \"py3\"\n abi = \"none\"\n return py, abi, plat\n\n\nclass ModinDistribution(Distribution):\n def __init__(self, *attrs):\n Distribution.__init__(self, *attrs)\n if HAS_WHEEL:\n self.cmdclass[\"bdist_wheel\"] = ModinWheel\n\n def is_pure(self):\n return False\n\n\ndask_deps = [\"dask>=2.1.0\", \"distributed>=2.3.2\"]\nray_deps = [\"ray==0.8.3\"]\nif \"SETUP_PLAT_NAME\" in os.environ:\n if \"win\" in os.environ[\"SETUP_PLAT_NAME\"]:\n all_deps = dask_deps\n else:\n all_deps = dask_deps + ray_deps\nelse:\n all_deps = dask_deps if os.name == \"nt\" else dask_deps + ray_deps\n\nsetup(\n name=\"modin\",\n version=versioneer.get_version(),\n cmdclass=versioneer.get_cmdclass(),\n distclass=ModinDistribution,\n description=\"Modin: Make your pandas code run faster by changing one line of code.\",\n packages=find_packages(),\n license=\"Apache 2\",\n url=\"https://github.com/modin-project/modin\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n install_requires=[\"pandas==1.0.3\", \"packaging\"],\n extras_require={\n # can be installed by pip install modin[dask]\n \"dask\": dask_deps,\n \"ray\": ray_deps,\n \"all\": all_deps,\n },\n python_requires=\">=3.5\",\n)\n", "path": "setup.py"}]}
| 587 | 556 |
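A hypothetical minimal `setup.py` showing the platform-dependent extras idea from the modin record above; the package name and version pins are illustrative. A comment notes the declarative PEP 508 alternative.

```python
# Hypothetical minimal setup.py; the package name and version pins are illustrative.
import os

from setuptools import setup

dask_deps = ["dask>=2.1.0", "distributed>=2.3.2"]
ray_deps = ["ray==0.8.3"]

# os.name == "nt" on Windows, where Ray wheels were unavailable at the time.
all_deps = dask_deps if os.name == "nt" else dask_deps + ray_deps

# A declarative alternative is a PEP 508 environment marker such as
# 'ray==0.8.3; platform_system != "Windows"', which also holds in built wheels.
setup(
    name="example-package",
    version="0.0.1",
    extras_require={"dask": dask_deps, "ray": ray_deps, "all": all_deps},
)
```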
gh_patches_debug_7412
|
rasdani/github-patches
|
git_diff
|
scikit-image__scikit-image-1498
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
skimage.io.imread returns incorrect dimensions (according to docs)
See here: http://stackoverflow.com/questions/30088841
Importing a TIFF image using skimage.io.imread returns an array with shape (3,M,N) instead of (M,N,3). This breaks the flatten and as_grey features. The results are the same calling `imread` with `plugin='tifffile'`.
```
In [1]: im = skimage.io.imread('image.tif')
In [2]: im.shape
Out[2]: (3, 768, 1024)
In [3]: im = skimage.io.imread('image.tif', as_grey = True)
In [4]: im.shape
Out[4]: (3, 768)
In [5]: im = skimage.io.imread('image.tif', flatten = True)
In [6]: im.shape
Out[6]: (3, 768)
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `skimage/io/_io.py`
Content:
```
1 from io import BytesIO
2 import warnings
3
4 import numpy as np
5 import six
6
7 from ..io.manage_plugins import call_plugin
8 from ..color import rgb2grey
9 from .util import file_or_url_context
10 from ..exposure import is_low_contrast
11 from .._shared._warnings import all_warnings
12
13
14 __all__ = ['imread', 'imread_collection', 'imsave', 'imshow', 'show']
15
16
17 def imread(fname, as_grey=False, plugin=None, flatten=None,
18 **plugin_args):
19 """Load an image from file.
20
21 Parameters
22 ----------
23 fname : string
24 Image file name, e.g. ``test.jpg`` or URL.
25 as_grey : bool
26 If True, convert color images to grey-scale (32-bit floats).
27 Images that are already in grey-scale format are not converted.
28 plugin : str
29 Name of plugin to use (Python Imaging Library by default).
30
31 Other Parameters
32 ----------------
33 flatten : bool
34 Backward compatible keyword, superseded by `as_grey`.
35
36 Returns
37 -------
38 img_array : ndarray
39 The different colour bands/channels are stored in the
40 third dimension, such that a grey-image is MxN, an
41 RGB-image MxNx3 and an RGBA-image MxNx4.
42
43 Other parameters
44 ----------------
45 plugin_args : keywords
46 Passed to the given plugin.
47
48 """
49 # Backward compatibility
50 if flatten is not None:
51 as_grey = flatten
52
53 with file_or_url_context(fname) as fname:
54 img = call_plugin('imread', fname, plugin=plugin, **plugin_args)
55
56 if as_grey and getattr(img, 'ndim', 0) >= 3:
57 img = rgb2grey(img)
58
59 return img
60
61
62 def imread_collection(load_pattern, conserve_memory=True,
63 plugin=None, **plugin_args):
64 """
65 Load a collection of images.
66
67 Parameters
68 ----------
69 load_pattern : str or list
70 List of objects to load. These are usually filenames, but may
71 vary depending on the currently active plugin. See the docstring
72 for ``ImageCollection`` for the default behaviour of this parameter.
73 conserve_memory : bool, optional
74 If True, never keep more than one in memory at a specific
75 time. Otherwise, images will be cached once they are loaded.
76
77 Returns
78 -------
79 ic : ImageCollection
80 Collection of images.
81
82 Other parameters
83 ----------------
84 plugin_args : keywords
85 Passed to the given plugin.
86
87 """
88 return call_plugin('imread_collection', load_pattern, conserve_memory,
89 plugin=plugin, **plugin_args)
90
91
92 def imsave(fname, arr, plugin=None, **plugin_args):
93 """Save an image to file.
94
95 Parameters
96 ----------
97 fname : str
98 Target filename.
99 arr : ndarray of shape (M,N) or (M,N,3) or (M,N,4)
100 Image data.
101 plugin : str
102 Name of plugin to use. By default, the different plugins are
103 tried (starting with the Python Imaging Library) until a suitable
104 candidate is found.
105
106 Other parameters
107 ----------------
108 plugin_args : keywords
109 Passed to the given plugin.
110
111 """
112 if is_low_contrast(arr):
113 warnings.warn('%s is a low contrast image' % fname)
114 return call_plugin('imsave', fname, arr, plugin=plugin, **plugin_args)
115
116
117 def imshow(arr, plugin=None, **plugin_args):
118 """Display an image.
119
120 Parameters
121 ----------
122 arr : ndarray or str
123 Image data or name of image file.
124 plugin : str
125 Name of plugin to use. By default, the different plugins are
126 tried (starting with the Python Imaging Library) until a suitable
127 candidate is found.
128
129 Other parameters
130 ----------------
131 plugin_args : keywords
132 Passed to the given plugin.
133
134 """
135 if isinstance(arr, six.string_types):
136 arr = call_plugin('imread', arr, plugin=plugin)
137 return call_plugin('imshow', arr, plugin=plugin, **plugin_args)
138
139
140 def show():
141 '''Display pending images.
142
143 Launch the event loop of the current gui plugin, and display all
144 pending images, queued via `imshow`. This is required when using
145 `imshow` from non-interactive scripts.
146
147 A call to `show` will block execution of code until all windows
148 have been closed.
149
150 Examples
151 --------
152 >>> import skimage.io as io
153
154 >>> for i in range(4):
155 ... ax_im = io.imshow(np.random.rand(50, 50))
156 >>> io.show() # doctest: +SKIP
157
158 '''
159 return call_plugin('_app_show')
160
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/skimage/io/_io.py b/skimage/io/_io.py
--- a/skimage/io/_io.py
+++ b/skimage/io/_io.py
@@ -53,8 +53,16 @@
with file_or_url_context(fname) as fname:
img = call_plugin('imread', fname, plugin=plugin, **plugin_args)
- if as_grey and getattr(img, 'ndim', 0) >= 3:
- img = rgb2grey(img)
+ if not hasattr(img, 'ndim'):
+ return img
+
+ if img.ndim > 2:
+ if img.shape[-1] not in (3, 4) and img.shape[-3] in (3, 4):
+ img = np.swapaxes(img, -1, -3)
+ img = np.swapaxes(img, -2, -3)
+
+ if as_grey:
+ img = rgb2grey(img)
return img
|
{"golden_diff": "diff --git a/skimage/io/_io.py b/skimage/io/_io.py\n--- a/skimage/io/_io.py\n+++ b/skimage/io/_io.py\n@@ -53,8 +53,16 @@\n with file_or_url_context(fname) as fname:\n img = call_plugin('imread', fname, plugin=plugin, **plugin_args)\n \n- if as_grey and getattr(img, 'ndim', 0) >= 3:\n- img = rgb2grey(img)\n+ if not hasattr(img, 'ndim'):\n+ return img\n+\n+ if img.ndim > 2:\n+ if img.shape[-1] not in (3, 4) and img.shape[-3] in (3, 4):\n+ img = np.swapaxes(img, -1, -3)\n+ img = np.swapaxes(img, -2, -3)\n+\n+ if as_grey:\n+ img = rgb2grey(img)\n \n return img\n", "issue": "skimage.io.imread returns incorrect dimensions (according to docs)\nSee here: http://stackoverflow.com/questions/30088841\n\nImporting a TIFF image using skimage.io.imread returns an array with shape (3,M,N) instead of (M,N,3). This breaks the flatten and as_grey features. The results are the same calling `imread` with `plugin='tifffile'`.\n\n```\nIn [1]: im = skimage.io.imread('image.tif')\n\nIn [2]: im.shape\nOut[2]: (3, 768, 1024)\n\nIn [3]: im = skimage.io.imread('image.tif', as_grey = True)\n\nIn [4]: im.shape\nOut[4]: (3, 768)\n\nIn [5]: im = skimage.io.imread('image.tif', flatten = True)\n\nIn [6]: im.shape\nOut[6]: (3, 768)\n```\n\n", "before_files": [{"content": "from io import BytesIO\nimport warnings\n\nimport numpy as np\nimport six\n\nfrom ..io.manage_plugins import call_plugin\nfrom ..color import rgb2grey\nfrom .util import file_or_url_context\nfrom ..exposure import is_low_contrast\nfrom .._shared._warnings import all_warnings\n\n\n__all__ = ['imread', 'imread_collection', 'imsave', 'imshow', 'show']\n\n\ndef imread(fname, as_grey=False, plugin=None, flatten=None,\n **plugin_args):\n \"\"\"Load an image from file.\n\n Parameters\n ----------\n fname : string\n Image file name, e.g. ``test.jpg`` or URL.\n as_grey : bool\n If True, convert color images to grey-scale (32-bit floats).\n Images that are already in grey-scale format are not converted.\n plugin : str\n Name of plugin to use (Python Imaging Library by default).\n\n Other Parameters\n ----------------\n flatten : bool\n Backward compatible keyword, superseded by `as_grey`.\n\n Returns\n -------\n img_array : ndarray\n The different colour bands/channels are stored in the\n third dimension, such that a grey-image is MxN, an\n RGB-image MxNx3 and an RGBA-image MxNx4.\n\n Other parameters\n ----------------\n plugin_args : keywords\n Passed to the given plugin.\n\n \"\"\"\n # Backward compatibility\n if flatten is not None:\n as_grey = flatten\n\n with file_or_url_context(fname) as fname:\n img = call_plugin('imread', fname, plugin=plugin, **plugin_args)\n\n if as_grey and getattr(img, 'ndim', 0) >= 3:\n img = rgb2grey(img)\n\n return img\n\n\ndef imread_collection(load_pattern, conserve_memory=True,\n plugin=None, **plugin_args):\n \"\"\"\n Load a collection of images.\n\n Parameters\n ----------\n load_pattern : str or list\n List of objects to load. These are usually filenames, but may\n vary depending on the currently active plugin. See the docstring\n for ``ImageCollection`` for the default behaviour of this parameter.\n conserve_memory : bool, optional\n If True, never keep more than one in memory at a specific\n time. 
Otherwise, images will be cached once they are loaded.\n\n Returns\n -------\n ic : ImageCollection\n Collection of images.\n\n Other parameters\n ----------------\n plugin_args : keywords\n Passed to the given plugin.\n\n \"\"\"\n return call_plugin('imread_collection', load_pattern, conserve_memory,\n plugin=plugin, **plugin_args)\n\n\ndef imsave(fname, arr, plugin=None, **plugin_args):\n \"\"\"Save an image to file.\n\n Parameters\n ----------\n fname : str\n Target filename.\n arr : ndarray of shape (M,N) or (M,N,3) or (M,N,4)\n Image data.\n plugin : str\n Name of plugin to use. By default, the different plugins are\n tried (starting with the Python Imaging Library) until a suitable\n candidate is found.\n\n Other parameters\n ----------------\n plugin_args : keywords\n Passed to the given plugin.\n\n \"\"\"\n if is_low_contrast(arr):\n warnings.warn('%s is a low contrast image' % fname)\n return call_plugin('imsave', fname, arr, plugin=plugin, **plugin_args)\n\n\ndef imshow(arr, plugin=None, **plugin_args):\n \"\"\"Display an image.\n\n Parameters\n ----------\n arr : ndarray or str\n Image data or name of image file.\n plugin : str\n Name of plugin to use. By default, the different plugins are\n tried (starting with the Python Imaging Library) until a suitable\n candidate is found.\n\n Other parameters\n ----------------\n plugin_args : keywords\n Passed to the given plugin.\n\n \"\"\"\n if isinstance(arr, six.string_types):\n arr = call_plugin('imread', arr, plugin=plugin)\n return call_plugin('imshow', arr, plugin=plugin, **plugin_args)\n\n\ndef show():\n '''Display pending images.\n\n Launch the event loop of the current gui plugin, and display all\n pending images, queued via `imshow`. This is required when using\n `imshow` from non-interactive scripts.\n\n A call to `show` will block execution of code until all windows\n have been closed.\n\n Examples\n --------\n >>> import skimage.io as io\n\n >>> for i in range(4):\n ... ax_im = io.imshow(np.random.rand(50, 50))\n >>> io.show() # doctest: +SKIP\n\n '''\n return call_plugin('_app_show')\n", "path": "skimage/io/_io.py"}], "after_files": [{"content": "from io import BytesIO\nimport warnings\n\nimport numpy as np\nimport six\n\nfrom ..io.manage_plugins import call_plugin\nfrom ..color import rgb2grey\nfrom .util import file_or_url_context\nfrom ..exposure import is_low_contrast\nfrom .._shared._warnings import all_warnings\n\n\n__all__ = ['imread', 'imread_collection', 'imsave', 'imshow', 'show']\n\n\ndef imread(fname, as_grey=False, plugin=None, flatten=None,\n **plugin_args):\n \"\"\"Load an image from file.\n\n Parameters\n ----------\n fname : string\n Image file name, e.g. 
``test.jpg`` or URL.\n as_grey : bool\n If True, convert color images to grey-scale (32-bit floats).\n Images that are already in grey-scale format are not converted.\n plugin : str\n Name of plugin to use (Python Imaging Library by default).\n\n Other Parameters\n ----------------\n flatten : bool\n Backward compatible keyword, superseded by `as_grey`.\n\n Returns\n -------\n img_array : ndarray\n The different colour bands/channels are stored in the\n third dimension, such that a grey-image is MxN, an\n RGB-image MxNx3 and an RGBA-image MxNx4.\n\n Other parameters\n ----------------\n plugin_args : keywords\n Passed to the given plugin.\n\n \"\"\"\n # Backward compatibility\n if flatten is not None:\n as_grey = flatten\n\n with file_or_url_context(fname) as fname:\n img = call_plugin('imread', fname, plugin=plugin, **plugin_args)\n\n if not hasattr(img, 'ndim'):\n return img\n\n if img.ndim > 2:\n if img.shape[-1] not in (3, 4) and img.shape[-3] in (3, 4):\n img = np.swapaxes(img, -1, -3)\n img = np.swapaxes(img, -2, -3)\n\n if as_grey:\n img = rgb2grey(img)\n\n return img\n\n\ndef imread_collection(load_pattern, conserve_memory=True,\n plugin=None, **plugin_args):\n \"\"\"\n Load a collection of images.\n\n Parameters\n ----------\n load_pattern : str or list\n List of objects to load. These are usually filenames, but may\n vary depending on the currently active plugin. See the docstring\n for ``ImageCollection`` for the default behaviour of this parameter.\n conserve_memory : bool, optional\n If True, never keep more than one in memory at a specific\n time. Otherwise, images will be cached once they are loaded.\n\n Returns\n -------\n ic : ImageCollection\n Collection of images.\n\n Other parameters\n ----------------\n plugin_args : keywords\n Passed to the given plugin.\n\n \"\"\"\n return call_plugin('imread_collection', load_pattern, conserve_memory,\n plugin=plugin, **plugin_args)\n\n\ndef imsave(fname, arr, plugin=None, **plugin_args):\n \"\"\"Save an image to file.\n\n Parameters\n ----------\n fname : str\n Target filename.\n arr : ndarray of shape (M,N) or (M,N,3) or (M,N,4)\n Image data.\n plugin : str\n Name of plugin to use. By default, the different plugins are\n tried (starting with the Python Imaging Library) until a suitable\n candidate is found.\n\n Other parameters\n ----------------\n plugin_args : keywords\n Passed to the given plugin.\n\n \"\"\"\n if is_low_contrast(arr):\n warnings.warn('%s is a low contrast image' % fname)\n return call_plugin('imsave', fname, arr, plugin=plugin, **plugin_args)\n\n\ndef imshow(arr, plugin=None, **plugin_args):\n \"\"\"Display an image.\n\n Parameters\n ----------\n arr : ndarray or str\n Image data or name of image file.\n plugin : str\n Name of plugin to use. By default, the different plugins are\n tried (starting with the Python Imaging Library) until a suitable\n candidate is found.\n\n Other parameters\n ----------------\n plugin_args : keywords\n Passed to the given plugin.\n\n \"\"\"\n if isinstance(arr, six.string_types):\n arr = call_plugin('imread', arr, plugin=plugin)\n return call_plugin('imshow', arr, plugin=plugin, **plugin_args)\n\n\ndef show():\n '''Display pending images.\n\n Launch the event loop of the current gui plugin, and display all\n pending images, queued via `imshow`. 
This is required when using\n `imshow` from non-interactive scripts.\n\n A call to `show` will block execution of code until all windows\n have been closed.\n\n Examples\n --------\n >>> import skimage.io as io\n\n >>> for i in range(4):\n ... ax_im = io.imshow(np.random.rand(50, 50))\n >>> io.show() # doctest: +SKIP\n\n '''\n return call_plugin('_app_show')\n", "path": "skimage/io/_io.py"}]}
| 1,875 | 221 |
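A standalone numpy sketch of the axis normalization applied in the skimage fix above, moving a channel-first `(3, M, N)` array to channel-last `(M, N, 3)`; the shape heuristic is copied from the diff.

```python
import numpy as np

def to_channel_last(img):
    """Move a planar (..., C, M, N) image to (..., M, N, C) when C is 3 or 4."""
    if img.ndim > 2 and img.shape[-1] not in (3, 4) and img.shape[-3] in (3, 4):
        img = np.swapaxes(img, -1, -3)  # (..., C, M, N) -> (..., N, M, C)
        img = np.swapaxes(img, -2, -3)  # (..., N, M, C) -> (..., M, N, C)
    return img

planar = np.zeros((3, 768, 1024))
print(to_channel_last(planar).shape)  # (768, 1024, 3)
```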
gh_patches_debug_20597
|
rasdani/github-patches
|
git_diff
|
wagtail__wagtail-1633
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error when serving images through the URL generator
I posted a comment on https://github.com/torchbox/wagtail/issues/983 but probably better to open a new issue. Looks like the same problem to me though.
Hi guys, I think I'm having the same problem but when serving images using the URL generator. It does work if I'm logged-in in the site (cache not working) but doesn't when I'm not (cache full on).
Cheers,
Jordi
Internal Server Error: /images/2dMQIUOPwS5DlZuprp_E_WFdfhw=/47/width-75/
Traceback (most recent call last):
File "/var/www/buildability/venvs/buildability.co.nz/local/lib/python2.7/site-packages/django/core/handlers/base.py", line 204, in get_response
response = middleware_method(request, response)
File "/var/www/buildability/venvs/buildability.co.nz/local/lib/python2.7/site-packages/django/middleware/cache.py", line 121, in process_response
self.cache.set(cache_key, response, timeout)
File "/var/www/buildability/venvs/buildability.co.nz/local/lib/python2.7/site-packages/redis_cache/cache.py", line 239, in set
result = self._set(key, pickle.dumps(value), timeout, client, _add_only)
File "/var/www/buildability/venvs/buildability.co.nz/lib/python2.7/copy_reg.py", line 70, in _reduce_ex
raise TypeError, "can't pickle %s objects" % base.__name__
TypeError: can't pickle instancemethod objects
Request repr():
<WSGIRequest
path:/images/2dMQIUOPwS5DlZuprp_E_WFdfhw=/47/width-75/,
GET:<QueryDict: {}>,
POST:<QueryDict: {}>,
COOKIES:{'_ga': 'GA1.3.1219121887.1434427204',
'csrftoken': 'GNhfTEGBu40y8wRAFPa15lQTV66F9WCs'},
META:{'CONTENT_LENGTH': '',
'CONTENT_TYPE': '',
u'CSRF_COOKIE': u'GNhfTEGBu40y8wRAFPa15lQTV66F9WCs',
'DOCUMENT_ROOT': '/usr/share/nginx/html',
'HTTP_ACCEPT': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,_/_;q=0.8',
'HTTP_ACCEPT_ENCODING': 'gzip, deflate, sdch',
'HTTP_ACCEPT_LANGUAGE': 'en-US,en;q=0.8',
'HTTP_CACHE_CONTROL': 'max-age=0',
'HTTP_CONNECTION': 'keep-alive',
'HTTP_COOKIE': '_ga=GA1.3.1219121887.1434427204; csrftoken=GNhfTEGBu40y8wRAFPa15lQTV66F9WCs',
'HTTP_HOST': 'www.buildability.co.nz',
'HTTP_UPGRADE_INSECURE_REQUESTS': '1',
'HTTP_USER_AGENT': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'PATH_INFO': u'/images/2dMQIUOPwS5DlZuprp_E_WFdfhw=/47/width-75/',
'QUERY_STRING': '',
'REMOTE_ADDR': '131.203.137.142',
'REMOTE_PORT': '51455',
'REQUEST_METHOD': 'GET',
'REQUEST_URI': '/images/2dMQIUOPwS5DlZuprp_E_WFdfhw%3D/47/width-75/',
u'SCRIPT_NAME': u'',
'SERVER_NAME': 'www.buildability.co.nz',
'SERVER_PORT': '80',
'SERVER_PROTOCOL': 'HTTP/1.1',
'UWSGI_SCHEME': 'http',
'uwsgi.core': 7,
'uwsgi.node': 'avinton',
'uwsgi.version': '1.9.17.1-debian',
'wsgi.errors': <open file 'wsgi_errors', mode 'w' at 0x7f0548a548a0>,
'wsgi.file_wrapper': <built-in function uwsgi_sendfile>,
'wsgi.input': <uwsgi._Input object at 0x7f0548a20a08>,
'wsgi.multiprocess': True,
'wsgi.multithread': True,
'wsgi.run_once': False,
'wsgi.url_scheme': 'http',
'wsgi.version': (1, 0)}>
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `wagtail/wagtailimages/views/frontend.py`
Content:
```
1 from wsgiref.util import FileWrapper
2 import imghdr
3
4 from django.shortcuts import get_object_or_404
5 from django.http import HttpResponse
6 from django.core.exceptions import PermissionDenied
7
8 from wagtail.wagtailimages.models import get_image_model
9 from wagtail.wagtailimages.utils import verify_signature
10 from wagtail.wagtailimages.exceptions import InvalidFilterSpecError
11
12
13 def serve(request, signature, image_id, filter_spec):
14 image = get_object_or_404(get_image_model(), id=image_id)
15
16 if not verify_signature(signature.encode(), image_id, filter_spec):
17 raise PermissionDenied
18
19 try:
20 rendition = image.get_rendition(filter_spec)
21 rendition.file.open('rb')
22 image_format = imghdr.what(rendition.file)
23 return HttpResponse(FileWrapper(rendition.file), content_type='image/' + image_format)
24 except InvalidFilterSpecError:
25 return HttpResponse("Invalid filter spec: " + filter_spec, content_type='text/plain', status=400)
26
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/wagtail/wagtailimages/views/frontend.py b/wagtail/wagtailimages/views/frontend.py
--- a/wagtail/wagtailimages/views/frontend.py
+++ b/wagtail/wagtailimages/views/frontend.py
@@ -2,7 +2,7 @@
import imghdr
from django.shortcuts import get_object_or_404
-from django.http import HttpResponse
+from django.http import HttpResponse, StreamingHttpResponse
from django.core.exceptions import PermissionDenied
from wagtail.wagtailimages.models import get_image_model
@@ -20,6 +20,6 @@
rendition = image.get_rendition(filter_spec)
rendition.file.open('rb')
image_format = imghdr.what(rendition.file)
- return HttpResponse(FileWrapper(rendition.file), content_type='image/' + image_format)
+ return StreamingHttpResponse(FileWrapper(rendition.file), content_type='image/' + image_format)
except InvalidFilterSpecError:
return HttpResponse("Invalid filter spec: " + filter_spec, content_type='text/plain', status=400)
|
{"golden_diff": "diff --git a/wagtail/wagtailimages/views/frontend.py b/wagtail/wagtailimages/views/frontend.py\n--- a/wagtail/wagtailimages/views/frontend.py\n+++ b/wagtail/wagtailimages/views/frontend.py\n@@ -2,7 +2,7 @@\n import imghdr\n \n from django.shortcuts import get_object_or_404\n-from django.http import HttpResponse\n+from django.http import HttpResponse, StreamingHttpResponse\n from django.core.exceptions import PermissionDenied\n \n from wagtail.wagtailimages.models import get_image_model\n@@ -20,6 +20,6 @@\n rendition = image.get_rendition(filter_spec)\n rendition.file.open('rb')\n image_format = imghdr.what(rendition.file)\n- return HttpResponse(FileWrapper(rendition.file), content_type='image/' + image_format)\n+ return StreamingHttpResponse(FileWrapper(rendition.file), content_type='image/' + image_format)\n except InvalidFilterSpecError:\n return HttpResponse(\"Invalid filter spec: \" + filter_spec, content_type='text/plain', status=400)\n", "issue": "Error when serving images through the URL generator\nI posted a comment on https://github.com/torchbox/wagtail/issues/983 but probably better to open a new issue. Looks like the same problem to me though.\n\nHi guys, I think I'm having the same problem but when serving images using the URL generator. It does work if I'm logged-in in the site (cache not working) but doesn't when I'm not (cache full on).\n\nCheers,\nJordi\n\nInternal Server Error: /images/2dMQIUOPwS5DlZuprp_E_WFdfhw=/47/width-75/\nTraceback (most recent call last):\n File \"/var/www/buildability/venvs/buildability.co.nz/local/lib/python2.7/site-packages/django/core/handlers/base.py\", line 204, in get_response\n response = middleware_method(request, response)\n File \"/var/www/buildability/venvs/buildability.co.nz/local/lib/python2.7/site-packages/django/middleware/cache.py\", line 121, in process_response\n self.cache.set(cache_key, response, timeout)\n File \"/var/www/buildability/venvs/buildability.co.nz/local/lib/python2.7/site-packages/redis_cache/cache.py\", line 239, in set\n result = self._set(key, pickle.dumps(value), timeout, client, _add_only)\n File \"/var/www/buildability/venvs/buildability.co.nz/lib/python2.7/copy_reg.py\", line 70, in _reduce_ex\n raise TypeError, \"can't pickle %s objects\" % base.__name__\nTypeError: can't pickle instancemethod objects\n\nRequest repr(): \n<WSGIRequest\npath:/images/2dMQIUOPwS5DlZuprp_E_WFdfhw=/47/width-75/,\nGET:<QueryDict: {}>,\nPOST:<QueryDict: {}>,\nCOOKIES:{'_ga': 'GA1.3.1219121887.1434427204',\n 'csrftoken': 'GNhfTEGBu40y8wRAFPa15lQTV66F9WCs'},\nMETA:{'CONTENT_LENGTH': '',\n 'CONTENT_TYPE': '',\n u'CSRF_COOKIE': u'GNhfTEGBu40y8wRAFPa15lQTV66F9WCs',\n 'DOCUMENT_ROOT': '/usr/share/nginx/html',\n 'HTTP_ACCEPT': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,_/_;q=0.8',\n 'HTTP_ACCEPT_ENCODING': 'gzip, deflate, sdch',\n 'HTTP_ACCEPT_LANGUAGE': 'en-US,en;q=0.8',\n 'HTTP_CACHE_CONTROL': 'max-age=0',\n 'HTTP_CONNECTION': 'keep-alive',\n 'HTTP_COOKIE': '_ga=GA1.3.1219121887.1434427204; csrftoken=GNhfTEGBu40y8wRAFPa15lQTV66F9WCs',\n 'HTTP_HOST': 'www.buildability.co.nz',\n 'HTTP_UPGRADE_INSECURE_REQUESTS': '1',\n 'HTTP_USER_AGENT': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',\n 'PATH_INFO': u'/images/2dMQIUOPwS5DlZuprp_E_WFdfhw=/47/width-75/',\n 'QUERY_STRING': '',\n 'REMOTE_ADDR': '131.203.137.142',\n 'REMOTE_PORT': '51455',\n 'REQUEST_METHOD': 'GET',\n 'REQUEST_URI': 
'/images/2dMQIUOPwS5DlZuprp_E_WFdfhw%3D/47/width-75/',\n u'SCRIPT_NAME': u'',\n 'SERVER_NAME': 'www.buildability.co.nz',\n 'SERVER_PORT': '80',\n 'SERVER_PROTOCOL': 'HTTP/1.1',\n 'UWSGI_SCHEME': 'http',\n 'uwsgi.core': 7,\n 'uwsgi.node': 'avinton',\n 'uwsgi.version': '1.9.17.1-debian',\n 'wsgi.errors': <open file 'wsgi_errors', mode 'w' at 0x7f0548a548a0>,\n 'wsgi.file_wrapper': <built-in function uwsgi_sendfile>,\n 'wsgi.input': <uwsgi._Input object at 0x7f0548a20a08>,\n 'wsgi.multiprocess': True,\n 'wsgi.multithread': True,\n 'wsgi.run_once': False,\n 'wsgi.url_scheme': 'http',\n 'wsgi.version': (1, 0)}>\n\n", "before_files": [{"content": "from wsgiref.util import FileWrapper\nimport imghdr\n\nfrom django.shortcuts import get_object_or_404\nfrom django.http import HttpResponse\nfrom django.core.exceptions import PermissionDenied\n\nfrom wagtail.wagtailimages.models import get_image_model\nfrom wagtail.wagtailimages.utils import verify_signature\nfrom wagtail.wagtailimages.exceptions import InvalidFilterSpecError\n\n\ndef serve(request, signature, image_id, filter_spec):\n image = get_object_or_404(get_image_model(), id=image_id)\n\n if not verify_signature(signature.encode(), image_id, filter_spec):\n raise PermissionDenied\n\n try:\n rendition = image.get_rendition(filter_spec)\n rendition.file.open('rb')\n image_format = imghdr.what(rendition.file)\n return HttpResponse(FileWrapper(rendition.file), content_type='image/' + image_format)\n except InvalidFilterSpecError:\n return HttpResponse(\"Invalid filter spec: \" + filter_spec, content_type='text/plain', status=400)\n", "path": "wagtail/wagtailimages/views/frontend.py"}], "after_files": [{"content": "from wsgiref.util import FileWrapper\nimport imghdr\n\nfrom django.shortcuts import get_object_or_404\nfrom django.http import HttpResponse, StreamingHttpResponse\nfrom django.core.exceptions import PermissionDenied\n\nfrom wagtail.wagtailimages.models import get_image_model\nfrom wagtail.wagtailimages.utils import verify_signature\nfrom wagtail.wagtailimages.exceptions import InvalidFilterSpecError\n\n\ndef serve(request, signature, image_id, filter_spec):\n image = get_object_or_404(get_image_model(), id=image_id)\n\n if not verify_signature(signature.encode(), image_id, filter_spec):\n raise PermissionDenied\n\n try:\n rendition = image.get_rendition(filter_spec)\n rendition.file.open('rb')\n image_format = imghdr.what(rendition.file)\n return StreamingHttpResponse(FileWrapper(rendition.file), content_type='image/' + image_format)\n except InvalidFilterSpecError:\n return HttpResponse(\"Invalid filter spec: \" + filter_spec, content_type='text/plain', status=400)\n", "path": "wagtail/wagtailimages/views/frontend.py"}]}
| 1,617 | 232 |
gh_patches_debug_20827 | rasdani/github-patches | git_diff | shuup__shuup-742 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
System check to verify Parler sanity
Shuup should check that the Parler configuration is sane before starting.
@JsseL and @juhakujala puzzled over an unrelated exception (`'shuup.admin.modules.services.behavior_form_part.BehaviorFormSet object' has no attribute 'empty_form'`) for a while – turns out it was an `AttributeError` ([which, as we unfortunately know, are hidden within `@property`s](https://github.com/shuup/shuup/blob/5584ebf912bae415fe367ea0c00ad4c5cff49244/shuup/utils/form_group.py#L86-L100)) within `FormSet.empty_form` calls that happens due to `PARLER_DEFAULT_LANGUAGE_CODE` being undefined:
```
Traceback (most recent call last):
File "~/django/forms/formsets.py", line 187, in empty_form
empty_permitted=True,
File "~/shuup/admin/modules/services/behavior_form_part.py", line 49, in form
kwargs.setdefault("default_language", settings.PARLER_DEFAULT_LANGUAGE_CODE)
File "~/django/conf/__init__.py", line 49, in __getattr__
return getattr(self._wrapped, name)
AttributeError: 'Settings' object has no attribute 'PARLER_DEFAULT_LANGUAGE_CODE'
```
My suggestion is to add a simple system check in [ShuupCoreAppConfig.ready()](https://github.com/shuup/shuup/blob/5584ebf912bae415fe367ea0c00ad4c5cff49244/shuup/core/__init__.py#L11) that throws an exception if some of the Parler settings (`PARLER_DEFAULT_LANGUAGE_CODE` and `PARLER_LANGUAGES`) are unset -- or perhaps it could automatically derive them based on the Django `LANGUAGES` setting, as "sane defaults" go?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `shuup/core/__init__.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # This file is part of Shuup.
3 #
4 # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
5 #
6 # This source code is licensed under the AGPLv3 license found in the
7 # LICENSE file in the root directory of this source tree.
8 from shuup.apps import AppConfig
9
10
11 class ShuupCoreAppConfig(AppConfig):
12 name = "shuup.core"
13 verbose_name = "Shuup Core"
14 label = "shuup" # Use "shuup" as app_label instead of "core"
15 required_installed_apps = (
16 "django.contrib.auth",
17 "django.contrib.contenttypes",
18 "easy_thumbnails",
19 "filer",
20 )
21 provides = {
22 "api_populator": [
23 "shuup.core.api:populate_core_api"
24 ],
25 "pricing_module": [
26 "shuup.core.pricing.default_pricing:DefaultPricingModule"
27 ],
28 }
29
30
31 default_app_config = "shuup.core.ShuupCoreAppConfig"
32
```
Path: `shuup/core/excs.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # This file is part of Shuup.
3 #
4 # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
5 #
6 # This source code is licensed under the AGPLv3 license found in the
7 # LICENSE file in the root directory of this source tree.
8 from shuup.utils.excs import Problem
9
10
11 class ImmutabilityError(ValueError):
12 pass
13
14
15 class NoProductsToShipException(Exception):
16 pass
17
18
19 class NoPaymentToCreateException(Exception):
20 pass
21
22
23 class NoRefundToCreateException(Exception):
24 pass
25
26
27 class RefundExceedsAmountException(Exception):
28 pass
29
30
31 class InvalidRefundAmountException(Exception):
32 pass
33
34
35 class ProductNotOrderableProblem(Problem):
36 pass
37
38
39 class ProductNotVisibleProblem(Problem):
40 pass
41
42
43 class ImpossibleProductModeException(ValueError):
44 def __init__(self, message, code=None):
45 super(ImpossibleProductModeException, self).__init__(message)
46 self.code = code
47
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/shuup/core/__init__.py b/shuup/core/__init__.py
--- a/shuup/core/__init__.py
+++ b/shuup/core/__init__.py
@@ -6,6 +6,7 @@
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from shuup.apps import AppConfig
+from shuup.core.excs import MissingSettingException
class ShuupCoreAppConfig(AppConfig):
@@ -27,5 +28,12 @@
],
}
+ def ready(self):
+ from django.conf import settings
+ if not getattr(settings, "PARLER_DEFAULT_LANGUAGE_CODE", None):
+ raise MissingSettingException("PARLER_DEFAULT_LANGUAGE_CODE must be set.")
+ if not getattr(settings, "PARLER_LANGUAGES", None):
+ raise MissingSettingException("PARLER_LANGUAGES must be set.")
+
default_app_config = "shuup.core.ShuupCoreAppConfig"
diff --git a/shuup/core/excs.py b/shuup/core/excs.py
--- a/shuup/core/excs.py
+++ b/shuup/core/excs.py
@@ -32,6 +32,10 @@
pass
+class MissingSettingException(Exception):
+ pass
+
+
class ProductNotOrderableProblem(Problem):
pass
|
{"golden_diff": "diff --git a/shuup/core/__init__.py b/shuup/core/__init__.py\n--- a/shuup/core/__init__.py\n+++ b/shuup/core/__init__.py\n@@ -6,6 +6,7 @@\n # This source code is licensed under the AGPLv3 license found in the\n # LICENSE file in the root directory of this source tree.\n from shuup.apps import AppConfig\n+from shuup.core.excs import MissingSettingException\n \n \n class ShuupCoreAppConfig(AppConfig):\n@@ -27,5 +28,12 @@\n ],\n }\n \n+ def ready(self):\n+ from django.conf import settings\n+ if not getattr(settings, \"PARLER_DEFAULT_LANGUAGE_CODE\", None):\n+ raise MissingSettingException(\"PARLER_DEFAULT_LANGUAGE_CODE must be set.\")\n+ if not getattr(settings, \"PARLER_LANGUAGES\", None):\n+ raise MissingSettingException(\"PARLER_LANGUAGES must be set.\")\n+\n \n default_app_config = \"shuup.core.ShuupCoreAppConfig\"\ndiff --git a/shuup/core/excs.py b/shuup/core/excs.py\n--- a/shuup/core/excs.py\n+++ b/shuup/core/excs.py\n@@ -32,6 +32,10 @@\n pass\n \n \n+class MissingSettingException(Exception):\n+ pass\n+\n+\n class ProductNotOrderableProblem(Problem):\n pass\n", "issue": "System check to verify Parler sanity\nShuup should check that the Parler configuration is sane before starting.\n\n@JsseL and @juhakujala puzzled over an unrelated exception (`'shuup.admin.modules.services.behavior_form_part.BehaviorFormSet object' has no attribute 'empty_form'`) for a while \u2013 turns out it was an `AttributeError` ([which, as we unfortunately know, are hidden within `@property`s](https://github.com/shuup/shuup/blob/5584ebf912bae415fe367ea0c00ad4c5cff49244/shuup/utils/form_group.py#L86-L100)) within `FormSet.empty_form` calls that happens due to `PARLER_DEFAULT_LANGUAGE_CODE` being undefined:\n\n```\nTraceback (most recent call last):\n File \"~/django/forms/formsets.py\", line 187, in empty_form\n empty_permitted=True,\n File \"~/shuup/admin/modules/services/behavior_form_part.py\", line 49, in form\n kwargs.setdefault(\"default_language\", settings.PARLER_DEFAULT_LANGUAGE_CODE)\n File \"~/django/conf/__init__.py\", line 49, in __getattr__\n return getattr(self._wrapped, name)\nAttributeError: 'Settings' object has no attribute 'PARLER_DEFAULT_LANGUAGE_CODE'\n```\n\nMy suggestion is to add a simple system check in [ShuupCoreAppConfig.ready()](https://github.com/shuup/shuup/blob/5584ebf912bae415fe367ea0c00ad4c5cff49244/shuup/core/__init__.py#L11) that throws an exception if some of the Parler settings (`PARLER_DEFAULT_LANGUAGE_CODE` and `PARLER_LANGUAGES`) are unset -- or perhaps it could automatically derive them based on the Django `LANGUAGES` setting, as \"sane defaults\" go?\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# This file is part of Shuup.\n#\n# Copyright (c) 2012-2016, Shoop Ltd. 
All rights reserved.\n#\n# This source code is licensed under the AGPLv3 license found in the\n# LICENSE file in the root directory of this source tree.\nfrom shuup.apps import AppConfig\n\n\nclass ShuupCoreAppConfig(AppConfig):\n name = \"shuup.core\"\n verbose_name = \"Shuup Core\"\n label = \"shuup\" # Use \"shuup\" as app_label instead of \"core\"\n required_installed_apps = (\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"easy_thumbnails\",\n \"filer\",\n )\n provides = {\n \"api_populator\": [\n \"shuup.core.api:populate_core_api\"\n ],\n \"pricing_module\": [\n \"shuup.core.pricing.default_pricing:DefaultPricingModule\"\n ],\n }\n\n\ndefault_app_config = \"shuup.core.ShuupCoreAppConfig\"\n", "path": "shuup/core/__init__.py"}, {"content": "# -*- coding: utf-8 -*-\n# This file is part of Shuup.\n#\n# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.\n#\n# This source code is licensed under the AGPLv3 license found in the\n# LICENSE file in the root directory of this source tree.\nfrom shuup.utils.excs import Problem\n\n\nclass ImmutabilityError(ValueError):\n pass\n\n\nclass NoProductsToShipException(Exception):\n pass\n\n\nclass NoPaymentToCreateException(Exception):\n pass\n\n\nclass NoRefundToCreateException(Exception):\n pass\n\n\nclass RefundExceedsAmountException(Exception):\n pass\n\n\nclass InvalidRefundAmountException(Exception):\n pass\n\n\nclass ProductNotOrderableProblem(Problem):\n pass\n\n\nclass ProductNotVisibleProblem(Problem):\n pass\n\n\nclass ImpossibleProductModeException(ValueError):\n def __init__(self, message, code=None):\n super(ImpossibleProductModeException, self).__init__(message)\n self.code = code\n", "path": "shuup/core/excs.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# This file is part of Shuup.\n#\n# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.\n#\n# This source code is licensed under the AGPLv3 license found in the\n# LICENSE file in the root directory of this source tree.\nfrom shuup.apps import AppConfig\nfrom shuup.core.excs import MissingSettingException\n\n\nclass ShuupCoreAppConfig(AppConfig):\n name = \"shuup.core\"\n verbose_name = \"Shuup Core\"\n label = \"shuup\" # Use \"shuup\" as app_label instead of \"core\"\n required_installed_apps = (\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"easy_thumbnails\",\n \"filer\",\n )\n provides = {\n \"api_populator\": [\n \"shuup.core.api:populate_core_api\"\n ],\n \"pricing_module\": [\n \"shuup.core.pricing.default_pricing:DefaultPricingModule\"\n ],\n }\n\n def ready(self):\n from django.conf import settings\n if not getattr(settings, \"PARLER_DEFAULT_LANGUAGE_CODE\", None):\n raise MissingSettingException(\"PARLER_DEFAULT_LANGUAGE_CODE must be set.\")\n if not getattr(settings, \"PARLER_LANGUAGES\", None):\n raise MissingSettingException(\"PARLER_LANGUAGES must be set.\")\n\n\ndefault_app_config = \"shuup.core.ShuupCoreAppConfig\"\n", "path": "shuup/core/__init__.py"}, {"content": "# -*- coding: utf-8 -*-\n# This file is part of Shuup.\n#\n# Copyright (c) 2012-2016, Shoop Ltd. 
All rights reserved.\n#\n# This source code is licensed under the AGPLv3 license found in the\n# LICENSE file in the root directory of this source tree.\nfrom shuup.utils.excs import Problem\n\n\nclass ImmutabilityError(ValueError):\n pass\n\n\nclass NoProductsToShipException(Exception):\n pass\n\n\nclass NoPaymentToCreateException(Exception):\n pass\n\n\nclass NoRefundToCreateException(Exception):\n pass\n\n\nclass RefundExceedsAmountException(Exception):\n pass\n\n\nclass InvalidRefundAmountException(Exception):\n pass\n\n\nclass MissingSettingException(Exception):\n pass\n\n\nclass ProductNotOrderableProblem(Problem):\n pass\n\n\nclass ProductNotVisibleProblem(Problem):\n pass\n\n\nclass ImpossibleProductModeException(ValueError):\n def __init__(self, message, code=None):\n super(ImpossibleProductModeException, self).__init__(message)\n self.code = code\n", "path": "shuup/core/excs.py"}]}
| 1,319 | 311 |
gh_patches_debug_31300 | rasdani/github-patches | git_diff | ansible-collections__community.aws-1555 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[6.0.0] wafv2_rule_group_info - Remove deprecated `state` argument
### Summary
The `state` argument does nothing and never has (likely a copy&paste mistake). Remove it for consistency
### Issue Type
Feature Idea
### Component Name
plugins/modules/wafv2_rule_group_info.py
### Additional Information
Originally deprecated as part of #1210
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `plugins/modules/wafv2_rule_group_info.py`
Content:
```
1 #!/usr/bin/python
2 # Copyright: Ansible Project
3 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
4 from __future__ import absolute_import, division, print_function
5 __metaclass__ = type
6
7
8 DOCUMENTATION = '''
9 ---
10 module: wafv2_rule_group_info
11 version_added: 1.5.0
12 author:
13 - "Markus Bergholz (@markuman)"
14 short_description: wafv2_web_acl_info
15 description:
16 - Get informations about existing wafv2 rule groups.
17 options:
18 state:
19 description:
20 - This option does nothing, has been deprecated, and will be removed in a release after 2022-12-01.
21 required: false
22 type: str
23 name:
24 description:
25 - The name of the rule group.
26 required: true
27 type: str
28 scope:
29 description:
30 - Scope of wafv2 rule group.
31 required: true
32 choices: ["CLOUDFRONT","REGIONAL"]
33 type: str
34
35 extends_documentation_fragment:
36 - amazon.aws.aws
37 - amazon.aws.ec2
38 - amazon.aws.boto3
39
40 '''
41
42 EXAMPLES = '''
43 - name: rule group info
44 community.aws.wafv2_rule_group_info:
45 name: test02
46 state: present
47 scope: REGIONAL
48 '''
49
50 RETURN = """
51 arn:
52 description: Rule group arn
53 sample: arn:aws:wafv2:eu-central-1:11111111:regional/rulegroup/test02/6e90c01a-e4eb-43e5-b6aa-b1604cedf7d7
54 type: str
55 returned: Always, as long as the web acl exists
56 description:
57 description: Description of the rule group
58 sample: Some rule group description
59 returned: Always, as long as the web acl exists
60 type: str
61 capacity:
62 description: Current capacity of the rule group
63 sample: 500
64 returned: Always, as long as the rule group exists
65 type: int
66 name:
67 description: Rule group name
68 sample: test02
69 returned: Always, as long as the rule group exists
70 type: str
71 rules:
72 description: Current rules of the rule group
73 returned: Always, as long as the rule group exists
74 type: list
75 sample:
76 - action:
77 allow: {}
78 name: eins
79 priority: 1
80 statement:
81 ip_set_reference_statement:
82 arn: arn:aws:wafv2:eu-central-1:111111111:regional/ipset/test02/b6978915-c67b-4d1c-8832-2b1bb452143a
83 visibility_config:
84 cloud_watch_metrics_enabled: True
85 metric_name: fsd
86 sampled_requests_enabled: True
87 visibility_config:
88 description: Visibility config of the rule group
89 returned: Always, as long as the rule group exists
90 type: dict
91 sample:
92 cloud_watch_metrics_enabled: True
93 metric_name: blub
94 sampled_requests_enabled: False
95 """
96
97 try:
98 from botocore.exceptions import ClientError, BotoCoreError
99 except ImportError:
100 pass # caught by AnsibleAWSModule
101
102 from ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule
103 from ansible_collections.amazon.aws.plugins.module_utils.ec2 import camel_dict_to_snake_dict
104 from ansible_collections.community.aws.plugins.module_utils.wafv2 import wafv2_list_rule_groups
105 from ansible_collections.community.aws.plugins.module_utils.wafv2 import describe_wafv2_tags
106
107
108 def get_rule_group(wafv2, name, scope, id, fail_json_aws):
109 try:
110 response = wafv2.get_rule_group(
111 Name=name,
112 Scope=scope,
113 Id=id
114 )
115 except (BotoCoreError, ClientError) as e:
116 fail_json_aws(e, msg="Failed to get wafv2 rule group.")
117 return response
118
119
120 def main():
121 arg_spec = dict(
122 state=dict(type='str', required=False),
123 name=dict(type='str', required=True),
124 scope=dict(type='str', required=True, choices=['CLOUDFRONT', 'REGIONAL'])
125 )
126
127 module = AnsibleAWSModule(
128 argument_spec=arg_spec,
129 supports_check_mode=True
130 )
131
132 state = module.params.get("state")
133 name = module.params.get("name")
134 scope = module.params.get("scope")
135
136 wafv2 = module.client('wafv2')
137
138 if state:
139 module.deprecate(
140 'The state parameter does nothing, has been deprecated, and will be removed in a future release.',
141 version='6.0.0', collection_name='community.aws')
142
143 # check if rule group exists
144 response = wafv2_list_rule_groups(wafv2, scope, module.fail_json_aws)
145 id = None
146 retval = {}
147
148 for item in response.get('RuleGroups'):
149 if item.get('Name') == name:
150 id = item.get('Id')
151 arn = item.get('ARN')
152
153 existing_group = None
154 if id:
155 existing_group = get_rule_group(wafv2, name, scope, id, module.fail_json_aws)
156 retval = camel_dict_to_snake_dict(existing_group.get('RuleGroup'))
157 tags = describe_wafv2_tags(wafv2, arn, module.fail_json_aws)
158 retval['tags'] = tags or {}
159
160 module.exit_json(**retval)
161
162
163 if __name__ == '__main__':
164 main()
165
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/plugins/modules/wafv2_rule_group_info.py b/plugins/modules/wafv2_rule_group_info.py
--- a/plugins/modules/wafv2_rule_group_info.py
+++ b/plugins/modules/wafv2_rule_group_info.py
@@ -15,11 +15,6 @@
description:
- Get informations about existing wafv2 rule groups.
options:
- state:
- description:
- - This option does nothing, has been deprecated, and will be removed in a release after 2022-12-01.
- required: false
- type: str
name:
description:
- The name of the rule group.
@@ -43,7 +38,6 @@
- name: rule group info
community.aws.wafv2_rule_group_info:
name: test02
- state: present
scope: REGIONAL
'''
@@ -119,7 +113,6 @@
def main():
arg_spec = dict(
- state=dict(type='str', required=False),
name=dict(type='str', required=True),
scope=dict(type='str', required=True, choices=['CLOUDFRONT', 'REGIONAL'])
)
@@ -129,17 +122,11 @@
supports_check_mode=True
)
- state = module.params.get("state")
name = module.params.get("name")
scope = module.params.get("scope")
wafv2 = module.client('wafv2')
- if state:
- module.deprecate(
- 'The state parameter does nothing, has been deprecated, and will be removed in a future release.',
- version='6.0.0', collection_name='community.aws')
-
# check if rule group exists
response = wafv2_list_rule_groups(wafv2, scope, module.fail_json_aws)
id = None
|
{"golden_diff": "diff --git a/plugins/modules/wafv2_rule_group_info.py b/plugins/modules/wafv2_rule_group_info.py\n--- a/plugins/modules/wafv2_rule_group_info.py\n+++ b/plugins/modules/wafv2_rule_group_info.py\n@@ -15,11 +15,6 @@\n description:\n - Get informations about existing wafv2 rule groups.\n options:\n- state:\n- description:\n- - This option does nothing, has been deprecated, and will be removed in a release after 2022-12-01.\n- required: false\n- type: str\n name:\n description:\n - The name of the rule group.\n@@ -43,7 +38,6 @@\n - name: rule group info\n community.aws.wafv2_rule_group_info:\n name: test02\n- state: present\n scope: REGIONAL\n '''\n \n@@ -119,7 +113,6 @@\n \n def main():\n arg_spec = dict(\n- state=dict(type='str', required=False),\n name=dict(type='str', required=True),\n scope=dict(type='str', required=True, choices=['CLOUDFRONT', 'REGIONAL'])\n )\n@@ -129,17 +122,11 @@\n supports_check_mode=True\n )\n \n- state = module.params.get(\"state\")\n name = module.params.get(\"name\")\n scope = module.params.get(\"scope\")\n \n wafv2 = module.client('wafv2')\n \n- if state:\n- module.deprecate(\n- 'The state parameter does nothing, has been deprecated, and will be removed in a future release.',\n- version='6.0.0', collection_name='community.aws')\n-\n # check if rule group exists\n response = wafv2_list_rule_groups(wafv2, scope, module.fail_json_aws)\n id = None\n", "issue": "[6.0.0] wafv2_rule_group_info - Remove deprecated `state` argument \n### Summary\n\nThe `state` argument does nothing and never has (likely a copy&paste mistake). Remove it for consistency\n\n### Issue Type\n\nFeature Idea\n\n### Component Name\n\nplugins/modules/wafv2_rule_group_info.py\n\n### Additional Information\n\nOriginally deprecated as part of #1210\n\n### Code of Conduct\n\n- [X] I agree to follow the Ansible Code of Conduct\n", "before_files": [{"content": "#!/usr/bin/python\n# Copyright: Ansible Project\n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\nfrom __future__ import absolute_import, division, print_function\n__metaclass__ = type\n\n\nDOCUMENTATION = '''\n---\nmodule: wafv2_rule_group_info\nversion_added: 1.5.0\nauthor:\n - \"Markus Bergholz (@markuman)\"\nshort_description: wafv2_web_acl_info\ndescription:\n - Get informations about existing wafv2 rule groups.\noptions:\n state:\n description:\n - This option does nothing, has been deprecated, and will be removed in a release after 2022-12-01.\n required: false\n type: str\n name:\n description:\n - The name of the rule group.\n required: true\n type: str\n scope:\n description:\n - Scope of wafv2 rule group.\n required: true\n choices: [\"CLOUDFRONT\",\"REGIONAL\"]\n type: str\n\nextends_documentation_fragment:\n - amazon.aws.aws\n - amazon.aws.ec2\n - amazon.aws.boto3\n\n'''\n\nEXAMPLES = '''\n- name: rule group info\n community.aws.wafv2_rule_group_info:\n name: test02\n state: present\n scope: REGIONAL\n'''\n\nRETURN = \"\"\"\narn:\n description: Rule group arn\n sample: arn:aws:wafv2:eu-central-1:11111111:regional/rulegroup/test02/6e90c01a-e4eb-43e5-b6aa-b1604cedf7d7\n type: str\n returned: Always, as long as the web acl exists\ndescription:\n description: Description of the rule group\n sample: Some rule group description\n returned: Always, as long as the web acl exists\n type: str\ncapacity:\n description: Current capacity of the rule group\n sample: 500\n returned: Always, as long as the rule group exists\n type: int\nname:\n description: Rule group name\n sample: 
test02\n returned: Always, as long as the rule group exists\n type: str\nrules:\n description: Current rules of the rule group\n returned: Always, as long as the rule group exists\n type: list\n sample:\n - action:\n allow: {}\n name: eins\n priority: 1\n statement:\n ip_set_reference_statement:\n arn: arn:aws:wafv2:eu-central-1:111111111:regional/ipset/test02/b6978915-c67b-4d1c-8832-2b1bb452143a\n visibility_config:\n cloud_watch_metrics_enabled: True\n metric_name: fsd\n sampled_requests_enabled: True\nvisibility_config:\n description: Visibility config of the rule group\n returned: Always, as long as the rule group exists\n type: dict\n sample:\n cloud_watch_metrics_enabled: True\n metric_name: blub\n sampled_requests_enabled: False\n\"\"\"\n\ntry:\n from botocore.exceptions import ClientError, BotoCoreError\nexcept ImportError:\n pass # caught by AnsibleAWSModule\n\nfrom ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule\nfrom ansible_collections.amazon.aws.plugins.module_utils.ec2 import camel_dict_to_snake_dict\nfrom ansible_collections.community.aws.plugins.module_utils.wafv2 import wafv2_list_rule_groups\nfrom ansible_collections.community.aws.plugins.module_utils.wafv2 import describe_wafv2_tags\n\n\ndef get_rule_group(wafv2, name, scope, id, fail_json_aws):\n try:\n response = wafv2.get_rule_group(\n Name=name,\n Scope=scope,\n Id=id\n )\n except (BotoCoreError, ClientError) as e:\n fail_json_aws(e, msg=\"Failed to get wafv2 rule group.\")\n return response\n\n\ndef main():\n arg_spec = dict(\n state=dict(type='str', required=False),\n name=dict(type='str', required=True),\n scope=dict(type='str', required=True, choices=['CLOUDFRONT', 'REGIONAL'])\n )\n\n module = AnsibleAWSModule(\n argument_spec=arg_spec,\n supports_check_mode=True\n )\n\n state = module.params.get(\"state\")\n name = module.params.get(\"name\")\n scope = module.params.get(\"scope\")\n\n wafv2 = module.client('wafv2')\n\n if state:\n module.deprecate(\n 'The state parameter does nothing, has been deprecated, and will be removed in a future release.',\n version='6.0.0', collection_name='community.aws')\n\n # check if rule group exists\n response = wafv2_list_rule_groups(wafv2, scope, module.fail_json_aws)\n id = None\n retval = {}\n\n for item in response.get('RuleGroups'):\n if item.get('Name') == name:\n id = item.get('Id')\n arn = item.get('ARN')\n\n existing_group = None\n if id:\n existing_group = get_rule_group(wafv2, name, scope, id, module.fail_json_aws)\n retval = camel_dict_to_snake_dict(existing_group.get('RuleGroup'))\n tags = describe_wafv2_tags(wafv2, arn, module.fail_json_aws)\n retval['tags'] = tags or {}\n\n module.exit_json(**retval)\n\n\nif __name__ == '__main__':\n main()\n", "path": "plugins/modules/wafv2_rule_group_info.py"}], "after_files": [{"content": "#!/usr/bin/python\n# Copyright: Ansible Project\n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\nfrom __future__ import absolute_import, division, print_function\n__metaclass__ = type\n\n\nDOCUMENTATION = '''\n---\nmodule: wafv2_rule_group_info\nversion_added: 1.5.0\nauthor:\n - \"Markus Bergholz (@markuman)\"\nshort_description: wafv2_web_acl_info\ndescription:\n - Get informations about existing wafv2 rule groups.\noptions:\n name:\n description:\n - The name of the rule group.\n required: true\n type: str\n scope:\n description:\n - Scope of wafv2 rule group.\n required: true\n choices: [\"CLOUDFRONT\",\"REGIONAL\"]\n type: 
str\n\nextends_documentation_fragment:\n - amazon.aws.aws\n - amazon.aws.ec2\n - amazon.aws.boto3\n\n'''\n\nEXAMPLES = '''\n- name: rule group info\n community.aws.wafv2_rule_group_info:\n name: test02\n scope: REGIONAL\n'''\n\nRETURN = \"\"\"\narn:\n description: Rule group arn\n sample: arn:aws:wafv2:eu-central-1:11111111:regional/rulegroup/test02/6e90c01a-e4eb-43e5-b6aa-b1604cedf7d7\n type: str\n returned: Always, as long as the web acl exists\ndescription:\n description: Description of the rule group\n sample: Some rule group description\n returned: Always, as long as the web acl exists\n type: str\ncapacity:\n description: Current capacity of the rule group\n sample: 500\n returned: Always, as long as the rule group exists\n type: int\nname:\n description: Rule group name\n sample: test02\n returned: Always, as long as the rule group exists\n type: str\nrules:\n description: Current rules of the rule group\n returned: Always, as long as the rule group exists\n type: list\n sample:\n - action:\n allow: {}\n name: eins\n priority: 1\n statement:\n ip_set_reference_statement:\n arn: arn:aws:wafv2:eu-central-1:111111111:regional/ipset/test02/b6978915-c67b-4d1c-8832-2b1bb452143a\n visibility_config:\n cloud_watch_metrics_enabled: True\n metric_name: fsd\n sampled_requests_enabled: True\nvisibility_config:\n description: Visibility config of the rule group\n returned: Always, as long as the rule group exists\n type: dict\n sample:\n cloud_watch_metrics_enabled: True\n metric_name: blub\n sampled_requests_enabled: False\n\"\"\"\n\ntry:\n from botocore.exceptions import ClientError, BotoCoreError\nexcept ImportError:\n pass # caught by AnsibleAWSModule\n\nfrom ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule\nfrom ansible_collections.amazon.aws.plugins.module_utils.ec2 import camel_dict_to_snake_dict\nfrom ansible_collections.community.aws.plugins.module_utils.wafv2 import wafv2_list_rule_groups\nfrom ansible_collections.community.aws.plugins.module_utils.wafv2 import describe_wafv2_tags\n\n\ndef get_rule_group(wafv2, name, scope, id, fail_json_aws):\n try:\n response = wafv2.get_rule_group(\n Name=name,\n Scope=scope,\n Id=id\n )\n except (BotoCoreError, ClientError) as e:\n fail_json_aws(e, msg=\"Failed to get wafv2 rule group.\")\n return response\n\n\ndef main():\n arg_spec = dict(\n name=dict(type='str', required=True),\n scope=dict(type='str', required=True, choices=['CLOUDFRONT', 'REGIONAL'])\n )\n\n module = AnsibleAWSModule(\n argument_spec=arg_spec,\n supports_check_mode=True\n )\n\n name = module.params.get(\"name\")\n scope = module.params.get(\"scope\")\n\n wafv2 = module.client('wafv2')\n\n # check if rule group exists\n response = wafv2_list_rule_groups(wafv2, scope, module.fail_json_aws)\n id = None\n retval = {}\n\n for item in response.get('RuleGroups'):\n if item.get('Name') == name:\n id = item.get('Id')\n arn = item.get('ARN')\n\n existing_group = None\n if id:\n existing_group = get_rule_group(wafv2, name, scope, id, module.fail_json_aws)\n retval = camel_dict_to_snake_dict(existing_group.get('RuleGroup'))\n tags = describe_wafv2_tags(wafv2, arn, module.fail_json_aws)\n retval['tags'] = tags or {}\n\n module.exit_json(**retval)\n\n\nif __name__ == '__main__':\n main()\n", "path": "plugins/modules/wafv2_rule_group_info.py"}]}
| 2,036 | 424 |
gh_patches_debug_29881 | rasdani/github-patches | git_diff | e2nIEE__pandapower-880 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Missing dependencies: xlswriter, xlrd, cryptography
Hi,
I am currently following the instructions for the installation of the development version, as shown here: https://www.pandapower.org/start/#develop
I have a brand new virtual environment on Python 3.8.3 (Windows 10, 64 bits), and the tests failed because of the following missing dependencies:
> Edit: Same result on Python 3.7.8.
1. xlsxwriter: `FAILED pandapower\test\api\test_file_io.py::test_excel[1] - ModuleNotFoundError: No module named 'xlsxwriter'`
2. xlrd: `FAILED pandapower\test\api\test_file_io.py::test_excel[1] - ImportError: Missing optional dependency 'xlrd'. Install xlrd >= 1.0.0 for Excel support Use pip or conda to install xlrd.`
3. cryptography: `FAILED pandapower\test\api\test_file_io.py::test_encrypted_json[1] - ModuleNotFoundError: No module named 'cryptography'`
The permanent solution would most likely be to add those to setup.py and mention them in the documentation, but you might want to check if you should restrict the version.
P.S.: The tests still ended up failing, but that's a seperate issue (see issue #876 ).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 # Copyright (c) 2016-2020 by University of Kassel and Fraunhofer Institute for Energy Economics
4 # and Energy System Technology (IEE), Kassel. All rights reserved.
5
6 from setuptools import setup, find_packages
7 import re
8
9 with open('README.rst', 'rb') as f:
10 install = f.read().decode('utf-8')
11
12 with open('CHANGELOG.rst', 'rb') as f:
13 changelog = f.read().decode('utf-8')
14
15 classifiers = [
16 'Development Status :: 5 - Production/Stable',
17 'Environment :: Console',
18 'Intended Audience :: Developers',
19 'Intended Audience :: Education',
20 'Intended Audience :: Science/Research',
21 'License :: OSI Approved :: BSD License',
22 'Natural Language :: English',
23 'Operating System :: OS Independent',
24 'Programming Language :: Python',
25 'Programming Language :: Python :: 3']
26
27 with open('.travis.yml', 'rb') as f:
28 lines = f.read().decode('utf-8')
29 for version in re.findall('python: 3.[0-9]', lines):
30 classifiers.append('Programming Language :: Python :: 3.%s' % version[-1])
31
32 long_description = '\n\n'.join((install, changelog))
33
34 setup(
35 name='pandapower',
36 version='2.3.0',
37 author='Leon Thurner, Alexander Scheidler',
38 author_email='[email protected], [email protected]',
39 description='Convenient Power System Modelling and Analysis based on PYPOWER and pandas',
40 long_description=long_description,
41 long_description_content_type='text/x-rst',
42 url='http://www.pandapower.org',
43 license='BSD',
44 install_requires=["pandas>=0.17",
45 "networkx",
46 "scipy",
47 "numpy>=0.11",
48 "packaging"],
49 extras_require={":python_version<'3.0'": ["future"],
50 "docs": ["numpydoc", "sphinx", "sphinx_rtd_theme"],
51 "plotting": ["plotly", "matplotlib", "python-igraph"],
52 "test": ["pytest", "pytest-xdist"]},
53 packages=find_packages(),
54 include_package_data=True,
55 classifiers=classifiers
56 )
57
```
Path: `pandapower/__init__.py`
Content:
```
1 __version__ = "2.3.0"
2
3 import os
4 pp_dir = os.path.dirname(os.path.realpath(__file__))
5
6 from pandapower.auxiliary import *
7 from pandapower.convert_format import *
8 from pandapower.create import *
9 from pandapower.diagnostic import *
10 from pandapower.file_io import *
11 from pandapower.run import *
12 from pandapower.runpm import *
13 from pandapower.std_types import *
14 from pandapower.toolbox import *
15 from pandapower.powerflow import *
16 from pandapower.opf import *
17 from pandapower.optimal_powerflow import OPFNotConverged
18 from pandapower.pf.runpp_3ph import runpp_3ph
19 import pandas as pd
20 pd.options.mode.chained_assignment = None # default='warn'
21
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pandapower/__init__.py b/pandapower/__init__.py
--- a/pandapower/__init__.py
+++ b/pandapower/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "2.3.0"
+__version__ = "2.3.1"
import os
pp_dir = os.path.dirname(os.path.realpath(__file__))
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -33,10 +33,10 @@
setup(
name='pandapower',
- version='2.3.0',
+ version='2.3.1',
author='Leon Thurner, Alexander Scheidler',
author_email='[email protected], [email protected]',
- description='Convenient Power System Modelling and Analysis based on PYPOWER and pandas',
+ description='An easy to use open source tool for power system modeling, analysis and optimization with a high degree of automation.',
long_description=long_description,
long_description_content_type='text/x-rst',
url='http://www.pandapower.org',
@@ -45,11 +45,14 @@
"networkx",
"scipy",
"numpy>=0.11",
- "packaging"],
- extras_require={":python_version<'3.0'": ["future"],
- "docs": ["numpydoc", "sphinx", "sphinx_rtd_theme"],
- "plotting": ["plotly", "matplotlib", "python-igraph"],
- "test": ["pytest", "pytest-xdist"]},
+ "packaging",
+ "xlsxwriter",
+ "xlrd",
+ "cryptography"],
+ extras_require={
+ "docs": ["numpydoc", "sphinx", "sphinx_rtd_theme"],
+ "plotting": ["plotly", "matplotlib", "python-igraph"],
+ "test": ["pytest", "pytest-xdist"]},
packages=find_packages(),
include_package_data=True,
classifiers=classifiers
|
{"golden_diff": "diff --git a/pandapower/__init__.py b/pandapower/__init__.py\n--- a/pandapower/__init__.py\n+++ b/pandapower/__init__.py\n@@ -1,4 +1,4 @@\n-__version__ = \"2.3.0\"\n+__version__ = \"2.3.1\"\n \n import os\n pp_dir = os.path.dirname(os.path.realpath(__file__))\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -33,10 +33,10 @@\n \n setup(\n name='pandapower',\n- version='2.3.0',\n+ version='2.3.1',\n author='Leon Thurner, Alexander Scheidler',\n author_email='[email protected], [email protected]',\n- description='Convenient Power System Modelling and Analysis based on PYPOWER and pandas',\n+ description='An easy to use open source tool for power system modeling, analysis and optimization with a high degree of automation.',\n long_description=long_description,\n \tlong_description_content_type='text/x-rst',\n url='http://www.pandapower.org',\n@@ -45,11 +45,14 @@\n \"networkx\",\n \"scipy\",\n \"numpy>=0.11\",\n- \"packaging\"],\n- extras_require={\":python_version<'3.0'\": [\"future\"],\n- \"docs\": [\"numpydoc\", \"sphinx\", \"sphinx_rtd_theme\"],\n- \"plotting\": [\"plotly\", \"matplotlib\", \"python-igraph\"],\n- \"test\": [\"pytest\", \"pytest-xdist\"]},\n+ \"packaging\",\n+\t\t\t\t\t \"xlsxwriter\",\n+\t\t\t\t\t \"xlrd\",\n+\t\t\t\t\t \"cryptography\"],\n+ extras_require={\n+\t\t\"docs\": [\"numpydoc\", \"sphinx\", \"sphinx_rtd_theme\"],\n+\t\t\"plotting\": [\"plotly\", \"matplotlib\", \"python-igraph\"],\n+\t\t\"test\": [\"pytest\", \"pytest-xdist\"]},\n packages=find_packages(),\n include_package_data=True,\n classifiers=classifiers\n", "issue": "Missing dependencies: xlswriter, xlrd, cryptography\nHi,\r\n\r\nI am currently following the instructions for the installation of the development version, as shown here: https://www.pandapower.org/start/#develop\r\n\r\nI have a brand new virtual environment on Python 3.8.3 (Windows 10, 64 bits), and the tests failed because of the following missing dependencies:\r\n\r\n> Edit: Same result on Python 3.7.8.\r\n\r\n1. xlsxwriter: `FAILED pandapower\\test\\api\\test_file_io.py::test_excel[1] - ModuleNotFoundError: No module named 'xlsxwriter'`\r\n2. xlrd: `FAILED pandapower\\test\\api\\test_file_io.py::test_excel[1] - ImportError: Missing optional dependency 'xlrd'. Install xlrd >= 1.0.0 for Excel support Use pip or conda to install xlrd.`\r\n3. cryptography: `FAILED pandapower\\test\\api\\test_file_io.py::test_encrypted_json[1] - ModuleNotFoundError: No module named 'cryptography'`\r\n\r\nThe permanent solution would most likely be to add those to setup.py and mention them in the documentation, but you might want to check if you should restrict the version.\r\n\r\nP.S.: The tests still ended up failing, but that's a seperate issue (see issue #876 ).\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Copyright (c) 2016-2020 by University of Kassel and Fraunhofer Institute for Energy Economics\n# and Energy System Technology (IEE), Kassel. 
All rights reserved.\n\nfrom setuptools import setup, find_packages\nimport re\n\nwith open('README.rst', 'rb') as f:\n install = f.read().decode('utf-8')\n\nwith open('CHANGELOG.rst', 'rb') as f:\n changelog = f.read().decode('utf-8')\n\nclassifiers = [\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: BSD License',\n 'Natural Language :: English',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3']\n\nwith open('.travis.yml', 'rb') as f:\n lines = f.read().decode('utf-8')\n for version in re.findall('python: 3.[0-9]', lines):\n classifiers.append('Programming Language :: Python :: 3.%s' % version[-1])\n\nlong_description = '\\n\\n'.join((install, changelog))\n\nsetup(\n name='pandapower',\n version='2.3.0',\n author='Leon Thurner, Alexander Scheidler',\n author_email='[email protected], [email protected]',\n description='Convenient Power System Modelling and Analysis based on PYPOWER and pandas',\n long_description=long_description,\n\tlong_description_content_type='text/x-rst',\n url='http://www.pandapower.org',\n license='BSD',\n install_requires=[\"pandas>=0.17\",\n \"networkx\",\n \"scipy\",\n \"numpy>=0.11\",\n \"packaging\"],\n extras_require={\":python_version<'3.0'\": [\"future\"],\n \"docs\": [\"numpydoc\", \"sphinx\", \"sphinx_rtd_theme\"],\n \"plotting\": [\"plotly\", \"matplotlib\", \"python-igraph\"],\n \"test\": [\"pytest\", \"pytest-xdist\"]},\n packages=find_packages(),\n include_package_data=True,\n classifiers=classifiers\n)\n", "path": "setup.py"}, {"content": "__version__ = \"2.3.0\"\n\nimport os\npp_dir = os.path.dirname(os.path.realpath(__file__))\n\nfrom pandapower.auxiliary import *\nfrom pandapower.convert_format import *\nfrom pandapower.create import *\nfrom pandapower.diagnostic import *\nfrom pandapower.file_io import *\nfrom pandapower.run import *\nfrom pandapower.runpm import *\nfrom pandapower.std_types import *\nfrom pandapower.toolbox import *\nfrom pandapower.powerflow import *\nfrom pandapower.opf import *\nfrom pandapower.optimal_powerflow import OPFNotConverged\nfrom pandapower.pf.runpp_3ph import runpp_3ph\nimport pandas as pd\npd.options.mode.chained_assignment = None # default='warn'\n", "path": "pandapower/__init__.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Copyright (c) 2016-2020 by University of Kassel and Fraunhofer Institute for Energy Economics\n# and Energy System Technology (IEE), Kassel. 
All rights reserved.\n\nfrom setuptools import setup, find_packages\nimport re\n\nwith open('README.rst', 'rb') as f:\n install = f.read().decode('utf-8')\n\nwith open('CHANGELOG.rst', 'rb') as f:\n changelog = f.read().decode('utf-8')\n\nclassifiers = [\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: BSD License',\n 'Natural Language :: English',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3']\n\nwith open('.travis.yml', 'rb') as f:\n lines = f.read().decode('utf-8')\n for version in re.findall('python: 3.[0-9]', lines):\n classifiers.append('Programming Language :: Python :: 3.%s' % version[-1])\n\nlong_description = '\\n\\n'.join((install, changelog))\n\nsetup(\n name='pandapower',\n version='2.3.1',\n author='Leon Thurner, Alexander Scheidler',\n author_email='[email protected], [email protected]',\n description='An easy to use open source tool for power system modeling, analysis and optimization with a high degree of automation.',\n long_description=long_description,\n\tlong_description_content_type='text/x-rst',\n url='http://www.pandapower.org',\n license='BSD',\n install_requires=[\"pandas>=0.17\",\n \"networkx\",\n \"scipy\",\n \"numpy>=0.11\",\n \"packaging\",\n\t\t\t\t\t \"xlsxwriter\",\n\t\t\t\t\t \"xlrd\",\n\t\t\t\t\t \"cryptography\"],\n extras_require={\n\t\t\"docs\": [\"numpydoc\", \"sphinx\", \"sphinx_rtd_theme\"],\n\t\t\"plotting\": [\"plotly\", \"matplotlib\", \"python-igraph\"],\n\t\t\"test\": [\"pytest\", \"pytest-xdist\"]},\n packages=find_packages(),\n include_package_data=True,\n classifiers=classifiers\n)\n", "path": "setup.py"}, {"content": "__version__ = \"2.3.1\"\n\nimport os\npp_dir = os.path.dirname(os.path.realpath(__file__))\n\nfrom pandapower.auxiliary import *\nfrom pandapower.convert_format import *\nfrom pandapower.create import *\nfrom pandapower.diagnostic import *\nfrom pandapower.file_io import *\nfrom pandapower.run import *\nfrom pandapower.runpm import *\nfrom pandapower.std_types import *\nfrom pandapower.toolbox import *\nfrom pandapower.powerflow import *\nfrom pandapower.opf import *\nfrom pandapower.optimal_powerflow import OPFNotConverged\nfrom pandapower.pf.runpp_3ph import runpp_3ph\nimport pandas as pd\npd.options.mode.chained_assignment = None # default='warn'\n", "path": "pandapower/__init__.py"}]}
| 1,395 | 489 |
gh_patches_debug_19563 | rasdani/github-patches | git_diff | Flexget__Flexget-1345 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
TypeError with Form Login Plugin
### Expected behaviour:
Task runs without generating error.
### Actual behaviour:
Task runs and generates the following error
```
TypeError: must be unicode, not str
```
### Steps to reproduce:
- Step 1: Install latest version of Flexget using virtualenv
- Step 2: pip install mechanize
- Step 3: Create config.yml
- Step 4: flexget --test execute
#### Config:
```
tasks:
test task:
form:
url: http://example.com/login.php
username: email address
password: password
html:
url: http://example.com/
```
#### Log:
Crash:
```
2016-08-16 11:40 DEBUG manager test task Traceback:
Traceback (most recent call last):
File "/home/username/flexget/local/lib/python2.7/site-packages/flexget/task.py", line 444, in __run_plugin
return method(*args, **kwargs)
File "/home/username/flexget/local/lib/python2.7/site-packages/flexget/event.py", line 23, in __call__
return self.func(*args, **kwargs)
File "/home/username/flexget/local/lib/python2.7/site-packages/flexget/plugins/plugin_formlogin.py", line 73, in on_task_start
f.write(br.response().get_data())
TypeError: must be unicode, not str
2016-08-16 11:40 WARNING task test task Aborting task (plugin: form)
2016-08-16 11:40 DEBUG task_queue task test task aborted: TaskAbort(reason=BUG: Unhandled error in plugin form: must be unicode, not str, silent=False)
```
Full log.
```
http://pastebin.com/yBRqhYjR
```
### Additional information:
- Flexget Version: 2.2.20
- Python Version: 2.7.9
- Installation method: Virtualenv
- OS and version: Debian 8
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `flexget/plugins/plugin_formlogin.py`
Content:
```
1 from __future__ import unicode_literals, division, absolute_import
2 from builtins import * # pylint: disable=unused-import, redefined-builtin
3
4 import logging
5 import os
6 import socket
7
8 from flexget import plugin
9 from flexget.event import event
10
11 log = logging.getLogger('formlogin')
12
13
14 class FormLogin(object):
15 """
16 Login on form
17 """
18
19 schema = {
20 'type': 'object',
21 'properties': {
22 'url': {'type': 'string', 'format': 'url'},
23 'username': {'type': 'string'},
24 'password': {'type': 'string'},
25 'userfield': {'type': 'string'},
26 'passfield': {'type': 'string'}
27 },
28 'required': ['url', 'username', 'password'],
29 'additionalProperties': False
30 }
31
32 def on_task_start(self, task, config):
33 try:
34 from mechanize import Browser
35 except ImportError:
36 raise plugin.PluginError('mechanize required (python module), please install it.', log)
37
38 userfield = config.get('userfield', 'username')
39 passfield = config.get('passfield', 'password')
40
41 url = config['url']
42 username = config['username']
43 password = config['password']
44
45 br = Browser()
46 br.set_handle_robots(False)
47 try:
48 br.open(url)
49 except Exception:
50 # TODO: improve error handling
51 raise plugin.PluginError('Unable to post login form', log)
52
53 # br.set_debug_redirects(True)
54 # br.set_debug_responses(True)
55 # br.set_debug_http(True)
56
57 try:
58 for form in br.forms():
59 loginform = form
60
61 try:
62 loginform[userfield] = username
63 loginform[passfield] = password
64 break
65 except Exception:
66 pass
67 else:
68 received = os.path.join(task.manager.config_base, 'received')
69 if not os.path.isdir(received):
70 os.mkdir(received)
71 filename = os.path.join(received, '%s.formlogin.html' % task.name)
72 with open(filename, 'w') as f:
73 f.write(br.response().get_data())
74 log.critical('I have saved the login page content to %s for you to view' % filename)
75 raise plugin.PluginError('Unable to find login fields', log)
76 except socket.timeout:
77 raise plugin.PluginError('Timed out on url %s' % url)
78
79 br.form = loginform
80
81 br.submit()
82
83 cookiejar = br._ua_handlers["_cookies"].cookiejar
84
85 # Add cookiejar to our requests session
86 task.requests.add_cookiejar(cookiejar)
87
88
89 @event('plugin.register')
90 def register_plugin():
91 plugin.register(FormLogin, 'form', api_ver=2)
92
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/flexget/plugins/plugin_formlogin.py b/flexget/plugins/plugin_formlogin.py
--- a/flexget/plugins/plugin_formlogin.py
+++ b/flexget/plugins/plugin_formlogin.py
@@ -2,6 +2,7 @@
from builtins import * # pylint: disable=unused-import, redefined-builtin
import logging
+import io
import os
import socket
@@ -69,7 +70,7 @@
if not os.path.isdir(received):
os.mkdir(received)
filename = os.path.join(received, '%s.formlogin.html' % task.name)
- with open(filename, 'w') as f:
+ with io.open(filename, 'wb') as f:
f.write(br.response().get_data())
log.critical('I have saved the login page content to %s for you to view' % filename)
raise plugin.PluginError('Unable to find login fields', log)
|
{"golden_diff": "diff --git a/flexget/plugins/plugin_formlogin.py b/flexget/plugins/plugin_formlogin.py\n--- a/flexget/plugins/plugin_formlogin.py\n+++ b/flexget/plugins/plugin_formlogin.py\n@@ -2,6 +2,7 @@\n from builtins import * # pylint: disable=unused-import, redefined-builtin\n \n import logging\n+import io\n import os\n import socket\n \n@@ -69,7 +70,7 @@\n if not os.path.isdir(received):\n os.mkdir(received)\n filename = os.path.join(received, '%s.formlogin.html' % task.name)\n- with open(filename, 'w') as f:\n+ with io.open(filename, 'wb') as f:\n f.write(br.response().get_data())\n log.critical('I have saved the login page content to %s for you to view' % filename)\n raise plugin.PluginError('Unable to find login fields', log)\n", "issue": "TypeError with Form Login Plugin\n### Expected behaviour:\n\nTask runs without generating error.\n### Actual behaviour:\n\nTask runs and generates the following error\n\n```\nTypeError: must be unicode, not str\n```\n### Steps to reproduce:\n- Step 1: Install latest version of Flexget using virtualenv\n- Step 2: pip install mechanize\n- Step 3: Create config.yml\n- Step 4: flexget --test execute\n#### Config:\n\n```\ntasks:\n test task:\n form:\n url: http://example.com/login.php\n username: email address\n password: password\n html:\n url: http://example.com/\n```\n#### Log:\n\nCrash:\n\n```\n2016-08-16 11:40 DEBUG manager test task Traceback:\nTraceback (most recent call last):\n File \"/home/username/flexget/local/lib/python2.7/site-packages/flexget/task.py\", line 444, in __run_plugin\n return method(*args, **kwargs)\n File \"/home/username/flexget/local/lib/python2.7/site-packages/flexget/event.py\", line 23, in __call__\n return self.func(*args, **kwargs)\n File \"/home/username/flexget/local/lib/python2.7/site-packages/flexget/plugins/plugin_formlogin.py\", line 73, in on_task_start\n f.write(br.response().get_data())\nTypeError: must be unicode, not str\n2016-08-16 11:40 WARNING task test task Aborting task (plugin: form)\n2016-08-16 11:40 DEBUG task_queue task test task aborted: TaskAbort(reason=BUG: Unhandled error in plugin form: must be unicode, not str, silent=False)\n```\n\nFull log.\n\n```\nhttp://pastebin.com/yBRqhYjR\n```\n### Additional information:\n- Flexget Version: 2.2.20\n- Python Version: 2.7.9\n- Installation method: Virtualenv\n- OS and version: Debian 8\n\n", "before_files": [{"content": "from __future__ import unicode_literals, division, absolute_import\nfrom builtins import * # pylint: disable=unused-import, redefined-builtin\n\nimport logging\nimport os\nimport socket\n\nfrom flexget import plugin\nfrom flexget.event import event\n\nlog = logging.getLogger('formlogin')\n\n\nclass FormLogin(object):\n \"\"\"\n Login on form\n \"\"\"\n\n schema = {\n 'type': 'object',\n 'properties': {\n 'url': {'type': 'string', 'format': 'url'},\n 'username': {'type': 'string'},\n 'password': {'type': 'string'},\n 'userfield': {'type': 'string'},\n 'passfield': {'type': 'string'}\n },\n 'required': ['url', 'username', 'password'],\n 'additionalProperties': False\n }\n\n def on_task_start(self, task, config):\n try:\n from mechanize import Browser\n except ImportError:\n raise plugin.PluginError('mechanize required (python module), please install it.', log)\n\n userfield = config.get('userfield', 'username')\n passfield = config.get('passfield', 'password')\n\n url = config['url']\n username = config['username']\n password = config['password']\n\n br = Browser()\n br.set_handle_robots(False)\n try:\n br.open(url)\n except 
Exception:\n # TODO: improve error handling\n raise plugin.PluginError('Unable to post login form', log)\n\n # br.set_debug_redirects(True)\n # br.set_debug_responses(True)\n # br.set_debug_http(True)\n\n try:\n for form in br.forms():\n loginform = form\n\n try:\n loginform[userfield] = username\n loginform[passfield] = password\n break\n except Exception:\n pass\n else:\n received = os.path.join(task.manager.config_base, 'received')\n if not os.path.isdir(received):\n os.mkdir(received)\n filename = os.path.join(received, '%s.formlogin.html' % task.name)\n with open(filename, 'w') as f:\n f.write(br.response().get_data())\n log.critical('I have saved the login page content to %s for you to view' % filename)\n raise plugin.PluginError('Unable to find login fields', log)\n except socket.timeout:\n raise plugin.PluginError('Timed out on url %s' % url)\n\n br.form = loginform\n\n br.submit()\n\n cookiejar = br._ua_handlers[\"_cookies\"].cookiejar\n\n # Add cookiejar to our requests session\n task.requests.add_cookiejar(cookiejar)\n\n\n@event('plugin.register')\ndef register_plugin():\n plugin.register(FormLogin, 'form', api_ver=2)\n", "path": "flexget/plugins/plugin_formlogin.py"}], "after_files": [{"content": "from __future__ import unicode_literals, division, absolute_import\nfrom builtins import * # pylint: disable=unused-import, redefined-builtin\n\nimport logging\nimport io\nimport os\nimport socket\n\nfrom flexget import plugin\nfrom flexget.event import event\n\nlog = logging.getLogger('formlogin')\n\n\nclass FormLogin(object):\n \"\"\"\n Login on form\n \"\"\"\n\n schema = {\n 'type': 'object',\n 'properties': {\n 'url': {'type': 'string', 'format': 'url'},\n 'username': {'type': 'string'},\n 'password': {'type': 'string'},\n 'userfield': {'type': 'string'},\n 'passfield': {'type': 'string'}\n },\n 'required': ['url', 'username', 'password'],\n 'additionalProperties': False\n }\n\n def on_task_start(self, task, config):\n try:\n from mechanize import Browser\n except ImportError:\n raise plugin.PluginError('mechanize required (python module), please install it.', log)\n\n userfield = config.get('userfield', 'username')\n passfield = config.get('passfield', 'password')\n\n url = config['url']\n username = config['username']\n password = config['password']\n\n br = Browser()\n br.set_handle_robots(False)\n try:\n br.open(url)\n except Exception:\n # TODO: improve error handling\n raise plugin.PluginError('Unable to post login form', log)\n\n # br.set_debug_redirects(True)\n # br.set_debug_responses(True)\n # br.set_debug_http(True)\n\n try:\n for form in br.forms():\n loginform = form\n\n try:\n loginform[userfield] = username\n loginform[passfield] = password\n break\n except Exception:\n pass\n else:\n received = os.path.join(task.manager.config_base, 'received')\n if not os.path.isdir(received):\n os.mkdir(received)\n filename = os.path.join(received, '%s.formlogin.html' % task.name)\n with io.open(filename, 'wb') as f:\n f.write(br.response().get_data())\n log.critical('I have saved the login page content to %s for you to view' % filename)\n raise plugin.PluginError('Unable to find login fields', log)\n except socket.timeout:\n raise plugin.PluginError('Timed out on url %s' % url)\n\n br.form = loginform\n\n br.submit()\n\n cookiejar = br._ua_handlers[\"_cookies\"].cookiejar\n\n # Add cookiejar to our requests session\n task.requests.add_cookiejar(cookiejar)\n\n\n@event('plugin.register')\ndef register_plugin():\n plugin.register(FormLogin, 'form', api_ver=2)\n", "path": 
"flexget/plugins/plugin_formlogin.py"}]}
| 1,504 | 203 |
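Why the `io.open(filename, 'wb')` change in the record above resolves the `TypeError: must be unicode, not str`: mechanize's `response().get_data()` returns raw bytes, and the plugin's `from builtins import *` (python-future) appears to swap in an `io`-backed `open()` whose default text mode only accepts unicode on Python 2, so handing it bytes fails. Writing through a binary-mode handle sidesteps the mismatch. A minimal sketch — the filename and payload below are made up:

```python
import io

payload = b"<html>login page</html>"   # bytes, like mechanize's response().get_data()

# Binary mode accepts bytes directly -- the form the golden diff uses.
with io.open("received.formlogin.html", "wb") as f:
    f.write(payload)

# A text-mode handle would insist on unicode text instead:
# with io.open("received.formlogin.html", "w", encoding="utf-8") as f:
#     f.write(payload)        # TypeError -- bytes given where str/unicode is expected
```

Decoding the payload before writing would also work, but for saving a server response verbatim the binary handle is the simpler choice.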
gh_patches_debug_3564
|
rasdani/github-patches
|
git_diff
|
pypa__setuptools-2369
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
SystemError: Parent module 'setuptools' not loaded, cannot perform relative import with setuptools 50
After upgrading setuptools to 50.0 today, the environment fails to locate the entry points because it can no longer import distutils
```
$ python --version
Python 3.5.1
$ python -c "import distutils"
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "<frozen importlib._bootstrap>", line 969, in _find_and_load
File "<frozen importlib._bootstrap>", line 958, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 666, in _load_unlocked
File "<frozen importlib._bootstrap>", line 577, in module_from_spec
File "/home/gchan/tmp/setuptools-python-3.5/lib/python3.5/site-packages/_distutils_hack/__init__.py", line 82, in create_module
return importlib.import_module('._distutils', 'setuptools')
File "/home/gchan/tmp/setuptools-python-3.5/lib64/python3.5/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 981, in _gcd_import
File "<frozen importlib._bootstrap>", line 931, in _sanity_check
SystemError: Parent module 'setuptools' not loaded, cannot perform relative import
```
The issue could not be found in the python 3.8 environment.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `_distutils_hack/__init__.py`
Content:
```
1 import sys
2 import os
3 import re
4 import importlib
5 import warnings
6
7
8 is_pypy = '__pypy__' in sys.builtin_module_names
9
10
11 def warn_distutils_present():
12 if 'distutils' not in sys.modules:
13 return
14 if is_pypy and sys.version_info < (3, 7):
15 # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
16 # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
17 return
18 warnings.warn(
19 "Distutils was imported before Setuptools, but importing Setuptools "
20 "also replaces the `distutils` module in `sys.modules`. This may lead "
21 "to undesirable behaviors or errors. To avoid these issues, avoid "
22 "using distutils directly, ensure that setuptools is installed in the "
23 "traditional way (e.g. not an editable install), and/or make sure that "
24 "setuptools is always imported before distutils.")
25
26
27 def clear_distutils():
28 if 'distutils' not in sys.modules:
29 return
30 warnings.warn("Setuptools is replacing distutils.")
31 mods = [name for name in sys.modules if re.match(r'distutils\b', name)]
32 for name in mods:
33 del sys.modules[name]
34
35
36 def enabled():
37 """
38 Allow selection of distutils by environment variable.
39 """
40 which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
41 return which == 'local'
42
43
44 def ensure_local_distutils():
45 clear_distutils()
46 distutils = importlib.import_module('setuptools._distutils')
47 distutils.__name__ = 'distutils'
48 sys.modules['distutils'] = distutils
49
50 # sanity check that submodules load as expected
51 core = importlib.import_module('distutils.core')
52 assert '_distutils' in core.__file__, core.__file__
53
54
55 def do_override():
56 """
57 Ensure that the local copy of distutils is preferred over stdlib.
58
59 See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
60 for more motivation.
61 """
62 if enabled():
63 warn_distutils_present()
64 ensure_local_distutils()
65
66
67 class DistutilsMetaFinder:
68 def find_spec(self, fullname, path, target=None):
69 if path is not None:
70 return
71
72 method_name = 'spec_for_{fullname}'.format(**locals())
73 method = getattr(self, method_name, lambda: None)
74 return method()
75
76 def spec_for_distutils(self):
77 import importlib.abc
78 import importlib.util
79
80 class DistutilsLoader(importlib.abc.Loader):
81
82 def create_module(self, spec):
83 return importlib.import_module('._distutils', 'setuptools')
84
85 def exec_module(self, module):
86 pass
87
88 return importlib.util.spec_from_loader('distutils', DistutilsLoader())
89
90 def spec_for_pip(self):
91 """
92 Ensure stdlib distutils when running under pip.
93 See pypa/pip#8761 for rationale.
94 """
95 clear_distutils()
96 self.spec_for_distutils = lambda: None
97
98
99 DISTUTILS_FINDER = DistutilsMetaFinder()
100
101
102 def add_shim():
103 sys.meta_path.insert(0, DISTUTILS_FINDER)
104
105
106 def remove_shim():
107 try:
108 sys.meta_path.remove(DISTUTILS_FINDER)
109 except ValueError:
110 pass
111
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py
--- a/_distutils_hack/__init__.py
+++ b/_distutils_hack/__init__.py
@@ -80,7 +80,7 @@
class DistutilsLoader(importlib.abc.Loader):
def create_module(self, spec):
- return importlib.import_module('._distutils', 'setuptools')
+ return importlib.import_module('setuptools._distutils')
def exec_module(self, module):
pass
|
{"golden_diff": "diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py\n--- a/_distutils_hack/__init__.py\n+++ b/_distutils_hack/__init__.py\n@@ -80,7 +80,7 @@\n class DistutilsLoader(importlib.abc.Loader):\n \n def create_module(self, spec):\n- return importlib.import_module('._distutils', 'setuptools')\n+ return importlib.import_module('setuptools._distutils')\n \n def exec_module(self, module):\n pass\n", "issue": "SystemError: Parent module 'setuptools' not loaded, cannot perform relative import with setuptools 50\nAfter upgrading setuptools to 50.0 today, the environment fails to locate the entry points as it could not import distutils\r\n\r\n```\r\n$ python --version\r\nPython 3.5.1\r\n$ python -c \"import distutils\"\r\nTraceback (most recent call last):\r\n File \"<string>\", line 1, in <module>\r\n File \"<frozen importlib._bootstrap>\", line 969, in _find_and_load\r\n File \"<frozen importlib._bootstrap>\", line 958, in _find_and_load_unlocked\r\n File \"<frozen importlib._bootstrap>\", line 666, in _load_unlocked\r\n File \"<frozen importlib._bootstrap>\", line 577, in module_from_spec\r\n File \"/home/gchan/tmp/setuptools-python-3.5/lib/python3.5/site-packages/_distutils_hack/__init__.py\", line 82, in create_module\r\n return importlib.import_module('._distutils', 'setuptools')\r\n File \"/home/gchan/tmp/setuptools-python-3.5/lib64/python3.5/importlib/__init__.py\", line 126, in import_module\r\n return _bootstrap._gcd_import(name[level:], package, level)\r\n File \"<frozen importlib._bootstrap>\", line 981, in _gcd_import\r\n File \"<frozen importlib._bootstrap>\", line 931, in _sanity_check\r\nSystemError: Parent module 'setuptools' not loaded, cannot perform relative import\r\n```\r\n\r\nThe issue could not be found in the python 3.8 environment. \n", "before_files": [{"content": "import sys\nimport os\nimport re\nimport importlib\nimport warnings\n\n\nis_pypy = '__pypy__' in sys.builtin_module_names\n\n\ndef warn_distutils_present():\n if 'distutils' not in sys.modules:\n return\n if is_pypy and sys.version_info < (3, 7):\n # PyPy for 3.6 unconditionally imports distutils, so bypass the warning\n # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250\n return\n warnings.warn(\n \"Distutils was imported before Setuptools, but importing Setuptools \"\n \"also replaces the `distutils` module in `sys.modules`. This may lead \"\n \"to undesirable behaviors or errors. To avoid these issues, avoid \"\n \"using distutils directly, ensure that setuptools is installed in the \"\n \"traditional way (e.g. 
not an editable install), and/or make sure that \"\n \"setuptools is always imported before distutils.\")\n\n\ndef clear_distutils():\n if 'distutils' not in sys.modules:\n return\n warnings.warn(\"Setuptools is replacing distutils.\")\n mods = [name for name in sys.modules if re.match(r'distutils\\b', name)]\n for name in mods:\n del sys.modules[name]\n\n\ndef enabled():\n \"\"\"\n Allow selection of distutils by environment variable.\n \"\"\"\n which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')\n return which == 'local'\n\n\ndef ensure_local_distutils():\n clear_distutils()\n distutils = importlib.import_module('setuptools._distutils')\n distutils.__name__ = 'distutils'\n sys.modules['distutils'] = distutils\n\n # sanity check that submodules load as expected\n core = importlib.import_module('distutils.core')\n assert '_distutils' in core.__file__, core.__file__\n\n\ndef do_override():\n \"\"\"\n Ensure that the local copy of distutils is preferred over stdlib.\n\n See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401\n for more motivation.\n \"\"\"\n if enabled():\n warn_distutils_present()\n ensure_local_distutils()\n\n\nclass DistutilsMetaFinder:\n def find_spec(self, fullname, path, target=None):\n if path is not None:\n return\n\n method_name = 'spec_for_{fullname}'.format(**locals())\n method = getattr(self, method_name, lambda: None)\n return method()\n\n def spec_for_distutils(self):\n import importlib.abc\n import importlib.util\n\n class DistutilsLoader(importlib.abc.Loader):\n\n def create_module(self, spec):\n return importlib.import_module('._distutils', 'setuptools')\n\n def exec_module(self, module):\n pass\n\n return importlib.util.spec_from_loader('distutils', DistutilsLoader())\n\n def spec_for_pip(self):\n \"\"\"\n Ensure stdlib distutils when running under pip.\n See pypa/pip#8761 for rationale.\n \"\"\"\n clear_distutils()\n self.spec_for_distutils = lambda: None\n\n\nDISTUTILS_FINDER = DistutilsMetaFinder()\n\n\ndef add_shim():\n sys.meta_path.insert(0, DISTUTILS_FINDER)\n\n\ndef remove_shim():\n try:\n sys.meta_path.remove(DISTUTILS_FINDER)\n except ValueError:\n pass\n", "path": "_distutils_hack/__init__.py"}], "after_files": [{"content": "import sys\nimport os\nimport re\nimport importlib\nimport warnings\n\n\nis_pypy = '__pypy__' in sys.builtin_module_names\n\n\ndef warn_distutils_present():\n if 'distutils' not in sys.modules:\n return\n if is_pypy and sys.version_info < (3, 7):\n # PyPy for 3.6 unconditionally imports distutils, so bypass the warning\n # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250\n return\n warnings.warn(\n \"Distutils was imported before Setuptools, but importing Setuptools \"\n \"also replaces the `distutils` module in `sys.modules`. This may lead \"\n \"to undesirable behaviors or errors. To avoid these issues, avoid \"\n \"using distutils directly, ensure that setuptools is installed in the \"\n \"traditional way (e.g. 
not an editable install), and/or make sure that \"\n \"setuptools is always imported before distutils.\")\n\n\ndef clear_distutils():\n if 'distutils' not in sys.modules:\n return\n warnings.warn(\"Setuptools is replacing distutils.\")\n mods = [name for name in sys.modules if re.match(r'distutils\\b', name)]\n for name in mods:\n del sys.modules[name]\n\n\ndef enabled():\n \"\"\"\n Allow selection of distutils by environment variable.\n \"\"\"\n which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')\n return which == 'local'\n\n\ndef ensure_local_distutils():\n clear_distutils()\n distutils = importlib.import_module('setuptools._distutils')\n distutils.__name__ = 'distutils'\n sys.modules['distutils'] = distutils\n\n # sanity check that submodules load as expected\n core = importlib.import_module('distutils.core')\n assert '_distutils' in core.__file__, core.__file__\n\n\ndef do_override():\n \"\"\"\n Ensure that the local copy of distutils is preferred over stdlib.\n\n See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401\n for more motivation.\n \"\"\"\n if enabled():\n warn_distutils_present()\n ensure_local_distutils()\n\n\nclass DistutilsMetaFinder:\n def find_spec(self, fullname, path, target=None):\n if path is not None:\n return\n\n method_name = 'spec_for_{fullname}'.format(**locals())\n method = getattr(self, method_name, lambda: None)\n return method()\n\n def spec_for_distutils(self):\n import importlib.abc\n import importlib.util\n\n class DistutilsLoader(importlib.abc.Loader):\n\n def create_module(self, spec):\n return importlib.import_module('setuptools._distutils')\n\n def exec_module(self, module):\n pass\n\n return importlib.util.spec_from_loader('distutils', DistutilsLoader())\n\n def spec_for_pip(self):\n \"\"\"\n Ensure stdlib distutils when running under pip.\n See pypa/pip#8761 for rationale.\n \"\"\"\n clear_distutils()\n self.spec_for_distutils = lambda: None\n\n\nDISTUTILS_FINDER = DistutilsMetaFinder()\n\n\ndef add_shim():\n sys.meta_path.insert(0, DISTUTILS_FINDER)\n\n\ndef remove_shim():\n try:\n sys.meta_path.remove(DISTUTILS_FINDER)\n except ValueError:\n pass\n", "path": "_distutils_hack/__init__.py"}]}
| 1,660 | 122 |
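The single-line fix in this record works because `importlib.import_module('._distutils', 'setuptools')` is a relative import: on Python 3.5 the import machinery's sanity check requires the parent package (`setuptools`) to already be in `sys.modules`, which is exactly the `SystemError: Parent module 'setuptools' not loaded` in the traceback, and nothing guarantees the meta-path shim fires only after setuptools has been imported. The absolute spelling `setuptools._distutils` lets the machinery import the parent itself. A small sketch of how the relative name resolves — it only manipulates strings, so it runs even without setuptools installed:

```python
import importlib.util

# import_module('._distutils', 'setuptools') first resolves the leading-dot
# name against the package argument; resolve_name() exposes that step.
print(importlib.util.resolve_name("._distutils", "setuptools"))
# -> setuptools._distutils

# On Python 3.5, import_module() then also insisted that 'setuptools' already
# be present in sys.modules (the SystemError from the traceback). The absolute
# call importlib.import_module('setuptools._distutils') has no such
# precondition: the machinery imports the parent package while resolving the
# dotted name.
```

Newer interpreters dropped that sanity check, which matches the report's note that the problem does not reproduce on Python 3.8.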
gh_patches_debug_23989
|
rasdani/github-patches
|
git_diff
|
saleor__saleor-3526
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Saleor eCommerce, Mirumee Software in pdf
### What I'm trying to achieve
Make pdf with my custom name instead of Saleor eCommerce, Mirumee Software
### Steps to reproduce the problem
make an order, fulfill it, and make the pdf
### What I expected to happen
My brand new ecommerce company
### Screenshots
<!-- If applicable, add screenshots to help explain your problem. -->
<img width="1207" alt="schermafbeelding 2018-12-22 om 13 35 35" src="https://user-images.githubusercontent.com/7673074/50374426-8b5d1780-05ee-11e9-81cf-f778935a403b.png">
**System information**
Operating system:
Browser:
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `saleor/dashboard/order/utils.py`
Content:
```
1 from django.conf import settings
2 from django.contrib.sites.shortcuts import get_current_site
3 from django.template.loader import get_template
4
5 from ...checkout import AddressType
6 from ...checkout.utils import _get_products_voucher_discount
7 from ...core.utils.taxes import ZERO_MONEY
8 from ...discount import VoucherType
9 from ...discount.utils import (
10 get_shipping_voucher_discount, get_value_voucher_discount)
11 from ...product.utils import decrease_stock
12
13 INVOICE_TEMPLATE = 'dashboard/order/pdf/invoice.html'
14 PACKING_SLIP_TEMPLATE = 'dashboard/order/pdf/packing_slip.html'
15
16
17 def get_statics_absolute_url(request):
18 site = get_current_site(request)
19 absolute_url = '%(protocol)s://%(domain)s%(static_url)s' % {
20 'protocol': 'https' if request.is_secure() else 'http',
21 'domain': site.domain,
22 'static_url': settings.STATIC_URL}
23 return absolute_url
24
25
26 def _create_pdf(rendered_template, absolute_url):
27 from weasyprint import HTML
28 pdf_file = (HTML(string=rendered_template, base_url=absolute_url)
29 .write_pdf())
30 return pdf_file
31
32
33 def create_invoice_pdf(order, absolute_url):
34 ctx = {'order': order}
35 rendered_template = get_template(INVOICE_TEMPLATE).render(ctx)
36 pdf_file = _create_pdf(rendered_template, absolute_url)
37 return pdf_file, order
38
39
40 def create_packing_slip_pdf(order, fulfillment, absolute_url):
41 ctx = {'order': order, 'fulfillment': fulfillment}
42 rendered_template = get_template(PACKING_SLIP_TEMPLATE).render(ctx)
43 pdf_file = _create_pdf(rendered_template, absolute_url)
44 return pdf_file, order
45
46
47 def fulfill_order_line(order_line, quantity):
48 """Fulfill order line with given quantity."""
49 if order_line.variant and order_line.variant.track_inventory:
50 decrease_stock(order_line.variant, quantity)
51 order_line.quantity_fulfilled += quantity
52 order_line.save(update_fields=['quantity_fulfilled'])
53
54
55 def update_order_with_user_addresses(order):
56 """Update addresses in an order based on a user assigned to an order."""
57 if order.shipping_address:
58 order.shipping_address.delete()
59 order.shipping_address = None
60
61 if order.billing_address:
62 order.billing_address.delete()
63 order.billing_address = None
64
65 if order.user:
66 order.billing_address = (
67 order.user.default_billing_address.get_copy()
68 if order.user.default_billing_address else None)
69 order.shipping_address = (
70 order.user.default_shipping_address.get_copy()
71 if order.user.default_shipping_address else None)
72
73 order.save(update_fields=['billing_address', 'shipping_address'])
74
75
76 def get_voucher_discount_for_order(order):
77 """Calculate discount value depending on voucher and discount types.
78
79 Raise NotApplicable if voucher of given type cannot be applied.
80 """
81 if not order.voucher:
82 return ZERO_MONEY
83 if order.voucher.type == VoucherType.VALUE:
84 return get_value_voucher_discount(
85 order.voucher, order.get_subtotal())
86 if order.voucher.type == VoucherType.SHIPPING:
87 return get_shipping_voucher_discount(
88 order.voucher, order.get_subtotal(), order.shipping_price)
89 if order.voucher.type in (
90 VoucherType.PRODUCT, VoucherType.COLLECTION, VoucherType.CATEGORY):
91 return _get_products_voucher_discount(order, order.voucher)
92 raise NotImplementedError('Unknown discount type')
93
94
95 def save_address_in_order(order, address, address_type):
96 """Save new address of a given address type in an order.
97
98 If the other type of address is empty, copy it.
99 """
100 if address_type == AddressType.SHIPPING:
101 order.shipping_address = address
102 if not order.billing_address:
103 order.billing_address = address.get_copy()
104 else:
105 order.billing_address = address
106 if not order.shipping_address:
107 order.shipping_address = address.get_copy()
108 order.save(update_fields=['billing_address', 'shipping_address'])
109
110
111 def addresses_are_equal(address_1, address_2):
112 return address_1 and address_2 and address_1 == address_2
113
114
115 def remove_customer_from_order(order):
116 """Remove related customer and user email from order.
117
118 If billing and shipping addresses are set to related customer's default
119 addresses and were not edited, remove them as well.
120 """
121 customer = order.user
122 order.user = None
123 order.user_email = ''
124 order.save()
125
126 if customer:
127 equal_billing_addresses = addresses_are_equal(
128 order.billing_address, customer.default_billing_address)
129 if equal_billing_addresses:
130 order.billing_address.delete()
131 order.billing_address = None
132
133 equal_shipping_addresses = addresses_are_equal(
134 order.shipping_address, customer.default_shipping_address)
135 if equal_shipping_addresses:
136 order.shipping_address.delete()
137 order.shipping_address = None
138
139 if equal_billing_addresses or equal_shipping_addresses:
140 order.save()
141
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/saleor/dashboard/order/utils.py b/saleor/dashboard/order/utils.py
--- a/saleor/dashboard/order/utils.py
+++ b/saleor/dashboard/order/utils.py
@@ -1,4 +1,5 @@
from django.conf import settings
+from django.contrib.sites.models import Site
from django.contrib.sites.shortcuts import get_current_site
from django.template.loader import get_template
@@ -31,14 +32,19 @@
def create_invoice_pdf(order, absolute_url):
- ctx = {'order': order}
+ ctx = {
+ 'order': order,
+ 'site': Site.objects.get_current()}
rendered_template = get_template(INVOICE_TEMPLATE).render(ctx)
pdf_file = _create_pdf(rendered_template, absolute_url)
return pdf_file, order
def create_packing_slip_pdf(order, fulfillment, absolute_url):
- ctx = {'order': order, 'fulfillment': fulfillment}
+ ctx = {
+ 'order': order,
+ 'fulfillment': fulfillment,
+ 'site': Site.objects.get_current()}
rendered_template = get_template(PACKING_SLIP_TEMPLATE).render(ctx)
pdf_file = _create_pdf(rendered_template, absolute_url)
return pdf_file, order
|
{"golden_diff": "diff --git a/saleor/dashboard/order/utils.py b/saleor/dashboard/order/utils.py\n--- a/saleor/dashboard/order/utils.py\n+++ b/saleor/dashboard/order/utils.py\n@@ -1,4 +1,5 @@\n from django.conf import settings\n+from django.contrib.sites.models import Site\n from django.contrib.sites.shortcuts import get_current_site\n from django.template.loader import get_template\n \n@@ -31,14 +32,19 @@\n \n \n def create_invoice_pdf(order, absolute_url):\n- ctx = {'order': order}\n+ ctx = {\n+ 'order': order,\n+ 'site': Site.objects.get_current()}\n rendered_template = get_template(INVOICE_TEMPLATE).render(ctx)\n pdf_file = _create_pdf(rendered_template, absolute_url)\n return pdf_file, order\n \n \n def create_packing_slip_pdf(order, fulfillment, absolute_url):\n- ctx = {'order': order, 'fulfillment': fulfillment}\n+ ctx = {\n+ 'order': order,\n+ 'fulfillment': fulfillment,\n+ 'site': Site.objects.get_current()}\n rendered_template = get_template(PACKING_SLIP_TEMPLATE).render(ctx)\n pdf_file = _create_pdf(rendered_template, absolute_url)\n return pdf_file, order\n", "issue": "Saleor eCommerce, Mirumee Software in pdf\n### What I'm trying to achieve\r\nMake pdf with my custom name instead of Saleor eCommerce, Mirumee Software\r\n\r\n### Steps to reproduce the problem\r\nmake order and fullfill and make pdf\r\n### What I expected to happen\r\nMy brand new ecommerce company\r\n\r\n### Screenshots\r\n<!-- If applicable, add screenshots to help explain your problem. -->\r\n<img width=\"1207\" alt=\"schermafbeelding 2018-12-22 om 13 35 35\" src=\"https://user-images.githubusercontent.com/7673074/50374426-8b5d1780-05ee-11e9-81cf-f778935a403b.png\">\r\n\r\n**System information**\r\nOperating system: \r\nBrowser:\r\n\n", "before_files": [{"content": "from django.conf import settings\nfrom django.contrib.sites.shortcuts import get_current_site\nfrom django.template.loader import get_template\n\nfrom ...checkout import AddressType\nfrom ...checkout.utils import _get_products_voucher_discount\nfrom ...core.utils.taxes import ZERO_MONEY\nfrom ...discount import VoucherType\nfrom ...discount.utils import (\n get_shipping_voucher_discount, get_value_voucher_discount)\nfrom ...product.utils import decrease_stock\n\nINVOICE_TEMPLATE = 'dashboard/order/pdf/invoice.html'\nPACKING_SLIP_TEMPLATE = 'dashboard/order/pdf/packing_slip.html'\n\n\ndef get_statics_absolute_url(request):\n site = get_current_site(request)\n absolute_url = '%(protocol)s://%(domain)s%(static_url)s' % {\n 'protocol': 'https' if request.is_secure() else 'http',\n 'domain': site.domain,\n 'static_url': settings.STATIC_URL}\n return absolute_url\n\n\ndef _create_pdf(rendered_template, absolute_url):\n from weasyprint import HTML\n pdf_file = (HTML(string=rendered_template, base_url=absolute_url)\n .write_pdf())\n return pdf_file\n\n\ndef create_invoice_pdf(order, absolute_url):\n ctx = {'order': order}\n rendered_template = get_template(INVOICE_TEMPLATE).render(ctx)\n pdf_file = _create_pdf(rendered_template, absolute_url)\n return pdf_file, order\n\n\ndef create_packing_slip_pdf(order, fulfillment, absolute_url):\n ctx = {'order': order, 'fulfillment': fulfillment}\n rendered_template = get_template(PACKING_SLIP_TEMPLATE).render(ctx)\n pdf_file = _create_pdf(rendered_template, absolute_url)\n return pdf_file, order\n\n\ndef fulfill_order_line(order_line, quantity):\n \"\"\"Fulfill order line with given quantity.\"\"\"\n if order_line.variant and order_line.variant.track_inventory:\n decrease_stock(order_line.variant, quantity)\n 
order_line.quantity_fulfilled += quantity\n order_line.save(update_fields=['quantity_fulfilled'])\n\n\ndef update_order_with_user_addresses(order):\n \"\"\"Update addresses in an order based on a user assigned to an order.\"\"\"\n if order.shipping_address:\n order.shipping_address.delete()\n order.shipping_address = None\n\n if order.billing_address:\n order.billing_address.delete()\n order.billing_address = None\n\n if order.user:\n order.billing_address = (\n order.user.default_billing_address.get_copy()\n if order.user.default_billing_address else None)\n order.shipping_address = (\n order.user.default_shipping_address.get_copy()\n if order.user.default_shipping_address else None)\n\n order.save(update_fields=['billing_address', 'shipping_address'])\n\n\ndef get_voucher_discount_for_order(order):\n \"\"\"Calculate discount value depending on voucher and discount types.\n\n Raise NotApplicable if voucher of given type cannot be applied.\n \"\"\"\n if not order.voucher:\n return ZERO_MONEY\n if order.voucher.type == VoucherType.VALUE:\n return get_value_voucher_discount(\n order.voucher, order.get_subtotal())\n if order.voucher.type == VoucherType.SHIPPING:\n return get_shipping_voucher_discount(\n order.voucher, order.get_subtotal(), order.shipping_price)\n if order.voucher.type in (\n VoucherType.PRODUCT, VoucherType.COLLECTION, VoucherType.CATEGORY):\n return _get_products_voucher_discount(order, order.voucher)\n raise NotImplementedError('Unknown discount type')\n\n\ndef save_address_in_order(order, address, address_type):\n \"\"\"Save new address of a given address type in an order.\n\n If the other type of address is empty, copy it.\n \"\"\"\n if address_type == AddressType.SHIPPING:\n order.shipping_address = address\n if not order.billing_address:\n order.billing_address = address.get_copy()\n else:\n order.billing_address = address\n if not order.shipping_address:\n order.shipping_address = address.get_copy()\n order.save(update_fields=['billing_address', 'shipping_address'])\n\n\ndef addresses_are_equal(address_1, address_2):\n return address_1 and address_2 and address_1 == address_2\n\n\ndef remove_customer_from_order(order):\n \"\"\"Remove related customer and user email from order.\n\n If billing and shipping addresses are set to related customer's default\n addresses and were not edited, remove them as well.\n \"\"\"\n customer = order.user\n order.user = None\n order.user_email = ''\n order.save()\n\n if customer:\n equal_billing_addresses = addresses_are_equal(\n order.billing_address, customer.default_billing_address)\n if equal_billing_addresses:\n order.billing_address.delete()\n order.billing_address = None\n\n equal_shipping_addresses = addresses_are_equal(\n order.shipping_address, customer.default_shipping_address)\n if equal_shipping_addresses:\n order.shipping_address.delete()\n order.shipping_address = None\n\n if equal_billing_addresses or equal_shipping_addresses:\n order.save()\n", "path": "saleor/dashboard/order/utils.py"}], "after_files": [{"content": "from django.conf import settings\nfrom django.contrib.sites.models import Site\nfrom django.contrib.sites.shortcuts import get_current_site\nfrom django.template.loader import get_template\n\nfrom ...checkout import AddressType\nfrom ...checkout.utils import _get_products_voucher_discount\nfrom ...core.utils.taxes import ZERO_MONEY\nfrom ...discount import VoucherType\nfrom ...discount.utils import (\n get_shipping_voucher_discount, get_value_voucher_discount)\nfrom ...product.utils import 
decrease_stock\n\nINVOICE_TEMPLATE = 'dashboard/order/pdf/invoice.html'\nPACKING_SLIP_TEMPLATE = 'dashboard/order/pdf/packing_slip.html'\n\n\ndef get_statics_absolute_url(request):\n site = get_current_site(request)\n absolute_url = '%(protocol)s://%(domain)s%(static_url)s' % {\n 'protocol': 'https' if request.is_secure() else 'http',\n 'domain': site.domain,\n 'static_url': settings.STATIC_URL}\n return absolute_url\n\n\ndef _create_pdf(rendered_template, absolute_url):\n from weasyprint import HTML\n pdf_file = (HTML(string=rendered_template, base_url=absolute_url)\n .write_pdf())\n return pdf_file\n\n\ndef create_invoice_pdf(order, absolute_url):\n ctx = {\n 'order': order,\n 'site': Site.objects.get_current()}\n rendered_template = get_template(INVOICE_TEMPLATE).render(ctx)\n pdf_file = _create_pdf(rendered_template, absolute_url)\n return pdf_file, order\n\n\ndef create_packing_slip_pdf(order, fulfillment, absolute_url):\n ctx = {\n 'order': order,\n 'fulfillment': fulfillment,\n 'site': Site.objects.get_current()}\n rendered_template = get_template(PACKING_SLIP_TEMPLATE).render(ctx)\n pdf_file = _create_pdf(rendered_template, absolute_url)\n return pdf_file, order\n\n\ndef fulfill_order_line(order_line, quantity):\n \"\"\"Fulfill order line with given quantity.\"\"\"\n if order_line.variant and order_line.variant.track_inventory:\n decrease_stock(order_line.variant, quantity)\n order_line.quantity_fulfilled += quantity\n order_line.save(update_fields=['quantity_fulfilled'])\n\n\ndef update_order_with_user_addresses(order):\n \"\"\"Update addresses in an order based on a user assigned to an order.\"\"\"\n if order.shipping_address:\n order.shipping_address.delete()\n order.shipping_address = None\n\n if order.billing_address:\n order.billing_address.delete()\n order.billing_address = None\n\n if order.user:\n order.billing_address = (\n order.user.default_billing_address.get_copy()\n if order.user.default_billing_address else None)\n order.shipping_address = (\n order.user.default_shipping_address.get_copy()\n if order.user.default_shipping_address else None)\n\n order.save(update_fields=['billing_address', 'shipping_address'])\n\n\ndef get_voucher_discount_for_order(order):\n \"\"\"Calculate discount value depending on voucher and discount types.\n\n Raise NotApplicable if voucher of given type cannot be applied.\n \"\"\"\n if not order.voucher:\n return ZERO_MONEY\n if order.voucher.type == VoucherType.VALUE:\n return get_value_voucher_discount(\n order.voucher, order.get_subtotal())\n if order.voucher.type == VoucherType.SHIPPING:\n return get_shipping_voucher_discount(\n order.voucher, order.get_subtotal(), order.shipping_price)\n if order.voucher.type in (\n VoucherType.PRODUCT, VoucherType.COLLECTION, VoucherType.CATEGORY):\n return _get_products_voucher_discount(order, order.voucher)\n raise NotImplementedError('Unknown discount type')\n\n\ndef save_address_in_order(order, address, address_type):\n \"\"\"Save new address of a given address type in an order.\n\n If the other type of address is empty, copy it.\n \"\"\"\n if address_type == AddressType.SHIPPING:\n order.shipping_address = address\n if not order.billing_address:\n order.billing_address = address.get_copy()\n else:\n order.billing_address = address\n if not order.shipping_address:\n order.shipping_address = address.get_copy()\n order.save(update_fields=['billing_address', 'shipping_address'])\n\n\ndef addresses_are_equal(address_1, address_2):\n return address_1 and address_2 and address_1 == address_2\n\n\ndef 
remove_customer_from_order(order):\n \"\"\"Remove related customer and user email from order.\n\n If billing and shipping addresses are set to related customer's default\n addresses and were not edited, remove them as well.\n \"\"\"\n customer = order.user\n order.user = None\n order.user_email = ''\n order.save()\n\n if customer:\n equal_billing_addresses = addresses_are_equal(\n order.billing_address, customer.default_billing_address)\n if equal_billing_addresses:\n order.billing_address.delete()\n order.billing_address = None\n\n equal_shipping_addresses = addresses_are_equal(\n order.shipping_address, customer.default_shipping_address)\n if equal_shipping_addresses:\n order.shipping_address.delete()\n order.shipping_address = None\n\n if equal_billing_addresses or equal_shipping_addresses:\n order.save()\n", "path": "saleor/dashboard/order/utils.py"}]}
| 1,833 | 273 |
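The fix in this record only injects the current `Site` into the PDF contexts; the invoice and packing-slip templates (not included in the record) can then render the shop's own name and domain instead of the hard-coded vendor string visible in the screenshot. A hedged sketch of the same pattern — it assumes a configured Django project with `django.contrib.sites` enabled and `SITE_ID` set, and the function name is illustrative:

```python
# Assumes a configured Django project with django.contrib.sites in
# INSTALLED_APPS and SITE_ID set; pdf_context() is an illustrative name.
from django.contrib.sites.models import Site

def pdf_context(order, fulfillment=None):
    site = Site.objects.get_current()   # resolved from SITE_ID and cached per process
    ctx = {"order": order, "site": site}
    if fulfillment is not None:
        ctx["fulfillment"] = fulfillment
    return ctx
```

The site's name and domain themselves are edited through the sites framework (for example in the Django admin), which is what finally puts the custom branding on the generated PDF.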
gh_patches_debug_31088
|
rasdani/github-patches
|
git_diff
|
shapiromatron__hawc-505
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
dosing regime dose groups hotfix
We had a reported data corruption issue where a user edited content in a dosing regime and then after saving, we found multiple endpoint-groups with the same endpoint group id, which shouldn't be possible.
After investigation, we found it was an error in the signal which keeps dose-groups and endpoint-groups synced. If there were multiple representations of dose-groups, for example 5 dose-groups and 2 units, then hawc would create 10 endpoint-groups instead of 5. Further, it would create these even for endpoints where data is not extracted.
Here we fix this issue and write a few tests.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `hawc/apps/animal/admin.py`
Content:
```
1 from django.contrib import admin
2
3 from . import models
4
5
6 @admin.register(models.Experiment)
7 class ExperimentAdmin(admin.ModelAdmin):
8 list_display = (
9 "id",
10 "study",
11 "name",
12 "type",
13 "has_multiple_generations",
14 "chemical",
15 "cas",
16 "created",
17 )
18 list_filter = ("type", "has_multiple_generations", "chemical", "study__assessment")
19 search_fields = (
20 "study__short_citation",
21 "name",
22 )
23
24
25 @admin.register(models.AnimalGroup)
26 class AnimalGroupAdmin(admin.ModelAdmin):
27 list_display = (
28 "id",
29 "experiment",
30 "name",
31 "species",
32 "strain",
33 "sex",
34 "created",
35 )
36 list_filter = ("species", "strain", "sex", "experiment__study__assessment_id")
37 search_fields = ("name",)
38
39
40 @admin.register(models.DosingRegime)
41 class DosingRegimeAdmin(admin.ModelAdmin):
42 list_display = (
43 "id",
44 "dosed_animals",
45 "route_of_exposure",
46 "duration_exposure",
47 "num_dose_groups",
48 "created",
49 )
50 list_filter = (
51 "route_of_exposure",
52 "num_dose_groups",
53 "dosed_animals__experiment__study__assessment_id",
54 )
55 list_select_related = ("dosed_animals",)
56 search_fields = ("dosed_animals__name",)
57
58
59 @admin.register(models.Endpoint)
60 class EndpointAdmin(admin.ModelAdmin):
61 list_display = (
62 "id",
63 "assessment_id",
64 "animal_group",
65 "name",
66 "system",
67 "organ",
68 "effect",
69 "data_extracted",
70 "created",
71 )
72 list_filter = ("system", "organ", "data_extracted", "assessment_id")
73 search_fields = ("name",)
74
```
Path: `hawc/services/utils/git.py`
Content:
```
1 import subprocess
2 from datetime import datetime
3
4 from pydantic import BaseModel
5
6
7 class Commit(BaseModel):
8 sha: str
9 dt: datetime
10
11 @classmethod
12 def current(cls, cwd: str = ".") -> "Commit":
13 """Return information on the last commit at the repository path desired.
14
15 Returns:
16 A Commit instance
17 """
18 cmd = "git log -1 --format=%H"
19 sha = subprocess.check_output(cmd.split(), cwd=cwd).decode().strip()[:12]
20 cmd = "git show -s --format=%ct"
21 dt = datetime.fromtimestamp(
22 int(subprocess.check_output(cmd.split(), cwd=cwd).decode().strip())
23 )
24 return cls(sha=sha, dt=dt)
25
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/hawc/apps/animal/admin.py b/hawc/apps/animal/admin.py
--- a/hawc/apps/animal/admin.py
+++ b/hawc/apps/animal/admin.py
@@ -35,6 +35,13 @@
)
list_filter = ("species", "strain", "sex", "experiment__study__assessment_id")
search_fields = ("name",)
+ raw_id_fields = ("experiment", "species", "strain", "dosing_regime", "siblings")
+
+
+class DoseGroupInline(admin.TabularInline):
+ model = models.DoseGroup
+ raw_id_fields = ("dose_units",)
+ extra = 0
@admin.register(models.DosingRegime)
@@ -54,6 +61,12 @@
)
list_select_related = ("dosed_animals",)
search_fields = ("dosed_animals__name",)
+ inlines = (DoseGroupInline,)
+
+
+class EndpointGroupInline(admin.TabularInline):
+ model = models.EndpointGroup
+ extra = 0
@admin.register(models.Endpoint)
@@ -71,3 +84,13 @@
)
list_filter = ("system", "organ", "data_extracted", "assessment_id")
search_fields = ("name",)
+ raw_id_fields = (
+ "assessment",
+ "animal_group",
+ "system_term",
+ "organ_term",
+ "effect_term",
+ "effect_subtype_term",
+ "name_term",
+ )
+ inlines = (EndpointGroupInline,)
diff --git a/hawc/services/utils/git.py b/hawc/services/utils/git.py
--- a/hawc/services/utils/git.py
+++ b/hawc/services/utils/git.py
@@ -16,7 +16,7 @@
A Commit instance
"""
cmd = "git log -1 --format=%H"
- sha = subprocess.check_output(cmd.split(), cwd=cwd).decode().strip()[:12]
+ sha = subprocess.check_output(cmd.split(), cwd=cwd).decode().strip()[:8]
cmd = "git show -s --format=%ct"
dt = datetime.fromtimestamp(
int(subprocess.check_output(cmd.split(), cwd=cwd).decode().strip())
|
{"golden_diff": "diff --git a/hawc/apps/animal/admin.py b/hawc/apps/animal/admin.py\n--- a/hawc/apps/animal/admin.py\n+++ b/hawc/apps/animal/admin.py\n@@ -35,6 +35,13 @@\n )\n list_filter = (\"species\", \"strain\", \"sex\", \"experiment__study__assessment_id\")\n search_fields = (\"name\",)\n+ raw_id_fields = (\"experiment\", \"species\", \"strain\", \"dosing_regime\", \"siblings\")\n+\n+\n+class DoseGroupInline(admin.TabularInline):\n+ model = models.DoseGroup\n+ raw_id_fields = (\"dose_units\",)\n+ extra = 0\n \n \n @admin.register(models.DosingRegime)\n@@ -54,6 +61,12 @@\n )\n list_select_related = (\"dosed_animals\",)\n search_fields = (\"dosed_animals__name\",)\n+ inlines = (DoseGroupInline,)\n+\n+\n+class EndpointGroupInline(admin.TabularInline):\n+ model = models.EndpointGroup\n+ extra = 0\n \n \n @admin.register(models.Endpoint)\n@@ -71,3 +84,13 @@\n )\n list_filter = (\"system\", \"organ\", \"data_extracted\", \"assessment_id\")\n search_fields = (\"name\",)\n+ raw_id_fields = (\n+ \"assessment\",\n+ \"animal_group\",\n+ \"system_term\",\n+ \"organ_term\",\n+ \"effect_term\",\n+ \"effect_subtype_term\",\n+ \"name_term\",\n+ )\n+ inlines = (EndpointGroupInline,)\ndiff --git a/hawc/services/utils/git.py b/hawc/services/utils/git.py\n--- a/hawc/services/utils/git.py\n+++ b/hawc/services/utils/git.py\n@@ -16,7 +16,7 @@\n A Commit instance\n \"\"\"\n cmd = \"git log -1 --format=%H\"\n- sha = subprocess.check_output(cmd.split(), cwd=cwd).decode().strip()[:12]\n+ sha = subprocess.check_output(cmd.split(), cwd=cwd).decode().strip()[:8]\n cmd = \"git show -s --format=%ct\"\n dt = datetime.fromtimestamp(\n int(subprocess.check_output(cmd.split(), cwd=cwd).decode().strip())\n", "issue": "dosing regime dose groups hotfix\nWe had a reported data corruption issue where a user edited content in a dosing regime and then after saving, we found multiple endpoint-groups with the same endpoint group id, which shouldn't be possible.\r\n\r\nAfter investigation, we found it was an error in the signal which keeps dose-groups and endpoint-groups synced. If there were multiple representation of dose-groups, for example 5 dose-groups and 2 units, then hawc would create 10 endpoint-groups instead of 5. Further, it would create these even for endpoints where data is not extracted.\r\n\r\nHere we fix this issue and write a few tests.\n", "before_files": [{"content": "from django.contrib import admin\n\nfrom . 
import models\n\n\[email protected](models.Experiment)\nclass ExperimentAdmin(admin.ModelAdmin):\n list_display = (\n \"id\",\n \"study\",\n \"name\",\n \"type\",\n \"has_multiple_generations\",\n \"chemical\",\n \"cas\",\n \"created\",\n )\n list_filter = (\"type\", \"has_multiple_generations\", \"chemical\", \"study__assessment\")\n search_fields = (\n \"study__short_citation\",\n \"name\",\n )\n\n\[email protected](models.AnimalGroup)\nclass AnimalGroupAdmin(admin.ModelAdmin):\n list_display = (\n \"id\",\n \"experiment\",\n \"name\",\n \"species\",\n \"strain\",\n \"sex\",\n \"created\",\n )\n list_filter = (\"species\", \"strain\", \"sex\", \"experiment__study__assessment_id\")\n search_fields = (\"name\",)\n\n\[email protected](models.DosingRegime)\nclass DosingRegimeAdmin(admin.ModelAdmin):\n list_display = (\n \"id\",\n \"dosed_animals\",\n \"route_of_exposure\",\n \"duration_exposure\",\n \"num_dose_groups\",\n \"created\",\n )\n list_filter = (\n \"route_of_exposure\",\n \"num_dose_groups\",\n \"dosed_animals__experiment__study__assessment_id\",\n )\n list_select_related = (\"dosed_animals\",)\n search_fields = (\"dosed_animals__name\",)\n\n\[email protected](models.Endpoint)\nclass EndpointAdmin(admin.ModelAdmin):\n list_display = (\n \"id\",\n \"assessment_id\",\n \"animal_group\",\n \"name\",\n \"system\",\n \"organ\",\n \"effect\",\n \"data_extracted\",\n \"created\",\n )\n list_filter = (\"system\", \"organ\", \"data_extracted\", \"assessment_id\")\n search_fields = (\"name\",)\n", "path": "hawc/apps/animal/admin.py"}, {"content": "import subprocess\nfrom datetime import datetime\n\nfrom pydantic import BaseModel\n\n\nclass Commit(BaseModel):\n sha: str\n dt: datetime\n\n @classmethod\n def current(cls, cwd: str = \".\") -> \"Commit\":\n \"\"\"Return information on the last commit at the repository path desired.\n\n Returns:\n A Commit instance\n \"\"\"\n cmd = \"git log -1 --format=%H\"\n sha = subprocess.check_output(cmd.split(), cwd=cwd).decode().strip()[:12]\n cmd = \"git show -s --format=%ct\"\n dt = datetime.fromtimestamp(\n int(subprocess.check_output(cmd.split(), cwd=cwd).decode().strip())\n )\n return cls(sha=sha, dt=dt)\n", "path": "hawc/services/utils/git.py"}], "after_files": [{"content": "from django.contrib import admin\n\nfrom . 
import models\n\n\[email protected](models.Experiment)\nclass ExperimentAdmin(admin.ModelAdmin):\n list_display = (\n \"id\",\n \"study\",\n \"name\",\n \"type\",\n \"has_multiple_generations\",\n \"chemical\",\n \"cas\",\n \"created\",\n )\n list_filter = (\"type\", \"has_multiple_generations\", \"chemical\", \"study__assessment\")\n search_fields = (\n \"study__short_citation\",\n \"name\",\n )\n\n\[email protected](models.AnimalGroup)\nclass AnimalGroupAdmin(admin.ModelAdmin):\n list_display = (\n \"id\",\n \"experiment\",\n \"name\",\n \"species\",\n \"strain\",\n \"sex\",\n \"created\",\n )\n list_filter = (\"species\", \"strain\", \"sex\", \"experiment__study__assessment_id\")\n search_fields = (\"name\",)\n raw_id_fields = (\"experiment\", \"species\", \"strain\", \"dosing_regime\", \"siblings\")\n\n\nclass DoseGroupInline(admin.TabularInline):\n model = models.DoseGroup\n raw_id_fields = (\"dose_units\",)\n extra = 0\n\n\[email protected](models.DosingRegime)\nclass DosingRegimeAdmin(admin.ModelAdmin):\n list_display = (\n \"id\",\n \"dosed_animals\",\n \"route_of_exposure\",\n \"duration_exposure\",\n \"num_dose_groups\",\n \"created\",\n )\n list_filter = (\n \"route_of_exposure\",\n \"num_dose_groups\",\n \"dosed_animals__experiment__study__assessment_id\",\n )\n list_select_related = (\"dosed_animals\",)\n search_fields = (\"dosed_animals__name\",)\n inlines = (DoseGroupInline,)\n\n\nclass EndpointGroupInline(admin.TabularInline):\n model = models.EndpointGroup\n extra = 0\n\n\[email protected](models.Endpoint)\nclass EndpointAdmin(admin.ModelAdmin):\n list_display = (\n \"id\",\n \"assessment_id\",\n \"animal_group\",\n \"name\",\n \"system\",\n \"organ\",\n \"effect\",\n \"data_extracted\",\n \"created\",\n )\n list_filter = (\"system\", \"organ\", \"data_extracted\", \"assessment_id\")\n search_fields = (\"name\",)\n raw_id_fields = (\n \"assessment\",\n \"animal_group\",\n \"system_term\",\n \"organ_term\",\n \"effect_term\",\n \"effect_subtype_term\",\n \"name_term\",\n )\n inlines = (EndpointGroupInline,)\n", "path": "hawc/apps/animal/admin.py"}, {"content": "import subprocess\nfrom datetime import datetime\n\nfrom pydantic import BaseModel\n\n\nclass Commit(BaseModel):\n sha: str\n dt: datetime\n\n @classmethod\n def current(cls, cwd: str = \".\") -> \"Commit\":\n \"\"\"Return information on the last commit at the repository path desired.\n\n Returns:\n A Commit instance\n \"\"\"\n cmd = \"git log -1 --format=%H\"\n sha = subprocess.check_output(cmd.split(), cwd=cwd).decode().strip()[:8]\n cmd = \"git show -s --format=%ct\"\n dt = datetime.fromtimestamp(\n int(subprocess.check_output(cmd.split(), cwd=cwd).decode().strip())\n )\n return cls(sha=sha, dt=dt)\n", "path": "hawc/services/utils/git.py"}]}
| 1,154 | 504 |
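Note that the golden diff in this record only improves the Django admin (inlines and `raw_id_fields`) so the affected rows can be inspected and repaired; the signal bug described in the issue is handled elsewhere. The arithmetic the issue describes can be restated in a few lines — everything below is a hypothetical sketch, not hawc code, and the field names are assumed:

```python
from dataclasses import dataclass

@dataclass(frozen=True)
class DoseGroupRow:
    dose_group_id: int   # index of the dose group (0..4 in the example)
    dose_units_id: int   # unit the dose is expressed in

def expected_endpoint_groups(rows, data_extracted):
    # One endpoint-group per dose-group *index*, and only when data was
    # actually extracted for the endpoint -- not one per DoseGroup row.
    if not data_extracted:
        return 0
    return len({row.dose_group_id for row in rows})

rows = [DoseGroupRow(i, u) for i in range(5) for u in (1, 2)]   # 5 groups x 2 units = 10 rows
assert expected_endpoint_groups(rows, data_extracted=True) == 5    # not 10
assert expected_endpoint_groups(rows, data_extracted=False) == 0
```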
gh_patches_debug_3278
|
rasdani/github-patches
|
git_diff
|
certbot__certbot-7294
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Certbot's Apache plugin doesn't work on Scientific Linux
See https://community.letsencrypt.org/t/noinstallationerror-cannot-find-apache-executable-apache2ctl/97980.
This should be fixable by adding an override in https://github.com/certbot/certbot/blob/master/certbot-apache/certbot_apache/entrypoint.py#L17.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `certbot-apache/certbot_apache/entrypoint.py`
Content:
```
1 """ Entry point for Apache Plugin """
2 # Pylint does not like disutils.version when running inside a venv.
3 # See: https://github.com/PyCQA/pylint/issues/73
4 from distutils.version import LooseVersion # pylint: disable=no-name-in-module,import-error
5
6 from certbot import util
7
8 from certbot_apache import configurator
9 from certbot_apache import override_arch
10 from certbot_apache import override_fedora
11 from certbot_apache import override_darwin
12 from certbot_apache import override_debian
13 from certbot_apache import override_centos
14 from certbot_apache import override_gentoo
15 from certbot_apache import override_suse
16
17 OVERRIDE_CLASSES = {
18 "arch": override_arch.ArchConfigurator,
19 "darwin": override_darwin.DarwinConfigurator,
20 "debian": override_debian.DebianConfigurator,
21 "ubuntu": override_debian.DebianConfigurator,
22 "centos": override_centos.CentOSConfigurator,
23 "centos linux": override_centos.CentOSConfigurator,
24 "fedora_old": override_centos.CentOSConfigurator,
25 "fedora": override_fedora.FedoraConfigurator,
26 "ol": override_centos.CentOSConfigurator,
27 "red hat enterprise linux server": override_centos.CentOSConfigurator,
28 "rhel": override_centos.CentOSConfigurator,
29 "amazon": override_centos.CentOSConfigurator,
30 "gentoo": override_gentoo.GentooConfigurator,
31 "gentoo base system": override_gentoo.GentooConfigurator,
32 "opensuse": override_suse.OpenSUSEConfigurator,
33 "suse": override_suse.OpenSUSEConfigurator,
34 }
35
36
37 def get_configurator():
38 """ Get correct configurator class based on the OS fingerprint """
39 os_name, os_version = util.get_os_info()
40 os_name = os_name.lower()
41 override_class = None
42
43 # Special case for older Fedora versions
44 if os_name == 'fedora' and LooseVersion(os_version) < LooseVersion('29'):
45 os_name = 'fedora_old'
46
47 try:
48 override_class = OVERRIDE_CLASSES[os_name]
49 except KeyError:
50 # OS not found in the list
51 os_like = util.get_systemd_os_like()
52 if os_like:
53 for os_name in os_like:
54 if os_name in OVERRIDE_CLASSES.keys():
55 override_class = OVERRIDE_CLASSES[os_name]
56 if not override_class:
57 # No override class found, return the generic configurator
58 override_class = configurator.ApacheConfigurator
59 return override_class
60
61
62 ENTRYPOINT = get_configurator()
63
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/certbot-apache/certbot_apache/entrypoint.py b/certbot-apache/certbot_apache/entrypoint.py
--- a/certbot-apache/certbot_apache/entrypoint.py
+++ b/certbot-apache/certbot_apache/entrypoint.py
@@ -31,6 +31,8 @@
"gentoo base system": override_gentoo.GentooConfigurator,
"opensuse": override_suse.OpenSUSEConfigurator,
"suse": override_suse.OpenSUSEConfigurator,
+ "scientific": override_centos.CentOSConfigurator,
+ "scientific linux": override_centos.CentOSConfigurator,
}
|
{"golden_diff": "diff --git a/certbot-apache/certbot_apache/entrypoint.py b/certbot-apache/certbot_apache/entrypoint.py\n--- a/certbot-apache/certbot_apache/entrypoint.py\n+++ b/certbot-apache/certbot_apache/entrypoint.py\n@@ -31,6 +31,8 @@\n \"gentoo base system\": override_gentoo.GentooConfigurator,\n \"opensuse\": override_suse.OpenSUSEConfigurator,\n \"suse\": override_suse.OpenSUSEConfigurator,\n+ \"scientific\": override_centos.CentOSConfigurator,\n+ \"scientific linux\": override_centos.CentOSConfigurator,\n }\n", "issue": "Certbot's Apache plugin doesn't work on Scientific Linux\nSee https://community.letsencrypt.org/t/noinstallationerror-cannot-find-apache-executable-apache2ctl/97980.\r\n\r\nThis should be fixable by adding an override in https://github.com/certbot/certbot/blob/master/certbot-apache/certbot_apache/entrypoint.py#L17.\n", "before_files": [{"content": "\"\"\" Entry point for Apache Plugin \"\"\"\n# Pylint does not like disutils.version when running inside a venv.\n# See: https://github.com/PyCQA/pylint/issues/73\nfrom distutils.version import LooseVersion # pylint: disable=no-name-in-module,import-error\n\nfrom certbot import util\n\nfrom certbot_apache import configurator\nfrom certbot_apache import override_arch\nfrom certbot_apache import override_fedora\nfrom certbot_apache import override_darwin\nfrom certbot_apache import override_debian\nfrom certbot_apache import override_centos\nfrom certbot_apache import override_gentoo\nfrom certbot_apache import override_suse\n\nOVERRIDE_CLASSES = {\n \"arch\": override_arch.ArchConfigurator,\n \"darwin\": override_darwin.DarwinConfigurator,\n \"debian\": override_debian.DebianConfigurator,\n \"ubuntu\": override_debian.DebianConfigurator,\n \"centos\": override_centos.CentOSConfigurator,\n \"centos linux\": override_centos.CentOSConfigurator,\n \"fedora_old\": override_centos.CentOSConfigurator,\n \"fedora\": override_fedora.FedoraConfigurator,\n \"ol\": override_centos.CentOSConfigurator,\n \"red hat enterprise linux server\": override_centos.CentOSConfigurator,\n \"rhel\": override_centos.CentOSConfigurator,\n \"amazon\": override_centos.CentOSConfigurator,\n \"gentoo\": override_gentoo.GentooConfigurator,\n \"gentoo base system\": override_gentoo.GentooConfigurator,\n \"opensuse\": override_suse.OpenSUSEConfigurator,\n \"suse\": override_suse.OpenSUSEConfigurator,\n}\n\n\ndef get_configurator():\n \"\"\" Get correct configurator class based on the OS fingerprint \"\"\"\n os_name, os_version = util.get_os_info()\n os_name = os_name.lower()\n override_class = None\n\n # Special case for older Fedora versions\n if os_name == 'fedora' and LooseVersion(os_version) < LooseVersion('29'):\n os_name = 'fedora_old'\n\n try:\n override_class = OVERRIDE_CLASSES[os_name]\n except KeyError:\n # OS not found in the list\n os_like = util.get_systemd_os_like()\n if os_like:\n for os_name in os_like:\n if os_name in OVERRIDE_CLASSES.keys():\n override_class = OVERRIDE_CLASSES[os_name]\n if not override_class:\n # No override class found, return the generic configurator\n override_class = configurator.ApacheConfigurator\n return override_class\n\n\nENTRYPOINT = get_configurator()\n", "path": "certbot-apache/certbot_apache/entrypoint.py"}], "after_files": [{"content": "\"\"\" Entry point for Apache Plugin \"\"\"\n# Pylint does not like disutils.version when running inside a venv.\n# See: https://github.com/PyCQA/pylint/issues/73\nfrom distutils.version import LooseVersion # pylint: disable=no-name-in-module,import-error\n\nfrom certbot 
import util\n\nfrom certbot_apache import configurator\nfrom certbot_apache import override_arch\nfrom certbot_apache import override_fedora\nfrom certbot_apache import override_darwin\nfrom certbot_apache import override_debian\nfrom certbot_apache import override_centos\nfrom certbot_apache import override_gentoo\nfrom certbot_apache import override_suse\n\nOVERRIDE_CLASSES = {\n \"arch\": override_arch.ArchConfigurator,\n \"darwin\": override_darwin.DarwinConfigurator,\n \"debian\": override_debian.DebianConfigurator,\n \"ubuntu\": override_debian.DebianConfigurator,\n \"centos\": override_centos.CentOSConfigurator,\n \"centos linux\": override_centos.CentOSConfigurator,\n \"fedora_old\": override_centos.CentOSConfigurator,\n \"fedora\": override_fedora.FedoraConfigurator,\n \"ol\": override_centos.CentOSConfigurator,\n \"red hat enterprise linux server\": override_centos.CentOSConfigurator,\n \"rhel\": override_centos.CentOSConfigurator,\n \"amazon\": override_centos.CentOSConfigurator,\n \"gentoo\": override_gentoo.GentooConfigurator,\n \"gentoo base system\": override_gentoo.GentooConfigurator,\n \"opensuse\": override_suse.OpenSUSEConfigurator,\n \"suse\": override_suse.OpenSUSEConfigurator,\n \"scientific\": override_centos.CentOSConfigurator,\n \"scientific linux\": override_centos.CentOSConfigurator,\n}\n\n\ndef get_configurator():\n \"\"\" Get correct configurator class based on the OS fingerprint \"\"\"\n os_name, os_version = util.get_os_info()\n os_name = os_name.lower()\n override_class = None\n\n # Special case for older Fedora versions\n if os_name == 'fedora' and LooseVersion(os_version) < LooseVersion('29'):\n os_name = 'fedora_old'\n\n try:\n override_class = OVERRIDE_CLASSES[os_name]\n except KeyError:\n # OS not found in the list\n os_like = util.get_systemd_os_like()\n if os_like:\n for os_name in os_like:\n if os_name in OVERRIDE_CLASSES.keys():\n override_class = OVERRIDE_CLASSES[os_name]\n if not override_class:\n # No override class found, return the generic configurator\n override_class = configurator.ApacheConfigurator\n return override_class\n\n\nENTRYPOINT = get_configurator()\n", "path": "certbot-apache/certbot_apache/entrypoint.py"}]}
| 1,055 | 155 |
gh_patches_debug_4216
|
rasdani/github-patches
|
git_diff
|
great-expectations__great_expectations-4055
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
get_validator method does not work
Hello!
I have a problem with the get_validator component.
Here’s my code:
```
batch_request = BatchRequest(
datasource_name="redshift_",
data_connector_name="default_inferred_data_connector_name",
data_asset_name="daily_chargeback_table_v1", # this is the name of the table you want to retrieve
)
context.create_expectation_suite(
expectation_suite_name="test_suite", overwrite_existing=True
)
validator = context.get_validator(
batch_request=batch_request, expectation_suite_name="test_suite"
)
print(validator.head())
```
I get this exception:
```
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-67-16f90e0aa558> in <module>
8 )
9 validator = context.get_validator(
---> 10 batch_request=batch_request, expectation_suite_name="test_suite"
11 )
12 print(validator.head())
.
.
.
~/anaconda3/lib/python3.7/site-packages/great_expectations/execution_engine/sqlalchemy_execution_engine.py in _build_selectable_from_batch_spec(self, batch_spec)
979 )
980 .where(
--> 981 sa.and_(
982 split_clause,
983 sampler_fn(**batch_spec["sampling_kwargs"]),
TypeError: table() got an unexpected keyword argument 'schema'
```
My Datasource configuration like:
```
name: redshift_
class_name: Datasource
execution_engine:
class_name: SqlAlchemyExecutionEngine
credentials:
host: redshift_host
port: '5443'
username: username
password: password
database: dbname
query:
sslmode: prefer
drivername: postgresql+psycopg2
data_connectors:
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
default_inferred_data_connector_name:
class_name: InferredAssetSqlDataConnector
name: whole_table
```
My environment:
MacOS
python 3.7.4
great_expectations 0.13.34
I will be grateful for any help.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import find_packages, setup
2
3 import versioneer
4
5 # Parse requirements.txt
6 with open("requirements.txt") as f:
7 required = f.read().splitlines()
8
9 # try:
10 # import pypandoc
11 # long_description = pypandoc.convert_file('README.md', 'rst')
12 # except (IOError, ImportError):
13 long_description = "Always know what to expect from your data. (See https://github.com/great-expectations/great_expectations for full description)."
14
15 config = {
16 "description": "Always know what to expect from your data.",
17 "author": "The Great Expectations Team",
18 "url": "https://github.com/great-expectations/great_expectations",
19 "author_email": "[email protected]",
20 "version": versioneer.get_version(),
21 "cmdclass": versioneer.get_cmdclass(),
22 "install_requires": required,
23 "extras_require": {
24 "spark": ["pyspark>=2.3.2"],
25 "sqlalchemy": ["sqlalchemy>=1.3.16"],
26 "airflow": ["apache-airflow[s3]>=1.9.0", "boto3>=1.7.3"],
27 "gcp": [
28 "google-cloud>=0.34.0",
29 "google-cloud-storage>=1.28.0",
30 "google-cloud-secret-manager>=1.0.0",
31 "pybigquery==0.4.15",
32 ],
33 "redshift": ["psycopg2>=2.8"],
34 "s3": ["boto3>=1.14"],
35 "aws_secrets": ["boto3>=1.8.7"],
36 "azure_secrets": ["azure-identity>=1.0.0", "azure-keyvault-secrets>=4.0.0"],
37 "snowflake": ["snowflake-sqlalchemy>=1.2"],
38 },
39 "packages": find_packages(exclude=["contrib*", "docs*", "tests*", "examples*"]),
40 "entry_points": {
41 "console_scripts": ["great_expectations=great_expectations.cli:main"]
42 },
43 "name": "great_expectations",
44 "long_description": long_description,
45 "license": "Apache-2.0",
46 "keywords": "data science testing pipeline data quality dataquality validation datavalidation",
47 "include_package_data": True,
48 "classifiers": [
49 "Development Status :: 4 - Beta",
50 "Intended Audience :: Developers",
51 "Intended Audience :: Science/Research",
52 "Intended Audience :: Other Audience",
53 "Topic :: Scientific/Engineering",
54 "Topic :: Software Development",
55 "Topic :: Software Development :: Testing",
56 "License :: OSI Approved :: Apache Software License",
57 "Programming Language :: Python :: 3",
58 "Programming Language :: Python :: 3.6",
59 "Programming Language :: Python :: 3.7",
60 "Programming Language :: Python :: 3.8",
61 "Programming Language :: Python :: 3.9",
62 ],
63 }
64
65 setup(**config)
66
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
"install_requires": required,
"extras_require": {
"spark": ["pyspark>=2.3.2"],
- "sqlalchemy": ["sqlalchemy>=1.3.16"],
+ "sqlalchemy": ["sqlalchemy>=1.3.18"],
"airflow": ["apache-airflow[s3]>=1.9.0", "boto3>=1.7.3"],
"gcp": [
"google-cloud>=0.34.0",
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -22,7 +22,7 @@\n \"install_requires\": required,\n \"extras_require\": {\n \"spark\": [\"pyspark>=2.3.2\"],\n- \"sqlalchemy\": [\"sqlalchemy>=1.3.16\"],\n+ \"sqlalchemy\": [\"sqlalchemy>=1.3.18\"],\n \"airflow\": [\"apache-airflow[s3]>=1.9.0\", \"boto3>=1.7.3\"],\n \"gcp\": [\n \"google-cloud>=0.34.0\",\n", "issue": "get_validator method do not work\nHello!\r\nI have a problem with get_validator component.\r\n\r\nHere\u2019s my code:\r\n```\r\nbatch_request = BatchRequest(\r\n datasource_name=\"redshift_\",\r\n data_connector_name=\"default_inferred_data_connector_name\",\r\n data_asset_name=\"daily_chargeback_table_v1\", # this is the name of the table you want to retrieve\r\n)\r\ncontext.create_expectation_suite(\r\n expectation_suite_name=\"test_suite\", overwrite_existing=True\r\n)\r\nvalidator = context.get_validator(\r\n batch_request=batch_request, expectation_suite_name=\"test_suite\"\r\n)\r\nprint(validator.head())\r\n```\r\n\r\nI get this exception:\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n<ipython-input-67-16f90e0aa558> in <module>\r\n 8 )\r\n 9 validator = context.get_validator(\r\n---> 10 batch_request=batch_request, expectation_suite_name=\"test_suite\"\r\n 11 )\r\n 12 print(validator.head())\r\n.\r\n.\r\n.\r\n\r\n~/anaconda3/lib/python3.7/site-packages/great_expectations/execution_engine/sqlalchemy_execution_engine.py in _build_selectable_from_batch_spec(self, batch_spec)\r\n 979 )\r\n 980 .where(\r\n--> 981 sa.and_(\r\n 982 split_clause,\r\n 983 sampler_fn(**batch_spec[\"sampling_kwargs\"]),\r\nTypeError: table() got an unexpected keyword argument 'schema'\r\n```\r\n\r\nMy Datasource configuration like:\r\n```\r\nname: redshift_\r\nclass_name: Datasource\r\nexecution_engine:\r\n class_name: SqlAlchemyExecutionEngine\r\n credentials:\r\n host: redshift_host\r\n port: '5443'\r\n username: username\r\n password: password\r\n database: dbname\r\n query:\r\n sslmode: prefer\r\n drivername: postgresql+psycopg2\r\ndata_connectors:\r\n default_runtime_data_connector_name:\r\n class_name: RuntimeDataConnector\r\n batch_identifiers:\r\n - default_identifier_name\r\n default_inferred_data_connector_name:\r\n class_name: InferredAssetSqlDataConnector\r\n name: whole_table\r\n```\r\n\r\nMy environment:\r\nMacOS\r\npython 3.7.4\r\ngreat_expectations 0.13.34\r\n\r\nI will be grateful for any help.\n", "before_files": [{"content": "from setuptools import find_packages, setup\n\nimport versioneer\n\n# Parse requirements.txt\nwith open(\"requirements.txt\") as f:\n required = f.read().splitlines()\n\n# try:\n# import pypandoc\n# long_description = pypandoc.convert_file('README.md', 'rst')\n# except (IOError, ImportError):\nlong_description = \"Always know what to expect from your data. 
(See https://github.com/great-expectations/great_expectations for full description).\"\n\nconfig = {\n \"description\": \"Always know what to expect from your data.\",\n \"author\": \"The Great Expectations Team\",\n \"url\": \"https://github.com/great-expectations/great_expectations\",\n \"author_email\": \"[email protected]\",\n \"version\": versioneer.get_version(),\n \"cmdclass\": versioneer.get_cmdclass(),\n \"install_requires\": required,\n \"extras_require\": {\n \"spark\": [\"pyspark>=2.3.2\"],\n \"sqlalchemy\": [\"sqlalchemy>=1.3.16\"],\n \"airflow\": [\"apache-airflow[s3]>=1.9.0\", \"boto3>=1.7.3\"],\n \"gcp\": [\n \"google-cloud>=0.34.0\",\n \"google-cloud-storage>=1.28.0\",\n \"google-cloud-secret-manager>=1.0.0\",\n \"pybigquery==0.4.15\",\n ],\n \"redshift\": [\"psycopg2>=2.8\"],\n \"s3\": [\"boto3>=1.14\"],\n \"aws_secrets\": [\"boto3>=1.8.7\"],\n \"azure_secrets\": [\"azure-identity>=1.0.0\", \"azure-keyvault-secrets>=4.0.0\"],\n \"snowflake\": [\"snowflake-sqlalchemy>=1.2\"],\n },\n \"packages\": find_packages(exclude=[\"contrib*\", \"docs*\", \"tests*\", \"examples*\"]),\n \"entry_points\": {\n \"console_scripts\": [\"great_expectations=great_expectations.cli:main\"]\n },\n \"name\": \"great_expectations\",\n \"long_description\": long_description,\n \"license\": \"Apache-2.0\",\n \"keywords\": \"data science testing pipeline data quality dataquality validation datavalidation\",\n \"include_package_data\": True,\n \"classifiers\": [\n \"Development Status :: 4 - Beta\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Science/Research\",\n \"Intended Audience :: Other Audience\",\n \"Topic :: Scientific/Engineering\",\n \"Topic :: Software Development\",\n \"Topic :: Software Development :: Testing\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n ],\n}\n\nsetup(**config)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import find_packages, setup\n\nimport versioneer\n\n# Parse requirements.txt\nwith open(\"requirements.txt\") as f:\n required = f.read().splitlines()\n\n# try:\n# import pypandoc\n# long_description = pypandoc.convert_file('README.md', 'rst')\n# except (IOError, ImportError):\nlong_description = \"Always know what to expect from your data. 
(See https://github.com/great-expectations/great_expectations for full description).\"\n\nconfig = {\n \"description\": \"Always know what to expect from your data.\",\n \"author\": \"The Great Expectations Team\",\n \"url\": \"https://github.com/great-expectations/great_expectations\",\n \"author_email\": \"[email protected]\",\n \"version\": versioneer.get_version(),\n \"cmdclass\": versioneer.get_cmdclass(),\n \"install_requires\": required,\n \"extras_require\": {\n \"spark\": [\"pyspark>=2.3.2\"],\n \"sqlalchemy\": [\"sqlalchemy>=1.3.18\"],\n \"airflow\": [\"apache-airflow[s3]>=1.9.0\", \"boto3>=1.7.3\"],\n \"gcp\": [\n \"google-cloud>=0.34.0\",\n \"google-cloud-storage>=1.28.0\",\n \"google-cloud-secret-manager>=1.0.0\",\n \"pybigquery==0.4.15\",\n ],\n \"redshift\": [\"psycopg2>=2.8\"],\n \"s3\": [\"boto3>=1.14\"],\n \"aws_secrets\": [\"boto3>=1.8.7\"],\n \"azure_secrets\": [\"azure-identity>=1.0.0\", \"azure-keyvault-secrets>=4.0.0\"],\n \"snowflake\": [\"snowflake-sqlalchemy>=1.2\"],\n },\n \"packages\": find_packages(exclude=[\"contrib*\", \"docs*\", \"tests*\", \"examples*\"]),\n \"entry_points\": {\n \"console_scripts\": [\"great_expectations=great_expectations.cli:main\"]\n },\n \"name\": \"great_expectations\",\n \"long_description\": long_description,\n \"license\": \"Apache-2.0\",\n \"keywords\": \"data science testing pipeline data quality dataquality validation datavalidation\",\n \"include_package_data\": True,\n \"classifiers\": [\n \"Development Status :: 4 - Beta\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Science/Research\",\n \"Intended Audience :: Other Audience\",\n \"Topic :: Scientific/Engineering\",\n \"Topic :: Software Development\",\n \"Topic :: Software Development :: Testing\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n ],\n}\n\nsetup(**config)\n", "path": "setup.py"}]}
| 1,533 | 141 |
gh_patches_debug_4163
|
rasdani/github-patches
|
git_diff
|
comic__grand-challenge.org-1665
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
RawImageFileAdmin has incorrect relation defined
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `app/grandchallenge/cases/admin.py`
Content:
```
1 import re
2 from functools import update_wrapper
3
4 from django.contrib import admin
5 from django.contrib.admin.utils import unquote
6 from django.core.exceptions import PermissionDenied
7 from django.http import Http404, HttpResponse
8 from django.urls import path
9 from django.utils.html import format_html
10 from guardian.admin import GuardedModelAdmin
11
12 from grandchallenge.cases.models import (
13 Image,
14 ImageFile,
15 RawImageFile,
16 RawImageUploadSession,
17 )
18 from grandchallenge.jqfileupload.widgets.uploader import StagedAjaxFile
19 from grandchallenge.subdomains.utils import reverse
20
21
22 class ImageFileInline(admin.StackedInline):
23 model = ImageFile
24 extra = 0
25
26
27 class ImageAdmin(GuardedModelAdmin):
28 search_fields = (
29 "pk",
30 "name",
31 "study__name",
32 "modality__modality",
33 "color_space",
34 "eye_choice",
35 "field_of_view",
36 "stereoscopic_choice",
37 "study__patient__name",
38 )
39 list_filter = (
40 "modality",
41 "color_space",
42 "eye_choice",
43 "field_of_view",
44 "stereoscopic_choice",
45 "archive__slug",
46 )
47 inlines = [ImageFileInline]
48 readonly_fields = ("origin",)
49
50
51 class ImageInline(admin.StackedInline):
52 model = Image
53 extra = 0
54
55
56 class MhdOrRawFilter(admin.SimpleListFilter):
57 """Allow filtering on mhd or raw/zraw files."""
58
59 title = "MHD or RAW file"
60 parameter_name = "mhd_or_raw"
61
62 def lookups(self, request, model_admin):
63 return (("mhd", "MHD file"), ("raw", "RAW/ZRAW file"))
64
65 def queryset(self, request, queryset):
66 if self.value() == "mhd":
67 return queryset.filter(file__endswith=".mhd")
68 if self.value() == "raw":
69 return queryset.filter(file__endswith="raw")
70
71
72 class ImageFileAdmin(GuardedModelAdmin):
73 search_fields = ("pk", "file", "image__name")
74 list_filter = (MhdOrRawFilter,)
75 readonly_fields = ("image",)
76
77
78 class RawImageUploadSessionAdmin(GuardedModelAdmin):
79 ordering = ("-created",)
80 list_display = (
81 "pk",
82 "created",
83 "creator",
84 "status",
85 "error_message",
86 )
87 readonly_fields = (
88 "creator",
89 "status",
90 )
91 list_filter = ("status",)
92 search_fields = (
93 "creator__username",
94 "pk",
95 "error_message",
96 )
97
98
99 class DownloadableFilter(admin.SimpleListFilter):
100 """Allow filtering on downloadable files."""
101
102 title = "Downloadable"
103 parameter_name = "downloadable"
104
105 def lookups(self, request, model_admin):
106 return (("yes", "Yes"),)
107
108 def queryset(self, request, queryset):
109 if self.value() == "yes":
110 return queryset.filter(staged_file_id__isnull=False)
111 return queryset
112
113
114 class RawImageFileAdmin(GuardedModelAdmin):
115 list_filter = (DownloadableFilter,)
116 list_display = ("filename", "upload_session", "download")
117 list_select_related = ("upload_session__archive",)
118 readonly_fields = (
119 "download",
120 "upload_session",
121 )
122 search_fields = ("upload_session__pk", "filename")
123
124 def download(self, instance):
125 if not instance.staged_file_id:
126 return
127 return format_html(
128 f'<a class="button" href={reverse(f"admin:{self.model._meta.app_label}_{self.model._meta.model_name}_download", kwargs={"object_id": instance.pk})}>Download</a>'
129 )
130
131 def download_view(self, request, object_id, **kwargs):
132 obj = self.get_object(request, unquote(object_id), None)
133 if not self.has_view_or_change_permission(request, obj):
134 raise PermissionDenied
135
136 try:
137 saf = StagedAjaxFile(obj.staged_file_id).open()
138 response = HttpResponse(
139 saf.read(), content_type="application/dicom"
140 )
141 response[
142 "Content-Disposition"
143 ] = f'attachment; filename="{obj.filename}"'
144 return response
145 except Exception:
146 raise Http404("File not found")
147
148 def get_urls(self):
149 def wrap(view):
150 def wrapper(*args, **kwargs):
151 return self.admin_site.admin_view(view)(*args, **kwargs)
152
153 wrapper.model_admin = self
154 return update_wrapper(wrapper, view)
155
156 urls = super().get_urls()
157
158 download_url = path(
159 "<path:object_id>/download/",
160 wrap(self.download_view),
161 name=f"{self.model._meta.app_label}_{self.model._meta.model_name}_download",
162 )
163 # Currently the last url in ModelAdmin's get-urls is this:
164 # # For backwards compatibility (was the change url before 1.9)
165 # path('<path:object_id>/', wrap(RedirectView.as_view(
166 # pattern_name='%s:%s_%s_change' % ((self.admin_site.name,) + info)
167 # ))),
168 # This would also match <path:object_id>/download/ and is only there for
169 # old django versions, which we do not use. Replace it if it is there.
170 # Otherwise just append the download_url to the list.
171 if urls[-1].pattern.regex == re.compile("^(?P<object_id>.+)/$"):
172 urls[-1] = download_url
173 else:
174 urls.append(download_url)
175
176 return urls
177
178
179 admin.site.register(Image, ImageAdmin)
180 admin.site.register(ImageFile, ImageFileAdmin)
181 admin.site.register(RawImageUploadSession, RawImageUploadSessionAdmin)
182 admin.site.register(RawImageFile, RawImageFileAdmin)
183
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/app/grandchallenge/cases/admin.py b/app/grandchallenge/cases/admin.py
--- a/app/grandchallenge/cases/admin.py
+++ b/app/grandchallenge/cases/admin.py
@@ -114,7 +114,6 @@
class RawImageFileAdmin(GuardedModelAdmin):
list_filter = (DownloadableFilter,)
list_display = ("filename", "upload_session", "download")
- list_select_related = ("upload_session__archive",)
readonly_fields = (
"download",
"upload_session",
|
{"golden_diff": "diff --git a/app/grandchallenge/cases/admin.py b/app/grandchallenge/cases/admin.py\n--- a/app/grandchallenge/cases/admin.py\n+++ b/app/grandchallenge/cases/admin.py\n@@ -114,7 +114,6 @@\n class RawImageFileAdmin(GuardedModelAdmin):\n list_filter = (DownloadableFilter,)\n list_display = (\"filename\", \"upload_session\", \"download\")\n- list_select_related = (\"upload_session__archive\",)\n readonly_fields = (\n \"download\",\n \"upload_session\",\n", "issue": "RawImageFileAdmin has incorrect relation defined\n\n", "before_files": [{"content": "import re\nfrom functools import update_wrapper\n\nfrom django.contrib import admin\nfrom django.contrib.admin.utils import unquote\nfrom django.core.exceptions import PermissionDenied\nfrom django.http import Http404, HttpResponse\nfrom django.urls import path\nfrom django.utils.html import format_html\nfrom guardian.admin import GuardedModelAdmin\n\nfrom grandchallenge.cases.models import (\n Image,\n ImageFile,\n RawImageFile,\n RawImageUploadSession,\n)\nfrom grandchallenge.jqfileupload.widgets.uploader import StagedAjaxFile\nfrom grandchallenge.subdomains.utils import reverse\n\n\nclass ImageFileInline(admin.StackedInline):\n model = ImageFile\n extra = 0\n\n\nclass ImageAdmin(GuardedModelAdmin):\n search_fields = (\n \"pk\",\n \"name\",\n \"study__name\",\n \"modality__modality\",\n \"color_space\",\n \"eye_choice\",\n \"field_of_view\",\n \"stereoscopic_choice\",\n \"study__patient__name\",\n )\n list_filter = (\n \"modality\",\n \"color_space\",\n \"eye_choice\",\n \"field_of_view\",\n \"stereoscopic_choice\",\n \"archive__slug\",\n )\n inlines = [ImageFileInline]\n readonly_fields = (\"origin\",)\n\n\nclass ImageInline(admin.StackedInline):\n model = Image\n extra = 0\n\n\nclass MhdOrRawFilter(admin.SimpleListFilter):\n \"\"\"Allow filtering on mhd or raw/zraw files.\"\"\"\n\n title = \"MHD or RAW file\"\n parameter_name = \"mhd_or_raw\"\n\n def lookups(self, request, model_admin):\n return ((\"mhd\", \"MHD file\"), (\"raw\", \"RAW/ZRAW file\"))\n\n def queryset(self, request, queryset):\n if self.value() == \"mhd\":\n return queryset.filter(file__endswith=\".mhd\")\n if self.value() == \"raw\":\n return queryset.filter(file__endswith=\"raw\")\n\n\nclass ImageFileAdmin(GuardedModelAdmin):\n search_fields = (\"pk\", \"file\", \"image__name\")\n list_filter = (MhdOrRawFilter,)\n readonly_fields = (\"image\",)\n\n\nclass RawImageUploadSessionAdmin(GuardedModelAdmin):\n ordering = (\"-created\",)\n list_display = (\n \"pk\",\n \"created\",\n \"creator\",\n \"status\",\n \"error_message\",\n )\n readonly_fields = (\n \"creator\",\n \"status\",\n )\n list_filter = (\"status\",)\n search_fields = (\n \"creator__username\",\n \"pk\",\n \"error_message\",\n )\n\n\nclass DownloadableFilter(admin.SimpleListFilter):\n \"\"\"Allow filtering on downloadable files.\"\"\"\n\n title = \"Downloadable\"\n parameter_name = \"downloadable\"\n\n def lookups(self, request, model_admin):\n return ((\"yes\", \"Yes\"),)\n\n def queryset(self, request, queryset):\n if self.value() == \"yes\":\n return queryset.filter(staged_file_id__isnull=False)\n return queryset\n\n\nclass RawImageFileAdmin(GuardedModelAdmin):\n list_filter = (DownloadableFilter,)\n list_display = (\"filename\", \"upload_session\", \"download\")\n list_select_related = (\"upload_session__archive\",)\n readonly_fields = (\n \"download\",\n \"upload_session\",\n )\n search_fields = (\"upload_session__pk\", \"filename\")\n\n def download(self, instance):\n if not 
instance.staged_file_id:\n return\n return format_html(\n f'<a class=\"button\" href={reverse(f\"admin:{self.model._meta.app_label}_{self.model._meta.model_name}_download\", kwargs={\"object_id\": instance.pk})}>Download</a>'\n )\n\n def download_view(self, request, object_id, **kwargs):\n obj = self.get_object(request, unquote(object_id), None)\n if not self.has_view_or_change_permission(request, obj):\n raise PermissionDenied\n\n try:\n saf = StagedAjaxFile(obj.staged_file_id).open()\n response = HttpResponse(\n saf.read(), content_type=\"application/dicom\"\n )\n response[\n \"Content-Disposition\"\n ] = f'attachment; filename=\"{obj.filename}\"'\n return response\n except Exception:\n raise Http404(\"File not found\")\n\n def get_urls(self):\n def wrap(view):\n def wrapper(*args, **kwargs):\n return self.admin_site.admin_view(view)(*args, **kwargs)\n\n wrapper.model_admin = self\n return update_wrapper(wrapper, view)\n\n urls = super().get_urls()\n\n download_url = path(\n \"<path:object_id>/download/\",\n wrap(self.download_view),\n name=f\"{self.model._meta.app_label}_{self.model._meta.model_name}_download\",\n )\n # Currently the last url in ModelAdmin's get-urls is this:\n # # For backwards compatibility (was the change url before 1.9)\n # path('<path:object_id>/', wrap(RedirectView.as_view(\n # pattern_name='%s:%s_%s_change' % ((self.admin_site.name,) + info)\n # ))),\n # This would also match <path:object_id>/download/ and is only there for\n # old django versions, which we do not use. Replace it if it is there.\n # Otherwise just append the download_url to the list.\n if urls[-1].pattern.regex == re.compile(\"^(?P<object_id>.+)/$\"):\n urls[-1] = download_url\n else:\n urls.append(download_url)\n\n return urls\n\n\nadmin.site.register(Image, ImageAdmin)\nadmin.site.register(ImageFile, ImageFileAdmin)\nadmin.site.register(RawImageUploadSession, RawImageUploadSessionAdmin)\nadmin.site.register(RawImageFile, RawImageFileAdmin)\n", "path": "app/grandchallenge/cases/admin.py"}], "after_files": [{"content": "import re\nfrom functools import update_wrapper\n\nfrom django.contrib import admin\nfrom django.contrib.admin.utils import unquote\nfrom django.core.exceptions import PermissionDenied\nfrom django.http import Http404, HttpResponse\nfrom django.urls import path\nfrom django.utils.html import format_html\nfrom guardian.admin import GuardedModelAdmin\n\nfrom grandchallenge.cases.models import (\n Image,\n ImageFile,\n RawImageFile,\n RawImageUploadSession,\n)\nfrom grandchallenge.jqfileupload.widgets.uploader import StagedAjaxFile\nfrom grandchallenge.subdomains.utils import reverse\n\n\nclass ImageFileInline(admin.StackedInline):\n model = ImageFile\n extra = 0\n\n\nclass ImageAdmin(GuardedModelAdmin):\n search_fields = (\n \"pk\",\n \"name\",\n \"study__name\",\n \"modality__modality\",\n \"color_space\",\n \"eye_choice\",\n \"field_of_view\",\n \"stereoscopic_choice\",\n \"study__patient__name\",\n )\n list_filter = (\n \"modality\",\n \"color_space\",\n \"eye_choice\",\n \"field_of_view\",\n \"stereoscopic_choice\",\n \"archive__slug\",\n )\n inlines = [ImageFileInline]\n readonly_fields = (\"origin\",)\n\n\nclass ImageInline(admin.StackedInline):\n model = Image\n extra = 0\n\n\nclass MhdOrRawFilter(admin.SimpleListFilter):\n \"\"\"Allow filtering on mhd or raw/zraw files.\"\"\"\n\n title = \"MHD or RAW file\"\n parameter_name = \"mhd_or_raw\"\n\n def lookups(self, request, model_admin):\n return ((\"mhd\", \"MHD file\"), (\"raw\", \"RAW/ZRAW file\"))\n\n def queryset(self, 
request, queryset):\n if self.value() == \"mhd\":\n return queryset.filter(file__endswith=\".mhd\")\n if self.value() == \"raw\":\n return queryset.filter(file__endswith=\"raw\")\n\n\nclass ImageFileAdmin(GuardedModelAdmin):\n search_fields = (\"pk\", \"file\", \"image__name\")\n list_filter = (MhdOrRawFilter,)\n readonly_fields = (\"image\",)\n\n\nclass RawImageUploadSessionAdmin(GuardedModelAdmin):\n ordering = (\"-created\",)\n list_display = (\n \"pk\",\n \"created\",\n \"creator\",\n \"status\",\n \"error_message\",\n )\n readonly_fields = (\n \"creator\",\n \"status\",\n )\n list_filter = (\"status\",)\n search_fields = (\n \"creator__username\",\n \"pk\",\n \"error_message\",\n )\n\n\nclass DownloadableFilter(admin.SimpleListFilter):\n \"\"\"Allow filtering on downloadable files.\"\"\"\n\n title = \"Downloadable\"\n parameter_name = \"downloadable\"\n\n def lookups(self, request, model_admin):\n return ((\"yes\", \"Yes\"),)\n\n def queryset(self, request, queryset):\n if self.value() == \"yes\":\n return queryset.filter(staged_file_id__isnull=False)\n return queryset\n\n\nclass RawImageFileAdmin(GuardedModelAdmin):\n list_filter = (DownloadableFilter,)\n list_display = (\"filename\", \"upload_session\", \"download\")\n readonly_fields = (\n \"download\",\n \"upload_session\",\n )\n search_fields = (\"upload_session__pk\", \"filename\")\n\n def download(self, instance):\n if not instance.staged_file_id:\n return\n return format_html(\n f'<a class=\"button\" href={reverse(f\"admin:{self.model._meta.app_label}_{self.model._meta.model_name}_download\", kwargs={\"object_id\": instance.pk})}>Download</a>'\n )\n\n def download_view(self, request, object_id, **kwargs):\n obj = self.get_object(request, unquote(object_id), None)\n if not self.has_view_or_change_permission(request, obj):\n raise PermissionDenied\n\n try:\n saf = StagedAjaxFile(obj.staged_file_id).open()\n response = HttpResponse(\n saf.read(), content_type=\"application/dicom\"\n )\n response[\n \"Content-Disposition\"\n ] = f'attachment; filename=\"{obj.filename}\"'\n return response\n except Exception:\n raise Http404(\"File not found\")\n\n def get_urls(self):\n def wrap(view):\n def wrapper(*args, **kwargs):\n return self.admin_site.admin_view(view)(*args, **kwargs)\n\n wrapper.model_admin = self\n return update_wrapper(wrapper, view)\n\n urls = super().get_urls()\n\n download_url = path(\n \"<path:object_id>/download/\",\n wrap(self.download_view),\n name=f\"{self.model._meta.app_label}_{self.model._meta.model_name}_download\",\n )\n # Currently the last url in ModelAdmin's get-urls is this:\n # # For backwards compatibility (was the change url before 1.9)\n # path('<path:object_id>/', wrap(RedirectView.as_view(\n # pattern_name='%s:%s_%s_change' % ((self.admin_site.name,) + info)\n # ))),\n # This would also match <path:object_id>/download/ and is only there for\n # old django versions, which we do not use. Replace it if it is there.\n # Otherwise just append the download_url to the list.\n if urls[-1].pattern.regex == re.compile(\"^(?P<object_id>.+)/$\"):\n urls[-1] = download_url\n else:\n urls.append(download_url)\n\n return urls\n\n\nadmin.site.register(Image, ImageAdmin)\nadmin.site.register(ImageFile, ImageFileAdmin)\nadmin.site.register(RawImageUploadSession, RawImageUploadSessionAdmin)\nadmin.site.register(RawImageFile, RawImageFileAdmin)\n", "path": "app/grandchallenge/cases/admin.py"}]}
| 1,967 | 119 |
gh_patches_debug_2981
|
rasdani/github-patches
|
git_diff
|
aws__aws-cli-573
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
aws ec2 replace-network-acl-entry --protocol ?
How can I specify a protocol? When I specify --protocol tcp or --protocol udp, the command fails:
A client error (InvalidParameterValue) occurred when calling the ReplaceNetworkAclEntry operation: Invalid value 'tcp' for IP protocol. Unknown protocol.
A client error (InvalidParameterValue) occurred when calling the ReplaceNetworkAclEntry operation: Invalid value 'udp' for IP protocol. Unknown protocol.
The command create-network-acl-entry accepts --protocol tcp or --protocol udp.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `awscli/customizations/ec2protocolarg.py`
Content:
```
1 # Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License"). You
4 # may not use this file except in compliance with the License. A copy of
5 # the License is located at
6 #
7 # http://aws.amazon.com/apache2.0/
8 #
9 # or in the "license" file accompanying this file. This file is
10 # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 # ANY KIND, either express or implied. See the License for the specific
12 # language governing permissions and limitations under the License.
13 """
14 This customization allows the user to specify the values "tcp", "udp",
15 or "icmp" as values for the --protocol parameter. The actual Protocol
16 parameter of the operation accepts only integer protocol numbers.
17 """
18
19 def _fix_args(operation, endpoint, params, **kwargs):
20 if 'protocol' in params:
21 if params['protocol'] == 'tcp':
22 params['protocol'] = '6'
23 elif params['protocol'] == 'udp':
24 params['protocol'] = '17'
25 elif params['protocol'] == 'icmp':
26 params['protocol'] = '1'
27 elif params['protocol'] == 'all':
28 params['protocol'] = '-1'
29
30
31 def register_protocol_args(cli):
32 ('before-parameter-build.ec2.RunInstances', _fix_args),
33 cli.register('before-parameter-build.ec2.CreateNetworkAclEntry',
34 _fix_args)
35
36
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/awscli/customizations/ec2protocolarg.py b/awscli/customizations/ec2protocolarg.py
--- a/awscli/customizations/ec2protocolarg.py
+++ b/awscli/customizations/ec2protocolarg.py
@@ -29,7 +29,8 @@
def register_protocol_args(cli):
- ('before-parameter-build.ec2.RunInstances', _fix_args),
cli.register('before-parameter-build.ec2.CreateNetworkAclEntry',
_fix_args)
+ cli.register('before-parameter-build.ec2.ReplaceNetworkAclEntry',
+ _fix_args)
|
{"golden_diff": "diff --git a/awscli/customizations/ec2protocolarg.py b/awscli/customizations/ec2protocolarg.py\n--- a/awscli/customizations/ec2protocolarg.py\n+++ b/awscli/customizations/ec2protocolarg.py\n@@ -29,7 +29,8 @@\n \n \n def register_protocol_args(cli):\n- ('before-parameter-build.ec2.RunInstances', _fix_args),\n cli.register('before-parameter-build.ec2.CreateNetworkAclEntry',\n _fix_args)\n+ cli.register('before-parameter-build.ec2.ReplaceNetworkAclEntry',\n+ _fix_args)\n", "issue": "aws ec2 replace-network-acl-entry --protocol ?\nHow can I specify a protocol? When I specify --protocol tcp or --protocol udp, the command fails:\n\nA client error (InvalidParameterValue) occurred when calling the ReplaceNetworkAclEntry operation: Invalid value 'tcp' for IP protocol. Unknown protocol.\n\nA client error (InvalidParameterValue) occurred when calling the ReplaceNetworkAclEntry operation: Invalid value 'udp' for IP protocol. Unknown protocol.\n\nThe command create-network-acl-entry accepts --protocol tcp or --protocol udp.\n\n", "before_files": [{"content": "# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n#\n# http://aws.amazon.com/apache2.0/\n#\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\n\"\"\"\nThis customization allows the user to specify the values \"tcp\", \"udp\",\nor \"icmp\" as values for the --protocol parameter. The actual Protocol\nparameter of the operation accepts only integer protocol numbers.\n\"\"\"\n\ndef _fix_args(operation, endpoint, params, **kwargs):\n if 'protocol' in params:\n if params['protocol'] == 'tcp':\n params['protocol'] = '6'\n elif params['protocol'] == 'udp':\n params['protocol'] = '17'\n elif params['protocol'] == 'icmp':\n params['protocol'] = '1'\n elif params['protocol'] == 'all':\n params['protocol'] = '-1'\n\n\ndef register_protocol_args(cli):\n ('before-parameter-build.ec2.RunInstances', _fix_args),\n cli.register('before-parameter-build.ec2.CreateNetworkAclEntry',\n _fix_args)\n \n", "path": "awscli/customizations/ec2protocolarg.py"}], "after_files": [{"content": "# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n#\n# http://aws.amazon.com/apache2.0/\n#\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\n\"\"\"\nThis customization allows the user to specify the values \"tcp\", \"udp\",\nor \"icmp\" as values for the --protocol parameter. 
The actual Protocol\nparameter of the operation accepts only integer protocol numbers.\n\"\"\"\n\ndef _fix_args(operation, endpoint, params, **kwargs):\n if 'protocol' in params:\n if params['protocol'] == 'tcp':\n params['protocol'] = '6'\n elif params['protocol'] == 'udp':\n params['protocol'] = '17'\n elif params['protocol'] == 'icmp':\n params['protocol'] = '1'\n elif params['protocol'] == 'all':\n params['protocol'] = '-1'\n\n\ndef register_protocol_args(cli):\n cli.register('before-parameter-build.ec2.CreateNetworkAclEntry',\n _fix_args)\n cli.register('before-parameter-build.ec2.ReplaceNetworkAclEntry',\n _fix_args)\n \n", "path": "awscli/customizations/ec2protocolarg.py"}]}
| 757 | 124 |
gh_patches_debug_15624
|
rasdani/github-patches
|
git_diff
|
saulpw__visidata-1960
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Current HEAD zsh-completion.py needs option_aliases update
**Small description**
`option_aliases` was removed in ce497f444db6d2f3fc0b8309f5ca839196c33c8b but is still referred to in the zsh completion code.
https://github.com/saulpw/visidata/blob/34808745232e798b0f25e893bb444fc9f3c034eb/dev/zsh-completion.py#L11C41-L11C41
I think the script needs a slight rejig to use the (present) `vd` import instead.
I wonder whether this can be included in future CI?
**Expected result**
The command succeeds.
**Actual result**
```
> /build/visidata-src
> Traceback (most recent call last):
> File "/build/visidata-src/dev/zsh-completion.py", line 11, in <module>
> from visidata.main import option_aliases
> ImportError: cannot import name 'option_aliases' from 'visidata.main' (/build/visidata-src/visidata/main.py)
```
**Steps to reproduce**
```
python dev/zsh-completion.py
```
**Additional context**
~~Please include the version of VisiData and Python.~~
https://github.com/saulpw/visidata/tree/34808745232e798b0f25e893bb444fc9f3c034eb but I listed the commit above that causes the breakage — I suspect this is a two minute fix for somebody familiar with the codebase, though not me. I can help with extending CI, though it might just be a case of adding
```yaml
- name: Ensure VisiData can create completions
run: python dev/zsh-completion.py
```
(I guess you might want to run a linter, instead.)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dev/zsh-completion.py`
Content:
```
1 #!/usr/bin/env python
2 from __future__ import unicode_literals
3
4 import os
5 from os.path import dirname as dirn
6 import sys
7 import re
8
9 sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))
10 from visidata import vd
11 from visidata.main import option_aliases
12
13 ZSH_COMPLETION_FILE = "_visidata"
14 ZSH_COMPLETION_TEMPLATE = "dev/zsh-completion.in"
15 pat_class = re.compile("'(.*)'")
16 pat_select = re.compile("^\([^)]*\)")
17
18
19 def generate_completion(opt):
20 prefix = "--" + opt.name
21 shortnames = [key for key, value in option_aliases.items() if value[0] == opt.name]
22 if len(shortnames):
23 if len(shortnames[0]) == 1:
24 shortname = "-" + shortnames[0]
25 else:
26 shortname = "--" + shortnames[0]
27 prefix = "{" + f"{shortname},{prefix}" + "}"
28 if isinstance(opt.value, bool):
29 completion = ""
30 else:
31 completion = ":" + pat_class.findall(str(opt.value.__class__))[0]
32 if opt.name in ["play", "output", "visidata_dir", "config"]:
33 completion += ":_files"
34 elif opt.name in ["plugins_url", "motd_url"]:
35 completion += ":_urls"
36 helpstr = opt.helpstr
37 selections = pat_select.findall(helpstr)
38 if len(selections):
39 completion += f":{selections[0].replace('/', ' ')}"
40 # TODO: use `zstyle ':completion:*' extra-verbose true`
41 # to control the display of default value
42 helpstr = helpstr + f" (default: {opt.value})"
43 helpstr = helpstr.replace("[", "\\[").replace("]", "\\]")
44 return f"{prefix}'[{helpstr}]{completion}'"
45
46
47 flags = [generate_completion(vd._options[opt]["default"]) for opt in vd._options]
48
49 with open(ZSH_COMPLETION_TEMPLATE) as f:
50 template = f.read()
51
52 template = template.replace("{{flags}}", " \\\n ".join(flags))
53
54 with open(ZSH_COMPLETION_FILE, "w") as f:
55 f.write(template)
56
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/dev/zsh-completion.py b/dev/zsh-completion.py
--- a/dev/zsh-completion.py
+++ b/dev/zsh-completion.py
@@ -8,7 +8,6 @@
sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))
from visidata import vd
-from visidata.main import option_aliases
ZSH_COMPLETION_FILE = "_visidata"
ZSH_COMPLETION_TEMPLATE = "dev/zsh-completion.in"
@@ -18,7 +17,9 @@
def generate_completion(opt):
prefix = "--" + opt.name
- shortnames = [key for key, value in option_aliases.items() if value[0] == opt.name]
+ shortnames = [
+ key for key, value in vd.option_aliases.items() if value[0] == opt.name
+ ]
if len(shortnames):
if len(shortnames[0]) == 1:
shortname = "-" + shortnames[0]
|
{"golden_diff": "diff --git a/dev/zsh-completion.py b/dev/zsh-completion.py\n--- a/dev/zsh-completion.py\n+++ b/dev/zsh-completion.py\n@@ -8,7 +8,6 @@\n \n sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))\n from visidata import vd\n-from visidata.main import option_aliases\n \n ZSH_COMPLETION_FILE = \"_visidata\"\n ZSH_COMPLETION_TEMPLATE = \"dev/zsh-completion.in\"\n@@ -18,7 +17,9 @@\n \n def generate_completion(opt):\n prefix = \"--\" + opt.name\n- shortnames = [key for key, value in option_aliases.items() if value[0] == opt.name]\n+ shortnames = [\n+ key for key, value in vd.option_aliases.items() if value[0] == opt.name\n+ ]\n if len(shortnames):\n if len(shortnames[0]) == 1:\n shortname = \"-\" + shortnames[0]\n", "issue": "Current HEAD zsh-completion.py needs option_aliases update\n**Small description**\r\n\r\n`option_aliases` was removed in ce497f444db6d2f3fc0b8309f5ca839196c33c8b but is still referred to in the zsh completion code.\r\n\r\nhttps://github.com/saulpw/visidata/blob/34808745232e798b0f25e893bb444fc9f3c034eb/dev/zsh-completion.py#L11C41-L11C41\r\n\r\nI think the script needs a slight rejig to use the (present) `vd` import instead.\r\n\r\nI wonder whether this can be included in future CI?\r\n\r\n**Expected result**\r\n\r\nThe command succeeds.\r\n\r\n**Actual result**\r\n\r\n```\r\n > /build/visidata-src\r\n > Traceback (most recent call last):\r\n > File \"/build/visidata-src/dev/zsh-completion.py\", line 11, in <module>\r\n > from visidata.main import option_aliases\r\n > ImportError: cannot import name 'option_aliases' from 'visidata.main' (/build/visidata-src/visidata/main.py)\r\n```\r\n\r\n**Steps to reproduce**\r\n\r\n```\r\npython dev/zsh-completion.py\r\n```\r\n\r\n**Additional context**\r\n~~Please include the version of VisiData and Python.~~\r\n\r\nhttps://github.com/saulpw/visidata/tree/34808745232e798b0f25e893bb444fc9f3c034eb but I listed the commit above that causes the breakage \u2014 I suspect this is a two minute fix for somebody familiar with the codebase, though not me. 
I can help with extending CI, though it might just be a case of adding\r\n\r\n```yaml\r\n - name: Ensure VisiData can create completions\r\n run: python dev/zsh-completion.py\r\n```\r\n\r\n(I guess you might want to run a linter, instead.)\n", "before_files": [{"content": "#!/usr/bin/env python\nfrom __future__ import unicode_literals\n\nimport os\nfrom os.path import dirname as dirn\nimport sys\nimport re\n\nsys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))\nfrom visidata import vd\nfrom visidata.main import option_aliases\n\nZSH_COMPLETION_FILE = \"_visidata\"\nZSH_COMPLETION_TEMPLATE = \"dev/zsh-completion.in\"\npat_class = re.compile(\"'(.*)'\")\npat_select = re.compile(\"^\\([^)]*\\)\")\n\n\ndef generate_completion(opt):\n prefix = \"--\" + opt.name\n shortnames = [key for key, value in option_aliases.items() if value[0] == opt.name]\n if len(shortnames):\n if len(shortnames[0]) == 1:\n shortname = \"-\" + shortnames[0]\n else:\n shortname = \"--\" + shortnames[0]\n prefix = \"{\" + f\"{shortname},{prefix}\" + \"}\"\n if isinstance(opt.value, bool):\n completion = \"\"\n else:\n completion = \":\" + pat_class.findall(str(opt.value.__class__))[0]\n if opt.name in [\"play\", \"output\", \"visidata_dir\", \"config\"]:\n completion += \":_files\"\n elif opt.name in [\"plugins_url\", \"motd_url\"]:\n completion += \":_urls\"\n helpstr = opt.helpstr\n selections = pat_select.findall(helpstr)\n if len(selections):\n completion += f\":{selections[0].replace('/', ' ')}\"\n # TODO: use `zstyle ':completion:*' extra-verbose true`\n # to control the display of default value\n helpstr = helpstr + f\" (default: {opt.value})\"\n helpstr = helpstr.replace(\"[\", \"\\\\[\").replace(\"]\", \"\\\\]\")\n return f\"{prefix}'[{helpstr}]{completion}'\"\n\n\nflags = [generate_completion(vd._options[opt][\"default\"]) for opt in vd._options]\n\nwith open(ZSH_COMPLETION_TEMPLATE) as f:\n template = f.read()\n\ntemplate = template.replace(\"{{flags}}\", \" \\\\\\n \".join(flags))\n\nwith open(ZSH_COMPLETION_FILE, \"w\") as f:\n f.write(template)\n", "path": "dev/zsh-completion.py"}], "after_files": [{"content": "#!/usr/bin/env python\nfrom __future__ import unicode_literals\n\nimport os\nfrom os.path import dirname as dirn\nimport sys\nimport re\n\nsys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))\nfrom visidata import vd\n\nZSH_COMPLETION_FILE = \"_visidata\"\nZSH_COMPLETION_TEMPLATE = \"dev/zsh-completion.in\"\npat_class = re.compile(\"'(.*)'\")\npat_select = re.compile(\"^\\([^)]*\\)\")\n\n\ndef generate_completion(opt):\n prefix = \"--\" + opt.name\n shortnames = [\n key for key, value in vd.option_aliases.items() if value[0] == opt.name\n ]\n if len(shortnames):\n if len(shortnames[0]) == 1:\n shortname = \"-\" + shortnames[0]\n else:\n shortname = \"--\" + shortnames[0]\n prefix = \"{\" + f\"{shortname},{prefix}\" + \"}\"\n if isinstance(opt.value, bool):\n completion = \"\"\n else:\n completion = \":\" + pat_class.findall(str(opt.value.__class__))[0]\n if opt.name in [\"play\", \"output\", \"visidata_dir\", \"config\"]:\n completion += \":_files\"\n elif opt.name in [\"plugins_url\", \"motd_url\"]:\n completion += \":_urls\"\n helpstr = opt.helpstr\n selections = pat_select.findall(helpstr)\n if len(selections):\n completion += f\":{selections[0].replace('/', ' ')}\"\n # TODO: use `zstyle ':completion:*' extra-verbose true`\n # to control the display of default value\n helpstr = helpstr + f\" (default: {opt.value})\"\n helpstr = helpstr.replace(\"[\", \"\\\\[\").replace(\"]\", 
\"\\\\]\")\n return f\"{prefix}'[{helpstr}]{completion}'\"\n\n\nflags = [generate_completion(vd._options[opt][\"default\"]) for opt in vd._options]\n\nwith open(ZSH_COMPLETION_TEMPLATE) as f:\n template = f.read()\n\ntemplate = template.replace(\"{{flags}}\", \" \\\\\\n \".join(flags))\n\nwith open(ZSH_COMPLETION_FILE, \"w\") as f:\n f.write(template)\n", "path": "dev/zsh-completion.py"}]}
| 1,302 | 217 |
gh_patches_debug_19256
|
rasdani/github-patches
|
git_diff
|
e2nIEE__pandapower-275
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Deprecation warning for asmatrix
The usage of numpy asmatrix raises Deprecation Warnings in numpy 1.15.4:
PendingDeprecationWarning: the matrix subclass is not the recommended way to represent matrices or deal with linear algebra (see https://docs.scipy.org/doc/numpy/user/numpy-for-matlab-users.html). Please adjust your code to use regular ndarray.
There are 5 occurrences in pandapower/pf/dSbus_dV_pypower.py.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pandapower/pf/dSbus_dV_pypower.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 # Copyright 1996-2015 PSERC. All rights reserved.
4 # Use of this source code is governed by a BSD-style
5 # license that can be found in the LICENSE file.
6
7 # Copyright (c) 2016-2018 by University of Kassel and Fraunhofer Institute for Energy Economics
8 # and Energy System Technology (IEE), Kassel. All rights reserved.
9
10
11
12 """Computes partial derivatives of power injection w.r.t. voltage.
13 """
14
15 from numpy import conj, diag, asmatrix, asarray, zeros
16 from scipy.sparse import issparse, csr_matrix as sparse
17
18
19 def dSbus_dV(Ybus, V):
20 """Computes partial derivatives of power injection w.r.t. voltage.
21 """
22
23 if issparse(Ybus):
24 return dSbus_dV_sparse(Ybus, V)
25 else:
26 return dSbus_dV_dense(Ybus, V)
27
28
29 def dSbus_dV_sparse(Ybus, V):
30 Ibus = Ybus * V
31 ib = range(len(V))
32 diagV = sparse((V, (ib, ib)))
33 diagIbus = sparse((Ibus, (ib, ib)))
34 diagVnorm = sparse((V / abs(V), (ib, ib)))
35 dS_dVm = diagV * conj(Ybus * diagVnorm) + conj(diagIbus) * diagVnorm
36 dS_dVa = 1j * diagV * conj(diagIbus - Ybus * diagV)
37 return dS_dVm, dS_dVa
38
39
40 def dSbus_dV_dense(Ybus, V):
41 # standard code from Pypower (slower than above)
42 Ibus = Ybus * asmatrix(V).T
43
44 diagV = asmatrix(diag(V))
45 diagIbus = asmatrix(diag(asarray(Ibus).flatten()))
46 diagVnorm = asmatrix(diag(V / abs(V)))
47
48 dS_dVm = diagV * conj(Ybus * diagVnorm) + conj(diagIbus) * diagVnorm
49 dS_dVa = 1j * diagV * conj(diagIbus - Ybus * diagV)
50 return dS_dVm, dS_dVa
51
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pandapower/pf/dSbus_dV_pypower.py b/pandapower/pf/dSbus_dV_pypower.py
--- a/pandapower/pf/dSbus_dV_pypower.py
+++ b/pandapower/pf/dSbus_dV_pypower.py
@@ -12,7 +12,7 @@
"""Computes partial derivatives of power injection w.r.t. voltage.
"""
-from numpy import conj, diag, asmatrix, asarray, zeros
+from numpy import conj, diag, asarray
from scipy.sparse import issparse, csr_matrix as sparse
@@ -39,11 +39,11 @@
def dSbus_dV_dense(Ybus, V):
# standard code from Pypower (slower than above)
- Ibus = Ybus * asmatrix(V).T
+ Ibus = Ybus * asarray(V).T
- diagV = asmatrix(diag(V))
- diagIbus = asmatrix(diag(asarray(Ibus).flatten()))
- diagVnorm = asmatrix(diag(V / abs(V)))
+ diagV = asarray(diag(V))
+ diagIbus = asarray(diag(asarray(Ibus).flatten()))
+ diagVnorm = asarray(diag(V / abs(V)))
dS_dVm = diagV * conj(Ybus * diagVnorm) + conj(diagIbus) * diagVnorm
dS_dVa = 1j * diagV * conj(diagIbus - Ybus * diagV)
|
{"golden_diff": "diff --git a/pandapower/pf/dSbus_dV_pypower.py b/pandapower/pf/dSbus_dV_pypower.py\n--- a/pandapower/pf/dSbus_dV_pypower.py\n+++ b/pandapower/pf/dSbus_dV_pypower.py\n@@ -12,7 +12,7 @@\n \"\"\"Computes partial derivatives of power injection w.r.t. voltage.\n \"\"\"\n \n-from numpy import conj, diag, asmatrix, asarray, zeros\n+from numpy import conj, diag, asarray\n from scipy.sparse import issparse, csr_matrix as sparse\n \n \n@@ -39,11 +39,11 @@\n \n def dSbus_dV_dense(Ybus, V):\n # standard code from Pypower (slower than above)\n- Ibus = Ybus * asmatrix(V).T\n+ Ibus = Ybus * asarray(V).T\n \n- diagV = asmatrix(diag(V))\n- diagIbus = asmatrix(diag(asarray(Ibus).flatten()))\n- diagVnorm = asmatrix(diag(V / abs(V)))\n+ diagV = asarray(diag(V))\n+ diagIbus = asarray(diag(asarray(Ibus).flatten()))\n+ diagVnorm = asarray(diag(V / abs(V)))\n \n dS_dVm = diagV * conj(Ybus * diagVnorm) + conj(diagIbus) * diagVnorm\n dS_dVa = 1j * diagV * conj(diagIbus - Ybus * diagV)\n", "issue": "Deprecation warning for asmatrix\nThe usage of numpy asmatrix raises Deprecation Warnings in numpy 1.15.4:\r\nPendingDeprecationWarning: the matrix subclass is not the recommended way to represent matrices or deal with linear algebra (see https://docs.scipy.org/doc/numpy/user/numpy-for-matlab-users.html). Please adjust your code to use regular ndarray.\r\nThere are 5 occurences in pandapower/pf/dSbus_dV_pypower.py.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Copyright 1996-2015 PSERC. All rights reserved.\n# Use of this source code is governed by a BSD-style\n# license that can be found in the LICENSE file.\n\n# Copyright (c) 2016-2018 by University of Kassel and Fraunhofer Institute for Energy Economics\n# and Energy System Technology (IEE), Kassel. All rights reserved.\n\n\n\n\"\"\"Computes partial derivatives of power injection w.r.t. voltage.\n\"\"\"\n\nfrom numpy import conj, diag, asmatrix, asarray, zeros\nfrom scipy.sparse import issparse, csr_matrix as sparse\n\n\ndef dSbus_dV(Ybus, V):\n \"\"\"Computes partial derivatives of power injection w.r.t. voltage.\n \"\"\"\n\n if issparse(Ybus):\n return dSbus_dV_sparse(Ybus, V)\n else:\n return dSbus_dV_dense(Ybus, V)\n\n\ndef dSbus_dV_sparse(Ybus, V):\n Ibus = Ybus * V\n ib = range(len(V))\n diagV = sparse((V, (ib, ib)))\n diagIbus = sparse((Ibus, (ib, ib)))\n diagVnorm = sparse((V / abs(V), (ib, ib)))\n dS_dVm = diagV * conj(Ybus * diagVnorm) + conj(diagIbus) * diagVnorm\n dS_dVa = 1j * diagV * conj(diagIbus - Ybus * diagV)\n return dS_dVm, dS_dVa\n\n\ndef dSbus_dV_dense(Ybus, V):\n # standard code from Pypower (slower than above)\n Ibus = Ybus * asmatrix(V).T\n\n diagV = asmatrix(diag(V))\n diagIbus = asmatrix(diag(asarray(Ibus).flatten()))\n diagVnorm = asmatrix(diag(V / abs(V)))\n\n dS_dVm = diagV * conj(Ybus * diagVnorm) + conj(diagIbus) * diagVnorm\n dS_dVa = 1j * diagV * conj(diagIbus - Ybus * diagV)\n return dS_dVm, dS_dVa\n", "path": "pandapower/pf/dSbus_dV_pypower.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Copyright 1996-2015 PSERC. All rights reserved.\n# Use of this source code is governed by a BSD-style\n# license that can be found in the LICENSE file.\n\n# Copyright (c) 2016-2018 by University of Kassel and Fraunhofer Institute for Energy Economics\n# and Energy System Technology (IEE), Kassel. All rights reserved.\n\n\n\n\"\"\"Computes partial derivatives of power injection w.r.t. 
voltage.\n\"\"\"\n\nfrom numpy import conj, diag, asarray\nfrom scipy.sparse import issparse, csr_matrix as sparse\n\n\ndef dSbus_dV(Ybus, V):\n \"\"\"Computes partial derivatives of power injection w.r.t. voltage.\n \"\"\"\n\n if issparse(Ybus):\n return dSbus_dV_sparse(Ybus, V)\n else:\n return dSbus_dV_dense(Ybus, V)\n\n\ndef dSbus_dV_sparse(Ybus, V):\n Ibus = Ybus * V\n ib = range(len(V))\n diagV = sparse((V, (ib, ib)))\n diagIbus = sparse((Ibus, (ib, ib)))\n diagVnorm = sparse((V / abs(V), (ib, ib)))\n dS_dVm = diagV * conj(Ybus * diagVnorm) + conj(diagIbus) * diagVnorm\n dS_dVa = 1j * diagV * conj(diagIbus - Ybus * diagV)\n return dS_dVm, dS_dVa\n\n\ndef dSbus_dV_dense(Ybus, V):\n # standard code from Pypower (slower than above)\n Ibus = Ybus * asarray(V).T\n\n diagV = asarray(diag(V))\n diagIbus = asarray(diag(asarray(Ibus).flatten()))\n diagVnorm = asarray(diag(V / abs(V)))\n\n dS_dVm = diagV * conj(Ybus * diagVnorm) + conj(diagIbus) * diagVnorm\n dS_dVa = 1j * diagV * conj(diagIbus - Ybus * diagV)\n return dS_dVm, dS_dVa\n", "path": "pandapower/pf/dSbus_dV_pypower.py"}]}
| 969 | 349 |
gh_patches_debug_25881
|
rasdani/github-patches
|
git_diff
|
translate__pootle-4060
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
merge_user should also remove the old user
Currently `merge_user` does not actually remove the old user. You need to run `purge_user` following `merge_user` to completely remove the user. This is dangerous and error prone, especially on older instances that have a large number of users.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pootle/apps/accounts/management/commands/merge_user.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 #
4 # Copyright (C) Pootle contributors.
5 #
6 # This file is a part of the Pootle project. It is distributed under the GPL3
7 # or later license. See the LICENSE file for a copy of the license and the
8 # AUTHORS file for copyright and authorship information.
9
10 import accounts
11
12 from . import UserCommand
13
14
15 class Command(UserCommand):
16 args = "user other_user"
17 help = "Merge user to other_user"
18
19 def handle(self, *args, **kwargs):
20 super(Command, self).handle(*args, **kwargs)
21 accounts.utils.UserMerger(self.get_user(username=args[0]),
22 self.get_user(username=args[1])).merge()
23
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pootle/apps/accounts/management/commands/merge_user.py b/pootle/apps/accounts/management/commands/merge_user.py
--- a/pootle/apps/accounts/management/commands/merge_user.py
+++ b/pootle/apps/accounts/management/commands/merge_user.py
@@ -7,6 +7,8 @@
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
+from optparse import make_option
+
import accounts
from . import UserCommand
@@ -15,8 +17,22 @@
class Command(UserCommand):
args = "user other_user"
help = "Merge user to other_user"
+ shared_option_list = (
+ make_option("--no-delete",
+ dest='delete',
+ action="store_false",
+ default=True,
+ help="Don't delete user after merging."),
+ )
+ option_list = UserCommand.option_list + shared_option_list
def handle(self, *args, **kwargs):
super(Command, self).handle(*args, **kwargs)
- accounts.utils.UserMerger(self.get_user(username=args[0]),
+ src_user = self.get_user(username=args[0])
+ accounts.utils.UserMerger(src_user,
self.get_user(username=args[1])).merge()
+
+ if kwargs.get("delete"):
+ self.stdout.write("Deleting user: %s...\n" % src_user.username)
+ src_user.delete()
+ self.stdout.write("User deleted: %s\n" % src_user.username)
|
{"golden_diff": "diff --git a/pootle/apps/accounts/management/commands/merge_user.py b/pootle/apps/accounts/management/commands/merge_user.py\n--- a/pootle/apps/accounts/management/commands/merge_user.py\n+++ b/pootle/apps/accounts/management/commands/merge_user.py\n@@ -7,6 +7,8 @@\n # or later license. See the LICENSE file for a copy of the license and the\n # AUTHORS file for copyright and authorship information.\n \n+from optparse import make_option\n+\n import accounts\n \n from . import UserCommand\n@@ -15,8 +17,22 @@\n class Command(UserCommand):\n args = \"user other_user\"\n help = \"Merge user to other_user\"\n+ shared_option_list = (\n+ make_option(\"--no-delete\",\n+ dest='delete',\n+ action=\"store_false\",\n+ default=True,\n+ help=\"Don't delete user after merging.\"),\n+ )\n+ option_list = UserCommand.option_list + shared_option_list\n \n def handle(self, *args, **kwargs):\n super(Command, self).handle(*args, **kwargs)\n- accounts.utils.UserMerger(self.get_user(username=args[0]),\n+ src_user = self.get_user(username=args[0])\n+ accounts.utils.UserMerger(src_user,\n self.get_user(username=args[1])).merge()\n+\n+ if kwargs.get(\"delete\"):\n+ self.stdout.write(\"Deleting user: %s...\\n\" % src_user.username)\n+ src_user.delete()\n+ self.stdout.write(\"User deleted: %s\\n\" % src_user.username)\n", "issue": "merge_user should also remove the old user\nCurrently `merge_user` does not actually remove the old user. You need to run `purge_user` following `merge_user` to completely remove the user. This is dangerous and error prone, especially on older instances that have a large number of users.\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nimport accounts\n\nfrom . import UserCommand\n\n\nclass Command(UserCommand):\n args = \"user other_user\"\n help = \"Merge user to other_user\"\n\n def handle(self, *args, **kwargs):\n super(Command, self).handle(*args, **kwargs)\n accounts.utils.UserMerger(self.get_user(username=args[0]),\n self.get_user(username=args[1])).merge()\n", "path": "pootle/apps/accounts/management/commands/merge_user.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom optparse import make_option\n\nimport accounts\n\nfrom . 
import UserCommand\n\n\nclass Command(UserCommand):\n args = \"user other_user\"\n help = \"Merge user to other_user\"\n shared_option_list = (\n make_option(\"--no-delete\",\n dest='delete',\n action=\"store_false\",\n default=True,\n help=\"Don't delete user after merging.\"),\n )\n option_list = UserCommand.option_list + shared_option_list\n\n def handle(self, *args, **kwargs):\n super(Command, self).handle(*args, **kwargs)\n src_user = self.get_user(username=args[0])\n accounts.utils.UserMerger(src_user,\n self.get_user(username=args[1])).merge()\n\n if kwargs.get(\"delete\"):\n self.stdout.write(\"Deleting user: %s...\\n\" % src_user.username)\n src_user.delete()\n self.stdout.write(\"User deleted: %s\\n\" % src_user.username)\n", "path": "pootle/apps/accounts/management/commands/merge_user.py"}]}
| 529 | 343 |
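For context on the record above: once the management command grows an opt-out flag like `--no-delete`, it can also be driven from Python through Django's `call_command`. The snippet below is a hypothetical invocation — the usernames are placeholders and the exact option handling depends on the Pootle/Django version in use — not part of the dataset itself.

```python
from django.core.management import call_command

# Hypothetical usage after the patch: merge accounts but keep the old one.
# "old_account" and "new_account" are placeholder usernames.
call_command("merge_user", "old_account", "new_account", delete=False)

# Default behaviour (delete=True) would remove "old_account" after merging.
call_command("merge_user", "old_account", "new_account")
```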
gh_patches_debug_41748
|
rasdani/github-patches
|
git_diff
|
sql-machine-learning__elasticdl-352
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support pushing docker image built by elasticdl client.
To run docker images in a cloud environment, we need to add a step that pushes the built docker image to a registry provided by the cloud.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `elasticdl/client/client.py`
Content:
```
1 import argparse
2 import os
3 import inspect
4 import tempfile
5 import time
6 import getpass
7 import sys
8 from string import Template
9 import docker
10 import yaml
11 from kubernetes.client.apis import core_v1_api
12 from kubernetes import config
13
14
15 def _m_file_in_docker(model_file):
16 return "/model/" + os.path.basename(model_file)
17
18 def _build_docker_image(
19 m_file, timestamp, image_base="elasticdl:dev"
20 ):
21 DOCKER_TEMPLATE = """
22 FROM {}
23 COPY {} {}
24 """
25
26 with tempfile.NamedTemporaryFile(mode="w+", delete=False) as df:
27 df.write(DOCKER_TEMPLATE.format(image_base, m_file, _m_file_in_docker(m_file)))
28
29 client = docker.APIClient(base_url="unix://var/run/docker.sock")
30 for line in client.build(
31 dockerfile=df.name, path=".", rm=True, tag="elasticdl:dev_" + str(timestamp)
32 ):
33 print(str(line, encoding="utf-8"))
34
35 # TODO: upload docker image to docker hub.
36
37 def _gen_master_def(model_file, argv, timestamp):
38 master_yaml = """
39 apiVersion: v1
40 kind: Pod
41 metadata:
42 name: elasticdl-master-{timestamp}
43 labels:
44 purpose: test-command
45 spec:
46 containers:
47 - name: elasticdl-master-{timestamp}
48 image: elasticdl:dev_{timestamp}
49 command: ["python"]
50 args: [
51 "-m", "elasticdl.master.main",
52 "--worker_image", "elasticdl:dev_{timestamp}",
53 "--model_file", "{m_file}"
54 ]
55 imagePullPolicy: Never
56 env:
57 - name: MY_POD_IP
58 valueFrom:
59 fieldRef:
60 fieldPath: status.podIP
61 restartPolicy: Never
62 """ .format(m_file=_m_file_in_docker(model_file), timestamp=timestamp)
63
64 master_def = yaml.safe_load(master_yaml)
65
66 # Build master arguments
67 master_def['spec']['containers'][0]['args'].extend(argv)
68 return master_def
69
70 def _submit(model_file, argv, timestamp):
71 master_def = _gen_master_def(model_file, argv, timestamp)
72 config.load_kube_config()
73 api = core_v1_api.CoreV1Api()
74 resp = api.create_namespaced_pod(body=master_def, namespace="default")
75 print("Master launched. status='%s'" % str(resp.status))
76
77 def main():
78 parser = argparse.ArgumentParser(description="ElasticDL Client")
79 # Rewrite model_file argument and pass all other arguments to master.
80 parser.add_argument("--model_file", help="Path to Model file", required=True)
81 args, argv = parser.parse_known_args()
82
83 timestamp = int(round(time.time() * 1000))
84 _build_docker_image(args.model_file, timestamp)
85 _submit(args.model_file, argv, timestamp)
86
87
88 if __name__ == "__main__":
89 main()
90
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/elasticdl/client/client.py b/elasticdl/client/client.py
--- a/elasticdl/client/client.py
+++ b/elasticdl/client/client.py
@@ -16,7 +16,8 @@
return "/model/" + os.path.basename(model_file)
def _build_docker_image(
- m_file, timestamp, image_base="elasticdl:dev"
+ m_file, image_name, image_base="elasticdl:dev",
+ repository=None
):
DOCKER_TEMPLATE = """
FROM {}
@@ -28,13 +29,15 @@
client = docker.APIClient(base_url="unix://var/run/docker.sock")
for line in client.build(
- dockerfile=df.name, path=".", rm=True, tag="elasticdl:dev_" + str(timestamp)
+ dockerfile=df.name, path=".", rm=True, tag=image_name
):
print(str(line, encoding="utf-8"))
- # TODO: upload docker image to docker hub.
+ if repository != None:
+ for line in client.push(image_name, stream=True, decode=True):
+ print(line)
-def _gen_master_def(model_file, argv, timestamp):
+def _gen_master_def(image_name, model_file, argv, timestamp):
master_yaml = """
apiVersion: v1
kind: Pod
@@ -45,21 +48,21 @@
spec:
containers:
- name: elasticdl-master-{timestamp}
- image: elasticdl:dev_{timestamp}
+ image: {image_name}
command: ["python"]
args: [
"-m", "elasticdl.master.main",
- "--worker_image", "elasticdl:dev_{timestamp}",
+ "--worker_image", {image_name},
"--model_file", "{m_file}"
]
- imagePullPolicy: Never
+ imagePullPolicy: IfNotPresent
env:
- name: MY_POD_IP
valueFrom:
fieldRef:
fieldPath: status.podIP
restartPolicy: Never
-""" .format(m_file=_m_file_in_docker(model_file), timestamp=timestamp)
+""" .format(m_file=_m_file_in_docker(model_file), image_name=image_name, timestamp=timestamp)
master_def = yaml.safe_load(master_yaml)
@@ -67,8 +70,8 @@
master_def['spec']['containers'][0]['args'].extend(argv)
return master_def
-def _submit(model_file, argv, timestamp):
- master_def = _gen_master_def(model_file, argv, timestamp)
+def _submit(image_name, model_file, argv, timestamp):
+ master_def = _gen_master_def(image_name, model_file, argv, timestamp)
config.load_kube_config()
api = core_v1_api.CoreV1Api()
resp = api.create_namespaced_pod(body=master_def, namespace="default")
@@ -78,11 +81,15 @@
parser = argparse.ArgumentParser(description="ElasticDL Client")
# Rewrite model_file argument and pass all other arguments to master.
parser.add_argument("--model_file", help="Path to Model file", required=True)
+ parser.add_argument("--image-base", help="Base image containing elasticdl runtime environment.")
+ parser.add_argument("--repository", help="The repository to push docker image to.")
args, argv = parser.parse_known_args()
- timestamp = int(round(time.time() * 1000))
- _build_docker_image(args.model_file, timestamp)
- _submit(args.model_file, argv, timestamp)
+ timestamp = str(int(round(time.time() * 1000)))
+ image_name = args.image_base + '_' + timestamp
+ _build_docker_image(args.model_file, image_name, image_base=args.image_base,
+ repository=args.repository)
+ _submit(image_name, args.model_file, argv, timestamp)
if __name__ == "__main__":
|
{"golden_diff": "diff --git a/elasticdl/client/client.py b/elasticdl/client/client.py\n--- a/elasticdl/client/client.py\n+++ b/elasticdl/client/client.py\n@@ -16,7 +16,8 @@\n return \"/model/\" + os.path.basename(model_file)\n \n def _build_docker_image(\n- m_file, timestamp, image_base=\"elasticdl:dev\"\n+ m_file, image_name, image_base=\"elasticdl:dev\",\n+ repository=None\n ):\n DOCKER_TEMPLATE = \"\"\"\n FROM {}\n@@ -28,13 +29,15 @@\n \n client = docker.APIClient(base_url=\"unix://var/run/docker.sock\")\n for line in client.build(\n- dockerfile=df.name, path=\".\", rm=True, tag=\"elasticdl:dev_\" + str(timestamp)\n+ dockerfile=df.name, path=\".\", rm=True, tag=image_name\n ):\n print(str(line, encoding=\"utf-8\"))\n \n- # TODO: upload docker image to docker hub.\n+ if repository != None:\n+ for line in client.push(image_name, stream=True, decode=True):\n+ print(line)\n \n-def _gen_master_def(model_file, argv, timestamp):\n+def _gen_master_def(image_name, model_file, argv, timestamp):\n master_yaml = \"\"\"\n apiVersion: v1\n kind: Pod\n@@ -45,21 +48,21 @@\n spec:\n containers:\n - name: elasticdl-master-{timestamp}\n- image: elasticdl:dev_{timestamp}\n+ image: {image_name}\n command: [\"python\"]\n args: [\n \"-m\", \"elasticdl.master.main\",\n- \"--worker_image\", \"elasticdl:dev_{timestamp}\",\n+ \"--worker_image\", {image_name},\n \"--model_file\", \"{m_file}\"\n ]\n- imagePullPolicy: Never\n+ imagePullPolicy: IfNotPresent \n env:\n - name: MY_POD_IP\n valueFrom:\n fieldRef:\n fieldPath: status.podIP\n restartPolicy: Never\n-\"\"\" .format(m_file=_m_file_in_docker(model_file), timestamp=timestamp)\n+\"\"\" .format(m_file=_m_file_in_docker(model_file), image_name=image_name, timestamp=timestamp)\n \n master_def = yaml.safe_load(master_yaml)\n \n@@ -67,8 +70,8 @@\n master_def['spec']['containers'][0]['args'].extend(argv)\n return master_def\n \n-def _submit(model_file, argv, timestamp):\n- master_def = _gen_master_def(model_file, argv, timestamp)\n+def _submit(image_name, model_file, argv, timestamp):\n+ master_def = _gen_master_def(image_name, model_file, argv, timestamp)\n config.load_kube_config()\n api = core_v1_api.CoreV1Api()\n resp = api.create_namespaced_pod(body=master_def, namespace=\"default\")\n@@ -78,11 +81,15 @@\n parser = argparse.ArgumentParser(description=\"ElasticDL Client\")\n # Rewrite model_file argument and pass all other arguments to master.\n parser.add_argument(\"--model_file\", help=\"Path to Model file\", required=True)\n+ parser.add_argument(\"--image-base\", help=\"Base image containing elasticdl runtime environment.\")\n+ parser.add_argument(\"--repository\", help=\"The repository to push docker image to.\")\n args, argv = parser.parse_known_args()\n \n- timestamp = int(round(time.time() * 1000))\n- _build_docker_image(args.model_file, timestamp)\n- _submit(args.model_file, argv, timestamp) \n+ timestamp = str(int(round(time.time() * 1000)))\n+ image_name = args.image_base + '_' + timestamp \n+ _build_docker_image(args.model_file, image_name, image_base=args.image_base,\n+ repository=args.repository)\n+ _submit(image_name, args.model_file, argv, timestamp)\n \n \n if __name__ == \"__main__\":\n", "issue": "Support pushing docker image built by elasticdl client.\nto run docker images in cloud environment, we need add step for pushing docker image to registry provided by cloud.\n", "before_files": [{"content": "import argparse\nimport os\nimport inspect\nimport tempfile\nimport time\nimport getpass\nimport sys\nfrom string import Template\nimport 
docker\nimport yaml\nfrom kubernetes.client.apis import core_v1_api\nfrom kubernetes import config\n\n\ndef _m_file_in_docker(model_file):\n return \"/model/\" + os.path.basename(model_file)\n\ndef _build_docker_image(\n m_file, timestamp, image_base=\"elasticdl:dev\"\n):\n DOCKER_TEMPLATE = \"\"\"\nFROM {}\nCOPY {} {}\n\"\"\"\n\n with tempfile.NamedTemporaryFile(mode=\"w+\", delete=False) as df:\n df.write(DOCKER_TEMPLATE.format(image_base, m_file, _m_file_in_docker(m_file)))\n\n client = docker.APIClient(base_url=\"unix://var/run/docker.sock\")\n for line in client.build(\n dockerfile=df.name, path=\".\", rm=True, tag=\"elasticdl:dev_\" + str(timestamp)\n ):\n print(str(line, encoding=\"utf-8\"))\n\n # TODO: upload docker image to docker hub.\n\ndef _gen_master_def(model_file, argv, timestamp):\n master_yaml = \"\"\"\napiVersion: v1\nkind: Pod\nmetadata:\n name: elasticdl-master-{timestamp}\n labels:\n purpose: test-command\nspec:\n containers:\n - name: elasticdl-master-{timestamp}\n image: elasticdl:dev_{timestamp}\n command: [\"python\"]\n args: [\n \"-m\", \"elasticdl.master.main\",\n \"--worker_image\", \"elasticdl:dev_{timestamp}\",\n \"--model_file\", \"{m_file}\"\n ]\n imagePullPolicy: Never\n env:\n - name: MY_POD_IP\n valueFrom:\n fieldRef:\n fieldPath: status.podIP\n restartPolicy: Never\n\"\"\" .format(m_file=_m_file_in_docker(model_file), timestamp=timestamp)\n\n master_def = yaml.safe_load(master_yaml)\n\n # Build master arguments\n master_def['spec']['containers'][0]['args'].extend(argv)\n return master_def\n\ndef _submit(model_file, argv, timestamp):\n master_def = _gen_master_def(model_file, argv, timestamp)\n config.load_kube_config()\n api = core_v1_api.CoreV1Api()\n resp = api.create_namespaced_pod(body=master_def, namespace=\"default\")\n print(\"Master launched. 
status='%s'\" % str(resp.status))\n\ndef main():\n parser = argparse.ArgumentParser(description=\"ElasticDL Client\")\n # Rewrite model_file argument and pass all other arguments to master.\n parser.add_argument(\"--model_file\", help=\"Path to Model file\", required=True)\n args, argv = parser.parse_known_args()\n\n timestamp = int(round(time.time() * 1000))\n _build_docker_image(args.model_file, timestamp)\n _submit(args.model_file, argv, timestamp) \n\n\nif __name__ == \"__main__\":\n main()\n", "path": "elasticdl/client/client.py"}], "after_files": [{"content": "import argparse\nimport os\nimport inspect\nimport tempfile\nimport time\nimport getpass\nimport sys\nfrom string import Template\nimport docker\nimport yaml\nfrom kubernetes.client.apis import core_v1_api\nfrom kubernetes import config\n\n\ndef _m_file_in_docker(model_file):\n return \"/model/\" + os.path.basename(model_file)\n\ndef _build_docker_image(\n m_file, image_name, image_base=\"elasticdl:dev\",\n repository=None\n):\n DOCKER_TEMPLATE = \"\"\"\nFROM {}\nCOPY {} {}\n\"\"\"\n\n with tempfile.NamedTemporaryFile(mode=\"w+\", delete=False) as df:\n df.write(DOCKER_TEMPLATE.format(image_base, m_file, _m_file_in_docker(m_file)))\n\n client = docker.APIClient(base_url=\"unix://var/run/docker.sock\")\n for line in client.build(\n dockerfile=df.name, path=\".\", rm=True, tag=image_name\n ):\n print(str(line, encoding=\"utf-8\"))\n\n if repository != None:\n for line in client.push(image_name, stream=True, decode=True):\n print(line)\n\ndef _gen_master_def(image_name, model_file, argv, timestamp):\n master_yaml = \"\"\"\napiVersion: v1\nkind: Pod\nmetadata:\n name: elasticdl-master-{timestamp}\n labels:\n purpose: test-command\nspec:\n containers:\n - name: elasticdl-master-{timestamp}\n image: {image_name}\n command: [\"python\"]\n args: [\n \"-m\", \"elasticdl.master.main\",\n \"--worker_image\", {image_name},\n \"--model_file\", \"{m_file}\"\n ]\n imagePullPolicy: IfNotPresent \n env:\n - name: MY_POD_IP\n valueFrom:\n fieldRef:\n fieldPath: status.podIP\n restartPolicy: Never\n\"\"\" .format(m_file=_m_file_in_docker(model_file), image_name=image_name, timestamp=timestamp)\n\n master_def = yaml.safe_load(master_yaml)\n\n # Build master arguments\n master_def['spec']['containers'][0]['args'].extend(argv)\n return master_def\n\ndef _submit(image_name, model_file, argv, timestamp):\n master_def = _gen_master_def(image_name, model_file, argv, timestamp)\n config.load_kube_config()\n api = core_v1_api.CoreV1Api()\n resp = api.create_namespaced_pod(body=master_def, namespace=\"default\")\n print(\"Master launched. status='%s'\" % str(resp.status))\n\ndef main():\n parser = argparse.ArgumentParser(description=\"ElasticDL Client\")\n # Rewrite model_file argument and pass all other arguments to master.\n parser.add_argument(\"--model_file\", help=\"Path to Model file\", required=True)\n parser.add_argument(\"--image-base\", help=\"Base image containing elasticdl runtime environment.\")\n parser.add_argument(\"--repository\", help=\"The repository to push docker image to.\")\n args, argv = parser.parse_known_args()\n\n timestamp = str(int(round(time.time() * 1000)))\n image_name = args.image_base + '_' + timestamp \n _build_docker_image(args.model_file, image_name, image_base=args.image_base,\n repository=args.repository)\n _submit(image_name, args.model_file, argv, timestamp)\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "elasticdl/client/client.py"}]}
| 1,087 | 856 |
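The essence of the record above is wiring `docker-py`'s push API into the client. A stand-alone sketch of the build-then-push flow is shown below; the registry address, tag, and Dockerfile path are placeholders, and pushing to a private registry would normally require an earlier `client.login(...)` call.

```python
import docker

client = docker.APIClient(base_url="unix://var/run/docker.sock")

# Placeholder repository/tag; a real run needs a reachable registry.
image_name = "registry.example.com/elasticdl:dev_1234"

for line in client.build(path=".", dockerfile="Dockerfile", rm=True, tag=image_name):
    print(line.decode("utf-8"), end="")   # raw JSON progress lines from the daemon

for line in client.push(image_name, stream=True, decode=True):
    print(line)                           # decoded progress dicts
```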
gh_patches_debug_13363
|
rasdani/github-patches
|
git_diff
|
joke2k__faker-757
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Visa now supports 19 digits credit card length
reference:
https://www.freeformatter.com/credit-card-number-generator-validator.html
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `faker/providers/credit_card/__init__.py`
Content:
```
1 # coding=utf-8
2 from __future__ import unicode_literals
3 from collections import OrderedDict
4
5 from .. import BaseProvider
6
7
8 class CreditCard(object):
9
10 def __init__(
11 self,
12 name,
13 prefixes,
14 length=16,
15 security_code='CVC',
16 security_code_length=3):
17 self.name = name
18 self.prefixes = prefixes
19 self.length = length
20 self.security_code = security_code
21 self.security_code_length = security_code_length
22
23
24 class Provider(BaseProvider):
25
26 # Prefixes from:
27 # * https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_.28IIN.29
28 # * https://www.regular-expressions.info/creditcard.html
29 # * https://creditcardjs.com/credit-card-type-detection
30 prefix_maestro = ['5018', '5020', '5038', '56##', '57##', '58##',
31 '6304', '6759', '6761', '6762', '6763', '0604', '6390']
32 prefix_mastercard = ['51', '52', '53', '54', '55', '222%']
33 prefix_visa = ['4']
34 prefix_amex = ['34', '37']
35 prefix_discover = ['6011', '65']
36 prefix_diners = ['300', '301', '302', '303', '304', '305', '36', '38']
37 prefix_jcb16 = ['35']
38 prefix_jcb15 = ['2131', '1800']
39
40 credit_card_types = OrderedDict((
41 ('maestro', CreditCard('Maestro',
42 prefix_maestro, 12, security_code='CVV')),
43 ('mastercard', CreditCard('Mastercard',
44 prefix_mastercard, 16, security_code='CVV')),
45 ('visa16', CreditCard('VISA 16 digit', prefix_visa)),
46 ('visa13', CreditCard('VISA 13 digit', prefix_visa, 13)),
47 ('amex', CreditCard('American Express', prefix_amex,
48 15, security_code='CID', security_code_length=4)),
49 ('discover', CreditCard('Discover', prefix_discover)),
50 ('diners', CreditCard('Diners Club / Carte Blanche', prefix_diners, 14)),
51 ('jcb15', CreditCard('JCB 15 digit', prefix_jcb15, 15)),
52 ('jcb16', CreditCard('JCB 16 digit', prefix_jcb16)),
53 ))
54 credit_card_types['visa'] = credit_card_types['visa16']
55 credit_card_types['jcb'] = credit_card_types['jcb16']
56
57 luhn_lookup = {'0': 0, '1': 2, '2': 4, '3': 6, '4': 8,
58 '5': 1, '6': 3, '7': 5, '8': 7, '9': 9}
59
60 def credit_card_provider(self, card_type=None):
61 """ Returns the provider's name of the credit card. """
62 if card_type is None:
63 card_type = self.random_element(self.credit_card_types.keys())
64 return self._credit_card_type(card_type).name
65
66 def credit_card_number(self, card_type=None):
67 """ Returns a valid credit card number. """
68 card = self._credit_card_type(card_type)
69 prefix = self.random_element(card.prefixes)
70 number = self._generate_number(self.numerify(prefix), card.length)
71 return number
72
73 def credit_card_expire(self, start='now', end='+10y', date_format='%m/%y'):
74 expire_date = self.generator.date_time_between(start, end)
75 return expire_date.strftime(date_format)
76
77 def credit_card_full(self, card_type=None):
78 card = self._credit_card_type(card_type)
79
80 tpl = ('{provider}\n'
81 '{owner}\n'
82 '{number} {expire_date}\n'
83 '{security}: {security_nb}\n')
84
85 tpl = tpl.format(provider=card.name,
86 owner=self.generator.parse(
87 "{{first_name}} {{last_name}}"),
88 number=self.credit_card_number(card),
89 expire_date=self.credit_card_expire(),
90 security=card.security_code,
91 security_nb=self.credit_card_security_code(card))
92
93 return self.generator.parse(tpl)
94
95 def credit_card_security_code(self, card_type=None):
96 """ Returns a security code string. """
97 sec_len = self._credit_card_type(card_type).security_code_length
98 return self.numerify('#' * sec_len)
99
100 def _credit_card_type(self, card_type=None):
101 """ Returns a random credit card type instance. """
102 if card_type is None:
103 card_type = self.random_element(self.credit_card_types.keys())
104 elif isinstance(card_type, CreditCard):
105 return card_type
106 return self.credit_card_types[card_type]
107
108 def _generate_number(self, prefix, length):
109 """
110 'prefix' is the start of the CC number as a string, any number of digits.
111 'length' is the length of the CC number to generate. Typically 13 or 16
112 """
113 number = prefix
114 # Generate random char digits
115 number += '#' * (length - len(prefix) - 1)
116 number = self.numerify(number)
117 reverse = number[::-1]
118 # Calculate sum
119 tot = 0
120 pos = 0
121 while pos < length - 1:
122 tot += Provider.luhn_lookup[reverse[pos]]
123 if pos != (length - 2):
124 tot += int(reverse[pos + 1])
125 pos += 2
126 # Calculate check digit
127 check_digit = (10 - (tot % 10)) % 10
128 number += str(check_digit)
129 return number
130
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/faker/providers/credit_card/__init__.py b/faker/providers/credit_card/__init__.py
--- a/faker/providers/credit_card/__init__.py
+++ b/faker/providers/credit_card/__init__.py
@@ -44,6 +44,7 @@
prefix_mastercard, 16, security_code='CVV')),
('visa16', CreditCard('VISA 16 digit', prefix_visa)),
('visa13', CreditCard('VISA 13 digit', prefix_visa, 13)),
+ ('visa19', CreditCard('VISA 19 digit', prefix_visa, 19)),
('amex', CreditCard('American Express', prefix_amex,
15, security_code='CID', security_code_length=4)),
('discover', CreditCard('Discover', prefix_discover)),
|
{"golden_diff": "diff --git a/faker/providers/credit_card/__init__.py b/faker/providers/credit_card/__init__.py\n--- a/faker/providers/credit_card/__init__.py\n+++ b/faker/providers/credit_card/__init__.py\n@@ -44,6 +44,7 @@\n prefix_mastercard, 16, security_code='CVV')),\n ('visa16', CreditCard('VISA 16 digit', prefix_visa)),\n ('visa13', CreditCard('VISA 13 digit', prefix_visa, 13)),\n+ ('visa19', CreditCard('VISA 19 digit', prefix_visa, 19)),\n ('amex', CreditCard('American Express', prefix_amex,\n 15, security_code='CID', security_code_length=4)),\n ('discover', CreditCard('Discover', prefix_discover)),\n", "issue": "Visa now supports 19 digits credit card length\nreference: \r\nhttps://www.freeformatter.com/credit-card-number-generator-validator.html \n", "before_files": [{"content": "# coding=utf-8\nfrom __future__ import unicode_literals\nfrom collections import OrderedDict\n\nfrom .. import BaseProvider\n\n\nclass CreditCard(object):\n\n def __init__(\n self,\n name,\n prefixes,\n length=16,\n security_code='CVC',\n security_code_length=3):\n self.name = name\n self.prefixes = prefixes\n self.length = length\n self.security_code = security_code\n self.security_code_length = security_code_length\n\n\nclass Provider(BaseProvider):\n\n # Prefixes from:\n # * https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_.28IIN.29\n # * https://www.regular-expressions.info/creditcard.html\n # * https://creditcardjs.com/credit-card-type-detection\n prefix_maestro = ['5018', '5020', '5038', '56##', '57##', '58##',\n '6304', '6759', '6761', '6762', '6763', '0604', '6390']\n prefix_mastercard = ['51', '52', '53', '54', '55', '222%']\n prefix_visa = ['4']\n prefix_amex = ['34', '37']\n prefix_discover = ['6011', '65']\n prefix_diners = ['300', '301', '302', '303', '304', '305', '36', '38']\n prefix_jcb16 = ['35']\n prefix_jcb15 = ['2131', '1800']\n\n credit_card_types = OrderedDict((\n ('maestro', CreditCard('Maestro',\n prefix_maestro, 12, security_code='CVV')),\n ('mastercard', CreditCard('Mastercard',\n prefix_mastercard, 16, security_code='CVV')),\n ('visa16', CreditCard('VISA 16 digit', prefix_visa)),\n ('visa13', CreditCard('VISA 13 digit', prefix_visa, 13)),\n ('amex', CreditCard('American Express', prefix_amex,\n 15, security_code='CID', security_code_length=4)),\n ('discover', CreditCard('Discover', prefix_discover)),\n ('diners', CreditCard('Diners Club / Carte Blanche', prefix_diners, 14)),\n ('jcb15', CreditCard('JCB 15 digit', prefix_jcb15, 15)),\n ('jcb16', CreditCard('JCB 16 digit', prefix_jcb16)),\n ))\n credit_card_types['visa'] = credit_card_types['visa16']\n credit_card_types['jcb'] = credit_card_types['jcb16']\n\n luhn_lookup = {'0': 0, '1': 2, '2': 4, '3': 6, '4': 8,\n '5': 1, '6': 3, '7': 5, '8': 7, '9': 9}\n\n def credit_card_provider(self, card_type=None):\n \"\"\" Returns the provider's name of the credit card. \"\"\"\n if card_type is None:\n card_type = self.random_element(self.credit_card_types.keys())\n return self._credit_card_type(card_type).name\n\n def credit_card_number(self, card_type=None):\n \"\"\" Returns a valid credit card number. 
\"\"\"\n card = self._credit_card_type(card_type)\n prefix = self.random_element(card.prefixes)\n number = self._generate_number(self.numerify(prefix), card.length)\n return number\n\n def credit_card_expire(self, start='now', end='+10y', date_format='%m/%y'):\n expire_date = self.generator.date_time_between(start, end)\n return expire_date.strftime(date_format)\n\n def credit_card_full(self, card_type=None):\n card = self._credit_card_type(card_type)\n\n tpl = ('{provider}\\n'\n '{owner}\\n'\n '{number} {expire_date}\\n'\n '{security}: {security_nb}\\n')\n\n tpl = tpl.format(provider=card.name,\n owner=self.generator.parse(\n \"{{first_name}} {{last_name}}\"),\n number=self.credit_card_number(card),\n expire_date=self.credit_card_expire(),\n security=card.security_code,\n security_nb=self.credit_card_security_code(card))\n\n return self.generator.parse(tpl)\n\n def credit_card_security_code(self, card_type=None):\n \"\"\" Returns a security code string. \"\"\"\n sec_len = self._credit_card_type(card_type).security_code_length\n return self.numerify('#' * sec_len)\n\n def _credit_card_type(self, card_type=None):\n \"\"\" Returns a random credit card type instance. \"\"\"\n if card_type is None:\n card_type = self.random_element(self.credit_card_types.keys())\n elif isinstance(card_type, CreditCard):\n return card_type\n return self.credit_card_types[card_type]\n\n def _generate_number(self, prefix, length):\n \"\"\"\n 'prefix' is the start of the CC number as a string, any number of digits.\n 'length' is the length of the CC number to generate. Typically 13 or 16\n \"\"\"\n number = prefix\n # Generate random char digits\n number += '#' * (length - len(prefix) - 1)\n number = self.numerify(number)\n reverse = number[::-1]\n # Calculate sum\n tot = 0\n pos = 0\n while pos < length - 1:\n tot += Provider.luhn_lookup[reverse[pos]]\n if pos != (length - 2):\n tot += int(reverse[pos + 1])\n pos += 2\n # Calculate check digit\n check_digit = (10 - (tot % 10)) % 10\n number += str(check_digit)\n return number\n", "path": "faker/providers/credit_card/__init__.py"}], "after_files": [{"content": "# coding=utf-8\nfrom __future__ import unicode_literals\nfrom collections import OrderedDict\n\nfrom .. 
import BaseProvider\n\n\nclass CreditCard(object):\n\n def __init__(\n self,\n name,\n prefixes,\n length=16,\n security_code='CVC',\n security_code_length=3):\n self.name = name\n self.prefixes = prefixes\n self.length = length\n self.security_code = security_code\n self.security_code_length = security_code_length\n\n\nclass Provider(BaseProvider):\n\n # Prefixes from:\n # * https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_.28IIN.29\n # * https://www.regular-expressions.info/creditcard.html\n # * https://creditcardjs.com/credit-card-type-detection\n prefix_maestro = ['5018', '5020', '5038', '56##', '57##', '58##',\n '6304', '6759', '6761', '6762', '6763', '0604', '6390']\n prefix_mastercard = ['51', '52', '53', '54', '55', '222%']\n prefix_visa = ['4']\n prefix_amex = ['34', '37']\n prefix_discover = ['6011', '65']\n prefix_diners = ['300', '301', '302', '303', '304', '305', '36', '38']\n prefix_jcb16 = ['35']\n prefix_jcb15 = ['2131', '1800']\n\n credit_card_types = OrderedDict((\n ('maestro', CreditCard('Maestro',\n prefix_maestro, 12, security_code='CVV')),\n ('mastercard', CreditCard('Mastercard',\n prefix_mastercard, 16, security_code='CVV')),\n ('visa16', CreditCard('VISA 16 digit', prefix_visa)),\n ('visa13', CreditCard('VISA 13 digit', prefix_visa, 13)),\n ('visa19', CreditCard('VISA 19 digit', prefix_visa, 19)),\n ('amex', CreditCard('American Express', prefix_amex,\n 15, security_code='CID', security_code_length=4)),\n ('discover', CreditCard('Discover', prefix_discover)),\n ('diners', CreditCard('Diners Club / Carte Blanche', prefix_diners, 14)),\n ('jcb15', CreditCard('JCB 15 digit', prefix_jcb15, 15)),\n ('jcb16', CreditCard('JCB 16 digit', prefix_jcb16)),\n ))\n credit_card_types['visa'] = credit_card_types['visa16']\n credit_card_types['jcb'] = credit_card_types['jcb16']\n\n luhn_lookup = {'0': 0, '1': 2, '2': 4, '3': 6, '4': 8,\n '5': 1, '6': 3, '7': 5, '8': 7, '9': 9}\n\n def credit_card_provider(self, card_type=None):\n \"\"\" Returns the provider's name of the credit card. \"\"\"\n if card_type is None:\n card_type = self.random_element(self.credit_card_types.keys())\n return self._credit_card_type(card_type).name\n\n def credit_card_number(self, card_type=None):\n \"\"\" Returns a valid credit card number. \"\"\"\n card = self._credit_card_type(card_type)\n prefix = self.random_element(card.prefixes)\n number = self._generate_number(self.numerify(prefix), card.length)\n return number\n\n def credit_card_expire(self, start='now', end='+10y', date_format='%m/%y'):\n expire_date = self.generator.date_time_between(start, end)\n return expire_date.strftime(date_format)\n\n def credit_card_full(self, card_type=None):\n card = self._credit_card_type(card_type)\n\n tpl = ('{provider}\\n'\n '{owner}\\n'\n '{number} {expire_date}\\n'\n '{security}: {security_nb}\\n')\n\n tpl = tpl.format(provider=card.name,\n owner=self.generator.parse(\n \"{{first_name}} {{last_name}}\"),\n number=self.credit_card_number(card),\n expire_date=self.credit_card_expire(),\n security=card.security_code,\n security_nb=self.credit_card_security_code(card))\n\n return self.generator.parse(tpl)\n\n def credit_card_security_code(self, card_type=None):\n \"\"\" Returns a security code string. \"\"\"\n sec_len = self._credit_card_type(card_type).security_code_length\n return self.numerify('#' * sec_len)\n\n def _credit_card_type(self, card_type=None):\n \"\"\" Returns a random credit card type instance. 
\"\"\"\n if card_type is None:\n card_type = self.random_element(self.credit_card_types.keys())\n elif isinstance(card_type, CreditCard):\n return card_type\n return self.credit_card_types[card_type]\n\n def _generate_number(self, prefix, length):\n \"\"\"\n 'prefix' is the start of the CC number as a string, any number of digits.\n 'length' is the length of the CC number to generate. Typically 13 or 16\n \"\"\"\n number = prefix\n # Generate random char digits\n number += '#' * (length - len(prefix) - 1)\n number = self.numerify(number)\n reverse = number[::-1]\n # Calculate sum\n tot = 0\n pos = 0\n while pos < length - 1:\n tot += Provider.luhn_lookup[reverse[pos]]\n if pos != (length - 2):\n tot += int(reverse[pos + 1])\n pos += 2\n # Calculate check digit\n check_digit = (10 - (tot % 10)) % 10\n number += str(check_digit)\n return number\n", "path": "faker/providers/credit_card/__init__.py"}]}
| 1,913 | 192 |
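A quick way to see the effect of the record above is to ask the patched provider for the new card type. This is a sanity check written for illustration, assuming a default `Faker()` instance with the patch applied.

```python
from faker import Faker

fake = Faker()

# With the patch applied, "visa19" is a recognised card type: a 19-digit,
# Luhn-valid number starting with the VISA prefix "4".
number = fake.credit_card_number(card_type="visa19")
assert len(number) == 19 and number.startswith("4")
print(number)
```

Because the new entry goes through the same lookup table as the existing types, helpers such as `credit_card_full(card_type="visa19")` should pick it up without further changes.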
gh_patches_debug_15372
|
rasdani/github-patches
|
git_diff
|
pwndbg__pwndbg-1269
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
pwndbg.gdblib.arch.current is wrong if executed in a pwntools gdbscript command
TL;DR: This should not fail, but does so, because the `patch` command fetches `pwndbg.gdblib.arch.current` which for some reason is wrong/not updated.
```py
from pwn import *
gdbscript = '''
tbreak main
patch $rip 'xor rax, rax'
continue
'''
p = gdb.debug('/bin/ls', gdbscript=gdbscript)
p.interactive()
```
Stacktrace:
```
Traceback (most recent call last):
File "/home/dc/tools/pwndbg/pwndbg/commands/__init__.py", line 145, in __call__
return self.function(*args, **kwargs)
File "/home/dc/tools/pwndbg/pwndbg/commands/__init__.py", line 216, in _OnlyWhenRunning
return function(*a, **kw)
File "/home/dc/tools/pwndbg/pwndbg/commands/patch.py", line 25, in patch
new_mem = asm(ins, arch=pwndbg.gdblib.arch.current)
File "/home/dc/.virtualenvs/pwn/lib/python3.8/site-packages/pwnlib/context/__init__.py", line 1444, in setter
raise AttributeError("Invalid arch/bits combination: %s/%s" % (arch, bits))
AttributeError: Invalid arch/bits combination: i386/64
If that is an issue, you can report it on https://github.com/pwndbg/pwndbg/issues
(Please don't forget to search if it hasn't been reported before)
To generate the report and open a browser, you may run `bugreport --run-browser`
PS: Pull requests are welcome
> /home/dc/.virtualenvs/pwn/lib/python3.8/site-packages/pwnlib/context/__init__.py(1444)setter()
-> raise AttributeError("Invalid arch/bits combination: %s/%s" % (arch, bits))
(Pdb) p arch, bits
('i386', 64)
(Pdb)
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pwndbg/commands/patch.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 import argparse
4
5 from pwnlib.asm import asm
6 from pwnlib.asm import disasm
7
8 import pwndbg.color.message as message
9 import pwndbg.commands
10 import pwndbg.gdblib.memory
11 import pwndbg.lib.memoize
12
13 # Keep old patches made so we can revert them
14 patches = {}
15
16
17 parser = argparse.ArgumentParser(description="Patches given instruction with given code or bytes")
18 parser.add_argument("address", type=int, help="The address to patch")
19 parser.add_argument("ins", type=str, help="instruction[s]")
20
21
22 @pwndbg.commands.ArgparsedCommand(parser)
23 @pwndbg.commands.OnlyWhenRunning
24 def patch(address, ins):
25 new_mem = asm(ins, arch=pwndbg.gdblib.arch.current)
26
27 old_mem = pwndbg.gdblib.memory.read(address, len(new_mem))
28
29 patches[address] = (old_mem, new_mem)
30
31 pwndbg.gdblib.memory.write(address, new_mem)
32
33 pwndbg.lib.memoize.reset()
34
35
36 parser2 = argparse.ArgumentParser(description="Revert patch at given address")
37 parser2.add_argument("address", type=int, help="Address to revert patch on")
38
39
40 @pwndbg.commands.ArgparsedCommand(parser2)
41 @pwndbg.commands.OnlyWhenRunning
42 def patch_revert(address):
43 if not patches:
44 print(message.notice("No patches to revert"))
45 return
46
47 if address == -1:
48 for addr, (old, _new) in patches.items():
49 pwndbg.gdblib.memory.write(addr, old)
50 print(message.notice("Reverted patch at %#x" % addr))
51 patches.clear()
52 else:
53 old, _new = patches[address]
54 pwndbg.gdblib.memory.write(address, old)
55
56 pwndbg.lib.memoize.reset()
57
58
59 parser3 = argparse.ArgumentParser(description="List all patches")
60
61
62 @pwndbg.commands.ArgparsedCommand(parser3)
63 @pwndbg.commands.OnlyWhenRunning
64 def patch_list():
65 if not patches:
66 print(message.hint("No patches to list"))
67 return
68
69 print(message.hint("Patches:"))
70 for addr, (old, new) in patches.items():
71 old_insns = disasm(old, arch=pwndbg.gdblib.arch.current)
72 new_insns = disasm(new, arch=pwndbg.gdblib.arch.current)
73
74 print(
75 message.hint("Patch at"),
76 message.warning("%#x:" % addr),
77 message.hint("from"),
78 message.warning(old_insns.replace("\n", "; ")),
79 message.hint("to"),
80 message.warning(new_insns.replace("\n", "; ")),
81 )
82
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pwndbg/commands/patch.py b/pwndbg/commands/patch.py
--- a/pwndbg/commands/patch.py
+++ b/pwndbg/commands/patch.py
@@ -22,7 +22,7 @@
@pwndbg.commands.ArgparsedCommand(parser)
@pwndbg.commands.OnlyWhenRunning
def patch(address, ins):
- new_mem = asm(ins, arch=pwndbg.gdblib.arch.current)
+ new_mem = asm(ins)
old_mem = pwndbg.gdblib.memory.read(address, len(new_mem))
@@ -68,8 +68,8 @@
print(message.hint("Patches:"))
for addr, (old, new) in patches.items():
- old_insns = disasm(old, arch=pwndbg.gdblib.arch.current)
- new_insns = disasm(new, arch=pwndbg.gdblib.arch.current)
+ old_insns = disasm(old)
+ new_insns = disasm(new)
print(
message.hint("Patch at"),
|
{"golden_diff": "diff --git a/pwndbg/commands/patch.py b/pwndbg/commands/patch.py\n--- a/pwndbg/commands/patch.py\n+++ b/pwndbg/commands/patch.py\n@@ -22,7 +22,7 @@\n @pwndbg.commands.ArgparsedCommand(parser)\n @pwndbg.commands.OnlyWhenRunning\n def patch(address, ins):\n- new_mem = asm(ins, arch=pwndbg.gdblib.arch.current)\n+ new_mem = asm(ins)\n \n old_mem = pwndbg.gdblib.memory.read(address, len(new_mem))\n \n@@ -68,8 +68,8 @@\n \n print(message.hint(\"Patches:\"))\n for addr, (old, new) in patches.items():\n- old_insns = disasm(old, arch=pwndbg.gdblib.arch.current)\n- new_insns = disasm(new, arch=pwndbg.gdblib.arch.current)\n+ old_insns = disasm(old)\n+ new_insns = disasm(new)\n \n print(\n message.hint(\"Patch at\"),\n", "issue": "pwndbg.gdblib.arch.current is wrong if executed in a pwntools gdbscript command\nTL;DR: This should not fail, but does so, because the `patch` command fetches `pwndbg.gdblib.arch.current` which for some reason is wrong/not updated.\r\n\r\n\r\n```py\r\nfrom pwn import *\r\n\r\ngdbscript = '''\r\ntbreak main\r\npatch $rip 'xor rax, rax'\r\ncontinue\r\n'''\r\n\r\np = gdb.debug('/bin/ls', gdbscript=gdbscript)\r\n\r\np.interactive()\r\n```\r\n\r\nStacktrace:\r\n```\r\nTraceback (most recent call last):\r\n File \"/home/dc/tools/pwndbg/pwndbg/commands/__init__.py\", line 145, in __call__\r\n return self.function(*args, **kwargs)\r\n File \"/home/dc/tools/pwndbg/pwndbg/commands/__init__.py\", line 216, in _OnlyWhenRunning\r\n return function(*a, **kw)\r\n File \"/home/dc/tools/pwndbg/pwndbg/commands/patch.py\", line 25, in patch\r\n new_mem = asm(ins, arch=pwndbg.gdblib.arch.current)\r\n File \"/home/dc/.virtualenvs/pwn/lib/python3.8/site-packages/pwnlib/context/__init__.py\", line 1444, in setter\r\n raise AttributeError(\"Invalid arch/bits combination: %s/%s\" % (arch, bits))\r\nAttributeError: Invalid arch/bits combination: i386/64\r\n\r\nIf that is an issue, you can report it on https://github.com/pwndbg/pwndbg/issues\r\n(Please don't forget to search if it hasn't been reported before)\r\nTo generate the report and open a browser, you may run `bugreport --run-browser`\r\nPS: Pull requests are welcome\r\n> /home/dc/.virtualenvs/pwn/lib/python3.8/site-packages/pwnlib/context/__init__.py(1444)setter()\r\n-> raise AttributeError(\"Invalid arch/bits combination: %s/%s\" % (arch, bits))\r\n(Pdb) p arch, bits\r\n('i386', 64)\r\n(Pdb) \r\n```\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport argparse\n\nfrom pwnlib.asm import asm\nfrom pwnlib.asm import disasm\n\nimport pwndbg.color.message as message\nimport pwndbg.commands\nimport pwndbg.gdblib.memory\nimport pwndbg.lib.memoize\n\n# Keep old patches made so we can revert them\npatches = {}\n\n\nparser = argparse.ArgumentParser(description=\"Patches given instruction with given code or bytes\")\nparser.add_argument(\"address\", type=int, help=\"The address to patch\")\nparser.add_argument(\"ins\", type=str, help=\"instruction[s]\")\n\n\[email protected](parser)\[email protected]\ndef patch(address, ins):\n new_mem = asm(ins, arch=pwndbg.gdblib.arch.current)\n\n old_mem = pwndbg.gdblib.memory.read(address, len(new_mem))\n\n patches[address] = (old_mem, new_mem)\n\n pwndbg.gdblib.memory.write(address, new_mem)\n\n pwndbg.lib.memoize.reset()\n\n\nparser2 = argparse.ArgumentParser(description=\"Revert patch at given address\")\nparser2.add_argument(\"address\", type=int, help=\"Address to revert patch on\")\n\n\[email protected](parser2)\[email protected]\ndef 
patch_revert(address):\n if not patches:\n print(message.notice(\"No patches to revert\"))\n return\n\n if address == -1:\n for addr, (old, _new) in patches.items():\n pwndbg.gdblib.memory.write(addr, old)\n print(message.notice(\"Reverted patch at %#x\" % addr))\n patches.clear()\n else:\n old, _new = patches[address]\n pwndbg.gdblib.memory.write(address, old)\n\n pwndbg.lib.memoize.reset()\n\n\nparser3 = argparse.ArgumentParser(description=\"List all patches\")\n\n\[email protected](parser3)\[email protected]\ndef patch_list():\n if not patches:\n print(message.hint(\"No patches to list\"))\n return\n\n print(message.hint(\"Patches:\"))\n for addr, (old, new) in patches.items():\n old_insns = disasm(old, arch=pwndbg.gdblib.arch.current)\n new_insns = disasm(new, arch=pwndbg.gdblib.arch.current)\n\n print(\n message.hint(\"Patch at\"),\n message.warning(\"%#x:\" % addr),\n message.hint(\"from\"),\n message.warning(old_insns.replace(\"\\n\", \"; \")),\n message.hint(\"to\"),\n message.warning(new_insns.replace(\"\\n\", \"; \")),\n )\n", "path": "pwndbg/commands/patch.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport argparse\n\nfrom pwnlib.asm import asm\nfrom pwnlib.asm import disasm\n\nimport pwndbg.color.message as message\nimport pwndbg.commands\nimport pwndbg.gdblib.memory\nimport pwndbg.lib.memoize\n\n# Keep old patches made so we can revert them\npatches = {}\n\n\nparser = argparse.ArgumentParser(description=\"Patches given instruction with given code or bytes\")\nparser.add_argument(\"address\", type=int, help=\"The address to patch\")\nparser.add_argument(\"ins\", type=str, help=\"instruction[s]\")\n\n\[email protected](parser)\[email protected]\ndef patch(address, ins):\n new_mem = asm(ins)\n\n old_mem = pwndbg.gdblib.memory.read(address, len(new_mem))\n\n patches[address] = (old_mem, new_mem)\n\n pwndbg.gdblib.memory.write(address, new_mem)\n\n pwndbg.lib.memoize.reset()\n\n\nparser2 = argparse.ArgumentParser(description=\"Revert patch at given address\")\nparser2.add_argument(\"address\", type=int, help=\"Address to revert patch on\")\n\n\[email protected](parser2)\[email protected]\ndef patch_revert(address):\n if not patches:\n print(message.notice(\"No patches to revert\"))\n return\n\n if address == -1:\n for addr, (old, _new) in patches.items():\n pwndbg.gdblib.memory.write(addr, old)\n print(message.notice(\"Reverted patch at %#x\" % addr))\n patches.clear()\n else:\n old, _new = patches[address]\n pwndbg.gdblib.memory.write(address, old)\n\n pwndbg.lib.memoize.reset()\n\n\nparser3 = argparse.ArgumentParser(description=\"List all patches\")\n\n\[email protected](parser3)\[email protected]\ndef patch_list():\n if not patches:\n print(message.hint(\"No patches to list\"))\n return\n\n print(message.hint(\"Patches:\"))\n for addr, (old, new) in patches.items():\n old_insns = disasm(old)\n new_insns = disasm(new)\n\n print(\n message.hint(\"Patch at\"),\n message.warning(\"%#x:\" % addr),\n message.hint(\"from\"),\n message.warning(old_insns.replace(\"\\n\", \"; \")),\n message.hint(\"to\"),\n message.warning(new_insns.replace(\"\\n\", \"; \")),\n )\n", "path": "pwndbg/commands/patch.py"}]}
| 1,479 | 236 |
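The fix in the record above stops passing a possibly stale `arch=` override and lets pwntools' own `context` decide. For readers unfamiliar with that mechanism, a minimal, self-contained sketch follows; it is illustrative only (it needs the amd64 binutils that pwntools shells out to for assembling) and does not involve pwndbg at all.

```python
from pwnlib.context import context
from pwnlib.asm import asm, disasm

# When arch and bits come from one consistent source, no per-call override
# is needed; a mismatched pair (e.g. i386 with 64 bits) is exactly what
# raises the "Invalid arch/bits combination" error in the traceback above.
with context.local(arch="amd64"):
    code = asm("xor rax, rax")
    print(code.hex())        # 4831c0
    print(disasm(code))
```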
gh_patches_debug_15569
|
rasdani/github-patches
|
git_diff
|
lightly-ai__lightly-215
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Ignore lightly outputs when creating a dataset
When working on a folder of images, e.g.
```
dataset/
L image_1.jpg
L image_2.jpg
L image_3.jpg
```
the following commands will not work when in the dataset directory:
```
lightly-embed input_dir=./
lightly-train input_dir=./
lightly-magic input_dir=./
```
This is because the command-line tool creates a directory `lightly_outputs` where logs and results are stored. However, when creating the `LightlyDataset`, this directory will be interpreted as a subfolder with images in it which leads to an error. We can handle this by ignoring the `lightly_outputs` directory.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lightly/data/_helpers.py`
Content:
```
1 """ Helper Functions """
2
3 # Copyright (c) 2020. Lightly AG and its affiliates.
4 # All Rights Reserved
5
6 import os
7 from torchvision import datasets
8
9 from lightly.data._image import DatasetFolder
10
11 try:
12 from lightly.data._video import VideoDataset
13 VIDEO_DATASET_AVAILABLE = True
14 except Exception as e:
15 VIDEO_DATASET_AVAILABLE = False
16 VIDEO_DATASET_ERRORMSG = e
17
18
19 IMG_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp',
20 '.pgm', '.tif', '.tiff', '.webp')
21
22 VIDEO_EXTENSIONS = ('.mp4', '.mov', '.avi')
23
24
25 def _contains_videos(root: str, extensions: tuple):
26 """Checks whether directory contains video files.
27
28 Args:
29 root: Root directory path.
30
31 Returns:
32 True if root contains subdirectories else false.
33 """
34 list_dir = os.listdir(root)
35 is_video = \
36 [f.lower().endswith(extensions) for f in list_dir]
37 return any(is_video)
38
39
40 def _contains_subdirs(root: str):
41 """Checks whether directory contains subdirectories.
42
43 Args:
44 root: Root directory path.
45
46 Returns:
47 True if root contains subdirectories else false.
48
49 """
50 list_dir = os.listdir(root)
51 is_dir = \
52 [os.path.isdir(os.path.join(root, f)) for f in list_dir]
53 return any(is_dir)
54
55
56 def _load_dataset_from_folder(root: str, transform):
57 """Initializes dataset from folder.
58
59 Args:
60 root: (str) Root directory path
61 transform: (torchvision.transforms.Compose) image transformations
62
63 Returns:
64 Dataset consisting of images in the root directory.
65
66 """
67
68 # if there is a video in the input directory but we do not have
69 # the right dependencies, raise a ValueError
70 contains_videos = _contains_videos(root, VIDEO_EXTENSIONS)
71 if contains_videos and not VIDEO_DATASET_AVAILABLE:
72 raise ValueError(f'The input directory {root} contains videos '
73 'but the VideoDataset is not available. \n'
74 'Make sure you have installed the right '
75 'dependencies. The error from the imported '
76 f'module was: {VIDEO_DATASET_ERRORMSG}')
77
78 if contains_videos:
79 # root contains videos -> create a video dataset
80 dataset = VideoDataset(root,
81 extensions=VIDEO_EXTENSIONS,
82 transform=transform)
83 elif _contains_subdirs(root):
84 # root contains subdirectories -> create an image folder dataset
85 dataset = datasets.ImageFolder(root,
86 transform=transform)
87 else:
88 # root contains plain images -> create a folder dataset
89 dataset = DatasetFolder(root,
90 extensions=IMG_EXTENSIONS,
91 transform=transform)
92
93 return dataset
94
95
96 def _load_dataset(input_dir: str,
97 transform=None):
98 """Initializes dataset from torchvision or from folder.
99
100 Args:
101 root: (str) Directory where dataset is stored
102 name: (str) Name of the dataset (e.g. cifar10, cifar100)
103 train: (bool) Use the training set
104 download: (bool) Download the dataset
105 transform: (torchvision.transforms.Compose) image transformations
106 from_folder: (str) Path to directory holding the images to load.
107
108 Returns:
109 A torchvision dataset
110
111 Raises:
112 ValueError: If the specified dataset doesn't exist
113
114 """
115
116 if not os.path.exists(input_dir):
117 raise ValueError(f'The input directory {input_dir} does not exist!')
118
119 return _load_dataset_from_folder(input_dir, transform)
120
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/lightly/data/_helpers.py b/lightly/data/_helpers.py
--- a/lightly/data/_helpers.py
+++ b/lightly/data/_helpers.py
@@ -37,6 +37,19 @@
return any(is_video)
+def _is_lightly_output_dir(dirname: str):
+ """Checks whether the directory is a lightly_output directory.
+
+ Args:
+ dirname: Directory to check.
+
+ Returns:
+ True if dirname is "lightly_outputs" else false.
+
+ """
+ return 'lightly_outputs' in dirname
+
+
def _contains_subdirs(root: str):
"""Checks whether directory contains subdirectories.
@@ -48,6 +61,7 @@
"""
list_dir = os.listdir(root)
+ list_dir = list(filter(lambda x: not _is_lightly_output_dir(x), list_dir))
is_dir = \
[os.path.isdir(os.path.join(root, f)) for f in list_dir]
return any(is_dir)
|
{"golden_diff": "diff --git a/lightly/data/_helpers.py b/lightly/data/_helpers.py\n--- a/lightly/data/_helpers.py\n+++ b/lightly/data/_helpers.py\n@@ -37,6 +37,19 @@\n return any(is_video)\n \n \n+def _is_lightly_output_dir(dirname: str):\n+ \"\"\"Checks whether the directory is a lightly_output directory.\n+\n+ Args:\n+ dirname: Directory to check.\n+\n+ Returns:\n+ True if dirname is \"lightly_outputs\" else false.\n+\n+ \"\"\"\n+ return 'lightly_outputs' in dirname\n+\n+\n def _contains_subdirs(root: str):\n \"\"\"Checks whether directory contains subdirectories.\n \n@@ -48,6 +61,7 @@\n \n \"\"\"\n list_dir = os.listdir(root)\n+ list_dir = list(filter(lambda x: not _is_lightly_output_dir(x), list_dir))\n is_dir = \\\n [os.path.isdir(os.path.join(root, f)) for f in list_dir]\n return any(is_dir)\n", "issue": "Ignore lightly outputs when creating a dataset\nWhen working on a folder of images, e.g.\r\n```\r\ndataset/\r\nL image_1.jpg\r\nL image_2.jpg\r\nL image_3.jpg\r\n```\r\nthe following commands will not work when in the dataset directory:\r\n```\r\nlightly-embed input_dir=./\r\nlightly-train input_dir=./\r\nlightly-magic input_dir=./\r\n```\r\n\r\nThis is because the command-line tool creates a directory `lightly_outputs` where logs and results are stored. However, when creating the `LightlyDataset`, this directory will be interpreted as a subfolder with images in it which leads to an error. We can handle this by ignoring the `lightly_outputs` directory.\n", "before_files": [{"content": "\"\"\" Helper Functions \"\"\"\n\n# Copyright (c) 2020. Lightly AG and its affiliates.\n# All Rights Reserved\n\nimport os\nfrom torchvision import datasets\n\nfrom lightly.data._image import DatasetFolder\n\ntry:\n from lightly.data._video import VideoDataset\n VIDEO_DATASET_AVAILABLE = True\nexcept Exception as e:\n VIDEO_DATASET_AVAILABLE = False\n VIDEO_DATASET_ERRORMSG = e\n\n\nIMG_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp',\n '.pgm', '.tif', '.tiff', '.webp')\n\nVIDEO_EXTENSIONS = ('.mp4', '.mov', '.avi')\n\n\ndef _contains_videos(root: str, extensions: tuple):\n \"\"\"Checks whether directory contains video files.\n\n Args:\n root: Root directory path.\n\n Returns:\n True if root contains subdirectories else false.\n \"\"\"\n list_dir = os.listdir(root)\n is_video = \\\n [f.lower().endswith(extensions) for f in list_dir]\n return any(is_video)\n\n\ndef _contains_subdirs(root: str):\n \"\"\"Checks whether directory contains subdirectories.\n\n Args:\n root: Root directory path.\n\n Returns:\n True if root contains subdirectories else false.\n\n \"\"\"\n list_dir = os.listdir(root)\n is_dir = \\\n [os.path.isdir(os.path.join(root, f)) for f in list_dir]\n return any(is_dir)\n\n\ndef _load_dataset_from_folder(root: str, transform):\n \"\"\"Initializes dataset from folder.\n\n Args:\n root: (str) Root directory path\n transform: (torchvision.transforms.Compose) image transformations\n\n Returns:\n Dataset consisting of images in the root directory.\n\n \"\"\"\n\n # if there is a video in the input directory but we do not have\n # the right dependencies, raise a ValueError\n contains_videos = _contains_videos(root, VIDEO_EXTENSIONS)\n if contains_videos and not VIDEO_DATASET_AVAILABLE:\n raise ValueError(f'The input directory {root} contains videos '\n 'but the VideoDataset is not available. \\n'\n 'Make sure you have installed the right '\n 'dependencies. 
The error from the imported '\n f'module was: {VIDEO_DATASET_ERRORMSG}')\n\n if contains_videos:\n # root contains videos -> create a video dataset\n dataset = VideoDataset(root,\n extensions=VIDEO_EXTENSIONS,\n transform=transform)\n elif _contains_subdirs(root):\n # root contains subdirectories -> create an image folder dataset\n dataset = datasets.ImageFolder(root,\n transform=transform)\n else:\n # root contains plain images -> create a folder dataset\n dataset = DatasetFolder(root,\n extensions=IMG_EXTENSIONS,\n transform=transform)\n\n return dataset\n\n\ndef _load_dataset(input_dir: str,\n transform=None):\n \"\"\"Initializes dataset from torchvision or from folder.\n\n Args:\n root: (str) Directory where dataset is stored\n name: (str) Name of the dataset (e.g. cifar10, cifar100)\n train: (bool) Use the training set\n download: (bool) Download the dataset\n transform: (torchvision.transforms.Compose) image transformations\n from_folder: (str) Path to directory holding the images to load.\n\n Returns:\n A torchvision dataset\n\n Raises:\n ValueError: If the specified dataset doesn't exist\n\n \"\"\"\n\n if not os.path.exists(input_dir):\n raise ValueError(f'The input directory {input_dir} does not exist!')\n\n return _load_dataset_from_folder(input_dir, transform)\n", "path": "lightly/data/_helpers.py"}], "after_files": [{"content": "\"\"\" Helper Functions \"\"\"\n\n# Copyright (c) 2020. Lightly AG and its affiliates.\n# All Rights Reserved\n\nimport os\nfrom torchvision import datasets\n\nfrom lightly.data._image import DatasetFolder\n\ntry:\n from lightly.data._video import VideoDataset\n VIDEO_DATASET_AVAILABLE = True\nexcept Exception as e:\n VIDEO_DATASET_AVAILABLE = False\n VIDEO_DATASET_ERRORMSG = e\n\n\nIMG_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp',\n '.pgm', '.tif', '.tiff', '.webp')\n\nVIDEO_EXTENSIONS = ('.mp4', '.mov', '.avi')\n\n\ndef _contains_videos(root: str, extensions: tuple):\n \"\"\"Checks whether directory contains video files.\n\n Args:\n root: Root directory path.\n\n Returns:\n True if root contains subdirectories else false.\n \"\"\"\n list_dir = os.listdir(root)\n is_video = \\\n [f.lower().endswith(extensions) for f in list_dir]\n return any(is_video)\n\n\ndef _is_lightly_output_dir(dirname: str):\n \"\"\"Checks whether the directory is a lightly_output directory.\n\n Args:\n dirname: Directory to check.\n\n Returns:\n True if dirname is \"lightly_outputs\" else false.\n\n \"\"\"\n return 'lightly_outputs' in dirname\n\n\ndef _contains_subdirs(root: str):\n \"\"\"Checks whether directory contains subdirectories.\n\n Args:\n root: Root directory path.\n\n Returns:\n True if root contains subdirectories else false.\n\n \"\"\"\n list_dir = os.listdir(root)\n list_dir = list(filter(lambda x: not _is_lightly_output_dir(x), list_dir))\n is_dir = \\\n [os.path.isdir(os.path.join(root, f)) for f in list_dir]\n return any(is_dir)\n\n\ndef _load_dataset_from_folder(root: str, transform):\n \"\"\"Initializes dataset from folder.\n\n Args:\n root: (str) Root directory path\n transform: (torchvision.transforms.Compose) image transformations\n\n Returns:\n Dataset consisting of images in the root directory.\n\n \"\"\"\n\n # if there is a video in the input directory but we do not have\n # the right dependencies, raise a ValueError\n contains_videos = _contains_videos(root, VIDEO_EXTENSIONS)\n if contains_videos and not VIDEO_DATASET_AVAILABLE:\n raise ValueError(f'The input directory {root} contains videos '\n 'but the VideoDataset is not available. 
\\n'\n 'Make sure you have installed the right '\n 'dependencies. The error from the imported '\n f'module was: {VIDEO_DATASET_ERRORMSG}')\n\n if contains_videos:\n # root contains videos -> create a video dataset\n dataset = VideoDataset(root,\n extensions=VIDEO_EXTENSIONS,\n transform=transform)\n elif _contains_subdirs(root):\n # root contains subdirectories -> create an image folder dataset\n dataset = datasets.ImageFolder(root,\n transform=transform)\n else:\n # root contains plain images -> create a folder dataset\n dataset = DatasetFolder(root,\n extensions=IMG_EXTENSIONS,\n transform=transform)\n\n return dataset\n\n\ndef _load_dataset(input_dir: str,\n transform=None):\n \"\"\"Initializes dataset from torchvision or from folder.\n\n Args:\n root: (str) Directory where dataset is stored\n name: (str) Name of the dataset (e.g. cifar10, cifar100)\n train: (bool) Use the training set\n download: (bool) Download the dataset\n transform: (torchvision.transforms.Compose) image transformations\n from_folder: (str) Path to directory holding the images to load.\n\n Returns:\n A torchvision dataset\n\n Raises:\n ValueError: If the specified dataset doesn't exist\n\n \"\"\"\n\n if not os.path.exists(input_dir):\n raise ValueError(f'The input directory {input_dir} does not exist!')\n\n return _load_dataset_from_folder(input_dir, transform)\n", "path": "lightly/data/_helpers.py"}]}
| 1,432 | 221 |
gh_patches_debug_15140
|
rasdani/github-patches
|
git_diff
|
uccser__cs-unplugged-1381
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Plugging it in area is not accessible on dev server
This link should work: https://cs-unplugged-dev.appspot.com/plugging-it-in/
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `csunplugged/config/urls.py`
Content:
```
1 """URL configuration for the Django system.
2
3 The `urlpatterns` list routes URLs to views. For more information please see:
4 https://docs.djangoproject.com/en/dev/topics/http/urls/
5 """
6
7 from django.conf import settings
8 from django.urls import include, path
9 from django.conf.urls.i18n import i18n_patterns
10 from django.contrib import admin
11
12 urlpatterns = i18n_patterns(
13 path('', include('general.urls', namespace='general')),
14 path('topics/', include('topics.urls', namespace='topics')),
15 path('resources/', include('resources.urls', namespace='resources')),
16 path('at-home/', include('at_home.urls', namespace='at_home')),
17 )
18
19 urlpatterns += [
20 path('', include('classic.urls')),
21 path('en/search/', include('search.urls', namespace='search')),
22 path('admin/', admin.site.urls),
23 ]
24
25 if not settings.DJANGO_PRODUCTION:
26 urlpatterns += [
27 path('plugging-it-in/', include('plugging_it_in.urls', namespace='plugging_it_in')),
28 ]
29
30 if settings.DEBUG: # pragma: no cover
31 import debug_toolbar
32 urlpatterns += [
33 path('__debug__/', include(debug_toolbar.urls)),
34 ]
35 urlpatterns += i18n_patterns(
36 path('__dev__/', include('dev.urls', namespace='dev')),
37 )
38 # These patterns allows these error pages to be debugged during development.
39 from django.views import defaults
40 urlpatterns += [
41 path('400/', defaults.bad_request, kwargs={'exception': Exception('Bad request')}),
42 path('403/', defaults.permission_denied, kwargs={'exception': Exception('Permissin denied')}),
43 path('404/', defaults.page_not_found, kwargs={'exception': Exception('Page not found')}),
44 path('500/', defaults.server_error),
45 ]
46
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/csunplugged/config/urls.py b/csunplugged/config/urls.py
--- a/csunplugged/config/urls.py
+++ b/csunplugged/config/urls.py
@@ -8,6 +8,8 @@
from django.urls import include, path
from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
+import environ
+env = environ.Env()
urlpatterns = i18n_patterns(
path('', include('general.urls', namespace='general')),
@@ -22,7 +24,7 @@
path('admin/', admin.site.urls),
]
-if not settings.DJANGO_PRODUCTION:
+if not env("DEPLOYMENT", default=None) == "prod":
urlpatterns += [
path('plugging-it-in/', include('plugging_it_in.urls', namespace='plugging_it_in')),
]
|
{"golden_diff": "diff --git a/csunplugged/config/urls.py b/csunplugged/config/urls.py\n--- a/csunplugged/config/urls.py\n+++ b/csunplugged/config/urls.py\n@@ -8,6 +8,8 @@\n from django.urls import include, path\n from django.conf.urls.i18n import i18n_patterns\n from django.contrib import admin\n+import environ\n+env = environ.Env()\n \n urlpatterns = i18n_patterns(\n path('', include('general.urls', namespace='general')),\n@@ -22,7 +24,7 @@\n path('admin/', admin.site.urls),\n ]\n \n-if not settings.DJANGO_PRODUCTION:\n+if not env(\"DEPLOYMENT\", default=None) == \"prod\":\n urlpatterns += [\n path('plugging-it-in/', include('plugging_it_in.urls', namespace='plugging_it_in')),\n ]\n", "issue": "Plugging it in area is not accessible on dev server\nThis link should work: https://cs-unplugged-dev.appspot.com/plugging-it-in/\n", "before_files": [{"content": "\"\"\"URL configuration for the Django system.\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/dev/topics/http/urls/\n\"\"\"\n\nfrom django.conf import settings\nfrom django.urls import include, path\nfrom django.conf.urls.i18n import i18n_patterns\nfrom django.contrib import admin\n\nurlpatterns = i18n_patterns(\n path('', include('general.urls', namespace='general')),\n path('topics/', include('topics.urls', namespace='topics')),\n path('resources/', include('resources.urls', namespace='resources')),\n path('at-home/', include('at_home.urls', namespace='at_home')),\n)\n\nurlpatterns += [\n path('', include('classic.urls')),\n path('en/search/', include('search.urls', namespace='search')),\n path('admin/', admin.site.urls),\n]\n\nif not settings.DJANGO_PRODUCTION:\n urlpatterns += [\n path('plugging-it-in/', include('plugging_it_in.urls', namespace='plugging_it_in')),\n ]\n\nif settings.DEBUG: # pragma: no cover\n import debug_toolbar\n urlpatterns += [\n path('__debug__/', include(debug_toolbar.urls)),\n ]\n urlpatterns += i18n_patterns(\n path('__dev__/', include('dev.urls', namespace='dev')),\n )\n # These patterns allows these error pages to be debugged during development.\n from django.views import defaults\n urlpatterns += [\n path('400/', defaults.bad_request, kwargs={'exception': Exception('Bad request')}),\n path('403/', defaults.permission_denied, kwargs={'exception': Exception('Permissin denied')}),\n path('404/', defaults.page_not_found, kwargs={'exception': Exception('Page not found')}),\n path('500/', defaults.server_error),\n ]\n", "path": "csunplugged/config/urls.py"}], "after_files": [{"content": "\"\"\"URL configuration for the Django system.\n\nThe `urlpatterns` list routes URLs to views. 
For more information please see:\n https://docs.djangoproject.com/en/dev/topics/http/urls/\n\"\"\"\n\nfrom django.conf import settings\nfrom django.urls import include, path\nfrom django.conf.urls.i18n import i18n_patterns\nfrom django.contrib import admin\nimport environ\nenv = environ.Env()\n\nurlpatterns = i18n_patterns(\n path('', include('general.urls', namespace='general')),\n path('topics/', include('topics.urls', namespace='topics')),\n path('resources/', include('resources.urls', namespace='resources')),\n path('at-home/', include('at_home.urls', namespace='at_home')),\n)\n\nurlpatterns += [\n path('', include('classic.urls')),\n path('en/search/', include('search.urls', namespace='search')),\n path('admin/', admin.site.urls),\n]\n\nif not env(\"DEPLOYMENT\", default=None) == \"prod\":\n urlpatterns += [\n path('plugging-it-in/', include('plugging_it_in.urls', namespace='plugging_it_in')),\n ]\n\nif settings.DEBUG: # pragma: no cover\n import debug_toolbar\n urlpatterns += [\n path('__debug__/', include(debug_toolbar.urls)),\n ]\n urlpatterns += i18n_patterns(\n path('__dev__/', include('dev.urls', namespace='dev')),\n )\n # These patterns allows these error pages to be debugged during development.\n from django.views import defaults\n urlpatterns += [\n path('400/', defaults.bad_request, kwargs={'exception': Exception('Bad request')}),\n path('403/', defaults.permission_denied, kwargs={'exception': Exception('Permissin denied')}),\n path('404/', defaults.page_not_found, kwargs={'exception': Exception('Page not found')}),\n path('500/', defaults.server_error),\n ]\n", "path": "csunplugged/config/urls.py"}]}
| 761 | 191 |
gh_patches_debug_23012
|
rasdani/github-patches
|
git_diff
|
tensorflow__addons-549
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Package TFA 0.5.2 pinned to TF2-RC2
RC2 is released so we can do a minor release pinned to this. For now this is blocked until #539 is merged.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 # ==============================================================================
15 """TensorFlow Addons.
16
17 TensorFlow Addons is a repository of contributions that conform to well-
18 established API patterns, but implement new functionality not available
19 in core TensorFlow. TensorFlow natively supports a large number of
20 operators, layers, metrics, losses, and optimizers. However, in a fast
21 moving field like ML, there are many interesting new developments that
22 cannot be integrated into core TensorFlow (because their broad
23 applicability is not yet clear, or it is mostly used by a smaller subset
24 of the community).
25 """
26
27 from __future__ import absolute_import
28 from __future__ import division
29 from __future__ import print_function
30
31 import os
32 import platform
33 import sys
34
35 from datetime import datetime
36 from setuptools import find_packages
37 from setuptools import setup
38 from setuptools.dist import Distribution
39 from setuptools import Extension
40
41 DOCLINES = __doc__.split('\n')
42
43 TFA_NIGHTLY = 'tfa-nightly'
44 TFA_RELEASE = 'tensorflow-addons'
45
46 if '--nightly' in sys.argv:
47 project_name = TFA_NIGHTLY
48 nightly_idx = sys.argv.index('--nightly')
49 sys.argv.pop(nightly_idx)
50 else:
51 project_name = TFA_RELEASE
52
53 # Version
54 version = {}
55 base_dir = os.path.dirname(os.path.abspath(__file__))
56 with open(os.path.join(base_dir, "tensorflow_addons", "version.py")) as fp:
57 # yapf: disable
58 exec(fp.read(), version)
59 # yapf: enable
60
61 if project_name == TFA_NIGHTLY:
62 version['__version__'] += datetime.strftime(datetime.today(), "%Y%m%d")
63
64 # Dependencies
65 REQUIRED_PACKAGES = [
66 'six >= 1.10.0',
67 ]
68
69 if project_name == TFA_RELEASE:
70 # TODO: remove if-else condition when tf supports package consolidation.
71 if platform.system() == 'Linux':
72 REQUIRED_PACKAGES.append('tensorflow-gpu == 2.0.0-rc1')
73 else:
74 REQUIRED_PACKAGES.append('tensorflow == 2.0.0-rc1')
75 elif project_name == TFA_NIGHTLY:
76 # TODO: remove if-else condition when tf-nightly supports package consolidation.
77 if platform.system() == 'Linux':
78 REQUIRED_PACKAGES.append('tf-nightly-gpu-2.0-preview')
79 else:
80 REQUIRED_PACKAGES.append('tf-nightly-2.0-preview')
81
82
83 class BinaryDistribution(Distribution):
84 """This class is needed in order to create OS specific wheels."""
85
86 def has_ext_modules(self):
87 return True
88
89
90 setup(
91 name=project_name,
92 version=version['__version__'],
93 description=DOCLINES[0],
94 long_description='\n'.join(DOCLINES[2:]),
95 author='Google Inc.',
96 author_email='[email protected]',
97 packages=find_packages(),
98 ext_modules=[Extension('_foo', ['stub.cc'])],
99 install_requires=REQUIRED_PACKAGES,
100 include_package_data=True,
101 zip_safe=False,
102 distclass=BinaryDistribution,
103 classifiers=[
104 'Development Status :: 4 - Beta',
105 'Intended Audience :: Developers',
106 'Intended Audience :: Education',
107 'Intended Audience :: Science/Research',
108 'License :: OSI Approved :: Apache Software License',
109 'Programming Language :: Python :: 2.7',
110 'Programming Language :: Python :: 3.5',
111 'Programming Language :: Python :: 3.6',
112 'Programming Language :: Python :: 3.7',
113 'Topic :: Scientific/Engineering :: Mathematics',
114 'Topic :: Software Development :: Libraries :: Python Modules',
115 'Topic :: Software Development :: Libraries',
116 ],
117 license='Apache 2.0',
118 keywords='tensorflow addons machine learning',
119 )
120
```
Path: `tensorflow_addons/version.py`
Content:
```
1 # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 # ============================================================================
15 """Define TensorFlow Addons version information."""
16 from __future__ import absolute_import
17 from __future__ import division
18 from __future__ import print_function
19
20 # We follow Semantic Versioning (https://semver.org/)
21 _MAJOR_VERSION = '0'
22 _MINOR_VERSION = '5'
23 _PATCH_VERSION = '1'
24
25 # When building releases, we can update this value on the release branch to
26 # reflect the current release candidate ('rc0', 'rc1') or, finally, the official
27 # stable release (indicated by `_VERSION_SUFFIX = ''`). Outside the context of a
28 # release branch, the current version is by default assumed to be a
29 # 'development' version, labeled 'dev'.
30 _VERSION_SUFFIX = ''
31
32 # Example, '0.1.0-dev'
33 __version__ = '.'.join([
34 _MAJOR_VERSION,
35 _MINOR_VERSION,
36 _PATCH_VERSION,
37 ])
38 if _VERSION_SUFFIX:
39 __version__ = '{}-{}'.format(__version__, _VERSION_SUFFIX)
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -69,9 +69,9 @@
if project_name == TFA_RELEASE:
# TODO: remove if-else condition when tf supports package consolidation.
if platform.system() == 'Linux':
- REQUIRED_PACKAGES.append('tensorflow-gpu == 2.0.0-rc1')
+ REQUIRED_PACKAGES.append('tensorflow-gpu == 2.0.0')
else:
- REQUIRED_PACKAGES.append('tensorflow == 2.0.0-rc1')
+ REQUIRED_PACKAGES.append('tensorflow == 2.0.0')
elif project_name == TFA_NIGHTLY:
# TODO: remove if-else condition when tf-nightly supports package consolidation.
if platform.system() == 'Linux':
diff --git a/tensorflow_addons/version.py b/tensorflow_addons/version.py
--- a/tensorflow_addons/version.py
+++ b/tensorflow_addons/version.py
@@ -20,7 +20,7 @@
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '5'
-_PATCH_VERSION = '1'
+_PATCH_VERSION = '2'
# When building releases, we can update this value on the release branch to
# reflect the current release candidate ('rc0', 'rc1') or, finally, the official
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -69,9 +69,9 @@\n if project_name == TFA_RELEASE:\n # TODO: remove if-else condition when tf supports package consolidation.\n if platform.system() == 'Linux':\n- REQUIRED_PACKAGES.append('tensorflow-gpu == 2.0.0-rc1')\n+ REQUIRED_PACKAGES.append('tensorflow-gpu == 2.0.0')\n else:\n- REQUIRED_PACKAGES.append('tensorflow == 2.0.0-rc1')\n+ REQUIRED_PACKAGES.append('tensorflow == 2.0.0')\n elif project_name == TFA_NIGHTLY:\n # TODO: remove if-else condition when tf-nightly supports package consolidation.\n if platform.system() == 'Linux':\ndiff --git a/tensorflow_addons/version.py b/tensorflow_addons/version.py\n--- a/tensorflow_addons/version.py\n+++ b/tensorflow_addons/version.py\n@@ -20,7 +20,7 @@\n # We follow Semantic Versioning (https://semver.org/)\n _MAJOR_VERSION = '0'\n _MINOR_VERSION = '5'\n-_PATCH_VERSION = '1'\n+_PATCH_VERSION = '2'\n \n # When building releases, we can update this value on the release branch to\n # reflect the current release candidate ('rc0', 'rc1') or, finally, the official\n", "issue": "Package TFA 0.5.2 pinned to TF2-RC2\nRC2 is released so we can do a minor release pinned to this. For now this is blocked until #539 is merged.\n", "before_files": [{"content": "# Copyright 2019 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"TensorFlow Addons.\n\nTensorFlow Addons is a repository of contributions that conform to well-\nestablished API patterns, but implement new functionality not available\nin core TensorFlow. TensorFlow natively supports a large number of\noperators, layers, metrics, losses, and optimizers. 
However, in a fast\nmoving field like ML, there are many interesting new developments that\ncannot be integrated into core TensorFlow (because their broad\napplicability is not yet clear, or it is mostly used by a smaller subset\nof the community).\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport os\nimport platform\nimport sys\n\nfrom datetime import datetime\nfrom setuptools import find_packages\nfrom setuptools import setup\nfrom setuptools.dist import Distribution\nfrom setuptools import Extension\n\nDOCLINES = __doc__.split('\\n')\n\nTFA_NIGHTLY = 'tfa-nightly'\nTFA_RELEASE = 'tensorflow-addons'\n\nif '--nightly' in sys.argv:\n project_name = TFA_NIGHTLY\n nightly_idx = sys.argv.index('--nightly')\n sys.argv.pop(nightly_idx)\nelse:\n project_name = TFA_RELEASE\n\n# Version\nversion = {}\nbase_dir = os.path.dirname(os.path.abspath(__file__))\nwith open(os.path.join(base_dir, \"tensorflow_addons\", \"version.py\")) as fp:\n # yapf: disable\n exec(fp.read(), version)\n # yapf: enable\n\nif project_name == TFA_NIGHTLY:\n version['__version__'] += datetime.strftime(datetime.today(), \"%Y%m%d\")\n\n# Dependencies\nREQUIRED_PACKAGES = [\n 'six >= 1.10.0',\n]\n\nif project_name == TFA_RELEASE:\n # TODO: remove if-else condition when tf supports package consolidation.\n if platform.system() == 'Linux':\n REQUIRED_PACKAGES.append('tensorflow-gpu == 2.0.0-rc1')\n else:\n REQUIRED_PACKAGES.append('tensorflow == 2.0.0-rc1')\nelif project_name == TFA_NIGHTLY:\n # TODO: remove if-else condition when tf-nightly supports package consolidation.\n if platform.system() == 'Linux':\n REQUIRED_PACKAGES.append('tf-nightly-gpu-2.0-preview')\n else:\n REQUIRED_PACKAGES.append('tf-nightly-2.0-preview')\n\n\nclass BinaryDistribution(Distribution):\n \"\"\"This class is needed in order to create OS specific wheels.\"\"\"\n\n def has_ext_modules(self):\n return True\n\n\nsetup(\n name=project_name,\n version=version['__version__'],\n description=DOCLINES[0],\n long_description='\\n'.join(DOCLINES[2:]),\n author='Google Inc.',\n author_email='[email protected]',\n packages=find_packages(),\n ext_modules=[Extension('_foo', ['stub.cc'])],\n install_requires=REQUIRED_PACKAGES,\n include_package_data=True,\n zip_safe=False,\n distclass=BinaryDistribution,\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Scientific/Engineering :: Mathematics',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'Topic :: Software Development :: Libraries',\n ],\n license='Apache 2.0',\n keywords='tensorflow addons machine learning',\n)\n", "path": "setup.py"}, {"content": "# Copyright 2019 The TensorFlow Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ============================================================================\n\"\"\"Define TensorFlow Addons version information.\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\n# We follow Semantic Versioning (https://semver.org/)\n_MAJOR_VERSION = '0'\n_MINOR_VERSION = '5'\n_PATCH_VERSION = '1'\n\n# When building releases, we can update this value on the release branch to\n# reflect the current release candidate ('rc0', 'rc1') or, finally, the official\n# stable release (indicated by `_VERSION_SUFFIX = ''`). Outside the context of a\n# release branch, the current version is by default assumed to be a\n# 'development' version, labeled 'dev'.\n_VERSION_SUFFIX = ''\n\n# Example, '0.1.0-dev'\n__version__ = '.'.join([\n _MAJOR_VERSION,\n _MINOR_VERSION,\n _PATCH_VERSION,\n])\nif _VERSION_SUFFIX:\n __version__ = '{}-{}'.format(__version__, _VERSION_SUFFIX)\n", "path": "tensorflow_addons/version.py"}], "after_files": [{"content": "# Copyright 2019 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"TensorFlow Addons.\n\nTensorFlow Addons is a repository of contributions that conform to well-\nestablished API patterns, but implement new functionality not available\nin core TensorFlow. TensorFlow natively supports a large number of\noperators, layers, metrics, losses, and optimizers. 
However, in a fast\nmoving field like ML, there are many interesting new developments that\ncannot be integrated into core TensorFlow (because their broad\napplicability is not yet clear, or it is mostly used by a smaller subset\nof the community).\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport os\nimport platform\nimport sys\n\nfrom datetime import datetime\nfrom setuptools import find_packages\nfrom setuptools import setup\nfrom setuptools.dist import Distribution\nfrom setuptools import Extension\n\nDOCLINES = __doc__.split('\\n')\n\nTFA_NIGHTLY = 'tfa-nightly'\nTFA_RELEASE = 'tensorflow-addons'\n\nif '--nightly' in sys.argv:\n project_name = TFA_NIGHTLY\n nightly_idx = sys.argv.index('--nightly')\n sys.argv.pop(nightly_idx)\nelse:\n project_name = TFA_RELEASE\n\n# Version\nversion = {}\nbase_dir = os.path.dirname(os.path.abspath(__file__))\nwith open(os.path.join(base_dir, \"tensorflow_addons\", \"version.py\")) as fp:\n # yapf: disable\n exec(fp.read(), version)\n # yapf: enable\n\nif project_name == TFA_NIGHTLY:\n version['__version__'] += datetime.strftime(datetime.today(), \"%Y%m%d\")\n\n# Dependencies\nREQUIRED_PACKAGES = [\n 'six >= 1.10.0',\n]\n\nif project_name == TFA_RELEASE:\n # TODO: remove if-else condition when tf supports package consolidation.\n if platform.system() == 'Linux':\n REQUIRED_PACKAGES.append('tensorflow-gpu == 2.0.0')\n else:\n REQUIRED_PACKAGES.append('tensorflow == 2.0.0')\nelif project_name == TFA_NIGHTLY:\n # TODO: remove if-else condition when tf-nightly supports package consolidation.\n if platform.system() == 'Linux':\n REQUIRED_PACKAGES.append('tf-nightly-gpu-2.0-preview')\n else:\n REQUIRED_PACKAGES.append('tf-nightly-2.0-preview')\n\n\nclass BinaryDistribution(Distribution):\n \"\"\"This class is needed in order to create OS specific wheels.\"\"\"\n\n def has_ext_modules(self):\n return True\n\n\nsetup(\n name=project_name,\n version=version['__version__'],\n description=DOCLINES[0],\n long_description='\\n'.join(DOCLINES[2:]),\n author='Google Inc.',\n author_email='[email protected]',\n packages=find_packages(),\n ext_modules=[Extension('_foo', ['stub.cc'])],\n install_requires=REQUIRED_PACKAGES,\n include_package_data=True,\n zip_safe=False,\n distclass=BinaryDistribution,\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Scientific/Engineering :: Mathematics',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'Topic :: Software Development :: Libraries',\n ],\n license='Apache 2.0',\n keywords='tensorflow addons machine learning',\n)\n", "path": "setup.py"}, {"content": "# Copyright 2019 The TensorFlow Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ============================================================================\n\"\"\"Define TensorFlow Addons version information.\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\n# We follow Semantic Versioning (https://semver.org/)\n_MAJOR_VERSION = '0'\n_MINOR_VERSION = '5'\n_PATCH_VERSION = '2'\n\n# When building releases, we can update this value on the release branch to\n# reflect the current release candidate ('rc0', 'rc1') or, finally, the official\n# stable release (indicated by `_VERSION_SUFFIX = ''`). Outside the context of a\n# release branch, the current version is by default assumed to be a\n# 'development' version, labeled 'dev'.\n_VERSION_SUFFIX = ''\n\n# Example, '0.1.0-dev'\n__version__ = '.'.join([\n _MAJOR_VERSION,\n _MINOR_VERSION,\n _PATCH_VERSION,\n])\nif _VERSION_SUFFIX:\n __version__ = '{}-{}'.format(__version__, _VERSION_SUFFIX)\n", "path": "tensorflow_addons/version.py"}]}
| 1,920 | 310 |
gh_patches_debug_23993
|
rasdani/github-patches
|
git_diff
|
sanic-org__sanic-2640
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
There is an obvious bug in ASGI WebsocketConnection of Sanic
### Is there an existing issue for this?
- [X] I have searched the existing issues
### Describe the bug
I started my sanic app with UvicornWorker. The original websocket will become WebsocketConnection. When I call
the ws.recv function will report an error if bytes data is received at this time.
`KeyError:‘text’`
[https://github.com/sanic-org/sanic/blob/main/sanic/server/websockets/connection.py](url)
` async def recv(self, *args, **kwargs) -> Optional[str]:
message = await self._receive()
if message["type"] == "websocket.receive":
return message["text"]
elif message["type"] == "websocket.disconnect":
pass
return None`
There is no data of bytes type processed here.
### Code snippet
_No response_
### Expected Behavior
_No response_
### How do you run Sanic?
ASGI
### Operating System
ubuntu
### Sanic Version
22.3
### Additional context
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sanic/server/websockets/connection.py`
Content:
```
1 from typing import (
2 Any,
3 Awaitable,
4 Callable,
5 Dict,
6 List,
7 MutableMapping,
8 Optional,
9 Union,
10 )
11
12
13 ASIMessage = MutableMapping[str, Any]
14
15
16 class WebSocketConnection:
17 """
18 This is for ASGI Connections.
19 It provides an interface similar to WebsocketProtocol, but
20 sends/receives over an ASGI connection.
21 """
22
23 # TODO
24 # - Implement ping/pong
25
26 def __init__(
27 self,
28 send: Callable[[ASIMessage], Awaitable[None]],
29 receive: Callable[[], Awaitable[ASIMessage]],
30 subprotocols: Optional[List[str]] = None,
31 ) -> None:
32 self._send = send
33 self._receive = receive
34 self._subprotocols = subprotocols or []
35
36 async def send(self, data: Union[str, bytes], *args, **kwargs) -> None:
37 message: Dict[str, Union[str, bytes]] = {"type": "websocket.send"}
38
39 if isinstance(data, bytes):
40 message.update({"bytes": data})
41 else:
42 message.update({"text": str(data)})
43
44 await self._send(message)
45
46 async def recv(self, *args, **kwargs) -> Optional[str]:
47 message = await self._receive()
48
49 if message["type"] == "websocket.receive":
50 return message["text"]
51 elif message["type"] == "websocket.disconnect":
52 pass
53
54 return None
55
56 receive = recv
57
58 async def accept(self, subprotocols: Optional[List[str]] = None) -> None:
59 subprotocol = None
60 if subprotocols:
61 for subp in subprotocols:
62 if subp in self.subprotocols:
63 subprotocol = subp
64 break
65
66 await self._send(
67 {
68 "type": "websocket.accept",
69 "subprotocol": subprotocol,
70 }
71 )
72
73 async def close(self, code: int = 1000, reason: str = "") -> None:
74 pass
75
76 @property
77 def subprotocols(self):
78 return self._subprotocols
79
80 @subprotocols.setter
81 def subprotocols(self, subprotocols: Optional[List[str]] = None):
82 self._subprotocols = subprotocols or []
83
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sanic/server/websockets/connection.py b/sanic/server/websockets/connection.py
--- a/sanic/server/websockets/connection.py
+++ b/sanic/server/websockets/connection.py
@@ -9,8 +9,10 @@
Union,
)
+from sanic.exceptions import InvalidUsage
-ASIMessage = MutableMapping[str, Any]
+
+ASGIMessage = MutableMapping[str, Any]
class WebSocketConnection:
@@ -25,8 +27,8 @@
def __init__(
self,
- send: Callable[[ASIMessage], Awaitable[None]],
- receive: Callable[[], Awaitable[ASIMessage]],
+ send: Callable[[ASGIMessage], Awaitable[None]],
+ receive: Callable[[], Awaitable[ASGIMessage]],
subprotocols: Optional[List[str]] = None,
) -> None:
self._send = send
@@ -47,7 +49,13 @@
message = await self._receive()
if message["type"] == "websocket.receive":
- return message["text"]
+ try:
+ return message["text"]
+ except KeyError:
+ try:
+ return message["bytes"].decode()
+ except KeyError:
+ raise InvalidUsage("Bad ASGI message received")
elif message["type"] == "websocket.disconnect":
pass
|
{"golden_diff": "diff --git a/sanic/server/websockets/connection.py b/sanic/server/websockets/connection.py\n--- a/sanic/server/websockets/connection.py\n+++ b/sanic/server/websockets/connection.py\n@@ -9,8 +9,10 @@\n Union,\n )\n \n+from sanic.exceptions import InvalidUsage\n \n-ASIMessage = MutableMapping[str, Any]\n+\n+ASGIMessage = MutableMapping[str, Any]\n \n \n class WebSocketConnection:\n@@ -25,8 +27,8 @@\n \n def __init__(\n self,\n- send: Callable[[ASIMessage], Awaitable[None]],\n- receive: Callable[[], Awaitable[ASIMessage]],\n+ send: Callable[[ASGIMessage], Awaitable[None]],\n+ receive: Callable[[], Awaitable[ASGIMessage]],\n subprotocols: Optional[List[str]] = None,\n ) -> None:\n self._send = send\n@@ -47,7 +49,13 @@\n message = await self._receive()\n \n if message[\"type\"] == \"websocket.receive\":\n- return message[\"text\"]\n+ try:\n+ return message[\"text\"]\n+ except KeyError:\n+ try:\n+ return message[\"bytes\"].decode()\n+ except KeyError:\n+ raise InvalidUsage(\"Bad ASGI message received\")\n elif message[\"type\"] == \"websocket.disconnect\":\n pass\n", "issue": "There is an obvious bug in ASGI WebsocketConnection of Sanic\n### Is there an existing issue for this?\n\n- [X] I have searched the existing issues\n\n### Describe the bug\n\nI started my sanic app with UvicornWorker. The original websocket will become WebsocketConnection. When I call\r\nthe ws.recv function will report an error if bytes data is received at this time.\r\n`KeyError\uff1a\u2018text\u2019`\r\n[https://github.com/sanic-org/sanic/blob/main/sanic/server/websockets/connection.py](url)\r\n` async def recv(self, *args, **kwargs) -> Optional[str]:\r\n message = await self._receive()\r\n\r\n if message[\"type\"] == \"websocket.receive\":\r\n return message[\"text\"]\r\n elif message[\"type\"] == \"websocket.disconnect\":\r\n pass\r\n\r\n return None`\r\nThere is no data of bytes type processed here.\n\n### Code snippet\n\n_No response_\n\n### Expected Behavior\n\n_No response_\n\n### How do you run Sanic?\n\nASGI\n\n### Operating System\n\nubuntu\n\n### Sanic Version\n\n22.3\n\n### Additional context\n\n_No response_\n", "before_files": [{"content": "from typing import (\n Any,\n Awaitable,\n Callable,\n Dict,\n List,\n MutableMapping,\n Optional,\n Union,\n)\n\n\nASIMessage = MutableMapping[str, Any]\n\n\nclass WebSocketConnection:\n \"\"\"\n This is for ASGI Connections.\n It provides an interface similar to WebsocketProtocol, but\n sends/receives over an ASGI connection.\n \"\"\"\n\n # TODO\n # - Implement ping/pong\n\n def __init__(\n self,\n send: Callable[[ASIMessage], Awaitable[None]],\n receive: Callable[[], Awaitable[ASIMessage]],\n subprotocols: Optional[List[str]] = None,\n ) -> None:\n self._send = send\n self._receive = receive\n self._subprotocols = subprotocols or []\n\n async def send(self, data: Union[str, bytes], *args, **kwargs) -> None:\n message: Dict[str, Union[str, bytes]] = {\"type\": \"websocket.send\"}\n\n if isinstance(data, bytes):\n message.update({\"bytes\": data})\n else:\n message.update({\"text\": str(data)})\n\n await self._send(message)\n\n async def recv(self, *args, **kwargs) -> Optional[str]:\n message = await self._receive()\n\n if message[\"type\"] == \"websocket.receive\":\n return message[\"text\"]\n elif message[\"type\"] == \"websocket.disconnect\":\n pass\n\n return None\n\n receive = recv\n\n async def accept(self, subprotocols: Optional[List[str]] = None) -> None:\n subprotocol = None\n if subprotocols:\n for subp in subprotocols:\n if subp in 
self.subprotocols:\n subprotocol = subp\n break\n\n await self._send(\n {\n \"type\": \"websocket.accept\",\n \"subprotocol\": subprotocol,\n }\n )\n\n async def close(self, code: int = 1000, reason: str = \"\") -> None:\n pass\n\n @property\n def subprotocols(self):\n return self._subprotocols\n\n @subprotocols.setter\n def subprotocols(self, subprotocols: Optional[List[str]] = None):\n self._subprotocols = subprotocols or []\n", "path": "sanic/server/websockets/connection.py"}], "after_files": [{"content": "from typing import (\n Any,\n Awaitable,\n Callable,\n Dict,\n List,\n MutableMapping,\n Optional,\n Union,\n)\n\nfrom sanic.exceptions import InvalidUsage\n\n\nASGIMessage = MutableMapping[str, Any]\n\n\nclass WebSocketConnection:\n \"\"\"\n This is for ASGI Connections.\n It provides an interface similar to WebsocketProtocol, but\n sends/receives over an ASGI connection.\n \"\"\"\n\n # TODO\n # - Implement ping/pong\n\n def __init__(\n self,\n send: Callable[[ASGIMessage], Awaitable[None]],\n receive: Callable[[], Awaitable[ASGIMessage]],\n subprotocols: Optional[List[str]] = None,\n ) -> None:\n self._send = send\n self._receive = receive\n self._subprotocols = subprotocols or []\n\n async def send(self, data: Union[str, bytes], *args, **kwargs) -> None:\n message: Dict[str, Union[str, bytes]] = {\"type\": \"websocket.send\"}\n\n if isinstance(data, bytes):\n message.update({\"bytes\": data})\n else:\n message.update({\"text\": str(data)})\n\n await self._send(message)\n\n async def recv(self, *args, **kwargs) -> Optional[str]:\n message = await self._receive()\n\n if message[\"type\"] == \"websocket.receive\":\n try:\n return message[\"text\"]\n except KeyError:\n try:\n return message[\"bytes\"].decode()\n except KeyError:\n raise InvalidUsage(\"Bad ASGI message received\")\n elif message[\"type\"] == \"websocket.disconnect\":\n pass\n\n return None\n\n receive = recv\n\n async def accept(self, subprotocols: Optional[List[str]] = None) -> None:\n subprotocol = None\n if subprotocols:\n for subp in subprotocols:\n if subp in self.subprotocols:\n subprotocol = subp\n break\n\n await self._send(\n {\n \"type\": \"websocket.accept\",\n \"subprotocol\": subprotocol,\n }\n )\n\n async def close(self, code: int = 1000, reason: str = \"\") -> None:\n pass\n\n @property\n def subprotocols(self):\n return self._subprotocols\n\n @subprotocols.setter\n def subprotocols(self, subprotocols: Optional[List[str]] = None):\n self._subprotocols = subprotocols or []\n", "path": "sanic/server/websockets/connection.py"}]}
| 1,134 | 298 |
gh_patches_debug_20314
|
rasdani/github-patches
|
git_diff
|
urllib3__urllib3-773
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Broken vendoring in socks contrib module
Awkwardly, the SOCKS contrib module doesn't work properly when vendored because it tries to do an absolute import. We should probably rewrite it to use relative imports.
My bad.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `urllib3/contrib/socks.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 """
3 SOCKS support for urllib3
4 ~~~~~~~~~~~~~~~~~~~~~~~~~
5
6 This contrib module contains provisional support for SOCKS proxies from within
7 urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and
8 SOCKS5. To enable its functionality, either install PySocks or install this
9 module with the ``socks`` extra.
10
11 Known Limitations:
12
13 - Currently PySocks does not support contacting remote websites via literal
14 IPv6 addresses. Any such connection attempt will fail.
15 - Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any
16 such connection attempt will fail.
17 """
18 from __future__ import absolute_import
19
20 try:
21 import socks
22 except ImportError:
23 import warnings
24 from urllib3.exceptions import DependencyWarning
25
26 warnings.warn((
27 'SOCKS support in urllib3 requires the installation of optional '
28 'dependencies: specifically, PySocks. For more information, see '
29 'https://urllib3.readthedocs.org/en/latest/contrib.html#socks-proxies'
30 ),
31 DependencyWarning
32 )
33 raise
34
35 from socket import error as SocketError, timeout as SocketTimeout
36
37 from urllib3.connection import (
38 HTTPConnection, HTTPSConnection
39 )
40 from urllib3.connectionpool import (
41 HTTPConnectionPool, HTTPSConnectionPool
42 )
43 from urllib3.exceptions import ConnectTimeoutError, NewConnectionError
44 from urllib3.poolmanager import PoolManager
45 from urllib3.util.url import parse_url
46
47 try:
48 import ssl
49 except ImportError:
50 ssl = None
51
52
53 class SOCKSConnection(HTTPConnection):
54 """
55 A plain-text HTTP connection that connects via a SOCKS proxy.
56 """
57 def __init__(self, *args, **kwargs):
58 self._socks_options = kwargs.pop('_socks_options')
59 super(SOCKSConnection, self).__init__(*args, **kwargs)
60
61 def _new_conn(self):
62 """
63 Establish a new connection via the SOCKS proxy.
64 """
65 extra_kw = {}
66 if self.source_address:
67 extra_kw['source_address'] = self.source_address
68
69 if self.socket_options:
70 extra_kw['socket_options'] = self.socket_options
71
72 try:
73 conn = socks.create_connection(
74 (self.host, self.port),
75 proxy_type=self._socks_options['socks_version'],
76 proxy_addr=self._socks_options['proxy_host'],
77 proxy_port=self._socks_options['proxy_port'],
78 proxy_username=self._socks_options['username'],
79 proxy_password=self._socks_options['password'],
80 timeout=self.timeout,
81 **extra_kw
82 )
83
84 except SocketTimeout as e:
85 raise ConnectTimeoutError(
86 self, "Connection to %s timed out. (connect timeout=%s)" %
87 (self.host, self.timeout))
88
89 except socks.ProxyError as e:
90 # This is fragile as hell, but it seems to be the only way to raise
91 # useful errors here.
92 if e.socket_err:
93 error = e.socket_err
94 if isinstance(error, SocketTimeout):
95 raise ConnectTimeoutError(
96 self,
97 "Connection to %s timed out. (connect timeout=%s)" %
98 (self.host, self.timeout)
99 )
100 else:
101 raise NewConnectionError(
102 self,
103 "Failed to establish a new connection: %s" % error
104 )
105 else:
106 raise NewConnectionError(
107 self,
108 "Failed to establish a new connection: %s" % e
109 )
110
111 except SocketError as e: # Defensive: PySocks should catch all these.
112 raise NewConnectionError(
113 self, "Failed to establish a new connection: %s" % e)
114
115 return conn
116
117
118 # We don't need to duplicate the Verified/Unverified distinction from
119 # urllib3/connection.py here because the HTTPSConnection will already have been
120 # correctly set to either the Verified or Unverified form by that module. This
121 # means the SOCKSHTTPSConnection will automatically be the correct type.
122 class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
123 pass
124
125
126 class SOCKSHTTPConnectionPool(HTTPConnectionPool):
127 ConnectionCls = SOCKSConnection
128
129
130 class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
131 ConnectionCls = SOCKSHTTPSConnection
132
133
134 class SOCKSProxyManager(PoolManager):
135 """
136 A version of the urllib3 ProxyManager that routes connections via the
137 defined SOCKS proxy.
138 """
139 pool_classes_by_scheme = {
140 'http': SOCKSHTTPConnectionPool,
141 'https': SOCKSHTTPSConnectionPool,
142 }
143
144 def __init__(self, proxy_url, username=None, password=None,
145 num_pools=10, headers=None, **connection_pool_kw):
146 parsed = parse_url(proxy_url)
147
148 if parsed.scheme == 'socks5':
149 socks_version = socks.PROXY_TYPE_SOCKS5
150 elif parsed.scheme == 'socks4':
151 socks_version = socks.PROXY_TYPE_SOCKS4
152 else:
153 raise ValueError(
154 "Unable to determine SOCKS version from %s" % proxy_url
155 )
156
157 self.proxy_url = proxy_url
158
159 socks_options = {
160 'socks_version': socks_version,
161 'proxy_host': parsed.host,
162 'proxy_port': parsed.port,
163 'username': username,
164 'password': password,
165 }
166 connection_pool_kw['_socks_options'] = socks_options
167
168 super(SOCKSProxyManager, self).__init__(
169 num_pools, headers, **connection_pool_kw
170 )
171
172 self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
173
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/urllib3/contrib/socks.py b/urllib3/contrib/socks.py
--- a/urllib3/contrib/socks.py
+++ b/urllib3/contrib/socks.py
@@ -21,7 +21,7 @@
import socks
except ImportError:
import warnings
- from urllib3.exceptions import DependencyWarning
+ from ..exceptions import DependencyWarning
warnings.warn((
'SOCKS support in urllib3 requires the installation of optional '
@@ -34,15 +34,15 @@
from socket import error as SocketError, timeout as SocketTimeout
-from urllib3.connection import (
+from ..connection import (
HTTPConnection, HTTPSConnection
)
-from urllib3.connectionpool import (
+from ..connectionpool import (
HTTPConnectionPool, HTTPSConnectionPool
)
-from urllib3.exceptions import ConnectTimeoutError, NewConnectionError
-from urllib3.poolmanager import PoolManager
-from urllib3.util.url import parse_url
+from ..exceptions import ConnectTimeoutError, NewConnectionError
+from ..poolmanager import PoolManager
+from ..util.url import parse_url
try:
import ssl
|
{"golden_diff": "diff --git a/urllib3/contrib/socks.py b/urllib3/contrib/socks.py\n--- a/urllib3/contrib/socks.py\n+++ b/urllib3/contrib/socks.py\n@@ -21,7 +21,7 @@\n import socks\n except ImportError:\n import warnings\n- from urllib3.exceptions import DependencyWarning\n+ from ..exceptions import DependencyWarning\n \n warnings.warn((\n 'SOCKS support in urllib3 requires the installation of optional '\n@@ -34,15 +34,15 @@\n \n from socket import error as SocketError, timeout as SocketTimeout\n \n-from urllib3.connection import (\n+from ..connection import (\n HTTPConnection, HTTPSConnection\n )\n-from urllib3.connectionpool import (\n+from ..connectionpool import (\n HTTPConnectionPool, HTTPSConnectionPool\n )\n-from urllib3.exceptions import ConnectTimeoutError, NewConnectionError\n-from urllib3.poolmanager import PoolManager\n-from urllib3.util.url import parse_url\n+from ..exceptions import ConnectTimeoutError, NewConnectionError\n+from ..poolmanager import PoolManager\n+from ..util.url import parse_url\n \n try:\n import ssl\n", "issue": "Broken vendoring in socks contrib module\nAwkwardly, the SOCKS contrib module doesn't work properly when vendored because it tries to do an absolute import. We should probably rewrite it to use relative imports.\n\nMy bad.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nSOCKS support for urllib3\n~~~~~~~~~~~~~~~~~~~~~~~~~\n\nThis contrib module contains provisional support for SOCKS proxies from within\nurllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and\nSOCKS5. To enable its functionality, either install PySocks or install this\nmodule with the ``socks`` extra.\n\nKnown Limitations:\n\n- Currently PySocks does not support contacting remote websites via literal\n IPv6 addresses. Any such connection attempt will fail.\n- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any\n such connection attempt will fail.\n\"\"\"\nfrom __future__ import absolute_import\n\ntry:\n import socks\nexcept ImportError:\n import warnings\n from urllib3.exceptions import DependencyWarning\n\n warnings.warn((\n 'SOCKS support in urllib3 requires the installation of optional '\n 'dependencies: specifically, PySocks. 
For more information, see '\n 'https://urllib3.readthedocs.org/en/latest/contrib.html#socks-proxies'\n ),\n DependencyWarning\n )\n raise\n\nfrom socket import error as SocketError, timeout as SocketTimeout\n\nfrom urllib3.connection import (\n HTTPConnection, HTTPSConnection\n)\nfrom urllib3.connectionpool import (\n HTTPConnectionPool, HTTPSConnectionPool\n)\nfrom urllib3.exceptions import ConnectTimeoutError, NewConnectionError\nfrom urllib3.poolmanager import PoolManager\nfrom urllib3.util.url import parse_url\n\ntry:\n import ssl\nexcept ImportError:\n ssl = None\n\n\nclass SOCKSConnection(HTTPConnection):\n \"\"\"\n A plain-text HTTP connection that connects via a SOCKS proxy.\n \"\"\"\n def __init__(self, *args, **kwargs):\n self._socks_options = kwargs.pop('_socks_options')\n super(SOCKSConnection, self).__init__(*args, **kwargs)\n\n def _new_conn(self):\n \"\"\"\n Establish a new connection via the SOCKS proxy.\n \"\"\"\n extra_kw = {}\n if self.source_address:\n extra_kw['source_address'] = self.source_address\n\n if self.socket_options:\n extra_kw['socket_options'] = self.socket_options\n\n try:\n conn = socks.create_connection(\n (self.host, self.port),\n proxy_type=self._socks_options['socks_version'],\n proxy_addr=self._socks_options['proxy_host'],\n proxy_port=self._socks_options['proxy_port'],\n proxy_username=self._socks_options['username'],\n proxy_password=self._socks_options['password'],\n timeout=self.timeout,\n **extra_kw\n )\n\n except SocketTimeout as e:\n raise ConnectTimeoutError(\n self, \"Connection to %s timed out. (connect timeout=%s)\" %\n (self.host, self.timeout))\n\n except socks.ProxyError as e:\n # This is fragile as hell, but it seems to be the only way to raise\n # useful errors here.\n if e.socket_err:\n error = e.socket_err\n if isinstance(error, SocketTimeout):\n raise ConnectTimeoutError(\n self,\n \"Connection to %s timed out. (connect timeout=%s)\" %\n (self.host, self.timeout)\n )\n else:\n raise NewConnectionError(\n self,\n \"Failed to establish a new connection: %s\" % error\n )\n else:\n raise NewConnectionError(\n self,\n \"Failed to establish a new connection: %s\" % e\n )\n\n except SocketError as e: # Defensive: PySocks should catch all these.\n raise NewConnectionError(\n self, \"Failed to establish a new connection: %s\" % e)\n\n return conn\n\n\n# We don't need to duplicate the Verified/Unverified distinction from\n# urllib3/connection.py here because the HTTPSConnection will already have been\n# correctly set to either the Verified or Unverified form by that module. 
This\n# means the SOCKSHTTPSConnection will automatically be the correct type.\nclass SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):\n pass\n\n\nclass SOCKSHTTPConnectionPool(HTTPConnectionPool):\n ConnectionCls = SOCKSConnection\n\n\nclass SOCKSHTTPSConnectionPool(HTTPSConnectionPool):\n ConnectionCls = SOCKSHTTPSConnection\n\n\nclass SOCKSProxyManager(PoolManager):\n \"\"\"\n A version of the urllib3 ProxyManager that routes connections via the\n defined SOCKS proxy.\n \"\"\"\n pool_classes_by_scheme = {\n 'http': SOCKSHTTPConnectionPool,\n 'https': SOCKSHTTPSConnectionPool,\n }\n\n def __init__(self, proxy_url, username=None, password=None,\n num_pools=10, headers=None, **connection_pool_kw):\n parsed = parse_url(proxy_url)\n\n if parsed.scheme == 'socks5':\n socks_version = socks.PROXY_TYPE_SOCKS5\n elif parsed.scheme == 'socks4':\n socks_version = socks.PROXY_TYPE_SOCKS4\n else:\n raise ValueError(\n \"Unable to determine SOCKS version from %s\" % proxy_url\n )\n\n self.proxy_url = proxy_url\n\n socks_options = {\n 'socks_version': socks_version,\n 'proxy_host': parsed.host,\n 'proxy_port': parsed.port,\n 'username': username,\n 'password': password,\n }\n connection_pool_kw['_socks_options'] = socks_options\n\n super(SOCKSProxyManager, self).__init__(\n num_pools, headers, **connection_pool_kw\n )\n\n self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme\n", "path": "urllib3/contrib/socks.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nSOCKS support for urllib3\n~~~~~~~~~~~~~~~~~~~~~~~~~\n\nThis contrib module contains provisional support for SOCKS proxies from within\nurllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and\nSOCKS5. To enable its functionality, either install PySocks or install this\nmodule with the ``socks`` extra.\n\nKnown Limitations:\n\n- Currently PySocks does not support contacting remote websites via literal\n IPv6 addresses. Any such connection attempt will fail.\n- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any\n such connection attempt will fail.\n\"\"\"\nfrom __future__ import absolute_import\n\ntry:\n import socks\nexcept ImportError:\n import warnings\n from ..exceptions import DependencyWarning\n\n warnings.warn((\n 'SOCKS support in urllib3 requires the installation of optional '\n 'dependencies: specifically, PySocks. 
For more information, see '\n 'https://urllib3.readthedocs.org/en/latest/contrib.html#socks-proxies'\n ),\n DependencyWarning\n )\n raise\n\nfrom socket import error as SocketError, timeout as SocketTimeout\n\nfrom ..connection import (\n HTTPConnection, HTTPSConnection\n)\nfrom ..connectionpool import (\n HTTPConnectionPool, HTTPSConnectionPool\n)\nfrom ..exceptions import ConnectTimeoutError, NewConnectionError\nfrom ..poolmanager import PoolManager\nfrom ..util.url import parse_url\n\ntry:\n import ssl\nexcept ImportError:\n ssl = None\n\n\nclass SOCKSConnection(HTTPConnection):\n \"\"\"\n A plain-text HTTP connection that connects via a SOCKS proxy.\n \"\"\"\n def __init__(self, *args, **kwargs):\n self._socks_options = kwargs.pop('_socks_options')\n super(SOCKSConnection, self).__init__(*args, **kwargs)\n\n def _new_conn(self):\n \"\"\"\n Establish a new connection via the SOCKS proxy.\n \"\"\"\n extra_kw = {}\n if self.source_address:\n extra_kw['source_address'] = self.source_address\n\n if self.socket_options:\n extra_kw['socket_options'] = self.socket_options\n\n try:\n conn = socks.create_connection(\n (self.host, self.port),\n proxy_type=self._socks_options['socks_version'],\n proxy_addr=self._socks_options['proxy_host'],\n proxy_port=self._socks_options['proxy_port'],\n proxy_username=self._socks_options['username'],\n proxy_password=self._socks_options['password'],\n timeout=self.timeout,\n **extra_kw\n )\n\n except SocketTimeout as e:\n raise ConnectTimeoutError(\n self, \"Connection to %s timed out. (connect timeout=%s)\" %\n (self.host, self.timeout))\n\n except socks.ProxyError as e:\n # This is fragile as hell, but it seems to be the only way to raise\n # useful errors here.\n if e.socket_err:\n error = e.socket_err\n if isinstance(error, SocketTimeout):\n raise ConnectTimeoutError(\n self,\n \"Connection to %s timed out. (connect timeout=%s)\" %\n (self.host, self.timeout)\n )\n else:\n raise NewConnectionError(\n self,\n \"Failed to establish a new connection: %s\" % error\n )\n else:\n raise NewConnectionError(\n self,\n \"Failed to establish a new connection: %s\" % e\n )\n\n except SocketError as e: # Defensive: PySocks should catch all these.\n raise NewConnectionError(\n self, \"Failed to establish a new connection: %s\" % e)\n\n return conn\n\n\n# We don't need to duplicate the Verified/Unverified distinction from\n# urllib3/connection.py here because the HTTPSConnection will already have been\n# correctly set to either the Verified or Unverified form by that module. 
This\n# means the SOCKSHTTPSConnection will automatically be the correct type.\nclass SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):\n pass\n\n\nclass SOCKSHTTPConnectionPool(HTTPConnectionPool):\n ConnectionCls = SOCKSConnection\n\n\nclass SOCKSHTTPSConnectionPool(HTTPSConnectionPool):\n ConnectionCls = SOCKSHTTPSConnection\n\n\nclass SOCKSProxyManager(PoolManager):\n \"\"\"\n A version of the urllib3 ProxyManager that routes connections via the\n defined SOCKS proxy.\n \"\"\"\n pool_classes_by_scheme = {\n 'http': SOCKSHTTPConnectionPool,\n 'https': SOCKSHTTPSConnectionPool,\n }\n\n def __init__(self, proxy_url, username=None, password=None,\n num_pools=10, headers=None, **connection_pool_kw):\n parsed = parse_url(proxy_url)\n\n if parsed.scheme == 'socks5':\n socks_version = socks.PROXY_TYPE_SOCKS5\n elif parsed.scheme == 'socks4':\n socks_version = socks.PROXY_TYPE_SOCKS4\n else:\n raise ValueError(\n \"Unable to determine SOCKS version from %s\" % proxy_url\n )\n\n self.proxy_url = proxy_url\n\n socks_options = {\n 'socks_version': socks_version,\n 'proxy_host': parsed.host,\n 'proxy_port': parsed.port,\n 'username': username,\n 'password': password,\n }\n connection_pool_kw['_socks_options'] = socks_options\n\n super(SOCKSProxyManager, self).__init__(\n num_pools, headers, **connection_pool_kw\n )\n\n self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme\n", "path": "urllib3/contrib/socks.py"}]}
| 1,926 | 253 |
gh_patches_debug_18331
|
rasdani/github-patches
|
git_diff
|
cloud-custodian__cloud-custodian-2308
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
c7n_mailer lambda error after included slack integration
Hi team,
It appears that the deployed lambda mailer is having a problem with its libraries after the last commit that included the Slack integration.
After updating the c7n_mailer lambda I'm seeing this error in the logs:
```
[ERROR] 2018-05-01T00:45:26.698Z f04d1326-4cd8-11e8-9f05-670c91cd10c1 Error starting mailer MailerSqsQueueProcessor().
Error: No module named requests
Traceback (most recent call last):
File "/var/task/c7n_mailer/handle.py", line 46, in start_c7n_mailer
mailer_sqs_queue_processor.run(parallel)
File "/var/task/c7n_mailer/sqs_queue_processor.py", line 126, in run
self.process_sqs_message(sqs_message)
File "/var/task/c7n_mailer/sqs_queue_processor.py", line 170, in process_sqs_message
from .slack_delivery import SlackDelivery
File "/var/task/c7n_mailer/slack_delivery.py", line 17, in <module>
import requests
ImportError: No module named requests
```
I've had no success even with a fresh install from the repos.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tools/c7n_mailer/c7n_mailer/deploy.py`
Content:
```
1 # Copyright 2016-2017 Capital One Services, LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 from __future__ import absolute_import, division, print_function, unicode_literals
15
16 import json
17 import os
18
19 from c7n.mu import (
20 CloudWatchEventSource,
21 LambdaFunction,
22 LambdaManager,
23 PythonPackageArchive)
24
25
26 entry_source = """\
27 import logging
28
29 from c7n_mailer import handle
30
31 logger = logging.getLogger('custodian.mailer')
32 log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
33 logging.basicConfig(level=logging.INFO, format=log_format)
34 logging.getLogger('botocore').setLevel(logging.WARNING)
35
36 def dispatch(event, context):
37 return handle.start_c7n_mailer(logger)
38 """
39
40
41 def get_archive(config):
42 archive = PythonPackageArchive(
43 'c7n_mailer', 'ldap3', 'pyasn1', 'jinja2', 'markupsafe', 'ruamel',
44 'redis')
45
46 template_dir = os.path.abspath(
47 os.path.join(os.path.dirname(__file__), '..', 'msg-templates'))
48
49 for t in os.listdir(template_dir):
50 with open(os.path.join(template_dir, t)) as fh:
51 archive.add_contents('msg-templates/%s' % t, fh.read())
52
53 archive.add_contents('config.json', json.dumps(config))
54 archive.add_contents('periodic.py', entry_source)
55
56 archive.close()
57 return archive
58
59
60 def provision(config, session_factory):
61 func_config = dict(
62 name=config.get('lambda_name', 'cloud-custodian-mailer'),
63 description=config.get('lambda_description', 'Cloud Custodian Mailer'),
64 tags=config.get('lambda_tags', {}),
65 handler='periodic.dispatch',
66 runtime=config['runtime'],
67 memory_size=config['memory'],
68 timeout=config['timeout'],
69 role=config['role'],
70 subnets=config['subnets'],
71 security_groups=config['security_groups'],
72 dead_letter_config=config.get('dead_letter_config', {}),
73 events=[
74 CloudWatchEventSource(
75 {'type': 'periodic',
76 'schedule': config.get('lambda_schedule', 'rate(5 minutes)')},
77 session_factory,
78 prefix="")
79 ])
80
81 archive = get_archive(config)
82 func = LambdaFunction(func_config, archive)
83 manager = LambdaManager(session_factory)
84 manager.publish(func)
85
```
Path: `tools/c7n_mailer/setup.py`
Content:
```
1 # Copyright 2015-2017 Capital One Services, LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 from __future__ import absolute_import, division, print_function, unicode_literals
15
16 from setuptools import setup, find_packages
17 import os
18
19 requires = ["Jinja2", "boto3", "jsonschema", "ruamel.yaml<0.15", "datadog"]
20 try:
21 from concurrent import futures
22 except ImportError:
23 # The backport has SyntaxErrors under py36, so avoid installing it.
24 # https://github.com/agronholm/pythonfutures/issues/41
25 requires += ['futures']
26
27 description = ""
28 if os.path.exists('README.md'):
29 description = open('README.md').read()
30
31 setup(
32 name="c7n_mailer",
33 version='0.3.1',
34 description="Cloud Custodian - Reference Mailer",
35 long_description=description,
36 classifiers=[
37 "Topic :: System :: Systems Administration",
38 "Topic :: System :: Distributed Computing"
39 ],
40 url="https://github.com/capitalone/cloud-custodian",
41 license="Apache-2.0",
42 packages=find_packages('c7n_mailer'),
43 entry_points={
44 'console_scripts': [
45 'c7n-mailer = c7n_mailer.cli:main',
46 'c7n-mailer-replay = c7n_mailer.replay:main'
47 ]
48 },
49 install_requires=requires,
50 )
51
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/tools/c7n_mailer/c7n_mailer/deploy.py b/tools/c7n_mailer/c7n_mailer/deploy.py
--- a/tools/c7n_mailer/c7n_mailer/deploy.py
+++ b/tools/c7n_mailer/c7n_mailer/deploy.py
@@ -41,7 +41,7 @@
def get_archive(config):
archive = PythonPackageArchive(
'c7n_mailer', 'ldap3', 'pyasn1', 'jinja2', 'markupsafe', 'ruamel',
- 'redis')
+ 'redis', 'datadog', 'slackclient', 'requests')
template_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'msg-templates'))
diff --git a/tools/c7n_mailer/setup.py b/tools/c7n_mailer/setup.py
--- a/tools/c7n_mailer/setup.py
+++ b/tools/c7n_mailer/setup.py
@@ -16,7 +16,9 @@
from setuptools import setup, find_packages
import os
-requires = ["Jinja2", "boto3", "jsonschema", "ruamel.yaml<0.15", "datadog"]
+# *Any* updates here should also go into c7n_mailer/deploy.py for lambda packaging.
+requires = [
+ "Jinja2", "boto3", "jsonschema", "ruamel.yaml<0.15", "datadog", "slackclient", "ldap3"]
try:
from concurrent import futures
except ImportError:
|
{"golden_diff": "diff --git a/tools/c7n_mailer/c7n_mailer/deploy.py b/tools/c7n_mailer/c7n_mailer/deploy.py\n--- a/tools/c7n_mailer/c7n_mailer/deploy.py\n+++ b/tools/c7n_mailer/c7n_mailer/deploy.py\n@@ -41,7 +41,7 @@\n def get_archive(config):\n archive = PythonPackageArchive(\n 'c7n_mailer', 'ldap3', 'pyasn1', 'jinja2', 'markupsafe', 'ruamel',\n- 'redis')\n+ 'redis', 'datadog', 'slackclient', 'requests')\n \n template_dir = os.path.abspath(\n os.path.join(os.path.dirname(__file__), '..', 'msg-templates'))\ndiff --git a/tools/c7n_mailer/setup.py b/tools/c7n_mailer/setup.py\n--- a/tools/c7n_mailer/setup.py\n+++ b/tools/c7n_mailer/setup.py\n@@ -16,7 +16,9 @@\n from setuptools import setup, find_packages\n import os\n \n-requires = [\"Jinja2\", \"boto3\", \"jsonschema\", \"ruamel.yaml<0.15\", \"datadog\"]\n+# *Any* updates here should also go into c7n_mailer/deploy.py for lambda packaging.\n+requires = [\n+ \"Jinja2\", \"boto3\", \"jsonschema\", \"ruamel.yaml<0.15\", \"datadog\", \"slackclient\", \"ldap3\"]\n try:\n from concurrent import futures\n except ImportError:\n", "issue": "c7n_mailer lambda error after included slack integration\nHi team,\r\n\r\nIt appears that deployed lambda mailer is having some problem with the libs, after the last commit that included slack integration. \r\n\r\nAfter update c7n_mailer lambda I'm seeing this error in the logs:\r\n```\r\n[ERROR] 2018-05-01T00:45:26.698Z f04d1326-4cd8-11e8-9f05-670c91cd10c1 Error starting mailer MailerSqsQueueProcessor().\r\nError: No module named requests\r\n\r\nTraceback (most recent call last):\r\nFile \"/var/task/c7n_mailer/handle.py\", line 46, in start_c7n_mailer\r\nmailer_sqs_queue_processor.run(parallel)\r\nFile \"/var/task/c7n_mailer/sqs_queue_processor.py\", line 126, in run\r\nself.process_sqs_message(sqs_message)\r\nFile \"/var/task/c7n_mailer/sqs_queue_processor.py\", line 170, in process_sqs_message\r\nfrom .slack_delivery import SlackDelivery\r\nFile \"/var/task/c7n_mailer/slack_delivery.py\", line 17, in <module>\r\nimport requests\r\nImportError: No module named requests\r\n```\r\n I've had no success even with a fresh install from the repos.\n", "before_files": [{"content": "# Copyright 2016-2017 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport json\nimport os\n\nfrom c7n.mu import (\n CloudWatchEventSource,\n LambdaFunction,\n LambdaManager,\n PythonPackageArchive)\n\n\nentry_source = \"\"\"\\\nimport logging\n\nfrom c7n_mailer import handle\n\nlogger = logging.getLogger('custodian.mailer')\nlog_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'\nlogging.basicConfig(level=logging.INFO, format=log_format)\nlogging.getLogger('botocore').setLevel(logging.WARNING)\n\ndef dispatch(event, context):\n return handle.start_c7n_mailer(logger)\n\"\"\"\n\n\ndef get_archive(config):\n archive = PythonPackageArchive(\n 'c7n_mailer', 'ldap3', 'pyasn1', 'jinja2', 'markupsafe', 'ruamel',\n 
'redis')\n\n template_dir = os.path.abspath(\n os.path.join(os.path.dirname(__file__), '..', 'msg-templates'))\n\n for t in os.listdir(template_dir):\n with open(os.path.join(template_dir, t)) as fh:\n archive.add_contents('msg-templates/%s' % t, fh.read())\n\n archive.add_contents('config.json', json.dumps(config))\n archive.add_contents('periodic.py', entry_source)\n\n archive.close()\n return archive\n\n\ndef provision(config, session_factory):\n func_config = dict(\n name=config.get('lambda_name', 'cloud-custodian-mailer'),\n description=config.get('lambda_description', 'Cloud Custodian Mailer'),\n tags=config.get('lambda_tags', {}),\n handler='periodic.dispatch',\n runtime=config['runtime'],\n memory_size=config['memory'],\n timeout=config['timeout'],\n role=config['role'],\n subnets=config['subnets'],\n security_groups=config['security_groups'],\n dead_letter_config=config.get('dead_letter_config', {}),\n events=[\n CloudWatchEventSource(\n {'type': 'periodic',\n 'schedule': config.get('lambda_schedule', 'rate(5 minutes)')},\n session_factory,\n prefix=\"\")\n ])\n\n archive = get_archive(config)\n func = LambdaFunction(func_config, archive)\n manager = LambdaManager(session_factory)\n manager.publish(func)\n", "path": "tools/c7n_mailer/c7n_mailer/deploy.py"}, {"content": "# Copyright 2015-2017 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom setuptools import setup, find_packages\nimport os\n\nrequires = [\"Jinja2\", \"boto3\", \"jsonschema\", \"ruamel.yaml<0.15\", \"datadog\"]\ntry:\n from concurrent import futures\nexcept ImportError:\n # The backport has SyntaxErrors under py36, so avoid installing it.\n # https://github.com/agronholm/pythonfutures/issues/41\n requires += ['futures']\n\ndescription = \"\"\nif os.path.exists('README.md'):\n description = open('README.md').read()\n\nsetup(\n name=\"c7n_mailer\",\n version='0.3.1',\n description=\"Cloud Custodian - Reference Mailer\",\n long_description=description,\n classifiers=[\n \"Topic :: System :: Systems Administration\",\n \"Topic :: System :: Distributed Computing\"\n ],\n url=\"https://github.com/capitalone/cloud-custodian\",\n license=\"Apache-2.0\",\n packages=find_packages('c7n_mailer'),\n entry_points={\n 'console_scripts': [\n 'c7n-mailer = c7n_mailer.cli:main',\n 'c7n-mailer-replay = c7n_mailer.replay:main'\n ]\n },\n install_requires=requires,\n)\n", "path": "tools/c7n_mailer/setup.py"}], "after_files": [{"content": "# Copyright 2016-2017 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 
implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport json\nimport os\n\nfrom c7n.mu import (\n CloudWatchEventSource,\n LambdaFunction,\n LambdaManager,\n PythonPackageArchive)\n\n\nentry_source = \"\"\"\\\nimport logging\n\nfrom c7n_mailer import handle\n\nlogger = logging.getLogger('custodian.mailer')\nlog_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'\nlogging.basicConfig(level=logging.INFO, format=log_format)\nlogging.getLogger('botocore').setLevel(logging.WARNING)\n\ndef dispatch(event, context):\n return handle.start_c7n_mailer(logger)\n\"\"\"\n\n\ndef get_archive(config):\n archive = PythonPackageArchive(\n 'c7n_mailer', 'ldap3', 'pyasn1', 'jinja2', 'markupsafe', 'ruamel',\n 'redis', 'datadog', 'slackclient', 'requests')\n\n template_dir = os.path.abspath(\n os.path.join(os.path.dirname(__file__), '..', 'msg-templates'))\n\n for t in os.listdir(template_dir):\n with open(os.path.join(template_dir, t)) as fh:\n archive.add_contents('msg-templates/%s' % t, fh.read())\n\n archive.add_contents('config.json', json.dumps(config))\n archive.add_contents('periodic.py', entry_source)\n\n archive.close()\n return archive\n\n\ndef provision(config, session_factory):\n func_config = dict(\n name=config.get('lambda_name', 'cloud-custodian-mailer'),\n description=config.get('lambda_description', 'Cloud Custodian Mailer'),\n tags=config.get('lambda_tags', {}),\n handler='periodic.dispatch',\n runtime=config['runtime'],\n memory_size=config['memory'],\n timeout=config['timeout'],\n role=config['role'],\n subnets=config['subnets'],\n security_groups=config['security_groups'],\n dead_letter_config=config.get('dead_letter_config', {}),\n events=[\n CloudWatchEventSource(\n {'type': 'periodic',\n 'schedule': config.get('lambda_schedule', 'rate(5 minutes)')},\n session_factory,\n prefix=\"\")\n ])\n\n archive = get_archive(config)\n func = LambdaFunction(func_config, archive)\n manager = LambdaManager(session_factory)\n manager.publish(func)\n", "path": "tools/c7n_mailer/c7n_mailer/deploy.py"}, {"content": "# Copyright 2015-2017 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom setuptools import setup, find_packages\nimport os\n\n# *Any* updates here should also go into c7n_mailer/deploy.py for lambda packaging.\nrequires = [\n \"Jinja2\", \"boto3\", \"jsonschema\", \"ruamel.yaml<0.15\", \"datadog\", \"slackclient\", \"ldap3\"]\ntry:\n from concurrent import futures\nexcept ImportError:\n # The backport has SyntaxErrors under py36, so avoid installing it.\n # https://github.com/agronholm/pythonfutures/issues/41\n requires += ['futures']\n\ndescription = \"\"\nif os.path.exists('README.md'):\n description = open('README.md').read()\n\nsetup(\n name=\"c7n_mailer\",\n version='0.3.1',\n description=\"Cloud Custodian - 
Reference Mailer\",\n long_description=description,\n classifiers=[\n \"Topic :: System :: Systems Administration\",\n \"Topic :: System :: Distributed Computing\"\n ],\n url=\"https://github.com/capitalone/cloud-custodian\",\n license=\"Apache-2.0\",\n packages=find_packages('c7n_mailer'),\n entry_points={\n 'console_scripts': [\n 'c7n-mailer = c7n_mailer.cli:main',\n 'c7n-mailer-replay = c7n_mailer.replay:main'\n ]\n },\n install_requires=requires,\n)\n", "path": "tools/c7n_mailer/setup.py"}]}
| 1,887 | 345 |
gh_patches_debug_32229
|
rasdani/github-patches
|
git_diff
|
Cloud-CV__EvalAI-922
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Modify contact_us api to fetch name and email, if the user is logged in.
This issue is related to #853, as discussed with @deshraj.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `apps/web/views.py`
Content:
```
1 from django.contrib.auth.models import User
2 from django.shortcuts import render
3
4 from .models import Team
5
6 from rest_framework import permissions, status
7 from rest_framework.decorators import (api_view,
8 permission_classes,
9 throttle_classes,)
10 from rest_framework.response import Response
11 from rest_framework.throttling import AnonRateThrottle
12
13 from .serializers import ContactSerializer, TeamSerializer
14
15
16 def home(request, template_name="index.html"):
17 """
18 Home Page View
19 """
20 return render(request, template_name)
21
22
23 def page_not_found(request):
24 response = render(request, 'error404.html',
25 )
26 response.status_code = 404
27 return response
28
29
30 def internal_server_error(request):
31 response = render(request, 'error500.html',
32 )
33 response.status_code = 500
34 return response
35
36
37 @throttle_classes([AnonRateThrottle, ])
38 @api_view(['POST', ])
39 @permission_classes((permissions.AllowAny,))
40 def contact_us(request):
41 try:
42 user = User.objects.get(username=request.user)
43 name = user.username
44 email = user.email
45 request_data = {"name": name, "email": email}
46 request_data['message'] = request.data['message']
47 serializer = ContactSerializer(data=request_data)
48 if serializer.is_valid():
49 serializer.save()
50 response_data = {'message': 'Your message has been successfully recorded. We will contact you shortly.'}
51 return Response(response_data, status=status.HTTP_201_CREATED)
52 return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
53 except:
54 serializer = ContactSerializer(data=request.data)
55 if serializer.is_valid():
56 serializer.save()
57 response_data = {'message': 'Your message has been successfully recorded. We will contact you shortly.'}
58 return Response(response_data, status=status.HTTP_201_CREATED)
59 return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
60
61
62 @throttle_classes([AnonRateThrottle])
63 @api_view(['GET', 'POST'])
64 @permission_classes((permissions.AllowAny,))
65 def our_team(request):
66 if request.method == 'GET':
67 teams = Team.objects.all()
68 serializer = TeamSerializer(teams, many=True, context={'request': request})
69 response_data = serializer.data
70 return Response(response_data, status=status.HTTP_200_OK)
71 elif request.method == 'POST':
72 # team_type is set to Team.CONTRIBUTOR by default and can be overridden by the requester
73 request.data['team_type'] = request.data.get('team_type', Team.CONTRIBUTOR)
74 serializer = TeamSerializer(data=request.data)
75 if serializer.is_valid():
76 serializer.save()
77 response_data = {'message', 'Successfully added the contributor.'}
78 return Response(response_data, status=status.HTTP_201_CREATED)
79 return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
80
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/apps/web/views.py b/apps/web/views.py
--- a/apps/web/views.py
+++ b/apps/web/views.py
@@ -35,29 +35,33 @@
@throttle_classes([AnonRateThrottle, ])
-@api_view(['POST', ])
+@api_view(['GET', 'POST'])
@permission_classes((permissions.AllowAny,))
def contact_us(request):
+ user_does_not_exist = False
try:
user = User.objects.get(username=request.user)
name = user.username
email = user.email
- request_data = {"name": name, "email": email}
- request_data['message'] = request.data['message']
- serializer = ContactSerializer(data=request_data)
- if serializer.is_valid():
- serializer.save()
- response_data = {'message': 'Your message has been successfully recorded. We will contact you shortly.'}
- return Response(response_data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ request_data = {'name': name, 'email': email}
except:
- serializer = ContactSerializer(data=request.data)
+ request_data = request.data
+ user_does_not_exist = True
+
+ if request.method == 'POST' or user_does_not_exist:
+ if request.POST.get('message'):
+ request_data['message'] = request.POST.get('message')
+ serializer = ContactSerializer(data=request_data)
if serializer.is_valid():
serializer.save()
- response_data = {'message': 'Your message has been successfully recorded. We will contact you shortly.'}
+ response_data = {'message': 'We have received your request and will contact you shortly.'}
return Response(response_data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ elif request.method == 'GET':
+ response_data = {"name": name, "email": email}
+ return Response(response_data, status=status.HTTP_200_OK)
+
@throttle_classes([AnonRateThrottle])
@api_view(['GET', 'POST'])
|
{"golden_diff": "diff --git a/apps/web/views.py b/apps/web/views.py\n--- a/apps/web/views.py\n+++ b/apps/web/views.py\n@@ -35,29 +35,33 @@\n \n \n @throttle_classes([AnonRateThrottle, ])\n-@api_view(['POST', ])\n+@api_view(['GET', 'POST'])\n @permission_classes((permissions.AllowAny,))\n def contact_us(request):\n+ user_does_not_exist = False\n try:\n user = User.objects.get(username=request.user)\n name = user.username\n email = user.email\n- request_data = {\"name\": name, \"email\": email}\n- request_data['message'] = request.data['message']\n- serializer = ContactSerializer(data=request_data)\n- if serializer.is_valid():\n- serializer.save()\n- response_data = {'message': 'Your message has been successfully recorded. We will contact you shortly.'}\n- return Response(response_data, status=status.HTTP_201_CREATED)\n- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n+ request_data = {'name': name, 'email': email}\n except:\n- serializer = ContactSerializer(data=request.data)\n+ request_data = request.data\n+ user_does_not_exist = True\n+\n+ if request.method == 'POST' or user_does_not_exist:\n+ if request.POST.get('message'):\n+ request_data['message'] = request.POST.get('message')\n+ serializer = ContactSerializer(data=request_data)\n if serializer.is_valid():\n serializer.save()\n- response_data = {'message': 'Your message has been successfully recorded. We will contact you shortly.'}\n+ response_data = {'message': 'We have received your request and will contact you shortly.'}\n return Response(response_data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n \n+ elif request.method == 'GET':\n+ response_data = {\"name\": name, \"email\": email}\n+ return Response(response_data, status=status.HTTP_200_OK)\n+\n \n @throttle_classes([AnonRateThrottle])\n @api_view(['GET', 'POST'])\n", "issue": "Modify contact_us api to fetch name and email, if the user is logged in.\nThis issue is related to #853 as discussed with @deshraj .\n", "before_files": [{"content": "from django.contrib.auth.models import User\nfrom django.shortcuts import render\n\nfrom .models import Team\n\nfrom rest_framework import permissions, status\nfrom rest_framework.decorators import (api_view,\n permission_classes,\n throttle_classes,)\nfrom rest_framework.response import Response\nfrom rest_framework.throttling import AnonRateThrottle\n\nfrom .serializers import ContactSerializer, TeamSerializer\n\n\ndef home(request, template_name=\"index.html\"):\n \"\"\"\n Home Page View\n \"\"\"\n return render(request, template_name)\n\n\ndef page_not_found(request):\n response = render(request, 'error404.html',\n )\n response.status_code = 404\n return response\n\n\ndef internal_server_error(request):\n response = render(request, 'error500.html',\n )\n response.status_code = 500\n return response\n\n\n@throttle_classes([AnonRateThrottle, ])\n@api_view(['POST', ])\n@permission_classes((permissions.AllowAny,))\ndef contact_us(request):\n try:\n user = User.objects.get(username=request.user)\n name = user.username\n email = user.email\n request_data = {\"name\": name, \"email\": email}\n request_data['message'] = request.data['message']\n serializer = ContactSerializer(data=request_data)\n if serializer.is_valid():\n serializer.save()\n response_data = {'message': 'Your message has been successfully recorded. 
We will contact you shortly.'}\n return Response(response_data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n except:\n serializer = ContactSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n response_data = {'message': 'Your message has been successfully recorded. We will contact you shortly.'}\n return Response(response_data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n\n@throttle_classes([AnonRateThrottle])\n@api_view(['GET', 'POST'])\n@permission_classes((permissions.AllowAny,))\ndef our_team(request):\n if request.method == 'GET':\n teams = Team.objects.all()\n serializer = TeamSerializer(teams, many=True, context={'request': request})\n response_data = serializer.data\n return Response(response_data, status=status.HTTP_200_OK)\n elif request.method == 'POST':\n # team_type is set to Team.CONTRIBUTOR by default and can be overridden by the requester\n request.data['team_type'] = request.data.get('team_type', Team.CONTRIBUTOR)\n serializer = TeamSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n response_data = {'message', 'Successfully added the contributor.'}\n return Response(response_data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n", "path": "apps/web/views.py"}], "after_files": [{"content": "from django.contrib.auth.models import User\nfrom django.shortcuts import render\n\nfrom .models import Team\n\nfrom rest_framework import permissions, status\nfrom rest_framework.decorators import (api_view,\n permission_classes,\n throttle_classes,)\nfrom rest_framework.response import Response\nfrom rest_framework.throttling import AnonRateThrottle\n\nfrom .serializers import ContactSerializer, TeamSerializer\n\n\ndef home(request, template_name=\"index.html\"):\n \"\"\"\n Home Page View\n \"\"\"\n return render(request, template_name)\n\n\ndef page_not_found(request):\n response = render(request, 'error404.html',\n )\n response.status_code = 404\n return response\n\n\ndef internal_server_error(request):\n response = render(request, 'error500.html',\n )\n response.status_code = 500\n return response\n\n\n@throttle_classes([AnonRateThrottle, ])\n@api_view(['GET', 'POST'])\n@permission_classes((permissions.AllowAny,))\ndef contact_us(request):\n user_does_not_exist = False\n try:\n user = User.objects.get(username=request.user)\n name = user.username\n email = user.email\n request_data = {'name': name, 'email': email}\n except:\n request_data = request.data\n user_does_not_exist = True\n\n if request.method == 'POST' or user_does_not_exist:\n if request.POST.get('message'):\n request_data['message'] = request.POST.get('message')\n serializer = ContactSerializer(data=request_data)\n if serializer.is_valid():\n serializer.save()\n response_data = {'message': 'We have received your request and will contact you shortly.'}\n return Response(response_data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n elif request.method == 'GET':\n response_data = {\"name\": name, \"email\": email}\n return Response(response_data, status=status.HTTP_200_OK)\n\n\n@throttle_classes([AnonRateThrottle])\n@api_view(['GET', 'POST'])\n@permission_classes((permissions.AllowAny,))\ndef our_team(request):\n if request.method == 'GET':\n teams = Team.objects.all()\n serializer = TeamSerializer(teams, many=True, 
context={'request': request})\n response_data = serializer.data\n return Response(response_data, status=status.HTTP_200_OK)\n elif request.method == 'POST':\n # team_type is set to Team.CONTRIBUTOR by default and can be overridden by the requester\n request.data['team_type'] = request.data.get('team_type', Team.CONTRIBUTOR)\n serializer = TeamSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n response_data = {'message', 'Successfully added the contributor.'}\n return Response(response_data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n", "path": "apps/web/views.py"}]}
| 1,061 | 468 |
gh_patches_debug_15728
|
rasdani/github-patches
|
git_diff
|
boto__boto-3488
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
switch to unquote_plus in bucketlistresultset
Resubmitting #2654, rebased off current develop branch. Original description follows:
There is a bug in PR #2562, which I submitted. The URL quoting that S3 performs with `encoding_type="url"` necessitates the use of `unquote_plus` because they replace spaces with plus signs. This didn't break bucket listing completely, and in my case it only led to some keys missing from the listing, something that I didn't notice in my original testing. Sorry for this!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `boto/s3/bucketlistresultset.py`
Content:
```
1 # Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
2 #
3 # Permission is hereby granted, free of charge, to any person obtaining a
4 # copy of this software and associated documentation files (the
5 # "Software"), to deal in the Software without restriction, including
6 # without limitation the rights to use, copy, modify, merge, publish, dis-
7 # tribute, sublicense, and/or sell copies of the Software, and to permit
8 # persons to whom the Software is furnished to do so, subject to the fol-
9 # lowing conditions:
10 #
11 # The above copyright notice and this permission notice shall be included
12 # in all copies or substantial portions of the Software.
13 #
14 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
15 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
16 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
17 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
18 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
20 # IN THE SOFTWARE.
21
22 from boto.compat import unquote_str
23
24 def bucket_lister(bucket, prefix='', delimiter='', marker='', headers=None,
25 encoding_type=None):
26 """
27 A generator function for listing keys in a bucket.
28 """
29 more_results = True
30 k = None
31 while more_results:
32 rs = bucket.get_all_keys(prefix=prefix, marker=marker,
33 delimiter=delimiter, headers=headers,
34 encoding_type=encoding_type)
35 for k in rs:
36 yield k
37 if k:
38 marker = rs.next_marker or k.name
39 if marker and encoding_type == "url":
40 marker = unquote_str(marker)
41 more_results= rs.is_truncated
42
43 class BucketListResultSet(object):
44 """
45 A resultset for listing keys within a bucket. Uses the bucket_lister
46 generator function and implements the iterator interface. This
47 transparently handles the results paging from S3 so even if you have
48 many thousands of keys within the bucket you can iterate over all
49 keys in a reasonably efficient manner.
50 """
51
52 def __init__(self, bucket=None, prefix='', delimiter='', marker='',
53 headers=None, encoding_type=None):
54 self.bucket = bucket
55 self.prefix = prefix
56 self.delimiter = delimiter
57 self.marker = marker
58 self.headers = headers
59 self.encoding_type = encoding_type
60
61 def __iter__(self):
62 return bucket_lister(self.bucket, prefix=self.prefix,
63 delimiter=self.delimiter, marker=self.marker,
64 headers=self.headers,
65 encoding_type=self.encoding_type)
66
67 def versioned_bucket_lister(bucket, prefix='', delimiter='',
68 key_marker='', version_id_marker='', headers=None,
69 encoding_type=None):
70 """
71 A generator function for listing versions in a bucket.
72 """
73 more_results = True
74 k = None
75 while more_results:
76 rs = bucket.get_all_versions(prefix=prefix, key_marker=key_marker,
77 version_id_marker=version_id_marker,
78 delimiter=delimiter, headers=headers,
79 max_keys=999, encoding_type=encoding_type)
80 for k in rs:
81 yield k
82 key_marker = rs.next_key_marker
83 version_id_marker = rs.next_version_id_marker
84 more_results= rs.is_truncated
85
86 class VersionedBucketListResultSet(object):
87 """
88 A resultset for listing versions within a bucket. Uses the bucket_lister
89 generator function and implements the iterator interface. This
90 transparently handles the results paging from S3 so even if you have
91 many thousands of keys within the bucket you can iterate over all
92 keys in a reasonably efficient manner.
93 """
94
95 def __init__(self, bucket=None, prefix='', delimiter='', key_marker='',
96 version_id_marker='', headers=None, encoding_type=None):
97 self.bucket = bucket
98 self.prefix = prefix
99 self.delimiter = delimiter
100 self.key_marker = key_marker
101 self.version_id_marker = version_id_marker
102 self.headers = headers
103 self.encoding_type = encoding_type
104
105 def __iter__(self):
106 return versioned_bucket_lister(self.bucket, prefix=self.prefix,
107 delimiter=self.delimiter,
108 key_marker=self.key_marker,
109 version_id_marker=self.version_id_marker,
110 headers=self.headers,
111 encoding_type=self.encoding_type)
112
113 def multipart_upload_lister(bucket, key_marker='',
114 upload_id_marker='',
115 headers=None, encoding_type=None):
116 """
117 A generator function for listing multipart uploads in a bucket.
118 """
119 more_results = True
120 k = None
121 while more_results:
122 rs = bucket.get_all_multipart_uploads(key_marker=key_marker,
123 upload_id_marker=upload_id_marker,
124 headers=headers,
125 encoding_type=encoding_type)
126 for k in rs:
127 yield k
128 key_marker = rs.next_key_marker
129 upload_id_marker = rs.next_upload_id_marker
130 more_results= rs.is_truncated
131
132 class MultiPartUploadListResultSet(object):
133 """
134 A resultset for listing multipart uploads within a bucket.
135 Uses the multipart_upload_lister generator function and
136 implements the iterator interface. This
137 transparently handles the results paging from S3 so even if you have
138 many thousands of uploads within the bucket you can iterate over all
139 keys in a reasonably efficient manner.
140 """
141 def __init__(self, bucket=None, key_marker='',
142 upload_id_marker='', headers=None, encoding_type=None):
143 self.bucket = bucket
144 self.key_marker = key_marker
145 self.upload_id_marker = upload_id_marker
146 self.headers = headers
147 self.encoding_type = encoding_type
148
149 def __iter__(self):
150 return multipart_upload_lister(self.bucket,
151 key_marker=self.key_marker,
152 upload_id_marker=self.upload_id_marker,
153 headers=self.headers,
154 encoding_type=self.encoding_type)
155
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/boto/s3/bucketlistresultset.py b/boto/s3/bucketlistresultset.py
--- a/boto/s3/bucketlistresultset.py
+++ b/boto/s3/bucketlistresultset.py
@@ -80,6 +80,8 @@
for k in rs:
yield k
key_marker = rs.next_key_marker
+ if key_marker and encoding_type == "url":
+ key_marker = unquote_str(key_marker)
version_id_marker = rs.next_version_id_marker
more_results= rs.is_truncated
@@ -126,6 +128,8 @@
for k in rs:
yield k
key_marker = rs.next_key_marker
+ if key_marker and encoding_type == "url":
+ key_marker = unquote_str(key_marker)
upload_id_marker = rs.next_upload_id_marker
more_results= rs.is_truncated
|
{"golden_diff": "diff --git a/boto/s3/bucketlistresultset.py b/boto/s3/bucketlistresultset.py\n--- a/boto/s3/bucketlistresultset.py\n+++ b/boto/s3/bucketlistresultset.py\n@@ -80,6 +80,8 @@\n for k in rs:\n yield k\n key_marker = rs.next_key_marker\n+ if key_marker and encoding_type == \"url\":\n+ key_marker = unquote_str(key_marker)\n version_id_marker = rs.next_version_id_marker\n more_results= rs.is_truncated\n \n@@ -126,6 +128,8 @@\n for k in rs:\n yield k\n key_marker = rs.next_key_marker\n+ if key_marker and encoding_type == \"url\":\n+ key_marker = unquote_str(key_marker)\n upload_id_marker = rs.next_upload_id_marker\n more_results= rs.is_truncated\n", "issue": "switch to unquote_plus in bucketlistresultset\nResubmitting #2654, rebased off current develop branch. Original description follows:\n\nThere is a bug in PR #2562, which I submitted. The URL quoting that S3 performs with `encoding_type=\"url\"` necessitates the use of `unquote_plus` because they replace spaces with plus signs. This didn't break bucket listing completely, and in my case it only led to some keys missing from the listing, something that I didn't notice in my original testing. Sorry for this!\n\n", "before_files": [{"content": "# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/\n#\n# Permission is hereby granted, free of charge, to any person obtaining a\n# copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish, dis-\n# tribute, sublicense, and/or sell copies of the Software, and to permit\n# persons to whom the Software is furnished to do so, subject to the fol-\n# lowing conditions:\n#\n# The above copyright notice and this permission notice shall be included\n# in all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-\n# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT\n# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\n# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n# IN THE SOFTWARE.\n\nfrom boto.compat import unquote_str\n\ndef bucket_lister(bucket, prefix='', delimiter='', marker='', headers=None,\n encoding_type=None):\n \"\"\"\n A generator function for listing keys in a bucket.\n \"\"\"\n more_results = True\n k = None\n while more_results:\n rs = bucket.get_all_keys(prefix=prefix, marker=marker,\n delimiter=delimiter, headers=headers,\n encoding_type=encoding_type)\n for k in rs:\n yield k\n if k:\n marker = rs.next_marker or k.name\n if marker and encoding_type == \"url\":\n marker = unquote_str(marker)\n more_results= rs.is_truncated\n\nclass BucketListResultSet(object):\n \"\"\"\n A resultset for listing keys within a bucket. Uses the bucket_lister\n generator function and implements the iterator interface. 
This\n transparently handles the results paging from S3 so even if you have\n many thousands of keys within the bucket you can iterate over all\n keys in a reasonably efficient manner.\n \"\"\"\n\n def __init__(self, bucket=None, prefix='', delimiter='', marker='',\n headers=None, encoding_type=None):\n self.bucket = bucket\n self.prefix = prefix\n self.delimiter = delimiter\n self.marker = marker\n self.headers = headers\n self.encoding_type = encoding_type\n\n def __iter__(self):\n return bucket_lister(self.bucket, prefix=self.prefix,\n delimiter=self.delimiter, marker=self.marker,\n headers=self.headers,\n encoding_type=self.encoding_type)\n\ndef versioned_bucket_lister(bucket, prefix='', delimiter='',\n key_marker='', version_id_marker='', headers=None,\n encoding_type=None):\n \"\"\"\n A generator function for listing versions in a bucket.\n \"\"\"\n more_results = True\n k = None\n while more_results:\n rs = bucket.get_all_versions(prefix=prefix, key_marker=key_marker,\n version_id_marker=version_id_marker,\n delimiter=delimiter, headers=headers,\n max_keys=999, encoding_type=encoding_type)\n for k in rs:\n yield k\n key_marker = rs.next_key_marker\n version_id_marker = rs.next_version_id_marker\n more_results= rs.is_truncated\n\nclass VersionedBucketListResultSet(object):\n \"\"\"\n A resultset for listing versions within a bucket. Uses the bucket_lister\n generator function and implements the iterator interface. This\n transparently handles the results paging from S3 so even if you have\n many thousands of keys within the bucket you can iterate over all\n keys in a reasonably efficient manner.\n \"\"\"\n\n def __init__(self, bucket=None, prefix='', delimiter='', key_marker='',\n version_id_marker='', headers=None, encoding_type=None):\n self.bucket = bucket\n self.prefix = prefix\n self.delimiter = delimiter\n self.key_marker = key_marker\n self.version_id_marker = version_id_marker\n self.headers = headers\n self.encoding_type = encoding_type\n\n def __iter__(self):\n return versioned_bucket_lister(self.bucket, prefix=self.prefix,\n delimiter=self.delimiter,\n key_marker=self.key_marker,\n version_id_marker=self.version_id_marker,\n headers=self.headers,\n encoding_type=self.encoding_type)\n\ndef multipart_upload_lister(bucket, key_marker='',\n upload_id_marker='',\n headers=None, encoding_type=None):\n \"\"\"\n A generator function for listing multipart uploads in a bucket.\n \"\"\"\n more_results = True\n k = None\n while more_results:\n rs = bucket.get_all_multipart_uploads(key_marker=key_marker,\n upload_id_marker=upload_id_marker,\n headers=headers,\n encoding_type=encoding_type)\n for k in rs:\n yield k\n key_marker = rs.next_key_marker\n upload_id_marker = rs.next_upload_id_marker\n more_results= rs.is_truncated\n\nclass MultiPartUploadListResultSet(object):\n \"\"\"\n A resultset for listing multipart uploads within a bucket.\n Uses the multipart_upload_lister generator function and\n implements the iterator interface. 
This\n transparently handles the results paging from S3 so even if you have\n many thousands of uploads within the bucket you can iterate over all\n keys in a reasonably efficient manner.\n \"\"\"\n def __init__(self, bucket=None, key_marker='',\n upload_id_marker='', headers=None, encoding_type=None):\n self.bucket = bucket\n self.key_marker = key_marker\n self.upload_id_marker = upload_id_marker\n self.headers = headers\n self.encoding_type = encoding_type\n\n def __iter__(self):\n return multipart_upload_lister(self.bucket,\n key_marker=self.key_marker,\n upload_id_marker=self.upload_id_marker,\n headers=self.headers,\n encoding_type=self.encoding_type)\n", "path": "boto/s3/bucketlistresultset.py"}], "after_files": [{"content": "# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/\n#\n# Permission is hereby granted, free of charge, to any person obtaining a\n# copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish, dis-\n# tribute, sublicense, and/or sell copies of the Software, and to permit\n# persons to whom the Software is furnished to do so, subject to the fol-\n# lowing conditions:\n#\n# The above copyright notice and this permission notice shall be included\n# in all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-\n# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT\n# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\n# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n# IN THE SOFTWARE.\n\nfrom boto.compat import unquote_str\n\ndef bucket_lister(bucket, prefix='', delimiter='', marker='', headers=None,\n encoding_type=None):\n \"\"\"\n A generator function for listing keys in a bucket.\n \"\"\"\n more_results = True\n k = None\n while more_results:\n rs = bucket.get_all_keys(prefix=prefix, marker=marker,\n delimiter=delimiter, headers=headers,\n encoding_type=encoding_type)\n for k in rs:\n yield k\n if k:\n marker = rs.next_marker or k.name\n if marker and encoding_type == \"url\":\n marker = unquote_str(marker)\n more_results= rs.is_truncated\n\nclass BucketListResultSet(object):\n \"\"\"\n A resultset for listing keys within a bucket. Uses the bucket_lister\n generator function and implements the iterator interface. 
This\n transparently handles the results paging from S3 so even if you have\n many thousands of keys within the bucket you can iterate over all\n keys in a reasonably efficient manner.\n \"\"\"\n\n def __init__(self, bucket=None, prefix='', delimiter='', marker='',\n headers=None, encoding_type=None):\n self.bucket = bucket\n self.prefix = prefix\n self.delimiter = delimiter\n self.marker = marker\n self.headers = headers\n self.encoding_type = encoding_type\n\n def __iter__(self):\n return bucket_lister(self.bucket, prefix=self.prefix,\n delimiter=self.delimiter, marker=self.marker,\n headers=self.headers,\n encoding_type=self.encoding_type)\n\ndef versioned_bucket_lister(bucket, prefix='', delimiter='',\n key_marker='', version_id_marker='', headers=None,\n encoding_type=None):\n \"\"\"\n A generator function for listing versions in a bucket.\n \"\"\"\n more_results = True\n k = None\n while more_results:\n rs = bucket.get_all_versions(prefix=prefix, key_marker=key_marker,\n version_id_marker=version_id_marker,\n delimiter=delimiter, headers=headers,\n max_keys=999, encoding_type=encoding_type)\n for k in rs:\n yield k\n key_marker = rs.next_key_marker\n if key_marker and encoding_type == \"url\":\n key_marker = unquote_str(key_marker)\n version_id_marker = rs.next_version_id_marker\n more_results= rs.is_truncated\n\nclass VersionedBucketListResultSet(object):\n \"\"\"\n A resultset for listing versions within a bucket. Uses the bucket_lister\n generator function and implements the iterator interface. This\n transparently handles the results paging from S3 so even if you have\n many thousands of keys within the bucket you can iterate over all\n keys in a reasonably efficient manner.\n \"\"\"\n\n def __init__(self, bucket=None, prefix='', delimiter='', key_marker='',\n version_id_marker='', headers=None, encoding_type=None):\n self.bucket = bucket\n self.prefix = prefix\n self.delimiter = delimiter\n self.key_marker = key_marker\n self.version_id_marker = version_id_marker\n self.headers = headers\n self.encoding_type = encoding_type\n\n def __iter__(self):\n return versioned_bucket_lister(self.bucket, prefix=self.prefix,\n delimiter=self.delimiter,\n key_marker=self.key_marker,\n version_id_marker=self.version_id_marker,\n headers=self.headers,\n encoding_type=self.encoding_type)\n\ndef multipart_upload_lister(bucket, key_marker='',\n upload_id_marker='',\n headers=None, encoding_type=None):\n \"\"\"\n A generator function for listing multipart uploads in a bucket.\n \"\"\"\n more_results = True\n k = None\n while more_results:\n rs = bucket.get_all_multipart_uploads(key_marker=key_marker,\n upload_id_marker=upload_id_marker,\n headers=headers,\n encoding_type=encoding_type)\n for k in rs:\n yield k\n key_marker = rs.next_key_marker\n if key_marker and encoding_type == \"url\":\n key_marker = unquote_str(key_marker)\n upload_id_marker = rs.next_upload_id_marker\n more_results= rs.is_truncated\n\nclass MultiPartUploadListResultSet(object):\n \"\"\"\n A resultset for listing multipart uploads within a bucket.\n Uses the multipart_upload_lister generator function and\n implements the iterator interface. 
This\n transparently handles the results paging from S3 so even if you have\n many thousands of uploads within the bucket you can iterate over all\n keys in a reasonably efficient manner.\n \"\"\"\n def __init__(self, bucket=None, key_marker='',\n upload_id_marker='', headers=None, encoding_type=None):\n self.bucket = bucket\n self.key_marker = key_marker\n self.upload_id_marker = upload_id_marker\n self.headers = headers\n self.encoding_type = encoding_type\n\n def __iter__(self):\n return multipart_upload_lister(self.bucket,\n key_marker=self.key_marker,\n upload_id_marker=self.upload_id_marker,\n headers=self.headers,\n encoding_type=self.encoding_type)\n", "path": "boto/s3/bucketlistresultset.py"}]}
| 2,003 | 202 |
gh_patches_debug_49168
|
rasdani/github-patches
|
git_diff
|
mkdocs__mkdocs-2800
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
jinja2 3.1.0 breaks mkdocs
since the jinja2 3.1.0 release mkdocs does not work any more:
```
admin@host ui % pip install jinja2==3.1.0
Collecting jinja2==3.1.0
Using cached Jinja2-3.1.0-py3-none-any.whl (132 kB)
Installing collected packages: jinja2
Attempting uninstall: jinja2
Found existing installation: Jinja2 3.0.0
Uninstalling Jinja2-3.0.0:
Successfully uninstalled Jinja2-3.0.0
Successfully installed jinja2-3.1.0
admin@host ui % mkdocs build
Traceback (most recent call last):
File "/usr/local/bin/mkdocs", line 8, in <module>
sys.exit(cli())
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/mkdocs/__main__.py", line 187, in build_command
build.build(config.load_config(**kwargs), dirty=not clean)
File "/usr/local/lib/python3.9/site-packages/mkdocs/config/base.py", line 216, in load_config
from mkdocs.config.defaults import get_schema
File "/usr/local/lib/python3.9/site-packages/mkdocs/config/defaults.py", line 1, in <module>
from mkdocs.config import config_options
File "/usr/local/lib/python3.9/site-packages/mkdocs/config/config_options.py", line 8, in <module>
from mkdocs import utils, theme, plugins
File "/usr/local/lib/python3.9/site-packages/mkdocs/theme.py", line 6, in <module>
from mkdocs.utils import filters
File "/usr/local/lib/python3.9/site-packages/mkdocs/utils/filters.py", line 13, in <module>
@jinja2.contextfilter
AttributeError: module 'jinja2' has no attribute 'contextfilter'
```
However, if I install jinja2 3.0.0:
```
admin@host ui % pip install jinja2==3.0.0
Collecting jinja2==3.0.0
Using cached Jinja2-3.0.0-py3-none-any.whl (133 kB)
Requirement already satisfied: MarkupSafe>=2.0.0rc2 in /usr/local/lib/python3.9/site-packages (from jinja2==3.0.0) (2.1.1)
Installing collected packages: jinja2
Attempting uninstall: jinja2
Found existing installation: Jinja2 3.1.0
Uninstalling Jinja2-3.1.0:
Successfully uninstalled Jinja2-3.1.0
Successfully installed jinja2-3.0.0
admin@host ui % mkdocs build
INFO - Cleaning site directory
INFO - Building documentation to directory: /Users/admin/git/searchlab/ui/site
INFO - Documentation built in 0.33 seconds
```
- mkdocs can be patched by explicitly installing jinja2 3.0.0.
- maybe this is not a mkdocs bug, but a jinja2 bug; however, this could be patched in mkdocs as well.
Prevent error with Jinja2 v3.1
Fixes #2794
See [Jinja2 v3.0.0 changes](https://jinja.palletsprojects.com/en/3.0.x/changes/#version-3-0-0):
> The function and filter decorators have been renamed and unified. The old names are deprecated...
>
> - `pass_context` replaces `contextfunction` and `contextfilter`.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mkdocs/utils/filters.py`
Content:
```
1 import jinja2
2
3 from mkdocs.utils import normalize_url
4
5
6 @jinja2.contextfilter
7 def url_filter(context, value):
8 """ A Template filter to normalize URLs. """
9 return normalize_url(value, page=context['page'], base=context['base_url'])
10
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/mkdocs/utils/filters.py b/mkdocs/utils/filters.py
--- a/mkdocs/utils/filters.py
+++ b/mkdocs/utils/filters.py
@@ -1,9 +1,12 @@
-import jinja2
+try:
+ from jinja2 import pass_context as contextfilter
+except ImportError:
+ from jinja2 import contextfilter
from mkdocs.utils import normalize_url
[email protected]
+@contextfilter
def url_filter(context, value):
""" A Template filter to normalize URLs. """
return normalize_url(value, page=context['page'], base=context['base_url'])
|
{"golden_diff": "diff --git a/mkdocs/utils/filters.py b/mkdocs/utils/filters.py\n--- a/mkdocs/utils/filters.py\n+++ b/mkdocs/utils/filters.py\n@@ -1,9 +1,12 @@\n-import jinja2\n+try:\n+ from jinja2 import pass_context as contextfilter\n+except ImportError:\n+ from jinja2 import contextfilter\n \n from mkdocs.utils import normalize_url\n \n \[email protected]\n+@contextfilter\n def url_filter(context, value):\n \"\"\" A Template filter to normalize URLs. \"\"\"\n return normalize_url(value, page=context['page'], base=context['base_url'])\n", "issue": "jinja2 3.1.0 breaks mkdocs\nsince the jinja2 3.1.0 release mkdocs does not work any more:\r\n\r\n```\r\nadmin@host ui % pip install jinja2==3.1.0\r\nCollecting jinja2==3.1.0\r\n Using cached Jinja2-3.1.0-py3-none-any.whl (132 kB)\r\nInstalling collected packages: jinja2\r\n Attempting uninstall: jinja2\r\n Found existing installation: Jinja2 3.0.0\r\n Uninstalling Jinja2-3.0.0:\r\n Successfully uninstalled Jinja2-3.0.0\r\nSuccessfully installed jinja2-3.1.0\r\nadmin@host ui % mkdocs build\r\nTraceback (most recent call last):\r\n File \"/usr/local/bin/mkdocs\", line 8, in <module>\r\n sys.exit(cli())\r\n File \"/usr/local/lib/python3.9/site-packages/click/core.py\", line 829, in __call__\r\n return self.main(*args, **kwargs)\r\n File \"/usr/local/lib/python3.9/site-packages/click/core.py\", line 782, in main\r\n rv = self.invoke(ctx)\r\n File \"/usr/local/lib/python3.9/site-packages/click/core.py\", line 1259, in invoke\r\n return _process_result(sub_ctx.command.invoke(sub_ctx))\r\n File \"/usr/local/lib/python3.9/site-packages/click/core.py\", line 1066, in invoke\r\n return ctx.invoke(self.callback, **ctx.params)\r\n File \"/usr/local/lib/python3.9/site-packages/click/core.py\", line 610, in invoke\r\n return callback(*args, **kwargs)\r\n File \"/usr/local/lib/python3.9/site-packages/mkdocs/__main__.py\", line 187, in build_command\r\n build.build(config.load_config(**kwargs), dirty=not clean)\r\n File \"/usr/local/lib/python3.9/site-packages/mkdocs/config/base.py\", line 216, in load_config\r\n from mkdocs.config.defaults import get_schema\r\n File \"/usr/local/lib/python3.9/site-packages/mkdocs/config/defaults.py\", line 1, in <module>\r\n from mkdocs.config import config_options\r\n File \"/usr/local/lib/python3.9/site-packages/mkdocs/config/config_options.py\", line 8, in <module>\r\n from mkdocs import utils, theme, plugins\r\n File \"/usr/local/lib/python3.9/site-packages/mkdocs/theme.py\", line 6, in <module>\r\n from mkdocs.utils import filters\r\n File \"/usr/local/lib/python3.9/site-packages/mkdocs/utils/filters.py\", line 13, in <module>\r\n @jinja2.contextfilter\r\nAttributeError: module 'jinja2' has no attribute 'contextfilter'\r\n```\r\nHowever, if I install jinja2 3.0.0:\r\n```\r\nadmin@host ui % pip install jinja2==3.0.0\r\nCollecting jinja2==3.0.0\r\n Using cached Jinja2-3.0.0-py3-none-any.whl (133 kB)\r\nRequirement already satisfied: MarkupSafe>=2.0.0rc2 in /usr/local/lib/python3.9/site-packages (from jinja2==3.0.0) (2.1.1)\r\nInstalling collected packages: jinja2\r\n Attempting uninstall: jinja2\r\n Found existing installation: Jinja2 3.1.0\r\n Uninstalling Jinja2-3.1.0:\r\n Successfully uninstalled Jinja2-3.1.0\r\nSuccessfully installed jinja2-3.0.0\r\nadmin@host ui % mkdocs build\r\nINFO - Cleaning site directory\r\nINFO - Building documentation to directory: /Users/admin/git/searchlab/ui/site\r\nINFO - Documentation built in 0.33 seconds\r\n```\r\n\r\n- mkdocs can be patched by explicitly installing jinja2 3.0.0.\r\n- 
maybe this is not a mkdocs bug, but a jinja2 bug; however, this could be patched in mkdocs as well.\r\n\nPrevent error with Jinja2 v3.1\nFixes #2794\r\n\r\nSee [Jinja2 v3.0.0 changes](https://jinja.palletsprojects.com/en/3.0.x/changes/#version-3-0-0):\r\n\r\n> The function and filter decorators have been renamed and unified. The old names are deprecated...\r\n> \r\n> - `pass_context` replaces `contextfunction` and `contextfilter`.\n", "before_files": [{"content": "import jinja2\n\nfrom mkdocs.utils import normalize_url\n\n\[email protected]\ndef url_filter(context, value):\n \"\"\" A Template filter to normalize URLs. \"\"\"\n return normalize_url(value, page=context['page'], base=context['base_url'])\n", "path": "mkdocs/utils/filters.py"}], "after_files": [{"content": "try:\n from jinja2 import pass_context as contextfilter\nexcept ImportError:\n from jinja2 import contextfilter\n\nfrom mkdocs.utils import normalize_url\n\n\n@contextfilter\ndef url_filter(context, value):\n \"\"\" A Template filter to normalize URLs. \"\"\"\n return normalize_url(value, page=context['page'], base=context['base_url'])\n", "path": "mkdocs/utils/filters.py"}]}
| 1,341 | 142 |
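The version-compatibility shim applied in the patch above generalizes beyond mkdocs; a minimal standalone sketch, with a placeholder filter body that is not part of the mkdocs code:

```python
# Jinja2 >= 3.0 provides pass_context; contextfilter exists only in older releases
# (it was removed in 3.1), so try the new name first and fall back to the old one.
try:
    from jinja2 import pass_context as contextfilter
except ImportError:
    from jinja2 import contextfilter


@contextfilter
def example_filter(context, value):
    # Placeholder body: a real filter would read template context entries here.
    return f"{context.get('base_url', '')}/{value}"
```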
gh_patches_debug_4805
|
rasdani/github-patches
|
git_diff
|
statsmodels__statsmodels-680
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Test Baxter King band-pass filter fails with scipy 0.12 beta1
current git head 1b12824f53b1 and 0.4.3, python2.7 tested on ubuntu 13.04 amd64, haven't tried scipy 0.11 but it the test seems to work with the 0.10.1 package
```
======================================================================
ERROR: Test Baxter King band-pass filter. Results are taken from Stata
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/statsmodels/statsmodels/tsa/filters/tests/test_filters.py", line 47, in test_bking1d
Y = bkfilter(X, 6, 32, 12)
File "/statsmodels/statsmodels/tsa/filters/bk_filter.py", line 72, in bkfilter
return fftconvolve(bweights, X, mode='valid') # get a centered moving avg/
File "/usr/lib/python2.7/dist-packages/scipy/signal/signaltools.py", line 204, in fftconvolve
_check_valid_mode_shapes(s1, s2)
File "/usr/lib/python2.7/dist-packages/scipy/signal/signaltools.py", line 64, in _check_valid_mode_shapes
"in1 should have at least as many items as in2 in "
ValueError: in1 should have at least as many items as in2 in every dimension for 'valid' mode.
----------------------------------------------------------------------
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `statsmodels/tsa/filters/bk_filter.py`
Content:
```
1 import numpy as np
2 from scipy.signal import fftconvolve
3
4 def bkfilter(X, low=6, high=32, K=12):
5 """
6 Baxter-King bandpass filter
7
8 Parameters
9 ----------
10 X : array-like
11 A 1 or 2d ndarray. If 2d, variables are assumed to be in columns.
12 low : float
13 Minimum period for oscillations, ie., Baxter and King suggest that
14 the Burns-Mitchell U.S. business cycle has 6 for quarterly data and
15 1.5 for annual data.
16 high : float
17 Maximum period for oscillations BK suggest that the U.S.
18 business cycle has 32 for quarterly data and 8 for annual data.
19 K : int
20 Lead-lag length of the filter. Baxter and King propose a truncation
21 length of 12 for quarterly data and 3 for annual data.
22
23 Returns
24 -------
25 Y : array
26 Cyclical component of X
27
28 References
29 ---------- ::
30 Baxter, M. and R. G. King. "Measuring Business Cycles: Approximate
31 Band-Pass Filters for Economic Time Series." *Review of Economics and
32 Statistics*, 1999, 81(4), 575-593.
33
34 Notes
35 -----
36 Returns a centered weighted moving average of the original series. Where
37 the weights a[j] are computed ::
38
39 a[j] = b[j] + theta, for j = 0, +/-1, +/-2, ... +/- K
40 b[0] = (omega_2 - omega_1)/pi
41 b[j] = 1/(pi*j)(sin(omega_2*j)-sin(omega_1*j), for j = +/-1, +/-2,...
42
43 and theta is a normalizing constant ::
44
45 theta = -sum(b)/(2K+1)
46
47 Examples
48 --------
49 >>> import statsmodels.api as sm
50 >>> dta = sm.datasets.macrodata.load()
51 >>> X = dta.data['realinv']
52 >>> Y = sm.tsa.filters.bkfilter(X, 6, 24, 12)
53 """
54 #TODO: change the docstring to ..math::?
55 #TODO: allow windowing functions to correct for Gibb's Phenomenon?
56 # adjust bweights (symmetrically) by below before demeaning
57 # Lancosz Sigma Factors np.sinc(2*j/(2.*K+1))
58 if low < 2:
59 raise ValueError("low cannot be less than 2")
60 X = np.asarray(X)
61 omega_1 = 2.*np.pi/high # convert from freq. to periodicity
62 omega_2 = 2.*np.pi/low
63 bweights = np.zeros(2*K+1)
64 bweights[K] = (omega_2 - omega_1)/np.pi # weight at zero freq.
65 j = np.arange(1,int(K)+1)
66 weights = 1/(np.pi*j)*(np.sin(omega_2*j)-np.sin(omega_1*j))
67 bweights[K+j] = weights # j is an idx
68 bweights[:K] = weights[::-1] # make symmetric weights
69 bweights -= bweights.mean() # make sure weights sum to zero
70 if X.ndim == 2:
71 bweights = bweights[:,None]
72 return fftconvolve(bweights, X, mode='valid') # get a centered moving avg/
73 # convolution
74
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/statsmodels/tsa/filters/bk_filter.py b/statsmodels/tsa/filters/bk_filter.py
--- a/statsmodels/tsa/filters/bk_filter.py
+++ b/statsmodels/tsa/filters/bk_filter.py
@@ -69,5 +69,5 @@
bweights -= bweights.mean() # make sure weights sum to zero
if X.ndim == 2:
bweights = bweights[:,None]
- return fftconvolve(bweights, X, mode='valid') # get a centered moving avg/
+ return fftconvolve(X, bweights, mode='valid') # get a centered moving avg/
# convolution
|
{"golden_diff": "diff --git a/statsmodels/tsa/filters/bk_filter.py b/statsmodels/tsa/filters/bk_filter.py\n--- a/statsmodels/tsa/filters/bk_filter.py\n+++ b/statsmodels/tsa/filters/bk_filter.py\n@@ -69,5 +69,5 @@\n bweights -= bweights.mean() # make sure weights sum to zero\n if X.ndim == 2:\n bweights = bweights[:,None]\n- return fftconvolve(bweights, X, mode='valid') # get a centered moving avg/\n+ return fftconvolve(X, bweights, mode='valid') # get a centered moving avg/\n # convolution\n", "issue": "Test Baxter King band-pass filter fails with scipy 0.12 beta1\ncurrent git head 1b12824f53b1 and 0.4.3, python2.7 tested on ubuntu 13.04 amd64, haven't tried scipy 0.11 but it the test seems to work with the 0.10.1 package\n\n```\n======================================================================\nERROR: Test Baxter King band-pass filter. Results are taken from Stata\n----------------------------------------------------------------------\nTraceback (most recent call last):\n File \"/usr/lib/python2.7/dist-packages/nose/case.py\", line 197, in runTest\n self.test(*self.arg)\n File \"/statsmodels/statsmodels/tsa/filters/tests/test_filters.py\", line 47, in test_bking1d\n Y = bkfilter(X, 6, 32, 12)\n File \"/statsmodels/statsmodels/tsa/filters/bk_filter.py\", line 72, in bkfilter\n return fftconvolve(bweights, X, mode='valid') # get a centered moving avg/\n File \"/usr/lib/python2.7/dist-packages/scipy/signal/signaltools.py\", line 204, in fftconvolve\n _check_valid_mode_shapes(s1, s2)\n File \"/usr/lib/python2.7/dist-packages/scipy/signal/signaltools.py\", line 64, in _check_valid_mode_shapes\n \"in1 should have at least as many items as in2 in \"\nValueError: in1 should have at least as many items as in2 in every dimension for 'valid' mode.\n\n----------------------------------------------------------------------\n```\n\n", "before_files": [{"content": "import numpy as np\nfrom scipy.signal import fftconvolve\n\ndef bkfilter(X, low=6, high=32, K=12):\n \"\"\"\n Baxter-King bandpass filter\n\n Parameters\n ----------\n X : array-like\n A 1 or 2d ndarray. If 2d, variables are assumed to be in columns.\n low : float\n Minimum period for oscillations, ie., Baxter and King suggest that\n the Burns-Mitchell U.S. business cycle has 6 for quarterly data and\n 1.5 for annual data.\n high : float\n Maximum period for oscillations BK suggest that the U.S.\n business cycle has 32 for quarterly data and 8 for annual data.\n K : int\n Lead-lag length of the filter. Baxter and King propose a truncation\n length of 12 for quarterly data and 3 for annual data.\n\n Returns\n -------\n Y : array\n Cyclical component of X\n\n References\n ---------- ::\n Baxter, M. and R. G. King. \"Measuring Business Cycles: Approximate\n Band-Pass Filters for Economic Time Series.\" *Review of Economics and\n Statistics*, 1999, 81(4), 575-593.\n\n Notes\n -----\n Returns a centered weighted moving average of the original series. Where\n the weights a[j] are computed ::\n\n a[j] = b[j] + theta, for j = 0, +/-1, +/-2, ... 
+/- K\n b[0] = (omega_2 - omega_1)/pi\n b[j] = 1/(pi*j)(sin(omega_2*j)-sin(omega_1*j), for j = +/-1, +/-2,...\n\n and theta is a normalizing constant ::\n\n theta = -sum(b)/(2K+1)\n\n Examples\n --------\n >>> import statsmodels.api as sm\n >>> dta = sm.datasets.macrodata.load()\n >>> X = dta.data['realinv']\n >>> Y = sm.tsa.filters.bkfilter(X, 6, 24, 12)\n \"\"\"\n#TODO: change the docstring to ..math::?\n#TODO: allow windowing functions to correct for Gibb's Phenomenon?\n# adjust bweights (symmetrically) by below before demeaning\n# Lancosz Sigma Factors np.sinc(2*j/(2.*K+1))\n if low < 2:\n raise ValueError(\"low cannot be less than 2\")\n X = np.asarray(X)\n omega_1 = 2.*np.pi/high # convert from freq. to periodicity\n omega_2 = 2.*np.pi/low\n bweights = np.zeros(2*K+1)\n bweights[K] = (omega_2 - omega_1)/np.pi # weight at zero freq.\n j = np.arange(1,int(K)+1)\n weights = 1/(np.pi*j)*(np.sin(omega_2*j)-np.sin(omega_1*j))\n bweights[K+j] = weights # j is an idx\n bweights[:K] = weights[::-1] # make symmetric weights\n bweights -= bweights.mean() # make sure weights sum to zero\n if X.ndim == 2:\n bweights = bweights[:,None]\n return fftconvolve(bweights, X, mode='valid') # get a centered moving avg/\n # convolution\n", "path": "statsmodels/tsa/filters/bk_filter.py"}], "after_files": [{"content": "import numpy as np\nfrom scipy.signal import fftconvolve\n\ndef bkfilter(X, low=6, high=32, K=12):\n \"\"\"\n Baxter-King bandpass filter\n\n Parameters\n ----------\n X : array-like\n A 1 or 2d ndarray. If 2d, variables are assumed to be in columns.\n low : float\n Minimum period for oscillations, ie., Baxter and King suggest that\n the Burns-Mitchell U.S. business cycle has 6 for quarterly data and\n 1.5 for annual data.\n high : float\n Maximum period for oscillations BK suggest that the U.S.\n business cycle has 32 for quarterly data and 8 for annual data.\n K : int\n Lead-lag length of the filter. Baxter and King propose a truncation\n length of 12 for quarterly data and 3 for annual data.\n\n Returns\n -------\n Y : array\n Cyclical component of X\n\n References\n ---------- ::\n Baxter, M. and R. G. King. \"Measuring Business Cycles: Approximate\n Band-Pass Filters for Economic Time Series.\" *Review of Economics and\n Statistics*, 1999, 81(4), 575-593.\n\n Notes\n -----\n Returns a centered weighted moving average of the original series. Where\n the weights a[j] are computed ::\n\n a[j] = b[j] + theta, for j = 0, +/-1, +/-2, ... +/- K\n b[0] = (omega_2 - omega_1)/pi\n b[j] = 1/(pi*j)(sin(omega_2*j)-sin(omega_1*j), for j = +/-1, +/-2,...\n\n and theta is a normalizing constant ::\n\n theta = -sum(b)/(2K+1)\n\n Examples\n --------\n >>> import statsmodels.api as sm\n >>> dta = sm.datasets.macrodata.load()\n >>> X = dta.data['realinv']\n >>> Y = sm.tsa.filters.bkfilter(X, 6, 24, 12)\n \"\"\"\n#TODO: change the docstring to ..math::?\n#TODO: allow windowing functions to correct for Gibb's Phenomenon?\n# adjust bweights (symmetrically) by below before demeaning\n# Lancosz Sigma Factors np.sinc(2*j/(2.*K+1))\n if low < 2:\n raise ValueError(\"low cannot be less than 2\")\n X = np.asarray(X)\n omega_1 = 2.*np.pi/high # convert from freq. 
to periodicity\n omega_2 = 2.*np.pi/low\n bweights = np.zeros(2*K+1)\n bweights[K] = (omega_2 - omega_1)/np.pi # weight at zero freq.\n j = np.arange(1,int(K)+1)\n weights = 1/(np.pi*j)*(np.sin(omega_2*j)-np.sin(omega_1*j))\n bweights[K+j] = weights # j is an idx\n bweights[:K] = weights[::-1] # make symmetric weights\n bweights -= bweights.mean() # make sure weights sum to zero\n if X.ndim == 2:\n bweights = bweights[:,None]\n return fftconvolve(X, bweights, mode='valid') # get a centered moving avg/\n # convolution\n", "path": "statsmodels/tsa/filters/bk_filter.py"}]}
| 1,525 | 145 |
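The argument swap in the patch above can be checked in isolation; the shapes below are illustrative (200 observations, K = 12) and are not taken from the test data:

```python
import numpy as np
from scipy.signal import fftconvolve

X = np.random.randn(200, 1)            # data column, shape (200, 1)
bweights = np.zeros((2 * 12 + 1, 1))   # centered filter weights, shape (25, 1)

# In 'valid' mode only fully overlapping windows are kept; the SciPy releases cited
# in the traceback require the first argument to be at least as large as the second
# in every dimension, which is why the patch passes X first and bweights second.
Y = fftconvolve(X, bweights, mode="valid")
print(Y.shape)                          # (176, 1) == (200 - 25 + 1, 1)
```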
gh_patches_debug_10840
|
rasdani/github-patches
|
git_diff
|
nf-core__tools-1263
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Non-updated lint tip message
## Description of the bug
Linting with files applicable for auto-fixing gives the following message:
```
Tip: Some of these linting errors can automatically be resolved with the following command:
nf-core lint . --fix files_unchanged
```
However the `.` declaration has been removed in the latest version of nf-core tools, and so the command errors
```
Error: Got unexpected extra argument (.)
```
## Steps to reproduce
Modify a template file (e.g. `.github/CONTRIBUTING.md`, and run lint command
## Expected behaviour
Update message to remove `.`
## System
- nf-core tools version: 2.1
## Additional context
<!-- Add any other context about the problem here. -->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nf_core/lint_utils.py`
Content:
```
1 import rich
2 from rich.console import Console
3 from rich.table import Table
4 import logging
5
6 import nf_core.utils
7
8 log = logging.getLogger(__name__)
9
10 # Create a console used by all lint tests
11 console = Console(force_terminal=nf_core.utils.rich_force_colors())
12
13
14 def print_joint_summary(lint_obj, module_lint_obj):
15 """Print a joint summary of the general pipe lint tests and the module lint tests"""
16 nbr_passed = len(lint_obj.passed) + len(module_lint_obj.passed)
17 nbr_ignored = len(lint_obj.ignored)
18 nbr_fixed = len(lint_obj.fixed)
19 nbr_warned = len(lint_obj.warned) + len(module_lint_obj.warned)
20 nbr_failed = len(lint_obj.failed) + len(module_lint_obj.failed)
21
22 def _s(some_length):
23 return "" if some_length == 1 else "s"
24
25 summary_colour = "red" if nbr_failed > 0 else "green"
26 table = Table(box=rich.box.ROUNDED, style=summary_colour)
27 table.add_column(f"LINT RESULTS SUMMARY".format(nbr_passed), no_wrap=True)
28 table.add_row(r"[green][✔] {:>3} Test{} Passed".format(nbr_passed, _s(nbr_passed)))
29 if nbr_fixed:
30 table.add_row(r"[bright blue][?] {:>3} Test{} Fixed".format(nbr_fixed, _s(nbr_fixed)))
31 table.add_row(r"[grey58][?] {:>3} Test{} Ignored".format(nbr_ignored, _s(nbr_ignored)))
32 table.add_row(r"[yellow][!] {:>3} Test Warning{}".format(nbr_warned, _s(nbr_warned)))
33 table.add_row(r"[red][✗] {:>3} Test{} Failed".format(nbr_failed, _s(nbr_failed)))
34 console.print(table)
35
36
37 def print_fixes(lint_obj, module_lint_obj):
38 """Prints available and applied fixes"""
39
40 if len(lint_obj.could_fix):
41 fix_cmd = "nf-core lint {} --fix {}".format(lint_obj.wf_path, " --fix ".join(lint_obj.could_fix))
42 console.print(
43 f"\nTip: Some of these linting errors can automatically be resolved with the following command:\n\n[blue] {fix_cmd}\n"
44 )
45 if len(lint_obj.fix):
46 console.print(
47 "Automatic fixes applied. Please check with 'git diff' and revert any changes you do not want with 'git checkout <file>'."
48 )
49
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/nf_core/lint_utils.py b/nf_core/lint_utils.py
--- a/nf_core/lint_utils.py
+++ b/nf_core/lint_utils.py
@@ -38,7 +38,9 @@
"""Prints available and applied fixes"""
if len(lint_obj.could_fix):
- fix_cmd = "nf-core lint {} --fix {}".format(lint_obj.wf_path, " --fix ".join(lint_obj.could_fix))
+ fix_cmd = "nf-core lint {}--fix {}".format(
+ "" if lint_obj.wf_path == "." else f"--dir {lint_obj.wf_path}", " --fix ".join(lint_obj.could_fix)
+ )
console.print(
f"\nTip: Some of these linting errors can automatically be resolved with the following command:\n\n[blue] {fix_cmd}\n"
)
|
{"golden_diff": "diff --git a/nf_core/lint_utils.py b/nf_core/lint_utils.py\n--- a/nf_core/lint_utils.py\n+++ b/nf_core/lint_utils.py\n@@ -38,7 +38,9 @@\n \"\"\"Prints available and applied fixes\"\"\"\n \n if len(lint_obj.could_fix):\n- fix_cmd = \"nf-core lint {} --fix {}\".format(lint_obj.wf_path, \" --fix \".join(lint_obj.could_fix))\n+ fix_cmd = \"nf-core lint {}--fix {}\".format(\n+ \"\" if lint_obj.wf_path == \".\" else f\"--dir {lint_obj.wf_path}\", \" --fix \".join(lint_obj.could_fix)\n+ )\n console.print(\n f\"\\nTip: Some of these linting errors can automatically be resolved with the following command:\\n\\n[blue] {fix_cmd}\\n\"\n )\n", "issue": "Non-updated lint tip message\n## Description of the bug\r\n\r\nLinting with files applicable for auto-fixing gives the following message:\r\n\r\n```\r\nTip: Some of these linting errors can automatically be resolved with the following command:\r\n\r\n nf-core lint . --fix files_unchanged\r\n```\r\n\r\nHowever the `.` declaration has been removed in the latest version of nf-core tools, and so the command errors\r\n\r\n```\r\nError: Got unexpected extra argument (.)\r\n```\r\n\r\n## Steps to reproduce\r\n\r\nModify a template file (e.g. `.github/CONTRIBUTING.md`, and run lint command\r\n\r\n## Expected behaviour\r\n\r\nUpdate message to remove `.`\r\n\r\n## System\r\n\r\n- nf-core tools version: 2.1\r\n\r\n## Additional context\r\n\r\n<!-- Add any other context about the problem here. -->\r\n\n", "before_files": [{"content": "import rich\nfrom rich.console import Console\nfrom rich.table import Table\nimport logging\n\nimport nf_core.utils\n\nlog = logging.getLogger(__name__)\n\n# Create a console used by all lint tests\nconsole = Console(force_terminal=nf_core.utils.rich_force_colors())\n\n\ndef print_joint_summary(lint_obj, module_lint_obj):\n \"\"\"Print a joint summary of the general pipe lint tests and the module lint tests\"\"\"\n nbr_passed = len(lint_obj.passed) + len(module_lint_obj.passed)\n nbr_ignored = len(lint_obj.ignored)\n nbr_fixed = len(lint_obj.fixed)\n nbr_warned = len(lint_obj.warned) + len(module_lint_obj.warned)\n nbr_failed = len(lint_obj.failed) + len(module_lint_obj.failed)\n\n def _s(some_length):\n return \"\" if some_length == 1 else \"s\"\n\n summary_colour = \"red\" if nbr_failed > 0 else \"green\"\n table = Table(box=rich.box.ROUNDED, style=summary_colour)\n table.add_column(f\"LINT RESULTS SUMMARY\".format(nbr_passed), no_wrap=True)\n table.add_row(r\"[green][\u2714] {:>3} Test{} Passed\".format(nbr_passed, _s(nbr_passed)))\n if nbr_fixed:\n table.add_row(r\"[bright blue][?] {:>3} Test{} Fixed\".format(nbr_fixed, _s(nbr_fixed)))\n table.add_row(r\"[grey58][?] {:>3} Test{} Ignored\".format(nbr_ignored, _s(nbr_ignored)))\n table.add_row(r\"[yellow][!] {:>3} Test Warning{}\".format(nbr_warned, _s(nbr_warned)))\n table.add_row(r\"[red][\u2717] {:>3} Test{} Failed\".format(nbr_failed, _s(nbr_failed)))\n console.print(table)\n\n\ndef print_fixes(lint_obj, module_lint_obj):\n \"\"\"Prints available and applied fixes\"\"\"\n\n if len(lint_obj.could_fix):\n fix_cmd = \"nf-core lint {} --fix {}\".format(lint_obj.wf_path, \" --fix \".join(lint_obj.could_fix))\n console.print(\n f\"\\nTip: Some of these linting errors can automatically be resolved with the following command:\\n\\n[blue] {fix_cmd}\\n\"\n )\n if len(lint_obj.fix):\n console.print(\n \"Automatic fixes applied. 
Please check with 'git diff' and revert any changes you do not want with 'git checkout <file>'.\"\n )\n", "path": "nf_core/lint_utils.py"}], "after_files": [{"content": "import rich\nfrom rich.console import Console\nfrom rich.table import Table\nimport logging\n\nimport nf_core.utils\n\nlog = logging.getLogger(__name__)\n\n# Create a console used by all lint tests\nconsole = Console(force_terminal=nf_core.utils.rich_force_colors())\n\n\ndef print_joint_summary(lint_obj, module_lint_obj):\n \"\"\"Print a joint summary of the general pipe lint tests and the module lint tests\"\"\"\n nbr_passed = len(lint_obj.passed) + len(module_lint_obj.passed)\n nbr_ignored = len(lint_obj.ignored)\n nbr_fixed = len(lint_obj.fixed)\n nbr_warned = len(lint_obj.warned) + len(module_lint_obj.warned)\n nbr_failed = len(lint_obj.failed) + len(module_lint_obj.failed)\n\n def _s(some_length):\n return \"\" if some_length == 1 else \"s\"\n\n summary_colour = \"red\" if nbr_failed > 0 else \"green\"\n table = Table(box=rich.box.ROUNDED, style=summary_colour)\n table.add_column(f\"LINT RESULTS SUMMARY\".format(nbr_passed), no_wrap=True)\n table.add_row(r\"[green][\u2714] {:>3} Test{} Passed\".format(nbr_passed, _s(nbr_passed)))\n if nbr_fixed:\n table.add_row(r\"[bright blue][?] {:>3} Test{} Fixed\".format(nbr_fixed, _s(nbr_fixed)))\n table.add_row(r\"[grey58][?] {:>3} Test{} Ignored\".format(nbr_ignored, _s(nbr_ignored)))\n table.add_row(r\"[yellow][!] {:>3} Test Warning{}\".format(nbr_warned, _s(nbr_warned)))\n table.add_row(r\"[red][\u2717] {:>3} Test{} Failed\".format(nbr_failed, _s(nbr_failed)))\n console.print(table)\n\n\ndef print_fixes(lint_obj, module_lint_obj):\n \"\"\"Prints available and applied fixes\"\"\"\n\n if len(lint_obj.could_fix):\n fix_cmd = \"nf-core lint {}--fix {}\".format(\n \"\" if lint_obj.wf_path == \".\" else f\"--dir {lint_obj.wf_path}\", \" --fix \".join(lint_obj.could_fix)\n )\n console.print(\n f\"\\nTip: Some of these linting errors can automatically be resolved with the following command:\\n\\n[blue] {fix_cmd}\\n\"\n )\n if len(lint_obj.fix):\n console.print(\n \"Automatic fixes applied. Please check with 'git diff' and revert any changes you do not want with 'git checkout <file>'.\"\n )\n", "path": "nf_core/lint_utils.py"}]}
| 1,069 | 196 |
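A standalone rendering of the patched tip-message logic, lightly reworked (explicit spacing, plain function arguments) so it can run outside the lint object:

```python
def build_fix_cmd(wf_path, could_fix):
    # Omit the path entirely for the default ".", otherwise pass it via --dir.
    dir_arg = "" if wf_path == "." else f"--dir {wf_path} "
    return f"nf-core lint {dir_arg}--fix " + " --fix ".join(could_fix)


print(build_fix_cmd(".", ["files_unchanged"]))
# nf-core lint --fix files_unchanged
print(build_fix_cmd("my-pipeline", ["files_unchanged", "files_exist"]))
# nf-core lint --dir my-pipeline --fix files_unchanged --fix files_exist
```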
gh_patches_debug_23895
|
rasdani/github-patches
|
git_diff
|
alltheplaces__alltheplaces-2870
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Spider rentacenter is broken
During the global build at 2021-05-26-14-42-23, spider **rentacenter** failed with **2196 features** and **1 errors**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/rentacenter.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/rentacenter.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/rentacenter.geojson))
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `locations/spiders/rentacenter.py`
Content:
```
1 import json
2 import scrapy
3 import re
4 from scrapy.selector import Selector
5
6 from locations.items import GeojsonPointItem
7 from locations.hours import OpeningHours
8
9
10 DAY_MAPPING = {
11 'Monday': 'Mo',
12 'Tuesday': 'Tu',
13 'Wednesday': 'We',
14 'Thursday': 'Th',
15 'Friday': 'Fr',
16 'Saturday': 'Sa',
17 'Sunday': 'Su'
18 }
19
20
21 class RentACenterSpider(scrapy.Spider):
22 name = "rentacenter"
23 item_attributes = { 'brand': "Rent-A-Center" }
24 allowed_domains = ["rentacenter.com"]
25
26 start_urls = [
27 "https://locations.rentacenter.com/sitemap.xml",
28 ]
29
30 def parse_hours(self, hours):
31 opening_hours = OpeningHours()
32
33 for hour in hours:
34 opening_hours.add_range(day=DAY_MAPPING[hour["dayOfWeek"].replace('http://schema.org/', '')],
35 open_time=hour["opens"],
36 close_time=hour["closes"],
37 time_format='%H:%M:%S')
38
39 return opening_hours.as_opening_hours()
40
41 def parse_location(self, response):
42 data = response.xpath('//script[@type="application/ld+json"]/text()').extract_first()
43 data = json.loads(data)
44
45 ref = data.get("branchCode")
46 if not ref:
47 return # not a store page
48
49 properties = {
50 'addr_full': data["address"]["streetAddress"],
51 'phone': data["telephone"],
52 'city': data["address"]["addressLocality"],
53 'state': data["address"]["addressRegion"],
54 'postcode': data["address"]["postalCode"],
55 'country': 'US',
56 'ref': ref,
57 'website': response.url,
58 'lat': float(data["geo"]["latitude"]),
59 'lon': float(data["geo"]["longitude"]),
60 'name': data["name"]
61 }
62
63 hours = self.parse_hours(data.get("openingHoursSpecification", []))
64 if hours:
65 properties["opening_hours"] = hours
66
67 yield GeojsonPointItem(**properties)
68
69 def parse_state_sitemap(self, response):
70 xml = Selector(response)
71 xml.remove_namespaces()
72
73 urls = xml.xpath('//loc/text()').extract()
74 urls = [url.strip() for url in urls]
75
76 # individual store pages are listed at top, then a state page, then bunch of other non-store pages
77 # find the index position of the state page and then only parse urls before that
78 i = urls.index(re.search(r'^(https://locations.rentacenter.com/.+?)/.*$', urls[0]).groups()[0] + '/')
79 for url in urls[:i]:
80 yield scrapy.Request(url, callback=self.parse_location)
81
82 def parse(self, response):
83 xml = Selector(response)
84 xml.remove_namespaces()
85
86 urls = xml.xpath('//loc/text()').extract()
87 urls = [url.strip() for url in urls]
88
89 for url in urls:
90 yield scrapy.Request(url, callback=self.parse_state_sitemap)
91
92
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/locations/spiders/rentacenter.py b/locations/spiders/rentacenter.py
--- a/locations/spiders/rentacenter.py
+++ b/locations/spiders/rentacenter.py
@@ -48,15 +48,15 @@
properties = {
'addr_full': data["address"]["streetAddress"],
- 'phone': data["telephone"],
+ 'phone': data.get("telephone"),
'city': data["address"]["addressLocality"],
'state': data["address"]["addressRegion"],
'postcode': data["address"]["postalCode"],
'country': 'US',
'ref': ref,
'website': response.url,
- 'lat': float(data["geo"]["latitude"]),
- 'lon': float(data["geo"]["longitude"]),
+ 'lat': data["geo"]["latitude"],
+ 'lon': data["geo"]["longitude"],
'name': data["name"]
}
@@ -87,5 +87,7 @@
urls = [url.strip() for url in urls]
for url in urls:
+ if '/home/' in url:
+ continue
yield scrapy.Request(url, callback=self.parse_state_sitemap)
|
{"golden_diff": "diff --git a/locations/spiders/rentacenter.py b/locations/spiders/rentacenter.py\n--- a/locations/spiders/rentacenter.py\n+++ b/locations/spiders/rentacenter.py\n@@ -48,15 +48,15 @@\n \n properties = {\n 'addr_full': data[\"address\"][\"streetAddress\"],\n- 'phone': data[\"telephone\"],\n+ 'phone': data.get(\"telephone\"),\n 'city': data[\"address\"][\"addressLocality\"],\n 'state': data[\"address\"][\"addressRegion\"],\n 'postcode': data[\"address\"][\"postalCode\"],\n 'country': 'US',\n 'ref': ref,\n 'website': response.url,\n- 'lat': float(data[\"geo\"][\"latitude\"]),\n- 'lon': float(data[\"geo\"][\"longitude\"]),\n+ 'lat': data[\"geo\"][\"latitude\"],\n+ 'lon': data[\"geo\"][\"longitude\"],\n 'name': data[\"name\"]\n }\n \n@@ -87,5 +87,7 @@\n urls = [url.strip() for url in urls]\n \n for url in urls:\n+ if '/home/' in url:\n+ continue\n yield scrapy.Request(url, callback=self.parse_state_sitemap)\n", "issue": "Spider rentacenter is broken\nDuring the global build at 2021-05-26-14-42-23, spider **rentacenter** failed with **2196 features** and **1 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/rentacenter.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/rentacenter.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/rentacenter.geojson))\n", "before_files": [{"content": "import json\nimport scrapy\nimport re\nfrom scrapy.selector import Selector\n\nfrom locations.items import GeojsonPointItem\nfrom locations.hours import OpeningHours\n\n\nDAY_MAPPING = {\n 'Monday': 'Mo',\n 'Tuesday': 'Tu',\n 'Wednesday': 'We',\n 'Thursday': 'Th',\n 'Friday': 'Fr',\n 'Saturday': 'Sa',\n 'Sunday': 'Su'\n}\n\n\nclass RentACenterSpider(scrapy.Spider):\n name = \"rentacenter\"\n item_attributes = { 'brand': \"Rent-A-Center\" }\n allowed_domains = [\"rentacenter.com\"]\n\n start_urls = [\n \"https://locations.rentacenter.com/sitemap.xml\",\n ]\n\n def parse_hours(self, hours):\n opening_hours = OpeningHours()\n\n for hour in hours:\n opening_hours.add_range(day=DAY_MAPPING[hour[\"dayOfWeek\"].replace('http://schema.org/', '')],\n open_time=hour[\"opens\"],\n close_time=hour[\"closes\"],\n time_format='%H:%M:%S')\n\n return opening_hours.as_opening_hours()\n\n def parse_location(self, response):\n data = response.xpath('//script[@type=\"application/ld+json\"]/text()').extract_first()\n data = json.loads(data)\n\n ref = data.get(\"branchCode\")\n if not ref:\n return # not a store page\n\n properties = {\n 'addr_full': data[\"address\"][\"streetAddress\"],\n 'phone': data[\"telephone\"],\n 'city': data[\"address\"][\"addressLocality\"],\n 'state': data[\"address\"][\"addressRegion\"],\n 'postcode': data[\"address\"][\"postalCode\"],\n 'country': 'US',\n 'ref': ref,\n 'website': response.url,\n 'lat': float(data[\"geo\"][\"latitude\"]),\n 'lon': float(data[\"geo\"][\"longitude\"]),\n 'name': data[\"name\"]\n }\n\n hours = self.parse_hours(data.get(\"openingHoursSpecification\", []))\n if hours:\n properties[\"opening_hours\"] = hours\n\n yield GeojsonPointItem(**properties)\n\n def parse_state_sitemap(self, response):\n xml = Selector(response)\n xml.remove_namespaces()\n\n urls = xml.xpath('//loc/text()').extract()\n urls = [url.strip() for url in urls]\n\n # individual store pages are listed at top, then a state page, then bunch of other non-store pages\n # find the index position of the state page and then only parse urls before that\n i = 
urls.index(re.search(r'^(https://locations.rentacenter.com/.+?)/.*$', urls[0]).groups()[0] + '/')\n for url in urls[:i]:\n yield scrapy.Request(url, callback=self.parse_location)\n\n def parse(self, response):\n xml = Selector(response)\n xml.remove_namespaces()\n\n urls = xml.xpath('//loc/text()').extract()\n urls = [url.strip() for url in urls]\n\n for url in urls:\n yield scrapy.Request(url, callback=self.parse_state_sitemap)\n\n", "path": "locations/spiders/rentacenter.py"}], "after_files": [{"content": "import json\nimport scrapy\nimport re\nfrom scrapy.selector import Selector\n\nfrom locations.items import GeojsonPointItem\nfrom locations.hours import OpeningHours\n\n\nDAY_MAPPING = {\n 'Monday': 'Mo',\n 'Tuesday': 'Tu',\n 'Wednesday': 'We',\n 'Thursday': 'Th',\n 'Friday': 'Fr',\n 'Saturday': 'Sa',\n 'Sunday': 'Su'\n}\n\n\nclass RentACenterSpider(scrapy.Spider):\n name = \"rentacenter\"\n item_attributes = { 'brand': \"Rent-A-Center\" }\n allowed_domains = [\"rentacenter.com\"]\n\n start_urls = [\n \"https://locations.rentacenter.com/sitemap.xml\",\n ]\n\n def parse_hours(self, hours):\n opening_hours = OpeningHours()\n\n for hour in hours:\n opening_hours.add_range(day=DAY_MAPPING[hour[\"dayOfWeek\"].replace('http://schema.org/', '')],\n open_time=hour[\"opens\"],\n close_time=hour[\"closes\"],\n time_format='%H:%M:%S')\n\n return opening_hours.as_opening_hours()\n\n def parse_location(self, response):\n data = response.xpath('//script[@type=\"application/ld+json\"]/text()').extract_first()\n data = json.loads(data)\n\n ref = data.get(\"branchCode\")\n if not ref:\n return # not a store page\n\n properties = {\n 'addr_full': data[\"address\"][\"streetAddress\"],\n 'phone': data.get(\"telephone\"),\n 'city': data[\"address\"][\"addressLocality\"],\n 'state': data[\"address\"][\"addressRegion\"],\n 'postcode': data[\"address\"][\"postalCode\"],\n 'country': 'US',\n 'ref': ref,\n 'website': response.url,\n 'lat': data[\"geo\"][\"latitude\"],\n 'lon': data[\"geo\"][\"longitude\"],\n 'name': data[\"name\"]\n }\n\n hours = self.parse_hours(data.get(\"openingHoursSpecification\", []))\n if hours:\n properties[\"opening_hours\"] = hours\n\n yield GeojsonPointItem(**properties)\n\n def parse_state_sitemap(self, response):\n xml = Selector(response)\n xml.remove_namespaces()\n\n urls = xml.xpath('//loc/text()').extract()\n urls = [url.strip() for url in urls]\n\n # individual store pages are listed at top, then a state page, then bunch of other non-store pages\n # find the index position of the state page and then only parse urls before that\n i = urls.index(re.search(r'^(https://locations.rentacenter.com/.+?)/.*$', urls[0]).groups()[0] + '/')\n for url in urls[:i]:\n yield scrapy.Request(url, callback=self.parse_location)\n\n def parse(self, response):\n xml = Selector(response)\n xml.remove_namespaces()\n\n urls = xml.xpath('//loc/text()').extract()\n urls = [url.strip() for url in urls]\n\n for url in urls:\n if '/home/' in url:\n continue\n yield scrapy.Request(url, callback=self.parse_state_sitemap)\n\n", "path": "locations/spiders/rentacenter.py"}]}
| 1,288 | 266 |
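The defensive field access introduced by the patch above can be shown with a toy JSON-LD payload; the values are invented and only the key names mirror what the spider reads:

```python
import json

raw = ('{"branchCode": "01234", "name": "Rent-A-Center",'
       ' "geo": {"latitude": "32.77", "longitude": "-96.80"},'
       ' "address": {"streetAddress": "1 Main St", "addressLocality": "Dallas",'
       ' "addressRegion": "TX", "postalCode": "75201"}}')
data = json.loads(raw)

phone = data.get("telephone")      # None instead of KeyError when the field is missing
lat = data["geo"]["latitude"]      # left as-is per the patch (the float() cast was removed)
lon = data["geo"]["longitude"]
print(phone, lat, lon)             # None 32.77 -96.80
```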
gh_patches_debug_64320
|
rasdani/github-patches
|
git_diff
|
pex-tool__pex-1442
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.48
On the docket:
+ [x] Remove zipapp execution mode & introduce --layout. #1438
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.47"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.47"
+__version__ = "2.1.48"
|
{"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.47\"\n+__version__ = \"2.1.48\"\n", "issue": "Release 2.1.48\nOn the docket:\r\n+ [x] Remove zipapp execution mode & introduce --layout. #1438 \n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.47\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.48\"\n", "path": "pex/version.py"}]}
| 342 | 96 |
gh_patches_debug_34315
|
rasdani/github-patches
|
git_diff
|
opensearch-project__opensearch-build-3540
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Revert component-based scripts lookup
### Is your feature request related to a problem? Please describe
In https://github.com/opensearch-project/opensearch-build/pull/2934 we have added a way to include component build scripts. There are several problems with that.
1. In https://github.com/opensearch-project/opensearch-build/issues/2188 we agreed to split monorepos. The maps monorepo came in as another new kind of monorepo that should not be supported.
2. There are no tests in the PR.
3. The new feature of path lookup is not documented in https://github.com/opensearch-project/opensearch-build/blob/cbe6841a3c459b65d5fb17b713994e5c01d7ee8e/src/paths/script_finder.py#L27
### Describe the solution you'd like
1. Add tests and documentation on top of https://github.com/opensearch-project/opensearch-build/pull/2934.
1. Split the maps monorepo, https://github.com/opensearch-project/opensearch-build/issues/2188
1. Revert https://github.com/opensearch-project/opensearch-build/pull/2934
### Describe alternatives you've considered
_No response_
### Additional context
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/paths/script_finder.py`
Content:
```
1 # Copyright OpenSearch Contributors
2 # SPDX-License-Identifier: Apache-2.0
3 #
4 # The OpenSearch Contributors require contributions made to
5 # this file be licensed under the Apache-2.0 license or a
6 # compatible open source license.
7
8 import os
9 from typing import Callable, List
10
11
12 class ScriptFinder:
13 class ScriptNotFoundError(Exception):
14 def __init__(self, kind: str, paths: List[str]) -> None:
15 self.kind = kind
16 self.paths = paths
17 super().__init__(f"Could not find {kind} script. Looked in {paths}.")
18
19 component_scripts_path = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), os.path.join("..", "..", "scripts", "components")))
20
21 default_scripts_path = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), os.path.join("..", "..", "scripts", "default")))
22
23 """
24 ScriptFinder is a helper that abstracts away the details of where to look for build, test and install scripts.
25
26 For build.sh and integtest.sh scripts, given a component name and a checked-out Git repository,
27 it will look in the following locations, in order:
28 * <component_scripts_path>/<component_name>/<script-name>
29 * /<component_name>/<script-name> in the component's Git repository
30 * root of the component's Git repository
31 * /scripts/<script-name> in the component's Git repository
32 * <default_scripts_path>/<script-name>
33
34 For install.sh scripts, given a component name, it will look in the following locations, in order:
35 * <component_scripts_path>/<component_name>/<script-name>
36 * <default_scripts_path>/<script-name>
37 """
38
39 @classmethod
40 def __find_script(cls, name: str, paths: List[str]) -> str:
41 exists: Callable[[str], bool] = lambda path: os.path.exists(path)
42 script = next(filter(exists, paths), None)
43 if script is None:
44 raise ScriptFinder.ScriptNotFoundError(name, paths)
45
46 return script
47
48 @classmethod
49 def __find_named_script(cls, script_name: str, component_name: str, git_dir: str) -> str:
50 paths = [
51 os.path.realpath(os.path.join(cls.component_scripts_path, component_name, script_name)),
52 os.path.realpath(os.path.join(git_dir, component_name, script_name)),
53 os.path.realpath(os.path.join(git_dir, script_name)),
54 os.path.realpath(os.path.join(git_dir, "scripts", script_name)),
55 os.path.realpath(os.path.join(cls.default_scripts_path, script_name)),
56 ]
57
58 return cls.__find_script(script_name, paths)
59
60 @classmethod
61 def find_build_script(cls, project: str, component_name: str, git_dir: str) -> str:
62 paths = [
63 os.path.realpath(os.path.join(cls.component_scripts_path, component_name, "build.sh")),
64 os.path.realpath(os.path.join(git_dir, component_name, "build.sh")),
65 os.path.realpath(os.path.join(git_dir, "build.sh")),
66 os.path.realpath(os.path.join(git_dir, "scripts", "build.sh")),
67 os.path.realpath(
68 os.path.join(
69 cls.default_scripts_path,
70 project.replace(" ", "-").lower(),
71 "build.sh",
72 )
73 ),
74 ]
75
76 return cls.__find_script("build.sh", paths)
77
78 @classmethod
79 def find_install_script(cls, component_name: str) -> str:
80 paths = [
81 os.path.realpath(os.path.join(cls.component_scripts_path, component_name, "install.sh")),
82 os.path.realpath(os.path.join(cls.default_scripts_path, "install.sh")),
83 ]
84
85 return cls.__find_script("install.sh", paths)
86
87 @classmethod
88 def find_integ_test_script(cls, component_name: str, git_dir: str) -> str:
89 return cls.__find_named_script("integtest.sh", component_name, git_dir)
90
91 @classmethod
92 def find_bwc_test_script(cls, component_name: str, git_dir: str) -> str:
93 script_name = "bwctest.sh"
94 paths = [
95 os.path.realpath(os.path.join(cls.default_scripts_path, script_name)),
96 os.path.realpath(os.path.join(cls.component_scripts_path, component_name, script_name)),
97 os.path.realpath(os.path.join(git_dir, component_name, script_name)),
98 os.path.realpath(os.path.join(git_dir, script_name)),
99 os.path.realpath(os.path.join(git_dir, "scripts", script_name)),
100 ]
101
102 return cls.__find_script(script_name, paths)
103
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/paths/script_finder.py b/src/paths/script_finder.py
--- a/src/paths/script_finder.py
+++ b/src/paths/script_finder.py
@@ -26,7 +26,6 @@
For build.sh and integtest.sh scripts, given a component name and a checked-out Git repository,
it will look in the following locations, in order:
* <component_scripts_path>/<component_name>/<script-name>
- * /<component_name>/<script-name> in the component's Git repository
* root of the component's Git repository
* /scripts/<script-name> in the component's Git repository
* <default_scripts_path>/<script-name>
@@ -49,7 +48,6 @@
def __find_named_script(cls, script_name: str, component_name: str, git_dir: str) -> str:
paths = [
os.path.realpath(os.path.join(cls.component_scripts_path, component_name, script_name)),
- os.path.realpath(os.path.join(git_dir, component_name, script_name)),
os.path.realpath(os.path.join(git_dir, script_name)),
os.path.realpath(os.path.join(git_dir, "scripts", script_name)),
os.path.realpath(os.path.join(cls.default_scripts_path, script_name)),
@@ -61,7 +59,6 @@
def find_build_script(cls, project: str, component_name: str, git_dir: str) -> str:
paths = [
os.path.realpath(os.path.join(cls.component_scripts_path, component_name, "build.sh")),
- os.path.realpath(os.path.join(git_dir, component_name, "build.sh")),
os.path.realpath(os.path.join(git_dir, "build.sh")),
os.path.realpath(os.path.join(git_dir, "scripts", "build.sh")),
os.path.realpath(
|
{"golden_diff": "diff --git a/src/paths/script_finder.py b/src/paths/script_finder.py\n--- a/src/paths/script_finder.py\n+++ b/src/paths/script_finder.py\n@@ -26,7 +26,6 @@\n For build.sh and integtest.sh scripts, given a component name and a checked-out Git repository,\n it will look in the following locations, in order:\n * <component_scripts_path>/<component_name>/<script-name>\n- * /<component_name>/<script-name> in the component's Git repository\n * root of the component's Git repository\n * /scripts/<script-name> in the component's Git repository\n * <default_scripts_path>/<script-name>\n@@ -49,7 +48,6 @@\n def __find_named_script(cls, script_name: str, component_name: str, git_dir: str) -> str:\n paths = [\n os.path.realpath(os.path.join(cls.component_scripts_path, component_name, script_name)),\n- os.path.realpath(os.path.join(git_dir, component_name, script_name)),\n os.path.realpath(os.path.join(git_dir, script_name)),\n os.path.realpath(os.path.join(git_dir, \"scripts\", script_name)),\n os.path.realpath(os.path.join(cls.default_scripts_path, script_name)),\n@@ -61,7 +59,6 @@\n def find_build_script(cls, project: str, component_name: str, git_dir: str) -> str:\n paths = [\n os.path.realpath(os.path.join(cls.component_scripts_path, component_name, \"build.sh\")),\n- os.path.realpath(os.path.join(git_dir, component_name, \"build.sh\")),\n os.path.realpath(os.path.join(git_dir, \"build.sh\")),\n os.path.realpath(os.path.join(git_dir, \"scripts\", \"build.sh\")),\n os.path.realpath(\n", "issue": "Revert component-based scripts lookup\n### Is your feature request related to a problem? Please describe\n\nIn https://github.com/opensearch-project/opensearch-build/pull/2934 we have added a way to include component build scripts. There are several problems with that.\r\n\r\n1. In https://github.com/opensearch-project/opensearch-build/issues/2188 we agreed to split monorepos. The maps monorepo came in as another new kind of monorepo that should not be supported. \r\n2. There are no tests in the PR.\r\n3. The new feature of path lookup is not documented in https://github.com/opensearch-project/opensearch-build/blob/cbe6841a3c459b65d5fb17b713994e5c01d7ee8e/src/paths/script_finder.py#L27\n\n### Describe the solution you'd like\n\n1. Add tests and documentation on top of https://github.com/opensearch-project/opensearch-build/pull/2934.\r\n1. Split the maps monorepo, https://github.com/opensearch-project/opensearch-build/issues/2188\r\n1. Revert https://github.com/opensearch-project/opensearch-build/pull/2934\r\n\r\n\n\n### Describe alternatives you've considered\n\n_No response_\n\n### Additional context\n\n_No response_\n", "before_files": [{"content": "# Copyright OpenSearch Contributors\n# SPDX-License-Identifier: Apache-2.0\n#\n# The OpenSearch Contributors require contributions made to\n# this file be licensed under the Apache-2.0 license or a\n# compatible open source license.\n\nimport os\nfrom typing import Callable, List\n\n\nclass ScriptFinder:\n class ScriptNotFoundError(Exception):\n def __init__(self, kind: str, paths: List[str]) -> None:\n self.kind = kind\n self.paths = paths\n super().__init__(f\"Could not find {kind} script. 
Looked in {paths}.\")\n\n component_scripts_path = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), os.path.join(\"..\", \"..\", \"scripts\", \"components\")))\n\n default_scripts_path = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), os.path.join(\"..\", \"..\", \"scripts\", \"default\")))\n\n \"\"\"\n ScriptFinder is a helper that abstracts away the details of where to look for build, test and install scripts.\n\n For build.sh and integtest.sh scripts, given a component name and a checked-out Git repository,\n it will look in the following locations, in order:\n * <component_scripts_path>/<component_name>/<script-name>\n * /<component_name>/<script-name> in the component's Git repository\n * root of the component's Git repository\n * /scripts/<script-name> in the component's Git repository\n * <default_scripts_path>/<script-name>\n\n For install.sh scripts, given a component name, it will look in the following locations, in order:\n * <component_scripts_path>/<component_name>/<script-name>\n * <default_scripts_path>/<script-name>\n \"\"\"\n\n @classmethod\n def __find_script(cls, name: str, paths: List[str]) -> str:\n exists: Callable[[str], bool] = lambda path: os.path.exists(path)\n script = next(filter(exists, paths), None)\n if script is None:\n raise ScriptFinder.ScriptNotFoundError(name, paths)\n\n return script\n\n @classmethod\n def __find_named_script(cls, script_name: str, component_name: str, git_dir: str) -> str:\n paths = [\n os.path.realpath(os.path.join(cls.component_scripts_path, component_name, script_name)),\n os.path.realpath(os.path.join(git_dir, component_name, script_name)),\n os.path.realpath(os.path.join(git_dir, script_name)),\n os.path.realpath(os.path.join(git_dir, \"scripts\", script_name)),\n os.path.realpath(os.path.join(cls.default_scripts_path, script_name)),\n ]\n\n return cls.__find_script(script_name, paths)\n\n @classmethod\n def find_build_script(cls, project: str, component_name: str, git_dir: str) -> str:\n paths = [\n os.path.realpath(os.path.join(cls.component_scripts_path, component_name, \"build.sh\")),\n os.path.realpath(os.path.join(git_dir, component_name, \"build.sh\")),\n os.path.realpath(os.path.join(git_dir, \"build.sh\")),\n os.path.realpath(os.path.join(git_dir, \"scripts\", \"build.sh\")),\n os.path.realpath(\n os.path.join(\n cls.default_scripts_path,\n project.replace(\" \", \"-\").lower(),\n \"build.sh\",\n )\n ),\n ]\n\n return cls.__find_script(\"build.sh\", paths)\n\n @classmethod\n def find_install_script(cls, component_name: str) -> str:\n paths = [\n os.path.realpath(os.path.join(cls.component_scripts_path, component_name, \"install.sh\")),\n os.path.realpath(os.path.join(cls.default_scripts_path, \"install.sh\")),\n ]\n\n return cls.__find_script(\"install.sh\", paths)\n\n @classmethod\n def find_integ_test_script(cls, component_name: str, git_dir: str) -> str:\n return cls.__find_named_script(\"integtest.sh\", component_name, git_dir)\n\n @classmethod\n def find_bwc_test_script(cls, component_name: str, git_dir: str) -> str:\n script_name = \"bwctest.sh\"\n paths = [\n os.path.realpath(os.path.join(cls.default_scripts_path, script_name)),\n os.path.realpath(os.path.join(cls.component_scripts_path, component_name, script_name)),\n os.path.realpath(os.path.join(git_dir, component_name, script_name)),\n os.path.realpath(os.path.join(git_dir, script_name)),\n os.path.realpath(os.path.join(git_dir, \"scripts\", script_name)),\n ]\n\n return cls.__find_script(script_name, 
paths)\n", "path": "src/paths/script_finder.py"}], "after_files": [{"content": "# Copyright OpenSearch Contributors\n# SPDX-License-Identifier: Apache-2.0\n#\n# The OpenSearch Contributors require contributions made to\n# this file be licensed under the Apache-2.0 license or a\n# compatible open source license.\n\nimport os\nfrom typing import Callable, List\n\n\nclass ScriptFinder:\n class ScriptNotFoundError(Exception):\n def __init__(self, kind: str, paths: List[str]) -> None:\n self.kind = kind\n self.paths = paths\n super().__init__(f\"Could not find {kind} script. Looked in {paths}.\")\n\n component_scripts_path = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), os.path.join(\"..\", \"..\", \"scripts\", \"components\")))\n\n default_scripts_path = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), os.path.join(\"..\", \"..\", \"scripts\", \"default\")))\n\n \"\"\"\n ScriptFinder is a helper that abstracts away the details of where to look for build, test and install scripts.\n\n For build.sh and integtest.sh scripts, given a component name and a checked-out Git repository,\n it will look in the following locations, in order:\n * <component_scripts_path>/<component_name>/<script-name>\n * root of the component's Git repository\n * /scripts/<script-name> in the component's Git repository\n * <default_scripts_path>/<script-name>\n\n For install.sh scripts, given a component name, it will look in the following locations, in order:\n * <component_scripts_path>/<component_name>/<script-name>\n * <default_scripts_path>/<script-name>\n \"\"\"\n\n @classmethod\n def __find_script(cls, name: str, paths: List[str]) -> str:\n exists: Callable[[str], bool] = lambda path: os.path.exists(path)\n script = next(filter(exists, paths), None)\n if script is None:\n raise ScriptFinder.ScriptNotFoundError(name, paths)\n\n return script\n\n @classmethod\n def __find_named_script(cls, script_name: str, component_name: str, git_dir: str) -> str:\n paths = [\n os.path.realpath(os.path.join(cls.component_scripts_path, component_name, script_name)),\n os.path.realpath(os.path.join(git_dir, script_name)),\n os.path.realpath(os.path.join(git_dir, \"scripts\", script_name)),\n os.path.realpath(os.path.join(cls.default_scripts_path, script_name)),\n ]\n\n return cls.__find_script(script_name, paths)\n\n @classmethod\n def find_build_script(cls, project: str, component_name: str, git_dir: str) -> str:\n paths = [\n os.path.realpath(os.path.join(cls.component_scripts_path, component_name, \"build.sh\")),\n os.path.realpath(os.path.join(git_dir, \"build.sh\")),\n os.path.realpath(os.path.join(git_dir, \"scripts\", \"build.sh\")),\n os.path.realpath(\n os.path.join(\n cls.default_scripts_path,\n project.replace(\" \", \"-\").lower(),\n \"build.sh\",\n )\n ),\n ]\n\n return cls.__find_script(\"build.sh\", paths)\n\n @classmethod\n def find_install_script(cls, component_name: str) -> str:\n paths = [\n os.path.realpath(os.path.join(cls.component_scripts_path, component_name, \"install.sh\")),\n os.path.realpath(os.path.join(cls.default_scripts_path, \"install.sh\")),\n ]\n\n return cls.__find_script(\"install.sh\", paths)\n\n @classmethod\n def find_integ_test_script(cls, component_name: str, git_dir: str) -> str:\n return cls.__find_named_script(\"integtest.sh\", component_name, git_dir)\n\n @classmethod\n def find_bwc_test_script(cls, component_name: str, git_dir: str) -> str:\n script_name = \"bwctest.sh\"\n paths = [\n 
os.path.realpath(os.path.join(cls.default_scripts_path, script_name)),\n os.path.realpath(os.path.join(cls.component_scripts_path, component_name, script_name)),\n os.path.realpath(os.path.join(git_dir, component_name, script_name)),\n os.path.realpath(os.path.join(git_dir, script_name)),\n os.path.realpath(os.path.join(git_dir, \"scripts\", script_name)),\n ]\n\n return cls.__find_script(script_name, paths)\n", "path": "src/paths/script_finder.py"}]}
| 1,743 | 387 |
gh_patches_debug_31801
|
rasdani/github-patches
|
git_diff
|
espnet__espnet-1002
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
The scoring script is incorrect if any pruning is done on the dictionary or in the case of word models
Hi @sw005320 and other developers,
The command `json2trn.py ${dir}/data.json ${dic} --num-spkrs ${num_spkrs} --refs ${dir}/ref.trn --hyps ${dir}/hyp.trn` in `score_sclite.sh` replaces all units in the reference that are not in the dictionary with unk and then scores it.
This is wrong because you are modifying the reference transcription to match your dictionary. Let's say I train a model with a dictionary containing only one term, "Mr", and the hypothesis is
"Mr unk unk" while the reference is "Mr John Doe". This would give a WER of 0% because the reference would be modified to "Mr unk unk".
--- END ISSUE ---
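Editor's note: the inflation described above is easy to reproduce outside espnet. The toy sketch below is not part of the original issue (espnet's real scoring is done by sclite on the generated trn files); it computes a plain word error rate for the "Mr John Doe" example, first against the true reference and then against a reference whose out-of-vocabulary words have been mapped to unk the way the old `json2trn.py` did.
```python
# Toy illustration only: a minimal word-level WER, not espnet's sclite-based scoring.
def edit_distance(ref, hyp):
    # Standard dynamic-programming Levenshtein distance over word lists.
    d = [[0] * (len(hyp) + 1) for _ in range(len(ref) + 1)]
    for i in range(len(ref) + 1):
        d[i][0] = i
    for j in range(len(hyp) + 1):
        d[0][j] = j
    for i in range(1, len(ref) + 1):
        for j in range(1, len(hyp) + 1):
            cost = 0 if ref[i - 1] == hyp[j - 1] else 1
            d[i][j] = min(d[i - 1][j] + 1, d[i][j - 1] + 1, d[i - 1][j - 1] + cost)
    return d[len(ref)][len(hyp)]

def wer(ref, hyp):
    return edit_distance(ref, hyp) / len(ref)

vocab = {"Mr"}                                   # hypothetical dictionary with a single term
reference = "Mr John Doe".split()
hypothesis = "Mr unk unk".split()

print(wer(reference, hypothesis))                # 0.666... : the honest score
mapped = [w if w in vocab else "unk" for w in reference]
print(wer(mapped, hypothesis))                   # 0.0 : the inflated score the issue complains about
```
The patch further down avoids this by writing the reference from the stored `token` text instead of re-decoding token ids through the training dictionary.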
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `utils/json2trn.py`
Content:
```
1 #!/usr/bin/env python
2 # encoding: utf-8
3
4 # Copyright 2017 Johns Hopkins University (Shinji Watanabe)
5 # 2018 Xuankai Chang (Shanghai Jiao Tong University)
6 # Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
7
8 import argparse
9 import codecs
10 import json
11 import logging
12 import sys
13
14 from espnet.utils.cli_utils import get_commandline_args
15
16
17 def get_parser():
18 parser = argparse.ArgumentParser(
19 description='convert a json to a transcription file with a token dictionary',
20 formatter_class=argparse.ArgumentDefaultsHelpFormatter)
21 parser.add_argument('json', type=str, help='json files')
22 parser.add_argument('dict', type=str, help='dict')
23 parser.add_argument('--num-spkrs', type=int, default=1, help='number of speakers')
24 parser.add_argument('--refs', type=str, nargs='+', help='ref for all speakers')
25 parser.add_argument('--hyps', type=str, nargs='+', help='hyp for all outputs')
26 return parser
27
28
29 def main(args):
30 args = get_parser().parse_args(args)
31 convert(args.json, args.dict, args.refs, args.hyps, args.num_spkrs)
32
33
34 def convert(jsonf, dic, refs, hyps, num_spkrs=1):
35 n_ref = len(refs)
36 n_hyp = len(hyps)
37 assert n_ref == n_hyp
38 assert n_ref == num_spkrs
39
40 # logging info
41 logfmt = '%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s'
42 logging.basicConfig(level=logging.INFO, format=logfmt)
43 logging.info(get_commandline_args())
44
45 logging.info("reading %s", jsonf)
46 with codecs.open(jsonf, 'r', encoding="utf-8") as f:
47 j = json.load(f)
48
49 logging.info("reading %s", dic)
50 with codecs.open(dic, 'r', encoding="utf-8") as f:
51 dictionary = f.readlines()
52 char_list = [entry.split(' ')[0] for entry in dictionary]
53 char_list.insert(0, '<blank>')
54 char_list.append('<eos>')
55
56 for ns in range(num_spkrs):
57 hyp_file = codecs.open(hyps[ns], 'w', encoding="utf-8")
58 ref_file = codecs.open(refs[ns], 'w', encoding="utf-8")
59
60 for x in j['utts']:
61 # hyps
62 if num_spkrs == 1:
63 seq = [char_list[int(i)] for i in j['utts'][x]['output'][0]['rec_tokenid'].split()]
64 else:
65 seq = [char_list[int(i)] for i in j['utts'][x]['output'][ns][0]['rec_tokenid'].split()]
66 hyp_file.write(" ".join(seq).replace('<eos>', '')),
67 hyp_file.write(" (" + j['utts'][x]['utt2spk'].replace('-', '_') + "-" + x + ")\n")
68
69 # ref
70 if num_spkrs == 1:
71 seq = [char_list[int(i)] for i in j['utts'][x]['output'][0]['tokenid'].split()]
72 else:
73 seq = [char_list[int(i)] for i in j['utts'][x]['output'][ns][0]['tokenid'].split()]
74 ref_file.write(" ".join(seq).replace('<eos>', '')),
75 ref_file.write(" (" + j['utts'][x]['utt2spk'].replace('-', '_') + "-" + x + ")\n")
76
77 hyp_file.close()
78 ref_file.close()
79
80
81 if __name__ == '__main__':
82 main(sys.argv[1:])
83
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/utils/json2trn.py b/utils/json2trn.py
--- a/utils/json2trn.py
+++ b/utils/json2trn.py
@@ -58,21 +58,25 @@
ref_file = codecs.open(refs[ns], 'w', encoding="utf-8")
for x in j['utts']:
- # hyps
+ # recognition hypothesis
if num_spkrs == 1:
seq = [char_list[int(i)] for i in j['utts'][x]['output'][0]['rec_tokenid'].split()]
else:
seq = [char_list[int(i)] for i in j['utts'][x]['output'][ns][0]['rec_tokenid'].split()]
+ # In the recognition hypothesis, the <eos> symbol is usually attached in the last part of the sentence
+ # and it is removed below.
hyp_file.write(" ".join(seq).replace('<eos>', '')),
hyp_file.write(" (" + j['utts'][x]['utt2spk'].replace('-', '_') + "-" + x + ")\n")
- # ref
+ # reference
if num_spkrs == 1:
- seq = [char_list[int(i)] for i in j['utts'][x]['output'][0]['tokenid'].split()]
+ seq = j['utts'][x]['output'][0]['token']
else:
- seq = [char_list[int(i)] for i in j['utts'][x]['output'][ns][0]['tokenid'].split()]
- ref_file.write(" ".join(seq).replace('<eos>', '')),
- ref_file.write(" (" + j['utts'][x]['utt2spk'].replace('-', '_') + "-" + x + ")\n")
+ seq = j['utts'][x]['output'][ns][0]['token']
+ # Unlike the recognition hypothesis, the reference is directly generated from a token without dictionary
+ # to avoid to include <unk> symbols in the reference to make scoring normal.
+ # The detailed discussion can be found at https://github.com/espnet/espnet/issues/993
+ ref_file.write(seq + " (" + j['utts'][x]['utt2spk'].replace('-', '_') + "-" + x + ")\n")
hyp_file.close()
ref_file.close()
|
{"golden_diff": "diff --git a/utils/json2trn.py b/utils/json2trn.py\n--- a/utils/json2trn.py\n+++ b/utils/json2trn.py\n@@ -58,21 +58,25 @@\n ref_file = codecs.open(refs[ns], 'w', encoding=\"utf-8\")\n \n for x in j['utts']:\n- # hyps\n+ # recognition hypothesis\n if num_spkrs == 1:\n seq = [char_list[int(i)] for i in j['utts'][x]['output'][0]['rec_tokenid'].split()]\n else:\n seq = [char_list[int(i)] for i in j['utts'][x]['output'][ns][0]['rec_tokenid'].split()]\n+ # In the recognition hypothesis, the <eos> symbol is usually attached in the last part of the sentence\n+ # and it is removed below.\n hyp_file.write(\" \".join(seq).replace('<eos>', '')),\n hyp_file.write(\" (\" + j['utts'][x]['utt2spk'].replace('-', '_') + \"-\" + x + \")\\n\")\n \n- # ref\n+ # reference\n if num_spkrs == 1:\n- seq = [char_list[int(i)] for i in j['utts'][x]['output'][0]['tokenid'].split()]\n+ seq = j['utts'][x]['output'][0]['token']\n else:\n- seq = [char_list[int(i)] for i in j['utts'][x]['output'][ns][0]['tokenid'].split()]\n- ref_file.write(\" \".join(seq).replace('<eos>', '')),\n- ref_file.write(\" (\" + j['utts'][x]['utt2spk'].replace('-', '_') + \"-\" + x + \")\\n\")\n+ seq = j['utts'][x]['output'][ns][0]['token']\n+ # Unlike the recognition hypothesis, the reference is directly generated from a token without dictionary\n+ # to avoid to include <unk> symbols in the reference to make scoring normal.\n+ # The detailed discussion can be found at https://github.com/espnet/espnet/issues/993\n+ ref_file.write(seq + \" (\" + j['utts'][x]['utt2spk'].replace('-', '_') + \"-\" + x + \")\\n\")\n \n hyp_file.close()\n ref_file.close()\n", "issue": "The scoring script is incorrect if any pruning is done on the dictionary or in the case of word models\nHi @sw005320 and other developers,\r\n\r\nThe command `json2trn.py ${dir}/data.json ${dic} --num-spkrs ${num_spkrs} --refs ${dir}/ref.trn --hyps ${dir}/hyp.trn` in the `score_sclite.sh` replaces all units in reference that are not in the dictionary with unk and then scores it.\r\n\r\nThis is wrong because you are modifying the reference transcription to match your dictionary. Let's say I train a model with dictionary with only 1 term \"Mr\" and the hypothesis is \r\n\"Mr unk unk\" and reference is \"Mr John Doe\". This would give a WER of 0% because the reference would be modified to \"Mr unk unk\". 
\n", "before_files": [{"content": "#!/usr/bin/env python\n# encoding: utf-8\n\n# Copyright 2017 Johns Hopkins University (Shinji Watanabe)\n# 2018 Xuankai Chang (Shanghai Jiao Tong University)\n# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)\n\nimport argparse\nimport codecs\nimport json\nimport logging\nimport sys\n\nfrom espnet.utils.cli_utils import get_commandline_args\n\n\ndef get_parser():\n parser = argparse.ArgumentParser(\n description='convert a json to a transcription file with a token dictionary',\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n parser.add_argument('json', type=str, help='json files')\n parser.add_argument('dict', type=str, help='dict')\n parser.add_argument('--num-spkrs', type=int, default=1, help='number of speakers')\n parser.add_argument('--refs', type=str, nargs='+', help='ref for all speakers')\n parser.add_argument('--hyps', type=str, nargs='+', help='hyp for all outputs')\n return parser\n\n\ndef main(args):\n args = get_parser().parse_args(args)\n convert(args.json, args.dict, args.refs, args.hyps, args.num_spkrs)\n\n\ndef convert(jsonf, dic, refs, hyps, num_spkrs=1):\n n_ref = len(refs)\n n_hyp = len(hyps)\n assert n_ref == n_hyp\n assert n_ref == num_spkrs\n\n # logging info\n logfmt = '%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s'\n logging.basicConfig(level=logging.INFO, format=logfmt)\n logging.info(get_commandline_args())\n\n logging.info(\"reading %s\", jsonf)\n with codecs.open(jsonf, 'r', encoding=\"utf-8\") as f:\n j = json.load(f)\n\n logging.info(\"reading %s\", dic)\n with codecs.open(dic, 'r', encoding=\"utf-8\") as f:\n dictionary = f.readlines()\n char_list = [entry.split(' ')[0] for entry in dictionary]\n char_list.insert(0, '<blank>')\n char_list.append('<eos>')\n\n for ns in range(num_spkrs):\n hyp_file = codecs.open(hyps[ns], 'w', encoding=\"utf-8\")\n ref_file = codecs.open(refs[ns], 'w', encoding=\"utf-8\")\n\n for x in j['utts']:\n # hyps\n if num_spkrs == 1:\n seq = [char_list[int(i)] for i in j['utts'][x]['output'][0]['rec_tokenid'].split()]\n else:\n seq = [char_list[int(i)] for i in j['utts'][x]['output'][ns][0]['rec_tokenid'].split()]\n hyp_file.write(\" \".join(seq).replace('<eos>', '')),\n hyp_file.write(\" (\" + j['utts'][x]['utt2spk'].replace('-', '_') + \"-\" + x + \")\\n\")\n\n # ref\n if num_spkrs == 1:\n seq = [char_list[int(i)] for i in j['utts'][x]['output'][0]['tokenid'].split()]\n else:\n seq = [char_list[int(i)] for i in j['utts'][x]['output'][ns][0]['tokenid'].split()]\n ref_file.write(\" \".join(seq).replace('<eos>', '')),\n ref_file.write(\" (\" + j['utts'][x]['utt2spk'].replace('-', '_') + \"-\" + x + \")\\n\")\n\n hyp_file.close()\n ref_file.close()\n\n\nif __name__ == '__main__':\n main(sys.argv[1:])\n", "path": "utils/json2trn.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# encoding: utf-8\n\n# Copyright 2017 Johns Hopkins University (Shinji Watanabe)\n# 2018 Xuankai Chang (Shanghai Jiao Tong University)\n# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)\n\nimport argparse\nimport codecs\nimport json\nimport logging\nimport sys\n\nfrom espnet.utils.cli_utils import get_commandline_args\n\n\ndef get_parser():\n parser = argparse.ArgumentParser(\n description='convert a json to a transcription file with a token dictionary',\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n parser.add_argument('json', type=str, help='json files')\n parser.add_argument('dict', type=str, help='dict')\n parser.add_argument('--num-spkrs', type=int, default=1, 
help='number of speakers')\n parser.add_argument('--refs', type=str, nargs='+', help='ref for all speakers')\n parser.add_argument('--hyps', type=str, nargs='+', help='hyp for all outputs')\n return parser\n\n\ndef main(args):\n args = get_parser().parse_args(args)\n convert(args.json, args.dict, args.refs, args.hyps, args.num_spkrs)\n\n\ndef convert(jsonf, dic, refs, hyps, num_spkrs=1):\n n_ref = len(refs)\n n_hyp = len(hyps)\n assert n_ref == n_hyp\n assert n_ref == num_spkrs\n\n # logging info\n logfmt = '%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s'\n logging.basicConfig(level=logging.INFO, format=logfmt)\n logging.info(get_commandline_args())\n\n logging.info(\"reading %s\", jsonf)\n with codecs.open(jsonf, 'r', encoding=\"utf-8\") as f:\n j = json.load(f)\n\n logging.info(\"reading %s\", dic)\n with codecs.open(dic, 'r', encoding=\"utf-8\") as f:\n dictionary = f.readlines()\n char_list = [entry.split(' ')[0] for entry in dictionary]\n char_list.insert(0, '<blank>')\n char_list.append('<eos>')\n\n for ns in range(num_spkrs):\n hyp_file = codecs.open(hyps[ns], 'w', encoding=\"utf-8\")\n ref_file = codecs.open(refs[ns], 'w', encoding=\"utf-8\")\n\n for x in j['utts']:\n # recognition hypothesis\n if num_spkrs == 1:\n seq = [char_list[int(i)] for i in j['utts'][x]['output'][0]['rec_tokenid'].split()]\n else:\n seq = [char_list[int(i)] for i in j['utts'][x]['output'][ns][0]['rec_tokenid'].split()]\n # In the recognition hypothesis, the <eos> symbol is usually attached in the last part of the sentence\n # and it is removed below.\n hyp_file.write(\" \".join(seq).replace('<eos>', '')),\n hyp_file.write(\" (\" + j['utts'][x]['utt2spk'].replace('-', '_') + \"-\" + x + \")\\n\")\n\n # reference\n if num_spkrs == 1:\n seq = j['utts'][x]['output'][0]['token']\n else:\n seq = j['utts'][x]['output'][ns][0]['token']\n # Unlike the recognition hypothesis, the reference is directly generated from a token without dictionary\n # to avoid to include <unk> symbols in the reference to make scoring normal.\n # The detailed discussion can be found at https://github.com/espnet/espnet/issues/993\n ref_file.write(seq + \" (\" + j['utts'][x]['utt2spk'].replace('-', '_') + \"-\" + x + \")\\n\")\n\n hyp_file.close()\n ref_file.close()\n\n\nif __name__ == '__main__':\n main(sys.argv[1:])\n", "path": "utils/json2trn.py"}]}
| 1,420 | 524 |
gh_patches_debug_952
|
rasdani/github-patches
|
git_diff
|
tobymao__sqlglot-2170
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Specifying 3.7+ in setup.py
sqlglot fails with Python version < 3.7.
While Python 3.6 is end-of-life, there are systems that still use 3.6 as they transition to 3.7. Having either `setup.py` or `setup.cfg` specify 3.7+, or adding a note to the README, would be helpful.
```
import sqlglot
```
`SyntaxError: future feature annotations is not defined`
**Official Documentation**
https://docs.python.org/3/library/__future__.html
--- END ISSUE ---
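Editor's note: to make the request concrete, a minimal sketch of the metadata change being asked for is shown below. It is an illustration only; the project's actual `setup.py` and the fix eventually applied appear further down in this record. With `python_requires` set, a sufficiently recent pip refuses to install the release on an older interpreter instead of installing it and then failing with the `SyntaxError` above at import time.
```python
# Minimal sketch (not the project's real setup.py, which is shown below):
# declare the supported interpreters so pip can refuse unsupported ones up front.
from setuptools import setup, find_packages

setup(
    name="sqlglot",
    packages=find_packages(include=["sqlglot", "sqlglot.*"]),
    python_requires=">=3.7",  # the "3.7+" constraint requested in the issue
)
```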
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import find_packages, setup
2
3 setup(
4 name="sqlglot",
5 description="An easily customizable SQL parser and transpiler",
6 long_description=open("README.md").read(),
7 long_description_content_type="text/markdown",
8 url="https://github.com/tobymao/sqlglot",
9 author="Toby Mao",
10 author_email="[email protected]",
11 license="MIT",
12 packages=find_packages(include=["sqlglot", "sqlglot.*"]),
13 package_data={"sqlglot": ["py.typed"]},
14 use_scm_version={
15 "write_to": "sqlglot/_version.py",
16 "fallback_version": "0.0.0",
17 "local_scheme": "no-local-version",
18 },
19 setup_requires=["setuptools_scm"],
20 extras_require={
21 "dev": [
22 "autoflake",
23 "black",
24 "duckdb>=0.6",
25 "isort",
26 "mypy>=0.990",
27 "pandas",
28 "pyspark",
29 "python-dateutil",
30 "pdoc",
31 "pre-commit",
32 ],
33 },
34 classifiers=[
35 "Development Status :: 5 - Production/Stable",
36 "Intended Audience :: Developers",
37 "Intended Audience :: Science/Research",
38 "License :: OSI Approved :: MIT License",
39 "Operating System :: OS Independent",
40 "Programming Language :: SQL",
41 "Programming Language :: Python :: 3 :: Only",
42 ],
43 )
44
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -17,6 +17,7 @@
"local_scheme": "no-local-version",
},
setup_requires=["setuptools_scm"],
+ python_requires=">=3.7",
extras_require={
"dev": [
"autoflake",
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -17,6 +17,7 @@\n \"local_scheme\": \"no-local-version\",\n },\n setup_requires=[\"setuptools_scm\"],\n+ python_requires=\">=3.7\",\n extras_require={\n \"dev\": [\n \"autoflake\",\n", "issue": "Specifying 3.7+ in setup.py\nsqlglot fails with Python version < 3.7. \r\n\r\nWhile Python 3.6 is end-of-life, there are systems that still use 3.6 as they transition to 3.7. Having either `setup.py` or `setup.cfg` specify 3.7+ or adding to README would be helpful. \r\n\r\n```\r\nimport sqlglot\r\n```\r\n`SyntaxError: future feature annotations is not defined`\r\n\r\n**Official Documentation**\r\nhttps://docs.python.org/3/library/__future__.html\r\n\n", "before_files": [{"content": "from setuptools import find_packages, setup\n\nsetup(\n name=\"sqlglot\",\n description=\"An easily customizable SQL parser and transpiler\",\n long_description=open(\"README.md\").read(),\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/tobymao/sqlglot\",\n author=\"Toby Mao\",\n author_email=\"[email protected]\",\n license=\"MIT\",\n packages=find_packages(include=[\"sqlglot\", \"sqlglot.*\"]),\n package_data={\"sqlglot\": [\"py.typed\"]},\n use_scm_version={\n \"write_to\": \"sqlglot/_version.py\",\n \"fallback_version\": \"0.0.0\",\n \"local_scheme\": \"no-local-version\",\n },\n setup_requires=[\"setuptools_scm\"],\n extras_require={\n \"dev\": [\n \"autoflake\",\n \"black\",\n \"duckdb>=0.6\",\n \"isort\",\n \"mypy>=0.990\",\n \"pandas\",\n \"pyspark\",\n \"python-dateutil\",\n \"pdoc\",\n \"pre-commit\",\n ],\n },\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: SQL\",\n \"Programming Language :: Python :: 3 :: Only\",\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import find_packages, setup\n\nsetup(\n name=\"sqlglot\",\n description=\"An easily customizable SQL parser and transpiler\",\n long_description=open(\"README.md\").read(),\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/tobymao/sqlglot\",\n author=\"Toby Mao\",\n author_email=\"[email protected]\",\n license=\"MIT\",\n packages=find_packages(include=[\"sqlglot\", \"sqlglot.*\"]),\n package_data={\"sqlglot\": [\"py.typed\"]},\n use_scm_version={\n \"write_to\": \"sqlglot/_version.py\",\n \"fallback_version\": \"0.0.0\",\n \"local_scheme\": \"no-local-version\",\n },\n setup_requires=[\"setuptools_scm\"],\n python_requires=\">=3.7\",\n extras_require={\n \"dev\": [\n \"autoflake\",\n \"black\",\n \"duckdb>=0.6\",\n \"isort\",\n \"mypy>=0.990\",\n \"pandas\",\n \"pyspark\",\n \"python-dateutil\",\n \"pdoc\",\n \"pre-commit\",\n ],\n },\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: SQL\",\n \"Programming Language :: Python :: 3 :: Only\",\n ],\n)\n", "path": "setup.py"}]}
| 773 | 78 |
gh_patches_debug_15529
|
rasdani/github-patches
|
git_diff
|
pypa__virtualenv-1205
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
broken Python 3.3 support due to wheel
With #1176, Python 3.3 support is broken because wheel 0.31 dropped support.
This means that virtualenv 16.0.0 will install fine on a Python 3.3 config, but then fail to create any virtualenvs.
virtualenv should also declare that it doesn't support 3.3.
--- END ISSUE ---
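Editor's note: the sketch below illustrates two places such a declaration can live. The `python_requires` form mirrors the exclusion style the project already uses for 3.0-3.2 (and is the approach the actual patch at the end of this record takes); the runtime guard is only a hypothetical complement, in the spirit of the existing 2.7 check at the top of `setup.py`.
```python
# Illustration only; see the real patch at the end of this record.
import sys
from setuptools import setup

# Hypothetical install-time guard mirroring the existing 2.7 check:
if (3, 0) <= sys.version_info[:2] < (3, 4):
    sys.exit('virtualenv no longer supports Python 3.0-3.3.')

setup(
    name='virtualenv',
    # Metadata form: extend the existing exclusions to cover 3.3 as well.
    python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',
)
```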
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 import os
2 import re
3 import shutil
4 import sys
5
6 if sys.version_info[:2] < (2, 7):
7 sys.exit('virtualenv requires Python 2.7 or higher.')
8
9 try:
10 from setuptools import setup
11 from setuptools.command.test import test as TestCommand
12
13 class PyTest(TestCommand):
14 user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
15
16 def initialize_options(self):
17 TestCommand.initialize_options(self)
18 self.pytest_args = []
19
20 def finalize_options(self):
21 TestCommand.finalize_options(self)
22 #self.test_args = []
23 #self.test_suite = True
24
25 def run_tests(self):
26 # import here, because outside the eggs aren't loaded
27 import pytest
28 sys.exit(pytest.main(self.pytest_args))
29
30 setup_params = {
31 'entry_points': {
32 'console_scripts': ['virtualenv=virtualenv:main'],
33 },
34 'zip_safe': False,
35 'cmdclass': {'test': PyTest},
36 'tests_require': ['pytest', 'mock'],
37 }
38 except ImportError:
39 from distutils.core import setup
40 if sys.platform == 'win32':
41 print('Note: without Setuptools installed you will '
42 'have to use "python -m virtualenv ENV"')
43 setup_params = {}
44 else:
45 script = 'scripts/virtualenv'
46 setup_params = {'scripts': [script]}
47
48
49 def read_file(*paths):
50 here = os.path.dirname(os.path.abspath(__file__))
51 with open(os.path.join(here, *paths)) as f:
52 return f.read()
53
54 # Get long_description from index.rst:
55 long_description = read_file('docs', 'index.rst')
56 long_description = long_description.strip().split('split here', 1)[0]
57 # Add release history
58 changes = read_file('docs', 'changes.rst')
59 # Only report last two releases for brevity
60 releases_found = 0
61 change_lines = []
62 for line in changes.splitlines():
63 change_lines.append(line)
64 if line.startswith('--------------'):
65 releases_found += 1
66 if releases_found > 2:
67 break
68
69 changes = '\n'.join(change_lines[:-2]) + '\n'
70 changes += '`Full Changelog <https://virtualenv.pypa.io/en/latest/changes.html>`_.'
71 # Replace issue/pull directives
72 changes = re.sub(r':pull:`(\d+)`', r'PR #\1', changes)
73 changes = re.sub(r':issue:`(\d+)`', r'#\1', changes)
74
75 long_description += '\n\n' + changes
76
77
78 def get_version():
79 version_file = read_file('virtualenv.py')
80 version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
81 version_file, re.M)
82 if version_match:
83 return version_match.group(1)
84 raise RuntimeError("Unable to find version string.")
85
86
87 # Hack to prevent stupid TypeError: 'NoneType' object is not callable error on
88 # exit of python setup.py test # in multiprocessing/util.py _exit_function when
89 # running python setup.py test (see
90 # http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
91 try:
92 import multiprocessing # noqa
93 except ImportError:
94 pass
95
96 setup(
97 name='virtualenv',
98 version=get_version(),
99 description="Virtual Python Environment builder",
100 long_description=long_description,
101 classifiers=[
102 'Development Status :: 5 - Production/Stable',
103 'Intended Audience :: Developers',
104 'License :: OSI Approved :: MIT License',
105 'Programming Language :: Python :: 2',
106 'Programming Language :: Python :: 2.7',
107 'Programming Language :: Python :: 3',
108 'Programming Language :: Python :: 3.4',
109 'Programming Language :: Python :: 3.5',
110 'Programming Language :: Python :: 3.6',
111 ],
112 keywords='setuptools deployment installation distutils',
113 author='Ian Bicking',
114 author_email='[email protected]',
115 maintainer='Jannis Leidel, Carl Meyer and Brian Rosner',
116 maintainer_email='[email protected]',
117 url='https://virtualenv.pypa.io/',
118 license='MIT',
119 py_modules=['virtualenv'],
120 packages=['virtualenv_support'],
121 package_data={'virtualenv_support': ['*.whl']},
122 python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*',
123 **setup_params)
124
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -108,6 +108,7 @@
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
],
keywords='setuptools deployment installation distutils',
author='Ian Bicking',
@@ -119,5 +120,5 @@
py_modules=['virtualenv'],
packages=['virtualenv_support'],
package_data={'virtualenv_support': ['*.whl']},
- python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*',
+ python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',
**setup_params)
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -108,6 +108,7 @@\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n+ 'Programming Language :: Python :: 3.7',\n ],\n keywords='setuptools deployment installation distutils',\n author='Ian Bicking',\n@@ -119,5 +120,5 @@\n py_modules=['virtualenv'],\n packages=['virtualenv_support'],\n package_data={'virtualenv_support': ['*.whl']},\n- python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*',\n+ python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',\n **setup_params)\n", "issue": "broken Python 3.3 support due to wheel\nWith #1176, Python 3.3 support is broken because wheel 0.31 dropped support.\r\n\r\nThis means that virtualenv 16.0.0 will install fine on a Python 3.3 config, but then fail to create any virtualenvs.\r\n\r\nvirtualenv should also declare that it doesn't support 3.3\n", "before_files": [{"content": "import os\nimport re\nimport shutil\nimport sys\n\nif sys.version_info[:2] < (2, 7):\n sys.exit('virtualenv requires Python 2.7 or higher.')\n\ntry:\n from setuptools import setup\n from setuptools.command.test import test as TestCommand\n\n class PyTest(TestCommand):\n user_options = [('pytest-args=', 'a', \"Arguments to pass to py.test\")]\n\n def initialize_options(self):\n TestCommand.initialize_options(self)\n self.pytest_args = []\n\n def finalize_options(self):\n TestCommand.finalize_options(self)\n #self.test_args = []\n #self.test_suite = True\n\n def run_tests(self):\n # import here, because outside the eggs aren't loaded\n import pytest\n sys.exit(pytest.main(self.pytest_args))\n\n setup_params = {\n 'entry_points': {\n 'console_scripts': ['virtualenv=virtualenv:main'],\n },\n 'zip_safe': False,\n 'cmdclass': {'test': PyTest},\n 'tests_require': ['pytest', 'mock'],\n }\nexcept ImportError:\n from distutils.core import setup\n if sys.platform == 'win32':\n print('Note: without Setuptools installed you will '\n 'have to use \"python -m virtualenv ENV\"')\n setup_params = {}\n else:\n script = 'scripts/virtualenv'\n setup_params = {'scripts': [script]}\n\n\ndef read_file(*paths):\n here = os.path.dirname(os.path.abspath(__file__))\n with open(os.path.join(here, *paths)) as f:\n return f.read()\n\n# Get long_description from index.rst:\nlong_description = read_file('docs', 'index.rst')\nlong_description = long_description.strip().split('split here', 1)[0]\n# Add release history\nchanges = read_file('docs', 'changes.rst')\n# Only report last two releases for brevity\nreleases_found = 0\nchange_lines = []\nfor line in changes.splitlines():\n change_lines.append(line)\n if line.startswith('--------------'):\n releases_found += 1\n if releases_found > 2:\n break\n\nchanges = '\\n'.join(change_lines[:-2]) + '\\n'\nchanges += '`Full Changelog <https://virtualenv.pypa.io/en/latest/changes.html>`_.'\n# Replace issue/pull directives\nchanges = re.sub(r':pull:`(\\d+)`', r'PR #\\1', changes)\nchanges = re.sub(r':issue:`(\\d+)`', r'#\\1', changes)\n\nlong_description += '\\n\\n' + changes\n\n\ndef get_version():\n version_file = read_file('virtualenv.py')\n version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file, re.M)\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Unable to find version string.\")\n\n\n# Hack to prevent stupid TypeError: 'NoneType' object is not callable error on\n# exit of python setup.py test # in multiprocessing/util.py _exit_function when\n# running 
python setup.py test (see\n# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)\ntry:\n import multiprocessing # noqa\nexcept ImportError:\n pass\n\nsetup(\n name='virtualenv',\n version=get_version(),\n description=\"Virtual Python Environment builder\",\n long_description=long_description,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n ],\n keywords='setuptools deployment installation distutils',\n author='Ian Bicking',\n author_email='[email protected]',\n maintainer='Jannis Leidel, Carl Meyer and Brian Rosner',\n maintainer_email='[email protected]',\n url='https://virtualenv.pypa.io/',\n license='MIT',\n py_modules=['virtualenv'],\n packages=['virtualenv_support'],\n package_data={'virtualenv_support': ['*.whl']},\n python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*',\n **setup_params)\n", "path": "setup.py"}], "after_files": [{"content": "import os\nimport re\nimport shutil\nimport sys\n\nif sys.version_info[:2] < (2, 7):\n sys.exit('virtualenv requires Python 2.7 or higher.')\n\ntry:\n from setuptools import setup\n from setuptools.command.test import test as TestCommand\n\n class PyTest(TestCommand):\n user_options = [('pytest-args=', 'a', \"Arguments to pass to py.test\")]\n\n def initialize_options(self):\n TestCommand.initialize_options(self)\n self.pytest_args = []\n\n def finalize_options(self):\n TestCommand.finalize_options(self)\n #self.test_args = []\n #self.test_suite = True\n\n def run_tests(self):\n # import here, because outside the eggs aren't loaded\n import pytest\n sys.exit(pytest.main(self.pytest_args))\n\n setup_params = {\n 'entry_points': {\n 'console_scripts': ['virtualenv=virtualenv:main'],\n },\n 'zip_safe': False,\n 'cmdclass': {'test': PyTest},\n 'tests_require': ['pytest', 'mock'],\n }\nexcept ImportError:\n from distutils.core import setup\n if sys.platform == 'win32':\n print('Note: without Setuptools installed you will '\n 'have to use \"python -m virtualenv ENV\"')\n setup_params = {}\n else:\n script = 'scripts/virtualenv'\n setup_params = {'scripts': [script]}\n\n\ndef read_file(*paths):\n here = os.path.dirname(os.path.abspath(__file__))\n with open(os.path.join(here, *paths)) as f:\n return f.read()\n\n# Get long_description from index.rst:\nlong_description = read_file('docs', 'index.rst')\nlong_description = long_description.strip().split('split here', 1)[0]\n# Add release history\nchanges = read_file('docs', 'changes.rst')\n# Only report last two releases for brevity\nreleases_found = 0\nchange_lines = []\nfor line in changes.splitlines():\n change_lines.append(line)\n if line.startswith('--------------'):\n releases_found += 1\n if releases_found > 2:\n break\n\nchanges = '\\n'.join(change_lines[:-2]) + '\\n'\nchanges += '`Full Changelog <https://virtualenv.pypa.io/en/latest/changes.html>`_.'\n# Replace issue/pull directives\nchanges = re.sub(r':pull:`(\\d+)`', r'PR #\\1', changes)\nchanges = re.sub(r':issue:`(\\d+)`', r'#\\1', changes)\n\nlong_description += '\\n\\n' + changes\n\n\ndef get_version():\n version_file = read_file('virtualenv.py')\n version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file, re.M)\n if version_match:\n return 
version_match.group(1)\n raise RuntimeError(\"Unable to find version string.\")\n\n\n# Hack to prevent stupid TypeError: 'NoneType' object is not callable error on\n# exit of python setup.py test # in multiprocessing/util.py _exit_function when\n# running python setup.py test (see\n# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)\ntry:\n import multiprocessing # noqa\nexcept ImportError:\n pass\n\nsetup(\n name='virtualenv',\n version=get_version(),\n description=\"Virtual Python Environment builder\",\n long_description=long_description,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n ],\n keywords='setuptools deployment installation distutils',\n author='Ian Bicking',\n author_email='[email protected]',\n maintainer='Jannis Leidel, Carl Meyer and Brian Rosner',\n maintainer_email='[email protected]',\n url='https://virtualenv.pypa.io/',\n license='MIT',\n py_modules=['virtualenv'],\n packages=['virtualenv_support'],\n package_data={'virtualenv_support': ['*.whl']},\n python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',\n **setup_params)\n", "path": "setup.py"}]}
| 1,591 | 205 |
gh_patches_debug_26742
|
rasdani/github-patches
|
git_diff
|
saleor__saleor-2877
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Dashboard 2.0: Order API does not expose a way to get a product thumbnail for an order line
We get access to `productName` and `productSku` but not a thumbnail. It's OK to return `null` if the product no longer exists, so we don't need to denormalize this field, but I'd like to avoid having the client query the `product` directly and then have to manually figure out which image to show.
--- END ISSUE ---
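Editor's note: below is a sketch of the kind of field being asked for, a thumbnail URL resolved directly on the order line that returns `null` when the variant (and therefore the product) is gone. The `get_thumbnail` helper and the `variant.get_first_image()` call are taken from the golden diff at the end of this record; the rest is illustrative of how the resolver could sit in `saleor/graphql/order/types.py`.
```python
# Sketch only; helper names come from the patch shown at the end of this record.
import graphene
from graphene import relay

from ...order import models
from ...product.templatetags.product_images import get_thumbnail
from ..core.types.common import CountableDjangoObjectType


class OrderLine(CountableDjangoObjectType):
    thumbnail_url = graphene.String(
        description='The URL of a main thumbnail for the ordered product.',
        size=graphene.Int(description='Size of the image'))

    class Meta:
        description = 'Represents order line of particular order.'
        model = models.OrderLine
        interfaces = [relay.Node]

    def resolve_thumbnail_url(self, info, size=None):
        # The product may have been deleted; returning None (null) is acceptable per the issue.
        if not self.variant_id:
            return None
        return get_thumbnail(self.variant.get_first_image(), size or 255, method='thumbnail')
```
With such a field exposed, the client can request the thumbnail alongside `productName` and `productSku` instead of traversing to the product and picking an image itself.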
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `saleor/graphql/order/types.py`
Content:
```
1 import graphene
2 from graphene import relay
3
4 from ...order import OrderEvents, models
5 from ..account.types import User
6 from ..core.types.common import CountableDjangoObjectType
7 from ..core.types.money import Money, TaxedMoney
8 from decimal import Decimal
9
10 OrderEventsEnum = graphene.Enum.from_enum(OrderEvents)
11
12
13 class OrderEvent(CountableDjangoObjectType):
14 date = graphene.types.datetime.DateTime(
15 description='Date when event happened at in ISO 8601 format.')
16 type = OrderEventsEnum(description='Order event type')
17 user = graphene.Field(
18 User, id=graphene.Argument(graphene.ID),
19 description='User who performed the action.')
20 message = graphene.String(
21 description='Content of a note added to the order.')
22 email = graphene.String(description='Email of the customer')
23 email_type = graphene.String(
24 description='Type of an email sent to the customer')
25 amount = graphene.Float(description='Amount of money.')
26 quantity = graphene.Int(description='Number of items.')
27 composed_id = graphene.String(
28 description='Composed id of the Fulfillment.')
29
30 class Meta:
31 description = 'History log of the order.'
32 model = models.OrderEvent
33 interfaces = [relay.Node]
34 exclude_fields = ['order', 'parameters']
35
36 def resolve_email(self, info):
37 return self.parameters.get('email', None)
38
39 def resolve_email_type(self, info):
40 return self.parameters.get('email_type', None)
41
42 def resolve_amount(self, info):
43 amount = self.parameters.get('amount', None)
44 return Decimal(amount) if amount else None
45
46 def resolve_quantity(self, info):
47 quantity = self.parameters.get('quantity', None)
48 return int(quantity) if quantity else None
49
50 def resolve_message(self, info):
51 return self.parameters.get('message', None)
52
53 def resolve_composed_id(self, info):
54 return self.parameters.get('composed_id', None)
55
56
57 class Fulfillment(CountableDjangoObjectType):
58 status_display = graphene.String(
59 description='User-friendly fulfillment status.')
60
61 class Meta:
62 description = 'Represents order fulfillment.'
63 interfaces = [relay.Node]
64 model = models.Fulfillment
65 exclude_fields = ['order']
66
67 def resolve_status_display(self, info):
68 return self.get_status_display()
69
70
71 class FulfillmentLine(CountableDjangoObjectType):
72 class Meta:
73 description = 'Represents line of the fulfillment.'
74 interfaces = [relay.Node]
75 model = models.FulfillmentLine
76 exclude_fields = ['fulfillment']
77
78
79 class Order(CountableDjangoObjectType):
80 fulfillments = graphene.List(
81 Fulfillment,
82 required=True,
83 description='List of shipments for the order.')
84 is_paid = graphene.Boolean(
85 description='Informs if an order is fully paid.')
86 number = graphene.String(description='User-friendly number of an order.')
87 payment_status = graphene.String(description='Internal payment status.')
88 payment_status_display = graphene.String(
89 description='User-friendly payment status.')
90 subtotal = graphene.Field(
91 TaxedMoney,
92 description='The sum of line prices not including shipping.')
93 status_display = graphene.String(description='User-friendly order status.')
94 total_authorized = graphene.Field(
95 Money, description='Amount authorized for the order.')
96 total_captured = graphene.Field(
97 Money, description='Amount captured by payment.')
98 events = graphene.List(
99 OrderEvent,
100 description='List of events associated with the order.')
101 user_email = graphene.String(
102 required=False, description='Email address of the customer.')
103
104 class Meta:
105 description = 'Represents an order in the shop.'
106 interfaces = [relay.Node]
107 model = models.Order
108 exclude_fields = [
109 'shipping_price_gross', 'shipping_price_net', 'total_gross',
110 'total_net']
111
112 @staticmethod
113 def resolve_subtotal(obj, info):
114 return obj.get_subtotal()
115
116 @staticmethod
117 def resolve_total_authorized(obj, info):
118 payment = obj.get_last_payment()
119 if payment:
120 return payment.get_total_price().gross
121
122 @staticmethod
123 def resolve_total_captured(obj, info):
124 payment = obj.get_last_payment()
125 if payment:
126 return payment.get_captured_price()
127
128 @staticmethod
129 def resolve_fulfillments(obj, info):
130 return obj.fulfillments.all()
131
132 @staticmethod
133 def resolve_events(obj, info):
134 return obj.events.all()
135
136 @staticmethod
137 def resolve_is_paid(obj, info):
138 return obj.is_fully_paid()
139
140 @staticmethod
141 def resolve_number(obj, info):
142 return str(obj.pk)
143
144 @staticmethod
145 def resolve_payment_status(obj, info):
146 return obj.get_last_payment_status()
147
148 @staticmethod
149 def resolve_payment_status_display(obj, info):
150 return obj.get_last_payment_status_display()
151
152 @staticmethod
153 def resolve_status_display(obj, info):
154 return obj.get_status_display()
155
156 @staticmethod
157 def resolve_user_email(obj, info):
158 if obj.user_email:
159 return obj.user_email
160 if obj.user_id:
161 return obj.user.email
162 return None
163
164
165 class OrderLine(CountableDjangoObjectType):
166 class Meta:
167 description = 'Represents order line of particular order.'
168 model = models.OrderLine
169 interfaces = [relay.Node]
170 exclude_fields = [
171 'order', 'unit_price_gross', 'unit_price_net', 'variant']
172
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/saleor/graphql/order/types.py b/saleor/graphql/order/types.py
--- a/saleor/graphql/order/types.py
+++ b/saleor/graphql/order/types.py
@@ -1,11 +1,13 @@
+from decimal import Decimal
+
import graphene
from graphene import relay
from ...order import OrderEvents, models
+from ...product.templatetags.product_images import get_thumbnail
from ..account.types import User
from ..core.types.common import CountableDjangoObjectType
from ..core.types.money import Money, TaxedMoney
-from decimal import Decimal
OrderEventsEnum = graphene.Enum.from_enum(OrderEvents)
@@ -163,9 +165,21 @@
class OrderLine(CountableDjangoObjectType):
+ thumbnail_url = graphene.String(
+ description='The URL of a main thumbnail for the ordered product.',
+ size=graphene.Int(description='Size of the image'))
+
class Meta:
description = 'Represents order line of particular order.'
model = models.OrderLine
interfaces = [relay.Node]
exclude_fields = [
'order', 'unit_price_gross', 'unit_price_net', 'variant']
+
+ def resolve_thumbnail_url(self, info, size=None):
+ if not self.variant_id:
+ return None
+ if not size:
+ size = 255
+ return get_thumbnail(
+ self.variant.get_first_image(), size, method='thumbnail')
|
{"golden_diff": "diff --git a/saleor/graphql/order/types.py b/saleor/graphql/order/types.py\n--- a/saleor/graphql/order/types.py\n+++ b/saleor/graphql/order/types.py\n@@ -1,11 +1,13 @@\n+from decimal import Decimal\n+\n import graphene\n from graphene import relay\n \n from ...order import OrderEvents, models\n+from ...product.templatetags.product_images import get_thumbnail\n from ..account.types import User\n from ..core.types.common import CountableDjangoObjectType\n from ..core.types.money import Money, TaxedMoney\n-from decimal import Decimal\n \n OrderEventsEnum = graphene.Enum.from_enum(OrderEvents)\n \n@@ -163,9 +165,21 @@\n \n \n class OrderLine(CountableDjangoObjectType):\n+ thumbnail_url = graphene.String(\n+ description='The URL of a main thumbnail for the ordered product.',\n+ size=graphene.Int(description='Size of the image'))\n+\n class Meta:\n description = 'Represents order line of particular order.'\n model = models.OrderLine\n interfaces = [relay.Node]\n exclude_fields = [\n 'order', 'unit_price_gross', 'unit_price_net', 'variant']\n+\n+ def resolve_thumbnail_url(self, info, size=None):\n+ if not self.variant_id:\n+ return None\n+ if not size:\n+ size = 255\n+ return get_thumbnail(\n+ self.variant.get_first_image(), size, method='thumbnail')\n", "issue": "Dashboard 2.0: Order API does not expose a way to get a product thumbnail for an order line\nWe get access to `productName` and `productSku` but not a thumbnail. It's ok to return `null` if the product no longer exists so we don't need to denormalize this field but I'd like to avoid having the client query the `product` directly and then have to manually figure out which image to show.\n", "before_files": [{"content": "import graphene\nfrom graphene import relay\n\nfrom ...order import OrderEvents, models\nfrom ..account.types import User\nfrom ..core.types.common import CountableDjangoObjectType\nfrom ..core.types.money import Money, TaxedMoney\nfrom decimal import Decimal\n\nOrderEventsEnum = graphene.Enum.from_enum(OrderEvents)\n\n\nclass OrderEvent(CountableDjangoObjectType):\n date = graphene.types.datetime.DateTime(\n description='Date when event happened at in ISO 8601 format.')\n type = OrderEventsEnum(description='Order event type')\n user = graphene.Field(\n User, id=graphene.Argument(graphene.ID),\n description='User who performed the action.')\n message = graphene.String(\n description='Content of a note added to the order.')\n email = graphene.String(description='Email of the customer')\n email_type = graphene.String(\n description='Type of an email sent to the customer')\n amount = graphene.Float(description='Amount of money.')\n quantity = graphene.Int(description='Number of items.')\n composed_id = graphene.String(\n description='Composed id of the Fulfillment.')\n\n class Meta:\n description = 'History log of the order.'\n model = models.OrderEvent\n interfaces = [relay.Node]\n exclude_fields = ['order', 'parameters']\n\n def resolve_email(self, info):\n return self.parameters.get('email', None)\n\n def resolve_email_type(self, info):\n return self.parameters.get('email_type', None)\n\n def resolve_amount(self, info):\n amount = self.parameters.get('amount', None)\n return Decimal(amount) if amount else None\n\n def resolve_quantity(self, info):\n quantity = self.parameters.get('quantity', None)\n return int(quantity) if quantity else None\n\n def resolve_message(self, info):\n return self.parameters.get('message', None)\n\n def resolve_composed_id(self, info):\n return 
self.parameters.get('composed_id', None)\n\n\nclass Fulfillment(CountableDjangoObjectType):\n status_display = graphene.String(\n description='User-friendly fulfillment status.')\n\n class Meta:\n description = 'Represents order fulfillment.'\n interfaces = [relay.Node]\n model = models.Fulfillment\n exclude_fields = ['order']\n\n def resolve_status_display(self, info):\n return self.get_status_display()\n\n\nclass FulfillmentLine(CountableDjangoObjectType):\n class Meta:\n description = 'Represents line of the fulfillment.'\n interfaces = [relay.Node]\n model = models.FulfillmentLine\n exclude_fields = ['fulfillment']\n\n\nclass Order(CountableDjangoObjectType):\n fulfillments = graphene.List(\n Fulfillment,\n required=True,\n description='List of shipments for the order.')\n is_paid = graphene.Boolean(\n description='Informs if an order is fully paid.')\n number = graphene.String(description='User-friendly number of an order.')\n payment_status = graphene.String(description='Internal payment status.')\n payment_status_display = graphene.String(\n description='User-friendly payment status.')\n subtotal = graphene.Field(\n TaxedMoney,\n description='The sum of line prices not including shipping.')\n status_display = graphene.String(description='User-friendly order status.')\n total_authorized = graphene.Field(\n Money, description='Amount authorized for the order.')\n total_captured = graphene.Field(\n Money, description='Amount captured by payment.')\n events = graphene.List(\n OrderEvent,\n description='List of events associated with the order.')\n user_email = graphene.String(\n required=False, description='Email address of the customer.')\n\n class Meta:\n description = 'Represents an order in the shop.'\n interfaces = [relay.Node]\n model = models.Order\n exclude_fields = [\n 'shipping_price_gross', 'shipping_price_net', 'total_gross',\n 'total_net']\n\n @staticmethod\n def resolve_subtotal(obj, info):\n return obj.get_subtotal()\n\n @staticmethod\n def resolve_total_authorized(obj, info):\n payment = obj.get_last_payment()\n if payment:\n return payment.get_total_price().gross\n\n @staticmethod\n def resolve_total_captured(obj, info):\n payment = obj.get_last_payment()\n if payment:\n return payment.get_captured_price()\n\n @staticmethod\n def resolve_fulfillments(obj, info):\n return obj.fulfillments.all()\n\n @staticmethod\n def resolve_events(obj, info):\n return obj.events.all()\n\n @staticmethod\n def resolve_is_paid(obj, info):\n return obj.is_fully_paid()\n\n @staticmethod\n def resolve_number(obj, info):\n return str(obj.pk)\n\n @staticmethod\n def resolve_payment_status(obj, info):\n return obj.get_last_payment_status()\n\n @staticmethod\n def resolve_payment_status_display(obj, info):\n return obj.get_last_payment_status_display()\n\n @staticmethod\n def resolve_status_display(obj, info):\n return obj.get_status_display()\n\n @staticmethod\n def resolve_user_email(obj, info):\n if obj.user_email:\n return obj.user_email\n if obj.user_id:\n return obj.user.email\n return None\n\n\nclass OrderLine(CountableDjangoObjectType):\n class Meta:\n description = 'Represents order line of particular order.'\n model = models.OrderLine\n interfaces = [relay.Node]\n exclude_fields = [\n 'order', 'unit_price_gross', 'unit_price_net', 'variant']\n", "path": "saleor/graphql/order/types.py"}], "after_files": [{"content": "from decimal import Decimal\n\nimport graphene\nfrom graphene import relay\n\nfrom ...order import OrderEvents, models\nfrom ...product.templatetags.product_images import 
get_thumbnail\nfrom ..account.types import User\nfrom ..core.types.common import CountableDjangoObjectType\nfrom ..core.types.money import Money, TaxedMoney\n\nOrderEventsEnum = graphene.Enum.from_enum(OrderEvents)\n\n\nclass OrderEvent(CountableDjangoObjectType):\n date = graphene.types.datetime.DateTime(\n description='Date when event happened at in ISO 8601 format.')\n type = OrderEventsEnum(description='Order event type')\n user = graphene.Field(\n User, id=graphene.Argument(graphene.ID),\n description='User who performed the action.')\n message = graphene.String(\n description='Content of a note added to the order.')\n email = graphene.String(description='Email of the customer')\n email_type = graphene.String(\n description='Type of an email sent to the customer')\n amount = graphene.Float(description='Amount of money.')\n quantity = graphene.Int(description='Number of items.')\n composed_id = graphene.String(\n description='Composed id of the Fulfillment.')\n\n class Meta:\n description = 'History log of the order.'\n model = models.OrderEvent\n interfaces = [relay.Node]\n exclude_fields = ['order', 'parameters']\n\n def resolve_email(self, info):\n return self.parameters.get('email', None)\n\n def resolve_email_type(self, info):\n return self.parameters.get('email_type', None)\n\n def resolve_amount(self, info):\n amount = self.parameters.get('amount', None)\n return Decimal(amount) if amount else None\n\n def resolve_quantity(self, info):\n quantity = self.parameters.get('quantity', None)\n return int(quantity) if quantity else None\n\n def resolve_message(self, info):\n return self.parameters.get('message', None)\n\n def resolve_composed_id(self, info):\n return self.parameters.get('composed_id', None)\n\n\nclass Fulfillment(CountableDjangoObjectType):\n status_display = graphene.String(\n description='User-friendly fulfillment status.')\n\n class Meta:\n description = 'Represents order fulfillment.'\n interfaces = [relay.Node]\n model = models.Fulfillment\n exclude_fields = ['order']\n\n def resolve_status_display(self, info):\n return self.get_status_display()\n\n\nclass FulfillmentLine(CountableDjangoObjectType):\n class Meta:\n description = 'Represents line of the fulfillment.'\n interfaces = [relay.Node]\n model = models.FulfillmentLine\n exclude_fields = ['fulfillment']\n\n\nclass Order(CountableDjangoObjectType):\n fulfillments = graphene.List(\n Fulfillment,\n required=True,\n description='List of shipments for the order.')\n is_paid = graphene.Boolean(\n description='Informs if an order is fully paid.')\n number = graphene.String(description='User-friendly number of an order.')\n payment_status = graphene.String(description='Internal payment status.')\n payment_status_display = graphene.String(\n description='User-friendly payment status.')\n subtotal = graphene.Field(\n TaxedMoney,\n description='The sum of line prices not including shipping.')\n status_display = graphene.String(description='User-friendly order status.')\n total_authorized = graphene.Field(\n Money, description='Amount authorized for the order.')\n total_captured = graphene.Field(\n Money, description='Amount captured by payment.')\n events = graphene.List(\n OrderEvent,\n description='List of events associated with the order.')\n user_email = graphene.String(\n required=False, description='Email address of the customer.')\n\n class Meta:\n description = 'Represents an order in the shop.'\n interfaces = [relay.Node]\n model = models.Order\n exclude_fields = [\n 'shipping_price_gross', 
'shipping_price_net', 'total_gross',\n 'total_net']\n\n @staticmethod\n def resolve_subtotal(obj, info):\n return obj.get_subtotal()\n\n @staticmethod\n def resolve_total_authorized(obj, info):\n payment = obj.get_last_payment()\n if payment:\n return payment.get_total_price().gross\n\n @staticmethod\n def resolve_total_captured(obj, info):\n payment = obj.get_last_payment()\n if payment:\n return payment.get_captured_price()\n\n @staticmethod\n def resolve_fulfillments(obj, info):\n return obj.fulfillments.all()\n\n @staticmethod\n def resolve_events(obj, info):\n return obj.events.all()\n\n @staticmethod\n def resolve_is_paid(obj, info):\n return obj.is_fully_paid()\n\n @staticmethod\n def resolve_number(obj, info):\n return str(obj.pk)\n\n @staticmethod\n def resolve_payment_status(obj, info):\n return obj.get_last_payment_status()\n\n @staticmethod\n def resolve_payment_status_display(obj, info):\n return obj.get_last_payment_status_display()\n\n @staticmethod\n def resolve_status_display(obj, info):\n return obj.get_status_display()\n\n @staticmethod\n def resolve_user_email(obj, info):\n if obj.user_email:\n return obj.user_email\n if obj.user_id:\n return obj.user.email\n return None\n\n\nclass OrderLine(CountableDjangoObjectType):\n thumbnail_url = graphene.String(\n description='The URL of a main thumbnail for the ordered product.',\n size=graphene.Int(description='Size of the image'))\n\n class Meta:\n description = 'Represents order line of particular order.'\n model = models.OrderLine\n interfaces = [relay.Node]\n exclude_fields = [\n 'order', 'unit_price_gross', 'unit_price_net', 'variant']\n\n def resolve_thumbnail_url(self, info, size=None):\n if not self.variant_id:\n return None\n if not size:\n size = 255\n return get_thumbnail(\n self.variant.get_first_image(), size, method='thumbnail')\n", "path": "saleor/graphql/order/types.py"}]}
| 1,925 | 318 |
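A side note on the record above: the accepted diff gives `OrderLine` a `thumbnail_url` field that takes a `size` argument and resolves it from the line's variant, defaulting to 255 and returning `None` when the variant is gone. A minimal, self-contained sketch of that graphene pattern (a field with an argument plus a matching `resolve_*` method) follows; it assumes graphene 2.x, and the `Query` type, URL format, and data are invented for illustration rather than taken from saleor.

```python
# Illustrative sketch of a graphene field that accepts an argument, mirroring
# the thumbnail_url / resolve_thumbnail_url pair added in the diff above.
import graphene


class OrderLine(graphene.ObjectType):
    product_name = graphene.String()
    thumbnail_url = graphene.String(size=graphene.Int())

    def resolve_thumbnail_url(self, info, size=None):
        # Same defaulting behaviour as the saleor resolver: fall back to 255.
        if not size:
            size = 255
        # Stand-in for get_thumbnail(); the URL scheme here is made up.
        return "https://example.com/media/%s_%dx%d.png" % (self.product_name, size, size)


class Query(graphene.ObjectType):
    line = graphene.Field(OrderLine)

    def resolve_line(self, info):
        return OrderLine(product_name="tea-pot")


schema = graphene.Schema(query=Query)
result = schema.execute("{ line { thumbnailUrl(size: 120) } }")
print(result.data)  # {'line': {'thumbnailUrl': 'https://example.com/media/tea-pot_120x120.png'}}
```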
gh_patches_debug_8070
|
rasdani/github-patches
|
git_diff
|
mindsdb__mindsdb-130
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cleanup before releasing v.1.0
* Remove any unused files
* Remove any unused variable and functions (loads of those in config I'd think)
* Remove any unused requirements
* Update the version for requirements
* Update and standardize the interface of older functions and add comments to them (probably won't have time for all, but at least some)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 import setuptools
2
3 about = {}
4 with open("mindsdb/__about__.py") as fp:
5 exec(fp.read(), about)
6
7 long_description = open('README.md', encoding='utf-8').read()
8
9 with open('requirements.txt') as req_file:
10 requirements = req_file.read().splitlines()
11
12 setuptools.setup(
13 name=about['__title__'],
14 version=about['__version__'],
15 url=about['__github__'],
16 download_url=about['__pypi__'],
17 license=about['__license__'],
18 author=about['__author__'],
19 author_email=about['__email__'],
20 description=about['__description__'],
21 long_description=long_description,
22 long_description_content_type="text/markdown",
23 packages=setuptools.find_packages(),
24 install_requires=requirements,
25 classifiers=(
26 "Programming Language :: Python :: 3",
27 "License :: OSI Approved :: MIT License",
28 "Operating System :: OS Independent",
29 ),
30 python_requires=">=3.3"
31 )
32
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,5 @@
import setuptools
+import subprocess
about = {}
with open("mindsdb/__about__.py") as fp:
@@ -27,5 +28,10 @@
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
),
- python_requires=">=3.3"
+ python_requires=">=3.6"
)
+
+try:
+ subprocess.call(['python3','-m','spacy','download','en'])
+except:
+ subprocess.call(['python','-m','spacy','download','en'])
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -1,4 +1,5 @@\n import setuptools\n+import subprocess\n \n about = {}\n with open(\"mindsdb/__about__.py\") as fp:\n@@ -27,5 +28,10 @@\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n ),\n- python_requires=\">=3.3\"\n+ python_requires=\">=3.6\"\n )\n+\n+try:\n+ subprocess.call(['python3','-m','spacy','download','en'])\n+except:\n+ subprocess.call(['python','-m','spacy','download','en'])\n", "issue": "Cleanup before releasing v.1.0\n* Remove any unused files\r\n* Remove any unused variable and functions (loads of those in config I'd think)\r\n* Remove any unused requirements\r\n* Update the version for requirements\r\n* Update and standardize the interface of older functions and add comments to them (probably won't have time for all, but at least some)\n", "before_files": [{"content": "import setuptools\n\nabout = {}\nwith open(\"mindsdb/__about__.py\") as fp:\n exec(fp.read(), about)\n\nlong_description = open('README.md', encoding='utf-8').read()\n\nwith open('requirements.txt') as req_file:\n requirements = req_file.read().splitlines()\n\nsetuptools.setup(\n name=about['__title__'],\n version=about['__version__'],\n url=about['__github__'],\n download_url=about['__pypi__'],\n license=about['__license__'],\n author=about['__author__'],\n author_email=about['__email__'],\n description=about['__description__'],\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n packages=setuptools.find_packages(),\n install_requires=requirements,\n classifiers=(\n \"Programming Language :: Python :: 3\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n ),\n python_requires=\">=3.3\"\n)\n", "path": "setup.py"}], "after_files": [{"content": "import setuptools\nimport subprocess\n\nabout = {}\nwith open(\"mindsdb/__about__.py\") as fp:\n exec(fp.read(), about)\n\nlong_description = open('README.md', encoding='utf-8').read()\n\nwith open('requirements.txt') as req_file:\n requirements = req_file.read().splitlines()\n\nsetuptools.setup(\n name=about['__title__'],\n version=about['__version__'],\n url=about['__github__'],\n download_url=about['__pypi__'],\n license=about['__license__'],\n author=about['__author__'],\n author_email=about['__email__'],\n description=about['__description__'],\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n packages=setuptools.find_packages(),\n install_requires=requirements,\n classifiers=(\n \"Programming Language :: Python :: 3\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n ),\n python_requires=\">=3.6\"\n)\n\ntry:\n subprocess.call(['python3','-m','spacy','download','en'])\nexcept:\n subprocess.call(['python','-m','spacy','download','en'])\n", "path": "setup.py"}]}
| 595 | 148 |
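An aside on the record above: besides raising `python_requires` to 3.6, the accepted fix shells out to spaCy's model downloader after `setup()` runs, trying the `python3` launcher first and falling back to plain `python`. A standalone sketch of that interpreter-fallback pattern follows; invoking it outside a setup.py, and the hard-coded `en` model name, are only for illustration.

```python
# Illustrative sketch of the post-install fallback used in the patch above.
import subprocess


def download_spacy_model(model="en"):
    """Invoke spaCy's downloader, preferring the python3 launcher."""
    try:
        subprocess.call(["python3", "-m", "spacy", "download", model])
    except OSError:
        # No `python3` executable on PATH (common on Windows); fall back to
        # whatever `python` resolves to, as the patch does.
        subprocess.call(["python", "-m", "spacy", "download", model])


if __name__ == "__main__":
    download_spacy_model()
```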
gh_patches_debug_499
|
rasdani/github-patches
|
git_diff
|
Pylons__pyramid-1504
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ASCII apostrophes are replaced with typgraphic apostrophes in the PDF documentation code boxes
In version 1.3.2 of http://media.readthedocs.org/pdf/pyramid/1.3-branch/pyramid.pdf, apostrophes which should be encoded as ASCII (') are replaced with their typographic variants (’). This is correct in running text, but should not happen in code boxes because Python do not understand them:
```
Python 2.7.3 (default, Aug 1 2012, 05:14:39)
[GCC 4.6.3] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> a = ’a’
File "<stdin>", line 1
a = ’a’
^
SyntaxError: invalid syntax
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 ##############################################################################
2 #
3 # Copyright (c) 2008-2013 Agendaless Consulting and Contributors.
4 # All Rights Reserved.
5 #
6 # This software is subject to the provisions of the BSD-like license at
7 # http://www.repoze.org/LICENSE.txt. A copy of the license should accompany
8 # this distribution. THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL
9 # EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO,
10 # THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND
11 # FITNESS FOR A PARTICULAR PURPOSE
12 #
13 ##############################################################################
14
15 import os
16 import sys
17
18 from setuptools import setup, find_packages
19
20 py_version = sys.version_info[:2]
21
22 PY3 = py_version[0] == 3
23
24 if PY3:
25 if py_version < (3, 2):
26 raise RuntimeError('On Python 3, Pyramid requires Python 3.2 or better')
27 else:
28 if py_version < (2, 6):
29 raise RuntimeError('On Python 2, Pyramid requires Python 2.6 or better')
30
31 here = os.path.abspath(os.path.dirname(__file__))
32 try:
33 with open(os.path.join(here, 'README.rst')) as f:
34 README = f.read()
35 with open(os.path.join(here, 'CHANGES.txt')) as f:
36 CHANGES = f.read()
37 except IOError:
38 README = CHANGES = ''
39
40 install_requires=[
41 'setuptools',
42 'WebOb >= 1.3.1', # request.domain and CookieProfile
43 'repoze.lru >= 0.4', # py3 compat
44 'zope.interface >= 3.8.0', # has zope.interface.registry
45 'zope.deprecation >= 3.5.0', # py3 compat
46 'venusian >= 1.0a3', # ``ignore``
47 'translationstring >= 0.4', # py3 compat
48 'PasteDeploy >= 1.5.0', # py3 compat
49 ]
50
51 tests_require = [
52 'WebTest >= 1.3.1', # py3 compat
53 ]
54
55 if not PY3:
56 tests_require.append('zope.component>=3.11.0')
57
58 docs_extras = [
59 'Sphinx',
60 'docutils',
61 'repoze.sphinx.autointerface',
62 ]
63
64 testing_extras = tests_require + [
65 'nose',
66 'coverage',
67 'virtualenv', # for scaffolding tests
68 ]
69
70 setup(name='pyramid',
71 version='1.5.2',
72 description='The Pyramid Web Framework, a Pylons project',
73 long_description=README + '\n\n' + CHANGES,
74 classifiers=[
75 "Intended Audience :: Developers",
76 "Programming Language :: Python",
77 "Programming Language :: Python :: 2.6",
78 "Programming Language :: Python :: 2.7",
79 "Programming Language :: Python :: 3",
80 "Programming Language :: Python :: 3.2",
81 "Programming Language :: Python :: 3.3",
82 "Programming Language :: Python :: 3.4",
83 "Programming Language :: Python :: Implementation :: CPython",
84 "Programming Language :: Python :: Implementation :: PyPy",
85 "Framework :: Pyramid",
86 "Topic :: Internet :: WWW/HTTP",
87 "Topic :: Internet :: WWW/HTTP :: WSGI",
88 "License :: Repoze Public License",
89 ],
90 keywords='web wsgi pylons pyramid',
91 author="Chris McDonough, Agendaless Consulting",
92 author_email="[email protected]",
93 url="http://docs.pylonsproject.org/en/latest/docs/pyramid.html",
94 license="BSD-derived (http://www.repoze.org/LICENSE.txt)",
95 packages=find_packages(),
96 include_package_data=True,
97 zip_safe=False,
98 install_requires = install_requires,
99 extras_require = {
100 'testing':testing_extras,
101 'docs':docs_extras,
102 },
103 tests_require = tests_require,
104 test_suite="pyramid.tests",
105 entry_points = """\
106 [pyramid.scaffold]
107 starter=pyramid.scaffolds:StarterProjectTemplate
108 zodb=pyramid.scaffolds:ZODBProjectTemplate
109 alchemy=pyramid.scaffolds:AlchemyProjectTemplate
110 [console_scripts]
111 pcreate = pyramid.scripts.pcreate:main
112 pserve = pyramid.scripts.pserve:main
113 pshell = pyramid.scripts.pshell:main
114 proutes = pyramid.scripts.proutes:main
115 pviews = pyramid.scripts.pviews:main
116 ptweens = pyramid.scripts.ptweens:main
117 prequest = pyramid.scripts.prequest:main
118 pdistreport = pyramid.scripts.pdistreport:main
119 [paste.server_runner]
120 wsgiref = pyramid.scripts.pserve:wsgiref_server_runner
121 cherrypy = pyramid.scripts.pserve:cherrypy_server_runner
122 """
123 )
124
125
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -56,7 +56,7 @@
tests_require.append('zope.component>=3.11.0')
docs_extras = [
- 'Sphinx',
+ 'Sphinx >= 1.2.3',
'docutils',
'repoze.sphinx.autointerface',
]
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -56,7 +56,7 @@\n tests_require.append('zope.component>=3.11.0')\n \n docs_extras = [\n- 'Sphinx',\n+ 'Sphinx >= 1.2.3',\n 'docutils',\n 'repoze.sphinx.autointerface',\n ]\n", "issue": "ASCII apostrophes are replaced with typgraphic apostrophes in the PDF documentation code boxes\nIn version 1.3.2 of http://media.readthedocs.org/pdf/pyramid/1.3-branch/pyramid.pdf, apostrophes which should be encoded as ASCII (') are replaced with their typographic variants (\u2019). This is correct in running text, but should not happen in code boxes because Python do not understand them:\n\n```\nPython 2.7.3 (default, Aug 1 2012, 05:14:39) \n[GCC 4.6.3] on linux2\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\n>>> a = \u2019a\u2019\n File \"<stdin>\", line 1\n a = \u2019a\u2019\n ^\nSyntaxError: invalid syntax\n```\n\n", "before_files": [{"content": "##############################################################################\n#\n# Copyright (c) 2008-2013 Agendaless Consulting and Contributors.\n# All Rights Reserved.\n#\n# This software is subject to the provisions of the BSD-like license at\n# http://www.repoze.org/LICENSE.txt. A copy of the license should accompany\n# this distribution. THIS SOFTWARE IS PROVIDED \"AS IS\" AND ANY AND ALL\n# EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO,\n# THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND\n# FITNESS FOR A PARTICULAR PURPOSE\n#\n##############################################################################\n\nimport os\nimport sys\n\nfrom setuptools import setup, find_packages\n\npy_version = sys.version_info[:2]\n\nPY3 = py_version[0] == 3\n\nif PY3:\n if py_version < (3, 2):\n raise RuntimeError('On Python 3, Pyramid requires Python 3.2 or better')\nelse:\n if py_version < (2, 6):\n raise RuntimeError('On Python 2, Pyramid requires Python 2.6 or better')\n\nhere = os.path.abspath(os.path.dirname(__file__))\ntry:\n with open(os.path.join(here, 'README.rst')) as f:\n README = f.read()\n with open(os.path.join(here, 'CHANGES.txt')) as f:\n CHANGES = f.read()\nexcept IOError:\n README = CHANGES = ''\n\ninstall_requires=[\n 'setuptools',\n 'WebOb >= 1.3.1', # request.domain and CookieProfile\n 'repoze.lru >= 0.4', # py3 compat\n 'zope.interface >= 3.8.0', # has zope.interface.registry\n 'zope.deprecation >= 3.5.0', # py3 compat\n 'venusian >= 1.0a3', # ``ignore``\n 'translationstring >= 0.4', # py3 compat\n 'PasteDeploy >= 1.5.0', # py3 compat\n ]\n\ntests_require = [\n 'WebTest >= 1.3.1', # py3 compat\n ]\n\nif not PY3:\n tests_require.append('zope.component>=3.11.0')\n\ndocs_extras = [\n 'Sphinx',\n 'docutils',\n 'repoze.sphinx.autointerface',\n ]\n\ntesting_extras = tests_require + [\n 'nose',\n 'coverage',\n 'virtualenv', # for scaffolding tests\n ]\n\nsetup(name='pyramid',\n version='1.5.2',\n description='The Pyramid Web Framework, a Pylons project',\n long_description=README + '\\n\\n' + CHANGES,\n classifiers=[\n \"Intended Audience :: Developers\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.2\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: 
PyPy\",\n \"Framework :: Pyramid\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Topic :: Internet :: WWW/HTTP :: WSGI\",\n \"License :: Repoze Public License\",\n ],\n keywords='web wsgi pylons pyramid',\n author=\"Chris McDonough, Agendaless Consulting\",\n author_email=\"[email protected]\",\n url=\"http://docs.pylonsproject.org/en/latest/docs/pyramid.html\",\n license=\"BSD-derived (http://www.repoze.org/LICENSE.txt)\",\n packages=find_packages(),\n include_package_data=True,\n zip_safe=False,\n install_requires = install_requires,\n extras_require = {\n 'testing':testing_extras,\n 'docs':docs_extras,\n },\n tests_require = tests_require,\n test_suite=\"pyramid.tests\",\n entry_points = \"\"\"\\\n [pyramid.scaffold]\n starter=pyramid.scaffolds:StarterProjectTemplate\n zodb=pyramid.scaffolds:ZODBProjectTemplate\n alchemy=pyramid.scaffolds:AlchemyProjectTemplate\n [console_scripts]\n pcreate = pyramid.scripts.pcreate:main\n pserve = pyramid.scripts.pserve:main\n pshell = pyramid.scripts.pshell:main\n proutes = pyramid.scripts.proutes:main\n pviews = pyramid.scripts.pviews:main\n ptweens = pyramid.scripts.ptweens:main\n prequest = pyramid.scripts.prequest:main\n pdistreport = pyramid.scripts.pdistreport:main\n [paste.server_runner]\n wsgiref = pyramid.scripts.pserve:wsgiref_server_runner\n cherrypy = pyramid.scripts.pserve:cherrypy_server_runner\n \"\"\"\n )\n\n", "path": "setup.py"}], "after_files": [{"content": "##############################################################################\n#\n# Copyright (c) 2008-2013 Agendaless Consulting and Contributors.\n# All Rights Reserved.\n#\n# This software is subject to the provisions of the BSD-like license at\n# http://www.repoze.org/LICENSE.txt. A copy of the license should accompany\n# this distribution. 
THIS SOFTWARE IS PROVIDED \"AS IS\" AND ANY AND ALL\n# EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO,\n# THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND\n# FITNESS FOR A PARTICULAR PURPOSE\n#\n##############################################################################\n\nimport os\nimport sys\n\nfrom setuptools import setup, find_packages\n\npy_version = sys.version_info[:2]\n\nPY3 = py_version[0] == 3\n\nif PY3:\n if py_version < (3, 2):\n raise RuntimeError('On Python 3, Pyramid requires Python 3.2 or better')\nelse:\n if py_version < (2, 6):\n raise RuntimeError('On Python 2, Pyramid requires Python 2.6 or better')\n\nhere = os.path.abspath(os.path.dirname(__file__))\ntry:\n with open(os.path.join(here, 'README.rst')) as f:\n README = f.read()\n with open(os.path.join(here, 'CHANGES.txt')) as f:\n CHANGES = f.read()\nexcept IOError:\n README = CHANGES = ''\n\ninstall_requires=[\n 'setuptools',\n 'WebOb >= 1.3.1', # request.domain and CookieProfile\n 'repoze.lru >= 0.4', # py3 compat\n 'zope.interface >= 3.8.0', # has zope.interface.registry\n 'zope.deprecation >= 3.5.0', # py3 compat\n 'venusian >= 1.0a3', # ``ignore``\n 'translationstring >= 0.4', # py3 compat\n 'PasteDeploy >= 1.5.0', # py3 compat\n ]\n\ntests_require = [\n 'WebTest >= 1.3.1', # py3 compat\n ]\n\nif not PY3:\n tests_require.append('zope.component>=3.11.0')\n\ndocs_extras = [\n 'Sphinx >= 1.2.3',\n 'docutils',\n 'repoze.sphinx.autointerface',\n ]\n\ntesting_extras = tests_require + [\n 'nose',\n 'coverage',\n 'virtualenv', # for scaffolding tests\n ]\n\nsetup(name='pyramid',\n version='1.5.2',\n description='The Pyramid Web Framework, a Pylons project',\n long_description=README + '\\n\\n' + CHANGES,\n classifiers=[\n \"Intended Audience :: Developers\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.2\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Framework :: Pyramid\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Topic :: Internet :: WWW/HTTP :: WSGI\",\n \"License :: Repoze Public License\",\n ],\n keywords='web wsgi pylons pyramid',\n author=\"Chris McDonough, Agendaless Consulting\",\n author_email=\"[email protected]\",\n url=\"http://docs.pylonsproject.org/en/latest/docs/pyramid.html\",\n license=\"BSD-derived (http://www.repoze.org/LICENSE.txt)\",\n packages=find_packages(),\n include_package_data=True,\n zip_safe=False,\n install_requires = install_requires,\n extras_require = {\n 'testing':testing_extras,\n 'docs':docs_extras,\n },\n tests_require = tests_require,\n test_suite=\"pyramid.tests\",\n entry_points = \"\"\"\\\n [pyramid.scaffold]\n starter=pyramid.scaffolds:StarterProjectTemplate\n zodb=pyramid.scaffolds:ZODBProjectTemplate\n alchemy=pyramid.scaffolds:AlchemyProjectTemplate\n [console_scripts]\n pcreate = pyramid.scripts.pcreate:main\n pserve = pyramid.scripts.pserve:main\n pshell = pyramid.scripts.pshell:main\n proutes = pyramid.scripts.proutes:main\n pviews = pyramid.scripts.pviews:main\n ptweens = pyramid.scripts.ptweens:main\n prequest = pyramid.scripts.prequest:main\n pdistreport = pyramid.scripts.pdistreport:main\n [paste.server_runner]\n wsgiref = pyramid.scripts.pserve:wsgiref_server_runner\n 
cherrypy = pyramid.scripts.pserve:cherrypy_server_runner\n \"\"\"\n )\n\n", "path": "setup.py"}]}
| 1,787 | 91 |
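A short aside on the record above: the whole fix is a version floor on Sphinx in `docs_extras`, presumably because newer Sphinx releases no longer apply typographic quotes inside literal blocks when building the LaTeX/PDF output. A throwaway check like the one below can confirm an environment satisfies that floor before building the docs; it is illustrative only and not part of the pyramid source.

```python
# Illustrative check that the installed Sphinx meets the pin from the diff above.
import pkg_resources


def sphinx_satisfies_pin(pin="Sphinx >= 1.2.3"):
    try:
        pkg_resources.require(pin)
        return True
    except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict):
        return False


if __name__ == "__main__":
    print(sphinx_satisfies_pin())
```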
gh_patches_debug_4948
|
rasdani/github-patches
|
git_diff
|
ansible__ansible-11626
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
devel bug - profile_tasks.py requires CALLBACK_ constants or the display param is not passed
relates to #11625 and commit a09f623
```
$ ansible-playbook plays/test_to_json.yml -vvv
Using /Users/glynch/vagrant/ansible_foo/ansible.cfg as config file
1 plays in plays/test_to_json.yml
[ERROR]: Unexpected Exception: __init__() takes exactly 2 arguments (1 given)
the full traceback was:
Traceback (most recent call last):
File "/Users/glynch/dev/ansible/bin/ansible-playbook", line 77, in <module>
sys.exit(cli.run())
File "/Users/glynch/dev/ansible/lib/ansible/cli/playbook.py", line 162, in run
results = pbex.run()
File "/Users/glynch/dev/ansible/lib/ansible/executor/playbook_executor.py", line 128, in run
self._tqm.load_callbacks()
File "/Users/glynch/dev/ansible/lib/ansible/executor/task_queue_manager.py", line 154, in load_callbacks
self._callback_plugins.append(callback_plugin())
TypeError: __init__() takes exactly 2 arguments (1 given)
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lib/ansible/plugins/callback/profile_tasks.py`
Content:
```
1 # (C) 2015, Tom Paine, <[email protected]>
2 # (C) 2014, Jharrod LaFon, @JharrodLaFon
3 # (C) 2012-2013, Michael DeHaan, <[email protected]>
4 #
5 # This file is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU General Public License as published by
7 # the Free Software Foundation, either version 3 of the License, or
8 # (at your option) any later version.
9 #
10 # File is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU General Public License for more details.
14 #
15 # See <http://www.gnu.org/licenses/> for a copy of the
16 # GNU General Public License
17
18 # Provides per-task timing, ongoing playbook elapsed time and
19 # ordered list of top 20 longest running tasks at end
20
21 import time
22
23 from ansible.plugins.callback import CallbackBase
24
25 # define start time
26 t0 = tn = time.time()
27
28 def secondsToStr(t):
29 # http://bytes.com/topic/python/answers/635958-handy-short-cut-formatting-elapsed-time-floating-point-seconds
30 rediv = lambda ll, b: list(divmod(ll[0], b)) + ll[1:]
31 return "%d:%02d:%02d.%03d" % tuple(reduce(rediv, [[t * 1000, ], 1000, 60, 60]))
32
33
34 def filled(msg, fchar="*"):
35 if len(msg) == 0:
36 width = 79
37 else:
38 msg = "%s " % msg
39 width = 79 - len(msg)
40 if width < 3:
41 width = 3
42 filler = fchar * width
43 return "%s%s " % (msg, filler)
44
45
46 def timestamp(self):
47 if self.current is not None:
48 self.stats[self.current] = time.time() - self.stats[self.current]
49
50
51 def tasktime():
52 global tn
53 time_current = time.strftime('%A %d %B %Y %H:%M:%S %z')
54 time_elapsed = secondsToStr(time.time() - tn)
55 time_total_elapsed = secondsToStr(time.time() - t0)
56 display(filled('%s (%s)%s%s' % (time_current, time_elapsed, ' ' * 7, time_total_elapsed)))
57 tn = time.time()
58
59
60 class CallbackModule(CallbackBase):
61
62 def __init__(self, display):
63 self.stats = {}
64 self.current = None
65
66 super(CallbackModule, self).__init__(display)
67
68
69 def playbook_on_task_start(self, name, is_conditional):
70 """
71 Logs the start of each task
72 """
73 tasktime()
74 timestamp(self)
75
76 # Record the start time of the current task
77 self.current = name
78 self.stats[self.current] = time.time()
79
80 def playbook_on_setup(self):
81 tasktime()
82
83 def playbook_on_stats(self, stats):
84 tasktime()
85 display(filled("", fchar="="))
86
87 timestamp(self)
88
89 # Sort the tasks by their running time
90 results = sorted(
91 self.stats.items(),
92 key=lambda value: value[1],
93 reverse=True,
94 )
95
96 # Just keep the top 20
97 results = results[:20]
98
99 # Print the timings
100 for name, elapsed in results:
101 self.display.display(
102 "{0:-<70}{1:->9}".format(
103 '{0} '.format(name),
104 ' {0:.02f}s'.format(elapsed),
105 )
106 )
107
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/lib/ansible/plugins/callback/profile_tasks.py b/lib/ansible/plugins/callback/profile_tasks.py
--- a/lib/ansible/plugins/callback/profile_tasks.py
+++ b/lib/ansible/plugins/callback/profile_tasks.py
@@ -58,7 +58,14 @@
class CallbackModule(CallbackBase):
-
+ """
+ This callback module provides per-task timing, ongoing playbook elapsed time
+ and ordered list of top 20 longest running tasks at end.
+ """
+ CALLBACK_VERSION = 2.0
+ CALLBACK_TYPE = 'aggregate'
+ CALLBACK_NAME = 'profile_tasks'
+
def __init__(self, display):
self.stats = {}
self.current = None
|
{"golden_diff": "diff --git a/lib/ansible/plugins/callback/profile_tasks.py b/lib/ansible/plugins/callback/profile_tasks.py\n--- a/lib/ansible/plugins/callback/profile_tasks.py\n+++ b/lib/ansible/plugins/callback/profile_tasks.py\n@@ -58,7 +58,14 @@\n \n \n class CallbackModule(CallbackBase):\n-\n+ \"\"\"\n+ This callback module provides per-task timing, ongoing playbook elapsed time \n+ and ordered list of top 20 longest running tasks at end.\n+ \"\"\"\n+ CALLBACK_VERSION = 2.0\n+ CALLBACK_TYPE = 'aggregate'\n+ CALLBACK_NAME = 'profile_tasks'\n+ \n def __init__(self, display):\n self.stats = {}\n self.current = None\n", "issue": "devel bug - profile_tasks.py requires CALLBACK_ constants or the display param is not passed\nrelates to #11625 and commit a09f623\n\n```\n$ ansible-playbook plays/test_to_json.yml -vvv\nUsing /Users/glynch/vagrant/ansible_foo/ansible.cfg as config file\n1 plays in plays/test_to_json.yml\n [ERROR]: Unexpected Exception: __init__() takes exactly 2 arguments (1 given)\n\nthe full traceback was:\n\nTraceback (most recent call last):\n File \"/Users/glynch/dev/ansible/bin/ansible-playbook\", line 77, in <module>\n sys.exit(cli.run())\n File \"/Users/glynch/dev/ansible/lib/ansible/cli/playbook.py\", line 162, in run\n results = pbex.run()\n File \"/Users/glynch/dev/ansible/lib/ansible/executor/playbook_executor.py\", line 128, in run\n self._tqm.load_callbacks()\n File \"/Users/glynch/dev/ansible/lib/ansible/executor/task_queue_manager.py\", line 154, in load_callbacks\n self._callback_plugins.append(callback_plugin())\nTypeError: __init__() takes exactly 2 arguments (1 given)\n```\n\n", "before_files": [{"content": "# (C) 2015, Tom Paine, <[email protected]>\n# (C) 2014, Jharrod LaFon, @JharrodLaFon\n# (C) 2012-2013, Michael DeHaan, <[email protected]>\n#\n# This file is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# File is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n# GNU General Public License for more details.\n#\n# See <http://www.gnu.org/licenses/> for a copy of the\n# GNU General Public License\n\n# Provides per-task timing, ongoing playbook elapsed time and\n# ordered list of top 20 longest running tasks at end\n\nimport time\n\nfrom ansible.plugins.callback import CallbackBase\n\n# define start time\nt0 = tn = time.time()\n\ndef secondsToStr(t):\n # http://bytes.com/topic/python/answers/635958-handy-short-cut-formatting-elapsed-time-floating-point-seconds\n rediv = lambda ll, b: list(divmod(ll[0], b)) + ll[1:]\n return \"%d:%02d:%02d.%03d\" % tuple(reduce(rediv, [[t * 1000, ], 1000, 60, 60]))\n\n\ndef filled(msg, fchar=\"*\"):\n if len(msg) == 0:\n width = 79\n else:\n msg = \"%s \" % msg\n width = 79 - len(msg)\n if width < 3:\n width = 3\n filler = fchar * width\n return \"%s%s \" % (msg, filler)\n\n\ndef timestamp(self):\n if self.current is not None:\n self.stats[self.current] = time.time() - self.stats[self.current]\n\n\ndef tasktime():\n global tn\n time_current = time.strftime('%A %d %B %Y %H:%M:%S %z')\n time_elapsed = secondsToStr(time.time() - tn)\n time_total_elapsed = secondsToStr(time.time() - t0)\n display(filled('%s (%s)%s%s' % (time_current, time_elapsed, ' ' * 7, time_total_elapsed)))\n tn = time.time()\n\n\nclass CallbackModule(CallbackBase):\n\n def __init__(self, display):\n self.stats = {}\n self.current = None\n\n super(CallbackModule, self).__init__(display)\n\n\n def playbook_on_task_start(self, name, is_conditional):\n \"\"\"\n Logs the start of each task\n \"\"\"\n tasktime()\n timestamp(self)\n\n # Record the start time of the current task\n self.current = name\n self.stats[self.current] = time.time()\n\n def playbook_on_setup(self):\n tasktime()\n\n def playbook_on_stats(self, stats):\n tasktime()\n display(filled(\"\", fchar=\"=\"))\n\n timestamp(self)\n\n # Sort the tasks by their running time\n results = sorted(\n self.stats.items(),\n key=lambda value: value[1],\n reverse=True,\n )\n\n # Just keep the top 20\n results = results[:20]\n\n # Print the timings\n for name, elapsed in results:\n self.display.display(\n \"{0:-<70}{1:->9}\".format(\n '{0} '.format(name),\n ' {0:.02f}s'.format(elapsed),\n )\n )\n", "path": "lib/ansible/plugins/callback/profile_tasks.py"}], "after_files": [{"content": "# (C) 2015, Tom Paine, <[email protected]>\n# (C) 2014, Jharrod LaFon, @JharrodLaFon\n# (C) 2012-2013, Michael DeHaan, <[email protected]>\n#\n# This file is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# File is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n# GNU General Public License for more details.\n#\n# See <http://www.gnu.org/licenses/> for a copy of the\n# GNU General Public License\n\n# Provides per-task timing, ongoing playbook elapsed time and\n# ordered list of top 20 longest running tasks at end\n\nimport time\n\nfrom ansible.plugins.callback import CallbackBase\n\n# define start time\nt0 = tn = time.time()\n\ndef secondsToStr(t):\n # http://bytes.com/topic/python/answers/635958-handy-short-cut-formatting-elapsed-time-floating-point-seconds\n rediv = lambda ll, b: list(divmod(ll[0], b)) + ll[1:]\n return \"%d:%02d:%02d.%03d\" % tuple(reduce(rediv, [[t * 1000, ], 1000, 60, 60]))\n\n\ndef filled(msg, fchar=\"*\"):\n if len(msg) == 0:\n width = 79\n else:\n msg = \"%s \" % msg\n width = 79 - len(msg)\n if width < 3:\n width = 3\n filler = fchar * width\n return \"%s%s \" % (msg, filler)\n\n\ndef timestamp(self):\n if self.current is not None:\n self.stats[self.current] = time.time() - self.stats[self.current]\n\n\ndef tasktime():\n global tn\n time_current = time.strftime('%A %d %B %Y %H:%M:%S %z')\n time_elapsed = secondsToStr(time.time() - tn)\n time_total_elapsed = secondsToStr(time.time() - t0)\n display(filled('%s (%s)%s%s' % (time_current, time_elapsed, ' ' * 7, time_total_elapsed)))\n tn = time.time()\n\n\nclass CallbackModule(CallbackBase):\n \"\"\"\n This callback module provides per-task timing, ongoing playbook elapsed time \n and ordered list of top 20 longest running tasks at end.\n \"\"\"\n CALLBACK_VERSION = 2.0\n CALLBACK_TYPE = 'aggregate'\n CALLBACK_NAME = 'profile_tasks'\n \n def __init__(self, display):\n self.stats = {}\n self.current = None\n\n super(CallbackModule, self).__init__(display)\n\n\n def playbook_on_task_start(self, name, is_conditional):\n \"\"\"\n Logs the start of each task\n \"\"\"\n tasktime()\n timestamp(self)\n\n # Record the start time of the current task\n self.current = name\n self.stats[self.current] = time.time()\n\n def playbook_on_setup(self):\n tasktime()\n\n def playbook_on_stats(self, stats):\n tasktime()\n display(filled(\"\", fchar=\"=\"))\n\n timestamp(self)\n\n # Sort the tasks by their running time\n results = sorted(\n self.stats.items(),\n key=lambda value: value[1],\n reverse=True,\n )\n\n # Just keep the top 20\n results = results[:20]\n\n # Print the timings\n for name, elapsed in results:\n self.display.display(\n \"{0:-<70}{1:->9}\".format(\n '{0} '.format(name),\n ' {0:.02f}s'.format(elapsed),\n )\n )\n", "path": "lib/ansible/plugins/callback/profile_tasks.py"}]}
| 1,583 | 152 |
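For context on the record above: the devel-branch plugin loader only hands the `display` object to callbacks it recognises as 2.0-style, and the added class-level `CALLBACK_*` constants appear to be what signals that. A bare-bones skeleton of a callback plugin written to that pattern is below; it mirrors the devel-era API shown in the diff, while the plugin name and the counting logic are made up.

```python
# Skeleton of a 2.0-style callback plugin following the pattern applied to
# profile_tasks.py in the diff above (hypothetical name and behaviour).
from ansible.plugins.callback import CallbackBase


class CallbackModule(CallbackBase):
    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'aggregate'
    CALLBACK_NAME = 'task_counter'  # hypothetical plugin name

    def __init__(self, display):
        super(CallbackModule, self).__init__(display)
        self.task_count = 0

    def playbook_on_task_start(self, name, is_conditional):
        # Stand-in for the timing logic in profile_tasks: just count tasks.
        self.task_count += 1
        self.display.display("task #%d: %s" % (self.task_count, name))
```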
gh_patches_debug_8110
|
rasdani/github-patches
|
git_diff
|
OCA__bank-payment-48
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Crash in account_payment_purchase
When you create an invoice from an incoming products on v7 with account_payment_purchase:
File "/home/erp_super/erp/prod/src/addons/stock/stock.py", line 1153, in action_invoice_create
invoice_vals = self._prepare_invoice(cr, uid, picking, partner, inv_type, journal_id, context=context)
File "/home/erp_super/erp/prod/src/banking/account_payment_sale_stock/model/stock.py", line 34, in _prepare_invoice
cr, uid, picking, partner, inv_type, journal_id, context=context)
File "/home/erp_super/erp/prod/src/banking/account_payment_purchase/model/stock.py", line 38, in _prepare_invoice
picking.purchase_id.supplier_partner_bank.id or False,
File "/home/erp_super/erp/prod/src/server/openerp/osv/orm.py", line 504, in __getattr__
raise AttributeError(e)
AttributeError: "Field 'supplier_partner_bank' does not exist in object 'browse_record(purchase.order, 774)'"
It seems that, when we decided to change to datamodel of the account_payment_\* modules, the file account_payment_purchase/model/stock.py was not changed !!! I'll do a PR to fix this.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `account_payment_purchase/model/stock.py`
Content:
```
1 # -*- encoding: utf-8 -*-
2 ##############################################################################
3 #
4 # Account Payment Purchase module for OpenERP
5 # Copyright (C) 2014 Akretion (http://www.akretion.com)
6 # @author Alexis de Lattre <[email protected]>
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU Affero General Public License as
10 # published by the Free Software Foundation, either version 3 of the
11 # License, or (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU Affero General Public License for more details.
17 #
18 # You should have received a copy of the GNU Affero General Public License
19 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 #
21 ##############################################################################
22
23 from openerp.osv import orm
24
25
26 class stock_picking(orm.Model):
27 _inherit = "stock.picking"
28
29 def _prepare_invoice(
30 self, cr, uid, picking, partner, inv_type, journal_id,
31 context=None):
32 """Copy bank partner and payment type from PO to invoice"""
33 invoice_vals = super(stock_picking, self)._prepare_invoice(
34 cr, uid, picking, partner, inv_type, journal_id, context=context)
35 if picking.purchase_id:
36 invoice_vals.update({
37 'partner_bank_id':
38 picking.purchase_id.supplier_partner_bank.id or False,
39 'payment_mode_type':
40 picking.purchase_id.payment_mode_type.id or False,
41 })
42 return invoice_vals
43
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/account_payment_purchase/model/stock.py b/account_payment_purchase/model/stock.py
--- a/account_payment_purchase/model/stock.py
+++ b/account_payment_purchase/model/stock.py
@@ -35,8 +35,8 @@
if picking.purchase_id:
invoice_vals.update({
'partner_bank_id':
- picking.purchase_id.supplier_partner_bank.id or False,
- 'payment_mode_type':
- picking.purchase_id.payment_mode_type.id or False,
+ picking.purchase_id.supplier_partner_bank_id.id or False,
+ 'payment_mode_id':
+ picking.purchase_id.payment_mode_id.id or False,
})
return invoice_vals
|
{"golden_diff": "diff --git a/account_payment_purchase/model/stock.py b/account_payment_purchase/model/stock.py\n--- a/account_payment_purchase/model/stock.py\n+++ b/account_payment_purchase/model/stock.py\n@@ -35,8 +35,8 @@\n if picking.purchase_id:\n invoice_vals.update({\n 'partner_bank_id':\n- picking.purchase_id.supplier_partner_bank.id or False,\n- 'payment_mode_type':\n- picking.purchase_id.payment_mode_type.id or False,\n+ picking.purchase_id.supplier_partner_bank_id.id or False,\n+ 'payment_mode_id':\n+ picking.purchase_id.payment_mode_id.id or False,\n })\n return invoice_vals\n", "issue": "Crash in account_payment_purchase\nWhen you create an invoice from an incoming products on v7 with account_payment_purchase:\n\n File \"/home/erp_super/erp/prod/src/addons/stock/stock.py\", line 1153, in action_invoice_create\n invoice_vals = self._prepare_invoice(cr, uid, picking, partner, inv_type, journal_id, context=context)\n File \"/home/erp_super/erp/prod/src/banking/account_payment_sale_stock/model/stock.py\", line 34, in _prepare_invoice\n cr, uid, picking, partner, inv_type, journal_id, context=context)\n File \"/home/erp_super/erp/prod/src/banking/account_payment_purchase/model/stock.py\", line 38, in _prepare_invoice\n picking.purchase_id.supplier_partner_bank.id or False,\n File \"/home/erp_super/erp/prod/src/server/openerp/osv/orm.py\", line 504, in __getattr__\n raise AttributeError(e)\nAttributeError: \"Field 'supplier_partner_bank' does not exist in object 'browse_record(purchase.order, 774)'\"\n\nIt seems that, when we decided to change to datamodel of the account_payment_\\* modules, the file account_payment_purchase/model/stock.py was not changed !!! I'll do a PR to fix this.\n\n", "before_files": [{"content": "# -*- encoding: utf-8 -*-\n##############################################################################\n#\n# Account Payment Purchase module for OpenERP\n# Copyright (C) 2014 Akretion (http://www.akretion.com)\n# @author Alexis de Lattre <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n#\n# You should have received a copy of the GNU Affero General Public License\n# along with this program. 
If not, see <http://www.gnu.org/licenses/>.\n#\n##############################################################################\n\nfrom openerp.osv import orm\n\n\nclass stock_picking(orm.Model):\n _inherit = \"stock.picking\"\n\n def _prepare_invoice(\n self, cr, uid, picking, partner, inv_type, journal_id,\n context=None):\n \"\"\"Copy bank partner and payment type from PO to invoice\"\"\"\n invoice_vals = super(stock_picking, self)._prepare_invoice(\n cr, uid, picking, partner, inv_type, journal_id, context=context)\n if picking.purchase_id:\n invoice_vals.update({\n 'partner_bank_id':\n picking.purchase_id.supplier_partner_bank.id or False,\n 'payment_mode_type':\n picking.purchase_id.payment_mode_type.id or False,\n })\n return invoice_vals\n", "path": "account_payment_purchase/model/stock.py"}], "after_files": [{"content": "# -*- encoding: utf-8 -*-\n##############################################################################\n#\n# Account Payment Purchase module for OpenERP\n# Copyright (C) 2014 Akretion (http://www.akretion.com)\n# @author Alexis de Lattre <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n#\n# You should have received a copy of the GNU Affero General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n#\n##############################################################################\n\nfrom openerp.osv import orm\n\n\nclass stock_picking(orm.Model):\n _inherit = \"stock.picking\"\n\n def _prepare_invoice(\n self, cr, uid, picking, partner, inv_type, journal_id,\n context=None):\n \"\"\"Copy bank partner and payment type from PO to invoice\"\"\"\n invoice_vals = super(stock_picking, self)._prepare_invoice(\n cr, uid, picking, partner, inv_type, journal_id, context=context)\n if picking.purchase_id:\n invoice_vals.update({\n 'partner_bank_id':\n picking.purchase_id.supplier_partner_bank_id.id or False,\n 'payment_mode_id':\n picking.purchase_id.payment_mode_id.id or False,\n })\n return invoice_vals\n", "path": "account_payment_purchase/model/stock.py"}]}
| 997 | 141 |
gh_patches_debug_24830
|
rasdani/github-patches
|
git_diff
|
sanic-org__sanic-1501
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Module import fails when auto_reload is active
I have two piece of code (the structure has been simplified for clarity sake)
First in base.py
```
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# module: init0
from abc import ABCMeta
from sanic import Sanic
class BaseService( metaclass = ABCMeta ):
def create_app( self ) -> Sanic:
app = Sanic( __name__ )
return app
# app = BaseService().create_app()
# app.run( host = '0.0.0.0',
# port = 5000,
# debug = True,
# )
print('[DONE]')
```
Second in run_test.py
```
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# module: init0
from init0.base import BaseService
def main():
app = BaseService().create_app()
app.run( host = '0.0.0.0',
port = 5000,
debug = True,
)
return
if __name__ == '__main__':
main()
```
If I were to run `python -m init0.run_test` with `debug = False` then everything works perfectly, however if it's `debug = True`, then it'd throw me `ModuleNotFoundError: No module named 'init0'`
Is it some sort of loading error somewhere that I need to configure beforehand?
Thanks a lot in advance
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sanic/reloader_helpers.py`
Content:
```
1 import os
2 import signal
3 import subprocess
4 import sys
5
6 from multiprocessing import Process
7 from time import sleep
8
9
10 def _iter_module_files():
11 """This iterates over all relevant Python files.
12
13 It goes through all
14 loaded files from modules, all files in folders of already loaded modules
15 as well as all files reachable through a package.
16 """
17 # The list call is necessary on Python 3 in case the module
18 # dictionary modifies during iteration.
19 for module in list(sys.modules.values()):
20 if module is None:
21 continue
22 filename = getattr(module, "__file__", None)
23 if filename:
24 old = None
25 while not os.path.isfile(filename):
26 old = filename
27 filename = os.path.dirname(filename)
28 if filename == old:
29 break
30 else:
31 if filename[-4:] in (".pyc", ".pyo"):
32 filename = filename[:-1]
33 yield filename
34
35
36 def _get_args_for_reloading():
37 """Returns the executable."""
38 rv = [sys.executable]
39 rv.extend(sys.argv)
40 return rv
41
42
43 def restart_with_reloader():
44 """Create a new process and a subprocess in it with the same arguments as
45 this one.
46 """
47 args = _get_args_for_reloading()
48 new_environ = os.environ.copy()
49 new_environ["SANIC_SERVER_RUNNING"] = "true"
50 cmd = " ".join(args)
51 worker_process = Process(
52 target=subprocess.call,
53 args=(cmd,),
54 kwargs=dict(shell=True, env=new_environ),
55 )
56 worker_process.start()
57 return worker_process
58
59
60 def kill_process_children_unix(pid):
61 """Find and kill child processes of a process (maximum two level).
62
63 :param pid: PID of parent process (process ID)
64 :return: Nothing
65 """
66 root_process_path = "/proc/{pid}/task/{pid}/children".format(pid=pid)
67 if not os.path.isfile(root_process_path):
68 return
69 with open(root_process_path) as children_list_file:
70 children_list_pid = children_list_file.read().split()
71
72 for child_pid in children_list_pid:
73 children_proc_path = "/proc/%s/task/%s/children" % (
74 child_pid,
75 child_pid,
76 )
77 if not os.path.isfile(children_proc_path):
78 continue
79 with open(children_proc_path) as children_list_file_2:
80 children_list_pid_2 = children_list_file_2.read().split()
81 for _pid in children_list_pid_2:
82 try:
83 os.kill(int(_pid), signal.SIGTERM)
84 except ProcessLookupError:
85 continue
86 try:
87 os.kill(int(child_pid), signal.SIGTERM)
88 except ProcessLookupError:
89 continue
90
91
92 def kill_process_children_osx(pid):
93 """Find and kill child processes of a process.
94
95 :param pid: PID of parent process (process ID)
96 :return: Nothing
97 """
98 subprocess.run(["pkill", "-P", str(pid)])
99
100
101 def kill_process_children(pid):
102 """Find and kill child processes of a process.
103
104 :param pid: PID of parent process (process ID)
105 :return: Nothing
106 """
107 if sys.platform == "darwin":
108 kill_process_children_osx(pid)
109 elif sys.platform == "linux":
110 kill_process_children_unix(pid)
111 else:
112 pass # should signal error here
113
114
115 def kill_program_completly(proc):
116 """Kill worker and it's child processes and exit.
117
118 :param proc: worker process (process ID)
119 :return: Nothing
120 """
121 kill_process_children(proc.pid)
122 proc.terminate()
123 os._exit(0)
124
125
126 def watchdog(sleep_interval):
127 """Watch project files, restart worker process if a change happened.
128
129 :param sleep_interval: interval in second.
130 :return: Nothing
131 """
132 mtimes = {}
133 worker_process = restart_with_reloader()
134 signal.signal(
135 signal.SIGTERM, lambda *args: kill_program_completly(worker_process)
136 )
137 signal.signal(
138 signal.SIGINT, lambda *args: kill_program_completly(worker_process)
139 )
140 while True:
141 for filename in _iter_module_files():
142 try:
143 mtime = os.stat(filename).st_mtime
144 except OSError:
145 continue
146
147 old_time = mtimes.get(filename)
148 if old_time is None:
149 mtimes[filename] = mtime
150 continue
151 elif mtime > old_time:
152 kill_process_children(worker_process.pid)
153 worker_process.terminate()
154 worker_process = restart_with_reloader()
155 mtimes[filename] = mtime
156 break
157
158 sleep(sleep_interval)
159
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sanic/reloader_helpers.py b/sanic/reloader_helpers.py
--- a/sanic/reloader_helpers.py
+++ b/sanic/reloader_helpers.py
@@ -36,7 +36,15 @@
def _get_args_for_reloading():
"""Returns the executable."""
rv = [sys.executable]
- rv.extend(sys.argv)
+ main_module = sys.modules["__main__"]
+ mod_spec = getattr(main_module, "__spec__", None)
+ if mod_spec:
+ # Parent exe was launched as a module rather than a script
+ rv.extend(["-m", mod_spec.name])
+ if len(sys.argv) > 1:
+ rv.extend(sys.argv[1:])
+ else:
+ rv.extend(sys.argv)
return rv
@@ -44,6 +52,7 @@
"""Create a new process and a subprocess in it with the same arguments as
this one.
"""
+ cwd = os.getcwd()
args = _get_args_for_reloading()
new_environ = os.environ.copy()
new_environ["SANIC_SERVER_RUNNING"] = "true"
@@ -51,7 +60,7 @@
worker_process = Process(
target=subprocess.call,
args=(cmd,),
- kwargs=dict(shell=True, env=new_environ),
+ kwargs={"cwd": cwd, "shell": True, "env": new_environ},
)
worker_process.start()
return worker_process
|
{"golden_diff": "diff --git a/sanic/reloader_helpers.py b/sanic/reloader_helpers.py\n--- a/sanic/reloader_helpers.py\n+++ b/sanic/reloader_helpers.py\n@@ -36,7 +36,15 @@\n def _get_args_for_reloading():\n \"\"\"Returns the executable.\"\"\"\n rv = [sys.executable]\n- rv.extend(sys.argv)\n+ main_module = sys.modules[\"__main__\"]\n+ mod_spec = getattr(main_module, \"__spec__\", None)\n+ if mod_spec:\n+ # Parent exe was launched as a module rather than a script\n+ rv.extend([\"-m\", mod_spec.name])\n+ if len(sys.argv) > 1:\n+ rv.extend(sys.argv[1:])\n+ else:\n+ rv.extend(sys.argv)\n return rv\n \n \n@@ -44,6 +52,7 @@\n \"\"\"Create a new process and a subprocess in it with the same arguments as\n this one.\n \"\"\"\n+ cwd = os.getcwd()\n args = _get_args_for_reloading()\n new_environ = os.environ.copy()\n new_environ[\"SANIC_SERVER_RUNNING\"] = \"true\"\n@@ -51,7 +60,7 @@\n worker_process = Process(\n target=subprocess.call,\n args=(cmd,),\n- kwargs=dict(shell=True, env=new_environ),\n+ kwargs={\"cwd\": cwd, \"shell\": True, \"env\": new_environ},\n )\n worker_process.start()\n return worker_process\n", "issue": "Module import fails when auto_reload is active\nI have two piece of code (the structure has been simplified for clarity sake)\r\nFirst in base.py\r\n```\r\n#! /usr/bin/env python\r\n# -*- coding: utf-8 -*-\r\n# module: init0\r\n\r\nfrom abc import ABCMeta\r\nfrom sanic import Sanic\r\n\r\nclass BaseService( metaclass = ABCMeta ):\r\n def create_app( self ) -> Sanic:\r\n app = Sanic( __name__ )\r\n return app\r\n\r\n# app = BaseService().create_app()\r\n\r\n# app.run( host = '0.0.0.0',\r\n# port = 5000,\r\n# debug = True,\r\n# )\r\n\r\nprint('[DONE]')\r\n\r\n```\r\nSecond in run_test.py\r\n```\r\n#! /usr/bin/env python\r\n# -*- coding: utf-8 -*-\r\n# module: init0\r\n\r\nfrom init0.base import BaseService\r\n\r\ndef main():\r\n app = BaseService().create_app()\r\n app.run( host = '0.0.0.0',\r\n port = 5000,\r\n debug = True,\r\n )\r\n return\r\n\r\nif __name__ == '__main__':\r\n main()\r\n\r\n```\r\n\r\nIf I were to run `python -m init0.run_test` with `debug = False` then everything works perfectly, however if it's `debug = True`, then it'd throw me `ModuleNotFoundError: No module named 'init0'`\r\n\r\nIs it some sort of loading error somewhere that I need to configure beforehand?\r\n\r\nThanks a lot in advance\n", "before_files": [{"content": "import os\nimport signal\nimport subprocess\nimport sys\n\nfrom multiprocessing import Process\nfrom time import sleep\n\n\ndef _iter_module_files():\n \"\"\"This iterates over all relevant Python files.\n\n It goes through all\n loaded files from modules, all files in folders of already loaded modules\n as well as all files reachable through a package.\n \"\"\"\n # The list call is necessary on Python 3 in case the module\n # dictionary modifies during iteration.\n for module in list(sys.modules.values()):\n if module is None:\n continue\n filename = getattr(module, \"__file__\", None)\n if filename:\n old = None\n while not os.path.isfile(filename):\n old = filename\n filename = os.path.dirname(filename)\n if filename == old:\n break\n else:\n if filename[-4:] in (\".pyc\", \".pyo\"):\n filename = filename[:-1]\n yield filename\n\n\ndef _get_args_for_reloading():\n \"\"\"Returns the executable.\"\"\"\n rv = [sys.executable]\n rv.extend(sys.argv)\n return rv\n\n\ndef restart_with_reloader():\n \"\"\"Create a new process and a subprocess in it with the same arguments as\n this one.\n \"\"\"\n args = _get_args_for_reloading()\n new_environ = 
os.environ.copy()\n new_environ[\"SANIC_SERVER_RUNNING\"] = \"true\"\n cmd = \" \".join(args)\n worker_process = Process(\n target=subprocess.call,\n args=(cmd,),\n kwargs=dict(shell=True, env=new_environ),\n )\n worker_process.start()\n return worker_process\n\n\ndef kill_process_children_unix(pid):\n \"\"\"Find and kill child processes of a process (maximum two level).\n\n :param pid: PID of parent process (process ID)\n :return: Nothing\n \"\"\"\n root_process_path = \"/proc/{pid}/task/{pid}/children\".format(pid=pid)\n if not os.path.isfile(root_process_path):\n return\n with open(root_process_path) as children_list_file:\n children_list_pid = children_list_file.read().split()\n\n for child_pid in children_list_pid:\n children_proc_path = \"/proc/%s/task/%s/children\" % (\n child_pid,\n child_pid,\n )\n if not os.path.isfile(children_proc_path):\n continue\n with open(children_proc_path) as children_list_file_2:\n children_list_pid_2 = children_list_file_2.read().split()\n for _pid in children_list_pid_2:\n try:\n os.kill(int(_pid), signal.SIGTERM)\n except ProcessLookupError:\n continue\n try:\n os.kill(int(child_pid), signal.SIGTERM)\n except ProcessLookupError:\n continue\n\n\ndef kill_process_children_osx(pid):\n \"\"\"Find and kill child processes of a process.\n\n :param pid: PID of parent process (process ID)\n :return: Nothing\n \"\"\"\n subprocess.run([\"pkill\", \"-P\", str(pid)])\n\n\ndef kill_process_children(pid):\n \"\"\"Find and kill child processes of a process.\n\n :param pid: PID of parent process (process ID)\n :return: Nothing\n \"\"\"\n if sys.platform == \"darwin\":\n kill_process_children_osx(pid)\n elif sys.platform == \"linux\":\n kill_process_children_unix(pid)\n else:\n pass # should signal error here\n\n\ndef kill_program_completly(proc):\n \"\"\"Kill worker and it's child processes and exit.\n\n :param proc: worker process (process ID)\n :return: Nothing\n \"\"\"\n kill_process_children(proc.pid)\n proc.terminate()\n os._exit(0)\n\n\ndef watchdog(sleep_interval):\n \"\"\"Watch project files, restart worker process if a change happened.\n\n :param sleep_interval: interval in second.\n :return: Nothing\n \"\"\"\n mtimes = {}\n worker_process = restart_with_reloader()\n signal.signal(\n signal.SIGTERM, lambda *args: kill_program_completly(worker_process)\n )\n signal.signal(\n signal.SIGINT, lambda *args: kill_program_completly(worker_process)\n )\n while True:\n for filename in _iter_module_files():\n try:\n mtime = os.stat(filename).st_mtime\n except OSError:\n continue\n\n old_time = mtimes.get(filename)\n if old_time is None:\n mtimes[filename] = mtime\n continue\n elif mtime > old_time:\n kill_process_children(worker_process.pid)\n worker_process.terminate()\n worker_process = restart_with_reloader()\n mtimes[filename] = mtime\n break\n\n sleep(sleep_interval)\n", "path": "sanic/reloader_helpers.py"}], "after_files": [{"content": "import os\nimport signal\nimport subprocess\nimport sys\n\nfrom multiprocessing import Process\nfrom time import sleep\n\n\ndef _iter_module_files():\n \"\"\"This iterates over all relevant Python files.\n\n It goes through all\n loaded files from modules, all files in folders of already loaded modules\n as well as all files reachable through a package.\n \"\"\"\n # The list call is necessary on Python 3 in case the module\n # dictionary modifies during iteration.\n for module in list(sys.modules.values()):\n if module is None:\n continue\n filename = getattr(module, \"__file__\", None)\n if filename:\n old = None\n while not 
os.path.isfile(filename):\n old = filename\n filename = os.path.dirname(filename)\n if filename == old:\n break\n else:\n if filename[-4:] in (\".pyc\", \".pyo\"):\n filename = filename[:-1]\n yield filename\n\n\ndef _get_args_for_reloading():\n \"\"\"Returns the executable.\"\"\"\n rv = [sys.executable]\n main_module = sys.modules[\"__main__\"]\n mod_spec = getattr(main_module, \"__spec__\", None)\n if mod_spec:\n # Parent exe was launched as a module rather than a script\n rv.extend([\"-m\", mod_spec.name])\n if len(sys.argv) > 1:\n rv.extend(sys.argv[1:])\n else:\n rv.extend(sys.argv)\n return rv\n\n\ndef restart_with_reloader():\n \"\"\"Create a new process and a subprocess in it with the same arguments as\n this one.\n \"\"\"\n cwd = os.getcwd()\n args = _get_args_for_reloading()\n new_environ = os.environ.copy()\n new_environ[\"SANIC_SERVER_RUNNING\"] = \"true\"\n cmd = \" \".join(args)\n worker_process = Process(\n target=subprocess.call,\n args=(cmd,),\n kwargs={\"cwd\": cwd, \"shell\": True, \"env\": new_environ},\n )\n worker_process.start()\n return worker_process\n\n\ndef kill_process_children_unix(pid):\n \"\"\"Find and kill child processes of a process (maximum two level).\n\n :param pid: PID of parent process (process ID)\n :return: Nothing\n \"\"\"\n root_process_path = \"/proc/{pid}/task/{pid}/children\".format(pid=pid)\n if not os.path.isfile(root_process_path):\n return\n with open(root_process_path) as children_list_file:\n children_list_pid = children_list_file.read().split()\n\n for child_pid in children_list_pid:\n children_proc_path = \"/proc/%s/task/%s/children\" % (\n child_pid,\n child_pid,\n )\n if not os.path.isfile(children_proc_path):\n continue\n with open(children_proc_path) as children_list_file_2:\n children_list_pid_2 = children_list_file_2.read().split()\n for _pid in children_list_pid_2:\n try:\n os.kill(int(_pid), signal.SIGTERM)\n except ProcessLookupError:\n continue\n try:\n os.kill(int(child_pid), signal.SIGTERM)\n except ProcessLookupError:\n continue\n\n\ndef kill_process_children_osx(pid):\n \"\"\"Find and kill child processes of a process.\n\n :param pid: PID of parent process (process ID)\n :return: Nothing\n \"\"\"\n subprocess.run([\"pkill\", \"-P\", str(pid)])\n\n\ndef kill_process_children(pid):\n \"\"\"Find and kill child processes of a process.\n\n :param pid: PID of parent process (process ID)\n :return: Nothing\n \"\"\"\n if sys.platform == \"darwin\":\n kill_process_children_osx(pid)\n elif sys.platform == \"linux\":\n kill_process_children_unix(pid)\n else:\n pass # should signal error here\n\n\ndef kill_program_completly(proc):\n \"\"\"Kill worker and it's child processes and exit.\n\n :param proc: worker process (process ID)\n :return: Nothing\n \"\"\"\n kill_process_children(proc.pid)\n proc.terminate()\n os._exit(0)\n\n\ndef watchdog(sleep_interval):\n \"\"\"Watch project files, restart worker process if a change happened.\n\n :param sleep_interval: interval in second.\n :return: Nothing\n \"\"\"\n mtimes = {}\n worker_process = restart_with_reloader()\n signal.signal(\n signal.SIGTERM, lambda *args: kill_program_completly(worker_process)\n )\n signal.signal(\n signal.SIGINT, lambda *args: kill_program_completly(worker_process)\n )\n while True:\n for filename in _iter_module_files():\n try:\n mtime = os.stat(filename).st_mtime\n except OSError:\n continue\n\n old_time = mtimes.get(filename)\n if old_time is None:\n mtimes[filename] = mtime\n continue\n elif mtime > old_time:\n kill_process_children(worker_process.pid)\n 
worker_process.terminate()\n worker_process = restart_with_reloader()\n mtimes[filename] = mtime\n break\n\n sleep(sleep_interval)\n", "path": "sanic/reloader_helpers.py"}]}
| 1,952 | 321 |
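The core of the sanic fix recorded above is that the auto-reloader must restart the parent process exactly as it was launched: with `-m package.module` when it was started as a module, or with the original script path otherwise. A self-contained sketch of that detection step follows (same idea as the patched `_get_args_for_reloading`, simplified for illustration; it is not the full sanic module):

```python
import sys


def get_restart_args():
    # Rebuild the command line that started this process so a reloader can
    # spawn an identical child.
    args = [sys.executable]
    spec = getattr(sys.modules["__main__"], "__spec__", None)
    if spec is not None:
        # Launched as `python -m pkg.module`; go through -m again so that
        # package-relative imports resolve in the child process.
        args.extend(["-m", spec.name])
        args.extend(sys.argv[1:])
    else:
        # Launched as `python path/to/script.py`; argv already starts with
        # the script path.
        args.extend(sys.argv)
    return args


if __name__ == "__main__":
    print(get_restart_args())
```

The actual patch additionally passes the original working directory (`cwd=os.getcwd()`) to the restarted subprocess, which is what keeps a package launch such as `python -m init0.run_test` importable after a reload.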
gh_patches_debug_19014
|
rasdani/github-patches
|
git_diff
|
mne-tools__mne-python-9070
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
use bibtex in plot_sensor_noise_level.py
convert references in `examples/visualization/plot_sensor_noise_level.py` to use footcite / footbibliography
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/visualization/plot_sensor_noise_level.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 """
3 ======================================
4 Show noise levels from empty room data
5 ======================================
6
7 This shows how to use :meth:`mne.io.Raw.plot_psd` to examine noise levels
8 of systems. See [1]_ for an example.
9
10 References
11 ----------
12 .. [1] Khan S, Cohen D (2013). Note: Magnetic noise from the inner wall of
13 a magnetically shielded room. Review of Scientific Instruments 84:56101.
14 https://doi.org/10.1063/1.4802845
15 """
16 # Author: Eric Larson <[email protected]>
17 #
18 # License: BSD (3-clause)
19
20 import os.path as op
21 import mne
22
23 data_path = mne.datasets.sample.data_path()
24
25 raw_erm = mne.io.read_raw_fif(op.join(data_path, 'MEG', 'sample',
26 'ernoise_raw.fif'), preload=True)
27
28 ###############################################################################
29 # We can plot the absolute noise levels:
30 raw_erm.plot_psd(tmax=10., average=True, spatial_colors=False,
31 dB=False, xscale='log')
32
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/examples/visualization/plot_sensor_noise_level.py b/examples/visualization/plot_sensor_noise_level.py
--- a/examples/visualization/plot_sensor_noise_level.py
+++ b/examples/visualization/plot_sensor_noise_level.py
@@ -5,13 +5,7 @@
======================================
This shows how to use :meth:`mne.io.Raw.plot_psd` to examine noise levels
-of systems. See [1]_ for an example.
-
-References
-----------
-.. [1] Khan S, Cohen D (2013). Note: Magnetic noise from the inner wall of
- a magnetically shielded room. Review of Scientific Instruments 84:56101.
- https://doi.org/10.1063/1.4802845
+of systems. See :footcite:`KhanCohen2013` for an example.
"""
# Author: Eric Larson <[email protected]>
#
@@ -29,3 +23,8 @@
# We can plot the absolute noise levels:
raw_erm.plot_psd(tmax=10., average=True, spatial_colors=False,
dB=False, xscale='log')
+###############################################################################
+# References
+# ----------
+#
+# .. footbibliography::
|
{"golden_diff": "diff --git a/examples/visualization/plot_sensor_noise_level.py b/examples/visualization/plot_sensor_noise_level.py\n--- a/examples/visualization/plot_sensor_noise_level.py\n+++ b/examples/visualization/plot_sensor_noise_level.py\n@@ -5,13 +5,7 @@\n ======================================\n \n This shows how to use :meth:`mne.io.Raw.plot_psd` to examine noise levels\n-of systems. See [1]_ for an example.\n-\n-References\n-----------\n-.. [1] Khan S, Cohen D (2013). Note: Magnetic noise from the inner wall of\n- a magnetically shielded room. Review of Scientific Instruments 84:56101.\n- https://doi.org/10.1063/1.4802845\n+of systems. See :footcite:`KhanCohen2013` for an example.\n \"\"\"\n # Author: Eric Larson <[email protected]>\n #\n@@ -29,3 +23,8 @@\n # We can plot the absolute noise levels:\n raw_erm.plot_psd(tmax=10., average=True, spatial_colors=False,\n dB=False, xscale='log')\n+###############################################################################\n+# References\n+# ----------\n+#\n+# .. footbibliography::\n", "issue": "use bibtex in plot_sensor_noise_level.py\nconvert references in `examples/visualization/plot_sensor_noise_level.py` to use footcite / footbibliography\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\n======================================\nShow noise levels from empty room data\n======================================\n\nThis shows how to use :meth:`mne.io.Raw.plot_psd` to examine noise levels\nof systems. See [1]_ for an example.\n\nReferences\n----------\n.. [1] Khan S, Cohen D (2013). Note: Magnetic noise from the inner wall of\n a magnetically shielded room. Review of Scientific Instruments 84:56101.\n https://doi.org/10.1063/1.4802845\n\"\"\"\n# Author: Eric Larson <[email protected]>\n#\n# License: BSD (3-clause)\n\nimport os.path as op\nimport mne\n\ndata_path = mne.datasets.sample.data_path()\n\nraw_erm = mne.io.read_raw_fif(op.join(data_path, 'MEG', 'sample',\n 'ernoise_raw.fif'), preload=True)\n\n###############################################################################\n# We can plot the absolute noise levels:\nraw_erm.plot_psd(tmax=10., average=True, spatial_colors=False,\n dB=False, xscale='log')\n", "path": "examples/visualization/plot_sensor_noise_level.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\n======================================\nShow noise levels from empty room data\n======================================\n\nThis shows how to use :meth:`mne.io.Raw.plot_psd` to examine noise levels\nof systems. See :footcite:`KhanCohen2013` for an example.\n\"\"\"\n# Author: Eric Larson <[email protected]>\n#\n# License: BSD (3-clause)\n\nimport os.path as op\nimport mne\n\ndata_path = mne.datasets.sample.data_path()\n\nraw_erm = mne.io.read_raw_fif(op.join(data_path, 'MEG', 'sample',\n 'ernoise_raw.fif'), preload=True)\n\n###############################################################################\n# We can plot the absolute noise levels:\nraw_erm.plot_psd(tmax=10., average=True, spatial_colors=False,\n dB=False, xscale='log')\n###############################################################################\n# References\n# ----------\n#\n# .. footbibliography::\n", "path": "examples/visualization/plot_sensor_noise_level.py"}]}
| 606 | 277 |
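The mne change above is mechanical: the hand-written references block in the example docstring is replaced by a `:footcite:` role, and a commented `.. footbibliography::` section is appended at the end of the script. Reduced to a skeleton, the pattern looks like the sketch below (docstring text and citation key come from the diff; it assumes the project's Sphinx build has the bibtex extension configured and that the key exists in the shared bibliography):

```python
"""
Show noise levels from empty room data
======================================

This shows how to use :meth:`mne.io.Raw.plot_psd` to examine noise levels
of systems. See :footcite:`KhanCohen2013` for an example.
"""

# ... example code unchanged ...

###############################################################################
# References
# ----------
#
# .. footbibliography::
```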
gh_patches_debug_60781
|
rasdani/github-patches
|
git_diff
|
pypa__cibuildwheel-1065
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Docs] Example may accidentally be encouraging users to write invalid `pyproject.toml` files
### Description
Hi guys, first of all thank you for the amazing project (always good to remember).
While I was working on adding support for PEP 621 to setuptools, I noticed a series of users having problems with invalid `pyproject.toml` files. The common pattern seems to be an almost empty `[project]` table with only a `requires-python` field set, which is invalid according to PEP 621.
It took me some time to find the reason for this behaviour but I think it comes from:
https://cibuildwheel.readthedocs.io/en/stable/options/#requires-python
I suspect that the example and notes about the preferred way of setting the config have been encouraging users who are unaware of PEP 621 to write technically invalid `pyproject.toml` files.
Please note that this issue is not necessarily related to setuptools itself.
The existence of the `[project]` table in the `pyproject.toml` allows (according to the standard) the installer/builder/consumer program to treat the package differently (especially regarding `dynamic`).
I think it would be nice to at least add a note about this so users become aware of the implications of adding a `[project]` table.
### Build log
_No response_
### CI config
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import setup
2
3 extras = {
4 "docs": [
5 "mkdocs-include-markdown-plugin==2.8.0",
6 "mkdocs==1.0.4",
7 "pymdown-extensions",
8 "mkdocs-macros-plugin",
9 ],
10 "test": [
11 "jinja2",
12 "pytest>=6",
13 "pytest-timeout",
14 "pytest-xdist",
15 ],
16 "bin": [
17 "click",
18 "ghapi",
19 "pip-tools",
20 "pygithub",
21 "pyyaml",
22 "requests",
23 "rich>=9.6",
24 "packaging>=21.0",
25 ],
26 "mypy": [
27 "mypy>=0.901",
28 "types-jinja2",
29 "types-certifi",
30 "types-toml",
31 "types-jinja2",
32 "types-pyyaml",
33 "types-click",
34 "types-requests",
35 ],
36 }
37
38 extras["dev"] = [
39 *extras["mypy"],
40 *extras["test"],
41 *extras["bin"],
42 ]
43
44 extras["all"] = sum(extras.values(), [])
45
46 setup(extras_require=extras)
47
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -4,6 +4,7 @@
"docs": [
"mkdocs-include-markdown-plugin==2.8.0",
"mkdocs==1.0.4",
+ "jinja2==3.0.3",
"pymdown-extensions",
"mkdocs-macros-plugin",
],
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -4,6 +4,7 @@\n \"docs\": [\n \"mkdocs-include-markdown-plugin==2.8.0\",\n \"mkdocs==1.0.4\",\n+ \"jinja2==3.0.3\",\n \"pymdown-extensions\",\n \"mkdocs-macros-plugin\",\n ],\n", "issue": "[Docs] Example may accidentally be encouraging users to write invalid `pyproject.toml` files\n### Description\r\n\r\nHi guys, first of all thank you for the amazing project (always good to remember).\r\n\r\nWhile I was working on adding support for PEP 621 to setuptools, I noticed a series of users having problems with invalid `pyproject.toml` files. The common pattern seem to be a almost empty `[project]` table with only a `requires-python` field set, which is invalid according to PEP 621.\r\n\r\nIt took me some time to find the reason for this behaviour but I think it comes from:\r\nhttps://cibuildwheel.readthedocs.io/en/stable/options/#requires-python\r\n\r\nI suspect that the example and notes about the preferred way of setting the config has been encouraging users that are unaware of PEP 621 to write technically invalid `pyproject.toml` files.\r\n\r\nPlease note that this issue is not necessarily related to setuptools itself.\r\nThe existence of the `[project]` table in the `pyproject.toml` allows (according to the standard) the installer/builder/consumer program to treat the package differently (specially regarding `dynamic`).\r\n\r\nI think it would be nice to at least add a note about this so users became aware of the implications of adding a `[project]` table.\r\n\r\n### Build log\r\n\r\n_No response_\r\n\r\n### CI config\r\n\r\n_No response_\n", "before_files": [{"content": "from setuptools import setup\n\nextras = {\n \"docs\": [\n \"mkdocs-include-markdown-plugin==2.8.0\",\n \"mkdocs==1.0.4\",\n \"pymdown-extensions\",\n \"mkdocs-macros-plugin\",\n ],\n \"test\": [\n \"jinja2\",\n \"pytest>=6\",\n \"pytest-timeout\",\n \"pytest-xdist\",\n ],\n \"bin\": [\n \"click\",\n \"ghapi\",\n \"pip-tools\",\n \"pygithub\",\n \"pyyaml\",\n \"requests\",\n \"rich>=9.6\",\n \"packaging>=21.0\",\n ],\n \"mypy\": [\n \"mypy>=0.901\",\n \"types-jinja2\",\n \"types-certifi\",\n \"types-toml\",\n \"types-jinja2\",\n \"types-pyyaml\",\n \"types-click\",\n \"types-requests\",\n ],\n}\n\nextras[\"dev\"] = [\n *extras[\"mypy\"],\n *extras[\"test\"],\n *extras[\"bin\"],\n]\n\nextras[\"all\"] = sum(extras.values(), [])\n\nsetup(extras_require=extras)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import setup\n\nextras = {\n \"docs\": [\n \"mkdocs-include-markdown-plugin==2.8.0\",\n \"mkdocs==1.0.4\",\n \"jinja2==3.0.3\",\n \"pymdown-extensions\",\n \"mkdocs-macros-plugin\",\n ],\n \"test\": [\n \"jinja2\",\n \"pytest>=6\",\n \"pytest-timeout\",\n \"pytest-xdist\",\n ],\n \"bin\": [\n \"click\",\n \"ghapi\",\n \"pip-tools\",\n \"pygithub\",\n \"pyyaml\",\n \"requests\",\n \"rich>=9.6\",\n \"packaging>=21.0\",\n ],\n \"mypy\": [\n \"mypy>=0.901\",\n \"types-jinja2\",\n \"types-certifi\",\n \"types-toml\",\n \"types-jinja2\",\n \"types-pyyaml\",\n \"types-click\",\n \"types-requests\",\n ],\n}\n\nextras[\"dev\"] = [\n *extras[\"mypy\"],\n *extras[\"test\"],\n *extras[\"bin\"],\n]\n\nextras[\"all\"] = sum(extras.values(), [])\n\nsetup(extras_require=extras)\n", "path": "setup.py"}]}
| 889 | 94 |
gh_patches_debug_30748
|
rasdani/github-patches
|
git_diff
|
OpenNMT__OpenNMT-py-1301
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Ensemble decoding is broken
@flauted
Ensemble decoding is not tested, I think, and it is broken on master.
onmt/decoders/ensemble.py", line 133, in load_test_model
if field is not None and 'vocab' in field.__dict__:
AttributeError: 'list' object has no attribute '__dict__'
To replicate, just run the translate command with the same model passed twice to the -model argument: -model x x
Thanks.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `onmt/decoders/ensemble.py`
Content:
```
1 """
2 Ensemble decoding.
3
4 Decodes using multiple models simultaneously,
5 combining their prediction distributions by averaging.
6 All models in the ensemble must share a target vocabulary.
7 """
8
9 import torch
10 import torch.nn as nn
11
12 from onmt.encoders.encoder import EncoderBase
13 from onmt.models import NMTModel
14 import onmt.model_builder
15
16
17 class EnsembleDecoderOutput(object):
18 """ Wrapper around multiple decoder final hidden states """
19 def __init__(self, model_dec_outs):
20 self.model_dec_outs = tuple(model_dec_outs)
21
22 def squeeze(self, dim=None):
23 """
24 Delegate squeeze to avoid modifying
25 :obj:`Translator.translate_batch()`
26 """
27 return EnsembleDecoderOutput([
28 x.squeeze(dim) for x in self.model_dec_outs])
29
30 def __getitem__(self, index):
31 return self.model_dec_outs[index]
32
33
34 class EnsembleEncoder(EncoderBase):
35 """ Dummy Encoder that delegates to individual real Encoders """
36 def __init__(self, model_encoders):
37 super(EnsembleEncoder, self).__init__()
38 self.model_encoders = nn.ModuleList(model_encoders)
39
40 def forward(self, src, lengths=None):
41 enc_hidden, memory_bank, _ = zip(*[
42 model_encoder(src, lengths)
43 for model_encoder in self.model_encoders])
44 return enc_hidden, memory_bank, lengths
45
46
47 class EnsembleDecoder(nn.Module):
48 """ Dummy Decoder that delegates to individual real Decoders """
49 def __init__(self, model_decoders):
50 super(EnsembleDecoder, self).__init__()
51 self.model_decoders = nn.ModuleList(model_decoders)
52
53 def forward(self, tgt, memory_bank, memory_lengths=None, step=None):
54 """ See :obj:`RNNDecoderBase.forward()` """
55 # Memory_lengths is a single tensor shared between all models.
56 # This assumption will not hold if Translator is modified
57 # to calculate memory_lengths as something other than the length
58 # of the input.
59 dec_outs, attns = zip(*[
60 model_decoder(
61 tgt, memory_bank[i], memory_lengths, step=step)
62 for i, model_decoder in enumerate(self.model_decoders)])
63 mean_attns = self.combine_attns(attns)
64 return EnsembleDecoderOutput(dec_outs), mean_attns
65
66 def combine_attns(self, attns):
67 result = {}
68 for key in attns[0].keys():
69 result[key] = torch.stack([attn[key] for attn in attns]).mean(0)
70 return result
71
72 def init_state(self, src, memory_bank, enc_hidden):
73 """ See :obj:`RNNDecoderBase.init_state()` """
74 for i, model_decoder in enumerate(self.model_decoders):
75 model_decoder.init_state(src, memory_bank[i], enc_hidden[i])
76
77 def map_state(self, fn):
78 for model_decoder in self.model_decoders:
79 model_decoder.map_state(fn)
80
81
82 class EnsembleGenerator(nn.Module):
83 """
84 Dummy Generator that delegates to individual real Generators,
85 and then averages the resulting target distributions.
86 """
87 def __init__(self, model_generators, raw_probs=False):
88 super(EnsembleGenerator, self).__init__()
89 self.model_generators = nn.ModuleList(model_generators)
90 self._raw_probs = raw_probs
91
92 def forward(self, hidden, attn=None, src_map=None):
93 """
94 Compute a distribution over the target dictionary
95 by averaging distributions from models in the ensemble.
96 All models in the ensemble must share a target vocabulary.
97 """
98 distributions = torch.stack(
99 [mg(h) if attn is None else mg(h, attn, src_map)
100 for h, mg in zip(hidden, self.model_generators)]
101 )
102 if self._raw_probs:
103 return torch.log(torch.exp(distributions).mean(0))
104 else:
105 return distributions.mean(0)
106
107
108 class EnsembleModel(NMTModel):
109 """ Dummy NMTModel wrapping individual real NMTModels """
110 def __init__(self, models, raw_probs=False):
111 encoder = EnsembleEncoder(model.encoder for model in models)
112 decoder = EnsembleDecoder(model.decoder for model in models)
113 super(EnsembleModel, self).__init__(encoder, decoder)
114 self.generator = EnsembleGenerator(
115 [model.generator for model in models], raw_probs)
116 self.models = nn.ModuleList(models)
117
118
119 def load_test_model(opt):
120 """ Read in multiple models for ensemble """
121 shared_fields = None
122 shared_model_opt = None
123 models = []
124 for model_path in opt.models:
125 fields, model, model_opt = \
126 onmt.model_builder.load_test_model(opt, model_path=model_path)
127 if shared_fields is None:
128 shared_fields = fields
129 else:
130 for key, field in fields.items():
131 if field is not None and 'vocab' in field.__dict__:
132 assert field.vocab.stoi == shared_fields[key].vocab.stoi, \
133 'Ensemble models must use the same preprocessed data'
134 models.append(model)
135 if shared_model_opt is None:
136 shared_model_opt = model_opt
137 ensemble_model = EnsembleModel(models, opt.avg_raw_probs)
138 return shared_fields, ensemble_model, shared_model_opt
139
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/onmt/decoders/ensemble.py b/onmt/decoders/ensemble.py
--- a/onmt/decoders/ensemble.py
+++ b/onmt/decoders/ensemble.py
@@ -58,7 +58,8 @@
# of the input.
dec_outs, attns = zip(*[
model_decoder(
- tgt, memory_bank[i], memory_lengths, step=step)
+ tgt, memory_bank[i],
+ memory_lengths=memory_lengths, step=step)
for i, model_decoder in enumerate(self.model_decoders)])
mean_attns = self.combine_attns(attns)
return EnsembleDecoderOutput(dec_outs), mean_attns
@@ -130,9 +131,25 @@
shared_fields = fields
else:
for key, field in fields.items():
- if field is not None and 'vocab' in field.__dict__:
- assert field.vocab.stoi == shared_fields[key].vocab.stoi, \
- 'Ensemble models must use the same preprocessed data'
+ assert len(field) == 1
+ n, f = field[0]
+ try:
+ f_iter = iter(f)
+ except TypeError:
+ f_iter = [(n, f)]
+ for sn, sf in f_iter:
+ if sf is not None and 'vocab' in sf.__dict__:
+ sh_field = shared_fields[key]
+ assert len(sh_field) == 1
+ sh_n, sh_f = sh_field[0]
+ try:
+ sh_f_iter = iter(sh_f)
+ except TypeError:
+ sh_f_iter = [(sh_n, sh_f)]
+ sh_f_dict = dict(sh_f_iter)
+ assert sf.vocab.stoi == sh_f_dict[sn].vocab.stoi, \
+ "Ensemble models must use the same " \
+ "preprocessed data"
models.append(model)
if shared_model_opt is None:
shared_model_opt = model_opt
|
{"golden_diff": "diff --git a/onmt/decoders/ensemble.py b/onmt/decoders/ensemble.py\n--- a/onmt/decoders/ensemble.py\n+++ b/onmt/decoders/ensemble.py\n@@ -58,7 +58,8 @@\n # of the input.\n dec_outs, attns = zip(*[\n model_decoder(\n- tgt, memory_bank[i], memory_lengths, step=step)\n+ tgt, memory_bank[i],\n+ memory_lengths=memory_lengths, step=step)\n for i, model_decoder in enumerate(self.model_decoders)])\n mean_attns = self.combine_attns(attns)\n return EnsembleDecoderOutput(dec_outs), mean_attns\n@@ -130,9 +131,25 @@\n shared_fields = fields\n else:\n for key, field in fields.items():\n- if field is not None and 'vocab' in field.__dict__:\n- assert field.vocab.stoi == shared_fields[key].vocab.stoi, \\\n- 'Ensemble models must use the same preprocessed data'\n+ assert len(field) == 1\n+ n, f = field[0]\n+ try:\n+ f_iter = iter(f)\n+ except TypeError:\n+ f_iter = [(n, f)]\n+ for sn, sf in f_iter:\n+ if sf is not None and 'vocab' in sf.__dict__:\n+ sh_field = shared_fields[key]\n+ assert len(sh_field) == 1\n+ sh_n, sh_f = sh_field[0]\n+ try:\n+ sh_f_iter = iter(sh_f)\n+ except TypeError:\n+ sh_f_iter = [(sh_n, sh_f)]\n+ sh_f_dict = dict(sh_f_iter)\n+ assert sf.vocab.stoi == sh_f_dict[sn].vocab.stoi, \\\n+ \"Ensemble models must use the same \" \\\n+ \"preprocessed data\"\n models.append(model)\n if shared_model_opt is None:\n shared_model_opt = model_opt\n", "issue": "Ensemble decoding is broken\n@flauted \r\nEnsemble decoding is not tested I think and it is broken on master.\r\nonmt/decoders/ensemble.py\", line 133, in load_test_model\r\n if field is not None and 'vocab' in field.__dict__:\r\nAttributeError: 'list' object has no attribute '__dict__'\r\n\r\nTo replicate, just run the translate command with the same model twice in argument -model x x\r\n\r\nThanks.\n", "before_files": [{"content": "\"\"\"\nEnsemble decoding.\n\nDecodes using multiple models simultaneously,\ncombining their prediction distributions by averaging.\nAll models in the ensemble must share a target vocabulary.\n\"\"\"\n\nimport torch\nimport torch.nn as nn\n\nfrom onmt.encoders.encoder import EncoderBase\nfrom onmt.models import NMTModel\nimport onmt.model_builder\n\n\nclass EnsembleDecoderOutput(object):\n \"\"\" Wrapper around multiple decoder final hidden states \"\"\"\n def __init__(self, model_dec_outs):\n self.model_dec_outs = tuple(model_dec_outs)\n\n def squeeze(self, dim=None):\n \"\"\"\n Delegate squeeze to avoid modifying\n :obj:`Translator.translate_batch()`\n \"\"\"\n return EnsembleDecoderOutput([\n x.squeeze(dim) for x in self.model_dec_outs])\n\n def __getitem__(self, index):\n return self.model_dec_outs[index]\n\n\nclass EnsembleEncoder(EncoderBase):\n \"\"\" Dummy Encoder that delegates to individual real Encoders \"\"\"\n def __init__(self, model_encoders):\n super(EnsembleEncoder, self).__init__()\n self.model_encoders = nn.ModuleList(model_encoders)\n\n def forward(self, src, lengths=None):\n enc_hidden, memory_bank, _ = zip(*[\n model_encoder(src, lengths)\n for model_encoder in self.model_encoders])\n return enc_hidden, memory_bank, lengths\n\n\nclass EnsembleDecoder(nn.Module):\n \"\"\" Dummy Decoder that delegates to individual real Decoders \"\"\"\n def __init__(self, model_decoders):\n super(EnsembleDecoder, self).__init__()\n self.model_decoders = nn.ModuleList(model_decoders)\n\n def forward(self, tgt, memory_bank, memory_lengths=None, step=None):\n \"\"\" See :obj:`RNNDecoderBase.forward()` \"\"\"\n # Memory_lengths is a single tensor shared between all models.\n # This assumption 
will not hold if Translator is modified\n # to calculate memory_lengths as something other than the length\n # of the input.\n dec_outs, attns = zip(*[\n model_decoder(\n tgt, memory_bank[i], memory_lengths, step=step)\n for i, model_decoder in enumerate(self.model_decoders)])\n mean_attns = self.combine_attns(attns)\n return EnsembleDecoderOutput(dec_outs), mean_attns\n\n def combine_attns(self, attns):\n result = {}\n for key in attns[0].keys():\n result[key] = torch.stack([attn[key] for attn in attns]).mean(0)\n return result\n\n def init_state(self, src, memory_bank, enc_hidden):\n \"\"\" See :obj:`RNNDecoderBase.init_state()` \"\"\"\n for i, model_decoder in enumerate(self.model_decoders):\n model_decoder.init_state(src, memory_bank[i], enc_hidden[i])\n\n def map_state(self, fn):\n for model_decoder in self.model_decoders:\n model_decoder.map_state(fn)\n\n\nclass EnsembleGenerator(nn.Module):\n \"\"\"\n Dummy Generator that delegates to individual real Generators,\n and then averages the resulting target distributions.\n \"\"\"\n def __init__(self, model_generators, raw_probs=False):\n super(EnsembleGenerator, self).__init__()\n self.model_generators = nn.ModuleList(model_generators)\n self._raw_probs = raw_probs\n\n def forward(self, hidden, attn=None, src_map=None):\n \"\"\"\n Compute a distribution over the target dictionary\n by averaging distributions from models in the ensemble.\n All models in the ensemble must share a target vocabulary.\n \"\"\"\n distributions = torch.stack(\n [mg(h) if attn is None else mg(h, attn, src_map)\n for h, mg in zip(hidden, self.model_generators)]\n )\n if self._raw_probs:\n return torch.log(torch.exp(distributions).mean(0))\n else:\n return distributions.mean(0)\n\n\nclass EnsembleModel(NMTModel):\n \"\"\" Dummy NMTModel wrapping individual real NMTModels \"\"\"\n def __init__(self, models, raw_probs=False):\n encoder = EnsembleEncoder(model.encoder for model in models)\n decoder = EnsembleDecoder(model.decoder for model in models)\n super(EnsembleModel, self).__init__(encoder, decoder)\n self.generator = EnsembleGenerator(\n [model.generator for model in models], raw_probs)\n self.models = nn.ModuleList(models)\n\n\ndef load_test_model(opt):\n \"\"\" Read in multiple models for ensemble \"\"\"\n shared_fields = None\n shared_model_opt = None\n models = []\n for model_path in opt.models:\n fields, model, model_opt = \\\n onmt.model_builder.load_test_model(opt, model_path=model_path)\n if shared_fields is None:\n shared_fields = fields\n else:\n for key, field in fields.items():\n if field is not None and 'vocab' in field.__dict__:\n assert field.vocab.stoi == shared_fields[key].vocab.stoi, \\\n 'Ensemble models must use the same preprocessed data'\n models.append(model)\n if shared_model_opt is None:\n shared_model_opt = model_opt\n ensemble_model = EnsembleModel(models, opt.avg_raw_probs)\n return shared_fields, ensemble_model, shared_model_opt\n", "path": "onmt/decoders/ensemble.py"}], "after_files": [{"content": "\"\"\"\nEnsemble decoding.\n\nDecodes using multiple models simultaneously,\ncombining their prediction distributions by averaging.\nAll models in the ensemble must share a target vocabulary.\n\"\"\"\n\nimport torch\nimport torch.nn as nn\n\nfrom onmt.encoders.encoder import EncoderBase\nfrom onmt.models import NMTModel\nimport onmt.model_builder\n\n\nclass EnsembleDecoderOutput(object):\n \"\"\" Wrapper around multiple decoder final hidden states \"\"\"\n def __init__(self, model_dec_outs):\n self.model_dec_outs = 
tuple(model_dec_outs)\n\n def squeeze(self, dim=None):\n \"\"\"\n Delegate squeeze to avoid modifying\n :obj:`Translator.translate_batch()`\n \"\"\"\n return EnsembleDecoderOutput([\n x.squeeze(dim) for x in self.model_dec_outs])\n\n def __getitem__(self, index):\n return self.model_dec_outs[index]\n\n\nclass EnsembleEncoder(EncoderBase):\n \"\"\" Dummy Encoder that delegates to individual real Encoders \"\"\"\n def __init__(self, model_encoders):\n super(EnsembleEncoder, self).__init__()\n self.model_encoders = nn.ModuleList(model_encoders)\n\n def forward(self, src, lengths=None):\n enc_hidden, memory_bank, _ = zip(*[\n model_encoder(src, lengths)\n for model_encoder in self.model_encoders])\n return enc_hidden, memory_bank, lengths\n\n\nclass EnsembleDecoder(nn.Module):\n \"\"\" Dummy Decoder that delegates to individual real Decoders \"\"\"\n def __init__(self, model_decoders):\n super(EnsembleDecoder, self).__init__()\n self.model_decoders = nn.ModuleList(model_decoders)\n\n def forward(self, tgt, memory_bank, memory_lengths=None, step=None):\n \"\"\" See :obj:`RNNDecoderBase.forward()` \"\"\"\n # Memory_lengths is a single tensor shared between all models.\n # This assumption will not hold if Translator is modified\n # to calculate memory_lengths as something other than the length\n # of the input.\n dec_outs, attns = zip(*[\n model_decoder(\n tgt, memory_bank[i],\n memory_lengths=memory_lengths, step=step)\n for i, model_decoder in enumerate(self.model_decoders)])\n mean_attns = self.combine_attns(attns)\n return EnsembleDecoderOutput(dec_outs), mean_attns\n\n def combine_attns(self, attns):\n result = {}\n for key in attns[0].keys():\n result[key] = torch.stack([attn[key] for attn in attns]).mean(0)\n return result\n\n def init_state(self, src, memory_bank, enc_hidden):\n \"\"\" See :obj:`RNNDecoderBase.init_state()` \"\"\"\n for i, model_decoder in enumerate(self.model_decoders):\n model_decoder.init_state(src, memory_bank[i], enc_hidden[i])\n\n def map_state(self, fn):\n for model_decoder in self.model_decoders:\n model_decoder.map_state(fn)\n\n\nclass EnsembleGenerator(nn.Module):\n \"\"\"\n Dummy Generator that delegates to individual real Generators,\n and then averages the resulting target distributions.\n \"\"\"\n def __init__(self, model_generators, raw_probs=False):\n super(EnsembleGenerator, self).__init__()\n self.model_generators = nn.ModuleList(model_generators)\n self._raw_probs = raw_probs\n\n def forward(self, hidden, attn=None, src_map=None):\n \"\"\"\n Compute a distribution over the target dictionary\n by averaging distributions from models in the ensemble.\n All models in the ensemble must share a target vocabulary.\n \"\"\"\n distributions = torch.stack(\n [mg(h) if attn is None else mg(h, attn, src_map)\n for h, mg in zip(hidden, self.model_generators)]\n )\n if self._raw_probs:\n return torch.log(torch.exp(distributions).mean(0))\n else:\n return distributions.mean(0)\n\n\nclass EnsembleModel(NMTModel):\n \"\"\" Dummy NMTModel wrapping individual real NMTModels \"\"\"\n def __init__(self, models, raw_probs=False):\n encoder = EnsembleEncoder(model.encoder for model in models)\n decoder = EnsembleDecoder(model.decoder for model in models)\n super(EnsembleModel, self).__init__(encoder, decoder)\n self.generator = EnsembleGenerator(\n [model.generator for model in models], raw_probs)\n self.models = nn.ModuleList(models)\n\n\ndef load_test_model(opt, dummy_opt):\n \"\"\" Read in multiple models for ensemble \"\"\"\n shared_fields = None\n shared_model_opt = 
None\n models = []\n for model_path in opt.models:\n fields, model, model_opt = \\\n onmt.model_builder.load_test_model(opt,\n dummy_opt,\n model_path=model_path)\n if shared_fields is None:\n shared_fields = fields\n else:\n for key, field in fields.items():\n assert len(field) == 1\n n, f = field[0]\n try:\n f_iter = iter(f)\n except TypeError:\n f_iter = [(n, f)]\n for sn, sf in f_iter:\n if sf is not None and 'vocab' in sf.__dict__:\n sh_field = shared_fields[key]\n assert len(sh_field) == 1\n sh_n, sh_f = sh_field[0]\n try:\n sh_f_iter = iter(sh_f)\n except TypeError:\n sh_f_iter = [(sh_n, sh_f)]\n sh_f_dict = dict(sh_f_iter)\n assert sf.vocab.stoi == sh_f_dict[sn].vocab.stoi, \\\n \"Ensemble models must use the same \" \\\n \"preprocessed data\"\n models.append(model)\n if shared_model_opt is None:\n shared_model_opt = model_opt\n ensemble_model = EnsembleModel(models, opt.avg_raw_probs)\n return shared_fields, ensemble_model, shared_model_opt\n", "path": "onmt/decoders/ensemble.py"}]}
| 1,774 | 444 |
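The patched `load_test_model` in the record above is hard to read because each entry in `fields` is now a one-element list holding a `(name, field)` pair, and the field itself may be a multi-field that yields further `(name, field)` pairs. Below is a commented restatement of just the vocabulary check, pulled out as a helper function; it mirrors the diff's logic for readability and is not part of the actual OpenNMT module:

```python
def assert_same_vocab(field, shared_field):
    # Each value in `fields` is a one-element list of (name, field) pairs.
    assert len(field) == 1 and len(shared_field) == 1
    name, f = field[0]
    sh_name, sh_f = shared_field[0]

    # A field may itself be iterable (a multi-field of sub-fields); wrap
    # plain fields so both cases walk the same loop.
    try:
        f_iter = list(iter(f))
    except TypeError:
        f_iter = [(name, f)]
    try:
        sh_dict = dict(iter(sh_f))
    except TypeError:
        sh_dict = {sh_name: sh_f}

    for sub_name, sub_field in f_iter:
        if sub_field is not None and 'vocab' in sub_field.__dict__:
            assert sub_field.vocab.stoi == sh_dict[sub_name].vocab.stoi, \
                "Ensemble models must use the same preprocessed data"
```

The other half of that diff is independent of the crash in the traceback: it simply passes `memory_lengths` by keyword when delegating to each model's decoder.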
gh_patches_debug_19725
|
rasdani/github-patches
|
git_diff
|
streamlink__streamlink-3247
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Can't find "tv3cat" plugin
## Plugin Issue
<!-- Replace [ ] with [x] in order to check the box -->
- [ X ] This is a plugin issue and I have read the contribution guidelines.
### Description
<!-- Explain the plugin issue as thoroughly as you can. -->
A image is worth a thousand words.

### Reproduction steps
<!-- How can we reproduce this? Please note the exact steps below using the list format supplied. If you need more steps please add them. -->
I'm just using the "python3-streamlink" package provided by Fedora 32
Thanks!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/streamlink/plugins/tv3cat.py`
Content:
```
1 import logging
2 import re
3
4 from streamlink.plugin import Plugin, PluginError
5 from streamlink.stream import HLSStream
6 from streamlink.plugin.api import validate
7
8 log = logging.getLogger(__name__)
9
10
11 class TV3Cat(Plugin):
12 _url_re = re.compile(r"http://(?:www.)?ccma.cat/tv3/directe/(.+?)/")
13 _stream_info_url = "http://dinamics.ccma.cat/pvideo/media.jsp" \
14 "?media=video&version=0s&idint={ident}&profile=pc&desplacament=0"
15 _media_schema = validate.Schema({
16 "geo": validate.text,
17 "url": validate.url(scheme=validate.any("http", "https"))
18 })
19 _channel_schema = validate.Schema({
20 "media": validate.any([_media_schema], _media_schema)},
21 validate.get("media"),
22 # If there is only one item, it's not a list ... silly
23 validate.transform(lambda x: x if isinstance(x, list) else [x])
24 )
25
26 @classmethod
27 def can_handle_url(cls, url):
28 return cls._url_re.match(url) is not None
29
30 def _get_streams(self):
31 match = self._url_re.match(self.url)
32 if match:
33 ident = match.group(1)
34 data_url = self._stream_info_url.format(ident=ident)
35 stream_infos = self.session.http.json(self.session.http.get(data_url), schema=self._channel_schema)
36
37 for stream in stream_infos:
38 try:
39 return HLSStream.parse_variant_playlist(self.session, stream['url'], name_fmt="{pixels}_{bitrate}")
40 except PluginError:
41 log.debug("Failed to get streams for: {0}".format(stream['geo']))
42 pass
43
44
45 __plugin__ = TV3Cat
46
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/streamlink/plugins/tv3cat.py b/src/streamlink/plugins/tv3cat.py
--- a/src/streamlink/plugins/tv3cat.py
+++ b/src/streamlink/plugins/tv3cat.py
@@ -9,7 +9,7 @@
class TV3Cat(Plugin):
- _url_re = re.compile(r"http://(?:www.)?ccma.cat/tv3/directe/(.+?)/")
+ _url_re = re.compile(r"https?://(?:www\.)?ccma\.cat/tv3/directe/(.+?)/")
_stream_info_url = "http://dinamics.ccma.cat/pvideo/media.jsp" \
"?media=video&version=0s&idint={ident}&profile=pc&desplacament=0"
_media_schema = validate.Schema({
@@ -39,7 +39,6 @@
return HLSStream.parse_variant_playlist(self.session, stream['url'], name_fmt="{pixels}_{bitrate}")
except PluginError:
log.debug("Failed to get streams for: {0}".format(stream['geo']))
- pass
__plugin__ = TV3Cat
|
{"golden_diff": "diff --git a/src/streamlink/plugins/tv3cat.py b/src/streamlink/plugins/tv3cat.py\n--- a/src/streamlink/plugins/tv3cat.py\n+++ b/src/streamlink/plugins/tv3cat.py\n@@ -9,7 +9,7 @@\n \n \n class TV3Cat(Plugin):\n- _url_re = re.compile(r\"http://(?:www.)?ccma.cat/tv3/directe/(.+?)/\")\n+ _url_re = re.compile(r\"https?://(?:www\\.)?ccma\\.cat/tv3/directe/(.+?)/\")\n _stream_info_url = \"http://dinamics.ccma.cat/pvideo/media.jsp\" \\\n \"?media=video&version=0s&idint={ident}&profile=pc&desplacament=0\"\n _media_schema = validate.Schema({\n@@ -39,7 +39,6 @@\n return HLSStream.parse_variant_playlist(self.session, stream['url'], name_fmt=\"{pixels}_{bitrate}\")\n except PluginError:\n log.debug(\"Failed to get streams for: {0}\".format(stream['geo']))\n- pass\n \n \n __plugin__ = TV3Cat\n", "issue": "Can't find \"tv3cat\" plugin\n## Plugin Issue\r\n\r\n<!-- Replace [ ] with [x] in order to check the box -->\r\n- [ X ] This is a plugin issue and I have read the contribution guidelines.\r\n\r\n\r\n### Description\r\n\r\n<!-- Explain the plugin issue as thoroughly as you can. -->\r\n\r\nA image is worth a thousand words.\r\n\r\n\r\n\r\n### Reproduction steps \r\n\r\n<!-- How can we reproduce this? Please note the exact steps below using the list format supplied. If you need more steps please add them. -->\r\n\r\nI've just using the \"python3-streamlink\" package provided by Fedora 32\r\n\r\nThanks!\r\n\n", "before_files": [{"content": "import logging\nimport re\n\nfrom streamlink.plugin import Plugin, PluginError\nfrom streamlink.stream import HLSStream\nfrom streamlink.plugin.api import validate\n\nlog = logging.getLogger(__name__)\n\n\nclass TV3Cat(Plugin):\n _url_re = re.compile(r\"http://(?:www.)?ccma.cat/tv3/directe/(.+?)/\")\n _stream_info_url = \"http://dinamics.ccma.cat/pvideo/media.jsp\" \\\n \"?media=video&version=0s&idint={ident}&profile=pc&desplacament=0\"\n _media_schema = validate.Schema({\n \"geo\": validate.text,\n \"url\": validate.url(scheme=validate.any(\"http\", \"https\"))\n })\n _channel_schema = validate.Schema({\n \"media\": validate.any([_media_schema], _media_schema)},\n validate.get(\"media\"),\n # If there is only one item, it's not a list ... 
silly\n validate.transform(lambda x: x if isinstance(x, list) else [x])\n )\n\n @classmethod\n def can_handle_url(cls, url):\n return cls._url_re.match(url) is not None\n\n def _get_streams(self):\n match = self._url_re.match(self.url)\n if match:\n ident = match.group(1)\n data_url = self._stream_info_url.format(ident=ident)\n stream_infos = self.session.http.json(self.session.http.get(data_url), schema=self._channel_schema)\n\n for stream in stream_infos:\n try:\n return HLSStream.parse_variant_playlist(self.session, stream['url'], name_fmt=\"{pixels}_{bitrate}\")\n except PluginError:\n log.debug(\"Failed to get streams for: {0}\".format(stream['geo']))\n pass\n\n\n__plugin__ = TV3Cat\n", "path": "src/streamlink/plugins/tv3cat.py"}], "after_files": [{"content": "import logging\nimport re\n\nfrom streamlink.plugin import Plugin, PluginError\nfrom streamlink.stream import HLSStream\nfrom streamlink.plugin.api import validate\n\nlog = logging.getLogger(__name__)\n\n\nclass TV3Cat(Plugin):\n _url_re = re.compile(r\"https?://(?:www\\.)?ccma\\.cat/tv3/directe/(.+?)/\")\n _stream_info_url = \"http://dinamics.ccma.cat/pvideo/media.jsp\" \\\n \"?media=video&version=0s&idint={ident}&profile=pc&desplacament=0\"\n _media_schema = validate.Schema({\n \"geo\": validate.text,\n \"url\": validate.url(scheme=validate.any(\"http\", \"https\"))\n })\n _channel_schema = validate.Schema({\n \"media\": validate.any([_media_schema], _media_schema)},\n validate.get(\"media\"),\n # If there is only one item, it's not a list ... silly\n validate.transform(lambda x: x if isinstance(x, list) else [x])\n )\n\n @classmethod\n def can_handle_url(cls, url):\n return cls._url_re.match(url) is not None\n\n def _get_streams(self):\n match = self._url_re.match(self.url)\n if match:\n ident = match.group(1)\n data_url = self._stream_info_url.format(ident=ident)\n stream_infos = self.session.http.json(self.session.http.get(data_url), schema=self._channel_schema)\n\n for stream in stream_infos:\n try:\n return HLSStream.parse_variant_playlist(self.session, stream['url'], name_fmt=\"{pixels}_{bitrate}\")\n except PluginError:\n log.debug(\"Failed to get streams for: {0}\".format(stream['geo']))\n\n\n__plugin__ = TV3Cat\n", "path": "src/streamlink/plugins/tv3cat.py"}]}
| 923 | 251 |
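The entire streamlink fix above is one regular expression: the old pattern only accepted a literal `http://` prefix and left the dots unescaped, so an `https://www.ccma.cat/...` address is rejected with "No plugin can handle URL". A quick comparison of the two patterns (both copied from the diff; the sample URL is only illustrative):

```python
import re

old = re.compile(r"http://(?:www.)?ccma.cat/tv3/directe/(.+?)/")
new = re.compile(r"https?://(?:www\.)?ccma\.cat/tv3/directe/(.+?)/")

url = "https://www.ccma.cat/tv3/directe/tv3/"
print(bool(old.match(url)))     # False: the https scheme is not matched
print(bool(new.match(url)))     # True
print(new.match(url).group(1))  # 'tv3', the ident used to build the stream info URL
```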
gh_patches_debug_67164
|
rasdani/github-patches
|
git_diff
|
keras-team__keras-2268
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Textual information for labels?
I seem unable to use text for labels, whilst using to_categorical
```
Using Theano backend.
Traceback (most recent call last):
File "playground.py", line 88, in <module>
train_model_and_test(number_of_epochs, number_of_classes, train_data, train_label, augmented_data_generator)
File "playground.py", line 62, in train_model_and_test
train_label = np_utils.to_categorical(train_label, number_of_classes)
File "/usr/local/lib/python2.7/dist-packages/keras/utils/np_utils.py", line 12, in to_categorical
y = np.asarray(y, dtype='int32')
File "/usr/lib/python2.7/dist-packages/numpy/core/numeric.py", line 460, in asarray
return array(a, dtype, copy=False, order=order)
ValueError: invalid literal for int() with base 10: 'yellow'
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `keras/utils/np_utils.py`
Content:
```
1 from __future__ import absolute_import
2 import numpy as np
3 import scipy as sp
4 from six.moves import range
5 from six.moves import zip
6
7
8 def to_categorical(y, nb_classes=None):
9 '''Convert class vector (integers from 0 to nb_classes)
10 to binary class matrix, for use with categorical_crossentropy.
11 '''
12 y = np.asarray(y, dtype='int32')
13 if not nb_classes:
14 nb_classes = np.max(y)+1
15 Y = np.zeros((len(y), nb_classes))
16 for i in range(len(y)):
17 Y[i, y[i]] = 1.
18 return Y
19
20
21 def normalize(a, axis=-1, order=2):
22 l2 = np.atleast_1d(np.linalg.norm(a, order, axis))
23 l2[l2 == 0] = 1
24 return a / np.expand_dims(l2, axis)
25
26
27 def binary_logloss(p, y):
28 epsilon = 1e-15
29 p = sp.maximum(epsilon, p)
30 p = sp.minimum(1-epsilon, p)
31 res = sum(y * sp.log(p) + sp.subtract(1, y) * sp.log(sp.subtract(1, p)))
32 res *= -1.0/len(y)
33 return res
34
35
36 def multiclass_logloss(P, Y):
37 npreds = [P[i][Y[i]-1] for i in range(len(Y))]
38 score = -(1. / len(Y)) * np.sum(np.log(npreds))
39 return score
40
41
42 def accuracy(p, y):
43 return np.mean([a == b for a, b in zip(p, y)])
44
45
46 def probas_to_classes(y_pred):
47 if len(y_pred.shape) > 1 and y_pred.shape[1] > 1:
48 return categorical_probas_to_classes(y_pred)
49 return np.array([1 if p > 0.5 else 0 for p in y_pred])
50
51
52 def categorical_probas_to_classes(p):
53 return np.argmax(p, axis=1)
54
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/keras/utils/np_utils.py b/keras/utils/np_utils.py
--- a/keras/utils/np_utils.py
+++ b/keras/utils/np_utils.py
@@ -9,7 +9,6 @@
'''Convert class vector (integers from 0 to nb_classes)
to binary class matrix, for use with categorical_crossentropy.
'''
- y = np.asarray(y, dtype='int32')
if not nb_classes:
nb_classes = np.max(y)+1
Y = np.zeros((len(y), nb_classes))
|
{"golden_diff": "diff --git a/keras/utils/np_utils.py b/keras/utils/np_utils.py\n--- a/keras/utils/np_utils.py\n+++ b/keras/utils/np_utils.py\n@@ -9,7 +9,6 @@\n '''Convert class vector (integers from 0 to nb_classes)\n to binary class matrix, for use with categorical_crossentropy.\n '''\n- y = np.asarray(y, dtype='int32')\n if not nb_classes:\n nb_classes = np.max(y)+1\n Y = np.zeros((len(y), nb_classes))\n", "issue": "Textual information for labels?\nI seem unable to use text for labels, whilst using to_categorical\n\n```\nUsing Theano backend.\nTraceback (most recent call last):\n File \"playground.py\", line 88, in <module>\n train_model_and_test(number_of_epochs, number_of_classes, train_data, train_label, augmented_data_generator)\n File \"playground.py\", line 62, in train_model_and_test\n train_label = np_utils.to_categorical(train_label, number_of_classes)\n File \"/usr/local/lib/python2.7/dist-packages/keras/utils/np_utils.py\", line 12, in to_categorical\n y = np.asarray(y, dtype='int32')\n File \"/usr/lib/python2.7/dist-packages/numpy/core/numeric.py\", line 460, in asarray\n return array(a, dtype, copy=False, order=order)\nValueError: invalid literal for int() with base 10: 'yellow'\n```\n\n", "before_files": [{"content": "from __future__ import absolute_import\nimport numpy as np\nimport scipy as sp\nfrom six.moves import range\nfrom six.moves import zip\n\n\ndef to_categorical(y, nb_classes=None):\n '''Convert class vector (integers from 0 to nb_classes)\n to binary class matrix, for use with categorical_crossentropy.\n '''\n y = np.asarray(y, dtype='int32')\n if not nb_classes:\n nb_classes = np.max(y)+1\n Y = np.zeros((len(y), nb_classes))\n for i in range(len(y)):\n Y[i, y[i]] = 1.\n return Y\n\n\ndef normalize(a, axis=-1, order=2):\n l2 = np.atleast_1d(np.linalg.norm(a, order, axis))\n l2[l2 == 0] = 1\n return a / np.expand_dims(l2, axis)\n\n\ndef binary_logloss(p, y):\n epsilon = 1e-15\n p = sp.maximum(epsilon, p)\n p = sp.minimum(1-epsilon, p)\n res = sum(y * sp.log(p) + sp.subtract(1, y) * sp.log(sp.subtract(1, p)))\n res *= -1.0/len(y)\n return res\n\n\ndef multiclass_logloss(P, Y):\n npreds = [P[i][Y[i]-1] for i in range(len(Y))]\n score = -(1. 
/ len(Y)) * np.sum(np.log(npreds))\n return score\n\n\ndef accuracy(p, y):\n return np.mean([a == b for a, b in zip(p, y)])\n\n\ndef probas_to_classes(y_pred):\n if len(y_pred.shape) > 1 and y_pred.shape[1] > 1:\n return categorical_probas_to_classes(y_pred)\n return np.array([1 if p > 0.5 else 0 for p in y_pred])\n\n\ndef categorical_probas_to_classes(p):\n return np.argmax(p, axis=1)\n", "path": "keras/utils/np_utils.py"}], "after_files": [{"content": "from __future__ import absolute_import\nimport numpy as np\nimport scipy as sp\nfrom six.moves import range\nfrom six.moves import zip\n\n\ndef to_categorical(y, nb_classes=None):\n '''Convert class vector (integers from 0 to nb_classes)\n to binary class matrix, for use with categorical_crossentropy.\n '''\n if not nb_classes:\n nb_classes = np.max(y)+1\n Y = np.zeros((len(y), nb_classes))\n for i in range(len(y)):\n Y[i, y[i]] = 1.\n return Y\n\n\ndef normalize(a, axis=-1, order=2):\n l2 = np.atleast_1d(np.linalg.norm(a, order, axis))\n l2[l2 == 0] = 1\n return a / np.expand_dims(l2, axis)\n\n\ndef binary_logloss(p, y):\n epsilon = 1e-15\n p = sp.maximum(epsilon, p)\n p = sp.minimum(1-epsilon, p)\n res = sum(y * sp.log(p) + sp.subtract(1, y) * sp.log(sp.subtract(1, p)))\n res *= -1.0/len(y)\n return res\n\n\ndef multiclass_logloss(P, Y):\n npreds = [P[i][Y[i]-1] for i in range(len(Y))]\n score = -(1. / len(Y)) * np.sum(np.log(npreds))\n return score\n\n\ndef accuracy(p, y):\n return np.mean([a == b for a, b in zip(p, y)])\n\n\ndef probas_to_classes(y_pred):\n if len(y_pred.shape) > 1 and y_pred.shape[1] > 1:\n return categorical_probas_to_classes(y_pred)\n return np.array([1 if p > 0.5 else 0 for p in y_pred])\n\n\ndef categorical_probas_to_classes(p):\n return np.argmax(p, axis=1)\n", "path": "keras/utils/np_utils.py"}]}
| 1,010 | 125 |
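Worth noting for the keras record above: the patch only removes the forced `int32` cast, so `to_categorical` still uses the label values as array indices, and purely textual labels such as `'yellow'` must be converted to integers first. A minimal standalone sketch of that preprocessing step is shown below (it is not part of the patch itself):

```python
import numpy as np


def encode_labels(labels):
    # Map each distinct textual label to a stable integer index.
    classes = sorted(set(labels))
    index = {name: i for i, name in enumerate(classes)}
    return np.array([index[name] for name in labels]), classes


def to_categorical(y, nb_classes=None):
    # Same indexing logic as the patched keras helper.
    if not nb_classes:
        nb_classes = np.max(y) + 1
    Y = np.zeros((len(y), nb_classes))
    for i in range(len(y)):
        Y[i, y[i]] = 1.
    return Y


labels = ['yellow', 'blue', 'yellow', 'red']
y, classes = encode_labels(labels)
print(classes)                            # ['blue', 'red', 'yellow']
print(to_categorical(y, len(classes)))    # one-hot rows in that class order
```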
gh_patches_debug_32399
|
rasdani/github-patches
|
git_diff
|
buildbot__buildbot-5169
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
buildbot-2.6.0.gitarchive.tar.gz is not generated correctly
```
<rjarry> tardyp: the .gitarchive.tar.gz file of release 2.6.0 is corrupted
16:57:31 <rjarry> buildbot-v2.5.1.gitarchive.tar.gz
16:57:31 <rjarry> 4.69 MB
16:57:31 <rjarry> buildbot-v2.5.1.gitarchive.tar.gz.sig
16:57:31 <rjarry> 310 Bytes
16:57:47 <rjarry> last one does not
16:57:49 <rjarry> buildbot-2.6.0.gitarchive.tar.gz
16:57:49 <rjarry> 15 Bytes
16:57:49 <rjarry> buildbot-2.6.0.gitarchive.tar.gz.sig
16:57:49 <rjarry> 310 Bytes
```
We shall regenerate the archive and signature manually.
preferably @p12tic, who originally signed the rest of the release
This archive is used by Debian packaging, as it requires everything to be built from source
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `common/download_release.py`
Content:
```
1 #!/usr/bin/env python3
2
3 import os
4
5 import requests
6 import yaml
7
8
9 def download(url, fn):
10 print(url, fn)
11 if os.path.exists(fn):
12 return
13 with open(fn, 'wb') as f:
14 r = s.get(url, stream=True)
15 for c in r.iter_content(1024):
16 f.write(c)
17
18
19 def main():
20 global s
21 with open(os.path.expanduser("~/.config/hub")) as f:
22 conf = yaml.safe_load(f)
23 token = conf['github.com'][0]['oauth_token']
24
25 s = requests.Session()
26 s.headers.update({'Authorization': 'token ' + token})
27 r = s.get("https://api.github.com/repos/buildbot/buildbot/releases/latest")
28 r.raise_for_status()
29 r = r.json()
30 tag = r['name']
31 upload_url = r['upload_url'].split('{')[0]
32 assets = s.get("https://api.github.com/repos/buildbot/buildbot/releases/{id}/assets".format(id=r['id']))
33 assets.raise_for_status()
34 assets = assets.json()
35 os.system("mkdir -p dist")
36 for url in (a['browser_download_url'] for a in assets):
37 if url.endswith(".whl") or url.endswith(".tar.gz"):
38 fn = os.path.join('dist', url.split('/')[-1])
39 download(url, fn)
40 # download tag archive
41 url = "https://github.com/buildbot/buildbot/archive/{tag}.tar.gz".format(tag=tag)
42 fn = os.path.join('dist', "buildbot-{tag}.gitarchive.tar.gz".format(tag=tag))
43 download(url, fn)
44 sigfn = fn + ".sig"
45 if os.path.exists(sigfn):
46 os.unlink(sigfn)
47 # sign the tag archive for debian
48 os.system("gpg --output {} -b {}".format(sigfn, fn))
49 sigfnbase = os.path.basename(sigfn)
50 r = s.post(upload_url,
51 headers={'Content-Type': "application/pgp-signature"},
52 params={"name": sigfnbase},
53 data=open(sigfn, 'rb'))
54 print(r.content)
55 fnbase = os.path.basename(fn)
56 r = s.post(upload_url,
57 headers={'Content-Type': "application/gzip"},
58 params={"name": fnbase},
59 data=open(fn, 'rb'))
60 print(r.content)
61 # remove files so that twine upload do not upload them
62 os.unlink(sigfn)
63 os.unlink(fn)
64
65
66 if __name__ == '__main__':
67 main()
68
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/common/download_release.py b/common/download_release.py
--- a/common/download_release.py
+++ b/common/download_release.py
@@ -6,18 +6,19 @@
import yaml
-def download(url, fn):
- print(url, fn)
+def download(session, url, fn):
if os.path.exists(fn):
- return
+ print('Removing old file {}'.format(fn))
+ os.unlink(fn)
+ print('Downloading {} from {}'.format(fn, url))
with open(fn, 'wb') as f:
- r = s.get(url, stream=True)
+ r = session.get(url, stream=True)
+ r.raise_for_status()
for c in r.iter_content(1024):
f.write(c)
def main():
- global s
with open(os.path.expanduser("~/.config/hub")) as f:
conf = yaml.safe_load(f)
token = conf['github.com'][0]['oauth_token']
@@ -32,15 +33,15 @@
assets = s.get("https://api.github.com/repos/buildbot/buildbot/releases/{id}/assets".format(id=r['id']))
assets.raise_for_status()
assets = assets.json()
- os.system("mkdir -p dist")
+ os.makedirs('dist', exist_ok=True)
for url in (a['browser_download_url'] for a in assets):
if url.endswith(".whl") or url.endswith(".tar.gz"):
fn = os.path.join('dist', url.split('/')[-1])
- download(url, fn)
+ download(s, url, fn)
# download tag archive
url = "https://github.com/buildbot/buildbot/archive/{tag}.tar.gz".format(tag=tag)
fn = os.path.join('dist', "buildbot-{tag}.gitarchive.tar.gz".format(tag=tag))
- download(url, fn)
+ download(s, url, fn)
sigfn = fn + ".sig"
if os.path.exists(sigfn):
os.unlink(sigfn)
|
{"golden_diff": "diff --git a/common/download_release.py b/common/download_release.py\n--- a/common/download_release.py\n+++ b/common/download_release.py\n@@ -6,18 +6,19 @@\n import yaml\n \n \n-def download(url, fn):\n- print(url, fn)\n+def download(session, url, fn):\n if os.path.exists(fn):\n- return\n+ print('Removing old file {}'.format(fn))\n+ os.unlink(fn)\n+ print('Downloading {} from {}'.format(fn, url))\n with open(fn, 'wb') as f:\n- r = s.get(url, stream=True)\n+ r = session.get(url, stream=True)\n+ r.raise_for_status()\n for c in r.iter_content(1024):\n f.write(c)\n \n \n def main():\n- global s\n with open(os.path.expanduser(\"~/.config/hub\")) as f:\n conf = yaml.safe_load(f)\n token = conf['github.com'][0]['oauth_token']\n@@ -32,15 +33,15 @@\n assets = s.get(\"https://api.github.com/repos/buildbot/buildbot/releases/{id}/assets\".format(id=r['id']))\n assets.raise_for_status()\n assets = assets.json()\n- os.system(\"mkdir -p dist\")\n+ os.makedirs('dist', exist_ok=True)\n for url in (a['browser_download_url'] for a in assets):\n if url.endswith(\".whl\") or url.endswith(\".tar.gz\"):\n fn = os.path.join('dist', url.split('/')[-1])\n- download(url, fn)\n+ download(s, url, fn)\n # download tag archive\n url = \"https://github.com/buildbot/buildbot/archive/{tag}.tar.gz\".format(tag=tag)\n fn = os.path.join('dist', \"buildbot-{tag}.gitarchive.tar.gz\".format(tag=tag))\n- download(url, fn)\n+ download(s, url, fn)\n sigfn = fn + \".sig\"\n if os.path.exists(sigfn):\n os.unlink(sigfn)\n", "issue": "buildbot-2.6.0.gitarchive.tar.gz is not generated correctly\n```\r\n<rjarry> tardyp: the .gitarchive.tar.gz file of release 2.6.0 is corrupted\r\n16:57:31 <rjarry> buildbot-v2.5.1.gitarchive.tar.gz\r\n16:57:31 <rjarry> 4.69 MB\r\n16:57:31 <rjarry> buildbot-v2.5.1.gitarchive.tar.gz.sig\r\n16:57:31 <rjarry> 310 Bytes\r\n16:57:47 <rjarry> last one does not\r\n16:57:49 <rjarry> buildbot-2.6.0.gitarchive.tar.gz\r\n16:57:49 <rjarry> 15 Bytes\r\n16:57:49 <rjarry> buildbot-2.6.0.gitarchive.tar.gz.sig\r\n16:57:49 <rjarry> 310 Bytes\r\n```\r\nWe shall regenerate the archive and signature manually.\r\npreferably @p12tic , which originaly signed the rest of the release\r\n\r\nThis archive is used by debian packaging as they require to build everything from source\n", "before_files": [{"content": "#!/usr/bin/env python3\n\nimport os\n\nimport requests\nimport yaml\n\n\ndef download(url, fn):\n print(url, fn)\n if os.path.exists(fn):\n return\n with open(fn, 'wb') as f:\n r = s.get(url, stream=True)\n for c in r.iter_content(1024):\n f.write(c)\n\n\ndef main():\n global s\n with open(os.path.expanduser(\"~/.config/hub\")) as f:\n conf = yaml.safe_load(f)\n token = conf['github.com'][0]['oauth_token']\n\n s = requests.Session()\n s.headers.update({'Authorization': 'token ' + token})\n r = s.get(\"https://api.github.com/repos/buildbot/buildbot/releases/latest\")\n r.raise_for_status()\n r = r.json()\n tag = r['name']\n upload_url = r['upload_url'].split('{')[0]\n assets = s.get(\"https://api.github.com/repos/buildbot/buildbot/releases/{id}/assets\".format(id=r['id']))\n assets.raise_for_status()\n assets = assets.json()\n os.system(\"mkdir -p dist\")\n for url in (a['browser_download_url'] for a in assets):\n if url.endswith(\".whl\") or url.endswith(\".tar.gz\"):\n fn = os.path.join('dist', url.split('/')[-1])\n download(url, fn)\n # download tag archive\n url = \"https://github.com/buildbot/buildbot/archive/{tag}.tar.gz\".format(tag=tag)\n fn = os.path.join('dist', 
\"buildbot-{tag}.gitarchive.tar.gz\".format(tag=tag))\n download(url, fn)\n sigfn = fn + \".sig\"\n if os.path.exists(sigfn):\n os.unlink(sigfn)\n # sign the tag archive for debian\n os.system(\"gpg --output {} -b {}\".format(sigfn, fn))\n sigfnbase = os.path.basename(sigfn)\n r = s.post(upload_url,\n headers={'Content-Type': \"application/pgp-signature\"},\n params={\"name\": sigfnbase},\n data=open(sigfn, 'rb'))\n print(r.content)\n fnbase = os.path.basename(fn)\n r = s.post(upload_url,\n headers={'Content-Type': \"application/gzip\"},\n params={\"name\": fnbase},\n data=open(fn, 'rb'))\n print(r.content)\n # remove files so that twine upload do not upload them\n os.unlink(sigfn)\n os.unlink(fn)\n\n\nif __name__ == '__main__':\n main()\n", "path": "common/download_release.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\nimport os\n\nimport requests\nimport yaml\n\n\ndef download(session, url, fn):\n if os.path.exists(fn):\n print('Removing old file {}'.format(fn))\n os.unlink(fn)\n print('Downloading {} from {}'.format(fn, url))\n with open(fn, 'wb') as f:\n r = session.get(url, stream=True)\n r.raise_for_status()\n for c in r.iter_content(1024):\n f.write(c)\n\n\ndef main():\n with open(os.path.expanduser(\"~/.config/hub\")) as f:\n conf = yaml.safe_load(f)\n token = conf['github.com'][0]['oauth_token']\n\n s = requests.Session()\n s.headers.update({'Authorization': 'token ' + token})\n r = s.get(\"https://api.github.com/repos/buildbot/buildbot/releases/latest\")\n r.raise_for_status()\n r = r.json()\n tag = r['name']\n upload_url = r['upload_url'].split('{')[0]\n assets = s.get(\"https://api.github.com/repos/buildbot/buildbot/releases/{id}/assets\".format(id=r['id']))\n assets.raise_for_status()\n assets = assets.json()\n os.makedirs('dist', exist_ok=True)\n for url in (a['browser_download_url'] for a in assets):\n if url.endswith(\".whl\") or url.endswith(\".tar.gz\"):\n fn = os.path.join('dist', url.split('/')[-1])\n download(s, url, fn)\n # download tag archive\n url = \"https://github.com/buildbot/buildbot/archive/{tag}.tar.gz\".format(tag=tag)\n fn = os.path.join('dist', \"buildbot-{tag}.gitarchive.tar.gz\".format(tag=tag))\n download(s, url, fn)\n sigfn = fn + \".sig\"\n if os.path.exists(sigfn):\n os.unlink(sigfn)\n # sign the tag archive for debian\n os.system(\"gpg --output {} -b {}\".format(sigfn, fn))\n sigfnbase = os.path.basename(sigfn)\n r = s.post(upload_url,\n headers={'Content-Type': \"application/pgp-signature\"},\n params={\"name\": sigfnbase},\n data=open(sigfn, 'rb'))\n print(r.content)\n fnbase = os.path.basename(fn)\n r = s.post(upload_url,\n headers={'Content-Type': \"application/gzip\"},\n params={\"name\": fnbase},\n data=open(fn, 'rb'))\n print(r.content)\n # remove files so that twine upload do not upload them\n os.unlink(sigfn)\n os.unlink(fn)\n\n\nif __name__ == '__main__':\n main()\n", "path": "common/download_release.py"}]}
| 1,227 | 441 |
gh_patches_debug_23914
|
rasdani/github-patches
|
git_diff
|
netbox-community__netbox-4849
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Swagger references IP address family incorrectly as string in NestedIPAddress.
<!--
NOTE: IF YOUR ISSUE DOES NOT FOLLOW THIS TEMPLATE, IT WILL BE CLOSED.
This form is only for reproducible bugs. If you need assistance with
NetBox installation, or if you have a general question, DO NOT open an
issue. Instead, post to our mailing list:
https://groups.google.com/forum/#!forum/netbox-discuss
Please describe the environment in which you are running NetBox. Be sure
that you are running an unmodified instance of the latest stable release
before submitting a bug report, and that any plugins have been disabled.
-->
### Environment
* Python version: netbox-docker
* NetBox version: 2.8.6
<!--
Describe in detail the exact steps that someone else can take to reproduce
this bug using the current stable release of NetBox. Begin with the
creation of any necessary database objects and call out every operation
being performed explicitly. If reporting a bug in the REST API, be sure to
reconstruct the raw HTTP request(s) being made: Don't rely on a client
library such as pynetbox.
-->
### Steps to Reproduce
1. Generate go-netbox with latest swagger https://github.com/netbox-community/go-netbox
2. Attempt to pull device information with dcim.DcimDevicesListParams on a device / rack that contains a primary ip configured on an device.
3. "cannot unmarshal number into Go struct field NestedIPAddress.results.primary_ip.family of type string"
<!-- What did you expect to happen? -->
### Expected Behavior
Swagger to match returned data.
<!-- What happened instead? -->
### Observed Behavior
The API returns Family as a number instead of a string, mismatching with swagger.
```
json: cannot unmarshal number into Go struct field NestedIPAddress.results.primary_ip.family of type string
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `netbox/ipam/api/nested_serializers.py`
Content:
```
1 from rest_framework import serializers
2
3 from ipam import models
4 from utilities.api import WritableNestedSerializer
5
6 __all__ = [
7 'NestedAggregateSerializer',
8 'NestedIPAddressSerializer',
9 'NestedPrefixSerializer',
10 'NestedRIRSerializer',
11 'NestedRoleSerializer',
12 'NestedServiceSerializer',
13 'NestedVLANGroupSerializer',
14 'NestedVLANSerializer',
15 'NestedVRFSerializer',
16 ]
17
18
19 #
20 # VRFs
21 #
22
23 class NestedVRFSerializer(WritableNestedSerializer):
24 url = serializers.HyperlinkedIdentityField(view_name='ipam-api:vrf-detail')
25 prefix_count = serializers.IntegerField(read_only=True)
26
27 class Meta:
28 model = models.VRF
29 fields = ['id', 'url', 'name', 'rd', 'prefix_count']
30
31
32 #
33 # RIRs/aggregates
34 #
35
36 class NestedRIRSerializer(WritableNestedSerializer):
37 url = serializers.HyperlinkedIdentityField(view_name='ipam-api:rir-detail')
38 aggregate_count = serializers.IntegerField(read_only=True)
39
40 class Meta:
41 model = models.RIR
42 fields = ['id', 'url', 'name', 'slug', 'aggregate_count']
43
44
45 class NestedAggregateSerializer(WritableNestedSerializer):
46 url = serializers.HyperlinkedIdentityField(view_name='ipam-api:aggregate-detail')
47
48 class Meta:
49 model = models.Aggregate
50 fields = ['id', 'url', 'family', 'prefix']
51
52
53 #
54 # VLANs
55 #
56
57 class NestedRoleSerializer(WritableNestedSerializer):
58 url = serializers.HyperlinkedIdentityField(view_name='ipam-api:role-detail')
59 prefix_count = serializers.IntegerField(read_only=True)
60 vlan_count = serializers.IntegerField(read_only=True)
61
62 class Meta:
63 model = models.Role
64 fields = ['id', 'url', 'name', 'slug', 'prefix_count', 'vlan_count']
65
66
67 class NestedVLANGroupSerializer(WritableNestedSerializer):
68 url = serializers.HyperlinkedIdentityField(view_name='ipam-api:vlangroup-detail')
69 vlan_count = serializers.IntegerField(read_only=True)
70
71 class Meta:
72 model = models.VLANGroup
73 fields = ['id', 'url', 'name', 'slug', 'vlan_count']
74
75
76 class NestedVLANSerializer(WritableNestedSerializer):
77 url = serializers.HyperlinkedIdentityField(view_name='ipam-api:vlan-detail')
78
79 class Meta:
80 model = models.VLAN
81 fields = ['id', 'url', 'vid', 'name', 'display_name']
82
83
84 #
85 # Prefixes
86 #
87
88 class NestedPrefixSerializer(WritableNestedSerializer):
89 url = serializers.HyperlinkedIdentityField(view_name='ipam-api:prefix-detail')
90
91 class Meta:
92 model = models.Prefix
93 fields = ['id', 'url', 'family', 'prefix']
94
95
96 #
97 # IP addresses
98 #
99
100 class NestedIPAddressSerializer(WritableNestedSerializer):
101 url = serializers.HyperlinkedIdentityField(view_name='ipam-api:ipaddress-detail')
102
103 class Meta:
104 model = models.IPAddress
105 fields = ['id', 'url', 'family', 'address']
106
107
108 #
109 # Services
110 #
111
112 class NestedServiceSerializer(WritableNestedSerializer):
113 url = serializers.HyperlinkedIdentityField(view_name='ipam-api:service-detail')
114
115 class Meta:
116 model = models.Service
117 fields = ['id', 'url', 'name', 'protocol', 'port']
118
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/netbox/ipam/api/nested_serializers.py b/netbox/ipam/api/nested_serializers.py
--- a/netbox/ipam/api/nested_serializers.py
+++ b/netbox/ipam/api/nested_serializers.py
@@ -44,6 +44,7 @@
class NestedAggregateSerializer(WritableNestedSerializer):
url = serializers.HyperlinkedIdentityField(view_name='ipam-api:aggregate-detail')
+ family = serializers.IntegerField(read_only=True)
class Meta:
model = models.Aggregate
@@ -87,6 +88,7 @@
class NestedPrefixSerializer(WritableNestedSerializer):
url = serializers.HyperlinkedIdentityField(view_name='ipam-api:prefix-detail')
+ family = serializers.IntegerField(read_only=True)
class Meta:
model = models.Prefix
@@ -99,6 +101,7 @@
class NestedIPAddressSerializer(WritableNestedSerializer):
url = serializers.HyperlinkedIdentityField(view_name='ipam-api:ipaddress-detail')
+ family = serializers.IntegerField(read_only=True)
class Meta:
model = models.IPAddress
|
{"golden_diff": "diff --git a/netbox/ipam/api/nested_serializers.py b/netbox/ipam/api/nested_serializers.py\n--- a/netbox/ipam/api/nested_serializers.py\n+++ b/netbox/ipam/api/nested_serializers.py\n@@ -44,6 +44,7 @@\n \n class NestedAggregateSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:aggregate-detail')\n+ family = serializers.IntegerField(read_only=True)\n \n class Meta:\n model = models.Aggregate\n@@ -87,6 +88,7 @@\n \n class NestedPrefixSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:prefix-detail')\n+ family = serializers.IntegerField(read_only=True)\n \n class Meta:\n model = models.Prefix\n@@ -99,6 +101,7 @@\n \n class NestedIPAddressSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:ipaddress-detail')\n+ family = serializers.IntegerField(read_only=True)\n \n class Meta:\n model = models.IPAddress\n", "issue": "Swagger references IP address family incorrectly as string in NestedIPAddress.\n<!--\r\n NOTE: IF YOUR ISSUE DOES NOT FOLLOW THIS TEMPLATE, IT WILL BE CLOSED.\r\n\r\n This form is only for reproducible bugs. If you need assistance with\r\n NetBox installation, or if you have a general question, DO NOT open an\r\n issue. Instead, post to our mailing list:\r\n\r\n https://groups.google.com/forum/#!forum/netbox-discuss\r\n\r\n Please describe the environment in which you are running NetBox. Be sure\r\n that you are running an unmodified instance of the latest stable release\r\n before submitting a bug report, and that any plugins have been disabled.\r\n-->\r\n### Environment\r\n* Python version: netbox-docker\r\n* NetBox version: 2.8.6\r\n\r\n<!--\r\n Describe in detail the exact steps that someone else can take to reproduce\r\n this bug using the current stable release of NetBox. Begin with the\r\n creation of any necessary database objects and call out every operation\r\n being performed explicitly. If reporting a bug in the REST API, be sure to\r\n reconstruct the raw HTTP request(s) being made: Don't rely on a client\r\n library such as pynetbox.\r\n-->\r\n### Steps to Reproduce\r\n1. Generate go-netbox with latest swagger https://github.com/netbox-community/go-netbox\r\n2. Attempt to pull device information with dcim.DcimDevicesListParams on a device / rack that contains a primary ip configured on an device.\r\n3. \"cannot unmarshal number into Go struct field NestedIPAddress.results.primary_ip.family of type string\"\r\n\r\n<!-- What did you expect to happen? -->\r\n### Expected Behavior\r\nSwagger to match returned data.\r\n\r\n<!-- What happened instead? 
-->\r\n### Observed Behavior\r\nApi returns Family as an number instead of a string, mismatching with swagger.\r\n```\r\njson: cannot unmarshal number into Go struct field NestedIPAddress.results.primary_ip.family of type string\r\n```\n", "before_files": [{"content": "from rest_framework import serializers\n\nfrom ipam import models\nfrom utilities.api import WritableNestedSerializer\n\n__all__ = [\n 'NestedAggregateSerializer',\n 'NestedIPAddressSerializer',\n 'NestedPrefixSerializer',\n 'NestedRIRSerializer',\n 'NestedRoleSerializer',\n 'NestedServiceSerializer',\n 'NestedVLANGroupSerializer',\n 'NestedVLANSerializer',\n 'NestedVRFSerializer',\n]\n\n\n#\n# VRFs\n#\n\nclass NestedVRFSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:vrf-detail')\n prefix_count = serializers.IntegerField(read_only=True)\n\n class Meta:\n model = models.VRF\n fields = ['id', 'url', 'name', 'rd', 'prefix_count']\n\n\n#\n# RIRs/aggregates\n#\n\nclass NestedRIRSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:rir-detail')\n aggregate_count = serializers.IntegerField(read_only=True)\n\n class Meta:\n model = models.RIR\n fields = ['id', 'url', 'name', 'slug', 'aggregate_count']\n\n\nclass NestedAggregateSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:aggregate-detail')\n\n class Meta:\n model = models.Aggregate\n fields = ['id', 'url', 'family', 'prefix']\n\n\n#\n# VLANs\n#\n\nclass NestedRoleSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:role-detail')\n prefix_count = serializers.IntegerField(read_only=True)\n vlan_count = serializers.IntegerField(read_only=True)\n\n class Meta:\n model = models.Role\n fields = ['id', 'url', 'name', 'slug', 'prefix_count', 'vlan_count']\n\n\nclass NestedVLANGroupSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:vlangroup-detail')\n vlan_count = serializers.IntegerField(read_only=True)\n\n class Meta:\n model = models.VLANGroup\n fields = ['id', 'url', 'name', 'slug', 'vlan_count']\n\n\nclass NestedVLANSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:vlan-detail')\n\n class Meta:\n model = models.VLAN\n fields = ['id', 'url', 'vid', 'name', 'display_name']\n\n\n#\n# Prefixes\n#\n\nclass NestedPrefixSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:prefix-detail')\n\n class Meta:\n model = models.Prefix\n fields = ['id', 'url', 'family', 'prefix']\n\n\n#\n# IP addresses\n#\n\nclass NestedIPAddressSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:ipaddress-detail')\n\n class Meta:\n model = models.IPAddress\n fields = ['id', 'url', 'family', 'address']\n\n\n#\n# Services\n#\n\nclass NestedServiceSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:service-detail')\n\n class Meta:\n model = models.Service\n fields = ['id', 'url', 'name', 'protocol', 'port']\n", "path": "netbox/ipam/api/nested_serializers.py"}], "after_files": [{"content": "from rest_framework import serializers\n\nfrom ipam import models\nfrom utilities.api import WritableNestedSerializer\n\n__all__ = [\n 'NestedAggregateSerializer',\n 'NestedIPAddressSerializer',\n 'NestedPrefixSerializer',\n 'NestedRIRSerializer',\n 'NestedRoleSerializer',\n 
'NestedServiceSerializer',\n 'NestedVLANGroupSerializer',\n 'NestedVLANSerializer',\n 'NestedVRFSerializer',\n]\n\n\n#\n# VRFs\n#\n\nclass NestedVRFSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:vrf-detail')\n prefix_count = serializers.IntegerField(read_only=True)\n\n class Meta:\n model = models.VRF\n fields = ['id', 'url', 'name', 'rd', 'prefix_count']\n\n\n#\n# RIRs/aggregates\n#\n\nclass NestedRIRSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:rir-detail')\n aggregate_count = serializers.IntegerField(read_only=True)\n\n class Meta:\n model = models.RIR\n fields = ['id', 'url', 'name', 'slug', 'aggregate_count']\n\n\nclass NestedAggregateSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:aggregate-detail')\n family = serializers.IntegerField(read_only=True)\n\n class Meta:\n model = models.Aggregate\n fields = ['id', 'url', 'family', 'prefix']\n\n\n#\n# VLANs\n#\n\nclass NestedRoleSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:role-detail')\n prefix_count = serializers.IntegerField(read_only=True)\n vlan_count = serializers.IntegerField(read_only=True)\n\n class Meta:\n model = models.Role\n fields = ['id', 'url', 'name', 'slug', 'prefix_count', 'vlan_count']\n\n\nclass NestedVLANGroupSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:vlangroup-detail')\n vlan_count = serializers.IntegerField(read_only=True)\n\n class Meta:\n model = models.VLANGroup\n fields = ['id', 'url', 'name', 'slug', 'vlan_count']\n\n\nclass NestedVLANSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:vlan-detail')\n\n class Meta:\n model = models.VLAN\n fields = ['id', 'url', 'vid', 'name', 'display_name']\n\n\n#\n# Prefixes\n#\n\nclass NestedPrefixSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:prefix-detail')\n family = serializers.IntegerField(read_only=True)\n\n class Meta:\n model = models.Prefix\n fields = ['id', 'url', 'family', 'prefix']\n\n\n#\n# IP addresses\n#\n\nclass NestedIPAddressSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:ipaddress-detail')\n family = serializers.IntegerField(read_only=True)\n\n class Meta:\n model = models.IPAddress\n fields = ['id', 'url', 'family', 'address']\n\n\n#\n# Services\n#\n\nclass NestedServiceSerializer(WritableNestedSerializer):\n url = serializers.HyperlinkedIdentityField(view_name='ipam-api:service-detail')\n\n class Meta:\n model = models.Service\n fields = ['id', 'url', 'name', 'protocol', 'port']\n", "path": "netbox/ipam/api/nested_serializers.py"}]}
| 1,641 | 243 |
gh_patches_debug_64137
|
rasdani/github-patches
|
git_diff
|
plotly__dash-2024
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Missing `f` prefix on f-strings
Some strings look like they're meant to be f-strings but are missing the `f` prefix, meaning variable interpolation won't happen.
https://github.com/plotly/dash/blob/f6b51a841e7deddebfbada374e7de4c297bb2ce8/dash/development/update_components.py#L48
I found this issue automatically. I'm a bot. Beep Boop 🦊. See other issues I found in your repo [here](https://codereview.doctor/plotly/dash)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dash/development/update_components.py`
Content:
```
1 import sys
2 import subprocess
3 import shlex
4 import os
5 import argparse
6 import shutil
7 import logging
8 import coloredlogs
9
10
11 class _CombinedFormatter(
12 argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter
13 ):
14 pass
15
16
17 logger = logging.getLogger(__name__)
18 coloredlogs.install(
19 fmt="%(asctime)s,%(msecs)03d %(levelname)s - %(message)s", datefmt="%H:%M:%S"
20 )
21
22
23 def bootstrap_components(components_source, concurrency, install_type):
24
25 is_windows = sys.platform == "win32"
26
27 source_glob = (
28 components_source
29 if components_source != "all"
30 else "dash-core-components|dash-html-components|dash-table"
31 )
32
33 cmdstr = f"npx lerna exec --concurrency {concurrency} --scope *@({source_glob})* -- npm {install_type}"
34 cmd = shlex.split(cmdstr, posix=not is_windows)
35 print(cmdstr)
36
37 with subprocess.Popen(
38 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=is_windows
39 ) as proc:
40 out, err = proc.communicate()
41 status = proc.poll()
42
43 if err:
44 print(("🛑 " if status else "") + err.decode(), file=sys.stderr)
45
46 if status or not out:
47 print(
48 "🚨 Failed installing npm dependencies for component packages: {source_glob} (status={status}) 🚨",
49 file=sys.stderr,
50 )
51 sys.exit(1)
52 else:
53 print(
54 f"🟢 Finished installing npm dependencies for component packages: {source_glob} 🟢",
55 file=sys.stderr,
56 )
57
58
59 def build_components(components_source, concurrency):
60
61 is_windows = sys.platform == "win32"
62
63 source_glob = (
64 components_source
65 if components_source != "all"
66 else "dash-core-components|dash-html-components|dash-table"
67 )
68
69 cmdstr = f"npx lerna exec --concurrency {concurrency} --scope *@({source_glob})* -- npm run build"
70 cmd = shlex.split(cmdstr, posix=not is_windows)
71 print(cmdstr)
72
73 with subprocess.Popen(
74 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=is_windows
75 ) as proc:
76 out, err = proc.communicate()
77 status = proc.poll()
78
79 if err:
80 print(("🛑 " if status else "") + err.decode(), file=sys.stderr)
81
82 if status or not out:
83 print(
84 f"🚨 Finished updating component packages: {source_glob} (status={status}) 🚨",
85 file=sys.stderr,
86 )
87 sys.exit(1)
88
89 for package in source_glob.split("|"):
90 build_directory = os.path.join(
91 "components", package, package.replace("-", "_").rstrip("/\\")
92 )
93
94 dest_dir = (
95 "dcc"
96 if package == "dash-core-components"
97 else "html"
98 if package == "dash-html-components"
99 else "dash_table"
100 )
101
102 dest_path = os.path.join("dash", dest_dir)
103
104 if not os.path.exists(dest_path):
105 try:
106 os.makedirs(dest_path)
107 except OSError:
108 logger.exception("🚨 Having issues manipulating %s", dest_path)
109 sys.exit(1)
110
111 if not os.path.exists(build_directory):
112 print(
113 "🚨 Could not locate build artifacts."
114 + " Check that the npm build process completed"
115 + f" successfully for package: {package} 🚨"
116 )
117 sys.exit(1)
118 else:
119 print(f"🚚 Moving build artifacts from {build_directory} to Dash 🚚")
120 shutil.rmtree(dest_path)
121 shutil.copytree(build_directory, dest_path)
122 with open(os.path.join(dest_path, ".gitkeep"), "w"):
123 pass
124 print(f"🟢 Finished moving build artifacts from {build_directory} to Dash 🟢")
125
126
127 def cli():
128 parser = argparse.ArgumentParser(
129 prog="dash-update-components",
130 formatter_class=_CombinedFormatter,
131 description="Update the specified subcomponent libraries within Dash"
132 " by copying over build artifacts, dependencies, and dependency metadata.",
133 )
134 parser.add_argument(
135 "components_source",
136 help="A glob string that matches the Dash component libraries to be updated"
137 " (eg.'dash-table' // 'dash-core-components|dash-html-components' // 'all')."
138 " The default argument is 'all'.",
139 default="all",
140 )
141 parser.add_argument(
142 "--concurrency",
143 type=int,
144 default=3,
145 help="Maximum concurrent steps, up to 3 (ie all components in parallel)",
146 )
147 parser.add_argument(
148 "--ci",
149 help="For clean-install use '--ci True'",
150 default="False",
151 )
152
153 args = parser.parse_args()
154
155 bootstrap_components(
156 args.components_source, args.concurrency, "ci" if args.ci == "True" else "i"
157 )
158 build_components(args.components_source, args.concurrency)
159
160
161 if __name__ == "__main__":
162 cli()
163
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/dash/development/update_components.py b/dash/development/update_components.py
--- a/dash/development/update_components.py
+++ b/dash/development/update_components.py
@@ -45,7 +45,7 @@
if status or not out:
print(
- "🚨 Failed installing npm dependencies for component packages: {source_glob} (status={status}) 🚨",
+ f"🚨 Failed installing npm dependencies for component packages: {source_glob} (status={status}) 🚨",
file=sys.stderr,
)
sys.exit(1)
|
{"golden_diff": "diff --git a/dash/development/update_components.py b/dash/development/update_components.py\n--- a/dash/development/update_components.py\n+++ b/dash/development/update_components.py\n@@ -45,7 +45,7 @@\n \n if status or not out:\n print(\n- \"\ud83d\udea8 Failed installing npm dependencies for component packages: {source_glob} (status={status}) \ud83d\udea8\",\n+ f\"\ud83d\udea8 Failed installing npm dependencies for component packages: {source_glob} (status={status}) \ud83d\udea8\",\n file=sys.stderr,\n )\n sys.exit(1)\n", "issue": "Missing `f` prefix on f-strings\nSome strings looks like they're meant to be f-strings but are missing the `f` prefix meaning variable interpolation won't happen.\n\nhttps://github.com/plotly/dash/blob/f6b51a841e7deddebfbada374e7de4c297bb2ce8/dash/development/update_components.py#L48\n\nI found this issue automatically. I'm a bot. Beep Boop \ud83e\udd8a. See other issues I found in your repo [here](https://codereview.doctor/plotly/dash)\n", "before_files": [{"content": "import sys\nimport subprocess\nimport shlex\nimport os\nimport argparse\nimport shutil\nimport logging\nimport coloredlogs\n\n\nclass _CombinedFormatter(\n argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter\n):\n pass\n\n\nlogger = logging.getLogger(__name__)\ncoloredlogs.install(\n fmt=\"%(asctime)s,%(msecs)03d %(levelname)s - %(message)s\", datefmt=\"%H:%M:%S\"\n)\n\n\ndef bootstrap_components(components_source, concurrency, install_type):\n\n is_windows = sys.platform == \"win32\"\n\n source_glob = (\n components_source\n if components_source != \"all\"\n else \"dash-core-components|dash-html-components|dash-table\"\n )\n\n cmdstr = f\"npx lerna exec --concurrency {concurrency} --scope *@({source_glob})* -- npm {install_type}\"\n cmd = shlex.split(cmdstr, posix=not is_windows)\n print(cmdstr)\n\n with subprocess.Popen(\n cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=is_windows\n ) as proc:\n out, err = proc.communicate()\n status = proc.poll()\n\n if err:\n print((\"\ud83d\uded1 \" if status else \"\") + err.decode(), file=sys.stderr)\n\n if status or not out:\n print(\n \"\ud83d\udea8 Failed installing npm dependencies for component packages: {source_glob} (status={status}) \ud83d\udea8\",\n file=sys.stderr,\n )\n sys.exit(1)\n else:\n print(\n f\"\ud83d\udfe2 Finished installing npm dependencies for component packages: {source_glob} \ud83d\udfe2\",\n file=sys.stderr,\n )\n\n\ndef build_components(components_source, concurrency):\n\n is_windows = sys.platform == \"win32\"\n\n source_glob = (\n components_source\n if components_source != \"all\"\n else \"dash-core-components|dash-html-components|dash-table\"\n )\n\n cmdstr = f\"npx lerna exec --concurrency {concurrency} --scope *@({source_glob})* -- npm run build\"\n cmd = shlex.split(cmdstr, posix=not is_windows)\n print(cmdstr)\n\n with subprocess.Popen(\n cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=is_windows\n ) as proc:\n out, err = proc.communicate()\n status = proc.poll()\n\n if err:\n print((\"\ud83d\uded1 \" if status else \"\") + err.decode(), file=sys.stderr)\n\n if status or not out:\n print(\n f\"\ud83d\udea8 Finished updating component packages: {source_glob} (status={status}) \ud83d\udea8\",\n file=sys.stderr,\n )\n sys.exit(1)\n\n for package in source_glob.split(\"|\"):\n build_directory = os.path.join(\n \"components\", package, package.replace(\"-\", \"_\").rstrip(\"/\\\\\")\n )\n\n dest_dir = (\n \"dcc\"\n if package == \"dash-core-components\"\n else 
\"html\"\n if package == \"dash-html-components\"\n else \"dash_table\"\n )\n\n dest_path = os.path.join(\"dash\", dest_dir)\n\n if not os.path.exists(dest_path):\n try:\n os.makedirs(dest_path)\n except OSError:\n logger.exception(\"\ud83d\udea8 Having issues manipulating %s\", dest_path)\n sys.exit(1)\n\n if not os.path.exists(build_directory):\n print(\n \"\ud83d\udea8 Could not locate build artifacts.\"\n + \" Check that the npm build process completed\"\n + f\" successfully for package: {package} \ud83d\udea8\"\n )\n sys.exit(1)\n else:\n print(f\"\ud83d\ude9a Moving build artifacts from {build_directory} to Dash \ud83d\ude9a\")\n shutil.rmtree(dest_path)\n shutil.copytree(build_directory, dest_path)\n with open(os.path.join(dest_path, \".gitkeep\"), \"w\"):\n pass\n print(f\"\ud83d\udfe2 Finished moving build artifacts from {build_directory} to Dash \ud83d\udfe2\")\n\n\ndef cli():\n parser = argparse.ArgumentParser(\n prog=\"dash-update-components\",\n formatter_class=_CombinedFormatter,\n description=\"Update the specified subcomponent libraries within Dash\"\n \" by copying over build artifacts, dependencies, and dependency metadata.\",\n )\n parser.add_argument(\n \"components_source\",\n help=\"A glob string that matches the Dash component libraries to be updated\"\n \" (eg.'dash-table' // 'dash-core-components|dash-html-components' // 'all').\"\n \" The default argument is 'all'.\",\n default=\"all\",\n )\n parser.add_argument(\n \"--concurrency\",\n type=int,\n default=3,\n help=\"Maximum concurrent steps, up to 3 (ie all components in parallel)\",\n )\n parser.add_argument(\n \"--ci\",\n help=\"For clean-install use '--ci True'\",\n default=\"False\",\n )\n\n args = parser.parse_args()\n\n bootstrap_components(\n args.components_source, args.concurrency, \"ci\" if args.ci == \"True\" else \"i\"\n )\n build_components(args.components_source, args.concurrency)\n\n\nif __name__ == \"__main__\":\n cli()\n", "path": "dash/development/update_components.py"}], "after_files": [{"content": "import sys\nimport subprocess\nimport shlex\nimport os\nimport argparse\nimport shutil\nimport logging\nimport coloredlogs\n\n\nclass _CombinedFormatter(\n argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter\n):\n pass\n\n\nlogger = logging.getLogger(__name__)\ncoloredlogs.install(\n fmt=\"%(asctime)s,%(msecs)03d %(levelname)s - %(message)s\", datefmt=\"%H:%M:%S\"\n)\n\n\ndef bootstrap_components(components_source, concurrency, install_type):\n\n is_windows = sys.platform == \"win32\"\n\n source_glob = (\n components_source\n if components_source != \"all\"\n else \"dash-core-components|dash-html-components|dash-table\"\n )\n\n cmdstr = f\"npx lerna exec --concurrency {concurrency} --scope *@({source_glob})* -- npm {install_type}\"\n cmd = shlex.split(cmdstr, posix=not is_windows)\n print(cmdstr)\n\n with subprocess.Popen(\n cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=is_windows\n ) as proc:\n out, err = proc.communicate()\n status = proc.poll()\n\n if err:\n print((\"\ud83d\uded1 \" if status else \"\") + err.decode(), file=sys.stderr)\n\n if status or not out:\n print(\n f\"\ud83d\udea8 Failed installing npm dependencies for component packages: {source_glob} (status={status}) \ud83d\udea8\",\n file=sys.stderr,\n )\n sys.exit(1)\n else:\n print(\n f\"\ud83d\udfe2 Finished installing npm dependencies for component packages: {source_glob} \ud83d\udfe2\",\n file=sys.stderr,\n )\n\n\ndef build_components(components_source, concurrency):\n\n is_windows = 
sys.platform == \"win32\"\n\n source_glob = (\n components_source\n if components_source != \"all\"\n else \"dash-core-components|dash-html-components|dash-table\"\n )\n\n cmdstr = f\"npx lerna exec --concurrency {concurrency} --scope *@({source_glob})* -- npm run build\"\n cmd = shlex.split(cmdstr, posix=not is_windows)\n print(cmdstr)\n\n with subprocess.Popen(\n cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=is_windows\n ) as proc:\n out, err = proc.communicate()\n status = proc.poll()\n\n if err:\n print((\"\ud83d\uded1 \" if status else \"\") + err.decode(), file=sys.stderr)\n\n if status or not out:\n print(\n f\"\ud83d\udea8 Finished updating component packages: {source_glob} (status={status}) \ud83d\udea8\",\n file=sys.stderr,\n )\n sys.exit(1)\n\n for package in source_glob.split(\"|\"):\n build_directory = os.path.join(\n \"components\", package, package.replace(\"-\", \"_\").rstrip(\"/\\\\\")\n )\n\n dest_dir = (\n \"dcc\"\n if package == \"dash-core-components\"\n else \"html\"\n if package == \"dash-html-components\"\n else \"dash_table\"\n )\n\n dest_path = os.path.join(\"dash\", dest_dir)\n\n if not os.path.exists(dest_path):\n try:\n os.makedirs(dest_path)\n except OSError:\n logger.exception(\"\ud83d\udea8 Having issues manipulating %s\", dest_path)\n sys.exit(1)\n\n if not os.path.exists(build_directory):\n print(\n \"\ud83d\udea8 Could not locate build artifacts.\"\n + \" Check that the npm build process completed\"\n + f\" successfully for package: {package} \ud83d\udea8\"\n )\n sys.exit(1)\n else:\n print(f\"\ud83d\ude9a Moving build artifacts from {build_directory} to Dash \ud83d\ude9a\")\n shutil.rmtree(dest_path)\n shutil.copytree(build_directory, dest_path)\n with open(os.path.join(dest_path, \".gitkeep\"), \"w\"):\n pass\n print(f\"\ud83d\udfe2 Finished moving build artifacts from {build_directory} to Dash \ud83d\udfe2\")\n\n\ndef cli():\n parser = argparse.ArgumentParser(\n prog=\"dash-update-components\",\n formatter_class=_CombinedFormatter,\n description=\"Update the specified subcomponent libraries within Dash\"\n \" by copying over build artifacts, dependencies, and dependency metadata.\",\n )\n parser.add_argument(\n \"components_source\",\n help=\"A glob string that matches the Dash component libraries to be updated\"\n \" (eg.'dash-table' // 'dash-core-components|dash-html-components' // 'all').\"\n \" The default argument is 'all'.\",\n default=\"all\",\n )\n parser.add_argument(\n \"--concurrency\",\n type=int,\n default=3,\n help=\"Maximum concurrent steps, up to 3 (ie all components in parallel)\",\n )\n parser.add_argument(\n \"--ci\",\n help=\"For clean-install use '--ci True'\",\n default=\"False\",\n )\n\n args = parser.parse_args()\n\n bootstrap_components(\n args.components_source, args.concurrency, \"ci\" if args.ci == \"True\" else \"i\"\n )\n build_components(args.components_source, args.concurrency)\n\n\nif __name__ == \"__main__\":\n cli()\n", "path": "dash/development/update_components.py"}]}
| 1,856 | 127 |
gh_patches_debug_25784
|
rasdani/github-patches
|
git_diff
|
google__openhtf-185
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Start Test Prompt is timing out and raising PromptUnansweredError
Start Test Prompt appears to be using the same timeout as user prompts. When using this in loop mode we shouldn't have a timeout.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `openhtf/exe/triggers.py`
Content:
```
1 # Copyright 2014 Google Inc. All Rights Reserved.
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6
7 # http://www.apache.org/licenses/LICENSE-2.0
8
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Module for handling the triggering of test start/stop.
16
17 In order for the TestExecutor (see exe/__init__.py) to know when to start a
18 test, it needs a way to know when a DUT has been connected. Also, the test
19 can't restart until the DUT is removed and re-appears. The serial for the
20 TestRun can be read from the DUT, or from the frontend.
21
22 This module provides some built-in triggers. Custom implementations of test
23 start and stop triggers must follow the following interface:
24
25 TestStart:
26 Args:
27 None
28 Returns:
29 DUT identifier, or None if it is not known at test start time.
30
31 TestStop:
32 Args:
33 dut_id: DUT identifier of the test that is stopping.
34 Returns:
35 Blocks until the test can re-start, then returns None.
36 """
37
38 import logging
39 import time
40
41 import gflags
42
43 from openhtf.io import user_input
44
45 gflags.DEFINE_string('dut_serial', 'UNKNOWN_DUT_ID',
46 'DUT serial to start the test with. '
47 'Only use if using the AutoStart trigger.')
48
49 FLAGS = gflags.FLAGS
50 _LOG = logging.getLogger(__name__)
51
52 def AutoStart(): # pylint: disable=invalid-name
53 """Start the test immediately with a dummy DUT ID."""
54 return FLAGS.dut_serial
55
56
57 def AutoStop(dummy_dut_id): # pylint: disable=invalid-name
58 """Stop the test immediately regardless of DUT ID given."""
59 pass
60
61
62 # pylint: disable=invalid-name
63 def PromptForTestStart(message='Provide a DUT ID in order to start the test.',
64 text_input=True):
65 """Make a test start trigger based on prompting the user for input."""
66 def trigger(): # pylint: disable=missing-docstring
67 prompt_manager = user_input.get_prompt_manager()
68 return prompt_manager.DisplayPrompt(message, text_input=text_input)
69 return trigger
70
71
72 def PromptForTestStop(message='Hit ENTER to complete the test.',
73 text_input=False):
74 """Make a test stop trigger based on prompting the user for a response."""
75 def trigger(dummy_dut_id): # pylint: disable=missing-docstring
76 prompt_manager = user_input.get_prompt_manager()
77 return prompt_manager.DisplayPrompt(message, text_input=text_input)
78 return trigger
79
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/openhtf/exe/triggers.py b/openhtf/exe/triggers.py
--- a/openhtf/exe/triggers.py
+++ b/openhtf/exe/triggers.py
@@ -61,18 +61,20 @@
# pylint: disable=invalid-name
def PromptForTestStart(message='Provide a DUT ID in order to start the test.',
- text_input=True):
+ text_input=False, timeout_s=60*60*24):
"""Make a test start trigger based on prompting the user for input."""
def trigger(): # pylint: disable=missing-docstring
prompt_manager = user_input.get_prompt_manager()
- return prompt_manager.DisplayPrompt(message, text_input=text_input)
+ return prompt_manager.DisplayPrompt(
+ message, text_input=text_input, timeout_s=timeout_s)
return trigger
def PromptForTestStop(message='Hit ENTER to complete the test.',
- text_input=False):
+ text_input=False, timeout_s=60*60*24):
"""Make a test stop trigger based on prompting the user for a response."""
def trigger(dummy_dut_id): # pylint: disable=missing-docstring
prompt_manager = user_input.get_prompt_manager()
- return prompt_manager.DisplayPrompt(message, text_input=text_input)
+ return prompt_manager.DisplayPrompt(
+ message, text_input=text_input, timeout_s=timeout_s)
return trigger
|
{"golden_diff": "diff --git a/openhtf/exe/triggers.py b/openhtf/exe/triggers.py\n--- a/openhtf/exe/triggers.py\n+++ b/openhtf/exe/triggers.py\n@@ -61,18 +61,20 @@\n \n # pylint: disable=invalid-name\n def PromptForTestStart(message='Provide a DUT ID in order to start the test.',\n- text_input=True):\n+ text_input=False, timeout_s=60*60*24):\n \"\"\"Make a test start trigger based on prompting the user for input.\"\"\"\n def trigger(): # pylint: disable=missing-docstring\n prompt_manager = user_input.get_prompt_manager()\n- return prompt_manager.DisplayPrompt(message, text_input=text_input)\n+ return prompt_manager.DisplayPrompt(\n+ message, text_input=text_input, timeout_s=timeout_s)\n return trigger\n \n \n def PromptForTestStop(message='Hit ENTER to complete the test.',\n- text_input=False):\n+ text_input=False, timeout_s=60*60*24):\n \"\"\"Make a test stop trigger based on prompting the user for a response.\"\"\"\n def trigger(dummy_dut_id): # pylint: disable=missing-docstring\n prompt_manager = user_input.get_prompt_manager()\n- return prompt_manager.DisplayPrompt(message, text_input=text_input)\n+ return prompt_manager.DisplayPrompt(\n+ message, text_input=text_input, timeout_s=timeout_s)\n return trigger\n", "issue": "Start Test Prompt is timing out and raising PromptUnansweredError\nStart Test Prompt appears to be using same timeout as user prompts. When using this in loop mode we shouldn't have a timeout\n\n", "before_files": [{"content": "# Copyright 2014 Google Inc. All Rights Reserved.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Module for handling the triggering of test start/stop.\n\nIn order for the TestExecutor (see exe/__init__.py) to know when to start a\ntest, it needs a way to know when a DUT has been connected. Also, the test\ncan't restart until the DUT is removed and re-appears. The serial for the\nTestRun can be read from the DUT, or from the frontend.\n\nThis module provides some built-in triggers. Custom implementations of test\nstart and stop triggers must follow the following interface:\n\nTestStart:\n Args:\n None\n Returns:\n DUT identifier, or None if it is not known at test start time.\n\nTestStop:\n Args:\n dut_id: DUT identifier of the test that is stopping.\n Returns:\n Blocks until the test can re-start, then returns None.\n\"\"\"\n\nimport logging\nimport time\n\nimport gflags\n\nfrom openhtf.io import user_input\n\ngflags.DEFINE_string('dut_serial', 'UNKNOWN_DUT_ID',\n 'DUT serial to start the test with. 
'\n 'Only use if using the AutoStart trigger.')\n\nFLAGS = gflags.FLAGS\n_LOG = logging.getLogger(__name__)\n\ndef AutoStart(): # pylint: disable=invalid-name\n \"\"\"Start the test immediately with a dummy DUT ID.\"\"\"\n return FLAGS.dut_serial\n\n\ndef AutoStop(dummy_dut_id): # pylint: disable=invalid-name\n \"\"\"Stop the test immediately regardless of DUT ID given.\"\"\"\n pass\n\n\n# pylint: disable=invalid-name\ndef PromptForTestStart(message='Provide a DUT ID in order to start the test.',\n text_input=True):\n \"\"\"Make a test start trigger based on prompting the user for input.\"\"\"\n def trigger(): # pylint: disable=missing-docstring\n prompt_manager = user_input.get_prompt_manager()\n return prompt_manager.DisplayPrompt(message, text_input=text_input)\n return trigger\n\n\ndef PromptForTestStop(message='Hit ENTER to complete the test.',\n text_input=False):\n \"\"\"Make a test stop trigger based on prompting the user for a response.\"\"\"\n def trigger(dummy_dut_id): # pylint: disable=missing-docstring\n prompt_manager = user_input.get_prompt_manager()\n return prompt_manager.DisplayPrompt(message, text_input=text_input)\n return trigger\n", "path": "openhtf/exe/triggers.py"}], "after_files": [{"content": "# Copyright 2014 Google Inc. All Rights Reserved.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Module for handling the triggering of test start/stop.\n\nIn order for the TestExecutor (see exe/__init__.py) to know when to start a\ntest, it needs a way to know when a DUT has been connected. Also, the test\ncan't restart until the DUT is removed and re-appears. The serial for the\nTestRun can be read from the DUT, or from the frontend.\n\nThis module provides some built-in triggers. Custom implementations of test\nstart and stop triggers must follow the following interface:\n\nTestStart:\n Args:\n None\n Returns:\n DUT identifier, or None if it is not known at test start time.\n\nTestStop:\n Args:\n dut_id: DUT identifier of the test that is stopping.\n Returns:\n Blocks until the test can re-start, then returns None.\n\"\"\"\n\nimport logging\nimport time\n\nimport gflags\n\nfrom openhtf.io import user_input\n\ngflags.DEFINE_string('dut_serial', 'UNKNOWN_DUT_ID',\n 'DUT serial to start the test with. 
'\n 'Only use if using the AutoStart trigger.')\n\nFLAGS = gflags.FLAGS\n_LOG = logging.getLogger(__name__)\n\ndef AutoStart(): # pylint: disable=invalid-name\n \"\"\"Start the test immediately with a dummy DUT ID.\"\"\"\n return FLAGS.dut_serial\n\n\ndef AutoStop(dummy_dut_id): # pylint: disable=invalid-name\n \"\"\"Stop the test immediately regardless of DUT ID given.\"\"\"\n pass\n\n\n# pylint: disable=invalid-name\ndef PromptForTestStart(message='Provide a DUT ID in order to start the test.',\n text_input=False, timeout_s=60*60*24):\n \"\"\"Make a test start trigger based on prompting the user for input.\"\"\"\n def trigger(): # pylint: disable=missing-docstring\n prompt_manager = user_input.get_prompt_manager()\n return prompt_manager.DisplayPrompt(\n message, text_input=text_input, timeout_s=timeout_s)\n return trigger\n\n\ndef PromptForTestStop(message='Hit ENTER to complete the test.',\n text_input=False, timeout_s=60*60*24):\n \"\"\"Make a test stop trigger based on prompting the user for a response.\"\"\"\n def trigger(dummy_dut_id): # pylint: disable=missing-docstring\n prompt_manager = user_input.get_prompt_manager()\n return prompt_manager.DisplayPrompt(\n message, text_input=text_input, timeout_s=timeout_s)\n return trigger\n", "path": "openhtf/exe/triggers.py"}]}
| 1,088 | 314 |
gh_patches_debug_14909
|
rasdani/github-patches
|
git_diff
|
getsentry__sentry-python-2818
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Memory leak with unclosed span in opentelemetry integration
### How do you use Sentry?
Sentry Saas (sentry.io)
### Version
1.40.3
### Steps to Reproduce
`SentrySpanProcessor` stores all open spans in the `self.otel_span_map` dict. This leads to a memory leak if an otel span is deleted without closing. E.g. due to a bug: https://github.com/open-telemetry/opentelemetry-python-contrib/issues/2149
### Expected Result
Sentry span should be deleted after otel span is deleted by GC.
### Actual Result
Sentry spans are stored in self.otel_span_map forever.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sentry_sdk/scrubber.py`
Content:
```
1 try:
2 from typing import cast
3 except ImportError:
4 cast = lambda _, obj: obj
5
6 from sentry_sdk.utils import (
7 capture_internal_exceptions,
8 AnnotatedValue,
9 iter_event_frames,
10 )
11 from sentry_sdk._types import TYPE_CHECKING
12
13 if TYPE_CHECKING:
14 from sentry_sdk._types import Event
15 from typing import List
16 from typing import Optional
17
18
19 DEFAULT_DENYLIST = [
20 # stolen from relay
21 "password",
22 "passwd",
23 "secret",
24 "api_key",
25 "apikey",
26 "auth",
27 "credentials",
28 "mysql_pwd",
29 "privatekey",
30 "private_key",
31 "token",
32 "ip_address",
33 "session",
34 # django
35 "csrftoken",
36 "sessionid",
37 # wsgi
38 "remote_addr",
39 "x_csrftoken",
40 "x_forwarded_for",
41 "set_cookie",
42 "cookie",
43 "authorization",
44 "x_api_key",
45 "x_forwarded_for",
46 "x_real_ip",
47 # other common names used in the wild
48 "aiohttp_session", # aiohttp
49 "connect.sid", # Express
50 "csrf_token", # Pyramid
51 "csrf", # (this is a cookie name used in accepted answers on stack overflow)
52 "_csrf", # Express
53 "_csrf_token", # Bottle
54 "PHPSESSID", # PHP
55 "_session", # Sanic
56 "symfony", # Symfony
57 "user_session", # Vue
58 "_xsrf", # Tornado
59 "XSRF-TOKEN", # Angular, Laravel
60 ]
61
62
63 class EventScrubber(object):
64 def __init__(self, denylist=None, recursive=False):
65 # type: (Optional[List[str]], bool) -> None
66 self.denylist = DEFAULT_DENYLIST if denylist is None else denylist
67 self.denylist = [x.lower() for x in self.denylist]
68 self.recursive = recursive
69
70 def scrub_list(self, lst):
71 # type: (object) -> None
72 """
73 If a list is passed to this method, the method recursively searches the list and any
74 nested lists for any dictionaries. The method calls scrub_dict on all dictionaries
75 it finds.
76 If the parameter passed to this method is not a list, the method does nothing.
77 """
78 if not isinstance(lst, list):
79 return
80
81 for v in lst:
82 self.scrub_dict(v) # no-op unless v is a dict
83 self.scrub_list(v) # no-op unless v is a list
84
85 def scrub_dict(self, d):
86 # type: (object) -> None
87 """
88 If a dictionary is passed to this method, the method scrubs the dictionary of any
89 sensitive data. The method calls itself recursively on any nested dictionaries (
90 including dictionaries nested in lists) if self.recursive is True.
91 This method does nothing if the parameter passed to it is not a dictionary.
92 """
93 if not isinstance(d, dict):
94 return
95
96 for k, v in d.items():
97 if isinstance(k, str) and k.lower() in self.denylist:
98 d[k] = AnnotatedValue.substituted_because_contains_sensitive_data()
99 elif self.recursive:
100 self.scrub_dict(v) # no-op unless v is a dict
101 self.scrub_list(v) # no-op unless v is a list
102
103 def scrub_request(self, event):
104 # type: (Event) -> None
105 with capture_internal_exceptions():
106 if "request" in event:
107 if "headers" in event["request"]:
108 self.scrub_dict(event["request"]["headers"])
109 if "cookies" in event["request"]:
110 self.scrub_dict(event["request"]["cookies"])
111 if "data" in event["request"]:
112 self.scrub_dict(event["request"]["data"])
113
114 def scrub_extra(self, event):
115 # type: (Event) -> None
116 with capture_internal_exceptions():
117 if "extra" in event:
118 self.scrub_dict(event["extra"])
119
120 def scrub_user(self, event):
121 # type: (Event) -> None
122 with capture_internal_exceptions():
123 if "user" in event:
124 self.scrub_dict(event["user"])
125
126 def scrub_breadcrumbs(self, event):
127 # type: (Event) -> None
128 with capture_internal_exceptions():
129 if "breadcrumbs" in event:
130 if "values" in event["breadcrumbs"]:
131 for value in event["breadcrumbs"]["values"]:
132 if "data" in value:
133 self.scrub_dict(value["data"])
134
135 def scrub_frames(self, event):
136 # type: (Event) -> None
137 with capture_internal_exceptions():
138 for frame in iter_event_frames(event):
139 if "vars" in frame:
140 self.scrub_dict(frame["vars"])
141
142 def scrub_spans(self, event):
143 # type: (Event) -> None
144 with capture_internal_exceptions():
145 if "spans" in event:
146 for span in event["spans"]:
147 if "data" in span:
148 self.scrub_dict(span["data"])
149
150 def scrub_event(self, event):
151 # type: (Event) -> None
152 self.scrub_request(event)
153 self.scrub_extra(event)
154 self.scrub_user(event)
155 self.scrub_breadcrumbs(event)
156 self.scrub_frames(event)
157 self.scrub_spans(event)
158
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sentry_sdk/scrubber.py b/sentry_sdk/scrubber.py
--- a/sentry_sdk/scrubber.py
+++ b/sentry_sdk/scrubber.py
@@ -1,8 +1,3 @@
-try:
-    from typing import cast
-except ImportError:
-    cast = lambda _, obj: obj
-
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     AnnotatedValue,
@@ -94,6 +89,8 @@
             return
 
         for k, v in d.items():
+            # The cast is needed because mypy is not smart enough to figure out that k must be a
+            # string after the isinstance check.
            if isinstance(k, str) and k.lower() in self.denylist:
                d[k] = AnnotatedValue.substituted_because_contains_sensitive_data()
            elif self.recursive:
|
{"golden_diff": "diff --git a/sentry_sdk/scrubber.py b/sentry_sdk/scrubber.py\n--- a/sentry_sdk/scrubber.py\n+++ b/sentry_sdk/scrubber.py\n@@ -1,8 +1,3 @@\n-try:\n- from typing import cast\n-except ImportError:\n- cast = lambda _, obj: obj\n-\n from sentry_sdk.utils import (\n capture_internal_exceptions,\n AnnotatedValue,\n@@ -94,6 +89,8 @@\n return\n \n for k, v in d.items():\n+ # The cast is needed because mypy is not smart enough to figure out that k must be a\n+ # string after the isinstance check.\n if isinstance(k, str) and k.lower() in self.denylist:\n d[k] = AnnotatedValue.substituted_because_contains_sensitive_data()\n elif self.recursive:\n", "issue": "Memory leak with unclosed span in opentelemetry integration\n### How do you use Sentry?\n\nSentry Saas (sentry.io)\n\n### Version\n\n1.40.3\n\n### Steps to Reproduce\n\n`SentrySpanProcessor` stores all open span in `self.otel_span_map` dict. This leads to a memory leak if otel span is deleted without closing. E.g. due to a bug: https://github.com/open-telemetry/opentelemetry-python-contrib/issues/2149\n\n### Expected Result\n\nSentry span should be deleted after otel span is deleted by GC.\n\n### Actual Result\n\nSentry spans are stored in self.otel_span_map forever.\n", "before_files": [{"content": "try:\n from typing import cast\nexcept ImportError:\n cast = lambda _, obj: obj\n\nfrom sentry_sdk.utils import (\n capture_internal_exceptions,\n AnnotatedValue,\n iter_event_frames,\n)\nfrom sentry_sdk._types import TYPE_CHECKING\n\nif TYPE_CHECKING:\n from sentry_sdk._types import Event\n from typing import List\n from typing import Optional\n\n\nDEFAULT_DENYLIST = [\n # stolen from relay\n \"password\",\n \"passwd\",\n \"secret\",\n \"api_key\",\n \"apikey\",\n \"auth\",\n \"credentials\",\n \"mysql_pwd\",\n \"privatekey\",\n \"private_key\",\n \"token\",\n \"ip_address\",\n \"session\",\n # django\n \"csrftoken\",\n \"sessionid\",\n # wsgi\n \"remote_addr\",\n \"x_csrftoken\",\n \"x_forwarded_for\",\n \"set_cookie\",\n \"cookie\",\n \"authorization\",\n \"x_api_key\",\n \"x_forwarded_for\",\n \"x_real_ip\",\n # other common names used in the wild\n \"aiohttp_session\", # aiohttp\n \"connect.sid\", # Express\n \"csrf_token\", # Pyramid\n \"csrf\", # (this is a cookie name used in accepted answers on stack overflow)\n \"_csrf\", # Express\n \"_csrf_token\", # Bottle\n \"PHPSESSID\", # PHP\n \"_session\", # Sanic\n \"symfony\", # Symfony\n \"user_session\", # Vue\n \"_xsrf\", # Tornado\n \"XSRF-TOKEN\", # Angular, Laravel\n]\n\n\nclass EventScrubber(object):\n def __init__(self, denylist=None, recursive=False):\n # type: (Optional[List[str]], bool) -> None\n self.denylist = DEFAULT_DENYLIST if denylist is None else denylist\n self.denylist = [x.lower() for x in self.denylist]\n self.recursive = recursive\n\n def scrub_list(self, lst):\n # type: (object) -> None\n \"\"\"\n If a list is passed to this method, the method recursively searches the list and any\n nested lists for any dictionaries. The method calls scrub_dict on all dictionaries\n it finds.\n If the parameter passed to this method is not a list, the method does nothing.\n \"\"\"\n if not isinstance(lst, list):\n return\n\n for v in lst:\n self.scrub_dict(v) # no-op unless v is a dict\n self.scrub_list(v) # no-op unless v is a list\n\n def scrub_dict(self, d):\n # type: (object) -> None\n \"\"\"\n If a dictionary is passed to this method, the method scrubs the dictionary of any\n sensitive data. 
The method calls itself recursively on any nested dictionaries (\n including dictionaries nested in lists) if self.recursive is True.\n This method does nothing if the parameter passed to it is not a dictionary.\n \"\"\"\n if not isinstance(d, dict):\n return\n\n for k, v in d.items():\n if isinstance(k, str) and k.lower() in self.denylist:\n d[k] = AnnotatedValue.substituted_because_contains_sensitive_data()\n elif self.recursive:\n self.scrub_dict(v) # no-op unless v is a dict\n self.scrub_list(v) # no-op unless v is a list\n\n def scrub_request(self, event):\n # type: (Event) -> None\n with capture_internal_exceptions():\n if \"request\" in event:\n if \"headers\" in event[\"request\"]:\n self.scrub_dict(event[\"request\"][\"headers\"])\n if \"cookies\" in event[\"request\"]:\n self.scrub_dict(event[\"request\"][\"cookies\"])\n if \"data\" in event[\"request\"]:\n self.scrub_dict(event[\"request\"][\"data\"])\n\n def scrub_extra(self, event):\n # type: (Event) -> None\n with capture_internal_exceptions():\n if \"extra\" in event:\n self.scrub_dict(event[\"extra\"])\n\n def scrub_user(self, event):\n # type: (Event) -> None\n with capture_internal_exceptions():\n if \"user\" in event:\n self.scrub_dict(event[\"user\"])\n\n def scrub_breadcrumbs(self, event):\n # type: (Event) -> None\n with capture_internal_exceptions():\n if \"breadcrumbs\" in event:\n if \"values\" in event[\"breadcrumbs\"]:\n for value in event[\"breadcrumbs\"][\"values\"]:\n if \"data\" in value:\n self.scrub_dict(value[\"data\"])\n\n def scrub_frames(self, event):\n # type: (Event) -> None\n with capture_internal_exceptions():\n for frame in iter_event_frames(event):\n if \"vars\" in frame:\n self.scrub_dict(frame[\"vars\"])\n\n def scrub_spans(self, event):\n # type: (Event) -> None\n with capture_internal_exceptions():\n if \"spans\" in event:\n for span in event[\"spans\"]:\n if \"data\" in span:\n self.scrub_dict(span[\"data\"])\n\n def scrub_event(self, event):\n # type: (Event) -> None\n self.scrub_request(event)\n self.scrub_extra(event)\n self.scrub_user(event)\n self.scrub_breadcrumbs(event)\n self.scrub_frames(event)\n self.scrub_spans(event)\n", "path": "sentry_sdk/scrubber.py"}], "after_files": [{"content": "from sentry_sdk.utils import (\n capture_internal_exceptions,\n AnnotatedValue,\n iter_event_frames,\n)\nfrom sentry_sdk._types import TYPE_CHECKING\n\nif TYPE_CHECKING:\n from sentry_sdk._types import Event\n from typing import List\n from typing import Optional\n\n\nDEFAULT_DENYLIST = [\n # stolen from relay\n \"password\",\n \"passwd\",\n \"secret\",\n \"api_key\",\n \"apikey\",\n \"auth\",\n \"credentials\",\n \"mysql_pwd\",\n \"privatekey\",\n \"private_key\",\n \"token\",\n \"ip_address\",\n \"session\",\n # django\n \"csrftoken\",\n \"sessionid\",\n # wsgi\n \"remote_addr\",\n \"x_csrftoken\",\n \"x_forwarded_for\",\n \"set_cookie\",\n \"cookie\",\n \"authorization\",\n \"x_api_key\",\n \"x_forwarded_for\",\n \"x_real_ip\",\n # other common names used in the wild\n \"aiohttp_session\", # aiohttp\n \"connect.sid\", # Express\n \"csrf_token\", # Pyramid\n \"csrf\", # (this is a cookie name used in accepted answers on stack overflow)\n \"_csrf\", # Express\n \"_csrf_token\", # Bottle\n \"PHPSESSID\", # PHP\n \"_session\", # Sanic\n \"symfony\", # Symfony\n \"user_session\", # Vue\n \"_xsrf\", # Tornado\n \"XSRF-TOKEN\", # Angular, Laravel\n]\n\n\nclass EventScrubber(object):\n def __init__(self, denylist=None, recursive=False):\n # type: (Optional[List[str]], bool) -> None\n self.denylist = 
DEFAULT_DENYLIST if denylist is None else denylist\n self.denylist = [x.lower() for x in self.denylist]\n self.recursive = recursive\n\n def scrub_list(self, lst):\n # type: (object) -> None\n \"\"\"\n If a list is passed to this method, the method recursively searches the list and any\n nested lists for any dictionaries. The method calls scrub_dict on all dictionaries\n it finds.\n If the parameter passed to this method is not a list, the method does nothing.\n \"\"\"\n if not isinstance(lst, list):\n return\n\n for v in lst:\n self.scrub_dict(v) # no-op unless v is a dict\n self.scrub_list(v) # no-op unless v is a list\n\n def scrub_dict(self, d):\n # type: (object) -> None\n \"\"\"\n If a dictionary is passed to this method, the method scrubs the dictionary of any\n sensitive data. The method calls itself recursively on any nested dictionaries (\n including dictionaries nested in lists) if self.recursive is True.\n This method does nothing if the parameter passed to it is not a dictionary.\n \"\"\"\n if not isinstance(d, dict):\n return\n\n for k, v in d.items():\n # The cast is needed because mypy is not smart enough to figure out that k must be a\n # string after the isinstance check.\n if isinstance(k, str) and k.lower() in self.denylist:\n d[k] = AnnotatedValue.substituted_because_contains_sensitive_data()\n elif self.recursive:\n self.scrub_dict(v) # no-op unless v is a dict\n self.scrub_list(v) # no-op unless v is a list\n\n def scrub_request(self, event):\n # type: (Event) -> None\n with capture_internal_exceptions():\n if \"request\" in event:\n if \"headers\" in event[\"request\"]:\n self.scrub_dict(event[\"request\"][\"headers\"])\n if \"cookies\" in event[\"request\"]:\n self.scrub_dict(event[\"request\"][\"cookies\"])\n if \"data\" in event[\"request\"]:\n self.scrub_dict(event[\"request\"][\"data\"])\n\n def scrub_extra(self, event):\n # type: (Event) -> None\n with capture_internal_exceptions():\n if \"extra\" in event:\n self.scrub_dict(event[\"extra\"])\n\n def scrub_user(self, event):\n # type: (Event) -> None\n with capture_internal_exceptions():\n if \"user\" in event:\n self.scrub_dict(event[\"user\"])\n\n def scrub_breadcrumbs(self, event):\n # type: (Event) -> None\n with capture_internal_exceptions():\n if \"breadcrumbs\" in event:\n if \"values\" in event[\"breadcrumbs\"]:\n for value in event[\"breadcrumbs\"][\"values\"]:\n if \"data\" in value:\n self.scrub_dict(value[\"data\"])\n\n def scrub_frames(self, event):\n # type: (Event) -> None\n with capture_internal_exceptions():\n for frame in iter_event_frames(event):\n if \"vars\" in frame:\n self.scrub_dict(frame[\"vars\"])\n\n def scrub_spans(self, event):\n # type: (Event) -> None\n with capture_internal_exceptions():\n if \"spans\" in event:\n for span in event[\"spans\"]:\n if \"data\" in span:\n self.scrub_dict(span[\"data\"])\n\n def scrub_event(self, event):\n # type: (Event) -> None\n self.scrub_request(event)\n self.scrub_extra(event)\n self.scrub_user(event)\n self.scrub_breadcrumbs(event)\n self.scrub_frames(event)\n self.scrub_spans(event)\n", "path": "sentry_sdk/scrubber.py"}]}
| 1,972 | 188 |
gh_patches_debug_22762
|
rasdani/github-patches
|
git_diff
|
quantumlib__Cirq-4777
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[cirq_web 3D circuit] X**0.5 and Y**0.5 look the same as X and Y
**Description of the issue**
The default look for `X**0.5` and `Y**0.5` look the same as `X` and `Y` in the 3D circuit.
**How to reproduce the issue**
```
import cirq_web
a, b, c, d = cirq.GridQubit.rect(2, 2)
cirq_web.Circuit3D(cirq.Circuit(cirq.X(a)**0.5, cirq.Y(b)**0.5, cirq.X(c), cirq.Y(d))).generate_html_file(
file_name="hello.html")
```

**Cirq version**
You can get the cirq version by printing `cirq.__version__`. From the command line:
0.13.1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cirq-web/cirq_web/circuits/symbols.py`
Content:
```
1 # Copyright 2021 The Cirq Developers
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 # This is more of a placeholder for now, we can add
16 # official color schemes in follow-ups.
17 import abc
18 import dataclasses
19 from typing import Iterable, List, Optional
20 import cirq
21
22
23 @dataclasses.dataclass
24 class SymbolInfo:
25 """Organizes information about a symbol."""
26
27 labels: List[str]
28 colors: List[str]
29
30 @staticmethod
31 def unknown_operation(num_qubits: int) -> 'SymbolInfo':
32 """Generates a SymbolInfo object for an unknown operation.
33
34 Args:
35 num_qubits: the number of qubits in the operation
36 """
37 symbol_info = SymbolInfo([], [])
38 for _ in range(num_qubits):
39 symbol_info.colors.append('gray')
40 symbol_info.labels.append('?')
41 return symbol_info
42
43
44 class SymbolResolver(metaclass=abc.ABCMeta):
45 """Abstract class providing the interface for users to specify information
46 about how a particular symbol should be displayed in the 3D circuit
47 """
48
49 def __call__(self, operation: cirq.Operation) -> Optional[SymbolInfo]:
50 return self.resolve(operation)
51
52 @abc.abstractmethod
53 def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]:
54 """Converts cirq.Operation objects into SymbolInfo objects for serialization."""
55
56
57 class DefaultResolver(SymbolResolver):
58 """Default symbol resolver implementation. Takes information
59 from circuit_diagram_info, if unavailable, returns information representing
60 an unknown symbol.
61 """
62
63 _SYMBOL_COLORS = {
64 '@': 'black',
65 'H': 'yellow',
66 'I': 'orange',
67 'X': 'black',
68 'Y': 'pink',
69 'Z': 'cyan',
70 'S': '#90EE90',
71 'T': '#CBC3E3',
72 }
73
74 def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]:
75 """Checks for the _circuit_diagram_info attribute of the operation,
76 and if it exists, build the symbol information from it. Otherwise,
77 builds symbol info for an unknown operation.
78
79 Args:
80 operation: the cirq.Operation object to resolve
81 """
82 try:
83 wire_symbols = cirq.circuit_diagram_info(operation).wire_symbols
84 except TypeError:
85 return SymbolInfo.unknown_operation(cirq.num_qubits(operation))
86
87 symbol_info = SymbolInfo(list(wire_symbols), [])
88 for symbol in wire_symbols:
89 symbol_info.colors.append(DefaultResolver._SYMBOL_COLORS.get(symbol, 'gray'))
90
91 return symbol_info
92
93
94 DEFAULT_SYMBOL_RESOLVERS: Iterable[SymbolResolver] = tuple([DefaultResolver()])
95
96
97 def resolve_operation(operation: cirq.Operation, resolvers: Iterable[SymbolResolver]) -> SymbolInfo:
98 """Builds a SymbolInfo object based off of a designated operation
99 and list of resolvers. The latest resolver takes precendent.
100
101 Args:
102 operation: the cirq.Operation object to resolve
103 resolvers: a list of SymbolResolvers which provides instructions
104 on how to build SymbolInfo objects.
105
106 Raises:
107 ValueError: if the operation cannot be resolved into a symbol.
108 """
109 symbol_info = None
110 for resolver in resolvers:
111 info = resolver(operation)
112 if info is not None:
113 symbol_info = info
114
115 if symbol_info is None:
116 raise ValueError(f'Cannot resolve operation: {operation}')
117 return symbol_info
118
119
120 class Operation3DSymbol:
121 def __init__(self, wire_symbols, location_info, color_info, moment):
122 """Gathers symbol information from an operation and builds an
123 object to represent it in 3D.
124
125 Args:
126 wire_symbols: a list of symbols taken from circuit_diagram_info()
127 that will be used to represent the operation in the 3D circuit.
128
129 location_info: A list of coordinates for each wire_symbol. The
130 index of the coordinate tuple in the location_info list must
131 correspond with the index of the symbol in the wire_symbols list.
132
133 color_info: a list representing the desired color of the symbol(s).
134 These will also correspond to index of the symbol in the
135 wire_symbols list.
136
137 moment: the moment where the symbol should be.
138 """
139 self.wire_symbols = wire_symbols
140 self.location_info = location_info
141 self.color_info = color_info
142 self.moment = moment
143
144 def to_typescript(self):
145 return {
146 'wire_symbols': list(self.wire_symbols),
147 'location_info': self.location_info,
148 'color_info': self.color_info,
149 'moment': self.moment,
150 }
151
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/cirq-web/cirq_web/circuits/symbols.py b/cirq-web/cirq_web/circuits/symbols.py
--- a/cirq-web/cirq_web/circuits/symbols.py
+++ b/cirq-web/cirq_web/circuits/symbols.py
@@ -18,6 +18,7 @@
 import dataclasses
 from typing import Iterable, List, Optional
 import cirq
+from cirq.protocols.circuit_diagram_info_protocol import CircuitDiagramInfoArgs
 
 
 @dataclasses.dataclass
@@ -80,11 +81,15 @@
             operation: the cirq.Operation object to resolve
         """
         try:
-            wire_symbols = cirq.circuit_diagram_info(operation).wire_symbols
+            info = cirq.circuit_diagram_info(operation)
         except TypeError:
             return SymbolInfo.unknown_operation(cirq.num_qubits(operation))
 
-        symbol_info = SymbolInfo(list(wire_symbols), [])
+        wire_symbols = info.wire_symbols
+        symbol_exponent = info._wire_symbols_including_formatted_exponent(
+            CircuitDiagramInfoArgs.UNINFORMED_DEFAULT
+        )
+        symbol_info = SymbolInfo(list(symbol_exponent), [])
         for symbol in wire_symbols:
             symbol_info.colors.append(DefaultResolver._SYMBOL_COLORS.get(symbol, 'gray'))
|
{"golden_diff": "diff --git a/cirq-web/cirq_web/circuits/symbols.py b/cirq-web/cirq_web/circuits/symbols.py\n--- a/cirq-web/cirq_web/circuits/symbols.py\n+++ b/cirq-web/cirq_web/circuits/symbols.py\n@@ -18,6 +18,7 @@\n import dataclasses\n from typing import Iterable, List, Optional\n import cirq\n+from cirq.protocols.circuit_diagram_info_protocol import CircuitDiagramInfoArgs\n \n \n @dataclasses.dataclass\n@@ -80,11 +81,15 @@\n operation: the cirq.Operation object to resolve\n \"\"\"\n try:\n- wire_symbols = cirq.circuit_diagram_info(operation).wire_symbols\n+ info = cirq.circuit_diagram_info(operation)\n except TypeError:\n return SymbolInfo.unknown_operation(cirq.num_qubits(operation))\n \n- symbol_info = SymbolInfo(list(wire_symbols), [])\n+ wire_symbols = info.wire_symbols\n+ symbol_exponent = info._wire_symbols_including_formatted_exponent(\n+ CircuitDiagramInfoArgs.UNINFORMED_DEFAULT\n+ )\n+ symbol_info = SymbolInfo(list(symbol_exponent), [])\n for symbol in wire_symbols:\n symbol_info.colors.append(DefaultResolver._SYMBOL_COLORS.get(symbol, 'gray'))\n", "issue": "[cirq_web 3D circuit] X**0.5 and Y**0.5 look the same as X and Y\n**Description of the issue**\r\n\r\nThe default look for `X**0.5` and `Y**0.5` look the same as `X` and `Y` in the 3D circuit.\r\n\r\n**How to reproduce the issue**\r\n\r\n```\r\nimport cirq_web\r\na, b, c, d = cirq.GridQubit.rect(2, 2)\r\ncirq_web.Circuit3D(cirq.Circuit(cirq.X(a)**0.5, cirq.Y(b)**0.5, cirq.X(c), cirq.Y(d))).generate_html_file(\r\n file_name=\"hello.html\")\r\n```\r\n\r\n\r\n\r\n\r\n**Cirq version**\r\nYou can get the cirq version by printing `cirq.__version__`. From the command line:\r\n\r\n0.13.1\r\n\n", "before_files": [{"content": "# Copyright 2021 The Cirq Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# This is more of a placeholder for now, we can add\n# official color schemes in follow-ups.\nimport abc\nimport dataclasses\nfrom typing import Iterable, List, Optional\nimport cirq\n\n\[email protected]\nclass SymbolInfo:\n \"\"\"Organizes information about a symbol.\"\"\"\n\n labels: List[str]\n colors: List[str]\n\n @staticmethod\n def unknown_operation(num_qubits: int) -> 'SymbolInfo':\n \"\"\"Generates a SymbolInfo object for an unknown operation.\n\n Args:\n num_qubits: the number of qubits in the operation\n \"\"\"\n symbol_info = SymbolInfo([], [])\n for _ in range(num_qubits):\n symbol_info.colors.append('gray')\n symbol_info.labels.append('?')\n return symbol_info\n\n\nclass SymbolResolver(metaclass=abc.ABCMeta):\n \"\"\"Abstract class providing the interface for users to specify information\n about how a particular symbol should be displayed in the 3D circuit\n \"\"\"\n\n def __call__(self, operation: cirq.Operation) -> Optional[SymbolInfo]:\n return self.resolve(operation)\n\n @abc.abstractmethod\n def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]:\n \"\"\"Converts cirq.Operation objects into SymbolInfo objects for serialization.\"\"\"\n\n\nclass DefaultResolver(SymbolResolver):\n 
\"\"\"Default symbol resolver implementation. Takes information\n from circuit_diagram_info, if unavailable, returns information representing\n an unknown symbol.\n \"\"\"\n\n _SYMBOL_COLORS = {\n '@': 'black',\n 'H': 'yellow',\n 'I': 'orange',\n 'X': 'black',\n 'Y': 'pink',\n 'Z': 'cyan',\n 'S': '#90EE90',\n 'T': '#CBC3E3',\n }\n\n def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]:\n \"\"\"Checks for the _circuit_diagram_info attribute of the operation,\n and if it exists, build the symbol information from it. Otherwise,\n builds symbol info for an unknown operation.\n\n Args:\n operation: the cirq.Operation object to resolve\n \"\"\"\n try:\n wire_symbols = cirq.circuit_diagram_info(operation).wire_symbols\n except TypeError:\n return SymbolInfo.unknown_operation(cirq.num_qubits(operation))\n\n symbol_info = SymbolInfo(list(wire_symbols), [])\n for symbol in wire_symbols:\n symbol_info.colors.append(DefaultResolver._SYMBOL_COLORS.get(symbol, 'gray'))\n\n return symbol_info\n\n\nDEFAULT_SYMBOL_RESOLVERS: Iterable[SymbolResolver] = tuple([DefaultResolver()])\n\n\ndef resolve_operation(operation: cirq.Operation, resolvers: Iterable[SymbolResolver]) -> SymbolInfo:\n \"\"\"Builds a SymbolInfo object based off of a designated operation\n and list of resolvers. The latest resolver takes precendent.\n\n Args:\n operation: the cirq.Operation object to resolve\n resolvers: a list of SymbolResolvers which provides instructions\n on how to build SymbolInfo objects.\n\n Raises:\n ValueError: if the operation cannot be resolved into a symbol.\n \"\"\"\n symbol_info = None\n for resolver in resolvers:\n info = resolver(operation)\n if info is not None:\n symbol_info = info\n\n if symbol_info is None:\n raise ValueError(f'Cannot resolve operation: {operation}')\n return symbol_info\n\n\nclass Operation3DSymbol:\n def __init__(self, wire_symbols, location_info, color_info, moment):\n \"\"\"Gathers symbol information from an operation and builds an\n object to represent it in 3D.\n\n Args:\n wire_symbols: a list of symbols taken from circuit_diagram_info()\n that will be used to represent the operation in the 3D circuit.\n\n location_info: A list of coordinates for each wire_symbol. 
The\n index of the coordinate tuple in the location_info list must\n correspond with the index of the symbol in the wire_symbols list.\n\n color_info: a list representing the desired color of the symbol(s).\n These will also correspond to index of the symbol in the\n wire_symbols list.\n\n moment: the moment where the symbol should be.\n \"\"\"\n self.wire_symbols = wire_symbols\n self.location_info = location_info\n self.color_info = color_info\n self.moment = moment\n\n def to_typescript(self):\n return {\n 'wire_symbols': list(self.wire_symbols),\n 'location_info': self.location_info,\n 'color_info': self.color_info,\n 'moment': self.moment,\n }\n", "path": "cirq-web/cirq_web/circuits/symbols.py"}], "after_files": [{"content": "# Copyright 2021 The Cirq Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# This is more of a placeholder for now, we can add\n# official color schemes in follow-ups.\nimport abc\nimport dataclasses\nfrom typing import Iterable, List, Optional\nimport cirq\nfrom cirq.protocols.circuit_diagram_info_protocol import CircuitDiagramInfoArgs\n\n\[email protected]\nclass SymbolInfo:\n \"\"\"Organizes information about a symbol.\"\"\"\n\n labels: List[str]\n colors: List[str]\n\n @staticmethod\n def unknown_operation(num_qubits: int) -> 'SymbolInfo':\n \"\"\"Generates a SymbolInfo object for an unknown operation.\n\n Args:\n num_qubits: the number of qubits in the operation\n \"\"\"\n symbol_info = SymbolInfo([], [])\n for _ in range(num_qubits):\n symbol_info.colors.append('gray')\n symbol_info.labels.append('?')\n return symbol_info\n\n\nclass SymbolResolver(metaclass=abc.ABCMeta):\n \"\"\"Abstract class providing the interface for users to specify information\n about how a particular symbol should be displayed in the 3D circuit\n \"\"\"\n\n def __call__(self, operation: cirq.Operation) -> Optional[SymbolInfo]:\n return self.resolve(operation)\n\n @abc.abstractmethod\n def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]:\n \"\"\"Converts cirq.Operation objects into SymbolInfo objects for serialization.\"\"\"\n\n\nclass DefaultResolver(SymbolResolver):\n \"\"\"Default symbol resolver implementation. Takes information\n from circuit_diagram_info, if unavailable, returns information representing\n an unknown symbol.\n \"\"\"\n\n _SYMBOL_COLORS = {\n '@': 'black',\n 'H': 'yellow',\n 'I': 'orange',\n 'X': 'black',\n 'Y': 'pink',\n 'Z': 'cyan',\n 'S': '#90EE90',\n 'T': '#CBC3E3',\n }\n\n def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]:\n \"\"\"Checks for the _circuit_diagram_info attribute of the operation,\n and if it exists, build the symbol information from it. 
Otherwise,\n builds symbol info for an unknown operation.\n\n Args:\n operation: the cirq.Operation object to resolve\n \"\"\"\n try:\n info = cirq.circuit_diagram_info(operation)\n except TypeError:\n return SymbolInfo.unknown_operation(cirq.num_qubits(operation))\n\n wire_symbols = info.wire_symbols\n symbol_exponent = info._wire_symbols_including_formatted_exponent(\n CircuitDiagramInfoArgs.UNINFORMED_DEFAULT\n )\n symbol_info = SymbolInfo(list(symbol_exponent), [])\n for symbol in wire_symbols:\n symbol_info.colors.append(DefaultResolver._SYMBOL_COLORS.get(symbol, 'gray'))\n\n return symbol_info\n\n\nDEFAULT_SYMBOL_RESOLVERS: Iterable[SymbolResolver] = tuple([DefaultResolver()])\n\n\ndef resolve_operation(operation: cirq.Operation, resolvers: Iterable[SymbolResolver]) -> SymbolInfo:\n \"\"\"Builds a SymbolInfo object based off of a designated operation\n and list of resolvers. The latest resolver takes precendent.\n\n Args:\n operation: the cirq.Operation object to resolve\n resolvers: a list of SymbolResolvers which provides instructions\n on how to build SymbolInfo objects.\n\n Raises:\n ValueError: if the operation cannot be resolved into a symbol.\n \"\"\"\n symbol_info = None\n for resolver in resolvers:\n info = resolver(operation)\n if info is not None:\n symbol_info = info\n\n if symbol_info is None:\n raise ValueError(f'Cannot resolve operation: {operation}')\n return symbol_info\n\n\nclass Operation3DSymbol:\n def __init__(self, wire_symbols, location_info, color_info, moment):\n \"\"\"Gathers symbol information from an operation and builds an\n object to represent it in 3D.\n\n Args:\n wire_symbols: a list of symbols taken from circuit_diagram_info()\n that will be used to represent the operation in the 3D circuit.\n\n location_info: A list of coordinates for each wire_symbol. The\n index of the coordinate tuple in the location_info list must\n correspond with the index of the symbol in the wire_symbols list.\n\n color_info: a list representing the desired color of the symbol(s).\n These will also correspond to index of the symbol in the\n wire_symbols list.\n\n moment: the moment where the symbol should be.\n \"\"\"\n self.wire_symbols = wire_symbols\n self.location_info = location_info\n self.color_info = color_info\n self.moment = moment\n\n def to_typescript(self):\n return {\n 'wire_symbols': list(self.wire_symbols),\n 'location_info': self.location_info,\n 'color_info': self.color_info,\n 'moment': self.moment,\n }\n", "path": "cirq-web/cirq_web/circuits/symbols.py"}]}
| 1,980 | 283 |
gh_patches_debug_279
|
rasdani/github-patches
|
git_diff
|
sanic-org__sanic-1292
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
New release on Pypi ?
Hello,
I was looking for a tool to autoreload my code when I develop and I found this commit : https://github.com/channelcat/sanic/commit/52c2a8484e6aa5fa13aaade49e1f2597dd006e15
So it seems Sanic already integrates it since December 07, 2017. But the the latest version on Pypi dates from the day before (https://github.com/channelcat/sanic/commit/1ea3ab7fe8ab03a6ddf4d75a3de8cb719f4c584c) : https://pypi.org/project/Sanic/#history
Is-it possible to release a new version on Pypi please ? Other features (like the UUID support in routes) are also interesting :)
Thanks in advance !
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sanic/__init__.py`
Content:
```
1 from sanic.app import Sanic
2 from sanic.blueprints import Blueprint
3
4 __version__ = '0.7.0'
5
6 __all__ = ['Sanic', 'Blueprint']
7
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sanic/__init__.py b/sanic/__init__.py
--- a/sanic/__init__.py
+++ b/sanic/__init__.py
@@ -1,6 +1,6 @@
 from sanic.app import Sanic
 from sanic.blueprints import Blueprint
 
-__version__ = '0.7.0'
+__version__ = '0.8.0'
 
 __all__ = ['Sanic', 'Blueprint']
|
{"golden_diff": "diff --git a/sanic/__init__.py b/sanic/__init__.py\n--- a/sanic/__init__.py\n+++ b/sanic/__init__.py\n@@ -1,6 +1,6 @@\n from sanic.app import Sanic\n from sanic.blueprints import Blueprint\n \n-__version__ = '0.7.0'\n+__version__ = '0.8.0'\n \n __all__ = ['Sanic', 'Blueprint']\n", "issue": "New release on Pypi ?\nHello,\r\n\r\nI was looking for a tool to autoreload my code when I develop and I found this commit : https://github.com/channelcat/sanic/commit/52c2a8484e6aa5fa13aaade49e1f2597dd006e15\r\n\r\nSo it seems Sanic already integrates it since December 07, 2017. But the the latest version on Pypi dates from the day before (https://github.com/channelcat/sanic/commit/1ea3ab7fe8ab03a6ddf4d75a3de8cb719f4c584c) : https://pypi.org/project/Sanic/#history\r\n\r\nIs-it possible to release a new version on Pypi please ? Other features (like the UUID support in routes) are also interesting :)\r\n\r\nThanks in advance !\n", "before_files": [{"content": "from sanic.app import Sanic\nfrom sanic.blueprints import Blueprint\n\n__version__ = '0.7.0'\n\n__all__ = ['Sanic', 'Blueprint']\n", "path": "sanic/__init__.py"}], "after_files": [{"content": "from sanic.app import Sanic\nfrom sanic.blueprints import Blueprint\n\n__version__ = '0.8.0'\n\n__all__ = ['Sanic', 'Blueprint']\n", "path": "sanic/__init__.py"}]}
| 504 | 98 |
gh_patches_debug_4424
|
rasdani/github-patches
|
git_diff
|
mozilla__bugbug-2654
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Replace typing.Dict with dict
It is now possible to use `dict` directly instead of `typing.Dict` in type definitions.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # This Source Code Form is subject to the terms of the Mozilla Public
3 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
4 # You can obtain one at http://mozilla.org/MPL/2.0/.
5
6 import os
7
8 from setuptools import find_packages, setup
9
10 here = os.path.dirname(__file__)
11
12
13 def read_requirements(file_):
14 with open(os.path.join(here, file_)) as f:
15 return sorted(list(set(line.split("#")[0].strip() for line in f)))
16
17
18 install_requires = read_requirements("requirements.txt")
19
20
21 with open(os.path.join(here, "VERSION")) as f:
22 version = f.read().strip()
23
24 # Read the extra requirements
25 extras = ["nlp", "nn"]
26
27 extras_require = {}
28
29 for extra in extras:
30 extras_require[extra] = read_requirements("extra-%s-requirements.txt" % extra)
31
32
33 setup(
34 name="bugbug",
35 version=version,
36 description="ML tools for Mozilla projects",
37 author="Marco Castelluccio",
38 author_email="[email protected]",
39 install_requires=install_requires,
40 extras_require=extras_require,
41 packages=find_packages(exclude=["contrib", "docs", "tests"]),
42 include_package_data=True,
43 license="MPL2",
44 entry_points={
45 "console_scripts": [
46 "bugbug-data-commits = scripts.commit_retriever:main",
47 "bugbug-data-bugzilla = scripts.bug_retriever:main",
48 "bugbug-data-test-scheduling-history = scripts.test_scheduling_history_retriever:main",
49 "bugbug-data-revisions = scripts.revision_retriever:main",
50 "bugbug-train = scripts.trainer:main",
51 "bugbug-train-similarity = scripts.similarity_trainer:main",
52 "bugbug-check = scripts.check:main",
53 "bugbug-microannotate-generate = scripts.microannotate_generator:main",
54 "bugbug-classify-commit = scripts.commit_classifier:main",
55 "bugbug-classify-bug = scripts.bug_classifier:main",
56 "bugbug-regressor-finder = scripts.regressor_finder:main",
57 "bugbug-retrieve-training-metrics = scripts.retrieve_training_metrics:main",
58 "bugbug-analyze-training-metrics = scripts.analyze_training_metrics:main",
59 "bugbug-check-all-metrics = scripts.check_all_metrics:main",
60 "bugbug-past-bugs-by-unit = scripts.past_bugs_by_unit:main",
61 "bugbug-testing-policy-stats = scripts.testing_policy_stats:main",
62 "bugbug-generate-landings-risk-report = scripts.generate_landings_risk_report:main",
63 "bugbug-shadow-scheduler-stats = scripts.shadow_scheduler_stats:main",
64 "bugbug-data-github = scripts.github_issue_retriever:main",
65 ]
66 },
67 classifiers=[
68 "Programming Language :: Python :: 3.7",
69 "Programming Language :: Python :: 3.8",
70 "Programming Language :: Python :: 3.9",
71 "Programming Language :: Python :: 3 :: Only",
72 "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
73 ],
74 )
75
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -65,8 +65,6 @@
         ]
     },
     classifiers=[
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3 :: Only",
         "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -65,8 +65,6 @@\n ]\n },\n classifiers=[\n- \"Programming Language :: Python :: 3.7\",\n- \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)\",\n", "issue": "Replace typing.Dict with dict\nIt is now possible to use `dict` directly instead of `typing.Dict` in type definitions.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this file,\n# You can obtain one at http://mozilla.org/MPL/2.0/.\n\nimport os\n\nfrom setuptools import find_packages, setup\n\nhere = os.path.dirname(__file__)\n\n\ndef read_requirements(file_):\n with open(os.path.join(here, file_)) as f:\n return sorted(list(set(line.split(\"#\")[0].strip() for line in f)))\n\n\ninstall_requires = read_requirements(\"requirements.txt\")\n\n\nwith open(os.path.join(here, \"VERSION\")) as f:\n version = f.read().strip()\n\n# Read the extra requirements\nextras = [\"nlp\", \"nn\"]\n\nextras_require = {}\n\nfor extra in extras:\n extras_require[extra] = read_requirements(\"extra-%s-requirements.txt\" % extra)\n\n\nsetup(\n name=\"bugbug\",\n version=version,\n description=\"ML tools for Mozilla projects\",\n author=\"Marco Castelluccio\",\n author_email=\"[email protected]\",\n install_requires=install_requires,\n extras_require=extras_require,\n packages=find_packages(exclude=[\"contrib\", \"docs\", \"tests\"]),\n include_package_data=True,\n license=\"MPL2\",\n entry_points={\n \"console_scripts\": [\n \"bugbug-data-commits = scripts.commit_retriever:main\",\n \"bugbug-data-bugzilla = scripts.bug_retriever:main\",\n \"bugbug-data-test-scheduling-history = scripts.test_scheduling_history_retriever:main\",\n \"bugbug-data-revisions = scripts.revision_retriever:main\",\n \"bugbug-train = scripts.trainer:main\",\n \"bugbug-train-similarity = scripts.similarity_trainer:main\",\n \"bugbug-check = scripts.check:main\",\n \"bugbug-microannotate-generate = scripts.microannotate_generator:main\",\n \"bugbug-classify-commit = scripts.commit_classifier:main\",\n \"bugbug-classify-bug = scripts.bug_classifier:main\",\n \"bugbug-regressor-finder = scripts.regressor_finder:main\",\n \"bugbug-retrieve-training-metrics = scripts.retrieve_training_metrics:main\",\n \"bugbug-analyze-training-metrics = scripts.analyze_training_metrics:main\",\n \"bugbug-check-all-metrics = scripts.check_all_metrics:main\",\n \"bugbug-past-bugs-by-unit = scripts.past_bugs_by_unit:main\",\n \"bugbug-testing-policy-stats = scripts.testing_policy_stats:main\",\n \"bugbug-generate-landings-risk-report = scripts.generate_landings_risk_report:main\",\n \"bugbug-shadow-scheduler-stats = scripts.shadow_scheduler_stats:main\",\n \"bugbug-data-github = scripts.github_issue_retriever:main\",\n ]\n },\n classifiers=[\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)\",\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. 
If a copy of the MPL was not distributed with this file,\n# You can obtain one at http://mozilla.org/MPL/2.0/.\n\nimport os\n\nfrom setuptools import find_packages, setup\n\nhere = os.path.dirname(__file__)\n\n\ndef read_requirements(file_):\n with open(os.path.join(here, file_)) as f:\n return sorted(list(set(line.split(\"#\")[0].strip() for line in f)))\n\n\ninstall_requires = read_requirements(\"requirements.txt\")\n\n\nwith open(os.path.join(here, \"VERSION\")) as f:\n version = f.read().strip()\n\n# Read the extra requirements\nextras = [\"nlp\", \"nn\"]\n\nextras_require = {}\n\nfor extra in extras:\n extras_require[extra] = read_requirements(\"extra-%s-requirements.txt\" % extra)\n\n\nsetup(\n name=\"bugbug\",\n version=version,\n description=\"ML tools for Mozilla projects\",\n author=\"Marco Castelluccio\",\n author_email=\"[email protected]\",\n install_requires=install_requires,\n extras_require=extras_require,\n packages=find_packages(exclude=[\"contrib\", \"docs\", \"tests\"]),\n include_package_data=True,\n license=\"MPL2\",\n entry_points={\n \"console_scripts\": [\n \"bugbug-data-commits = scripts.commit_retriever:main\",\n \"bugbug-data-bugzilla = scripts.bug_retriever:main\",\n \"bugbug-data-test-scheduling-history = scripts.test_scheduling_history_retriever:main\",\n \"bugbug-data-revisions = scripts.revision_retriever:main\",\n \"bugbug-train = scripts.trainer:main\",\n \"bugbug-train-similarity = scripts.similarity_trainer:main\",\n \"bugbug-check = scripts.check:main\",\n \"bugbug-microannotate-generate = scripts.microannotate_generator:main\",\n \"bugbug-classify-commit = scripts.commit_classifier:main\",\n \"bugbug-classify-bug = scripts.bug_classifier:main\",\n \"bugbug-regressor-finder = scripts.regressor_finder:main\",\n \"bugbug-retrieve-training-metrics = scripts.retrieve_training_metrics:main\",\n \"bugbug-analyze-training-metrics = scripts.analyze_training_metrics:main\",\n \"bugbug-check-all-metrics = scripts.check_all_metrics:main\",\n \"bugbug-past-bugs-by-unit = scripts.past_bugs_by_unit:main\",\n \"bugbug-testing-policy-stats = scripts.testing_policy_stats:main\",\n \"bugbug-generate-landings-risk-report = scripts.generate_landings_risk_report:main\",\n \"bugbug-shadow-scheduler-stats = scripts.shadow_scheduler_stats:main\",\n \"bugbug-data-github = scripts.github_issue_retriever:main\",\n ]\n },\n classifiers=[\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)\",\n ],\n)\n", "path": "setup.py"}]}
| 1,128 | 111 |
gh_patches_debug_24527
|
rasdani/github-patches
|
git_diff
|
cloud-custodian__cloud-custodian-7570
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
NoneType Issue
I dropped a yaml file into a new OU/SubOU and its not working, though it works in other OUs just fine. Nothing was changed in the file but I am still getting this error, not sure why.
```
Traceback (most recent call last):
File "/root/.pyenv/versions/3.9.12/bin/custodian", line 8, in <module>
sys.exit(main())
File "/root/.pyenv/versions/3.9.12/lib/python3.9/site-packages/c7n/cli.py", line 363, in main
command(config)
File "/root/.pyenv/versions/3.9.12/lib/python3.9/site-packages/c7n/commands.py", line 219, in validate
structure.validate(data)
File "/root/.pyenv/versions/3.9.12/lib/python3.9/site-packages/c7n/structure.py", line 48, in validate
self.validate_policy(p)
File "/root/.pyenv/versions/3.9.12/lib/python3.9/site-packages/c7n/structure.py", line 78, in validate_policy
for a in p.get('actions', ()):
TypeError: 'NoneType' object is not iterable
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `c7n/structure.py`
Content:
```
1 # Copyright The Cloud Custodian Authors.
2 # SPDX-License-Identifier: Apache-2.0
3
4 import json
5
6 from c7n.exceptions import PolicyValidationError
7
8
9 class StructureParser:
10 """Provide fast validation and inspection of a policy file.
11
12 Intent is to provide more humane validation for top level errors
13 instead of printing full schema as error message.
14 """
15 allowed_file_keys = {'vars', 'policies'}
16 required_policy_keys = {'name', 'resource'}
17 allowed_policy_keys = {'name', 'resource', 'title', 'description', 'mode',
18 'tags', 'max-resources', 'metadata', 'query',
19 'filters', 'actions', 'source', 'conditions',
20 # legacy keys subject to deprecation.
21 'region', 'start', 'end', 'tz', 'max-resources-percent',
22 'comments', 'comment'}
23
24 def validate(self, data):
25 if not isinstance(data, dict):
26 raise PolicyValidationError((
27 "Policy file top level data structure "
28 "should be a mapping/dict, instead found:%s") % (
29 type(data).__name__))
30 dkeys = set(data.keys())
31
32 extra = dkeys.difference(self.allowed_file_keys)
33 if extra:
34 raise PolicyValidationError((
35 'Policy files top level keys are %s, found extra: %s' % (
36 ', '.join(self.allowed_file_keys),
37 ', '.join(extra))))
38
39 if 'policies' not in data:
40 raise PolicyValidationError("`policies` list missing")
41
42 pdata = data.get('policies', [])
43 if not isinstance(pdata, list):
44 raise PolicyValidationError((
45 '`policies` key should be an array/list found: %s' % (
46 type(pdata).__name__)))
47 for p in pdata:
48 self.validate_policy(p)
49
50 def validate_policy(self, p):
51 if not isinstance(p, dict):
52 raise PolicyValidationError((
53 'policy must be a dictionary/mapping found:%s policy:\n %s' % (
54 type(p).__name__, json.dumps(p, indent=2))))
55 pkeys = set(p)
56 if self.required_policy_keys.difference(pkeys):
57 raise PolicyValidationError(
58 'policy missing required keys (name, resource) data:\n %s' % (
59 json.dumps(p, indent=2)))
60 if pkeys.difference(self.allowed_policy_keys):
61 raise PolicyValidationError(
62 'policy:%s has unknown keys: %s' % (
63 p['name'], ','.join(pkeys.difference(self.allowed_policy_keys))))
64 if not isinstance(p.get('filters', []), (list, type(None))):
65 raise PolicyValidationError((
66 'policy:%s must use a list for filters found:%s' % (
67 p['name'], type(p['filters']).__name__)))
68 element_types = (dict, str)
69 for f in p.get('filters', ()):
70 if not isinstance(f, element_types):
71 raise PolicyValidationError((
72 'policy:%s filter must be a mapping/dict found:%s' % (
73 p.get('name', 'unknown'), type(f).__name__)))
74 if not isinstance(p.get('actions', []), (list, type(None))):
75 raise PolicyValidationError((
76 'policy:%s must use a list for actions found:%s' % (
77 p.get('name', 'unknown'), type(p['actions']).__name__)))
78 for a in p.get('actions', ()):
79 if not isinstance(a, element_types):
80 raise PolicyValidationError((
81 'policy:%s action must be a mapping/dict found:%s' % (
82 p.get('name', 'unknown'), type(a).__name__)))
83
84 def get_resource_types(self, data):
85 resources = set()
86 for p in data.get('policies', []):
87 rtype = p['resource']
88 if '.' not in rtype:
89 rtype = 'aws.%s' % rtype
90 resources.add(rtype)
91 return resources
92
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/c7n/structure.py b/c7n/structure.py
--- a/c7n/structure.py
+++ b/c7n/structure.py
@@ -66,7 +66,7 @@
                 'policy:%s must use a list for filters found:%s' % (
                     p['name'], type(p['filters']).__name__)))
         element_types = (dict, str)
-        for f in p.get('filters', ()):
+        for f in p.get('filters', ()) or []:
             if not isinstance(f, element_types):
                 raise PolicyValidationError((
                     'policy:%s filter must be a mapping/dict found:%s' % (
@@ -75,7 +75,7 @@
             raise PolicyValidationError((
                 'policy:%s must use a list for actions found:%s' % (
                     p.get('name', 'unknown'), type(p['actions']).__name__)))
-        for a in p.get('actions', ()):
+        for a in p.get('actions', ()) or []:
             if not isinstance(a, element_types):
                 raise PolicyValidationError((
                     'policy:%s action must be a mapping/dict found:%s' % (
|
{"golden_diff": "diff --git a/c7n/structure.py b/c7n/structure.py\n--- a/c7n/structure.py\n+++ b/c7n/structure.py\n@@ -66,7 +66,7 @@\n 'policy:%s must use a list for filters found:%s' % (\n p['name'], type(p['filters']).__name__)))\n element_types = (dict, str)\n- for f in p.get('filters', ()):\n+ for f in p.get('filters', ()) or []:\n if not isinstance(f, element_types):\n raise PolicyValidationError((\n 'policy:%s filter must be a mapping/dict found:%s' % (\n@@ -75,7 +75,7 @@\n raise PolicyValidationError((\n 'policy:%s must use a list for actions found:%s' % (\n p.get('name', 'unknown'), type(p['actions']).__name__)))\n- for a in p.get('actions', ()):\n+ for a in p.get('actions', ()) or []:\n if not isinstance(a, element_types):\n raise PolicyValidationError((\n 'policy:%s action must be a mapping/dict found:%s' % (\n", "issue": "NoneType Issue\nI dropped a yaml file into a new OU/SubOU and its not working, though it works in other OUs just fine. Nothing was changed in the file but I am still getting this error, not sure why.\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"/root/.pyenv/versions/3.9.12/bin/custodian\", line 8, in <module>\r\n sys.exit(main())\r\n File \"/root/.pyenv/versions/3.9.12/lib/python3.9/site-packages/c7n/cli.py\", line 363, in main\r\n command(config)\r\n File \"/root/.pyenv/versions/3.9.12/lib/python3.9/site-packages/c7n/commands.py\", line 219, in validate\r\n structure.validate(data)\r\n File \"/root/.pyenv/versions/3.9.12/lib/python3.9/site-packages/c7n/structure.py\", line 48, in validate\r\n self.validate_policy(p)\r\n File \"/root/.pyenv/versions/3.9.12/lib/python3.9/site-packages/c7n/structure.py\", line 78, in validate_policy\r\n for a in p.get('actions', ()):\r\nTypeError: 'NoneType' object is not iterable\r\n```\n", "before_files": [{"content": "# Copyright The Cloud Custodian Authors.\n# SPDX-License-Identifier: Apache-2.0\n\nimport json\n\nfrom c7n.exceptions import PolicyValidationError\n\n\nclass StructureParser:\n \"\"\"Provide fast validation and inspection of a policy file.\n\n Intent is to provide more humane validation for top level errors\n instead of printing full schema as error message.\n \"\"\"\n allowed_file_keys = {'vars', 'policies'}\n required_policy_keys = {'name', 'resource'}\n allowed_policy_keys = {'name', 'resource', 'title', 'description', 'mode',\n 'tags', 'max-resources', 'metadata', 'query',\n 'filters', 'actions', 'source', 'conditions',\n # legacy keys subject to deprecation.\n 'region', 'start', 'end', 'tz', 'max-resources-percent',\n 'comments', 'comment'}\n\n def validate(self, data):\n if not isinstance(data, dict):\n raise PolicyValidationError((\n \"Policy file top level data structure \"\n \"should be a mapping/dict, instead found:%s\") % (\n type(data).__name__))\n dkeys = set(data.keys())\n\n extra = dkeys.difference(self.allowed_file_keys)\n if extra:\n raise PolicyValidationError((\n 'Policy files top level keys are %s, found extra: %s' % (\n ', '.join(self.allowed_file_keys),\n ', '.join(extra))))\n\n if 'policies' not in data:\n raise PolicyValidationError(\"`policies` list missing\")\n\n pdata = data.get('policies', [])\n if not isinstance(pdata, list):\n raise PolicyValidationError((\n '`policies` key should be an array/list found: %s' % (\n type(pdata).__name__)))\n for p in pdata:\n self.validate_policy(p)\n\n def validate_policy(self, p):\n if not isinstance(p, dict):\n raise PolicyValidationError((\n 'policy must be a dictionary/mapping found:%s policy:\\n %s' % (\n type(p).__name__, 
json.dumps(p, indent=2))))\n pkeys = set(p)\n if self.required_policy_keys.difference(pkeys):\n raise PolicyValidationError(\n 'policy missing required keys (name, resource) data:\\n %s' % (\n json.dumps(p, indent=2)))\n if pkeys.difference(self.allowed_policy_keys):\n raise PolicyValidationError(\n 'policy:%s has unknown keys: %s' % (\n p['name'], ','.join(pkeys.difference(self.allowed_policy_keys))))\n if not isinstance(p.get('filters', []), (list, type(None))):\n raise PolicyValidationError((\n 'policy:%s must use a list for filters found:%s' % (\n p['name'], type(p['filters']).__name__)))\n element_types = (dict, str)\n for f in p.get('filters', ()):\n if not isinstance(f, element_types):\n raise PolicyValidationError((\n 'policy:%s filter must be a mapping/dict found:%s' % (\n p.get('name', 'unknown'), type(f).__name__)))\n if not isinstance(p.get('actions', []), (list, type(None))):\n raise PolicyValidationError((\n 'policy:%s must use a list for actions found:%s' % (\n p.get('name', 'unknown'), type(p['actions']).__name__)))\n for a in p.get('actions', ()):\n if not isinstance(a, element_types):\n raise PolicyValidationError((\n 'policy:%s action must be a mapping/dict found:%s' % (\n p.get('name', 'unknown'), type(a).__name__)))\n\n def get_resource_types(self, data):\n resources = set()\n for p in data.get('policies', []):\n rtype = p['resource']\n if '.' not in rtype:\n rtype = 'aws.%s' % rtype\n resources.add(rtype)\n return resources\n", "path": "c7n/structure.py"}], "after_files": [{"content": "# Copyright The Cloud Custodian Authors.\n# SPDX-License-Identifier: Apache-2.0\n\nimport json\n\nfrom c7n.exceptions import PolicyValidationError\n\n\nclass StructureParser:\n \"\"\"Provide fast validation and inspection of a policy file.\n\n Intent is to provide more humane validation for top level errors\n instead of printing full schema as error message.\n \"\"\"\n allowed_file_keys = {'vars', 'policies'}\n required_policy_keys = {'name', 'resource'}\n allowed_policy_keys = {'name', 'resource', 'title', 'description', 'mode',\n 'tags', 'max-resources', 'metadata', 'query',\n 'filters', 'actions', 'source', 'conditions',\n # legacy keys subject to deprecation.\n 'region', 'start', 'end', 'tz', 'max-resources-percent',\n 'comments', 'comment'}\n\n def validate(self, data):\n if not isinstance(data, dict):\n raise PolicyValidationError((\n \"Policy file top level data structure \"\n \"should be a mapping/dict, instead found:%s\") % (\n type(data).__name__))\n dkeys = set(data.keys())\n\n extra = dkeys.difference(self.allowed_file_keys)\n if extra:\n raise PolicyValidationError((\n 'Policy files top level keys are %s, found extra: %s' % (\n ', '.join(self.allowed_file_keys),\n ', '.join(extra))))\n\n if 'policies' not in data:\n raise PolicyValidationError(\"`policies` list missing\")\n\n pdata = data.get('policies', [])\n if not isinstance(pdata, list):\n raise PolicyValidationError((\n '`policies` key should be an array/list found: %s' % (\n type(pdata).__name__)))\n for p in pdata:\n self.validate_policy(p)\n\n def validate_policy(self, p):\n if not isinstance(p, dict):\n raise PolicyValidationError((\n 'policy must be a dictionary/mapping found:%s policy:\\n %s' % (\n type(p).__name__, json.dumps(p, indent=2))))\n pkeys = set(p)\n if self.required_policy_keys.difference(pkeys):\n raise PolicyValidationError(\n 'policy missing required keys (name, resource) data:\\n %s' % (\n json.dumps(p, indent=2)))\n if pkeys.difference(self.allowed_policy_keys):\n raise PolicyValidationError(\n 
'policy:%s has unknown keys: %s' % (\n p['name'], ','.join(pkeys.difference(self.allowed_policy_keys))))\n if not isinstance(p.get('filters', []), (list, type(None))):\n raise PolicyValidationError((\n 'policy:%s must use a list for filters found:%s' % (\n p['name'], type(p['filters']).__name__)))\n element_types = (dict, str)\n for f in p.get('filters', ()) or []:\n if not isinstance(f, element_types):\n raise PolicyValidationError((\n 'policy:%s filter must be a mapping/dict found:%s' % (\n p.get('name', 'unknown'), type(f).__name__)))\n if not isinstance(p.get('actions', []), (list, type(None))):\n raise PolicyValidationError((\n 'policy:%s must use a list for actions found:%s' % (\n p.get('name', 'unknown'), type(p['actions']).__name__)))\n for a in p.get('actions', ()) or []:\n if not isinstance(a, element_types):\n raise PolicyValidationError((\n 'policy:%s action must be a mapping/dict found:%s' % (\n p.get('name', 'unknown'), type(a).__name__)))\n\n def get_resource_types(self, data):\n resources = set()\n for p in data.get('policies', []):\n rtype = p['resource']\n if '.' not in rtype:\n rtype = 'aws.%s' % rtype\n resources.add(rtype)\n return resources\n", "path": "c7n/structure.py"}]}
| 1,557 | 254 |
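The `or []` guard in the record above is easy to reproduce outside Cloud Custodian. A minimal sketch, assuming only that a YAML loader turned an empty `actions:` / `filters:` key into `None` (the policy dict below is illustrative, not a real policy file):

```python
# A key that is present but empty: dict.get() returns None, not the default.
policy = {"name": "example", "resource": "ec2", "filters": None, "actions": None}

try:
    for action in policy.get("actions", ()):       # pre-patch loop
        pass
except TypeError as exc:
    print("unpatched loop fails:", exc)             # 'NoneType' object is not iterable

for action in policy.get("actions", ()) or []:      # patched loop from the diff
    pass
print("patched loop treats an empty 'actions:' key as no actions")
```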
gh_patches_debug_7875
|
rasdani/github-patches
|
git_diff
|
getsentry__sentry-python-875
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Incorrect parsing of complex urls in django
Sentry is parsing a complex URL as `/api/{version})/log` instead of `/api/{version}/log`.
<img width="207" alt="Screenshot 2020-10-17 at 10 40 47 AM" src="https://user-images.githubusercontent.com/4463796/96328987-70cb1c80-1066-11eb-94a4-ff8e15fb81ed.png">
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sentry_sdk/integrations/django/transactions.py`
Content:
```
1 """
2 Copied from raven-python. Used for
3 `DjangoIntegration(transaction_fron="raven_legacy")`.
4 """
5
6 from __future__ import absolute_import
7
8 import re
9
10 from sentry_sdk._types import MYPY
11
12 if MYPY:
13 from django.urls.resolvers import URLResolver
14 from typing import Dict
15 from typing import List
16 from typing import Optional
17 from django.urls.resolvers import URLPattern
18 from typing import Tuple
19 from typing import Union
20 from re import Pattern
21
22 try:
23 from django.urls import get_resolver
24 except ImportError:
25 from django.core.urlresolvers import get_resolver
26
27
28 def get_regex(resolver_or_pattern):
29 # type: (Union[URLPattern, URLResolver]) -> Pattern[str]
30 """Utility method for django's deprecated resolver.regex"""
31 try:
32 regex = resolver_or_pattern.regex
33 except AttributeError:
34 regex = resolver_or_pattern.pattern.regex
35 return regex
36
37
38 class RavenResolver(object):
39 _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
40 _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)")
41 _non_named_group_matcher = re.compile(r"\([^\)]+\)")
42 # [foo|bar|baz]
43 _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]")
44 _camel_re = re.compile(r"([A-Z]+)([a-z])")
45
46 _cache = {} # type: Dict[URLPattern, str]
47
48 def _simplify(self, pattern):
49 # type: (str) -> str
50 r"""
51 Clean up urlpattern regexes into something readable by humans:
52
53 From:
54 > "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
55
56 To:
57 > "{sport_slug}/athletes/{athlete_slug}/"
58 """
59 # remove optional params
60 # TODO(dcramer): it'd be nice to change these into [%s] but it currently
61 # conflicts with the other rules because we're doing regexp matches
62 # rather than parsing tokens
63 result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), pattern)
64
65 # handle named groups first
66 result = self._named_group_matcher.sub(lambda m: "{%s}" % m.group(1), result)
67
68 # handle non-named groups
69 result = self._non_named_group_matcher.sub("{var}", result)
70
71 # handle optional params
72 result = self._either_option_matcher.sub(lambda m: m.group(1), result)
73
74 # clean up any outstanding regex-y characters.
75 result = (
76 result.replace("^", "")
77 .replace("$", "")
78 .replace("?", "")
79 .replace("//", "/")
80 .replace("\\", "")
81 )
82
83 return result
84
85 def _resolve(self, resolver, path, parents=None):
86 # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str]
87
88 match = get_regex(resolver).search(path) # Django < 2.0
89
90 if not match:
91 return None
92
93 if parents is None:
94 parents = [resolver]
95 elif resolver not in parents:
96 parents = parents + [resolver]
97
98 new_path = path[match.end() :]
99 for pattern in resolver.url_patterns:
100 # this is an include()
101 if not pattern.callback:
102 match_ = self._resolve(pattern, new_path, parents)
103 if match_:
104 return match_
105 continue
106 elif not get_regex(pattern).search(new_path):
107 continue
108
109 try:
110 return self._cache[pattern]
111 except KeyError:
112 pass
113
114 prefix = "".join(self._simplify(get_regex(p).pattern) for p in parents)
115 result = prefix + self._simplify(get_regex(pattern).pattern)
116 if not result.startswith("/"):
117 result = "/" + result
118 self._cache[pattern] = result
119 return result
120
121 return None
122
123 def resolve(
124 self,
125 path, # type: str
126 urlconf=None, # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]
127 ):
128 # type: (...) -> str
129 resolver = get_resolver(urlconf)
130 match = self._resolve(resolver, path)
131 return match or path
132
133
134 LEGACY_RESOLVER = RavenResolver()
135
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py
--- a/sentry_sdk/integrations/django/transactions.py
+++ b/sentry_sdk/integrations/django/transactions.py
@@ -37,7 +37,7 @@
class RavenResolver(object):
_optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
- _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)")
+ _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)+")
_non_named_group_matcher = re.compile(r"\([^\)]+\)")
# [foo|bar|baz]
_either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]")
|
{"golden_diff": "diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py\n--- a/sentry_sdk/integrations/django/transactions.py\n+++ b/sentry_sdk/integrations/django/transactions.py\n@@ -37,7 +37,7 @@\n \n class RavenResolver(object):\n _optional_group_matcher = re.compile(r\"\\(\\?\\:([^\\)]+)\\)\")\n- _named_group_matcher = re.compile(r\"\\(\\?P<(\\w+)>[^\\)]+\\)\")\n+ _named_group_matcher = re.compile(r\"\\(\\?P<(\\w+)>[^\\)]+\\)+\")\n _non_named_group_matcher = re.compile(r\"\\([^\\)]+\\)\")\n # [foo|bar|baz]\n _either_option_matcher = re.compile(r\"\\[([^\\]]+)\\|([^\\]]+)\\]\")\n", "issue": "Incorrect parsing of complex urls in django\nSentry is parsing a complex URL as `/api/{version})/log` instead of `/api/{version}/log`.\r\n\r\n<img width=\"207\" alt=\"Screenshot 2020-10-17 at 10 40 47 AM\" src=\"https://user-images.githubusercontent.com/4463796/96328987-70cb1c80-1066-11eb-94a4-ff8e15fb81ed.png\">\r\n\n", "before_files": [{"content": "\"\"\"\nCopied from raven-python. Used for\n`DjangoIntegration(transaction_fron=\"raven_legacy\")`.\n\"\"\"\n\nfrom __future__ import absolute_import\n\nimport re\n\nfrom sentry_sdk._types import MYPY\n\nif MYPY:\n from django.urls.resolvers import URLResolver\n from typing import Dict\n from typing import List\n from typing import Optional\n from django.urls.resolvers import URLPattern\n from typing import Tuple\n from typing import Union\n from re import Pattern\n\ntry:\n from django.urls import get_resolver\nexcept ImportError:\n from django.core.urlresolvers import get_resolver\n\n\ndef get_regex(resolver_or_pattern):\n # type: (Union[URLPattern, URLResolver]) -> Pattern[str]\n \"\"\"Utility method for django's deprecated resolver.regex\"\"\"\n try:\n regex = resolver_or_pattern.regex\n except AttributeError:\n regex = resolver_or_pattern.pattern.regex\n return regex\n\n\nclass RavenResolver(object):\n _optional_group_matcher = re.compile(r\"\\(\\?\\:([^\\)]+)\\)\")\n _named_group_matcher = re.compile(r\"\\(\\?P<(\\w+)>[^\\)]+\\)\")\n _non_named_group_matcher = re.compile(r\"\\([^\\)]+\\)\")\n # [foo|bar|baz]\n _either_option_matcher = re.compile(r\"\\[([^\\]]+)\\|([^\\]]+)\\]\")\n _camel_re = re.compile(r\"([A-Z]+)([a-z])\")\n\n _cache = {} # type: Dict[URLPattern, str]\n\n def _simplify(self, pattern):\n # type: (str) -> str\n r\"\"\"\n Clean up urlpattern regexes into something readable by humans:\n\n From:\n > \"^(?P<sport_slug>\\w+)/athletes/(?P<athlete_slug>\\w+)/$\"\n\n To:\n > \"{sport_slug}/athletes/{athlete_slug}/\"\n \"\"\"\n # remove optional params\n # TODO(dcramer): it'd be nice to change these into [%s] but it currently\n # conflicts with the other rules because we're doing regexp matches\n # rather than parsing tokens\n result = self._optional_group_matcher.sub(lambda m: \"%s\" % m.group(1), pattern)\n\n # handle named groups first\n result = self._named_group_matcher.sub(lambda m: \"{%s}\" % m.group(1), result)\n\n # handle non-named groups\n result = self._non_named_group_matcher.sub(\"{var}\", result)\n\n # handle optional params\n result = self._either_option_matcher.sub(lambda m: m.group(1), result)\n\n # clean up any outstanding regex-y characters.\n result = (\n result.replace(\"^\", \"\")\n .replace(\"$\", \"\")\n .replace(\"?\", \"\")\n .replace(\"//\", \"/\")\n .replace(\"\\\\\", \"\")\n )\n\n return result\n\n def _resolve(self, resolver, path, parents=None):\n # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str]\n\n match = 
get_regex(resolver).search(path) # Django < 2.0\n\n if not match:\n return None\n\n if parents is None:\n parents = [resolver]\n elif resolver not in parents:\n parents = parents + [resolver]\n\n new_path = path[match.end() :]\n for pattern in resolver.url_patterns:\n # this is an include()\n if not pattern.callback:\n match_ = self._resolve(pattern, new_path, parents)\n if match_:\n return match_\n continue\n elif not get_regex(pattern).search(new_path):\n continue\n\n try:\n return self._cache[pattern]\n except KeyError:\n pass\n\n prefix = \"\".join(self._simplify(get_regex(p).pattern) for p in parents)\n result = prefix + self._simplify(get_regex(pattern).pattern)\n if not result.startswith(\"/\"):\n result = \"/\" + result\n self._cache[pattern] = result\n return result\n\n return None\n\n def resolve(\n self,\n path, # type: str\n urlconf=None, # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]\n ):\n # type: (...) -> str\n resolver = get_resolver(urlconf)\n match = self._resolve(resolver, path)\n return match or path\n\n\nLEGACY_RESOLVER = RavenResolver()\n", "path": "sentry_sdk/integrations/django/transactions.py"}], "after_files": [{"content": "\"\"\"\nCopied from raven-python. Used for\n`DjangoIntegration(transaction_fron=\"raven_legacy\")`.\n\"\"\"\n\nfrom __future__ import absolute_import\n\nimport re\n\nfrom sentry_sdk._types import MYPY\n\nif MYPY:\n from django.urls.resolvers import URLResolver\n from typing import Dict\n from typing import List\n from typing import Optional\n from django.urls.resolvers import URLPattern\n from typing import Tuple\n from typing import Union\n from re import Pattern\n\ntry:\n from django.urls import get_resolver\nexcept ImportError:\n from django.core.urlresolvers import get_resolver\n\n\ndef get_regex(resolver_or_pattern):\n # type: (Union[URLPattern, URLResolver]) -> Pattern[str]\n \"\"\"Utility method for django's deprecated resolver.regex\"\"\"\n try:\n regex = resolver_or_pattern.regex\n except AttributeError:\n regex = resolver_or_pattern.pattern.regex\n return regex\n\n\nclass RavenResolver(object):\n _optional_group_matcher = re.compile(r\"\\(\\?\\:([^\\)]+)\\)\")\n _named_group_matcher = re.compile(r\"\\(\\?P<(\\w+)>[^\\)]+\\)+\")\n _non_named_group_matcher = re.compile(r\"\\([^\\)]+\\)\")\n # [foo|bar|baz]\n _either_option_matcher = re.compile(r\"\\[([^\\]]+)\\|([^\\]]+)\\]\")\n _camel_re = re.compile(r\"([A-Z]+)([a-z])\")\n\n _cache = {} # type: Dict[URLPattern, str]\n\n def _simplify(self, pattern):\n # type: (str) -> str\n r\"\"\"\n Clean up urlpattern regexes into something readable by humans:\n\n From:\n > \"^(?P<sport_slug>\\w+)/athletes/(?P<athlete_slug>\\w+)/$\"\n\n To:\n > \"{sport_slug}/athletes/{athlete_slug}/\"\n \"\"\"\n # remove optional params\n # TODO(dcramer): it'd be nice to change these into [%s] but it currently\n # conflicts with the other rules because we're doing regexp matches\n # rather than parsing tokens\n result = self._optional_group_matcher.sub(lambda m: \"%s\" % m.group(1), pattern)\n\n # handle named groups first\n result = self._named_group_matcher.sub(lambda m: \"{%s}\" % m.group(1), result)\n\n # handle non-named groups\n result = self._non_named_group_matcher.sub(\"{var}\", result)\n\n # handle optional params\n result = self._either_option_matcher.sub(lambda m: m.group(1), result)\n\n # clean up any outstanding regex-y characters.\n result = (\n result.replace(\"^\", \"\")\n .replace(\"$\", \"\")\n .replace(\"?\", \"\")\n .replace(\"//\", \"/\")\n 
.replace(\"\\\\\", \"\")\n )\n\n return result\n\n def _resolve(self, resolver, path, parents=None):\n # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str]\n\n match = get_regex(resolver).search(path) # Django < 2.0\n\n if not match:\n return None\n\n if parents is None:\n parents = [resolver]\n elif resolver not in parents:\n parents = parents + [resolver]\n\n new_path = path[match.end() :]\n for pattern in resolver.url_patterns:\n # this is an include()\n if not pattern.callback:\n match_ = self._resolve(pattern, new_path, parents)\n if match_:\n return match_\n continue\n elif not get_regex(pattern).search(new_path):\n continue\n\n try:\n return self._cache[pattern]\n except KeyError:\n pass\n\n prefix = \"\".join(self._simplify(get_regex(p).pattern) for p in parents)\n result = prefix + self._simplify(get_regex(pattern).pattern)\n if not result.startswith(\"/\"):\n result = \"/\" + result\n self._cache[pattern] = result\n return result\n\n return None\n\n def resolve(\n self,\n path, # type: str\n urlconf=None, # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]\n ):\n # type: (...) -> str\n resolver = get_resolver(urlconf)\n match = self._resolve(resolver, path)\n return match or path\n\n\nLEGACY_RESOLVER = RavenResolver()\n", "path": "sentry_sdk/integrations/django/transactions.py"}]}
| 1,665 | 192 |
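The one-character change above (`\)` to `\)+`) only matters when a named group's pattern itself contains a nested group, which is what produces the stray parenthesis in `/api/{version})/log`. A quick stand-alone check, using a made-up urlpattern regex in place of the reporter's real route:

```python
import re

pattern = r"^api/(?P<version>(v1|v2))/log$"   # hypothetical route with a nested group

old = re.compile(r"\(\?P<(\w+)>[^\)]+\)")     # before the patch
new = re.compile(r"\(\?P<(\w+)>[^\)]+\)+")    # after the patch

print(old.sub(lambda m: "{%s}" % m.group(1), pattern))  # ^api/{version})/log$  (stray paren)
print(new.sub(lambda m: "{%s}" % m.group(1), pattern))  # ^api/{version}/log$
```

The leading `^` and trailing `$` are stripped later in `_simplify`, so in the reported transaction name only the leftover `)` differed.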
gh_patches_debug_5874
|
rasdani/github-patches
|
git_diff
|
python-poetry__poetry-1862
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Document the --no-root option
<!--
Hi there! Thank you for wanting to make Poetry better.
Before you submit this; let's make sure of a few things.
Please make sure the following boxes are ticked if they are correct.
If not, please try and fulfill these first.
-->
<!-- Checked checkbox should look like this: [x] -->
- [x] I have searched the [issues](https://github.com/python-poetry/poetry/issues) of this repo and believe that this is not a duplicate.
- [x] I have searched the [documentation](https://python-poetry.org/docs/) and believe that my question is not covered.
## Feature Request
<!-- Now feel free to write your idea for improvement. Thanks again 🙌 ❤️ -->
The `--no-root` option described in https://github.com/python-poetry/poetry/issues/1525 works fine for installation. Unfortunately I found it only when looking for duplicate issues before raising this. `poetry help install` does not describe that option.
Please add it to the `help install` output.
Document the --no-root option
<!--
Hi there! Thank you for wanting to make Poetry better.
Before you submit this; let's make sure of a few things.
Please make sure the following boxes are ticked if they are correct.
If not, please try and fulfill these first.
-->
<!-- Checked checkbox should look like this: [x] -->
- [x] I have searched the [issues](https://github.com/python-poetry/poetry/issues) of this repo and believe that this is not a duplicate.
- [x] I have searched the [documentation](https://python-poetry.org/docs/) and believe that my question is not covered.
## Feature Request
<!-- Now feel free to write your idea for improvement. Thanks again 🙌 ❤️ -->
The `--no-root` option described in https://github.com/python-poetry/poetry/issues/1525 works fine for installation. Unfortunately I found it only when looking for duplicate issues before raising this. `poetry help install` does not describe that option.
Please add it to the `help install` output.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `poetry/console/commands/install.py`
Content:
```
1 from cleo import option
2
3 from .env_command import EnvCommand
4
5
6 class InstallCommand(EnvCommand):
7
8 name = "install"
9 description = "Installs the project dependencies."
10
11 options = [
12 option("no-dev", None, "Do not install the development dependencies."),
13 option(
14 "no-root", None, "Do not install the root package (the current project)."
15 ),
16 option(
17 "dry-run",
18 None,
19 "Output the operations but do not execute anything "
20 "(implicitly enables --verbose).",
21 ),
22 option(
23 "extras",
24 "E",
25 "Extra sets of dependencies to install.",
26 flag=False,
27 multiple=True,
28 ),
29 ]
30
31 help = """The <info>install</info> command reads the <comment>poetry.lock</> file from
32 the current directory, processes it, and downloads and installs all the
33 libraries and dependencies outlined in that file. If the file does not
34 exist it will look for <comment>pyproject.toml</> and do the same.
35
36 <info>poetry install</info>
37 """
38
39 _loggers = ["poetry.repositories.pypi_repository"]
40
41 def handle(self):
42 from clikit.io import NullIO
43 from poetry.installation.installer import Installer
44 from poetry.masonry.builders import EditableBuilder
45 from poetry.masonry.utils.module import ModuleOrPackageNotFound
46
47 installer = Installer(
48 self.io, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool
49 )
50
51 extras = []
52 for extra in self.option("extras"):
53 if " " in extra:
54 extras += [e.strip() for e in extra.split(" ")]
55 else:
56 extras.append(extra)
57
58 installer.extras(extras)
59 installer.dev_mode(not self.option("no-dev"))
60 installer.dry_run(self.option("dry-run"))
61 installer.verbose(self.option("verbose"))
62
63 return_code = installer.run()
64
65 if return_code != 0:
66 return return_code
67
68 if self.option("no-root"):
69 return 0
70
71 try:
72 builder = EditableBuilder(self.poetry, self._env, NullIO())
73 except ModuleOrPackageNotFound:
74 # This is likely due to the fact that the project is an application
75 # not following the structure expected by Poetry
76 # If this is a true error it will be picked up later by build anyway.
77 return 0
78
79 self.line(
80 " - Installing <c1>{}</c1> (<b>{}</b>)".format(
81 self.poetry.package.pretty_name, self.poetry.package.pretty_version
82 )
83 )
84
85 if self.option("dry-run"):
86 return 0
87
88 builder.build()
89
90 return 0
91
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/poetry/console/commands/install.py b/poetry/console/commands/install.py
--- a/poetry/console/commands/install.py
+++ b/poetry/console/commands/install.py
@@ -34,6 +34,12 @@
exist it will look for <comment>pyproject.toml</> and do the same.
<info>poetry install</info>
+
+By default, the above command will also install the current project. To install only the
+dependencies and not including the current project, run the command with the
+<info>--no-root</info> option like below:
+
+<info> poetry install --no-root</info>
"""
_loggers = ["poetry.repositories.pypi_repository"]
|
{"golden_diff": "diff --git a/poetry/console/commands/install.py b/poetry/console/commands/install.py\n--- a/poetry/console/commands/install.py\n+++ b/poetry/console/commands/install.py\n@@ -34,6 +34,12 @@\n exist it will look for <comment>pyproject.toml</> and do the same.\n \n <info>poetry install</info>\n+\n+By default, the above command will also install the current project. To install only the\n+dependencies and not including the current project, run the command with the\n+<info>--no-root</info> option like below:\n+\n+<info> poetry install --no-root</info>\n \"\"\"\n \n _loggers = [\"poetry.repositories.pypi_repository\"]\n", "issue": "Document the --no-root option\n<!--\r\n Hi there! Thank you for wanting to make Poetry better.\r\n\r\n Before you submit this; let's make sure of a few things.\r\n Please make sure the following boxes are ticked if they are correct.\r\n If not, please try and fulfill these first.\r\n-->\r\n\r\n<!-- Checked checkbox should look like this: [x] -->\r\n- [x] I have searched the [issues](https://github.com/python-poetry/poetry/issues) of this repo and believe that this is not a duplicate.\r\n- [x] I have searched the [documentation](https://python-poetry.org/docs/) and believe that my question is not covered.\r\n\r\n## Feature Request\r\n<!-- Now feel free to write your idea for improvement. Thanks again \ud83d\ude4c \u2764\ufe0f -->\r\nThe `--no-root` option described in https://github.com/python-poetry/poetry/issues/1525 works fine for installation. Unfortunately I found it only when looking for duplicate issues before raising this. `poetry help install` does not describe that option.\r\n\r\nPlease add it to the `help install` output.\nDocument the --no-root option\n<!--\r\n Hi there! Thank you for wanting to make Poetry better.\r\n\r\n Before you submit this; let's make sure of a few things.\r\n Please make sure the following boxes are ticked if they are correct.\r\n If not, please try and fulfill these first.\r\n-->\r\n\r\n<!-- Checked checkbox should look like this: [x] -->\r\n- [x] I have searched the [issues](https://github.com/python-poetry/poetry/issues) of this repo and believe that this is not a duplicate.\r\n- [x] I have searched the [documentation](https://python-poetry.org/docs/) and believe that my question is not covered.\r\n\r\n## Feature Request\r\n<!-- Now feel free to write your idea for improvement. Thanks again \ud83d\ude4c \u2764\ufe0f -->\r\nThe `--no-root` option described in https://github.com/python-poetry/poetry/issues/1525 works fine for installation. Unfortunately I found it only when looking for duplicate issues before raising this. 
`poetry help install` does not describe that option.\r\n\r\nPlease add it to the `help install` output.\n", "before_files": [{"content": "from cleo import option\n\nfrom .env_command import EnvCommand\n\n\nclass InstallCommand(EnvCommand):\n\n name = \"install\"\n description = \"Installs the project dependencies.\"\n\n options = [\n option(\"no-dev\", None, \"Do not install the development dependencies.\"),\n option(\n \"no-root\", None, \"Do not install the root package (the current project).\"\n ),\n option(\n \"dry-run\",\n None,\n \"Output the operations but do not execute anything \"\n \"(implicitly enables --verbose).\",\n ),\n option(\n \"extras\",\n \"E\",\n \"Extra sets of dependencies to install.\",\n flag=False,\n multiple=True,\n ),\n ]\n\n help = \"\"\"The <info>install</info> command reads the <comment>poetry.lock</> file from\nthe current directory, processes it, and downloads and installs all the\nlibraries and dependencies outlined in that file. If the file does not\nexist it will look for <comment>pyproject.toml</> and do the same.\n\n<info>poetry install</info>\n\"\"\"\n\n _loggers = [\"poetry.repositories.pypi_repository\"]\n\n def handle(self):\n from clikit.io import NullIO\n from poetry.installation.installer import Installer\n from poetry.masonry.builders import EditableBuilder\n from poetry.masonry.utils.module import ModuleOrPackageNotFound\n\n installer = Installer(\n self.io, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool\n )\n\n extras = []\n for extra in self.option(\"extras\"):\n if \" \" in extra:\n extras += [e.strip() for e in extra.split(\" \")]\n else:\n extras.append(extra)\n\n installer.extras(extras)\n installer.dev_mode(not self.option(\"no-dev\"))\n installer.dry_run(self.option(\"dry-run\"))\n installer.verbose(self.option(\"verbose\"))\n\n return_code = installer.run()\n\n if return_code != 0:\n return return_code\n\n if self.option(\"no-root\"):\n return 0\n\n try:\n builder = EditableBuilder(self.poetry, self._env, NullIO())\n except ModuleOrPackageNotFound:\n # This is likely due to the fact that the project is an application\n # not following the structure expected by Poetry\n # If this is a true error it will be picked up later by build anyway.\n return 0\n\n self.line(\n \" - Installing <c1>{}</c1> (<b>{}</b>)\".format(\n self.poetry.package.pretty_name, self.poetry.package.pretty_version\n )\n )\n\n if self.option(\"dry-run\"):\n return 0\n\n builder.build()\n\n return 0\n", "path": "poetry/console/commands/install.py"}], "after_files": [{"content": "from cleo import option\n\nfrom .env_command import EnvCommand\n\n\nclass InstallCommand(EnvCommand):\n\n name = \"install\"\n description = \"Installs the project dependencies.\"\n\n options = [\n option(\"no-dev\", None, \"Do not install the development dependencies.\"),\n option(\n \"no-root\", None, \"Do not install the root package (the current project).\"\n ),\n option(\n \"dry-run\",\n None,\n \"Output the operations but do not execute anything \"\n \"(implicitly enables --verbose).\",\n ),\n option(\n \"extras\",\n \"E\",\n \"Extra sets of dependencies to install.\",\n flag=False,\n multiple=True,\n ),\n ]\n\n help = \"\"\"The <info>install</info> command reads the <comment>poetry.lock</> file from\nthe current directory, processes it, and downloads and installs all the\nlibraries and dependencies outlined in that file. 
If the file does not\nexist it will look for <comment>pyproject.toml</> and do the same.\n\n<info>poetry install</info>\n\nBy default, the above command will also install the current project. To install only the\ndependencies and not including the current project, run the command with the\n<info>--no-root</info> option like below:\n\n<info> poetry install --no-root</info>\n\"\"\"\n\n _loggers = [\"poetry.repositories.pypi_repository\"]\n\n def handle(self):\n from clikit.io import NullIO\n from poetry.installation.installer import Installer\n from poetry.masonry.builders import EditableBuilder\n from poetry.masonry.utils.module import ModuleOrPackageNotFound\n\n installer = Installer(\n self.io, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool\n )\n\n extras = []\n for extra in self.option(\"extras\"):\n if \" \" in extra:\n extras += [e.strip() for e in extra.split(\" \")]\n else:\n extras.append(extra)\n\n installer.extras(extras)\n installer.dev_mode(not self.option(\"no-dev\"))\n installer.dry_run(self.option(\"dry-run\"))\n installer.verbose(self.option(\"verbose\"))\n\n return_code = installer.run()\n\n if return_code != 0:\n return return_code\n\n if self.option(\"no-root\"):\n return 0\n\n try:\n builder = EditableBuilder(self.poetry, self._env, NullIO())\n except ModuleOrPackageNotFound:\n # This is likely due to the fact that the project is an application\n # not following the structure expected by Poetry\n # If this is a true error it will be picked up later by build anyway.\n return 0\n\n self.line(\n \" - Installing <c1>{}</c1> (<b>{}</b>)\".format(\n self.poetry.package.pretty_name, self.poetry.package.pretty_version\n )\n )\n\n if self.option(\"dry-run\"):\n return 0\n\n builder.build()\n\n return 0\n", "path": "poetry/console/commands/install.py"}]}
| 1,491 | 162 |
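The patch above only documents behaviour that `handle()` already implements: it returns right after installing dependencies when `--no-root` is set, before the editable build of the project. A toy sketch of that control flow (names are stand-ins, not Poetry's real objects):

```python
def install(no_root: bool) -> list:
    steps = ["resolve and install dependencies"]
    if no_root:
        return steps                                  # mirrors the early `return 0` on --no-root
    steps.append("build and install the current project")
    return steps

print(install(no_root=False))   # dependencies plus the project itself
print(install(no_root=True))    # dependencies only
```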
gh_patches_debug_5564
|
rasdani/github-patches
|
git_diff
|
Parsl__parsl-930
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Log app args
For easier debugging, we should log the arguments apps are called with.
Requested by @mjwilde
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `parsl/app/bash.py`
Content:
```
1 import logging
2 from functools import update_wrapper
3 from inspect import signature, Parameter
4
5 from parsl.app.errors import wrap_error
6 from parsl.app.futures import DataFuture
7 from parsl.app.app import AppBase
8 from parsl.dataflow.dflow import DataFlowKernelLoader
9
10 logger = logging.getLogger(__name__)
11
12
13 def remote_side_bash_executor(func, *args, **kwargs):
14 """Execute the bash app type function and return the command line string.
15
16 This string is reformatted with the *args, and **kwargs
17 from call time.
18 """
19 import os
20 import time
21 import subprocess
22 import logging
23 import parsl.app.errors as pe
24
25 logging.basicConfig(filename='/tmp/bashexec.{0}.log'.format(time.time()), level=logging.DEBUG)
26
27 # start_t = time.time()
28
29 func_name = func.__name__
30
31 partial_cmdline = None
32
33 # Try to run the func to compose the commandline
34 try:
35 # Execute the func to get the commandline
36 partial_cmdline = func(*args, **kwargs)
37 # Reformat the commandline with current args and kwargs
38 executable = partial_cmdline.format(*args, **kwargs)
39
40 except AttributeError as e:
41 if partial_cmdline is not None:
42 raise pe.AppBadFormatting("App formatting failed for app '{}' with AttributeError: {}".format(func_name, e))
43 else:
44 raise pe.BashAppNoReturn("Bash app '{}' did not return a value, or returned none - with this exception: {}".format(func_name, e), None)
45
46 except IndexError as e:
47 raise pe.AppBadFormatting("App formatting failed for app '{}' with IndexError: {}".format(func_name, e))
48 except Exception as e:
49 logging.error("Caught exception during formatting of app '{}': {}".format(func_name, e))
50 raise e
51
52 logging.debug("Executable: %s", executable)
53
54 # Updating stdout, stderr if values passed at call time.
55
56 def open_std_fd(fdname):
57 # fdname is 'stdout' or 'stderr'
58 stdfspec = kwargs.get(fdname) # spec is str name or tuple (name, mode)
59 if stdfspec is None:
60 return None
61 elif isinstance(stdfspec, str):
62 fname = stdfspec
63 mode = 'a+'
64 elif isinstance(stdfspec, tuple):
65 if len(stdfspec) != 2:
66 raise pe.BadStdStreamFile("std descriptor %s has incorrect tuple length %s" % (fdname, len(stdfspec)), TypeError('Bad Tuple Length'))
67 fname, mode = stdfspec
68 else:
69 raise pe.BadStdStreamFile("std descriptor %s has unexpected type %s" % (fdname, str(type(stdfspec))), TypeError('Bad Tuple Type'))
70 try:
71 fd = open(fname, mode)
72 except Exception as e:
73 raise pe.BadStdStreamFile(fname, e)
74 return fd
75
76 std_out = open_std_fd('stdout')
77 std_err = open_std_fd('stderr')
78 timeout = kwargs.get('walltime')
79
80 returncode = None
81 try:
82 proc = subprocess.Popen(executable, stdout=std_out, stderr=std_err, shell=True, executable='/bin/bash')
83 proc.wait(timeout=timeout)
84 returncode = proc.returncode
85
86 except subprocess.TimeoutExpired:
87 # print("Timeout")
88 raise pe.AppTimeout("[{}] App exceeded walltime: {}".format(func_name, timeout))
89
90 except Exception as e:
91 # print("Caught exception: ", e)
92 raise pe.AppException("[{}] App caught exception: {}".format(func_name, proc.returncode), e)
93
94 if returncode != 0:
95 raise pe.AppFailure("[{}] App failed with exit code: {}".format(func_name, proc.returncode), proc.returncode)
96
97 # TODO : Add support for globs here
98
99 missing = []
100 for outputfile in kwargs.get('outputs', []):
101 fpath = outputfile
102 if type(outputfile) != str:
103 fpath = outputfile.filepath
104
105 if not os.path.exists(fpath):
106 missing.extend([outputfile])
107
108 if missing:
109 raise pe.MissingOutputs("[{}] Missing outputs".format(func_name), missing)
110
111 # exec_duration = time.time() - start_t
112 return returncode
113
114
115 class BashApp(AppBase):
116
117 def __init__(self, func, data_flow_kernel=None, walltime=60, cache=False, executors='all'):
118 super().__init__(func, data_flow_kernel=data_flow_kernel, walltime=60, executors=executors, cache=cache)
119 self.kwargs = {}
120
121 # We duplicate the extraction of parameter defaults
122 # to self.kwargs to ensure availability at point of
123 # command string format. Refer: #349
124 sig = signature(func)
125
126 for s in sig.parameters:
127 if sig.parameters[s].default != Parameter.empty:
128 self.kwargs[s] = sig.parameters[s].default
129
130 def __call__(self, *args, **kwargs):
131 """Handle the call to a Bash app.
132
133 Args:
134 - Arbitrary
135
136 Kwargs:
137 - Arbitrary
138
139 Returns:
140 If outputs=[...] was a kwarg then:
141 App_fut, [Data_Futures...]
142 else:
143 App_fut
144
145 """
146 # Update kwargs in the app definition with ones passed in at calltime
147 self.kwargs.update(kwargs)
148
149 if self.data_flow_kernel is None:
150 dfk = DataFlowKernelLoader.dfk()
151 else:
152 dfk = self.data_flow_kernel
153
154 app_fut = dfk.submit(wrap_error(update_wrapper(remote_side_bash_executor, self.func)),
155 self.func, *args,
156 executors=self.executors,
157 fn_hash=self.func_hash,
158 cache=self.cache,
159 **self.kwargs)
160
161 out_futs = [DataFuture(app_fut, o, tid=app_fut.tid)
162 for o in kwargs.get('outputs', [])]
163 app_fut._outputs = out_futs
164
165 return app_fut
166
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/parsl/app/bash.py b/parsl/app/bash.py
--- a/parsl/app/bash.py
+++ b/parsl/app/bash.py
@@ -77,6 +77,9 @@
std_err = open_std_fd('stderr')
timeout = kwargs.get('walltime')
+ if std_err is not None:
+ print('--> executable follows <--\n{}\n--> end executable <--'.format(executable), file=std_err)
+
returncode = None
try:
proc = subprocess.Popen(executable, stdout=std_out, stderr=std_err, shell=True, executable='/bin/bash')
|
{"golden_diff": "diff --git a/parsl/app/bash.py b/parsl/app/bash.py\n--- a/parsl/app/bash.py\n+++ b/parsl/app/bash.py\n@@ -77,6 +77,9 @@\n std_err = open_std_fd('stderr')\n timeout = kwargs.get('walltime')\n \n+ if std_err is not None:\n+ print('--> executable follows <--\\n{}\\n--> end executable <--'.format(executable), file=std_err)\n+\n returncode = None\n try:\n proc = subprocess.Popen(executable, stdout=std_out, stderr=std_err, shell=True, executable='/bin/bash')\n", "issue": "Log app args\nFor easier debugging, we should log the arguments apps are called with.\r\n\r\nRequested by @mjwilde \n", "before_files": [{"content": "import logging\nfrom functools import update_wrapper\nfrom inspect import signature, Parameter\n\nfrom parsl.app.errors import wrap_error\nfrom parsl.app.futures import DataFuture\nfrom parsl.app.app import AppBase\nfrom parsl.dataflow.dflow import DataFlowKernelLoader\n\nlogger = logging.getLogger(__name__)\n\n\ndef remote_side_bash_executor(func, *args, **kwargs):\n \"\"\"Execute the bash app type function and return the command line string.\n\n This string is reformatted with the *args, and **kwargs\n from call time.\n \"\"\"\n import os\n import time\n import subprocess\n import logging\n import parsl.app.errors as pe\n\n logging.basicConfig(filename='/tmp/bashexec.{0}.log'.format(time.time()), level=logging.DEBUG)\n\n # start_t = time.time()\n\n func_name = func.__name__\n\n partial_cmdline = None\n\n # Try to run the func to compose the commandline\n try:\n # Execute the func to get the commandline\n partial_cmdline = func(*args, **kwargs)\n # Reformat the commandline with current args and kwargs\n executable = partial_cmdline.format(*args, **kwargs)\n\n except AttributeError as e:\n if partial_cmdline is not None:\n raise pe.AppBadFormatting(\"App formatting failed for app '{}' with AttributeError: {}\".format(func_name, e))\n else:\n raise pe.BashAppNoReturn(\"Bash app '{}' did not return a value, or returned none - with this exception: {}\".format(func_name, e), None)\n\n except IndexError as e:\n raise pe.AppBadFormatting(\"App formatting failed for app '{}' with IndexError: {}\".format(func_name, e))\n except Exception as e:\n logging.error(\"Caught exception during formatting of app '{}': {}\".format(func_name, e))\n raise e\n\n logging.debug(\"Executable: %s\", executable)\n\n # Updating stdout, stderr if values passed at call time.\n\n def open_std_fd(fdname):\n # fdname is 'stdout' or 'stderr'\n stdfspec = kwargs.get(fdname) # spec is str name or tuple (name, mode)\n if stdfspec is None:\n return None\n elif isinstance(stdfspec, str):\n fname = stdfspec\n mode = 'a+'\n elif isinstance(stdfspec, tuple):\n if len(stdfspec) != 2:\n raise pe.BadStdStreamFile(\"std descriptor %s has incorrect tuple length %s\" % (fdname, len(stdfspec)), TypeError('Bad Tuple Length'))\n fname, mode = stdfspec\n else:\n raise pe.BadStdStreamFile(\"std descriptor %s has unexpected type %s\" % (fdname, str(type(stdfspec))), TypeError('Bad Tuple Type'))\n try:\n fd = open(fname, mode)\n except Exception as e:\n raise pe.BadStdStreamFile(fname, e)\n return fd\n\n std_out = open_std_fd('stdout')\n std_err = open_std_fd('stderr')\n timeout = kwargs.get('walltime')\n\n returncode = None\n try:\n proc = subprocess.Popen(executable, stdout=std_out, stderr=std_err, shell=True, executable='/bin/bash')\n proc.wait(timeout=timeout)\n returncode = proc.returncode\n\n except subprocess.TimeoutExpired:\n # print(\"Timeout\")\n raise pe.AppTimeout(\"[{}] App exceeded walltime: 
{}\".format(func_name, timeout))\n\n except Exception as e:\n # print(\"Caught exception: \", e)\n raise pe.AppException(\"[{}] App caught exception: {}\".format(func_name, proc.returncode), e)\n\n if returncode != 0:\n raise pe.AppFailure(\"[{}] App failed with exit code: {}\".format(func_name, proc.returncode), proc.returncode)\n\n # TODO : Add support for globs here\n\n missing = []\n for outputfile in kwargs.get('outputs', []):\n fpath = outputfile\n if type(outputfile) != str:\n fpath = outputfile.filepath\n\n if not os.path.exists(fpath):\n missing.extend([outputfile])\n\n if missing:\n raise pe.MissingOutputs(\"[{}] Missing outputs\".format(func_name), missing)\n\n # exec_duration = time.time() - start_t\n return returncode\n\n\nclass BashApp(AppBase):\n\n def __init__(self, func, data_flow_kernel=None, walltime=60, cache=False, executors='all'):\n super().__init__(func, data_flow_kernel=data_flow_kernel, walltime=60, executors=executors, cache=cache)\n self.kwargs = {}\n\n # We duplicate the extraction of parameter defaults\n # to self.kwargs to ensure availability at point of\n # command string format. Refer: #349\n sig = signature(func)\n\n for s in sig.parameters:\n if sig.parameters[s].default != Parameter.empty:\n self.kwargs[s] = sig.parameters[s].default\n\n def __call__(self, *args, **kwargs):\n \"\"\"Handle the call to a Bash app.\n\n Args:\n - Arbitrary\n\n Kwargs:\n - Arbitrary\n\n Returns:\n If outputs=[...] was a kwarg then:\n App_fut, [Data_Futures...]\n else:\n App_fut\n\n \"\"\"\n # Update kwargs in the app definition with ones passed in at calltime\n self.kwargs.update(kwargs)\n\n if self.data_flow_kernel is None:\n dfk = DataFlowKernelLoader.dfk()\n else:\n dfk = self.data_flow_kernel\n\n app_fut = dfk.submit(wrap_error(update_wrapper(remote_side_bash_executor, self.func)),\n self.func, *args,\n executors=self.executors,\n fn_hash=self.func_hash,\n cache=self.cache,\n **self.kwargs)\n\n out_futs = [DataFuture(app_fut, o, tid=app_fut.tid)\n for o in kwargs.get('outputs', [])]\n app_fut._outputs = out_futs\n\n return app_fut\n", "path": "parsl/app/bash.py"}], "after_files": [{"content": "import logging\nfrom functools import update_wrapper\nfrom inspect import signature, Parameter\n\nfrom parsl.app.errors import wrap_error\nfrom parsl.app.futures import DataFuture\nfrom parsl.app.app import AppBase\nfrom parsl.dataflow.dflow import DataFlowKernelLoader\n\nlogger = logging.getLogger(__name__)\n\n\ndef remote_side_bash_executor(func, *args, **kwargs):\n \"\"\"Execute the bash app type function and return the command line string.\n\n This string is reformatted with the *args, and **kwargs\n from call time.\n \"\"\"\n import os\n import time\n import subprocess\n import logging\n import parsl.app.errors as pe\n\n logging.basicConfig(filename='/tmp/bashexec.{0}.log'.format(time.time()), level=logging.DEBUG)\n\n # start_t = time.time()\n\n func_name = func.__name__\n\n partial_cmdline = None\n\n # Try to run the func to compose the commandline\n try:\n # Execute the func to get the commandline\n partial_cmdline = func(*args, **kwargs)\n # Reformat the commandline with current args and kwargs\n executable = partial_cmdline.format(*args, **kwargs)\n\n except AttributeError as e:\n if partial_cmdline is not None:\n raise pe.AppBadFormatting(\"App formatting failed for app '{}' with AttributeError: {}\".format(func_name, e))\n else:\n raise pe.BashAppNoReturn(\"Bash app '{}' did not return a value, or returned none - with this exception: {}\".format(func_name, e), None)\n\n 
except IndexError as e:\n raise pe.AppBadFormatting(\"App formatting failed for app '{}' with IndexError: {}\".format(func_name, e))\n except Exception as e:\n logging.error(\"Caught exception during formatting of app '{}': {}\".format(func_name, e))\n raise e\n\n logging.debug(\"Executable: %s\", executable)\n\n # Updating stdout, stderr if values passed at call time.\n\n def open_std_fd(fdname):\n # fdname is 'stdout' or 'stderr'\n stdfspec = kwargs.get(fdname) # spec is str name or tuple (name, mode)\n if stdfspec is None:\n return None\n elif isinstance(stdfspec, str):\n fname = stdfspec\n mode = 'a+'\n elif isinstance(stdfspec, tuple):\n if len(stdfspec) != 2:\n raise pe.BadStdStreamFile(\"std descriptor %s has incorrect tuple length %s\" % (fdname, len(stdfspec)), TypeError('Bad Tuple Length'))\n fname, mode = stdfspec\n else:\n raise pe.BadStdStreamFile(\"std descriptor %s has unexpected type %s\" % (fdname, str(type(stdfspec))), TypeError('Bad Tuple Type'))\n try:\n fd = open(fname, mode)\n except Exception as e:\n raise pe.BadStdStreamFile(fname, e)\n return fd\n\n std_out = open_std_fd('stdout')\n std_err = open_std_fd('stderr')\n timeout = kwargs.get('walltime')\n\n if std_err is not None:\n print('--> executable follows <--\\n{}\\n--> end executable <--'.format(executable), file=std_err)\n\n returncode = None\n try:\n proc = subprocess.Popen(executable, stdout=std_out, stderr=std_err, shell=True, executable='/bin/bash')\n proc.wait(timeout=timeout)\n returncode = proc.returncode\n\n except subprocess.TimeoutExpired:\n # print(\"Timeout\")\n raise pe.AppTimeout(\"[{}] App exceeded walltime: {}\".format(func_name, timeout))\n\n except Exception as e:\n # print(\"Caught exception: \", e)\n raise pe.AppException(\"[{}] App caught exception: {}\".format(func_name, proc.returncode), e)\n\n if returncode != 0:\n raise pe.AppFailure(\"[{}] App failed with exit code: {}\".format(func_name, proc.returncode), proc.returncode)\n\n # TODO : Add support for globs here\n\n missing = []\n for outputfile in kwargs.get('outputs', []):\n fpath = outputfile\n if type(outputfile) != str:\n fpath = outputfile.filepath\n\n if not os.path.exists(fpath):\n missing.extend([outputfile])\n\n if missing:\n raise pe.MissingOutputs(\"[{}] Missing outputs\".format(func_name), missing)\n\n # exec_duration = time.time() - start_t\n return returncode\n\n\nclass BashApp(AppBase):\n\n def __init__(self, func, data_flow_kernel=None, walltime=60, cache=False, executors='all'):\n super().__init__(func, data_flow_kernel=data_flow_kernel, walltime=60, executors=executors, cache=cache)\n self.kwargs = {}\n\n # We duplicate the extraction of parameter defaults\n # to self.kwargs to ensure availability at point of\n # command string format. Refer: #349\n sig = signature(func)\n\n for s in sig.parameters:\n if sig.parameters[s].default != Parameter.empty:\n self.kwargs[s] = sig.parameters[s].default\n\n def __call__(self, *args, **kwargs):\n \"\"\"Handle the call to a Bash app.\n\n Args:\n - Arbitrary\n\n Kwargs:\n - Arbitrary\n\n Returns:\n If outputs=[...] 
was a kwarg then:\n App_fut, [Data_Futures...]\n else:\n App_fut\n\n \"\"\"\n # Update kwargs in the app definition with ones passed in at calltime\n self.kwargs.update(kwargs)\n\n if self.data_flow_kernel is None:\n dfk = DataFlowKernelLoader.dfk()\n else:\n dfk = self.data_flow_kernel\n\n app_fut = dfk.submit(wrap_error(update_wrapper(remote_side_bash_executor, self.func)),\n self.func, *args,\n executors=self.executors,\n fn_hash=self.func_hash,\n cache=self.cache,\n **self.kwargs)\n\n out_futs = [DataFuture(app_fut, o, tid=app_fut.tid)\n for o in kwargs.get('outputs', [])]\n app_fut._outputs = out_futs\n\n return app_fut\n", "path": "parsl/app/bash.py"}]}
| 1,991 | 139 |
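The patch above writes the fully formatted command line into the app's stderr file before the subprocess starts, so a failed bash app leaves a record of exactly what was run. A stand-alone imitation of that behaviour; the output path and the command are illustrative, not Parsl defaults:

```python
import subprocess

executable = "ls /definitely/missing/path"             # illustrative command that fails

with open("/tmp/bashapp_demo.err", "w+") as std_err:
    # Same header the patch prints before launching the subprocess:
    print('--> executable follows <--\n{}\n--> end executable <--'.format(executable),
          file=std_err)
    std_err.flush()                                    # keep the header ahead of the subprocess output
    subprocess.Popen(executable, stderr=std_err, shell=True,
                     executable='/bin/bash').wait()

print(open("/tmp/bashapp_demo.err").read())            # header plus the command's own stderr
```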
gh_patches_debug_1187
|
rasdani/github-patches
|
git_diff
|
freedomofpress__securedrop-6051
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Alembic operations fail with multiple head revisions
## Description
All Alembic operations fail with Alembic error:
ERROR [alembic.util.messaging] Multiple head revisions are present for given argument 'head'; please specify a specific target revision, '<branchname>@head' to narrow to a specific head, or 'heads' for all heads
Cf. consistent recent failures of CI jobs `app-tests` and `staging-test-with-rebase` since #5974.
## Steps to Reproduce
`make test` on `develop`; open or push to a PR; etc.
## Expected Behavior
Alembic operations succeed and Alembic-based tests pass.
## Actual Behavior
All Alembic operations and tests fail with Alembic error:
ERROR [alembic.util.messaging] Multiple head revisions are present for given argument 'head'; please specify a specific target revision, '<branchname>@head' to narrow to a specific head, or 'heads' for all heads
## Comments
This is essentially an Alembic-level merge-conflict. PR forthcoming with the one-line fix.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `securedrop/alembic/versions/1ddb81fb88c2_unique_index_for_instanceconfig_valid_.py`
Content:
```
1 """unique_index_for_instanceconfig_valid_until
2
3 Revision ID: 1ddb81fb88c2
4 Revises: 92fba0be98e9
5 Create Date: 2021-06-04 17:28:25.725563
6
7 """
8 from alembic import op
9 import sqlalchemy as sa
10
11
12 # revision identifiers, used by Alembic.
13 revision = '1ddb81fb88c2'
14 down_revision = '92fba0be98e9'
15 branch_labels = None
16 depends_on = None
17
18
19 def upgrade():
20 # ### commands auto generated by Alembic - please adjust! ###
21 with op.batch_alter_table('instance_config', schema=None) as batch_op:
22 batch_op.create_index('ix_one_active_instance_config', [sa.text('valid_until IS NULL')], unique=True, sqlite_where=sa.text('valid_until IS NULL'))
23
24 # ### end Alembic commands ###
25
26
27 def downgrade():
28 # ### commands auto generated by Alembic - please adjust! ###
29 with op.batch_alter_table('instance_config', schema=None) as batch_op:
30 batch_op.drop_index('ix_one_active_instance_config')
31
32 # ### end Alembic commands ###
33
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/securedrop/alembic/versions/1ddb81fb88c2_unique_index_for_instanceconfig_valid_.py b/securedrop/alembic/versions/1ddb81fb88c2_unique_index_for_instanceconfig_valid_.py
--- a/securedrop/alembic/versions/1ddb81fb88c2_unique_index_for_instanceconfig_valid_.py
+++ b/securedrop/alembic/versions/1ddb81fb88c2_unique_index_for_instanceconfig_valid_.py
@@ -11,7 +11,7 @@
# revision identifiers, used by Alembic.
revision = '1ddb81fb88c2'
-down_revision = '92fba0be98e9'
+down_revision = 'b060f38c0c31'
branch_labels = None
depends_on = None
|
{"golden_diff": "diff --git a/securedrop/alembic/versions/1ddb81fb88c2_unique_index_for_instanceconfig_valid_.py b/securedrop/alembic/versions/1ddb81fb88c2_unique_index_for_instanceconfig_valid_.py\n--- a/securedrop/alembic/versions/1ddb81fb88c2_unique_index_for_instanceconfig_valid_.py\n+++ b/securedrop/alembic/versions/1ddb81fb88c2_unique_index_for_instanceconfig_valid_.py\n@@ -11,7 +11,7 @@\n \n # revision identifiers, used by Alembic.\n revision = '1ddb81fb88c2'\n-down_revision = '92fba0be98e9'\n+down_revision = 'b060f38c0c31'\n branch_labels = None\n depends_on = None\n", "issue": "Alembic operations fail with multiple head revisions\n## Description\r\n\r\nAll Alembic operations fail with Alembic error:\r\n\r\n ERROR [alembic.util.messaging] Multiple head revisions are present for given argument 'head'; please specify a specific target revision, '<branchname>@head' to narrow to a specific head, or 'heads' for all heads\r\n\r\nCf. consistent recent failures of CI jobs `app-tests` and `staging-test-with-rebase` since #5974.\r\n\r\n## Steps to Reproduce\r\n\r\n`make test` on `develop`; open or push to a PR; etc.\r\n\r\n## Expected Behavior\r\n\r\nAlembic operations succeed and Alembic-based tests pass.\r\n\r\n## Actual Behavior\r\n\r\nAll Alembic operations and tests fail with Alembic error:\r\n\r\n ERROR [alembic.util.messaging] Multiple head revisions are present for given argument 'head'; please specify a specific target revision, '<branchname>@head' to narrow to a specific head, or 'heads' for all heads\r\n\r\n## Comments\r\n\r\nThis is essentially an Alembic-level merge-conflict. PR forthcoming with the one-line fix.\n", "before_files": [{"content": "\"\"\"unique_index_for_instanceconfig_valid_until\n\nRevision ID: 1ddb81fb88c2\nRevises: 92fba0be98e9\nCreate Date: 2021-06-04 17:28:25.725563\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '1ddb81fb88c2'\ndown_revision = '92fba0be98e9'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n with op.batch_alter_table('instance_config', schema=None) as batch_op:\n batch_op.create_index('ix_one_active_instance_config', [sa.text('valid_until IS NULL')], unique=True, sqlite_where=sa.text('valid_until IS NULL'))\n\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n with op.batch_alter_table('instance_config', schema=None) as batch_op:\n batch_op.drop_index('ix_one_active_instance_config')\n\n # ### end Alembic commands ###\n", "path": "securedrop/alembic/versions/1ddb81fb88c2_unique_index_for_instanceconfig_valid_.py"}], "after_files": [{"content": "\"\"\"unique_index_for_instanceconfig_valid_until\n\nRevision ID: 1ddb81fb88c2\nRevises: 92fba0be98e9\nCreate Date: 2021-06-04 17:28:25.725563\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '1ddb81fb88c2'\ndown_revision = 'b060f38c0c31'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n with op.batch_alter_table('instance_config', schema=None) as batch_op:\n batch_op.create_index('ix_one_active_instance_config', [sa.text('valid_until IS NULL')], unique=True, sqlite_where=sa.text('valid_until IS NULL'))\n\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! 
###\n with op.batch_alter_table('instance_config', schema=None) as batch_op:\n batch_op.drop_index('ix_one_active_instance_config')\n\n # ### end Alembic commands ###\n", "path": "securedrop/alembic/versions/1ddb81fb88c2_unique_index_for_instanceconfig_valid_.py"}]}
| 848 | 199 |
gh_patches_debug_33549
|
rasdani/github-patches
|
git_diff
|
nextcloud__appstore-56
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Test if zip bombs are possible
We should know if zip bombs are possible currently
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nextcloudappstore/core/api/v1/release/parser.py`
Content:
```
1 import re
2 import tarfile # type: ignore
3 import lxml.etree # type: ignore
4 from typing import Dict, Any, Tuple
5
6 from nextcloudappstore.core.api.v1.release import ReleaseConfig
7 from nextcloudappstore.core.versioning import pad_max_version, pad_min_version
8 from rest_framework.exceptions import APIException # type: ignore
9
10
11 class MaxSizeAppMetadataXmlException(APIException):
12 pass
13
14
15 class InvalidAppMetadataXmlException(APIException):
16 pass
17
18
19 class UnsupportedAppArchiveException(APIException):
20 pass
21
22
23 class InvalidAppPackageStructureException(APIException):
24 pass
25
26
27 class XMLSyntaxError(APIException):
28 pass
29
30
31 class GunZipAppMetadataExtractor:
32 def __init__(self, config: ReleaseConfig) -> None:
33 """
34 :argument config the config
35 """
36 self.config = config
37 self.app_folder_regex = re.compile(r'^[a-z]+[a-z_]*$')
38
39 def extract_app_metadata(self, archive_path: str) -> Tuple[str, str]:
40 """
41 Extracts the info.xml from an tar.gz archive
42 :argument archive_path the path to the tar.gz archive
43 :raises InvalidAppPackageStructureException if the first level folder
44 does not equal the app_id or no info.xml file could be found in the
45 appinfo folder
46 :return the info.xml as string
47 """
48 if not tarfile.is_tarfile(archive_path):
49 msg = '%s is not a valid tar.gz archive ' % archive_path
50 raise UnsupportedAppArchiveException(msg)
51
52 with tarfile.open(archive_path, 'r:gz') as tar:
53 result = self._parse_archive(tar)
54 return result
55
56 def _parse_archive(self, tar: Any) -> Tuple[str, str]:
57 folder = list(
58 filter(lambda name: re.match(self.app_folder_regex, name),
59 tar.getnames()
60 )
61 )
62 if len(folder) > 1:
63 msg = 'More than one possible app folder found'
64 raise InvalidAppPackageStructureException(msg)
65 elif len(folder) == 0:
66 msg = 'No possible app folder found. App folder must contain ' \
67 'only lowercase ASCII characters or underscores'
68 raise InvalidAppPackageStructureException(msg)
69
70 app_id = folder[0]
71 info_path = '%s/appinfo/info.xml' % app_id
72 try:
73 app_member = tar.getmember(app_id)
74 appinfo_member = tar.getmember('%s/appinfo' % app_id)
75 info_member = tar.getmember(info_path)
76 possible_links = [app_member, appinfo_member, info_member]
77
78 for possible_link in possible_links:
79 if possible_link.issym() or possible_link.islnk():
80 msg = 'Symlinks and hard links can not be used for %s' %\
81 possible_link
82 raise InvalidAppPackageStructureException(msg)
83
84 if info_member.size > self.config.max_info_size:
85 msg = '%s was bigger than allowed %i bytes' % (
86 info_path, self.config.max_info_size)
87 raise MaxSizeAppMetadataXmlException(msg)
88 info_file = tar.extractfile(info_member)
89 return info_file.read().decode('utf-8'), app_id
90 except KeyError:
91 msg = 'Could not find %s file inside the archive' % info_path
92 raise InvalidAppPackageStructureException(msg)
93
94
95 def element_to_dict(element: Any) -> Dict:
96 type = element.get('type')
97 key = element.tag.replace('-', '_')
98 if type == 'int':
99 return {key: int(element.text)}
100 elif type == 'list':
101 return {key: list(map(element_to_dict, element.iterchildren()))}
102 elif type == 'min-version':
103 return {key: pad_min_version(element.text)}
104 elif type == 'max-version':
105 return {key: pad_max_version(element.text)}
106 elif len(list(element)) > 0:
107 contents = {}
108 for child in element.iterchildren():
109 contents.update(element_to_dict(child))
110 return {key: contents}
111 else:
112 return {key: element.text}
113
114
115 def parse_app_metadata(xml: str, schema: str, pre_xslt: str,
116 xslt: str) -> Dict:
117 """
118 Parses, validates and maps the xml onto a dict
119 :argument xml the info.xml string to parse
120 :argument schema the schema xml as string
121 :argument pre_xslt xslt which is run before validation to ensure that
122 everything is in the correct order and that unknown elements are excluded
123 :argument xslt the xslt to transform it to a matching structure
124 :raises InvalidAppMetadataXmlException if the schema does not validate
125 :return the parsed xml as dict
126 """
127 parser = lxml.etree.XMLParser(resolve_entities=False, no_network=True,
128 remove_comments=True, load_dtd=False,
129 remove_blank_text=True, dtd_validation=False
130 )
131 try:
132 doc = lxml.etree.fromstring(bytes(xml, encoding='utf-8'), parser)
133 except lxml.etree.XMLSyntaxError as e:
134 msg = 'info.xml contains malformed xml: %s' % e
135 raise XMLSyntaxError(msg)
136 for _ in doc.iter(lxml.etree.Entity):
137 raise InvalidAppMetadataXmlException('Must not contain entities')
138 pre_transform = lxml.etree.XSLT(lxml.etree.XML(pre_xslt))
139 pre_transformed_doc = pre_transform(doc)
140 schema_doc = lxml.etree.fromstring(bytes(schema, encoding='utf-8'), parser)
141 schema = lxml.etree.XMLSchema(schema_doc)
142 try:
143 schema.assertValid(pre_transformed_doc) # type: ignore
144 except lxml.etree.DocumentInvalid as e:
145 msg = 'info.xml did not validate: %s' % e
146 raise InvalidAppMetadataXmlException(msg)
147 transform = lxml.etree.XSLT(lxml.etree.XML(xslt))
148 transformed_doc = transform(pre_transformed_doc)
149 mapped = element_to_dict(transformed_doc.getroot())
150 return mapped
151
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/nextcloudappstore/core/api/v1/release/parser.py b/nextcloudappstore/core/api/v1/release/parser.py
--- a/nextcloudappstore/core/api/v1/release/parser.py
+++ b/nextcloudappstore/core/api/v1/release/parser.py
@@ -77,20 +77,44 @@
for possible_link in possible_links:
if possible_link.issym() or possible_link.islnk():
- msg = 'Symlinks and hard links can not be used for %s' %\
+ msg = 'Symlinks and hard links can not be used for %s' % \
possible_link
raise InvalidAppPackageStructureException(msg)
-
- if info_member.size > self.config.max_info_size:
- msg = '%s was bigger than allowed %i bytes' % (
- info_path, self.config.max_info_size)
- raise MaxSizeAppMetadataXmlException(msg)
info_file = tar.extractfile(info_member)
- return info_file.read().decode('utf-8'), app_id
+ contents = self._stream_read_file(info_file,
+ self.config.max_info_size)
+ return contents, app_id
except KeyError:
msg = 'Could not find %s file inside the archive' % info_path
raise InvalidAppPackageStructureException(msg)
+ def _stream_read_file(self, info_file: Any, max_info_size: int) -> str:
+ """
+ Instead of reading everything in one go which is vulnerable to
+ zip bombs, stream and accumulate the bytes
+ :argument info_file: buffered io reader
+ :argument max_info_size: maximum file size in bytes
+ :raises MaxSizeAppMetadataXmlException if the maximum size was reached
+ :return: the parsed info.xml
+ """
+ # FIXME: If someone finds a less ugly version, please feel free to
+ # improve it
+ size = 0
+ result = b''
+ while True:
+ size += 1024
+ if size > max_info_size:
+ msg = 'info.xml was bigger than allowed %i bytes' % \
+ max_info_size
+ raise MaxSizeAppMetadataXmlException(msg)
+
+ chunk = info_file.read(1024)
+ if not chunk:
+ break
+ result += chunk
+
+ return result.decode('utf-8')
+
def element_to_dict(element: Any) -> Dict:
type = element.get('type')
|
{"golden_diff": "diff --git a/nextcloudappstore/core/api/v1/release/parser.py b/nextcloudappstore/core/api/v1/release/parser.py\n--- a/nextcloudappstore/core/api/v1/release/parser.py\n+++ b/nextcloudappstore/core/api/v1/release/parser.py\n@@ -77,20 +77,44 @@\n \n for possible_link in possible_links:\n if possible_link.issym() or possible_link.islnk():\n- msg = 'Symlinks and hard links can not be used for %s' %\\\n+ msg = 'Symlinks and hard links can not be used for %s' % \\\n possible_link\n raise InvalidAppPackageStructureException(msg)\n-\n- if info_member.size > self.config.max_info_size:\n- msg = '%s was bigger than allowed %i bytes' % (\n- info_path, self.config.max_info_size)\n- raise MaxSizeAppMetadataXmlException(msg)\n info_file = tar.extractfile(info_member)\n- return info_file.read().decode('utf-8'), app_id\n+ contents = self._stream_read_file(info_file,\n+ self.config.max_info_size)\n+ return contents, app_id\n except KeyError:\n msg = 'Could not find %s file inside the archive' % info_path\n raise InvalidAppPackageStructureException(msg)\n \n+ def _stream_read_file(self, info_file: Any, max_info_size: int) -> str:\n+ \"\"\"\n+ Instead of reading everything in one go which is vulnerable to\n+ zip bombs, stream and accumulate the bytes\n+ :argument info_file: buffered io reader\n+ :argument max_info_size: maximum file size in bytes\n+ :raises MaxSizeAppMetadataXmlException if the maximum size was reached\n+ :return: the parsed info.xml\n+ \"\"\"\n+ # FIXME: If someone finds a less ugly version, please feel free to\n+ # improve it\n+ size = 0\n+ result = b''\n+ while True:\n+ size += 1024\n+ if size > max_info_size:\n+ msg = 'info.xml was bigger than allowed %i bytes' % \\\n+ max_info_size\n+ raise MaxSizeAppMetadataXmlException(msg)\n+\n+ chunk = info_file.read(1024)\n+ if not chunk:\n+ break\n+ result += chunk\n+\n+ return result.decode('utf-8')\n+\n \n def element_to_dict(element: Any) -> Dict:\n type = element.get('type')\n", "issue": "Test if zip bombs are possible\nWe should know if zip bombs are possible currently\n\n", "before_files": [{"content": "import re\nimport tarfile # type: ignore\nimport lxml.etree # type: ignore\nfrom typing import Dict, Any, Tuple\n\nfrom nextcloudappstore.core.api.v1.release import ReleaseConfig\nfrom nextcloudappstore.core.versioning import pad_max_version, pad_min_version\nfrom rest_framework.exceptions import APIException # type: ignore\n\n\nclass MaxSizeAppMetadataXmlException(APIException):\n pass\n\n\nclass InvalidAppMetadataXmlException(APIException):\n pass\n\n\nclass UnsupportedAppArchiveException(APIException):\n pass\n\n\nclass InvalidAppPackageStructureException(APIException):\n pass\n\n\nclass XMLSyntaxError(APIException):\n pass\n\n\nclass GunZipAppMetadataExtractor:\n def __init__(self, config: ReleaseConfig) -> None:\n \"\"\"\n :argument config the config\n \"\"\"\n self.config = config\n self.app_folder_regex = re.compile(r'^[a-z]+[a-z_]*$')\n\n def extract_app_metadata(self, archive_path: str) -> Tuple[str, str]:\n \"\"\"\n Extracts the info.xml from an tar.gz archive\n :argument archive_path the path to the tar.gz archive\n :raises InvalidAppPackageStructureException if the first level folder\n does not equal the app_id or no info.xml file could be found in the\n appinfo folder\n :return the info.xml as string\n \"\"\"\n if not tarfile.is_tarfile(archive_path):\n msg = '%s is not a valid tar.gz archive ' % archive_path\n raise UnsupportedAppArchiveException(msg)\n\n with tarfile.open(archive_path, 'r:gz') as tar:\n result = 
self._parse_archive(tar)\n return result\n\n def _parse_archive(self, tar: Any) -> Tuple[str, str]:\n folder = list(\n filter(lambda name: re.match(self.app_folder_regex, name),\n tar.getnames()\n )\n )\n if len(folder) > 1:\n msg = 'More than one possible app folder found'\n raise InvalidAppPackageStructureException(msg)\n elif len(folder) == 0:\n msg = 'No possible app folder found. App folder must contain ' \\\n 'only lowercase ASCII characters or underscores'\n raise InvalidAppPackageStructureException(msg)\n\n app_id = folder[0]\n info_path = '%s/appinfo/info.xml' % app_id\n try:\n app_member = tar.getmember(app_id)\n appinfo_member = tar.getmember('%s/appinfo' % app_id)\n info_member = tar.getmember(info_path)\n possible_links = [app_member, appinfo_member, info_member]\n\n for possible_link in possible_links:\n if possible_link.issym() or possible_link.islnk():\n msg = 'Symlinks and hard links can not be used for %s' %\\\n possible_link\n raise InvalidAppPackageStructureException(msg)\n\n if info_member.size > self.config.max_info_size:\n msg = '%s was bigger than allowed %i bytes' % (\n info_path, self.config.max_info_size)\n raise MaxSizeAppMetadataXmlException(msg)\n info_file = tar.extractfile(info_member)\n return info_file.read().decode('utf-8'), app_id\n except KeyError:\n msg = 'Could not find %s file inside the archive' % info_path\n raise InvalidAppPackageStructureException(msg)\n\n\ndef element_to_dict(element: Any) -> Dict:\n type = element.get('type')\n key = element.tag.replace('-', '_')\n if type == 'int':\n return {key: int(element.text)}\n elif type == 'list':\n return {key: list(map(element_to_dict, element.iterchildren()))}\n elif type == 'min-version':\n return {key: pad_min_version(element.text)}\n elif type == 'max-version':\n return {key: pad_max_version(element.text)}\n elif len(list(element)) > 0:\n contents = {}\n for child in element.iterchildren():\n contents.update(element_to_dict(child))\n return {key: contents}\n else:\n return {key: element.text}\n\n\ndef parse_app_metadata(xml: str, schema: str, pre_xslt: str,\n xslt: str) -> Dict:\n \"\"\"\n Parses, validates and maps the xml onto a dict\n :argument xml the info.xml string to parse\n :argument schema the schema xml as string\n :argument pre_xslt xslt which is run before validation to ensure that\n everything is in the correct order and that unknown elements are excluded\n :argument xslt the xslt to transform it to a matching structure\n :raises InvalidAppMetadataXmlException if the schema does not validate\n :return the parsed xml as dict\n \"\"\"\n parser = lxml.etree.XMLParser(resolve_entities=False, no_network=True,\n remove_comments=True, load_dtd=False,\n remove_blank_text=True, dtd_validation=False\n )\n try:\n doc = lxml.etree.fromstring(bytes(xml, encoding='utf-8'), parser)\n except lxml.etree.XMLSyntaxError as e:\n msg = 'info.xml contains malformed xml: %s' % e\n raise XMLSyntaxError(msg)\n for _ in doc.iter(lxml.etree.Entity):\n raise InvalidAppMetadataXmlException('Must not contain entities')\n pre_transform = lxml.etree.XSLT(lxml.etree.XML(pre_xslt))\n pre_transformed_doc = pre_transform(doc)\n schema_doc = lxml.etree.fromstring(bytes(schema, encoding='utf-8'), parser)\n schema = lxml.etree.XMLSchema(schema_doc)\n try:\n schema.assertValid(pre_transformed_doc) # type: ignore\n except lxml.etree.DocumentInvalid as e:\n msg = 'info.xml did not validate: %s' % e\n raise InvalidAppMetadataXmlException(msg)\n transform = lxml.etree.XSLT(lxml.etree.XML(xslt))\n transformed_doc = 
transform(pre_transformed_doc)\n mapped = element_to_dict(transformed_doc.getroot())\n return mapped\n", "path": "nextcloudappstore/core/api/v1/release/parser.py"}], "after_files": [{"content": "import re\nimport tarfile # type: ignore\nimport lxml.etree # type: ignore\nfrom typing import Dict, Any, Tuple\n\nfrom nextcloudappstore.core.api.v1.release import ReleaseConfig\nfrom nextcloudappstore.core.versioning import pad_max_version, pad_min_version\nfrom rest_framework.exceptions import APIException # type: ignore\n\n\nclass MaxSizeAppMetadataXmlException(APIException):\n pass\n\n\nclass InvalidAppMetadataXmlException(APIException):\n pass\n\n\nclass UnsupportedAppArchiveException(APIException):\n pass\n\n\nclass InvalidAppPackageStructureException(APIException):\n pass\n\n\nclass XMLSyntaxError(APIException):\n pass\n\n\nclass GunZipAppMetadataExtractor:\n def __init__(self, config: ReleaseConfig) -> None:\n \"\"\"\n :argument config the config\n \"\"\"\n self.config = config\n self.app_folder_regex = re.compile(r'^[a-z]+[a-z_]*$')\n\n def extract_app_metadata(self, archive_path: str) -> Tuple[str, str]:\n \"\"\"\n Extracts the info.xml from an tar.gz archive\n :argument archive_path the path to the tar.gz archive\n :raises InvalidAppPackageStructureException if the first level folder\n does not equal the app_id or no info.xml file could be found in the\n appinfo folder\n :return the info.xml as string\n \"\"\"\n if not tarfile.is_tarfile(archive_path):\n msg = '%s is not a valid tar.gz archive ' % archive_path\n raise UnsupportedAppArchiveException(msg)\n\n with tarfile.open(archive_path, 'r:gz') as tar:\n result = self._parse_archive(tar)\n return result\n\n def _parse_archive(self, tar: Any) -> Tuple[str, str]:\n folder = list(\n filter(lambda name: re.match(self.app_folder_regex, name),\n tar.getnames()\n )\n )\n if len(folder) > 1:\n msg = 'More than one possible app folder found'\n raise InvalidAppPackageStructureException(msg)\n elif len(folder) == 0:\n msg = 'No possible app folder found. 
App folder must contain ' \\\n 'only lowercase ASCII characters or underscores'\n raise InvalidAppPackageStructureException(msg)\n\n app_id = folder[0]\n info_path = '%s/appinfo/info.xml' % app_id\n try:\n app_member = tar.getmember(app_id)\n appinfo_member = tar.getmember('%s/appinfo' % app_id)\n info_member = tar.getmember(info_path)\n possible_links = [app_member, appinfo_member, info_member]\n\n for possible_link in possible_links:\n if possible_link.issym() or possible_link.islnk():\n msg = 'Symlinks and hard links can not be used for %s' % \\\n possible_link\n raise InvalidAppPackageStructureException(msg)\n info_file = tar.extractfile(info_member)\n contents = self._stream_read_file(info_file,\n self.config.max_info_size)\n return contents, app_id\n except KeyError:\n msg = 'Could not find %s file inside the archive' % info_path\n raise InvalidAppPackageStructureException(msg)\n\n def _stream_read_file(self, info_file: Any, max_info_size: int) -> str:\n \"\"\"\n Instead of reading everything in one go which is vulnerable to\n zip bombs, stream and accumulate the bytes\n :argument info_file: buffered io reader\n :argument max_info_size: maximum file size in bytes\n :raises MaxSizeAppMetadataXmlException if the maximum size was reached\n :return: the parsed info.xml\n \"\"\"\n # FIXME: If someone finds a less ugly version, please feel free to\n # improve it\n size = 0\n result = b''\n while True:\n size += 1024\n if size > max_info_size:\n msg = 'info.xml was bigger than allowed %i bytes' % \\\n max_info_size\n raise MaxSizeAppMetadataXmlException(msg)\n\n chunk = info_file.read(1024)\n if not chunk:\n break\n result += chunk\n\n return result.decode('utf-8')\n\n\ndef element_to_dict(element: Any) -> Dict:\n type = element.get('type')\n key = element.tag.replace('-', '_')\n if type == 'int':\n return {key: int(element.text)}\n elif type == 'list':\n return {key: list(map(element_to_dict, element.iterchildren()))}\n elif type == 'min-version':\n return {key: pad_min_version(element.text)}\n elif type == 'max-version':\n return {key: pad_max_version(element.text)}\n elif len(list(element)) > 0:\n contents = {}\n for child in element.iterchildren():\n contents.update(element_to_dict(child))\n return {key: contents}\n else:\n return {key: element.text}\n\n\ndef parse_app_metadata(xml: str, schema: str, pre_xslt: str,\n xslt: str) -> Dict:\n \"\"\"\n Parses, validates and maps the xml onto a dict\n :argument xml the info.xml string to parse\n :argument schema the schema xml as string\n :argument pre_xslt xslt which is run before validation to ensure that\n everything is in the correct order and that unknown elements are excluded\n :argument xslt the xslt to transform it to a matching structure\n :raises InvalidAppMetadataXmlException if the schema does not validate\n :return the parsed xml as dict\n \"\"\"\n parser = lxml.etree.XMLParser(resolve_entities=False, no_network=True,\n remove_comments=True, load_dtd=False,\n remove_blank_text=True, dtd_validation=False\n )\n try:\n doc = lxml.etree.fromstring(bytes(xml, encoding='utf-8'), parser)\n except lxml.etree.XMLSyntaxError as e:\n msg = 'info.xml contains malformed xml: %s' % e\n raise XMLSyntaxError(msg)\n for _ in doc.iter(lxml.etree.Entity):\n raise InvalidAppMetadataXmlException('Must not contain entities')\n pre_transform = lxml.etree.XSLT(lxml.etree.XML(pre_xslt))\n pre_transformed_doc = pre_transform(doc)\n schema_doc = lxml.etree.fromstring(bytes(schema, encoding='utf-8'), parser)\n schema = lxml.etree.XMLSchema(schema_doc)\n 
try:\n schema.assertValid(pre_transformed_doc) # type: ignore\n except lxml.etree.DocumentInvalid as e:\n msg = 'info.xml did not validate: %s' % e\n raise InvalidAppMetadataXmlException(msg)\n transform = lxml.etree.XSLT(lxml.etree.XML(xslt))\n transformed_doc = transform(pre_transformed_doc)\n mapped = element_to_dict(transformed_doc.getroot())\n return mapped\n", "path": "nextcloudappstore/core/api/v1/release/parser.py"}]}
| 1,923 | 555 |
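As an aside on the record above: the merged patch defends against zip bombs by streaming the tar member in fixed-size chunks rather than reading it in one call. The sketch below is an illustrative, self-contained restatement of that idea, not the project's actual code; the function name, the 512 KiB cap, and the chunk size are assumptions chosen for the example.

```python
import tarfile

CHUNK = 1024  # read in small, fixed-size chunks


def read_member_capped(tar: tarfile.TarFile, name: str,
                       max_size: int = 512 * 1024) -> bytes:
    # Hypothetical helper: stream a member so a maliciously compressed file
    # cannot be expanded into memory by a single unbounded read() call.
    member = tar.getmember(name)
    fileobj = tar.extractfile(member)
    if fileobj is None:
        raise ValueError('%s is not a regular file' % name)
    total = 0
    parts = []
    while True:
        chunk = fileobj.read(CHUNK)
        if not chunk:
            break
        total += len(chunk)
        if total > max_size:
            raise ValueError('%s exceeds the %d byte limit' % (name, max_size))
        parts.append(chunk)
    return b''.join(parts)
```

The same capped loop appears in the merged diff above, where it replaces a single unbounded `read()` on the extracted file object.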
gh_patches_debug_14171
|
rasdani/github-patches
|
git_diff
|
sql-machine-learning__elasticdl-373
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
When worker/master image creation failed, client should fail instead of trying to launch master.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `elasticdl/client/client.py`
Content:
```
1 import argparse
2 import os
3 import inspect
4 import tempfile
5 import time
6 import getpass
7 import sys
8 from string import Template
9 import docker
10 import yaml
11 from kubernetes.client.apis import core_v1_api
12 from kubernetes import config
13
14
15 def _m_file_in_docker(model_file):
16 return "/model/" + os.path.basename(model_file)
17
18 def _build_docker_image(
19 m_file, image_name, image_base="elasticdl:dev",
20 repository=None
21 ):
22 DOCKER_TEMPLATE = """
23 FROM {}
24 COPY {} {}
25 """
26
27 with tempfile.NamedTemporaryFile(mode="w+", delete=False) as df:
28 df.write(DOCKER_TEMPLATE.format(image_base, m_file, _m_file_in_docker(m_file)))
29
30 client = docker.APIClient(base_url="unix://var/run/docker.sock")
31 print("===== Building Docker Image =====")
32 for line in client.build(
33 dockerfile=df.name, path=".", rm=True, tag=image_name, decode=True
34 ):
35 text = line.get("stream", None)
36 if text:
37 sys.stdout.write(text)
38 sys.stdout.flush()
39 print("===== Docker Image Built =====")
40 if repository != None:
41 for line in client.push(image_name, stream=True, decode=True):
42 print(line)
43
44 def _gen_master_def(image_name, model_file, job_name, argv):
45 master_yaml = """
46 apiVersion: v1
47 kind: Pod
48 metadata:
49 name: "elasticdl-master-{job_name}"
50 labels:
51 purpose: test-command
52 spec:
53 containers:
54 - name: "elasticdl-master-{job_name}"
55 image: "{image_name}"
56 command: ["python"]
57 args: [
58 "-m", "elasticdl.master.main",
59 "--job_name", "{job_name}",
60 "--worker_image", "{image_name}",
61 "--model_file", "{m_file}"
62 ]
63 imagePullPolicy: IfNotPresent
64 env:
65 - name: MY_POD_IP
66 valueFrom:
67 fieldRef:
68 fieldPath: status.podIP
69 restartPolicy: Never
70 """ .format(m_file=_m_file_in_docker(model_file), image_name=image_name, job_name=job_name)
71
72 master_def = yaml.safe_load(master_yaml)
73
74 # Build master arguments
75 master_def['spec']['containers'][0]['args'].extend(argv)
76 return master_def
77
78 def _submit(image_name, model_file, job_name, argv):
79 master_def = _gen_master_def(image_name, model_file, job_name, argv)
80 config.load_kube_config()
81 api = core_v1_api.CoreV1Api()
82 resp = api.create_namespaced_pod(body=master_def, namespace="default")
83 print("Master launched. status='%s'" % str(resp.status))
84
85 def main():
86 parser = argparse.ArgumentParser(description="ElasticDL Client")
87 # Rewrite model_file argument and pass all other arguments to master.
88 parser.add_argument("--model_file", help="Path to Model file", required=True)
89 parser.add_argument("--image_base", help="Base image containing elasticdl runtime environment.", required=True)
90 parser.add_argument("--repository", help="The repository to push docker image to.")
91 parser.add_argument("--job_name", help="ElasticDL job name", required=True)
92 args, argv = parser.parse_known_args()
93
94 job_name = args.job_name + "-" + str(int(round(time.time() * 1000)))
95 image_name = args.image_base + '_' + job_name
96 _build_docker_image(args.model_file, image_name, image_base=args.image_base,
97 repository=args.repository)
98 _submit(image_name, args.model_file, job_name, argv)
99
100
101 if __name__ == "__main__":
102 main()
103
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/elasticdl/client/client.py b/elasticdl/client/client.py
--- a/elasticdl/client/client.py
+++ b/elasticdl/client/client.py
@@ -32,12 +32,14 @@
for line in client.build(
dockerfile=df.name, path=".", rm=True, tag=image_name, decode=True
):
+ if "error" in line:
+ raise RuntimeError("Docker image build failure: " % line["error"])
text = line.get("stream", None)
if text:
sys.stdout.write(text)
sys.stdout.flush()
print("===== Docker Image Built =====")
- if repository != None:
+ if repository is not None:
for line in client.push(image_name, stream=True, decode=True):
print(line)
|
{"golden_diff": "diff --git a/elasticdl/client/client.py b/elasticdl/client/client.py\n--- a/elasticdl/client/client.py\n+++ b/elasticdl/client/client.py\n@@ -32,12 +32,14 @@\n for line in client.build(\n dockerfile=df.name, path=\".\", rm=True, tag=image_name, decode=True\n ):\n+ if \"error\" in line:\n+ raise RuntimeError(\"Docker image build failure: \" % line[\"error\"])\n text = line.get(\"stream\", None)\n if text:\n sys.stdout.write(text)\n sys.stdout.flush()\n print(\"===== Docker Image Built =====\")\n- if repository != None:\n+ if repository is not None:\n for line in client.push(image_name, stream=True, decode=True):\n print(line)\n", "issue": "When worker/master image creation failed, client should fail instead of trying to launch master.\n\n", "before_files": [{"content": "import argparse\nimport os\nimport inspect\nimport tempfile\nimport time\nimport getpass\nimport sys\nfrom string import Template\nimport docker\nimport yaml\nfrom kubernetes.client.apis import core_v1_api\nfrom kubernetes import config\n\n\ndef _m_file_in_docker(model_file):\n return \"/model/\" + os.path.basename(model_file)\n\ndef _build_docker_image(\n m_file, image_name, image_base=\"elasticdl:dev\",\n repository=None\n):\n DOCKER_TEMPLATE = \"\"\"\nFROM {}\nCOPY {} {}\n\"\"\"\n\n with tempfile.NamedTemporaryFile(mode=\"w+\", delete=False) as df:\n df.write(DOCKER_TEMPLATE.format(image_base, m_file, _m_file_in_docker(m_file)))\n\n client = docker.APIClient(base_url=\"unix://var/run/docker.sock\")\n print(\"===== Building Docker Image =====\")\n for line in client.build(\n dockerfile=df.name, path=\".\", rm=True, tag=image_name, decode=True\n ):\n text = line.get(\"stream\", None)\n if text:\n sys.stdout.write(text)\n sys.stdout.flush()\n print(\"===== Docker Image Built =====\")\n if repository != None:\n for line in client.push(image_name, stream=True, decode=True):\n print(line)\n\ndef _gen_master_def(image_name, model_file, job_name, argv):\n master_yaml = \"\"\"\napiVersion: v1\nkind: Pod\nmetadata:\n name: \"elasticdl-master-{job_name}\"\n labels:\n purpose: test-command\nspec:\n containers:\n - name: \"elasticdl-master-{job_name}\"\n image: \"{image_name}\"\n command: [\"python\"]\n args: [\n \"-m\", \"elasticdl.master.main\",\n \"--job_name\", \"{job_name}\",\n \"--worker_image\", \"{image_name}\",\n \"--model_file\", \"{m_file}\"\n ]\n imagePullPolicy: IfNotPresent \n env:\n - name: MY_POD_IP\n valueFrom:\n fieldRef:\n fieldPath: status.podIP\n restartPolicy: Never\n\"\"\" .format(m_file=_m_file_in_docker(model_file), image_name=image_name, job_name=job_name)\n\n master_def = yaml.safe_load(master_yaml)\n\n # Build master arguments\n master_def['spec']['containers'][0]['args'].extend(argv)\n return master_def\n\ndef _submit(image_name, model_file, job_name, argv):\n master_def = _gen_master_def(image_name, model_file, job_name, argv)\n config.load_kube_config()\n api = core_v1_api.CoreV1Api()\n resp = api.create_namespaced_pod(body=master_def, namespace=\"default\")\n print(\"Master launched. 
status='%s'\" % str(resp.status))\n\ndef main():\n parser = argparse.ArgumentParser(description=\"ElasticDL Client\")\n # Rewrite model_file argument and pass all other arguments to master.\n parser.add_argument(\"--model_file\", help=\"Path to Model file\", required=True)\n parser.add_argument(\"--image_base\", help=\"Base image containing elasticdl runtime environment.\", required=True)\n parser.add_argument(\"--repository\", help=\"The repository to push docker image to.\")\n parser.add_argument(\"--job_name\", help=\"ElasticDL job name\", required=True)\n args, argv = parser.parse_known_args()\n\n job_name = args.job_name + \"-\" + str(int(round(time.time() * 1000)))\n image_name = args.image_base + '_' + job_name \n _build_docker_image(args.model_file, image_name, image_base=args.image_base,\n repository=args.repository)\n _submit(image_name, args.model_file, job_name, argv)\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "elasticdl/client/client.py"}], "after_files": [{"content": "import argparse\nimport os\nimport inspect\nimport tempfile\nimport time\nimport getpass\nimport sys\nfrom string import Template\nimport docker\nimport yaml\nfrom kubernetes.client.apis import core_v1_api\nfrom kubernetes import config\n\n\ndef _m_file_in_docker(model_file):\n return \"/model/\" + os.path.basename(model_file)\n\ndef _build_docker_image(\n m_file, image_name, image_base=\"elasticdl:dev\",\n repository=None\n):\n DOCKER_TEMPLATE = \"\"\"\nFROM {}\nCOPY {} {}\n\"\"\"\n\n with tempfile.NamedTemporaryFile(mode=\"w+\", delete=False) as df:\n df.write(DOCKER_TEMPLATE.format(image_base, m_file, _m_file_in_docker(m_file)))\n\n client = docker.APIClient(base_url=\"unix://var/run/docker.sock\")\n print(\"===== Building Docker Image =====\")\n for line in client.build(\n dockerfile=df.name, path=\".\", rm=True, tag=image_name, decode=True\n ):\n if \"error\" in line:\n raise RuntimeError(\"Docker image build failure: \" % line[\"error\"])\n text = line.get(\"stream\", None)\n if text:\n sys.stdout.write(text)\n sys.stdout.flush()\n print(\"===== Docker Image Built =====\")\n if repository is not None:\n for line in client.push(image_name, stream=True, decode=True):\n print(line)\n\ndef _gen_master_def(image_name, model_file, job_name, argv):\n master_yaml = \"\"\"\napiVersion: v1\nkind: Pod\nmetadata:\n name: \"elasticdl-master-{job_name}\"\n labels:\n purpose: test-command\nspec:\n containers:\n - name: \"elasticdl-master-{job_name}\"\n image: \"{image_name}\"\n command: [\"python\"]\n args: [\n \"-m\", \"elasticdl.master.main\",\n \"--job_name\", \"{job_name}\",\n \"--worker_image\", \"{image_name}\",\n \"--model_file\", \"{m_file}\"\n ]\n imagePullPolicy: IfNotPresent \n env:\n - name: MY_POD_IP\n valueFrom:\n fieldRef:\n fieldPath: status.podIP\n restartPolicy: Never\n\"\"\" .format(m_file=_m_file_in_docker(model_file), image_name=image_name, job_name=job_name)\n\n master_def = yaml.safe_load(master_yaml)\n\n # Build master arguments\n master_def['spec']['containers'][0]['args'].extend(argv)\n return master_def\n\ndef _submit(image_name, model_file, job_name, argv):\n master_def = _gen_master_def(image_name, model_file, job_name, argv)\n config.load_kube_config()\n api = core_v1_api.CoreV1Api()\n resp = api.create_namespaced_pod(body=master_def, namespace=\"default\")\n print(\"Master launched. 
status='%s'\" % str(resp.status))\n\ndef main():\n parser = argparse.ArgumentParser(description=\"ElasticDL Client\")\n # Rewrite model_file argument and pass all other arguments to master.\n parser.add_argument(\"--model_file\", help=\"Path to Model file\", required=True)\n parser.add_argument(\"--image_base\", help=\"Base image containing elasticdl runtime environment.\", required=True)\n parser.add_argument(\"--repository\", help=\"The repository to push docker image to.\")\n parser.add_argument(\"--job_name\", help=\"ElasticDL job name\", required=True)\n args, argv = parser.parse_known_args()\n\n job_name = args.job_name + \"-\" + str(int(round(time.time() * 1000)))\n image_name = args.image_base + '_' + job_name \n _build_docker_image(args.model_file, image_name, image_base=args.image_base,\n repository=args.repository)\n _submit(image_name, args.model_file, job_name, argv)\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "elasticdl/client/client.py"}]}
| 1,268 | 172 |
gh_patches_debug_9153
|
rasdani/github-patches
|
git_diff
|
RedHatInsights__insights-core-2101
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
bash_version example doesn't work with json format
Running `insights run -p examples/rules -f json` results in a traceback because the `bash_version` rule puts an `InstalledRpm` object into its response:
```
TypeError: Object of type 'InstalledRpm' is not JSON serializable
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/rules/bash_version.py`
Content:
```
1 """
2 Bash Version
3 ============
4
5 This is a simple rule and can be run against the local host
6 using the following command::
7
8 $ insights-run -p examples.rules.bash_version
9
10 or from the examples/rules directory::
11
12 $ python sample_rules.py
13 """
14 from insights.core.plugins import make_pass, rule
15 from insights.parsers.installed_rpms import InstalledRpms
16
17 KEY = "BASH_VERSION"
18
19 CONTENT = "Bash RPM Version: {{ bash_version }}"
20
21
22 @rule(InstalledRpms)
23 def report(rpms):
24 bash_ver = rpms.get_max('bash')
25 return make_pass(KEY, bash_version=bash_ver)
26
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/examples/rules/bash_version.py b/examples/rules/bash_version.py
--- a/examples/rules/bash_version.py
+++ b/examples/rules/bash_version.py
@@ -11,7 +11,7 @@
$ python sample_rules.py
"""
-from insights.core.plugins import make_pass, rule
+from insights.core.plugins import make_info, rule
from insights.parsers.installed_rpms import InstalledRpms
KEY = "BASH_VERSION"
@@ -21,5 +21,5 @@
@rule(InstalledRpms)
def report(rpms):
- bash_ver = rpms.get_max('bash')
- return make_pass(KEY, bash_version=bash_ver)
+ bash = rpms.get_max('bash')
+ return make_info(KEY, bash_version=bash.nvra)
|
{"golden_diff": "diff --git a/examples/rules/bash_version.py b/examples/rules/bash_version.py\n--- a/examples/rules/bash_version.py\n+++ b/examples/rules/bash_version.py\n@@ -11,7 +11,7 @@\n \n $ python sample_rules.py\n \"\"\"\n-from insights.core.plugins import make_pass, rule\n+from insights.core.plugins import make_info, rule\n from insights.parsers.installed_rpms import InstalledRpms\n \n KEY = \"BASH_VERSION\"\n@@ -21,5 +21,5 @@\n \n @rule(InstalledRpms)\n def report(rpms):\n- bash_ver = rpms.get_max('bash')\n- return make_pass(KEY, bash_version=bash_ver)\n+ bash = rpms.get_max('bash')\n+ return make_info(KEY, bash_version=bash.nvra)\n", "issue": "bash_version example doesn't work with json format\nRunning `insights run -p examples/rules -f json` results in a traceback because the `bash_version` rule puts an `InstalledRpm` object into its response:\r\n\r\n```\r\nTypeError: Object of type 'InstalledRpm' is not JSON serializable\r\n```\n", "before_files": [{"content": "\"\"\"\nBash Version\n============\n\nThis is a simple rule and can be run against the local host\nusing the following command::\n\n$ insights-run -p examples.rules.bash_version\n\nor from the examples/rules directory::\n\n$ python sample_rules.py\n\"\"\"\nfrom insights.core.plugins import make_pass, rule\nfrom insights.parsers.installed_rpms import InstalledRpms\n\nKEY = \"BASH_VERSION\"\n\nCONTENT = \"Bash RPM Version: {{ bash_version }}\"\n\n\n@rule(InstalledRpms)\ndef report(rpms):\n bash_ver = rpms.get_max('bash')\n return make_pass(KEY, bash_version=bash_ver)\n", "path": "examples/rules/bash_version.py"}], "after_files": [{"content": "\"\"\"\nBash Version\n============\n\nThis is a simple rule and can be run against the local host\nusing the following command::\n\n$ insights-run -p examples.rules.bash_version\n\nor from the examples/rules directory::\n\n$ python sample_rules.py\n\"\"\"\nfrom insights.core.plugins import make_info, rule\nfrom insights.parsers.installed_rpms import InstalledRpms\n\nKEY = \"BASH_VERSION\"\n\nCONTENT = \"Bash RPM Version: {{ bash_version }}\"\n\n\n@rule(InstalledRpms)\ndef report(rpms):\n bash = rpms.get_max('bash')\n return make_info(KEY, bash_version=bash.nvra)\n", "path": "examples/rules/bash_version.py"}]}
| 501 | 167 |
gh_patches_debug_19148
|
rasdani/github-patches
|
git_diff
|
coala__coala-bears-1422
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Most YAML documents use document starts (---)
Hi,
I am the creator of yamllint, the linter coala uses for YAML.
Since #965 was merged three months ago, coala fails on many projects like Ansible, OpenStack and even yamllint itself, because coala doesn't accept document start markers (`---`) anymore.
Document start markers are commonly used, and required when declaring multiple documents in a single `.yaml` file (see [the spec](http://yaml.org/spec/1.2/spec.html#id2800132)).
The proposed fix in the original issue (#923) was to disable the rule, but the implemented fix (#965) made document starts forbidden.
My opinion is that coala should either require document starts, or disable the rule by default.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bears/yaml/YAMLLintBear.py`
Content:
```
1 from coalib.bearlib.abstractions.Linter import linter
2 from dependency_management.requirements.PipRequirement import PipRequirement
3 import yaml
4
5
6 @linter(executable='yamllint',
7 output_format='regex',
8 output_regex=r'.+:(?P<line>\d+):(?P<column>\d+): '
9 r'\[(?P<severity>error|warning)\] (?P<message>.+)')
10 class YAMLLintBear:
11 """
12 Check yaml code for errors and possible problems.
13
14 You can read more about capabilities at
15 <http://yamllint.readthedocs.org/en/latest/rules.html>.
16 """
17
18 LANGUAGES = {'YAML'}
19 REQUIREMENTS = {PipRequirement('yamllint', '1.5')}
20 AUTHORS = {'The coala developers'}
21 AUTHORS_EMAILS = {'[email protected]'}
22 LICENSE = 'AGPL-3.0'
23 CAN_DETECT = {'Syntax', 'Formatting'}
24
25 @staticmethod
26 def generate_config(filename, file,
27 document_start: bool=False):
28 """
29 :param document_start:
30 Use this rule to require or forbid the use of document start
31 marker (---).
32 """
33 yamllint_configs = {
34 'extends': 'default',
35 'rules': {
36 'document-start': {
37 'present': False
38 }
39 }
40 }
41 if document_start:
42 yamllint_configs['rules']['document-start']['present'] = True
43
44 return yaml.dump(yamllint_configs)
45
46 @staticmethod
47 def create_arguments(filename, file, config_file, yamllint_config: str=''):
48 """
49 :param yamllint_config: Path to a custom configuration file.
50 """
51 args = ('-f', 'parsable', filename)
52 if yamllint_config:
53 args += ('--config-file=' + yamllint_config,)
54 else:
55 args += ('--config-file=' + config_file,)
56 return args
57
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/bears/yaml/YAMLLintBear.py b/bears/yaml/YAMLLintBear.py
--- a/bears/yaml/YAMLLintBear.py
+++ b/bears/yaml/YAMLLintBear.py
@@ -24,7 +24,7 @@
@staticmethod
def generate_config(filename, file,
- document_start: bool=False):
+ document_start: bool=None):
"""
:param document_start:
Use this rule to require or forbid the use of document start
@@ -33,13 +33,10 @@
yamllint_configs = {
'extends': 'default',
'rules': {
- 'document-start': {
- 'present': False
- }
- }
+ 'document-start': 'disable' if document_start is None
+ else {'present': document_start},
+ },
}
- if document_start:
- yamllint_configs['rules']['document-start']['present'] = True
return yaml.dump(yamllint_configs)
|
{"golden_diff": "diff --git a/bears/yaml/YAMLLintBear.py b/bears/yaml/YAMLLintBear.py\n--- a/bears/yaml/YAMLLintBear.py\n+++ b/bears/yaml/YAMLLintBear.py\n@@ -24,7 +24,7 @@\n \n @staticmethod\n def generate_config(filename, file,\n- document_start: bool=False):\n+ document_start: bool=None):\n \"\"\"\n :param document_start:\n Use this rule to require or forbid the use of document start\n@@ -33,13 +33,10 @@\n yamllint_configs = {\n 'extends': 'default',\n 'rules': {\n- 'document-start': {\n- 'present': False\n- }\n- }\n+ 'document-start': 'disable' if document_start is None\n+ else {'present': document_start},\n+ },\n }\n- if document_start:\n- yamllint_configs['rules']['document-start']['present'] = True\n \n return yaml.dump(yamllint_configs)\n", "issue": "Most YAML documents use document starts (---)\nHi,\r\n\r\nI am the creator of yamllint, the linter coala uses for YAML.\r\n\r\nSince #965 was merged three months ago, coala fails on many projects like Ansible, OpenStack and even yamllint itself, because coala doesn't accept document start markers (`---`) anymore.\r\n\r\nDocument start markers are commonly used, and required when declaring multiple documents in a single `.yaml` file (see [the spec](http://yaml.org/spec/1.2/spec.html#id2800132)).\r\n\r\nThe proposed fix in the original issue (#923) was to disable the rule, but the implemented fix (#965) made document starts forbidden.\r\n\r\nMy opinion is that coala should either require document starts, or disable the rule by default.\n", "before_files": [{"content": "from coalib.bearlib.abstractions.Linter import linter\nfrom dependency_management.requirements.PipRequirement import PipRequirement\nimport yaml\n\n\n@linter(executable='yamllint',\n output_format='regex',\n output_regex=r'.+:(?P<line>\\d+):(?P<column>\\d+): '\n r'\\[(?P<severity>error|warning)\\] (?P<message>.+)')\nclass YAMLLintBear:\n \"\"\"\n Check yaml code for errors and possible problems.\n\n You can read more about capabilities at\n <http://yamllint.readthedocs.org/en/latest/rules.html>.\n \"\"\"\n\n LANGUAGES = {'YAML'}\n REQUIREMENTS = {PipRequirement('yamllint', '1.5')}\n AUTHORS = {'The coala developers'}\n AUTHORS_EMAILS = {'[email protected]'}\n LICENSE = 'AGPL-3.0'\n CAN_DETECT = {'Syntax', 'Formatting'}\n\n @staticmethod\n def generate_config(filename, file,\n document_start: bool=False):\n \"\"\"\n :param document_start:\n Use this rule to require or forbid the use of document start\n marker (---).\n \"\"\"\n yamllint_configs = {\n 'extends': 'default',\n 'rules': {\n 'document-start': {\n 'present': False\n }\n }\n }\n if document_start:\n yamllint_configs['rules']['document-start']['present'] = True\n\n return yaml.dump(yamllint_configs)\n\n @staticmethod\n def create_arguments(filename, file, config_file, yamllint_config: str=''):\n \"\"\"\n :param yamllint_config: Path to a custom configuration file.\n \"\"\"\n args = ('-f', 'parsable', filename)\n if yamllint_config:\n args += ('--config-file=' + yamllint_config,)\n else:\n args += ('--config-file=' + config_file,)\n return args\n", "path": "bears/yaml/YAMLLintBear.py"}], "after_files": [{"content": "from coalib.bearlib.abstractions.Linter import linter\nfrom dependency_management.requirements.PipRequirement import PipRequirement\nimport yaml\n\n\n@linter(executable='yamllint',\n output_format='regex',\n output_regex=r'.+:(?P<line>\\d+):(?P<column>\\d+): '\n r'\\[(?P<severity>error|warning)\\] (?P<message>.+)')\nclass YAMLLintBear:\n \"\"\"\n Check yaml code for errors and possible problems.\n\n You 
can read more about capabilities at\n <http://yamllint.readthedocs.org/en/latest/rules.html>.\n \"\"\"\n\n LANGUAGES = {'YAML'}\n REQUIREMENTS = {PipRequirement('yamllint', '1.5')}\n AUTHORS = {'The coala developers'}\n AUTHORS_EMAILS = {'[email protected]'}\n LICENSE = 'AGPL-3.0'\n CAN_DETECT = {'Syntax', 'Formatting'}\n\n @staticmethod\n def generate_config(filename, file,\n document_start: bool=None):\n \"\"\"\n :param document_start:\n Use this rule to require or forbid the use of document start\n marker (---).\n \"\"\"\n yamllint_configs = {\n 'extends': 'default',\n 'rules': {\n 'document-start': 'disable' if document_start is None\n else {'present': document_start},\n },\n }\n\n return yaml.dump(yamllint_configs)\n\n @staticmethod\n def create_arguments(filename, file, config_file, yamllint_config: str=''):\n \"\"\"\n :param yamllint_config: Path to a custom configuration file.\n \"\"\"\n args = ('-f', 'parsable', filename)\n if yamllint_config:\n args += ('--config-file=' + yamllint_config,)\n else:\n args += ('--config-file=' + config_file,)\n return args\n", "path": "bears/yaml/YAMLLintBear.py"}]}
| 976 | 232 |
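A compact restatement of the behaviour the merged patch above gives `generate_config`: the document-start option becomes tri-state, with the rule disabled when the setting is omitted. This is an illustrative sketch, not coala's code; only the shape of the returned mapping mirrors the diff, and the function name is invented for the example.

```python
import yaml


def yamllint_config(document_start=None):
    # None -> rule disabled (default); True -> require '---'; False -> forbid it.
    return yaml.dump({
        'extends': 'default',
        'rules': {
            'document-start': 'disable' if document_start is None
                              else {'present': document_start},
        },
    })


print(yamllint_config())      # document-start rule disabled
print(yamllint_config(True))  # document start marker required
```

With the default of `None`, files that begin with `---` (Ansible playbooks, multi-document YAML) no longer fail the check.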
gh_patches_debug_22852
|
rasdani/github-patches
|
git_diff
|
python__mypy-3330
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
mypy_extensions is listed as owned by David Foster
See https://github.com/python/mypy/blob/master/extensions/setup.py#L37
David Foster did indeed create the first version but I presume he doesn't want to be bothered about the subsequent additions?
We should probably change this to "The mypy developers" -- but where to point the email? Maybe it can be omitted. The url might also better point to GitHub.
Attn: @davidfstr
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `extensions/setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 # NOTE: This package must support Python 2.7 in addition to Python 3.x
4
5 from distutils.core import setup
6
7 version = '0.2.0-dev'
8 description = 'Experimental type system extensions for programs checked with the mypy typechecker.'
9 long_description = '''
10 Mypy Extensions
11 ===============
12
13 The "mypy_extensions" module defines experimental extensions to the
14 standard "typing" module that are supported by the mypy typechecker.
15 '''.lstrip()
16
17 classifiers = [
18 'Development Status :: 2 - Pre-Alpha',
19 'Environment :: Console',
20 'Intended Audience :: Developers',
21 'License :: OSI Approved :: MIT License',
22 'Operating System :: POSIX',
23 'Programming Language :: Python :: 2',
24 'Programming Language :: Python :: 2.7',
25 'Programming Language :: Python :: 3',
26 'Programming Language :: Python :: 3.3',
27 'Programming Language :: Python :: 3.4',
28 'Programming Language :: Python :: 3.5',
29 'Topic :: Software Development',
30 ]
31
32 setup(
33 name='mypy_extensions',
34 version=version,
35 description=description,
36 long_description=long_description,
37 author='David Foster',
38 author_email='[email protected]',
39 url='http://www.mypy-lang.org/',
40 license='MIT License',
41 platforms=['POSIX'],
42 py_modules=['mypy_extensions'],
43 classifiers=classifiers,
44 )
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/extensions/setup.py b/extensions/setup.py
--- a/extensions/setup.py
+++ b/extensions/setup.py
@@ -4,7 +4,7 @@
from distutils.core import setup
-version = '0.2.0-dev'
+version = '0.2.0'
description = 'Experimental type system extensions for programs checked with the mypy typechecker.'
long_description = '''
Mypy Extensions
@@ -26,6 +26,7 @@
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
'Topic :: Software Development',
]
@@ -34,8 +35,8 @@
version=version,
description=description,
long_description=long_description,
- author='David Foster',
- author_email='[email protected]',
+ author='The mypy developers',
+ author_email='[email protected]',
url='http://www.mypy-lang.org/',
license='MIT License',
platforms=['POSIX'],
|
{"golden_diff": "diff --git a/extensions/setup.py b/extensions/setup.py\n--- a/extensions/setup.py\n+++ b/extensions/setup.py\n@@ -4,7 +4,7 @@\n \n from distutils.core import setup\n \n-version = '0.2.0-dev'\n+version = '0.2.0'\n description = 'Experimental type system extensions for programs checked with the mypy typechecker.'\n long_description = '''\n Mypy Extensions\n@@ -26,6 +26,7 @@\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n+ 'Programming Language :: Python :: 3.6',\n 'Topic :: Software Development',\n ]\n \n@@ -34,8 +35,8 @@\n version=version,\n description=description,\n long_description=long_description,\n- author='David Foster',\n- author_email='[email protected]',\n+ author='The mypy developers',\n+ author_email='[email protected]',\n url='http://www.mypy-lang.org/',\n license='MIT License',\n platforms=['POSIX'],\n", "issue": "mypy_extensions is listed as owned by David Foster\nSee https://github.com/python/mypy/blob/master/extensions/setup.py#L37\r\n\r\nDavid Foster did indeed create the first version but I presume he doesn't want to be bothered about the subsequent additions?\r\n\r\nWe should probably change this to \"The mypy developers\" -- but where to point the email? Maybe it can be omitted. The url might also better point to GitHub.\r\n\r\nAttn: @davidfstr \n", "before_files": [{"content": "#!/usr/bin/env python\n\n# NOTE: This package must support Python 2.7 in addition to Python 3.x\n\nfrom distutils.core import setup\n\nversion = '0.2.0-dev'\ndescription = 'Experimental type system extensions for programs checked with the mypy typechecker.'\nlong_description = '''\nMypy Extensions\n===============\n\nThe \"mypy_extensions\" module defines experimental extensions to the\nstandard \"typing\" module that are supported by the mypy typechecker.\n'''.lstrip()\n\nclassifiers = [\n 'Development Status :: 2 - Pre-Alpha',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: POSIX',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Topic :: Software Development',\n]\n\nsetup(\n name='mypy_extensions',\n version=version,\n description=description,\n long_description=long_description,\n author='David Foster',\n author_email='[email protected]',\n url='http://www.mypy-lang.org/',\n license='MIT License',\n platforms=['POSIX'],\n py_modules=['mypy_extensions'],\n classifiers=classifiers,\n)\n", "path": "extensions/setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\n# NOTE: This package must support Python 2.7 in addition to Python 3.x\n\nfrom distutils.core import setup\n\nversion = '0.2.0'\ndescription = 'Experimental type system extensions for programs checked with the mypy typechecker.'\nlong_description = '''\nMypy Extensions\n===============\n\nThe \"mypy_extensions\" module defines experimental extensions to the\nstandard \"typing\" module that are supported by the mypy typechecker.\n'''.lstrip()\n\nclassifiers = [\n 'Development Status :: 2 - Pre-Alpha',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: POSIX',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming 
Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Topic :: Software Development',\n]\n\nsetup(\n name='mypy_extensions',\n version=version,\n description=description,\n long_description=long_description,\n author='The mypy developers',\n author_email='[email protected]',\n url='http://www.mypy-lang.org/',\n license='MIT License',\n platforms=['POSIX'],\n py_modules=['mypy_extensions'],\n classifiers=classifiers,\n)\n", "path": "extensions/setup.py"}]}
| 748 | 253 |
gh_patches_debug_4534
|
rasdani/github-patches
|
git_diff
|
bridgecrewio__checkov-4237
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
False positive for check CKV_AZURE_5: "Ensure RBAC is enabled on AKS clusters"
**Describe the issue**
The check CKV_AZURE_5 for terraform resource `azurerm_kubernetes_cluster` ensures that RBAC is enabled in the kubernetes cluster.
Depending on how the `role_based_access_control_enabled` property is set, the check result is accurate or not:
- `role_based_access_control_enabled = true`: the check passes. It's ok.
- `role_based_access_control_enabled = false`: the check fails. It's ok.
- `role_based_access_control_enabled` not defined: the check fails. It's NOT ok, as the default value of this property is `true` (see https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/kubernetes_cluster#role_based_access_control_enabled)
**Examples**
This example will fail but it shouldn't:
```
resource "azurerm_resource_group" "foo" {
name = "foo"
location = "West Europe"
}
resource "azurerm_kubernetes_cluster" "foo" {
name = "foo"
resource_group_name = azurerm_resource_group.foo.name
location = azurerm_resource_group.foo.location
dns_prefix = "foo"
default_node_pool {
name = "default"
node_count = 1
vm_size = "Standard_D2_v2"
}
identity {
type = "SystemAssigned"
}
# role_based_access_control_enabled = true
}
```
**Version (please complete the following information):**
- Checkov Version : `2.2.252` (latest docker image)
**Additional context**
The problem is in this source file : https://github.com/bridgecrewio/checkov/blob/48abe40926c97bd2e6f8c80491369be462ce3edd/checkov/terraform/checks/resource/azure/AKSRbacEnabled.py#L19-L29
It returns `false` if the property is not found in the resource. It shouldn't be the case as the default value of the property is `true`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `checkov/terraform/checks/resource/azure/AKSRbacEnabled.py`
Content:
```
1 import dpath.util
2 from checkov.common.models.enums import CheckCategories, CheckResult
3 from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
4
5
6 class AKSRbacEnabled(BaseResourceCheck):
7 def __init__(self):
8 name = "Ensure RBAC is enabled on AKS clusters"
9 id = "CKV_AZURE_5"
10 supported_resources = ["azurerm_kubernetes_cluster"]
11 categories = [CheckCategories.KUBERNETES]
12 super().__init__(
13 name=name,
14 id=id,
15 categories=categories,
16 supported_resources=supported_resources,
17 )
18
19 def scan_resource_conf(self, conf):
20 self.evaluated_keys = [
21 "role_based_access_control/[0]/enabled", # azurerm < 2.99.0
22 "role_based_access_control_enabled", # azurerm >= 2.99.0
23 ]
24
25 for key in self.evaluated_keys:
26 if dpath.search(conf, key) and dpath.get(conf, key)[0]:
27 return CheckResult.PASSED
28
29 return CheckResult.FAILED
30
31
32 check = AKSRbacEnabled()
33
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/checkov/terraform/checks/resource/azure/AKSRbacEnabled.py b/checkov/terraform/checks/resource/azure/AKSRbacEnabled.py
--- a/checkov/terraform/checks/resource/azure/AKSRbacEnabled.py
+++ b/checkov/terraform/checks/resource/azure/AKSRbacEnabled.py
@@ -23,10 +23,10 @@
]
for key in self.evaluated_keys:
- if dpath.search(conf, key) and dpath.get(conf, key)[0]:
- return CheckResult.PASSED
+ if dpath.search(conf, key):
+ return CheckResult.PASSED if dpath.get(conf, key)[0] else CheckResult.FAILED
- return CheckResult.FAILED
+ return CheckResult.PASSED
check = AKSRbacEnabled()
|
{"golden_diff": "diff --git a/checkov/terraform/checks/resource/azure/AKSRbacEnabled.py b/checkov/terraform/checks/resource/azure/AKSRbacEnabled.py\n--- a/checkov/terraform/checks/resource/azure/AKSRbacEnabled.py\n+++ b/checkov/terraform/checks/resource/azure/AKSRbacEnabled.py\n@@ -23,10 +23,10 @@\n ]\n \n for key in self.evaluated_keys:\n- if dpath.search(conf, key) and dpath.get(conf, key)[0]:\n- return CheckResult.PASSED\n+ if dpath.search(conf, key):\n+ return CheckResult.PASSED if dpath.get(conf, key)[0] else CheckResult.FAILED\n \n- return CheckResult.FAILED\n+ return CheckResult.PASSED\n \n \n check = AKSRbacEnabled()\n", "issue": "False positive for check CKV_AZURE_5: \"Ensure RBAC is enabled on AKS clusters\"\n**Describe the issue**\r\nThe check CKV_AZURE_5 for terraform resource `azurerm_kubernetes_cluster` ensures that RBAC is enabled in the kubernetes cluster.\r\nDepending on how the `role_based_access_control_enabled` property is set, the check result is exact or not :\r\n- `role_based_access_control_enabled = true`: the check passes. It's ok.\r\n- `role_based_access_control_enabled = false`: the check fails. It's ok.\r\n- `role_based_access_control_enabled` not defined : check fails. It's NOT ok as default value of this property is `true` (see https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/kubernetes_cluster#role_based_access_control_enabled)\r\n\r\n**Examples**\r\nThis example will fails but it shouldn't:\r\n```\r\nresource \"azurerm_resource_group\" \"foo\" {\r\n name = \"foo\"\r\n location = \"West Europe\"\r\n}\r\n\r\nresource \"azurerm_kubernetes_cluster\" \"foo\" {\r\n name = \"foo\"\r\n resource_group_name = azurerm_resource_group.foo.name\r\n location = azurerm_resource_group.foo.location\r\n dns_prefix = \"foo\"\r\n\r\n default_node_pool {\r\n name = \"default\"\r\n node_count = 1\r\n vm_size = \"Standard_D2_v2\"\r\n }\r\n\r\n identity {\r\n type = \"SystemAssigned\"\r\n }\r\n\r\n # role_based_access_control_enabled = true\r\n}\r\n```\r\n\r\n**Version (please complete the following information):**\r\n - Checkov Version : `2.2.252` (latest docker image)\r\n\r\n**Additional context**\r\nThe problem is in this source file : https://github.com/bridgecrewio/checkov/blob/48abe40926c97bd2e6f8c80491369be462ce3edd/checkov/terraform/checks/resource/azure/AKSRbacEnabled.py#L19-L29\r\n\r\nIt returns `false` if the property is not found in the resource. 
It shouldn't be the case as the default value of the property is `true`\r\n\n", "before_files": [{"content": "import dpath.util\nfrom checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck\n\n\nclass AKSRbacEnabled(BaseResourceCheck):\n def __init__(self):\n name = \"Ensure RBAC is enabled on AKS clusters\"\n id = \"CKV_AZURE_5\"\n supported_resources = [\"azurerm_kubernetes_cluster\"]\n categories = [CheckCategories.KUBERNETES]\n super().__init__(\n name=name,\n id=id,\n categories=categories,\n supported_resources=supported_resources,\n )\n\n def scan_resource_conf(self, conf):\n self.evaluated_keys = [\n \"role_based_access_control/[0]/enabled\", # azurerm < 2.99.0\n \"role_based_access_control_enabled\", # azurerm >= 2.99.0\n ]\n\n for key in self.evaluated_keys:\n if dpath.search(conf, key) and dpath.get(conf, key)[0]:\n return CheckResult.PASSED\n\n return CheckResult.FAILED\n\n\ncheck = AKSRbacEnabled()\n", "path": "checkov/terraform/checks/resource/azure/AKSRbacEnabled.py"}], "after_files": [{"content": "import dpath.util\nfrom checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck\n\n\nclass AKSRbacEnabled(BaseResourceCheck):\n def __init__(self):\n name = \"Ensure RBAC is enabled on AKS clusters\"\n id = \"CKV_AZURE_5\"\n supported_resources = [\"azurerm_kubernetes_cluster\"]\n categories = [CheckCategories.KUBERNETES]\n super().__init__(\n name=name,\n id=id,\n categories=categories,\n supported_resources=supported_resources,\n )\n\n def scan_resource_conf(self, conf):\n self.evaluated_keys = [\n \"role_based_access_control/[0]/enabled\", # azurerm < 2.99.0\n \"role_based_access_control_enabled\", # azurerm >= 2.99.0\n ]\n\n for key in self.evaluated_keys:\n if dpath.search(conf, key):\n return CheckResult.PASSED if dpath.get(conf, key)[0] else CheckResult.FAILED\n\n return CheckResult.PASSED\n\n\ncheck = AKSRbacEnabled()\n", "path": "checkov/terraform/checks/resource/azure/AKSRbacEnabled.py"}]}
| 1,047 | 188 |
gh_patches_debug_24671
|
rasdani/github-patches
|
git_diff
|
docker__docker-py-45
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Failure to import requests.packages.urllib3.connectionpool
With requests 1.2.3 (the version which gets installed), this happens.
Workaround: use requests 1.2.0.
``` ipython
$ pip install --user docker-py
Downloading/unpacking docker-py
Downloading docker-py-0.1.5.tar.gz
Running setup.py egg_info for package docker-py
Requirement already satisfied (use --upgrade to upgrade): requests in /usr/lib/python2.7/dist-packages (from docker-py)
Requirement already satisfied (use --upgrade to upgrade): six in /usr/lib/python2.7/dist-packages (from docker-py)
Installing collected packages: docker-py
Running setup.py install for docker-py
Successfully installed docker-py
Cleaning up...
pwaller@fractal:~$ ipython
imporPython 2.7.5+ (default, Jun 5 2013, 10:40:07)
Type "copyright", "credits" or "license" for more information.
IPython 1.1.0 -- An enhanced Interactive Python.
? -> Introduction and overview of IPython's features.
%quickref -> Quick reference.
help -> Python's own help system.
object? -> Details about 'object', use 'object??' for extra details.
In [1]: import docker
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
<ipython-input-1-3ac1c348f58a> in <module>()
----> 1 import docker
/home/pwaller/.local/lib/python2.7/site-packages/docker/__init__.py in <module>()
----> 1 from .client import Client
/home/pwaller/.local/lib/python2.7/site-packages/docker/client.py in <module>()
15 from requests.exceptions import HTTPError
16 from requests.adapters import HTTPAdapter
---> 17 from requests.packages.urllib3.connectionpool import HTTPConnectionPool
18
19 if six.PY3:
ImportError: No module named packages.urllib3.connectionpool
In [2]:
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docker/unixconn.py`
Content:
```
1 # Copyright 2013 dotCloud inc.
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6
7 # http://www.apache.org/licenses/LICENSE-2.0
8
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import httplib
16 import requests.adapters
17 import requests.packages.urllib3.connectionpool
18 import socket
19
20 HTTPConnectionPool = requests.packages.urllib3.connectionpool.HTTPConnectionPool
21
22
23 class UnixHTTPConnection(httplib.HTTPConnection, object):
24 def __init__(self, base_url, unix_socket):
25 httplib.HTTPConnection.__init__(self, 'localhost')
26 self.base_url = base_url
27 self.unix_socket = unix_socket
28
29 def connect(self):
30 sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
31 sock.connect(self.base_url.replace("unix:/",""))
32 self.sock = sock
33
34 def _extract_path(self, url):
35 #remove the base_url entirely..
36 return url.replace(self.base_url, "")
37
38 def request(self, method, url, **kwargs):
39 url = self._extract_path(self.unix_socket)
40 super(UnixHTTPConnection, self).request(method, url, **kwargs)
41
42
43 class UnixHTTPConnectionPool(HTTPConnectionPool):
44 def __init__(self, base_url, socket_path):
45 self.socket_path = socket_path
46 self.base_url = base_url
47 super(UnixHTTPConnectionPool, self).__init__(self, 'localhost')
48
49 def _new_conn(self):
50 return UnixHTTPConnection(self.base_url, self.socket_path)
51
52
53 class UnixAdapter(requests.adapters.HTTPAdapter):
54 def __init__(self, base_url):
55 self.base_url = base_url
56 super(UnixAdapter, self).__init__()
57
58 def get_connection(self, socket_path, proxies=None):
59 return UnixHTTPConnectionPool(self.base_url, socket_path)
60
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/docker/unixconn.py b/docker/unixconn.py
--- a/docker/unixconn.py
+++ b/docker/unixconn.py
@@ -14,10 +14,12 @@
import httplib
import requests.adapters
-import requests.packages.urllib3.connectionpool
import socket
-HTTPConnectionPool = requests.packages.urllib3.connectionpool.HTTPConnectionPool
+try:
+ import requests.packages.urllib3.connectionpool as connectionpool
+except ImportError:
+ import urllib3.connectionpool as connectionpool
class UnixHTTPConnection(httplib.HTTPConnection, object):
@@ -28,7 +30,7 @@
def connect(self):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- sock.connect(self.base_url.replace("unix:/",""))
+ sock.connect(self.base_url.replace("unix:/", ""))
self.sock = sock
def _extract_path(self, url):
@@ -40,7 +42,7 @@
super(UnixHTTPConnection, self).request(method, url, **kwargs)
-class UnixHTTPConnectionPool(HTTPConnectionPool):
+class UnixHTTPConnectionPool(connectionpool.HTTPConnectionPool):
def __init__(self, base_url, socket_path):
self.socket_path = socket_path
self.base_url = base_url
|
{"golden_diff": "diff --git a/docker/unixconn.py b/docker/unixconn.py\n--- a/docker/unixconn.py\n+++ b/docker/unixconn.py\n@@ -14,10 +14,12 @@\n \n import httplib\n import requests.adapters\n-import requests.packages.urllib3.connectionpool\n import socket\n \n-HTTPConnectionPool = requests.packages.urllib3.connectionpool.HTTPConnectionPool\n+try:\n+ import requests.packages.urllib3.connectionpool as connectionpool\n+except ImportError:\n+ import urllib3.connectionpool as connectionpool\n \n \n class UnixHTTPConnection(httplib.HTTPConnection, object):\n@@ -28,7 +30,7 @@\n \n def connect(self):\n sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n- sock.connect(self.base_url.replace(\"unix:/\",\"\"))\n+ sock.connect(self.base_url.replace(\"unix:/\", \"\"))\n self.sock = sock\n \n def _extract_path(self, url):\n@@ -40,7 +42,7 @@\n super(UnixHTTPConnection, self).request(method, url, **kwargs)\n \n \n-class UnixHTTPConnectionPool(HTTPConnectionPool):\n+class UnixHTTPConnectionPool(connectionpool.HTTPConnectionPool):\n def __init__(self, base_url, socket_path):\n self.socket_path = socket_path\n self.base_url = base_url\n", "issue": "Failure to import requests.packages.urllib3.connectionpool\nWith requests 1.2.3 (the version which gets installed), this happens.\n\nWorkaround: use requests 1.2.0.\n\n``` ipython\n$ pip install --user docker-py\nDownloading/unpacking docker-py\n Downloading docker-py-0.1.5.tar.gz\n Running setup.py egg_info for package docker-py\n\nRequirement already satisfied (use --upgrade to upgrade): requests in /usr/lib/python2.7/dist-packages (from docker-py)\nRequirement already satisfied (use --upgrade to upgrade): six in /usr/lib/python2.7/dist-packages (from docker-py)\nInstalling collected packages: docker-py\n Running setup.py install for docker-py\n\nSuccessfully installed docker-py\nCleaning up...\npwaller@fractal:~$ ipython\nimporPython 2.7.5+ (default, Jun 5 2013, 10:40:07) \nType \"copyright\", \"credits\" or \"license\" for more information.\n\nIPython 1.1.0 -- An enhanced Interactive Python.\n? -> Introduction and overview of IPython's features.\n%quickref -> Quick reference.\nhelp -> Python's own help system.\nobject? -> Details about 'object', use 'object??' 
for extra details.\n\nIn [1]: import docker\n---------------------------------------------------------------------------\nImportError Traceback (most recent call last)\n<ipython-input-1-3ac1c348f58a> in <module>()\n----> 1 import docker\n\n/home/pwaller/.local/lib/python2.7/site-packages/docker/__init__.py in <module>()\n----> 1 from .client import Client\n\n/home/pwaller/.local/lib/python2.7/site-packages/docker/client.py in <module>()\n 15 from requests.exceptions import HTTPError\n 16 from requests.adapters import HTTPAdapter\n---> 17 from requests.packages.urllib3.connectionpool import HTTPConnectionPool\n 18 \n 19 if six.PY3:\n\nImportError: No module named packages.urllib3.connectionpool\n\nIn [2]: \n```\n\n", "before_files": [{"content": "# Copyright 2013 dotCloud inc.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport httplib\nimport requests.adapters\nimport requests.packages.urllib3.connectionpool\nimport socket\n\nHTTPConnectionPool = requests.packages.urllib3.connectionpool.HTTPConnectionPool\n\n\nclass UnixHTTPConnection(httplib.HTTPConnection, object):\n def __init__(self, base_url, unix_socket):\n httplib.HTTPConnection.__init__(self, 'localhost')\n self.base_url = base_url\n self.unix_socket = unix_socket\n\n def connect(self):\n sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n sock.connect(self.base_url.replace(\"unix:/\",\"\"))\n self.sock = sock\n\n def _extract_path(self, url):\n #remove the base_url entirely..\n return url.replace(self.base_url, \"\")\n\n def request(self, method, url, **kwargs):\n url = self._extract_path(self.unix_socket)\n super(UnixHTTPConnection, self).request(method, url, **kwargs)\n\n\nclass UnixHTTPConnectionPool(HTTPConnectionPool):\n def __init__(self, base_url, socket_path):\n self.socket_path = socket_path\n self.base_url = base_url\n super(UnixHTTPConnectionPool, self).__init__(self, 'localhost')\n\n def _new_conn(self):\n return UnixHTTPConnection(self.base_url, self.socket_path)\n\n\nclass UnixAdapter(requests.adapters.HTTPAdapter):\n def __init__(self, base_url):\n self.base_url = base_url\n super(UnixAdapter, self).__init__()\n\n def get_connection(self, socket_path, proxies=None):\n return UnixHTTPConnectionPool(self.base_url, socket_path)\n", "path": "docker/unixconn.py"}], "after_files": [{"content": "# Copyright 2013 dotCloud inc.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport httplib\nimport requests.adapters\nimport socket\n\ntry:\n import requests.packages.urllib3.connectionpool as connectionpool\nexcept 
ImportError:\n import urllib3.connectionpool as connectionpool\n\n\nclass UnixHTTPConnection(httplib.HTTPConnection, object):\n def __init__(self, base_url, unix_socket):\n httplib.HTTPConnection.__init__(self, 'localhost')\n self.base_url = base_url\n self.unix_socket = unix_socket\n\n def connect(self):\n sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n sock.connect(self.base_url.replace(\"unix:/\", \"\"))\n self.sock = sock\n\n def _extract_path(self, url):\n #remove the base_url entirely..\n return url.replace(self.base_url, \"\")\n\n def request(self, method, url, **kwargs):\n url = self._extract_path(self.unix_socket)\n super(UnixHTTPConnection, self).request(method, url, **kwargs)\n\n\nclass UnixHTTPConnectionPool(connectionpool.HTTPConnectionPool):\n def __init__(self, base_url, socket_path):\n self.socket_path = socket_path\n self.base_url = base_url\n super(UnixHTTPConnectionPool, self).__init__(self, 'localhost')\n\n def _new_conn(self):\n return UnixHTTPConnection(self.base_url, self.socket_path)\n\n\nclass UnixAdapter(requests.adapters.HTTPAdapter):\n def __init__(self, base_url):\n self.base_url = base_url\n super(UnixAdapter, self).__init__()\n\n def get_connection(self, socket_path, proxies=None):\n return UnixHTTPConnectionPool(self.base_url, socket_path)\n", "path": "docker/unixconn.py"}]}
| 1,321 | 282 |
gh_patches_debug_4916
|
rasdani/github-patches
|
git_diff
|
e-valuation__EvaP-566
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
colorize average grades on course detail pages
the numbers in the lower right should be css'd like the ones in the upper left.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `evap/evaluation/templatetags/evaluation_templatetags.py`
Content:
```
1 from django.template import Library
2
3 register = Library()
4
5
6 @register.inclusion_tag("user_list_with_links.html")
7 def include_user_list_with_links(users):
8 return dict(users=users)
9
10
11 @register.inclusion_tag("sortable_form_js.html")
12 def include_sortable_form_js():
13 return dict()
14
15 @register.inclusion_tag("progress_bar.html")
16 def include_progress_bar(done, total, large=False):
17 return dict(done=done, total=total, large=large)
18
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/evap/evaluation/templatetags/evaluation_templatetags.py b/evap/evaluation/templatetags/evaluation_templatetags.py
--- a/evap/evaluation/templatetags/evaluation_templatetags.py
+++ b/evap/evaluation/templatetags/evaluation_templatetags.py
@@ -15,3 +15,7 @@
@register.inclusion_tag("progress_bar.html")
def include_progress_bar(done, total, large=False):
return dict(done=done, total=total, large=large)
+
[email protected]_tag("result_bar.html")
+def include_result_bar(result, show_grades, questionnaire_warning=False):
+ return dict(result=result, show_grades=show_grades, questionnaire_warning=questionnaire_warning)
|
{"golden_diff": "diff --git a/evap/evaluation/templatetags/evaluation_templatetags.py b/evap/evaluation/templatetags/evaluation_templatetags.py\n--- a/evap/evaluation/templatetags/evaluation_templatetags.py\n+++ b/evap/evaluation/templatetags/evaluation_templatetags.py\n@@ -15,3 +15,7 @@\n @register.inclusion_tag(\"progress_bar.html\")\n def include_progress_bar(done, total, large=False):\n return dict(done=done, total=total, large=large)\n+\[email protected]_tag(\"result_bar.html\")\n+def include_result_bar(result, show_grades, questionnaire_warning=False):\n+ return dict(result=result, show_grades=show_grades, questionnaire_warning=questionnaire_warning)\n", "issue": "colorize average grades on course detail pages\nthe numbers in the lower right should be css'd like the ones in the upper left.\n\n\n\n", "before_files": [{"content": "from django.template import Library\n\nregister = Library()\n\n\[email protected]_tag(\"user_list_with_links.html\")\ndef include_user_list_with_links(users):\n return dict(users=users)\n\n\[email protected]_tag(\"sortable_form_js.html\")\ndef include_sortable_form_js():\n return dict()\n\[email protected]_tag(\"progress_bar.html\")\ndef include_progress_bar(done, total, large=False):\n return dict(done=done, total=total, large=large)\n", "path": "evap/evaluation/templatetags/evaluation_templatetags.py"}], "after_files": [{"content": "from django.template import Library\n\nregister = Library()\n\n\[email protected]_tag(\"user_list_with_links.html\")\ndef include_user_list_with_links(users):\n return dict(users=users)\n\n\[email protected]_tag(\"sortable_form_js.html\")\ndef include_sortable_form_js():\n return dict()\n\[email protected]_tag(\"progress_bar.html\")\ndef include_progress_bar(done, total, large=False):\n return dict(done=done, total=total, large=large)\n\[email protected]_tag(\"result_bar.html\")\ndef include_result_bar(result, show_grades, questionnaire_warning=False):\n return dict(result=result, show_grades=show_grades, questionnaire_warning=questionnaire_warning)\n", "path": "evap/evaluation/templatetags/evaluation_templatetags.py"}]}
| 511 | 181 |
gh_patches_debug_17091
|
rasdani/github-patches
|
git_diff
|
Kinto__kinto-493
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add creation date and current kinto version to generated config file
Using comments for example:
``` ini
# Created at Thu, 03 Mar 2016 17:02:37 +0100
# Using Kinto version 1.11.2
[server:main]
use = egg:waitress#main
host = 0.0.0.0
port = 8888
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kinto/config/__init__.py`
Content:
```
1 import os
2 import codecs
3
4 from cliquet import utils as cliquet_utils
5
6 from kinto import logger
7
8 HERE = os.path.abspath(os.path.dirname(__file__))
9
10
11 def render_template(template, destination, **kwargs):
12 template = os.path.join(HERE, template)
13 folder = os.path.dirname(destination)
14
15 if folder and not os.path.exists(folder):
16 os.makedirs(folder)
17
18 logger.info("Created config {}".format(os.path.abspath(destination)))
19
20 with codecs.open(template, 'r', encoding='utf-8') as f:
21 raw_template = f.read()
22 rendered = raw_template.format(**kwargs)
23 with codecs.open(destination, 'w+', encoding='utf-8') as output:
24 output.write(rendered)
25
26
27 def init(config_file, backend):
28 values = {}
29
30 values['secret'] = cliquet_utils.random_bytes_hex(32)
31
32 values['storage_backend'] = "cliquet.storage.%s" % backend
33 values['cache_backend'] = "cliquet.cache.%s" % backend
34 values['permission_backend'] = "cliquet.permission.%s" % backend
35
36 if backend == 'postgresql':
37 postgresql_url = "postgres://postgres:postgres@localhost/postgres"
38 values['storage_url'] = postgresql_url
39 values['cache_url'] = postgresql_url
40 values['permission_url'] = postgresql_url
41
42 elif backend == 'redis':
43 redis_url = "redis://localhost:6379"
44 values['storage_url'] = redis_url + "/1"
45 values['cache_url'] = redis_url + "/2"
46 values['permission_url'] = redis_url + "/3"
47
48 else:
49 values['storage_url'] = ''
50 values['cache_url'] = ''
51 values['permission_url'] = ''
52
53 render_template("kinto.tpl", config_file, **values)
54
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/kinto/config/__init__.py b/kinto/config/__init__.py
--- a/kinto/config/__init__.py
+++ b/kinto/config/__init__.py
@@ -1,9 +1,11 @@
import os
import codecs
+from time import strftime
from cliquet import utils as cliquet_utils
from kinto import logger
+from kinto import __version__
HERE = os.path.abspath(os.path.dirname(__file__))
@@ -29,6 +31,9 @@
values['secret'] = cliquet_utils.random_bytes_hex(32)
+ values['kinto_version'] = __version__
+ values['config_file_timestamp'] = strftime('%a, %d %b %Y %H:%M:%S %z')
+
values['storage_backend'] = "cliquet.storage.%s" % backend
values['cache_backend'] = "cliquet.cache.%s" % backend
values['permission_backend'] = "cliquet.permission.%s" % backend
|
{"golden_diff": "diff --git a/kinto/config/__init__.py b/kinto/config/__init__.py\n--- a/kinto/config/__init__.py\n+++ b/kinto/config/__init__.py\n@@ -1,9 +1,11 @@\n import os\n import codecs\n+from time import strftime\n \n from cliquet import utils as cliquet_utils\n \n from kinto import logger\n+from kinto import __version__\n \n HERE = os.path.abspath(os.path.dirname(__file__))\n \n@@ -29,6 +31,9 @@\n \n values['secret'] = cliquet_utils.random_bytes_hex(32)\n \n+ values['kinto_version'] = __version__\n+ values['config_file_timestamp'] = strftime('%a, %d %b %Y %H:%M:%S %z')\n+\n values['storage_backend'] = \"cliquet.storage.%s\" % backend\n values['cache_backend'] = \"cliquet.cache.%s\" % backend\n values['permission_backend'] = \"cliquet.permission.%s\" % backend\n", "issue": "Add creation date and current kinto version to generated config file\nUsing comments for example:\n\n``` ini\n# Created at Thu, 03 Mar 2016 17:02:37 +0100\n# Using Kinto version 1.11.2\n\n[server:main]\nuse = egg:waitress#main\nhost = 0.0.0.0\nport = 8888\n\n```\n\n", "before_files": [{"content": "import os\nimport codecs\n\nfrom cliquet import utils as cliquet_utils\n\nfrom kinto import logger\n\nHERE = os.path.abspath(os.path.dirname(__file__))\n\n\ndef render_template(template, destination, **kwargs):\n template = os.path.join(HERE, template)\n folder = os.path.dirname(destination)\n\n if folder and not os.path.exists(folder):\n os.makedirs(folder)\n\n logger.info(\"Created config {}\".format(os.path.abspath(destination)))\n\n with codecs.open(template, 'r', encoding='utf-8') as f:\n raw_template = f.read()\n rendered = raw_template.format(**kwargs)\n with codecs.open(destination, 'w+', encoding='utf-8') as output:\n output.write(rendered)\n\n\ndef init(config_file, backend):\n values = {}\n\n values['secret'] = cliquet_utils.random_bytes_hex(32)\n\n values['storage_backend'] = \"cliquet.storage.%s\" % backend\n values['cache_backend'] = \"cliquet.cache.%s\" % backend\n values['permission_backend'] = \"cliquet.permission.%s\" % backend\n\n if backend == 'postgresql':\n postgresql_url = \"postgres://postgres:postgres@localhost/postgres\"\n values['storage_url'] = postgresql_url\n values['cache_url'] = postgresql_url\n values['permission_url'] = postgresql_url\n\n elif backend == 'redis':\n redis_url = \"redis://localhost:6379\"\n values['storage_url'] = redis_url + \"/1\"\n values['cache_url'] = redis_url + \"/2\"\n values['permission_url'] = redis_url + \"/3\"\n\n else:\n values['storage_url'] = ''\n values['cache_url'] = ''\n values['permission_url'] = ''\n\n render_template(\"kinto.tpl\", config_file, **values)\n", "path": "kinto/config/__init__.py"}], "after_files": [{"content": "import os\nimport codecs\nfrom time import strftime\n\nfrom cliquet import utils as cliquet_utils\n\nfrom kinto import logger\nfrom kinto import __version__\n\nHERE = os.path.abspath(os.path.dirname(__file__))\n\n\ndef render_template(template, destination, **kwargs):\n template = os.path.join(HERE, template)\n folder = os.path.dirname(destination)\n\n if folder and not os.path.exists(folder):\n os.makedirs(folder)\n\n logger.info(\"Created config {}\".format(os.path.abspath(destination)))\n\n with codecs.open(template, 'r', encoding='utf-8') as f:\n raw_template = f.read()\n rendered = raw_template.format(**kwargs)\n with codecs.open(destination, 'w+', encoding='utf-8') as output:\n output.write(rendered)\n\n\ndef init(config_file, backend):\n values = {}\n\n values['secret'] = cliquet_utils.random_bytes_hex(32)\n\n 
values['kinto_version'] = __version__\n values['config_file_timestamp'] = strftime('%a, %d %b %Y %H:%M:%S %z')\n\n values['storage_backend'] = \"cliquet.storage.%s\" % backend\n values['cache_backend'] = \"cliquet.cache.%s\" % backend\n values['permission_backend'] = \"cliquet.permission.%s\" % backend\n\n if backend == 'postgresql':\n postgresql_url = \"postgres://postgres:postgres@localhost/postgres\"\n values['storage_url'] = postgresql_url\n values['cache_url'] = postgresql_url\n values['permission_url'] = postgresql_url\n\n elif backend == 'redis':\n redis_url = \"redis://localhost:6379\"\n values['storage_url'] = redis_url + \"/1\"\n values['cache_url'] = redis_url + \"/2\"\n values['permission_url'] = redis_url + \"/3\"\n\n else:\n values['storage_url'] = ''\n values['cache_url'] = ''\n values['permission_url'] = ''\n\n render_template(\"kinto.tpl\", config_file, **values)\n", "path": "kinto/config/__init__.py"}]}
| 860 | 225 |
gh_patches_debug_7807
|
rasdani/github-patches
|
git_diff
|
locustio__locust-2609
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Report][Modern-UI] HTML report is blank
### Prerequisites
- [X] I am using [the latest version of Locust](https://github.com/locustio/locust/releases/)
- [X] I am reporting a bug, not asking a question
### Description
Run a test then open the HTML report -> Noticed that it is blank
Note: This bug occurs from 2.22.0, and did not occur on 2.21.0

### Command line
locust -f SimpleWeb.py -u 100 -r 10 -t 30s --html=samplelocust.html
### Locustfile contents
```python3
from locust import FastHttpUser, HttpUser, between, constant_pacing, events, task
from loguru import logger
class QuickstartUser(FastHttpUser):
wait_time = between(2, 5)
host = "http://127.0.0.1:5000"
# begin = time.time()
@task()
def get_tasks_1(self):
res = None
try:
payload = {}
headers = {"Cache-Control": "max-age=0, no-cache, no-store, must-revalidate"}
res = self.client.get("/api/tasks", headers=headers, data=payload, name="Get Tasks")
except Exception as exception:
logger.error(exception)
@task()
def post_lewin(self):
try:
payload = {}
headers = {"Cache-Control": "max-age=0, no-cache, no-store, must-revalidate"}
self.client.post("/api/lewin", headers=headers, data=payload, name="Post Lewin")
except Exception as exception:
logger.error(exception)
```
### Python version
3.9.18
### Locust version
2.23.1
### Operating system
macOS 14.2.1 (23C71)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/web_ui_auth.py`
Content:
```
1 """
2 Example of implementing authentication for Locust when the --web-login flag is given
3
4 This is only to serve as a starting point, proper authentication should be implemented
5 according to your projects specifications.
6
7 For more information, see https://docs.locust.io/en/stable/extending-locust.html#authentication
8 """
9 from locust import HttpUser, events
10
11 import json
12 import os
13
14 from flask import Blueprint, make_response, redirect, request, session, url_for
15 from flask_login import UserMixin, login_user
16
17
18 class LocustHttpUser(HttpUser):
19 pass
20
21
22 class AuthUser(UserMixin):
23 def __init__(self, username):
24 self.username = username
25
26 def get_id(self):
27 return self.username
28
29
30 auth_blueprint = Blueprint("auth", "web_ui_auth")
31
32
33 def load_user(user_id):
34 return AuthUser(session.get("username"))
35
36
37 @events.init.add_listener
38 def locust_init(environment, **kwargs):
39 if environment.web_ui:
40 environment.web_ui.login_manager.user_loader(load_user)
41
42 environment.web_ui.app.config["SECRET_KEY"] = os.getenv("FLASK_SECRET_KEY")
43
44 environment.web_ui.auth_args = {
45 "username_password_callback": "/login_submit",
46 "auth_providers": [
47 {
48 "label": "Github",
49 "callback_url": "/login/github",
50 "icon_url": "https://static-00.iconduck.com/assets.00/github-icon-1024x994-4h5sdmko.png",
51 },
52 ],
53 }
54
55 @auth_blueprint.route("/login/github")
56 def google_login():
57 # Implement authentication with desired auth provider
58 username = "username"
59 session["username"] = username
60 login_user(AuthUser("username"))
61
62 return redirect(url_for("index"))
63
64 @auth_blueprint.route("/login_submit")
65 def login_submit():
66 username = request.args.get("username")
67 password = request.args.get("password")
68
69 # Implement real password verification here
70 if password:
71 session["username"] = username
72 login_user(AuthUser(username))
73
74 return redirect(url_for("index"))
75
76 environment.web_ui.auth_args = {**environment.web_ui.auth_args, "error": "Invalid username or password"}
77
78 return redirect(url_for("login"))
79
80 environment.web_ui.app.register_blueprint(auth_blueprint)
81
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/examples/web_ui_auth.py b/examples/web_ui_auth.py
--- a/examples/web_ui_auth.py
+++ b/examples/web_ui_auth.py
@@ -6,7 +6,7 @@
For more information, see https://docs.locust.io/en/stable/extending-locust.html#authentication
"""
-from locust import HttpUser, events
+from locust import HttpUser, events, task
import json
import os
@@ -16,7 +16,9 @@
class LocustHttpUser(HttpUser):
- pass
+ @task
+ def example(self):
+ self.client.get("/")
class AuthUser(UserMixin):
|
{"golden_diff": "diff --git a/examples/web_ui_auth.py b/examples/web_ui_auth.py\n--- a/examples/web_ui_auth.py\n+++ b/examples/web_ui_auth.py\n@@ -6,7 +6,7 @@\n \n For more information, see https://docs.locust.io/en/stable/extending-locust.html#authentication\n \"\"\"\n-from locust import HttpUser, events\n+from locust import HttpUser, events, task\n \n import json\n import os\n@@ -16,7 +16,9 @@\n \n \n class LocustHttpUser(HttpUser):\n- pass\n+ @task\n+ def example(self):\n+ self.client.get(\"/\")\n \n \n class AuthUser(UserMixin):\n", "issue": "[Report][Modern-UI] HTML report is blank\n### Prerequisites\n\n- [X] I am using [the latest version of Locust](https://github.com/locustio/locust/releases/)\n- [X] I am reporting a bug, not asking a question\n\n### Description\n\nRun a test then open the HTML report -> Noticed that it is blank\r\nNote: This bug occurs from 2.22.0, and did not occur on 2.21.0 \r\n\r\n\n\n### Command line\n\nlocust -f SimpleWeb.py -u 100 -r 10 -t 30s --html=samplelocust.html\n\n### Locustfile contents\n\n```python3\nfrom locust import FastHttpUser, HttpUser, between, constant_pacing, events, task\r\nfrom loguru import logger\r\n\r\n\r\nclass QuickstartUser(FastHttpUser):\r\n wait_time = between(2, 5)\r\n\r\n\r\n host = \"http://127.0.0.1:5000\"\r\n # begin = time.time()\r\n\r\n @task()\r\n def get_tasks_1(self):\r\n res = None\r\n try:\r\n payload = {}\r\n headers = {\"Cache-Control\": \"max-age=0, no-cache, no-store, must-revalidate\"}\r\n res = self.client.get(\"/api/tasks\", headers=headers, data=payload, name=\"Get Tasks\")\r\n except Exception as exception:\r\n logger.error(exception)\r\n\r\n @task()\r\n def post_lewin(self):\r\n try:\r\n payload = {}\r\n headers = {\"Cache-Control\": \"max-age=0, no-cache, no-store, must-revalidate\"}\r\n self.client.post(\"/api/lewin\", headers=headers, data=payload, name=\"Post Lewin\")\r\n except Exception as exception:\r\n logger.error(exception)\n```\n\n\n### Python version\n\n3.9.18\n\n### Locust version\n\n2.23.1\n\n### Operating system\n\nmacOS 14.2.1 (23C71)\n", "before_files": [{"content": "\"\"\"\nExample of implementing authentication for Locust when the --web-login flag is given\n\nThis is only to serve as a starting point, proper authentication should be implemented\naccording to your projects specifications.\n\nFor more information, see https://docs.locust.io/en/stable/extending-locust.html#authentication\n\"\"\"\nfrom locust import HttpUser, events\n\nimport json\nimport os\n\nfrom flask import Blueprint, make_response, redirect, request, session, url_for\nfrom flask_login import UserMixin, login_user\n\n\nclass LocustHttpUser(HttpUser):\n pass\n\n\nclass AuthUser(UserMixin):\n def __init__(self, username):\n self.username = username\n\n def get_id(self):\n return self.username\n\n\nauth_blueprint = Blueprint(\"auth\", \"web_ui_auth\")\n\n\ndef load_user(user_id):\n return AuthUser(session.get(\"username\"))\n\n\[email protected]_listener\ndef locust_init(environment, **kwargs):\n if environment.web_ui:\n environment.web_ui.login_manager.user_loader(load_user)\n\n environment.web_ui.app.config[\"SECRET_KEY\"] = os.getenv(\"FLASK_SECRET_KEY\")\n\n environment.web_ui.auth_args = {\n \"username_password_callback\": \"/login_submit\",\n \"auth_providers\": [\n {\n \"label\": \"Github\",\n \"callback_url\": \"/login/github\",\n \"icon_url\": \"https://static-00.iconduck.com/assets.00/github-icon-1024x994-4h5sdmko.png\",\n },\n ],\n }\n\n @auth_blueprint.route(\"/login/github\")\n def google_login():\n # Implement 
authentication with desired auth provider\n username = \"username\"\n session[\"username\"] = username\n login_user(AuthUser(\"username\"))\n\n return redirect(url_for(\"index\"))\n\n @auth_blueprint.route(\"/login_submit\")\n def login_submit():\n username = request.args.get(\"username\")\n password = request.args.get(\"password\")\n\n # Implement real password verification here\n if password:\n session[\"username\"] = username\n login_user(AuthUser(username))\n\n return redirect(url_for(\"index\"))\n\n environment.web_ui.auth_args = {**environment.web_ui.auth_args, \"error\": \"Invalid username or password\"}\n\n return redirect(url_for(\"login\"))\n\n environment.web_ui.app.register_blueprint(auth_blueprint)\n", "path": "examples/web_ui_auth.py"}], "after_files": [{"content": "\"\"\"\nExample of implementing authentication for Locust when the --web-login flag is given\n\nThis is only to serve as a starting point, proper authentication should be implemented\naccording to your projects specifications.\n\nFor more information, see https://docs.locust.io/en/stable/extending-locust.html#authentication\n\"\"\"\nfrom locust import HttpUser, events, task\n\nimport json\nimport os\n\nfrom flask import Blueprint, make_response, redirect, request, session, url_for\nfrom flask_login import UserMixin, login_user\n\n\nclass LocustHttpUser(HttpUser):\n @task\n def example(self):\n self.client.get(\"/\")\n\n\nclass AuthUser(UserMixin):\n def __init__(self, username):\n self.username = username\n\n def get_id(self):\n return self.username\n\n\nauth_blueprint = Blueprint(\"auth\", \"web_ui_auth\")\n\n\ndef load_user(user_id):\n return AuthUser(session.get(\"username\"))\n\n\[email protected]_listener\ndef locust_init(environment, **kwargs):\n if environment.web_ui:\n environment.web_ui.login_manager.user_loader(load_user)\n\n environment.web_ui.app.config[\"SECRET_KEY\"] = os.getenv(\"FLASK_SECRET_KEY\")\n\n environment.web_ui.auth_args = {\n \"username_password_callback\": \"/login_submit\",\n \"auth_providers\": [\n {\n \"label\": \"Github\",\n \"callback_url\": \"/login/github\",\n \"icon_url\": \"https://static-00.iconduck.com/assets.00/github-icon-1024x994-4h5sdmko.png\",\n },\n ],\n }\n\n @auth_blueprint.route(\"/login/github\")\n def google_login():\n # Implement authentication with desired auth provider\n username = \"username\"\n session[\"username\"] = username\n login_user(AuthUser(\"username\"))\n\n return redirect(url_for(\"index\"))\n\n @auth_blueprint.route(\"/login_submit\")\n def login_submit():\n username = request.args.get(\"username\")\n password = request.args.get(\"password\")\n\n # Implement real password verification here\n if password:\n session[\"username\"] = username\n login_user(AuthUser(username))\n\n return redirect(url_for(\"index\"))\n\n environment.web_ui.auth_args = {**environment.web_ui.auth_args, \"error\": \"Invalid username or password\"}\n\n return redirect(url_for(\"login\"))\n\n environment.web_ui.app.register_blueprint(auth_blueprint)\n", "path": "examples/web_ui_auth.py"}]}
| 1,400 | 143 |
gh_patches_debug_888
|
rasdani/github-patches
|
git_diff
|
helmholtz-analytics__heat-1268
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fix Pytorch release tracking workflows
## Due Diligence
<!--- Please address the following points before setting your PR "ready for review".
--->
- General:
- [x] **base branch** must be `main` for new features, latest release branch (e.g. `release/1.3.x`) for bug fixes
- [x] **title** of the PR is suitable to appear in the [Release Notes](https://github.com/helmholtz-analytics/heat/releases/latest)
- Implementation:
- [x] unit tests: all split configurations tested
- [x] unit tests: multiple dtypes tested
- [x] documentation updated where needed
## Description
<!--- Include a summary of the change/s.
Please also include relevant motivation and context. List any dependencies that are required for this change.
--->
Issue/s resolved: #1241
## Changes proposed:
- upgrade to the latest version of checkout action
- delete the token parameter such that the default action token is used
## Type of change
<!--
i.e.
- Bug fix (non-breaking change which fixes an issue)
- New feature (non-breaking change which adds functionality)
- Breaking change (fix or feature that would cause existing functionality to not work as expected)
- Documentation update
--->
## Memory requirements
<!--- Compare memory requirements to previous implementation / relevant torch operations if applicable:
- in distributed and non-distributed mode
- with `split=None` and `split not None`
This can be done using https://github.com/pythonprofilers/memory_profiler for CPU memory measurements,
GPU measurements can be done with https://pytorch.org/docs/master/generated/torch.cuda.max_memory_allocated.html.
These tools only profile the memory used by each process, not the entire function.
--->
## Performance
<!--- Compare performance to previous implementation / relevant torch operations if applicable:
- in distributed and non-distributed mode
- with `split=None` and `split not None`
Python has an embedded profiler: https://docs.python.org/3.9/library/profile.html
Again, this will only profile the performance on each process. Printing the results with many processes
may be illegible. It may be easiest to save the output of each to a file.
--->
#### Does this change modify the behaviour of other functions? If so, which?
no
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `heat/core/version.py`
Content:
```
1 """This module contains Heat's version information."""
2
3
4 major: int = 1
5 """Indicates Heat's main version."""
6 minor: int = 3
7 """Indicates feature extension."""
8 micro: int = 0
9 """Indicates revisions for bugfixes."""
10 extension: str = "dev"
11 """Indicates special builds, e.g. for specific hardware."""
12
13 if not extension:
14 __version__: str = f"{major}.{minor}.{micro}"
15 """The combined version string, consisting out of major, minor, micro and possibly extension."""
16 else:
17 __version__: str = f"{major}.{minor}.{micro}-{extension}"
18
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/heat/core/version.py b/heat/core/version.py
--- a/heat/core/version.py
+++ b/heat/core/version.py
@@ -3,7 +3,7 @@
major: int = 1
"""Indicates Heat's main version."""
-minor: int = 3
+minor: int = 4
"""Indicates feature extension."""
micro: int = 0
"""Indicates revisions for bugfixes."""
|
{"golden_diff": "diff --git a/heat/core/version.py b/heat/core/version.py\n--- a/heat/core/version.py\n+++ b/heat/core/version.py\n@@ -3,7 +3,7 @@\n \n major: int = 1\n \"\"\"Indicates Heat's main version.\"\"\"\n-minor: int = 3\n+minor: int = 4\n \"\"\"Indicates feature extension.\"\"\"\n micro: int = 0\n \"\"\"Indicates revisions for bugfixes.\"\"\"\n", "issue": "Fix Pytorch release tracking workflows\n## Due Diligence\r\n<!--- Please address the following points before setting your PR \"ready for review\".\r\n--->\r\n- General:\r\n - [x] **base branch** must be `main` for new features, latest release branch (e.g. `release/1.3.x`) for bug fixes\r\n - [x] **title** of the PR is suitable to appear in the [Release Notes](https://github.com/helmholtz-analytics/heat/releases/latest)\r\n- Implementation:\r\n - [x] unit tests: all split configurations tested\r\n - [x] unit tests: multiple dtypes tested\r\n - [x] documentation updated where needed\r\n\r\n## Description\r\n\r\n<!--- Include a summary of the change/s.\r\nPlease also include relevant motivation and context. List any dependencies that are required for this change.\r\n--->\r\n\r\nIssue/s resolved: #1241 \r\n\r\n## Changes proposed:\r\n\r\n- upgrade to the latest version of checkout action\r\n- delete the token parameter such that the default action token is used\r\n\r\n## Type of change\r\n<!--\r\ni.e.\r\n- Bug fix (non-breaking change which fixes an issue)\r\n- New feature (non-breaking change which adds functionality)\r\n- Breaking change (fix or feature that would cause existing functionality to not work as expected)\r\n- Documentation update\r\n--->\r\n\r\n## Memory requirements\r\n<!--- Compare memory requirements to previous implementation / relevant torch operations if applicable:\r\n- in distributed and non-distributed mode\r\n- with `split=None` and `split not None`\r\n\r\nThis can be done using https://github.com/pythonprofilers/memory_profiler for CPU memory measurements,\r\nGPU measurements can be done with https://pytorch.org/docs/master/generated/torch.cuda.max_memory_allocated.html.\r\nThese tools only profile the memory used by each process, not the entire function.\r\n--->\r\n\r\n## Performance\r\n<!--- Compare performance to previous implementation / relevant torch operations if applicable:\r\n- in distributed and non-distributed mode\r\n- with `split=None` and `split not None`\r\n\r\nPython has an embedded profiler: https://docs.python.org/3.9/library/profile.html\r\nAgain, this will only profile the performance on each process. Printing the results with many processes\r\nmay be illegible. It may be easiest to save the output of each to a file.\r\n--->\r\n\r\n#### Does this change modify the behaviour of other functions? If so, which?\r\nno\r\n\n", "before_files": [{"content": "\"\"\"This module contains Heat's version information.\"\"\"\n\n\nmajor: int = 1\n\"\"\"Indicates Heat's main version.\"\"\"\nminor: int = 3\n\"\"\"Indicates feature extension.\"\"\"\nmicro: int = 0\n\"\"\"Indicates revisions for bugfixes.\"\"\"\nextension: str = \"dev\"\n\"\"\"Indicates special builds, e.g. 
for specific hardware.\"\"\"\n\nif not extension:\n __version__: str = f\"{major}.{minor}.{micro}\"\n \"\"\"The combined version string, consisting out of major, minor, micro and possibly extension.\"\"\"\nelse:\n __version__: str = f\"{major}.{minor}.{micro}-{extension}\"\n", "path": "heat/core/version.py"}], "after_files": [{"content": "\"\"\"This module contains Heat's version information.\"\"\"\n\n\nmajor: int = 1\n\"\"\"Indicates Heat's main version.\"\"\"\nminor: int = 4\n\"\"\"Indicates feature extension.\"\"\"\nmicro: int = 0\n\"\"\"Indicates revisions for bugfixes.\"\"\"\nextension: str = \"dev\"\n\"\"\"Indicates special builds, e.g. for specific hardware.\"\"\"\n\nif not extension:\n __version__: str = f\"{major}.{minor}.{micro}\"\n \"\"\"The combined version string, consisting out of major, minor, micro and possibly extension.\"\"\"\nelse:\n __version__: str = f\"{major}.{minor}.{micro}-{extension}\"\n", "path": "heat/core/version.py"}]}
| 916 | 96 |
gh_patches_debug_28436
|
rasdani/github-patches
|
git_diff
|
pyinstaller__pyinstaller-4749
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
shapely hook doesn't work on windows
Using current develop, the shapely hook fails when it runs `binaries += [(os.path.join(lib_dir, f), '') for f in os.listdir(lib_dir)]`. `lib_dir` here equals `Lib/site-packages/shapely/DLLs`. The actual directory on my conda python 3.6 installation is `Library/bin/`. My old spec file uses the following ugly code to copy these libraries over:
```
lib_dir = sys.executable.replace("python.exe", os.path.join("Library", "bin"))
binaries += [(os.path.join(lib_dir, 'geos_c.dll'), '')]
binaries += [(os.path.join(lib_dir, 'geos.dll'), '')]
binaries += [(os.path.join(lib_dir, 'mkl_*.dll'), '')]
```
Is there a better way to get a hold of this Library directory with some pyinstaller utility function? Does anyone know if other python environments (non-conda) have the directory used in the hook or @durden did you just guess on the Windows path?
Side issue: Shapely 1.6+ doesn't seem to work on at least windows (haven't updated on other platforms). It fails to find the geos libraries mentioned above unless you execute the pyinstaller-made (inno setup packaged) executable from the install directory (`C:\Program Files (x86)\myprgm\bin\`). For now I'm just downgrading to 1.5.17.
shapely hook doesn't work on windows
Using current develop, the shapely hook fails when it runs `binaries += [(os.path.join(lib_dir, f), '') for f in os.listdir(lib_dir)]`. `lib_dir` here equals `Lib/site-packages/shapely/DLLs`. The actual directory on my conda python 3.6 installation is `Library/bin/`. My old spec file uses the following ugly code to copy these libraries over:
```
lib_dir = sys.executable.replace("python.exe", os.path.join("Library", "bin"))
binaries += [(os.path.join(lib_dir, 'geos_c.dll'), '')]
binaries += [(os.path.join(lib_dir, 'geos.dll'), '')]
binaries += [(os.path.join(lib_dir, 'mkl_*.dll'), '')]
```
Is there a better way to get a hold of this Library directory with some pyinstaller utility function? Does anyone know if other python environments (non-conda) have the directory used in the hook or @durden did you just guess on the Windows path?
Side issue: Shapely 1.6+ doesn't seem to work on at least windows (haven't updated on other platforms). It fails to find the geos libraries mentioned above unless you execute the pyinstaller-made (inno setup packaged) executable from the install directory (`C:\Program Files (x86)\myprgm\bin\`). For now I'm just downgrading to 1.5.17.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `PyInstaller/hooks/hook-shapely.py`
Content:
```
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2017-2020, PyInstaller Development Team.
3 #
4 # Distributed under the terms of the GNU General Public License (version 2
5 # or later) with exception for distributing the bootloader.
6 #
7 # The full license is in the file COPYING.txt, distributed with this software.
8 #
9 # SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
10 #-----------------------------------------------------------------------------
11
12 import os
13
14 from PyInstaller.utils.hooks import get_package_paths
15 from PyInstaller.utils.hooks import is_module_satisfies
16 from PyInstaller import compat
17
18 # Necessary when using the vectorized subpackage
19 hiddenimports = ['shapely.prepared']
20
21 pkg_base, pkg_dir = get_package_paths('shapely')
22
23
24 binaries = []
25 if compat.is_win:
26 if compat.is_conda:
27 lib_dir = os.path.join(compat.base_prefix, 'Library', 'bin')
28 else:
29 lib_dir = os.path.join(pkg_dir, 'DLLs')
30 dll_files = ['geos_c.dll', 'geos.dll']
31 binaries += [(os.path.join(lib_dir, f), '.') for f in dll_files]
32 elif compat.is_linux:
33 lib_dir = os.path.join(pkg_dir, '.libs')
34 dest_dir = os.path.join('shapely', '.libs')
35
36 # This duplicates the libgeos*.so* files in the build. PyInstaller will
37 # copy them into the root of the build by default, but shapely cannot load
38 # them from there in linux IF shapely was installed via a whl file. The
39 # whl bundles its' own libgeos with a different name, something like
40 # libgeos_c-*.so.* but shapely tries to load libgeos_c.so if there isn't a
41 # ./libs directory under its' package. There is a proposed fix for this in
42 # shapely but it has not been accepted it:
43 # https://github.com/Toblerity/Shapely/pull/485
44 if is_module_satisfies('shapely <= 1.6'):
45 binaries += [(os.path.join(lib_dir, f), dest_dir) for f in os.listdir(lib_dir)]
46
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/PyInstaller/hooks/hook-shapely.py b/PyInstaller/hooks/hook-shapely.py
--- a/PyInstaller/hooks/hook-shapely.py
+++ b/PyInstaller/hooks/hook-shapely.py
@@ -10,6 +10,7 @@
#-----------------------------------------------------------------------------
import os
+from ctypes.util import find_library
from PyInstaller.utils.hooks import get_package_paths
from PyInstaller.utils.hooks import is_module_satisfies
@@ -23,12 +24,25 @@
binaries = []
if compat.is_win:
+ # Search conda directory if conda is active, then search standard
+ # directory. This is the same order of precidence used in shapely.
+ standard_path = os.path.join(pkg_dir, 'DLLs')
+ lib_paths = [standard_path, os.environ['PATH']]
if compat.is_conda:
- lib_dir = os.path.join(compat.base_prefix, 'Library', 'bin')
- else:
- lib_dir = os.path.join(pkg_dir, 'DLLs')
- dll_files = ['geos_c.dll', 'geos.dll']
- binaries += [(os.path.join(lib_dir, f), '.') for f in dll_files]
+ conda_path = os.path.join(compat.base_prefix, 'Library', 'bin')
+ lib_paths.insert(0, conda_path)
+ original_path = os.environ['PATH']
+ try:
+ os.environ['PATH'] = os.pathsep.join(lib_paths)
+ dll_path = find_library('geos_c')
+ finally:
+ os.environ['PATH'] = original_path
+ if dll_path is None:
+ raise SystemExit(
+ "Error: geos_c.dll not found, required by hook-shapely.py.\n"
+ "Please check your installation or provide a pull request to "
+ "PyInstaller to update hook-shapely.py.")
+ binaries += [(dll_path, '.')]
elif compat.is_linux:
lib_dir = os.path.join(pkg_dir, '.libs')
dest_dir = os.path.join('shapely', '.libs')
|
{"golden_diff": "diff --git a/PyInstaller/hooks/hook-shapely.py b/PyInstaller/hooks/hook-shapely.py\n--- a/PyInstaller/hooks/hook-shapely.py\n+++ b/PyInstaller/hooks/hook-shapely.py\n@@ -10,6 +10,7 @@\n #-----------------------------------------------------------------------------\n \n import os\n+from ctypes.util import find_library\n \n from PyInstaller.utils.hooks import get_package_paths\n from PyInstaller.utils.hooks import is_module_satisfies\n@@ -23,12 +24,25 @@\n \n binaries = []\n if compat.is_win:\n+ # Search conda directory if conda is active, then search standard\n+ # directory. This is the same order of precidence used in shapely.\n+ standard_path = os.path.join(pkg_dir, 'DLLs')\n+ lib_paths = [standard_path, os.environ['PATH']]\n if compat.is_conda:\n- lib_dir = os.path.join(compat.base_prefix, 'Library', 'bin')\n- else:\n- lib_dir = os.path.join(pkg_dir, 'DLLs')\n- dll_files = ['geos_c.dll', 'geos.dll']\n- binaries += [(os.path.join(lib_dir, f), '.') for f in dll_files]\n+ conda_path = os.path.join(compat.base_prefix, 'Library', 'bin')\n+ lib_paths.insert(0, conda_path)\n+ original_path = os.environ['PATH']\n+ try:\n+ os.environ['PATH'] = os.pathsep.join(lib_paths)\n+ dll_path = find_library('geos_c')\n+ finally:\n+ os.environ['PATH'] = original_path\n+ if dll_path is None:\n+ raise SystemExit(\n+ \"Error: geos_c.dll not found, required by hook-shapely.py.\\n\"\n+ \"Please check your installation or provide a pull request to \"\n+ \"PyInstaller to update hook-shapely.py.\")\n+ binaries += [(dll_path, '.')]\n elif compat.is_linux:\n lib_dir = os.path.join(pkg_dir, '.libs')\n dest_dir = os.path.join('shapely', '.libs')\n", "issue": "shapely hook doesn't work on windows\nUsing current develop, the shapely hook fails when it runs `binaries += [(os.path.join(lib_dir, f), '') for f in os.listdir(lib_dir)]`. `lib_dir` here equals `Lib/site-packages/shapely/DLLs`. The actual directory on my conda python 3.6 installation is `Library/bin/`. My old spec file uses the following ugly code to copy these libraries over:\r\n\r\n```\r\n lib_dir = sys.executable.replace(\"python.exe\", os.path.join(\"Library\", \"bin\"))\r\n binaries += [(os.path.join(lib_dir, 'geos_c.dll'), '')]\r\n binaries += [(os.path.join(lib_dir, 'geos.dll'), '')]\r\n binaries += [(os.path.join(lib_dir, 'mkl_*.dll'), '')]\r\n```\r\n\r\nIs there a better way to get a hold of this Library directory with some pyinstaller utility function? Does anyone know if other python environments (non-conda) have the directory used in the hook or @durden did you just guess on the Windows path?\r\n\r\nSide issue: Shapely 1.6+ doesn't seem to work on at least windows (haven't updated on other platforms). It fails to find the geos libraries mentioned above unless you execute the pyinstaller-made (inno setup packaged) executable from the install directory (`C:\\Program Files (x86)\\myprgm\\bin\\`). For now I'm just downgrading to 1.5.17.\nshapely hook doesn't work on windows\nUsing current develop, the shapely hook fails when it runs `binaries += [(os.path.join(lib_dir, f), '') for f in os.listdir(lib_dir)]`. `lib_dir` here equals `Lib/site-packages/shapely/DLLs`. The actual directory on my conda python 3.6 installation is `Library/bin/`. 
My old spec file uses the following ugly code to copy these libraries over:\r\n\r\n```\r\n lib_dir = sys.executable.replace(\"python.exe\", os.path.join(\"Library\", \"bin\"))\r\n binaries += [(os.path.join(lib_dir, 'geos_c.dll'), '')]\r\n binaries += [(os.path.join(lib_dir, 'geos.dll'), '')]\r\n binaries += [(os.path.join(lib_dir, 'mkl_*.dll'), '')]\r\n```\r\n\r\nIs there a better way to get a hold of this Library directory with some pyinstaller utility function? Does anyone know if other python environments (non-conda) have the directory used in the hook or @durden did you just guess on the Windows path?\r\n\r\nSide issue: Shapely 1.6+ doesn't seem to work on at least windows (haven't updated on other platforms). It fails to find the geos libraries mentioned above unless you execute the pyinstaller-made (inno setup packaged) executable from the install directory (`C:\\Program Files (x86)\\myprgm\\bin\\`). For now I'm just downgrading to 1.5.17.\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2017-2020, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License (version 2\n# or later) with exception for distributing the bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#\n# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)\n#-----------------------------------------------------------------------------\n\nimport os\n\nfrom PyInstaller.utils.hooks import get_package_paths\nfrom PyInstaller.utils.hooks import is_module_satisfies\nfrom PyInstaller import compat\n\n# Necessary when using the vectorized subpackage\nhiddenimports = ['shapely.prepared']\n\npkg_base, pkg_dir = get_package_paths('shapely')\n\n\nbinaries = []\nif compat.is_win:\n if compat.is_conda:\n lib_dir = os.path.join(compat.base_prefix, 'Library', 'bin')\n else:\n lib_dir = os.path.join(pkg_dir, 'DLLs')\n dll_files = ['geos_c.dll', 'geos.dll']\n binaries += [(os.path.join(lib_dir, f), '.') for f in dll_files]\nelif compat.is_linux:\n lib_dir = os.path.join(pkg_dir, '.libs')\n dest_dir = os.path.join('shapely', '.libs')\n\n # This duplicates the libgeos*.so* files in the build. PyInstaller will\n # copy them into the root of the build by default, but shapely cannot load\n # them from there in linux IF shapely was installed via a whl file. The\n # whl bundles its' own libgeos with a different name, something like\n # libgeos_c-*.so.* but shapely tries to load libgeos_c.so if there isn't a\n # ./libs directory under its' package. 
There is a proposed fix for this in\n # shapely but it has not been accepted it:\n # https://github.com/Toblerity/Shapely/pull/485\n if is_module_satisfies('shapely <= 1.6'):\n binaries += [(os.path.join(lib_dir, f), dest_dir) for f in os.listdir(lib_dir)]\n", "path": "PyInstaller/hooks/hook-shapely.py"}], "after_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2017-2020, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License (version 2\n# or later) with exception for distributing the bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#\n# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)\n#-----------------------------------------------------------------------------\n\nimport os\nfrom ctypes.util import find_library\n\nfrom PyInstaller.utils.hooks import get_package_paths\nfrom PyInstaller.utils.hooks import is_module_satisfies\nfrom PyInstaller import compat\n\n# Necessary when using the vectorized subpackage\nhiddenimports = ['shapely.prepared']\n\npkg_base, pkg_dir = get_package_paths('shapely')\n\n\nbinaries = []\nif compat.is_win:\n # Search conda directory if conda is active, then search standard\n # directory. This is the same order of precidence used in shapely.\n standard_path = os.path.join(pkg_dir, 'DLLs')\n lib_paths = [standard_path, os.environ['PATH']]\n if compat.is_conda:\n conda_path = os.path.join(compat.base_prefix, 'Library', 'bin')\n lib_paths.insert(0, conda_path)\n original_path = os.environ['PATH']\n try:\n os.environ['PATH'] = os.pathsep.join(lib_paths)\n dll_path = find_library('geos_c')\n finally:\n os.environ['PATH'] = original_path\n if dll_path is None:\n raise SystemExit(\n \"Error: geos_c.dll not found, required by hook-shapely.py.\\n\"\n \"Please check your installation or provide a pull request to \"\n \"PyInstaller to update hook-shapely.py.\")\n binaries += [(dll_path, '.')]\nelif compat.is_linux:\n lib_dir = os.path.join(pkg_dir, '.libs')\n dest_dir = os.path.join('shapely', '.libs')\n\n # This duplicates the libgeos*.so* files in the build. PyInstaller will\n # copy them into the root of the build by default, but shapely cannot load\n # them from there in linux IF shapely was installed via a whl file. The\n # whl bundles its' own libgeos with a different name, something like\n # libgeos_c-*.so.* but shapely tries to load libgeos_c.so if there isn't a\n # ./libs directory under its' package. There is a proposed fix for this in\n # shapely but it has not been accepted it:\n # https://github.com/Toblerity/Shapely/pull/485\n if is_module_satisfies('shapely <= 1.6'):\n binaries += [(os.path.join(lib_dir, f), dest_dir) for f in os.listdir(lib_dir)]\n", "path": "PyInstaller/hooks/hook-shapely.py"}]}
| 1,467 | 466 |
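The patch above finds `geos_c.dll` by temporarily extending `PATH` and asking `ctypes.util.find_library` for it, instead of guessing a `DLLs` directory. A minimal standalone sketch of that lookup pattern follows; the function name and the example conda directory are illustrative assumptions, not part of PyInstaller's API.

```python
import os
from ctypes.util import find_library


def locate_dll(name, extra_dirs):
    # Temporarily prepend extra_dirs to PATH so find_library can see them,
    # then restore the original environment whatever happens.
    original_path = os.environ["PATH"]
    try:
        os.environ["PATH"] = os.pathsep.join(list(extra_dirs) + [original_path])
        return find_library(name)  # full path on Windows, or None if not found
    finally:
        os.environ["PATH"] = original_path


# Example call (directory is made up): locate_dll("geos_c", [r"C:\Miniconda3\Library\bin"])
```

Failing loudly when the lookup returns None, as the patched hook does, is preferable to shipping a bundle that is missing the GEOS libraries.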
gh_patches_debug_30430
|
rasdani/github-patches
|
git_diff
|
pyca__cryptography-1716
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Move C code for padding into it's own .c and .h files
See `constant_time` for the same idea.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cryptography/hazmat/primitives/padding.py`
Content:
```
1 # This file is dual licensed under the terms of the Apache License, Version
2 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3 # for complete details.
4
5 from __future__ import absolute_import, division, print_function
6
7 import abc
8
9 import six
10
11 from cryptography import utils
12 from cryptography.exceptions import AlreadyFinalized
13 from cryptography.hazmat.bindings.utils import LazyLibrary, build_ffi
14
15
16 TYPES = """
17 uint8_t Cryptography_check_pkcs7_padding(const uint8_t *, uint8_t);
18 """
19
20 FUNCTIONS = """
21 /* Returns the value of the input with the most-significant-bit copied to all
22 of the bits. */
23 static uint8_t Cryptography_DUPLICATE_MSB_TO_ALL(uint8_t a) {
24 return (1 - (a >> (sizeof(uint8_t) * 8 - 1))) - 1;
25 }
26
27 /* This returns 0xFF if a < b else 0x00, but does so in a constant time
28 fashion */
29 static uint8_t Cryptography_constant_time_lt(uint8_t a, uint8_t b) {
30 a -= b;
31 return Cryptography_DUPLICATE_MSB_TO_ALL(a);
32 }
33
34 uint8_t Cryptography_check_pkcs7_padding(const uint8_t *data,
35 uint8_t block_len) {
36 uint8_t i;
37 uint8_t pad_size = data[block_len - 1];
38 uint8_t mismatch = 0;
39 for (i = 0; i < block_len; i++) {
40 unsigned int mask = Cryptography_constant_time_lt(i, pad_size);
41 uint8_t b = data[block_len - 1 - i];
42 mismatch |= (mask & (pad_size ^ b));
43 }
44
45 /* Check to make sure the pad_size was within the valid range. */
46 mismatch |= ~Cryptography_constant_time_lt(0, pad_size);
47 mismatch |= Cryptography_constant_time_lt(block_len, pad_size);
48
49 /* Make sure any bits set are copied to the lowest bit */
50 mismatch |= mismatch >> 4;
51 mismatch |= mismatch >> 2;
52 mismatch |= mismatch >> 1;
53 /* Now check the low bit to see if it's set */
54 return (mismatch & 1) == 0;
55 }
56 """
57
58
59 _ffi = build_ffi(cdef_source=TYPES, verify_source=FUNCTIONS)
60 _lib = LazyLibrary(_ffi)
61
62
63 @six.add_metaclass(abc.ABCMeta)
64 class PaddingContext(object):
65 @abc.abstractmethod
66 def update(self, data):
67 """
68 Pads the provided bytes and returns any available data as bytes.
69 """
70
71 @abc.abstractmethod
72 def finalize(self):
73 """
74 Finalize the padding, returns bytes.
75 """
76
77
78 class PKCS7(object):
79 def __init__(self, block_size):
80 if not (0 <= block_size < 256):
81 raise ValueError("block_size must be in range(0, 256).")
82
83 if block_size % 8 != 0:
84 raise ValueError("block_size must be a multiple of 8.")
85
86 self.block_size = block_size
87
88 def padder(self):
89 return _PKCS7PaddingContext(self.block_size)
90
91 def unpadder(self):
92 return _PKCS7UnpaddingContext(self.block_size)
93
94
95 @utils.register_interface(PaddingContext)
96 class _PKCS7PaddingContext(object):
97 def __init__(self, block_size):
98 self.block_size = block_size
99 # TODO: more copies than necessary, we should use zero-buffer (#193)
100 self._buffer = b""
101
102 def update(self, data):
103 if self._buffer is None:
104 raise AlreadyFinalized("Context was already finalized.")
105
106 if not isinstance(data, bytes):
107 raise TypeError("data must be bytes.")
108
109 self._buffer += data
110
111 finished_blocks = len(self._buffer) // (self.block_size // 8)
112
113 result = self._buffer[:finished_blocks * (self.block_size // 8)]
114 self._buffer = self._buffer[finished_blocks * (self.block_size // 8):]
115
116 return result
117
118 def finalize(self):
119 if self._buffer is None:
120 raise AlreadyFinalized("Context was already finalized.")
121
122 pad_size = self.block_size // 8 - len(self._buffer)
123 result = self._buffer + six.int2byte(pad_size) * pad_size
124 self._buffer = None
125 return result
126
127
128 @utils.register_interface(PaddingContext)
129 class _PKCS7UnpaddingContext(object):
130 def __init__(self, block_size):
131 self.block_size = block_size
132 # TODO: more copies than necessary, we should use zero-buffer (#193)
133 self._buffer = b""
134
135 def update(self, data):
136 if self._buffer is None:
137 raise AlreadyFinalized("Context was already finalized.")
138
139 if not isinstance(data, bytes):
140 raise TypeError("data must be bytes.")
141
142 self._buffer += data
143
144 finished_blocks = max(
145 len(self._buffer) // (self.block_size // 8) - 1,
146 0
147 )
148
149 result = self._buffer[:finished_blocks * (self.block_size // 8)]
150 self._buffer = self._buffer[finished_blocks * (self.block_size // 8):]
151
152 return result
153
154 def finalize(self):
155 if self._buffer is None:
156 raise AlreadyFinalized("Context was already finalized.")
157
158 if len(self._buffer) != self.block_size // 8:
159 raise ValueError("Invalid padding bytes.")
160
161 valid = _lib.Cryptography_check_pkcs7_padding(
162 self._buffer, self.block_size // 8
163 )
164
165 if not valid:
166 raise ValueError("Invalid padding bytes.")
167
168 pad_size = six.indexbytes(self._buffer, -1)
169 res = self._buffer[:-pad_size]
170 self._buffer = None
171 return res
172
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/cryptography/hazmat/primitives/padding.py b/src/cryptography/hazmat/primitives/padding.py
--- a/src/cryptography/hazmat/primitives/padding.py
+++ b/src/cryptography/hazmat/primitives/padding.py
@@ -6,6 +6,8 @@
import abc
+import os
+
import six
from cryptography import utils
@@ -13,47 +15,11 @@
from cryptography.hazmat.bindings.utils import LazyLibrary, build_ffi
-TYPES = """
-uint8_t Cryptography_check_pkcs7_padding(const uint8_t *, uint8_t);
-"""
-
-FUNCTIONS = """
-/* Returns the value of the input with the most-significant-bit copied to all
- of the bits. */
-static uint8_t Cryptography_DUPLICATE_MSB_TO_ALL(uint8_t a) {
- return (1 - (a >> (sizeof(uint8_t) * 8 - 1))) - 1;
-}
-
-/* This returns 0xFF if a < b else 0x00, but does so in a constant time
- fashion */
-static uint8_t Cryptography_constant_time_lt(uint8_t a, uint8_t b) {
- a -= b;
- return Cryptography_DUPLICATE_MSB_TO_ALL(a);
-}
-
-uint8_t Cryptography_check_pkcs7_padding(const uint8_t *data,
- uint8_t block_len) {
- uint8_t i;
- uint8_t pad_size = data[block_len - 1];
- uint8_t mismatch = 0;
- for (i = 0; i < block_len; i++) {
- unsigned int mask = Cryptography_constant_time_lt(i, pad_size);
- uint8_t b = data[block_len - 1 - i];
- mismatch |= (mask & (pad_size ^ b));
- }
-
- /* Check to make sure the pad_size was within the valid range. */
- mismatch |= ~Cryptography_constant_time_lt(0, pad_size);
- mismatch |= Cryptography_constant_time_lt(block_len, pad_size);
-
- /* Make sure any bits set are copied to the lowest bit */
- mismatch |= mismatch >> 4;
- mismatch |= mismatch >> 2;
- mismatch |= mismatch >> 1;
- /* Now check the low bit to see if it's set */
- return (mismatch & 1) == 0;
-}
-"""
+with open(os.path.join(os.path.dirname(__file__), "src/padding.h")) as f:
+ TYPES = f.read()
+
+with open(os.path.join(os.path.dirname(__file__), "src/padding.c")) as f:
+ FUNCTIONS = f.read()
_ffi = build_ffi(cdef_source=TYPES, verify_source=FUNCTIONS)
|
{"golden_diff": "diff --git a/src/cryptography/hazmat/primitives/padding.py b/src/cryptography/hazmat/primitives/padding.py\n--- a/src/cryptography/hazmat/primitives/padding.py\n+++ b/src/cryptography/hazmat/primitives/padding.py\n@@ -6,6 +6,8 @@\n \n import abc\n \n+import os\n+\n import six\n \n from cryptography import utils\n@@ -13,47 +15,11 @@\n from cryptography.hazmat.bindings.utils import LazyLibrary, build_ffi\n \n \n-TYPES = \"\"\"\n-uint8_t Cryptography_check_pkcs7_padding(const uint8_t *, uint8_t);\n-\"\"\"\n-\n-FUNCTIONS = \"\"\"\n-/* Returns the value of the input with the most-significant-bit copied to all\n- of the bits. */\n-static uint8_t Cryptography_DUPLICATE_MSB_TO_ALL(uint8_t a) {\n- return (1 - (a >> (sizeof(uint8_t) * 8 - 1))) - 1;\n-}\n-\n-/* This returns 0xFF if a < b else 0x00, but does so in a constant time\n- fashion */\n-static uint8_t Cryptography_constant_time_lt(uint8_t a, uint8_t b) {\n- a -= b;\n- return Cryptography_DUPLICATE_MSB_TO_ALL(a);\n-}\n-\n-uint8_t Cryptography_check_pkcs7_padding(const uint8_t *data,\n- uint8_t block_len) {\n- uint8_t i;\n- uint8_t pad_size = data[block_len - 1];\n- uint8_t mismatch = 0;\n- for (i = 0; i < block_len; i++) {\n- unsigned int mask = Cryptography_constant_time_lt(i, pad_size);\n- uint8_t b = data[block_len - 1 - i];\n- mismatch |= (mask & (pad_size ^ b));\n- }\n-\n- /* Check to make sure the pad_size was within the valid range. */\n- mismatch |= ~Cryptography_constant_time_lt(0, pad_size);\n- mismatch |= Cryptography_constant_time_lt(block_len, pad_size);\n-\n- /* Make sure any bits set are copied to the lowest bit */\n- mismatch |= mismatch >> 4;\n- mismatch |= mismatch >> 2;\n- mismatch |= mismatch >> 1;\n- /* Now check the low bit to see if it's set */\n- return (mismatch & 1) == 0;\n-}\n-\"\"\"\n+with open(os.path.join(os.path.dirname(__file__), \"src/padding.h\")) as f:\n+ TYPES = f.read()\n+\n+with open(os.path.join(os.path.dirname(__file__), \"src/padding.c\")) as f:\n+ FUNCTIONS = f.read()\n \n \n _ffi = build_ffi(cdef_source=TYPES, verify_source=FUNCTIONS)\n", "issue": "Move C code for padding into it's own .c and .h files\nSee `constant_time` for the same idea.\n\n", "before_files": [{"content": "# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport abc\n\nimport six\n\nfrom cryptography import utils\nfrom cryptography.exceptions import AlreadyFinalized\nfrom cryptography.hazmat.bindings.utils import LazyLibrary, build_ffi\n\n\nTYPES = \"\"\"\nuint8_t Cryptography_check_pkcs7_padding(const uint8_t *, uint8_t);\n\"\"\"\n\nFUNCTIONS = \"\"\"\n/* Returns the value of the input with the most-significant-bit copied to all\n of the bits. 
*/\nstatic uint8_t Cryptography_DUPLICATE_MSB_TO_ALL(uint8_t a) {\n return (1 - (a >> (sizeof(uint8_t) * 8 - 1))) - 1;\n}\n\n/* This returns 0xFF if a < b else 0x00, but does so in a constant time\n fashion */\nstatic uint8_t Cryptography_constant_time_lt(uint8_t a, uint8_t b) {\n a -= b;\n return Cryptography_DUPLICATE_MSB_TO_ALL(a);\n}\n\nuint8_t Cryptography_check_pkcs7_padding(const uint8_t *data,\n uint8_t block_len) {\n uint8_t i;\n uint8_t pad_size = data[block_len - 1];\n uint8_t mismatch = 0;\n for (i = 0; i < block_len; i++) {\n unsigned int mask = Cryptography_constant_time_lt(i, pad_size);\n uint8_t b = data[block_len - 1 - i];\n mismatch |= (mask & (pad_size ^ b));\n }\n\n /* Check to make sure the pad_size was within the valid range. */\n mismatch |= ~Cryptography_constant_time_lt(0, pad_size);\n mismatch |= Cryptography_constant_time_lt(block_len, pad_size);\n\n /* Make sure any bits set are copied to the lowest bit */\n mismatch |= mismatch >> 4;\n mismatch |= mismatch >> 2;\n mismatch |= mismatch >> 1;\n /* Now check the low bit to see if it's set */\n return (mismatch & 1) == 0;\n}\n\"\"\"\n\n\n_ffi = build_ffi(cdef_source=TYPES, verify_source=FUNCTIONS)\n_lib = LazyLibrary(_ffi)\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass PaddingContext(object):\n @abc.abstractmethod\n def update(self, data):\n \"\"\"\n Pads the provided bytes and returns any available data as bytes.\n \"\"\"\n\n @abc.abstractmethod\n def finalize(self):\n \"\"\"\n Finalize the padding, returns bytes.\n \"\"\"\n\n\nclass PKCS7(object):\n def __init__(self, block_size):\n if not (0 <= block_size < 256):\n raise ValueError(\"block_size must be in range(0, 256).\")\n\n if block_size % 8 != 0:\n raise ValueError(\"block_size must be a multiple of 8.\")\n\n self.block_size = block_size\n\n def padder(self):\n return _PKCS7PaddingContext(self.block_size)\n\n def unpadder(self):\n return _PKCS7UnpaddingContext(self.block_size)\n\n\[email protected]_interface(PaddingContext)\nclass _PKCS7PaddingContext(object):\n def __init__(self, block_size):\n self.block_size = block_size\n # TODO: more copies than necessary, we should use zero-buffer (#193)\n self._buffer = b\"\"\n\n def update(self, data):\n if self._buffer is None:\n raise AlreadyFinalized(\"Context was already finalized.\")\n\n if not isinstance(data, bytes):\n raise TypeError(\"data must be bytes.\")\n\n self._buffer += data\n\n finished_blocks = len(self._buffer) // (self.block_size // 8)\n\n result = self._buffer[:finished_blocks * (self.block_size // 8)]\n self._buffer = self._buffer[finished_blocks * (self.block_size // 8):]\n\n return result\n\n def finalize(self):\n if self._buffer is None:\n raise AlreadyFinalized(\"Context was already finalized.\")\n\n pad_size = self.block_size // 8 - len(self._buffer)\n result = self._buffer + six.int2byte(pad_size) * pad_size\n self._buffer = None\n return result\n\n\[email protected]_interface(PaddingContext)\nclass _PKCS7UnpaddingContext(object):\n def __init__(self, block_size):\n self.block_size = block_size\n # TODO: more copies than necessary, we should use zero-buffer (#193)\n self._buffer = b\"\"\n\n def update(self, data):\n if self._buffer is None:\n raise AlreadyFinalized(\"Context was already finalized.\")\n\n if not isinstance(data, bytes):\n raise TypeError(\"data must be bytes.\")\n\n self._buffer += data\n\n finished_blocks = max(\n len(self._buffer) // (self.block_size // 8) - 1,\n 0\n )\n\n result = self._buffer[:finished_blocks * (self.block_size // 8)]\n self._buffer = 
self._buffer[finished_blocks * (self.block_size // 8):]\n\n return result\n\n def finalize(self):\n if self._buffer is None:\n raise AlreadyFinalized(\"Context was already finalized.\")\n\n if len(self._buffer) != self.block_size // 8:\n raise ValueError(\"Invalid padding bytes.\")\n\n valid = _lib.Cryptography_check_pkcs7_padding(\n self._buffer, self.block_size // 8\n )\n\n if not valid:\n raise ValueError(\"Invalid padding bytes.\")\n\n pad_size = six.indexbytes(self._buffer, -1)\n res = self._buffer[:-pad_size]\n self._buffer = None\n return res\n", "path": "src/cryptography/hazmat/primitives/padding.py"}], "after_files": [{"content": "# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport abc\n\nimport os\n\nimport six\n\nfrom cryptography import utils\nfrom cryptography.exceptions import AlreadyFinalized\nfrom cryptography.hazmat.bindings.utils import LazyLibrary, build_ffi\n\n\nwith open(os.path.join(os.path.dirname(__file__), \"src/padding.h\")) as f:\n TYPES = f.read()\n\nwith open(os.path.join(os.path.dirname(__file__), \"src/padding.c\")) as f:\n FUNCTIONS = f.read()\n\n\n_ffi = build_ffi(cdef_source=TYPES, verify_source=FUNCTIONS)\n_lib = LazyLibrary(_ffi)\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass PaddingContext(object):\n @abc.abstractmethod\n def update(self, data):\n \"\"\"\n Pads the provided bytes and returns any available data as bytes.\n \"\"\"\n\n @abc.abstractmethod\n def finalize(self):\n \"\"\"\n Finalize the padding, returns bytes.\n \"\"\"\n\n\nclass PKCS7(object):\n def __init__(self, block_size):\n if not (0 <= block_size < 256):\n raise ValueError(\"block_size must be in range(0, 256).\")\n\n if block_size % 8 != 0:\n raise ValueError(\"block_size must be a multiple of 8.\")\n\n self.block_size = block_size\n\n def padder(self):\n return _PKCS7PaddingContext(self.block_size)\n\n def unpadder(self):\n return _PKCS7UnpaddingContext(self.block_size)\n\n\[email protected]_interface(PaddingContext)\nclass _PKCS7PaddingContext(object):\n def __init__(self, block_size):\n self.block_size = block_size\n # TODO: more copies than necessary, we should use zero-buffer (#193)\n self._buffer = b\"\"\n\n def update(self, data):\n if self._buffer is None:\n raise AlreadyFinalized(\"Context was already finalized.\")\n\n if not isinstance(data, bytes):\n raise TypeError(\"data must be bytes.\")\n\n self._buffer += data\n\n finished_blocks = len(self._buffer) // (self.block_size // 8)\n\n result = self._buffer[:finished_blocks * (self.block_size // 8)]\n self._buffer = self._buffer[finished_blocks * (self.block_size // 8):]\n\n return result\n\n def finalize(self):\n if self._buffer is None:\n raise AlreadyFinalized(\"Context was already finalized.\")\n\n pad_size = self.block_size // 8 - len(self._buffer)\n result = self._buffer + six.int2byte(pad_size) * pad_size\n self._buffer = None\n return result\n\n\[email protected]_interface(PaddingContext)\nclass _PKCS7UnpaddingContext(object):\n def __init__(self, block_size):\n self.block_size = block_size\n # TODO: more copies than necessary, we should use zero-buffer (#193)\n self._buffer = b\"\"\n\n def update(self, data):\n if self._buffer is None:\n raise AlreadyFinalized(\"Context was already finalized.\")\n\n if not isinstance(data, bytes):\n raise TypeError(\"data must be bytes.\")\n\n self._buffer += data\n\n 
finished_blocks = max(\n len(self._buffer) // (self.block_size // 8) - 1,\n 0\n )\n\n result = self._buffer[:finished_blocks * (self.block_size // 8)]\n self._buffer = self._buffer[finished_blocks * (self.block_size // 8):]\n\n return result\n\n def finalize(self):\n if self._buffer is None:\n raise AlreadyFinalized(\"Context was already finalized.\")\n\n if len(self._buffer) != self.block_size // 8:\n raise ValueError(\"Invalid padding bytes.\")\n\n valid = _lib.Cryptography_check_pkcs7_padding(\n self._buffer, self.block_size // 8\n )\n\n if not valid:\n raise ValueError(\"Invalid padding bytes.\")\n\n pad_size = six.indexbytes(self._buffer, -1)\n res = self._buffer[:-pad_size]\n self._buffer = None\n return res\n", "path": "src/cryptography/hazmat/primitives/padding.py"}]}
| 1,982 | 613 |
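The fix above is mostly a relocation: the cdef text and the C implementation move into `src/padding.h` and `src/padding.c`, and the Python module reads them back at import time. A sketch of that loading idiom, using a small helper of my own rather than the two literal `open()` calls in the patch:

```python
import os

_SRC_DIR = os.path.join(os.path.dirname(__file__), "src")


def _read_source(filename):
    # Load a C source or header file that ships alongside this Python module.
    with open(os.path.join(_SRC_DIR, filename)) as f:
        return f.read()


TYPES = _read_source("padding.h")      # cdef declarations handed to cffi
FUNCTIONS = _read_source("padding.c")  # implementation compiled via build_ffi
```

Keeping the C text in real .c/.h files mirrors what the project already does for `constant_time`, so editors and linters treat it as C rather than as an opaque Python string.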
gh_patches_debug_4112
|
rasdani/github-patches
|
git_diff
|
getsentry__sentry-12417
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Google Pubsub push messages mis-identified as crawler
## Important Details
How are you running Sentry?
* [ ] On-Premise docker [Version xyz]
* [x] Saas (sentry.io)
* [ ] Other [briefly describe your environment]
## Description
We get the Sentry API error `Sentry responded with an API error: APIError(Event dropped due to filter: web-crawlers)` when there's an exception in a [Google Pubsub push](https://cloud.google.com/pubsub/docs/push) handler.
Apparently the user agent is `APIs-Google`.
## Steps to Reproduce
1. Set up a Google Pubsub push HTTP event handler
2. Have an exception in the message handler code
3. Not get report in Sentry
### What you expected to happen
`APIs-Google` isn't identified as a web crawler.
### Possible Solution
Improve the regex? 😸
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/sentry/filters/web_crawlers.py`
Content:
```
1 from __future__ import absolute_import
2
3 import re
4
5 from .base import Filter
6 from sentry.utils.data_filters import FilterStatKeys
7 from sentry.utils.safe import get_path
8
9 # not all of these agents are guaranteed to execute JavaScript, but to avoid
10 # overhead of identifying which ones do, and which ones will over time we simply
11 # target all of the major ones
12 CRAWLERS = re.compile(
13 r'|'.join(
14 (
15 # various Google services
16 r'AdsBot',
17 # Google Adsense
18 r'Mediapartners',
19 # Google+ and Google web search
20 r'Google',
21 # Bing search
22 r'BingBot',
23 r'BingPreview',
24 # Baidu search
25 r'Baiduspider',
26 # Yahoo
27 r'Slurp',
28 # Sogou
29 r'Sogou',
30 # facebook
31 r'facebook',
32 # Alexa
33 r'ia_archiver',
34 # Generic bot
35 r'bots?[\/\s\)\;]',
36 # Generic spider
37 r'spider[\/\s\)\;]',
38 # Slack - see https://api.slack.com/robots
39 r'Slack',
40 # Google indexing bot
41 r'Calypso AppCrawler',
42 )
43 ),
44 re.I
45 )
46
47
48 class WebCrawlersFilter(Filter):
49 id = FilterStatKeys.WEB_CRAWLER
50 name = 'Filter out known web crawlers'
51 description = 'Some crawlers may execute pages in incompatible ways which then cause errors that are unlikely to be seen by a normal user.'
52 default = True
53
54 def get_user_agent(self, data):
55 try:
56 for key, value in get_path(data, 'request', 'headers', filter=True) or ():
57 if key.lower() == 'user-agent':
58 return value
59 except LookupError:
60 return ''
61
62 def test(self, data):
63 # TODO(dcramer): we could also look at UA parser and use the 'Spider'
64 # device type
65 user_agent = self.get_user_agent(data)
66 if not user_agent:
67 return False
68 return bool(CRAWLERS.search(user_agent))
69
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/sentry/filters/web_crawlers.py b/src/sentry/filters/web_crawlers.py
--- a/src/sentry/filters/web_crawlers.py
+++ b/src/sentry/filters/web_crawlers.py
@@ -16,8 +16,8 @@
r'AdsBot',
# Google Adsense
r'Mediapartners',
- # Google+ and Google web search
- r'Google',
+ # Google+ and Google web search, but not apis-google
+ r'(?<!APIs-)Google',
# Bing search
r'BingBot',
r'BingPreview',
|
{"golden_diff": "diff --git a/src/sentry/filters/web_crawlers.py b/src/sentry/filters/web_crawlers.py\n--- a/src/sentry/filters/web_crawlers.py\n+++ b/src/sentry/filters/web_crawlers.py\n@@ -16,8 +16,8 @@\n r'AdsBot',\n # Google Adsense\n r'Mediapartners',\n- # Google+ and Google web search\n- r'Google',\n+ # Google+ and Google web search, but not apis-google\n+ r'(?<!APIs-)Google',\n # Bing search\n r'BingBot',\n r'BingPreview',\n", "issue": "Google Pubsub push messages mis-identified as crawler\n## Important Details\r\n\r\nHow are you running Sentry?\r\n\r\n* [ ] On-Premise docker [Version xyz]\r\n* [x] Saas (sentry.io)\r\n* [ ] Other [briefly describe your environment]\r\n\r\n## Description\r\n\r\nWe get the Sentry API error `Sentry responded with an API error: APIError(Event dropped due to filter: web-crawlers)` when there's an exception in a [Google Pubsub push](https://cloud.google.com/pubsub/docs/push) handler.\r\n\r\nApparently the user agent is `APIs-Google`.\r\n\r\n## Steps to Reproduce\r\n\r\n1. Set up a Google Pubsub push HTTP event handler\r\n2. Have an exception in the message handler code\r\n3. Not get report in Sentry\r\n\r\n### What you expected to happen\r\n\r\n`APIs-Google` isn't identified as a web crawler.\r\n\r\n### Possible Solution\r\n\r\nImprove the regex? \ud83d\ude38 \r\n\n", "before_files": [{"content": "from __future__ import absolute_import\n\nimport re\n\nfrom .base import Filter\nfrom sentry.utils.data_filters import FilterStatKeys\nfrom sentry.utils.safe import get_path\n\n# not all of these agents are guaranteed to execute JavaScript, but to avoid\n# overhead of identifying which ones do, and which ones will over time we simply\n# target all of the major ones\nCRAWLERS = re.compile(\n r'|'.join(\n (\n # various Google services\n r'AdsBot',\n # Google Adsense\n r'Mediapartners',\n # Google+ and Google web search\n r'Google',\n # Bing search\n r'BingBot',\n r'BingPreview',\n # Baidu search\n r'Baiduspider',\n # Yahoo\n r'Slurp',\n # Sogou\n r'Sogou',\n # facebook\n r'facebook',\n # Alexa\n r'ia_archiver',\n # Generic bot\n r'bots?[\\/\\s\\)\\;]',\n # Generic spider\n r'spider[\\/\\s\\)\\;]',\n # Slack - see https://api.slack.com/robots\n r'Slack',\n # Google indexing bot\n r'Calypso AppCrawler',\n )\n ),\n re.I\n)\n\n\nclass WebCrawlersFilter(Filter):\n id = FilterStatKeys.WEB_CRAWLER\n name = 'Filter out known web crawlers'\n description = 'Some crawlers may execute pages in incompatible ways which then cause errors that are unlikely to be seen by a normal user.'\n default = True\n\n def get_user_agent(self, data):\n try:\n for key, value in get_path(data, 'request', 'headers', filter=True) or ():\n if key.lower() == 'user-agent':\n return value\n except LookupError:\n return ''\n\n def test(self, data):\n # TODO(dcramer): we could also look at UA parser and use the 'Spider'\n # device type\n user_agent = self.get_user_agent(data)\n if not user_agent:\n return False\n return bool(CRAWLERS.search(user_agent))\n", "path": "src/sentry/filters/web_crawlers.py"}], "after_files": [{"content": "from __future__ import absolute_import\n\nimport re\n\nfrom .base import Filter\nfrom sentry.utils.data_filters import FilterStatKeys\nfrom sentry.utils.safe import get_path\n\n# not all of these agents are guaranteed to execute JavaScript, but to avoid\n# overhead of identifying which ones do, and which ones will over time we simply\n# target all of the major ones\nCRAWLERS = re.compile(\n r'|'.join(\n (\n # various Google services\n r'AdsBot',\n # Google Adsense\n 
r'Mediapartners',\n # Google+ and Google web search, but not apis-google\n r'(?<!APIs-)Google',\n # Bing search\n r'BingBot',\n r'BingPreview',\n # Baidu search\n r'Baiduspider',\n # Yahoo\n r'Slurp',\n # Sogou\n r'Sogou',\n # facebook\n r'facebook',\n # Alexa\n r'ia_archiver',\n # Generic bot\n r'bots?[\\/\\s\\)\\;]',\n # Generic spider\n r'spider[\\/\\s\\)\\;]',\n # Slack - see https://api.slack.com/robots\n r'Slack',\n # Google indexing bot\n r'Calypso AppCrawler',\n )\n ),\n re.I\n)\n\n\nclass WebCrawlersFilter(Filter):\n id = FilterStatKeys.WEB_CRAWLER\n name = 'Filter out known web crawlers'\n description = 'Some crawlers may execute pages in incompatible ways which then cause errors that are unlikely to be seen by a normal user.'\n default = True\n\n def get_user_agent(self, data):\n try:\n for key, value in get_path(data, 'request', 'headers', filter=True) or ():\n if key.lower() == 'user-agent':\n return value\n except LookupError:\n return ''\n\n def test(self, data):\n # TODO(dcramer): we could also look at UA parser and use the 'Spider'\n # device type\n user_agent = self.get_user_agent(data)\n if not user_agent:\n return False\n return bool(CRAWLERS.search(user_agent))\n", "path": "src/sentry/filters/web_crawlers.py"}]}
| 1,067 | 143 |
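The only behavioural change in this patch is the negative lookbehind, `(?<!APIs-)Google`, which stops the bare `Google` alternative from matching Google's Pub/Sub push agent. A small check with plain `re` (user-agent strings shortened to the tokens the issue mentions, not full production headers):

```python
import re

GOOGLE_CRAWLER = re.compile(r"(?<!APIs-)Google", re.I)

# Pub/Sub push requests identify as "APIs-Google" and should now pass through,
# while ordinary Google crawler user agents are still filtered out.
assert GOOGLE_CRAWLER.search("APIs-Google") is None
assert GOOGLE_CRAWLER.search("Mozilla/5.0 (compatible; Googlebot/2.1)") is not None
assert GOOGLE_CRAWLER.search("Mediapartners-Google") is not None
```

The full filter joins many more alternatives, so agents such as `Mediapartners-Google` are still caught by their own pattern in any case.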
gh_patches_debug_30232
|
rasdani/github-patches
|
git_diff
|
hpcaitech__ColossalAI-5315
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[tensor] fix some unittests
[tensor] fix some unittests
[tensor] fix some unittests
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `colossalai/kernel/triton/rms_layernorm.py`
Content:
```
1 import torch
2
3 try:
4 import triton
5 import triton.language as tl
6
7 HAS_TRITON = True
8 except ImportError:
9 HAS_TRITON = False
10 print("please install triton from https://github.com/openai/triton")
11
12 if HAS_TRITON:
13 # CREDITS: These functions are adapted from the Triton tutorial
14 # https://triton-lang.org/main/getting-started/tutorials/05-layer-norm.html
15
16 @triton.jit
17 def _rmsnorm_kernel(
18 X, # pointer to the input
19 Y, # pointer to the output
20 W, # pointer to the weights
21 stride, # how much to increase the pointer when moving by 1 row
22 N, # number of columns in X
23 eps, # epsilon to avoid division by zero
24 BLOCK_SIZE: tl.constexpr,
25 ):
26
27 # This triton kernel implements Root Mean Square Layer Norm (RMSNorm).
28
29 # Map the program id to the row of X and Y it should compute.
30 row = tl.program_id(0)
31 Y += row * stride
32 X += row * stride
33 # Compute variance
34 _var = tl.zeros([BLOCK_SIZE], dtype=tl.float32)
35 for off in range(0, N, BLOCK_SIZE):
36 cols = off + tl.arange(0, BLOCK_SIZE)
37 x = tl.load(X + cols, mask=cols < N, other=0.0).to(tl.float32)
38 x = tl.where(cols < N, x, 0.0)
39 _var += x * x
40 var = tl.sum(_var, axis=0) / N
41 rstd = 1 / tl.sqrt(var + eps)
42 # Normalize and apply linear transformation
43 for off in range(0, N, BLOCK_SIZE):
44 cols = off + tl.arange(0, BLOCK_SIZE)
45 mask = cols < N
46 w = tl.load(W + cols, mask=mask)
47 x = tl.load(X + cols, mask=mask, other=0.0).to(tl.float32)
48 x_hat = x * rstd
49 y = x_hat * w
50 # Write output
51 tl.store(Y + cols, y.to(tl.float16), mask=mask)
52
53 @torch.no_grad()
54 def rms_layernorm(x, weight, eps):
55 # allocate output
56 y = torch.empty_like(x)
57 # reshape input data into 2D tensor
58 x_arg = x.reshape(-1, x.shape[-1])
59 M, N = x_arg.shape
60 # Less than 64KB per feature: enqueue fused kernel
61 MAX_FUSED_SIZE = 65536 // x.element_size()
62 BLOCK_SIZE = min(MAX_FUSED_SIZE, triton.next_power_of_2(N))
63 if N > BLOCK_SIZE:
64 raise RuntimeError("This layer norm doesn't support feature dim >= 64KB.")
65 # heuristics for number of warps
66 num_warps = min(max(BLOCK_SIZE // 256, 1), 8)
67 # enqueue kernel
68 _rmsnorm_kernel[(M,)](
69 x_arg, y, weight, x_arg.stride(0), N, eps, BLOCK_SIZE=BLOCK_SIZE, num_warps=num_warps
70 )
71 return y
72
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/colossalai/kernel/triton/rms_layernorm.py b/colossalai/kernel/triton/rms_layernorm.py
--- a/colossalai/kernel/triton/rms_layernorm.py
+++ b/colossalai/kernel/triton/rms_layernorm.py
@@ -23,7 +23,6 @@
eps, # epsilon to avoid division by zero
BLOCK_SIZE: tl.constexpr,
):
-
# This triton kernel implements Root Mean Square Layer Norm (RMSNorm).
# Map the program id to the row of X and Y it should compute.
@@ -54,18 +53,19 @@
def rms_layernorm(x, weight, eps):
# allocate output
y = torch.empty_like(x)
- # reshape input data into 2D tensor
+ # reshape input data into 2D tensor, (total token, hidden_size)
x_arg = x.reshape(-1, x.shape[-1])
M, N = x_arg.shape
# Less than 64KB per feature: enqueue fused kernel
MAX_FUSED_SIZE = 65536 // x.element_size()
+
BLOCK_SIZE = min(MAX_FUSED_SIZE, triton.next_power_of_2(N))
- if N > BLOCK_SIZE:
+ if N > MAX_FUSED_SIZE:
raise RuntimeError("This layer norm doesn't support feature dim >= 64KB.")
+
# heuristics for number of warps
- num_warps = min(max(BLOCK_SIZE // 256, 1), 8)
+ num_warps = min(max(triton.next_power_of_2(N) // 256, 8), 32)
+
# enqueue kernel
- _rmsnorm_kernel[(M,)](
- x_arg, y, weight, x_arg.stride(0), N, eps, BLOCK_SIZE=BLOCK_SIZE, num_warps=num_warps
- )
+ _rmsnorm_kernel[(M,)](x_arg, y, weight, x_arg.stride(0), N, eps, BLOCK_SIZE=BLOCK_SIZE, num_warps=num_warps)
return y
|
{"golden_diff": "diff --git a/colossalai/kernel/triton/rms_layernorm.py b/colossalai/kernel/triton/rms_layernorm.py\n--- a/colossalai/kernel/triton/rms_layernorm.py\n+++ b/colossalai/kernel/triton/rms_layernorm.py\n@@ -23,7 +23,6 @@\n eps, # epsilon to avoid division by zero\n BLOCK_SIZE: tl.constexpr,\n ):\n-\n # This triton kernel implements Root Mean Square Layer Norm (RMSNorm).\n \n # Map the program id to the row of X and Y it should compute.\n@@ -54,18 +53,19 @@\n def rms_layernorm(x, weight, eps):\n # allocate output\n y = torch.empty_like(x)\n- # reshape input data into 2D tensor\n+ # reshape input data into 2D tensor, (total token, hidden_size)\n x_arg = x.reshape(-1, x.shape[-1])\n M, N = x_arg.shape\n # Less than 64KB per feature: enqueue fused kernel\n MAX_FUSED_SIZE = 65536 // x.element_size()\n+\n BLOCK_SIZE = min(MAX_FUSED_SIZE, triton.next_power_of_2(N))\n- if N > BLOCK_SIZE:\n+ if N > MAX_FUSED_SIZE:\n raise RuntimeError(\"This layer norm doesn't support feature dim >= 64KB.\")\n+\n # heuristics for number of warps\n- num_warps = min(max(BLOCK_SIZE // 256, 1), 8)\n+ num_warps = min(max(triton.next_power_of_2(N) // 256, 8), 32)\n+\n # enqueue kernel\n- _rmsnorm_kernel[(M,)](\n- x_arg, y, weight, x_arg.stride(0), N, eps, BLOCK_SIZE=BLOCK_SIZE, num_warps=num_warps\n- )\n+ _rmsnorm_kernel[(M,)](x_arg, y, weight, x_arg.stride(0), N, eps, BLOCK_SIZE=BLOCK_SIZE, num_warps=num_warps)\n return y\n", "issue": "[tensor] fix some unittests\n\n[tensor] fix some unittests\n\n[tensor] fix some unittests\n\n", "before_files": [{"content": "import torch\n\ntry:\n import triton\n import triton.language as tl\n\n HAS_TRITON = True\nexcept ImportError:\n HAS_TRITON = False\n print(\"please install triton from https://github.com/openai/triton\")\n\nif HAS_TRITON:\n # CREDITS: These functions are adapted from the Triton tutorial\n # https://triton-lang.org/main/getting-started/tutorials/05-layer-norm.html\n\n @triton.jit\n def _rmsnorm_kernel(\n X, # pointer to the input\n Y, # pointer to the output\n W, # pointer to the weights\n stride, # how much to increase the pointer when moving by 1 row\n N, # number of columns in X\n eps, # epsilon to avoid division by zero\n BLOCK_SIZE: tl.constexpr,\n ):\n\n # This triton kernel implements Root Mean Square Layer Norm (RMSNorm).\n\n # Map the program id to the row of X and Y it should compute.\n row = tl.program_id(0)\n Y += row * stride\n X += row * stride\n # Compute variance\n _var = tl.zeros([BLOCK_SIZE], dtype=tl.float32)\n for off in range(0, N, BLOCK_SIZE):\n cols = off + tl.arange(0, BLOCK_SIZE)\n x = tl.load(X + cols, mask=cols < N, other=0.0).to(tl.float32)\n x = tl.where(cols < N, x, 0.0)\n _var += x * x\n var = tl.sum(_var, axis=0) / N\n rstd = 1 / tl.sqrt(var + eps)\n # Normalize and apply linear transformation\n for off in range(0, N, BLOCK_SIZE):\n cols = off + tl.arange(0, BLOCK_SIZE)\n mask = cols < N\n w = tl.load(W + cols, mask=mask)\n x = tl.load(X + cols, mask=mask, other=0.0).to(tl.float32)\n x_hat = x * rstd\n y = x_hat * w\n # Write output\n tl.store(Y + cols, y.to(tl.float16), mask=mask)\n\n @torch.no_grad()\n def rms_layernorm(x, weight, eps):\n # allocate output\n y = torch.empty_like(x)\n # reshape input data into 2D tensor\n x_arg = x.reshape(-1, x.shape[-1])\n M, N = x_arg.shape\n # Less than 64KB per feature: enqueue fused kernel\n MAX_FUSED_SIZE = 65536 // x.element_size()\n BLOCK_SIZE = min(MAX_FUSED_SIZE, triton.next_power_of_2(N))\n if N > BLOCK_SIZE:\n raise RuntimeError(\"This layer norm doesn't support feature dim 
>= 64KB.\")\n # heuristics for number of warps\n num_warps = min(max(BLOCK_SIZE // 256, 1), 8)\n # enqueue kernel\n _rmsnorm_kernel[(M,)](\n x_arg, y, weight, x_arg.stride(0), N, eps, BLOCK_SIZE=BLOCK_SIZE, num_warps=num_warps\n )\n return y\n", "path": "colossalai/kernel/triton/rms_layernorm.py"}], "after_files": [{"content": "import torch\n\ntry:\n import triton\n import triton.language as tl\n\n HAS_TRITON = True\nexcept ImportError:\n HAS_TRITON = False\n print(\"please install triton from https://github.com/openai/triton\")\n\nif HAS_TRITON:\n # CREDITS: These functions are adapted from the Triton tutorial\n # https://triton-lang.org/main/getting-started/tutorials/05-layer-norm.html\n\n @triton.jit\n def _rmsnorm_kernel(\n X, # pointer to the input\n Y, # pointer to the output\n W, # pointer to the weights\n stride, # how much to increase the pointer when moving by 1 row\n N, # number of columns in X\n eps, # epsilon to avoid division by zero\n BLOCK_SIZE: tl.constexpr,\n ):\n # This triton kernel implements Root Mean Square Layer Norm (RMSNorm).\n\n # Map the program id to the row of X and Y it should compute.\n row = tl.program_id(0)\n Y += row * stride\n X += row * stride\n # Compute variance\n _var = tl.zeros([BLOCK_SIZE], dtype=tl.float32)\n for off in range(0, N, BLOCK_SIZE):\n cols = off + tl.arange(0, BLOCK_SIZE)\n x = tl.load(X + cols, mask=cols < N, other=0.0).to(tl.float32)\n x = tl.where(cols < N, x, 0.0)\n _var += x * x\n var = tl.sum(_var, axis=0) / N\n rstd = 1 / tl.sqrt(var + eps)\n # Normalize and apply linear transformation\n for off in range(0, N, BLOCK_SIZE):\n cols = off + tl.arange(0, BLOCK_SIZE)\n mask = cols < N\n w = tl.load(W + cols, mask=mask)\n x = tl.load(X + cols, mask=mask, other=0.0).to(tl.float32)\n x_hat = x * rstd\n y = x_hat * w\n # Write output\n tl.store(Y + cols, y.to(tl.float16), mask=mask)\n\n @torch.no_grad()\n def rms_layernorm(x, weight, eps):\n # allocate output\n y = torch.empty_like(x)\n # reshape input data into 2D tensor, (total token, hidden_size)\n x_arg = x.reshape(-1, x.shape[-1])\n M, N = x_arg.shape\n # Less than 64KB per feature: enqueue fused kernel\n MAX_FUSED_SIZE = 65536 // x.element_size()\n\n BLOCK_SIZE = min(MAX_FUSED_SIZE, triton.next_power_of_2(N))\n if N > MAX_FUSED_SIZE:\n raise RuntimeError(\"This layer norm doesn't support feature dim >= 64KB.\")\n\n # heuristics for number of warps\n num_warps = min(max(triton.next_power_of_2(N) // 256, 8), 32)\n\n # enqueue kernel\n _rmsnorm_kernel[(M,)](x_arg, y, weight, x_arg.stride(0), N, eps, BLOCK_SIZE=BLOCK_SIZE, num_warps=num_warps)\n return y\n", "path": "colossalai/kernel/triton/rms_layernorm.py"}]}
| 1,160 | 482 |
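Apart from dropping a blank line, this patch only touches launch heuristics: the feature-dimension guard now compares `N` against `MAX_FUSED_SIZE` instead of the already-clamped `BLOCK_SIZE`, and `num_warps` scales with `next_power_of_2(N) // 256`, clamped to the range 8..32. When testing a change like this, a plain PyTorch restatement of what the kernel computes is a handy reference; this is my own sketch, not code from the repository:

```python
import torch


def rms_layernorm_ref(x: torch.Tensor, weight: torch.Tensor, eps: float) -> torch.Tensor:
    # Same math as the Triton kernel, applied over the last dimension:
    # var = mean(x^2), y = x / sqrt(var + eps) * weight, stored as fp16.
    x32 = x.float()
    var = x32.pow(2).mean(dim=-1, keepdim=True)
    y = x32 * torch.rsqrt(var + eps) * weight.float()
    return y.to(torch.float16)
```

Comparing the kernel's output against such a reference with `torch.allclose` at fp16 tolerances is a common way to write the corresponding unit test.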
gh_patches_debug_12269
|
rasdani/github-patches
|
git_diff
|
mathesar-foundation__mathesar-513
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Implement schema list page
**Problem**
<!-- Please provide a clear and concise description of the problem that this feature request is designed to solve.-->
Users should be able to create a new schema, edit schema names, and delete schemas.
**Proposed solution**
<!-- A clear and concise description of your proposed solution or feature. -->
We should provide a way to do these actions from the UI using the schema list page introduced in the [design spec](https://wiki.mathesar.org/en/design/specs/schemas).
**Additional context**
<!-- Add any other context or screenshots about the feature request here.-->
- #166
- #168
- #170
- #393
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mathesar/urls.py`
Content:
```
1 from django.urls import include, path
2 from rest_framework_nested import routers
3
4 from mathesar.views import api, frontend
5
6
7 router = routers.DefaultRouter()
8 router.register(r'tables', api.TableViewSet, basename='table')
9 router.register(r'schemas', api.SchemaViewSet, basename='schema')
10 router.register(r'database_keys', api.DatabaseKeyViewSet, basename='database-key')
11 router.register(r'databases', api.DatabaseViewSet, basename='database')
12 router.register(r'data_files', api.DataFileViewSet, basename='data-file')
13
14 table_router = routers.NestedSimpleRouter(router, r'tables', lookup='table')
15 table_router.register(r'records', api.RecordViewSet, basename='table-record')
16 table_router.register(r'columns', api.ColumnViewSet, basename='table-column')
17
18 urlpatterns = [
19 path('', frontend.index, name="index"),
20 path('api/v0/', include(router.urls)),
21 path('api/v0/', include(table_router.urls)),
22 # TODO: Handle known urls like /favicon.ico etc.,
23 # Currenty, this catches all
24 path('<dbname>', frontend.index, name="index"),
25 ]
26
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/mathesar/urls.py b/mathesar/urls.py
--- a/mathesar/urls.py
+++ b/mathesar/urls.py
@@ -1,4 +1,4 @@
-from django.urls import include, path
+from django.urls import include, path, re_path
from rest_framework_nested import routers
from mathesar.views import api, frontend
@@ -20,6 +20,6 @@
path('api/v0/', include(router.urls)),
path('api/v0/', include(table_router.urls)),
# TODO: Handle known urls like /favicon.ico etc.,
- # Currenty, this catches all
- path('<dbname>', frontend.index, name="index"),
+ # Currently, this catches all
+ re_path(r'(?P<dbname>\w+)/.*$', frontend.index, name="index"),
]
|
{"golden_diff": "diff --git a/mathesar/urls.py b/mathesar/urls.py\n--- a/mathesar/urls.py\n+++ b/mathesar/urls.py\n@@ -1,4 +1,4 @@\n-from django.urls import include, path\n+from django.urls import include, path, re_path\n from rest_framework_nested import routers\n \n from mathesar.views import api, frontend\n@@ -20,6 +20,6 @@\n path('api/v0/', include(router.urls)),\n path('api/v0/', include(table_router.urls)),\n # TODO: Handle known urls like /favicon.ico etc.,\n- # Currenty, this catches all\n- path('<dbname>', frontend.index, name=\"index\"),\n+ # Currently, this catches all\n+ re_path(r'(?P<dbname>\\w+)/.*$', frontend.index, name=\"index\"),\n ]\n", "issue": "Implement schema list page\n**Problem**\r\n<!-- Please provide a clear and concise description of the problem that this feature request is designed to solve.-->\r\nUsers should be able to create a new schema, edit schema names, and delete schemas.\r\n\r\n**Proposed solution**\r\n<!-- A clear and concise description of your proposed solution or feature. -->\r\nWe should provide a way to do these actions from the UI using the schema list page introduced in the [design spec](https://wiki.mathesar.org/en/design/specs/schemas).\r\n\r\n**Additional context**\r\n<!-- Add any other context or screenshots about the feature request here.-->\r\n- #166\r\n- #168 \r\n- #170\r\n- #393\n", "before_files": [{"content": "from django.urls import include, path\nfrom rest_framework_nested import routers\n\nfrom mathesar.views import api, frontend\n\n\nrouter = routers.DefaultRouter()\nrouter.register(r'tables', api.TableViewSet, basename='table')\nrouter.register(r'schemas', api.SchemaViewSet, basename='schema')\nrouter.register(r'database_keys', api.DatabaseKeyViewSet, basename='database-key')\nrouter.register(r'databases', api.DatabaseViewSet, basename='database')\nrouter.register(r'data_files', api.DataFileViewSet, basename='data-file')\n\ntable_router = routers.NestedSimpleRouter(router, r'tables', lookup='table')\ntable_router.register(r'records', api.RecordViewSet, basename='table-record')\ntable_router.register(r'columns', api.ColumnViewSet, basename='table-column')\n\nurlpatterns = [\n path('', frontend.index, name=\"index\"),\n path('api/v0/', include(router.urls)),\n path('api/v0/', include(table_router.urls)),\n # TODO: Handle known urls like /favicon.ico etc.,\n # Currenty, this catches all\n path('<dbname>', frontend.index, name=\"index\"),\n]\n", "path": "mathesar/urls.py"}], "after_files": [{"content": "from django.urls import include, path, re_path\nfrom rest_framework_nested import routers\n\nfrom mathesar.views import api, frontend\n\n\nrouter = routers.DefaultRouter()\nrouter.register(r'tables', api.TableViewSet, basename='table')\nrouter.register(r'schemas', api.SchemaViewSet, basename='schema')\nrouter.register(r'database_keys', api.DatabaseKeyViewSet, basename='database-key')\nrouter.register(r'databases', api.DatabaseViewSet, basename='database')\nrouter.register(r'data_files', api.DataFileViewSet, basename='data-file')\n\ntable_router = routers.NestedSimpleRouter(router, r'tables', lookup='table')\ntable_router.register(r'records', api.RecordViewSet, basename='table-record')\ntable_router.register(r'columns', api.ColumnViewSet, basename='table-column')\n\nurlpatterns = [\n path('', frontend.index, name=\"index\"),\n path('api/v0/', include(router.urls)),\n path('api/v0/', include(table_router.urls)),\n # TODO: Handle known urls like /favicon.ico etc.,\n # Currently, this catches all\n re_path(r'(?P<dbname>\\w+)/.*$', 
frontend.index, name=\"index\"),\n]\n", "path": "mathesar/urls.py"}]}
| 682 | 181 |
gh_patches_debug_41643
|
rasdani/github-patches
|
git_diff
|
microsoft__Qcodes-1171
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Keithley 2400 does not get added to the station cleanly
The ":read:" command and possibly others does not work when output is off but fails with an error. This is called when getting volt and current are snapshotted
We should wrap these calls in checking that output is off
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `qcodes/instrument_drivers/tektronix/Keithley_2400.py`
Content:
```
1 from qcodes import VisaInstrument
2 from qcodes.utils.validators import Strings, Enum
3
4
5 class Keithley_2400(VisaInstrument):
6 """
7 QCoDeS driver for the Keithley 2400 voltage source.
8 """
9 def __init__(self, name, address, **kwargs):
10 super().__init__(name, address, terminator='\n', **kwargs)
11
12 self.add_parameter('rangev',
13 get_cmd='SENS:VOLT:RANG?',
14 get_parser=float,
15 set_cmd='SOUR:VOLT:RANG {:f}',
16 label='Voltage range')
17
18 self.add_parameter('rangei',
19 get_cmd='SENS:CURR:RANG?',
20 get_parser=float,
21 set_cmd='SOUR:CURR:RANG {:f}',
22 label='Current range')
23
24 self.add_parameter('compliancev',
25 get_cmd='SENS:VOLT:PROT?',
26 get_parser=float,
27 set_cmd='SENS:VOLT:PROT {:f}',
28 label='Voltage Compliance')
29
30 self.add_parameter('compliancei',
31 get_cmd='SENS:CURR:PROT?',
32 get_parser=float,
33 set_cmd='SENS:CURR:PROT {:f}',
34 label='Current Compliance')
35
36 self.add_parameter('volt',
37 get_cmd=':READ?',
38 get_parser=self._volt_parser,
39 set_cmd=':SOUR:VOLT:LEV {:.8f}',
40 label='Voltage',
41 unit='V')
42
43 self.add_parameter('curr',
44 get_cmd=':READ?',
45 get_parser=self._curr_parser,
46 set_cmd=':SOUR:CURR:LEV {:.8f}',
47 label='Current',
48 unit='A')
49
50 self.add_parameter('mode',
51 vals=Enum('VOLT', 'CURR'),
52 get_cmd=':SOUR:FUNC?',
53 set_cmd=self._set_mode_and_sense,
54 label='Mode')
55
56 self.add_parameter('sense',
57 vals=Strings(),
58 get_cmd=':SENS:FUNC?',
59 set_cmd=':SENS:FUNC "{:s}"',
60 label='Sense mode')
61
62 self.add_parameter('output',
63 get_parser=int,
64 set_cmd=':OUTP:STAT {:d}',
65 get_cmd=':OUTP:STAT?')
66
67 self.add_parameter('nplcv',
68 get_cmd='SENS:VOLT:NPLC?',
69 get_parser=float,
70 set_cmd='SENS:VOLT:NPLC {:f}',
71 label='Voltage integration time')
72
73 self.add_parameter('nplci',
74 get_cmd='SENS:CURR:NPLC?',
75 get_parser=float,
76 set_cmd='SENS:CURR:NPLC {:f}',
77 label='Current integration time')
78
79 self.add_parameter('resistance',
80 get_cmd=':READ?',
81 get_parser=self._resistance_parser,
82 label='Resistance',
83 unit='Ohm')
84
85 def _set_mode_and_sense(self, msg):
86 # This helps set the correct read out curr/volt
87 if msg == 'VOLT':
88 self.sense('CURR')
89 elif msg == 'CURR':
90 self.sense('VOLT')
91 else:
92 raise AttributeError('Mode does not exist')
93 self.write(':SOUR:FUNC {:s}'.format(msg))
94
95 def reset(self):
96 """
97 Reset the instrument. When the instrument is reset, it performs the
98 following actions.
99
100 Returns the SourceMeter to the GPIB default conditions.
101
102 Cancels all pending commands.
103
104 Cancels all previously send `*OPC` and `*OPC?`
105 """
106 self.write(':*RST')
107
108 def _volt_parser(self, msg):
109 fields = [float(x) for x in msg.split(',')]
110 return fields[0]
111
112 def _curr_parser(self, msg):
113 fields = [float(x) for x in msg.split(',')]
114 return fields[1]
115
116 def _resistance_parser(self, msg):
117 fields = [float(x) for x in msg.split(',')]
118 return fields[0]/fields[1]
119
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/qcodes/instrument_drivers/tektronix/Keithley_2400.py b/qcodes/instrument_drivers/tektronix/Keithley_2400.py
--- a/qcodes/instrument_drivers/tektronix/Keithley_2400.py
+++ b/qcodes/instrument_drivers/tektronix/Keithley_2400.py
@@ -34,18 +34,31 @@
label='Current Compliance')
self.add_parameter('volt',
- get_cmd=':READ?',
+ get_cmd=self._get_read_output_protected,
get_parser=self._volt_parser,
set_cmd=':SOUR:VOLT:LEV {:.8f}',
label='Voltage',
- unit='V')
+ unit='V',
+ docstring="Sets voltage in 'VOLT' mode. "
+ "Get returns measured voltage if "
+ "sensing 'VOLT' otherwise it returns "
+ "setpoint value. "
+ "Note that it is an error to read voltage with "
+ "output off")
self.add_parameter('curr',
- get_cmd=':READ?',
+ get_cmd=self._get_read_output_protected,
get_parser=self._curr_parser,
set_cmd=':SOUR:CURR:LEV {:.8f}',
label='Current',
- unit='A')
+ unit='A',
+ docstring = "Sets current in 'CURR' mode. "
+ "Get returns measured current if "
+ "sensing 'CURR' otherwise it returns "
+ "setpoint value. "
+ "Note that it is an error to read current with "
+ "output off")
+
self.add_parameter('mode',
vals=Enum('VOLT', 'CURR'),
@@ -77,10 +90,32 @@
label='Current integration time')
self.add_parameter('resistance',
- get_cmd=':READ?',
+ get_cmd=self._get_read_output_protected,
get_parser=self._resistance_parser,
label='Resistance',
- unit='Ohm')
+ unit='Ohm',
+ docstring="Measure resistance from current and voltage "
+ "Note that it is an error to read current "
+ "and voltage with output off")
+
+ def _get_read_output_protected(self) -> str:
+ """
+ This wrapper function around ":READ?" exists because calling
+ ":READ?" on an instrument with output disabled is an error.
+ So first we check that output is on and if not we return
+ nan for volt, curr etc.
+ """
+ output = self.output.get_latest()
+ if output is None:
+ # if get_latest returns None we have
+ # to ask the instrument for the status of output
+ output = self.output.get()
+
+ if output == 1:
+ msg = self.ask(':READ?')
+ else:
+ raise RuntimeError("Cannot perform read with output off")
+ return msg
def _set_mode_and_sense(self, msg):
# This helps set the correct read out curr/volt
@@ -115,4 +150,5 @@
def _resistance_parser(self, msg):
fields = [float(x) for x in msg.split(',')]
- return fields[0]/fields[1]
+ res = fields[0] / fields[1]
+ return res
|
{"golden_diff": "diff --git a/qcodes/instrument_drivers/tektronix/Keithley_2400.py b/qcodes/instrument_drivers/tektronix/Keithley_2400.py\n--- a/qcodes/instrument_drivers/tektronix/Keithley_2400.py\n+++ b/qcodes/instrument_drivers/tektronix/Keithley_2400.py\n@@ -34,18 +34,31 @@\n label='Current Compliance')\n \n self.add_parameter('volt',\n- get_cmd=':READ?',\n+ get_cmd=self._get_read_output_protected,\n get_parser=self._volt_parser,\n set_cmd=':SOUR:VOLT:LEV {:.8f}',\n label='Voltage',\n- unit='V')\n+ unit='V',\n+ docstring=\"Sets voltage in 'VOLT' mode. \"\n+ \"Get returns measured voltage if \"\n+ \"sensing 'VOLT' otherwise it returns \"\n+ \"setpoint value. \"\n+ \"Note that it is an error to read voltage with \"\n+ \"output off\")\n \n self.add_parameter('curr',\n- get_cmd=':READ?',\n+ get_cmd=self._get_read_output_protected,\n get_parser=self._curr_parser,\n set_cmd=':SOUR:CURR:LEV {:.8f}',\n label='Current',\n- unit='A')\n+ unit='A',\n+ docstring = \"Sets current in 'CURR' mode. \"\n+ \"Get returns measured current if \"\n+ \"sensing 'CURR' otherwise it returns \"\n+ \"setpoint value. \"\n+ \"Note that it is an error to read current with \"\n+ \"output off\")\n+\n \n self.add_parameter('mode',\n vals=Enum('VOLT', 'CURR'),\n@@ -77,10 +90,32 @@\n label='Current integration time')\n \n self.add_parameter('resistance',\n- get_cmd=':READ?',\n+ get_cmd=self._get_read_output_protected,\n get_parser=self._resistance_parser,\n label='Resistance',\n- unit='Ohm')\n+ unit='Ohm',\n+ docstring=\"Measure resistance from current and voltage \"\n+ \"Note that it is an error to read current \"\n+ \"and voltage with output off\")\n+\n+ def _get_read_output_protected(self) -> str:\n+ \"\"\"\n+ This wrapper function around \":READ?\" exists because calling\n+ \":READ?\" on an instrument with output disabled is an error.\n+ So first we check that output is on and if not we return\n+ nan for volt, curr etc.\n+ \"\"\"\n+ output = self.output.get_latest()\n+ if output is None:\n+ # if get_latest returns None we have\n+ # to ask the instrument for the status of output\n+ output = self.output.get()\n+\n+ if output == 1:\n+ msg = self.ask(':READ?')\n+ else:\n+ raise RuntimeError(\"Cannot perform read with output off\")\n+ return msg\n \n def _set_mode_and_sense(self, msg):\n # This helps set the correct read out curr/volt\n@@ -115,4 +150,5 @@\n \n def _resistance_parser(self, msg):\n fields = [float(x) for x in msg.split(',')]\n- return fields[0]/fields[1]\n+ res = fields[0] / fields[1]\n+ return res\n", "issue": "Keithley 2400 does not get added to the station cleanly\nThe \":read:\" command and possibly others does not work when output is off but fails with an error. 
This is called when getting volt and current are snapshotted \r\n\r\nWe should wrap these calls in checking that output is off\n", "before_files": [{"content": "from qcodes import VisaInstrument\nfrom qcodes.utils.validators import Strings, Enum\n\n\nclass Keithley_2400(VisaInstrument):\n \"\"\"\n QCoDeS driver for the Keithley 2400 voltage source.\n \"\"\"\n def __init__(self, name, address, **kwargs):\n super().__init__(name, address, terminator='\\n', **kwargs)\n\n self.add_parameter('rangev',\n get_cmd='SENS:VOLT:RANG?',\n get_parser=float,\n set_cmd='SOUR:VOLT:RANG {:f}',\n label='Voltage range')\n\n self.add_parameter('rangei',\n get_cmd='SENS:CURR:RANG?',\n get_parser=float,\n set_cmd='SOUR:CURR:RANG {:f}',\n label='Current range')\n\n self.add_parameter('compliancev',\n get_cmd='SENS:VOLT:PROT?',\n get_parser=float,\n set_cmd='SENS:VOLT:PROT {:f}',\n label='Voltage Compliance')\n\n self.add_parameter('compliancei',\n get_cmd='SENS:CURR:PROT?',\n get_parser=float,\n set_cmd='SENS:CURR:PROT {:f}',\n label='Current Compliance')\n\n self.add_parameter('volt',\n get_cmd=':READ?',\n get_parser=self._volt_parser,\n set_cmd=':SOUR:VOLT:LEV {:.8f}',\n label='Voltage',\n unit='V')\n\n self.add_parameter('curr',\n get_cmd=':READ?',\n get_parser=self._curr_parser,\n set_cmd=':SOUR:CURR:LEV {:.8f}',\n label='Current',\n unit='A')\n\n self.add_parameter('mode',\n vals=Enum('VOLT', 'CURR'),\n get_cmd=':SOUR:FUNC?',\n set_cmd=self._set_mode_and_sense,\n label='Mode')\n\n self.add_parameter('sense',\n vals=Strings(),\n get_cmd=':SENS:FUNC?',\n set_cmd=':SENS:FUNC \"{:s}\"',\n label='Sense mode')\n\n self.add_parameter('output',\n get_parser=int,\n set_cmd=':OUTP:STAT {:d}',\n get_cmd=':OUTP:STAT?')\n\n self.add_parameter('nplcv',\n get_cmd='SENS:VOLT:NPLC?',\n get_parser=float,\n set_cmd='SENS:VOLT:NPLC {:f}',\n label='Voltage integration time')\n\n self.add_parameter('nplci',\n get_cmd='SENS:CURR:NPLC?',\n get_parser=float,\n set_cmd='SENS:CURR:NPLC {:f}',\n label='Current integration time')\n\n self.add_parameter('resistance',\n get_cmd=':READ?',\n get_parser=self._resistance_parser,\n label='Resistance',\n unit='Ohm')\n\n def _set_mode_and_sense(self, msg):\n # This helps set the correct read out curr/volt\n if msg == 'VOLT':\n self.sense('CURR')\n elif msg == 'CURR':\n self.sense('VOLT')\n else:\n raise AttributeError('Mode does not exist')\n self.write(':SOUR:FUNC {:s}'.format(msg))\n\n def reset(self):\n \"\"\"\n Reset the instrument. 
When the instrument is reset, it performs the\n following actions.\n\n Returns the SourceMeter to the GPIB default conditions.\n\n Cancels all pending commands.\n\n Cancels all previously send `*OPC` and `*OPC?`\n \"\"\"\n self.write(':*RST')\n\n def _volt_parser(self, msg):\n fields = [float(x) for x in msg.split(',')]\n return fields[0]\n\n def _curr_parser(self, msg):\n fields = [float(x) for x in msg.split(',')]\n return fields[1]\n\n def _resistance_parser(self, msg):\n fields = [float(x) for x in msg.split(',')]\n return fields[0]/fields[1]\n", "path": "qcodes/instrument_drivers/tektronix/Keithley_2400.py"}], "after_files": [{"content": "from qcodes import VisaInstrument\nfrom qcodes.utils.validators import Strings, Enum\n\n\nclass Keithley_2400(VisaInstrument):\n \"\"\"\n QCoDeS driver for the Keithley 2400 voltage source.\n \"\"\"\n def __init__(self, name, address, **kwargs):\n super().__init__(name, address, terminator='\\n', **kwargs)\n\n self.add_parameter('rangev',\n get_cmd='SENS:VOLT:RANG?',\n get_parser=float,\n set_cmd='SOUR:VOLT:RANG {:f}',\n label='Voltage range')\n\n self.add_parameter('rangei',\n get_cmd='SENS:CURR:RANG?',\n get_parser=float,\n set_cmd='SOUR:CURR:RANG {:f}',\n label='Current range')\n\n self.add_parameter('compliancev',\n get_cmd='SENS:VOLT:PROT?',\n get_parser=float,\n set_cmd='SENS:VOLT:PROT {:f}',\n label='Voltage Compliance')\n\n self.add_parameter('compliancei',\n get_cmd='SENS:CURR:PROT?',\n get_parser=float,\n set_cmd='SENS:CURR:PROT {:f}',\n label='Current Compliance')\n\n self.add_parameter('volt',\n get_cmd=self._get_read_output_protected,\n get_parser=self._volt_parser,\n set_cmd=':SOUR:VOLT:LEV {:.8f}',\n label='Voltage',\n unit='V',\n docstring=\"Sets voltage in 'VOLT' mode. \"\n \"Get returns measured voltage if \"\n \"sensing 'VOLT' otherwise it returns \"\n \"setpoint value. \"\n \"Note that it is an error to read voltage with \"\n \"output off\")\n\n self.add_parameter('curr',\n get_cmd=self._get_read_output_protected,\n get_parser=self._curr_parser,\n set_cmd=':SOUR:CURR:LEV {:.8f}',\n label='Current',\n unit='A',\n docstring = \"Sets current in 'CURR' mode. \"\n \"Get returns measured current if \"\n \"sensing 'CURR' otherwise it returns \"\n \"setpoint value. 
\"\n \"Note that it is an error to read current with \"\n \"output off\")\n\n\n self.add_parameter('mode',\n vals=Enum('VOLT', 'CURR'),\n get_cmd=':SOUR:FUNC?',\n set_cmd=self._set_mode_and_sense,\n label='Mode')\n\n self.add_parameter('sense',\n vals=Strings(),\n get_cmd=':SENS:FUNC?',\n set_cmd=':SENS:FUNC \"{:s}\"',\n label='Sense mode')\n\n self.add_parameter('output',\n get_parser=int,\n set_cmd=':OUTP:STAT {:d}',\n get_cmd=':OUTP:STAT?')\n\n self.add_parameter('nplcv',\n get_cmd='SENS:VOLT:NPLC?',\n get_parser=float,\n set_cmd='SENS:VOLT:NPLC {:f}',\n label='Voltage integration time')\n\n self.add_parameter('nplci',\n get_cmd='SENS:CURR:NPLC?',\n get_parser=float,\n set_cmd='SENS:CURR:NPLC {:f}',\n label='Current integration time')\n\n self.add_parameter('resistance',\n get_cmd=self._get_read_output_protected,\n get_parser=self._resistance_parser,\n label='Resistance',\n unit='Ohm',\n docstring=\"Measure resistance from current and voltage \"\n \"Note that it is an error to read current \"\n \"and voltage with output off\")\n\n def _get_read_output_protected(self) -> str:\n \"\"\"\n This wrapper function around \":READ?\" exists because calling\n \":READ?\" on an instrument with output disabled is an error.\n So first we check that output is on and if not we return\n nan for volt, curr etc.\n \"\"\"\n output = self.output.get_latest()\n if output is None:\n # if get_latest returns None we have\n # to ask the instrument for the status of output\n output = self.output.get()\n\n if output == 1:\n msg = self.ask(':READ?')\n else:\n raise RuntimeError(\"Cannot perform read with output off\")\n return msg\n\n def _set_mode_and_sense(self, msg):\n # This helps set the correct read out curr/volt\n if msg == 'VOLT':\n self.sense('CURR')\n elif msg == 'CURR':\n self.sense('VOLT')\n else:\n raise AttributeError('Mode does not exist')\n self.write(':SOUR:FUNC {:s}'.format(msg))\n\n def reset(self):\n \"\"\"\n Reset the instrument. When the instrument is reset, it performs the\n following actions.\n\n Returns the SourceMeter to the GPIB default conditions.\n\n Cancels all pending commands.\n\n Cancels all previously send `*OPC` and `*OPC?`\n \"\"\"\n self.write(':*RST')\n\n def _volt_parser(self, msg):\n fields = [float(x) for x in msg.split(',')]\n return fields[0]\n\n def _curr_parser(self, msg):\n fields = [float(x) for x in msg.split(',')]\n return fields[1]\n\n def _resistance_parser(self, msg):\n fields = [float(x) for x in msg.split(',')]\n res = fields[0] / fields[1]\n return res\n", "path": "qcodes/instrument_drivers/tektronix/Keithley_2400.py"}]}
| 1,465 | 765 |
gh_patches_debug_17736
|
rasdani/github-patches
|
git_diff
|
beeware__toga-31
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
"ImportError: cannot import name WebKit" on Ubuntu 14.04
Installed toga via global `sudo pip install toga`. Then, tried to import it:
```
>>> import toga
ERROR:root:Could not find any typelib for WebKit
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/toga/__init__.py", line 86, in <module>
from .platform.gtk.app import *
File "/usr/local/lib/python2.7/dist-packages/toga/platform/gtk/app.py", line 7, in <module>
from .window import Window
File "/usr/local/lib/python2.7/dist-packages/toga/platform/gtk/window.py", line 6, in <module>
from .command import SEPARATOR, SPACER, EXPANDING_SPACER
File "/usr/local/lib/python2.7/dist-packages/toga/platform/gtk/command.py", line 1, in <module>
from .widgets import Icon
File "/usr/local/lib/python2.7/dist-packages/toga/platform/gtk/widgets/__init__.py", line 17, in <module>
from .webview import WebView
File "/usr/local/lib/python2.7/dist-packages/toga/platform/gtk/widgets/webview.py", line 3, in <module>
from gi.repository import Gtk, WebKit
ImportError: cannot import name WebKit
```
Did a `sudo apt-get install python-webkit`, but still getting the same import error. I'm running Ubuntu under Crouton on a Chromebook, which doesn't always contain the full set of packages.
Since the application I aim to create (a GUI launcher for [KA Lite](https://github.com/learningequality/ka-lite/)) would rely on toga's awesome dedication to being pure Python and not needing any extra packages to be installed to work cross-platform, and since we wouldn't be needing the WebView, would it be possible to have it handle a lack of WebKit more gracefully, only erroring out if a WebView was actually used? Thanks!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `toga/platform/gtk/widgets/webview.py`
Content:
```
1 from __future__ import print_function, absolute_import, division
2
3 from gi.repository import Gtk, WebKit
4
5 from .base import Widget
6
7
8 class WebView(Widget):
9 def __init__(self, url=None):
10 super(WebView, self).__init__()
11 self._url = url
12
13 self._webview = None
14
15 def _startup(self):
16 self._impl = Gtk.ScrolledWindow()
17 self._impl.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
18
19 self._webview = WebKit.WebView()
20
21 if self._url:
22 self._webview.load_uri(self._url)
23
24 self._impl.add(self._webview)
25 self._impl.set_min_content_width(200)
26 self._impl.set_min_content_height(200)
27
28 @property
29 def url(self):
30 return self._url
31
32 @url.setter
33 def url(self, value):
34 self._url = value
35 if self._impl:
36 self._webview.load_uri(self._url)
37
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/toga/platform/gtk/widgets/webview.py b/toga/platform/gtk/widgets/webview.py
--- a/toga/platform/gtk/widgets/webview.py
+++ b/toga/platform/gtk/widgets/webview.py
@@ -1,6 +1,13 @@
from __future__ import print_function, absolute_import, division
-from gi.repository import Gtk, WebKit
+from gi.repository import Gtk
+
+# The following import sometimes fails; handle failure gracefully
+# (see https://github.com/pybee/toga/issues/26)
+try:
+ from gi.repository import WebKit
+except ImportError:
+ WebKit = None
from .base import Widget
@@ -13,6 +20,12 @@
self._webview = None
def _startup(self):
+
+ if WebKit is None:
+ raise RuntimeError(
+ "Import 'from gi.repository import WebKit' failed;" +
+ " may need to install gir1.2-webkit-3.0 or similar.")
+
self._impl = Gtk.ScrolledWindow()
self._impl.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
|
{"golden_diff": "diff --git a/toga/platform/gtk/widgets/webview.py b/toga/platform/gtk/widgets/webview.py\n--- a/toga/platform/gtk/widgets/webview.py\n+++ b/toga/platform/gtk/widgets/webview.py\n@@ -1,6 +1,13 @@\n from __future__ import print_function, absolute_import, division\n \n-from gi.repository import Gtk, WebKit\n+from gi.repository import Gtk\n+\n+# The following import sometimes fails; handle failure gracefully\n+# (see https://github.com/pybee/toga/issues/26)\n+try:\n+ from gi.repository import WebKit\n+except ImportError:\n+ WebKit = None\n \n from .base import Widget\n \n@@ -13,6 +20,12 @@\n self._webview = None\n \n def _startup(self):\n+\n+ if WebKit is None:\n+ raise RuntimeError(\n+ \"Import 'from gi.repository import WebKit' failed;\" +\n+ \" may need to install gir1.2-webkit-3.0 or similar.\")\n+\n self._impl = Gtk.ScrolledWindow()\n self._impl.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)\n", "issue": "\"ImportError: cannot import name WebKit\" on Ubuntu 14.04\nInstalled toga via global `sudo pip install toga`. Then, tried to import it:\n\n```\n>>> import toga\nERROR:root:Could not find any typelib for WebKit\nTraceback (most recent call last):\n File \"<stdin>\", line 1, in <module>\n File \"/usr/local/lib/python2.7/dist-packages/toga/__init__.py\", line 86, in <module>\n from .platform.gtk.app import *\n File \"/usr/local/lib/python2.7/dist-packages/toga/platform/gtk/app.py\", line 7, in <module>\n from .window import Window\n File \"/usr/local/lib/python2.7/dist-packages/toga/platform/gtk/window.py\", line 6, in <module>\n from .command import SEPARATOR, SPACER, EXPANDING_SPACER\n File \"/usr/local/lib/python2.7/dist-packages/toga/platform/gtk/command.py\", line 1, in <module>\n from .widgets import Icon\n File \"/usr/local/lib/python2.7/dist-packages/toga/platform/gtk/widgets/__init__.py\", line 17, in <module>\n from .webview import WebView\n File \"/usr/local/lib/python2.7/dist-packages/toga/platform/gtk/widgets/webview.py\", line 3, in <module>\n from gi.repository import Gtk, WebKit\nImportError: cannot import name WebKit\n```\n\nDid a `sudo apt-get install python-webkit`, but still getting the same import error. I'm running Ubuntu under Crouton on a Chromebook, which doesn't always contain the full set of packages.\n\nSince the application I aim to create (a GUI launcher for [KA Lite](https://github.com/learningequality/ka-lite/)) would rely on toga's awesome dedication to being pure Python and not needing any extra packages to be installed to work cross-platform, and since we wouldn't be needing the WebView, would it be possible to have it handle a lack of WebKit more gracefully, only erroring out if a WebView was actually used? 
Thanks!\n\n", "before_files": [{"content": "from __future__ import print_function, absolute_import, division\n\nfrom gi.repository import Gtk, WebKit\n\nfrom .base import Widget\n\n\nclass WebView(Widget):\n def __init__(self, url=None):\n super(WebView, self).__init__()\n self._url = url\n\n self._webview = None\n\n def _startup(self):\n self._impl = Gtk.ScrolledWindow()\n self._impl.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)\n\n self._webview = WebKit.WebView()\n\n if self._url:\n self._webview.load_uri(self._url)\n\n self._impl.add(self._webview)\n self._impl.set_min_content_width(200)\n self._impl.set_min_content_height(200)\n\n @property\n def url(self):\n return self._url\n\n @url.setter\n def url(self, value):\n self._url = value\n if self._impl:\n self._webview.load_uri(self._url)\n", "path": "toga/platform/gtk/widgets/webview.py"}], "after_files": [{"content": "from __future__ import print_function, absolute_import, division\n\nfrom gi.repository import Gtk\n\n# The following import sometimes fails; handle failure gracefully\n# (see https://github.com/pybee/toga/issues/26)\ntry:\n from gi.repository import WebKit\nexcept ImportError:\n WebKit = None\n\nfrom .base import Widget\n\n\nclass WebView(Widget):\n def __init__(self, url=None):\n super(WebView, self).__init__()\n self._url = url\n\n self._webview = None\n\n def _startup(self):\n\n if WebKit is None:\n raise RuntimeError(\n \"Import 'from gi.repository import WebKit' failed;\" +\n \" may need to install gir1.2-webkit-3.0 or similar.\")\n\n self._impl = Gtk.ScrolledWindow()\n self._impl.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)\n\n self._webview = WebKit.WebView()\n\n if self._url:\n self._webview.load_uri(self._url)\n\n self._impl.add(self._webview)\n self._impl.set_min_content_width(200)\n self._impl.set_min_content_height(200)\n\n @property\n def url(self):\n return self._url\n\n @url.setter\n def url(self, value):\n self._url = value\n if self._impl:\n self._webview.load_uri(self._url)\n", "path": "toga/platform/gtk/widgets/webview.py"}]}
| 1,019 | 254 |
gh_patches_debug_6994
|
rasdani/github-patches
|
git_diff
|
comic__grand-challenge.org-2146
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Count of results displayed on the challenge card does not match leaderboard count
**Describe the bug**
The card for the node21 challenge currently notes there are 21 results. Clicking on this brings you to the leaderboard where only 2 results are present. It seems that the count is including submissions which failed and/or submissions where the evaluation failed, which is misleading.
**To Reproduce**
Steps to reproduce the behavior:
- Choose a challenge where the database includes many failed submissions or failed evaluations (e.g. node21 at present)
- View the card for this challenge (currently it is on the GC front page)
- Verify that the number of results shown on the card does not match the number of results on the leaderboard (click the number shown on the card).
**Expected behavior**
The number of reported results should match the number of results on the leaderboard
**Screenshots**

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `app/grandchallenge/challenges/tasks.py`
Content:
```
1 from celery import shared_task
2 from django.contrib.auth import get_user_model
3 from django.core.mail import mail_managers
4 from django.db.models import Count, Max
5 from requests import exceptions, get
6
7 from grandchallenge.challenges.models import Challenge, ExternalChallenge
8 from grandchallenge.evaluation.models import Evaluation
9 from grandchallenge.subdomains.utils import reverse
10
11
12 @shared_task
13 def update_challenge_results_cache():
14 challenges = Challenge.objects.all()
15 evaluation_info = (
16 Evaluation.objects.filter(published=True)
17 .values("submission__phase__challenge_id")
18 .annotate(
19 cached_num_results=Count("submission__phase__challenge_id"),
20 cached_latest_result=Max("created"),
21 )
22 )
23 evaluation_info_by_challenge = {
24 str(v["submission__phase__challenge_id"]): v for v in evaluation_info
25 }
26 participant_counts = (
27 get_user_model()
28 .objects.values("groups__participants_of_challenge")
29 .annotate(cached_num_participants=Count("pk"))
30 )
31 participant_counts_by_challenge = {
32 str(v["groups__participants_of_challenge"]): v
33 for v in participant_counts
34 }
35
36 for c in challenges:
37 c.cached_num_results = evaluation_info_by_challenge.get(
38 str(c.pk), {}
39 ).get("cached_num_results", 0)
40 c.cached_latest_result = evaluation_info_by_challenge.get(
41 str(c.pk), {}
42 ).get("cached_latest_result", None)
43 c.cached_num_participants = participant_counts_by_challenge.get(
44 str(c.pk), {}
45 ).get("cached_num_participants", 0)
46
47 Challenge.objects.bulk_update(
48 challenges,
49 [
50 "cached_num_results",
51 "cached_num_participants",
52 "cached_latest_result",
53 ],
54 )
55
56
57 @shared_task
58 def check_external_challenge_urls():
59 """
60 Checks that all external challenge urls are reachable.
61
62 Emails the managers if any of the challenges are not.
63 """
64 challenges = ExternalChallenge.objects.filter(hidden=False)
65 errors = []
66
67 for challenge in challenges:
68 try:
69 url = challenge.homepage
70 if not url.startswith("http"):
71 url = "http://" + url
72 r = get(url, timeout=60)
73 # raise an exception when we receive a http error (e.g., 404)
74 r.raise_for_status()
75 except exceptions.RequestException as err:
76 update_url = reverse(
77 "challenges:external-update",
78 kwargs={"short_name": challenge.short_name},
79 )
80 errors.append(
81 f"Error when trying to access '{challenge}': {err}. You can "
82 f"update it here: {update_url}"
83 )
84
85 if errors:
86 mail_managers(
87 subject=f"Unreachable external challenges ({len(errors)})",
88 message="\n\n".join(errors),
89 )
90
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/app/grandchallenge/challenges/tasks.py b/app/grandchallenge/challenges/tasks.py
--- a/app/grandchallenge/challenges/tasks.py
+++ b/app/grandchallenge/challenges/tasks.py
@@ -13,7 +13,7 @@
def update_challenge_results_cache():
challenges = Challenge.objects.all()
evaluation_info = (
- Evaluation.objects.filter(published=True)
+ Evaluation.objects.filter(published=True, rank__gt=0)
.values("submission__phase__challenge_id")
.annotate(
cached_num_results=Count("submission__phase__challenge_id"),
|
{"golden_diff": "diff --git a/app/grandchallenge/challenges/tasks.py b/app/grandchallenge/challenges/tasks.py\n--- a/app/grandchallenge/challenges/tasks.py\n+++ b/app/grandchallenge/challenges/tasks.py\n@@ -13,7 +13,7 @@\n def update_challenge_results_cache():\n challenges = Challenge.objects.all()\n evaluation_info = (\n- Evaluation.objects.filter(published=True)\n+ Evaluation.objects.filter(published=True, rank__gt=0)\n .values(\"submission__phase__challenge_id\")\n .annotate(\n cached_num_results=Count(\"submission__phase__challenge_id\"),\n", "issue": "Count of results displayed on the challenge card does not match leaderboard count\n**Describe the bug**\r\nThe card for the node21 challenge currently notes there are 21 results. Clicking on this brings you to the leaderboard where only 2 results are present. It seems that the count is including submissions which failed and/or submissions where the evaluation failed, which is misleading. \r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n - Choose a challenge where the database includes many failed submissions or failed evaluations (e.g. node21 at present)\r\n- View the card for this challenge (currently it is on the GC front page)\r\n - Verify that the number of results shown on the card does not match the number of results on the leaderboard (click the number shown on the card).\r\n\r\n**Expected behavior**\r\nThe number of reported results should match the number of results on the leaderboard\r\n\r\n**Screenshots**\r\n\r\n\r\n\n", "before_files": [{"content": "from celery import shared_task\nfrom django.contrib.auth import get_user_model\nfrom django.core.mail import mail_managers\nfrom django.db.models import Count, Max\nfrom requests import exceptions, get\n\nfrom grandchallenge.challenges.models import Challenge, ExternalChallenge\nfrom grandchallenge.evaluation.models import Evaluation\nfrom grandchallenge.subdomains.utils import reverse\n\n\n@shared_task\ndef update_challenge_results_cache():\n challenges = Challenge.objects.all()\n evaluation_info = (\n Evaluation.objects.filter(published=True)\n .values(\"submission__phase__challenge_id\")\n .annotate(\n cached_num_results=Count(\"submission__phase__challenge_id\"),\n cached_latest_result=Max(\"created\"),\n )\n )\n evaluation_info_by_challenge = {\n str(v[\"submission__phase__challenge_id\"]): v for v in evaluation_info\n }\n participant_counts = (\n get_user_model()\n .objects.values(\"groups__participants_of_challenge\")\n .annotate(cached_num_participants=Count(\"pk\"))\n )\n participant_counts_by_challenge = {\n str(v[\"groups__participants_of_challenge\"]): v\n for v in participant_counts\n }\n\n for c in challenges:\n c.cached_num_results = evaluation_info_by_challenge.get(\n str(c.pk), {}\n ).get(\"cached_num_results\", 0)\n c.cached_latest_result = evaluation_info_by_challenge.get(\n str(c.pk), {}\n ).get(\"cached_latest_result\", None)\n c.cached_num_participants = participant_counts_by_challenge.get(\n str(c.pk), {}\n ).get(\"cached_num_participants\", 0)\n\n Challenge.objects.bulk_update(\n challenges,\n [\n \"cached_num_results\",\n \"cached_num_participants\",\n \"cached_latest_result\",\n ],\n )\n\n\n@shared_task\ndef check_external_challenge_urls():\n \"\"\"\n Checks that all external challenge urls are reachable.\n\n Emails the managers if any of the challenges are not.\n \"\"\"\n challenges = ExternalChallenge.objects.filter(hidden=False)\n errors = []\n\n for challenge in challenges:\n try:\n url = challenge.homepage\n if not 
url.startswith(\"http\"):\n url = \"http://\" + url\n r = get(url, timeout=60)\n # raise an exception when we receive a http error (e.g., 404)\n r.raise_for_status()\n except exceptions.RequestException as err:\n update_url = reverse(\n \"challenges:external-update\",\n kwargs={\"short_name\": challenge.short_name},\n )\n errors.append(\n f\"Error when trying to access '{challenge}': {err}. You can \"\n f\"update it here: {update_url}\"\n )\n\n if errors:\n mail_managers(\n subject=f\"Unreachable external challenges ({len(errors)})\",\n message=\"\\n\\n\".join(errors),\n )\n", "path": "app/grandchallenge/challenges/tasks.py"}], "after_files": [{"content": "from celery import shared_task\nfrom django.contrib.auth import get_user_model\nfrom django.core.mail import mail_managers\nfrom django.db.models import Count, Max\nfrom requests import exceptions, get\n\nfrom grandchallenge.challenges.models import Challenge, ExternalChallenge\nfrom grandchallenge.evaluation.models import Evaluation\nfrom grandchallenge.subdomains.utils import reverse\n\n\n@shared_task\ndef update_challenge_results_cache():\n challenges = Challenge.objects.all()\n evaluation_info = (\n Evaluation.objects.filter(published=True, rank__gt=0)\n .values(\"submission__phase__challenge_id\")\n .annotate(\n cached_num_results=Count(\"submission__phase__challenge_id\"),\n cached_latest_result=Max(\"created\"),\n )\n )\n evaluation_info_by_challenge = {\n str(v[\"submission__phase__challenge_id\"]): v for v in evaluation_info\n }\n participant_counts = (\n get_user_model()\n .objects.values(\"groups__participants_of_challenge\")\n .annotate(cached_num_participants=Count(\"pk\"))\n )\n participant_counts_by_challenge = {\n str(v[\"groups__participants_of_challenge\"]): v\n for v in participant_counts\n }\n\n for c in challenges:\n c.cached_num_results = evaluation_info_by_challenge.get(\n str(c.pk), {}\n ).get(\"cached_num_results\", 0)\n c.cached_latest_result = evaluation_info_by_challenge.get(\n str(c.pk), {}\n ).get(\"cached_latest_result\", None)\n c.cached_num_participants = participant_counts_by_challenge.get(\n str(c.pk), {}\n ).get(\"cached_num_participants\", 0)\n\n Challenge.objects.bulk_update(\n challenges,\n [\n \"cached_num_results\",\n \"cached_num_participants\",\n \"cached_latest_result\",\n ],\n )\n\n\n@shared_task\ndef check_external_challenge_urls():\n \"\"\"\n Checks that all external challenge urls are reachable.\n\n Emails the managers if any of the challenges are not.\n \"\"\"\n challenges = ExternalChallenge.objects.filter(hidden=False)\n errors = []\n\n for challenge in challenges:\n try:\n url = challenge.homepage\n if not url.startswith(\"http\"):\n url = \"http://\" + url\n r = get(url, timeout=60)\n # raise an exception when we receive a http error (e.g., 404)\n r.raise_for_status()\n except exceptions.RequestException as err:\n update_url = reverse(\n \"challenges:external-update\",\n kwargs={\"short_name\": challenge.short_name},\n )\n errors.append(\n f\"Error when trying to access '{challenge}': {err}. You can \"\n f\"update it here: {update_url}\"\n )\n\n if errors:\n mail_managers(\n subject=f\"Unreachable external challenges ({len(errors)})\",\n message=\"\\n\\n\".join(errors),\n )\n", "path": "app/grandchallenge/challenges/tasks.py"}]}
| 1,278 | 127 |
gh_patches_debug_11634
|
rasdani/github-patches
|
git_diff
|
encode__uvicorn-623
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Duplicate logs when using root logger with 'gunicorn -k uvicorn.workers.UvicornWorker ...'
Here is a small test file with minimal gunicorn and uvicorn apps. But my real interest is the log statements at the top of the file.
```
import logging
logging.error('TEST 1 -- LOGGING ERROR')
logging.getLogger().error('TEST 2 -- ROOT LOGGER ERROR')
logging.getLogger('foo').error('TEST 3 -- FOO LOGGER ERROR')
# minimal gunicorn app
def appG(environ, start_response):
data = b'Hello, World!\n'
status = '200 OK'
response_headers = [
('Content-type', 'text/plain'),
('Content-Length', str(len(data)))
]
start_response(status, response_headers)
return iter([data])
# minimal uvicorn app
async def appU(scope, receive, send):
assert scope['type'] == 'http'
await send({
'type': 'http.response.start',
'status': 200,
'headers': [
[b'content-type', b'text/plain'],
]
})
await send({
'type': 'http.response.body',
'body': b'Hello, world!',
})
```
The logs "work" when the file is run by gunicorn or uvicorn individually.
But when I use gunicorn and uvicorn **together**, I get doubled uvicorn logs.
```
$ gunicorn -k uvicorn.workers.UvicornWorker test3:appU
[2020-04-07 22:47:53 -0400] [16015] [INFO] Starting gunicorn 20.0.4
[2020-04-07 22:47:53 -0400] [16015] [INFO] Listening at: http://127.0.0.1:8000 (16015)
[2020-04-07 22:47:53 -0400] [16015] [INFO] Using worker: uvicorn.workers.UvicornWorker
[2020-04-07 22:47:53 -0400] [16018] [INFO] Booting worker with pid: 16018
ERROR:root:TEST 1 -- LOGGING ERROR
ERROR:root:TEST 2 -- ROOT LOGGER ERROR
ERROR:foo:TEST 3 -- FOO LOGGER ERROR
[2020-04-07 22:47:53 -0400] [16018] [INFO] Started server process [16018]
INFO:uvicorn.error:Started server process [16018]
[2020-04-07 22:47:53 -0400] [16018] [INFO] Waiting for application startup.
INFO:uvicorn.error:Waiting for application startup.
[2020-04-07 22:47:53 -0400] [16018] [INFO] ASGI 'lifespan' protocol appears unsupported.
INFO:uvicorn.error:ASGI 'lifespan' protocol appears unsupported.
[2020-04-07 22:47:53 -0400] [16018] [INFO] Application startup complete.
INFO:uvicorn.error:Application startup complete.
```
Note the last several lines are double logged with different formats. (Two handlers?)
FYI,
```
$ pip freeze |grep corn
gunicorn==20.0.4
uvicorn==0.11.3
```
I'd love a work around for **both** `gunicorn -k uvicorn.workers.UvicornWorker ...` and `uvicorn ...` that has an inheritable root logger.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `uvicorn/workers.py`
Content:
```
1 import asyncio
2 import logging
3
4 from gunicorn.workers.base import Worker
5 from uvicorn.config import Config
6 from uvicorn.main import Server
7
8
9 class UvicornWorker(Worker):
10 """
11 A worker class for Gunicorn that interfaces with an ASGI consumer callable,
12 rather than a WSGI callable.
13 """
14
15 CONFIG_KWARGS = {"loop": "uvloop", "http": "httptools"}
16
17 def __init__(self, *args, **kwargs):
18 super(UvicornWorker, self).__init__(*args, **kwargs)
19
20 logger = logging.getLogger("uvicorn.error")
21 logger.handlers = self.log.error_log.handlers
22 logger.setLevel(self.log.error_log.level)
23
24 logger = logging.getLogger("uvicorn.access")
25 logger.handlers = self.log.access_log.handlers
26 logger.setLevel(self.log.access_log.level)
27
28 config_kwargs = {
29 "app": None,
30 "log_config": None,
31 "timeout_keep_alive": self.cfg.keepalive,
32 "timeout_notify": self.timeout,
33 "callback_notify": self.callback_notify,
34 "limit_max_requests": self.max_requests,
35 }
36
37 if self.cfg.is_ssl:
38 ssl_kwargs = {
39 "ssl_keyfile": self.cfg.ssl_options.get("keyfile"),
40 "ssl_certfile": self.cfg.ssl_options.get("certfile"),
41 "ssl_version": self.cfg.ssl_options.get("ssl_version"),
42 "ssl_cert_reqs": self.cfg.ssl_options.get("cert_reqs"),
43 "ssl_ca_certs": self.cfg.ssl_options.get("ca_certs"),
44 "ssl_ciphers": self.cfg.ssl_options.get("ciphers"),
45 }
46 config_kwargs.update(ssl_kwargs)
47
48 if self.cfg.settings["backlog"].value:
49 config_kwargs["backlog"] = self.cfg.settings["backlog"].value
50
51 config_kwargs.update(self.CONFIG_KWARGS)
52
53 self.config = Config(**config_kwargs)
54
55 def init_process(self):
56 self.config.setup_event_loop()
57 super(UvicornWorker, self).init_process()
58
59 def init_signals(self):
60 pass
61
62 def run(self):
63 self.config.app = self.wsgi
64 server = Server(config=self.config)
65 loop = asyncio.get_event_loop()
66 loop.run_until_complete(server.serve(sockets=self.sockets))
67
68 async def callback_notify(self):
69 self.notify()
70
71
72 class UvicornH11Worker(UvicornWorker):
73 CONFIG_KWARGS = {"loop": "asyncio", "http": "h11"}
74
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/uvicorn/workers.py b/uvicorn/workers.py
--- a/uvicorn/workers.py
+++ b/uvicorn/workers.py
@@ -20,10 +20,12 @@
logger = logging.getLogger("uvicorn.error")
logger.handlers = self.log.error_log.handlers
logger.setLevel(self.log.error_log.level)
+ logger.propagate = False
logger = logging.getLogger("uvicorn.access")
logger.handlers = self.log.access_log.handlers
logger.setLevel(self.log.access_log.level)
+ logger.propagate = False
config_kwargs = {
"app": None,
|
{"golden_diff": "diff --git a/uvicorn/workers.py b/uvicorn/workers.py\n--- a/uvicorn/workers.py\n+++ b/uvicorn/workers.py\n@@ -20,10 +20,12 @@\n logger = logging.getLogger(\"uvicorn.error\")\n logger.handlers = self.log.error_log.handlers\n logger.setLevel(self.log.error_log.level)\n+ logger.propagate = False\n \n logger = logging.getLogger(\"uvicorn.access\")\n logger.handlers = self.log.access_log.handlers\n logger.setLevel(self.log.access_log.level)\n+ logger.propagate = False\n \n config_kwargs = {\n \"app\": None,\n", "issue": "Duplicate logs when using root logger with 'gunicorn -k uvicorn.workers.UvicornWorker ...'\nHere is a small test file with minimal gunicorn and uvicorn apps. But my real interest is the log statements at the top of the file.\r\n\r\n```\r\nimport logging\r\n\r\nlogging.error('TEST 1 -- LOGGING ERROR')\r\nlogging.getLogger().error('TEST 2 -- ROOT LOGGER ERROR')\r\nlogging.getLogger('foo').error('TEST 3 -- FOO LOGGER ERROR')\r\n\r\n\r\n# minimal gunicorn app\r\ndef appG(environ, start_response):\r\n data = b'Hello, World!\\n'\r\n status = '200 OK'\r\n response_headers = [\r\n ('Content-type', 'text/plain'),\r\n ('Content-Length', str(len(data)))\r\n ]\r\n start_response(status, response_headers)\r\n return iter([data])\r\n\r\n\r\n# minimal uvicorn app\r\nasync def appU(scope, receive, send):\r\n assert scope['type'] == 'http'\r\n await send({\r\n 'type': 'http.response.start',\r\n 'status': 200,\r\n 'headers': [\r\n [b'content-type', b'text/plain'],\r\n ]\r\n })\r\n await send({\r\n 'type': 'http.response.body',\r\n 'body': b'Hello, world!',\r\n })\r\n```\r\n\r\nThe logs \"work\" when the file is run by gunicorn or uvicorn individually.\r\n\r\nBut when I use gunicorn and uvicorn **together**, I get doubled uvicorn logs.\r\n\r\n```\r\n$ gunicorn -k uvicorn.workers.UvicornWorker test3:appU\r\n[2020-04-07 22:47:53 -0400] [16015] [INFO] Starting gunicorn 20.0.4\r\n[2020-04-07 22:47:53 -0400] [16015] [INFO] Listening at: http://127.0.0.1:8000 (16015)\r\n[2020-04-07 22:47:53 -0400] [16015] [INFO] Using worker: uvicorn.workers.UvicornWorker\r\n[2020-04-07 22:47:53 -0400] [16018] [INFO] Booting worker with pid: 16018\r\nERROR:root:TEST 1 -- LOGGING ERROR\r\nERROR:root:TEST 2 -- ROOT LOGGER ERROR\r\nERROR:foo:TEST 3 -- FOO LOGGER ERROR\r\n[2020-04-07 22:47:53 -0400] [16018] [INFO] Started server process [16018]\r\nINFO:uvicorn.error:Started server process [16018]\r\n[2020-04-07 22:47:53 -0400] [16018] [INFO] Waiting for application startup.\r\nINFO:uvicorn.error:Waiting for application startup.\r\n[2020-04-07 22:47:53 -0400] [16018] [INFO] ASGI 'lifespan' protocol appears unsupported.\r\nINFO:uvicorn.error:ASGI 'lifespan' protocol appears unsupported.\r\n[2020-04-07 22:47:53 -0400] [16018] [INFO] Application startup complete.\r\nINFO:uvicorn.error:Application startup complete.\r\n```\r\nNote the last several lines are double logged with different formats. 
(Two handlers?)\r\n\r\nFYI,\r\n```\r\n$ pip freeze |grep corn\r\ngunicorn==20.0.4\r\nuvicorn==0.11.3\r\n```\r\n\r\nI'd love a work around for **both** `gunicorn -k uvicorn.workers.UvicornWorker ...` and `uvicorn ...` that has an inheritable root logger.\r\n\n", "before_files": [{"content": "import asyncio\nimport logging\n\nfrom gunicorn.workers.base import Worker\nfrom uvicorn.config import Config\nfrom uvicorn.main import Server\n\n\nclass UvicornWorker(Worker):\n \"\"\"\n A worker class for Gunicorn that interfaces with an ASGI consumer callable,\n rather than a WSGI callable.\n \"\"\"\n\n CONFIG_KWARGS = {\"loop\": \"uvloop\", \"http\": \"httptools\"}\n\n def __init__(self, *args, **kwargs):\n super(UvicornWorker, self).__init__(*args, **kwargs)\n\n logger = logging.getLogger(\"uvicorn.error\")\n logger.handlers = self.log.error_log.handlers\n logger.setLevel(self.log.error_log.level)\n\n logger = logging.getLogger(\"uvicorn.access\")\n logger.handlers = self.log.access_log.handlers\n logger.setLevel(self.log.access_log.level)\n\n config_kwargs = {\n \"app\": None,\n \"log_config\": None,\n \"timeout_keep_alive\": self.cfg.keepalive,\n \"timeout_notify\": self.timeout,\n \"callback_notify\": self.callback_notify,\n \"limit_max_requests\": self.max_requests,\n }\n\n if self.cfg.is_ssl:\n ssl_kwargs = {\n \"ssl_keyfile\": self.cfg.ssl_options.get(\"keyfile\"),\n \"ssl_certfile\": self.cfg.ssl_options.get(\"certfile\"),\n \"ssl_version\": self.cfg.ssl_options.get(\"ssl_version\"),\n \"ssl_cert_reqs\": self.cfg.ssl_options.get(\"cert_reqs\"),\n \"ssl_ca_certs\": self.cfg.ssl_options.get(\"ca_certs\"),\n \"ssl_ciphers\": self.cfg.ssl_options.get(\"ciphers\"),\n }\n config_kwargs.update(ssl_kwargs)\n\n if self.cfg.settings[\"backlog\"].value:\n config_kwargs[\"backlog\"] = self.cfg.settings[\"backlog\"].value\n\n config_kwargs.update(self.CONFIG_KWARGS)\n\n self.config = Config(**config_kwargs)\n\n def init_process(self):\n self.config.setup_event_loop()\n super(UvicornWorker, self).init_process()\n\n def init_signals(self):\n pass\n\n def run(self):\n self.config.app = self.wsgi\n server = Server(config=self.config)\n loop = asyncio.get_event_loop()\n loop.run_until_complete(server.serve(sockets=self.sockets))\n\n async def callback_notify(self):\n self.notify()\n\n\nclass UvicornH11Worker(UvicornWorker):\n CONFIG_KWARGS = {\"loop\": \"asyncio\", \"http\": \"h11\"}\n", "path": "uvicorn/workers.py"}], "after_files": [{"content": "import asyncio\nimport logging\n\nfrom gunicorn.workers.base import Worker\nfrom uvicorn.config import Config\nfrom uvicorn.main import Server\n\n\nclass UvicornWorker(Worker):\n \"\"\"\n A worker class for Gunicorn that interfaces with an ASGI consumer callable,\n rather than a WSGI callable.\n \"\"\"\n\n CONFIG_KWARGS = {\"loop\": \"uvloop\", \"http\": \"httptools\"}\n\n def __init__(self, *args, **kwargs):\n super(UvicornWorker, self).__init__(*args, **kwargs)\n\n logger = logging.getLogger(\"uvicorn.error\")\n logger.handlers = self.log.error_log.handlers\n logger.setLevel(self.log.error_log.level)\n logger.propagate = False\n\n logger = logging.getLogger(\"uvicorn.access\")\n logger.handlers = self.log.access_log.handlers\n logger.setLevel(self.log.access_log.level)\n logger.propagate = False\n\n config_kwargs = {\n \"app\": None,\n \"log_config\": None,\n \"timeout_keep_alive\": self.cfg.keepalive,\n \"timeout_notify\": self.timeout,\n \"callback_notify\": self.callback_notify,\n \"limit_max_requests\": self.max_requests,\n }\n\n if self.cfg.is_ssl:\n 
ssl_kwargs = {\n \"ssl_keyfile\": self.cfg.ssl_options.get(\"keyfile\"),\n \"ssl_certfile\": self.cfg.ssl_options.get(\"certfile\"),\n \"ssl_version\": self.cfg.ssl_options.get(\"ssl_version\"),\n \"ssl_cert_reqs\": self.cfg.ssl_options.get(\"cert_reqs\"),\n \"ssl_ca_certs\": self.cfg.ssl_options.get(\"ca_certs\"),\n \"ssl_ciphers\": self.cfg.ssl_options.get(\"ciphers\"),\n }\n config_kwargs.update(ssl_kwargs)\n\n if self.cfg.settings[\"backlog\"].value:\n config_kwargs[\"backlog\"] = self.cfg.settings[\"backlog\"].value\n\n config_kwargs.update(self.CONFIG_KWARGS)\n\n self.config = Config(**config_kwargs)\n\n def init_process(self):\n self.config.setup_event_loop()\n super(UvicornWorker, self).init_process()\n\n def init_signals(self):\n pass\n\n def run(self):\n self.config.app = self.wsgi\n server = Server(config=self.config)\n loop = asyncio.get_event_loop()\n loop.run_until_complete(server.serve(sockets=self.sockets))\n\n async def callback_notify(self):\n self.notify()\n\n\nclass UvicornH11Worker(UvicornWorker):\n CONFIG_KWARGS = {\"loop\": \"asyncio\", \"http\": \"h11\"}\n", "path": "uvicorn/workers.py"}]}
| 1,829 | 135 |