| Column | Type | Range |
|---|---|---|
| problem_id | string | lengths 18 to 22 |
| source | string | 1 distinct value |
| task_type | string | 1 distinct value |
| in_source_id | string | lengths 13 to 58 |
| prompt | string | lengths 1.1k to 25.4k |
| golden_diff | string | lengths 145 to 5.13k |
| verification_info | string | lengths 582 to 39.1k |
| num_tokens | int64 | 271 to 4.1k |
| num_tokens_diff | int64 | 47 to 1.02k |
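For working with these records programmatically, the sketch below loads the dataset with the Hugging Face `datasets` library and decodes one record. It is a minimal sketch, not the canonical loader: the dataset id `rasdani/github-patches` is taken from the `source` column above, the split name `train` is an assumption, and `verification_info` is assumed to be a JSON-encoded string carrying the keys visible in the preview (`golden_diff`, `issue`, `before_files`, `after_files`).

```python
import json

from datasets import load_dataset

# Assumptions (not stated in the preview itself): the dataset is published on
# the Hugging Face Hub under the id shown in the `source` column, and the
# preview rows come from a split named "train".
ds = load_dataset("rasdani/github-patches", split="train")

example = ds[0]
print(example["problem_id"], example["in_source_id"], example["num_tokens"])

# `prompt` holds the full task text (issue, file contents and diff instructions);
# `golden_diff` holds the reference patch as plain text.
print(example["golden_diff"].splitlines()[0])

# `verification_info` looks like a JSON-encoded string in the preview; if so,
# it can be decoded to reach the issue text and the before/after file contents.
info = json.loads(example["verification_info"])
print(sorted(info.keys()))  # expected: after_files, before_files, golden_diff, issue
```

If the split name differs, calling `load_dataset("rasdani/github-patches")` without the `split` argument returns a `DatasetDict` whose keys list the splits that actually exist.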
- **problem_id:** gh_patches_debug_24793
- **source:** rasdani/github-patches
- **task_type:** git_diff
- **in_source_id:** optuna__optuna-4095

**prompt:**
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Reduce warnings in unit tests
### Motivation
The Optuna's unit tests raise many warnings, which make it hard to recognize unexpected warning messages.
### Description
We can filter expected warnings using `@pytest.mark.filterwarnings` or `warnings.simplefilter("ignore")`. Some warnings may be unexpected or unsuitable. We should fix tests or Optuna's codes in such cases.
This issue is contribution-welcome. We welcome any fixes, even how trivial.
### Alternatives (optional)
_No response_
### Additional context (optional)
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `optuna/visualization/matplotlib/_param_importances.py`
Content:
```
1 from typing import Callable
2 from typing import List
3 from typing import Optional
4
5 import numpy as np
6
7 from optuna._experimental import experimental_func
8 from optuna.importance._base import BaseImportanceEvaluator
9 from optuna.logging import get_logger
10 from optuna.study import Study
11 from optuna.trial import FrozenTrial
12 from optuna.visualization._param_importances import _get_importances_info
13 from optuna.visualization._param_importances import _ImportancesInfo
14 from optuna.visualization.matplotlib._matplotlib_imports import _imports
15
16
17 if _imports.is_successful():
18 from optuna.visualization.matplotlib._matplotlib_imports import Axes
19 from optuna.visualization.matplotlib._matplotlib_imports import cm
20 from optuna.visualization.matplotlib._matplotlib_imports import plt
21
22
23 _logger = get_logger(__name__)
24
25
26 AXES_PADDING_RATIO = 1.05
27
28
29 @experimental_func("2.2.0")
30 def plot_param_importances(
31 study: Study,
32 evaluator: Optional[BaseImportanceEvaluator] = None,
33 params: Optional[List[str]] = None,
34 *,
35 target: Optional[Callable[[FrozenTrial], float]] = None,
36 target_name: str = "Objective Value",
37 ) -> "Axes":
38 """Plot hyperparameter importances with Matplotlib.
39
40 .. seealso::
41 Please refer to :func:`optuna.visualization.plot_param_importances` for an example.
42
43 Example:
44
45 The following code snippet shows how to plot hyperparameter importances.
46
47 .. plot::
48
49 import optuna
50
51
52 def objective(trial):
53 x = trial.suggest_int("x", 0, 2)
54 y = trial.suggest_float("y", -1.0, 1.0)
55 z = trial.suggest_float("z", 0.0, 1.5)
56 return x ** 2 + y ** 3 - z ** 4
57
58
59 sampler = optuna.samplers.RandomSampler(seed=10)
60 study = optuna.create_study(sampler=sampler)
61 study.optimize(objective, n_trials=100)
62
63 optuna.visualization.matplotlib.plot_param_importances(study)
64
65 Args:
66 study:
67 An optimized study.
68 evaluator:
69 An importance evaluator object that specifies which algorithm to base the importance
70 assessment on.
71 Defaults to
72 :class:`~optuna.importance.FanovaImportanceEvaluator`.
73 params:
74 A list of names of parameters to assess.
75 If :obj:`None`, all parameters that are present in all of the completed trials are
76 assessed.
77 target:
78 A function to specify the value to display. If it is :obj:`None` and ``study`` is being
79 used for single-objective optimization, the objective values are plotted.
80
81 .. note::
82 Specify this argument if ``study`` is being used for multi-objective
83 optimization. For example, to get the hyperparameter importance of the first
84 objective, use ``target=lambda t: t.values[0]`` for the target parameter.
85 target_name:
86 Target's name to display on the axis label.
87
88 Returns:
89 A :class:`matplotlib.axes.Axes` object.
90 """
91
92 _imports.check()
93
94 importances_info = _get_importances_info(study, evaluator, params, target, target_name)
95 return _get_importances_plot(importances_info)
96
97
98 def _get_importances_plot(info: _ImportancesInfo) -> "Axes":
99 # Set up the graph style.
100 plt.style.use("ggplot") # Use ggplot style sheet for similar outputs to plotly.
101 fig, ax = plt.subplots()
102 ax.set_title("Hyperparameter Importances")
103 ax.set_xlabel(f"Importance for {info.target_name}")
104 ax.set_ylabel("Hyperparameter")
105
106 param_names = info.param_names
107 pos = np.arange(len(param_names))
108 importance_values = info.importance_values
109
110 if len(importance_values) == 0:
111 return ax
112
113 # Draw horizontal bars.
114 ax.barh(
115 pos,
116 importance_values,
117 align="center",
118 color=cm.get_cmap("tab20c")(0),
119 tick_label=param_names,
120 )
121
122 renderer = fig.canvas.get_renderer()
123 for idx, (val, label) in enumerate(zip(importance_values, info.importance_labels)):
124 text = ax.text(val, idx, label, va="center")
125
126 # Sometimes horizontal axis needs to be re-scaled
127 # to avoid text going over plot area.
128 bbox = text.get_window_extent(renderer)
129 bbox = bbox.transformed(ax.transData.inverted())
130 _, plot_xmax = ax.get_xlim()
131 bbox_xmax = bbox.xmax
132
133 if bbox_xmax > plot_xmax:
134 ax.set_xlim(xmax=AXES_PADDING_RATIO * bbox_xmax)
135
136 return ax
137
```
Path: `optuna/visualization/matplotlib/_matplotlib_imports.py`
Content:
```
1 from packaging import version
2
3 from optuna._imports import try_import
4
5
6 with try_import() as _imports: # NOQA
7 # TODO(ytknzw): Add specific imports.
8 import matplotlib # NOQA
9 from matplotlib import __version__ as matplotlib_version
10 from matplotlib import cm # NOQA
11 from matplotlib import pyplot as plt # NOQA
12 from matplotlib.axes._axes import Axes # NOQA
13 from matplotlib.collections import LineCollection # NOQA
14 from matplotlib.collections import PathCollection # NOQA
15 from matplotlib.colors import Colormap # NOQA
16 from matplotlib.contour import ContourSet # NOQA
17 from matplotlib.patches import Rectangle # NOQA
18
19 # TODO(ytknzw): Set precise version.
20 if version.parse(matplotlib_version) < version.parse("3.0.0"):
21 raise ImportError(
22 "Your version of Matplotlib is " + matplotlib_version + " . "
23 "Please install Matplotlib version 3.0.0 or higher. "
24 "Matplotlib can be installed by executing `$ pip install -U matplotlib>=3.0.0`. "
25 "For further information, please refer to the installation guide of Matplotlib. ",
26 name="matplotlib",
27 )
28
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
**golden_diff:**

diff --git a/optuna/visualization/matplotlib/_matplotlib_imports.py b/optuna/visualization/matplotlib/_matplotlib_imports.py
--- a/optuna/visualization/matplotlib/_matplotlib_imports.py
+++ b/optuna/visualization/matplotlib/_matplotlib_imports.py
@@ -7,7 +7,6 @@
# TODO(ytknzw): Add specific imports.
import matplotlib # NOQA
from matplotlib import __version__ as matplotlib_version
- from matplotlib import cm # NOQA
from matplotlib import pyplot as plt # NOQA
from matplotlib.axes._axes import Axes # NOQA
from matplotlib.collections import LineCollection # NOQA
diff --git a/optuna/visualization/matplotlib/_param_importances.py b/optuna/visualization/matplotlib/_param_importances.py
--- a/optuna/visualization/matplotlib/_param_importances.py
+++ b/optuna/visualization/matplotlib/_param_importances.py
@@ -16,7 +16,6 @@
if _imports.is_successful():
from optuna.visualization.matplotlib._matplotlib_imports import Axes
- from optuna.visualization.matplotlib._matplotlib_imports import cm
from optuna.visualization.matplotlib._matplotlib_imports import plt
@@ -115,7 +114,7 @@
pos,
importance_values,
align="center",
- color=cm.get_cmap("tab20c")(0),
+ color=plt.get_cmap("tab20c")(0),
tick_label=param_names,
)
**verification_info:**

{"golden_diff": "diff --git a/optuna/visualization/matplotlib/_matplotlib_imports.py b/optuna/visualization/matplotlib/_matplotlib_imports.py\n--- a/optuna/visualization/matplotlib/_matplotlib_imports.py\n+++ b/optuna/visualization/matplotlib/_matplotlib_imports.py\n@@ -7,7 +7,6 @@\n # TODO(ytknzw): Add specific imports.\n import matplotlib # NOQA\n from matplotlib import __version__ as matplotlib_version\n- from matplotlib import cm # NOQA\n from matplotlib import pyplot as plt # NOQA\n from matplotlib.axes._axes import Axes # NOQA\n from matplotlib.collections import LineCollection # NOQA\ndiff --git a/optuna/visualization/matplotlib/_param_importances.py b/optuna/visualization/matplotlib/_param_importances.py\n--- a/optuna/visualization/matplotlib/_param_importances.py\n+++ b/optuna/visualization/matplotlib/_param_importances.py\n@@ -16,7 +16,6 @@\n \n if _imports.is_successful():\n from optuna.visualization.matplotlib._matplotlib_imports import Axes\n- from optuna.visualization.matplotlib._matplotlib_imports import cm\n from optuna.visualization.matplotlib._matplotlib_imports import plt\n \n \n@@ -115,7 +114,7 @@\n pos,\n importance_values,\n align=\"center\",\n- color=cm.get_cmap(\"tab20c\")(0),\n+ color=plt.get_cmap(\"tab20c\")(0),\n tick_label=param_names,\n )\n", "issue": "Reduce warnings in unit tests\n### Motivation\n\nThe Optuna's unit tests raise many warnings, which make it hard to recognize unexpected warning messages.\n\n### Description\n\nWe can filter expected warnings using `@pytest.mark.filterwarnings` or `warnings.simplefilter(\"ignore\")`. Some warnings may be unexpected or unsuitable. We should fix tests or Optuna's codes in such cases.\r\n\r\nThis issue is contribution-welcome. We welcome any fixes, even how trivial.\n\n### Alternatives (optional)\n\n_No response_\n\n### Additional context (optional)\n\n_No response_\n", "before_files": [{"content": "from typing import Callable\nfrom typing import List\nfrom typing import Optional\n\nimport numpy as np\n\nfrom optuna._experimental import experimental_func\nfrom optuna.importance._base import BaseImportanceEvaluator\nfrom optuna.logging import get_logger\nfrom optuna.study import Study\nfrom optuna.trial import FrozenTrial\nfrom optuna.visualization._param_importances import _get_importances_info\nfrom optuna.visualization._param_importances import _ImportancesInfo\nfrom optuna.visualization.matplotlib._matplotlib_imports import _imports\n\n\nif _imports.is_successful():\n from optuna.visualization.matplotlib._matplotlib_imports import Axes\n from optuna.visualization.matplotlib._matplotlib_imports import cm\n from optuna.visualization.matplotlib._matplotlib_imports import plt\n\n\n_logger = get_logger(__name__)\n\n\nAXES_PADDING_RATIO = 1.05\n\n\n@experimental_func(\"2.2.0\")\ndef plot_param_importances(\n study: Study,\n evaluator: Optional[BaseImportanceEvaluator] = None,\n params: Optional[List[str]] = None,\n *,\n target: Optional[Callable[[FrozenTrial], float]] = None,\n target_name: str = \"Objective Value\",\n) -> \"Axes\":\n \"\"\"Plot hyperparameter importances with Matplotlib.\n\n .. seealso::\n Please refer to :func:`optuna.visualization.plot_param_importances` for an example.\n\n Example:\n\n The following code snippet shows how to plot hyperparameter importances.\n\n .. 
plot::\n\n import optuna\n\n\n def objective(trial):\n x = trial.suggest_int(\"x\", 0, 2)\n y = trial.suggest_float(\"y\", -1.0, 1.0)\n z = trial.suggest_float(\"z\", 0.0, 1.5)\n return x ** 2 + y ** 3 - z ** 4\n\n\n sampler = optuna.samplers.RandomSampler(seed=10)\n study = optuna.create_study(sampler=sampler)\n study.optimize(objective, n_trials=100)\n\n optuna.visualization.matplotlib.plot_param_importances(study)\n\n Args:\n study:\n An optimized study.\n evaluator:\n An importance evaluator object that specifies which algorithm to base the importance\n assessment on.\n Defaults to\n :class:`~optuna.importance.FanovaImportanceEvaluator`.\n params:\n A list of names of parameters to assess.\n If :obj:`None`, all parameters that are present in all of the completed trials are\n assessed.\n target:\n A function to specify the value to display. If it is :obj:`None` and ``study`` is being\n used for single-objective optimization, the objective values are plotted.\n\n .. note::\n Specify this argument if ``study`` is being used for multi-objective\n optimization. For example, to get the hyperparameter importance of the first\n objective, use ``target=lambda t: t.values[0]`` for the target parameter.\n target_name:\n Target's name to display on the axis label.\n\n Returns:\n A :class:`matplotlib.axes.Axes` object.\n \"\"\"\n\n _imports.check()\n\n importances_info = _get_importances_info(study, evaluator, params, target, target_name)\n return _get_importances_plot(importances_info)\n\n\ndef _get_importances_plot(info: _ImportancesInfo) -> \"Axes\":\n # Set up the graph style.\n plt.style.use(\"ggplot\") # Use ggplot style sheet for similar outputs to plotly.\n fig, ax = plt.subplots()\n ax.set_title(\"Hyperparameter Importances\")\n ax.set_xlabel(f\"Importance for {info.target_name}\")\n ax.set_ylabel(\"Hyperparameter\")\n\n param_names = info.param_names\n pos = np.arange(len(param_names))\n importance_values = info.importance_values\n\n if len(importance_values) == 0:\n return ax\n\n # Draw horizontal bars.\n ax.barh(\n pos,\n importance_values,\n align=\"center\",\n color=cm.get_cmap(\"tab20c\")(0),\n tick_label=param_names,\n )\n\n renderer = fig.canvas.get_renderer()\n for idx, (val, label) in enumerate(zip(importance_values, info.importance_labels)):\n text = ax.text(val, idx, label, va=\"center\")\n\n # Sometimes horizontal axis needs to be re-scaled\n # to avoid text going over plot area.\n bbox = text.get_window_extent(renderer)\n bbox = bbox.transformed(ax.transData.inverted())\n _, plot_xmax = ax.get_xlim()\n bbox_xmax = bbox.xmax\n\n if bbox_xmax > plot_xmax:\n ax.set_xlim(xmax=AXES_PADDING_RATIO * bbox_xmax)\n\n return ax\n", "path": "optuna/visualization/matplotlib/_param_importances.py"}, {"content": "from packaging import version\n\nfrom optuna._imports import try_import\n\n\nwith try_import() as _imports: # NOQA\n # TODO(ytknzw): Add specific imports.\n import matplotlib # NOQA\n from matplotlib import __version__ as matplotlib_version\n from matplotlib import cm # NOQA\n from matplotlib import pyplot as plt # NOQA\n from matplotlib.axes._axes import Axes # NOQA\n from matplotlib.collections import LineCollection # NOQA\n from matplotlib.collections import PathCollection # NOQA\n from matplotlib.colors import Colormap # NOQA\n from matplotlib.contour import ContourSet # NOQA\n from matplotlib.patches import Rectangle # NOQA\n\n # TODO(ytknzw): Set precise version.\n if version.parse(matplotlib_version) < version.parse(\"3.0.0\"):\n raise ImportError(\n \"Your version of 
Matplotlib is \" + matplotlib_version + \" . \"\n \"Please install Matplotlib version 3.0.0 or higher. \"\n \"Matplotlib can be installed by executing `$ pip install -U matplotlib>=3.0.0`. \"\n \"For further information, please refer to the installation guide of Matplotlib. \",\n name=\"matplotlib\",\n )\n", "path": "optuna/visualization/matplotlib/_matplotlib_imports.py"}], "after_files": [{"content": "from typing import Callable\nfrom typing import List\nfrom typing import Optional\n\nimport numpy as np\n\nfrom optuna._experimental import experimental_func\nfrom optuna.importance._base import BaseImportanceEvaluator\nfrom optuna.logging import get_logger\nfrom optuna.study import Study\nfrom optuna.trial import FrozenTrial\nfrom optuna.visualization._param_importances import _get_importances_info\nfrom optuna.visualization._param_importances import _ImportancesInfo\nfrom optuna.visualization.matplotlib._matplotlib_imports import _imports\n\n\nif _imports.is_successful():\n from optuna.visualization.matplotlib._matplotlib_imports import Axes\n from optuna.visualization.matplotlib._matplotlib_imports import plt\n\n\n_logger = get_logger(__name__)\n\n\nAXES_PADDING_RATIO = 1.05\n\n\n@experimental_func(\"2.2.0\")\ndef plot_param_importances(\n study: Study,\n evaluator: Optional[BaseImportanceEvaluator] = None,\n params: Optional[List[str]] = None,\n *,\n target: Optional[Callable[[FrozenTrial], float]] = None,\n target_name: str = \"Objective Value\",\n) -> \"Axes\":\n \"\"\"Plot hyperparameter importances with Matplotlib.\n\n .. seealso::\n Please refer to :func:`optuna.visualization.plot_param_importances` for an example.\n\n Example:\n\n The following code snippet shows how to plot hyperparameter importances.\n\n .. plot::\n\n import optuna\n\n\n def objective(trial):\n x = trial.suggest_int(\"x\", 0, 2)\n y = trial.suggest_float(\"y\", -1.0, 1.0)\n z = trial.suggest_float(\"z\", 0.0, 1.5)\n return x ** 2 + y ** 3 - z ** 4\n\n\n sampler = optuna.samplers.RandomSampler(seed=10)\n study = optuna.create_study(sampler=sampler)\n study.optimize(objective, n_trials=100)\n\n optuna.visualization.matplotlib.plot_param_importances(study)\n\n Args:\n study:\n An optimized study.\n evaluator:\n An importance evaluator object that specifies which algorithm to base the importance\n assessment on.\n Defaults to\n :class:`~optuna.importance.FanovaImportanceEvaluator`.\n params:\n A list of names of parameters to assess.\n If :obj:`None`, all parameters that are present in all of the completed trials are\n assessed.\n target:\n A function to specify the value to display. If it is :obj:`None` and ``study`` is being\n used for single-objective optimization, the objective values are plotted.\n\n .. note::\n Specify this argument if ``study`` is being used for multi-objective\n optimization. 
For example, to get the hyperparameter importance of the first\n objective, use ``target=lambda t: t.values[0]`` for the target parameter.\n target_name:\n Target's name to display on the axis label.\n\n Returns:\n A :class:`matplotlib.axes.Axes` object.\n \"\"\"\n\n _imports.check()\n\n importances_info = _get_importances_info(study, evaluator, params, target, target_name)\n return _get_importances_plot(importances_info)\n\n\ndef _get_importances_plot(info: _ImportancesInfo) -> \"Axes\":\n # Set up the graph style.\n plt.style.use(\"ggplot\") # Use ggplot style sheet for similar outputs to plotly.\n fig, ax = plt.subplots()\n ax.set_title(\"Hyperparameter Importances\")\n ax.set_xlabel(f\"Importance for {info.target_name}\")\n ax.set_ylabel(\"Hyperparameter\")\n\n param_names = info.param_names\n pos = np.arange(len(param_names))\n importance_values = info.importance_values\n\n if len(importance_values) == 0:\n return ax\n\n # Draw horizontal bars.\n ax.barh(\n pos,\n importance_values,\n align=\"center\",\n color=plt.get_cmap(\"tab20c\")(0),\n tick_label=param_names,\n )\n\n renderer = fig.canvas.get_renderer()\n for idx, (val, label) in enumerate(zip(importance_values, info.importance_labels)):\n text = ax.text(val, idx, label, va=\"center\")\n\n # Sometimes horizontal axis needs to be re-scaled\n # to avoid text going over plot area.\n bbox = text.get_window_extent(renderer)\n bbox = bbox.transformed(ax.transData.inverted())\n _, plot_xmax = ax.get_xlim()\n bbox_xmax = bbox.xmax\n\n if bbox_xmax > plot_xmax:\n ax.set_xlim(xmax=AXES_PADDING_RATIO * bbox_xmax)\n\n return ax\n", "path": "optuna/visualization/matplotlib/_param_importances.py"}, {"content": "from packaging import version\n\nfrom optuna._imports import try_import\n\n\nwith try_import() as _imports: # NOQA\n # TODO(ytknzw): Add specific imports.\n import matplotlib # NOQA\n from matplotlib import __version__ as matplotlib_version\n from matplotlib import pyplot as plt # NOQA\n from matplotlib.axes._axes import Axes # NOQA\n from matplotlib.collections import LineCollection # NOQA\n from matplotlib.collections import PathCollection # NOQA\n from matplotlib.colors import Colormap # NOQA\n from matplotlib.contour import ContourSet # NOQA\n from matplotlib.patches import Rectangle # NOQA\n\n # TODO(ytknzw): Set precise version.\n if version.parse(matplotlib_version) < version.parse(\"3.0.0\"):\n raise ImportError(\n \"Your version of Matplotlib is \" + matplotlib_version + \" . \"\n \"Please install Matplotlib version 3.0.0 or higher. \"\n \"Matplotlib can be installed by executing `$ pip install -U matplotlib>=3.0.0`. \"\n \"For further information, please refer to the installation guide of Matplotlib. \",\n name=\"matplotlib\",\n )\n", "path": "optuna/visualization/matplotlib/_matplotlib_imports.py"}]}
- **num_tokens:** 2,064
- **num_tokens_diff:** 330

---

- **problem_id:** gh_patches_debug_28053
- **source:** rasdani/github-patches
- **task_type:** git_diff
- **in_source_id:** numba__numba-6281

**prompt:**
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Reminder to remove the `numba.jitclass` shim
This is a reminder to remove the `numba.jitclass` shim for 0.5x, at the moment x=1. Perhaps x should be 2, so that users have a bit more time. xref schedule: http://numba.pydata.org/numba-doc/latest/reference/deprecation.html#id7
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `numba/experimental/jitclass/decorators.py`
Content:
```
1 import warnings
2
3 from numba.core import types, config, errors
4 from numba.experimental.jitclass.base import register_class_type, ClassBuilder
5
6
7 def jitclass(spec):
8 """
9 A decorator for creating a jitclass.
10
11 **arguments**:
12
13 - spec:
14 Specifies the types of each field on this class.
15 Must be a dictionary or a sequence.
16 With a dictionary, use collections.OrderedDict for stable ordering.
17 With a sequence, it must contain 2-tuples of (fieldname, fieldtype).
18
19 **returns**:
20
21 A callable that takes a class object, which will be compiled.
22 """
23
24 def wrap(cls):
25 if config.DISABLE_JIT:
26 return cls
27 else:
28 return register_class_type(cls, spec, types.ClassType, ClassBuilder)
29
30 return wrap
31
32
33 def _warning_jitclass(spec):
34 """
35 Re-export of numba.experimental.jitclass with a warning.
36 To be used in numba/__init__.py.
37 This endpoint is deprecated.
38 """
39 url = ("https://numba.pydata.org/numba-doc/latest/reference/"
40 "deprecation.html#change-of-jitclass-location")
41
42 msg = ("The 'numba.jitclass' decorator has moved to "
43 "'numba.experimental.jitclass' to better reflect the experimental "
44 "nature of the functionality. Please update your imports to "
45 "accommodate this change and see {} for the time frame.".format(url))
46
47 warnings.warn(msg, category=errors.NumbaDeprecationWarning,
48 stacklevel=2)
49
50 return jitclass(spec)
51
```
Path: `numba/__init__.py`
Content:
```
1 """
2 Expose top-level symbols that are safe for import *
3 """
4
5 import platform
6 import re
7 import sys
8 import warnings
9
10 from ._version import get_versions
11 from numba.misc.init_utils import generate_version_info
12
13 __version__ = get_versions()['version']
14 version_info = generate_version_info(__version__)
15 del get_versions
16 del generate_version_info
17
18
19 from numba.core import config
20 from numba.testing import _runtests as runtests
21 from numba.core import types, errors
22
23 # Re-export typeof
24 from numba.misc.special import (
25 typeof, prange, pndindex, gdb, gdb_breakpoint, gdb_init,
26 literally, literal_unroll
27 )
28
29 # Re-export error classes
30 from numba.core.errors import *
31
32 # Re-export types itself
33 import numba.core.types as types
34
35 # Re-export all type names
36 from numba.core.types import *
37
38 # Re-export decorators
39 from numba.core.decorators import (cfunc, generated_jit, jit, njit, stencil,
40 jit_module)
41
42 # Re-export vectorize decorators and the thread layer querying function
43 from numba.np.ufunc import (vectorize, guvectorize, threading_layer,
44 get_num_threads, set_num_threads)
45
46 # Re-export Numpy helpers
47 from numba.np.numpy_support import carray, farray, from_dtype
48
49 # Re-export experimental
50 from numba import experimental
51
52 # Re-export experimental.jitclass as jitclass, this is deprecated
53 from numba.experimental.jitclass.decorators import _warning_jitclass as jitclass
54
55 # Initialize withcontexts
56 import numba.core.withcontexts
57 from numba.core.withcontexts import objmode_context as objmode
58
59 # Keep this for backward compatibility.
60 test = runtests.main
61
62
63 __all__ = """
64 cfunc
65 from_dtype
66 guvectorize
67 jit
68 experimental
69 njit
70 stencil
71 jit_module
72 jitclass
73 typeof
74 prange
75 gdb
76 gdb_breakpoint
77 gdb_init
78 vectorize
79 objmode
80 literal_unroll
81 get_num_threads
82 set_num_threads
83 """.split() + types.__all__ + errors.__all__
84
85
86 _min_llvmlite_version = (0, 33, 0)
87 _min_llvm_version = (9, 0, 0)
88
89 def _ensure_llvm():
90 """
91 Make sure llvmlite is operational.
92 """
93 import warnings
94 import llvmlite
95
96 # Only look at the the major, minor and bugfix version numbers.
97 # Ignore other stuffs
98 regex = re.compile(r'(\d+)\.(\d+).(\d+)')
99 m = regex.match(llvmlite.__version__)
100 if m:
101 ver = tuple(map(int, m.groups()))
102 if ver < _min_llvmlite_version:
103 msg = ("Numba requires at least version %d.%d.%d of llvmlite.\n"
104 "Installed version is %s.\n"
105 "Please update llvmlite." %
106 (_min_llvmlite_version + (llvmlite.__version__,)))
107 raise ImportError(msg)
108 else:
109 # Not matching?
110 warnings.warn("llvmlite version format not recognized!")
111
112 from llvmlite.binding import llvm_version_info, check_jit_execution
113
114 if llvm_version_info < _min_llvm_version:
115 msg = ("Numba requires at least version %d.%d.%d of LLVM.\n"
116 "Installed llvmlite is built against version %d.%d.%d.\n"
117 "Please update llvmlite." %
118 (_min_llvm_version + llvm_version_info))
119 raise ImportError(msg)
120
121 check_jit_execution()
122
123 def _ensure_critical_deps():
124 """
125 Make sure Python, NumPy and SciPy have supported versions.
126 """
127 from numba.np.numpy_support import numpy_version
128 from numba.core.utils import PYVERSION
129
130 if PYVERSION < (3, 6):
131 raise ImportError("Numba needs Python 3.6 or greater")
132
133 if numpy_version < (1, 15):
134 raise ImportError("Numba needs NumPy 1.15 or greater")
135
136 try:
137 import scipy
138 except ImportError:
139 pass
140 else:
141 sp_version = tuple(map(int, scipy.__version__.split('.')[:2]))
142 if sp_version < (1, 0):
143 raise ImportError("Numba requires SciPy version 1.0 or greater")
144
145
146 def _try_enable_svml():
147 """
148 Tries to enable SVML if configuration permits use and the library is found.
149 """
150 if not config.DISABLE_INTEL_SVML:
151 try:
152 if sys.platform.startswith('linux'):
153 llvmlite.binding.load_library_permanently("libsvml.so")
154 elif sys.platform.startswith('darwin'):
155 llvmlite.binding.load_library_permanently("libsvml.dylib")
156 elif sys.platform.startswith('win'):
157 llvmlite.binding.load_library_permanently("svml_dispmd")
158 else:
159 return False
160 # The SVML library is loaded, therefore SVML *could* be supported.
161 # Now see if LLVM has been compiled with the SVML support patch.
162 # If llvmlite has the checking function `has_svml` and it returns
163 # True, then LLVM was compiled with SVML support and the the setup
164 # for SVML can proceed. We err on the side of caution and if the
165 # checking function is missing, regardless of that being fine for
166 # most 0.23.{0,1} llvmlite instances (i.e. conda or pip installed),
167 # we assume that SVML was not compiled in. llvmlite 0.23.2 is a
168 # bugfix release with the checking function present that will always
169 # produce correct behaviour. For context see: #3006.
170 try:
171 if not getattr(llvmlite.binding.targets, "has_svml")():
172 # has detection function, but no svml compiled in, therefore
173 # disable SVML
174 return False
175 except AttributeError:
176 if platform.machine() == 'x86_64' and config.DEBUG:
177 msg = ("SVML was found but llvmlite >= 0.23.2 is "
178 "needed to support it.")
179 warnings.warn(msg)
180 # does not have detection function, cannot detect reliably,
181 # disable SVML.
182 return False
183
184 # All is well, detection function present and reports SVML is
185 # compiled in, set the vector library to SVML.
186 llvmlite.binding.set_option('SVML', '-vector-library=SVML')
187 return True
188 except:
189 if platform.machine() == 'x86_64' and config.DEBUG:
190 warnings.warn("SVML was not found/could not be loaded.")
191 return False
192
193 _ensure_llvm()
194 _ensure_critical_deps()
195
196 # we know llvmlite is working as the above tests passed, import it now as SVML
197 # needs to mutate runtime options (sets the `-vector-library`).
198 import llvmlite
199
200 """
201 Is set to True if Intel SVML is in use.
202 """
203 config.USING_SVML = _try_enable_svml()
204
205
206 # ---------------------- WARNING WARNING WARNING ----------------------------
207 # The following imports occur below here (SVML init) because somewhere in their
208 # import sequence they have a `@njit` wrapped function. This triggers too early
209 # a bind to the underlying LLVM libraries which then irretrievably sets the LLVM
210 # SVML state to "no SVML". See https://github.com/numba/numba/issues/4689 for
211 # context.
212 # ---------------------- WARNING WARNING WARNING ----------------------------
213
214 # Initialize typed containers
215 import numba.typed
216
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
**golden_diff:**

diff --git a/numba/__init__.py b/numba/__init__.py
--- a/numba/__init__.py
+++ b/numba/__init__.py
@@ -49,9 +49,6 @@
# Re-export experimental
from numba import experimental
-# Re-export experimental.jitclass as jitclass, this is deprecated
-from numba.experimental.jitclass.decorators import _warning_jitclass as jitclass
-
# Initialize withcontexts
import numba.core.withcontexts
from numba.core.withcontexts import objmode_context as objmode
diff --git a/numba/experimental/jitclass/decorators.py b/numba/experimental/jitclass/decorators.py
--- a/numba/experimental/jitclass/decorators.py
+++ b/numba/experimental/jitclass/decorators.py
@@ -1,6 +1,4 @@
-import warnings
-
-from numba.core import types, config, errors
+from numba.core import types, config
from numba.experimental.jitclass.base import register_class_type, ClassBuilder
@@ -28,23 +26,3 @@
return register_class_type(cls, spec, types.ClassType, ClassBuilder)
return wrap
-
-
-def _warning_jitclass(spec):
- """
- Re-export of numba.experimental.jitclass with a warning.
- To be used in numba/__init__.py.
- This endpoint is deprecated.
- """
- url = ("https://numba.pydata.org/numba-doc/latest/reference/"
- "deprecation.html#change-of-jitclass-location")
-
- msg = ("The 'numba.jitclass' decorator has moved to "
- "'numba.experimental.jitclass' to better reflect the experimental "
- "nature of the functionality. Please update your imports to "
- "accommodate this change and see {} for the time frame.".format(url))
-
- warnings.warn(msg, category=errors.NumbaDeprecationWarning,
- stacklevel=2)
-
- return jitclass(spec)
**verification_info:**

{"golden_diff": "diff --git a/numba/__init__.py b/numba/__init__.py\n--- a/numba/__init__.py\n+++ b/numba/__init__.py\n@@ -49,9 +49,6 @@\n # Re-export experimental\n from numba import experimental\n \n-# Re-export experimental.jitclass as jitclass, this is deprecated\n-from numba.experimental.jitclass.decorators import _warning_jitclass as jitclass\n-\n # Initialize withcontexts\n import numba.core.withcontexts\n from numba.core.withcontexts import objmode_context as objmode\ndiff --git a/numba/experimental/jitclass/decorators.py b/numba/experimental/jitclass/decorators.py\n--- a/numba/experimental/jitclass/decorators.py\n+++ b/numba/experimental/jitclass/decorators.py\n@@ -1,6 +1,4 @@\n-import warnings\n-\n-from numba.core import types, config, errors\n+from numba.core import types, config\n from numba.experimental.jitclass.base import register_class_type, ClassBuilder\n \n \n@@ -28,23 +26,3 @@\n return register_class_type(cls, spec, types.ClassType, ClassBuilder)\n \n return wrap\n-\n-\n-def _warning_jitclass(spec):\n- \"\"\"\n- Re-export of numba.experimental.jitclass with a warning.\n- To be used in numba/__init__.py.\n- This endpoint is deprecated.\n- \"\"\"\n- url = (\"https://numba.pydata.org/numba-doc/latest/reference/\"\n- \"deprecation.html#change-of-jitclass-location\")\n-\n- msg = (\"The 'numba.jitclass' decorator has moved to \"\n- \"'numba.experimental.jitclass' to better reflect the experimental \"\n- \"nature of the functionality. Please update your imports to \"\n- \"accommodate this change and see {} for the time frame.\".format(url))\n-\n- warnings.warn(msg, category=errors.NumbaDeprecationWarning,\n- stacklevel=2)\n-\n- return jitclass(spec)\n", "issue": "Reminder to remove the `numba.jitclass` shim\nThis is a reminder to remove the `numba.jitclass` shim for 0.5x, at the moment x=1. Perhaps x should be 2, so that users have a bit more time. xref schedule: http://numba.pydata.org/numba-doc/latest/reference/deprecation.html#id7\n", "before_files": [{"content": "import warnings\n\nfrom numba.core import types, config, errors\nfrom numba.experimental.jitclass.base import register_class_type, ClassBuilder\n\n\ndef jitclass(spec):\n \"\"\"\n A decorator for creating a jitclass.\n\n **arguments**:\n\n - spec:\n Specifies the types of each field on this class.\n Must be a dictionary or a sequence.\n With a dictionary, use collections.OrderedDict for stable ordering.\n With a sequence, it must contain 2-tuples of (fieldname, fieldtype).\n\n **returns**:\n\n A callable that takes a class object, which will be compiled.\n \"\"\"\n\n def wrap(cls):\n if config.DISABLE_JIT:\n return cls\n else:\n return register_class_type(cls, spec, types.ClassType, ClassBuilder)\n\n return wrap\n\n\ndef _warning_jitclass(spec):\n \"\"\"\n Re-export of numba.experimental.jitclass with a warning.\n To be used in numba/__init__.py.\n This endpoint is deprecated.\n \"\"\"\n url = (\"https://numba.pydata.org/numba-doc/latest/reference/\"\n \"deprecation.html#change-of-jitclass-location\")\n\n msg = (\"The 'numba.jitclass' decorator has moved to \"\n \"'numba.experimental.jitclass' to better reflect the experimental \"\n \"nature of the functionality. 
Please update your imports to \"\n \"accommodate this change and see {} for the time frame.\".format(url))\n\n warnings.warn(msg, category=errors.NumbaDeprecationWarning,\n stacklevel=2)\n\n return jitclass(spec)\n", "path": "numba/experimental/jitclass/decorators.py"}, {"content": "\"\"\"\nExpose top-level symbols that are safe for import *\n\"\"\"\n\nimport platform\nimport re\nimport sys\nimport warnings\n\nfrom ._version import get_versions\nfrom numba.misc.init_utils import generate_version_info\n\n__version__ = get_versions()['version']\nversion_info = generate_version_info(__version__)\ndel get_versions\ndel generate_version_info\n\n\nfrom numba.core import config\nfrom numba.testing import _runtests as runtests\nfrom numba.core import types, errors\n\n# Re-export typeof\nfrom numba.misc.special import (\n typeof, prange, pndindex, gdb, gdb_breakpoint, gdb_init,\n literally, literal_unroll\n)\n\n# Re-export error classes\nfrom numba.core.errors import *\n\n# Re-export types itself\nimport numba.core.types as types\n\n# Re-export all type names\nfrom numba.core.types import *\n\n# Re-export decorators\nfrom numba.core.decorators import (cfunc, generated_jit, jit, njit, stencil,\n jit_module)\n\n# Re-export vectorize decorators and the thread layer querying function\nfrom numba.np.ufunc import (vectorize, guvectorize, threading_layer,\n get_num_threads, set_num_threads)\n\n# Re-export Numpy helpers\nfrom numba.np.numpy_support import carray, farray, from_dtype\n\n# Re-export experimental\nfrom numba import experimental\n\n# Re-export experimental.jitclass as jitclass, this is deprecated\nfrom numba.experimental.jitclass.decorators import _warning_jitclass as jitclass\n\n# Initialize withcontexts\nimport numba.core.withcontexts\nfrom numba.core.withcontexts import objmode_context as objmode\n\n# Keep this for backward compatibility.\ntest = runtests.main\n\n\n__all__ = \"\"\"\n cfunc\n from_dtype\n guvectorize\n jit\n experimental\n njit\n stencil\n jit_module\n jitclass\n typeof\n prange\n gdb\n gdb_breakpoint\n gdb_init\n vectorize\n objmode\n literal_unroll\n get_num_threads\n set_num_threads\n \"\"\".split() + types.__all__ + errors.__all__\n\n\n_min_llvmlite_version = (0, 33, 0)\n_min_llvm_version = (9, 0, 0)\n\ndef _ensure_llvm():\n \"\"\"\n Make sure llvmlite is operational.\n \"\"\"\n import warnings\n import llvmlite\n\n # Only look at the the major, minor and bugfix version numbers.\n # Ignore other stuffs\n regex = re.compile(r'(\\d+)\\.(\\d+).(\\d+)')\n m = regex.match(llvmlite.__version__)\n if m:\n ver = tuple(map(int, m.groups()))\n if ver < _min_llvmlite_version:\n msg = (\"Numba requires at least version %d.%d.%d of llvmlite.\\n\"\n \"Installed version is %s.\\n\"\n \"Please update llvmlite.\" %\n (_min_llvmlite_version + (llvmlite.__version__,)))\n raise ImportError(msg)\n else:\n # Not matching?\n warnings.warn(\"llvmlite version format not recognized!\")\n\n from llvmlite.binding import llvm_version_info, check_jit_execution\n\n if llvm_version_info < _min_llvm_version:\n msg = (\"Numba requires at least version %d.%d.%d of LLVM.\\n\"\n \"Installed llvmlite is built against version %d.%d.%d.\\n\"\n \"Please update llvmlite.\" %\n (_min_llvm_version + llvm_version_info))\n raise ImportError(msg)\n\n check_jit_execution()\n\ndef _ensure_critical_deps():\n \"\"\"\n Make sure Python, NumPy and SciPy have supported versions.\n \"\"\"\n from numba.np.numpy_support import numpy_version\n from numba.core.utils import PYVERSION\n\n if PYVERSION < (3, 6):\n raise 
ImportError(\"Numba needs Python 3.6 or greater\")\n\n if numpy_version < (1, 15):\n raise ImportError(\"Numba needs NumPy 1.15 or greater\")\n\n try:\n import scipy\n except ImportError:\n pass\n else:\n sp_version = tuple(map(int, scipy.__version__.split('.')[:2]))\n if sp_version < (1, 0):\n raise ImportError(\"Numba requires SciPy version 1.0 or greater\")\n\n\ndef _try_enable_svml():\n \"\"\"\n Tries to enable SVML if configuration permits use and the library is found.\n \"\"\"\n if not config.DISABLE_INTEL_SVML:\n try:\n if sys.platform.startswith('linux'):\n llvmlite.binding.load_library_permanently(\"libsvml.so\")\n elif sys.platform.startswith('darwin'):\n llvmlite.binding.load_library_permanently(\"libsvml.dylib\")\n elif sys.platform.startswith('win'):\n llvmlite.binding.load_library_permanently(\"svml_dispmd\")\n else:\n return False\n # The SVML library is loaded, therefore SVML *could* be supported.\n # Now see if LLVM has been compiled with the SVML support patch.\n # If llvmlite has the checking function `has_svml` and it returns\n # True, then LLVM was compiled with SVML support and the the setup\n # for SVML can proceed. We err on the side of caution and if the\n # checking function is missing, regardless of that being fine for\n # most 0.23.{0,1} llvmlite instances (i.e. conda or pip installed),\n # we assume that SVML was not compiled in. llvmlite 0.23.2 is a\n # bugfix release with the checking function present that will always\n # produce correct behaviour. For context see: #3006.\n try:\n if not getattr(llvmlite.binding.targets, \"has_svml\")():\n # has detection function, but no svml compiled in, therefore\n # disable SVML\n return False\n except AttributeError:\n if platform.machine() == 'x86_64' and config.DEBUG:\n msg = (\"SVML was found but llvmlite >= 0.23.2 is \"\n \"needed to support it.\")\n warnings.warn(msg)\n # does not have detection function, cannot detect reliably,\n # disable SVML.\n return False\n\n # All is well, detection function present and reports SVML is\n # compiled in, set the vector library to SVML.\n llvmlite.binding.set_option('SVML', '-vector-library=SVML')\n return True\n except:\n if platform.machine() == 'x86_64' and config.DEBUG:\n warnings.warn(\"SVML was not found/could not be loaded.\")\n return False\n\n_ensure_llvm()\n_ensure_critical_deps()\n\n# we know llvmlite is working as the above tests passed, import it now as SVML\n# needs to mutate runtime options (sets the `-vector-library`).\nimport llvmlite\n\n\"\"\"\nIs set to True if Intel SVML is in use.\n\"\"\"\nconfig.USING_SVML = _try_enable_svml()\n\n\n# ---------------------- WARNING WARNING WARNING ----------------------------\n# The following imports occur below here (SVML init) because somewhere in their\n# import sequence they have a `@njit` wrapped function. This triggers too early\n# a bind to the underlying LLVM libraries which then irretrievably sets the LLVM\n# SVML state to \"no SVML\". 
See https://github.com/numba/numba/issues/4689 for\n# context.\n# ---------------------- WARNING WARNING WARNING ----------------------------\n\n# Initialize typed containers\nimport numba.typed\n", "path": "numba/__init__.py"}], "after_files": [{"content": "from numba.core import types, config\nfrom numba.experimental.jitclass.base import register_class_type, ClassBuilder\n\n\ndef jitclass(spec):\n \"\"\"\n A decorator for creating a jitclass.\n\n **arguments**:\n\n - spec:\n Specifies the types of each field on this class.\n Must be a dictionary or a sequence.\n With a dictionary, use collections.OrderedDict for stable ordering.\n With a sequence, it must contain 2-tuples of (fieldname, fieldtype).\n\n **returns**:\n\n A callable that takes a class object, which will be compiled.\n \"\"\"\n\n def wrap(cls):\n if config.DISABLE_JIT:\n return cls\n else:\n return register_class_type(cls, spec, types.ClassType, ClassBuilder)\n\n return wrap\n", "path": "numba/experimental/jitclass/decorators.py"}, {"content": "\"\"\"\nExpose top-level symbols that are safe for import *\n\"\"\"\n\nimport platform\nimport re\nimport sys\nimport warnings\n\nfrom ._version import get_versions\nfrom numba.misc.init_utils import generate_version_info\n\n__version__ = get_versions()['version']\nversion_info = generate_version_info(__version__)\ndel get_versions\ndel generate_version_info\n\n\nfrom numba.core import config\nfrom numba.testing import _runtests as runtests\nfrom numba.core import types, errors\n\n# Re-export typeof\nfrom numba.misc.special import (\n typeof, prange, pndindex, gdb, gdb_breakpoint, gdb_init,\n literally, literal_unroll\n)\n\n# Re-export error classes\nfrom numba.core.errors import *\n\n# Re-export types itself\nimport numba.core.types as types\n\n# Re-export all type names\nfrom numba.core.types import *\n\n# Re-export decorators\nfrom numba.core.decorators import (cfunc, generated_jit, jit, njit, stencil,\n jit_module)\n\n# Re-export vectorize decorators and the thread layer querying function\nfrom numba.np.ufunc import (vectorize, guvectorize, threading_layer,\n get_num_threads, set_num_threads)\n\n# Re-export Numpy helpers\nfrom numba.np.numpy_support import carray, farray, from_dtype\n\n# Re-export experimental\nfrom numba import experimental\n\n# Initialize withcontexts\nimport numba.core.withcontexts\nfrom numba.core.withcontexts import objmode_context as objmode\n\n# Keep this for backward compatibility.\ntest = runtests.main\n\n\n__all__ = \"\"\"\n cfunc\n from_dtype\n guvectorize\n jit\n experimental\n njit\n stencil\n jit_module\n jitclass\n typeof\n prange\n gdb\n gdb_breakpoint\n gdb_init\n vectorize\n objmode\n literal_unroll\n get_num_threads\n set_num_threads\n \"\"\".split() + types.__all__ + errors.__all__\n\n\n_min_llvmlite_version = (0, 33, 0)\n_min_llvm_version = (9, 0, 0)\n\ndef _ensure_llvm():\n \"\"\"\n Make sure llvmlite is operational.\n \"\"\"\n import warnings\n import llvmlite\n\n # Only look at the the major, minor and bugfix version numbers.\n # Ignore other stuffs\n regex = re.compile(r'(\\d+)\\.(\\d+).(\\d+)')\n m = regex.match(llvmlite.__version__)\n if m:\n ver = tuple(map(int, m.groups()))\n if ver < _min_llvmlite_version:\n msg = (\"Numba requires at least version %d.%d.%d of llvmlite.\\n\"\n \"Installed version is %s.\\n\"\n \"Please update llvmlite.\" %\n (_min_llvmlite_version + (llvmlite.__version__,)))\n raise ImportError(msg)\n else:\n # Not matching?\n warnings.warn(\"llvmlite version format not recognized!\")\n\n from llvmlite.binding import 
llvm_version_info, check_jit_execution\n\n if llvm_version_info < _min_llvm_version:\n msg = (\"Numba requires at least version %d.%d.%d of LLVM.\\n\"\n \"Installed llvmlite is built against version %d.%d.%d.\\n\"\n \"Please update llvmlite.\" %\n (_min_llvm_version + llvm_version_info))\n raise ImportError(msg)\n\n check_jit_execution()\n\ndef _ensure_critical_deps():\n \"\"\"\n Make sure Python, NumPy and SciPy have supported versions.\n \"\"\"\n from numba.np.numpy_support import numpy_version\n from numba.core.utils import PYVERSION\n\n if PYVERSION < (3, 6):\n raise ImportError(\"Numba needs Python 3.6 or greater\")\n\n if numpy_version < (1, 15):\n raise ImportError(\"Numba needs NumPy 1.15 or greater\")\n\n try:\n import scipy\n except ImportError:\n pass\n else:\n sp_version = tuple(map(int, scipy.__version__.split('.')[:2]))\n if sp_version < (1, 0):\n raise ImportError(\"Numba requires SciPy version 1.0 or greater\")\n\n\ndef _try_enable_svml():\n \"\"\"\n Tries to enable SVML if configuration permits use and the library is found.\n \"\"\"\n if not config.DISABLE_INTEL_SVML:\n try:\n if sys.platform.startswith('linux'):\n llvmlite.binding.load_library_permanently(\"libsvml.so\")\n elif sys.platform.startswith('darwin'):\n llvmlite.binding.load_library_permanently(\"libsvml.dylib\")\n elif sys.platform.startswith('win'):\n llvmlite.binding.load_library_permanently(\"svml_dispmd\")\n else:\n return False\n # The SVML library is loaded, therefore SVML *could* be supported.\n # Now see if LLVM has been compiled with the SVML support patch.\n # If llvmlite has the checking function `has_svml` and it returns\n # True, then LLVM was compiled with SVML support and the the setup\n # for SVML can proceed. We err on the side of caution and if the\n # checking function is missing, regardless of that being fine for\n # most 0.23.{0,1} llvmlite instances (i.e. conda or pip installed),\n # we assume that SVML was not compiled in. llvmlite 0.23.2 is a\n # bugfix release with the checking function present that will always\n # produce correct behaviour. For context see: #3006.\n try:\n if not getattr(llvmlite.binding.targets, \"has_svml\")():\n # has detection function, but no svml compiled in, therefore\n # disable SVML\n return False\n except AttributeError:\n if platform.machine() == 'x86_64' and config.DEBUG:\n msg = (\"SVML was found but llvmlite >= 0.23.2 is \"\n \"needed to support it.\")\n warnings.warn(msg)\n # does not have detection function, cannot detect reliably,\n # disable SVML.\n return False\n\n # All is well, detection function present and reports SVML is\n # compiled in, set the vector library to SVML.\n llvmlite.binding.set_option('SVML', '-vector-library=SVML')\n return True\n except:\n if platform.machine() == 'x86_64' and config.DEBUG:\n warnings.warn(\"SVML was not found/could not be loaded.\")\n return False\n\n_ensure_llvm()\n_ensure_critical_deps()\n\n# we know llvmlite is working as the above tests passed, import it now as SVML\n# needs to mutate runtime options (sets the `-vector-library`).\nimport llvmlite\n\n\"\"\"\nIs set to True if Intel SVML is in use.\n\"\"\"\nconfig.USING_SVML = _try_enable_svml()\n\n\n# ---------------------- WARNING WARNING WARNING ----------------------------\n# The following imports occur below here (SVML init) because somewhere in their\n# import sequence they have a `@njit` wrapped function. This triggers too early\n# a bind to the underlying LLVM libraries which then irretrievably sets the LLVM\n# SVML state to \"no SVML\". 
See https://github.com/numba/numba/issues/4689 for\n# context.\n# ---------------------- WARNING WARNING WARNING ----------------------------\n\n# Initialize typed containers\nimport numba.typed\n", "path": "numba/__init__.py"}]}
- **num_tokens:** 3,048
- **num_tokens_diff:** 454

---

- **problem_id:** gh_patches_debug_5329
- **source:** rasdani/github-patches
- **task_type:** git_diff
- **in_source_id:** saleor__saleor-3928

**prompt:**
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Broken logo in docs
### Steps to reproduce the problem
1. https://saleor.readthedocs.io/en/latest/gettingstarted.html
2.

**System information**
Operating system: OSX
Browser: Chrome
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/conf.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 #
3 # saleor documentation build configuration file, created by
4 # sphinx-quickstart on Tue Jan 5 11:54:57 2016.
5 #
6 # This file is execfile()d with the current directory set to its
7 # containing dir.
8 #
9 # Note that not all possible configuration values are present in this
10 # autogenerated file.
11 #
12 # All configuration values have a default; values that are commented out
13 # serve to show the default.
14
15 import sys
16 import os
17 import shlex
18
19 # If extensions (or modules to document with autodoc) are in another directory,
20 # add these directories to sys.path here. If the directory is relative to the
21 # documentation root, use os.path.abspath to make it absolute, like shown here.
22 #sys.path.insert(0, os.path.abspath('.'))
23
24 # -- General configuration ------------------------------------------------
25
26 # If your documentation needs a minimal Sphinx version, state it here.
27 #needs_sphinx = '1.0'
28
29 # Add any Sphinx extension module names here, as strings. They can be
30 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
31 # ones.
32 extensions = [
33 'sphinx.ext.autodoc',
34 ]
35
36 # Add any paths that contain templates here, relative to this directory.
37 templates_path = ['_templates']
38
39 # The suffix(es) of source filenames.
40 # You can specify multiple suffix as a list of string:
41 # source_suffix = ['.rst', '.md']
42 source_suffix = '.rst'
43
44 # The encoding of source files.
45 #source_encoding = 'utf-8-sig'
46
47 # The master toctree document.
48 master_doc = 'index'
49
50 # General information about the project.
51 project = u'Saleor'
52 copyright = u'2015-2018, Mirumee Software'
53 author = u'Mirumee Software'
54
55 # The version info for the project you're documenting, acts as replacement for
56 # |version| and |release|, also used in various other places throughout the
57 # built documents.
58 #
59 # The short X.Y version.
60 version = u'2018.01'
61 # The full version, including alpha/beta/rc tags.
62 release = u'2018.01'
63
64 # The language for content autogenerated by Sphinx. Refer to documentation
65 # for a list of supported languages.
66 #
67 # This is also used if you do content translation via gettext catalogs.
68 # Usually you set "language" from the command line for these cases.
69 language = None
70
71 # There are two options for replacing |today|: either, you set today to some
72 # non-false value, then it is used:
73 #today = ''
74 # Else, today_fmt is used as the format for a strftime call.
75 #today_fmt = '%B %d, %Y'
76
77 # List of patterns, relative to source directory, that match files and
78 # directories to ignore when looking for source files.
79 exclude_patterns = ['_build']
80
81 # The reST default role (used for this markup: `text`) to use for all
82 # documents.
83 #default_role = None
84
85 # If true, '()' will be appended to :func: etc. cross-reference text.
86 #add_function_parentheses = True
87
88 # If true, the current module name will be prepended to all description
89 # unit titles (such as .. function::).
90 #add_module_names = True
91
92 # If true, sectionauthor and moduleauthor directives will be shown in the
93 # output. They are ignored by default.
94 #show_authors = False
95
96 # The name of the Pygments (syntax highlighting) style to use.
97 pygments_style = 'sphinx'
98
99 # A list of ignored prefixes for module index sorting.
100 #modindex_common_prefix = []
101
102 # If true, keep warnings as "system message" paragraphs in the built documents.
103 #keep_warnings = False
104
105 # If true, `todo` and `todoList` produce output, else they produce nothing.
106 todo_include_todos = False
107
108
109 # -- Options for HTML output ----------------------------------------------
110
111 # The theme to use for HTML and HTML Help pages. See the documentation for
112 # a list of builtin themes.
113 html_theme = 'sphinx_rtd_theme'
114
115 # Theme options are theme-specific and customize the look and feel of a theme
116 # further. For a list of options available for each theme, see the
117 # documentation.
118 #html_theme_options = {}
119
120 # Add any paths that contain custom themes here, relative to this directory.
121 #html_theme_path = []
122
123 # The name for this set of Sphinx documents. If None, it defaults to
124 # "<project> v<release> documentation".
125 html_title = 'Saleor documentation'
126
127 # A shorter title for the navigation bar. Default is the same as html_title.
128 #html_short_title = None
129
130 # The name of an image file (relative to this directory) to place at the top
131 # of the sidebar.
132 html_logo = 'logo-white.svg'
133
134 # The name of an image file (within the static path) to use as favicon of the
135 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
136 # pixels large.
137 #html_favicon = None
138
139 # Add any paths that contain custom static files (such as style sheets) here,
140 # relative to this directory. They are copied after the builtin static files,
141 # so a file named "default.css" will overwrite the builtin "default.css".
142 html_static_path = ['_static']
143
144 # Add any extra paths that contain custom files (such as robots.txt or
145 # .htaccess) here, relative to this directory. These files are copied
146 # directly to the root of the documentation.
147 #html_extra_path = []
148
149 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
150 # using the given strftime format.
151 #html_last_updated_fmt = '%b %d, %Y'
152
153 # If true, SmartyPants will be used to convert quotes and dashes to
154 # typographically correct entities.
155 #html_use_smartypants = True
156
157 # Custom sidebar templates, maps document names to template names.
158 #html_sidebars = {}
159
160 # Additional templates that should be rendered to pages, maps page names to
161 # template names.
162 #html_additional_pages = {}
163
164 # If false, no module index is generated.
165 #html_domain_indices = True
166
167 # If false, no index is generated.
168 #html_use_index = True
169
170 # If true, the index is split into individual pages for each letter.
171 #html_split_index = False
172
173 # If true, links to the reST sources are added to the pages.
174 #html_show_sourcelink = True
175
176 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
177 #html_show_sphinx = True
178
179 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
180 #html_show_copyright = True
181
182 # If true, an OpenSearch description file will be output, and all pages will
183 # contain a <link> tag referring to it. The value of this option must be the
184 # base URL from which the finished HTML is served.
185 #html_use_opensearch = ''
186
187 # This is the file name suffix for HTML files (e.g. ".xhtml").
188 #html_file_suffix = None
189
190 # Language to be used for generating the HTML full-text search index.
191 # Sphinx supports the following languages:
192 # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
193 # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
194 #html_search_language = 'en'
195
196 # A dictionary with options for the search language support, empty by default.
197 # Now only 'ja' uses this config value
198 #html_search_options = {'type': 'default'}
199
200 # The name of a javascript file (relative to the configuration directory) that
201 # implements a search results scorer. If empty, the default will be used.
202 #html_search_scorer = 'scorer.js'
203
204 # Output file base name for HTML help builder.
205 htmlhelp_basename = 'saleordoc'
206
207 # -- Options for LaTeX output ---------------------------------------------
208
209 latex_elements = {
210 # The paper size ('letterpaper' or 'a4paper').
211 #'papersize': 'letterpaper',
212
213 # The font size ('10pt', '11pt' or '12pt').
214 #'pointsize': '10pt',
215
216 # Additional stuff for the LaTeX preamble.
217 #'preamble': '',
218
219 # Latex figure (float) alignment
220 #'figure_align': 'htbp',
221 }
222
223 # Grouping the document tree into LaTeX files. List of tuples
224 # (source start file, target name, title,
225 # author, documentclass [howto, manual, or own class]).
226 latex_documents = [
227 (master_doc, 'saleor.tex', u'saleor Documentation',
228 u'Mirumee Software', 'manual'),
229 ]
230
231 # The name of an image file (relative to this directory) to place at the top of
232 # the title page.
233 #latex_logo = None
234
235 # For "manual" documents, if this is true, then toplevel headings are parts,
236 # not chapters.
237 #latex_use_parts = False
238
239 # If true, show page references after internal links.
240 #latex_show_pagerefs = False
241
242 # If true, show URL addresses after external links.
243 #latex_show_urls = False
244
245 # Documents to append as an appendix to all manuals.
246 #latex_appendices = []
247
248 # If false, no module index is generated.
249 #latex_domain_indices = True
250
251
252 # -- Options for manual page output ---------------------------------------
253
254 # One entry per manual page. List of tuples
255 # (source start file, name, description, authors, manual section).
256 man_pages = [
257 (master_doc, 'saleor', u'saleor Documentation',
258 [author], 1)
259 ]
260
261 # If true, show URL addresses after external links.
262 #man_show_urls = False
263
264
265 # -- Options for Texinfo output -------------------------------------------
266
267 # Grouping the document tree into Texinfo files. List of tuples
268 # (source start file, target name, title, author,
269 # dir menu entry, description, category)
270 texinfo_documents = [
271 (master_doc, 'saleor', u'saleor Documentation',
272 author, 'saleor', 'One line description of project.',
273 'Miscellaneous'),
274 ]
275
276 # Documents to append as an appendix to all manuals.
277 #texinfo_appendices = []
278
279 # If false, no module index is generated.
280 #texinfo_domain_indices = True
281
282 # How to display URL addresses: 'footnote', 'no', or 'inline'.
283 #texinfo_show_urls = 'footnote'
284
285 # If true, do not generate a @detailmenu in the "Top" node's menu.
286 #texinfo_no_detailmenu = False
287
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -129,7 +129,7 @@
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
-html_logo = 'logo-white.svg'
+html_logo = 'logo.svg'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
{"golden_diff": "diff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -129,7 +129,7 @@\n \n # The name of an image file (relative to this directory) to place at the top\n # of the sidebar.\n-html_logo = 'logo-white.svg'\n+html_logo = 'logo.svg'\n \n # The name of an image file (within the static path) to use as favicon of the\n # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32\n", "issue": "Broken logo in docs\n### Steps to reproduce the problem\r\n1. https://saleor.readthedocs.io/en/latest/gettingstarted.html\r\n2. \r\n\r\n\r\n**System information**\r\nOperating system: OSX\r\nBrowser: Chrome\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# saleor documentation build configuration file, created by\n# sphinx-quickstart on Tue Jan 5 11:54:57 2016.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\nimport sys\nimport os\nimport shlex\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#sys.path.insert(0, os.path.abspath('.'))\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n 'sphinx.ext.autodoc',\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n# source_suffix = ['.rst', '.md']\nsource_suffix = '.rst'\n\n# The encoding of source files.\n#source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = u'Saleor'\ncopyright = u'2015-2018, Mirumee Software'\nauthor = u'Mirumee Software'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = u'2018.01'\n# The full version, including alpha/beta/rc tags.\nrelease = u'2018.01'\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n#today = ''\n# Else, today_fmt is used as the format for a strftime call.\n#today_fmt = '%B %d, %Y'\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\nexclude_patterns = ['_build']\n\n# The reST default role (used for this markup: `text`) to use for all\n# documents.\n#default_role = None\n\n# If true, '()' will be appended to :func: etc. 
cross-reference text.\n#add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n#add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n#show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# A list of ignored prefixes for module index sorting.\n#modindex_common_prefix = []\n\n# If true, keep warnings as \"system message\" paragraphs in the built documents.\n#keep_warnings = False\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = False\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\nhtml_theme = 'sphinx_rtd_theme'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\n#html_theme_options = {}\n\n# Add any paths that contain custom themes here, relative to this directory.\n#html_theme_path = []\n\n# The name for this set of Sphinx documents. If None, it defaults to\n# \"<project> v<release> documentation\".\nhtml_title = 'Saleor documentation'\n\n# A shorter title for the navigation bar. Default is the same as html_title.\n#html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\nhtml_logo = 'logo-white.svg'\n\n# The name of an image file (within the static path) to use as favicon of the\n# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n#html_favicon = None\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# Add any extra paths that contain custom files (such as robots.txt or\n# .htaccess) here, relative to this directory. These files are copied\n# directly to the root of the documentation.\n#html_extra_path = []\n\n# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,\n# using the given strftime format.\n#html_last_updated_fmt = '%b %d, %Y'\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\n#html_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\n#html_sidebars = {}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n#html_additional_pages = {}\n\n# If false, no module index is generated.\n#html_domain_indices = True\n\n# If false, no index is generated.\n#html_use_index = True\n\n# If true, the index is split into individual pages for each letter.\n#html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\n#html_show_sourcelink = True\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\n#html_show_sphinx = True\n\n# If true, \"(C) Copyright ...\" is shown in the HTML footer. Default is True.\n#html_show_copyright = True\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it. 
The value of this option must be the\n# base URL from which the finished HTML is served.\n#html_use_opensearch = ''\n\n# This is the file name suffix for HTML files (e.g. \".xhtml\").\n#html_file_suffix = None\n\n# Language to be used for generating the HTML full-text search index.\n# Sphinx supports the following languages:\n# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'\n# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'\n#html_search_language = 'en'\n\n# A dictionary with options for the search language support, empty by default.\n# Now only 'ja' uses this config value\n#html_search_options = {'type': 'default'}\n\n# The name of a javascript file (relative to the configuration directory) that\n# implements a search results scorer. If empty, the default will be used.\n#html_search_scorer = 'scorer.js'\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'saleordoc'\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n# The paper size ('letterpaper' or 'a4paper').\n#'papersize': 'letterpaper',\n\n# The font size ('10pt', '11pt' or '12pt').\n#'pointsize': '10pt',\n\n# Additional stuff for the LaTeX preamble.\n#'preamble': '',\n\n# Latex figure (float) alignment\n#'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title,\n# author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n (master_doc, 'saleor.tex', u'saleor Documentation',\n u'Mirumee Software', 'manual'),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n#latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n#latex_use_parts = False\n\n# If true, show page references after internal links.\n#latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n#latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n#latex_appendices = []\n\n# If false, no module index is generated.\n#latex_domain_indices = True\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n (master_doc, 'saleor', u'saleor Documentation',\n [author], 1)\n]\n\n# If true, show URL addresses after external links.\n#man_show_urls = False\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. 
List of tuples\n# (source start file, target name, title, author,\n# dir menu entry, description, category)\ntexinfo_documents = [\n (master_doc, 'saleor', u'saleor Documentation',\n author, 'saleor', 'One line description of project.',\n 'Miscellaneous'),\n]\n\n# Documents to append as an appendix to all manuals.\n#texinfo_appendices = []\n\n# If false, no module index is generated.\n#texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n#texinfo_show_urls = 'footnote'\n\n# If true, do not generate a @detailmenu in the \"Top\" node's menu.\n#texinfo_no_detailmenu = False\n", "path": "docs/conf.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# saleor documentation build configuration file, created by\n# sphinx-quickstart on Tue Jan 5 11:54:57 2016.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\nimport sys\nimport os\nimport shlex\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#sys.path.insert(0, os.path.abspath('.'))\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n 'sphinx.ext.autodoc',\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n# source_suffix = ['.rst', '.md']\nsource_suffix = '.rst'\n\n# The encoding of source files.\n#source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = u'Saleor'\ncopyright = u'2015-2018, Mirumee Software'\nauthor = u'Mirumee Software'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = u'2018.01'\n# The full version, including alpha/beta/rc tags.\nrelease = u'2018.01'\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n#today = ''\n# Else, today_fmt is used as the format for a strftime call.\n#today_fmt = '%B %d, %Y'\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\nexclude_patterns = ['_build']\n\n# The reST default role (used for this markup: `text`) to use for all\n# documents.\n#default_role = None\n\n# If true, '()' will be appended to :func: etc. 
cross-reference text.\n#add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n#add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n#show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# A list of ignored prefixes for module index sorting.\n#modindex_common_prefix = []\n\n# If true, keep warnings as \"system message\" paragraphs in the built documents.\n#keep_warnings = False\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = False\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\nhtml_theme = 'sphinx_rtd_theme'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\n#html_theme_options = {}\n\n# Add any paths that contain custom themes here, relative to this directory.\n#html_theme_path = []\n\n# The name for this set of Sphinx documents. If None, it defaults to\n# \"<project> v<release> documentation\".\nhtml_title = 'Saleor documentation'\n\n# A shorter title for the navigation bar. Default is the same as html_title.\n#html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\nhtml_logo = 'logo.svg'\n\n# The name of an image file (within the static path) to use as favicon of the\n# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n#html_favicon = None\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# Add any extra paths that contain custom files (such as robots.txt or\n# .htaccess) here, relative to this directory. These files are copied\n# directly to the root of the documentation.\n#html_extra_path = []\n\n# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,\n# using the given strftime format.\n#html_last_updated_fmt = '%b %d, %Y'\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\n#html_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\n#html_sidebars = {}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n#html_additional_pages = {}\n\n# If false, no module index is generated.\n#html_domain_indices = True\n\n# If false, no index is generated.\n#html_use_index = True\n\n# If true, the index is split into individual pages for each letter.\n#html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\n#html_show_sourcelink = True\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\n#html_show_sphinx = True\n\n# If true, \"(C) Copyright ...\" is shown in the HTML footer. Default is True.\n#html_show_copyright = True\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it. 
The value of this option must be the\n# base URL from which the finished HTML is served.\n#html_use_opensearch = ''\n\n# This is the file name suffix for HTML files (e.g. \".xhtml\").\n#html_file_suffix = None\n\n# Language to be used for generating the HTML full-text search index.\n# Sphinx supports the following languages:\n# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'\n# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'\n#html_search_language = 'en'\n\n# A dictionary with options for the search language support, empty by default.\n# Now only 'ja' uses this config value\n#html_search_options = {'type': 'default'}\n\n# The name of a javascript file (relative to the configuration directory) that\n# implements a search results scorer. If empty, the default will be used.\n#html_search_scorer = 'scorer.js'\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'saleordoc'\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n# The paper size ('letterpaper' or 'a4paper').\n#'papersize': 'letterpaper',\n\n# The font size ('10pt', '11pt' or '12pt').\n#'pointsize': '10pt',\n\n# Additional stuff for the LaTeX preamble.\n#'preamble': '',\n\n# Latex figure (float) alignment\n#'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title,\n# author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n (master_doc, 'saleor.tex', u'saleor Documentation',\n u'Mirumee Software', 'manual'),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n#latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n#latex_use_parts = False\n\n# If true, show page references after internal links.\n#latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n#latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n#latex_appendices = []\n\n# If false, no module index is generated.\n#latex_domain_indices = True\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n (master_doc, 'saleor', u'saleor Documentation',\n [author], 1)\n]\n\n# If true, show URL addresses after external links.\n#man_show_urls = False\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n# dir menu entry, description, category)\ntexinfo_documents = [\n (master_doc, 'saleor', u'saleor Documentation',\n author, 'saleor', 'One line description of project.',\n 'Miscellaneous'),\n]\n\n# Documents to append as an appendix to all manuals.\n#texinfo_appendices = []\n\n# If false, no module index is generated.\n#texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n#texinfo_show_urls = 'footnote'\n\n# If true, do not generate a @detailmenu in the \"Top\" node's menu.\n#texinfo_no_detailmenu = False\n", "path": "docs/conf.py"}]}
| 3,487 | 130 |
gh_patches_debug_22430
|
rasdani/github-patches
|
git_diff
|
mars-project__mars-40
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG][client] web session has a different signature with the local session
**Describe the bug**
Web has its own session, but its `run` method has a different signature from the local session, which leads to a bug: when a user submits more than one tensor, only the first tensor is executed.
**To Reproduce**
```python
In [14]: from mars.session import new_session
In [15]: sess = new_session('http://0.0.0.0:49911')
In [16]: a = mt.ones((2, 2))
In [17]: sess.run(a, a+1)
Out[17]:
[array([[1., 1.],
[1., 1.]])]
```
--- END ISSUE ---
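For reference, the local session treats its positional arguments themselves as the tensors to execute. The following is a minimal, self-contained sketch of that calling convention only; `_SessionSketch` is a stand-in class invented for illustration (it is not part of Mars), and a real `run` would build and submit a graph rather than return a list of markers.
```python
# Sketch only: shows why a varargs signature executes every submitted tensor.
class _SessionSketch(object):
    def run(self, *tensors, **kw):
        compose = kw.pop('compose', True)
        wait = kw.pop('wait', True)
        timeout = kw.pop('timeout', -1)
        if kw:
            raise TypeError('run got unexpected keyword arguments: %s'
                            % ', '.join(kw))
        # Every positional argument ends up in the submitted batch,
        # so run(a, a + 1) covers both tensors instead of only the first.
        return [(repr(t), compose, wait, timeout) for t in tensors]


if __name__ == '__main__':
    sess = _SessionSketch()
    print(sess.run('a'))           # one tensor
    print(sess.run('a', 'a + 1'))  # two tensors, both "executed"
```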
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mars/web/session.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 # Copyright 1999-2018 Alibaba Group Holding Ltd.
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
16
17 import base64
18 import json
19 import time
20 import logging
21
22 import requests
23
24 from ..compat import six, TimeoutError
25 from ..serialize import dataserializer
26 from ..errors import ExecutionInterrupted
27
28 logger = logging.getLogger(__name__)
29
30
31 class Session(object):
32 def __init__(self, endpoint, args=None):
33 self._endpoint = endpoint
34 self._args = args
35 self._tensor_to_graph = dict()
36
37 self._req_session = requests.Session()
38
39 from requests.adapters import HTTPAdapter
40 self._req_session.mount('http://stackoverflow.com', HTTPAdapter(max_retries=5))
41 self._main()
42
43 @property
44 def session_id(self):
45 return self._session_id
46
47 def _main(self):
48 resp = self._req_session.post(self._endpoint + '/api/session', self._args)
49 if resp.status_code >= 400:
50 raise SystemError('Failed to create mars session.')
51 content = json.loads(resp.text)
52 self._session_id = content['session_id']
53
54 def run(self, tensors, compose=True, wait=True, timeout=-1):
55 from ..graph import DirectedGraph
56 graph = DirectedGraph()
57
58 if not isinstance(tensors, (list, tuple, set)):
59 tensors = [tensors]
60 for t in tensors:
61 graph = t.build_graph(graph=graph, tiled=False, compose=compose)
62 targets = [t.key for t in tensors]
63
64 targets_join = ','.join(targets)
65 session_url = self._endpoint + '/api/session/' + self._session_id
66 graph_json = graph.to_json()
67
68 resp_json = self._submit_graph(graph_json, targets_join)
69 graph_key = resp_json['graph_key']
70 graph_url = session_url + '/graph/' + graph_key
71
72 for t in tensors:
73 self._tensor_to_graph[t.key] = graph_key
74
75 if wait:
76 exec_start_time = time.time()
77 while timeout <= 0 or time.time() - exec_start_time <= timeout:
78 try:
79 time.sleep(1)
80 try:
81 resp = self._req_session.get(graph_url)
82 except requests.ConnectionError as ex:
83 err_msg = str(ex)
84 if 'ConnectionResetError' in err_msg or 'Connection refused' in err_msg:
85 continue
86 raise
87 if resp.status_code == 504:
88 logging.debug('Gateway Time-out, try again')
89 continue
90 if resp.status_code >= 400:
91 raise SystemError('Failed to read task status. Code: %d, Reason: %s, Content:\n%s' %
92 (resp.status_code, resp.reason, resp.text))
93 resp_json = json.loads(resp.text)
94 if resp_json['state'] in ('running', 'preparing'):
95 continue
96 elif resp_json['state'] == 'success':
97 break
98 elif resp_json['state'] == 'cancelled':
99 raise ExecutionInterrupted
100 elif resp_json['state'] == 'failed':
101 # TODO add traceback
102 if 'traceback' in resp_json:
103 traceback = resp_json['traceback']
104 if isinstance(traceback, list):
105 traceback = ''.join(str(s) for s in traceback)
106 raise SystemError('Graph execution failed.\nMessage: %s\nTraceback from server:\n%s' %
107 (resp_json['msg'], traceback))
108 else:
109 raise SystemError('Graph execution failed with unknown reason.')
110 else:
111 raise SystemError('Unknown graph execution state %s' % resp_json['state'])
112 except KeyboardInterrupt:
113 resp = self._req_session.delete(graph_url)
114 if resp.status_code >= 400:
115 raise SystemError('Failed to stop graph execution. Code: %d, Reason: %s, Content:\n%s' %
116 (resp.status_code, resp.reason, resp.text))
117 if 0 < timeout < time.time() - exec_start_time:
118 raise TimeoutError
119 data_list = []
120 for tk in targets:
121 resp = self._req_session.get(session_url + '/graph/' + graph_key + '/data/' + tk)
122 if resp.status_code >= 400:
123 continue
124 data_list.append(dataserializer.loads(resp.content))
125 return data_list
126 else:
127 return graph_key
128
129 def decref(self, *keys):
130 session_url = self._endpoint + '/api/session/' + self._session_id
131 for k in keys:
132 if k not in self._tensor_to_graph:
133 continue
134 data_url = session_url + '/graph/%s/data/%s' % (self._tensor_to_graph[k], k)
135 self._req_session.delete(data_url)
136
137 def stop(self, graph_key):
138 session_url = self._endpoint + '/api/session/' + self._session_id
139 graph_url = session_url + '/graph/' + graph_key
140 resp = self._req_session.delete(graph_url)
141 if resp.status_code >= 400:
142 raise SystemError('Failed to stop graph execution. Code: %d, Reason: %s, Content:\n%s' %
143 (resp.status_code, resp.reason, resp.text))
144
145 def _submit_graph(self, graph_json, targets):
146 session_url = self._endpoint + '/api/session/' + self._session_id
147 resp = self._req_session.post(session_url + '/graph', dict(
148 graph=json.dumps(graph_json),
149 target=targets,
150 ))
151 if resp.status_code >= 400:
152 resp_json = json.loads(resp.text)
153 exc_info = base64.b64decode(resp_json['exc_info'])
154 six.reraise(*exc_info)
155 resp_json = json.loads(resp.text)
156 return resp_json
157
158 def close(self):
159 self.decref(*list(self._tensor_to_graph.keys()))
160
161 resp = self._req_session.delete(self._endpoint + '/api/session/' + self._session_id)
162 if resp.status_code >= 400:
163 raise SystemError('Failed to close mars session.')
164
165 def check_service_ready(self, timeout=1):
166 try:
167 resp = self._req_session.get(self._endpoint + '/api', timeout=timeout)
168 except (requests.ConnectionError, requests.Timeout):
169 return False
170 if resp.status_code >= 400:
171 return False
172 return True
173
174 def count_workers(self):
175 resp = self._req_session.get(self._endpoint + '/api/worker', timeout=1)
176 return json.loads(resp.text)
177
178 def __enter__(self):
179 return self
180
181 def __exit__(self, exc_type, exc_val, exc_tb):
182 self.close()
183
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/mars/web/session.py b/mars/web/session.py
--- a/mars/web/session.py
+++ b/mars/web/session.py
@@ -24,6 +24,7 @@
from ..compat import six, TimeoutError
from ..serialize import dataserializer
from ..errors import ExecutionInterrupted
+from ..graph import DirectedGraph
logger = logging.getLogger(__name__)
@@ -51,12 +52,14 @@
content = json.loads(resp.text)
self._session_id = content['session_id']
- def run(self, tensors, compose=True, wait=True, timeout=-1):
- from ..graph import DirectedGraph
- graph = DirectedGraph()
+ def run(self, *tensors, **kw):
+ timeout = kw.pop('timeout', -1)
+ compose = kw.pop('compose', True)
+ wait = kw.pop('wait', True)
+ if kw:
+ raise TypeError('run got unexpected key arguments {0}'.format(', '.join(kw.keys())))
- if not isinstance(tensors, (list, tuple, set)):
- tensors = [tensors]
+ graph = DirectedGraph()
for t in tensors:
graph = t.build_graph(graph=graph, tiled=False, compose=compose)
targets = [t.key for t in tensors]
|
{"golden_diff": "diff --git a/mars/web/session.py b/mars/web/session.py\n--- a/mars/web/session.py\n+++ b/mars/web/session.py\n@@ -24,6 +24,7 @@\n from ..compat import six, TimeoutError\n from ..serialize import dataserializer\n from ..errors import ExecutionInterrupted\n+from ..graph import DirectedGraph\n \n logger = logging.getLogger(__name__)\n \n@@ -51,12 +52,14 @@\n content = json.loads(resp.text)\n self._session_id = content['session_id']\n \n- def run(self, tensors, compose=True, wait=True, timeout=-1):\n- from ..graph import DirectedGraph\n- graph = DirectedGraph()\n+ def run(self, *tensors, **kw):\n+ timeout = kw.pop('timeout', -1)\n+ compose = kw.pop('compose', True)\n+ wait = kw.pop('wait', True)\n+ if kw:\n+ raise TypeError('run got unexpected key arguments {0}'.format(', '.join(kw.keys())))\n \n- if not isinstance(tensors, (list, tuple, set)):\n- tensors = [tensors]\n+ graph = DirectedGraph()\n for t in tensors:\n graph = t.build_graph(graph=graph, tiled=False, compose=compose)\n targets = [t.key for t in tensors]\n", "issue": "[BUG][client] web session has a different signature with the local session\n**Describe the bug**\r\n\r\nWeb has it's own session, but it's `run` method has a different signature with the local session, which lead to a bug that when user submits more than one tensor, only the first tensor would be executed.\r\n\r\n**To Reproduce**\r\n\r\n```python\r\nIn [14]: from mars.session import new_session \r\n\r\nIn [15]: sess = new_session('http://0.0.0.0:49911') \r\n\r\nIn [16]: a = mt.ones((2, 2)) \r\n\r\nIn [17]: sess.run(a, a+1) \r\nOut[17]: \r\n[array([[1., 1.],\r\n [1., 1.]])]\r\n```\r\n\r\n\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n# Copyright 1999-2018 Alibaba Group Holding Ltd.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport base64\nimport json\nimport time\nimport logging\n\nimport requests\n\nfrom ..compat import six, TimeoutError\nfrom ..serialize import dataserializer\nfrom ..errors import ExecutionInterrupted\n\nlogger = logging.getLogger(__name__)\n\n\nclass Session(object):\n def __init__(self, endpoint, args=None):\n self._endpoint = endpoint\n self._args = args\n self._tensor_to_graph = dict()\n\n self._req_session = requests.Session()\n\n from requests.adapters import HTTPAdapter\n self._req_session.mount('http://stackoverflow.com', HTTPAdapter(max_retries=5))\n self._main()\n\n @property\n def session_id(self):\n return self._session_id\n\n def _main(self):\n resp = self._req_session.post(self._endpoint + '/api/session', self._args)\n if resp.status_code >= 400:\n raise SystemError('Failed to create mars session.')\n content = json.loads(resp.text)\n self._session_id = content['session_id']\n\n def run(self, tensors, compose=True, wait=True, timeout=-1):\n from ..graph import DirectedGraph\n graph = DirectedGraph()\n\n if not isinstance(tensors, (list, tuple, set)):\n tensors = [tensors]\n for t in tensors:\n graph = t.build_graph(graph=graph, tiled=False, compose=compose)\n targets = 
[t.key for t in tensors]\n\n targets_join = ','.join(targets)\n session_url = self._endpoint + '/api/session/' + self._session_id\n graph_json = graph.to_json()\n\n resp_json = self._submit_graph(graph_json, targets_join)\n graph_key = resp_json['graph_key']\n graph_url = session_url + '/graph/' + graph_key\n\n for t in tensors:\n self._tensor_to_graph[t.key] = graph_key\n\n if wait:\n exec_start_time = time.time()\n while timeout <= 0 or time.time() - exec_start_time <= timeout:\n try:\n time.sleep(1)\n try:\n resp = self._req_session.get(graph_url)\n except requests.ConnectionError as ex:\n err_msg = str(ex)\n if 'ConnectionResetError' in err_msg or 'Connection refused' in err_msg:\n continue\n raise\n if resp.status_code == 504:\n logging.debug('Gateway Time-out, try again')\n continue\n if resp.status_code >= 400:\n raise SystemError('Failed to read task status. Code: %d, Reason: %s, Content:\\n%s' %\n (resp.status_code, resp.reason, resp.text))\n resp_json = json.loads(resp.text)\n if resp_json['state'] in ('running', 'preparing'):\n continue\n elif resp_json['state'] == 'success':\n break\n elif resp_json['state'] == 'cancelled':\n raise ExecutionInterrupted\n elif resp_json['state'] == 'failed':\n # TODO add traceback\n if 'traceback' in resp_json:\n traceback = resp_json['traceback']\n if isinstance(traceback, list):\n traceback = ''.join(str(s) for s in traceback)\n raise SystemError('Graph execution failed.\\nMessage: %s\\nTraceback from server:\\n%s' %\n (resp_json['msg'], traceback))\n else:\n raise SystemError('Graph execution failed with unknown reason.')\n else:\n raise SystemError('Unknown graph execution state %s' % resp_json['state'])\n except KeyboardInterrupt:\n resp = self._req_session.delete(graph_url)\n if resp.status_code >= 400:\n raise SystemError('Failed to stop graph execution. Code: %d, Reason: %s, Content:\\n%s' %\n (resp.status_code, resp.reason, resp.text))\n if 0 < timeout < time.time() - exec_start_time:\n raise TimeoutError\n data_list = []\n for tk in targets:\n resp = self._req_session.get(session_url + '/graph/' + graph_key + '/data/' + tk)\n if resp.status_code >= 400:\n continue\n data_list.append(dataserializer.loads(resp.content))\n return data_list\n else:\n return graph_key\n\n def decref(self, *keys):\n session_url = self._endpoint + '/api/session/' + self._session_id\n for k in keys:\n if k not in self._tensor_to_graph:\n continue\n data_url = session_url + '/graph/%s/data/%s' % (self._tensor_to_graph[k], k)\n self._req_session.delete(data_url)\n\n def stop(self, graph_key):\n session_url = self._endpoint + '/api/session/' + self._session_id\n graph_url = session_url + '/graph/' + graph_key\n resp = self._req_session.delete(graph_url)\n if resp.status_code >= 400:\n raise SystemError('Failed to stop graph execution. 
Code: %d, Reason: %s, Content:\\n%s' %\n (resp.status_code, resp.reason, resp.text))\n\n def _submit_graph(self, graph_json, targets):\n session_url = self._endpoint + '/api/session/' + self._session_id\n resp = self._req_session.post(session_url + '/graph', dict(\n graph=json.dumps(graph_json),\n target=targets,\n ))\n if resp.status_code >= 400:\n resp_json = json.loads(resp.text)\n exc_info = base64.b64decode(resp_json['exc_info'])\n six.reraise(*exc_info)\n resp_json = json.loads(resp.text)\n return resp_json\n\n def close(self):\n self.decref(*list(self._tensor_to_graph.keys()))\n\n resp = self._req_session.delete(self._endpoint + '/api/session/' + self._session_id)\n if resp.status_code >= 400:\n raise SystemError('Failed to close mars session.')\n\n def check_service_ready(self, timeout=1):\n try:\n resp = self._req_session.get(self._endpoint + '/api', timeout=timeout)\n except (requests.ConnectionError, requests.Timeout):\n return False\n if resp.status_code >= 400:\n return False\n return True\n\n def count_workers(self):\n resp = self._req_session.get(self._endpoint + '/api/worker', timeout=1)\n return json.loads(resp.text)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n self.close()\n", "path": "mars/web/session.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n# Copyright 1999-2018 Alibaba Group Holding Ltd.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport base64\nimport json\nimport time\nimport logging\n\nimport requests\n\nfrom ..compat import six, TimeoutError\nfrom ..serialize import dataserializer\nfrom ..errors import ExecutionInterrupted\nfrom ..graph import DirectedGraph\n\nlogger = logging.getLogger(__name__)\n\n\nclass Session(object):\n def __init__(self, endpoint, args=None):\n self._endpoint = endpoint\n self._args = args\n self._tensor_to_graph = dict()\n\n self._req_session = requests.Session()\n\n from requests.adapters import HTTPAdapter\n self._req_session.mount('http://stackoverflow.com', HTTPAdapter(max_retries=5))\n self._main()\n\n @property\n def session_id(self):\n return self._session_id\n\n def _main(self):\n resp = self._req_session.post(self._endpoint + '/api/session', self._args)\n if resp.status_code >= 400:\n raise SystemError('Failed to create mars session.')\n content = json.loads(resp.text)\n self._session_id = content['session_id']\n\n def run(self, *tensors, **kw):\n timeout = kw.pop('timeout', -1)\n compose = kw.pop('compose', True)\n wait = kw.pop('wait', True)\n if kw:\n raise TypeError('run got unexpected key arguments {0}'.format(', '.join(kw.keys())))\n\n graph = DirectedGraph()\n for t in tensors:\n graph = t.build_graph(graph=graph, tiled=False, compose=compose)\n targets = [t.key for t in tensors]\n\n targets_join = ','.join(targets)\n session_url = self._endpoint + '/api/session/' + self._session_id\n graph_json = graph.to_json()\n\n resp_json = self._submit_graph(graph_json, targets_join)\n graph_key = resp_json['graph_key']\n graph_url = 
session_url + '/graph/' + graph_key\n\n for t in tensors:\n self._tensor_to_graph[t.key] = graph_key\n\n if wait:\n exec_start_time = time.time()\n while timeout <= 0 or time.time() - exec_start_time <= timeout:\n try:\n time.sleep(1)\n try:\n resp = self._req_session.get(graph_url)\n except requests.ConnectionError as ex:\n err_msg = str(ex)\n if 'ConnectionResetError' in err_msg or 'Connection refused' in err_msg:\n continue\n raise\n if resp.status_code == 504:\n logging.debug('Gateway Time-out, try again')\n continue\n if resp.status_code >= 400:\n raise SystemError('Failed to read task status. Code: %d, Reason: %s, Content:\\n%s' %\n (resp.status_code, resp.reason, resp.text))\n resp_json = json.loads(resp.text)\n if resp_json['state'] in ('running', 'preparing'):\n continue\n elif resp_json['state'] == 'success':\n break\n elif resp_json['state'] == 'cancelled':\n raise ExecutionInterrupted\n elif resp_json['state'] == 'failed':\n # TODO add traceback\n if 'traceback' in resp_json:\n traceback = resp_json['traceback']\n if isinstance(traceback, list):\n traceback = ''.join(str(s) for s in traceback)\n raise SystemError('Graph execution failed.\\nMessage: %s\\nTraceback from server:\\n%s' %\n (resp_json['msg'], traceback))\n else:\n raise SystemError('Graph execution failed with unknown reason.')\n else:\n raise SystemError('Unknown graph execution state %s' % resp_json['state'])\n except KeyboardInterrupt:\n resp = self._req_session.delete(graph_url)\n if resp.status_code >= 400:\n raise SystemError('Failed to stop graph execution. Code: %d, Reason: %s, Content:\\n%s' %\n (resp.status_code, resp.reason, resp.text))\n if 0 < timeout < time.time() - exec_start_time:\n raise TimeoutError\n data_list = []\n for tk in targets:\n resp = self._req_session.get(session_url + '/graph/' + graph_key + '/data/' + tk)\n if resp.status_code >= 400:\n continue\n data_list.append(dataserializer.loads(resp.content))\n return data_list\n else:\n return graph_key\n\n def decref(self, *keys):\n session_url = self._endpoint + '/api/session/' + self._session_id\n for k in keys:\n if k not in self._tensor_to_graph:\n continue\n data_url = session_url + '/graph/%s/data/%s' % (self._tensor_to_graph[k], k)\n self._req_session.delete(data_url)\n\n def stop(self, graph_key):\n session_url = self._endpoint + '/api/session/' + self._session_id\n graph_url = session_url + '/graph/' + graph_key\n resp = self._req_session.delete(graph_url)\n if resp.status_code >= 400:\n raise SystemError('Failed to stop graph execution. 
Code: %d, Reason: %s, Content:\\n%s' %\n (resp.status_code, resp.reason, resp.text))\n\n def _submit_graph(self, graph_json, targets):\n session_url = self._endpoint + '/api/session/' + self._session_id\n resp = self._req_session.post(session_url + '/graph', dict(\n graph=json.dumps(graph_json),\n target=targets,\n ))\n if resp.status_code >= 400:\n resp_json = json.loads(resp.text)\n exc_info = base64.b64decode(resp_json['exc_info'])\n six.reraise(*exc_info)\n resp_json = json.loads(resp.text)\n return resp_json\n\n def close(self):\n self.decref(*list(self._tensor_to_graph.keys()))\n\n resp = self._req_session.delete(self._endpoint + '/api/session/' + self._session_id)\n if resp.status_code >= 400:\n raise SystemError('Failed to close mars session.')\n\n def check_service_ready(self, timeout=1):\n try:\n resp = self._req_session.get(self._endpoint + '/api', timeout=timeout)\n except (requests.ConnectionError, requests.Timeout):\n return False\n if resp.status_code >= 400:\n return False\n return True\n\n def count_workers(self):\n resp = self._req_session.get(self._endpoint + '/api/worker', timeout=1)\n return json.loads(resp.text)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n self.close()\n", "path": "mars/web/session.py"}]}
| 2,447 | 290 |
gh_patches_debug_5368
|
rasdani/github-patches
|
git_diff
|
mindsdb__mindsdb-337
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add consistency check for model data object in the CI tests
We should take a snapshot of a "correct" model data object (as returned by `get_model_data()`) for the CI testing dataset and always compare the json-parsed outputs of `get_model_data` with it.
It might be a bit tedious to update the snapshot every time the output of `get_model_data` changes, but otherwise there are certain situations where we can get silent failures, since some parts of the Model Analyzer have fairly general exception catching built in (certain data simply can't be produced in various situations) and `get_model_data` itself is made to work with incomplete data in order to return this data during training.
If we want to be really scrupulous we should check the model data object at each phase of training, but I think that'd be overdoing it.
--- END ISSUE ---
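As a rough illustration of the snapshot idea, a helper along these lines could live in the CI suite. The helper's name, the snapshot file name, and the way `model_data` is obtained are placeholders rather than existing MindsDB APIs; the issue only establishes that `get_model_data()` returns the object to compare.
```python
import json


def assert_matches_snapshot(model_data, snapshot_path='model_data_snapshot.json'):
    """Compare a freshly produced model data object against a stored snapshot.

    `model_data` is whatever `get_model_data()` returned; the snapshot is a
    JSON file checked in next to the CI tests (the path here is a placeholder).
    """
    with open(snapshot_path) as f:
        expected = json.load(f)
    # Round-trip through JSON so both sides are plain dicts/lists/scalars.
    actual = json.loads(json.dumps(model_data))
    assert set(actual) == set(expected), 'top-level keys of model data changed'
    for key, value in expected.items():
        # Exact equality is the simplest check; numeric fields that vary
        # between runs would need tolerances or should be masked out here.
        assert actual[key] == value, 'model data field %r changed' % key
```
Checking the object at each training phase, as the issue mentions, would amount to calling the same helper with a per-phase snapshot file.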
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mindsdb/__about__.py`
Content:
```
1 __title__ = 'MindsDB'
2 __package_name__ = 'mindsdb'
3 __version__ = '1.7.11'
4 __description__ = "MindsDB's goal is to make it very simple for developers to use the power of artificial neural networks in their projects."
5 __email__ = "[email protected]"
6 __author__ = 'MindsDB Inc'
7 __github__ = 'https://github.com/mindsdb/mindsdb'
8 __pypi__ = 'https://pypi.org/project/mindsdb'
9 __license__ = 'MIT'
10 __copyright__ = 'Copyright 2018- mindsdb'
11
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/mindsdb/__about__.py b/mindsdb/__about__.py
--- a/mindsdb/__about__.py
+++ b/mindsdb/__about__.py
@@ -1,6 +1,6 @@
__title__ = 'MindsDB'
__package_name__ = 'mindsdb'
-__version__ = '1.7.11'
+__version__ = '1.7.12'
__description__ = "MindsDB's goal is to make it very simple for developers to use the power of artificial neural networks in their projects."
__email__ = "[email protected]"
__author__ = 'MindsDB Inc'
|
{"golden_diff": "diff --git a/mindsdb/__about__.py b/mindsdb/__about__.py\n--- a/mindsdb/__about__.py\n+++ b/mindsdb/__about__.py\n@@ -1,6 +1,6 @@\n __title__ = 'MindsDB'\n __package_name__ = 'mindsdb'\n-__version__ = '1.7.11'\n+__version__ = '1.7.12'\n __description__ = \"MindsDB's goal is to make it very simple for developers to use the power of artificial neural networks in their projects.\"\n __email__ = \"[email protected]\"\n __author__ = 'MindsDB Inc'\n", "issue": "Add consistency check for model data object in the CI tests\nWe should take a snapshot of a \"correct\" model data object (as returned by `get_model_data()`) for the CI testing dataset and always compare the json-parsed outputs of `get_model_data` with it.\r\n\r\nMight be a bit tedious to alter every time when we run `get_model_data` ,but otherwise there are certain situation where we can get silent failures. Since certain parts of the Model Analyzer have more general exception catching built in (since certain data simply can't be produced in various situations) and `get_model_data` itself is made to work with incomplete data in order to return this data during training.\r\n\r\nIf we want to be really scrupulous we should check the model data object at each phase of training, but I think that'd be overdoing it.\n", "before_files": [{"content": "__title__ = 'MindsDB'\n__package_name__ = 'mindsdb'\n__version__ = '1.7.11'\n__description__ = \"MindsDB's goal is to make it very simple for developers to use the power of artificial neural networks in their projects.\"\n__email__ = \"[email protected]\"\n__author__ = 'MindsDB Inc'\n__github__ = 'https://github.com/mindsdb/mindsdb'\n__pypi__ = 'https://pypi.org/project/mindsdb'\n__license__ = 'MIT'\n__copyright__ = 'Copyright 2018- mindsdb'\n", "path": "mindsdb/__about__.py"}], "after_files": [{"content": "__title__ = 'MindsDB'\n__package_name__ = 'mindsdb'\n__version__ = '1.7.12'\n__description__ = \"MindsDB's goal is to make it very simple for developers to use the power of artificial neural networks in their projects.\"\n__email__ = \"[email protected]\"\n__author__ = 'MindsDB Inc'\n__github__ = 'https://github.com/mindsdb/mindsdb'\n__pypi__ = 'https://pypi.org/project/mindsdb'\n__license__ = 'MIT'\n__copyright__ = 'Copyright 2018- mindsdb'\n", "path": "mindsdb/__about__.py"}]}
| 582 | 147 |
gh_patches_debug_34409
|
rasdani/github-patches
|
git_diff
|
ultrabug__py3status-822
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[clock] It is more intuitive to round clock emoji to the nearest 15 or 45 minutes
The emoji clock is being used in many apps these days, and I noticed that the py3status implementation is not consistent with the rest and is less intuitive to me.
Currently the clock emoji is not updated until the time exactly reaches the next step, so for example:
- 14:59 🕝
Instead, it is more intuitive to choose the icon for the time rounded to the nearest 15 or 45 minutes, that is:
- 14:01 🕑
- 14:14 🕑
- 14:15 🕝
- 14:44 🕝
- 14:45 🕒
- 14:59 🕒
/cc @tobes as you created this module.
--- END ISSUE ---
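A small, self-contained sketch of the requested behaviour (Python 3): the glyph string and the 12-hour block length are copied from the module shown below, while `clock_icon` is an illustrative helper name, not the module's actual code, which currently floors the block index instead of rounding it.
```python
# Sketch: round to the nearest half-hour block instead of flooring, so the
# glyph flips at :15 and :45 rather than at :00 and :30.
BLOCKS = u'🕛🕧🕐🕜🕑🕝🕒🕞🕓🕟🕔🕠🕕🕡🕖🕢🕗🕣🕘🕤🕙🕥🕚🕦'
BLOCK_HOURS = 12


def clock_icon(hour, minute):
    h = (hour + minute / 60.0) % BLOCK_HOURS
    # int(x + 0.5) rounds to the nearest block; the final modulo wraps
    # 11:45-11:59 back onto the 12 o'clock glyph.
    idx = int(h / BLOCK_HOURS * len(BLOCKS) + 0.5) % len(BLOCKS)
    return BLOCKS[idx]


if __name__ == '__main__':
    for hour, minute in ((14, 1), (14, 14), (14, 15), (14, 44), (14, 45), (14, 59)):
        print(u'%02d:%02d %s' % (hour, minute, clock_icon(hour, minute)))
```
Running it prints exactly the mapping listed in the issue: 14:01 and 14:14 give the two o'clock glyph, 14:15 through 14:44 the half-past glyph, and 14:45 through 14:59 the three o'clock glyph.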
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `py3status/modules/clock.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 """
3 Display date and time.
4
5 This module allows one or more datetimes to be displayed.
6 All datetimes share the same format_time but can set their own timezones.
7 Timezones are defined in the `format` using the TZ name in squiggly brackets eg
8 `{GMT}`, `{Portugal}`, `{Europe/Paris}`, `{America/Argentina/Buenos_Aires}`.
9
10 ISO-3166 two letter country codes eg `{de}` can also be used but if more than
11 one timezone exists for the country eg `{us}` the first one will be selected.
12
13 `{Local}` can be used for the local settings of your computer.
14
15 Note: Timezones are case sensitive
16
17 A full list of timezones can be found at
18 https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
19
20 Configuration parameters:
21 block_hours: length of time period for all blocks in hours (default 12)
22 blocks: a string, where each character represents time period
23 from the start of a time period.
24 (default '🕛🕧🕐🕜🕑🕝🕒🕞🕓🕟🕔🕠🕕🕡🕖🕢🕗🕣🕘🕤🕙🕥🕚🕦')
25 button_change_format: button that switches format used setting to None
26 disables (default 1)
27 button_change_time_format: button that switches format_time used. Setting
28 to None disables (default 2)
29 button_reset: button that switches display to the first timezone. Setting
30 to None disables (default 3)
31 cycle: If more than one display then how many seconds between changing the
32 display (default 0)
33 format: defines the timezones displayed. This can be a single string or a
34 list. If a list is supplied then the formats can be cycled through
35 using `cycle` or by button click. (default '{Local}')
36 format_time: format to use for the time, strftime directives such as `%H`
37 can be used this can be either a string or to allow multiple formats as
38 a list. The one used can be changed by button click.
39 *(default ['[{name_unclear} ]%c', '[{name_unclear} ]%x %X',
40 '[{name_unclear} ]%a %H:%M', '[{name_unclear} ]{icon}'])*
41
42 Format placeholders:
43 {icon} a character representing the time from `blocks`
44 {name} friendly timezone name eg `Buenos Aires`
45 {name_unclear} friendly timezone name eg `Buenos Aires` but is empty if
46 only one timezone is provided
47 {timezone} full timezone name eg `America/Argentina/Buenos_Aires`
48 {timezone_unclear} full timezone name eg `America/Argentina/Buenos_Aires`
49 but is empty if only one timezone is provided
50
51
52 Requires:
53 pytz: python library
54 tzlocal: python library
55
56 i3status.conf example:
57
58 ```
59 # cycling through London, Warsaw, Tokyo
60 clock {
61 cycle = 30
62 format = ["{Europe/London}", "{Europe/Warsaw}", "{Asia/Tokyo}"]
63 format_time = "{name} %H:%M"
64 }
65
66
67 # Show the time and date in New York
68 clock {
69 format = "Big Apple {America/New_York}"
70 format_time = "%Y-%m-%d %H:%M:%S"
71 }
72
73
74 # wall clocks
75 clock {
76 format = "{Asia/Calcutta} {Africa/Nairobi} {Asia/Bangkok}"
77 format_time = "{name} {icon}"
78 }
79 ```
80
81 @author tobes
82 @license BSD
83
84 SAMPLE OUTPUT
85 {'full_text': 'Sun 15 Jan 2017 23:27:17 GMT'}
86
87 london
88 {'full_text': 'Thursday Feb 23 1:42 AM London'}
89 """
90
91 import re
92 import math
93 from datetime import datetime
94 from time import time
95
96 import pytz
97 import tzlocal
98
99 CLOCK_BLOCKS = u'🕛🕧🕐🕜🕑🕝🕒🕞🕓🕟🕔🕠🕕🕡🕖🕢🕗🕣🕘🕤🕙🕥🕚🕦'
100
101
102 class Py3status:
103 """
104 """
105 # available configuration parameters
106 block_hours = 12
107 blocks = CLOCK_BLOCKS
108 button_change_format = 1
109 button_change_time_format = 2
110 button_reset = 3
111 cycle = 0
112 format = "{Local}"
113 format_time = [
114 '[{name_unclear} ]%c',
115 '[{name_unclear} ]%x %X',
116 '[{name_unclear} ]%a %H:%M',
117 '[{name_unclear} ]{icon}',
118 ]
119
120 def post_config_hook(self):
121 # Multiple clocks are possible that can be cycled through
122 if not isinstance(self.format, list):
123 self.format = [self.format]
124 # if only one item we don't need to cycle
125 if len(self.format) == 1:
126 self.cycle = 0
127 # find any declared timezones eg {Europe/London}
128 self._items = {}
129 matches = re.findall('\{([^}]*)\}', ''.join(self.format))
130 for match in matches:
131 self._items[match] = self._get_timezone(match)
132
133 self.multiple_tz = len(self._items) > 1
134
135 if not isinstance(self.format_time, list):
136 self.format_time = [self.format_time]
137
138 # workout how often in seconds we will need to do an update to keep the
139 # display fresh
140 self.time_deltas = []
141 for format in self.format_time:
142 format_time = re.sub('\{([^}]*)\}', '', format)
143 format_time = format_time.replace('%%', '')
144 if '%f' in format_time:
145 # microseconds
146 time_delta = 0
147 elif '%S' in format_time:
148 # seconds
149 time_delta = 1
150 elif '%s' in format_time:
151 # seconds since unix epoch start
152 time_delta = 1
153 elif '%T' in format_time:
154 # seconds included in "%H:%M:%S"
155 time_delta = 1
156 elif '%c' in format_time:
157 # Locale’s appropriate date and time representation
158 time_delta = 1
159 elif '%X' in format_time:
160 # Locale’s appropriate time representation
161 time_delta = 1
162 else:
163 time_delta = 60
164 self.time_deltas.append(time_delta)
165
166 self.active_time_format = 0
167
168 self._cycle_time = time() + self.cycle
169 self.active = 0
170
171 def _get_timezone(self, tz):
172 """
173 Find and return the time zone if possible
174 """
175 # special Local timezone
176 if tz == 'Local':
177 try:
178 return tzlocal.get_localzone()
179 except pytz.UnknownTimeZoneError:
180 return '?'
181
182 # we can use a country code to get tz
183 # FIXME this is broken for multi-timezone countries eg US
184 # for now we just grab the first one
185 if len(tz) == 2:
186 try:
187 zones = pytz.country_timezones(tz)
188 except KeyError:
189 return '?'
190 tz = zones[0]
191
192 # get the timezone
193 try:
194 zone = pytz.timezone(tz)
195 except pytz.UnknownTimeZoneError:
196 return '?'
197 return zone
198
199 def _change_active(self, diff):
200 self.active = (self.active + diff) % len(self.format)
201
202 def on_click(self, i3s_output_list, i3s_config, event):
203 """
204 Switch the displayed module or pass the event on to the active module
205 """
206 # reset cycle time
207 if event['button'] == self.button_reset:
208 self.active = 0
209 # reset the cycle time
210 self._cycle_time = time() + self.cycle
211 elif event['button'] == self.button_change_time_format:
212 self.active_time_format += 1
213 if self.active_time_format >= len(self.format_time):
214 self.active_time_format = 0
215 elif event['button'] == self.button_change_format:
216 self._change_active(1)
217
218 def clock(self, i3s_output_list, i3s_config):
219
220 # cycling
221 if self.cycle and time() >= self._cycle_time:
222 self._change_active(1)
223 self._cycle_time = time() + self.cycle
224
225 # update our times
226 times = {}
227 for name, zone in self._items.items():
228 if zone == '?':
229 times[name] = '?'
230 else:
231 t = datetime.now(zone)
232 format_time = self.format_time[self.active_time_format]
233 icon = None
234 if self.py3.format_contains(format_time, 'icon'):
235 # calculate the decimal hour
236 h = t.hour + t.minute / 60.
237 # make 12 hourly etc
238 h = h % self.block_hours
239 idx = int(math.floor(h / self.block_hours * (len(
240 self.blocks))))
241 icon = self.blocks[idx]
242
243 timezone = zone.zone
244 tzname = timezone.split('/')[-1].replace('_', ' ')
245
246 if self.multiple_tz:
247 name_unclear = tzname
248 timezone_unclear = timezone
249 else:
250 name_unclear = ''
251 timezone_unclear = ''
252
253 format_time = self.py3.safe_format(
254 format_time,
255 dict(
256 icon=icon,
257 name=tzname,
258 name_unclear=name_unclear,
259 timezone=timezone,
260 timezone_unclear=timezone_unclear,
261 ))
262 if self.py3.is_python_2():
263 format_time = t.strftime(format_time.encode('utf-8'))
264 else:
265 format_time = t.strftime(format_time)
266 times[name] = format_time
267
268 # work out when we need to update
269 timeout = self.py3.time_in(
270 sync_to=self.time_deltas[self.active_time_format]
271 )
272
273 # if cycling we need to make sure we update when they are needed
274 if self.cycle:
275 cycle_timeout = self._cycle_time
276 timeout = min(timeout, cycle_timeout)
277
278 return {
279 'full_text': self.py3.safe_format(self.format[self.active], times),
280 'cached_until': timeout
281 }
282
283
284 if __name__ == "__main__":
285 """
286 Run module in test mode.
287 """
288 from py3status.module_test import module_test
289 module_test(Py3status)
290
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/py3status/modules/clock.py b/py3status/modules/clock.py
--- a/py3status/modules/clock.py
+++ b/py3status/modules/clock.py
@@ -38,6 +38,10 @@
a list. The one used can be changed by button click.
*(default ['[{name_unclear} ]%c', '[{name_unclear} ]%x %X',
'[{name_unclear} ]%a %H:%M', '[{name_unclear} ]{icon}'])*
+ round_to_nearest_block: defines how a block icon is chosen. Examples:
+ when set to True, '13:14' is '🕐', '13:16' is '🕜' and '13:31' is '🕜';
+ when set to False, '13:14' is '🕐', '13:16' is '🕐' and '13:31' is '🕜'.
+ (default True)
Format placeholders:
{icon} a character representing the time from `blocks`
@@ -87,9 +91,9 @@
london
{'full_text': 'Thursday Feb 23 1:42 AM London'}
"""
+from __future__ import division
import re
-import math
from datetime import datetime
from time import time
@@ -116,6 +120,7 @@
'[{name_unclear} ]%a %H:%M',
'[{name_unclear} ]{icon}',
]
+ round_to_nearest_block = True
def post_config_hook(self):
# Multiple clocks are possible that can be cycled through
@@ -234,10 +239,11 @@
if self.py3.format_contains(format_time, 'icon'):
# calculate the decimal hour
h = t.hour + t.minute / 60.
+ if self.round_to_nearest_block:
+ h += (self.block_hours / len(self.blocks)) / 2
# make 12 hourly etc
h = h % self.block_hours
- idx = int(math.floor(h / self.block_hours * (len(
- self.blocks))))
+ idx = int(h / self.block_hours * len(self.blocks))
icon = self.blocks[idx]
timezone = zone.zone
|
{"golden_diff": "diff --git a/py3status/modules/clock.py b/py3status/modules/clock.py\n--- a/py3status/modules/clock.py\n+++ b/py3status/modules/clock.py\n@@ -38,6 +38,10 @@\n a list. The one used can be changed by button click.\n *(default ['[{name_unclear} ]%c', '[{name_unclear} ]%x %X',\n '[{name_unclear} ]%a %H:%M', '[{name_unclear} ]{icon}'])*\n+ round_to_nearest_block: defines how a block icon is chosen. Examples:\n+ when set to True, '13:14' is '\ud83d\udd50', '13:16' is '\ud83d\udd5c' and '13:31' is '\ud83d\udd5c';\n+ when set to False, '13:14' is '\ud83d\udd50', '13:16' is '\ud83d\udd50' and '13:31' is '\ud83d\udd5c'.\n+ (default True)\n \n Format placeholders:\n {icon} a character representing the time from `blocks`\n@@ -87,9 +91,9 @@\n london\n {'full_text': 'Thursday Feb 23 1:42 AM London'}\n \"\"\"\n+from __future__ import division\n \n import re\n-import math\n from datetime import datetime\n from time import time\n \n@@ -116,6 +120,7 @@\n '[{name_unclear} ]%a %H:%M',\n '[{name_unclear} ]{icon}',\n ]\n+ round_to_nearest_block = True\n \n def post_config_hook(self):\n # Multiple clocks are possible that can be cycled through\n@@ -234,10 +239,11 @@\n if self.py3.format_contains(format_time, 'icon'):\n # calculate the decimal hour\n h = t.hour + t.minute / 60.\n+ if self.round_to_nearest_block:\n+ h += (self.block_hours / len(self.blocks)) / 2\n # make 12 hourly etc\n h = h % self.block_hours\n- idx = int(math.floor(h / self.block_hours * (len(\n- self.blocks))))\n+ idx = int(h / self.block_hours * len(self.blocks))\n icon = self.blocks[idx]\n \n timezone = zone.zone\n", "issue": "[clock] It is more intuitive to round clock emoji to the nearest 15 or 45 minutes\nThe emoji clock is being used in many apps these days, and I noticed that py3status implementation is not consistent with the rest and is less intuitive to me.\r\n\r\nCurrently the clock emoji is not being updated until the hour exactly reaches the next step, so for example:\r\n- 14:59 \ud83d\udd5d\r\n\r\nInstead, it is more intuitive to choose icon for the time rounded to the nearest 15 or 45 minutes, that is:\r\n\r\n- 14:01 \ud83d\udd51\r\n- 14:14 \ud83d\udd51\r\n- 14:15 \ud83d\udd5d\r\n- 14:44 \ud83d\udd5d\r\n- 14:45 \ud83d\udd52\r\n- 14:59 \ud83d\udd52\r\n\r\n/cc @tobes as you created this module.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nDisplay date and time.\n\nThis module allows one or more datetimes to be displayed.\nAll datetimes share the same format_time but can set their own timezones.\nTimezones are defined in the `format` using the TZ name in squiggly brackets eg\n`{GMT}`, `{Portugal}`, `{Europe/Paris}`, `{America/Argentina/Buenos_Aires}`.\n\nISO-3166 two letter country codes eg `{de}` can also be used but if more than\none timezone exists for the country eg `{us}` the first one will be selected.\n\n`{Local}` can be used for the local settings of your computer.\n\nNote: Timezones are case sensitive\n\nA full list of timezones can be found at\nhttps://en.wikipedia.org/wiki/List_of_tz_database_time_zones\n\nConfiguration parameters:\n block_hours: length of time period for all blocks in hours (default 12)\n blocks: a string, where each character represents time period\n from the start of a time period.\n (default 
'\ud83d\udd5b\ud83d\udd67\ud83d\udd50\ud83d\udd5c\ud83d\udd51\ud83d\udd5d\ud83d\udd52\ud83d\udd5e\ud83d\udd53\ud83d\udd5f\ud83d\udd54\ud83d\udd60\ud83d\udd55\ud83d\udd61\ud83d\udd56\ud83d\udd62\ud83d\udd57\ud83d\udd63\ud83d\udd58\ud83d\udd64\ud83d\udd59\ud83d\udd65\ud83d\udd5a\ud83d\udd66')\n button_change_format: button that switches format used setting to None\n disables (default 1)\n button_change_time_format: button that switches format_time used. Setting\n to None disables (default 2)\n button_reset: button that switches display to the first timezone. Setting\n to None disables (default 3)\n cycle: If more than one display then how many seconds between changing the\n display (default 0)\n format: defines the timezones displayed. This can be a single string or a\n list. If a list is supplied then the formats can be cycled through\n using `cycle` or by button click. (default '{Local}')\n format_time: format to use for the time, strftime directives such as `%H`\n can be used this can be either a string or to allow multiple formats as\n a list. The one used can be changed by button click.\n *(default ['[{name_unclear} ]%c', '[{name_unclear} ]%x %X',\n '[{name_unclear} ]%a %H:%M', '[{name_unclear} ]{icon}'])*\n\nFormat placeholders:\n {icon} a character representing the time from `blocks`\n {name} friendly timezone name eg `Buenos Aires`\n {name_unclear} friendly timezone name eg `Buenos Aires` but is empty if\n only one timezone is provided\n {timezone} full timezone name eg `America/Argentina/Buenos_Aires`\n {timezone_unclear} full timezone name eg `America/Argentina/Buenos_Aires`\n but is empty if only one timezone is provided\n\n\nRequires:\n pytz: python library\n tzlocal: python library\n\ni3status.conf example:\n\n```\n# cycling through London, Warsaw, Tokyo\nclock {\n cycle = 30\n format = [\"{Europe/London}\", \"{Europe/Warsaw}\", \"{Asia/Tokyo}\"]\n format_time = \"{name} %H:%M\"\n}\n\n\n# Show the time and date in New York\nclock {\n format = \"Big Apple {America/New_York}\"\n format_time = \"%Y-%m-%d %H:%M:%S\"\n}\n\n\n# wall clocks\nclock {\n format = \"{Asia/Calcutta} {Africa/Nairobi} {Asia/Bangkok}\"\n format_time = \"{name} {icon}\"\n}\n```\n\n@author tobes\n@license BSD\n\nSAMPLE OUTPUT\n{'full_text': 'Sun 15 Jan 2017 23:27:17 GMT'}\n\nlondon\n{'full_text': 'Thursday Feb 23 1:42 AM London'}\n\"\"\"\n\nimport re\nimport math\nfrom datetime import datetime\nfrom time import time\n\nimport pytz\nimport tzlocal\n\nCLOCK_BLOCKS = u'\ud83d\udd5b\ud83d\udd67\ud83d\udd50\ud83d\udd5c\ud83d\udd51\ud83d\udd5d\ud83d\udd52\ud83d\udd5e\ud83d\udd53\ud83d\udd5f\ud83d\udd54\ud83d\udd60\ud83d\udd55\ud83d\udd61\ud83d\udd56\ud83d\udd62\ud83d\udd57\ud83d\udd63\ud83d\udd58\ud83d\udd64\ud83d\udd59\ud83d\udd65\ud83d\udd5a\ud83d\udd66'\n\n\nclass Py3status:\n \"\"\"\n \"\"\"\n # available configuration parameters\n block_hours = 12\n blocks = CLOCK_BLOCKS\n button_change_format = 1\n button_change_time_format = 2\n button_reset = 3\n cycle = 0\n format = \"{Local}\"\n format_time = [\n '[{name_unclear} ]%c',\n '[{name_unclear} ]%x %X',\n '[{name_unclear} ]%a %H:%M',\n '[{name_unclear} ]{icon}',\n ]\n\n def post_config_hook(self):\n # Multiple clocks are possible that can be cycled through\n if not isinstance(self.format, list):\n self.format = [self.format]\n # if only one item we don't need to cycle\n if len(self.format) == 1:\n self.cycle = 0\n # find any declared timezones eg {Europe/London}\n self._items = {}\n matches = re.findall('\\{([^}]*)\\}', ''.join(self.format))\n for match in 
matches:\n self._items[match] = self._get_timezone(match)\n\n self.multiple_tz = len(self._items) > 1\n\n if not isinstance(self.format_time, list):\n self.format_time = [self.format_time]\n\n # workout how often in seconds we will need to do an update to keep the\n # display fresh\n self.time_deltas = []\n for format in self.format_time:\n format_time = re.sub('\\{([^}]*)\\}', '', format)\n format_time = format_time.replace('%%', '')\n if '%f' in format_time:\n # microseconds\n time_delta = 0\n elif '%S' in format_time:\n # seconds\n time_delta = 1\n elif '%s' in format_time:\n # seconds since unix epoch start\n time_delta = 1\n elif '%T' in format_time:\n # seconds included in \"%H:%M:%S\"\n time_delta = 1\n elif '%c' in format_time:\n # Locale\u2019s appropriate date and time representation\n time_delta = 1\n elif '%X' in format_time:\n # Locale\u2019s appropriate time representation\n time_delta = 1\n else:\n time_delta = 60\n self.time_deltas.append(time_delta)\n\n self.active_time_format = 0\n\n self._cycle_time = time() + self.cycle\n self.active = 0\n\n def _get_timezone(self, tz):\n \"\"\"\n Find and return the time zone if possible\n \"\"\"\n # special Local timezone\n if tz == 'Local':\n try:\n return tzlocal.get_localzone()\n except pytz.UnknownTimeZoneError:\n return '?'\n\n # we can use a country code to get tz\n # FIXME this is broken for multi-timezone countries eg US\n # for now we just grab the first one\n if len(tz) == 2:\n try:\n zones = pytz.country_timezones(tz)\n except KeyError:\n return '?'\n tz = zones[0]\n\n # get the timezone\n try:\n zone = pytz.timezone(tz)\n except pytz.UnknownTimeZoneError:\n return '?'\n return zone\n\n def _change_active(self, diff):\n self.active = (self.active + diff) % len(self.format)\n\n def on_click(self, i3s_output_list, i3s_config, event):\n \"\"\"\n Switch the displayed module or pass the event on to the active module\n \"\"\"\n # reset cycle time\n if event['button'] == self.button_reset:\n self.active = 0\n # reset the cycle time\n self._cycle_time = time() + self.cycle\n elif event['button'] == self.button_change_time_format:\n self.active_time_format += 1\n if self.active_time_format >= len(self.format_time):\n self.active_time_format = 0\n elif event['button'] == self.button_change_format:\n self._change_active(1)\n\n def clock(self, i3s_output_list, i3s_config):\n\n # cycling\n if self.cycle and time() >= self._cycle_time:\n self._change_active(1)\n self._cycle_time = time() + self.cycle\n\n # update our times\n times = {}\n for name, zone in self._items.items():\n if zone == '?':\n times[name] = '?'\n else:\n t = datetime.now(zone)\n format_time = self.format_time[self.active_time_format]\n icon = None\n if self.py3.format_contains(format_time, 'icon'):\n # calculate the decimal hour\n h = t.hour + t.minute / 60.\n # make 12 hourly etc\n h = h % self.block_hours\n idx = int(math.floor(h / self.block_hours * (len(\n self.blocks))))\n icon = self.blocks[idx]\n\n timezone = zone.zone\n tzname = timezone.split('/')[-1].replace('_', ' ')\n\n if self.multiple_tz:\n name_unclear = tzname\n timezone_unclear = timezone\n else:\n name_unclear = ''\n timezone_unclear = ''\n\n format_time = self.py3.safe_format(\n format_time,\n dict(\n icon=icon,\n name=tzname,\n name_unclear=name_unclear,\n timezone=timezone,\n timezone_unclear=timezone_unclear,\n ))\n if self.py3.is_python_2():\n format_time = t.strftime(format_time.encode('utf-8'))\n else:\n format_time = t.strftime(format_time)\n times[name] = format_time\n\n # work out when we 
need to update\n timeout = self.py3.time_in(\n sync_to=self.time_deltas[self.active_time_format]\n )\n\n # if cycling we need to make sure we update when they are needed\n if self.cycle:\n cycle_timeout = self._cycle_time\n timeout = min(timeout, cycle_timeout)\n\n return {\n 'full_text': self.py3.safe_format(self.format[self.active], times),\n 'cached_until': timeout\n }\n\n\nif __name__ == \"__main__\":\n \"\"\"\n Run module in test mode.\n \"\"\"\n from py3status.module_test import module_test\n module_test(Py3status)\n", "path": "py3status/modules/clock.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nDisplay date and time.\n\nThis module allows one or more datetimes to be displayed.\nAll datetimes share the same format_time but can set their own timezones.\nTimezones are defined in the `format` using the TZ name in squiggly brackets eg\n`{GMT}`, `{Portugal}`, `{Europe/Paris}`, `{America/Argentina/Buenos_Aires}`.\n\nISO-3166 two letter country codes eg `{de}` can also be used but if more than\none timezone exists for the country eg `{us}` the first one will be selected.\n\n`{Local}` can be used for the local settings of your computer.\n\nNote: Timezones are case sensitive\n\nA full list of timezones can be found at\nhttps://en.wikipedia.org/wiki/List_of_tz_database_time_zones\n\nConfiguration parameters:\n block_hours: length of time period for all blocks in hours (default 12)\n blocks: a string, where each character represents time period\n from the start of a time period.\n (default '\ud83d\udd5b\ud83d\udd67\ud83d\udd50\ud83d\udd5c\ud83d\udd51\ud83d\udd5d\ud83d\udd52\ud83d\udd5e\ud83d\udd53\ud83d\udd5f\ud83d\udd54\ud83d\udd60\ud83d\udd55\ud83d\udd61\ud83d\udd56\ud83d\udd62\ud83d\udd57\ud83d\udd63\ud83d\udd58\ud83d\udd64\ud83d\udd59\ud83d\udd65\ud83d\udd5a\ud83d\udd66')\n button_change_format: button that switches format used setting to None\n disables (default 1)\n button_change_time_format: button that switches format_time used. Setting\n to None disables (default 2)\n button_reset: button that switches display to the first timezone. Setting\n to None disables (default 3)\n cycle: If more than one display then how many seconds between changing the\n display (default 0)\n format: defines the timezones displayed. This can be a single string or a\n list. If a list is supplied then the formats can be cycled through\n using `cycle` or by button click. (default '{Local}')\n format_time: format to use for the time, strftime directives such as `%H`\n can be used this can be either a string or to allow multiple formats as\n a list. The one used can be changed by button click.\n *(default ['[{name_unclear} ]%c', '[{name_unclear} ]%x %X',\n '[{name_unclear} ]%a %H:%M', '[{name_unclear} ]{icon}'])*\n round_to_nearest_block: defines how a block icon is chosen. 
Examples:\n when set to True, '13:14' is '\ud83d\udd50', '13:16' is '\ud83d\udd5c' and '13:31' is '\ud83d\udd5c';\n when set to False, '13:14' is '\ud83d\udd50', '13:16' is '\ud83d\udd50' and '13:31' is '\ud83d\udd5c'.\n (default True)\n\nFormat placeholders:\n {icon} a character representing the time from `blocks`\n {name} friendly timezone name eg `Buenos Aires`\n {name_unclear} friendly timezone name eg `Buenos Aires` but is empty if\n only one timezone is provided\n {timezone} full timezone name eg `America/Argentina/Buenos_Aires`\n {timezone_unclear} full timezone name eg `America/Argentina/Buenos_Aires`\n but is empty if only one timezone is provided\n\n\nRequires:\n pytz: python library\n tzlocal: python library\n\ni3status.conf example:\n\n```\n# cycling through London, Warsaw, Tokyo\nclock {\n cycle = 30\n format = [\"{Europe/London}\", \"{Europe/Warsaw}\", \"{Asia/Tokyo}\"]\n format_time = \"{name} %H:%M\"\n}\n\n\n# Show the time and date in New York\nclock {\n format = \"Big Apple {America/New_York}\"\n format_time = \"%Y-%m-%d %H:%M:%S\"\n}\n\n\n# wall clocks\nclock {\n format = \"{Asia/Calcutta} {Africa/Nairobi} {Asia/Bangkok}\"\n format_time = \"{name} {icon}\"\n}\n```\n\n@author tobes\n@license BSD\n\nSAMPLE OUTPUT\n{'full_text': 'Sun 15 Jan 2017 23:27:17 GMT'}\n\nlondon\n{'full_text': 'Thursday Feb 23 1:42 AM London'}\n\"\"\"\nfrom __future__ import division\n\nimport re\nfrom datetime import datetime\nfrom time import time\n\nimport pytz\nimport tzlocal\n\nCLOCK_BLOCKS = u'\ud83d\udd5b\ud83d\udd67\ud83d\udd50\ud83d\udd5c\ud83d\udd51\ud83d\udd5d\ud83d\udd52\ud83d\udd5e\ud83d\udd53\ud83d\udd5f\ud83d\udd54\ud83d\udd60\ud83d\udd55\ud83d\udd61\ud83d\udd56\ud83d\udd62\ud83d\udd57\ud83d\udd63\ud83d\udd58\ud83d\udd64\ud83d\udd59\ud83d\udd65\ud83d\udd5a\ud83d\udd66'\n\n\nclass Py3status:\n \"\"\"\n \"\"\"\n # available configuration parameters\n block_hours = 12\n blocks = CLOCK_BLOCKS\n button_change_format = 1\n button_change_time_format = 2\n button_reset = 3\n cycle = 0\n format = \"{Local}\"\n format_time = [\n '[{name_unclear} ]%c',\n '[{name_unclear} ]%x %X',\n '[{name_unclear} ]%a %H:%M',\n '[{name_unclear} ]{icon}',\n ]\n round_to_nearest_block = True\n\n def post_config_hook(self):\n # Multiple clocks are possible that can be cycled through\n if not isinstance(self.format, list):\n self.format = [self.format]\n # if only one item we don't need to cycle\n if len(self.format) == 1:\n self.cycle = 0\n # find any declared timezones eg {Europe/London}\n self._items = {}\n matches = re.findall('\\{([^}]*)\\}', ''.join(self.format))\n for match in matches:\n self._items[match] = self._get_timezone(match)\n\n self.multiple_tz = len(self._items) > 1\n\n if not isinstance(self.format_time, list):\n self.format_time = [self.format_time]\n\n # workout how often in seconds we will need to do an update to keep the\n # display fresh\n self.time_deltas = []\n for format in self.format_time:\n format_time = re.sub('\\{([^}]*)\\}', '', format)\n format_time = format_time.replace('%%', '')\n if '%f' in format_time:\n # microseconds\n time_delta = 0\n elif '%S' in format_time:\n # seconds\n time_delta = 1\n elif '%s' in format_time:\n # seconds since unix epoch start\n time_delta = 1\n elif '%T' in format_time:\n # seconds included in \"%H:%M:%S\"\n time_delta = 1\n elif '%c' in format_time:\n # Locale\u2019s appropriate date and time representation\n time_delta = 1\n elif '%X' in format_time:\n # Locale\u2019s appropriate time representation\n time_delta = 1\n else:\n time_delta = 60\n 
self.time_deltas.append(time_delta)\n\n self.active_time_format = 0\n\n self._cycle_time = time() + self.cycle\n self.active = 0\n\n def _get_timezone(self, tz):\n \"\"\"\n Find and return the time zone if possible\n \"\"\"\n # special Local timezone\n if tz == 'Local':\n try:\n return tzlocal.get_localzone()\n except pytz.UnknownTimeZoneError:\n return '?'\n\n # we can use a country code to get tz\n # FIXME this is broken for multi-timezone countries eg US\n # for now we just grab the first one\n if len(tz) == 2:\n try:\n zones = pytz.country_timezones(tz)\n except KeyError:\n return '?'\n tz = zones[0]\n\n # get the timezone\n try:\n zone = pytz.timezone(tz)\n except pytz.UnknownTimeZoneError:\n return '?'\n return zone\n\n def _change_active(self, diff):\n self.active = (self.active + diff) % len(self.format)\n\n def on_click(self, i3s_output_list, i3s_config, event):\n \"\"\"\n Switch the displayed module or pass the event on to the active module\n \"\"\"\n # reset cycle time\n if event['button'] == self.button_reset:\n self.active = 0\n # reset the cycle time\n self._cycle_time = time() + self.cycle\n elif event['button'] == self.button_change_time_format:\n self.active_time_format += 1\n if self.active_time_format >= len(self.format_time):\n self.active_time_format = 0\n elif event['button'] == self.button_change_format:\n self._change_active(1)\n\n def clock(self, i3s_output_list, i3s_config):\n\n # cycling\n if self.cycle and time() >= self._cycle_time:\n self._change_active(1)\n self._cycle_time = time() + self.cycle\n\n # update our times\n times = {}\n for name, zone in self._items.items():\n if zone == '?':\n times[name] = '?'\n else:\n t = datetime.now(zone)\n format_time = self.format_time[self.active_time_format]\n icon = None\n if self.py3.format_contains(format_time, 'icon'):\n # calculate the decimal hour\n h = t.hour + t.minute / 60.\n if self.round_to_nearest_block:\n h += (self.block_hours / len(self.blocks)) / 2\n # make 12 hourly etc\n h = h % self.block_hours\n idx = int(h / self.block_hours * len(self.blocks))\n icon = self.blocks[idx]\n\n timezone = zone.zone\n tzname = timezone.split('/')[-1].replace('_', ' ')\n\n if self.multiple_tz:\n name_unclear = tzname\n timezone_unclear = timezone\n else:\n name_unclear = ''\n timezone_unclear = ''\n\n format_time = self.py3.safe_format(\n format_time,\n dict(\n icon=icon,\n name=tzname,\n name_unclear=name_unclear,\n timezone=timezone,\n timezone_unclear=timezone_unclear,\n ))\n if self.py3.is_python_2():\n format_time = t.strftime(format_time.encode('utf-8'))\n else:\n format_time = t.strftime(format_time)\n times[name] = format_time\n\n # work out when we need to update\n timeout = self.py3.time_in(\n sync_to=self.time_deltas[self.active_time_format]\n )\n\n # if cycling we need to make sure we update when they are needed\n if self.cycle:\n cycle_timeout = self._cycle_time\n timeout = min(timeout, cycle_timeout)\n\n return {\n 'full_text': self.py3.safe_format(self.format[self.active], times),\n 'cached_until': timeout\n }\n\n\nif __name__ == \"__main__\":\n \"\"\"\n Run module in test mode.\n \"\"\"\n from py3status.module_test import module_test\n module_test(Py3status)\n", "path": "py3status/modules/clock.py"}]}
| 3,495 | 518 |
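The behavioral change captured in the golden diff of the row above is easiest to see in isolation. The Python 3 sketch below is illustrative only: `block_icon` is a hypothetical standalone helper that mirrors the patched icon-selection arithmetic (`h += (block_hours / len(blocks)) / 2` followed by truncation); it is not part of py3status, and the printed values assume the default 12-hour period and 24-character block string.

```python
# Standalone sketch of the patched icon selection; not part of py3status.
CLOCK_BLOCKS = "🕛🕧🕐🕜🕑🕝🕒🕞🕓🕟🕔🕠🕕🕡🕖🕢🕗🕣🕘🕤🕙🕥🕚🕦"


def block_icon(hour, minute, block_hours=12, blocks=CLOCK_BLOCKS, round_to_nearest_block=True):
    h = hour + minute / 60.0          # decimal hour, as in the module
    if round_to_nearest_block:
        # shift by half a block so the icon flips at the midpoint of each block
        h += (block_hours / len(blocks)) / 2
    h = h % block_hours                # wrap into one 12-hour period
    return blocks[int(h / block_hours * len(blocks))]


for hh, mm in [(13, 14), (13, 16), (13, 31)]:
    print(hh, mm, block_icon(hh, mm), block_icon(hh, mm, round_to_nearest_block=False))
# 13 14 🕐 🕐
# 13 16 🕜 🕐
# 13 31 🕜 🕜
```

With rounding enabled the icon changes at the midpoint between blocks (13:16 already shows 🕜), matching the examples given in the issue text of this row.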
gh_patches_debug_39257
|
rasdani/github-patches
|
git_diff
|
cal-itp__benefits-336
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Update eligibility verification form to use dynamic data
## Background
This builds on #208 and #319 to incorporate specific `EligibilityVerifier` properties into the [eligibility verification form](https://github.com/cal-itp/benefits/blob/dev/benefits/eligibility/forms.py#L15)
## Tasks
* [x] Create an `__init__()` method on the form class accepting a new kwarg `verifier` of type `EligibilityVerifier`
* [x] If the `verifier.form_sub_pattern` field has a value, add a `pattern` attribute with the value to the `sub` input via [Widget.attrs](https://docs.djangoproject.com/en/3.2/ref/forms/widgets/#django.forms.Widget.attrs)
* [x] If the `verifier.form_name_max_length` field has a value, add a `maxlength` attribute to the `name` input via Widget.attrs
* [x] Use attributes from the `verifier` as `msgid` keys for labels etc.
* [x] Rework the [`eligibility:confirm` view](https://github.com/cal-itp/benefits/blob/dev/benefits/eligibility/views.py#L46) to initialize the `Page` viewmodel and eligibility verification form from the selected verifier
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `benefits/eligibility/views.py`
Content:
```
1 """
2 The eligibility application: view definitions for the eligibility verification flow.
3 """
4 from django.contrib import messages
5 from django.shortcuts import redirect
6 from django.urls import reverse
7 from django.utils.decorators import decorator_from_middleware
8 from django.utils.translation import pgettext, gettext as _
9
10 from benefits.core import middleware, recaptcha, session, viewmodels
11 from benefits.core.views import PageTemplateResponse, _index_image
12 from . import analytics, api, forms
13
14
15 @decorator_from_middleware(middleware.AgencySessionRequired)
16 @decorator_from_middleware(middleware.VerifierSessionRequired)
17 def index(request):
18 """View handler for the eligibility verification getting started screen."""
19
20 session.update(request, eligibility_types=[], origin=reverse("eligibility:index"))
21
22 page = viewmodels.Page(
23 title=_("eligibility.pages.index.title"),
24 content_title=_("eligibility.pages.index.content_title"),
25 media=[
26 viewmodels.MediaItem(
27 icon=viewmodels.Icon("idcardcheck", pgettext("image alt text", "core.icons.idcardcheck")),
28 heading=_("eligibility.pages.index.items[0].title"),
29 details=_("eligibility.pages.index.items[0].text"),
30 ),
31 viewmodels.MediaItem(
32 icon=viewmodels.Icon("bankcardcheck", pgettext("image alt text", "core.icons.bankcardcheck")),
33 heading=_("eligibility.pages.index.items[1].title"),
34 details=_("eligibility.pages.index.items[1].text"),
35 ),
36 ],
37 paragraphs=[_("eligibility.pages.index.p[0]")],
38 image=_index_image(),
39 button=viewmodels.Button.primary(text=_("eligibility.pages.index.button"), url=reverse("eligibility:confirm")),
40 )
41
42 return PageTemplateResponse(request, page)
43
44
45 @decorator_from_middleware(middleware.AgencySessionRequired)
46 @decorator_from_middleware(middleware.RateLimit)
47 @decorator_from_middleware(middleware.VerifierSessionRequired)
48 def confirm(request):
49 """View handler for the eligibility verification form."""
50
51 page = viewmodels.Page(
52 title=_("eligibility.pages.confirm.title"),
53 content_title=_("eligibility.pages.confirm.content_title"),
54 paragraphs=[_("eligibility.pages.confirm.p[0]")],
55 form=forms.EligibilityVerificationForm(auto_id=True, label_suffix=""),
56 classes="text-lg-center",
57 )
58
59 if request.method == "POST":
60 analytics.started_eligibility(request)
61
62 form = forms.EligibilityVerificationForm(request.POST)
63 response = _verify(request, form)
64
65 if response is None:
66 # form was not valid, allow for correction/resubmission
67 analytics.returned_error(request, form.errors)
68 page.forms = [form]
69 response = PageTemplateResponse(request, page)
70 elif session.eligible(request):
71 eligibility = session.eligibility(request)
72 response = verified(request, [eligibility.name])
73 else:
74 response = PageTemplateResponse(request, page)
75
76 return response
77
78
79 def _verify(request, form):
80 """Helper calls the eligibility verification API with user input."""
81
82 if not form.is_valid():
83 if recaptcha.has_error(form):
84 messages.error(request, "Recaptcha failed. Please try again.")
85 return None
86
87 sub, name = form.cleaned_data.get("sub"), form.cleaned_data.get("name")
88
89 agency = session.agency(request)
90 verifier = session.verifier(request)
91 client = api.Client(agency, verifier)
92
93 response = client.verify(sub, name)
94
95 if response.error and any(response.error):
96 form.add_api_errors(response.error)
97 return None
98 elif any(response.eligibility):
99 return verified(request, response.eligibility)
100 else:
101 return unverified(request)
102
103
104 @decorator_from_middleware(middleware.AgencySessionRequired)
105 def verified(request, verified_types):
106 """View handler for the verified eligibility page."""
107
108 analytics.returned_success(request)
109
110 enrollment_index = reverse("enrollment:index")
111 session.update(request, eligibility_types=verified_types, origin=enrollment_index)
112
113 return redirect(enrollment_index)
114
115
116 @decorator_from_middleware(middleware.AgencySessionRequired)
117 def unverified(request):
118 """View handler for the unverified eligibility page."""
119
120 analytics.returned_fail(request)
121
122 # tel: link to agency phone number
123 agency = session.agency(request)
124 buttons = viewmodels.Button.agency_contact_links(agency)
125
126 page = viewmodels.Page(
127 title=_("eligibility.pages.unverified.title"),
128 content_title=_("eligibility.pages.unverified.content_title"),
129 icon=viewmodels.Icon("idcardquestion", pgettext("image alt text", "core.icons.idcardquestion")),
130 paragraphs=[_("eligibility.pages.unverified.p[0]"), _("eligibility.pages.unverified.p[1]")],
131 buttons=buttons,
132 classes="text-lg-center",
133 )
134
135 return PageTemplateResponse(request, page)
136
```
Path: `benefits/eligibility/forms.py`
Content:
```
1 """
2 The eligibility application: Form definition for the eligibility verification flow.
3 """
4 import logging
5
6 from django import forms
7 from django.utils.translation import gettext_lazy as _
8
9 from benefits.core import recaptcha, widgets
10
11
12 logger = logging.getLogger(__name__)
13
14
15 class EligibilityVerificationForm(forms.Form):
16 """Form to collect eligibility verification details."""
17
18 action_url = "eligibility:confirm"
19 method = "POST"
20
21 sub = forms.CharField(
22 label=_("eligibility.forms.confirm.fields.sub"), widget=widgets.FormControlTextInput(placeholder="A1234567")
23 )
24
25 name = forms.CharField(
26 label=_("eligibility.forms.confirm.fields.name"), widget=widgets.FormControlTextInput(placeholder="Rodriguez")
27 )
28
29 submit_value = _("eligibility.forms.confirm.submit")
30 submitting_value = _("eligibility.forms.confirm.submitting")
31
32 _error_messages = {
33 "invalid": _("eligibility.forms.confirm.errors.invalid"),
34 "missing": _("eligibility.forms.confirm.errors.missing"),
35 }
36
37 def add_api_errors(self, form_errors):
38 """Handle errors passed back from API server related to submitted form values."""
39
40 validation_errors = {
41 field: forms.ValidationError(self._error_messages.get(code, _("core.pages.error.title")), code=code)
42 for (field, code) in form_errors.items()
43 if field in self.fields
44 }
45
46 if len(validation_errors) > 0:
47 logger.warning("Form fields are invalid")
48
49 for (field, err) in validation_errors.items():
50 self.add_error(field, err)
51
52 def clean(self):
53 if not recaptcha.verify(self.data):
54 raise forms.ValidationError("reCAPTCHA failed")
55
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/benefits/eligibility/forms.py b/benefits/eligibility/forms.py
--- a/benefits/eligibility/forms.py
+++ b/benefits/eligibility/forms.py
@@ -6,7 +6,7 @@
from django import forms
from django.utils.translation import gettext_lazy as _
-from benefits.core import recaptcha, widgets
+from benefits.core import models, recaptcha, widgets
logger = logging.getLogger(__name__)
@@ -18,14 +18,6 @@
action_url = "eligibility:confirm"
method = "POST"
- sub = forms.CharField(
- label=_("eligibility.forms.confirm.fields.sub"), widget=widgets.FormControlTextInput(placeholder="A1234567")
- )
-
- name = forms.CharField(
- label=_("eligibility.forms.confirm.fields.name"), widget=widgets.FormControlTextInput(placeholder="Rodriguez")
- )
-
submit_value = _("eligibility.forms.confirm.submit")
submitting_value = _("eligibility.forms.confirm.submitting")
@@ -34,6 +26,21 @@
"missing": _("eligibility.forms.confirm.errors.missing"),
}
+ def __init__(self, verifier: models.EligibilityVerifier, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ sub_widget = widgets.FormControlTextInput(placeholder=verifier.form_sub_placeholder)
+ if verifier.form_sub_pattern:
+ sub_widget.attrs.update({"pattern": verifier.form_sub_pattern})
+
+ self.fields["sub"] = forms.CharField(label=_(verifier.form_sub_label), widget=sub_widget)
+
+ name_widget = widgets.FormControlTextInput(placeholder=verifier.form_name_placeholder)
+ if verifier.form_name_max_length:
+ name_widget.attrs.update({"maxlength": verifier.form_name_max_length})
+
+ self.fields["name"] = forms.CharField(label=_(verifier.form_name_label), widget=name_widget)
+
def add_api_errors(self, form_errors):
"""Handle errors passed back from API server related to submitted form values."""
diff --git a/benefits/eligibility/views.py b/benefits/eligibility/views.py
--- a/benefits/eligibility/views.py
+++ b/benefits/eligibility/views.py
@@ -48,18 +48,20 @@
def confirm(request):
"""View handler for the eligibility verification form."""
+ verifier = session.verifier(request)
+
page = viewmodels.Page(
- title=_("eligibility.pages.confirm.title"),
- content_title=_("eligibility.pages.confirm.content_title"),
- paragraphs=[_("eligibility.pages.confirm.p[0]")],
- form=forms.EligibilityVerificationForm(auto_id=True, label_suffix=""),
+ title=_(verifier.form_title),
+ content_title=_(verifier.form_content_title),
+ paragraphs=[_(verifier.form_blurb)],
+ form=forms.EligibilityVerificationForm(auto_id=True, label_suffix="", verifier=verifier),
classes="text-lg-center",
)
if request.method == "POST":
analytics.started_eligibility(request)
- form = forms.EligibilityVerificationForm(request.POST)
+ form = forms.EligibilityVerificationForm(data=request.POST, verifier=verifier)
response = _verify(request, form)
if response is None:
|
{"golden_diff": "diff --git a/benefits/eligibility/forms.py b/benefits/eligibility/forms.py\n--- a/benefits/eligibility/forms.py\n+++ b/benefits/eligibility/forms.py\n@@ -6,7 +6,7 @@\n from django import forms\n from django.utils.translation import gettext_lazy as _\n \n-from benefits.core import recaptcha, widgets\n+from benefits.core import models, recaptcha, widgets\n \n \n logger = logging.getLogger(__name__)\n@@ -18,14 +18,6 @@\n action_url = \"eligibility:confirm\"\n method = \"POST\"\n \n- sub = forms.CharField(\n- label=_(\"eligibility.forms.confirm.fields.sub\"), widget=widgets.FormControlTextInput(placeholder=\"A1234567\")\n- )\n-\n- name = forms.CharField(\n- label=_(\"eligibility.forms.confirm.fields.name\"), widget=widgets.FormControlTextInput(placeholder=\"Rodriguez\")\n- )\n-\n submit_value = _(\"eligibility.forms.confirm.submit\")\n submitting_value = _(\"eligibility.forms.confirm.submitting\")\n \n@@ -34,6 +26,21 @@\n \"missing\": _(\"eligibility.forms.confirm.errors.missing\"),\n }\n \n+ def __init__(self, verifier: models.EligibilityVerifier, *args, **kwargs):\n+ super().__init__(*args, **kwargs)\n+\n+ sub_widget = widgets.FormControlTextInput(placeholder=verifier.form_sub_placeholder)\n+ if verifier.form_sub_pattern:\n+ sub_widget.attrs.update({\"pattern\": verifier.form_sub_pattern})\n+\n+ self.fields[\"sub\"] = forms.CharField(label=_(verifier.form_sub_label), widget=sub_widget)\n+\n+ name_widget = widgets.FormControlTextInput(placeholder=verifier.form_name_placeholder)\n+ if verifier.form_name_max_length:\n+ name_widget.attrs.update({\"maxlength\": verifier.form_name_max_length})\n+\n+ self.fields[\"name\"] = forms.CharField(label=_(verifier.form_name_label), widget=name_widget)\n+\n def add_api_errors(self, form_errors):\n \"\"\"Handle errors passed back from API server related to submitted form values.\"\"\"\n \ndiff --git a/benefits/eligibility/views.py b/benefits/eligibility/views.py\n--- a/benefits/eligibility/views.py\n+++ b/benefits/eligibility/views.py\n@@ -48,18 +48,20 @@\n def confirm(request):\n \"\"\"View handler for the eligibility verification form.\"\"\"\n \n+ verifier = session.verifier(request)\n+\n page = viewmodels.Page(\n- title=_(\"eligibility.pages.confirm.title\"),\n- content_title=_(\"eligibility.pages.confirm.content_title\"),\n- paragraphs=[_(\"eligibility.pages.confirm.p[0]\")],\n- form=forms.EligibilityVerificationForm(auto_id=True, label_suffix=\"\"),\n+ title=_(verifier.form_title),\n+ content_title=_(verifier.form_content_title),\n+ paragraphs=[_(verifier.form_blurb)],\n+ form=forms.EligibilityVerificationForm(auto_id=True, label_suffix=\"\", verifier=verifier),\n classes=\"text-lg-center\",\n )\n \n if request.method == \"POST\":\n analytics.started_eligibility(request)\n \n- form = forms.EligibilityVerificationForm(request.POST)\n+ form = forms.EligibilityVerificationForm(data=request.POST, verifier=verifier)\n response = _verify(request, form)\n \n if response is None:\n", "issue": "Update eligibility verification form to use dynamic data\n## Background\r\n\r\nThis builds on #208 and #319 to incorporate specific `EligibilityVerifier` properties into the [eligibility verification form](https://github.com/cal-itp/benefits/blob/dev/benefits/eligibility/forms.py#L15)\r\n\r\n## Tasks\r\n\r\n* [x] Create an `__init__()` method on the form class accepting a new kwarg `verifier` of type `EligibilityVerifier`\r\n* [x] If the `verifier.form_sub_pattern` field has a value, add a `pattern` attribute with the value to the `sub` input via 
[Widget.attrs](https://docs.djangoproject.com/en/3.2/ref/forms/widgets/#django.forms.Widget.attrs)\r\n* [x] If the `verifier.form_name_max_length` field has a value, add a `maxlength` attribute to the `name` input via Widgets.attrs\r\n* [x] Use attributes from the `verifier` as `msgid` keys for labels etc.\r\n* [x] Rework the [`eligibility:confirm` view](https://github.com/cal-itp/benefits/blob/dev/benefits/eligibility/views.py#L46) to initialize the `Page` viewmodel and eligibility verification form from the selected verifier\n", "before_files": [{"content": "\"\"\"\nThe eligibility application: view definitions for the eligibility verification flow.\n\"\"\"\nfrom django.contrib import messages\nfrom django.shortcuts import redirect\nfrom django.urls import reverse\nfrom django.utils.decorators import decorator_from_middleware\nfrom django.utils.translation import pgettext, gettext as _\n\nfrom benefits.core import middleware, recaptcha, session, viewmodels\nfrom benefits.core.views import PageTemplateResponse, _index_image\nfrom . import analytics, api, forms\n\n\n@decorator_from_middleware(middleware.AgencySessionRequired)\n@decorator_from_middleware(middleware.VerifierSessionRequired)\ndef index(request):\n \"\"\"View handler for the eligibility verification getting started screen.\"\"\"\n\n session.update(request, eligibility_types=[], origin=reverse(\"eligibility:index\"))\n\n page = viewmodels.Page(\n title=_(\"eligibility.pages.index.title\"),\n content_title=_(\"eligibility.pages.index.content_title\"),\n media=[\n viewmodels.MediaItem(\n icon=viewmodels.Icon(\"idcardcheck\", pgettext(\"image alt text\", \"core.icons.idcardcheck\")),\n heading=_(\"eligibility.pages.index.items[0].title\"),\n details=_(\"eligibility.pages.index.items[0].text\"),\n ),\n viewmodels.MediaItem(\n icon=viewmodels.Icon(\"bankcardcheck\", pgettext(\"image alt text\", \"core.icons.bankcardcheck\")),\n heading=_(\"eligibility.pages.index.items[1].title\"),\n details=_(\"eligibility.pages.index.items[1].text\"),\n ),\n ],\n paragraphs=[_(\"eligibility.pages.index.p[0]\")],\n image=_index_image(),\n button=viewmodels.Button.primary(text=_(\"eligibility.pages.index.button\"), url=reverse(\"eligibility:confirm\")),\n )\n\n return PageTemplateResponse(request, page)\n\n\n@decorator_from_middleware(middleware.AgencySessionRequired)\n@decorator_from_middleware(middleware.RateLimit)\n@decorator_from_middleware(middleware.VerifierSessionRequired)\ndef confirm(request):\n \"\"\"View handler for the eligibility verification form.\"\"\"\n\n page = viewmodels.Page(\n title=_(\"eligibility.pages.confirm.title\"),\n content_title=_(\"eligibility.pages.confirm.content_title\"),\n paragraphs=[_(\"eligibility.pages.confirm.p[0]\")],\n form=forms.EligibilityVerificationForm(auto_id=True, label_suffix=\"\"),\n classes=\"text-lg-center\",\n )\n\n if request.method == \"POST\":\n analytics.started_eligibility(request)\n\n form = forms.EligibilityVerificationForm(request.POST)\n response = _verify(request, form)\n\n if response is None:\n # form was not valid, allow for correction/resubmission\n analytics.returned_error(request, form.errors)\n page.forms = [form]\n response = PageTemplateResponse(request, page)\n elif session.eligible(request):\n eligibility = session.eligibility(request)\n response = verified(request, [eligibility.name])\n else:\n response = PageTemplateResponse(request, page)\n\n return response\n\n\ndef _verify(request, form):\n \"\"\"Helper calls the eligibility verification API with user input.\"\"\"\n\n if 
not form.is_valid():\n if recaptcha.has_error(form):\n messages.error(request, \"Recaptcha failed. Please try again.\")\n return None\n\n sub, name = form.cleaned_data.get(\"sub\"), form.cleaned_data.get(\"name\")\n\n agency = session.agency(request)\n verifier = session.verifier(request)\n client = api.Client(agency, verifier)\n\n response = client.verify(sub, name)\n\n if response.error and any(response.error):\n form.add_api_errors(response.error)\n return None\n elif any(response.eligibility):\n return verified(request, response.eligibility)\n else:\n return unverified(request)\n\n\n@decorator_from_middleware(middleware.AgencySessionRequired)\ndef verified(request, verified_types):\n \"\"\"View handler for the verified eligibility page.\"\"\"\n\n analytics.returned_success(request)\n\n enrollment_index = reverse(\"enrollment:index\")\n session.update(request, eligibility_types=verified_types, origin=enrollment_index)\n\n return redirect(enrollment_index)\n\n\n@decorator_from_middleware(middleware.AgencySessionRequired)\ndef unverified(request):\n \"\"\"View handler for the unverified eligibility page.\"\"\"\n\n analytics.returned_fail(request)\n\n # tel: link to agency phone number\n agency = session.agency(request)\n buttons = viewmodels.Button.agency_contact_links(agency)\n\n page = viewmodels.Page(\n title=_(\"eligibility.pages.unverified.title\"),\n content_title=_(\"eligibility.pages.unverified.content_title\"),\n icon=viewmodels.Icon(\"idcardquestion\", pgettext(\"image alt text\", \"core.icons.idcardquestion\")),\n paragraphs=[_(\"eligibility.pages.unverified.p[0]\"), _(\"eligibility.pages.unverified.p[1]\")],\n buttons=buttons,\n classes=\"text-lg-center\",\n )\n\n return PageTemplateResponse(request, page)\n", "path": "benefits/eligibility/views.py"}, {"content": "\"\"\"\nThe eligibility application: Form definition for the eligibility verification flow.\n\"\"\"\nimport logging\n\nfrom django import forms\nfrom django.utils.translation import gettext_lazy as _\n\nfrom benefits.core import recaptcha, widgets\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass EligibilityVerificationForm(forms.Form):\n \"\"\"Form to collect eligibility verification details.\"\"\"\n\n action_url = \"eligibility:confirm\"\n method = \"POST\"\n\n sub = forms.CharField(\n label=_(\"eligibility.forms.confirm.fields.sub\"), widget=widgets.FormControlTextInput(placeholder=\"A1234567\")\n )\n\n name = forms.CharField(\n label=_(\"eligibility.forms.confirm.fields.name\"), widget=widgets.FormControlTextInput(placeholder=\"Rodriguez\")\n )\n\n submit_value = _(\"eligibility.forms.confirm.submit\")\n submitting_value = _(\"eligibility.forms.confirm.submitting\")\n\n _error_messages = {\n \"invalid\": _(\"eligibility.forms.confirm.errors.invalid\"),\n \"missing\": _(\"eligibility.forms.confirm.errors.missing\"),\n }\n\n def add_api_errors(self, form_errors):\n \"\"\"Handle errors passed back from API server related to submitted form values.\"\"\"\n\n validation_errors = {\n field: forms.ValidationError(self._error_messages.get(code, _(\"core.pages.error.title\")), code=code)\n for (field, code) in form_errors.items()\n if field in self.fields\n }\n\n if len(validation_errors) > 0:\n logger.warning(\"Form fields are invalid\")\n\n for (field, err) in validation_errors.items():\n self.add_error(field, err)\n\n def clean(self):\n if not recaptcha.verify(self.data):\n raise forms.ValidationError(\"reCAPTCHA failed\")\n", "path": "benefits/eligibility/forms.py"}], "after_files": [{"content": "\"\"\"\nThe 
eligibility application: view definitions for the eligibility verification flow.\n\"\"\"\nfrom django.contrib import messages\nfrom django.shortcuts import redirect\nfrom django.urls import reverse\nfrom django.utils.decorators import decorator_from_middleware\nfrom django.utils.translation import pgettext, gettext as _\n\nfrom benefits.core import middleware, recaptcha, session, viewmodels\nfrom benefits.core.views import PageTemplateResponse, _index_image\nfrom . import analytics, api, forms\n\n\n@decorator_from_middleware(middleware.AgencySessionRequired)\n@decorator_from_middleware(middleware.VerifierSessionRequired)\ndef index(request):\n \"\"\"View handler for the eligibility verification getting started screen.\"\"\"\n\n session.update(request, eligibility_types=[], origin=reverse(\"eligibility:index\"))\n\n page = viewmodels.Page(\n title=_(\"eligibility.pages.index.title\"),\n content_title=_(\"eligibility.pages.index.content_title\"),\n media=[\n viewmodels.MediaItem(\n icon=viewmodels.Icon(\"idcardcheck\", pgettext(\"image alt text\", \"core.icons.idcardcheck\")),\n heading=_(\"eligibility.pages.index.items[0].title\"),\n details=_(\"eligibility.pages.index.items[0].text\"),\n ),\n viewmodels.MediaItem(\n icon=viewmodels.Icon(\"bankcardcheck\", pgettext(\"image alt text\", \"core.icons.bankcardcheck\")),\n heading=_(\"eligibility.pages.index.items[1].title\"),\n details=_(\"eligibility.pages.index.items[1].text\"),\n ),\n ],\n paragraphs=[_(\"eligibility.pages.index.p[0]\")],\n image=_index_image(),\n button=viewmodels.Button.primary(text=_(\"eligibility.pages.index.button\"), url=reverse(\"eligibility:confirm\")),\n )\n\n return PageTemplateResponse(request, page)\n\n\n@decorator_from_middleware(middleware.AgencySessionRequired)\n@decorator_from_middleware(middleware.RateLimit)\n@decorator_from_middleware(middleware.VerifierSessionRequired)\ndef confirm(request):\n \"\"\"View handler for the eligibility verification form.\"\"\"\n\n verifier = session.verifier(request)\n\n page = viewmodels.Page(\n title=_(verifier.form_title),\n content_title=_(verifier.form_content_title),\n paragraphs=[_(verifier.form_blurb)],\n form=forms.EligibilityVerificationForm(auto_id=True, label_suffix=\"\", verifier=verifier),\n classes=\"text-lg-center\",\n )\n\n if request.method == \"POST\":\n analytics.started_eligibility(request)\n\n form = forms.EligibilityVerificationForm(data=request.POST, verifier=verifier)\n response = _verify(request, form)\n\n if response is None:\n # form was not valid, allow for correction/resubmission\n analytics.returned_error(request, form.errors)\n page.forms = [form]\n response = PageTemplateResponse(request, page)\n elif session.eligible(request):\n eligibility = session.eligibility(request)\n response = verified(request, [eligibility.name])\n else:\n response = PageTemplateResponse(request, page)\n\n return response\n\n\ndef _verify(request, form):\n \"\"\"Helper calls the eligibility verification API with user input.\"\"\"\n\n if not form.is_valid():\n if recaptcha.has_error(form):\n messages.error(request, \"Recaptcha failed. 
Please try again.\")\n return None\n\n sub, name = form.cleaned_data.get(\"sub\"), form.cleaned_data.get(\"name\")\n\n agency = session.agency(request)\n verifier = session.verifier(request)\n client = api.Client(agency, verifier)\n\n response = client.verify(sub, name)\n\n if response.error and any(response.error):\n form.add_api_errors(response.error)\n return None\n elif any(response.eligibility):\n return verified(request, response.eligibility)\n else:\n return unverified(request)\n\n\n@decorator_from_middleware(middleware.AgencySessionRequired)\ndef verified(request, verified_types):\n \"\"\"View handler for the verified eligibility page.\"\"\"\n\n analytics.returned_success(request)\n\n enrollment_index = reverse(\"enrollment:index\")\n session.update(request, eligibility_types=verified_types, origin=enrollment_index)\n\n return redirect(enrollment_index)\n\n\n@decorator_from_middleware(middleware.AgencySessionRequired)\ndef unverified(request):\n \"\"\"View handler for the unverified eligibility page.\"\"\"\n\n analytics.returned_fail(request)\n\n # tel: link to agency phone number\n agency = session.agency(request)\n buttons = viewmodels.Button.agency_contact_links(agency)\n\n page = viewmodels.Page(\n title=_(\"eligibility.pages.unverified.title\"),\n content_title=_(\"eligibility.pages.unverified.content_title\"),\n icon=viewmodels.Icon(\"idcardquestion\", pgettext(\"image alt text\", \"core.icons.idcardquestion\")),\n paragraphs=[_(\"eligibility.pages.unverified.p[0]\"), _(\"eligibility.pages.unverified.p[1]\")],\n buttons=buttons,\n classes=\"text-lg-center\",\n )\n\n return PageTemplateResponse(request, page)\n", "path": "benefits/eligibility/views.py"}, {"content": "\"\"\"\nThe eligibility application: Form definition for the eligibility verification flow.\n\"\"\"\nimport logging\n\nfrom django import forms\nfrom django.utils.translation import gettext_lazy as _\n\nfrom benefits.core import models, recaptcha, widgets\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass EligibilityVerificationForm(forms.Form):\n \"\"\"Form to collect eligibility verification details.\"\"\"\n\n action_url = \"eligibility:confirm\"\n method = \"POST\"\n\n submit_value = _(\"eligibility.forms.confirm.submit\")\n submitting_value = _(\"eligibility.forms.confirm.submitting\")\n\n _error_messages = {\n \"invalid\": _(\"eligibility.forms.confirm.errors.invalid\"),\n \"missing\": _(\"eligibility.forms.confirm.errors.missing\"),\n }\n\n def __init__(self, verifier: models.EligibilityVerifier, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n sub_widget = widgets.FormControlTextInput(placeholder=verifier.form_sub_placeholder)\n if verifier.form_sub_pattern:\n sub_widget.attrs.update({\"pattern\": verifier.form_sub_pattern})\n\n self.fields[\"sub\"] = forms.CharField(label=_(verifier.form_sub_label), widget=sub_widget)\n\n name_widget = widgets.FormControlTextInput(placeholder=verifier.form_name_placeholder)\n if verifier.form_name_max_length:\n name_widget.attrs.update({\"maxlength\": verifier.form_name_max_length})\n\n self.fields[\"name\"] = forms.CharField(label=_(verifier.form_name_label), widget=name_widget)\n\n def add_api_errors(self, form_errors):\n \"\"\"Handle errors passed back from API server related to submitted form values.\"\"\"\n\n validation_errors = {\n field: forms.ValidationError(self._error_messages.get(code, _(\"core.pages.error.title\")), code=code)\n for (field, code) in form_errors.items()\n if field in self.fields\n }\n\n if len(validation_errors) > 0:\n 
logger.warning(\"Form fields are invalid\")\n\n for (field, err) in validation_errors.items():\n self.add_error(field, err)\n\n def clean(self):\n if not recaptcha.verify(self.data):\n raise forms.ValidationError(\"reCAPTCHA failed\")\n", "path": "benefits/eligibility/forms.py"}]}
| 2,325 | 720 |
gh_patches_debug_41684
|
rasdani/github-patches
|
git_diff
|
GoogleCloudPlatform__public-datasets-pipelines-91
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Datasets without `variables.json` should still deploy properly
## Description
Some datasets (and their pipelines) may not need Airflow variables at all.
During the deploy step, when a dataset's `variables.json` file is missing, the deploy script errors out with a `FileNotFound` error. We should let such datasets skip importing Airflow variables and proceed with deploying the pipeline DAGs.
## Checklist
- [x] I created this issue in accordance with the [Code of Conduct](https://github.com/GoogleCloudPlatform/public-datasets-pipelines/blob/main/CODE_OF_CONDUCT.md).
- [x] This issue is appropriately labeled.
--- END ISSUE ---
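Before the file listing that follows, a quick illustration of the behavior the issue asks for. This is a hypothetical guard, not the actual fix in the repository: the helper name and the skip message are made up, and only the `{dataset_id}_variables.json` path convention is taken from the `deploy_dag.py` listing below.

```python
# Hypothetical sketch: skip the Airflow-variables steps when the dataset has
# no variables file, instead of failing with a FileNotFound error.
import pathlib


def has_variables_file(env_path: pathlib.Path, dataset_id: str) -> bool:
    variables_file = env_path / "datasets" / dataset_id / f"{dataset_id}_variables.json"
    if not variables_file.exists():
        print(f"No variables file found at {variables_file}; skipping Airflow variables import.")
        return False
    return True
```

A guard like this would let `main()` run the variables steps only when the file exists and otherwise go straight to deploying the DAGs.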
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scripts/deploy_dag.py`
Content:
```
1 # Copyright 2021 Google LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15
16 import argparse
17 import pathlib
18 import subprocess
19 import typing
20
21 CURRENT_PATH = pathlib.Path(__file__).resolve().parent
22 PROJECT_ROOT = CURRENT_PATH.parent
23 DATASETS_PATH = PROJECT_ROOT / "datasets"
24
25
26 def main(
27 local: bool,
28 env_path: pathlib.Path,
29 dataset_id: str,
30 pipeline: str = None,
31 airflow_home: pathlib.Path = None,
32 composer_env: str = None,
33 composer_bucket: str = None,
34 composer_region: str = None,
35 ):
36 print("\n========== AIRFLOW VARIABLES ==========")
37 copy_variables_to_airflow_data_folder(
38 local, env_path, dataset_id, airflow_home, composer_bucket
39 )
40 import_variables_to_airflow_env(
41 local, env_path, dataset_id, composer_env, composer_bucket, composer_region
42 )
43
44 print("========== AIRFLOW DAGS ==========")
45 if pipeline:
46 pipelines = [env_path / "datasets" / pipeline]
47 else:
48 pipelines = list_subdirs(env_path / "datasets" / dataset_id)
49
50 for pipeline_path in pipelines:
51 copy_custom_callables_to_airflow_dags_folder(
52 local,
53 env_path,
54 dataset_id,
55 pipeline_path.name,
56 composer_bucket,
57 airflow_home,
58 )
59
60 copy_generated_dag_to_airflow_dags_folder(
61 local,
62 env_path,
63 dataset_id,
64 pipeline_path.name,
65 composer_bucket,
66 airflow_home,
67 )
68
69
70 def copy_variables_to_airflow_data_folder(
71 local: bool,
72 env_path: pathlib.Path,
73 dataset_id: str,
74 airflow_home: pathlib.Path = None,
75 composer_bucket: str = None,
76 ):
77 """
78 cd .{ENV}/{DATASET_ID}
79 """
80 cwd = env_path / "datasets" / dataset_id
81 filename = f"{dataset_id}_variables.json"
82
83 check_existence_of_variables_file(cwd / filename)
84
85 if local:
86 """
87 cp {DATASET_ID}_variables.json {AIRFLOW_HOME}/data/variables/{DATASET_ID}_variables.json
88 """
89 target_path = airflow_home / "data" / "variables" / filename
90 target_path.mkdir(parents=True, exist_ok=True)
91 print(
92 "\nCopying variables JSON file into Airflow data folder\n\n"
93 f" Source:\n {cwd / filename}\n\n"
94 f" Destination:\n {target_path}\n"
95 )
96
97 subprocess.check_call(["cp", "-rf", filename, str(target_path)], cwd=cwd)
98 else:
99 """
100 [remote]
101 gsutil cp {DATASET_ID}_variables.json gs://{COMPOSER_BUCKET}/data/variables/{DATASET_ID}_variables.json...
102 """
103 gcs_uri = f"gs://{composer_bucket}/data/variables/{filename}"
104 print(
105 "\nCopying variables JSON file into Cloud Composer data folder\n\n"
106 f" Source:\n {cwd / filename}\n\n"
107 f" Destination:\n {gcs_uri}\n"
108 )
109 subprocess.check_call(["gsutil", "cp", filename, gcs_uri], cwd=cwd)
110
111
112 def import_variables_to_airflow_env(
113 local: bool,
114 env_path: pathlib.Path,
115 dataset_id: str,
116 composer_env: str = None,
117 composer_bucket: str = None,
118 composer_region: str = None,
119 ):
120 """
121 [local]
122 airflow variables import .{ENV}/datasets/{DATASET_ID}/variables.json
123
124 [remote]
125 gcloud composer environments run COMPOSER_ENV --location COMPOSER_REGION variables -- --import /home/airflow/gcs/data/variables/{DATASET_ID}_variables.json
126 """
127 cwd = env_path / "datasets" / dataset_id
128 filename = f"{dataset_id}_variables.json"
129
130 if local:
131 print(f"\nImporting Airflow variables from {cwd / filename}...\n")
132 subprocess.check_call(
133 ["airflow", "variables", "import", str(cwd / filename)], cwd=cwd
134 )
135 else:
136 gcs_uri = f"gs://{composer_bucket}/data/variables/{filename}"
137 airflow_path = f"/home/airflow/gcs/data/variables/{filename}"
138 print(f"\nImporting Airflow variables from {gcs_uri} ({airflow_path})...\n")
139 subprocess.check_call(
140 [
141 "gcloud",
142 "composer",
143 "environments",
144 "run",
145 str(composer_env),
146 "--location",
147 str(composer_region),
148 "variables",
149 "--",
150 "--import",
151 str(airflow_path),
152 ],
153 cwd=cwd,
154 )
155
156
157 def copy_generated_dag_to_airflow_dags_folder(
158 local: bool,
159 env_path: pathlib.Path,
160 dataset_id: str,
161 pipeline_id: str,
162 composer_bucket: str = None,
163 airflow_home: pathlib.Path = None,
164 ):
165 """
166 cd {DATASET_ID}/{PIPELINE_ID}
167
168 [local]
169 cp {PIPELINE_ID}_dag.py {AIRFLOW_HOME}/dags/{DATASET_ID}__{PIPELINE_ID}_dag.py
170
171 [remote]
172 gsutil cp {PIPELINE_ID}_dag.py gs://{COMPOSER_BUCKET}/dags/{DATASET_ID}__{PIPELINE_ID}_dag.py
173 """
174 cwd = env_path / "datasets" / dataset_id / pipeline_id
175 filename = f"{pipeline_id}_dag.py"
176
177 if local:
178 target = airflow_home / "dags" / f"{dataset_id}__{pipeline_id}_dag.py"
179 print(
180 f"\nCopying DAG file for pipeline `{pipeline_id}` into Airflow DAGs folder\n\n"
181 f" Source:\n {cwd / filename}\n\n"
182 f" Destination:\n {target}\n"
183 )
184 subprocess.check_call(["cp", "-rf", filename, str(target)], cwd=cwd)
185 else:
186 target = f"gs://{composer_bucket}/dags/{dataset_id}__{pipeline_id}_dag.py"
187 print(
188 f"\nCopying DAG file for pipeline `{pipeline_id}` into Cloud Composer DAG folder\n\n"
189 f" Source:\n {cwd / filename}\n\n"
190 f" Destination:\n {target}\n"
191 )
192 subprocess.check_call(["gsutil", "cp", filename, target], cwd=cwd)
193
194
195 def copy_custom_callables_to_airflow_dags_folder(
196 local: bool,
197 env_path: pathlib.Path,
198 dataset_id: str,
199 pipeline_id: str,
200 composer_bucket: str = None,
201 airflow_home: pathlib.Path = None,
202 ):
203 """
204 cd {DATASET_ID}/{PIPELINE_ID}
205
206 [local]
207 mkdir -p {AIRFLOW_HOME}/dags/DATASET_ID/PIPELINE_ID/custom
208 cp -rf custom {AIRFLOW_HOME}/dags/DATASET_ID/PIPELINE_ID/custom
209
210 [remote]
211 gsutil cp -r custom gs://{COMPOSER_BUCKET}/dags/{DATASET_ID}/{PIPELINE_ID}/
212 """
213 cwd = env_path / "datasets" / dataset_id / pipeline_id
214
215 if not (cwd / "custom").exists():
216 return
217
218 if local:
219 target_parent = airflow_home / "dags" / dataset_id / pipeline_id
220 target_parent.mkdir(parents=True, exist_ok=True)
221 print(
222 f"\nCopying custom callables folder for pipeline `{pipeline_id}` into Airflow DAGs folder\n\n"
223 f" Source:\n {cwd / 'custom'}\n\n"
224 f" Destination:\n {target_parent / 'custom'}\n"
225 )
226 subprocess.check_call(["cp", "-rf", "custom", str(target_parent)], cwd=cwd)
227 else:
228 target = f"gs://{composer_bucket}/dags/{dataset_id}/{pipeline_id}/"
229 print(
230 f"\nCopying custom callables folder for pipeline `{pipeline_id}` into Cloud Composer DAG folder\n\n"
231 f" Source:\n {cwd / 'custom'}\n\n"
232 f" Destination:\n {target}\n"
233 )
234 subprocess.check_call(["gsutil", "cp", "-r", "custom", target], cwd=cwd)
235
236
237 def check_existence_of_variables_file(file_path: pathlib.Path):
238 if not file_path:
239 raise FileNotFoundError(f"Airflow variables file {file_path} does not exist.")
240
241
242 def list_subdirs(path: pathlib.Path) -> typing.List[pathlib.Path]:
243 """Returns a list of subdirectories"""
244 subdirs = [f for f in path.iterdir() if f.is_dir() and not f.name[0] in (".", "_")]
245 return subdirs
246
247
248 if __name__ == "__main__":
249 parser = argparse.ArgumentParser(
250 description="Deploy DAGs and variables to an Airflow environment"
251 )
252 parser.add_argument(
253 "-d",
254 "--dataset",
255 required=True,
256 type=str,
257 dest="dataset",
258 help="The directory name of the dataset.",
259 )
260 parser.add_argument(
261 "-e",
262 "--env",
263 type=str,
264 default="dev",
265 dest="env",
266 help="The stage used for the resources: dev|staging|prod",
267 )
268 parser.add_argument(
269 "-n",
270 "--composer-env",
271 type=str,
272 dest="composer_env",
273 help="The Google Cloud Composer environment name",
274 )
275 parser.add_argument(
276 "-b",
277 "--composer-bucket",
278 type=str,
279 dest="composer_bucket",
280 help="The Google Cloud Composer bucket name",
281 )
282 parser.add_argument(
283 "-r",
284 "--composer-region",
285 type=str,
286 dest="composer_region",
287 help="The region of the Google Cloud Composer environment",
288 )
289 parser.add_argument(
290 "-p",
291 "--pipeline",
292 required=False,
293 type=str,
294 dest="pipeline",
295 help="The directory name of the pipeline",
296 )
297 parser.add_argument(
298 "-a",
299 "--airflow-home",
300 type=str,
301 default="~/airflow",
302 dest="airflow_home",
303 help="pathlib.Path to the Airflow home directory (defaults to `~/airflow`)",
304 )
305 parser.add_argument("--local", required=False, dest="local", action="store_true")
306
307 args = parser.parse_args()
308 airflow_path = pathlib.Path(args.airflow_home).expanduser()
309
310 if args.local:
311 if not airflow_path.exists() and airflow_path.is_dir():
312 raise ValueError(
313 "Argument `-a|--airflow-home` must exist and be a directory"
314 )
315 else:
316 if not args.composer_env:
317 raise ValueError(
318 "Argument `-n|--composer-env` (Composer environment name) not specified"
319 )
320
321 if not args.composer_bucket:
322 raise ValueError(
323 "Argument `-b|--composer-bucket` (Composer bucket name) not specified"
324 )
325
326 if not args.composer_region:
327 raise ValueError(
328 "Argument `-r|--composer-region` (Composer environment region) not specified"
329 )
330
331 main(
332 local=args.local,
333 env_path=PROJECT_ROOT / f".{args.env}",
334 dataset_id=args.dataset,
335 pipeline=args.pipeline,
336 airflow_home=airflow_path,
337 composer_env=args.composer_env,
338 composer_bucket=args.composer_bucket,
339 composer_region=args.composer_region,
340 )
341
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/scripts/deploy_dag.py b/scripts/deploy_dag.py
--- a/scripts/deploy_dag.py
+++ b/scripts/deploy_dag.py
@@ -17,6 +17,7 @@
import pathlib
import subprocess
import typing
+import warnings
CURRENT_PATH = pathlib.Path(__file__).resolve().parent
PROJECT_ROOT = CURRENT_PATH.parent
@@ -67,6 +68,10 @@
)
+def run_gsutil_cmd(args: typing.List[str], cwd: pathlib.Path):
+ subprocess.check_call(["gsutil"] + args, cwd=cwd)
+
+
def copy_variables_to_airflow_data_folder(
local: bool,
env_path: pathlib.Path,
@@ -80,7 +85,9 @@
cwd = env_path / "datasets" / dataset_id
filename = f"{dataset_id}_variables.json"
- check_existence_of_variables_file(cwd / filename)
+ if not (cwd / filename).exists():
+ warnings.warn(f"Airflow variables file {filename} does not exist.")
+ return
if local:
"""
@@ -106,7 +113,31 @@
f" Source:\n {cwd / filename}\n\n"
f" Destination:\n {gcs_uri}\n"
)
- subprocess.check_call(["gsutil", "cp", filename, gcs_uri], cwd=cwd)
+ run_gsutil_cmd(["cp", filename, gcs_uri], cwd=cwd)
+
+
+def run_cloud_composer_vars_import(
+ composer_env: str,
+ composer_region: str,
+ airflow_path: pathlib.Path,
+ cwd: pathlib.Path,
+):
+ subprocess.check_call(
+ [
+ "gcloud",
+ "composer",
+ "environments",
+ "run",
+ str(composer_env),
+ "--location",
+ str(composer_region),
+ "variables",
+ "--",
+ "--import",
+ str(airflow_path),
+ ],
+ cwd=cwd,
+ )
def import_variables_to_airflow_env(
@@ -136,21 +167,8 @@
gcs_uri = f"gs://{composer_bucket}/data/variables/{filename}"
airflow_path = f"/home/airflow/gcs/data/variables/{filename}"
print(f"\nImporting Airflow variables from {gcs_uri} ({airflow_path})...\n")
- subprocess.check_call(
- [
- "gcloud",
- "composer",
- "environments",
- "run",
- str(composer_env),
- "--location",
- str(composer_region),
- "variables",
- "--",
- "--import",
- str(airflow_path),
- ],
- cwd=cwd,
+ run_cloud_composer_vars_import(
+ composer_env, composer_region, airflow_path, cwd=cwd
)
@@ -189,7 +207,7 @@
f" Source:\n {cwd / filename}\n\n"
f" Destination:\n {target}\n"
)
- subprocess.check_call(["gsutil", "cp", filename, target], cwd=cwd)
+ run_gsutil_cmd(["cp", filename, target], cwd=cwd)
def copy_custom_callables_to_airflow_dags_folder(
@@ -231,7 +249,7 @@
f" Source:\n {cwd / 'custom'}\n\n"
f" Destination:\n {target}\n"
)
- subprocess.check_call(["gsutil", "cp", "-r", "custom", target], cwd=cwd)
+ run_gsutil_cmd(["cp", "-r", "custom", target], cwd=cwd)
def check_existence_of_variables_file(file_path: pathlib.Path):
|
{"golden_diff": "diff --git a/scripts/deploy_dag.py b/scripts/deploy_dag.py\n--- a/scripts/deploy_dag.py\n+++ b/scripts/deploy_dag.py\n@@ -17,6 +17,7 @@\n import pathlib\n import subprocess\n import typing\n+import warnings\n \n CURRENT_PATH = pathlib.Path(__file__).resolve().parent\n PROJECT_ROOT = CURRENT_PATH.parent\n@@ -67,6 +68,10 @@\n )\n \n \n+def run_gsutil_cmd(args: typing.List[str], cwd: pathlib.Path):\n+ subprocess.check_call([\"gsutil\"] + args, cwd=cwd)\n+\n+\n def copy_variables_to_airflow_data_folder(\n local: bool,\n env_path: pathlib.Path,\n@@ -80,7 +85,9 @@\n cwd = env_path / \"datasets\" / dataset_id\n filename = f\"{dataset_id}_variables.json\"\n \n- check_existence_of_variables_file(cwd / filename)\n+ if not (cwd / filename).exists():\n+ warnings.warn(f\"Airflow variables file {filename} does not exist.\")\n+ return\n \n if local:\n \"\"\"\n@@ -106,7 +113,31 @@\n f\" Source:\\n {cwd / filename}\\n\\n\"\n f\" Destination:\\n {gcs_uri}\\n\"\n )\n- subprocess.check_call([\"gsutil\", \"cp\", filename, gcs_uri], cwd=cwd)\n+ run_gsutil_cmd([\"cp\", filename, gcs_uri], cwd=cwd)\n+\n+\n+def run_cloud_composer_vars_import(\n+ composer_env: str,\n+ composer_region: str,\n+ airflow_path: pathlib.Path,\n+ cwd: pathlib.Path,\n+):\n+ subprocess.check_call(\n+ [\n+ \"gcloud\",\n+ \"composer\",\n+ \"environments\",\n+ \"run\",\n+ str(composer_env),\n+ \"--location\",\n+ str(composer_region),\n+ \"variables\",\n+ \"--\",\n+ \"--import\",\n+ str(airflow_path),\n+ ],\n+ cwd=cwd,\n+ )\n \n \n def import_variables_to_airflow_env(\n@@ -136,21 +167,8 @@\n gcs_uri = f\"gs://{composer_bucket}/data/variables/{filename}\"\n airflow_path = f\"/home/airflow/gcs/data/variables/{filename}\"\n print(f\"\\nImporting Airflow variables from {gcs_uri} ({airflow_path})...\\n\")\n- subprocess.check_call(\n- [\n- \"gcloud\",\n- \"composer\",\n- \"environments\",\n- \"run\",\n- str(composer_env),\n- \"--location\",\n- str(composer_region),\n- \"variables\",\n- \"--\",\n- \"--import\",\n- str(airflow_path),\n- ],\n- cwd=cwd,\n+ run_cloud_composer_vars_import(\n+ composer_env, composer_region, airflow_path, cwd=cwd\n )\n \n \n@@ -189,7 +207,7 @@\n f\" Source:\\n {cwd / filename}\\n\\n\"\n f\" Destination:\\n {target}\\n\"\n )\n- subprocess.check_call([\"gsutil\", \"cp\", filename, target], cwd=cwd)\n+ run_gsutil_cmd([\"cp\", filename, target], cwd=cwd)\n \n \n def copy_custom_callables_to_airflow_dags_folder(\n@@ -231,7 +249,7 @@\n f\" Source:\\n {cwd / 'custom'}\\n\\n\"\n f\" Destination:\\n {target}\\n\"\n )\n- subprocess.check_call([\"gsutil\", \"cp\", \"-r\", \"custom\", target], cwd=cwd)\n+ run_gsutil_cmd([\"cp\", \"-r\", \"custom\", target], cwd=cwd)\n \n \n def check_existence_of_variables_file(file_path: pathlib.Path):\n", "issue": "Datasets without `variables.json` should still deploy properly\n## Description\r\n\r\nSome datasets (and their pipelines) may not need Airflow variables at all. \r\n\r\nDuring the deploy step, when a dataset's `variables.json` file is missing, the deploy script errors out with a `FileNotFound` error. 
We should let such datasets skip importing Airflow variables and proceed with deploying the pipeline DAGs.\r\n\r\n## Checklist\r\n- [x] I created this issue in accordance with the [Code of Conduct](https://github.com/GoogleCloudPlatform/public-datasets-pipelines/blob/main/CODE_OF_CONDUCT.md).\r\n- [x] This issue is appropriately labeled.\r\n\n", "before_files": [{"content": "# Copyright 2021 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport argparse\nimport pathlib\nimport subprocess\nimport typing\n\nCURRENT_PATH = pathlib.Path(__file__).resolve().parent\nPROJECT_ROOT = CURRENT_PATH.parent\nDATASETS_PATH = PROJECT_ROOT / \"datasets\"\n\n\ndef main(\n local: bool,\n env_path: pathlib.Path,\n dataset_id: str,\n pipeline: str = None,\n airflow_home: pathlib.Path = None,\n composer_env: str = None,\n composer_bucket: str = None,\n composer_region: str = None,\n):\n print(\"\\n========== AIRFLOW VARIABLES ==========\")\n copy_variables_to_airflow_data_folder(\n local, env_path, dataset_id, airflow_home, composer_bucket\n )\n import_variables_to_airflow_env(\n local, env_path, dataset_id, composer_env, composer_bucket, composer_region\n )\n\n print(\"========== AIRFLOW DAGS ==========\")\n if pipeline:\n pipelines = [env_path / \"datasets\" / pipeline]\n else:\n pipelines = list_subdirs(env_path / \"datasets\" / dataset_id)\n\n for pipeline_path in pipelines:\n copy_custom_callables_to_airflow_dags_folder(\n local,\n env_path,\n dataset_id,\n pipeline_path.name,\n composer_bucket,\n airflow_home,\n )\n\n copy_generated_dag_to_airflow_dags_folder(\n local,\n env_path,\n dataset_id,\n pipeline_path.name,\n composer_bucket,\n airflow_home,\n )\n\n\ndef copy_variables_to_airflow_data_folder(\n local: bool,\n env_path: pathlib.Path,\n dataset_id: str,\n airflow_home: pathlib.Path = None,\n composer_bucket: str = None,\n):\n \"\"\"\n cd .{ENV}/{DATASET_ID}\n \"\"\"\n cwd = env_path / \"datasets\" / dataset_id\n filename = f\"{dataset_id}_variables.json\"\n\n check_existence_of_variables_file(cwd / filename)\n\n if local:\n \"\"\"\n cp {DATASET_ID}_variables.json {AIRFLOW_HOME}/data/variables/{DATASET_ID}_variables.json\n \"\"\"\n target_path = airflow_home / \"data\" / \"variables\" / filename\n target_path.mkdir(parents=True, exist_ok=True)\n print(\n \"\\nCopying variables JSON file into Airflow data folder\\n\\n\"\n f\" Source:\\n {cwd / filename}\\n\\n\"\n f\" Destination:\\n {target_path}\\n\"\n )\n\n subprocess.check_call([\"cp\", \"-rf\", filename, str(target_path)], cwd=cwd)\n else:\n \"\"\"\n [remote]\n gsutil cp {DATASET_ID}_variables.json gs://{COMPOSER_BUCKET}/data/variables/{DATASET_ID}_variables.json...\n \"\"\"\n gcs_uri = f\"gs://{composer_bucket}/data/variables/{filename}\"\n print(\n \"\\nCopying variables JSON file into Cloud Composer data folder\\n\\n\"\n f\" Source:\\n {cwd / filename}\\n\\n\"\n f\" Destination:\\n {gcs_uri}\\n\"\n )\n subprocess.check_call([\"gsutil\", \"cp\", filename, gcs_uri], cwd=cwd)\n\n\ndef import_variables_to_airflow_env(\n local: 
bool,\n env_path: pathlib.Path,\n dataset_id: str,\n composer_env: str = None,\n composer_bucket: str = None,\n composer_region: str = None,\n):\n \"\"\"\n [local]\n airflow variables import .{ENV}/datasets/{DATASET_ID}/variables.json\n\n [remote]\n gcloud composer environments run COMPOSER_ENV --location COMPOSER_REGION variables -- --import /home/airflow/gcs/data/variables/{DATASET_ID}_variables.json\n \"\"\"\n cwd = env_path / \"datasets\" / dataset_id\n filename = f\"{dataset_id}_variables.json\"\n\n if local:\n print(f\"\\nImporting Airflow variables from {cwd / filename}...\\n\")\n subprocess.check_call(\n [\"airflow\", \"variables\", \"import\", str(cwd / filename)], cwd=cwd\n )\n else:\n gcs_uri = f\"gs://{composer_bucket}/data/variables/{filename}\"\n airflow_path = f\"/home/airflow/gcs/data/variables/{filename}\"\n print(f\"\\nImporting Airflow variables from {gcs_uri} ({airflow_path})...\\n\")\n subprocess.check_call(\n [\n \"gcloud\",\n \"composer\",\n \"environments\",\n \"run\",\n str(composer_env),\n \"--location\",\n str(composer_region),\n \"variables\",\n \"--\",\n \"--import\",\n str(airflow_path),\n ],\n cwd=cwd,\n )\n\n\ndef copy_generated_dag_to_airflow_dags_folder(\n local: bool,\n env_path: pathlib.Path,\n dataset_id: str,\n pipeline_id: str,\n composer_bucket: str = None,\n airflow_home: pathlib.Path = None,\n):\n \"\"\"\n cd {DATASET_ID}/{PIPELINE_ID}\n\n [local]\n cp {PIPELINE_ID}_dag.py {AIRFLOW_HOME}/dags/{DATASET_ID}__{PIPELINE_ID}_dag.py\n\n [remote]\n gsutil cp {PIPELINE_ID}_dag.py gs://{COMPOSER_BUCKET}/dags/{DATASET_ID}__{PIPELINE_ID}_dag.py\n \"\"\"\n cwd = env_path / \"datasets\" / dataset_id / pipeline_id\n filename = f\"{pipeline_id}_dag.py\"\n\n if local:\n target = airflow_home / \"dags\" / f\"{dataset_id}__{pipeline_id}_dag.py\"\n print(\n f\"\\nCopying DAG file for pipeline `{pipeline_id}` into Airflow DAGs folder\\n\\n\"\n f\" Source:\\n {cwd / filename}\\n\\n\"\n f\" Destination:\\n {target}\\n\"\n )\n subprocess.check_call([\"cp\", \"-rf\", filename, str(target)], cwd=cwd)\n else:\n target = f\"gs://{composer_bucket}/dags/{dataset_id}__{pipeline_id}_dag.py\"\n print(\n f\"\\nCopying DAG file for pipeline `{pipeline_id}` into Cloud Composer DAG folder\\n\\n\"\n f\" Source:\\n {cwd / filename}\\n\\n\"\n f\" Destination:\\n {target}\\n\"\n )\n subprocess.check_call([\"gsutil\", \"cp\", filename, target], cwd=cwd)\n\n\ndef copy_custom_callables_to_airflow_dags_folder(\n local: bool,\n env_path: pathlib.Path,\n dataset_id: str,\n pipeline_id: str,\n composer_bucket: str = None,\n airflow_home: pathlib.Path = None,\n):\n \"\"\"\n cd {DATASET_ID}/{PIPELINE_ID}\n\n [local]\n mkdir -p {AIRFLOW_HOME}/dags/DATASET_ID/PIPELINE_ID/custom\n cp -rf custom {AIRFLOW_HOME}/dags/DATASET_ID/PIPELINE_ID/custom\n\n [remote]\n gsutil cp -r custom gs://{COMPOSER_BUCKET}/dags/{DATASET_ID}/{PIPELINE_ID}/\n \"\"\"\n cwd = env_path / \"datasets\" / dataset_id / pipeline_id\n\n if not (cwd / \"custom\").exists():\n return\n\n if local:\n target_parent = airflow_home / \"dags\" / dataset_id / pipeline_id\n target_parent.mkdir(parents=True, exist_ok=True)\n print(\n f\"\\nCopying custom callables folder for pipeline `{pipeline_id}` into Airflow DAGs folder\\n\\n\"\n f\" Source:\\n {cwd / 'custom'}\\n\\n\"\n f\" Destination:\\n {target_parent / 'custom'}\\n\"\n )\n subprocess.check_call([\"cp\", \"-rf\", \"custom\", str(target_parent)], cwd=cwd)\n else:\n target = f\"gs://{composer_bucket}/dags/{dataset_id}/{pipeline_id}/\"\n print(\n f\"\\nCopying custom callables folder for 
pipeline `{pipeline_id}` into Cloud Composer DAG folder\\n\\n\"\n f\" Source:\\n {cwd / 'custom'}\\n\\n\"\n f\" Destination:\\n {target}\\n\"\n )\n subprocess.check_call([\"gsutil\", \"cp\", \"-r\", \"custom\", target], cwd=cwd)\n\n\ndef check_existence_of_variables_file(file_path: pathlib.Path):\n if not file_path:\n raise FileNotFoundError(f\"Airflow variables file {file_path} does not exist.\")\n\n\ndef list_subdirs(path: pathlib.Path) -> typing.List[pathlib.Path]:\n \"\"\"Returns a list of subdirectories\"\"\"\n subdirs = [f for f in path.iterdir() if f.is_dir() and not f.name[0] in (\".\", \"_\")]\n return subdirs\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser(\n description=\"Deploy DAGs and variables to an Airflow environment\"\n )\n parser.add_argument(\n \"-d\",\n \"--dataset\",\n required=True,\n type=str,\n dest=\"dataset\",\n help=\"The directory name of the dataset.\",\n )\n parser.add_argument(\n \"-e\",\n \"--env\",\n type=str,\n default=\"dev\",\n dest=\"env\",\n help=\"The stage used for the resources: dev|staging|prod\",\n )\n parser.add_argument(\n \"-n\",\n \"--composer-env\",\n type=str,\n dest=\"composer_env\",\n help=\"The Google Cloud Composer environment name\",\n )\n parser.add_argument(\n \"-b\",\n \"--composer-bucket\",\n type=str,\n dest=\"composer_bucket\",\n help=\"The Google Cloud Composer bucket name\",\n )\n parser.add_argument(\n \"-r\",\n \"--composer-region\",\n type=str,\n dest=\"composer_region\",\n help=\"The region of the Google Cloud Composer environment\",\n )\n parser.add_argument(\n \"-p\",\n \"--pipeline\",\n required=False,\n type=str,\n dest=\"pipeline\",\n help=\"The directory name of the pipeline\",\n )\n parser.add_argument(\n \"-a\",\n \"--airflow-home\",\n type=str,\n default=\"~/airflow\",\n dest=\"airflow_home\",\n help=\"pathlib.Path to the Airflow home directory (defaults to `~/airflow`)\",\n )\n parser.add_argument(\"--local\", required=False, dest=\"local\", action=\"store_true\")\n\n args = parser.parse_args()\n airflow_path = pathlib.Path(args.airflow_home).expanduser()\n\n if args.local:\n if not airflow_path.exists() and airflow_path.is_dir():\n raise ValueError(\n \"Argument `-a|--airflow-home` must exist and be a directory\"\n )\n else:\n if not args.composer_env:\n raise ValueError(\n \"Argument `-n|--composer-env` (Composer environment name) not specified\"\n )\n\n if not args.composer_bucket:\n raise ValueError(\n \"Argument `-b|--composer-bucket` (Composer bucket name) not specified\"\n )\n\n if not args.composer_region:\n raise ValueError(\n \"Argument `-r|--composer-region` (Composer environment region) not specified\"\n )\n\n main(\n local=args.local,\n env_path=PROJECT_ROOT / f\".{args.env}\",\n dataset_id=args.dataset,\n pipeline=args.pipeline,\n airflow_home=airflow_path,\n composer_env=args.composer_env,\n composer_bucket=args.composer_bucket,\n composer_region=args.composer_region,\n )\n", "path": "scripts/deploy_dag.py"}], "after_files": [{"content": "# Copyright 2021 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions 
and\n# limitations under the License.\n\n\nimport argparse\nimport pathlib\nimport subprocess\nimport typing\nimport warnings\n\nCURRENT_PATH = pathlib.Path(__file__).resolve().parent\nPROJECT_ROOT = CURRENT_PATH.parent\nDATASETS_PATH = PROJECT_ROOT / \"datasets\"\n\n\ndef main(\n local: bool,\n env_path: pathlib.Path,\n dataset_id: str,\n pipeline: str = None,\n airflow_home: pathlib.Path = None,\n composer_env: str = None,\n composer_bucket: str = None,\n composer_region: str = None,\n):\n print(\"\\n========== AIRFLOW VARIABLES ==========\")\n copy_variables_to_airflow_data_folder(\n local, env_path, dataset_id, airflow_home, composer_bucket\n )\n import_variables_to_airflow_env(\n local, env_path, dataset_id, composer_env, composer_bucket, composer_region\n )\n\n print(\"========== AIRFLOW DAGS ==========\")\n if pipeline:\n pipelines = [env_path / \"datasets\" / pipeline]\n else:\n pipelines = list_subdirs(env_path / \"datasets\" / dataset_id)\n\n for pipeline_path in pipelines:\n copy_custom_callables_to_airflow_dags_folder(\n local,\n env_path,\n dataset_id,\n pipeline_path.name,\n composer_bucket,\n airflow_home,\n )\n\n copy_generated_dag_to_airflow_dags_folder(\n local,\n env_path,\n dataset_id,\n pipeline_path.name,\n composer_bucket,\n airflow_home,\n )\n\n\ndef run_gsutil_cmd(args: typing.List[str], cwd: pathlib.Path):\n subprocess.check_call([\"gsutil\"] + args, cwd=cwd)\n\n\ndef copy_variables_to_airflow_data_folder(\n local: bool,\n env_path: pathlib.Path,\n dataset_id: str,\n airflow_home: pathlib.Path = None,\n composer_bucket: str = None,\n):\n \"\"\"\n cd .{ENV}/{DATASET_ID}\n \"\"\"\n cwd = env_path / \"datasets\" / dataset_id\n filename = f\"{dataset_id}_variables.json\"\n\n if not (cwd / filename).exists():\n warnings.warn(f\"Airflow variables file {filename} does not exist.\")\n return\n\n if local:\n \"\"\"\n cp {DATASET_ID}_variables.json {AIRFLOW_HOME}/data/variables/{DATASET_ID}_variables.json\n \"\"\"\n target_path = airflow_home / \"data\" / \"variables\" / filename\n target_path.mkdir(parents=True, exist_ok=True)\n print(\n \"\\nCopying variables JSON file into Airflow data folder\\n\\n\"\n f\" Source:\\n {cwd / filename}\\n\\n\"\n f\" Destination:\\n {target_path}\\n\"\n )\n\n subprocess.check_call([\"cp\", \"-rf\", filename, str(target_path)], cwd=cwd)\n else:\n \"\"\"\n [remote]\n gsutil cp {DATASET_ID}_variables.json gs://{COMPOSER_BUCKET}/data/variables/{DATASET_ID}_variables.json...\n \"\"\"\n gcs_uri = f\"gs://{composer_bucket}/data/variables/{filename}\"\n print(\n \"\\nCopying variables JSON file into Cloud Composer data folder\\n\\n\"\n f\" Source:\\n {cwd / filename}\\n\\n\"\n f\" Destination:\\n {gcs_uri}\\n\"\n )\n run_gsutil_cmd([\"cp\", filename, gcs_uri], cwd=cwd)\n\n\ndef run_cloud_composer_vars_import(\n composer_env: str,\n composer_region: str,\n airflow_path: pathlib.Path,\n cwd: pathlib.Path,\n):\n subprocess.check_call(\n [\n \"gcloud\",\n \"composer\",\n \"environments\",\n \"run\",\n str(composer_env),\n \"--location\",\n str(composer_region),\n \"variables\",\n \"--\",\n \"--import\",\n str(airflow_path),\n ],\n cwd=cwd,\n )\n\n\ndef import_variables_to_airflow_env(\n local: bool,\n env_path: pathlib.Path,\n dataset_id: str,\n composer_env: str = None,\n composer_bucket: str = None,\n composer_region: str = None,\n):\n \"\"\"\n [local]\n airflow variables import .{ENV}/datasets/{DATASET_ID}/variables.json\n\n [remote]\n gcloud composer environments run COMPOSER_ENV --location COMPOSER_REGION variables -- --import 
/home/airflow/gcs/data/variables/{DATASET_ID}_variables.json\n \"\"\"\n cwd = env_path / \"datasets\" / dataset_id\n filename = f\"{dataset_id}_variables.json\"\n\n if local:\n print(f\"\\nImporting Airflow variables from {cwd / filename}...\\n\")\n subprocess.check_call(\n [\"airflow\", \"variables\", \"import\", str(cwd / filename)], cwd=cwd\n )\n else:\n gcs_uri = f\"gs://{composer_bucket}/data/variables/{filename}\"\n airflow_path = f\"/home/airflow/gcs/data/variables/{filename}\"\n print(f\"\\nImporting Airflow variables from {gcs_uri} ({airflow_path})...\\n\")\n run_cloud_composer_vars_import(\n composer_env, composer_region, airflow_path, cwd=cwd\n )\n\n\ndef copy_generated_dag_to_airflow_dags_folder(\n local: bool,\n env_path: pathlib.Path,\n dataset_id: str,\n pipeline_id: str,\n composer_bucket: str = None,\n airflow_home: pathlib.Path = None,\n):\n \"\"\"\n cd {DATASET_ID}/{PIPELINE_ID}\n\n [local]\n cp {PIPELINE_ID}_dag.py {AIRFLOW_HOME}/dags/{DATASET_ID}__{PIPELINE_ID}_dag.py\n\n [remote]\n gsutil cp {PIPELINE_ID}_dag.py gs://{COMPOSER_BUCKET}/dags/{DATASET_ID}__{PIPELINE_ID}_dag.py\n \"\"\"\n cwd = env_path / \"datasets\" / dataset_id / pipeline_id\n filename = f\"{pipeline_id}_dag.py\"\n\n if local:\n target = airflow_home / \"dags\" / f\"{dataset_id}__{pipeline_id}_dag.py\"\n print(\n f\"\\nCopying DAG file for pipeline `{pipeline_id}` into Airflow DAGs folder\\n\\n\"\n f\" Source:\\n {cwd / filename}\\n\\n\"\n f\" Destination:\\n {target}\\n\"\n )\n subprocess.check_call([\"cp\", \"-rf\", filename, str(target)], cwd=cwd)\n else:\n target = f\"gs://{composer_bucket}/dags/{dataset_id}__{pipeline_id}_dag.py\"\n print(\n f\"\\nCopying DAG file for pipeline `{pipeline_id}` into Cloud Composer DAG folder\\n\\n\"\n f\" Source:\\n {cwd / filename}\\n\\n\"\n f\" Destination:\\n {target}\\n\"\n )\n run_gsutil_cmd([\"cp\", filename, target], cwd=cwd)\n\n\ndef copy_custom_callables_to_airflow_dags_folder(\n local: bool,\n env_path: pathlib.Path,\n dataset_id: str,\n pipeline_id: str,\n composer_bucket: str = None,\n airflow_home: pathlib.Path = None,\n):\n \"\"\"\n cd {DATASET_ID}/{PIPELINE_ID}\n\n [local]\n mkdir -p {AIRFLOW_HOME}/dags/DATASET_ID/PIPELINE_ID/custom\n cp -rf custom {AIRFLOW_HOME}/dags/DATASET_ID/PIPELINE_ID/custom\n\n [remote]\n gsutil cp -r custom gs://{COMPOSER_BUCKET}/dags/{DATASET_ID}/{PIPELINE_ID}/\n \"\"\"\n cwd = env_path / \"datasets\" / dataset_id / pipeline_id\n\n if not (cwd / \"custom\").exists():\n return\n\n if local:\n target_parent = airflow_home / \"dags\" / dataset_id / pipeline_id\n target_parent.mkdir(parents=True, exist_ok=True)\n print(\n f\"\\nCopying custom callables folder for pipeline `{pipeline_id}` into Airflow DAGs folder\\n\\n\"\n f\" Source:\\n {cwd / 'custom'}\\n\\n\"\n f\" Destination:\\n {target_parent / 'custom'}\\n\"\n )\n subprocess.check_call([\"cp\", \"-rf\", \"custom\", str(target_parent)], cwd=cwd)\n else:\n target = f\"gs://{composer_bucket}/dags/{dataset_id}/{pipeline_id}/\"\n print(\n f\"\\nCopying custom callables folder for pipeline `{pipeline_id}` into Cloud Composer DAG folder\\n\\n\"\n f\" Source:\\n {cwd / 'custom'}\\n\\n\"\n f\" Destination:\\n {target}\\n\"\n )\n run_gsutil_cmd([\"cp\", \"-r\", \"custom\", target], cwd=cwd)\n\n\ndef check_existence_of_variables_file(file_path: pathlib.Path):\n if not file_path:\n raise FileNotFoundError(f\"Airflow variables file {file_path} does not exist.\")\n\n\ndef list_subdirs(path: pathlib.Path) -> typing.List[pathlib.Path]:\n \"\"\"Returns a list of subdirectories\"\"\"\n 
subdirs = [f for f in path.iterdir() if f.is_dir() and not f.name[0] in (\".\", \"_\")]\n return subdirs\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser(\n description=\"Deploy DAGs and variables to an Airflow environment\"\n )\n parser.add_argument(\n \"-d\",\n \"--dataset\",\n required=True,\n type=str,\n dest=\"dataset\",\n help=\"The directory name of the dataset.\",\n )\n parser.add_argument(\n \"-e\",\n \"--env\",\n type=str,\n default=\"dev\",\n dest=\"env\",\n help=\"The stage used for the resources: dev|staging|prod\",\n )\n parser.add_argument(\n \"-n\",\n \"--composer-env\",\n type=str,\n dest=\"composer_env\",\n help=\"The Google Cloud Composer environment name\",\n )\n parser.add_argument(\n \"-b\",\n \"--composer-bucket\",\n type=str,\n dest=\"composer_bucket\",\n help=\"The Google Cloud Composer bucket name\",\n )\n parser.add_argument(\n \"-r\",\n \"--composer-region\",\n type=str,\n dest=\"composer_region\",\n help=\"The region of the Google Cloud Composer environment\",\n )\n parser.add_argument(\n \"-p\",\n \"--pipeline\",\n required=False,\n type=str,\n dest=\"pipeline\",\n help=\"The directory name of the pipeline\",\n )\n parser.add_argument(\n \"-a\",\n \"--airflow-home\",\n type=str,\n default=\"~/airflow\",\n dest=\"airflow_home\",\n help=\"pathlib.Path to the Airflow home directory (defaults to `~/airflow`)\",\n )\n parser.add_argument(\"--local\", required=False, dest=\"local\", action=\"store_true\")\n\n args = parser.parse_args()\n airflow_path = pathlib.Path(args.airflow_home).expanduser()\n\n if args.local:\n if not airflow_path.exists() and airflow_path.is_dir():\n raise ValueError(\n \"Argument `-a|--airflow-home` must exist and be a directory\"\n )\n else:\n if not args.composer_env:\n raise ValueError(\n \"Argument `-n|--composer-env` (Composer environment name) not specified\"\n )\n\n if not args.composer_bucket:\n raise ValueError(\n \"Argument `-b|--composer-bucket` (Composer bucket name) not specified\"\n )\n\n if not args.composer_region:\n raise ValueError(\n \"Argument `-r|--composer-region` (Composer environment region) not specified\"\n )\n\n main(\n local=args.local,\n env_path=PROJECT_ROOT / f\".{args.env}\",\n dataset_id=args.dataset,\n pipeline=args.pipeline,\n airflow_home=airflow_path,\n composer_env=args.composer_env,\n composer_bucket=args.composer_bucket,\n composer_region=args.composer_region,\n )\n", "path": "scripts/deploy_dag.py"}]}
| 3,893 | 846 |
gh_patches_debug_33001
|
rasdani/github-patches
|
git_diff
|
conan-io__conan-3456
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Unnecessary and deranging override message when using alias packages
In a situation with 3 packages CA, CB and CC where CB requires CA (via an alias) and CC requires CB and CA also via aliases, when installing CC with a "conan install ..." conan reports a version override while actually the alias is pointing to exactly the version of the package that conan says overrides the alias:
CB/0.0.2@user/testing requirement CA/ALIAS@user/testing overriden by CC/0.0.3@user/testing to CA/0.0.1@user/testing
To help us debug your issue please `explain:`
- [X] I've read the [CONTRIBUTING guide](https://raw.githubusercontent.com/conan-io/conan/develop/.github/CONTRIBUTING.md).
- [X] I've specified the Conan version, operating system version and any tool that can be relevant.
- [X] I've explained the steps to reproduce the error or the motivation/use case of the question/suggestion.
--- END ISSUE ---
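As a quick illustration of why the reported override is spurious, here is a toy sketch. It is not Conan's real data model — references are plain strings and `aliased` is a made-up dict mapping an alias reference to the concrete reference it points to — but it mirrors the idea of resolving aliases before the downstream comparison, after which the references already agree and no "overridden" message is warranted.

```python
# Toy sketch, not Conan code: show that resolving aliases first removes the
# apparent override between CB's requirement and CC's requirement.
def normalize(requires, aliased):
    """Replace alias references with the concrete references they point to."""
    return [aliased.get(ref, ref) for ref in requires]


def find_overrides(upstream_requires, downstream_requires):
    """Report requirement pairs whose references disagree (what triggers the message)."""
    overrides = []
    by_name = {ref.split("/")[0]: ref for ref in downstream_requires}
    for ref in upstream_requires:
        down = by_name.get(ref.split("/")[0])
        if down is not None and down != ref:
            overrides.append((ref, down))
    return overrides


aliased = {"CA/ALIAS@user/testing": "CA/0.0.1@user/testing"}
cb_requires = ["CA/ALIAS@user/testing"]
cc_requires = ["CA/0.0.1@user/testing"]

# Without normalization the alias looks like it is being overridden...
print(find_overrides(cb_requires, cc_requires))
# ...after normalization the references already agree, so nothing is reported.
print(find_overrides(normalize(cb_requires, aliased), cc_requires))
```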
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conans/client/graph/graph_builder.py`
Content:
```
1 import time
2
3 from conans.model.conan_file import get_env_context_manager
4 from conans.model.requires import Requirements
5 from conans.model.ref import ConanFileReference
6 from conans.errors import (ConanException, conanfile_exception_formatter,
7 ConanExceptionInUserConanfileMethod)
8 from conans.client.output import ScopedOutput
9 from conans.util.log import logger
10 from conans.client.graph.graph import DepsGraph, Node, RECIPE_WORKSPACE
11 from conans.model.workspace import WORKSPACE_FILE
12
13
14 class DepsGraphBuilder(object):
15 """ Responsible for computing the dependencies graph DepsGraph
16 """
17 def __init__(self, proxy, output, loader, resolver, workspace, recorder):
18 self._proxy = proxy
19 self._output = output
20 self._loader = loader
21 self._resolver = resolver
22 self._workspace = workspace
23 self._recorder = recorder
24
25 def load_graph(self, conanfile, check_updates, update, remote_name, processed_profile):
26 check_updates = check_updates or update
27 dep_graph = DepsGraph()
28 # compute the conanfile entry point for this dependency graph
29 root_node = Node(None, conanfile)
30 dep_graph.add_node(root_node)
31 public_deps = {} # {name: Node} dict with public nodes, so they are not added again
32 aliased = {}
33 # enter recursive computation
34 t1 = time.time()
35 loop_ancestors = []
36 self._load_deps(root_node, Requirements(), dep_graph, public_deps, None, None,
37 loop_ancestors, aliased, check_updates, update, remote_name,
38 processed_profile)
39 logger.debug("Deps-builder: Time to load deps %s" % (time.time() - t1))
40 t1 = time.time()
41 dep_graph.compute_package_ids()
42 logger.debug("Deps-builder: Propagate info %s" % (time.time() - t1))
43 return dep_graph
44
45 def _resolve_deps(self, node, aliased, update, remote_name):
46 # Resolve possible version ranges of the current node requirements
47 # new_reqs is a shallow copy of what is propagated upstream, so changes done by the
48 # RangeResolver are also done in new_reqs, and then propagated!
49 conanfile, conanref = node.conanfile, node.conan_ref
50 for _, require in conanfile.requires.items():
51 self._resolver.resolve(require, conanref, update, remote_name)
52
53 # After resolving ranges,
54 for req in conanfile.requires.values():
55 alias = aliased.get(req.conan_reference)
56 if alias:
57 req.conan_reference = alias
58
59 if not hasattr(conanfile, "_conan_evaluated_requires"):
60 conanfile._conan_evaluated_requires = conanfile.requires.copy()
61 elif conanfile.requires != conanfile._conan_evaluated_requires:
62 raise ConanException("%s: Incompatible requirements obtained in different "
63 "evaluations of 'requirements'\n"
64 " Previous requirements: %s\n"
65 " New requirements: %s"
66 % (conanref, list(conanfile._conan_evaluated_requires.values()),
67 list(conanfile.requires.values())))
68
69 def _load_deps(self, node, down_reqs, dep_graph, public_deps, down_ref, down_options,
70 loop_ancestors, aliased, check_updates, update, remote_name, processed_profile):
71 """ loads a Conan object from the given file
72 param node: Node object to be expanded in this step
73 down_reqs: the Requirements as coming from downstream, which can overwrite current
74 values
75 param deps: DepsGraph result
76 param public_deps: {name: Node} of already expanded public Nodes, not to be repeated
77 in graph
78 param down_ref: ConanFileReference of who is depending on current node for this expansion
79 """
80 # basic node configuration
81 new_reqs, new_options = self._config_node(node, down_reqs, down_ref, down_options)
82
83 self._resolve_deps(node, aliased, update, remote_name)
84
85 # Expand each one of the current requirements
86 for name, require in node.conanfile.requires.items():
87 if require.override:
88 continue
89 if require.conan_reference in loop_ancestors:
90 raise ConanException("Loop detected: %s"
91 % "->".join(str(r) for r in loop_ancestors))
92 new_loop_ancestors = loop_ancestors[:] # Copy for propagating
93 new_loop_ancestors.append(require.conan_reference)
94 previous = public_deps.get(name)
95 if require.private or not previous: # new node, must be added and expanded
96 new_node = self._create_new_node(node, dep_graph, require, public_deps, name,
97 aliased, check_updates, update, remote_name,
98 processed_profile)
99 # RECURSION!
100 # Make sure the subgraph is truly private
101 new_public_deps = {} if require.private else public_deps
102 self._load_deps(new_node, new_reqs, dep_graph, new_public_deps, node.conan_ref,
103 new_options, new_loop_ancestors, aliased, check_updates, update,
104 remote_name, processed_profile)
105 else: # a public node already exist with this name
106 previous_node, closure = previous
107 alias_ref = aliased.get(require.conan_reference, require.conan_reference)
108 # Necessary to make sure that it is pointing to the correct aliased
109 require.conan_reference = alias_ref
110 if previous_node.conan_ref != alias_ref:
111 raise ConanException("Conflict in %s\n"
112 " Requirement %s conflicts with already defined %s\n"
113 " Keeping %s\n"
114 " To change it, override it in your base requirements"
115 % (node.conan_ref, require.conan_reference,
116 previous_node.conan_ref, previous_node.conan_ref))
117 dep_graph.add_edge(node, previous_node)
118 # RECURSION!
119 if closure is None:
120 closure = dep_graph.closure(node)
121 public_deps[name] = previous_node, closure
122 if self._recurse(closure, new_reqs, new_options):
123 self._load_deps(previous_node, new_reqs, dep_graph, public_deps, node.conan_ref,
124 new_options, new_loop_ancestors, aliased, check_updates, update,
125 remote_name, processed_profile)
126
127 def _recurse(self, closure, new_reqs, new_options):
128 """ For a given closure, if some requirements or options coming from downstream
129 is incompatible with the current closure, then it is necessary to recurse
130 then, incompatibilities will be raised as usually"""
131 for req in new_reqs.values():
132 n = closure.get(req.conan_reference.name)
133 if n and n.conan_ref != req.conan_reference:
134 return True
135 for pkg_name, options_values in new_options.items():
136 n = closure.get(pkg_name)
137 if n:
138 options = n.conanfile.options
139 for option, value in options_values.items():
140 if getattr(options, option) != value:
141 return True
142 return False
143
144 def _config_node(self, node, down_reqs, down_ref, down_options):
145 """ update settings and option in the current ConanFile, computing actual
146 requirement values, cause they can be overridden by downstream requires
147 param settings: dict of settings values => {"os": "windows"}
148 """
149 try:
150 conanfile, conanref = node.conanfile, node.conan_ref
151 # Avoid extra time manipulating the sys.path for python
152 with get_env_context_manager(conanfile, without_python=True):
153 if hasattr(conanfile, "config"):
154 if not conanref:
155 output = ScopedOutput(str("PROJECT"), self._output)
156 output.warn("config() has been deprecated."
157 " Use config_options and configure")
158 with conanfile_exception_formatter(str(conanfile), "config"):
159 conanfile.config()
160 with conanfile_exception_formatter(str(conanfile), "config_options"):
161 conanfile.config_options()
162 conanfile.options.propagate_upstream(down_options, down_ref, conanref)
163 if hasattr(conanfile, "config"):
164 with conanfile_exception_formatter(str(conanfile), "config"):
165 conanfile.config()
166
167 with conanfile_exception_formatter(str(conanfile), "configure"):
168 conanfile.configure()
169
170 conanfile.settings.validate() # All has to be ok!
171 conanfile.options.validate()
172
173 # Update requirements (overwrites), computing new upstream
174 if hasattr(conanfile, "requirements"):
175 # If re-evaluating the recipe, in a diamond graph, with different options,
176 # it could happen that one execution path of requirements() defines a package
177 # and another one a different package raising Duplicate dependency error
178 # Or the two consecutive calls, adding 2 different dependencies for the two paths
179 # So it is necessary to save the "requires" state and restore it before a second
180 # execution of requirements(). It is a shallow copy, if first iteration is
181 # RequireResolve'd or overridden, the inner requirements are modified
182 if not hasattr(conanfile, "_conan_original_requires"):
183 conanfile._conan_original_requires = conanfile.requires.copy()
184 else:
185 conanfile.requires = conanfile._conan_original_requires.copy()
186
187 with conanfile_exception_formatter(str(conanfile), "requirements"):
188 conanfile.requirements()
189
190 new_options = conanfile.options.deps_package_values
191 new_down_reqs = conanfile.requires.update(down_reqs, self._output, conanref,
192 down_ref)
193 except ConanExceptionInUserConanfileMethod:
194 raise
195 except ConanException as e:
196 raise ConanException("%s: %s" % (conanref or "Conanfile", str(e)))
197 except Exception as e:
198 raise ConanException(e)
199
200 return new_down_reqs, new_options
201
202 def _create_new_node(self, current_node, dep_graph, requirement, public_deps, name_req, aliased,
203 check_updates, update, remote_name, processed_profile, alias_ref=None):
204 """ creates and adds a new node to the dependency graph
205 """
206 workspace_package = self._workspace[requirement.conan_reference] if self._workspace else None
207 if workspace_package:
208 conanfile_path = workspace_package.conanfile_path
209 recipe_status = RECIPE_WORKSPACE
210 remote = WORKSPACE_FILE
211 else:
212 try:
213 result = self._proxy.get_recipe(requirement.conan_reference,
214 check_updates, update, remote_name, self._recorder)
215 except ConanException as e:
216 base_ref = str(current_node.conan_ref or "PROJECT")
217 self._output.error("Failed requirement '%s' from '%s'"
218 % (requirement.conan_reference, base_ref))
219 raise e
220 conanfile_path, recipe_status, remote, _ = result
221
222 output = ScopedOutput(str(requirement.conan_reference), self._output)
223 dep_conanfile = self._loader.load_conanfile(conanfile_path, output, processed_profile,
224 reference=requirement.conan_reference)
225
226 if workspace_package:
227 workspace_package.conanfile = dep_conanfile
228 if getattr(dep_conanfile, "alias", None):
229 alias_reference = alias_ref or requirement.conan_reference
230 requirement.conan_reference = ConanFileReference.loads(dep_conanfile.alias)
231 aliased[alias_reference] = requirement.conan_reference
232 return self._create_new_node(current_node, dep_graph, requirement, public_deps,
233 name_req, aliased, check_updates, update,
234 remote_name, processed_profile, alias_ref=alias_reference)
235
236 new_node = Node(requirement.conan_reference, dep_conanfile)
237 new_node.recipe = recipe_status
238 new_node.remote = remote
239 dep_graph.add_node(new_node)
240 dep_graph.add_edge(current_node, new_node, requirement.private)
241 if not requirement.private:
242 public_deps[name_req] = new_node, None
243 return new_node
244
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/conans/client/graph/graph_builder.py b/conans/client/graph/graph_builder.py
--- a/conans/client/graph/graph_builder.py
+++ b/conans/client/graph/graph_builder.py
@@ -78,7 +78,7 @@
param down_ref: ConanFileReference of who is depending on current node for this expansion
"""
# basic node configuration
- new_reqs, new_options = self._config_node(node, down_reqs, down_ref, down_options)
+ new_reqs, new_options = self._config_node(node, down_reqs, down_ref, down_options, aliased)
self._resolve_deps(node, aliased, update, remote_name)
@@ -141,7 +141,7 @@
return True
return False
- def _config_node(self, node, down_reqs, down_ref, down_options):
+ def _config_node(self, node, down_reqs, down_ref, down_options, aliased):
""" update settings and option in the current ConanFile, computing actual
requirement values, cause they can be overridden by downstream requires
param settings: dict of settings values => {"os": "windows"}
@@ -188,8 +188,12 @@
conanfile.requirements()
new_options = conanfile.options.deps_package_values
- new_down_reqs = conanfile.requires.update(down_reqs, self._output, conanref,
- down_ref)
+ if aliased:
+ for req in conanfile.requires.values():
+ req.conan_reference = aliased.get(req.conan_reference,
+ req.conan_reference)
+ new_down_reqs = conanfile.requires.update(down_reqs, self._output,
+ conanref, down_ref)
except ConanExceptionInUserConanfileMethod:
raise
except ConanException as e:
|
{"golden_diff": "diff --git a/conans/client/graph/graph_builder.py b/conans/client/graph/graph_builder.py\n--- a/conans/client/graph/graph_builder.py\n+++ b/conans/client/graph/graph_builder.py\n@@ -78,7 +78,7 @@\n param down_ref: ConanFileReference of who is depending on current node for this expansion\n \"\"\"\n # basic node configuration\n- new_reqs, new_options = self._config_node(node, down_reqs, down_ref, down_options)\n+ new_reqs, new_options = self._config_node(node, down_reqs, down_ref, down_options, aliased)\n \n self._resolve_deps(node, aliased, update, remote_name)\n \n@@ -141,7 +141,7 @@\n return True\n return False\n \n- def _config_node(self, node, down_reqs, down_ref, down_options):\n+ def _config_node(self, node, down_reqs, down_ref, down_options, aliased):\n \"\"\" update settings and option in the current ConanFile, computing actual\n requirement values, cause they can be overridden by downstream requires\n param settings: dict of settings values => {\"os\": \"windows\"}\n@@ -188,8 +188,12 @@\n conanfile.requirements()\n \n new_options = conanfile.options.deps_package_values\n- new_down_reqs = conanfile.requires.update(down_reqs, self._output, conanref,\n- down_ref)\n+ if aliased:\n+ for req in conanfile.requires.values():\n+ req.conan_reference = aliased.get(req.conan_reference,\n+ req.conan_reference)\n+ new_down_reqs = conanfile.requires.update(down_reqs, self._output,\n+ conanref, down_ref)\n except ConanExceptionInUserConanfileMethod:\n raise\n except ConanException as e:\n", "issue": "Unnecessary and deranging override message when using alias packages\nIn a situation with 3 packages CA, CB and CC where CB requires CA (via an alias) and CC requires CB and CA also via aliases, when installing CC with a \"conan install ...\" conan reports a version override while actually the alias is pointing to exaclty the version of the package that conan says overrides the alias:\r\n\r\nCB/0.0.2@user/testing requirement CA/ALIAS@user/testing overriden by CC/0.0.3@user/testing to CA/0.0.1@user/testing\r\n\r\n\r\nTo help us debug your issue please `explain:`\r\n\r\n- [X] I've read the [CONTRIBUTING guide](https://raw.githubusercontent.com/conan-io/conan/develop/.github/CONTRIBUTING.md).\r\n- [X] I've specified the Conan version, operating system version and any tool that can be relevant.\r\n- [X] I've explained the steps to reproduce the error or the motivation/use case of the question/suggestion.\r\n\r\n\n", "before_files": [{"content": "import time\n\nfrom conans.model.conan_file import get_env_context_manager\nfrom conans.model.requires import Requirements\nfrom conans.model.ref import ConanFileReference\nfrom conans.errors import (ConanException, conanfile_exception_formatter,\n ConanExceptionInUserConanfileMethod)\nfrom conans.client.output import ScopedOutput\nfrom conans.util.log import logger\nfrom conans.client.graph.graph import DepsGraph, Node, RECIPE_WORKSPACE\nfrom conans.model.workspace import WORKSPACE_FILE\n\n\nclass DepsGraphBuilder(object):\n \"\"\" Responsible for computing the dependencies graph DepsGraph\n \"\"\"\n def __init__(self, proxy, output, loader, resolver, workspace, recorder):\n self._proxy = proxy\n self._output = output\n self._loader = loader\n self._resolver = resolver\n self._workspace = workspace\n self._recorder = recorder\n\n def load_graph(self, conanfile, check_updates, update, remote_name, processed_profile):\n check_updates = check_updates or update\n dep_graph = DepsGraph()\n # compute the conanfile entry point for this dependency 
graph\n root_node = Node(None, conanfile)\n dep_graph.add_node(root_node)\n public_deps = {} # {name: Node} dict with public nodes, so they are not added again\n aliased = {}\n # enter recursive computation\n t1 = time.time()\n loop_ancestors = []\n self._load_deps(root_node, Requirements(), dep_graph, public_deps, None, None,\n loop_ancestors, aliased, check_updates, update, remote_name,\n processed_profile)\n logger.debug(\"Deps-builder: Time to load deps %s\" % (time.time() - t1))\n t1 = time.time()\n dep_graph.compute_package_ids()\n logger.debug(\"Deps-builder: Propagate info %s\" % (time.time() - t1))\n return dep_graph\n\n def _resolve_deps(self, node, aliased, update, remote_name):\n # Resolve possible version ranges of the current node requirements\n # new_reqs is a shallow copy of what is propagated upstream, so changes done by the\n # RangeResolver are also done in new_reqs, and then propagated!\n conanfile, conanref = node.conanfile, node.conan_ref\n for _, require in conanfile.requires.items():\n self._resolver.resolve(require, conanref, update, remote_name)\n\n # After resolving ranges,\n for req in conanfile.requires.values():\n alias = aliased.get(req.conan_reference)\n if alias:\n req.conan_reference = alias\n\n if not hasattr(conanfile, \"_conan_evaluated_requires\"):\n conanfile._conan_evaluated_requires = conanfile.requires.copy()\n elif conanfile.requires != conanfile._conan_evaluated_requires:\n raise ConanException(\"%s: Incompatible requirements obtained in different \"\n \"evaluations of 'requirements'\\n\"\n \" Previous requirements: %s\\n\"\n \" New requirements: %s\"\n % (conanref, list(conanfile._conan_evaluated_requires.values()),\n list(conanfile.requires.values())))\n\n def _load_deps(self, node, down_reqs, dep_graph, public_deps, down_ref, down_options,\n loop_ancestors, aliased, check_updates, update, remote_name, processed_profile):\n \"\"\" loads a Conan object from the given file\n param node: Node object to be expanded in this step\n down_reqs: the Requirements as coming from downstream, which can overwrite current\n values\n param deps: DepsGraph result\n param public_deps: {name: Node} of already expanded public Nodes, not to be repeated\n in graph\n param down_ref: ConanFileReference of who is depending on current node for this expansion\n \"\"\"\n # basic node configuration\n new_reqs, new_options = self._config_node(node, down_reqs, down_ref, down_options)\n\n self._resolve_deps(node, aliased, update, remote_name)\n\n # Expand each one of the current requirements\n for name, require in node.conanfile.requires.items():\n if require.override:\n continue\n if require.conan_reference in loop_ancestors:\n raise ConanException(\"Loop detected: %s\"\n % \"->\".join(str(r) for r in loop_ancestors))\n new_loop_ancestors = loop_ancestors[:] # Copy for propagating\n new_loop_ancestors.append(require.conan_reference)\n previous = public_deps.get(name)\n if require.private or not previous: # new node, must be added and expanded\n new_node = self._create_new_node(node, dep_graph, require, public_deps, name,\n aliased, check_updates, update, remote_name,\n processed_profile)\n # RECURSION!\n # Make sure the subgraph is truly private\n new_public_deps = {} if require.private else public_deps\n self._load_deps(new_node, new_reqs, dep_graph, new_public_deps, node.conan_ref,\n new_options, new_loop_ancestors, aliased, check_updates, update,\n remote_name, processed_profile)\n else: # a public node already exist with this name\n previous_node, closure = previous\n 
alias_ref = aliased.get(require.conan_reference, require.conan_reference)\n # Necessary to make sure that it is pointing to the correct aliased\n require.conan_reference = alias_ref\n if previous_node.conan_ref != alias_ref:\n raise ConanException(\"Conflict in %s\\n\"\n \" Requirement %s conflicts with already defined %s\\n\"\n \" Keeping %s\\n\"\n \" To change it, override it in your base requirements\"\n % (node.conan_ref, require.conan_reference,\n previous_node.conan_ref, previous_node.conan_ref))\n dep_graph.add_edge(node, previous_node)\n # RECURSION!\n if closure is None:\n closure = dep_graph.closure(node)\n public_deps[name] = previous_node, closure\n if self._recurse(closure, new_reqs, new_options):\n self._load_deps(previous_node, new_reqs, dep_graph, public_deps, node.conan_ref,\n new_options, new_loop_ancestors, aliased, check_updates, update,\n remote_name, processed_profile)\n\n def _recurse(self, closure, new_reqs, new_options):\n \"\"\" For a given closure, if some requirements or options coming from downstream\n is incompatible with the current closure, then it is necessary to recurse\n then, incompatibilities will be raised as usually\"\"\"\n for req in new_reqs.values():\n n = closure.get(req.conan_reference.name)\n if n and n.conan_ref != req.conan_reference:\n return True\n for pkg_name, options_values in new_options.items():\n n = closure.get(pkg_name)\n if n:\n options = n.conanfile.options\n for option, value in options_values.items():\n if getattr(options, option) != value:\n return True\n return False\n\n def _config_node(self, node, down_reqs, down_ref, down_options):\n \"\"\" update settings and option in the current ConanFile, computing actual\n requirement values, cause they can be overridden by downstream requires\n param settings: dict of settings values => {\"os\": \"windows\"}\n \"\"\"\n try:\n conanfile, conanref = node.conanfile, node.conan_ref\n # Avoid extra time manipulating the sys.path for python\n with get_env_context_manager(conanfile, without_python=True):\n if hasattr(conanfile, \"config\"):\n if not conanref:\n output = ScopedOutput(str(\"PROJECT\"), self._output)\n output.warn(\"config() has been deprecated.\"\n \" Use config_options and configure\")\n with conanfile_exception_formatter(str(conanfile), \"config\"):\n conanfile.config()\n with conanfile_exception_formatter(str(conanfile), \"config_options\"):\n conanfile.config_options()\n conanfile.options.propagate_upstream(down_options, down_ref, conanref)\n if hasattr(conanfile, \"config\"):\n with conanfile_exception_formatter(str(conanfile), \"config\"):\n conanfile.config()\n\n with conanfile_exception_formatter(str(conanfile), \"configure\"):\n conanfile.configure()\n\n conanfile.settings.validate() # All has to be ok!\n conanfile.options.validate()\n\n # Update requirements (overwrites), computing new upstream\n if hasattr(conanfile, \"requirements\"):\n # If re-evaluating the recipe, in a diamond graph, with different options,\n # it could happen that one execution path of requirements() defines a package\n # and another one a different package raising Duplicate dependency error\n # Or the two consecutive calls, adding 2 different dependencies for the two paths\n # So it is necessary to save the \"requires\" state and restore it before a second\n # execution of requirements(). 
It is a shallow copy, if first iteration is\n # RequireResolve'd or overridden, the inner requirements are modified\n if not hasattr(conanfile, \"_conan_original_requires\"):\n conanfile._conan_original_requires = conanfile.requires.copy()\n else:\n conanfile.requires = conanfile._conan_original_requires.copy()\n\n with conanfile_exception_formatter(str(conanfile), \"requirements\"):\n conanfile.requirements()\n\n new_options = conanfile.options.deps_package_values\n new_down_reqs = conanfile.requires.update(down_reqs, self._output, conanref,\n down_ref)\n except ConanExceptionInUserConanfileMethod:\n raise\n except ConanException as e:\n raise ConanException(\"%s: %s\" % (conanref or \"Conanfile\", str(e)))\n except Exception as e:\n raise ConanException(e)\n\n return new_down_reqs, new_options\n\n def _create_new_node(self, current_node, dep_graph, requirement, public_deps, name_req, aliased,\n check_updates, update, remote_name, processed_profile, alias_ref=None):\n \"\"\" creates and adds a new node to the dependency graph\n \"\"\"\n workspace_package = self._workspace[requirement.conan_reference] if self._workspace else None\n if workspace_package:\n conanfile_path = workspace_package.conanfile_path\n recipe_status = RECIPE_WORKSPACE\n remote = WORKSPACE_FILE\n else:\n try:\n result = self._proxy.get_recipe(requirement.conan_reference,\n check_updates, update, remote_name, self._recorder)\n except ConanException as e:\n base_ref = str(current_node.conan_ref or \"PROJECT\")\n self._output.error(\"Failed requirement '%s' from '%s'\"\n % (requirement.conan_reference, base_ref))\n raise e\n conanfile_path, recipe_status, remote, _ = result\n\n output = ScopedOutput(str(requirement.conan_reference), self._output)\n dep_conanfile = self._loader.load_conanfile(conanfile_path, output, processed_profile,\n reference=requirement.conan_reference)\n\n if workspace_package:\n workspace_package.conanfile = dep_conanfile\n if getattr(dep_conanfile, \"alias\", None):\n alias_reference = alias_ref or requirement.conan_reference\n requirement.conan_reference = ConanFileReference.loads(dep_conanfile.alias)\n aliased[alias_reference] = requirement.conan_reference\n return self._create_new_node(current_node, dep_graph, requirement, public_deps,\n name_req, aliased, check_updates, update,\n remote_name, processed_profile, alias_ref=alias_reference)\n\n new_node = Node(requirement.conan_reference, dep_conanfile)\n new_node.recipe = recipe_status\n new_node.remote = remote\n dep_graph.add_node(new_node)\n dep_graph.add_edge(current_node, new_node, requirement.private)\n if not requirement.private:\n public_deps[name_req] = new_node, None\n return new_node\n", "path": "conans/client/graph/graph_builder.py"}], "after_files": [{"content": "import time\n\nfrom conans.model.conan_file import get_env_context_manager\nfrom conans.model.requires import Requirements\nfrom conans.model.ref import ConanFileReference\nfrom conans.errors import (ConanException, conanfile_exception_formatter,\n ConanExceptionInUserConanfileMethod)\nfrom conans.client.output import ScopedOutput\nfrom conans.util.log import logger\nfrom conans.client.graph.graph import DepsGraph, Node, RECIPE_WORKSPACE\nfrom conans.model.workspace import WORKSPACE_FILE\n\n\nclass DepsGraphBuilder(object):\n \"\"\" Responsible for computing the dependencies graph DepsGraph\n \"\"\"\n def __init__(self, proxy, output, loader, resolver, workspace, recorder):\n self._proxy = proxy\n self._output = output\n self._loader = loader\n self._resolver = resolver\n 
self._workspace = workspace\n self._recorder = recorder\n\n def load_graph(self, conanfile, check_updates, update, remote_name, processed_profile):\n check_updates = check_updates or update\n dep_graph = DepsGraph()\n # compute the conanfile entry point for this dependency graph\n root_node = Node(None, conanfile)\n dep_graph.add_node(root_node)\n public_deps = {} # {name: Node} dict with public nodes, so they are not added again\n aliased = {}\n # enter recursive computation\n t1 = time.time()\n loop_ancestors = []\n self._load_deps(root_node, Requirements(), dep_graph, public_deps, None, None,\n loop_ancestors, aliased, check_updates, update, remote_name,\n processed_profile)\n logger.debug(\"Deps-builder: Time to load deps %s\" % (time.time() - t1))\n t1 = time.time()\n dep_graph.compute_package_ids()\n logger.debug(\"Deps-builder: Propagate info %s\" % (time.time() - t1))\n return dep_graph\n\n def _resolve_deps(self, node, aliased, update, remote_name):\n # Resolve possible version ranges of the current node requirements\n # new_reqs is a shallow copy of what is propagated upstream, so changes done by the\n # RangeResolver are also done in new_reqs, and then propagated!\n conanfile, conanref = node.conanfile, node.conan_ref\n for _, require in conanfile.requires.items():\n self._resolver.resolve(require, conanref, update, remote_name)\n\n # After resolving ranges,\n for req in conanfile.requires.values():\n alias = aliased.get(req.conan_reference)\n if alias:\n req.conan_reference = alias\n\n if not hasattr(conanfile, \"_conan_evaluated_requires\"):\n conanfile._conan_evaluated_requires = conanfile.requires.copy()\n elif conanfile.requires != conanfile._conan_evaluated_requires:\n raise ConanException(\"%s: Incompatible requirements obtained in different \"\n \"evaluations of 'requirements'\\n\"\n \" Previous requirements: %s\\n\"\n \" New requirements: %s\"\n % (conanref, list(conanfile._conan_evaluated_requires.values()),\n list(conanfile.requires.values())))\n\n def _load_deps(self, node, down_reqs, dep_graph, public_deps, down_ref, down_options,\n loop_ancestors, aliased, check_updates, update, remote_name, processed_profile):\n \"\"\" loads a Conan object from the given file\n param node: Node object to be expanded in this step\n down_reqs: the Requirements as coming from downstream, which can overwrite current\n values\n param deps: DepsGraph result\n param public_deps: {name: Node} of already expanded public Nodes, not to be repeated\n in graph\n param down_ref: ConanFileReference of who is depending on current node for this expansion\n \"\"\"\n # basic node configuration\n new_reqs, new_options = self._config_node(node, down_reqs, down_ref, down_options, aliased)\n\n self._resolve_deps(node, aliased, update, remote_name)\n\n # Expand each one of the current requirements\n for name, require in node.conanfile.requires.items():\n if require.override:\n continue\n if require.conan_reference in loop_ancestors:\n raise ConanException(\"Loop detected: %s\"\n % \"->\".join(str(r) for r in loop_ancestors))\n new_loop_ancestors = loop_ancestors[:] # Copy for propagating\n new_loop_ancestors.append(require.conan_reference)\n previous = public_deps.get(name)\n if require.private or not previous: # new node, must be added and expanded\n new_node = self._create_new_node(node, dep_graph, require, public_deps, name,\n aliased, check_updates, update, remote_name,\n processed_profile)\n # RECURSION!\n # Make sure the subgraph is truly private\n new_public_deps = {} if require.private else 
public_deps\n self._load_deps(new_node, new_reqs, dep_graph, new_public_deps, node.conan_ref,\n new_options, new_loop_ancestors, aliased, check_updates, update,\n remote_name, processed_profile)\n else: # a public node already exist with this name\n previous_node, closure = previous\n alias_ref = aliased.get(require.conan_reference, require.conan_reference)\n # Necessary to make sure that it is pointing to the correct aliased\n require.conan_reference = alias_ref\n if previous_node.conan_ref != alias_ref:\n raise ConanException(\"Conflict in %s\\n\"\n \" Requirement %s conflicts with already defined %s\\n\"\n \" Keeping %s\\n\"\n \" To change it, override it in your base requirements\"\n % (node.conan_ref, require.conan_reference,\n previous_node.conan_ref, previous_node.conan_ref))\n dep_graph.add_edge(node, previous_node)\n # RECURSION!\n if closure is None:\n closure = dep_graph.closure(node)\n public_deps[name] = previous_node, closure\n if self._recurse(closure, new_reqs, new_options):\n self._load_deps(previous_node, new_reqs, dep_graph, public_deps, node.conan_ref,\n new_options, new_loop_ancestors, aliased, check_updates, update,\n remote_name, processed_profile)\n\n def _recurse(self, closure, new_reqs, new_options):\n \"\"\" For a given closure, if some requirements or options coming from downstream\n is incompatible with the current closure, then it is necessary to recurse\n then, incompatibilities will be raised as usually\"\"\"\n for req in new_reqs.values():\n n = closure.get(req.conan_reference.name)\n if n and n.conan_ref != req.conan_reference:\n return True\n for pkg_name, options_values in new_options.items():\n n = closure.get(pkg_name)\n if n:\n options = n.conanfile.options\n for option, value in options_values.items():\n if getattr(options, option) != value:\n return True\n return False\n\n def _config_node(self, node, down_reqs, down_ref, down_options, aliased):\n \"\"\" update settings and option in the current ConanFile, computing actual\n requirement values, cause they can be overridden by downstream requires\n param settings: dict of settings values => {\"os\": \"windows\"}\n \"\"\"\n try:\n conanfile, conanref = node.conanfile, node.conan_ref\n # Avoid extra time manipulating the sys.path for python\n with get_env_context_manager(conanfile, without_python=True):\n if hasattr(conanfile, \"config\"):\n if not conanref:\n output = ScopedOutput(str(\"PROJECT\"), self._output)\n output.warn(\"config() has been deprecated.\"\n \" Use config_options and configure\")\n with conanfile_exception_formatter(str(conanfile), \"config\"):\n conanfile.config()\n with conanfile_exception_formatter(str(conanfile), \"config_options\"):\n conanfile.config_options()\n conanfile.options.propagate_upstream(down_options, down_ref, conanref)\n if hasattr(conanfile, \"config\"):\n with conanfile_exception_formatter(str(conanfile), \"config\"):\n conanfile.config()\n\n with conanfile_exception_formatter(str(conanfile), \"configure\"):\n conanfile.configure()\n\n conanfile.settings.validate() # All has to be ok!\n conanfile.options.validate()\n\n # Update requirements (overwrites), computing new upstream\n if hasattr(conanfile, \"requirements\"):\n # If re-evaluating the recipe, in a diamond graph, with different options,\n # it could happen that one execution path of requirements() defines a package\n # and another one a different package raising Duplicate dependency error\n # Or the two consecutive calls, adding 2 different dependencies for the two paths\n # So it is necessary to save 
the \"requires\" state and restore it before a second\n # execution of requirements(). It is a shallow copy, if first iteration is\n # RequireResolve'd or overridden, the inner requirements are modified\n if not hasattr(conanfile, \"_conan_original_requires\"):\n conanfile._conan_original_requires = conanfile.requires.copy()\n else:\n conanfile.requires = conanfile._conan_original_requires.copy()\n\n with conanfile_exception_formatter(str(conanfile), \"requirements\"):\n conanfile.requirements()\n\n new_options = conanfile.options.deps_package_values\n if aliased:\n for req in conanfile.requires.values():\n req.conan_reference = aliased.get(req.conan_reference,\n req.conan_reference)\n new_down_reqs = conanfile.requires.update(down_reqs, self._output,\n conanref, down_ref)\n except ConanExceptionInUserConanfileMethod:\n raise\n except ConanException as e:\n raise ConanException(\"%s: %s\" % (conanref or \"Conanfile\", str(e)))\n except Exception as e:\n raise ConanException(e)\n\n return new_down_reqs, new_options\n\n def _create_new_node(self, current_node, dep_graph, requirement, public_deps, name_req, aliased,\n check_updates, update, remote_name, processed_profile, alias_ref=None):\n \"\"\" creates and adds a new node to the dependency graph\n \"\"\"\n workspace_package = self._workspace[requirement.conan_reference] if self._workspace else None\n if workspace_package:\n conanfile_path = workspace_package.conanfile_path\n recipe_status = RECIPE_WORKSPACE\n remote = WORKSPACE_FILE\n else:\n try:\n result = self._proxy.get_recipe(requirement.conan_reference,\n check_updates, update, remote_name, self._recorder)\n except ConanException as e:\n base_ref = str(current_node.conan_ref or \"PROJECT\")\n self._output.error(\"Failed requirement '%s' from '%s'\"\n % (requirement.conan_reference, base_ref))\n raise e\n conanfile_path, recipe_status, remote, _ = result\n\n output = ScopedOutput(str(requirement.conan_reference), self._output)\n dep_conanfile = self._loader.load_conanfile(conanfile_path, output, processed_profile,\n reference=requirement.conan_reference)\n\n if workspace_package:\n workspace_package.conanfile = dep_conanfile\n if getattr(dep_conanfile, \"alias\", None):\n alias_reference = alias_ref or requirement.conan_reference\n requirement.conan_reference = ConanFileReference.loads(dep_conanfile.alias)\n aliased[alias_reference] = requirement.conan_reference\n return self._create_new_node(current_node, dep_graph, requirement, public_deps,\n name_req, aliased, check_updates, update,\n remote_name, processed_profile, alias_ref=alias_reference)\n\n new_node = Node(requirement.conan_reference, dep_conanfile)\n new_node.recipe = recipe_status\n new_node.remote = remote\n dep_graph.add_node(new_node)\n dep_graph.add_edge(current_node, new_node, requirement.private)\n if not requirement.private:\n public_deps[name_req] = new_node, None\n return new_node\n", "path": "conans/client/graph/graph_builder.py"}]}
| 3,733 | 411 |
gh_patches_debug_41862 | rasdani/github-patches | git_diff | tiangolo__fastapi-1540 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Tutorial websocket doc example
**Describe the bug**
Hi,
In the WebSockets docs, the last example doesn't work.
**To Reproduce**
Steps to reproduce the behavior:
1. Create a file main.py with the last example at the bottom of the docs page
>https://fastapi.tiangolo.com/tutorial/websockets/#create-a-websocket
```python
from fastapi import Cookie, Depends, FastAPI, Header
from starlette.responses import HTMLResponse
from starlette.status import WS_1008_POLICY_VIOLATION
from starlette.websockets import WebSocket
app = FastAPI()
html = """
<!DOCTYPE html>
<html>
<head>
<title>Chat</title>
</head>
<body>
<h1>WebSocket Chat</h1>
<form action="" onsubmit="sendMessage(event)">
<label>Item ID: <input type="text" id="itemId" autocomplete="off" value="foo"/></label>
<button onclick="connect(event)">Connect</button>
<br>
<label>Message: <input type="text" id="messageText" autocomplete="off"/></label>
<button>Send</button>
</form>
<ul id='messages'>
</ul>
<script>
var ws = null;
function connect(event) {
var input = document.getElementById("itemId")
ws = new WebSocket("ws://localhost:8000/items/" + input.value + "/ws");
ws.onmessage = function(event) {
var messages = document.getElementById('messages')
var message = document.createElement('li')
var content = document.createTextNode(event.data)
message.appendChild(content)
messages.appendChild(message)
};
}
function sendMessage(event) {
var input = document.getElementById("messageText")
ws.send(input.value)
input.value = ''
event.preventDefault()
}
</script>
</body>
</html>
"""
@app.get("/")
async def get():
return HTMLResponse(html)
async def get_cookie_or_client(
websocket: WebSocket, session: str = Cookie(None), x_client: str = Header(None)
):
if session is None and x_client is None:
await websocket.close(code=WS_1008_POLICY_VIOLATION)
return session or x_client
@app.websocket("/items/{item_id}/ws")
async def websocket_endpoint(
websocket: WebSocket,
item_id: int,
q: str = None,
cookie_or_client: str = Depends(get_cookie_or_client),
):
await websocket.accept()
while True:
data = await websocket.receive_text()
await websocket.send_text(
f"Session Cookie or X-Client Header value is: {cookie_or_client}"
)
if q is not None:
await websocket.send_text(f"Query parameter q is: {q}")
await websocket.send_text(f"Message text was: {data}, for item ID: {item_id}")
```
2. Run the application with the cmd:
```
uvicorn main:app --log-level debug --reload
```
3. Open the browser at 127.0.0.1
 - the first time, connect with Item ID foo and press the Connect button
 - then send the message "hi" with Item ID foo and press the Send button.
 The connect appears to fail; the send returns code 200, but nothing happens on the web page.

4. See error
```python
INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit)
INFO: Started reloader process [366952]
email-validator not installed, email fields will be treated as str.
To install, run: pip install email-validator
INFO: Started server process [366957]
INFO: Waiting for application startup.
DEBUG: None - ASGI [1] Started
DEBUG: None - ASGI [1] Sent {'type': 'lifespan.startup'}
DEBUG: None - ASGI [1] Received {'type': 'lifespan.startup.complete'}
DEBUG: ('127.0.0.1', 50056) - Connected
DEBUG: server - state = CONNECTING
DEBUG: server - event = connection_made(<TCPTransport closed=False reading=True 0x1819178>)
DEBUG: ('127.0.0.1', 50056) - ASGI [2] Started
DEBUG: ('127.0.0.1', 50056) - ASGI [2] Received {'type': 'websocket.close', 'code': 1008}
INFO: ('127.0.0.1', 50056) - "WebSocket /items/foo/ws" 403
DEBUG: ('127.0.0.1', 50056) - ASGI [2] Raised exception
ERROR: Exception in ASGI application
Traceback (most recent call last):
File "/data/experiments/realtime_web_socket/lib/python3.7/site-packages/uvicorn/protocols/websockets/websockets_impl.py", line 147, in run_asgi
result = await self.app(self.scope, self.asgi_receive, self.asgi_send)
File "/data/experiments/realtime_web_socket/lib/python3.7/site-packages/uvicorn/middleware/message_logger.py", line 58, in __call__
raise exc from None
File "/data/experiments/realtime_web_socket/lib/python3.7/site-packages/uvicorn/middleware/message_logger.py", line 54, in __call__
await self.app(scope, inner_receive, inner_send)
File "/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/applications.py", line 133, in __call__
await self.error_middleware(scope, receive, send)
File "/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/middleware/errors.py", line 87, in __call__
await self.app(scope, receive, send)
File "/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/exceptions.py", line 49, in __call__
await self.app(scope, receive, send)
File "/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/routing.py", line 585, in __call__
await route(scope, receive, send)
File "/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/routing.py", line 265, in __call__
await self.app(scope, receive, send)
File "/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/routing.py", line 56, in app
await func(session)
File "/data/experiments/realtime_web_socket/lib/python3.7/site-packages/fastapi/routing.py", line 148, in app
await websocket.close(code=WS_1008_POLICY_VIOLATION)
File "/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/websockets.py", line 121, in close
await self.send({"type": "websocket.close", "code": code})
File "/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/websockets.py", line 70, in send
raise RuntimeError('Cannot call "send" once a close message has been sent.')
RuntimeError: Cannot call "send" once a close message has been sent.
DEBUG: server ! failing WebSocket connection in the CONNECTING state: 1006 [no reason]
DEBUG: ('127.0.0.1', 50058) - Connected
DEBUG: server x half-closing TCP connection
DEBUG: ('127.0.0.1', 50058) - ASGI [3] Started
DEBUG: ('127.0.0.1', 50058) - ASGI [3] Received {'type': 'http.response.start', 'status': 200, 'headers': '<...>'}
INFO: ('127.0.0.1', 50058) - "GET / HTTP/1.1" 200
DEBUG: ('127.0.0.1', 50058) - ASGI [3] Received {'type': 'http.response.body', 'body': '<1419 bytes>'}
DEBUG: ('127.0.0.1', 50058) - ASGI [3] Completed
DEBUG: server - event = eof_received()
DEBUG: server - event = connection_lost(None)
DEBUG: server - state = CLOSED
DEBUG: server x code = 1006, reason = [no reason]
DEBUG: ('127.0.0.1', 50058) - Disconnected
DEBUG: ('127.0.0.1', 50060) - Connected
DEBUG: ('127.0.0.1', 50060) - ASGI [4] Started
DEBUG: ('127.0.0.1', 50060) - ASGI [4] Received {'type': 'http.response.start', 'status': 200, 'headers': '<...>'}
INFO: ('127.0.0.1', 50060) - "GET / HTTP/1.1" 200
DEBUG: ('127.0.0.1', 50060) - ASGI [4] Received {'type': 'http.response.body', 'body': '<1419 bytes>'}
DEBUG: ('127.0.0.1', 50060) - ASGI [4] Completed
DEBUG: ('127.0.0.1', 50060) - Disconnected
```
**Expected behavior**
The sent message is expected to appear in bold on the web page.
**Environment:**
 - OS: CentOS 7
- FastAPI Version [e.g. 0.3.0], get it with: fastapi==0.31.0
```Python
import fastapi
print(fastapi.__version__)
0.31.0
```
- Python version, get it with:
```bash
python --version
Python 3.7.3
```
--- END ISSUE ---
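A note on the likely root cause before looking at the files: a browser's `WebSocket` constructor cannot attach custom headers such as `X-Client`, and the example never sets a `session` cookie, so `get_cookie_or_client` always closes the socket with code 1008; in addition, `item_id` is declared as `int` while the page sends `"foo"`, so FastAPI's routing also tries to close the already-closed socket, which is the `RuntimeError` in the traceback. The sketch below is only an illustration (it mirrors the patch shown later in this document): authenticate through a query-string `token` that the JavaScript can actually send, and declare `item_id` as `str`.

```python
# Illustrative sketch only: authenticate via a query parameter a browser can
# actually supply, and accept string item IDs so "foo" passes validation.
from fastapi import Cookie, Depends, FastAPI, Query, WebSocket, status

app = FastAPI()


async def get_cookie_or_token(
    websocket: WebSocket, session: str = Cookie(None), token: str = Query(None)
):
    if session is None and token is None:
        await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
    return session or token


@app.websocket("/items/{item_id}/ws")
async def websocket_endpoint(
    websocket: WebSocket,
    item_id: str,
    cookie_or_token: str = Depends(get_cookie_or_token),
):
    await websocket.accept()
    while True:
        data = await websocket.receive_text()
        await websocket.send_text(
            f"token: {cookie_or_token}, item: {item_id}, message: {data}"
        )
```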
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs_src/websockets/tutorial002.py`
Content:
```
1 from fastapi import Cookie, Depends, FastAPI, Header, WebSocket, status
2 from fastapi.responses import HTMLResponse
3
4 app = FastAPI()
5
6 html = """
7 <!DOCTYPE html>
8 <html>
9 <head>
10 <title>Chat</title>
11 </head>
12 <body>
13 <h1>WebSocket Chat</h1>
14 <form action="" onsubmit="sendMessage(event)">
15 <label>Item ID: <input type="text" id="itemId" autocomplete="off" value="foo"/></label>
16 <button onclick="connect(event)">Connect</button>
17 <br>
18 <label>Message: <input type="text" id="messageText" autocomplete="off"/></label>
19 <button>Send</button>
20 </form>
21 <ul id='messages'>
22 </ul>
23 <script>
24 var ws = null;
25 function connect(event) {
26 var input = document.getElementById("itemId")
27 ws = new WebSocket("ws://localhost:8000/items/" + input.value + "/ws");
28 ws.onmessage = function(event) {
29 var messages = document.getElementById('messages')
30 var message = document.createElement('li')
31 var content = document.createTextNode(event.data)
32 message.appendChild(content)
33 messages.appendChild(message)
34 };
35 }
36 function sendMessage(event) {
37 var input = document.getElementById("messageText")
38 ws.send(input.value)
39 input.value = ''
40 event.preventDefault()
41 }
42 </script>
43 </body>
44 </html>
45 """
46
47
48 @app.get("/")
49 async def get():
50 return HTMLResponse(html)
51
52
53 async def get_cookie_or_client(
54 websocket: WebSocket, session: str = Cookie(None), x_client: str = Header(None)
55 ):
56 if session is None and x_client is None:
57 await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
58 return session or x_client
59
60
61 @app.websocket("/items/{item_id}/ws")
62 async def websocket_endpoint(
63 websocket: WebSocket,
64 item_id: int,
65 q: str = None,
66 cookie_or_client: str = Depends(get_cookie_or_client),
67 ):
68 await websocket.accept()
69 while True:
70 data = await websocket.receive_text()
71 await websocket.send_text(
72 f"Session Cookie or X-Client Header value is: {cookie_or_client}"
73 )
74 if q is not None:
75 await websocket.send_text(f"Query parameter q is: {q}")
76 await websocket.send_text(f"Message text was: {data}, for item ID: {item_id}")
77
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/docs_src/websockets/tutorial002.py b/docs_src/websockets/tutorial002.py
--- a/docs_src/websockets/tutorial002.py
+++ b/docs_src/websockets/tutorial002.py
@@ -1,4 +1,4 @@
-from fastapi import Cookie, Depends, FastAPI, Header, WebSocket, status
+from fastapi import Cookie, Depends, FastAPI, Query, WebSocket, status
from fastapi.responses import HTMLResponse
app = FastAPI()
@@ -13,8 +13,9 @@
<h1>WebSocket Chat</h1>
<form action="" onsubmit="sendMessage(event)">
<label>Item ID: <input type="text" id="itemId" autocomplete="off" value="foo"/></label>
+ <label>Token: <input type="text" id="token" autocomplete="off" value="some-key-token"/></label>
<button onclick="connect(event)">Connect</button>
- <br>
+ <hr>
<label>Message: <input type="text" id="messageText" autocomplete="off"/></label>
<button>Send</button>
</form>
@@ -23,8 +24,9 @@
<script>
var ws = null;
function connect(event) {
- var input = document.getElementById("itemId")
- ws = new WebSocket("ws://localhost:8000/items/" + input.value + "/ws");
+ var itemId = document.getElementById("itemId")
+ var token = document.getElementById("token")
+ ws = new WebSocket("ws://localhost:8000/items/" + itemId.value + "/ws?token=" + token.value);
ws.onmessage = function(event) {
var messages = document.getElementById('messages')
var message = document.createElement('li')
@@ -32,6 +34,7 @@
message.appendChild(content)
messages.appendChild(message)
};
+ event.preventDefault()
}
function sendMessage(event) {
var input = document.getElementById("messageText")
@@ -50,26 +53,26 @@
return HTMLResponse(html)
-async def get_cookie_or_client(
- websocket: WebSocket, session: str = Cookie(None), x_client: str = Header(None)
+async def get_cookie_or_token(
+ websocket: WebSocket, session: str = Cookie(None), token: str = Query(None)
):
- if session is None and x_client is None:
+ if session is None and token is None:
await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
- return session or x_client
+ return session or token
@app.websocket("/items/{item_id}/ws")
async def websocket_endpoint(
websocket: WebSocket,
- item_id: int,
- q: str = None,
- cookie_or_client: str = Depends(get_cookie_or_client),
+ item_id: str,
+ q: int = None,
+ cookie_or_token: str = Depends(get_cookie_or_token),
):
await websocket.accept()
while True:
data = await websocket.receive_text()
await websocket.send_text(
- f"Session Cookie or X-Client Header value is: {cookie_or_client}"
+ f"Session cookie or query token value is: {cookie_or_token}"
)
if q is not None:
await websocket.send_text(f"Query parameter q is: {q}")
|
{"golden_diff": "diff --git a/docs_src/websockets/tutorial002.py b/docs_src/websockets/tutorial002.py\n--- a/docs_src/websockets/tutorial002.py\n+++ b/docs_src/websockets/tutorial002.py\n@@ -1,4 +1,4 @@\n-from fastapi import Cookie, Depends, FastAPI, Header, WebSocket, status\n+from fastapi import Cookie, Depends, FastAPI, Query, WebSocket, status\n from fastapi.responses import HTMLResponse\n \n app = FastAPI()\n@@ -13,8 +13,9 @@\n <h1>WebSocket Chat</h1>\n <form action=\"\" onsubmit=\"sendMessage(event)\">\n <label>Item ID: <input type=\"text\" id=\"itemId\" autocomplete=\"off\" value=\"foo\"/></label>\n+ <label>Token: <input type=\"text\" id=\"token\" autocomplete=\"off\" value=\"some-key-token\"/></label>\n <button onclick=\"connect(event)\">Connect</button>\n- <br>\n+ <hr>\n <label>Message: <input type=\"text\" id=\"messageText\" autocomplete=\"off\"/></label>\n <button>Send</button>\n </form>\n@@ -23,8 +24,9 @@\n <script>\n var ws = null;\n function connect(event) {\n- var input = document.getElementById(\"itemId\")\n- ws = new WebSocket(\"ws://localhost:8000/items/\" + input.value + \"/ws\");\n+ var itemId = document.getElementById(\"itemId\")\n+ var token = document.getElementById(\"token\")\n+ ws = new WebSocket(\"ws://localhost:8000/items/\" + itemId.value + \"/ws?token=\" + token.value);\n ws.onmessage = function(event) {\n var messages = document.getElementById('messages')\n var message = document.createElement('li')\n@@ -32,6 +34,7 @@\n message.appendChild(content)\n messages.appendChild(message)\n };\n+ event.preventDefault()\n }\n function sendMessage(event) {\n var input = document.getElementById(\"messageText\")\n@@ -50,26 +53,26 @@\n return HTMLResponse(html)\n \n \n-async def get_cookie_or_client(\n- websocket: WebSocket, session: str = Cookie(None), x_client: str = Header(None)\n+async def get_cookie_or_token(\n+ websocket: WebSocket, session: str = Cookie(None), token: str = Query(None)\n ):\n- if session is None and x_client is None:\n+ if session is None and token is None:\n await websocket.close(code=status.WS_1008_POLICY_VIOLATION)\n- return session or x_client\n+ return session or token\n \n \n @app.websocket(\"/items/{item_id}/ws\")\n async def websocket_endpoint(\n websocket: WebSocket,\n- item_id: int,\n- q: str = None,\n- cookie_or_client: str = Depends(get_cookie_or_client),\n+ item_id: str,\n+ q: int = None,\n+ cookie_or_token: str = Depends(get_cookie_or_token),\n ):\n await websocket.accept()\n while True:\n data = await websocket.receive_text()\n await websocket.send_text(\n- f\"Session Cookie or X-Client Header value is: {cookie_or_client}\"\n+ f\"Session cookie or query token value is: {cookie_or_token}\"\n )\n if q is not None:\n await websocket.send_text(f\"Query parameter q is: {q}\")\n", "issue": "Tutorial websocket doc example\n**Describe the bug**\r\nHi, \r\nOn the docs of websocket the last example doesn't work.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. 
Create a file main.py with the last example on the bottom of the file\r\n\r\n>https://fastapi.tiangolo.com/tutorial/websockets/#create-a-websocket\r\n```python\r\nfrom fastapi import Cookie, Depends, FastAPI, Header\r\nfrom starlette.responses import HTMLResponse\r\nfrom starlette.status import WS_1008_POLICY_VIOLATION\r\nfrom starlette.websockets import WebSocket\r\n\r\napp = FastAPI()\r\n\r\nhtml = \"\"\"\r\n<!DOCTYPE html>\r\n<html>\r\n <head>\r\n <title>Chat</title>\r\n </head>\r\n <body>\r\n <h1>WebSocket Chat</h1>\r\n <form action=\"\" onsubmit=\"sendMessage(event)\">\r\n <label>Item ID: <input type=\"text\" id=\"itemId\" autocomplete=\"off\" value=\"foo\"/></label>\r\n <button onclick=\"connect(event)\">Connect</button>\r\n <br>\r\n <label>Message: <input type=\"text\" id=\"messageText\" autocomplete=\"off\"/></label>\r\n <button>Send</button>\r\n </form>\r\n <ul id='messages'>\r\n </ul>\r\n <script>\r\n var ws = null;\r\n function connect(event) {\r\n var input = document.getElementById(\"itemId\")\r\n ws = new WebSocket(\"ws://localhost:8000/items/\" + input.value + \"/ws\");\r\n ws.onmessage = function(event) {\r\n var messages = document.getElementById('messages')\r\n var message = document.createElement('li')\r\n var content = document.createTextNode(event.data)\r\n message.appendChild(content)\r\n messages.appendChild(message)\r\n };\r\n }\r\n function sendMessage(event) {\r\n var input = document.getElementById(\"messageText\")\r\n ws.send(input.value)\r\n input.value = ''\r\n event.preventDefault()\r\n }\r\n </script>\r\n </body>\r\n</html>\r\n\"\"\"\r\n\r\n\r\[email protected](\"/\")\r\nasync def get():\r\n return HTMLResponse(html)\r\n\r\n\r\nasync def get_cookie_or_client(\r\n websocket: WebSocket, session: str = Cookie(None), x_client: str = Header(None)\r\n):\r\n if session is None and x_client is None:\r\n await websocket.close(code=WS_1008_POLICY_VIOLATION)\r\n return session or x_client\r\n\r\n\r\[email protected](\"/items/{item_id}/ws\")\r\nasync def websocket_endpoint(\r\n websocket: WebSocket,\r\n item_id: int,\r\n q: str = None,\r\n cookie_or_client: str = Depends(get_cookie_or_client),\r\n):\r\n await websocket.accept()\r\n while True:\r\n data = await websocket.receive_text()\r\n await websocket.send_text(\r\n f\"Session Cookie or X-Client Header value is: {cookie_or_client}\"\r\n )\r\n if q is not None:\r\n await websocket.send_text(f\"Query parameter q is: {q}\")\r\n await websocket.send_text(f\"Message text was: {data}, for item ID: {item_id}\")\r\n``` \r\n2. Run the application with the cmd:\r\n```\r\nuvicorn main:app --log-level debug --reload\r\n```\r\n3. Open the browser 127.0.0.01\r\n - the first time i connect with ItemID foo , press the button connect \r\n - send message hi with ItemID foo and press the button send.\r\n it's look like the connect fail but the second ,but the send have return code 200\r\n but nothing happen on the web side. \r\n\r\n\r\n\r\n4. 
See error\r\n```python\r\n\r\nINFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit)\r\nINFO: Started reloader process [366952]\r\nemail-validator not installed, email fields will be treated as str.\r\nTo install, run: pip install email-validator\r\nINFO: Started server process [366957]\r\nINFO: Waiting for application startup.\r\nDEBUG: None - ASGI [1] Started\r\nDEBUG: None - ASGI [1] Sent {'type': 'lifespan.startup'}\r\nDEBUG: None - ASGI [1] Received {'type': 'lifespan.startup.complete'}\r\nDEBUG: ('127.0.0.1', 50056) - Connected\r\nDEBUG: server - state = CONNECTING\r\nDEBUG: server - event = connection_made(<TCPTransport closed=False reading=True 0x1819178>)\r\nDEBUG: ('127.0.0.1', 50056) - ASGI [2] Started\r\nDEBUG: ('127.0.0.1', 50056) - ASGI [2] Received {'type': 'websocket.close', 'code': 1008}\r\nINFO: ('127.0.0.1', 50056) - \"WebSocket /items/foo/ws\" 403\r\nDEBUG: ('127.0.0.1', 50056) - ASGI [2] Raised exception\r\nERROR: Exception in ASGI application\r\nTraceback (most recent call last):\r\n File \"/data/experiments/realtime_web_socket/lib/python3.7/site-packages/uvicorn/protocols/websockets/websockets_impl.py\", line 147, in run_asgi\r\n result = await self.app(self.scope, self.asgi_receive, self.asgi_send)\r\n File \"/data/experiments/realtime_web_socket/lib/python3.7/site-packages/uvicorn/middleware/message_logger.py\", line 58, in __call__\r\n raise exc from None\r\n File \"/data/experiments/realtime_web_socket/lib/python3.7/site-packages/uvicorn/middleware/message_logger.py\", line 54, in __call__\r\n await self.app(scope, inner_receive, inner_send)\r\n File \"/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/applications.py\", line 133, in __call__\r\n await self.error_middleware(scope, receive, send)\r\n File \"/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/middleware/errors.py\", line 87, in __call__\r\n await self.app(scope, receive, send)\r\n File \"/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/exceptions.py\", line 49, in __call__\r\n await self.app(scope, receive, send)\r\n File \"/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/routing.py\", line 585, in __call__\r\n await route(scope, receive, send)\r\n File \"/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/routing.py\", line 265, in __call__\r\n await self.app(scope, receive, send)\r\n File \"/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/routing.py\", line 56, in app\r\n await func(session)\r\n File \"/data/experiments/realtime_web_socket/lib/python3.7/site-packages/fastapi/routing.py\", line 148, in app\r\n await websocket.close(code=WS_1008_POLICY_VIOLATION)\r\n File \"/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/websockets.py\", line 121, in close\r\n await self.send({\"type\": \"websocket.close\", \"code\": code})\r\n File \"/data/experiments/realtime_web_socket/lib/python3.7/site-packages/starlette/websockets.py\", line 70, in send\r\n raise RuntimeError('Cannot call \"send\" once a close message has been sent.')\r\nRuntimeError: Cannot call \"send\" once a close message has been sent.\r\nDEBUG: server ! 
failing WebSocket connection in the CONNECTING state: 1006 [no reason]\r\nDEBUG: ('127.0.0.1', 50058) - Connected\r\nDEBUG: server x half-closing TCP connection\r\nDEBUG: ('127.0.0.1', 50058) - ASGI [3] Started\r\nDEBUG: ('127.0.0.1', 50058) - ASGI [3] Received {'type': 'http.response.start', 'status': 200, 'headers': '<...>'}\r\nINFO: ('127.0.0.1', 50058) - \"GET / HTTP/1.1\" 200\r\nDEBUG: ('127.0.0.1', 50058) - ASGI [3] Received {'type': 'http.response.body', 'body': '<1419 bytes>'}\r\nDEBUG: ('127.0.0.1', 50058) - ASGI [3] Completed\r\nDEBUG: server - event = eof_received()\r\nDEBUG: server - event = connection_lost(None)\r\nDEBUG: server - state = CLOSED\r\nDEBUG: server x code = 1006, reason = [no reason]\r\nDEBUG: ('127.0.0.1', 50058) - Disconnected\r\nDEBUG: ('127.0.0.1', 50060) - Connected\r\nDEBUG: ('127.0.0.1', 50060) - ASGI [4] Started\r\nDEBUG: ('127.0.0.1', 50060) - ASGI [4] Received {'type': 'http.response.start', 'status': 200, 'headers': '<...>'}\r\nINFO: ('127.0.0.1', 50060) - \"GET / HTTP/1.1\" 200\r\nDEBUG: ('127.0.0.1', 50060) - ASGI [4] Received {'type': 'http.response.body', 'body': '<1419 bytes>'}\r\nDEBUG: ('127.0.0.1', 50060) - ASGI [4] Completed\r\nDEBUG: ('127.0.0.1', 50060) - Disconnected\r\n\r\n\r\n```\r\n**Expected behavior**\r\nexpected to appear the send bold message on the web page.\r\n\r\n**Environment:**\r\n - OS: centos 7\r\n - FastAPI Version [e.g. 0.3.0], get it with: fastapi==0.31.0\r\n\r\n```Python\r\nimport fastapi\r\nprint(fastapi.__version__)\r\n0.31.0\r\n```\r\n\r\n- Python version, get it with:\r\n\r\n```bash\r\npython --version\r\nPython 3.7.3\r\n```\r\n\r\n\n", "before_files": [{"content": "from fastapi import Cookie, Depends, FastAPI, Header, WebSocket, status\nfrom fastapi.responses import HTMLResponse\n\napp = FastAPI()\n\nhtml = \"\"\"\n<!DOCTYPE html>\n<html>\n <head>\n <title>Chat</title>\n </head>\n <body>\n <h1>WebSocket Chat</h1>\n <form action=\"\" onsubmit=\"sendMessage(event)\">\n <label>Item ID: <input type=\"text\" id=\"itemId\" autocomplete=\"off\" value=\"foo\"/></label>\n <button onclick=\"connect(event)\">Connect</button>\n <br>\n <label>Message: <input type=\"text\" id=\"messageText\" autocomplete=\"off\"/></label>\n <button>Send</button>\n </form>\n <ul id='messages'>\n </ul>\n <script>\n var ws = null;\n function connect(event) {\n var input = document.getElementById(\"itemId\")\n ws = new WebSocket(\"ws://localhost:8000/items/\" + input.value + \"/ws\");\n ws.onmessage = function(event) {\n var messages = document.getElementById('messages')\n var message = document.createElement('li')\n var content = document.createTextNode(event.data)\n message.appendChild(content)\n messages.appendChild(message)\n };\n }\n function sendMessage(event) {\n var input = document.getElementById(\"messageText\")\n ws.send(input.value)\n input.value = ''\n event.preventDefault()\n }\n </script>\n </body>\n</html>\n\"\"\"\n\n\[email protected](\"/\")\nasync def get():\n return HTMLResponse(html)\n\n\nasync def get_cookie_or_client(\n websocket: WebSocket, session: str = Cookie(None), x_client: str = Header(None)\n):\n if session is None and x_client is None:\n await websocket.close(code=status.WS_1008_POLICY_VIOLATION)\n return session or x_client\n\n\[email protected](\"/items/{item_id}/ws\")\nasync def websocket_endpoint(\n websocket: WebSocket,\n item_id: int,\n q: str = None,\n cookie_or_client: str = Depends(get_cookie_or_client),\n):\n await websocket.accept()\n while True:\n data = await websocket.receive_text()\n await 
websocket.send_text(\n f\"Session Cookie or X-Client Header value is: {cookie_or_client}\"\n )\n if q is not None:\n await websocket.send_text(f\"Query parameter q is: {q}\")\n await websocket.send_text(f\"Message text was: {data}, for item ID: {item_id}\")\n", "path": "docs_src/websockets/tutorial002.py"}], "after_files": [{"content": "from fastapi import Cookie, Depends, FastAPI, Query, WebSocket, status\nfrom fastapi.responses import HTMLResponse\n\napp = FastAPI()\n\nhtml = \"\"\"\n<!DOCTYPE html>\n<html>\n <head>\n <title>Chat</title>\n </head>\n <body>\n <h1>WebSocket Chat</h1>\n <form action=\"\" onsubmit=\"sendMessage(event)\">\n <label>Item ID: <input type=\"text\" id=\"itemId\" autocomplete=\"off\" value=\"foo\"/></label>\n <label>Token: <input type=\"text\" id=\"token\" autocomplete=\"off\" value=\"some-key-token\"/></label>\n <button onclick=\"connect(event)\">Connect</button>\n <hr>\n <label>Message: <input type=\"text\" id=\"messageText\" autocomplete=\"off\"/></label>\n <button>Send</button>\n </form>\n <ul id='messages'>\n </ul>\n <script>\n var ws = null;\n function connect(event) {\n var itemId = document.getElementById(\"itemId\")\n var token = document.getElementById(\"token\")\n ws = new WebSocket(\"ws://localhost:8000/items/\" + itemId.value + \"/ws?token=\" + token.value);\n ws.onmessage = function(event) {\n var messages = document.getElementById('messages')\n var message = document.createElement('li')\n var content = document.createTextNode(event.data)\n message.appendChild(content)\n messages.appendChild(message)\n };\n event.preventDefault()\n }\n function sendMessage(event) {\n var input = document.getElementById(\"messageText\")\n ws.send(input.value)\n input.value = ''\n event.preventDefault()\n }\n </script>\n </body>\n</html>\n\"\"\"\n\n\[email protected](\"/\")\nasync def get():\n return HTMLResponse(html)\n\n\nasync def get_cookie_or_token(\n websocket: WebSocket, session: str = Cookie(None), token: str = Query(None)\n):\n if session is None and token is None:\n await websocket.close(code=status.WS_1008_POLICY_VIOLATION)\n return session or token\n\n\[email protected](\"/items/{item_id}/ws\")\nasync def websocket_endpoint(\n websocket: WebSocket,\n item_id: str,\n q: int = None,\n cookie_or_token: str = Depends(get_cookie_or_token),\n):\n await websocket.accept()\n while True:\n data = await websocket.receive_text()\n await websocket.send_text(\n f\"Session cookie or query token value is: {cookie_or_token}\"\n )\n if q is not None:\n await websocket.send_text(f\"Query parameter q is: {q}\")\n await websocket.send_text(f\"Message text was: {data}, for item ID: {item_id}\")\n", "path": "docs_src/websockets/tutorial002.py"}]}
| 3,331 | 739 |
gh_patches_debug_19507 | rasdani/github-patches | git_diff | scoutapp__scout_apm_python-269 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Config to capture query string, not just path
When a request comes in, we capture `request.path`, which doesn't appear to capture any query string.
We should match the Ruby agent:
* Default to capturing the entire path and query string
* Config to not capture query string https://docs.scoutapm.com/#uri_reporting
--- END ISSUE ---
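Before the files, here is a standalone sketch of the requested behaviour. The option name `uri_reporting` and its values are taken from the Ruby agent docs linked above; the real agent would read the setting from its config layers rather than take it as a function argument.

```python
# Standalone sketch: keep the filtered path + query string by default, or
# report only the path when uri_reporting is set to "path".
from urllib.parse import urlencode

FILTER_PARAMETERS = frozenset(["password", "token", "secret"])  # abbreviated


def create_filtered_path(path, query_params, uri_reporting="filtered_params"):
    if uri_reporting == "path":
        return path
    filtered_params = sorted(
        (k, "[FILTERED]" if k.lower() in FILTER_PARAMETERS else v)
        for k, v in query_params
    )
    if not filtered_params:
        return path
    return path + "?" + urlencode(filtered_params)


# e.g. create_filtered_path("/search", [("q", "scout"), ("token", "abc")])
# -> "/search?q=scout&token=%5BFILTERED%5D"
```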
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/scout_apm/core/web_requests.py`
Content:
```
1 # coding=utf-8
2 from scout_apm.compat import urlencode
3
4 # Originally derived from:
5 # 1. Rails:
6 # https://github.com/rails/rails/blob/0196551e6039ca864d1eee1e01819fcae12c1dc9/railties/lib/rails/generators/rails/app/templates/config/initializers/filter_parameter_logging.rb.tt
7 # 2. Sentry server side scrubbing:
8 # https://docs.sentry.io/data-management/sensitive-data/#server-side-scrubbing
9 FILTER_PARAMETERS = frozenset(
10 [
11 "access",
12 "access_token",
13 "api_key",
14 "apikey",
15 "auth",
16 "auth_token",
17 "card[number]",
18 "certificate",
19 "credentials",
20 "crypt",
21 "key",
22 "mysql_pwd",
23 "otp",
24 "passwd",
25 "password",
26 "private",
27 "protected",
28 "salt",
29 "secret",
30 "ssn",
31 "stripetoken",
32 "token",
33 ]
34 )
35
36
37 def create_filtered_path(path, query_params):
38 filtered_params = sorted(
39 (
40 (k, "[FILTERED]" if k.lower() in FILTER_PARAMETERS else v)
41 for k, v in query_params
42 )
43 )
44 if not filtered_params:
45 return path
46 return path + "?" + urlencode(filtered_params)
47
```
Path: `src/scout_apm/core/config.py`
Content:
```
1 # coding=utf-8
2 from __future__ import absolute_import, division, print_function, unicode_literals
3
4 import logging
5 import os
6
7 from scout_apm.compat import string_type
8 from scout_apm.core import platform_detection
9 from scout_apm.core.util import octal
10
11 logger = logging.getLogger(__name__)
12
13
14 class ScoutConfig(object):
15 """
16 Configuration object for the ScoutApm agent.
17
18 Contains a list of configuration "layers". When a configuration key is
19 looked up, each layer is asked in turn if it knows the value. The first one
20 to answer affirmatively returns the value.
21 """
22
23 def __init__(self):
24 self.layers = [
25 ScoutConfigEnv(),
26 ScoutConfigPython(),
27 ScoutConfigDerived(self),
28 ScoutConfigDefaults(),
29 ScoutConfigNull(),
30 ]
31
32 def value(self, key):
33 value = self.locate_layer_for_key(key).value(key)
34 if key in CONVERSIONS:
35 return CONVERSIONS[key](value)
36 return value
37
38 def locate_layer_for_key(self, key):
39 for layer in self.layers:
40 if layer.has_config(key):
41 return layer
42
43 # Should be unreachable because ScoutConfigNull returns None for all
44 # keys.
45 raise ValueError("key {!r} not found in any layer".format(key))
46
47 def log(self):
48 logger.debug("Configuration Loaded:")
49 for key in self.known_keys():
50 layer = self.locate_layer_for_key(key)
51 logger.debug("%-9s: %s = %s", layer.name(), key, layer.value(key))
52
53 def known_keys(self):
54 return [
55 "app_server",
56 "application_root",
57 "core_agent_dir",
58 "core_agent_download",
59 "core_agent_launch",
60 "core_agent_permissions",
61 "core_agent_version",
62 "disabled_instruments",
63 "download_url",
64 "framework",
65 "framework_version",
66 "hostname",
67 "ignore",
68 "key",
69 "log_level",
70 "monitor",
71 "name",
72 "revision_sha",
73 "scm_subdirectory",
74 "socket_path",
75 ]
76
77 def core_agent_permissions(self):
78 try:
79 return octal(self.value("core_agent_permissions"))
80 except ValueError:
81 logger.exception(
82 "Invalid core_agent_permissions value, using default of 0o700"
83 )
84 return 0o700
85
86 @classmethod
87 def set(cls, **kwargs):
88 """
89 Sets a configuration value for the Scout agent. Values set here will
90 not override values set in ENV.
91 """
92 global SCOUT_PYTHON_VALUES
93 for key, value in kwargs.items():
94 SCOUT_PYTHON_VALUES[key] = value
95
96 @classmethod
97 def unset(cls, *keys):
98 """
99 Removes a configuration value for the Scout agent.
100 """
101 global SCOUT_PYTHON_VALUES
102 for key in keys:
103 SCOUT_PYTHON_VALUES.pop(key, None)
104
105 @classmethod
106 def reset_all(cls):
107 """
108 Remove all configuration settings set via `ScoutConfig.set(...)`.
109
110 This is meant for use in testing.
111 """
112 global SCOUT_PYTHON_VALUES
113 SCOUT_PYTHON_VALUES.clear()
114
115
116 # Module-level data, the ScoutConfig.set(key="value") adds to this
117 SCOUT_PYTHON_VALUES = {}
118
119
120 class ScoutConfigPython(object):
121 """
122 A configuration overlay that lets other parts of python set values.
123 """
124
125 def name(self):
126 return "Python"
127
128 def has_config(self, key):
129 return key in SCOUT_PYTHON_VALUES
130
131 def value(self, key):
132 return SCOUT_PYTHON_VALUES[key]
133
134
135 class ScoutConfigEnv(object):
136 """
137 Reads configuration from environment by prefixing the key
138 requested with "SCOUT_"
139
140 Example: the `log_level` config looks for SCOUT_LOG_LEVEL
141 environment variable
142 """
143
144 def name(self):
145 return "ENV"
146
147 def has_config(self, key):
148 env_key = self.modify_key(key)
149 return env_key in os.environ
150
151 def value(self, key):
152 env_key = self.modify_key(key)
153 return os.environ[env_key]
154
155 def modify_key(self, key):
156 env_key = ("SCOUT_" + key).upper()
157 return env_key
158
159
160 class ScoutConfigDerived(object):
161 """
162 A configuration overlay that calculates from other values.
163 """
164
165 def __init__(self, config):
166 """
167 config argument is the overall ScoutConfig var, so we can lookup the
168 components of the derived info.
169 """
170 self.config = config
171
172 def name(self):
173 return "Derived"
174
175 def has_config(self, key):
176 return self.lookup_func(key) is not None
177
178 def value(self, key):
179 return self.lookup_func(key)()
180
181 def lookup_func(self, key):
182 """
183 Returns the derive_#{key} function, or None if it isn't defined
184 """
185 func_name = "derive_" + key
186 return getattr(self, func_name, None)
187
188 def derive_socket_path(self):
189 return "{}/{}/scout-agent.sock".format(
190 self.config.value("core_agent_dir"),
191 self.config.value("core_agent_full_name"),
192 )
193
194 def derive_core_agent_full_name(self):
195 triple = self.config.value("core_agent_triple")
196 if not platform_detection.is_valid_triple(triple):
197 logger.warning("Invalid value for core_agent_triple: %s", triple)
198 return "{name}-{version}-{triple}".format(
199 name="scout_apm_core",
200 version=self.config.value("core_agent_version"),
201 triple=triple,
202 )
203
204 def derive_core_agent_triple(self):
205 return platform_detection.get_triple()
206
207
208 class ScoutConfigDefaults(object):
209 """
210 Provides default values for important configurations
211 """
212
213 def name(self):
214 return "Defaults"
215
216 def __init__(self):
217 self.defaults = {
218 "app_server": "",
219 "application_root": "",
220 "core_agent_dir": "/tmp/scout_apm_core",
221 "core_agent_download": True,
222 "core_agent_launch": True,
223 "core_agent_permissions": 700,
224 "core_agent_version": "v1.2.0", # can be an exact tag name, or 'latest'
225 "disabled_instruments": [],
226 "download_url": "https://s3-us-west-1.amazonaws.com/scout-public-downloads/apm_core_agent/release", # noqa: E501
227 "framework": "",
228 "framework_version": "",
229 "hostname": None,
230 "key": "",
231 "log_level": "info",
232 "monitor": False,
233 "name": "",
234 "revision_sha": self._git_revision_sha(),
235 "scm_subdirectory": "",
236 }
237
238 def _git_revision_sha(self):
239 # N.B. The environment variable SCOUT_REVISION_SHA may also be used,
240 # but that will be picked up by ScoutConfigEnv
241 return os.environ.get("HEROKU_SLUG_COMMIT", "")
242
243 def has_config(self, key):
244 return key in self.defaults
245
246 def value(self, key):
247 return self.defaults[key]
248
249
250 # Always returns None to any key
251 class ScoutConfigNull(object):
252 """
253 Always answers that a key is present, but the value is None
254
255 Used as the last step of the layered configuration.
256 """
257
258 def name(self):
259 return "Null"
260
261 def has_config(self, key):
262 return True
263
264 def value(self, key):
265 return None
266
267
268 def convert_to_bool(value):
269 if isinstance(value, bool):
270 return value
271 if isinstance(value, string_type):
272 return value.lower() in ("yes", "true", "t", "1")
273 # Unknown type - default to false?
274 return False
275
276
277 def convert_to_list(value):
278 if isinstance(value, list):
279 return value
280 if isinstance(value, tuple):
281 return list(value)
282 if isinstance(value, string_type):
283 # Split on commas
284 return [item.strip() for item in value.split(",") if item]
285 # Unknown type - default to empty?
286 return []
287
288
289 CONVERSIONS = {
290 "core_agent_download": convert_to_bool,
291 "core_agent_launch": convert_to_bool,
292 "monitor": convert_to_bool,
293 "disabled_instruments": convert_to_list,
294 "ignore": convert_to_list,
295 }
296
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/scout_apm/core/config.py b/src/scout_apm/core/config.py
--- a/src/scout_apm/core/config.py
+++ b/src/scout_apm/core/config.py
@@ -233,6 +233,7 @@
"name": "",
"revision_sha": self._git_revision_sha(),
"scm_subdirectory": "",
+ "uri_reporting": "filtered_params",
}
def _git_revision_sha(self):
diff --git a/src/scout_apm/core/web_requests.py b/src/scout_apm/core/web_requests.py
--- a/src/scout_apm/core/web_requests.py
+++ b/src/scout_apm/core/web_requests.py
@@ -1,5 +1,6 @@
# coding=utf-8
from scout_apm.compat import urlencode
+from scout_apm.core.context import AgentContext
# Originally derived from:
# 1. Rails:
@@ -35,6 +36,8 @@
def create_filtered_path(path, query_params):
+ if AgentContext.instance.config.value("uri_reporting") == "path":
+ return path
filtered_params = sorted(
(
(k, "[FILTERED]" if k.lower() in FILTER_PARAMETERS else v)
|
{"golden_diff": "diff --git a/src/scout_apm/core/config.py b/src/scout_apm/core/config.py\n--- a/src/scout_apm/core/config.py\n+++ b/src/scout_apm/core/config.py\n@@ -233,6 +233,7 @@\n \"name\": \"\",\n \"revision_sha\": self._git_revision_sha(),\n \"scm_subdirectory\": \"\",\n+ \"uri_reporting\": \"filtered_params\",\n }\n \n def _git_revision_sha(self):\ndiff --git a/src/scout_apm/core/web_requests.py b/src/scout_apm/core/web_requests.py\n--- a/src/scout_apm/core/web_requests.py\n+++ b/src/scout_apm/core/web_requests.py\n@@ -1,5 +1,6 @@\n # coding=utf-8\n from scout_apm.compat import urlencode\n+from scout_apm.core.context import AgentContext\n \n # Originally derived from:\n # 1. Rails:\n@@ -35,6 +36,8 @@\n \n \n def create_filtered_path(path, query_params):\n+ if AgentContext.instance.config.value(\"uri_reporting\") == \"path\":\n+ return path\n filtered_params = sorted(\n (\n (k, \"[FILTERED]\" if k.lower() in FILTER_PARAMETERS else v)\n", "issue": "Config to capture query string, not just path\nWhen a request comes in, we capture `request.path`, which doesn't appear to capture any query string.\r\n\r\nWe should match the Ruby agent:\r\n* Default to capturing the entire path and query string\r\n* Config to not capture query string https://docs.scoutapm.com/#uri_reporting\n", "before_files": [{"content": "# coding=utf-8\nfrom scout_apm.compat import urlencode\n\n# Originally derived from:\n# 1. Rails:\n# https://github.com/rails/rails/blob/0196551e6039ca864d1eee1e01819fcae12c1dc9/railties/lib/rails/generators/rails/app/templates/config/initializers/filter_parameter_logging.rb.tt\n# 2. Sentry server side scrubbing:\n# https://docs.sentry.io/data-management/sensitive-data/#server-side-scrubbing\nFILTER_PARAMETERS = frozenset(\n [\n \"access\",\n \"access_token\",\n \"api_key\",\n \"apikey\",\n \"auth\",\n \"auth_token\",\n \"card[number]\",\n \"certificate\",\n \"credentials\",\n \"crypt\",\n \"key\",\n \"mysql_pwd\",\n \"otp\",\n \"passwd\",\n \"password\",\n \"private\",\n \"protected\",\n \"salt\",\n \"secret\",\n \"ssn\",\n \"stripetoken\",\n \"token\",\n ]\n)\n\n\ndef create_filtered_path(path, query_params):\n filtered_params = sorted(\n (\n (k, \"[FILTERED]\" if k.lower() in FILTER_PARAMETERS else v)\n for k, v in query_params\n )\n )\n if not filtered_params:\n return path\n return path + \"?\" + urlencode(filtered_params)\n", "path": "src/scout_apm/core/web_requests.py"}, {"content": "# coding=utf-8\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport logging\nimport os\n\nfrom scout_apm.compat import string_type\nfrom scout_apm.core import platform_detection\nfrom scout_apm.core.util import octal\n\nlogger = logging.getLogger(__name__)\n\n\nclass ScoutConfig(object):\n \"\"\"\n Configuration object for the ScoutApm agent.\n\n Contains a list of configuration \"layers\". When a configuration key is\n looked up, each layer is asked in turn if it knows the value. 
The first one\n to answer affirmatively returns the value.\n \"\"\"\n\n def __init__(self):\n self.layers = [\n ScoutConfigEnv(),\n ScoutConfigPython(),\n ScoutConfigDerived(self),\n ScoutConfigDefaults(),\n ScoutConfigNull(),\n ]\n\n def value(self, key):\n value = self.locate_layer_for_key(key).value(key)\n if key in CONVERSIONS:\n return CONVERSIONS[key](value)\n return value\n\n def locate_layer_for_key(self, key):\n for layer in self.layers:\n if layer.has_config(key):\n return layer\n\n # Should be unreachable because ScoutConfigNull returns None for all\n # keys.\n raise ValueError(\"key {!r} not found in any layer\".format(key))\n\n def log(self):\n logger.debug(\"Configuration Loaded:\")\n for key in self.known_keys():\n layer = self.locate_layer_for_key(key)\n logger.debug(\"%-9s: %s = %s\", layer.name(), key, layer.value(key))\n\n def known_keys(self):\n return [\n \"app_server\",\n \"application_root\",\n \"core_agent_dir\",\n \"core_agent_download\",\n \"core_agent_launch\",\n \"core_agent_permissions\",\n \"core_agent_version\",\n \"disabled_instruments\",\n \"download_url\",\n \"framework\",\n \"framework_version\",\n \"hostname\",\n \"ignore\",\n \"key\",\n \"log_level\",\n \"monitor\",\n \"name\",\n \"revision_sha\",\n \"scm_subdirectory\",\n \"socket_path\",\n ]\n\n def core_agent_permissions(self):\n try:\n return octal(self.value(\"core_agent_permissions\"))\n except ValueError:\n logger.exception(\n \"Invalid core_agent_permissions value, using default of 0o700\"\n )\n return 0o700\n\n @classmethod\n def set(cls, **kwargs):\n \"\"\"\n Sets a configuration value for the Scout agent. Values set here will\n not override values set in ENV.\n \"\"\"\n global SCOUT_PYTHON_VALUES\n for key, value in kwargs.items():\n SCOUT_PYTHON_VALUES[key] = value\n\n @classmethod\n def unset(cls, *keys):\n \"\"\"\n Removes a configuration value for the Scout agent.\n \"\"\"\n global SCOUT_PYTHON_VALUES\n for key in keys:\n SCOUT_PYTHON_VALUES.pop(key, None)\n\n @classmethod\n def reset_all(cls):\n \"\"\"\n Remove all configuration settings set via `ScoutConfig.set(...)`.\n\n This is meant for use in testing.\n \"\"\"\n global SCOUT_PYTHON_VALUES\n SCOUT_PYTHON_VALUES.clear()\n\n\n# Module-level data, the ScoutConfig.set(key=\"value\") adds to this\nSCOUT_PYTHON_VALUES = {}\n\n\nclass ScoutConfigPython(object):\n \"\"\"\n A configuration overlay that lets other parts of python set values.\n \"\"\"\n\n def name(self):\n return \"Python\"\n\n def has_config(self, key):\n return key in SCOUT_PYTHON_VALUES\n\n def value(self, key):\n return SCOUT_PYTHON_VALUES[key]\n\n\nclass ScoutConfigEnv(object):\n \"\"\"\n Reads configuration from environment by prefixing the key\n requested with \"SCOUT_\"\n\n Example: the `log_level` config looks for SCOUT_LOG_LEVEL\n environment variable\n \"\"\"\n\n def name(self):\n return \"ENV\"\n\n def has_config(self, key):\n env_key = self.modify_key(key)\n return env_key in os.environ\n\n def value(self, key):\n env_key = self.modify_key(key)\n return os.environ[env_key]\n\n def modify_key(self, key):\n env_key = (\"SCOUT_\" + key).upper()\n return env_key\n\n\nclass ScoutConfigDerived(object):\n \"\"\"\n A configuration overlay that calculates from other values.\n \"\"\"\n\n def __init__(self, config):\n \"\"\"\n config argument is the overall ScoutConfig var, so we can lookup the\n components of the derived info.\n \"\"\"\n self.config = config\n\n def name(self):\n return \"Derived\"\n\n def has_config(self, key):\n return self.lookup_func(key) is not None\n\n 
def value(self, key):\n return self.lookup_func(key)()\n\n def lookup_func(self, key):\n \"\"\"\n Returns the derive_#{key} function, or None if it isn't defined\n \"\"\"\n func_name = \"derive_\" + key\n return getattr(self, func_name, None)\n\n def derive_socket_path(self):\n return \"{}/{}/scout-agent.sock\".format(\n self.config.value(\"core_agent_dir\"),\n self.config.value(\"core_agent_full_name\"),\n )\n\n def derive_core_agent_full_name(self):\n triple = self.config.value(\"core_agent_triple\")\n if not platform_detection.is_valid_triple(triple):\n logger.warning(\"Invalid value for core_agent_triple: %s\", triple)\n return \"{name}-{version}-{triple}\".format(\n name=\"scout_apm_core\",\n version=self.config.value(\"core_agent_version\"),\n triple=triple,\n )\n\n def derive_core_agent_triple(self):\n return platform_detection.get_triple()\n\n\nclass ScoutConfigDefaults(object):\n \"\"\"\n Provides default values for important configurations\n \"\"\"\n\n def name(self):\n return \"Defaults\"\n\n def __init__(self):\n self.defaults = {\n \"app_server\": \"\",\n \"application_root\": \"\",\n \"core_agent_dir\": \"/tmp/scout_apm_core\",\n \"core_agent_download\": True,\n \"core_agent_launch\": True,\n \"core_agent_permissions\": 700,\n \"core_agent_version\": \"v1.2.0\", # can be an exact tag name, or 'latest'\n \"disabled_instruments\": [],\n \"download_url\": \"https://s3-us-west-1.amazonaws.com/scout-public-downloads/apm_core_agent/release\", # noqa: E501\n \"framework\": \"\",\n \"framework_version\": \"\",\n \"hostname\": None,\n \"key\": \"\",\n \"log_level\": \"info\",\n \"monitor\": False,\n \"name\": \"\",\n \"revision_sha\": self._git_revision_sha(),\n \"scm_subdirectory\": \"\",\n }\n\n def _git_revision_sha(self):\n # N.B. The environment variable SCOUT_REVISION_SHA may also be used,\n # but that will be picked up by ScoutConfigEnv\n return os.environ.get(\"HEROKU_SLUG_COMMIT\", \"\")\n\n def has_config(self, key):\n return key in self.defaults\n\n def value(self, key):\n return self.defaults[key]\n\n\n# Always returns None to any key\nclass ScoutConfigNull(object):\n \"\"\"\n Always answers that a key is present, but the value is None\n\n Used as the last step of the layered configuration.\n \"\"\"\n\n def name(self):\n return \"Null\"\n\n def has_config(self, key):\n return True\n\n def value(self, key):\n return None\n\n\ndef convert_to_bool(value):\n if isinstance(value, bool):\n return value\n if isinstance(value, string_type):\n return value.lower() in (\"yes\", \"true\", \"t\", \"1\")\n # Unknown type - default to false?\n return False\n\n\ndef convert_to_list(value):\n if isinstance(value, list):\n return value\n if isinstance(value, tuple):\n return list(value)\n if isinstance(value, string_type):\n # Split on commas\n return [item.strip() for item in value.split(\",\") if item]\n # Unknown type - default to empty?\n return []\n\n\nCONVERSIONS = {\n \"core_agent_download\": convert_to_bool,\n \"core_agent_launch\": convert_to_bool,\n \"monitor\": convert_to_bool,\n \"disabled_instruments\": convert_to_list,\n \"ignore\": convert_to_list,\n}\n", "path": "src/scout_apm/core/config.py"}], "after_files": [{"content": "# coding=utf-8\nfrom scout_apm.compat import urlencode\nfrom scout_apm.core.context import AgentContext\n\n# Originally derived from:\n# 1. Rails:\n# https://github.com/rails/rails/blob/0196551e6039ca864d1eee1e01819fcae12c1dc9/railties/lib/rails/generators/rails/app/templates/config/initializers/filter_parameter_logging.rb.tt\n# 2. 
Sentry server side scrubbing:\n# https://docs.sentry.io/data-management/sensitive-data/#server-side-scrubbing\nFILTER_PARAMETERS = frozenset(\n [\n \"access\",\n \"access_token\",\n \"api_key\",\n \"apikey\",\n \"auth\",\n \"auth_token\",\n \"card[number]\",\n \"certificate\",\n \"credentials\",\n \"crypt\",\n \"key\",\n \"mysql_pwd\",\n \"otp\",\n \"passwd\",\n \"password\",\n \"private\",\n \"protected\",\n \"salt\",\n \"secret\",\n \"ssn\",\n \"stripetoken\",\n \"token\",\n ]\n)\n\n\ndef create_filtered_path(path, query_params):\n if AgentContext.instance.config.value(\"uri_reporting\") == \"path\":\n return path\n filtered_params = sorted(\n (\n (k, \"[FILTERED]\" if k.lower() in FILTER_PARAMETERS else v)\n for k, v in query_params\n )\n )\n if not filtered_params:\n return path\n return path + \"?\" + urlencode(filtered_params)\n", "path": "src/scout_apm/core/web_requests.py"}, {"content": "# coding=utf-8\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport logging\nimport os\n\nfrom scout_apm.compat import string_type\nfrom scout_apm.core import platform_detection\nfrom scout_apm.core.util import octal\n\nlogger = logging.getLogger(__name__)\n\n\nclass ScoutConfig(object):\n \"\"\"\n Configuration object for the ScoutApm agent.\n\n Contains a list of configuration \"layers\". When a configuration key is\n looked up, each layer is asked in turn if it knows the value. The first one\n to answer affirmatively returns the value.\n \"\"\"\n\n def __init__(self):\n self.layers = [\n ScoutConfigEnv(),\n ScoutConfigPython(),\n ScoutConfigDerived(self),\n ScoutConfigDefaults(),\n ScoutConfigNull(),\n ]\n\n def value(self, key):\n value = self.locate_layer_for_key(key).value(key)\n if key in CONVERSIONS:\n return CONVERSIONS[key](value)\n return value\n\n def locate_layer_for_key(self, key):\n for layer in self.layers:\n if layer.has_config(key):\n return layer\n\n # Should be unreachable because ScoutConfigNull returns None for all\n # keys.\n raise ValueError(\"key {!r} not found in any layer\".format(key))\n\n def log(self):\n logger.debug(\"Configuration Loaded:\")\n for key in self.known_keys():\n layer = self.locate_layer_for_key(key)\n logger.debug(\"%-9s: %s = %s\", layer.name(), key, layer.value(key))\n\n def known_keys(self):\n return [\n \"app_server\",\n \"application_root\",\n \"core_agent_dir\",\n \"core_agent_download\",\n \"core_agent_launch\",\n \"core_agent_permissions\",\n \"core_agent_version\",\n \"disabled_instruments\",\n \"download_url\",\n \"framework\",\n \"framework_version\",\n \"hostname\",\n \"ignore\",\n \"key\",\n \"log_level\",\n \"monitor\",\n \"name\",\n \"revision_sha\",\n \"scm_subdirectory\",\n \"socket_path\",\n ]\n\n def core_agent_permissions(self):\n try:\n return octal(self.value(\"core_agent_permissions\"))\n except ValueError:\n logger.exception(\n \"Invalid core_agent_permissions value, using default of 0o700\"\n )\n return 0o700\n\n @classmethod\n def set(cls, **kwargs):\n \"\"\"\n Sets a configuration value for the Scout agent. 
Values set here will\n not override values set in ENV.\n \"\"\"\n global SCOUT_PYTHON_VALUES\n for key, value in kwargs.items():\n SCOUT_PYTHON_VALUES[key] = value\n\n @classmethod\n def unset(cls, *keys):\n \"\"\"\n Removes a configuration value for the Scout agent.\n \"\"\"\n global SCOUT_PYTHON_VALUES\n for key in keys:\n SCOUT_PYTHON_VALUES.pop(key, None)\n\n @classmethod\n def reset_all(cls):\n \"\"\"\n Remove all configuration settings set via `ScoutConfig.set(...)`.\n\n This is meant for use in testing.\n \"\"\"\n global SCOUT_PYTHON_VALUES\n SCOUT_PYTHON_VALUES.clear()\n\n\n# Module-level data, the ScoutConfig.set(key=\"value\") adds to this\nSCOUT_PYTHON_VALUES = {}\n\n\nclass ScoutConfigPython(object):\n \"\"\"\n A configuration overlay that lets other parts of python set values.\n \"\"\"\n\n def name(self):\n return \"Python\"\n\n def has_config(self, key):\n return key in SCOUT_PYTHON_VALUES\n\n def value(self, key):\n return SCOUT_PYTHON_VALUES[key]\n\n\nclass ScoutConfigEnv(object):\n \"\"\"\n Reads configuration from environment by prefixing the key\n requested with \"SCOUT_\"\n\n Example: the `log_level` config looks for SCOUT_LOG_LEVEL\n environment variable\n \"\"\"\n\n def name(self):\n return \"ENV\"\n\n def has_config(self, key):\n env_key = self.modify_key(key)\n return env_key in os.environ\n\n def value(self, key):\n env_key = self.modify_key(key)\n return os.environ[env_key]\n\n def modify_key(self, key):\n env_key = (\"SCOUT_\" + key).upper()\n return env_key\n\n\nclass ScoutConfigDerived(object):\n \"\"\"\n A configuration overlay that calculates from other values.\n \"\"\"\n\n def __init__(self, config):\n \"\"\"\n config argument is the overall ScoutConfig var, so we can lookup the\n components of the derived info.\n \"\"\"\n self.config = config\n\n def name(self):\n return \"Derived\"\n\n def has_config(self, key):\n return self.lookup_func(key) is not None\n\n def value(self, key):\n return self.lookup_func(key)()\n\n def lookup_func(self, key):\n \"\"\"\n Returns the derive_#{key} function, or None if it isn't defined\n \"\"\"\n func_name = \"derive_\" + key\n return getattr(self, func_name, None)\n\n def derive_socket_path(self):\n return \"{}/{}/scout-agent.sock\".format(\n self.config.value(\"core_agent_dir\"),\n self.config.value(\"core_agent_full_name\"),\n )\n\n def derive_core_agent_full_name(self):\n triple = self.config.value(\"core_agent_triple\")\n if not platform_detection.is_valid_triple(triple):\n logger.warning(\"Invalid value for core_agent_triple: %s\", triple)\n return \"{name}-{version}-{triple}\".format(\n name=\"scout_apm_core\",\n version=self.config.value(\"core_agent_version\"),\n triple=triple,\n )\n\n def derive_core_agent_triple(self):\n return platform_detection.get_triple()\n\n\nclass ScoutConfigDefaults(object):\n \"\"\"\n Provides default values for important configurations\n \"\"\"\n\n def name(self):\n return \"Defaults\"\n\n def __init__(self):\n self.defaults = {\n \"app_server\": \"\",\n \"application_root\": \"\",\n \"core_agent_dir\": \"/tmp/scout_apm_core\",\n \"core_agent_download\": True,\n \"core_agent_launch\": True,\n \"core_agent_permissions\": 700,\n \"core_agent_version\": \"v1.2.0\", # can be an exact tag name, or 'latest'\n \"disabled_instruments\": [],\n \"download_url\": \"https://s3-us-west-1.amazonaws.com/scout-public-downloads/apm_core_agent/release\", # noqa: E501\n \"framework\": \"\",\n \"framework_version\": \"\",\n \"hostname\": None,\n \"key\": \"\",\n \"log_level\": \"info\",\n \"monitor\": 
False,\n \"name\": \"\",\n \"revision_sha\": self._git_revision_sha(),\n \"scm_subdirectory\": \"\",\n \"uri_reporting\": \"filtered_params\",\n }\n\n def _git_revision_sha(self):\n # N.B. The environment variable SCOUT_REVISION_SHA may also be used,\n # but that will be picked up by ScoutConfigEnv\n return os.environ.get(\"HEROKU_SLUG_COMMIT\", \"\")\n\n def has_config(self, key):\n return key in self.defaults\n\n def value(self, key):\n return self.defaults[key]\n\n\n# Always returns None to any key\nclass ScoutConfigNull(object):\n \"\"\"\n Always answers that a key is present, but the value is None\n\n Used as the last step of the layered configuration.\n \"\"\"\n\n def name(self):\n return \"Null\"\n\n def has_config(self, key):\n return True\n\n def value(self, key):\n return None\n\n\ndef convert_to_bool(value):\n if isinstance(value, bool):\n return value\n if isinstance(value, string_type):\n return value.lower() in (\"yes\", \"true\", \"t\", \"1\")\n # Unknown type - default to false?\n return False\n\n\ndef convert_to_list(value):\n if isinstance(value, list):\n return value\n if isinstance(value, tuple):\n return list(value)\n if isinstance(value, string_type):\n # Split on commas\n return [item.strip() for item in value.split(\",\") if item]\n # Unknown type - default to empty?\n return []\n\n\nCONVERSIONS = {\n \"core_agent_download\": convert_to_bool,\n \"core_agent_launch\": convert_to_bool,\n \"monitor\": convert_to_bool,\n \"disabled_instruments\": convert_to_list,\n \"ignore\": convert_to_list,\n}\n", "path": "src/scout_apm/core/config.py"}]}
| 3,336 | 266 |
gh_patches_debug_20887
|
rasdani/github-patches
|
git_diff
|
zulip__zulip-19155
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Make muting work for deactivated users
Even after a user is deactivated, it may be helpful to hide that user's messages, avatar, etc. We should therefore make it possible to mute deactivated users.
This will also fix a bug where if a user tries to mute a deactivated user, the action silently fails.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `zerver/views/muting.py`
Content:
```
1 import datetime
2 from typing import Optional
3
4 from django.http import HttpRequest, HttpResponse
5 from django.utils.timezone import now as timezone_now
6 from django.utils.translation import gettext as _
7
8 from zerver.lib.actions import do_mute_topic, do_mute_user, do_unmute_topic, do_unmute_user
9 from zerver.lib.exceptions import JsonableError
10 from zerver.lib.request import REQ, has_request_variables
11 from zerver.lib.response import json_success
12 from zerver.lib.streams import (
13 access_stream_by_id,
14 access_stream_by_name,
15 access_stream_for_unmute_topic_by_id,
16 access_stream_for_unmute_topic_by_name,
17 check_for_exactly_one_stream_arg,
18 )
19 from zerver.lib.topic_mutes import topic_is_muted
20 from zerver.lib.user_mutes import get_mute_object
21 from zerver.lib.users import access_user_by_id
22 from zerver.lib.validator import check_int
23 from zerver.models import UserProfile
24
25
26 def mute_topic(
27 user_profile: UserProfile,
28 stream_id: Optional[int],
29 stream_name: Optional[str],
30 topic_name: str,
31 date_muted: datetime.datetime,
32 ) -> HttpResponse:
33 if stream_name is not None:
34 (stream, sub) = access_stream_by_name(user_profile, stream_name)
35 else:
36 assert stream_id is not None
37 (stream, sub) = access_stream_by_id(user_profile, stream_id)
38
39 if topic_is_muted(user_profile, stream.id, topic_name):
40 raise JsonableError(_("Topic already muted"))
41
42 do_mute_topic(user_profile, stream, topic_name, date_muted)
43 return json_success()
44
45
46 def unmute_topic(
47 user_profile: UserProfile, stream_id: Optional[int], stream_name: Optional[str], topic_name: str
48 ) -> HttpResponse:
49 error = _("Topic is not muted")
50
51 if stream_name is not None:
52 stream = access_stream_for_unmute_topic_by_name(user_profile, stream_name, error)
53 else:
54 assert stream_id is not None
55 stream = access_stream_for_unmute_topic_by_id(user_profile, stream_id, error)
56
57 if not topic_is_muted(user_profile, stream.id, topic_name):
58 raise JsonableError(error)
59
60 do_unmute_topic(user_profile, stream, topic_name)
61 return json_success()
62
63
64 @has_request_variables
65 def update_muted_topic(
66 request: HttpRequest,
67 user_profile: UserProfile,
68 stream_id: Optional[int] = REQ(json_validator=check_int, default=None),
69 stream: Optional[str] = REQ(default=None),
70 topic: str = REQ(),
71 op: str = REQ(),
72 ) -> HttpResponse:
73
74 check_for_exactly_one_stream_arg(stream_id=stream_id, stream=stream)
75
76 if op == "add":
77 return mute_topic(
78 user_profile=user_profile,
79 stream_id=stream_id,
80 stream_name=stream,
81 topic_name=topic,
82 date_muted=timezone_now(),
83 )
84 elif op == "remove":
85 return unmute_topic(
86 user_profile=user_profile,
87 stream_id=stream_id,
88 stream_name=stream,
89 topic_name=topic,
90 )
91
92
93 def mute_user(request: HttpRequest, user_profile: UserProfile, muted_user_id: int) -> HttpResponse:
94 if user_profile.id == muted_user_id:
95 raise JsonableError(_("Cannot mute self"))
96
97 muted_user = access_user_by_id(user_profile, muted_user_id, allow_bots=False, for_admin=False)
98 date_muted = timezone_now()
99
100 if get_mute_object(user_profile, muted_user) is not None:
101 raise JsonableError(_("User already muted"))
102
103 do_mute_user(user_profile, muted_user, date_muted)
104 return json_success()
105
106
107 def unmute_user(
108 request: HttpRequest, user_profile: UserProfile, muted_user_id: int
109 ) -> HttpResponse:
110 muted_user = access_user_by_id(user_profile, muted_user_id, allow_bots=False, for_admin=False)
111 mute_object = get_mute_object(user_profile, muted_user)
112
113 if mute_object is None:
114 raise JsonableError(_("User is not muted"))
115
116 do_unmute_user(mute_object)
117 return json_success()
118
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/zerver/views/muting.py b/zerver/views/muting.py
--- a/zerver/views/muting.py
+++ b/zerver/views/muting.py
@@ -94,7 +94,9 @@
if user_profile.id == muted_user_id:
raise JsonableError(_("Cannot mute self"))
- muted_user = access_user_by_id(user_profile, muted_user_id, allow_bots=False, for_admin=False)
+ muted_user = access_user_by_id(
+ user_profile, muted_user_id, allow_bots=False, allow_deactivated=True, for_admin=False
+ )
date_muted = timezone_now()
if get_mute_object(user_profile, muted_user) is not None:
@@ -107,7 +109,9 @@
def unmute_user(
request: HttpRequest, user_profile: UserProfile, muted_user_id: int
) -> HttpResponse:
- muted_user = access_user_by_id(user_profile, muted_user_id, allow_bots=False, for_admin=False)
+ muted_user = access_user_by_id(
+ user_profile, muted_user_id, allow_bots=False, allow_deactivated=True, for_admin=False
+ )
mute_object = get_mute_object(user_profile, muted_user)
if mute_object is None:
|
{"golden_diff": "diff --git a/zerver/views/muting.py b/zerver/views/muting.py\n--- a/zerver/views/muting.py\n+++ b/zerver/views/muting.py\n@@ -94,7 +94,9 @@\n if user_profile.id == muted_user_id:\n raise JsonableError(_(\"Cannot mute self\"))\n \n- muted_user = access_user_by_id(user_profile, muted_user_id, allow_bots=False, for_admin=False)\n+ muted_user = access_user_by_id(\n+ user_profile, muted_user_id, allow_bots=False, allow_deactivated=True, for_admin=False\n+ )\n date_muted = timezone_now()\n \n if get_mute_object(user_profile, muted_user) is not None:\n@@ -107,7 +109,9 @@\n def unmute_user(\n request: HttpRequest, user_profile: UserProfile, muted_user_id: int\n ) -> HttpResponse:\n- muted_user = access_user_by_id(user_profile, muted_user_id, allow_bots=False, for_admin=False)\n+ muted_user = access_user_by_id(\n+ user_profile, muted_user_id, allow_bots=False, allow_deactivated=True, for_admin=False\n+ )\n mute_object = get_mute_object(user_profile, muted_user)\n \n if mute_object is None:\n", "issue": "Make muting work for deactivated users\nEven after a user deactivated, it may be helpful to hide that user's messages, avatar, etc. We should therefore make it possible to mute deactivated users.\r\n\r\nThis will also fix a bug where if a user tries to mute a deactivated user, the action silently fails.\n", "before_files": [{"content": "import datetime\nfrom typing import Optional\n\nfrom django.http import HttpRequest, HttpResponse\nfrom django.utils.timezone import now as timezone_now\nfrom django.utils.translation import gettext as _\n\nfrom zerver.lib.actions import do_mute_topic, do_mute_user, do_unmute_topic, do_unmute_user\nfrom zerver.lib.exceptions import JsonableError\nfrom zerver.lib.request import REQ, has_request_variables\nfrom zerver.lib.response import json_success\nfrom zerver.lib.streams import (\n access_stream_by_id,\n access_stream_by_name,\n access_stream_for_unmute_topic_by_id,\n access_stream_for_unmute_topic_by_name,\n check_for_exactly_one_stream_arg,\n)\nfrom zerver.lib.topic_mutes import topic_is_muted\nfrom zerver.lib.user_mutes import get_mute_object\nfrom zerver.lib.users import access_user_by_id\nfrom zerver.lib.validator import check_int\nfrom zerver.models import UserProfile\n\n\ndef mute_topic(\n user_profile: UserProfile,\n stream_id: Optional[int],\n stream_name: Optional[str],\n topic_name: str,\n date_muted: datetime.datetime,\n) -> HttpResponse:\n if stream_name is not None:\n (stream, sub) = access_stream_by_name(user_profile, stream_name)\n else:\n assert stream_id is not None\n (stream, sub) = access_stream_by_id(user_profile, stream_id)\n\n if topic_is_muted(user_profile, stream.id, topic_name):\n raise JsonableError(_(\"Topic already muted\"))\n\n do_mute_topic(user_profile, stream, topic_name, date_muted)\n return json_success()\n\n\ndef unmute_topic(\n user_profile: UserProfile, stream_id: Optional[int], stream_name: Optional[str], topic_name: str\n) -> HttpResponse:\n error = _(\"Topic is not muted\")\n\n if stream_name is not None:\n stream = access_stream_for_unmute_topic_by_name(user_profile, stream_name, error)\n else:\n assert stream_id is not None\n stream = access_stream_for_unmute_topic_by_id(user_profile, stream_id, error)\n\n if not topic_is_muted(user_profile, stream.id, topic_name):\n raise JsonableError(error)\n\n do_unmute_topic(user_profile, stream, topic_name)\n return json_success()\n\n\n@has_request_variables\ndef update_muted_topic(\n request: HttpRequest,\n user_profile: UserProfile,\n stream_id: Optional[int] = 
REQ(json_validator=check_int, default=None),\n stream: Optional[str] = REQ(default=None),\n topic: str = REQ(),\n op: str = REQ(),\n) -> HttpResponse:\n\n check_for_exactly_one_stream_arg(stream_id=stream_id, stream=stream)\n\n if op == \"add\":\n return mute_topic(\n user_profile=user_profile,\n stream_id=stream_id,\n stream_name=stream,\n topic_name=topic,\n date_muted=timezone_now(),\n )\n elif op == \"remove\":\n return unmute_topic(\n user_profile=user_profile,\n stream_id=stream_id,\n stream_name=stream,\n topic_name=topic,\n )\n\n\ndef mute_user(request: HttpRequest, user_profile: UserProfile, muted_user_id: int) -> HttpResponse:\n if user_profile.id == muted_user_id:\n raise JsonableError(_(\"Cannot mute self\"))\n\n muted_user = access_user_by_id(user_profile, muted_user_id, allow_bots=False, for_admin=False)\n date_muted = timezone_now()\n\n if get_mute_object(user_profile, muted_user) is not None:\n raise JsonableError(_(\"User already muted\"))\n\n do_mute_user(user_profile, muted_user, date_muted)\n return json_success()\n\n\ndef unmute_user(\n request: HttpRequest, user_profile: UserProfile, muted_user_id: int\n) -> HttpResponse:\n muted_user = access_user_by_id(user_profile, muted_user_id, allow_bots=False, for_admin=False)\n mute_object = get_mute_object(user_profile, muted_user)\n\n if mute_object is None:\n raise JsonableError(_(\"User is not muted\"))\n\n do_unmute_user(mute_object)\n return json_success()\n", "path": "zerver/views/muting.py"}], "after_files": [{"content": "import datetime\nfrom typing import Optional\n\nfrom django.http import HttpRequest, HttpResponse\nfrom django.utils.timezone import now as timezone_now\nfrom django.utils.translation import gettext as _\n\nfrom zerver.lib.actions import do_mute_topic, do_mute_user, do_unmute_topic, do_unmute_user\nfrom zerver.lib.exceptions import JsonableError\nfrom zerver.lib.request import REQ, has_request_variables\nfrom zerver.lib.response import json_success\nfrom zerver.lib.streams import (\n access_stream_by_id,\n access_stream_by_name,\n access_stream_for_unmute_topic_by_id,\n access_stream_for_unmute_topic_by_name,\n check_for_exactly_one_stream_arg,\n)\nfrom zerver.lib.topic_mutes import topic_is_muted\nfrom zerver.lib.user_mutes import get_mute_object\nfrom zerver.lib.users import access_user_by_id\nfrom zerver.lib.validator import check_int\nfrom zerver.models import UserProfile\n\n\ndef mute_topic(\n user_profile: UserProfile,\n stream_id: Optional[int],\n stream_name: Optional[str],\n topic_name: str,\n date_muted: datetime.datetime,\n) -> HttpResponse:\n if stream_name is not None:\n (stream, sub) = access_stream_by_name(user_profile, stream_name)\n else:\n assert stream_id is not None\n (stream, sub) = access_stream_by_id(user_profile, stream_id)\n\n if topic_is_muted(user_profile, stream.id, topic_name):\n raise JsonableError(_(\"Topic already muted\"))\n\n do_mute_topic(user_profile, stream, topic_name, date_muted)\n return json_success()\n\n\ndef unmute_topic(\n user_profile: UserProfile, stream_id: Optional[int], stream_name: Optional[str], topic_name: str\n) -> HttpResponse:\n error = _(\"Topic is not muted\")\n\n if stream_name is not None:\n stream = access_stream_for_unmute_topic_by_name(user_profile, stream_name, error)\n else:\n assert stream_id is not None\n stream = access_stream_for_unmute_topic_by_id(user_profile, stream_id, error)\n\n if not topic_is_muted(user_profile, stream.id, topic_name):\n raise JsonableError(error)\n\n do_unmute_topic(user_profile, stream, topic_name)\n return 
json_success()\n\n\n@has_request_variables\ndef update_muted_topic(\n request: HttpRequest,\n user_profile: UserProfile,\n stream_id: Optional[int] = REQ(json_validator=check_int, default=None),\n stream: Optional[str] = REQ(default=None),\n topic: str = REQ(),\n op: str = REQ(),\n) -> HttpResponse:\n\n check_for_exactly_one_stream_arg(stream_id=stream_id, stream=stream)\n\n if op == \"add\":\n return mute_topic(\n user_profile=user_profile,\n stream_id=stream_id,\n stream_name=stream,\n topic_name=topic,\n date_muted=timezone_now(),\n )\n elif op == \"remove\":\n return unmute_topic(\n user_profile=user_profile,\n stream_id=stream_id,\n stream_name=stream,\n topic_name=topic,\n )\n\n\ndef mute_user(request: HttpRequest, user_profile: UserProfile, muted_user_id: int) -> HttpResponse:\n if user_profile.id == muted_user_id:\n raise JsonableError(_(\"Cannot mute self\"))\n\n muted_user = access_user_by_id(\n user_profile, muted_user_id, allow_bots=False, allow_deactivated=True, for_admin=False\n )\n date_muted = timezone_now()\n\n if get_mute_object(user_profile, muted_user) is not None:\n raise JsonableError(_(\"User already muted\"))\n\n do_mute_user(user_profile, muted_user, date_muted)\n return json_success()\n\n\ndef unmute_user(\n request: HttpRequest, user_profile: UserProfile, muted_user_id: int\n) -> HttpResponse:\n muted_user = access_user_by_id(\n user_profile, muted_user_id, allow_bots=False, allow_deactivated=True, for_admin=False\n )\n mute_object = get_mute_object(user_profile, muted_user)\n\n if mute_object is None:\n raise JsonableError(_(\"User is not muted\"))\n\n do_unmute_user(mute_object)\n return json_success()\n", "path": "zerver/views/muting.py"}]}
| 1,455 | 277 |
gh_patches_debug_28380
|
rasdani/github-patches
|
git_diff
|
freedomofpress__securedrop-5679
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add clearer action reminder in Journalist Interface to enable v3
As part of the 1.7.0 release, we've agreed that we want to improve our reminder in the Journalist Interface to enable v3 onion services. The current banner looks as follows:

You can also see it on https://demo-journalist.securedrop.org/ after logging in using the dev credentials.
The plan of record is as follows:
- SecureDrop 1.7.0 (released in January) will still fully support v2 onion services
- SecureDrop 1.8.0 (released in February) will support Ubuntu 20.04 (#4768), and _exclusively_ support v3 onion services for Focal installs, without changing the behavior on Xenial.
- After April 30 (Ubuntu 16.04 EOL), instances not upgraded to Ubuntu 20.04 will self-disable.
In other words, the v3 switch is unavoidable due to the Xenial end-of-life. Our goal with the 1.7.0 release is to get more admins to make the switch prior to a reinstall on Focal, to make the process a bit easier for them. The specific action we need to motivate:
1) *If v3 is already enabled, but v2 services are still available:* Disable v2 services, ensure that all journalists/admins have v3 creds, and ensure that landing page points to v3 onion.
2) *If v3 is not enabled yet:* Enable v3 services (can still run them alongside v2 for a bit, per docs, then go v3-only).
We've discussed that this could potentially be done via different banners for 1) and 2) in the Journalist Interface. Let's kick around language/UX a bit in the comments, and I'll add the final agreed upon spec to the top-level issue.
## User Story
As an administrator, I want to be reminded of critical actions I must take to keep my instance running, so that I'm not caught by surprise.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `securedrop/journalist_app/__init__.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 from datetime import datetime, timedelta
4 from flask import (Flask, session, redirect, url_for, flash, g, request,
5 render_template)
6 from flask_assets import Environment
7 from flask_babel import gettext
8 from flask_wtf.csrf import CSRFProtect, CSRFError
9 from os import path
10 import sys
11 from werkzeug.exceptions import default_exceptions
12
13 import i18n
14 import template_filters
15 import version
16
17 from crypto_util import CryptoUtil
18 from db import db
19 from journalist_app import account, admin, api, main, col
20 from journalist_app.utils import (get_source, logged_in,
21 JournalistInterfaceSessionInterface,
22 cleanup_expired_revoked_tokens)
23 from models import InstanceConfig, Journalist
24 from store import Storage
25
26 import typing
27 # https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking
28 if typing.TYPE_CHECKING:
29 # flake8 can not understand type annotation yet.
30 # That is why all type annotation relative import
31 # statements has to be marked as noqa.
32 # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401
33 from sdconfig import SDConfig # noqa: F401
34 from typing import Optional, Union, Tuple, Any # noqa: F401
35 from werkzeug import Response # noqa: F401
36 from werkzeug.exceptions import HTTPException # noqa: F401
37
38 _insecure_views = ['main.login', 'main.select_logo', 'static']
39
40
41 def create_app(config: 'SDConfig') -> Flask:
42 app = Flask(__name__,
43 template_folder=config.JOURNALIST_TEMPLATES_DIR,
44 static_folder=path.join(config.SECUREDROP_ROOT, 'static'))
45
46 app.config.from_object(config.JOURNALIST_APP_FLASK_CONFIG_CLS)
47 app.session_interface = JournalistInterfaceSessionInterface()
48
49 csrf = CSRFProtect(app)
50 Environment(app)
51
52 app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
53 app.config['SQLALCHEMY_DATABASE_URI'] = config.DATABASE_URI
54 db.init_app(app)
55
56 v2_enabled = path.exists(path.join(config.SECUREDROP_DATA_ROOT, 'source_v2_url'))
57 v3_enabled = path.exists(path.join(config.SECUREDROP_DATA_ROOT, 'source_v3_url'))
58 app.config.update(V2_ONION_ENABLED=v2_enabled, V3_ONION_ENABLED=v3_enabled)
59
60 # TODO: Attaching a Storage dynamically like this disables all type checking (and
61 # breaks code analysis tools) for code that uses current_app.storage; it should be refactored
62 app.storage = Storage(config.STORE_DIR,
63 config.TEMP_DIR,
64 config.JOURNALIST_KEY)
65
66 # TODO: Attaching a CryptoUtil dynamically like this disables all type checking (and
67 # breaks code analysis tools) for code that uses current_app.storage; it should be refactored
68 app.crypto_util = CryptoUtil(
69 scrypt_params=config.SCRYPT_PARAMS,
70 scrypt_id_pepper=config.SCRYPT_ID_PEPPER,
71 scrypt_gpg_pepper=config.SCRYPT_GPG_PEPPER,
72 securedrop_root=config.SECUREDROP_ROOT,
73 nouns_file=config.NOUNS,
74 adjectives_file=config.ADJECTIVES,
75 gpg_key_dir=config.GPG_KEY_DIR,
76 )
77
78 @app.errorhandler(CSRFError)
79 def handle_csrf_error(e: CSRFError) -> 'Response':
80 # render the message first to ensure it's localized.
81 msg = gettext('You have been logged out due to inactivity.')
82 session.clear()
83 flash(msg, 'error')
84 return redirect(url_for('main.login'))
85
86 def _handle_http_exception(
87 error: 'HTTPException'
88 ) -> 'Tuple[Union[Response, str], Optional[int]]':
89 # Workaround for no blueprint-level 404/5 error handlers, see:
90 # https://github.com/pallets/flask/issues/503#issuecomment-71383286
91 handler = list(app.error_handler_spec['api'][error.code].values())[0]
92 if request.path.startswith('/api/') and handler:
93 return handler(error)
94
95 return render_template('error.html', error=error), error.code
96
97 for code in default_exceptions:
98 app.errorhandler(code)(_handle_http_exception)
99
100 i18n.setup_app(config, app)
101
102 app.jinja_env.trim_blocks = True
103 app.jinja_env.lstrip_blocks = True
104 app.jinja_env.globals['version'] = version.__version__
105 app.jinja_env.filters['rel_datetime_format'] = \
106 template_filters.rel_datetime_format
107 app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat
108
109 @app.before_first_request
110 def expire_blacklisted_tokens() -> None:
111 cleanup_expired_revoked_tokens()
112
113 @app.before_request
114 def load_instance_config() -> None:
115 app.instance_config = InstanceConfig.get_current()
116
117 @app.before_request
118 def setup_g() -> 'Optional[Response]':
119 """Store commonly used values in Flask's special g object"""
120 if 'expires' in session and datetime.utcnow() >= session['expires']:
121 session.clear()
122 flash(gettext('You have been logged out due to inactivity.'),
123 'error')
124
125 uid = session.get('uid', None)
126 if uid:
127 user = Journalist.query.get(uid)
128 if user and 'nonce' in session and \
129 session['nonce'] != user.session_nonce:
130 session.clear()
131 flash(gettext('You have been logged out due to password change'),
132 'error')
133
134 session['expires'] = datetime.utcnow() + \
135 timedelta(minutes=getattr(config,
136 'SESSION_EXPIRATION_MINUTES',
137 120))
138
139 # Work around https://github.com/lepture/flask-wtf/issues/275
140 # -- after upgrading from Python 2 to Python 3, any existing
141 # session's csrf_token value will be retrieved as bytes,
142 # causing a TypeError. This simple fix, deleting the existing
143 # token, was suggested in the issue comments. This code will
144 # be safe to remove after Python 2 reaches EOL in 2020, and no
145 # supported SecureDrop installations can still have this
146 # problem.
147 if sys.version_info.major > 2 and type(session.get('csrf_token')) is bytes:
148 del session['csrf_token']
149
150 uid = session.get('uid', None)
151 if uid:
152 g.user = Journalist.query.get(uid)
153
154 g.locale = i18n.get_locale(config)
155 g.text_direction = i18n.get_text_direction(g.locale)
156 g.html_lang = i18n.locale_to_rfc_5646(g.locale)
157 g.locales = i18n.get_locale2name()
158
159 if app.instance_config.organization_name:
160 g.organization_name = app.instance_config.organization_name
161 else:
162 g.organization_name = gettext('SecureDrop')
163
164 if not app.config['V3_ONION_ENABLED'] or app.config['V2_ONION_ENABLED']:
165 g.show_v2_onion_eol_warning = True
166
167 if request.path.split('/')[1] == 'api':
168 pass # We use the @token_required decorator for the API endpoints
169 else: # We are not using the API
170 if request.endpoint not in _insecure_views and not logged_in():
171 return redirect(url_for('main.login'))
172
173 if request.method == 'POST':
174 filesystem_id = request.form.get('filesystem_id')
175 if filesystem_id:
176 g.filesystem_id = filesystem_id
177 g.source = get_source(filesystem_id)
178
179 return None
180
181 app.register_blueprint(main.make_blueprint(config))
182 app.register_blueprint(account.make_blueprint(config),
183 url_prefix='/account')
184 app.register_blueprint(admin.make_blueprint(config), url_prefix='/admin')
185 app.register_blueprint(col.make_blueprint(config), url_prefix='/col')
186 api_blueprint = api.make_blueprint(config)
187 app.register_blueprint(api_blueprint, url_prefix='/api/v1')
188 csrf.exempt(api_blueprint)
189
190 return app
191
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py
--- a/securedrop/journalist_app/__init__.py
+++ b/securedrop/journalist_app/__init__.py
@@ -53,8 +53,13 @@
app.config['SQLALCHEMY_DATABASE_URI'] = config.DATABASE_URI
db.init_app(app)
- v2_enabled = path.exists(path.join(config.SECUREDROP_DATA_ROOT, 'source_v2_url'))
- v3_enabled = path.exists(path.join(config.SECUREDROP_DATA_ROOT, 'source_v3_url'))
+ def _url_exists(u: str) -> bool:
+ return path.exists(path.join(config.SECUREDROP_DATA_ROOT, u))
+
+ v2_enabled = _url_exists('source_v2_url') or ((not _url_exists('source_v2_url'))
+ and (not _url_exists('source_v3_url')))
+ v3_enabled = _url_exists('source_v3_url')
+
app.config.update(V2_ONION_ENABLED=v2_enabled, V3_ONION_ENABLED=v3_enabled)
# TODO: Attaching a Storage dynamically like this disables all type checking (and
@@ -161,9 +166,12 @@
else:
g.organization_name = gettext('SecureDrop')
- if not app.config['V3_ONION_ENABLED'] or app.config['V2_ONION_ENABLED']:
+ if app.config['V2_ONION_ENABLED'] and not app.config['V3_ONION_ENABLED']:
g.show_v2_onion_eol_warning = True
+ if app.config['V2_ONION_ENABLED'] and app.config['V3_ONION_ENABLED']:
+ g.show_v2_onion_migration_warning = True
+
if request.path.split('/')[1] == 'api':
pass # We use the @token_required decorator for the API endpoints
else: # We are not using the API
|
{"golden_diff": "diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py\n--- a/securedrop/journalist_app/__init__.py\n+++ b/securedrop/journalist_app/__init__.py\n@@ -53,8 +53,13 @@\n app.config['SQLALCHEMY_DATABASE_URI'] = config.DATABASE_URI\n db.init_app(app)\n \n- v2_enabled = path.exists(path.join(config.SECUREDROP_DATA_ROOT, 'source_v2_url'))\n- v3_enabled = path.exists(path.join(config.SECUREDROP_DATA_ROOT, 'source_v3_url'))\n+ def _url_exists(u: str) -> bool:\n+ return path.exists(path.join(config.SECUREDROP_DATA_ROOT, u))\n+\n+ v2_enabled = _url_exists('source_v2_url') or ((not _url_exists('source_v2_url'))\n+ and (not _url_exists('source_v3_url')))\n+ v3_enabled = _url_exists('source_v3_url')\n+\n app.config.update(V2_ONION_ENABLED=v2_enabled, V3_ONION_ENABLED=v3_enabled)\n \n # TODO: Attaching a Storage dynamically like this disables all type checking (and\n@@ -161,9 +166,12 @@\n else:\n g.organization_name = gettext('SecureDrop')\n \n- if not app.config['V3_ONION_ENABLED'] or app.config['V2_ONION_ENABLED']:\n+ if app.config['V2_ONION_ENABLED'] and not app.config['V3_ONION_ENABLED']:\n g.show_v2_onion_eol_warning = True\n \n+ if app.config['V2_ONION_ENABLED'] and app.config['V3_ONION_ENABLED']:\n+ g.show_v2_onion_migration_warning = True\n+\n if request.path.split('/')[1] == 'api':\n pass # We use the @token_required decorator for the API endpoints\n else: # We are not using the API\n", "issue": "Add clearer action reminder in Journalist Interface to enable v3\nAs part of the 1.7.0 release, we've agreed that we want to improve our reminder in the Journalist Interface to enable v3 onion services. The current banner looks as follows:\r\n\r\n\r\n\r\nYou can also see it on https://demo-journalist.securedrop.org/ after logging in using the dev credentials.\r\n\r\nThe plan of record is as follows:\r\n- SecureDrop 1.7.0 (released in January) will still fully support v2 onion services\r\n- SecureDrop 1.8.0 (released in February) will support Ubuntu 20.04 (#4768), and _exclusively_ support v3 onion services for Focal installs, without changing the behavior on Xenial.\r\n- After April 30 (Ubuntu 16.04 EOL), instances not upgraded to Ubuntu 20.04 will self-disable.\r\n\r\nIn other words, the v3 switch is unavoidable due to the Xenial end-of-life. Our goal with the 1.7.0 release is to get more admins to make the switch prior to a reinstall on Focal, to make the process a bit easier for them. The specific action we need to motivate:\r\n\r\n1) *If v3 is already enabled, but v2 services are still available:* Disable v2 services, ensure that all journalists/admins have v3 creds, and ensure that landing page points to v3 onion.\r\n\r\n2) *If v3 is not enabled yet:* Enable v3 services (can still run them alongside v2 for a bit, per docs, then go v3-only).\r\n\r\nWe've discussed that this could potentially be done via different banners for 1) and 2) in the Journalist Interface. 
Let's kick around language/UX a bit in the comments, and I'll add the final agreed upon spec to the top-level issue.\r\n\r\n## User Story\r\n\r\nAs an administrator, I want to be reminded of critical actions I must take to keep my instance running, so that I'm not caught by surprise.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nfrom datetime import datetime, timedelta\nfrom flask import (Flask, session, redirect, url_for, flash, g, request,\n render_template)\nfrom flask_assets import Environment\nfrom flask_babel import gettext\nfrom flask_wtf.csrf import CSRFProtect, CSRFError\nfrom os import path\nimport sys\nfrom werkzeug.exceptions import default_exceptions\n\nimport i18n\nimport template_filters\nimport version\n\nfrom crypto_util import CryptoUtil\nfrom db import db\nfrom journalist_app import account, admin, api, main, col\nfrom journalist_app.utils import (get_source, logged_in,\n JournalistInterfaceSessionInterface,\n cleanup_expired_revoked_tokens)\nfrom models import InstanceConfig, Journalist\nfrom store import Storage\n\nimport typing\n# https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking\nif typing.TYPE_CHECKING:\n # flake8 can not understand type annotation yet.\n # That is why all type annotation relative import\n # statements has to be marked as noqa.\n # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401\n from sdconfig import SDConfig # noqa: F401\n from typing import Optional, Union, Tuple, Any # noqa: F401\n from werkzeug import Response # noqa: F401\n from werkzeug.exceptions import HTTPException # noqa: F401\n\n_insecure_views = ['main.login', 'main.select_logo', 'static']\n\n\ndef create_app(config: 'SDConfig') -> Flask:\n app = Flask(__name__,\n template_folder=config.JOURNALIST_TEMPLATES_DIR,\n static_folder=path.join(config.SECUREDROP_ROOT, 'static'))\n\n app.config.from_object(config.JOURNALIST_APP_FLASK_CONFIG_CLS)\n app.session_interface = JournalistInterfaceSessionInterface()\n\n csrf = CSRFProtect(app)\n Environment(app)\n\n app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n app.config['SQLALCHEMY_DATABASE_URI'] = config.DATABASE_URI\n db.init_app(app)\n\n v2_enabled = path.exists(path.join(config.SECUREDROP_DATA_ROOT, 'source_v2_url'))\n v3_enabled = path.exists(path.join(config.SECUREDROP_DATA_ROOT, 'source_v3_url'))\n app.config.update(V2_ONION_ENABLED=v2_enabled, V3_ONION_ENABLED=v3_enabled)\n\n # TODO: Attaching a Storage dynamically like this disables all type checking (and\n # breaks code analysis tools) for code that uses current_app.storage; it should be refactored\n app.storage = Storage(config.STORE_DIR,\n config.TEMP_DIR,\n config.JOURNALIST_KEY)\n\n # TODO: Attaching a CryptoUtil dynamically like this disables all type checking (and\n # breaks code analysis tools) for code that uses current_app.storage; it should be refactored\n app.crypto_util = CryptoUtil(\n scrypt_params=config.SCRYPT_PARAMS,\n scrypt_id_pepper=config.SCRYPT_ID_PEPPER,\n scrypt_gpg_pepper=config.SCRYPT_GPG_PEPPER,\n securedrop_root=config.SECUREDROP_ROOT,\n nouns_file=config.NOUNS,\n adjectives_file=config.ADJECTIVES,\n gpg_key_dir=config.GPG_KEY_DIR,\n )\n\n @app.errorhandler(CSRFError)\n def handle_csrf_error(e: CSRFError) -> 'Response':\n # render the message first to ensure it's localized.\n msg = gettext('You have been logged out due to inactivity.')\n session.clear()\n flash(msg, 'error')\n return redirect(url_for('main.login'))\n\n def _handle_http_exception(\n error: 'HTTPException'\n ) -> 'Tuple[Union[Response, 
str], Optional[int]]':\n # Workaround for no blueprint-level 404/5 error handlers, see:\n # https://github.com/pallets/flask/issues/503#issuecomment-71383286\n handler = list(app.error_handler_spec['api'][error.code].values())[0]\n if request.path.startswith('/api/') and handler:\n return handler(error)\n\n return render_template('error.html', error=error), error.code\n\n for code in default_exceptions:\n app.errorhandler(code)(_handle_http_exception)\n\n i18n.setup_app(config, app)\n\n app.jinja_env.trim_blocks = True\n app.jinja_env.lstrip_blocks = True\n app.jinja_env.globals['version'] = version.__version__\n app.jinja_env.filters['rel_datetime_format'] = \\\n template_filters.rel_datetime_format\n app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat\n\n @app.before_first_request\n def expire_blacklisted_tokens() -> None:\n cleanup_expired_revoked_tokens()\n\n @app.before_request\n def load_instance_config() -> None:\n app.instance_config = InstanceConfig.get_current()\n\n @app.before_request\n def setup_g() -> 'Optional[Response]':\n \"\"\"Store commonly used values in Flask's special g object\"\"\"\n if 'expires' in session and datetime.utcnow() >= session['expires']:\n session.clear()\n flash(gettext('You have been logged out due to inactivity.'),\n 'error')\n\n uid = session.get('uid', None)\n if uid:\n user = Journalist.query.get(uid)\n if user and 'nonce' in session and \\\n session['nonce'] != user.session_nonce:\n session.clear()\n flash(gettext('You have been logged out due to password change'),\n 'error')\n\n session['expires'] = datetime.utcnow() + \\\n timedelta(minutes=getattr(config,\n 'SESSION_EXPIRATION_MINUTES',\n 120))\n\n # Work around https://github.com/lepture/flask-wtf/issues/275\n # -- after upgrading from Python 2 to Python 3, any existing\n # session's csrf_token value will be retrieved as bytes,\n # causing a TypeError. This simple fix, deleting the existing\n # token, was suggested in the issue comments. 
This code will\n # be safe to remove after Python 2 reaches EOL in 2020, and no\n # supported SecureDrop installations can still have this\n # problem.\n if sys.version_info.major > 2 and type(session.get('csrf_token')) is bytes:\n del session['csrf_token']\n\n uid = session.get('uid', None)\n if uid:\n g.user = Journalist.query.get(uid)\n\n g.locale = i18n.get_locale(config)\n g.text_direction = i18n.get_text_direction(g.locale)\n g.html_lang = i18n.locale_to_rfc_5646(g.locale)\n g.locales = i18n.get_locale2name()\n\n if app.instance_config.organization_name:\n g.organization_name = app.instance_config.organization_name\n else:\n g.organization_name = gettext('SecureDrop')\n\n if not app.config['V3_ONION_ENABLED'] or app.config['V2_ONION_ENABLED']:\n g.show_v2_onion_eol_warning = True\n\n if request.path.split('/')[1] == 'api':\n pass # We use the @token_required decorator for the API endpoints\n else: # We are not using the API\n if request.endpoint not in _insecure_views and not logged_in():\n return redirect(url_for('main.login'))\n\n if request.method == 'POST':\n filesystem_id = request.form.get('filesystem_id')\n if filesystem_id:\n g.filesystem_id = filesystem_id\n g.source = get_source(filesystem_id)\n\n return None\n\n app.register_blueprint(main.make_blueprint(config))\n app.register_blueprint(account.make_blueprint(config),\n url_prefix='/account')\n app.register_blueprint(admin.make_blueprint(config), url_prefix='/admin')\n app.register_blueprint(col.make_blueprint(config), url_prefix='/col')\n api_blueprint = api.make_blueprint(config)\n app.register_blueprint(api_blueprint, url_prefix='/api/v1')\n csrf.exempt(api_blueprint)\n\n return app\n", "path": "securedrop/journalist_app/__init__.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\nfrom datetime import datetime, timedelta\nfrom flask import (Flask, session, redirect, url_for, flash, g, request,\n render_template)\nfrom flask_assets import Environment\nfrom flask_babel import gettext\nfrom flask_wtf.csrf import CSRFProtect, CSRFError\nfrom os import path\nimport sys\nfrom werkzeug.exceptions import default_exceptions\n\nimport i18n\nimport template_filters\nimport version\n\nfrom crypto_util import CryptoUtil\nfrom db import db\nfrom journalist_app import account, admin, api, main, col\nfrom journalist_app.utils import (get_source, logged_in,\n JournalistInterfaceSessionInterface,\n cleanup_expired_revoked_tokens)\nfrom models import InstanceConfig, Journalist\nfrom store import Storage\n\nimport typing\n# https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking\nif typing.TYPE_CHECKING:\n # flake8 can not understand type annotation yet.\n # That is why all type annotation relative import\n # statements has to be marked as noqa.\n # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401\n from sdconfig import SDConfig # noqa: F401\n from typing import Optional, Union, Tuple, Any # noqa: F401\n from werkzeug import Response # noqa: F401\n from werkzeug.exceptions import HTTPException # noqa: F401\n\n_insecure_views = ['main.login', 'main.select_logo', 'static']\n\n\ndef create_app(config: 'SDConfig') -> Flask:\n app = Flask(__name__,\n template_folder=config.JOURNALIST_TEMPLATES_DIR,\n static_folder=path.join(config.SECUREDROP_ROOT, 'static'))\n\n app.config.from_object(config.JOURNALIST_APP_FLASK_CONFIG_CLS)\n app.session_interface = JournalistInterfaceSessionInterface()\n\n csrf = CSRFProtect(app)\n Environment(app)\n\n app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n 
app.config['SQLALCHEMY_DATABASE_URI'] = config.DATABASE_URI\n db.init_app(app)\n\n def _url_exists(u: str) -> bool:\n return path.exists(path.join(config.SECUREDROP_DATA_ROOT, u))\n\n v2_enabled = _url_exists('source_v2_url') or ((not _url_exists('source_v2_url'))\n and (not _url_exists('source_v3_url')))\n v3_enabled = _url_exists('source_v3_url')\n\n app.config.update(V2_ONION_ENABLED=v2_enabled, V3_ONION_ENABLED=v3_enabled)\n\n # TODO: Attaching a Storage dynamically like this disables all type checking (and\n # breaks code analysis tools) for code that uses current_app.storage; it should be refactored\n app.storage = Storage(config.STORE_DIR,\n config.TEMP_DIR,\n config.JOURNALIST_KEY)\n\n # TODO: Attaching a CryptoUtil dynamically like this disables all type checking (and\n # breaks code analysis tools) for code that uses current_app.storage; it should be refactored\n app.crypto_util = CryptoUtil(\n scrypt_params=config.SCRYPT_PARAMS,\n scrypt_id_pepper=config.SCRYPT_ID_PEPPER,\n scrypt_gpg_pepper=config.SCRYPT_GPG_PEPPER,\n securedrop_root=config.SECUREDROP_ROOT,\n nouns_file=config.NOUNS,\n adjectives_file=config.ADJECTIVES,\n gpg_key_dir=config.GPG_KEY_DIR,\n )\n\n @app.errorhandler(CSRFError)\n def handle_csrf_error(e: CSRFError) -> 'Response':\n # render the message first to ensure it's localized.\n msg = gettext('You have been logged out due to inactivity.')\n session.clear()\n flash(msg, 'error')\n return redirect(url_for('main.login'))\n\n def _handle_http_exception(\n error: 'HTTPException'\n ) -> 'Tuple[Union[Response, str], Optional[int]]':\n # Workaround for no blueprint-level 404/5 error handlers, see:\n # https://github.com/pallets/flask/issues/503#issuecomment-71383286\n handler = list(app.error_handler_spec['api'][error.code].values())[0]\n if request.path.startswith('/api/') and handler:\n return handler(error)\n\n return render_template('error.html', error=error), error.code\n\n for code in default_exceptions:\n app.errorhandler(code)(_handle_http_exception)\n\n i18n.setup_app(config, app)\n\n app.jinja_env.trim_blocks = True\n app.jinja_env.lstrip_blocks = True\n app.jinja_env.globals['version'] = version.__version__\n app.jinja_env.filters['rel_datetime_format'] = \\\n template_filters.rel_datetime_format\n app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat\n\n @app.before_first_request\n def expire_blacklisted_tokens() -> None:\n cleanup_expired_revoked_tokens()\n\n @app.before_request\n def load_instance_config() -> None:\n app.instance_config = InstanceConfig.get_current()\n\n @app.before_request\n def setup_g() -> 'Optional[Response]':\n \"\"\"Store commonly used values in Flask's special g object\"\"\"\n if 'expires' in session and datetime.utcnow() >= session['expires']:\n session.clear()\n flash(gettext('You have been logged out due to inactivity.'),\n 'error')\n\n uid = session.get('uid', None)\n if uid:\n user = Journalist.query.get(uid)\n if user and 'nonce' in session and \\\n session['nonce'] != user.session_nonce:\n session.clear()\n flash(gettext('You have been logged out due to password change'),\n 'error')\n\n session['expires'] = datetime.utcnow() + \\\n timedelta(minutes=getattr(config,\n 'SESSION_EXPIRATION_MINUTES',\n 120))\n\n # Work around https://github.com/lepture/flask-wtf/issues/275\n # -- after upgrading from Python 2 to Python 3, any existing\n # session's csrf_token value will be retrieved as bytes,\n # causing a TypeError. This simple fix, deleting the existing\n # token, was suggested in the issue comments. 
This code will\n # be safe to remove after Python 2 reaches EOL in 2020, and no\n # supported SecureDrop installations can still have this\n # problem.\n if sys.version_info.major > 2 and type(session.get('csrf_token')) is bytes:\n del session['csrf_token']\n\n uid = session.get('uid', None)\n if uid:\n g.user = Journalist.query.get(uid)\n\n g.locale = i18n.get_locale(config)\n g.text_direction = i18n.get_text_direction(g.locale)\n g.html_lang = i18n.locale_to_rfc_5646(g.locale)\n g.locales = i18n.get_locale2name()\n\n if app.instance_config.organization_name:\n g.organization_name = app.instance_config.organization_name\n else:\n g.organization_name = gettext('SecureDrop')\n\n if app.config['V2_ONION_ENABLED'] and not app.config['V3_ONION_ENABLED']:\n g.show_v2_onion_eol_warning = True\n\n if app.config['V2_ONION_ENABLED'] and app.config['V3_ONION_ENABLED']:\n g.show_v2_onion_migration_warning = True\n\n if request.path.split('/')[1] == 'api':\n pass # We use the @token_required decorator for the API endpoints\n else: # We are not using the API\n if request.endpoint not in _insecure_views and not logged_in():\n return redirect(url_for('main.login'))\n\n if request.method == 'POST':\n filesystem_id = request.form.get('filesystem_id')\n if filesystem_id:\n g.filesystem_id = filesystem_id\n g.source = get_source(filesystem_id)\n\n return None\n\n app.register_blueprint(main.make_blueprint(config))\n app.register_blueprint(account.make_blueprint(config),\n url_prefix='/account')\n app.register_blueprint(admin.make_blueprint(config), url_prefix='/admin')\n app.register_blueprint(col.make_blueprint(config), url_prefix='/col')\n api_blueprint = api.make_blueprint(config)\n app.register_blueprint(api_blueprint, url_prefix='/api/v1')\n csrf.exempt(api_blueprint)\n\n return app\n", "path": "securedrop/journalist_app/__init__.py"}]}
| 3,005 | 432 |
gh_patches_debug_15617
|
rasdani/github-patches
|
git_diff
|
python__peps-2826
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Decide what PEPs fall under `Topic: Packaging` and act accordingly
Right now, as originally discussed in #2096, there seem to be two plausible interpretations of what qualifies for the `Topic: Packaging` label: PEPs that _relate_ to packaging in some direct way, and PEPs that apply to the packaging ecosystem (as opposed to the stdlib) and are thus discussed primarily on packaging community fora and accepted/rejected by the relevant packaging-community standing PEP delegate.
In particular, given that PEP 632 (PEP-632), on deprecating and removing `distutils`, was removed from packaging at the request of @zooba for relating to the core stdlib, this would seem to suggest that PEP 453 (PEP-453) and PEP 477 (PEP-477), on `ensurepip` and its backport to Python 2.7, respectively, should also be removed by the same standard (or, alternatively, PEP 632 restored).
Presuming consensus can be reached prior to its merging, I can implement this on the existing #2690, given it makes a number of related improvements/refinements to the packaging-tagged PEP headers.
<details>
<summary>Relevant discussion from #2096 (click to expand)</summary>
@pfmoore
> this is a core Python PEP, not a packaging PEP, and like @dstufft noted, I don't think we tend to modify those after approval.
@CAM-Gerlach
> In that case, as originally discussed on https://github.com/python/peps/pull/2656 and done for PEP 632 ([PEP-632](https://peps.python.org/632)), maybe we should drop Topic: Packaging from this PEP as well as the related PEP 477 ([PEP-477](https://peps.python.org/477)) ? I can do it in https://github.com/python/peps/pull/2690 since its closely related to the other changes in that PR (also, your input there on updating PEP 262 ([PEP-262](https://peps.python.org/262))'s status to reflect reality would be helpful).
@pfmoore
> I don't know on that. It's packaging related in the sense that it is about packaging tools, so if people are looking for things that are "about" packaging, it does belong in the Packaging topic. But it's not a "PyPA specification" in the sense defined [here](https://www.pypa.io/en/latest/specifications/), which is why I said that the normal PEP rules about modifications should apply, rather than the PyPA-specific process. Ultimately, I'm not actually sure what the rules are for when a PEP qualifies as being in the Packaging topic...
@CAM-Gerlach
> Yeah, exactly. That basically comes down to whether a Topic is considered more of a "Category" (i.e. the former), or a "Track" (i.e. the latter). I presented both alternatives at the beginning of the discussion, and both were favored at different points in its evolution; the initial consensus in the Discourse thread was something more like a "Track", but on the PR and naming of the "Topic" header that was eventually implemented (that allows multiple topics per PR), it ended up basically the latter, at least nominally.
>
> However, with PEP 632 ([PEP-632](https://peps.python.org/632)) being removed at the behest of @zooba in https://github.com/python/peps/pull/2636#discussion_r898173875 , given these two PEPs basically just concern adding/backporting a utility module from the core standard library rather than PyPA/PyPI standards, it seems to me to not make sense to keep those PEPs but not another very similar one about the deprecation and removal of what was the core packaging module. So, I suggest either removing those two, or adding back PEP 632.
@pradyunsg
> Alternatively, we can defer doing anything here until the authors have a concern with being listed under the packaging index as well or till we see/identify more instances of something like this? In any case, this discussion should move to a separate issue IMO.
@zooba
> I think this is in the same boat as PEP 632, and shouldn't be tagged as a packaging PEP.
>
> As a proposal of the rule I'd use, if we'd let the packaging delegate (i.e. Paul, right now) decide without going to python-dev or the SC, then it's a packaging PEP. Whether something is added to the standard library clearly falls outside of this scope, and so this PEP is standards track and not packaging.
</details>
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pep_sphinx_extensions/pep_zero_generator/constants.py`
Content:
```
1 """Holds type and status constants for PEP 0 generation."""
2
3 STATUS_ACCEPTED = "Accepted"
4 STATUS_ACTIVE = "Active"
5 STATUS_DEFERRED = "Deferred"
6 STATUS_DRAFT = "Draft"
7 STATUS_FINAL = "Final"
8 STATUS_PROVISIONAL = "Provisional"
9 STATUS_REJECTED = "Rejected"
10 STATUS_SUPERSEDED = "Superseded"
11 STATUS_WITHDRAWN = "Withdrawn"
12
13 # Valid values for the Status header.
14 STATUS_VALUES = {
15 STATUS_ACCEPTED, STATUS_PROVISIONAL, STATUS_REJECTED, STATUS_WITHDRAWN,
16 STATUS_DEFERRED, STATUS_FINAL, STATUS_ACTIVE, STATUS_DRAFT, STATUS_SUPERSEDED,
17 }
18 # Map of invalid/special statuses to their valid counterparts
19 SPECIAL_STATUSES = {
20 "April Fool!": STATUS_REJECTED, # See PEP 401 :)
21 }
22 # Draft PEPs have no status displayed, Active shares a key with Accepted
23 HIDE_STATUS = {STATUS_DRAFT, STATUS_ACTIVE}
24 # Dead PEP statuses
25 DEAD_STATUSES = {STATUS_REJECTED, STATUS_WITHDRAWN, STATUS_SUPERSEDED}
26
27 TYPE_INFO = "Informational"
28 TYPE_PROCESS = "Process"
29 TYPE_STANDARDS = "Standards Track"
30
31 # Valid values for the Type header.
32 TYPE_VALUES = {TYPE_STANDARDS, TYPE_INFO, TYPE_PROCESS}
33 # Active PEPs can only be for Informational or Process PEPs.
34 ACTIVE_ALLOWED = {TYPE_PROCESS, TYPE_INFO}
35
36 # map of topic -> additional description
37 SUBINDICES_BY_TOPIC = {
38 "packaging": """\
39 The canonical, up-to-date packaging specifications can be found on the
40 `Python Packaging Authority`_ (PyPA) `specifications`_ page.
41 Packaging PEPs follow the `PyPA specification update process`_.
42 They are used to propose major additions or changes to the PyPA specifications.
43
44 .. _Python Packaging Authority: https://www.pypa.io/
45 .. _specifications: https://packaging.python.org/en/latest/specifications/
46 .. _PyPA specification update process: https://www.pypa.io/en/latest/specifications/#specification-update-process
47 """,
48 "release": """\
49 A PEP is written to specify the release cycle for each feature release of Python.
50 See the `developer's guide`_ for more information.
51
52 .. _developer's guide: https://devguide.python.org/devcycle/
53 """,
54 "typing": """\
55 Many recent PEPs propose changes to Python's static type system
56 or otherwise relate to type annotations.
57 They are listed here for reference.
58 """
59 }
60
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pep_sphinx_extensions/pep_zero_generator/constants.py b/pep_sphinx_extensions/pep_zero_generator/constants.py
--- a/pep_sphinx_extensions/pep_zero_generator/constants.py
+++ b/pep_sphinx_extensions/pep_zero_generator/constants.py
@@ -36,10 +36,10 @@
# map of topic -> additional description
SUBINDICES_BY_TOPIC = {
"packaging": """\
-The canonical, up-to-date packaging specifications can be found on the
-`Python Packaging Authority`_ (PyPA) `specifications`_ page.
Packaging PEPs follow the `PyPA specification update process`_.
They are used to propose major additions or changes to the PyPA specifications.
+The canonical, up-to-date packaging specifications can be found on the
+`Python Packaging Authority`_ (PyPA) `specifications`_ page.
.. _Python Packaging Authority: https://www.pypa.io/
.. _specifications: https://packaging.python.org/en/latest/specifications/
|
{"golden_diff": "diff --git a/pep_sphinx_extensions/pep_zero_generator/constants.py b/pep_sphinx_extensions/pep_zero_generator/constants.py\n--- a/pep_sphinx_extensions/pep_zero_generator/constants.py\n+++ b/pep_sphinx_extensions/pep_zero_generator/constants.py\n@@ -36,10 +36,10 @@\n # map of topic -> additional description\n SUBINDICES_BY_TOPIC = {\n \"packaging\": \"\"\"\\\n-The canonical, up-to-date packaging specifications can be found on the\n-`Python Packaging Authority`_ (PyPA) `specifications`_ page.\n Packaging PEPs follow the `PyPA specification update process`_.\n They are used to propose major additions or changes to the PyPA specifications.\n+The canonical, up-to-date packaging specifications can be found on the\n+`Python Packaging Authority`_ (PyPA) `specifications`_ page.\n \n .. _Python Packaging Authority: https://www.pypa.io/\n .. _specifications: https://packaging.python.org/en/latest/specifications/\n", "issue": "Decide what PEPs fall under `Topic: Packaging` and act accordingly\nRight now, as originally discussed in #2096 , there seem to be two plausible interpretation of what qualifies for the `Topic: Packaging` label: PEPs that _relate_ to packaging in some direct way, and PEPs that apply to packaging ecosystem (as opposed to the stdlib), and are thus discussed primarily on packaging community fora and accepted/rejected by the relevant packaging-community standing PEP delegate.\r\n\r\nIn particular, given PEP 632 (PEP-632), on deprecating and removing `disutils`, was removed from packaging at the request of @zooba for relating to the core stdlib, this would seem to suggest that PEP 453 (PEP-453) and PEP 477 (PEP-477), on `ensurepip` and its backport to Python 2.7, respectively, should also be removed by the same standard (or, alternatively, PEP 632 restored).\r\n\r\nPresuming consensus can be reached prior to its merging, I can implement this on the existing #2690 , given it makes a number of related improvements/refinements to the packaging-tagged PEP headers.\r\n\r\n<details>\r\n\r\n<summary>Relevant discussion from #2096 (click to expand)</summary>\r\n\r\n@pfmoore \r\n\r\n> this is a core Python PEP, not a packaging PEP, and like @dstufft noted, I don't think we tend to modify those after approval.\r\n\r\n@CAM-Gerlach \r\n\r\n> In that case, as originally discussed on https://github.com/python/peps/pull/2656 and done for PEP 632 ([PEP-632](https://peps.python.org/632)), maybe we should drop Topic: Packaging from this PEP as well as the related PEP 477 ([PEP-477](https://peps.python.org/477)) ? I can do it in https://github.com/python/peps/pull/2690 since its closely related to the other changes in that PR (also, your input there on updating PEP 262 ([PEP-262](https://peps.python.org/262))'s status to reflect reality would be helpful).\r\n\r\n@pfmoore \r\n\r\n> I don't know on that. It's packaging related in the sense that it is about packaging tools, so if people are looking for things that are \"about\" packaging, it does belong in the Packaging topic. But it's not a \"PyPA specification\" in the sense defined [here](https://www.pypa.io/en/latest/specifications/), which is why I said that the normal PEP rules about modifications should apply, rather than the PyPA-specific process. Ultimately, I'm not actually sure what the rules are for when a PEP qualifies as being in the Packaging topic...\r\n\r\n@CAM-Gerlach \r\n\r\n> Yeah, exactly. That basically comes down to whether a Topic is considered more of a \"Category\" (i.e. the former), or a \"Track\" (i.e. 
the latter). I presented both alternatives at the beginning of the discussion, and both were favored at different points in its evolution; the initial consensus in the Discourse thread was something more like a \"Track\", but on the PR and naming of the \"Topic\" header that was eventually implemented (that allows multiple topics per PR), it ended up basically the latter, at least nominally.\r\n>\r\n> However, with PEP 632 ([PEP-632](https://peps.python.org/632)) being removed at the behest of @zooba in https://github.com/python/peps/pull/2636#discussion_r898173875 , given these two PEPs basically just concern adding/backporting a utility module from the core standard library rather than PyPA/PyPI standards, it seems to me to not make sense to keep those PEPs but not another very similar one about the deprecation and removal of what was the core packaging module. So, I suggest either removing those two, or adding back PEP 632.\r\n\r\n@pradyunsg \r\n\r\n> Alternatively, we can defer doing anything here until the authors have a concern with being listed under the packaging index as well or till we see/identify more instances of something like this? In any case, this discussion should move to a separate issue IMO.\r\n\r\n@zooba \r\n\r\n> I think this is in the same boat as PEP 632, and shouldn't be tagged as a packaging PEP.\r\n>\r\n> As a proposal of the rule I'd use, if we'd let the packaging delegate (i.e. Paul, right now) decide without going to python-dev or the SC, then it's a packaging PEP. Whether something is added to the standard library clearly falls outside of this scope, and so this PEP is standards track and not packaging.\r\n\r\n</details>\n", "before_files": [{"content": "\"\"\"Holds type and status constants for PEP 0 generation.\"\"\"\n\nSTATUS_ACCEPTED = \"Accepted\"\nSTATUS_ACTIVE = \"Active\"\nSTATUS_DEFERRED = \"Deferred\"\nSTATUS_DRAFT = \"Draft\"\nSTATUS_FINAL = \"Final\"\nSTATUS_PROVISIONAL = \"Provisional\"\nSTATUS_REJECTED = \"Rejected\"\nSTATUS_SUPERSEDED = \"Superseded\"\nSTATUS_WITHDRAWN = \"Withdrawn\"\n\n# Valid values for the Status header.\nSTATUS_VALUES = {\n STATUS_ACCEPTED, STATUS_PROVISIONAL, STATUS_REJECTED, STATUS_WITHDRAWN,\n STATUS_DEFERRED, STATUS_FINAL, STATUS_ACTIVE, STATUS_DRAFT, STATUS_SUPERSEDED,\n}\n# Map of invalid/special statuses to their valid counterparts\nSPECIAL_STATUSES = {\n \"April Fool!\": STATUS_REJECTED, # See PEP 401 :)\n}\n# Draft PEPs have no status displayed, Active shares a key with Accepted\nHIDE_STATUS = {STATUS_DRAFT, STATUS_ACTIVE}\n# Dead PEP statuses\nDEAD_STATUSES = {STATUS_REJECTED, STATUS_WITHDRAWN, STATUS_SUPERSEDED}\n\nTYPE_INFO = \"Informational\"\nTYPE_PROCESS = \"Process\"\nTYPE_STANDARDS = \"Standards Track\"\n\n# Valid values for the Type header.\nTYPE_VALUES = {TYPE_STANDARDS, TYPE_INFO, TYPE_PROCESS}\n# Active PEPs can only be for Informational or Process PEPs.\nACTIVE_ALLOWED = {TYPE_PROCESS, TYPE_INFO}\n\n# map of topic -> additional description\nSUBINDICES_BY_TOPIC = {\n \"packaging\": \"\"\"\\\nThe canonical, up-to-date packaging specifications can be found on the\n`Python Packaging Authority`_ (PyPA) `specifications`_ page.\nPackaging PEPs follow the `PyPA specification update process`_.\nThey are used to propose major additions or changes to the PyPA specifications.\n\n.. _Python Packaging Authority: https://www.pypa.io/\n.. _specifications: https://packaging.python.org/en/latest/specifications/\n.. 
_PyPA specification update process: https://www.pypa.io/en/latest/specifications/#specification-update-process\n\"\"\",\n \"release\": \"\"\"\\\nA PEP is written to specify the release cycle for each feature release of Python.\nSee the `developer's guide`_ for more information.\n\n.. _developer's guide: https://devguide.python.org/devcycle/\n\"\"\",\n \"typing\": \"\"\"\\\nMany recent PEPs propose changes to Python's static type system\nor otherwise relate to type annotations.\nThey are listed here for reference.\n\"\"\"\n}\n", "path": "pep_sphinx_extensions/pep_zero_generator/constants.py"}], "after_files": [{"content": "\"\"\"Holds type and status constants for PEP 0 generation.\"\"\"\n\nSTATUS_ACCEPTED = \"Accepted\"\nSTATUS_ACTIVE = \"Active\"\nSTATUS_DEFERRED = \"Deferred\"\nSTATUS_DRAFT = \"Draft\"\nSTATUS_FINAL = \"Final\"\nSTATUS_PROVISIONAL = \"Provisional\"\nSTATUS_REJECTED = \"Rejected\"\nSTATUS_SUPERSEDED = \"Superseded\"\nSTATUS_WITHDRAWN = \"Withdrawn\"\n\n# Valid values for the Status header.\nSTATUS_VALUES = {\n STATUS_ACCEPTED, STATUS_PROVISIONAL, STATUS_REJECTED, STATUS_WITHDRAWN,\n STATUS_DEFERRED, STATUS_FINAL, STATUS_ACTIVE, STATUS_DRAFT, STATUS_SUPERSEDED,\n}\n# Map of invalid/special statuses to their valid counterparts\nSPECIAL_STATUSES = {\n \"April Fool!\": STATUS_REJECTED, # See PEP 401 :)\n}\n# Draft PEPs have no status displayed, Active shares a key with Accepted\nHIDE_STATUS = {STATUS_DRAFT, STATUS_ACTIVE}\n# Dead PEP statuses\nDEAD_STATUSES = {STATUS_REJECTED, STATUS_WITHDRAWN, STATUS_SUPERSEDED}\n\nTYPE_INFO = \"Informational\"\nTYPE_PROCESS = \"Process\"\nTYPE_STANDARDS = \"Standards Track\"\n\n# Valid values for the Type header.\nTYPE_VALUES = {TYPE_STANDARDS, TYPE_INFO, TYPE_PROCESS}\n# Active PEPs can only be for Informational or Process PEPs.\nACTIVE_ALLOWED = {TYPE_PROCESS, TYPE_INFO}\n\n# map of topic -> additional description\nSUBINDICES_BY_TOPIC = {\n \"packaging\": \"\"\"\\\nPackaging PEPs follow the `PyPA specification update process`_.\nThey are used to propose major additions or changes to the PyPA specifications.\nThe canonical, up-to-date packaging specifications can be found on the\n`Python Packaging Authority`_ (PyPA) `specifications`_ page.\n\n.. _Python Packaging Authority: https://www.pypa.io/\n.. _specifications: https://packaging.python.org/en/latest/specifications/\n.. _PyPA specification update process: https://www.pypa.io/en/latest/specifications/#specification-update-process\n\"\"\",\n \"release\": \"\"\"\\\nA PEP is written to specify the release cycle for each feature release of Python.\nSee the `developer's guide`_ for more information.\n\n.. _developer's guide: https://devguide.python.org/devcycle/\n\"\"\",\n \"typing\": \"\"\"\\\nMany recent PEPs propose changes to Python's static type system\nor otherwise relate to type annotations.\nThey are listed here for reference.\n\"\"\"\n}\n", "path": "pep_sphinx_extensions/pep_zero_generator/constants.py"}]}
| 1,988 | 226 |
gh_patches_debug_38505
|
rasdani/github-patches
|
git_diff
|
interlegis__sapl-1821
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Participante de Comissão (Committee Participation) - departure date
The departure date (data de desligamento) must be later than the designation date (data da designação).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sapl/comissoes/forms.py`
Content:
```
1 from django import forms
2 from django.contrib.contenttypes.models import ContentType
3 from django.core.exceptions import ValidationError
4 from django.db import transaction
5 from django.db.models import Q
6 from django.forms import ModelForm
7 from django.utils.translation import ugettext_lazy as _
8
9 from sapl.base.models import Autor, TipoAutor
10 from sapl.comissoes.models import (Comissao, Composicao, DocumentoAcessorio,
11 Participacao, Reuniao)
12 from sapl.parlamentares.models import Legislatura, Mandato, Parlamentar
13
14 class ComposicaoForm(forms.ModelForm):
15
16 class Meta:
17 model = Composicao
18 exclude = []
19
20 def clean(self):
21 cleaned_data = super(ComposicaoForm, self).clean()
22
23 if not self.is_valid():
24 return cleaned_data
25
26 periodo = cleaned_data['periodo']
27 comissao_pk = cleaned_data['comissao'].id
28 intersecao_periodo = Composicao.objects.filter(
29 Q(periodo__data_inicio__lte=periodo.data_fim,
30 periodo__data_fim__gte=periodo.data_fim) |
31 Q(periodo__data_inicio__gte=periodo.data_inicio,
32 periodo__data_fim__lte=periodo.data_inicio),
33 comissao_id=comissao_pk)
34
35 if intersecao_periodo:
36 raise ValidationError('O período informado '
37 'choca com períodos já '
38 'cadastrados para esta comissão')
39
40 return cleaned_data
41
42 class ParticipacaoCreateForm(forms.ModelForm):
43
44 parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())
45
46 class Meta:
47 model = Participacao
48 fields = '__all__'
49 exclude = ['composicao']
50
51 def __init__(self, user=None, **kwargs):
52 super(ParticipacaoCreateForm, self).__init__(**kwargs)
53
54 if self.instance:
55 comissao = kwargs['initial']
56 comissao_pk = int(comissao['parent_pk'])
57 composicao = Composicao.objects.get(id=comissao_pk)
58 participantes = composicao.participacao_set.all()
59 id_part = [p.parlamentar.id for p in participantes]
60 else:
61 id_part = []
62
63 qs = self.create_participacao()
64
65 parlamentares = Mandato.objects.filter(qs,
66 parlamentar__ativo=True
67 ).prefetch_related('parlamentar').\
68 values_list('parlamentar',
69 flat=True).distinct()
70
71 qs = Parlamentar.objects.filter(id__in=parlamentares).distinct().\
72 exclude(id__in=id_part)
73 eligible = self.verifica()
74 result = list(set(qs) & set(eligible))
75 if not cmp(result, eligible): # se igual a 0 significa que o qs e o eli são iguais!
76 self.fields['parlamentar'].queryset = qs
77 else:
78 ids = [e.id for e in eligible]
79 qs = Parlamentar.objects.filter(id__in=ids)
80 self.fields['parlamentar'].queryset = qs
81
82
83 def clean(self):
84 cleaned_data = super(ParticipacaoCreateForm, self).clean()
85
86 if not self.is_valid():
87 return cleaned_data
88
89 composicao = Composicao.objects.get(id=self.initial['parent_pk'])
90 cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]
91
92 if cleaned_data['cargo'].nome in cargos_unicos:
93 msg = _('Este cargo é único para esta Comissão.')
94 raise ValidationError(msg)
95
96
97 def create_participacao(self):
98 composicao = Composicao.objects.get(id=self.initial['parent_pk'])
99 data_inicio_comissao = composicao.periodo.data_inicio
100 data_fim_comissao = composicao.periodo.data_fim
101 q1 = Q(data_fim_mandato__isnull=False,
102 data_fim_mandato__gte=data_inicio_comissao)
103 q2 = Q(data_inicio_mandato__gte=data_inicio_comissao) \
104 & Q(data_inicio_mandato__lte=data_fim_comissao)
105 q3 = Q(data_fim_mandato__isnull=True,
106 data_inicio_mandato__lte=data_inicio_comissao)
107 qs = q1 | q2 | q3
108 return qs
109
110 def verifica(self):
111 composicao = Composicao.objects.get(id=self.initial['parent_pk'])
112 participantes = composicao.participacao_set.all()
113 participantes_id = [p.parlamentar.id for p in participantes]
114 parlamentares = Parlamentar.objects.all().exclude(
115 id__in=participantes_id).order_by('nome_completo')
116 parlamentares = [p for p in parlamentares if p.ativo]
117
118 lista = []
119
120 for p in parlamentares:
121 mandatos = p.mandato_set.all()
122 for m in mandatos:
123 data_inicio = m.data_inicio_mandato
124 data_fim = m.data_fim_mandato
125 comp_data_inicio = composicao.periodo.data_inicio
126 comp_data_fim = composicao.periodo.data_fim
127 if (data_fim and data_fim >= comp_data_inicio)\
128 or (data_inicio >= comp_data_inicio and data_inicio <= comp_data_fim)\
129 or (data_fim is None and data_inicio <= comp_data_inicio):
130 lista.append(p)
131
132 lista = list(set(lista))
133
134 return lista
135
136
137 class ParticipacaoEditForm(forms.ModelForm):
138
139 parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())
140 nome_parlamentar = forms.CharField(required=False, label='Parlamentar')
141
142 class Meta:
143 model = Participacao
144 fields = ['nome_parlamentar', 'parlamentar', 'cargo', 'titular',
145 'data_designacao', 'data_desligamento',
146 'motivo_desligamento', 'observacao']
147 widgets = {
148 'parlamentar': forms.HiddenInput(),
149 }
150
151 def __init__(self, user=None, **kwargs):
152 super(ParticipacaoEditForm, self).__init__(**kwargs)
153 self.initial['nome_parlamentar'] = Parlamentar.objects.get(
154 id=self.initial['parlamentar']).nome_parlamentar
155 self.fields['nome_parlamentar'].widget.attrs['disabled'] = 'disabled'
156
157
158 class ComissaoForm(forms.ModelForm):
159
160 class Meta:
161 model = Comissao
162 fields = '__all__'
163
164 def __init__(self, user=None, **kwargs):
165 super(ComissaoForm, self).__init__(**kwargs)
166 inst = self.instance
167 if inst.pk:
168 if inst.tipo.natureza == 'P':
169 self.fields['apelido_temp'].widget.attrs['disabled'] = 'disabled'
170 self.fields['data_instalacao_temp'].widget.attrs['disabled'] = 'disabled'
171 self.fields['data_final_prevista_temp'].widget.attrs['disabled'] = 'disabled'
172 self.fields['data_prorrogada_temp'].widget.attrs['disabled'] = 'disabled'
173 self.fields['data_fim_comissao'].widget.attrs['disabled'] = 'disabled'
174
175
176
177 def clean(self):
178 super(ComissaoForm, self).clean()
179
180 if not self.is_valid():
181 return self.cleaned_data
182
183 if self.cleaned_data['data_extincao']:
184 if (self.cleaned_data['data_extincao'] <
185 self.cleaned_data['data_criacao']):
186 msg = _('Data de extinção não pode ser menor que a de criação')
187 raise ValidationError(msg)
188 return self.cleaned_data
189
190 @transaction.atomic
191 def save(self, commit=True):
192 inst = self.instance
193 if not inst.pk:
194 comissao = super(ComissaoForm, self).save(commit)
195 content_type = ContentType.objects.get_for_model(Comissao)
196 object_id = comissao.pk
197 tipo = TipoAutor.objects.get(descricao__icontains='Comiss')
198 nome = comissao.sigla + ' - ' + comissao.nome
199 Autor.objects.create(
200 content_type=content_type,
201 object_id=object_id,
202 tipo=tipo,
203 nome=nome
204 )
205 return comissao
206 else:
207 comissao = super(ComissaoForm, self).save(commit)
208 return comissao
209
210
211 class ReuniaoForm(ModelForm):
212
213 comissao = forms.ModelChoiceField(queryset=Comissao.objects.all(),
214 widget=forms.HiddenInput())
215
216 class Meta:
217 model = Reuniao
218 exclude = ['cod_andamento_reuniao']
219
220 def clean(self):
221 super(ReuniaoForm, self).clean()
222
223 if not self.is_valid():
224 return self.cleaned_data
225
226 if self.cleaned_data['hora_fim']:
227 if (self.cleaned_data['hora_fim'] <
228 self.cleaned_data['hora_inicio']):
229 msg = _('A hora de término da reunião não pode ser menor que a de início')
230 raise ValidationError(msg)
231 return self.cleaned_data
232
233 class DocumentoAcessorioCreateForm(forms.ModelForm):
234
235 parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())
236
237 class Meta:
238 model = DocumentoAcessorio
239 exclude = ['reuniao']
240
241 def __init__(self, user=None, **kwargs):
242 super(DocumentoAcessorioCreateForm, self).__init__(**kwargs)
243
244 if self.instance:
245 reuniao = Reuniao.objects.get(id=self.initial['parent_pk'])
246 comissao = reuniao.comissao
247 comissao_pk = comissao.id
248 documentos = reuniao.documentoacessorio_set.all()
249 return self.create_documentoacessorio()
250
251
252 def create_documentoacessorio(self):
253 reuniao = Reuniao.objects.get(id=self.initial['parent_pk'])
254
255
256 class DocumentoAcessorioEditForm(forms.ModelForm):
257
258 parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())
259
260 class Meta:
261 model = DocumentoAcessorio
262 fields = ['nome', 'data', 'autor', 'ementa',
263 'indexacao', 'arquivo']
264
265 def __init__(self, user=None, **kwargs):
266 super(DocumentoAcessorioEditForm, self).__init__(**kwargs)
267
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sapl/comissoes/forms.py b/sapl/comissoes/forms.py
--- a/sapl/comissoes/forms.py
+++ b/sapl/comissoes/forms.py
@@ -65,8 +65,9 @@
parlamentares = Mandato.objects.filter(qs,
parlamentar__ativo=True
).prefetch_related('parlamentar').\
- values_list('parlamentar',
- flat=True).distinct()
+ values_list('parlamentar',
+ flat=True
+ ).distinct()
qs = Parlamentar.objects.filter(id__in=parlamentares).distinct().\
exclude(id__in=id_part)
@@ -86,12 +87,21 @@
if not self.is_valid():
return cleaned_data
+ data_designacao = cleaned_data['data_designacao']
+ data_desligamento = cleaned_data['data_desligamento']
+
+ if data_desligamento and \
+ data_designacao > data_desligamento:
+ raise ValidationError(_('Data de designação não pode ser superior '
+ 'à data de desligamento'))
+
composicao = Composicao.objects.get(id=self.initial['parent_pk'])
cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]
if cleaned_data['cargo'].nome in cargos_unicos:
msg = _('Este cargo é único para esta Comissão.')
raise ValidationError(msg)
+ return cleaned_data
def create_participacao(self):
@@ -154,6 +164,31 @@
id=self.initial['parlamentar']).nome_parlamentar
self.fields['nome_parlamentar'].widget.attrs['disabled'] = 'disabled'
+ def clean(self):
+ cleaned_data = super(ParticipacaoEditForm, self).clean()
+
+ if not self.is_valid():
+ return cleaned_data
+
+ data_designacao = cleaned_data['data_designacao']
+ data_desligamento = cleaned_data['data_desligamento']
+
+ if data_desligamento and \
+ data_designacao > data_desligamento:
+ raise ValidationError(_('Data de designação não pode ser superior '
+ 'à data de desligamento'))
+
+ composicao_id = self.instance.composicao_id
+
+ composicao = Composicao.objects.get(id=composicao_id)
+ cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]
+
+ if cleaned_data['cargo'].nome in cargos_unicos:
+ msg = _('Este cargo é único para esta Comissão.')
+ raise ValidationError(msg)
+
+ return cleaned_data
+
class ComissaoForm(forms.ModelForm):
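
The patch adds the same designation/departure date check to both the create and the edit form. As a quick illustration of the pattern in isolation, here is a minimal, hypothetical Django form sketch — the field names follow the SAPL model, but the class itself and everything around it is a simplified assumption, not the project's actual code:

```python
# Hypothetical, simplified sketch of the validation introduced by the patch;
# only the two date fields are modeled.
from django import forms
from django.core.exceptions import ValidationError


class ParticipacaoDatasForm(forms.Form):
    data_designacao = forms.DateField()
    data_desligamento = forms.DateField(required=False)

    def clean(self):
        cleaned_data = super().clean()
        data_designacao = cleaned_data.get('data_designacao')
        data_desligamento = cleaned_data.get('data_desligamento')

        # The departure date, when filled in, must not precede the designation date.
        if data_designacao and data_desligamento and data_designacao > data_desligamento:
            raise ValidationError('Data de designação não pode ser superior '
                                  'à data de desligamento')
        return cleaned_data
```

Instantiating this form with a designation date later than the departure date and calling `is_valid()` returns `False` with that error, which is the behaviour the issue asks for.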
|
{"golden_diff": "diff --git a/sapl/comissoes/forms.py b/sapl/comissoes/forms.py\n--- a/sapl/comissoes/forms.py\n+++ b/sapl/comissoes/forms.py\n@@ -65,8 +65,9 @@\n parlamentares = Mandato.objects.filter(qs,\n parlamentar__ativo=True\n ).prefetch_related('parlamentar').\\\n- values_list('parlamentar',\n- flat=True).distinct()\n+ values_list('parlamentar',\n+ flat=True\n+ ).distinct()\n \n qs = Parlamentar.objects.filter(id__in=parlamentares).distinct().\\\n exclude(id__in=id_part)\n@@ -86,12 +87,21 @@\n if not self.is_valid():\n return cleaned_data\n \n+ data_designacao = cleaned_data['data_designacao']\n+ data_desligamento = cleaned_data['data_desligamento']\n+\n+ if data_desligamento and \\\n+ data_designacao > data_desligamento:\n+ raise ValidationError(_('Data de designa\u00e7\u00e3o n\u00e3o pode ser superior '\n+ '\u00e0 data de desligamento'))\n+\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]\n \n if cleaned_data['cargo'].nome in cargos_unicos:\n msg = _('Este cargo \u00e9 \u00fanico para esta Comiss\u00e3o.')\n raise ValidationError(msg)\n+ return cleaned_data\n \n \n def create_participacao(self):\n@@ -154,6 +164,31 @@\n id=self.initial['parlamentar']).nome_parlamentar\n self.fields['nome_parlamentar'].widget.attrs['disabled'] = 'disabled'\n \n+ def clean(self):\n+ cleaned_data = super(ParticipacaoEditForm, self).clean()\n+\n+ if not self.is_valid():\n+ return cleaned_data\n+\n+ data_designacao = cleaned_data['data_designacao']\n+ data_desligamento = cleaned_data['data_desligamento']\n+\n+ if data_desligamento and \\\n+ data_designacao > data_desligamento:\n+ raise ValidationError(_('Data de designa\u00e7\u00e3o n\u00e3o pode ser superior '\n+ '\u00e0 data de desligamento'))\n+\n+ composicao_id = self.instance.composicao_id\n+\n+ composicao = Composicao.objects.get(id=composicao_id)\n+ cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]\n+\n+ if cleaned_data['cargo'].nome in cargos_unicos:\n+ msg = _('Este cargo \u00e9 \u00fanico para esta Comiss\u00e3o.')\n+ raise ValidationError(msg)\n+\n+ return cleaned_data\n+\n \n class ComissaoForm(forms.ModelForm):\n", "issue": "Participante de Comiss\u00e3o - data de desligamento\na data de desligamento deve ser posterior \u00e0 data da designa\u00e7\u00e3o.\n", "before_files": [{"content": "from django import forms\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.core.exceptions import ValidationError\nfrom django.db import transaction\nfrom django.db.models import Q\nfrom django.forms import ModelForm\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom sapl.base.models import Autor, TipoAutor\nfrom sapl.comissoes.models import (Comissao, Composicao, DocumentoAcessorio,\n Participacao, Reuniao)\nfrom sapl.parlamentares.models import Legislatura, Mandato, Parlamentar\n\nclass ComposicaoForm(forms.ModelForm):\n\n class Meta:\n model = Composicao\n exclude = []\n\n def clean(self):\n cleaned_data = super(ComposicaoForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n periodo = cleaned_data['periodo']\n comissao_pk = cleaned_data['comissao'].id\n intersecao_periodo = Composicao.objects.filter(\n Q(periodo__data_inicio__lte=periodo.data_fim,\n periodo__data_fim__gte=periodo.data_fim) |\n Q(periodo__data_inicio__gte=periodo.data_inicio,\n periodo__data_fim__lte=periodo.data_inicio),\n comissao_id=comissao_pk)\n\n if 
intersecao_periodo:\n raise ValidationError('O per\u00edodo informado '\n 'choca com per\u00edodos j\u00e1 '\n 'cadastrados para esta comiss\u00e3o')\n\n return cleaned_data\n\nclass ParticipacaoCreateForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n\n class Meta:\n model = Participacao\n fields = '__all__'\n exclude = ['composicao']\n\n def __init__(self, user=None, **kwargs):\n super(ParticipacaoCreateForm, self).__init__(**kwargs)\n\n if self.instance:\n comissao = kwargs['initial']\n comissao_pk = int(comissao['parent_pk'])\n composicao = Composicao.objects.get(id=comissao_pk)\n participantes = composicao.participacao_set.all()\n id_part = [p.parlamentar.id for p in participantes]\n else:\n id_part = []\n\n qs = self.create_participacao()\n\n parlamentares = Mandato.objects.filter(qs,\n parlamentar__ativo=True\n ).prefetch_related('parlamentar').\\\n values_list('parlamentar',\n flat=True).distinct()\n\n qs = Parlamentar.objects.filter(id__in=parlamentares).distinct().\\\n exclude(id__in=id_part)\n eligible = self.verifica()\n result = list(set(qs) & set(eligible))\n if not cmp(result, eligible): # se igual a 0 significa que o qs e o eli s\u00e3o iguais!\n self.fields['parlamentar'].queryset = qs\n else:\n ids = [e.id for e in eligible]\n qs = Parlamentar.objects.filter(id__in=ids)\n self.fields['parlamentar'].queryset = qs\n\n\n def clean(self):\n cleaned_data = super(ParticipacaoCreateForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]\n\n if cleaned_data['cargo'].nome in cargos_unicos:\n msg = _('Este cargo \u00e9 \u00fanico para esta Comiss\u00e3o.')\n raise ValidationError(msg)\n\n\n def create_participacao(self):\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n data_inicio_comissao = composicao.periodo.data_inicio\n data_fim_comissao = composicao.periodo.data_fim\n q1 = Q(data_fim_mandato__isnull=False,\n data_fim_mandato__gte=data_inicio_comissao)\n q2 = Q(data_inicio_mandato__gte=data_inicio_comissao) \\\n & Q(data_inicio_mandato__lte=data_fim_comissao)\n q3 = Q(data_fim_mandato__isnull=True,\n data_inicio_mandato__lte=data_inicio_comissao)\n qs = q1 | q2 | q3\n return qs\n\n def verifica(self):\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n participantes = composicao.participacao_set.all()\n participantes_id = [p.parlamentar.id for p in participantes]\n parlamentares = Parlamentar.objects.all().exclude(\n id__in=participantes_id).order_by('nome_completo')\n parlamentares = [p for p in parlamentares if p.ativo]\n\n lista = []\n\n for p in parlamentares:\n mandatos = p.mandato_set.all()\n for m in mandatos:\n data_inicio = m.data_inicio_mandato\n data_fim = m.data_fim_mandato\n comp_data_inicio = composicao.periodo.data_inicio\n comp_data_fim = composicao.periodo.data_fim\n if (data_fim and data_fim >= comp_data_inicio)\\\n or (data_inicio >= comp_data_inicio and data_inicio <= comp_data_fim)\\\n or (data_fim is None and data_inicio <= comp_data_inicio):\n lista.append(p)\n\n lista = list(set(lista))\n\n return lista\n\n\nclass ParticipacaoEditForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n nome_parlamentar = forms.CharField(required=False, label='Parlamentar')\n\n class Meta:\n model = Participacao\n fields = ['nome_parlamentar', 'parlamentar', 
'cargo', 'titular',\n 'data_designacao', 'data_desligamento',\n 'motivo_desligamento', 'observacao']\n widgets = {\n 'parlamentar': forms.HiddenInput(),\n }\n\n def __init__(self, user=None, **kwargs):\n super(ParticipacaoEditForm, self).__init__(**kwargs)\n self.initial['nome_parlamentar'] = Parlamentar.objects.get(\n id=self.initial['parlamentar']).nome_parlamentar\n self.fields['nome_parlamentar'].widget.attrs['disabled'] = 'disabled'\n\n\nclass ComissaoForm(forms.ModelForm):\n\n class Meta:\n model = Comissao\n fields = '__all__'\n\n def __init__(self, user=None, **kwargs):\n super(ComissaoForm, self).__init__(**kwargs)\n inst = self.instance\n if inst.pk:\n if inst.tipo.natureza == 'P':\n self.fields['apelido_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_instalacao_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_final_prevista_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_prorrogada_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_fim_comissao'].widget.attrs['disabled'] = 'disabled'\n\n\n\n def clean(self):\n super(ComissaoForm, self).clean()\n\n if not self.is_valid():\n return self.cleaned_data\n\n if self.cleaned_data['data_extincao']:\n if (self.cleaned_data['data_extincao'] <\n self.cleaned_data['data_criacao']):\n msg = _('Data de extin\u00e7\u00e3o n\u00e3o pode ser menor que a de cria\u00e7\u00e3o')\n raise ValidationError(msg)\n return self.cleaned_data\n\n @transaction.atomic\n def save(self, commit=True):\n inst = self.instance\n if not inst.pk:\n comissao = super(ComissaoForm, self).save(commit)\n content_type = ContentType.objects.get_for_model(Comissao)\n object_id = comissao.pk\n tipo = TipoAutor.objects.get(descricao__icontains='Comiss')\n nome = comissao.sigla + ' - ' + comissao.nome\n Autor.objects.create(\n content_type=content_type,\n object_id=object_id,\n tipo=tipo,\n nome=nome\n )\n return comissao\n else:\n comissao = super(ComissaoForm, self).save(commit)\n return comissao\n\n\nclass ReuniaoForm(ModelForm):\n\n comissao = forms.ModelChoiceField(queryset=Comissao.objects.all(),\n widget=forms.HiddenInput())\n\n class Meta:\n model = Reuniao\n exclude = ['cod_andamento_reuniao']\n\n def clean(self):\n super(ReuniaoForm, self).clean()\n\n if not self.is_valid():\n return self.cleaned_data\n\n if self.cleaned_data['hora_fim']:\n if (self.cleaned_data['hora_fim'] <\n self.cleaned_data['hora_inicio']):\n msg = _('A hora de t\u00e9rmino da reuni\u00e3o n\u00e3o pode ser menor que a de in\u00edcio')\n raise ValidationError(msg)\n return self.cleaned_data\n\nclass DocumentoAcessorioCreateForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n\n class Meta:\n model = DocumentoAcessorio\n exclude = ['reuniao']\n\n def __init__(self, user=None, **kwargs):\n super(DocumentoAcessorioCreateForm, self).__init__(**kwargs)\n\n if self.instance:\n reuniao = Reuniao.objects.get(id=self.initial['parent_pk'])\n comissao = reuniao.comissao\n comissao_pk = comissao.id\n documentos = reuniao.documentoacessorio_set.all()\n return self.create_documentoacessorio()\n\n\n def create_documentoacessorio(self):\n reuniao = Reuniao.objects.get(id=self.initial['parent_pk'])\n\n\nclass DocumentoAcessorioEditForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n\n class Meta:\n model = DocumentoAcessorio\n fields = ['nome', 'data', 'autor', 'ementa',\n 'indexacao', 'arquivo']\n\n def __init__(self, user=None, **kwargs):\n 
super(DocumentoAcessorioEditForm, self).__init__(**kwargs)\n", "path": "sapl/comissoes/forms.py"}], "after_files": [{"content": "from django import forms\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.core.exceptions import ValidationError\nfrom django.db import transaction\nfrom django.db.models import Q\nfrom django.forms import ModelForm\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom sapl.base.models import Autor, TipoAutor\nfrom sapl.comissoes.models import (Comissao, Composicao, DocumentoAcessorio,\n Participacao, Reuniao)\nfrom sapl.parlamentares.models import Legislatura, Mandato, Parlamentar\n\nclass ComposicaoForm(forms.ModelForm):\n\n class Meta:\n model = Composicao\n exclude = []\n\n def clean(self):\n cleaned_data = super(ComposicaoForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n periodo = cleaned_data['periodo']\n comissao_pk = cleaned_data['comissao'].id\n intersecao_periodo = Composicao.objects.filter(\n Q(periodo__data_inicio__lte=periodo.data_fim,\n periodo__data_fim__gte=periodo.data_fim) |\n Q(periodo__data_inicio__gte=periodo.data_inicio,\n periodo__data_fim__lte=periodo.data_inicio),\n comissao_id=comissao_pk)\n\n if intersecao_periodo:\n raise ValidationError('O per\u00edodo informado '\n 'choca com per\u00edodos j\u00e1 '\n 'cadastrados para esta comiss\u00e3o')\n\n return cleaned_data\n\nclass ParticipacaoCreateForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n\n class Meta:\n model = Participacao\n fields = '__all__'\n exclude = ['composicao']\n\n def __init__(self, user=None, **kwargs):\n super(ParticipacaoCreateForm, self).__init__(**kwargs)\n\n if self.instance:\n comissao = kwargs['initial']\n comissao_pk = int(comissao['parent_pk'])\n composicao = Composicao.objects.get(id=comissao_pk)\n participantes = composicao.participacao_set.all()\n id_part = [p.parlamentar.id for p in participantes]\n else:\n id_part = []\n\n qs = self.create_participacao()\n\n parlamentares = Mandato.objects.filter(qs,\n parlamentar__ativo=True\n ).prefetch_related('parlamentar').\\\n values_list('parlamentar',\n flat=True\n ).distinct()\n\n qs = Parlamentar.objects.filter(id__in=parlamentares).distinct().\\\n exclude(id__in=id_part)\n eligible = self.verifica()\n result = list(set(qs) & set(eligible))\n if not cmp(result, eligible): # se igual a 0 significa que o qs e o eli s\u00e3o iguais!\n self.fields['parlamentar'].queryset = qs\n else:\n ids = [e.id for e in eligible]\n qs = Parlamentar.objects.filter(id__in=ids)\n self.fields['parlamentar'].queryset = qs\n\n\n def clean(self):\n cleaned_data = super(ParticipacaoCreateForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n data_designacao = cleaned_data['data_designacao']\n data_desligamento = cleaned_data['data_desligamento']\n\n if data_desligamento and \\\n data_designacao > data_desligamento:\n raise ValidationError(_('Data de designa\u00e7\u00e3o n\u00e3o pode ser superior '\n '\u00e0 data de desligamento'))\n\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]\n\n if cleaned_data['cargo'].nome in cargos_unicos:\n msg = _('Este cargo \u00e9 \u00fanico para esta Comiss\u00e3o.')\n raise ValidationError(msg)\n return cleaned_data\n\n\n def create_participacao(self):\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n data_inicio_comissao = 
composicao.periodo.data_inicio\n data_fim_comissao = composicao.periodo.data_fim\n q1 = Q(data_fim_mandato__isnull=False,\n data_fim_mandato__gte=data_inicio_comissao)\n q2 = Q(data_inicio_mandato__gte=data_inicio_comissao) \\\n & Q(data_inicio_mandato__lte=data_fim_comissao)\n q3 = Q(data_fim_mandato__isnull=True,\n data_inicio_mandato__lte=data_inicio_comissao)\n qs = q1 | q2 | q3\n return qs\n\n def verifica(self):\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n participantes = composicao.participacao_set.all()\n participantes_id = [p.parlamentar.id for p in participantes]\n parlamentares = Parlamentar.objects.all().exclude(\n id__in=participantes_id).order_by('nome_completo')\n parlamentares = [p for p in parlamentares if p.ativo]\n\n lista = []\n\n for p in parlamentares:\n mandatos = p.mandato_set.all()\n for m in mandatos:\n data_inicio = m.data_inicio_mandato\n data_fim = m.data_fim_mandato\n comp_data_inicio = composicao.periodo.data_inicio\n comp_data_fim = composicao.periodo.data_fim\n if (data_fim and data_fim >= comp_data_inicio)\\\n or (data_inicio >= comp_data_inicio and data_inicio <= comp_data_fim)\\\n or (data_fim is None and data_inicio <= comp_data_inicio):\n lista.append(p)\n\n lista = list(set(lista))\n\n return lista\n\n\nclass ParticipacaoEditForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n nome_parlamentar = forms.CharField(required=False, label='Parlamentar')\n\n class Meta:\n model = Participacao\n fields = ['nome_parlamentar', 'parlamentar', 'cargo', 'titular',\n 'data_designacao', 'data_desligamento',\n 'motivo_desligamento', 'observacao']\n widgets = {\n 'parlamentar': forms.HiddenInput(),\n }\n\n def __init__(self, user=None, **kwargs):\n super(ParticipacaoEditForm, self).__init__(**kwargs)\n self.initial['nome_parlamentar'] = Parlamentar.objects.get(\n id=self.initial['parlamentar']).nome_parlamentar\n self.fields['nome_parlamentar'].widget.attrs['disabled'] = 'disabled'\n\n def clean(self):\n cleaned_data = super(ParticipacaoEditForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n data_designacao = cleaned_data['data_designacao']\n data_desligamento = cleaned_data['data_desligamento']\n\n if data_desligamento and \\\n data_designacao > data_desligamento:\n raise ValidationError(_('Data de designa\u00e7\u00e3o n\u00e3o pode ser superior '\n '\u00e0 data de desligamento'))\n\n composicao_id = self.instance.composicao_id\n\n composicao = Composicao.objects.get(id=composicao_id)\n cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]\n\n if cleaned_data['cargo'].nome in cargos_unicos:\n msg = _('Este cargo \u00e9 \u00fanico para esta Comiss\u00e3o.')\n raise ValidationError(msg)\n\n return cleaned_data\n\n\nclass ComissaoForm(forms.ModelForm):\n\n class Meta:\n model = Comissao\n fields = '__all__'\n\n def __init__(self, user=None, **kwargs):\n super(ComissaoForm, self).__init__(**kwargs)\n inst = self.instance\n if inst.pk:\n if inst.tipo.natureza == 'P':\n self.fields['apelido_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_instalacao_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_final_prevista_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_prorrogada_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_fim_comissao'].widget.attrs['disabled'] = 'disabled'\n\n\n\n def clean(self):\n super(ComissaoForm, self).clean()\n\n if not self.is_valid():\n return 
self.cleaned_data\n\n if self.cleaned_data['data_extincao']:\n if (self.cleaned_data['data_extincao'] <\n self.cleaned_data['data_criacao']):\n msg = _('Data de extin\u00e7\u00e3o n\u00e3o pode ser menor que a de cria\u00e7\u00e3o')\n raise ValidationError(msg)\n return self.cleaned_data\n\n @transaction.atomic\n def save(self, commit=True):\n inst = self.instance\n if not inst.pk:\n comissao = super(ComissaoForm, self).save(commit)\n content_type = ContentType.objects.get_for_model(Comissao)\n object_id = comissao.pk\n tipo = TipoAutor.objects.get(descricao__icontains='Comiss')\n nome = comissao.sigla + ' - ' + comissao.nome\n Autor.objects.create(\n content_type=content_type,\n object_id=object_id,\n tipo=tipo,\n nome=nome\n )\n return comissao\n else:\n comissao = super(ComissaoForm, self).save(commit)\n return comissao\n\n\nclass ReuniaoForm(ModelForm):\n\n comissao = forms.ModelChoiceField(queryset=Comissao.objects.all(),\n widget=forms.HiddenInput())\n\n class Meta:\n model = Reuniao\n exclude = ['cod_andamento_reuniao']\n\n def clean(self):\n super(ReuniaoForm, self).clean()\n\n if not self.is_valid():\n return self.cleaned_data\n\n if self.cleaned_data['hora_fim']:\n if (self.cleaned_data['hora_fim'] <\n self.cleaned_data['hora_inicio']):\n msg = _('A hora de t\u00e9rmino da reuni\u00e3o n\u00e3o pode ser menor que a de in\u00edcio')\n raise ValidationError(msg)\n return self.cleaned_data\n\nclass DocumentoAcessorioCreateForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n\n class Meta:\n model = DocumentoAcessorio\n exclude = ['reuniao']\n\n def __init__(self, user=None, **kwargs):\n super(DocumentoAcessorioCreateForm, self).__init__(**kwargs)\n\n if self.instance:\n reuniao = Reuniao.objects.get(id=self.initial['parent_pk'])\n comissao = reuniao.comissao\n comissao_pk = comissao.id\n documentos = reuniao.documentoacessorio_set.all()\n return self.create_documentoacessorio()\n\n\n def create_documentoacessorio(self):\n reuniao = Reuniao.objects.get(id=self.initial['parent_pk'])\n\n\nclass DocumentoAcessorioEditForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n\n class Meta:\n model = DocumentoAcessorio\n fields = ['nome', 'data', 'autor', 'ementa',\n 'indexacao', 'arquivo']\n\n def __init__(self, user=None, **kwargs):\n super(DocumentoAcessorioEditForm, self).__init__(**kwargs)\n", "path": "sapl/comissoes/forms.py"}]}
| 3,268 | 628 |
gh_patches_debug_38044
|
rasdani/github-patches
|
git_diff
|
python-telegram-bot__python-telegram-bot-339
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
since 4.3 can't connect behind proxy
<!--
Thanks for reporting issues of python-telegram-bot!
To make it easier for us to help you please enter detailed information below.
-->
### Steps to reproduce
1. pip3 install python-telegram-bot --upgrade
2. Don't send anything
3. pip3 install python-telegram-bot-4.2.1
4. Works again!
### Expected behaviour
Tell us what should happen
### Actual behaviour
Tell us what happens instead
### Configuration
**Operating System:**
**Version of Python:**
`$ python -V` 3.5
**Version of python-telegram-bot:**
4.3.2, 4.2.1
`$ python -c 'import telegram; print(telegram.__version__)'`
### Logs
Insert logs here (if necessary)
--- END ISSUE ---
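
For context on the regression: 4.3 moved the networking layer to a single module-level urllib3 `PoolManager` (see `telegram/utils/request.py` below), and a plain `PoolManager` does not route requests through an `HTTP(S)_PROXY`, whereas the pre-4.3 urllib-based code presumably did honour those environment variables. A minimal, hypothetical sketch of a proxy-aware pool with urllib3 — just the general pattern, not the library's actual fix:

```python
# Hypothetical sketch: pick a proxy-aware pool when a proxy URL is configured.
import os

import certifi
import urllib3


def build_pool(proxy_url=None):
    kwargs = dict(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
    proxy_url = proxy_url or os.environ.get('HTTPS_PROXY') or os.environ.get('https_proxy')
    if proxy_url:
        # ProxyManager tunnels HTTPS requests through the given HTTP proxy.
        return urllib3.ProxyManager(proxy_url, **kwargs)
    return urllib3.PoolManager(**kwargs)


pool = build_pool()
resp = pool.request('GET', 'https://api.telegram.org')
print(resp.status)
```

With a pool built this way the rest of the request code can stay unchanged; only the pool construction needs to know about the proxy.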
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `telegram/utils/request.py`
Content:
```
1 #!/usr/bin/env python
2 #
3 # A library that provides a Python interface to the Telegram Bot API
4 # Copyright (C) 2015-2016
5 # Leandro Toledo de Souza <[email protected]>
6 #
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Lesser Public License as published by
9 # the Free Software Foundation, either version 3 of the License, or
10 # (at your option) any later version.
11 #
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Lesser Public License for more details.
16 #
17 # You should have received a copy of the GNU Lesser Public License
18 # along with this program. If not, see [http://www.gnu.org/licenses/].
19 """This module contains methods to make POST and GET requests"""
20
21 import json
22 import socket
23 import logging
24
25 import certifi
26 import urllib3
27 from urllib3.connection import HTTPConnection
28
29 from telegram import (InputFile, TelegramError)
30 from telegram.error import Unauthorized, NetworkError, TimedOut, BadRequest
31
32 _CON_POOL = None
33 """:type: urllib3.PoolManager"""
34 CON_POOL_SIZE = 1
35
36 logging.getLogger('urllib3').setLevel(logging.WARNING)
37
38
39 def _get_con_pool():
40 global _CON_POOL
41
42 if _CON_POOL is not None:
43 return _CON_POOL
44
45 _CON_POOL = urllib3.PoolManager(maxsize=CON_POOL_SIZE,
46 cert_reqs='CERT_REQUIRED',
47 ca_certs=certifi.where(),
48 socket_options=HTTPConnection.default_socket_options + [
49 (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
50 ])
51 return _CON_POOL
52
53
54 def is_con_pool_initialized():
55 return _CON_POOL is not None
56
57
58 def stop_con_pool():
59 global _CON_POOL
60 if _CON_POOL is not None:
61 _CON_POOL.clear()
62 _CON_POOL = None
63
64
65 def _parse(json_data):
66 """Try and parse the JSON returned from Telegram.
67
68 Returns:
69 dict: A JSON parsed as Python dict with results - on error this dict will be empty.
70
71 """
72 decoded_s = json_data.decode('utf-8')
73 try:
74 data = json.loads(decoded_s)
75 except ValueError:
76 raise TelegramError('Invalid server response')
77
78 if not data.get('ok') and data.get('description'):
79 return data['description']
80
81 return data['result']
82
83
84 def _request_wrapper(*args, **kwargs):
85 """Wraps urllib3 request for handling known exceptions.
86
87 Args:
88 args: unnamed arguments, passed to urllib3 request.
89 kwargs: keyword arguments, passed tp urllib3 request.
90
91 Returns:
92 str: A non-parsed JSON text.
93
94 Raises:
95 TelegramError
96
97 """
98
99 try:
100 resp = _get_con_pool().request(*args, **kwargs)
101 except urllib3.exceptions.TimeoutError as error:
102 raise TimedOut()
103 except urllib3.exceptions.HTTPError as error:
104 # HTTPError must come last as its the base urllib3 exception class
105 # TODO: do something smart here; for now just raise NetworkError
106 raise NetworkError('urllib3 HTTPError {0}'.format(error))
107
108 if 200 <= resp.status <= 299:
109 # 200-299 range are HTTP success statuses
110 return resp.data
111
112 try:
113 message = _parse(resp.data)
114 except ValueError:
115 raise NetworkError('Unknown HTTPError {0}'.format(resp.status))
116
117 if resp.status in (401, 403):
118 raise Unauthorized()
119 elif resp.status == 400:
120 raise BadRequest(repr(message))
121 elif resp.status == 502:
122 raise NetworkError('Bad Gateway')
123 else:
124 raise NetworkError('{0} ({1})'.format(message, resp.status))
125
126
127 def get(url):
128 """Request an URL.
129 Args:
130 url:
131 The web location we want to retrieve.
132
133 Returns:
134 A JSON object.
135
136 """
137 result = _request_wrapper('GET', url)
138
139 return _parse(result)
140
141
142 def post(url, data, timeout=None):
143 """Request an URL.
144 Args:
145 url:
146 The web location we want to retrieve.
147 data:
148 A dict of (str, unicode) key/value pairs.
149 timeout:
150 float. If this value is specified, use it as the definitive timeout (in
151 seconds) for urlopen() operations. [Optional]
152
153 Notes:
154 If neither `timeout` nor `data['timeout']` is specified. The underlying
155 defaults are used.
156
157 Returns:
158 A JSON object.
159
160 """
161 urlopen_kwargs = {}
162
163 if timeout is not None:
164 urlopen_kwargs['timeout'] = timeout
165
166 if InputFile.is_inputfile(data):
167 data = InputFile(data)
168 result = _request_wrapper('POST', url, body=data.to_form(), headers=data.headers)
169 else:
170 data = json.dumps(data)
171 result = _request_wrapper('POST',
172 url,
173 body=data.encode(),
174 headers={'Content-Type': 'application/json'},
175 **urlopen_kwargs)
176
177 return _parse(result)
178
179
180 def download(url, filename):
181 """Download a file by its URL.
182 Args:
183 url:
184 The web location we want to retrieve.
185
186 filename:
187 The filename within the path to download the file.
188
189 """
190 buf = _request_wrapper('GET', url)
191 with open(filename, 'wb') as fobj:
192 fobj.write(buf)
193
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/telegram/utils/request.py b/telegram/utils/request.py
--- a/telegram/utils/request.py
+++ b/telegram/utils/request.py
@@ -19,6 +19,7 @@
"""This module contains methods to make POST and GET requests"""
import json
+import os
import socket
import logging
@@ -31,26 +32,41 @@
_CON_POOL = None
""":type: urllib3.PoolManager"""
+_CON_POOL_PROXY = None
+_CON_POOL_PROXY_KWARGS = {}
CON_POOL_SIZE = 1
logging.getLogger('urllib3').setLevel(logging.WARNING)
def _get_con_pool():
- global _CON_POOL
-
if _CON_POOL is not None:
return _CON_POOL
- _CON_POOL = urllib3.PoolManager(maxsize=CON_POOL_SIZE,
- cert_reqs='CERT_REQUIRED',
- ca_certs=certifi.where(),
- socket_options=HTTPConnection.default_socket_options + [
- (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
- ])
+ _init_con_pool()
return _CON_POOL
+def _init_con_pool():
+ global _CON_POOL
+ kwargs = dict(maxsize=CON_POOL_SIZE,
+ cert_reqs='CERT_REQUIRED',
+ ca_certs=certifi.where(),
+ socket_options=HTTPConnection.default_socket_options + [
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ ])
+ proxy_url = _get_con_pool_proxy()
+ if not proxy_url:
+ mgr = urllib3.PoolManager
+ else:
+ kwargs['proxy_url'] = proxy_url
+ if _CON_POOL_PROXY_KWARGS:
+ kwargs.update(_CON_POOL_PROXY_KWARGS)
+ mgr = urllib3.ProxyManager
+
+ _CON_POOL = mgr(**kwargs)
+
+
def is_con_pool_initialized():
return _CON_POOL is not None
@@ -62,6 +78,47 @@
_CON_POOL = None
+def set_con_pool_proxy(url, **urllib3_kwargs):
+ """Setup connection pool behind a proxy
+
+ Args:
+ url (str): The URL to the proxy server. For example: `http://127.0.0.1:3128`
+ urllib3_kwargs (dict): Arbitrary arguments passed as-is to `urllib3.ProxyManager`
+
+ """
+ global _CON_POOL_PROXY
+ global _CON_POOL_PROXY_KWARGS
+
+ if is_con_pool_initialized():
+ raise TelegramError('conpool already initialized')
+
+ _CON_POOL_PROXY = url
+ _CON_POOL_PROXY_KWARGS = urllib3_kwargs
+
+
+def _get_con_pool_proxy():
+ """Return the user configured proxy according to the following order:
+
+ * proxy configured using `set_con_pool_proxy()`.
+ * proxy set in `HTTPS_PROXY` env. var.
+ * proxy set in `https_proxy` env. var.
+ * None (if no proxy is configured)
+
+ Returns:
+ str | None
+
+ """
+ if _CON_POOL_PROXY:
+ return _CON_POOL_PROXY
+ from_env = os.environ.get('HTTPS_PROXY')
+ if from_env:
+ return from_env
+ from_env = os.environ.get('https_proxy')
+ if from_env:
+ return from_env
+ return None
+
+
def _parse(json_data):
"""Try and parse the JSON returned from Telegram.
|
{"golden_diff": "diff --git a/telegram/utils/request.py b/telegram/utils/request.py\n--- a/telegram/utils/request.py\n+++ b/telegram/utils/request.py\n@@ -19,6 +19,7 @@\n \"\"\"This module contains methods to make POST and GET requests\"\"\"\n \n import json\n+import os\n import socket\n import logging\n \n@@ -31,26 +32,41 @@\n \n _CON_POOL = None\n \"\"\":type: urllib3.PoolManager\"\"\"\n+_CON_POOL_PROXY = None\n+_CON_POOL_PROXY_KWARGS = {}\n CON_POOL_SIZE = 1\n \n logging.getLogger('urllib3').setLevel(logging.WARNING)\n \n \n def _get_con_pool():\n- global _CON_POOL\n-\n if _CON_POOL is not None:\n return _CON_POOL\n \n- _CON_POOL = urllib3.PoolManager(maxsize=CON_POOL_SIZE,\n- cert_reqs='CERT_REQUIRED',\n- ca_certs=certifi.where(),\n- socket_options=HTTPConnection.default_socket_options + [\n- (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),\n- ])\n+ _init_con_pool()\n return _CON_POOL\n \n \n+def _init_con_pool():\n+ global _CON_POOL\n+ kwargs = dict(maxsize=CON_POOL_SIZE,\n+ cert_reqs='CERT_REQUIRED',\n+ ca_certs=certifi.where(),\n+ socket_options=HTTPConnection.default_socket_options + [\n+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),\n+ ])\n+ proxy_url = _get_con_pool_proxy()\n+ if not proxy_url:\n+ mgr = urllib3.PoolManager\n+ else:\n+ kwargs['proxy_url'] = proxy_url\n+ if _CON_POOL_PROXY_KWARGS:\n+ kwargs.update(_CON_POOL_PROXY_KWARGS)\n+ mgr = urllib3.ProxyManager\n+\n+ _CON_POOL = mgr(**kwargs)\n+\n+\n def is_con_pool_initialized():\n return _CON_POOL is not None\n \n@@ -62,6 +78,47 @@\n _CON_POOL = None\n \n \n+def set_con_pool_proxy(url, **urllib3_kwargs):\n+ \"\"\"Setup connection pool behind a proxy\n+\n+ Args:\n+ url (str): The URL to the proxy server. For example: `http://127.0.0.1:3128`\n+ urllib3_kwargs (dict): Arbitrary arguments passed as-is to `urllib3.ProxyManager`\n+\n+ \"\"\"\n+ global _CON_POOL_PROXY\n+ global _CON_POOL_PROXY_KWARGS\n+\n+ if is_con_pool_initialized():\n+ raise TelegramError('conpool already initialized')\n+\n+ _CON_POOL_PROXY = url\n+ _CON_POOL_PROXY_KWARGS = urllib3_kwargs\n+\n+\n+def _get_con_pool_proxy():\n+ \"\"\"Return the user configured proxy according to the following order:\n+\n+ * proxy configured using `set_con_pool_proxy()`.\n+ * proxy set in `HTTPS_PROXY` env. var.\n+ * proxy set in `https_proxy` env. var.\n+ * None (if no proxy is configured)\n+\n+ Returns:\n+ str | None\n+\n+ \"\"\"\n+ if _CON_POOL_PROXY:\n+ return _CON_POOL_PROXY\n+ from_env = os.environ.get('HTTPS_PROXY')\n+ if from_env:\n+ return from_env\n+ from_env = os.environ.get('https_proxy')\n+ if from_env:\n+ return from_env\n+ return None\n+\n+\n def _parse(json_data):\n \"\"\"Try and parse the JSON returned from Telegram.\n", "issue": "since 4.3 can\u00b4t connect behind proxy\n<!--\nThanks for reporting issues of python-telegram-bot!\nTo make it easier for us to help you please enter detailed information below.\n-->\n### Steps to reproduce\n1. pip3 install python-telegram-bot --upgrade\n2. Don\u00b4t send anything\n3. pip3 install python-telegram-bot-4.2.1\n4. 
Works again!\n### Expected behaviour\n\nTell us what should happen\n### Actual behaviour\n\nTell us what happens instead\n### Configuration\n\n**Operating System:**\n\n**Version of Python:**\n\n`$ python -V` 3.5\n\n**Version of python-telegram-bot:**\n4.3.2, 4.2.1\n`$ python -c 'import telegram; print(telegram.__version__)'`\n### Logs\n\nInsert logs here (if necessary)\n\n", "before_files": [{"content": "#!/usr/bin/env python\n#\n# A library that provides a Python interface to the Telegram Bot API\n# Copyright (C) 2015-2016\n# Leandro Toledo de Souza <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser Public License for more details.\n#\n# You should have received a copy of the GNU Lesser Public License\n# along with this program. If not, see [http://www.gnu.org/licenses/].\n\"\"\"This module contains methods to make POST and GET requests\"\"\"\n\nimport json\nimport socket\nimport logging\n\nimport certifi\nimport urllib3\nfrom urllib3.connection import HTTPConnection\n\nfrom telegram import (InputFile, TelegramError)\nfrom telegram.error import Unauthorized, NetworkError, TimedOut, BadRequest\n\n_CON_POOL = None\n\"\"\":type: urllib3.PoolManager\"\"\"\nCON_POOL_SIZE = 1\n\nlogging.getLogger('urllib3').setLevel(logging.WARNING)\n\n\ndef _get_con_pool():\n global _CON_POOL\n\n if _CON_POOL is not None:\n return _CON_POOL\n\n _CON_POOL = urllib3.PoolManager(maxsize=CON_POOL_SIZE,\n cert_reqs='CERT_REQUIRED',\n ca_certs=certifi.where(),\n socket_options=HTTPConnection.default_socket_options + [\n (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),\n ])\n return _CON_POOL\n\n\ndef is_con_pool_initialized():\n return _CON_POOL is not None\n\n\ndef stop_con_pool():\n global _CON_POOL\n if _CON_POOL is not None:\n _CON_POOL.clear()\n _CON_POOL = None\n\n\ndef _parse(json_data):\n \"\"\"Try and parse the JSON returned from Telegram.\n\n Returns:\n dict: A JSON parsed as Python dict with results - on error this dict will be empty.\n\n \"\"\"\n decoded_s = json_data.decode('utf-8')\n try:\n data = json.loads(decoded_s)\n except ValueError:\n raise TelegramError('Invalid server response')\n\n if not data.get('ok') and data.get('description'):\n return data['description']\n\n return data['result']\n\n\ndef _request_wrapper(*args, **kwargs):\n \"\"\"Wraps urllib3 request for handling known exceptions.\n\n Args:\n args: unnamed arguments, passed to urllib3 request.\n kwargs: keyword arguments, passed tp urllib3 request.\n\n Returns:\n str: A non-parsed JSON text.\n\n Raises:\n TelegramError\n\n \"\"\"\n\n try:\n resp = _get_con_pool().request(*args, **kwargs)\n except urllib3.exceptions.TimeoutError as error:\n raise TimedOut()\n except urllib3.exceptions.HTTPError as error:\n # HTTPError must come last as its the base urllib3 exception class\n # TODO: do something smart here; for now just raise NetworkError\n raise NetworkError('urllib3 HTTPError {0}'.format(error))\n\n if 200 <= resp.status <= 299:\n # 200-299 range are HTTP success statuses\n return resp.data\n\n try:\n message = _parse(resp.data)\n except ValueError:\n raise NetworkError('Unknown HTTPError 
{0}'.format(resp.status))\n\n if resp.status in (401, 403):\n raise Unauthorized()\n elif resp.status == 400:\n raise BadRequest(repr(message))\n elif resp.status == 502:\n raise NetworkError('Bad Gateway')\n else:\n raise NetworkError('{0} ({1})'.format(message, resp.status))\n\n\ndef get(url):\n \"\"\"Request an URL.\n Args:\n url:\n The web location we want to retrieve.\n\n Returns:\n A JSON object.\n\n \"\"\"\n result = _request_wrapper('GET', url)\n\n return _parse(result)\n\n\ndef post(url, data, timeout=None):\n \"\"\"Request an URL.\n Args:\n url:\n The web location we want to retrieve.\n data:\n A dict of (str, unicode) key/value pairs.\n timeout:\n float. If this value is specified, use it as the definitive timeout (in\n seconds) for urlopen() operations. [Optional]\n\n Notes:\n If neither `timeout` nor `data['timeout']` is specified. The underlying\n defaults are used.\n\n Returns:\n A JSON object.\n\n \"\"\"\n urlopen_kwargs = {}\n\n if timeout is not None:\n urlopen_kwargs['timeout'] = timeout\n\n if InputFile.is_inputfile(data):\n data = InputFile(data)\n result = _request_wrapper('POST', url, body=data.to_form(), headers=data.headers)\n else:\n data = json.dumps(data)\n result = _request_wrapper('POST',\n url,\n body=data.encode(),\n headers={'Content-Type': 'application/json'},\n **urlopen_kwargs)\n\n return _parse(result)\n\n\ndef download(url, filename):\n \"\"\"Download a file by its URL.\n Args:\n url:\n The web location we want to retrieve.\n\n filename:\n The filename within the path to download the file.\n\n \"\"\"\n buf = _request_wrapper('GET', url)\n with open(filename, 'wb') as fobj:\n fobj.write(buf)\n", "path": "telegram/utils/request.py"}], "after_files": [{"content": "#!/usr/bin/env python\n#\n# A library that provides a Python interface to the Telegram Bot API\n# Copyright (C) 2015-2016\n# Leandro Toledo de Souza <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser Public License for more details.\n#\n# You should have received a copy of the GNU Lesser Public License\n# along with this program. 
If not, see [http://www.gnu.org/licenses/].\n\"\"\"This module contains methods to make POST and GET requests\"\"\"\n\nimport json\nimport os\nimport socket\nimport logging\n\nimport certifi\nimport urllib3\nfrom urllib3.connection import HTTPConnection\n\nfrom telegram import (InputFile, TelegramError)\nfrom telegram.error import Unauthorized, NetworkError, TimedOut, BadRequest\n\n_CON_POOL = None\n\"\"\":type: urllib3.PoolManager\"\"\"\n_CON_POOL_PROXY = None\n_CON_POOL_PROXY_KWARGS = {}\nCON_POOL_SIZE = 1\n\nlogging.getLogger('urllib3').setLevel(logging.WARNING)\n\n\ndef _get_con_pool():\n if _CON_POOL is not None:\n return _CON_POOL\n\n _init_con_pool()\n return _CON_POOL\n\n\ndef _init_con_pool():\n global _CON_POOL\n kwargs = dict(maxsize=CON_POOL_SIZE,\n cert_reqs='CERT_REQUIRED',\n ca_certs=certifi.where(),\n socket_options=HTTPConnection.default_socket_options + [\n (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),\n ])\n proxy_url = _get_con_pool_proxy()\n if not proxy_url:\n mgr = urllib3.PoolManager\n else:\n kwargs['proxy_url'] = proxy_url\n if _CON_POOL_PROXY_KWARGS:\n kwargs.update(_CON_POOL_PROXY_KWARGS)\n mgr = urllib3.ProxyManager\n\n _CON_POOL = mgr(**kwargs)\n\n\ndef is_con_pool_initialized():\n return _CON_POOL is not None\n\n\ndef stop_con_pool():\n global _CON_POOL\n if _CON_POOL is not None:\n _CON_POOL.clear()\n _CON_POOL = None\n\n\ndef set_con_pool_proxy(url, **urllib3_kwargs):\n \"\"\"Setup connection pool behind a proxy\n\n Args:\n url (str): The URL to the proxy server. For example: `http://127.0.0.1:3128`\n urllib3_kwargs (dict): Arbitrary arguments passed as-is to `urllib3.ProxyManager`\n\n \"\"\"\n global _CON_POOL_PROXY\n global _CON_POOL_PROXY_KWARGS\n\n if is_con_pool_initialized():\n raise TelegramError('conpool already initialized')\n\n _CON_POOL_PROXY = url\n _CON_POOL_PROXY_KWARGS = urllib3_kwargs\n\n\ndef _get_con_pool_proxy():\n \"\"\"Return the user configured proxy according to the following order:\n\n * proxy configured using `set_con_pool_proxy()`.\n * proxy set in `HTTPS_PROXY` env. var.\n * proxy set in `https_proxy` env. 
var.\n * None (if no proxy is configured)\n\n Returns:\n str | None\n\n \"\"\"\n if _CON_POOL_PROXY:\n return _CON_POOL_PROXY\n from_env = os.environ.get('HTTPS_PROXY')\n if from_env:\n return from_env\n from_env = os.environ.get('https_proxy')\n if from_env:\n return from_env\n return None\n\n\ndef _parse(json_data):\n \"\"\"Try and parse the JSON returned from Telegram.\n\n Returns:\n dict: A JSON parsed as Python dict with results - on error this dict will be empty.\n\n \"\"\"\n decoded_s = json_data.decode('utf-8')\n try:\n data = json.loads(decoded_s)\n except ValueError:\n raise TelegramError('Invalid server response')\n\n if not data.get('ok') and data.get('description'):\n return data['description']\n\n return data['result']\n\n\ndef _request_wrapper(*args, **kwargs):\n \"\"\"Wraps urllib3 request for handling known exceptions.\n\n Args:\n args: unnamed arguments, passed to urllib3 request.\n kwargs: keyword arguments, passed tp urllib3 request.\n\n Returns:\n str: A non-parsed JSON text.\n\n Raises:\n TelegramError\n\n \"\"\"\n\n try:\n resp = _get_con_pool().request(*args, **kwargs)\n except urllib3.exceptions.TimeoutError as error:\n raise TimedOut()\n except urllib3.exceptions.HTTPError as error:\n # HTTPError must come last as its the base urllib3 exception class\n # TODO: do something smart here; for now just raise NetworkError\n raise NetworkError('urllib3 HTTPError {0}'.format(error))\n\n if 200 <= resp.status <= 299:\n # 200-299 range are HTTP success statuses\n return resp.data\n\n try:\n message = _parse(resp.data)\n except ValueError:\n raise NetworkError('Unknown HTTPError {0}'.format(resp.status))\n\n if resp.status in (401, 403):\n raise Unauthorized()\n elif resp.status == 400:\n raise BadRequest(repr(message))\n elif resp.status == 502:\n raise NetworkError('Bad Gateway')\n else:\n raise NetworkError('{0} ({1})'.format(message, resp.status))\n\n\ndef get(url):\n \"\"\"Request an URL.\n Args:\n url:\n The web location we want to retrieve.\n\n Returns:\n A JSON object.\n\n \"\"\"\n result = _request_wrapper('GET', url)\n\n return _parse(result)\n\n\ndef post(url, data, timeout=None):\n \"\"\"Request an URL.\n Args:\n url:\n The web location we want to retrieve.\n data:\n A dict of (str, unicode) key/value pairs.\n timeout:\n float. If this value is specified, use it as the definitive timeout (in\n seconds) for urlopen() operations. [Optional]\n\n Notes:\n If neither `timeout` nor `data['timeout']` is specified. The underlying\n defaults are used.\n\n Returns:\n A JSON object.\n\n \"\"\"\n urlopen_kwargs = {}\n\n if timeout is not None:\n urlopen_kwargs['timeout'] = timeout\n\n if InputFile.is_inputfile(data):\n data = InputFile(data)\n result = _request_wrapper('POST', url, body=data.to_form(), headers=data.headers)\n else:\n data = json.dumps(data)\n result = _request_wrapper('POST',\n url,\n body=data.encode(),\n headers={'Content-Type': 'application/json'},\n **urlopen_kwargs)\n\n return _parse(result)\n\n\ndef download(url, filename):\n \"\"\"Download a file by its URL.\n Args:\n url:\n The web location we want to retrieve.\n\n filename:\n The filename within the path to download the file.\n\n \"\"\"\n buf = _request_wrapper('GET', url)\n with open(filename, 'wb') as fobj:\n fobj.write(buf)\n", "path": "telegram/utils/request.py"}]}
| 2,147 | 778 |
gh_patches_debug_44634
|
rasdani/github-patches
|
git_diff
|
qutebrowser__qutebrowser-2905
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
URL percent-encoding in the SQL history
There are a couple of problems:
- The [add_url](https://github.com/qutebrowser/qutebrowser/blob/49b858e3599a361cf5c994358c6b189dddfb522c/qutebrowser/browser/history.py#L162) method does percent-encoding, but the [parse_entry](https://github.com/qutebrowser/qutebrowser/blob/49b858e3599a361cf5c994358c6b189dddfb522c/qutebrowser/browser/history.py#L192) method does not. So the URLs which were imported from the text file are not percent-encoded, but the newly added URLs are, which leads to "duplicates" if I open an old URL.
- The old history completion called [entry.url.toDisplayString()](https://github.com/qutebrowser/qutebrowser/pull/2295/files#diff-d0e110fc69342b1ed59bb5c1a96b0234L101) when the URL was added to the completion. Now the completion shows URLs as they are in the database, which is inconsistent (see the previous point) and a visual regression (I want pretty URLs in the completion).
cc @rcorre
--- END ISSUE ---
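To make the mismatch concrete, a small illustration of the two QUrl formats involved (the example URL is made up; the expected strings follow PyQt5's documented formatting options):

```python
from PyQt5.QtCore import QUrl

url = QUrl("http://example.com/päge")

# What add_url() stores today (FullyEncoded | RemovePassword): percent-encoded
print(url.toString(QUrl.FullyEncoded | QUrl.RemovePassword))
# -> http://example.com/p%C3%A4ge

# What the old text-file import kept, and what a "pretty" completion would show
print(url.toString(QUrl.RemovePassword))
# -> http://example.com/päge
```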
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `qutebrowser/browser/history.py`
Content:
```
1 # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
2
3 # Copyright 2015-2017 Florian Bruhin (The Compiler) <[email protected]>
4 #
5 # This file is part of qutebrowser.
6 #
7 # qutebrowser is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation, either version 3 of the License, or
10 # (at your option) any later version.
11 #
12 # qutebrowser is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
16 #
17 # You should have received a copy of the GNU General Public License
18 # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
19
20 """Simple history which gets written to disk."""
21
22 import os
23 import time
24
25 from PyQt5.QtCore import pyqtSlot, QUrl, QTimer
26
27 from qutebrowser.commands import cmdutils, cmdexc
28 from qutebrowser.utils import (utils, objreg, log, usertypes, message,
29 debug, standarddir)
30 from qutebrowser.misc import objects, sql
31
32
33 class CompletionHistory(sql.SqlTable):
34
35 """History which only has the newest entry for each URL."""
36
37 def __init__(self, parent=None):
38 super().__init__("CompletionHistory", ['url', 'title', 'last_atime'],
39 constraints={'url': 'PRIMARY KEY'}, parent=parent)
40 self.create_index('CompletionHistoryAtimeIndex', 'last_atime')
41
42
43 class WebHistory(sql.SqlTable):
44
45 """The global history of visited pages."""
46
47 def __init__(self, parent=None):
48 super().__init__("History", ['url', 'title', 'atime', 'redirect'],
49 parent=parent)
50 self.completion = CompletionHistory(parent=self)
51 self.create_index('HistoryIndex', 'url')
52 self.create_index('HistoryAtimeIndex', 'atime')
53 self._contains_query = self.contains_query('url')
54 self._between_query = sql.Query('SELECT * FROM History '
55 'where not redirect '
56 'and not url like "qute://%" '
57 'and atime > :earliest '
58 'and atime <= :latest '
59 'ORDER BY atime desc')
60
61 self._before_query = sql.Query('SELECT * FROM History '
62 'where not redirect '
63 'and not url like "qute://%" '
64 'and atime <= :latest '
65 'ORDER BY atime desc '
66 'limit :limit offset :offset')
67
68 def __repr__(self):
69 return utils.get_repr(self, length=len(self))
70
71 def __contains__(self, url):
72 return self._contains_query.run(val=url).value()
73
74 def get_recent(self):
75 """Get the most recent history entries."""
76 return self.select(sort_by='atime', sort_order='desc', limit=100)
77
78 def entries_between(self, earliest, latest):
79 """Iterate non-redirect, non-qute entries between two timestamps.
80
81 Args:
82 earliest: Omit timestamps earlier than this.
83 latest: Omit timestamps later than this.
84 """
85 self._between_query.run(earliest=earliest, latest=latest)
86 return iter(self._between_query)
87
88 def entries_before(self, latest, limit, offset):
89 """Iterate non-redirect, non-qute entries occurring before a timestamp.
90
91 Args:
92 latest: Omit timestamps more recent than this.
93 limit: Max number of entries to include.
94 offset: Number of entries to skip.
95 """
96 self._before_query.run(latest=latest, limit=limit, offset=offset)
97 return iter(self._before_query)
98
99 @cmdutils.register(name='history-clear', instance='web-history')
100 def clear(self, force=False):
101 """Clear all browsing history.
102
103 Note this only clears the global history
104 (e.g. `~/.local/share/qutebrowser/history` on Linux) but not cookies,
105 the back/forward history of a tab, cache or other persistent data.
106
107 Args:
108 force: Don't ask for confirmation.
109 """
110 if force:
111 self._do_clear()
112 else:
113 message.confirm_async(self._do_clear, title="Clear all browsing "
114 "history?")
115
116 def _do_clear(self):
117 self.delete_all()
118 self.completion.delete_all()
119
120 def delete_url(self, url):
121 """Remove all history entries with the given url.
122
123 Args:
124 url: URL string to delete.
125 """
126 self.delete('url', url)
127 self.completion.delete('url', url)
128
129 @pyqtSlot(QUrl, QUrl, str)
130 def add_from_tab(self, url, requested_url, title):
131 """Add a new history entry as slot, called from a BrowserTab."""
132 if url.scheme() == 'data' or requested_url.scheme() == 'data':
133 return
134 if url.isEmpty():
135 # things set via setHtml
136 return
137
138 no_formatting = QUrl.UrlFormattingOption(0)
139 if (requested_url.isValid() and
140 not requested_url.matches(url, no_formatting)):
141 # If the url of the page is different than the url of the link
142 # originally clicked, save them both.
143 self.add_url(requested_url, title, redirect=True)
144 self.add_url(url, title)
145
146 def add_url(self, url, title="", *, redirect=False, atime=None):
147 """Called via add_from_tab when a URL should be added to the history.
148
149 Args:
150 url: A url (as QUrl) to add to the history.
151 redirect: Whether the entry was redirected to another URL
152 (hidden in completion)
153 atime: Override the atime used to add the entry
154 """
155 if not url.isValid(): # pragma: no cover
156 # the no cover pragma is a WORKAROUND for this not being covered in
157 # old Qt versions.
158 log.misc.warning("Ignoring invalid URL being added to history")
159 return
160
161 atime = int(atime) if (atime is not None) else int(time.time())
162 url_str = url.toString(QUrl.FullyEncoded | QUrl.RemovePassword)
163 self.insert({'url': url_str,
164 'title': title,
165 'atime': atime,
166 'redirect': redirect})
167 if not redirect:
168 self.completion.insert({'url': url_str,
169 'title': title,
170 'last_atime': atime},
171 replace=True)
172
173 def _parse_entry(self, line):
174 """Parse a history line like '12345 http://example.com title'."""
175 if not line or line.startswith('#'):
176 return None
177 data = line.split(maxsplit=2)
178 if len(data) == 2:
179 atime, url = data
180 title = ""
181 elif len(data) == 3:
182 atime, url, title = data
183 else:
184 raise ValueError("2 or 3 fields expected")
185
186 # http://xn--pple-43d.com/ with
187 # https://bugreports.qt.io/browse/QTBUG-60364
188 if url in ['http://.com/', 'https://.com/',
189 'http://www..com/', 'https://www..com/']:
190 return None
191
192 url = QUrl(url)
193 if not url.isValid():
194 raise ValueError("Invalid URL: {}".format(url.errorString()))
195
196 # https://github.com/qutebrowser/qutebrowser/issues/2646
197 if url.scheme() == 'data':
198 return None
199
200 # https://github.com/qutebrowser/qutebrowser/issues/670
201 atime = atime.lstrip('\0')
202
203 if '-' in atime:
204 atime, flags = atime.split('-')
205 else:
206 flags = ''
207
208 if not set(flags).issubset('r'):
209 raise ValueError("Invalid flags {!r}".format(flags))
210
211 redirect = 'r' in flags
212 return (url, title, int(atime), redirect)
213
214 def import_txt(self):
215 """Import a history text file into sqlite if it exists.
216
217 In older versions of qutebrowser, history was stored in a text format.
218 This converts that file into the new sqlite format and moves it to a
219 backup location.
220 """
221 path = os.path.join(standarddir.data(), 'history')
222 if not os.path.isfile(path):
223 return
224
225 def action():
226 with debug.log_time(log.init, 'Import old history file to sqlite'):
227 try:
228 self._read(path)
229 except ValueError as ex:
230 message.error('Failed to import history: {}'.format(ex))
231 else:
232 bakpath = path + '.bak'
233 message.info('History import complete. Moving {} to {}'
234 .format(path, bakpath))
235 os.rename(path, bakpath)
236
237 # delay to give message time to appear before locking down for import
238 message.info('Converting {} to sqlite...'.format(path))
239 QTimer.singleShot(100, action)
240
241 def _read(self, path):
242 """Import a text file into the sql database."""
243 with open(path, 'r', encoding='utf-8') as f:
244 data = {'url': [], 'title': [], 'atime': [], 'redirect': []}
245 completion_data = {'url': [], 'title': [], 'last_atime': []}
246 for (i, line) in enumerate(f):
247 try:
248 parsed = self._parse_entry(line.strip())
249 if parsed is None:
250 continue
251 url, title, atime, redirect = parsed
252 data['url'].append(url)
253 data['title'].append(title)
254 data['atime'].append(atime)
255 data['redirect'].append(redirect)
256 if not redirect:
257 completion_data['url'].append(url)
258 completion_data['title'].append(title)
259 completion_data['last_atime'].append(atime)
260 except ValueError as ex:
261 raise ValueError('Failed to parse line #{} of {}: "{}"'
262 .format(i, path, ex))
263 self.insert_batch(data)
264 self.completion.insert_batch(completion_data, replace=True)
265
266 @cmdutils.register(instance='web-history', debug=True)
267 def debug_dump_history(self, dest):
268 """Dump the history to a file in the old pre-SQL format.
269
270 Args:
271 dest: Where to write the file to.
272 """
273 dest = os.path.expanduser(dest)
274
275 lines = ('{}{} {} {}'
276 .format(int(x.atime), '-r' * x.redirect, x.url, x.title)
277 for x in self.select(sort_by='atime', sort_order='asc'))
278
279 try:
280 with open(dest, 'w', encoding='utf-8') as f:
281 f.write('\n'.join(lines))
282 message.info("Dumped history to {}".format(dest))
283 except OSError as e:
284 raise cmdexc.CommandError('Could not write history: {}', e)
285
286
287 def init(parent=None):
288 """Initialize the web history.
289
290 Args:
291 parent: The parent to use for WebHistory.
292 """
293 history = WebHistory(parent=parent)
294 objreg.register('web-history', history)
295
296 if objects.backend == usertypes.Backend.QtWebKit:
297 from qutebrowser.browser.webkit import webkithistory
298 webkithistory.init(history)
299
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/qutebrowser/browser/history.py b/qutebrowser/browser/history.py
--- a/qutebrowser/browser/history.py
+++ b/qutebrowser/browser/history.py
@@ -30,6 +30,10 @@
from qutebrowser.misc import objects, sql
+# increment to indicate that HistoryCompletion must be regenerated
+_USER_VERSION = 1
+
+
class CompletionHistory(sql.SqlTable):
"""History which only has the newest entry for each URL."""
@@ -48,6 +52,11 @@
super().__init__("History", ['url', 'title', 'atime', 'redirect'],
parent=parent)
self.completion = CompletionHistory(parent=self)
+ if sql.Query('pragma user_version').run().value() < _USER_VERSION:
+ self.completion.delete_all()
+ if not self.completion:
+ # either the table is out-of-date or the user wiped it manually
+ self._rebuild_completion()
self.create_index('HistoryIndex', 'url')
self.create_index('HistoryAtimeIndex', 'atime')
self._contains_query = self.contains_query('url')
@@ -71,6 +80,18 @@
def __contains__(self, url):
return self._contains_query.run(val=url).value()
+ def _rebuild_completion(self):
+ data = {'url': [], 'title': [], 'last_atime': []}
+ # select the latest entry for each url
+ q = sql.Query('SELECT url, title, max(atime) AS atime FROM History '
+ 'WHERE NOT redirect GROUP BY url ORDER BY atime asc')
+ for entry in q.run():
+ data['url'].append(self._format_completion_url(QUrl(entry.url)))
+ data['title'].append(entry.title)
+ data['last_atime'].append(entry.atime)
+ self.completion.insert_batch(data, replace=True)
+ sql.Query('pragma user_version = {}'.format(_USER_VERSION)).run()
+
def get_recent(self):
"""Get the most recent history entries."""
return self.select(sort_by='atime', sort_order='desc', limit=100)
@@ -159,13 +180,12 @@
return
atime = int(atime) if (atime is not None) else int(time.time())
- url_str = url.toString(QUrl.FullyEncoded | QUrl.RemovePassword)
- self.insert({'url': url_str,
+ self.insert({'url': self._format_url(url),
'title': title,
'atime': atime,
'redirect': redirect})
if not redirect:
- self.completion.insert({'url': url_str,
+ self.completion.insert({'url': self._format_completion_url(url),
'title': title,
'last_atime': atime},
replace=True)
@@ -249,12 +269,13 @@
if parsed is None:
continue
url, title, atime, redirect = parsed
- data['url'].append(url)
+ data['url'].append(self._format_url(url))
data['title'].append(title)
data['atime'].append(atime)
data['redirect'].append(redirect)
if not redirect:
- completion_data['url'].append(url)
+ completion_data['url'].append(
+ self._format_completion_url(url))
completion_data['title'].append(title)
completion_data['last_atime'].append(atime)
except ValueError as ex:
@@ -263,6 +284,12 @@
self.insert_batch(data)
self.completion.insert_batch(completion_data, replace=True)
+ def _format_url(self, url):
+ return url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
+
+ def _format_completion_url(self, url):
+ return url.toString(QUrl.RemovePassword)
+
@cmdutils.register(instance='web-history', debug=True)
def debug_dump_history(self, dest):
"""Dump the history to a file in the old pre-SQL format.
|
{"golden_diff": "diff --git a/qutebrowser/browser/history.py b/qutebrowser/browser/history.py\n--- a/qutebrowser/browser/history.py\n+++ b/qutebrowser/browser/history.py\n@@ -30,6 +30,10 @@\n from qutebrowser.misc import objects, sql\n \n \n+# increment to indicate that HistoryCompletion must be regenerated\n+_USER_VERSION = 1\n+\n+\n class CompletionHistory(sql.SqlTable):\n \n \"\"\"History which only has the newest entry for each URL.\"\"\"\n@@ -48,6 +52,11 @@\n super().__init__(\"History\", ['url', 'title', 'atime', 'redirect'],\n parent=parent)\n self.completion = CompletionHistory(parent=self)\n+ if sql.Query('pragma user_version').run().value() < _USER_VERSION:\n+ self.completion.delete_all()\n+ if not self.completion:\n+ # either the table is out-of-date or the user wiped it manually\n+ self._rebuild_completion()\n self.create_index('HistoryIndex', 'url')\n self.create_index('HistoryAtimeIndex', 'atime')\n self._contains_query = self.contains_query('url')\n@@ -71,6 +80,18 @@\n def __contains__(self, url):\n return self._contains_query.run(val=url).value()\n \n+ def _rebuild_completion(self):\n+ data = {'url': [], 'title': [], 'last_atime': []}\n+ # select the latest entry for each url\n+ q = sql.Query('SELECT url, title, max(atime) AS atime FROM History '\n+ 'WHERE NOT redirect GROUP BY url ORDER BY atime asc')\n+ for entry in q.run():\n+ data['url'].append(self._format_completion_url(QUrl(entry.url)))\n+ data['title'].append(entry.title)\n+ data['last_atime'].append(entry.atime)\n+ self.completion.insert_batch(data, replace=True)\n+ sql.Query('pragma user_version = {}'.format(_USER_VERSION)).run()\n+\n def get_recent(self):\n \"\"\"Get the most recent history entries.\"\"\"\n return self.select(sort_by='atime', sort_order='desc', limit=100)\n@@ -159,13 +180,12 @@\n return\n \n atime = int(atime) if (atime is not None) else int(time.time())\n- url_str = url.toString(QUrl.FullyEncoded | QUrl.RemovePassword)\n- self.insert({'url': url_str,\n+ self.insert({'url': self._format_url(url),\n 'title': title,\n 'atime': atime,\n 'redirect': redirect})\n if not redirect:\n- self.completion.insert({'url': url_str,\n+ self.completion.insert({'url': self._format_completion_url(url),\n 'title': title,\n 'last_atime': atime},\n replace=True)\n@@ -249,12 +269,13 @@\n if parsed is None:\n continue\n url, title, atime, redirect = parsed\n- data['url'].append(url)\n+ data['url'].append(self._format_url(url))\n data['title'].append(title)\n data['atime'].append(atime)\n data['redirect'].append(redirect)\n if not redirect:\n- completion_data['url'].append(url)\n+ completion_data['url'].append(\n+ self._format_completion_url(url))\n completion_data['title'].append(title)\n completion_data['last_atime'].append(atime)\n except ValueError as ex:\n@@ -263,6 +284,12 @@\n self.insert_batch(data)\n self.completion.insert_batch(completion_data, replace=True)\n \n+ def _format_url(self, url):\n+ return url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)\n+\n+ def _format_completion_url(self, url):\n+ return url.toString(QUrl.RemovePassword)\n+\n @cmdutils.register(instance='web-history', debug=True)\n def debug_dump_history(self, dest):\n \"\"\"Dump the history to a file in the old pre-SQL format.\n", "issue": "URL percent-encoding in the SQL history\nThere are couple of problems:\r\n\r\n- The [add_url](https://github.com/qutebrowser/qutebrowser/blob/49b858e3599a361cf5c994358c6b189dddfb522c/qutebrowser/browser/history.py#L162) method does percent-encoding, but the 
[parse_entry](https://github.com/qutebrowser/qutebrowser/blob/49b858e3599a361cf5c994358c6b189dddfb522c/qutebrowser/browser/history.py#L192) method does not. So the URLs which were imported from the text file are not percent-encoded, but the newly added URLs are, which leads to \"duplicates\" if I open an old URL.\r\n- The old history completion called [entry.url.toDisplayString()](https://github.com/qutebrowser/qutebrowser/pull/2295/files#diff-d0e110fc69342b1ed59bb5c1a96b0234L101) when the URL was added to the completion. Now the completion shows URLs as they are in the database, which is inconsistent (see the previous point) and a visual regression (I want pretty URLs in the completion).\r\n\r\ncc @rcorre \n", "before_files": [{"content": "# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:\n\n# Copyright 2015-2017 Florian Bruhin (The Compiler) <[email protected]>\n#\n# This file is part of qutebrowser.\n#\n# qutebrowser is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# qutebrowser is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"Simple history which gets written to disk.\"\"\"\n\nimport os\nimport time\n\nfrom PyQt5.QtCore import pyqtSlot, QUrl, QTimer\n\nfrom qutebrowser.commands import cmdutils, cmdexc\nfrom qutebrowser.utils import (utils, objreg, log, usertypes, message,\n debug, standarddir)\nfrom qutebrowser.misc import objects, sql\n\n\nclass CompletionHistory(sql.SqlTable):\n\n \"\"\"History which only has the newest entry for each URL.\"\"\"\n\n def __init__(self, parent=None):\n super().__init__(\"CompletionHistory\", ['url', 'title', 'last_atime'],\n constraints={'url': 'PRIMARY KEY'}, parent=parent)\n self.create_index('CompletionHistoryAtimeIndex', 'last_atime')\n\n\nclass WebHistory(sql.SqlTable):\n\n \"\"\"The global history of visited pages.\"\"\"\n\n def __init__(self, parent=None):\n super().__init__(\"History\", ['url', 'title', 'atime', 'redirect'],\n parent=parent)\n self.completion = CompletionHistory(parent=self)\n self.create_index('HistoryIndex', 'url')\n self.create_index('HistoryAtimeIndex', 'atime')\n self._contains_query = self.contains_query('url')\n self._between_query = sql.Query('SELECT * FROM History '\n 'where not redirect '\n 'and not url like \"qute://%\" '\n 'and atime > :earliest '\n 'and atime <= :latest '\n 'ORDER BY atime desc')\n\n self._before_query = sql.Query('SELECT * FROM History '\n 'where not redirect '\n 'and not url like \"qute://%\" '\n 'and atime <= :latest '\n 'ORDER BY atime desc '\n 'limit :limit offset :offset')\n\n def __repr__(self):\n return utils.get_repr(self, length=len(self))\n\n def __contains__(self, url):\n return self._contains_query.run(val=url).value()\n\n def get_recent(self):\n \"\"\"Get the most recent history entries.\"\"\"\n return self.select(sort_by='atime', sort_order='desc', limit=100)\n\n def entries_between(self, earliest, latest):\n \"\"\"Iterate non-redirect, non-qute entries between two timestamps.\n\n Args:\n earliest: Omit timestamps earlier than this.\n latest: Omit timestamps later than 
this.\n \"\"\"\n self._between_query.run(earliest=earliest, latest=latest)\n return iter(self._between_query)\n\n def entries_before(self, latest, limit, offset):\n \"\"\"Iterate non-redirect, non-qute entries occurring before a timestamp.\n\n Args:\n latest: Omit timestamps more recent than this.\n limit: Max number of entries to include.\n offset: Number of entries to skip.\n \"\"\"\n self._before_query.run(latest=latest, limit=limit, offset=offset)\n return iter(self._before_query)\n\n @cmdutils.register(name='history-clear', instance='web-history')\n def clear(self, force=False):\n \"\"\"Clear all browsing history.\n\n Note this only clears the global history\n (e.g. `~/.local/share/qutebrowser/history` on Linux) but not cookies,\n the back/forward history of a tab, cache or other persistent data.\n\n Args:\n force: Don't ask for confirmation.\n \"\"\"\n if force:\n self._do_clear()\n else:\n message.confirm_async(self._do_clear, title=\"Clear all browsing \"\n \"history?\")\n\n def _do_clear(self):\n self.delete_all()\n self.completion.delete_all()\n\n def delete_url(self, url):\n \"\"\"Remove all history entries with the given url.\n\n Args:\n url: URL string to delete.\n \"\"\"\n self.delete('url', url)\n self.completion.delete('url', url)\n\n @pyqtSlot(QUrl, QUrl, str)\n def add_from_tab(self, url, requested_url, title):\n \"\"\"Add a new history entry as slot, called from a BrowserTab.\"\"\"\n if url.scheme() == 'data' or requested_url.scheme() == 'data':\n return\n if url.isEmpty():\n # things set via setHtml\n return\n\n no_formatting = QUrl.UrlFormattingOption(0)\n if (requested_url.isValid() and\n not requested_url.matches(url, no_formatting)):\n # If the url of the page is different than the url of the link\n # originally clicked, save them both.\n self.add_url(requested_url, title, redirect=True)\n self.add_url(url, title)\n\n def add_url(self, url, title=\"\", *, redirect=False, atime=None):\n \"\"\"Called via add_from_tab when a URL should be added to the history.\n\n Args:\n url: A url (as QUrl) to add to the history.\n redirect: Whether the entry was redirected to another URL\n (hidden in completion)\n atime: Override the atime used to add the entry\n \"\"\"\n if not url.isValid(): # pragma: no cover\n # the no cover pragma is a WORKAROUND for this not being covered in\n # old Qt versions.\n log.misc.warning(\"Ignoring invalid URL being added to history\")\n return\n\n atime = int(atime) if (atime is not None) else int(time.time())\n url_str = url.toString(QUrl.FullyEncoded | QUrl.RemovePassword)\n self.insert({'url': url_str,\n 'title': title,\n 'atime': atime,\n 'redirect': redirect})\n if not redirect:\n self.completion.insert({'url': url_str,\n 'title': title,\n 'last_atime': atime},\n replace=True)\n\n def _parse_entry(self, line):\n \"\"\"Parse a history line like '12345 http://example.com title'.\"\"\"\n if not line or line.startswith('#'):\n return None\n data = line.split(maxsplit=2)\n if len(data) == 2:\n atime, url = data\n title = \"\"\n elif len(data) == 3:\n atime, url, title = data\n else:\n raise ValueError(\"2 or 3 fields expected\")\n\n # http://xn--pple-43d.com/ with\n # https://bugreports.qt.io/browse/QTBUG-60364\n if url in ['http://.com/', 'https://.com/',\n 'http://www..com/', 'https://www..com/']:\n return None\n\n url = QUrl(url)\n if not url.isValid():\n raise ValueError(\"Invalid URL: {}\".format(url.errorString()))\n\n # https://github.com/qutebrowser/qutebrowser/issues/2646\n if url.scheme() == 'data':\n return None\n\n # 
https://github.com/qutebrowser/qutebrowser/issues/670\n atime = atime.lstrip('\\0')\n\n if '-' in atime:\n atime, flags = atime.split('-')\n else:\n flags = ''\n\n if not set(flags).issubset('r'):\n raise ValueError(\"Invalid flags {!r}\".format(flags))\n\n redirect = 'r' in flags\n return (url, title, int(atime), redirect)\n\n def import_txt(self):\n \"\"\"Import a history text file into sqlite if it exists.\n\n In older versions of qutebrowser, history was stored in a text format.\n This converts that file into the new sqlite format and moves it to a\n backup location.\n \"\"\"\n path = os.path.join(standarddir.data(), 'history')\n if not os.path.isfile(path):\n return\n\n def action():\n with debug.log_time(log.init, 'Import old history file to sqlite'):\n try:\n self._read(path)\n except ValueError as ex:\n message.error('Failed to import history: {}'.format(ex))\n else:\n bakpath = path + '.bak'\n message.info('History import complete. Moving {} to {}'\n .format(path, bakpath))\n os.rename(path, bakpath)\n\n # delay to give message time to appear before locking down for import\n message.info('Converting {} to sqlite...'.format(path))\n QTimer.singleShot(100, action)\n\n def _read(self, path):\n \"\"\"Import a text file into the sql database.\"\"\"\n with open(path, 'r', encoding='utf-8') as f:\n data = {'url': [], 'title': [], 'atime': [], 'redirect': []}\n completion_data = {'url': [], 'title': [], 'last_atime': []}\n for (i, line) in enumerate(f):\n try:\n parsed = self._parse_entry(line.strip())\n if parsed is None:\n continue\n url, title, atime, redirect = parsed\n data['url'].append(url)\n data['title'].append(title)\n data['atime'].append(atime)\n data['redirect'].append(redirect)\n if not redirect:\n completion_data['url'].append(url)\n completion_data['title'].append(title)\n completion_data['last_atime'].append(atime)\n except ValueError as ex:\n raise ValueError('Failed to parse line #{} of {}: \"{}\"'\n .format(i, path, ex))\n self.insert_batch(data)\n self.completion.insert_batch(completion_data, replace=True)\n\n @cmdutils.register(instance='web-history', debug=True)\n def debug_dump_history(self, dest):\n \"\"\"Dump the history to a file in the old pre-SQL format.\n\n Args:\n dest: Where to write the file to.\n \"\"\"\n dest = os.path.expanduser(dest)\n\n lines = ('{}{} {} {}'\n .format(int(x.atime), '-r' * x.redirect, x.url, x.title)\n for x in self.select(sort_by='atime', sort_order='asc'))\n\n try:\n with open(dest, 'w', encoding='utf-8') as f:\n f.write('\\n'.join(lines))\n message.info(\"Dumped history to {}\".format(dest))\n except OSError as e:\n raise cmdexc.CommandError('Could not write history: {}', e)\n\n\ndef init(parent=None):\n \"\"\"Initialize the web history.\n\n Args:\n parent: The parent to use for WebHistory.\n \"\"\"\n history = WebHistory(parent=parent)\n objreg.register('web-history', history)\n\n if objects.backend == usertypes.Backend.QtWebKit:\n from qutebrowser.browser.webkit import webkithistory\n webkithistory.init(history)\n", "path": "qutebrowser/browser/history.py"}], "after_files": [{"content": "# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:\n\n# Copyright 2015-2017 Florian Bruhin (The Compiler) <[email protected]>\n#\n# This file is part of qutebrowser.\n#\n# qutebrowser is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# qutebrowser is 
distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"Simple history which gets written to disk.\"\"\"\n\nimport os\nimport time\n\nfrom PyQt5.QtCore import pyqtSlot, QUrl, QTimer\n\nfrom qutebrowser.commands import cmdutils, cmdexc\nfrom qutebrowser.utils import (utils, objreg, log, usertypes, message,\n debug, standarddir)\nfrom qutebrowser.misc import objects, sql\n\n\n# increment to indicate that HistoryCompletion must be regenerated\n_USER_VERSION = 1\n\n\nclass CompletionHistory(sql.SqlTable):\n\n \"\"\"History which only has the newest entry for each URL.\"\"\"\n\n def __init__(self, parent=None):\n super().__init__(\"CompletionHistory\", ['url', 'title', 'last_atime'],\n constraints={'url': 'PRIMARY KEY'}, parent=parent)\n self.create_index('CompletionHistoryAtimeIndex', 'last_atime')\n\n\nclass WebHistory(sql.SqlTable):\n\n \"\"\"The global history of visited pages.\"\"\"\n\n def __init__(self, parent=None):\n super().__init__(\"History\", ['url', 'title', 'atime', 'redirect'],\n parent=parent)\n self.completion = CompletionHistory(parent=self)\n if sql.Query('pragma user_version').run().value() < _USER_VERSION:\n self.completion.delete_all()\n if not self.completion:\n # either the table is out-of-date or the user wiped it manually\n self._rebuild_completion()\n self.create_index('HistoryIndex', 'url')\n self.create_index('HistoryAtimeIndex', 'atime')\n self._contains_query = self.contains_query('url')\n self._between_query = sql.Query('SELECT * FROM History '\n 'where not redirect '\n 'and not url like \"qute://%\" '\n 'and atime > :earliest '\n 'and atime <= :latest '\n 'ORDER BY atime desc')\n\n self._before_query = sql.Query('SELECT * FROM History '\n 'where not redirect '\n 'and not url like \"qute://%\" '\n 'and atime <= :latest '\n 'ORDER BY atime desc '\n 'limit :limit offset :offset')\n\n def __repr__(self):\n return utils.get_repr(self, length=len(self))\n\n def __contains__(self, url):\n return self._contains_query.run(val=url).value()\n\n def _rebuild_completion(self):\n data = {'url': [], 'title': [], 'last_atime': []}\n # select the latest entry for each url\n q = sql.Query('SELECT url, title, max(atime) AS atime FROM History '\n 'WHERE NOT redirect GROUP BY url ORDER BY atime asc')\n for entry in q.run():\n data['url'].append(self._format_completion_url(QUrl(entry.url)))\n data['title'].append(entry.title)\n data['last_atime'].append(entry.atime)\n self.completion.insert_batch(data, replace=True)\n sql.Query('pragma user_version = {}'.format(_USER_VERSION)).run()\n\n def get_recent(self):\n \"\"\"Get the most recent history entries.\"\"\"\n return self.select(sort_by='atime', sort_order='desc', limit=100)\n\n def entries_between(self, earliest, latest):\n \"\"\"Iterate non-redirect, non-qute entries between two timestamps.\n\n Args:\n earliest: Omit timestamps earlier than this.\n latest: Omit timestamps later than this.\n \"\"\"\n self._between_query.run(earliest=earliest, latest=latest)\n return iter(self._between_query)\n\n def entries_before(self, latest, limit, offset):\n \"\"\"Iterate non-redirect, non-qute entries occurring before a timestamp.\n\n Args:\n latest: Omit timestamps more recent than this.\n limit: Max number of entries to 
include.\n offset: Number of entries to skip.\n \"\"\"\n self._before_query.run(latest=latest, limit=limit, offset=offset)\n return iter(self._before_query)\n\n @cmdutils.register(name='history-clear', instance='web-history')\n def clear(self, force=False):\n \"\"\"Clear all browsing history.\n\n Note this only clears the global history\n (e.g. `~/.local/share/qutebrowser/history` on Linux) but not cookies,\n the back/forward history of a tab, cache or other persistent data.\n\n Args:\n force: Don't ask for confirmation.\n \"\"\"\n if force:\n self._do_clear()\n else:\n message.confirm_async(self._do_clear, title=\"Clear all browsing \"\n \"history?\")\n\n def _do_clear(self):\n self.delete_all()\n self.completion.delete_all()\n\n def delete_url(self, url):\n \"\"\"Remove all history entries with the given url.\n\n Args:\n url: URL string to delete.\n \"\"\"\n self.delete('url', url)\n self.completion.delete('url', url)\n\n @pyqtSlot(QUrl, QUrl, str)\n def add_from_tab(self, url, requested_url, title):\n \"\"\"Add a new history entry as slot, called from a BrowserTab.\"\"\"\n if url.scheme() == 'data' or requested_url.scheme() == 'data':\n return\n if url.isEmpty():\n # things set via setHtml\n return\n\n no_formatting = QUrl.UrlFormattingOption(0)\n if (requested_url.isValid() and\n not requested_url.matches(url, no_formatting)):\n # If the url of the page is different than the url of the link\n # originally clicked, save them both.\n self.add_url(requested_url, title, redirect=True)\n self.add_url(url, title)\n\n def add_url(self, url, title=\"\", *, redirect=False, atime=None):\n \"\"\"Called via add_from_tab when a URL should be added to the history.\n\n Args:\n url: A url (as QUrl) to add to the history.\n redirect: Whether the entry was redirected to another URL\n (hidden in completion)\n atime: Override the atime used to add the entry\n \"\"\"\n if not url.isValid(): # pragma: no cover\n # the no cover pragma is a WORKAROUND for this not being covered in\n # old Qt versions.\n log.misc.warning(\"Ignoring invalid URL being added to history\")\n return\n\n atime = int(atime) if (atime is not None) else int(time.time())\n self.insert({'url': self._format_url(url),\n 'title': title,\n 'atime': atime,\n 'redirect': redirect})\n if not redirect:\n self.completion.insert({'url': self._format_completion_url(url),\n 'title': title,\n 'last_atime': atime},\n replace=True)\n\n def _parse_entry(self, line):\n \"\"\"Parse a history line like '12345 http://example.com title'.\"\"\"\n if not line or line.startswith('#'):\n return None\n data = line.split(maxsplit=2)\n if len(data) == 2:\n atime, url = data\n title = \"\"\n elif len(data) == 3:\n atime, url, title = data\n else:\n raise ValueError(\"2 or 3 fields expected\")\n\n # http://xn--pple-43d.com/ with\n # https://bugreports.qt.io/browse/QTBUG-60364\n if url in ['http://.com/', 'https://.com/',\n 'http://www..com/', 'https://www..com/']:\n return None\n\n url = QUrl(url)\n if not url.isValid():\n raise ValueError(\"Invalid URL: {}\".format(url.errorString()))\n\n # https://github.com/qutebrowser/qutebrowser/issues/2646\n if url.scheme() == 'data':\n return None\n\n # https://github.com/qutebrowser/qutebrowser/issues/670\n atime = atime.lstrip('\\0')\n\n if '-' in atime:\n atime, flags = atime.split('-')\n else:\n flags = ''\n\n if not set(flags).issubset('r'):\n raise ValueError(\"Invalid flags {!r}\".format(flags))\n\n redirect = 'r' in flags\n return (url, title, int(atime), redirect)\n\n def import_txt(self):\n \"\"\"Import a history 
text file into sqlite if it exists.\n\n In older versions of qutebrowser, history was stored in a text format.\n This converts that file into the new sqlite format and moves it to a\n backup location.\n \"\"\"\n path = os.path.join(standarddir.data(), 'history')\n if not os.path.isfile(path):\n return\n\n def action():\n with debug.log_time(log.init, 'Import old history file to sqlite'):\n try:\n self._read(path)\n except ValueError as ex:\n message.error('Failed to import history: {}'.format(ex))\n else:\n bakpath = path + '.bak'\n message.info('History import complete. Moving {} to {}'\n .format(path, bakpath))\n os.rename(path, bakpath)\n\n # delay to give message time to appear before locking down for import\n message.info('Converting {} to sqlite...'.format(path))\n QTimer.singleShot(100, action)\n\n def _read(self, path):\n \"\"\"Import a text file into the sql database.\"\"\"\n with open(path, 'r', encoding='utf-8') as f:\n data = {'url': [], 'title': [], 'atime': [], 'redirect': []}\n completion_data = {'url': [], 'title': [], 'last_atime': []}\n for (i, line) in enumerate(f):\n try:\n parsed = self._parse_entry(line.strip())\n if parsed is None:\n continue\n url, title, atime, redirect = parsed\n data['url'].append(self._format_url(url))\n data['title'].append(title)\n data['atime'].append(atime)\n data['redirect'].append(redirect)\n if not redirect:\n completion_data['url'].append(\n self._format_completion_url(url))\n completion_data['title'].append(title)\n completion_data['last_atime'].append(atime)\n except ValueError as ex:\n raise ValueError('Failed to parse line #{} of {}: \"{}\"'\n .format(i, path, ex))\n self.insert_batch(data)\n self.completion.insert_batch(completion_data, replace=True)\n\n def _format_url(self, url):\n return url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)\n\n def _format_completion_url(self, url):\n return url.toString(QUrl.RemovePassword)\n\n @cmdutils.register(instance='web-history', debug=True)\n def debug_dump_history(self, dest):\n \"\"\"Dump the history to a file in the old pre-SQL format.\n\n Args:\n dest: Where to write the file to.\n \"\"\"\n dest = os.path.expanduser(dest)\n\n lines = ('{}{} {} {}'\n .format(int(x.atime), '-r' * x.redirect, x.url, x.title)\n for x in self.select(sort_by='atime', sort_order='asc'))\n\n try:\n with open(dest, 'w', encoding='utf-8') as f:\n f.write('\\n'.join(lines))\n message.info(\"Dumped history to {}\".format(dest))\n except OSError as e:\n raise cmdexc.CommandError('Could not write history: {}', e)\n\n\ndef init(parent=None):\n \"\"\"Initialize the web history.\n\n Args:\n parent: The parent to use for WebHistory.\n \"\"\"\n history = WebHistory(parent=parent)\n objreg.register('web-history', history)\n\n if objects.backend == usertypes.Backend.QtWebKit:\n from qutebrowser.browser.webkit import webkithistory\n webkithistory.init(history)\n", "path": "qutebrowser/browser/history.py"}]}
| 3,861 | 891 |
gh_patches_debug_30937
|
rasdani/github-patches
|
git_diff
|
svthalia__concrexit-3097
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add http endpoint for the next event
### Is your feature request related to a problem? Please describe.
We want to use a DNS redirect that points to the first upcoming Thalia event. 
### Describe the solution you'd like
An endpoint (like /events/next) that redirects to the next Thalia event 
### Motivation
We want to set up a domain (sog.je) that redirects to this endpoint 
### Describe alternatives you've considered
A static HTML host that uses JavaScript, but this would require additional resources 
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/events/views.py`
Content:
```
1 """Views provided by the events package."""
2
3 from django.contrib import messages
4 from django.contrib.auth.decorators import login_required
5 from django.shortcuts import get_object_or_404, redirect
6 from django.utils import timezone
7 from django.utils.decorators import method_decorator
8 from django.utils.translation import gettext_lazy as _
9 from django.views import View
10 from django.views.generic import DetailView, FormView, TemplateView
11
12 from events import services
13 from events.exceptions import RegistrationError
14 from events.models import categories
15 from events.services import is_user_registered
16 from payments.models import Payment
17
18 from .forms import FieldsForm
19 from .models import Event, EventRegistration
20
21
22 class EventIndex(TemplateView):
23 """Render the events calendar overview."""
24
25 template_name = "events/index.html"
26
27 def get_context_data(self, **kwargs):
28 context = super().get_context_data(**kwargs)
29
30 upcoming_activity = (
31 Event.objects.filter(published=True, end__gte=timezone.now())
32 .order_by("end")
33 .first()
34 )
35 context["upcoming_activity"] = upcoming_activity
36
37 return context
38
39
40 class EventDetail(DetailView):
41 """Render a single event detail page."""
42
43 model = Event
44 queryset = Event.objects.filter(published=True)
45 template_name = "events/event.html"
46 context_object_name = "event"
47
48 def get_context_data(self, **kwargs):
49 context = super().get_context_data(**kwargs)
50 context["user"] = self.request.user
51 context["payment_method_tpay"] = Payment.TPAY
52
53 event = context["event"]
54 if event.max_participants:
55 perc = 100.0 * len(event.participants) / event.max_participants
56 context["registration_percentage"] = perc
57
58 try:
59 context["registration"] = EventRegistration.objects.get(
60 event=event, member=self.request.member
61 )
62 except (EventRegistration.DoesNotExist, TypeError):
63 pass
64
65 registration_status = services.registration_status(
66 event, context.get("registration"), self.request.member
67 )
68 context["registration_status"] = services.registration_status_string(
69 registration_status, event, context.get("registration")
70 )
71
72 context["show_cancel_status"] = services.show_cancel_status(registration_status)
73 if context["show_cancel_status"]:
74 cancel_status = services.cancel_status(event, context.get("registration"))
75 context["cancel_info"] = services.cancel_info_string(
76 event, cancel_status, registration_status
77 )
78
79 context["permissions"] = services.event_permissions(self.request.member, event)
80
81 context["date_now"] = timezone.now()
82
83 context["slide_size"] = "slide"
84
85 context["participants"] = event.participants.select_related(
86 "member", "member__profile"
87 )
88
89 return context
90
91
92 class AlumniEventsView(TemplateView):
93 """Render the alumni events page."""
94
95 template_name = "events/alumni.html"
96
97 def get_context_data(self, **kwargs):
98 context = super().get_context_data(**kwargs)
99
100 events = Event.objects.filter(
101 published=True, category=categories.CATEGORY_ALUMNI, end__gte=timezone.now()
102 ).order_by("end")[:3]
103 context["events"] = events
104
105 return context
106
107
108 @method_decorator(login_required, name="dispatch")
109 class EventRegisterView(View):
110 """Define a view that allows the user to register for an event using a POST request.
111
112 The user should be authenticated.
113 """
114
115 def get(self, request, *args, **kwargs):
116 return redirect("events:event", pk=kwargs["pk"])
117
118 def post(self, request, *args, **kwargs):
119 event = get_object_or_404(Event, pk=kwargs["pk"])
120 try:
121 services.create_registration(request.member, event)
122
123 if event.has_fields:
124 return redirect("events:registration", event.pk)
125
126 messages.success(request, _("Registration successful."))
127 except RegistrationError as e:
128 messages.error(request, e)
129
130 return redirect(event)
131
132
133 @method_decorator(login_required, name="dispatch")
134 class EventCancelView(View):
135 """Define a view that allows the user to cancel their event registration using a POST request.
136
137 The user should be authenticated.
138 """
139
140 def get(self, request, *args, **kwargs):
141 return redirect("events:event", pk=kwargs["pk"])
142
143 def post(self, request, *args, **kwargs):
144 event = get_object_or_404(Event, pk=kwargs["pk"])
145 try:
146 services.cancel_registration(request.member, event)
147 messages.success(request, _("Registration successfully cancelled."))
148 except RegistrationError as e:
149 messages.error(request, e)
150
151 return redirect(event)
152
153
154 @method_decorator(login_required, name="dispatch")
155 class RegistrationView(FormView):
156 """Render a form that allows the user to change the details of their registration.
157
158 The user should be authenticated.
159 """
160
161 form_class = FieldsForm
162 template_name = "events/registration.html"
163 event = None
164
165 def get_context_data(self, **kwargs):
166 context = super().get_context_data(**kwargs)
167 context["event"] = self.event
168 return context
169
170 def get_form_kwargs(self):
171 kwargs = super().get_form_kwargs()
172 kwargs["fields"] = services.registration_fields(
173 self.request, self.request.member, self.event
174 )
175 return kwargs
176
177 def form_valid(self, form):
178 values = form.field_values()
179 try:
180 services.update_registration(
181 self.request.member, self.event, field_values=values
182 )
183 messages.success(self.request, _("Registration successfully saved."))
184 return redirect(self.event)
185 except RegistrationError as e:
186 messages.error(self.request, e)
187 return self.render_to_response(self.get_context_data(form=form))
188
189 def dispatch(self, request, *args, **kwargs):
190 self.event = get_object_or_404(Event, pk=self.kwargs["pk"])
191 try:
192 if self.event.has_fields:
193 return super().dispatch(request, *args, **kwargs)
194 except RegistrationError:
195 pass
196 return redirect(self.event)
197
198
199 @method_decorator(login_required, name="dispatch")
200 class MarkPresentView(View):
201 """A view that allows uses to mark their presence at an event using a secret token."""
202
203 def get(self, request, *args, **kwargs):
204 """Mark a user as present.
205
206 Checks if the url is correct, the event has not ended yet, and the user is registered.
207 """
208 event = get_object_or_404(Event, pk=kwargs["pk"])
209 if kwargs["token"] != event.mark_present_url_token:
210 messages.error(request, _("Invalid url."))
211 elif not request.member or not is_user_registered(request.member, event):
212 messages.error(request, _("You are not registered for this event."))
213 else:
214 registration = event.registrations.get(
215 member=request.member, date_cancelled=None
216 )
217
218 if registration.present:
219 messages.info(request, _("You were already marked as present."))
220 elif event.end < timezone.now():
221 messages.error(request, _("This event has already ended."))
222 else:
223 registration.present = True
224 registration.save()
225 messages.success(request, _("You have been marked as present."))
226
227 return redirect(event)
228
```
Path: `website/events/urls.py`
Content:
```
1 """Routes defined by the events package."""
2 from django.urls import include, path
3
4 from events.feeds import EventFeed
5 from events.views import (
6 AlumniEventsView,
7 EventCancelView,
8 EventDetail,
9 EventIndex,
10 EventRegisterView,
11 MarkPresentView,
12 RegistrationView,
13 )
14
15 app_name = "events"
16
17 urlpatterns = [
18 path(
19 "events/",
20 include(
21 [
22 path("<int:pk>/", EventDetail.as_view(), name="event"),
23 path("<slug:slug>/", EventDetail.as_view(), name="event"),
24 path(
25 "<int:pk>/registration/register/",
26 EventRegisterView.as_view(),
27 name="register",
28 ),
29 path(
30 "<int:pk>/registration/cancel/",
31 EventCancelView.as_view(),
32 name="cancel",
33 ),
34 path(
35 "<int:pk>/mark-present/<uuid:token>/",
36 MarkPresentView.as_view(),
37 name="mark-present",
38 ),
39 path(
40 "<int:pk>/registration/",
41 RegistrationView.as_view(),
42 name="registration",
43 ),
44 path("", EventIndex.as_view(), name="index"),
45 path("ical/nl.ics", EventFeed(), name="ical-nl"),
46 path("ical/en.ics", EventFeed(), name="ical-en"),
47 ]
48 ),
49 ),
50 path("association/alumni/", AlumniEventsView.as_view(), name="alumni"),
51 ]
52
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/website/events/urls.py b/website/events/urls.py
--- a/website/events/urls.py
+++ b/website/events/urls.py
@@ -9,6 +9,7 @@
EventIndex,
EventRegisterView,
MarkPresentView,
+ NextEventView,
RegistrationView,
)
@@ -20,6 +21,7 @@
include(
[
path("<int:pk>/", EventDetail.as_view(), name="event"),
+ path("next/", NextEventView.as_view(), name="next"),
path("<slug:slug>/", EventDetail.as_view(), name="event"),
path(
"<int:pk>/registration/register/",
diff --git a/website/events/views.py b/website/events/views.py
--- a/website/events/views.py
+++ b/website/events/views.py
@@ -2,6 +2,7 @@
from django.contrib import messages
from django.contrib.auth.decorators import login_required
+from django.http import Http404
from django.shortcuts import get_object_or_404, redirect
from django.utils import timezone
from django.utils.decorators import method_decorator
@@ -225,3 +226,20 @@
messages.success(request, _("You have been marked as present."))
return redirect(event)
+
+
+class NextEventView(View):
+ def get(self, request, *args, **kwargs):
+ """HTTP redirect to the next event.
+
+ Checks if there is an upcoming event. Raise a 404 if none exists.
+ """
+ upcoming_activity = (
+ Event.objects.filter(published=True, end__gte=timezone.now())
+ .order_by("end")
+ .first()
+ )
+ if not upcoming_activity:
+ raise Http404("There is no upcoming event.")
+
+ return redirect(upcoming_activity)
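A quick way to exercise the new route, sketched with Django's standard test client. The URL name `events:next` comes from the diff above; the expected status codes are assumptions about how `redirect()` and `Http404` surface to the client, not code taken from the repository's test suite.
```python
# Minimal sketch, not part of the repository: assumes the patched urls.py/views.py
# are installed and that Event.get_absolute_url() exists (redirect() relies on it).
from django.test import Client
from django.urls import reverse

client = Client()
response = client.get(reverse("events:next"))

# With an upcoming published event the view answers with a redirect (302) to the
# event's detail page; with no upcoming event the Http404 becomes a 404 response.
assert response.status_code in (302, 404)
```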
|
{"golden_diff": "diff --git a/website/events/urls.py b/website/events/urls.py\n--- a/website/events/urls.py\n+++ b/website/events/urls.py\n@@ -9,6 +9,7 @@\n EventIndex,\n EventRegisterView,\n MarkPresentView,\n+ NextEventView,\n RegistrationView,\n )\n \n@@ -20,6 +21,7 @@\n include(\n [\n path(\"<int:pk>/\", EventDetail.as_view(), name=\"event\"),\n+ path(\"next/\", NextEventView.as_view(), name=\"next\"),\n path(\"<slug:slug>/\", EventDetail.as_view(), name=\"event\"),\n path(\n \"<int:pk>/registration/register/\",\ndiff --git a/website/events/views.py b/website/events/views.py\n--- a/website/events/views.py\n+++ b/website/events/views.py\n@@ -2,6 +2,7 @@\n \n from django.contrib import messages\n from django.contrib.auth.decorators import login_required\n+from django.http import Http404\n from django.shortcuts import get_object_or_404, redirect\n from django.utils import timezone\n from django.utils.decorators import method_decorator\n@@ -225,3 +226,20 @@\n messages.success(request, _(\"You have been marked as present.\"))\n \n return redirect(event)\n+\n+\n+class NextEventView(View):\n+ def get(self, request, *args, **kwargs):\n+ \"\"\"HTTP redirect to the next event.\n+\n+ Checks if there is an upcoming event. Raise a 404 if none exists.\n+ \"\"\"\n+ upcoming_activity = (\n+ Event.objects.filter(published=True, end__gte=timezone.now())\n+ .order_by(\"end\")\n+ .first()\n+ )\n+ if not upcoming_activity:\n+ raise Http404(\"There is no upcoming event.\")\n+\n+ return redirect(upcoming_activity)\n", "issue": "Add http endpoint for the next event \n### Is your feature request related to a problem? Please describe.\r\nWe want to use a dns redirect to redirect to the first upcoming Thalia event. \r\n\r\n### Describe the solution you'd like\r\nAn endpoint (like /events/next) that redicects to the next Thalia event \r\n### Motivation\r\nWe want to setup a domain (sog.je) that redirects to this \r\n\r\n### Describe alternatives you've considered\r\nStatic html host that uses javascriot, but this would require using additional resources \n", "before_files": [{"content": "\"\"\"Views provided by the events package.\"\"\"\n\nfrom django.contrib import messages\nfrom django.contrib.auth.decorators import login_required\nfrom django.shortcuts import get_object_or_404, redirect\nfrom django.utils import timezone\nfrom django.utils.decorators import method_decorator\nfrom django.utils.translation import gettext_lazy as _\nfrom django.views import View\nfrom django.views.generic import DetailView, FormView, TemplateView\n\nfrom events import services\nfrom events.exceptions import RegistrationError\nfrom events.models import categories\nfrom events.services import is_user_registered\nfrom payments.models import Payment\n\nfrom .forms import FieldsForm\nfrom .models import Event, EventRegistration\n\n\nclass EventIndex(TemplateView):\n \"\"\"Render the events calendar overview.\"\"\"\n\n template_name = \"events/index.html\"\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n upcoming_activity = (\n Event.objects.filter(published=True, end__gte=timezone.now())\n .order_by(\"end\")\n .first()\n )\n context[\"upcoming_activity\"] = upcoming_activity\n\n return context\n\n\nclass EventDetail(DetailView):\n \"\"\"Render a single event detail page.\"\"\"\n\n model = Event\n queryset = Event.objects.filter(published=True)\n template_name = \"events/event.html\"\n context_object_name = \"event\"\n\n def get_context_data(self, **kwargs):\n context = 
super().get_context_data(**kwargs)\n context[\"user\"] = self.request.user\n context[\"payment_method_tpay\"] = Payment.TPAY\n\n event = context[\"event\"]\n if event.max_participants:\n perc = 100.0 * len(event.participants) / event.max_participants\n context[\"registration_percentage\"] = perc\n\n try:\n context[\"registration\"] = EventRegistration.objects.get(\n event=event, member=self.request.member\n )\n except (EventRegistration.DoesNotExist, TypeError):\n pass\n\n registration_status = services.registration_status(\n event, context.get(\"registration\"), self.request.member\n )\n context[\"registration_status\"] = services.registration_status_string(\n registration_status, event, context.get(\"registration\")\n )\n\n context[\"show_cancel_status\"] = services.show_cancel_status(registration_status)\n if context[\"show_cancel_status\"]:\n cancel_status = services.cancel_status(event, context.get(\"registration\"))\n context[\"cancel_info\"] = services.cancel_info_string(\n event, cancel_status, registration_status\n )\n\n context[\"permissions\"] = services.event_permissions(self.request.member, event)\n\n context[\"date_now\"] = timezone.now()\n\n context[\"slide_size\"] = \"slide\"\n\n context[\"participants\"] = event.participants.select_related(\n \"member\", \"member__profile\"\n )\n\n return context\n\n\nclass AlumniEventsView(TemplateView):\n \"\"\"Render the alumni events page.\"\"\"\n\n template_name = \"events/alumni.html\"\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n events = Event.objects.filter(\n published=True, category=categories.CATEGORY_ALUMNI, end__gte=timezone.now()\n ).order_by(\"end\")[:3]\n context[\"events\"] = events\n\n return context\n\n\n@method_decorator(login_required, name=\"dispatch\")\nclass EventRegisterView(View):\n \"\"\"Define a view that allows the user to register for an event using a POST request.\n\n The user should be authenticated.\n \"\"\"\n\n def get(self, request, *args, **kwargs):\n return redirect(\"events:event\", pk=kwargs[\"pk\"])\n\n def post(self, request, *args, **kwargs):\n event = get_object_or_404(Event, pk=kwargs[\"pk\"])\n try:\n services.create_registration(request.member, event)\n\n if event.has_fields:\n return redirect(\"events:registration\", event.pk)\n\n messages.success(request, _(\"Registration successful.\"))\n except RegistrationError as e:\n messages.error(request, e)\n\n return redirect(event)\n\n\n@method_decorator(login_required, name=\"dispatch\")\nclass EventCancelView(View):\n \"\"\"Define a view that allows the user to cancel their event registration using a POST request.\n\n The user should be authenticated.\n \"\"\"\n\n def get(self, request, *args, **kwargs):\n return redirect(\"events:event\", pk=kwargs[\"pk\"])\n\n def post(self, request, *args, **kwargs):\n event = get_object_or_404(Event, pk=kwargs[\"pk\"])\n try:\n services.cancel_registration(request.member, event)\n messages.success(request, _(\"Registration successfully cancelled.\"))\n except RegistrationError as e:\n messages.error(request, e)\n\n return redirect(event)\n\n\n@method_decorator(login_required, name=\"dispatch\")\nclass RegistrationView(FormView):\n \"\"\"Render a form that allows the user to change the details of their registration.\n\n The user should be authenticated.\n \"\"\"\n\n form_class = FieldsForm\n template_name = \"events/registration.html\"\n event = None\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context[\"event\"] = 
self.event\n return context\n\n def get_form_kwargs(self):\n kwargs = super().get_form_kwargs()\n kwargs[\"fields\"] = services.registration_fields(\n self.request, self.request.member, self.event\n )\n return kwargs\n\n def form_valid(self, form):\n values = form.field_values()\n try:\n services.update_registration(\n self.request.member, self.event, field_values=values\n )\n messages.success(self.request, _(\"Registration successfully saved.\"))\n return redirect(self.event)\n except RegistrationError as e:\n messages.error(self.request, e)\n return self.render_to_response(self.get_context_data(form=form))\n\n def dispatch(self, request, *args, **kwargs):\n self.event = get_object_or_404(Event, pk=self.kwargs[\"pk\"])\n try:\n if self.event.has_fields:\n return super().dispatch(request, *args, **kwargs)\n except RegistrationError:\n pass\n return redirect(self.event)\n\n\n@method_decorator(login_required, name=\"dispatch\")\nclass MarkPresentView(View):\n \"\"\"A view that allows uses to mark their presence at an event using a secret token.\"\"\"\n\n def get(self, request, *args, **kwargs):\n \"\"\"Mark a user as present.\n\n Checks if the url is correct, the event has not ended yet, and the user is registered.\n \"\"\"\n event = get_object_or_404(Event, pk=kwargs[\"pk\"])\n if kwargs[\"token\"] != event.mark_present_url_token:\n messages.error(request, _(\"Invalid url.\"))\n elif not request.member or not is_user_registered(request.member, event):\n messages.error(request, _(\"You are not registered for this event.\"))\n else:\n registration = event.registrations.get(\n member=request.member, date_cancelled=None\n )\n\n if registration.present:\n messages.info(request, _(\"You were already marked as present.\"))\n elif event.end < timezone.now():\n messages.error(request, _(\"This event has already ended.\"))\n else:\n registration.present = True\n registration.save()\n messages.success(request, _(\"You have been marked as present.\"))\n\n return redirect(event)\n", "path": "website/events/views.py"}, {"content": "\"\"\"Routes defined by the events package.\"\"\"\nfrom django.urls import include, path\n\nfrom events.feeds import EventFeed\nfrom events.views import (\n AlumniEventsView,\n EventCancelView,\n EventDetail,\n EventIndex,\n EventRegisterView,\n MarkPresentView,\n RegistrationView,\n)\n\napp_name = \"events\"\n\nurlpatterns = [\n path(\n \"events/\",\n include(\n [\n path(\"<int:pk>/\", EventDetail.as_view(), name=\"event\"),\n path(\"<slug:slug>/\", EventDetail.as_view(), name=\"event\"),\n path(\n \"<int:pk>/registration/register/\",\n EventRegisterView.as_view(),\n name=\"register\",\n ),\n path(\n \"<int:pk>/registration/cancel/\",\n EventCancelView.as_view(),\n name=\"cancel\",\n ),\n path(\n \"<int:pk>/mark-present/<uuid:token>/\",\n MarkPresentView.as_view(),\n name=\"mark-present\",\n ),\n path(\n \"<int:pk>/registration/\",\n RegistrationView.as_view(),\n name=\"registration\",\n ),\n path(\"\", EventIndex.as_view(), name=\"index\"),\n path(\"ical/nl.ics\", EventFeed(), name=\"ical-nl\"),\n path(\"ical/en.ics\", EventFeed(), name=\"ical-en\"),\n ]\n ),\n ),\n path(\"association/alumni/\", AlumniEventsView.as_view(), name=\"alumni\"),\n]\n", "path": "website/events/urls.py"}], "after_files": [{"content": "\"\"\"Views provided by the events package.\"\"\"\n\nfrom django.contrib import messages\nfrom django.contrib.auth.decorators import login_required\nfrom django.http import Http404\nfrom django.shortcuts import get_object_or_404, redirect\nfrom django.utils import 
timezone\nfrom django.utils.decorators import method_decorator\nfrom django.utils.translation import gettext_lazy as _\nfrom django.views import View\nfrom django.views.generic import DetailView, FormView, TemplateView\n\nfrom events import services\nfrom events.exceptions import RegistrationError\nfrom events.models import categories\nfrom events.services import is_user_registered\nfrom payments.models import Payment\n\nfrom .forms import FieldsForm\nfrom .models import Event, EventRegistration\n\n\nclass EventIndex(TemplateView):\n \"\"\"Render the events calendar overview.\"\"\"\n\n template_name = \"events/index.html\"\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n upcoming_activity = (\n Event.objects.filter(published=True, end__gte=timezone.now())\n .order_by(\"end\")\n .first()\n )\n context[\"upcoming_activity\"] = upcoming_activity\n\n return context\n\n\nclass EventDetail(DetailView):\n \"\"\"Render a single event detail page.\"\"\"\n\n model = Event\n queryset = Event.objects.filter(published=True)\n template_name = \"events/event.html\"\n context_object_name = \"event\"\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context[\"user\"] = self.request.user\n context[\"payment_method_tpay\"] = Payment.TPAY\n\n event = context[\"event\"]\n if event.max_participants:\n perc = 100.0 * len(event.participants) / event.max_participants\n context[\"registration_percentage\"] = perc\n\n try:\n context[\"registration\"] = EventRegistration.objects.get(\n event=event, member=self.request.member\n )\n except (EventRegistration.DoesNotExist, TypeError):\n pass\n\n registration_status = services.registration_status(\n event, context.get(\"registration\"), self.request.member\n )\n context[\"registration_status\"] = services.registration_status_string(\n registration_status, event, context.get(\"registration\")\n )\n\n context[\"show_cancel_status\"] = services.show_cancel_status(registration_status)\n if context[\"show_cancel_status\"]:\n cancel_status = services.cancel_status(event, context.get(\"registration\"))\n context[\"cancel_info\"] = services.cancel_info_string(\n event, cancel_status, registration_status\n )\n\n context[\"permissions\"] = services.event_permissions(self.request.member, event)\n\n context[\"date_now\"] = timezone.now()\n\n context[\"slide_size\"] = \"slide\"\n\n context[\"participants\"] = event.participants.select_related(\n \"member\", \"member__profile\"\n )\n\n return context\n\n\nclass AlumniEventsView(TemplateView):\n \"\"\"Render the alumni events page.\"\"\"\n\n template_name = \"events/alumni.html\"\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n events = Event.objects.filter(\n published=True, category=categories.CATEGORY_ALUMNI, end__gte=timezone.now()\n ).order_by(\"end\")[:3]\n context[\"events\"] = events\n\n return context\n\n\n@method_decorator(login_required, name=\"dispatch\")\nclass EventRegisterView(View):\n \"\"\"Define a view that allows the user to register for an event using a POST request.\n\n The user should be authenticated.\n \"\"\"\n\n def get(self, request, *args, **kwargs):\n return redirect(\"events:event\", pk=kwargs[\"pk\"])\n\n def post(self, request, *args, **kwargs):\n event = get_object_or_404(Event, pk=kwargs[\"pk\"])\n try:\n services.create_registration(request.member, event)\n\n if event.has_fields:\n return redirect(\"events:registration\", event.pk)\n\n messages.success(request, 
_(\"Registration successful.\"))\n except RegistrationError as e:\n messages.error(request, e)\n\n return redirect(event)\n\n\n@method_decorator(login_required, name=\"dispatch\")\nclass EventCancelView(View):\n \"\"\"Define a view that allows the user to cancel their event registration using a POST request.\n\n The user should be authenticated.\n \"\"\"\n\n def get(self, request, *args, **kwargs):\n return redirect(\"events:event\", pk=kwargs[\"pk\"])\n\n def post(self, request, *args, **kwargs):\n event = get_object_or_404(Event, pk=kwargs[\"pk\"])\n try:\n services.cancel_registration(request.member, event)\n messages.success(request, _(\"Registration successfully cancelled.\"))\n except RegistrationError as e:\n messages.error(request, e)\n\n return redirect(event)\n\n\n@method_decorator(login_required, name=\"dispatch\")\nclass RegistrationView(FormView):\n \"\"\"Render a form that allows the user to change the details of their registration.\n\n The user should be authenticated.\n \"\"\"\n\n form_class = FieldsForm\n template_name = \"events/registration.html\"\n event = None\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context[\"event\"] = self.event\n return context\n\n def get_form_kwargs(self):\n kwargs = super().get_form_kwargs()\n kwargs[\"fields\"] = services.registration_fields(\n self.request, self.request.member, self.event\n )\n return kwargs\n\n def form_valid(self, form):\n values = form.field_values()\n try:\n services.update_registration(\n self.request.member, self.event, field_values=values\n )\n messages.success(self.request, _(\"Registration successfully saved.\"))\n return redirect(self.event)\n except RegistrationError as e:\n messages.error(self.request, e)\n return self.render_to_response(self.get_context_data(form=form))\n\n def dispatch(self, request, *args, **kwargs):\n self.event = get_object_or_404(Event, pk=self.kwargs[\"pk\"])\n try:\n if self.event.has_fields:\n return super().dispatch(request, *args, **kwargs)\n except RegistrationError:\n pass\n return redirect(self.event)\n\n\n@method_decorator(login_required, name=\"dispatch\")\nclass MarkPresentView(View):\n \"\"\"A view that allows uses to mark their presence at an event using a secret token.\"\"\"\n\n def get(self, request, *args, **kwargs):\n \"\"\"Mark a user as present.\n\n Checks if the url is correct, the event has not ended yet, and the user is registered.\n \"\"\"\n event = get_object_or_404(Event, pk=kwargs[\"pk\"])\n if kwargs[\"token\"] != event.mark_present_url_token:\n messages.error(request, _(\"Invalid url.\"))\n elif not request.member or not is_user_registered(request.member, event):\n messages.error(request, _(\"You are not registered for this event.\"))\n else:\n registration = event.registrations.get(\n member=request.member, date_cancelled=None\n )\n\n if registration.present:\n messages.info(request, _(\"You were already marked as present.\"))\n elif event.end < timezone.now():\n messages.error(request, _(\"This event has already ended.\"))\n else:\n registration.present = True\n registration.save()\n messages.success(request, _(\"You have been marked as present.\"))\n\n return redirect(event)\n\n\nclass NextEventView(View):\n def get(self, request, *args, **kwargs):\n \"\"\"HTTP redirect to the next event.\n\n Checks if there is an upcoming event. 
Raise a 404 if none exists.\n \"\"\"\n upcoming_activity = (\n Event.objects.filter(published=True, end__gte=timezone.now())\n .order_by(\"end\")\n .first()\n )\n if not upcoming_activity:\n raise Http404(\"There is no upcoming event.\")\n\n return redirect(upcoming_activity)\n", "path": "website/events/views.py"}, {"content": "\"\"\"Routes defined by the events package.\"\"\"\nfrom django.urls import include, path\n\nfrom events.feeds import EventFeed\nfrom events.views import (\n AlumniEventsView,\n EventCancelView,\n EventDetail,\n EventIndex,\n EventRegisterView,\n MarkPresentView,\n NextEventView,\n RegistrationView,\n)\n\napp_name = \"events\"\n\nurlpatterns = [\n path(\n \"events/\",\n include(\n [\n path(\"<int:pk>/\", EventDetail.as_view(), name=\"event\"),\n path(\"next/\", NextEventView.as_view(), name=\"next\"),\n path(\"<slug:slug>/\", EventDetail.as_view(), name=\"event\"),\n path(\n \"<int:pk>/registration/register/\",\n EventRegisterView.as_view(),\n name=\"register\",\n ),\n path(\n \"<int:pk>/registration/cancel/\",\n EventCancelView.as_view(),\n name=\"cancel\",\n ),\n path(\n \"<int:pk>/mark-present/<uuid:token>/\",\n MarkPresentView.as_view(),\n name=\"mark-present\",\n ),\n path(\n \"<int:pk>/registration/\",\n RegistrationView.as_view(),\n name=\"registration\",\n ),\n path(\"\", EventIndex.as_view(), name=\"index\"),\n path(\"ical/nl.ics\", EventFeed(), name=\"ical-nl\"),\n path(\"ical/en.ics\", EventFeed(), name=\"ical-en\"),\n ]\n ),\n ),\n path(\"association/alumni/\", AlumniEventsView.as_view(), name=\"alumni\"),\n]\n", "path": "website/events/urls.py"}]}
| 2,912 | 405 |
gh_patches_debug_17774
|
rasdani/github-patches
|
git_diff
|
GeotrekCE__Geotrek-admin-2580
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Static pages images URL?
I added a cover_image to a static page on the DEMO server, on this page: https://geotrekdemo.ecrins-parcnational.fr/admin/flatpages/flatpage/3/change/ (admin / admin)
The image URL reported (in the Adminsite and in the API V2) is https://geotrekdemo.ecrins-parcnational.fr/media/paperclip/flatpages_flatpage/3/biodivecrins.jpg, but it is not accessible.
The image is indeed on my server, in /opt/geotrek-admin/var/media/paperclip/flatpages_flatpage/3/
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mapentity/views/base.py`
Content:
```
1 from __future__ import unicode_literals
2
3 from io import BytesIO
4 import json
5 import logging
6 import mimetypes
7 import os
8 from datetime import datetime
9 import re
10 from urllib.parse import quote
11
12 from django.conf import settings
13 from django.contrib.auth.decorators import login_required
14 from django.contrib.gis.db.models import GeometryField
15 from django.core.exceptions import PermissionDenied
16 from django.urls import reverse
17 from django.http import (HttpResponse, HttpResponseBadRequest, Http404)
18 from django.shortcuts import get_object_or_404
19 from django.views import static
20 from django.views.decorators.csrf import csrf_exempt
21 from django.views.decorators.http import require_http_methods
22 from django.views.generic.base import TemplateView
23 from paperclip.settings import get_attachment_permission, get_attachment_model
24
25 from mapentity import models as mapentity_models
26 from .mixins import JSONResponseMixin, FilterListMixin, ModelViewMixin
27 from ..decorators import view_permission_required
28 from ..helpers import capture_image
29 from ..settings import app_settings
30
31 logger = logging.getLogger(__name__)
32
33
34 def serve_attachment(request, path):
35 """
36 Serve media/ for authorized users only, since it can contain sensitive
37 information (uploaded documents)
38 """
39 original_path = re.sub(r'\.\d+x\d+_q\d+(_crop)?\.(jpg|png|jpeg)$', '', path, count=1, flags=re.IGNORECASE)
40 attachment = get_object_or_404(get_attachment_model(), attachment_file=original_path)
41 obj = attachment.content_object
42 if not issubclass(obj._meta.model, mapentity_models.MapEntityMixin):
43 raise Http404
44 if not obj.is_public():
45 if not request.user.is_authenticated:
46 raise PermissionDenied
47 if not request.user.has_perm(get_attachment_permission('read_attachment')):
48 raise PermissionDenied
49 if not request.user.has_perm('{}.read_{}'.format(obj._meta.app_label, obj._meta.model_name)):
50 raise PermissionDenied
51
52 content_type, encoding = mimetypes.guess_type(path)
53
54 if settings.DEBUG:
55 response = static.serve(request, path, settings.MEDIA_ROOT)
56 else:
57 response = HttpResponse()
58 response[app_settings['SENDFILE_HTTP_HEADER']] = os.path.join(settings.MEDIA_URL_SECURE, path)
59 response["Content-Type"] = content_type or 'application/octet-stream'
60 if encoding:
61 response["Content-Encoding"] = encoding
62 if app_settings['SERVE_MEDIA_AS_ATTACHMENT']:
63 response['Content-Disposition'] = "attachment; filename={0}".format(
64 os.path.basename(path))
65 return response
66
67
68 class JSSettings(JSONResponseMixin, TemplateView):
69 """
70 Javascript settings, in JSON format.
71 Likely to be overriden. Contains only necessary stuff
72 for mapentity.
73 """
74
75 def get_context_data(self):
76 dictsettings = {}
77 dictsettings['debug'] = settings.DEBUG
78 dictsettings['map'] = dict(
79 extent=getattr(settings, 'LEAFLET_CONFIG', {}).get('SPATIAL_EXTENT'),
80 styles=app_settings['MAP_STYLES'],
81 )
82
83 # URLs
84 root_url = app_settings['ROOT_URL']
85 root_url = root_url if root_url.endswith('/') else '{}/'.format(root_url)
86 dictsettings['urls'] = {}
87 dictsettings['urls']['root'] = root_url
88
89 from django.db import models
90 from mapentity.registry import MapEntityOptions
91
92 class ModelName(mapentity_models.MapEntityMixin, models.Model):
93 pass
94
95 options = MapEntityOptions(ModelName)
96
97 dictsettings['urls']['static'] = settings.STATIC_URL
98 dictsettings['urls']['layer'] = '{}{}'.format(root_url, options._url_path(mapentity_models.ENTITY_LAYER)[1:-1])
99 dictsettings['urls']['detail'] = '{}modelname/0/'.format(root_url)
100 dictsettings['urls']['format_list'] = '{}{}'.format(
101 root_url, options._url_path(mapentity_models.ENTITY_FORMAT_LIST)[1:-1]
102 )
103 dictsettings['urls']['screenshot'] = reverse("mapentity:map_screenshot")
104
105 # Useful for JS calendars
106 date_format = settings.DATE_INPUT_FORMATS[0].replace('%Y', 'yyyy').replace('%m', 'mm').replace('%d', 'dd')
107 dictsettings['date_format'] = date_format
108 # Languages
109 dictsettings['languages'] = dict(available=dict(app_settings['TRANSLATED_LANGUAGES']),
110 default=app_settings['LANGUAGE_CODE'])
111 return dictsettings
112
113
114 class BaseListView(FilterListMixin, ModelViewMixin):
115 columns = None
116
117 def __init__(self, *args, **kwargs):
118 super(BaseListView, self).__init__(*args, **kwargs)
119
120 if self.columns is None:
121 # All model fields except geometries
122 self.columns = [field.name for field in self.get_model()._meta.fields
123 if not isinstance(field, GeometryField)]
124 # Id column should be the first one
125 self.columns.remove('id')
126 self.columns.insert(0, 'id')
127
128 @view_permission_required()
129 def dispatch(self, *args, **kwargs):
130 return super(BaseListView, self).dispatch(*args, **kwargs)
131
132
133 @csrf_exempt
134 @login_required
135 def map_screenshot(request):
136 """
137 This view allows to take screenshots, via a django-screamshot service, of
138 the map **currently viewed by the user**.
139
140 - A context full of information is built on client-side and posted here.
141 - We reproduce this context, via headless browser, and take a capture
142 - We return the resulting image as attachment.
143
144 This seems overkill ? Please look around and find a better way.
145 """
146 try:
147 printcontext = request.POST['printcontext']
148 assert len(printcontext) < 2048, "Print context is way too big."
149
150 # Prepare context, extract and add infos
151 context = json.loads(printcontext)
152 selector = context.pop('selector')
153 map_url = context.pop('url')
154 map_url = request.build_absolute_uri(map_url)
155 context['print'] = True
156 printcontext = json.dumps(context)
157 contextencoded = quote(printcontext)
158 map_url += '?context=%s' % contextencoded
159 logger.debug("Capture %s" % map_url)
160
161 # Capture image and return it
162 width = context.get('viewport', {}).get('width')
163 height = context.get('viewport', {}).get('height')
164
165 stream = BytesIO()
166 capture_image(map_url, stream, width=width, height=height, selector=selector)
167 response = HttpResponse(stream.getvalue(), content_type='image/png')
168 response['Content-Disposition'] = 'attachment; filename=%s.png' % datetime.now().strftime('%Y%m%d-%H%M%S')
169 return response
170
171 except Exception as exc:
172 logger.exception(exc)
173 return HttpResponseBadRequest(exc)
174
175
176 @require_http_methods(["POST"])
177 @csrf_exempt
178 @login_required
179 def history_delete(request, path=None):
180 path = request.POST.get('path', path)
181 if path:
182 history = request.session.get('history')
183 if history:
184 history = [h for h in history if h['path'] != path]
185 request.session['history'] = history
186 return HttpResponse()
187
```
Path: `geotrek/flatpages/models.py`
Content:
```
1 import mimetypes
2
3 from django.contrib.contenttypes.fields import GenericRelation
4 from django.db import models
5 from django.utils.translation import gettext_lazy as _
6 from django.template.defaultfilters import slugify
7 from django.conf import settings
8 from django.urls import reverse
9
10 from bs4 import BeautifulSoup
11 from extended_choices import Choices
12
13 from mapentity.serializers import plain_text
14 from geotrek.common.mixins import TimeStampedModelMixin, BasePublishableMixin
15
16
17 FLATPAGES_TARGETS = Choices(
18 ('ALL', 'all', _('All')),
19 ('MOBILE', 'mobile', _('Mobile')),
20 ('HIDDEN', 'hidden', _('Hidden')),
21 ('WEB', 'web', _('Web')),
22 )
23
24
25 class FlatPage(BasePublishableMixin, TimeStampedModelMixin):
26 """
27 Manage *Geotrek-rando* static pages from Geotrek admin.
28
29 Historically, we started static pages as static HTML files within
30 *Geotrek-rando* folders.
31 """
32 title = models.CharField(verbose_name=_('Title'), max_length=200)
33 external_url = models.URLField(verbose_name=_('External URL'), blank=True, default='',
34 help_text=_('Link to external website instead of HTML content'))
35 content = models.TextField(verbose_name=_('Content'), null=True, blank=True,
36 help_text=_('HTML content'))
37 target = models.CharField(verbose_name=_('Target'), max_length=12, choices=FLATPAGES_TARGETS,
38 default=FLATPAGES_TARGETS.ALL)
39 source = models.ManyToManyField('common.RecordSource',
40 blank=True, related_name='flatpages',
41 verbose_name=_("Source"))
42 portal = models.ManyToManyField('common.TargetPortal',
43 blank=True, related_name='flatpages',
44 verbose_name=_("Portal"))
45 order = models.IntegerField(default=None, null=True, blank=True,
46 help_text=_("ID order if blank", ),
47 verbose_name=_("Order"))
48 attachments = GenericRelation(settings.PAPERCLIP_ATTACHMENT_MODEL)
49
50 @property
51 def slug(self):
52 return slugify(self.title)
53
54 class Meta:
55 verbose_name = _('Flat page')
56 verbose_name_plural = _('Flat pages')
57 ordering = ['order', 'id']
58 permissions = (
59 ("read_flatpage", "Can read FlatPage"),
60 )
61
62 def __str__(self):
63 return self.title
64
65 def get_permission_codename(self, *args):
66 return
67
68 def clean(self):
69 html_content = ''
70 for language in settings.MAPENTITY_CONFIG['TRANSLATED_LANGUAGES']:
71 html_content += getattr(self, 'content_%s' % language[0], None) or ''
72
73 def parse_media(self):
74 soup = BeautifulSoup(self.content or '', 'lxml')
75 images = soup.findAll('img')
76 results = []
77 for image in images:
78 url = image.get('src')
79 if url is None:
80 continue
81
82 mt = mimetypes.guess_type(url, strict=True)[0]
83 if mt is None:
84 mt = 'application/octet-stream'
85
86 results.append({
87 'url': url,
88 'title': image.get('title', ''),
89 'alt': image.get('alt', ''),
90 'mimetype': mt.split('/'),
91 })
92
93 return results
94
95 def get_add_url(self):
96 return reverse('admin:flatpages_flatpage_add')
97
98 def get_update_url(self):
99 return reverse('admin:flatpages_flatpage_change', args=[self.pk])
100
101 def get_delete_url(self):
102 return reverse('admin:flatpages_flatpage_delete', args=[self.pk])
103
104 @property
105 def rando_url(self):
106 return 'informations/{}/'.format(self.slug)
107
108 @property
109 def meta_description(self):
110 return plain_text(self.content)[:500]
111
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/geotrek/flatpages/models.py b/geotrek/flatpages/models.py
--- a/geotrek/flatpages/models.py
+++ b/geotrek/flatpages/models.py
@@ -108,3 +108,6 @@
@property
def meta_description(self):
return plain_text(self.content)[:500]
+
+ def is_public(self):
+ return self.any_published
diff --git a/mapentity/views/base.py b/mapentity/views/base.py
--- a/mapentity/views/base.py
+++ b/mapentity/views/base.py
@@ -39,7 +39,7 @@
original_path = re.sub(r'\.\d+x\d+_q\d+(_crop)?\.(jpg|png|jpeg)$', '', path, count=1, flags=re.IGNORECASE)
attachment = get_object_or_404(get_attachment_model(), attachment_file=original_path)
obj = attachment.content_object
- if not issubclass(obj._meta.model, mapentity_models.MapEntityMixin):
+ if not hasattr(obj._meta.model, 'attachments'):
raise Http404
if not obj.is_public():
if not request.user.is_authenticated:
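In short: `FlatPage` does not inherit from `MapEntityMixin`, so the old `issubclass` check in `serve_attachment` rejected flat-page media with a 404; the patch accepts any model that exposes an `attachments` relation and gives `FlatPage` an `is_public()` built on its publication flags. Below is a minimal sketch of the two checks, assuming the model definitions shown above (the real code runs them against `obj._meta.model`, which is the same class).
```python
# Illustrative only -- mirrors the before/after condition in serve_attachment().
from geotrek.flatpages.models import FlatPage
from mapentity import models as mapentity_models

issubclass(FlatPage, mapentity_models.MapEntityMixin)  # False: old check -> Http404
hasattr(FlatPage, "attachments")                       # True: new check -> file is served
```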
|
{"golden_diff": "diff --git a/geotrek/flatpages/models.py b/geotrek/flatpages/models.py\n--- a/geotrek/flatpages/models.py\n+++ b/geotrek/flatpages/models.py\n@@ -108,3 +108,6 @@\n @property\n def meta_description(self):\n return plain_text(self.content)[:500]\n+\n+ def is_public(self):\n+ return self.any_published\ndiff --git a/mapentity/views/base.py b/mapentity/views/base.py\n--- a/mapentity/views/base.py\n+++ b/mapentity/views/base.py\n@@ -39,7 +39,7 @@\n original_path = re.sub(r'\\.\\d+x\\d+_q\\d+(_crop)?\\.(jpg|png|jpeg)$', '', path, count=1, flags=re.IGNORECASE)\n attachment = get_object_or_404(get_attachment_model(), attachment_file=original_path)\n obj = attachment.content_object\n- if not issubclass(obj._meta.model, mapentity_models.MapEntityMixin):\n+ if not hasattr(obj._meta.model, 'attachments'):\n raise Http404\n if not obj.is_public():\n if not request.user.is_authenticated:\n", "issue": "Static pages images URL ?\nJ'ai ajout\u00e9 une cover_image \u00e0 une page statique sur le serveur de DEMO sur cette page : https://geotrekdemo.ecrins-parcnational.fr/admin/flatpages/flatpage/3/change/ (admin / admin)\r\n\r\nL'URL de l'image indiqu\u00e9e (dans l'Adminsite et dans l'API V2) est https://geotrekdemo.ecrins-parcnational.fr/media/paperclip/flatpages_flatpage/3/biodivecrins.jpg, mais elle n'est pas accessible.\r\n\r\nL'image est bien sur mon serveur dans /opt/geotrek-admin/var/media/paperclip/flatpages_flatpage/3/\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nfrom io import BytesIO\nimport json\nimport logging\nimport mimetypes\nimport os\nfrom datetime import datetime\nimport re\nfrom urllib.parse import quote\n\nfrom django.conf import settings\nfrom django.contrib.auth.decorators import login_required\nfrom django.contrib.gis.db.models import GeometryField\nfrom django.core.exceptions import PermissionDenied\nfrom django.urls import reverse\nfrom django.http import (HttpResponse, HttpResponseBadRequest, Http404)\nfrom django.shortcuts import get_object_or_404\nfrom django.views import static\nfrom django.views.decorators.csrf import csrf_exempt\nfrom django.views.decorators.http import require_http_methods\nfrom django.views.generic.base import TemplateView\nfrom paperclip.settings import get_attachment_permission, get_attachment_model\n\nfrom mapentity import models as mapentity_models\nfrom .mixins import JSONResponseMixin, FilterListMixin, ModelViewMixin\nfrom ..decorators import view_permission_required\nfrom ..helpers import capture_image\nfrom ..settings import app_settings\n\nlogger = logging.getLogger(__name__)\n\n\ndef serve_attachment(request, path):\n \"\"\"\n Serve media/ for authorized users only, since it can contain sensitive\n information (uploaded documents)\n \"\"\"\n original_path = re.sub(r'\\.\\d+x\\d+_q\\d+(_crop)?\\.(jpg|png|jpeg)$', '', path, count=1, flags=re.IGNORECASE)\n attachment = get_object_or_404(get_attachment_model(), attachment_file=original_path)\n obj = attachment.content_object\n if not issubclass(obj._meta.model, mapentity_models.MapEntityMixin):\n raise Http404\n if not obj.is_public():\n if not request.user.is_authenticated:\n raise PermissionDenied\n if not request.user.has_perm(get_attachment_permission('read_attachment')):\n raise PermissionDenied\n if not request.user.has_perm('{}.read_{}'.format(obj._meta.app_label, obj._meta.model_name)):\n raise PermissionDenied\n\n content_type, encoding = mimetypes.guess_type(path)\n\n if settings.DEBUG:\n response = static.serve(request, path, 
settings.MEDIA_ROOT)\n else:\n response = HttpResponse()\n response[app_settings['SENDFILE_HTTP_HEADER']] = os.path.join(settings.MEDIA_URL_SECURE, path)\n response[\"Content-Type\"] = content_type or 'application/octet-stream'\n if encoding:\n response[\"Content-Encoding\"] = encoding\n if app_settings['SERVE_MEDIA_AS_ATTACHMENT']:\n response['Content-Disposition'] = \"attachment; filename={0}\".format(\n os.path.basename(path))\n return response\n\n\nclass JSSettings(JSONResponseMixin, TemplateView):\n \"\"\"\n Javascript settings, in JSON format.\n Likely to be overriden. Contains only necessary stuff\n for mapentity.\n \"\"\"\n\n def get_context_data(self):\n dictsettings = {}\n dictsettings['debug'] = settings.DEBUG\n dictsettings['map'] = dict(\n extent=getattr(settings, 'LEAFLET_CONFIG', {}).get('SPATIAL_EXTENT'),\n styles=app_settings['MAP_STYLES'],\n )\n\n # URLs\n root_url = app_settings['ROOT_URL']\n root_url = root_url if root_url.endswith('/') else '{}/'.format(root_url)\n dictsettings['urls'] = {}\n dictsettings['urls']['root'] = root_url\n\n from django.db import models\n from mapentity.registry import MapEntityOptions\n\n class ModelName(mapentity_models.MapEntityMixin, models.Model):\n pass\n\n options = MapEntityOptions(ModelName)\n\n dictsettings['urls']['static'] = settings.STATIC_URL\n dictsettings['urls']['layer'] = '{}{}'.format(root_url, options._url_path(mapentity_models.ENTITY_LAYER)[1:-1])\n dictsettings['urls']['detail'] = '{}modelname/0/'.format(root_url)\n dictsettings['urls']['format_list'] = '{}{}'.format(\n root_url, options._url_path(mapentity_models.ENTITY_FORMAT_LIST)[1:-1]\n )\n dictsettings['urls']['screenshot'] = reverse(\"mapentity:map_screenshot\")\n\n # Useful for JS calendars\n date_format = settings.DATE_INPUT_FORMATS[0].replace('%Y', 'yyyy').replace('%m', 'mm').replace('%d', 'dd')\n dictsettings['date_format'] = date_format\n # Languages\n dictsettings['languages'] = dict(available=dict(app_settings['TRANSLATED_LANGUAGES']),\n default=app_settings['LANGUAGE_CODE'])\n return dictsettings\n\n\nclass BaseListView(FilterListMixin, ModelViewMixin):\n columns = None\n\n def __init__(self, *args, **kwargs):\n super(BaseListView, self).__init__(*args, **kwargs)\n\n if self.columns is None:\n # All model fields except geometries\n self.columns = [field.name for field in self.get_model()._meta.fields\n if not isinstance(field, GeometryField)]\n # Id column should be the first one\n self.columns.remove('id')\n self.columns.insert(0, 'id')\n\n @view_permission_required()\n def dispatch(self, *args, **kwargs):\n return super(BaseListView, self).dispatch(*args, **kwargs)\n\n\n@csrf_exempt\n@login_required\ndef map_screenshot(request):\n \"\"\"\n This view allows to take screenshots, via a django-screamshot service, of\n the map **currently viewed by the user**.\n\n - A context full of information is built on client-side and posted here.\n - We reproduce this context, via headless browser, and take a capture\n - We return the resulting image as attachment.\n\n This seems overkill ? 
Please look around and find a better way.\n \"\"\"\n try:\n printcontext = request.POST['printcontext']\n assert len(printcontext) < 2048, \"Print context is way too big.\"\n\n # Prepare context, extract and add infos\n context = json.loads(printcontext)\n selector = context.pop('selector')\n map_url = context.pop('url')\n map_url = request.build_absolute_uri(map_url)\n context['print'] = True\n printcontext = json.dumps(context)\n contextencoded = quote(printcontext)\n map_url += '?context=%s' % contextencoded\n logger.debug(\"Capture %s\" % map_url)\n\n # Capture image and return it\n width = context.get('viewport', {}).get('width')\n height = context.get('viewport', {}).get('height')\n\n stream = BytesIO()\n capture_image(map_url, stream, width=width, height=height, selector=selector)\n response = HttpResponse(stream.getvalue(), content_type='image/png')\n response['Content-Disposition'] = 'attachment; filename=%s.png' % datetime.now().strftime('%Y%m%d-%H%M%S')\n return response\n\n except Exception as exc:\n logger.exception(exc)\n return HttpResponseBadRequest(exc)\n\n\n@require_http_methods([\"POST\"])\n@csrf_exempt\n@login_required\ndef history_delete(request, path=None):\n path = request.POST.get('path', path)\n if path:\n history = request.session.get('history')\n if history:\n history = [h for h in history if h['path'] != path]\n request.session['history'] = history\n return HttpResponse()\n", "path": "mapentity/views/base.py"}, {"content": "import mimetypes\n\nfrom django.contrib.contenttypes.fields import GenericRelation\nfrom django.db import models\nfrom django.utils.translation import gettext_lazy as _\nfrom django.template.defaultfilters import slugify\nfrom django.conf import settings\nfrom django.urls import reverse\n\nfrom bs4 import BeautifulSoup\nfrom extended_choices import Choices\n\nfrom mapentity.serializers import plain_text\nfrom geotrek.common.mixins import TimeStampedModelMixin, BasePublishableMixin\n\n\nFLATPAGES_TARGETS = Choices(\n ('ALL', 'all', _('All')),\n ('MOBILE', 'mobile', _('Mobile')),\n ('HIDDEN', 'hidden', _('Hidden')),\n ('WEB', 'web', _('Web')),\n)\n\n\nclass FlatPage(BasePublishableMixin, TimeStampedModelMixin):\n \"\"\"\n Manage *Geotrek-rando* static pages from Geotrek admin.\n\n Historically, we started static pages as static HTML files within\n *Geotrek-rando* folders.\n \"\"\"\n title = models.CharField(verbose_name=_('Title'), max_length=200)\n external_url = models.URLField(verbose_name=_('External URL'), blank=True, default='',\n help_text=_('Link to external website instead of HTML content'))\n content = models.TextField(verbose_name=_('Content'), null=True, blank=True,\n help_text=_('HTML content'))\n target = models.CharField(verbose_name=_('Target'), max_length=12, choices=FLATPAGES_TARGETS,\n default=FLATPAGES_TARGETS.ALL)\n source = models.ManyToManyField('common.RecordSource',\n blank=True, related_name='flatpages',\n verbose_name=_(\"Source\"))\n portal = models.ManyToManyField('common.TargetPortal',\n blank=True, related_name='flatpages',\n verbose_name=_(\"Portal\"))\n order = models.IntegerField(default=None, null=True, blank=True,\n help_text=_(\"ID order if blank\", ),\n verbose_name=_(\"Order\"))\n attachments = GenericRelation(settings.PAPERCLIP_ATTACHMENT_MODEL)\n\n @property\n def slug(self):\n return slugify(self.title)\n\n class Meta:\n verbose_name = _('Flat page')\n verbose_name_plural = _('Flat pages')\n ordering = ['order', 'id']\n permissions = (\n (\"read_flatpage\", \"Can read FlatPage\"),\n )\n\n def 
__str__(self):\n return self.title\n\n def get_permission_codename(self, *args):\n return\n\n def clean(self):\n html_content = ''\n for language in settings.MAPENTITY_CONFIG['TRANSLATED_LANGUAGES']:\n html_content += getattr(self, 'content_%s' % language[0], None) or ''\n\n def parse_media(self):\n soup = BeautifulSoup(self.content or '', 'lxml')\n images = soup.findAll('img')\n results = []\n for image in images:\n url = image.get('src')\n if url is None:\n continue\n\n mt = mimetypes.guess_type(url, strict=True)[0]\n if mt is None:\n mt = 'application/octet-stream'\n\n results.append({\n 'url': url,\n 'title': image.get('title', ''),\n 'alt': image.get('alt', ''),\n 'mimetype': mt.split('/'),\n })\n\n return results\n\n def get_add_url(self):\n return reverse('admin:flatpages_flatpage_add')\n\n def get_update_url(self):\n return reverse('admin:flatpages_flatpage_change', args=[self.pk])\n\n def get_delete_url(self):\n return reverse('admin:flatpages_flatpage_delete', args=[self.pk])\n\n @property\n def rando_url(self):\n return 'informations/{}/'.format(self.slug)\n\n @property\n def meta_description(self):\n return plain_text(self.content)[:500]\n", "path": "geotrek/flatpages/models.py"}], "after_files": [{"content": "from __future__ import unicode_literals\n\nfrom io import BytesIO\nimport json\nimport logging\nimport mimetypes\nimport os\nfrom datetime import datetime\nimport re\nfrom urllib.parse import quote\n\nfrom django.conf import settings\nfrom django.contrib.auth.decorators import login_required\nfrom django.contrib.gis.db.models import GeometryField\nfrom django.core.exceptions import PermissionDenied\nfrom django.urls import reverse\nfrom django.http import (HttpResponse, HttpResponseBadRequest, Http404)\nfrom django.shortcuts import get_object_or_404\nfrom django.views import static\nfrom django.views.decorators.csrf import csrf_exempt\nfrom django.views.decorators.http import require_http_methods\nfrom django.views.generic.base import TemplateView\nfrom paperclip.settings import get_attachment_permission, get_attachment_model\n\nfrom mapentity import models as mapentity_models\nfrom .mixins import JSONResponseMixin, FilterListMixin, ModelViewMixin\nfrom ..decorators import view_permission_required\nfrom ..helpers import capture_image\nfrom ..settings import app_settings\n\nlogger = logging.getLogger(__name__)\n\n\ndef serve_attachment(request, path):\n \"\"\"\n Serve media/ for authorized users only, since it can contain sensitive\n information (uploaded documents)\n \"\"\"\n original_path = re.sub(r'\\.\\d+x\\d+_q\\d+(_crop)?\\.(jpg|png|jpeg)$', '', path, count=1, flags=re.IGNORECASE)\n attachment = get_object_or_404(get_attachment_model(), attachment_file=original_path)\n obj = attachment.content_object\n if not hasattr(obj._meta.model, 'attachments'):\n raise Http404\n if not obj.is_public():\n if not request.user.is_authenticated:\n raise PermissionDenied\n if not request.user.has_perm(get_attachment_permission('read_attachment')):\n raise PermissionDenied\n if not request.user.has_perm('{}.read_{}'.format(obj._meta.app_label, obj._meta.model_name)):\n raise PermissionDenied\n\n content_type, encoding = mimetypes.guess_type(path)\n\n if settings.DEBUG:\n response = static.serve(request, path, settings.MEDIA_ROOT)\n else:\n response = HttpResponse()\n response[app_settings['SENDFILE_HTTP_HEADER']] = os.path.join(settings.MEDIA_URL_SECURE, path)\n response[\"Content-Type\"] = content_type or 'application/octet-stream'\n if encoding:\n response[\"Content-Encoding\"] = 
encoding\n if app_settings['SERVE_MEDIA_AS_ATTACHMENT']:\n response['Content-Disposition'] = \"attachment; filename={0}\".format(\n os.path.basename(path))\n return response\n\n\nclass JSSettings(JSONResponseMixin, TemplateView):\n \"\"\"\n Javascript settings, in JSON format.\n Likely to be overriden. Contains only necessary stuff\n for mapentity.\n \"\"\"\n\n def get_context_data(self):\n dictsettings = {}\n dictsettings['debug'] = settings.DEBUG\n dictsettings['map'] = dict(\n extent=getattr(settings, 'LEAFLET_CONFIG', {}).get('SPATIAL_EXTENT'),\n styles=app_settings['MAP_STYLES'],\n )\n\n # URLs\n root_url = app_settings['ROOT_URL']\n root_url = root_url if root_url.endswith('/') else '{}/'.format(root_url)\n dictsettings['urls'] = {}\n dictsettings['urls']['root'] = root_url\n\n from django.db import models\n from mapentity.registry import MapEntityOptions\n\n class ModelName(mapentity_models.MapEntityMixin, models.Model):\n pass\n\n options = MapEntityOptions(ModelName)\n\n dictsettings['urls']['static'] = settings.STATIC_URL\n dictsettings['urls']['layer'] = '{}{}'.format(root_url, options._url_path(mapentity_models.ENTITY_LAYER)[1:-1])\n dictsettings['urls']['detail'] = '{}modelname/0/'.format(root_url)\n dictsettings['urls']['format_list'] = '{}{}'.format(\n root_url, options._url_path(mapentity_models.ENTITY_FORMAT_LIST)[1:-1]\n )\n dictsettings['urls']['screenshot'] = reverse(\"mapentity:map_screenshot\")\n\n # Useful for JS calendars\n date_format = settings.DATE_INPUT_FORMATS[0].replace('%Y', 'yyyy').replace('%m', 'mm').replace('%d', 'dd')\n dictsettings['date_format'] = date_format\n # Languages\n dictsettings['languages'] = dict(available=dict(app_settings['TRANSLATED_LANGUAGES']),\n default=app_settings['LANGUAGE_CODE'])\n return dictsettings\n\n\nclass BaseListView(FilterListMixin, ModelViewMixin):\n columns = None\n\n def __init__(self, *args, **kwargs):\n super(BaseListView, self).__init__(*args, **kwargs)\n\n if self.columns is None:\n # All model fields except geometries\n self.columns = [field.name for field in self.get_model()._meta.fields\n if not isinstance(field, GeometryField)]\n # Id column should be the first one\n self.columns.remove('id')\n self.columns.insert(0, 'id')\n\n @view_permission_required()\n def dispatch(self, *args, **kwargs):\n return super(BaseListView, self).dispatch(*args, **kwargs)\n\n\n@csrf_exempt\n@login_required\ndef map_screenshot(request):\n \"\"\"\n This view allows to take screenshots, via a django-screamshot service, of\n the map **currently viewed by the user**.\n\n - A context full of information is built on client-side and posted here.\n - We reproduce this context, via headless browser, and take a capture\n - We return the resulting image as attachment.\n\n This seems overkill ? 
Please look around and find a better way.\n \"\"\"\n try:\n printcontext = request.POST['printcontext']\n assert len(printcontext) < 2048, \"Print context is way too big.\"\n\n # Prepare context, extract and add infos\n context = json.loads(printcontext)\n selector = context.pop('selector')\n map_url = context.pop('url')\n map_url = request.build_absolute_uri(map_url)\n context['print'] = True\n printcontext = json.dumps(context)\n contextencoded = quote(printcontext)\n map_url += '?context=%s' % contextencoded\n logger.debug(\"Capture %s\" % map_url)\n\n # Capture image and return it\n width = context.get('viewport', {}).get('width')\n height = context.get('viewport', {}).get('height')\n\n stream = BytesIO()\n capture_image(map_url, stream, width=width, height=height, selector=selector)\n response = HttpResponse(stream.getvalue(), content_type='image/png')\n response['Content-Disposition'] = 'attachment; filename=%s.png' % datetime.now().strftime('%Y%m%d-%H%M%S')\n return response\n\n except Exception as exc:\n logger.exception(exc)\n return HttpResponseBadRequest(exc)\n\n\n@require_http_methods([\"POST\"])\n@csrf_exempt\n@login_required\ndef history_delete(request, path=None):\n path = request.POST.get('path', path)\n if path:\n history = request.session.get('history')\n if history:\n history = [h for h in history if h['path'] != path]\n request.session['history'] = history\n return HttpResponse()\n", "path": "mapentity/views/base.py"}, {"content": "import mimetypes\n\nfrom django.contrib.contenttypes.fields import GenericRelation\nfrom django.db import models\nfrom django.utils.translation import gettext_lazy as _\nfrom django.template.defaultfilters import slugify\nfrom django.conf import settings\nfrom django.urls import reverse\n\nfrom bs4 import BeautifulSoup\nfrom extended_choices import Choices\n\nfrom mapentity.serializers import plain_text\nfrom geotrek.common.mixins import TimeStampedModelMixin, BasePublishableMixin\n\n\nFLATPAGES_TARGETS = Choices(\n ('ALL', 'all', _('All')),\n ('MOBILE', 'mobile', _('Mobile')),\n ('HIDDEN', 'hidden', _('Hidden')),\n ('WEB', 'web', _('Web')),\n)\n\n\nclass FlatPage(BasePublishableMixin, TimeStampedModelMixin):\n \"\"\"\n Manage *Geotrek-rando* static pages from Geotrek admin.\n\n Historically, we started static pages as static HTML files within\n *Geotrek-rando* folders.\n \"\"\"\n title = models.CharField(verbose_name=_('Title'), max_length=200)\n external_url = models.URLField(verbose_name=_('External URL'), blank=True, default='',\n help_text=_('Link to external website instead of HTML content'))\n content = models.TextField(verbose_name=_('Content'), null=True, blank=True,\n help_text=_('HTML content'))\n target = models.CharField(verbose_name=_('Target'), max_length=12, choices=FLATPAGES_TARGETS,\n default=FLATPAGES_TARGETS.ALL)\n source = models.ManyToManyField('common.RecordSource',\n blank=True, related_name='flatpages',\n verbose_name=_(\"Source\"))\n portal = models.ManyToManyField('common.TargetPortal',\n blank=True, related_name='flatpages',\n verbose_name=_(\"Portal\"))\n order = models.IntegerField(default=None, null=True, blank=True,\n help_text=_(\"ID order if blank\", ),\n verbose_name=_(\"Order\"))\n attachments = GenericRelation(settings.PAPERCLIP_ATTACHMENT_MODEL)\n\n @property\n def slug(self):\n return slugify(self.title)\n\n class Meta:\n verbose_name = _('Flat page')\n verbose_name_plural = _('Flat pages')\n ordering = ['order', 'id']\n permissions = (\n (\"read_flatpage\", \"Can read FlatPage\"),\n )\n\n def 
__str__(self):\n return self.title\n\n def get_permission_codename(self, *args):\n return\n\n def clean(self):\n html_content = ''\n for language in settings.MAPENTITY_CONFIG['TRANSLATED_LANGUAGES']:\n html_content += getattr(self, 'content_%s' % language[0], None) or ''\n\n def parse_media(self):\n soup = BeautifulSoup(self.content or '', 'lxml')\n images = soup.findAll('img')\n results = []\n for image in images:\n url = image.get('src')\n if url is None:\n continue\n\n mt = mimetypes.guess_type(url, strict=True)[0]\n if mt is None:\n mt = 'application/octet-stream'\n\n results.append({\n 'url': url,\n 'title': image.get('title', ''),\n 'alt': image.get('alt', ''),\n 'mimetype': mt.split('/'),\n })\n\n return results\n\n def get_add_url(self):\n return reverse('admin:flatpages_flatpage_add')\n\n def get_update_url(self):\n return reverse('admin:flatpages_flatpage_change', args=[self.pk])\n\n def get_delete_url(self):\n return reverse('admin:flatpages_flatpage_delete', args=[self.pk])\n\n @property\n def rando_url(self):\n return 'informations/{}/'.format(self.slug)\n\n @property\n def meta_description(self):\n return plain_text(self.content)[:500]\n\n def is_public(self):\n return self.any_published\n", "path": "geotrek/flatpages/models.py"}]}
| 3,456 | 258 |
gh_patches_debug_9013
|
rasdani/github-patches
|
git_diff
|
bridgecrewio__checkov-1817
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
checkov silently returns [] when an `--external-checks-dir` doesn't have an init file
**Describe the bug**
When I use `--external-checks-dir` with a directory without an `__init__.py`, all `checkov` does is output `[]` when doing --output json.
**Expected behavior**
Yell at me for giving a directory without any `__init__.py` file in it.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `checkov/common/runners/runner_registry.py`
Content:
```
1 import argparse
2 import itertools
3 import json
4 import logging
5 import os
6 from abc import abstractmethod
7 from typing import List, Union, Dict, Any, Tuple, Optional
8
9 from typing_extensions import Literal
10 import platform
11 import multiprocessing
12 from multiprocessing import Pipe
13
14 from cyclonedx.output import get_instance as get_cyclonedx_outputter
15
16 from checkov.common.bridgecrew.integration_features.integration_feature_registry import integration_feature_registry
17 from checkov.common.output.baseline import Baseline
18 from checkov.common.output.report import Report, report_to_cyclonedx
19 from checkov.common.runners.base_runner import BaseRunner
20 from checkov.common.util import data_structures_utils
21 from checkov.runner_filter import RunnerFilter
22 from checkov.terraform.context_parsers.registry import parser_registry
23 from checkov.terraform.runner import Runner as tf_runner
24 from checkov.terraform.parser import Parser
25
26
27 CHECK_BLOCK_TYPES = frozenset(["resource", "data", "provider", "module"])
28 OUTPUT_CHOICES = ["cli", "cyclonedx", "json", "junitxml", "github_failed_only", "sarif"]
29 OUTPUT_DELIMITER = "\n--- OUTPUT DELIMITER ---\n"
30
31
32 class RunnerRegistry:
33 runners: List[BaseRunner] = []
34 scan_reports: List[Report] = []
35 banner = ""
36
37 def __init__(self, banner: str, runner_filter: RunnerFilter, *runners: BaseRunner) -> None:
38 self.logger = logging.getLogger(__name__)
39 self.runner_filter = runner_filter
40 self.runners = list(runners)
41 self.banner = banner
42 self.scan_reports = []
43 self.filter_runner_framework()
44
45 @abstractmethod
46 def extract_entity_details(self, entity: Dict[str, Any]) -> Tuple[str, str, Dict[str, Any]]:
47 raise NotImplementedError()
48
49 def run(
50 self,
51 root_folder: Optional[str] = None,
52 external_checks_dir: Optional[List[str]] = None,
53 files: Optional[List[str]] = None,
54 guidelines: Optional[Dict[str, str]] = None,
55 collect_skip_comments: bool = True,
56 repo_root_for_plan_enrichment: Optional[List[Union[str, os.PathLike]]] = None,
57 ) -> List[Report]:
58 if platform.system() == 'Windows':
59 integration_feature_registry.run_pre_runner()
60 for runner in self.runners:
61 report = runner.run(root_folder, external_checks_dir=external_checks_dir, files=files,
62 runner_filter=self.runner_filter, collect_skip_comments=collect_skip_comments)
63 self._handle_report(report, guidelines, repo_root_for_plan_enrichment)
64 return self.scan_reports
65
66 # use multiprocessing for unix os
67 logging.info("Running the runners using multiprocessing")
68 processes = []
69 integration_feature_registry.run_pre_runner()
70 for runner in self.runners:
71 parent_conn, child_conn = Pipe(duplex=False)
72 process = multiprocessing.get_context("fork").Process(target=RunnerRegistry._run_runner,
73 args=(runner, root_folder, external_checks_dir, files,
74 self.runner_filter, collect_skip_comments, child_conn))
75 processes.append((process, parent_conn))
76 process.start()
77
78 for process, parent_conn in processes:
79 scan_report = parent_conn.recv()
80 self._handle_report(scan_report, guidelines, repo_root_for_plan_enrichment)
81 return self.scan_reports
82
83 def _handle_report(self, scan_report, guidelines, repo_root_for_plan_enrichment):
84 integration_feature_registry.run_post_runner(scan_report)
85 if guidelines:
86 RunnerRegistry.enrich_report_with_guidelines(scan_report, guidelines)
87 if repo_root_for_plan_enrichment:
88 enriched_resources = RunnerRegistry.get_enriched_resources(repo_root_for_plan_enrichment)
89 scan_report = Report("terraform_plan").enrich_plan_report(scan_report, enriched_resources)
90 scan_report = Report("terraform_plan").handle_skipped_checks(scan_report, enriched_resources)
91 self.scan_reports.append(scan_report)
92
93 @staticmethod
94 def _run_runner(runner, root_folder, external_checks_dir, files, runner_filter, collect_skip_comments,
95 child_conn):
96 report = runner.run(root_folder, external_checks_dir=external_checks_dir, files=files,
97 runner_filter=runner_filter, collect_skip_comments=collect_skip_comments)
98 child_conn.send(report)
99 child_conn.close()
100
101 def print_reports(
102 self,
103 scan_reports: List[Report],
104 config: argparse.Namespace,
105 url: Optional[str] = None,
106 created_baseline_path: Optional[str] = None,
107 baseline: Optional[Baseline] = None,
108 ) -> Literal[0, 1]:
109 output_formats = set(config.output)
110
111 if "cli" in config.output and not config.quiet:
112 print(f"{self.banner}\n")
113 exit_codes = []
114 report_jsons = []
115 sarif_reports = []
116 junit_reports = []
117 cyclonedx_reports = []
118 for report in scan_reports:
119 if not report.is_empty():
120 if "json" in config.output:
121 report_jsons.append(report.get_dict(is_quiet=config.quiet, url=url))
122 if "junitxml" in config.output:
123 junit_reports.append(report)
124 # report.print_junit_xml()
125 if "github_failed_only" in config.output:
126 report.print_failed_github_md(use_bc_ids=config.output_bc_ids)
127 if "sarif" in config.output:
128 sarif_reports.append(report)
129 if "cli" in config.output:
130 report.print_console(
131 is_quiet=config.quiet,
132 is_compact=config.compact,
133 created_baseline_path=created_baseline_path,
134 baseline=baseline,
135 use_bc_ids=config.output_bc_ids,
136 )
137 if url:
138 print("More details: {}".format(url))
139 output_formats.discard("cli")
140 if output_formats:
141 print(OUTPUT_DELIMITER)
142 if "cyclonedx" in config.output:
143 cyclonedx_reports.append(report)
144 exit_codes.append(report.get_exit_code(config.soft_fail, config.soft_fail_on, config.hard_fail_on))
145
146 if "sarif" in config.output:
147 master_report = Report(None)
148 for report in sarif_reports:
149 master_report.failed_checks += report.failed_checks
150 master_report.print_sarif_report()
151 output_formats.remove("sarif")
152 if output_formats:
153 print(OUTPUT_DELIMITER)
154 if "json" in config.output:
155 if len(report_jsons) == 1:
156 print(json.dumps(report_jsons[0], indent=4))
157 else:
158 print(json.dumps(report_jsons, indent=4))
159 output_formats.remove("json")
160 if output_formats:
161 print(OUTPUT_DELIMITER)
162 if "junitxml" in config.output:
163 if len(junit_reports) == 1:
164 junit_reports[0].print_junit_xml(use_bc_ids=config.output_bc_ids)
165 else:
166 master_report = Report(None)
167 for report in junit_reports:
168 master_report.skipped_checks += report.skipped_checks
169 master_report.passed_checks += report.passed_checks
170 master_report.failed_checks += report.failed_checks
171 master_report.print_junit_xml(use_bc_ids=config.output_bc_ids)
172 output_formats.remove("junitxml")
173 if output_formats:
174 print(OUTPUT_DELIMITER)
175
176 if "cyclonedx" in config.output:
177 if cyclonedx_reports:
178 # More than one Report - combine Reports first
179 report = Report(None)
180 for r in cyclonedx_reports:
181 report.passed_checks += r.passed_checks
182 report.skipped_checks += r.skipped_checks
183 report.failed_checks += r.failed_checks
184 else:
185 report = cyclonedx_reports[0]
186 cyclonedx_output = get_cyclonedx_outputter(
187 bom=report.get_cyclonedx_bom()
188 )
189 print(cyclonedx_output.output_as_string())
190 output_formats.remove("cyclonedx")
191 if output_formats:
192 print(OUTPUT_DELIMITER)
193
194 exit_code = 1 if 1 in exit_codes else 0
195 return exit_code
196
197 def filter_runner_framework(self) -> None:
198 if not self.runner_filter:
199 return
200 if self.runner_filter.framework is None:
201 return
202 if self.runner_filter.framework == "all":
203 return
204 self.runners = [runner for runner in self.runners if runner.check_type in self.runner_filter.framework]
205
206 def remove_runner(self, runner: BaseRunner) -> None:
207 if runner in self.runners:
208 self.runners.remove(runner)
209
210 @staticmethod
211 def enrich_report_with_guidelines(scan_report: Report, guidelines: Dict[str, str]) -> None:
212 for record in itertools.chain(scan_report.failed_checks, scan_report.passed_checks, scan_report.skipped_checks):
213 if record.check_id in guidelines:
214 record.set_guideline(guidelines[record.check_id])
215
216 @staticmethod
217 def get_enriched_resources(repo_roots: List[Union[str, os.PathLike]]) -> Dict[str, Dict[str, Any]]:
218 repo_definitions = {}
219 for repo_root in repo_roots:
220 tf_definitions = {}
221 parsing_errors = {}
222 Parser().parse_directory(
223 directory=repo_root, # assume plan file is in the repo-root
224 out_definitions=tf_definitions,
225 out_parsing_errors=parsing_errors,
226 )
227 repo_definitions[repo_root] = { 'tf_definitions': tf_definitions, 'parsing_errors': parsing_errors }
228
229 enriched_resources = {}
230 for repo_root, parse_results in repo_definitions.items():
231 for full_file_path, definition in parse_results['tf_definitions'].items():
232 definitions_context = parser_registry.enrich_definitions_context((full_file_path, definition))
233 abs_scanned_file, _ = tf_runner._strip_module_referrer(full_file_path)
234 scanned_file = os.path.relpath(abs_scanned_file, repo_root)
235 for block_type, block_value in definition.items():
236 if block_type in CHECK_BLOCK_TYPES:
237 for entity in block_value:
238 context_parser = parser_registry.context_parsers[block_type]
239 definition_path = context_parser.get_entity_context_path(entity)
240 entity_id = ".".join(definition_path)
241 entity_context_path = [block_type] + definition_path
242 entity_context = data_structures_utils.get_inner_dict(
243 definitions_context[full_file_path], entity_context_path
244 )
245 entity_lines_range = [
246 entity_context.get("start_line"),
247 entity_context.get("end_line"),
248 ]
249 entity_code_lines = entity_context.get("code_lines")
250 skipped_checks = entity_context.get("skipped_checks")
251 enriched_resources[entity_id] = {
252 "entity_code_lines": entity_code_lines,
253 "entity_lines_range": entity_lines_range,
254 "scanned_file": scanned_file,
255 "skipped_checks": skipped_checks,
256 }
257 return enriched_resources
258
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/checkov/common/runners/runner_registry.py b/checkov/common/runners/runner_registry.py
--- a/checkov/common/runners/runner_registry.py
+++ b/checkov/common/runners/runner_registry.py
@@ -152,7 +152,9 @@
if output_formats:
print(OUTPUT_DELIMITER)
if "json" in config.output:
- if len(report_jsons) == 1:
+ if not report_jsons:
+ print(json.dumps(Report(None).get_summary(), indent=4))
+ elif len(report_jsons) == 1:
print(json.dumps(report_jsons[0], indent=4))
else:
print(json.dumps(report_jsons, indent=4))
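
The patch above only touches the JSON branch of `RunnerRegistry.print_reports`: when there are no non-empty reports, checkov now prints a summary object instead of the bare `[]` described in the issue. A minimal sketch of the patched decision logic, assuming `Report(None).get_summary()` returns a plain dict of zeroed counters (the stand-in dict below is made up for illustration):

```python
import json

def print_json_reports(report_jsons, empty_summary):
    # Mirrors the patched branch: no reports -> summary object,
    # one report -> print it alone, several -> print the list.
    if not report_jsons:
        print(json.dumps(empty_summary, indent=4))
    elif len(report_jsons) == 1:
        print(json.dumps(report_jsons[0], indent=4))
    else:
        print(json.dumps(report_jsons, indent=4))

# Hypothetical stand-in for Report(None).get_summary()
print_json_reports([], {"passed": 0, "failed": 0, "skipped": 0, "parsing_errors": 0, "resource_count": 0})
```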
|
{"golden_diff": "diff --git a/checkov/common/runners/runner_registry.py b/checkov/common/runners/runner_registry.py\n--- a/checkov/common/runners/runner_registry.py\n+++ b/checkov/common/runners/runner_registry.py\n@@ -152,7 +152,9 @@\n if output_formats:\n print(OUTPUT_DELIMITER)\n if \"json\" in config.output:\n- if len(report_jsons) == 1:\n+ if not report_jsons:\n+ print(json.dumps(Report(None).get_summary(), indent=4))\n+ elif len(report_jsons) == 1:\n print(json.dumps(report_jsons[0], indent=4))\n else:\n print(json.dumps(report_jsons, indent=4))\n", "issue": "checkov silently returns [] when an `--external-checks-dir` doesn't have an init file\n**Describe the bug**\r\nWhen I use `--external-checks-dir` with a directory without an `__init__.py`, all `checkov` does is output `[]` when doing --output json.\r\n\r\n\r\n**Expected behavior**\r\nYell at me for giving a directory without any `__init__.py` file in it.\r\n\n", "before_files": [{"content": "import argparse\nimport itertools\nimport json\nimport logging\nimport os\nfrom abc import abstractmethod\nfrom typing import List, Union, Dict, Any, Tuple, Optional\n\nfrom typing_extensions import Literal\nimport platform\nimport multiprocessing\nfrom multiprocessing import Pipe\n\nfrom cyclonedx.output import get_instance as get_cyclonedx_outputter\n\nfrom checkov.common.bridgecrew.integration_features.integration_feature_registry import integration_feature_registry\nfrom checkov.common.output.baseline import Baseline\nfrom checkov.common.output.report import Report, report_to_cyclonedx\nfrom checkov.common.runners.base_runner import BaseRunner\nfrom checkov.common.util import data_structures_utils\nfrom checkov.runner_filter import RunnerFilter\nfrom checkov.terraform.context_parsers.registry import parser_registry\nfrom checkov.terraform.runner import Runner as tf_runner\nfrom checkov.terraform.parser import Parser\n\n\nCHECK_BLOCK_TYPES = frozenset([\"resource\", \"data\", \"provider\", \"module\"])\nOUTPUT_CHOICES = [\"cli\", \"cyclonedx\", \"json\", \"junitxml\", \"github_failed_only\", \"sarif\"]\nOUTPUT_DELIMITER = \"\\n--- OUTPUT DELIMITER ---\\n\"\n\n\nclass RunnerRegistry:\n runners: List[BaseRunner] = []\n scan_reports: List[Report] = []\n banner = \"\"\n\n def __init__(self, banner: str, runner_filter: RunnerFilter, *runners: BaseRunner) -> None:\n self.logger = logging.getLogger(__name__)\n self.runner_filter = runner_filter\n self.runners = list(runners)\n self.banner = banner\n self.scan_reports = []\n self.filter_runner_framework()\n\n @abstractmethod\n def extract_entity_details(self, entity: Dict[str, Any]) -> Tuple[str, str, Dict[str, Any]]:\n raise NotImplementedError()\n\n def run(\n self,\n root_folder: Optional[str] = None,\n external_checks_dir: Optional[List[str]] = None,\n files: Optional[List[str]] = None,\n guidelines: Optional[Dict[str, str]] = None,\n collect_skip_comments: bool = True,\n repo_root_for_plan_enrichment: Optional[List[Union[str, os.PathLike]]] = None,\n ) -> List[Report]:\n if platform.system() == 'Windows':\n integration_feature_registry.run_pre_runner()\n for runner in self.runners:\n report = runner.run(root_folder, external_checks_dir=external_checks_dir, files=files,\n runner_filter=self.runner_filter, collect_skip_comments=collect_skip_comments)\n self._handle_report(report, guidelines, repo_root_for_plan_enrichment)\n return self.scan_reports\n\n # use multiprocessing for unix os\n logging.info(\"Running the runners using multiprocessing\")\n processes = []\n 
integration_feature_registry.run_pre_runner()\n for runner in self.runners:\n parent_conn, child_conn = Pipe(duplex=False)\n process = multiprocessing.get_context(\"fork\").Process(target=RunnerRegistry._run_runner,\n args=(runner, root_folder, external_checks_dir, files,\n self.runner_filter, collect_skip_comments, child_conn))\n processes.append((process, parent_conn))\n process.start()\n\n for process, parent_conn in processes:\n scan_report = parent_conn.recv()\n self._handle_report(scan_report, guidelines, repo_root_for_plan_enrichment)\n return self.scan_reports\n\n def _handle_report(self, scan_report, guidelines, repo_root_for_plan_enrichment):\n integration_feature_registry.run_post_runner(scan_report)\n if guidelines:\n RunnerRegistry.enrich_report_with_guidelines(scan_report, guidelines)\n if repo_root_for_plan_enrichment:\n enriched_resources = RunnerRegistry.get_enriched_resources(repo_root_for_plan_enrichment)\n scan_report = Report(\"terraform_plan\").enrich_plan_report(scan_report, enriched_resources)\n scan_report = Report(\"terraform_plan\").handle_skipped_checks(scan_report, enriched_resources)\n self.scan_reports.append(scan_report)\n\n @staticmethod\n def _run_runner(runner, root_folder, external_checks_dir, files, runner_filter, collect_skip_comments,\n child_conn):\n report = runner.run(root_folder, external_checks_dir=external_checks_dir, files=files,\n runner_filter=runner_filter, collect_skip_comments=collect_skip_comments)\n child_conn.send(report)\n child_conn.close()\n\n def print_reports(\n self,\n scan_reports: List[Report],\n config: argparse.Namespace,\n url: Optional[str] = None,\n created_baseline_path: Optional[str] = None,\n baseline: Optional[Baseline] = None,\n ) -> Literal[0, 1]:\n output_formats = set(config.output)\n\n if \"cli\" in config.output and not config.quiet:\n print(f\"{self.banner}\\n\")\n exit_codes = []\n report_jsons = []\n sarif_reports = []\n junit_reports = []\n cyclonedx_reports = []\n for report in scan_reports:\n if not report.is_empty():\n if \"json\" in config.output:\n report_jsons.append(report.get_dict(is_quiet=config.quiet, url=url))\n if \"junitxml\" in config.output:\n junit_reports.append(report)\n # report.print_junit_xml()\n if \"github_failed_only\" in config.output:\n report.print_failed_github_md(use_bc_ids=config.output_bc_ids)\n if \"sarif\" in config.output:\n sarif_reports.append(report)\n if \"cli\" in config.output:\n report.print_console(\n is_quiet=config.quiet,\n is_compact=config.compact,\n created_baseline_path=created_baseline_path,\n baseline=baseline,\n use_bc_ids=config.output_bc_ids,\n )\n if url:\n print(\"More details: {}\".format(url))\n output_formats.discard(\"cli\")\n if output_formats:\n print(OUTPUT_DELIMITER)\n if \"cyclonedx\" in config.output:\n cyclonedx_reports.append(report)\n exit_codes.append(report.get_exit_code(config.soft_fail, config.soft_fail_on, config.hard_fail_on))\n\n if \"sarif\" in config.output:\n master_report = Report(None)\n for report in sarif_reports:\n master_report.failed_checks += report.failed_checks\n master_report.print_sarif_report()\n output_formats.remove(\"sarif\")\n if output_formats:\n print(OUTPUT_DELIMITER)\n if \"json\" in config.output:\n if len(report_jsons) == 1:\n print(json.dumps(report_jsons[0], indent=4))\n else:\n print(json.dumps(report_jsons, indent=4))\n output_formats.remove(\"json\")\n if output_formats:\n print(OUTPUT_DELIMITER)\n if \"junitxml\" in config.output:\n if len(junit_reports) == 1:\n 
junit_reports[0].print_junit_xml(use_bc_ids=config.output_bc_ids)\n else:\n master_report = Report(None)\n for report in junit_reports:\n master_report.skipped_checks += report.skipped_checks\n master_report.passed_checks += report.passed_checks\n master_report.failed_checks += report.failed_checks\n master_report.print_junit_xml(use_bc_ids=config.output_bc_ids)\n output_formats.remove(\"junitxml\")\n if output_formats:\n print(OUTPUT_DELIMITER)\n\n if \"cyclonedx\" in config.output:\n if cyclonedx_reports:\n # More than one Report - combine Reports first\n report = Report(None)\n for r in cyclonedx_reports:\n report.passed_checks += r.passed_checks\n report.skipped_checks += r.skipped_checks\n report.failed_checks += r.failed_checks\n else:\n report = cyclonedx_reports[0]\n cyclonedx_output = get_cyclonedx_outputter(\n bom=report.get_cyclonedx_bom()\n )\n print(cyclonedx_output.output_as_string())\n output_formats.remove(\"cyclonedx\")\n if output_formats:\n print(OUTPUT_DELIMITER)\n\n exit_code = 1 if 1 in exit_codes else 0\n return exit_code\n\n def filter_runner_framework(self) -> None:\n if not self.runner_filter:\n return\n if self.runner_filter.framework is None:\n return\n if self.runner_filter.framework == \"all\":\n return\n self.runners = [runner for runner in self.runners if runner.check_type in self.runner_filter.framework]\n\n def remove_runner(self, runner: BaseRunner) -> None:\n if runner in self.runners:\n self.runners.remove(runner)\n\n @staticmethod\n def enrich_report_with_guidelines(scan_report: Report, guidelines: Dict[str, str]) -> None:\n for record in itertools.chain(scan_report.failed_checks, scan_report.passed_checks, scan_report.skipped_checks):\n if record.check_id in guidelines:\n record.set_guideline(guidelines[record.check_id])\n\n @staticmethod\n def get_enriched_resources(repo_roots: List[Union[str, os.PathLike]]) -> Dict[str, Dict[str, Any]]:\n repo_definitions = {}\n for repo_root in repo_roots:\n tf_definitions = {}\n parsing_errors = {}\n Parser().parse_directory(\n directory=repo_root, # assume plan file is in the repo-root\n out_definitions=tf_definitions,\n out_parsing_errors=parsing_errors,\n )\n repo_definitions[repo_root] = { 'tf_definitions': tf_definitions, 'parsing_errors': parsing_errors }\n\n enriched_resources = {}\n for repo_root, parse_results in repo_definitions.items():\n for full_file_path, definition in parse_results['tf_definitions'].items():\n definitions_context = parser_registry.enrich_definitions_context((full_file_path, definition))\n abs_scanned_file, _ = tf_runner._strip_module_referrer(full_file_path)\n scanned_file = os.path.relpath(abs_scanned_file, repo_root)\n for block_type, block_value in definition.items():\n if block_type in CHECK_BLOCK_TYPES:\n for entity in block_value:\n context_parser = parser_registry.context_parsers[block_type]\n definition_path = context_parser.get_entity_context_path(entity)\n entity_id = \".\".join(definition_path)\n entity_context_path = [block_type] + definition_path\n entity_context = data_structures_utils.get_inner_dict(\n definitions_context[full_file_path], entity_context_path\n )\n entity_lines_range = [\n entity_context.get(\"start_line\"),\n entity_context.get(\"end_line\"),\n ]\n entity_code_lines = entity_context.get(\"code_lines\")\n skipped_checks = entity_context.get(\"skipped_checks\")\n enriched_resources[entity_id] = {\n \"entity_code_lines\": entity_code_lines,\n \"entity_lines_range\": entity_lines_range,\n \"scanned_file\": scanned_file,\n \"skipped_checks\": 
skipped_checks,\n }\n return enriched_resources\n", "path": "checkov/common/runners/runner_registry.py"}], "after_files": [{"content": "import argparse\nimport itertools\nimport json\nimport logging\nimport os\nfrom abc import abstractmethod\nfrom typing import List, Union, Dict, Any, Tuple, Optional\n\nfrom typing_extensions import Literal\nimport platform\nimport multiprocessing\nfrom multiprocessing import Pipe\n\nfrom cyclonedx.output import get_instance as get_cyclonedx_outputter\n\nfrom checkov.common.bridgecrew.integration_features.integration_feature_registry import integration_feature_registry\nfrom checkov.common.output.baseline import Baseline\nfrom checkov.common.output.report import Report, report_to_cyclonedx\nfrom checkov.common.runners.base_runner import BaseRunner\nfrom checkov.common.util import data_structures_utils\nfrom checkov.runner_filter import RunnerFilter\nfrom checkov.terraform.context_parsers.registry import parser_registry\nfrom checkov.terraform.runner import Runner as tf_runner\nfrom checkov.terraform.parser import Parser\n\n\nCHECK_BLOCK_TYPES = frozenset([\"resource\", \"data\", \"provider\", \"module\"])\nOUTPUT_CHOICES = [\"cli\", \"cyclonedx\", \"json\", \"junitxml\", \"github_failed_only\", \"sarif\"]\nOUTPUT_DELIMITER = \"\\n--- OUTPUT DELIMITER ---\\n\"\n\n\nclass RunnerRegistry:\n runners: List[BaseRunner] = []\n scan_reports: List[Report] = []\n banner = \"\"\n\n def __init__(self, banner: str, runner_filter: RunnerFilter, *runners: BaseRunner) -> None:\n self.logger = logging.getLogger(__name__)\n self.runner_filter = runner_filter\n self.runners = list(runners)\n self.banner = banner\n self.scan_reports = []\n self.filter_runner_framework()\n\n @abstractmethod\n def extract_entity_details(self, entity: Dict[str, Any]) -> Tuple[str, str, Dict[str, Any]]:\n raise NotImplementedError()\n\n def run(\n self,\n root_folder: Optional[str] = None,\n external_checks_dir: Optional[List[str]] = None,\n files: Optional[List[str]] = None,\n guidelines: Optional[Dict[str, str]] = None,\n collect_skip_comments: bool = True,\n repo_root_for_plan_enrichment: Optional[List[Union[str, os.PathLike]]] = None,\n ) -> List[Report]:\n if platform.system() == 'Windows':\n integration_feature_registry.run_pre_runner()\n for runner in self.runners:\n report = runner.run(root_folder, external_checks_dir=external_checks_dir, files=files,\n runner_filter=self.runner_filter, collect_skip_comments=collect_skip_comments)\n self._handle_report(report, guidelines, repo_root_for_plan_enrichment)\n return self.scan_reports\n\n # use multiprocessing for unix os\n logging.info(\"Running the runners using multiprocessing\")\n processes = []\n integration_feature_registry.run_pre_runner()\n for runner in self.runners:\n parent_conn, child_conn = Pipe(duplex=False)\n process = multiprocessing.get_context(\"fork\").Process(target=RunnerRegistry._run_runner,\n args=(runner, root_folder, external_checks_dir, files,\n self.runner_filter, collect_skip_comments, child_conn))\n processes.append((process, parent_conn))\n process.start()\n\n for process, parent_conn in processes:\n scan_report = parent_conn.recv()\n self._handle_report(scan_report, guidelines, repo_root_for_plan_enrichment)\n return self.scan_reports\n\n def _handle_report(self, scan_report, guidelines, repo_root_for_plan_enrichment):\n integration_feature_registry.run_post_runner(scan_report)\n if guidelines:\n RunnerRegistry.enrich_report_with_guidelines(scan_report, guidelines)\n if repo_root_for_plan_enrichment:\n 
enriched_resources = RunnerRegistry.get_enriched_resources(repo_root_for_plan_enrichment)\n scan_report = Report(\"terraform_plan\").enrich_plan_report(scan_report, enriched_resources)\n scan_report = Report(\"terraform_plan\").handle_skipped_checks(scan_report, enriched_resources)\n self.scan_reports.append(scan_report)\n\n @staticmethod\n def _run_runner(runner, root_folder, external_checks_dir, files, runner_filter, collect_skip_comments,\n child_conn):\n report = runner.run(root_folder, external_checks_dir=external_checks_dir, files=files,\n runner_filter=runner_filter, collect_skip_comments=collect_skip_comments)\n child_conn.send(report)\n child_conn.close()\n\n def print_reports(\n self,\n scan_reports: List[Report],\n config: argparse.Namespace,\n url: Optional[str] = None,\n created_baseline_path: Optional[str] = None,\n baseline: Optional[Baseline] = None,\n ) -> Literal[0, 1]:\n output_formats = set(config.output)\n\n if \"cli\" in config.output and not config.quiet:\n print(f\"{self.banner}\\n\")\n exit_codes = []\n report_jsons = []\n sarif_reports = []\n junit_reports = []\n cyclonedx_reports = []\n for report in scan_reports:\n if not report.is_empty():\n if \"json\" in config.output:\n report_jsons.append(report.get_dict(is_quiet=config.quiet, url=url))\n if \"junitxml\" in config.output:\n junit_reports.append(report)\n # report.print_junit_xml()\n if \"github_failed_only\" in config.output:\n report.print_failed_github_md(use_bc_ids=config.output_bc_ids)\n if \"sarif\" in config.output:\n sarif_reports.append(report)\n if \"cli\" in config.output:\n report.print_console(\n is_quiet=config.quiet,\n is_compact=config.compact,\n created_baseline_path=created_baseline_path,\n baseline=baseline,\n use_bc_ids=config.output_bc_ids,\n )\n if url:\n print(\"More details: {}\".format(url))\n output_formats.discard(\"cli\")\n if output_formats:\n print(OUTPUT_DELIMITER)\n if \"cyclonedx\" in config.output:\n cyclonedx_reports.append(report)\n exit_codes.append(report.get_exit_code(config.soft_fail, config.soft_fail_on, config.hard_fail_on))\n\n if \"sarif\" in config.output:\n master_report = Report(None)\n for report in sarif_reports:\n master_report.failed_checks += report.failed_checks\n master_report.print_sarif_report()\n output_formats.remove(\"sarif\")\n if output_formats:\n print(OUTPUT_DELIMITER)\n if \"json\" in config.output:\n if not report_jsons:\n print(json.dumps(Report(None).get_summary(), indent=4))\n elif len(report_jsons) == 1:\n print(json.dumps(report_jsons[0], indent=4))\n else:\n print(json.dumps(report_jsons, indent=4))\n output_formats.remove(\"json\")\n if output_formats:\n print(OUTPUT_DELIMITER)\n if \"junitxml\" in config.output:\n if len(junit_reports) == 1:\n junit_reports[0].print_junit_xml(use_bc_ids=config.output_bc_ids)\n else:\n master_report = Report(None)\n for report in junit_reports:\n master_report.skipped_checks += report.skipped_checks\n master_report.passed_checks += report.passed_checks\n master_report.failed_checks += report.failed_checks\n master_report.print_junit_xml(use_bc_ids=config.output_bc_ids)\n output_formats.remove(\"junitxml\")\n if output_formats:\n print(OUTPUT_DELIMITER)\n\n if \"cyclonedx\" in config.output:\n if cyclonedx_reports:\n # More than one Report - combine Reports first\n report = Report(None)\n for r in cyclonedx_reports:\n report.passed_checks += r.passed_checks\n report.skipped_checks += r.skipped_checks\n report.failed_checks += r.failed_checks\n else:\n report = cyclonedx_reports[0]\n cyclonedx_output = 
get_cyclonedx_outputter(\n bom=report.get_cyclonedx_bom()\n )\n print(cyclonedx_output.output_as_string())\n output_formats.remove(\"cyclonedx\")\n if output_formats:\n print(OUTPUT_DELIMITER)\n\n exit_code = 1 if 1 in exit_codes else 0\n return exit_code\n\n def filter_runner_framework(self) -> None:\n if not self.runner_filter:\n return\n if self.runner_filter.framework is None:\n return\n if self.runner_filter.framework == \"all\":\n return\n self.runners = [runner for runner in self.runners if runner.check_type in self.runner_filter.framework]\n\n def remove_runner(self, runner: BaseRunner) -> None:\n if runner in self.runners:\n self.runners.remove(runner)\n\n @staticmethod\n def enrich_report_with_guidelines(scan_report: Report, guidelines: Dict[str, str]) -> None:\n for record in itertools.chain(scan_report.failed_checks, scan_report.passed_checks, scan_report.skipped_checks):\n if record.check_id in guidelines:\n record.set_guideline(guidelines[record.check_id])\n\n @staticmethod\n def get_enriched_resources(repo_roots: List[Union[str, os.PathLike]]) -> Dict[str, Dict[str, Any]]:\n repo_definitions = {}\n for repo_root in repo_roots:\n tf_definitions = {}\n parsing_errors = {}\n Parser().parse_directory(\n directory=repo_root, # assume plan file is in the repo-root\n out_definitions=tf_definitions,\n out_parsing_errors=parsing_errors,\n )\n repo_definitions[repo_root] = { 'tf_definitions': tf_definitions, 'parsing_errors': parsing_errors }\n\n enriched_resources = {}\n for repo_root, parse_results in repo_definitions.items():\n for full_file_path, definition in parse_results['tf_definitions'].items():\n definitions_context = parser_registry.enrich_definitions_context((full_file_path, definition))\n abs_scanned_file, _ = tf_runner._strip_module_referrer(full_file_path)\n scanned_file = os.path.relpath(abs_scanned_file, repo_root)\n for block_type, block_value in definition.items():\n if block_type in CHECK_BLOCK_TYPES:\n for entity in block_value:\n context_parser = parser_registry.context_parsers[block_type]\n definition_path = context_parser.get_entity_context_path(entity)\n entity_id = \".\".join(definition_path)\n entity_context_path = [block_type] + definition_path\n entity_context = data_structures_utils.get_inner_dict(\n definitions_context[full_file_path], entity_context_path\n )\n entity_lines_range = [\n entity_context.get(\"start_line\"),\n entity_context.get(\"end_line\"),\n ]\n entity_code_lines = entity_context.get(\"code_lines\")\n skipped_checks = entity_context.get(\"skipped_checks\")\n enriched_resources[entity_id] = {\n \"entity_code_lines\": entity_code_lines,\n \"entity_lines_range\": entity_lines_range,\n \"scanned_file\": scanned_file,\n \"skipped_checks\": skipped_checks,\n }\n return enriched_resources\n", "path": "checkov/common/runners/runner_registry.py"}]}
| 3,298 | 160 |
gh_patches_debug_30630
|
rasdani/github-patches
|
git_diff
|
Project-MONAI__MONAI-1566
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
DVF to DDF
**Is your feature request related to a problem? Please describe.**
The current repository only supports warping based on DDF, but not on DVF.
**Describe the solution you'd like**
Implement a DVF to DDF transformation layer. So for models outputting DVF, we can first transform DVF to DDF and then use the existing Warp layer to resample the input image based on the transformed DDF.
**Describe alternatives you've considered**
N/A
**Additional context**
DeepReg implementation of [IntDVF](https://github.com/DeepRegNet/DeepReg/blob/80854094feafec998fa6237199066556c73f31f9/deepreg/model/layer.py#L508-L535)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `monai/networks/blocks/__init__.py`
Content:
```
1 # Copyright 2020 - 2021 MONAI Consortium
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at
5 # http://www.apache.org/licenses/LICENSE-2.0
6 # Unless required by applicable law or agreed to in writing, software
7 # distributed under the License is distributed on an "AS IS" BASIS,
8 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
9 # See the License for the specific language governing permissions and
10 # limitations under the License.
11
12 from .acti_norm import ADN
13 from .activation import Mish, Swish
14 from .aspp import SimpleASPP
15 from .convolutions import Convolution, ResidualUnit
16 from .downsample import MaxAvgPool
17 from .dynunet_block import UnetBasicBlock, UnetOutBlock, UnetResBlock, UnetUpBlock, get_output_padding, get_padding
18 from .fcn import FCN, GCN, MCFCN, Refine
19 from .localnet_block import LocalNetDownSampleBlock, LocalNetFeatureExtractorBlock, LocalNetUpSampleBlock
20 from .segresnet_block import ResBlock
21 from .squeeze_and_excitation import (
22 ChannelSELayer,
23 ResidualSELayer,
24 SEBlock,
25 SEBottleneck,
26 SEResNetBottleneck,
27 SEResNeXtBottleneck,
28 )
29 from .upsample import SubpixelUpsample, Subpixelupsample, SubpixelUpSample, Upsample, UpSample
30 from .warp import Warp
31
```
Path: `monai/networks/blocks/warp.py`
Content:
```
1 from typing import List, Optional, Union
2
3 import torch
4 from torch import nn
5 from torch.nn import functional as F
6
7 from monai.utils import GridSamplePadMode
8
9
10 class Warp(nn.Module):
11 """
12 Warp an image with given DDF.
13 """
14
15 def __init__(
16 self,
17 spatial_dims: int,
18 mode: int = 1,
19 padding_mode: Optional[Union[GridSamplePadMode, str]] = GridSamplePadMode.ZEROS,
20 ):
21 """
22 Args:
23 spatial_dims: {2, 3}. number of spatial dimensions
24 mode: interpolation mode to calculate output values, defaults to 1.
25 Possible values are::
26
27 - 0 or 'nearest' or InterpolationType.nearest
28 - 1 or 'linear' or InterpolationType.linear
29 - 2 or 'quadratic' or InterpolationType.quadratic
30 - 3 or 'cubic' or InterpolationType.cubic
31 - 4 or 'fourth' or InterpolationType.fourth
32 - etc.
33 padding_mode: {``"zeros"``, ``"border"``, ``"reflection"``}
34 Padding mode for outside grid values. Defaults to ``"border"``.
35 See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample
36 """
37 super(Warp, self).__init__()
38 if spatial_dims not in [2, 3]:
39 raise ValueError(f"got unsupported spatial_dims={spatial_dims}, only support 2-d and 3-d input")
40 self.spatial_dims = spatial_dims
41 if mode < 0:
42 raise ValueError(f"do not support negative mode, got mode={mode}")
43 self.mode = mode
44 self.padding_mode: GridSamplePadMode = GridSamplePadMode(padding_mode)
45
46 @staticmethod
47 def get_reference_grid(ddf: torch.Tensor) -> torch.Tensor:
48 mesh_points = [torch.arange(0, dim) for dim in ddf.shape[2:]]
49 grid = torch.stack(torch.meshgrid(*mesh_points), dim=0) # (spatial_dims, ...)
50 grid = torch.stack([grid] * ddf.shape[0], dim=0) # (batch, spatial_dims, ...)
51 grid = grid.to(ddf)
52 return grid
53
54 @staticmethod
55 def normalize_grid(grid: torch.Tensor) -> torch.Tensor:
56 # (batch, ..., self.spatial_dims)
57 for i, dim in enumerate(grid.shape[1:-1]):
58 grid[..., i] = grid[..., i] * 2 / (dim - 1) - 1
59 return grid
60
61 def forward(self, image: torch.Tensor, ddf: torch.Tensor) -> torch.Tensor:
62 """
63 Args:
64 image: Tensor in shape (batch, num_channels, H, W[, D])
65 ddf: Tensor in the same spatial size as image, in shape (batch, spatial_dims, H, W[, D])
66
67 Returns:
68 warped_image in the same shape as image (batch, num_channels, H, W[, D])
69 """
70 if len(image.shape) != 2 + self.spatial_dims:
71 raise ValueError(f"expecting {self.spatial_dims + 2}-d input, " f"got input in shape {image.shape}")
72 if len(ddf.shape) != 2 + self.spatial_dims or ddf.shape[1] != self.spatial_dims:
73 raise ValueError(
74 f"expecting {self.spatial_dims + 2}-d ddf with {self.spatial_dims} channels, "
75 f"got ddf in shape {ddf.shape}"
76 )
77 if image.shape[0] != ddf.shape[0] or image.shape[2:] != ddf.shape[2:]:
78 raise ValueError(
79 "expecting image and ddf of same batch size and spatial size, "
80 f"got image of shape {image.shape}, ddf of shape {ddf.shape}"
81 )
82
83 grid = self.get_reference_grid(ddf) + ddf
84 grid = grid.permute([0] + list(range(2, 2 + self.spatial_dims)) + [1]) # (batch, ..., self.spatial_dims)
85
86 if self.mode > 1:
87 raise ValueError(f"{self.mode}-order interpolation not yet implemented.")
88 # if not USE_COMPILED:
89 # raise ValueError(f"cannot perform {self.mode}-order interpolation without C compile.")
90 # _padding_mode = self.padding_mode.value
91 # if _padding_mode == "zeros":
92 # bound = 7
93 # elif _padding_mode == "border":
94 # bound = 0
95 # else:
96 # bound = 1
97 # warped_image: torch.Tensor = grid_pull(
98 # image,
99 # grid,
100 # bound=bound,
101 # extrapolate=True,
102 # interpolation=self.mode,
103 # )
104 else:
105 grid = self.normalize_grid(grid)
106 index_ordering: List[int] = list(range(self.spatial_dims - 1, -1, -1))
107 grid = grid[..., index_ordering] # z, y, x -> x, y, z
108 _interp_mode = "bilinear" if self.mode == 1 else "nearest"
109 warped_image = F.grid_sample(
110 image, grid, mode=_interp_mode, padding_mode=self.padding_mode.value, align_corners=True
111 )
112
113 return warped_image
114
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/monai/networks/blocks/__init__.py b/monai/networks/blocks/__init__.py
--- a/monai/networks/blocks/__init__.py
+++ b/monai/networks/blocks/__init__.py
@@ -27,4 +27,4 @@
SEResNeXtBottleneck,
)
from .upsample import SubpixelUpsample, Subpixelupsample, SubpixelUpSample, Upsample, UpSample
-from .warp import Warp
+from .warp import DVF2DDF, Warp
diff --git a/monai/networks/blocks/warp.py b/monai/networks/blocks/warp.py
--- a/monai/networks/blocks/warp.py
+++ b/monai/networks/blocks/warp.py
@@ -62,7 +62,7 @@
"""
Args:
image: Tensor in shape (batch, num_channels, H, W[, D])
- ddf: Tensor in the same spatial size as image, in shape (batch, spatial_dims, H, W[, D])
+ ddf: Tensor in the same spatial size as image, in shape (batch, ``spatial_dims``, H, W[, D])
Returns:
warped_image in the same shape as image (batch, num_channels, H, W[, D])
@@ -111,3 +111,40 @@
)
return warped_image
+
+
+class DVF2DDF(nn.Module):
+ """
+ Layer calculates a dense velocity field (DVF) from a dense displacement field (DDF)
+ with scaling and squaring.
+
+ Adapted from:
+ DeepReg (https://github.com/DeepRegNet/DeepReg)
+
+ """
+
+ def __init__(
+ self,
+ spatial_dims: int,
+ num_steps: int = 7,
+ mode: int = 1,
+ padding_mode: Optional[Union[GridSamplePadMode, str]] = GridSamplePadMode.ZEROS,
+ ):
+ super(DVF2DDF, self).__init__()
+ if num_steps <= 0:
+ raise ValueError(f"expecting positive num_steps, got {num_steps}")
+ self.num_steps = num_steps
+ self.warp_layer = Warp(spatial_dims=spatial_dims, mode=mode, padding_mode=padding_mode)
+
+ def forward(self, dvf):
+ """
+ Args:
+ dvf: dvf to be transformed, in shape (batch, ``spatial_dims``, H, W[,D])
+
+ Returns:
+
+ """
+ ddf: torch.Tensor = dvf / (2 ** self.num_steps)
+ for _ in range(self.num_steps):
+ ddf = ddf + self.warp_layer(image=ddf, ddf=ddf)
+ return ddf
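
The `DVF2DDF` block added by this diff integrates the stationary velocity field by scaling and squaring: the DVF is first divided by `2 ** num_steps`, then composed with itself `num_steps` times via the existing `Warp` layer. A small usage sketch, assuming a MONAI build that exports `DVF2DDF` and `Warp` exactly as defined in this record (shapes and values below are made up for illustration):

```python
import torch
from monai.networks.blocks import DVF2DDF, Warp  # assumes the blocks from the diff above

# Toy 2-D velocity field: (batch, spatial_dims, H, W)
dvf = torch.zeros(1, 2, 32, 32)
dvf[:, 0, 10:20, 10:20] = 1.5

dvf2ddf = DVF2DDF(spatial_dims=2, num_steps=7)   # integrate DVF -> DDF
warp = Warp(spatial_dims=2)                      # resample with the resulting DDF

ddf = dvf2ddf(dvf)
image = torch.rand(1, 1, 32, 32)                 # (batch, channels, H, W)
moved = warp(image, ddf)
print(moved.shape)                               # torch.Size([1, 1, 32, 32])
```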
|
{"golden_diff": "diff --git a/monai/networks/blocks/__init__.py b/monai/networks/blocks/__init__.py\n--- a/monai/networks/blocks/__init__.py\n+++ b/monai/networks/blocks/__init__.py\n@@ -27,4 +27,4 @@\n SEResNeXtBottleneck,\n )\n from .upsample import SubpixelUpsample, Subpixelupsample, SubpixelUpSample, Upsample, UpSample\n-from .warp import Warp\n+from .warp import DVF2DDF, Warp\ndiff --git a/monai/networks/blocks/warp.py b/monai/networks/blocks/warp.py\n--- a/monai/networks/blocks/warp.py\n+++ b/monai/networks/blocks/warp.py\n@@ -62,7 +62,7 @@\n \"\"\"\n Args:\n image: Tensor in shape (batch, num_channels, H, W[, D])\n- ddf: Tensor in the same spatial size as image, in shape (batch, spatial_dims, H, W[, D])\n+ ddf: Tensor in the same spatial size as image, in shape (batch, ``spatial_dims``, H, W[, D])\n \n Returns:\n warped_image in the same shape as image (batch, num_channels, H, W[, D])\n@@ -111,3 +111,40 @@\n )\n \n return warped_image\n+\n+\n+class DVF2DDF(nn.Module):\n+ \"\"\"\n+ Layer calculates a dense velocity field (DVF) from a dense displacement field (DDF)\n+ with scaling and squaring.\n+\n+ Adapted from:\n+ DeepReg (https://github.com/DeepRegNet/DeepReg)\n+\n+ \"\"\"\n+\n+ def __init__(\n+ self,\n+ spatial_dims: int,\n+ num_steps: int = 7,\n+ mode: int = 1,\n+ padding_mode: Optional[Union[GridSamplePadMode, str]] = GridSamplePadMode.ZEROS,\n+ ):\n+ super(DVF2DDF, self).__init__()\n+ if num_steps <= 0:\n+ raise ValueError(f\"expecting positive num_steps, got {num_steps}\")\n+ self.num_steps = num_steps\n+ self.warp_layer = Warp(spatial_dims=spatial_dims, mode=mode, padding_mode=padding_mode)\n+\n+ def forward(self, dvf):\n+ \"\"\"\n+ Args:\n+ dvf: dvf to be transformed, in shape (batch, ``spatial_dims``, H, W[,D])\n+\n+ Returns:\n+\n+ \"\"\"\n+ ddf: torch.Tensor = dvf / (2 ** self.num_steps)\n+ for _ in range(self.num_steps):\n+ ddf = ddf + self.warp_layer(image=ddf, ddf=ddf)\n+ return ddf\n", "issue": "DVF to DDF\n**Is your feature request related to a problem? Please describe.**\r\nThe current repository only supports warping based on DDF, but not on DVF.\r\n\r\n**Describe the solution you'd like**\r\nImplement a DVF to DDF transformation layer. 
So for models outputting DVF, we can first transform DVF to DDF and then used the existing Warp layer to resample input image based on the transformed DDF.\r\n\r\n**Describe alternatives you've considered**\r\nN/A\r\n\r\n**Additional context**\r\nDeepReg implementation of [IntDVF](https://github.com/DeepRegNet/DeepReg/blob/80854094feafec998fa6237199066556c73f31f9/deepreg/model/layer.py#L508-L535)\r\n\n", "before_files": [{"content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom .acti_norm import ADN\nfrom .activation import Mish, Swish\nfrom .aspp import SimpleASPP\nfrom .convolutions import Convolution, ResidualUnit\nfrom .downsample import MaxAvgPool\nfrom .dynunet_block import UnetBasicBlock, UnetOutBlock, UnetResBlock, UnetUpBlock, get_output_padding, get_padding\nfrom .fcn import FCN, GCN, MCFCN, Refine\nfrom .localnet_block import LocalNetDownSampleBlock, LocalNetFeatureExtractorBlock, LocalNetUpSampleBlock\nfrom .segresnet_block import ResBlock\nfrom .squeeze_and_excitation import (\n ChannelSELayer,\n ResidualSELayer,\n SEBlock,\n SEBottleneck,\n SEResNetBottleneck,\n SEResNeXtBottleneck,\n)\nfrom .upsample import SubpixelUpsample, Subpixelupsample, SubpixelUpSample, Upsample, UpSample\nfrom .warp import Warp\n", "path": "monai/networks/blocks/__init__.py"}, {"content": "from typing import List, Optional, Union\n\nimport torch\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom monai.utils import GridSamplePadMode\n\n\nclass Warp(nn.Module):\n \"\"\"\n Warp an image with given DDF.\n \"\"\"\n\n def __init__(\n self,\n spatial_dims: int,\n mode: int = 1,\n padding_mode: Optional[Union[GridSamplePadMode, str]] = GridSamplePadMode.ZEROS,\n ):\n \"\"\"\n Args:\n spatial_dims: {2, 3}. number of spatial dimensions\n mode: interpolation mode to calculate output values, defaults to 1.\n Possible values are::\n\n - 0 or 'nearest' or InterpolationType.nearest\n - 1 or 'linear' or InterpolationType.linear\n - 2 or 'quadratic' or InterpolationType.quadratic\n - 3 or 'cubic' or InterpolationType.cubic\n - 4 or 'fourth' or InterpolationType.fourth\n - etc.\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. 
Defaults to ``\"border\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n \"\"\"\n super(Warp, self).__init__()\n if spatial_dims not in [2, 3]:\n raise ValueError(f\"got unsupported spatial_dims={spatial_dims}, only support 2-d and 3-d input\")\n self.spatial_dims = spatial_dims\n if mode < 0:\n raise ValueError(f\"do not support negative mode, got mode={mode}\")\n self.mode = mode\n self.padding_mode: GridSamplePadMode = GridSamplePadMode(padding_mode)\n\n @staticmethod\n def get_reference_grid(ddf: torch.Tensor) -> torch.Tensor:\n mesh_points = [torch.arange(0, dim) for dim in ddf.shape[2:]]\n grid = torch.stack(torch.meshgrid(*mesh_points), dim=0) # (spatial_dims, ...)\n grid = torch.stack([grid] * ddf.shape[0], dim=0) # (batch, spatial_dims, ...)\n grid = grid.to(ddf)\n return grid\n\n @staticmethod\n def normalize_grid(grid: torch.Tensor) -> torch.Tensor:\n # (batch, ..., self.spatial_dims)\n for i, dim in enumerate(grid.shape[1:-1]):\n grid[..., i] = grid[..., i] * 2 / (dim - 1) - 1\n return grid\n\n def forward(self, image: torch.Tensor, ddf: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n image: Tensor in shape (batch, num_channels, H, W[, D])\n ddf: Tensor in the same spatial size as image, in shape (batch, spatial_dims, H, W[, D])\n\n Returns:\n warped_image in the same shape as image (batch, num_channels, H, W[, D])\n \"\"\"\n if len(image.shape) != 2 + self.spatial_dims:\n raise ValueError(f\"expecting {self.spatial_dims + 2}-d input, \" f\"got input in shape {image.shape}\")\n if len(ddf.shape) != 2 + self.spatial_dims or ddf.shape[1] != self.spatial_dims:\n raise ValueError(\n f\"expecting {self.spatial_dims + 2}-d ddf with {self.spatial_dims} channels, \"\n f\"got ddf in shape {ddf.shape}\"\n )\n if image.shape[0] != ddf.shape[0] or image.shape[2:] != ddf.shape[2:]:\n raise ValueError(\n \"expecting image and ddf of same batch size and spatial size, \"\n f\"got image of shape {image.shape}, ddf of shape {ddf.shape}\"\n )\n\n grid = self.get_reference_grid(ddf) + ddf\n grid = grid.permute([0] + list(range(2, 2 + self.spatial_dims)) + [1]) # (batch, ..., self.spatial_dims)\n\n if self.mode > 1:\n raise ValueError(f\"{self.mode}-order interpolation not yet implemented.\")\n # if not USE_COMPILED:\n # raise ValueError(f\"cannot perform {self.mode}-order interpolation without C compile.\")\n # _padding_mode = self.padding_mode.value\n # if _padding_mode == \"zeros\":\n # bound = 7\n # elif _padding_mode == \"border\":\n # bound = 0\n # else:\n # bound = 1\n # warped_image: torch.Tensor = grid_pull(\n # image,\n # grid,\n # bound=bound,\n # extrapolate=True,\n # interpolation=self.mode,\n # )\n else:\n grid = self.normalize_grid(grid)\n index_ordering: List[int] = list(range(self.spatial_dims - 1, -1, -1))\n grid = grid[..., index_ordering] # z, y, x -> x, y, z\n _interp_mode = \"bilinear\" if self.mode == 1 else \"nearest\"\n warped_image = F.grid_sample(\n image, grid, mode=_interp_mode, padding_mode=self.padding_mode.value, align_corners=True\n )\n\n return warped_image\n", "path": "monai/networks/blocks/warp.py"}], "after_files": [{"content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" 
BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom .acti_norm import ADN\nfrom .activation import Mish, Swish\nfrom .aspp import SimpleASPP\nfrom .convolutions import Convolution, ResidualUnit\nfrom .downsample import MaxAvgPool\nfrom .dynunet_block import UnetBasicBlock, UnetOutBlock, UnetResBlock, UnetUpBlock, get_output_padding, get_padding\nfrom .fcn import FCN, GCN, MCFCN, Refine\nfrom .localnet_block import LocalNetDownSampleBlock, LocalNetFeatureExtractorBlock, LocalNetUpSampleBlock\nfrom .segresnet_block import ResBlock\nfrom .squeeze_and_excitation import (\n ChannelSELayer,\n ResidualSELayer,\n SEBlock,\n SEBottleneck,\n SEResNetBottleneck,\n SEResNeXtBottleneck,\n)\nfrom .upsample import SubpixelUpsample, Subpixelupsample, SubpixelUpSample, Upsample, UpSample\nfrom .warp import DVF2DDF, Warp\n", "path": "monai/networks/blocks/__init__.py"}, {"content": "from typing import List, Optional, Union\n\nimport torch\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom monai.utils import GridSamplePadMode\n\n\nclass Warp(nn.Module):\n \"\"\"\n Warp an image with given DDF.\n \"\"\"\n\n def __init__(\n self,\n spatial_dims: int,\n mode: int = 1,\n padding_mode: Optional[Union[GridSamplePadMode, str]] = GridSamplePadMode.ZEROS,\n ):\n \"\"\"\n Args:\n spatial_dims: {2, 3}. number of spatial dimensions\n mode: interpolation mode to calculate output values, defaults to 1.\n Possible values are::\n\n - 0 or 'nearest' or InterpolationType.nearest\n - 1 or 'linear' or InterpolationType.linear\n - 2 or 'quadratic' or InterpolationType.quadratic\n - 3 or 'cubic' or InterpolationType.cubic\n - 4 or 'fourth' or InterpolationType.fourth\n - etc.\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. 
Defaults to ``\"border\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n \"\"\"\n super(Warp, self).__init__()\n if spatial_dims not in [2, 3]:\n raise ValueError(f\"got unsupported spatial_dims={spatial_dims}, only support 2-d and 3-d input\")\n self.spatial_dims = spatial_dims\n if mode < 0:\n raise ValueError(f\"do not support negative mode, got mode={mode}\")\n self.mode = mode\n self.padding_mode: GridSamplePadMode = GridSamplePadMode(padding_mode)\n\n @staticmethod\n def get_reference_grid(ddf: torch.Tensor) -> torch.Tensor:\n mesh_points = [torch.arange(0, dim) for dim in ddf.shape[2:]]\n grid = torch.stack(torch.meshgrid(*mesh_points), dim=0) # (spatial_dims, ...)\n grid = torch.stack([grid] * ddf.shape[0], dim=0) # (batch, spatial_dims, ...)\n grid = grid.to(ddf)\n return grid\n\n @staticmethod\n def normalize_grid(grid: torch.Tensor) -> torch.Tensor:\n # (batch, ..., self.spatial_dims)\n for i, dim in enumerate(grid.shape[1:-1]):\n grid[..., i] = grid[..., i] * 2 / (dim - 1) - 1\n return grid\n\n def forward(self, image: torch.Tensor, ddf: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n image: Tensor in shape (batch, num_channels, H, W[, D])\n ddf: Tensor in the same spatial size as image, in shape (batch, ``spatial_dims``, H, W[, D])\n\n Returns:\n warped_image in the same shape as image (batch, num_channels, H, W[, D])\n \"\"\"\n if len(image.shape) != 2 + self.spatial_dims:\n raise ValueError(f\"expecting {self.spatial_dims + 2}-d input, \" f\"got input in shape {image.shape}\")\n if len(ddf.shape) != 2 + self.spatial_dims or ddf.shape[1] != self.spatial_dims:\n raise ValueError(\n f\"expecting {self.spatial_dims + 2}-d ddf with {self.spatial_dims} channels, \"\n f\"got ddf in shape {ddf.shape}\"\n )\n if image.shape[0] != ddf.shape[0] or image.shape[2:] != ddf.shape[2:]:\n raise ValueError(\n \"expecting image and ddf of same batch size and spatial size, \"\n f\"got image of shape {image.shape}, ddf of shape {ddf.shape}\"\n )\n\n grid = self.get_reference_grid(ddf) + ddf\n grid = grid.permute([0] + list(range(2, 2 + self.spatial_dims)) + [1]) # (batch, ..., self.spatial_dims)\n\n if self.mode > 1:\n raise ValueError(f\"{self.mode}-order interpolation not yet implemented.\")\n # if not USE_COMPILED:\n # raise ValueError(f\"cannot perform {self.mode}-order interpolation without C compile.\")\n # _padding_mode = self.padding_mode.value\n # if _padding_mode == \"zeros\":\n # bound = 7\n # elif _padding_mode == \"border\":\n # bound = 0\n # else:\n # bound = 1\n # warped_image: torch.Tensor = grid_pull(\n # image,\n # grid,\n # bound=bound,\n # extrapolate=True,\n # interpolation=self.mode,\n # )\n else:\n grid = self.normalize_grid(grid)\n index_ordering: List[int] = list(range(self.spatial_dims - 1, -1, -1))\n grid = grid[..., index_ordering] # z, y, x -> x, y, z\n _interp_mode = \"bilinear\" if self.mode == 1 else \"nearest\"\n warped_image = F.grid_sample(\n image, grid, mode=_interp_mode, padding_mode=self.padding_mode.value, align_corners=True\n )\n\n return warped_image\n\n\nclass DVF2DDF(nn.Module):\n \"\"\"\n Layer calculates a dense velocity field (DVF) from a dense displacement field (DDF)\n with scaling and squaring.\n\n Adapted from:\n DeepReg (https://github.com/DeepRegNet/DeepReg)\n\n \"\"\"\n\n def __init__(\n self,\n spatial_dims: int,\n num_steps: int = 7,\n mode: int = 1,\n padding_mode: Optional[Union[GridSamplePadMode, str]] = GridSamplePadMode.ZEROS,\n ):\n super(DVF2DDF, self).__init__()\n if num_steps <= 0:\n raise 
ValueError(f\"expecting positive num_steps, got {num_steps}\")\n self.num_steps = num_steps\n self.warp_layer = Warp(spatial_dims=spatial_dims, mode=mode, padding_mode=padding_mode)\n\n def forward(self, dvf):\n \"\"\"\n Args:\n dvf: dvf to be transformed, in shape (batch, ``spatial_dims``, H, W[,D])\n\n Returns:\n\n \"\"\"\n ddf: torch.Tensor = dvf / (2 ** self.num_steps)\n for _ in range(self.num_steps):\n ddf = ddf + self.warp_layer(image=ddf, ddf=ddf)\n return ddf\n", "path": "monai/networks/blocks/warp.py"}]}
| 2,271 | 638 |
gh_patches_debug_5833
|
rasdani/github-patches
|
git_diff
|
cupy__cupy-499
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Different behaviour of argmax
I found that the behaviour of `argmax` is different between `numpy` and `cupy` if the array has `0` in its shape and the `axis` argument is used. I don't know whether this behaviour is intended or not.
```
np.empty((0, 1)).argmax(axis=1) # array([], dtype=int64)
cupy.empty((0, 1)).argmax(axis=1)
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-9-a5737d72bcba> in <module>()
----> 1 cupy.empty((0, 1)).argmax(axis=1)
cupy/core/core.pyx in cupy.core.core.ndarray.argmax (cupy/core/core.cpp:17701)()
cupy/core/core.pyx in cupy.core.core.ndarray.argmax (cupy/core/core.cpp:17556)()
cupy/core/reduction.pxi in cupy.core.core.simple_reduction_function.__call__ (cupy/core/core.cpp:52697)()
ValueError: zero-size array to reduction operation cupy_argmax which has no identity
```
I used cupy 2.0.0a1.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cupy/logic/truth.py`
Content:
```
1 def all(a, axis=None, out=None, keepdims=False):
2 # TODO(okuta): check type
3 return a.all(axis=axis, out=out, keepdims=keepdims)
4
5
6 def any(a, axis=None, out=None, keepdims=False):
7 # TODO(okuta): check type
8 return a.any(axis=axis, out=out, keepdims=keepdims)
9
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/cupy/logic/truth.py b/cupy/logic/truth.py
--- a/cupy/logic/truth.py
+++ b/cupy/logic/truth.py
@@ -1,8 +1,11 @@
+import cupy
+
+
def all(a, axis=None, out=None, keepdims=False):
- # TODO(okuta): check type
+ assert isinstance(a, cupy.ndarray)
return a.all(axis=axis, out=out, keepdims=keepdims)
def any(a, axis=None, out=None, keepdims=False):
- # TODO(okuta): check type
+ assert isinstance(a, cupy.ndarray)
return a.any(axis=axis, out=out, keepdims=keepdims)
|
{"golden_diff": "diff --git a/cupy/logic/truth.py b/cupy/logic/truth.py\n--- a/cupy/logic/truth.py\n+++ b/cupy/logic/truth.py\n@@ -1,8 +1,11 @@\n+import cupy\n+\n+\n def all(a, axis=None, out=None, keepdims=False):\n- # TODO(okuta): check type\n+ assert isinstance(a, cupy.ndarray)\n return a.all(axis=axis, out=out, keepdims=keepdims)\n \n \n def any(a, axis=None, out=None, keepdims=False):\n- # TODO(okuta): check type\n+ assert isinstance(a, cupy.ndarray)\n return a.any(axis=axis, out=out, keepdims=keepdims)\n", "issue": "Different behaviour of argmax\nI found the behaviour of `argmax` is different between `numpy` and `cupy` if the array has `0` in its shape and `axis` argument is used. I don't know whether this behaviour is intended or not.\r\n\r\n```\r\nnp.empty((0, 1)).argmax(axis=1) # array([], dtype=int64)\r\ncupy.empty((0, 1)).argmax(axis=1)\r\n---------------------------------------------------------------------------\r\nValueError Traceback (most recent call last)\r\n<ipython-input-9-a5737d72bcba> in <module>()\r\n----> 1 cupy.empty((0, 1)).argmax(axis=1)\r\n\r\ncupy/core/core.pyx in cupy.core.core.ndarray.argmax (cupy/core/core.cpp:17701)()\r\n\r\ncupy/core/core.pyx in cupy.core.core.ndarray.argmax (cupy/core/core.cpp:17556)()\r\n\r\ncupy/core/reduction.pxi in cupy.core.core.simple_reduction_function.__call__ (cupy/core/core.cpp:52697)()\r\n\r\nValueError: zero-size array to reduction operation cupy_argmax which has no identity\r\n```\r\n\r\nI used cupy 2.0.0a1.\n", "before_files": [{"content": "def all(a, axis=None, out=None, keepdims=False):\n # TODO(okuta): check type\n return a.all(axis=axis, out=out, keepdims=keepdims)\n\n\ndef any(a, axis=None, out=None, keepdims=False):\n # TODO(okuta): check type\n return a.any(axis=axis, out=out, keepdims=keepdims)\n", "path": "cupy/logic/truth.py"}], "after_files": [{"content": "import cupy\n\n\ndef all(a, axis=None, out=None, keepdims=False):\n assert isinstance(a, cupy.ndarray)\n return a.all(axis=axis, out=out, keepdims=keepdims)\n\n\ndef any(a, axis=None, out=None, keepdims=False):\n assert isinstance(a, cupy.ndarray)\n return a.any(axis=axis, out=out, keepdims=keepdims)\n", "path": "cupy/logic/truth.py"}]}
| 621 | 161 |
gh_patches_debug_33801
|
rasdani/github-patches
|
git_diff
|
open-telemetry__opentelemetry-python-contrib-1258
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Uninstrument Existing fastapi instances
Existing instances shouldn't continue to send any telemetry after the `uninstrument`.
_Originally posted by @srikanthccv in https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1199#discussion_r957094027_
The `uninstrument` method of `FastAPIInstrumentor` restores the original `fastapi.FastAPI` class, but all the existing objects of the earlier instrumented class do not get uninstrumented, which is not the expected behaviour.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `instrumentation/opentelemetry-instrumentation-fastapi/src/opentelemetry/instrumentation/fastapi/__init__.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """
16 Usage
17 -----
18
19 .. code-block:: python
20
21 import fastapi
22 from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
23
24 app = fastapi.FastAPI()
25
26 @app.get("/foobar")
27 async def foobar():
28 return {"message": "hello world"}
29
30 FastAPIInstrumentor.instrument_app(app)
31
32 Configuration
33 -------------
34
35 Exclude lists
36 *************
37 To exclude certain URLs from being tracked, set the environment variable ``OTEL_PYTHON_FASTAPI_EXCLUDED_URLS``
38 (or ``OTEL_PYTHON_EXCLUDED_URLS`` as fallback) with comma delimited regexes representing which URLs to exclude.
39
40 For example,
41
42 ::
43
44 export OTEL_PYTHON_FASTAPI_EXCLUDED_URLS="client/.*/info,healthcheck"
45
46 will exclude requests such as ``https://site/client/123/info`` and ``https://site/xyz/healthcheck``.
47
48 You can also pass the comma delimited regexes to the ``instrument_app`` method directly:
49
50 .. code-block:: python
51
52 FastAPIInstrumentor.instrument_app(app, excluded_urls="client/.*/info,healthcheck")
53
54 Request/Response hooks
55 **********************
56
57 Utilize request/response hooks to execute custom logic to be performed before/after performing a request. The server request hook takes in a server span and ASGI
58 scope object for every incoming request. The client request hook is called with the internal span and an ASGI scope which is sent as a dictionary for when the method receive is called.
59 The client response hook is called with the internal span and an ASGI event which is sent as a dictionary for when the method send is called.
60
61 .. code-block:: python
62
63 def server_request_hook(span: Span, scope: dict):
64 if span and span.is_recording():
65 span.set_attribute("custom_user_attribute_from_request_hook", "some-value")
66
67 def client_request_hook(span: Span, scope: dict):
68 if span and span.is_recording():
69 span.set_attribute("custom_user_attribute_from_client_request_hook", "some-value")
70
71 def client_response_hook(span: Span, message: dict):
72 if span and span.is_recording():
73 span.set_attribute("custom_user_attribute_from_response_hook", "some-value")
74
75 FastAPIInstrumentor().instrument(server_request_hook=server_request_hook, client_request_hook=client_request_hook, client_response_hook=client_response_hook)
76
77 Capture HTTP request and response headers
78 *****************************************
79 You can configure the agent to capture predefined HTTP headers as span attributes, according to the `semantic convention <https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/http.md#http-request-and-response-headers>`_.
80
81 Request headers
82 ***************
83 To capture predefined HTTP request headers as span attributes, set the environment variable ``OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST``
84 to a comma-separated list of HTTP header names.
85
86 For example,
87
88 ::
89
90 export OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST="content-type,custom_request_header"
91
92 will extract ``content-type`` and ``custom_request_header`` from request headers and add them as span attributes.
93
94 It is recommended that you should give the correct names of the headers to be captured in the environment variable.
95 Request header names in fastapi are case insensitive. So, giving header name as ``CUStom-Header`` in environment variable will be able capture header with name ``custom-header``.
96
97 The name of the added span attribute will follow the format ``http.request.header.<header_name>`` where ``<header_name>`` being the normalized HTTP header name (lowercase, with - characters replaced by _ ).
98 The value of the attribute will be single item list containing all the header values.
99
100 Example of the added span attribute,
101 ``http.request.header.custom_request_header = ["<value1>,<value2>"]``
102
103 Response headers
104 ****************
105 To capture predefined HTTP response headers as span attributes, set the environment variable ``OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE``
106 to a comma-separated list of HTTP header names.
107
108 For example,
109
110 ::
111
112 export OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE="content-type,custom_response_header"
113
114 will extract ``content-type`` and ``custom_response_header`` from response headers and add them as span attributes.
115
116 It is recommended that you should give the correct names of the headers to be captured in the environment variable.
117 Response header names captured in fastapi are case insensitive. So, giving header name as ``CUStomHeader`` in environment variable will be able capture header with name ``customheader``.
118
119 The name of the added span attribute will follow the format ``http.response.header.<header_name>`` where ``<header_name>`` being the normalized HTTP header name (lowercase, with - characters replaced by _ ).
120 The value of the attribute will be single item list containing all the header values.
121
122 Example of the added span attribute,
123 ``http.response.header.custom_response_header = ["<value1>,<value2>"]``
124
125 Note:
126 Environment variable names to capture http headers are still experimental, and thus are subject to change.
127
128 API
129 ---
130 """
131 import logging
132 import typing
133 from typing import Collection
134
135 import fastapi
136 from starlette.routing import Match
137
138 from opentelemetry.instrumentation.asgi import OpenTelemetryMiddleware
139 from opentelemetry.instrumentation.asgi.package import _instruments
140 from opentelemetry.instrumentation.fastapi.version import __version__
141 from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
142 from opentelemetry.metrics import get_meter
143 from opentelemetry.semconv.trace import SpanAttributes
144 from opentelemetry.trace import Span
145 from opentelemetry.util.http import get_excluded_urls, parse_excluded_urls
146
147 _excluded_urls_from_env = get_excluded_urls("FASTAPI")
148 _logger = logging.getLogger(__name__)
149
150 _ServerRequestHookT = typing.Optional[typing.Callable[[Span, dict], None]]
151 _ClientRequestHookT = typing.Optional[typing.Callable[[Span, dict], None]]
152 _ClientResponseHookT = typing.Optional[typing.Callable[[Span, dict], None]]
153
154
155 class FastAPIInstrumentor(BaseInstrumentor):
156 """An instrumentor for FastAPI
157
158 See `BaseInstrumentor`
159 """
160
161 _original_fastapi = None
162
163 @staticmethod
164 def instrument_app(
165 app: fastapi.FastAPI,
166 server_request_hook: _ServerRequestHookT = None,
167 client_request_hook: _ClientRequestHookT = None,
168 client_response_hook: _ClientResponseHookT = None,
169 tracer_provider=None,
170 meter_provider=None,
171 excluded_urls=None,
172 ):
173 """Instrument an uninstrumented FastAPI application."""
174 if not hasattr(app, "_is_instrumented_by_opentelemetry"):
175 app._is_instrumented_by_opentelemetry = False
176
177 if not getattr(app, "_is_instrumented_by_opentelemetry", False):
178 if excluded_urls is None:
179 excluded_urls = _excluded_urls_from_env
180 else:
181 excluded_urls = parse_excluded_urls(excluded_urls)
182 meter = get_meter(__name__, __version__, meter_provider)
183
184 app.add_middleware(
185 OpenTelemetryMiddleware,
186 excluded_urls=excluded_urls,
187 default_span_details=_get_route_details,
188 server_request_hook=server_request_hook,
189 client_request_hook=client_request_hook,
190 client_response_hook=client_response_hook,
191 tracer_provider=tracer_provider,
192 meter=meter,
193 )
194 app._is_instrumented_by_opentelemetry = True
195 else:
196 _logger.warning(
197 "Attempting to instrument FastAPI app while already instrumented"
198 )
199
200 @staticmethod
201 def uninstrument_app(app: fastapi.FastAPI):
202 app.user_middleware = [
203 x
204 for x in app.user_middleware
205 if x.cls is not OpenTelemetryMiddleware
206 ]
207 app.middleware_stack = app.build_middleware_stack()
208 app._is_instrumented_by_opentelemetry = False
209
210 def instrumentation_dependencies(self) -> Collection[str]:
211 return _instruments
212
213 def _instrument(self, **kwargs):
214 self._original_fastapi = fastapi.FastAPI
215 _InstrumentedFastAPI._tracer_provider = kwargs.get("tracer_provider")
216 _InstrumentedFastAPI._server_request_hook = kwargs.get(
217 "server_request_hook"
218 )
219 _InstrumentedFastAPI._client_request_hook = kwargs.get(
220 "client_request_hook"
221 )
222 _InstrumentedFastAPI._client_response_hook = kwargs.get(
223 "client_response_hook"
224 )
225 _excluded_urls = kwargs.get("excluded_urls")
226 _InstrumentedFastAPI._excluded_urls = (
227 _excluded_urls_from_env
228 if _excluded_urls is None
229 else parse_excluded_urls(_excluded_urls)
230 )
231 _InstrumentedFastAPI._meter_provider = kwargs.get("meter_provider")
232 fastapi.FastAPI = _InstrumentedFastAPI
233
234 def _uninstrument(self, **kwargs):
235 fastapi.FastAPI = self._original_fastapi
236
237
238 class _InstrumentedFastAPI(fastapi.FastAPI):
239 _tracer_provider = None
240 _meter_provider = None
241 _excluded_urls = None
242 _server_request_hook: _ServerRequestHookT = None
243 _client_request_hook: _ClientRequestHookT = None
244 _client_response_hook: _ClientResponseHookT = None
245
246 def __init__(self, *args, **kwargs):
247 super().__init__(*args, **kwargs)
248 meter = get_meter(
249 __name__, __version__, _InstrumentedFastAPI._meter_provider
250 )
251 self.add_middleware(
252 OpenTelemetryMiddleware,
253 excluded_urls=_InstrumentedFastAPI._excluded_urls,
254 default_span_details=_get_route_details,
255 server_request_hook=_InstrumentedFastAPI._server_request_hook,
256 client_request_hook=_InstrumentedFastAPI._client_request_hook,
257 client_response_hook=_InstrumentedFastAPI._client_response_hook,
258 tracer_provider=_InstrumentedFastAPI._tracer_provider,
259 meter=meter,
260 )
261 self._is_instrumented_by_opentelemetry = True
262
263
264 def _get_route_details(scope):
265 """Callback to retrieve the fastapi route being served.
266
267 TODO: there is currently no way to retrieve http.route from
268 a starlette application from scope.
269
270 See: https://github.com/encode/starlette/pull/804
271 """
272 app = scope["app"]
273 route = None
274 for starlette_route in app.routes:
275 match, _ = starlette_route.matches(scope)
276 if match == Match.FULL:
277 route = starlette_route.path
278 break
279 if match == Match.PARTIAL:
280 route = starlette_route.path
281 # method only exists for http, if websocket
282 # leave it blank.
283 span_name = route or scope.get("method", "")
284 attributes = {}
285 if route:
286 attributes[SpanAttributes.HTTP_ROUTE] = route
287 return span_name, attributes
288
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/instrumentation/opentelemetry-instrumentation-fastapi/src/opentelemetry/instrumentation/fastapi/__init__.py b/instrumentation/opentelemetry-instrumentation-fastapi/src/opentelemetry/instrumentation/fastapi/__init__.py
--- a/instrumentation/opentelemetry-instrumentation-fastapi/src/opentelemetry/instrumentation/fastapi/__init__.py
+++ b/instrumentation/opentelemetry-instrumentation-fastapi/src/opentelemetry/instrumentation/fastapi/__init__.py
@@ -192,6 +192,8 @@
meter=meter,
)
app._is_instrumented_by_opentelemetry = True
+ if app not in _InstrumentedFastAPI._instrumented_fastapi_apps:
+ _InstrumentedFastAPI._instrumented_fastapi_apps.add(app)
else:
_logger.warning(
"Attempting to instrument FastAPI app while already instrumented"
@@ -232,6 +234,9 @@
fastapi.FastAPI = _InstrumentedFastAPI
def _uninstrument(self, **kwargs):
+ for instance in _InstrumentedFastAPI._instrumented_fastapi_apps:
+ self.uninstrument_app(instance)
+ _InstrumentedFastAPI._instrumented_fastapi_apps.clear()
fastapi.FastAPI = self._original_fastapi
@@ -242,6 +247,7 @@
_server_request_hook: _ServerRequestHookT = None
_client_request_hook: _ClientRequestHookT = None
_client_response_hook: _ClientResponseHookT = None
+ _instrumented_fastapi_apps = set()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -259,6 +265,11 @@
meter=meter,
)
self._is_instrumented_by_opentelemetry = True
+ _InstrumentedFastAPI._instrumented_fastapi_apps.add(self)
+
+ def __del__(self):
+ if self in _InstrumentedFastAPI._instrumented_fastapi_apps:
+ _InstrumentedFastAPI._instrumented_fastapi_apps.remove(self)
def _get_route_details(scope):
|
{"golden_diff": "diff --git a/instrumentation/opentelemetry-instrumentation-fastapi/src/opentelemetry/instrumentation/fastapi/__init__.py b/instrumentation/opentelemetry-instrumentation-fastapi/src/opentelemetry/instrumentation/fastapi/__init__.py\n--- a/instrumentation/opentelemetry-instrumentation-fastapi/src/opentelemetry/instrumentation/fastapi/__init__.py\n+++ b/instrumentation/opentelemetry-instrumentation-fastapi/src/opentelemetry/instrumentation/fastapi/__init__.py\n@@ -192,6 +192,8 @@\n meter=meter,\n )\n app._is_instrumented_by_opentelemetry = True\n+ if app not in _InstrumentedFastAPI._instrumented_fastapi_apps:\n+ _InstrumentedFastAPI._instrumented_fastapi_apps.add(app)\n else:\n _logger.warning(\n \"Attempting to instrument FastAPI app while already instrumented\"\n@@ -232,6 +234,9 @@\n fastapi.FastAPI = _InstrumentedFastAPI\n \n def _uninstrument(self, **kwargs):\n+ for instance in _InstrumentedFastAPI._instrumented_fastapi_apps:\n+ self.uninstrument_app(instance)\n+ _InstrumentedFastAPI._instrumented_fastapi_apps.clear()\n fastapi.FastAPI = self._original_fastapi\n \n \n@@ -242,6 +247,7 @@\n _server_request_hook: _ServerRequestHookT = None\n _client_request_hook: _ClientRequestHookT = None\n _client_response_hook: _ClientResponseHookT = None\n+ _instrumented_fastapi_apps = set()\n \n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n@@ -259,6 +265,11 @@\n meter=meter,\n )\n self._is_instrumented_by_opentelemetry = True\n+ _InstrumentedFastAPI._instrumented_fastapi_apps.add(self)\n+\n+ def __del__(self):\n+ if self in _InstrumentedFastAPI._instrumented_fastapi_apps:\n+ _InstrumentedFastAPI._instrumented_fastapi_apps.remove(self)\n \n \n def _get_route_details(scope):\n", "issue": "Uninstrument Existing fastapi instances\nExisting instances shouldn't continue to send any telemetry after the `uninstrument`.\r\n\r\n_Originally posted by @srikanthccv in https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1199#discussion_r957094027_\r\n\r\n`uninstrument` method of `FastAPIInstrumentor` restore the original `fastapi.FastAPI` class, but all the existing objects of the earlier instrumented class do not get uninstrumented, which is not the expected behaviour.\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nUsage\n-----\n\n.. 
code-block:: python\n\n import fastapi\n from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor\n\n app = fastapi.FastAPI()\n\n @app.get(\"/foobar\")\n async def foobar():\n return {\"message\": \"hello world\"}\n\n FastAPIInstrumentor.instrument_app(app)\n\nConfiguration\n-------------\n\nExclude lists\n*************\nTo exclude certain URLs from being tracked, set the environment variable ``OTEL_PYTHON_FASTAPI_EXCLUDED_URLS``\n(or ``OTEL_PYTHON_EXCLUDED_URLS`` as fallback) with comma delimited regexes representing which URLs to exclude.\n\nFor example,\n\n::\n\n export OTEL_PYTHON_FASTAPI_EXCLUDED_URLS=\"client/.*/info,healthcheck\"\n\nwill exclude requests such as ``https://site/client/123/info`` and ``https://site/xyz/healthcheck``.\n\nYou can also pass the comma delimited regexes to the ``instrument_app`` method directly:\n\n.. code-block:: python\n\n FastAPIInstrumentor.instrument_app(app, excluded_urls=\"client/.*/info,healthcheck\")\n\nRequest/Response hooks\n**********************\n\nUtilize request/response hooks to execute custom logic to be performed before/after performing a request. The server request hook takes in a server span and ASGI\nscope object for every incoming request. The client request hook is called with the internal span and an ASGI scope which is sent as a dictionary for when the method receive is called.\nThe client response hook is called with the internal span and an ASGI event which is sent as a dictionary for when the method send is called.\n\n.. code-block:: python\n\n def server_request_hook(span: Span, scope: dict):\n if span and span.is_recording():\n span.set_attribute(\"custom_user_attribute_from_request_hook\", \"some-value\")\n\n def client_request_hook(span: Span, scope: dict):\n if span and span.is_recording():\n span.set_attribute(\"custom_user_attribute_from_client_request_hook\", \"some-value\")\n\n def client_response_hook(span: Span, message: dict):\n if span and span.is_recording():\n span.set_attribute(\"custom_user_attribute_from_response_hook\", \"some-value\")\n\n FastAPIInstrumentor().instrument(server_request_hook=server_request_hook, client_request_hook=client_request_hook, client_response_hook=client_response_hook)\n\nCapture HTTP request and response headers\n*****************************************\nYou can configure the agent to capture predefined HTTP headers as span attributes, according to the `semantic convention <https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/http.md#http-request-and-response-headers>`_.\n\nRequest headers\n***************\nTo capture predefined HTTP request headers as span attributes, set the environment variable ``OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST``\nto a comma-separated list of HTTP header names.\n\nFor example,\n\n::\n\n export OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST=\"content-type,custom_request_header\"\n\nwill extract ``content-type`` and ``custom_request_header`` from request headers and add them as span attributes.\n\nIt is recommended that you should give the correct names of the headers to be captured in the environment variable.\nRequest header names in fastapi are case insensitive. 
So, giving header name as ``CUStom-Header`` in environment variable will be able capture header with name ``custom-header``.\n\nThe name of the added span attribute will follow the format ``http.request.header.<header_name>`` where ``<header_name>`` being the normalized HTTP header name (lowercase, with - characters replaced by _ ).\nThe value of the attribute will be single item list containing all the header values.\n\nExample of the added span attribute,\n``http.request.header.custom_request_header = [\"<value1>,<value2>\"]``\n\nResponse headers\n****************\nTo capture predefined HTTP response headers as span attributes, set the environment variable ``OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE``\nto a comma-separated list of HTTP header names.\n\nFor example,\n\n::\n\n export OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE=\"content-type,custom_response_header\"\n\nwill extract ``content-type`` and ``custom_response_header`` from response headers and add them as span attributes.\n\nIt is recommended that you should give the correct names of the headers to be captured in the environment variable.\nResponse header names captured in fastapi are case insensitive. So, giving header name as ``CUStomHeader`` in environment variable will be able capture header with name ``customheader``.\n\nThe name of the added span attribute will follow the format ``http.response.header.<header_name>`` where ``<header_name>`` being the normalized HTTP header name (lowercase, with - characters replaced by _ ).\nThe value of the attribute will be single item list containing all the header values.\n\nExample of the added span attribute,\n``http.response.header.custom_response_header = [\"<value1>,<value2>\"]``\n\nNote:\n Environment variable names to capture http headers are still experimental, and thus are subject to change.\n\nAPI\n---\n\"\"\"\nimport logging\nimport typing\nfrom typing import Collection\n\nimport fastapi\nfrom starlette.routing import Match\n\nfrom opentelemetry.instrumentation.asgi import OpenTelemetryMiddleware\nfrom opentelemetry.instrumentation.asgi.package import _instruments\nfrom opentelemetry.instrumentation.fastapi.version import __version__\nfrom opentelemetry.instrumentation.instrumentor import BaseInstrumentor\nfrom opentelemetry.metrics import get_meter\nfrom opentelemetry.semconv.trace import SpanAttributes\nfrom opentelemetry.trace import Span\nfrom opentelemetry.util.http import get_excluded_urls, parse_excluded_urls\n\n_excluded_urls_from_env = get_excluded_urls(\"FASTAPI\")\n_logger = logging.getLogger(__name__)\n\n_ServerRequestHookT = typing.Optional[typing.Callable[[Span, dict], None]]\n_ClientRequestHookT = typing.Optional[typing.Callable[[Span, dict], None]]\n_ClientResponseHookT = typing.Optional[typing.Callable[[Span, dict], None]]\n\n\nclass FastAPIInstrumentor(BaseInstrumentor):\n \"\"\"An instrumentor for FastAPI\n\n See `BaseInstrumentor`\n \"\"\"\n\n _original_fastapi = None\n\n @staticmethod\n def instrument_app(\n app: fastapi.FastAPI,\n server_request_hook: _ServerRequestHookT = None,\n client_request_hook: _ClientRequestHookT = None,\n client_response_hook: _ClientResponseHookT = None,\n tracer_provider=None,\n meter_provider=None,\n excluded_urls=None,\n ):\n \"\"\"Instrument an uninstrumented FastAPI application.\"\"\"\n if not hasattr(app, \"_is_instrumented_by_opentelemetry\"):\n app._is_instrumented_by_opentelemetry = False\n\n if not getattr(app, \"_is_instrumented_by_opentelemetry\", False):\n if excluded_urls is None:\n 
excluded_urls = _excluded_urls_from_env\n else:\n excluded_urls = parse_excluded_urls(excluded_urls)\n meter = get_meter(__name__, __version__, meter_provider)\n\n app.add_middleware(\n OpenTelemetryMiddleware,\n excluded_urls=excluded_urls,\n default_span_details=_get_route_details,\n server_request_hook=server_request_hook,\n client_request_hook=client_request_hook,\n client_response_hook=client_response_hook,\n tracer_provider=tracer_provider,\n meter=meter,\n )\n app._is_instrumented_by_opentelemetry = True\n else:\n _logger.warning(\n \"Attempting to instrument FastAPI app while already instrumented\"\n )\n\n @staticmethod\n def uninstrument_app(app: fastapi.FastAPI):\n app.user_middleware = [\n x\n for x in app.user_middleware\n if x.cls is not OpenTelemetryMiddleware\n ]\n app.middleware_stack = app.build_middleware_stack()\n app._is_instrumented_by_opentelemetry = False\n\n def instrumentation_dependencies(self) -> Collection[str]:\n return _instruments\n\n def _instrument(self, **kwargs):\n self._original_fastapi = fastapi.FastAPI\n _InstrumentedFastAPI._tracer_provider = kwargs.get(\"tracer_provider\")\n _InstrumentedFastAPI._server_request_hook = kwargs.get(\n \"server_request_hook\"\n )\n _InstrumentedFastAPI._client_request_hook = kwargs.get(\n \"client_request_hook\"\n )\n _InstrumentedFastAPI._client_response_hook = kwargs.get(\n \"client_response_hook\"\n )\n _excluded_urls = kwargs.get(\"excluded_urls\")\n _InstrumentedFastAPI._excluded_urls = (\n _excluded_urls_from_env\n if _excluded_urls is None\n else parse_excluded_urls(_excluded_urls)\n )\n _InstrumentedFastAPI._meter_provider = kwargs.get(\"meter_provider\")\n fastapi.FastAPI = _InstrumentedFastAPI\n\n def _uninstrument(self, **kwargs):\n fastapi.FastAPI = self._original_fastapi\n\n\nclass _InstrumentedFastAPI(fastapi.FastAPI):\n _tracer_provider = None\n _meter_provider = None\n _excluded_urls = None\n _server_request_hook: _ServerRequestHookT = None\n _client_request_hook: _ClientRequestHookT = None\n _client_response_hook: _ClientResponseHookT = None\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n meter = get_meter(\n __name__, __version__, _InstrumentedFastAPI._meter_provider\n )\n self.add_middleware(\n OpenTelemetryMiddleware,\n excluded_urls=_InstrumentedFastAPI._excluded_urls,\n default_span_details=_get_route_details,\n server_request_hook=_InstrumentedFastAPI._server_request_hook,\n client_request_hook=_InstrumentedFastAPI._client_request_hook,\n client_response_hook=_InstrumentedFastAPI._client_response_hook,\n tracer_provider=_InstrumentedFastAPI._tracer_provider,\n meter=meter,\n )\n self._is_instrumented_by_opentelemetry = True\n\n\ndef _get_route_details(scope):\n \"\"\"Callback to retrieve the fastapi route being served.\n\n TODO: there is currently no way to retrieve http.route from\n a starlette application from scope.\n\n See: https://github.com/encode/starlette/pull/804\n \"\"\"\n app = scope[\"app\"]\n route = None\n for starlette_route in app.routes:\n match, _ = starlette_route.matches(scope)\n if match == Match.FULL:\n route = starlette_route.path\n break\n if match == Match.PARTIAL:\n route = starlette_route.path\n # method only exists for http, if websocket\n # leave it blank.\n span_name = route or scope.get(\"method\", \"\")\n attributes = {}\n if route:\n attributes[SpanAttributes.HTTP_ROUTE] = route\n return span_name, attributes\n", "path": "instrumentation/opentelemetry-instrumentation-fastapi/src/opentelemetry/instrumentation/fastapi/__init__.py"}], 
"after_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nUsage\n-----\n\n.. code-block:: python\n\n import fastapi\n from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor\n\n app = fastapi.FastAPI()\n\n @app.get(\"/foobar\")\n async def foobar():\n return {\"message\": \"hello world\"}\n\n FastAPIInstrumentor.instrument_app(app)\n\nConfiguration\n-------------\n\nExclude lists\n*************\nTo exclude certain URLs from being tracked, set the environment variable ``OTEL_PYTHON_FASTAPI_EXCLUDED_URLS``\n(or ``OTEL_PYTHON_EXCLUDED_URLS`` as fallback) with comma delimited regexes representing which URLs to exclude.\n\nFor example,\n\n::\n\n export OTEL_PYTHON_FASTAPI_EXCLUDED_URLS=\"client/.*/info,healthcheck\"\n\nwill exclude requests such as ``https://site/client/123/info`` and ``https://site/xyz/healthcheck``.\n\nYou can also pass the comma delimited regexes to the ``instrument_app`` method directly:\n\n.. code-block:: python\n\n FastAPIInstrumentor.instrument_app(app, excluded_urls=\"client/.*/info,healthcheck\")\n\nRequest/Response hooks\n**********************\n\nUtilize request/response hooks to execute custom logic to be performed before/after performing a request. The server request hook takes in a server span and ASGI\nscope object for every incoming request. The client request hook is called with the internal span and an ASGI scope which is sent as a dictionary for when the method receive is called.\nThe client response hook is called with the internal span and an ASGI event which is sent as a dictionary for when the method send is called.\n\n.. 
code-block:: python\n\n def server_request_hook(span: Span, scope: dict):\n if span and span.is_recording():\n span.set_attribute(\"custom_user_attribute_from_request_hook\", \"some-value\")\n\n def client_request_hook(span: Span, scope: dict):\n if span and span.is_recording():\n span.set_attribute(\"custom_user_attribute_from_client_request_hook\", \"some-value\")\n\n def client_response_hook(span: Span, message: dict):\n if span and span.is_recording():\n span.set_attribute(\"custom_user_attribute_from_response_hook\", \"some-value\")\n\n FastAPIInstrumentor().instrument(server_request_hook=server_request_hook, client_request_hook=client_request_hook, client_response_hook=client_response_hook)\n\nCapture HTTP request and response headers\n*****************************************\nYou can configure the agent to capture predefined HTTP headers as span attributes, according to the `semantic convention <https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/http.md#http-request-and-response-headers>`_.\n\nRequest headers\n***************\nTo capture predefined HTTP request headers as span attributes, set the environment variable ``OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST``\nto a comma-separated list of HTTP header names.\n\nFor example,\n\n::\n\n export OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST=\"content-type,custom_request_header\"\n\nwill extract ``content-type`` and ``custom_request_header`` from request headers and add them as span attributes.\n\nIt is recommended that you should give the correct names of the headers to be captured in the environment variable.\nRequest header names in fastapi are case insensitive. So, giving header name as ``CUStom-Header`` in environment variable will be able capture header with name ``custom-header``.\n\nThe name of the added span attribute will follow the format ``http.request.header.<header_name>`` where ``<header_name>`` being the normalized HTTP header name (lowercase, with - characters replaced by _ ).\nThe value of the attribute will be single item list containing all the header values.\n\nExample of the added span attribute,\n``http.request.header.custom_request_header = [\"<value1>,<value2>\"]``\n\nResponse headers\n****************\nTo capture predefined HTTP response headers as span attributes, set the environment variable ``OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE``\nto a comma-separated list of HTTP header names.\n\nFor example,\n\n::\n\n export OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE=\"content-type,custom_response_header\"\n\nwill extract ``content-type`` and ``custom_response_header`` from response headers and add them as span attributes.\n\nIt is recommended that you should give the correct names of the headers to be captured in the environment variable.\nResponse header names captured in fastapi are case insensitive. 
So, giving header name as ``CUStomHeader`` in environment variable will be able capture header with name ``customheader``.\n\nThe name of the added span attribute will follow the format ``http.response.header.<header_name>`` where ``<header_name>`` being the normalized HTTP header name (lowercase, with - characters replaced by _ ).\nThe value of the attribute will be single item list containing all the header values.\n\nExample of the added span attribute,\n``http.response.header.custom_response_header = [\"<value1>,<value2>\"]``\n\nNote:\n Environment variable names to capture http headers are still experimental, and thus are subject to change.\n\nAPI\n---\n\"\"\"\nimport logging\nimport typing\nfrom typing import Collection\n\nimport fastapi\nfrom starlette.routing import Match\n\nfrom opentelemetry.instrumentation.asgi import OpenTelemetryMiddleware\nfrom opentelemetry.instrumentation.asgi.package import _instruments\nfrom opentelemetry.instrumentation.fastapi.version import __version__\nfrom opentelemetry.instrumentation.instrumentor import BaseInstrumentor\nfrom opentelemetry.metrics import get_meter\nfrom opentelemetry.semconv.trace import SpanAttributes\nfrom opentelemetry.trace import Span\nfrom opentelemetry.util.http import get_excluded_urls, parse_excluded_urls\n\n_excluded_urls_from_env = get_excluded_urls(\"FASTAPI\")\n_logger = logging.getLogger(__name__)\n\n_ServerRequestHookT = typing.Optional[typing.Callable[[Span, dict], None]]\n_ClientRequestHookT = typing.Optional[typing.Callable[[Span, dict], None]]\n_ClientResponseHookT = typing.Optional[typing.Callable[[Span, dict], None]]\n\n\nclass FastAPIInstrumentor(BaseInstrumentor):\n \"\"\"An instrumentor for FastAPI\n\n See `BaseInstrumentor`\n \"\"\"\n\n _original_fastapi = None\n\n @staticmethod\n def instrument_app(\n app: fastapi.FastAPI,\n server_request_hook: _ServerRequestHookT = None,\n client_request_hook: _ClientRequestHookT = None,\n client_response_hook: _ClientResponseHookT = None,\n tracer_provider=None,\n meter_provider=None,\n excluded_urls=None,\n ):\n \"\"\"Instrument an uninstrumented FastAPI application.\"\"\"\n if not hasattr(app, \"_is_instrumented_by_opentelemetry\"):\n app._is_instrumented_by_opentelemetry = False\n\n if not getattr(app, \"_is_instrumented_by_opentelemetry\", False):\n if excluded_urls is None:\n excluded_urls = _excluded_urls_from_env\n else:\n excluded_urls = parse_excluded_urls(excluded_urls)\n meter = get_meter(__name__, __version__, meter_provider)\n\n app.add_middleware(\n OpenTelemetryMiddleware,\n excluded_urls=excluded_urls,\n default_span_details=_get_route_details,\n server_request_hook=server_request_hook,\n client_request_hook=client_request_hook,\n client_response_hook=client_response_hook,\n tracer_provider=tracer_provider,\n meter=meter,\n )\n app._is_instrumented_by_opentelemetry = True\n if app not in _InstrumentedFastAPI._instrumented_fastapi_apps:\n _InstrumentedFastAPI._instrumented_fastapi_apps.add(app)\n else:\n _logger.warning(\n \"Attempting to instrument FastAPI app while already instrumented\"\n )\n\n @staticmethod\n def uninstrument_app(app: fastapi.FastAPI):\n app.user_middleware = [\n x\n for x in app.user_middleware\n if x.cls is not OpenTelemetryMiddleware\n ]\n app.middleware_stack = app.build_middleware_stack()\n app._is_instrumented_by_opentelemetry = False\n\n def instrumentation_dependencies(self) -> Collection[str]:\n return _instruments\n\n def _instrument(self, **kwargs):\n self._original_fastapi = fastapi.FastAPI\n 
_InstrumentedFastAPI._tracer_provider = kwargs.get(\"tracer_provider\")\n _InstrumentedFastAPI._server_request_hook = kwargs.get(\n \"server_request_hook\"\n )\n _InstrumentedFastAPI._client_request_hook = kwargs.get(\n \"client_request_hook\"\n )\n _InstrumentedFastAPI._client_response_hook = kwargs.get(\n \"client_response_hook\"\n )\n _excluded_urls = kwargs.get(\"excluded_urls\")\n _InstrumentedFastAPI._excluded_urls = (\n _excluded_urls_from_env\n if _excluded_urls is None\n else parse_excluded_urls(_excluded_urls)\n )\n _InstrumentedFastAPI._meter_provider = kwargs.get(\"meter_provider\")\n fastapi.FastAPI = _InstrumentedFastAPI\n\n def _uninstrument(self, **kwargs):\n for instance in _InstrumentedFastAPI._instrumented_fastapi_apps:\n self.uninstrument_app(instance)\n _InstrumentedFastAPI._instrumented_fastapi_apps.clear()\n fastapi.FastAPI = self._original_fastapi\n\n\nclass _InstrumentedFastAPI(fastapi.FastAPI):\n _tracer_provider = None\n _meter_provider = None\n _excluded_urls = None\n _server_request_hook: _ServerRequestHookT = None\n _client_request_hook: _ClientRequestHookT = None\n _client_response_hook: _ClientResponseHookT = None\n _instrumented_fastapi_apps = set()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n meter = get_meter(\n __name__, __version__, _InstrumentedFastAPI._meter_provider\n )\n self.add_middleware(\n OpenTelemetryMiddleware,\n excluded_urls=_InstrumentedFastAPI._excluded_urls,\n default_span_details=_get_route_details,\n server_request_hook=_InstrumentedFastAPI._server_request_hook,\n client_request_hook=_InstrumentedFastAPI._client_request_hook,\n client_response_hook=_InstrumentedFastAPI._client_response_hook,\n tracer_provider=_InstrumentedFastAPI._tracer_provider,\n meter=meter,\n )\n self._is_instrumented_by_opentelemetry = True\n _InstrumentedFastAPI._instrumented_fastapi_apps.add(self)\n\n def __del__(self):\n if self in _InstrumentedFastAPI._instrumented_fastapi_apps:\n _InstrumentedFastAPI._instrumented_fastapi_apps.remove(self)\n\n\ndef _get_route_details(scope):\n \"\"\"Callback to retrieve the fastapi route being served.\n\n TODO: there is currently no way to retrieve http.route from\n a starlette application from scope.\n\n See: https://github.com/encode/starlette/pull/804\n \"\"\"\n app = scope[\"app\"]\n route = None\n for starlette_route in app.routes:\n match, _ = starlette_route.matches(scope)\n if match == Match.FULL:\n route = starlette_route.path\n break\n if match == Match.PARTIAL:\n route = starlette_route.path\n # method only exists for http, if websocket\n # leave it blank.\n span_name = route or scope.get(\"method\", \"\")\n attributes = {}\n if route:\n attributes[SpanAttributes.HTTP_ROUTE] = route\n return span_name, attributes\n", "path": "instrumentation/opentelemetry-instrumentation-fastapi/src/opentelemetry/instrumentation/fastapi/__init__.py"}]}
| 3,621 | 482 |
gh_patches_debug_42424
|
rasdani/github-patches
|
git_diff
|
python-discord__bot-1452
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Delete messages when `!pypi` module is invalid
When users use the `!pypi` command with an invalid module, you currently get a response saying `"Package could not be found."`.
What I'm proposing is that after a delay (I believe we have `bot.constants.RedirectOutput.delete_delay` for this) the bot should delete both its response and the invocation message, like [`!d` currently does](https://github.com/python-discord/bot/blob/master/bot/exts/info/doc.py#L363-L365).
This would help reduce channel flooding when someone is trying to remember the name of a module, and since it's not a trivial change I'm happy to try and implement this myself if it's something we decide we want.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bot/exts/info/pypi.py`
Content:
```
1 import itertools
2 import logging
3 import random
4 import re
5
6 from discord import Embed
7 from discord.ext.commands import Cog, Context, command
8 from discord.utils import escape_markdown
9
10 from bot.bot import Bot
11 from bot.constants import Colours, NEGATIVE_REPLIES
12
13 URL = "https://pypi.org/pypi/{package}/json"
14 FIELDS = ("author", "requires_python", "summary", "license")
15 PYPI_ICON = "https://cdn.discordapp.com/emojis/766274397257334814.png"
16
17 PYPI_COLOURS = itertools.cycle((Colours.yellow, Colours.blue, Colours.white))
18
19 ILLEGAL_CHARACTERS = re.compile(r"[^a-zA-Z0-9-.]+")
20
21 log = logging.getLogger(__name__)
22
23
24 class PyPi(Cog):
25 """Cog for getting information about PyPi packages."""
26
27 def __init__(self, bot: Bot):
28 self.bot = bot
29
30 @command(name="pypi", aliases=("package", "pack"))
31 async def get_package_info(self, ctx: Context, package: str) -> None:
32 """Provide information about a specific package from PyPI."""
33 embed = Embed(
34 title=random.choice(NEGATIVE_REPLIES),
35 colour=Colours.soft_red
36 )
37 embed.set_thumbnail(url=PYPI_ICON)
38
39 if (character := re.search(ILLEGAL_CHARACTERS, package)) is not None:
40 embed.description = f"Illegal character passed into command: '{escape_markdown(character.group(0))}'"
41 await ctx.send(embed=embed)
42 return
43
44 async with self.bot.http_session.get(URL.format(package=package)) as response:
45 if response.status == 404:
46 embed.description = "Package could not be found."
47
48 elif response.status == 200 and response.content_type == "application/json":
49 response_json = await response.json()
50 info = response_json["info"]
51
52 embed.title = f"{info['name']} v{info['version']}"
53 embed.url = info['package_url']
54 embed.colour = next(PYPI_COLOURS)
55
56 for field in FIELDS:
57 field_data = info[field]
58
59 # Field could be completely empty, in some cases can be a string with whitespaces, or None.
60 if field_data and not field_data.isspace():
61 if '\n' in field_data and field == "license":
62 field_data = field_data.split('\n')[0]
63
64 embed.add_field(
65 name=field.replace("_", " ").title(),
66 value=escape_markdown(field_data),
67 inline=False,
68 )
69
70 else:
71 embed.description = "There was an error when fetching your PyPi package."
72 log.trace(f"Error when fetching PyPi package: {response.status}.")
73
74 await ctx.send(embed=embed)
75
76
77 def setup(bot: Bot) -> None:
78 """Load the PyPi cog."""
79 bot.add_cog(PyPi(bot))
80
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/bot/exts/info/pypi.py b/bot/exts/info/pypi.py
--- a/bot/exts/info/pypi.py
+++ b/bot/exts/info/pypi.py
@@ -8,7 +8,7 @@
from discord.utils import escape_markdown
from bot.bot import Bot
-from bot.constants import Colours, NEGATIVE_REPLIES
+from bot.constants import Colours, NEGATIVE_REPLIES, RedirectOutput
URL = "https://pypi.org/pypi/{package}/json"
FIELDS = ("author", "requires_python", "summary", "license")
@@ -17,6 +17,7 @@
PYPI_COLOURS = itertools.cycle((Colours.yellow, Colours.blue, Colours.white))
ILLEGAL_CHARACTERS = re.compile(r"[^a-zA-Z0-9-.]+")
+INVALID_INPUT_DELETE_DELAY = RedirectOutput.delete_delay
log = logging.getLogger(__name__)
@@ -36,42 +37,49 @@
)
embed.set_thumbnail(url=PYPI_ICON)
+ error = True
+
if (character := re.search(ILLEGAL_CHARACTERS, package)) is not None:
embed.description = f"Illegal character passed into command: '{escape_markdown(character.group(0))}'"
- await ctx.send(embed=embed)
- return
- async with self.bot.http_session.get(URL.format(package=package)) as response:
- if response.status == 404:
- embed.description = "Package could not be found."
+ else:
+ async with self.bot.http_session.get(URL.format(package=package)) as response:
+ if response.status == 404:
+ embed.description = "Package could not be found."
- elif response.status == 200 and response.content_type == "application/json":
- response_json = await response.json()
- info = response_json["info"]
+ elif response.status == 200 and response.content_type == "application/json":
+ response_json = await response.json()
+ info = response_json["info"]
- embed.title = f"{info['name']} v{info['version']}"
- embed.url = info['package_url']
- embed.colour = next(PYPI_COLOURS)
+ embed.title = f"{info['name']} v{info['version']}"
+ embed.url = info['package_url']
+ embed.colour = next(PYPI_COLOURS)
- for field in FIELDS:
- field_data = info[field]
+ for field in FIELDS:
+ field_data = info[field]
- # Field could be completely empty, in some cases can be a string with whitespaces, or None.
- if field_data and not field_data.isspace():
- if '\n' in field_data and field == "license":
- field_data = field_data.split('\n')[0]
+ # Field could be completely empty, in some cases can be a string with whitespaces, or None.
+ if field_data and not field_data.isspace():
+ if '\n' in field_data and field == "license":
+ field_data = field_data.split('\n')[0]
- embed.add_field(
- name=field.replace("_", " ").title(),
- value=escape_markdown(field_data),
- inline=False,
- )
+ embed.add_field(
+ name=field.replace("_", " ").title(),
+ value=escape_markdown(field_data),
+ inline=False,
+ )
- else:
- embed.description = "There was an error when fetching your PyPi package."
- log.trace(f"Error when fetching PyPi package: {response.status}.")
+ error = False
- await ctx.send(embed=embed)
+ else:
+ embed.description = "There was an error when fetching your PyPi package."
+ log.trace(f"Error when fetching PyPi package: {response.status}.")
+
+ if error:
+ await ctx.send(embed=embed, delete_after=INVALID_INPUT_DELETE_DELAY)
+ await ctx.message.delete(delay=INVALID_INPUT_DELETE_DELAY)
+ else:
+ await ctx.send(embed=embed)
def setup(bot: Bot) -> None:
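
As a side note, the accepted patch above relies on discord.py's auto-delete facilities (`delete_after=` on `send` and `delay=` on `Message.delete`). The following is only an illustrative sketch of that pattern in isolation — the command name, delay value, and bot setup are placeholders, not code from the repository in this row:

```python
# Illustrative sketch of the auto-delete pattern used in the patch above;
# the command name and delay value are placeholders, not repository code.
import discord
from discord.ext import commands

DELETE_DELAY = 10  # seconds; stands in for RedirectOutput.delete_delay

bot = commands.Bot(command_prefix="!", intents=discord.Intents.default())


@bot.command(name="demo")
async def demo(ctx: commands.Context) -> None:
    """Reply with an error embed, then clean up both messages."""
    embed = discord.Embed(description="Package could not be found.")
    # delete_after removes the bot's reply once the delay elapses.
    await ctx.send(embed=embed, delete_after=DELETE_DELAY)
    # delay= schedules deletion of the user's invoking message as well.
    await ctx.message.delete(delay=DELETE_DELAY)
```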
|
{"golden_diff": "diff --git a/bot/exts/info/pypi.py b/bot/exts/info/pypi.py\n--- a/bot/exts/info/pypi.py\n+++ b/bot/exts/info/pypi.py\n@@ -8,7 +8,7 @@\n from discord.utils import escape_markdown\n \n from bot.bot import Bot\n-from bot.constants import Colours, NEGATIVE_REPLIES\n+from bot.constants import Colours, NEGATIVE_REPLIES, RedirectOutput\n \n URL = \"https://pypi.org/pypi/{package}/json\"\n FIELDS = (\"author\", \"requires_python\", \"summary\", \"license\")\n@@ -17,6 +17,7 @@\n PYPI_COLOURS = itertools.cycle((Colours.yellow, Colours.blue, Colours.white))\n \n ILLEGAL_CHARACTERS = re.compile(r\"[^a-zA-Z0-9-.]+\")\n+INVALID_INPUT_DELETE_DELAY = RedirectOutput.delete_delay\n \n log = logging.getLogger(__name__)\n \n@@ -36,42 +37,49 @@\n )\n embed.set_thumbnail(url=PYPI_ICON)\n \n+ error = True\n+\n if (character := re.search(ILLEGAL_CHARACTERS, package)) is not None:\n embed.description = f\"Illegal character passed into command: '{escape_markdown(character.group(0))}'\"\n- await ctx.send(embed=embed)\n- return\n \n- async with self.bot.http_session.get(URL.format(package=package)) as response:\n- if response.status == 404:\n- embed.description = \"Package could not be found.\"\n+ else:\n+ async with self.bot.http_session.get(URL.format(package=package)) as response:\n+ if response.status == 404:\n+ embed.description = \"Package could not be found.\"\n \n- elif response.status == 200 and response.content_type == \"application/json\":\n- response_json = await response.json()\n- info = response_json[\"info\"]\n+ elif response.status == 200 and response.content_type == \"application/json\":\n+ response_json = await response.json()\n+ info = response_json[\"info\"]\n \n- embed.title = f\"{info['name']} v{info['version']}\"\n- embed.url = info['package_url']\n- embed.colour = next(PYPI_COLOURS)\n+ embed.title = f\"{info['name']} v{info['version']}\"\n+ embed.url = info['package_url']\n+ embed.colour = next(PYPI_COLOURS)\n \n- for field in FIELDS:\n- field_data = info[field]\n+ for field in FIELDS:\n+ field_data = info[field]\n \n- # Field could be completely empty, in some cases can be a string with whitespaces, or None.\n- if field_data and not field_data.isspace():\n- if '\\n' in field_data and field == \"license\":\n- field_data = field_data.split('\\n')[0]\n+ # Field could be completely empty, in some cases can be a string with whitespaces, or None.\n+ if field_data and not field_data.isspace():\n+ if '\\n' in field_data and field == \"license\":\n+ field_data = field_data.split('\\n')[0]\n \n- embed.add_field(\n- name=field.replace(\"_\", \" \").title(),\n- value=escape_markdown(field_data),\n- inline=False,\n- )\n+ embed.add_field(\n+ name=field.replace(\"_\", \" \").title(),\n+ value=escape_markdown(field_data),\n+ inline=False,\n+ )\n \n- else:\n- embed.description = \"There was an error when fetching your PyPi package.\"\n- log.trace(f\"Error when fetching PyPi package: {response.status}.\")\n+ error = False\n \n- await ctx.send(embed=embed)\n+ else:\n+ embed.description = \"There was an error when fetching your PyPi package.\"\n+ log.trace(f\"Error when fetching PyPi package: {response.status}.\")\n+\n+ if error:\n+ await ctx.send(embed=embed, delete_after=INVALID_INPUT_DELETE_DELAY)\n+ await ctx.message.delete(delay=INVALID_INPUT_DELETE_DELAY)\n+ else:\n+ await ctx.send(embed=embed)\n \n \n def setup(bot: Bot) -> None:\n", "issue": "Delete messages when `!pypi` module is invalid\nWhen users use the `!pypi` command with an invalid module, you currently get a response saying 
`\"Package could not be found.\"`. \r\n\r\nWhat I'm proposing it that after a delay (I believe we have `bot.constants.RedirectOutput.delete_delay` for this) the bot should delete both its response and the invocation message, like [`!d` currently does](https://github.com/python-discord/bot/blob/master/bot/exts/info/doc.py#L363-L365).\r\n\r\nThis would help reduce channel flooding when someone is trying to remember the name of a module, and since it's not a trivial change I'm happy to try and implement this myself if it's something we decide we want.\n", "before_files": [{"content": "import itertools\nimport logging\nimport random\nimport re\n\nfrom discord import Embed\nfrom discord.ext.commands import Cog, Context, command\nfrom discord.utils import escape_markdown\n\nfrom bot.bot import Bot\nfrom bot.constants import Colours, NEGATIVE_REPLIES\n\nURL = \"https://pypi.org/pypi/{package}/json\"\nFIELDS = (\"author\", \"requires_python\", \"summary\", \"license\")\nPYPI_ICON = \"https://cdn.discordapp.com/emojis/766274397257334814.png\"\n\nPYPI_COLOURS = itertools.cycle((Colours.yellow, Colours.blue, Colours.white))\n\nILLEGAL_CHARACTERS = re.compile(r\"[^a-zA-Z0-9-.]+\")\n\nlog = logging.getLogger(__name__)\n\n\nclass PyPi(Cog):\n \"\"\"Cog for getting information about PyPi packages.\"\"\"\n\n def __init__(self, bot: Bot):\n self.bot = bot\n\n @command(name=\"pypi\", aliases=(\"package\", \"pack\"))\n async def get_package_info(self, ctx: Context, package: str) -> None:\n \"\"\"Provide information about a specific package from PyPI.\"\"\"\n embed = Embed(\n title=random.choice(NEGATIVE_REPLIES),\n colour=Colours.soft_red\n )\n embed.set_thumbnail(url=PYPI_ICON)\n\n if (character := re.search(ILLEGAL_CHARACTERS, package)) is not None:\n embed.description = f\"Illegal character passed into command: '{escape_markdown(character.group(0))}'\"\n await ctx.send(embed=embed)\n return\n\n async with self.bot.http_session.get(URL.format(package=package)) as response:\n if response.status == 404:\n embed.description = \"Package could not be found.\"\n\n elif response.status == 200 and response.content_type == \"application/json\":\n response_json = await response.json()\n info = response_json[\"info\"]\n\n embed.title = f\"{info['name']} v{info['version']}\"\n embed.url = info['package_url']\n embed.colour = next(PYPI_COLOURS)\n\n for field in FIELDS:\n field_data = info[field]\n\n # Field could be completely empty, in some cases can be a string with whitespaces, or None.\n if field_data and not field_data.isspace():\n if '\\n' in field_data and field == \"license\":\n field_data = field_data.split('\\n')[0]\n\n embed.add_field(\n name=field.replace(\"_\", \" \").title(),\n value=escape_markdown(field_data),\n inline=False,\n )\n\n else:\n embed.description = \"There was an error when fetching your PyPi package.\"\n log.trace(f\"Error when fetching PyPi package: {response.status}.\")\n\n await ctx.send(embed=embed)\n\n\ndef setup(bot: Bot) -> None:\n \"\"\"Load the PyPi cog.\"\"\"\n bot.add_cog(PyPi(bot))\n", "path": "bot/exts/info/pypi.py"}], "after_files": [{"content": "import itertools\nimport logging\nimport random\nimport re\n\nfrom discord import Embed\nfrom discord.ext.commands import Cog, Context, command\nfrom discord.utils import escape_markdown\n\nfrom bot.bot import Bot\nfrom bot.constants import Colours, NEGATIVE_REPLIES, RedirectOutput\n\nURL = \"https://pypi.org/pypi/{package}/json\"\nFIELDS = (\"author\", \"requires_python\", \"summary\", \"license\")\nPYPI_ICON = 
\"https://cdn.discordapp.com/emojis/766274397257334814.png\"\n\nPYPI_COLOURS = itertools.cycle((Colours.yellow, Colours.blue, Colours.white))\n\nILLEGAL_CHARACTERS = re.compile(r\"[^a-zA-Z0-9-.]+\")\nINVALID_INPUT_DELETE_DELAY = RedirectOutput.delete_delay\n\nlog = logging.getLogger(__name__)\n\n\nclass PyPi(Cog):\n \"\"\"Cog for getting information about PyPi packages.\"\"\"\n\n def __init__(self, bot: Bot):\n self.bot = bot\n\n @command(name=\"pypi\", aliases=(\"package\", \"pack\"))\n async def get_package_info(self, ctx: Context, package: str) -> None:\n \"\"\"Provide information about a specific package from PyPI.\"\"\"\n embed = Embed(\n title=random.choice(NEGATIVE_REPLIES),\n colour=Colours.soft_red\n )\n embed.set_thumbnail(url=PYPI_ICON)\n\n error = True\n\n if (character := re.search(ILLEGAL_CHARACTERS, package)) is not None:\n embed.description = f\"Illegal character passed into command: '{escape_markdown(character.group(0))}'\"\n\n else:\n async with self.bot.http_session.get(URL.format(package=package)) as response:\n if response.status == 404:\n embed.description = \"Package could not be found.\"\n\n elif response.status == 200 and response.content_type == \"application/json\":\n response_json = await response.json()\n info = response_json[\"info\"]\n\n embed.title = f\"{info['name']} v{info['version']}\"\n embed.url = info['package_url']\n embed.colour = next(PYPI_COLOURS)\n\n for field in FIELDS:\n field_data = info[field]\n\n # Field could be completely empty, in some cases can be a string with whitespaces, or None.\n if field_data and not field_data.isspace():\n if '\\n' in field_data and field == \"license\":\n field_data = field_data.split('\\n')[0]\n\n embed.add_field(\n name=field.replace(\"_\", \" \").title(),\n value=escape_markdown(field_data),\n inline=False,\n )\n\n error = False\n\n else:\n embed.description = \"There was an error when fetching your PyPi package.\"\n log.trace(f\"Error when fetching PyPi package: {response.status}.\")\n\n if error:\n await ctx.send(embed=embed, delete_after=INVALID_INPUT_DELETE_DELAY)\n await ctx.message.delete(delay=INVALID_INPUT_DELETE_DELAY)\n else:\n await ctx.send(embed=embed)\n\n\ndef setup(bot: Bot) -> None:\n \"\"\"Load the PyPi cog.\"\"\"\n bot.add_cog(PyPi(bot))\n", "path": "bot/exts/info/pypi.py"}]}
| 1,219 | 924 |
gh_patches_debug_58079
|
rasdani/github-patches
|
git_diff
|
secondmind-labs__trieste-140
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Ensure type information is packaged with installable
**Describe the feature you'd like**
As a user, I want to be able to type check my code that calls into trieste. It's not clear at the moment if this is currently possible, given there is no py.typed file.
See the python PEP for more info https://www.python.org/dev/peps/pep-0561/
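
For reference, PEP 561 compliance usually amounts to shipping an empty `py.typed` marker file inside the package and telling setuptools to include it. A minimal, generic sketch is below; the package name `mypkg` is a placeholder and does not reflect trieste's actual layout:

```python
# setup.py — minimal PEP 561 packaging sketch (placeholder package name)
from setuptools import find_packages, setup

setup(
    name="mypkg",
    version="0.1.0",
    packages=find_packages(include=("mypkg*",)),
    # Ship the empty marker file mypkg/py.typed so type checkers
    # know the package exposes inline type information.
    package_data={"mypkg": ["py.typed"]},
    zip_safe=False,  # recommended by PEP 561 so the marker stays accessible
)
```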
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # Copyright 2020 The Trieste Contributors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from setuptools import find_packages, setup
16
17 with open("README.md", "r") as file:
18 long_description = file.read()
19
20 setup(
21 name="trieste",
22 version="0.3.1",
23 author="The Trieste contributors",
24 author_email="[email protected]",
25 description="A Bayesian optimization research toolbox built on TensorFlow",
26 long_description=long_description,
27 long_description_content_type="text/markdown",
28 url="https://github.com/secondmind-labs/trieste",
29 packages=find_packages(include=("trieste*",)),
30 classifiers=[
31 "Programming Language :: Python :: 3.7",
32 "License :: OSI Approved :: Apache Software License",
33 "Operating System :: OS Independent",
34 ],
35 python_requires="~=3.7",
36 install_requires=[
37 "absl-py",
38 "gpflow==2.1.*",
39 "numpy",
40 # tensorflow!=2.2.0,!=2.3.0 because of https://github.com/advisories/GHSA-8fxw-76px-3rxv
41 "tensorflow>=2.1,!=2.2.0,!=2.3.0",
42 "tensorflow-probability>=0.9",
43 ],
44 )
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -27,6 +27,9 @@
long_description_content_type="text/markdown",
url="https://github.com/secondmind-labs/trieste",
packages=find_packages(include=("trieste*",)),
+ package_data={
+ "trieste": ["py.typed"],
+ },
classifiers=[
"Programming Language :: Python :: 3.7",
"License :: OSI Approved :: Apache Software License",
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -27,6 +27,9 @@\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/secondmind-labs/trieste\",\n packages=find_packages(include=(\"trieste*\",)),\n+ package_data={\n+ \"trieste\": [\"py.typed\"],\n+ },\n classifiers=[\n \"Programming Language :: Python :: 3.7\",\n \"License :: OSI Approved :: Apache Software License\",\n", "issue": "Ensure type information is packaged with installable\n**Describe the feature you'd like**\r\nAs a user, I want to be able to type check my code that calls into trieste. It's not clear at the moment if this is currently possible, given there is no py.typed file.\r\n\r\nSee the python PEP for more info https://www.python.org/dev/peps/pep-0561/\n", "before_files": [{"content": "# Copyright 2020 The Trieste Contributors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom setuptools import find_packages, setup\n\nwith open(\"README.md\", \"r\") as file:\n long_description = file.read()\n\nsetup(\n name=\"trieste\",\n version=\"0.3.1\",\n author=\"The Trieste contributors\",\n author_email=\"[email protected]\",\n description=\"A Bayesian optimization research toolbox built on TensorFlow\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/secondmind-labs/trieste\",\n packages=find_packages(include=(\"trieste*\",)),\n classifiers=[\n \"Programming Language :: Python :: 3.7\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n ],\n python_requires=\"~=3.7\",\n install_requires=[\n \"absl-py\",\n \"gpflow==2.1.*\",\n \"numpy\",\n # tensorflow!=2.2.0,!=2.3.0 because of https://github.com/advisories/GHSA-8fxw-76px-3rxv\n \"tensorflow>=2.1,!=2.2.0,!=2.3.0\",\n \"tensorflow-probability>=0.9\",\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "# Copyright 2020 The Trieste Contributors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom setuptools import find_packages, setup\n\nwith open(\"README.md\", \"r\") as file:\n long_description = file.read()\n\nsetup(\n name=\"trieste\",\n version=\"0.3.1\",\n author=\"The Trieste contributors\",\n author_email=\"[email protected]\",\n description=\"A Bayesian optimization research toolbox built on TensorFlow\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/secondmind-labs/trieste\",\n 
packages=find_packages(include=(\"trieste*\",)),\n package_data={\n \"trieste\": [\"py.typed\"],\n },\n classifiers=[\n \"Programming Language :: Python :: 3.7\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n ],\n python_requires=\"~=3.7\",\n install_requires=[\n \"absl-py\",\n \"gpflow==2.1.*\",\n \"numpy\",\n # tensorflow!=2.2.0,!=2.3.0 because of https://github.com/advisories/GHSA-8fxw-76px-3rxv\n \"tensorflow>=2.1,!=2.2.0,!=2.3.0\",\n \"tensorflow-probability>=0.9\",\n ],\n)\n", "path": "setup.py"}]}
| 823 | 115 |
gh_patches_debug_19184
|
rasdani/github-patches
|
git_diff
|
mathesar-foundation__mathesar-3044
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add OpenAPI Specification for the /datafiles/ endpoint and fix the incorrect specs
## Problem
In order to ensure the accuracy of the specs generated by drf-spectacular for the /datafiles/ endpoint, we will generate an OpenAPI Specification specifically for this endpoint. This will allow us to verify its correctness by comparing it with the existing test cases and the DRF browsable API page, and also confirm it with the front-end team.
## Proposed solution
* Implement a custom preprocessing hook function to filter out all endpoints except for the /datafiles/ endpoint. The hook function selectively retains only the endpoint paths that match the /datafiles/ routes (a rough sketch of such a hook follows this list).
* Update the Spectacular settings in the _common_settings.py_ file to include the custom preprocessing hook.
* By configuring the PREPROCESSING_HOOKS setting with the custom hook function, we ensure that only the /datafiles/ endpoint is considered during the OpenAPI specification generation process.
* Finally, generate the spec file using the _./manage.py spectacular --color --file schema.yml_ command
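
A rough sketch of what such a hook and the matching setting could look like follows. The module path mirrors the `config/settings/openapi.py` file shown later in this row, while the exact route prefix is an assumption about how the data-files endpoints are exposed:

```python
# Sketch only: config/settings/openapi.py plus the relevant setting.
# The route prefix below is an assumption; adjust it to the actual URL layout.

def custom_preprocessing_hook(endpoints):
    """Keep only the data-files endpoints in the generated schema.

    drf-spectacular hands the hook a list of
    (path, path_regex, method, callback) tuples and uses whatever
    list the hook returns.
    """
    return [
        (path, path_regex, method, callback)
        for (path, path_regex, method, callback) in endpoints
        if path.startswith("/api/db/v0/data_files/")
    ]


# config/settings/common_settings.py (only the relevant key shown)
SPECTACULAR_SETTINGS = {
    # ...existing keys such as TITLE, VERSION, POSTPROCESSING_HOOKS...
    "PREPROCESSING_HOOKS": ["config.settings.openapi.custom_preprocessing_hook"],
}
```

The spec file can then be produced with the `./manage.py spectacular --color --file schema.yml` command mentioned above.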
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `config/settings/common_settings.py`
Content:
```
1 """
2 Base settings to build other settings files upon.
3
4 Generated by 'django-admin startproject' using Django 3.1.7.
5
6 For more information on this file, see
7 https://docs.djangoproject.com/en/3.1/topics/settings/
8
9 For the full list of settings and their values, see
10 https://docs.djangoproject.com/en/3.1/ref/settings/
11 """
12
13 import os
14 from pathlib import Path
15
16 from decouple import Csv, config as decouple_config
17 from dj_database_url import parse as db_url
18
19
20 # We use a 'tuple' with pipes as delimiters as decople naively splits the global
21 # variables on commas when casting to Csv()
22 def pipe_delim(pipe_string):
23 # Remove opening and closing brackets
24 pipe_string = pipe_string[1:-1]
25 # Split on pipe delim
26 return pipe_string.split("|")
27
28
29 # Build paths inside the project like this: BASE_DIR / 'subdir'.
30 BASE_DIR = Path(__file__).resolve().parent.parent.parent
31
32 # Application definition
33
34 INSTALLED_APPS = [
35 "django.contrib.admin",
36 "django.contrib.auth",
37 "django.contrib.contenttypes",
38 "django.contrib.sessions",
39 "django.contrib.messages",
40 "whitenoise.runserver_nostatic",
41 "django.contrib.staticfiles",
42 "rest_framework",
43 "django_filters",
44 "django_property_filter",
45 "drf_spectacular",
46 "mathesar",
47 ]
48
49 MIDDLEWARE = [
50 "django.middleware.security.SecurityMiddleware",
51 "whitenoise.middleware.WhiteNoiseMiddleware",
52 "django.contrib.sessions.middleware.SessionMiddleware",
53 "django.middleware.common.CommonMiddleware",
54 "django.middleware.csrf.CsrfViewMiddleware",
55 "django.contrib.auth.middleware.AuthenticationMiddleware",
56 "django.contrib.messages.middleware.MessageMiddleware",
57 "django.middleware.clickjacking.XFrameOptionsMiddleware",
58 "mathesar.middleware.CursorClosedHandlerMiddleware",
59 "mathesar.middleware.PasswordChangeNeededMiddleware",
60 'django_userforeignkey.middleware.UserForeignKeyMiddleware',
61 'django_request_cache.middleware.RequestCacheMiddleware',
62 ]
63
64 ROOT_URLCONF = "config.urls"
65
66 TEMPLATES = [
67 {
68 "BACKEND": "django.template.backends.django.DjangoTemplates",
69 "DIRS": [],
70 "APP_DIRS": True,
71 "OPTIONS": {
72 "context_processors": [
73 "config.context_processors.frontend_settings",
74 "django.template.context_processors.debug",
75 "django.template.context_processors.request",
76 "django.contrib.auth.context_processors.auth",
77 "django.contrib.messages.context_processors.messages",
78 "mathesar.template_context_processors.base_template_extensions.script_extension_templates"
79 ],
80 },
81 },
82 ]
83
84 WSGI_APPLICATION = "config.wsgi.application"
85
86 # Database
87 # https://docs.djangoproject.com/en/3.1/ref/settings/#databases
88
89 # TODO: Add to documentation that database keys should not be than 128 characters.
90
91 # MATHESAR_DATABASES should be of the form '({db_name}|{db_url}), ({db_name}|{db_url})'
92 # See pipe_delim above for why we use pipes as delimiters
93 DATABASES = {
94 db_key: db_url(url_string)
95 for db_key, url_string in decouple_config('MATHESAR_DATABASES', cast=Csv(pipe_delim))
96 }
97 DATABASES[decouple_config('DJANGO_DATABASE_KEY', default="default")] = decouple_config('DJANGO_DATABASE_URL', cast=db_url)
98
99 for db_key, db_dict in DATABASES.items():
100 # Engine can be '.postgresql' or '.postgresql_psycopg2'
101 if not db_dict['ENGINE'].startswith('django.db.backends.postgresql'):
102 raise ValueError(
103 f"{db_key} is not a PostgreSQL database. "
104 f"{db_dict['ENGINE']} found for {db_key}'s engine."
105 )
106
107 # pytest-django will create a new database named 'test_{DATABASES[table_db]['NAME']}'
108 # and use it for our API tests if we don't specify DATABASES[table_db]['TEST']['NAME']
109 TEST = decouple_config('TEST', default=False, cast=bool)
110 if TEST:
111 for db_key, _ in decouple_config('MATHESAR_DATABASES', cast=Csv(pipe_delim)):
112 DATABASES[db_key]['TEST'] = {'NAME': DATABASES[db_key]['NAME']}
113
114
115 # SECURITY WARNING: keep the secret key used in production secret!
116 SECRET_KEY = decouple_config('SECRET_KEY')
117
118 # SECURITY WARNING: don't run with debug turned on in production!
119 DEBUG = decouple_config('DEBUG', default=False, cast=bool)
120
121 ALLOWED_HOSTS = decouple_config('ALLOWED_HOSTS', cast=Csv())
122
123 # Password validation
124 # https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
125
126 AUTH_PASSWORD_VALIDATORS = [
127 {
128 "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
129 },
130 {
131 "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
132 },
133 {
134 "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
135 },
136 {
137 "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
138 },
139 ]
140
141 # Internationalization
142 # https://docs.djangoproject.com/en/3.1/topics/i18n/
143
144 LANGUAGE_CODE = "en-us"
145
146 TIME_ZONE = "UTC"
147
148 USE_I18N = True
149
150 USE_L10N = True
151
152 USE_TZ = True
153
154 # Static files (CSS, JavaScript, Images)
155 # https://docs.djangoproject.com/en/3.1/howto/static-files/
156 # https://docs.djangoproject.com/en/3.1/ref/contrib/staticfiles/
157
158 STATIC_URL = "/static/"
159
160 # When running with DEBUG=False, the webserver needs to serve files from this location
161 # python manage.py collectstatic has to be run to collect all static files into this location
162 # The files need to served in brotli or gzip compressed format
163 STATIC_ROOT = os.path.join(BASE_DIR, 'static/')
164
165 # Media files (uploaded by the user)
166
167 MEDIA_ROOT = os.path.join(BASE_DIR, '.media/')
168
169 MEDIA_URL = "/media/"
170
171 # Update Authentication classes, removed BasicAuthentication
172 # Defaults: https://www.django-rest-framework.org/api-guide/settings/
173 REST_FRAMEWORK = {
174 'DEFAULT_AUTHENTICATION_CLASSES': [
175 'rest_framework.authentication.TokenAuthentication',
176 'rest_framework.authentication.SessionAuthentication'
177 ],
178 'DEFAULT_PERMISSION_CLASSES': [
179 'rest_framework.permissions.IsAuthenticated',
180 ],
181 'DEFAULT_FILTER_BACKENDS': (
182 'django_filters.rest_framework.DjangoFilterBackend',
183 'rest_framework.filters.OrderingFilter',
184 ),
185 'TEST_REQUEST_DEFAULT_FORMAT': 'json',
186 'EXCEPTION_HANDLER':
187 'mathesar.exception_handlers.mathesar_exception_handler',
188 'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema'
189 }
190 SPECTACULAR_SETTINGS = {
191 'TITLE': 'Mathesar API',
192 'DESCRIPTION': '',
193 'VERSION': '1.0.0',
194 'SERVE_INCLUDE_SCHEMA': False,
195 'POSTPROCESSING_HOOKS': [
196 'config.settings.openapi.remove_url_prefix_hook',
197 ],
198 # OTHER SETTINGS
199 }
200 FRIENDLY_ERRORS = {
201 'FIELD_ERRORS': {
202 # By default drf-friendly-errors does contain error codes for ListSerializer type
203 'ListSerializer': {
204 'required': 2007,
205 'null': 2027,
206 'invalid_choice': 2083,
207 'not_a_list': 2123,
208 'empty': 2093
209 },
210 'PermittedPkRelatedField': {
211 'required': 2007,
212 'null': 2027,
213 'does_not_exist': 2151,
214 'incorrect_type': 2161
215 },
216 'PermittedSlugRelatedField': {
217 'required': 2007, 'invalid': 2002, 'null': 2027,
218 'does_not_exist': 2151, 'incorrect_type': 2161
219 },
220 },
221 'EXCEPTION_DICT': {
222 'Http404': 4005
223 }
224 }
225 # Mathesar settings
226 MATHESAR_MODE = decouple_config('MODE', default='PRODUCTION')
227 MATHESAR_UI_BUILD_LOCATION = os.path.join(BASE_DIR, 'mathesar/static/mathesar/')
228 MATHESAR_MANIFEST_LOCATION = os.path.join(MATHESAR_UI_BUILD_LOCATION, 'manifest.json')
229 MATHESAR_CLIENT_DEV_URL = 'http://localhost:3000'
230 MATHESAR_UI_SOURCE_LOCATION = os.path.join(BASE_DIR, 'mathesar_ui/')
231 MATHESAR_CAPTURE_UNHANDLED_EXCEPTION = decouple_config('CAPTURE_UNHANDLED_EXCEPTION', default=False)
232 MATHESAR_STATIC_NON_CODE_FILES_LOCATION = os.path.join(BASE_DIR, 'mathesar/static/non-code/')
233
234 # UI source files have to be served by Django in order for static assets to be included during dev mode
235 # https://vitejs.dev/guide/assets.html
236 # https://vitejs.dev/guide/backend-integration.html
237 STATICFILES_DIRS = [MATHESAR_UI_SOURCE_LOCATION, MATHESAR_STATIC_NON_CODE_FILES_LOCATION] if MATHESAR_MODE == 'DEVELOPMENT' else [MATHESAR_UI_BUILD_LOCATION, MATHESAR_STATIC_NON_CODE_FILES_LOCATION]
238 STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
239
240 # Accounts
241 AUTH_USER_MODEL = 'mathesar.User'
242 LOGIN_URL = '/auth/login/'
243 LOGIN_REDIRECT_URL = '/'
244 LOGOUT_REDIRECT_URL = LOGIN_URL
245 DRF_ACCESS_POLICY = {
246 'reusable_conditions': ['mathesar.api.permission_conditions']
247 }
248 # List of Template names that contains additional script tags to be added to the base template
249 BASE_TEMPLATE_ADDITIONAL_SCRIPT_TEMPLATES = []
250
```
Path: `config/settings/openapi.py`
Content:
```
1 def remove_url_prefix_hook(result, **kwargs):
2 # Remove namespace and version URL prefix from the operation Id of the generated API schema
3 for path, path_info in result['paths'].items():
4 for method, operation in path_info.items():
5 operation_id = operation.get('operationId')
6 if operation_id:
7 if path.startswith('/api/db/v0/'):
8 operation['operationId'] = operation_id.replace('db_v0_', '')
9 elif path.startswith('/api/ui/v0/'):
10 operation['operationId'] = operation_id.replace('ui_v0_', '')
11
12 return result
13
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/config/settings/common_settings.py b/config/settings/common_settings.py
--- a/config/settings/common_settings.py
+++ b/config/settings/common_settings.py
@@ -192,6 +192,7 @@
'DESCRIPTION': '',
'VERSION': '1.0.0',
'SERVE_INCLUDE_SCHEMA': False,
+ 'PREPROCESSING_HOOKS': ['config.settings.openapi.custom_preprocessing_hook'],
'POSTPROCESSING_HOOKS': [
'config.settings.openapi.remove_url_prefix_hook',
],
diff --git a/config/settings/openapi.py b/config/settings/openapi.py
--- a/config/settings/openapi.py
+++ b/config/settings/openapi.py
@@ -1,3 +1,12 @@
+def custom_preprocessing_hook(endpoints):
+ filtered = []
+ for (path, path_regex, method, callback) in endpoints:
+ # Remove all but DRF API endpoints
+ if path.startswith("/api/db/v0/data_files/"):
+ filtered.append((path, path_regex, method, callback))
+ return filtered
+
+
def remove_url_prefix_hook(result, **kwargs):
# Remove namespace and version URL prefix from the operation Id of the generated API schema
for path, path_info in result['paths'].items():
|
{"golden_diff": "diff --git a/config/settings/common_settings.py b/config/settings/common_settings.py\n--- a/config/settings/common_settings.py\n+++ b/config/settings/common_settings.py\n@@ -192,6 +192,7 @@\n 'DESCRIPTION': '',\n 'VERSION': '1.0.0',\n 'SERVE_INCLUDE_SCHEMA': False,\n+ 'PREPROCESSING_HOOKS': ['config.settings.openapi.custom_preprocessing_hook'],\n 'POSTPROCESSING_HOOKS': [\n 'config.settings.openapi.remove_url_prefix_hook',\n ],\ndiff --git a/config/settings/openapi.py b/config/settings/openapi.py\n--- a/config/settings/openapi.py\n+++ b/config/settings/openapi.py\n@@ -1,3 +1,12 @@\n+def custom_preprocessing_hook(endpoints):\n+ filtered = []\n+ for (path, path_regex, method, callback) in endpoints:\n+ # Remove all but DRF API endpoints\n+ if path.startswith(\"/api/db/v0/data_files/\"):\n+ filtered.append((path, path_regex, method, callback))\n+ return filtered\n+\n+\n def remove_url_prefix_hook(result, **kwargs):\n # Remove namespace and version URL prefix from the operation Id of the generated API schema\n for path, path_info in result['paths'].items():\n", "issue": "Add OpenAPI Specification for the /datafiles/ endpoint and fix the incorrect specs\n## Problem\r\nIn order to ensure the accuracy of the specs generated by drf-spectacular for /datafiles/ endpoint , we will generate an OpenAPI Specification specifically for this endpoint. This will allow us to verify its correctness by comparing it with the existing test cases and the DRF browsable API page and also confirm it with the front-end team.\r\n\r\n## Proposed solution\r\n* Implement custom preprocessing hook function to filter out all endpoints except for the /datafiles/ endpoint.The hook function selectively retains only the endpoint paths that match the /datafiles/ \r\n* Update the Spectacular settings in the _common_settings.py_ file to include the custom preprocessing hook.\r\n* Configure the PREPROCESSING_HOOKS setting with the custom hook function, we ensure that only the /datafiles/ endpoint is considered during the OpenAPI specification generation process.\r\n* Finally, generate the spec file using the _./manage.py spectacular --color --file schema.yml_ command\r\n\r\n\n", "before_files": [{"content": "\"\"\"\nBase settings to build other settings files upon.\n\nGenerated by 'django-admin startproject' using Django 3.1.7.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/3.1/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/3.1/ref/settings/\n\"\"\"\n\nimport os\nfrom pathlib import Path\n\nfrom decouple import Csv, config as decouple_config\nfrom dj_database_url import parse as db_url\n\n\n# We use a 'tuple' with pipes as delimiters as decople naively splits the global\n# variables on commas when casting to Csv()\ndef pipe_delim(pipe_string):\n # Remove opening and closing brackets\n pipe_string = pipe_string[1:-1]\n # Split on pipe delim\n return pipe_string.split(\"|\")\n\n\n# Build paths inside the project like this: BASE_DIR / 'subdir'.\nBASE_DIR = Path(__file__).resolve().parent.parent.parent\n\n# Application definition\n\nINSTALLED_APPS = [\n \"django.contrib.admin\",\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"whitenoise.runserver_nostatic\",\n \"django.contrib.staticfiles\",\n \"rest_framework\",\n \"django_filters\",\n \"django_property_filter\",\n \"drf_spectacular\",\n \"mathesar\",\n]\n\nMIDDLEWARE = [\n 
\"django.middleware.security.SecurityMiddleware\",\n \"whitenoise.middleware.WhiteNoiseMiddleware\",\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n \"mathesar.middleware.CursorClosedHandlerMiddleware\",\n \"mathesar.middleware.PasswordChangeNeededMiddleware\",\n 'django_userforeignkey.middleware.UserForeignKeyMiddleware',\n 'django_request_cache.middleware.RequestCacheMiddleware',\n]\n\nROOT_URLCONF = \"config.urls\"\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [],\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n \"context_processors\": [\n \"config.context_processors.frontend_settings\",\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"mathesar.template_context_processors.base_template_extensions.script_extension_templates\"\n ],\n },\n },\n]\n\nWSGI_APPLICATION = \"config.wsgi.application\"\n\n# Database\n# https://docs.djangoproject.com/en/3.1/ref/settings/#databases\n\n# TODO: Add to documentation that database keys should not be than 128 characters.\n\n# MATHESAR_DATABASES should be of the form '({db_name}|{db_url}), ({db_name}|{db_url})'\n# See pipe_delim above for why we use pipes as delimiters\nDATABASES = {\n db_key: db_url(url_string)\n for db_key, url_string in decouple_config('MATHESAR_DATABASES', cast=Csv(pipe_delim))\n}\nDATABASES[decouple_config('DJANGO_DATABASE_KEY', default=\"default\")] = decouple_config('DJANGO_DATABASE_URL', cast=db_url)\n\nfor db_key, db_dict in DATABASES.items():\n # Engine can be '.postgresql' or '.postgresql_psycopg2'\n if not db_dict['ENGINE'].startswith('django.db.backends.postgresql'):\n raise ValueError(\n f\"{db_key} is not a PostgreSQL database. 
\"\n f\"{db_dict['ENGINE']} found for {db_key}'s engine.\"\n )\n\n# pytest-django will create a new database named 'test_{DATABASES[table_db]['NAME']}'\n# and use it for our API tests if we don't specify DATABASES[table_db]['TEST']['NAME']\nTEST = decouple_config('TEST', default=False, cast=bool)\nif TEST:\n for db_key, _ in decouple_config('MATHESAR_DATABASES', cast=Csv(pipe_delim)):\n DATABASES[db_key]['TEST'] = {'NAME': DATABASES[db_key]['NAME']}\n\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = decouple_config('SECRET_KEY')\n\n# SECURITY WARNING: don't run with debug turned on in production!\nDEBUG = decouple_config('DEBUG', default=False, cast=bool)\n\nALLOWED_HOSTS = decouple_config('ALLOWED_HOSTS', cast=Csv())\n\n# Password validation\n# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n \"NAME\": \"django.contrib.auth.password_validation.UserAttributeSimilarityValidator\",\n },\n {\n \"NAME\": \"django.contrib.auth.password_validation.MinimumLengthValidator\",\n },\n {\n \"NAME\": \"django.contrib.auth.password_validation.CommonPasswordValidator\",\n },\n {\n \"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\",\n },\n]\n\n# Internationalization\n# https://docs.djangoproject.com/en/3.1/topics/i18n/\n\nLANGUAGE_CODE = \"en-us\"\n\nTIME_ZONE = \"UTC\"\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/3.1/howto/static-files/\n# https://docs.djangoproject.com/en/3.1/ref/contrib/staticfiles/\n\nSTATIC_URL = \"/static/\"\n\n# When running with DEBUG=False, the webserver needs to serve files from this location\n# python manage.py collectstatic has to be run to collect all static files into this location\n# The files need to served in brotli or gzip compressed format\nSTATIC_ROOT = os.path.join(BASE_DIR, 'static/')\n\n# Media files (uploaded by the user)\n\nMEDIA_ROOT = os.path.join(BASE_DIR, '.media/')\n\nMEDIA_URL = \"/media/\"\n\n# Update Authentication classes, removed BasicAuthentication\n# Defaults: https://www.django-rest-framework.org/api-guide/settings/\nREST_FRAMEWORK = {\n 'DEFAULT_AUTHENTICATION_CLASSES': [\n 'rest_framework.authentication.TokenAuthentication',\n 'rest_framework.authentication.SessionAuthentication'\n ],\n 'DEFAULT_PERMISSION_CLASSES': [\n 'rest_framework.permissions.IsAuthenticated',\n ],\n 'DEFAULT_FILTER_BACKENDS': (\n 'django_filters.rest_framework.DjangoFilterBackend',\n 'rest_framework.filters.OrderingFilter',\n ),\n 'TEST_REQUEST_DEFAULT_FORMAT': 'json',\n 'EXCEPTION_HANDLER':\n 'mathesar.exception_handlers.mathesar_exception_handler',\n 'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema'\n}\nSPECTACULAR_SETTINGS = {\n 'TITLE': 'Mathesar API',\n 'DESCRIPTION': '',\n 'VERSION': '1.0.0',\n 'SERVE_INCLUDE_SCHEMA': False,\n 'POSTPROCESSING_HOOKS': [\n 'config.settings.openapi.remove_url_prefix_hook',\n ],\n # OTHER SETTINGS\n}\nFRIENDLY_ERRORS = {\n 'FIELD_ERRORS': {\n # By default drf-friendly-errors does contain error codes for ListSerializer type\n 'ListSerializer': {\n 'required': 2007,\n 'null': 2027,\n 'invalid_choice': 2083,\n 'not_a_list': 2123,\n 'empty': 2093\n },\n 'PermittedPkRelatedField': {\n 'required': 2007,\n 'null': 2027,\n 'does_not_exist': 2151,\n 'incorrect_type': 2161\n },\n 'PermittedSlugRelatedField': {\n 'required': 2007, 'invalid': 2002, 'null': 2027,\n 'does_not_exist': 2151, 'incorrect_type': 2161\n },\n },\n 
'EXCEPTION_DICT': {\n 'Http404': 4005\n }\n}\n# Mathesar settings\nMATHESAR_MODE = decouple_config('MODE', default='PRODUCTION')\nMATHESAR_UI_BUILD_LOCATION = os.path.join(BASE_DIR, 'mathesar/static/mathesar/')\nMATHESAR_MANIFEST_LOCATION = os.path.join(MATHESAR_UI_BUILD_LOCATION, 'manifest.json')\nMATHESAR_CLIENT_DEV_URL = 'http://localhost:3000'\nMATHESAR_UI_SOURCE_LOCATION = os.path.join(BASE_DIR, 'mathesar_ui/')\nMATHESAR_CAPTURE_UNHANDLED_EXCEPTION = decouple_config('CAPTURE_UNHANDLED_EXCEPTION', default=False)\nMATHESAR_STATIC_NON_CODE_FILES_LOCATION = os.path.join(BASE_DIR, 'mathesar/static/non-code/')\n\n# UI source files have to be served by Django in order for static assets to be included during dev mode\n# https://vitejs.dev/guide/assets.html\n# https://vitejs.dev/guide/backend-integration.html\nSTATICFILES_DIRS = [MATHESAR_UI_SOURCE_LOCATION, MATHESAR_STATIC_NON_CODE_FILES_LOCATION] if MATHESAR_MODE == 'DEVELOPMENT' else [MATHESAR_UI_BUILD_LOCATION, MATHESAR_STATIC_NON_CODE_FILES_LOCATION]\nSTATICFILES_STORAGE = \"whitenoise.storage.CompressedManifestStaticFilesStorage\"\n\n# Accounts\nAUTH_USER_MODEL = 'mathesar.User'\nLOGIN_URL = '/auth/login/'\nLOGIN_REDIRECT_URL = '/'\nLOGOUT_REDIRECT_URL = LOGIN_URL\nDRF_ACCESS_POLICY = {\n 'reusable_conditions': ['mathesar.api.permission_conditions']\n}\n# List of Template names that contains additional script tags to be added to the base template\nBASE_TEMPLATE_ADDITIONAL_SCRIPT_TEMPLATES = []\n", "path": "config/settings/common_settings.py"}, {"content": "def remove_url_prefix_hook(result, **kwargs):\n # Remove namespace and version URL prefix from the operation Id of the generated API schema\n for path, path_info in result['paths'].items():\n for method, operation in path_info.items():\n operation_id = operation.get('operationId')\n if operation_id:\n if path.startswith('/api/db/v0/'):\n operation['operationId'] = operation_id.replace('db_v0_', '')\n elif path.startswith('/api/ui/v0/'):\n operation['operationId'] = operation_id.replace('ui_v0_', '')\n\n return result\n", "path": "config/settings/openapi.py"}], "after_files": [{"content": "\"\"\"\nBase settings to build other settings files upon.\n\nGenerated by 'django-admin startproject' using Django 3.1.7.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/3.1/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/3.1/ref/settings/\n\"\"\"\n\nimport os\nfrom pathlib import Path\n\nfrom decouple import Csv, config as decouple_config\nfrom dj_database_url import parse as db_url\n\n\n# We use a 'tuple' with pipes as delimiters as decople naively splits the global\n# variables on commas when casting to Csv()\ndef pipe_delim(pipe_string):\n # Remove opening and closing brackets\n pipe_string = pipe_string[1:-1]\n # Split on pipe delim\n return pipe_string.split(\"|\")\n\n\n# Build paths inside the project like this: BASE_DIR / 'subdir'.\nBASE_DIR = Path(__file__).resolve().parent.parent.parent\n\n# Application definition\n\nINSTALLED_APPS = [\n \"django.contrib.admin\",\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"whitenoise.runserver_nostatic\",\n \"django.contrib.staticfiles\",\n \"rest_framework\",\n \"django_filters\",\n \"django_property_filter\",\n \"drf_spectacular\",\n \"mathesar\",\n]\n\nMIDDLEWARE = [\n \"django.middleware.security.SecurityMiddleware\",\n \"whitenoise.middleware.WhiteNoiseMiddleware\",\n 
\"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n \"mathesar.middleware.CursorClosedHandlerMiddleware\",\n \"mathesar.middleware.PasswordChangeNeededMiddleware\",\n 'django_userforeignkey.middleware.UserForeignKeyMiddleware',\n 'django_request_cache.middleware.RequestCacheMiddleware',\n]\n\nROOT_URLCONF = \"config.urls\"\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [],\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n \"context_processors\": [\n \"config.context_processors.frontend_settings\",\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"mathesar.template_context_processors.base_template_extensions.script_extension_templates\"\n ],\n },\n },\n]\n\nWSGI_APPLICATION = \"config.wsgi.application\"\n\n# Database\n# https://docs.djangoproject.com/en/3.1/ref/settings/#databases\n\n# TODO: Add to documentation that database keys should not be than 128 characters.\n\n# MATHESAR_DATABASES should be of the form '({db_name}|{db_url}), ({db_name}|{db_url})'\n# See pipe_delim above for why we use pipes as delimiters\nDATABASES = {\n db_key: db_url(url_string)\n for db_key, url_string in decouple_config('MATHESAR_DATABASES', cast=Csv(pipe_delim))\n}\nDATABASES[decouple_config('DJANGO_DATABASE_KEY', default=\"default\")] = decouple_config('DJANGO_DATABASE_URL', cast=db_url)\n\nfor db_key, db_dict in DATABASES.items():\n # Engine can be '.postgresql' or '.postgresql_psycopg2'\n if not db_dict['ENGINE'].startswith('django.db.backends.postgresql'):\n raise ValueError(\n f\"{db_key} is not a PostgreSQL database. 
\"\n f\"{db_dict['ENGINE']} found for {db_key}'s engine.\"\n )\n\n# pytest-django will create a new database named 'test_{DATABASES[table_db]['NAME']}'\n# and use it for our API tests if we don't specify DATABASES[table_db]['TEST']['NAME']\nTEST = decouple_config('TEST', default=False, cast=bool)\nif TEST:\n for db_key, _ in decouple_config('MATHESAR_DATABASES', cast=Csv(pipe_delim)):\n DATABASES[db_key]['TEST'] = {'NAME': DATABASES[db_key]['NAME']}\n\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = decouple_config('SECRET_KEY')\n\n# SECURITY WARNING: don't run with debug turned on in production!\nDEBUG = decouple_config('DEBUG', default=False, cast=bool)\n\nALLOWED_HOSTS = decouple_config('ALLOWED_HOSTS', cast=Csv())\n\n# Password validation\n# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n \"NAME\": \"django.contrib.auth.password_validation.UserAttributeSimilarityValidator\",\n },\n {\n \"NAME\": \"django.contrib.auth.password_validation.MinimumLengthValidator\",\n },\n {\n \"NAME\": \"django.contrib.auth.password_validation.CommonPasswordValidator\",\n },\n {\n \"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\",\n },\n]\n\n# Internationalization\n# https://docs.djangoproject.com/en/3.1/topics/i18n/\n\nLANGUAGE_CODE = \"en-us\"\n\nTIME_ZONE = \"UTC\"\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/3.1/howto/static-files/\n# https://docs.djangoproject.com/en/3.1/ref/contrib/staticfiles/\n\nSTATIC_URL = \"/static/\"\n\n# When running with DEBUG=False, the webserver needs to serve files from this location\n# python manage.py collectstatic has to be run to collect all static files into this location\n# The files need to served in brotli or gzip compressed format\nSTATIC_ROOT = os.path.join(BASE_DIR, 'static/')\n\n# Media files (uploaded by the user)\n\nMEDIA_ROOT = os.path.join(BASE_DIR, '.media/')\n\nMEDIA_URL = \"/media/\"\n\n# Update Authentication classes, removed BasicAuthentication\n# Defaults: https://www.django-rest-framework.org/api-guide/settings/\nREST_FRAMEWORK = {\n 'DEFAULT_AUTHENTICATION_CLASSES': [\n 'rest_framework.authentication.TokenAuthentication',\n 'rest_framework.authentication.SessionAuthentication'\n ],\n 'DEFAULT_PERMISSION_CLASSES': [\n 'rest_framework.permissions.IsAuthenticated',\n ],\n 'DEFAULT_FILTER_BACKENDS': (\n 'django_filters.rest_framework.DjangoFilterBackend',\n 'rest_framework.filters.OrderingFilter',\n ),\n 'TEST_REQUEST_DEFAULT_FORMAT': 'json',\n 'EXCEPTION_HANDLER':\n 'mathesar.exception_handlers.mathesar_exception_handler',\n 'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema'\n}\nSPECTACULAR_SETTINGS = {\n 'TITLE': 'Mathesar API',\n 'DESCRIPTION': '',\n 'VERSION': '1.0.0',\n 'SERVE_INCLUDE_SCHEMA': False,\n 'PREPROCESSING_HOOKS': ['config.settings.openapi.custom_preprocessing_hook'],\n 'POSTPROCESSING_HOOKS': [\n 'config.settings.openapi.remove_url_prefix_hook',\n ],\n # OTHER SETTINGS\n}\nFRIENDLY_ERRORS = {\n 'FIELD_ERRORS': {\n # By default drf-friendly-errors does contain error codes for ListSerializer type\n 'ListSerializer': {\n 'required': 2007,\n 'null': 2027,\n 'invalid_choice': 2083,\n 'not_a_list': 2123,\n 'empty': 2093\n },\n 'PermittedPkRelatedField': {\n 'required': 2007,\n 'null': 2027,\n 'does_not_exist': 2151,\n 'incorrect_type': 2161\n },\n 'PermittedSlugRelatedField': {\n 'required': 2007, 'invalid': 2002, 
'null': 2027,\n 'does_not_exist': 2151, 'incorrect_type': 2161\n },\n },\n 'EXCEPTION_DICT': {\n 'Http404': 4005\n }\n}\n# Mathesar settings\nMATHESAR_MODE = decouple_config('MODE', default='PRODUCTION')\nMATHESAR_UI_BUILD_LOCATION = os.path.join(BASE_DIR, 'mathesar/static/mathesar/')\nMATHESAR_MANIFEST_LOCATION = os.path.join(MATHESAR_UI_BUILD_LOCATION, 'manifest.json')\nMATHESAR_CLIENT_DEV_URL = 'http://localhost:3000'\nMATHESAR_UI_SOURCE_LOCATION = os.path.join(BASE_DIR, 'mathesar_ui/')\nMATHESAR_CAPTURE_UNHANDLED_EXCEPTION = decouple_config('CAPTURE_UNHANDLED_EXCEPTION', default=False)\nMATHESAR_STATIC_NON_CODE_FILES_LOCATION = os.path.join(BASE_DIR, 'mathesar/static/non-code/')\n\n# UI source files have to be served by Django in order for static assets to be included during dev mode\n# https://vitejs.dev/guide/assets.html\n# https://vitejs.dev/guide/backend-integration.html\nSTATICFILES_DIRS = [MATHESAR_UI_SOURCE_LOCATION, MATHESAR_STATIC_NON_CODE_FILES_LOCATION] if MATHESAR_MODE == 'DEVELOPMENT' else [MATHESAR_UI_BUILD_LOCATION, MATHESAR_STATIC_NON_CODE_FILES_LOCATION]\nSTATICFILES_STORAGE = \"whitenoise.storage.CompressedManifestStaticFilesStorage\"\n\n# Accounts\nAUTH_USER_MODEL = 'mathesar.User'\nLOGIN_URL = '/auth/login/'\nLOGIN_REDIRECT_URL = '/'\nLOGOUT_REDIRECT_URL = LOGIN_URL\nDRF_ACCESS_POLICY = {\n 'reusable_conditions': ['mathesar.api.permission_conditions']\n}\n# List of Template names that contains additional script tags to be added to the base template\nBASE_TEMPLATE_ADDITIONAL_SCRIPT_TEMPLATES = []\n", "path": "config/settings/common_settings.py"}, {"content": "def custom_preprocessing_hook(endpoints):\n filtered = []\n for (path, path_regex, method, callback) in endpoints:\n # Remove all but DRF API endpoints\n if path.startswith(\"/api/db/v0/data_files/\"):\n filtered.append((path, path_regex, method, callback))\n return filtered\n\n\ndef remove_url_prefix_hook(result, **kwargs):\n # Remove namespace and version URL prefix from the operation Id of the generated API schema\n for path, path_info in result['paths'].items():\n for method, operation in path_info.items():\n operation_id = operation.get('operationId')\n if operation_id:\n if path.startswith('/api/db/v0/'):\n operation['operationId'] = operation_id.replace('db_v0_', '')\n elif path.startswith('/api/ui/v0/'):\n operation['operationId'] = operation_id.replace('ui_v0_', '')\n\n return result\n", "path": "config/settings/openapi.py"}]}
| 3,359 | 270 |
gh_patches_debug_10804
|
rasdani/github-patches
|
git_diff
|
MycroftAI__mycroft-core-2985
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add note about legal issues when using gTTS in public or commercial projects
originally opened in the OVOS plugin repo
> I've just posted an issue about potential legal issues with this API in the gTTS repo: [pndurette/gTTS#309](https://github.com/pndurette/gTTS/issues/309)
>
> From the plugin description people will only understand that the service can break anytime but there are more risks. Imagine someone will build a system that goes into "production" with this plugin active (you know how people are ^^) and commercializes it. In the best case Google will just ask the owner to shut it down, in the worst case they will get sued :-/.
>
> Personally I don't think an undocumented and unstable API moving in a legal gray area should be included in a system called OpenVoiceOS. For the same reasons it is not part of SEPIA OpenAssistant Framework.
>
> That said I know first hand how hard it is to find an alternative that has comparable quality, is free (=self-hosted) and fast enough to run on Raspberry Pi etc., but I think services like gTTS are just a distraction on the path to become really "open"!
links:
https://github.com/OpenVoiceOS/ovos-tts-plugin-google-tx/issues/1
https://github.com/pndurette/gTTS/issues/309
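
Beyond documentation, one way such a notice could be surfaced is to log a warning when the module is initialised. The sketch below is illustrative only — the logger name and helper function are assumptions, not necessarily what the maintainers adopted:

```python
# Illustrative only: emitting a runtime notice about the unofficial endpoint.
import logging

LOG = logging.getLogger("mycroft.tts.google_tts")


def warn_about_gtts() -> None:
    """Log a one-time notice about the unofficial Google TTS endpoint."""
    LOG.warning(
        "gTTS uses the undocumented Google Translate text-to-speech API. "
        "It is not intended for commercial or production use, may break at "
        "any time, and use is subject to Google's Terms of Service: "
        "https://policies.google.com/terms"
    )
```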
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mycroft/tts/google_tts.py`
Content:
```
1 # Copyright 2017 Mycroft AI Inc.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 #
15 from gtts import gTTS
16 from gtts.lang import tts_langs
17
18 from .tts import TTS, TTSValidator
19
20 from mycroft.util.log import LOG
21
22 # Live list of languages
23 # Cached list of supported languages (2021-02-09)
24 _default_langs = {'af': 'Afrikaans', 'sq': 'Albanian', 'ar': 'Arabic',
25 'hy': 'Armenian', 'bn': 'Bengali', 'bs': 'Bosnian',
26 'ca': 'Catalan', 'hr': 'Croatian', 'cs': 'Czech',
27 'da': 'Danish', 'nl': 'Dutch', 'en': 'English',
28 'eo': 'Esperanto', 'et': 'Estonian', 'tl': 'Filipino',
29 'fi': 'Finnish', 'fr': 'French', 'de': 'German',
30 'el': 'Greek', 'gu': 'Gujarati', 'hi': 'Hindi',
31 'hu': 'Hungarian', 'is': 'Icelandic', 'id': 'Indonesian',
32 'it': 'Italian', 'ja': 'Japanese', 'jw': 'Javanese',
33 'kn': 'Kannada', 'km': 'Khmer', 'ko': 'Korean',
34 'la': 'Latin', 'lv': 'Latvian', 'mk': 'Macedonian',
35 'ml': 'Malayalam', 'mr': 'Marathi',
36 'my': 'Myanmar (Burmese)', 'ne': 'Nepali',
37 'no': 'Norwegian', 'pl': 'Polish', 'pt': 'Portuguese',
38 'ro': 'Romanian', 'ru': 'Russian', 'sr': 'Serbian',
39 'si': 'Sinhala', 'sk': 'Slovak', 'es': 'Spanish',
40 'su': 'Sundanese', 'sw': 'Swahili', 'sv': 'Swedish',
41 'ta': 'Tamil', 'te': 'Telugu', 'th': 'Thai', 'tr': 'Turkish',
42 'uk': 'Ukrainian', 'ur': 'Urdu', 'vi': 'Vietnamese',
43 'cy': 'Welsh', 'zh': 'Chinese (Mandarin/China)'
44 }
45
46
47 _supported_langs = None
48
49
50 def get_supported_langs():
51 """Get dict of supported languages.
52
53 Tries to fetch remote list, if that fails a local cache will be used.
54
55 Returns:
56 (dict): Lang code to lang name map.
57 """
58 global _supported_langs
59 if not _supported_langs:
60 try:
61 _supported_langs = tts_langs()
62 except Exception:
63 LOG.warning('Couldn\'t fetch upto date language codes')
64 return _supported_langs or _default_langs
65
66
67 class GoogleTTS(TTS):
68 """Interface to google TTS."""
69 def __init__(self, lang, config):
70 self._google_lang = None
71 super(GoogleTTS, self).__init__(lang, config, GoogleTTSValidator(
72 self), 'mp3')
73
74 @property
75 def google_lang(self):
76 """Property containing a converted language code suitable for gTTS."""
77 supported_langs = get_supported_langs()
78 if not self._google_lang:
79 if self.lang.lower() in supported_langs:
80 self._google_lang = self.lang.lower()
81 elif self.lang[:2].lower() in supported_langs:
82 self._google_lang = self.lang[:2]
83 return self._google_lang or self.lang.lower()
84
85 def get_tts(self, sentence, wav_file):
86 """Fetch tts audio using gTTS.
87
88 Args:
89 sentence (str): Sentence to generate audio for
90 wav_file (str): output file path
91 Returns:
92 Tuple ((str) written file, None)
93 """
94 tts = gTTS(text=sentence, lang=self.google_lang)
95 tts.save(wav_file)
96 return (wav_file, None) # No phonemes
97
98
99 class GoogleTTSValidator(TTSValidator):
100 def __init__(self, tts):
101 super(GoogleTTSValidator, self).__init__(tts)
102
103 def validate_lang(self):
104 lang = self.tts.google_lang
105 if lang.lower() not in get_supported_langs():
106 raise ValueError("Language not supported by gTTS: {}".format(lang))
107
108 def validate_connection(self):
109 try:
110 gTTS(text='Hi').save(self.tts.filename)
111 except Exception:
112 raise Exception(
113 'GoogleTTS server could not be verified. Please check your '
114 'internet connection.')
115
116 def get_tts_class(self):
117 return GoogleTTS
118
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/mycroft/tts/google_tts.py b/mycroft/tts/google_tts.py
--- a/mycroft/tts/google_tts.py
+++ b/mycroft/tts/google_tts.py
@@ -70,6 +70,13 @@
self._google_lang = None
super(GoogleTTS, self).__init__(lang, config, GoogleTTSValidator(
self), 'mp3')
+ LOG.warning(
+ "The Google TTS module uses the gTTS Python package which itself "
+ "interfaces with the Google Translate text-to-speech API. This is "
+ "not intended for commercial or production usage. The service "
+ "may break at any time, and you are subject to their Terms of "
+ "Service that can be found at https://policies.google.com/terms"
+ )
@property
def google_lang(self):
|
{"golden_diff": "diff --git a/mycroft/tts/google_tts.py b/mycroft/tts/google_tts.py\n--- a/mycroft/tts/google_tts.py\n+++ b/mycroft/tts/google_tts.py\n@@ -70,6 +70,13 @@\n self._google_lang = None\n super(GoogleTTS, self).__init__(lang, config, GoogleTTSValidator(\n self), 'mp3')\n+ LOG.warning(\n+ \"The Google TTS module uses the gTTS Python package which itself \"\n+ \"interfaces with the Google Translate text-to-speech API. This is \"\n+ \"not intended for commercial or production usage. The service \"\n+ \"may break at any time, and you are subject to their Terms of \"\n+ \"Service that can be found at https://policies.google.com/terms\"\n+ )\n \n @property\n def google_lang(self):\n", "issue": "Add note about legal issues when using gTTS in public or commercial projects\noriginally opened in the OVOS plugin repo\r\n\r\n> I've just posted an issue about potential legal issues with this API in the gTTS repo: [pndurette/gTTS#309](https://github.com/pndurette/gTTS/issues/309)\r\n> \r\n> From the plugin description people will only understand that the service can break anytime but there are more risks. Imagine someone will build a system that goes into \"production\" with this plugin active (you know how people are ^^) and commercializes it. In the best case Google will just ask the owner to shut it down, in the worst case they will get sued :-/.\r\n> \r\n> Personally I don't think an undocumented and unstable API moving in a legal gray area should be included in a system called OpenVoiceOS. For the same reasons it is not part of SEPIA OpenAssistant Framework.\r\n> \r\n> That said I know first hand how hard it is to find an alternative that has comparable quality, is free (=self-hosted) and fast enough to run on Raspberry Pi etc., but I think services like gTTS are just a distraction on the path to become really \"open\"!\r\n\r\n\r\n\r\nlinks:\r\n\r\nhttps://github.com/OpenVoiceOS/ovos-tts-plugin-google-tx/issues/1\r\nhttps://github.com/pndurette/gTTS/issues/309\n", "before_files": [{"content": "# Copyright 2017 Mycroft AI Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nfrom gtts import gTTS\nfrom gtts.lang import tts_langs\n\nfrom .tts import TTS, TTSValidator\n\nfrom mycroft.util.log import LOG\n\n# Live list of languages\n# Cached list of supported languages (2021-02-09)\n_default_langs = {'af': 'Afrikaans', 'sq': 'Albanian', 'ar': 'Arabic',\n 'hy': 'Armenian', 'bn': 'Bengali', 'bs': 'Bosnian',\n 'ca': 'Catalan', 'hr': 'Croatian', 'cs': 'Czech',\n 'da': 'Danish', 'nl': 'Dutch', 'en': 'English',\n 'eo': 'Esperanto', 'et': 'Estonian', 'tl': 'Filipino',\n 'fi': 'Finnish', 'fr': 'French', 'de': 'German',\n 'el': 'Greek', 'gu': 'Gujarati', 'hi': 'Hindi',\n 'hu': 'Hungarian', 'is': 'Icelandic', 'id': 'Indonesian',\n 'it': 'Italian', 'ja': 'Japanese', 'jw': 'Javanese',\n 'kn': 'Kannada', 'km': 'Khmer', 'ko': 'Korean',\n 'la': 'Latin', 'lv': 'Latvian', 'mk': 'Macedonian',\n 'ml': 'Malayalam', 'mr': 'Marathi',\n 'my': 'Myanmar (Burmese)', 'ne': 'Nepali',\n 'no': 'Norwegian', 'pl': 
'Polish', 'pt': 'Portuguese',\n 'ro': 'Romanian', 'ru': 'Russian', 'sr': 'Serbian',\n 'si': 'Sinhala', 'sk': 'Slovak', 'es': 'Spanish',\n 'su': 'Sundanese', 'sw': 'Swahili', 'sv': 'Swedish',\n 'ta': 'Tamil', 'te': 'Telugu', 'th': 'Thai', 'tr': 'Turkish',\n 'uk': 'Ukrainian', 'ur': 'Urdu', 'vi': 'Vietnamese',\n 'cy': 'Welsh', 'zh': 'Chinese (Mandarin/China)'\n }\n\n\n_supported_langs = None\n\n\ndef get_supported_langs():\n \"\"\"Get dict of supported languages.\n\n Tries to fetch remote list, if that fails a local cache will be used.\n\n Returns:\n (dict): Lang code to lang name map.\n \"\"\"\n global _supported_langs\n if not _supported_langs:\n try:\n _supported_langs = tts_langs()\n except Exception:\n LOG.warning('Couldn\\'t fetch upto date language codes')\n return _supported_langs or _default_langs\n\n\nclass GoogleTTS(TTS):\n \"\"\"Interface to google TTS.\"\"\"\n def __init__(self, lang, config):\n self._google_lang = None\n super(GoogleTTS, self).__init__(lang, config, GoogleTTSValidator(\n self), 'mp3')\n\n @property\n def google_lang(self):\n \"\"\"Property containing a converted language code suitable for gTTS.\"\"\"\n supported_langs = get_supported_langs()\n if not self._google_lang:\n if self.lang.lower() in supported_langs:\n self._google_lang = self.lang.lower()\n elif self.lang[:2].lower() in supported_langs:\n self._google_lang = self.lang[:2]\n return self._google_lang or self.lang.lower()\n\n def get_tts(self, sentence, wav_file):\n \"\"\"Fetch tts audio using gTTS.\n\n Args:\n sentence (str): Sentence to generate audio for\n wav_file (str): output file path\n Returns:\n Tuple ((str) written file, None)\n \"\"\"\n tts = gTTS(text=sentence, lang=self.google_lang)\n tts.save(wav_file)\n return (wav_file, None) # No phonemes\n\n\nclass GoogleTTSValidator(TTSValidator):\n def __init__(self, tts):\n super(GoogleTTSValidator, self).__init__(tts)\n\n def validate_lang(self):\n lang = self.tts.google_lang\n if lang.lower() not in get_supported_langs():\n raise ValueError(\"Language not supported by gTTS: {}\".format(lang))\n\n def validate_connection(self):\n try:\n gTTS(text='Hi').save(self.tts.filename)\n except Exception:\n raise Exception(\n 'GoogleTTS server could not be verified. 
Please check your '\n 'internet connection.')\n\n def get_tts_class(self):\n return GoogleTTS\n", "path": "mycroft/tts/google_tts.py"}], "after_files": [{"content": "# Copyright 2017 Mycroft AI Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nfrom gtts import gTTS\nfrom gtts.lang import tts_langs\n\nfrom .tts import TTS, TTSValidator\n\nfrom mycroft.util.log import LOG\n\n# Live list of languages\n# Cached list of supported languages (2021-02-09)\n_default_langs = {'af': 'Afrikaans', 'sq': 'Albanian', 'ar': 'Arabic',\n 'hy': 'Armenian', 'bn': 'Bengali', 'bs': 'Bosnian',\n 'ca': 'Catalan', 'hr': 'Croatian', 'cs': 'Czech',\n 'da': 'Danish', 'nl': 'Dutch', 'en': 'English',\n 'eo': 'Esperanto', 'et': 'Estonian', 'tl': 'Filipino',\n 'fi': 'Finnish', 'fr': 'French', 'de': 'German',\n 'el': 'Greek', 'gu': 'Gujarati', 'hi': 'Hindi',\n 'hu': 'Hungarian', 'is': 'Icelandic', 'id': 'Indonesian',\n 'it': 'Italian', 'ja': 'Japanese', 'jw': 'Javanese',\n 'kn': 'Kannada', 'km': 'Khmer', 'ko': 'Korean',\n 'la': 'Latin', 'lv': 'Latvian', 'mk': 'Macedonian',\n 'ml': 'Malayalam', 'mr': 'Marathi',\n 'my': 'Myanmar (Burmese)', 'ne': 'Nepali',\n 'no': 'Norwegian', 'pl': 'Polish', 'pt': 'Portuguese',\n 'ro': 'Romanian', 'ru': 'Russian', 'sr': 'Serbian',\n 'si': 'Sinhala', 'sk': 'Slovak', 'es': 'Spanish',\n 'su': 'Sundanese', 'sw': 'Swahili', 'sv': 'Swedish',\n 'ta': 'Tamil', 'te': 'Telugu', 'th': 'Thai', 'tr': 'Turkish',\n 'uk': 'Ukrainian', 'ur': 'Urdu', 'vi': 'Vietnamese',\n 'cy': 'Welsh', 'zh': 'Chinese (Mandarin/China)'\n }\n\n\n_supported_langs = None\n\n\ndef get_supported_langs():\n \"\"\"Get dict of supported languages.\n\n Tries to fetch remote list, if that fails a local cache will be used.\n\n Returns:\n (dict): Lang code to lang name map.\n \"\"\"\n global _supported_langs\n if not _supported_langs:\n try:\n _supported_langs = tts_langs()\n except Exception:\n LOG.warning('Couldn\\'t fetch upto date language codes')\n return _supported_langs or _default_langs\n\n\nclass GoogleTTS(TTS):\n \"\"\"Interface to google TTS.\"\"\"\n def __init__(self, lang, config):\n self._google_lang = None\n super(GoogleTTS, self).__init__(lang, config, GoogleTTSValidator(\n self), 'mp3')\n LOG.warning(\n \"The Google TTS module uses the gTTS Python package which itself \"\n \"interfaces with the Google Translate text-to-speech API. This is \"\n \"not intended for commercial or production usage. 
The service \"\n \"may break at any time, and you are subject to their Terms of \"\n \"Service that can be found at https://policies.google.com/terms\"\n )\n\n @property\n def google_lang(self):\n \"\"\"Property containing a converted language code suitable for gTTS.\"\"\"\n supported_langs = get_supported_langs()\n if not self._google_lang:\n if self.lang.lower() in supported_langs:\n self._google_lang = self.lang.lower()\n elif self.lang[:2].lower() in supported_langs:\n self._google_lang = self.lang[:2]\n return self._google_lang or self.lang.lower()\n\n def get_tts(self, sentence, wav_file):\n \"\"\"Fetch tts audio using gTTS.\n\n Args:\n sentence (str): Sentence to generate audio for\n wav_file (str): output file path\n Returns:\n Tuple ((str) written file, None)\n \"\"\"\n tts = gTTS(text=sentence, lang=self.google_lang)\n tts.save(wav_file)\n return (wav_file, None) # No phonemes\n\n\nclass GoogleTTSValidator(TTSValidator):\n def __init__(self, tts):\n super(GoogleTTSValidator, self).__init__(tts)\n\n def validate_lang(self):\n lang = self.tts.google_lang\n if lang.lower() not in get_supported_langs():\n raise ValueError(\"Language not supported by gTTS: {}\".format(lang))\n\n def validate_connection(self):\n try:\n gTTS(text='Hi').save(self.tts.filename)\n except Exception:\n raise Exception(\n 'GoogleTTS server could not be verified. Please check your '\n 'internet connection.')\n\n def get_tts_class(self):\n return GoogleTTS\n", "path": "mycroft/tts/google_tts.py"}]}
| 1,991 | 195 |
gh_patches_debug_27808
|
rasdani/github-patches
|
git_diff
|
sopel-irc__sopel-2069
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support 2-pronoun sets in the pronoun module
### The problem
Setting pronouns in the pronouns module can be unintuitive because the bot accepts either a 1-pronoun "object" command (`.setpronouns she`) or a long 5-pronouns "subject/object/possessive-determiner/possessive-pronoun/reflexive" commnad (`.setpronouns she/her/her/hers/herself`). However, in my experience most people are used to the 2-pronoun convention of "subject/object" `.setpronouns she/her`).
### The solution
Supporting 2-word version of pronouns.
### Alternatives
An alternative would be to improve the message for unrecognised pronoun sets by adding explicit instructions to use either 1- or 5-pronoun sets.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sopel/modules/pronouns.py`
Content:
```
1 # coding=utf-8
2 """
3 pronouns.py - Sopel Pronouns Plugin
4 Copyright © 2016, Elsie Powell
5 Licensed under the Eiffel Forum License 2.
6
7 https://sopel.chat
8 """
9 from __future__ import absolute_import, division, print_function, unicode_literals
10
11 from sopel import plugin
12
13
14 # Copied from pronoun.is, leaving a *lot* out. If
15 # https://github.com/witch-house/pronoun.is/pull/96 gets merged, using that
16 # would be a lot easier.
17 KNOWN_SETS = {
18 'ze': 'ze/hir/hir/hirs/hirself',
19 'ze/hir': 'ze/hir/hir/hirs/hirself',
20 'ze/zir': 'ze/zir/zir/zirs/zirself',
21 'they': 'they/them/their/theirs/themselves',
22 'they/.../themselves': 'they/them/their/theirs/themselves',
23 'they/.../themself': 'they/them/their/theirs/themself',
24 'she': 'she/her/her/hers/herself',
25 'he': 'he/him/his/his/himself',
26 'xey': 'xey/xem/xyr/xyrs/xemself',
27 'sie': 'sie/hir/hir/hirs/hirself',
28 'it': 'it/it/its/its/itself',
29 'ey': 'ey/em/eir/eirs/eirslef',
30 }
31
32
33 @plugin.command('pronouns')
34 @plugin.example('.pronouns Embolalia')
35 def pronouns(bot, trigger):
36 """Show the pronouns for a given user, defaulting to the current user if left blank."""
37 if not trigger.group(3):
38 pronouns = bot.db.get_nick_value(trigger.nick, 'pronouns')
39 if pronouns:
40 say_pronouns(bot, trigger.nick, pronouns)
41 else:
42 bot.reply("I don't know your pronouns! You can set them with "
43 "{}setpronouns".format(bot.config.core.help_prefix))
44 else:
45 pronouns = bot.db.get_nick_value(trigger.group(3), 'pronouns')
46 if pronouns:
47 say_pronouns(bot, trigger.group(3), pronouns)
48 elif trigger.group(3) == bot.nick:
49 # You can stuff an entry into the database manually for your bot's
50 # gender, but like… it's a bot.
51 bot.say(
52 "I am a bot. Beep boop. My pronouns are it/it/its/its/itself. "
53 "See https://pronoun.is/it for examples."
54 )
55 else:
56 bot.reply("I don't know {}'s pronouns. They can set them with "
57 "{}setpronouns".format(trigger.group(3),
58 bot.config.core.help_prefix))
59
60
61 def say_pronouns(bot, nick, pronouns):
62 for short, set_ in KNOWN_SETS.items():
63 if pronouns == set_:
64 break
65 short = pronouns
66
67 bot.say("{}'s pronouns are {}. See https://pronoun.is/{} for "
68 "examples.".format(nick, pronouns, short))
69
70
71 @plugin.command('setpronouns')
72 @plugin.example('.setpronouns they/them/their/theirs/themselves')
73 def set_pronouns(bot, trigger):
74 pronouns = trigger.group(2)
75 """Set your pronouns."""
76 if not pronouns:
77 bot.reply('What pronouns do you use?')
78 return
79
80 disambig = ''
81 if pronouns == 'they':
82 disambig = ' You can also use they/.../themself, if you prefer.'
83 pronouns = KNOWN_SETS.get(pronouns)
84 elif pronouns == 'ze':
85 disambig = ' I have ze/hir. If you meant ze/zir, you can use that instead.'
86 pronouns = KNOWN_SETS.get(pronouns)
87 elif len(pronouns.split('/')) != 5:
88 pronouns = KNOWN_SETS.get(pronouns)
89 if not pronouns:
90 bot.reply(
91 "I'm sorry, I don't know those pronouns. "
92 "You can give me a set I don't know by formatting it "
93 "subject/object/possessive-determiner/possessive-pronoun/"
94 "reflexive, as in: they/them/their/theirs/themselves"
95 )
96 return
97 bot.db.set_nick_value(trigger.nick, 'pronouns', pronouns)
98 bot.reply("Thanks for telling me!" + disambig)
99
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sopel/modules/pronouns.py b/sopel/modules/pronouns.py
--- a/sopel/modules/pronouns.py
+++ b/sopel/modules/pronouns.py
@@ -19,14 +19,21 @@
'ze/hir': 'ze/hir/hir/hirs/hirself',
'ze/zir': 'ze/zir/zir/zirs/zirself',
'they': 'they/them/their/theirs/themselves',
+ 'they/them': 'they/them/their/theirs/themselves',
'they/.../themselves': 'they/them/their/theirs/themselves',
'they/.../themself': 'they/them/their/theirs/themself',
'she': 'she/her/her/hers/herself',
+ 'she/her': 'she/her/her/hers/herself',
'he': 'he/him/his/his/himself',
+ 'he/him': 'he/him/his/his/himself',
'xey': 'xey/xem/xyr/xyrs/xemself',
+ 'xey/xem': 'xey/xem/xyr/xyrs/xemself',
'sie': 'sie/hir/hir/hirs/hirself',
+ 'sie/hir': 'sie/hir/hir/hirs/hirself',
'it': 'it/it/its/its/itself',
- 'ey': 'ey/em/eir/eirs/eirslef',
+ 'it/it': 'it/it/its/its/itself',
+ 'ey': 'ey/em/eir/eirs/eirself',
+ 'ey/em': 'ey/em/eir/eirs/eirself',
}
@@ -71,8 +78,8 @@
@plugin.command('setpronouns')
@plugin.example('.setpronouns they/them/their/theirs/themselves')
def set_pronouns(bot, trigger):
- pronouns = trigger.group(2)
"""Set your pronouns."""
+ pronouns = trigger.group(2)
if not pronouns:
bot.reply('What pronouns do you use?')
return
|
{"golden_diff": "diff --git a/sopel/modules/pronouns.py b/sopel/modules/pronouns.py\n--- a/sopel/modules/pronouns.py\n+++ b/sopel/modules/pronouns.py\n@@ -19,14 +19,21 @@\n 'ze/hir': 'ze/hir/hir/hirs/hirself',\n 'ze/zir': 'ze/zir/zir/zirs/zirself',\n 'they': 'they/them/their/theirs/themselves',\n+ 'they/them': 'they/them/their/theirs/themselves',\n 'they/.../themselves': 'they/them/their/theirs/themselves',\n 'they/.../themself': 'they/them/their/theirs/themself',\n 'she': 'she/her/her/hers/herself',\n+ 'she/her': 'she/her/her/hers/herself',\n 'he': 'he/him/his/his/himself',\n+ 'he/him': 'he/him/his/his/himself',\n 'xey': 'xey/xem/xyr/xyrs/xemself',\n+ 'xey/xem': 'xey/xem/xyr/xyrs/xemself',\n 'sie': 'sie/hir/hir/hirs/hirself',\n+ 'sie/hir': 'sie/hir/hir/hirs/hirself',\n 'it': 'it/it/its/its/itself',\n- 'ey': 'ey/em/eir/eirs/eirslef',\n+ 'it/it': 'it/it/its/its/itself',\n+ 'ey': 'ey/em/eir/eirs/eirself',\n+ 'ey/em': 'ey/em/eir/eirs/eirself',\n }\n \n \n@@ -71,8 +78,8 @@\n @plugin.command('setpronouns')\n @plugin.example('.setpronouns they/them/their/theirs/themselves')\n def set_pronouns(bot, trigger):\n- pronouns = trigger.group(2)\n \"\"\"Set your pronouns.\"\"\"\n+ pronouns = trigger.group(2)\n if not pronouns:\n bot.reply('What pronouns do you use?')\n return\n", "issue": "Support 2-pronoun sets in the pronoun module \n### The problem\r\n\r\nSetting pronouns in the pronouns module can be unintuitive because the bot accepts either a 1-pronoun \"object\" command (`.setpronouns she`) or a long 5-pronouns \"subject/object/possessive-determiner/possessive-pronoun/reflexive\" commnad (`.setpronouns she/her/her/hers/herself`). However, in my experience most people are used to the 2-pronoun convention of \"subject/object\" `.setpronouns she/her`). \r\n\r\n### The solution\r\n\r\nSupporting 2-word version of pronouns. \r\n\r\n### Alternatives\r\n\r\nAn alternative would be to improve the message for unrecognised pronoun sets by adding explicit instructions to use either 1- or 5-pronoun sets. \r\n\r\n\r\n\n", "before_files": [{"content": "# coding=utf-8\n\"\"\"\npronouns.py - Sopel Pronouns Plugin\nCopyright \u00a9 2016, Elsie Powell\nLicensed under the Eiffel Forum License 2.\n\nhttps://sopel.chat\n\"\"\"\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom sopel import plugin\n\n\n# Copied from pronoun.is, leaving a *lot* out. If\n# https://github.com/witch-house/pronoun.is/pull/96 gets merged, using that\n# would be a lot easier.\nKNOWN_SETS = {\n 'ze': 'ze/hir/hir/hirs/hirself',\n 'ze/hir': 'ze/hir/hir/hirs/hirself',\n 'ze/zir': 'ze/zir/zir/zirs/zirself',\n 'they': 'they/them/their/theirs/themselves',\n 'they/.../themselves': 'they/them/their/theirs/themselves',\n 'they/.../themself': 'they/them/their/theirs/themself',\n 'she': 'she/her/her/hers/herself',\n 'he': 'he/him/his/his/himself',\n 'xey': 'xey/xem/xyr/xyrs/xemself',\n 'sie': 'sie/hir/hir/hirs/hirself',\n 'it': 'it/it/its/its/itself',\n 'ey': 'ey/em/eir/eirs/eirslef',\n}\n\n\[email protected]('pronouns')\[email protected]('.pronouns Embolalia')\ndef pronouns(bot, trigger):\n \"\"\"Show the pronouns for a given user, defaulting to the current user if left blank.\"\"\"\n if not trigger.group(3):\n pronouns = bot.db.get_nick_value(trigger.nick, 'pronouns')\n if pronouns:\n say_pronouns(bot, trigger.nick, pronouns)\n else:\n bot.reply(\"I don't know your pronouns! 
You can set them with \"\n \"{}setpronouns\".format(bot.config.core.help_prefix))\n else:\n pronouns = bot.db.get_nick_value(trigger.group(3), 'pronouns')\n if pronouns:\n say_pronouns(bot, trigger.group(3), pronouns)\n elif trigger.group(3) == bot.nick:\n # You can stuff an entry into the database manually for your bot's\n # gender, but like\u2026 it's a bot.\n bot.say(\n \"I am a bot. Beep boop. My pronouns are it/it/its/its/itself. \"\n \"See https://pronoun.is/it for examples.\"\n )\n else:\n bot.reply(\"I don't know {}'s pronouns. They can set them with \"\n \"{}setpronouns\".format(trigger.group(3),\n bot.config.core.help_prefix))\n\n\ndef say_pronouns(bot, nick, pronouns):\n for short, set_ in KNOWN_SETS.items():\n if pronouns == set_:\n break\n short = pronouns\n\n bot.say(\"{}'s pronouns are {}. See https://pronoun.is/{} for \"\n \"examples.\".format(nick, pronouns, short))\n\n\[email protected]('setpronouns')\[email protected]('.setpronouns they/them/their/theirs/themselves')\ndef set_pronouns(bot, trigger):\n pronouns = trigger.group(2)\n \"\"\"Set your pronouns.\"\"\"\n if not pronouns:\n bot.reply('What pronouns do you use?')\n return\n\n disambig = ''\n if pronouns == 'they':\n disambig = ' You can also use they/.../themself, if you prefer.'\n pronouns = KNOWN_SETS.get(pronouns)\n elif pronouns == 'ze':\n disambig = ' I have ze/hir. If you meant ze/zir, you can use that instead.'\n pronouns = KNOWN_SETS.get(pronouns)\n elif len(pronouns.split('/')) != 5:\n pronouns = KNOWN_SETS.get(pronouns)\n if not pronouns:\n bot.reply(\n \"I'm sorry, I don't know those pronouns. \"\n \"You can give me a set I don't know by formatting it \"\n \"subject/object/possessive-determiner/possessive-pronoun/\"\n \"reflexive, as in: they/them/their/theirs/themselves\"\n )\n return\n bot.db.set_nick_value(trigger.nick, 'pronouns', pronouns)\n bot.reply(\"Thanks for telling me!\" + disambig)\n", "path": "sopel/modules/pronouns.py"}], "after_files": [{"content": "# coding=utf-8\n\"\"\"\npronouns.py - Sopel Pronouns Plugin\nCopyright \u00a9 2016, Elsie Powell\nLicensed under the Eiffel Forum License 2.\n\nhttps://sopel.chat\n\"\"\"\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom sopel import plugin\n\n\n# Copied from pronoun.is, leaving a *lot* out. 
If\n# https://github.com/witch-house/pronoun.is/pull/96 gets merged, using that\n# would be a lot easier.\nKNOWN_SETS = {\n 'ze': 'ze/hir/hir/hirs/hirself',\n 'ze/hir': 'ze/hir/hir/hirs/hirself',\n 'ze/zir': 'ze/zir/zir/zirs/zirself',\n 'they': 'they/them/their/theirs/themselves',\n 'they/them': 'they/them/their/theirs/themselves',\n 'they/.../themselves': 'they/them/their/theirs/themselves',\n 'they/.../themself': 'they/them/their/theirs/themself',\n 'she': 'she/her/her/hers/herself',\n 'she/her': 'she/her/her/hers/herself',\n 'he': 'he/him/his/his/himself',\n 'he/him': 'he/him/his/his/himself',\n 'xey': 'xey/xem/xyr/xyrs/xemself',\n 'xey/xem': 'xey/xem/xyr/xyrs/xemself',\n 'sie': 'sie/hir/hir/hirs/hirself',\n 'sie/hir': 'sie/hir/hir/hirs/hirself',\n 'it': 'it/it/its/its/itself',\n 'it/it': 'it/it/its/its/itself',\n 'ey': 'ey/em/eir/eirs/eirself',\n 'ey/em': 'ey/em/eir/eirs/eirself',\n}\n\n\[email protected]('pronouns')\[email protected]('.pronouns Embolalia')\ndef pronouns(bot, trigger):\n \"\"\"Show the pronouns for a given user, defaulting to the current user if left blank.\"\"\"\n if not trigger.group(3):\n pronouns = bot.db.get_nick_value(trigger.nick, 'pronouns')\n if pronouns:\n say_pronouns(bot, trigger.nick, pronouns)\n else:\n bot.reply(\"I don't know your pronouns! You can set them with \"\n \"{}setpronouns\".format(bot.config.core.help_prefix))\n else:\n pronouns = bot.db.get_nick_value(trigger.group(3), 'pronouns')\n if pronouns:\n say_pronouns(bot, trigger.group(3), pronouns)\n elif trigger.group(3) == bot.nick:\n # You can stuff an entry into the database manually for your bot's\n # gender, but like\u2026 it's a bot.\n bot.say(\n \"I am a bot. Beep boop. My pronouns are it/it/its/its/itself. \"\n \"See https://pronoun.is/it for examples.\"\n )\n else:\n bot.reply(\"I don't know {}'s pronouns. They can set them with \"\n \"{}setpronouns\".format(trigger.group(3),\n bot.config.core.help_prefix))\n\n\ndef say_pronouns(bot, nick, pronouns):\n for short, set_ in KNOWN_SETS.items():\n if pronouns == set_:\n break\n short = pronouns\n\n bot.say(\"{}'s pronouns are {}. See https://pronoun.is/{} for \"\n \"examples.\".format(nick, pronouns, short))\n\n\[email protected]('setpronouns')\[email protected]('.setpronouns they/them/their/theirs/themselves')\ndef set_pronouns(bot, trigger):\n \"\"\"Set your pronouns.\"\"\"\n pronouns = trigger.group(2)\n if not pronouns:\n bot.reply('What pronouns do you use?')\n return\n\n disambig = ''\n if pronouns == 'they':\n disambig = ' You can also use they/.../themself, if you prefer.'\n pronouns = KNOWN_SETS.get(pronouns)\n elif pronouns == 'ze':\n disambig = ' I have ze/hir. If you meant ze/zir, you can use that instead.'\n pronouns = KNOWN_SETS.get(pronouns)\n elif len(pronouns.split('/')) != 5:\n pronouns = KNOWN_SETS.get(pronouns)\n if not pronouns:\n bot.reply(\n \"I'm sorry, I don't know those pronouns. \"\n \"You can give me a set I don't know by formatting it \"\n \"subject/object/possessive-determiner/possessive-pronoun/\"\n \"reflexive, as in: they/them/their/theirs/themselves\"\n )\n return\n bot.db.set_nick_value(trigger.nick, 'pronouns', pronouns)\n bot.reply(\"Thanks for telling me!\" + disambig)\n", "path": "sopel/modules/pronouns.py"}]}
| 1,649 | 496 |
gh_patches_debug_21787
|
rasdani/github-patches
|
git_diff
|
mozilla__pontoon-3121
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Warn on empty Fluent rich editor fields
> In addition to the bug, I believe this is a UX regression? We should show a warning that one of the values is empty when trying to save such message.
The current practice is to consider any content in a Fluent value or attribute to qualify the whole message as not-empty. The check for this was added in #2675. Previously empty attributes may indeed have resulted in errors, due to stringifying as invalid Fluent.
_Originally posted by @eemeli in https://github.com/mozilla/pontoon/issues/2973#issuecomment-1750344569_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pontoon/checks/libraries/pontoon_db.py`
Content:
```
1 import html
2 import re
3
4 import bleach
5
6 from collections import defaultdict
7 from fluent.syntax import FluentParser, ast
8 from fluent.syntax.visitor import Visitor
9
10 from pontoon.sync.formats.ftl import localizable_entries
11
12
13 MAX_LENGTH_RE = re.compile(r"MAX_LENGTH:( *)(\d+)", re.MULTILINE)
14 parser = FluentParser()
15
16
17 def get_max_length(comment):
18 """
19 Return max length value for an entity with MAX_LENTH.
20 """
21 max_length = re.findall(MAX_LENGTH_RE, comment or "")
22
23 if max_length:
24 return int(max_length[0][1])
25
26 return None
27
28
29 class IsEmptyVisitor(Visitor):
30 def __init__(self):
31 self.is_empty = True
32
33 def visit_Placeable(self, node):
34 if isinstance(node.expression, ast.Literal):
35 if node.expression.parse()["value"]:
36 self.is_empty = False
37 elif isinstance(node.expression, ast.SelectExpression):
38 self.generic_visit(node.expression)
39 else:
40 self.is_empty = False
41
42 def visit_TextElement(self, node):
43 if node.value:
44 self.is_empty = False
45
46
47 def run_checks(entity, original, string):
48 """
49 Group all checks related to the base UI that get stored in the DB
50 :arg pontoon.base.models.Entity entity: Source entity
51 :arg basestring original: an original string
52 :arg basestring string: a translation
53 """
54 checks = defaultdict(list)
55 resource_ext = entity.resource.format
56
57 if resource_ext == "lang":
58 # Newlines are not allowed in .lang files (bug 1190754)
59 if "\n" in string:
60 checks["pErrors"].append("Newline characters are not allowed")
61
62 # Prevent translations exceeding the given length limit
63 max_length = get_max_length(entity.comment)
64
65 if max_length:
66 string_length = len(
67 html.unescape(bleach.clean(string, strip=True, tags=()))
68 )
69
70 if string_length > max_length:
71 checks["pErrors"].append("Translation too long")
72
73 # Bug 1599056: Original and translation must either both end in a newline,
74 # or none of them should.
75 if resource_ext == "po":
76 if original.endswith("\n") != string.endswith("\n"):
77 checks["pErrors"].append("Ending newline mismatch")
78
79 # Prevent empty translation submissions if not supported
80 if string == "" and not entity.resource.allows_empty_translations:
81 checks["pErrors"].append("Empty translations are not allowed")
82
83 # FTL checks
84 if resource_ext == "ftl" and string != "":
85 translation_ast = parser.parse_entry(string)
86 entity_ast = parser.parse_entry(entity.string)
87
88 # Parse error
89 if isinstance(translation_ast, ast.Junk):
90 checks["pErrors"].append(translation_ast.annotations[0].message)
91
92 # Not a localizable entry
93 elif not isinstance(translation_ast, localizable_entries):
94 checks["pErrors"].append(
95 "Translation needs to be a valid localizable entry"
96 )
97
98 # Message ID mismatch
99 elif entity_ast.id.name != translation_ast.id.name:
100 checks["pErrors"].append("Translation key needs to match source string key")
101
102 # Empty translation entry warning; set here rather than pontoon_non_db.py
103 # to avoid needing to parse the Fluent message twice.
104 else:
105 visitor = IsEmptyVisitor()
106 visitor.visit(translation_ast)
107 if visitor.is_empty:
108 checks["pndbWarnings"].append("Empty translation")
109
110 return checks
111
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pontoon/checks/libraries/pontoon_db.py b/pontoon/checks/libraries/pontoon_db.py
--- a/pontoon/checks/libraries/pontoon_db.py
+++ b/pontoon/checks/libraries/pontoon_db.py
@@ -28,20 +28,27 @@
class IsEmptyVisitor(Visitor):
def __init__(self):
- self.is_empty = True
+ self.is_empty = False
+ self.is_pattern_empty = True
+
+ def visit_Pattern(self, node):
+ self.is_pattern_empty = True
+ self.visit(node.elements)
+ if self.is_pattern_empty:
+ self.is_empty = True
def visit_Placeable(self, node):
if isinstance(node.expression, ast.Literal):
if node.expression.parse()["value"]:
- self.is_empty = False
+ self.is_pattern_empty = False
elif isinstance(node.expression, ast.SelectExpression):
self.generic_visit(node.expression)
else:
- self.is_empty = False
+ self.is_pattern_empty = False
def visit_TextElement(self, node):
if node.value:
- self.is_empty = False
+ self.is_pattern_empty = False
def run_checks(entity, original, string):
|
{"golden_diff": "diff --git a/pontoon/checks/libraries/pontoon_db.py b/pontoon/checks/libraries/pontoon_db.py\n--- a/pontoon/checks/libraries/pontoon_db.py\n+++ b/pontoon/checks/libraries/pontoon_db.py\n@@ -28,20 +28,27 @@\n \n class IsEmptyVisitor(Visitor):\n def __init__(self):\n- self.is_empty = True\n+ self.is_empty = False\n+ self.is_pattern_empty = True\n+\n+ def visit_Pattern(self, node):\n+ self.is_pattern_empty = True\n+ self.visit(node.elements)\n+ if self.is_pattern_empty:\n+ self.is_empty = True\n \n def visit_Placeable(self, node):\n if isinstance(node.expression, ast.Literal):\n if node.expression.parse()[\"value\"]:\n- self.is_empty = False\n+ self.is_pattern_empty = False\n elif isinstance(node.expression, ast.SelectExpression):\n self.generic_visit(node.expression)\n else:\n- self.is_empty = False\n+ self.is_pattern_empty = False\n \n def visit_TextElement(self, node):\n if node.value:\n- self.is_empty = False\n+ self.is_pattern_empty = False\n \n \n def run_checks(entity, original, string):\n", "issue": "Warn on empty Fluent rich editor fields\n> In addition to the bug, I believe this is a UX regression? We should show a warning that one of the values is empty when trying to save such message.\r\n\r\nThe current practice is to consider any content in a Fluent value or attribute to qualify the whole message as not-empty. The check for this was added in #2675. Previously empty attributes may indeed have resulted in errors, due to stringifying as invalid Fluent.\r\n\r\n_Originally posted by @eemeli in https://github.com/mozilla/pontoon/issues/2973#issuecomment-1750344569_\r\n \n", "before_files": [{"content": "import html\nimport re\n\nimport bleach\n\nfrom collections import defaultdict\nfrom fluent.syntax import FluentParser, ast\nfrom fluent.syntax.visitor import Visitor\n\nfrom pontoon.sync.formats.ftl import localizable_entries\n\n\nMAX_LENGTH_RE = re.compile(r\"MAX_LENGTH:( *)(\\d+)\", re.MULTILINE)\nparser = FluentParser()\n\n\ndef get_max_length(comment):\n \"\"\"\n Return max length value for an entity with MAX_LENTH.\n \"\"\"\n max_length = re.findall(MAX_LENGTH_RE, comment or \"\")\n\n if max_length:\n return int(max_length[0][1])\n\n return None\n\n\nclass IsEmptyVisitor(Visitor):\n def __init__(self):\n self.is_empty = True\n\n def visit_Placeable(self, node):\n if isinstance(node.expression, ast.Literal):\n if node.expression.parse()[\"value\"]:\n self.is_empty = False\n elif isinstance(node.expression, ast.SelectExpression):\n self.generic_visit(node.expression)\n else:\n self.is_empty = False\n\n def visit_TextElement(self, node):\n if node.value:\n self.is_empty = False\n\n\ndef run_checks(entity, original, string):\n \"\"\"\n Group all checks related to the base UI that get stored in the DB\n :arg pontoon.base.models.Entity entity: Source entity\n :arg basestring original: an original string\n :arg basestring string: a translation\n \"\"\"\n checks = defaultdict(list)\n resource_ext = entity.resource.format\n\n if resource_ext == \"lang\":\n # Newlines are not allowed in .lang files (bug 1190754)\n if \"\\n\" in string:\n checks[\"pErrors\"].append(\"Newline characters are not allowed\")\n\n # Prevent translations exceeding the given length limit\n max_length = get_max_length(entity.comment)\n\n if max_length:\n string_length = len(\n html.unescape(bleach.clean(string, strip=True, tags=()))\n )\n\n if string_length > max_length:\n checks[\"pErrors\"].append(\"Translation too long\")\n\n # Bug 1599056: Original and translation must either both end in a 
newline,\n # or none of them should.\n if resource_ext == \"po\":\n if original.endswith(\"\\n\") != string.endswith(\"\\n\"):\n checks[\"pErrors\"].append(\"Ending newline mismatch\")\n\n # Prevent empty translation submissions if not supported\n if string == \"\" and not entity.resource.allows_empty_translations:\n checks[\"pErrors\"].append(\"Empty translations are not allowed\")\n\n # FTL checks\n if resource_ext == \"ftl\" and string != \"\":\n translation_ast = parser.parse_entry(string)\n entity_ast = parser.parse_entry(entity.string)\n\n # Parse error\n if isinstance(translation_ast, ast.Junk):\n checks[\"pErrors\"].append(translation_ast.annotations[0].message)\n\n # Not a localizable entry\n elif not isinstance(translation_ast, localizable_entries):\n checks[\"pErrors\"].append(\n \"Translation needs to be a valid localizable entry\"\n )\n\n # Message ID mismatch\n elif entity_ast.id.name != translation_ast.id.name:\n checks[\"pErrors\"].append(\"Translation key needs to match source string key\")\n\n # Empty translation entry warning; set here rather than pontoon_non_db.py\n # to avoid needing to parse the Fluent message twice.\n else:\n visitor = IsEmptyVisitor()\n visitor.visit(translation_ast)\n if visitor.is_empty:\n checks[\"pndbWarnings\"].append(\"Empty translation\")\n\n return checks\n", "path": "pontoon/checks/libraries/pontoon_db.py"}], "after_files": [{"content": "import html\nimport re\n\nimport bleach\n\nfrom collections import defaultdict\nfrom fluent.syntax import FluentParser, ast\nfrom fluent.syntax.visitor import Visitor\n\nfrom pontoon.sync.formats.ftl import localizable_entries\n\n\nMAX_LENGTH_RE = re.compile(r\"MAX_LENGTH:( *)(\\d+)\", re.MULTILINE)\nparser = FluentParser()\n\n\ndef get_max_length(comment):\n \"\"\"\n Return max length value for an entity with MAX_LENTH.\n \"\"\"\n max_length = re.findall(MAX_LENGTH_RE, comment or \"\")\n\n if max_length:\n return int(max_length[0][1])\n\n return None\n\n\nclass IsEmptyVisitor(Visitor):\n def __init__(self):\n self.is_empty = False\n self.is_pattern_empty = True\n\n def visit_Pattern(self, node):\n self.is_pattern_empty = True\n self.visit(node.elements)\n if self.is_pattern_empty:\n self.is_empty = True\n\n def visit_Placeable(self, node):\n if isinstance(node.expression, ast.Literal):\n if node.expression.parse()[\"value\"]:\n self.is_pattern_empty = False\n elif isinstance(node.expression, ast.SelectExpression):\n self.generic_visit(node.expression)\n else:\n self.is_pattern_empty = False\n\n def visit_TextElement(self, node):\n if node.value:\n self.is_pattern_empty = False\n\n\ndef run_checks(entity, original, string):\n \"\"\"\n Group all checks related to the base UI that get stored in the DB\n :arg pontoon.base.models.Entity entity: Source entity\n :arg basestring original: an original string\n :arg basestring string: a translation\n \"\"\"\n checks = defaultdict(list)\n resource_ext = entity.resource.format\n\n if resource_ext == \"lang\":\n # Newlines are not allowed in .lang files (bug 1190754)\n if \"\\n\" in string:\n checks[\"pErrors\"].append(\"Newline characters are not allowed\")\n\n # Prevent translations exceeding the given length limit\n max_length = get_max_length(entity.comment)\n\n if max_length:\n string_length = len(\n html.unescape(bleach.clean(string, strip=True, tags=()))\n )\n\n if string_length > max_length:\n checks[\"pErrors\"].append(\"Translation too long\")\n\n # Bug 1599056: Original and translation must either both end in a newline,\n # or none of them should.\n if 
resource_ext == \"po\":\n if original.endswith(\"\\n\") != string.endswith(\"\\n\"):\n checks[\"pErrors\"].append(\"Ending newline mismatch\")\n\n # Prevent empty translation submissions if not supported\n if string == \"\" and not entity.resource.allows_empty_translations:\n checks[\"pErrors\"].append(\"Empty translations are not allowed\")\n\n # FTL checks\n if resource_ext == \"ftl\" and string != \"\":\n translation_ast = parser.parse_entry(string)\n entity_ast = parser.parse_entry(entity.string)\n\n # Parse error\n if isinstance(translation_ast, ast.Junk):\n checks[\"pErrors\"].append(translation_ast.annotations[0].message)\n\n # Not a localizable entry\n elif not isinstance(translation_ast, localizable_entries):\n checks[\"pErrors\"].append(\n \"Translation needs to be a valid localizable entry\"\n )\n\n # Message ID mismatch\n elif entity_ast.id.name != translation_ast.id.name:\n checks[\"pErrors\"].append(\"Translation key needs to match source string key\")\n\n # Empty translation entry warning; set here rather than pontoon_non_db.py\n # to avoid needing to parse the Fluent message twice.\n else:\n visitor = IsEmptyVisitor()\n visitor.visit(translation_ast)\n if visitor.is_empty:\n checks[\"pndbWarnings\"].append(\"Empty translation\")\n\n return checks\n", "path": "pontoon/checks/libraries/pontoon_db.py"}]}
| 1,400 | 280 |
gh_patches_debug_10412
|
rasdani/github-patches
|
git_diff
|
keras-team__keras-19382
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
CenterCrop.compute_output_shape() does not consider the list compatibility
When the input is a list of tensor, CenterCrop can correctly generate the output, while` CenterCrop.compute_output_shape()` will generate a wrong shape. Please refer to this [gist](https://colab.research.google.com/drive/1JLuEr0-dNCmawTb2dRX7kaSxOKGZ6Obv?usp=sharing).
Interestingly, I found the test code in [center_crop_test.py, ](https://github.com/keras-team/keras/blob/master/keras/layers/preprocessing/center_crop_test.py#L178) has tested the list compatibility, while its `compute_output_shape` function missed it.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `keras/layers/preprocessing/center_crop.py`
Content:
```
1 from keras import backend
2 from keras.api_export import keras_export
3 from keras.layers.preprocessing.tf_data_layer import TFDataLayer
4 from keras.utils import image_utils
5
6
7 @keras_export("keras.layers.CenterCrop")
8 class CenterCrop(TFDataLayer):
9 """A preprocessing layer which crops images.
10
11 This layers crops the central portion of the images to a target size. If an
12 image is smaller than the target size, it will be resized and cropped
13 so as to return the largest possible window in the image that matches
14 the target aspect ratio.
15
16 Input pixel values can be of any range (e.g. `[0., 1.)` or `[0, 255]`).
17
18 Input shape:
19 3D (unbatched) or 4D (batched) tensor with shape:
20 `(..., height, width, channels)`, in `"channels_last"` format,
21 or `(..., channels, height, width)`, in `"channels_first"` format.
22
23 Output shape:
24 3D (unbatched) or 4D (batched) tensor with shape:
25 `(..., target_height, target_width, channels)`,
26 or `(..., channels, target_height, target_width)`,
27 in `"channels_first"` format.
28
29 If the input height/width is even and the target height/width is odd (or
30 inversely), the input image is left-padded by 1 pixel.
31
32 **Note:** This layer is safe to use inside a `tf.data` pipeline
33 (independently of which backend you're using).
34
35 Args:
36 height: Integer, the height of the output shape.
37 width: Integer, the width of the output shape.
38 data_format: string, either `"channels_last"` or `"channels_first"`.
39 The ordering of the dimensions in the inputs. `"channels_last"`
40 corresponds to inputs with shape `(batch, height, width, channels)`
41 while `"channels_first"` corresponds to inputs with shape
42 `(batch, channels, height, width)`. It defaults to the
43 `image_data_format` value found in your Keras config file at
44 `~/.keras/keras.json`. If you never set it, then it will be
45 `"channels_last"`.
46 """
47
48 def __init__(self, height, width, data_format=None, **kwargs):
49 super().__init__(**kwargs)
50 self.height = height
51 self.width = width
52 self.data_format = backend.standardize_data_format(data_format)
53
54 def call(self, inputs):
55 inputs = self.backend.cast(inputs, self.compute_dtype)
56 if self.data_format == "channels_first":
57 init_height = inputs.shape[-2]
58 init_width = inputs.shape[-1]
59 else:
60 init_height = inputs.shape[-3]
61 init_width = inputs.shape[-2]
62
63 if init_height is None or init_width is None:
64 # Dynamic size case. TODO.
65 raise ValueError(
66 "At this time, CenterCrop can only "
67 "process images with a static spatial "
68 f"shape. Received: inputs.shape={inputs.shape}"
69 )
70
71 h_diff = init_height - self.height
72 w_diff = init_width - self.width
73
74 h_start = int(h_diff / 2)
75 w_start = int(w_diff / 2)
76
77 if h_diff >= 0 and w_diff >= 0:
78 if len(inputs.shape) == 4:
79 if self.data_format == "channels_first":
80 return inputs[
81 :,
82 :,
83 h_start : h_start + self.height,
84 w_start : w_start + self.width,
85 ]
86 return inputs[
87 :,
88 h_start : h_start + self.height,
89 w_start : w_start + self.width,
90 :,
91 ]
92 elif len(inputs.shape) == 3:
93 if self.data_format == "channels_first":
94 return inputs[
95 :,
96 h_start : h_start + self.height,
97 w_start : w_start + self.width,
98 ]
99 return inputs[
100 h_start : h_start + self.height,
101 w_start : w_start + self.width,
102 :,
103 ]
104
105 return image_utils.smart_resize(
106 inputs,
107 [self.height, self.width],
108 data_format=self.data_format,
109 backend_module=self.backend,
110 )
111
112 def compute_output_shape(self, input_shape):
113 input_shape = list(input_shape)
114 if len(input_shape) == 4:
115 if self.data_format == "channels_last":
116 input_shape[1] = self.height
117 input_shape[2] = self.width
118 else:
119 input_shape[2] = self.height
120 input_shape[3] = self.width
121 else:
122 if self.data_format == "channels_last":
123 input_shape[0] = self.height
124 input_shape[1] = self.width
125 else:
126 input_shape[1] = self.height
127 input_shape[2] = self.width
128 return tuple(input_shape)
129
130 def get_config(self):
131 base_config = super().get_config()
132 config = {
133 "height": self.height,
134 "width": self.width,
135 "data_format": self.data_format,
136 }
137 return {**base_config, **config}
138
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/keras/layers/preprocessing/center_crop.py b/keras/layers/preprocessing/center_crop.py
--- a/keras/layers/preprocessing/center_crop.py
+++ b/keras/layers/preprocessing/center_crop.py
@@ -111,6 +111,13 @@
def compute_output_shape(self, input_shape):
input_shape = list(input_shape)
+ if isinstance(input_shape[0], (list, tuple)) or len(
+ input_shape
+ ) not in (3, 4):
+ raise ValueError(
+ "`input_shape` must be a non-nested tuple or list "
+ "of rank-1 with size 3 (unbatched) or 4 (batched). "
+ )
if len(input_shape) == 4:
if self.data_format == "channels_last":
input_shape[1] = self.height
|
{"golden_diff": "diff --git a/keras/layers/preprocessing/center_crop.py b/keras/layers/preprocessing/center_crop.py\n--- a/keras/layers/preprocessing/center_crop.py\n+++ b/keras/layers/preprocessing/center_crop.py\n@@ -111,6 +111,13 @@\n \n def compute_output_shape(self, input_shape):\n input_shape = list(input_shape)\n+ if isinstance(input_shape[0], (list, tuple)) or len(\n+ input_shape\n+ ) not in (3, 4):\n+ raise ValueError(\n+ \"`input_shape` must be a non-nested tuple or list \"\n+ \"of rank-1 with size 3 (unbatched) or 4 (batched). \"\n+ )\n if len(input_shape) == 4:\n if self.data_format == \"channels_last\":\n input_shape[1] = self.height\n", "issue": "CenterCrop.compute_output_shape() does not consider the list compatibility\nWhen the input is a list of tensor, CenterCrop can correctly generate the output, while` CenterCrop.compute_output_shape()` will generate a wrong shape. Please refer to this [gist](https://colab.research.google.com/drive/1JLuEr0-dNCmawTb2dRX7kaSxOKGZ6Obv?usp=sharing).\r\n\r\nInterestingly, I found the test code in [center_crop_test.py, ](https://github.com/keras-team/keras/blob/master/keras/layers/preprocessing/center_crop_test.py#L178) has tested the list compatibility, while its `compute_output_shape` function missed it.\n", "before_files": [{"content": "from keras import backend\nfrom keras.api_export import keras_export\nfrom keras.layers.preprocessing.tf_data_layer import TFDataLayer\nfrom keras.utils import image_utils\n\n\n@keras_export(\"keras.layers.CenterCrop\")\nclass CenterCrop(TFDataLayer):\n \"\"\"A preprocessing layer which crops images.\n\n This layers crops the central portion of the images to a target size. If an\n image is smaller than the target size, it will be resized and cropped\n so as to return the largest possible window in the image that matches\n the target aspect ratio.\n\n Input pixel values can be of any range (e.g. `[0., 1.)` or `[0, 255]`).\n\n Input shape:\n 3D (unbatched) or 4D (batched) tensor with shape:\n `(..., height, width, channels)`, in `\"channels_last\"` format,\n or `(..., channels, height, width)`, in `\"channels_first\"` format.\n\n Output shape:\n 3D (unbatched) or 4D (batched) tensor with shape:\n `(..., target_height, target_width, channels)`,\n or `(..., channels, target_height, target_width)`,\n in `\"channels_first\"` format.\n\n If the input height/width is even and the target height/width is odd (or\n inversely), the input image is left-padded by 1 pixel.\n\n **Note:** This layer is safe to use inside a `tf.data` pipeline\n (independently of which backend you're using).\n\n Args:\n height: Integer, the height of the output shape.\n width: Integer, the width of the output shape.\n data_format: string, either `\"channels_last\"` or `\"channels_first\"`.\n The ordering of the dimensions in the inputs. `\"channels_last\"`\n corresponds to inputs with shape `(batch, height, width, channels)`\n while `\"channels_first\"` corresponds to inputs with shape\n `(batch, channels, height, width)`. It defaults to the\n `image_data_format` value found in your Keras config file at\n `~/.keras/keras.json`. 
If you never set it, then it will be\n `\"channels_last\"`.\n \"\"\"\n\n def __init__(self, height, width, data_format=None, **kwargs):\n super().__init__(**kwargs)\n self.height = height\n self.width = width\n self.data_format = backend.standardize_data_format(data_format)\n\n def call(self, inputs):\n inputs = self.backend.cast(inputs, self.compute_dtype)\n if self.data_format == \"channels_first\":\n init_height = inputs.shape[-2]\n init_width = inputs.shape[-1]\n else:\n init_height = inputs.shape[-3]\n init_width = inputs.shape[-2]\n\n if init_height is None or init_width is None:\n # Dynamic size case. TODO.\n raise ValueError(\n \"At this time, CenterCrop can only \"\n \"process images with a static spatial \"\n f\"shape. Received: inputs.shape={inputs.shape}\"\n )\n\n h_diff = init_height - self.height\n w_diff = init_width - self.width\n\n h_start = int(h_diff / 2)\n w_start = int(w_diff / 2)\n\n if h_diff >= 0 and w_diff >= 0:\n if len(inputs.shape) == 4:\n if self.data_format == \"channels_first\":\n return inputs[\n :,\n :,\n h_start : h_start + self.height,\n w_start : w_start + self.width,\n ]\n return inputs[\n :,\n h_start : h_start + self.height,\n w_start : w_start + self.width,\n :,\n ]\n elif len(inputs.shape) == 3:\n if self.data_format == \"channels_first\":\n return inputs[\n :,\n h_start : h_start + self.height,\n w_start : w_start + self.width,\n ]\n return inputs[\n h_start : h_start + self.height,\n w_start : w_start + self.width,\n :,\n ]\n\n return image_utils.smart_resize(\n inputs,\n [self.height, self.width],\n data_format=self.data_format,\n backend_module=self.backend,\n )\n\n def compute_output_shape(self, input_shape):\n input_shape = list(input_shape)\n if len(input_shape) == 4:\n if self.data_format == \"channels_last\":\n input_shape[1] = self.height\n input_shape[2] = self.width\n else:\n input_shape[2] = self.height\n input_shape[3] = self.width\n else:\n if self.data_format == \"channels_last\":\n input_shape[0] = self.height\n input_shape[1] = self.width\n else:\n input_shape[1] = self.height\n input_shape[2] = self.width\n return tuple(input_shape)\n\n def get_config(self):\n base_config = super().get_config()\n config = {\n \"height\": self.height,\n \"width\": self.width,\n \"data_format\": self.data_format,\n }\n return {**base_config, **config}\n", "path": "keras/layers/preprocessing/center_crop.py"}], "after_files": [{"content": "from keras import backend\nfrom keras.api_export import keras_export\nfrom keras.layers.preprocessing.tf_data_layer import TFDataLayer\nfrom keras.utils import image_utils\n\n\n@keras_export(\"keras.layers.CenterCrop\")\nclass CenterCrop(TFDataLayer):\n \"\"\"A preprocessing layer which crops images.\n\n This layers crops the central portion of the images to a target size. If an\n image is smaller than the target size, it will be resized and cropped\n so as to return the largest possible window in the image that matches\n the target aspect ratio.\n\n Input pixel values can be of any range (e.g. 
`[0., 1.)` or `[0, 255]`).\n\n Input shape:\n 3D (unbatched) or 4D (batched) tensor with shape:\n `(..., height, width, channels)`, in `\"channels_last\"` format,\n or `(..., channels, height, width)`, in `\"channels_first\"` format.\n\n Output shape:\n 3D (unbatched) or 4D (batched) tensor with shape:\n `(..., target_height, target_width, channels)`,\n or `(..., channels, target_height, target_width)`,\n in `\"channels_first\"` format.\n\n If the input height/width is even and the target height/width is odd (or\n inversely), the input image is left-padded by 1 pixel.\n\n **Note:** This layer is safe to use inside a `tf.data` pipeline\n (independently of which backend you're using).\n\n Args:\n height: Integer, the height of the output shape.\n width: Integer, the width of the output shape.\n data_format: string, either `\"channels_last\"` or `\"channels_first\"`.\n The ordering of the dimensions in the inputs. `\"channels_last\"`\n corresponds to inputs with shape `(batch, height, width, channels)`\n while `\"channels_first\"` corresponds to inputs with shape\n `(batch, channels, height, width)`. It defaults to the\n `image_data_format` value found in your Keras config file at\n `~/.keras/keras.json`. If you never set it, then it will be\n `\"channels_last\"`.\n \"\"\"\n\n def __init__(self, height, width, data_format=None, **kwargs):\n super().__init__(**kwargs)\n self.height = height\n self.width = width\n self.data_format = backend.standardize_data_format(data_format)\n\n def call(self, inputs):\n inputs = self.backend.cast(inputs, self.compute_dtype)\n if self.data_format == \"channels_first\":\n init_height = inputs.shape[-2]\n init_width = inputs.shape[-1]\n else:\n init_height = inputs.shape[-3]\n init_width = inputs.shape[-2]\n\n if init_height is None or init_width is None:\n # Dynamic size case. TODO.\n raise ValueError(\n \"At this time, CenterCrop can only \"\n \"process images with a static spatial \"\n f\"shape. Received: inputs.shape={inputs.shape}\"\n )\n\n h_diff = init_height - self.height\n w_diff = init_width - self.width\n\n h_start = int(h_diff / 2)\n w_start = int(w_diff / 2)\n\n if h_diff >= 0 and w_diff >= 0:\n if len(inputs.shape) == 4:\n if self.data_format == \"channels_first\":\n return inputs[\n :,\n :,\n h_start : h_start + self.height,\n w_start : w_start + self.width,\n ]\n return inputs[\n :,\n h_start : h_start + self.height,\n w_start : w_start + self.width,\n :,\n ]\n elif len(inputs.shape) == 3:\n if self.data_format == \"channels_first\":\n return inputs[\n :,\n h_start : h_start + self.height,\n w_start : w_start + self.width,\n ]\n return inputs[\n h_start : h_start + self.height,\n w_start : w_start + self.width,\n :,\n ]\n\n return image_utils.smart_resize(\n inputs,\n [self.height, self.width],\n data_format=self.data_format,\n backend_module=self.backend,\n )\n\n def compute_output_shape(self, input_shape):\n input_shape = list(input_shape)\n if isinstance(input_shape[0], (list, tuple)) or len(\n input_shape\n ) not in (3, 4):\n raise ValueError(\n \"`input_shape` must be a non-nested tuple or list \"\n \"of rank-1 with size 3 (unbatched) or 4 (batched). 
\"\n )\n if len(input_shape) == 4:\n if self.data_format == \"channels_last\":\n input_shape[1] = self.height\n input_shape[2] = self.width\n else:\n input_shape[2] = self.height\n input_shape[3] = self.width\n else:\n if self.data_format == \"channels_last\":\n input_shape[0] = self.height\n input_shape[1] = self.width\n else:\n input_shape[1] = self.height\n input_shape[2] = self.width\n return tuple(input_shape)\n\n def get_config(self):\n base_config = super().get_config()\n config = {\n \"height\": self.height,\n \"width\": self.width,\n \"data_format\": self.data_format,\n }\n return {**base_config, **config}\n", "path": "keras/layers/preprocessing/center_crop.py"}]}
| 1,851 | 201 |
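For readers skimming the CenterCrop record above: its patch adds an `input_shape` sanity check to `compute_output_shape`, rejecting nested shapes and ranks other than 3 (unbatched) or 4 (batched). Below is a minimal standalone sketch of that check; the helper name and messages are illustrative, only the condition mirrors the patch.

```python
# Illustrative sketch of the shape validation introduced by the CenterCrop patch.
# The function name and exact wording are assumptions for demonstration only.
def validate_center_crop_input_shape(input_shape):
    input_shape = list(input_shape)
    # Reject nested shapes such as [(128, 128, 3), (128, 128, 3)] and shapes
    # whose rank is neither 3 (unbatched) nor 4 (batched).
    if isinstance(input_shape[0], (list, tuple)) or len(input_shape) not in (3, 4):
        raise ValueError(
            "`input_shape` must be a non-nested tuple or list "
            "of rank-1 with size 3 (unbatched) or 4 (batched)."
        )
    return input_shape


validate_center_crop_input_shape((None, 128, 128, 3))  # ok: batched, rank 4
validate_center_crop_input_shape((128, 128, 3))        # ok: unbatched, rank 3
try:
    validate_center_crop_input_shape([(128, 128, 3)])  # nested -> rejected
except ValueError as exc:
    print(exc)
```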
gh_patches_debug_9086 | rasdani/github-patches | git_diff | pytorch__ignite-2439 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Seemingly unnecessary dictionary comprehension in contrib's BaseOutputHandler
Hi,
`output_dict` is made sure to be a dictionary just above the line below.
https://github.com/pytorch/ignite/blob/21a2ccc25dd7ff0aec5f1538387597cb093fe36e/ignite/contrib/handlers/base_logger.py#L108
So this line could be replaced with:
```python
metrics_state_attrs.update(output_dict)
```
Seemingly unnecessary dictionary comprehension in contrib's BaseOutputHandler
Hi,
`output_dict` is made sure to be a dictionary just above the line below.
https://github.com/pytorch/ignite/blob/21a2ccc25dd7ff0aec5f1538387597cb093fe36e/ignite/contrib/handlers/base_logger.py#L108
So this line could be replaced with:
```python
metrics_state_attrs.update(output_dict)
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ignite/contrib/handlers/base_logger.py`
Content:
```
1 """Base logger and its helper handlers."""
2 import numbers
3 import warnings
4 from abc import ABCMeta, abstractmethod
5 from collections import OrderedDict
6 from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
7
8 import torch
9 import torch.nn as nn
10 from torch.optim import Optimizer
11
12 from ignite.engine import Engine, Events, EventsList, State
13 from ignite.engine.events import CallableEventWithFilter, RemovableEventHandle
14
15
16 class BaseHandler(metaclass=ABCMeta):
17 """Base handler for defining various useful handlers."""
18
19 @abstractmethod
20 def __call__(self, engine: Engine, logger: Any, event_name: Union[str, Events]) -> None:
21 pass
22
23
24 class BaseOptimizerParamsHandler(BaseHandler):
25 """
26 Base handler for logging optimizer parameters
27 """
28
29 def __init__(self, optimizer: Optimizer, param_name: str = "lr", tag: Optional[str] = None):
30 if not (
31 isinstance(optimizer, Optimizer)
32 or (hasattr(optimizer, "param_groups") and isinstance(optimizer.param_groups, Sequence))
33 ):
34 raise TypeError(
35 "Argument optimizer should be torch.optim.Optimizer or has attribute 'param_groups' as list/tuple, "
36 f"but given {type(optimizer)}"
37 )
38
39 self.optimizer = optimizer
40 self.param_name = param_name
41 self.tag = tag
42
43
44 class BaseOutputHandler(BaseHandler):
45 """
46 Helper handler to log engine's output and/or metrics
47 """
48
49 def __init__(
50 self,
51 tag: str,
52 metric_names: Optional[Union[str, List[str]]] = None,
53 output_transform: Optional[Callable] = None,
54 global_step_transform: Optional[Callable] = None,
55 state_attributes: Optional[List[str]] = None,
56 ):
57
58 if metric_names is not None:
59 if not (isinstance(metric_names, list) or (isinstance(metric_names, str) and metric_names == "all")):
60 raise TypeError(
61 f"metric_names should be either a list or equal 'all', got {type(metric_names)} instead."
62 )
63
64 if output_transform is not None and not callable(output_transform):
65 raise TypeError(f"output_transform should be a function, got {type(output_transform)} instead.")
66
67 if output_transform is None and metric_names is None and state_attributes is None:
68 raise ValueError("Either metric_names, output_transform or state_attributes should be defined")
69
70 if global_step_transform is not None and not callable(global_step_transform):
71 raise TypeError(f"global_step_transform should be a function, got {type(global_step_transform)} instead.")
72
73 if global_step_transform is None:
74
75 def global_step_transform(engine: Engine, event_name: Union[str, Events]) -> int:
76 return engine.state.get_event_attrib_value(event_name)
77
78 self.tag = tag
79 self.metric_names = metric_names
80 self.output_transform = output_transform
81 self.global_step_transform = global_step_transform
82 self.state_attributes = state_attributes
83
84 def _setup_output_metrics_state_attrs(
85 self, engine: Engine, log_text: Optional[bool] = False, key_tuple: Optional[bool] = True
86 ) -> Dict[Any, Any]:
87 """Helper method to setup metrics and state attributes to log"""
88 metrics_state_attrs = OrderedDict()
89 if self.metric_names is not None:
90 if isinstance(self.metric_names, str) and self.metric_names == "all":
91 metrics_state_attrs = OrderedDict(engine.state.metrics)
92 else:
93 for name in self.metric_names:
94 if name not in engine.state.metrics:
95 warnings.warn(
96 f"Provided metric name '{name}' is missing "
97 f"in engine's state metrics: {list(engine.state.metrics.keys())}"
98 )
99 continue
100 metrics_state_attrs[name] = engine.state.metrics[name]
101
102 if self.output_transform is not None:
103 output_dict = self.output_transform(engine.state.output)
104
105 if not isinstance(output_dict, dict):
106 output_dict = {"output": output_dict}
107
108 metrics_state_attrs.update({name: value for name, value in output_dict.items()})
109
110 if self.state_attributes is not None:
111 metrics_state_attrs.update({name: getattr(engine.state, name, None) for name in self.state_attributes})
112
113 metrics_state_attrs_dict = OrderedDict() # type: Dict[Any, Union[str, float, numbers.Number]]
114
115 def key_tuple_tf(tag: str, name: str, *args: str) -> Tuple[str, ...]:
116 return (tag, name) + args
117
118 def key_str_tf(tag: str, name: str, *args: str) -> str:
119 return "/".join((tag, name) + args)
120
121 key_tf = key_tuple_tf if key_tuple else key_str_tf
122
123 for name, value in metrics_state_attrs.items():
124 if isinstance(value, numbers.Number):
125 metrics_state_attrs_dict[key_tf(self.tag, name)] = value
126 elif isinstance(value, torch.Tensor) and value.ndimension() == 0:
127 metrics_state_attrs_dict[key_tf(self.tag, name)] = value.item()
128 elif isinstance(value, torch.Tensor) and value.ndimension() == 1:
129 for i, v in enumerate(value):
130 metrics_state_attrs_dict[key_tf(self.tag, name, str(i))] = v.item()
131 else:
132 if isinstance(value, str) and log_text:
133 metrics_state_attrs_dict[key_tf(self.tag, name)] = value
134 else:
135 warnings.warn(f"Logger output_handler can not log metrics value type {type(value)}")
136 return metrics_state_attrs_dict
137
138
139 class BaseWeightsScalarHandler(BaseHandler):
140 """
141 Helper handler to log model's weights as scalars.
142 """
143
144 def __init__(self, model: nn.Module, reduction: Callable = torch.norm, tag: Optional[str] = None):
145 if not isinstance(model, torch.nn.Module):
146 raise TypeError(f"Argument model should be of type torch.nn.Module, but given {type(model)}")
147
148 if not callable(reduction):
149 raise TypeError(f"Argument reduction should be callable, but given {type(reduction)}")
150
151 def _is_0D_tensor(t: torch.Tensor) -> bool:
152 return isinstance(t, torch.Tensor) and t.ndimension() == 0
153
154 # Test reduction function on a tensor
155 o = reduction(torch.ones(4, 2))
156 if not (isinstance(o, numbers.Number) or _is_0D_tensor(o)):
157 raise TypeError(f"Output of the reduction function should be a scalar, but got {type(o)}")
158
159 self.model = model
160 self.reduction = reduction
161 self.tag = tag
162
163
164 class BaseWeightsHistHandler(BaseHandler):
165 """
166 Helper handler to log model's weights as histograms.
167 """
168
169 def __init__(self, model: nn.Module, tag: Optional[str] = None):
170 if not isinstance(model, torch.nn.Module):
171 raise TypeError(f"Argument model should be of type torch.nn.Module, but given {type(model)}")
172
173 self.model = model
174 self.tag = tag
175
176
177 class BaseLogger(metaclass=ABCMeta):
178 """
179 Base logger handler. See implementations: TensorboardLogger, VisdomLogger, PolyaxonLogger, MLflowLogger, ...
180
181 """
182
183 def attach(
184 self,
185 engine: Engine,
186 log_handler: Callable,
187 event_name: Union[str, Events, CallableEventWithFilter, EventsList],
188 *args: Any,
189 **kwargs: Any,
190 ) -> RemovableEventHandle:
191 """Attach the logger to the engine and execute `log_handler` function at `event_name` events.
192
193 Args:
194 engine: engine object.
195 log_handler: a logging handler to execute
196 event_name: event to attach the logging handler to. Valid events are from
197 :class:`~ignite.engine.events.Events` or :class:`~ignite.engine.events.EventsList` or any `event_name`
198 added by :meth:`~ignite.engine.engine.Engine.register_events`.
199 args: args forwarded to the `log_handler` method
200 kwargs: kwargs forwarded to the `log_handler` method
201
202 Returns:
203 :class:`~ignite.engine.events.RemovableEventHandle`, which can be used to remove the handler.
204 """
205 if isinstance(event_name, EventsList):
206 for name in event_name:
207 if name not in State.event_to_attr:
208 raise RuntimeError(f"Unknown event name '{name}'")
209 engine.add_event_handler(name, log_handler, self, name)
210
211 return RemovableEventHandle(event_name, log_handler, engine)
212
213 else:
214
215 if event_name not in State.event_to_attr:
216 raise RuntimeError(f"Unknown event name '{event_name}'")
217
218 return engine.add_event_handler(event_name, log_handler, self, event_name, *args, **kwargs)
219
220 def attach_output_handler(self, engine: Engine, event_name: Any, *args: Any, **kwargs: Any) -> RemovableEventHandle:
221 """Shortcut method to attach `OutputHandler` to the logger.
222
223 Args:
224 engine: engine object.
225 event_name: event to attach the logging handler to. Valid events are from
226 :class:`~ignite.engine.events.Events` or any `event_name` added by
227 :meth:`~ignite.engine.engine.Engine.register_events`.
228 args: args to initialize `OutputHandler`
229 kwargs: kwargs to initialize `OutputHandler`
230
231 Returns:
232 :class:`~ignite.engine.events.RemovableEventHandle`, which can be used to remove the handler.
233 """
234 return self.attach(engine, self._create_output_handler(*args, **kwargs), event_name=event_name)
235
236 def attach_opt_params_handler(
237 self, engine: Engine, event_name: Any, *args: Any, **kwargs: Any
238 ) -> RemovableEventHandle:
239 """Shortcut method to attach `OptimizerParamsHandler` to the logger.
240
241 Args:
242 engine: engine object.
243 event_name: event to attach the logging handler to. Valid events are from
244 :class:`~ignite.engine.events.Events` or any `event_name` added by
245 :meth:`~ignite.engine.engine.Engine.register_events`.
246 args: args to initialize `OptimizerParamsHandler`
247 kwargs: kwargs to initialize `OptimizerParamsHandler`
248
249 Returns:
250 :class:`~ignite.engine.events.RemovableEventHandle`, which can be used to remove the handler.
251
252 .. versionchanged:: 0.4.3
253 Added missing return statement.
254 """
255 return self.attach(engine, self._create_opt_params_handler(*args, **kwargs), event_name=event_name)
256
257 @abstractmethod
258 def _create_output_handler(self, engine: Engine, *args: Any, **kwargs: Any) -> Callable:
259 pass
260
261 @abstractmethod
262 def _create_opt_params_handler(self, *args: Any, **kwargs: Any) -> Callable:
263 pass
264
265 def __enter__(self) -> "BaseLogger":
266 return self
267
268 def __exit__(self, type: Any, value: Any, traceback: Any) -> None:
269 self.close()
270
271 def close(self) -> None:
272 pass
273
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ignite/contrib/handlers/base_logger.py b/ignite/contrib/handlers/base_logger.py
--- a/ignite/contrib/handlers/base_logger.py
+++ b/ignite/contrib/handlers/base_logger.py
@@ -105,7 +105,7 @@
if not isinstance(output_dict, dict):
output_dict = {"output": output_dict}
- metrics_state_attrs.update({name: value for name, value in output_dict.items()})
+ metrics_state_attrs.update(output_dict)
if self.state_attributes is not None:
metrics_state_attrs.update({name: getattr(engine.state, name, None) for name in self.state_attributes})
|
{"golden_diff": "diff --git a/ignite/contrib/handlers/base_logger.py b/ignite/contrib/handlers/base_logger.py\n--- a/ignite/contrib/handlers/base_logger.py\n+++ b/ignite/contrib/handlers/base_logger.py\n@@ -105,7 +105,7 @@\n if not isinstance(output_dict, dict):\n output_dict = {\"output\": output_dict}\n \n- metrics_state_attrs.update({name: value for name, value in output_dict.items()})\n+ metrics_state_attrs.update(output_dict)\n \n if self.state_attributes is not None:\n metrics_state_attrs.update({name: getattr(engine.state, name, None) for name in self.state_attributes})\n", "issue": "Seemingly unnecessary dictionary comprehension in contrib's BaseOutputHandler\nHi,\r\n`output_dict` is made sure to be a dictionary just above the line below. \r\n\r\nhttps://github.com/pytorch/ignite/blob/21a2ccc25dd7ff0aec5f1538387597cb093fe36e/ignite/contrib/handlers/base_logger.py#L108\r\n\r\nSo this line could be replaced with:\r\n\r\n```python\r\nmetrics_state_attrs.update(output_dict) \r\n```\nSeemingly unnecessary dictionary comprehension in contrib's BaseOutputHandler\nHi,\r\n`output_dict` is made sure to be a dictionary just above the line below. \r\n\r\nhttps://github.com/pytorch/ignite/blob/21a2ccc25dd7ff0aec5f1538387597cb093fe36e/ignite/contrib/handlers/base_logger.py#L108\r\n\r\nSo this line could be replaced with:\r\n\r\n```python\r\nmetrics_state_attrs.update(output_dict) \r\n```\n", "before_files": [{"content": "\"\"\"Base logger and its helper handlers.\"\"\"\nimport numbers\nimport warnings\nfrom abc import ABCMeta, abstractmethod\nfrom collections import OrderedDict\nfrom typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union\n\nimport torch\nimport torch.nn as nn\nfrom torch.optim import Optimizer\n\nfrom ignite.engine import Engine, Events, EventsList, State\nfrom ignite.engine.events import CallableEventWithFilter, RemovableEventHandle\n\n\nclass BaseHandler(metaclass=ABCMeta):\n \"\"\"Base handler for defining various useful handlers.\"\"\"\n\n @abstractmethod\n def __call__(self, engine: Engine, logger: Any, event_name: Union[str, Events]) -> None:\n pass\n\n\nclass BaseOptimizerParamsHandler(BaseHandler):\n \"\"\"\n Base handler for logging optimizer parameters\n \"\"\"\n\n def __init__(self, optimizer: Optimizer, param_name: str = \"lr\", tag: Optional[str] = None):\n if not (\n isinstance(optimizer, Optimizer)\n or (hasattr(optimizer, \"param_groups\") and isinstance(optimizer.param_groups, Sequence))\n ):\n raise TypeError(\n \"Argument optimizer should be torch.optim.Optimizer or has attribute 'param_groups' as list/tuple, \"\n f\"but given {type(optimizer)}\"\n )\n\n self.optimizer = optimizer\n self.param_name = param_name\n self.tag = tag\n\n\nclass BaseOutputHandler(BaseHandler):\n \"\"\"\n Helper handler to log engine's output and/or metrics\n \"\"\"\n\n def __init__(\n self,\n tag: str,\n metric_names: Optional[Union[str, List[str]]] = None,\n output_transform: Optional[Callable] = None,\n global_step_transform: Optional[Callable] = None,\n state_attributes: Optional[List[str]] = None,\n ):\n\n if metric_names is not None:\n if not (isinstance(metric_names, list) or (isinstance(metric_names, str) and metric_names == \"all\")):\n raise TypeError(\n f\"metric_names should be either a list or equal 'all', got {type(metric_names)} instead.\"\n )\n\n if output_transform is not None and not callable(output_transform):\n raise TypeError(f\"output_transform should be a function, got {type(output_transform)} instead.\")\n\n if output_transform is None and 
metric_names is None and state_attributes is None:\n raise ValueError(\"Either metric_names, output_transform or state_attributes should be defined\")\n\n if global_step_transform is not None and not callable(global_step_transform):\n raise TypeError(f\"global_step_transform should be a function, got {type(global_step_transform)} instead.\")\n\n if global_step_transform is None:\n\n def global_step_transform(engine: Engine, event_name: Union[str, Events]) -> int:\n return engine.state.get_event_attrib_value(event_name)\n\n self.tag = tag\n self.metric_names = metric_names\n self.output_transform = output_transform\n self.global_step_transform = global_step_transform\n self.state_attributes = state_attributes\n\n def _setup_output_metrics_state_attrs(\n self, engine: Engine, log_text: Optional[bool] = False, key_tuple: Optional[bool] = True\n ) -> Dict[Any, Any]:\n \"\"\"Helper method to setup metrics and state attributes to log\"\"\"\n metrics_state_attrs = OrderedDict()\n if self.metric_names is not None:\n if isinstance(self.metric_names, str) and self.metric_names == \"all\":\n metrics_state_attrs = OrderedDict(engine.state.metrics)\n else:\n for name in self.metric_names:\n if name not in engine.state.metrics:\n warnings.warn(\n f\"Provided metric name '{name}' is missing \"\n f\"in engine's state metrics: {list(engine.state.metrics.keys())}\"\n )\n continue\n metrics_state_attrs[name] = engine.state.metrics[name]\n\n if self.output_transform is not None:\n output_dict = self.output_transform(engine.state.output)\n\n if not isinstance(output_dict, dict):\n output_dict = {\"output\": output_dict}\n\n metrics_state_attrs.update({name: value for name, value in output_dict.items()})\n\n if self.state_attributes is not None:\n metrics_state_attrs.update({name: getattr(engine.state, name, None) for name in self.state_attributes})\n\n metrics_state_attrs_dict = OrderedDict() # type: Dict[Any, Union[str, float, numbers.Number]]\n\n def key_tuple_tf(tag: str, name: str, *args: str) -> Tuple[str, ...]:\n return (tag, name) + args\n\n def key_str_tf(tag: str, name: str, *args: str) -> str:\n return \"/\".join((tag, name) + args)\n\n key_tf = key_tuple_tf if key_tuple else key_str_tf\n\n for name, value in metrics_state_attrs.items():\n if isinstance(value, numbers.Number):\n metrics_state_attrs_dict[key_tf(self.tag, name)] = value\n elif isinstance(value, torch.Tensor) and value.ndimension() == 0:\n metrics_state_attrs_dict[key_tf(self.tag, name)] = value.item()\n elif isinstance(value, torch.Tensor) and value.ndimension() == 1:\n for i, v in enumerate(value):\n metrics_state_attrs_dict[key_tf(self.tag, name, str(i))] = v.item()\n else:\n if isinstance(value, str) and log_text:\n metrics_state_attrs_dict[key_tf(self.tag, name)] = value\n else:\n warnings.warn(f\"Logger output_handler can not log metrics value type {type(value)}\")\n return metrics_state_attrs_dict\n\n\nclass BaseWeightsScalarHandler(BaseHandler):\n \"\"\"\n Helper handler to log model's weights as scalars.\n \"\"\"\n\n def __init__(self, model: nn.Module, reduction: Callable = torch.norm, tag: Optional[str] = None):\n if not isinstance(model, torch.nn.Module):\n raise TypeError(f\"Argument model should be of type torch.nn.Module, but given {type(model)}\")\n\n if not callable(reduction):\n raise TypeError(f\"Argument reduction should be callable, but given {type(reduction)}\")\n\n def _is_0D_tensor(t: torch.Tensor) -> bool:\n return isinstance(t, torch.Tensor) and t.ndimension() == 0\n\n # Test reduction function on a tensor\n o 
= reduction(torch.ones(4, 2))\n if not (isinstance(o, numbers.Number) or _is_0D_tensor(o)):\n raise TypeError(f\"Output of the reduction function should be a scalar, but got {type(o)}\")\n\n self.model = model\n self.reduction = reduction\n self.tag = tag\n\n\nclass BaseWeightsHistHandler(BaseHandler):\n \"\"\"\n Helper handler to log model's weights as histograms.\n \"\"\"\n\n def __init__(self, model: nn.Module, tag: Optional[str] = None):\n if not isinstance(model, torch.nn.Module):\n raise TypeError(f\"Argument model should be of type torch.nn.Module, but given {type(model)}\")\n\n self.model = model\n self.tag = tag\n\n\nclass BaseLogger(metaclass=ABCMeta):\n \"\"\"\n Base logger handler. See implementations: TensorboardLogger, VisdomLogger, PolyaxonLogger, MLflowLogger, ...\n\n \"\"\"\n\n def attach(\n self,\n engine: Engine,\n log_handler: Callable,\n event_name: Union[str, Events, CallableEventWithFilter, EventsList],\n *args: Any,\n **kwargs: Any,\n ) -> RemovableEventHandle:\n \"\"\"Attach the logger to the engine and execute `log_handler` function at `event_name` events.\n\n Args:\n engine: engine object.\n log_handler: a logging handler to execute\n event_name: event to attach the logging handler to. Valid events are from\n :class:`~ignite.engine.events.Events` or :class:`~ignite.engine.events.EventsList` or any `event_name`\n added by :meth:`~ignite.engine.engine.Engine.register_events`.\n args: args forwarded to the `log_handler` method\n kwargs: kwargs forwarded to the `log_handler` method\n\n Returns:\n :class:`~ignite.engine.events.RemovableEventHandle`, which can be used to remove the handler.\n \"\"\"\n if isinstance(event_name, EventsList):\n for name in event_name:\n if name not in State.event_to_attr:\n raise RuntimeError(f\"Unknown event name '{name}'\")\n engine.add_event_handler(name, log_handler, self, name)\n\n return RemovableEventHandle(event_name, log_handler, engine)\n\n else:\n\n if event_name not in State.event_to_attr:\n raise RuntimeError(f\"Unknown event name '{event_name}'\")\n\n return engine.add_event_handler(event_name, log_handler, self, event_name, *args, **kwargs)\n\n def attach_output_handler(self, engine: Engine, event_name: Any, *args: Any, **kwargs: Any) -> RemovableEventHandle:\n \"\"\"Shortcut method to attach `OutputHandler` to the logger.\n\n Args:\n engine: engine object.\n event_name: event to attach the logging handler to. Valid events are from\n :class:`~ignite.engine.events.Events` or any `event_name` added by\n :meth:`~ignite.engine.engine.Engine.register_events`.\n args: args to initialize `OutputHandler`\n kwargs: kwargs to initialize `OutputHandler`\n\n Returns:\n :class:`~ignite.engine.events.RemovableEventHandle`, which can be used to remove the handler.\n \"\"\"\n return self.attach(engine, self._create_output_handler(*args, **kwargs), event_name=event_name)\n\n def attach_opt_params_handler(\n self, engine: Engine, event_name: Any, *args: Any, **kwargs: Any\n ) -> RemovableEventHandle:\n \"\"\"Shortcut method to attach `OptimizerParamsHandler` to the logger.\n\n Args:\n engine: engine object.\n event_name: event to attach the logging handler to. Valid events are from\n :class:`~ignite.engine.events.Events` or any `event_name` added by\n :meth:`~ignite.engine.engine.Engine.register_events`.\n args: args to initialize `OptimizerParamsHandler`\n kwargs: kwargs to initialize `OptimizerParamsHandler`\n\n Returns:\n :class:`~ignite.engine.events.RemovableEventHandle`, which can be used to remove the handler.\n\n .. 
versionchanged:: 0.4.3\n Added missing return statement.\n \"\"\"\n return self.attach(engine, self._create_opt_params_handler(*args, **kwargs), event_name=event_name)\n\n @abstractmethod\n def _create_output_handler(self, engine: Engine, *args: Any, **kwargs: Any) -> Callable:\n pass\n\n @abstractmethod\n def _create_opt_params_handler(self, *args: Any, **kwargs: Any) -> Callable:\n pass\n\n def __enter__(self) -> \"BaseLogger\":\n return self\n\n def __exit__(self, type: Any, value: Any, traceback: Any) -> None:\n self.close()\n\n def close(self) -> None:\n pass\n", "path": "ignite/contrib/handlers/base_logger.py"}], "after_files": [{"content": "\"\"\"Base logger and its helper handlers.\"\"\"\nimport numbers\nimport warnings\nfrom abc import ABCMeta, abstractmethod\nfrom collections import OrderedDict\nfrom typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union\n\nimport torch\nimport torch.nn as nn\nfrom torch.optim import Optimizer\n\nfrom ignite.engine import Engine, Events, EventsList, State\nfrom ignite.engine.events import CallableEventWithFilter, RemovableEventHandle\n\n\nclass BaseHandler(metaclass=ABCMeta):\n \"\"\"Base handler for defining various useful handlers.\"\"\"\n\n @abstractmethod\n def __call__(self, engine: Engine, logger: Any, event_name: Union[str, Events]) -> None:\n pass\n\n\nclass BaseOptimizerParamsHandler(BaseHandler):\n \"\"\"\n Base handler for logging optimizer parameters\n \"\"\"\n\n def __init__(self, optimizer: Optimizer, param_name: str = \"lr\", tag: Optional[str] = None):\n if not (\n isinstance(optimizer, Optimizer)\n or (hasattr(optimizer, \"param_groups\") and isinstance(optimizer.param_groups, Sequence))\n ):\n raise TypeError(\n \"Argument optimizer should be torch.optim.Optimizer or has attribute 'param_groups' as list/tuple, \"\n f\"but given {type(optimizer)}\"\n )\n\n self.optimizer = optimizer\n self.param_name = param_name\n self.tag = tag\n\n\nclass BaseOutputHandler(BaseHandler):\n \"\"\"\n Helper handler to log engine's output and/or metrics\n \"\"\"\n\n def __init__(\n self,\n tag: str,\n metric_names: Optional[Union[str, List[str]]] = None,\n output_transform: Optional[Callable] = None,\n global_step_transform: Optional[Callable] = None,\n state_attributes: Optional[List[str]] = None,\n ):\n\n if metric_names is not None:\n if not (isinstance(metric_names, list) or (isinstance(metric_names, str) and metric_names == \"all\")):\n raise TypeError(\n f\"metric_names should be either a list or equal 'all', got {type(metric_names)} instead.\"\n )\n\n if output_transform is not None and not callable(output_transform):\n raise TypeError(f\"output_transform should be a function, got {type(output_transform)} instead.\")\n\n if output_transform is None and metric_names is None and state_attributes is None:\n raise ValueError(\"Either metric_names, output_transform or state_attributes should be defined\")\n\n if global_step_transform is not None and not callable(global_step_transform):\n raise TypeError(f\"global_step_transform should be a function, got {type(global_step_transform)} instead.\")\n\n if global_step_transform is None:\n\n def global_step_transform(engine: Engine, event_name: Union[str, Events]) -> int:\n return engine.state.get_event_attrib_value(event_name)\n\n self.tag = tag\n self.metric_names = metric_names\n self.output_transform = output_transform\n self.global_step_transform = global_step_transform\n self.state_attributes = state_attributes\n\n def _setup_output_metrics_state_attrs(\n self, engine: Engine, 
log_text: Optional[bool] = False, key_tuple: Optional[bool] = True\n ) -> Dict[Any, Any]:\n \"\"\"Helper method to setup metrics and state attributes to log\"\"\"\n metrics_state_attrs = OrderedDict()\n if self.metric_names is not None:\n if isinstance(self.metric_names, str) and self.metric_names == \"all\":\n metrics_state_attrs = OrderedDict(engine.state.metrics)\n else:\n for name in self.metric_names:\n if name not in engine.state.metrics:\n warnings.warn(\n f\"Provided metric name '{name}' is missing \"\n f\"in engine's state metrics: {list(engine.state.metrics.keys())}\"\n )\n continue\n metrics_state_attrs[name] = engine.state.metrics[name]\n\n if self.output_transform is not None:\n output_dict = self.output_transform(engine.state.output)\n\n if not isinstance(output_dict, dict):\n output_dict = {\"output\": output_dict}\n\n metrics_state_attrs.update(output_dict)\n\n if self.state_attributes is not None:\n metrics_state_attrs.update({name: getattr(engine.state, name, None) for name in self.state_attributes})\n\n metrics_state_attrs_dict = OrderedDict() # type: Dict[Any, Union[str, float, numbers.Number]]\n\n def key_tuple_tf(tag: str, name: str, *args: str) -> Tuple[str, ...]:\n return (tag, name) + args\n\n def key_str_tf(tag: str, name: str, *args: str) -> str:\n return \"/\".join((tag, name) + args)\n\n key_tf = key_tuple_tf if key_tuple else key_str_tf\n\n for name, value in metrics_state_attrs.items():\n if isinstance(value, numbers.Number):\n metrics_state_attrs_dict[key_tf(self.tag, name)] = value\n elif isinstance(value, torch.Tensor) and value.ndimension() == 0:\n metrics_state_attrs_dict[key_tf(self.tag, name)] = value.item()\n elif isinstance(value, torch.Tensor) and value.ndimension() == 1:\n for i, v in enumerate(value):\n metrics_state_attrs_dict[key_tf(self.tag, name, str(i))] = v.item()\n else:\n if isinstance(value, str) and log_text:\n metrics_state_attrs_dict[key_tf(self.tag, name)] = value\n else:\n warnings.warn(f\"Logger output_handler can not log metrics value type {type(value)}\")\n return metrics_state_attrs_dict\n\n\nclass BaseWeightsScalarHandler(BaseHandler):\n \"\"\"\n Helper handler to log model's weights as scalars.\n \"\"\"\n\n def __init__(self, model: nn.Module, reduction: Callable = torch.norm, tag: Optional[str] = None):\n if not isinstance(model, torch.nn.Module):\n raise TypeError(f\"Argument model should be of type torch.nn.Module, but given {type(model)}\")\n\n if not callable(reduction):\n raise TypeError(f\"Argument reduction should be callable, but given {type(reduction)}\")\n\n def _is_0D_tensor(t: torch.Tensor) -> bool:\n return isinstance(t, torch.Tensor) and t.ndimension() == 0\n\n # Test reduction function on a tensor\n o = reduction(torch.ones(4, 2))\n if not (isinstance(o, numbers.Number) or _is_0D_tensor(o)):\n raise TypeError(f\"Output of the reduction function should be a scalar, but got {type(o)}\")\n\n self.model = model\n self.reduction = reduction\n self.tag = tag\n\n\nclass BaseWeightsHistHandler(BaseHandler):\n \"\"\"\n Helper handler to log model's weights as histograms.\n \"\"\"\n\n def __init__(self, model: nn.Module, tag: Optional[str] = None):\n if not isinstance(model, torch.nn.Module):\n raise TypeError(f\"Argument model should be of type torch.nn.Module, but given {type(model)}\")\n\n self.model = model\n self.tag = tag\n\n\nclass BaseLogger(metaclass=ABCMeta):\n \"\"\"\n Base logger handler. 
See implementations: TensorboardLogger, VisdomLogger, PolyaxonLogger, MLflowLogger, ...\n\n \"\"\"\n\n def attach(\n self,\n engine: Engine,\n log_handler: Callable,\n event_name: Union[str, Events, CallableEventWithFilter, EventsList],\n *args: Any,\n **kwargs: Any,\n ) -> RemovableEventHandle:\n \"\"\"Attach the logger to the engine and execute `log_handler` function at `event_name` events.\n\n Args:\n engine: engine object.\n log_handler: a logging handler to execute\n event_name: event to attach the logging handler to. Valid events are from\n :class:`~ignite.engine.events.Events` or :class:`~ignite.engine.events.EventsList` or any `event_name`\n added by :meth:`~ignite.engine.engine.Engine.register_events`.\n args: args forwarded to the `log_handler` method\n kwargs: kwargs forwarded to the `log_handler` method\n\n Returns:\n :class:`~ignite.engine.events.RemovableEventHandle`, which can be used to remove the handler.\n \"\"\"\n if isinstance(event_name, EventsList):\n for name in event_name:\n if name not in State.event_to_attr:\n raise RuntimeError(f\"Unknown event name '{name}'\")\n engine.add_event_handler(name, log_handler, self, name)\n\n return RemovableEventHandle(event_name, log_handler, engine)\n\n else:\n\n if event_name not in State.event_to_attr:\n raise RuntimeError(f\"Unknown event name '{event_name}'\")\n\n return engine.add_event_handler(event_name, log_handler, self, event_name, *args, **kwargs)\n\n def attach_output_handler(self, engine: Engine, event_name: Any, *args: Any, **kwargs: Any) -> RemovableEventHandle:\n \"\"\"Shortcut method to attach `OutputHandler` to the logger.\n\n Args:\n engine: engine object.\n event_name: event to attach the logging handler to. Valid events are from\n :class:`~ignite.engine.events.Events` or any `event_name` added by\n :meth:`~ignite.engine.engine.Engine.register_events`.\n args: args to initialize `OutputHandler`\n kwargs: kwargs to initialize `OutputHandler`\n\n Returns:\n :class:`~ignite.engine.events.RemovableEventHandle`, which can be used to remove the handler.\n \"\"\"\n return self.attach(engine, self._create_output_handler(*args, **kwargs), event_name=event_name)\n\n def attach_opt_params_handler(\n self, engine: Engine, event_name: Any, *args: Any, **kwargs: Any\n ) -> RemovableEventHandle:\n \"\"\"Shortcut method to attach `OptimizerParamsHandler` to the logger.\n\n Args:\n engine: engine object.\n event_name: event to attach the logging handler to. Valid events are from\n :class:`~ignite.engine.events.Events` or any `event_name` added by\n :meth:`~ignite.engine.engine.Engine.register_events`.\n args: args to initialize `OptimizerParamsHandler`\n kwargs: kwargs to initialize `OptimizerParamsHandler`\n\n Returns:\n :class:`~ignite.engine.events.RemovableEventHandle`, which can be used to remove the handler.\n\n .. versionchanged:: 0.4.3\n Added missing return statement.\n \"\"\"\n return self.attach(engine, self._create_opt_params_handler(*args, **kwargs), event_name=event_name)\n\n @abstractmethod\n def _create_output_handler(self, engine: Engine, *args: Any, **kwargs: Any) -> Callable:\n pass\n\n @abstractmethod\n def _create_opt_params_handler(self, *args: Any, **kwargs: Any) -> Callable:\n pass\n\n def __enter__(self) -> \"BaseLogger\":\n return self\n\n def __exit__(self, type: Any, value: Any, traceback: Any) -> None:\n self.close()\n\n def close(self) -> None:\n pass\n", "path": "ignite/contrib/handlers/base_logger.py"}]}
| 3,565 | 147 |
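The fix in the ignite record above rests on the fact that `dict.update` already accepts a mapping, so rebuilding `output_dict` with a comprehension first is redundant. A minimal, self-contained demonstration that the two forms produce identical results (the variable names are illustrative):

```python
# Passing a dict straight to dict.update() is equivalent to rebuilding it
# with a comprehension first; the comprehension only adds overhead.
from collections import OrderedDict

output_dict = {"loss": 0.25, "output": 1.0}

via_comprehension = OrderedDict()
via_comprehension.update({name: value for name, value in output_dict.items()})

direct = OrderedDict()
direct.update(output_dict)

assert via_comprehension == direct
print(direct)  # OrderedDict([('loss', 0.25), ('output', 1.0)])
```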
gh_patches_debug_6913 | rasdani/github-patches | git_diff | horovod__horovod-214 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error on using hvd.mpi_threads_supported() error: undefined symbol: mpi_threads_supported
Error on using hvd.mpi_threads_supported()
error: undefined symbol: mpi_threads_supported
`mpi_threads_supported = MPI_COMMON_LIB_CTYPES.mpi_threads_supported()`
should be
`mpi_threads_supported = MPI_COMMON_LIB_CTYPES.horovod_mpi_threads_supported()`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `horovod/common/__init__.py`
Content:
```
1 # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2 # Modifications copyright (C) 2018 Uber Technologies, Inc.
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15 # =============================================================================
16
17 import ctypes
18 import os
19 import sysconfig
20
21
22 def get_ext_suffix():
23 """Determine library extension for various versions of Python."""
24 ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
25 if ext_suffix:
26 return ext_suffix
27
28 ext_suffix = sysconfig.get_config_var('SO')
29 if ext_suffix:
30 return ext_suffix
31
32 return '.so'
33
34
35 MPI_COMMON_LIB_CTYPES = \
36 ctypes.CDLL(os.path.join(os.path.dirname(__file__),
37 'mpi_lib' + get_ext_suffix()), mode=ctypes.RTLD_GLOBAL)
38
39
40 def init():
41 """A function that initializes Horovod.
42 """
43 return MPI_COMMON_LIB_CTYPES.horovod_init()
44
45
46 def size():
47 """A function that returns the number of Horovod processes.
48
49 Returns:
50 An integer scalar containing the number of Horovod processes.
51 """
52 size = MPI_COMMON_LIB_CTYPES.horovod_size()
53 if size == -1:
54 raise ValueError(
55 'Horovod has not been initialized; use hvd.init().')
56 return size
57
58
59 def local_size():
60 """A function that returns the number of Horovod processes within the
61 node the current process is running on.
62
63 Returns:
64 An integer scalar containing the number of local Horovod processes.
65 """
66 local_size = MPI_COMMON_LIB_CTYPES.horovod_local_size()
67 if local_size == -1:
68 raise ValueError(
69 'Horovod has not been initialized; use hvd.init().')
70 return local_size
71
72
73 def rank():
74 """A function that returns the Horovod rank of the calling process.
75
76 Returns:
77 An integer scalar with the Horovod rank of the calling process.
78 """
79 rank = MPI_COMMON_LIB_CTYPES.horovod_rank()
80 if rank == -1:
81 raise ValueError(
82 'Horovod has not been initialized; use hvd.init().')
83 return rank
84
85
86 def local_rank():
87 """A function that returns the local Horovod rank of the calling process, within the
88 node that it is running on. For example, if there are seven processes running
89 on a node, their local ranks will be zero through six, inclusive.
90
91 Returns:
92 An integer scalar with the local Horovod rank of the calling process.
93 """
94 local_rank = MPI_COMMON_LIB_CTYPES.horovod_local_rank()
95 if local_rank == -1:
96 raise ValueError(
97 'Horovod has not been initialized; use hvd.init().')
98 return local_rank
99
100
101 def mpi_threads_supported():
102 """A function that returns a flag indicating whether MPI multi-threading is supported.
103
104 If MPI multi-threading is supported, users may mix and match Horovod usage with other
105 MPI libraries, such as `mpi4py`.
106
107 Returns:
108 A boolean value indicating whether MPI multi-threading is supported.
109 """
110 mpi_threads_supported = MPI_COMMON_LIB_CTYPES.mpi_threads_supported()
111 if mpi_threads_supported == -1:
112 raise ValueError(
113 'Horovod has not been initialized; use hvd.init().')
114 return bool(mpi_threads_supported)
115
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/horovod/common/__init__.py b/horovod/common/__init__.py
--- a/horovod/common/__init__.py
+++ b/horovod/common/__init__.py
@@ -107,7 +107,7 @@
Returns:
A boolean value indicating whether MPI multi-threading is supported.
"""
- mpi_threads_supported = MPI_COMMON_LIB_CTYPES.mpi_threads_supported()
+ mpi_threads_supported = MPI_COMMON_LIB_CTYPES.horovod_mpi_threads_supported()
if mpi_threads_supported == -1:
raise ValueError(
'Horovod has not been initialized; use hvd.init().')
|
{"golden_diff": "diff --git a/horovod/common/__init__.py b/horovod/common/__init__.py\n--- a/horovod/common/__init__.py\n+++ b/horovod/common/__init__.py\n@@ -107,7 +107,7 @@\n Returns:\n A boolean value indicating whether MPI multi-threading is supported.\n \"\"\"\n- mpi_threads_supported = MPI_COMMON_LIB_CTYPES.mpi_threads_supported()\n+ mpi_threads_supported = MPI_COMMON_LIB_CTYPES.horovod_mpi_threads_supported()\n if mpi_threads_supported == -1:\n raise ValueError(\n 'Horovod has not been initialized; use hvd.init().')\n", "issue": "Error on using hvd.mpi_threads_supported() error: undefined symbol: mpi_threads_supported\nError on using hvd.mpi_threads_supported() \r\n\r\nerror: undefined symbol: mpi_threads_supported\r\n\r\n`mpi_threads_supported = MPI_COMMON_LIB_CTYPES.mpi_threads_supported()`\r\n should be \r\n`mpi_threads_supported = MPI_COMMON_LIB_CTYPES.horovod_mpi_threads_supported()`\r\n\n", "before_files": [{"content": "# Copyright 2016 The TensorFlow Authors. All Rights Reserved.\n# Modifications copyright (C) 2018 Uber Technologies, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# =============================================================================\n\nimport ctypes\nimport os\nimport sysconfig\n\n\ndef get_ext_suffix():\n \"\"\"Determine library extension for various versions of Python.\"\"\"\n ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')\n if ext_suffix:\n return ext_suffix\n\n ext_suffix = sysconfig.get_config_var('SO')\n if ext_suffix:\n return ext_suffix\n\n return '.so'\n\n\nMPI_COMMON_LIB_CTYPES = \\\n ctypes.CDLL(os.path.join(os.path.dirname(__file__),\n 'mpi_lib' + get_ext_suffix()), mode=ctypes.RTLD_GLOBAL)\n\n\ndef init():\n \"\"\"A function that initializes Horovod.\n \"\"\"\n return MPI_COMMON_LIB_CTYPES.horovod_init()\n\n\ndef size():\n \"\"\"A function that returns the number of Horovod processes.\n\n Returns:\n An integer scalar containing the number of Horovod processes.\n \"\"\"\n size = MPI_COMMON_LIB_CTYPES.horovod_size()\n if size == -1:\n raise ValueError(\n 'Horovod has not been initialized; use hvd.init().')\n return size\n\n\ndef local_size():\n \"\"\"A function that returns the number of Horovod processes within the\n node the current process is running on.\n\n Returns:\n An integer scalar containing the number of local Horovod processes.\n \"\"\"\n local_size = MPI_COMMON_LIB_CTYPES.horovod_local_size()\n if local_size == -1:\n raise ValueError(\n 'Horovod has not been initialized; use hvd.init().')\n return local_size\n\n\ndef rank():\n \"\"\"A function that returns the Horovod rank of the calling process.\n\n Returns:\n An integer scalar with the Horovod rank of the calling process.\n \"\"\"\n rank = MPI_COMMON_LIB_CTYPES.horovod_rank()\n if rank == -1:\n raise ValueError(\n 'Horovod has not been initialized; use hvd.init().')\n return rank\n\n\ndef local_rank():\n \"\"\"A function that returns the local Horovod rank of the calling process, within the\n node that it is running on. 
For example, if there are seven processes running\n on a node, their local ranks will be zero through six, inclusive.\n\n Returns:\n An integer scalar with the local Horovod rank of the calling process.\n \"\"\"\n local_rank = MPI_COMMON_LIB_CTYPES.horovod_local_rank()\n if local_rank == -1:\n raise ValueError(\n 'Horovod has not been initialized; use hvd.init().')\n return local_rank\n\n\ndef mpi_threads_supported():\n \"\"\"A function that returns a flag indicating whether MPI multi-threading is supported.\n\n If MPI multi-threading is supported, users may mix and match Horovod usage with other\n MPI libraries, such as `mpi4py`.\n\n Returns:\n A boolean value indicating whether MPI multi-threading is supported.\n \"\"\"\n mpi_threads_supported = MPI_COMMON_LIB_CTYPES.mpi_threads_supported()\n if mpi_threads_supported == -1:\n raise ValueError(\n 'Horovod has not been initialized; use hvd.init().')\n return bool(mpi_threads_supported)\n", "path": "horovod/common/__init__.py"}], "after_files": [{"content": "# Copyright 2016 The TensorFlow Authors. All Rights Reserved.\n# Modifications copyright (C) 2018 Uber Technologies, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# =============================================================================\n\nimport ctypes\nimport os\nimport sysconfig\n\n\ndef get_ext_suffix():\n \"\"\"Determine library extension for various versions of Python.\"\"\"\n ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')\n if ext_suffix:\n return ext_suffix\n\n ext_suffix = sysconfig.get_config_var('SO')\n if ext_suffix:\n return ext_suffix\n\n return '.so'\n\n\nMPI_COMMON_LIB_CTYPES = \\\n ctypes.CDLL(os.path.join(os.path.dirname(__file__),\n 'mpi_lib' + get_ext_suffix()), mode=ctypes.RTLD_GLOBAL)\n\n\ndef init():\n \"\"\"A function that initializes Horovod.\n \"\"\"\n return MPI_COMMON_LIB_CTYPES.horovod_init()\n\n\ndef size():\n \"\"\"A function that returns the number of Horovod processes.\n\n Returns:\n An integer scalar containing the number of Horovod processes.\n \"\"\"\n size = MPI_COMMON_LIB_CTYPES.horovod_size()\n if size == -1:\n raise ValueError(\n 'Horovod has not been initialized; use hvd.init().')\n return size\n\n\ndef local_size():\n \"\"\"A function that returns the number of Horovod processes within the\n node the current process is running on.\n\n Returns:\n An integer scalar containing the number of local Horovod processes.\n \"\"\"\n local_size = MPI_COMMON_LIB_CTYPES.horovod_local_size()\n if local_size == -1:\n raise ValueError(\n 'Horovod has not been initialized; use hvd.init().')\n return local_size\n\n\ndef rank():\n \"\"\"A function that returns the Horovod rank of the calling process.\n\n Returns:\n An integer scalar with the Horovod rank of the calling process.\n \"\"\"\n rank = MPI_COMMON_LIB_CTYPES.horovod_rank()\n if rank == -1:\n raise ValueError(\n 'Horovod has not been initialized; use hvd.init().')\n return rank\n\n\ndef local_rank():\n \"\"\"A function that returns the local Horovod rank of the calling process, within 
the\n node that it is running on. For example, if there are seven processes running\n on a node, their local ranks will be zero through six, inclusive.\n\n Returns:\n An integer scalar with the local Horovod rank of the calling process.\n \"\"\"\n local_rank = MPI_COMMON_LIB_CTYPES.horovod_local_rank()\n if local_rank == -1:\n raise ValueError(\n 'Horovod has not been initialized; use hvd.init().')\n return local_rank\n\n\ndef mpi_threads_supported():\n \"\"\"A function that returns a flag indicating whether MPI multi-threading is supported.\n\n If MPI multi-threading is supported, users may mix and match Horovod usage with other\n MPI libraries, such as `mpi4py`.\n\n Returns:\n A boolean value indicating whether MPI multi-threading is supported.\n \"\"\"\n mpi_threads_supported = MPI_COMMON_LIB_CTYPES.horovod_mpi_threads_supported()\n if mpi_threads_supported == -1:\n raise ValueError(\n 'Horovod has not been initialized; use hvd.init().')\n return bool(mpi_threads_supported)\n", "path": "horovod/common/__init__.py"}]}
| 1,414 | 144 |
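The horovod record above fixes a ctypes symbol name: the exported function is `horovod_mpi_threads_supported`, not `mpi_threads_supported`. Because `ctypes.CDLL` resolves symbols lazily at attribute access, calling an unexported name fails with an "undefined symbol" `AttributeError`, which is exactly the error in the issue. A small sketch using `libm` as a stand-in for Horovod's `mpi_lib` (any shared library would do; the missing name below is deliberately wrong):

```python
# ctypes resolves a symbol only when the attribute is first accessed, so a
# misspelled or unprefixed function name fails at call-site lookup time.
import ctypes
import ctypes.util

libm_path = ctypes.util.find_library("m")  # may be None on some platforms
if libm_path is not None:
    libm = ctypes.CDLL(libm_path)

    libm.cos.restype = ctypes.c_double
    libm.cos.argtypes = [ctypes.c_double]
    print(libm.cos(0.0))  # 1.0 -- correctly named, exported symbol

    try:
        libm.mpi_threads_supported()  # not exported by libm -> lookup fails
    except AttributeError as exc:
        print(exc)  # e.g. ".../libm.so.6: undefined symbol: mpi_threads_supported"
```

The actual patch simply calls the `horovod_`-prefixed name on the already-loaded `MPI_COMMON_LIB_CTYPES` handle.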
gh_patches_debug_32181 | rasdani/github-patches | git_diff | tensorflow__addons-1281 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Segmentation fault when using tfa.rotate in tf-nightly
**System information**
- OS Platform: Linux Ubuntu 18.04.3 LTS
- TensorFlow version: tf-nightly (2.2.0-dev20200310) installed with pip
- TensorFlow-Addons version: 0.8.3 (pip) and tfa-nightly (0.9.0-dev; pip)
- Python version: 3.6.9
- Is GPU used?: yes
**Describe the bug**
I currently have to use tf-nightly because of a separate issue. When I use tfa.rotate I get a segmentation fault. Behavior is the same with tfa 0.8.3 and tfa-nightly.
**Code to reproduce the issue**
```
import tensorflow as tf
import tensorflow_addons as tfa
img = tf.io.read_file(IMG_PATH)
img = tf.image.decode_png(img)
print(img.shape)
img = tfa.image.rotate(img, 90)
```
Output:
> (128, 128, 3)
> Segmentation fault (core dumped)
I also tried `interpolation="BILINEAR"`. Problem is the same.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tensorflow_addons/utils/ensure_tf_install.py`
Content:
```
1 # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 # ==============================================================================
15
16
17 # Ensure TensorFlow is importable and its version is sufficiently recent. This
18 # needs to happen before anything else, since the imports below will try to
19 # import tensorflow, too.
20 def _ensure_tf_install():
21 """Attempt to import tensorflow, and ensure its version is sufficient.
22 Raises:
23 ImportError: if either tensorflow is not importable or its version is
24 inadequate.
25 """
26 import tensorflow as tf
27 import distutils.version
28
29 #
30 # Update this whenever we need to depend on a newer TensorFlow release.
31 #
32 required_tensorflow_version = "2.1.0"
33
34 if distutils.version.LooseVersion(tf.__version__) < distutils.version.LooseVersion(
35 required_tensorflow_version
36 ):
37 raise ImportError(
38 "This version of TensorFlow Addons requires TensorFlow "
39 "version >= {required}; Detected an installation of version "
40 "{present}. Please upgrade TensorFlow to proceed.".format(
41 required=required_tensorflow_version, present=tf.__version__
42 )
43 )
44
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/tensorflow_addons/utils/ensure_tf_install.py b/tensorflow_addons/utils/ensure_tf_install.py
--- a/tensorflow_addons/utils/ensure_tf_install.py
+++ b/tensorflow_addons/utils/ensure_tf_install.py
@@ -17,27 +17,40 @@
# Ensure TensorFlow is importable and its version is sufficiently recent. This
# needs to happen before anything else, since the imports below will try to
# import tensorflow, too.
+
+from distutils.version import LooseVersion
+import warnings
+
+import tensorflow as tf
+
+
+warning_template = """
+This version of TensorFlow Addons requires TensorFlow {required}.
+Detected an installation of version {present}.
+
+While some functions might work, TensorFlow Addons was not tested
+with this TensorFlow version. Also custom ops were not compiled
+against this version of TensorFlow. If you use custom ops,
+you might get errors (segmentation faults for example).
+
+It might help you to fallback to pure Python ops with
+TF_ADDONS_PY_OPS . To do that, see
+https://github.com/tensorflow/addons#gpucpu-custom-ops
+
+If you encounter errors, do *not* file bugs in GitHub because
+the version of TensorFlow you are using is not supported.
+"""
+
+
def _ensure_tf_install():
- """Attempt to import tensorflow, and ensure its version is sufficient.
- Raises:
- ImportError: if either tensorflow is not importable or its version is
- inadequate.
+ """Warn the user if the version of TensorFlow used is not supported.
"""
- import tensorflow as tf
- import distutils.version
- #
# Update this whenever we need to depend on a newer TensorFlow release.
- #
- required_tensorflow_version = "2.1.0"
-
- if distutils.version.LooseVersion(tf.__version__) < distutils.version.LooseVersion(
- required_tensorflow_version
- ):
- raise ImportError(
- "This version of TensorFlow Addons requires TensorFlow "
- "version >= {required}; Detected an installation of version "
- "{present}. Please upgrade TensorFlow to proceed.".format(
- required=required_tensorflow_version, present=tf.__version__
- )
+ required_tf_version = "2.1.0"
+
+ if LooseVersion(tf.__version__) != LooseVersion(required_tf_version):
+ message = warning_template.format(
+ required=required_tf_version, present=tf.__version__
)
+ warnings.warn(message, UserWarning)
|
{"golden_diff": "diff --git a/tensorflow_addons/utils/ensure_tf_install.py b/tensorflow_addons/utils/ensure_tf_install.py\n--- a/tensorflow_addons/utils/ensure_tf_install.py\n+++ b/tensorflow_addons/utils/ensure_tf_install.py\n@@ -17,27 +17,40 @@\n # Ensure TensorFlow is importable and its version is sufficiently recent. This\n # needs to happen before anything else, since the imports below will try to\n # import tensorflow, too.\n+\n+from distutils.version import LooseVersion\n+import warnings\n+\n+import tensorflow as tf\n+\n+\n+warning_template = \"\"\"\n+This version of TensorFlow Addons requires TensorFlow {required}.\n+Detected an installation of version {present}.\n+\n+While some functions might work, TensorFlow Addons was not tested\n+with this TensorFlow version. Also custom ops were not compiled\n+against this version of TensorFlow. If you use custom ops,\n+you might get errors (segmentation faults for example).\n+\n+It might help you to fallback to pure Python ops with\n+TF_ADDONS_PY_OPS . To do that, see\n+https://github.com/tensorflow/addons#gpucpu-custom-ops\n+\n+If you encounter errors, do *not* file bugs in GitHub because\n+the version of TensorFlow you are using is not supported.\n+\"\"\"\n+\n+\n def _ensure_tf_install():\n- \"\"\"Attempt to import tensorflow, and ensure its version is sufficient.\n- Raises:\n- ImportError: if either tensorflow is not importable or its version is\n- inadequate.\n+ \"\"\"Warn the user if the version of TensorFlow used is not supported.\n \"\"\"\n- import tensorflow as tf\n- import distutils.version\n \n- #\n # Update this whenever we need to depend on a newer TensorFlow release.\n- #\n- required_tensorflow_version = \"2.1.0\"\n-\n- if distutils.version.LooseVersion(tf.__version__) < distutils.version.LooseVersion(\n- required_tensorflow_version\n- ):\n- raise ImportError(\n- \"This version of TensorFlow Addons requires TensorFlow \"\n- \"version >= {required}; Detected an installation of version \"\n- \"{present}. Please upgrade TensorFlow to proceed.\".format(\n- required=required_tensorflow_version, present=tf.__version__\n- )\n+ required_tf_version = \"2.1.0\"\n+\n+ if LooseVersion(tf.__version__) != LooseVersion(required_tf_version):\n+ message = warning_template.format(\n+ required=required_tf_version, present=tf.__version__\n )\n+ warnings.warn(message, UserWarning)\n", "issue": "Segmentation fault when using tfa.rotate in tf-nightly\n**System information**\r\n- OS Platform: Linux Ubuntu 18.04.3 LTS\r\n- TensorFlow version: tf-nightly (2.2.0-dev20200310) installed with pip\r\n- TensorFlow-Addons version: 0.8.3 (pip) and tfa-nightly (0.9.0-dev; pip)\r\n- Python version: 3.6.9\r\n- Is GPU used?: yes\r\n\r\n**Describe the bug**\r\nI currently have to use tf-nightly because of a separate issue. When I use tfa.rotate I get a segmentation fault. Behavior is the same with tfa 0.8.3 and tfa-nightly.\r\n\r\n**Code to reproduce the issue**\r\n```\r\nimport tensorflow as tf\r\nimport tensorflow_addons as tfa\r\n\r\nimg = tf.io.read_file(IMG_PATH)\r\nimg = tf.image.decode_png(img)\r\nprint(img.shape)\r\nimg = tfa.image.rotate(img, 90)\r\n```\r\nOutput: \r\n> (128, 128, 3)\r\n> Segmentation fault (core dumped)\r\n\r\nI also tried `interpolation=\"BILINEAR\"`. Problem is the same. \n", "before_files": [{"content": "# Copyright 2019 The TensorFlow Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\n\n# Ensure TensorFlow is importable and its version is sufficiently recent. This\n# needs to happen before anything else, since the imports below will try to\n# import tensorflow, too.\ndef _ensure_tf_install():\n \"\"\"Attempt to import tensorflow, and ensure its version is sufficient.\n Raises:\n ImportError: if either tensorflow is not importable or its version is\n inadequate.\n \"\"\"\n import tensorflow as tf\n import distutils.version\n\n #\n # Update this whenever we need to depend on a newer TensorFlow release.\n #\n required_tensorflow_version = \"2.1.0\"\n\n if distutils.version.LooseVersion(tf.__version__) < distutils.version.LooseVersion(\n required_tensorflow_version\n ):\n raise ImportError(\n \"This version of TensorFlow Addons requires TensorFlow \"\n \"version >= {required}; Detected an installation of version \"\n \"{present}. Please upgrade TensorFlow to proceed.\".format(\n required=required_tensorflow_version, present=tf.__version__\n )\n )\n", "path": "tensorflow_addons/utils/ensure_tf_install.py"}], "after_files": [{"content": "# Copyright 2019 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\n\n# Ensure TensorFlow is importable and its version is sufficiently recent. This\n# needs to happen before anything else, since the imports below will try to\n# import tensorflow, too.\n\nfrom distutils.version import LooseVersion\nimport warnings\n\nimport tensorflow as tf\n\n\nwarning_template = \"\"\"\nThis version of TensorFlow Addons requires TensorFlow {required}.\nDetected an installation of version {present}.\n\nWhile some functions might work, TensorFlow Addons was not tested\nwith this TensorFlow version. Also custom ops were not compiled\nagainst this version of TensorFlow. If you use custom ops,\nyou might get errors (segmentation faults for example).\n\nIt might help you to fallback to pure Python ops with\nTF_ADDONS_PY_OPS . 
To do that, see\nhttps://github.com/tensorflow/addons#gpucpu-custom-ops\n\nIf you encounter errors, do *not* file bugs in GitHub because\nthe version of TensorFlow you are using is not supported.\n\"\"\"\n\n\ndef _ensure_tf_install():\n \"\"\"Warn the user if the version of TensorFlow used is not supported.\n \"\"\"\n\n # Update this whenever we need to depend on a newer TensorFlow release.\n required_tf_version = \"2.1.0\"\n\n if LooseVersion(tf.__version__) != LooseVersion(required_tf_version):\n message = warning_template.format(\n required=required_tf_version, present=tf.__version__\n )\n warnings.warn(message, UserWarning)\n", "path": "tensorflow_addons/utils/ensure_tf_install.py"}]}
| 953 | 556 |
gh_patches_debug_11369
|
rasdani/github-patches
|
git_diff
|
apluslms__a-plus-1348
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Use setUpTestData in tests
`setUp` is run before every test method, but `setUpTestData` is run only once for the whole test class. Each test method runs in a transaction that is rolled back at the end, so there is no need to re-create the database data for each method separately. Most of the time in unit tests seems to be spent on creating data for the tests, so reusing that data via `setUpTestData` would be a massive speed-up.
`setUpTestData` takes the class as an argument but it can be used similarly to `setUp`: just assign the properties to the class (e.g. `cls.instance = CourseInstance...`) and the property can be accessed normally through the object (e.g. `self.instance`).
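A minimal sketch of the pattern being proposed (the test class is hypothetical; the model and field values are borrowed from the `lib/testdata.py` listing below):

```python
from django.test import TestCase

from course.models import Course


class CourseTests(TestCase):

    @classmethod
    def setUpTestData(cls):
        # Runs once for the whole class instead of once per test method;
        # each test still executes inside a transaction that is rolled back,
        # so the shared rows are never polluted between methods.
        cls.course = Course.objects.create(
            url="course",
            name="Test Course",
            code="123456",
        )

    def test_course_name(self):
        # Class-level data is reachable through the instance as usual.
        self.assertEqual(self.course.name, "Test Course")
```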
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lib/testdata.py`
Content:
```
1 from datetime import timedelta
2 from django.conf import settings
3 from django.contrib.auth.models import User
4 from django.test import TestCase
5 from django.utils import timezone
6
7 from course.models import (
8 Course,
9 CourseInstance,
10 CourseModule,
11 LearningObjectCategory,
12 )
13 from exercise.models import (
14 BaseExercise,
15 StaticExercise,
16 Submission,
17 )
18
19
20 class CourseTestCase(TestCase):
21
22 def setUp(self):
23 self.setUpCourse()
24 self.setUpSubmissions()
25
26 def setUpCourse(self):
27
28 self.now = timezone.now()
29 self.tomorrow = self.now + timedelta(days=1)
30 self.two_days_after = self.now + timedelta(days=2)
31 self.three_days_after = self.now + timedelta(days=3)
32 self.yesterday = self.now - timedelta(days=1)
33 self.two_days_before = self.now - timedelta(days=2)
34
35 self.user = User(username='testUser')
36 self.user.set_password('testPassword')
37 self.user.save()
38
39 self.teacher = User(username='testTeacher')
40 self.teacher.set_password('testPassword')
41 self.teacher.save()
42
43 self.student = User(username='testStudent')
44 self.student.set_password('testPassword')
45 self.student.save()
46 self.student.userprofile.student_id = "123TEST"
47 self.student.userprofile.organization = settings.LOCAL_ORGANIZATION
48 self.student.userprofile.save()
49
50 self.course = Course.objects.create(
51 url="course",
52 name="Test Course",
53 code="123456",
54 )
55
56 self.instance = CourseInstance.objects.create(
57 course=self.course,
58 url="instance",
59 instance_name="2016",
60 starting_time=self.now,
61 ending_time=self.tomorrow,
62 )
63 self.instance.add_teacher(self.teacher.userprofile)
64 self.instance.enroll_student(self.student)
65
66 self.module = CourseModule.objects.create(
67 course_instance=self.instance,
68 url="module",
69 name="Test Module",
70 points_to_pass=10,
71 opening_time=self.now,
72 closing_time=self.tomorrow,
73 late_submissions_allowed=True,
74 late_submission_deadline=self.two_days_after,
75 late_submission_penalty=0.2
76 )
77 self.module2 = CourseModule.objects.create(
78 course_instance=self.instance,
79 url="module2",
80 name="Test Module 2",
81 points_to_pass=0,
82 opening_time=self.tomorrow,
83 closing_time=self.two_days_after,
84 )
85 self.module0 = CourseModule.objects.create(
86 course_instance=self.instance,
87 url="module0",
88 name="Past Module",
89 points_to_pass=10,
90 opening_time=self.two_days_before,
91 closing_time=self.yesterday,
92 )
93 self.category = LearningObjectCategory.objects.create(
94 course_instance=self.instance,
95 name="Test Category",
96 points_to_pass=5,
97 )
98
99 self.exercise = StaticExercise.objects.create(
100 course_module=self.module,
101 category=self.category,
102 url='e1',
103 name="Test Exercise",
104 exercise_page_content='$$exercise$$content',
105 submission_page_content='$$exercise$$received',
106 points_to_pass=0,
107 max_points=100,
108 order=1,
109 )
110 self.exercise2 = StaticExercise.objects.create(
111 course_module=self.module,
112 category=self.category,
113 url='e2',
114 name="Test Exercise 2",
115 exercise_page_content='$$exercise2$$content',
116 submission_page_content='$$exercise2$$received',
117 points_to_pass=10,
118 max_points=100,
119 order=2,
120 )
121 self.exercise3 = StaticExercise.objects.create(
122 course_module=self.module2,
123 category=self.category,
124 url='e3',
125 name="Test Exercise 3",
126 exercise_page_content='$$exercise3$$content',
127 submission_page_content='$$exercise3$$received',
128 points_to_pass=0,
129 max_points=100,
130 )
131 self.exercise0 = BaseExercise.objects.create(
132 course_module=self.module0,
133 category=self.category,
134 url='b0',
135 name="Base Exercise 0",
136 service_url="http://localhost/",
137 points_to_pass=0,
138 max_points=100,
139 min_group_size=1,
140 max_group_size=2,
141 )
142
143 def setUpSubmissions(self):
144
145 self.submission = Submission.objects.create(
146 exercise=self.exercise,
147 submission_data={'submission':1},
148 feedback='$$submission$$feedback',
149 )
150 self.submission.submitters.add(self.student.userprofile)
151 self.submission.set_points(1,2)
152 self.submission.set_ready()
153 self.submission.save()
154
155 self.submission2 = Submission.objects.create(
156 exercise=self.exercise,
157 submission_data={'submission':2},
158 )
159 self.submission2.submitters.add(self.student.userprofile)
160
161 self.submission3 = Submission.objects.create(
162 exercise=self.exercise2,
163 submission_data={'submission':3},
164 )
165 self.submission3.submitters.add(self.student.userprofile)
166 self.submission3.submitters.add(self.user.userprofile)
167
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/lib/testdata.py b/lib/testdata.py
--- a/lib/testdata.py
+++ b/lib/testdata.py
@@ -19,10 +19,12 @@
class CourseTestCase(TestCase):
- def setUp(self):
- self.setUpCourse()
- self.setUpSubmissions()
+ @classmethod
+ def setUpTestData(cls):
+ cls.setUpCourse()
+ cls.setUpSubmissions()
+ @classmethod
def setUpCourse(self):
self.now = timezone.now()
@@ -140,6 +142,7 @@
max_group_size=2,
)
+ @classmethod
def setUpSubmissions(self):
self.submission = Submission.objects.create(
|
{"golden_diff": "diff --git a/lib/testdata.py b/lib/testdata.py\n--- a/lib/testdata.py\n+++ b/lib/testdata.py\n@@ -19,10 +19,12 @@\n \n class CourseTestCase(TestCase):\n \n- def setUp(self):\n- self.setUpCourse()\n- self.setUpSubmissions()\n+ @classmethod\n+ def setUpTestData(cls):\n+ cls.setUpCourse()\n+ cls.setUpSubmissions()\n \n+ @classmethod\n def setUpCourse(self):\n \n self.now = timezone.now()\n@@ -140,6 +142,7 @@\n max_group_size=2,\n )\n \n+ @classmethod\n def setUpSubmissions(self):\n \n self.submission = Submission.objects.create(\n", "issue": "Use setUpTestData in tests \n`setUp` is ran before every test method but `setUpTestData` is run only once for the whole test class. Each test method is run in a transaction and rolled back at the end, so there is no need to re-create the database data for each method separately. Most of the time in unit tests seems to be spent on creating data for the tests, so it would be a massive speed up to reuse the data by using `setUpTestData`.\r\n\r\n`setUpTestData` takes the class as an argument but it can be used similarly to `setUp`: just assign the properties to the class (e.g. `cls.instance = CourseInstance...`) and the property can be accessed normally through the object (e.g. `self.instance`).\n", "before_files": [{"content": "from datetime import timedelta\nfrom django.conf import settings\nfrom django.contrib.auth.models import User\nfrom django.test import TestCase\nfrom django.utils import timezone\n\nfrom course.models import (\n Course,\n CourseInstance,\n CourseModule,\n LearningObjectCategory,\n)\nfrom exercise.models import (\n BaseExercise,\n StaticExercise,\n Submission,\n)\n\n\nclass CourseTestCase(TestCase):\n\n def setUp(self):\n self.setUpCourse()\n self.setUpSubmissions()\n\n def setUpCourse(self):\n\n self.now = timezone.now()\n self.tomorrow = self.now + timedelta(days=1)\n self.two_days_after = self.now + timedelta(days=2)\n self.three_days_after = self.now + timedelta(days=3)\n self.yesterday = self.now - timedelta(days=1)\n self.two_days_before = self.now - timedelta(days=2)\n\n self.user = User(username='testUser')\n self.user.set_password('testPassword')\n self.user.save()\n\n self.teacher = User(username='testTeacher')\n self.teacher.set_password('testPassword')\n self.teacher.save()\n\n self.student = User(username='testStudent')\n self.student.set_password('testPassword')\n self.student.save()\n self.student.userprofile.student_id = \"123TEST\"\n self.student.userprofile.organization = settings.LOCAL_ORGANIZATION\n self.student.userprofile.save()\n\n self.course = Course.objects.create(\n url=\"course\",\n name=\"Test Course\",\n code=\"123456\",\n )\n\n self.instance = CourseInstance.objects.create(\n course=self.course,\n url=\"instance\",\n instance_name=\"2016\",\n starting_time=self.now,\n ending_time=self.tomorrow,\n )\n self.instance.add_teacher(self.teacher.userprofile)\n self.instance.enroll_student(self.student)\n\n self.module = CourseModule.objects.create(\n course_instance=self.instance,\n url=\"module\",\n name=\"Test Module\",\n points_to_pass=10,\n opening_time=self.now,\n closing_time=self.tomorrow,\n late_submissions_allowed=True,\n late_submission_deadline=self.two_days_after,\n late_submission_penalty=0.2\n )\n self.module2 = CourseModule.objects.create(\n course_instance=self.instance,\n url=\"module2\",\n name=\"Test Module 2\",\n points_to_pass=0,\n opening_time=self.tomorrow,\n closing_time=self.two_days_after,\n )\n self.module0 = CourseModule.objects.create(\n 
course_instance=self.instance,\n url=\"module0\",\n name=\"Past Module\",\n points_to_pass=10,\n opening_time=self.two_days_before,\n closing_time=self.yesterday,\n )\n self.category = LearningObjectCategory.objects.create(\n course_instance=self.instance,\n name=\"Test Category\",\n points_to_pass=5,\n )\n\n self.exercise = StaticExercise.objects.create(\n course_module=self.module,\n category=self.category,\n url='e1',\n name=\"Test Exercise\",\n exercise_page_content='$$exercise$$content',\n submission_page_content='$$exercise$$received',\n points_to_pass=0,\n max_points=100,\n order=1,\n )\n self.exercise2 = StaticExercise.objects.create(\n course_module=self.module,\n category=self.category,\n url='e2',\n name=\"Test Exercise 2\",\n exercise_page_content='$$exercise2$$content',\n submission_page_content='$$exercise2$$received',\n points_to_pass=10,\n max_points=100,\n order=2,\n )\n self.exercise3 = StaticExercise.objects.create(\n course_module=self.module2,\n category=self.category,\n url='e3',\n name=\"Test Exercise 3\",\n exercise_page_content='$$exercise3$$content',\n submission_page_content='$$exercise3$$received',\n points_to_pass=0,\n max_points=100,\n )\n self.exercise0 = BaseExercise.objects.create(\n course_module=self.module0,\n category=self.category,\n url='b0',\n name=\"Base Exercise 0\",\n service_url=\"http://localhost/\",\n points_to_pass=0,\n max_points=100,\n min_group_size=1,\n max_group_size=2,\n )\n\n def setUpSubmissions(self):\n\n self.submission = Submission.objects.create(\n exercise=self.exercise,\n submission_data={'submission':1},\n feedback='$$submission$$feedback',\n )\n self.submission.submitters.add(self.student.userprofile)\n self.submission.set_points(1,2)\n self.submission.set_ready()\n self.submission.save()\n\n self.submission2 = Submission.objects.create(\n exercise=self.exercise,\n submission_data={'submission':2},\n )\n self.submission2.submitters.add(self.student.userprofile)\n\n self.submission3 = Submission.objects.create(\n exercise=self.exercise2,\n submission_data={'submission':3},\n )\n self.submission3.submitters.add(self.student.userprofile)\n self.submission3.submitters.add(self.user.userprofile)\n", "path": "lib/testdata.py"}], "after_files": [{"content": "from datetime import timedelta\nfrom django.conf import settings\nfrom django.contrib.auth.models import User\nfrom django.test import TestCase\nfrom django.utils import timezone\n\nfrom course.models import (\n Course,\n CourseInstance,\n CourseModule,\n LearningObjectCategory,\n)\nfrom exercise.models import (\n BaseExercise,\n StaticExercise,\n Submission,\n)\n\n\nclass CourseTestCase(TestCase):\n\n @classmethod\n def setUpTestData(cls):\n cls.setUpCourse()\n cls.setUpSubmissions()\n\n @classmethod\n def setUpCourse(self):\n\n self.now = timezone.now()\n self.tomorrow = self.now + timedelta(days=1)\n self.two_days_after = self.now + timedelta(days=2)\n self.three_days_after = self.now + timedelta(days=3)\n self.yesterday = self.now - timedelta(days=1)\n self.two_days_before = self.now - timedelta(days=2)\n\n self.user = User(username='testUser')\n self.user.set_password('testPassword')\n self.user.save()\n\n self.teacher = User(username='testTeacher')\n self.teacher.set_password('testPassword')\n self.teacher.save()\n\n self.student = User(username='testStudent')\n self.student.set_password('testPassword')\n self.student.save()\n self.student.userprofile.student_id = \"123TEST\"\n self.student.userprofile.organization = settings.LOCAL_ORGANIZATION\n 
self.student.userprofile.save()\n\n self.course = Course.objects.create(\n url=\"course\",\n name=\"Test Course\",\n code=\"123456\",\n )\n\n self.instance = CourseInstance.objects.create(\n course=self.course,\n url=\"instance\",\n instance_name=\"2016\",\n starting_time=self.now,\n ending_time=self.tomorrow,\n )\n self.instance.add_teacher(self.teacher.userprofile)\n self.instance.enroll_student(self.student)\n\n self.module = CourseModule.objects.create(\n course_instance=self.instance,\n url=\"module\",\n name=\"Test Module\",\n points_to_pass=10,\n opening_time=self.now,\n closing_time=self.tomorrow,\n late_submissions_allowed=True,\n late_submission_deadline=self.two_days_after,\n late_submission_penalty=0.2\n )\n self.module2 = CourseModule.objects.create(\n course_instance=self.instance,\n url=\"module2\",\n name=\"Test Module 2\",\n points_to_pass=0,\n opening_time=self.tomorrow,\n closing_time=self.two_days_after,\n )\n self.module0 = CourseModule.objects.create(\n course_instance=self.instance,\n url=\"module0\",\n name=\"Past Module\",\n points_to_pass=10,\n opening_time=self.two_days_before,\n closing_time=self.yesterday,\n )\n self.category = LearningObjectCategory.objects.create(\n course_instance=self.instance,\n name=\"Test Category\",\n points_to_pass=5,\n )\n\n self.exercise = StaticExercise.objects.create(\n course_module=self.module,\n category=self.category,\n url='e1',\n name=\"Test Exercise\",\n exercise_page_content='$$exercise$$content',\n submission_page_content='$$exercise$$received',\n points_to_pass=0,\n max_points=100,\n order=1,\n )\n self.exercise2 = StaticExercise.objects.create(\n course_module=self.module,\n category=self.category,\n url='e2',\n name=\"Test Exercise 2\",\n exercise_page_content='$$exercise2$$content',\n submission_page_content='$$exercise2$$received',\n points_to_pass=10,\n max_points=100,\n order=2,\n )\n self.exercise3 = StaticExercise.objects.create(\n course_module=self.module2,\n category=self.category,\n url='e3',\n name=\"Test Exercise 3\",\n exercise_page_content='$$exercise3$$content',\n submission_page_content='$$exercise3$$received',\n points_to_pass=0,\n max_points=100,\n )\n self.exercise0 = BaseExercise.objects.create(\n course_module=self.module0,\n category=self.category,\n url='b0',\n name=\"Base Exercise 0\",\n service_url=\"http://localhost/\",\n points_to_pass=0,\n max_points=100,\n min_group_size=1,\n max_group_size=2,\n )\n\n @classmethod\n def setUpSubmissions(self):\n\n self.submission = Submission.objects.create(\n exercise=self.exercise,\n submission_data={'submission':1},\n feedback='$$submission$$feedback',\n )\n self.submission.submitters.add(self.student.userprofile)\n self.submission.set_points(1,2)\n self.submission.set_ready()\n self.submission.save()\n\n self.submission2 = Submission.objects.create(\n exercise=self.exercise,\n submission_data={'submission':2},\n )\n self.submission2.submitters.add(self.student.userprofile)\n\n self.submission3 = Submission.objects.create(\n exercise=self.exercise2,\n submission_data={'submission':3},\n )\n self.submission3.submitters.add(self.student.userprofile)\n self.submission3.submitters.add(self.user.userprofile)\n", "path": "lib/testdata.py"}]}
| 1,886 | 159 |
gh_patches_debug_25730
|
rasdani/github-patches
|
git_diff
|
googleapis__google-cloud-python-2828
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Discussion] oauth2client credentials in a post-oauth2client world.
We recently switched `core` to use [`google-auth`](https://google-auth.readthedocs.io) in #2639 and completely removed support for `oauth2client`.
It is possible that some users are explicitly passing `oauth2client` credentials into `Client` constructors, e.g.:
```python
credentials = oauth2client.contrib.gce.AppAssertionCredentials()
storage_client = storage.Client(credentials=credentials)
```
After `google-cloud-core==0.22.0` is released this code will no longer work; only credentials from `google-auth` will work. This could break some user code, and there may be valid reasons for sticking with oauth2client in the near term (user credentials and storage, for example).
`google-auth` offers a way to [`convert`](https://github.com/GoogleCloudPlatform/google-auth-library-python/blob/443df66cb6e7960205c1d2108e67220d27408efe/google/auth/_oauth2client.py#L136) credentials:
```python
import google.auth._oauth2client
new_credentials = google.auth._oauth2client.convert(old_credentials)
```
We have two options:
1. Tell users who still want to use oauth2client to use `google.auth._oauth2client` to convert their credentials.
2. Add code in our `Client` base class to automatically convert credentials and issue a warning.
I strongly prefer (1) so that we do not prolong oauth2client's lifetime and so that we can more quickly free ourselves to switch http libraries.
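For illustration, a sketch of what option (1) looks like from a user's side on Compute Engine (assuming `google-cloud-storage` is installed; the conversion helper is the private one linked above, and `google.auth.compute_engine.Credentials` is the native google-auth equivalent):

```python
import google.auth._oauth2client
import google.auth.compute_engine
import oauth2client.contrib.gce
from google.cloud import storage

# Convert an existing oauth2client credentials object into its google-auth
# equivalent before handing it to a Client constructor...
old_credentials = oauth2client.contrib.gce.AppAssertionCredentials()
credentials = google.auth._oauth2client.convert(old_credentials)
client = storage.Client(credentials=credentials)

# ...or drop oauth2client entirely and build the google-auth object directly.
client = storage.Client(credentials=google.auth.compute_engine.Credentials())
```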
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/google/cloud/client.py`
Content:
```
1 # Copyright 2015 Google Inc.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Base classes for client used to interact with Google Cloud APIs."""
16
17 from google.oauth2 import service_account
18 import six
19
20 from google.cloud._helpers import _determine_default_project
21 from google.cloud._http import Connection
22 from google.cloud.credentials import get_credentials
23
24
25 class _ClientFactoryMixin(object):
26 """Mixin to allow factories that create credentials.
27
28 .. note::
29
30 This class is virtual.
31 """
32
33 @classmethod
34 def from_service_account_json(cls, json_credentials_path, *args, **kwargs):
35 """Factory to retrieve JSON credentials while creating client.
36
37 :type json_credentials_path: str
38 :param json_credentials_path: The path to a private key file (this file
39 was given to you when you created the
40 service account). This file must contain
41 a JSON object with a private key and
42 other credentials information (downloaded
43 from the Google APIs console).
44
45 :type args: tuple
46 :param args: Remaining positional arguments to pass to constructor.
47
48 :type kwargs: dict
49 :param kwargs: Remaining keyword arguments to pass to constructor.
50
51 :rtype: :class:`google.cloud.pubsub.client.Client`
52 :returns: The client created with the retrieved JSON credentials.
53 :raises: :class:`TypeError` if there is a conflict with the kwargs
54 and the credentials created by the factory.
55 """
56 if 'credentials' in kwargs:
57 raise TypeError('credentials must not be in keyword arguments')
58 credentials = service_account.Credentials.from_service_account_file(
59 json_credentials_path)
60 kwargs['credentials'] = credentials
61 return cls(*args, **kwargs)
62
63
64 class Client(_ClientFactoryMixin):
65 """Client to bundle configuration needed for API requests.
66
67 Assumes that the associated ``_connection_class`` only accepts
68 ``http`` and ``credentials`` in its constructor.
69
70 :type credentials: :class:`google.auth.credentials.Credentials` or
71 :class:`NoneType`
72 :param credentials: The OAuth2 Credentials to use for the connection
73 owned by this client. If not passed (and if no ``http``
74 object is passed), falls back to the default inferred
75 from the environment.
76
77 :type http: :class:`httplib2.Http` or class that defines ``request()``.
78 :param http: An optional HTTP object to make requests. If not passed, an
79 ``http`` object is created that is bound to the
80 ``credentials`` for the current object.
81 """
82
83 _connection_class = Connection
84
85 def __init__(self, credentials=None, http=None):
86 if credentials is None and http is None:
87 credentials = get_credentials()
88 self._connection = self._connection_class(
89 credentials=credentials, http=http)
90
91
92 class _ClientProjectMixin(object):
93 """Mixin to allow setting the project on the client.
94
95 :type project: str
96 :param project: the project which the client acts on behalf of. If not
97 passed falls back to the default inferred from the
98 environment.
99
100 :raises: :class:`EnvironmentError` if the project is neither passed in nor
101 set in the environment. :class:`ValueError` if the project value
102 is invalid.
103 """
104
105 def __init__(self, project=None):
106 project = self._determine_default(project)
107 if project is None:
108 raise EnvironmentError('Project was not passed and could not be '
109 'determined from the environment.')
110 if isinstance(project, six.binary_type):
111 project = project.decode('utf-8')
112 if not isinstance(project, six.string_types):
113 raise ValueError('Project must be a string.')
114 self.project = project
115
116 @staticmethod
117 def _determine_default(project):
118 """Helper: use default project detection."""
119 return _determine_default_project(project)
120
121
122 class JSONClient(Client, _ClientProjectMixin):
123 """Client for Google JSON-based API.
124
125 Assumes such APIs use the ``project`` and the client needs to store this
126 value.
127
128 :type project: str
129 :param project: the project which the client acts on behalf of. If not
130 passed falls back to the default inferred from the
131 environment.
132
133 :type credentials: :class:`google.auth.credentials.Credentials` or
134 :class:`NoneType`
135 :param credentials: The OAuth2 Credentials to use for the connection
136 owned by this client. If not passed (and if no ``http``
137 object is passed), falls back to the default inferred
138 from the environment.
139
140 :type http: :class:`httplib2.Http` or class that defines ``request()``.
141 :param http: An optional HTTP object to make requests. If not passed, an
142 ``http`` object is created that is bound to the
143 ``credentials`` for the current object.
144
145 :raises: :class:`ValueError` if the project is neither passed in nor
146 set in the environment.
147 """
148
149 def __init__(self, project=None, credentials=None, http=None):
150 _ClientProjectMixin.__init__(self, project=project)
151 Client.__init__(self, credentials=credentials, http=http)
152
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/core/google/cloud/client.py b/core/google/cloud/client.py
--- a/core/google/cloud/client.py
+++ b/core/google/cloud/client.py
@@ -14,6 +14,7 @@
"""Base classes for client used to interact with Google Cloud APIs."""
+import google.auth.credentials
from google.oauth2 import service_account
import six
@@ -22,6 +23,13 @@
from google.cloud.credentials import get_credentials
+_GOOGLE_AUTH_CREDENTIALS_HELP = (
+ 'This library only supports credentials from google-auth-library-python. '
+ 'See https://google-cloud-python.readthedocs.io/en/latest/'
+ 'google-cloud-auth.html for help on authentication with this library.'
+)
+
+
class _ClientFactoryMixin(object):
"""Mixin to allow factories that create credentials.
@@ -83,6 +91,10 @@
_connection_class = Connection
def __init__(self, credentials=None, http=None):
+ if (credentials is not None and
+ not isinstance(
+ credentials, google.auth.credentials.Credentials)):
+ raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP)
if credentials is None and http is None:
credentials = get_credentials()
self._connection = self._connection_class(
|
{"golden_diff": "diff --git a/core/google/cloud/client.py b/core/google/cloud/client.py\n--- a/core/google/cloud/client.py\n+++ b/core/google/cloud/client.py\n@@ -14,6 +14,7 @@\n \n \"\"\"Base classes for client used to interact with Google Cloud APIs.\"\"\"\n \n+import google.auth.credentials\n from google.oauth2 import service_account\n import six\n \n@@ -22,6 +23,13 @@\n from google.cloud.credentials import get_credentials\n \n \n+_GOOGLE_AUTH_CREDENTIALS_HELP = (\n+ 'This library only supports credentials from google-auth-library-python. '\n+ 'See https://google-cloud-python.readthedocs.io/en/latest/'\n+ 'google-cloud-auth.html for help on authentication with this library.'\n+)\n+\n+\n class _ClientFactoryMixin(object):\n \"\"\"Mixin to allow factories that create credentials.\n \n@@ -83,6 +91,10 @@\n _connection_class = Connection\n \n def __init__(self, credentials=None, http=None):\n+ if (credentials is not None and\n+ not isinstance(\n+ credentials, google.auth.credentials.Credentials)):\n+ raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP)\n if credentials is None and http is None:\n credentials = get_credentials()\n self._connection = self._connection_class(\n", "issue": "[Discussion] oauth2client credentials in a post-oauth2client world.\nWe recently switched `core` to use [`google-auth`](https://google-auth.readthedocs.io) in #2639 and completely removed support for `oauth2client`.\r\n\r\nIt is possible that some users are explicitly passing `oauth2client` credentials into `Client` constructors, e.g.:\r\n\r\n```python\r\ncredentials = oauth2client.contrib.gce.AppAppsertionCredentials()\r\nstorage_client = storage.Client(credentials=credentials)\r\n```\r\n\r\nAfter `google-cloud-core==0.22.0` is released this code will no longer work. Only credentials from `google-auth` will work. There is a potential that we could break some user code and there may be some valid reasons for sticking with oauth2client in the near-term (user credentials and storage, for example).\r\n\r\n`google-auth` offers a way to [`convert`](https://github.com/GoogleCloudPlatform/google-auth-library-python/blob/443df66cb6e7960205c1d2108e67220d27408efe/google/auth/_oauth2client.py#L136) credentials:\r\n\r\n```python\r\nimport google.auth._oauth2client\r\n\r\nnew_credentials = google.auth._oauth2client.convert(old_credentials)\r\n```\r\n\r\nWe have two options:\r\n\r\n1. Tell users who still want to use oauth2client to use `google.auth._oauth2client` to convert their credentials.\r\n2. 
Add code in our `Client` base class to automatically convert credentials and issue a warning.\r\n\r\nI strongly prefer (1) so that we do not prolong oauth2client's lifetime and so that we can more quickly free ourselves to switch http libraries.\n", "before_files": [{"content": "# Copyright 2015 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Base classes for client used to interact with Google Cloud APIs.\"\"\"\n\nfrom google.oauth2 import service_account\nimport six\n\nfrom google.cloud._helpers import _determine_default_project\nfrom google.cloud._http import Connection\nfrom google.cloud.credentials import get_credentials\n\n\nclass _ClientFactoryMixin(object):\n \"\"\"Mixin to allow factories that create credentials.\n\n .. note::\n\n This class is virtual.\n \"\"\"\n\n @classmethod\n def from_service_account_json(cls, json_credentials_path, *args, **kwargs):\n \"\"\"Factory to retrieve JSON credentials while creating client.\n\n :type json_credentials_path: str\n :param json_credentials_path: The path to a private key file (this file\n was given to you when you created the\n service account). This file must contain\n a JSON object with a private key and\n other credentials information (downloaded\n from the Google APIs console).\n\n :type args: tuple\n :param args: Remaining positional arguments to pass to constructor.\n\n :type kwargs: dict\n :param kwargs: Remaining keyword arguments to pass to constructor.\n\n :rtype: :class:`google.cloud.pubsub.client.Client`\n :returns: The client created with the retrieved JSON credentials.\n :raises: :class:`TypeError` if there is a conflict with the kwargs\n and the credentials created by the factory.\n \"\"\"\n if 'credentials' in kwargs:\n raise TypeError('credentials must not be in keyword arguments')\n credentials = service_account.Credentials.from_service_account_file(\n json_credentials_path)\n kwargs['credentials'] = credentials\n return cls(*args, **kwargs)\n\n\nclass Client(_ClientFactoryMixin):\n \"\"\"Client to bundle configuration needed for API requests.\n\n Assumes that the associated ``_connection_class`` only accepts\n ``http`` and ``credentials`` in its constructor.\n\n :type credentials: :class:`google.auth.credentials.Credentials` or\n :class:`NoneType`\n :param credentials: The OAuth2 Credentials to use for the connection\n owned by this client. If not passed (and if no ``http``\n object is passed), falls back to the default inferred\n from the environment.\n\n :type http: :class:`httplib2.Http` or class that defines ``request()``.\n :param http: An optional HTTP object to make requests. 
If not passed, an\n ``http`` object is created that is bound to the\n ``credentials`` for the current object.\n \"\"\"\n\n _connection_class = Connection\n\n def __init__(self, credentials=None, http=None):\n if credentials is None and http is None:\n credentials = get_credentials()\n self._connection = self._connection_class(\n credentials=credentials, http=http)\n\n\nclass _ClientProjectMixin(object):\n \"\"\"Mixin to allow setting the project on the client.\n\n :type project: str\n :param project: the project which the client acts on behalf of. If not\n passed falls back to the default inferred from the\n environment.\n\n :raises: :class:`EnvironmentError` if the project is neither passed in nor\n set in the environment. :class:`ValueError` if the project value\n is invalid.\n \"\"\"\n\n def __init__(self, project=None):\n project = self._determine_default(project)\n if project is None:\n raise EnvironmentError('Project was not passed and could not be '\n 'determined from the environment.')\n if isinstance(project, six.binary_type):\n project = project.decode('utf-8')\n if not isinstance(project, six.string_types):\n raise ValueError('Project must be a string.')\n self.project = project\n\n @staticmethod\n def _determine_default(project):\n \"\"\"Helper: use default project detection.\"\"\"\n return _determine_default_project(project)\n\n\nclass JSONClient(Client, _ClientProjectMixin):\n \"\"\"Client for Google JSON-based API.\n\n Assumes such APIs use the ``project`` and the client needs to store this\n value.\n\n :type project: str\n :param project: the project which the client acts on behalf of. If not\n passed falls back to the default inferred from the\n environment.\n\n :type credentials: :class:`google.auth.credentials.Credentials` or\n :class:`NoneType`\n :param credentials: The OAuth2 Credentials to use for the connection\n owned by this client. If not passed (and if no ``http``\n object is passed), falls back to the default inferred\n from the environment.\n\n :type http: :class:`httplib2.Http` or class that defines ``request()``.\n :param http: An optional HTTP object to make requests. 
If not passed, an\n ``http`` object is created that is bound to the\n ``credentials`` for the current object.\n\n :raises: :class:`ValueError` if the project is neither passed in nor\n set in the environment.\n \"\"\"\n\n def __init__(self, project=None, credentials=None, http=None):\n _ClientProjectMixin.__init__(self, project=project)\n Client.__init__(self, credentials=credentials, http=http)\n", "path": "core/google/cloud/client.py"}], "after_files": [{"content": "# Copyright 2015 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Base classes for client used to interact with Google Cloud APIs.\"\"\"\n\nimport google.auth.credentials\nfrom google.oauth2 import service_account\nimport six\n\nfrom google.cloud._helpers import _determine_default_project\nfrom google.cloud._http import Connection\nfrom google.cloud.credentials import get_credentials\n\n\n_GOOGLE_AUTH_CREDENTIALS_HELP = (\n 'This library only supports credentials from google-auth-library-python. '\n 'See https://google-cloud-python.readthedocs.io/en/latest/'\n 'google-cloud-auth.html for help on authentication with this library.'\n)\n\n\nclass _ClientFactoryMixin(object):\n \"\"\"Mixin to allow factories that create credentials.\n\n .. note::\n\n This class is virtual.\n \"\"\"\n\n @classmethod\n def from_service_account_json(cls, json_credentials_path, *args, **kwargs):\n \"\"\"Factory to retrieve JSON credentials while creating client.\n\n :type json_credentials_path: str\n :param json_credentials_path: The path to a private key file (this file\n was given to you when you created the\n service account). This file must contain\n a JSON object with a private key and\n other credentials information (downloaded\n from the Google APIs console).\n\n :type args: tuple\n :param args: Remaining positional arguments to pass to constructor.\n\n :type kwargs: dict\n :param kwargs: Remaining keyword arguments to pass to constructor.\n\n :rtype: :class:`google.cloud.pubsub.client.Client`\n :returns: The client created with the retrieved JSON credentials.\n :raises: :class:`TypeError` if there is a conflict with the kwargs\n and the credentials created by the factory.\n \"\"\"\n if 'credentials' in kwargs:\n raise TypeError('credentials must not be in keyword arguments')\n credentials = service_account.Credentials.from_service_account_file(\n json_credentials_path)\n kwargs['credentials'] = credentials\n return cls(*args, **kwargs)\n\n\nclass Client(_ClientFactoryMixin):\n \"\"\"Client to bundle configuration needed for API requests.\n\n Assumes that the associated ``_connection_class`` only accepts\n ``http`` and ``credentials`` in its constructor.\n\n :type credentials: :class:`google.auth.credentials.Credentials` or\n :class:`NoneType`\n :param credentials: The OAuth2 Credentials to use for the connection\n owned by this client. 
If not passed (and if no ``http``\n object is passed), falls back to the default inferred\n from the environment.\n\n :type http: :class:`httplib2.Http` or class that defines ``request()``.\n :param http: An optional HTTP object to make requests. If not passed, an\n ``http`` object is created that is bound to the\n ``credentials`` for the current object.\n \"\"\"\n\n _connection_class = Connection\n\n def __init__(self, credentials=None, http=None):\n if (credentials is not None and\n not isinstance(\n credentials, google.auth.credentials.Credentials)):\n raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP)\n if credentials is None and http is None:\n credentials = get_credentials()\n self._connection = self._connection_class(\n credentials=credentials, http=http)\n\n\nclass _ClientProjectMixin(object):\n \"\"\"Mixin to allow setting the project on the client.\n\n :type project: str\n :param project: the project which the client acts on behalf of. If not\n passed falls back to the default inferred from the\n environment.\n\n :raises: :class:`EnvironmentError` if the project is neither passed in nor\n set in the environment. :class:`ValueError` if the project value\n is invalid.\n \"\"\"\n\n def __init__(self, project=None):\n project = self._determine_default(project)\n if project is None:\n raise EnvironmentError('Project was not passed and could not be '\n 'determined from the environment.')\n if isinstance(project, six.binary_type):\n project = project.decode('utf-8')\n if not isinstance(project, six.string_types):\n raise ValueError('Project must be a string.')\n self.project = project\n\n @staticmethod\n def _determine_default(project):\n \"\"\"Helper: use default project detection.\"\"\"\n return _determine_default_project(project)\n\n\nclass JSONClient(Client, _ClientProjectMixin):\n \"\"\"Client for Google JSON-based API.\n\n Assumes such APIs use the ``project`` and the client needs to store this\n value.\n\n :type project: str\n :param project: the project which the client acts on behalf of. If not\n passed falls back to the default inferred from the\n environment.\n\n :type credentials: :class:`google.auth.credentials.Credentials` or\n :class:`NoneType`\n :param credentials: The OAuth2 Credentials to use for the connection\n owned by this client. If not passed (and if no ``http``\n object is passed), falls back to the default inferred\n from the environment.\n\n :type http: :class:`httplib2.Http` or class that defines ``request()``.\n :param http: An optional HTTP object to make requests. If not passed, an\n ``http`` object is created that is bound to the\n ``credentials`` for the current object.\n\n :raises: :class:`ValueError` if the project is neither passed in nor\n set in the environment.\n \"\"\"\n\n def __init__(self, project=None, credentials=None, http=None):\n _ClientProjectMixin.__init__(self, project=project)\n Client.__init__(self, credentials=credentials, http=http)\n", "path": "core/google/cloud/client.py"}]}
| 2,207 | 273 |
gh_patches_debug_42032
|
rasdani/github-patches
|
git_diff
|
translate__pootle-6200
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fix fk issues in 2.6 migration/setup
It seems that (at least for some users) there are foreign key constraints that are not removed during migration.
We probably need to add something here: https://github.com/translate/pootle/blob/stable/2.6.2/pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py
Specifically, we need to remove the FK constraints that the unit table's commented_by/submitted_by columns place on this table.
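A rough sketch of the kind of change being suggested, written as a South `forwards()` step (the table and column names are inferred from the issue; the real schema may have further constraints to drop):

```python
from south.db import db
from south.v2 import SchemaMigration


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Drop the FK constraints that still point at the old profile table
        # before pootle_app_pootleprofile itself is deleted.
        db.delete_foreign_key('pootle_store_unit', 'commented_by_id')
        db.delete_foreign_key('pootle_store_unit', 'submitted_by_id')

    def backwards(self, orm):
        raise RuntimeError("Cannot reverse this migration.")
```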
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pootle/apps/pootle_app/management/commands/setup.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 #
4 # Copyright 2013, 2014 Zuza Software Foundation
5 #
6 # This file is part of Pootle.
7 #
8 # Pootle is free software; you can redistribute it and/or modify it under the
9 # terms of the GNU General Public License as published by the Free Software
10 # Foundation; either version 2 of the License, or (at your option) any later
11 # version.
12 #
13 # Pootle is distributed in the hope that it will be useful, but WITHOUT ANY
14 # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
15 # A PARTICULAR PURPOSE. See the GNU General Public License for more details.
16 #
17 # You should have received a copy of the GNU General Public License along with
18 # Pootle; if not, see <http://www.gnu.org/licenses/>.
19
20 import logging
21 import os
22
23 # This must be run before importing Django.
24 os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'
25
26 from django.core.management import call_command
27 from django.core.management.base import CommandError, NoArgsCommand
28
29 from pootle.__version__ import build as NEW_POOTLE_BUILD
30 from pootle_app.models.pootle_config import get_pootle_build
31
32
33 class Command(NoArgsCommand):
34 help = 'Runs the install/upgrade machinery.'
35
36 def handle_noargs(self, **options):
37 """Run the install or upgrade machinery.
38
39 If there is an up-to-date Pootle setup then no action is performed.
40 """
41 current_buildversion = get_pootle_build()
42
43 if not current_buildversion:
44 raise CommandError('Pootle 2.6.1 is not meant to be used in real '
45 'world deployments.'
46 '\n\n'
47 'If you want to install a fresh Pootle then '
48 'install Pootle 2.7.0 or later.'
49 '\n\n'
50 'Otherwise you are upgrading Pootle and you '
51 'already have have upgraded up to the 2.6.1 '
52 'stage, so you must now proceed now with the '
53 'final upgrade to Pootle 2.7.0 or later.')
54 elif current_buildversion < 22000:
55 # Trying to upgrade a deployment older than Pootle 2.5.0 for which
56 # we don't provide a direct upgrade.
57 raise CommandError('This Pootle installation is too old. Please '
58 'upgrade first to 2.5.1.3 before upgrading to '
59 'this version.')
60 elif current_buildversion < NEW_POOTLE_BUILD:
61 logging.info('Upgrading existing Pootle installation.')
62
63 call_command('syncdb', interactive=False)
64
65 if current_buildversion < 25100:
66 # We are upgrading from a pre-South installation (before Pootle
67 # 2.5.1), so it is necessary to fake the first migration for
68 # some apps.
69 OLD_APPS = ("pootle_app", "pootle_language",
70 "pootle_notifications", "pootle_project",
71 "pootle_statistics", "pootle_store",
72 "pootle_translationproject", "staticpages")
73
74 for app in OLD_APPS:
75 call_command("migrate", app, "0001", fake=True, interactive=False)
76
77 call_command('migrate', interactive=False)
78 call_command('upgrade')
79
80 logging.warning('\n\n\n Warning: Pootle 2.6.1 is an interim '
81 'release (a migration step to Pootle'
82 '\n 2.7.0). Do not use Pootle 2.6.1 '
83 'for any deployment.\n\n')
84
```
Path: `pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 from south.utils import datetime_utils as datetime
3 from south.db import db
4 from south.v2 import SchemaMigration
5 from django.db import connection
6
7
8 class Migration(SchemaMigration):
9 depends_on = (
10 ("pootle_tagging", "0003_del_tag__del_taggeditem"),
11 )
12
13 no_dry_run = True
14
15 def forwards(self, orm):
16 # Deleting remnants of model 'PootleProfile'
17 if u'pootle_app_pootleprofile' in connection.introspection.table_names():
18 db.delete_table(u'pootle_app_pootleprofile')
19
20
21 def backwards(self, orm):
22 raise RuntimeError("Cannot reverse this migration.")
23
24 models = {
25 u'accounts.user': {
26 'Meta': {'object_name': 'User'},
27 '_unit_rows': ('django.db.models.fields.SmallIntegerField', [], {'default': '9', 'db_column': "'unit_rows'"}),
28 'alt_src_langs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'user_alt_src_langs'", 'blank': 'True', 'db_index': 'True', 'to': u"orm['pootle_language.Language']"}),
29 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
30 'email': ('django.db.models.fields.EmailField', [], {'max_length': '255'}),
31 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
32 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
33 u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
34 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
35 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
36 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
37 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
38 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
39 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
40 },
41 u'auth.group': {
42 'Meta': {'object_name': 'Group'},
43 u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
44 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
45 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
46 },
47 u'auth.permission': {
48 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
49 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
50 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
51 u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
52 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
53 },
54 u'contenttypes.contenttype': {
55 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
56 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
57 u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
58 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
59 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
60 },
61 'pootle_app.directory': {
62 'Meta': {'ordering': "['name']", 'object_name': 'Directory'},
63 u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
64 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
65 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'child_dirs'", 'null': 'True', 'to': "orm['pootle_app.Directory']"}),
66 'pootle_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
67 },
68 u'pootle_language.language': {
69 'Meta': {'ordering': "['code']", 'object_name': 'Language', 'db_table': "'pootle_app_language'"},
70 'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
71 'description': ('pootle.core.markup.fields.MarkupField', [], {'blank': 'True'}),
72 'directory': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['pootle_app.Directory']", 'unique': 'True'}),
73 'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
74 u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
75 'nplurals': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
76 'pluralequation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
77 'specialchars': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
78 }
79 }
80
81 complete_apps = ['accounts']
82
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py b/pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py
--- a/pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py
+++ b/pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py
@@ -14,6 +14,9 @@
def forwards(self, orm):
# Deleting remnants of model 'PootleProfile'
+ db.delete_foreign_key('pootle_store_unit', 'commented_by_id')
+ db.delete_foreign_key('pootle_store_unit', 'submitted_by_id')
+ db.delete_foreign_key('pootle_app_submission', 'submitter_id')
if u'pootle_app_pootleprofile' in connection.introspection.table_names():
db.delete_table(u'pootle_app_pootleprofile')
diff --git a/pootle/apps/pootle_app/management/commands/setup.py b/pootle/apps/pootle_app/management/commands/setup.py
--- a/pootle/apps/pootle_app/management/commands/setup.py
+++ b/pootle/apps/pootle_app/management/commands/setup.py
@@ -26,7 +26,8 @@
from django.core.management import call_command
from django.core.management.base import CommandError, NoArgsCommand
-from pootle.__version__ import build as NEW_POOTLE_BUILD
+from pootle.__version__ import (build as NEW_POOTLE_BUILD,
+ sver as pootle_version)
from pootle_app.models.pootle_config import get_pootle_build
@@ -41,16 +42,17 @@
current_buildversion = get_pootle_build()
if not current_buildversion:
- raise CommandError('Pootle 2.6.1 is not meant to be used in real '
+ raise CommandError('Pootle %s is not meant to be used in real '
'world deployments.'
'\n\n'
'If you want to install a fresh Pootle then '
'install Pootle 2.7.0 or later.'
'\n\n'
'Otherwise you are upgrading Pootle and you '
- 'already have have upgraded up to the 2.6.1 '
+ 'already have have upgraded up to the %s '
'stage, so you must now proceed now with the '
- 'final upgrade to Pootle 2.7.0 or later.')
+ 'final upgrade to Pootle 2.7.0 or later.' %
+ (pootle_version, pootle_version))
elif current_buildversion < 22000:
# Trying to upgrade a deployment older than Pootle 2.5.0 for which
# we don't provide a direct upgrade.
@@ -72,12 +74,14 @@
"pootle_translationproject", "staticpages")
for app in OLD_APPS:
- call_command("migrate", app, "0001", fake=True, interactive=False)
+ call_command("migrate", app, "0001", fake=True,
+ interactive=False)
call_command('migrate', interactive=False)
call_command('upgrade')
- logging.warning('\n\n\n Warning: Pootle 2.6.1 is an interim '
+ logging.warning('\n\n\n Warning: Pootle %s is an interim '
'release (a migration step to Pootle'
- '\n 2.7.0). Do not use Pootle 2.6.1 '
- 'for any deployment.\n\n')
+ '\n 2.7.0). Do not use Pootle %s '
+ 'for any deployment.\n\n' %
+ (pootle_version, pootle_version))
|
{"golden_diff": "diff --git a/pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py b/pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py\n--- a/pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py\n+++ b/pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py\n@@ -14,6 +14,9 @@\n \n def forwards(self, orm):\n # Deleting remnants of model 'PootleProfile'\n+ db.delete_foreign_key('pootle_store_unit', 'commented_by_id')\n+ db.delete_foreign_key('pootle_store_unit', 'submitted_by_id')\n+ db.delete_foreign_key('pootle_app_submission', 'submitter_id')\n if u'pootle_app_pootleprofile' in connection.introspection.table_names():\n db.delete_table(u'pootle_app_pootleprofile')\n \ndiff --git a/pootle/apps/pootle_app/management/commands/setup.py b/pootle/apps/pootle_app/management/commands/setup.py\n--- a/pootle/apps/pootle_app/management/commands/setup.py\n+++ b/pootle/apps/pootle_app/management/commands/setup.py\n@@ -26,7 +26,8 @@\n from django.core.management import call_command\n from django.core.management.base import CommandError, NoArgsCommand\n \n-from pootle.__version__ import build as NEW_POOTLE_BUILD\n+from pootle.__version__ import (build as NEW_POOTLE_BUILD,\n+ sver as pootle_version)\n from pootle_app.models.pootle_config import get_pootle_build\n \n \n@@ -41,16 +42,17 @@\n current_buildversion = get_pootle_build()\n \n if not current_buildversion:\n- raise CommandError('Pootle 2.6.1 is not meant to be used in real '\n+ raise CommandError('Pootle %s is not meant to be used in real '\n 'world deployments.'\n '\\n\\n'\n 'If you want to install a fresh Pootle then '\n 'install Pootle 2.7.0 or later.'\n '\\n\\n'\n 'Otherwise you are upgrading Pootle and you '\n- 'already have have upgraded up to the 2.6.1 '\n+ 'already have have upgraded up to the %s '\n 'stage, so you must now proceed now with the '\n- 'final upgrade to Pootle 2.7.0 or later.')\n+ 'final upgrade to Pootle 2.7.0 or later.' %\n+ (pootle_version, pootle_version))\n elif current_buildversion < 22000:\n # Trying to upgrade a deployment older than Pootle 2.5.0 for which\n # we don't provide a direct upgrade.\n@@ -72,12 +74,14 @@\n \"pootle_translationproject\", \"staticpages\")\n \n for app in OLD_APPS:\n- call_command(\"migrate\", app, \"0001\", fake=True, interactive=False)\n+ call_command(\"migrate\", app, \"0001\", fake=True,\n+ interactive=False)\n \n call_command('migrate', interactive=False)\n call_command('upgrade')\n \n- logging.warning('\\n\\n\\n Warning: Pootle 2.6.1 is an interim '\n+ logging.warning('\\n\\n\\n Warning: Pootle %s is an interim '\n 'release (a migration step to Pootle'\n- '\\n 2.7.0). Do not use Pootle 2.6.1 '\n- 'for any deployment.\\n\\n')\n+ '\\n 2.7.0). 
Do not use Pootle %s '\n+ 'for any deployment.\\n\\n' %\n+ (pootle_version, pootle_version))\n", "issue": "Fix fk issues in 2.6 migration/setup\nseems like (at least for some users) there is foreign key constraints that are not removed during migration.\r\n\r\nwe probably need to add something here https://github.com/translate/pootle/blob/stable/2.6.2/pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py\r\n\r\nspecifically, to remove fk constraints to this table on unit commented_by/submitted_by\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright 2013, 2014 Zuza Software Foundation\n#\n# This file is part of Pootle.\n#\n# Pootle is free software; you can redistribute it and/or modify it under the\n# terms of the GNU General Public License as published by the Free Software\n# Foundation; either version 2 of the License, or (at your option) any later\n# version.\n#\n# Pootle is distributed in the hope that it will be useful, but WITHOUT ANY\n# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR\n# A PARTICULAR PURPOSE. See the GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License along with\n# Pootle; if not, see <http://www.gnu.org/licenses/>.\n\nimport logging\nimport os\n\n# This must be run before importing Django.\nos.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'\n\nfrom django.core.management import call_command\nfrom django.core.management.base import CommandError, NoArgsCommand\n\nfrom pootle.__version__ import build as NEW_POOTLE_BUILD\nfrom pootle_app.models.pootle_config import get_pootle_build\n\n\nclass Command(NoArgsCommand):\n help = 'Runs the install/upgrade machinery.'\n\n def handle_noargs(self, **options):\n \"\"\"Run the install or upgrade machinery.\n\n If there is an up-to-date Pootle setup then no action is performed.\n \"\"\"\n current_buildversion = get_pootle_build()\n\n if not current_buildversion:\n raise CommandError('Pootle 2.6.1 is not meant to be used in real '\n 'world deployments.'\n '\\n\\n'\n 'If you want to install a fresh Pootle then '\n 'install Pootle 2.7.0 or later.'\n '\\n\\n'\n 'Otherwise you are upgrading Pootle and you '\n 'already have have upgraded up to the 2.6.1 '\n 'stage, so you must now proceed now with the '\n 'final upgrade to Pootle 2.7.0 or later.')\n elif current_buildversion < 22000:\n # Trying to upgrade a deployment older than Pootle 2.5.0 for which\n # we don't provide a direct upgrade.\n raise CommandError('This Pootle installation is too old. Please '\n 'upgrade first to 2.5.1.3 before upgrading to '\n 'this version.')\n elif current_buildversion < NEW_POOTLE_BUILD:\n logging.info('Upgrading existing Pootle installation.')\n\n call_command('syncdb', interactive=False)\n\n if current_buildversion < 25100:\n # We are upgrading from a pre-South installation (before Pootle\n # 2.5.1), so it is necessary to fake the first migration for\n # some apps.\n OLD_APPS = (\"pootle_app\", \"pootle_language\",\n \"pootle_notifications\", \"pootle_project\",\n \"pootle_statistics\", \"pootle_store\",\n \"pootle_translationproject\", \"staticpages\")\n\n for app in OLD_APPS:\n call_command(\"migrate\", app, \"0001\", fake=True, interactive=False)\n\n call_command('migrate', interactive=False)\n call_command('upgrade')\n\n logging.warning('\\n\\n\\n Warning: Pootle 2.6.1 is an interim '\n 'release (a migration step to Pootle'\n '\\n 2.7.0). 
Do not use Pootle 2.6.1 '\n 'for any deployment.\\n\\n')\n", "path": "pootle/apps/pootle_app/management/commands/setup.py"}, {"content": "# -*- coding: utf-8 -*-\nfrom south.utils import datetime_utils as datetime\nfrom south.db import db\nfrom south.v2 import SchemaMigration\nfrom django.db import connection\n\n\nclass Migration(SchemaMigration):\n depends_on = (\n (\"pootle_tagging\", \"0003_del_tag__del_taggeditem\"),\n )\n\n no_dry_run = True\n\n def forwards(self, orm):\n # Deleting remnants of model 'PootleProfile'\n if u'pootle_app_pootleprofile' in connection.introspection.table_names():\n db.delete_table(u'pootle_app_pootleprofile')\n\n\n def backwards(self, orm):\n raise RuntimeError(\"Cannot reverse this migration.\")\n\n models = {\n u'accounts.user': {\n 'Meta': {'object_name': 'User'},\n '_unit_rows': ('django.db.models.fields.SmallIntegerField', [], {'default': '9', 'db_column': \"'unit_rows'\"}),\n 'alt_src_langs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': \"'user_alt_src_langs'\", 'blank': 'True', 'db_index': 'True', 'to': u\"orm['pootle_language.Language']\"}),\n 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),\n 'email': ('django.db.models.fields.EmailField', [], {'max_length': '255'}),\n 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),\n 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': \"u'user_set'\", 'blank': 'True', 'to': u\"orm['auth.Group']\"}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),\n 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),\n 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),\n 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': \"u'user_set'\", 'blank': 'True', 'to': u\"orm['auth.Permission']\"}),\n 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})\n },\n u'auth.group': {\n 'Meta': {'object_name': 'Group'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),\n 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u\"orm['auth.Permission']\", 'symmetrical': 'False', 'blank': 'True'})\n },\n u'auth.permission': {\n 'Meta': {'ordering': \"(u'content_type__app_label', u'content_type__model', u'codename')\", 'unique_together': \"((u'content_type', u'codename'),)\", 'object_name': 'Permission'},\n 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['contenttypes.ContentType']\"}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})\n },\n u'contenttypes.contenttype': {\n 'Meta': {'ordering': \"('name',)\", 'unique_together': \"(('app_label', 'model'),)\", 'object_name': 'ContentType', 'db_table': \"'django_content_type'\"},\n 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n u'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})\n },\n 'pootle_app.directory': {\n 'Meta': {'ordering': \"['name']\", 'object_name': 'Directory'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'child_dirs'\", 'null': 'True', 'to': \"orm['pootle_app.Directory']\"}),\n 'pootle_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})\n },\n u'pootle_language.language': {\n 'Meta': {'ordering': \"['code']\", 'object_name': 'Language', 'db_table': \"'pootle_app_language'\"},\n 'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),\n 'description': ('pootle.core.markup.fields.MarkupField', [], {'blank': 'True'}),\n 'directory': ('django.db.models.fields.related.OneToOneField', [], {'to': \"orm['pootle_app.Directory']\", 'unique': 'True'}),\n 'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'nplurals': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),\n 'pluralequation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),\n 'specialchars': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})\n }\n }\n\n complete_apps = ['accounts']\n", "path": "pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright 2013, 2014 Zuza Software Foundation\n#\n# This file is part of Pootle.\n#\n# Pootle is free software; you can redistribute it and/or modify it under the\n# terms of the GNU General Public License as published by the Free Software\n# Foundation; either version 2 of the License, or (at your option) any later\n# version.\n#\n# Pootle is distributed in the hope that it will be useful, but WITHOUT ANY\n# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR\n# A PARTICULAR PURPOSE. 
See the GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License along with\n# Pootle; if not, see <http://www.gnu.org/licenses/>.\n\nimport logging\nimport os\n\n# This must be run before importing Django.\nos.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'\n\nfrom django.core.management import call_command\nfrom django.core.management.base import CommandError, NoArgsCommand\n\nfrom pootle.__version__ import (build as NEW_POOTLE_BUILD,\n sver as pootle_version)\nfrom pootle_app.models.pootle_config import get_pootle_build\n\n\nclass Command(NoArgsCommand):\n help = 'Runs the install/upgrade machinery.'\n\n def handle_noargs(self, **options):\n \"\"\"Run the install or upgrade machinery.\n\n If there is an up-to-date Pootle setup then no action is performed.\n \"\"\"\n current_buildversion = get_pootle_build()\n\n if not current_buildversion:\n raise CommandError('Pootle %s is not meant to be used in real '\n 'world deployments.'\n '\\n\\n'\n 'If you want to install a fresh Pootle then '\n 'install Pootle 2.7.0 or later.'\n '\\n\\n'\n 'Otherwise you are upgrading Pootle and you '\n 'already have have upgraded up to the %s '\n 'stage, so you must now proceed now with the '\n 'final upgrade to Pootle 2.7.0 or later.' %\n (pootle_version, pootle_version))\n elif current_buildversion < 22000:\n # Trying to upgrade a deployment older than Pootle 2.5.0 for which\n # we don't provide a direct upgrade.\n raise CommandError('This Pootle installation is too old. Please '\n 'upgrade first to 2.5.1.3 before upgrading to '\n 'this version.')\n elif current_buildversion < NEW_POOTLE_BUILD:\n logging.info('Upgrading existing Pootle installation.')\n\n call_command('syncdb', interactive=False)\n\n if current_buildversion < 25100:\n # We are upgrading from a pre-South installation (before Pootle\n # 2.5.1), so it is necessary to fake the first migration for\n # some apps.\n OLD_APPS = (\"pootle_app\", \"pootle_language\",\n \"pootle_notifications\", \"pootle_project\",\n \"pootle_statistics\", \"pootle_store\",\n \"pootle_translationproject\", \"staticpages\")\n\n for app in OLD_APPS:\n call_command(\"migrate\", app, \"0001\", fake=True,\n interactive=False)\n\n call_command('migrate', interactive=False)\n call_command('upgrade')\n\n logging.warning('\\n\\n\\n Warning: Pootle %s is an interim '\n 'release (a migration step to Pootle'\n '\\n 2.7.0). 
Do not use Pootle %s '\n 'for any deployment.\\n\\n' %\n (pootle_version, pootle_version))\n", "path": "pootle/apps/pootle_app/management/commands/setup.py"}, {"content": "# -*- coding: utf-8 -*-\nfrom south.utils import datetime_utils as datetime\nfrom south.db import db\nfrom south.v2 import SchemaMigration\nfrom django.db import connection\n\n\nclass Migration(SchemaMigration):\n depends_on = (\n (\"pootle_tagging\", \"0003_del_tag__del_taggeditem\"),\n )\n\n no_dry_run = True\n\n def forwards(self, orm):\n # Deleting remnants of model 'PootleProfile'\n db.delete_foreign_key('pootle_store_unit', 'commented_by_id')\n db.delete_foreign_key('pootle_store_unit', 'submitted_by_id')\n db.delete_foreign_key('pootle_app_submission', 'submitter_id')\n if u'pootle_app_pootleprofile' in connection.introspection.table_names():\n db.delete_table(u'pootle_app_pootleprofile')\n\n\n def backwards(self, orm):\n raise RuntimeError(\"Cannot reverse this migration.\")\n\n models = {\n u'accounts.user': {\n 'Meta': {'object_name': 'User'},\n '_unit_rows': ('django.db.models.fields.SmallIntegerField', [], {'default': '9', 'db_column': \"'unit_rows'\"}),\n 'alt_src_langs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': \"'user_alt_src_langs'\", 'blank': 'True', 'db_index': 'True', 'to': u\"orm['pootle_language.Language']\"}),\n 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),\n 'email': ('django.db.models.fields.EmailField', [], {'max_length': '255'}),\n 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),\n 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': \"u'user_set'\", 'blank': 'True', 'to': u\"orm['auth.Group']\"}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),\n 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),\n 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),\n 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': \"u'user_set'\", 'blank': 'True', 'to': u\"orm['auth.Permission']\"}),\n 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})\n },\n u'auth.group': {\n 'Meta': {'object_name': 'Group'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),\n 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u\"orm['auth.Permission']\", 'symmetrical': 'False', 'blank': 'True'})\n },\n u'auth.permission': {\n 'Meta': {'ordering': \"(u'content_type__app_label', u'content_type__model', u'codename')\", 'unique_together': \"((u'content_type', u'codename'),)\", 'object_name': 'Permission'},\n 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['contenttypes.ContentType']\"}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})\n },\n u'contenttypes.contenttype': {\n 'Meta': {'ordering': 
\"('name',)\", 'unique_together': \"(('app_label', 'model'),)\", 'object_name': 'ContentType', 'db_table': \"'django_content_type'\"},\n 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})\n },\n 'pootle_app.directory': {\n 'Meta': {'ordering': \"['name']\", 'object_name': 'Directory'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'child_dirs'\", 'null': 'True', 'to': \"orm['pootle_app.Directory']\"}),\n 'pootle_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})\n },\n u'pootle_language.language': {\n 'Meta': {'ordering': \"['code']\", 'object_name': 'Language', 'db_table': \"'pootle_app_language'\"},\n 'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),\n 'description': ('pootle.core.markup.fields.MarkupField', [], {'blank': 'True'}),\n 'directory': ('django.db.models.fields.related.OneToOneField', [], {'to': \"orm['pootle_app.Directory']\", 'unique': 'True'}),\n 'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'nplurals': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),\n 'pluralequation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),\n 'specialchars': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})\n }\n }\n\n complete_apps = ['accounts']\n", "path": "pootle/apps/accounts/migrations/0005_drop_pootle_app_pootleprofile.py"}]}
| 2,883 | 891 |
gh_patches_debug_24075
|
rasdani/github-patches
|
git_diff
|
pymedusa__Medusa-2787
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[APP SUBMITTED]: AttributeError: 'BraceAdapter' object has no attribute 'DEBUG'
### INFO
**Python Version**: `2.7.13 (v2.7.13:a06454b1afa1, Dec 17 2016, 20:53:40) [MSC v.1500 64 bit (AMD64)]`
**Operating System**: `Windows-10-10.0.15063`
**Locale**: `cp1252`
**Branch**: [develop](../tree/develop)
**Database**: `44.8`
**Commit**: pymedusa/Medusa@a2aaaf4dd43c018019a67fe1a27074b124681fa4
**Link to Log**: https://gist.github.com/66f5f5fc2f3cc2fbaec732d8d994335e
### ERROR
<pre>
2017-05-12 08:29:52 ERROR SEARCHQUEUE-FORCED-309269 :: [Rarbg] :: [a2aaaf4] Unknown exception while searching Rarbg. Error: "'BraceAdapter' object has no attribute 'DEBUG'"
Traceback (most recent call last):
File "C:\Medusa\medusa\search\core.py", line 547, in search_providers
down_cur_quality, manual_search, manual_search_type)
File "C:\Medusa\medusa\providers\generic_provider.py", line 207, in find_search_results
items_list += self.search(search_string, ep_obj=episode)
File "C:\Medusa\medusa\providers\torrent\json\rarbg.py", line 136, in search
log_level = log.DEBUG
AttributeError: 'BraceAdapter' object has no attribute 'DEBUG'
</pre>
---
_STAFF NOTIFIED_: @pymedusa/support @pymedusa/moderators
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `medusa/providers/torrent/json/rarbg.py`
Content:
```
1 # coding=utf-8
2
3 """Provider code for RARBG."""
4
5 from __future__ import unicode_literals
6
7 import datetime
8 import logging
9 import time
10 import traceback
11
12 from medusa import (
13 app,
14 tv,
15 )
16 from medusa.helper.common import convert_size, try_int
17 from medusa.logger.adapters.style import BraceAdapter
18 from medusa.providers.torrent.torrent_provider import TorrentProvider
19
20 log = BraceAdapter(logging.getLogger(__name__))
21 log.logger.addHandler(logging.NullHandler())
22
23
24 class RarbgProvider(TorrentProvider):
25 """RARBG Torrent provider."""
26
27 def __init__(self):
28 """Initialize the class."""
29 super(self.__class__, self).__init__('Rarbg')
30
31 # Credentials
32 self.public = True
33 self.token = None
34 self.token_expires = None
35
36 # URLs
37 self.url = 'https://rarbg.com' # Spec: https://torrentapi.org/apidocs_v2.txt
38 self.urls = {
39 'api': 'http://torrentapi.org/pubapi_v2.php',
40 }
41
42 # Proper Strings
43 self.proper_strings = ['{{PROPER|REPACK|REAL|RERIP}}']
44
45 # Miscellaneous Options
46 self.ranked = None
47 self.sorting = None
48
49 # Torrent Stats
50 self.minseed = None
51 self.minleech = None
52
53 # Cache
54 self.cache = tv.Cache(self, min_time=10) # only poll RARBG every 10 minutes max
55
56 def search(self, search_strings, age=0, ep_obj=None):
57 """
58 Search a provider and parse the results.
59
60 :param search_strings: A dict with mode (key) and the search value (value)
61 :param age: Not used
62 :param ep_obj: Not used
63 :returns: A list of search results (structure)
64 """
65 results = []
66 if not self.login():
67 return results
68
69 # Search Params
70 search_params = {
71 'app_id': app.RARBG_APPID,
72 'category': 'tv',
73 'min_seeders': try_int(self.minseed),
74 'min_leechers': try_int(self.minleech),
75 'limit': 100,
76 'format': 'json_extended',
77 'ranked': try_int(self.ranked),
78 'token': self.token,
79 'sort': 'last',
80 'mode': 'list',
81 }
82
83 for mode in search_strings:
84 log.debug('Search mode: {0}', mode)
85
86 if mode == 'RSS':
87 search_params['search_string'] = None
88 search_params['search_tvdb'] = None
89 else:
90 search_params['sort'] = self.sorting if self.sorting else 'seeders'
91 search_params['mode'] = 'search'
92 search_params['search_tvdb'] = self._get_tvdb_id()
93
94 for search_string in search_strings[mode]:
95 if mode != 'RSS':
96 log.debug('Search string: {search}',
97 {'search': search_string})
98 if self.ranked:
99 log.debug('Searching only ranked torrents')
100
101 search_params['search_string'] = search_string
102
103 # Check if token is still valid before search
104 if not self.login():
105 continue
106
107 # Maximum requests allowed are 1req/2sec
108 # Changing to 5 because of server clock desync
109 time.sleep(5)
110
111 search_url = self.urls['api']
112 response = self.get_url(search_url, params=search_params, returns='response')
113 if not response or not response.content:
114 log.debug('No data returned from provider')
115 continue
116
117 try:
118 jdata = response.json()
119 except ValueError:
120 log.debug('No data returned from provider')
121 continue
122
123 error = jdata.get('error')
124 error_code = jdata.get('error_code')
125 if error:
126 # List of errors: https://github.com/rarbg/torrentapi/issues/1#issuecomment-114763312
127 if error_code == 5:
128 # 5 = Too many requests per second
129 log_level = log.INFO
130 elif error_code not in (4, 8, 10, 12, 14, 20):
131 # 4 = Invalid token. Use get_token for a new one!
132 # 8, 10, 12, 14 = Cant find * in database. Are you sure this * exists?
133 # 20 = No results found
134 log_level = log.WARNING
135 else:
136 log_level = log.DEBUG
137 log.log(log_level, '{msg} Code: {code}', {'msg': error, 'code': error_code})
138 continue
139
140 results += self.parse(jdata, mode)
141
142 return results
143
144 def parse(self, data, mode):
145 """
146 Parse search results for items.
147
148 :param data: The raw response from a search
149 :param mode: The current mode used to search, e.g. RSS
150
151 :return: A list of items found
152 """
153 items = []
154
155 torrent_rows = data.get('torrent_results', {})
156
157 if not torrent_rows:
158 log.debug('Data returned from provider does not contain any torrents')
159 return items
160
161 for row in torrent_rows:
162 try:
163 title = row.pop('title')
164 download_url = row.pop('download') + self._custom_trackers
165 if not all([title, download_url]):
166 continue
167
168 seeders = row.pop('seeders')
169 leechers = row.pop('leechers')
170
171 # Filter unseeded torrent
172 if seeders < min(self.minseed, 1):
173 if mode != 'RSS':
174 log.debug("Discarding torrent because it doesn't meet the"
175 " minimum seeders: {0}. Seeders: {1}",
176 title, seeders)
177 continue
178
179 torrent_size = row.pop('size', -1)
180 size = convert_size(torrent_size) or -1
181
182 pubdate_raw = row.pop('pubdate')
183 pubdate = self._parse_pubdate(pubdate_raw)
184
185 item = {
186 'title': title,
187 'link': download_url,
188 'size': size,
189 'seeders': seeders,
190 'leechers': leechers,
191 'pubdate': pubdate,
192 }
193 if mode != 'RSS':
194 log.debug('Found result: {0} with {1} seeders and {2} leechers',
195 title, seeders, leechers)
196
197 items.append(item)
198 except (AttributeError, TypeError, KeyError, ValueError, IndexError):
199 log.error('Failed parsing provider. Traceback: {0!r}',
200 traceback.format_exc())
201
202 return items
203
204 def login(self):
205 """Login method used for logging in before doing search and torrent downloads."""
206 if self.token and self.token_expires and datetime.datetime.now() < self.token_expires:
207 return True
208
209 login_params = {
210 'get_token': 'get_token',
211 'format': 'json',
212 'app_id': app.RARBG_APPID,
213 }
214
215 response = self.get_url(self.urls['api'], params=login_params, returns='json')
216 if not response:
217 log.warning('Unable to connect to provider')
218 return False
219
220 self.token = response.get('token')
221 self.token_expires = datetime.datetime.now() + datetime.timedelta(minutes=14) if self.token else None
222 return self.token is not None
223
224
225 provider = RarbgProvider()
226
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/medusa/providers/torrent/json/rarbg.py b/medusa/providers/torrent/json/rarbg.py
--- a/medusa/providers/torrent/json/rarbg.py
+++ b/medusa/providers/torrent/json/rarbg.py
@@ -126,14 +126,14 @@
# List of errors: https://github.com/rarbg/torrentapi/issues/1#issuecomment-114763312
if error_code == 5:
# 5 = Too many requests per second
- log_level = log.INFO
+ log_level = logging.INFO
elif error_code not in (4, 8, 10, 12, 14, 20):
# 4 = Invalid token. Use get_token for a new one!
# 8, 10, 12, 14 = Cant find * in database. Are you sure this * exists?
# 20 = No results found
- log_level = log.WARNING
+ log_level = logging.WARNING
else:
- log_level = log.DEBUG
+ log_level = logging.DEBUG
log.log(log_level, '{msg} Code: {code}', {'msg': error, 'code': error_code})
continue
|
{"golden_diff": "diff --git a/medusa/providers/torrent/json/rarbg.py b/medusa/providers/torrent/json/rarbg.py\n--- a/medusa/providers/torrent/json/rarbg.py\n+++ b/medusa/providers/torrent/json/rarbg.py\n@@ -126,14 +126,14 @@\n # List of errors: https://github.com/rarbg/torrentapi/issues/1#issuecomment-114763312\n if error_code == 5:\n # 5 = Too many requests per second\n- log_level = log.INFO\n+ log_level = logging.INFO\n elif error_code not in (4, 8, 10, 12, 14, 20):\n # 4 = Invalid token. Use get_token for a new one!\n # 8, 10, 12, 14 = Cant find * in database. Are you sure this * exists?\n # 20 = No results found\n- log_level = log.WARNING\n+ log_level = logging.WARNING\n else:\n- log_level = log.DEBUG\n+ log_level = logging.DEBUG\n log.log(log_level, '{msg} Code: {code}', {'msg': error, 'code': error_code})\n continue\n", "issue": "[APP SUBMITTED]: AttributeError: 'BraceAdapter' object has no attribute 'DEBUG'\n### INFO\n**Python Version**: `2.7.13 (v2.7.13:a06454b1afa1, Dec 17 2016, 20:53:40) [MSC v.1500 64 bit (AMD64)]`\n**Operating System**: `Windows-10-10.0.15063`\n**Locale**: `cp1252`\n**Branch**: [develop](../tree/develop)\n**Database**: `44.8`\n**Commit**: pymedusa/Medusa@a2aaaf4dd43c018019a67fe1a27074b124681fa4\n**Link to Log**: https://gist.github.com/66f5f5fc2f3cc2fbaec732d8d994335e\n### ERROR\n<pre>\n2017-05-12 08:29:52 ERROR SEARCHQUEUE-FORCED-309269 :: [Rarbg] :: [a2aaaf4] Unknown exception while searching Rarbg. Error: \"'BraceAdapter' object has no attribute 'DEBUG'\"\nTraceback (most recent call last):\n File \"C:\\Medusa\\medusa\\search\\core.py\", line 547, in search_providers\n down_cur_quality, manual_search, manual_search_type)\n File \"C:\\Medusa\\medusa\\providers\\generic_provider.py\", line 207, in find_search_results\n items_list += self.search(search_string, ep_obj=episode)\n File \"C:\\Medusa\\medusa\\providers\\torrent\\json\\rarbg.py\", line 136, in search\n log_level = log.DEBUG\nAttributeError: 'BraceAdapter' object has no attribute 'DEBUG'\n</pre>\n---\n_STAFF NOTIFIED_: @pymedusa/support @pymedusa/moderators\n", "before_files": [{"content": "# coding=utf-8\n\n\"\"\"Provider code for RARBG.\"\"\"\n\nfrom __future__ import unicode_literals\n\nimport datetime\nimport logging\nimport time\nimport traceback\n\nfrom medusa import (\n app,\n tv,\n)\nfrom medusa.helper.common import convert_size, try_int\nfrom medusa.logger.adapters.style import BraceAdapter\nfrom medusa.providers.torrent.torrent_provider import TorrentProvider\n\nlog = BraceAdapter(logging.getLogger(__name__))\nlog.logger.addHandler(logging.NullHandler())\n\n\nclass RarbgProvider(TorrentProvider):\n \"\"\"RARBG Torrent provider.\"\"\"\n\n def __init__(self):\n \"\"\"Initialize the class.\"\"\"\n super(self.__class__, self).__init__('Rarbg')\n\n # Credentials\n self.public = True\n self.token = None\n self.token_expires = None\n\n # URLs\n self.url = 'https://rarbg.com' # Spec: https://torrentapi.org/apidocs_v2.txt\n self.urls = {\n 'api': 'http://torrentapi.org/pubapi_v2.php',\n }\n\n # Proper Strings\n self.proper_strings = ['{{PROPER|REPACK|REAL|RERIP}}']\n\n # Miscellaneous Options\n self.ranked = None\n self.sorting = None\n\n # Torrent Stats\n self.minseed = None\n self.minleech = None\n\n # Cache\n self.cache = tv.Cache(self, min_time=10) # only poll RARBG every 10 minutes max\n\n def search(self, search_strings, age=0, ep_obj=None):\n \"\"\"\n Search a provider and parse the results.\n\n :param search_strings: A dict with mode (key) and the search value (value)\n :param age: Not used\n 
:param ep_obj: Not used\n :returns: A list of search results (structure)\n \"\"\"\n results = []\n if not self.login():\n return results\n\n # Search Params\n search_params = {\n 'app_id': app.RARBG_APPID,\n 'category': 'tv',\n 'min_seeders': try_int(self.minseed),\n 'min_leechers': try_int(self.minleech),\n 'limit': 100,\n 'format': 'json_extended',\n 'ranked': try_int(self.ranked),\n 'token': self.token,\n 'sort': 'last',\n 'mode': 'list',\n }\n\n for mode in search_strings:\n log.debug('Search mode: {0}', mode)\n\n if mode == 'RSS':\n search_params['search_string'] = None\n search_params['search_tvdb'] = None\n else:\n search_params['sort'] = self.sorting if self.sorting else 'seeders'\n search_params['mode'] = 'search'\n search_params['search_tvdb'] = self._get_tvdb_id()\n\n for search_string in search_strings[mode]:\n if mode != 'RSS':\n log.debug('Search string: {search}',\n {'search': search_string})\n if self.ranked:\n log.debug('Searching only ranked torrents')\n\n search_params['search_string'] = search_string\n\n # Check if token is still valid before search\n if not self.login():\n continue\n\n # Maximum requests allowed are 1req/2sec\n # Changing to 5 because of server clock desync\n time.sleep(5)\n\n search_url = self.urls['api']\n response = self.get_url(search_url, params=search_params, returns='response')\n if not response or not response.content:\n log.debug('No data returned from provider')\n continue\n\n try:\n jdata = response.json()\n except ValueError:\n log.debug('No data returned from provider')\n continue\n\n error = jdata.get('error')\n error_code = jdata.get('error_code')\n if error:\n # List of errors: https://github.com/rarbg/torrentapi/issues/1#issuecomment-114763312\n if error_code == 5:\n # 5 = Too many requests per second\n log_level = log.INFO\n elif error_code not in (4, 8, 10, 12, 14, 20):\n # 4 = Invalid token. Use get_token for a new one!\n # 8, 10, 12, 14 = Cant find * in database. Are you sure this * exists?\n # 20 = No results found\n log_level = log.WARNING\n else:\n log_level = log.DEBUG\n log.log(log_level, '{msg} Code: {code}', {'msg': error, 'code': error_code})\n continue\n\n results += self.parse(jdata, mode)\n\n return results\n\n def parse(self, data, mode):\n \"\"\"\n Parse search results for items.\n\n :param data: The raw response from a search\n :param mode: The current mode used to search, e.g. RSS\n\n :return: A list of items found\n \"\"\"\n items = []\n\n torrent_rows = data.get('torrent_results', {})\n\n if not torrent_rows:\n log.debug('Data returned from provider does not contain any torrents')\n return items\n\n for row in torrent_rows:\n try:\n title = row.pop('title')\n download_url = row.pop('download') + self._custom_trackers\n if not all([title, download_url]):\n continue\n\n seeders = row.pop('seeders')\n leechers = row.pop('leechers')\n\n # Filter unseeded torrent\n if seeders < min(self.minseed, 1):\n if mode != 'RSS':\n log.debug(\"Discarding torrent because it doesn't meet the\"\n \" minimum seeders: {0}. 
Seeders: {1}\",\n title, seeders)\n continue\n\n torrent_size = row.pop('size', -1)\n size = convert_size(torrent_size) or -1\n\n pubdate_raw = row.pop('pubdate')\n pubdate = self._parse_pubdate(pubdate_raw)\n\n item = {\n 'title': title,\n 'link': download_url,\n 'size': size,\n 'seeders': seeders,\n 'leechers': leechers,\n 'pubdate': pubdate,\n }\n if mode != 'RSS':\n log.debug('Found result: {0} with {1} seeders and {2} leechers',\n title, seeders, leechers)\n\n items.append(item)\n except (AttributeError, TypeError, KeyError, ValueError, IndexError):\n log.error('Failed parsing provider. Traceback: {0!r}',\n traceback.format_exc())\n\n return items\n\n def login(self):\n \"\"\"Login method used for logging in before doing search and torrent downloads.\"\"\"\n if self.token and self.token_expires and datetime.datetime.now() < self.token_expires:\n return True\n\n login_params = {\n 'get_token': 'get_token',\n 'format': 'json',\n 'app_id': app.RARBG_APPID,\n }\n\n response = self.get_url(self.urls['api'], params=login_params, returns='json')\n if not response:\n log.warning('Unable to connect to provider')\n return False\n\n self.token = response.get('token')\n self.token_expires = datetime.datetime.now() + datetime.timedelta(minutes=14) if self.token else None\n return self.token is not None\n\n\nprovider = RarbgProvider()\n", "path": "medusa/providers/torrent/json/rarbg.py"}], "after_files": [{"content": "# coding=utf-8\n\n\"\"\"Provider code for RARBG.\"\"\"\n\nfrom __future__ import unicode_literals\n\nimport datetime\nimport logging\nimport time\nimport traceback\n\nfrom medusa import (\n app,\n tv,\n)\nfrom medusa.helper.common import convert_size, try_int\nfrom medusa.logger.adapters.style import BraceAdapter\nfrom medusa.providers.torrent.torrent_provider import TorrentProvider\n\nlog = BraceAdapter(logging.getLogger(__name__))\nlog.logger.addHandler(logging.NullHandler())\n\n\nclass RarbgProvider(TorrentProvider):\n \"\"\"RARBG Torrent provider.\"\"\"\n\n def __init__(self):\n \"\"\"Initialize the class.\"\"\"\n super(self.__class__, self).__init__('Rarbg')\n\n # Credentials\n self.public = True\n self.token = None\n self.token_expires = None\n\n # URLs\n self.url = 'https://rarbg.com' # Spec: https://torrentapi.org/apidocs_v2.txt\n self.urls = {\n 'api': 'http://torrentapi.org/pubapi_v2.php',\n }\n\n # Proper Strings\n self.proper_strings = ['{{PROPER|REPACK|REAL|RERIP}}']\n\n # Miscellaneous Options\n self.ranked = None\n self.sorting = None\n\n # Torrent Stats\n self.minseed = None\n self.minleech = None\n\n # Cache\n self.cache = tv.Cache(self, min_time=10) # only poll RARBG every 10 minutes max\n\n def search(self, search_strings, age=0, ep_obj=None):\n \"\"\"\n Search a provider and parse the results.\n\n :param search_strings: A dict with mode (key) and the search value (value)\n :param age: Not used\n :param ep_obj: Not used\n :returns: A list of search results (structure)\n \"\"\"\n results = []\n if not self.login():\n return results\n\n # Search Params\n search_params = {\n 'app_id': app.RARBG_APPID,\n 'category': 'tv',\n 'min_seeders': try_int(self.minseed),\n 'min_leechers': try_int(self.minleech),\n 'limit': 100,\n 'format': 'json_extended',\n 'ranked': try_int(self.ranked),\n 'token': self.token,\n 'sort': 'last',\n 'mode': 'list',\n }\n\n for mode in search_strings:\n log.debug('Search mode: {0}', mode)\n\n if mode == 'RSS':\n search_params['search_string'] = None\n search_params['search_tvdb'] = None\n else:\n search_params['sort'] = self.sorting if self.sorting 
else 'seeders'\n search_params['mode'] = 'search'\n search_params['search_tvdb'] = self._get_tvdb_id()\n\n for search_string in search_strings[mode]:\n if mode != 'RSS':\n log.debug('Search string: {search}',\n {'search': search_string})\n if self.ranked:\n log.debug('Searching only ranked torrents')\n\n search_params['search_string'] = search_string\n\n # Check if token is still valid before search\n if not self.login():\n continue\n\n # Maximum requests allowed are 1req/2sec\n # Changing to 5 because of server clock desync\n time.sleep(5)\n\n search_url = self.urls['api']\n response = self.get_url(search_url, params=search_params, returns='response')\n if not response or not response.content:\n log.debug('No data returned from provider')\n continue\n\n try:\n jdata = response.json()\n except ValueError:\n log.debug('No data returned from provider')\n continue\n\n error = jdata.get('error')\n error_code = jdata.get('error_code')\n if error:\n # List of errors: https://github.com/rarbg/torrentapi/issues/1#issuecomment-114763312\n if error_code == 5:\n # 5 = Too many requests per second\n log_level = logging.INFO\n elif error_code not in (4, 8, 10, 12, 14, 20):\n # 4 = Invalid token. Use get_token for a new one!\n # 8, 10, 12, 14 = Cant find * in database. Are you sure this * exists?\n # 20 = No results found\n log_level = logging.WARNING\n else:\n log_level = logging.DEBUG\n log.log(log_level, '{msg} Code: {code}', {'msg': error, 'code': error_code})\n continue\n\n results += self.parse(jdata, mode)\n\n return results\n\n def parse(self, data, mode):\n \"\"\"\n Parse search results for items.\n\n :param data: The raw response from a search\n :param mode: The current mode used to search, e.g. RSS\n\n :return: A list of items found\n \"\"\"\n items = []\n\n torrent_rows = data.get('torrent_results', {})\n\n if not torrent_rows:\n log.debug('Data returned from provider does not contain any torrents')\n return items\n\n for row in torrent_rows:\n try:\n title = row.pop('title')\n download_url = row.pop('download') + self._custom_trackers\n if not all([title, download_url]):\n continue\n\n seeders = row.pop('seeders')\n leechers = row.pop('leechers')\n\n # Filter unseeded torrent\n if seeders < min(self.minseed, 1):\n if mode != 'RSS':\n log.debug(\"Discarding torrent because it doesn't meet the\"\n \" minimum seeders: {0}. Seeders: {1}\",\n title, seeders)\n continue\n\n torrent_size = row.pop('size', -1)\n size = convert_size(torrent_size) or -1\n\n pubdate_raw = row.pop('pubdate')\n pubdate = self._parse_pubdate(pubdate_raw)\n\n item = {\n 'title': title,\n 'link': download_url,\n 'size': size,\n 'seeders': seeders,\n 'leechers': leechers,\n 'pubdate': pubdate,\n }\n if mode != 'RSS':\n log.debug('Found result: {0} with {1} seeders and {2} leechers',\n title, seeders, leechers)\n\n items.append(item)\n except (AttributeError, TypeError, KeyError, ValueError, IndexError):\n log.error('Failed parsing provider. 
Traceback: {0!r}',\n traceback.format_exc())\n\n return items\n\n def login(self):\n \"\"\"Login method used for logging in before doing search and torrent downloads.\"\"\"\n if self.token and self.token_expires and datetime.datetime.now() < self.token_expires:\n return True\n\n login_params = {\n 'get_token': 'get_token',\n 'format': 'json',\n 'app_id': app.RARBG_APPID,\n }\n\n response = self.get_url(self.urls['api'], params=login_params, returns='json')\n if not response:\n log.warning('Unable to connect to provider')\n return False\n\n self.token = response.get('token')\n self.token_expires = datetime.datetime.now() + datetime.timedelta(minutes=14) if self.token else None\n return self.token is not None\n\n\nprovider = RarbgProvider()\n", "path": "medusa/providers/torrent/json/rarbg.py"}]}
| 2,949 | 287 |
gh_patches_debug_1204
|
rasdani/github-patches
|
git_diff
|
microsoft__playwright-python-1497
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Question]: How to get the right BrowserType from a device name?
### Your question
I noticed that the CLI is able to figure out the right `BrowserType` to use when it is launched from the command line:
```
playwright open --device="Desktop Safari" wikipedia.org # Webkit
playwright open --device="Desktop Firefox" wikipedia.org # Firefox
playwright open --device="Desktop Chrome" wikipedia.org # Chrome
```
But [the documentation](https://playwright.dev/python/docs/api/class-playwright#playwright-devices) seems to say that I have to initialize a `BrowserType` before I can pass the settings to the context, which partially defeats the purpose of the device settings.
I can implement my own logic do initialize the right `BrowserType` for each device, but as `playwright open` can already do that, that seems superfluous.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `playwright/_impl/_playwright.py`
Content:
```
1 # Copyright (c) Microsoft Corporation.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from typing import Dict
16
17 from playwright._impl._browser_type import BrowserType
18 from playwright._impl._connection import ChannelOwner, from_channel
19 from playwright._impl._fetch import APIRequest
20 from playwright._impl._local_utils import LocalUtils
21 from playwright._impl._selectors import Selectors, SelectorsOwner
22
23
24 class Playwright(ChannelOwner):
25 devices: Dict
26 selectors: Selectors
27 chromium: BrowserType
28 firefox: BrowserType
29 webkit: BrowserType
30 request: APIRequest
31
32 def __init__(
33 self, parent: ChannelOwner, type: str, guid: str, initializer: Dict
34 ) -> None:
35 super().__init__(parent, type, guid, initializer)
36 self.request = APIRequest(self)
37 self.chromium = from_channel(initializer["chromium"])
38 self.chromium._playwright = self
39 self.firefox = from_channel(initializer["firefox"])
40 self.firefox._playwright = self
41 self.webkit = from_channel(initializer["webkit"])
42 self.webkit._playwright = self
43
44 self.selectors = Selectors(self._loop, self._dispatcher_fiber)
45 selectors_owner: SelectorsOwner = from_channel(initializer["selectors"])
46 self.selectors._add_channel(selectors_owner)
47
48 self._connection.on(
49 "close", lambda: self.selectors._remove_channel(selectors_owner)
50 )
51 self.devices = {}
52 self.devices = {
53 device["name"]: parse_device_descriptor(device["descriptor"])
54 for device in initializer["deviceDescriptors"]
55 }
56 self._utils: LocalUtils = from_channel(initializer["utils"])
57
58 def __getitem__(self, value: str) -> "BrowserType":
59 if value == "chromium":
60 return self.chromium
61 elif value == "firefox":
62 return self.firefox
63 elif value == "webkit":
64 return self.webkit
65 raise ValueError("Invalid browser " + value)
66
67 def _set_selectors(self, selectors: SelectorsOwner) -> None:
68 selectors_owner = from_channel(self._initializer["selectors"])
69 self.selectors._remove_channel(selectors_owner)
70 self.selectors = selectors
71 self.selectors._add_channel(selectors_owner)
72
73 def stop(self) -> None:
74 pass
75
76
77 def parse_device_descriptor(dict: Dict) -> Dict:
78 return {
79 "user_agent": dict["userAgent"],
80 "viewport": dict["viewport"],
81 "device_scale_factor": dict["deviceScaleFactor"],
82 "is_mobile": dict["isMobile"],
83 "has_touch": dict["hasTouch"],
84 }
85
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/playwright/_impl/_playwright.py b/playwright/_impl/_playwright.py
--- a/playwright/_impl/_playwright.py
+++ b/playwright/_impl/_playwright.py
@@ -81,4 +81,5 @@
"device_scale_factor": dict["deviceScaleFactor"],
"is_mobile": dict["isMobile"],
"has_touch": dict["hasTouch"],
+ "default_browser_type": dict["defaultBrowserType"],
}
|
{"golden_diff": "diff --git a/playwright/_impl/_playwright.py b/playwright/_impl/_playwright.py\n--- a/playwright/_impl/_playwright.py\n+++ b/playwright/_impl/_playwright.py\n@@ -81,4 +81,5 @@\n \"device_scale_factor\": dict[\"deviceScaleFactor\"],\n \"is_mobile\": dict[\"isMobile\"],\n \"has_touch\": dict[\"hasTouch\"],\n+ \"default_browser_type\": dict[\"defaultBrowserType\"],\n }\n", "issue": "[Question]: How to get the right BrowserType from a device name? \n### Your question\n\nI noticed that the CLI is able to figure out the right `BrowserType` to use when it is launched from the command line:\r\n\r\n```\r\nplaywright open --device=\"Desktop Safari\" wikipedia.org # Webkit\r\nplaywright open --device=\"Desktop Firefox\" wikipedia.org # Firefox\r\nplaywright open --device=\"Desktop Chrome\" wikipedia.org # Chrome\r\n``` \r\n\r\nBut [the documentation](https://playwright.dev/python/docs/api/class-playwright#playwright-devices) seems to say that I have to initialize a `BrowserType` before I can pass the settings to the context, which partially defeats the purpose of the device settings.\r\n\r\nI can implement my own logic do initialize the right `BrowserType` for each device, but as `playwright open` can already do that, that seems superfluous.\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Dict\n\nfrom playwright._impl._browser_type import BrowserType\nfrom playwright._impl._connection import ChannelOwner, from_channel\nfrom playwright._impl._fetch import APIRequest\nfrom playwright._impl._local_utils import LocalUtils\nfrom playwright._impl._selectors import Selectors, SelectorsOwner\n\n\nclass Playwright(ChannelOwner):\n devices: Dict\n selectors: Selectors\n chromium: BrowserType\n firefox: BrowserType\n webkit: BrowserType\n request: APIRequest\n\n def __init__(\n self, parent: ChannelOwner, type: str, guid: str, initializer: Dict\n ) -> None:\n super().__init__(parent, type, guid, initializer)\n self.request = APIRequest(self)\n self.chromium = from_channel(initializer[\"chromium\"])\n self.chromium._playwright = self\n self.firefox = from_channel(initializer[\"firefox\"])\n self.firefox._playwright = self\n self.webkit = from_channel(initializer[\"webkit\"])\n self.webkit._playwright = self\n\n self.selectors = Selectors(self._loop, self._dispatcher_fiber)\n selectors_owner: SelectorsOwner = from_channel(initializer[\"selectors\"])\n self.selectors._add_channel(selectors_owner)\n\n self._connection.on(\n \"close\", lambda: self.selectors._remove_channel(selectors_owner)\n )\n self.devices = {}\n self.devices = {\n device[\"name\"]: parse_device_descriptor(device[\"descriptor\"])\n for device in initializer[\"deviceDescriptors\"]\n }\n self._utils: LocalUtils = from_channel(initializer[\"utils\"])\n\n def __getitem__(self, value: str) -> \"BrowserType\":\n if value == \"chromium\":\n return self.chromium\n elif value == \"firefox\":\n return self.firefox\n elif value == \"webkit\":\n 
return self.webkit\n raise ValueError(\"Invalid browser \" + value)\n\n def _set_selectors(self, selectors: SelectorsOwner) -> None:\n selectors_owner = from_channel(self._initializer[\"selectors\"])\n self.selectors._remove_channel(selectors_owner)\n self.selectors = selectors\n self.selectors._add_channel(selectors_owner)\n\n def stop(self) -> None:\n pass\n\n\ndef parse_device_descriptor(dict: Dict) -> Dict:\n return {\n \"user_agent\": dict[\"userAgent\"],\n \"viewport\": dict[\"viewport\"],\n \"device_scale_factor\": dict[\"deviceScaleFactor\"],\n \"is_mobile\": dict[\"isMobile\"],\n \"has_touch\": dict[\"hasTouch\"],\n }\n", "path": "playwright/_impl/_playwright.py"}], "after_files": [{"content": "# Copyright (c) Microsoft Corporation.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Dict\n\nfrom playwright._impl._browser_type import BrowserType\nfrom playwright._impl._connection import ChannelOwner, from_channel\nfrom playwright._impl._fetch import APIRequest\nfrom playwright._impl._local_utils import LocalUtils\nfrom playwright._impl._selectors import Selectors, SelectorsOwner\n\n\nclass Playwright(ChannelOwner):\n devices: Dict\n selectors: Selectors\n chromium: BrowserType\n firefox: BrowserType\n webkit: BrowserType\n request: APIRequest\n\n def __init__(\n self, parent: ChannelOwner, type: str, guid: str, initializer: Dict\n ) -> None:\n super().__init__(parent, type, guid, initializer)\n self.request = APIRequest(self)\n self.chromium = from_channel(initializer[\"chromium\"])\n self.chromium._playwright = self\n self.firefox = from_channel(initializer[\"firefox\"])\n self.firefox._playwright = self\n self.webkit = from_channel(initializer[\"webkit\"])\n self.webkit._playwright = self\n\n self.selectors = Selectors(self._loop, self._dispatcher_fiber)\n selectors_owner: SelectorsOwner = from_channel(initializer[\"selectors\"])\n self.selectors._add_channel(selectors_owner)\n\n self._connection.on(\n \"close\", lambda: self.selectors._remove_channel(selectors_owner)\n )\n self.devices = {}\n self.devices = {\n device[\"name\"]: parse_device_descriptor(device[\"descriptor\"])\n for device in initializer[\"deviceDescriptors\"]\n }\n self._utils: LocalUtils = from_channel(initializer[\"utils\"])\n\n def __getitem__(self, value: str) -> \"BrowserType\":\n if value == \"chromium\":\n return self.chromium\n elif value == \"firefox\":\n return self.firefox\n elif value == \"webkit\":\n return self.webkit\n raise ValueError(\"Invalid browser \" + value)\n\n def _set_selectors(self, selectors: SelectorsOwner) -> None:\n selectors_owner = from_channel(self._initializer[\"selectors\"])\n self.selectors._remove_channel(selectors_owner)\n self.selectors = selectors\n self.selectors._add_channel(selectors_owner)\n\n def stop(self) -> None:\n pass\n\n\ndef parse_device_descriptor(dict: Dict) -> Dict:\n return {\n \"user_agent\": dict[\"userAgent\"],\n \"viewport\": dict[\"viewport\"],\n \"device_scale_factor\": dict[\"deviceScaleFactor\"],\n \"is_mobile\": 
dict[\"isMobile\"],\n \"has_touch\": dict[\"hasTouch\"],\n \"default_browser_type\": dict[\"defaultBrowserType\"],\n }\n", "path": "playwright/_impl/_playwright.py"}]}
| 1,287 | 103 |
gh_patches_debug_11614
|
rasdani/github-patches
|
git_diff
|
digitalfabrik__integreat-cms-295
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Enforce new SECRET_KEY in production environments
The server should refuse to start if the SECRET_KEY does not differ from the default value in our repository.
The importance of this issue depends on whether the Federated Cloud feature is using this key or not.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `backend/cms/apps.py`
Content:
```
1 """
2 Django related class representing a config of an app
3 """
4 from django.apps import AppConfig
5
6
7 class CmsConfig(AppConfig):
8 """
9 Class inheriting the django AppConfig
10 """
11
12 name = 'cms'
13
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/backend/cms/apps.py b/backend/cms/apps.py
--- a/backend/cms/apps.py
+++ b/backend/cms/apps.py
@@ -1,8 +1,12 @@
"""
Django related class representing a config of an app
"""
+import logging
+import sys
+from django.conf import settings
from django.apps import AppConfig
+logger = logging.getLogger(__name__)
class CmsConfig(AppConfig):
"""
@@ -10,3 +14,8 @@
"""
name = 'cms'
+
+ def ready(self):
+ if settings.SECRET_KEY == '-!v282$zj815_q@htaxcubylo)(l%a+k*-xi78hw*#s2@i86@_' and not settings.DEBUG:
+ logger.error("You are running the Integreat CMS in production mode. Change the SECRET_KEY in the settings.py!")
+ sys.exit(1)
|
{"golden_diff": "diff --git a/backend/cms/apps.py b/backend/cms/apps.py\n--- a/backend/cms/apps.py\n+++ b/backend/cms/apps.py\n@@ -1,8 +1,12 @@\n \"\"\"\n Django related class representing a config of an app\n \"\"\"\n+import logging\n+import sys\n+from django.conf import settings\n from django.apps import AppConfig\n \n+logger = logging.getLogger(__name__)\n \n class CmsConfig(AppConfig):\n \"\"\"\n@@ -10,3 +14,8 @@\n \"\"\"\n \n name = 'cms'\n+\n+ def ready(self):\n+ if settings.SECRET_KEY == '-!v282$zj815_q@htaxcubylo)(l%a+k*-xi78hw*#s2@i86@_' and not settings.DEBUG:\n+ logger.error(\"You are running the Integreat CMS in production mode. Change the SECRET_KEY in the settings.py!\")\n+ sys.exit(1)\n", "issue": "Enforce new SECRET_KEY in production environments\nThe server should refuse to start if the SECRET_KEY does not differ from the default value in our repository.\r\n\r\nThe importance of this issue depends on whether the Federated Cloud feature is using this key or not.\n", "before_files": [{"content": "\"\"\"\nDjango related class representing a config of an app\n\"\"\"\nfrom django.apps import AppConfig\n\n\nclass CmsConfig(AppConfig):\n \"\"\"\n Class inheriting the django AppConfig\n \"\"\"\n\n name = 'cms'\n", "path": "backend/cms/apps.py"}], "after_files": [{"content": "\"\"\"\nDjango related class representing a config of an app\n\"\"\"\nimport logging\nimport sys\nfrom django.conf import settings\nfrom django.apps import AppConfig\n\nlogger = logging.getLogger(__name__)\n\nclass CmsConfig(AppConfig):\n \"\"\"\n Class inheriting the django AppConfig\n \"\"\"\n\n name = 'cms'\n\n def ready(self):\n if settings.SECRET_KEY == '-!v282$zj815_q@htaxcubylo)(l%a+k*-xi78hw*#s2@i86@_' and not settings.DEBUG:\n logger.error(\"You are running the Integreat CMS in production mode. Change the SECRET_KEY in the settings.py!\")\n sys.exit(1)\n", "path": "backend/cms/apps.py"}]}
| 371 | 204 |
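Editorial note on the record above (gh_patches_debug_11614): the golden diff hooks Django's `AppConfig.ready()` and calls `sys.exit(1)` when the repository's default `SECRET_KEY` is still in use outside `DEBUG` mode. The sketch below shows that guard in isolation; it is an illustration, not part of the dataset record, and `DEFAULT_SECRET_KEY` is a placeholder for the value committed to the repository's settings.py.

```python
# Illustrative sketch only -- assumes a standard Django project layout.
import logging
import sys

from django.apps import AppConfig
from django.conf import settings

logger = logging.getLogger(__name__)

DEFAULT_SECRET_KEY = "<key shipped in settings.py>"  # placeholder value


class CmsConfig(AppConfig):
    name = "cms"

    def ready(self):
        # Refuse to serve production traffic with the publicly known key.
        if settings.SECRET_KEY == DEFAULT_SECRET_KEY and not settings.DEBUG:
            logger.error("Default SECRET_KEY detected in production mode; aborting.")
            sys.exit(1)
```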
gh_patches_debug_1858
|
rasdani/github-patches
|
git_diff
|
huggingface__accelerate-127
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error when loading optimizer state
Thanks for this awesome product!!
When I try to load optimizer state on TPUs, I get an error, since self.device_placement is never initialized in AcceleratedOptimizer..!
https://github.com/huggingface/accelerate/blob/e0a420f7cb32124cadeeae690b56e463f8fc598f/src/accelerate/optimizer.py#L83-L86
Error when loading optimizer state
Thanks for this awesome product!!
When I try to load optimizer state on TPUs, I get an error, since self.device_placement is never initialized in AcceleratedOptimizer..!
https://github.com/huggingface/accelerate/blob/e0a420f7cb32124cadeeae690b56e463f8fc598f/src/accelerate/optimizer.py#L83-L86
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/accelerate/optimizer.py`
Content:
```
1 # Copyright 2021 The HuggingFace Team. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import torch
16
17 from packaging import version
18
19 from .state import AcceleratorState, DistributedType, is_tpu_available
20 from .utils import honor_type
21
22
23 if is_tpu_available():
24 import torch_xla.core.xla_model as xm
25
26
27 def move_to_device(state, device):
28 if isinstance(state, (list, tuple)):
29 return honor_type(state, (move_to_device(t, device) for t in state))
30 elif isinstance(state, dict):
31 return type(state)({k: move_to_device(v, device) for k, v in state.items()})
32 elif isinstance(state, torch.Tensor):
33 return state.to(device)
34 return state
35
36
37 class AcceleratedOptimizer(torch.optim.Optimizer):
38 """
39 Internal wrapper around a torch optimizer.
40
41 Args:
42 optimizer (:obj:`torch.optim.optimizer.Optimizer`):
43 The optimizer to wrap.
44 device_placement (:obj:`bool`, `optional`, defaults to :obj:`True`):
45 Whether or not the optimizer should handle device placement. If so, it will place the state dictionary of
46 :obj:`optimizer` on the right device.
47 scaler (:obj:`torch.cuda.amp.grad_scaler.GradScaler`, `optional`):
48 The scaler to use in the step function if training with mixed precision.
49 """
50
51 def __init__(self, optimizer, device_placement=True, scaler=None):
52 self.optimizer = optimizer
53 self.scaler = scaler
54 self.state = AcceleratorState()
55
56 # Handle device placement
57 if device_placement:
58 state_dict = self.optimizer.state_dict()
59 if self.state.distributed_type == DistributedType.TPU:
60 xm.send_cpu_data_to_device(state_dict, self.state.device)
61 else:
62 state_dict = move_to_device(state_dict, self.state.device)
63 self.optimizer.load_state_dict(state_dict)
64
65 @property
66 def param_groups(self):
67 return self.optimizer.param_groups
68
69 @param_groups.setter
70 def param_groups(self, param_groups):
71 self.optimizer.param_groups = param_groups
72
73 @property
74 def defaults(self):
75 return self.optimizer.defaults
76
77 @defaults.setter
78 def defaults(self, defaults):
79 self.optimizer.defaults = defaults
80
81 def add_param_group(self, param_group):
82 self.optimizer.add_param_group(param_group)
83
84 def load_state_dict(self, state_dict):
85 if self.state.distributed_type == DistributedType.TPU and self.device_placement:
86 xm.send_cpu_data_to_device(state_dict, self.state.device)
87 self.optimizer.load_state_dict(state_dict)
88
89 def state_dict(self):
90 return self.optimizer.state_dict()
91
92 def zero_grad(self, set_to_none=None):
93 if version.parse(torch.__version__) < version.parse("1.7.0"):
94 if set_to_none is not None:
95 raise ValueError(
96 "`set_to_none` for Optimizer.zero_grad` was introduced in PyTorch 1.7.0 and can't be used for "
97 f"earlier versions (found version {torch.__version__})."
98 )
99 self.optimizer.zero_grad()
100 else:
101 if set_to_none is not None:
102 set_to_none = False
103 self.optimizer.zero_grad(set_to_none=set_to_none)
104
105 def step(self, closure=None):
106 if self.state.distributed_type == DistributedType.TPU:
107 optimizer_args = {"closure": closure} if closure is not None else {}
108 xm.optimizer_step(self.optimizer, optimizer_args=optimizer_args)
109 elif self.scaler is not None:
110 self.scaler.step(self.optimizer, closure)
111 self.scaler.update()
112 else:
113 self.optimizer.step(closure)
114
115 def _switch_parameters(self, parameters_map):
116 for param_group in self.optimizer.param_groups:
117 param_group["params"] = [parameters_map.get(p, p) for p in param_group["params"]]
118
119 @property
120 def is_overflow(self):
121 """This needs to be implemented at the end"""
122 return False # TODO: implement it
123
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/accelerate/optimizer.py b/src/accelerate/optimizer.py
--- a/src/accelerate/optimizer.py
+++ b/src/accelerate/optimizer.py
@@ -52,6 +52,7 @@
self.optimizer = optimizer
self.scaler = scaler
self.state = AcceleratorState()
+ self.device_placement = device_placement
# Handle device placement
if device_placement:
|
{"golden_diff": "diff --git a/src/accelerate/optimizer.py b/src/accelerate/optimizer.py\n--- a/src/accelerate/optimizer.py\n+++ b/src/accelerate/optimizer.py\n@@ -52,6 +52,7 @@\n self.optimizer = optimizer\n self.scaler = scaler\n self.state = AcceleratorState()\n+ self.device_placement = device_placement\n \n # Handle device placement\n if device_placement:\n", "issue": "Error when loading optimizer state\nThanks for this awesome product!!\r\n\r\nWhen I try to load optimizer state on TPUs, I get an error, since self.device_placement is never initialized in AcceleratedOptimizer..!\r\nhttps://github.com/huggingface/accelerate/blob/e0a420f7cb32124cadeeae690b56e463f8fc598f/src/accelerate/optimizer.py#L83-L86\nError when loading optimizer state\nThanks for this awesome product!!\r\n\r\nWhen I try to load optimizer state on TPUs, I get an error, since self.device_placement is never initialized in AcceleratedOptimizer..!\r\nhttps://github.com/huggingface/accelerate/blob/e0a420f7cb32124cadeeae690b56e463f8fc598f/src/accelerate/optimizer.py#L83-L86\n", "before_files": [{"content": "# Copyright 2021 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport torch\n\nfrom packaging import version\n\nfrom .state import AcceleratorState, DistributedType, is_tpu_available\nfrom .utils import honor_type\n\n\nif is_tpu_available():\n import torch_xla.core.xla_model as xm\n\n\ndef move_to_device(state, device):\n if isinstance(state, (list, tuple)):\n return honor_type(state, (move_to_device(t, device) for t in state))\n elif isinstance(state, dict):\n return type(state)({k: move_to_device(v, device) for k, v in state.items()})\n elif isinstance(state, torch.Tensor):\n return state.to(device)\n return state\n\n\nclass AcceleratedOptimizer(torch.optim.Optimizer):\n \"\"\"\n Internal wrapper around a torch optimizer.\n\n Args:\n optimizer (:obj:`torch.optim.optimizer.Optimizer`):\n The optimizer to wrap.\n device_placement (:obj:`bool`, `optional`, defaults to :obj:`True`):\n Whether or not the optimizer should handle device placement. 
If so, it will place the state dictionary of\n :obj:`optimizer` on the right device.\n scaler (:obj:`torch.cuda.amp.grad_scaler.GradScaler`, `optional`):\n The scaler to use in the step function if training with mixed precision.\n \"\"\"\n\n def __init__(self, optimizer, device_placement=True, scaler=None):\n self.optimizer = optimizer\n self.scaler = scaler\n self.state = AcceleratorState()\n\n # Handle device placement\n if device_placement:\n state_dict = self.optimizer.state_dict()\n if self.state.distributed_type == DistributedType.TPU:\n xm.send_cpu_data_to_device(state_dict, self.state.device)\n else:\n state_dict = move_to_device(state_dict, self.state.device)\n self.optimizer.load_state_dict(state_dict)\n\n @property\n def param_groups(self):\n return self.optimizer.param_groups\n\n @param_groups.setter\n def param_groups(self, param_groups):\n self.optimizer.param_groups = param_groups\n\n @property\n def defaults(self):\n return self.optimizer.defaults\n\n @defaults.setter\n def defaults(self, defaults):\n self.optimizer.defaults = defaults\n\n def add_param_group(self, param_group):\n self.optimizer.add_param_group(param_group)\n\n def load_state_dict(self, state_dict):\n if self.state.distributed_type == DistributedType.TPU and self.device_placement:\n xm.send_cpu_data_to_device(state_dict, self.state.device)\n self.optimizer.load_state_dict(state_dict)\n\n def state_dict(self):\n return self.optimizer.state_dict()\n\n def zero_grad(self, set_to_none=None):\n if version.parse(torch.__version__) < version.parse(\"1.7.0\"):\n if set_to_none is not None:\n raise ValueError(\n \"`set_to_none` for Optimizer.zero_grad` was introduced in PyTorch 1.7.0 and can't be used for \"\n f\"earlier versions (found version {torch.__version__}).\"\n )\n self.optimizer.zero_grad()\n else:\n if set_to_none is not None:\n set_to_none = False\n self.optimizer.zero_grad(set_to_none=set_to_none)\n\n def step(self, closure=None):\n if self.state.distributed_type == DistributedType.TPU:\n optimizer_args = {\"closure\": closure} if closure is not None else {}\n xm.optimizer_step(self.optimizer, optimizer_args=optimizer_args)\n elif self.scaler is not None:\n self.scaler.step(self.optimizer, closure)\n self.scaler.update()\n else:\n self.optimizer.step(closure)\n\n def _switch_parameters(self, parameters_map):\n for param_group in self.optimizer.param_groups:\n param_group[\"params\"] = [parameters_map.get(p, p) for p in param_group[\"params\"]]\n\n @property\n def is_overflow(self):\n \"\"\"This needs to be implemented at the end\"\"\"\n return False # TODO: implement it\n", "path": "src/accelerate/optimizer.py"}], "after_files": [{"content": "# Copyright 2021 The HuggingFace Team. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport torch\n\nfrom packaging import version\n\nfrom .state import AcceleratorState, DistributedType, is_tpu_available\nfrom .utils import honor_type\n\n\nif is_tpu_available():\n import torch_xla.core.xla_model as xm\n\n\ndef move_to_device(state, device):\n if isinstance(state, (list, tuple)):\n return honor_type(state, (move_to_device(t, device) for t in state))\n elif isinstance(state, dict):\n return type(state)({k: move_to_device(v, device) for k, v in state.items()})\n elif isinstance(state, torch.Tensor):\n return state.to(device)\n return state\n\n\nclass AcceleratedOptimizer(torch.optim.Optimizer):\n \"\"\"\n Internal wrapper around a torch optimizer.\n\n Args:\n optimizer (:obj:`torch.optim.optimizer.Optimizer`):\n The optimizer to wrap.\n device_placement (:obj:`bool`, `optional`, defaults to :obj:`True`):\n Whether or not the optimizer should handle device placement. If so, it will place the state dictionary of\n :obj:`optimizer` on the right device.\n scaler (:obj:`torch.cuda.amp.grad_scaler.GradScaler`, `optional`):\n The scaler to use in the step function if training with mixed precision.\n \"\"\"\n\n def __init__(self, optimizer, device_placement=True, scaler=None):\n self.optimizer = optimizer\n self.scaler = scaler\n self.state = AcceleratorState()\n self.device_placement = device_placement\n\n # Handle device placement\n if device_placement:\n state_dict = self.optimizer.state_dict()\n if self.state.distributed_type == DistributedType.TPU:\n xm.send_cpu_data_to_device(state_dict, self.state.device)\n else:\n state_dict = move_to_device(state_dict, self.state.device)\n self.optimizer.load_state_dict(state_dict)\n\n @property\n def param_groups(self):\n return self.optimizer.param_groups\n\n @param_groups.setter\n def param_groups(self, param_groups):\n self.optimizer.param_groups = param_groups\n\n @property\n def defaults(self):\n return self.optimizer.defaults\n\n @defaults.setter\n def defaults(self, defaults):\n self.optimizer.defaults = defaults\n\n def add_param_group(self, param_group):\n self.optimizer.add_param_group(param_group)\n\n def load_state_dict(self, state_dict):\n if self.state.distributed_type == DistributedType.TPU and self.device_placement:\n xm.send_cpu_data_to_device(state_dict, self.state.device)\n self.optimizer.load_state_dict(state_dict)\n\n def state_dict(self):\n return self.optimizer.state_dict()\n\n def zero_grad(self, set_to_none=None):\n if version.parse(torch.__version__) < version.parse(\"1.7.0\"):\n if set_to_none is not None:\n raise ValueError(\n \"`set_to_none` for Optimizer.zero_grad` was introduced in PyTorch 1.7.0 and can't be used for \"\n f\"earlier versions (found version {torch.__version__}).\"\n )\n self.optimizer.zero_grad()\n else:\n if set_to_none is not None:\n set_to_none = False\n self.optimizer.zero_grad(set_to_none=set_to_none)\n\n def step(self, closure=None):\n if self.state.distributed_type == DistributedType.TPU:\n optimizer_args = 
{\"closure\": closure} if closure is not None else {}\n xm.optimizer_step(self.optimizer, optimizer_args=optimizer_args)\n elif self.scaler is not None:\n self.scaler.step(self.optimizer, closure)\n self.scaler.update()\n else:\n self.optimizer.step(closure)\n\n def _switch_parameters(self, parameters_map):\n for param_group in self.optimizer.param_groups:\n param_group[\"params\"] = [parameters_map.get(p, p) for p in param_group[\"params\"]]\n\n @property\n def is_overflow(self):\n \"\"\"This needs to be implemented at the end\"\"\"\n return False # TODO: implement it\n", "path": "src/accelerate/optimizer.py"}]}
| 1,705 | 93 |
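Editorial note on the record above (gh_patches_debug_1858): the whole fix is storing the `device_placement` argument on the wrapper in `__init__`, because `load_state_dict()` later reads `self.device_placement` on the TPU path. A minimal sketch of that failure mode, using a hypothetical `Wrapper` class rather than the real `AcceleratedOptimizer`:

```python
# Illustrative sketch only -- not the real accelerate code.
class Wrapper:
    def __init__(self, device_placement=True):
        # The golden diff adds exactly this kind of assignment; without it,
        # the attribute read below raises AttributeError at load time.
        self.device_placement = device_placement

    def load_state_dict(self, state_dict):
        if self.device_placement:
            state_dict = dict(state_dict)  # stand-in for moving state to the device
        return state_dict


Wrapper().load_state_dict({"lr": 0.1})  # works only because __init__ set the flag
```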
gh_patches_debug_15427
|
rasdani/github-patches
|
git_diff
|
google__jax-574
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
jax.config doesn't report command-line flags correctly
Another one from @jmgilmer and I - I don't think the jax config is parsing command line flags correctly. I don't know if this is functionally important or just a reporting error - but it is certainly important for user scripts knowing what flags have been set.
If I run this script:
```
from absl import app, flags
from jax.config import config
FLAGS = flags.FLAGS
def main(_):
print("FLAGS.jax_enable_x64", FLAGS.jax_enable_x64)
print("FLAGS.jax_debug_nans", FLAGS.jax_debug_nans)
print(config.values)
if __name__ == "__main__":
config.config_with_absl()
app.run(main)
```
I get the following problem: jax.config doesn't report the correct flag settings.
```
> python jaxtest.py --jax_enable_x64=1 --jax_debug_nans=1
FLAGS.jax_enable_x64 True
FLAGS.jax_debug_nans True
{'jax_enable_x64': 0, 'jax_xla_backend': 'xla', 'jax_backend_target': 'local', 'jax_platform_name': '', 'jax_device_values': 1, 'jax_debug_nans': 0, 'jax_disable_jit': 0}
```
if I run the same with envvars instead it works:
```
JAX_ENABLE_X64=1 JAX_DEBUG_NANS=1 python jaxtest.py
FLAGS.jax_enable_x64 True
FLAGS.jax_debug_nans True
{'jax_enable_x64': 1, 'jax_xla_backend': 'xla', 'jax_backend_target': 'local', 'jax_platform_name': '', 'jax_device_values': 1, 'jax_debug_nans': 1, 'jax_disable_jit': 0}
```
I've tried parsing the flags in different ways but nothing seems to fix the issue.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `jax/config.py`
Content:
```
1 # Copyright 2018 Google LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import sys
16
17
18 class Config(object):
19 def __init__(self):
20 self.values = {}
21 self.meta = {}
22 self.FLAGS = NameSpace(self.read)
23 self.use_absl = False
24
25 def update(self, name, val):
26 self.check_exists(name)
27 if name not in self.values:
28 raise Exception("Unrecognized config option: {}".format(name))
29 self.values[name] = val
30
31 def read(self, name):
32 if self.use_absl:
33 return getattr(self.absl_flags.FLAGS, name)
34 else:
35 self.check_exists(name)
36 return self.values[name]
37
38 def add_option(self, name, default, opt_type, meta_args, meta_kwargs):
39 if name in self.values:
40 raise Exception("Config option {} already defined".format(name))
41 self.values[name] = default
42 self.meta[name] = (opt_type, meta_args, meta_kwargs)
43
44 def check_exists(self, name):
45 if name not in self.values:
46 raise Exception("Unrecognized config option: {}".format(name))
47
48 def DEFINE_bool(self, name, default, *args, **kwargs):
49 self.add_option(name, default, bool, args, kwargs)
50
51 def DEFINE_integer(self, name, default, *args, **kwargs):
52 self.add_option(name, default, int, args, kwargs)
53
54 def DEFINE_string(self, name, default, *args, **kwargs):
55 self.add_option(name, default, str, args, kwargs)
56
57 def DEFINE_enum(self, name, default, *args, **kwargs):
58 self.add_option(name, default, 'enum', args, kwargs)
59
60 def config_with_absl(self):
61 # Run this before calling `app.run(main)` etc
62 import absl.flags as absl_FLAGS
63 from absl import app, flags as absl_flags
64
65 self.use_absl = True
66 self.absl_flags = absl_flags
67 absl_defs = { bool: absl_flags.DEFINE_bool,
68 int: absl_flags.DEFINE_integer,
69 str: absl_flags.DEFINE_string,
70 'enum': absl_flags.DEFINE_enum }
71
72 for name, val in self.values.items():
73 flag_type, meta_args, meta_kwargs = self.meta[name]
74 absl_defs[flag_type](name, val, *meta_args, **meta_kwargs)
75
76 def complete_absl_config(self, absl_flags):
77 for name, _ in self.values.items():
78 self.update(name, getattr(absl_flags.FLAGS, name))
79
80 def parse_flags_with_absl(self):
81 global already_configured_with_absl
82 if not already_configured_with_absl:
83 import absl.flags
84 self.config_with_absl()
85 absl.flags.FLAGS(sys.argv)
86 already_configured_with_absl = True
87
88
89 class NameSpace(object):
90 def __init__(self, getter):
91 self._getter = getter
92
93 def __getattr__(self, name):
94 return self._getter(name)
95
96
97 config = Config()
98 flags = config
99 already_configured_with_absl = False
100
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/jax/config.py b/jax/config.py
--- a/jax/config.py
+++ b/jax/config.py
@@ -73,6 +73,8 @@
flag_type, meta_args, meta_kwargs = self.meta[name]
absl_defs[flag_type](name, val, *meta_args, **meta_kwargs)
+ app.call_after_init(lambda: self.complete_absl_config(absl_flags))
+
def complete_absl_config(self, absl_flags):
for name, _ in self.values.items():
self.update(name, getattr(absl_flags.FLAGS, name))
@@ -83,6 +85,7 @@
import absl.flags
self.config_with_absl()
absl.flags.FLAGS(sys.argv)
+ self.complete_absl_config(absl.flags)
already_configured_with_absl = True
|
{"golden_diff": "diff --git a/jax/config.py b/jax/config.py\n--- a/jax/config.py\n+++ b/jax/config.py\n@@ -73,6 +73,8 @@\n flag_type, meta_args, meta_kwargs = self.meta[name]\n absl_defs[flag_type](name, val, *meta_args, **meta_kwargs)\n \n+ app.call_after_init(lambda: self.complete_absl_config(absl_flags))\n+\n def complete_absl_config(self, absl_flags):\n for name, _ in self.values.items():\n self.update(name, getattr(absl_flags.FLAGS, name))\n@@ -83,6 +85,7 @@\n import absl.flags\n self.config_with_absl()\n absl.flags.FLAGS(sys.argv)\n+ self.complete_absl_config(absl.flags)\n already_configured_with_absl = True\n", "issue": "jax.config doesn't report command-line flags correctly\nAnother one from @jmgilmer and I - I don't think the jax config is parsing command line flags correctly. I don't know if this is functionally important or just a reporting error - but it is certainly important for user scripts knowing what flags have been set.\r\n\r\nIf I run this script:\r\n```\r\nfrom absl import app, flags\r\nfrom jax.config import config\r\nFLAGS = flags.FLAGS\r\ndef main(_):\r\n print(\"FLAGS.jax_enable_x64\", FLAGS.jax_enable_x64)\r\n print(\"FLAGS.jax_debug_nans\", FLAGS.jax_debug_nans)\r\n print(config.values)\r\nif __name__ == \"__main__\":\r\n config.config_with_absl()\r\n app.run(main)\r\n```\r\nI get the following problem: jax.config doesn't report the correct flag settings.\r\n```\r\n> python jaxtest.py --jax_enable_x64=1 --jax_debug_nans=1\r\nFLAGS.jax_enable_x64 True\r\nFLAGS.jax_debug_nans True\r\n{'jax_enable_x64': 0, 'jax_xla_backend': 'xla', 'jax_backend_target': 'local', 'jax_platform_name': '', 'jax_device_values': 1, 'jax_debug_nans': 0, 'jax_disable_jit': 0}\r\n```\r\nif I run the same with envvars instead it works:\r\n```\r\nJAX_ENABLE_X64=1 JAX_DEBUG_NANS=1 python jaxtest.py\r\nFLAGS.jax_enable_x64 True\r\nFLAGS.jax_debug_nans True\r\n{'jax_enable_x64': 1, 'jax_xla_backend': 'xla', 'jax_backend_target': 'local', 'jax_platform_name': '', 'jax_device_values': 1, 'jax_debug_nans': 1, 'jax_disable_jit': 0}\r\n```\r\n\r\nI've tried parsing the flags in different ways but nothing seems to fix the issue.\n", "before_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport sys\n\n\nclass Config(object):\n def __init__(self):\n self.values = {}\n self.meta = {}\n self.FLAGS = NameSpace(self.read)\n self.use_absl = False\n\n def update(self, name, val):\n self.check_exists(name)\n if name not in self.values:\n raise Exception(\"Unrecognized config option: {}\".format(name))\n self.values[name] = val\n\n def read(self, name):\n if self.use_absl:\n return getattr(self.absl_flags.FLAGS, name)\n else:\n self.check_exists(name)\n return self.values[name]\n\n def add_option(self, name, default, opt_type, meta_args, meta_kwargs):\n if name in self.values:\n raise Exception(\"Config option {} already defined\".format(name))\n self.values[name] = default\n self.meta[name] = (opt_type, meta_args, meta_kwargs)\n\n def check_exists(self, 
name):\n if name not in self.values:\n raise Exception(\"Unrecognized config option: {}\".format(name))\n\n def DEFINE_bool(self, name, default, *args, **kwargs):\n self.add_option(name, default, bool, args, kwargs)\n\n def DEFINE_integer(self, name, default, *args, **kwargs):\n self.add_option(name, default, int, args, kwargs)\n\n def DEFINE_string(self, name, default, *args, **kwargs):\n self.add_option(name, default, str, args, kwargs)\n\n def DEFINE_enum(self, name, default, *args, **kwargs):\n self.add_option(name, default, 'enum', args, kwargs)\n\n def config_with_absl(self):\n # Run this before calling `app.run(main)` etc\n import absl.flags as absl_FLAGS\n from absl import app, flags as absl_flags\n\n self.use_absl = True\n self.absl_flags = absl_flags\n absl_defs = { bool: absl_flags.DEFINE_bool,\n int: absl_flags.DEFINE_integer,\n str: absl_flags.DEFINE_string,\n 'enum': absl_flags.DEFINE_enum }\n\n for name, val in self.values.items():\n flag_type, meta_args, meta_kwargs = self.meta[name]\n absl_defs[flag_type](name, val, *meta_args, **meta_kwargs)\n\n def complete_absl_config(self, absl_flags):\n for name, _ in self.values.items():\n self.update(name, getattr(absl_flags.FLAGS, name))\n\n def parse_flags_with_absl(self):\n global already_configured_with_absl\n if not already_configured_with_absl:\n import absl.flags\n self.config_with_absl()\n absl.flags.FLAGS(sys.argv)\n already_configured_with_absl = True\n\n\nclass NameSpace(object):\n def __init__(self, getter):\n self._getter = getter\n\n def __getattr__(self, name):\n return self._getter(name)\n\n\nconfig = Config()\nflags = config\nalready_configured_with_absl = False\n", "path": "jax/config.py"}], "after_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport sys\n\n\nclass Config(object):\n def __init__(self):\n self.values = {}\n self.meta = {}\n self.FLAGS = NameSpace(self.read)\n self.use_absl = False\n\n def update(self, name, val):\n self.check_exists(name)\n if name not in self.values:\n raise Exception(\"Unrecognized config option: {}\".format(name))\n self.values[name] = val\n\n def read(self, name):\n if self.use_absl:\n return getattr(self.absl_flags.FLAGS, name)\n else:\n self.check_exists(name)\n return self.values[name]\n\n def add_option(self, name, default, opt_type, meta_args, meta_kwargs):\n if name in self.values:\n raise Exception(\"Config option {} already defined\".format(name))\n self.values[name] = default\n self.meta[name] = (opt_type, meta_args, meta_kwargs)\n\n def check_exists(self, name):\n if name not in self.values:\n raise Exception(\"Unrecognized config option: {}\".format(name))\n\n def DEFINE_bool(self, name, default, *args, **kwargs):\n self.add_option(name, default, bool, args, kwargs)\n\n def DEFINE_integer(self, name, default, *args, **kwargs):\n self.add_option(name, default, int, args, kwargs)\n\n def DEFINE_string(self, name, default, *args, **kwargs):\n self.add_option(name, default, str, args, kwargs)\n\n def 
DEFINE_enum(self, name, default, *args, **kwargs):\n self.add_option(name, default, 'enum', args, kwargs)\n\n def config_with_absl(self):\n # Run this before calling `app.run(main)` etc\n import absl.flags as absl_FLAGS\n from absl import app, flags as absl_flags\n\n self.use_absl = True\n self.absl_flags = absl_flags\n absl_defs = { bool: absl_flags.DEFINE_bool,\n int: absl_flags.DEFINE_integer,\n str: absl_flags.DEFINE_string,\n 'enum': absl_flags.DEFINE_enum }\n\n for name, val in self.values.items():\n flag_type, meta_args, meta_kwargs = self.meta[name]\n absl_defs[flag_type](name, val, *meta_args, **meta_kwargs)\n\n app.call_after_init(lambda: self.complete_absl_config(absl_flags))\n\n def complete_absl_config(self, absl_flags):\n for name, _ in self.values.items():\n self.update(name, getattr(absl_flags.FLAGS, name))\n\n def parse_flags_with_absl(self):\n global already_configured_with_absl\n if not already_configured_with_absl:\n import absl.flags\n self.config_with_absl()\n absl.flags.FLAGS(sys.argv)\n self.complete_absl_config(absl.flags)\n already_configured_with_absl = True\n\n\nclass NameSpace(object):\n def __init__(self, getter):\n self._getter = getter\n\n def __getattr__(self, name):\n return self._getter(name)\n\n\nconfig = Config()\nflags = config\nalready_configured_with_absl = False\n", "path": "jax/config.py"}]}
| 1,641 | 180 |
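Editorial note on the record above (gh_patches_debug_15427): the golden diff keeps `config.values` in sync with absl's parsed flags by registering `complete_absl_config` through `app.call_after_init` and by calling it again after the manual `absl.flags.FLAGS(sys.argv)` parse. The core synchronisation step, reduced to a standalone sketch (simplified; the real method lives on the `Config` class):

```python
# Illustrative sketch only: copy parsed absl flag values back into the config dict.
def complete_absl_config(config_values, absl_flags):
    for name in config_values:
        # After parsing, absl holds the command-line value; mirror it locally so
        # reading config.values agrees with FLAGS.<name>.
        config_values[name] = getattr(absl_flags.FLAGS, name)
```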
gh_patches_debug_15851
|
rasdani/github-patches
|
git_diff
|
lk-geimfari__mimesis-875
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add pricetag support
# Feature request
## Thesis
Create a localized pricetag:
```
>>> mimesis.Business().pricetag()
"$ 12.34"
>>> mimesis.Business("de").pricetag(minimum=1000, maximum=1500)
"1.234,56 €"
>>> mimesis.Business("cs").pricetag()
"75,20 Kč" # rounded to 0.1
```
with defaults minimum=0.01 and maximum=1000.00 as a proposal for the default provider. Localized providers would define their own values, because of huge differences between the used values, such as USD/EUR/GBP going down to 0.01, and some minor currencies using millions as base.
## Reasoning
This is very locale dependent. Currency is already in the `CURRENCY_SYMBOLS`, but what would be the best place for localized number and price formatting as well as the localized defaults for minimum/maximum?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mimesis/providers/business.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 """Business data provider."""
4
5 from mimesis.data import (
6 CRYPTOCURRENCY_ISO_CODES,
7 CRYPTOCURRENCY_SYMBOLS,
8 CURRENCY_ISO_CODES,
9 CURRENCY_SYMBOLS,
10 )
11 from mimesis.providers.base import BaseDataProvider
12
13 __all__ = ['Business']
14
15
16 class Business(BaseDataProvider):
17 """Class for generating data for business."""
18
19 def __init__(self, *args, **kwargs):
20 """Initialize attributes.
21
22 :param locale: Current locale.
23 """
24 super().__init__(*args, **kwargs)
25 self._datafile = 'business.json'
26 self.pull(self._datafile)
27
28 class Meta:
29 """Class for metadata."""
30
31 name = 'business'
32
33 def company(self) -> str:
34 """Get a random company name.
35
36 :return: Company name.
37 """
38 return self.random.choice(self._data['company']['name'])
39
40 def company_type(self, abbr: bool = False) -> str:
41 """Get a random type of business entity.
42
43 :param abbr: Abbreviated company type.
44 :return: Types of business entity.
45 """
46 key = 'abbr' if abbr else 'title'
47 return self.random.choice(
48 self._data['company']['type'][key],
49 )
50
51 def copyright(self) -> str: # noqa: A003
52 """Generate a random copyright.
53
54 :return: Copyright of company.
55 """
56 return '© {}, {}'.format(
57 self.company(),
58 self.company_type(abbr=True),
59 )
60
61 def currency_iso_code(self, allow_random: bool = False) -> str:
62 """Get code of the currency for current locale.
63
64 :param allow_random: Get a random ISO code.
65 :return: Currency code.
66 """
67 if allow_random:
68 return self.random.choice(CURRENCY_ISO_CODES)
69 else:
70 return self._data['currency-code']
71
72 def cryptocurrency_iso_code(self) -> str:
73 """Get symbol of random cryptocurrency.
74
75 :return: Symbol of cryptocurrency.
76 """
77 return self.random.choice(CRYPTOCURRENCY_ISO_CODES)
78
79 def currency_symbol(self):
80 """Get a currency symbol for current locale.
81
82 :return: Currency symbol.
83 """
84 return CURRENCY_SYMBOLS[self.locale]
85
86 def cryptocurrency_symbol(self) -> str:
87 """Get a cryptocurrency symbol.
88
89 :return: Symbol of cryptocurrency.
90 """
91 return self.random.choice(CRYPTOCURRENCY_SYMBOLS)
92
93 def price(self, minimum: float = 10.00,
94 maximum: float = 1000.00) -> str:
95 """Generate a random price.
96
97 :param minimum: Max value of price.
98 :param maximum: Min value of price.
99 :return: Price.
100 """
101 price = self.random.uniform(minimum, maximum, precision=2)
102 return '{0} {1}'.format(price, self.currency_symbol())
103
104 def price_in_btc(self, minimum: float = 0, maximum: float = 2) -> str:
105 """Generate random price in BTC.
106
107 :param minimum: Minimum value of price.
108 :param maximum: Maximum value of price.
109 :return: Price in BTC.
110 """
111 return '{} BTC'.format(
112 self.random.uniform(
113 minimum,
114 maximum,
115 precision=7,
116 ),
117 )
118
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/mimesis/providers/business.py b/mimesis/providers/business.py
--- a/mimesis/providers/business.py
+++ b/mimesis/providers/business.py
@@ -98,8 +98,19 @@
:param maximum: Min value of price.
:return: Price.
"""
- price = self.random.uniform(minimum, maximum, precision=2)
- return '{0} {1}'.format(price, self.currency_symbol())
+ price_format = self._data['price-format']
+ numeric_frac_digits = self._data['numeric-frac-digits']
+ delims = {
+ '.': self._data['numeric-decimal'],
+ ',': self._data['numeric-thousands'],
+ }
+
+ value = self.random.uniform(minimum, maximum)
+ price = '{:,.{}f}'.format(value, numeric_frac_digits)
+
+ price = ''.join(delims.get(char, char) for char in price)
+
+ return price_format.replace('#', price)
def price_in_btc(self, minimum: float = 0, maximum: float = 2) -> str:
"""Generate random price in BTC.
|
{"golden_diff": "diff --git a/mimesis/providers/business.py b/mimesis/providers/business.py\n--- a/mimesis/providers/business.py\n+++ b/mimesis/providers/business.py\n@@ -98,8 +98,19 @@\n :param maximum: Min value of price.\n :return: Price.\n \"\"\"\n- price = self.random.uniform(minimum, maximum, precision=2)\n- return '{0} {1}'.format(price, self.currency_symbol())\n+ price_format = self._data['price-format']\n+ numeric_frac_digits = self._data['numeric-frac-digits']\n+ delims = {\n+ '.': self._data['numeric-decimal'],\n+ ',': self._data['numeric-thousands'],\n+ }\n+\n+ value = self.random.uniform(minimum, maximum)\n+ price = '{:,.{}f}'.format(value, numeric_frac_digits)\n+\n+ price = ''.join(delims.get(char, char) for char in price)\n+\n+ return price_format.replace('#', price)\n \n def price_in_btc(self, minimum: float = 0, maximum: float = 2) -> str:\n \"\"\"Generate random price in BTC.\n", "issue": "Add pricetag support\n# Feature request\r\n## Thesis\r\n\r\nCreate a localized pricetag:\r\n\r\n```\r\n>>> mimesis.Business().pricetag()\r\n\"$ 12.34\"\r\n>>> mimesis.Business(\"de\").pricetag(minimum=1000, maximum=1500)\r\n\"1.234,56 \u20ac\"\r\n>>> mimesis.Business(\"cs\").pricetag()\r\n\"75,20 K\u010d\" # rounded to 0.1\r\n```\r\nwith defaults minimum=0.01 and maximum=1000.00 as a proposal for the default provider. Localized providers would define their own values, because of huge differences between the used values, such as USD/EUR/GBP going down to 0.01, and some minor currencies using millions as base.\r\n\r\n## Reasoning\r\n\r\nThis is very locale dependent. Currency is already in the `CURRENCY_SYMBOLS`, but what would be the best place for localized number and price formatting as well as the localized defaults for minimum/maximum?\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Business data provider.\"\"\"\n\nfrom mimesis.data import (\n CRYPTOCURRENCY_ISO_CODES,\n CRYPTOCURRENCY_SYMBOLS,\n CURRENCY_ISO_CODES,\n CURRENCY_SYMBOLS,\n)\nfrom mimesis.providers.base import BaseDataProvider\n\n__all__ = ['Business']\n\n\nclass Business(BaseDataProvider):\n \"\"\"Class for generating data for business.\"\"\"\n\n def __init__(self, *args, **kwargs):\n \"\"\"Initialize attributes.\n\n :param locale: Current locale.\n \"\"\"\n super().__init__(*args, **kwargs)\n self._datafile = 'business.json'\n self.pull(self._datafile)\n\n class Meta:\n \"\"\"Class for metadata.\"\"\"\n\n name = 'business'\n\n def company(self) -> str:\n \"\"\"Get a random company name.\n\n :return: Company name.\n \"\"\"\n return self.random.choice(self._data['company']['name'])\n\n def company_type(self, abbr: bool = False) -> str:\n \"\"\"Get a random type of business entity.\n\n :param abbr: Abbreviated company type.\n :return: Types of business entity.\n \"\"\"\n key = 'abbr' if abbr else 'title'\n return self.random.choice(\n self._data['company']['type'][key],\n )\n\n def copyright(self) -> str: # noqa: A003\n \"\"\"Generate a random copyright.\n\n :return: Copyright of company.\n \"\"\"\n return '\u00a9 {}, {}'.format(\n self.company(),\n self.company_type(abbr=True),\n )\n\n def currency_iso_code(self, allow_random: bool = False) -> str:\n \"\"\"Get code of the currency for current locale.\n\n :param allow_random: Get a random ISO code.\n :return: Currency code.\n \"\"\"\n if allow_random:\n return self.random.choice(CURRENCY_ISO_CODES)\n else:\n return self._data['currency-code']\n\n def cryptocurrency_iso_code(self) -> str:\n \"\"\"Get symbol of random cryptocurrency.\n\n :return: Symbol 
of cryptocurrency.\n \"\"\"\n return self.random.choice(CRYPTOCURRENCY_ISO_CODES)\n\n def currency_symbol(self):\n \"\"\"Get a currency symbol for current locale.\n\n :return: Currency symbol.\n \"\"\"\n return CURRENCY_SYMBOLS[self.locale]\n\n def cryptocurrency_symbol(self) -> str:\n \"\"\"Get a cryptocurrency symbol.\n\n :return: Symbol of cryptocurrency.\n \"\"\"\n return self.random.choice(CRYPTOCURRENCY_SYMBOLS)\n\n def price(self, minimum: float = 10.00,\n maximum: float = 1000.00) -> str:\n \"\"\"Generate a random price.\n\n :param minimum: Max value of price.\n :param maximum: Min value of price.\n :return: Price.\n \"\"\"\n price = self.random.uniform(minimum, maximum, precision=2)\n return '{0} {1}'.format(price, self.currency_symbol())\n\n def price_in_btc(self, minimum: float = 0, maximum: float = 2) -> str:\n \"\"\"Generate random price in BTC.\n\n :param minimum: Minimum value of price.\n :param maximum: Maximum value of price.\n :return: Price in BTC.\n \"\"\"\n return '{} BTC'.format(\n self.random.uniform(\n minimum,\n maximum,\n precision=7,\n ),\n )\n", "path": "mimesis/providers/business.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Business data provider.\"\"\"\n\nfrom mimesis.data import (\n CRYPTOCURRENCY_ISO_CODES,\n CRYPTOCURRENCY_SYMBOLS,\n CURRENCY_ISO_CODES,\n CURRENCY_SYMBOLS,\n)\nfrom mimesis.providers.base import BaseDataProvider\n\n__all__ = ['Business']\n\n\nclass Business(BaseDataProvider):\n \"\"\"Class for generating data for business.\"\"\"\n\n def __init__(self, *args, **kwargs):\n \"\"\"Initialize attributes.\n\n :param locale: Current locale.\n \"\"\"\n super().__init__(*args, **kwargs)\n self._datafile = 'business.json'\n self.pull(self._datafile)\n\n class Meta:\n \"\"\"Class for metadata.\"\"\"\n\n name = 'business'\n\n def company(self) -> str:\n \"\"\"Get a random company name.\n\n :return: Company name.\n \"\"\"\n return self.random.choice(self._data['company']['name'])\n\n def company_type(self, abbr: bool = False) -> str:\n \"\"\"Get a random type of business entity.\n\n :param abbr: Abbreviated company type.\n :return: Types of business entity.\n \"\"\"\n key = 'abbr' if abbr else 'title'\n return self.random.choice(\n self._data['company']['type'][key],\n )\n\n def copyright(self) -> str: # noqa: A003\n \"\"\"Generate a random copyright.\n\n :return: Copyright of company.\n \"\"\"\n return '\u00a9 {}, {}'.format(\n self.company(),\n self.company_type(abbr=True),\n )\n\n def currency_iso_code(self, allow_random: bool = False) -> str:\n \"\"\"Get code of the currency for current locale.\n\n :param allow_random: Get a random ISO code.\n :return: Currency code.\n \"\"\"\n if allow_random:\n return self.random.choice(CURRENCY_ISO_CODES)\n else:\n return self._data['currency-code']\n\n def cryptocurrency_iso_code(self) -> str:\n \"\"\"Get symbol of random cryptocurrency.\n\n :return: Symbol of cryptocurrency.\n \"\"\"\n return self.random.choice(CRYPTOCURRENCY_ISO_CODES)\n\n def currency_symbol(self):\n \"\"\"Get a currency symbol for current locale.\n\n :return: Currency symbol.\n \"\"\"\n return CURRENCY_SYMBOLS[self.locale]\n\n def cryptocurrency_symbol(self) -> str:\n \"\"\"Get a cryptocurrency symbol.\n\n :return: Symbol of cryptocurrency.\n \"\"\"\n return self.random.choice(CRYPTOCURRENCY_SYMBOLS)\n\n def price(self, minimum: float = 10.00,\n maximum: float = 1000.00) -> str:\n \"\"\"Generate a random price.\n\n :param minimum: Max value of price.\n :param maximum: Min value of price.\n :return: Price.\n \"\"\"\n 
price_format = self._data['price-format']\n numeric_frac_digits = self._data['numeric-frac-digits']\n delims = {\n '.': self._data['numeric-decimal'],\n ',': self._data['numeric-thousands'],\n }\n\n value = self.random.uniform(minimum, maximum)\n price = '{:,.{}f}'.format(value, numeric_frac_digits)\n\n price = ''.join(delims.get(char, char) for char in price)\n\n return price_format.replace('#', price)\n\n def price_in_btc(self, minimum: float = 0, maximum: float = 2) -> str:\n \"\"\"Generate random price in BTC.\n\n :param minimum: Minimum value of price.\n :param maximum: Maximum value of price.\n :return: Price in BTC.\n \"\"\"\n return '{} BTC'.format(\n self.random.uniform(\n minimum,\n maximum,\n precision=7,\n ),\n )\n", "path": "mimesis/providers/business.py"}]}
| 1,459 | 254 |
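Editorial note on the record above (gh_patches_debug_15851): the golden diff builds the localized price from three pieces of locale data (`price-format`, `numeric-frac-digits`, and the decimal/thousands separators) stored in `business.json`. The formatting step on its own, with illustrative German-style defaults filled in (the real values come from the locale file):

```python
# Illustrative sketch only; the default arguments are hypothetical locale values.
def format_price(value, price_format="# €", frac_digits=2,
                 decimal=",", thousands="."):
    delims = {".": decimal, ",": thousands}
    price = "{:,.{}f}".format(value, frac_digits)        # 1234.56 -> "1,234.56"
    price = "".join(delims.get(ch, ch) for ch in price)  # -> "1.234,56"
    return price_format.replace("#", price)              # -> "1.234,56 €"


assert format_price(1234.56) == "1.234,56 €"
```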
gh_patches_debug_26099
|
rasdani/github-patches
|
git_diff
|
pytorch__ignite-1044
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
gpu_info crashes because it cannot parse "N/A"
## 🐛 Bug description
When trying to use gpu_info, it throws:
```
File "/home/blackhc/anaconda3/envs/hello-mnist/lib/python3.7/site-packages/ignite/contrib/metrics/gpu_info.py", line 91, in completed
engine.state.metrics[util_name] = int(util_report['gpu_util'])
ValueError: invalid literal for int() with base 10: 'N/A'
```
There is error handling code above it, but it does not catch the issue ("N/A" is returned).
I assume my GPU does not support it. However, it would be neat to have a graceful failure mode.
Thank you!
Andreas
## Environment
torch 1.5 on a GTX 780 TI (source)
ignite 0.3.0 (conda)
pynvml 8.0.4 (pip)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ignite/contrib/metrics/gpu_info.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 import warnings
3
4 import torch
5
6 from ignite.engine import Events
7 from ignite.metrics import Metric
8
9
10 class GpuInfo(Metric):
11 """Provides GPU information: a) used memory percentage, b) gpu utilization percentage values as Metric
12 on each iterations.
13
14 Examples:
15
16 .. code-block:: python
17
18 # Default GPU measurements
19 GpuInfo().attach(trainer, name='gpu') # metric names are 'gpu:X mem(%)', 'gpu:X util(%)'
20
21 # Logging with TQDM
22 ProgressBar(persist=True).attach(trainer, metric_names=['gpu:0 mem(%)', 'gpu:0 util(%)'])
23 # Progress bar will looks like
24 # Epoch [2/10]: [12/24] 50%|█████ , gpu:0 mem(%)=79, gpu:0 util(%)=59 [00:17<1:23]
25
26 # Logging with Tensorboard
27 tb_logger.attach(trainer,
28 log_handler=OutputHandler(tag="training", metric_names='all'),
29 event_name=Events.ITERATION_COMPLETED)
30 """
31
32 def __init__(self):
33 try:
34 import pynvml
35 except ImportError:
36 raise RuntimeError(
37 "This contrib module requires pynvml to be installed. "
38 "Please install it with command: \n pip install pynvml"
39 )
40 # Let's check available devices
41 if not torch.cuda.is_available():
42 raise RuntimeError("This contrib module requires available GPU")
43
44 from pynvml.smi import nvidia_smi
45
46 # Let it fail if no libnvidia drivers or NMVL library found
47 self.nvsmi = nvidia_smi.getInstance()
48 super(GpuInfo, self).__init__()
49
50 def reset(self):
51 pass
52
53 def update(self, output):
54 pass
55
56 def compute(self):
57 data = self.nvsmi.DeviceQuery("memory.used, memory.total, utilization.gpu")
58 if len(data) == 0 or ("gpu" not in data):
59 warnings.warn("No GPU information available")
60 return []
61 return data["gpu"]
62
63 def completed(self, engine, name):
64 data = self.compute()
65 if len(data) < 1:
66 warnings.warn("No GPU information available")
67 return
68
69 for i, data_by_rank in enumerate(data):
70 mem_name = "{}:{} mem(%)".format(name, i)
71
72 if "fb_memory_usage" not in data_by_rank:
73 warnings.warn("No GPU memory usage information available in {}".format(data_by_rank))
74 continue
75 mem_report = data_by_rank["fb_memory_usage"]
76 if not ("used" in mem_report and "total" in mem_report):
77 warnings.warn(
78 "GPU memory usage information does not provide used/total "
79 "memory consumption information in {}".format(mem_report)
80 )
81 continue
82
83 engine.state.metrics[mem_name] = int(mem_report["used"] * 100.0 / mem_report["total"])
84
85 for i, data_by_rank in enumerate(data):
86 util_name = "{}:{} util(%)".format(name, i)
87 if "utilization" not in data_by_rank:
88 warnings.warn("No GPU utilization information available in {}".format(data_by_rank))
89 continue
90 util_report = data_by_rank["utilization"]
91 if not ("gpu_util" in util_report):
92 warnings.warn(
93 "GPU utilization information does not provide 'gpu_util' information in " "{}".format(util_report)
94 )
95 continue
96
97 engine.state.metrics[util_name] = int(util_report["gpu_util"])
98
99 def attach(self, engine, name="gpu", event_name=Events.ITERATION_COMPLETED):
100 engine.add_event_handler(event_name, self.completed, name)
101
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ignite/contrib/metrics/gpu_info.py b/ignite/contrib/metrics/gpu_info.py
--- a/ignite/contrib/metrics/gpu_info.py
+++ b/ignite/contrib/metrics/gpu_info.py
@@ -11,6 +11,10 @@
"""Provides GPU information: a) used memory percentage, b) gpu utilization percentage values as Metric
on each iterations.
+ .. Note ::
+
+ In case if gpu utilization reports "N/A" on a given GPU, corresponding metric value is not set.
+
Examples:
.. code-block:: python
@@ -90,11 +94,14 @@
util_report = data_by_rank["utilization"]
if not ("gpu_util" in util_report):
warnings.warn(
- "GPU utilization information does not provide 'gpu_util' information in " "{}".format(util_report)
+ "GPU utilization information does not provide 'gpu_util' information in {}".format(util_report)
)
continue
-
- engine.state.metrics[util_name] = int(util_report["gpu_util"])
+ try:
+ engine.state.metrics[util_name] = int(util_report["gpu_util"])
+ except ValueError:
+ # Do not set GPU utilization information
+ pass
def attach(self, engine, name="gpu", event_name=Events.ITERATION_COMPLETED):
engine.add_event_handler(event_name, self.completed, name)
|
{"golden_diff": "diff --git a/ignite/contrib/metrics/gpu_info.py b/ignite/contrib/metrics/gpu_info.py\n--- a/ignite/contrib/metrics/gpu_info.py\n+++ b/ignite/contrib/metrics/gpu_info.py\n@@ -11,6 +11,10 @@\n \"\"\"Provides GPU information: a) used memory percentage, b) gpu utilization percentage values as Metric\n on each iterations.\n \n+ .. Note ::\n+\n+ In case if gpu utilization reports \"N/A\" on a given GPU, corresponding metric value is not set.\n+\n Examples:\n \n .. code-block:: python\n@@ -90,11 +94,14 @@\n util_report = data_by_rank[\"utilization\"]\n if not (\"gpu_util\" in util_report):\n warnings.warn(\n- \"GPU utilization information does not provide 'gpu_util' information in \" \"{}\".format(util_report)\n+ \"GPU utilization information does not provide 'gpu_util' information in {}\".format(util_report)\n )\n continue\n-\n- engine.state.metrics[util_name] = int(util_report[\"gpu_util\"])\n+ try:\n+ engine.state.metrics[util_name] = int(util_report[\"gpu_util\"])\n+ except ValueError:\n+ # Do not set GPU utilization information\n+ pass\n \n def attach(self, engine, name=\"gpu\", event_name=Events.ITERATION_COMPLETED):\n engine.add_event_handler(event_name, self.completed, name)\n", "issue": "gpu_info crashes because it cannot parse \"N/A\"\n## \ud83d\udc1b Bug description\r\n\r\nWhen trying to use gpu_info, it throws:\r\n\r\n```\r\n File \"/home/blackhc/anaconda3/envs/hello-mnist/lib/python3.7/site-packages/ignite/contrib/metrics/gpu_info.py\", line 91, in completed\r\n engine.state.metrics[util_name] = int(util_report['gpu_util'])\r\nValueError: invalid literal for int() with base 10: 'N/A'\r\n```\r\n\r\nThere is error handling code above it, but it does not catch the issue (\"N/A\" is returned).\r\n\r\nI assume my GPU does not support it. However, it would be neat to have a graceful failure mode.\r\n\r\nThank you!\r\nAndreas\r\n\r\n## Environment\r\n\r\ntorch 1.5 on a GTX 780 TI (source)\r\nignite 0.3.0 (conda)\r\npynvml 8.0.4 (pip)\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nimport warnings\n\nimport torch\n\nfrom ignite.engine import Events\nfrom ignite.metrics import Metric\n\n\nclass GpuInfo(Metric):\n \"\"\"Provides GPU information: a) used memory percentage, b) gpu utilization percentage values as Metric\n on each iterations.\n\n Examples:\n\n .. code-block:: python\n\n # Default GPU measurements\n GpuInfo().attach(trainer, name='gpu') # metric names are 'gpu:X mem(%)', 'gpu:X util(%)'\n\n # Logging with TQDM\n ProgressBar(persist=True).attach(trainer, metric_names=['gpu:0 mem(%)', 'gpu:0 util(%)'])\n # Progress bar will looks like\n # Epoch [2/10]: [12/24] 50%|\u2588\u2588\u2588\u2588\u2588 , gpu:0 mem(%)=79, gpu:0 util(%)=59 [00:17<1:23]\n\n # Logging with Tensorboard\n tb_logger.attach(trainer,\n log_handler=OutputHandler(tag=\"training\", metric_names='all'),\n event_name=Events.ITERATION_COMPLETED)\n \"\"\"\n\n def __init__(self):\n try:\n import pynvml\n except ImportError:\n raise RuntimeError(\n \"This contrib module requires pynvml to be installed. 
\"\n \"Please install it with command: \\n pip install pynvml\"\n )\n # Let's check available devices\n if not torch.cuda.is_available():\n raise RuntimeError(\"This contrib module requires available GPU\")\n\n from pynvml.smi import nvidia_smi\n\n # Let it fail if no libnvidia drivers or NMVL library found\n self.nvsmi = nvidia_smi.getInstance()\n super(GpuInfo, self).__init__()\n\n def reset(self):\n pass\n\n def update(self, output):\n pass\n\n def compute(self):\n data = self.nvsmi.DeviceQuery(\"memory.used, memory.total, utilization.gpu\")\n if len(data) == 0 or (\"gpu\" not in data):\n warnings.warn(\"No GPU information available\")\n return []\n return data[\"gpu\"]\n\n def completed(self, engine, name):\n data = self.compute()\n if len(data) < 1:\n warnings.warn(\"No GPU information available\")\n return\n\n for i, data_by_rank in enumerate(data):\n mem_name = \"{}:{} mem(%)\".format(name, i)\n\n if \"fb_memory_usage\" not in data_by_rank:\n warnings.warn(\"No GPU memory usage information available in {}\".format(data_by_rank))\n continue\n mem_report = data_by_rank[\"fb_memory_usage\"]\n if not (\"used\" in mem_report and \"total\" in mem_report):\n warnings.warn(\n \"GPU memory usage information does not provide used/total \"\n \"memory consumption information in {}\".format(mem_report)\n )\n continue\n\n engine.state.metrics[mem_name] = int(mem_report[\"used\"] * 100.0 / mem_report[\"total\"])\n\n for i, data_by_rank in enumerate(data):\n util_name = \"{}:{} util(%)\".format(name, i)\n if \"utilization\" not in data_by_rank:\n warnings.warn(\"No GPU utilization information available in {}\".format(data_by_rank))\n continue\n util_report = data_by_rank[\"utilization\"]\n if not (\"gpu_util\" in util_report):\n warnings.warn(\n \"GPU utilization information does not provide 'gpu_util' information in \" \"{}\".format(util_report)\n )\n continue\n\n engine.state.metrics[util_name] = int(util_report[\"gpu_util\"])\n\n def attach(self, engine, name=\"gpu\", event_name=Events.ITERATION_COMPLETED):\n engine.add_event_handler(event_name, self.completed, name)\n", "path": "ignite/contrib/metrics/gpu_info.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nimport warnings\n\nimport torch\n\nfrom ignite.engine import Events\nfrom ignite.metrics import Metric\n\n\nclass GpuInfo(Metric):\n \"\"\"Provides GPU information: a) used memory percentage, b) gpu utilization percentage values as Metric\n on each iterations.\n\n .. Note ::\n\n In case if gpu utilization reports \"N/A\" on a given GPU, corresponding metric value is not set.\n\n Examples:\n\n .. code-block:: python\n\n # Default GPU measurements\n GpuInfo().attach(trainer, name='gpu') # metric names are 'gpu:X mem(%)', 'gpu:X util(%)'\n\n # Logging with TQDM\n ProgressBar(persist=True).attach(trainer, metric_names=['gpu:0 mem(%)', 'gpu:0 util(%)'])\n # Progress bar will looks like\n # Epoch [2/10]: [12/24] 50%|\u2588\u2588\u2588\u2588\u2588 , gpu:0 mem(%)=79, gpu:0 util(%)=59 [00:17<1:23]\n\n # Logging with Tensorboard\n tb_logger.attach(trainer,\n log_handler=OutputHandler(tag=\"training\", metric_names='all'),\n event_name=Events.ITERATION_COMPLETED)\n \"\"\"\n\n def __init__(self):\n try:\n import pynvml\n except ImportError:\n raise RuntimeError(\n \"This contrib module requires pynvml to be installed. 
\"\n \"Please install it with command: \\n pip install pynvml\"\n )\n # Let's check available devices\n if not torch.cuda.is_available():\n raise RuntimeError(\"This contrib module requires available GPU\")\n\n from pynvml.smi import nvidia_smi\n\n # Let it fail if no libnvidia drivers or NMVL library found\n self.nvsmi = nvidia_smi.getInstance()\n super(GpuInfo, self).__init__()\n\n def reset(self):\n pass\n\n def update(self, output):\n pass\n\n def compute(self):\n data = self.nvsmi.DeviceQuery(\"memory.used, memory.total, utilization.gpu\")\n if len(data) == 0 or (\"gpu\" not in data):\n warnings.warn(\"No GPU information available\")\n return []\n return data[\"gpu\"]\n\n def completed(self, engine, name):\n data = self.compute()\n if len(data) < 1:\n warnings.warn(\"No GPU information available\")\n return\n\n for i, data_by_rank in enumerate(data):\n mem_name = \"{}:{} mem(%)\".format(name, i)\n\n if \"fb_memory_usage\" not in data_by_rank:\n warnings.warn(\"No GPU memory usage information available in {}\".format(data_by_rank))\n continue\n mem_report = data_by_rank[\"fb_memory_usage\"]\n if not (\"used\" in mem_report and \"total\" in mem_report):\n warnings.warn(\n \"GPU memory usage information does not provide used/total \"\n \"memory consumption information in {}\".format(mem_report)\n )\n continue\n\n engine.state.metrics[mem_name] = int(mem_report[\"used\"] * 100.0 / mem_report[\"total\"])\n\n for i, data_by_rank in enumerate(data):\n util_name = \"{}:{} util(%)\".format(name, i)\n if \"utilization\" not in data_by_rank:\n warnings.warn(\"No GPU utilization information available in {}\".format(data_by_rank))\n continue\n util_report = data_by_rank[\"utilization\"]\n if not (\"gpu_util\" in util_report):\n warnings.warn(\n \"GPU utilization information does not provide 'gpu_util' information in {}\".format(util_report)\n )\n continue\n try:\n engine.state.metrics[util_name] = int(util_report[\"gpu_util\"])\n except ValueError:\n # Do not set GPU utilization information\n pass\n\n def attach(self, engine, name=\"gpu\", event_name=Events.ITERATION_COMPLETED):\n engine.add_event_handler(event_name, self.completed, name)\n", "path": "ignite/contrib/metrics/gpu_info.py"}]}
| 1,491 | 312 |
gh_patches_debug_30950 | rasdani/github-patches | git_diff | StackStorm__st2-2508 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
linux.wait_for_ssh action improvements
Currently `linux.wait_for_ssh` action (https://github.com/StackStorm/st2/blob/master/contrib/linux/actions/wait_for_ssh.yaml) only supports authenticating with RSA SSH key.
There are multiple ways we can improve this action and make it more generic.
- Support for other key types
- Support for password based authentication
- Refactor the action to utilize the new Paramiko SSH client
- Default to using system user (stanley) username and key file. Ideally key file location should be read from the config
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `contrib/linux/actions/wait_for_ssh.py`
Content:
```
1 #!/usr/bin/env python
2
3 import time
4
5 import paramiko
6
7 from st2actions.runners.pythonrunner import Action
8
9
10 class BaseAction(Action):
11 def run(self, keyfile, username, hostname, ssh_timeout, retries):
12 key = paramiko.RSAKey.from_private_key_file(keyfile)
13 client = paramiko.SSHClient()
14 client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
15
16 for index in range(retries):
17 attempt = index + 1
18
19 try:
20 self.logger.debug('SSH connection attempt: %s' % (attempt))
21 client.connect(hostname=hostname, username=username, pkey=key)
22 return True
23 except Exception as e:
24 self.logger.info('Attempt %s failed (%s), sleeping...' % (attempt, str(e)))
25 time.sleep(ssh_timeout)
26
27 raise Exception('Exceeded max retries (%s)' % (retries))
28
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/contrib/linux/actions/wait_for_ssh.py b/contrib/linux/actions/wait_for_ssh.py
--- a/contrib/linux/actions/wait_for_ssh.py
+++ b/contrib/linux/actions/wait_for_ssh.py
@@ -2,26 +2,36 @@
import time
-import paramiko
+from oslo_config import cfg
from st2actions.runners.pythonrunner import Action
+from st2actions.runners.ssh.paramiko_ssh import ParamikoSSHClient
class BaseAction(Action):
- def run(self, keyfile, username, hostname, ssh_timeout, retries):
- key = paramiko.RSAKey.from_private_key_file(keyfile)
- client = paramiko.SSHClient()
- client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+ def run(self, hostname, port, username, password=None, keyfile=None, ssh_timeout=5,
+ sleep_delay=20, retries=10):
+ # Note: If neither password nor key file is provided, we try to use system user
+ # key file
+ if not password and not keyfile:
+ keyfile = cfg.CONF.system_user.ssh_key_file
+ self.logger.info('Neither "password" nor "keyfile" parameter provided, '
+ 'defaulting to using "%s" key file' % (keyfile))
+
+ client = ParamikoSSHClient(hostname=hostname, port=port, username=username,
+ password=password, key_files=keyfile,
+ timeout=ssh_timeout)
for index in range(retries):
attempt = index + 1
try:
self.logger.debug('SSH connection attempt: %s' % (attempt))
- client.connect(hostname=hostname, username=username, pkey=key)
+ client.connect()
return True
except Exception as e:
- self.logger.info('Attempt %s failed (%s), sleeping...' % (attempt, str(e)))
- time.sleep(ssh_timeout)
+ self.logger.info('Attempt %s failed (%s), sleeping for %s seconds...' %
+ (attempt, str(e), sleep_delay))
+ time.sleep(sleep_delay)
raise Exception('Exceeded max retries (%s)' % (retries))
|
{"golden_diff": "diff --git a/contrib/linux/actions/wait_for_ssh.py b/contrib/linux/actions/wait_for_ssh.py\n--- a/contrib/linux/actions/wait_for_ssh.py\n+++ b/contrib/linux/actions/wait_for_ssh.py\n@@ -2,26 +2,36 @@\n \n import time\n \n-import paramiko\n+from oslo_config import cfg\n \n from st2actions.runners.pythonrunner import Action\n+from st2actions.runners.ssh.paramiko_ssh import ParamikoSSHClient\n \n \n class BaseAction(Action):\n- def run(self, keyfile, username, hostname, ssh_timeout, retries):\n- key = paramiko.RSAKey.from_private_key_file(keyfile)\n- client = paramiko.SSHClient()\n- client.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n+ def run(self, hostname, port, username, password=None, keyfile=None, ssh_timeout=5,\n+ sleep_delay=20, retries=10):\n+ # Note: If neither password nor key file is provided, we try to use system user\n+ # key file\n+ if not password and not keyfile:\n+ keyfile = cfg.CONF.system_user.ssh_key_file\n+ self.logger.info('Neither \"password\" nor \"keyfile\" parameter provided, '\n+ 'defaulting to using \"%s\" key file' % (keyfile))\n+\n+ client = ParamikoSSHClient(hostname=hostname, port=port, username=username,\n+ password=password, key_files=keyfile,\n+ timeout=ssh_timeout)\n \n for index in range(retries):\n attempt = index + 1\n \n try:\n self.logger.debug('SSH connection attempt: %s' % (attempt))\n- client.connect(hostname=hostname, username=username, pkey=key)\n+ client.connect()\n return True\n except Exception as e:\n- self.logger.info('Attempt %s failed (%s), sleeping...' % (attempt, str(e)))\n- time.sleep(ssh_timeout)\n+ self.logger.info('Attempt %s failed (%s), sleeping for %s seconds...' %\n+ (attempt, str(e), sleep_delay))\n+ time.sleep(sleep_delay)\n \n raise Exception('Exceeded max retries (%s)' % (retries))\n", "issue": "linux.wait_for_ssh action improvements\nCurrently `linux.wait_for_ssh` action (https://github.com/StackStorm/st2/blob/master/contrib/linux/actions/wait_for_ssh.yaml) only supports authenticating with RSA SSH key.\n\nThere are multiple ways we can improve this action and make it more generic.\n- Support for other key types\n- Support for password based authentication\n- Refactor the action to utilize the new Paramiko SSH client\n- Default to using system user (stanley) username and key file. Ideally key file location should be read from the config\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\nimport time\n\nimport paramiko\n\nfrom st2actions.runners.pythonrunner import Action\n\n\nclass BaseAction(Action):\n def run(self, keyfile, username, hostname, ssh_timeout, retries):\n key = paramiko.RSAKey.from_private_key_file(keyfile)\n client = paramiko.SSHClient()\n client.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n\n for index in range(retries):\n attempt = index + 1\n\n try:\n self.logger.debug('SSH connection attempt: %s' % (attempt))\n client.connect(hostname=hostname, username=username, pkey=key)\n return True\n except Exception as e:\n self.logger.info('Attempt %s failed (%s), sleeping...' 
% (attempt, str(e)))\n time.sleep(ssh_timeout)\n\n raise Exception('Exceeded max retries (%s)' % (retries))\n", "path": "contrib/linux/actions/wait_for_ssh.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\nimport time\n\nfrom oslo_config import cfg\n\nfrom st2actions.runners.pythonrunner import Action\nfrom st2actions.runners.ssh.paramiko_ssh import ParamikoSSHClient\n\n\nclass BaseAction(Action):\n def run(self, hostname, port, username, password=None, keyfile=None, ssh_timeout=5,\n sleep_delay=20, retries=10):\n # Note: If neither password nor key file is provided, we try to use system user\n # key file\n if not password and not keyfile:\n keyfile = cfg.CONF.system_user.ssh_key_file\n self.logger.info('Neither \"password\" nor \"keyfile\" parameter provided, '\n 'defaulting to using \"%s\" key file' % (keyfile))\n\n client = ParamikoSSHClient(hostname=hostname, port=port, username=username,\n password=password, key_files=keyfile,\n timeout=ssh_timeout)\n\n for index in range(retries):\n attempt = index + 1\n\n try:\n self.logger.debug('SSH connection attempt: %s' % (attempt))\n client.connect()\n return True\n except Exception as e:\n self.logger.info('Attempt %s failed (%s), sleeping for %s seconds...' %\n (attempt, str(e), sleep_delay))\n time.sleep(sleep_delay)\n\n raise Exception('Exceeded max retries (%s)' % (retries))\n", "path": "contrib/linux/actions/wait_for_ssh.py"}]}
| 616 | 477 |
gh_patches_debug_23890 | rasdani/github-patches | git_diff | wagtail__wagtail-8933 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Move "delete" from save draft drop up
We want to remove the delete action from the publishing menu and instead have it in the three dots "more" menu up by the breadcrumbs.
---
Note from @kaedroho: the Delete button should remain available even when the page is locked.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `wagtail/admin/action_menu.py`
Content:
```
1 """Handles rendering of the list of actions in the footer of the page create/edit views."""
2 from django.conf import settings
3 from django.forms import Media
4 from django.template.loader import render_to_string
5 from django.urls import reverse
6 from django.utils.functional import cached_property
7 from django.utils.translation import gettext_lazy as _
8
9 from wagtail import hooks
10 from wagtail.admin.ui.components import Component
11 from wagtail.models import UserPagePermissionsProxy
12
13
14 class ActionMenuItem(Component):
15 """Defines an item in the actions drop-up on the page creation/edit view"""
16
17 order = 100 # default order index if one is not specified on init
18 template_name = "wagtailadmin/pages/action_menu/menu_item.html"
19
20 label = ""
21 name = None
22 classname = ""
23 icon_name = ""
24
25 def __init__(self, order=None):
26 if order is not None:
27 self.order = order
28
29 def get_user_page_permissions_tester(self, context):
30 if "user_page_permissions_tester" in context:
31 return context["user_page_permissions_tester"]
32 return context["user_page_permissions"].for_page(context["page"])
33
34 def is_shown(self, context):
35 """
36 Whether this action should be shown on this request; permission checks etc should go here.
37 By default, actions are shown for unlocked pages, hidden for locked pages
38
39 context = dictionary containing at least:
40 'request' = the current request object
41 'view' = 'create', 'edit' or 'revisions_revert'
42 'page' (if view = 'edit' or 'revisions_revert') = the page being edited
43 'parent_page' (if view = 'create') = the parent page of the page being created
44 'user_page_permissions' = a UserPagePermissionsProxy for the current user, to test permissions against
45 may also contain:
46 'user_page_permissions_tester' = a PagePermissionTester for the current user and page
47 """
48 return (
49 context["view"] == "create"
50 or not self.get_user_page_permissions_tester(context).page_locked()
51 )
52
53 def get_context_data(self, parent_context):
54 """Defines context for the template, overridable to use more data"""
55 context = parent_context.copy()
56 url = self.get_url(parent_context)
57
58 context.update(
59 {
60 "label": self.label,
61 "url": url,
62 "name": self.name,
63 "classname": self.classname,
64 "icon_name": self.icon_name,
65 "request": parent_context["request"],
66 }
67 )
68 return context
69
70 def get_url(self, parent_context):
71 return None
72
73
74 class PublishMenuItem(ActionMenuItem):
75 label = _("Publish")
76 name = "action-publish"
77 template_name = "wagtailadmin/pages/action_menu/publish.html"
78 icon_name = "upload"
79
80 def is_shown(self, context):
81 if context["view"] == "create":
82 return (
83 context["user_page_permissions"]
84 .for_page(context["parent_page"])
85 .can_publish_subpage()
86 )
87 else: # view == 'edit' or 'revisions_revert'
88 perms_tester = self.get_user_page_permissions_tester(context)
89 return not perms_tester.page_locked() and perms_tester.can_publish()
90
91 def get_context_data(self, parent_context):
92 context = super().get_context_data(parent_context)
93 context["is_revision"] = context["view"] == "revisions_revert"
94 return context
95
96
97 class SubmitForModerationMenuItem(ActionMenuItem):
98 label = _("Submit for moderation")
99 name = "action-submit"
100 icon_name = "resubmit"
101
102 def is_shown(self, context):
103 if not getattr(settings, "WAGTAIL_MODERATION_ENABLED", True):
104 return False
105
106 if context["view"] == "create":
107 return context["parent_page"].has_workflow
108
109 if context["view"] == "edit":
110 perms_tester = self.get_user_page_permissions_tester(context)
111 return (
112 perms_tester.can_submit_for_moderation()
113 and not perms_tester.page_locked()
114 )
115 # context == revisions_revert
116 return False
117
118 def get_context_data(self, parent_context):
119 context = super().get_context_data(parent_context)
120 page = context.get("page")
121 workflow_state = page.current_workflow_state if page else None
122 if (
123 workflow_state
124 and workflow_state.status == workflow_state.STATUS_NEEDS_CHANGES
125 ):
126 context["label"] = _("Resubmit to {}").format(
127 workflow_state.current_task_state.task.name
128 )
129 elif page:
130 workflow = page.get_workflow()
131 if workflow:
132 context["label"] = _("Submit to {}").format(workflow.name)
133 return context
134
135
136 class WorkflowMenuItem(ActionMenuItem):
137 template_name = "wagtailadmin/pages/action_menu/workflow_menu_item.html"
138
139 def __init__(self, name, label, launch_modal, *args, **kwargs):
140 self.name = name
141 self.label = label
142 self.launch_modal = launch_modal
143
144 if kwargs.get("icon_name"):
145 self.icon_name = kwargs.pop("icon_name")
146
147 super().__init__(*args, **kwargs)
148
149 def get_context_data(self, parent_context):
150 context = super().get_context_data(parent_context)
151 context["launch_modal"] = self.launch_modal
152 context["current_task_state"] = context["page"].current_workflow_task_state
153 return context
154
155 def is_shown(self, context):
156 if context["view"] == "edit":
157 perms_tester = self.get_user_page_permissions_tester(context)
158 return not perms_tester.page_locked()
159
160
161 class RestartWorkflowMenuItem(ActionMenuItem):
162 label = _("Restart workflow ")
163 name = "action-restart-workflow"
164 classname = "button--icon-flipped"
165 icon_name = "login"
166
167 def is_shown(self, context):
168 if not getattr(settings, "WAGTAIL_MODERATION_ENABLED", True):
169 return False
170 elif context["view"] == "edit":
171 workflow_state = context["page"].current_workflow_state
172 perms_tester = self.get_user_page_permissions_tester(context)
173 return (
174 perms_tester.can_submit_for_moderation()
175 and not perms_tester.page_locked()
176 and workflow_state
177 and workflow_state.user_can_cancel(context["request"].user)
178 )
179 else:
180 return False
181
182
183 class CancelWorkflowMenuItem(ActionMenuItem):
184 label = _("Cancel workflow ")
185 name = "action-cancel-workflow"
186 icon_name = "error"
187
188 def is_shown(self, context):
189 if context["view"] == "edit":
190 workflow_state = context["page"].current_workflow_state
191 return workflow_state and workflow_state.user_can_cancel(
192 context["request"].user
193 )
194 return False
195
196
197 class UnpublishMenuItem(ActionMenuItem):
198 label = _("Unpublish")
199 name = "action-unpublish"
200 icon_name = "download-alt"
201 classname = "action-secondary"
202
203 def is_shown(self, context):
204 if context["view"] == "edit":
205 perms_tester = self.get_user_page_permissions_tester(context)
206 return not perms_tester.page_locked() and perms_tester.can_unpublish()
207
208 def get_url(self, context):
209 return reverse("wagtailadmin_pages:unpublish", args=(context["page"].id,))
210
211
212 class DeleteMenuItem(ActionMenuItem):
213 name = "action-delete"
214 label = _("Delete")
215 icon_name = "bin"
216 classname = "action-secondary"
217
218 def is_shown(self, context):
219 if context["view"] == "edit":
220 perms_tester = self.get_user_page_permissions_tester(context)
221 return not perms_tester.page_locked() and perms_tester.can_delete()
222
223 def get_url(self, context):
224 return reverse("wagtailadmin_pages:delete", args=(context["page"].id,))
225
226
227 class SaveDraftMenuItem(ActionMenuItem):
228 name = "action-save-draft"
229 label = _("Save Draft")
230 template_name = "wagtailadmin/pages/action_menu/save_draft.html"
231
232 def get_context_data(self, parent_context):
233 context = super().get_context_data(parent_context)
234 context["is_revision"] = context["view"] == "revisions_revert"
235 return context
236
237
238 class PageLockedMenuItem(ActionMenuItem):
239 name = "action-page-locked"
240 label = _("Page locked")
241 template_name = "wagtailadmin/pages/action_menu/page_locked.html"
242
243 def is_shown(self, context):
244 return (
245 "page" in context
246 and self.get_user_page_permissions_tester(context).page_locked()
247 )
248
249 def get_context_data(self, parent_context):
250 context = super().get_context_data(parent_context)
251 context["is_revision"] = context["view"] == "revisions_revert"
252 return context
253
254
255 BASE_PAGE_ACTION_MENU_ITEMS = None
256
257
258 def _get_base_page_action_menu_items():
259 """
260 Retrieve the global list of menu items for the page action menu,
261 which may then be customised on a per-request basis
262 """
263 global BASE_PAGE_ACTION_MENU_ITEMS
264
265 if BASE_PAGE_ACTION_MENU_ITEMS is None:
266 BASE_PAGE_ACTION_MENU_ITEMS = [
267 SaveDraftMenuItem(order=0),
268 DeleteMenuItem(order=10),
269 UnpublishMenuItem(order=20),
270 PublishMenuItem(order=30),
271 CancelWorkflowMenuItem(order=40),
272 RestartWorkflowMenuItem(order=50),
273 SubmitForModerationMenuItem(order=60),
274 PageLockedMenuItem(order=10000),
275 ]
276 for hook in hooks.get_hooks("register_page_action_menu_item"):
277 action_menu_item = hook()
278 if action_menu_item:
279 BASE_PAGE_ACTION_MENU_ITEMS.append(action_menu_item)
280
281 return BASE_PAGE_ACTION_MENU_ITEMS
282
283
284 class PageActionMenu:
285 template = "wagtailadmin/pages/action_menu/menu.html"
286
287 def __init__(self, request, **kwargs):
288 self.request = request
289 self.context = kwargs
290 self.context["request"] = request
291 page = self.context.get("page")
292 user_page_permissions = UserPagePermissionsProxy(self.request.user)
293 self.context["user_page_permissions"] = user_page_permissions
294 if page:
295 self.context[
296 "user_page_permissions_tester"
297 ] = user_page_permissions.for_page(page)
298
299 self.menu_items = []
300
301 if page:
302 task = page.current_workflow_task
303 current_workflow_state = page.current_workflow_state
304 is_final_task = (
305 current_workflow_state and current_workflow_state.is_at_final_task
306 )
307 if task:
308 actions = task.get_actions(page, request.user)
309 workflow_menu_items = []
310 for name, label, launch_modal in actions:
311 icon_name = "edit"
312 if name == "approve":
313 if is_final_task and not getattr(
314 settings,
315 "WAGTAIL_WORKFLOW_REQUIRE_REAPPROVAL_ON_EDIT",
316 False,
317 ):
318 label = _("%(label)s and Publish") % {"label": label}
319 icon_name = "success"
320
321 item = WorkflowMenuItem(
322 name, label, launch_modal, icon_name=icon_name
323 )
324
325 if item.is_shown(self.context):
326 workflow_menu_items.append(item)
327 self.menu_items.extend(workflow_menu_items)
328
329 for menu_item in _get_base_page_action_menu_items():
330 if menu_item.is_shown(self.context):
331 self.menu_items.append(menu_item)
332
333 self.menu_items.sort(key=lambda item: item.order)
334
335 for hook in hooks.get_hooks("construct_page_action_menu"):
336 hook(self.menu_items, self.request, self.context)
337
338 try:
339 self.default_item = self.menu_items.pop(0)
340 except IndexError:
341 self.default_item = None
342
343 def render_html(self):
344 rendered_menu_items = [
345 menu_item.render_html(self.context) for menu_item in self.menu_items
346 ]
347
348 rendered_default_item = self.default_item.render_html(self.context)
349
350 return render_to_string(
351 self.template,
352 {
353 "default_menu_item": rendered_default_item,
354 "show_menu": bool(self.menu_items),
355 "rendered_menu_items": rendered_menu_items,
356 },
357 request=self.request,
358 )
359
360 @cached_property
361 def media(self):
362 media = Media()
363 for item in self.menu_items:
364 media += item.media
365 return media
366
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/wagtail/admin/action_menu.py b/wagtail/admin/action_menu.py
--- a/wagtail/admin/action_menu.py
+++ b/wagtail/admin/action_menu.py
@@ -209,21 +209,6 @@
return reverse("wagtailadmin_pages:unpublish", args=(context["page"].id,))
-class DeleteMenuItem(ActionMenuItem):
- name = "action-delete"
- label = _("Delete")
- icon_name = "bin"
- classname = "action-secondary"
-
- def is_shown(self, context):
- if context["view"] == "edit":
- perms_tester = self.get_user_page_permissions_tester(context)
- return not perms_tester.page_locked() and perms_tester.can_delete()
-
- def get_url(self, context):
- return reverse("wagtailadmin_pages:delete", args=(context["page"].id,))
-
-
class SaveDraftMenuItem(ActionMenuItem):
name = "action-save-draft"
label = _("Save Draft")
@@ -265,7 +250,6 @@
if BASE_PAGE_ACTION_MENU_ITEMS is None:
BASE_PAGE_ACTION_MENU_ITEMS = [
SaveDraftMenuItem(order=0),
- DeleteMenuItem(order=10),
UnpublishMenuItem(order=20),
PublishMenuItem(order=30),
CancelWorkflowMenuItem(order=40),
|
{"golden_diff": "diff --git a/wagtail/admin/action_menu.py b/wagtail/admin/action_menu.py\n--- a/wagtail/admin/action_menu.py\n+++ b/wagtail/admin/action_menu.py\n@@ -209,21 +209,6 @@\n return reverse(\"wagtailadmin_pages:unpublish\", args=(context[\"page\"].id,))\n \n \n-class DeleteMenuItem(ActionMenuItem):\n- name = \"action-delete\"\n- label = _(\"Delete\")\n- icon_name = \"bin\"\n- classname = \"action-secondary\"\n-\n- def is_shown(self, context):\n- if context[\"view\"] == \"edit\":\n- perms_tester = self.get_user_page_permissions_tester(context)\n- return not perms_tester.page_locked() and perms_tester.can_delete()\n-\n- def get_url(self, context):\n- return reverse(\"wagtailadmin_pages:delete\", args=(context[\"page\"].id,))\n-\n-\n class SaveDraftMenuItem(ActionMenuItem):\n name = \"action-save-draft\"\n label = _(\"Save Draft\")\n@@ -265,7 +250,6 @@\n if BASE_PAGE_ACTION_MENU_ITEMS is None:\n BASE_PAGE_ACTION_MENU_ITEMS = [\n SaveDraftMenuItem(order=0),\n- DeleteMenuItem(order=10),\n UnpublishMenuItem(order=20),\n PublishMenuItem(order=30),\n CancelWorkflowMenuItem(order=40),\n", "issue": "Move \"delete\" from save draft drop up\nWe want to remove the delete action from the publishing menu and instead have it in the three dots \"more\" menu up by the breadcrumbs.\n\n---\n\nNote from @kaedroho: the Delete button should remain available even when the page is locked.\n", "before_files": [{"content": "\"\"\"Handles rendering of the list of actions in the footer of the page create/edit views.\"\"\"\nfrom django.conf import settings\nfrom django.forms import Media\nfrom django.template.loader import render_to_string\nfrom django.urls import reverse\nfrom django.utils.functional import cached_property\nfrom django.utils.translation import gettext_lazy as _\n\nfrom wagtail import hooks\nfrom wagtail.admin.ui.components import Component\nfrom wagtail.models import UserPagePermissionsProxy\n\n\nclass ActionMenuItem(Component):\n \"\"\"Defines an item in the actions drop-up on the page creation/edit view\"\"\"\n\n order = 100 # default order index if one is not specified on init\n template_name = \"wagtailadmin/pages/action_menu/menu_item.html\"\n\n label = \"\"\n name = None\n classname = \"\"\n icon_name = \"\"\n\n def __init__(self, order=None):\n if order is not None:\n self.order = order\n\n def get_user_page_permissions_tester(self, context):\n if \"user_page_permissions_tester\" in context:\n return context[\"user_page_permissions_tester\"]\n return context[\"user_page_permissions\"].for_page(context[\"page\"])\n\n def is_shown(self, context):\n \"\"\"\n Whether this action should be shown on this request; permission checks etc should go here.\n By default, actions are shown for unlocked pages, hidden for locked pages\n\n context = dictionary containing at least:\n 'request' = the current request object\n 'view' = 'create', 'edit' or 'revisions_revert'\n 'page' (if view = 'edit' or 'revisions_revert') = the page being edited\n 'parent_page' (if view = 'create') = the parent page of the page being created\n 'user_page_permissions' = a UserPagePermissionsProxy for the current user, to test permissions against\n may also contain:\n 'user_page_permissions_tester' = a PagePermissionTester for the current user and page\n \"\"\"\n return (\n context[\"view\"] == \"create\"\n or not self.get_user_page_permissions_tester(context).page_locked()\n )\n\n def get_context_data(self, parent_context):\n \"\"\"Defines context for the template, overridable to use more data\"\"\"\n context = 
parent_context.copy()\n url = self.get_url(parent_context)\n\n context.update(\n {\n \"label\": self.label,\n \"url\": url,\n \"name\": self.name,\n \"classname\": self.classname,\n \"icon_name\": self.icon_name,\n \"request\": parent_context[\"request\"],\n }\n )\n return context\n\n def get_url(self, parent_context):\n return None\n\n\nclass PublishMenuItem(ActionMenuItem):\n label = _(\"Publish\")\n name = \"action-publish\"\n template_name = \"wagtailadmin/pages/action_menu/publish.html\"\n icon_name = \"upload\"\n\n def is_shown(self, context):\n if context[\"view\"] == \"create\":\n return (\n context[\"user_page_permissions\"]\n .for_page(context[\"parent_page\"])\n .can_publish_subpage()\n )\n else: # view == 'edit' or 'revisions_revert'\n perms_tester = self.get_user_page_permissions_tester(context)\n return not perms_tester.page_locked() and perms_tester.can_publish()\n\n def get_context_data(self, parent_context):\n context = super().get_context_data(parent_context)\n context[\"is_revision\"] = context[\"view\"] == \"revisions_revert\"\n return context\n\n\nclass SubmitForModerationMenuItem(ActionMenuItem):\n label = _(\"Submit for moderation\")\n name = \"action-submit\"\n icon_name = \"resubmit\"\n\n def is_shown(self, context):\n if not getattr(settings, \"WAGTAIL_MODERATION_ENABLED\", True):\n return False\n\n if context[\"view\"] == \"create\":\n return context[\"parent_page\"].has_workflow\n\n if context[\"view\"] == \"edit\":\n perms_tester = self.get_user_page_permissions_tester(context)\n return (\n perms_tester.can_submit_for_moderation()\n and not perms_tester.page_locked()\n )\n # context == revisions_revert\n return False\n\n def get_context_data(self, parent_context):\n context = super().get_context_data(parent_context)\n page = context.get(\"page\")\n workflow_state = page.current_workflow_state if page else None\n if (\n workflow_state\n and workflow_state.status == workflow_state.STATUS_NEEDS_CHANGES\n ):\n context[\"label\"] = _(\"Resubmit to {}\").format(\n workflow_state.current_task_state.task.name\n )\n elif page:\n workflow = page.get_workflow()\n if workflow:\n context[\"label\"] = _(\"Submit to {}\").format(workflow.name)\n return context\n\n\nclass WorkflowMenuItem(ActionMenuItem):\n template_name = \"wagtailadmin/pages/action_menu/workflow_menu_item.html\"\n\n def __init__(self, name, label, launch_modal, *args, **kwargs):\n self.name = name\n self.label = label\n self.launch_modal = launch_modal\n\n if kwargs.get(\"icon_name\"):\n self.icon_name = kwargs.pop(\"icon_name\")\n\n super().__init__(*args, **kwargs)\n\n def get_context_data(self, parent_context):\n context = super().get_context_data(parent_context)\n context[\"launch_modal\"] = self.launch_modal\n context[\"current_task_state\"] = context[\"page\"].current_workflow_task_state\n return context\n\n def is_shown(self, context):\n if context[\"view\"] == \"edit\":\n perms_tester = self.get_user_page_permissions_tester(context)\n return not perms_tester.page_locked()\n\n\nclass RestartWorkflowMenuItem(ActionMenuItem):\n label = _(\"Restart workflow \")\n name = \"action-restart-workflow\"\n classname = \"button--icon-flipped\"\n icon_name = \"login\"\n\n def is_shown(self, context):\n if not getattr(settings, \"WAGTAIL_MODERATION_ENABLED\", True):\n return False\n elif context[\"view\"] == \"edit\":\n workflow_state = context[\"page\"].current_workflow_state\n perms_tester = self.get_user_page_permissions_tester(context)\n return (\n perms_tester.can_submit_for_moderation()\n and not 
perms_tester.page_locked()\n and workflow_state\n and workflow_state.user_can_cancel(context[\"request\"].user)\n )\n else:\n return False\n\n\nclass CancelWorkflowMenuItem(ActionMenuItem):\n label = _(\"Cancel workflow \")\n name = \"action-cancel-workflow\"\n icon_name = \"error\"\n\n def is_shown(self, context):\n if context[\"view\"] == \"edit\":\n workflow_state = context[\"page\"].current_workflow_state\n return workflow_state and workflow_state.user_can_cancel(\n context[\"request\"].user\n )\n return False\n\n\nclass UnpublishMenuItem(ActionMenuItem):\n label = _(\"Unpublish\")\n name = \"action-unpublish\"\n icon_name = \"download-alt\"\n classname = \"action-secondary\"\n\n def is_shown(self, context):\n if context[\"view\"] == \"edit\":\n perms_tester = self.get_user_page_permissions_tester(context)\n return not perms_tester.page_locked() and perms_tester.can_unpublish()\n\n def get_url(self, context):\n return reverse(\"wagtailadmin_pages:unpublish\", args=(context[\"page\"].id,))\n\n\nclass DeleteMenuItem(ActionMenuItem):\n name = \"action-delete\"\n label = _(\"Delete\")\n icon_name = \"bin\"\n classname = \"action-secondary\"\n\n def is_shown(self, context):\n if context[\"view\"] == \"edit\":\n perms_tester = self.get_user_page_permissions_tester(context)\n return not perms_tester.page_locked() and perms_tester.can_delete()\n\n def get_url(self, context):\n return reverse(\"wagtailadmin_pages:delete\", args=(context[\"page\"].id,))\n\n\nclass SaveDraftMenuItem(ActionMenuItem):\n name = \"action-save-draft\"\n label = _(\"Save Draft\")\n template_name = \"wagtailadmin/pages/action_menu/save_draft.html\"\n\n def get_context_data(self, parent_context):\n context = super().get_context_data(parent_context)\n context[\"is_revision\"] = context[\"view\"] == \"revisions_revert\"\n return context\n\n\nclass PageLockedMenuItem(ActionMenuItem):\n name = \"action-page-locked\"\n label = _(\"Page locked\")\n template_name = \"wagtailadmin/pages/action_menu/page_locked.html\"\n\n def is_shown(self, context):\n return (\n \"page\" in context\n and self.get_user_page_permissions_tester(context).page_locked()\n )\n\n def get_context_data(self, parent_context):\n context = super().get_context_data(parent_context)\n context[\"is_revision\"] = context[\"view\"] == \"revisions_revert\"\n return context\n\n\nBASE_PAGE_ACTION_MENU_ITEMS = None\n\n\ndef _get_base_page_action_menu_items():\n \"\"\"\n Retrieve the global list of menu items for the page action menu,\n which may then be customised on a per-request basis\n \"\"\"\n global BASE_PAGE_ACTION_MENU_ITEMS\n\n if BASE_PAGE_ACTION_MENU_ITEMS is None:\n BASE_PAGE_ACTION_MENU_ITEMS = [\n SaveDraftMenuItem(order=0),\n DeleteMenuItem(order=10),\n UnpublishMenuItem(order=20),\n PublishMenuItem(order=30),\n CancelWorkflowMenuItem(order=40),\n RestartWorkflowMenuItem(order=50),\n SubmitForModerationMenuItem(order=60),\n PageLockedMenuItem(order=10000),\n ]\n for hook in hooks.get_hooks(\"register_page_action_menu_item\"):\n action_menu_item = hook()\n if action_menu_item:\n BASE_PAGE_ACTION_MENU_ITEMS.append(action_menu_item)\n\n return BASE_PAGE_ACTION_MENU_ITEMS\n\n\nclass PageActionMenu:\n template = \"wagtailadmin/pages/action_menu/menu.html\"\n\n def __init__(self, request, **kwargs):\n self.request = request\n self.context = kwargs\n self.context[\"request\"] = request\n page = self.context.get(\"page\")\n user_page_permissions = UserPagePermissionsProxy(self.request.user)\n self.context[\"user_page_permissions\"] = user_page_permissions\n if 
page:\n self.context[\n \"user_page_permissions_tester\"\n ] = user_page_permissions.for_page(page)\n\n self.menu_items = []\n\n if page:\n task = page.current_workflow_task\n current_workflow_state = page.current_workflow_state\n is_final_task = (\n current_workflow_state and current_workflow_state.is_at_final_task\n )\n if task:\n actions = task.get_actions(page, request.user)\n workflow_menu_items = []\n for name, label, launch_modal in actions:\n icon_name = \"edit\"\n if name == \"approve\":\n if is_final_task and not getattr(\n settings,\n \"WAGTAIL_WORKFLOW_REQUIRE_REAPPROVAL_ON_EDIT\",\n False,\n ):\n label = _(\"%(label)s and Publish\") % {\"label\": label}\n icon_name = \"success\"\n\n item = WorkflowMenuItem(\n name, label, launch_modal, icon_name=icon_name\n )\n\n if item.is_shown(self.context):\n workflow_menu_items.append(item)\n self.menu_items.extend(workflow_menu_items)\n\n for menu_item in _get_base_page_action_menu_items():\n if menu_item.is_shown(self.context):\n self.menu_items.append(menu_item)\n\n self.menu_items.sort(key=lambda item: item.order)\n\n for hook in hooks.get_hooks(\"construct_page_action_menu\"):\n hook(self.menu_items, self.request, self.context)\n\n try:\n self.default_item = self.menu_items.pop(0)\n except IndexError:\n self.default_item = None\n\n def render_html(self):\n rendered_menu_items = [\n menu_item.render_html(self.context) for menu_item in self.menu_items\n ]\n\n rendered_default_item = self.default_item.render_html(self.context)\n\n return render_to_string(\n self.template,\n {\n \"default_menu_item\": rendered_default_item,\n \"show_menu\": bool(self.menu_items),\n \"rendered_menu_items\": rendered_menu_items,\n },\n request=self.request,\n )\n\n @cached_property\n def media(self):\n media = Media()\n for item in self.menu_items:\n media += item.media\n return media\n", "path": "wagtail/admin/action_menu.py"}], "after_files": [{"content": "\"\"\"Handles rendering of the list of actions in the footer of the page create/edit views.\"\"\"\nfrom django.conf import settings\nfrom django.forms import Media\nfrom django.template.loader import render_to_string\nfrom django.urls import reverse\nfrom django.utils.functional import cached_property\nfrom django.utils.translation import gettext_lazy as _\n\nfrom wagtail import hooks\nfrom wagtail.admin.ui.components import Component\nfrom wagtail.models import UserPagePermissionsProxy\n\n\nclass ActionMenuItem(Component):\n \"\"\"Defines an item in the actions drop-up on the page creation/edit view\"\"\"\n\n order = 100 # default order index if one is not specified on init\n template_name = \"wagtailadmin/pages/action_menu/menu_item.html\"\n\n label = \"\"\n name = None\n classname = \"\"\n icon_name = \"\"\n\n def __init__(self, order=None):\n if order is not None:\n self.order = order\n\n def get_user_page_permissions_tester(self, context):\n if \"user_page_permissions_tester\" in context:\n return context[\"user_page_permissions_tester\"]\n return context[\"user_page_permissions\"].for_page(context[\"page\"])\n\n def is_shown(self, context):\n \"\"\"\n Whether this action should be shown on this request; permission checks etc should go here.\n By default, actions are shown for unlocked pages, hidden for locked pages\n\n context = dictionary containing at least:\n 'request' = the current request object\n 'view' = 'create', 'edit' or 'revisions_revert'\n 'page' (if view = 'edit' or 'revisions_revert') = the page being edited\n 'parent_page' (if view = 'create') = the parent page of the page being 
created\n 'user_page_permissions' = a UserPagePermissionsProxy for the current user, to test permissions against\n may also contain:\n 'user_page_permissions_tester' = a PagePermissionTester for the current user and page\n \"\"\"\n return (\n context[\"view\"] == \"create\"\n or not self.get_user_page_permissions_tester(context).page_locked()\n )\n\n def get_context_data(self, parent_context):\n \"\"\"Defines context for the template, overridable to use more data\"\"\"\n context = parent_context.copy()\n url = self.get_url(parent_context)\n\n context.update(\n {\n \"label\": self.label,\n \"url\": url,\n \"name\": self.name,\n \"classname\": self.classname,\n \"icon_name\": self.icon_name,\n \"request\": parent_context[\"request\"],\n }\n )\n return context\n\n def get_url(self, parent_context):\n return None\n\n\nclass PublishMenuItem(ActionMenuItem):\n label = _(\"Publish\")\n name = \"action-publish\"\n template_name = \"wagtailadmin/pages/action_menu/publish.html\"\n icon_name = \"upload\"\n\n def is_shown(self, context):\n if context[\"view\"] == \"create\":\n return (\n context[\"user_page_permissions\"]\n .for_page(context[\"parent_page\"])\n .can_publish_subpage()\n )\n else: # view == 'edit' or 'revisions_revert'\n perms_tester = self.get_user_page_permissions_tester(context)\n return not perms_tester.page_locked() and perms_tester.can_publish()\n\n def get_context_data(self, parent_context):\n context = super().get_context_data(parent_context)\n context[\"is_revision\"] = context[\"view\"] == \"revisions_revert\"\n return context\n\n\nclass SubmitForModerationMenuItem(ActionMenuItem):\n label = _(\"Submit for moderation\")\n name = \"action-submit\"\n icon_name = \"resubmit\"\n\n def is_shown(self, context):\n if not getattr(settings, \"WAGTAIL_MODERATION_ENABLED\", True):\n return False\n\n if context[\"view\"] == \"create\":\n return context[\"parent_page\"].has_workflow\n\n if context[\"view\"] == \"edit\":\n perms_tester = self.get_user_page_permissions_tester(context)\n return (\n perms_tester.can_submit_for_moderation()\n and not perms_tester.page_locked()\n )\n # context == revisions_revert\n return False\n\n def get_context_data(self, parent_context):\n context = super().get_context_data(parent_context)\n page = context.get(\"page\")\n workflow_state = page.current_workflow_state if page else None\n if (\n workflow_state\n and workflow_state.status == workflow_state.STATUS_NEEDS_CHANGES\n ):\n context[\"label\"] = _(\"Resubmit to {}\").format(\n workflow_state.current_task_state.task.name\n )\n elif page:\n workflow = page.get_workflow()\n if workflow:\n context[\"label\"] = _(\"Submit to {}\").format(workflow.name)\n return context\n\n\nclass WorkflowMenuItem(ActionMenuItem):\n template_name = \"wagtailadmin/pages/action_menu/workflow_menu_item.html\"\n\n def __init__(self, name, label, launch_modal, *args, **kwargs):\n self.name = name\n self.label = label\n self.launch_modal = launch_modal\n\n if kwargs.get(\"icon_name\"):\n self.icon_name = kwargs.pop(\"icon_name\")\n\n super().__init__(*args, **kwargs)\n\n def get_context_data(self, parent_context):\n context = super().get_context_data(parent_context)\n context[\"launch_modal\"] = self.launch_modal\n context[\"current_task_state\"] = context[\"page\"].current_workflow_task_state\n return context\n\n def is_shown(self, context):\n if context[\"view\"] == \"edit\":\n perms_tester = self.get_user_page_permissions_tester(context)\n return not perms_tester.page_locked()\n\n\nclass 
RestartWorkflowMenuItem(ActionMenuItem):\n label = _(\"Restart workflow \")\n name = \"action-restart-workflow\"\n classname = \"button--icon-flipped\"\n icon_name = \"login\"\n\n def is_shown(self, context):\n if not getattr(settings, \"WAGTAIL_MODERATION_ENABLED\", True):\n return False\n elif context[\"view\"] == \"edit\":\n workflow_state = context[\"page\"].current_workflow_state\n perms_tester = self.get_user_page_permissions_tester(context)\n return (\n perms_tester.can_submit_for_moderation()\n and not perms_tester.page_locked()\n and workflow_state\n and workflow_state.user_can_cancel(context[\"request\"].user)\n )\n else:\n return False\n\n\nclass CancelWorkflowMenuItem(ActionMenuItem):\n label = _(\"Cancel workflow \")\n name = \"action-cancel-workflow\"\n icon_name = \"error\"\n\n def is_shown(self, context):\n if context[\"view\"] == \"edit\":\n workflow_state = context[\"page\"].current_workflow_state\n return workflow_state and workflow_state.user_can_cancel(\n context[\"request\"].user\n )\n return False\n\n\nclass UnpublishMenuItem(ActionMenuItem):\n label = _(\"Unpublish\")\n name = \"action-unpublish\"\n icon_name = \"download-alt\"\n classname = \"action-secondary\"\n\n def is_shown(self, context):\n if context[\"view\"] == \"edit\":\n perms_tester = self.get_user_page_permissions_tester(context)\n return not perms_tester.page_locked() and perms_tester.can_unpublish()\n\n def get_url(self, context):\n return reverse(\"wagtailadmin_pages:unpublish\", args=(context[\"page\"].id,))\n\n\nclass SaveDraftMenuItem(ActionMenuItem):\n name = \"action-save-draft\"\n label = _(\"Save Draft\")\n template_name = \"wagtailadmin/pages/action_menu/save_draft.html\"\n\n def get_context_data(self, parent_context):\n context = super().get_context_data(parent_context)\n context[\"is_revision\"] = context[\"view\"] == \"revisions_revert\"\n return context\n\n\nclass PageLockedMenuItem(ActionMenuItem):\n name = \"action-page-locked\"\n label = _(\"Page locked\")\n template_name = \"wagtailadmin/pages/action_menu/page_locked.html\"\n\n def is_shown(self, context):\n return (\n \"page\" in context\n and self.get_user_page_permissions_tester(context).page_locked()\n )\n\n def get_context_data(self, parent_context):\n context = super().get_context_data(parent_context)\n context[\"is_revision\"] = context[\"view\"] == \"revisions_revert\"\n return context\n\n\nBASE_PAGE_ACTION_MENU_ITEMS = None\n\n\ndef _get_base_page_action_menu_items():\n \"\"\"\n Retrieve the global list of menu items for the page action menu,\n which may then be customised on a per-request basis\n \"\"\"\n global BASE_PAGE_ACTION_MENU_ITEMS\n\n if BASE_PAGE_ACTION_MENU_ITEMS is None:\n BASE_PAGE_ACTION_MENU_ITEMS = [\n SaveDraftMenuItem(order=0),\n UnpublishMenuItem(order=20),\n PublishMenuItem(order=30),\n CancelWorkflowMenuItem(order=40),\n RestartWorkflowMenuItem(order=50),\n SubmitForModerationMenuItem(order=60),\n PageLockedMenuItem(order=10000),\n ]\n for hook in hooks.get_hooks(\"register_page_action_menu_item\"):\n action_menu_item = hook()\n if action_menu_item:\n BASE_PAGE_ACTION_MENU_ITEMS.append(action_menu_item)\n\n return BASE_PAGE_ACTION_MENU_ITEMS\n\n\nclass PageActionMenu:\n template = \"wagtailadmin/pages/action_menu/menu.html\"\n\n def __init__(self, request, **kwargs):\n self.request = request\n self.context = kwargs\n self.context[\"request\"] = request\n page = self.context.get(\"page\")\n user_page_permissions = UserPagePermissionsProxy(self.request.user)\n self.context[\"user_page_permissions\"] = 
user_page_permissions\n if page:\n self.context[\n \"user_page_permissions_tester\"\n ] = user_page_permissions.for_page(page)\n\n self.menu_items = []\n\n if page:\n task = page.current_workflow_task\n current_workflow_state = page.current_workflow_state\n is_final_task = (\n current_workflow_state and current_workflow_state.is_at_final_task\n )\n if task:\n actions = task.get_actions(page, request.user)\n workflow_menu_items = []\n for name, label, launch_modal in actions:\n icon_name = \"edit\"\n if name == \"approve\":\n if is_final_task and not getattr(\n settings,\n \"WAGTAIL_WORKFLOW_REQUIRE_REAPPROVAL_ON_EDIT\",\n False,\n ):\n label = _(\"%(label)s and Publish\") % {\"label\": label}\n icon_name = \"success\"\n\n item = WorkflowMenuItem(\n name, label, launch_modal, icon_name=icon_name\n )\n\n if item.is_shown(self.context):\n workflow_menu_items.append(item)\n self.menu_items.extend(workflow_menu_items)\n\n for menu_item in _get_base_page_action_menu_items():\n if menu_item.is_shown(self.context):\n self.menu_items.append(menu_item)\n\n self.menu_items.sort(key=lambda item: item.order)\n\n for hook in hooks.get_hooks(\"construct_page_action_menu\"):\n hook(self.menu_items, self.request, self.context)\n\n try:\n self.default_item = self.menu_items.pop(0)\n except IndexError:\n self.default_item = None\n\n def render_html(self):\n rendered_menu_items = [\n menu_item.render_html(self.context) for menu_item in self.menu_items\n ]\n\n rendered_default_item = self.default_item.render_html(self.context)\n\n return render_to_string(\n self.template,\n {\n \"default_menu_item\": rendered_default_item,\n \"show_menu\": bool(self.menu_items),\n \"rendered_menu_items\": rendered_menu_items,\n },\n request=self.request,\n )\n\n @cached_property\n def media(self):\n media = Media()\n for item in self.menu_items:\n media += item.media\n return media\n", "path": "wagtail/admin/action_menu.py"}]}
| 3,970 | 299 |
gh_patches_debug_31670 | rasdani/github-patches | git_diff | sanic-org__sanic-2578 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Certificates not created with `mkcert` or `trustme` raise a RuntimeError
The `CertLoader` class in `sanic-org/sanic/sanic/worker/loader.py` checks the creator of the certificate. If the creator is not `mkcert` or `trustme` then it raises a `RuntimeError`. This will prevent Sanic from running with certificates from any other sources.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sanic/worker/loader.py`
Content:
```
1 from __future__ import annotations
2
3 import os
4 import sys
5
6 from importlib import import_module
7 from pathlib import Path
8 from typing import (
9 TYPE_CHECKING,
10 Any,
11 Callable,
12 Dict,
13 Optional,
14 Type,
15 Union,
16 cast,
17 )
18
19 from sanic.http.tls.creators import CertCreator, MkcertCreator, TrustmeCreator
20
21
22 if TYPE_CHECKING:
23 from sanic import Sanic as SanicApp
24
25
26 class AppLoader:
27 def __init__(
28 self,
29 module_input: str = "",
30 as_factory: bool = False,
31 as_simple: bool = False,
32 args: Any = None,
33 factory: Optional[Callable[[], SanicApp]] = None,
34 ) -> None:
35 self.module_input = module_input
36 self.module_name = ""
37 self.app_name = ""
38 self.as_factory = as_factory
39 self.as_simple = as_simple
40 self.args = args
41 self.factory = factory
42 self.cwd = os.getcwd()
43
44 if module_input:
45 delimiter = ":" if ":" in module_input else "."
46 if module_input.count(delimiter):
47 module_name, app_name = module_input.rsplit(delimiter, 1)
48 self.module_name = module_name
49 self.app_name = app_name
50 if self.app_name.endswith("()"):
51 self.as_factory = True
52 self.app_name = self.app_name[:-2]
53
54 def load(self) -> SanicApp:
55 module_path = os.path.abspath(self.cwd)
56 if module_path not in sys.path:
57 sys.path.append(module_path)
58
59 if self.factory:
60 return self.factory()
61 else:
62 from sanic.app import Sanic
63 from sanic.simple import create_simple_server
64
65 if self.as_simple:
66 path = Path(self.module_input)
67 app = create_simple_server(path)
68 else:
69 if self.module_name == "" and os.path.isdir(self.module_input):
70 raise ValueError(
71 "App not found.\n"
72 " Please use --simple if you are passing a "
73 "directory to sanic.\n"
74 f" eg. sanic {self.module_input} --simple"
75 )
76
77 module = import_module(self.module_name)
78 app = getattr(module, self.app_name, None)
79 if self.as_factory:
80 try:
81 app = app(self.args)
82 except TypeError:
83 app = app()
84
85 app_type_name = type(app).__name__
86
87 if (
88 not isinstance(app, Sanic)
89 and self.args
90 and hasattr(self.args, "module")
91 ):
92 if callable(app):
93 solution = f"sanic {self.args.module} --factory"
94 raise ValueError(
95 "Module is not a Sanic app, it is a "
96 f"{app_type_name}\n"
97 " If this callable returns a "
98 f"Sanic instance try: \n{solution}"
99 )
100
101 raise ValueError(
102 f"Module is not a Sanic app, it is a {app_type_name}\n"
103 f" Perhaps you meant {self.args.module}:app?"
104 )
105 return app
106
107
108 class CertLoader:
109 _creator_class: Type[CertCreator]
110
111 def __init__(self, ssl_data: Dict[str, Union[str, os.PathLike]]):
112 creator_name = ssl_data.get("creator")
113 if creator_name not in ("mkcert", "trustme"):
114 raise RuntimeError(f"Unknown certificate creator: {creator_name}")
115 elif creator_name == "mkcert":
116 self._creator_class = MkcertCreator
117 elif creator_name == "trustme":
118 self._creator_class = TrustmeCreator
119
120 self._key = ssl_data["key"]
121 self._cert = ssl_data["cert"]
122 self._localhost = cast(str, ssl_data["localhost"])
123
124 def load(self, app: SanicApp):
125 creator = self._creator_class(app, self._key, self._cert)
126 return creator.generate_cert(self._localhost)
127
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sanic/worker/loader.py b/sanic/worker/loader.py
--- a/sanic/worker/loader.py
+++ b/sanic/worker/loader.py
@@ -5,18 +5,10 @@
from importlib import import_module
from pathlib import Path
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Dict,
- Optional,
- Type,
- Union,
- cast,
-)
+from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Union, cast
-from sanic.http.tls.creators import CertCreator, MkcertCreator, TrustmeCreator
+from sanic.http.tls.context import process_to_context
+from sanic.http.tls.creators import MkcertCreator, TrustmeCreator
if TYPE_CHECKING:
@@ -106,21 +98,30 @@
class CertLoader:
- _creator_class: Type[CertCreator]
+ _creators = {
+ "mkcert": MkcertCreator,
+ "trustme": TrustmeCreator,
+ }
def __init__(self, ssl_data: Dict[str, Union[str, os.PathLike]]):
- creator_name = ssl_data.get("creator")
- if creator_name not in ("mkcert", "trustme"):
+ self._ssl_data = ssl_data
+
+ creator_name = cast(str, ssl_data.get("creator"))
+
+ self._creator_class = self._creators.get(creator_name)
+ if not creator_name:
+ return
+
+ if not self._creator_class:
raise RuntimeError(f"Unknown certificate creator: {creator_name}")
- elif creator_name == "mkcert":
- self._creator_class = MkcertCreator
- elif creator_name == "trustme":
- self._creator_class = TrustmeCreator
self._key = ssl_data["key"]
self._cert = ssl_data["cert"]
self._localhost = cast(str, ssl_data["localhost"])
def load(self, app: SanicApp):
+ if not self._creator_class:
+ return process_to_context(self._ssl_data)
+
creator = self._creator_class(app, self._key, self._cert)
return creator.generate_cert(self._localhost)
|
{"golden_diff": "diff --git a/sanic/worker/loader.py b/sanic/worker/loader.py\n--- a/sanic/worker/loader.py\n+++ b/sanic/worker/loader.py\n@@ -5,18 +5,10 @@\n \n from importlib import import_module\n from pathlib import Path\n-from typing import (\n- TYPE_CHECKING,\n- Any,\n- Callable,\n- Dict,\n- Optional,\n- Type,\n- Union,\n- cast,\n-)\n+from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Union, cast\n \n-from sanic.http.tls.creators import CertCreator, MkcertCreator, TrustmeCreator\n+from sanic.http.tls.context import process_to_context\n+from sanic.http.tls.creators import MkcertCreator, TrustmeCreator\n \n \n if TYPE_CHECKING:\n@@ -106,21 +98,30 @@\n \n \n class CertLoader:\n- _creator_class: Type[CertCreator]\n+ _creators = {\n+ \"mkcert\": MkcertCreator,\n+ \"trustme\": TrustmeCreator,\n+ }\n \n def __init__(self, ssl_data: Dict[str, Union[str, os.PathLike]]):\n- creator_name = ssl_data.get(\"creator\")\n- if creator_name not in (\"mkcert\", \"trustme\"):\n+ self._ssl_data = ssl_data\n+\n+ creator_name = cast(str, ssl_data.get(\"creator\"))\n+\n+ self._creator_class = self._creators.get(creator_name)\n+ if not creator_name:\n+ return\n+\n+ if not self._creator_class:\n raise RuntimeError(f\"Unknown certificate creator: {creator_name}\")\n- elif creator_name == \"mkcert\":\n- self._creator_class = MkcertCreator\n- elif creator_name == \"trustme\":\n- self._creator_class = TrustmeCreator\n \n self._key = ssl_data[\"key\"]\n self._cert = ssl_data[\"cert\"]\n self._localhost = cast(str, ssl_data[\"localhost\"])\n \n def load(self, app: SanicApp):\n+ if not self._creator_class:\n+ return process_to_context(self._ssl_data)\n+\n creator = self._creator_class(app, self._key, self._cert)\n return creator.generate_cert(self._localhost)\n", "issue": "Certificates not created with `mkcert` or `trustme` raise a RuntimeError\nThe `CertLoader` class in `sanic-org/sanic/sanic/worker/loader.py` checks the creator of the certificate. If the creator is not `mkcert` or `trustme` then it raises a `RuntimeError`. 
This will prevent Sanic from running with certificates from any other sources.\r\n\n", "before_files": [{"content": "from __future__ import annotations\n\nimport os\nimport sys\n\nfrom importlib import import_module\nfrom pathlib import Path\nfrom typing import (\n TYPE_CHECKING,\n Any,\n Callable,\n Dict,\n Optional,\n Type,\n Union,\n cast,\n)\n\nfrom sanic.http.tls.creators import CertCreator, MkcertCreator, TrustmeCreator\n\n\nif TYPE_CHECKING:\n from sanic import Sanic as SanicApp\n\n\nclass AppLoader:\n def __init__(\n self,\n module_input: str = \"\",\n as_factory: bool = False,\n as_simple: bool = False,\n args: Any = None,\n factory: Optional[Callable[[], SanicApp]] = None,\n ) -> None:\n self.module_input = module_input\n self.module_name = \"\"\n self.app_name = \"\"\n self.as_factory = as_factory\n self.as_simple = as_simple\n self.args = args\n self.factory = factory\n self.cwd = os.getcwd()\n\n if module_input:\n delimiter = \":\" if \":\" in module_input else \".\"\n if module_input.count(delimiter):\n module_name, app_name = module_input.rsplit(delimiter, 1)\n self.module_name = module_name\n self.app_name = app_name\n if self.app_name.endswith(\"()\"):\n self.as_factory = True\n self.app_name = self.app_name[:-2]\n\n def load(self) -> SanicApp:\n module_path = os.path.abspath(self.cwd)\n if module_path not in sys.path:\n sys.path.append(module_path)\n\n if self.factory:\n return self.factory()\n else:\n from sanic.app import Sanic\n from sanic.simple import create_simple_server\n\n if self.as_simple:\n path = Path(self.module_input)\n app = create_simple_server(path)\n else:\n if self.module_name == \"\" and os.path.isdir(self.module_input):\n raise ValueError(\n \"App not found.\\n\"\n \" Please use --simple if you are passing a \"\n \"directory to sanic.\\n\"\n f\" eg. 
sanic {self.module_input} --simple\"\n )\n\n module = import_module(self.module_name)\n app = getattr(module, self.app_name, None)\n if self.as_factory:\n try:\n app = app(self.args)\n except TypeError:\n app = app()\n\n app_type_name = type(app).__name__\n\n if (\n not isinstance(app, Sanic)\n and self.args\n and hasattr(self.args, \"module\")\n ):\n if callable(app):\n solution = f\"sanic {self.args.module} --factory\"\n raise ValueError(\n \"Module is not a Sanic app, it is a \"\n f\"{app_type_name}\\n\"\n \" If this callable returns a \"\n f\"Sanic instance try: \\n{solution}\"\n )\n\n raise ValueError(\n f\"Module is not a Sanic app, it is a {app_type_name}\\n\"\n f\" Perhaps you meant {self.args.module}:app?\"\n )\n return app\n\n\nclass CertLoader:\n _creator_class: Type[CertCreator]\n\n def __init__(self, ssl_data: Dict[str, Union[str, os.PathLike]]):\n creator_name = ssl_data.get(\"creator\")\n if creator_name not in (\"mkcert\", \"trustme\"):\n raise RuntimeError(f\"Unknown certificate creator: {creator_name}\")\n elif creator_name == \"mkcert\":\n self._creator_class = MkcertCreator\n elif creator_name == \"trustme\":\n self._creator_class = TrustmeCreator\n\n self._key = ssl_data[\"key\"]\n self._cert = ssl_data[\"cert\"]\n self._localhost = cast(str, ssl_data[\"localhost\"])\n\n def load(self, app: SanicApp):\n creator = self._creator_class(app, self._key, self._cert)\n return creator.generate_cert(self._localhost)\n", "path": "sanic/worker/loader.py"}], "after_files": [{"content": "from __future__ import annotations\n\nimport os\nimport sys\n\nfrom importlib import import_module\nfrom pathlib import Path\nfrom typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Union, cast\n\nfrom sanic.http.tls.context import process_to_context\nfrom sanic.http.tls.creators import MkcertCreator, TrustmeCreator\n\n\nif TYPE_CHECKING:\n from sanic import Sanic as SanicApp\n\n\nclass AppLoader:\n def __init__(\n self,\n module_input: str = \"\",\n as_factory: bool = False,\n as_simple: bool = False,\n args: Any = None,\n factory: Optional[Callable[[], SanicApp]] = None,\n ) -> None:\n self.module_input = module_input\n self.module_name = \"\"\n self.app_name = \"\"\n self.as_factory = as_factory\n self.as_simple = as_simple\n self.args = args\n self.factory = factory\n self.cwd = os.getcwd()\n\n if module_input:\n delimiter = \":\" if \":\" in module_input else \".\"\n if module_input.count(delimiter):\n module_name, app_name = module_input.rsplit(delimiter, 1)\n self.module_name = module_name\n self.app_name = app_name\n if self.app_name.endswith(\"()\"):\n self.as_factory = True\n self.app_name = self.app_name[:-2]\n\n def load(self) -> SanicApp:\n module_path = os.path.abspath(self.cwd)\n if module_path not in sys.path:\n sys.path.append(module_path)\n\n if self.factory:\n return self.factory()\n else:\n from sanic.app import Sanic\n from sanic.simple import create_simple_server\n\n if self.as_simple:\n path = Path(self.module_input)\n app = create_simple_server(path)\n else:\n if self.module_name == \"\" and os.path.isdir(self.module_input):\n raise ValueError(\n \"App not found.\\n\"\n \" Please use --simple if you are passing a \"\n \"directory to sanic.\\n\"\n f\" eg. 
sanic {self.module_input} --simple\"\n )\n\n module = import_module(self.module_name)\n app = getattr(module, self.app_name, None)\n if self.as_factory:\n try:\n app = app(self.args)\n except TypeError:\n app = app()\n\n app_type_name = type(app).__name__\n\n if (\n not isinstance(app, Sanic)\n and self.args\n and hasattr(self.args, \"module\")\n ):\n if callable(app):\n solution = f\"sanic {self.args.module} --factory\"\n raise ValueError(\n \"Module is not a Sanic app, it is a \"\n f\"{app_type_name}\\n\"\n \" If this callable returns a \"\n f\"Sanic instance try: \\n{solution}\"\n )\n\n raise ValueError(\n f\"Module is not a Sanic app, it is a {app_type_name}\\n\"\n f\" Perhaps you meant {self.args.module}:app?\"\n )\n return app\n\n\nclass CertLoader:\n _creators = {\n \"mkcert\": MkcertCreator,\n \"trustme\": TrustmeCreator,\n }\n\n def __init__(self, ssl_data: Dict[str, Union[str, os.PathLike]]):\n self._ssl_data = ssl_data\n\n creator_name = cast(str, ssl_data.get(\"creator\"))\n\n self._creator_class = self._creators.get(creator_name)\n if not creator_name:\n return\n\n if not self._creator_class:\n raise RuntimeError(f\"Unknown certificate creator: {creator_name}\")\n\n self._key = ssl_data[\"key\"]\n self._cert = ssl_data[\"cert\"]\n self._localhost = cast(str, ssl_data[\"localhost\"])\n\n def load(self, app: SanicApp):\n if not self._creator_class:\n return process_to_context(self._ssl_data)\n\n creator = self._creator_class(app, self._key, self._cert)\n return creator.generate_cert(self._localhost)\n", "path": "sanic/worker/loader.py"}]}
| 1,469 | 499 |
gh_patches_debug_17120
|
rasdani/github-patches
|
git_diff
|
DataDog__dd-trace-py-89
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Flask integration unicode support
I ran into a small issue where the Flask integration is unable to set tags when they contain unicode characters.
The specific case we have is when Unicode characters appear in the URL.
https://github.com/DataDog/dd-trace-py/blob/8a9a7a59312de544d9c774e19872eb5e97cd8307/ddtrace/contrib/flask/middleware.py#L119
```
2016-10-28 13:47:58,953 - ERROR - error finishing trace
Traceback (most recent call last):
File "/home/vagrant/.virtualenvs/kennel/local/lib/python2.7/site-packages/ddtrace/contrib/flask/middleware.py", line 150, in _request_finished
self._finish_span(response=response)
File "/home/vagrant/.virtualenvs/kennel/local/lib/python2.7/site-packages/ddtrace/contrib/flask/middleware.py", line 119, in _finish_span
span.set_tag(http.URL, str(request.base_url or ""))
UnicodeEncodeError: 'ascii' codec can't encode character u'\xe8' in position 31: ordinal not in range(128)
```
Happy to submit a PR for a fix, not sure how you would like to go about it, my thinking was:
``` python
from ...compat import stringify
# Use `stringify` instead of `str` throughout the module
span.set_tag(http.URL, stringify(request.base_url or ""))
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ddtrace/contrib/flask/middleware.py`
Content:
```
1 """
2 Datadog trace code for flask.
3
4 Requires a modern version of flask and the `blinker` library (which is a
5 dependency of flask signals).
6 """
7
8 # stdlib
9 import logging
10
11 # project
12 from ...ext import http, errors, AppTypes
13
14 # 3p
15 import flask.templating
16 from flask import g, request, signals
17
18
19 log = logging.getLogger(__name__)
20
21
22 class TraceMiddleware(object):
23
24 def __init__(self, app, tracer, service="flask", use_signals=True):
25 self.app = app
26 self.app.logger.info("initializing trace middleware")
27
28 # save our traces.
29 self._tracer = tracer
30 self._service = service
31
32 self._tracer.set_service_info(
33 service=service,
34 app="flask",
35 app_type=AppTypes.web,
36 )
37
38 # warn the user if signals are unavailable (because blinker isn't
39 # installed) if they are asking to use them.
40 if use_signals and not signals.signals_available:
41 self.app.logger.info(_blinker_not_installed_msg)
42 self.use_signals = use_signals and signals.signals_available
43
44 # instrument request timings
45 timing_signals = {
46 'request_started': self._request_started,
47 'request_finished': self._request_finished,
48 'got_request_exception': self._request_exception,
49 }
50 if self.use_signals and _signals_exist(timing_signals):
51 self._connect(timing_signals)
52 else:
53 # Fallback to request hooks. Won't catch exceptions.
54 # handle exceptions.
55 self.app.before_request(self._before_request)
56 self.app.after_request(self._after_request)
57
58 # Instrument template rendering. If it's flask >= 0.11, we can use
59 # signals, Otherwise we have to patch a global method.
60 template_signals = {
61 'before_render_template': self._template_started, # added in 0.11
62 'template_rendered': self._template_done
63 }
64 if self.use_signals and _signals_exist(template_signals):
65 self._connect(template_signals)
66 else:
67 _patch_render(tracer)
68
69 def _flask_signals_exist(self, names):
70 """ Return true if the current version of flask has all of the given
71 signals.
72 """
73 return all(getattr(signals, n, None) for n in names)
74
75 def _connect(self, signal_to_handler):
76 connected = True
77 for name, handler in signal_to_handler.items():
78 s = getattr(signals, name, None)
79 if not s:
80 connected = False
81 log.warn("trying to instrument missing signal %s", name)
82 continue
83 s.connect(handler, sender=self.app)
84 return connected
85
86 # common methods
87
88 def _start_span(self):
89 try:
90 # if we have a parent span here, it means something was gone wrong.
91 # might as well clear it out.
92 self._tracer.clear_current_span()
93
94 g.flask_datadog_span = self._tracer.trace(
95 "flask.request",
96 service=self._service,
97 span_type=http.TYPE,
98 )
99 except Exception:
100 self.app.logger.exception("error tracing request")
101
102 def _finish_span(self, response=None, exception=None):
103 """ Close and finish the active span if it exists. """
104 span = getattr(g, 'flask_datadog_span', None)
105 if span:
106 if span.sampled:
107 error = 0
108 code = response.status_code if response else None
109
110 # if we didn't get a response, but we did get an exception, set
111 # codes accordingly.
112 if not response and exception:
113 error = 1
114 code = 500
115 span.set_tag(errors.ERROR_TYPE, type(exception))
116 span.set_tag(errors.ERROR_MSG, exception)
117
118 span.resource = str(request.endpoint or "").lower()
119 span.set_tag(http.URL, str(request.base_url or ""))
120 span.set_tag(http.STATUS_CODE, code)
121 span.error = error
122 span.finish()
123 # Clear our span just in case.
124 g.flask_datadog_span = None
125
126 # Request hook methods
127
128 def _before_request(self):
129 """ Starts tracing the current request and stores it in the global
130 request object.
131 """
132 self._start_span()
133
134 def _after_request(self, response):
135 """ handles a successful response. """
136 try:
137 self._finish_span(response=response)
138 except Exception:
139 self.app.logger.exception("error finishing trace")
140 finally:
141 return response
142
143 # signal handling methods
144
145 def _request_started(self, sender):
146 self._start_span()
147
148 def _request_finished(self, sender, response, **kwargs):
149 try:
150 self._finish_span(response=response)
151 except Exception:
152 self.app.logger.exception("error finishing trace")
153 return response
154
155 def _request_exception(self, *args, **kwargs):
156 """ handles an error response. """
157 exception = kwargs.pop("exception", None)
158 try:
159 self._finish_span(exception=exception)
160 except Exception:
161 self.app.logger.exception("error tracing error")
162
163 def _template_started(self, sender, template, *args, **kwargs):
164 span = self._tracer.trace('flask.template')
165 try:
166 span.span_type = http.TEMPLATE
167 span.set_tag("flask.template", template.name or "string")
168 finally:
169 g.flask_datadog_tmpl_span = span
170
171 def _template_done(self, *arg, **kwargs):
172 span = getattr(g, 'flask_datadog_tmpl_span', None)
173 if span:
174 span.finish()
175
176
177 def _patch_render(tracer):
178 """ patch flask's render template methods with the given tracer. """
179 # fall back to patching global method
180 _render = flask.templating._render
181
182 def _traced_render(template, context, app):
183 with tracer.trace('flask.template') as span:
184 span.span_type = http.TEMPLATE
185 span.set_tag("flask.template", template.name or "string")
186 return _render(template, context, app)
187
188 flask.templating._render = _traced_render
189
190
191 def _signals_exist(names):
192 """ Return true if all of the given signals exist in this version of flask.
193 """
194 return all(getattr(signals, n, False) for n in names)
195
196 _blinker_not_installed_msg = (
197 "please install blinker to use flask signals. "
198 "http://flask.pocoo.org/docs/0.11/signals/"
199 )
200
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ddtrace/contrib/flask/middleware.py b/ddtrace/contrib/flask/middleware.py
--- a/ddtrace/contrib/flask/middleware.py
+++ b/ddtrace/contrib/flask/middleware.py
@@ -9,6 +9,7 @@
import logging
# project
+from ... import compat
from ...ext import http, errors, AppTypes
# 3p
@@ -115,8 +116,8 @@
span.set_tag(errors.ERROR_TYPE, type(exception))
span.set_tag(errors.ERROR_MSG, exception)
- span.resource = str(request.endpoint or "").lower()
- span.set_tag(http.URL, str(request.base_url or ""))
+ span.resource = compat.to_unicode(request.endpoint or '').lower()
+ span.set_tag(http.URL, compat.to_unicode(request.base_url or ''))
span.set_tag(http.STATUS_CODE, code)
span.error = error
span.finish()
|
{"golden_diff": "diff --git a/ddtrace/contrib/flask/middleware.py b/ddtrace/contrib/flask/middleware.py\n--- a/ddtrace/contrib/flask/middleware.py\n+++ b/ddtrace/contrib/flask/middleware.py\n@@ -9,6 +9,7 @@\n import logging\n \n # project\n+from ... import compat\n from ...ext import http, errors, AppTypes\n \n # 3p\n@@ -115,8 +116,8 @@\n span.set_tag(errors.ERROR_TYPE, type(exception))\n span.set_tag(errors.ERROR_MSG, exception)\n \n- span.resource = str(request.endpoint or \"\").lower()\n- span.set_tag(http.URL, str(request.base_url or \"\"))\n+ span.resource = compat.to_unicode(request.endpoint or '').lower()\n+ span.set_tag(http.URL, compat.to_unicode(request.base_url or ''))\n span.set_tag(http.STATUS_CODE, code)\n span.error = error\n span.finish()\n", "issue": "Flask integration unicode support\nI ran into a small issue where the Flask integration is unable to set tags when they contain unicode characters.\n\nThe specific case we have is when unicode was in the url.\n\nhttps://github.com/DataDog/dd-trace-py/blob/8a9a7a59312de544d9c774e19872eb5e97cd8307/ddtrace/contrib/flask/middleware.py#L119\n\n```\n2016-10-28 13:47:58,953 - ERROR - error finishing trace\nTraceback (most recent call last):\n File \"/home/vagrant/.virtualenvs/kennel/local/lib/python2.7/site-packages/ddtrace/contrib/flask/middleware.py\", line 150, in _request_finished\n self._finish_span(response=response)\n File \"/home/vagrant/.virtualenvs/kennel/local/lib/python2.7/site-packages/ddtrace/contrib/flask/middleware.py\", line 119, in _finish_span\n span.set_tag(http.URL, str(request.base_url or \"\"))\nUnicodeEncodeError: 'ascii' codec can't encode character u'\\xe8' in position 31: ordinal not in range(128)\n```\n\nHappy to submit a PR for a fix, not sure how you would like to go about it, my thinking was:\n\n``` python\nfrom ...compat import stringify\n\n# Use `stringify` instead of `str` throughout the module\nspan.set_tag(http.URL, stringify(request.base_url or \"\"))\n```\n\n", "before_files": [{"content": "\"\"\"\nDatadog trace code for flask.\n\nRequires a modern version of flask and the `blinker` library (which is a\ndependency of flask signals).\n\"\"\"\n\n# stdlib\nimport logging\n\n# project\nfrom ...ext import http, errors, AppTypes\n\n# 3p\nimport flask.templating\nfrom flask import g, request, signals\n\n\nlog = logging.getLogger(__name__)\n\n\nclass TraceMiddleware(object):\n\n def __init__(self, app, tracer, service=\"flask\", use_signals=True):\n self.app = app\n self.app.logger.info(\"initializing trace middleware\")\n\n # save our traces.\n self._tracer = tracer\n self._service = service\n\n self._tracer.set_service_info(\n service=service,\n app=\"flask\",\n app_type=AppTypes.web,\n )\n\n # warn the user if signals are unavailable (because blinker isn't\n # installed) if they are asking to use them.\n if use_signals and not signals.signals_available:\n self.app.logger.info(_blinker_not_installed_msg)\n self.use_signals = use_signals and signals.signals_available\n\n # instrument request timings\n timing_signals = {\n 'request_started': self._request_started,\n 'request_finished': self._request_finished,\n 'got_request_exception': self._request_exception,\n }\n if self.use_signals and _signals_exist(timing_signals):\n self._connect(timing_signals)\n else:\n # Fallback to request hooks. Won't catch exceptions.\n # handle exceptions.\n self.app.before_request(self._before_request)\n self.app.after_request(self._after_request)\n\n # Instrument template rendering. 
If it's flask >= 0.11, we can use\n # signals, Otherwise we have to patch a global method.\n template_signals = {\n 'before_render_template': self._template_started, # added in 0.11\n 'template_rendered': self._template_done\n }\n if self.use_signals and _signals_exist(template_signals):\n self._connect(template_signals)\n else:\n _patch_render(tracer)\n\n def _flask_signals_exist(self, names):\n \"\"\" Return true if the current version of flask has all of the given\n signals.\n \"\"\"\n return all(getattr(signals, n, None) for n in names)\n\n def _connect(self, signal_to_handler):\n connected = True\n for name, handler in signal_to_handler.items():\n s = getattr(signals, name, None)\n if not s:\n connected = False\n log.warn(\"trying to instrument missing signal %s\", name)\n continue\n s.connect(handler, sender=self.app)\n return connected\n\n # common methods\n\n def _start_span(self):\n try:\n # if we have a parent span here, it means something was gone wrong.\n # might as well clear it out.\n self._tracer.clear_current_span()\n\n g.flask_datadog_span = self._tracer.trace(\n \"flask.request\",\n service=self._service,\n span_type=http.TYPE,\n )\n except Exception:\n self.app.logger.exception(\"error tracing request\")\n\n def _finish_span(self, response=None, exception=None):\n \"\"\" Close and finish the active span if it exists. \"\"\"\n span = getattr(g, 'flask_datadog_span', None)\n if span:\n if span.sampled:\n error = 0\n code = response.status_code if response else None\n\n # if we didn't get a response, but we did get an exception, set\n # codes accordingly.\n if not response and exception:\n error = 1\n code = 500\n span.set_tag(errors.ERROR_TYPE, type(exception))\n span.set_tag(errors.ERROR_MSG, exception)\n\n span.resource = str(request.endpoint or \"\").lower()\n span.set_tag(http.URL, str(request.base_url or \"\"))\n span.set_tag(http.STATUS_CODE, code)\n span.error = error\n span.finish()\n # Clear our span just in case.\n g.flask_datadog_span = None\n\n # Request hook methods\n\n def _before_request(self):\n \"\"\" Starts tracing the current request and stores it in the global\n request object.\n \"\"\"\n self._start_span()\n\n def _after_request(self, response):\n \"\"\" handles a successful response. \"\"\"\n try:\n self._finish_span(response=response)\n except Exception:\n self.app.logger.exception(\"error finishing trace\")\n finally:\n return response\n\n # signal handling methods\n\n def _request_started(self, sender):\n self._start_span()\n\n def _request_finished(self, sender, response, **kwargs):\n try:\n self._finish_span(response=response)\n except Exception:\n self.app.logger.exception(\"error finishing trace\")\n return response\n\n def _request_exception(self, *args, **kwargs):\n \"\"\" handles an error response. \"\"\"\n exception = kwargs.pop(\"exception\", None)\n try:\n self._finish_span(exception=exception)\n except Exception:\n self.app.logger.exception(\"error tracing error\")\n\n def _template_started(self, sender, template, *args, **kwargs):\n span = self._tracer.trace('flask.template')\n try:\n span.span_type = http.TEMPLATE\n span.set_tag(\"flask.template\", template.name or \"string\")\n finally:\n g.flask_datadog_tmpl_span = span\n\n def _template_done(self, *arg, **kwargs):\n span = getattr(g, 'flask_datadog_tmpl_span', None)\n if span:\n span.finish()\n\n\ndef _patch_render(tracer):\n \"\"\" patch flask's render template methods with the given tracer. 
\"\"\"\n # fall back to patching global method\n _render = flask.templating._render\n\n def _traced_render(template, context, app):\n with tracer.trace('flask.template') as span:\n span.span_type = http.TEMPLATE\n span.set_tag(\"flask.template\", template.name or \"string\")\n return _render(template, context, app)\n\n flask.templating._render = _traced_render\n\n\ndef _signals_exist(names):\n \"\"\" Return true if all of the given signals exist in this version of flask.\n \"\"\"\n return all(getattr(signals, n, False) for n in names)\n\n_blinker_not_installed_msg = (\n \"please install blinker to use flask signals. \"\n \"http://flask.pocoo.org/docs/0.11/signals/\"\n)\n", "path": "ddtrace/contrib/flask/middleware.py"}], "after_files": [{"content": "\"\"\"\nDatadog trace code for flask.\n\nRequires a modern version of flask and the `blinker` library (which is a\ndependency of flask signals).\n\"\"\"\n\n# stdlib\nimport logging\n\n# project\nfrom ... import compat\nfrom ...ext import http, errors, AppTypes\n\n# 3p\nimport flask.templating\nfrom flask import g, request, signals\n\n\nlog = logging.getLogger(__name__)\n\n\nclass TraceMiddleware(object):\n\n def __init__(self, app, tracer, service=\"flask\", use_signals=True):\n self.app = app\n self.app.logger.info(\"initializing trace middleware\")\n\n # save our traces.\n self._tracer = tracer\n self._service = service\n\n self._tracer.set_service_info(\n service=service,\n app=\"flask\",\n app_type=AppTypes.web,\n )\n\n # warn the user if signals are unavailable (because blinker isn't\n # installed) if they are asking to use them.\n if use_signals and not signals.signals_available:\n self.app.logger.info(_blinker_not_installed_msg)\n self.use_signals = use_signals and signals.signals_available\n\n # instrument request timings\n timing_signals = {\n 'request_started': self._request_started,\n 'request_finished': self._request_finished,\n 'got_request_exception': self._request_exception,\n }\n if self.use_signals and _signals_exist(timing_signals):\n self._connect(timing_signals)\n else:\n # Fallback to request hooks. Won't catch exceptions.\n # handle exceptions.\n self.app.before_request(self._before_request)\n self.app.after_request(self._after_request)\n\n # Instrument template rendering. 
If it's flask >= 0.11, we can use\n # signals, Otherwise we have to patch a global method.\n template_signals = {\n 'before_render_template': self._template_started, # added in 0.11\n 'template_rendered': self._template_done\n }\n if self.use_signals and _signals_exist(template_signals):\n self._connect(template_signals)\n else:\n _patch_render(tracer)\n\n def _flask_signals_exist(self, names):\n \"\"\" Return true if the current version of flask has all of the given\n signals.\n \"\"\"\n return all(getattr(signals, n, None) for n in names)\n\n def _connect(self, signal_to_handler):\n connected = True\n for name, handler in signal_to_handler.items():\n s = getattr(signals, name, None)\n if not s:\n connected = False\n log.warn(\"trying to instrument missing signal %s\", name)\n continue\n s.connect(handler, sender=self.app)\n return connected\n\n # common methods\n\n def _start_span(self):\n try:\n # if we have a parent span here, it means something was gone wrong.\n # might as well clear it out.\n self._tracer.clear_current_span()\n\n g.flask_datadog_span = self._tracer.trace(\n \"flask.request\",\n service=self._service,\n span_type=http.TYPE,\n )\n except Exception:\n self.app.logger.exception(\"error tracing request\")\n\n def _finish_span(self, response=None, exception=None):\n \"\"\" Close and finish the active span if it exists. \"\"\"\n span = getattr(g, 'flask_datadog_span', None)\n if span:\n if span.sampled:\n error = 0\n code = response.status_code if response else None\n\n # if we didn't get a response, but we did get an exception, set\n # codes accordingly.\n if not response and exception:\n error = 1\n code = 500\n span.set_tag(errors.ERROR_TYPE, type(exception))\n span.set_tag(errors.ERROR_MSG, exception)\n\n span.resource = compat.to_unicode(request.endpoint or '').lower()\n span.set_tag(http.URL, compat.to_unicode(request.base_url or ''))\n span.set_tag(http.STATUS_CODE, code)\n span.error = error\n span.finish()\n # Clear our span just in case.\n g.flask_datadog_span = None\n\n # Request hook methods\n\n def _before_request(self):\n \"\"\" Starts tracing the current request and stores it in the global\n request object.\n \"\"\"\n self._start_span()\n\n def _after_request(self, response):\n \"\"\" handles a successful response. \"\"\"\n try:\n self._finish_span(response=response)\n except Exception:\n self.app.logger.exception(\"error finishing trace\")\n finally:\n return response\n\n # signal handling methods\n\n def _request_started(self, sender):\n self._start_span()\n\n def _request_finished(self, sender, response, **kwargs):\n try:\n self._finish_span(response=response)\n except Exception:\n self.app.logger.exception(\"error finishing trace\")\n return response\n\n def _request_exception(self, *args, **kwargs):\n \"\"\" handles an error response. \"\"\"\n exception = kwargs.pop(\"exception\", None)\n try:\n self._finish_span(exception=exception)\n except Exception:\n self.app.logger.exception(\"error tracing error\")\n\n def _template_started(self, sender, template, *args, **kwargs):\n span = self._tracer.trace('flask.template')\n try:\n span.span_type = http.TEMPLATE\n span.set_tag(\"flask.template\", template.name or \"string\")\n finally:\n g.flask_datadog_tmpl_span = span\n\n def _template_done(self, *arg, **kwargs):\n span = getattr(g, 'flask_datadog_tmpl_span', None)\n if span:\n span.finish()\n\n\ndef _patch_render(tracer):\n \"\"\" patch flask's render template methods with the given tracer. 
\"\"\"\n # fall back to patching global method\n _render = flask.templating._render\n\n def _traced_render(template, context, app):\n with tracer.trace('flask.template') as span:\n span.span_type = http.TEMPLATE\n span.set_tag(\"flask.template\", template.name or \"string\")\n return _render(template, context, app)\n\n flask.templating._render = _traced_render\n\n\ndef _signals_exist(names):\n \"\"\" Return true if all of the given signals exist in this version of flask.\n \"\"\"\n return all(getattr(signals, n, False) for n in names)\n\n_blinker_not_installed_msg = (\n \"please install blinker to use flask signals. \"\n \"http://flask.pocoo.org/docs/0.11/signals/\"\n)\n", "path": "ddtrace/contrib/flask/middleware.py"}]}
| 2,519 | 199 |
gh_patches_debug_14855
|
rasdani/github-patches
|
git_diff
|
mesonbuild__meson-9532
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[regression] String formatting has regressed since 0.60. ERROR: Format placeholder @9@ out of range.
**Describe the bug**
There is a regression of as yet unknown cause in Meson's string formatting, as discovered by @alex3kov and diagnosed by myself at libsixel/libsixel#50. This regression causes a previously building package to no longer build.
**To Reproduce**
https://github.com/libsixel/libsixel/blob/v1.10.3/meson.build
**Expected behavior**
Good build
**system parameters**
* Is this a [cross build](https://mesonbuild.com/Cross-compilation.html) or just a plain native build (for the same computer)?
Plain build.
* what operating system (e.g. MacOS Catalina, Windows 10, CentOS 8.0, Ubuntu 18.04, etc.)
Arch Linux
* what Python version are you using e.g. 3.8.0
Python 3.9.7
* what `meson --version`
 I confirm that versions above 0.59.2 are affected.
* what `ninja --version` if it's a Ninja build
We do not get to Ninja step, Meson is the cause.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mesonbuild/interpreter/primitives/string.py`
Content:
```
1 # Copyright 2021 The Meson development team
2 # SPDX-license-identifier: Apache-2.0
3
4 import re
5 import os
6 from pathlib import PurePath
7
8 import typing as T
9
10 from ...mesonlib import version_compare
11 from ...interpreterbase import (
12 ObjectHolder,
13 MesonOperator,
14 FeatureNew,
15 typed_operator,
16 noKwargs,
17 noPosargs,
18 typed_pos_args,
19
20 TYPE_var,
21 TYPE_kwargs,
22
23 InvalidArguments,
24 )
25
26
27 if T.TYPE_CHECKING:
28 # Object holders need the actual interpreter
29 from ...interpreter import Interpreter
30
31 class StringHolder(ObjectHolder[str]):
32 def __init__(self, obj: str, interpreter: 'Interpreter') -> None:
33 super().__init__(obj, interpreter)
34 self.methods.update({
35 'contains': self.contains_method,
36 'startswith': self.startswith_method,
37 'endswith': self.endswith_method,
38 'format': self.format_method,
39 'join': self.join_method,
40 'replace': self.replace_method,
41 'split': self.split_method,
42 'strip': self.strip_method,
43 'substring': self.substring_method,
44 'to_int': self.to_int_method,
45 'to_lower': self.to_lower_method,
46 'to_upper': self.to_upper_method,
47 'underscorify': self.underscorify_method,
48 'version_compare': self.version_compare_method,
49 })
50
51 self.trivial_operators.update({
52 # Arithmetic
53 MesonOperator.PLUS: (str, lambda x: self.held_object + x),
54
55 # Comparison
56 MesonOperator.EQUALS: (str, lambda x: self.held_object == x),
57 MesonOperator.NOT_EQUALS: (str, lambda x: self.held_object != x),
58 MesonOperator.GREATER: (str, lambda x: self.held_object > x),
59 MesonOperator.LESS: (str, lambda x: self.held_object < x),
60 MesonOperator.GREATER_EQUALS: (str, lambda x: self.held_object >= x),
61 MesonOperator.LESS_EQUALS: (str, lambda x: self.held_object <= x),
62 })
63
64 # Use actual methods for functions that require additional checks
65 self.operators.update({
66 MesonOperator.DIV: self.op_div,
67 MesonOperator.INDEX: self.op_index,
68 })
69
70 def display_name(self) -> str:
71 return 'str'
72
73 @noKwargs
74 @typed_pos_args('str.contains', str)
75 def contains_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
76 return self.held_object.find(args[0]) >= 0
77
78 @noKwargs
79 @typed_pos_args('str.startswith', str)
80 def startswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
81 return self.held_object.startswith(args[0])
82
83 @noKwargs
84 @typed_pos_args('str.endswith', str)
85 def endswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
86 return self.held_object.endswith(args[0])
87
88 @noKwargs
89 @typed_pos_args('str.format', varargs=object)
90 def format_method(self, args: T.Tuple[T.List[object]], kwargs: TYPE_kwargs) -> str:
91 arg_strings: T.List[str] = []
92 for arg in args[0]:
93 if isinstance(arg, bool): # Python boolean is upper case.
94 arg = str(arg).lower()
95 arg_strings.append(str(arg))
96
97 def arg_replace(match: T.Match[str]) -> str:
98 idx = int(match.group(1))
99 if idx >= len(arg_strings):
100 raise InvalidArguments(f'Format placeholder @{idx}@ out of range.')
101 return arg_strings[idx]
102
103 return re.sub(r'@(\d+)@', arg_replace, self.held_object)
104
105 @noKwargs
106 @typed_pos_args('str.join', varargs=str)
107 def join_method(self, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> str:
108 return self.held_object.join(args[0])
109
110 @noKwargs
111 @typed_pos_args('str.replace', str, str)
112 def replace_method(self, args: T.Tuple[str, str], kwargs: TYPE_kwargs) -> str:
113 return self.held_object.replace(args[0], args[1])
114
115 @noKwargs
116 @typed_pos_args('str.split', optargs=[str])
117 def split_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> T.List[str]:
118 return self.held_object.split(args[0])
119
120 @noKwargs
121 @typed_pos_args('str.strip', optargs=[str])
122 def strip_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> str:
123 return self.held_object.strip(args[0])
124
125 @noKwargs
126 @typed_pos_args('str.substring', optargs=[int, int])
127 def substring_method(self, args: T.Tuple[T.Optional[int], T.Optional[int]], kwargs: TYPE_kwargs) -> str:
128 start = args[0] if args[0] is not None else 0
129 end = args[1] if args[1] is not None else len(self.held_object)
130 return self.held_object[start:end]
131
132 @noKwargs
133 @noPosargs
134 def to_int_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
135 try:
136 return int(self.held_object)
137 except ValueError:
138 raise InvalidArguments(f'String {self.held_object!r} cannot be converted to int')
139
140 @noKwargs
141 @noPosargs
142 def to_lower_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
143 return self.held_object.lower()
144
145 @noKwargs
146 @noPosargs
147 def to_upper_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
148 return self.held_object.upper()
149
150 @noKwargs
151 @noPosargs
152 def underscorify_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
153 return re.sub(r'[^a-zA-Z0-9]', '_', self.held_object)
154
155 @noKwargs
156 @typed_pos_args('str.version_compare', str)
157 def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
158 return version_compare(self.held_object, args[0])
159
160 @FeatureNew('/ with string arguments', '0.49.0')
161 @typed_operator(MesonOperator.DIV, str)
162 def op_div(self, other: str) -> str:
163 return os.path.join(self.held_object, other).replace('\\', '/')
164
165 @typed_operator(MesonOperator.INDEX, int)
166 def op_index(self, other: int) -> str:
167 try:
168 return self.held_object[other]
169 except IndexError:
170 raise InvalidArguments(f'Index {other} out of bounds of string of size {len(self.held_object)}.')
171
172
173 class MesonVersionString(str):
174 pass
175
176 class MesonVersionStringHolder(StringHolder):
177 @noKwargs
178 @typed_pos_args('str.version_compare', str)
179 def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
180 self.interpreter.tmp_meson_version = args[0]
181 return version_compare(self.held_object, args[0])
182
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/mesonbuild/interpreter/primitives/string.py b/mesonbuild/interpreter/primitives/string.py
--- a/mesonbuild/interpreter/primitives/string.py
+++ b/mesonbuild/interpreter/primitives/string.py
@@ -13,6 +13,7 @@
MesonOperator,
FeatureNew,
typed_operator,
+ noArgsFlattening,
noKwargs,
noPosargs,
typed_pos_args,
@@ -85,6 +86,7 @@
def endswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
return self.held_object.endswith(args[0])
+ @noArgsFlattening
@noKwargs
@typed_pos_args('str.format', varargs=object)
def format_method(self, args: T.Tuple[T.List[object]], kwargs: TYPE_kwargs) -> str:
|
{"golden_diff": "diff --git a/mesonbuild/interpreter/primitives/string.py b/mesonbuild/interpreter/primitives/string.py\n--- a/mesonbuild/interpreter/primitives/string.py\n+++ b/mesonbuild/interpreter/primitives/string.py\n@@ -13,6 +13,7 @@\n MesonOperator,\n FeatureNew,\n typed_operator,\n+ noArgsFlattening,\n noKwargs,\n noPosargs,\n typed_pos_args,\n@@ -85,6 +86,7 @@\n def endswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:\n return self.held_object.endswith(args[0])\n \n+ @noArgsFlattening\n @noKwargs\n @typed_pos_args('str.format', varargs=object)\n def format_method(self, args: T.Tuple[T.List[object]], kwargs: TYPE_kwargs) -> str:\n", "issue": "[regression] String formatting has regressed since 0.60. ERROR: Format placeholder @9@ out of range.\n**Describe the bug**\r\nThere is a regression of as yet unknown cause in Meson's string formatting, as discovered by @alex3kov and diagnosed by myself at libsixel/libsixel#50. This regression causes previously building package to no longer build.\r\n\r\n**To Reproduce**\r\nhttps://github.com/libsixel/libsixel/blob/v1.10.3/meson.build\r\n\r\n**Expected behavior**\r\nGood build\r\n\r\n**system parameters**\r\n* Is this a [cross build](https://mesonbuild.com/Cross-compilation.html) or just a plain native build (for the same computer)?\r\n Plain build.\r\n* what operating system (e.g. MacOS Catalina, Windows 10, CentOS 8.0, Ubuntu 18.04, etc.)\r\n Arch Linux\r\n* what Python version are you using e.g. 3.8.0\r\n Python 3.9.7\r\n* what `meson --version`\r\n Versions above 0.59.2 I confirm are affected.\r\n* what `ninja --version` if it's a Ninja build\r\n We do not get to Ninja step, Meson is the cause.\n", "before_files": [{"content": "# Copyright 2021 The Meson development team\n# SPDX-license-identifier: Apache-2.0\n\nimport re\nimport os\nfrom pathlib import PurePath\n\nimport typing as T\n\nfrom ...mesonlib import version_compare\nfrom ...interpreterbase import (\n ObjectHolder,\n MesonOperator,\n FeatureNew,\n typed_operator,\n noKwargs,\n noPosargs,\n typed_pos_args,\n\n TYPE_var,\n TYPE_kwargs,\n\n InvalidArguments,\n)\n\n\nif T.TYPE_CHECKING:\n # Object holders need the actual interpreter\n from ...interpreter import Interpreter\n\nclass StringHolder(ObjectHolder[str]):\n def __init__(self, obj: str, interpreter: 'Interpreter') -> None:\n super().__init__(obj, interpreter)\n self.methods.update({\n 'contains': self.contains_method,\n 'startswith': self.startswith_method,\n 'endswith': self.endswith_method,\n 'format': self.format_method,\n 'join': self.join_method,\n 'replace': self.replace_method,\n 'split': self.split_method,\n 'strip': self.strip_method,\n 'substring': self.substring_method,\n 'to_int': self.to_int_method,\n 'to_lower': self.to_lower_method,\n 'to_upper': self.to_upper_method,\n 'underscorify': self.underscorify_method,\n 'version_compare': self.version_compare_method,\n })\n\n self.trivial_operators.update({\n # Arithmetic\n MesonOperator.PLUS: (str, lambda x: self.held_object + x),\n\n # Comparison\n MesonOperator.EQUALS: (str, lambda x: self.held_object == x),\n MesonOperator.NOT_EQUALS: (str, lambda x: self.held_object != x),\n MesonOperator.GREATER: (str, lambda x: self.held_object > x),\n MesonOperator.LESS: (str, lambda x: self.held_object < x),\n MesonOperator.GREATER_EQUALS: (str, lambda x: self.held_object >= x),\n MesonOperator.LESS_EQUALS: (str, lambda x: self.held_object <= x),\n })\n\n # Use actual methods for functions that require additional checks\n self.operators.update({\n 
MesonOperator.DIV: self.op_div,\n MesonOperator.INDEX: self.op_index,\n })\n\n def display_name(self) -> str:\n return 'str'\n\n @noKwargs\n @typed_pos_args('str.contains', str)\n def contains_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:\n return self.held_object.find(args[0]) >= 0\n\n @noKwargs\n @typed_pos_args('str.startswith', str)\n def startswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:\n return self.held_object.startswith(args[0])\n\n @noKwargs\n @typed_pos_args('str.endswith', str)\n def endswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:\n return self.held_object.endswith(args[0])\n\n @noKwargs\n @typed_pos_args('str.format', varargs=object)\n def format_method(self, args: T.Tuple[T.List[object]], kwargs: TYPE_kwargs) -> str:\n arg_strings: T.List[str] = []\n for arg in args[0]:\n if isinstance(arg, bool): # Python boolean is upper case.\n arg = str(arg).lower()\n arg_strings.append(str(arg))\n\n def arg_replace(match: T.Match[str]) -> str:\n idx = int(match.group(1))\n if idx >= len(arg_strings):\n raise InvalidArguments(f'Format placeholder @{idx}@ out of range.')\n return arg_strings[idx]\n\n return re.sub(r'@(\\d+)@', arg_replace, self.held_object)\n\n @noKwargs\n @typed_pos_args('str.join', varargs=str)\n def join_method(self, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> str:\n return self.held_object.join(args[0])\n\n @noKwargs\n @typed_pos_args('str.replace', str, str)\n def replace_method(self, args: T.Tuple[str, str], kwargs: TYPE_kwargs) -> str:\n return self.held_object.replace(args[0], args[1])\n\n @noKwargs\n @typed_pos_args('str.split', optargs=[str])\n def split_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> T.List[str]:\n return self.held_object.split(args[0])\n\n @noKwargs\n @typed_pos_args('str.strip', optargs=[str])\n def strip_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> str:\n return self.held_object.strip(args[0])\n\n @noKwargs\n @typed_pos_args('str.substring', optargs=[int, int])\n def substring_method(self, args: T.Tuple[T.Optional[int], T.Optional[int]], kwargs: TYPE_kwargs) -> str:\n start = args[0] if args[0] is not None else 0\n end = args[1] if args[1] is not None else len(self.held_object)\n return self.held_object[start:end]\n\n @noKwargs\n @noPosargs\n def to_int_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:\n try:\n return int(self.held_object)\n except ValueError:\n raise InvalidArguments(f'String {self.held_object!r} cannot be converted to int')\n\n @noKwargs\n @noPosargs\n def to_lower_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:\n return self.held_object.lower()\n\n @noKwargs\n @noPosargs\n def to_upper_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:\n return self.held_object.upper()\n\n @noKwargs\n @noPosargs\n def underscorify_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:\n return re.sub(r'[^a-zA-Z0-9]', '_', self.held_object)\n\n @noKwargs\n @typed_pos_args('str.version_compare', str)\n def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:\n return version_compare(self.held_object, args[0])\n\n @FeatureNew('/ with string arguments', '0.49.0')\n @typed_operator(MesonOperator.DIV, str)\n def op_div(self, other: str) -> str:\n return os.path.join(self.held_object, other).replace('\\\\', '/')\n\n @typed_operator(MesonOperator.INDEX, int)\n def op_index(self, other: int) -> str:\n try:\n return self.held_object[other]\n except 
IndexError:\n raise InvalidArguments(f'Index {other} out of bounds of string of size {len(self.held_object)}.')\n\n\nclass MesonVersionString(str):\n pass\n\nclass MesonVersionStringHolder(StringHolder):\n @noKwargs\n @typed_pos_args('str.version_compare', str)\n def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:\n self.interpreter.tmp_meson_version = args[0]\n return version_compare(self.held_object, args[0])\n", "path": "mesonbuild/interpreter/primitives/string.py"}], "after_files": [{"content": "# Copyright 2021 The Meson development team\n# SPDX-license-identifier: Apache-2.0\n\nimport re\nimport os\nfrom pathlib import PurePath\n\nimport typing as T\n\nfrom ...mesonlib import version_compare\nfrom ...interpreterbase import (\n ObjectHolder,\n MesonOperator,\n FeatureNew,\n typed_operator,\n noArgsFlattening,\n noKwargs,\n noPosargs,\n typed_pos_args,\n\n TYPE_var,\n TYPE_kwargs,\n\n InvalidArguments,\n)\n\n\nif T.TYPE_CHECKING:\n # Object holders need the actual interpreter\n from ...interpreter import Interpreter\n\nclass StringHolder(ObjectHolder[str]):\n def __init__(self, obj: str, interpreter: 'Interpreter') -> None:\n super().__init__(obj, interpreter)\n self.methods.update({\n 'contains': self.contains_method,\n 'startswith': self.startswith_method,\n 'endswith': self.endswith_method,\n 'format': self.format_method,\n 'join': self.join_method,\n 'replace': self.replace_method,\n 'split': self.split_method,\n 'strip': self.strip_method,\n 'substring': self.substring_method,\n 'to_int': self.to_int_method,\n 'to_lower': self.to_lower_method,\n 'to_upper': self.to_upper_method,\n 'underscorify': self.underscorify_method,\n 'version_compare': self.version_compare_method,\n })\n\n self.trivial_operators.update({\n # Arithmetic\n MesonOperator.PLUS: (str, lambda x: self.held_object + x),\n\n # Comparison\n MesonOperator.EQUALS: (str, lambda x: self.held_object == x),\n MesonOperator.NOT_EQUALS: (str, lambda x: self.held_object != x),\n MesonOperator.GREATER: (str, lambda x: self.held_object > x),\n MesonOperator.LESS: (str, lambda x: self.held_object < x),\n MesonOperator.GREATER_EQUALS: (str, lambda x: self.held_object >= x),\n MesonOperator.LESS_EQUALS: (str, lambda x: self.held_object <= x),\n })\n\n # Use actual methods for functions that require additional checks\n self.operators.update({\n MesonOperator.DIV: self.op_div,\n MesonOperator.INDEX: self.op_index,\n })\n\n def display_name(self) -> str:\n return 'str'\n\n @noKwargs\n @typed_pos_args('str.contains', str)\n def contains_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:\n return self.held_object.find(args[0]) >= 0\n\n @noKwargs\n @typed_pos_args('str.startswith', str)\n def startswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:\n return self.held_object.startswith(args[0])\n\n @noKwargs\n @typed_pos_args('str.endswith', str)\n def endswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:\n return self.held_object.endswith(args[0])\n\n @noArgsFlattening\n @noKwargs\n @typed_pos_args('str.format', varargs=object)\n def format_method(self, args: T.Tuple[T.List[object]], kwargs: TYPE_kwargs) -> str:\n arg_strings: T.List[str] = []\n for arg in args[0]:\n if isinstance(arg, bool): # Python boolean is upper case.\n arg = str(arg).lower()\n arg_strings.append(str(arg))\n\n def arg_replace(match: T.Match[str]) -> str:\n idx = int(match.group(1))\n if idx >= len(arg_strings):\n raise InvalidArguments(f'Format placeholder @{idx}@ out of range.')\n 
return arg_strings[idx]\n\n return re.sub(r'@(\\d+)@', arg_replace, self.held_object)\n\n @noKwargs\n @typed_pos_args('str.join', varargs=str)\n def join_method(self, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> str:\n return self.held_object.join(args[0])\n\n @noKwargs\n @typed_pos_args('str.replace', str, str)\n def replace_method(self, args: T.Tuple[str, str], kwargs: TYPE_kwargs) -> str:\n return self.held_object.replace(args[0], args[1])\n\n @noKwargs\n @typed_pos_args('str.split', optargs=[str])\n def split_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> T.List[str]:\n return self.held_object.split(args[0])\n\n @noKwargs\n @typed_pos_args('str.strip', optargs=[str])\n def strip_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> str:\n return self.held_object.strip(args[0])\n\n @noKwargs\n @typed_pos_args('str.substring', optargs=[int, int])\n def substring_method(self, args: T.Tuple[T.Optional[int], T.Optional[int]], kwargs: TYPE_kwargs) -> str:\n start = args[0] if args[0] is not None else 0\n end = args[1] if args[1] is not None else len(self.held_object)\n return self.held_object[start:end]\n\n @noKwargs\n @noPosargs\n def to_int_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:\n try:\n return int(self.held_object)\n except ValueError:\n raise InvalidArguments(f'String {self.held_object!r} cannot be converted to int')\n\n @noKwargs\n @noPosargs\n def to_lower_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:\n return self.held_object.lower()\n\n @noKwargs\n @noPosargs\n def to_upper_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:\n return self.held_object.upper()\n\n @noKwargs\n @noPosargs\n def underscorify_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:\n return re.sub(r'[^a-zA-Z0-9]', '_', self.held_object)\n\n @noKwargs\n @typed_pos_args('str.version_compare', str)\n def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:\n return version_compare(self.held_object, args[0])\n\n @FeatureNew('/ with string arguments', '0.49.0')\n @typed_operator(MesonOperator.DIV, str)\n def op_div(self, other: str) -> str:\n return os.path.join(self.held_object, other).replace('\\\\', '/')\n\n @typed_operator(MesonOperator.INDEX, int)\n def op_index(self, other: int) -> str:\n try:\n return self.held_object[other]\n except IndexError:\n raise InvalidArguments(f'Index {other} out of bounds of string of size {len(self.held_object)}.')\n\n\nclass MesonVersionString(str):\n pass\n\nclass MesonVersionStringHolder(StringHolder):\n @noKwargs\n @typed_pos_args('str.version_compare', str)\n def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:\n self.interpreter.tmp_meson_version = args[0]\n return version_compare(self.held_object, args[0])\n", "path": "mesonbuild/interpreter/primitives/string.py"}]}
| 2,624 | 195 |
gh_patches_debug_26613
|
rasdani/github-patches
|
git_diff
|
litestar-org__litestar-1852
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `litestar/contrib/sqlalchemy/dto.py`
Content:
```
1 from __future__ import annotations
2
3 from functools import singledispatchmethod
4 from typing import TYPE_CHECKING, Generic, TypeVar
5
6 from sqlalchemy import Column, inspect, orm, sql
7 from sqlalchemy.ext.associationproxy import AssociationProxy, AssociationProxyExtensionType
8 from sqlalchemy.ext.hybrid import HybridExtensionType, hybrid_property
9 from sqlalchemy.orm import (
10 ColumnProperty,
11 DeclarativeBase,
12 InspectionAttr,
13 Mapped,
14 NotExtension,
15 QueryableAttribute,
16 RelationshipProperty,
17 )
18
19 from litestar.dto.factory.abc import AbstractDTOFactory
20 from litestar.dto.factory.data_structures import FieldDefinition
21 from litestar.dto.factory.field import DTO_FIELD_META_KEY, DTOField, Mark
22 from litestar.dto.factory.utils import get_model_type_hints
23 from litestar.types.empty import Empty
24 from litestar.utils.helpers import get_fully_qualified_class_name
25 from litestar.utils.signature import ParsedSignature
26
27 if TYPE_CHECKING:
28 from typing import Any, ClassVar, Collection, Generator
29
30 from typing_extensions import TypeAlias
31
32 from litestar.typing import ParsedType
33
34 __all__ = ("SQLAlchemyDTO",)
35
36 T = TypeVar("T", bound="DeclarativeBase | Collection[DeclarativeBase]")
37 ElementType: TypeAlias = "Column[Any] | RelationshipProperty[Any]"
38
39 SQLA_NS = {**vars(orm), **vars(sql)}
40
41
42 class SQLAlchemyDTO(AbstractDTOFactory[T], Generic[T]):
43 """Support for domain modelling with SQLAlchemy."""
44
45 __slots__ = ()
46
47 model_type: ClassVar[type[DeclarativeBase]]
48
49 @singledispatchmethod
50 @classmethod
51 def handle_orm_descriptor(
52 cls,
53 extension_type: NotExtension | AssociationProxyExtensionType | HybridExtensionType,
54 orm_descriptor: InspectionAttr,
55 key: str,
56 model_type_hints: dict[str, ParsedType],
57 model_name: str,
58 ) -> list[FieldDefinition]:
59 raise NotImplementedError(f"Unsupported extension type: {extension_type}")
60
61 @handle_orm_descriptor.register(NotExtension)
62 @classmethod
63 def _(
64 cls,
65 extension_type: NotExtension,
66 key: str,
67 orm_descriptor: InspectionAttr,
68 model_type_hints: dict[str, ParsedType],
69 model_name: str,
70 ) -> list[FieldDefinition]:
71 if not isinstance(orm_descriptor, QueryableAttribute):
72 raise NotImplementedError(f"Unexpected descriptor type for '{extension_type}': '{orm_descriptor}'")
73
74 elem: ElementType
75 if isinstance(orm_descriptor.property, ColumnProperty):
76 if not isinstance(orm_descriptor.property.expression, Column):
77 raise NotImplementedError(f"Expected 'Column', got: '{orm_descriptor.property.expression}'")
78 elem = orm_descriptor.property.expression
79 elif isinstance(orm_descriptor.property, RelationshipProperty):
80 elem = orm_descriptor.property
81 else:
82 raise NotImplementedError(f"Unhandled property type: '{orm_descriptor.property}'")
83
84 default, default_factory = _detect_defaults(elem)
85
86 if (parsed_type := model_type_hints[key]).origin is Mapped:
87 (parsed_type,) = parsed_type.inner_types
88 else:
89 raise NotImplementedError(f"Expected 'Mapped' origin, got: '{parsed_type.origin}'")
90
91 return [
92 FieldDefinition(
93 name=key,
94 default=default,
95 parsed_type=parsed_type,
96 default_factory=default_factory,
97 dto_field=elem.info.get(DTO_FIELD_META_KEY, DTOField()),
98 unique_model_name=model_name,
99 dto_for=None,
100 )
101 ]
102
103 @handle_orm_descriptor.register(AssociationProxyExtensionType)
104 @classmethod
105 def _(
106 cls,
107 extension_type: AssociationProxyExtensionType,
108 key: str,
109 orm_descriptor: InspectionAttr,
110 model_type_hints: dict[str, ParsedType],
111 model_name: str,
112 ) -> list[FieldDefinition]:
113 if not isinstance(orm_descriptor, AssociationProxy):
114 raise NotImplementedError(f"Unexpected descriptor type '{orm_descriptor}' for '{extension_type}'")
115
116 if (parsed_type := model_type_hints[key]).origin is AssociationProxy:
117 (parsed_type,) = parsed_type.inner_types
118 else:
119 raise NotImplementedError(f"Expected 'AssociationProxy' origin, got: '{parsed_type.origin}'")
120
121 return [
122 FieldDefinition(
123 name=key,
124 default=Empty,
125 parsed_type=parsed_type,
126 default_factory=None,
127 dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.READ_ONLY)),
128 unique_model_name=model_name,
129 dto_for=None,
130 )
131 ]
132
133 @handle_orm_descriptor.register(HybridExtensionType)
134 @classmethod
135 def _(
136 cls,
137 extension_type: HybridExtensionType,
138 key: str,
139 orm_descriptor: InspectionAttr,
140 model_type_hints: dict[str, ParsedType],
141 model_name: str,
142 ) -> list[FieldDefinition]:
143 if not isinstance(orm_descriptor, hybrid_property):
144 raise NotImplementedError(f"Unexpected descriptor type '{orm_descriptor}' for '{extension_type}'")
145
146 getter_sig = ParsedSignature.from_fn(orm_descriptor.fget, {})
147
148 field_defs = [
149 FieldDefinition(
150 name=orm_descriptor.__name__,
151 default=Empty,
152 parsed_type=getter_sig.return_type,
153 default_factory=None,
154 dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.READ_ONLY)),
155 unique_model_name=model_name,
156 dto_for="return",
157 )
158 ]
159
160 if orm_descriptor.fset is not None:
161 setter_sig = ParsedSignature.from_fn(orm_descriptor.fset, {})
162 field_defs.append(
163 FieldDefinition(
164 name=orm_descriptor.__name__,
165 default=Empty,
166 parsed_type=next(iter(setter_sig.parameters.values())).parsed_type,
167 default_factory=None,
168 dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.WRITE_ONLY)),
169 unique_model_name=model_name,
170 dto_for="data",
171 )
172 )
173
174 return field_defs
175
176 @classmethod
177 def generate_field_definitions(cls, model_type: type[DeclarativeBase]) -> Generator[FieldDefinition, None, None]:
178 if (mapper := inspect(model_type)) is None: # pragma: no cover
179 raise RuntimeError("Unexpected `None` value for mapper.")
180
181 # includes SQLAlchemy names and other mapped class names in the forward reference resolution namespace
182 namespace = {**SQLA_NS, **{m.class_.__name__: m.class_ for m in mapper.registry.mappers if m is not mapper}}
183 model_type_hints = get_model_type_hints(model_type, namespace=namespace)
184 model_name = get_fully_qualified_class_name(model_type)
185
186 # the same hybrid property descriptor can be included in `all_orm_descriptors` multiple times, once
187 # for each method name it is bound to. We only need to see it once, so track views of it here.
188 seen_hybrid_descriptors: set[hybrid_property] = set()
189 for key, orm_descriptor in mapper.all_orm_descriptors.items():
190 if isinstance(orm_descriptor, hybrid_property):
191 if orm_descriptor in seen_hybrid_descriptors:
192 continue
193 seen_hybrid_descriptors.add(orm_descriptor)
194
195 yield from cls.handle_orm_descriptor(
196 orm_descriptor.extension_type, key, orm_descriptor, model_type_hints, model_name
197 )
198
199 @classmethod
200 def detect_nested_field(cls, parsed_type: ParsedType) -> bool:
201 return parsed_type.is_subclass_of(DeclarativeBase)
202
203
204 def _detect_defaults(elem: ElementType) -> tuple[Any, Any]:
205 default: Any = Empty
206 default_factory: Any = None # pyright:ignore
207 if sqla_default := getattr(elem, "default", None):
208 if sqla_default.is_scalar:
209 default = sqla_default.arg
210 elif sqla_default.is_callable:
211
212 def default_factory(d: Any = sqla_default) -> Any:
213 return d.arg({})
214
215 else:
216 raise ValueError("Unexpected default type")
217 else:
218 if getattr(elem, "nullable", False):
219 default = None
220 return default, default_factory
221
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/litestar/contrib/sqlalchemy/dto.py b/litestar/contrib/sqlalchemy/dto.py
--- a/litestar/contrib/sqlalchemy/dto.py
+++ b/litestar/contrib/sqlalchemy/dto.py
@@ -20,6 +20,7 @@
from litestar.dto.factory.data_structures import FieldDefinition
from litestar.dto.factory.field import DTO_FIELD_META_KEY, DTOField, Mark
from litestar.dto.factory.utils import get_model_type_hints
+from litestar.exceptions import ImproperlyConfiguredException
from litestar.types.empty import Empty
from litestar.utils.helpers import get_fully_qualified_class_name
from litestar.utils.signature import ParsedSignature
@@ -83,10 +84,15 @@
default, default_factory = _detect_defaults(elem)
- if (parsed_type := model_type_hints[key]).origin is Mapped:
- (parsed_type,) = parsed_type.inner_types
- else:
- raise NotImplementedError(f"Expected 'Mapped' origin, got: '{parsed_type.origin}'")
+ try:
+ if (parsed_type := model_type_hints[key]).origin is Mapped:
+ (parsed_type,) = parsed_type.inner_types
+ else:
+ raise NotImplementedError(f"Expected 'Mapped' origin, got: '{parsed_type.origin}'")
+ except KeyError as e:
+ raise ImproperlyConfiguredException(
+ f"No type information found for '{orm_descriptor}'. Has a type annotation been added to the column?"
+ ) from e
return [
FieldDefinition(
|
{"golden_diff": "diff --git a/litestar/contrib/sqlalchemy/dto.py b/litestar/contrib/sqlalchemy/dto.py\n--- a/litestar/contrib/sqlalchemy/dto.py\n+++ b/litestar/contrib/sqlalchemy/dto.py\n@@ -20,6 +20,7 @@\n from litestar.dto.factory.data_structures import FieldDefinition\n from litestar.dto.factory.field import DTO_FIELD_META_KEY, DTOField, Mark\n from litestar.dto.factory.utils import get_model_type_hints\n+from litestar.exceptions import ImproperlyConfiguredException\n from litestar.types.empty import Empty\n from litestar.utils.helpers import get_fully_qualified_class_name\n from litestar.utils.signature import ParsedSignature\n@@ -83,10 +84,15 @@\n \n default, default_factory = _detect_defaults(elem)\n \n- if (parsed_type := model_type_hints[key]).origin is Mapped:\n- (parsed_type,) = parsed_type.inner_types\n- else:\n- raise NotImplementedError(f\"Expected 'Mapped' origin, got: '{parsed_type.origin}'\")\n+ try:\n+ if (parsed_type := model_type_hints[key]).origin is Mapped:\n+ (parsed_type,) = parsed_type.inner_types\n+ else:\n+ raise NotImplementedError(f\"Expected 'Mapped' origin, got: '{parsed_type.origin}'\")\n+ except KeyError as e:\n+ raise ImproperlyConfiguredException(\n+ f\"No type information found for '{orm_descriptor}'. Has a type annotation been added to the column?\"\n+ ) from e\n \n return [\n FieldDefinition(\n", "issue": "StaticFilesConfig and virtual directories\nI'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem. \r\n\r\nThis is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). 
I think this condition should be relaxed to support virtual filesystems.\r\n\r\nhttps://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom functools import singledispatchmethod\nfrom typing import TYPE_CHECKING, Generic, TypeVar\n\nfrom sqlalchemy import Column, inspect, orm, sql\nfrom sqlalchemy.ext.associationproxy import AssociationProxy, AssociationProxyExtensionType\nfrom sqlalchemy.ext.hybrid import HybridExtensionType, hybrid_property\nfrom sqlalchemy.orm import (\n ColumnProperty,\n DeclarativeBase,\n InspectionAttr,\n Mapped,\n NotExtension,\n QueryableAttribute,\n RelationshipProperty,\n)\n\nfrom litestar.dto.factory.abc import AbstractDTOFactory\nfrom litestar.dto.factory.data_structures import FieldDefinition\nfrom litestar.dto.factory.field import DTO_FIELD_META_KEY, DTOField, Mark\nfrom litestar.dto.factory.utils import get_model_type_hints\nfrom litestar.types.empty import Empty\nfrom litestar.utils.helpers import get_fully_qualified_class_name\nfrom litestar.utils.signature import ParsedSignature\n\nif TYPE_CHECKING:\n from typing import Any, ClassVar, Collection, Generator\n\n from typing_extensions import TypeAlias\n\n from litestar.typing import ParsedType\n\n__all__ = (\"SQLAlchemyDTO\",)\n\nT = TypeVar(\"T\", bound=\"DeclarativeBase | Collection[DeclarativeBase]\")\nElementType: TypeAlias = \"Column[Any] | RelationshipProperty[Any]\"\n\nSQLA_NS = {**vars(orm), **vars(sql)}\n\n\nclass SQLAlchemyDTO(AbstractDTOFactory[T], Generic[T]):\n \"\"\"Support for domain modelling with SQLAlchemy.\"\"\"\n\n __slots__ = ()\n\n model_type: ClassVar[type[DeclarativeBase]]\n\n @singledispatchmethod\n @classmethod\n def handle_orm_descriptor(\n cls,\n extension_type: NotExtension | AssociationProxyExtensionType | HybridExtensionType,\n orm_descriptor: InspectionAttr,\n key: str,\n model_type_hints: dict[str, ParsedType],\n model_name: str,\n ) -> list[FieldDefinition]:\n raise NotImplementedError(f\"Unsupported extension type: {extension_type}\")\n\n @handle_orm_descriptor.register(NotExtension)\n @classmethod\n def _(\n cls,\n extension_type: NotExtension,\n key: str,\n orm_descriptor: InspectionAttr,\n model_type_hints: dict[str, ParsedType],\n model_name: str,\n ) -> list[FieldDefinition]:\n if not isinstance(orm_descriptor, QueryableAttribute):\n raise NotImplementedError(f\"Unexpected descriptor type for '{extension_type}': '{orm_descriptor}'\")\n\n elem: ElementType\n if isinstance(orm_descriptor.property, ColumnProperty):\n if not isinstance(orm_descriptor.property.expression, Column):\n raise NotImplementedError(f\"Expected 'Column', got: '{orm_descriptor.property.expression}'\")\n elem = orm_descriptor.property.expression\n elif isinstance(orm_descriptor.property, RelationshipProperty):\n elem = orm_descriptor.property\n else:\n raise NotImplementedError(f\"Unhandled property type: '{orm_descriptor.property}'\")\n\n default, default_factory = _detect_defaults(elem)\n\n if (parsed_type := model_type_hints[key]).origin is Mapped:\n (parsed_type,) = parsed_type.inner_types\n else:\n raise NotImplementedError(f\"Expected 'Mapped' origin, got: '{parsed_type.origin}'\")\n\n return [\n FieldDefinition(\n name=key,\n default=default,\n parsed_type=parsed_type,\n default_factory=default_factory,\n dto_field=elem.info.get(DTO_FIELD_META_KEY, DTOField()),\n unique_model_name=model_name,\n dto_for=None,\n )\n ]\n\n 
@handle_orm_descriptor.register(AssociationProxyExtensionType)\n @classmethod\n def _(\n cls,\n extension_type: AssociationProxyExtensionType,\n key: str,\n orm_descriptor: InspectionAttr,\n model_type_hints: dict[str, ParsedType],\n model_name: str,\n ) -> list[FieldDefinition]:\n if not isinstance(orm_descriptor, AssociationProxy):\n raise NotImplementedError(f\"Unexpected descriptor type '{orm_descriptor}' for '{extension_type}'\")\n\n if (parsed_type := model_type_hints[key]).origin is AssociationProxy:\n (parsed_type,) = parsed_type.inner_types\n else:\n raise NotImplementedError(f\"Expected 'AssociationProxy' origin, got: '{parsed_type.origin}'\")\n\n return [\n FieldDefinition(\n name=key,\n default=Empty,\n parsed_type=parsed_type,\n default_factory=None,\n dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.READ_ONLY)),\n unique_model_name=model_name,\n dto_for=None,\n )\n ]\n\n @handle_orm_descriptor.register(HybridExtensionType)\n @classmethod\n def _(\n cls,\n extension_type: HybridExtensionType,\n key: str,\n orm_descriptor: InspectionAttr,\n model_type_hints: dict[str, ParsedType],\n model_name: str,\n ) -> list[FieldDefinition]:\n if not isinstance(orm_descriptor, hybrid_property):\n raise NotImplementedError(f\"Unexpected descriptor type '{orm_descriptor}' for '{extension_type}'\")\n\n getter_sig = ParsedSignature.from_fn(orm_descriptor.fget, {})\n\n field_defs = [\n FieldDefinition(\n name=orm_descriptor.__name__,\n default=Empty,\n parsed_type=getter_sig.return_type,\n default_factory=None,\n dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.READ_ONLY)),\n unique_model_name=model_name,\n dto_for=\"return\",\n )\n ]\n\n if orm_descriptor.fset is not None:\n setter_sig = ParsedSignature.from_fn(orm_descriptor.fset, {})\n field_defs.append(\n FieldDefinition(\n name=orm_descriptor.__name__,\n default=Empty,\n parsed_type=next(iter(setter_sig.parameters.values())).parsed_type,\n default_factory=None,\n dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.WRITE_ONLY)),\n unique_model_name=model_name,\n dto_for=\"data\",\n )\n )\n\n return field_defs\n\n @classmethod\n def generate_field_definitions(cls, model_type: type[DeclarativeBase]) -> Generator[FieldDefinition, None, None]:\n if (mapper := inspect(model_type)) is None: # pragma: no cover\n raise RuntimeError(\"Unexpected `None` value for mapper.\")\n\n # includes SQLAlchemy names and other mapped class names in the forward reference resolution namespace\n namespace = {**SQLA_NS, **{m.class_.__name__: m.class_ for m in mapper.registry.mappers if m is not mapper}}\n model_type_hints = get_model_type_hints(model_type, namespace=namespace)\n model_name = get_fully_qualified_class_name(model_type)\n\n # the same hybrid property descriptor can be included in `all_orm_descriptors` multiple times, once\n # for each method name it is bound to. 
We only need to see it once, so track views of it here.\n seen_hybrid_descriptors: set[hybrid_property] = set()\n for key, orm_descriptor in mapper.all_orm_descriptors.items():\n if isinstance(orm_descriptor, hybrid_property):\n if orm_descriptor in seen_hybrid_descriptors:\n continue\n seen_hybrid_descriptors.add(orm_descriptor)\n\n yield from cls.handle_orm_descriptor(\n orm_descriptor.extension_type, key, orm_descriptor, model_type_hints, model_name\n )\n\n @classmethod\n def detect_nested_field(cls, parsed_type: ParsedType) -> bool:\n return parsed_type.is_subclass_of(DeclarativeBase)\n\n\ndef _detect_defaults(elem: ElementType) -> tuple[Any, Any]:\n default: Any = Empty\n default_factory: Any = None # pyright:ignore\n if sqla_default := getattr(elem, \"default\", None):\n if sqla_default.is_scalar:\n default = sqla_default.arg\n elif sqla_default.is_callable:\n\n def default_factory(d: Any = sqla_default) -> Any:\n return d.arg({})\n\n else:\n raise ValueError(\"Unexpected default type\")\n else:\n if getattr(elem, \"nullable\", False):\n default = None\n return default, default_factory\n", "path": "litestar/contrib/sqlalchemy/dto.py"}], "after_files": [{"content": "from __future__ import annotations\n\nfrom functools import singledispatchmethod\nfrom typing import TYPE_CHECKING, Generic, TypeVar\n\nfrom sqlalchemy import Column, inspect, orm, sql\nfrom sqlalchemy.ext.associationproxy import AssociationProxy, AssociationProxyExtensionType\nfrom sqlalchemy.ext.hybrid import HybridExtensionType, hybrid_property\nfrom sqlalchemy.orm import (\n ColumnProperty,\n DeclarativeBase,\n InspectionAttr,\n Mapped,\n NotExtension,\n QueryableAttribute,\n RelationshipProperty,\n)\n\nfrom litestar.dto.factory.abc import AbstractDTOFactory\nfrom litestar.dto.factory.data_structures import FieldDefinition\nfrom litestar.dto.factory.field import DTO_FIELD_META_KEY, DTOField, Mark\nfrom litestar.dto.factory.utils import get_model_type_hints\nfrom litestar.exceptions import ImproperlyConfiguredException\nfrom litestar.types.empty import Empty\nfrom litestar.utils.helpers import get_fully_qualified_class_name\nfrom litestar.utils.signature import ParsedSignature\n\nif TYPE_CHECKING:\n from typing import Any, ClassVar, Collection, Generator\n\n from typing_extensions import TypeAlias\n\n from litestar.typing import ParsedType\n\n__all__ = (\"SQLAlchemyDTO\",)\n\nT = TypeVar(\"T\", bound=\"DeclarativeBase | Collection[DeclarativeBase]\")\nElementType: TypeAlias = \"Column[Any] | RelationshipProperty[Any]\"\n\nSQLA_NS = {**vars(orm), **vars(sql)}\n\n\nclass SQLAlchemyDTO(AbstractDTOFactory[T], Generic[T]):\n \"\"\"Support for domain modelling with SQLAlchemy.\"\"\"\n\n __slots__ = ()\n\n model_type: ClassVar[type[DeclarativeBase]]\n\n @singledispatchmethod\n @classmethod\n def handle_orm_descriptor(\n cls,\n extension_type: NotExtension | AssociationProxyExtensionType | HybridExtensionType,\n orm_descriptor: InspectionAttr,\n key: str,\n model_type_hints: dict[str, ParsedType],\n model_name: str,\n ) -> list[FieldDefinition]:\n raise NotImplementedError(f\"Unsupported extension type: {extension_type}\")\n\n @handle_orm_descriptor.register(NotExtension)\n @classmethod\n def _(\n cls,\n extension_type: NotExtension,\n key: str,\n orm_descriptor: InspectionAttr,\n model_type_hints: dict[str, ParsedType],\n model_name: str,\n ) -> list[FieldDefinition]:\n if not isinstance(orm_descriptor, QueryableAttribute):\n raise NotImplementedError(f\"Unexpected descriptor type for '{extension_type}': 
'{orm_descriptor}'\")\n\n elem: ElementType\n if isinstance(orm_descriptor.property, ColumnProperty):\n if not isinstance(orm_descriptor.property.expression, Column):\n raise NotImplementedError(f\"Expected 'Column', got: '{orm_descriptor.property.expression}'\")\n elem = orm_descriptor.property.expression\n elif isinstance(orm_descriptor.property, RelationshipProperty):\n elem = orm_descriptor.property\n else:\n raise NotImplementedError(f\"Unhandled property type: '{orm_descriptor.property}'\")\n\n default, default_factory = _detect_defaults(elem)\n\n try:\n if (parsed_type := model_type_hints[key]).origin is Mapped:\n (parsed_type,) = parsed_type.inner_types\n else:\n raise NotImplementedError(f\"Expected 'Mapped' origin, got: '{parsed_type.origin}'\")\n except KeyError as e:\n raise ImproperlyConfiguredException(\n f\"No type information found for '{orm_descriptor}'. Has a type annotation been added to the column?\"\n ) from e\n\n return [\n FieldDefinition(\n name=key,\n default=default,\n parsed_type=parsed_type,\n default_factory=default_factory,\n dto_field=elem.info.get(DTO_FIELD_META_KEY, DTOField()),\n unique_model_name=model_name,\n dto_for=None,\n )\n ]\n\n @handle_orm_descriptor.register(AssociationProxyExtensionType)\n @classmethod\n def _(\n cls,\n extension_type: AssociationProxyExtensionType,\n key: str,\n orm_descriptor: InspectionAttr,\n model_type_hints: dict[str, ParsedType],\n model_name: str,\n ) -> list[FieldDefinition]:\n if not isinstance(orm_descriptor, AssociationProxy):\n raise NotImplementedError(f\"Unexpected descriptor type '{orm_descriptor}' for '{extension_type}'\")\n\n if (parsed_type := model_type_hints[key]).origin is AssociationProxy:\n (parsed_type,) = parsed_type.inner_types\n else:\n raise NotImplementedError(f\"Expected 'AssociationProxy' origin, got: '{parsed_type.origin}'\")\n\n return [\n FieldDefinition(\n name=key,\n default=Empty,\n parsed_type=parsed_type,\n default_factory=None,\n dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.READ_ONLY)),\n unique_model_name=model_name,\n dto_for=None,\n )\n ]\n\n @handle_orm_descriptor.register(HybridExtensionType)\n @classmethod\n def _(\n cls,\n extension_type: HybridExtensionType,\n key: str,\n orm_descriptor: InspectionAttr,\n model_type_hints: dict[str, ParsedType],\n model_name: str,\n ) -> list[FieldDefinition]:\n if not isinstance(orm_descriptor, hybrid_property):\n raise NotImplementedError(f\"Unexpected descriptor type '{orm_descriptor}' for '{extension_type}'\")\n\n getter_sig = ParsedSignature.from_fn(orm_descriptor.fget, {})\n\n field_defs = [\n FieldDefinition(\n name=orm_descriptor.__name__,\n default=Empty,\n parsed_type=getter_sig.return_type,\n default_factory=None,\n dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.READ_ONLY)),\n unique_model_name=model_name,\n dto_for=\"return\",\n )\n ]\n\n if orm_descriptor.fset is not None:\n setter_sig = ParsedSignature.from_fn(orm_descriptor.fset, {})\n field_defs.append(\n FieldDefinition(\n name=orm_descriptor.__name__,\n default=Empty,\n parsed_type=next(iter(setter_sig.parameters.values())).parsed_type,\n default_factory=None,\n dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.WRITE_ONLY)),\n unique_model_name=model_name,\n dto_for=\"data\",\n )\n )\n\n return field_defs\n\n @classmethod\n def generate_field_definitions(cls, model_type: type[DeclarativeBase]) -> Generator[FieldDefinition, None, None]:\n if (mapper := inspect(model_type)) is None: # pragma: no cover\n 
raise RuntimeError(\"Unexpected `None` value for mapper.\")\n\n # includes SQLAlchemy names and other mapped class names in the forward reference resolution namespace\n namespace = {**SQLA_NS, **{m.class_.__name__: m.class_ for m in mapper.registry.mappers if m is not mapper}}\n model_type_hints = get_model_type_hints(model_type, namespace=namespace)\n model_name = get_fully_qualified_class_name(model_type)\n\n # the same hybrid property descriptor can be included in `all_orm_descriptors` multiple times, once\n # for each method name it is bound to. We only need to see it once, so track views of it here.\n seen_hybrid_descriptors: set[hybrid_property] = set()\n for key, orm_descriptor in mapper.all_orm_descriptors.items():\n if isinstance(orm_descriptor, hybrid_property):\n if orm_descriptor in seen_hybrid_descriptors:\n continue\n seen_hybrid_descriptors.add(orm_descriptor)\n\n yield from cls.handle_orm_descriptor(\n orm_descriptor.extension_type, key, orm_descriptor, model_type_hints, model_name\n )\n\n @classmethod\n def detect_nested_field(cls, parsed_type: ParsedType) -> bool:\n return parsed_type.is_subclass_of(DeclarativeBase)\n\n\ndef _detect_defaults(elem: ElementType) -> tuple[Any, Any]:\n default: Any = Empty\n default_factory: Any = None # pyright:ignore\n if sqla_default := getattr(elem, \"default\", None):\n if sqla_default.is_scalar:\n default = sqla_default.arg\n elif sqla_default.is_callable:\n\n def default_factory(d: Any = sqla_default) -> Any:\n return d.arg({})\n\n else:\n raise ValueError(\"Unexpected default type\")\n else:\n if getattr(elem, \"nullable\", False):\n default = None\n return default, default_factory\n", "path": "litestar/contrib/sqlalchemy/dto.py"}]}
| 2,720 | 342 |
gh_patches_debug_19003
|
rasdani/github-patches
|
git_diff
|
bridgecrewio__checkov-5302
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
CKV_OPENAPI_20 incorrectly flags API keys via HTTPS
**Describe the issue**
#5253 added CKV_OPENAPI_20 with the message "Ensure that API keys are not sent over cleartext", but the [check](https://github.com/bridgecrewio/checkov/blob/main/checkov/openapi/checks/resource/generic/ClearTextAPIKey.py) does not check the API's supported schemes.
If the intent of this check is to prevent cleartext use of API keys, then if the root level [`schemes`](https://swagger.io/docs/specification/2-0/api-host-and-base-path/) key in OpenAPI 2.0 or [`servers`](https://swagger.io/docs/specification/api-host-and-base-path/) key in OpenAPI 3.0 specifies only `https` (2.0) or only `url`s which are HTTPS (3.0), this check should pass.
**Examples**
[fail2.json](https://github.com/bridgecrewio/checkov/blob/main/tests/openapi/checks/resource/generic/example_ClearTextAPIKey/fail2.json) and its YAML counterpart should fail, but if they specified `"schemes": ["https"]`, they should not.
Ditto for the OpenAPI 3.0 version of this example.
**Version (please complete the following information):**
2.3.312
**Additional context**
It may be that the message is wrong and you actually intend to flag all use of API keys, but if that's the case, the message should convey that. I also would argue that header API keys should not be scrutinized the same way as query parameter API keys, since the risk of leaking the API key unintentionally is higher with the latter.
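To make the intent concrete, here is a minimal sketch of the kind of guard I would expect, written against a generic `conf` dict rather than Checkov's actual check classes (names and structure are illustrative only): if the document declares only HTTPS transport, the API-key finding could be skipped.

```python
from typing import Any


def allows_cleartext_transport(conf: dict[str, Any]) -> bool:
    """Illustrative only: True if the spec may serve traffic over plain HTTP."""
    schemes = conf.get("schemes")  # OpenAPI 2.0 root-level schemes
    if isinstance(schemes, list) and schemes:
        return any(scheme != "https" for scheme in schemes)

    servers = conf.get("servers")  # OpenAPI 3.0 server URLs
    if isinstance(servers, list) and servers:
        return any(
            not str(server.get("url", "")).startswith("https://")
            for server in servers
        )

    # No transport information at all: assume cleartext is possible.
    return True
```

With a guard like this, `fail2.json` would still fail as written, but adding `"schemes": ["https"]` (or an HTTPS-only `servers` list in 3.0) would make it pass.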
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `checkov/openapi/checks/resource/generic/ClearTextAPIKey.py`
Content:
```
1 from __future__ import annotations
2
3 from typing import Any
4 from checkov.common.models.enums import CheckResult, CheckCategories
5 from checkov.common.checks.enums import BlockType
6 from checkov.common.util.consts import LINE_FIELD_NAMES
7 from checkov.openapi.checks.base_openapi_check import BaseOpenapiCheck
8
9
10 class ClearTestAPIKey(BaseOpenapiCheck):
11 def __init__(self) -> None:
12 id = "CKV_OPENAPI_20"
13 name = "Ensure that API keys are not sent over cleartext"
14 categories = (CheckCategories.API_SECURITY,)
15 supported_resources = ('paths',)
16 super().__init__(name=name, id=id, categories=categories, supported_entities=supported_resources,
17 block_type=BlockType.DOCUMENT)
18
19 def scan_entity_conf(self, conf: dict[str, Any], entity_type: str) -> tuple[CheckResult, dict[str, Any]]: # type:ignore[override] # return type is different than the base class
20 components = conf.get("components")
21 security_def = conf.get("securityDefinitions")
22 if components and isinstance(components, dict):
23 security_schemes = components.get("securitySchemes") or {}
24 elif security_def:
25 security_schemes = security_def
26 else:
27 return CheckResult.PASSED, conf
28
29 paths = conf.get('paths')
30 if not isinstance(paths, dict):
31 return CheckResult.PASSED, security_schemes
32
33 filtered_dict = {}
34 if isinstance(security_schemes, dict):
35 for name, scheme in security_schemes.items():
36 if isinstance(scheme, dict) and scheme.get('type') == "apiKey":
37 filtered_dict[name] = scheme
38
39 if not filtered_dict:
40 return CheckResult.PASSED, security_schemes
41
42 for key, path in paths.items():
43 if not path:
44 continue
45 if key in LINE_FIELD_NAMES:
46 continue
47 for value in path.values():
48 if not isinstance(value, dict):
49 continue
50 operation_security = value.get('security')
51 if operation_security and isinstance(operation_security, list):
52 for sec in operation_security[0]:
53 if sec in filtered_dict:
54 return CheckResult.FAILED, security_schemes
55
56 return CheckResult.PASSED, conf
57
58
59 check = ClearTestAPIKey()
60
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/checkov/openapi/checks/resource/generic/ClearTextAPIKey.py b/checkov/openapi/checks/resource/generic/ClearTextAPIKey.py
--- a/checkov/openapi/checks/resource/generic/ClearTextAPIKey.py
+++ b/checkov/openapi/checks/resource/generic/ClearTextAPIKey.py
@@ -17,6 +17,16 @@
block_type=BlockType.DOCUMENT)
def scan_entity_conf(self, conf: dict[str, Any], entity_type: str) -> tuple[CheckResult, dict[str, Any]]: # type:ignore[override] # return type is different than the base class
+ schemes = conf.get("schemes")
+ if schemes and isinstance(schemes, list):
+ if "http" not in schemes and "wp" not in schemes:
+ return CheckResult.PASSED, conf
+
+ servers = conf.get("servers")
+ if servers and isinstance(servers, list):
+ if not any(server['url'].startswith('http://') for server in servers):
+ return CheckResult.PASSED, conf
+
components = conf.get("components")
security_def = conf.get("securityDefinitions")
if components and isinstance(components, dict):
|
{"golden_diff": "diff --git a/checkov/openapi/checks/resource/generic/ClearTextAPIKey.py b/checkov/openapi/checks/resource/generic/ClearTextAPIKey.py\n--- a/checkov/openapi/checks/resource/generic/ClearTextAPIKey.py\n+++ b/checkov/openapi/checks/resource/generic/ClearTextAPIKey.py\n@@ -17,6 +17,16 @@\n block_type=BlockType.DOCUMENT)\n \n def scan_entity_conf(self, conf: dict[str, Any], entity_type: str) -> tuple[CheckResult, dict[str, Any]]: # type:ignore[override] # return type is different than the base class\n+ schemes = conf.get(\"schemes\")\n+ if schemes and isinstance(schemes, list):\n+ if \"http\" not in schemes and \"wp\" not in schemes:\n+ return CheckResult.PASSED, conf\n+\n+ servers = conf.get(\"servers\")\n+ if servers and isinstance(servers, list):\n+ if not any(server['url'].startswith('http://') for server in servers):\n+ return CheckResult.PASSED, conf\n+\n components = conf.get(\"components\")\n security_def = conf.get(\"securityDefinitions\")\n if components and isinstance(components, dict):\n", "issue": "CKV_OPENAPI_20 incorrectly flags API keys via HTTPS\n**Describe the issue**\r\n\r\n#5253 added CKV_OPENAPI_20 with the message \"Ensure that API keys are not sent over cleartext\", but the [check](https://github.com/bridgecrewio/checkov/blob/main/checkov/openapi/checks/resource/generic/ClearTextAPIKey.py) does not check the API's supported schemes.\r\n\r\nIf the intent of this check is to prevent cleartext use of API keys, then if the root level [`schemes`](https://swagger.io/docs/specification/2-0/api-host-and-base-path/) key in OpenAPI 2.0 or [`servers`](https://swagger.io/docs/specification/api-host-and-base-path/) key in OpenAPI 3.0 specifies only `https` (2.0) or only `url`s which are HTTPS (3.0), this check should pass.\r\n\r\n**Examples**\r\n\r\n[fail2.json](https://github.com/bridgecrewio/checkov/blob/main/tests/openapi/checks/resource/generic/example_ClearTextAPIKey/fail2.json) and its YAML counterpart should fail, but if they specified `\"schemes\": [\"https\"]`, they should not.\r\n\r\nDitto for the OpenAPI 3.0 version of this example.\r\n\r\n**Version (please complete the following information):**\r\n\r\n2.3.312\r\n\r\n**Additional context**\r\n\r\nIt may be that the message is wrong and you actually intend to flag all use of API keys, but if that's the case, the message should convey that. 
I also would argue that header API keys should not be scrutinized the same way as query parameter API keys, since the risk of leaking the API key unintentionally is higher with the latter.\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom typing import Any\nfrom checkov.common.models.enums import CheckResult, CheckCategories\nfrom checkov.common.checks.enums import BlockType\nfrom checkov.common.util.consts import LINE_FIELD_NAMES\nfrom checkov.openapi.checks.base_openapi_check import BaseOpenapiCheck\n\n\nclass ClearTestAPIKey(BaseOpenapiCheck):\n def __init__(self) -> None:\n id = \"CKV_OPENAPI_20\"\n name = \"Ensure that API keys are not sent over cleartext\"\n categories = (CheckCategories.API_SECURITY,)\n supported_resources = ('paths',)\n super().__init__(name=name, id=id, categories=categories, supported_entities=supported_resources,\n block_type=BlockType.DOCUMENT)\n\n def scan_entity_conf(self, conf: dict[str, Any], entity_type: str) -> tuple[CheckResult, dict[str, Any]]: # type:ignore[override] # return type is different than the base class\n components = conf.get(\"components\")\n security_def = conf.get(\"securityDefinitions\")\n if components and isinstance(components, dict):\n security_schemes = components.get(\"securitySchemes\") or {}\n elif security_def:\n security_schemes = security_def\n else:\n return CheckResult.PASSED, conf\n\n paths = conf.get('paths')\n if not isinstance(paths, dict):\n return CheckResult.PASSED, security_schemes\n\n filtered_dict = {}\n if isinstance(security_schemes, dict):\n for name, scheme in security_schemes.items():\n if isinstance(scheme, dict) and scheme.get('type') == \"apiKey\":\n filtered_dict[name] = scheme\n\n if not filtered_dict:\n return CheckResult.PASSED, security_schemes\n\n for key, path in paths.items():\n if not path:\n continue\n if key in LINE_FIELD_NAMES:\n continue\n for value in path.values():\n if not isinstance(value, dict):\n continue\n operation_security = value.get('security')\n if operation_security and isinstance(operation_security, list):\n for sec in operation_security[0]:\n if sec in filtered_dict:\n return CheckResult.FAILED, security_schemes\n\n return CheckResult.PASSED, conf\n\n\ncheck = ClearTestAPIKey()\n", "path": "checkov/openapi/checks/resource/generic/ClearTextAPIKey.py"}], "after_files": [{"content": "from __future__ import annotations\n\nfrom typing import Any\nfrom checkov.common.models.enums import CheckResult, CheckCategories\nfrom checkov.common.checks.enums import BlockType\nfrom checkov.common.util.consts import LINE_FIELD_NAMES\nfrom checkov.openapi.checks.base_openapi_check import BaseOpenapiCheck\n\n\nclass ClearTestAPIKey(BaseOpenapiCheck):\n def __init__(self) -> None:\n id = \"CKV_OPENAPI_20\"\n name = \"Ensure that API keys are not sent over cleartext\"\n categories = (CheckCategories.API_SECURITY,)\n supported_resources = ('paths',)\n super().__init__(name=name, id=id, categories=categories, supported_entities=supported_resources,\n block_type=BlockType.DOCUMENT)\n\n def scan_entity_conf(self, conf: dict[str, Any], entity_type: str) -> tuple[CheckResult, dict[str, Any]]: # type:ignore[override] # return type is different than the base class\n schemes = conf.get(\"schemes\")\n if schemes and isinstance(schemes, list):\n if \"http\" not in schemes and \"wp\" not in schemes:\n return CheckResult.PASSED, conf\n\n servers = conf.get(\"servers\")\n if servers and isinstance(servers, list):\n if not any(server['url'].startswith('http://') for server in servers):\n 
return CheckResult.PASSED, conf\n\n components = conf.get(\"components\")\n security_def = conf.get(\"securityDefinitions\")\n if components and isinstance(components, dict):\n security_schemes = components.get(\"securitySchemes\") or {}\n elif security_def:\n security_schemes = security_def\n else:\n return CheckResult.PASSED, conf\n\n paths = conf.get('paths')\n if not isinstance(paths, dict):\n return CheckResult.PASSED, security_schemes\n\n filtered_dict = {}\n if isinstance(security_schemes, dict):\n for name, scheme in security_schemes.items():\n if isinstance(scheme, dict) and scheme.get('type') == \"apiKey\":\n filtered_dict[name] = scheme\n\n if not filtered_dict:\n return CheckResult.PASSED, security_schemes\n\n for key, path in paths.items():\n if not path:\n continue\n if key in LINE_FIELD_NAMES:\n continue\n for value in path.values():\n if not isinstance(value, dict):\n continue\n operation_security = value.get('security')\n if operation_security and isinstance(operation_security, list):\n for sec in operation_security[0]:\n if sec in filtered_dict:\n return CheckResult.FAILED, security_schemes\n\n return CheckResult.PASSED, conf\n\n\ncheck = ClearTestAPIKey()\n", "path": "checkov/openapi/checks/resource/generic/ClearTextAPIKey.py"}]}
| 1,242 | 273 |
gh_patches_debug_4711
|
rasdani/github-patches
|
git_diff
|
scrapy__scrapy-4323
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Mac OS X, OS X → macOS
We have a few references in the documentation where we use the old name of that OS. We should update them.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scrapy/extensions/memusage.py`
Content:
```
1 """
2 MemoryUsage extension
3
4 See documentation in docs/topics/extensions.rst
5 """
6 import sys
7 import socket
8 import logging
9 from pprint import pformat
10 from importlib import import_module
11
12 from twisted.internet import task
13
14 from scrapy import signals
15 from scrapy.exceptions import NotConfigured
16 from scrapy.mail import MailSender
17 from scrapy.utils.engine import get_engine_status
18
19 logger = logging.getLogger(__name__)
20
21
22 class MemoryUsage(object):
23
24 def __init__(self, crawler):
25 if not crawler.settings.getbool('MEMUSAGE_ENABLED'):
26 raise NotConfigured
27 try:
28 # stdlib's resource module is only available on unix platforms.
29 self.resource = import_module('resource')
30 except ImportError:
31 raise NotConfigured
32
33 self.crawler = crawler
34 self.warned = False
35 self.notify_mails = crawler.settings.getlist('MEMUSAGE_NOTIFY_MAIL')
36 self.limit = crawler.settings.getint('MEMUSAGE_LIMIT_MB')*1024*1024
37 self.warning = crawler.settings.getint('MEMUSAGE_WARNING_MB')*1024*1024
38 self.check_interval = crawler.settings.getfloat('MEMUSAGE_CHECK_INTERVAL_SECONDS')
39 self.mail = MailSender.from_settings(crawler.settings)
40 crawler.signals.connect(self.engine_started, signal=signals.engine_started)
41 crawler.signals.connect(self.engine_stopped, signal=signals.engine_stopped)
42
43 @classmethod
44 def from_crawler(cls, crawler):
45 return cls(crawler)
46
47 def get_virtual_size(self):
48 size = self.resource.getrusage(self.resource.RUSAGE_SELF).ru_maxrss
49 if sys.platform != 'darwin':
50 # on Mac OS X ru_maxrss is in bytes, on Linux it is in KB
51 size *= 1024
52 return size
53
54 def engine_started(self):
55 self.crawler.stats.set_value('memusage/startup', self.get_virtual_size())
56 self.tasks = []
57 tsk = task.LoopingCall(self.update)
58 self.tasks.append(tsk)
59 tsk.start(self.check_interval, now=True)
60 if self.limit:
61 tsk = task.LoopingCall(self._check_limit)
62 self.tasks.append(tsk)
63 tsk.start(self.check_interval, now=True)
64 if self.warning:
65 tsk = task.LoopingCall(self._check_warning)
66 self.tasks.append(tsk)
67 tsk.start(self.check_interval, now=True)
68
69 def engine_stopped(self):
70 for tsk in self.tasks:
71 if tsk.running:
72 tsk.stop()
73
74 def update(self):
75 self.crawler.stats.max_value('memusage/max', self.get_virtual_size())
76
77 def _check_limit(self):
78 if self.get_virtual_size() > self.limit:
79 self.crawler.stats.set_value('memusage/limit_reached', 1)
80 mem = self.limit/1024/1024
81 logger.error("Memory usage exceeded %(memusage)dM. Shutting down Scrapy...",
82 {'memusage': mem}, extra={'crawler': self.crawler})
83 if self.notify_mails:
84 subj = "%s terminated: memory usage exceeded %dM at %s" % \
85 (self.crawler.settings['BOT_NAME'], mem, socket.gethostname())
86 self._send_report(self.notify_mails, subj)
87 self.crawler.stats.set_value('memusage/limit_notified', 1)
88
89 open_spiders = self.crawler.engine.open_spiders
90 if open_spiders:
91 for spider in open_spiders:
92 self.crawler.engine.close_spider(spider, 'memusage_exceeded')
93 else:
94 self.crawler.stop()
95
96 def _check_warning(self):
97 if self.warned: # warn only once
98 return
99 if self.get_virtual_size() > self.warning:
100 self.crawler.stats.set_value('memusage/warning_reached', 1)
101 mem = self.warning/1024/1024
102 logger.warning("Memory usage reached %(memusage)dM",
103 {'memusage': mem}, extra={'crawler': self.crawler})
104 if self.notify_mails:
105 subj = "%s warning: memory usage reached %dM at %s" % \
106 (self.crawler.settings['BOT_NAME'], mem, socket.gethostname())
107 self._send_report(self.notify_mails, subj)
108 self.crawler.stats.set_value('memusage/warning_notified', 1)
109 self.warned = True
110
111 def _send_report(self, rcpts, subject):
112 """send notification mail with some additional useful info"""
113 stats = self.crawler.stats
114 s = "Memory usage at engine startup : %dM\r\n" % (stats.get_value('memusage/startup')/1024/1024)
115 s += "Maximum memory usage : %dM\r\n" % (stats.get_value('memusage/max')/1024/1024)
116 s += "Current memory usage : %dM\r\n" % (self.get_virtual_size()/1024/1024)
117
118 s += "ENGINE STATUS ------------------------------------------------------- \r\n"
119 s += "\r\n"
120 s += pformat(get_engine_status(self.crawler.engine))
121 s += "\r\n"
122 self.mail.send(rcpts, subject, s)
123
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/scrapy/extensions/memusage.py b/scrapy/extensions/memusage.py
--- a/scrapy/extensions/memusage.py
+++ b/scrapy/extensions/memusage.py
@@ -47,7 +47,7 @@
def get_virtual_size(self):
size = self.resource.getrusage(self.resource.RUSAGE_SELF).ru_maxrss
if sys.platform != 'darwin':
- # on Mac OS X ru_maxrss is in bytes, on Linux it is in KB
+ # on macOS ru_maxrss is in bytes, on Linux it is in KB
size *= 1024
return size
|
{"golden_diff": "diff --git a/scrapy/extensions/memusage.py b/scrapy/extensions/memusage.py\n--- a/scrapy/extensions/memusage.py\n+++ b/scrapy/extensions/memusage.py\n@@ -47,7 +47,7 @@\n def get_virtual_size(self):\n size = self.resource.getrusage(self.resource.RUSAGE_SELF).ru_maxrss\n if sys.platform != 'darwin':\n- # on Mac OS X ru_maxrss is in bytes, on Linux it is in KB\n+ # on macOS ru_maxrss is in bytes, on Linux it is in KB\n size *= 1024\n return size\n", "issue": "Mac OS X, OS X \u2192 macOS\nWe have a few references in the documentation where we use the old name of that OS. We should update them.\n", "before_files": [{"content": "\"\"\"\nMemoryUsage extension\n\nSee documentation in docs/topics/extensions.rst\n\"\"\"\nimport sys\nimport socket\nimport logging\nfrom pprint import pformat\nfrom importlib import import_module\n\nfrom twisted.internet import task\n\nfrom scrapy import signals\nfrom scrapy.exceptions import NotConfigured\nfrom scrapy.mail import MailSender\nfrom scrapy.utils.engine import get_engine_status\n\nlogger = logging.getLogger(__name__)\n\n\nclass MemoryUsage(object):\n\n def __init__(self, crawler):\n if not crawler.settings.getbool('MEMUSAGE_ENABLED'):\n raise NotConfigured\n try:\n # stdlib's resource module is only available on unix platforms.\n self.resource = import_module('resource')\n except ImportError:\n raise NotConfigured\n\n self.crawler = crawler\n self.warned = False\n self.notify_mails = crawler.settings.getlist('MEMUSAGE_NOTIFY_MAIL')\n self.limit = crawler.settings.getint('MEMUSAGE_LIMIT_MB')*1024*1024\n self.warning = crawler.settings.getint('MEMUSAGE_WARNING_MB')*1024*1024\n self.check_interval = crawler.settings.getfloat('MEMUSAGE_CHECK_INTERVAL_SECONDS')\n self.mail = MailSender.from_settings(crawler.settings)\n crawler.signals.connect(self.engine_started, signal=signals.engine_started)\n crawler.signals.connect(self.engine_stopped, signal=signals.engine_stopped)\n\n @classmethod\n def from_crawler(cls, crawler):\n return cls(crawler)\n\n def get_virtual_size(self):\n size = self.resource.getrusage(self.resource.RUSAGE_SELF).ru_maxrss\n if sys.platform != 'darwin':\n # on Mac OS X ru_maxrss is in bytes, on Linux it is in KB\n size *= 1024\n return size\n\n def engine_started(self):\n self.crawler.stats.set_value('memusage/startup', self.get_virtual_size())\n self.tasks = []\n tsk = task.LoopingCall(self.update)\n self.tasks.append(tsk)\n tsk.start(self.check_interval, now=True)\n if self.limit:\n tsk = task.LoopingCall(self._check_limit)\n self.tasks.append(tsk)\n tsk.start(self.check_interval, now=True)\n if self.warning:\n tsk = task.LoopingCall(self._check_warning)\n self.tasks.append(tsk)\n tsk.start(self.check_interval, now=True)\n\n def engine_stopped(self):\n for tsk in self.tasks:\n if tsk.running:\n tsk.stop()\n\n def update(self):\n self.crawler.stats.max_value('memusage/max', self.get_virtual_size())\n\n def _check_limit(self):\n if self.get_virtual_size() > self.limit:\n self.crawler.stats.set_value('memusage/limit_reached', 1)\n mem = self.limit/1024/1024\n logger.error(\"Memory usage exceeded %(memusage)dM. 
Shutting down Scrapy...\",\n {'memusage': mem}, extra={'crawler': self.crawler})\n if self.notify_mails:\n subj = \"%s terminated: memory usage exceeded %dM at %s\" % \\\n (self.crawler.settings['BOT_NAME'], mem, socket.gethostname())\n self._send_report(self.notify_mails, subj)\n self.crawler.stats.set_value('memusage/limit_notified', 1)\n\n open_spiders = self.crawler.engine.open_spiders\n if open_spiders:\n for spider in open_spiders:\n self.crawler.engine.close_spider(spider, 'memusage_exceeded')\n else:\n self.crawler.stop()\n\n def _check_warning(self):\n if self.warned: # warn only once\n return\n if self.get_virtual_size() > self.warning:\n self.crawler.stats.set_value('memusage/warning_reached', 1)\n mem = self.warning/1024/1024\n logger.warning(\"Memory usage reached %(memusage)dM\",\n {'memusage': mem}, extra={'crawler': self.crawler})\n if self.notify_mails:\n subj = \"%s warning: memory usage reached %dM at %s\" % \\\n (self.crawler.settings['BOT_NAME'], mem, socket.gethostname())\n self._send_report(self.notify_mails, subj)\n self.crawler.stats.set_value('memusage/warning_notified', 1)\n self.warned = True\n\n def _send_report(self, rcpts, subject):\n \"\"\"send notification mail with some additional useful info\"\"\"\n stats = self.crawler.stats\n s = \"Memory usage at engine startup : %dM\\r\\n\" % (stats.get_value('memusage/startup')/1024/1024)\n s += \"Maximum memory usage : %dM\\r\\n\" % (stats.get_value('memusage/max')/1024/1024)\n s += \"Current memory usage : %dM\\r\\n\" % (self.get_virtual_size()/1024/1024)\n\n s += \"ENGINE STATUS ------------------------------------------------------- \\r\\n\"\n s += \"\\r\\n\"\n s += pformat(get_engine_status(self.crawler.engine))\n s += \"\\r\\n\"\n self.mail.send(rcpts, subject, s)\n", "path": "scrapy/extensions/memusage.py"}], "after_files": [{"content": "\"\"\"\nMemoryUsage extension\n\nSee documentation in docs/topics/extensions.rst\n\"\"\"\nimport sys\nimport socket\nimport logging\nfrom pprint import pformat\nfrom importlib import import_module\n\nfrom twisted.internet import task\n\nfrom scrapy import signals\nfrom scrapy.exceptions import NotConfigured\nfrom scrapy.mail import MailSender\nfrom scrapy.utils.engine import get_engine_status\n\nlogger = logging.getLogger(__name__)\n\n\nclass MemoryUsage(object):\n\n def __init__(self, crawler):\n if not crawler.settings.getbool('MEMUSAGE_ENABLED'):\n raise NotConfigured\n try:\n # stdlib's resource module is only available on unix platforms.\n self.resource = import_module('resource')\n except ImportError:\n raise NotConfigured\n\n self.crawler = crawler\n self.warned = False\n self.notify_mails = crawler.settings.getlist('MEMUSAGE_NOTIFY_MAIL')\n self.limit = crawler.settings.getint('MEMUSAGE_LIMIT_MB')*1024*1024\n self.warning = crawler.settings.getint('MEMUSAGE_WARNING_MB')*1024*1024\n self.check_interval = crawler.settings.getfloat('MEMUSAGE_CHECK_INTERVAL_SECONDS')\n self.mail = MailSender.from_settings(crawler.settings)\n crawler.signals.connect(self.engine_started, signal=signals.engine_started)\n crawler.signals.connect(self.engine_stopped, signal=signals.engine_stopped)\n\n @classmethod\n def from_crawler(cls, crawler):\n return cls(crawler)\n\n def get_virtual_size(self):\n size = self.resource.getrusage(self.resource.RUSAGE_SELF).ru_maxrss\n if sys.platform != 'darwin':\n # on macOS ru_maxrss is in bytes, on Linux it is in KB\n size *= 1024\n return size\n\n def engine_started(self):\n self.crawler.stats.set_value('memusage/startup', self.get_virtual_size())\n 
self.tasks = []\n tsk = task.LoopingCall(self.update)\n self.tasks.append(tsk)\n tsk.start(self.check_interval, now=True)\n if self.limit:\n tsk = task.LoopingCall(self._check_limit)\n self.tasks.append(tsk)\n tsk.start(self.check_interval, now=True)\n if self.warning:\n tsk = task.LoopingCall(self._check_warning)\n self.tasks.append(tsk)\n tsk.start(self.check_interval, now=True)\n\n def engine_stopped(self):\n for tsk in self.tasks:\n if tsk.running:\n tsk.stop()\n\n def update(self):\n self.crawler.stats.max_value('memusage/max', self.get_virtual_size())\n\n def _check_limit(self):\n if self.get_virtual_size() > self.limit:\n self.crawler.stats.set_value('memusage/limit_reached', 1)\n mem = self.limit/1024/1024\n logger.error(\"Memory usage exceeded %(memusage)dM. Shutting down Scrapy...\",\n {'memusage': mem}, extra={'crawler': self.crawler})\n if self.notify_mails:\n subj = \"%s terminated: memory usage exceeded %dM at %s\" % \\\n (self.crawler.settings['BOT_NAME'], mem, socket.gethostname())\n self._send_report(self.notify_mails, subj)\n self.crawler.stats.set_value('memusage/limit_notified', 1)\n\n open_spiders = self.crawler.engine.open_spiders\n if open_spiders:\n for spider in open_spiders:\n self.crawler.engine.close_spider(spider, 'memusage_exceeded')\n else:\n self.crawler.stop()\n\n def _check_warning(self):\n if self.warned: # warn only once\n return\n if self.get_virtual_size() > self.warning:\n self.crawler.stats.set_value('memusage/warning_reached', 1)\n mem = self.warning/1024/1024\n logger.warning(\"Memory usage reached %(memusage)dM\",\n {'memusage': mem}, extra={'crawler': self.crawler})\n if self.notify_mails:\n subj = \"%s warning: memory usage reached %dM at %s\" % \\\n (self.crawler.settings['BOT_NAME'], mem, socket.gethostname())\n self._send_report(self.notify_mails, subj)\n self.crawler.stats.set_value('memusage/warning_notified', 1)\n self.warned = True\n\n def _send_report(self, rcpts, subject):\n \"\"\"send notification mail with some additional useful info\"\"\"\n stats = self.crawler.stats\n s = \"Memory usage at engine startup : %dM\\r\\n\" % (stats.get_value('memusage/startup')/1024/1024)\n s += \"Maximum memory usage : %dM\\r\\n\" % (stats.get_value('memusage/max')/1024/1024)\n s += \"Current memory usage : %dM\\r\\n\" % (self.get_virtual_size()/1024/1024)\n\n s += \"ENGINE STATUS ------------------------------------------------------- \\r\\n\"\n s += \"\\r\\n\"\n s += pformat(get_engine_status(self.crawler.engine))\n s += \"\\r\\n\"\n self.mail.send(rcpts, subject, s)\n", "path": "scrapy/extensions/memusage.py"}]}
| 1,691 | 138 |
gh_patches_debug_43040
|
rasdani/github-patches
|
git_diff
|
PrefectHQ__prefect-6999
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add `prefect cloud logout` command
### First check
- [X] I added a descriptive title to this issue.
- [X] I used the GitHub search to find a similar request and didn't find it.
- [X] I searched the Prefect documentation for this feature.
### Prefect Version
2.x
### Describe the current behavior
Add `prefect cloud logout` to log out of a workspace
### Describe the proposed behavior
Likely by unsetting the `PREFECT_API_KEY` and `PREFECT_API_URL` back to default
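Roughly what I have in mind, sketched against the helpers that already exist in `prefect/cli/cloud.py` (purely illustrative; in particular, whether `update_current_profile` accepts `None` to clear a setting is an assumption on my part):

```python
@cloud_app.command()
async def logout():
    """Sketch only: drop Cloud credentials from the current profile."""
    confirm_logged_in()

    # Assumption: setting the values to None removes them from the profile,
    # so PREFECT_API_KEY / PREFECT_API_URL fall back to their defaults.
    update_current_profile(
        {
            PREFECT_API_KEY: None,
            PREFECT_API_URL: None,
        }
    )

    exit_with_success("Logged out of Prefect Cloud.")
```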
### Example Use
_No response_
### Additional context
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/prefect/cli/cloud.py`
Content:
```
1 """
2 Command line interface for interacting with Prefect Cloud
3 """
4 import re
5 from typing import Dict, Iterable
6
7 import httpx
8 import readchar
9 import typer
10 from rich.live import Live
11 from rich.table import Table
12
13 import prefect.context
14 import prefect.settings
15 from prefect.cli._types import PrefectTyper
16 from prefect.cli._utilities import exit_with_error, exit_with_success
17 from prefect.cli.root import app
18 from prefect.client.cloud import CloudUnauthorizedError, get_cloud_client
19 from prefect.settings import (
20 PREFECT_API_KEY,
21 PREFECT_API_URL,
22 PREFECT_CLOUD_URL,
23 load_profiles,
24 save_profiles,
25 update_current_profile,
26 )
27
28 cloud_app = PrefectTyper(
29 name="cloud", help="Commands for interacting with Prefect Cloud"
30 )
31 workspace_app = PrefectTyper(
32 name="workspace", help="Commands for interacting with Prefect Cloud Workspaces"
33 )
34 cloud_app.add_typer(workspace_app, aliases=["workspaces"])
35 app.add_typer(cloud_app)
36
37
38 def build_url_from_workspace(workspace: Dict) -> str:
39 return (
40 f"{PREFECT_CLOUD_URL.value()}"
41 f"/accounts/{workspace['account_id']}"
42 f"/workspaces/{workspace['workspace_id']}"
43 )
44
45
46 def confirm_logged_in():
47 if not PREFECT_API_KEY:
48 profile = prefect.context.get_settings_context().profile
49 exit_with_error(
50 f"Currently not authenticated in profile {profile.name!r}. "
51 "Please login with `prefect cloud login --key <API_KEY>`."
52 )
53
54
55 def get_current_workspace(workspaces):
56 workspace_handles_by_id = {
57 workspace[
58 "workspace_id"
59 ]: f"{workspace['account_handle']}/{workspace['workspace_handle']}"
60 for workspace in workspaces
61 }
62 current_workspace_id = re.match(
63 r".*accounts/.{36}/workspaces/(.{36})\Z", PREFECT_API_URL.value()
64 ).groups()[0]
65 return workspace_handles_by_id[current_workspace_id]
66
67
68 def build_table(selected_idx: int, workspaces: Iterable[str]) -> Table:
69 """
70 Generate a table of workspaces. The `select_idx` of workspaces will be highlighted.
71
72 Args:
73 selected_idx: currently selected index
74 workspaces: Iterable of strings
75
76 Returns:
77 rich.table.Table
78 """
79
80 table = Table()
81 table.add_column(
82 "[#024dfd]Select a Workspace:", justify="right", style="#8ea0ae", no_wrap=True
83 )
84
85 for i, workspace in enumerate(sorted(workspaces)):
86 if i == selected_idx:
87 table.add_row("[#024dfd on #FFFFFF]> " + workspace)
88 else:
89 table.add_row(" " + workspace)
90 return table
91
92
93 def select_workspace(workspaces: Iterable[str]) -> str:
94 """
95 Given a list of workspaces, display them to user in a Table
96 and allow them to select one.
97
98 Args:
99 workspaces: List of workspaces to choose from
100
101 Returns:
102 str: the selected workspace
103 """
104
105 workspaces = sorted(workspaces)
106 current_idx = 0
107 selected_workspace = None
108
109 with Live(
110 build_table(current_idx, workspaces), auto_refresh=False, console=app.console
111 ) as live:
112 while selected_workspace is None:
113 key = readchar.readkey()
114
115 if key == readchar.key.UP:
116 current_idx = current_idx - 1
117 # wrap to bottom if at the top
118 if current_idx < 0:
119 current_idx = len(workspaces) - 1
120 elif key == readchar.key.DOWN:
121 current_idx = current_idx + 1
122 # wrap to top if at the bottom
123 if current_idx >= len(workspaces):
124 current_idx = 0
125 elif key == readchar.key.CTRL_C:
126 # gracefully exit with no message
127 exit_with_error("")
128 elif key == readchar.key.ENTER:
129 selected_workspace = workspaces[current_idx]
130
131 live.update(build_table(current_idx, workspaces), refresh=True)
132
133 return selected_workspace
134
135
136 @cloud_app.command()
137 async def login(
138 key: str = typer.Option(
139 ..., "--key", "-k", help="API Key to authenticate with Prefect", prompt=True
140 ),
141 workspace_handle: str = typer.Option(
142 None,
143 "--workspace",
144 "-w",
145 help="Full handle of workspace, in format '<account_handle>/<workspace_handle>'",
146 ),
147 ):
148 """
149 Log in to Prefect Cloud.
150 Creates a new profile configured to use the specified PREFECT_API_KEY.
151 Uses a previously configured profile if it exists.
152 """
153 profiles = load_profiles()
154
155 async with get_cloud_client(api_key=key) as client:
156 try:
157 workspaces = await client.read_workspaces()
158 workspace_handle_details = {
159 f"{workspace['account_handle']}/{workspace['workspace_handle']}": workspace
160 for workspace in workspaces
161 }
162 except CloudUnauthorizedError:
163 if key.startswith("pcu"):
164 help_message = "It looks like you're using API key from Cloud 1 (https://cloud.prefect.io). Make sure that you generate API key using Cloud 2 (https://app.prefect.cloud)"
165 elif not key.startswith("pnu"):
166 help_message = "Your key is not in our expected format."
167 else:
168 help_message = "Please ensure your credentials are correct."
169 exit_with_error(
170 f"Unable to authenticate with Prefect Cloud. {help_message}"
171 )
172 except httpx.HTTPStatusError as exc:
173 exit_with_error(f"Error connecting to Prefect Cloud: {exc!r}")
174
175 for profile_name in profiles:
176 if key == profiles[profile_name].settings.get(PREFECT_API_KEY):
177 profiles.set_active(profile_name)
178 save_profiles(profiles)
179 with prefect.context.use_profile(profile_name):
180 current_workspace = get_current_workspace(workspaces)
181
182 if workspace_handle is not None:
183 if workspace_handle not in workspace_handle_details:
184 exit_with_error(f"Workspace {workspace_handle!r} not found.")
185
186 update_current_profile(
187 {
188 PREFECT_API_URL: build_url_from_workspace(
189 workspace_handle_details[workspace_handle]
190 )
191 }
192 )
193 current_workspace = workspace_handle
194
195 exit_with_success(
196 f"Logged in to Prefect Cloud using profile {profile_name!r}.\n"
197 f"Workspace is currently set to {current_workspace!r}. "
198 f"The workspace can be changed using `prefect cloud workspace set`."
199 )
200
201 workspace_handle_details = {
202 f"{workspace['account_handle']}/{workspace['workspace_handle']}": workspace
203 for workspace in workspaces
204 }
205
206 if not workspace_handle:
207 workspace_handle = select_workspace(workspace_handle_details.keys())
208
209 cloud_profile_name = app.console.input(
210 "Creating a profile for this Prefect Cloud login. Please specify a profile name: "
211 )
212
213 cloud_profile_name = cloud_profile_name.strip()
214 if cloud_profile_name == "":
215 exit_with_error("Please provide a non-empty profile name.")
216
217 if cloud_profile_name in profiles:
218 exit_with_error(f"Profile {cloud_profile_name!r} already exists.")
219
220 profiles.add_profile(
221 profiles[profiles.active_name].copy(
222 update={
223 "name": cloud_profile_name,
224 }
225 )
226 )
227
228 profiles.update_profile(
229 cloud_profile_name,
230 {
231 PREFECT_API_URL: build_url_from_workspace(
232 workspace_handle_details[workspace_handle]
233 ),
234 PREFECT_API_KEY: key,
235 },
236 )
237
238 profiles.set_active(cloud_profile_name)
239 save_profiles(profiles)
240
241 exit_with_success(
242 f"Logged in to Prefect Cloud using profile {cloud_profile_name!r}.\n"
243 f"Workspace is currently set to {workspace_handle!r}. "
244 f"The workspace can be changed using `prefect cloud workspace set`."
245 )
246
247
248 @workspace_app.command()
249 async def ls():
250 """List available workspaces."""
251
252 confirm_logged_in()
253
254 async with get_cloud_client() as client:
255 try:
256 workspaces = await client.read_workspaces()
257 except CloudUnauthorizedError:
258 exit_with_error(
259 "Unable to authenticate. Please ensure your credentials are correct."
260 )
261
262 workspace_handle_details = {
263 f"{workspace['account_handle']}/{workspace['workspace_handle']}": workspace
264 for workspace in workspaces
265 }
266
267 current_workspace = get_current_workspace(workspaces)
268
269 table = Table(caption="* active workspace")
270 table.add_column(
271 "[#024dfd]Available Workspaces:", justify="right", style="#8ea0ae", no_wrap=True
272 )
273
274 for i, workspace_handle in enumerate(sorted(workspace_handle_details)):
275 if workspace_handle == current_workspace:
276 table.add_row(f"[green] * {workspace_handle}[/green]")
277 else:
278 table.add_row(f" {workspace_handle}")
279 app.console.print(table)
280
281
282 @workspace_app.command()
283 async def set(
284 workspace_handle: str = typer.Option(
285 None,
286 "--workspace",
287 "-w",
288 help="Full handle of workspace, in format '<account_handle>/<workspace_handle>'",
289 ),
290 ):
291 """Set current workspace. Shows a workspace picker if no workspace is specified."""
292 confirm_logged_in()
293
294 async with get_cloud_client() as client:
295 try:
296 workspaces = await client.read_workspaces()
297 except CloudUnauthorizedError:
298 exit_with_error(
299 "Unable to authenticate. Please ensure your credentials are correct."
300 )
301 workspaces = {
302 f"{workspace['account_handle']}/{workspace['workspace_handle']}": workspace
303 for workspace in workspaces
304 }
305
306 if not workspace_handle:
307 workspace_handle = select_workspace(workspaces)
308
309 if workspace_handle not in workspaces:
310 exit_with_error(
311 f"Workspace {workspace_handle!r} not found. "
312 "Leave `--workspace` blank to select a workspace."
313 )
314
315 profile = update_current_profile(
316 {PREFECT_API_URL: build_url_from_workspace(workspaces[workspace_handle])}
317 )
318
319 exit_with_success(
320 f"Successfully set workspace to {workspace_handle!r} in profile {profile.name!r}."
321 )
322
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/prefect/cli/cloud.py b/src/prefect/cli/cloud.py
--- a/src/prefect/cli/cloud.py
+++ b/src/prefect/cli/cloud.py
@@ -150,8 +150,6 @@
Creates a new profile configured to use the specified PREFECT_API_KEY.
Uses a previously configured profile if it exists.
"""
- profiles = load_profiles()
-
async with get_cloud_client(api_key=key) as client:
try:
workspaces = await client.read_workspaces()
@@ -172,6 +170,8 @@
except httpx.HTTPStatusError as exc:
exit_with_error(f"Error connecting to Prefect Cloud: {exc!r}")
+ profiles = load_profiles()
+
for profile_name in profiles:
if key == profiles[profile_name].settings.get(PREFECT_API_KEY):
profiles.set_active(profile_name)
@@ -192,41 +192,19 @@
)
current_workspace = workspace_handle
- exit_with_success(
- f"Logged in to Prefect Cloud using profile {profile_name!r}.\n"
- f"Workspace is currently set to {current_workspace!r}. "
- f"The workspace can be changed using `prefect cloud workspace set`."
- )
-
- workspace_handle_details = {
- f"{workspace['account_handle']}/{workspace['workspace_handle']}": workspace
- for workspace in workspaces
- }
+ exit_with_success(
+ f"Logged in to Prefect Cloud using profile {profile_name!r}.\n"
+ f"Workspace is currently set to {current_workspace!r}. "
+ f"The workspace can be changed using `prefect cloud workspace set`."
+ )
if not workspace_handle:
workspace_handle = select_workspace(workspace_handle_details.keys())
- cloud_profile_name = app.console.input(
- "Creating a profile for this Prefect Cloud login. Please specify a profile name: "
- )
-
- cloud_profile_name = cloud_profile_name.strip()
- if cloud_profile_name == "":
- exit_with_error("Please provide a non-empty profile name.")
+ if workspace_handle not in workspace_handle_details:
+ exit_with_error(f"Workspace {workspace_handle!r} not found.")
- if cloud_profile_name in profiles:
- exit_with_error(f"Profile {cloud_profile_name!r} already exists.")
-
- profiles.add_profile(
- profiles[profiles.active_name].copy(
- update={
- "name": cloud_profile_name,
- }
- )
- )
-
- profiles.update_profile(
- cloud_profile_name,
+ current_profile = update_current_profile(
{
PREFECT_API_URL: build_url_from_workspace(
workspace_handle_details[workspace_handle]
@@ -235,16 +213,36 @@
},
)
- profiles.set_active(cloud_profile_name)
- save_profiles(profiles)
-
exit_with_success(
- f"Logged in to Prefect Cloud using profile {cloud_profile_name!r}.\n"
+ f"Logged in to Prefect Cloud using profile {current_profile.name!r}.\n"
f"Workspace is currently set to {workspace_handle!r}. "
f"The workspace can be changed using `prefect cloud workspace set`."
)
+@cloud_app.command()
+async def logout():
+ """
+ Logout the current workspace.
+ Reset PREFECT_API_KEY and PREFECT_API_URL to default.
+ """
+ current_profile = prefect.context.get_settings_context().profile
+ if current_profile is None:
+ exit_with_error("There is no current profile set.")
+
+ if current_profile.settings.get(PREFECT_API_KEY) is None:
+ exit_with_error("Current profile is not logged into Prefect Cloud.")
+
+ update_current_profile(
+ {
+ PREFECT_API_URL: None,
+ PREFECT_API_KEY: None,
+ },
+ )
+
+ exit_with_success("Logged out from Prefect Cloud.")
+
+
@workspace_app.command()
async def ls():
"""List available workspaces."""
|
{"golden_diff": "diff --git a/src/prefect/cli/cloud.py b/src/prefect/cli/cloud.py\n--- a/src/prefect/cli/cloud.py\n+++ b/src/prefect/cli/cloud.py\n@@ -150,8 +150,6 @@\n Creates a new profile configured to use the specified PREFECT_API_KEY.\n Uses a previously configured profile if it exists.\n \"\"\"\n- profiles = load_profiles()\n-\n async with get_cloud_client(api_key=key) as client:\n try:\n workspaces = await client.read_workspaces()\n@@ -172,6 +170,8 @@\n except httpx.HTTPStatusError as exc:\n exit_with_error(f\"Error connecting to Prefect Cloud: {exc!r}\")\n \n+ profiles = load_profiles()\n+\n for profile_name in profiles:\n if key == profiles[profile_name].settings.get(PREFECT_API_KEY):\n profiles.set_active(profile_name)\n@@ -192,41 +192,19 @@\n )\n current_workspace = workspace_handle\n \n- exit_with_success(\n- f\"Logged in to Prefect Cloud using profile {profile_name!r}.\\n\"\n- f\"Workspace is currently set to {current_workspace!r}. \"\n- f\"The workspace can be changed using `prefect cloud workspace set`.\"\n- )\n-\n- workspace_handle_details = {\n- f\"{workspace['account_handle']}/{workspace['workspace_handle']}\": workspace\n- for workspace in workspaces\n- }\n+ exit_with_success(\n+ f\"Logged in to Prefect Cloud using profile {profile_name!r}.\\n\"\n+ f\"Workspace is currently set to {current_workspace!r}. \"\n+ f\"The workspace can be changed using `prefect cloud workspace set`.\"\n+ )\n \n if not workspace_handle:\n workspace_handle = select_workspace(workspace_handle_details.keys())\n \n- cloud_profile_name = app.console.input(\n- \"Creating a profile for this Prefect Cloud login. Please specify a profile name: \"\n- )\n-\n- cloud_profile_name = cloud_profile_name.strip()\n- if cloud_profile_name == \"\":\n- exit_with_error(\"Please provide a non-empty profile name.\")\n+ if workspace_handle not in workspace_handle_details:\n+ exit_with_error(f\"Workspace {workspace_handle!r} not found.\")\n \n- if cloud_profile_name in profiles:\n- exit_with_error(f\"Profile {cloud_profile_name!r} already exists.\")\n-\n- profiles.add_profile(\n- profiles[profiles.active_name].copy(\n- update={\n- \"name\": cloud_profile_name,\n- }\n- )\n- )\n-\n- profiles.update_profile(\n- cloud_profile_name,\n+ current_profile = update_current_profile(\n {\n PREFECT_API_URL: build_url_from_workspace(\n workspace_handle_details[workspace_handle]\n@@ -235,16 +213,36 @@\n },\n )\n \n- profiles.set_active(cloud_profile_name)\n- save_profiles(profiles)\n-\n exit_with_success(\n- f\"Logged in to Prefect Cloud using profile {cloud_profile_name!r}.\\n\"\n+ f\"Logged in to Prefect Cloud using profile {current_profile.name!r}.\\n\"\n f\"Workspace is currently set to {workspace_handle!r}. 
\"\n f\"The workspace can be changed using `prefect cloud workspace set`.\"\n )\n \n \n+@cloud_app.command()\n+async def logout():\n+ \"\"\"\n+ Logout the current workspace.\n+ Reset PREFECT_API_KEY and PREFECT_API_URL to default.\n+ \"\"\"\n+ current_profile = prefect.context.get_settings_context().profile\n+ if current_profile is None:\n+ exit_with_error(\"There is no current profile set.\")\n+\n+ if current_profile.settings.get(PREFECT_API_KEY) is None:\n+ exit_with_error(\"Current profile is not logged into Prefect Cloud.\")\n+\n+ update_current_profile(\n+ {\n+ PREFECT_API_URL: None,\n+ PREFECT_API_KEY: None,\n+ },\n+ )\n+\n+ exit_with_success(\"Logged out from Prefect Cloud.\")\n+\n+\n @workspace_app.command()\n async def ls():\n \"\"\"List available workspaces.\"\"\"\n", "issue": "Add `prefect cloud logout` command\n### First check\n\n- [X] I added a descriptive title to this issue.\n- [X] I used the GitHub search to find a similar request and didn't find it.\n- [X] I searched the Prefect documentation for this feature.\n\n### Prefect Version\n\n2.x\n\n### Describe the current behavior\n\nAdd `prefect cloud logout` to log out of a workspace \n\n### Describe the proposed behavior\n\nLikely by unsetting the `PREFECT_API_KEY` and `PREFECT_API_URL` back to default\n\n### Example Use\n\n_No response_\n\n### Additional context\n\n_No response_\n", "before_files": [{"content": "\"\"\"\nCommand line interface for interacting with Prefect Cloud\n\"\"\"\nimport re\nfrom typing import Dict, Iterable\n\nimport httpx\nimport readchar\nimport typer\nfrom rich.live import Live\nfrom rich.table import Table\n\nimport prefect.context\nimport prefect.settings\nfrom prefect.cli._types import PrefectTyper\nfrom prefect.cli._utilities import exit_with_error, exit_with_success\nfrom prefect.cli.root import app\nfrom prefect.client.cloud import CloudUnauthorizedError, get_cloud_client\nfrom prefect.settings import (\n PREFECT_API_KEY,\n PREFECT_API_URL,\n PREFECT_CLOUD_URL,\n load_profiles,\n save_profiles,\n update_current_profile,\n)\n\ncloud_app = PrefectTyper(\n name=\"cloud\", help=\"Commands for interacting with Prefect Cloud\"\n)\nworkspace_app = PrefectTyper(\n name=\"workspace\", help=\"Commands for interacting with Prefect Cloud Workspaces\"\n)\ncloud_app.add_typer(workspace_app, aliases=[\"workspaces\"])\napp.add_typer(cloud_app)\n\n\ndef build_url_from_workspace(workspace: Dict) -> str:\n return (\n f\"{PREFECT_CLOUD_URL.value()}\"\n f\"/accounts/{workspace['account_id']}\"\n f\"/workspaces/{workspace['workspace_id']}\"\n )\n\n\ndef confirm_logged_in():\n if not PREFECT_API_KEY:\n profile = prefect.context.get_settings_context().profile\n exit_with_error(\n f\"Currently not authenticated in profile {profile.name!r}. \"\n \"Please login with `prefect cloud login --key <API_KEY>`.\"\n )\n\n\ndef get_current_workspace(workspaces):\n workspace_handles_by_id = {\n workspace[\n \"workspace_id\"\n ]: f\"{workspace['account_handle']}/{workspace['workspace_handle']}\"\n for workspace in workspaces\n }\n current_workspace_id = re.match(\n r\".*accounts/.{36}/workspaces/(.{36})\\Z\", PREFECT_API_URL.value()\n ).groups()[0]\n return workspace_handles_by_id[current_workspace_id]\n\n\ndef build_table(selected_idx: int, workspaces: Iterable[str]) -> Table:\n \"\"\"\n Generate a table of workspaces. 
The `select_idx` of workspaces will be highlighted.\n\n Args:\n selected_idx: currently selected index\n workspaces: Iterable of strings\n\n Returns:\n rich.table.Table\n \"\"\"\n\n table = Table()\n table.add_column(\n \"[#024dfd]Select a Workspace:\", justify=\"right\", style=\"#8ea0ae\", no_wrap=True\n )\n\n for i, workspace in enumerate(sorted(workspaces)):\n if i == selected_idx:\n table.add_row(\"[#024dfd on #FFFFFF]> \" + workspace)\n else:\n table.add_row(\" \" + workspace)\n return table\n\n\ndef select_workspace(workspaces: Iterable[str]) -> str:\n \"\"\"\n Given a list of workspaces, display them to user in a Table\n and allow them to select one.\n\n Args:\n workspaces: List of workspaces to choose from\n\n Returns:\n str: the selected workspace\n \"\"\"\n\n workspaces = sorted(workspaces)\n current_idx = 0\n selected_workspace = None\n\n with Live(\n build_table(current_idx, workspaces), auto_refresh=False, console=app.console\n ) as live:\n while selected_workspace is None:\n key = readchar.readkey()\n\n if key == readchar.key.UP:\n current_idx = current_idx - 1\n # wrap to bottom if at the top\n if current_idx < 0:\n current_idx = len(workspaces) - 1\n elif key == readchar.key.DOWN:\n current_idx = current_idx + 1\n # wrap to top if at the bottom\n if current_idx >= len(workspaces):\n current_idx = 0\n elif key == readchar.key.CTRL_C:\n # gracefully exit with no message\n exit_with_error(\"\")\n elif key == readchar.key.ENTER:\n selected_workspace = workspaces[current_idx]\n\n live.update(build_table(current_idx, workspaces), refresh=True)\n\n return selected_workspace\n\n\n@cloud_app.command()\nasync def login(\n key: str = typer.Option(\n ..., \"--key\", \"-k\", help=\"API Key to authenticate with Prefect\", prompt=True\n ),\n workspace_handle: str = typer.Option(\n None,\n \"--workspace\",\n \"-w\",\n help=\"Full handle of workspace, in format '<account_handle>/<workspace_handle>'\",\n ),\n):\n \"\"\"\n Log in to Prefect Cloud.\n Creates a new profile configured to use the specified PREFECT_API_KEY.\n Uses a previously configured profile if it exists.\n \"\"\"\n profiles = load_profiles()\n\n async with get_cloud_client(api_key=key) as client:\n try:\n workspaces = await client.read_workspaces()\n workspace_handle_details = {\n f\"{workspace['account_handle']}/{workspace['workspace_handle']}\": workspace\n for workspace in workspaces\n }\n except CloudUnauthorizedError:\n if key.startswith(\"pcu\"):\n help_message = \"It looks like you're using API key from Cloud 1 (https://cloud.prefect.io). Make sure that you generate API key using Cloud 2 (https://app.prefect.cloud)\"\n elif not key.startswith(\"pnu\"):\n help_message = \"Your key is not in our expected format.\"\n else:\n help_message = \"Please ensure your credentials are correct.\"\n exit_with_error(\n f\"Unable to authenticate with Prefect Cloud. 
{help_message}\"\n )\n except httpx.HTTPStatusError as exc:\n exit_with_error(f\"Error connecting to Prefect Cloud: {exc!r}\")\n\n for profile_name in profiles:\n if key == profiles[profile_name].settings.get(PREFECT_API_KEY):\n profiles.set_active(profile_name)\n save_profiles(profiles)\n with prefect.context.use_profile(profile_name):\n current_workspace = get_current_workspace(workspaces)\n\n if workspace_handle is not None:\n if workspace_handle not in workspace_handle_details:\n exit_with_error(f\"Workspace {workspace_handle!r} not found.\")\n\n update_current_profile(\n {\n PREFECT_API_URL: build_url_from_workspace(\n workspace_handle_details[workspace_handle]\n )\n }\n )\n current_workspace = workspace_handle\n\n exit_with_success(\n f\"Logged in to Prefect Cloud using profile {profile_name!r}.\\n\"\n f\"Workspace is currently set to {current_workspace!r}. \"\n f\"The workspace can be changed using `prefect cloud workspace set`.\"\n )\n\n workspace_handle_details = {\n f\"{workspace['account_handle']}/{workspace['workspace_handle']}\": workspace\n for workspace in workspaces\n }\n\n if not workspace_handle:\n workspace_handle = select_workspace(workspace_handle_details.keys())\n\n cloud_profile_name = app.console.input(\n \"Creating a profile for this Prefect Cloud login. Please specify a profile name: \"\n )\n\n cloud_profile_name = cloud_profile_name.strip()\n if cloud_profile_name == \"\":\n exit_with_error(\"Please provide a non-empty profile name.\")\n\n if cloud_profile_name in profiles:\n exit_with_error(f\"Profile {cloud_profile_name!r} already exists.\")\n\n profiles.add_profile(\n profiles[profiles.active_name].copy(\n update={\n \"name\": cloud_profile_name,\n }\n )\n )\n\n profiles.update_profile(\n cloud_profile_name,\n {\n PREFECT_API_URL: build_url_from_workspace(\n workspace_handle_details[workspace_handle]\n ),\n PREFECT_API_KEY: key,\n },\n )\n\n profiles.set_active(cloud_profile_name)\n save_profiles(profiles)\n\n exit_with_success(\n f\"Logged in to Prefect Cloud using profile {cloud_profile_name!r}.\\n\"\n f\"Workspace is currently set to {workspace_handle!r}. \"\n f\"The workspace can be changed using `prefect cloud workspace set`.\"\n )\n\n\n@workspace_app.command()\nasync def ls():\n \"\"\"List available workspaces.\"\"\"\n\n confirm_logged_in()\n\n async with get_cloud_client() as client:\n try:\n workspaces = await client.read_workspaces()\n except CloudUnauthorizedError:\n exit_with_error(\n \"Unable to authenticate. Please ensure your credentials are correct.\"\n )\n\n workspace_handle_details = {\n f\"{workspace['account_handle']}/{workspace['workspace_handle']}\": workspace\n for workspace in workspaces\n }\n\n current_workspace = get_current_workspace(workspaces)\n\n table = Table(caption=\"* active workspace\")\n table.add_column(\n \"[#024dfd]Available Workspaces:\", justify=\"right\", style=\"#8ea0ae\", no_wrap=True\n )\n\n for i, workspace_handle in enumerate(sorted(workspace_handle_details)):\n if workspace_handle == current_workspace:\n table.add_row(f\"[green] * {workspace_handle}[/green]\")\n else:\n table.add_row(f\" {workspace_handle}\")\n app.console.print(table)\n\n\n@workspace_app.command()\nasync def set(\n workspace_handle: str = typer.Option(\n None,\n \"--workspace\",\n \"-w\",\n help=\"Full handle of workspace, in format '<account_handle>/<workspace_handle>'\",\n ),\n):\n \"\"\"Set current workspace. 
Shows a workspace picker if no workspace is specified.\"\"\"\n confirm_logged_in()\n\n async with get_cloud_client() as client:\n try:\n workspaces = await client.read_workspaces()\n except CloudUnauthorizedError:\n exit_with_error(\n \"Unable to authenticate. Please ensure your credentials are correct.\"\n )\n workspaces = {\n f\"{workspace['account_handle']}/{workspace['workspace_handle']}\": workspace\n for workspace in workspaces\n }\n\n if not workspace_handle:\n workspace_handle = select_workspace(workspaces)\n\n if workspace_handle not in workspaces:\n exit_with_error(\n f\"Workspace {workspace_handle!r} not found. \"\n \"Leave `--workspace` blank to select a workspace.\"\n )\n\n profile = update_current_profile(\n {PREFECT_API_URL: build_url_from_workspace(workspaces[workspace_handle])}\n )\n\n exit_with_success(\n f\"Successfully set workspace to {workspace_handle!r} in profile {profile.name!r}.\"\n )\n", "path": "src/prefect/cli/cloud.py"}], "after_files": [{"content": "\"\"\"\nCommand line interface for interacting with Prefect Cloud\n\"\"\"\nimport re\nfrom typing import Dict, Iterable\n\nimport httpx\nimport readchar\nimport typer\nfrom rich.live import Live\nfrom rich.table import Table\n\nimport prefect.context\nimport prefect.settings\nfrom prefect.cli._types import PrefectTyper\nfrom prefect.cli._utilities import exit_with_error, exit_with_success\nfrom prefect.cli.root import app\nfrom prefect.client.cloud import CloudUnauthorizedError, get_cloud_client\nfrom prefect.settings import (\n PREFECT_API_KEY,\n PREFECT_API_URL,\n PREFECT_CLOUD_URL,\n load_profiles,\n save_profiles,\n update_current_profile,\n)\n\ncloud_app = PrefectTyper(\n name=\"cloud\", help=\"Commands for interacting with Prefect Cloud\"\n)\nworkspace_app = PrefectTyper(\n name=\"workspace\", help=\"Commands for interacting with Prefect Cloud Workspaces\"\n)\ncloud_app.add_typer(workspace_app, aliases=[\"workspaces\"])\napp.add_typer(cloud_app)\n\n\ndef build_url_from_workspace(workspace: Dict) -> str:\n return (\n f\"{PREFECT_CLOUD_URL.value()}\"\n f\"/accounts/{workspace['account_id']}\"\n f\"/workspaces/{workspace['workspace_id']}\"\n )\n\n\ndef confirm_logged_in():\n if not PREFECT_API_KEY:\n profile = prefect.context.get_settings_context().profile\n exit_with_error(\n f\"Currently not authenticated in profile {profile.name!r}. \"\n \"Please login with `prefect cloud login --key <API_KEY>`.\"\n )\n\n\ndef get_current_workspace(workspaces):\n workspace_handles_by_id = {\n workspace[\n \"workspace_id\"\n ]: f\"{workspace['account_handle']}/{workspace['workspace_handle']}\"\n for workspace in workspaces\n }\n current_workspace_id = re.match(\n r\".*accounts/.{36}/workspaces/(.{36})\\Z\", PREFECT_API_URL.value()\n ).groups()[0]\n return workspace_handles_by_id[current_workspace_id]\n\n\ndef build_table(selected_idx: int, workspaces: Iterable[str]) -> Table:\n \"\"\"\n Generate a table of workspaces. 
The `select_idx` of workspaces will be highlighted.\n\n Args:\n selected_idx: currently selected index\n workspaces: Iterable of strings\n\n Returns:\n rich.table.Table\n \"\"\"\n\n table = Table()\n table.add_column(\n \"[#024dfd]Select a Workspace:\", justify=\"right\", style=\"#8ea0ae\", no_wrap=True\n )\n\n for i, workspace in enumerate(sorted(workspaces)):\n if i == selected_idx:\n table.add_row(\"[#024dfd on #FFFFFF]> \" + workspace)\n else:\n table.add_row(\" \" + workspace)\n return table\n\n\ndef select_workspace(workspaces: Iterable[str]) -> str:\n \"\"\"\n Given a list of workspaces, display them to user in a Table\n and allow them to select one.\n\n Args:\n workspaces: List of workspaces to choose from\n\n Returns:\n str: the selected workspace\n \"\"\"\n\n workspaces = sorted(workspaces)\n current_idx = 0\n selected_workspace = None\n\n with Live(\n build_table(current_idx, workspaces), auto_refresh=False, console=app.console\n ) as live:\n while selected_workspace is None:\n key = readchar.readkey()\n\n if key == readchar.key.UP:\n current_idx = current_idx - 1\n # wrap to bottom if at the top\n if current_idx < 0:\n current_idx = len(workspaces) - 1\n elif key == readchar.key.DOWN:\n current_idx = current_idx + 1\n # wrap to top if at the bottom\n if current_idx >= len(workspaces):\n current_idx = 0\n elif key == readchar.key.CTRL_C:\n # gracefully exit with no message\n exit_with_error(\"\")\n elif key == readchar.key.ENTER:\n selected_workspace = workspaces[current_idx]\n\n live.update(build_table(current_idx, workspaces), refresh=True)\n\n return selected_workspace\n\n\n@cloud_app.command()\nasync def login(\n key: str = typer.Option(\n ..., \"--key\", \"-k\", help=\"API Key to authenticate with Prefect\", prompt=True\n ),\n workspace_handle: str = typer.Option(\n None,\n \"--workspace\",\n \"-w\",\n help=\"Full handle of workspace, in format '<account_handle>/<workspace_handle>'\",\n ),\n):\n \"\"\"\n Log in to Prefect Cloud.\n Creates a new profile configured to use the specified PREFECT_API_KEY.\n Uses a previously configured profile if it exists.\n \"\"\"\n async with get_cloud_client(api_key=key) as client:\n try:\n workspaces = await client.read_workspaces()\n workspace_handle_details = {\n f\"{workspace['account_handle']}/{workspace['workspace_handle']}\": workspace\n for workspace in workspaces\n }\n except CloudUnauthorizedError:\n if key.startswith(\"pcu\"):\n help_message = \"It looks like you're using API key from Cloud 1 (https://cloud.prefect.io). Make sure that you generate API key using Cloud 2 (https://app.prefect.cloud)\"\n elif not key.startswith(\"pnu\"):\n help_message = \"Your key is not in our expected format.\"\n else:\n help_message = \"Please ensure your credentials are correct.\"\n exit_with_error(\n f\"Unable to authenticate with Prefect Cloud. 
{help_message}\"\n )\n except httpx.HTTPStatusError as exc:\n exit_with_error(f\"Error connecting to Prefect Cloud: {exc!r}\")\n\n profiles = load_profiles()\n\n for profile_name in profiles:\n if key == profiles[profile_name].settings.get(PREFECT_API_KEY):\n profiles.set_active(profile_name)\n save_profiles(profiles)\n with prefect.context.use_profile(profile_name):\n current_workspace = get_current_workspace(workspaces)\n\n if workspace_handle is not None:\n if workspace_handle not in workspace_handle_details:\n exit_with_error(f\"Workspace {workspace_handle!r} not found.\")\n\n update_current_profile(\n {\n PREFECT_API_URL: build_url_from_workspace(\n workspace_handle_details[workspace_handle]\n )\n }\n )\n current_workspace = workspace_handle\n\n exit_with_success(\n f\"Logged in to Prefect Cloud using profile {profile_name!r}.\\n\"\n f\"Workspace is currently set to {current_workspace!r}. \"\n f\"The workspace can be changed using `prefect cloud workspace set`.\"\n )\n\n if not workspace_handle:\n workspace_handle = select_workspace(workspace_handle_details.keys())\n\n if workspace_handle not in workspace_handle_details:\n exit_with_error(f\"Workspace {workspace_handle!r} not found.\")\n\n current_profile = update_current_profile(\n {\n PREFECT_API_URL: build_url_from_workspace(\n workspace_handle_details[workspace_handle]\n ),\n PREFECT_API_KEY: key,\n },\n )\n\n exit_with_success(\n f\"Logged in to Prefect Cloud using profile {current_profile.name!r}.\\n\"\n f\"Workspace is currently set to {workspace_handle!r}. \"\n f\"The workspace can be changed using `prefect cloud workspace set`.\"\n )\n\n\n@cloud_app.command()\nasync def logout():\n \"\"\"\n Logout the current workspace.\n Reset PREFECT_API_KEY and PREFECT_API_URL to default.\n \"\"\"\n current_profile = prefect.context.get_settings_context().profile\n if current_profile is None:\n exit_with_error(\"There is no current profile set.\")\n\n if current_profile.settings.get(PREFECT_API_KEY) is None:\n exit_with_error(\"Current profile is not logged into Prefect Cloud.\")\n\n update_current_profile(\n {\n PREFECT_API_URL: None,\n PREFECT_API_KEY: None,\n },\n )\n\n exit_with_success(\"Logged out from Prefect Cloud.\")\n\n\n@workspace_app.command()\nasync def ls():\n \"\"\"List available workspaces.\"\"\"\n\n confirm_logged_in()\n\n async with get_cloud_client() as client:\n try:\n workspaces = await client.read_workspaces()\n except CloudUnauthorizedError:\n exit_with_error(\n \"Unable to authenticate. Please ensure your credentials are correct.\"\n )\n\n workspace_handle_details = {\n f\"{workspace['account_handle']}/{workspace['workspace_handle']}\": workspace\n for workspace in workspaces\n }\n\n current_workspace = get_current_workspace(workspaces)\n\n table = Table(caption=\"* active workspace\")\n table.add_column(\n \"[#024dfd]Available Workspaces:\", justify=\"right\", style=\"#8ea0ae\", no_wrap=True\n )\n\n for i, workspace_handle in enumerate(sorted(workspace_handle_details)):\n if workspace_handle == current_workspace:\n table.add_row(f\"[green] * {workspace_handle}[/green]\")\n else:\n table.add_row(f\" {workspace_handle}\")\n app.console.print(table)\n\n\n@workspace_app.command()\nasync def set(\n workspace_handle: str = typer.Option(\n None,\n \"--workspace\",\n \"-w\",\n help=\"Full handle of workspace, in format '<account_handle>/<workspace_handle>'\",\n ),\n):\n \"\"\"Set current workspace. 
Shows a workspace picker if no workspace is specified.\"\"\"\n confirm_logged_in()\n\n async with get_cloud_client() as client:\n try:\n workspaces = await client.read_workspaces()\n except CloudUnauthorizedError:\n exit_with_error(\n \"Unable to authenticate. Please ensure your credentials are correct.\"\n )\n workspaces = {\n f\"{workspace['account_handle']}/{workspace['workspace_handle']}\": workspace\n for workspace in workspaces\n }\n\n if not workspace_handle:\n workspace_handle = select_workspace(workspaces)\n\n if workspace_handle not in workspaces:\n exit_with_error(\n f\"Workspace {workspace_handle!r} not found. \"\n \"Leave `--workspace` blank to select a workspace.\"\n )\n\n profile = update_current_profile(\n {PREFECT_API_URL: build_url_from_workspace(workspaces[workspace_handle])}\n )\n\n exit_with_success(\n f\"Successfully set workspace to {workspace_handle!r} in profile {profile.name!r}.\"\n )\n", "path": "src/prefect/cli/cloud.py"}]}
| 3,454 | 903 |
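The `logout` command added in the golden diff above relies on `update_current_profile` treating `None` as "reset this setting to its default" for `PREFECT_API_KEY` and `PREFECT_API_URL`. A small self-contained sketch of that merge-with-None idea (a plain dict standing in for Prefect's profile object, so purely illustrative rather than Prefect's real implementation) might look like:

```python
def update_current_profile(profile: dict, updates: dict) -> dict:
    """Toy stand-in for Prefect's helper: None clears a setting, anything else sets it."""
    for key, value in updates.items():
        if value is None:
            profile.pop(key, None)   # back to the default value
        else:
            profile[key] = value
    return profile


profile = {
    "PREFECT_API_URL": "https://api.prefect.cloud/api/accounts/<acct>/workspaces/<ws>",
    "PREFECT_API_KEY": "pnu_example_key",
}

# What `prefect cloud logout` effectively asks for in the patch above:
update_current_profile(profile, {"PREFECT_API_URL": None, "PREFECT_API_KEY": None})
assert profile == {}   # both Cloud settings cleared, i.e. reset to defaults
```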
gh_patches_debug_20026
|
rasdani/github-patches
|
git_diff
|
yt-dlp__yt-dlp-1063
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[niconico_dmc] Heartbeat failed with SOCKS5 `HTTPS?_PROXY` env
## Checklist
- [x] I'm reporting a broken site support
- [x] I've verified that I'm running yt-dlp version **2021.09.02**
- [x] I've checked that all provided URLs are alive and playable in a browser
- [x] I've checked that all URLs and arguments with special characters are properly quoted or escaped
- [x] I've searched the bugtracker for similar issues including closed ones
## Verbose log
```
$ HTTP_PROXY=socks5://127.0.0.1:10080 HTTPS_PROXY=socks5://127.0.0.1:10080 yt-dlp --verbose --proxy socks5://127.0.0.1:10080 https://www.nicovideo.jp/watch/sm9
[debug] Command-line config: ['--verbose', '--proxy', 'socks5://127.0.0.1:10080', 'https://www.nicovideo.jp/watch/sm9']
[debug] Encodings: locale UTF-8, fs utf-8, out utf-8, pref UTF-8
[debug] yt-dlp version 2021.09.02 (source)
[debug] Python version 3.10.0rc2 (CPython 32bit) - Linux-3.10.49-armv7l-with-libc
[debug] exe versions: ffmpeg 4.4, ffprobe 4.4
[debug] Optional libraries: mutagen, pycryptodome, sqlite, websockets
[debug] Proxy map: {'http': 'socks5://127.0.0.1:10080', 'https': 'socks5://127.0.0.1:10080'}
[debug] [niconico] Extracting URL: https://www.nicovideo.jp/watch/sm9
[niconico] sm9: Downloading webpage
[niconico] sm9: Downloading video info page
[debug] Formats sorted by: hasvid, ie_pref, lang, quality, res, fps, vcodec:vp9.2(10), acodec, filesize, fs_approx, tbr, vbr, abr, asr, proto, vext, aext, hasaud, source, id
[debug] Default format spec: bestvideo*+bestaudio/best
[info] sm9: Downloading 1 format(s): h264_360p-aac_128kbps
[debug] Invoking downloader on "niconico_dmc:sm9/archive_h264_360p/archive_aac_128kbps"
[niconico_dmc] Downloading from DMC
[niconico] sm9: Downloading JSON metadata for h264_360p-aac_128kbps
[niconico] sm9: Acquiring permission for downloading video
[niconico_dmc] Heartbeat with 40 second interval ...
[niconico_dmc] Heartbeat failed
[download] Destination: 新・豪血寺一族 -煩悩解放 - レッツゴー!陰陽師 [sm9].mp4
[download] 25.4% of 26.95MiB at 174.88KiB/s ETA 01:57[niconico_dmc] Heartbeat failed
[download] 39.8% of 26.95MiB at 137.12KiB/s ETA 02:01[niconico_dmc] Heartbeat failed
[download] 53.6% of 26.95MiB at 124.49KiB/s ETA 01:42[download] Got server HTTP error: Downloaded 15136650 bytes, expected 28261021 bytes. Retrying (attempt 1 of 10) ...
```
## Description
There seem to be two separate issues:
- niconico_dmc heartbeat connection does not respect the `--proxy` option, and
- it cannot connect via a SOCKS5 server.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `yt_dlp/downloader/niconico.py`
Content:
```
1 # coding: utf-8
2 from __future__ import unicode_literals
3
4 import threading
5
6 from .common import FileDownloader
7 from ..downloader import get_suitable_downloader
8 from ..extractor.niconico import NiconicoIE
9 from ..compat import compat_urllib_request
10
11
12 class NiconicoDmcFD(FileDownloader):
13 """ Downloading niconico douga from DMC with heartbeat """
14
15 FD_NAME = 'niconico_dmc'
16
17 def real_download(self, filename, info_dict):
18 self.to_screen('[%s] Downloading from DMC' % self.FD_NAME)
19
20 ie = NiconicoIE(self.ydl)
21 info_dict, heartbeat_info_dict = ie._get_heartbeat_info(info_dict)
22
23 fd = get_suitable_downloader(info_dict, params=self.params)(self.ydl, self.params)
24
25 success = download_complete = False
26 timer = [None]
27 heartbeat_lock = threading.Lock()
28 heartbeat_url = heartbeat_info_dict['url']
29 heartbeat_data = heartbeat_info_dict['data'].encode()
30 heartbeat_interval = heartbeat_info_dict.get('interval', 30)
31
32 def heartbeat():
33 try:
34 compat_urllib_request.urlopen(url=heartbeat_url, data=heartbeat_data)
35 except Exception:
36 self.to_screen('[%s] Heartbeat failed' % self.FD_NAME)
37
38 with heartbeat_lock:
39 if not download_complete:
40 timer[0] = threading.Timer(heartbeat_interval, heartbeat)
41 timer[0].start()
42
43 heartbeat_info_dict['ping']()
44 self.to_screen('[%s] Heartbeat with %d second interval ...' % (self.FD_NAME, heartbeat_interval))
45 try:
46 heartbeat()
47 if type(fd).__name__ == 'HlsFD':
48 info_dict.update(ie._extract_m3u8_formats(info_dict['url'], info_dict['id'])[0])
49 success = fd.real_download(filename, info_dict)
50 finally:
51 if heartbeat_lock:
52 with heartbeat_lock:
53 timer[0].cancel()
54 download_complete = True
55 return success
56
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/yt_dlp/downloader/niconico.py b/yt_dlp/downloader/niconico.py
--- a/yt_dlp/downloader/niconico.py
+++ b/yt_dlp/downloader/niconico.py
@@ -6,7 +6,7 @@
from .common import FileDownloader
from ..downloader import get_suitable_downloader
from ..extractor.niconico import NiconicoIE
-from ..compat import compat_urllib_request
+from ..utils import sanitized_Request
class NiconicoDmcFD(FileDownloader):
@@ -29,9 +29,11 @@
heartbeat_data = heartbeat_info_dict['data'].encode()
heartbeat_interval = heartbeat_info_dict.get('interval', 30)
+ request = sanitized_Request(heartbeat_url, heartbeat_data)
+
def heartbeat():
try:
- compat_urllib_request.urlopen(url=heartbeat_url, data=heartbeat_data)
+ self.ydl.urlopen(request).read()
except Exception:
self.to_screen('[%s] Heartbeat failed' % self.FD_NAME)
|
{"golden_diff": "diff --git a/yt_dlp/downloader/niconico.py b/yt_dlp/downloader/niconico.py\n--- a/yt_dlp/downloader/niconico.py\n+++ b/yt_dlp/downloader/niconico.py\n@@ -6,7 +6,7 @@\n from .common import FileDownloader\n from ..downloader import get_suitable_downloader\n from ..extractor.niconico import NiconicoIE\n-from ..compat import compat_urllib_request\n+from ..utils import sanitized_Request\n \n \n class NiconicoDmcFD(FileDownloader):\n@@ -29,9 +29,11 @@\n heartbeat_data = heartbeat_info_dict['data'].encode()\n heartbeat_interval = heartbeat_info_dict.get('interval', 30)\n \n+ request = sanitized_Request(heartbeat_url, heartbeat_data)\n+\n def heartbeat():\n try:\n- compat_urllib_request.urlopen(url=heartbeat_url, data=heartbeat_data)\n+ self.ydl.urlopen(request).read()\n except Exception:\n self.to_screen('[%s] Heartbeat failed' % self.FD_NAME)\n", "issue": "[niconico_dmc] Heartbeat failed with SOCKS5 `HTTPS?_PROXY` env\n\r\n## Checklist\r\n\r\n- [x] I'm reporting a broken site support\r\n- [x] I've verified that I'm running yt-dlp version **2021.09.02**\r\n- [x] I've checked that all provided URLs are alive and playable in a browser\r\n- [x] I've checked that all URLs and arguments with special characters are properly quoted or escaped\r\n- [x] I've searched the bugtracker for similar issues including closed ones\r\n\r\n\r\n## Verbose log\r\n\r\n```\r\n$ HTTP_PROXY=socks5://127.0.0.1:10080 HTTPS_PROXY=socks5://127.0.0.1:10080 yt-dlp --verbose --proxy socks5://127.0.0.1:10080 https://www.nicovideo.jp/watch/sm9\r\n[debug] Command-line config: ['--verbose', '--proxy', 'socks5://127.0.0.1:10080', 'https://www.nicovideo.jp/watch/sm9']\r\n[debug] Encodings: locale UTF-8, fs utf-8, out utf-8, pref UTF-8\r\n[debug] yt-dlp version 2021.09.02 (source)\r\n[debug] Python version 3.10.0rc2 (CPython 32bit) - Linux-3.10.49-armv7l-with-libc\r\n[debug] exe versions: ffmpeg 4.4, ffprobe 4.4\r\n[debug] Optional libraries: mutagen, pycryptodome, sqlite, websockets\r\n[debug] Proxy map: {'http': 'socks5://127.0.0.1:10080', 'https': 'socks5://127.0.0.1:10080'}\r\n[debug] [niconico] Extracting URL: https://www.nicovideo.jp/watch/sm9\r\n[niconico] sm9: Downloading webpage\r\n[niconico] sm9: Downloading video info page\r\n[debug] Formats sorted by: hasvid, ie_pref, lang, quality, res, fps, vcodec:vp9.2(10), acodec, filesize, fs_approx, tbr, vbr, abr, asr, proto, vext, aext, hasaud, source, id\r\n[debug] Default format spec: bestvideo*+bestaudio/best\r\n[info] sm9: Downloading 1 format(s): h264_360p-aac_128kbps\r\n[debug] Invoking downloader on \"niconico_dmc:sm9/archive_h264_360p/archive_aac_128kbps\"\r\n[niconico_dmc] Downloading from DMC\r\n[niconico] sm9: Downloading JSON metadata for h264_360p-aac_128kbps\r\n[niconico] sm9: Acquiring permission for downloading video\r\n[niconico_dmc] Heartbeat with 40 second interval ...\r\n[niconico_dmc] Heartbeat failed\r\n[download] Destination: \u65b0\u30fb\u8c6a\u8840\u5bfa\u4e00\u65cf -\u7169\u60a9\u89e3\u653e - \u30ec\u30c3\u30c4\u30b4\u30fc\uff01\u9670\u967d\u5e2b [sm9].mp4\r\n[download] 25.4% of 26.95MiB at 174.88KiB/s ETA 01:57[niconico_dmc] Heartbeat failed\r\n[download] 39.8% of 26.95MiB at 137.12KiB/s ETA 02:01[niconico_dmc] Heartbeat failed\r\n[download] 53.6% of 26.95MiB at 124.49KiB/s ETA 01:42[download] Got server HTTP error: Downloaded 15136650 bytes, expected 28261021 bytes. 
Retrying (attempt 1 of 10) ...\r\n```\r\n\r\n\r\n## Description\r\n\r\nThere seem to be two separate issues:\r\n - niconico_dmc heartbeat connection does not respect the `--proxy` option, and\r\n - it cannot connect via a SOCKS5 server.\r\n\n", "before_files": [{"content": "# coding: utf-8\nfrom __future__ import unicode_literals\n\nimport threading\n\nfrom .common import FileDownloader\nfrom ..downloader import get_suitable_downloader\nfrom ..extractor.niconico import NiconicoIE\nfrom ..compat import compat_urllib_request\n\n\nclass NiconicoDmcFD(FileDownloader):\n \"\"\" Downloading niconico douga from DMC with heartbeat \"\"\"\n\n FD_NAME = 'niconico_dmc'\n\n def real_download(self, filename, info_dict):\n self.to_screen('[%s] Downloading from DMC' % self.FD_NAME)\n\n ie = NiconicoIE(self.ydl)\n info_dict, heartbeat_info_dict = ie._get_heartbeat_info(info_dict)\n\n fd = get_suitable_downloader(info_dict, params=self.params)(self.ydl, self.params)\n\n success = download_complete = False\n timer = [None]\n heartbeat_lock = threading.Lock()\n heartbeat_url = heartbeat_info_dict['url']\n heartbeat_data = heartbeat_info_dict['data'].encode()\n heartbeat_interval = heartbeat_info_dict.get('interval', 30)\n\n def heartbeat():\n try:\n compat_urllib_request.urlopen(url=heartbeat_url, data=heartbeat_data)\n except Exception:\n self.to_screen('[%s] Heartbeat failed' % self.FD_NAME)\n\n with heartbeat_lock:\n if not download_complete:\n timer[0] = threading.Timer(heartbeat_interval, heartbeat)\n timer[0].start()\n\n heartbeat_info_dict['ping']()\n self.to_screen('[%s] Heartbeat with %d second interval ...' % (self.FD_NAME, heartbeat_interval))\n try:\n heartbeat()\n if type(fd).__name__ == 'HlsFD':\n info_dict.update(ie._extract_m3u8_formats(info_dict['url'], info_dict['id'])[0])\n success = fd.real_download(filename, info_dict)\n finally:\n if heartbeat_lock:\n with heartbeat_lock:\n timer[0].cancel()\n download_complete = True\n return success\n", "path": "yt_dlp/downloader/niconico.py"}], "after_files": [{"content": "# coding: utf-8\nfrom __future__ import unicode_literals\n\nimport threading\n\nfrom .common import FileDownloader\nfrom ..downloader import get_suitable_downloader\nfrom ..extractor.niconico import NiconicoIE\nfrom ..utils import sanitized_Request\n\n\nclass NiconicoDmcFD(FileDownloader):\n \"\"\" Downloading niconico douga from DMC with heartbeat \"\"\"\n\n FD_NAME = 'niconico_dmc'\n\n def real_download(self, filename, info_dict):\n self.to_screen('[%s] Downloading from DMC' % self.FD_NAME)\n\n ie = NiconicoIE(self.ydl)\n info_dict, heartbeat_info_dict = ie._get_heartbeat_info(info_dict)\n\n fd = get_suitable_downloader(info_dict, params=self.params)(self.ydl, self.params)\n\n success = download_complete = False\n timer = [None]\n heartbeat_lock = threading.Lock()\n heartbeat_url = heartbeat_info_dict['url']\n heartbeat_data = heartbeat_info_dict['data'].encode()\n heartbeat_interval = heartbeat_info_dict.get('interval', 30)\n\n request = sanitized_Request(heartbeat_url, heartbeat_data)\n\n def heartbeat():\n try:\n self.ydl.urlopen(request).read()\n except Exception:\n self.to_screen('[%s] Heartbeat failed' % self.FD_NAME)\n\n with heartbeat_lock:\n if not download_complete:\n timer[0] = threading.Timer(heartbeat_interval, heartbeat)\n timer[0].start()\n\n heartbeat_info_dict['ping']()\n self.to_screen('[%s] Heartbeat with %d second interval ...' 
% (self.FD_NAME, heartbeat_interval))\n try:\n heartbeat()\n if type(fd).__name__ == 'HlsFD':\n info_dict.update(ie._extract_m3u8_formats(info_dict['url'], info_dict['id'])[0])\n success = fd.real_download(filename, info_dict)\n finally:\n if heartbeat_lock:\n with heartbeat_lock:\n timer[0].cancel()\n download_complete = True\n return success\n", "path": "yt_dlp/downloader/niconico.py"}]}
| 1,783 | 232 |
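The patch above works because `self.ydl.urlopen()` sends the heartbeat through the opener that yt-dlp builds from its own options (including `--proxy` and SOCKS-capable handlers), whereas the module-level `compat_urllib_request.urlopen` only ever used Python's default opener. A rough stdlib-only sketch of that distinction, using a placeholder URL and proxy address and not yt-dlp's actual opener setup, is:

```python
import urllib.request

# Placeholder values only - not a real DMC endpoint or proxy.
HEARTBEAT_URL = "https://api.dmc.nico/api/sessions/example"
PROXY = "http://127.0.0.1:8080"

# Old behaviour (simplified): the module-level helper goes through Python's
# default opener, which knows nothing about --proxy and has no SOCKS support,
# so the heartbeat bypassed the proxy that the download itself was using.
# urllib.request.urlopen(HEARTBEAT_URL, data=b"ping")      # not proxied

# Direction of the fix: build one opener that carries the proxy handlers and
# reuse it for every request.  yt-dlp does this once inside YoutubeDL (with
# SOCKS-capable handlers), and self.ydl.urlopen(request) reuses that opener,
# so the DMC heartbeat honours --proxy like every other request.
opener = urllib.request.build_opener(
    urllib.request.ProxyHandler({"http": PROXY, "https": PROXY})
)
request = urllib.request.Request(HEARTBEAT_URL, data=b"ping")
# opener.open(request).read()                              # would be proxied
```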
gh_patches_debug_3623
|
rasdani/github-patches
|
git_diff
|
ansible__ansible-modules-extras-3522
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ec2_elb_facts should support check mode
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
ec2_elb_facts
##### ANSIBLE VERSION
```
ansible 2.2.0.0
config file =
configured module search path = Default w/o overrides
```
##### CONFIGURATION
*N/A*
##### OS / ENVIRONMENT
*N/A*
##### SUMMARY
Since the `ec2_elb_facts` module is strictly a read-only operation, it should support running with `--check`.
##### STEPS TO REPRODUCE
```sh
ansible-playbook \
-i hosts \
-l my-elb-host \
ec2_elb_facts_check.yml \
-vv \
--check
```
```yaml
- hosts: all
connection: local
gather_facts: no
tasks:
- name: Collect ELB facts
ec2_elb_facts:
names: "my-elb"
region: "us-east-1"
register: elbfacts
tags: always
```
##### EXPECTED RESULTS
It would be expected that `ec2_elb_facts` would still fetch the instance information. Omitting this prevents enumerating ELB instance hosts, dynamically adding them to the inventory, and then running `--check` mode against what would *actually* be getting done.
##### ACTUAL RESULTS
```
TASK [Collect ELB facts] ***********************************************
task path: /Projects/ec2_elb_facts_check.yml:6
skipping: [my-elb-host] => {
"changed": false,
"skipped": true
}
MSG:
remote module (ec2_elb_facts) does not support check mode
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cloud/amazon/ec2_elb_facts.py`
Content:
```
1 #!/usr/bin/python
2 #
3 # This is a free software: you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation, either version 3 of the License, or
6 # (at your option) any later version.
7 #
8 # This Ansible library is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU General Public License
14 # along with this library. If not, see <http://www.gnu.org/licenses/>.
15
16 DOCUMENTATION = '''
17 ---
18 module: ec2_elb_facts
19 short_description: Gather facts about EC2 Elastic Load Balancers in AWS
20 description:
21 - Gather facts about EC2 Elastic Load Balancers in AWS
22 version_added: "2.0"
23 author:
24 - "Michael Schultz (github.com/mjschultz)"
25 - "Fernando Jose Pando (@nand0p)"
26 options:
27 names:
28 description:
29 - List of ELB names to gather facts about. Pass this option to gather facts about a set of ELBs, otherwise, all ELBs are returned.
30 required: false
31 default: null
32 aliases: ['elb_ids', 'ec2_elbs']
33 extends_documentation_fragment:
34 - aws
35 - ec2
36 '''
37
38 EXAMPLES = '''
39 # Note: These examples do not set authentication details, see the AWS Guide for details.
40 # Output format tries to match ec2_elb_lb module input parameters
41
42 # Gather facts about all ELBs
43 - action:
44 module: ec2_elb_facts
45 register: elb_facts
46
47 - action:
48 module: debug
49 msg: "{{ item.dns_name }}"
50 with_items: "{{ elb_facts.elbs }}"
51
52 # Gather facts about a particular ELB
53 - action:
54 module: ec2_elb_facts
55 names: frontend-prod-elb
56 register: elb_facts
57
58 - action:
59 module: debug
60 msg: "{{ elb_facts.elbs.0.dns_name }}"
61
62 # Gather facts about a set of ELBs
63 - action:
64 module: ec2_elb_facts
65 names:
66 - frontend-prod-elb
67 - backend-prod-elb
68 register: elb_facts
69
70 - action:
71 module: debug
72 msg: "{{ item.dns_name }}"
73 with_items: "{{ elb_facts.elbs }}"
74
75 '''
76
77 try:
78 import boto.ec2.elb
79 from boto.ec2.tag import Tag
80 from boto.exception import BotoServerError
81 HAS_BOTO = True
82 except ImportError:
83 HAS_BOTO = False
84
85 class ElbInformation(object):
86 """ Handles ELB information """
87
88 def __init__(self,
89 module,
90 names,
91 region,
92 **aws_connect_params):
93
94 self.module = module
95 self.names = names
96 self.region = region
97 self.aws_connect_params = aws_connect_params
98 self.connection = self._get_elb_connection()
99
100 def _get_tags(self, elbname):
101 params = {'LoadBalancerNames.member.1': elbname}
102 try:
103 elb_tags = self.connection.get_list('DescribeTags', params, [('member', Tag)])
104 return dict((tag.Key, tag.Value) for tag in elb_tags if hasattr(tag, 'Key'))
105 except:
106 return {}
107
108 def _get_elb_connection(self):
109 try:
110 return connect_to_aws(boto.ec2.elb, self.region, **self.aws_connect_params)
111 except BotoServerError as err:
112 self.module.fail_json(msg=err.message)
113
114 def _get_elb_listeners(self, listeners):
115 listener_list = []
116
117 for listener in listeners:
118 listener_dict = {
119 'load_balancer_port': listener[0],
120 'instance_port': listener[1],
121 'protocol': listener[2],
122 }
123
124 try:
125 ssl_certificate_id = listener[4]
126 except IndexError:
127 pass
128 else:
129 if ssl_certificate_id:
130 listener_dict['ssl_certificate_id'] = ssl_certificate_id
131
132 listener_list.append(listener_dict)
133
134 return listener_list
135
136 def _get_health_check(self, health_check):
137 protocol, port_path = health_check.target.split(':')
138 try:
139 port, path = port_path.split('/', 1)
140 path = '/{}'.format(path)
141 except ValueError:
142 port = port_path
143 path = None
144
145 health_check_dict = {
146 'ping_protocol': protocol.lower(),
147 'ping_port': int(port),
148 'response_timeout': health_check.timeout,
149 'interval': health_check.interval,
150 'unhealthy_threshold': health_check.unhealthy_threshold,
151 'healthy_threshold': health_check.healthy_threshold,
152 }
153
154 if path:
155 health_check_dict['ping_path'] = path
156 return health_check_dict
157
158 def _get_elb_info(self, elb):
159 elb_info = {
160 'name': elb.name,
161 'zones': elb.availability_zones,
162 'dns_name': elb.dns_name,
163 'canonical_hosted_zone_name': elb.canonical_hosted_zone_name,
164 'canonical_hosted_zone_name_id': elb.canonical_hosted_zone_name_id,
165 'hosted_zone_name': elb.canonical_hosted_zone_name,
166 'hosted_zone_id': elb.canonical_hosted_zone_name_id,
167 'instances': [instance.id for instance in elb.instances],
168 'listeners': self._get_elb_listeners(elb.listeners),
169 'scheme': elb.scheme,
170 'security_groups': elb.security_groups,
171 'health_check': self._get_health_check(elb.health_check),
172 'subnets': elb.subnets,
173 'instances_inservice': [],
174 'instances_inservice_count': 0,
175 'instances_outofservice': [],
176 'instances_outofservice_count': 0,
177 'instances_inservice_percent': 0.0,
178 'tags': self._get_tags(elb.name)
179 }
180
181 if elb.vpc_id:
182 elb_info['vpc_id'] = elb.vpc_id
183
184 if elb.instances:
185 try:
186 instance_health = self.connection.describe_instance_health(elb.name)
187 except BotoServerError as err:
188 self.module.fail_json(msg=err.message)
189 elb_info['instances_inservice'] = [inst.instance_id for inst in instance_health if inst.state == 'InService']
190 elb_info['instances_inservice_count'] = len(elb_info['instances_inservice'])
191 elb_info['instances_outofservice'] = [inst.instance_id for inst in instance_health if inst.state == 'OutOfService']
192 elb_info['instances_outofservice_count'] = len(elb_info['instances_outofservice'])
193 elb_info['instances_inservice_percent'] = float(elb_info['instances_inservice_count'])/(
194 float(elb_info['instances_inservice_count']) +
195 float(elb_info['instances_outofservice_count']))*100
196 return elb_info
197
198
199 def list_elbs(self):
200 elb_array = []
201
202 try:
203 all_elbs = self.connection.get_all_load_balancers()
204 except BotoServerError as err:
205 self.module.fail_json(msg = "%s: %s" % (err.error_code, err.error_message))
206
207 if all_elbs:
208 if self.names:
209 for existing_lb in all_elbs:
210 if existing_lb.name in self.names:
211 elb_array.append(existing_lb)
212 else:
213 elb_array = all_elbs
214
215 return list(map(self._get_elb_info, elb_array))
216
217 def main():
218 argument_spec = ec2_argument_spec()
219 argument_spec.update(dict(
220 names={'default': [], 'type': 'list'}
221 )
222 )
223 module = AnsibleModule(argument_spec=argument_spec)
224
225 if not HAS_BOTO:
226 module.fail_json(msg='boto required for this module')
227
228 region, ec2_url, aws_connect_params = get_aws_connection_info(module)
229
230 if not region:
231 module.fail_json(msg="region must be specified")
232
233 names = module.params['names']
234 elb_information = ElbInformation(module,
235 names,
236 region,
237 **aws_connect_params)
238
239 ec2_facts_result = dict(changed=False,
240 elbs=elb_information.list_elbs())
241
242 module.exit_json(**ec2_facts_result)
243
244 from ansible.module_utils.basic import *
245 from ansible.module_utils.ec2 import *
246
247 if __name__ == '__main__':
248 main()
249
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/cloud/amazon/ec2_elb_facts.py b/cloud/amazon/ec2_elb_facts.py
--- a/cloud/amazon/ec2_elb_facts.py
+++ b/cloud/amazon/ec2_elb_facts.py
@@ -220,7 +220,8 @@
names={'default': [], 'type': 'list'}
)
)
- module = AnsibleModule(argument_spec=argument_spec)
+ module = AnsibleModule(argument_spec=argument_spec,
+ supports_check_mode=True)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
|
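The one-line fix above, adding `supports_check_mode=True`, is all a read-only facts module needs: with it, Ansible runs the module normally under `--check` instead of skipping the task. A minimal illustrative skeleton of a facts-style module declaring check-mode support is sketched below (hypothetical, not the real `ec2_elb_facts`, and meant to be invoked by Ansible rather than run as a plain script):

```python
#!/usr/bin/python
# Illustrative skeleton only: module arguments arrive via Ansible, so this is
# not meant to be run as a standalone script.
from ansible.module_utils.basic import AnsibleModule


def main():
    module = AnsibleModule(
        argument_spec=dict(
            names=dict(default=[], type='list'),
        ),
        supports_check_mode=True,  # read-only modules are safe under --check
    )

    # A facts module never mutates anything, so check mode takes the same path
    # as a normal run; it just reports what it found.
    module.exit_json(changed=False, elbs=[])


if __name__ == '__main__':
    main()
```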
{"golden_diff": "diff --git a/cloud/amazon/ec2_elb_facts.py b/cloud/amazon/ec2_elb_facts.py\n--- a/cloud/amazon/ec2_elb_facts.py\n+++ b/cloud/amazon/ec2_elb_facts.py\n@@ -220,7 +220,8 @@\n names={'default': [], 'type': 'list'}\n )\n )\n- module = AnsibleModule(argument_spec=argument_spec)\n+ module = AnsibleModule(argument_spec=argument_spec,\n+ supports_check_mode=True)\n \n if not HAS_BOTO:\n module.fail_json(msg='boto required for this module')\n", "issue": "ec2_elb_facts should support check mode\n##### ISSUE TYPE\r\n - Bug Report\r\n\r\n##### COMPONENT NAME\r\nec2_elb_facts\r\n\r\n##### ANSIBLE VERSION\r\n\r\n```\r\nansible 2.2.0.0\r\n config file = \r\n configured module search path = Default w/o overrides\r\n```\r\n\r\n##### CONFIGURATION\r\n*N/A*\r\n\r\n##### OS / ENVIRONMENT\r\n*N/A*\r\n\r\n##### SUMMARY\r\nSince the `ec2_elb_facts` is strictly a read-only operation, it should support running with `--check`\r\n\r\n##### STEPS TO REPRODUCE\r\n\r\n```sh\r\nansible-playbook \\\r\n -i hosts \\\r\n -l my-elb-host \\\r\n ec2_elb_facts_check.yml \\\r\n -vv \\\r\n --check\r\n```\r\n\r\n```yaml\r\n- hosts: all\r\n connection: local\r\n gather_facts: no\r\n\r\n tasks:\r\n - name: Collect ELB facts\r\n ec2_elb_facts:\r\n names: \"my-elb\"\r\n region: \"us-east-1\"\r\n register: elbfacts\r\n tags: always\r\n```\r\n\r\n##### EXPECTED RESULTS\r\nIt would be expected that `ec2_elb_facts` would still fetch the instance information. This being omitted, prevents the ability to enumerate ELB instance hosts, dynamically add them to the inventory, and then conduct `--check` mode against what would *actually* be getting done.\r\n\r\n##### ACTUAL RESULTS\r\n\r\n```\r\nTASK [Collect ELB facts] ***********************************************\r\ntask path: /Projects/ec2_elb_facts_check.yml:6\r\nskipping: [my-elb-host] => {\r\n \"changed\": false, \r\n \"skipped\": true\r\n}\r\n\r\nMSG:\r\n\r\nremote module (ec2_elb_facts) does not support check mode\r\n```\n", "before_files": [{"content": "#!/usr/bin/python\n#\n# This is a free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This Ansible library is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this library. If not, see <http://www.gnu.org/licenses/>.\n\nDOCUMENTATION = '''\n---\nmodule: ec2_elb_facts\nshort_description: Gather facts about EC2 Elastic Load Balancers in AWS\ndescription:\n - Gather facts about EC2 Elastic Load Balancers in AWS\nversion_added: \"2.0\"\nauthor:\n - \"Michael Schultz (github.com/mjschultz)\"\n - \"Fernando Jose Pando (@nand0p)\"\noptions:\n names:\n description:\n - List of ELB names to gather facts about. 
Pass this option to gather facts about a set of ELBs, otherwise, all ELBs are returned.\n required: false\n default: null\n aliases: ['elb_ids', 'ec2_elbs']\nextends_documentation_fragment:\n - aws\n - ec2\n'''\n\nEXAMPLES = '''\n# Note: These examples do not set authentication details, see the AWS Guide for details.\n# Output format tries to match ec2_elb_lb module input parameters\n\n# Gather facts about all ELBs\n- action:\n module: ec2_elb_facts\n register: elb_facts\n\n- action:\n module: debug\n msg: \"{{ item.dns_name }}\"\n with_items: \"{{ elb_facts.elbs }}\"\n\n# Gather facts about a particular ELB\n- action:\n module: ec2_elb_facts\n names: frontend-prod-elb\n register: elb_facts\n\n- action:\n module: debug\n msg: \"{{ elb_facts.elbs.0.dns_name }}\"\n\n# Gather facts about a set of ELBs\n- action:\n module: ec2_elb_facts\n names:\n - frontend-prod-elb\n - backend-prod-elb\n register: elb_facts\n\n- action:\n module: debug\n msg: \"{{ item.dns_name }}\"\n with_items: \"{{ elb_facts.elbs }}\"\n\n'''\n\ntry:\n import boto.ec2.elb\n from boto.ec2.tag import Tag\n from boto.exception import BotoServerError\n HAS_BOTO = True\nexcept ImportError:\n HAS_BOTO = False\n\nclass ElbInformation(object):\n \"\"\" Handles ELB information \"\"\"\n\n def __init__(self,\n module,\n names,\n region,\n **aws_connect_params):\n\n self.module = module\n self.names = names\n self.region = region\n self.aws_connect_params = aws_connect_params\n self.connection = self._get_elb_connection()\n\n def _get_tags(self, elbname):\n params = {'LoadBalancerNames.member.1': elbname}\n try:\n elb_tags = self.connection.get_list('DescribeTags', params, [('member', Tag)])\n return dict((tag.Key, tag.Value) for tag in elb_tags if hasattr(tag, 'Key'))\n except:\n return {}\n\n def _get_elb_connection(self):\n try:\n return connect_to_aws(boto.ec2.elb, self.region, **self.aws_connect_params)\n except BotoServerError as err:\n self.module.fail_json(msg=err.message)\n\n def _get_elb_listeners(self, listeners):\n listener_list = []\n\n for listener in listeners:\n listener_dict = {\n 'load_balancer_port': listener[0],\n 'instance_port': listener[1],\n 'protocol': listener[2],\n }\n\n try:\n ssl_certificate_id = listener[4]\n except IndexError:\n pass\n else:\n if ssl_certificate_id:\n listener_dict['ssl_certificate_id'] = ssl_certificate_id\n\n listener_list.append(listener_dict)\n\n return listener_list\n\n def _get_health_check(self, health_check):\n protocol, port_path = health_check.target.split(':')\n try:\n port, path = port_path.split('/', 1)\n path = '/{}'.format(path)\n except ValueError:\n port = port_path\n path = None\n\n health_check_dict = {\n 'ping_protocol': protocol.lower(),\n 'ping_port': int(port),\n 'response_timeout': health_check.timeout,\n 'interval': health_check.interval,\n 'unhealthy_threshold': health_check.unhealthy_threshold,\n 'healthy_threshold': health_check.healthy_threshold,\n }\n\n if path:\n health_check_dict['ping_path'] = path\n return health_check_dict\n\n def _get_elb_info(self, elb):\n elb_info = {\n 'name': elb.name,\n 'zones': elb.availability_zones,\n 'dns_name': elb.dns_name,\n 'canonical_hosted_zone_name': elb.canonical_hosted_zone_name,\n 'canonical_hosted_zone_name_id': elb.canonical_hosted_zone_name_id,\n 'hosted_zone_name': elb.canonical_hosted_zone_name,\n 'hosted_zone_id': elb.canonical_hosted_zone_name_id,\n 'instances': [instance.id for instance in elb.instances],\n 'listeners': self._get_elb_listeners(elb.listeners),\n 'scheme': elb.scheme,\n 'security_groups': 
elb.security_groups,\n 'health_check': self._get_health_check(elb.health_check),\n 'subnets': elb.subnets,\n 'instances_inservice': [],\n 'instances_inservice_count': 0,\n 'instances_outofservice': [],\n 'instances_outofservice_count': 0,\n 'instances_inservice_percent': 0.0,\n 'tags': self._get_tags(elb.name)\n }\n\n if elb.vpc_id:\n elb_info['vpc_id'] = elb.vpc_id\n\n if elb.instances:\n try:\n instance_health = self.connection.describe_instance_health(elb.name)\n except BotoServerError as err:\n self.module.fail_json(msg=err.message)\n elb_info['instances_inservice'] = [inst.instance_id for inst in instance_health if inst.state == 'InService']\n elb_info['instances_inservice_count'] = len(elb_info['instances_inservice'])\n elb_info['instances_outofservice'] = [inst.instance_id for inst in instance_health if inst.state == 'OutOfService']\n elb_info['instances_outofservice_count'] = len(elb_info['instances_outofservice'])\n elb_info['instances_inservice_percent'] = float(elb_info['instances_inservice_count'])/(\n float(elb_info['instances_inservice_count']) +\n float(elb_info['instances_outofservice_count']))*100\n return elb_info\n\n\n def list_elbs(self):\n elb_array = []\n\n try:\n all_elbs = self.connection.get_all_load_balancers()\n except BotoServerError as err:\n self.module.fail_json(msg = \"%s: %s\" % (err.error_code, err.error_message))\n\n if all_elbs:\n if self.names:\n for existing_lb in all_elbs:\n if existing_lb.name in self.names:\n elb_array.append(existing_lb)\n else:\n elb_array = all_elbs\n \n return list(map(self._get_elb_info, elb_array))\n\ndef main():\n argument_spec = ec2_argument_spec()\n argument_spec.update(dict(\n names={'default': [], 'type': 'list'}\n )\n )\n module = AnsibleModule(argument_spec=argument_spec)\n\n if not HAS_BOTO:\n module.fail_json(msg='boto required for this module')\n\n region, ec2_url, aws_connect_params = get_aws_connection_info(module)\n\n if not region:\n module.fail_json(msg=\"region must be specified\")\n\n names = module.params['names']\n elb_information = ElbInformation(module,\n names,\n region,\n **aws_connect_params)\n\n ec2_facts_result = dict(changed=False,\n elbs=elb_information.list_elbs())\n\n module.exit_json(**ec2_facts_result)\n\nfrom ansible.module_utils.basic import *\nfrom ansible.module_utils.ec2 import *\n\nif __name__ == '__main__':\n main()\n", "path": "cloud/amazon/ec2_elb_facts.py"}], "after_files": [{"content": "#!/usr/bin/python\n#\n# This is a free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This Ansible library is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this library. If not, see <http://www.gnu.org/licenses/>.\n\nDOCUMENTATION = '''\n---\nmodule: ec2_elb_facts\nshort_description: Gather facts about EC2 Elastic Load Balancers in AWS\ndescription:\n - Gather facts about EC2 Elastic Load Balancers in AWS\nversion_added: \"2.0\"\nauthor:\n - \"Michael Schultz (github.com/mjschultz)\"\n - \"Fernando Jose Pando (@nand0p)\"\noptions:\n names:\n description:\n - List of ELB names to gather facts about. 
Pass this option to gather facts about a set of ELBs, otherwise, all ELBs are returned.\n required: false\n default: null\n aliases: ['elb_ids', 'ec2_elbs']\nextends_documentation_fragment:\n - aws\n - ec2\n'''\n\nEXAMPLES = '''\n# Note: These examples do not set authentication details, see the AWS Guide for details.\n# Output format tries to match ec2_elb_lb module input parameters\n\n# Gather facts about all ELBs\n- action:\n module: ec2_elb_facts\n register: elb_facts\n\n- action:\n module: debug\n msg: \"{{ item.dns_name }}\"\n with_items: \"{{ elb_facts.elbs }}\"\n\n# Gather facts about a particular ELB\n- action:\n module: ec2_elb_facts\n names: frontend-prod-elb\n register: elb_facts\n\n- action:\n module: debug\n msg: \"{{ elb_facts.elbs.0.dns_name }}\"\n\n# Gather facts about a set of ELBs\n- action:\n module: ec2_elb_facts\n names:\n - frontend-prod-elb\n - backend-prod-elb\n register: elb_facts\n\n- action:\n module: debug\n msg: \"{{ item.dns_name }}\"\n with_items: \"{{ elb_facts.elbs }}\"\n\n'''\n\ntry:\n import boto.ec2.elb\n from boto.ec2.tag import Tag\n from boto.exception import BotoServerError\n HAS_BOTO = True\nexcept ImportError:\n HAS_BOTO = False\n\nclass ElbInformation(object):\n \"\"\" Handles ELB information \"\"\"\n\n def __init__(self,\n module,\n names,\n region,\n **aws_connect_params):\n\n self.module = module\n self.names = names\n self.region = region\n self.aws_connect_params = aws_connect_params\n self.connection = self._get_elb_connection()\n\n def _get_tags(self, elbname):\n params = {'LoadBalancerNames.member.1': elbname}\n try:\n elb_tags = self.connection.get_list('DescribeTags', params, [('member', Tag)])\n return dict((tag.Key, tag.Value) for tag in elb_tags if hasattr(tag, 'Key'))\n except:\n return {}\n\n def _get_elb_connection(self):\n try:\n return connect_to_aws(boto.ec2.elb, self.region, **self.aws_connect_params)\n except BotoServerError as err:\n self.module.fail_json(msg=err.message)\n\n def _get_elb_listeners(self, listeners):\n listener_list = []\n\n for listener in listeners:\n listener_dict = {\n 'load_balancer_port': listener[0],\n 'instance_port': listener[1],\n 'protocol': listener[2],\n }\n\n try:\n ssl_certificate_id = listener[4]\n except IndexError:\n pass\n else:\n if ssl_certificate_id:\n listener_dict['ssl_certificate_id'] = ssl_certificate_id\n\n listener_list.append(listener_dict)\n\n return listener_list\n\n def _get_health_check(self, health_check):\n protocol, port_path = health_check.target.split(':')\n try:\n port, path = port_path.split('/', 1)\n path = '/{}'.format(path)\n except ValueError:\n port = port_path\n path = None\n\n health_check_dict = {\n 'ping_protocol': protocol.lower(),\n 'ping_port': int(port),\n 'response_timeout': health_check.timeout,\n 'interval': health_check.interval,\n 'unhealthy_threshold': health_check.unhealthy_threshold,\n 'healthy_threshold': health_check.healthy_threshold,\n }\n\n if path:\n health_check_dict['ping_path'] = path\n return health_check_dict\n\n def _get_elb_info(self, elb):\n elb_info = {\n 'name': elb.name,\n 'zones': elb.availability_zones,\n 'dns_name': elb.dns_name,\n 'canonical_hosted_zone_name': elb.canonical_hosted_zone_name,\n 'canonical_hosted_zone_name_id': elb.canonical_hosted_zone_name_id,\n 'hosted_zone_name': elb.canonical_hosted_zone_name,\n 'hosted_zone_id': elb.canonical_hosted_zone_name_id,\n 'instances': [instance.id for instance in elb.instances],\n 'listeners': self._get_elb_listeners(elb.listeners),\n 'scheme': elb.scheme,\n 'security_groups': 
elb.security_groups,\n 'health_check': self._get_health_check(elb.health_check),\n 'subnets': elb.subnets,\n 'instances_inservice': [],\n 'instances_inservice_count': 0,\n 'instances_outofservice': [],\n 'instances_outofservice_count': 0,\n 'instances_inservice_percent': 0.0,\n 'tags': self._get_tags(elb.name)\n }\n\n if elb.vpc_id:\n elb_info['vpc_id'] = elb.vpc_id\n\n if elb.instances:\n try:\n instance_health = self.connection.describe_instance_health(elb.name)\n except BotoServerError as err:\n self.module.fail_json(msg=err.message)\n elb_info['instances_inservice'] = [inst.instance_id for inst in instance_health if inst.state == 'InService']\n elb_info['instances_inservice_count'] = len(elb_info['instances_inservice'])\n elb_info['instances_outofservice'] = [inst.instance_id for inst in instance_health if inst.state == 'OutOfService']\n elb_info['instances_outofservice_count'] = len(elb_info['instances_outofservice'])\n elb_info['instances_inservice_percent'] = float(elb_info['instances_inservice_count'])/(\n float(elb_info['instances_inservice_count']) +\n float(elb_info['instances_outofservice_count']))*100\n return elb_info\n\n\n def list_elbs(self):\n elb_array = []\n\n try:\n all_elbs = self.connection.get_all_load_balancers()\n except BotoServerError as err:\n self.module.fail_json(msg = \"%s: %s\" % (err.error_code, err.error_message))\n\n if all_elbs:\n if self.names:\n for existing_lb in all_elbs:\n if existing_lb.name in self.names:\n elb_array.append(existing_lb)\n else:\n elb_array = all_elbs\n \n return list(map(self._get_elb_info, elb_array))\n\ndef main():\n argument_spec = ec2_argument_spec()\n argument_spec.update(dict(\n names={'default': [], 'type': 'list'}\n )\n )\n module = AnsibleModule(argument_spec=argument_spec,\n supports_check_mode=True)\n\n if not HAS_BOTO:\n module.fail_json(msg='boto required for this module')\n\n region, ec2_url, aws_connect_params = get_aws_connection_info(module)\n\n if not region:\n module.fail_json(msg=\"region must be specified\")\n\n names = module.params['names']\n elb_information = ElbInformation(module,\n names,\n region,\n **aws_connect_params)\n\n ec2_facts_result = dict(changed=False,\n elbs=elb_information.list_elbs())\n\n module.exit_json(**ec2_facts_result)\n\nfrom ansible.module_utils.basic import *\nfrom ansible.module_utils.ec2 import *\n\nif __name__ == '__main__':\n main()\n", "path": "cloud/amazon/ec2_elb_facts.py"}]}
| 3,158 | 136 |
gh_patches_debug_14029
|
rasdani/github-patches
|
git_diff
|
jupyterhub__jupyterhub-797
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Container dying after startup leads to infinite redirection loop
We are using jupyterhub with the dockerspawner module. Once successfully logged in, we observed that if the docker container is spawned but dies a second or so later (for whatever reason), the redirection from /hub/user/username to /user/username will enter an infinite redirection loop. I suspect the reason is
that there's nothing listening at /user/username (because the container server is not there), so jupyterhub redirects you to /hub/user/username as default action in that case, which in turn tries to send you back to /user/username because you are logged in.
It's an unusual condition and we are running outside the basic design, but if you have any feedback on this it would be great.
jupyterhub version 0.6.1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `jupyterhub/user.py`
Content:
```
1 # Copyright (c) Jupyter Development Team.
2 # Distributed under the terms of the Modified BSD License.
3
4 from datetime import datetime, timedelta
5 from urllib.parse import quote, urlparse
6
7 from tornado import gen
8 from tornado.log import app_log
9
10 from sqlalchemy import inspect
11
12 from .utils import url_path_join
13
14 from . import orm
15 from traitlets import HasTraits, Any, Dict, observe, default
16 from .spawner import LocalProcessSpawner
17
18
19 class UserDict(dict):
20 """Like defaultdict, but for users
21
22 Getting by a user id OR an orm.User instance returns a User wrapper around the orm user.
23 """
24 def __init__(self, db_factory, settings):
25 self.db_factory = db_factory
26 self.settings = settings
27 super().__init__()
28
29 @property
30 def db(self):
31 return self.db_factory()
32
33 def __contains__(self, key):
34 if isinstance(key, (User, orm.User)):
35 key = key.id
36 return dict.__contains__(self, key)
37
38 def __getitem__(self, key):
39 if isinstance(key, User):
40 key = key.id
41 elif isinstance(key, str):
42 orm_user = self.db.query(orm.User).filter(orm.User.name==key).first()
43 if orm_user is None:
44 raise KeyError("No such user: %s" % key)
45 else:
46 key = orm_user
47 if isinstance(key, orm.User):
48 # users[orm_user] returns User(orm_user)
49 orm_user = key
50 if orm_user.id not in self:
51 user = self[orm_user.id] = User(orm_user, self.settings)
52 return user
53 user = dict.__getitem__(self, orm_user.id)
54 user.db = self.db
55 return user
56 elif isinstance(key, int):
57 id = key
58 if id not in self:
59 orm_user = self.db.query(orm.User).filter(orm.User.id==id).first()
60 if orm_user is None:
61 raise KeyError("No such user: %s" % id)
62 user = self[id] = User(orm_user, self.settings)
63 return dict.__getitem__(self, id)
64 else:
65 raise KeyError(repr(key))
66
67 def __delitem__(self, key):
68 user = self[key]
69 user_id = user.id
70 db = self.db
71 db.delete(user.orm_user)
72 db.commit()
73 dict.__delitem__(self, user_id)
74
75
76 class User(HasTraits):
77
78 @default('log')
79 def _log_default(self):
80 return app_log
81
82 settings = Dict()
83
84 db = Any(allow_none=True)
85 @default('db')
86 def _db_default(self):
87 if self.orm_user:
88 return inspect(self.orm_user).session
89 @observe('db')
90 def _db_changed(self, change):
91 """Changing db session reacquires ORM User object"""
92 # db session changed, re-get orm User
93 if self.orm_user:
94 id = self.orm_user.id
95 self.orm_user = change['new'].query(orm.User).filter(orm.User.id==id).first()
96 self.spawner.db = self.db
97
98 orm_user = None
99 spawner = None
100 spawn_pending = False
101 stop_pending = False
102
103 @property
104 def authenticator(self):
105 return self.settings.get('authenticator', None)
106
107 @property
108 def spawner_class(self):
109 return self.settings.get('spawner_class', LocalProcessSpawner)
110
111 def __init__(self, orm_user, settings, **kwargs):
112 self.orm_user = orm_user
113 self.settings = settings
114 super().__init__(**kwargs)
115
116 hub = self.db.query(orm.Hub).first()
117
118 self.cookie_name = '%s-%s' % (hub.server.cookie_name, quote(self.name, safe=''))
119 self.base_url = url_path_join(
120 self.settings.get('base_url', '/'), 'user', self.escaped_name)
121
122 self.spawner = self.spawner_class(
123 user=self,
124 db=self.db,
125 hub=hub,
126 authenticator=self.authenticator,
127 config=self.settings.get('config'),
128 )
129
130 # pass get/setattr to ORM user
131
132 def __getattr__(self, attr):
133 if hasattr(self.orm_user, attr):
134 return getattr(self.orm_user, attr)
135 else:
136 raise AttributeError(attr)
137
138 def __setattr__(self, attr, value):
139 if self.orm_user and hasattr(self.orm_user, attr):
140 setattr(self.orm_user, attr, value)
141 else:
142 super().__setattr__(attr, value)
143
144 def __repr__(self):
145 return repr(self.orm_user)
146
147 @property
148 def running(self):
149 """property for whether a user has a running server"""
150 if self.spawn_pending or self.stop_pending:
151 return False # server is not running if spawn or stop is still pending
152 if self.server is None:
153 return False
154 return True
155
156 @property
157 def escaped_name(self):
158 """My name, escaped for use in URLs, cookies, etc."""
159 return quote(self.name, safe='@')
160
161 @property
162 def proxy_path(self):
163 if self.settings.get('subdomain_host'):
164 return url_path_join('/' + self.domain, self.base_url)
165 else:
166 return self.base_url
167
168 @property
169 def domain(self):
170 """Get the domain for my server."""
171 # FIXME: escaped_name probably isn't escaped enough in general for a domain fragment
172 return self.escaped_name + '.' + self.settings['domain']
173
174 @property
175 def host(self):
176 """Get the *host* for my server (proto://domain[:port])"""
177 # FIXME: escaped_name probably isn't escaped enough in general for a domain fragment
178 parsed = urlparse(self.settings['subdomain_host'])
179 h = '%s://%s.%s' % (parsed.scheme, self.escaped_name, parsed.netloc)
180 return h
181
182 @property
183 def url(self):
184 """My URL
185
186 Full name.domain/path if using subdomains, otherwise just my /base/url
187 """
188 if self.settings.get('subdomain_host'):
189 return '{host}{path}'.format(
190 host=self.host,
191 path=self.base_url,
192 )
193 else:
194 return self.base_url
195
196 @gen.coroutine
197 def spawn(self, options=None):
198 """Start the user's spawner"""
199 db = self.db
200
201 self.server = orm.Server(
202 cookie_name=self.cookie_name,
203 base_url=self.base_url,
204 )
205 db.add(self.server)
206 db.commit()
207
208 api_token = self.new_api_token()
209 db.commit()
210
211 spawner = self.spawner
212 spawner.user_options = options or {}
213 # we are starting a new server, make sure it doesn't restore state
214 spawner.clear_state()
215 spawner.api_token = api_token
216
217 # trigger pre-spawn hook on authenticator
218 authenticator = self.authenticator
219 if (authenticator):
220 yield gen.maybe_future(authenticator.pre_spawn_start(self, spawner))
221
222 self.spawn_pending = True
223 # wait for spawner.start to return
224 try:
225 f = spawner.start()
226 # commit any changes in spawner.start (always commit db changes before yield)
227 db.commit()
228 ip_port = yield gen.with_timeout(timedelta(seconds=spawner.start_timeout), f)
229 if ip_port:
230 # get ip, port info from return value of start()
231 self.server.ip, self.server.port = ip_port
232 else:
233 # prior to 0.7, spawners had to store this info in user.server themselves.
234 # Handle < 0.7 behavior with a warning, assuming info was stored in db by the Spawner.
235 self.log.warning("DEPRECATION: Spawner.start should return (ip, port) in JupyterHub >= 0.7")
236 except Exception as e:
237 if isinstance(e, gen.TimeoutError):
238 self.log.warning("{user}'s server failed to start in {s} seconds, giving up".format(
239 user=self.name, s=spawner.start_timeout,
240 ))
241 e.reason = 'timeout'
242 else:
243 self.log.error("Unhandled error starting {user}'s server: {error}".format(
244 user=self.name, error=e,
245 ))
246 e.reason = 'error'
247 try:
248 yield self.stop()
249 except Exception:
250 self.log.error("Failed to cleanup {user}'s server that failed to start".format(
251 user=self.name,
252 ), exc_info=True)
253 # raise original exception
254 raise e
255 spawner.start_polling()
256
257 # store state
258 self.state = spawner.get_state()
259 self.last_activity = datetime.utcnow()
260 db.commit()
261 self.spawn_pending = False
262 try:
263 yield self.server.wait_up(http=True, timeout=spawner.http_timeout)
264 except Exception as e:
265 if isinstance(e, TimeoutError):
266 self.log.warning(
267 "{user}'s server never showed up at {url} "
268 "after {http_timeout} seconds. Giving up".format(
269 user=self.name,
270 url=self.server.url,
271 http_timeout=spawner.http_timeout,
272 )
273 )
274 e.reason = 'timeout'
275 else:
276 e.reason = 'error'
277 self.log.error("Unhandled error waiting for {user}'s server to show up at {url}: {error}".format(
278 user=self.name, url=self.server.url, error=e,
279 ))
280 try:
281 yield self.stop()
282 except Exception:
283 self.log.error("Failed to cleanup {user}'s server that failed to start".format(
284 user=self.name,
285 ), exc_info=True)
286 # raise original TimeoutError
287 raise e
288 return self
289
290 @gen.coroutine
291 def stop(self):
292 """Stop the user's spawner
293
294 and cleanup after it.
295 """
296 self.spawn_pending = False
297 spawner = self.spawner
298 self.spawner.stop_polling()
299 self.stop_pending = True
300 try:
301 api_token = self.spawner.api_token
302 status = yield spawner.poll()
303 if status is None:
304 yield self.spawner.stop()
305 spawner.clear_state()
306 self.state = spawner.get_state()
307 self.last_activity = datetime.utcnow()
308 # cleanup server entry, API token from defunct server
309 if self.server:
310 # cleanup server entry from db
311 self.db.delete(self.server)
312 orm_token = orm.APIToken.find(self.db, api_token)
313 if orm_token:
314 self.db.delete(orm_token)
315 self.server = None
316 self.db.commit()
317 finally:
318 self.stop_pending = False
319 # trigger post-spawner hook on authenticator
320 auth = spawner.authenticator
321 if auth:
322 yield gen.maybe_future(
323 auth.post_spawn_stop(self, spawner)
324 )
325
326
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
|
diff --git a/jupyterhub/user.py b/jupyterhub/user.py
--- a/jupyterhub/user.py
+++ b/jupyterhub/user.py
@@ -258,7 +258,6 @@
         self.state = spawner.get_state()
         self.last_activity = datetime.utcnow()
         db.commit()
-        self.spawn_pending = False
         try:
             yield self.server.wait_up(http=True, timeout=spawner.http_timeout)
         except Exception as e:
@@ -285,6 +284,7 @@
                 ), exc_info=True)
             # raise original TimeoutError
             raise e
+        self.spawn_pending = False
         return self
 
     @gen.coroutine
|
{"golden_diff": "diff --git a/jupyterhub/user.py b/jupyterhub/user.py\n--- a/jupyterhub/user.py\n+++ b/jupyterhub/user.py\n@@ -258,7 +258,6 @@\n self.state = spawner.get_state()\n self.last_activity = datetime.utcnow()\n db.commit()\n- self.spawn_pending = False\n try:\n yield self.server.wait_up(http=True, timeout=spawner.http_timeout)\n except Exception as e:\n@@ -285,6 +284,7 @@\n ), exc_info=True)\n # raise original TimeoutError\n raise e\n+ self.spawn_pending = False\n return self\n \n @gen.coroutine\n", "issue": "Container dying after startup leads to infinite redirection loop\nWe are using jupyterhub with the dockerspawner module. Once successfully logged in, we observed that if the docker container is spawned but dies a second or so later (for whatever reason), the redirection from /hub/user/username to /user/username will enter an infinite redirection loop. I suspect the reason is\nthat there's nothing listening at /user/username (because the container server is not there), so jupyterhub redirects you to /hub/user/username as default action in that case, which in turn tries to send you back to /user/username because you are logged in.\n\nIt's an unusual condition and we are running outside the basic design, but if you have any feedback on this it would be great.\n\njupyterhub version 0.6.1\n\n", "before_files": [{"content": "# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nfrom datetime import datetime, timedelta\nfrom urllib.parse import quote, urlparse\n\nfrom tornado import gen\nfrom tornado.log import app_log\n\nfrom sqlalchemy import inspect\n\nfrom .utils import url_path_join\n\nfrom . import orm\nfrom traitlets import HasTraits, Any, Dict, observe, default\nfrom .spawner import LocalProcessSpawner\n\n\nclass UserDict(dict):\n \"\"\"Like defaultdict, but for users\n \n Getting by a user id OR an orm.User instance returns a User wrapper around the orm user.\n \"\"\"\n def __init__(self, db_factory, settings):\n self.db_factory = db_factory\n self.settings = settings\n super().__init__()\n \n @property\n def db(self):\n return self.db_factory()\n \n def __contains__(self, key):\n if isinstance(key, (User, orm.User)):\n key = key.id\n return dict.__contains__(self, key)\n \n def __getitem__(self, key):\n if isinstance(key, User):\n key = key.id\n elif isinstance(key, str):\n orm_user = self.db.query(orm.User).filter(orm.User.name==key).first()\n if orm_user is None:\n raise KeyError(\"No such user: %s\" % key)\n else:\n key = orm_user\n if isinstance(key, orm.User):\n # users[orm_user] returns User(orm_user)\n orm_user = key\n if orm_user.id not in self:\n user = self[orm_user.id] = User(orm_user, self.settings)\n return user\n user = dict.__getitem__(self, orm_user.id)\n user.db = self.db\n return user\n elif isinstance(key, int):\n id = key\n if id not in self:\n orm_user = self.db.query(orm.User).filter(orm.User.id==id).first()\n if orm_user is None:\n raise KeyError(\"No such user: %s\" % id)\n user = self[id] = User(orm_user, self.settings)\n return dict.__getitem__(self, id)\n else:\n raise KeyError(repr(key))\n \n def __delitem__(self, key):\n user = self[key]\n user_id = user.id\n db = self.db\n db.delete(user.orm_user)\n db.commit()\n dict.__delitem__(self, user_id)\n\n\nclass User(HasTraits):\n \n @default('log')\n def _log_default(self):\n return app_log\n \n settings = Dict()\n \n db = Any(allow_none=True)\n @default('db')\n def _db_default(self):\n if self.orm_user:\n return inspect(self.orm_user).session\n 
@observe('db')\n def _db_changed(self, change):\n \"\"\"Changing db session reacquires ORM User object\"\"\"\n # db session changed, re-get orm User\n if self.orm_user:\n id = self.orm_user.id\n self.orm_user = change['new'].query(orm.User).filter(orm.User.id==id).first()\n self.spawner.db = self.db\n \n orm_user = None\n spawner = None\n spawn_pending = False\n stop_pending = False\n \n @property\n def authenticator(self):\n return self.settings.get('authenticator', None)\n \n @property\n def spawner_class(self):\n return self.settings.get('spawner_class', LocalProcessSpawner)\n \n def __init__(self, orm_user, settings, **kwargs):\n self.orm_user = orm_user\n self.settings = settings\n super().__init__(**kwargs)\n \n hub = self.db.query(orm.Hub).first()\n \n self.cookie_name = '%s-%s' % (hub.server.cookie_name, quote(self.name, safe=''))\n self.base_url = url_path_join(\n self.settings.get('base_url', '/'), 'user', self.escaped_name)\n \n self.spawner = self.spawner_class(\n user=self,\n db=self.db,\n hub=hub,\n authenticator=self.authenticator,\n config=self.settings.get('config'),\n )\n \n # pass get/setattr to ORM user\n \n def __getattr__(self, attr):\n if hasattr(self.orm_user, attr):\n return getattr(self.orm_user, attr)\n else:\n raise AttributeError(attr)\n \n def __setattr__(self, attr, value):\n if self.orm_user and hasattr(self.orm_user, attr):\n setattr(self.orm_user, attr, value)\n else:\n super().__setattr__(attr, value)\n \n def __repr__(self):\n return repr(self.orm_user)\n \n @property\n def running(self):\n \"\"\"property for whether a user has a running server\"\"\"\n if self.spawn_pending or self.stop_pending:\n return False # server is not running if spawn or stop is still pending\n if self.server is None:\n return False\n return True\n \n @property\n def escaped_name(self):\n \"\"\"My name, escaped for use in URLs, cookies, etc.\"\"\"\n return quote(self.name, safe='@')\n \n @property\n def proxy_path(self):\n if self.settings.get('subdomain_host'):\n return url_path_join('/' + self.domain, self.base_url)\n else:\n return self.base_url\n \n @property\n def domain(self):\n \"\"\"Get the domain for my server.\"\"\"\n # FIXME: escaped_name probably isn't escaped enough in general for a domain fragment\n return self.escaped_name + '.' 
+ self.settings['domain']\n \n @property\n def host(self):\n \"\"\"Get the *host* for my server (proto://domain[:port])\"\"\"\n # FIXME: escaped_name probably isn't escaped enough in general for a domain fragment\n parsed = urlparse(self.settings['subdomain_host'])\n h = '%s://%s.%s' % (parsed.scheme, self.escaped_name, parsed.netloc)\n return h\n \n @property\n def url(self):\n \"\"\"My URL\n \n Full name.domain/path if using subdomains, otherwise just my /base/url\n \"\"\"\n if self.settings.get('subdomain_host'):\n return '{host}{path}'.format(\n host=self.host,\n path=self.base_url,\n )\n else:\n return self.base_url\n \n @gen.coroutine\n def spawn(self, options=None):\n \"\"\"Start the user's spawner\"\"\"\n db = self.db\n \n self.server = orm.Server(\n cookie_name=self.cookie_name,\n base_url=self.base_url,\n )\n db.add(self.server)\n db.commit()\n \n api_token = self.new_api_token()\n db.commit()\n \n spawner = self.spawner\n spawner.user_options = options or {}\n # we are starting a new server, make sure it doesn't restore state\n spawner.clear_state()\n spawner.api_token = api_token\n\n # trigger pre-spawn hook on authenticator\n authenticator = self.authenticator\n if (authenticator):\n yield gen.maybe_future(authenticator.pre_spawn_start(self, spawner))\n\n self.spawn_pending = True\n # wait for spawner.start to return\n try:\n f = spawner.start()\n # commit any changes in spawner.start (always commit db changes before yield)\n db.commit()\n ip_port = yield gen.with_timeout(timedelta(seconds=spawner.start_timeout), f)\n if ip_port:\n # get ip, port info from return value of start()\n self.server.ip, self.server.port = ip_port\n else:\n # prior to 0.7, spawners had to store this info in user.server themselves.\n # Handle < 0.7 behavior with a warning, assuming info was stored in db by the Spawner.\n self.log.warning(\"DEPRECATION: Spawner.start should return (ip, port) in JupyterHub >= 0.7\")\n except Exception as e:\n if isinstance(e, gen.TimeoutError):\n self.log.warning(\"{user}'s server failed to start in {s} seconds, giving up\".format(\n user=self.name, s=spawner.start_timeout,\n ))\n e.reason = 'timeout'\n else:\n self.log.error(\"Unhandled error starting {user}'s server: {error}\".format(\n user=self.name, error=e,\n ))\n e.reason = 'error'\n try:\n yield self.stop()\n except Exception:\n self.log.error(\"Failed to cleanup {user}'s server that failed to start\".format(\n user=self.name,\n ), exc_info=True)\n # raise original exception\n raise e\n spawner.start_polling()\n\n # store state\n self.state = spawner.get_state()\n self.last_activity = datetime.utcnow()\n db.commit()\n self.spawn_pending = False\n try:\n yield self.server.wait_up(http=True, timeout=spawner.http_timeout)\n except Exception as e:\n if isinstance(e, TimeoutError):\n self.log.warning(\n \"{user}'s server never showed up at {url} \"\n \"after {http_timeout} seconds. 
Giving up\".format(\n user=self.name,\n url=self.server.url,\n http_timeout=spawner.http_timeout,\n )\n )\n e.reason = 'timeout'\n else:\n e.reason = 'error'\n self.log.error(\"Unhandled error waiting for {user}'s server to show up at {url}: {error}\".format(\n user=self.name, url=self.server.url, error=e,\n ))\n try:\n yield self.stop()\n except Exception:\n self.log.error(\"Failed to cleanup {user}'s server that failed to start\".format(\n user=self.name,\n ), exc_info=True)\n # raise original TimeoutError\n raise e\n return self\n\n @gen.coroutine\n def stop(self):\n \"\"\"Stop the user's spawner\n \n and cleanup after it.\n \"\"\"\n self.spawn_pending = False\n spawner = self.spawner\n self.spawner.stop_polling()\n self.stop_pending = True\n try:\n api_token = self.spawner.api_token\n status = yield spawner.poll()\n if status is None:\n yield self.spawner.stop()\n spawner.clear_state()\n self.state = spawner.get_state()\n self.last_activity = datetime.utcnow()\n # cleanup server entry, API token from defunct server\n if self.server:\n # cleanup server entry from db\n self.db.delete(self.server)\n orm_token = orm.APIToken.find(self.db, api_token)\n if orm_token:\n self.db.delete(orm_token)\n self.server = None\n self.db.commit()\n finally:\n self.stop_pending = False\n # trigger post-spawner hook on authenticator\n auth = spawner.authenticator\n if auth:\n yield gen.maybe_future(\n auth.post_spawn_stop(self, spawner)\n )\n\n", "path": "jupyterhub/user.py"}], "after_files": [{"content": "# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nfrom datetime import datetime, timedelta\nfrom urllib.parse import quote, urlparse\n\nfrom tornado import gen\nfrom tornado.log import app_log\n\nfrom sqlalchemy import inspect\n\nfrom .utils import url_path_join\n\nfrom . 
import orm\nfrom traitlets import HasTraits, Any, Dict, observe, default\nfrom .spawner import LocalProcessSpawner\n\n\nclass UserDict(dict):\n \"\"\"Like defaultdict, but for users\n \n Getting by a user id OR an orm.User instance returns a User wrapper around the orm user.\n \"\"\"\n def __init__(self, db_factory, settings):\n self.db_factory = db_factory\n self.settings = settings\n super().__init__()\n \n @property\n def db(self):\n return self.db_factory()\n \n def __contains__(self, key):\n if isinstance(key, (User, orm.User)):\n key = key.id\n return dict.__contains__(self, key)\n \n def __getitem__(self, key):\n if isinstance(key, User):\n key = key.id\n elif isinstance(key, str):\n orm_user = self.db.query(orm.User).filter(orm.User.name==key).first()\n if orm_user is None:\n raise KeyError(\"No such user: %s\" % key)\n else:\n key = orm_user\n if isinstance(key, orm.User):\n # users[orm_user] returns User(orm_user)\n orm_user = key\n if orm_user.id not in self:\n user = self[orm_user.id] = User(orm_user, self.settings)\n return user\n user = dict.__getitem__(self, orm_user.id)\n user.db = self.db\n return user\n elif isinstance(key, int):\n id = key\n if id not in self:\n orm_user = self.db.query(orm.User).filter(orm.User.id==id).first()\n if orm_user is None:\n raise KeyError(\"No such user: %s\" % id)\n user = self[id] = User(orm_user, self.settings)\n return dict.__getitem__(self, id)\n else:\n raise KeyError(repr(key))\n \n def __delitem__(self, key):\n user = self[key]\n user_id = user.id\n db = self.db\n db.delete(user.orm_user)\n db.commit()\n dict.__delitem__(self, user_id)\n\n\nclass User(HasTraits):\n \n @default('log')\n def _log_default(self):\n return app_log\n \n settings = Dict()\n \n db = Any(allow_none=True)\n @default('db')\n def _db_default(self):\n if self.orm_user:\n return inspect(self.orm_user).session\n @observe('db')\n def _db_changed(self, change):\n \"\"\"Changing db session reacquires ORM User object\"\"\"\n # db session changed, re-get orm User\n if self.orm_user:\n id = self.orm_user.id\n self.orm_user = change['new'].query(orm.User).filter(orm.User.id==id).first()\n self.spawner.db = self.db\n \n orm_user = None\n spawner = None\n spawn_pending = False\n stop_pending = False\n \n @property\n def authenticator(self):\n return self.settings.get('authenticator', None)\n \n @property\n def spawner_class(self):\n return self.settings.get('spawner_class', LocalProcessSpawner)\n \n def __init__(self, orm_user, settings, **kwargs):\n self.orm_user = orm_user\n self.settings = settings\n super().__init__(**kwargs)\n \n hub = self.db.query(orm.Hub).first()\n \n self.cookie_name = '%s-%s' % (hub.server.cookie_name, quote(self.name, safe=''))\n self.base_url = url_path_join(\n self.settings.get('base_url', '/'), 'user', self.escaped_name)\n \n self.spawner = self.spawner_class(\n user=self,\n db=self.db,\n hub=hub,\n authenticator=self.authenticator,\n config=self.settings.get('config'),\n )\n \n # pass get/setattr to ORM user\n \n def __getattr__(self, attr):\n if hasattr(self.orm_user, attr):\n return getattr(self.orm_user, attr)\n else:\n raise AttributeError(attr)\n \n def __setattr__(self, attr, value):\n if self.orm_user and hasattr(self.orm_user, attr):\n setattr(self.orm_user, attr, value)\n else:\n super().__setattr__(attr, value)\n \n def __repr__(self):\n return repr(self.orm_user)\n \n @property\n def running(self):\n \"\"\"property for whether a user has a running server\"\"\"\n if self.spawn_pending or self.stop_pending:\n return False # server is 
not running if spawn or stop is still pending\n if self.server is None:\n return False\n return True\n \n @property\n def escaped_name(self):\n \"\"\"My name, escaped for use in URLs, cookies, etc.\"\"\"\n return quote(self.name, safe='@')\n \n @property\n def proxy_path(self):\n if self.settings.get('subdomain_host'):\n return url_path_join('/' + self.domain, self.base_url)\n else:\n return self.base_url\n \n @property\n def domain(self):\n \"\"\"Get the domain for my server.\"\"\"\n # FIXME: escaped_name probably isn't escaped enough in general for a domain fragment\n return self.escaped_name + '.' + self.settings['domain']\n \n @property\n def host(self):\n \"\"\"Get the *host* for my server (proto://domain[:port])\"\"\"\n # FIXME: escaped_name probably isn't escaped enough in general for a domain fragment\n parsed = urlparse(self.settings['subdomain_host'])\n h = '%s://%s.%s' % (parsed.scheme, self.escaped_name, parsed.netloc)\n return h\n \n @property\n def url(self):\n \"\"\"My URL\n \n Full name.domain/path if using subdomains, otherwise just my /base/url\n \"\"\"\n if self.settings.get('subdomain_host'):\n return '{host}{path}'.format(\n host=self.host,\n path=self.base_url,\n )\n else:\n return self.base_url\n \n @gen.coroutine\n def spawn(self, options=None):\n \"\"\"Start the user's spawner\"\"\"\n db = self.db\n \n self.server = orm.Server(\n cookie_name=self.cookie_name,\n base_url=self.base_url,\n )\n db.add(self.server)\n db.commit()\n \n api_token = self.new_api_token()\n db.commit()\n \n spawner = self.spawner\n spawner.user_options = options or {}\n # we are starting a new server, make sure it doesn't restore state\n spawner.clear_state()\n spawner.api_token = api_token\n\n # trigger pre-spawn hook on authenticator\n authenticator = self.authenticator\n if (authenticator):\n yield gen.maybe_future(authenticator.pre_spawn_start(self, spawner))\n\n self.spawn_pending = True\n # wait for spawner.start to return\n try:\n f = spawner.start()\n # commit any changes in spawner.start (always commit db changes before yield)\n db.commit()\n ip_port = yield gen.with_timeout(timedelta(seconds=spawner.start_timeout), f)\n if ip_port:\n # get ip, port info from return value of start()\n self.server.ip, self.server.port = ip_port\n else:\n # prior to 0.7, spawners had to store this info in user.server themselves.\n # Handle < 0.7 behavior with a warning, assuming info was stored in db by the Spawner.\n self.log.warning(\"DEPRECATION: Spawner.start should return (ip, port) in JupyterHub >= 0.7\")\n except Exception as e:\n if isinstance(e, gen.TimeoutError):\n self.log.warning(\"{user}'s server failed to start in {s} seconds, giving up\".format(\n user=self.name, s=spawner.start_timeout,\n ))\n e.reason = 'timeout'\n else:\n self.log.error(\"Unhandled error starting {user}'s server: {error}\".format(\n user=self.name, error=e,\n ))\n e.reason = 'error'\n try:\n yield self.stop()\n except Exception:\n self.log.error(\"Failed to cleanup {user}'s server that failed to start\".format(\n user=self.name,\n ), exc_info=True)\n # raise original exception\n raise e\n spawner.start_polling()\n\n # store state\n self.state = spawner.get_state()\n self.last_activity = datetime.utcnow()\n db.commit()\n try:\n yield self.server.wait_up(http=True, timeout=spawner.http_timeout)\n except Exception as e:\n if isinstance(e, TimeoutError):\n self.log.warning(\n \"{user}'s server never showed up at {url} \"\n \"after {http_timeout} seconds. 
Giving up\".format(\n user=self.name,\n url=self.server.url,\n http_timeout=spawner.http_timeout,\n )\n )\n e.reason = 'timeout'\n else:\n e.reason = 'error'\n self.log.error(\"Unhandled error waiting for {user}'s server to show up at {url}: {error}\".format(\n user=self.name, url=self.server.url, error=e,\n ))\n try:\n yield self.stop()\n except Exception:\n self.log.error(\"Failed to cleanup {user}'s server that failed to start\".format(\n user=self.name,\n ), exc_info=True)\n # raise original TimeoutError\n raise e\n self.spawn_pending = False\n return self\n\n @gen.coroutine\n def stop(self):\n \"\"\"Stop the user's spawner\n \n and cleanup after it.\n \"\"\"\n self.spawn_pending = False\n spawner = self.spawner\n self.spawner.stop_polling()\n self.stop_pending = True\n try:\n api_token = self.spawner.api_token\n status = yield spawner.poll()\n if status is None:\n yield self.spawner.stop()\n spawner.clear_state()\n self.state = spawner.get_state()\n self.last_activity = datetime.utcnow()\n # cleanup server entry, API token from defunct server\n if self.server:\n # cleanup server entry from db\n self.db.delete(self.server)\n orm_token = orm.APIToken.find(self.db, api_token)\n if orm_token:\n self.db.delete(orm_token)\n self.server = None\n self.db.commit()\n finally:\n self.stop_pending = False\n # trigger post-spawner hook on authenticator\n auth = spawner.authenticator\n if auth:\n yield gen.maybe_future(\n auth.post_spawn_stop(self, spawner)\n )\n\n", "path": "jupyterhub/user.py"}]}
| 3,659 | 149 |
gh_patches_debug_20623
|
rasdani/github-patches
|
git_diff
|
svthalia__concrexit-3068
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Expand/improve document part of event endpoint
<!--
Please add the appropriate label for what change should be made:
docs: changes to the documentation)
refactor: refactoring production code, eg. renaming a variable or rewriting a function
test: adding missing tests, refactoring tests; no production code change
chore: updating poetry etc; no production code change
-->
### Describe the change
Change the current URL or add a URL to a link to the actual document (that is, one that ends with `/[name].pdf`).
### Motivation
It's very nice for the app to be able to just fetch files directly.
Also, I don't think the current URL is even necessary because you also have the PK with which you can create the URL.
### Current implementation
There is a URL that leads to the file, but only after a redirect to `cdn`.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/documents/api/v2/serializers/document.py`
Content:
```
1 from rest_framework.fields import SerializerMethodField
2 from rest_framework.reverse import reverse
3
4 from documents.models import Document
5 from thaliawebsite.api.v2.serializers.cleaned_model_serializer import (
6 CleanedModelSerializer,
7 )
8
9
10 class DocumentSerializer(CleanedModelSerializer):
11 class Meta:
12 model = Document
13 fields = ("pk", "name", "url", "category", "members_only")
14
15 url = SerializerMethodField("_url")
16
17 def _url(self, instance):
18 return self.context["request"].build_absolute_uri(
19 reverse("documents:document", kwargs={"pk": instance.pk})
20 )
21
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
|
diff --git a/website/documents/api/v2/serializers/document.py b/website/documents/api/v2/serializers/document.py
--- a/website/documents/api/v2/serializers/document.py
+++ b/website/documents/api/v2/serializers/document.py
@@ -1,10 +1,10 @@
 from rest_framework.fields import SerializerMethodField
-from rest_framework.reverse import reverse
 
 from documents.models import Document
 from thaliawebsite.api.v2.serializers.cleaned_model_serializer import (
     CleanedModelSerializer,
 )
+from utils.media.services import get_media_url
 
 
 class DocumentSerializer(CleanedModelSerializer):
@@ -15,6 +15,10 @@
     url = SerializerMethodField("_url")
 
     def _url(self, instance):
-        return self.context["request"].build_absolute_uri(
-            reverse("documents:document", kwargs={"pk": instance.pk})
-        )
+        if instance.members_only and (
+            not self.request.user.is_authenticated
+            or not self.request.member.has_active_membership()
+        ):
+            return self.request.build_absolute_uri(instance.get_absolute_url())
+
+        return get_media_url(instance.file, absolute_url=True)
|
{"golden_diff": "diff --git a/website/documents/api/v2/serializers/document.py b/website/documents/api/v2/serializers/document.py\n--- a/website/documents/api/v2/serializers/document.py\n+++ b/website/documents/api/v2/serializers/document.py\n@@ -1,10 +1,10 @@\n from rest_framework.fields import SerializerMethodField\n-from rest_framework.reverse import reverse\n \n from documents.models import Document\n from thaliawebsite.api.v2.serializers.cleaned_model_serializer import (\n CleanedModelSerializer,\n )\n+from utils.media.services import get_media_url\n \n \n class DocumentSerializer(CleanedModelSerializer):\n@@ -15,6 +15,10 @@\n url = SerializerMethodField(\"_url\")\n \n def _url(self, instance):\n- return self.context[\"request\"].build_absolute_uri(\n- reverse(\"documents:document\", kwargs={\"pk\": instance.pk})\n- )\n+ if instance.members_only and (\n+ not self.request.user.is_authenticated\n+ or not self.request.member.has_active_membership()\n+ ):\n+ return self.request.build_absolute_uri(instance.get_absolute_url())\n+\n+ return get_media_url(instance.file, absolute_url=True)\n", "issue": "Expand/improve document part of event endpoint\n<!--\r\n\r\nPlease add the appropriate label for what change should be made:\r\ndocs: changes to the documentation)\r\nrefactor: refactoring production code, eg. renaming a variable or rewriting a function\r\ntest: adding missing tests, refactoring tests; no production code change\r\nchore: updating poetry etc; no production code change\r\n\r\n-->\r\n\r\n### Describe the change\r\nChange the current URL or add a URL to a link to the actual document (that is, one that ends with `/[name].pdf`).\r\n\r\n### Motivation\r\nIt's very nice for the app to be able to just fetch files directly.\r\nAlso, I don't think the current URL is even necessary because you also have the PK with which you can create the URL.\r\n\r\n### Current implementation\r\nThere is a URL that leads to the file, but only after a redirect to `cdn`.\n", "before_files": [{"content": "from rest_framework.fields import SerializerMethodField\nfrom rest_framework.reverse import reverse\n\nfrom documents.models import Document\nfrom thaliawebsite.api.v2.serializers.cleaned_model_serializer import (\n CleanedModelSerializer,\n)\n\n\nclass DocumentSerializer(CleanedModelSerializer):\n class Meta:\n model = Document\n fields = (\"pk\", \"name\", \"url\", \"category\", \"members_only\")\n\n url = SerializerMethodField(\"_url\")\n\n def _url(self, instance):\n return self.context[\"request\"].build_absolute_uri(\n reverse(\"documents:document\", kwargs={\"pk\": instance.pk})\n )\n", "path": "website/documents/api/v2/serializers/document.py"}], "after_files": [{"content": "from rest_framework.fields import SerializerMethodField\n\nfrom documents.models import Document\nfrom thaliawebsite.api.v2.serializers.cleaned_model_serializer import (\n CleanedModelSerializer,\n)\nfrom utils.media.services import get_media_url\n\n\nclass DocumentSerializer(CleanedModelSerializer):\n class Meta:\n model = Document\n fields = (\"pk\", \"name\", \"url\", \"category\", \"members_only\")\n\n url = SerializerMethodField(\"_url\")\n\n def _url(self, instance):\n if instance.members_only and (\n not self.request.user.is_authenticated\n or not self.request.member.has_active_membership()\n ):\n return self.request.build_absolute_uri(instance.get_absolute_url())\n\n return get_media_url(instance.file, absolute_url=True)\n", "path": "website/documents/api/v2/serializers/document.py"}]}
| 608 | 251 |
gh_patches_debug_51282
|
rasdani/github-patches
|
git_diff
|
open-telemetry__opentelemetry-python-636
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add a standard way to "reset" a Configuration object for testing
It is a common occurrence in tests that the global `Configuration` object needs to be "reset" between tests. This means that its attributes need to be set back to their original values. Since the `Configuration` object is immutable by design, some additional, non-production available mechanism is needed to perform this action.
The need for this feature was mentioned in a [conversation](https://github.com/open-telemetry/opentelemetry-python/pull/630#discussion_r418343720) in #630.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `opentelemetry-api/src/opentelemetry/configuration/__init__.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 # FIXME find a better way to avoid all those "Expression has type "Any"" errors
16 # type: ignore
17
18 """
19 Simple configuration manager
20
21 This is a configuration manager for OpenTelemetry. It reads configuration
22 values from environment variables prefixed with ``OPENTELEMETRY_PYTHON_`` whose
23 characters are only alphanumeric characters and unserscores, except for the
24 first character after ``OPENTELEMETRY_PYTHON_`` which must not be a number.
25
26 For example, these environment variables will be read:
27
28 1. ``OPENTELEMETRY_PYTHON_SOMETHING``
29 2. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_``
30 3. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND__ELSE``
31 4. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND_else``
32 4. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND_else2``
33
34 These won't:
35
36 1. ``OPENTELEMETRY_PYTH_SOMETHING``
37 2. ``OPENTELEMETRY_PYTHON_2_SOMETHING_AND__ELSE``
38 3. ``OPENTELEMETRY_PYTHON_SOMETHING_%_ELSE``
39
40 The values stored in the environment variables can be found in an instance of
41 ``opentelemetry.configuration.Configuration``. This class can be instantiated
42 freely because instantiating it returns always the same object.
43
44 For example, if the environment variable
45 ``OPENTELEMETRY_PYTHON_METER_PROVIDER`` value is ``my_meter_provider``, then
46 ``Configuration().meter_provider == "my_meter_provider"`` would be ``True``.
47
48 Non defined attributes will always return ``None``. This is intended to make it
49 easier to use the ``Configuration`` object in actual code, because it won't be
50 necessary to check for the attribute to be defined first.
51
52 Environment variables used by OpenTelemetry
53 -------------------------------------------
54
55 1. OPENTELEMETRY_PYTHON_METER_PROVIDER
56 2. OPENTELEMETRY_PYTHON_TRACER_PROVIDER
57
58 The value of these environment variables should be the name of the entry point
59 that points to the class that implements either provider. This OpenTelemetry
60 API package provides one entry point for each, which can be found in the
61 setup.py file::
62
63 entry_points={
64 ...
65 "opentelemetry_meter_provider": [
66 "default_meter_provider = "
67 "opentelemetry.metrics:DefaultMeterProvider"
68 ],
69 "opentelemetry_tracer_provider": [
70 "default_tracer_provider = "
71 "opentelemetry.trace:DefaultTracerProvider"
72 ],
73 }
74
75 To use the meter provider above, then the
76 ``OPENTELEMETRY_PYTHON_METER_PROVIDER`` should be set to
77 "default_meter_provider" (this is not actually necessary since the
78 OpenTelemetry API provided providers are the default ones used if no
79 configuration is found in the environment variables).
80 """
81
82 from os import environ
83 from re import fullmatch
84
85
86 class Configuration:
87 _instance = None
88
89 __slots__ = []
90
91 def __new__(cls) -> "Configuration":
92 if Configuration._instance is None:
93
94 for key, value in environ.items():
95
96 match = fullmatch(
97 r"OPENTELEMETRY_PYTHON_([A-Za-z_][\w_]*)", key
98 )
99
100 if match is not None:
101
102 key = match.group(1)
103
104 setattr(Configuration, "_{}".format(key), value)
105 setattr(
106 Configuration,
107 key,
108 property(
109 fget=lambda cls, key=key: getattr(
110 cls, "_{}".format(key)
111 )
112 ),
113 )
114
115 Configuration.__slots__.append(key)
116
117 Configuration.__slots__ = tuple(Configuration.__slots__)
118
119 Configuration._instance = object.__new__(cls)
120
121 return cls._instance
122
123 def __getattr__(self, name):
124 return None
125
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
|
diff --git a/opentelemetry-api/src/opentelemetry/configuration/__init__.py b/opentelemetry-api/src/opentelemetry/configuration/__init__.py
--- a/opentelemetry-api/src/opentelemetry/configuration/__init__.py
+++ b/opentelemetry-api/src/opentelemetry/configuration/__init__.py
@@ -122,3 +122,20 @@
 
     def __getattr__(self, name):
         return None
+
+    @classmethod
+    def _reset(cls):
+        """
+        This method "resets" the global configuration attributes
+
+        It is not intended to be used by production code but by testing code
+        only.
+        """
+
+        for slot in cls.__slots__:
+            if slot in cls.__dict__.keys():
+                delattr(cls, slot)
+                delattr(cls, "_{}".format(slot))
+
+        cls.__slots__ = []
+        cls._instance = None
|
{"golden_diff": "diff --git a/opentelemetry-api/src/opentelemetry/configuration/__init__.py b/opentelemetry-api/src/opentelemetry/configuration/__init__.py\n--- a/opentelemetry-api/src/opentelemetry/configuration/__init__.py\n+++ b/opentelemetry-api/src/opentelemetry/configuration/__init__.py\n@@ -122,3 +122,20 @@\n \n def __getattr__(self, name):\n return None\n+\n+ @classmethod\n+ def _reset(cls):\n+ \"\"\"\n+ This method \"resets\" the global configuration attributes\n+\n+ It is not intended to be used by production code but by testing code\n+ only.\n+ \"\"\"\n+\n+ for slot in cls.__slots__:\n+ if slot in cls.__dict__.keys():\n+ delattr(cls, slot)\n+ delattr(cls, \"_{}\".format(slot))\n+\n+ cls.__slots__ = []\n+ cls._instance = None\n", "issue": "Add a standard way to \"reset\" a Configuration object for testing\nIt is a common occurrence in tests that the global `Configuration` object needs to be \"reset\" between tests. This means that its attributes need to be set back to their original values. Since the `Configuration` object is immutable by design, some additional, non-production available mechanism is needed to perform this action.\r\n\r\nThe need for this feature was mentioned in a [conversation](https://github.com/open-telemetry/opentelemetry-python/pull/630#discussion_r418343720) in #630.\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# FIXME find a better way to avoid all those \"Expression has type \"Any\"\" errors\n# type: ignore\n\n\"\"\"\nSimple configuration manager\n\nThis is a configuration manager for OpenTelemetry. It reads configuration\nvalues from environment variables prefixed with ``OPENTELEMETRY_PYTHON_`` whose\ncharacters are only alphanumeric characters and unserscores, except for the\nfirst character after ``OPENTELEMETRY_PYTHON_`` which must not be a number.\n\nFor example, these environment variables will be read:\n\n1. ``OPENTELEMETRY_PYTHON_SOMETHING``\n2. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_``\n3. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND__ELSE``\n4. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND_else``\n4. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND_else2``\n\nThese won't:\n\n1. ``OPENTELEMETRY_PYTH_SOMETHING``\n2. ``OPENTELEMETRY_PYTHON_2_SOMETHING_AND__ELSE``\n3. ``OPENTELEMETRY_PYTHON_SOMETHING_%_ELSE``\n\nThe values stored in the environment variables can be found in an instance of\n``opentelemetry.configuration.Configuration``. This class can be instantiated\nfreely because instantiating it returns always the same object.\n\nFor example, if the environment variable\n``OPENTELEMETRY_PYTHON_METER_PROVIDER`` value is ``my_meter_provider``, then\n``Configuration().meter_provider == \"my_meter_provider\"`` would be ``True``.\n\nNon defined attributes will always return ``None``. 
This is intended to make it\neasier to use the ``Configuration`` object in actual code, because it won't be\nnecessary to check for the attribute to be defined first.\n\nEnvironment variables used by OpenTelemetry\n-------------------------------------------\n\n1. OPENTELEMETRY_PYTHON_METER_PROVIDER\n2. OPENTELEMETRY_PYTHON_TRACER_PROVIDER\n\nThe value of these environment variables should be the name of the entry point\nthat points to the class that implements either provider. This OpenTelemetry\nAPI package provides one entry point for each, which can be found in the\nsetup.py file::\n\n entry_points={\n ...\n \"opentelemetry_meter_provider\": [\n \"default_meter_provider = \"\n \"opentelemetry.metrics:DefaultMeterProvider\"\n ],\n \"opentelemetry_tracer_provider\": [\n \"default_tracer_provider = \"\n \"opentelemetry.trace:DefaultTracerProvider\"\n ],\n }\n\nTo use the meter provider above, then the\n``OPENTELEMETRY_PYTHON_METER_PROVIDER`` should be set to\n\"default_meter_provider\" (this is not actually necessary since the\nOpenTelemetry API provided providers are the default ones used if no\nconfiguration is found in the environment variables).\n\"\"\"\n\nfrom os import environ\nfrom re import fullmatch\n\n\nclass Configuration:\n _instance = None\n\n __slots__ = []\n\n def __new__(cls) -> \"Configuration\":\n if Configuration._instance is None:\n\n for key, value in environ.items():\n\n match = fullmatch(\n r\"OPENTELEMETRY_PYTHON_([A-Za-z_][\\w_]*)\", key\n )\n\n if match is not None:\n\n key = match.group(1)\n\n setattr(Configuration, \"_{}\".format(key), value)\n setattr(\n Configuration,\n key,\n property(\n fget=lambda cls, key=key: getattr(\n cls, \"_{}\".format(key)\n )\n ),\n )\n\n Configuration.__slots__.append(key)\n\n Configuration.__slots__ = tuple(Configuration.__slots__)\n\n Configuration._instance = object.__new__(cls)\n\n return cls._instance\n\n def __getattr__(self, name):\n return None\n", "path": "opentelemetry-api/src/opentelemetry/configuration/__init__.py"}], "after_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# FIXME find a better way to avoid all those \"Expression has type \"Any\"\" errors\n# type: ignore\n\n\"\"\"\nSimple configuration manager\n\nThis is a configuration manager for OpenTelemetry. It reads configuration\nvalues from environment variables prefixed with ``OPENTELEMETRY_PYTHON_`` whose\ncharacters are only alphanumeric characters and unserscores, except for the\nfirst character after ``OPENTELEMETRY_PYTHON_`` which must not be a number.\n\nFor example, these environment variables will be read:\n\n1. ``OPENTELEMETRY_PYTHON_SOMETHING``\n2. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_``\n3. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND__ELSE``\n4. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND_else``\n4. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND_else2``\n\nThese won't:\n\n1. ``OPENTELEMETRY_PYTH_SOMETHING``\n2. ``OPENTELEMETRY_PYTHON_2_SOMETHING_AND__ELSE``\n3. 
``OPENTELEMETRY_PYTHON_SOMETHING_%_ELSE``\n\nThe values stored in the environment variables can be found in an instance of\n``opentelemetry.configuration.Configuration``. This class can be instantiated\nfreely because instantiating it returns always the same object.\n\nFor example, if the environment variable\n``OPENTELEMETRY_PYTHON_METER_PROVIDER`` value is ``my_meter_provider``, then\n``Configuration().meter_provider == \"my_meter_provider\"`` would be ``True``.\n\nNon defined attributes will always return ``None``. This is intended to make it\neasier to use the ``Configuration`` object in actual code, because it won't be\nnecessary to check for the attribute to be defined first.\n\nEnvironment variables used by OpenTelemetry\n-------------------------------------------\n\n1. OPENTELEMETRY_PYTHON_METER_PROVIDER\n2. OPENTELEMETRY_PYTHON_TRACER_PROVIDER\n\nThe value of these environment variables should be the name of the entry point\nthat points to the class that implements either provider. This OpenTelemetry\nAPI package provides one entry point for each, which can be found in the\nsetup.py file::\n\n entry_points={\n ...\n \"opentelemetry_meter_provider\": [\n \"default_meter_provider = \"\n \"opentelemetry.metrics:DefaultMeterProvider\"\n ],\n \"opentelemetry_tracer_provider\": [\n \"default_tracer_provider = \"\n \"opentelemetry.trace:DefaultTracerProvider\"\n ],\n }\n\nTo use the meter provider above, then the\n``OPENTELEMETRY_PYTHON_METER_PROVIDER`` should be set to\n\"default_meter_provider\" (this is not actually necessary since the\nOpenTelemetry API provided providers are the default ones used if no\nconfiguration is found in the environment variables).\n\"\"\"\n\nfrom os import environ\nfrom re import fullmatch\n\n\nclass Configuration:\n _instance = None\n\n __slots__ = []\n\n def __new__(cls) -> \"Configuration\":\n if Configuration._instance is None:\n\n for key, value in environ.items():\n\n match = fullmatch(\n r\"OPENTELEMETRY_PYTHON_([A-Za-z_][\\w_]*)\", key\n )\n\n if match is not None:\n\n key = match.group(1)\n\n setattr(Configuration, \"_{}\".format(key), value)\n setattr(\n Configuration,\n key,\n property(\n fget=lambda cls, key=key: getattr(\n cls, \"_{}\".format(key)\n )\n ),\n )\n\n Configuration.__slots__.append(key)\n\n Configuration.__slots__ = tuple(Configuration.__slots__)\n\n Configuration._instance = object.__new__(cls)\n\n return cls._instance\n\n def __getattr__(self, name):\n return None\n\n @classmethod\n def _reset(cls):\n \"\"\"\n This method \"resets\" the global configuration attributes\n\n It is not intended to be used by production code but by testing code\n only.\n \"\"\"\n\n for slot in cls.__slots__:\n if slot in cls.__dict__.keys():\n delattr(cls, slot)\n delattr(cls, \"_{}\".format(slot))\n\n cls.__slots__ = []\n cls._instance = None\n", "path": "opentelemetry-api/src/opentelemetry/configuration/__init__.py"}]}
| 1,632 | 205 |
gh_patches_debug_3505 | rasdani/github-patches | git_diff | saleor__saleor-1832 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Localization of duplicate email account doesn't make sense
I saw this weird thing in post production when I was creating a customer account that was already registered.


---
Edit: note, the untranslated string is ` User with this Email already exists. `
---
I'm not quite sure how to explain this: the message makes sense in English but not in other languages. A French or Polish reader can still understand it, but it shouldn't be that way; it reads like a debug string (as a dev).
I will wait to hear back from you. I also know the string doesn't come from your side, so I'm not sure what you can do about this.
The error is generated by `django.db.models.base.Model#unique_error_message` (sourcing from `django.forms.models.BaseModelForm#_post_clean`).
```python
def unique_error_message(self, model_class, unique_check):
opts = model_class._meta
params = {
'model': self,
'model_class': model_class,
'model_name': capfirst(opts.verbose_name),
'unique_check': unique_check,
}
# A unique field
if len(unique_check) == 1:
field = opts.get_field(unique_check[0])
params['field_label'] = capfirst(field.verbose_name)
return ValidationError(
message=field.error_messages['unique'],
code='unique',
params=params,
)
# unique_together
else:
field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check]
params['field_labels'] = get_text_list(field_labels, _('and'))
return ValidationError(
message=_("%(model_name)s with this %(field_labels)s already exists."),
code='unique_together',
params=params,
)
# ^^^^^^
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `saleor/account/forms.py`
Content:
```
1 from django import forms
2 from django.conf import settings
3 from django.contrib.auth import forms as django_forms, update_session_auth_hash
4 from django.utils.translation import pgettext, pgettext_lazy
5 from phonenumbers.phonenumberutil import country_code_for_region
6
7 from ..account.models import User
8 from .i18n import AddressMetaForm, get_address_form_class
9 from . import emails
10
11
12 def get_address_form(
13 data, country_code, initial=None, instance=None, **kwargs):
14 country_form = AddressMetaForm(data, initial=initial)
15 preview = False
16 if country_form.is_valid():
17 country_code = country_form.cleaned_data['country']
18 preview = country_form.cleaned_data['preview']
19
20 if initial is None and country_code:
21 initial = {}
22 if country_code:
23 initial['phone'] = '+{}'.format(country_code_for_region(country_code))
24
25 address_form_class = get_address_form_class(country_code)
26
27 if not preview and instance is not None:
28 address_form_class = get_address_form_class(
29 instance.country.code)
30 address_form = address_form_class(
31 data, instance=instance, **kwargs)
32 else:
33 initial_address = (
34 initial if not preview
35 else data.dict() if data is not None else data)
36 address_form = address_form_class(
37 not preview and data or None,
38 initial=initial_address,
39 **kwargs)
40 return address_form, preview
41
42
43 class ChangePasswordForm(django_forms.PasswordChangeForm):
44 def __init__(self, *args, **kwargs):
45 super().__init__(*args, **kwargs)
46 self.fields['new_password1'].user = self.user
47 self.fields['old_password'].widget.attrs['placeholder'] = ''
48 self.fields['new_password1'].widget.attrs['placeholder'] = ''
49 del self.fields['new_password2']
50
51
52 def logout_on_password_change(request, user):
53 if (update_session_auth_hash is not None and
54 not settings.LOGOUT_ON_PASSWORD_CHANGE):
55 update_session_auth_hash(request, user)
56
57
58 class LoginForm(django_forms.AuthenticationForm):
59 username = forms.EmailField(
60 label=pgettext('Form field', 'Email'), max_length=75)
61
62 def __init__(self, request=None, *args, **kwargs):
63 super().__init__(request=request, *args, **kwargs)
64 if request:
65 email = request.GET.get('email')
66 if email:
67 self.fields['username'].initial = email
68
69
70 class SignupForm(forms.ModelForm):
71 password = forms.CharField(
72 widget=forms.PasswordInput)
73
74 class Meta:
75 model = User
76 fields = ('email',)
77 labels = {
78 'email': pgettext_lazy(
79 'Email', 'Email'),
80 'password': pgettext_lazy(
81 'Password', 'Password')}
82
83 def __init__(self, *args, **kwargs):
84 super().__init__(*args, **kwargs)
85 if self._meta.model.USERNAME_FIELD in self.fields:
86 self.fields[self._meta.model.USERNAME_FIELD].widget.attrs.update(
87 {'autofocus': ''})
88
89 def save(self, request=None, commit=True):
90 user = super().save(commit=False)
91 password = self.cleaned_data['password']
92 user.set_password(password)
93 if commit:
94 user.save()
95 return user
96
97
98 class PasswordResetForm(django_forms.PasswordResetForm):
99 """Allow resetting passwords.
100
101 This subclass overrides sending emails to use templated email.
102 """
103
104 def get_users(self, email):
105 active_users = User.objects.filter(email__iexact=email, is_active=True)
106 return active_users
107
108 def send_mail(
109 self, subject_template_name, email_template_name, context,
110 from_email, to_email, html_email_template_name=None):
111 emails.send_password_reset_email.delay(context, to_email)
112
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/saleor/account/forms.py b/saleor/account/forms.py
--- a/saleor/account/forms.py
+++ b/saleor/account/forms.py
@@ -70,6 +70,11 @@
class SignupForm(forms.ModelForm):
password = forms.CharField(
widget=forms.PasswordInput)
+ email = forms.EmailField(
+ error_messages={
+ 'unique': pgettext_lazy(
+ 'Registration error',
+ 'This email has already been registered.')})
class Meta:
model = User
|
{"golden_diff": "diff --git a/saleor/account/forms.py b/saleor/account/forms.py\n--- a/saleor/account/forms.py\n+++ b/saleor/account/forms.py\n@@ -70,6 +70,11 @@\n class SignupForm(forms.ModelForm):\n password = forms.CharField(\n widget=forms.PasswordInput)\n+ email = forms.EmailField(\n+ error_messages={\n+ 'unique': pgettext_lazy(\n+ 'Registration error',\n+ 'This email has already been registered.')})\n \n class Meta:\n model = User\n", "issue": "Localization of duplicate email account doesn't make sense\nI saw this weird thing in post production when I was creating a customer account that was already registered.\r\n\r\n\r\n\r\n\r\n\r\n---\r\n\r\nEdit: note, the untranslated string is ` User with this Email already exists. `\r\n\r\n---\r\n\r\nI'm not quite sure how to explain this, it doesn't make sense in other language, but make sense in English. It's understandable for a French or a Polish for example but doesn't make real sense it shouldn't be that way, it looks like a debug string (as a dev).\r\n\r\nI will wait to hear back from you. I also know it's not from your side, so I'm not quite sure of what you can do about this.\r\n\r\nThe error is generated by `django.db.models.base.Model#unique_error_message` (sourcing from `django.forms.models.BaseModelForm#_post_clean`).\r\n```python\r\n def unique_error_message(self, model_class, unique_check):\r\n opts = model_class._meta\r\n\r\n params = {\r\n 'model': self,\r\n 'model_class': model_class,\r\n 'model_name': capfirst(opts.verbose_name),\r\n 'unique_check': unique_check,\r\n }\r\n\r\n # A unique field\r\n if len(unique_check) == 1:\r\n field = opts.get_field(unique_check[0])\r\n params['field_label'] = capfirst(field.verbose_name)\r\n return ValidationError(\r\n message=field.error_messages['unique'],\r\n code='unique',\r\n params=params,\r\n )\r\n\r\n # unique_together\r\n else:\r\n field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check]\r\n params['field_labels'] = get_text_list(field_labels, _('and'))\r\n return ValidationError(\r\n message=_(\"%(model_name)s with this %(field_labels)s already exists.\"),\r\n code='unique_together',\r\n params=params,\r\n )\r\n # ^^^^^^\r\n```\n", "before_files": [{"content": "from django import forms\nfrom django.conf import settings\nfrom django.contrib.auth import forms as django_forms, update_session_auth_hash\nfrom django.utils.translation import pgettext, pgettext_lazy\nfrom phonenumbers.phonenumberutil import country_code_for_region\n\nfrom ..account.models import User\nfrom .i18n import AddressMetaForm, get_address_form_class\nfrom . 
import emails\n\n\ndef get_address_form(\n data, country_code, initial=None, instance=None, **kwargs):\n country_form = AddressMetaForm(data, initial=initial)\n preview = False\n if country_form.is_valid():\n country_code = country_form.cleaned_data['country']\n preview = country_form.cleaned_data['preview']\n\n if initial is None and country_code:\n initial = {}\n if country_code:\n initial['phone'] = '+{}'.format(country_code_for_region(country_code))\n\n address_form_class = get_address_form_class(country_code)\n\n if not preview and instance is not None:\n address_form_class = get_address_form_class(\n instance.country.code)\n address_form = address_form_class(\n data, instance=instance, **kwargs)\n else:\n initial_address = (\n initial if not preview\n else data.dict() if data is not None else data)\n address_form = address_form_class(\n not preview and data or None,\n initial=initial_address,\n **kwargs)\n return address_form, preview\n\n\nclass ChangePasswordForm(django_forms.PasswordChangeForm):\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.fields['new_password1'].user = self.user\n self.fields['old_password'].widget.attrs['placeholder'] = ''\n self.fields['new_password1'].widget.attrs['placeholder'] = ''\n del self.fields['new_password2']\n\n\ndef logout_on_password_change(request, user):\n if (update_session_auth_hash is not None and\n not settings.LOGOUT_ON_PASSWORD_CHANGE):\n update_session_auth_hash(request, user)\n\n\nclass LoginForm(django_forms.AuthenticationForm):\n username = forms.EmailField(\n label=pgettext('Form field', 'Email'), max_length=75)\n\n def __init__(self, request=None, *args, **kwargs):\n super().__init__(request=request, *args, **kwargs)\n if request:\n email = request.GET.get('email')\n if email:\n self.fields['username'].initial = email\n\n\nclass SignupForm(forms.ModelForm):\n password = forms.CharField(\n widget=forms.PasswordInput)\n\n class Meta:\n model = User\n fields = ('email',)\n labels = {\n 'email': pgettext_lazy(\n 'Email', 'Email'),\n 'password': pgettext_lazy(\n 'Password', 'Password')}\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n if self._meta.model.USERNAME_FIELD in self.fields:\n self.fields[self._meta.model.USERNAME_FIELD].widget.attrs.update(\n {'autofocus': ''})\n\n def save(self, request=None, commit=True):\n user = super().save(commit=False)\n password = self.cleaned_data['password']\n user.set_password(password)\n if commit:\n user.save()\n return user\n\n\nclass PasswordResetForm(django_forms.PasswordResetForm):\n \"\"\"Allow resetting passwords.\n\n This subclass overrides sending emails to use templated email.\n \"\"\"\n\n def get_users(self, email):\n active_users = User.objects.filter(email__iexact=email, is_active=True)\n return active_users\n\n def send_mail(\n self, subject_template_name, email_template_name, context,\n from_email, to_email, html_email_template_name=None):\n emails.send_password_reset_email.delay(context, to_email)\n", "path": "saleor/account/forms.py"}], "after_files": [{"content": "from django import forms\nfrom django.conf import settings\nfrom django.contrib.auth import forms as django_forms, update_session_auth_hash\nfrom django.utils.translation import pgettext, pgettext_lazy\nfrom phonenumbers.phonenumberutil import country_code_for_region\n\nfrom ..account.models import User\nfrom .i18n import AddressMetaForm, get_address_form_class\nfrom . 
import emails\n\n\ndef get_address_form(\n data, country_code, initial=None, instance=None, **kwargs):\n country_form = AddressMetaForm(data, initial=initial)\n preview = False\n if country_form.is_valid():\n country_code = country_form.cleaned_data['country']\n preview = country_form.cleaned_data['preview']\n\n if initial is None and country_code:\n initial = {}\n if country_code:\n initial['phone'] = '+{}'.format(country_code_for_region(country_code))\n\n address_form_class = get_address_form_class(country_code)\n\n if not preview and instance is not None:\n address_form_class = get_address_form_class(\n instance.country.code)\n address_form = address_form_class(\n data, instance=instance, **kwargs)\n else:\n initial_address = (\n initial if not preview\n else data.dict() if data is not None else data)\n address_form = address_form_class(\n not preview and data or None,\n initial=initial_address,\n **kwargs)\n return address_form, preview\n\n\nclass ChangePasswordForm(django_forms.PasswordChangeForm):\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.fields['new_password1'].user = self.user\n self.fields['old_password'].widget.attrs['placeholder'] = ''\n self.fields['new_password1'].widget.attrs['placeholder'] = ''\n del self.fields['new_password2']\n\n\ndef logout_on_password_change(request, user):\n if (update_session_auth_hash is not None and\n not settings.LOGOUT_ON_PASSWORD_CHANGE):\n update_session_auth_hash(request, user)\n\n\nclass LoginForm(django_forms.AuthenticationForm):\n username = forms.EmailField(\n label=pgettext('Form field', 'Email'), max_length=75)\n\n def __init__(self, request=None, *args, **kwargs):\n super().__init__(request=request, *args, **kwargs)\n if request:\n email = request.GET.get('email')\n if email:\n self.fields['username'].initial = email\n\n\nclass SignupForm(forms.ModelForm):\n password = forms.CharField(\n widget=forms.PasswordInput)\n email = forms.EmailField(\n error_messages={\n 'unique': pgettext_lazy(\n 'Registration error',\n 'This email has already been registered.')})\n\n class Meta:\n model = User\n fields = ('email',)\n labels = {\n 'email': pgettext_lazy(\n 'Email', 'Email'),\n 'password': pgettext_lazy(\n 'Password', 'Password')}\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n if self._meta.model.USERNAME_FIELD in self.fields:\n self.fields[self._meta.model.USERNAME_FIELD].widget.attrs.update(\n {'autofocus': ''})\n\n def save(self, request=None, commit=True):\n user = super().save(commit=False)\n password = self.cleaned_data['password']\n user.set_password(password)\n if commit:\n user.save()\n return user\n\n\nclass PasswordResetForm(django_forms.PasswordResetForm):\n \"\"\"Allow resetting passwords.\n\n This subclass overrides sending emails to use templated email.\n \"\"\"\n\n def get_users(self, email):\n active_users = User.objects.filter(email__iexact=email, is_active=True)\n return active_users\n\n def send_mail(\n self, subject_template_name, email_template_name, context,\n from_email, to_email, html_email_template_name=None):\n emails.send_password_reset_email.delay(context, to_email)\n", "path": "saleor/account/forms.py"}]}
| 1,717 | 116 |
gh_patches_debug_17693 | rasdani/github-patches | git_diff | elastic__apm-agent-python-1095 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Root Django URL doesn't appear in the UI when using routes
**Describe the bug**: `transaction.name` field is missing from the `transaction` document
**To Reproduce**
1. set `DJANGO_TRANSACTION_NAME_FROM_ROUTE` to `True`
2. have url of `path("", TemplateView.as_view(template_name="pages/home.html"), name="home")`
3. ...
4. not profit
**Environment (please complete the following information)**
- OS: Linux
- Python version: 3.8
- Framework and version [e.g. Django 2.1]: Django 3.1.6
- APM Server version: 7.12
- Agent version: 6.1.1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `elasticapm/contrib/django/middleware/__init__.py`
Content:
```
1 # BSD 3-Clause License
2 #
3 # Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
4 # Copyright (c) 2019, Elasticsearch BV
5 # All rights reserved.
6 #
7 # Redistribution and use in source and binary forms, with or without
8 # modification, are permitted provided that the following conditions are met:
9 #
10 # * Redistributions of source code must retain the above copyright notice, this
11 # list of conditions and the following disclaimer.
12 #
13 # * Redistributions in binary form must reproduce the above copyright notice,
14 # this list of conditions and the following disclaimer in the documentation
15 # and/or other materials provided with the distribution.
16 #
17 # * Neither the name of the copyright holder nor the names of its
18 # contributors may be used to endorse or promote products derived from
19 # this software without specific prior written permission.
20 #
21 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23 # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24 # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
25 # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
26 # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27 # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
28 # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
29 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30
31
32 from __future__ import absolute_import
33
34 import logging
35 import threading
36
37 from django.apps import apps
38 from django.conf import settings as django_settings
39
40 import elasticapm
41 from elasticapm.conf import constants
42 from elasticapm.contrib.django.client import client, get_client
43 from elasticapm.utils import build_name_with_http_method_prefix, get_name_from_func, wrapt
44
45 try:
46 from importlib import import_module
47 except ImportError:
48 from django.utils.importlib import import_module
49
50 try:
51 from django.utils.deprecation import MiddlewareMixin
52 except ImportError:
53 # no-op class for Django < 1.10
54 class MiddlewareMixin(object):
55 pass
56
57
58 def _is_ignorable_404(uri):
59 """
60 Returns True if the given request *shouldn't* notify the site managers.
61 """
62 urls = getattr(django_settings, "IGNORABLE_404_URLS", ())
63 return any(pattern.search(uri) for pattern in urls)
64
65
66 class ElasticAPMClientMiddlewareMixin(object):
67 @property
68 def client(self):
69 try:
70 app = apps.get_app_config("elasticapm.contrib.django")
71 return app.client
72 except LookupError:
73 return get_client()
74
75
76 class Catch404Middleware(MiddlewareMixin, ElasticAPMClientMiddlewareMixin):
77 def process_response(self, request, response):
78 if response.status_code != 404 or _is_ignorable_404(request.get_full_path()):
79 return response
80 if django_settings.DEBUG and not self.client.config.debug:
81 return response
82 data = {"level": logging.INFO, "logger": "http404"}
83 result = self.client.capture(
84 "Message",
85 request=request,
86 param_message={"message": "Page Not Found: %s", "params": [request.build_absolute_uri()]},
87 logger_name="http404",
88 level=logging.INFO,
89 )
90 request._elasticapm = {"service_name": data.get("service_name", self.client.config.service_name), "id": result}
91 return response
92
93
94 def get_name_from_middleware(wrapped, instance):
95 name = [type(instance).__name__, wrapped.__name__]
96 if type(instance).__module__:
97 name = [type(instance).__module__] + name
98 return ".".join(name)
99
100
101 def process_request_wrapper(wrapped, instance, args, kwargs):
102 response = wrapped(*args, **kwargs)
103 try:
104 if response is not None:
105 request = args[0]
106 elasticapm.set_transaction_name(
107 build_name_with_http_method_prefix(get_name_from_middleware(wrapped, instance), request)
108 )
109 finally:
110 return response
111
112
113 def process_response_wrapper(wrapped, instance, args, kwargs):
114 response = wrapped(*args, **kwargs)
115 try:
116 request, original_response = args
117 # if there's no view_func on the request, and this middleware created
118 # a new response object, it's logged as the responsible transaction
119 # name
120 if not hasattr(request, "_elasticapm_view_func") and response is not original_response:
121 elasticapm.set_transaction_name(
122 build_name_with_http_method_prefix(get_name_from_middleware(wrapped, instance), request)
123 )
124 finally:
125 return response
126
127
128 class TracingMiddleware(MiddlewareMixin, ElasticAPMClientMiddlewareMixin):
129 _elasticapm_instrumented = False
130 _instrumenting_lock = threading.Lock()
131
132 def __init__(self, *args, **kwargs):
133 super(TracingMiddleware, self).__init__(*args, **kwargs)
134 if not self._elasticapm_instrumented:
135 with self._instrumenting_lock:
136 if not self._elasticapm_instrumented:
137 if self.client.config.instrument_django_middleware:
138 self.instrument_middlewares()
139
140 TracingMiddleware._elasticapm_instrumented = True
141
142 def instrument_middlewares(self):
143 middlewares = getattr(django_settings, "MIDDLEWARE", None) or getattr(
144 django_settings, "MIDDLEWARE_CLASSES", None
145 )
146 if middlewares:
147 for middleware_path in middlewares:
148 module_path, class_name = middleware_path.rsplit(".", 1)
149 try:
150 module = import_module(module_path)
151 middleware_class = getattr(module, class_name)
152 if middleware_class == type(self):
153 # don't instrument ourselves
154 continue
155 if hasattr(middleware_class, "process_request"):
156 wrapt.wrap_function_wrapper(middleware_class, "process_request", process_request_wrapper)
157 if hasattr(middleware_class, "process_response"):
158 wrapt.wrap_function_wrapper(middleware_class, "process_response", process_response_wrapper)
159 except ImportError:
160 client.logger.warning("Can't instrument middleware %s", middleware_path)
161
162 def process_view(self, request, view_func, view_args, view_kwargs):
163 request._elasticapm_view_func = view_func
164
165 def process_response(self, request, response):
166 if django_settings.DEBUG and not self.client.config.debug:
167 return response
168 try:
169 if hasattr(response, "status_code"):
170 transaction_name = None
171 if self.client.config.django_transaction_name_from_route and hasattr(request.resolver_match, "route"):
172 transaction_name = request.resolver_match.route
173 elif getattr(request, "_elasticapm_view_func", False):
174 transaction_name = get_name_from_func(request._elasticapm_view_func)
175 if transaction_name:
176 transaction_name = build_name_with_http_method_prefix(transaction_name, request)
177 elasticapm.set_transaction_name(transaction_name, override=False)
178
179 elasticapm.set_context(
180 lambda: self.client.get_data_from_request(request, constants.TRANSACTION), "request"
181 )
182 elasticapm.set_context(
183 lambda: self.client.get_data_from_response(response, constants.TRANSACTION), "response"
184 )
185 elasticapm.set_context(lambda: self.client.get_user_info(request), "user")
186 elasticapm.set_transaction_result("HTTP {}xx".format(response.status_code // 100), override=False)
187 elasticapm.set_transaction_outcome(http_status_code=response.status_code, override=False)
188 except Exception:
189 self.client.error_logger.error("Exception during timing of request", exc_info=True)
190 return response
191
192
193 class ErrorIdMiddleware(MiddlewareMixin):
194 """
195 Appends the X-ElasticAPM-ErrorId response header for referencing a message within
196 the ElasticAPM datastore.
197 """
198
199 def process_response(self, request, response):
200 if not getattr(request, "_elasticapm", None):
201 return response
202 response["X-ElasticAPM-ErrorId"] = request._elasticapm["id"]
203 return response
204
205
206 class LogMiddleware(MiddlewareMixin):
207 # Create a thread local variable to store the session in for logging
208 thread = threading.local()
209
210 def process_request(self, request):
211 self.thread.request = request
212
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/elasticapm/contrib/django/middleware/__init__.py b/elasticapm/contrib/django/middleware/__init__.py
--- a/elasticapm/contrib/django/middleware/__init__.py
+++ b/elasticapm/contrib/django/middleware/__init__.py
@@ -169,7 +169,9 @@
if hasattr(response, "status_code"):
transaction_name = None
if self.client.config.django_transaction_name_from_route and hasattr(request.resolver_match, "route"):
- transaction_name = request.resolver_match.route
+ r = request.resolver_match
+ # if no route is defined (e.g. for the root URL), fall back on url_name and then function name
+ transaction_name = r.route or r.url_name or get_name_from_func(r.func)
elif getattr(request, "_elasticapm_view_func", False):
transaction_name = get_name_from_func(request._elasticapm_view_func)
if transaction_name:
|
{"golden_diff": "diff --git a/elasticapm/contrib/django/middleware/__init__.py b/elasticapm/contrib/django/middleware/__init__.py\n--- a/elasticapm/contrib/django/middleware/__init__.py\n+++ b/elasticapm/contrib/django/middleware/__init__.py\n@@ -169,7 +169,9 @@\n if hasattr(response, \"status_code\"):\n transaction_name = None\n if self.client.config.django_transaction_name_from_route and hasattr(request.resolver_match, \"route\"):\n- transaction_name = request.resolver_match.route\n+ r = request.resolver_match\n+ # if no route is defined (e.g. for the root URL), fall back on url_name and then function name\n+ transaction_name = r.route or r.url_name or get_name_from_func(r.func)\n elif getattr(request, \"_elasticapm_view_func\", False):\n transaction_name = get_name_from_func(request._elasticapm_view_func)\n if transaction_name:\n", "issue": "Root Django URL doesn't appear in the UI when using routes\n**Describe the bug**: `transaction.name` field is missing from the `transcction` document\r\n\r\n**To Reproduce**\r\n\r\n1. set `DJANGO_TRANSACTION_NAME_FROM_ROUTE` to `True`\r\n2. have url of `path(\"\", TemplateView.as_view(template_name=\"pages/home.html\"), name=\"home\")`\r\n3. ...\r\n4. not profit\r\n\r\n**Environment (please complete the following information)**\r\n- OS: Linux\r\n- Python version: 3.8\r\n- Framework and version [e.g. Django 2.1]: Django 3.1.6\r\n- APM Server version: 7.12\r\n- Agent version:6.1.1\n", "before_files": [{"content": "# BSD 3-Clause License\n#\n# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details\n# Copyright (c) 2019, Elasticsearch BV\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holder nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n\nfrom __future__ import absolute_import\n\nimport logging\nimport threading\n\nfrom django.apps import apps\nfrom django.conf import settings as django_settings\n\nimport elasticapm\nfrom elasticapm.conf import constants\nfrom elasticapm.contrib.django.client import client, get_client\nfrom elasticapm.utils import build_name_with_http_method_prefix, get_name_from_func, wrapt\n\ntry:\n from importlib import import_module\nexcept ImportError:\n from django.utils.importlib import import_module\n\ntry:\n from django.utils.deprecation import MiddlewareMixin\nexcept ImportError:\n # no-op class for Django < 1.10\n class MiddlewareMixin(object):\n pass\n\n\ndef _is_ignorable_404(uri):\n \"\"\"\n Returns True if the given request *shouldn't* notify the site managers.\n \"\"\"\n urls = getattr(django_settings, \"IGNORABLE_404_URLS\", ())\n return any(pattern.search(uri) for pattern in urls)\n\n\nclass ElasticAPMClientMiddlewareMixin(object):\n @property\n def client(self):\n try:\n app = apps.get_app_config(\"elasticapm.contrib.django\")\n return app.client\n except LookupError:\n return get_client()\n\n\nclass Catch404Middleware(MiddlewareMixin, ElasticAPMClientMiddlewareMixin):\n def process_response(self, request, response):\n if response.status_code != 404 or _is_ignorable_404(request.get_full_path()):\n return response\n if django_settings.DEBUG and not self.client.config.debug:\n return response\n data = {\"level\": logging.INFO, \"logger\": \"http404\"}\n result = self.client.capture(\n \"Message\",\n request=request,\n param_message={\"message\": \"Page Not Found: %s\", \"params\": [request.build_absolute_uri()]},\n logger_name=\"http404\",\n level=logging.INFO,\n )\n request._elasticapm = {\"service_name\": data.get(\"service_name\", self.client.config.service_name), \"id\": result}\n return response\n\n\ndef get_name_from_middleware(wrapped, instance):\n name = [type(instance).__name__, wrapped.__name__]\n if type(instance).__module__:\n name = [type(instance).__module__] + name\n return \".\".join(name)\n\n\ndef process_request_wrapper(wrapped, instance, args, kwargs):\n response = wrapped(*args, **kwargs)\n try:\n if response is not None:\n request = args[0]\n elasticapm.set_transaction_name(\n build_name_with_http_method_prefix(get_name_from_middleware(wrapped, instance), request)\n )\n finally:\n return response\n\n\ndef process_response_wrapper(wrapped, instance, args, kwargs):\n response = wrapped(*args, **kwargs)\n try:\n request, original_response = args\n # if there's no view_func on the request, and this middleware created\n # a new response object, it's logged as the responsible transaction\n # name\n if not hasattr(request, \"_elasticapm_view_func\") and response is not original_response:\n elasticapm.set_transaction_name(\n build_name_with_http_method_prefix(get_name_from_middleware(wrapped, instance), request)\n )\n finally:\n return response\n\n\nclass TracingMiddleware(MiddlewareMixin, ElasticAPMClientMiddlewareMixin):\n _elasticapm_instrumented = False\n _instrumenting_lock = threading.Lock()\n\n def __init__(self, 
*args, **kwargs):\n super(TracingMiddleware, self).__init__(*args, **kwargs)\n if not self._elasticapm_instrumented:\n with self._instrumenting_lock:\n if not self._elasticapm_instrumented:\n if self.client.config.instrument_django_middleware:\n self.instrument_middlewares()\n\n TracingMiddleware._elasticapm_instrumented = True\n\n def instrument_middlewares(self):\n middlewares = getattr(django_settings, \"MIDDLEWARE\", None) or getattr(\n django_settings, \"MIDDLEWARE_CLASSES\", None\n )\n if middlewares:\n for middleware_path in middlewares:\n module_path, class_name = middleware_path.rsplit(\".\", 1)\n try:\n module = import_module(module_path)\n middleware_class = getattr(module, class_name)\n if middleware_class == type(self):\n # don't instrument ourselves\n continue\n if hasattr(middleware_class, \"process_request\"):\n wrapt.wrap_function_wrapper(middleware_class, \"process_request\", process_request_wrapper)\n if hasattr(middleware_class, \"process_response\"):\n wrapt.wrap_function_wrapper(middleware_class, \"process_response\", process_response_wrapper)\n except ImportError:\n client.logger.warning(\"Can't instrument middleware %s\", middleware_path)\n\n def process_view(self, request, view_func, view_args, view_kwargs):\n request._elasticapm_view_func = view_func\n\n def process_response(self, request, response):\n if django_settings.DEBUG and not self.client.config.debug:\n return response\n try:\n if hasattr(response, \"status_code\"):\n transaction_name = None\n if self.client.config.django_transaction_name_from_route and hasattr(request.resolver_match, \"route\"):\n transaction_name = request.resolver_match.route\n elif getattr(request, \"_elasticapm_view_func\", False):\n transaction_name = get_name_from_func(request._elasticapm_view_func)\n if transaction_name:\n transaction_name = build_name_with_http_method_prefix(transaction_name, request)\n elasticapm.set_transaction_name(transaction_name, override=False)\n\n elasticapm.set_context(\n lambda: self.client.get_data_from_request(request, constants.TRANSACTION), \"request\"\n )\n elasticapm.set_context(\n lambda: self.client.get_data_from_response(response, constants.TRANSACTION), \"response\"\n )\n elasticapm.set_context(lambda: self.client.get_user_info(request), \"user\")\n elasticapm.set_transaction_result(\"HTTP {}xx\".format(response.status_code // 100), override=False)\n elasticapm.set_transaction_outcome(http_status_code=response.status_code, override=False)\n except Exception:\n self.client.error_logger.error(\"Exception during timing of request\", exc_info=True)\n return response\n\n\nclass ErrorIdMiddleware(MiddlewareMixin):\n \"\"\"\n Appends the X-ElasticAPM-ErrorId response header for referencing a message within\n the ElasticAPM datastore.\n \"\"\"\n\n def process_response(self, request, response):\n if not getattr(request, \"_elasticapm\", None):\n return response\n response[\"X-ElasticAPM-ErrorId\"] = request._elasticapm[\"id\"]\n return response\n\n\nclass LogMiddleware(MiddlewareMixin):\n # Create a thread local variable to store the session in for logging\n thread = threading.local()\n\n def process_request(self, request):\n self.thread.request = request\n", "path": "elasticapm/contrib/django/middleware/__init__.py"}], "after_files": [{"content": "# BSD 3-Clause License\n#\n# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details\n# Copyright (c) 2019, Elasticsearch BV\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are 
permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holder nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n\nfrom __future__ import absolute_import\n\nimport logging\nimport threading\n\nfrom django.apps import apps\nfrom django.conf import settings as django_settings\n\nimport elasticapm\nfrom elasticapm.conf import constants\nfrom elasticapm.contrib.django.client import client, get_client\nfrom elasticapm.utils import build_name_with_http_method_prefix, get_name_from_func, wrapt\n\ntry:\n from importlib import import_module\nexcept ImportError:\n from django.utils.importlib import import_module\n\ntry:\n from django.utils.deprecation import MiddlewareMixin\nexcept ImportError:\n # no-op class for Django < 1.10\n class MiddlewareMixin(object):\n pass\n\n\ndef _is_ignorable_404(uri):\n \"\"\"\n Returns True if the given request *shouldn't* notify the site managers.\n \"\"\"\n urls = getattr(django_settings, \"IGNORABLE_404_URLS\", ())\n return any(pattern.search(uri) for pattern in urls)\n\n\nclass ElasticAPMClientMiddlewareMixin(object):\n @property\n def client(self):\n try:\n app = apps.get_app_config(\"elasticapm.contrib.django\")\n return app.client\n except LookupError:\n return get_client()\n\n\nclass Catch404Middleware(MiddlewareMixin, ElasticAPMClientMiddlewareMixin):\n def process_response(self, request, response):\n if response.status_code != 404 or _is_ignorable_404(request.get_full_path()):\n return response\n if django_settings.DEBUG and not self.client.config.debug:\n return response\n data = {\"level\": logging.INFO, \"logger\": \"http404\"}\n result = self.client.capture(\n \"Message\",\n request=request,\n param_message={\"message\": \"Page Not Found: %s\", \"params\": [request.build_absolute_uri()]},\n logger_name=\"http404\",\n level=logging.INFO,\n )\n request._elasticapm = {\"service_name\": data.get(\"service_name\", self.client.config.service_name), \"id\": result}\n return response\n\n\ndef get_name_from_middleware(wrapped, instance):\n name = [type(instance).__name__, wrapped.__name__]\n if type(instance).__module__:\n name = [type(instance).__module__] + name\n return \".\".join(name)\n\n\ndef process_request_wrapper(wrapped, instance, args, kwargs):\n response = wrapped(*args, **kwargs)\n try:\n if response is not None:\n request = args[0]\n 
elasticapm.set_transaction_name(\n build_name_with_http_method_prefix(get_name_from_middleware(wrapped, instance), request)\n )\n finally:\n return response\n\n\ndef process_response_wrapper(wrapped, instance, args, kwargs):\n response = wrapped(*args, **kwargs)\n try:\n request, original_response = args\n # if there's no view_func on the request, and this middleware created\n # a new response object, it's logged as the responsible transaction\n # name\n if not hasattr(request, \"_elasticapm_view_func\") and response is not original_response:\n elasticapm.set_transaction_name(\n build_name_with_http_method_prefix(get_name_from_middleware(wrapped, instance), request)\n )\n finally:\n return response\n\n\nclass TracingMiddleware(MiddlewareMixin, ElasticAPMClientMiddlewareMixin):\n _elasticapm_instrumented = False\n _instrumenting_lock = threading.Lock()\n\n def __init__(self, *args, **kwargs):\n super(TracingMiddleware, self).__init__(*args, **kwargs)\n if not self._elasticapm_instrumented:\n with self._instrumenting_lock:\n if not self._elasticapm_instrumented:\n if self.client.config.instrument_django_middleware:\n self.instrument_middlewares()\n\n TracingMiddleware._elasticapm_instrumented = True\n\n def instrument_middlewares(self):\n middlewares = getattr(django_settings, \"MIDDLEWARE\", None) or getattr(\n django_settings, \"MIDDLEWARE_CLASSES\", None\n )\n if middlewares:\n for middleware_path in middlewares:\n module_path, class_name = middleware_path.rsplit(\".\", 1)\n try:\n module = import_module(module_path)\n middleware_class = getattr(module, class_name)\n if middleware_class == type(self):\n # don't instrument ourselves\n continue\n if hasattr(middleware_class, \"process_request\"):\n wrapt.wrap_function_wrapper(middleware_class, \"process_request\", process_request_wrapper)\n if hasattr(middleware_class, \"process_response\"):\n wrapt.wrap_function_wrapper(middleware_class, \"process_response\", process_response_wrapper)\n except ImportError:\n client.logger.warning(\"Can't instrument middleware %s\", middleware_path)\n\n def process_view(self, request, view_func, view_args, view_kwargs):\n request._elasticapm_view_func = view_func\n\n def process_response(self, request, response):\n if django_settings.DEBUG and not self.client.config.debug:\n return response\n try:\n if hasattr(response, \"status_code\"):\n transaction_name = None\n if self.client.config.django_transaction_name_from_route and hasattr(request.resolver_match, \"route\"):\n r = request.resolver_match\n # if no route is defined (e.g. 
for the root URL), fall back on url_name and then function name\n transaction_name = r.route or r.url_name or get_name_from_func(r.func)\n elif getattr(request, \"_elasticapm_view_func\", False):\n transaction_name = get_name_from_func(request._elasticapm_view_func)\n if transaction_name:\n transaction_name = build_name_with_http_method_prefix(transaction_name, request)\n elasticapm.set_transaction_name(transaction_name, override=False)\n\n elasticapm.set_context(\n lambda: self.client.get_data_from_request(request, constants.TRANSACTION), \"request\"\n )\n elasticapm.set_context(\n lambda: self.client.get_data_from_response(response, constants.TRANSACTION), \"response\"\n )\n elasticapm.set_context(lambda: self.client.get_user_info(request), \"user\")\n elasticapm.set_transaction_result(\"HTTP {}xx\".format(response.status_code // 100), override=False)\n elasticapm.set_transaction_outcome(http_status_code=response.status_code, override=False)\n except Exception:\n self.client.error_logger.error(\"Exception during timing of request\", exc_info=True)\n return response\n\n\nclass ErrorIdMiddleware(MiddlewareMixin):\n \"\"\"\n Appends the X-ElasticAPM-ErrorId response header for referencing a message within\n the ElasticAPM datastore.\n \"\"\"\n\n def process_response(self, request, response):\n if not getattr(request, \"_elasticapm\", None):\n return response\n response[\"X-ElasticAPM-ErrorId\"] = request._elasticapm[\"id\"]\n return response\n\n\nclass LogMiddleware(MiddlewareMixin):\n # Create a thread local variable to store the session in for logging\n thread = threading.local()\n\n def process_request(self, request):\n self.thread.request = request\n", "path": "elasticapm/contrib/django/middleware/__init__.py"}]}
| 2,765 | 219 |
gh_patches_debug_14762 | rasdani/github-patches | git_diff | huggingface__peft-1320 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
An error occurs when using LoftQ. IndexError
A problem occurred while applying LoftQ to T5.
`IndexError: tensors used as indices must be long, byte or bool tensors`
The problem was in [this line](https://github.com/huggingface/peft/blob/main/src/peft/utils/loftq_utils.py#L158); I replaced `int` with `long` and it worked fine:
`lookup_table_idx = lookup_table_idx.to(torch.long)`
--- END ISSUE ---
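A small self-contained reproduction of the indexing constraint described in the issue; the lookup-table values are made up, and whether the `int32` path raises depends on the installed PyTorch version:

```python
import torch

lookup_table = torch.linspace(-1.0, 1.0, 4)        # stand-in for norm_lookup_table
idx = torch.tensor([0, 3, 1], dtype=torch.int32)   # what .to(torch.int) produces

try:
    values = lookup_table[idx]
except IndexError:
    # Older PyTorch builds reject int32 indices with
    # "tensors used as indices must be long, byte or bool tensors".
    values = lookup_table[idx.to(torch.long)]      # int64 indices always work

print(values)
```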
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/peft/utils/loftq_utils.py`
Content:
```
1 # coding=utf-8
2 # Copyright 2023-present the HuggingFace Inc. team.
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 # Reference code: https://github.com/yxli2123/LoftQ/blob/main/utils.py
17 # Reference paper: https://arxiv.org/abs/2310.08659
18
19 import logging
20 from typing import Union
21
22 import torch
23
24 from peft.import_utils import is_bnb_4bit_available, is_bnb_available
25
26
27 if is_bnb_available():
28 import bitsandbytes as bnb
29
30
31 class NFQuantizer:
32 def __init__(self, num_bits=2, device="cuda", method="normal", block_size=64, *args, **kwargs):
33 super().__init__(*args, **kwargs)
34 self.num_bits = num_bits
35 self.device = device
36 self.method = method
37 self.block_size = block_size
38 if self.method == "normal":
39 self.norm_lookup_table = self.create_normal_map(num_bits=self.num_bits)
40 self.norm_lookup_table = self.norm_lookup_table.to(device)
41 elif self.method == "uniform":
42 self.norm_lookup_table = self.create_uniform_map(num_bits=self.num_bits)
43 self.norm_lookup_table = self.norm_lookup_table.to(device)
44 else:
45 raise NotImplementedError("Other quantization methods not supported yet.")
46
47 @staticmethod
48 def create_uniform_map(symmetric=False, num_bits=4):
49 if symmetric:
50 # print("symmetric uniform quantization")
51 negative = torch.linspace(-1, 0, 2 ** (num_bits - 1))
52 positive = torch.linspace(0, 1, 2 ** (num_bits - 1))
53 table = torch.cat([negative, positive[1:]])
54 else:
55 # print("asymmetric uniform quantization")
56 table = torch.linspace(-1, 1, 2**num_bits)
57 return table
58
59 @staticmethod
60 def create_normal_map(offset=0.9677083, symmetric=False, num_bits=2):
61 try:
62 from scipy.stats import norm
63 except ImportError:
64 raise ImportError("The required package 'scipy' is not installed. Please install it to continue.")
65
66 variations = 2**num_bits
67 if symmetric:
68 v = norm.ppf(torch.linspace(1 - offset, offset, variations + 1)).tolist()
69 values = []
70 for index in range(len(v) - 1):
71 values.append(0.5 * v[index] + 0.5 * v[index + 1])
72 v = values
73 else:
74 # one more positive value, this is an asymmetric type
75 v1 = norm.ppf(torch.linspace(offset, 0.5, variations // 2 + 1)[:-1]).tolist()
76 v2 = [0]
77 v3 = (-norm.ppf(torch.linspace(offset, 0.5, variations // 2)[:-1])).tolist()
78 v = v1 + v2 + v3
79
80 values = torch.Tensor(v)
81 values = values.sort().values
82 values /= values.max()
83 return values
84
85 def quantize_tensor(self, weight):
86 max_abs = torch.abs(weight).max()
87 weight_normed = weight / max_abs
88
89 weight_normed_expanded = weight_normed.unsqueeze(-1)
90
91 # Reshape L to have the same number of dimensions as X_expanded
92 L_reshaped = torch.tensor(self.norm_lookup_table).reshape(1, -1)
93
94 # Calculate the absolute difference between X_expanded and L_reshaped
95 abs_diff = torch.abs(weight_normed_expanded - L_reshaped)
96
97 # Find the index of the minimum absolute difference for each element
98 qweight = torch.argmin(abs_diff, dim=-1)
99 return qweight, max_abs
100
101 def dequantize_tensor(self, qweight, max_abs):
102 qweight_flatten = qweight.flatten()
103
104 weight_normed = self.norm_lookup_table[qweight_flatten]
105 weight = weight_normed * max_abs
106
107 weight = weight.reshape(qweight.shape)
108
109 return weight
110
111 def quantize_block(self, weight):
112 if len(weight.shape) != 2:
113 raise ValueError(f"Only support 2D matrix, but your input has {len(weight.shape)} dimensions.")
114 if weight.shape[0] * weight.shape[1] % self.block_size != 0:
115 raise ValueError(
116 f"Weight with shape ({weight.shape[0]} x {weight.shape[1]}) "
117 f"is not dividable by block size {self.block_size}."
118 )
119
120 M, N = weight.shape
121 device = weight.device
122
123 # Quantization
124 weight_flatten = weight.flatten() # (M*N, )
125 weight_block = weight_flatten.reshape(-1, self.block_size) # (L, B), L = M * N / B
126 if self.method == "normal":
127 weight_max = weight_block.abs().max(dim=-1)[0] # (L, 1)
128 elif self.method == "uniform":
129 weight_max = weight_block.mean(dim=-1) + 2.5 * weight_block.std(dim=-1)
130 else:
131 raise NotImplementedError("Method not supported yet.")
132 weight_max = weight_max.unsqueeze(-1)
133 weight_divabs = weight_block / weight_max # (L, B)
134 weight_divabs = weight_divabs.unsqueeze(-1) # (L, B, 1)
135 L_reshaped = self.norm_lookup_table.reshape(1, -1) # (1, 2**K)
136
137 abs_diff = torch.abs(weight_divabs - L_reshaped) # (L, B, 2**K)
138 qweight = torch.argmin(abs_diff, dim=-1) # (L, B)
139
140 # Pack multiple k-bit into uint8
141 qweight = qweight.reshape(-1, 8 // self.num_bits)
142 qweight_pack = torch.zeros((M * N // 8 * self.num_bits, 1), dtype=torch.uint8, device=device)
143
144 # data format example:
145 # [1, 0, 3, 2] or [01, 00, 11, 10] -> [10110001], LIFO
146 for i in range(8 // self.num_bits):
147 qweight[:, i] = qweight[:, i] << i * self.num_bits
148 qweight_pack[:, 0] |= qweight[:, i]
149
150 return qweight_pack, weight_max, weight.shape
151
152 def dequantize_block(self, qweight, weight_max, weight_shape):
153 # unpack weight
154 device = qweight.device
155 weight = torch.zeros((qweight.shape[0], 8 // self.num_bits), dtype=torch.float32, device=device)
156 for i in range(8 // self.num_bits):
157 lookup_table_idx = qweight.to(torch.long) % 2**self.num_bits # get the most right 2 bits
158 lookup_table_idx = lookup_table_idx.to(torch.int)
159 weight[:, i] = self.norm_lookup_table[lookup_table_idx].squeeze()
160 qweight = qweight >> self.num_bits # right shift 2 bits of the original data
161
162 weight_block = weight.reshape(-1, self.block_size)
163 weight = weight_block * weight_max
164 weight = weight.reshape(weight_shape)
165
166 return weight
167
168
169 def _low_rank_decomposition(weight, reduced_rank=32):
170 """
171 :param weight: The matrix to decompose, of shape (H, W) :param reduced_rank: the final rank :return:
172 """
173 matrix_dimension = len(weight.size())
174 if matrix_dimension != 2:
175 raise ValueError(f"Only support 2D matrix, but your input has {matrix_dimension} dimensions.")
176
177 # Use SVD to decompose a matrix, default full_matrices is False to save parameters
178 U, S, Vh = torch.linalg.svd(weight, full_matrices=False)
179
180 L = U @ (torch.sqrt(torch.diag(S)[:, 0:reduced_rank]))
181 R = torch.sqrt(torch.diag(S)[0:reduced_rank, :]) @ Vh
182
183 return {"L": L, "R": R, "U": U, "S": S, "Vh": Vh, "reduced_rank": reduced_rank}
184
185
186 @torch.no_grad()
187 def loftq_init(weight: Union[torch.Tensor, torch.nn.Parameter], num_bits: int, reduced_rank: int, num_iter=1):
188 if num_bits not in [2, 4, 8]:
189 raise ValueError("Only support 2, 4, 8 bits quantization")
190 if num_iter <= 0:
191 raise ValueError("Number of iterations must be greater than 0")
192
193 out_feature, in_feature = weight.size()
194 device = weight.device
195 dtype = weight.dtype
196
197 logging.info(
198 f"Weight: ({out_feature}, {in_feature}) | Rank: {reduced_rank} "
199 f"| Num Iter: {num_iter} | Num Bits: {num_bits}"
200 )
201 if not is_bnb_4bit_available() or num_bits in [2, 8]:
202 quantizer = NFQuantizer(num_bits=num_bits, device=device, method="normal", block_size=64)
203 compute_device = device
204 else:
205 compute_device = "cuda"
206
207 weight = weight.to(device=compute_device, dtype=torch.float32)
208 res = weight.clone()
209 for i in range(num_iter):
210 torch.cuda.empty_cache()
211 # Quantization
212 if num_bits == 4 and is_bnb_4bit_available():
213 qweight = bnb.nn.Params4bit(
214 res.to("cpu"), requires_grad=False, compress_statistics=False, quant_type="nf4"
215 ).to(compute_device)
216 dequantized_weight = bnb.functional.dequantize_4bit(qweight.data, qweight.quant_state)
217 else:
218 quantized_weight, max_abs, shape = quantizer.quantize_block(res)
219 dequantized_weight = quantizer.dequantize_block(quantized_weight, max_abs, shape)
220
221 res = weight - dequantized_weight
222
223 # Decompose the residual by SVD
224 output = _low_rank_decomposition(res, reduced_rank=reduced_rank)
225 L, R, reduced_rank = output["L"], output["R"], output["reduced_rank"]
226 res = weight - torch.mm(L, R)
227
228 lora_A, lora_B = R, L
229
230 return dequantized_weight.to(device=device, dtype=dtype), lora_A, lora_B
231
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/peft/utils/loftq_utils.py b/src/peft/utils/loftq_utils.py
--- a/src/peft/utils/loftq_utils.py
+++ b/src/peft/utils/loftq_utils.py
@@ -155,7 +155,7 @@
weight = torch.zeros((qweight.shape[0], 8 // self.num_bits), dtype=torch.float32, device=device)
for i in range(8 // self.num_bits):
lookup_table_idx = qweight.to(torch.long) % 2**self.num_bits # get the most right 2 bits
- lookup_table_idx = lookup_table_idx.to(torch.int)
+ lookup_table_idx = lookup_table_idx.to(torch.long)
weight[:, i] = self.norm_lookup_table[lookup_table_idx].squeeze()
qweight = qweight >> self.num_bits # right shift 2 bits of the original data
|
{"golden_diff": "diff --git a/src/peft/utils/loftq_utils.py b/src/peft/utils/loftq_utils.py\n--- a/src/peft/utils/loftq_utils.py\n+++ b/src/peft/utils/loftq_utils.py\n@@ -155,7 +155,7 @@\n weight = torch.zeros((qweight.shape[0], 8 // self.num_bits), dtype=torch.float32, device=device)\n for i in range(8 // self.num_bits):\n lookup_table_idx = qweight.to(torch.long) % 2**self.num_bits # get the most right 2 bits\n- lookup_table_idx = lookup_table_idx.to(torch.int)\n+ lookup_table_idx = lookup_table_idx.to(torch.long)\n weight[:, i] = self.norm_lookup_table[lookup_table_idx].squeeze()\n qweight = qweight >> self.num_bits # right shift 2 bits of the original data\n", "issue": "An error occurs when using LoftQ. IndexError\nA problem occurred while applying LoftQ to T5.\r\n`IndexError: tensors used as indices must be long, byte or bool tensors`\r\nThe problem was in [this line](https://github.com/huggingface/peft/blob/main/src/peft/utils/loftq_utils.py#L158), I replaced int with long and it worked fine.\r\n`lookup_table_idx = lookup_table_idx.to(torch.long)`\n", "before_files": [{"content": "# coding=utf-8\n# Copyright 2023-present the HuggingFace Inc. team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Reference code: https://github.com/yxli2123/LoftQ/blob/main/utils.py\n# Reference paper: https://arxiv.org/abs/2310.08659\n\nimport logging\nfrom typing import Union\n\nimport torch\n\nfrom peft.import_utils import is_bnb_4bit_available, is_bnb_available\n\n\nif is_bnb_available():\n import bitsandbytes as bnb\n\n\nclass NFQuantizer:\n def __init__(self, num_bits=2, device=\"cuda\", method=\"normal\", block_size=64, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.num_bits = num_bits\n self.device = device\n self.method = method\n self.block_size = block_size\n if self.method == \"normal\":\n self.norm_lookup_table = self.create_normal_map(num_bits=self.num_bits)\n self.norm_lookup_table = self.norm_lookup_table.to(device)\n elif self.method == \"uniform\":\n self.norm_lookup_table = self.create_uniform_map(num_bits=self.num_bits)\n self.norm_lookup_table = self.norm_lookup_table.to(device)\n else:\n raise NotImplementedError(\"Other quantization methods not supported yet.\")\n\n @staticmethod\n def create_uniform_map(symmetric=False, num_bits=4):\n if symmetric:\n # print(\"symmetric uniform quantization\")\n negative = torch.linspace(-1, 0, 2 ** (num_bits - 1))\n positive = torch.linspace(0, 1, 2 ** (num_bits - 1))\n table = torch.cat([negative, positive[1:]])\n else:\n # print(\"asymmetric uniform quantization\")\n table = torch.linspace(-1, 1, 2**num_bits)\n return table\n\n @staticmethod\n def create_normal_map(offset=0.9677083, symmetric=False, num_bits=2):\n try:\n from scipy.stats import norm\n except ImportError:\n raise ImportError(\"The required package 'scipy' is not installed. 
Please install it to continue.\")\n\n variations = 2**num_bits\n if symmetric:\n v = norm.ppf(torch.linspace(1 - offset, offset, variations + 1)).tolist()\n values = []\n for index in range(len(v) - 1):\n values.append(0.5 * v[index] + 0.5 * v[index + 1])\n v = values\n else:\n # one more positive value, this is an asymmetric type\n v1 = norm.ppf(torch.linspace(offset, 0.5, variations // 2 + 1)[:-1]).tolist()\n v2 = [0]\n v3 = (-norm.ppf(torch.linspace(offset, 0.5, variations // 2)[:-1])).tolist()\n v = v1 + v2 + v3\n\n values = torch.Tensor(v)\n values = values.sort().values\n values /= values.max()\n return values\n\n def quantize_tensor(self, weight):\n max_abs = torch.abs(weight).max()\n weight_normed = weight / max_abs\n\n weight_normed_expanded = weight_normed.unsqueeze(-1)\n\n # Reshape L to have the same number of dimensions as X_expanded\n L_reshaped = torch.tensor(self.norm_lookup_table).reshape(1, -1)\n\n # Calculate the absolute difference between X_expanded and L_reshaped\n abs_diff = torch.abs(weight_normed_expanded - L_reshaped)\n\n # Find the index of the minimum absolute difference for each element\n qweight = torch.argmin(abs_diff, dim=-1)\n return qweight, max_abs\n\n def dequantize_tensor(self, qweight, max_abs):\n qweight_flatten = qweight.flatten()\n\n weight_normed = self.norm_lookup_table[qweight_flatten]\n weight = weight_normed * max_abs\n\n weight = weight.reshape(qweight.shape)\n\n return weight\n\n def quantize_block(self, weight):\n if len(weight.shape) != 2:\n raise ValueError(f\"Only support 2D matrix, but your input has {len(weight.shape)} dimensions.\")\n if weight.shape[0] * weight.shape[1] % self.block_size != 0:\n raise ValueError(\n f\"Weight with shape ({weight.shape[0]} x {weight.shape[1]}) \"\n f\"is not dividable by block size {self.block_size}.\"\n )\n\n M, N = weight.shape\n device = weight.device\n\n # Quantization\n weight_flatten = weight.flatten() # (M*N, )\n weight_block = weight_flatten.reshape(-1, self.block_size) # (L, B), L = M * N / B\n if self.method == \"normal\":\n weight_max = weight_block.abs().max(dim=-1)[0] # (L, 1)\n elif self.method == \"uniform\":\n weight_max = weight_block.mean(dim=-1) + 2.5 * weight_block.std(dim=-1)\n else:\n raise NotImplementedError(\"Method not supported yet.\")\n weight_max = weight_max.unsqueeze(-1)\n weight_divabs = weight_block / weight_max # (L, B)\n weight_divabs = weight_divabs.unsqueeze(-1) # (L, B, 1)\n L_reshaped = self.norm_lookup_table.reshape(1, -1) # (1, 2**K)\n\n abs_diff = torch.abs(weight_divabs - L_reshaped) # (L, B, 2**K)\n qweight = torch.argmin(abs_diff, dim=-1) # (L, B)\n\n # Pack multiple k-bit into uint8\n qweight = qweight.reshape(-1, 8 // self.num_bits)\n qweight_pack = torch.zeros((M * N // 8 * self.num_bits, 1), dtype=torch.uint8, device=device)\n\n # data format example:\n # [1, 0, 3, 2] or [01, 00, 11, 10] -> [10110001], LIFO\n for i in range(8 // self.num_bits):\n qweight[:, i] = qweight[:, i] << i * self.num_bits\n qweight_pack[:, 0] |= qweight[:, i]\n\n return qweight_pack, weight_max, weight.shape\n\n def dequantize_block(self, qweight, weight_max, weight_shape):\n # unpack weight\n device = qweight.device\n weight = torch.zeros((qweight.shape[0], 8 // self.num_bits), dtype=torch.float32, device=device)\n for i in range(8 // self.num_bits):\n lookup_table_idx = qweight.to(torch.long) % 2**self.num_bits # get the most right 2 bits\n lookup_table_idx = lookup_table_idx.to(torch.int)\n weight[:, i] = self.norm_lookup_table[lookup_table_idx].squeeze()\n qweight = qweight >> 
self.num_bits # right shift 2 bits of the original data\n\n weight_block = weight.reshape(-1, self.block_size)\n weight = weight_block * weight_max\n weight = weight.reshape(weight_shape)\n\n return weight\n\n\ndef _low_rank_decomposition(weight, reduced_rank=32):\n \"\"\"\n :param weight: The matrix to decompose, of shape (H, W) :param reduced_rank: the final rank :return:\n \"\"\"\n matrix_dimension = len(weight.size())\n if matrix_dimension != 2:\n raise ValueError(f\"Only support 2D matrix, but your input has {matrix_dimension} dimensions.\")\n\n # Use SVD to decompose a matrix, default full_matrices is False to save parameters\n U, S, Vh = torch.linalg.svd(weight, full_matrices=False)\n\n L = U @ (torch.sqrt(torch.diag(S)[:, 0:reduced_rank]))\n R = torch.sqrt(torch.diag(S)[0:reduced_rank, :]) @ Vh\n\n return {\"L\": L, \"R\": R, \"U\": U, \"S\": S, \"Vh\": Vh, \"reduced_rank\": reduced_rank}\n\n\[email protected]_grad()\ndef loftq_init(weight: Union[torch.Tensor, torch.nn.Parameter], num_bits: int, reduced_rank: int, num_iter=1):\n if num_bits not in [2, 4, 8]:\n raise ValueError(\"Only support 2, 4, 8 bits quantization\")\n if num_iter <= 0:\n raise ValueError(\"Number of iterations must be greater than 0\")\n\n out_feature, in_feature = weight.size()\n device = weight.device\n dtype = weight.dtype\n\n logging.info(\n f\"Weight: ({out_feature}, {in_feature}) | Rank: {reduced_rank} \"\n f\"| Num Iter: {num_iter} | Num Bits: {num_bits}\"\n )\n if not is_bnb_4bit_available() or num_bits in [2, 8]:\n quantizer = NFQuantizer(num_bits=num_bits, device=device, method=\"normal\", block_size=64)\n compute_device = device\n else:\n compute_device = \"cuda\"\n\n weight = weight.to(device=compute_device, dtype=torch.float32)\n res = weight.clone()\n for i in range(num_iter):\n torch.cuda.empty_cache()\n # Quantization\n if num_bits == 4 and is_bnb_4bit_available():\n qweight = bnb.nn.Params4bit(\n res.to(\"cpu\"), requires_grad=False, compress_statistics=False, quant_type=\"nf4\"\n ).to(compute_device)\n dequantized_weight = bnb.functional.dequantize_4bit(qweight.data, qweight.quant_state)\n else:\n quantized_weight, max_abs, shape = quantizer.quantize_block(res)\n dequantized_weight = quantizer.dequantize_block(quantized_weight, max_abs, shape)\n\n res = weight - dequantized_weight\n\n # Decompose the residual by SVD\n output = _low_rank_decomposition(res, reduced_rank=reduced_rank)\n L, R, reduced_rank = output[\"L\"], output[\"R\"], output[\"reduced_rank\"]\n res = weight - torch.mm(L, R)\n\n lora_A, lora_B = R, L\n\n return dequantized_weight.to(device=device, dtype=dtype), lora_A, lora_B\n", "path": "src/peft/utils/loftq_utils.py"}], "after_files": [{"content": "# coding=utf-8\n# Copyright 2023-present the HuggingFace Inc. 
team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Reference code: https://github.com/yxli2123/LoftQ/blob/main/utils.py\n# Reference paper: https://arxiv.org/abs/2310.08659\n\nimport logging\nfrom typing import Union\n\nimport torch\n\nfrom peft.import_utils import is_bnb_4bit_available, is_bnb_available\n\n\nif is_bnb_available():\n import bitsandbytes as bnb\n\n\nclass NFQuantizer:\n def __init__(self, num_bits=2, device=\"cuda\", method=\"normal\", block_size=64, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.num_bits = num_bits\n self.device = device\n self.method = method\n self.block_size = block_size\n if self.method == \"normal\":\n self.norm_lookup_table = self.create_normal_map(num_bits=self.num_bits)\n self.norm_lookup_table = self.norm_lookup_table.to(device)\n elif self.method == \"uniform\":\n self.norm_lookup_table = self.create_uniform_map(num_bits=self.num_bits)\n self.norm_lookup_table = self.norm_lookup_table.to(device)\n else:\n raise NotImplementedError(\"Other quantization methods not supported yet.\")\n\n @staticmethod\n def create_uniform_map(symmetric=False, num_bits=4):\n if symmetric:\n # print(\"symmetric uniform quantization\")\n negative = torch.linspace(-1, 0, 2 ** (num_bits - 1))\n positive = torch.linspace(0, 1, 2 ** (num_bits - 1))\n table = torch.cat([negative, positive[1:]])\n else:\n # print(\"asymmetric uniform quantization\")\n table = torch.linspace(-1, 1, 2**num_bits)\n return table\n\n @staticmethod\n def create_normal_map(offset=0.9677083, symmetric=False, num_bits=2):\n try:\n from scipy.stats import norm\n except ImportError:\n raise ImportError(\"The required package 'scipy' is not installed. 
Please install it to continue.\")\n\n variations = 2**num_bits\n if symmetric:\n v = norm.ppf(torch.linspace(1 - offset, offset, variations + 1)).tolist()\n values = []\n for index in range(len(v) - 1):\n values.append(0.5 * v[index] + 0.5 * v[index + 1])\n v = values\n else:\n # one more positive value, this is an asymmetric type\n v1 = norm.ppf(torch.linspace(offset, 0.5, variations // 2 + 1)[:-1]).tolist()\n v2 = [0]\n v3 = (-norm.ppf(torch.linspace(offset, 0.5, variations // 2)[:-1])).tolist()\n v = v1 + v2 + v3\n\n values = torch.Tensor(v)\n values = values.sort().values\n values /= values.max()\n return values\n\n def quantize_tensor(self, weight):\n max_abs = torch.abs(weight).max()\n weight_normed = weight / max_abs\n\n weight_normed_expanded = weight_normed.unsqueeze(-1)\n\n # Reshape L to have the same number of dimensions as X_expanded\n L_reshaped = torch.tensor(self.norm_lookup_table).reshape(1, -1)\n\n # Calculate the absolute difference between X_expanded and L_reshaped\n abs_diff = torch.abs(weight_normed_expanded - L_reshaped)\n\n # Find the index of the minimum absolute difference for each element\n qweight = torch.argmin(abs_diff, dim=-1)\n return qweight, max_abs\n\n def dequantize_tensor(self, qweight, max_abs):\n qweight_flatten = qweight.flatten()\n\n weight_normed = self.norm_lookup_table[qweight_flatten]\n weight = weight_normed * max_abs\n\n weight = weight.reshape(qweight.shape)\n\n return weight\n\n def quantize_block(self, weight):\n if len(weight.shape) != 2:\n raise ValueError(f\"Only support 2D matrix, but your input has {len(weight.shape)} dimensions.\")\n if weight.shape[0] * weight.shape[1] % self.block_size != 0:\n raise ValueError(\n f\"Weight with shape ({weight.shape[0]} x {weight.shape[1]}) \"\n f\"is not dividable by block size {self.block_size}.\"\n )\n\n M, N = weight.shape\n device = weight.device\n\n # Quantization\n weight_flatten = weight.flatten() # (M*N, )\n weight_block = weight_flatten.reshape(-1, self.block_size) # (L, B), L = M * N / B\n if self.method == \"normal\":\n weight_max = weight_block.abs().max(dim=-1)[0] # (L, 1)\n elif self.method == \"uniform\":\n weight_max = weight_block.mean(dim=-1) + 2.5 * weight_block.std(dim=-1)\n else:\n raise NotImplementedError(\"Method not supported yet.\")\n weight_max = weight_max.unsqueeze(-1)\n weight_divabs = weight_block / weight_max # (L, B)\n weight_divabs = weight_divabs.unsqueeze(-1) # (L, B, 1)\n L_reshaped = self.norm_lookup_table.reshape(1, -1) # (1, 2**K)\n\n abs_diff = torch.abs(weight_divabs - L_reshaped) # (L, B, 2**K)\n qweight = torch.argmin(abs_diff, dim=-1) # (L, B)\n\n # Pack multiple k-bit into uint8\n qweight = qweight.reshape(-1, 8 // self.num_bits)\n qweight_pack = torch.zeros((M * N // 8 * self.num_bits, 1), dtype=torch.uint8, device=device)\n\n # data format example:\n # [1, 0, 3, 2] or [01, 00, 11, 10] -> [10110001], LIFO\n for i in range(8 // self.num_bits):\n qweight[:, i] = qweight[:, i] << i * self.num_bits\n qweight_pack[:, 0] |= qweight[:, i]\n\n return qweight_pack, weight_max, weight.shape\n\n def dequantize_block(self, qweight, weight_max, weight_shape):\n # unpack weight\n device = qweight.device\n weight = torch.zeros((qweight.shape[0], 8 // self.num_bits), dtype=torch.float32, device=device)\n for i in range(8 // self.num_bits):\n lookup_table_idx = qweight.to(torch.long) % 2**self.num_bits # get the most right 2 bits\n lookup_table_idx = lookup_table_idx.to(torch.long)\n weight[:, i] = self.norm_lookup_table[lookup_table_idx].squeeze()\n qweight = qweight 
>> self.num_bits # right shift 2 bits of the original data\n\n weight_block = weight.reshape(-1, self.block_size)\n weight = weight_block * weight_max\n weight = weight.reshape(weight_shape)\n\n return weight\n\n\ndef _low_rank_decomposition(weight, reduced_rank=32):\n \"\"\"\n :param weight: The matrix to decompose, of shape (H, W) :param reduced_rank: the final rank :return:\n \"\"\"\n matrix_dimension = len(weight.size())\n if matrix_dimension != 2:\n raise ValueError(f\"Only support 2D matrix, but your input has {matrix_dimension} dimensions.\")\n\n # Use SVD to decompose a matrix, default full_matrices is False to save parameters\n U, S, Vh = torch.linalg.svd(weight, full_matrices=False)\n\n L = U @ (torch.sqrt(torch.diag(S)[:, 0:reduced_rank]))\n R = torch.sqrt(torch.diag(S)[0:reduced_rank, :]) @ Vh\n\n return {\"L\": L, \"R\": R, \"U\": U, \"S\": S, \"Vh\": Vh, \"reduced_rank\": reduced_rank}\n\n\[email protected]_grad()\ndef loftq_init(weight: Union[torch.Tensor, torch.nn.Parameter], num_bits: int, reduced_rank: int, num_iter=1):\n if num_bits not in [2, 4, 8]:\n raise ValueError(\"Only support 2, 4, 8 bits quantization\")\n if num_iter <= 0:\n raise ValueError(\"Number of iterations must be greater than 0\")\n\n out_feature, in_feature = weight.size()\n device = weight.device\n dtype = weight.dtype\n\n logging.info(\n f\"Weight: ({out_feature}, {in_feature}) | Rank: {reduced_rank} \"\n f\"| Num Iter: {num_iter} | Num Bits: {num_bits}\"\n )\n if not is_bnb_4bit_available() or num_bits in [2, 8]:\n quantizer = NFQuantizer(num_bits=num_bits, device=device, method=\"normal\", block_size=64)\n compute_device = device\n else:\n compute_device = \"cuda\"\n\n weight = weight.to(device=compute_device, dtype=torch.float32)\n res = weight.clone()\n for i in range(num_iter):\n torch.cuda.empty_cache()\n # Quantization\n if num_bits == 4 and is_bnb_4bit_available():\n qweight = bnb.nn.Params4bit(\n res.to(\"cpu\"), requires_grad=False, compress_statistics=False, quant_type=\"nf4\"\n ).to(compute_device)\n dequantized_weight = bnb.functional.dequantize_4bit(qweight.data, qweight.quant_state)\n else:\n quantized_weight, max_abs, shape = quantizer.quantize_block(res)\n dequantized_weight = quantizer.dequantize_block(quantized_weight, max_abs, shape)\n\n res = weight - dequantized_weight\n\n # Decompose the residual by SVD\n output = _low_rank_decomposition(res, reduced_rank=reduced_rank)\n L, R, reduced_rank = output[\"L\"], output[\"R\"], output[\"reduced_rank\"]\n res = weight - torch.mm(L, R)\n\n lora_A, lora_B = R, L\n\n return dequantized_weight.to(device=device, dtype=dtype), lora_A, lora_B\n", "path": "src/peft/utils/loftq_utils.py"}]}
| 3,383 | 203 |
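
For readers skimming the LoftQ record above, here is a minimal, self-contained sketch (not part of the dataset record, and the exact error message depends on the PyTorch version) showing why the golden diff casts the lookup index from `torch.int` to `torch.long`: PyTorch's advanced indexing rejects int32 index tensors on the affected versions, which is the `IndexError` reported in the issue.

```python
# Hypothetical reproduction of the dtype requirement behind the LoftQ fix.
# On PyTorch versions that reject int32 indices, table[idx_int] raises
# "IndexError: tensors used as indices must be long, byte or bool tensors".
import torch

table = torch.linspace(-1, 1, 4)                    # stand-in for norm_lookup_table
idx_int = torch.tensor([0, 2], dtype=torch.int)     # int32 index -> may raise IndexError
idx_long = torch.tensor([0, 2], dtype=torch.long)   # int64 index -> always accepted

try:
    print(table[idx_int])
except IndexError as exc:
    print("int32 index rejected:", exc)

print(table[idx_long])  # indexing with a long tensor works on all versions
```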
gh_patches_debug_10163
|
rasdani/github-patches
|
git_diff
|
pytorch__pytorch-2200
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
DataParallel tests are currently broken
https://github.com/pytorch/pytorch/pull/2121/commits/d69669efcfe4333c223f53249185c2e22f76ed73 has broken DataParallel tests. Now that device_ids are explicitly sent to parallel_apply, this assert https://github.com/pytorch/pytorch/blob/master/torch/nn/parallel/parallel_apply.py#L30 gets triggered if inputs are not big enough to be on all devices (e.g. a batch size of 20 on 8 GPUs gets chunked into 6*3+2, so that the 8th GPU is idle and the assert gets triggered).
--- END ISSUE ---
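
For intuition, the chunk arithmetic described in the issue can be reproduced with a short sketch (this mirrors the rounding behaviour described above, not the actual `torch.nn.parallel.scatter` implementation): a batch of 20 split over 8 GPUs uses chunks of ceil(20/8) = 3, which yields only 7 chunks, so the 8th device receives no input and the length assert in parallel_apply (linked above) fails when the full device_ids list is passed.

```python
# Rough sketch of the chunking described in the issue; the helper below is an
# illustration, not PyTorch's scatter code.
import math

def chunk_sizes(batch_size, num_devices):
    chunk = math.ceil(batch_size / num_devices)
    sizes = []
    remaining = batch_size
    while remaining > 0:
        sizes.append(min(chunk, remaining))
        remaining -= chunk
    return sizes

print(chunk_sizes(20, 8))  # [3, 3, 3, 3, 3, 3, 2] -> 7 chunks for 8 requested devices
```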
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `torch/nn/parallel/data_parallel.py`
Content:
```
1 import torch
2 from ..modules import Module
3 from .scatter_gather import scatter_kwargs, gather
4 from .replicate import replicate
5 from .parallel_apply import parallel_apply
6
7
8 class DataParallel(Module):
9 """Implements data parallelism at the module level.
10
11 This container parallelizes the application of the given module by
12 splitting the input across the specified devices by chunking in the batch
13 dimension. In the forward pass, the module is replicated on each device,
14 and each replica handles a portion of the input. During the backwards
15 pass, gradients from each replica are summed into the original module.
16
17 The batch size should be larger than the number of GPUs used. It should
18 also be an integer multiple of the number of GPUs so that each chunk is the
19 same size (so that each GPU processes the same number of samples).
20
21 See also: :ref:`cuda-nn-dataparallel-instead`
22
23 Arbitrary positional and keyword inputs are allowed to be passed into
24 DataParallel EXCEPT Tensors. All variables will be scattered on dim
25 specified (default 0). Primitive types will be broadcasted, but all
26 other types will be a shallow copy and can be corrupted if written to in
27 the model's forward pass.
28
29 Args:
30 module: module to be parallelized
31 device_ids: CUDA devices (default: all devices)
32 output_device: device location of output (default: device_ids[0])
33
34 Example::
35
36 >>> net = torch.nn.DataParallel(model, device_ids=[0, 1, 2])
37 >>> output = net(input_var)
38 """
39
40 # TODO: update notes/cuda.rst when this class handles 8+ GPUs well
41
42 def __init__(self, module, device_ids=None, output_device=None, dim=0):
43 super(DataParallel, self).__init__()
44 if device_ids is None:
45 device_ids = list(range(torch.cuda.device_count()))
46 if output_device is None:
47 output_device = device_ids[0]
48 self.dim = dim
49 self.module = module
50 self.device_ids = device_ids
51 self.output_device = output_device
52 if len(self.device_ids) == 1:
53 self.module.cuda(device_ids[0])
54
55 def forward(self, *inputs, **kwargs):
56 inputs, kwargs = self.scatter(inputs, kwargs, self.device_ids)
57 if len(self.device_ids) == 1:
58 return self.module(*inputs[0], **kwargs[0])
59 replicas = self.replicate(self.module, self.device_ids[:len(inputs)])
60 outputs = self.parallel_apply(replicas, inputs, kwargs)
61 return self.gather(outputs, self.output_device)
62
63 def replicate(self, module, device_ids):
64 return replicate(module, device_ids)
65
66 def scatter(self, inputs, kwargs, device_ids):
67 return scatter_kwargs(inputs, kwargs, device_ids, dim=self.dim)
68
69 def parallel_apply(self, replicas, inputs, kwargs):
70 return parallel_apply(replicas, inputs, kwargs, self.device_ids[:len(replicas)])
71
72 def gather(self, outputs, output_device):
73 return gather(outputs, output_device, dim=self.dim)
74
75
76 def data_parallel(module, inputs, device_ids=None, output_device=None, dim=0, module_kwargs=None):
77 """Evaluates module(input) in parallel across the GPUs given in device_ids.
78
79 This is the functional version of the DataParallel module.
80
81 Args:
82 module: the module to evaluate in parallel
83 inputs: inputs to the module
84 device_ids: GPU ids on which to replicate module
85 output_device: GPU location of the output Use -1 to indicate the CPU.
86 (default: device_ids[0])
87 Returns:
88 a Variable containing the result of module(input) located on
89 output_device
90 """
91 if not isinstance(inputs, tuple):
92 inputs = (inputs,)
93
94 if device_ids is None:
95 device_ids = list(range(torch.cuda.device_count()))
96
97 if output_device is None:
98 output_device = device_ids[0]
99
100 inputs, module_kwargs = scatter_kwargs(inputs, module_kwargs, device_ids, dim)
101 if len(device_ids) == 1:
102 return module(*inputs[0], **module_kwargs[0])
103 replicas = replicate(module, device_ids[:len(inputs)])
104 outputs = parallel_apply(replicas, inputs, module_kwargs, device_ids)
105 return gather(outputs, output_device, dim)
106
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/torch/nn/parallel/data_parallel.py b/torch/nn/parallel/data_parallel.py
--- a/torch/nn/parallel/data_parallel.py
+++ b/torch/nn/parallel/data_parallel.py
@@ -100,6 +100,7 @@
inputs, module_kwargs = scatter_kwargs(inputs, module_kwargs, device_ids, dim)
if len(device_ids) == 1:
return module(*inputs[0], **module_kwargs[0])
- replicas = replicate(module, device_ids[:len(inputs)])
- outputs = parallel_apply(replicas, inputs, module_kwargs, device_ids)
+ used_device_ids = device_ids[:len(inputs)]
+ replicas = replicate(module, used_device_ids)
+ outputs = parallel_apply(replicas, inputs, module_kwargs, used_device_ids)
return gather(outputs, output_device, dim)
|
{"golden_diff": "diff --git a/torch/nn/parallel/data_parallel.py b/torch/nn/parallel/data_parallel.py\n--- a/torch/nn/parallel/data_parallel.py\n+++ b/torch/nn/parallel/data_parallel.py\n@@ -100,6 +100,7 @@\n inputs, module_kwargs = scatter_kwargs(inputs, module_kwargs, device_ids, dim)\n if len(device_ids) == 1:\n return module(*inputs[0], **module_kwargs[0])\n- replicas = replicate(module, device_ids[:len(inputs)])\n- outputs = parallel_apply(replicas, inputs, module_kwargs, device_ids)\n+ used_device_ids = device_ids[:len(inputs)]\n+ replicas = replicate(module, used_device_ids)\n+ outputs = parallel_apply(replicas, inputs, module_kwargs, used_device_ids)\n return gather(outputs, output_device, dim)\n", "issue": "DataParallel tests are currently broken \nhttps://github.com/pytorch/pytorch/pull/2121/commits/d69669efcfe4333c223f53249185c2e22f76ed73 has broken DataParallel tests. Now that device_ids are explicitly sent to parallel_apply, this assert https://github.com/pytorch/pytorch/blob/master/torch/nn/parallel/parallel_apply.py#L30 gets triggered if inputs are not big enough to be on all devices (e.g. batch size of 20 on 8 GPUs gets chunked into 6*3+2, so that 8-th GPU is idle, and assert gets triggered). \r\n\n", "before_files": [{"content": "import torch\nfrom ..modules import Module\nfrom .scatter_gather import scatter_kwargs, gather\nfrom .replicate import replicate\nfrom .parallel_apply import parallel_apply\n\n\nclass DataParallel(Module):\n \"\"\"Implements data parallelism at the module level.\n\n This container parallelizes the application of the given module by\n splitting the input across the specified devices by chunking in the batch\n dimension. In the forward pass, the module is replicated on each device,\n and each replica handles a portion of the input. During the backwards\n pass, gradients from each replica are summed into the original module.\n\n The batch size should be larger than the number of GPUs used. It should\n also be an integer multiple of the number of GPUs so that each chunk is the\n same size (so that each GPU processes the same number of samples).\n\n See also: :ref:`cuda-nn-dataparallel-instead`\n\n Arbitrary positional and keyword inputs are allowed to be passed into\n DataParallel EXCEPT Tensors. All variables will be scattered on dim\n specified (default 0). 
Primitive types will be broadcasted, but all\n other types will be a shallow copy and can be corrupted if written to in\n the model's forward pass.\n\n Args:\n module: module to be parallelized\n device_ids: CUDA devices (default: all devices)\n output_device: device location of output (default: device_ids[0])\n\n Example::\n\n >>> net = torch.nn.DataParallel(model, device_ids=[0, 1, 2])\n >>> output = net(input_var)\n \"\"\"\n\n # TODO: update notes/cuda.rst when this class handles 8+ GPUs well\n\n def __init__(self, module, device_ids=None, output_device=None, dim=0):\n super(DataParallel, self).__init__()\n if device_ids is None:\n device_ids = list(range(torch.cuda.device_count()))\n if output_device is None:\n output_device = device_ids[0]\n self.dim = dim\n self.module = module\n self.device_ids = device_ids\n self.output_device = output_device\n if len(self.device_ids) == 1:\n self.module.cuda(device_ids[0])\n\n def forward(self, *inputs, **kwargs):\n inputs, kwargs = self.scatter(inputs, kwargs, self.device_ids)\n if len(self.device_ids) == 1:\n return self.module(*inputs[0], **kwargs[0])\n replicas = self.replicate(self.module, self.device_ids[:len(inputs)])\n outputs = self.parallel_apply(replicas, inputs, kwargs)\n return self.gather(outputs, self.output_device)\n\n def replicate(self, module, device_ids):\n return replicate(module, device_ids)\n\n def scatter(self, inputs, kwargs, device_ids):\n return scatter_kwargs(inputs, kwargs, device_ids, dim=self.dim)\n\n def parallel_apply(self, replicas, inputs, kwargs):\n return parallel_apply(replicas, inputs, kwargs, self.device_ids[:len(replicas)])\n\n def gather(self, outputs, output_device):\n return gather(outputs, output_device, dim=self.dim)\n\n\ndef data_parallel(module, inputs, device_ids=None, output_device=None, dim=0, module_kwargs=None):\n \"\"\"Evaluates module(input) in parallel across the GPUs given in device_ids.\n\n This is the functional version of the DataParallel module.\n\n Args:\n module: the module to evaluate in parallel\n inputs: inputs to the module\n device_ids: GPU ids on which to replicate module\n output_device: GPU location of the output Use -1 to indicate the CPU.\n (default: device_ids[0])\n Returns:\n a Variable containing the result of module(input) located on\n output_device\n \"\"\"\n if not isinstance(inputs, tuple):\n inputs = (inputs,)\n\n if device_ids is None:\n device_ids = list(range(torch.cuda.device_count()))\n\n if output_device is None:\n output_device = device_ids[0]\n\n inputs, module_kwargs = scatter_kwargs(inputs, module_kwargs, device_ids, dim)\n if len(device_ids) == 1:\n return module(*inputs[0], **module_kwargs[0])\n replicas = replicate(module, device_ids[:len(inputs)])\n outputs = parallel_apply(replicas, inputs, module_kwargs, device_ids)\n return gather(outputs, output_device, dim)\n", "path": "torch/nn/parallel/data_parallel.py"}], "after_files": [{"content": "import torch\nfrom ..modules import Module\nfrom .scatter_gather import scatter_kwargs, gather\nfrom .replicate import replicate\nfrom .parallel_apply import parallel_apply\n\n\nclass DataParallel(Module):\n \"\"\"Implements data parallelism at the module level.\n\n This container parallelizes the application of the given module by\n splitting the input across the specified devices by chunking in the batch\n dimension. In the forward pass, the module is replicated on each device,\n and each replica handles a portion of the input. 
During the backwards\n pass, gradients from each replica are summed into the original module.\n\n The batch size should be larger than the number of GPUs used. It should\n also be an integer multiple of the number of GPUs so that each chunk is the\n same size (so that each GPU processes the same number of samples).\n\n See also: :ref:`cuda-nn-dataparallel-instead`\n\n Arbitrary positional and keyword inputs are allowed to be passed into\n DataParallel EXCEPT Tensors. All variables will be scattered on dim\n specified (default 0). Primitive types will be broadcasted, but all\n other types will be a shallow copy and can be corrupted if written to in\n the model's forward pass.\n\n Args:\n module: module to be parallelized\n device_ids: CUDA devices (default: all devices)\n output_device: device location of output (default: device_ids[0])\n\n Example::\n\n >>> net = torch.nn.DataParallel(model, device_ids=[0, 1, 2])\n >>> output = net(input_var)\n \"\"\"\n\n # TODO: update notes/cuda.rst when this class handles 8+ GPUs well\n\n def __init__(self, module, device_ids=None, output_device=None, dim=0):\n super(DataParallel, self).__init__()\n if device_ids is None:\n device_ids = list(range(torch.cuda.device_count()))\n if output_device is None:\n output_device = device_ids[0]\n self.dim = dim\n self.module = module\n self.device_ids = device_ids\n self.output_device = output_device\n if len(self.device_ids) == 1:\n self.module.cuda(device_ids[0])\n\n def forward(self, *inputs, **kwargs):\n inputs, kwargs = self.scatter(inputs, kwargs, self.device_ids)\n if len(self.device_ids) == 1:\n return self.module(*inputs[0], **kwargs[0])\n replicas = self.replicate(self.module, self.device_ids[:len(inputs)])\n outputs = self.parallel_apply(replicas, inputs, kwargs)\n return self.gather(outputs, self.output_device)\n\n def replicate(self, module, device_ids):\n return replicate(module, device_ids)\n\n def scatter(self, inputs, kwargs, device_ids):\n return scatter_kwargs(inputs, kwargs, device_ids, dim=self.dim)\n\n def parallel_apply(self, replicas, inputs, kwargs):\n return parallel_apply(replicas, inputs, kwargs, self.device_ids[:len(replicas)])\n\n def gather(self, outputs, output_device):\n return gather(outputs, output_device, dim=self.dim)\n\n\ndef data_parallel(module, inputs, device_ids=None, output_device=None, dim=0, module_kwargs=None):\n \"\"\"Evaluates module(input) in parallel across the GPUs given in device_ids.\n\n This is the functional version of the DataParallel module.\n\n Args:\n module: the module to evaluate in parallel\n inputs: inputs to the module\n device_ids: GPU ids on which to replicate module\n output_device: GPU location of the output Use -1 to indicate the CPU.\n (default: device_ids[0])\n Returns:\n a Variable containing the result of module(input) located on\n output_device\n \"\"\"\n if not isinstance(inputs, tuple):\n inputs = (inputs,)\n\n if device_ids is None:\n device_ids = list(range(torch.cuda.device_count()))\n\n if output_device is None:\n output_device = device_ids[0]\n\n inputs, module_kwargs = scatter_kwargs(inputs, module_kwargs, device_ids, dim)\n if len(device_ids) == 1:\n return module(*inputs[0], **module_kwargs[0])\n used_device_ids = device_ids[:len(inputs)]\n replicas = replicate(module, used_device_ids)\n outputs = parallel_apply(replicas, inputs, module_kwargs, used_device_ids)\n return gather(outputs, output_device, dim)\n", "path": "torch/nn/parallel/data_parallel.py"}]}
| 1,575 | 185 |
gh_patches_debug_34564
|
rasdani/github-patches
|
git_diff
|
pyinstaller__pyinstaller-7259
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fixes for use of pyinstaller with Django 4.x and custom management commands.
PROBLEM:
This feature aims to solve the problem of the custom app-level management commands being missed out from hidden imports, alongside issues with imports of apps listed within INSTALLED_APPS failing due to erroneous execution of the 'eval_script' function. Specifically, when the hidden imports of the INSTALLED_APPS are evaluated, the logging output generated by 'collect_submodules' when called in django_import_finder.py is captured in STDOUT regardless of the --log-level. Also, any additional management commands provided by one of the INSTALLED_APPS are ignored because the 'get_commands' function has a hardcoded reference to the Django 1.8 command set. Django's current implementation of command collection will not complain about missing commands at runtime, thereby rendering the patch of this function that is currently implemented irrelevant.
SOLUTION:
The solution to this issue is to remove several redundant parts of the code alongside adding additional overrides for decluttering STDOUT.
The following is a list of measures taken to resolve the problem
- remove the monkey patching of Django's 'get_commands' method in pyi_rth_django.py
- modify the collect static code to have a boolean input parameter 'log' which when the relevant calls to logging within this function are wrapped in a conditional will serve to prevent logs being inappropriately raised.
--- END ISSUE ---
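
The second measure listed above (guarding the logging inside submodule collection behind a flag) could look roughly like the sketch below; the parameter name `log` comes from the issue text, but the function body here is an illustrative assumption rather than PyInstaller's actual implementation.

```python
# Illustrative sketch only: wraps logging in a conditional so that callers which
# evaluate hidden imports (and capture STDOUT) can silence collection messages.
import logging

logger = logging.getLogger(__name__)

def collect_submodules(package, log=True):
    submodules = [package]  # placeholder for the real package walk
    if log:
        logger.info("collect_submodules: %d modules found for %r", len(submodules), package)
    return submodules

# A caller running inside eval_script-style evaluation would pass log=False.
hidden_imports = collect_submodules("django.contrib.admin", log=False)
```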
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `PyInstaller/hooks/rthooks/pyi_rth_django.py`
Content:
```
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2005-2022, PyInstaller Development Team.
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 #
7 # The full license is in the file COPYING.txt, distributed with this software.
8 #
9 # SPDX-License-Identifier: Apache-2.0
10 #-----------------------------------------------------------------------------
11
12 # This Django rthook was tested with Django 1.8.3.
13
14 import django.core.management
15 import django.utils.autoreload
16
17
18 def _get_commands():
19 # Django groupss commands by app. This returns static dict() as it is for django 1.8 and the default project.
20 commands = {
21 'changepassword': 'django.contrib.auth',
22 'check': 'django.core',
23 'clearsessions': 'django.contrib.sessions',
24 'collectstatic': 'django.contrib.staticfiles',
25 'compilemessages': 'django.core',
26 'createcachetable': 'django.core',
27 'createsuperuser': 'django.contrib.auth',
28 'dbshell': 'django.core',
29 'diffsettings': 'django.core',
30 'dumpdata': 'django.core',
31 'findstatic': 'django.contrib.staticfiles',
32 'flush': 'django.core',
33 'inspectdb': 'django.core',
34 'loaddata': 'django.core',
35 'makemessages': 'django.core',
36 'makemigrations': 'django.core',
37 'migrate': 'django.core',
38 'runfcgi': 'django.core',
39 'runserver': 'django.core',
40 'shell': 'django.core',
41 'showmigrations': 'django.core',
42 'sql': 'django.core',
43 'sqlall': 'django.core',
44 'sqlclear': 'django.core',
45 'sqlcustom': 'django.core',
46 'sqldropindexes': 'django.core',
47 'sqlflush': 'django.core',
48 'sqlindexes': 'django.core',
49 'sqlmigrate': 'django.core',
50 'sqlsequencereset': 'django.core',
51 'squashmigrations': 'django.core',
52 'startapp': 'django.core',
53 'startproject': 'django.core',
54 'syncdb': 'django.core',
55 'test': 'django.core',
56 'testserver': 'django.core',
57 'validate': 'django.core'
58 }
59 return commands
60
61
62 _old_restart_with_reloader = django.utils.autoreload.restart_with_reloader
63
64
65 def _restart_with_reloader(*args):
66 import sys
67 a0 = sys.argv.pop(0)
68 try:
69 return _old_restart_with_reloader(*args)
70 finally:
71 sys.argv.insert(0, a0)
72
73
74 # Override get_commands() function otherwise the app will complain that there are no commands.
75 django.core.management.get_commands = _get_commands
76 # Override restart_with_reloader() function, otherwise the app might complain that some commands do not exist;
77 # e.g., runserver.
78 django.utils.autoreload.restart_with_reloader = _restart_with_reloader
79
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/PyInstaller/hooks/rthooks/pyi_rth_django.py b/PyInstaller/hooks/rthooks/pyi_rth_django.py
--- a/PyInstaller/hooks/rthooks/pyi_rth_django.py
+++ b/PyInstaller/hooks/rthooks/pyi_rth_django.py
@@ -11,54 +11,8 @@
# This Django rthook was tested with Django 1.8.3.
-import django.core.management
import django.utils.autoreload
-
-def _get_commands():
- # Django groupss commands by app. This returns static dict() as it is for django 1.8 and the default project.
- commands = {
- 'changepassword': 'django.contrib.auth',
- 'check': 'django.core',
- 'clearsessions': 'django.contrib.sessions',
- 'collectstatic': 'django.contrib.staticfiles',
- 'compilemessages': 'django.core',
- 'createcachetable': 'django.core',
- 'createsuperuser': 'django.contrib.auth',
- 'dbshell': 'django.core',
- 'diffsettings': 'django.core',
- 'dumpdata': 'django.core',
- 'findstatic': 'django.contrib.staticfiles',
- 'flush': 'django.core',
- 'inspectdb': 'django.core',
- 'loaddata': 'django.core',
- 'makemessages': 'django.core',
- 'makemigrations': 'django.core',
- 'migrate': 'django.core',
- 'runfcgi': 'django.core',
- 'runserver': 'django.core',
- 'shell': 'django.core',
- 'showmigrations': 'django.core',
- 'sql': 'django.core',
- 'sqlall': 'django.core',
- 'sqlclear': 'django.core',
- 'sqlcustom': 'django.core',
- 'sqldropindexes': 'django.core',
- 'sqlflush': 'django.core',
- 'sqlindexes': 'django.core',
- 'sqlmigrate': 'django.core',
- 'sqlsequencereset': 'django.core',
- 'squashmigrations': 'django.core',
- 'startapp': 'django.core',
- 'startproject': 'django.core',
- 'syncdb': 'django.core',
- 'test': 'django.core',
- 'testserver': 'django.core',
- 'validate': 'django.core'
- }
- return commands
-
-
_old_restart_with_reloader = django.utils.autoreload.restart_with_reloader
@@ -71,8 +25,6 @@
sys.argv.insert(0, a0)
-# Override get_commands() function otherwise the app will complain that there are no commands.
-django.core.management.get_commands = _get_commands
# Override restart_with_reloader() function, otherwise the app might complain that some commands do not exist;
# e.g., runserver.
django.utils.autoreload.restart_with_reloader = _restart_with_reloader
|
{"golden_diff": "diff --git a/PyInstaller/hooks/rthooks/pyi_rth_django.py b/PyInstaller/hooks/rthooks/pyi_rth_django.py\n--- a/PyInstaller/hooks/rthooks/pyi_rth_django.py\n+++ b/PyInstaller/hooks/rthooks/pyi_rth_django.py\n@@ -11,54 +11,8 @@\n \n # This Django rthook was tested with Django 1.8.3.\n \n-import django.core.management\n import django.utils.autoreload\n \n-\n-def _get_commands():\n- # Django groupss commands by app. This returns static dict() as it is for django 1.8 and the default project.\n- commands = {\n- 'changepassword': 'django.contrib.auth',\n- 'check': 'django.core',\n- 'clearsessions': 'django.contrib.sessions',\n- 'collectstatic': 'django.contrib.staticfiles',\n- 'compilemessages': 'django.core',\n- 'createcachetable': 'django.core',\n- 'createsuperuser': 'django.contrib.auth',\n- 'dbshell': 'django.core',\n- 'diffsettings': 'django.core',\n- 'dumpdata': 'django.core',\n- 'findstatic': 'django.contrib.staticfiles',\n- 'flush': 'django.core',\n- 'inspectdb': 'django.core',\n- 'loaddata': 'django.core',\n- 'makemessages': 'django.core',\n- 'makemigrations': 'django.core',\n- 'migrate': 'django.core',\n- 'runfcgi': 'django.core',\n- 'runserver': 'django.core',\n- 'shell': 'django.core',\n- 'showmigrations': 'django.core',\n- 'sql': 'django.core',\n- 'sqlall': 'django.core',\n- 'sqlclear': 'django.core',\n- 'sqlcustom': 'django.core',\n- 'sqldropindexes': 'django.core',\n- 'sqlflush': 'django.core',\n- 'sqlindexes': 'django.core',\n- 'sqlmigrate': 'django.core',\n- 'sqlsequencereset': 'django.core',\n- 'squashmigrations': 'django.core',\n- 'startapp': 'django.core',\n- 'startproject': 'django.core',\n- 'syncdb': 'django.core',\n- 'test': 'django.core',\n- 'testserver': 'django.core',\n- 'validate': 'django.core'\n- }\n- return commands\n-\n-\n _old_restart_with_reloader = django.utils.autoreload.restart_with_reloader\n \n \n@@ -71,8 +25,6 @@\n sys.argv.insert(0, a0)\n \n \n-# Override get_commands() function otherwise the app will complain that there are no commands.\n-django.core.management.get_commands = _get_commands\n # Override restart_with_reloader() function, otherwise the app might complain that some commands do not exist;\n # e.g., runserver.\n django.utils.autoreload.restart_with_reloader = _restart_with_reloader\n", "issue": "Fixes for use of pyinstaller with Django 4.x and custom management commands.\nPROBLEM:\r\nThis feature aims to solve the problem of the custom app level management commands being missed out from hidden imports alongside issues with imports of apps listed within INSTALLED_APPS failing due to erroneous execution of 'eval_script' function. Specifically when the hidden imports of the INSTALLED_APPS are evaluated the logging outputs generated by 'collect_submodules' when called in django_import_finder.py are captured in the STDOUT regardless of the --log-level. Also any additional management commands provided by one of the INSTALLED_APPS are ignored as the 'get_commands' function has a hardcoded referenced to Django 1.8 command set. Django's currently implementation of command collection will not complain of missing commands at runtime thereby rendering the patch of this function that is currently implemented irrelevant.\r\n\r\nSOLUTION:\r\nThe solution to this issue is to remove several redundant parts of the code alongside adding additional overrides for decluttering STDOUT. 
\r\n\r\nThe following is a list of measures taken to resolve the problem\r\n- remove the monkey patching of Django's 'get_commands' method in pyi_rth_django.py\r\n- modify the collect static code to have a boolean input parameter 'log' which when the relevant calls to logging within this function are wrapped in a conditional will serve to prevent logs being inappropriately raised.\r\n\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2005-2022, PyInstaller Development Team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#\n# SPDX-License-Identifier: Apache-2.0\n#-----------------------------------------------------------------------------\n\n# This Django rthook was tested with Django 1.8.3.\n\nimport django.core.management\nimport django.utils.autoreload\n\n\ndef _get_commands():\n # Django groupss commands by app. This returns static dict() as it is for django 1.8 and the default project.\n commands = {\n 'changepassword': 'django.contrib.auth',\n 'check': 'django.core',\n 'clearsessions': 'django.contrib.sessions',\n 'collectstatic': 'django.contrib.staticfiles',\n 'compilemessages': 'django.core',\n 'createcachetable': 'django.core',\n 'createsuperuser': 'django.contrib.auth',\n 'dbshell': 'django.core',\n 'diffsettings': 'django.core',\n 'dumpdata': 'django.core',\n 'findstatic': 'django.contrib.staticfiles',\n 'flush': 'django.core',\n 'inspectdb': 'django.core',\n 'loaddata': 'django.core',\n 'makemessages': 'django.core',\n 'makemigrations': 'django.core',\n 'migrate': 'django.core',\n 'runfcgi': 'django.core',\n 'runserver': 'django.core',\n 'shell': 'django.core',\n 'showmigrations': 'django.core',\n 'sql': 'django.core',\n 'sqlall': 'django.core',\n 'sqlclear': 'django.core',\n 'sqlcustom': 'django.core',\n 'sqldropindexes': 'django.core',\n 'sqlflush': 'django.core',\n 'sqlindexes': 'django.core',\n 'sqlmigrate': 'django.core',\n 'sqlsequencereset': 'django.core',\n 'squashmigrations': 'django.core',\n 'startapp': 'django.core',\n 'startproject': 'django.core',\n 'syncdb': 'django.core',\n 'test': 'django.core',\n 'testserver': 'django.core',\n 'validate': 'django.core'\n }\n return commands\n\n\n_old_restart_with_reloader = django.utils.autoreload.restart_with_reloader\n\n\ndef _restart_with_reloader(*args):\n import sys\n a0 = sys.argv.pop(0)\n try:\n return _old_restart_with_reloader(*args)\n finally:\n sys.argv.insert(0, a0)\n\n\n# Override get_commands() function otherwise the app will complain that there are no commands.\ndjango.core.management.get_commands = _get_commands\n# Override restart_with_reloader() function, otherwise the app might complain that some commands do not exist;\n# e.g., runserver.\ndjango.utils.autoreload.restart_with_reloader = _restart_with_reloader\n", "path": "PyInstaller/hooks/rthooks/pyi_rth_django.py"}], "after_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2005-2022, PyInstaller Development Team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#\n# SPDX-License-Identifier: 
Apache-2.0\n#-----------------------------------------------------------------------------\n\n# This Django rthook was tested with Django 1.8.3.\n\nimport django.utils.autoreload\n\n_old_restart_with_reloader = django.utils.autoreload.restart_with_reloader\n\n\ndef _restart_with_reloader(*args):\n import sys\n a0 = sys.argv.pop(0)\n try:\n return _old_restart_with_reloader(*args)\n finally:\n sys.argv.insert(0, a0)\n\n\n# Override restart_with_reloader() function, otherwise the app might complain that some commands do not exist;\n# e.g., runserver.\ndjango.utils.autoreload.restart_with_reloader = _restart_with_reloader\n", "path": "PyInstaller/hooks/rthooks/pyi_rth_django.py"}]}
| 1,349 | 663 |
gh_patches_debug_24655
|
rasdani/github-patches
|
git_diff
|
open-telemetry__opentelemetry-python-3217
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Make entry_points behave the same across Python versions
The recently introduced `entry_points` function does not behave the same across Python versions and it is not possible to get all entry points in Python 3.8 and 3.9.
--- END ISSUE ---
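
As background for the issue above, the stdlib API genuinely differs across interpreter versions, which is roughly the behaviour the shim tries to paper over; the sketch below is a simplified illustration, not OpenTelemetry code.

```python
# On Python 3.8/3.9 the stdlib entry_points() takes no arguments and returns a
# mapping keyed by group name; on Python 3.10+ (and in the importlib_metadata
# backport) it accepts group/name selection keywords.
import sys
from importlib.metadata import entry_points

if sys.version_info < (3, 10):
    eps = entry_points()  # dict-like: {"console_scripts": (EntryPoint, ...), ...}
    console_scripts = eps.get("console_scripts", ())
else:
    console_scripts = entry_points(group="console_scripts")

print([ep.name for ep in console_scripts])
```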
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `opentelemetry-api/src/opentelemetry/util/_importlib_metadata.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from sys import version_info
16
17 # FIXME remove this when support for 3.7 is dropped.
18 if version_info.minor == 7:
19 # pylint: disable=import-error
20 from importlib_metadata import entry_points, version # type: ignore
21
22 # FIXME remove this file when support for 3.9 is dropped.
23 elif version_info.minor in (8, 9):
24 # pylint: disable=import-error
25 from importlib.metadata import (
26 entry_points as importlib_metadata_entry_points,
27 )
28 from importlib.metadata import version
29
30 def entry_points(group: str, name: str): # type: ignore
31 for entry_point in importlib_metadata_entry_points()[group]:
32 if entry_point.name == name:
33 yield entry_point
34
35 else:
36 from importlib.metadata import entry_points, version
37
38 __all__ = ["entry_points", "version"]
39
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/opentelemetry-api/src/opentelemetry/util/_importlib_metadata.py b/opentelemetry-api/src/opentelemetry/util/_importlib_metadata.py
--- a/opentelemetry-api/src/opentelemetry/util/_importlib_metadata.py
+++ b/opentelemetry-api/src/opentelemetry/util/_importlib_metadata.py
@@ -12,27 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from sys import version_info
+# FIXME: Use importlib.metadata when support for 3.11 is dropped if the rest of
+# the supported versions at that time have the same API.
+from importlib_metadata import ( # type: ignore
+ EntryPoint,
+ EntryPoints,
+ entry_points,
+ version,
+)
-# FIXME remove this when support for 3.7 is dropped.
-if version_info.minor == 7:
- # pylint: disable=import-error
- from importlib_metadata import entry_points, version # type: ignore
+# The importlib-metadata library has introduced breaking changes before to its
+# API, this module is kept just to act as a layer between the
+# importlib-metadata library and our project if in any case it is necessary to
+# do so.
-# FIXME remove this file when support for 3.9 is dropped.
-elif version_info.minor in (8, 9):
- # pylint: disable=import-error
- from importlib.metadata import (
- entry_points as importlib_metadata_entry_points,
- )
- from importlib.metadata import version
-
- def entry_points(group: str, name: str): # type: ignore
- for entry_point in importlib_metadata_entry_points()[group]:
- if entry_point.name == name:
- yield entry_point
-
-else:
- from importlib.metadata import entry_points, version
-
-__all__ = ["entry_points", "version"]
+__all__ = ["entry_points", "version", "EntryPoint", "EntryPoints"]
|
{"golden_diff": "diff --git a/opentelemetry-api/src/opentelemetry/util/_importlib_metadata.py b/opentelemetry-api/src/opentelemetry/util/_importlib_metadata.py\n--- a/opentelemetry-api/src/opentelemetry/util/_importlib_metadata.py\n+++ b/opentelemetry-api/src/opentelemetry/util/_importlib_metadata.py\n@@ -12,27 +12,18 @@\n # See the License for the specific language governing permissions and\n # limitations under the License.\n \n-from sys import version_info\n+# FIXME: Use importlib.metadata when support for 3.11 is dropped if the rest of\n+# the supported versions at that time have the same API.\n+from importlib_metadata import ( # type: ignore\n+ EntryPoint,\n+ EntryPoints,\n+ entry_points,\n+ version,\n+)\n \n-# FIXME remove this when support for 3.7 is dropped.\n-if version_info.minor == 7:\n- # pylint: disable=import-error\n- from importlib_metadata import entry_points, version # type: ignore\n+# The importlib-metadata library has introduced breaking changes before to its\n+# API, this module is kept just to act as a layer between the\n+# importlib-metadata library and our project if in any case it is necessary to\n+# do so.\n \n-# FIXME remove this file when support for 3.9 is dropped.\n-elif version_info.minor in (8, 9):\n- # pylint: disable=import-error\n- from importlib.metadata import (\n- entry_points as importlib_metadata_entry_points,\n- )\n- from importlib.metadata import version\n-\n- def entry_points(group: str, name: str): # type: ignore\n- for entry_point in importlib_metadata_entry_points()[group]:\n- if entry_point.name == name:\n- yield entry_point\n-\n-else:\n- from importlib.metadata import entry_points, version\n-\n-__all__ = [\"entry_points\", \"version\"]\n+__all__ = [\"entry_points\", \"version\", \"EntryPoint\", \"EntryPoints\"]\n", "issue": "Make entry_points behave the same across Python versions\nThe recently introduced `entry_points` function does not behave the same across Python versions and it is not possible to get all entry points in Python 3.8 and 3.9.\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom sys import version_info\n\n# FIXME remove this when support for 3.7 is dropped.\nif version_info.minor == 7:\n # pylint: disable=import-error\n from importlib_metadata import entry_points, version # type: ignore\n\n# FIXME remove this file when support for 3.9 is dropped.\nelif version_info.minor in (8, 9):\n # pylint: disable=import-error\n from importlib.metadata import (\n entry_points as importlib_metadata_entry_points,\n )\n from importlib.metadata import version\n\n def entry_points(group: str, name: str): # type: ignore\n for entry_point in importlib_metadata_entry_points()[group]:\n if entry_point.name == name:\n yield entry_point\n\nelse:\n from importlib.metadata import entry_points, version\n\n__all__ = [\"entry_points\", \"version\"]\n", "path": "opentelemetry-api/src/opentelemetry/util/_importlib_metadata.py"}], "after_files": [{"content": "# Copyright The 
OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# FIXME: Use importlib.metadata when support for 3.11 is dropped if the rest of\n# the supported versions at that time have the same API.\nfrom importlib_metadata import ( # type: ignore\n EntryPoint,\n EntryPoints,\n entry_points,\n version,\n)\n\n# The importlib-metadata library has introduced breaking changes before to its\n# API, this module is kept just to act as a layer between the\n# importlib-metadata library and our project if in any case it is necessary to\n# do so.\n\n__all__ = [\"entry_points\", \"version\", \"EntryPoint\", \"EntryPoints\"]\n", "path": "opentelemetry-api/src/opentelemetry/util/_importlib_metadata.py"}]}
| 704 | 445 |
gh_patches_debug_8511
|
rasdani/github-patches
|
git_diff
|
scalableminds__webknossos-libs-1067
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
upload command on windows: backslashes on server, invalid dataset
A user created a valid dataset on a windows machine with the `webknossos convert` command, then called `webknossos upload` with a valid token. The upload went through, but the directory structure got lost: the files on the server had backslashes in the paths, like `'color\2-2-1\z0\y7\x1.wkw'`. Instead, when sending files to upload, the client should always replace the client’s path separator by `/`.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `webknossos/webknossos/client/_upload_dataset.py`
Content:
```
1 import os
2 import warnings
3 from functools import lru_cache
4 from pathlib import Path
5 from tempfile import TemporaryDirectory
6 from time import gmtime, strftime
7 from typing import Iterator, List, NamedTuple, Optional, Tuple
8 from uuid import uuid4
9
10 import httpx
11
12 from ..dataset import Dataset, Layer, RemoteDataset
13 from ..utils import get_rich_progress
14 from ._resumable import Resumable
15 from .api_client.models import (
16 ApiDatasetUploadInformation,
17 ApiLinkedLayerIdentifier,
18 ApiReserveDatasetUploadInformation,
19 )
20 from .context import _get_context, _WebknossosContext
21
22 DEFAULT_SIMULTANEOUS_UPLOADS = 5
23 MAXIMUM_RETRY_COUNT = 4
24
25
26 class LayerToLink(NamedTuple):
27 dataset_name: str
28 layer_name: str
29 new_layer_name: Optional[str] = None
30 organization_id: Optional[str] = (
31 None # defaults to the user's organization before uploading
32 )
33
34 @classmethod
35 def from_remote_layer(
36 cls,
37 layer: Layer,
38 new_layer_name: Optional[str] = None,
39 organization_id: Optional[str] = None,
40 ) -> "LayerToLink":
41 ds = layer.dataset
42 assert isinstance(
43 ds, RemoteDataset
44 ), f"The passed layer must belong to a RemoteDataset, but belongs to {ds}"
45 return cls(ds._dataset_name, layer.name, new_layer_name, organization_id)
46
47 def as_api_linked_layer_identifier(self) -> ApiLinkedLayerIdentifier:
48 context = _get_context()
49 return ApiLinkedLayerIdentifier(
50 self.organization_id or context.organization_id,
51 self.dataset_name,
52 self.layer_name,
53 self.new_layer_name,
54 )
55
56
57 @lru_cache(maxsize=None)
58 def _cached_get_upload_datastore(context: _WebknossosContext) -> str:
59 datastores = context.api_client_with_auth.datastore_list()
60 for datastore in datastores:
61 if datastore.allows_upload:
62 return datastore.url
63 raise ValueError("No datastore found where datasets can be uploaded.")
64
65
66 def _walk(
67 path: Path,
68 base_path: Optional[Path] = None,
69 ) -> Iterator[Tuple[Path, Path, int]]:
70 if base_path is None:
71 base_path = path
72 if path.is_dir():
73 for p in path.iterdir():
74 yield from _walk(p, base_path)
75 else:
76 yield (path.resolve(), path.relative_to(base_path), path.stat().st_size)
77
78
79 def upload_dataset(
80 dataset: Dataset,
81 new_dataset_name: Optional[str] = None,
82 layers_to_link: Optional[List[LayerToLink]] = None,
83 jobs: Optional[int] = None,
84 ) -> str:
85 if new_dataset_name is None:
86 new_dataset_name = dataset.name
87 if layers_to_link is None:
88 layers_to_link = []
89 context = _get_context()
90 layer_names_to_link = set(i.new_layer_name or i.layer_name for i in layers_to_link)
91 if len(layer_names_to_link.intersection(dataset.layers.keys())) > 0:
92 warnings.warn(
93 "[INFO] Excluding the following layers from upload, since they will be linked: "
94 + f"{layer_names_to_link.intersection(dataset.layers.keys())}"
95 )
96 with TemporaryDirectory() as tmpdir:
97 tmp_ds = dataset.shallow_copy_dataset(
98 tmpdir, name=dataset.name, layers_to_ignore=layer_names_to_link
99 )
100 return upload_dataset(
101 tmp_ds,
102 new_dataset_name=new_dataset_name,
103 layers_to_link=layers_to_link,
104 jobs=jobs,
105 )
106
107 file_infos = list(_walk(dataset.path))
108 total_file_size = sum(size for _, _, size in file_infos)
109 # replicates https://github.com/scalableminds/webknossos/blob/master/frontend/javascripts/admin/dataset/dataset_upload_view.js
110 time_str = strftime("%Y-%m-%dT%H-%M-%S", gmtime())
111 upload_id = f"{time_str}__{uuid4()}"
112 datastore_token = context.datastore_required_token
113 datastore_url = _cached_get_upload_datastore(context)
114 datastore_api_client = context.get_datastore_api_client(datastore_url)
115 simultaneous_uploads = jobs if jobs is not None else DEFAULT_SIMULTANEOUS_UPLOADS
116 if "PYTEST_CURRENT_TEST" in os.environ:
117 simultaneous_uploads = 1
118 is_valid_new_name_response = context.api_client_with_auth.dataset_is_valid_new_name(
119 context.organization_id, new_dataset_name
120 )
121 if not is_valid_new_name_response.is_valid:
122 problems_str = ""
123 if is_valid_new_name_response.errors is not None:
124 problems_str = f" Problems: {is_valid_new_name_response.errors}"
125 raise Exception(
126 f"Dataset name {context.organization_id}/{new_dataset_name} is not a valid new dataset name.{problems_str}"
127 )
128
129 datastore_api_client.dataset_reserve_upload(
130 ApiReserveDatasetUploadInformation(
131 upload_id,
132 new_dataset_name,
133 context.organization_id,
134 total_file_count=len(file_infos),
135 layers_to_link=[
136 layer.as_api_linked_layer_identifier() for layer in layers_to_link
137 ],
138 folder_id=None,
139 initial_teams=[],
140 ),
141 token=datastore_token,
142 retry_count=MAXIMUM_RETRY_COUNT,
143 )
144 with get_rich_progress() as progress:
145 with Resumable(
146 f"{datastore_url}/data/datasets?token={datastore_token}",
147 simultaneous_uploads=simultaneous_uploads,
148 query={
149 "owningOrganization": context.organization_id,
150 "name": new_dataset_name,
151 "totalFileCount": len(file_infos),
152 },
153 chunk_size=100 * 1024 * 1024, # 100 MiB
154 generate_unique_identifier=lambda _,
155 relative_path: f"{upload_id}/{relative_path}",
156 test_chunks=False,
157 permanent_errors=[400, 403, 404, 409, 415, 500, 501],
158 client=httpx.Client(timeout=None),
159 ) as session:
160 progress_task = progress.add_task("Dataset Upload", total=total_file_size)
161 for file_path, relative_path, _ in file_infos:
162 resumable_file = session.add_file(file_path, relative_path)
163 resumable_file.chunk_completed.register(
164 lambda chunk: progress.advance(progress_task, chunk.size)
165 )
166
167 datastore_api_client.dataset_finish_upload(
168 ApiDatasetUploadInformation(upload_id),
169 datastore_token,
170 retry_count=MAXIMUM_RETRY_COUNT,
171 )
172
173 return new_dataset_name
174
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/webknossos/webknossos/client/_upload_dataset.py b/webknossos/webknossos/client/_upload_dataset.py
--- a/webknossos/webknossos/client/_upload_dataset.py
+++ b/webknossos/webknossos/client/_upload_dataset.py
@@ -152,7 +152,7 @@
},
chunk_size=100 * 1024 * 1024, # 100 MiB
generate_unique_identifier=lambda _,
- relative_path: f"{upload_id}/{relative_path}",
+ relative_path: f"{upload_id}/{relative_path.as_posix()}",
test_chunks=False,
permanent_errors=[400, 403, 404, 409, 415, 500, 501],
client=httpx.Client(timeout=None),
|
{"golden_diff": "diff --git a/webknossos/webknossos/client/_upload_dataset.py b/webknossos/webknossos/client/_upload_dataset.py\n--- a/webknossos/webknossos/client/_upload_dataset.py\n+++ b/webknossos/webknossos/client/_upload_dataset.py\n@@ -152,7 +152,7 @@\n },\n chunk_size=100 * 1024 * 1024, # 100 MiB\n generate_unique_identifier=lambda _,\n- relative_path: f\"{upload_id}/{relative_path}\",\n+ relative_path: f\"{upload_id}/{relative_path.as_posix()}\",\n test_chunks=False,\n permanent_errors=[400, 403, 404, 409, 415, 500, 501],\n client=httpx.Client(timeout=None),\n", "issue": "upload command on windows: backslashes on server, invalid dataset\nA user created a valid dataset on a windows machine with the `webknossos convert` command, then called `webknossos upload` with a valid token. The upload went through, but the directory structure got lost: the files on the server had backslashes in the paths, like `'color\\2-2-1\\z0\\y7\\x1.wkw'`. Instead, when sending files to upload, the client should always replace the client\u2019s path separator by `/`.\n", "before_files": [{"content": "import os\nimport warnings\nfrom functools import lru_cache\nfrom pathlib import Path\nfrom tempfile import TemporaryDirectory\nfrom time import gmtime, strftime\nfrom typing import Iterator, List, NamedTuple, Optional, Tuple\nfrom uuid import uuid4\n\nimport httpx\n\nfrom ..dataset import Dataset, Layer, RemoteDataset\nfrom ..utils import get_rich_progress\nfrom ._resumable import Resumable\nfrom .api_client.models import (\n ApiDatasetUploadInformation,\n ApiLinkedLayerIdentifier,\n ApiReserveDatasetUploadInformation,\n)\nfrom .context import _get_context, _WebknossosContext\n\nDEFAULT_SIMULTANEOUS_UPLOADS = 5\nMAXIMUM_RETRY_COUNT = 4\n\n\nclass LayerToLink(NamedTuple):\n dataset_name: str\n layer_name: str\n new_layer_name: Optional[str] = None\n organization_id: Optional[str] = (\n None # defaults to the user's organization before uploading\n )\n\n @classmethod\n def from_remote_layer(\n cls,\n layer: Layer,\n new_layer_name: Optional[str] = None,\n organization_id: Optional[str] = None,\n ) -> \"LayerToLink\":\n ds = layer.dataset\n assert isinstance(\n ds, RemoteDataset\n ), f\"The passed layer must belong to a RemoteDataset, but belongs to {ds}\"\n return cls(ds._dataset_name, layer.name, new_layer_name, organization_id)\n\n def as_api_linked_layer_identifier(self) -> ApiLinkedLayerIdentifier:\n context = _get_context()\n return ApiLinkedLayerIdentifier(\n self.organization_id or context.organization_id,\n self.dataset_name,\n self.layer_name,\n self.new_layer_name,\n )\n\n\n@lru_cache(maxsize=None)\ndef _cached_get_upload_datastore(context: _WebknossosContext) -> str:\n datastores = context.api_client_with_auth.datastore_list()\n for datastore in datastores:\n if datastore.allows_upload:\n return datastore.url\n raise ValueError(\"No datastore found where datasets can be uploaded.\")\n\n\ndef _walk(\n path: Path,\n base_path: Optional[Path] = None,\n) -> Iterator[Tuple[Path, Path, int]]:\n if base_path is None:\n base_path = path\n if path.is_dir():\n for p in path.iterdir():\n yield from _walk(p, base_path)\n else:\n yield (path.resolve(), path.relative_to(base_path), path.stat().st_size)\n\n\ndef upload_dataset(\n dataset: Dataset,\n new_dataset_name: Optional[str] = None,\n layers_to_link: Optional[List[LayerToLink]] = None,\n jobs: Optional[int] = None,\n) -> str:\n if new_dataset_name is None:\n new_dataset_name = dataset.name\n if layers_to_link is None:\n layers_to_link = []\n context = 
_get_context()\n layer_names_to_link = set(i.new_layer_name or i.layer_name for i in layers_to_link)\n if len(layer_names_to_link.intersection(dataset.layers.keys())) > 0:\n warnings.warn(\n \"[INFO] Excluding the following layers from upload, since they will be linked: \"\n + f\"{layer_names_to_link.intersection(dataset.layers.keys())}\"\n )\n with TemporaryDirectory() as tmpdir:\n tmp_ds = dataset.shallow_copy_dataset(\n tmpdir, name=dataset.name, layers_to_ignore=layer_names_to_link\n )\n return upload_dataset(\n tmp_ds,\n new_dataset_name=new_dataset_name,\n layers_to_link=layers_to_link,\n jobs=jobs,\n )\n\n file_infos = list(_walk(dataset.path))\n total_file_size = sum(size for _, _, size in file_infos)\n # replicates https://github.com/scalableminds/webknossos/blob/master/frontend/javascripts/admin/dataset/dataset_upload_view.js\n time_str = strftime(\"%Y-%m-%dT%H-%M-%S\", gmtime())\n upload_id = f\"{time_str}__{uuid4()}\"\n datastore_token = context.datastore_required_token\n datastore_url = _cached_get_upload_datastore(context)\n datastore_api_client = context.get_datastore_api_client(datastore_url)\n simultaneous_uploads = jobs if jobs is not None else DEFAULT_SIMULTANEOUS_UPLOADS\n if \"PYTEST_CURRENT_TEST\" in os.environ:\n simultaneous_uploads = 1\n is_valid_new_name_response = context.api_client_with_auth.dataset_is_valid_new_name(\n context.organization_id, new_dataset_name\n )\n if not is_valid_new_name_response.is_valid:\n problems_str = \"\"\n if is_valid_new_name_response.errors is not None:\n problems_str = f\" Problems: {is_valid_new_name_response.errors}\"\n raise Exception(\n f\"Dataset name {context.organization_id}/{new_dataset_name} is not a valid new dataset name.{problems_str}\"\n )\n\n datastore_api_client.dataset_reserve_upload(\n ApiReserveDatasetUploadInformation(\n upload_id,\n new_dataset_name,\n context.organization_id,\n total_file_count=len(file_infos),\n layers_to_link=[\n layer.as_api_linked_layer_identifier() for layer in layers_to_link\n ],\n folder_id=None,\n initial_teams=[],\n ),\n token=datastore_token,\n retry_count=MAXIMUM_RETRY_COUNT,\n )\n with get_rich_progress() as progress:\n with Resumable(\n f\"{datastore_url}/data/datasets?token={datastore_token}\",\n simultaneous_uploads=simultaneous_uploads,\n query={\n \"owningOrganization\": context.organization_id,\n \"name\": new_dataset_name,\n \"totalFileCount\": len(file_infos),\n },\n chunk_size=100 * 1024 * 1024, # 100 MiB\n generate_unique_identifier=lambda _,\n relative_path: f\"{upload_id}/{relative_path}\",\n test_chunks=False,\n permanent_errors=[400, 403, 404, 409, 415, 500, 501],\n client=httpx.Client(timeout=None),\n ) as session:\n progress_task = progress.add_task(\"Dataset Upload\", total=total_file_size)\n for file_path, relative_path, _ in file_infos:\n resumable_file = session.add_file(file_path, relative_path)\n resumable_file.chunk_completed.register(\n lambda chunk: progress.advance(progress_task, chunk.size)\n )\n\n datastore_api_client.dataset_finish_upload(\n ApiDatasetUploadInformation(upload_id),\n datastore_token,\n retry_count=MAXIMUM_RETRY_COUNT,\n )\n\n return new_dataset_name\n", "path": "webknossos/webknossos/client/_upload_dataset.py"}], "after_files": [{"content": "import os\nimport warnings\nfrom functools import lru_cache\nfrom pathlib import Path\nfrom tempfile import TemporaryDirectory\nfrom time import gmtime, strftime\nfrom typing import Iterator, List, NamedTuple, Optional, Tuple\nfrom uuid import uuid4\n\nimport httpx\n\nfrom ..dataset import Dataset, 
Layer, RemoteDataset\nfrom ..utils import get_rich_progress\nfrom ._resumable import Resumable\nfrom .api_client.models import (\n ApiDatasetUploadInformation,\n ApiLinkedLayerIdentifier,\n ApiReserveDatasetUploadInformation,\n)\nfrom .context import _get_context, _WebknossosContext\n\nDEFAULT_SIMULTANEOUS_UPLOADS = 5\nMAXIMUM_RETRY_COUNT = 4\n\n\nclass LayerToLink(NamedTuple):\n dataset_name: str\n layer_name: str\n new_layer_name: Optional[str] = None\n organization_id: Optional[str] = (\n None # defaults to the user's organization before uploading\n )\n\n @classmethod\n def from_remote_layer(\n cls,\n layer: Layer,\n new_layer_name: Optional[str] = None,\n organization_id: Optional[str] = None,\n ) -> \"LayerToLink\":\n ds = layer.dataset\n assert isinstance(\n ds, RemoteDataset\n ), f\"The passed layer must belong to a RemoteDataset, but belongs to {ds}\"\n return cls(ds._dataset_name, layer.name, new_layer_name, organization_id)\n\n def as_api_linked_layer_identifier(self) -> ApiLinkedLayerIdentifier:\n context = _get_context()\n return ApiLinkedLayerIdentifier(\n self.organization_id or context.organization_id,\n self.dataset_name,\n self.layer_name,\n self.new_layer_name,\n )\n\n\n@lru_cache(maxsize=None)\ndef _cached_get_upload_datastore(context: _WebknossosContext) -> str:\n datastores = context.api_client_with_auth.datastore_list()\n for datastore in datastores:\n if datastore.allows_upload:\n return datastore.url\n raise ValueError(\"No datastore found where datasets can be uploaded.\")\n\n\ndef _walk(\n path: Path,\n base_path: Optional[Path] = None,\n) -> Iterator[Tuple[Path, Path, int]]:\n if base_path is None:\n base_path = path\n if path.is_dir():\n for p in path.iterdir():\n yield from _walk(p, base_path)\n else:\n yield (path.resolve(), path.relative_to(base_path), path.stat().st_size)\n\n\ndef upload_dataset(\n dataset: Dataset,\n new_dataset_name: Optional[str] = None,\n layers_to_link: Optional[List[LayerToLink]] = None,\n jobs: Optional[int] = None,\n) -> str:\n if new_dataset_name is None:\n new_dataset_name = dataset.name\n if layers_to_link is None:\n layers_to_link = []\n context = _get_context()\n layer_names_to_link = set(i.new_layer_name or i.layer_name for i in layers_to_link)\n if len(layer_names_to_link.intersection(dataset.layers.keys())) > 0:\n warnings.warn(\n \"[INFO] Excluding the following layers from upload, since they will be linked: \"\n + f\"{layer_names_to_link.intersection(dataset.layers.keys())}\"\n )\n with TemporaryDirectory() as tmpdir:\n tmp_ds = dataset.shallow_copy_dataset(\n tmpdir, name=dataset.name, layers_to_ignore=layer_names_to_link\n )\n return upload_dataset(\n tmp_ds,\n new_dataset_name=new_dataset_name,\n layers_to_link=layers_to_link,\n jobs=jobs,\n )\n\n file_infos = list(_walk(dataset.path))\n total_file_size = sum(size for _, _, size in file_infos)\n # replicates https://github.com/scalableminds/webknossos/blob/master/frontend/javascripts/admin/dataset/dataset_upload_view.js\n time_str = strftime(\"%Y-%m-%dT%H-%M-%S\", gmtime())\n upload_id = f\"{time_str}__{uuid4()}\"\n datastore_token = context.datastore_required_token\n datastore_url = _cached_get_upload_datastore(context)\n datastore_api_client = context.get_datastore_api_client(datastore_url)\n simultaneous_uploads = jobs if jobs is not None else DEFAULT_SIMULTANEOUS_UPLOADS\n if \"PYTEST_CURRENT_TEST\" in os.environ:\n simultaneous_uploads = 1\n is_valid_new_name_response = context.api_client_with_auth.dataset_is_valid_new_name(\n context.organization_id, 
new_dataset_name\n )\n if not is_valid_new_name_response.is_valid:\n problems_str = \"\"\n if is_valid_new_name_response.errors is not None:\n problems_str = f\" Problems: {is_valid_new_name_response.errors}\"\n raise Exception(\n f\"Dataset name {context.organization_id}/{new_dataset_name} is not a valid new dataset name.{problems_str}\"\n )\n\n datastore_api_client.dataset_reserve_upload(\n ApiReserveDatasetUploadInformation(\n upload_id,\n new_dataset_name,\n context.organization_id,\n total_file_count=len(file_infos),\n layers_to_link=[\n layer.as_api_linked_layer_identifier() for layer in layers_to_link\n ],\n folder_id=None,\n initial_teams=[],\n ),\n token=datastore_token,\n retry_count=MAXIMUM_RETRY_COUNT,\n )\n with get_rich_progress() as progress:\n with Resumable(\n f\"{datastore_url}/data/datasets?token={datastore_token}\",\n simultaneous_uploads=simultaneous_uploads,\n query={\n \"owningOrganization\": context.organization_id,\n \"name\": new_dataset_name,\n \"totalFileCount\": len(file_infos),\n },\n chunk_size=100 * 1024 * 1024, # 100 MiB\n generate_unique_identifier=lambda _,\n relative_path: f\"{upload_id}/{relative_path.as_posix()}\",\n test_chunks=False,\n permanent_errors=[400, 403, 404, 409, 415, 500, 501],\n client=httpx.Client(timeout=None),\n ) as session:\n progress_task = progress.add_task(\"Dataset Upload\", total=total_file_size)\n for file_path, relative_path, _ in file_infos:\n resumable_file = session.add_file(file_path, relative_path)\n resumable_file.chunk_completed.register(\n lambda chunk: progress.advance(progress_task, chunk.size)\n )\n\n datastore_api_client.dataset_finish_upload(\n ApiDatasetUploadInformation(upload_id),\n datastore_token,\n retry_count=MAXIMUM_RETRY_COUNT,\n )\n\n return new_dataset_name\n", "path": "webknossos/webknossos/client/_upload_dataset.py"}]}
| 2,216 | 197 |
gh_patches_debug_3318
|
rasdani/github-patches
|
git_diff
|
feast-dev__feast-2753
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Unable to access data in Feast UI when deployed to remote instance
## Expected Behavior
Should be able to view registry data when launching UI with `feast ui` on remote instances (like EC2).
## Current Behavior
I’ve tried setting the host to `0.0.0.0` and the static assets get loaded and can be accessed via the public IP. But the requests to the registry (`http://0.0.0.0:8888/registry`) fail, so no data shows up.
I've also tried setting the host to the private IP, but the request to `/registry` times out.
## Steps to reproduce
Run `feast ui --host <instance private ip>` in EC2 instance.
### Specifications
- Version:`0.21.2`
- Platform: EC2
- Subsystem:
## Possible Solution
Potential CORS issue that needs to be fixed?
Unable to access data in Feast UI when deployed to remote instance
## Expected Behavior
Should be able to view registry data when launching UI with `feast ui` on remote instances (like EC2).
## Current Behavior
I’ve tried setting the host to `0.0.0.0` and the static assets get loaded and can be accessed via the public IP. But the requests to the registry (`http://0.0.0.0:8888/registry`) fail, so no data shows up.
I've also tried setting the host to the private IP, but the request to `/registry` times out.
## Steps to reproduce
Run `feast ui --host <instance private ip>` in EC2 instance.
### Specifications
- Version:`0.21.2`
- Platform: EC2
- Subsystem:
## Possible Solution
Potential CORS issue that needs to be fixed?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sdk/python/feast/ui_server.py`
Content:
```
1 import json
2 import threading
3 from typing import Callable, Optional
4
5 import pkg_resources
6 import uvicorn
7 from fastapi import FastAPI, Response
8 from fastapi.middleware.cors import CORSMiddleware
9 from fastapi.staticfiles import StaticFiles
10
11 import feast
12
13
14 def get_app(
15 store: "feast.FeatureStore",
16 get_registry_dump: Callable,
17 project_id: str,
18 registry_ttl_secs: int,
19 host: str,
20 port: int,
21 ):
22 app = FastAPI()
23
24 app.add_middleware(
25 CORSMiddleware,
26 allow_origins=["*"],
27 allow_credentials=True,
28 allow_methods=["*"],
29 allow_headers=["*"],
30 )
31
32 # Asynchronously refresh registry, notifying shutdown and canceling the active timer if the app is shutting down
33 registry_json = ""
34 shutting_down = False
35 active_timer: Optional[threading.Timer] = None
36
37 def async_refresh():
38 store.refresh_registry()
39 nonlocal registry_json
40 registry_json = get_registry_dump(store.config, store.repo_path)
41 if shutting_down:
42 return
43 nonlocal active_timer
44 active_timer = threading.Timer(registry_ttl_secs, async_refresh)
45 active_timer.start()
46
47 @app.on_event("shutdown")
48 def shutdown_event():
49 nonlocal shutting_down
50 shutting_down = True
51 if active_timer:
52 active_timer.cancel()
53
54 async_refresh()
55
56 ui_dir = pkg_resources.resource_filename(__name__, "ui/build/")
57 # Initialize with the projects-list.json file
58 with open(ui_dir + "projects-list.json", mode="w") as f:
59 projects_dict = {
60 "projects": [
61 {
62 "name": "Project",
63 "description": "Test project",
64 "id": project_id,
65 "registryPath": f"http://{host}:{port}/registry",
66 }
67 ]
68 }
69 f.write(json.dumps(projects_dict))
70
71 @app.get("/registry")
72 def read_registry():
73 return json.loads(registry_json)
74
75 # For all other paths (such as paths that would otherwise be handled by react router), pass to React
76 @app.api_route("/p/{path_name:path}", methods=["GET"])
77 def catch_all():
78 filename = ui_dir + "index.html"
79
80 with open(filename) as f:
81 content = f.read()
82
83 return Response(content, media_type="text/html")
84
85 app.mount(
86 "/", StaticFiles(directory=ui_dir, html=True), name="site",
87 )
88
89 return app
90
91
92 def start_server(
93 store: "feast.FeatureStore",
94 host: str,
95 port: int,
96 get_registry_dump: Callable,
97 project_id: str,
98 registry_ttl_sec: int,
99 ):
100 app = get_app(store, get_registry_dump, project_id, registry_ttl_sec, host, port)
101 uvicorn.run(app, host=host, port=port)
102
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sdk/python/feast/ui_server.py b/sdk/python/feast/ui_server.py
--- a/sdk/python/feast/ui_server.py
+++ b/sdk/python/feast/ui_server.py
@@ -62,7 +62,7 @@
"name": "Project",
"description": "Test project",
"id": project_id,
- "registryPath": f"http://{host}:{port}/registry",
+ "registryPath": "/registry",
}
]
}
|
{"golden_diff": "diff --git a/sdk/python/feast/ui_server.py b/sdk/python/feast/ui_server.py\n--- a/sdk/python/feast/ui_server.py\n+++ b/sdk/python/feast/ui_server.py\n@@ -62,7 +62,7 @@\n \"name\": \"Project\",\n \"description\": \"Test project\",\n \"id\": project_id,\n- \"registryPath\": f\"http://{host}:{port}/registry\",\n+ \"registryPath\": \"/registry\",\n }\n ]\n }\n", "issue": "Unable to access data in Feast UI when deployed to remote instance\n## Expected Behavior \r\nShould be able to view registry data when launching UI with `feast ui` on remote instances (like EC2).\r\n\r\n## Current Behavior\r\nI\u2019ve tried setting the host to `0.0.0.0` and the static assets get loaded and can accessed via the public IP. But the requests to the registry (`http://0.0.0.0:8888/registry`) fails, so no data shows up.\r\n\r\nI've also tried setting the host to the private IP, but the request to `/registry` times out.\r\n\r\n## Steps to reproduce\r\nRun `feast ui --host <instance private ip>` in EC2 instance.\r\n\r\n### Specifications\r\n\r\n- Version:`0.21.2`\r\n- Platform: EC2\r\n- Subsystem:\r\n\r\n## Possible Solution\r\nPotential CORS issue that needs to be fixed?\nUnable to access data in Feast UI when deployed to remote instance\n## Expected Behavior \r\nShould be able to view registry data when launching UI with `feast ui` on remote instances (like EC2).\r\n\r\n## Current Behavior\r\nI\u2019ve tried setting the host to `0.0.0.0` and the static assets get loaded and can accessed via the public IP. But the requests to the registry (`http://0.0.0.0:8888/registry`) fails, so no data shows up.\r\n\r\nI've also tried setting the host to the private IP, but the request to `/registry` times out.\r\n\r\n## Steps to reproduce\r\nRun `feast ui --host <instance private ip>` in EC2 instance.\r\n\r\n### Specifications\r\n\r\n- Version:`0.21.2`\r\n- Platform: EC2\r\n- Subsystem:\r\n\r\n## Possible Solution\r\nPotential CORS issue that needs to be fixed?\n", "before_files": [{"content": "import json\nimport threading\nfrom typing import Callable, Optional\n\nimport pkg_resources\nimport uvicorn\nfrom fastapi import FastAPI, Response\nfrom fastapi.middleware.cors import CORSMiddleware\nfrom fastapi.staticfiles import StaticFiles\n\nimport feast\n\n\ndef get_app(\n store: \"feast.FeatureStore\",\n get_registry_dump: Callable,\n project_id: str,\n registry_ttl_secs: int,\n host: str,\n port: int,\n):\n app = FastAPI()\n\n app.add_middleware(\n CORSMiddleware,\n allow_origins=[\"*\"],\n allow_credentials=True,\n allow_methods=[\"*\"],\n allow_headers=[\"*\"],\n )\n\n # Asynchronously refresh registry, notifying shutdown and canceling the active timer if the app is shutting down\n registry_json = \"\"\n shutting_down = False\n active_timer: Optional[threading.Timer] = None\n\n def async_refresh():\n store.refresh_registry()\n nonlocal registry_json\n registry_json = get_registry_dump(store.config, store.repo_path)\n if shutting_down:\n return\n nonlocal active_timer\n active_timer = threading.Timer(registry_ttl_secs, async_refresh)\n active_timer.start()\n\n @app.on_event(\"shutdown\")\n def shutdown_event():\n nonlocal shutting_down\n shutting_down = True\n if active_timer:\n active_timer.cancel()\n\n async_refresh()\n\n ui_dir = pkg_resources.resource_filename(__name__, \"ui/build/\")\n # Initialize with the projects-list.json file\n with open(ui_dir + \"projects-list.json\", mode=\"w\") as f:\n projects_dict = {\n \"projects\": [\n {\n \"name\": \"Project\",\n \"description\": \"Test project\",\n \"id\": 
project_id,\n \"registryPath\": f\"http://{host}:{port}/registry\",\n }\n ]\n }\n f.write(json.dumps(projects_dict))\n\n @app.get(\"/registry\")\n def read_registry():\n return json.loads(registry_json)\n\n # For all other paths (such as paths that would otherwise be handled by react router), pass to React\n @app.api_route(\"/p/{path_name:path}\", methods=[\"GET\"])\n def catch_all():\n filename = ui_dir + \"index.html\"\n\n with open(filename) as f:\n content = f.read()\n\n return Response(content, media_type=\"text/html\")\n\n app.mount(\n \"/\", StaticFiles(directory=ui_dir, html=True), name=\"site\",\n )\n\n return app\n\n\ndef start_server(\n store: \"feast.FeatureStore\",\n host: str,\n port: int,\n get_registry_dump: Callable,\n project_id: str,\n registry_ttl_sec: int,\n):\n app = get_app(store, get_registry_dump, project_id, registry_ttl_sec, host, port)\n uvicorn.run(app, host=host, port=port)\n", "path": "sdk/python/feast/ui_server.py"}], "after_files": [{"content": "import json\nimport threading\nfrom typing import Callable, Optional\n\nimport pkg_resources\nimport uvicorn\nfrom fastapi import FastAPI, Response\nfrom fastapi.middleware.cors import CORSMiddleware\nfrom fastapi.staticfiles import StaticFiles\n\nimport feast\n\n\ndef get_app(\n store: \"feast.FeatureStore\",\n get_registry_dump: Callable,\n project_id: str,\n registry_ttl_secs: int,\n host: str,\n port: int,\n):\n app = FastAPI()\n\n app.add_middleware(\n CORSMiddleware,\n allow_origins=[\"*\"],\n allow_credentials=True,\n allow_methods=[\"*\"],\n allow_headers=[\"*\"],\n )\n\n # Asynchronously refresh registry, notifying shutdown and canceling the active timer if the app is shutting down\n registry_json = \"\"\n shutting_down = False\n active_timer: Optional[threading.Timer] = None\n\n def async_refresh():\n store.refresh_registry()\n nonlocal registry_json\n registry_json = get_registry_dump(store.config, store.repo_path)\n if shutting_down:\n return\n nonlocal active_timer\n active_timer = threading.Timer(registry_ttl_secs, async_refresh)\n active_timer.start()\n\n @app.on_event(\"shutdown\")\n def shutdown_event():\n nonlocal shutting_down\n shutting_down = True\n if active_timer:\n active_timer.cancel()\n\n async_refresh()\n\n ui_dir = pkg_resources.resource_filename(__name__, \"ui/build/\")\n # Initialize with the projects-list.json file\n with open(ui_dir + \"projects-list.json\", mode=\"w\") as f:\n projects_dict = {\n \"projects\": [\n {\n \"name\": \"Project\",\n \"description\": \"Test project\",\n \"id\": project_id,\n \"registryPath\": \"/registry\",\n }\n ]\n }\n f.write(json.dumps(projects_dict))\n\n @app.get(\"/registry\")\n def read_registry():\n return json.loads(registry_json)\n\n # For all other paths (such as paths that would otherwise be handled by react router), pass to React\n @app.api_route(\"/p/{path_name:path}\", methods=[\"GET\"])\n def catch_all():\n filename = ui_dir + \"index.html\"\n\n with open(filename) as f:\n content = f.read()\n\n return Response(content, media_type=\"text/html\")\n\n app.mount(\n \"/\", StaticFiles(directory=ui_dir, html=True), name=\"site\",\n )\n\n return app\n\n\ndef start_server(\n store: \"feast.FeatureStore\",\n host: str,\n port: int,\n get_registry_dump: Callable,\n project_id: str,\n registry_ttl_sec: int,\n):\n app = get_app(store, get_registry_dump, project_id, registry_ttl_sec, host, port)\n uvicorn.run(app, host=host, port=port)\n", "path": "sdk/python/feast/ui_server.py"}]}
| 1,454 | 108 |
gh_patches_debug_24195
|
rasdani/github-patches
|
git_diff
|
liqd__a4-meinberlin-2543
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
testing #2252 follow mail: mail participation ends soon
Where: Mail "participation ends soon"
* in a single-module project, the link should go to the project view and not to a module view, which does not regularly exist in this case. Is that possible?
* As in the other mails: a paragraph break between the two sentences probably looks better.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `meinberlin/apps/offlineevents/models.py`
Content:
```
1 from datetime import timedelta
2
3 from autoslug import AutoSlugField
4 from ckeditor_uploader.fields import RichTextUploadingField
5 from django.db import models
6 from django.urls import reverse
7 from django.utils import timezone
8 from django.utils.translation import ugettext_lazy as _
9
10 from adhocracy4 import transforms
11 from adhocracy4.models.base import UserGeneratedContentModel
12 from adhocracy4.projects import models as project_models
13
14
15 class OfflineEventsQuerySet(models.QuerySet):
16
17 def starts_within(self, hours=72):
18 """All offlineevents starting within the given time."""
19 now = timezone.now()
20 return self.filter(date__gt=now,
21 date__lt=(now + timedelta(hours=hours)))
22
23
24 class OfflineEvent(UserGeneratedContentModel):
25 slug = AutoSlugField(populate_from='name', unique=True)
26 name = models.CharField(max_length=120, verbose_name=_('Name of event'))
27 event_type = models.CharField(
28 max_length=30, verbose_name=_('Event type'),
29 help_text=_('The content of this field is shown in the timeline. It '
30 'should have no more than 30 characters e.g. Information '
31 'event or 3rd public workshop.'))
32 date = models.DateTimeField(
33 verbose_name=_('Date'))
34 description = RichTextUploadingField(
35 config_name='image-editor',
36 verbose_name=_('Description'))
37 project = models.ForeignKey(
38 project_models.Project, on_delete=models.CASCADE)
39
40 objects = OfflineEventsQuerySet.as_manager()
41
42 class Meta:
43 ordering = ['-date']
44
45 def __str__(self):
46 return self.name
47
48 def save(self, *args, **kwargs):
49 self.description = transforms.clean_html_field(
50 self.description, 'image-editor')
51 super().save(*args, **kwargs)
52
53 def get_absolute_url(self):
54 return reverse('meinberlin_offlineevents:offlineevent-detail',
55 args=[str(self.slug)])
56
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/meinberlin/apps/offlineevents/models.py b/meinberlin/apps/offlineevents/models.py
--- a/meinberlin/apps/offlineevents/models.py
+++ b/meinberlin/apps/offlineevents/models.py
@@ -3,8 +3,8 @@
from autoslug import AutoSlugField
from ckeditor_uploader.fields import RichTextUploadingField
from django.db import models
-from django.urls import reverse
from django.utils import timezone
+from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from adhocracy4 import transforms
@@ -50,6 +50,16 @@
self.description, 'image-editor')
super().save(*args, **kwargs)
+ @cached_property
+ def get_timeline_index(self):
+ if self.project.display_timeline:
+ for count, cluster in enumerate(self.project.participation_dates):
+ if 'event_type' in cluster and self.slug == cluster['slug']:
+ return count
+ return 0
+
def get_absolute_url(self):
- return reverse('meinberlin_offlineevents:offlineevent-detail',
- args=[str(self.slug)])
+ if self.project.display_timeline:
+ return '{}?initialSlide={}'.format(self.project.get_absolute_url(),
+ self.get_timeline_index)
+ return self.project.get_absolute_url()
|
{"golden_diff": "diff --git a/meinberlin/apps/offlineevents/models.py b/meinberlin/apps/offlineevents/models.py\n--- a/meinberlin/apps/offlineevents/models.py\n+++ b/meinberlin/apps/offlineevents/models.py\n@@ -3,8 +3,8 @@\n from autoslug import AutoSlugField\n from ckeditor_uploader.fields import RichTextUploadingField\n from django.db import models\n-from django.urls import reverse\n from django.utils import timezone\n+from django.utils.functional import cached_property\n from django.utils.translation import ugettext_lazy as _\n \n from adhocracy4 import transforms\n@@ -50,6 +50,16 @@\n self.description, 'image-editor')\n super().save(*args, **kwargs)\n \n+ @cached_property\n+ def get_timeline_index(self):\n+ if self.project.display_timeline:\n+ for count, cluster in enumerate(self.project.participation_dates):\n+ if 'event_type' in cluster and self.slug == cluster['slug']:\n+ return count\n+ return 0\n+\n def get_absolute_url(self):\n- return reverse('meinberlin_offlineevents:offlineevent-detail',\n- args=[str(self.slug)])\n+ if self.project.display_timeline:\n+ return '{}?initialSlide={}'.format(self.project.get_absolute_url(),\n+ self.get_timeline_index)\n+ return self.project.get_absolute_url()\n", "issue": "testing #2252 follow mail: mail participation ends soon\nWhere: Mail \"participation ends soon\"\r\n\r\n* in a single module project link should go to project view and not to a module view that does not regularly exist in this case. Is that possible?\r\n* As in the other mails: paragraph between two sentences probably looks better.\n", "before_files": [{"content": "from datetime import timedelta\n\nfrom autoslug import AutoSlugField\nfrom ckeditor_uploader.fields import RichTextUploadingField\nfrom django.db import models\nfrom django.urls import reverse\nfrom django.utils import timezone\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom adhocracy4 import transforms\nfrom adhocracy4.models.base import UserGeneratedContentModel\nfrom adhocracy4.projects import models as project_models\n\n\nclass OfflineEventsQuerySet(models.QuerySet):\n\n def starts_within(self, hours=72):\n \"\"\"All offlineevents starting within the given time.\"\"\"\n now = timezone.now()\n return self.filter(date__gt=now,\n date__lt=(now + timedelta(hours=hours)))\n\n\nclass OfflineEvent(UserGeneratedContentModel):\n slug = AutoSlugField(populate_from='name', unique=True)\n name = models.CharField(max_length=120, verbose_name=_('Name of event'))\n event_type = models.CharField(\n max_length=30, verbose_name=_('Event type'),\n help_text=_('The content of this field is shown in the timeline. It '\n 'should have no more than 30 characters e.g. 
Information '\n 'event or 3rd public workshop.'))\n date = models.DateTimeField(\n verbose_name=_('Date'))\n description = RichTextUploadingField(\n config_name='image-editor',\n verbose_name=_('Description'))\n project = models.ForeignKey(\n project_models.Project, on_delete=models.CASCADE)\n\n objects = OfflineEventsQuerySet.as_manager()\n\n class Meta:\n ordering = ['-date']\n\n def __str__(self):\n return self.name\n\n def save(self, *args, **kwargs):\n self.description = transforms.clean_html_field(\n self.description, 'image-editor')\n super().save(*args, **kwargs)\n\n def get_absolute_url(self):\n return reverse('meinberlin_offlineevents:offlineevent-detail',\n args=[str(self.slug)])\n", "path": "meinberlin/apps/offlineevents/models.py"}], "after_files": [{"content": "from datetime import timedelta\n\nfrom autoslug import AutoSlugField\nfrom ckeditor_uploader.fields import RichTextUploadingField\nfrom django.db import models\nfrom django.utils import timezone\nfrom django.utils.functional import cached_property\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom adhocracy4 import transforms\nfrom adhocracy4.models.base import UserGeneratedContentModel\nfrom adhocracy4.projects import models as project_models\n\n\nclass OfflineEventsQuerySet(models.QuerySet):\n\n def starts_within(self, hours=72):\n \"\"\"All offlineevents starting within the given time.\"\"\"\n now = timezone.now()\n return self.filter(date__gt=now,\n date__lt=(now + timedelta(hours=hours)))\n\n\nclass OfflineEvent(UserGeneratedContentModel):\n slug = AutoSlugField(populate_from='name', unique=True)\n name = models.CharField(max_length=120, verbose_name=_('Name of event'))\n event_type = models.CharField(\n max_length=30, verbose_name=_('Event type'),\n help_text=_('The content of this field is shown in the timeline. It '\n 'should have no more than 30 characters e.g. Information '\n 'event or 3rd public workshop.'))\n date = models.DateTimeField(\n verbose_name=_('Date'))\n description = RichTextUploadingField(\n config_name='image-editor',\n verbose_name=_('Description'))\n project = models.ForeignKey(\n project_models.Project, on_delete=models.CASCADE)\n\n objects = OfflineEventsQuerySet.as_manager()\n\n class Meta:\n ordering = ['-date']\n\n def __str__(self):\n return self.name\n\n def save(self, *args, **kwargs):\n self.description = transforms.clean_html_field(\n self.description, 'image-editor')\n super().save(*args, **kwargs)\n\n @cached_property\n def get_timeline_index(self):\n if self.project.display_timeline:\n for count, cluster in enumerate(self.project.participation_dates):\n if 'event_type' in cluster and self.slug == cluster['slug']:\n return count\n return 0\n\n def get_absolute_url(self):\n if self.project.display_timeline:\n return '{}?initialSlide={}'.format(self.project.get_absolute_url(),\n self.get_timeline_index)\n return self.project.get_absolute_url()\n", "path": "meinberlin/apps/offlineevents/models.py"}]}
| 850 | 298 |
gh_patches_debug_43164
|
rasdani/github-patches
|
git_diff
|
Textualize__textual-3659
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
CSS error reporting sometimes off by one
See https://github.com/Textualize/textual/pull/3582#issuecomment-1787507687.
Running the app at the bottom produces the error below, where the error reporting is off by one.
See the line above the panel and the code lines in the code snippet printed.
```
Error in stylesheet:
/Users/davep/develop/python/textual-upstream/sandbox/foo.py, CSSErrorApp.CSS:1:4
╭────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
│ 1 │ │
│ ❱ 2 │ : │
│ 3 │
╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
• Expected one of 'comment line', 'comment start', 'selector start', 'selector start class', 'selector start id', 'selector start universal', 'variable name', or 'whitespace'.
• Did you forget a semicolon at the end of a line?
```
```py
from textual.app import App, ComposeResult
from textual.widgets import Label

class CSSErrorApp(App[None]):
    CSS = """
    :
    """

    def compose(self) -> ComposeResult:
        yield Label()

if __name__ == "__main__":
    CSSErrorApp().run()
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/textual/css/tokenizer.py`
Content:
```
1 from __future__ import annotations
2
3 import re
4 from typing import TYPE_CHECKING, NamedTuple
5
6 import rich.repr
7 from rich.console import Group, RenderableType
8 from rich.highlighter import ReprHighlighter
9 from rich.padding import Padding
10 from rich.panel import Panel
11 from rich.syntax import Syntax
12 from rich.text import Text
13
14 from ..suggestions import get_suggestion
15 from ._error_tools import friendly_list
16 from .constants import VALID_PSEUDO_CLASSES
17
18 if TYPE_CHECKING:
19 from .types import CSSLocation
20
21
22 class TokenError(Exception):
23 """Error raised when the CSS cannot be tokenized (syntax error)."""
24
25 def __init__(
26 self,
27 read_from: CSSLocation,
28 code: str,
29 start: tuple[int, int],
30 message: str,
31 end: tuple[int, int] | None = None,
32 ) -> None:
33 """
34 Args:
35 read_from: The location where the CSS was read from.
36 code: The code being parsed.
37 start: Line number of the error.
38 message: A message associated with the error.
39 end: End location of token, or None if not known.
40 """
41
42 self.read_from = read_from
43 self.code = code
44 self.start = start
45 self.end = end or start
46 super().__init__(message)
47
48 def _get_snippet(self) -> Panel:
49 """Get a short snippet of code around a given line number.
50
51 Returns:
52 A renderable.
53 """
54 line_no = self.start[0]
55 # TODO: Highlight column number
56 syntax = Syntax(
57 self.code,
58 lexer="scss",
59 theme="ansi_light",
60 line_numbers=True,
61 indent_guides=True,
62 line_range=(max(0, line_no - 2), line_no + 2),
63 highlight_lines={line_no + 1},
64 )
65 syntax.stylize_range("reverse bold", self.start, self.end)
66 return Panel(syntax, border_style="red")
67
68 def __rich__(self) -> RenderableType:
69 highlighter = ReprHighlighter()
70 errors: list[RenderableType] = []
71
72 message = str(self)
73 errors.append(Text(" Error in stylesheet:", style="bold red"))
74
75 line_no, col_no = self.start
76
77 path, widget_variable = self.read_from
78 if widget_variable:
79 css_location = f" {path}, {widget_variable}:{line_no}:{col_no}"
80 else:
81 css_location = f" {path}:{line_no}:{col_no}"
82 errors.append(highlighter(css_location))
83 errors.append(self._get_snippet())
84
85 final_message = "\n".join(
86 f"• {message_part.strip()}" for message_part in message.split(";")
87 )
88 errors.append(
89 Padding(
90 highlighter(
91 Text(final_message, "red"),
92 ),
93 pad=(0, 1),
94 )
95 )
96
97 return Group(*errors)
98
99
100 class EOFError(TokenError):
101 pass
102
103
104 class Expect:
105 def __init__(self, **tokens: str) -> None:
106 self.names = list(tokens.keys())
107 self.regexes = list(tokens.values())
108 self._regex = re.compile(
109 "("
110 + "|".join(f"(?P<{name}>{regex})" for name, regex in tokens.items())
111 + ")"
112 )
113 self.match = self._regex.match
114 self.search = self._regex.search
115 self._expect_eof = False
116
117 def expect_eof(self, eof: bool) -> Expect:
118 self._expect_eof = eof
119 return self
120
121 def __rich_repr__(self) -> rich.repr.Result:
122 yield from zip(self.names, self.regexes)
123
124
125 class ReferencedBy(NamedTuple):
126 name: str
127 location: tuple[int, int]
128 length: int
129 code: str
130
131
132 @rich.repr.auto
133 class Token(NamedTuple):
134 name: str
135 value: str
136 read_from: CSSLocation
137 code: str
138 location: tuple[int, int]
139 referenced_by: ReferencedBy | None = None
140
141 @property
142 def start(self) -> tuple[int, int]:
143 """Start line and column (1 indexed)."""
144 line, offset = self.location
145 return (line + 1, offset)
146
147 @property
148 def end(self) -> tuple[int, int]:
149 """End line and column (1 indexed)."""
150 line, offset = self.location
151 return (line + 1, offset + len(self.value))
152
153 def with_reference(self, by: ReferencedBy | None) -> "Token":
154 """Return a copy of the Token, with reference information attached.
155 This is used for variable substitution, where a variable reference
156 can refer to tokens which were defined elsewhere. With the additional
157 ReferencedBy data attached, we can track where the token we are referring
158 to is used.
159 """
160 return Token(
161 name=self.name,
162 value=self.value,
163 read_from=self.read_from,
164 code=self.code,
165 location=self.location,
166 referenced_by=by,
167 )
168
169 def __str__(self) -> str:
170 return self.value
171
172 def __rich_repr__(self) -> rich.repr.Result:
173 yield "name", self.name
174 yield "value", self.value
175 yield (
176 "read_from",
177 self.read_from[0] if not self.read_from[1] else self.read_from,
178 )
179 yield "code", self.code if len(self.code) < 40 else self.code[:40] + "..."
180 yield "location", self.location
181 yield "referenced_by", self.referenced_by, None
182
183
184 class Tokenizer:
185 def __init__(self, text: str, read_from: CSSLocation = ("", "")) -> None:
186 self.read_from = read_from
187 self.code = text
188 self.lines = text.splitlines(keepends=True)
189 self.line_no = 0
190 self.col_no = 0
191
192 def get_token(self, expect: Expect) -> Token:
193 line_no = self.line_no
194 col_no = self.col_no
195 if line_no >= len(self.lines):
196 if expect._expect_eof:
197 return Token(
198 "eof",
199 "",
200 self.read_from,
201 self.code,
202 (line_no + 1, col_no + 1),
203 None,
204 )
205 else:
206 raise EOFError(
207 self.read_from,
208 self.code,
209 (line_no + 1, col_no + 1),
210 "Unexpected end of file",
211 )
212 line = self.lines[line_no]
213 match = expect.match(line, col_no)
214 if match is None:
215 expected = friendly_list(" ".join(name.split("_")) for name in expect.names)
216 message = f"Expected one of {expected}.; Did you forget a semicolon at the end of a line?"
217 raise TokenError(
218 self.read_from,
219 self.code,
220 (line_no, col_no),
221 message,
222 )
223 iter_groups = iter(match.groups())
224
225 next(iter_groups)
226
227 for name, value in zip(expect.names, iter_groups):
228 if value is not None:
229 break
230 else:
231 # For MyPy's benefit
232 raise AssertionError("can't reach here")
233
234 token = Token(
235 name,
236 value,
237 self.read_from,
238 self.code,
239 (line_no, col_no),
240 referenced_by=None,
241 )
242
243 if (
244 token.name == "pseudo_class"
245 and token.value.strip(":") not in VALID_PSEUDO_CLASSES
246 ):
247 pseudo_class = token.value.strip(":")
248 suggestion = get_suggestion(pseudo_class, list(VALID_PSEUDO_CLASSES))
249 all_valid = f"must be one of {friendly_list(VALID_PSEUDO_CLASSES)}"
250 if suggestion:
251 raise TokenError(
252 self.read_from,
253 self.code,
254 (line_no, col_no),
255 f"unknown pseudo-class {pseudo_class!r}; did you mean {suggestion!r}?; {all_valid}",
256 )
257 else:
258 raise TokenError(
259 self.read_from,
260 self.code,
261 (line_no, col_no),
262 f"unknown pseudo-class {pseudo_class!r}; {all_valid}",
263 )
264
265 col_no += len(value)
266 if col_no >= len(line):
267 line_no += 1
268 col_no = 0
269 self.line_no = line_no
270 self.col_no = col_no
271 return token
272
273 def skip_to(self, expect: Expect) -> Token:
274 line_no = self.line_no
275 col_no = self.col_no
276
277 while True:
278 if line_no >= len(self.lines):
279 raise EOFError(
280 self.read_from,
281 self.code,
282 (line_no, col_no),
283 "Unexpected end of file",
284 )
285 line = self.lines[line_no]
286 match = expect.search(line, col_no)
287
288 if match is None:
289 line_no += 1
290 col_no = 0
291 else:
292 self.line_no = line_no
293 self.col_no = match.span(0)[0]
294 return self.get_token(expect)
295
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/textual/css/tokenizer.py b/src/textual/css/tokenizer.py
--- a/src/textual/css/tokenizer.py
+++ b/src/textual/css/tokenizer.py
@@ -34,9 +34,9 @@
Args:
read_from: The location where the CSS was read from.
code: The code being parsed.
- start: Line number of the error.
+ start: Line and column number of the error (1-indexed).
message: A message associated with the error.
- end: End location of token, or None if not known.
+ end: End location of token (1-indexed), or None if not known.
"""
self.read_from = read_from
@@ -60,9 +60,13 @@
line_numbers=True,
indent_guides=True,
line_range=(max(0, line_no - 2), line_no + 2),
- highlight_lines={line_no + 1},
+ highlight_lines={line_no},
+ )
+ syntax.stylize_range(
+ "reverse bold",
+ (self.start[0], self.start[1] - 1),
+ (self.end[0], self.end[1] - 1),
)
- syntax.stylize_range("reverse bold", self.start, self.end)
return Panel(syntax, border_style="red")
def __rich__(self) -> RenderableType:
@@ -136,19 +140,20 @@
read_from: CSSLocation
code: str
location: tuple[int, int]
+ """Token starting location, 0-indexed."""
referenced_by: ReferencedBy | None = None
@property
def start(self) -> tuple[int, int]:
- """Start line and column (1 indexed)."""
+ """Start line and column (1-indexed)."""
line, offset = self.location
- return (line + 1, offset)
+ return (line + 1, offset + 1)
@property
def end(self) -> tuple[int, int]:
- """End line and column (1 indexed)."""
+ """End line and column (1-indexed)."""
line, offset = self.location
- return (line + 1, offset + len(self.value))
+ return (line + 1, offset + len(self.value) + 1)
def with_reference(self, by: ReferencedBy | None) -> "Token":
"""Return a copy of the Token, with reference information attached.
@@ -199,7 +204,7 @@
"",
self.read_from,
self.code,
- (line_no + 1, col_no + 1),
+ (line_no, col_no),
None,
)
else:
@@ -217,7 +222,7 @@
raise TokenError(
self.read_from,
self.code,
- (line_no, col_no),
+ (line_no + 1, col_no + 1),
message,
)
iter_groups = iter(match.groups())
@@ -251,14 +256,14 @@
raise TokenError(
self.read_from,
self.code,
- (line_no, col_no),
+ (line_no + 1, col_no + 1),
f"unknown pseudo-class {pseudo_class!r}; did you mean {suggestion!r}?; {all_valid}",
)
else:
raise TokenError(
self.read_from,
self.code,
- (line_no, col_no),
+ (line_no + 1, col_no + 1),
f"unknown pseudo-class {pseudo_class!r}; {all_valid}",
)
|
{"golden_diff": "diff --git a/src/textual/css/tokenizer.py b/src/textual/css/tokenizer.py\n--- a/src/textual/css/tokenizer.py\n+++ b/src/textual/css/tokenizer.py\n@@ -34,9 +34,9 @@\n Args:\n read_from: The location where the CSS was read from.\n code: The code being parsed.\n- start: Line number of the error.\n+ start: Line and column number of the error (1-indexed).\n message: A message associated with the error.\n- end: End location of token, or None if not known.\n+ end: End location of token (1-indexed), or None if not known.\n \"\"\"\n \n self.read_from = read_from\n@@ -60,9 +60,13 @@\n line_numbers=True,\n indent_guides=True,\n line_range=(max(0, line_no - 2), line_no + 2),\n- highlight_lines={line_no + 1},\n+ highlight_lines={line_no},\n+ )\n+ syntax.stylize_range(\n+ \"reverse bold\",\n+ (self.start[0], self.start[1] - 1),\n+ (self.end[0], self.end[1] - 1),\n )\n- syntax.stylize_range(\"reverse bold\", self.start, self.end)\n return Panel(syntax, border_style=\"red\")\n \n def __rich__(self) -> RenderableType:\n@@ -136,19 +140,20 @@\n read_from: CSSLocation\n code: str\n location: tuple[int, int]\n+ \"\"\"Token starting location, 0-indexed.\"\"\"\n referenced_by: ReferencedBy | None = None\n \n @property\n def start(self) -> tuple[int, int]:\n- \"\"\"Start line and column (1 indexed).\"\"\"\n+ \"\"\"Start line and column (1-indexed).\"\"\"\n line, offset = self.location\n- return (line + 1, offset)\n+ return (line + 1, offset + 1)\n \n @property\n def end(self) -> tuple[int, int]:\n- \"\"\"End line and column (1 indexed).\"\"\"\n+ \"\"\"End line and column (1-indexed).\"\"\"\n line, offset = self.location\n- return (line + 1, offset + len(self.value))\n+ return (line + 1, offset + len(self.value) + 1)\n \n def with_reference(self, by: ReferencedBy | None) -> \"Token\":\n \"\"\"Return a copy of the Token, with reference information attached.\n@@ -199,7 +204,7 @@\n \"\",\n self.read_from,\n self.code,\n- (line_no + 1, col_no + 1),\n+ (line_no, col_no),\n None,\n )\n else:\n@@ -217,7 +222,7 @@\n raise TokenError(\n self.read_from,\n self.code,\n- (line_no, col_no),\n+ (line_no + 1, col_no + 1),\n message,\n )\n iter_groups = iter(match.groups())\n@@ -251,14 +256,14 @@\n raise TokenError(\n self.read_from,\n self.code,\n- (line_no, col_no),\n+ (line_no + 1, col_no + 1),\n f\"unknown pseudo-class {pseudo_class!r}; did you mean {suggestion!r}?; {all_valid}\",\n )\n else:\n raise TokenError(\n self.read_from,\n self.code,\n- (line_no, col_no),\n+ (line_no + 1, col_no + 1),\n f\"unknown pseudo-class {pseudo_class!r}; {all_valid}\",\n )\n", "issue": "CSS error reporting sometimes off by one\nSee https://github.com/Textualize/textual/pull/3582#issuecomment-1787507687.\r\n\r\nRunning the app at the bottom produces the error below, where the error reporting is off by one.\r\nSee the line above the panel and the code lines in the code snippet printed.\r\n\r\n```\r\n Error in stylesheet:\r\n /Users/davep/develop/python/textual-upstream/sandbox/foo.py, 
CSSErrorApp.CSS:1:4\r\n\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\r\n\u2502 1 \u2502 \u2502\r\n\u2502 \u2771 2 \u2502 : \u2502\r\n\u2502 3 \u2502\r\n\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\r\n \u2022 Expected one of 'comment line', 'comment start', 'selector start', 'selector start class', 'selector start id', 'selector start universal', 'variable name', or 'whitespace'. \r\n \u2022 Did you forget a semicolon at the end of a line? 
\r\n```\r\n\r\n```py\r\nfrom textual.app import App, ComposeResult\r\nfrom textual.widgets import Label\r\n\r\nclass CSSErrorApp(App[None]):\r\n\r\n CSS = \"\"\"\r\n :\r\n \"\"\"\r\n\r\n def compose(self) -> ComposeResult:\r\n yield Label()\r\n\r\nif __name__ == \"__main__\":\r\n CSSErrorApp().run()\r\n```\n", "before_files": [{"content": "from __future__ import annotations\n\nimport re\nfrom typing import TYPE_CHECKING, NamedTuple\n\nimport rich.repr\nfrom rich.console import Group, RenderableType\nfrom rich.highlighter import ReprHighlighter\nfrom rich.padding import Padding\nfrom rich.panel import Panel\nfrom rich.syntax import Syntax\nfrom rich.text import Text\n\nfrom ..suggestions import get_suggestion\nfrom ._error_tools import friendly_list\nfrom .constants import VALID_PSEUDO_CLASSES\n\nif TYPE_CHECKING:\n from .types import CSSLocation\n\n\nclass TokenError(Exception):\n \"\"\"Error raised when the CSS cannot be tokenized (syntax error).\"\"\"\n\n def __init__(\n self,\n read_from: CSSLocation,\n code: str,\n start: tuple[int, int],\n message: str,\n end: tuple[int, int] | None = None,\n ) -> None:\n \"\"\"\n Args:\n read_from: The location where the CSS was read from.\n code: The code being parsed.\n start: Line number of the error.\n message: A message associated with the error.\n end: End location of token, or None if not known.\n \"\"\"\n\n self.read_from = read_from\n self.code = code\n self.start = start\n self.end = end or start\n super().__init__(message)\n\n def _get_snippet(self) -> Panel:\n \"\"\"Get a short snippet of code around a given line number.\n\n Returns:\n A renderable.\n \"\"\"\n line_no = self.start[0]\n # TODO: Highlight column number\n syntax = Syntax(\n self.code,\n lexer=\"scss\",\n theme=\"ansi_light\",\n line_numbers=True,\n indent_guides=True,\n line_range=(max(0, line_no - 2), line_no + 2),\n highlight_lines={line_no + 1},\n )\n syntax.stylize_range(\"reverse bold\", self.start, self.end)\n return Panel(syntax, border_style=\"red\")\n\n def __rich__(self) -> RenderableType:\n highlighter = ReprHighlighter()\n errors: list[RenderableType] = []\n\n message = str(self)\n errors.append(Text(\" Error in stylesheet:\", style=\"bold red\"))\n\n line_no, col_no = self.start\n\n path, widget_variable = self.read_from\n if widget_variable:\n css_location = f\" {path}, {widget_variable}:{line_no}:{col_no}\"\n else:\n css_location = f\" {path}:{line_no}:{col_no}\"\n errors.append(highlighter(css_location))\n errors.append(self._get_snippet())\n\n final_message = \"\\n\".join(\n f\"\u2022 {message_part.strip()}\" for message_part in message.split(\";\")\n )\n errors.append(\n Padding(\n highlighter(\n Text(final_message, \"red\"),\n ),\n pad=(0, 1),\n )\n )\n\n return Group(*errors)\n\n\nclass EOFError(TokenError):\n pass\n\n\nclass Expect:\n def __init__(self, **tokens: str) -> None:\n self.names = list(tokens.keys())\n self.regexes = list(tokens.values())\n self._regex = re.compile(\n \"(\"\n + \"|\".join(f\"(?P<{name}>{regex})\" for name, regex in tokens.items())\n + \")\"\n )\n self.match = self._regex.match\n self.search = self._regex.search\n self._expect_eof = False\n\n def expect_eof(self, eof: bool) -> Expect:\n self._expect_eof = eof\n return self\n\n def __rich_repr__(self) -> rich.repr.Result:\n yield from zip(self.names, self.regexes)\n\n\nclass ReferencedBy(NamedTuple):\n name: str\n location: tuple[int, int]\n length: int\n code: str\n\n\[email protected]\nclass Token(NamedTuple):\n name: str\n value: str\n read_from: CSSLocation\n code: str\n 
location: tuple[int, int]\n referenced_by: ReferencedBy | None = None\n\n @property\n def start(self) -> tuple[int, int]:\n \"\"\"Start line and column (1 indexed).\"\"\"\n line, offset = self.location\n return (line + 1, offset)\n\n @property\n def end(self) -> tuple[int, int]:\n \"\"\"End line and column (1 indexed).\"\"\"\n line, offset = self.location\n return (line + 1, offset + len(self.value))\n\n def with_reference(self, by: ReferencedBy | None) -> \"Token\":\n \"\"\"Return a copy of the Token, with reference information attached.\n This is used for variable substitution, where a variable reference\n can refer to tokens which were defined elsewhere. With the additional\n ReferencedBy data attached, we can track where the token we are referring\n to is used.\n \"\"\"\n return Token(\n name=self.name,\n value=self.value,\n read_from=self.read_from,\n code=self.code,\n location=self.location,\n referenced_by=by,\n )\n\n def __str__(self) -> str:\n return self.value\n\n def __rich_repr__(self) -> rich.repr.Result:\n yield \"name\", self.name\n yield \"value\", self.value\n yield (\n \"read_from\",\n self.read_from[0] if not self.read_from[1] else self.read_from,\n )\n yield \"code\", self.code if len(self.code) < 40 else self.code[:40] + \"...\"\n yield \"location\", self.location\n yield \"referenced_by\", self.referenced_by, None\n\n\nclass Tokenizer:\n def __init__(self, text: str, read_from: CSSLocation = (\"\", \"\")) -> None:\n self.read_from = read_from\n self.code = text\n self.lines = text.splitlines(keepends=True)\n self.line_no = 0\n self.col_no = 0\n\n def get_token(self, expect: Expect) -> Token:\n line_no = self.line_no\n col_no = self.col_no\n if line_no >= len(self.lines):\n if expect._expect_eof:\n return Token(\n \"eof\",\n \"\",\n self.read_from,\n self.code,\n (line_no + 1, col_no + 1),\n None,\n )\n else:\n raise EOFError(\n self.read_from,\n self.code,\n (line_no + 1, col_no + 1),\n \"Unexpected end of file\",\n )\n line = self.lines[line_no]\n match = expect.match(line, col_no)\n if match is None:\n expected = friendly_list(\" \".join(name.split(\"_\")) for name in expect.names)\n message = f\"Expected one of {expected}.; Did you forget a semicolon at the end of a line?\"\n raise TokenError(\n self.read_from,\n self.code,\n (line_no, col_no),\n message,\n )\n iter_groups = iter(match.groups())\n\n next(iter_groups)\n\n for name, value in zip(expect.names, iter_groups):\n if value is not None:\n break\n else:\n # For MyPy's benefit\n raise AssertionError(\"can't reach here\")\n\n token = Token(\n name,\n value,\n self.read_from,\n self.code,\n (line_no, col_no),\n referenced_by=None,\n )\n\n if (\n token.name == \"pseudo_class\"\n and token.value.strip(\":\") not in VALID_PSEUDO_CLASSES\n ):\n pseudo_class = token.value.strip(\":\")\n suggestion = get_suggestion(pseudo_class, list(VALID_PSEUDO_CLASSES))\n all_valid = f\"must be one of {friendly_list(VALID_PSEUDO_CLASSES)}\"\n if suggestion:\n raise TokenError(\n self.read_from,\n self.code,\n (line_no, col_no),\n f\"unknown pseudo-class {pseudo_class!r}; did you mean {suggestion!r}?; {all_valid}\",\n )\n else:\n raise TokenError(\n self.read_from,\n self.code,\n (line_no, col_no),\n f\"unknown pseudo-class {pseudo_class!r}; {all_valid}\",\n )\n\n col_no += len(value)\n if col_no >= len(line):\n line_no += 1\n col_no = 0\n self.line_no = line_no\n self.col_no = col_no\n return token\n\n def skip_to(self, expect: Expect) -> Token:\n line_no = self.line_no\n col_no = self.col_no\n\n while True:\n if line_no >= 
len(self.lines):\n raise EOFError(\n self.read_from,\n self.code,\n (line_no, col_no),\n \"Unexpected end of file\",\n )\n line = self.lines[line_no]\n match = expect.search(line, col_no)\n\n if match is None:\n line_no += 1\n col_no = 0\n else:\n self.line_no = line_no\n self.col_no = match.span(0)[0]\n return self.get_token(expect)\n", "path": "src/textual/css/tokenizer.py"}], "after_files": [{"content": "from __future__ import annotations\n\nimport re\nfrom typing import TYPE_CHECKING, NamedTuple\n\nimport rich.repr\nfrom rich.console import Group, RenderableType\nfrom rich.highlighter import ReprHighlighter\nfrom rich.padding import Padding\nfrom rich.panel import Panel\nfrom rich.syntax import Syntax\nfrom rich.text import Text\n\nfrom ..suggestions import get_suggestion\nfrom ._error_tools import friendly_list\nfrom .constants import VALID_PSEUDO_CLASSES\n\nif TYPE_CHECKING:\n from .types import CSSLocation\n\n\nclass TokenError(Exception):\n \"\"\"Error raised when the CSS cannot be tokenized (syntax error).\"\"\"\n\n def __init__(\n self,\n read_from: CSSLocation,\n code: str,\n start: tuple[int, int],\n message: str,\n end: tuple[int, int] | None = None,\n ) -> None:\n \"\"\"\n Args:\n read_from: The location where the CSS was read from.\n code: The code being parsed.\n start: Line and column number of the error (1-indexed).\n message: A message associated with the error.\n end: End location of token (1-indexed), or None if not known.\n \"\"\"\n\n self.read_from = read_from\n self.code = code\n self.start = start\n self.end = end or start\n super().__init__(message)\n\n def _get_snippet(self) -> Panel:\n \"\"\"Get a short snippet of code around a given line number.\n\n Returns:\n A renderable.\n \"\"\"\n line_no = self.start[0]\n # TODO: Highlight column number\n syntax = Syntax(\n self.code,\n lexer=\"scss\",\n theme=\"ansi_light\",\n line_numbers=True,\n indent_guides=True,\n line_range=(max(0, line_no - 2), line_no + 2),\n highlight_lines={line_no},\n )\n syntax.stylize_range(\n \"reverse bold\",\n (self.start[0], self.start[1] - 1),\n (self.end[0], self.end[1] - 1),\n )\n return Panel(syntax, border_style=\"red\")\n\n def __rich__(self) -> RenderableType:\n highlighter = ReprHighlighter()\n errors: list[RenderableType] = []\n\n message = str(self)\n errors.append(Text(\" Error in stylesheet:\", style=\"bold red\"))\n\n line_no, col_no = self.start\n\n path, widget_variable = self.read_from\n if widget_variable:\n css_location = f\" {path}, {widget_variable}:{line_no}:{col_no}\"\n else:\n css_location = f\" {path}:{line_no}:{col_no}\"\n errors.append(highlighter(css_location))\n errors.append(self._get_snippet())\n\n final_message = \"\\n\".join(\n f\"\u2022 {message_part.strip()}\" for message_part in message.split(\";\")\n )\n errors.append(\n Padding(\n highlighter(\n Text(final_message, \"red\"),\n ),\n pad=(0, 1),\n )\n )\n\n return Group(*errors)\n\n\nclass EOFError(TokenError):\n pass\n\n\nclass Expect:\n def __init__(self, **tokens: str) -> None:\n self.names = list(tokens.keys())\n self.regexes = list(tokens.values())\n self._regex = re.compile(\n \"(\"\n + \"|\".join(f\"(?P<{name}>{regex})\" for name, regex in tokens.items())\n + \")\"\n )\n self.match = self._regex.match\n self.search = self._regex.search\n self._expect_eof = False\n\n def expect_eof(self, eof: bool) -> Expect:\n self._expect_eof = eof\n return self\n\n def __rich_repr__(self) -> rich.repr.Result:\n yield from zip(self.names, self.regexes)\n\n\nclass ReferencedBy(NamedTuple):\n name: str\n location: 
tuple[int, int]\n length: int\n code: str\n\n\[email protected]\nclass Token(NamedTuple):\n name: str\n value: str\n read_from: CSSLocation\n code: str\n location: tuple[int, int]\n \"\"\"Token starting location, 0-indexed.\"\"\"\n referenced_by: ReferencedBy | None = None\n\n @property\n def start(self) -> tuple[int, int]:\n \"\"\"Start line and column (1-indexed).\"\"\"\n line, offset = self.location\n return (line + 1, offset + 1)\n\n @property\n def end(self) -> tuple[int, int]:\n \"\"\"End line and column (1-indexed).\"\"\"\n line, offset = self.location\n return (line + 1, offset + len(self.value) + 1)\n\n def with_reference(self, by: ReferencedBy | None) -> \"Token\":\n \"\"\"Return a copy of the Token, with reference information attached.\n This is used for variable substitution, where a variable reference\n can refer to tokens which were defined elsewhere. With the additional\n ReferencedBy data attached, we can track where the token we are referring\n to is used.\n \"\"\"\n return Token(\n name=self.name,\n value=self.value,\n read_from=self.read_from,\n code=self.code,\n location=self.location,\n referenced_by=by,\n )\n\n def __str__(self) -> str:\n return self.value\n\n def __rich_repr__(self) -> rich.repr.Result:\n yield \"name\", self.name\n yield \"value\", self.value\n yield (\n \"read_from\",\n self.read_from[0] if not self.read_from[1] else self.read_from,\n )\n yield \"code\", self.code if len(self.code) < 40 else self.code[:40] + \"...\"\n yield \"location\", self.location\n yield \"referenced_by\", self.referenced_by, None\n\n\nclass Tokenizer:\n def __init__(self, text: str, read_from: CSSLocation = (\"\", \"\")) -> None:\n self.read_from = read_from\n self.code = text\n self.lines = text.splitlines(keepends=True)\n self.line_no = 0\n self.col_no = 0\n\n def get_token(self, expect: Expect) -> Token:\n line_no = self.line_no\n col_no = self.col_no\n if line_no >= len(self.lines):\n if expect._expect_eof:\n return Token(\n \"eof\",\n \"\",\n self.read_from,\n self.code,\n (line_no, col_no),\n None,\n )\n else:\n raise EOFError(\n self.read_from,\n self.code,\n (line_no + 1, col_no + 1),\n \"Unexpected end of file\",\n )\n line = self.lines[line_no]\n match = expect.match(line, col_no)\n if match is None:\n expected = friendly_list(\" \".join(name.split(\"_\")) for name in expect.names)\n message = f\"Expected one of {expected}.; Did you forget a semicolon at the end of a line?\"\n raise TokenError(\n self.read_from,\n self.code,\n (line_no + 1, col_no + 1),\n message,\n )\n iter_groups = iter(match.groups())\n\n next(iter_groups)\n\n for name, value in zip(expect.names, iter_groups):\n if value is not None:\n break\n else:\n # For MyPy's benefit\n raise AssertionError(\"can't reach here\")\n\n token = Token(\n name,\n value,\n self.read_from,\n self.code,\n (line_no, col_no),\n referenced_by=None,\n )\n\n if (\n token.name == \"pseudo_class\"\n and token.value.strip(\":\") not in VALID_PSEUDO_CLASSES\n ):\n pseudo_class = token.value.strip(\":\")\n suggestion = get_suggestion(pseudo_class, list(VALID_PSEUDO_CLASSES))\n all_valid = f\"must be one of {friendly_list(VALID_PSEUDO_CLASSES)}\"\n if suggestion:\n raise TokenError(\n self.read_from,\n self.code,\n (line_no + 1, col_no + 1),\n f\"unknown pseudo-class {pseudo_class!r}; did you mean {suggestion!r}?; {all_valid}\",\n )\n else:\n raise TokenError(\n self.read_from,\n self.code,\n (line_no + 1, col_no + 1),\n f\"unknown pseudo-class {pseudo_class!r}; {all_valid}\",\n )\n\n col_no += len(value)\n if col_no >= 
len(line):\n line_no += 1\n col_no = 0\n self.line_no = line_no\n self.col_no = col_no\n return token\n\n def skip_to(self, expect: Expect) -> Token:\n line_no = self.line_no\n col_no = self.col_no\n\n while True:\n if line_no >= len(self.lines):\n raise EOFError(\n self.read_from,\n self.code,\n (line_no, col_no),\n \"Unexpected end of file\",\n )\n line = self.lines[line_no]\n match = expect.search(line, col_no)\n\n if match is None:\n line_no += 1\n col_no = 0\n else:\n self.line_no = line_no\n self.col_no = match.span(0)[0]\n return self.get_token(expect)\n", "path": "src/textual/css/tokenizer.py"}]}
| 3,349 | 826 |
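Note on the record above: the tokenizer fix settles on one convention — `Token.location` stays 0-indexed internally, and positions are converted to 1-indexed only where they reach the user (`Token.start`, `Token.end`, and the `(line_no + 1, col_no + 1)` passed to `TokenError`). A minimal, self-contained sketch of that boundary conversion, illustrative only and using a hypothetical helper name rather than anything from the Textual code base:

```python
# Convention from the fix above: store 0-indexed positions, display 1-indexed ones.

def to_user_position(location: tuple[int, int]) -> tuple[int, int]:
    """Convert an internal 0-indexed (line, column) pair to the 1-indexed
    form an editor or error panel would display."""
    line, col = location
    return (line + 1, col + 1)


assert to_user_position((0, 0)) == (1, 1)   # first character of the file
assert to_user_position((1, 3)) == (2, 4)   # shown as line 2, column 4
```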
gh_patches_debug_22114
|
rasdani/github-patches
|
git_diff
|
deepchecks__deepchecks-550
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG] Receiving FutureWarning for each label on Calibration Score check
**Describe the bug**
Receiving FutureWarning for each label on Calibration Score
**To Reproduce**
Run a categorical Dataset on Calibration Score check
**Expected behavior**
No warnings
**Screenshots**

**Environment (please complete the following information):**
- OS: mac
- Python Version: 3.8
- Deepchecks Version: 0.2.1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `deepchecks/checks/performance/calibration_score.py`
Content:
```
1 # ----------------------------------------------------------------------------
2 # Copyright (C) 2021 Deepchecks (https://www.deepchecks.com)
3 #
4 # This file is part of Deepchecks.
5 # Deepchecks is distributed under the terms of the GNU Affero General
6 # Public License (version 3 or later).
7 # You should have received a copy of the GNU Affero General Public License
8 # along with Deepchecks. If not, see <http://www.gnu.org/licenses/>.
9 # ----------------------------------------------------------------------------
10 #
11 """The calibration score check module."""
12 from sklearn.base import BaseEstimator
13 from sklearn.calibration import calibration_curve
14 from sklearn.metrics import brier_score_loss
15 import plotly.graph_objects as go
16
17 from deepchecks import Dataset, CheckResult, SingleDatasetBaseCheck
18 from deepchecks.utils.metrics import ModelType, task_type_validation
19
20
21 __all__ = ['CalibrationScore']
22
23
24 class CalibrationScore(SingleDatasetBaseCheck):
25 """Calculate the calibration curve with brier score for each class."""
26
27 def run(self, dataset: Dataset, model: BaseEstimator) -> CheckResult:
28 """Run check.
29
30 Args:
31 model (BaseEstimator): A scikit-learn-compatible fitted estimator instance
32 dataset: a Dataset object
33 Returns:
34 CheckResult: value is dictionary of class and it's brier score, displays the calibration curve
35 graph with each class
36
37 Raises:
38 DeepchecksValueError: If the object is not a Dataset instance with a label
39 """
40 return self._calibration_score(dataset, model)
41
42 def _calibration_score(self, dataset: Dataset, model):
43 Dataset.validate_dataset(dataset)
44 dataset.validate_label()
45 task_type_validation(model, dataset, [ModelType.MULTICLASS, ModelType.BINARY])
46
47 ds_x = dataset.features_columns
48 ds_y = dataset.label_col
49 # Expect predict_proba to return in order of the sorted classes.
50 y_pred = model.predict_proba(ds_x)
51
52 briers_scores = {}
53
54 if len(dataset.classes) == 2:
55 briers_scores[0] = brier_score_loss(ds_y, y_pred[:, 1], pos_label=dataset.classes[1])
56 else:
57 for class_index, class_name in enumerate(dataset.classes):
58 prob_pos = y_pred[:, class_index]
59 clf_score = brier_score_loss(ds_y == class_name, prob_pos, pos_label=class_name)
60 briers_scores[class_name] = clf_score
61
62 fig = go.Figure()
63
64 fig.add_trace(go.Scatter(
65 x=[0, 1],
66 y=[0, 1],
67 line_width=2, line_dash='dash',
68 name='Perfectly calibrated',
69 ))
70
71 if len(dataset.classes) == 2:
72 fraction_of_positives, mean_predicted_value = calibration_curve(ds_y, y_pred[:, 1], n_bins=10)
73
74 fig.add_trace(go.Scatter(
75 x=mean_predicted_value,
76 y=fraction_of_positives,
77 mode='lines+markers',
78 name=f'(brier:{briers_scores[0]:9.4f})',
79 ))
80 else:
81 for class_index, class_name in enumerate(dataset.classes):
82 prob_pos = y_pred[:, class_index]
83
84 fraction_of_positives, mean_predicted_value = \
85 calibration_curve(ds_y == class_name, prob_pos, n_bins=10)
86
87 fig.add_trace(go.Scatter(
88 x=mean_predicted_value,
89 y=fraction_of_positives,
90 mode='lines+markers',
91 name=f'{class_name} (brier:{briers_scores[class_name]:9.4f})',
92 ))
93
94 fig.update_layout(title_text='Calibration plots (reliability curve)',
95 width=700, height=500)
96 fig.update_yaxes(title='Fraction of positives')
97 fig.update_xaxes(title='Mean predicted value')
98
99 calibration_text = 'Calibration curves (also known as reliability diagrams) compare how well the ' \
100 'probabilistic predictions of a binary classifier are calibrated. It plots the true ' \
101 'frequency of the positive label against its predicted probability, for binned predictions.'
102 brier_text = 'The Brier score metric may be used to assess how well a classifier is calibrated. For more ' \
103 'info, please visit https://en.wikipedia.org/wiki/Brier_score'
104 return CheckResult(briers_scores, header='Calibration Metric',
105 display=[calibration_text, fig, brier_text])
106
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/deepchecks/checks/performance/calibration_score.py b/deepchecks/checks/performance/calibration_score.py
--- a/deepchecks/checks/performance/calibration_score.py
+++ b/deepchecks/checks/performance/calibration_score.py
@@ -17,7 +17,6 @@
from deepchecks import Dataset, CheckResult, SingleDatasetBaseCheck
from deepchecks.utils.metrics import ModelType, task_type_validation
-
__all__ = ['CalibrationScore']
@@ -52,11 +51,11 @@
briers_scores = {}
if len(dataset.classes) == 2:
- briers_scores[0] = brier_score_loss(ds_y, y_pred[:, 1], pos_label=dataset.classes[1])
+ briers_scores[0] = brier_score_loss(ds_y == dataset.classes[1], y_pred[:, 1])
else:
for class_index, class_name in enumerate(dataset.classes):
prob_pos = y_pred[:, class_index]
- clf_score = brier_score_loss(ds_y == class_name, prob_pos, pos_label=class_name)
+ clf_score = brier_score_loss(ds_y == class_name, prob_pos)
briers_scores[class_name] = clf_score
fig = go.Figure()
|
{"golden_diff": "diff --git a/deepchecks/checks/performance/calibration_score.py b/deepchecks/checks/performance/calibration_score.py\n--- a/deepchecks/checks/performance/calibration_score.py\n+++ b/deepchecks/checks/performance/calibration_score.py\n@@ -17,7 +17,6 @@\n from deepchecks import Dataset, CheckResult, SingleDatasetBaseCheck\n from deepchecks.utils.metrics import ModelType, task_type_validation\n \n-\n __all__ = ['CalibrationScore']\n \n \n@@ -52,11 +51,11 @@\n briers_scores = {}\n \n if len(dataset.classes) == 2:\n- briers_scores[0] = brier_score_loss(ds_y, y_pred[:, 1], pos_label=dataset.classes[1])\n+ briers_scores[0] = brier_score_loss(ds_y == dataset.classes[1], y_pred[:, 1])\n else:\n for class_index, class_name in enumerate(dataset.classes):\n prob_pos = y_pred[:, class_index]\n- clf_score = brier_score_loss(ds_y == class_name, prob_pos, pos_label=class_name)\n+ clf_score = brier_score_loss(ds_y == class_name, prob_pos)\n briers_scores[class_name] = clf_score\n \n fig = go.Figure()\n", "issue": "[BUG] Receiving FutureWarning for each label on Calibration Score check\n**Describe the bug**\r\nReceiving FutureWarning for each label on Calibration Score\r\n\r\n**To Reproduce**\r\nRun a categorical Dataset on Calibration Score check\r\n\r\n**Expected behavior**\r\nNo warnings\r\n\r\n**Screenshots**\r\n\r\n\r\n\r\n**Environment (please complete the following information):**\r\n - OS: mac\r\n - Python Version: 3.8\r\n - Deepchecks Version: 0.2.1\r\n\n", "before_files": [{"content": "# ----------------------------------------------------------------------------\n# Copyright (C) 2021 Deepchecks (https://www.deepchecks.com)\n#\n# This file is part of Deepchecks.\n# Deepchecks is distributed under the terms of the GNU Affero General\n# Public License (version 3 or later).\n# You should have received a copy of the GNU Affero General Public License\n# along with Deepchecks. 
If not, see <http://www.gnu.org/licenses/>.\n# ----------------------------------------------------------------------------\n#\n\"\"\"The calibration score check module.\"\"\"\nfrom sklearn.base import BaseEstimator\nfrom sklearn.calibration import calibration_curve\nfrom sklearn.metrics import brier_score_loss\nimport plotly.graph_objects as go\n\nfrom deepchecks import Dataset, CheckResult, SingleDatasetBaseCheck\nfrom deepchecks.utils.metrics import ModelType, task_type_validation\n\n\n__all__ = ['CalibrationScore']\n\n\nclass CalibrationScore(SingleDatasetBaseCheck):\n \"\"\"Calculate the calibration curve with brier score for each class.\"\"\"\n\n def run(self, dataset: Dataset, model: BaseEstimator) -> CheckResult:\n \"\"\"Run check.\n\n Args:\n model (BaseEstimator): A scikit-learn-compatible fitted estimator instance\n dataset: a Dataset object\n Returns:\n CheckResult: value is dictionary of class and it's brier score, displays the calibration curve\n graph with each class\n\n Raises:\n DeepchecksValueError: If the object is not a Dataset instance with a label\n \"\"\"\n return self._calibration_score(dataset, model)\n\n def _calibration_score(self, dataset: Dataset, model):\n Dataset.validate_dataset(dataset)\n dataset.validate_label()\n task_type_validation(model, dataset, [ModelType.MULTICLASS, ModelType.BINARY])\n\n ds_x = dataset.features_columns\n ds_y = dataset.label_col\n # Expect predict_proba to return in order of the sorted classes.\n y_pred = model.predict_proba(ds_x)\n\n briers_scores = {}\n\n if len(dataset.classes) == 2:\n briers_scores[0] = brier_score_loss(ds_y, y_pred[:, 1], pos_label=dataset.classes[1])\n else:\n for class_index, class_name in enumerate(dataset.classes):\n prob_pos = y_pred[:, class_index]\n clf_score = brier_score_loss(ds_y == class_name, prob_pos, pos_label=class_name)\n briers_scores[class_name] = clf_score\n\n fig = go.Figure()\n\n fig.add_trace(go.Scatter(\n x=[0, 1],\n y=[0, 1],\n line_width=2, line_dash='dash',\n name='Perfectly calibrated',\n ))\n\n if len(dataset.classes) == 2:\n fraction_of_positives, mean_predicted_value = calibration_curve(ds_y, y_pred[:, 1], n_bins=10)\n\n fig.add_trace(go.Scatter(\n x=mean_predicted_value,\n y=fraction_of_positives,\n mode='lines+markers',\n name=f'(brier:{briers_scores[0]:9.4f})',\n ))\n else:\n for class_index, class_name in enumerate(dataset.classes):\n prob_pos = y_pred[:, class_index]\n\n fraction_of_positives, mean_predicted_value = \\\n calibration_curve(ds_y == class_name, prob_pos, n_bins=10)\n\n fig.add_trace(go.Scatter(\n x=mean_predicted_value,\n y=fraction_of_positives,\n mode='lines+markers',\n name=f'{class_name} (brier:{briers_scores[class_name]:9.4f})',\n ))\n\n fig.update_layout(title_text='Calibration plots (reliability curve)',\n width=700, height=500)\n fig.update_yaxes(title='Fraction of positives')\n fig.update_xaxes(title='Mean predicted value')\n\n calibration_text = 'Calibration curves (also known as reliability diagrams) compare how well the ' \\\n 'probabilistic predictions of a binary classifier are calibrated. It plots the true ' \\\n 'frequency of the positive label against its predicted probability, for binned predictions.'\n brier_text = 'The Brier score metric may be used to assess how well a classifier is calibrated. 
For more ' \\\n 'info, please visit https://en.wikipedia.org/wiki/Brier_score'\n return CheckResult(briers_scores, header='Calibration Metric',\n display=[calibration_text, fig, brier_text])\n", "path": "deepchecks/checks/performance/calibration_score.py"}], "after_files": [{"content": "# ----------------------------------------------------------------------------\n# Copyright (C) 2021 Deepchecks (https://www.deepchecks.com)\n#\n# This file is part of Deepchecks.\n# Deepchecks is distributed under the terms of the GNU Affero General\n# Public License (version 3 or later).\n# You should have received a copy of the GNU Affero General Public License\n# along with Deepchecks. If not, see <http://www.gnu.org/licenses/>.\n# ----------------------------------------------------------------------------\n#\n\"\"\"The calibration score check module.\"\"\"\nfrom sklearn.base import BaseEstimator\nfrom sklearn.calibration import calibration_curve\nfrom sklearn.metrics import brier_score_loss\nimport plotly.graph_objects as go\n\nfrom deepchecks import Dataset, CheckResult, SingleDatasetBaseCheck\nfrom deepchecks.utils.metrics import ModelType, task_type_validation\n\n__all__ = ['CalibrationScore']\n\n\nclass CalibrationScore(SingleDatasetBaseCheck):\n \"\"\"Calculate the calibration curve with brier score for each class.\"\"\"\n\n def run(self, dataset: Dataset, model: BaseEstimator) -> CheckResult:\n \"\"\"Run check.\n\n Args:\n model (BaseEstimator): A scikit-learn-compatible fitted estimator instance\n dataset: a Dataset object\n Returns:\n CheckResult: value is dictionary of class and it's brier score, displays the calibration curve\n graph with each class\n\n Raises:\n DeepchecksValueError: If the object is not a Dataset instance with a label\n \"\"\"\n return self._calibration_score(dataset, model)\n\n def _calibration_score(self, dataset: Dataset, model):\n Dataset.validate_dataset(dataset)\n dataset.validate_label()\n task_type_validation(model, dataset, [ModelType.MULTICLASS, ModelType.BINARY])\n\n ds_x = dataset.features_columns\n ds_y = dataset.label_col\n # Expect predict_proba to return in order of the sorted classes.\n y_pred = model.predict_proba(ds_x)\n\n briers_scores = {}\n\n if len(dataset.classes) == 2:\n briers_scores[0] = brier_score_loss(ds_y == dataset.classes[1], y_pred[:, 1])\n else:\n for class_index, class_name in enumerate(dataset.classes):\n prob_pos = y_pred[:, class_index]\n clf_score = brier_score_loss(ds_y == class_name, prob_pos)\n briers_scores[class_name] = clf_score\n\n fig = go.Figure()\n\n fig.add_trace(go.Scatter(\n x=[0, 1],\n y=[0, 1],\n line_width=2, line_dash='dash',\n name='Perfectly calibrated',\n ))\n\n if len(dataset.classes) == 2:\n fraction_of_positives, mean_predicted_value = calibration_curve(ds_y, y_pred[:, 1], n_bins=10)\n\n fig.add_trace(go.Scatter(\n x=mean_predicted_value,\n y=fraction_of_positives,\n mode='lines+markers',\n name=f'(brier:{briers_scores[0]:9.4f})',\n ))\n else:\n for class_index, class_name in enumerate(dataset.classes):\n prob_pos = y_pred[:, class_index]\n\n fraction_of_positives, mean_predicted_value = \\\n calibration_curve(ds_y == class_name, prob_pos, n_bins=10)\n\n fig.add_trace(go.Scatter(\n x=mean_predicted_value,\n y=fraction_of_positives,\n mode='lines+markers',\n name=f'{class_name} (brier:{briers_scores[class_name]:9.4f})',\n ))\n\n fig.update_layout(title_text='Calibration plots (reliability curve)',\n width=700, height=500)\n fig.update_yaxes(title='Fraction of positives')\n fig.update_xaxes(title='Mean predicted 
value')\n\n calibration_text = 'Calibration curves (also known as reliability diagrams) compare how well the ' \\\n 'probabilistic predictions of a binary classifier are calibrated. It plots the true ' \\\n 'frequency of the positive label against its predicted probability, for binned predictions.'\n brier_text = 'The Brier score metric may be used to assess how well a classifier is calibrated. For more ' \\\n 'info, please visit https://en.wikipedia.org/wiki/Brier_score'\n return CheckResult(briers_scores, header='Calibration Metric',\n display=[calibration_text, fig, brier_text])\n", "path": "deepchecks/checks/performance/calibration_score.py"}]}
| 1,594 | 279 |
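Note on the record above: the calibration-score fix avoids the scikit-learn FutureWarning by binarising the target itself (`ds_y == class_name`) instead of passing `pos_label`; once the ground truth is a boolean array, `brier_score_loss` has nothing ambiguous to warn about. A rough sketch of the same pattern on a generic fitted classifier — `model`, `X`, and `y` are assumed names, not the actual deepchecks code:

```python
import numpy as np
from sklearn.metrics import brier_score_loss


def per_class_brier(model, X, y) -> dict:
    """Brier score per class, computed against a boolean target for each class."""
    proba = model.predict_proba(X)            # columns follow model.classes_
    scores = {}
    for idx, cls in enumerate(model.classes_):
        is_cls = np.asarray(y) == cls         # boolean y_true, so no pos_label is needed
        scores[cls] = brier_score_loss(is_cls, proba[:, idx])
    return scores
```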
gh_patches_debug_5367
|
rasdani/github-patches
|
git_diff
|
comic__grand-challenge.org-2344
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
'NoneType' object has no attribute 'values'
Sentry issue https://sentry.io/organizations/grand-challenge/issues/3127690895/?project=303639&query=is%3Aunresolved
```
slugs = {slug for viewport in mapping.values() for slug in viewport}
```
Added in https://github.com/comic/grand-challenge.org/pull/2322
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `app/grandchallenge/hanging_protocols/forms.py`
Content:
```
1 from django import forms
2
3 from grandchallenge.components.models import ComponentInterface
4 from grandchallenge.core.forms import SaveFormInitMixin
5 from grandchallenge.core.widgets import JSONEditorWidget
6 from grandchallenge.hanging_protocols.models import (
7 HANGING_PROTOCOL_SCHEMA,
8 VIEW_CONTENT_SCHEMA,
9 HangingProtocol,
10 )
11
12
13 class HangingProtocolForm(SaveFormInitMixin, forms.ModelForm):
14 class Meta:
15 model = HangingProtocol
16 fields = ("title", "description", "json")
17 widgets = {"json": JSONEditorWidget(schema=HANGING_PROTOCOL_SCHEMA)}
18 help_texts = {
19 "json": (
20 "To display a single image in full size, define the "
21 "protocol as follows: "
22 '[{"viewport_name": "main", "x": 0,"y": 0,"w": 1,"h": 1,'
23 '"fullsizable": true,"draggable": false,"selectable": true,'
24 '"order": 0}]'
25 )
26 }
27
28
29 class ViewContentMixin:
30 def clean_view_content(self):
31 mapping = self.cleaned_data["view_content"]
32 hanging_protocol = self.cleaned_data["hanging_protocol"]
33 if mapping and not hanging_protocol:
34 self.add_error(
35 error="Please select a hanging protocol before filling this field.",
36 field="view_content",
37 )
38
39 if mapping and hanging_protocol:
40 if set(mapping.keys()) != {
41 x["viewport_name"] for x in hanging_protocol.json
42 }:
43 self.add_error(
44 error=(
45 "Image ports in view_content do not match "
46 "those in the selected hanging protocol."
47 ),
48 field="view_content",
49 )
50
51 slugs = {slug for viewport in mapping.values() for slug in viewport}
52 unknown = []
53 for slug in slugs:
54 if not ComponentInterface.objects.filter(slug=slug).exists():
55 unknown.append(slug)
56 if len(unknown) > 0:
57 self.add_error(
58 error=f"Unkown slugs in view_content: {', '.join(unknown)}",
59 field="view_content",
60 )
61
62 return mapping
63
64 class Meta:
65 widgets = {
66 "view_content": JSONEditorWidget(schema=VIEW_CONTENT_SCHEMA),
67 }
68 help_texts = {
69 "view_content": (
70 "Indicate which Component Interfaces need to be displayed in "
71 'which image port. E.g. {"main": ["interface1"]}. The first '
72 "item in the list of interfaces will be the main image in "
73 "the image port. The first overlay type interface thereafter "
74 "will be rendered as an overlay. For now, any other items "
75 "will be ignored by the viewer."
76 )
77 }
78
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/app/grandchallenge/hanging_protocols/forms.py b/app/grandchallenge/hanging_protocols/forms.py
--- a/app/grandchallenge/hanging_protocols/forms.py
+++ b/app/grandchallenge/hanging_protocols/forms.py
@@ -28,7 +28,7 @@
class ViewContentMixin:
def clean_view_content(self):
- mapping = self.cleaned_data["view_content"]
+ mapping = self.cleaned_data["view_content"] or {}
hanging_protocol = self.cleaned_data["hanging_protocol"]
if mapping and not hanging_protocol:
self.add_error(
|
{"golden_diff": "diff --git a/app/grandchallenge/hanging_protocols/forms.py b/app/grandchallenge/hanging_protocols/forms.py\n--- a/app/grandchallenge/hanging_protocols/forms.py\n+++ b/app/grandchallenge/hanging_protocols/forms.py\n@@ -28,7 +28,7 @@\n \r\n class ViewContentMixin:\r\n def clean_view_content(self):\r\n- mapping = self.cleaned_data[\"view_content\"]\r\n+ mapping = self.cleaned_data[\"view_content\"] or {}\r\n hanging_protocol = self.cleaned_data[\"hanging_protocol\"]\r\n if mapping and not hanging_protocol:\r\n self.add_error(\n", "issue": "'NoneType' object has no attribute 'values'\nSentry issue https://sentry.io/organizations/grand-challenge/issues/3127690895/?project=303639&query=is%3Aunresolved\r\n\r\n```\r\nslugs = {slug for viewport in mapping.values() for slug in viewport}\r\n```\r\n\r\nAdded in https://github.com/comic/grand-challenge.org/pull/2322\n", "before_files": [{"content": "from django import forms\r\n\r\nfrom grandchallenge.components.models import ComponentInterface\r\nfrom grandchallenge.core.forms import SaveFormInitMixin\r\nfrom grandchallenge.core.widgets import JSONEditorWidget\r\nfrom grandchallenge.hanging_protocols.models import (\r\n HANGING_PROTOCOL_SCHEMA,\r\n VIEW_CONTENT_SCHEMA,\r\n HangingProtocol,\r\n)\r\n\r\n\r\nclass HangingProtocolForm(SaveFormInitMixin, forms.ModelForm):\r\n class Meta:\r\n model = HangingProtocol\r\n fields = (\"title\", \"description\", \"json\")\r\n widgets = {\"json\": JSONEditorWidget(schema=HANGING_PROTOCOL_SCHEMA)}\r\n help_texts = {\r\n \"json\": (\r\n \"To display a single image in full size, define the \"\r\n \"protocol as follows: \"\r\n '[{\"viewport_name\": \"main\", \"x\": 0,\"y\": 0,\"w\": 1,\"h\": 1,'\r\n '\"fullsizable\": true,\"draggable\": false,\"selectable\": true,'\r\n '\"order\": 0}]'\r\n )\r\n }\r\n\r\n\r\nclass ViewContentMixin:\r\n def clean_view_content(self):\r\n mapping = self.cleaned_data[\"view_content\"]\r\n hanging_protocol = self.cleaned_data[\"hanging_protocol\"]\r\n if mapping and not hanging_protocol:\r\n self.add_error(\r\n error=\"Please select a hanging protocol before filling this field.\",\r\n field=\"view_content\",\r\n )\r\n\r\n if mapping and hanging_protocol:\r\n if set(mapping.keys()) != {\r\n x[\"viewport_name\"] for x in hanging_protocol.json\r\n }:\r\n self.add_error(\r\n error=(\r\n \"Image ports in view_content do not match \"\r\n \"those in the selected hanging protocol.\"\r\n ),\r\n field=\"view_content\",\r\n )\r\n\r\n slugs = {slug for viewport in mapping.values() for slug in viewport}\r\n unknown = []\r\n for slug in slugs:\r\n if not ComponentInterface.objects.filter(slug=slug).exists():\r\n unknown.append(slug)\r\n if len(unknown) > 0:\r\n self.add_error(\r\n error=f\"Unkown slugs in view_content: {', '.join(unknown)}\",\r\n field=\"view_content\",\r\n )\r\n\r\n return mapping\r\n\r\n class Meta:\r\n widgets = {\r\n \"view_content\": JSONEditorWidget(schema=VIEW_CONTENT_SCHEMA),\r\n }\r\n help_texts = {\r\n \"view_content\": (\r\n \"Indicate which Component Interfaces need to be displayed in \"\r\n 'which image port. E.g. {\"main\": [\"interface1\"]}. The first '\r\n \"item in the list of interfaces will be the main image in \"\r\n \"the image port. The first overlay type interface thereafter \"\r\n \"will be rendered as an overlay. 
For now, any other items \"\r\n \"will be ignored by the viewer.\"\r\n )\r\n }\r\n", "path": "app/grandchallenge/hanging_protocols/forms.py"}], "after_files": [{"content": "from django import forms\r\n\r\nfrom grandchallenge.components.models import ComponentInterface\r\nfrom grandchallenge.core.forms import SaveFormInitMixin\r\nfrom grandchallenge.core.widgets import JSONEditorWidget\r\nfrom grandchallenge.hanging_protocols.models import (\r\n HANGING_PROTOCOL_SCHEMA,\r\n VIEW_CONTENT_SCHEMA,\r\n HangingProtocol,\r\n)\r\n\r\n\r\nclass HangingProtocolForm(SaveFormInitMixin, forms.ModelForm):\r\n class Meta:\r\n model = HangingProtocol\r\n fields = (\"title\", \"description\", \"json\")\r\n widgets = {\"json\": JSONEditorWidget(schema=HANGING_PROTOCOL_SCHEMA)}\r\n help_texts = {\r\n \"json\": (\r\n \"To display a single image in full size, define the \"\r\n \"protocol as follows: \"\r\n '[{\"viewport_name\": \"main\", \"x\": 0,\"y\": 0,\"w\": 1,\"h\": 1,'\r\n '\"fullsizable\": true,\"draggable\": false,\"selectable\": true,'\r\n '\"order\": 0}]'\r\n )\r\n }\r\n\r\n\r\nclass ViewContentMixin:\r\n def clean_view_content(self):\r\n mapping = self.cleaned_data[\"view_content\"] or {}\r\n hanging_protocol = self.cleaned_data[\"hanging_protocol\"]\r\n if mapping and not hanging_protocol:\r\n self.add_error(\r\n error=\"Please select a hanging protocol before filling this field.\",\r\n field=\"view_content\",\r\n )\r\n\r\n if mapping and hanging_protocol:\r\n if set(mapping.keys()) != {\r\n x[\"viewport_name\"] for x in hanging_protocol.json\r\n }:\r\n self.add_error(\r\n error=(\r\n \"Image ports in view_content do not match \"\r\n \"those in the selected hanging protocol.\"\r\n ),\r\n field=\"view_content\",\r\n )\r\n\r\n slugs = {slug for viewport in mapping.values() for slug in viewport}\r\n unknown = []\r\n for slug in slugs:\r\n if not ComponentInterface.objects.filter(slug=slug).exists():\r\n unknown.append(slug)\r\n if len(unknown) > 0:\r\n self.add_error(\r\n error=f\"Unkown slugs in view_content: {', '.join(unknown)}\",\r\n field=\"view_content\",\r\n )\r\n\r\n return mapping\r\n\r\n class Meta:\r\n widgets = {\r\n \"view_content\": JSONEditorWidget(schema=VIEW_CONTENT_SCHEMA),\r\n }\r\n help_texts = {\r\n \"view_content\": (\r\n \"Indicate which Component Interfaces need to be displayed in \"\r\n 'which image port. E.g. {\"main\": [\"interface1\"]}. The first '\r\n \"item in the list of interfaces will be the main image in \"\r\n \"the image port. The first overlay type interface thereafter \"\r\n \"will be rendered as an overlay. For now, any other items \"\r\n \"will be ignored by the viewer.\"\r\n )\r\n }\r\n", "path": "app/grandchallenge/hanging_protocols/forms.py"}]}
| 1,080 | 126 |
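Note on the record above: the one-line grand-challenge fix is the usual guard for an optional JSON field — normalise `None` to an empty mapping before iterating over `.values()`. A generic sketch of the idea with hypothetical names, not the real Django form class:

```python
def collect_slugs(view_content: dict | None) -> set[str]:
    """Gather interface slugs from an optional viewport -> slugs mapping."""
    mapping = view_content or {}              # a missing field behaves like an empty one
    return {slug for slugs in mapping.values() for slug in slugs}


assert collect_slugs(None) == set()
assert collect_slugs({"main": ["ct-image", "overlay"]}) == {"ct-image", "overlay"}
```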
gh_patches_debug_12704
|
rasdani/github-patches
|
git_diff
|
pypa__pip-6725
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
manpage documentation is missing all subcommands
Initially reported in Arch Linux as: https://bugs.archlinux.org/task/63223
To reproduce:
```
git clone https://github.com/pypa/pip/
cd pip/docs
PYTHONPATH=$PWD/../src/ sphinx-build -W -b man -d build/doctrees/man man build/man -c html
```
Look in build/man and you will see only one manpage: pip.1
Really quick reproducer: look at a recent Travis CI build for the TOXENV=docs results, for example https://travis-ci.org/pypa/pip/jobs/559973823#L388, and see only one file being written out.
Expectation: There should be lots of manpages, one for each pip subcommand, and linux distro packages which install the docs/build/man/ directory to /usr/share/man/man1/ should be able to read all about pip's many excellent features in their offline documentation reader.
The cause of this breakage is https://github.com/pypa/pip/pull/5724, which reorganized the conf.py layout due to https://github.com/readthedocs/readthedocs.org/issues/1543 but did not adapt the somewhat hacky code to automatically add new entries.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/html/conf.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 #
3 # pip documentation build configuration file, created by
4 # sphinx-quickstart on Tue Apr 22 22:08:49 2008
5 #
6 # This file is execfile()d with the current directory set to its containing dir
7 #
8 # Note that not all possible configuration values are present in this
9 # autogenerated file.
10 #
11 # All configuration values have a default; values that are commented out
12 # serve to show the default.
13
14 import glob
15 import os
16 import re
17 import sys
18
19 on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
20
21 docs_dir = os.path.dirname(os.path.dirname(__file__))
22 # If extensions (or modules to document with autodoc) are in another directory,
23 # add these directories to sys.path here. If the directory is relative to the
24 # documentation root, use os.path.abspath to make it absolute, like shown here.
25 sys.path.insert(0, docs_dir)
26 # sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
27
28 # -- General configuration ----------------------------------------------------
29
30 # Add any Sphinx extension module names here, as strings. They can be
31 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
32 # extensions = ['sphinx.ext.autodoc']
33 extensions = ['sphinx.ext.extlinks', 'pip_sphinxext', 'sphinx.ext.intersphinx']
34
35 # intersphinx
36 intersphinx_cache_limit = 0
37 intersphinx_mapping = {
38 'pypug': ('https://packaging.python.org/', None),
39 'pypa': ('https://www.pypa.io/en/latest/', None),
40 }
41
42
43 # Add any paths that contain templates here, relative to this directory.
44 templates_path = []
45
46 # The suffix of source filenames.
47 source_suffix = '.rst'
48
49 # The encoding of source files.
50 # source_encoding = 'utf-8'
51
52 # The master toctree document.
53 master_doc = 'index'
54
55 # General information about the project.
56 project = 'pip'
57 copyright = '2008-2017, PyPA'
58
59 # The version info for the project you're documenting, acts as replacement for
60 # |version| and |release|, also used in various other places throughout the
61 # built documents.
62 #
63 # The short X.Y version.
64
65 version = release = 'dev'
66
67 # Readthedocs seems to install pip as an egg (via setup.py install) which
68 # is somehow resulting in "import pip" picking up an older copy of pip.
69 # Rather than trying to force RTD to install pip properly, we'll simply
70 # read the version direct from the __init__.py file. (Yes, this is
71 # fragile, but it works...)
72
73 pip_init = os.path.join(docs_dir, '..', 'src', 'pip', '__init__.py')
74 with open(pip_init) as f:
75 for line in f:
76 m = re.match(r'__version__ = "(.*)"', line)
77 if m:
78 __version__ = m.group(1)
79 # The short X.Y version.
80 version = '.'.join(__version__.split('.')[:2])
81 # The full version, including alpha/beta/rc tags.
82 release = __version__
83 break
84
85 # We have this here because readthedocs plays tricks sometimes and there seems
86 # to be a heisenbug, related to the version of pip discovered. This is here to
87 # help debug that if someone decides to do that in the future.
88 print(version)
89
90 # The language for content autogenerated by Sphinx. Refer to documentation
91 # for a list of supported languages.
92 # language = None
93
94 # There are two options for replacing |today|: either, you set today to some
95 # non-false value, then it is used:
96 # today = ''
97 # Else, today_fmt is used as the format for a strftime call.
98 today_fmt = '%B %d, %Y'
99
100 # List of documents that shouldn't be included in the build.
101 # unused_docs = []
102
103 # List of directories, relative to source directory, that shouldn't be searched
104 # for source files.
105 exclude_patterns = ['build/']
106
107 # The reST default role (used for this markup: `text`) to use for all documents
108 # default_role = None
109
110 # If true, '()' will be appended to :func: etc. cross-reference text.
111 # add_function_parentheses = True
112
113 # If true, the current module name will be prepended to all description
114 # unit titles (such as .. function::).
115 # add_module_names = True
116
117 # If true, sectionauthor and moduleauthor directives will be shown in the
118 # output. They are ignored by default.
119 # show_authors = False
120
121 # The name of the Pygments (syntax highlighting) style to use.
122 pygments_style = 'sphinx'
123
124 # A list of ignored prefixes for module index sorting.
125 # modindex_common_prefix = []
126
127 extlinks = {
128 'issue': ('https://github.com/pypa/pip/issues/%s', '#'),
129 'pull': ('https://github.com/pypa/pip/pull/%s', 'PR #'),
130 'pypi': ('https://pypi.org/project/%s', ''),
131 }
132
133 # -- Options for HTML output --------------------------------------------------
134
135 # The theme to use for HTML and HTML Help pages. Major themes that come with
136 # Sphinx are currently 'default' and 'sphinxdoc'.
137 html_theme = "pypa_theme"
138
139 # Theme options are theme-specific and customize the look and feel of a theme
140 # further. For a list of options available for each theme, see the
141 # documentation.
142 html_theme_options = {
143 'collapsiblesidebar': True,
144 'externalrefs': True,
145 'navigation_depth': 3,
146 'issues_url': 'https://github.com/pypa/pip/issues'
147 }
148
149 # Add any paths that contain custom themes here, relative to this directory.
150
151 # The name for this set of Sphinx documents. If None, it defaults to
152 # "<project> v<release> documentation".
153 # html_title = None
154
155 # A shorter title for the navigation bar. Default is the same as html_title.
156 # html_short_title = None
157
158 # The name of an image file (relative to this directory) to place at the top
159 # of the sidebar.
160 # html_logo = '_static/piplogo.png'
161
162 # The name of an image file (within the static path) to use as favicon of the
163 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
164 # pixels large.
165 # html_favicon = 'favicon.png'
166
167 # Add any paths that contain custom static files (such as style sheets) here,
168 # relative to this directory. They are copied after the builtin static files,
169 # so a file named "default.css" will overwrite the builtin "default.css".
170 html_static_path = []
171
172 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
173 # using the given strftime format.
174 html_last_updated_fmt = '%b %d, %Y'
175
176 # If true, the Docutils Smart Quotes transform (originally based on
177 # SmartyPants) will be used to convert characters like quotes and dashes
178 # to typographically correct entities. The default is True.
179 smartquotes = True
180
181 # This string, for use with Docutils 0.14 or later, customizes the
182 # SmartQuotes transform. The default of "qDe" converts normal quote
183 # characters ('"' and "'"), en and em dashes ("--" and "---"), and
184 # ellipses "...".
185 # For now, we disable the conversion of dashes so that long options
186 # like "--find-links" won't render as "-find-links" if included in the
187 # text in places where monospaced type can't be used. For example, backticks
188 # can't be used inside roles like :ref:`--no-index <--no-index>` because
189 # of nesting.
190 smartquotes_action = "qe"
191
192 # Custom sidebar templates, maps document names to template names.
193 html_sidebars = {
194 '**': ['localtoc.html', 'relations.html'],
195 'index': ['localtoc.html']
196 }
197
198 # Additional templates that should be rendered to pages, maps page names to
199 # template names.
200 # html_additional_pages = {}
201
202 # If false, no module index is generated.
203 html_use_modindex = False
204
205 # If false, no index is generated.
206 html_use_index = False
207
208 # If true, the index is split into individual pages for each letter.
209 # html_split_index = False
210
211 # If true, links to the reST sources are added to the pages.
212 html_show_sourcelink = False
213
214 # If true, an OpenSearch description file will be output, and all pages will
215 # contain a <link> tag referring to it. The value of this option must be the
216 # base URL from which the finished HTML is served.
217 # html_use_opensearch = ''
218
219 # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
220 # html_file_suffix = ''
221
222 # Output file base name for HTML help builder.
223 htmlhelp_basename = 'pipdocs'
224
225
226 # -- Options for LaTeX output -------------------------------------------------
227
228 # The paper size ('letter' or 'a4').
229 # latex_paper_size = 'letter'
230
231 # The font size ('10pt', '11pt' or '12pt').
232 # latex_font_size = '10pt'
233
234 # Grouping the document tree into LaTeX files. List of tuples
235 # (source start file, target name, title, author, documentclass [howto/manual])
236 latex_documents = [
237 (
238 'index',
239 'pip.tex',
240 u'pip Documentation',
241 u'pip developers',
242 'manual',
243 ),
244 ]
245
246 # The name of an image file (relative to this directory) to place at the top of
247 # the title page.
248 # latex_logo = None
249
250 # For "manual" documents, if this is true, then toplevel headings are parts,
251 # not chapters.
252 # latex_use_parts = False
253
254 # Additional stuff for the LaTeX preamble.
255 # latex_preamble = ''
256
257 # Documents to append as an appendix to all manuals.
258 # latex_appendices = []
259
260 # If false, no module index is generated.
261 # latex_use_modindex = True
262
263 # -- Options for Manual Pages -------------------------------------------------
264
265 # List of manual pages generated
266 man_pages = [
267 (
268 'index',
269 'pip',
270 u'package manager for Python packages',
271 u'pip developers',
272 1
273 )
274 ]
275
276 # Here, we crawl the entire man/commands/ directory and list every file with
277 # appropriate name and details
278 for fname in glob.glob('man/commands/*.rst'):
279 fname_base = fname[4:-4]
280 outname = 'pip-' + fname_base[13:]
281 description = u'description of {} command'.format(
282 outname.replace('-', ' ')
283 )
284
285 man_pages.append((fname_base, outname, description, u'pip developers', 1))
286
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/docs/html/conf.py b/docs/html/conf.py
--- a/docs/html/conf.py
+++ b/docs/html/conf.py
@@ -275,9 +275,13 @@
# Here, we crawl the entire man/commands/ directory and list every file with
# appropriate name and details
-for fname in glob.glob('man/commands/*.rst'):
- fname_base = fname[4:-4]
- outname = 'pip-' + fname_base[13:]
+man_dir = os.path.join(docs_dir, 'man/')
+raw_subcommands = glob.glob(os.path.join(man_dir, 'commands/*.rst'))
+if not raw_subcommands:
+ raise FileNotFoundError('The individual subcommand manpages could not be found!')
+for fname in raw_subcommands:
+ fname_base = fname[len(man_dir):-4]
+ outname = 'pip-' + fname_base[9:]
description = u'description of {} command'.format(
outname.replace('-', ' ')
)
|
{"golden_diff": "diff --git a/docs/html/conf.py b/docs/html/conf.py\n--- a/docs/html/conf.py\n+++ b/docs/html/conf.py\n@@ -275,9 +275,13 @@\n \n # Here, we crawl the entire man/commands/ directory and list every file with\n # appropriate name and details\n-for fname in glob.glob('man/commands/*.rst'):\n- fname_base = fname[4:-4]\n- outname = 'pip-' + fname_base[13:]\n+man_dir = os.path.join(docs_dir, 'man/')\n+raw_subcommands = glob.glob(os.path.join(man_dir, 'commands/*.rst'))\n+if not raw_subcommands:\n+ raise FileNotFoundError('The individual subcommand manpages could not be found!')\n+for fname in raw_subcommands:\n+ fname_base = fname[len(man_dir):-4]\n+ outname = 'pip-' + fname_base[9:]\n description = u'description of {} command'.format(\n outname.replace('-', ' ')\n )\n", "issue": "manpage documentation is missing all subcommands\nInitially reported in Arch Linux as: https://bugs.archlinux.org/task/63223\r\n\r\nTo reproduce:\r\n```\r\ngit clone https://github.com/pypa/pip/\r\ncd pip/docs\r\nPYTHONPATH=$PWD/../src/ sphinx-build -W -b man -d build/doctrees/man man build/man -c html\r\n```\r\n\r\nLook in build/man and you will see only one manpage: pip.1\r\n\r\nReally quick reproducer: look at a recent Travis CI build for the TOXENV=docs results, for example https://travis-ci.org/pypa/pip/jobs/559973823#L388, and see only one file being written out.\r\n\r\nExpectation: There should be lots of manpages, one for each pip subcommand, and linux distro packages which install the docs/build/man/ directory to /usr/share/man/man1/ should be able to read all about pip's many excellent features in their offline documentation reader.\r\n\r\nThe cause of this breakage is https://github.com/pypa/pip/pull/5724, which reorganized the conf.py layout due to https://github.com/readthedocs/readthedocs.org/issues/1543 but did not adapt the somewhat hacky code to automatically add new entries.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# pip documentation build configuration file, created by\n# sphinx-quickstart on Tue Apr 22 22:08:49 2008\n#\n# This file is execfile()d with the current directory set to its containing dir\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\nimport glob\nimport os\nimport re\nimport sys\n\non_rtd = os.environ.get('READTHEDOCS', None) == 'True'\n\ndocs_dir = os.path.dirname(os.path.dirname(__file__))\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\nsys.path.insert(0, docs_dir)\n# sys.path.append(os.path.join(os.path.dirname(__file__), '../'))\n\n# -- General configuration ----------------------------------------------------\n\n# Add any Sphinx extension module names here, as strings. 
They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.\n# extensions = ['sphinx.ext.autodoc']\nextensions = ['sphinx.ext.extlinks', 'pip_sphinxext', 'sphinx.ext.intersphinx']\n\n# intersphinx\nintersphinx_cache_limit = 0\nintersphinx_mapping = {\n 'pypug': ('https://packaging.python.org/', None),\n 'pypa': ('https://www.pypa.io/en/latest/', None),\n}\n\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = []\n\n# The suffix of source filenames.\nsource_suffix = '.rst'\n\n# The encoding of source files.\n# source_encoding = 'utf-8'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = 'pip'\ncopyright = '2008-2017, PyPA'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\n\nversion = release = 'dev'\n\n# Readthedocs seems to install pip as an egg (via setup.py install) which\n# is somehow resulting in \"import pip\" picking up an older copy of pip.\n# Rather than trying to force RTD to install pip properly, we'll simply\n# read the version direct from the __init__.py file. (Yes, this is\n# fragile, but it works...)\n\npip_init = os.path.join(docs_dir, '..', 'src', 'pip', '__init__.py')\nwith open(pip_init) as f:\n for line in f:\n m = re.match(r'__version__ = \"(.*)\"', line)\n if m:\n __version__ = m.group(1)\n # The short X.Y version.\n version = '.'.join(__version__.split('.')[:2])\n # The full version, including alpha/beta/rc tags.\n release = __version__\n break\n\n# We have this here because readthedocs plays tricks sometimes and there seems\n# to be a heisenbug, related to the version of pip discovered. This is here to\n# help debug that if someone decides to do that in the future.\nprint(version)\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n# language = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n# today = ''\n# Else, today_fmt is used as the format for a strftime call.\ntoday_fmt = '%B %d, %Y'\n\n# List of documents that shouldn't be included in the build.\n# unused_docs = []\n\n# List of directories, relative to source directory, that shouldn't be searched\n# for source files.\nexclude_patterns = ['build/']\n\n# The reST default role (used for this markup: `text`) to use for all documents\n# default_role = None\n\n# If true, '()' will be appended to :func: etc. cross-reference text.\n# add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n# add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n# show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# A list of ignored prefixes for module index sorting.\n# modindex_common_prefix = []\n\nextlinks = {\n 'issue': ('https://github.com/pypa/pip/issues/%s', '#'),\n 'pull': ('https://github.com/pypa/pip/pull/%s', 'PR #'),\n 'pypi': ('https://pypi.org/project/%s', ''),\n}\n\n# -- Options for HTML output --------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. 
Major themes that come with\n# Sphinx are currently 'default' and 'sphinxdoc'.\nhtml_theme = \"pypa_theme\"\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\nhtml_theme_options = {\n 'collapsiblesidebar': True,\n 'externalrefs': True,\n 'navigation_depth': 3,\n 'issues_url': 'https://github.com/pypa/pip/issues'\n}\n\n# Add any paths that contain custom themes here, relative to this directory.\n\n# The name for this set of Sphinx documents. If None, it defaults to\n# \"<project> v<release> documentation\".\n# html_title = None\n\n# A shorter title for the navigation bar. Default is the same as html_title.\n# html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\n# html_logo = '_static/piplogo.png'\n\n# The name of an image file (within the static path) to use as favicon of the\n# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n# html_favicon = 'favicon.png'\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = []\n\n# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,\n# using the given strftime format.\nhtml_last_updated_fmt = '%b %d, %Y'\n\n# If true, the Docutils Smart Quotes transform (originally based on\n# SmartyPants) will be used to convert characters like quotes and dashes\n# to typographically correct entities. The default is True.\nsmartquotes = True\n\n# This string, for use with Docutils 0.14 or later, customizes the\n# SmartQuotes transform. The default of \"qDe\" converts normal quote\n# characters ('\"' and \"'\"), en and em dashes (\"--\" and \"---\"), and\n# ellipses \"...\".\n# For now, we disable the conversion of dashes so that long options\n# like \"--find-links\" won't render as \"-find-links\" if included in the\n# text in places where monospaced type can't be used. For example, backticks\n# can't be used inside roles like :ref:`--no-index <--no-index>` because\n# of nesting.\nsmartquotes_action = \"qe\"\n\n# Custom sidebar templates, maps document names to template names.\nhtml_sidebars = {\n '**': ['localtoc.html', 'relations.html'],\n 'index': ['localtoc.html']\n}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n# html_additional_pages = {}\n\n# If false, no module index is generated.\nhtml_use_modindex = False\n\n# If false, no index is generated.\nhtml_use_index = False\n\n# If true, the index is split into individual pages for each letter.\n# html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\nhtml_show_sourcelink = False\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it. The value of this option must be the\n# base URL from which the finished HTML is served.\n# html_use_opensearch = ''\n\n# If nonempty, this is the file name suffix for HTML files (e.g. 
\".xhtml\").\n# html_file_suffix = ''\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'pipdocs'\n\n\n# -- Options for LaTeX output -------------------------------------------------\n\n# The paper size ('letter' or 'a4').\n# latex_paper_size = 'letter'\n\n# The font size ('10pt', '11pt' or '12pt').\n# latex_font_size = '10pt'\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title, author, documentclass [howto/manual])\nlatex_documents = [\n (\n 'index',\n 'pip.tex',\n u'pip Documentation',\n u'pip developers',\n 'manual',\n ),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n# latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n# latex_use_parts = False\n\n# Additional stuff for the LaTeX preamble.\n# latex_preamble = ''\n\n# Documents to append as an appendix to all manuals.\n# latex_appendices = []\n\n# If false, no module index is generated.\n# latex_use_modindex = True\n\n# -- Options for Manual Pages -------------------------------------------------\n\n# List of manual pages generated\nman_pages = [\n (\n 'index',\n 'pip',\n u'package manager for Python packages',\n u'pip developers',\n 1\n )\n]\n\n# Here, we crawl the entire man/commands/ directory and list every file with\n# appropriate name and details\nfor fname in glob.glob('man/commands/*.rst'):\n fname_base = fname[4:-4]\n outname = 'pip-' + fname_base[13:]\n description = u'description of {} command'.format(\n outname.replace('-', ' ')\n )\n\n man_pages.append((fname_base, outname, description, u'pip developers', 1))\n", "path": "docs/html/conf.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# pip documentation build configuration file, created by\n# sphinx-quickstart on Tue Apr 22 22:08:49 2008\n#\n# This file is execfile()d with the current directory set to its containing dir\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\nimport glob\nimport os\nimport re\nimport sys\n\non_rtd = os.environ.get('READTHEDOCS', None) == 'True'\n\ndocs_dir = os.path.dirname(os.path.dirname(__file__))\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\nsys.path.insert(0, docs_dir)\n# sys.path.append(os.path.join(os.path.dirname(__file__), '../'))\n\n# -- General configuration ----------------------------------------------------\n\n# Add any Sphinx extension module names here, as strings. 
They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.\n# extensions = ['sphinx.ext.autodoc']\nextensions = ['sphinx.ext.extlinks', 'pip_sphinxext', 'sphinx.ext.intersphinx']\n\n# intersphinx\nintersphinx_cache_limit = 0\nintersphinx_mapping = {\n 'pypug': ('https://packaging.python.org/', None),\n 'pypa': ('https://www.pypa.io/en/latest/', None),\n}\n\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = []\n\n# The suffix of source filenames.\nsource_suffix = '.rst'\n\n# The encoding of source files.\n# source_encoding = 'utf-8'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = 'pip'\ncopyright = '2008-2017, PyPA'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\n\nversion = release = 'dev'\n\n# Readthedocs seems to install pip as an egg (via setup.py install) which\n# is somehow resulting in \"import pip\" picking up an older copy of pip.\n# Rather than trying to force RTD to install pip properly, we'll simply\n# read the version direct from the __init__.py file. (Yes, this is\n# fragile, but it works...)\n\npip_init = os.path.join(docs_dir, '..', 'src', 'pip', '__init__.py')\nwith open(pip_init) as f:\n for line in f:\n m = re.match(r'__version__ = \"(.*)\"', line)\n if m:\n __version__ = m.group(1)\n # The short X.Y version.\n version = '.'.join(__version__.split('.')[:2])\n # The full version, including alpha/beta/rc tags.\n release = __version__\n break\n\n# We have this here because readthedocs plays tricks sometimes and there seems\n# to be a heisenbug, related to the version of pip discovered. This is here to\n# help debug that if someone decides to do that in the future.\nprint(version)\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n# language = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n# today = ''\n# Else, today_fmt is used as the format for a strftime call.\ntoday_fmt = '%B %d, %Y'\n\n# List of documents that shouldn't be included in the build.\n# unused_docs = []\n\n# List of directories, relative to source directory, that shouldn't be searched\n# for source files.\nexclude_patterns = ['build/']\n\n# The reST default role (used for this markup: `text`) to use for all documents\n# default_role = None\n\n# If true, '()' will be appended to :func: etc. cross-reference text.\n# add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n# add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n# show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# A list of ignored prefixes for module index sorting.\n# modindex_common_prefix = []\n\nextlinks = {\n 'issue': ('https://github.com/pypa/pip/issues/%s', '#'),\n 'pull': ('https://github.com/pypa/pip/pull/%s', 'PR #'),\n 'pypi': ('https://pypi.org/project/%s', ''),\n}\n\n# -- Options for HTML output --------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. 
Major themes that come with\n# Sphinx are currently 'default' and 'sphinxdoc'.\nhtml_theme = \"pypa_theme\"\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\nhtml_theme_options = {\n 'collapsiblesidebar': True,\n 'externalrefs': True,\n 'navigation_depth': 3,\n 'issues_url': 'https://github.com/pypa/pip/issues'\n}\n\n# Add any paths that contain custom themes here, relative to this directory.\n\n# The name for this set of Sphinx documents. If None, it defaults to\n# \"<project> v<release> documentation\".\n# html_title = None\n\n# A shorter title for the navigation bar. Default is the same as html_title.\n# html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\n# html_logo = '_static/piplogo.png'\n\n# The name of an image file (within the static path) to use as favicon of the\n# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n# html_favicon = 'favicon.png'\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = []\n\n# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,\n# using the given strftime format.\nhtml_last_updated_fmt = '%b %d, %Y'\n\n# If true, the Docutils Smart Quotes transform (originally based on\n# SmartyPants) will be used to convert characters like quotes and dashes\n# to typographically correct entities. The default is True.\nsmartquotes = True\n\n# This string, for use with Docutils 0.14 or later, customizes the\n# SmartQuotes transform. The default of \"qDe\" converts normal quote\n# characters ('\"' and \"'\"), en and em dashes (\"--\" and \"---\"), and\n# ellipses \"...\".\n# For now, we disable the conversion of dashes so that long options\n# like \"--find-links\" won't render as \"-find-links\" if included in the\n# text in places where monospaced type can't be used. For example, backticks\n# can't be used inside roles like :ref:`--no-index <--no-index>` because\n# of nesting.\nsmartquotes_action = \"qe\"\n\n# Custom sidebar templates, maps document names to template names.\nhtml_sidebars = {\n '**': ['localtoc.html', 'relations.html'],\n 'index': ['localtoc.html']\n}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n# html_additional_pages = {}\n\n# If false, no module index is generated.\nhtml_use_modindex = False\n\n# If false, no index is generated.\nhtml_use_index = False\n\n# If true, the index is split into individual pages for each letter.\n# html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\nhtml_show_sourcelink = False\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it. The value of this option must be the\n# base URL from which the finished HTML is served.\n# html_use_opensearch = ''\n\n# If nonempty, this is the file name suffix for HTML files (e.g. 
\".xhtml\").\n# html_file_suffix = ''\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'pipdocs'\n\n\n# -- Options for LaTeX output -------------------------------------------------\n\n# The paper size ('letter' or 'a4').\n# latex_paper_size = 'letter'\n\n# The font size ('10pt', '11pt' or '12pt').\n# latex_font_size = '10pt'\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title, author, documentclass [howto/manual])\nlatex_documents = [\n (\n 'index',\n 'pip.tex',\n u'pip Documentation',\n u'pip developers',\n 'manual',\n ),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n# latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n# latex_use_parts = False\n\n# Additional stuff for the LaTeX preamble.\n# latex_preamble = ''\n\n# Documents to append as an appendix to all manuals.\n# latex_appendices = []\n\n# If false, no module index is generated.\n# latex_use_modindex = True\n\n# -- Options for Manual Pages -------------------------------------------------\n\n# List of manual pages generated\nman_pages = [\n (\n 'index',\n 'pip',\n u'package manager for Python packages',\n u'pip developers',\n 1\n )\n]\n\n# Here, we crawl the entire man/commands/ directory and list every file with\n# appropriate name and details\nman_dir = os.path.join(docs_dir, 'man/')\nraw_subcommands = glob.glob(os.path.join(man_dir, 'commands/*.rst'))\nif not raw_subcommands:\n raise FileNotFoundError('The individual subcommand manpages could not be found!')\nfor fname in raw_subcommands:\n fname_base = fname[len(man_dir):-4]\n outname = 'pip-' + fname_base[9:]\n description = u'description of {} command'.format(\n outname.replace('-', ' ')\n )\n\n man_pages.append((fname_base, outname, description, u'pip developers', 1))\n", "path": "docs/html/conf.py"}]}
| 3,678 | 215 |
gh_patches_debug_24549
|
rasdani/github-patches
|
git_diff
|
meltano__meltano-8031
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
EL log files should not contain secrets
The full database URI is shown in debug logs:
```console
$ export MELTANO_CLI_LOG_LEVEL=debug
$ meltano invoke my-tap
2022-09-07T16:34:57.234152Z [info ] Environment 'dev' is active
2022-09-07T16:34:57.338859Z [debug ] Creating engine <meltano.core.project.Project object at 0x10e9702e0>@postgresql://***********
```
Where I redacted the username, password, etc. from the Postgres URI.
The full environment variables mapping log message may also contain secrets:
```console
2022-09-07T16:35:01.443284Z [debug ] Env: {'USER': ...
```
_Raised by Tomas B in Office Hours._
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/meltano/core/db.py`
Content:
```
1 """Defines helpers related to the system database."""
2
3 from __future__ import annotations
4
5 import logging
6 import time
7
8 from sqlalchemy import create_engine
9 from sqlalchemy.engine import Connection, Engine
10 from sqlalchemy.exc import OperationalError
11 from sqlalchemy.orm import sessionmaker
12 from sqlalchemy.pool import NullPool
13 from sqlalchemy.sql import text
14
15 from meltano.core.error import MeltanoError
16 from meltano.core.project import Project
17
18 # Keep a Project → Engine mapping to serve
19 # the same engine for the same Project
20 _engines = {}
21
22
23 class MeltanoDatabaseCompatibilityError(MeltanoError):
24 """Raised when the database is not compatible with Meltano."""
25
26 INSTRUCTION = (
27 "Upgrade your database to be compatible with Meltano or use a different "
28 "database"
29 )
30
31 def __init__(self, reason: str):
32 """Initialize the error with a reason.
33
34 Args:
35 reason: The reason why the database is not compatible.
36 """
37 super().__init__(reason, self.INSTRUCTION)
38
39
40 class NullConnectionStringError(MeltanoError):
41 """Raised when the database is not compatible with Meltano."""
42
43 REASON = "The `database_uri` setting has a null value"
44 INSTRUCTION = (
45 "Verify that the `database_uri` setting points to a valid database connection "
46 "URI, or use `MELTANO_FF_STRICT_ENV_VAR_MODE=1 meltano config meltano list` "
47 "to check for missing environment variables"
48 )
49
50 def __init__(self):
51 """Initialize the exception."""
52 super().__init__(self.REASON, self.INSTRUCTION)
53
54
55 def project_engine(
56 project: Project,
57 default: bool = False,
58 ) -> tuple[Engine, sessionmaker]:
59 """Create and register a SQLAlchemy engine for a Meltano project instance.
60
61 Args:
62 project: The Meltano project that the engine will be connected to.
63 default: Whether the engine created should be stored as the default
64 engine for this project.
65
66 Returns:
67 The engine, and a session maker bound to the engine.
68
69 Raises:
70 NullConnectionStringError: The `database_uri` setting has a null value.
71 """
72 existing_engine = _engines.get(project)
73 if existing_engine:
74 return existing_engine
75
76 engine_uri = project.settings.get("database_uri")
77 logging.debug(f"Creating engine '{project}@{engine_uri}'")
78
79 if engine_uri is None:
80 raise NullConnectionStringError
81
82 engine = create_engine(engine_uri, poolclass=NullPool)
83
84 # Connect to the database to ensure it is available.
85 connect(
86 engine,
87 max_retries=project.settings.get("database_max_retries"),
88 retry_timeout=project.settings.get("database_retry_timeout"),
89 )
90
91 check_database_compatibility(engine)
92 init_hook(engine)
93
94 engine_session = (engine, sessionmaker(bind=engine))
95
96 if default:
97 # register the default engine
98 _engines[project] = engine_session
99
100 return engine_session
101
102
103 def connect(
104 engine: Engine,
105 max_retries: int,
106 retry_timeout: float,
107 ) -> Connection:
108 """Connect to the database.
109
110 Args:
111 engine: The DB engine with which the check will be performed.
112 max_retries: The maximum number of retries that will be attempted.
113 retry_timeout: The number of seconds to wait between retries.
114
115 Raises:
116 OperationalError: Error during DB connection - max retries exceeded.
117
118 Returns:
119 A connection to the database.
120 """
121 attempt = 0
122 while True:
123 try:
124 return engine.connect()
125 except OperationalError:
126 if attempt >= max_retries:
127 logging.error(
128 f"Could not connect to the database after {attempt} "
129 "attempts. Max retries exceeded.",
130 )
131 raise
132 attempt += 1
133 logging.info(
134 f"DB connection failed. Will retry after {retry_timeout}s. "
135 f"Attempt {attempt}/{max_retries}",
136 )
137 time.sleep(retry_timeout)
138
139
140 init_hooks = {
141 "sqlite": lambda x: x.execute("PRAGMA journal_mode=WAL"),
142 }
143
144
145 def init_hook(engine: Engine) -> None:
146 """Run the initialization hook for the provided DB engine.
147
148 The initialization hooks are taken from the `meltano.core.db.init_hooks`
149 dictionary, which maps the dialect name of the engine to a unary function
150 which will be called with the provided DB engine.
151
152 Args:
153 engine: The engine for which the init hook will be run.
154
155 Raises:
156 Exception: The init hook raised an exception.
157 """
158 try:
159 hook = init_hooks[engine.dialect.name]
160 except KeyError:
161 return
162
163 try:
164 hook(engine)
165 except Exception as ex:
166 raise Exception(f"Failed to initialize database: {ex!s}") from ex
167
168
169 def ensure_schema_exists(
170 engine: Engine,
171 schema_name: str,
172 grant_roles: tuple[str] = (),
173 ) -> None:
174 """Ensure the specified `schema_name` exists in the database.
175
176 Args:
177 engine: The DB engine to be used.
178 schema_name: The name of the schema.
179 grant_roles: Roles to grant to the specified schema.
180 """
181 group_identifiers = ",".join(grant_roles)
182
183 create_schema = text(f"CREATE SCHEMA IF NOT EXISTS {schema_name}")
184 grant_select_schema = text(
185 f"ALTER DEFAULT PRIVILEGES IN SCHEMA {schema_name} GRANT SELECT ON "
186 f"TABLES TO {group_identifiers}",
187 )
188 grant_usage_schema = text(
189 f"GRANT USAGE ON SCHEMA {schema_name} TO {group_identifiers}",
190 )
191
192 with engine.connect() as conn, conn.begin():
193 conn.execute(create_schema)
194 if grant_roles:
195 conn.execute(grant_select_schema)
196 conn.execute(grant_usage_schema)
197
198 logging.info(f"Schema {schema_name} has been created successfully.")
199 for role in grant_roles:
200 logging.info(f"Usage has been granted for role: {role}.")
201
202
203 def check_database_compatibility(engine: Engine) -> None:
204 """Check that the database is compatible with Meltano.
205
206 Args:
207 engine: The DB engine to be used. This should already be connected to
208 the database.
209
210 Raises:
211 MeltanoDatabaseCompatibilityError: The database is not compatible with
212 Meltano.
213 """
214 dialect = engine.dialect.name
215 version = engine.dialect.server_version_info
216
217 if dialect == "sqlite" and version < (3, 25, 1):
218 version_string = ".".join(map(str, version))
219 reason = (
220 f"Detected SQLite {version_string}, but Meltano requires at least 3.25.1"
221 )
222 raise MeltanoDatabaseCompatibilityError(reason)
223
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/meltano/core/db.py b/src/meltano/core/db.py
--- a/src/meltano/core/db.py
+++ b/src/meltano/core/db.py
@@ -4,6 +4,7 @@
import logging
import time
+from urllib.parse import urlparse
from sqlalchemy import create_engine
from sqlalchemy.engine import Connection, Engine
@@ -73,13 +74,27 @@
if existing_engine:
return existing_engine
- engine_uri = project.settings.get("database_uri")
- logging.debug(f"Creating engine '{project}@{engine_uri}'")
+ database_uri = project.settings.get("database_uri")
+ parsed_db_uri = urlparse(database_uri)
+ sanitized_db_uri = parsed_db_uri._replace( # noqa: WPS437
+ netloc=(
+ f"{parsed_db_uri.username}:********@" # user:pass auth case
+ if parsed_db_uri.password
+ else "********@" # token auth case
+ if parsed_db_uri.username
+ else "" # no auth case
+ )
+ + (parsed_db_uri.hostname or ""),
+ ).geturl()
+ logging.debug(
+ f"Creating DB engine for project at {str(project.root)!r} "
+ f"with DB URI {sanitized_db_uri!r}",
+ )
- if engine_uri is None:
+ if database_uri is None:
raise NullConnectionStringError
- engine = create_engine(engine_uri, poolclass=NullPool)
+ engine = create_engine(database_uri, poolclass=NullPool)
# Connect to the database to ensure it is available.
connect(
|
{"golden_diff": "diff --git a/src/meltano/core/db.py b/src/meltano/core/db.py\n--- a/src/meltano/core/db.py\n+++ b/src/meltano/core/db.py\n@@ -4,6 +4,7 @@\n \n import logging\n import time\n+from urllib.parse import urlparse\n \n from sqlalchemy import create_engine\n from sqlalchemy.engine import Connection, Engine\n@@ -73,13 +74,27 @@\n if existing_engine:\n return existing_engine\n \n- engine_uri = project.settings.get(\"database_uri\")\n- logging.debug(f\"Creating engine '{project}@{engine_uri}'\")\n+ database_uri = project.settings.get(\"database_uri\")\n+ parsed_db_uri = urlparse(database_uri)\n+ sanitized_db_uri = parsed_db_uri._replace( # noqa: WPS437\n+ netloc=(\n+ f\"{parsed_db_uri.username}:********@\" # user:pass auth case\n+ if parsed_db_uri.password\n+ else \"********@\" # token auth case\n+ if parsed_db_uri.username\n+ else \"\" # no auth case\n+ )\n+ + (parsed_db_uri.hostname or \"\"),\n+ ).geturl()\n+ logging.debug(\n+ f\"Creating DB engine for project at {str(project.root)!r} \"\n+ f\"with DB URI {sanitized_db_uri!r}\",\n+ )\n \n- if engine_uri is None:\n+ if database_uri is None:\n raise NullConnectionStringError\n \n- engine = create_engine(engine_uri, poolclass=NullPool)\n+ engine = create_engine(database_uri, poolclass=NullPool)\n \n # Connect to the database to ensure it is available.\n connect(\n", "issue": "EL log files should not contain secrets\nThe full database URI is shown in debug logs:\r\n\r\n```console\r\n$ export MELTANO_CLI_LOG_LEVEL=debug\r\n$ meltano invoke my-tap\r\n2022-09-07T16:34:57.234152Z [info ] Environment 'dev' is active\r\n2022-09-07T16:34:57.338859Z [debug ] Creating engine <meltano.core.project.Project object at 0x10e9702e0>@postgresql://***********\r\n```\r\n\r\nWhere I redacted the username, password, etc. 
from the Postgres URI.\r\n\r\nThe full environment variables mapping log message may also contain secrets:\r\n\r\n```console\r\n2022-09-07T16:35:01.443284Z [debug ] Env: {'USER': ...\r\n```\r\n\r\n_Raised by Tomas B in Office Hours._\r\n\n", "before_files": [{"content": "\"\"\"Defines helpers related to the system database.\"\"\"\n\nfrom __future__ import annotations\n\nimport logging\nimport time\n\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.engine import Connection, Engine\nfrom sqlalchemy.exc import OperationalError\nfrom sqlalchemy.orm import sessionmaker\nfrom sqlalchemy.pool import NullPool\nfrom sqlalchemy.sql import text\n\nfrom meltano.core.error import MeltanoError\nfrom meltano.core.project import Project\n\n# Keep a Project \u2192 Engine mapping to serve\n# the same engine for the same Project\n_engines = {}\n\n\nclass MeltanoDatabaseCompatibilityError(MeltanoError):\n \"\"\"Raised when the database is not compatible with Meltano.\"\"\"\n\n INSTRUCTION = (\n \"Upgrade your database to be compatible with Meltano or use a different \"\n \"database\"\n )\n\n def __init__(self, reason: str):\n \"\"\"Initialize the error with a reason.\n\n Args:\n reason: The reason why the database is not compatible.\n \"\"\"\n super().__init__(reason, self.INSTRUCTION)\n\n\nclass NullConnectionStringError(MeltanoError):\n \"\"\"Raised when the database is not compatible with Meltano.\"\"\"\n\n REASON = \"The `database_uri` setting has a null value\"\n INSTRUCTION = (\n \"Verify that the `database_uri` setting points to a valid database connection \"\n \"URI, or use `MELTANO_FF_STRICT_ENV_VAR_MODE=1 meltano config meltano list` \"\n \"to check for missing environment variables\"\n )\n\n def __init__(self):\n \"\"\"Initialize the exception.\"\"\"\n super().__init__(self.REASON, self.INSTRUCTION)\n\n\ndef project_engine(\n project: Project,\n default: bool = False,\n) -> tuple[Engine, sessionmaker]:\n \"\"\"Create and register a SQLAlchemy engine for a Meltano project instance.\n\n Args:\n project: The Meltano project that the engine will be connected to.\n default: Whether the engine created should be stored as the default\n engine for this project.\n\n Returns:\n The engine, and a session maker bound to the engine.\n\n Raises:\n NullConnectionStringError: The `database_uri` setting has a null value.\n \"\"\"\n existing_engine = _engines.get(project)\n if existing_engine:\n return existing_engine\n\n engine_uri = project.settings.get(\"database_uri\")\n logging.debug(f\"Creating engine '{project}@{engine_uri}'\")\n\n if engine_uri is None:\n raise NullConnectionStringError\n\n engine = create_engine(engine_uri, poolclass=NullPool)\n\n # Connect to the database to ensure it is available.\n connect(\n engine,\n max_retries=project.settings.get(\"database_max_retries\"),\n retry_timeout=project.settings.get(\"database_retry_timeout\"),\n )\n\n check_database_compatibility(engine)\n init_hook(engine)\n\n engine_session = (engine, sessionmaker(bind=engine))\n\n if default:\n # register the default engine\n _engines[project] = engine_session\n\n return engine_session\n\n\ndef connect(\n engine: Engine,\n max_retries: int,\n retry_timeout: float,\n) -> Connection:\n \"\"\"Connect to the database.\n\n Args:\n engine: The DB engine with which the check will be performed.\n max_retries: The maximum number of retries that will be attempted.\n retry_timeout: The number of seconds to wait between retries.\n\n Raises:\n OperationalError: Error during DB connection - max retries exceeded.\n\n Returns:\n 
A connection to the database.\n \"\"\"\n attempt = 0\n while True:\n try:\n return engine.connect()\n except OperationalError:\n if attempt >= max_retries:\n logging.error(\n f\"Could not connect to the database after {attempt} \"\n \"attempts. Max retries exceeded.\",\n )\n raise\n attempt += 1\n logging.info(\n f\"DB connection failed. Will retry after {retry_timeout}s. \"\n f\"Attempt {attempt}/{max_retries}\",\n )\n time.sleep(retry_timeout)\n\n\ninit_hooks = {\n \"sqlite\": lambda x: x.execute(\"PRAGMA journal_mode=WAL\"),\n}\n\n\ndef init_hook(engine: Engine) -> None:\n \"\"\"Run the initialization hook for the provided DB engine.\n\n The initialization hooks are taken from the `meltano.core.db.init_hooks`\n dictionary, which maps the dialect name of the engine to a unary function\n which will be called with the provided DB engine.\n\n Args:\n engine: The engine for which the init hook will be run.\n\n Raises:\n Exception: The init hook raised an exception.\n \"\"\"\n try:\n hook = init_hooks[engine.dialect.name]\n except KeyError:\n return\n\n try:\n hook(engine)\n except Exception as ex:\n raise Exception(f\"Failed to initialize database: {ex!s}\") from ex\n\n\ndef ensure_schema_exists(\n engine: Engine,\n schema_name: str,\n grant_roles: tuple[str] = (),\n) -> None:\n \"\"\"Ensure the specified `schema_name` exists in the database.\n\n Args:\n engine: The DB engine to be used.\n schema_name: The name of the schema.\n grant_roles: Roles to grant to the specified schema.\n \"\"\"\n group_identifiers = \",\".join(grant_roles)\n\n create_schema = text(f\"CREATE SCHEMA IF NOT EXISTS {schema_name}\")\n grant_select_schema = text(\n f\"ALTER DEFAULT PRIVILEGES IN SCHEMA {schema_name} GRANT SELECT ON \"\n f\"TABLES TO {group_identifiers}\",\n )\n grant_usage_schema = text(\n f\"GRANT USAGE ON SCHEMA {schema_name} TO {group_identifiers}\",\n )\n\n with engine.connect() as conn, conn.begin():\n conn.execute(create_schema)\n if grant_roles:\n conn.execute(grant_select_schema)\n conn.execute(grant_usage_schema)\n\n logging.info(f\"Schema {schema_name} has been created successfully.\")\n for role in grant_roles:\n logging.info(f\"Usage has been granted for role: {role}.\")\n\n\ndef check_database_compatibility(engine: Engine) -> None:\n \"\"\"Check that the database is compatible with Meltano.\n\n Args:\n engine: The DB engine to be used. 
This should already be connected to\n the database.\n\n Raises:\n MeltanoDatabaseCompatibilityError: The database is not compatible with\n Meltano.\n \"\"\"\n dialect = engine.dialect.name\n version = engine.dialect.server_version_info\n\n if dialect == \"sqlite\" and version < (3, 25, 1):\n version_string = \".\".join(map(str, version))\n reason = (\n f\"Detected SQLite {version_string}, but Meltano requires at least 3.25.1\"\n )\n raise MeltanoDatabaseCompatibilityError(reason)\n", "path": "src/meltano/core/db.py"}], "after_files": [{"content": "\"\"\"Defines helpers related to the system database.\"\"\"\n\nfrom __future__ import annotations\n\nimport logging\nimport time\nfrom urllib.parse import urlparse\n\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.engine import Connection, Engine\nfrom sqlalchemy.exc import OperationalError\nfrom sqlalchemy.orm import sessionmaker\nfrom sqlalchemy.pool import NullPool\nfrom sqlalchemy.sql import text\n\nfrom meltano.core.error import MeltanoError\nfrom meltano.core.project import Project\n\n# Keep a Project \u2192 Engine mapping to serve\n# the same engine for the same Project\n_engines = {}\n\n\nclass MeltanoDatabaseCompatibilityError(MeltanoError):\n \"\"\"Raised when the database is not compatible with Meltano.\"\"\"\n\n INSTRUCTION = (\n \"Upgrade your database to be compatible with Meltano or use a different \"\n \"database\"\n )\n\n def __init__(self, reason: str):\n \"\"\"Initialize the error with a reason.\n\n Args:\n reason: The reason why the database is not compatible.\n \"\"\"\n super().__init__(reason, self.INSTRUCTION)\n\n\nclass NullConnectionStringError(MeltanoError):\n \"\"\"Raised when the database is not compatible with Meltano.\"\"\"\n\n REASON = \"The `database_uri` setting has a null value\"\n INSTRUCTION = (\n \"Verify that the `database_uri` setting points to a valid database connection \"\n \"URI, or use `MELTANO_FF_STRICT_ENV_VAR_MODE=1 meltano config meltano list` \"\n \"to check for missing environment variables\"\n )\n\n def __init__(self):\n \"\"\"Initialize the exception.\"\"\"\n super().__init__(self.REASON, self.INSTRUCTION)\n\n\ndef project_engine(\n project: Project,\n default: bool = False,\n) -> tuple[Engine, sessionmaker]:\n \"\"\"Create and register a SQLAlchemy engine for a Meltano project instance.\n\n Args:\n project: The Meltano project that the engine will be connected to.\n default: Whether the engine created should be stored as the default\n engine for this project.\n\n Returns:\n The engine, and a session maker bound to the engine.\n\n Raises:\n NullConnectionStringError: The `database_uri` setting has a null value.\n \"\"\"\n existing_engine = _engines.get(project)\n if existing_engine:\n return existing_engine\n\n database_uri = project.settings.get(\"database_uri\")\n parsed_db_uri = urlparse(database_uri)\n sanitized_db_uri = parsed_db_uri._replace( # noqa: WPS437\n netloc=(\n f\"{parsed_db_uri.username}:********@\" # user:pass auth case\n if parsed_db_uri.password\n else \"********@\" # token auth case\n if parsed_db_uri.username\n else \"\" # no auth case\n )\n + (parsed_db_uri.hostname or \"\"),\n ).geturl()\n logging.debug(\n f\"Creating DB engine for project at {str(project.root)!r} \"\n f\"with DB URI {sanitized_db_uri!r}\",\n )\n\n if database_uri is None:\n raise NullConnectionStringError\n\n engine = create_engine(database_uri, poolclass=NullPool)\n\n # Connect to the database to ensure it is available.\n connect(\n engine,\n 
max_retries=project.settings.get(\"database_max_retries\"),\n retry_timeout=project.settings.get(\"database_retry_timeout\"),\n )\n\n check_database_compatibility(engine)\n init_hook(engine)\n\n engine_session = (engine, sessionmaker(bind=engine))\n\n if default:\n # register the default engine\n _engines[project] = engine_session\n\n return engine_session\n\n\ndef connect(\n engine: Engine,\n max_retries: int,\n retry_timeout: float,\n) -> Connection:\n \"\"\"Connect to the database.\n\n Args:\n engine: The DB engine with which the check will be performed.\n max_retries: The maximum number of retries that will be attempted.\n retry_timeout: The number of seconds to wait between retries.\n\n Raises:\n OperationalError: Error during DB connection - max retries exceeded.\n\n Returns:\n A connection to the database.\n \"\"\"\n attempt = 0\n while True:\n try:\n return engine.connect()\n except OperationalError:\n if attempt >= max_retries:\n logging.error(\n f\"Could not connect to the database after {attempt} \"\n \"attempts. Max retries exceeded.\",\n )\n raise\n attempt += 1\n logging.info(\n f\"DB connection failed. Will retry after {retry_timeout}s. \"\n f\"Attempt {attempt}/{max_retries}\",\n )\n time.sleep(retry_timeout)\n\n\ninit_hooks = {\n \"sqlite\": lambda x: x.execute(\"PRAGMA journal_mode=WAL\"),\n}\n\n\ndef init_hook(engine: Engine) -> None:\n \"\"\"Run the initialization hook for the provided DB engine.\n\n The initialization hooks are taken from the `meltano.core.db.init_hooks`\n dictionary, which maps the dialect name of the engine to a unary function\n which will be called with the provided DB engine.\n\n Args:\n engine: The engine for which the init hook will be run.\n\n Raises:\n Exception: The init hook raised an exception.\n \"\"\"\n try:\n hook = init_hooks[engine.dialect.name]\n except KeyError:\n return\n\n try:\n hook(engine)\n except Exception as ex:\n raise Exception(f\"Failed to initialize database: {ex!s}\") from ex\n\n\ndef ensure_schema_exists(\n engine: Engine,\n schema_name: str,\n grant_roles: tuple[str] = (),\n) -> None:\n \"\"\"Ensure the specified `schema_name` exists in the database.\n\n Args:\n engine: The DB engine to be used.\n schema_name: The name of the schema.\n grant_roles: Roles to grant to the specified schema.\n \"\"\"\n group_identifiers = \",\".join(grant_roles)\n\n create_schema = text(f\"CREATE SCHEMA IF NOT EXISTS {schema_name}\")\n grant_select_schema = text(\n f\"ALTER DEFAULT PRIVILEGES IN SCHEMA {schema_name} GRANT SELECT ON \"\n f\"TABLES TO {group_identifiers}\",\n )\n grant_usage_schema = text(\n f\"GRANT USAGE ON SCHEMA {schema_name} TO {group_identifiers}\",\n )\n\n with engine.connect() as conn, conn.begin():\n conn.execute(create_schema)\n if grant_roles:\n conn.execute(grant_select_schema)\n conn.execute(grant_usage_schema)\n\n logging.info(f\"Schema {schema_name} has been created successfully.\")\n for role in grant_roles:\n logging.info(f\"Usage has been granted for role: {role}.\")\n\n\ndef check_database_compatibility(engine: Engine) -> None:\n \"\"\"Check that the database is compatible with Meltano.\n\n Args:\n engine: The DB engine to be used. 
This should already be connected to\n the database.\n\n Raises:\n MeltanoDatabaseCompatibilityError: The database is not compatible with\n Meltano.\n \"\"\"\n dialect = engine.dialect.name\n version = engine.dialect.server_version_info\n\n if dialect == \"sqlite\" and version < (3, 25, 1):\n version_string = \".\".join(map(str, version))\n reason = (\n f\"Detected SQLite {version_string}, but Meltano requires at least 3.25.1\"\n )\n raise MeltanoDatabaseCompatibilityError(reason)\n", "path": "src/meltano/core/db.py"}]}
| 2,521 | 365 |
gh_patches_debug_914
|
rasdani/github-patches
|
git_diff
|
wemake-services__wemake-python-styleguide-204
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Feature: ignore async function definitions from jones complexity check
Currently we only ignore `ClassDef` and `FunctionDef`: https://github.com/wemake-services/wemake-python-styleguide/blob/master/wemake_python_styleguide/visitors/ast/complexity/jones.py#L38-L41
What needs to be done:
1. ignore `AsyncFunctionDef` from the check
2. we do not have a special test case for ignoring nodes for now. It should be added. We can call it `test_that_some_nodes_are_ignored`. It should test all three ignored nodes: with the lowest complexity threshold there should be no errors: https://github.com/wemake-services/wemake-python-styleguide/blob/master/tests/test_visitors/test_ast/test_complexity/test_jones/test_line_complexity.py
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `wemake_python_styleguide/visitors/ast/complexity/jones.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 """
4 Jones Complexity to count inline complexity.
5
6 Based on the original `jones-complexity` project:
7 https://github.com/Miserlou/JonesComplexity
8
9 Original project is licensed under MIT.
10 """
11
12 import ast
13 from collections import defaultdict
14 from statistics import median
15 from typing import DefaultDict, List
16
17 from wemake_python_styleguide.logics.nodes import is_subtype_of_any
18 from wemake_python_styleguide.violations.complexity import (
19 JonesScoreViolation,
20 LineComplexityViolation,
21 )
22 from wemake_python_styleguide.visitors.base import BaseNodeVisitor
23
24
25 class JonesComplexityVisitor(BaseNodeVisitor): # TODO: consider `logical_line`
26 """
27 This visitor is used to find complex lines in the code.
28
29 Calculates the number of AST nodes per line of code.
30 Also calculates the median nodes/line score.
31 Then compares these numbers to the given tressholds.
32
33 Some nodes are ignored because there's no sense in analyzing them.
34 Some nodes like type annotations are not affecting line complexity,
35 so we do not count them.
36 """
37
38 _ignored_nodes = (
39 ast.FunctionDef,
40 ast.ClassDef,
41 )
42
43 def __init__(self, *args, **kwargs) -> None:
44 """Initializes line number counter."""
45 super().__init__(*args, **kwargs)
46 self._lines: DefaultDict[int, List[ast.AST]] = defaultdict(list)
47 self._to_ignore: List[ast.AST] = []
48
49 def _post_visit(self) -> None:
50 """
51 Triggers after the whole module was processed.
52
53 Checks each line for its complexity, compares it to the tresshold.
54 We also calculate the final Jones score for the whole module.
55 """
56 for line_nodes in self._lines.values():
57 complexity = len(line_nodes)
58 if complexity > self.options.max_line_complexity:
59 self.add_violation(LineComplexityViolation(
60 line_nodes[0], text=str(complexity),
61 ))
62
63 node_counts = [len(nodes) for nodes in self._lines.values()]
64 total_count = median(node_counts) if node_counts else 0
65 if total_count > self.options.max_jones_score:
66 self.add_violation(JonesScoreViolation())
67
68 def _maybe_ignore_child(self, node: ast.AST) -> bool:
69 if isinstance(node, ast.AnnAssign):
70 self._to_ignore.append(node.annotation)
71
72 return node in self._to_ignore
73
74 def visit(self, node: ast.AST) -> None:
75 """
76 Visits all nodes, sums the number of nodes per line.
77
78 Then calculates the median value of all line results.
79
80 Raises:
81 JonesScoreViolation
82 LineComplexityViolation
83
84 """
85 line_number = getattr(node, 'lineno', None)
86 is_ignored = is_subtype_of_any(node, self._ignored_nodes)
87 if line_number is not None and not is_ignored:
88 if not self._maybe_ignore_child(node):
89 self._lines[line_number].append(node)
90
91 self.generic_visit(node)
92
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/wemake_python_styleguide/visitors/ast/complexity/jones.py b/wemake_python_styleguide/visitors/ast/complexity/jones.py
--- a/wemake_python_styleguide/visitors/ast/complexity/jones.py
+++ b/wemake_python_styleguide/visitors/ast/complexity/jones.py
@@ -38,6 +38,7 @@
_ignored_nodes = (
ast.FunctionDef,
ast.ClassDef,
+ ast.AsyncFunctionDef,
)
def __init__(self, *args, **kwargs) -> None:
|
{"golden_diff": "diff --git a/wemake_python_styleguide/visitors/ast/complexity/jones.py b/wemake_python_styleguide/visitors/ast/complexity/jones.py\n--- a/wemake_python_styleguide/visitors/ast/complexity/jones.py\n+++ b/wemake_python_styleguide/visitors/ast/complexity/jones.py\n@@ -38,6 +38,7 @@\n _ignored_nodes = (\n ast.FunctionDef,\n ast.ClassDef,\n+ ast.AsyncFunctionDef,\n )\n \n def __init__(self, *args, **kwargs) -> None:\n", "issue": "Feature: ignore async function definitions from jones complexity check\nCurrently we only ignore `ClassDef` and `FunctionDef`: https://github.com/wemake-services/wemake-python-styleguide/blob/master/wemake_python_styleguide/visitors/ast/complexity/jones.py#L38-L41\r\n\r\nWhat needs to be done:\r\n1. ignore `AsyncFunctionDef` from the check\r\n2. we do not have a special test case for ignoring nodes for now. It should be added. We can call it `test_that_some_nodes_are_ignored`. It should test all three ignored nodes: with the lowest complexity threshold there should be no errors: https://github.com/wemake-services/wemake-python-styleguide/blob/master/tests/test_visitors/test_ast/test_complexity/test_jones/test_line_complexity.py\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nJones Complexity to count inline complexity.\n\nBased on the original `jones-complexity` project:\nhttps://github.com/Miserlou/JonesComplexity\n\nOriginal project is licensed under MIT.\n\"\"\"\n\nimport ast\nfrom collections import defaultdict\nfrom statistics import median\nfrom typing import DefaultDict, List\n\nfrom wemake_python_styleguide.logics.nodes import is_subtype_of_any\nfrom wemake_python_styleguide.violations.complexity import (\n JonesScoreViolation,\n LineComplexityViolation,\n)\nfrom wemake_python_styleguide.visitors.base import BaseNodeVisitor\n\n\nclass JonesComplexityVisitor(BaseNodeVisitor): # TODO: consider `logical_line`\n \"\"\"\n This visitor is used to find complex lines in the code.\n\n Calculates the number of AST nodes per line of code.\n Also calculates the median nodes/line score.\n Then compares these numbers to the given tressholds.\n\n Some nodes are ignored because there's no sense in analyzing them.\n Some nodes like type annotations are not affecting line complexity,\n so we do not count them.\n \"\"\"\n\n _ignored_nodes = (\n ast.FunctionDef,\n ast.ClassDef,\n )\n\n def __init__(self, *args, **kwargs) -> None:\n \"\"\"Initializes line number counter.\"\"\"\n super().__init__(*args, **kwargs)\n self._lines: DefaultDict[int, List[ast.AST]] = defaultdict(list)\n self._to_ignore: List[ast.AST] = []\n\n def _post_visit(self) -> None:\n \"\"\"\n Triggers after the whole module was processed.\n\n Checks each line for its complexity, compares it to the tresshold.\n We also calculate the final Jones score for the whole module.\n \"\"\"\n for line_nodes in self._lines.values():\n complexity = len(line_nodes)\n if complexity > self.options.max_line_complexity:\n self.add_violation(LineComplexityViolation(\n line_nodes[0], text=str(complexity),\n ))\n\n node_counts = [len(nodes) for nodes in self._lines.values()]\n total_count = median(node_counts) if node_counts else 0\n if total_count > self.options.max_jones_score:\n self.add_violation(JonesScoreViolation())\n\n def _maybe_ignore_child(self, node: ast.AST) -> bool:\n if isinstance(node, ast.AnnAssign):\n self._to_ignore.append(node.annotation)\n\n return node in self._to_ignore\n\n def visit(self, node: ast.AST) -> None:\n \"\"\"\n Visits all nodes, sums the number of nodes per 
line.\n\n Then calculates the median value of all line results.\n\n Raises:\n JonesScoreViolation\n LineComplexityViolation\n\n \"\"\"\n line_number = getattr(node, 'lineno', None)\n is_ignored = is_subtype_of_any(node, self._ignored_nodes)\n if line_number is not None and not is_ignored:\n if not self._maybe_ignore_child(node):\n self._lines[line_number].append(node)\n\n self.generic_visit(node)\n", "path": "wemake_python_styleguide/visitors/ast/complexity/jones.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nJones Complexity to count inline complexity.\n\nBased on the original `jones-complexity` project:\nhttps://github.com/Miserlou/JonesComplexity\n\nOriginal project is licensed under MIT.\n\"\"\"\n\nimport ast\nfrom collections import defaultdict\nfrom statistics import median\nfrom typing import DefaultDict, List\n\nfrom wemake_python_styleguide.logics.nodes import is_subtype_of_any\nfrom wemake_python_styleguide.violations.complexity import (\n JonesScoreViolation,\n LineComplexityViolation,\n)\nfrom wemake_python_styleguide.visitors.base import BaseNodeVisitor\n\n\nclass JonesComplexityVisitor(BaseNodeVisitor): # TODO: consider `logical_line`\n \"\"\"\n This visitor is used to find complex lines in the code.\n\n Calculates the number of AST nodes per line of code.\n Also calculates the median nodes/line score.\n Then compares these numbers to the given tressholds.\n\n Some nodes are ignored because there's no sense in analyzing them.\n Some nodes like type annotations are not affecting line complexity,\n so we do not count them.\n \"\"\"\n\n _ignored_nodes = (\n ast.FunctionDef,\n ast.ClassDef,\n ast.AsyncFunctionDef,\n )\n\n def __init__(self, *args, **kwargs) -> None:\n \"\"\"Initializes line number counter.\"\"\"\n super().__init__(*args, **kwargs)\n self._lines: DefaultDict[int, List[ast.AST]] = defaultdict(list)\n self._to_ignore: List[ast.AST] = []\n\n def _post_visit(self) -> None:\n \"\"\"\n Triggers after the whole module was processed.\n\n Checks each line for its complexity, compares it to the tresshold.\n We also calculate the final Jones score for the whole module.\n \"\"\"\n for line_nodes in self._lines.values():\n complexity = len(line_nodes)\n if complexity > self.options.max_line_complexity:\n self.add_violation(LineComplexityViolation(\n line_nodes[0], text=str(complexity),\n ))\n\n node_counts = [len(nodes) for nodes in self._lines.values()]\n total_count = median(node_counts) if node_counts else 0\n if total_count > self.options.max_jones_score:\n self.add_violation(JonesScoreViolation())\n\n def _maybe_ignore_child(self, node: ast.AST) -> bool:\n if isinstance(node, ast.AnnAssign):\n self._to_ignore.append(node.annotation)\n\n return node in self._to_ignore\n\n def visit(self, node: ast.AST) -> None:\n \"\"\"\n Visits all nodes, sums the number of nodes per line.\n\n Then calculates the median value of all line results.\n\n Raises:\n JonesScoreViolation\n LineComplexityViolation\n\n \"\"\"\n line_number = getattr(node, 'lineno', None)\n is_ignored = is_subtype_of_any(node, self._ignored_nodes)\n if line_number is not None and not is_ignored:\n if not self._maybe_ignore_child(node):\n self._lines[line_number].append(node)\n\n self.generic_visit(node)\n", "path": "wemake_python_styleguide/visitors/ast/complexity/jones.py"}]}
| 1,284 | 134 |
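The issue in the record above asks for a `test_that_some_nodes_are_ignored` case covering all three ignored node types. The snippet below is a minimal, self-contained sketch of the property such a test would assert: header lines of ignored definitions contribute nothing to the per-line score. It deliberately uses a simplified stand-in counter rather than the project's real `JonesComplexityVisitor` and pytest fixtures, so treat the helper and the sample sources as illustrative assumptions only.

```python
import ast
from collections import defaultdict

# Ignored definition headers, per the golden diff above (async now included).
IGNORED_NODES = (ast.FunctionDef, ast.ClassDef, ast.AsyncFunctionDef)


def nodes_per_line(source: str) -> dict:
    """Count AST nodes per line, skipping ignored definition nodes."""
    counts = defaultdict(int)
    for node in ast.walk(ast.parse(source)):
        lineno = getattr(node, 'lineno', None)
        if lineno is not None and not isinstance(node, IGNORED_NODES):
            counts[lineno] += 1
    return dict(counts)


sync_source = 'def handler():\n    return 1\n'
async_source = 'async def handler():\n    return 1\n'
class_source = 'class Handler:\n    x = 1\n'

# The `def`, `async def` and `class` header lines add no counted nodes, so
# sync and async definitions score identically and a header line alone can
# never trip even the lowest complexity threshold.
assert nodes_per_line(sync_source) == nodes_per_line(async_source) == {2: 2}
assert 1 not in nodes_per_line(class_source)
```

A real test in the repository would route the same three sources through `JonesComplexityVisitor` with the lowest `max_line_complexity` setting and assert that no violations are reported, as the issue describes.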
gh_patches_debug_28303
|
rasdani/github-patches
|
git_diff
|
wright-group__WrightTools-517
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
quick2D returns transposed plot
`artists.quick2D` uses the overloaded `ax.pcolor` and `ax.contourf` to plot arrays--- see [here](https://github.com/wright-group/WrightTools/blob/development/WrightTools/artists/_quick.py#L252).
However, `ax.contourf` does not properly handle the arrays. I think the problem is [line255](https://github.com/wright-group/WrightTools/blob/development/WrightTools/artists/_base.py#L255). I think `zi` should instead be `zi.T` to handle the way matplotlib indexes arrays.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `WrightTools/artists/_quick.py`
Content:
```
1 """Quick plotting."""
2
3
4 # --- import --------------------------------------------------------------------------------------
5
6
7 import os
8
9 import numpy as np
10
11 import matplotlib.pyplot as plt
12
13 from ._helpers import create_figure, plot_colorbar, savefig
14 from ._colors import colormaps
15 from .. import kit as wt_kit
16
17
18 # --- define --------------------------------------------------------------------------------------
19
20
21 __all__ = ['quick1D', 'quick2D']
22
23
24 # --- general purpose plotting functions ----------------------------------------------------------
25
26
27 def quick1D(data, axis=0, at={}, channel=0, *, local=False, autosave=False, save_directory=None,
28 fname=None, verbose=True):
29 """Quickly plot 1D slice(s) of data.
30
31 Parameters
32 ----------
33 data : WrightTools.Data object
34 Data to plot.
35 axis : string or integer (optional)
36 Expression or index of axis. Default is 0.
37 at : dictionary (optional)
38 Dictionary of parameters in non-plotted dimension(s). If not
39 provided, plots will be made at each coordinate.
40 channel : string or integer (optional)
41 Name or index of channel to plot. Default is 0.
42 local : boolean (optional)
43 Toggle plotting locally. Default is False.
44 autosave : boolean (optional)
45 Toggle autosave. Default is False.
46 save_directory : string (optional)
47 Location to save image(s). Default is None (auto-generated).
48 fname : string (optional)
49 File name. If None, data name is used. Default is None.
50 verbose : boolean (optional)
51 Toggle talkback. Default is True.
52
53 Returns
54 -------
55 list of strings
56 List of saved image files (if any).
57 """
58 # prepare data
59 chopped = data.chop(axis, at=at, verbose=False)
60 # channel index
61 channel_index = wt_kit.get_index(data.channel_names, channel)
62 # prepare figure
63 fig = None
64 if len(chopped) > 10:
65 if not autosave:
66 print('more than 10 images will be generated: forcing autosave')
67 autosave = True
68 # prepare output folders
69 if autosave:
70 if save_directory:
71 pass
72 else:
73 if len(chopped) == 1:
74 save_directory = os.getcwd()
75 if fname:
76 pass
77 else:
78 fname = data.natural_name
79 else:
80 folder_name = 'mpl_1D ' + wt_kit.TimeStamp().path
81 os.mkdir(folder_name)
82 save_directory = folder_name
83 # chew through image generation
84 out = []
85 for i, d in enumerate(chopped.values()):
86 # unpack data -----------------------------------------------------------------------------
87 axis = d.axes[0]
88 xi = axis.full
89 channel = d.channels[channel_index]
90 zi = channel[:]
91 # create figure ---------------------------------------------------------------------------
92 aspects = [[[0, 0], 0.5]]
93 fig, gs = create_figure(width='single', nrows=1, cols=[1], aspects=aspects)
94 ax = plt.subplot(gs[0, 0])
95 # plot ------------------------------------------------------------------------------------
96 plt.plot(xi, zi, lw=2)
97 plt.scatter(xi, zi, color='grey', alpha=0.5, edgecolor='none')
98 # decoration ------------------------------------------------------------------------------
99 plt.grid()
100 # limits
101 if local:
102 pass
103 else:
104 data_channel = data.channels[channel_index]
105 plt.ylim(data_channel.min(), data_channel.max())
106 # label axes
107 ax.set_xlabel(axis.label, fontsize=18)
108 ax.set_ylabel(channel.name, fontsize=18)
109 plt.xticks(rotation=45)
110 plt.xlim(xi.min(), xi.max())
111 # save ------------------------------------------------------------------------------------
112 if autosave:
113 if fname:
114 file_name = fname + ' ' + str(i).zfill(3)
115 else:
116 file_name = str(i).zfill(3)
117 fpath = os.path.join(save_directory, file_name + '.png')
118 savefig(fpath, fig=fig)
119 plt.close()
120 if verbose:
121 print('image saved at', fpath)
122 out.append(fpath)
123 return out
124
125
126 def quick2D(data, xaxis=1, yaxis=0, at={}, channel=0, *, contours=0, pixelated=True,
127 dynamic_range=False, local=False, contours_local=True, autosave=False,
128 save_directory=None, fname=None, verbose=True):
129 """Quickly plot 2D slice(s) of data.
130
131 Parameters
132 ----------
133 data : WrightTools.Data object.
134 Data to plot.
135 xaxis : string or integer (optional)
136 Expression or index of horizontal axis. Default is 1.
137 yaxis : string or integer (optional)
138 Expression or index of vertical axis. Default is 0.
139 at : dictionary (optional)
140 Dictionary of parameters in non-plotted dimension(s). If not
141 provided, plots will be made at each coordinate.
142 channel : string or integer (optional)
143 Name or index of channel to plot. Default is 0.
144 contours : integer (optional)
145 The number of black contour lines to add to the plot. Default is 0.
146 pixelated : boolean (optional)
147 Toggle between pcolor and contourf (deulaney) plotting backends.
148 Default is True (pcolor).
149 dynamic_range : boolean (optional)
150 Force the colorbar to use all of its colors. Only changes behavior
151 for signed channels. Default is False.
152 local : boolean (optional)
153 Toggle plotting locally. Default is False.
154 contours_local : boolean (optional)
155 Toggle plotting black contour lines locally. Default is True.
156 autosave : boolean (optional)
157 Toggle autosave. Default is False.
158 save_directory : string (optional)
159 Location to save image(s). Default is None (auto-generated).
160 fname : string (optional)
161 File name. If None, data name is used. Default is None.
162 verbose : boolean (optional)
163 Toggle talkback. Default is True.
164
165 Returns
166 -------
167 list of strings
168 List of saved image files (if any).
169 """
170 # prepare data
171 chopped = data.chop(xaxis, yaxis, at=at, verbose=False)
172 # channel index
173 channel_index = wt_kit.get_index(data.channel_names, channel)
174 # colormap
175 # get colormap
176 if data.channels[channel_index].signed:
177 cmap = 'signed'
178 else:
179 cmap = 'default'
180 cmap = colormaps[cmap]
181 cmap.set_bad([0.75] * 3, 1.)
182 cmap.set_under([0.75] * 3, 1.)
183 # fname
184 if fname is None:
185 fname = data.natural_name
186 # autosave
187 if len(chopped) > 10:
188 if not autosave:
189 print('more than 10 images will be generated: forcing autosave')
190 autosave = True
191 # output folder
192 if autosave:
193 if save_directory:
194 pass
195 else:
196 if len(chopped) == 1:
197 save_directory = os.getcwd()
198 else:
199 folder_name = 'quick2D ' + wt_kit.TimeStamp().path
200 os.mkdir(folder_name)
201 save_directory = folder_name
202 # loop through image generation
203 out = []
204 for i, d in enumerate(chopped.values()):
205 # unpack data -----------------------------------------------------------------------------
206 xaxis = d.axes[0]
207 xlim = xaxis.min(), xaxis.max()
208 yaxis = d.axes[1]
209 ylim = xaxis.min(), yaxis.max()
210 channel = d.channels[channel_index]
211 zi = channel[:]
212 zi = np.ma.masked_invalid(zi)
213 # create figure ---------------------------------------------------------------------------
214 if xaxis.units == yaxis.units:
215 xr = xlim[1] - xlim[0]
216 yr = ylim[1] - ylim[0]
217 aspect = np.abs(yr / xr)
218 if 3 < aspect or aspect < 1 / 3.:
219 # TODO: raise warning here
220 aspect = np.clip(aspect, 1 / 3., 3.)
221 else:
222 aspect = 1
223 fig, gs = create_figure(width='single', nrows=1, cols=[1, 'cbar'],
224 aspects=[[[0, 0], aspect]])
225 ax = plt.subplot(gs[0])
226 ax.patch.set_facecolor('w')
227 # levels ----------------------------------------------------------------------------------
228 if channel.signed:
229 if local:
230 limit = channel.mag
231 else:
232 data_channel = data.channels[channel_index]
233 if dynamic_range:
234 limit = min(abs(data_channel.null - data_channel.min()),
235 abs(data_channel.null - data_channel.max()))
236 else:
237 limit = data_channel.mag
238 if np.isnan(limit):
239 limit = 1.
240 if limit is np.ma.masked:
241 limit = 1.
242 levels = np.linspace(-limit + channel.null, limit + channel.null, 200)
243 else:
244 if local:
245 levels = np.linspace(channel.null, np.nanmax(zi), 200)
246 else:
247 data_channel = data.channels[channel_index]
248 if data_channel.max() < data_channel.null:
249 levels = np.linspace(data_channel.min(), data_channel.null, 200)
250 else:
251 levels = np.linspace(data_channel.null, data_channel.max(), 200)
252 # colors ----------------------------------------------------------------------------------
253 if pixelated:
254 ax.pcolor(d, cmap=cmap, vmin=levels.min(), vmax=levels.max())
255 else:
256 ax.contourf(d, cmap=cmap, levels=levels)
257 # contour lines ---------------------------------------------------------------------------
258 if contours:
259 raise NotImplementedError
260 # decoration ------------------------------------------------------------------------------
261 plt.xticks(rotation=45, fontsize=14)
262 plt.yticks(fontsize=14)
263 ax.set_xlabel(xaxis.label, fontsize=18)
264 ax.set_ylabel(yaxis.label, fontsize=18)
265 # colorbar
266 cax = plt.subplot(gs[1])
267 cbar_ticks = np.linspace(levels.min(), levels.max(), 11)
268 plot_colorbar(cax=cax, ticks=cbar_ticks, label=channel.name, cmap=cmap)
269 # save figure -----------------------------------------------------------------------------
270 if autosave:
271 if fname:
272 file_name = fname + ' ' + str(i).zfill(3)
273 else:
274 file_name = str(i).zfill(3)
275 fpath = os.path.join(save_directory, file_name + '.png')
276 savefig(fpath, fig=fig)
277 plt.close()
278 if verbose:
279 print('image saved at', fpath)
280 out.append(fpath)
281 return out
282
```
Path: `examples/fringes_transform.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 """
3 Fringes transform
4 =================
5
6 An example of transform on a dataset containing fringes.
7 """
8
9 import matplotlib.pyplot as plt
10
11 import WrightTools as wt
12 from WrightTools import datasets
13
14 p = datasets.PyCMDS.w2_w1_000
15 data = wt.data.from_PyCMDS(p)
16
17 data.signal_mean.symmetric_root(0.5) # to amplitude level
18
19 fig, gs = wt.artists.create_figure(width='double', cols=[1, 1, 'cbar'])
20
21 # as taken
22 ax = plt.subplot(gs[0, 0])
23 ax.pcolor(data)
24 wt.artists.set_ax_labels(xlabel=data.w2.label, ylabel=data.w1.label)
25 ax.grid()
26 ax.set_title('as taken', fontsize=20)
27
28 # transformed
29 ax = plt.subplot(gs[0, 1])
30 data.transform(['wm', 'w1'])
31 data.convert('wn')
32 ax.pcolor(data)
33 wt.artists.set_ax_labels(xlabel=data.wm.label, yticks=False)
34 ax.grid()
35 ax.set_title('transformed', fontsize=20)
36
37 # colorbar
38 cax = plt.subplot(gs[0, -1])
39 wt.artists.plot_colorbar(cax, label='amplitude')
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/WrightTools/artists/_quick.py b/WrightTools/artists/_quick.py
--- a/WrightTools/artists/_quick.py
+++ b/WrightTools/artists/_quick.py
@@ -123,7 +123,7 @@
return out
-def quick2D(data, xaxis=1, yaxis=0, at={}, channel=0, *, contours=0, pixelated=True,
+def quick2D(data, xaxis=0, yaxis=1, at={}, channel=0, *, contours=0, pixelated=True,
dynamic_range=False, local=False, contours_local=True, autosave=False,
save_directory=None, fname=None, verbose=True):
"""Quickly plot 2D slice(s) of data.
@@ -133,9 +133,9 @@
data : WrightTools.Data object.
Data to plot.
xaxis : string or integer (optional)
- Expression or index of horizontal axis. Default is 1.
+ Expression or index of horizontal axis. Default is 0.
yaxis : string or integer (optional)
- Expression or index of vertical axis. Default is 0.
+ Expression or index of vertical axis. Default is 1.
at : dictionary (optional)
Dictionary of parameters in non-plotted dimension(s). If not
provided, plots will be made at each coordinate.
diff --git a/examples/fringes_transform.py b/examples/fringes_transform.py
--- a/examples/fringes_transform.py
+++ b/examples/fringes_transform.py
@@ -15,6 +15,7 @@
data = wt.data.from_PyCMDS(p)
data.signal_mean.symmetric_root(0.5) # to amplitude level
+data.convert('wn')
fig, gs = wt.artists.create_figure(width='double', cols=[1, 1, 'cbar'])
|
{"golden_diff": "diff --git a/WrightTools/artists/_quick.py b/WrightTools/artists/_quick.py\n--- a/WrightTools/artists/_quick.py\n+++ b/WrightTools/artists/_quick.py\n@@ -123,7 +123,7 @@\n return out\n \n \n-def quick2D(data, xaxis=1, yaxis=0, at={}, channel=0, *, contours=0, pixelated=True,\n+def quick2D(data, xaxis=0, yaxis=1, at={}, channel=0, *, contours=0, pixelated=True,\n dynamic_range=False, local=False, contours_local=True, autosave=False,\n save_directory=None, fname=None, verbose=True):\n \"\"\"Quickly plot 2D slice(s) of data.\n@@ -133,9 +133,9 @@\n data : WrightTools.Data object.\n Data to plot.\n xaxis : string or integer (optional)\n- Expression or index of horizontal axis. Default is 1.\n+ Expression or index of horizontal axis. Default is 0.\n yaxis : string or integer (optional)\n- Expression or index of vertical axis. Default is 0.\n+ Expression or index of vertical axis. Default is 1.\n at : dictionary (optional)\n Dictionary of parameters in non-plotted dimension(s). If not\n provided, plots will be made at each coordinate.\ndiff --git a/examples/fringes_transform.py b/examples/fringes_transform.py\n--- a/examples/fringes_transform.py\n+++ b/examples/fringes_transform.py\n@@ -15,6 +15,7 @@\n data = wt.data.from_PyCMDS(p)\n \n data.signal_mean.symmetric_root(0.5) # to amplitude level\n+data.convert('wn')\n \n fig, gs = wt.artists.create_figure(width='double', cols=[1, 1, 'cbar'])\n", "issue": "quick2D returns transposed plot\n`artists.quick2D` uses the overloaded `ax.pcolor` and `ax.contourf` to plot arrays--- see [here](https://github.com/wright-group/WrightTools/blob/development/WrightTools/artists/_quick.py#L252). \r\n\r\nHowever, `ax.contourf` does not properly handle the arrays. I think the problem is [line255](https://github.com/wright-group/WrightTools/blob/development/WrightTools/artists/_base.py#L255). I think `zi` should instead be `zi.T` to handle the way matplotlib indexes arrays. \r\n \r\n\n", "before_files": [{"content": "\"\"\"Quick plotting.\"\"\"\n\n\n# --- import --------------------------------------------------------------------------------------\n\n\nimport os\n\nimport numpy as np\n\nimport matplotlib.pyplot as plt\n\nfrom ._helpers import create_figure, plot_colorbar, savefig\nfrom ._colors import colormaps\nfrom .. import kit as wt_kit\n\n\n# --- define --------------------------------------------------------------------------------------\n\n\n__all__ = ['quick1D', 'quick2D']\n\n\n# --- general purpose plotting functions ----------------------------------------------------------\n\n\ndef quick1D(data, axis=0, at={}, channel=0, *, local=False, autosave=False, save_directory=None,\n fname=None, verbose=True):\n \"\"\"Quickly plot 1D slice(s) of data.\n\n Parameters\n ----------\n data : WrightTools.Data object\n Data to plot.\n axis : string or integer (optional)\n Expression or index of axis. Default is 0.\n at : dictionary (optional)\n Dictionary of parameters in non-plotted dimension(s). If not\n provided, plots will be made at each coordinate.\n channel : string or integer (optional)\n Name or index of channel to plot. Default is 0.\n local : boolean (optional)\n Toggle plotting locally. Default is False.\n autosave : boolean (optional)\n Toggle autosave. Default is False.\n save_directory : string (optional)\n Location to save image(s). Default is None (auto-generated).\n fname : string (optional)\n File name. If None, data name is used. Default is None.\n verbose : boolean (optional)\n Toggle talkback. 
Default is True.\n\n Returns\n -------\n list of strings\n List of saved image files (if any).\n \"\"\"\n # prepare data\n chopped = data.chop(axis, at=at, verbose=False)\n # channel index\n channel_index = wt_kit.get_index(data.channel_names, channel)\n # prepare figure\n fig = None\n if len(chopped) > 10:\n if not autosave:\n print('more than 10 images will be generated: forcing autosave')\n autosave = True\n # prepare output folders\n if autosave:\n if save_directory:\n pass\n else:\n if len(chopped) == 1:\n save_directory = os.getcwd()\n if fname:\n pass\n else:\n fname = data.natural_name\n else:\n folder_name = 'mpl_1D ' + wt_kit.TimeStamp().path\n os.mkdir(folder_name)\n save_directory = folder_name\n # chew through image generation\n out = []\n for i, d in enumerate(chopped.values()):\n # unpack data -----------------------------------------------------------------------------\n axis = d.axes[0]\n xi = axis.full\n channel = d.channels[channel_index]\n zi = channel[:]\n # create figure ---------------------------------------------------------------------------\n aspects = [[[0, 0], 0.5]]\n fig, gs = create_figure(width='single', nrows=1, cols=[1], aspects=aspects)\n ax = plt.subplot(gs[0, 0])\n # plot ------------------------------------------------------------------------------------\n plt.plot(xi, zi, lw=2)\n plt.scatter(xi, zi, color='grey', alpha=0.5, edgecolor='none')\n # decoration ------------------------------------------------------------------------------\n plt.grid()\n # limits\n if local:\n pass\n else:\n data_channel = data.channels[channel_index]\n plt.ylim(data_channel.min(), data_channel.max())\n # label axes\n ax.set_xlabel(axis.label, fontsize=18)\n ax.set_ylabel(channel.name, fontsize=18)\n plt.xticks(rotation=45)\n plt.xlim(xi.min(), xi.max())\n # save ------------------------------------------------------------------------------------\n if autosave:\n if fname:\n file_name = fname + ' ' + str(i).zfill(3)\n else:\n file_name = str(i).zfill(3)\n fpath = os.path.join(save_directory, file_name + '.png')\n savefig(fpath, fig=fig)\n plt.close()\n if verbose:\n print('image saved at', fpath)\n out.append(fpath)\n return out\n\n\ndef quick2D(data, xaxis=1, yaxis=0, at={}, channel=0, *, contours=0, pixelated=True,\n dynamic_range=False, local=False, contours_local=True, autosave=False,\n save_directory=None, fname=None, verbose=True):\n \"\"\"Quickly plot 2D slice(s) of data.\n\n Parameters\n ----------\n data : WrightTools.Data object.\n Data to plot.\n xaxis : string or integer (optional)\n Expression or index of horizontal axis. Default is 1.\n yaxis : string or integer (optional)\n Expression or index of vertical axis. Default is 0.\n at : dictionary (optional)\n Dictionary of parameters in non-plotted dimension(s). If not\n provided, plots will be made at each coordinate.\n channel : string or integer (optional)\n Name or index of channel to plot. Default is 0.\n contours : integer (optional)\n The number of black contour lines to add to the plot. Default is 0.\n pixelated : boolean (optional)\n Toggle between pcolor and contourf (deulaney) plotting backends.\n Default is True (pcolor).\n dynamic_range : boolean (optional)\n Force the colorbar to use all of its colors. Only changes behavior\n for signed channels. Default is False.\n local : boolean (optional)\n Toggle plotting locally. Default is False.\n contours_local : boolean (optional)\n Toggle plotting black contour lines locally. Default is True.\n autosave : boolean (optional)\n Toggle autosave. 
Default is False.\n save_directory : string (optional)\n Location to save image(s). Default is None (auto-generated).\n fname : string (optional)\n File name. If None, data name is used. Default is None.\n verbose : boolean (optional)\n Toggle talkback. Default is True.\n\n Returns\n -------\n list of strings\n List of saved image files (if any).\n \"\"\"\n # prepare data\n chopped = data.chop(xaxis, yaxis, at=at, verbose=False)\n # channel index\n channel_index = wt_kit.get_index(data.channel_names, channel)\n # colormap\n # get colormap\n if data.channels[channel_index].signed:\n cmap = 'signed'\n else:\n cmap = 'default'\n cmap = colormaps[cmap]\n cmap.set_bad([0.75] * 3, 1.)\n cmap.set_under([0.75] * 3, 1.)\n # fname\n if fname is None:\n fname = data.natural_name\n # autosave\n if len(chopped) > 10:\n if not autosave:\n print('more than 10 images will be generated: forcing autosave')\n autosave = True\n # output folder\n if autosave:\n if save_directory:\n pass\n else:\n if len(chopped) == 1:\n save_directory = os.getcwd()\n else:\n folder_name = 'quick2D ' + wt_kit.TimeStamp().path\n os.mkdir(folder_name)\n save_directory = folder_name\n # loop through image generation\n out = []\n for i, d in enumerate(chopped.values()):\n # unpack data -----------------------------------------------------------------------------\n xaxis = d.axes[0]\n xlim = xaxis.min(), xaxis.max()\n yaxis = d.axes[1]\n ylim = xaxis.min(), yaxis.max()\n channel = d.channels[channel_index]\n zi = channel[:]\n zi = np.ma.masked_invalid(zi)\n # create figure ---------------------------------------------------------------------------\n if xaxis.units == yaxis.units:\n xr = xlim[1] - xlim[0]\n yr = ylim[1] - ylim[0]\n aspect = np.abs(yr / xr)\n if 3 < aspect or aspect < 1 / 3.:\n # TODO: raise warning here\n aspect = np.clip(aspect, 1 / 3., 3.)\n else:\n aspect = 1\n fig, gs = create_figure(width='single', nrows=1, cols=[1, 'cbar'],\n aspects=[[[0, 0], aspect]])\n ax = plt.subplot(gs[0])\n ax.patch.set_facecolor('w')\n # levels ----------------------------------------------------------------------------------\n if channel.signed:\n if local:\n limit = channel.mag\n else:\n data_channel = data.channels[channel_index]\n if dynamic_range:\n limit = min(abs(data_channel.null - data_channel.min()),\n abs(data_channel.null - data_channel.max()))\n else:\n limit = data_channel.mag\n if np.isnan(limit):\n limit = 1.\n if limit is np.ma.masked:\n limit = 1.\n levels = np.linspace(-limit + channel.null, limit + channel.null, 200)\n else:\n if local:\n levels = np.linspace(channel.null, np.nanmax(zi), 200)\n else:\n data_channel = data.channels[channel_index]\n if data_channel.max() < data_channel.null:\n levels = np.linspace(data_channel.min(), data_channel.null, 200)\n else:\n levels = np.linspace(data_channel.null, data_channel.max(), 200)\n # colors ----------------------------------------------------------------------------------\n if pixelated:\n ax.pcolor(d, cmap=cmap, vmin=levels.min(), vmax=levels.max())\n else:\n ax.contourf(d, cmap=cmap, levels=levels)\n # contour lines ---------------------------------------------------------------------------\n if contours:\n raise NotImplementedError\n # decoration ------------------------------------------------------------------------------\n plt.xticks(rotation=45, fontsize=14)\n plt.yticks(fontsize=14)\n ax.set_xlabel(xaxis.label, fontsize=18)\n ax.set_ylabel(yaxis.label, fontsize=18)\n # colorbar\n cax = plt.subplot(gs[1])\n cbar_ticks = np.linspace(levels.min(), levels.max(), 
11)\n plot_colorbar(cax=cax, ticks=cbar_ticks, label=channel.name, cmap=cmap)\n # save figure -----------------------------------------------------------------------------\n if autosave:\n if fname:\n file_name = fname + ' ' + str(i).zfill(3)\n else:\n file_name = str(i).zfill(3)\n fpath = os.path.join(save_directory, file_name + '.png')\n savefig(fpath, fig=fig)\n plt.close()\n if verbose:\n print('image saved at', fpath)\n out.append(fpath)\n return out\n", "path": "WrightTools/artists/_quick.py"}, {"content": "# -*- coding: utf-8 -*-\n\"\"\"\nFringes transform\n=================\n\nAn example of transform on a dataset containing fringes.\n\"\"\"\n\nimport matplotlib.pyplot as plt\n\nimport WrightTools as wt\nfrom WrightTools import datasets\n\np = datasets.PyCMDS.w2_w1_000\ndata = wt.data.from_PyCMDS(p)\n\ndata.signal_mean.symmetric_root(0.5) # to amplitude level\n\nfig, gs = wt.artists.create_figure(width='double', cols=[1, 1, 'cbar'])\n\n# as taken\nax = plt.subplot(gs[0, 0])\nax.pcolor(data)\nwt.artists.set_ax_labels(xlabel=data.w2.label, ylabel=data.w1.label)\nax.grid()\nax.set_title('as taken', fontsize=20)\n\n# transformed\nax = plt.subplot(gs[0, 1])\ndata.transform(['wm', 'w1'])\ndata.convert('wn')\nax.pcolor(data)\nwt.artists.set_ax_labels(xlabel=data.wm.label, yticks=False)\nax.grid()\nax.set_title('transformed', fontsize=20)\n\n# colorbar\ncax = plt.subplot(gs[0, -1])\nwt.artists.plot_colorbar(cax, label='amplitude')\n", "path": "examples/fringes_transform.py"}], "after_files": [{"content": "\"\"\"Quick plotting.\"\"\"\n\n\n# --- import --------------------------------------------------------------------------------------\n\n\nimport os\n\nimport numpy as np\n\nimport matplotlib.pyplot as plt\n\nfrom ._helpers import create_figure, plot_colorbar, savefig\nfrom ._colors import colormaps\nfrom .. import kit as wt_kit\n\n\n# --- define --------------------------------------------------------------------------------------\n\n\n__all__ = ['quick1D', 'quick2D']\n\n\n# --- general purpose plotting functions ----------------------------------------------------------\n\n\ndef quick1D(data, axis=0, at={}, channel=0, *, local=False, autosave=False, save_directory=None,\n fname=None, verbose=True):\n \"\"\"Quickly plot 1D slice(s) of data.\n\n Parameters\n ----------\n data : WrightTools.Data object\n Data to plot.\n axis : string or integer (optional)\n Expression or index of axis. Default is 0.\n at : dictionary (optional)\n Dictionary of parameters in non-plotted dimension(s). If not\n provided, plots will be made at each coordinate.\n channel : string or integer (optional)\n Name or index of channel to plot. Default is 0.\n local : boolean (optional)\n Toggle plotting locally. Default is False.\n autosave : boolean (optional)\n Toggle autosave. Default is False.\n save_directory : string (optional)\n Location to save image(s). Default is None (auto-generated).\n fname : string (optional)\n File name. If None, data name is used. Default is None.\n verbose : boolean (optional)\n Toggle talkback. 
Default is True.\n\n Returns\n -------\n list of strings\n List of saved image files (if any).\n \"\"\"\n # prepare data\n chopped = data.chop(axis, at=at, verbose=False)\n # channel index\n channel_index = wt_kit.get_index(data.channel_names, channel)\n # prepare figure\n fig = None\n if len(chopped) > 10:\n if not autosave:\n print('more than 10 images will be generated: forcing autosave')\n autosave = True\n # prepare output folders\n if autosave:\n if save_directory:\n pass\n else:\n if len(chopped) == 1:\n save_directory = os.getcwd()\n if fname:\n pass\n else:\n fname = data.natural_name\n else:\n folder_name = 'mpl_1D ' + wt_kit.TimeStamp().path\n os.mkdir(folder_name)\n save_directory = folder_name\n # chew through image generation\n out = []\n for i, d in enumerate(chopped.values()):\n # unpack data -----------------------------------------------------------------------------\n axis = d.axes[0]\n xi = axis.full\n channel = d.channels[channel_index]\n zi = channel[:]\n # create figure ---------------------------------------------------------------------------\n aspects = [[[0, 0], 0.5]]\n fig, gs = create_figure(width='single', nrows=1, cols=[1], aspects=aspects)\n ax = plt.subplot(gs[0, 0])\n # plot ------------------------------------------------------------------------------------\n plt.plot(xi, zi, lw=2)\n plt.scatter(xi, zi, color='grey', alpha=0.5, edgecolor='none')\n # decoration ------------------------------------------------------------------------------\n plt.grid()\n # limits\n if local:\n pass\n else:\n data_channel = data.channels[channel_index]\n plt.ylim(data_channel.min(), data_channel.max())\n # label axes\n ax.set_xlabel(axis.label, fontsize=18)\n ax.set_ylabel(channel.name, fontsize=18)\n plt.xticks(rotation=45)\n plt.xlim(xi.min(), xi.max())\n # save ------------------------------------------------------------------------------------\n if autosave:\n if fname:\n file_name = fname + ' ' + str(i).zfill(3)\n else:\n file_name = str(i).zfill(3)\n fpath = os.path.join(save_directory, file_name + '.png')\n savefig(fpath, fig=fig)\n plt.close()\n if verbose:\n print('image saved at', fpath)\n out.append(fpath)\n return out\n\n\ndef quick2D(data, xaxis=0, yaxis=1, at={}, channel=0, *, contours=0, pixelated=True,\n dynamic_range=False, local=False, contours_local=True, autosave=False,\n save_directory=None, fname=None, verbose=True):\n \"\"\"Quickly plot 2D slice(s) of data.\n\n Parameters\n ----------\n data : WrightTools.Data object.\n Data to plot.\n xaxis : string or integer (optional)\n Expression or index of horizontal axis. Default is 0.\n yaxis : string or integer (optional)\n Expression or index of vertical axis. Default is 1.\n at : dictionary (optional)\n Dictionary of parameters in non-plotted dimension(s). If not\n provided, plots will be made at each coordinate.\n channel : string or integer (optional)\n Name or index of channel to plot. Default is 0.\n contours : integer (optional)\n The number of black contour lines to add to the plot. Default is 0.\n pixelated : boolean (optional)\n Toggle between pcolor and contourf (deulaney) plotting backends.\n Default is True (pcolor).\n dynamic_range : boolean (optional)\n Force the colorbar to use all of its colors. Only changes behavior\n for signed channels. Default is False.\n local : boolean (optional)\n Toggle plotting locally. Default is False.\n contours_local : boolean (optional)\n Toggle plotting black contour lines locally. Default is True.\n autosave : boolean (optional)\n Toggle autosave. 
Default is False.\n save_directory : string (optional)\n Location to save image(s). Default is None (auto-generated).\n fname : string (optional)\n File name. If None, data name is used. Default is None.\n verbose : boolean (optional)\n Toggle talkback. Default is True.\n\n Returns\n -------\n list of strings\n List of saved image files (if any).\n \"\"\"\n # prepare data\n chopped = data.chop(xaxis, yaxis, at=at, verbose=False)\n # channel index\n channel_index = wt_kit.get_index(data.channel_names, channel)\n # colormap\n # get colormap\n if data.channels[channel_index].signed:\n cmap = 'signed'\n else:\n cmap = 'default'\n cmap = colormaps[cmap]\n cmap.set_bad([0.75] * 3, 1.)\n cmap.set_under([0.75] * 3, 1.)\n # fname\n if fname is None:\n fname = data.natural_name\n # autosave\n if len(chopped) > 10:\n if not autosave:\n print('more than 10 images will be generated: forcing autosave')\n autosave = True\n # output folder\n if autosave:\n if save_directory:\n pass\n else:\n if len(chopped) == 1:\n save_directory = os.getcwd()\n else:\n folder_name = 'quick2D ' + wt_kit.TimeStamp().path\n os.mkdir(folder_name)\n save_directory = folder_name\n # loop through image generation\n out = []\n for i, d in enumerate(chopped.values()):\n # unpack data -----------------------------------------------------------------------------\n xaxis = d.axes[0]\n xlim = xaxis.min(), xaxis.max()\n yaxis = d.axes[1]\n ylim = xaxis.min(), yaxis.max()\n channel = d.channels[channel_index]\n zi = channel[:]\n zi = np.ma.masked_invalid(zi)\n # create figure ---------------------------------------------------------------------------\n if xaxis.units == yaxis.units:\n xr = xlim[1] - xlim[0]\n yr = ylim[1] - ylim[0]\n aspect = np.abs(yr / xr)\n if 3 < aspect or aspect < 1 / 3.:\n # TODO: raise warning here\n aspect = np.clip(aspect, 1 / 3., 3.)\n else:\n aspect = 1\n fig, gs = create_figure(width='single', nrows=1, cols=[1, 'cbar'],\n aspects=[[[0, 0], aspect]])\n ax = plt.subplot(gs[0])\n ax.patch.set_facecolor('w')\n # levels ----------------------------------------------------------------------------------\n if channel.signed:\n if local:\n limit = channel.mag\n else:\n data_channel = data.channels[channel_index]\n if dynamic_range:\n limit = min(abs(data_channel.null - data_channel.min()),\n abs(data_channel.null - data_channel.max()))\n else:\n limit = data_channel.mag\n if np.isnan(limit):\n limit = 1.\n if limit is np.ma.masked:\n limit = 1.\n levels = np.linspace(-limit + channel.null, limit + channel.null, 200)\n else:\n if local:\n levels = np.linspace(channel.null, np.nanmax(zi), 200)\n else:\n data_channel = data.channels[channel_index]\n if data_channel.max() < data_channel.null:\n levels = np.linspace(data_channel.min(), data_channel.null, 200)\n else:\n levels = np.linspace(data_channel.null, data_channel.max(), 200)\n # colors ----------------------------------------------------------------------------------\n if pixelated:\n ax.pcolor(d, cmap=cmap, vmin=levels.min(), vmax=levels.max())\n else:\n ax.contourf(d, cmap=cmap, levels=levels)\n # contour lines ---------------------------------------------------------------------------\n if contours:\n raise NotImplementedError\n # decoration ------------------------------------------------------------------------------\n plt.xticks(rotation=45, fontsize=14)\n plt.yticks(fontsize=14)\n ax.set_xlabel(xaxis.label, fontsize=18)\n ax.set_ylabel(yaxis.label, fontsize=18)\n # colorbar\n cax = plt.subplot(gs[1])\n cbar_ticks = np.linspace(levels.min(), levels.max(), 
11)\n plot_colorbar(cax=cax, ticks=cbar_ticks, label=channel.name, cmap=cmap)\n # save figure -----------------------------------------------------------------------------\n if autosave:\n if fname:\n file_name = fname + ' ' + str(i).zfill(3)\n else:\n file_name = str(i).zfill(3)\n fpath = os.path.join(save_directory, file_name + '.png')\n savefig(fpath, fig=fig)\n plt.close()\n if verbose:\n print('image saved at', fpath)\n out.append(fpath)\n return out\n", "path": "WrightTools/artists/_quick.py"}, {"content": "# -*- coding: utf-8 -*-\n\"\"\"\nFringes transform\n=================\n\nAn example of transform on a dataset containing fringes.\n\"\"\"\n\nimport matplotlib.pyplot as plt\n\nimport WrightTools as wt\nfrom WrightTools import datasets\n\np = datasets.PyCMDS.w2_w1_000\ndata = wt.data.from_PyCMDS(p)\n\ndata.signal_mean.symmetric_root(0.5) # to amplitude level\ndata.convert('wn')\n\nfig, gs = wt.artists.create_figure(width='double', cols=[1, 1, 'cbar'])\n\n# as taken\nax = plt.subplot(gs[0, 0])\nax.pcolor(data)\nwt.artists.set_ax_labels(xlabel=data.w2.label, ylabel=data.w1.label)\nax.grid()\nax.set_title('as taken', fontsize=20)\n\n# transformed\nax = plt.subplot(gs[0, 1])\ndata.transform(['wm', 'w1'])\ndata.convert('wn')\nax.pcolor(data)\nwt.artists.set_ax_labels(xlabel=data.wm.label, yticks=False)\nax.grid()\nax.set_title('transformed', fontsize=20)\n\n# colorbar\ncax = plt.subplot(gs[0, -1])\nwt.artists.plot_colorbar(cax, label='amplitude')\n", "path": "examples/fringes_transform.py"}]}
| 3,770 | 405 |
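The report in the record above comes down to matplotlib's row/column convention: filled-contour plots take their array as (rows = y, columns = x), so an array indexed `[x, y]` must be transposed before plotting — and the merged fix sidesteps the question by swapping `quick2D`'s default `xaxis`/`yaxis` indices instead, as the golden diff shows. The snippet below illustrates only the plain-matplotlib convention; it is not WrightTools code, and the sample data is invented for the demonstration.

```python
import numpy as np
import matplotlib.pyplot as plt

x = np.linspace(0.0, 1.0, 50)   # horizontal axis: 50 points
y = np.linspace(0.0, 2.0, 80)   # vertical axis: 80 points

# zi is indexed [x, y] (shape 50 x 80), the layout described in the issue.
zi = np.sin(2 * np.pi * x)[:, None] * np.cos(np.pi * y)[None, :]

fig, ax = plt.subplots()
# contourf expects C with shape (len(y), len(x)): rows run along y.
# Passing zi directly raises a shape-mismatch error; zi.T lines the array
# up with the axes, which is the `zi.T` change the issue suggests.
ax.contourf(x, y, zi.T, levels=64)
ax.set_xlabel('x')
ax.set_ylabel('y')
plt.show()
```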
gh_patches_debug_5671
|
rasdani/github-patches
|
git_diff
|
projectmesa__mesa-539
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
epstein_civil_violence box doesn't fit grid
<img width="431" alt="screen shot 2018-04-01 at 10 05 11 pm" src="https://user-images.githubusercontent.com/166734/38180219-de2decf8-35f8-11e8-8d9b-562d2fb7c58b.png">
^^ Fix the outline grid on this model. The grid should be the same size as the outline.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/epstein_civil_violence/civil_violence/server.py`
Content:
```
1 from mesa.visualization.ModularVisualization import ModularServer
2 from mesa.visualization.modules import CanvasGrid
3
4 from .model import CivilViolenceModel
5 from .agent import Citizen, Cop
6
7
8 COP_COLOR = "#000000"
9 AGENT_QUIET_COLOR = "#0066CC"
10 AGENT_REBEL_COLOR = "#CC0000"
11 JAIL_COLOR = "#757575"
12
13
14 def citizen_cop_portrayal(agent):
15 if agent is None:
16 return
17
18 portrayal = {"Shape": "circle",
19 "x": agent.pos[0], "y": agent.pos[1],
20 "Filled": "true"}
21
22 if type(agent) is Citizen:
23 color = AGENT_QUIET_COLOR if agent.condition == "Quiescent" else \
24 AGENT_REBEL_COLOR
25 color = JAIL_COLOR if agent.jail_sentence else color
26 portrayal["Color"] = color
27 portrayal["r"] = 0.8
28 portrayal["Layer"] = 0
29
30 elif type(agent) is Cop:
31 portrayal["Color"] = COP_COLOR
32 portrayal["r"] = 0.5
33 portrayal["Layer"] = 1
34 return portrayal
35
36
37 model_params = dict(height=40,
38 width=40,
39 citizen_density=.7,
40 cop_density=.074,
41 citizen_vision=7,
42 cop_vision=7,
43 legitimacy=.8,
44 max_jail_term=1000)
45
46 canvas_element = CanvasGrid(citizen_cop_portrayal, 40, 40, 500, 500)
47 server = ModularServer(CivilViolenceModel, [canvas_element],
48 "Epstein Civil Violence", model_params)
49
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/examples/epstein_civil_violence/civil_violence/server.py b/examples/epstein_civil_violence/civil_violence/server.py
--- a/examples/epstein_civil_violence/civil_violence/server.py
+++ b/examples/epstein_civil_violence/civil_violence/server.py
@@ -43,6 +43,6 @@
legitimacy=.8,
max_jail_term=1000)
-canvas_element = CanvasGrid(citizen_cop_portrayal, 40, 40, 500, 500)
+canvas_element = CanvasGrid(citizen_cop_portrayal, 40, 40, 480, 480)
server = ModularServer(CivilViolenceModel, [canvas_element],
"Epstein Civil Violence", model_params)
|
{"golden_diff": "diff --git a/examples/epstein_civil_violence/civil_violence/server.py b/examples/epstein_civil_violence/civil_violence/server.py\n--- a/examples/epstein_civil_violence/civil_violence/server.py\n+++ b/examples/epstein_civil_violence/civil_violence/server.py\n@@ -43,6 +43,6 @@\n legitimacy=.8,\n max_jail_term=1000)\n \n-canvas_element = CanvasGrid(citizen_cop_portrayal, 40, 40, 500, 500)\n+canvas_element = CanvasGrid(citizen_cop_portrayal, 40, 40, 480, 480)\n server = ModularServer(CivilViolenceModel, [canvas_element],\n \"Epstein Civil Violence\", model_params)\n", "issue": "epstein_civil_violence box doesn't fit grid\n<img width=\"431\" alt=\"screen shot 2018-04-01 at 10 05 11 pm\" src=\"https://user-images.githubusercontent.com/166734/38180219-de2decf8-35f8-11e8-8d9b-562d2fb7c58b.png\">\r\n\r\n^^ Fix the outline grid on this model. The grid should be the same size as the outline.\n", "before_files": [{"content": "from mesa.visualization.ModularVisualization import ModularServer\nfrom mesa.visualization.modules import CanvasGrid\n\nfrom .model import CivilViolenceModel\nfrom .agent import Citizen, Cop\n\n\nCOP_COLOR = \"#000000\"\nAGENT_QUIET_COLOR = \"#0066CC\"\nAGENT_REBEL_COLOR = \"#CC0000\"\nJAIL_COLOR = \"#757575\"\n\n\ndef citizen_cop_portrayal(agent):\n if agent is None:\n return\n\n portrayal = {\"Shape\": \"circle\",\n \"x\": agent.pos[0], \"y\": agent.pos[1],\n \"Filled\": \"true\"}\n\n if type(agent) is Citizen:\n color = AGENT_QUIET_COLOR if agent.condition == \"Quiescent\" else \\\n AGENT_REBEL_COLOR\n color = JAIL_COLOR if agent.jail_sentence else color\n portrayal[\"Color\"] = color\n portrayal[\"r\"] = 0.8\n portrayal[\"Layer\"] = 0\n\n elif type(agent) is Cop:\n portrayal[\"Color\"] = COP_COLOR\n portrayal[\"r\"] = 0.5\n portrayal[\"Layer\"] = 1\n return portrayal\n\n\nmodel_params = dict(height=40,\n width=40,\n citizen_density=.7,\n cop_density=.074,\n citizen_vision=7,\n cop_vision=7,\n legitimacy=.8,\n max_jail_term=1000)\n\ncanvas_element = CanvasGrid(citizen_cop_portrayal, 40, 40, 500, 500)\nserver = ModularServer(CivilViolenceModel, [canvas_element],\n \"Epstein Civil Violence\", model_params)\n", "path": "examples/epstein_civil_violence/civil_violence/server.py"}], "after_files": [{"content": "from mesa.visualization.ModularVisualization import ModularServer\nfrom mesa.visualization.modules import CanvasGrid\n\nfrom .model import CivilViolenceModel\nfrom .agent import Citizen, Cop\n\n\nCOP_COLOR = \"#000000\"\nAGENT_QUIET_COLOR = \"#0066CC\"\nAGENT_REBEL_COLOR = \"#CC0000\"\nJAIL_COLOR = \"#757575\"\n\n\ndef citizen_cop_portrayal(agent):\n if agent is None:\n return\n\n portrayal = {\"Shape\": \"circle\",\n \"x\": agent.pos[0], \"y\": agent.pos[1],\n \"Filled\": \"true\"}\n\n if type(agent) is Citizen:\n color = AGENT_QUIET_COLOR if agent.condition == \"Quiescent\" else \\\n AGENT_REBEL_COLOR\n color = JAIL_COLOR if agent.jail_sentence else color\n portrayal[\"Color\"] = color\n portrayal[\"r\"] = 0.8\n portrayal[\"Layer\"] = 0\n\n elif type(agent) is Cop:\n portrayal[\"Color\"] = COP_COLOR\n portrayal[\"r\"] = 0.5\n portrayal[\"Layer\"] = 1\n return portrayal\n\n\nmodel_params = dict(height=40,\n width=40,\n citizen_density=.7,\n cop_density=.074,\n citizen_vision=7,\n cop_vision=7,\n legitimacy=.8,\n max_jail_term=1000)\n\ncanvas_element = CanvasGrid(citizen_cop_portrayal, 40, 40, 480, 480)\nserver = ModularServer(CivilViolenceModel, [canvas_element],\n \"Epstein Civil Violence\", model_params)\n", "path": 
"examples/epstein_civil_violence/civil_violence/server.py"}]}
| 863 | 193 |
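The one-line fix in the record above shrinks the canvas from 500×500 px to 480×480 px so that the 40×40 grid divides it evenly at 12 px per cell; a fractional per-cell size is a plausible explanation for the drawn cells not reaching the outline, though that diagnosis is an inference rather than something stated in the issue. The sketch below simply parameterizes that relationship; the import path for `citizen_cop_portrayal` is an assumption for illustration.

```python
from mesa.visualization.modules import CanvasGrid

from civil_violence.server import citizen_cop_portrayal  # assumed import path

GRID_WIDTH = 40    # cells
GRID_HEIGHT = 40   # cells
CELL_PIXELS = 12   # pixels per cell

# Deriving the canvas size from the cell count keeps the ratio integral
# (40 * 12 = 480), so the rendered grid exactly fills its outline.
canvas_element = CanvasGrid(
    citizen_cop_portrayal,
    GRID_WIDTH,
    GRID_HEIGHT,
    GRID_WIDTH * CELL_PIXELS,    # 480 px wide
    GRID_HEIGHT * CELL_PIXELS,   # 480 px tall
)
```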
gh_patches_debug_9767
|
rasdani/github-patches
|
git_diff
|
meltano__meltano-7096
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add deprecation warning to `meltano ui` command
Meltano UI is deprecated, and is scheduled for removal in Meltano v3. We should make that known to all who currently use the UI by printing a clear warning message when they run `meltano ui` that states that:
- it is *currently* deprecated
- it will be removed in Meltano v3
Relates to https://github.com/meltano/internal-general/discussions/460
Migration strategies likely fall outside the scope of this issue. From a chat with @aaronsteers on 2022-12-12 the priority will be providing feature-parity with equal-or-better UX via the CLI.
A UI as part of Meltano Cloud (possibly accessible after `meltano login` even if not running workloads on Meltano Cloud) may be available in the future for users who absolutely love UIs and have no interest in moving away from the Meltano UI to the CLI, but that's more or less entirely unplanned, so no promises at this point.
Because we cannot make promises about what we'll be doing to replace the UI, at this point I recommend we keep the deprecation warning minimal and fact-based. Doing so may lead to users asking (many) questions about the impending removal on Slack. This will be a good opportunity for us to discuss with them to figure out what the best path forward will be, i.e. we can ask them why they like the UI, if they'd be happy with the CLI, etc.
Once we've got a more concrete idea for what comes next (and likely after it has been implemented and released), we can update the deprecation warning to advertise it.
@sbalnojan @afolson @tayloramurphy
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/meltano/cli/ui.py`
Content:
```
1 """Meltano UI CLI."""
2
3 from __future__ import annotations
4
5 import logging
6 import os
7 import secrets
8 import signal
9
10 import click
11
12 from meltano.api.workers import APIWorker, UIAvailableWorker
13 from meltano.cli import cli
14 from meltano.cli.params import pass_project
15 from meltano.cli.utils import CliError, InstrumentedCmd, InstrumentedDefaultGroup
16 from meltano.core.project import Project
17 from meltano.core.project_settings_service import (
18 ProjectSettingsService,
19 SettingValueStore,
20 )
21
22 logger = logging.getLogger(__name__)
23
24
25 def ensure_secure_setup(project: Project):
26 """Verify UI security settings."""
27 settings_service = ProjectSettingsService(project)
28
29 if not settings_service.get("ui.authentication"):
30 return
31
32 facts = []
33 if (
34 settings_service.get("ui.server_name") is None
35 and settings_service.get("ui.session_cookie_domain") is None
36 ):
37 facts.append(
38 "- Neither the 'ui.server_name' or 'ui.session_cookie_domain' setting has been set"
39 )
40
41 secure_settings = ["ui.secret_key", "ui.password_salt"]
42 for setting_name in secure_settings:
43 value, source = settings_service.get_with_source(setting_name)
44 if source is SettingValueStore.DEFAULT:
45 facts.append(
46 f"- The '{setting_name}' setting has not been changed from the default test value"
47 )
48
49 if facts:
50 click.secho(
51 "Authentication is enabled, but your configuration is currently insecure:",
52 fg="red",
53 )
54 for fact in facts:
55 click.echo(fact)
56 click.echo(
57 "For more information about these settings and how to set them, visit "
58 "https://docs.meltano.com/reference/settings#uiauthentication"
59 )
60 click.echo()
61
62
63 def start_workers(workers):
64 """Start UI background workers."""
65
66 def stop_all():
67 logger.info("Stopping all background workers...")
68 for worker in workers:
69 worker.stop()
70
71 # start all workers
72 for worker in workers:
73 worker.start()
74
75 return stop_all
76
77
78 @cli.group(
79 cls=InstrumentedDefaultGroup,
80 default="start",
81 default_if_no_args=True,
82 short_help="Start the Meltano UI webserver.",
83 )
84 @pass_project(migrate=True)
85 @click.pass_context
86 def ui(ctx, project: Project):
87 """
88 Start the Meltano UI webserver.
89
90 \b\nRead more at https://docs.meltano.com/reference/command-line-interface#ui
91 """
92 ctx.obj["project"] = project
93
94
95 @ui.command(cls=InstrumentedCmd, short_help="Start the Meltano UI webserver.")
96 @click.option("--reload", is_flag=True, default=False)
97 @click.option("--bind", help="The hostname (or IP address) to bind on")
98 @click.option("--bind-port", help="Port to run webserver on", type=int)
99 @click.pass_context
100 def start(ctx, reload, bind, bind_port):
101 """Start the Meltano UI webserver."""
102 if bind:
103 ProjectSettingsService.config_override["ui.bind_host"] = bind
104 if bind_port:
105 ProjectSettingsService.config_override["ui.bind_port"] = bind_port
106
107 project: Project = ctx.obj["project"]
108 ensure_secure_setup(project)
109
110 workers = []
111
112 workers.append(UIAvailableWorker(project))
113 workers.append(
114 APIWorker(project, reload=reload or os.getenv("FLASK_ENV") == "development")
115 )
116
117 cleanup = start_workers(workers)
118
119 def handle_terminate(signal, frame): # noqa: WPS442
120 cleanup()
121
122 signal.signal(signal.SIGTERM, handle_terminate)
123 logger.info("All workers started.")
124
125
126 @ui.command(
127 cls=InstrumentedCmd, short_help="Generate and store server name and secrets."
128 )
129 @click.argument("server_name")
130 @click.option(
131 "--bits",
132 default=256, # noqa: WPS432
133 help="Specify the size of secrets in bits in the system DB (default 256)",
134 )
135 @click.pass_context
136 def setup(ctx, server_name, **flags):
137 """
138 Generate and store server name and secrets.
139
140 WARNING\n
141 Regenerating secrets will cause the following:
142
143 - All passwords will be invalid\n
144 - All sessions will be expired\n
145
146 Use with caution!
147 """
148 project = ctx.obj["project"]
149 settings_service = ProjectSettingsService(project)
150
151 def set_setting_env(setting_name, value):
152 settings_service.set(setting_name, value, store=SettingValueStore.DOTENV)
153
154 set_setting_env("ui.server_name", server_name)
155
156 ui_cfg_path = project.root_dir("ui.cfg")
157 if ui_cfg_path.exists():
158 raise CliError(
159 f"Found existing secrets in file '{ui_cfg_path}'. Please delete this file and rerun this command to regenerate the secrets."
160 )
161
162 def generate_secret():
163 return secrets.token_hex(int(flags["bits"] / 8)) # in bytes
164
165 secret_settings = ["ui.secret_key", "ui.password_salt"]
166 for setting_name in secret_settings:
167 value, source = settings_service.get_with_source(setting_name)
168 if source is not SettingValueStore.DEFAULT:
169 click.echo(
170 f"Setting '{setting_name}' has already been set in {source.label}. Please unset it manually and rerun this command to regenerate this secret."
171 )
172 else:
173 set_setting_env(setting_name, generate_secret())
174
175 click.echo(
176 "The server name and generated secrets have been stored in your project's `.env` file."
177 )
178 click.echo(
179 "In production, you will likely want to move these settings to actual environment variables, since `.env` is in `.gitignore` by default."
180 )
181
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/meltano/cli/ui.py b/src/meltano/cli/ui.py
--- a/src/meltano/cli/ui.py
+++ b/src/meltano/cli/ui.py
@@ -90,6 +90,13 @@
\b\nRead more at https://docs.meltano.com/reference/command-line-interface#ui
"""
ctx.obj["project"] = project
+ click.secho(
+ "\n"
+ "┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓\n"
+ "┃ The Meltano UI is deprecated, and scheduled for removal in Meltano 3.0 ┃\n"
+ "┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛\n",
+ fg="red",
+ )
@ui.command(cls=InstrumentedCmd, short_help="Start the Meltano UI webserver.")
|
{"golden_diff": "diff --git a/src/meltano/cli/ui.py b/src/meltano/cli/ui.py\n--- a/src/meltano/cli/ui.py\n+++ b/src/meltano/cli/ui.py\n@@ -90,6 +90,13 @@\n \\b\\nRead more at https://docs.meltano.com/reference/command-line-interface#ui\n \"\"\"\n ctx.obj[\"project\"] = project\n+ click.secho(\n+ \"\\n\"\n+ \"\u250f\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2513\\n\"\n+ \"\u2503 The Meltano UI is deprecated, and scheduled for removal in Meltano 3.0 \u2503\\n\"\n+ \"\u2517\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u251b\\n\",\n+ fg=\"red\",\n+ )\n \n \n @ui.command(cls=InstrumentedCmd, short_help=\"Start the Meltano UI webserver.\")\n", "issue": "Add deprecation warning to `meltano ui` command\nMeltano UI is deprecated, and is scheduled for removal in Meltano v3. We should make that known to all who currently use the UI by printing a clear warning message when they run `meltano ui` that states that:\r\n- it is *currently* deprecated\r\n- it will be removed in Meltano v3\r\n\r\nRelates to https://github.com/meltano/internal-general/discussions/460\r\n\r\nMigration strategies likely fall outside the scope of this issue. From a chat with @aaronsteers on 2022-12-12 the priority will be providing feature-parity with equal-or-better UX via the CLI.\r\n\r\nA UI as part of Meltano Cloud (possibly accessible after `meltano login` even if not running workloads on Meltano Cloud) may be available in the future for users who absolutely love UIs and have no interest in moving away from the Meltano UI to the CLI, but that's more or less entirely unplanned, so no promises at this point.\r\n\r\nBecause we cannot make promises about what we'll be doing to replace the UI, at this point I recommend we keep the deprecation warning minimal and fact-based. Doing so may lead to users asking (many) questions about the impending removal on Slack. This will be a good opportunity for us to discuss with them to figure out what the best path forward will be, i.e. 
we can ask them why they like the UI, if they'd be happy with the CLI, etc.\r\n\r\nOnce we've got a more concrete idea for what comes next (and likely after it has been implemented and released), we can update the deprecation warning to advertise it.\r\n\r\n@sbalnojan @afolson @tayloramurphy \n", "before_files": [{"content": "\"\"\"Meltano UI CLI.\"\"\"\n\nfrom __future__ import annotations\n\nimport logging\nimport os\nimport secrets\nimport signal\n\nimport click\n\nfrom meltano.api.workers import APIWorker, UIAvailableWorker\nfrom meltano.cli import cli\nfrom meltano.cli.params import pass_project\nfrom meltano.cli.utils import CliError, InstrumentedCmd, InstrumentedDefaultGroup\nfrom meltano.core.project import Project\nfrom meltano.core.project_settings_service import (\n ProjectSettingsService,\n SettingValueStore,\n)\n\nlogger = logging.getLogger(__name__)\n\n\ndef ensure_secure_setup(project: Project):\n \"\"\"Verify UI security settings.\"\"\"\n settings_service = ProjectSettingsService(project)\n\n if not settings_service.get(\"ui.authentication\"):\n return\n\n facts = []\n if (\n settings_service.get(\"ui.server_name\") is None\n and settings_service.get(\"ui.session_cookie_domain\") is None\n ):\n facts.append(\n \"- Neither the 'ui.server_name' or 'ui.session_cookie_domain' setting has been set\"\n )\n\n secure_settings = [\"ui.secret_key\", \"ui.password_salt\"]\n for setting_name in secure_settings:\n value, source = settings_service.get_with_source(setting_name)\n if source is SettingValueStore.DEFAULT:\n facts.append(\n f\"- The '{setting_name}' setting has not been changed from the default test value\"\n )\n\n if facts:\n click.secho(\n \"Authentication is enabled, but your configuration is currently insecure:\",\n fg=\"red\",\n )\n for fact in facts:\n click.echo(fact)\n click.echo(\n \"For more information about these settings and how to set them, visit \"\n \"https://docs.meltano.com/reference/settings#uiauthentication\"\n )\n click.echo()\n\n\ndef start_workers(workers):\n \"\"\"Start UI background workers.\"\"\"\n\n def stop_all():\n logger.info(\"Stopping all background workers...\")\n for worker in workers:\n worker.stop()\n\n # start all workers\n for worker in workers:\n worker.start()\n\n return stop_all\n\n\[email protected](\n cls=InstrumentedDefaultGroup,\n default=\"start\",\n default_if_no_args=True,\n short_help=\"Start the Meltano UI webserver.\",\n)\n@pass_project(migrate=True)\[email protected]_context\ndef ui(ctx, project: Project):\n \"\"\"\n Start the Meltano UI webserver.\n\n \\b\\nRead more at https://docs.meltano.com/reference/command-line-interface#ui\n \"\"\"\n ctx.obj[\"project\"] = project\n\n\[email protected](cls=InstrumentedCmd, short_help=\"Start the Meltano UI webserver.\")\[email protected](\"--reload\", is_flag=True, default=False)\[email protected](\"--bind\", help=\"The hostname (or IP address) to bind on\")\[email protected](\"--bind-port\", help=\"Port to run webserver on\", type=int)\[email protected]_context\ndef start(ctx, reload, bind, bind_port):\n \"\"\"Start the Meltano UI webserver.\"\"\"\n if bind:\n ProjectSettingsService.config_override[\"ui.bind_host\"] = bind\n if bind_port:\n ProjectSettingsService.config_override[\"ui.bind_port\"] = bind_port\n\n project: Project = ctx.obj[\"project\"]\n ensure_secure_setup(project)\n\n workers = []\n\n workers.append(UIAvailableWorker(project))\n workers.append(\n APIWorker(project, reload=reload or os.getenv(\"FLASK_ENV\") == \"development\")\n )\n\n cleanup = 
start_workers(workers)\n\n def handle_terminate(signal, frame): # noqa: WPS442\n cleanup()\n\n signal.signal(signal.SIGTERM, handle_terminate)\n logger.info(\"All workers started.\")\n\n\[email protected](\n cls=InstrumentedCmd, short_help=\"Generate and store server name and secrets.\"\n)\[email protected](\"server_name\")\[email protected](\n \"--bits\",\n default=256, # noqa: WPS432\n help=\"Specify the size of secrets in bits in the system DB (default 256)\",\n)\[email protected]_context\ndef setup(ctx, server_name, **flags):\n \"\"\"\n Generate and store server name and secrets.\n\n WARNING\\n\n Regenerating secrets will cause the following:\n\n - All passwords will be invalid\\n\n - All sessions will be expired\\n\n\n Use with caution!\n \"\"\"\n project = ctx.obj[\"project\"]\n settings_service = ProjectSettingsService(project)\n\n def set_setting_env(setting_name, value):\n settings_service.set(setting_name, value, store=SettingValueStore.DOTENV)\n\n set_setting_env(\"ui.server_name\", server_name)\n\n ui_cfg_path = project.root_dir(\"ui.cfg\")\n if ui_cfg_path.exists():\n raise CliError(\n f\"Found existing secrets in file '{ui_cfg_path}'. Please delete this file and rerun this command to regenerate the secrets.\"\n )\n\n def generate_secret():\n return secrets.token_hex(int(flags[\"bits\"] / 8)) # in bytes\n\n secret_settings = [\"ui.secret_key\", \"ui.password_salt\"]\n for setting_name in secret_settings:\n value, source = settings_service.get_with_source(setting_name)\n if source is not SettingValueStore.DEFAULT:\n click.echo(\n f\"Setting '{setting_name}' has already been set in {source.label}. Please unset it manually and rerun this command to regenerate this secret.\"\n )\n else:\n set_setting_env(setting_name, generate_secret())\n\n click.echo(\n \"The server name and generated secrets have been stored in your project's `.env` file.\"\n )\n click.echo(\n \"In production, you will likely want to move these settings to actual environment variables, since `.env` is in `.gitignore` by default.\"\n )\n", "path": "src/meltano/cli/ui.py"}], "after_files": [{"content": "\"\"\"Meltano UI CLI.\"\"\"\n\nfrom __future__ import annotations\n\nimport logging\nimport os\nimport secrets\nimport signal\n\nimport click\n\nfrom meltano.api.workers import APIWorker, UIAvailableWorker\nfrom meltano.cli import cli\nfrom meltano.cli.params import pass_project\nfrom meltano.cli.utils import CliError, InstrumentedCmd, InstrumentedDefaultGroup\nfrom meltano.core.project import Project\nfrom meltano.core.project_settings_service import (\n ProjectSettingsService,\n SettingValueStore,\n)\n\nlogger = logging.getLogger(__name__)\n\n\ndef ensure_secure_setup(project: Project):\n \"\"\"Verify UI security settings.\"\"\"\n settings_service = ProjectSettingsService(project)\n\n if not settings_service.get(\"ui.authentication\"):\n return\n\n facts = []\n if (\n settings_service.get(\"ui.server_name\") is None\n and settings_service.get(\"ui.session_cookie_domain\") is None\n ):\n facts.append(\n \"- Neither the 'ui.server_name' or 'ui.session_cookie_domain' setting has been set\"\n )\n\n secure_settings = [\"ui.secret_key\", \"ui.password_salt\"]\n for setting_name in secure_settings:\n value, source = settings_service.get_with_source(setting_name)\n if source is SettingValueStore.DEFAULT:\n facts.append(\n f\"- The '{setting_name}' setting has not been changed from the default test value\"\n )\n\n if facts:\n click.secho(\n \"Authentication is enabled, but your configuration is currently insecure:\",\n 
fg=\"red\",\n )\n for fact in facts:\n click.echo(fact)\n click.echo(\n \"For more information about these settings and how to set them, visit \"\n \"https://docs.meltano.com/reference/settings#uiauthentication\"\n )\n click.echo()\n\n\ndef start_workers(workers):\n \"\"\"Start UI background workers.\"\"\"\n\n def stop_all():\n logger.info(\"Stopping all background workers...\")\n for worker in workers:\n worker.stop()\n\n # start all workers\n for worker in workers:\n worker.start()\n\n return stop_all\n\n\[email protected](\n cls=InstrumentedDefaultGroup,\n default=\"start\",\n default_if_no_args=True,\n short_help=\"Start the Meltano UI webserver.\",\n)\n@pass_project(migrate=True)\[email protected]_context\ndef ui(ctx, project: Project):\n \"\"\"\n Start the Meltano UI webserver.\n\n \\b\\nRead more at https://docs.meltano.com/reference/command-line-interface#ui\n \"\"\"\n ctx.obj[\"project\"] = project\n click.secho(\n \"\\n\"\n \"\u250f\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2513\\n\"\n \"\u2503 The Meltano UI is deprecated, and scheduled for removal in Meltano 3.0 \u2503\\n\"\n \"\u2517\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u251b\\n\",\n fg=\"red\",\n )\n\n\[email protected](cls=InstrumentedCmd, short_help=\"Start the Meltano UI webserver.\")\[email protected](\"--reload\", is_flag=True, default=False)\[email protected](\"--bind\", help=\"The hostname (or IP address) to bind on\")\[email protected](\"--bind-port\", help=\"Port to run webserver on\", type=int)\[email protected]_context\ndef start(ctx, reload, bind, bind_port):\n \"\"\"Start the Meltano UI webserver.\"\"\"\n if bind:\n ProjectSettingsService.config_override[\"ui.bind_host\"] = bind\n if bind_port:\n ProjectSettingsService.config_override[\"ui.bind_port\"] = bind_port\n\n project: Project = ctx.obj[\"project\"]\n ensure_secure_setup(project)\n\n workers = []\n\n workers.append(UIAvailableWorker(project))\n workers.append(\n APIWorker(project, reload=reload or os.getenv(\"FLASK_ENV\") == \"development\")\n )\n\n cleanup = start_workers(workers)\n\n def handle_terminate(signal, frame): # noqa: WPS442\n cleanup()\n\n signal.signal(signal.SIGTERM, handle_terminate)\n logger.info(\"All workers started.\")\n\n\[email protected](\n cls=InstrumentedCmd, short_help=\"Generate and store server name and secrets.\"\n)\[email protected](\"server_name\")\[email protected](\n \"--bits\",\n default=256, # noqa: WPS432\n help=\"Specify the size of secrets in bits in the system DB (default 256)\",\n)\[email protected]_context\ndef setup(ctx, server_name, **flags):\n \"\"\"\n Generate and store server name and secrets.\n\n WARNING\\n\n Regenerating secrets will cause the following:\n\n - All passwords will be invalid\\n\n - All sessions will be expired\\n\n\n Use 
with caution!\n \"\"\"\n project = ctx.obj[\"project\"]\n settings_service = ProjectSettingsService(project)\n\n def set_setting_env(setting_name, value):\n settings_service.set(setting_name, value, store=SettingValueStore.DOTENV)\n\n set_setting_env(\"ui.server_name\", server_name)\n\n ui_cfg_path = project.root_dir(\"ui.cfg\")\n if ui_cfg_path.exists():\n raise CliError(\n f\"Found existing secrets in file '{ui_cfg_path}'. Please delete this file and rerun this command to regenerate the secrets.\"\n )\n\n def generate_secret():\n return secrets.token_hex(int(flags[\"bits\"] / 8)) # in bytes\n\n secret_settings = [\"ui.secret_key\", \"ui.password_salt\"]\n for setting_name in secret_settings:\n value, source = settings_service.get_with_source(setting_name)\n if source is not SettingValueStore.DEFAULT:\n click.echo(\n f\"Setting '{setting_name}' has already been set in {source.label}. Please unset it manually and rerun this command to regenerate this secret.\"\n )\n else:\n set_setting_env(setting_name, generate_secret())\n\n click.echo(\n \"The server name and generated secrets have been stored in your project's `.env` file.\"\n )\n click.echo(\n \"In production, you will likely want to move these settings to actual environment variables, since `.env` is in `.gitignore` by default.\"\n )\n", "path": "src/meltano/cli/ui.py"}]}
| 2,306 | 244 |
gh_patches_debug_39517
|
rasdani/github-patches
|
git_diff
|
strawberry-graphql__strawberry-55
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add support for interfaces
We should be able to define interfaces with strawberry, something like this:
```python
@strawberry.interface
class Node:
id: strawberry.ID
```
--- END ISSUE ---
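For illustration, a minimal sketch of how the proposed decorator could be used once it exists; the `User` type and its `name` field are assumptions added for this example, not part of the issue:
```python
import strawberry


@strawberry.interface
class Node:
    # An interface declares fields that implementing types must provide
    id: strawberry.ID


@strawberry.type
class User(Node):
    # Inherits `id` from the Node interface and adds its own field
    name: str
```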
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `strawberry/type.py`
Content:
```
1 import typing
2 from functools import partial
3
4 from dataclasses import dataclass
5 from graphql import (
6 GraphQLField,
7 GraphQLInputField,
8 GraphQLInputObjectType,
9 GraphQLObjectType,
10 )
11 from graphql.utilities.schema_printer import print_type
12
13 from .constants import IS_STRAWBERRY_FIELD, IS_STRAWBERRY_INPUT
14 from .type_converter import REGISTRY, get_graphql_type_for_annotation
15 from .utils.str_converters import to_camel_case
16
17
18 def _get_resolver(cls, field_name):
19 def _resolver(obj, info):
20 # TODO: can we make this nicer?
21 # does it work in all the cases?
22
23 field_resolver = getattr(cls(**(obj.__dict__ if obj else {})), field_name)
24
25 if getattr(field_resolver, IS_STRAWBERRY_FIELD, False):
26 return field_resolver(obj, info)
27
28 return field_resolver
29
30 return _resolver
31
32
33 def _convert_annotations_fields(cls, *, is_input=False):
34 FieldClass = GraphQLInputField if is_input else GraphQLField
35 annotations = typing.get_type_hints(cls, None, REGISTRY)
36
37 fields = {}
38
39 for key, annotation in annotations.items():
40 field_name = to_camel_case(key)
41 class_field = getattr(cls, key, None)
42
43 description = getattr(class_field, "description", None)
44
45 fields[field_name] = FieldClass(
46 get_graphql_type_for_annotation(annotation, key),
47 description=description,
48 **({} if is_input else {"resolve": _get_resolver(cls, key)})
49 )
50
51 return fields
52
53
54 def _process_type(cls, *, is_input=False, description=None):
55 name = cls.__name__
56 REGISTRY[name] = cls
57
58 def repr_(self):
59 return print_type(self.field)
60
61 setattr(cls, "__repr__", repr_)
62
63 def _get_fields():
64 fields = _convert_annotations_fields(cls, is_input=is_input)
65
66 fields.update(
67 {
68 to_camel_case(key): value.field
69 for key, value in cls.__dict__.items()
70 if getattr(value, IS_STRAWBERRY_FIELD, False)
71 }
72 )
73
74 return fields
75
76 if is_input:
77 setattr(cls, IS_STRAWBERRY_INPUT, True)
78
79 extra_kwargs = {"description": description or cls.__doc__}
80
81 TypeClass = GraphQLInputObjectType if is_input else GraphQLObjectType
82 cls.field = TypeClass(name, lambda: _get_fields(), **extra_kwargs)
83
84 return dataclass(cls, repr=False)
85
86
87 def type(cls=None, *, is_input=False, description=None):
88 """Annotates a class as a GraphQL type.
89
90 Example usage:
91
92 >>> @strawberry.type:
93 >>> class X:
94 >>> field_abc: str = "ABC"
95 """
96
97 def wrap(cls):
98 return _process_type(cls, is_input=is_input, description=description)
99
100 if cls is None:
101 return wrap
102
103 return wrap(cls)
104
105
106 input = partial(type, is_input=True)
107
```
Path: `strawberry/constants.py`
Content:
```
1 IS_STRAWBERRY_FIELD = "_is_strawberry_field"
2 IS_STRAWBERRY_INPUT = "_is_strawberry_input"
3
```
Path: `strawberry/__init__.py`
Content:
```
1 __version__ = "0.1.0"
2
3
4 from .enum import enum # noqa
5 from .field import field # noqa
6 from .mutation import mutation, subscription # noqa
7 from .scalars import ID # noqa
8 from .schema import Schema # noqa
9 from .type import input, type # noqa
10
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/strawberry/__init__.py b/strawberry/__init__.py
--- a/strawberry/__init__.py
+++ b/strawberry/__init__.py
@@ -6,4 +6,4 @@
from .mutation import mutation, subscription # noqa
from .scalars import ID # noqa
from .schema import Schema # noqa
-from .type import input, type # noqa
+from .type import input, type, interface # noqa
diff --git a/strawberry/constants.py b/strawberry/constants.py
--- a/strawberry/constants.py
+++ b/strawberry/constants.py
@@ -1,2 +1,3 @@
IS_STRAWBERRY_FIELD = "_is_strawberry_field"
IS_STRAWBERRY_INPUT = "_is_strawberry_input"
+IS_STRAWBERRY_INTERFACE = "_is_strawberry_interface"
diff --git a/strawberry/type.py b/strawberry/type.py
--- a/strawberry/type.py
+++ b/strawberry/type.py
@@ -6,11 +6,12 @@
GraphQLField,
GraphQLInputField,
GraphQLInputObjectType,
+ GraphQLInterfaceType,
GraphQLObjectType,
)
from graphql.utilities.schema_printer import print_type
-from .constants import IS_STRAWBERRY_FIELD, IS_STRAWBERRY_INPUT
+from .constants import IS_STRAWBERRY_FIELD, IS_STRAWBERRY_INPUT, IS_STRAWBERRY_INTERFACE
from .type_converter import REGISTRY, get_graphql_type_for_annotation
from .utils.str_converters import to_camel_case
@@ -51,7 +52,7 @@
return fields
-def _process_type(cls, *, is_input=False, description=None):
+def _process_type(cls, *, is_input=False, is_interface=False, description=None):
name = cls.__name__
REGISTRY[name] = cls
@@ -75,16 +76,30 @@
if is_input:
setattr(cls, IS_STRAWBERRY_INPUT, True)
+ elif is_interface:
+ setattr(cls, IS_STRAWBERRY_INTERFACE, True)
extra_kwargs = {"description": description or cls.__doc__}
- TypeClass = GraphQLInputObjectType if is_input else GraphQLObjectType
+ if is_input:
+ TypeClass = GraphQLInputObjectType
+ elif is_interface:
+ TypeClass = GraphQLInterfaceType
+ else:
+ TypeClass = GraphQLObjectType
+
+ extra_kwargs["interfaces"] = [
+ klass.field
+ for klass in cls.__bases__
+ if hasattr(klass, IS_STRAWBERRY_INTERFACE)
+ ]
+
cls.field = TypeClass(name, lambda: _get_fields(), **extra_kwargs)
return dataclass(cls, repr=False)
-def type(cls=None, *, is_input=False, description=None):
+def type(cls=None, *, is_input=False, is_interface=False, description=None):
"""Annotates a class as a GraphQL type.
Example usage:
@@ -95,7 +110,9 @@
"""
def wrap(cls):
- return _process_type(cls, is_input=is_input, description=description)
+ return _process_type(
+ cls, is_input=is_input, is_interface=is_interface, description=description
+ )
if cls is None:
return wrap
@@ -104,3 +121,4 @@
input = partial(type, is_input=True)
+interface = partial(type, is_interface=True)
|
{"golden_diff": "diff --git a/strawberry/__init__.py b/strawberry/__init__.py\n--- a/strawberry/__init__.py\n+++ b/strawberry/__init__.py\n@@ -6,4 +6,4 @@\n from .mutation import mutation, subscription # noqa\n from .scalars import ID # noqa\n from .schema import Schema # noqa\n-from .type import input, type # noqa\n+from .type import input, type, interface # noqa\ndiff --git a/strawberry/constants.py b/strawberry/constants.py\n--- a/strawberry/constants.py\n+++ b/strawberry/constants.py\n@@ -1,2 +1,3 @@\n IS_STRAWBERRY_FIELD = \"_is_strawberry_field\"\n IS_STRAWBERRY_INPUT = \"_is_strawberry_input\"\n+IS_STRAWBERRY_INTERFACE = \"_is_strawberry_interface\"\ndiff --git a/strawberry/type.py b/strawberry/type.py\n--- a/strawberry/type.py\n+++ b/strawberry/type.py\n@@ -6,11 +6,12 @@\n GraphQLField,\n GraphQLInputField,\n GraphQLInputObjectType,\n+ GraphQLInterfaceType,\n GraphQLObjectType,\n )\n from graphql.utilities.schema_printer import print_type\n \n-from .constants import IS_STRAWBERRY_FIELD, IS_STRAWBERRY_INPUT\n+from .constants import IS_STRAWBERRY_FIELD, IS_STRAWBERRY_INPUT, IS_STRAWBERRY_INTERFACE\n from .type_converter import REGISTRY, get_graphql_type_for_annotation\n from .utils.str_converters import to_camel_case\n \n@@ -51,7 +52,7 @@\n return fields\n \n \n-def _process_type(cls, *, is_input=False, description=None):\n+def _process_type(cls, *, is_input=False, is_interface=False, description=None):\n name = cls.__name__\n REGISTRY[name] = cls\n \n@@ -75,16 +76,30 @@\n \n if is_input:\n setattr(cls, IS_STRAWBERRY_INPUT, True)\n+ elif is_interface:\n+ setattr(cls, IS_STRAWBERRY_INTERFACE, True)\n \n extra_kwargs = {\"description\": description or cls.__doc__}\n \n- TypeClass = GraphQLInputObjectType if is_input else GraphQLObjectType\n+ if is_input:\n+ TypeClass = GraphQLInputObjectType\n+ elif is_interface:\n+ TypeClass = GraphQLInterfaceType\n+ else:\n+ TypeClass = GraphQLObjectType\n+\n+ extra_kwargs[\"interfaces\"] = [\n+ klass.field\n+ for klass in cls.__bases__\n+ if hasattr(klass, IS_STRAWBERRY_INTERFACE)\n+ ]\n+\n cls.field = TypeClass(name, lambda: _get_fields(), **extra_kwargs)\n \n return dataclass(cls, repr=False)\n \n \n-def type(cls=None, *, is_input=False, description=None):\n+def type(cls=None, *, is_input=False, is_interface=False, description=None):\n \"\"\"Annotates a class as a GraphQL type.\n \n Example usage:\n@@ -95,7 +110,9 @@\n \"\"\"\n \n def wrap(cls):\n- return _process_type(cls, is_input=is_input, description=description)\n+ return _process_type(\n+ cls, is_input=is_input, is_interface=is_interface, description=description\n+ )\n \n if cls is None:\n return wrap\n@@ -104,3 +121,4 @@\n \n \n input = partial(type, is_input=True)\n+interface = partial(type, is_interface=True)\n", "issue": "Add support for interfaces\nWe should be able to define interfaces with strawberry, something like this:\r\n\r\n```python\r\n\r\[email protected]\r\nclass Node:\r\n id: strawberry.ID\r\n```\n", "before_files": [{"content": "import typing\nfrom functools import partial\n\nfrom dataclasses import dataclass\nfrom graphql import (\n GraphQLField,\n GraphQLInputField,\n GraphQLInputObjectType,\n GraphQLObjectType,\n)\nfrom graphql.utilities.schema_printer import print_type\n\nfrom .constants import IS_STRAWBERRY_FIELD, IS_STRAWBERRY_INPUT\nfrom .type_converter import REGISTRY, get_graphql_type_for_annotation\nfrom .utils.str_converters import to_camel_case\n\n\ndef _get_resolver(cls, field_name):\n def _resolver(obj, info):\n # TODO: can we make this nicer?\n # does it work 
in all the cases?\n\n field_resolver = getattr(cls(**(obj.__dict__ if obj else {})), field_name)\n\n if getattr(field_resolver, IS_STRAWBERRY_FIELD, False):\n return field_resolver(obj, info)\n\n return field_resolver\n\n return _resolver\n\n\ndef _convert_annotations_fields(cls, *, is_input=False):\n FieldClass = GraphQLInputField if is_input else GraphQLField\n annotations = typing.get_type_hints(cls, None, REGISTRY)\n\n fields = {}\n\n for key, annotation in annotations.items():\n field_name = to_camel_case(key)\n class_field = getattr(cls, key, None)\n\n description = getattr(class_field, \"description\", None)\n\n fields[field_name] = FieldClass(\n get_graphql_type_for_annotation(annotation, key),\n description=description,\n **({} if is_input else {\"resolve\": _get_resolver(cls, key)})\n )\n\n return fields\n\n\ndef _process_type(cls, *, is_input=False, description=None):\n name = cls.__name__\n REGISTRY[name] = cls\n\n def repr_(self):\n return print_type(self.field)\n\n setattr(cls, \"__repr__\", repr_)\n\n def _get_fields():\n fields = _convert_annotations_fields(cls, is_input=is_input)\n\n fields.update(\n {\n to_camel_case(key): value.field\n for key, value in cls.__dict__.items()\n if getattr(value, IS_STRAWBERRY_FIELD, False)\n }\n )\n\n return fields\n\n if is_input:\n setattr(cls, IS_STRAWBERRY_INPUT, True)\n\n extra_kwargs = {\"description\": description or cls.__doc__}\n\n TypeClass = GraphQLInputObjectType if is_input else GraphQLObjectType\n cls.field = TypeClass(name, lambda: _get_fields(), **extra_kwargs)\n\n return dataclass(cls, repr=False)\n\n\ndef type(cls=None, *, is_input=False, description=None):\n \"\"\"Annotates a class as a GraphQL type.\n\n Example usage:\n\n >>> @strawberry.type:\n >>> class X:\n >>> field_abc: str = \"ABC\"\n \"\"\"\n\n def wrap(cls):\n return _process_type(cls, is_input=is_input, description=description)\n\n if cls is None:\n return wrap\n\n return wrap(cls)\n\n\ninput = partial(type, is_input=True)\n", "path": "strawberry/type.py"}, {"content": "IS_STRAWBERRY_FIELD = \"_is_strawberry_field\"\nIS_STRAWBERRY_INPUT = \"_is_strawberry_input\"\n", "path": "strawberry/constants.py"}, {"content": "__version__ = \"0.1.0\"\n\n\nfrom .enum import enum # noqa\nfrom .field import field # noqa\nfrom .mutation import mutation, subscription # noqa\nfrom .scalars import ID # noqa\nfrom .schema import Schema # noqa\nfrom .type import input, type # noqa\n", "path": "strawberry/__init__.py"}], "after_files": [{"content": "import typing\nfrom functools import partial\n\nfrom dataclasses import dataclass\nfrom graphql import (\n GraphQLField,\n GraphQLInputField,\n GraphQLInputObjectType,\n GraphQLInterfaceType,\n GraphQLObjectType,\n)\nfrom graphql.utilities.schema_printer import print_type\n\nfrom .constants import IS_STRAWBERRY_FIELD, IS_STRAWBERRY_INPUT, IS_STRAWBERRY_INTERFACE\nfrom .type_converter import REGISTRY, get_graphql_type_for_annotation\nfrom .utils.str_converters import to_camel_case\n\n\ndef _get_resolver(cls, field_name):\n def _resolver(obj, info):\n # TODO: can we make this nicer?\n # does it work in all the cases?\n\n field_resolver = getattr(cls(**(obj.__dict__ if obj else {})), field_name)\n\n if getattr(field_resolver, IS_STRAWBERRY_FIELD, False):\n return field_resolver(obj, info)\n\n return field_resolver\n\n return _resolver\n\n\ndef _convert_annotations_fields(cls, *, is_input=False):\n FieldClass = GraphQLInputField if is_input else GraphQLField\n annotations = typing.get_type_hints(cls, None, REGISTRY)\n\n fields = {}\n\n for 
key, annotation in annotations.items():\n field_name = to_camel_case(key)\n class_field = getattr(cls, key, None)\n\n description = getattr(class_field, \"description\", None)\n\n fields[field_name] = FieldClass(\n get_graphql_type_for_annotation(annotation, key),\n description=description,\n **({} if is_input else {\"resolve\": _get_resolver(cls, key)})\n )\n\n return fields\n\n\ndef _process_type(cls, *, is_input=False, is_interface=False, description=None):\n name = cls.__name__\n REGISTRY[name] = cls\n\n def repr_(self):\n return print_type(self.field)\n\n setattr(cls, \"__repr__\", repr_)\n\n def _get_fields():\n fields = _convert_annotations_fields(cls, is_input=is_input)\n\n fields.update(\n {\n to_camel_case(key): value.field\n for key, value in cls.__dict__.items()\n if getattr(value, IS_STRAWBERRY_FIELD, False)\n }\n )\n\n return fields\n\n if is_input:\n setattr(cls, IS_STRAWBERRY_INPUT, True)\n elif is_interface:\n setattr(cls, IS_STRAWBERRY_INTERFACE, True)\n\n extra_kwargs = {\"description\": description or cls.__doc__}\n\n if is_input:\n TypeClass = GraphQLInputObjectType\n elif is_interface:\n TypeClass = GraphQLInterfaceType\n else:\n TypeClass = GraphQLObjectType\n\n extra_kwargs[\"interfaces\"] = [\n klass.field\n for klass in cls.__bases__\n if hasattr(klass, IS_STRAWBERRY_INTERFACE)\n ]\n\n cls.field = TypeClass(name, lambda: _get_fields(), **extra_kwargs)\n\n return dataclass(cls, repr=False)\n\n\ndef type(cls=None, *, is_input=False, is_interface=False, description=None):\n \"\"\"Annotates a class as a GraphQL type.\n\n Example usage:\n\n >>> @strawberry.type:\n >>> class X:\n >>> field_abc: str = \"ABC\"\n \"\"\"\n\n def wrap(cls):\n return _process_type(\n cls, is_input=is_input, is_interface=is_interface, description=description\n )\n\n if cls is None:\n return wrap\n\n return wrap(cls)\n\n\ninput = partial(type, is_input=True)\ninterface = partial(type, is_interface=True)\n", "path": "strawberry/type.py"}, {"content": "IS_STRAWBERRY_FIELD = \"_is_strawberry_field\"\nIS_STRAWBERRY_INPUT = \"_is_strawberry_input\"\nIS_STRAWBERRY_INTERFACE = \"_is_strawberry_interface\"\n", "path": "strawberry/constants.py"}, {"content": "__version__ = \"0.1.0\"\n\n\nfrom .enum import enum # noqa\nfrom .field import field # noqa\nfrom .mutation import mutation, subscription # noqa\nfrom .scalars import ID # noqa\nfrom .schema import Schema # noqa\nfrom .type import input, type, interface # noqa\n", "path": "strawberry/__init__.py"}]}
| 1,308 | 771 |
gh_patches_debug_16
|
rasdani/github-patches
|
git_diff
|
OCHA-DAP__hdx-ckan-1401
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
The MailChimp subscribe field could use a little bit more padding-left
Right now the input text is too close to the left border. It would be nice to add some padding there.

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ckanext-hdx_theme/ckanext/hdx_theme/version.py`
Content:
```
1 hdx_version = 'v0.3.9'
2
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ckanext-hdx_theme/ckanext/hdx_theme/version.py b/ckanext-hdx_theme/ckanext/hdx_theme/version.py
--- a/ckanext-hdx_theme/ckanext/hdx_theme/version.py
+++ b/ckanext-hdx_theme/ckanext/hdx_theme/version.py
@@ -1 +1 @@
-hdx_version = 'v0.3.9'
+hdx_version = 'v0.3.10'
|
{"golden_diff": "diff --git a/ckanext-hdx_theme/ckanext/hdx_theme/version.py b/ckanext-hdx_theme/ckanext/hdx_theme/version.py\n--- a/ckanext-hdx_theme/ckanext/hdx_theme/version.py\n+++ b/ckanext-hdx_theme/ckanext/hdx_theme/version.py\n@@ -1 +1 @@\n-hdx_version = 'v0.3.9'\n+hdx_version = 'v0.3.10'\n", "issue": "The MailChimp subscribe field could use a little bit more padding-left\nRight now the input text is too close to the left border. It would be nice to add some padding there. \n\n\n\n", "before_files": [{"content": "hdx_version = 'v0.3.9'\n", "path": "ckanext-hdx_theme/ckanext/hdx_theme/version.py"}], "after_files": [{"content": "hdx_version = 'v0.3.10'\n", "path": "ckanext-hdx_theme/ckanext/hdx_theme/version.py"}]}
| 405 | 107 |
gh_patches_debug_25906
|
rasdani/github-patches
|
git_diff
|
facebookresearch__ParlAI-341
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Return type ambiguous when extracting image features
The return type of the extracted image features differs depending on whether the features file is present. If the file is present, it returns a numpy.ndarray object; otherwise it returns a torch.autograd.variable.Variable object.
( https://github.com/facebookresearch/ParlAI/blob/3d86ccdbb4d87002cc6c4782afd0ee5277e742f1/parlai/core/image_featurizers.py#L149 )
--- END ISSUE ---
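A minimal caller-side sketch of how the two return types could be normalized, assuming torch is installed; the helper name `to_numpy` is an assumption for this example and not part of ParlAI:
```python
import numpy as np
from torch.autograd import Variable


def to_numpy(features):
    """Coerce ImageLoader.load() output to a numpy array.

    The loader may hand back either a numpy.ndarray (cached features
    file present) or a torch Variable (features computed on the fly).
    """
    if isinstance(features, Variable):
        # .data exposes the underlying tensor; move to CPU before converting
        return features.data.cpu().numpy()
    return np.asarray(features)
```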
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `parlai/tasks/vqa_v2/agents.py`
Content:
```
1 # Copyright (c) 2017-present, Facebook, Inc.
2 # All rights reserved.
3 # This source code is licensed under the BSD-style license found in the
4 # LICENSE file in the root directory of this source tree. An additional grant
5 # of patent rights can be found in the PATENTS file in the same directory.
6
7 from parlai.core.agents import Teacher
8 from parlai.core.image_featurizers import ImageLoader
9 from .build import build, buildImage
10
11 import json
12 import random
13 import os
14
15
16 def _path(opt):
17 build(opt)
18 buildImage(opt)
19 dt = opt['datatype'].split(':')[0]
20
21 if dt == 'train':
22 ques_suffix = 'v2_OpenEnded_mscoco_train2014'
23 annotation_suffix = 'v2_mscoco_train2014'
24 img_suffix = os.path.join('train2014', 'COCO_train2014_')
25 elif dt == 'valid':
26 ques_suffix = 'v2_OpenEnded_mscoco_val2014'
27 annotation_suffix = 'v2_mscoco_val2014'
28 img_suffix = os.path.join('val2014', 'COCO_val2014_')
29 elif dt == 'test':
30 ques_suffix = 'v2_OpenEnded_mscoco_test2015'
31 annotation_suffix = 'None'
32 img_suffix = os.path.join('test2015', 'COCO_test2015_')
33 else:
34 raise RuntimeError('Not valid datatype.')
35
36 data_path = os.path.join(opt['datapath'], 'VQA-v2',
37 ques_suffix + '_questions.json')
38
39 annotation_path = os.path.join(opt['datapath'], 'VQA-v2',
40 annotation_suffix + '_annotations.json')
41
42 image_path = os.path.join(opt['datapath'], 'COCO-IMG', img_suffix)
43
44 return data_path, annotation_path, image_path
45
46
47 class OeTeacher(Teacher):
48 """VQA v2.0 Open-Ended teacher, which loads the json VQA data and
49 implements its own `act` method for interacting with student agent.
50 agent.
51 """
52 def __init__(self, opt, shared=None):
53 super().__init__(opt)
54 self.datatype = opt['datatype']
55 data_path, annotation_path, self.image_path = _path(opt)
56
57 if shared and 'ques' in shared:
58 self.ques = shared['ques']
59 if 'annotation' in shared:
60 self.annotation = shared['annotation']
61 else:
62 self._setup_data(data_path, annotation_path)
63 self.len = len(self.ques['questions'])
64
65 # for ordered data in batch mode (especially, for validation and
66 # testing), each teacher in the batch gets a start index and a step
67 # size so they all process disparate sets of the data
68 self.step_size = opt.get('batchsize', 1)
69 self.data_offset = opt.get('batchindex', 0)
70 self.image_loader = ImageLoader(opt)
71
72 self.reset()
73
74 def __len__(self):
75 return self.len
76
77 def reset(self):
78 # Reset the dialog so that it is at the start of the epoch,
79 # and all metrics are reset.
80 super().reset()
81 self.lastY = None
82 self.episode_idx = self.data_offset - self.step_size
83
84 def observe(self, observation):
85 """Process observation for metrics."""
86 if self.lastY is not None:
87 self.metrics.update(observation, self.lastY)
88 self.lastY = None
89 return observation
90
91 def act(self):
92 if self.datatype == 'train':
93 self.episode_idx = random.randrange(self.len)
94 else:
95 self.episode_idx = (self.episode_idx + self.step_size) % len(self)
96 if self.episode_idx == len(self) - self.step_size:
97 self.epochDone = True
98
99 qa = self.ques['questions'][self.episode_idx]
100 question = qa['question']
101 image_id = qa['image_id']
102
103 img_path = self.image_path + '%012d.jpg' % (image_id)
104
105 action = {
106 'image': self.image_loader.load(img_path),
107 'text': question,
108 'episode_done': True
109 }
110
111 if not self.datatype.startswith('test'):
112 anno = self.annotation['annotations'][self.episode_idx]
113 self.lastY = [ans['answer'] for ans in anno['answers']]
114
115 if self.datatype.startswith('train'):
116 action['labels'] = self.lastY
117
118 return action
119
120 def share(self):
121 shared = super().share()
122 shared['ques'] = self.ques
123 if hasattr(self, 'annotation'):
124 shared['annotation'] = self.annotation
125 return shared
126
127 def _setup_data(self, data_path, annotation_path):
128 print('loading: ' + data_path)
129 with open(data_path) as data_file:
130 self.ques = json.load(data_file)
131
132 if self.datatype != 'test':
133 print('loading: ' + annotation_path)
134 with open(annotation_path) as data_file:
135 self.annotation = json.load(data_file)
136
137
138 class DefaultTeacher(OeTeacher):
139 pass
140
```
Path: `parlai/tasks/vqa_v1/agents.py`
Content:
```
1 # Copyright (c) 2017-present, Facebook, Inc.
2 # All rights reserved.
3 # This source code is licensed under the BSD-style license found in the
4 # LICENSE file in the root directory of this source tree. An additional grant
5 # of patent rights can be found in the PATENTS file in the same directory.
6
7 from parlai.core.agents import Teacher
8 from parlai.core.image_featurizers import ImageLoader
9 from .build import build, buildImage
10
11 import json
12 import random
13 import os
14
15
16 def _path(opt):
17 build(opt)
18 buildImage(opt)
19 dt = opt['datatype'].split(':')[0]
20
21 if dt == 'train':
22 ques_suffix = 'MultipleChoice_mscoco_train2014'
23 annotation_suffix = 'mscoco_train2014'
24 img_suffix = os.path.join('train2014', 'COCO_train2014_')
25 elif dt == 'valid':
26 ques_suffix = 'MultipleChoice_mscoco_val2014'
27 annotation_suffix = 'mscoco_val2014'
28 img_suffix = os.path.join('val2014', 'COCO_val2014_')
29 elif dt == 'test':
30 ques_suffix = 'MultipleChoice_mscoco_test2015'
31 annotation_suffix = 'None'
32 img_suffix = os.path.join('test2015', 'COCO_test2015_')
33 else:
34 raise RuntimeError('Not valid datatype.')
35
36 data_path = os.path.join(opt['datapath'], 'VQA-v1',
37 ques_suffix + '_questions.json')
38
39 annotation_path = os.path.join(opt['datapath'], 'VQA-v1',
40 annotation_suffix + '_annotations.json')
41
42 image_path = os.path.join(opt['datapath'], 'COCO-IMG', img_suffix)
43
44 return data_path, annotation_path, image_path
45
46
47 class OeTeacher(Teacher):
48 """
49 VQA Open-Ended teacher, which loads the json vqa data and implements its
50 own `act` method for interacting with student agent.
51 """
52 def __init__(self, opt, shared=None):
53 super().__init__(opt, shared)
54 self.datatype = opt['datatype']
55 data_path, annotation_path, self.image_path = _path(opt)
56
57 if shared and 'ques' in shared:
58 self.ques = shared['ques']
59 if 'annotation' in shared:
60 self.annotation = shared['annotation']
61 else:
62 self._setup_data(data_path, annotation_path)
63
64 # for ordered data in batch mode (especially, for validation and
65 # testing), each teacher in the batch gets a start index and a step
66 # size so they all process disparate sets of the data
67 self.step_size = opt.get('batchsize', 1)
68 self.data_offset = opt.get('batchindex', 0)
69 self.image_loader = ImageLoader(opt)
70 self.reset()
71
72 def __len__(self):
73 return len(self.ques['questions'])
74
75 def reset(self):
76 # Reset the dialog so that it is at the start of the epoch,
77 # and all metrics are reset.
78 super().reset()
79 self.lastY = None
80 self.episode_idx = self.data_offset - self.step_size
81
82 def observe(self, observation):
83 """Process observation for metrics."""
84 if self.lastY is not None:
85 self.metrics.update(observation, self.lastY)
86 self.lastY = None
87 return observation
88
89 def act(self):
90 if self.datatype == 'train':
91 self.episode_idx = random.randrange(len(self))
92 else:
93 self.episode_idx = (self.episode_idx + self.step_size) % len(self)
94 if self.episode_idx == len(self) - self.step_size:
95 self.epochDone = True
96
97 qa = self.ques['questions'][self.episode_idx]
98 question = qa['question']
99 image_id = qa['image_id']
100
101 img_path = self.image_path + '%012d.jpg' % (image_id)
102
103 action = {
104 'image': self.image_loader.load(img_path),
105 'text': question,
106 'episode_done': True
107 }
108
109 if not self.datatype.startswith('test'):
110 anno = self.annotation['annotations'][self.episode_idx]
111 self.lastY = [ans['answer'] for ans in anno['answers']]
112
113 if self.datatype.startswith('train'):
114 action['labels'] = self.lastY
115
116 return action
117
118 def share(self):
119 shared = super().share()
120 shared['ques'] = self.ques
121 if hasattr(self, 'annotation'):
122 shared['annotation'] = self.annotation
123 return shared
124
125 def _setup_data(self, data_path, annotation_path):
126 print('loading: ' + data_path)
127 with open(data_path) as data_file:
128 self.ques = json.load(data_file)
129
130 if self.datatype != 'test':
131 print('loading: ' + annotation_path)
132 with open(annotation_path) as data_file:
133 self.annotation = json.load(data_file)
134
135
136 class McTeacher(OeTeacher):
137 """
138 VQA Multiple-Choice teacher, which inherits from OeTeacher but overrides
139 the label and label_candidates fields with multiple choice data.
140 """
141
142 def act(self):
143 action = super().act()
144
145 qa = self.ques['questions'][self.episode_idx]
146 multiple_choices = qa['multiple_choices']
147
148 action['label_candidates'] = multiple_choices
149
150 if not self.datatype.startswith('test'):
151 anno = self.annotation['annotations'][self.episode_idx]
152 self.lastY = [anno['multiple_choice_answer']]
153
154 if self.datatype.startswith('train'):
155 action['labels'] = self.lastY
156
157 return action
158
159
160 class DefaultTeacher(McTeacher):
161 # default to Multiple-Choice Teacher
162 pass
163
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/parlai/tasks/vqa_v1/agents.py b/parlai/tasks/vqa_v1/agents.py
--- a/parlai/tasks/vqa_v1/agents.py
+++ b/parlai/tasks/vqa_v1/agents.py
@@ -157,6 +157,30 @@
return action
+class AllTeacher(OeTeacher):
+ """
+ VQA Teacher, which inherits from OeTeacher and gives access to
+ the multiple choices and the multiple choice answer.
+ """
+
+ def act(self):
+ action = super().act()
+
+ qa = self.ques['questions'][self.episode_idx]
+ multiple_choices = qa['multiple_choices']
+
+ action['label_candidates'] = multiple_choices
+
+ if not self.datatype.startswith('test'):
+ anno = self.annotation['annotations'][self.episode_idx]
+ self.mclabel = [anno['multiple_choice_answer']]
+
+ if self.datatype.startswith('train'):
+ action['mc_label'] = self.mclabel
+
+ return action
+
+
class DefaultTeacher(McTeacher):
# default to Multiple-Choice Teacher
pass
diff --git a/parlai/tasks/vqa_v2/agents.py b/parlai/tasks/vqa_v2/agents.py
--- a/parlai/tasks/vqa_v2/agents.py
+++ b/parlai/tasks/vqa_v2/agents.py
@@ -135,5 +135,24 @@
self.annotation = json.load(data_file)
+class AllTeacher(OeTeacher):
+ """
+ VQA v2.0 Open-Ended teacher, which inherits from OeTeacher and
+ gives access to the multiple choice answer.
+ """
+
+ def act(self):
+ action = super().act()
+
+ if not self.datatype.startswith('test'):
+ anno = self.annotation['annotations'][self.episode_idx]
+ self.mclabel = [anno['multiple_choice_answer']]
+
+ if self.datatype.startswith('train'):
+ action['mc_label'] = self.mclabel
+
+ return action
+
+
class DefaultTeacher(OeTeacher):
pass
|
{"golden_diff": "diff --git a/parlai/tasks/vqa_v1/agents.py b/parlai/tasks/vqa_v1/agents.py\n--- a/parlai/tasks/vqa_v1/agents.py\n+++ b/parlai/tasks/vqa_v1/agents.py\n@@ -157,6 +157,30 @@\n return action\n \n \n+class AllTeacher(OeTeacher):\n+ \"\"\"\n+ VQA Teacher, which inherits from OeTeacher and gives access to\n+ the multiple choices and the multiple choice answer.\n+ \"\"\"\n+\n+ def act(self):\n+ action = super().act()\n+\n+ qa = self.ques['questions'][self.episode_idx]\n+ multiple_choices = qa['multiple_choices']\n+\n+ action['label_candidates'] = multiple_choices\n+\n+ if not self.datatype.startswith('test'):\n+ anno = self.annotation['annotations'][self.episode_idx]\n+ self.mclabel = [anno['multiple_choice_answer']]\n+\n+ if self.datatype.startswith('train'):\n+ action['mc_label'] = self.mclabel\n+\n+ return action\n+\n+\n class DefaultTeacher(McTeacher):\n # default to Multiple-Choice Teacher\n pass\ndiff --git a/parlai/tasks/vqa_v2/agents.py b/parlai/tasks/vqa_v2/agents.py\n--- a/parlai/tasks/vqa_v2/agents.py\n+++ b/parlai/tasks/vqa_v2/agents.py\n@@ -135,5 +135,24 @@\n self.annotation = json.load(data_file)\n \n \n+class AllTeacher(OeTeacher):\n+ \"\"\"\n+ VQA v2.0 Open-Ended teacher, which inherits from OeTeacher and \n+ gives access to the multiple choice answer.\n+ \"\"\"\n+\n+ def act(self):\n+ action = super().act()\n+\n+ if not self.datatype.startswith('test'):\n+ anno = self.annotation['annotations'][self.episode_idx]\n+ self.mclabel = [anno['multiple_choice_answer']]\n+\n+ if self.datatype.startswith('train'):\n+ action['mc_label'] = self.mclabel\n+\n+ return action\n+\n+\n class DefaultTeacher(OeTeacher):\n pass\n", "issue": "Return type ambiguous when extracting image features\nThe return type of the extracted image features (if the features file is present or not) is different. If the file is present then it returns a numpy.ndarray type object otherwise it returns a torch.autograd.variable.Variable object.\r\n( https://github.com/facebookresearch/ParlAI/blob/3d86ccdbb4d87002cc6c4782afd0ee5277e742f1/parlai/core/image_featurizers.py#L149 )\n", "before_files": [{"content": "# Copyright (c) 2017-present, Facebook, Inc.\n# All rights reserved.\n# This source code is licensed under the BSD-style license found in the\n# LICENSE file in the root directory of this source tree. 
An additional grant\n# of patent rights can be found in the PATENTS file in the same directory.\n\nfrom parlai.core.agents import Teacher\nfrom parlai.core.image_featurizers import ImageLoader\nfrom .build import build, buildImage\n\nimport json\nimport random\nimport os\n\n\ndef _path(opt):\n build(opt)\n buildImage(opt)\n dt = opt['datatype'].split(':')[0]\n\n if dt == 'train':\n ques_suffix = 'v2_OpenEnded_mscoco_train2014'\n annotation_suffix = 'v2_mscoco_train2014'\n img_suffix = os.path.join('train2014', 'COCO_train2014_')\n elif dt == 'valid':\n ques_suffix = 'v2_OpenEnded_mscoco_val2014'\n annotation_suffix = 'v2_mscoco_val2014'\n img_suffix = os.path.join('val2014', 'COCO_val2014_')\n elif dt == 'test':\n ques_suffix = 'v2_OpenEnded_mscoco_test2015'\n annotation_suffix = 'None'\n img_suffix = os.path.join('test2015', 'COCO_test2015_')\n else:\n raise RuntimeError('Not valid datatype.')\n\n data_path = os.path.join(opt['datapath'], 'VQA-v2',\n ques_suffix + '_questions.json')\n\n annotation_path = os.path.join(opt['datapath'], 'VQA-v2',\n annotation_suffix + '_annotations.json')\n\n image_path = os.path.join(opt['datapath'], 'COCO-IMG', img_suffix)\n\n return data_path, annotation_path, image_path\n\n\nclass OeTeacher(Teacher):\n \"\"\"VQA v2.0 Open-Ended teacher, which loads the json VQA data and\n implements its own `act` method for interacting with student agent.\n agent.\n \"\"\"\n def __init__(self, opt, shared=None):\n super().__init__(opt)\n self.datatype = opt['datatype']\n data_path, annotation_path, self.image_path = _path(opt)\n\n if shared and 'ques' in shared:\n self.ques = shared['ques']\n if 'annotation' in shared:\n self.annotation = shared['annotation']\n else:\n self._setup_data(data_path, annotation_path)\n self.len = len(self.ques['questions'])\n\n # for ordered data in batch mode (especially, for validation and\n # testing), each teacher in the batch gets a start index and a step\n # size so they all process disparate sets of the data\n self.step_size = opt.get('batchsize', 1)\n self.data_offset = opt.get('batchindex', 0)\n self.image_loader = ImageLoader(opt)\n\n self.reset()\n\n def __len__(self):\n return self.len\n\n def reset(self):\n # Reset the dialog so that it is at the start of the epoch,\n # and all metrics are reset.\n super().reset()\n self.lastY = None\n self.episode_idx = self.data_offset - self.step_size\n\n def observe(self, observation):\n \"\"\"Process observation for metrics.\"\"\"\n if self.lastY is not None:\n self.metrics.update(observation, self.lastY)\n self.lastY = None\n return observation\n\n def act(self):\n if self.datatype == 'train':\n self.episode_idx = random.randrange(self.len)\n else:\n self.episode_idx = (self.episode_idx + self.step_size) % len(self)\n if self.episode_idx == len(self) - self.step_size:\n self.epochDone = True\n\n qa = self.ques['questions'][self.episode_idx]\n question = qa['question']\n image_id = qa['image_id']\n\n img_path = self.image_path + '%012d.jpg' % (image_id)\n\n action = {\n 'image': self.image_loader.load(img_path),\n 'text': question,\n 'episode_done': True\n }\n\n if not self.datatype.startswith('test'):\n anno = self.annotation['annotations'][self.episode_idx]\n self.lastY = [ans['answer'] for ans in anno['answers']]\n\n if self.datatype.startswith('train'):\n action['labels'] = self.lastY\n\n return action\n\n def share(self):\n shared = super().share()\n shared['ques'] = self.ques\n if hasattr(self, 'annotation'):\n shared['annotation'] = self.annotation\n return shared\n\n def 
_setup_data(self, data_path, annotation_path):\n print('loading: ' + data_path)\n with open(data_path) as data_file:\n self.ques = json.load(data_file)\n\n if self.datatype != 'test':\n print('loading: ' + annotation_path)\n with open(annotation_path) as data_file:\n self.annotation = json.load(data_file)\n\n\nclass DefaultTeacher(OeTeacher):\n pass\n", "path": "parlai/tasks/vqa_v2/agents.py"}, {"content": "# Copyright (c) 2017-present, Facebook, Inc.\n# All rights reserved.\n# This source code is licensed under the BSD-style license found in the\n# LICENSE file in the root directory of this source tree. An additional grant\n# of patent rights can be found in the PATENTS file in the same directory.\n\nfrom parlai.core.agents import Teacher\nfrom parlai.core.image_featurizers import ImageLoader\nfrom .build import build, buildImage\n\nimport json\nimport random\nimport os\n\n\ndef _path(opt):\n build(opt)\n buildImage(opt)\n dt = opt['datatype'].split(':')[0]\n\n if dt == 'train':\n ques_suffix = 'MultipleChoice_mscoco_train2014'\n annotation_suffix = 'mscoco_train2014'\n img_suffix = os.path.join('train2014', 'COCO_train2014_')\n elif dt == 'valid':\n ques_suffix = 'MultipleChoice_mscoco_val2014'\n annotation_suffix = 'mscoco_val2014'\n img_suffix = os.path.join('val2014', 'COCO_val2014_')\n elif dt == 'test':\n ques_suffix = 'MultipleChoice_mscoco_test2015'\n annotation_suffix = 'None'\n img_suffix = os.path.join('test2015', 'COCO_test2015_')\n else:\n raise RuntimeError('Not valid datatype.')\n\n data_path = os.path.join(opt['datapath'], 'VQA-v1',\n ques_suffix + '_questions.json')\n\n annotation_path = os.path.join(opt['datapath'], 'VQA-v1',\n annotation_suffix + '_annotations.json')\n\n image_path = os.path.join(opt['datapath'], 'COCO-IMG', img_suffix)\n\n return data_path, annotation_path, image_path\n\n\nclass OeTeacher(Teacher):\n \"\"\"\n VQA Open-Ended teacher, which loads the json vqa data and implements its\n own `act` method for interacting with student agent.\n \"\"\"\n def __init__(self, opt, shared=None):\n super().__init__(opt, shared)\n self.datatype = opt['datatype']\n data_path, annotation_path, self.image_path = _path(opt)\n\n if shared and 'ques' in shared:\n self.ques = shared['ques']\n if 'annotation' in shared:\n self.annotation = shared['annotation']\n else:\n self._setup_data(data_path, annotation_path)\n\n # for ordered data in batch mode (especially, for validation and\n # testing), each teacher in the batch gets a start index and a step\n # size so they all process disparate sets of the data\n self.step_size = opt.get('batchsize', 1)\n self.data_offset = opt.get('batchindex', 0)\n self.image_loader = ImageLoader(opt)\n self.reset()\n\n def __len__(self):\n return len(self.ques['questions'])\n\n def reset(self):\n # Reset the dialog so that it is at the start of the epoch,\n # and all metrics are reset.\n super().reset()\n self.lastY = None\n self.episode_idx = self.data_offset - self.step_size\n\n def observe(self, observation):\n \"\"\"Process observation for metrics.\"\"\"\n if self.lastY is not None:\n self.metrics.update(observation, self.lastY)\n self.lastY = None\n return observation\n\n def act(self):\n if self.datatype == 'train':\n self.episode_idx = random.randrange(len(self))\n else:\n self.episode_idx = (self.episode_idx + self.step_size) % len(self)\n if self.episode_idx == len(self) - self.step_size:\n self.epochDone = True\n\n qa = self.ques['questions'][self.episode_idx]\n question = qa['question']\n image_id = qa['image_id']\n\n img_path = 
self.image_path + '%012d.jpg' % (image_id)\n\n action = {\n 'image': self.image_loader.load(img_path),\n 'text': question,\n 'episode_done': True\n }\n\n if not self.datatype.startswith('test'):\n anno = self.annotation['annotations'][self.episode_idx]\n self.lastY = [ans['answer'] for ans in anno['answers']]\n\n if self.datatype.startswith('train'):\n action['labels'] = self.lastY\n\n return action\n\n def share(self):\n shared = super().share()\n shared['ques'] = self.ques\n if hasattr(self, 'annotation'):\n shared['annotation'] = self.annotation\n return shared\n\n def _setup_data(self, data_path, annotation_path):\n print('loading: ' + data_path)\n with open(data_path) as data_file:\n self.ques = json.load(data_file)\n\n if self.datatype != 'test':\n print('loading: ' + annotation_path)\n with open(annotation_path) as data_file:\n self.annotation = json.load(data_file)\n\n\nclass McTeacher(OeTeacher):\n \"\"\"\n VQA Multiple-Choice teacher, which inherits from OeTeacher but overrides\n the label and label_candidates fields with multiple choice data.\n \"\"\"\n\n def act(self):\n action = super().act()\n\n qa = self.ques['questions'][self.episode_idx]\n multiple_choices = qa['multiple_choices']\n\n action['label_candidates'] = multiple_choices\n\n if not self.datatype.startswith('test'):\n anno = self.annotation['annotations'][self.episode_idx]\n self.lastY = [anno['multiple_choice_answer']]\n\n if self.datatype.startswith('train'):\n action['labels'] = self.lastY\n\n return action\n\n\nclass DefaultTeacher(McTeacher):\n # default to Multiple-Choice Teacher\n pass\n", "path": "parlai/tasks/vqa_v1/agents.py"}], "after_files": [{"content": "# Copyright (c) 2017-present, Facebook, Inc.\n# All rights reserved.\n# This source code is licensed under the BSD-style license found in the\n# LICENSE file in the root directory of this source tree. 
An additional grant\n# of patent rights can be found in the PATENTS file in the same directory.\n\nfrom parlai.core.agents import Teacher\nfrom parlai.core.image_featurizers import ImageLoader\nfrom .build import build, buildImage\n\nimport json\nimport random\nimport os\n\n\ndef _path(opt):\n build(opt)\n buildImage(opt)\n dt = opt['datatype'].split(':')[0]\n\n if dt == 'train':\n ques_suffix = 'v2_OpenEnded_mscoco_train2014'\n annotation_suffix = 'v2_mscoco_train2014'\n img_suffix = os.path.join('train2014', 'COCO_train2014_')\n elif dt == 'valid':\n ques_suffix = 'v2_OpenEnded_mscoco_val2014'\n annotation_suffix = 'v2_mscoco_val2014'\n img_suffix = os.path.join('val2014', 'COCO_val2014_')\n elif dt == 'test':\n ques_suffix = 'v2_OpenEnded_mscoco_test2015'\n annotation_suffix = 'None'\n img_suffix = os.path.join('test2015', 'COCO_test2015_')\n else:\n raise RuntimeError('Not valid datatype.')\n\n data_path = os.path.join(opt['datapath'], 'VQA-v2',\n ques_suffix + '_questions.json')\n\n annotation_path = os.path.join(opt['datapath'], 'VQA-v2',\n annotation_suffix + '_annotations.json')\n\n image_path = os.path.join(opt['datapath'], 'COCO-IMG', img_suffix)\n\n return data_path, annotation_path, image_path\n\n\nclass OeTeacher(Teacher):\n \"\"\"VQA v2.0 Open-Ended teacher, which loads the json VQA data and\n implements its own `act` method for interacting with student agent.\n agent.\n \"\"\"\n def __init__(self, opt, shared=None):\n super().__init__(opt)\n self.datatype = opt['datatype']\n data_path, annotation_path, self.image_path = _path(opt)\n\n if shared and 'ques' in shared:\n self.ques = shared['ques']\n if 'annotation' in shared:\n self.annotation = shared['annotation']\n else:\n self._setup_data(data_path, annotation_path)\n self.len = len(self.ques['questions'])\n\n # for ordered data in batch mode (especially, for validation and\n # testing), each teacher in the batch gets a start index and a step\n # size so they all process disparate sets of the data\n self.step_size = opt.get('batchsize', 1)\n self.data_offset = opt.get('batchindex', 0)\n self.image_loader = ImageLoader(opt)\n\n self.reset()\n\n def __len__(self):\n return self.len\n\n def reset(self):\n # Reset the dialog so that it is at the start of the epoch,\n # and all metrics are reset.\n super().reset()\n self.lastY = None\n self.episode_idx = self.data_offset - self.step_size\n\n def observe(self, observation):\n \"\"\"Process observation for metrics.\"\"\"\n if self.lastY is not None:\n self.metrics.update(observation, self.lastY)\n self.lastY = None\n return observation\n\n def act(self):\n if self.datatype == 'train':\n self.episode_idx = random.randrange(self.len)\n else:\n self.episode_idx = (self.episode_idx + self.step_size) % len(self)\n if self.episode_idx == len(self) - self.step_size:\n self.epochDone = True\n\n qa = self.ques['questions'][self.episode_idx]\n question = qa['question']\n image_id = qa['image_id']\n\n img_path = self.image_path + '%012d.jpg' % (image_id)\n\n action = {\n 'image': self.image_loader.load(img_path),\n 'text': question,\n 'episode_done': True\n }\n\n if not self.datatype.startswith('test'):\n anno = self.annotation['annotations'][self.episode_idx]\n self.lastY = [ans['answer'] for ans in anno['answers']]\n\n if self.datatype.startswith('train'):\n action['labels'] = self.lastY\n\n return action\n\n def share(self):\n shared = super().share()\n shared['ques'] = self.ques\n if hasattr(self, 'annotation'):\n shared['annotation'] = self.annotation\n return shared\n\n def 
_setup_data(self, data_path, annotation_path):\n print('loading: ' + data_path)\n with open(data_path) as data_file:\n self.ques = json.load(data_file)\n\n if self.datatype != 'test':\n print('loading: ' + annotation_path)\n with open(annotation_path) as data_file:\n self.annotation = json.load(data_file)\n\n\nclass AllTeacher(OeTeacher):\n \"\"\"\n VQA v2.0 Open-Ended teacher, which inherits from OeTeacher and \n gives access to the multiple choice answer.\n \"\"\"\n\n def act(self):\n action = super().act()\n\n if not self.datatype.startswith('test'):\n anno = self.annotation['annotations'][self.episode_idx]\n self.mclabel = [anno['multiple_choice_answer']]\n\n if self.datatype.startswith('train'):\n action['mc_label'] = self.mclabel\n\n return action\n\n\nclass DefaultTeacher(OeTeacher):\n pass\n", "path": "parlai/tasks/vqa_v2/agents.py"}, {"content": "# Copyright (c) 2017-present, Facebook, Inc.\n# All rights reserved.\n# This source code is licensed under the BSD-style license found in the\n# LICENSE file in the root directory of this source tree. An additional grant\n# of patent rights can be found in the PATENTS file in the same directory.\n\nfrom parlai.core.agents import Teacher\nfrom parlai.core.image_featurizers import ImageLoader\nfrom .build import build, buildImage\n\nimport json\nimport random\nimport os\n\n\ndef _path(opt):\n build(opt)\n buildImage(opt)\n dt = opt['datatype'].split(':')[0]\n\n if dt == 'train':\n ques_suffix = 'MultipleChoice_mscoco_train2014'\n annotation_suffix = 'mscoco_train2014'\n img_suffix = os.path.join('train2014', 'COCO_train2014_')\n elif dt == 'valid':\n ques_suffix = 'MultipleChoice_mscoco_val2014'\n annotation_suffix = 'mscoco_val2014'\n img_suffix = os.path.join('val2014', 'COCO_val2014_')\n elif dt == 'test':\n ques_suffix = 'MultipleChoice_mscoco_test2015'\n annotation_suffix = 'None'\n img_suffix = os.path.join('test2015', 'COCO_test2015_')\n else:\n raise RuntimeError('Not valid datatype.')\n\n data_path = os.path.join(opt['datapath'], 'VQA-v1',\n ques_suffix + '_questions.json')\n\n annotation_path = os.path.join(opt['datapath'], 'VQA-v1',\n annotation_suffix + '_annotations.json')\n\n image_path = os.path.join(opt['datapath'], 'COCO-IMG', img_suffix)\n\n return data_path, annotation_path, image_path\n\n\nclass OeTeacher(Teacher):\n \"\"\"\n VQA Open-Ended teacher, which loads the json vqa data and implements its\n own `act` method for interacting with student agent.\n \"\"\"\n def __init__(self, opt, shared=None):\n super().__init__(opt, shared)\n self.datatype = opt['datatype']\n data_path, annotation_path, self.image_path = _path(opt)\n\n if shared and 'ques' in shared:\n self.ques = shared['ques']\n if 'annotation' in shared:\n self.annotation = shared['annotation']\n else:\n self._setup_data(data_path, annotation_path)\n\n # for ordered data in batch mode (especially, for validation and\n # testing), each teacher in the batch gets a start index and a step\n # size so they all process disparate sets of the data\n self.step_size = opt.get('batchsize', 1)\n self.data_offset = opt.get('batchindex', 0)\n self.image_loader = ImageLoader(opt)\n self.reset()\n\n def __len__(self):\n return len(self.ques['questions'])\n\n def reset(self):\n # Reset the dialog so that it is at the start of the epoch,\n # and all metrics are reset.\n super().reset()\n self.lastY = None\n self.episode_idx = self.data_offset - self.step_size\n\n def observe(self, observation):\n \"\"\"Process observation for metrics.\"\"\"\n if self.lastY is not None:\n 
self.metrics.update(observation, self.lastY)\n self.lastY = None\n return observation\n\n def act(self):\n if self.datatype == 'train':\n self.episode_idx = random.randrange(len(self))\n else:\n self.episode_idx = (self.episode_idx + self.step_size) % len(self)\n if self.episode_idx == len(self) - self.step_size:\n self.epochDone = True\n\n qa = self.ques['questions'][self.episode_idx]\n question = qa['question']\n image_id = qa['image_id']\n\n img_path = self.image_path + '%012d.jpg' % (image_id)\n\n action = {\n 'image': self.image_loader.load(img_path),\n 'text': question,\n 'episode_done': True\n }\n\n if not self.datatype.startswith('test'):\n anno = self.annotation['annotations'][self.episode_idx]\n self.lastY = [ans['answer'] for ans in anno['answers']]\n\n if self.datatype.startswith('train'):\n action['labels'] = self.lastY\n\n return action\n\n def share(self):\n shared = super().share()\n shared['ques'] = self.ques\n if hasattr(self, 'annotation'):\n shared['annotation'] = self.annotation\n return shared\n\n def _setup_data(self, data_path, annotation_path):\n print('loading: ' + data_path)\n with open(data_path) as data_file:\n self.ques = json.load(data_file)\n\n if self.datatype != 'test':\n print('loading: ' + annotation_path)\n with open(annotation_path) as data_file:\n self.annotation = json.load(data_file)\n\n\nclass McTeacher(OeTeacher):\n \"\"\"\n VQA Multiple-Choice teacher, which inherits from OeTeacher but overrides\n the label and label_candidates fields with multiple choice data.\n \"\"\"\n\n def act(self):\n action = super().act()\n\n qa = self.ques['questions'][self.episode_idx]\n multiple_choices = qa['multiple_choices']\n\n action['label_candidates'] = multiple_choices\n\n if not self.datatype.startswith('test'):\n anno = self.annotation['annotations'][self.episode_idx]\n self.lastY = [anno['multiple_choice_answer']]\n\n if self.datatype.startswith('train'):\n action['labels'] = self.lastY\n\n return action\n\n\nclass AllTeacher(OeTeacher):\n \"\"\"\n VQA Teacher, which inherits from OeTeacher and gives access to\n the multiple choices and the multiple choice answer.\n \"\"\"\n\n def act(self):\n action = super().act()\n\n qa = self.ques['questions'][self.episode_idx]\n multiple_choices = qa['multiple_choices']\n\n action['label_candidates'] = multiple_choices\n\n if not self.datatype.startswith('test'):\n anno = self.annotation['annotations'][self.episode_idx]\n self.mclabel = [anno['multiple_choice_answer']]\n\n if self.datatype.startswith('train'):\n action['mc_label'] = self.mclabel\n\n return action\n\n\nclass DefaultTeacher(McTeacher):\n # default to Multiple-Choice Teacher\n pass\n", "path": "parlai/tasks/vqa_v1/agents.py"}]}
| 3,520 | 493 |
gh_patches_debug_40712
|
rasdani/github-patches
|
git_diff
|
conan-io__conan-5973
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[bug] Propagation of cpp_info.name in cmake_find_package_multi
The `cmake_find_package_multi` generator correctly generates *XXXConfig.cmake* files using the dependency's `cpp_info.name` for the `XXX` part, but doesn't always correctly use `cpp_info.name` inside the generated file.
### Environment Details
* Operating System+version: Ubuntu 18.04
* Conan version: 1.19.2
* Python version: 3.6.8
### Steps to reproduce
I have an ITK recipe that requires both `zlib` and `hdf5` and uses `cmake_find_package_multi`; the `hdf5` recipe also requires `zlib`. Everything worked smoothly up to yesterday, when the `cpp_info.name` of `zlib` was changed to `ZLIB`: now `cmake_find_package_multi` correctly generates *HDF5Config.cmake* and *ZLIBConfig.cmake* as expected, but the *HDF5Config.cmake* file contains the following lines:
```cmake
include(CMakeFindDependencyMacro)
if(${CMAKE_VERSION} VERSION_LESS "3.9.0")
find_package(zlib REQUIRED NO_MODULE)
else()
find_dependency(zlib REQUIRED NO_MODULE)
endif()
get_target_property(tmp zlib::zlib INTERFACE_LINK_LIBRARIES)
```
When the `find_dependency` above is called, it searches for a file called *zlibConfig.cmake* instead of the generated *ZLIBConfig.cmake*. I can't tell for sure, but I believe that `cmake_find_package_multi` doesn't correctly propagate the `cpp_info.name` into the `find_dependency` call and the subsequent target names when it should.
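
For illustration, once the dependency's `cpp_info.name` is propagated, the same block in the generated *HDF5Config.cmake* would be expected to look roughly like this (a sketch based on the behaviour described above, with `ZLIB` as the dependency name):

```cmake
include(CMakeFindDependencyMacro)
if(${CMAKE_VERSION} VERSION_LESS "3.9.0")
    find_package(ZLIB REQUIRED NO_MODULE)
else()
    find_dependency(ZLIB REQUIRED NO_MODULE)
endif()
get_target_property(tmp ZLIB::ZLIB INTERFACE_LINK_LIBRARIES)
```

so that `find_dependency` looks for the generated *ZLIBConfig.cmake* and the `ZLIB::ZLIB` target.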
### Logs (Executed commands with output) (Include/Attach if Applicable)
The relevant logs are the following ones:
```
CMake Error at /usr/share/cmake-3.10/Modules/CMakeFindDependencyMacro.cmake:48 (find_package):
Could not find a package configuration file provided by "zlib" with any of
the following names:
zlibConfig.cmake
zlib-config.cmake
Add the installation prefix of "zlib" to CMAKE_PREFIX_PATH or set
"zlib_DIR" to a directory containing one of the above files. If "zlib"
provides a separate development package or SDK, be sure it has been
installed.
Call Stack (most recent call first):
HDF5Config.cmake:43 (find_dependency)
source_subfolder/Modules/ThirdParty/HDF5/itk-module-init.cmake:5 (find_package)
source_subfolder/CMake/ITKModuleEnablement.cmake:318 (include)
source_subfolder/CMakeLists.txt:433 (include)
-- Configuring incomplete, errors occurred!
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conans/client/generators/cmake_find_package.py`
Content:
```
1 from conans.client.generators.cmake import DepsCppCmake
2 from conans.client.generators.cmake_find_package_common import target_template
3 from conans.model import Generator
4
5 find_package_header = """
6 include(FindPackageHandleStandardArgs)
7
8 message(STATUS "Conan: Using autogenerated Find{name}.cmake")
9 # Global approach
10 set({name}_FOUND 1)
11 set({name}_VERSION "{version}")
12
13 find_package_handle_standard_args({name} REQUIRED_VARS {name}_VERSION VERSION_VAR {name}_VERSION)
14 mark_as_advanced({name}_FOUND {name}_VERSION)
15
16 """
17
18
19 assign_target_properties = """
20 if({name}_INCLUDE_DIRS)
21 set_target_properties({name}::{name} PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${{{name}_INCLUDE_DIRS}}")
22 endif()
23 set_property(TARGET {name}::{name} PROPERTY INTERFACE_LINK_LIBRARIES ${{{name}_LIBRARIES_TARGETS}} "${{{name}_LINKER_FLAGS_LIST}}")
24 set_property(TARGET {name}::{name} PROPERTY INTERFACE_COMPILE_DEFINITIONS ${{{name}_COMPILE_DEFINITIONS}})
25 set_property(TARGET {name}::{name} PROPERTY INTERFACE_COMPILE_OPTIONS "${{{name}_COMPILE_OPTIONS_LIST}}")
26 """
27
28
29 class CMakeFindPackageGenerator(Generator):
30 template = """
31 {find_package_header_block}
32 {find_libraries_block}
33 if(NOT ${{CMAKE_VERSION}} VERSION_LESS "3.0")
34 # Target approach
35 if(NOT TARGET {name}::{name})
36 add_library({name}::{name} INTERFACE IMPORTED)
37 {assign_target_properties_block}
38 {find_dependencies_block}
39 endif()
40 endif()
41 """
42
43 @property
44 def filename(self):
45 pass
46
47 @property
48 def content(self):
49 ret = {}
50 for depname, cpp_info in self.deps_build_info.dependencies:
51 ret["Find%s.cmake" % cpp_info.name] = self._find_for_dep(cpp_info.name, cpp_info)
52 return ret
53
54 def _find_for_dep(self, name, cpp_info):
55 deps = DepsCppCmake(cpp_info)
56 lines = []
57 if cpp_info.public_deps:
58 # Here we are generating FindXXX, so find_modules=True
59 lines = find_dependency_lines(name, cpp_info, find_modules=True)
60 find_package_header_block = find_package_header.format(name=name, version=cpp_info.version)
61 find_libraries_block = target_template.format(name=name, deps=deps, build_type_suffix="")
62 target_props = assign_target_properties.format(name=name, deps=deps)
63 tmp = self.template.format(name=name, deps=deps,
64 version=cpp_info.version,
65 find_dependencies_block="\n".join(lines),
66 find_libraries_block=find_libraries_block,
67 find_package_header_block=find_package_header_block,
68 assign_target_properties_block=target_props)
69 return tmp
70
71
72 def find_dependency_lines(name, cpp_info, find_modules):
73 lines = ["", "# Library dependencies", "include(CMakeFindDependencyMacro)"]
74 for dep in cpp_info.public_deps:
75 def property_lines(prop):
76 lib_t = "%s::%s" % (name, name)
77 dep_t = "%s::%s" % (dep, dep)
78 return ["get_target_property(tmp %s %s)" % (dep_t, prop),
79 "if(tmp)",
80 " set_property(TARGET %s APPEND PROPERTY %s ${tmp})" % (lib_t, prop),
81 'endif()']
82
83 if find_modules:
84 lines.append("find_dependency(%s REQUIRED)" % dep)
85 else:
86 # https://github.com/conan-io/conan/issues/4994
87 # https://github.com/conan-io/conan/issues/5040
88 lines.append('if(${CMAKE_VERSION} VERSION_LESS "3.9.0")')
89 lines.append(' find_package(%s REQUIRED NO_MODULE)' % dep)
90 lines.append("else()")
91 lines.append(' find_dependency(%s REQUIRED NO_MODULE)' % dep)
92 lines.append("endif()")
93
94 lines.extend(property_lines("INTERFACE_LINK_LIBRARIES"))
95 lines.extend(property_lines("INTERFACE_COMPILE_DEFINITIONS"))
96 lines.extend(property_lines("INTERFACE_INCLUDE_DIRECTORIES"))
97 return [" {}".format(l) for l in lines]
98
```
Path: `conans/client/generators/cmake_find_package_multi.py`
Content:
```
1 from conans.client.generators.cmake import DepsCppCmake
2 from conans.client.generators.cmake_find_package import find_dependency_lines
3 from conans.client.generators.cmake_find_package_common import target_template
4 from conans.model import Generator
5
6
7 class CMakeFindPackageMultiGenerator(Generator):
8 config_xxx_template = """
9
10 # Requires CMake > 3.0
11 if(${{CMAKE_VERSION}} VERSION_LESS "3.0")
12 message(FATAL_ERROR "The 'cmake_find_package_multi' only works with CMake > 3.0" )
13 endif()
14
15 include(${{CMAKE_CURRENT_LIST_DIR}}/{name}Targets.cmake)
16
17 {target_props_block}
18 {find_dependencies_block}
19 """
20
21 targets_file = """
22 if(NOT TARGET {name}::{name})
23 add_library({name}::{name} INTERFACE IMPORTED)
24 endif()
25
26 # Load the debug and release library finders
27 get_filename_component(_DIR "${{CMAKE_CURRENT_LIST_FILE}}" PATH)
28 file(GLOB CONFIG_FILES "${{_DIR}}/{name}Target-*.cmake")
29
30 foreach(f ${{CONFIG_FILES}})
31 include(${{f}})
32 endforeach()
33
34 """
35
36 target_properties = """
37 # Assign target properties
38 set_property(TARGET {name}::{name}
39 PROPERTY INTERFACE_LINK_LIBRARIES
40 $<$<CONFIG:Release>:${{{name}_LIBRARIES_TARGETS_RELEASE}} ${{{name}_LINKER_FLAGS_RELEASE_LIST}}>
41 $<$<CONFIG:RelWithDebInfo>:${{{name}_LIBRARIES_TARGETS_RELWITHDEBINFO}} ${{{name}_LINKER_FLAGS_RELWITHDEBINFO_LIST}}>
42 $<$<CONFIG:MinSizeRel>:${{{name}_LIBRARIES_TARGETS_MINSIZEREL}} ${{{name}_LINKER_FLAGS_MINSIZEREL_LIST}}>
43 $<$<CONFIG:Debug>:${{{name}_LIBRARIES_TARGETS_DEBUG}} ${{{name}_LINKER_FLAGS_DEBUG_LIST}}>)
44 set_property(TARGET {name}::{name}
45 PROPERTY INTERFACE_INCLUDE_DIRECTORIES
46 $<$<CONFIG:Release>:${{{name}_INCLUDE_DIRS_RELEASE}}>
47 $<$<CONFIG:RelWithDebInfo>:${{{name}_INCLUDE_DIRS_RELWITHDEBINFO}}>
48 $<$<CONFIG:MinSizeRel>:${{{name}_INCLUDE_DIRS_MINSIZEREL}}>
49 $<$<CONFIG:Debug>:${{{name}_INCLUDE_DIRS_DEBUG}}>)
50 set_property(TARGET {name}::{name}
51 PROPERTY INTERFACE_COMPILE_DEFINITIONS
52 $<$<CONFIG:Release>:${{{name}_COMPILE_DEFINITIONS_RELEASE}}>
53 $<$<CONFIG:RelWithDebInfo>:${{{name}_COMPILE_DEFINITIONS_RELWITHDEBINFO}}>
54 $<$<CONFIG:MinSizeRel>:${{{name}_COMPILE_DEFINITIONS_MINSIZEREL}}>
55 $<$<CONFIG:Debug>:${{{name}_COMPILE_DEFINITIONS_DEBUG}}>)
56 set_property(TARGET {name}::{name}
57 PROPERTY INTERFACE_COMPILE_OPTIONS
58 $<$<CONFIG:Release>:${{{name}_COMPILE_OPTIONS_RELEASE_LIST}}>
59 $<$<CONFIG:RelWithDebInfo>:${{{name}_COMPILE_OPTIONS_RELWITHDEBINFO_LIST}}>
60 $<$<CONFIG:MinSizeRel>:${{{name}_COMPILE_OPTIONS_MINSIZEREL_LIST}}>
61 $<$<CONFIG:Debug>:${{{name}_COMPILE_OPTIONS_DEBUG_LIST}}>)
62 """
63
64 @property
65 def filename(self):
66 pass
67
68 @property
69 def content(self):
70 ret = {}
71 build_type = self.conanfile.settings.get_safe("build_type")
72 build_type_suffix = "_{}".format(build_type.upper()) if build_type else ""
73 for _, cpp_info in self.deps_build_info.dependencies:
74 depname = cpp_info.name
75 deps = DepsCppCmake(cpp_info)
76 ret["{}Config.cmake".format(depname)] = self._find_for_dep(depname, cpp_info)
77
78 find_lib = target_template.format(name=depname, deps=deps,
79 build_type_suffix=build_type_suffix)
80 ret["{}Targets.cmake".format(depname)] = self.targets_file.format(name=depname)
81 ret["{}Target-{}.cmake".format(depname, build_type.lower())] = find_lib
82 return ret
83
84 def _build_type_suffix(self, build_type):
85 return
86
87 def _find_for_dep(self, name, cpp_info):
88 lines = []
89 if cpp_info.public_deps:
90 # Here we are generating only Config files, so do not search for FindXXX modules
91 lines = find_dependency_lines(name, cpp_info, find_modules=False)
92
93 targets_props = self.target_properties.format(name=name)
94
95 tmp = self.config_xxx_template.format(name=name,
96 version=cpp_info.version,
97 find_dependencies_block="\n".join(lines),
98 target_props_block=targets_props)
99
100 return tmp
101
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/conans/client/generators/cmake_find_package.py b/conans/client/generators/cmake_find_package.py
--- a/conans/client/generators/cmake_find_package.py
+++ b/conans/client/generators/cmake_find_package.py
@@ -56,7 +56,8 @@
lines = []
if cpp_info.public_deps:
# Here we are generating FindXXX, so find_modules=True
- lines = find_dependency_lines(name, cpp_info, find_modules=True)
+ public_deps_names = [self.deps_build_info[dep].name for dep in cpp_info.public_deps]
+ lines = find_dependency_lines(name, public_deps_names, find_modules=True)
find_package_header_block = find_package_header.format(name=name, version=cpp_info.version)
find_libraries_block = target_template.format(name=name, deps=deps, build_type_suffix="")
target_props = assign_target_properties.format(name=name, deps=deps)
@@ -69,26 +70,26 @@
return tmp
-def find_dependency_lines(name, cpp_info, find_modules):
+def find_dependency_lines(name, public_deps_names, find_modules):
lines = ["", "# Library dependencies", "include(CMakeFindDependencyMacro)"]
- for dep in cpp_info.public_deps:
+ for dep_name in public_deps_names:
def property_lines(prop):
lib_t = "%s::%s" % (name, name)
- dep_t = "%s::%s" % (dep, dep)
+ dep_t = "%s::%s" % (dep_name, dep_name)
return ["get_target_property(tmp %s %s)" % (dep_t, prop),
"if(tmp)",
" set_property(TARGET %s APPEND PROPERTY %s ${tmp})" % (lib_t, prop),
'endif()']
if find_modules:
- lines.append("find_dependency(%s REQUIRED)" % dep)
+ lines.append("find_dependency(%s REQUIRED)" % dep_name)
else:
# https://github.com/conan-io/conan/issues/4994
# https://github.com/conan-io/conan/issues/5040
lines.append('if(${CMAKE_VERSION} VERSION_LESS "3.9.0")')
- lines.append(' find_package(%s REQUIRED NO_MODULE)' % dep)
+ lines.append(' find_package(%s REQUIRED NO_MODULE)' % dep_name)
lines.append("else()")
- lines.append(' find_dependency(%s REQUIRED NO_MODULE)' % dep)
+ lines.append(' find_dependency(%s REQUIRED NO_MODULE)' % dep_name)
lines.append("endif()")
lines.extend(property_lines("INTERFACE_LINK_LIBRARIES"))
diff --git a/conans/client/generators/cmake_find_package_multi.py b/conans/client/generators/cmake_find_package_multi.py
--- a/conans/client/generators/cmake_find_package_multi.py
+++ b/conans/client/generators/cmake_find_package_multi.py
@@ -88,7 +88,8 @@
lines = []
if cpp_info.public_deps:
# Here we are generating only Config files, so do not search for FindXXX modules
- lines = find_dependency_lines(name, cpp_info, find_modules=False)
+ public_deps_names = [self.deps_build_info[dep].name for dep in cpp_info.public_deps]
+ lines = find_dependency_lines(name, public_deps_names, find_modules=False)
targets_props = self.target_properties.format(name=name)
|
{"golden_diff": "diff --git a/conans/client/generators/cmake_find_package.py b/conans/client/generators/cmake_find_package.py\n--- a/conans/client/generators/cmake_find_package.py\n+++ b/conans/client/generators/cmake_find_package.py\n@@ -56,7 +56,8 @@\n lines = []\n if cpp_info.public_deps:\n # Here we are generating FindXXX, so find_modules=True\n- lines = find_dependency_lines(name, cpp_info, find_modules=True)\n+ public_deps_names = [self.deps_build_info[dep].name for dep in cpp_info.public_deps]\n+ lines = find_dependency_lines(name, public_deps_names, find_modules=True)\n find_package_header_block = find_package_header.format(name=name, version=cpp_info.version)\n find_libraries_block = target_template.format(name=name, deps=deps, build_type_suffix=\"\")\n target_props = assign_target_properties.format(name=name, deps=deps)\n@@ -69,26 +70,26 @@\n return tmp\n \n \n-def find_dependency_lines(name, cpp_info, find_modules):\n+def find_dependency_lines(name, public_deps_names, find_modules):\n lines = [\"\", \"# Library dependencies\", \"include(CMakeFindDependencyMacro)\"]\n- for dep in cpp_info.public_deps:\n+ for dep_name in public_deps_names:\n def property_lines(prop):\n lib_t = \"%s::%s\" % (name, name)\n- dep_t = \"%s::%s\" % (dep, dep)\n+ dep_t = \"%s::%s\" % (dep_name, dep_name)\n return [\"get_target_property(tmp %s %s)\" % (dep_t, prop),\n \"if(tmp)\",\n \" set_property(TARGET %s APPEND PROPERTY %s ${tmp})\" % (lib_t, prop),\n 'endif()']\n \n if find_modules:\n- lines.append(\"find_dependency(%s REQUIRED)\" % dep)\n+ lines.append(\"find_dependency(%s REQUIRED)\" % dep_name)\n else:\n # https://github.com/conan-io/conan/issues/4994\n # https://github.com/conan-io/conan/issues/5040\n lines.append('if(${CMAKE_VERSION} VERSION_LESS \"3.9.0\")')\n- lines.append(' find_package(%s REQUIRED NO_MODULE)' % dep)\n+ lines.append(' find_package(%s REQUIRED NO_MODULE)' % dep_name)\n lines.append(\"else()\")\n- lines.append(' find_dependency(%s REQUIRED NO_MODULE)' % dep)\n+ lines.append(' find_dependency(%s REQUIRED NO_MODULE)' % dep_name)\n lines.append(\"endif()\")\n \n lines.extend(property_lines(\"INTERFACE_LINK_LIBRARIES\"))\ndiff --git a/conans/client/generators/cmake_find_package_multi.py b/conans/client/generators/cmake_find_package_multi.py\n--- a/conans/client/generators/cmake_find_package_multi.py\n+++ b/conans/client/generators/cmake_find_package_multi.py\n@@ -88,7 +88,8 @@\n lines = []\n if cpp_info.public_deps:\n # Here we are generating only Config files, so do not search for FindXXX modules\n- lines = find_dependency_lines(name, cpp_info, find_modules=False)\n+ public_deps_names = [self.deps_build_info[dep].name for dep in cpp_info.public_deps]\n+ lines = find_dependency_lines(name, public_deps_names, find_modules=False)\n \n targets_props = self.target_properties.format(name=name)\n", "issue": "[bug] Propagation of cpp_info.name in cmake_find_package_multi\nThe `cmake_find_package_multi` correctly generates *XXXConfig.cmake* files using the dependency's `cpp_info.name` for the `XXX` part, but doesn't always correctly use `cpp_info.name` inside the generated file.\r\n\r\n### Environment Details\r\n * Operating System+version: Ubuntu 18.04\r\n * Conan version: 1.19.2\r\n * Python version: 3.6.8\r\n\r\n### Steps to reproduce\r\n\r\nI have got an ITK recipe that requires both `zlib` and `hdf5` and uses `cmake_find_package_multi`, the `hdf5` recipe also requires `zlib`. 
Everything worked smoothly up to yesterday when the `cpp_info.name` of `zlib` was changed to `ZLIB`: now `cmake_find_package_multi` correctly generates *HDF5Config.cmake* and *ZLIBConfig.cmake* as expected, but the *HDF5Config.cmake* file contains the following lines:\r\n\r\n```cmake\r\ninclude(CMakeFindDependencyMacro)\r\nif(${CMAKE_VERSION} VERSION_LESS \"3.9.0\")\r\n find_package(zlib REQUIRED NO_MODULE)\r\nelse()\r\n find_dependency(zlib REQUIRED NO_MODULE)\r\nendif()\r\nget_target_property(tmp zlib::zlib INTERFACE_LINK_LIBRARIES)\r\n```\r\n\r\nWhen the `find_dependency` above is called, it searches a file called *zlibConfig.cmake* instead of the generated *ZLIBConfig.cmake*. I can't tell for sure, but I believe that `cmake_find_package_multi` doesn't correctly propagate the `cpp_info.name` in to the `find_dependency` and subsequent target names when it should.\r\n\r\n### Logs (Executed commands with output) (Include/Attach if Applicable)\r\n\r\nThe relevant logs are the following ones:\r\n\r\n```\r\nCMake Error at /usr/share/cmake-3.10/Modules/CMakeFindDependencyMacro.cmake:48 (find_package):\r\n Could not find a package configuration file provided by \"zlib\" with any of\r\n the following names:\r\n\r\n zlibConfig.cmake\r\n zlib-config.cmake\r\n\r\n Add the installation prefix of \"zlib\" to CMAKE_PREFIX_PATH or set\r\n \"zlib_DIR\" to a directory containing one of the above files. If \"zlib\"\r\n provides a separate development package or SDK, be sure it has been\r\n installed.\r\nCall Stack (most recent call first):\r\n HDF5Config.cmake:43 (find_dependency)\r\n source_subfolder/Modules/ThirdParty/HDF5/itk-module-init.cmake:5 (find_package)\r\n source_subfolder/CMake/ITKModuleEnablement.cmake:318 (include)\r\n source_subfolder/CMakeLists.txt:433 (include)\r\n\r\n\r\n-- Configuring incomplete, errors occurred!\r\n```\r\n\n", "before_files": [{"content": "from conans.client.generators.cmake import DepsCppCmake\nfrom conans.client.generators.cmake_find_package_common import target_template\nfrom conans.model import Generator\n\nfind_package_header = \"\"\"\ninclude(FindPackageHandleStandardArgs)\n\nmessage(STATUS \"Conan: Using autogenerated Find{name}.cmake\")\n# Global approach\nset({name}_FOUND 1)\nset({name}_VERSION \"{version}\")\n\nfind_package_handle_standard_args({name} REQUIRED_VARS {name}_VERSION VERSION_VAR {name}_VERSION)\nmark_as_advanced({name}_FOUND {name}_VERSION)\n\n\"\"\"\n\n\nassign_target_properties = \"\"\"\n if({name}_INCLUDE_DIRS)\n set_target_properties({name}::{name} PROPERTIES INTERFACE_INCLUDE_DIRECTORIES \"${{{name}_INCLUDE_DIRS}}\")\n endif()\n set_property(TARGET {name}::{name} PROPERTY INTERFACE_LINK_LIBRARIES ${{{name}_LIBRARIES_TARGETS}} \"${{{name}_LINKER_FLAGS_LIST}}\")\n set_property(TARGET {name}::{name} PROPERTY INTERFACE_COMPILE_DEFINITIONS ${{{name}_COMPILE_DEFINITIONS}})\n set_property(TARGET {name}::{name} PROPERTY INTERFACE_COMPILE_OPTIONS \"${{{name}_COMPILE_OPTIONS_LIST}}\")\n\"\"\"\n\n\nclass CMakeFindPackageGenerator(Generator):\n template = \"\"\"\n{find_package_header_block}\n{find_libraries_block}\nif(NOT ${{CMAKE_VERSION}} VERSION_LESS \"3.0\")\n # Target approach\n if(NOT TARGET {name}::{name})\n add_library({name}::{name} INTERFACE IMPORTED)\n {assign_target_properties_block}\n {find_dependencies_block}\n endif()\nendif()\n\"\"\"\n\n @property\n def filename(self):\n pass\n\n @property\n def content(self):\n ret = {}\n for depname, cpp_info in self.deps_build_info.dependencies:\n ret[\"Find%s.cmake\" % cpp_info.name] = 
self._find_for_dep(cpp_info.name, cpp_info)\n return ret\n\n def _find_for_dep(self, name, cpp_info):\n deps = DepsCppCmake(cpp_info)\n lines = []\n if cpp_info.public_deps:\n # Here we are generating FindXXX, so find_modules=True\n lines = find_dependency_lines(name, cpp_info, find_modules=True)\n find_package_header_block = find_package_header.format(name=name, version=cpp_info.version)\n find_libraries_block = target_template.format(name=name, deps=deps, build_type_suffix=\"\")\n target_props = assign_target_properties.format(name=name, deps=deps)\n tmp = self.template.format(name=name, deps=deps,\n version=cpp_info.version,\n find_dependencies_block=\"\\n\".join(lines),\n find_libraries_block=find_libraries_block,\n find_package_header_block=find_package_header_block,\n assign_target_properties_block=target_props)\n return tmp\n\n\ndef find_dependency_lines(name, cpp_info, find_modules):\n lines = [\"\", \"# Library dependencies\", \"include(CMakeFindDependencyMacro)\"]\n for dep in cpp_info.public_deps:\n def property_lines(prop):\n lib_t = \"%s::%s\" % (name, name)\n dep_t = \"%s::%s\" % (dep, dep)\n return [\"get_target_property(tmp %s %s)\" % (dep_t, prop),\n \"if(tmp)\",\n \" set_property(TARGET %s APPEND PROPERTY %s ${tmp})\" % (lib_t, prop),\n 'endif()']\n\n if find_modules:\n lines.append(\"find_dependency(%s REQUIRED)\" % dep)\n else:\n # https://github.com/conan-io/conan/issues/4994\n # https://github.com/conan-io/conan/issues/5040\n lines.append('if(${CMAKE_VERSION} VERSION_LESS \"3.9.0\")')\n lines.append(' find_package(%s REQUIRED NO_MODULE)' % dep)\n lines.append(\"else()\")\n lines.append(' find_dependency(%s REQUIRED NO_MODULE)' % dep)\n lines.append(\"endif()\")\n\n lines.extend(property_lines(\"INTERFACE_LINK_LIBRARIES\"))\n lines.extend(property_lines(\"INTERFACE_COMPILE_DEFINITIONS\"))\n lines.extend(property_lines(\"INTERFACE_INCLUDE_DIRECTORIES\"))\n return [\" {}\".format(l) for l in lines]\n", "path": "conans/client/generators/cmake_find_package.py"}, {"content": "from conans.client.generators.cmake import DepsCppCmake\nfrom conans.client.generators.cmake_find_package import find_dependency_lines\nfrom conans.client.generators.cmake_find_package_common import target_template\nfrom conans.model import Generator\n\n\nclass CMakeFindPackageMultiGenerator(Generator):\n config_xxx_template = \"\"\"\n\n# Requires CMake > 3.0\nif(${{CMAKE_VERSION}} VERSION_LESS \"3.0\")\n message(FATAL_ERROR \"The 'cmake_find_package_multi' only works with CMake > 3.0\" )\nendif()\n\ninclude(${{CMAKE_CURRENT_LIST_DIR}}/{name}Targets.cmake)\n\n{target_props_block}\n{find_dependencies_block}\n\"\"\"\n\n targets_file = \"\"\"\nif(NOT TARGET {name}::{name})\n add_library({name}::{name} INTERFACE IMPORTED)\nendif()\n\n# Load the debug and release library finders\nget_filename_component(_DIR \"${{CMAKE_CURRENT_LIST_FILE}}\" PATH)\nfile(GLOB CONFIG_FILES \"${{_DIR}}/{name}Target-*.cmake\")\n\nforeach(f ${{CONFIG_FILES}})\n include(${{f}})\nendforeach()\n \n\"\"\"\n\n target_properties = \"\"\"\n# Assign target properties\nset_property(TARGET {name}::{name} \n PROPERTY INTERFACE_LINK_LIBRARIES \n $<$<CONFIG:Release>:${{{name}_LIBRARIES_TARGETS_RELEASE}} ${{{name}_LINKER_FLAGS_RELEASE_LIST}}>\n $<$<CONFIG:RelWithDebInfo>:${{{name}_LIBRARIES_TARGETS_RELWITHDEBINFO}} ${{{name}_LINKER_FLAGS_RELWITHDEBINFO_LIST}}>\n $<$<CONFIG:MinSizeRel>:${{{name}_LIBRARIES_TARGETS_MINSIZEREL}} ${{{name}_LINKER_FLAGS_MINSIZEREL_LIST}}>\n $<$<CONFIG:Debug>:${{{name}_LIBRARIES_TARGETS_DEBUG}} 
${{{name}_LINKER_FLAGS_DEBUG_LIST}}>)\nset_property(TARGET {name}::{name} \n PROPERTY INTERFACE_INCLUDE_DIRECTORIES \n $<$<CONFIG:Release>:${{{name}_INCLUDE_DIRS_RELEASE}}>\n $<$<CONFIG:RelWithDebInfo>:${{{name}_INCLUDE_DIRS_RELWITHDEBINFO}}>\n $<$<CONFIG:MinSizeRel>:${{{name}_INCLUDE_DIRS_MINSIZEREL}}>\n $<$<CONFIG:Debug>:${{{name}_INCLUDE_DIRS_DEBUG}}>)\nset_property(TARGET {name}::{name} \n PROPERTY INTERFACE_COMPILE_DEFINITIONS \n $<$<CONFIG:Release>:${{{name}_COMPILE_DEFINITIONS_RELEASE}}>\n $<$<CONFIG:RelWithDebInfo>:${{{name}_COMPILE_DEFINITIONS_RELWITHDEBINFO}}>\n $<$<CONFIG:MinSizeRel>:${{{name}_COMPILE_DEFINITIONS_MINSIZEREL}}>\n $<$<CONFIG:Debug>:${{{name}_COMPILE_DEFINITIONS_DEBUG}}>)\nset_property(TARGET {name}::{name} \n PROPERTY INTERFACE_COMPILE_OPTIONS \n $<$<CONFIG:Release>:${{{name}_COMPILE_OPTIONS_RELEASE_LIST}}>\n $<$<CONFIG:RelWithDebInfo>:${{{name}_COMPILE_OPTIONS_RELWITHDEBINFO_LIST}}>\n $<$<CONFIG:MinSizeRel>:${{{name}_COMPILE_OPTIONS_MINSIZEREL_LIST}}>\n $<$<CONFIG:Debug>:${{{name}_COMPILE_OPTIONS_DEBUG_LIST}}>) \n \"\"\"\n\n @property\n def filename(self):\n pass\n\n @property\n def content(self):\n ret = {}\n build_type = self.conanfile.settings.get_safe(\"build_type\")\n build_type_suffix = \"_{}\".format(build_type.upper()) if build_type else \"\"\n for _, cpp_info in self.deps_build_info.dependencies:\n depname = cpp_info.name\n deps = DepsCppCmake(cpp_info)\n ret[\"{}Config.cmake\".format(depname)] = self._find_for_dep(depname, cpp_info)\n\n find_lib = target_template.format(name=depname, deps=deps,\n build_type_suffix=build_type_suffix)\n ret[\"{}Targets.cmake\".format(depname)] = self.targets_file.format(name=depname)\n ret[\"{}Target-{}.cmake\".format(depname, build_type.lower())] = find_lib\n return ret\n\n def _build_type_suffix(self, build_type):\n return\n\n def _find_for_dep(self, name, cpp_info):\n lines = []\n if cpp_info.public_deps:\n # Here we are generating only Config files, so do not search for FindXXX modules\n lines = find_dependency_lines(name, cpp_info, find_modules=False)\n\n targets_props = self.target_properties.format(name=name)\n\n tmp = self.config_xxx_template.format(name=name,\n version=cpp_info.version,\n find_dependencies_block=\"\\n\".join(lines),\n target_props_block=targets_props)\n\n return tmp\n", "path": "conans/client/generators/cmake_find_package_multi.py"}], "after_files": [{"content": "from conans.client.generators.cmake import DepsCppCmake\nfrom conans.client.generators.cmake_find_package_common import target_template\nfrom conans.model import Generator\n\nfind_package_header = \"\"\"\ninclude(FindPackageHandleStandardArgs)\n\nmessage(STATUS \"Conan: Using autogenerated Find{name}.cmake\")\n# Global approach\nset({name}_FOUND 1)\nset({name}_VERSION \"{version}\")\n\nfind_package_handle_standard_args({name} REQUIRED_VARS {name}_VERSION VERSION_VAR {name}_VERSION)\nmark_as_advanced({name}_FOUND {name}_VERSION)\n\n\"\"\"\n\n\nassign_target_properties = \"\"\"\n if({name}_INCLUDE_DIRS)\n set_target_properties({name}::{name} PROPERTIES INTERFACE_INCLUDE_DIRECTORIES \"${{{name}_INCLUDE_DIRS}}\")\n endif()\n set_property(TARGET {name}::{name} PROPERTY INTERFACE_LINK_LIBRARIES ${{{name}_LIBRARIES_TARGETS}} \"${{{name}_LINKER_FLAGS_LIST}}\")\n set_property(TARGET {name}::{name} PROPERTY INTERFACE_COMPILE_DEFINITIONS ${{{name}_COMPILE_DEFINITIONS}})\n set_property(TARGET {name}::{name} PROPERTY INTERFACE_COMPILE_OPTIONS \"${{{name}_COMPILE_OPTIONS_LIST}}\")\n\"\"\"\n\n\nclass CMakeFindPackageGenerator(Generator):\n template = 
\"\"\"\n{find_package_header_block}\n{find_libraries_block}\nif(NOT ${{CMAKE_VERSION}} VERSION_LESS \"3.0\")\n # Target approach\n if(NOT TARGET {name}::{name})\n add_library({name}::{name} INTERFACE IMPORTED)\n {assign_target_properties_block}\n {find_dependencies_block}\n endif()\nendif()\n\"\"\"\n\n @property\n def filename(self):\n pass\n\n @property\n def content(self):\n ret = {}\n for depname, cpp_info in self.deps_build_info.dependencies:\n ret[\"Find%s.cmake\" % cpp_info.name] = self._find_for_dep(cpp_info.name, cpp_info)\n return ret\n\n def _find_for_dep(self, name, cpp_info):\n deps = DepsCppCmake(cpp_info)\n lines = []\n if cpp_info.public_deps:\n # Here we are generating FindXXX, so find_modules=True\n public_deps_names = [self.deps_build_info[dep].name for dep in cpp_info.public_deps]\n lines = find_dependency_lines(name, public_deps_names, find_modules=True)\n find_package_header_block = find_package_header.format(name=name, version=cpp_info.version)\n find_libraries_block = target_template.format(name=name, deps=deps, build_type_suffix=\"\")\n target_props = assign_target_properties.format(name=name, deps=deps)\n tmp = self.template.format(name=name, deps=deps,\n version=cpp_info.version,\n find_dependencies_block=\"\\n\".join(lines),\n find_libraries_block=find_libraries_block,\n find_package_header_block=find_package_header_block,\n assign_target_properties_block=target_props)\n return tmp\n\n\ndef find_dependency_lines(name, public_deps_names, find_modules):\n lines = [\"\", \"# Library dependencies\", \"include(CMakeFindDependencyMacro)\"]\n for dep_name in public_deps_names:\n def property_lines(prop):\n lib_t = \"%s::%s\" % (name, name)\n dep_t = \"%s::%s\" % (dep_name, dep_name)\n return [\"get_target_property(tmp %s %s)\" % (dep_t, prop),\n \"if(tmp)\",\n \" set_property(TARGET %s APPEND PROPERTY %s ${tmp})\" % (lib_t, prop),\n 'endif()']\n\n if find_modules:\n lines.append(\"find_dependency(%s REQUIRED)\" % dep_name)\n else:\n # https://github.com/conan-io/conan/issues/4994\n # https://github.com/conan-io/conan/issues/5040\n lines.append('if(${CMAKE_VERSION} VERSION_LESS \"3.9.0\")')\n lines.append(' find_package(%s REQUIRED NO_MODULE)' % dep_name)\n lines.append(\"else()\")\n lines.append(' find_dependency(%s REQUIRED NO_MODULE)' % dep_name)\n lines.append(\"endif()\")\n\n lines.extend(property_lines(\"INTERFACE_LINK_LIBRARIES\"))\n lines.extend(property_lines(\"INTERFACE_COMPILE_DEFINITIONS\"))\n lines.extend(property_lines(\"INTERFACE_INCLUDE_DIRECTORIES\"))\n return [\" {}\".format(l) for l in lines]\n", "path": "conans/client/generators/cmake_find_package.py"}, {"content": "from conans.client.generators.cmake import DepsCppCmake\nfrom conans.client.generators.cmake_find_package import find_dependency_lines\nfrom conans.client.generators.cmake_find_package_common import target_template\nfrom conans.model import Generator\n\n\nclass CMakeFindPackageMultiGenerator(Generator):\n config_xxx_template = \"\"\"\n\n# Requires CMake > 3.0\nif(${{CMAKE_VERSION}} VERSION_LESS \"3.0\")\n message(FATAL_ERROR \"The 'cmake_find_package_multi' only works with CMake > 3.0\" )\nendif()\n\ninclude(${{CMAKE_CURRENT_LIST_DIR}}/{name}Targets.cmake)\n\n{target_props_block}\n{find_dependencies_block}\n\"\"\"\n\n targets_file = \"\"\"\nif(NOT TARGET {name}::{name})\n add_library({name}::{name} INTERFACE IMPORTED)\nendif()\n\n# Load the debug and release library finders\nget_filename_component(_DIR \"${{CMAKE_CURRENT_LIST_FILE}}\" PATH)\nfile(GLOB CONFIG_FILES 
\"${{_DIR}}/{name}Target-*.cmake\")\n\nforeach(f ${{CONFIG_FILES}})\n include(${{f}})\nendforeach()\n \n\"\"\"\n\n target_properties = \"\"\"\n# Assign target properties\nset_property(TARGET {name}::{name} \n PROPERTY INTERFACE_LINK_LIBRARIES \n $<$<CONFIG:Release>:${{{name}_LIBRARIES_TARGETS_RELEASE}} ${{{name}_LINKER_FLAGS_RELEASE_LIST}}>\n $<$<CONFIG:RelWithDebInfo>:${{{name}_LIBRARIES_TARGETS_RELWITHDEBINFO}} ${{{name}_LINKER_FLAGS_RELWITHDEBINFO_LIST}}>\n $<$<CONFIG:MinSizeRel>:${{{name}_LIBRARIES_TARGETS_MINSIZEREL}} ${{{name}_LINKER_FLAGS_MINSIZEREL_LIST}}>\n $<$<CONFIG:Debug>:${{{name}_LIBRARIES_TARGETS_DEBUG}} ${{{name}_LINKER_FLAGS_DEBUG_LIST}}>)\nset_property(TARGET {name}::{name} \n PROPERTY INTERFACE_INCLUDE_DIRECTORIES \n $<$<CONFIG:Release>:${{{name}_INCLUDE_DIRS_RELEASE}}>\n $<$<CONFIG:RelWithDebInfo>:${{{name}_INCLUDE_DIRS_RELWITHDEBINFO}}>\n $<$<CONFIG:MinSizeRel>:${{{name}_INCLUDE_DIRS_MINSIZEREL}}>\n $<$<CONFIG:Debug>:${{{name}_INCLUDE_DIRS_DEBUG}}>)\nset_property(TARGET {name}::{name} \n PROPERTY INTERFACE_COMPILE_DEFINITIONS \n $<$<CONFIG:Release>:${{{name}_COMPILE_DEFINITIONS_RELEASE}}>\n $<$<CONFIG:RelWithDebInfo>:${{{name}_COMPILE_DEFINITIONS_RELWITHDEBINFO}}>\n $<$<CONFIG:MinSizeRel>:${{{name}_COMPILE_DEFINITIONS_MINSIZEREL}}>\n $<$<CONFIG:Debug>:${{{name}_COMPILE_DEFINITIONS_DEBUG}}>)\nset_property(TARGET {name}::{name} \n PROPERTY INTERFACE_COMPILE_OPTIONS \n $<$<CONFIG:Release>:${{{name}_COMPILE_OPTIONS_RELEASE_LIST}}>\n $<$<CONFIG:RelWithDebInfo>:${{{name}_COMPILE_OPTIONS_RELWITHDEBINFO_LIST}}>\n $<$<CONFIG:MinSizeRel>:${{{name}_COMPILE_OPTIONS_MINSIZEREL_LIST}}>\n $<$<CONFIG:Debug>:${{{name}_COMPILE_OPTIONS_DEBUG_LIST}}>) \n \"\"\"\n\n @property\n def filename(self):\n pass\n\n @property\n def content(self):\n ret = {}\n build_type = self.conanfile.settings.get_safe(\"build_type\")\n build_type_suffix = \"_{}\".format(build_type.upper()) if build_type else \"\"\n for _, cpp_info in self.deps_build_info.dependencies:\n depname = cpp_info.name\n deps = DepsCppCmake(cpp_info)\n ret[\"{}Config.cmake\".format(depname)] = self._find_for_dep(depname, cpp_info)\n\n find_lib = target_template.format(name=depname, deps=deps,\n build_type_suffix=build_type_suffix)\n ret[\"{}Targets.cmake\".format(depname)] = self.targets_file.format(name=depname)\n ret[\"{}Target-{}.cmake\".format(depname, build_type.lower())] = find_lib\n return ret\n\n def _build_type_suffix(self, build_type):\n return\n\n def _find_for_dep(self, name, cpp_info):\n lines = []\n if cpp_info.public_deps:\n # Here we are generating only Config files, so do not search for FindXXX modules\n public_deps_names = [self.deps_build_info[dep].name for dep in cpp_info.public_deps]\n lines = find_dependency_lines(name, public_deps_names, find_modules=False)\n\n targets_props = self.target_properties.format(name=name)\n\n tmp = self.config_xxx_template.format(name=name,\n version=cpp_info.version,\n find_dependencies_block=\"\\n\".join(lines),\n target_props_block=targets_props)\n\n return tmp\n", "path": "conans/client/generators/cmake_find_package_multi.py"}]}
| 3,219 | 759 |
gh_patches_debug_24750
|
rasdani/github-patches
|
git_diff
|
dbt-labs__dbt-core-7636
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[CT-2479] replace all instances of set-output and node16
Details in https://github.com/dbt-labs/actions/issues/39.
### Acceptance Criteria
- [ ] Verified there are no workflows to update
_or_
- [ ] removed all uses of `set-output` - either directly or by updating any marketplace actions we reference (see the sketch after this list)
- [ ] removed all references to node16 - either directly or by updating any marketplace actions we reference
- [ ] backport changes
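
For context, the deprecated `::set-output` workflow command is replaced by writing `key=value` lines to the file named by the `GITHUB_OUTPUT` environment variable. A minimal sketch of that pattern for a Python-based action is shown below; the values are illustrative placeholders, not taken from the repository:

```python
import os

# Illustrative placeholder values; the real action computes these from package metadata.
latest = True
minor_latest = False

# Deprecated mechanism being removed:
#   print(f"::set-output name=latest::{latest}")
# Replacement: append "key=value" lines to the file named by GITHUB_OUTPUT.
github_output = os.environ["GITHUB_OUTPUT"]
with open(github_output, "a", encoding="utf-8") as gh_output:
    gh_output.write(f"latest={latest}\n")
    gh_output.write(f"minor_latest={minor_latest}\n")
```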
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `.github/actions/latest-wrangler/main.py`
Content:
```
1 import os
2 import sys
3 import requests
4 from distutils.util import strtobool
5 from typing import Union
6 from packaging.version import parse, Version
7
8 if __name__ == "__main__":
9
10 # get inputs
11 package = os.environ["INPUT_PACKAGE"]
12 new_version = parse(os.environ["INPUT_NEW_VERSION"])
13 gh_token = os.environ["INPUT_GH_TOKEN"]
14 halt_on_missing = strtobool(os.environ.get("INPUT_HALT_ON_MISSING", "False"))
15
16 # get package metadata from github
17 package_request = requests.get(
18 f"https://api.github.com/orgs/dbt-labs/packages/container/{package}/versions",
19 auth=("", gh_token),
20 )
21 package_meta = package_request.json()
22
23 # Log info if we don't get a 200
24 if package_request.status_code != 200:
25 print(f"Call to GH API failed: {package_request.status_code} {package_meta['message']}")
26
27 # Make an early exit if there is no matching package in github
28 if package_request.status_code == 404:
29 if halt_on_missing:
30 sys.exit(1)
31 else:
32 # everything is the latest if the package doesn't exist
33 print(f"::set-output name=latest::{True}")
34 print(f"::set-output name=minor_latest::{True}")
35 sys.exit(0)
36
37 # TODO: verify package meta is "correct"
38 # https://github.com/dbt-labs/dbt-core/issues/4640
39
40 # map versions and tags
41 version_tag_map = {
42 version["id"]: version["metadata"]["container"]["tags"] for version in package_meta
43 }
44
45 # is pre-release
46 pre_rel = True if any(x in str(new_version) for x in ["a", "b", "rc"]) else False
47
48 # semver of current latest
49 for version, tags in version_tag_map.items():
50 if "latest" in tags:
51 # N.B. This seems counterintuitive, but we expect any version tagged
52 # 'latest' to have exactly three associated tags:
53 # latest, major.minor.latest, and major.minor.patch.
54 # Subtracting everything that contains the string 'latest' gets us
55 # the major.minor.patch which is what's needed for comparison.
56 current_latest = parse([tag for tag in tags if "latest" not in tag][0])
57 else:
58 current_latest = False
59
60 # semver of current_minor_latest
61 for version, tags in version_tag_map.items():
62 if f"{new_version.major}.{new_version.minor}.latest" in tags:
63 # Similar to above, only now we expect exactly two tags:
64 # major.minor.patch and major.minor.latest
65 current_minor_latest = parse([tag for tag in tags if "latest" not in tag][0])
66 else:
67 current_minor_latest = False
68
69 def is_latest(
70 pre_rel: bool, new_version: Version, remote_latest: Union[bool, Version]
71 ) -> bool:
72 """Determine if a given contaier should be tagged 'latest' based on:
73 - it's pre-release status
74 - it's version
75 - the version of a previously identified container tagged 'latest'
76
77 :param pre_rel: Wether or not the version of the new container is a pre-release
78 :param new_version: The version of the new container
79 :param remote_latest: The version of the previously identified container that's
80 already tagged latest or False
81 """
82 # is a pre-release = not latest
83 if pre_rel:
84 return False
85 # + no latest tag found = is latest
86 if not remote_latest:
87 return True
88 # + if remote version is lower than current = is latest, else not latest
89 return True if remote_latest <= new_version else False
90
91 latest = is_latest(pre_rel, new_version, current_latest)
92 minor_latest = is_latest(pre_rel, new_version, current_minor_latest)
93
94 print(f"::set-output name=latest::{latest}")
95 print(f"::set-output name=minor_latest::{minor_latest}")
96
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/.github/actions/latest-wrangler/main.py b/.github/actions/latest-wrangler/main.py
--- a/.github/actions/latest-wrangler/main.py
+++ b/.github/actions/latest-wrangler/main.py
@@ -28,11 +28,12 @@
if package_request.status_code == 404:
if halt_on_missing:
sys.exit(1)
- else:
- # everything is the latest if the package doesn't exist
- print(f"::set-output name=latest::{True}")
- print(f"::set-output name=minor_latest::{True}")
- sys.exit(0)
+ # everything is the latest if the package doesn't exist
+ github_output = os.environ.get("GITHUB_OUTPUT")
+ with open(github_output, "at", encoding="utf-8") as gh_output:
+ gh_output.write("latest=True")
+ gh_output.write("minor_latest=True")
+ sys.exit(0)
# TODO: verify package meta is "correct"
# https://github.com/dbt-labs/dbt-core/issues/4640
@@ -91,5 +92,7 @@
latest = is_latest(pre_rel, new_version, current_latest)
minor_latest = is_latest(pre_rel, new_version, current_minor_latest)
- print(f"::set-output name=latest::{latest}")
- print(f"::set-output name=minor_latest::{minor_latest}")
+ github_output = os.environ.get("GITHUB_OUTPUT")
+ with open(github_output, "at", encoding="utf-8") as gh_output:
+ gh_output.write(f"latest={latest}")
+ gh_output.write(f"minor_latest={minor_latest}")
|
{"golden_diff": "diff --git a/.github/actions/latest-wrangler/main.py b/.github/actions/latest-wrangler/main.py\n--- a/.github/actions/latest-wrangler/main.py\n+++ b/.github/actions/latest-wrangler/main.py\n@@ -28,11 +28,12 @@\n if package_request.status_code == 404:\n if halt_on_missing:\n sys.exit(1)\n- else:\n- # everything is the latest if the package doesn't exist\n- print(f\"::set-output name=latest::{True}\")\n- print(f\"::set-output name=minor_latest::{True}\")\n- sys.exit(0)\n+ # everything is the latest if the package doesn't exist\n+ github_output = os.environ.get(\"GITHUB_OUTPUT\")\n+ with open(github_output, \"at\", encoding=\"utf-8\") as gh_output:\n+ gh_output.write(\"latest=True\")\n+ gh_output.write(\"minor_latest=True\")\n+ sys.exit(0)\n \n # TODO: verify package meta is \"correct\"\n # https://github.com/dbt-labs/dbt-core/issues/4640\n@@ -91,5 +92,7 @@\n latest = is_latest(pre_rel, new_version, current_latest)\n minor_latest = is_latest(pre_rel, new_version, current_minor_latest)\n \n- print(f\"::set-output name=latest::{latest}\")\n- print(f\"::set-output name=minor_latest::{minor_latest}\")\n+ github_output = os.environ.get(\"GITHUB_OUTPUT\")\n+ with open(github_output, \"at\", encoding=\"utf-8\") as gh_output:\n+ gh_output.write(f\"latest={latest}\")\n+ gh_output.write(f\"minor_latest={minor_latest}\")\n", "issue": "[CT-2479] replace all instances of set-output and node16\nDetails in https://github.com/dbt-labs/actions/issues/39.\r\n\r\n### Acceptance Criteria\r\n- [ ] Verified there are no workflows to update\r\n_or_\r\n- [ ] removed all uses of `set-output` - either directly or up updating any marketplace actions we reference\r\n- [ ] removed all references to node16 - either directly or up updating any marketplace actions we reference\r\n- [ ] backport changes\n", "before_files": [{"content": "import os\nimport sys\nimport requests\nfrom distutils.util import strtobool\nfrom typing import Union\nfrom packaging.version import parse, Version\n\nif __name__ == \"__main__\":\n\n # get inputs\n package = os.environ[\"INPUT_PACKAGE\"]\n new_version = parse(os.environ[\"INPUT_NEW_VERSION\"])\n gh_token = os.environ[\"INPUT_GH_TOKEN\"]\n halt_on_missing = strtobool(os.environ.get(\"INPUT_HALT_ON_MISSING\", \"False\"))\n\n # get package metadata from github\n package_request = requests.get(\n f\"https://api.github.com/orgs/dbt-labs/packages/container/{package}/versions\",\n auth=(\"\", gh_token),\n )\n package_meta = package_request.json()\n\n # Log info if we don't get a 200\n if package_request.status_code != 200:\n print(f\"Call to GH API failed: {package_request.status_code} {package_meta['message']}\")\n\n # Make an early exit if there is no matching package in github\n if package_request.status_code == 404:\n if halt_on_missing:\n sys.exit(1)\n else:\n # everything is the latest if the package doesn't exist\n print(f\"::set-output name=latest::{True}\")\n print(f\"::set-output name=minor_latest::{True}\")\n sys.exit(0)\n\n # TODO: verify package meta is \"correct\"\n # https://github.com/dbt-labs/dbt-core/issues/4640\n\n # map versions and tags\n version_tag_map = {\n version[\"id\"]: version[\"metadata\"][\"container\"][\"tags\"] for version in package_meta\n }\n\n # is pre-release\n pre_rel = True if any(x in str(new_version) for x in [\"a\", \"b\", \"rc\"]) else False\n\n # semver of current latest\n for version, tags in version_tag_map.items():\n if \"latest\" in tags:\n # N.B. 
This seems counterintuitive, but we expect any version tagged\n # 'latest' to have exactly three associated tags:\n # latest, major.minor.latest, and major.minor.patch.\n # Subtracting everything that contains the string 'latest' gets us\n # the major.minor.patch which is what's needed for comparison.\n current_latest = parse([tag for tag in tags if \"latest\" not in tag][0])\n else:\n current_latest = False\n\n # semver of current_minor_latest\n for version, tags in version_tag_map.items():\n if f\"{new_version.major}.{new_version.minor}.latest\" in tags:\n # Similar to above, only now we expect exactly two tags:\n # major.minor.patch and major.minor.latest\n current_minor_latest = parse([tag for tag in tags if \"latest\" not in tag][0])\n else:\n current_minor_latest = False\n\n def is_latest(\n pre_rel: bool, new_version: Version, remote_latest: Union[bool, Version]\n ) -> bool:\n \"\"\"Determine if a given contaier should be tagged 'latest' based on:\n - it's pre-release status\n - it's version\n - the version of a previously identified container tagged 'latest'\n\n :param pre_rel: Wether or not the version of the new container is a pre-release\n :param new_version: The version of the new container\n :param remote_latest: The version of the previously identified container that's\n already tagged latest or False\n \"\"\"\n # is a pre-release = not latest\n if pre_rel:\n return False\n # + no latest tag found = is latest\n if not remote_latest:\n return True\n # + if remote version is lower than current = is latest, else not latest\n return True if remote_latest <= new_version else False\n\n latest = is_latest(pre_rel, new_version, current_latest)\n minor_latest = is_latest(pre_rel, new_version, current_minor_latest)\n\n print(f\"::set-output name=latest::{latest}\")\n print(f\"::set-output name=minor_latest::{minor_latest}\")\n", "path": ".github/actions/latest-wrangler/main.py"}], "after_files": [{"content": "import os\nimport sys\nimport requests\nfrom distutils.util import strtobool\nfrom typing import Union\nfrom packaging.version import parse, Version\n\nif __name__ == \"__main__\":\n\n # get inputs\n package = os.environ[\"INPUT_PACKAGE\"]\n new_version = parse(os.environ[\"INPUT_NEW_VERSION\"])\n gh_token = os.environ[\"INPUT_GH_TOKEN\"]\n halt_on_missing = strtobool(os.environ.get(\"INPUT_HALT_ON_MISSING\", \"False\"))\n\n # get package metadata from github\n package_request = requests.get(\n f\"https://api.github.com/orgs/dbt-labs/packages/container/{package}/versions\",\n auth=(\"\", gh_token),\n )\n package_meta = package_request.json()\n\n # Log info if we don't get a 200\n if package_request.status_code != 200:\n print(f\"Call to GH API failed: {package_request.status_code} {package_meta['message']}\")\n\n # Make an early exit if there is no matching package in github\n if package_request.status_code == 404:\n if halt_on_missing:\n sys.exit(1)\n # everything is the latest if the package doesn't exist\n github_output = os.environ.get(\"GITHUB_OUTPUT\")\n with open(github_output, \"at\", encoding=\"utf-8\") as gh_output:\n gh_output.write(\"latest=True\")\n gh_output.write(\"minor_latest=True\")\n sys.exit(0)\n\n # TODO: verify package meta is \"correct\"\n # https://github.com/dbt-labs/dbt-core/issues/4640\n\n # map versions and tags\n version_tag_map = {\n version[\"id\"]: version[\"metadata\"][\"container\"][\"tags\"] for version in package_meta\n }\n\n # is pre-release\n pre_rel = True if any(x in str(new_version) for x in [\"a\", \"b\", \"rc\"]) else False\n\n # semver 
of current latest\n for version, tags in version_tag_map.items():\n if \"latest\" in tags:\n # N.B. This seems counterintuitive, but we expect any version tagged\n # 'latest' to have exactly three associated tags:\n # latest, major.minor.latest, and major.minor.patch.\n # Subtracting everything that contains the string 'latest' gets us\n # the major.minor.patch which is what's needed for comparison.\n current_latest = parse([tag for tag in tags if \"latest\" not in tag][0])\n else:\n current_latest = False\n\n # semver of current_minor_latest\n for version, tags in version_tag_map.items():\n if f\"{new_version.major}.{new_version.minor}.latest\" in tags:\n # Similar to above, only now we expect exactly two tags:\n # major.minor.patch and major.minor.latest\n current_minor_latest = parse([tag for tag in tags if \"latest\" not in tag][0])\n else:\n current_minor_latest = False\n\n def is_latest(\n pre_rel: bool, new_version: Version, remote_latest: Union[bool, Version]\n ) -> bool:\n \"\"\"Determine if a given contaier should be tagged 'latest' based on:\n - it's pre-release status\n - it's version\n - the version of a previously identified container tagged 'latest'\n\n :param pre_rel: Wether or not the version of the new container is a pre-release\n :param new_version: The version of the new container\n :param remote_latest: The version of the previously identified container that's\n already tagged latest or False\n \"\"\"\n # is a pre-release = not latest\n if pre_rel:\n return False\n # + no latest tag found = is latest\n if not remote_latest:\n return True\n # + if remote version is lower than current = is latest, else not latest\n return True if remote_latest <= new_version else False\n\n latest = is_latest(pre_rel, new_version, current_latest)\n minor_latest = is_latest(pre_rel, new_version, current_minor_latest)\n\n github_output = os.environ.get(\"GITHUB_OUTPUT\")\n with open(github_output, \"at\", encoding=\"utf-8\") as gh_output:\n gh_output.write(f\"latest={latest}\")\n gh_output.write(f\"minor_latest={minor_latest}\")\n", "path": ".github/actions/latest-wrangler/main.py"}]}
| 1,441 | 377 |
gh_patches_debug_13866
|
rasdani/github-patches
|
git_diff
|
ESMCI__cime-4326
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add "set_value" method to CIME.BuildTools.configure.FakeCase so that user can add needed variables
In principle this is a simple change, but I want to make sure it's acceptable. I'd like to add a set_value method to the FakeCase class in CIME.BuildTools.configure so that the user can add the variables they want to have in their FakeCase. The utility I see is for unit testing of buildnml. But I think there may be other cases where you want to create a simpler fake case without doing all of the setup normally needed to create a real case. There are some offline scripts we are thinking about that would use this kind of utility: we know we need to set up some type of fake case, but we want to hide it from the user, and it doesn't need to be fully functional like a real case.
``` python
diff --git a/scripts/lib/CIME/BuildTools/configure.py b/scripts/lib/CIME/BuildTools/configure.py
index a74c087a5..fd7067ed7 100644
--- a/scripts/lib/CIME/BuildTools/configure.py
+++ b/scripts/lib/CIME/BuildTools/configure.py
@@ -74,6 +74,9 @@ def get_value(self, attrib):
expect(attrib in self._vals, "FakeCase does not support getting value of '%s'" % attrib)
return self._vals[attrib]
+ def set_value(self, attrib, value):
+ self._vals[attrib] = value
+
def _generate_env_mach_specific(output_dir, machobj, compiler, mpilib, debug,
sysos, unit_testing):
"""
```
@jedwards4b does this sound acceptable? If so I'll submit a simple PR for it.
@billsacks would this kind of thing possibly make LILAC easier to setup?
Add "set_value" method to CIME.BuildTools.configure.FakeCase so that user can add needed variables
In principle this is a simple change, but I want to make sure it's acceptable. I'd like to add a set_value method to the FakeCase class in CIME.BuildTools.configure so that the user can add the variables they want to have in their FakeCase. The utility I see is for unit testing of buildnml. But I think there may be other cases where you want to create a simpler fake case without doing all of the setup normally needed to create a real case. There are some offline scripts we are thinking about that would use this kind of utility: we know we need to set up some type of fake case, but we want to hide it from the user, and it doesn't need to be fully functional like a real case.
``` python
diff --git a/scripts/lib/CIME/BuildTools/configure.py b/scripts/lib/CIME/BuildTools/configure.py
index a74c087a5..fd7067ed7 100644
--- a/scripts/lib/CIME/BuildTools/configure.py
+++ b/scripts/lib/CIME/BuildTools/configure.py
@@ -74,6 +74,9 @@ def get_value(self, attrib):
expect(attrib in self._vals, "FakeCase does not support getting value of '%s'" % attrib)
return self._vals[attrib]
+ def set_value(self, attrib, value):
+ self._vals[attrib] = value
+
def _generate_env_mach_specific(output_dir, machobj, compiler, mpilib, debug,
sysos, unit_testing):
"""
```
@jedwards4b does this sound acceptable? If so I'll submit a simple PR for it.
@billsacks would this kind of thing possibly make LILAC easier to setup?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `CIME/BuildTools/configure.py`
Content:
```
1 #!/usr/bin/env python3
2
3 """This script writes CIME build information to a directory.
4
5 The pieces of information that will be written include:
6
7 1. Machine-specific build settings (i.e. the "Macros" file).
8 2. File-specific build settings (i.e. "Depends" files).
9 3. Environment variable loads (i.e. the env_mach_specific files).
10
11 The .env_mach_specific.sh and .env_mach_specific.csh files are specific to a
12 given compiler, MPI library, and DEBUG setting. By default, these will be the
13 machine's default compiler, the machine's default MPI library, and FALSE,
14 respectively. These can be changed by setting the environment variables
15 COMPILER, MPILIB, and DEBUG, respectively.
16 """
17
18 from CIME.XML.standard_module_setup import *
19 from CIME.utils import (
20 expect,
21 safe_copy,
22 get_model,
23 get_src_root,
24 stringify_bool,
25 copy_local_macros_to_dir,
26 )
27 from CIME.XML.env_mach_specific import EnvMachSpecific
28 from CIME.XML.files import Files
29 from CIME.build import CmakeTmpBuildDir
30
31 import shutil
32
33 logger = logging.getLogger(__name__)
34
35
36 def configure(
37 machobj,
38 output_dir,
39 macros_format,
40 compiler,
41 mpilib,
42 debug,
43 comp_interface,
44 sysos,
45 unit_testing=False,
46 noenv=False,
47 threaded=False,
48 extra_machines_dir=None,
49 ):
50 """Add Macros, Depends, and env_mach_specific files to a directory.
51
52 Arguments:
53 machobj - Machines argument for this machine.
54 output_dir - Directory in which to place output.
55 macros_format - Container containing the string 'Makefile' to produce
56 Makefile Macros output, and/or 'CMake' for CMake output.
57 compiler - String containing the compiler vendor to configure for.
58 mpilib - String containing the MPI implementation to configure for.
59 debug - Boolean specifying whether debugging options are enabled.
60 unit_testing - Boolean specifying whether we're running unit tests (as
61 opposed to a system run)
62 extra_machines_dir - String giving path to an additional directory that will be
63 searched for cmake_macros.
64 """
65 new_cmake_macros_dir = Files(comp_interface=comp_interface).get_value(
66 "CMAKE_MACROS_DIR"
67 )
68 for form in macros_format:
69
70 if not os.path.isfile(os.path.join(output_dir, "Macros.cmake")):
71 safe_copy(os.path.join(new_cmake_macros_dir, "Macros.cmake"), output_dir)
72 output_cmake_macros_dir = os.path.join(output_dir, "cmake_macros")
73 if not os.path.exists(output_cmake_macros_dir):
74 shutil.copytree(new_cmake_macros_dir, output_cmake_macros_dir)
75
76 copy_local_macros_to_dir(
77 output_cmake_macros_dir, extra_machdir=extra_machines_dir
78 )
79
80 if form == "Makefile":
81 # Use the cmake macros to generate the make macros
82 cmake_args = " -DOS={} -DMACH={} -DCOMPILER={} -DDEBUG={} -DMPILIB={} -Dcompile_threaded={} -DCASEROOT={}".format(
83 sysos,
84 machobj.get_machine_name(),
85 compiler,
86 stringify_bool(debug),
87 mpilib,
88 stringify_bool(threaded),
89 output_dir,
90 )
91
92 with CmakeTmpBuildDir(macroloc=output_dir) as cmaketmp:
93 output = cmaketmp.get_makefile_vars(cmake_args=cmake_args)
94
95 with open(os.path.join(output_dir, "Macros.make"), "w") as fd:
96 fd.write(output)
97
98 copy_depends_files(
99 machobj.get_machine_name(), machobj.machines_dir, output_dir, compiler
100 )
101 generate_env_mach_specific(
102 output_dir,
103 machobj,
104 compiler,
105 mpilib,
106 debug,
107 comp_interface,
108 sysos,
109 unit_testing,
110 threaded,
111 noenv=noenv,
112 )
113
114
115 def copy_depends_files(machine_name, machines_dir, output_dir, compiler):
116 """
117 Copy any system or compiler Depends files if they do not exist in the output directory
118 If there is a match for Depends.machine_name.compiler copy that and ignore the others
119 """
120 # Note, the cmake build system does not stop if Depends.mach.compiler.cmake is found
121 makefiles_done = False
122 both = "{}.{}".format(machine_name, compiler)
123 for suffix in [both, machine_name, compiler]:
124 for extra_suffix in ["", ".cmake"]:
125 if extra_suffix == "" and makefiles_done:
126 continue
127
128 basename = "Depends.{}{}".format(suffix, extra_suffix)
129 dfile = os.path.join(machines_dir, basename)
130 outputdfile = os.path.join(output_dir, basename)
131 if os.path.isfile(dfile):
132 if suffix == both and extra_suffix == "":
133 makefiles_done = True
134 if not os.path.exists(outputdfile):
135 safe_copy(dfile, outputdfile)
136
137
138 class FakeCase(object):
139 def __init__(self, compiler, mpilib, debug, comp_interface, threading=False):
140 # PIO_VERSION is needed to parse config_machines.xml but isn't otherwise used
141 # by FakeCase
142 self._vals = {
143 "COMPILER": compiler,
144 "MPILIB": mpilib,
145 "DEBUG": debug,
146 "COMP_INTERFACE": comp_interface,
147 "PIO_VERSION": 2,
148 "SMP_PRESENT": threading,
149 "MODEL": get_model(),
150 "SRCROOT": get_src_root(),
151 }
152
153 def get_build_threaded(self):
154 return self.get_value("SMP_PRESENT")
155
156 def get_value(self, attrib):
157 expect(
158 attrib in self._vals,
159 "FakeCase does not support getting value of '%s'" % attrib,
160 )
161 return self._vals[attrib]
162
163
164 def generate_env_mach_specific(
165 output_dir,
166 machobj,
167 compiler,
168 mpilib,
169 debug,
170 comp_interface,
171 sysos,
172 unit_testing,
173 threaded,
174 noenv=False,
175 ):
176 """
177 env_mach_specific generation.
178 """
179 ems_path = os.path.join(output_dir, "env_mach_specific.xml")
180 if os.path.exists(ems_path):
181 logger.warning("{} already exists, delete to replace".format(ems_path))
182 return
183
184 ems_file = EnvMachSpecific(
185 output_dir, unit_testing=unit_testing, standalone_configure=True
186 )
187 ems_file.populate(
188 machobj,
189 attributes={"mpilib": mpilib, "compiler": compiler, "threaded": threaded},
190 )
191 ems_file.write()
192
193 if noenv:
194 return
195
196 fake_case = FakeCase(compiler, mpilib, debug, comp_interface)
197 ems_file.load_env(fake_case)
198 for shell in ("sh", "csh"):
199 ems_file.make_env_mach_specific_file(shell, fake_case, output_dir=output_dir)
200 shell_path = os.path.join(output_dir, ".env_mach_specific." + shell)
201 with open(shell_path, "a") as shell_file:
202 if shell == "sh":
203 shell_file.write("\nexport COMPILER={}\n".format(compiler))
204 shell_file.write("export MPILIB={}\n".format(mpilib))
205 shell_file.write("export DEBUG={}\n".format(repr(debug).upper()))
206 shell_file.write("export OS={}\n".format(sysos))
207 else:
208 shell_file.write("\nsetenv COMPILER {}\n".format(compiler))
209 shell_file.write("setenv MPILIB {}\n".format(mpilib))
210 shell_file.write("setenv DEBUG {}\n".format(repr(debug).upper()))
211 shell_file.write("setenv OS {}\n".format(sysos))
212
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/CIME/BuildTools/configure.py b/CIME/BuildTools/configure.py
--- a/CIME/BuildTools/configure.py
+++ b/CIME/BuildTools/configure.py
@@ -153,6 +153,10 @@
     def get_build_threaded(self):
         return self.get_value("SMP_PRESENT")
 
+    def get_case_root(self):
+        """Returns the root directory for this case."""
+        return self.get_value("CASEROOT")
+
     def get_value(self, attrib):
         expect(
             attrib in self._vals,
@@ -160,6 +164,10 @@
         )
         return self._vals[attrib]
 
+    def set_value(self, attrib, value):
+        """Sets a given variable value for the case"""
+        self._vals[attrib] = value
+
 
 def generate_env_mach_specific(
     output_dir,
|
{"golden_diff": "diff --git a/CIME/BuildTools/configure.py b/CIME/BuildTools/configure.py\n--- a/CIME/BuildTools/configure.py\n+++ b/CIME/BuildTools/configure.py\n@@ -153,6 +153,10 @@\n def get_build_threaded(self):\n return self.get_value(\"SMP_PRESENT\")\n \n+ def get_case_root(self):\n+ \"\"\"Returns the root directory for this case.\"\"\"\n+ return self.get_value(\"CASEROOT\")\n+\n def get_value(self, attrib):\n expect(\n attrib in self._vals,\n@@ -160,6 +164,10 @@\n )\n return self._vals[attrib]\n \n+ def set_value(self, attrib, value):\n+ \"\"\"Sets a given variable value for the case\"\"\"\n+ self._vals[attrib] = value\n+\n \n def generate_env_mach_specific(\n output_dir,\n", "issue": "Add \"set_value\" method to CIME.BuildTools.configure.FakeCase so that user can add needed variables\nIn principle this is a simple change, but I want to make sure it's acceptable. I'd like to add a set_value method to the FakeCase class in CIME.BuiltTools.configure so that the user can add the variables they want to have in their FakeCase. The utility I see is for unit-testing of buildnml. But, I think there may be other cases where you want to create a simpler Fake case without doing all the normal needed setup to create a real case. There are some offline scripts we are thinking about having this kind of utility where we know we need to setup some type of fake case, but we want to hide it from the user, and it doesn't need to be fully functional like a real case.\r\n\r\n``` python\r\ndiff --git a/scripts/lib/CIME/BuildTools/configure.py b/scripts/lib/CIME/BuildTools/configure.py\r\nindex a74c087a5..fd7067ed7 100644\r\n--- a/scripts/lib/CIME/BuildTools/configure.py\r\n+++ b/scripts/lib/CIME/BuildTools/configure.py\r\n@@ -74,6 +74,9 @@ def get_value(self, attrib):\r\n expect(attrib in self._vals, \"FakeCase does not support getting value of '%s'\" % attrib)\r\n return self._vals[attrib]\r\n \r\n+ def set_value(self, attrib, value):\r\n+ self._vals[attrib] = value\r\n+\r\n def _generate_env_mach_specific(output_dir, machobj, compiler, mpilib, debug,\r\n sysos, unit_testing):\r\n \"\"\"\r\n```\r\n\r\n@jedwards4b does this sound acceptable? If so I'll submit a simple PR for it.\r\n@billsacks would this kind of thing possibly make LILAC easier to setup?\nAdd \"set_value\" method to CIME.BuildTools.configure.FakeCase so that user can add needed variables\nIn principle this is a simple change, but I want to make sure it's acceptable. I'd like to add a set_value method to the FakeCase class in CIME.BuiltTools.configure so that the user can add the variables they want to have in their FakeCase. The utility I see is for unit-testing of buildnml. But, I think there may be other cases where you want to create a simpler Fake case without doing all the normal needed setup to create a real case. 
There are some offline scripts we are thinking about having this kind of utility where we know we need to setup some type of fake case, but we want to hide it from the user, and it doesn't need to be fully functional like a real case.\r\n\r\n``` python\r\ndiff --git a/scripts/lib/CIME/BuildTools/configure.py b/scripts/lib/CIME/BuildTools/configure.py\r\nindex a74c087a5..fd7067ed7 100644\r\n--- a/scripts/lib/CIME/BuildTools/configure.py\r\n+++ b/scripts/lib/CIME/BuildTools/configure.py\r\n@@ -74,6 +74,9 @@ def get_value(self, attrib):\r\n expect(attrib in self._vals, \"FakeCase does not support getting value of '%s'\" % attrib)\r\n return self._vals[attrib]\r\n \r\n+ def set_value(self, attrib, value):\r\n+ self._vals[attrib] = value\r\n+\r\n def _generate_env_mach_specific(output_dir, machobj, compiler, mpilib, debug,\r\n sysos, unit_testing):\r\n \"\"\"\r\n```\r\n\r\n@jedwards4b does this sound acceptable? If so I'll submit a simple PR for it.\r\n@billsacks would this kind of thing possibly make LILAC easier to setup?\n", "before_files": [{"content": "#!/usr/bin/env python3\n\n\"\"\"This script writes CIME build information to a directory.\n\nThe pieces of information that will be written include:\n\n1. Machine-specific build settings (i.e. the \"Macros\" file).\n2. File-specific build settings (i.e. \"Depends\" files).\n3. Environment variable loads (i.e. the env_mach_specific files).\n\nThe .env_mach_specific.sh and .env_mach_specific.csh files are specific to a\ngiven compiler, MPI library, and DEBUG setting. By default, these will be the\nmachine's default compiler, the machine's default MPI library, and FALSE,\nrespectively. These can be changed by setting the environment variables\nCOMPILER, MPILIB, and DEBUG, respectively.\n\"\"\"\n\nfrom CIME.XML.standard_module_setup import *\nfrom CIME.utils import (\n expect,\n safe_copy,\n get_model,\n get_src_root,\n stringify_bool,\n copy_local_macros_to_dir,\n)\nfrom CIME.XML.env_mach_specific import EnvMachSpecific\nfrom CIME.XML.files import Files\nfrom CIME.build import CmakeTmpBuildDir\n\nimport shutil\n\nlogger = logging.getLogger(__name__)\n\n\ndef configure(\n machobj,\n output_dir,\n macros_format,\n compiler,\n mpilib,\n debug,\n comp_interface,\n sysos,\n unit_testing=False,\n noenv=False,\n threaded=False,\n extra_machines_dir=None,\n):\n \"\"\"Add Macros, Depends, and env_mach_specific files to a directory.\n\n Arguments:\n machobj - Machines argument for this machine.\n output_dir - Directory in which to place output.\n macros_format - Container containing the string 'Makefile' to produce\n Makefile Macros output, and/or 'CMake' for CMake output.\n compiler - String containing the compiler vendor to configure for.\n mpilib - String containing the MPI implementation to configure for.\n debug - Boolean specifying whether debugging options are enabled.\n unit_testing - Boolean specifying whether we're running unit tests (as\n opposed to a system run)\n extra_machines_dir - String giving path to an additional directory that will be\n searched for cmake_macros.\n \"\"\"\n new_cmake_macros_dir = Files(comp_interface=comp_interface).get_value(\n \"CMAKE_MACROS_DIR\"\n )\n for form in macros_format:\n\n if not os.path.isfile(os.path.join(output_dir, \"Macros.cmake\")):\n safe_copy(os.path.join(new_cmake_macros_dir, \"Macros.cmake\"), output_dir)\n output_cmake_macros_dir = os.path.join(output_dir, \"cmake_macros\")\n if not os.path.exists(output_cmake_macros_dir):\n shutil.copytree(new_cmake_macros_dir, output_cmake_macros_dir)\n\n 
copy_local_macros_to_dir(\n output_cmake_macros_dir, extra_machdir=extra_machines_dir\n )\n\n if form == \"Makefile\":\n # Use the cmake macros to generate the make macros\n cmake_args = \" -DOS={} -DMACH={} -DCOMPILER={} -DDEBUG={} -DMPILIB={} -Dcompile_threaded={} -DCASEROOT={}\".format(\n sysos,\n machobj.get_machine_name(),\n compiler,\n stringify_bool(debug),\n mpilib,\n stringify_bool(threaded),\n output_dir,\n )\n\n with CmakeTmpBuildDir(macroloc=output_dir) as cmaketmp:\n output = cmaketmp.get_makefile_vars(cmake_args=cmake_args)\n\n with open(os.path.join(output_dir, \"Macros.make\"), \"w\") as fd:\n fd.write(output)\n\n copy_depends_files(\n machobj.get_machine_name(), machobj.machines_dir, output_dir, compiler\n )\n generate_env_mach_specific(\n output_dir,\n machobj,\n compiler,\n mpilib,\n debug,\n comp_interface,\n sysos,\n unit_testing,\n threaded,\n noenv=noenv,\n )\n\n\ndef copy_depends_files(machine_name, machines_dir, output_dir, compiler):\n \"\"\"\n Copy any system or compiler Depends files if they do not exist in the output directory\n If there is a match for Depends.machine_name.compiler copy that and ignore the others\n \"\"\"\n # Note, the cmake build system does not stop if Depends.mach.compiler.cmake is found\n makefiles_done = False\n both = \"{}.{}\".format(machine_name, compiler)\n for suffix in [both, machine_name, compiler]:\n for extra_suffix in [\"\", \".cmake\"]:\n if extra_suffix == \"\" and makefiles_done:\n continue\n\n basename = \"Depends.{}{}\".format(suffix, extra_suffix)\n dfile = os.path.join(machines_dir, basename)\n outputdfile = os.path.join(output_dir, basename)\n if os.path.isfile(dfile):\n if suffix == both and extra_suffix == \"\":\n makefiles_done = True\n if not os.path.exists(outputdfile):\n safe_copy(dfile, outputdfile)\n\n\nclass FakeCase(object):\n def __init__(self, compiler, mpilib, debug, comp_interface, threading=False):\n # PIO_VERSION is needed to parse config_machines.xml but isn't otherwise used\n # by FakeCase\n self._vals = {\n \"COMPILER\": compiler,\n \"MPILIB\": mpilib,\n \"DEBUG\": debug,\n \"COMP_INTERFACE\": comp_interface,\n \"PIO_VERSION\": 2,\n \"SMP_PRESENT\": threading,\n \"MODEL\": get_model(),\n \"SRCROOT\": get_src_root(),\n }\n\n def get_build_threaded(self):\n return self.get_value(\"SMP_PRESENT\")\n\n def get_value(self, attrib):\n expect(\n attrib in self._vals,\n \"FakeCase does not support getting value of '%s'\" % attrib,\n )\n return self._vals[attrib]\n\n\ndef generate_env_mach_specific(\n output_dir,\n machobj,\n compiler,\n mpilib,\n debug,\n comp_interface,\n sysos,\n unit_testing,\n threaded,\n noenv=False,\n):\n \"\"\"\n env_mach_specific generation.\n \"\"\"\n ems_path = os.path.join(output_dir, \"env_mach_specific.xml\")\n if os.path.exists(ems_path):\n logger.warning(\"{} already exists, delete to replace\".format(ems_path))\n return\n\n ems_file = EnvMachSpecific(\n output_dir, unit_testing=unit_testing, standalone_configure=True\n )\n ems_file.populate(\n machobj,\n attributes={\"mpilib\": mpilib, \"compiler\": compiler, \"threaded\": threaded},\n )\n ems_file.write()\n\n if noenv:\n return\n\n fake_case = FakeCase(compiler, mpilib, debug, comp_interface)\n ems_file.load_env(fake_case)\n for shell in (\"sh\", \"csh\"):\n ems_file.make_env_mach_specific_file(shell, fake_case, output_dir=output_dir)\n shell_path = os.path.join(output_dir, \".env_mach_specific.\" + shell)\n with open(shell_path, \"a\") as shell_file:\n if shell == \"sh\":\n shell_file.write(\"\\nexport 
COMPILER={}\\n\".format(compiler))\n shell_file.write(\"export MPILIB={}\\n\".format(mpilib))\n shell_file.write(\"export DEBUG={}\\n\".format(repr(debug).upper()))\n shell_file.write(\"export OS={}\\n\".format(sysos))\n else:\n shell_file.write(\"\\nsetenv COMPILER {}\\n\".format(compiler))\n shell_file.write(\"setenv MPILIB {}\\n\".format(mpilib))\n shell_file.write(\"setenv DEBUG {}\\n\".format(repr(debug).upper()))\n shell_file.write(\"setenv OS {}\\n\".format(sysos))\n", "path": "CIME/BuildTools/configure.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\n\"\"\"This script writes CIME build information to a directory.\n\nThe pieces of information that will be written include:\n\n1. Machine-specific build settings (i.e. the \"Macros\" file).\n2. File-specific build settings (i.e. \"Depends\" files).\n3. Environment variable loads (i.e. the env_mach_specific files).\n\nThe .env_mach_specific.sh and .env_mach_specific.csh files are specific to a\ngiven compiler, MPI library, and DEBUG setting. By default, these will be the\nmachine's default compiler, the machine's default MPI library, and FALSE,\nrespectively. These can be changed by setting the environment variables\nCOMPILER, MPILIB, and DEBUG, respectively.\n\"\"\"\n\nfrom CIME.XML.standard_module_setup import *\nfrom CIME.utils import (\n expect,\n safe_copy,\n get_model,\n get_src_root,\n stringify_bool,\n copy_local_macros_to_dir,\n)\nfrom CIME.XML.env_mach_specific import EnvMachSpecific\nfrom CIME.XML.files import Files\nfrom CIME.build import CmakeTmpBuildDir\n\nimport shutil\n\nlogger = logging.getLogger(__name__)\n\n\ndef configure(\n machobj,\n output_dir,\n macros_format,\n compiler,\n mpilib,\n debug,\n comp_interface,\n sysos,\n unit_testing=False,\n noenv=False,\n threaded=False,\n extra_machines_dir=None,\n):\n \"\"\"Add Macros, Depends, and env_mach_specific files to a directory.\n\n Arguments:\n machobj - Machines argument for this machine.\n output_dir - Directory in which to place output.\n macros_format - Container containing the string 'Makefile' to produce\n Makefile Macros output, and/or 'CMake' for CMake output.\n compiler - String containing the compiler vendor to configure for.\n mpilib - String containing the MPI implementation to configure for.\n debug - Boolean specifying whether debugging options are enabled.\n unit_testing - Boolean specifying whether we're running unit tests (as\n opposed to a system run)\n extra_machines_dir - String giving path to an additional directory that will be\n searched for cmake_macros.\n \"\"\"\n new_cmake_macros_dir = Files(comp_interface=comp_interface).get_value(\n \"CMAKE_MACROS_DIR\"\n )\n for form in macros_format:\n\n if not os.path.isfile(os.path.join(output_dir, \"Macros.cmake\")):\n safe_copy(os.path.join(new_cmake_macros_dir, \"Macros.cmake\"), output_dir)\n output_cmake_macros_dir = os.path.join(output_dir, \"cmake_macros\")\n if not os.path.exists(output_cmake_macros_dir):\n shutil.copytree(new_cmake_macros_dir, output_cmake_macros_dir)\n\n copy_local_macros_to_dir(\n output_cmake_macros_dir, extra_machdir=extra_machines_dir\n )\n\n if form == \"Makefile\":\n # Use the cmake macros to generate the make macros\n cmake_args = \" -DOS={} -DMACH={} -DCOMPILER={} -DDEBUG={} -DMPILIB={} -Dcompile_threaded={} -DCASEROOT={}\".format(\n sysos,\n machobj.get_machine_name(),\n compiler,\n stringify_bool(debug),\n mpilib,\n stringify_bool(threaded),\n output_dir,\n )\n\n with CmakeTmpBuildDir(macroloc=output_dir) as cmaketmp:\n output = 
cmaketmp.get_makefile_vars(cmake_args=cmake_args)\n\n with open(os.path.join(output_dir, \"Macros.make\"), \"w\") as fd:\n fd.write(output)\n\n copy_depends_files(\n machobj.get_machine_name(), machobj.machines_dir, output_dir, compiler\n )\n generate_env_mach_specific(\n output_dir,\n machobj,\n compiler,\n mpilib,\n debug,\n comp_interface,\n sysos,\n unit_testing,\n threaded,\n noenv=noenv,\n )\n\n\ndef copy_depends_files(machine_name, machines_dir, output_dir, compiler):\n \"\"\"\n Copy any system or compiler Depends files if they do not exist in the output directory\n If there is a match for Depends.machine_name.compiler copy that and ignore the others\n \"\"\"\n # Note, the cmake build system does not stop if Depends.mach.compiler.cmake is found\n makefiles_done = False\n both = \"{}.{}\".format(machine_name, compiler)\n for suffix in [both, machine_name, compiler]:\n for extra_suffix in [\"\", \".cmake\"]:\n if extra_suffix == \"\" and makefiles_done:\n continue\n\n basename = \"Depends.{}{}\".format(suffix, extra_suffix)\n dfile = os.path.join(machines_dir, basename)\n outputdfile = os.path.join(output_dir, basename)\n if os.path.isfile(dfile):\n if suffix == both and extra_suffix == \"\":\n makefiles_done = True\n if not os.path.exists(outputdfile):\n safe_copy(dfile, outputdfile)\n\n\nclass FakeCase(object):\n def __init__(self, compiler, mpilib, debug, comp_interface, threading=False):\n # PIO_VERSION is needed to parse config_machines.xml but isn't otherwise used\n # by FakeCase\n self._vals = {\n \"COMPILER\": compiler,\n \"MPILIB\": mpilib,\n \"DEBUG\": debug,\n \"COMP_INTERFACE\": comp_interface,\n \"PIO_VERSION\": 2,\n \"SMP_PRESENT\": threading,\n \"MODEL\": get_model(),\n \"SRCROOT\": get_src_root(),\n }\n\n def get_build_threaded(self):\n return self.get_value(\"SMP_PRESENT\")\n\n def get_case_root(self):\n \"\"\"Returns the root directory for this case.\"\"\"\n return self.get_value(\"CASEROOT\")\n\n def get_value(self, attrib):\n expect(\n attrib in self._vals,\n \"FakeCase does not support getting value of '%s'\" % attrib,\n )\n return self._vals[attrib]\n\n def set_value(self, attrib, value):\n \"\"\"Sets a given variable value for the case\"\"\"\n self._vals[attrib] = value\n\n\ndef generate_env_mach_specific(\n output_dir,\n machobj,\n compiler,\n mpilib,\n debug,\n comp_interface,\n sysos,\n unit_testing,\n threaded,\n noenv=False,\n):\n \"\"\"\n env_mach_specific generation.\n \"\"\"\n ems_path = os.path.join(output_dir, \"env_mach_specific.xml\")\n if os.path.exists(ems_path):\n logger.warning(\"{} already exists, delete to replace\".format(ems_path))\n return\n\n ems_file = EnvMachSpecific(\n output_dir, unit_testing=unit_testing, standalone_configure=True\n )\n ems_file.populate(\n machobj,\n attributes={\"mpilib\": mpilib, \"compiler\": compiler, \"threaded\": threaded},\n )\n ems_file.write()\n\n if noenv:\n return\n\n fake_case = FakeCase(compiler, mpilib, debug, comp_interface)\n ems_file.load_env(fake_case)\n for shell in (\"sh\", \"csh\"):\n ems_file.make_env_mach_specific_file(shell, fake_case, output_dir=output_dir)\n shell_path = os.path.join(output_dir, \".env_mach_specific.\" + shell)\n with open(shell_path, \"a\") as shell_file:\n if shell == \"sh\":\n shell_file.write(\"\\nexport COMPILER={}\\n\".format(compiler))\n shell_file.write(\"export MPILIB={}\\n\".format(mpilib))\n shell_file.write(\"export DEBUG={}\\n\".format(repr(debug).upper()))\n shell_file.write(\"export OS={}\\n\".format(sysos))\n else:\n shell_file.write(\"\\nsetenv COMPILER 
{}\\n\".format(compiler))\n shell_file.write(\"setenv MPILIB {}\\n\".format(mpilib))\n shell_file.write(\"setenv DEBUG {}\\n\".format(repr(debug).upper()))\n shell_file.write(\"setenv OS {}\\n\".format(sysos))\n", "path": "CIME/BuildTools/configure.py"}]}
| 3,267 | 200 |
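The patch for ESMCI__cime-4326 above boils down to two small additions on `FakeCase`: `get_case_root()` and `set_value(attrib, value)`. The sketch below is only an illustration of how a buildnml-style unit test might exercise them once the patch is applied; the constructor arguments follow the `FakeCase.__init__` signature shown in `CIME/BuildTools/configure.py`, while the compiler/MPI values and the extra `CASEROOT`/`NTASKS` variables are hypothetical choices, not taken from the dataset row.

```python
# Illustrative sketch, assuming the patched CIME/BuildTools/configure.py is importable.
from CIME.BuildTools.configure import FakeCase

# __init__(compiler, mpilib, debug, comp_interface, threading=False)
fake = FakeCase("gnu", "mpich", False, "nuopc")

# set_value lets a test inject variables a real case would normally provide.
fake.set_value("CASEROOT", "/tmp/fake_case")  # hypothetical path
fake.set_value("NTASKS", 8)                   # hypothetical extra variable

assert fake.get_case_root() == "/tmp/fake_case"
assert fake.get_value("NTASKS") == 8
```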
gh_patches_debug_37590
|
rasdani/github-patches
|
git_diff
|
urllib3__urllib3-840
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
urllib3 attempts to use IPv6 even when IPv6 is disabled
This is an issue when running on a server without IPv6 (it must be disabled because the network does not support it). For example, when connecting to https://graph.facebook.com using requests and the IPv4 attempt happens to fail:
```
HTTPSConnectionPool(host='graph.facebook.com', port=443): Max retries exceeded with url: /v2.5/me/feed (Caused by NewConnectionError('<requests.packages.urllib3.connection.VerifiedHTTPSConnection object at 0x7f4dbd158518>: Failed to establish a new connection: [Errno 97] Address family not supported by protocol',))
Traceback (most recent call last):
File "/home/lib/python3.4/site-packages/requests/packages/urllib3/connection.py", line 137, in _new_conn
(self.host, self.port), self.timeout, **extra_kw)
File "/home/lib/python3.4/site-packages/requests/packages/urllib3/util/connection.py", line 91, in create_connection
raise err
File "/home/lib/python3.4/site-packages/requests/packages/urllib3/util/connection.py", line 71, in create_connection
sock = socket.socket(af, socktype, proto)
File "/usr/lib/python3.4/socket.py", line 126, in __init__
_socket.socket.__init__(self, family, type, proto, fileno)
OSError: [Errno 97] Address family not supported by protocol
```
urllib3 should throw an exception after exhausting all IPv4 options instead of trying (and invariably failing) IPv6.
See closed issue https://github.com/kennethreitz/requests/issues/3084.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `urllib3/util/connection.py`
Content:
```
1 from __future__ import absolute_import
2 import socket
3 try:
4 from select import poll, POLLIN
5 except ImportError: # `poll` doesn't exist on OSX and other platforms
6 poll = False
7 try:
8 from select import select
9 except ImportError: # `select` doesn't exist on AppEngine.
10 select = False
11
12
13 def is_connection_dropped(conn): # Platform-specific
14 """
15 Returns True if the connection is dropped and should be closed.
16
17 :param conn:
18 :class:`httplib.HTTPConnection` object.
19
20 Note: For platforms like AppEngine, this will always return ``False`` to
21 let the platform handle connection recycling transparently for us.
22 """
23 sock = getattr(conn, 'sock', False)
24 if sock is False: # Platform-specific: AppEngine
25 return False
26 if sock is None: # Connection already closed (such as by httplib).
27 return True
28
29 if not poll:
30 if not select: # Platform-specific: AppEngine
31 return False
32
33 try:
34 return select([sock], [], [], 0.0)[0]
35 except socket.error:
36 return True
37
38 # This version is better on platforms that support it.
39 p = poll()
40 p.register(sock, POLLIN)
41 for (fno, ev) in p.poll(0.0):
42 if fno == sock.fileno():
43 # Either data is buffered (bad), or the connection is dropped.
44 return True
45
46
47 # This function is copied from socket.py in the Python 2.7 standard
48 # library test suite. Added to its signature is only `socket_options`.
49 def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
50 source_address=None, socket_options=None):
51 """Connect to *address* and return the socket object.
52
53 Convenience function. Connect to *address* (a 2-tuple ``(host,
54 port)``) and return the socket object. Passing the optional
55 *timeout* parameter will set the timeout on the socket instance
56 before attempting to connect. If no *timeout* is supplied, the
57 global default timeout setting returned by :func:`getdefaulttimeout`
58 is used. If *source_address* is set it must be a tuple of (host, port)
59 for the socket to bind as a source address before making the connection.
60 An host of '' or port 0 tells the OS to use the default.
61 """
62
63 host, port = address
64 if host.startswith('['):
65 host = host.strip('[]')
66 err = None
67 for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
68 af, socktype, proto, canonname, sa = res
69 sock = None
70 try:
71 sock = socket.socket(af, socktype, proto)
72
73 # If provided, set socket level options before connecting.
74 # This is the only addition urllib3 makes to this function.
75 _set_socket_options(sock, socket_options)
76
77 if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
78 sock.settimeout(timeout)
79 if source_address:
80 sock.bind(source_address)
81 sock.connect(sa)
82 return sock
83
84 except socket.error as e:
85 err = e
86 if sock is not None:
87 sock.close()
88 sock = None
89
90 if err is not None:
91 raise err
92
93 raise socket.error("getaddrinfo returns an empty list")
94
95
96 def _set_socket_options(sock, options):
97 if options is None:
98 return
99
100 for opt in options:
101 sock.setsockopt(*opt)
102
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/urllib3/util/connection.py b/urllib3/util/connection.py
--- a/urllib3/util/connection.py
+++ b/urllib3/util/connection.py
@@ -46,6 +46,8 @@
 
 # This function is copied from socket.py in the Python 2.7 standard
 # library test suite. Added to its signature is only `socket_options`.
+# One additional modification is that we avoid binding to IPv6 servers
+# discovered in DNS if the system doesn't have IPv6 functionality.
 def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                       source_address=None, socket_options=None):
     """Connect to *address* and return the socket object.
@@ -64,14 +66,19 @@
     if host.startswith('['):
         host = host.strip('[]')
     err = None
-    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
+
+    # Using the value from allowed_gai_family() in the context of getaddrinfo lets
+    # us select whether to work with IPv4 DNS records, IPv6 records, or both.
+    # The original create_connection function always returns all records.
+    family = allowed_gai_family()
+
+    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
         af, socktype, proto, canonname, sa = res
         sock = None
         try:
             sock = socket.socket(af, socktype, proto)
 
             # If provided, set socket level options before connecting.
-            # This is the only addition urllib3 makes to this function.
             _set_socket_options(sock, socket_options)
 
             if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
@@ -99,3 +106,39 @@
 
     for opt in options:
         sock.setsockopt(*opt)
+
+
+def allowed_gai_family():
+    """This function is designed to work in the context of
+    getaddrinfo, where family=socket.AF_UNSPEC is the default and
+    will perform a DNS search for both IPv6 and IPv4 records."""
+
+    family = socket.AF_INET
+    if HAS_IPV6:
+        family = socket.AF_UNSPEC
+    return family
+
+
+def _has_ipv6(host):
+    """ Returns True if the system can bind an IPv6 address. """
+    sock = None
+    has_ipv6 = False
+
+    if socket.has_ipv6:
+        # has_ipv6 returns true if cPython was compiled with IPv6 support.
+        # It does not tell us if the system has IPv6 support enabled. To
+        # determine that we must bind to an IPv6 address.
+        # https://github.com/shazow/urllib3/pull/611
+        # https://bugs.python.org/issue658327
+        try:
+            sock = socket.socket(socket.AF_INET6)
+            sock.bind((host, 0))
+            has_ipv6 = True
+        except Exception:
+            pass
+
+    if sock:
+        sock.close()
+    return has_ipv6
+
+HAS_IPV6 = _has_ipv6('::1')
|
{"golden_diff": "diff --git a/urllib3/util/connection.py b/urllib3/util/connection.py\n--- a/urllib3/util/connection.py\n+++ b/urllib3/util/connection.py\n@@ -46,6 +46,8 @@\n \n # This function is copied from socket.py in the Python 2.7 standard\n # library test suite. Added to its signature is only `socket_options`.\n+# One additional modification is that we avoid binding to IPv6 servers\n+# discovered in DNS if the system doesn't have IPv6 functionality.\n def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,\n source_address=None, socket_options=None):\n \"\"\"Connect to *address* and return the socket object.\n@@ -64,14 +66,19 @@\n if host.startswith('['):\n host = host.strip('[]')\n err = None\n- for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):\n+\n+ # Using the value from allowed_gai_family() in the context of getaddrinfo lets\n+ # us select whether to work with IPv4 DNS records, IPv6 records, or both.\n+ # The original create_connection function always returns all records.\n+ family = allowed_gai_family()\n+\n+ for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):\n af, socktype, proto, canonname, sa = res\n sock = None\n try:\n sock = socket.socket(af, socktype, proto)\n \n # If provided, set socket level options before connecting.\n- # This is the only addition urllib3 makes to this function.\n _set_socket_options(sock, socket_options)\n \n if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:\n@@ -99,3 +106,39 @@\n \n for opt in options:\n sock.setsockopt(*opt)\n+\n+\n+def allowed_gai_family():\n+ \"\"\"This function is designed to work in the context of\n+ getaddrinfo, where family=socket.AF_UNSPEC is the default and\n+ will perform a DNS search for both IPv6 and IPv4 records.\"\"\"\n+\n+ family = socket.AF_INET\n+ if HAS_IPV6:\n+ family = socket.AF_UNSPEC\n+ return family\n+\n+\n+def _has_ipv6(host):\n+ \"\"\" Returns True if the system can bind an IPv6 address. \"\"\"\n+ sock = None\n+ has_ipv6 = False\n+\n+ if socket.has_ipv6:\n+ # has_ipv6 returns true if cPython was compiled with IPv6 support.\n+ # It does not tell us if the system has IPv6 support enabled. To\n+ # determine that we must bind to an IPv6 address.\n+ # https://github.com/shazow/urllib3/pull/611\n+ # https://bugs.python.org/issue658327\n+ try:\n+ sock = socket.socket(socket.AF_INET6)\n+ sock.bind((host, 0))\n+ has_ipv6 = True\n+ except Exception:\n+ pass\n+\n+ if sock:\n+ sock.close()\n+ return has_ipv6\n+\n+HAS_IPV6 = _has_ipv6('::1')\n", "issue": "urllib3 attempts to use IPv6 even when IPv6 is disabled\nThis is an issue when running on a server without IPv6 (must be disabled because the network does not support it). 
Example when connecting to https://graph.facebook.com using requests and IPv4 happens to fail:\n\n```\nHTTPSConnectionPool(host='graph.facebook.com', port=443): Max retries exceeded with url: /v2.5/me/feed (Caused by NewConnectionError('<requests.packages.urllib3.connection.VerifiedHTTPSConnection object at 0x7f4dbd158518>: Failed to establish a new connection: [Errno 97] Address family not supported by protocol',))\nTraceback (most recent call last):\n File \"/home/lib/python3.4/site-packages/requests/packages/urllib3/connection.py\", line 137, in _new_conn\n (self.host, self.port), self.timeout, **extra_kw)\n File \"/home/lib/python3.4/site-packages/requests/packages/urllib3/util/connection.py\", line 91, in create_connection\n raise err\n File \"/home/lib/python3.4/site-packages/requests/packages/urllib3/util/connection.py\", line 71, in create_connection\n sock = socket.socket(af, socktype, proto)\n File \"/usr/lib/python3.4/socket.py\", line 126, in __init__\n _socket.socket.__init__(self, family, type, proto, fileno)\nOSError: [Errno 97] Address family not supported by protocol\n```\n\nurllib3 should throw an exception after exhausting all IPv4 options instead of trying (and invariably failing) IPv6.\n\nSee closed issue https://github.com/kennethreitz/requests/issues/3084.\n\n", "before_files": [{"content": "from __future__ import absolute_import\nimport socket\ntry:\n from select import poll, POLLIN\nexcept ImportError: # `poll` doesn't exist on OSX and other platforms\n poll = False\n try:\n from select import select\n except ImportError: # `select` doesn't exist on AppEngine.\n select = False\n\n\ndef is_connection_dropped(conn): # Platform-specific\n \"\"\"\n Returns True if the connection is dropped and should be closed.\n\n :param conn:\n :class:`httplib.HTTPConnection` object.\n\n Note: For platforms like AppEngine, this will always return ``False`` to\n let the platform handle connection recycling transparently for us.\n \"\"\"\n sock = getattr(conn, 'sock', False)\n if sock is False: # Platform-specific: AppEngine\n return False\n if sock is None: # Connection already closed (such as by httplib).\n return True\n\n if not poll:\n if not select: # Platform-specific: AppEngine\n return False\n\n try:\n return select([sock], [], [], 0.0)[0]\n except socket.error:\n return True\n\n # This version is better on platforms that support it.\n p = poll()\n p.register(sock, POLLIN)\n for (fno, ev) in p.poll(0.0):\n if fno == sock.fileno():\n # Either data is buffered (bad), or the connection is dropped.\n return True\n\n\n# This function is copied from socket.py in the Python 2.7 standard\n# library test suite. Added to its signature is only `socket_options`.\ndef create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,\n source_address=None, socket_options=None):\n \"\"\"Connect to *address* and return the socket object.\n\n Convenience function. Connect to *address* (a 2-tuple ``(host,\n port)``) and return the socket object. Passing the optional\n *timeout* parameter will set the timeout on the socket instance\n before attempting to connect. If no *timeout* is supplied, the\n global default timeout setting returned by :func:`getdefaulttimeout`\n is used. 
If *source_address* is set it must be a tuple of (host, port)\n for the socket to bind as a source address before making the connection.\n An host of '' or port 0 tells the OS to use the default.\n \"\"\"\n\n host, port = address\n if host.startswith('['):\n host = host.strip('[]')\n err = None\n for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):\n af, socktype, proto, canonname, sa = res\n sock = None\n try:\n sock = socket.socket(af, socktype, proto)\n\n # If provided, set socket level options before connecting.\n # This is the only addition urllib3 makes to this function.\n _set_socket_options(sock, socket_options)\n\n if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:\n sock.settimeout(timeout)\n if source_address:\n sock.bind(source_address)\n sock.connect(sa)\n return sock\n\n except socket.error as e:\n err = e\n if sock is not None:\n sock.close()\n sock = None\n\n if err is not None:\n raise err\n\n raise socket.error(\"getaddrinfo returns an empty list\")\n\n\ndef _set_socket_options(sock, options):\n if options is None:\n return\n\n for opt in options:\n sock.setsockopt(*opt)\n", "path": "urllib3/util/connection.py"}], "after_files": [{"content": "from __future__ import absolute_import\nimport socket\ntry:\n from select import poll, POLLIN\nexcept ImportError: # `poll` doesn't exist on OSX and other platforms\n poll = False\n try:\n from select import select\n except ImportError: # `select` doesn't exist on AppEngine.\n select = False\n\n\ndef is_connection_dropped(conn): # Platform-specific\n \"\"\"\n Returns True if the connection is dropped and should be closed.\n\n :param conn:\n :class:`httplib.HTTPConnection` object.\n\n Note: For platforms like AppEngine, this will always return ``False`` to\n let the platform handle connection recycling transparently for us.\n \"\"\"\n sock = getattr(conn, 'sock', False)\n if sock is False: # Platform-specific: AppEngine\n return False\n if sock is None: # Connection already closed (such as by httplib).\n return True\n\n if not poll:\n if not select: # Platform-specific: AppEngine\n return False\n\n try:\n return select([sock], [], [], 0.0)[0]\n except socket.error:\n return True\n\n # This version is better on platforms that support it.\n p = poll()\n p.register(sock, POLLIN)\n for (fno, ev) in p.poll(0.0):\n if fno == sock.fileno():\n # Either data is buffered (bad), or the connection is dropped.\n return True\n\n\n# This function is copied from socket.py in the Python 2.7 standard\n# library test suite. Added to its signature is only `socket_options`.\n# One additional modification is that we avoid binding to IPv6 servers\n# discovered in DNS if the system doesn't have IPv6 functionality.\ndef create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,\n source_address=None, socket_options=None):\n \"\"\"Connect to *address* and return the socket object.\n\n Convenience function. Connect to *address* (a 2-tuple ``(host,\n port)``) and return the socket object. Passing the optional\n *timeout* parameter will set the timeout on the socket instance\n before attempting to connect. If no *timeout* is supplied, the\n global default timeout setting returned by :func:`getdefaulttimeout`\n is used. 
If *source_address* is set it must be a tuple of (host, port)\n for the socket to bind as a source address before making the connection.\n An host of '' or port 0 tells the OS to use the default.\n \"\"\"\n\n host, port = address\n if host.startswith('['):\n host = host.strip('[]')\n err = None\n\n # Using the value from allowed_gai_family() in the context of getaddrinfo lets\n # us select whether to work with IPv4 DNS records, IPv6 records, or both.\n # The original create_connection function always returns all records.\n family = allowed_gai_family()\n\n for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):\n af, socktype, proto, canonname, sa = res\n sock = None\n try:\n sock = socket.socket(af, socktype, proto)\n\n # If provided, set socket level options before connecting.\n _set_socket_options(sock, socket_options)\n\n if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:\n sock.settimeout(timeout)\n if source_address:\n sock.bind(source_address)\n sock.connect(sa)\n return sock\n\n except socket.error as e:\n err = e\n if sock is not None:\n sock.close()\n sock = None\n\n if err is not None:\n raise err\n\n raise socket.error(\"getaddrinfo returns an empty list\")\n\n\ndef _set_socket_options(sock, options):\n if options is None:\n return\n\n for opt in options:\n sock.setsockopt(*opt)\n\n\ndef allowed_gai_family():\n \"\"\"This function is designed to work in the context of\n getaddrinfo, where family=socket.AF_UNSPEC is the default and\n will perform a DNS search for both IPv6 and IPv4 records.\"\"\"\n\n family = socket.AF_INET\n if HAS_IPV6:\n family = socket.AF_UNSPEC\n return family\n\n\ndef _has_ipv6(host):\n \"\"\" Returns True if the system can bind an IPv6 address. \"\"\"\n sock = None\n has_ipv6 = False\n\n if socket.has_ipv6:\n # has_ipv6 returns true if cPython was compiled with IPv6 support.\n # It does not tell us if the system has IPv6 support enabled. To\n # determine that we must bind to an IPv6 address.\n # https://github.com/shazow/urllib3/pull/611\n # https://bugs.python.org/issue658327\n try:\n sock = socket.socket(socket.AF_INET6)\n sock.bind((host, 0))\n has_ipv6 = True\n except Exception:\n pass\n\n if sock:\n sock.close()\n return has_ipv6\n\nHAS_IPV6 = _has_ipv6('::1')\n", "path": "urllib3/util/connection.py"}]}
| 1,620 | 700 |
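The urllib3 patch above hinges on a one-time probe: if the process cannot bind an IPv6 socket, `allowed_gai_family()` returns `socket.AF_INET`, so `getaddrinfo` never yields IPv6 records and `create_connection` stops failing with "Address family not supported by protocol". The snippet below is a self-contained sketch of that probe written against the standard library only; it mirrors the `_has_ipv6` helper from the golden diff rather than importing anything from urllib3.

```python
import socket

def has_ipv6(host="::1"):
    # socket.has_ipv6 only says CPython was *built* with IPv6 support;
    # binding an IPv6 socket verifies the running system actually allows it.
    sock = None
    try:
        sock = socket.socket(socket.AF_INET6)
        sock.bind((host, 0))
        return True
    except Exception:
        return False
    finally:
        if sock:
            sock.close()

# AF_INET restricts getaddrinfo to IPv4 records; AF_UNSPEC returns both families.
family = socket.AF_UNSPEC if has_ipv6() else socket.AF_INET
print(socket.getaddrinfo("example.com", 443, family, socket.SOCK_STREAM))
```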
gh_patches_debug_57104
|
rasdani/github-patches
|
git_diff
|
pyro-ppl__pyro-1704
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
make test: no attribute 'optim' error in 'examples/contrib/oed/ab_test.py'
### Issue Description
On the latest dev branch, `make test` gives the following error:
_
examples/contrib/oed/ab_test.py:12: in <module>
from gp_bayes_opt import GPBayesOptimizer
examples/contrib/oed/gp_bayes_opt.py:11: in <module>
class GPBayesOptimizer(pyro.optim.multi.MultiOptimizer):
E AttributeError: module 'pyro' has no attribute 'optim'
### Environment
For any bugs, please provide the following:
- OS and python version: CentOS Linux 7 (Core); Python 3.7.1
- PyTorch version, or if relevant, output of `pip freeze`: PyTorch 1.0.0
- Pyro version: output of `python -c 'import pyro; print pyro.__version__'`: pyro 0.3.0+9adbdb7
### Code Snippet
```
make install
make format
make test
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/contrib/oed/gp_bayes_opt.py`
Content:
```
1 import torch
2 import torch.autograd as autograd
3 import torch.optim as optim
4 from torch.distributions import transform_to
5
6 import pyro
7 import pyro.contrib.gp as gp
8 from pyro.infer import TraceEnum_ELBO
9
10
11 class GPBayesOptimizer(pyro.optim.multi.MultiOptimizer):
12 """Performs Bayesian Optimization using a Gaussian Process as an
13 emulator for the unknown function.
14 """
15
16 def __init__(self, constraints, gpmodel, num_acquisitions, acquisition_func=None):
17 """
18 :param torch.constraint constraints: constraints defining the domain of `f`
19 :param gp.models.GPRegression gpmodel: a (possibly initialized) GP
20 regression model. The kernel, etc is specified via `gpmodel`.
21 :param int num_acquisitions: number of points to acquire at each step
22 :param function acquisition_func: a function to generate acquisitions.
23 It should return a torch.Tensor of new points to query.
24 """
25 if acquisition_func is None:
26 acquisition_func = self.acquire_thompson
27
28 self.constraints = constraints
29 self.gpmodel = gpmodel
30 self.num_acquisitions = num_acquisitions
31 self.acquisition_func = acquisition_func
32
33 def update_posterior(self, X, y):
34 X = torch.cat([self.gpmodel.X, X])
35 y = torch.cat([self.gpmodel.y, y])
36 self.gpmodel.set_data(X, y)
37 optimizer = torch.optim.Adam(self.gpmodel.parameters(), lr=0.001)
38 gp.util.train(self.gpmodel, optimizer,
39 loss_fn=TraceEnum_ELBO(strict_enumeration_warning=False).differentiable_loss,
40 retain_graph=True)
41
42 def find_a_candidate(self, differentiable, x_init):
43 """Given a starting point, `x_init`, takes one LBFGS step
44 to optimize the differentiable function.
45
46 :param function differentiable: a function amenable to torch
47 autograd
48 :param torch.Tensor x_init: the initial point
49
50 """
51 # transform x to an unconstrained domain
52 unconstrained_x_init = transform_to(self.constraints).inv(x_init)
53 unconstrained_x = unconstrained_x_init.detach().clone().requires_grad_(True)
54 # TODO: Use LBFGS with line search by pytorch #8824 merged
55 minimizer = optim.LBFGS([unconstrained_x], max_eval=20)
56
57 def closure():
58 minimizer.zero_grad()
59 if (torch.log(torch.abs(unconstrained_x)) > 25.).any():
60 return torch.tensor(float('inf'))
61 x = transform_to(self.constraints)(unconstrained_x)
62 y = differentiable(x)
63 autograd.backward(unconstrained_x,
64 autograd.grad(y, unconstrained_x, retain_graph=True))
65 return y
66
67 minimizer.step(closure)
68 # after finding a candidate in the unconstrained domain,
69 # convert it back to original domain.
70 x = transform_to(self.constraints)(unconstrained_x)
71 opt_y = differentiable(x)
72 return x.detach(), opt_y.detach()
73
74 def opt_differentiable(self, differentiable, num_candidates=5):
75 """Optimizes a differentiable function by choosing `num_candidates`
76 initial points at random and calling :func:`find_a_candidate` on
77 each. The best candidate is returned with its function value.
78
79 :param function differentiable: a function amenable to torch autograd
80 :param int num_candidates: the number of random starting points to
81 use
82 :return: the minimiser and its function value
83 :rtype: tuple
84 """
85
86 candidates = []
87 values = []
88 for j in range(num_candidates):
89 x_init = self.gpmodel.X.new_empty(1).uniform_(
90 self.constraints.lower_bound, self.constraints.upper_bound)
91 x, y = self.find_a_candidate(differentiable, x_init)
92 if torch.isnan(y):
93 continue
94 candidates.append(x)
95 values.append(y)
96
97 mvalue, argmin = torch.min(torch.cat(values), dim=0)
98 return candidates[argmin.item()], mvalue
99
100 def acquire_thompson(self, num_acquisitions=1, **opt_params):
101 """Selects `num_acquisitions` query points at which to query the
102 original function by Thompson sampling.
103
104 :param int num_acquisitions: the number of points to generate
105 :param dict opt_params: additional parameters for optimization
106 routines
107 :return: a tensor of points to evaluate `loss` at
108 :rtype: torch.Tensor
109 """
110
111 # Initialize the return tensor
112 X = self.gpmodel.X.new_empty(num_acquisitions, *self.gpmodel.X.shape[1:])
113
114 for i in range(num_acquisitions):
115 sampler = self.gpmodel.iter_sample(noiseless=False)
116 x, _ = self.opt_differentiable(sampler, **opt_params)
117 X[i, ...] = x
118
119 return X
120
121 def get_step(self, loss, params, verbose=False):
122 X = self.acquisition_func(num_acquisitions=self.num_acquisitions)
123 y = loss(X)
124 if verbose:
125 print("Acquire at: X")
126 print(X)
127 print("y")
128 print(y)
129 self.update_posterior(X, y)
130 return self.opt_differentiable(lambda x: self.gpmodel(x)[0])
131
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/examples/contrib/oed/gp_bayes_opt.py b/examples/contrib/oed/gp_bayes_opt.py
--- a/examples/contrib/oed/gp_bayes_opt.py
+++ b/examples/contrib/oed/gp_bayes_opt.py
@@ -3,9 +3,9 @@
import torch.optim as optim
from torch.distributions import transform_to
-import pyro
import pyro.contrib.gp as gp
from pyro.infer import TraceEnum_ELBO
+import pyro.optim
class GPBayesOptimizer(pyro.optim.multi.MultiOptimizer):
|
{"golden_diff": "diff --git a/examples/contrib/oed/gp_bayes_opt.py b/examples/contrib/oed/gp_bayes_opt.py\n--- a/examples/contrib/oed/gp_bayes_opt.py\n+++ b/examples/contrib/oed/gp_bayes_opt.py\n@@ -3,9 +3,9 @@\n import torch.optim as optim\n from torch.distributions import transform_to\n \n-import pyro\n import pyro.contrib.gp as gp\n from pyro.infer import TraceEnum_ELBO\n+import pyro.optim\n \n \n class GPBayesOptimizer(pyro.optim.multi.MultiOptimizer):\n", "issue": "make test: no attribute 'optim' error in 'examples/contrib/oed/ab_test.py' \n### Issue Description\r\nOn the latest dev branch, `make test` gives the following error:\r\n\r\n_\r\nexamples/contrib/oed/ab_test.py:12: in <module>\r\n from gp_bayes_opt import GPBayesOptimizer\r\nexamples/contrib/oed/gp_bayes_opt.py:11: in <module>\r\n class GPBayesOptimizer(pyro.optim.multi.MultiOptimizer):\r\nE AttributeError: module 'pyro' has no attribute 'optim'\r\n\r\n### Environment\r\nFor any bugs, please provide the following:\r\n - OS and python version: CentOS Linux 7 (Core); Python 3.7.1\r\n - PyTorch version, or if relevant, output of `pip freeze`: PyTorch 1.0.0\r\n - Pyro version: output of `python -c 'import pyro; print pyro.__version__'`: pyro 0.3.0+9adbdb7\r\n\r\n### Code Snippet\r\n\r\n```\r\nmake install\r\nmake format\r\nmake test\r\n```\r\n\n", "before_files": [{"content": "import torch\nimport torch.autograd as autograd\nimport torch.optim as optim\nfrom torch.distributions import transform_to\n\nimport pyro\nimport pyro.contrib.gp as gp\nfrom pyro.infer import TraceEnum_ELBO\n\n\nclass GPBayesOptimizer(pyro.optim.multi.MultiOptimizer):\n \"\"\"Performs Bayesian Optimization using a Gaussian Process as an\n emulator for the unknown function.\n \"\"\"\n\n def __init__(self, constraints, gpmodel, num_acquisitions, acquisition_func=None):\n \"\"\"\n :param torch.constraint constraints: constraints defining the domain of `f`\n :param gp.models.GPRegression gpmodel: a (possibly initialized) GP\n regression model. 
The kernel, etc is specified via `gpmodel`.\n :param int num_acquisitions: number of points to acquire at each step\n :param function acquisition_func: a function to generate acquisitions.\n It should return a torch.Tensor of new points to query.\n \"\"\"\n if acquisition_func is None:\n acquisition_func = self.acquire_thompson\n\n self.constraints = constraints\n self.gpmodel = gpmodel\n self.num_acquisitions = num_acquisitions\n self.acquisition_func = acquisition_func\n\n def update_posterior(self, X, y):\n X = torch.cat([self.gpmodel.X, X])\n y = torch.cat([self.gpmodel.y, y])\n self.gpmodel.set_data(X, y)\n optimizer = torch.optim.Adam(self.gpmodel.parameters(), lr=0.001)\n gp.util.train(self.gpmodel, optimizer,\n loss_fn=TraceEnum_ELBO(strict_enumeration_warning=False).differentiable_loss,\n retain_graph=True)\n\n def find_a_candidate(self, differentiable, x_init):\n \"\"\"Given a starting point, `x_init`, takes one LBFGS step\n to optimize the differentiable function.\n\n :param function differentiable: a function amenable to torch\n autograd\n :param torch.Tensor x_init: the initial point\n\n \"\"\"\n # transform x to an unconstrained domain\n unconstrained_x_init = transform_to(self.constraints).inv(x_init)\n unconstrained_x = unconstrained_x_init.detach().clone().requires_grad_(True)\n # TODO: Use LBFGS with line search by pytorch #8824 merged\n minimizer = optim.LBFGS([unconstrained_x], max_eval=20)\n\n def closure():\n minimizer.zero_grad()\n if (torch.log(torch.abs(unconstrained_x)) > 25.).any():\n return torch.tensor(float('inf'))\n x = transform_to(self.constraints)(unconstrained_x)\n y = differentiable(x)\n autograd.backward(unconstrained_x,\n autograd.grad(y, unconstrained_x, retain_graph=True))\n return y\n\n minimizer.step(closure)\n # after finding a candidate in the unconstrained domain,\n # convert it back to original domain.\n x = transform_to(self.constraints)(unconstrained_x)\n opt_y = differentiable(x)\n return x.detach(), opt_y.detach()\n\n def opt_differentiable(self, differentiable, num_candidates=5):\n \"\"\"Optimizes a differentiable function by choosing `num_candidates`\n initial points at random and calling :func:`find_a_candidate` on\n each. 
The best candidate is returned with its function value.\n\n :param function differentiable: a function amenable to torch autograd\n :param int num_candidates: the number of random starting points to\n use\n :return: the minimiser and its function value\n :rtype: tuple\n \"\"\"\n\n candidates = []\n values = []\n for j in range(num_candidates):\n x_init = self.gpmodel.X.new_empty(1).uniform_(\n self.constraints.lower_bound, self.constraints.upper_bound)\n x, y = self.find_a_candidate(differentiable, x_init)\n if torch.isnan(y):\n continue\n candidates.append(x)\n values.append(y)\n\n mvalue, argmin = torch.min(torch.cat(values), dim=0)\n return candidates[argmin.item()], mvalue\n\n def acquire_thompson(self, num_acquisitions=1, **opt_params):\n \"\"\"Selects `num_acquisitions` query points at which to query the\n original function by Thompson sampling.\n\n :param int num_acquisitions: the number of points to generate\n :param dict opt_params: additional parameters for optimization\n routines\n :return: a tensor of points to evaluate `loss` at\n :rtype: torch.Tensor\n \"\"\"\n\n # Initialize the return tensor\n X = self.gpmodel.X.new_empty(num_acquisitions, *self.gpmodel.X.shape[1:])\n\n for i in range(num_acquisitions):\n sampler = self.gpmodel.iter_sample(noiseless=False)\n x, _ = self.opt_differentiable(sampler, **opt_params)\n X[i, ...] = x\n\n return X\n\n def get_step(self, loss, params, verbose=False):\n X = self.acquisition_func(num_acquisitions=self.num_acquisitions)\n y = loss(X)\n if verbose:\n print(\"Acquire at: X\")\n print(X)\n print(\"y\")\n print(y)\n self.update_posterior(X, y)\n return self.opt_differentiable(lambda x: self.gpmodel(x)[0])\n", "path": "examples/contrib/oed/gp_bayes_opt.py"}], "after_files": [{"content": "import torch\nimport torch.autograd as autograd\nimport torch.optim as optim\nfrom torch.distributions import transform_to\n\nimport pyro.contrib.gp as gp\nfrom pyro.infer import TraceEnum_ELBO\nimport pyro.optim\n\n\nclass GPBayesOptimizer(pyro.optim.multi.MultiOptimizer):\n \"\"\"Performs Bayesian Optimization using a Gaussian Process as an\n emulator for the unknown function.\n \"\"\"\n\n def __init__(self, constraints, gpmodel, num_acquisitions, acquisition_func=None):\n \"\"\"\n :param torch.constraint constraints: constraints defining the domain of `f`\n :param gp.models.GPRegression gpmodel: a (possibly initialized) GP\n regression model. 
The kernel, etc is specified via `gpmodel`.\n :param int num_acquisitions: number of points to acquire at each step\n :param function acquisition_func: a function to generate acquisitions.\n It should return a torch.Tensor of new points to query.\n \"\"\"\n if acquisition_func is None:\n acquisition_func = self.acquire_thompson\n\n self.constraints = constraints\n self.gpmodel = gpmodel\n self.num_acquisitions = num_acquisitions\n self.acquisition_func = acquisition_func\n\n def update_posterior(self, X, y):\n X = torch.cat([self.gpmodel.X, X])\n y = torch.cat([self.gpmodel.y, y])\n self.gpmodel.set_data(X, y)\n optimizer = torch.optim.Adam(self.gpmodel.parameters(), lr=0.001)\n gp.util.train(self.gpmodel, optimizer,\n loss_fn=TraceEnum_ELBO(strict_enumeration_warning=False).differentiable_loss,\n retain_graph=True)\n\n def find_a_candidate(self, differentiable, x_init):\n \"\"\"Given a starting point, `x_init`, takes one LBFGS step\n to optimize the differentiable function.\n\n :param function differentiable: a function amenable to torch\n autograd\n :param torch.Tensor x_init: the initial point\n\n \"\"\"\n # transform x to an unconstrained domain\n unconstrained_x_init = transform_to(self.constraints).inv(x_init)\n unconstrained_x = unconstrained_x_init.detach().clone().requires_grad_(True)\n # TODO: Use LBFGS with line search by pytorch #8824 merged\n minimizer = optim.LBFGS([unconstrained_x], max_eval=20)\n\n def closure():\n minimizer.zero_grad()\n if (torch.log(torch.abs(unconstrained_x)) > 25.).any():\n return torch.tensor(float('inf'))\n x = transform_to(self.constraints)(unconstrained_x)\n y = differentiable(x)\n autograd.backward(unconstrained_x,\n autograd.grad(y, unconstrained_x, retain_graph=True))\n return y\n\n minimizer.step(closure)\n # after finding a candidate in the unconstrained domain,\n # convert it back to original domain.\n x = transform_to(self.constraints)(unconstrained_x)\n opt_y = differentiable(x)\n return x.detach(), opt_y.detach()\n\n def opt_differentiable(self, differentiable, num_candidates=5):\n \"\"\"Optimizes a differentiable function by choosing `num_candidates`\n initial points at random and calling :func:`find_a_candidate` on\n each. 
The best candidate is returned with its function value.\n\n :param function differentiable: a function amenable to torch autograd\n :param int num_candidates: the number of random starting points to\n use\n :return: the minimiser and its function value\n :rtype: tuple\n \"\"\"\n\n candidates = []\n values = []\n for j in range(num_candidates):\n x_init = self.gpmodel.X.new_empty(1).uniform_(\n self.constraints.lower_bound, self.constraints.upper_bound)\n x, y = self.find_a_candidate(differentiable, x_init)\n if torch.isnan(y):\n continue\n candidates.append(x)\n values.append(y)\n\n mvalue, argmin = torch.min(torch.cat(values), dim=0)\n return candidates[argmin.item()], mvalue\n\n def acquire_thompson(self, num_acquisitions=1, **opt_params):\n \"\"\"Selects `num_acquisitions` query points at which to query the\n original function by Thompson sampling.\n\n :param int num_acquisitions: the number of points to generate\n :param dict opt_params: additional parameters for optimization\n routines\n :return: a tensor of points to evaluate `loss` at\n :rtype: torch.Tensor\n \"\"\"\n\n # Initialize the return tensor\n X = self.gpmodel.X.new_empty(num_acquisitions, *self.gpmodel.X.shape[1:])\n\n for i in range(num_acquisitions):\n sampler = self.gpmodel.iter_sample(noiseless=False)\n x, _ = self.opt_differentiable(sampler, **opt_params)\n X[i, ...] = x\n\n return X\n\n def get_step(self, loss, params, verbose=False):\n X = self.acquisition_func(num_acquisitions=self.num_acquisitions)\n y = loss(X)\n if verbose:\n print(\"Acquire at: X\")\n print(X)\n print(\"y\")\n print(y)\n self.update_posterior(X, y)\n return self.opt_differentiable(lambda x: self.gpmodel(x)[0])\n", "path": "examples/contrib/oed/gp_bayes_opt.py"}]}
| 1,942 | 127 |
gh_patches_debug_23563
|
rasdani/github-patches
|
git_diff
|
getsentry__snuba-558
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support Redis Authentication
I'm trying to install Snuba on my Kubernetes instance alongside Sentry.
Sentry's Helm chart installs Redis with a password (It generates a secret), and there was no option for me to specify that password for Snuba.
I opened up the source code and it looks like a simple solution:
Another setting (REDIS_PASSWORD) that would be passed to startup_nodes and to StrictRedis' constructor on the snuba/redis.py module.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `snuba/settings_docker.py`
Content:
```
1 import os
2 from snuba.settings_base import *
3
4 env = os.environ.get
5
6 DEBUG = env('DEBUG', '0').lower() in ('1', 'true')
7
8 DEFAULT_BROKERS = env('DEFAULT_BROKERS', 'localhost:9092').split(',')
9
10 REDIS_HOST = env('REDIS_HOST', 'localhost')
11 REDIS_PORT = int(env('REDIS_PORT', 6379))
12 REDIS_DB = int(env('REDIS_DB', 1))
13 USE_REDIS_CLUSTER = False
14
```
Path: `snuba/settings_base.py`
Content:
```
1 import os
2
3 LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO')
4
5 TESTING = False
6 DEBUG = True
7
8 PORT = 1218
9
10 DEFAULT_DATASET_NAME = 'events'
11 DISABLED_DATASETS = {}
12 DATASET_MODE = 'local'
13
14 # Clickhouse Options
15 # TODO: Warn about using `CLICKHOUSE_SERVER`, users should use the new settings instead.
16 [default_clickhouse_host, default_clickhouse_port] = os.environ.get('CLICKHOUSE_SERVER', 'localhost:9000').split(':', 1)
17 CLICKHOUSE_HOST = os.environ.get('CLICKHOUSE_HOST', default_clickhouse_host)
18 CLICKHOUSE_PORT = int(os.environ.get('CLICKHOUSE_PORT', default_clickhouse_port))
19 CLICKHOUSE_HTTP_PORT = int(os.environ.get('CLICKHOUSE_HTTP_PORT', 8123))
20 CLICKHOUSE_MAX_POOL_SIZE = 25
21
22 # Dogstatsd Options
23 DOGSTATSD_HOST = 'localhost'
24 DOGSTATSD_PORT = 8125
25
26 # Redis Options
27 USE_REDIS_CLUSTER = False
28 REDIS_CLUSTER_STARTUP_NODES = None
29 REDIS_HOST = os.environ.get('REDIS_HOST', 'localhost')
30 REDIS_PORT = 6379
31 REDIS_DB = 1
32
33 # Query Recording Options
34 RECORD_QUERIES = False
35 QUERIES_TOPIC = 'snuba-queries'
36
37 # Runtime Config Options
38 CONFIG_MEMOIZE_TIMEOUT = 10
39
40 # Sentry Options
41 SENTRY_DSN = None
42
43 # Snuba Options
44
45 SNAPSHOT_LOAD_PRODUCT = 'snuba'
46
47 SNAPSHOT_CONTROL_TOPIC_INIT_TIMEOUT = 30
48 BULK_CLICKHOUSE_BUFFER = 10000
49
50 # Processor/Writer Options
51 DEFAULT_BROKERS = ['localhost:9092']
52 DEFAULT_DATASET_BROKERS = {}
53
54 DEFAULT_MAX_BATCH_SIZE = 50000
55 DEFAULT_MAX_BATCH_TIME_MS = 2 * 1000
56 DEFAULT_QUEUED_MAX_MESSAGE_KBYTES = 10000
57 DEFAULT_QUEUED_MIN_MESSAGES = 10000
58 DISCARD_OLD_EVENTS = True
59
60 DEFAULT_RETENTION_DAYS = 90
61 RETENTION_OVERRIDES = {}
62
63 MAX_PREWHERE_CONDITIONS = 1
64
65 STATS_IN_RESPONSE = False
66
67 PAYLOAD_DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
68
69 REPLACER_MAX_BLOCK_SIZE = 512
70 REPLACER_MAX_MEMORY_USAGE = 10 * (1024**3) # 10GB
71 # TLL of Redis key that denotes whether a project had replacements
72 # run recently. Useful for decidig whether or not to add FINAL clause
73 # to queries.
74 REPLACER_KEY_TTL = 12 * 60 * 60
75 REPLACER_MAX_GROUP_IDS_TO_EXCLUDE = 256
76
77 TURBO_SAMPLE_RATE = 0.1
78
```
Path: `snuba/redis.py`
Content:
```
1 from __future__ import absolute_import
2
3 from redis.client import StrictRedis
4 from redis.exceptions import BusyLoadingError, ConnectionError
5 from rediscluster import StrictRedisCluster
6
7 from snuba import settings
8
9
10 class RetryingStrictRedisCluster(StrictRedisCluster):
11 """
12 Execute a command with cluster reinitialization retry logic.
13 Should a cluster respond with a ConnectionError or BusyLoadingError the
14 cluster nodes list will be reinitialized and the command will be executed
15 again with the most up to date view of the world.
16 """
17
18 def execute_command(self, *args, **kwargs):
19 try:
20 return super(self.__class__, self).execute_command(*args, **kwargs)
21 except (
22 ConnectionError,
23 BusyLoadingError,
24 KeyError # see: https://github.com/Grokzen/redis-py-cluster/issues/287
25 ):
26 self.connection_pool.nodes.reset()
27 return super(self.__class__, self).execute_command(*args, **kwargs)
28
29
30 if settings.USE_REDIS_CLUSTER:
31 startup_nodes = settings.REDIS_CLUSTER_STARTUP_NODES
32 if startup_nodes is None:
33 startup_nodes = [{
34 'host': settings.REDIS_HOST,
35 'port': settings.REDIS_PORT,
36 }]
37 redis_client = RetryingStrictRedisCluster(
38 startup_nodes=startup_nodes,
39 socket_keepalive=True,
40 )
41 else:
42 redis_client = StrictRedis(
43 host=settings.REDIS_HOST,
44 port=settings.REDIS_PORT,
45 db=settings.REDIS_DB,
46 socket_keepalive=True,
47 )
48
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/snuba/redis.py b/snuba/redis.py
--- a/snuba/redis.py
+++ b/snuba/redis.py
@@ -37,11 +37,13 @@
redis_client = RetryingStrictRedisCluster(
startup_nodes=startup_nodes,
socket_keepalive=True,
+ password=settings.REDIS_PASSWORD,
)
else:
redis_client = StrictRedis(
host=settings.REDIS_HOST,
port=settings.REDIS_PORT,
+ password=settings.REDIS_PASSWORD,
db=settings.REDIS_DB,
socket_keepalive=True,
)
diff --git a/snuba/settings_base.py b/snuba/settings_base.py
--- a/snuba/settings_base.py
+++ b/snuba/settings_base.py
@@ -28,6 +28,7 @@
REDIS_CLUSTER_STARTUP_NODES = None
REDIS_HOST = os.environ.get('REDIS_HOST', 'localhost')
REDIS_PORT = 6379
+REDIS_PASSWORD = None
REDIS_DB = 1
# Query Recording Options
diff --git a/snuba/settings_docker.py b/snuba/settings_docker.py
--- a/snuba/settings_docker.py
+++ b/snuba/settings_docker.py
@@ -9,5 +9,6 @@
REDIS_HOST = env('REDIS_HOST', 'localhost')
REDIS_PORT = int(env('REDIS_PORT', 6379))
+REDIS_PASSWORD = env('REDIS_PASSWORD')
REDIS_DB = int(env('REDIS_DB', 1))
USE_REDIS_CLUSTER = False
|
{"golden_diff": "diff --git a/snuba/redis.py b/snuba/redis.py\n--- a/snuba/redis.py\n+++ b/snuba/redis.py\n@@ -37,11 +37,13 @@\n redis_client = RetryingStrictRedisCluster(\n startup_nodes=startup_nodes,\n socket_keepalive=True,\n+ password=settings.REDIS_PASSWORD,\n )\n else:\n redis_client = StrictRedis(\n host=settings.REDIS_HOST,\n port=settings.REDIS_PORT,\n+ password=settings.REDIS_PASSWORD,\n db=settings.REDIS_DB,\n socket_keepalive=True,\n )\ndiff --git a/snuba/settings_base.py b/snuba/settings_base.py\n--- a/snuba/settings_base.py\n+++ b/snuba/settings_base.py\n@@ -28,6 +28,7 @@\n REDIS_CLUSTER_STARTUP_NODES = None\n REDIS_HOST = os.environ.get('REDIS_HOST', 'localhost')\n REDIS_PORT = 6379\n+REDIS_PASSWORD = None\n REDIS_DB = 1\n \n # Query Recording Options\ndiff --git a/snuba/settings_docker.py b/snuba/settings_docker.py\n--- a/snuba/settings_docker.py\n+++ b/snuba/settings_docker.py\n@@ -9,5 +9,6 @@\n \n REDIS_HOST = env('REDIS_HOST', 'localhost')\n REDIS_PORT = int(env('REDIS_PORT', 6379))\n+REDIS_PASSWORD = env('REDIS_PASSWORD')\n REDIS_DB = int(env('REDIS_DB', 1))\n USE_REDIS_CLUSTER = False\n", "issue": "Support Redis Authentication\nI'm trying to install Snuba on my Kubernetes instance alongside Sentry.\r\nSentry's Helm chart installs Redis with a password (It generates a secret), and there was no option for me to specify that password for Snuba.\r\n\r\nI opened up the source code and it looks like a simple solution: \r\nAnother setting (REDIS_PASSWORD) that would be passed to startup_nodes and to StrictRedis' constructor on the snuba/redis.py module.\n", "before_files": [{"content": "import os\nfrom snuba.settings_base import *\n\nenv = os.environ.get\n\nDEBUG = env('DEBUG', '0').lower() in ('1', 'true')\n\nDEFAULT_BROKERS = env('DEFAULT_BROKERS', 'localhost:9092').split(',')\n\nREDIS_HOST = env('REDIS_HOST', 'localhost')\nREDIS_PORT = int(env('REDIS_PORT', 6379))\nREDIS_DB = int(env('REDIS_DB', 1))\nUSE_REDIS_CLUSTER = False\n", "path": "snuba/settings_docker.py"}, {"content": "import os\n\nLOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO')\n\nTESTING = False\nDEBUG = True\n\nPORT = 1218\n\nDEFAULT_DATASET_NAME = 'events'\nDISABLED_DATASETS = {}\nDATASET_MODE = 'local'\n\n# Clickhouse Options\n# TODO: Warn about using `CLICKHOUSE_SERVER`, users should use the new settings instead.\n[default_clickhouse_host, default_clickhouse_port] = os.environ.get('CLICKHOUSE_SERVER', 'localhost:9000').split(':', 1)\nCLICKHOUSE_HOST = os.environ.get('CLICKHOUSE_HOST', default_clickhouse_host)\nCLICKHOUSE_PORT = int(os.environ.get('CLICKHOUSE_PORT', default_clickhouse_port))\nCLICKHOUSE_HTTP_PORT = int(os.environ.get('CLICKHOUSE_HTTP_PORT', 8123))\nCLICKHOUSE_MAX_POOL_SIZE = 25\n\n# Dogstatsd Options\nDOGSTATSD_HOST = 'localhost'\nDOGSTATSD_PORT = 8125\n\n# Redis Options\nUSE_REDIS_CLUSTER = False\nREDIS_CLUSTER_STARTUP_NODES = None\nREDIS_HOST = os.environ.get('REDIS_HOST', 'localhost')\nREDIS_PORT = 6379\nREDIS_DB = 1\n\n# Query Recording Options\nRECORD_QUERIES = False\nQUERIES_TOPIC = 'snuba-queries'\n\n# Runtime Config Options\nCONFIG_MEMOIZE_TIMEOUT = 10\n\n# Sentry Options\nSENTRY_DSN = None\n\n# Snuba Options\n\nSNAPSHOT_LOAD_PRODUCT = 'snuba'\n\nSNAPSHOT_CONTROL_TOPIC_INIT_TIMEOUT = 30\nBULK_CLICKHOUSE_BUFFER = 10000\n\n# Processor/Writer Options\nDEFAULT_BROKERS = ['localhost:9092']\nDEFAULT_DATASET_BROKERS = {}\n\nDEFAULT_MAX_BATCH_SIZE = 50000\nDEFAULT_MAX_BATCH_TIME_MS = 2 * 1000\nDEFAULT_QUEUED_MAX_MESSAGE_KBYTES = 10000\nDEFAULT_QUEUED_MIN_MESSAGES = 10000\nDISCARD_OLD_EVENTS 
= True\n\nDEFAULT_RETENTION_DAYS = 90\nRETENTION_OVERRIDES = {}\n\nMAX_PREWHERE_CONDITIONS = 1\n\nSTATS_IN_RESPONSE = False\n\nPAYLOAD_DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%S.%fZ\"\n\nREPLACER_MAX_BLOCK_SIZE = 512\nREPLACER_MAX_MEMORY_USAGE = 10 * (1024**3) # 10GB\n# TLL of Redis key that denotes whether a project had replacements\n# run recently. Useful for decidig whether or not to add FINAL clause\n# to queries.\nREPLACER_KEY_TTL = 12 * 60 * 60\nREPLACER_MAX_GROUP_IDS_TO_EXCLUDE = 256\n\nTURBO_SAMPLE_RATE = 0.1\n", "path": "snuba/settings_base.py"}, {"content": "from __future__ import absolute_import\n\nfrom redis.client import StrictRedis\nfrom redis.exceptions import BusyLoadingError, ConnectionError\nfrom rediscluster import StrictRedisCluster\n\nfrom snuba import settings\n\n\nclass RetryingStrictRedisCluster(StrictRedisCluster):\n \"\"\"\n Execute a command with cluster reinitialization retry logic.\n Should a cluster respond with a ConnectionError or BusyLoadingError the\n cluster nodes list will be reinitialized and the command will be executed\n again with the most up to date view of the world.\n \"\"\"\n\n def execute_command(self, *args, **kwargs):\n try:\n return super(self.__class__, self).execute_command(*args, **kwargs)\n except (\n ConnectionError,\n BusyLoadingError,\n KeyError # see: https://github.com/Grokzen/redis-py-cluster/issues/287\n ):\n self.connection_pool.nodes.reset()\n return super(self.__class__, self).execute_command(*args, **kwargs)\n\n\nif settings.USE_REDIS_CLUSTER:\n startup_nodes = settings.REDIS_CLUSTER_STARTUP_NODES\n if startup_nodes is None:\n startup_nodes = [{\n 'host': settings.REDIS_HOST,\n 'port': settings.REDIS_PORT,\n }]\n redis_client = RetryingStrictRedisCluster(\n startup_nodes=startup_nodes,\n socket_keepalive=True,\n )\nelse:\n redis_client = StrictRedis(\n host=settings.REDIS_HOST,\n port=settings.REDIS_PORT,\n db=settings.REDIS_DB,\n socket_keepalive=True,\n )\n", "path": "snuba/redis.py"}], "after_files": [{"content": "import os\nfrom snuba.settings_base import *\n\nenv = os.environ.get\n\nDEBUG = env('DEBUG', '0').lower() in ('1', 'true')\n\nDEFAULT_BROKERS = env('DEFAULT_BROKERS', 'localhost:9092').split(',')\n\nREDIS_HOST = env('REDIS_HOST', 'localhost')\nREDIS_PORT = int(env('REDIS_PORT', 6379))\nREDIS_PASSWORD = env('REDIS_PASSWORD')\nREDIS_DB = int(env('REDIS_DB', 1))\nUSE_REDIS_CLUSTER = False\n", "path": "snuba/settings_docker.py"}, {"content": "import os\n\nLOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO')\n\nTESTING = False\nDEBUG = True\n\nPORT = 1218\n\nDEFAULT_DATASET_NAME = 'events'\nDISABLED_DATASETS = {}\nDATASET_MODE = 'local'\n\n# Clickhouse Options\n# TODO: Warn about using `CLICKHOUSE_SERVER`, users should use the new settings instead.\n[default_clickhouse_host, default_clickhouse_port] = os.environ.get('CLICKHOUSE_SERVER', 'localhost:9000').split(':', 1)\nCLICKHOUSE_HOST = os.environ.get('CLICKHOUSE_HOST', default_clickhouse_host)\nCLICKHOUSE_PORT = int(os.environ.get('CLICKHOUSE_PORT', default_clickhouse_port))\nCLICKHOUSE_HTTP_PORT = int(os.environ.get('CLICKHOUSE_HTTP_PORT', 8123))\nCLICKHOUSE_MAX_POOL_SIZE = 25\n\n# Dogstatsd Options\nDOGSTATSD_HOST = 'localhost'\nDOGSTATSD_PORT = 8125\n\n# Redis Options\nUSE_REDIS_CLUSTER = False\nREDIS_CLUSTER_STARTUP_NODES = None\nREDIS_HOST = os.environ.get('REDIS_HOST', 'localhost')\nREDIS_PORT = 6379\nREDIS_PASSWORD = None\nREDIS_DB = 1\n\n# Query Recording Options\nRECORD_QUERIES = False\nQUERIES_TOPIC = 'snuba-queries'\n\n# Runtime Config Options\nCONFIG_MEMOIZE_TIMEOUT 
= 10\n\n# Sentry Options\nSENTRY_DSN = None\n\n# Snuba Options\n\nSNAPSHOT_LOAD_PRODUCT = 'snuba'\n\nSNAPSHOT_CONTROL_TOPIC_INIT_TIMEOUT = 30\nBULK_CLICKHOUSE_BUFFER = 10000\n\n# Processor/Writer Options\nDEFAULT_BROKERS = ['localhost:9092']\nDEFAULT_DATASET_BROKERS = {}\n\nDEFAULT_MAX_BATCH_SIZE = 50000\nDEFAULT_MAX_BATCH_TIME_MS = 2 * 1000\nDEFAULT_QUEUED_MAX_MESSAGE_KBYTES = 10000\nDEFAULT_QUEUED_MIN_MESSAGES = 10000\nDISCARD_OLD_EVENTS = True\n\nDEFAULT_RETENTION_DAYS = 90\nRETENTION_OVERRIDES = {}\n\nMAX_PREWHERE_CONDITIONS = 1\n\nSTATS_IN_RESPONSE = False\n\nPAYLOAD_DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%S.%fZ\"\n\nREPLACER_MAX_BLOCK_SIZE = 512\nREPLACER_MAX_MEMORY_USAGE = 10 * (1024**3) # 10GB\n# TLL of Redis key that denotes whether a project had replacements\n# run recently. Useful for decidig whether or not to add FINAL clause\n# to queries.\nREPLACER_KEY_TTL = 12 * 60 * 60\nREPLACER_MAX_GROUP_IDS_TO_EXCLUDE = 256\n\nTURBO_SAMPLE_RATE = 0.1\n", "path": "snuba/settings_base.py"}, {"content": "from __future__ import absolute_import\n\nfrom redis.client import StrictRedis\nfrom redis.exceptions import BusyLoadingError, ConnectionError\nfrom rediscluster import StrictRedisCluster\n\nfrom snuba import settings\n\n\nclass RetryingStrictRedisCluster(StrictRedisCluster):\n \"\"\"\n Execute a command with cluster reinitialization retry logic.\n Should a cluster respond with a ConnectionError or BusyLoadingError the\n cluster nodes list will be reinitialized and the command will be executed\n again with the most up to date view of the world.\n \"\"\"\n\n def execute_command(self, *args, **kwargs):\n try:\n return super(self.__class__, self).execute_command(*args, **kwargs)\n except (\n ConnectionError,\n BusyLoadingError,\n KeyError # see: https://github.com/Grokzen/redis-py-cluster/issues/287\n ):\n self.connection_pool.nodes.reset()\n return super(self.__class__, self).execute_command(*args, **kwargs)\n\n\nif settings.USE_REDIS_CLUSTER:\n startup_nodes = settings.REDIS_CLUSTER_STARTUP_NODES\n if startup_nodes is None:\n startup_nodes = [{\n 'host': settings.REDIS_HOST,\n 'port': settings.REDIS_PORT,\n }]\n redis_client = RetryingStrictRedisCluster(\n startup_nodes=startup_nodes,\n socket_keepalive=True,\n password=settings.REDIS_PASSWORD,\n )\nelse:\n redis_client = StrictRedis(\n host=settings.REDIS_HOST,\n port=settings.REDIS_PORT,\n password=settings.REDIS_PASSWORD,\n db=settings.REDIS_DB,\n socket_keepalive=True,\n )\n", "path": "snuba/redis.py"}]}
| 1,698 | 324 |
gh_patches_debug_40257
|
rasdani/github-patches
|
git_diff
|
great-expectations__great_expectations-4042
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
S3 prefixes get broken when running on Windows
**Describe the bug**
On Windows when referencing an S3 data source, the prefix is suffixed with ``\`` instead of ``/``, resulting in query always failing.
**To Reproduce**
Create a ``InferredAssetS3DataConnector`` data connector, and pass any valid prefix (probably needs to be non-empty). Then, attempt to load a file from that connector.
**Expected behavior**
The file gets loaded properly.
**Error**
```
...\lib\site-packages\great_expectations\datasource\data_connector\util.py in list_s3_keys(s3, query_options, iterator_dict, recursive)
452
453 if not any(key in s3_objects_info for key in ["Contents", "CommonPrefixes"]):
--> 454 raise ValueError("S3 query may not have been configured correctly.")
455
456 if "Contents" in s3_objects_info:
ValueError: S3 query may not have been configured correctly.
```
**Fix**
In ``inferred_asset_s3_data_connector.py``, line 82 is:
```
self._prefix = FilePathDataConnector.sanitize_prefix(prefix)
```
This ensures that the file path is always a folder, by appending a slash to the end. However, on Windows, the slash is ``\``, and thus you end up with a prefix like ``path/to/some/folder\``, which results in an S3 query with no results, hence the above stacktrace.
The fix is to ensure that only the correct slash can be appended to the prefix, regardless of the host OS. For example:
```
self._prefix = prefix.rstrip('/') + '/'
```
or something to that effect.
**Environment (please complete the following information):**
- Operating System: Windows
- Great Expectations Version: 0.14.1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `great_expectations/datasource/data_connector/configured_asset_s3_data_connector.py`
Content:
```
1 import logging
2 import os
3 from typing import List, Optional
4
5 try:
6 import boto3
7 except ImportError:
8 boto3 = None
9
10 from great_expectations.core.batch import BatchDefinition
11 from great_expectations.core.batch_spec import PathBatchSpec, S3BatchSpec
12 from great_expectations.datasource.data_connector.asset import Asset
13 from great_expectations.datasource.data_connector.configured_asset_file_path_data_connector import (
14 ConfiguredAssetFilePathDataConnector,
15 )
16 from great_expectations.datasource.data_connector.file_path_data_connector import (
17 FilePathDataConnector,
18 )
19 from great_expectations.datasource.data_connector.util import list_s3_keys
20 from great_expectations.execution_engine import ExecutionEngine
21
22 logger = logging.getLogger(__name__)
23
24
25 class ConfiguredAssetS3DataConnector(ConfiguredAssetFilePathDataConnector):
26 """
27 Extension of ConfiguredAssetFilePathDataConnector used to connect to S3
28
29 DataConnectors produce identifying information, called "batch_spec" that ExecutionEngines
30 can use to get individual batches of data. They add flexibility in how to obtain data
31 such as with time-based partitioning, downsampling, or other techniques appropriate
32 for the Datasource.
33
34 The ConfiguredAssetS3DataConnector is one of two classes (InferredAssetS3DataConnector being the
35 other one) designed for connecting to data on S3.
36
37 A ConfiguredAssetS3DataConnector requires an explicit listing of each DataAsset you want to connect to.
38 This allows more fine-tuning, but also requires more setup.
39 """
40
41 def __init__(
42 self,
43 name: str,
44 datasource_name: str,
45 bucket: str,
46 assets: dict,
47 execution_engine: Optional[ExecutionEngine] = None,
48 default_regex: Optional[dict] = None,
49 sorters: Optional[list] = None,
50 prefix: str = "",
51 delimiter: str = "/",
52 max_keys: int = 1000,
53 boto3_options: Optional[dict] = None,
54 batch_spec_passthrough: Optional[dict] = None,
55 ):
56 """
57 ConfiguredAssetDataConnector for connecting to S3.
58
59 Args:
60 name (str): required name for DataConnector
61 datasource_name (str): required name for datasource
62 bucket (str): bucket for S3
63 assets (dict): dict of asset configuration (required for ConfiguredAssetDataConnector)
64 execution_engine (ExecutionEngine): optional reference to ExecutionEngine
65 default_regex (dict): optional regex configuration for filtering data_references
66 sorters (list): optional list of sorters for sorting data_references
67 prefix (str): S3 prefix
68 delimiter (str): S3 delimiter
69 max_keys (int): S3 max_keys (default is 1000)
70 boto3_options (dict): optional boto3 options
71 batch_spec_passthrough (dict): dictionary with keys that will be added directly to batch_spec
72 """
73 logger.debug(f'Constructing ConfiguredAssetS3DataConnector "{name}".')
74
75 super().__init__(
76 name=name,
77 datasource_name=datasource_name,
78 execution_engine=execution_engine,
79 assets=assets,
80 default_regex=default_regex,
81 sorters=sorters,
82 batch_spec_passthrough=batch_spec_passthrough,
83 )
84 self._bucket = bucket
85 self._prefix = FilePathDataConnector.sanitize_prefix(prefix)
86 self._delimiter = delimiter
87 self._max_keys = max_keys
88
89 if boto3_options is None:
90 boto3_options = {}
91
92 try:
93 self._s3 = boto3.client("s3", **boto3_options)
94 except (TypeError, AttributeError):
95 raise ImportError(
96 "Unable to load boto3 (it is required for ConfiguredAssetS3DataConnector)."
97 )
98
99 def build_batch_spec(self, batch_definition: BatchDefinition) -> S3BatchSpec:
100 """
101 Build BatchSpec from batch_definition by calling DataConnector's build_batch_spec function.
102
103 Args:
104 batch_definition (BatchDefinition): to be used to build batch_spec
105
106 Returns:
107 BatchSpec built from batch_definition
108 """
109 batch_spec: PathBatchSpec = super().build_batch_spec(
110 batch_definition=batch_definition
111 )
112 return S3BatchSpec(batch_spec)
113
114 def _get_data_reference_list_for_asset(self, asset: Optional[Asset]) -> List[str]:
115 query_options: dict = {
116 "Bucket": self._bucket,
117 "Prefix": self._prefix,
118 "Delimiter": self._delimiter,
119 "MaxKeys": self._max_keys,
120 }
121 if asset is not None:
122 if asset.bucket:
123 query_options["Bucket"] = asset.bucket
124 if asset.prefix:
125 query_options["Prefix"] = asset.prefix
126 if asset.delimiter:
127 query_options["Delimiter"] = asset.delimiter
128 if asset.max_keys:
129 query_options["MaxKeys"] = asset.max_keys
130
131 path_list: List[str] = [
132 key
133 for key in list_s3_keys(
134 s3=self._s3,
135 query_options=query_options,
136 iterator_dict={},
137 recursive=False,
138 )
139 ]
140 return path_list
141
142 def _get_full_file_path_for_asset(
143 self, path: str, asset: Optional[Asset] = None
144 ) -> str:
145 # asset isn't used in this method.
146 # It's only kept for compatibility with parent methods.
147 template_arguments: dict = {
148 "bucket": self._bucket,
149 "path": path,
150 }
151 return self.execution_engine.resolve_data_reference(
152 data_connector_name=self.__class__.__name__,
153 template_arguments=template_arguments,
154 )
155
```
Path: `great_expectations/datasource/data_connector/inferred_asset_s3_data_connector.py`
Content:
```
1 import logging
2 from typing import List, Optional
3
4 import great_expectations.exceptions as ge_exceptions
5 from great_expectations.core.batch import BatchDefinition
6 from great_expectations.core.batch_spec import PathBatchSpec, S3BatchSpec
7 from great_expectations.datasource.data_connector.file_path_data_connector import (
8 FilePathDataConnector,
9 )
10
11 try:
12 import boto3
13 except ImportError:
14 boto3 = None
15
16 from great_expectations.datasource.data_connector.inferred_asset_file_path_data_connector import (
17 InferredAssetFilePathDataConnector,
18 )
19 from great_expectations.datasource.data_connector.util import list_s3_keys
20 from great_expectations.execution_engine import ExecutionEngine
21
22 logger = logging.getLogger(__name__)
23
24 INVALID_S3_CHARS = ["*"]
25
26
27 class InferredAssetS3DataConnector(InferredAssetFilePathDataConnector):
28 """
29 Extension of InferredAssetFilePathDataConnector used to connect to S3
30
31 The InferredAssetS3DataConnector is one of two classes (ConfiguredAssetS3DataConnector being the
32 other one) designed for connecting to filesystem-like data, more specifically files on S3. It connects to assets
33 inferred from bucket, prefix, and file name by default_regex.
34
35 InferredAssetS3DataConnector that operates on S3 buckets and determines
36 the data_asset_name implicitly (e.g., through the combination of the regular expressions pattern and group names)
37
38 """
39
40 def __init__(
41 self,
42 name: str,
43 datasource_name: str,
44 bucket: str,
45 execution_engine: Optional[ExecutionEngine] = None,
46 default_regex: Optional[dict] = None,
47 sorters: Optional[list] = None,
48 prefix: str = "",
49 delimiter: str = "/",
50 max_keys: int = 1000,
51 boto3_options: Optional[dict] = None,
52 batch_spec_passthrough: Optional[dict] = None,
53 ):
54 """
55 InferredAssetS3DataConnector for connecting to S3.
56
57 Args:
58 name (str): required name for data_connector
59 datasource_name (str): required name for datasource
60 bucket (str): bucket for S3
61 execution_engine (ExecutionEngine): optional reference to ExecutionEngine
62 default_regex (dict): optional regex configuration for filtering data_references
63 sorters (list): optional list of sorters for sorting data_references
64 prefix (str): S3 prefix
65 delimiter (str): S3 delimiter
66 max_keys (int): S3 max_keys (default is 1000)
67 boto3_options (dict): optional boto3 options
68 batch_spec_passthrough (dict): dictionary with keys that will be added directly to batch_spec
69 """
70 logger.debug(f'Constructing InferredAssetS3DataConnector "{name}".')
71
72 super().__init__(
73 name=name,
74 datasource_name=datasource_name,
75 execution_engine=execution_engine,
76 default_regex=default_regex,
77 sorters=sorters,
78 batch_spec_passthrough=batch_spec_passthrough,
79 )
80
81 self._bucket = bucket
82 self._prefix = FilePathDataConnector.sanitize_prefix(prefix)
83 self._delimiter = delimiter
84 self._max_keys = max_keys
85
86 if boto3_options is None:
87 boto3_options = {}
88
89 try:
90 self._s3 = boto3.client("s3", **boto3_options)
91 except (TypeError, AttributeError):
92 raise ImportError(
93 "Unable to load boto3 (it is required for InferredAssetS3DataConnector)."
94 )
95
96 def build_batch_spec(self, batch_definition: BatchDefinition) -> S3BatchSpec:
97 """
98 Build BatchSpec from batch_definition by calling DataConnector's build_batch_spec function.
99
100 Args:
101 batch_definition (BatchDefinition): to be used to build batch_spec
102
103 Returns:
104 BatchSpec built from batch_definition
105 """
106 batch_spec: PathBatchSpec = super().build_batch_spec(
107 batch_definition=batch_definition
108 )
109 return S3BatchSpec(batch_spec)
110
111 def _get_data_reference_list(
112 self, data_asset_name: Optional[str] = None
113 ) -> List[str]:
114 """
115 List objects in the underlying data store to create a list of data_references.
116
117 This method is used to refresh the cache.
118 """
119 query_options: dict = {
120 "Bucket": self._bucket,
121 "Prefix": self._prefix,
122 "Delimiter": self._delimiter,
123 "MaxKeys": self._max_keys,
124 }
125
126 path_list: List[str] = [
127 key
128 for key in list_s3_keys(
129 s3=self._s3,
130 query_options=query_options,
131 iterator_dict={},
132 recursive=True,
133 )
134 ]
135 return path_list
136
137 def _get_full_file_path(
138 self,
139 path: str,
140 data_asset_name: Optional[str] = None,
141 ) -> str:
142 # data_asset_name isn't used in this method.
143 # It's only kept for compatibility with parent methods.
144 _check_valid_s3_path(path)
145 template_arguments: dict = {
146 "bucket": self._bucket,
147 "path": path,
148 }
149 return self.execution_engine.resolve_data_reference(
150 data_connector_name=self.__class__.__name__,
151 template_arguments=template_arguments,
152 )
153
154
155 def _check_valid_s3_path(
156 path: str,
157 ) -> None:
158 """Performs a basic check for validity of the S3 path"""
159 bad_chars: list = [c for c in INVALID_S3_CHARS if c in path]
160 if len(bad_chars) > 0:
161 msg: str = (
162 f"The parsed S3 path={path} contains the invalid characters {bad_chars}."
163 "Please make sure your regex is correct and characters are escaped."
164 )
165 if "*" in bad_chars:
166 msg += "Note: `*` is internally used to replace the regex for `.`."
167 raise ge_exceptions.ParserError(msg)
168
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/great_expectations/datasource/data_connector/configured_asset_s3_data_connector.py b/great_expectations/datasource/data_connector/configured_asset_s3_data_connector.py
--- a/great_expectations/datasource/data_connector/configured_asset_s3_data_connector.py
+++ b/great_expectations/datasource/data_connector/configured_asset_s3_data_connector.py
@@ -13,9 +13,6 @@
from great_expectations.datasource.data_connector.configured_asset_file_path_data_connector import (
ConfiguredAssetFilePathDataConnector,
)
-from great_expectations.datasource.data_connector.file_path_data_connector import (
- FilePathDataConnector,
-)
from great_expectations.datasource.data_connector.util import list_s3_keys
from great_expectations.execution_engine import ExecutionEngine
@@ -82,7 +79,7 @@
batch_spec_passthrough=batch_spec_passthrough,
)
self._bucket = bucket
- self._prefix = FilePathDataConnector.sanitize_prefix(prefix)
+ self._prefix = self.sanitize_prefix_for_s3(prefix)
self._delimiter = delimiter
self._max_keys = max_keys
@@ -96,6 +93,26 @@
"Unable to load boto3 (it is required for ConfiguredAssetS3DataConnector)."
)
+ @staticmethod
+ def sanitize_prefix_for_s3(text: str) -> str:
+ """
+ Takes in a given user-prefix and cleans it to work with file-system traversal methods
+ (i.e. add '/' to the end of a string meant to represent a directory)
+
+ Customized for S3 paths, ignoring the path separator used by the host OS
+ """
+ text = text.strip()
+ if not text:
+ return text
+
+ path_parts = text.split("/")
+ if not path_parts: # Empty prefix
+ return text
+ elif "." in path_parts[-1]: # File, not folder
+ return text
+ else: # Folder, should have trailing /
+ return text.rstrip("/") + "/"
+
def build_batch_spec(self, batch_definition: BatchDefinition) -> S3BatchSpec:
"""
Build BatchSpec from batch_definition by calling DataConnector's build_batch_spec function.
diff --git a/great_expectations/datasource/data_connector/inferred_asset_s3_data_connector.py b/great_expectations/datasource/data_connector/inferred_asset_s3_data_connector.py
--- a/great_expectations/datasource/data_connector/inferred_asset_s3_data_connector.py
+++ b/great_expectations/datasource/data_connector/inferred_asset_s3_data_connector.py
@@ -4,9 +4,7 @@
import great_expectations.exceptions as ge_exceptions
from great_expectations.core.batch import BatchDefinition
from great_expectations.core.batch_spec import PathBatchSpec, S3BatchSpec
-from great_expectations.datasource.data_connector.file_path_data_connector import (
- FilePathDataConnector,
-)
+from great_expectations.datasource.data_connector import ConfiguredAssetS3DataConnector
try:
import boto3
@@ -79,7 +77,7 @@
)
self._bucket = bucket
- self._prefix = FilePathDataConnector.sanitize_prefix(prefix)
+ self._prefix = ConfiguredAssetS3DataConnector.sanitize_prefix_for_s3(prefix)
self._delimiter = delimiter
self._max_keys = max_keys
|
{"golden_diff": "diff --git a/great_expectations/datasource/data_connector/configured_asset_s3_data_connector.py b/great_expectations/datasource/data_connector/configured_asset_s3_data_connector.py\n--- a/great_expectations/datasource/data_connector/configured_asset_s3_data_connector.py\n+++ b/great_expectations/datasource/data_connector/configured_asset_s3_data_connector.py\n@@ -13,9 +13,6 @@\n from great_expectations.datasource.data_connector.configured_asset_file_path_data_connector import (\n ConfiguredAssetFilePathDataConnector,\n )\n-from great_expectations.datasource.data_connector.file_path_data_connector import (\n- FilePathDataConnector,\n-)\n from great_expectations.datasource.data_connector.util import list_s3_keys\n from great_expectations.execution_engine import ExecutionEngine\n \n@@ -82,7 +79,7 @@\n batch_spec_passthrough=batch_spec_passthrough,\n )\n self._bucket = bucket\n- self._prefix = FilePathDataConnector.sanitize_prefix(prefix)\n+ self._prefix = self.sanitize_prefix_for_s3(prefix)\n self._delimiter = delimiter\n self._max_keys = max_keys\n \n@@ -96,6 +93,26 @@\n \"Unable to load boto3 (it is required for ConfiguredAssetS3DataConnector).\"\n )\n \n+ @staticmethod\n+ def sanitize_prefix_for_s3(text: str) -> str:\n+ \"\"\"\n+ Takes in a given user-prefix and cleans it to work with file-system traversal methods\n+ (i.e. add '/' to the end of a string meant to represent a directory)\n+\n+ Customized for S3 paths, ignoring the path separator used by the host OS\n+ \"\"\"\n+ text = text.strip()\n+ if not text:\n+ return text\n+\n+ path_parts = text.split(\"/\")\n+ if not path_parts: # Empty prefix\n+ return text\n+ elif \".\" in path_parts[-1]: # File, not folder\n+ return text\n+ else: # Folder, should have trailing /\n+ return text.rstrip(\"/\") + \"/\"\n+\n def build_batch_spec(self, batch_definition: BatchDefinition) -> S3BatchSpec:\n \"\"\"\n Build BatchSpec from batch_definition by calling DataConnector's build_batch_spec function.\ndiff --git a/great_expectations/datasource/data_connector/inferred_asset_s3_data_connector.py b/great_expectations/datasource/data_connector/inferred_asset_s3_data_connector.py\n--- a/great_expectations/datasource/data_connector/inferred_asset_s3_data_connector.py\n+++ b/great_expectations/datasource/data_connector/inferred_asset_s3_data_connector.py\n@@ -4,9 +4,7 @@\n import great_expectations.exceptions as ge_exceptions\n from great_expectations.core.batch import BatchDefinition\n from great_expectations.core.batch_spec import PathBatchSpec, S3BatchSpec\n-from great_expectations.datasource.data_connector.file_path_data_connector import (\n- FilePathDataConnector,\n-)\n+from great_expectations.datasource.data_connector import ConfiguredAssetS3DataConnector\n \n try:\n import boto3\n@@ -79,7 +77,7 @@\n )\n \n self._bucket = bucket\n- self._prefix = FilePathDataConnector.sanitize_prefix(prefix)\n+ self._prefix = ConfiguredAssetS3DataConnector.sanitize_prefix_for_s3(prefix)\n self._delimiter = delimiter\n self._max_keys = max_keys\n", "issue": "S3 prefixes get broken when running on Windows\n**Describe the bug**\r\nOn Windows when referencing an S3 data source, the prefix is suffixed with ``\\`` instead of ``/``, resulting in query always failing.\r\n\r\n**To Reproduce**\r\nCreate a ``InferredAssetS3DataConnector`` data connector, and pass any valid prefix (probably needs to be non-empty). 
Then, attempt to load a file from that connector.\r\n\r\n**Expected behavior**\r\nThe file gets loaded properly.\r\n\r\n**Error**\r\n```\r\n...\\lib\\site-packages\\great_expectations\\datasource\\data_connector\\util.py in list_s3_keys(s3, query_options, iterator_dict, recursive)\r\n 452 \r\n 453 if not any(key in s3_objects_info for key in [\"Contents\", \"CommonPrefixes\"]):\r\n--> 454 raise ValueError(\"S3 query may not have been configured correctly.\")\r\n 455 \r\n 456 if \"Contents\" in s3_objects_info:\r\n\r\nValueError: S3 query may not have been configured correctly.\r\n```\r\n\r\n**Fix**\r\nIn ``inferred_asset_s3_data_connector.py``, line 82 is:\r\n\r\n```\r\n self._prefix = FilePathDataConnector.sanitize_prefix(prefix)\r\n```\r\n\r\nThis ensures that the file path is always a folder, by appending a slash to the end. However, on Windows, the slash is ``\\``, and thus you end up with a prefix like ``path/to/some/folder\\``, which results in an S3 query with no results, hence the above stacktrace.\r\n\r\nThe fix is to ensure that only the correct slash can be appended to the prefix, regardless of the host OS. For example:\r\n\r\n```\r\n self._prefix = prefix.rstrip('/') + '/'\r\n```\r\n\r\nor something to that effect.\r\n\r\n**Environment (please complete the following information):**\r\n - Operating System: Windows\r\n - Great Expectations Version: 0.14.1\r\n\n", "before_files": [{"content": "import logging\nimport os\nfrom typing import List, Optional\n\ntry:\n import boto3\nexcept ImportError:\n boto3 = None\n\nfrom great_expectations.core.batch import BatchDefinition\nfrom great_expectations.core.batch_spec import PathBatchSpec, S3BatchSpec\nfrom great_expectations.datasource.data_connector.asset import Asset\nfrom great_expectations.datasource.data_connector.configured_asset_file_path_data_connector import (\n ConfiguredAssetFilePathDataConnector,\n)\nfrom great_expectations.datasource.data_connector.file_path_data_connector import (\n FilePathDataConnector,\n)\nfrom great_expectations.datasource.data_connector.util import list_s3_keys\nfrom great_expectations.execution_engine import ExecutionEngine\n\nlogger = logging.getLogger(__name__)\n\n\nclass ConfiguredAssetS3DataConnector(ConfiguredAssetFilePathDataConnector):\n \"\"\"\n Extension of ConfiguredAssetFilePathDataConnector used to connect to S3\n\n DataConnectors produce identifying information, called \"batch_spec\" that ExecutionEngines\n can use to get individual batches of data. 
They add flexibility in how to obtain data\n such as with time-based partitioning, downsampling, or other techniques appropriate\n for the Datasource.\n\n The ConfiguredAssetS3DataConnector is one of two classes (InferredAssetS3DataConnector being the\n other one) designed for connecting to data on S3.\n\n A ConfiguredAssetS3DataConnector requires an explicit listing of each DataAsset you want to connect to.\n This allows more fine-tuning, but also requires more setup.\n \"\"\"\n\n def __init__(\n self,\n name: str,\n datasource_name: str,\n bucket: str,\n assets: dict,\n execution_engine: Optional[ExecutionEngine] = None,\n default_regex: Optional[dict] = None,\n sorters: Optional[list] = None,\n prefix: str = \"\",\n delimiter: str = \"/\",\n max_keys: int = 1000,\n boto3_options: Optional[dict] = None,\n batch_spec_passthrough: Optional[dict] = None,\n ):\n \"\"\"\n ConfiguredAssetDataConnector for connecting to S3.\n\n Args:\n name (str): required name for DataConnector\n datasource_name (str): required name for datasource\n bucket (str): bucket for S3\n assets (dict): dict of asset configuration (required for ConfiguredAssetDataConnector)\n execution_engine (ExecutionEngine): optional reference to ExecutionEngine\n default_regex (dict): optional regex configuration for filtering data_references\n sorters (list): optional list of sorters for sorting data_references\n prefix (str): S3 prefix\n delimiter (str): S3 delimiter\n max_keys (int): S3 max_keys (default is 1000)\n boto3_options (dict): optional boto3 options\n batch_spec_passthrough (dict): dictionary with keys that will be added directly to batch_spec\n \"\"\"\n logger.debug(f'Constructing ConfiguredAssetS3DataConnector \"{name}\".')\n\n super().__init__(\n name=name,\n datasource_name=datasource_name,\n execution_engine=execution_engine,\n assets=assets,\n default_regex=default_regex,\n sorters=sorters,\n batch_spec_passthrough=batch_spec_passthrough,\n )\n self._bucket = bucket\n self._prefix = FilePathDataConnector.sanitize_prefix(prefix)\n self._delimiter = delimiter\n self._max_keys = max_keys\n\n if boto3_options is None:\n boto3_options = {}\n\n try:\n self._s3 = boto3.client(\"s3\", **boto3_options)\n except (TypeError, AttributeError):\n raise ImportError(\n \"Unable to load boto3 (it is required for ConfiguredAssetS3DataConnector).\"\n )\n\n def build_batch_spec(self, batch_definition: BatchDefinition) -> S3BatchSpec:\n \"\"\"\n Build BatchSpec from batch_definition by calling DataConnector's build_batch_spec function.\n\n Args:\n batch_definition (BatchDefinition): to be used to build batch_spec\n\n Returns:\n BatchSpec built from batch_definition\n \"\"\"\n batch_spec: PathBatchSpec = super().build_batch_spec(\n batch_definition=batch_definition\n )\n return S3BatchSpec(batch_spec)\n\n def _get_data_reference_list_for_asset(self, asset: Optional[Asset]) -> List[str]:\n query_options: dict = {\n \"Bucket\": self._bucket,\n \"Prefix\": self._prefix,\n \"Delimiter\": self._delimiter,\n \"MaxKeys\": self._max_keys,\n }\n if asset is not None:\n if asset.bucket:\n query_options[\"Bucket\"] = asset.bucket\n if asset.prefix:\n query_options[\"Prefix\"] = asset.prefix\n if asset.delimiter:\n query_options[\"Delimiter\"] = asset.delimiter\n if asset.max_keys:\n query_options[\"MaxKeys\"] = asset.max_keys\n\n path_list: List[str] = [\n key\n for key in list_s3_keys(\n s3=self._s3,\n query_options=query_options,\n iterator_dict={},\n recursive=False,\n )\n ]\n return path_list\n\n def _get_full_file_path_for_asset(\n self, 
path: str, asset: Optional[Asset] = None\n ) -> str:\n # asset isn't used in this method.\n # It's only kept for compatibility with parent methods.\n template_arguments: dict = {\n \"bucket\": self._bucket,\n \"path\": path,\n }\n return self.execution_engine.resolve_data_reference(\n data_connector_name=self.__class__.__name__,\n template_arguments=template_arguments,\n )\n", "path": "great_expectations/datasource/data_connector/configured_asset_s3_data_connector.py"}, {"content": "import logging\nfrom typing import List, Optional\n\nimport great_expectations.exceptions as ge_exceptions\nfrom great_expectations.core.batch import BatchDefinition\nfrom great_expectations.core.batch_spec import PathBatchSpec, S3BatchSpec\nfrom great_expectations.datasource.data_connector.file_path_data_connector import (\n FilePathDataConnector,\n)\n\ntry:\n import boto3\nexcept ImportError:\n boto3 = None\n\nfrom great_expectations.datasource.data_connector.inferred_asset_file_path_data_connector import (\n InferredAssetFilePathDataConnector,\n)\nfrom great_expectations.datasource.data_connector.util import list_s3_keys\nfrom great_expectations.execution_engine import ExecutionEngine\n\nlogger = logging.getLogger(__name__)\n\nINVALID_S3_CHARS = [\"*\"]\n\n\nclass InferredAssetS3DataConnector(InferredAssetFilePathDataConnector):\n \"\"\"\n Extension of InferredAssetFilePathDataConnector used to connect to S3\n\n The InferredAssetS3DataConnector is one of two classes (ConfiguredAssetS3DataConnector being the\n other one) designed for connecting to filesystem-like data, more specifically files on S3. It connects to assets\n inferred from bucket, prefix, and file name by default_regex.\n\n InferredAssetS3DataConnector that operates on S3 buckets and determines\n the data_asset_name implicitly (e.g., through the combination of the regular expressions pattern and group names)\n\n \"\"\"\n\n def __init__(\n self,\n name: str,\n datasource_name: str,\n bucket: str,\n execution_engine: Optional[ExecutionEngine] = None,\n default_regex: Optional[dict] = None,\n sorters: Optional[list] = None,\n prefix: str = \"\",\n delimiter: str = \"/\",\n max_keys: int = 1000,\n boto3_options: Optional[dict] = None,\n batch_spec_passthrough: Optional[dict] = None,\n ):\n \"\"\"\n InferredAssetS3DataConnector for connecting to S3.\n\n Args:\n name (str): required name for data_connector\n datasource_name (str): required name for datasource\n bucket (str): bucket for S3\n execution_engine (ExecutionEngine): optional reference to ExecutionEngine\n default_regex (dict): optional regex configuration for filtering data_references\n sorters (list): optional list of sorters for sorting data_references\n prefix (str): S3 prefix\n delimiter (str): S3 delimiter\n max_keys (int): S3 max_keys (default is 1000)\n boto3_options (dict): optional boto3 options\n batch_spec_passthrough (dict): dictionary with keys that will be added directly to batch_spec\n \"\"\"\n logger.debug(f'Constructing InferredAssetS3DataConnector \"{name}\".')\n\n super().__init__(\n name=name,\n datasource_name=datasource_name,\n execution_engine=execution_engine,\n default_regex=default_regex,\n sorters=sorters,\n batch_spec_passthrough=batch_spec_passthrough,\n )\n\n self._bucket = bucket\n self._prefix = FilePathDataConnector.sanitize_prefix(prefix)\n self._delimiter = delimiter\n self._max_keys = max_keys\n\n if boto3_options is None:\n boto3_options = {}\n\n try:\n self._s3 = boto3.client(\"s3\", **boto3_options)\n except (TypeError, AttributeError):\n raise 
ImportError(\n \"Unable to load boto3 (it is required for InferredAssetS3DataConnector).\"\n )\n\n def build_batch_spec(self, batch_definition: BatchDefinition) -> S3BatchSpec:\n \"\"\"\n Build BatchSpec from batch_definition by calling DataConnector's build_batch_spec function.\n\n Args:\n batch_definition (BatchDefinition): to be used to build batch_spec\n\n Returns:\n BatchSpec built from batch_definition\n \"\"\"\n batch_spec: PathBatchSpec = super().build_batch_spec(\n batch_definition=batch_definition\n )\n return S3BatchSpec(batch_spec)\n\n def _get_data_reference_list(\n self, data_asset_name: Optional[str] = None\n ) -> List[str]:\n \"\"\"\n List objects in the underlying data store to create a list of data_references.\n\n This method is used to refresh the cache.\n \"\"\"\n query_options: dict = {\n \"Bucket\": self._bucket,\n \"Prefix\": self._prefix,\n \"Delimiter\": self._delimiter,\n \"MaxKeys\": self._max_keys,\n }\n\n path_list: List[str] = [\n key\n for key in list_s3_keys(\n s3=self._s3,\n query_options=query_options,\n iterator_dict={},\n recursive=True,\n )\n ]\n return path_list\n\n def _get_full_file_path(\n self,\n path: str,\n data_asset_name: Optional[str] = None,\n ) -> str:\n # data_asset_name isn't used in this method.\n # It's only kept for compatibility with parent methods.\n _check_valid_s3_path(path)\n template_arguments: dict = {\n \"bucket\": self._bucket,\n \"path\": path,\n }\n return self.execution_engine.resolve_data_reference(\n data_connector_name=self.__class__.__name__,\n template_arguments=template_arguments,\n )\n\n\ndef _check_valid_s3_path(\n path: str,\n) -> None:\n \"\"\"Performs a basic check for validity of the S3 path\"\"\"\n bad_chars: list = [c for c in INVALID_S3_CHARS if c in path]\n if len(bad_chars) > 0:\n msg: str = (\n f\"The parsed S3 path={path} contains the invalid characters {bad_chars}.\"\n \"Please make sure your regex is correct and characters are escaped.\"\n )\n if \"*\" in bad_chars:\n msg += \"Note: `*` is internally used to replace the regex for `.`.\"\n raise ge_exceptions.ParserError(msg)\n", "path": "great_expectations/datasource/data_connector/inferred_asset_s3_data_connector.py"}], "after_files": [{"content": "import logging\nimport os\nfrom typing import List, Optional\n\ntry:\n import boto3\nexcept ImportError:\n boto3 = None\n\nfrom great_expectations.core.batch import BatchDefinition\nfrom great_expectations.core.batch_spec import PathBatchSpec, S3BatchSpec\nfrom great_expectations.datasource.data_connector.asset import Asset\nfrom great_expectations.datasource.data_connector.configured_asset_file_path_data_connector import (\n ConfiguredAssetFilePathDataConnector,\n)\nfrom great_expectations.datasource.data_connector.util import list_s3_keys\nfrom great_expectations.execution_engine import ExecutionEngine\n\nlogger = logging.getLogger(__name__)\n\n\nclass ConfiguredAssetS3DataConnector(ConfiguredAssetFilePathDataConnector):\n \"\"\"\n Extension of ConfiguredAssetFilePathDataConnector used to connect to S3\n\n DataConnectors produce identifying information, called \"batch_spec\" that ExecutionEngines\n can use to get individual batches of data. 
They add flexibility in how to obtain data\n such as with time-based partitioning, downsampling, or other techniques appropriate\n for the Datasource.\n\n The ConfiguredAssetS3DataConnector is one of two classes (InferredAssetS3DataConnector being the\n other one) designed for connecting to data on S3.\n\n A ConfiguredAssetS3DataConnector requires an explicit listing of each DataAsset you want to connect to.\n This allows more fine-tuning, but also requires more setup.\n \"\"\"\n\n def __init__(\n self,\n name: str,\n datasource_name: str,\n bucket: str,\n assets: dict,\n execution_engine: Optional[ExecutionEngine] = None,\n default_regex: Optional[dict] = None,\n sorters: Optional[list] = None,\n prefix: str = \"\",\n delimiter: str = \"/\",\n max_keys: int = 1000,\n boto3_options: Optional[dict] = None,\n batch_spec_passthrough: Optional[dict] = None,\n ):\n \"\"\"\n ConfiguredAssetDataConnector for connecting to S3.\n\n Args:\n name (str): required name for DataConnector\n datasource_name (str): required name for datasource\n bucket (str): bucket for S3\n assets (dict): dict of asset configuration (required for ConfiguredAssetDataConnector)\n execution_engine (ExecutionEngine): optional reference to ExecutionEngine\n default_regex (dict): optional regex configuration for filtering data_references\n sorters (list): optional list of sorters for sorting data_references\n prefix (str): S3 prefix\n delimiter (str): S3 delimiter\n max_keys (int): S3 max_keys (default is 1000)\n boto3_options (dict): optional boto3 options\n batch_spec_passthrough (dict): dictionary with keys that will be added directly to batch_spec\n \"\"\"\n logger.debug(f'Constructing ConfiguredAssetS3DataConnector \"{name}\".')\n\n super().__init__(\n name=name,\n datasource_name=datasource_name,\n execution_engine=execution_engine,\n assets=assets,\n default_regex=default_regex,\n sorters=sorters,\n batch_spec_passthrough=batch_spec_passthrough,\n )\n self._bucket = bucket\n self._prefix = self.sanitize_prefix_for_s3(prefix)\n self._delimiter = delimiter\n self._max_keys = max_keys\n\n if boto3_options is None:\n boto3_options = {}\n\n try:\n self._s3 = boto3.client(\"s3\", **boto3_options)\n except (TypeError, AttributeError):\n raise ImportError(\n \"Unable to load boto3 (it is required for ConfiguredAssetS3DataConnector).\"\n )\n\n @staticmethod\n def sanitize_prefix_for_s3(text: str) -> str:\n \"\"\"\n Takes in a given user-prefix and cleans it to work with file-system traversal methods\n (i.e. 
add '/' to the end of a string meant to represent a directory)\n\n Customized for S3 paths, ignoring the path separator used by the host OS\n \"\"\"\n text = text.strip()\n if not text:\n return text\n\n path_parts = text.split(\"/\")\n if not path_parts: # Empty prefix\n return text\n elif \".\" in path_parts[-1]: # File, not folder\n return text\n else: # Folder, should have trailing /\n return text.rstrip(\"/\") + \"/\"\n\n def build_batch_spec(self, batch_definition: BatchDefinition) -> S3BatchSpec:\n \"\"\"\n Build BatchSpec from batch_definition by calling DataConnector's build_batch_spec function.\n\n Args:\n batch_definition (BatchDefinition): to be used to build batch_spec\n\n Returns:\n BatchSpec built from batch_definition\n \"\"\"\n batch_spec: PathBatchSpec = super().build_batch_spec(\n batch_definition=batch_definition\n )\n return S3BatchSpec(batch_spec)\n\n def _get_data_reference_list_for_asset(self, asset: Optional[Asset]) -> List[str]:\n query_options: dict = {\n \"Bucket\": self._bucket,\n \"Prefix\": self._prefix,\n \"Delimiter\": self._delimiter,\n \"MaxKeys\": self._max_keys,\n }\n if asset is not None:\n if asset.bucket:\n query_options[\"Bucket\"] = asset.bucket\n if asset.prefix:\n query_options[\"Prefix\"] = asset.prefix\n if asset.delimiter:\n query_options[\"Delimiter\"] = asset.delimiter\n if asset.max_keys:\n query_options[\"MaxKeys\"] = asset.max_keys\n\n path_list: List[str] = [\n key\n for key in list_s3_keys(\n s3=self._s3,\n query_options=query_options,\n iterator_dict={},\n recursive=False,\n )\n ]\n return path_list\n\n def _get_full_file_path_for_asset(\n self, path: str, asset: Optional[Asset] = None\n ) -> str:\n # asset isn't used in this method.\n # It's only kept for compatibility with parent methods.\n template_arguments: dict = {\n \"bucket\": self._bucket,\n \"path\": path,\n }\n return self.execution_engine.resolve_data_reference(\n data_connector_name=self.__class__.__name__,\n template_arguments=template_arguments,\n )\n", "path": "great_expectations/datasource/data_connector/configured_asset_s3_data_connector.py"}, {"content": "import logging\nfrom typing import List, Optional\n\nimport great_expectations.exceptions as ge_exceptions\nfrom great_expectations.core.batch import BatchDefinition\nfrom great_expectations.core.batch_spec import PathBatchSpec, S3BatchSpec\nfrom great_expectations.datasource.data_connector import ConfiguredAssetS3DataConnector\n\ntry:\n import boto3\nexcept ImportError:\n boto3 = None\n\nfrom great_expectations.datasource.data_connector.inferred_asset_file_path_data_connector import (\n InferredAssetFilePathDataConnector,\n)\nfrom great_expectations.datasource.data_connector.util import list_s3_keys\nfrom great_expectations.execution_engine import ExecutionEngine\n\nlogger = logging.getLogger(__name__)\n\nINVALID_S3_CHARS = [\"*\"]\n\n\nclass InferredAssetS3DataConnector(InferredAssetFilePathDataConnector):\n \"\"\"\n Extension of InferredAssetFilePathDataConnector used to connect to S3\n\n The InferredAssetS3DataConnector is one of two classes (ConfiguredAssetS3DataConnector being the\n other one) designed for connecting to filesystem-like data, more specifically files on S3. 
It connects to assets\n inferred from bucket, prefix, and file name by default_regex.\n\n InferredAssetS3DataConnector that operates on S3 buckets and determines\n the data_asset_name implicitly (e.g., through the combination of the regular expressions pattern and group names)\n\n \"\"\"\n\n def __init__(\n self,\n name: str,\n datasource_name: str,\n bucket: str,\n execution_engine: Optional[ExecutionEngine] = None,\n default_regex: Optional[dict] = None,\n sorters: Optional[list] = None,\n prefix: str = \"\",\n delimiter: str = \"/\",\n max_keys: int = 1000,\n boto3_options: Optional[dict] = None,\n batch_spec_passthrough: Optional[dict] = None,\n ):\n \"\"\"\n InferredAssetS3DataConnector for connecting to S3.\n\n Args:\n name (str): required name for data_connector\n datasource_name (str): required name for datasource\n bucket (str): bucket for S3\n execution_engine (ExecutionEngine): optional reference to ExecutionEngine\n default_regex (dict): optional regex configuration for filtering data_references\n sorters (list): optional list of sorters for sorting data_references\n prefix (str): S3 prefix\n delimiter (str): S3 delimiter\n max_keys (int): S3 max_keys (default is 1000)\n boto3_options (dict): optional boto3 options\n batch_spec_passthrough (dict): dictionary with keys that will be added directly to batch_spec\n \"\"\"\n logger.debug(f'Constructing InferredAssetS3DataConnector \"{name}\".')\n\n super().__init__(\n name=name,\n datasource_name=datasource_name,\n execution_engine=execution_engine,\n default_regex=default_regex,\n sorters=sorters,\n batch_spec_passthrough=batch_spec_passthrough,\n )\n\n self._bucket = bucket\n self._prefix = ConfiguredAssetS3DataConnector.sanitize_prefix_for_s3(prefix)\n self._delimiter = delimiter\n self._max_keys = max_keys\n\n if boto3_options is None:\n boto3_options = {}\n\n try:\n self._s3 = boto3.client(\"s3\", **boto3_options)\n except (TypeError, AttributeError):\n raise ImportError(\n \"Unable to load boto3 (it is required for InferredAssetS3DataConnector).\"\n )\n\n def build_batch_spec(self, batch_definition: BatchDefinition) -> S3BatchSpec:\n \"\"\"\n Build BatchSpec from batch_definition by calling DataConnector's build_batch_spec function.\n\n Args:\n batch_definition (BatchDefinition): to be used to build batch_spec\n\n Returns:\n BatchSpec built from batch_definition\n \"\"\"\n batch_spec: PathBatchSpec = super().build_batch_spec(\n batch_definition=batch_definition\n )\n return S3BatchSpec(batch_spec)\n\n def _get_data_reference_list(\n self, data_asset_name: Optional[str] = None\n ) -> List[str]:\n \"\"\"\n List objects in the underlying data store to create a list of data_references.\n\n This method is used to refresh the cache.\n \"\"\"\n query_options: dict = {\n \"Bucket\": self._bucket,\n \"Prefix\": self._prefix,\n \"Delimiter\": self._delimiter,\n \"MaxKeys\": self._max_keys,\n }\n\n path_list: List[str] = [\n key\n for key in list_s3_keys(\n s3=self._s3,\n query_options=query_options,\n iterator_dict={},\n recursive=True,\n )\n ]\n return path_list\n\n def _get_full_file_path(\n self,\n path: str,\n data_asset_name: Optional[str] = None,\n ) -> str:\n # data_asset_name isn't used in this method.\n # It's only kept for compatibility with parent methods.\n _check_valid_s3_path(path)\n template_arguments: dict = {\n \"bucket\": self._bucket,\n \"path\": path,\n }\n return self.execution_engine.resolve_data_reference(\n data_connector_name=self.__class__.__name__,\n template_arguments=template_arguments,\n )\n\n\ndef 
_check_valid_s3_path(\n path: str,\n) -> None:\n \"\"\"Performs a basic check for validity of the S3 path\"\"\"\n bad_chars: list = [c for c in INVALID_S3_CHARS if c in path]\n if len(bad_chars) > 0:\n msg: str = (\n f\"The parsed S3 path={path} contains the invalid characters {bad_chars}.\"\n \"Please make sure your regex is correct and characters are escaped.\"\n )\n if \"*\" in bad_chars:\n msg += \"Note: `*` is internally used to replace the regex for `.`.\"\n raise ge_exceptions.ParserError(msg)\n", "path": "great_expectations/datasource/data_connector/inferred_asset_s3_data_connector.py"}]}
| 3,915 | 729 |
gh_patches_debug_3670 | rasdani/github-patches | git_diff | wright-group__WrightTools-753 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Kit leastsq should not except BaseException
https://github.com/wright-group/WrightTools/blob/f22920579f45632b4123661d9832ff0cc1b614c4/WrightTools/kit/_leastsq.py#L74
The exception caught should be limited to those known to be raised inside.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `WrightTools/kit/_leastsq.py`
Content:
```
1 """Least-square fitting tools."""
2
3
4 # --- import --------------------------------------------------------------------------------------
5
6
7 from ._utilities import Timer
8
9 import numpy as np
10
11 from scipy import optimize as scipy_optimize
12
13
14 # --- define --------------------------------------------------------------------------------------
15
16
17 __all__ = ["leastsqfitter"]
18
19
20 # --- functions -----------------------------------------------------------------------------------
21
22
23 def leastsqfitter(p0, datax, datay, function, verbose=False, cov_verbose=False):
24 """Conveniently call scipy.optmize.leastsq().
25
26 Returns fit parameters and their errors.
27
28 Parameters
29 ----------
30 p0 : list
31 list of guess parameters to pass to function
32 datax : array
33 array of independent values
34 datay : array
35 array of dependent values
36 function : function
37 function object to fit data to. Must be of the callable form function(p, x)
38 verbose : bool
39 toggles printing of fit time, fit params, and fit param errors
40 cov_verbose : bool
41 toggles printing of covarience matrix
42
43 Returns
44 -------
45 pfit_leastsq : list
46 list of fit parameters. s.t. the error between datay and function(p, datax) is minimized
47 perr_leastsq : list
48 list of fit parameter errors (1 std)
49 """
50 timer = Timer(verbose=False)
51 with timer:
52 # define error function
53 def errfunc(p, x, y):
54 return y - function(p, x)
55
56 # run optimization
57 pfit_leastsq, pcov, infodict, errmsg, success = scipy_optimize.leastsq(
58 errfunc, p0, args=(datax, datay), full_output=1, epsfcn=0.0001
59 )
60 # calculate covarience matrix
61 # original idea https://stackoverflow.com/a/21844726
62 if (len(datay) > len(p0)) and pcov is not None:
63 s_sq = (errfunc(pfit_leastsq, datax, datay) ** 2).sum() / (len(datay) - len(p0))
64 pcov = pcov * s_sq
65 if cov_verbose:
66 print(pcov)
67 else:
68 pcov = np.inf
69 # calculate and write errors
70 error = []
71 for i in range(len(pfit_leastsq)):
72 try:
73 error.append(np.absolute(pcov[i][i]) ** 0.5)
74 except BaseException:
75 error.append(0.00)
76 perr_leastsq = np.array(error)
77 # exit
78 if verbose:
79 print("fit params: ", pfit_leastsq)
80 print("fit params error: ", perr_leastsq)
81 print("fitting done in %f seconds" % timer.interval)
82 return pfit_leastsq, perr_leastsq
83
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/WrightTools/kit/_leastsq.py b/WrightTools/kit/_leastsq.py
--- a/WrightTools/kit/_leastsq.py
+++ b/WrightTools/kit/_leastsq.py
@@ -71,7 +71,7 @@
for i in range(len(pfit_leastsq)):
try:
error.append(np.absolute(pcov[i][i]) ** 0.5)
- except BaseException:
+ except IndexError:
error.append(0.00)
perr_leastsq = np.array(error)
# exit
|
{"golden_diff": "diff --git a/WrightTools/kit/_leastsq.py b/WrightTools/kit/_leastsq.py\n--- a/WrightTools/kit/_leastsq.py\n+++ b/WrightTools/kit/_leastsq.py\n@@ -71,7 +71,7 @@\n for i in range(len(pfit_leastsq)):\n try:\n error.append(np.absolute(pcov[i][i]) ** 0.5)\n- except BaseException:\n+ except IndexError:\n error.append(0.00)\n perr_leastsq = np.array(error)\n # exit\n", "issue": "Kit leastsq should not except BaseException\nhttps://github.com/wright-group/WrightTools/blob/f22920579f45632b4123661d9832ff0cc1b614c4/WrightTools/kit/_leastsq.py#L74\r\n\r\nThe exception caught should be limited to those known to be raised inside.\n", "before_files": [{"content": "\"\"\"Least-square fitting tools.\"\"\"\n\n\n# --- import --------------------------------------------------------------------------------------\n\n\nfrom ._utilities import Timer\n\nimport numpy as np\n\nfrom scipy import optimize as scipy_optimize\n\n\n# --- define --------------------------------------------------------------------------------------\n\n\n__all__ = [\"leastsqfitter\"]\n\n\n# --- functions -----------------------------------------------------------------------------------\n\n\ndef leastsqfitter(p0, datax, datay, function, verbose=False, cov_verbose=False):\n \"\"\"Conveniently call scipy.optmize.leastsq().\n\n Returns fit parameters and their errors.\n\n Parameters\n ----------\n p0 : list\n list of guess parameters to pass to function\n datax : array\n array of independent values\n datay : array\n array of dependent values\n function : function\n function object to fit data to. Must be of the callable form function(p, x)\n verbose : bool\n toggles printing of fit time, fit params, and fit param errors\n cov_verbose : bool\n toggles printing of covarience matrix\n\n Returns\n -------\n pfit_leastsq : list\n list of fit parameters. s.t. 
the error between datay and function(p, datax) is minimized\n perr_leastsq : list\n list of fit parameter errors (1 std)\n \"\"\"\n timer = Timer(verbose=False)\n with timer:\n # define error function\n def errfunc(p, x, y):\n return y - function(p, x)\n\n # run optimization\n pfit_leastsq, pcov, infodict, errmsg, success = scipy_optimize.leastsq(\n errfunc, p0, args=(datax, datay), full_output=1, epsfcn=0.0001\n )\n # calculate covarience matrix\n # original idea https://stackoverflow.com/a/21844726\n if (len(datay) > len(p0)) and pcov is not None:\n s_sq = (errfunc(pfit_leastsq, datax, datay) ** 2).sum() / (len(datay) - len(p0))\n pcov = pcov * s_sq\n if cov_verbose:\n print(pcov)\n else:\n pcov = np.inf\n # calculate and write errors\n error = []\n for i in range(len(pfit_leastsq)):\n try:\n error.append(np.absolute(pcov[i][i]) ** 0.5)\n except BaseException:\n error.append(0.00)\n perr_leastsq = np.array(error)\n # exit\n if verbose:\n print(\"fit params: \", pfit_leastsq)\n print(\"fit params error: \", perr_leastsq)\n print(\"fitting done in %f seconds\" % timer.interval)\n return pfit_leastsq, perr_leastsq\n", "path": "WrightTools/kit/_leastsq.py"}], "after_files": [{"content": "\"\"\"Least-square fitting tools.\"\"\"\n\n\n# --- import --------------------------------------------------------------------------------------\n\n\nfrom ._utilities import Timer\n\nimport numpy as np\n\nfrom scipy import optimize as scipy_optimize\n\n\n# --- define --------------------------------------------------------------------------------------\n\n\n__all__ = [\"leastsqfitter\"]\n\n\n# --- functions -----------------------------------------------------------------------------------\n\n\ndef leastsqfitter(p0, datax, datay, function, verbose=False, cov_verbose=False):\n \"\"\"Conveniently call scipy.optmize.leastsq().\n\n Returns fit parameters and their errors.\n\n Parameters\n ----------\n p0 : list\n list of guess parameters to pass to function\n datax : array\n array of independent values\n datay : array\n array of dependent values\n function : function\n function object to fit data to. Must be of the callable form function(p, x)\n verbose : bool\n toggles printing of fit time, fit params, and fit param errors\n cov_verbose : bool\n toggles printing of covarience matrix\n\n Returns\n -------\n pfit_leastsq : list\n list of fit parameters. s.t. the error between datay and function(p, datax) is minimized\n perr_leastsq : list\n list of fit parameter errors (1 std)\n \"\"\"\n timer = Timer(verbose=False)\n with timer:\n # define error function\n def errfunc(p, x, y):\n return y - function(p, x)\n\n # run optimization\n pfit_leastsq, pcov, infodict, errmsg, success = scipy_optimize.leastsq(\n errfunc, p0, args=(datax, datay), full_output=1, epsfcn=0.0001\n )\n # calculate covarience matrix\n # original idea https://stackoverflow.com/a/21844726\n if (len(datay) > len(p0)) and pcov is not None:\n s_sq = (errfunc(pfit_leastsq, datax, datay) ** 2).sum() / (len(datay) - len(p0))\n pcov = pcov * s_sq\n if cov_verbose:\n print(pcov)\n else:\n pcov = np.inf\n # calculate and write errors\n error = []\n for i in range(len(pfit_leastsq)):\n try:\n error.append(np.absolute(pcov[i][i]) ** 0.5)\n except IndexError:\n error.append(0.00)\n perr_leastsq = np.array(error)\n # exit\n if verbose:\n print(\"fit params: \", pfit_leastsq)\n print(\"fit params error: \", perr_leastsq)\n print(\"fitting done in %f seconds\" % timer.interval)\n return pfit_leastsq, perr_leastsq\n", "path": "WrightTools/kit/_leastsq.py"}]}
| 1,131 | 131 |
gh_patches_debug_5878 | rasdani/github-patches | git_diff | ManimCommunity__manim-2593 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Preview without embed broken in jupyter notebooks.
## Description
The embed feature seems to have broken preview in jupyter notebooks.
default:
<img width="1115" alt="Screenshot 2022-03-05 at 3 24 09 PM" src="https://user-images.githubusercontent.com/90276965/156878243-224900ac-8da2-46e5-8ee2-cc9f1852bec8.png">
with media embed:
<img width="1115" alt="Screenshot 2022-03-05 at 3 24 30 PM" src="https://user-images.githubusercontent.com/90276965/156878267-b5d77f95-7953-45e9-b155-baac8f19db35.png">
## Expected Behaviour
Should preview images even without `media_embed`
_Originally posted by @Kiran-Raj-Dev in https://github.com/ManimCommunity/manim/issues/2442#issuecomment-1059732017_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `manim/utils/ipython_magic.py`
Content:
```
1 """Utilities for using Manim with IPython (in particular: Jupyter notebooks)"""
2
3 from __future__ import annotations
4
5 import mimetypes
6 import os
7 import shutil
8 from datetime import datetime
9 from pathlib import Path
10 from typing import Any
11
12 from manim import Group, config, logger, tempconfig
13 from manim.__main__ import main
14 from manim.renderer.shader import shader_program_cache
15
16 try:
17 from IPython import get_ipython
18 from IPython.core.interactiveshell import InteractiveShell
19 from IPython.core.magic import (
20 Magics,
21 line_cell_magic,
22 magics_class,
23 needs_local_scope,
24 )
25 from IPython.display import Image, Video, display
26 except ImportError:
27 pass
28 else:
29
30 @magics_class
31 class ManimMagic(Magics):
32 def __init__(self, shell: InteractiveShell) -> None:
33 super().__init__(shell)
34 self.rendered_files = {}
35
36 @needs_local_scope
37 @line_cell_magic
38 def manim(
39 self,
40 line: str,
41 cell: str = None,
42 local_ns: dict[str, Any] = None,
43 ) -> None:
44 r"""Render Manim scenes contained in IPython cells.
45 Works as a line or cell magic.
46
47 .. hint::
48
49 This line and cell magic works best when used in a JupyterLab
50 environment: while all of the functionality is available for
51 classic Jupyter notebooks as well, it is possible that videos
52 sometimes don't update on repeated execution of the same cell
53 if the scene name stays the same.
54
55 This problem does not occur when using JupyterLab.
56
57 Please refer to `<https://jupyter.org/>`_ for more information about JupyterLab
58 and Jupyter notebooks.
59
60 Usage in line mode::
61
62 %manim [CLI options] MyAwesomeScene
63
64 Usage in cell mode::
65
66 %%manim [CLI options] MyAwesomeScene
67
68 class MyAweseomeScene(Scene):
69 def construct(self):
70 ...
71
72 Run ``%manim --help`` and ``%manim render --help`` for possible command line interface options.
73
74 .. note::
75
76 The maximal width of the rendered videos that are displayed in the notebook can be
77 configured via the ``media_width`` configuration option. The default is set to ``25vw``,
78 which is 25% of your current viewport width. To allow the output to become as large
79 as possible, set ``config.media_width = "100%"``.
80
81 The ``media_embed`` option will embed the image/video output in the notebook. This is
82 generally undesirable as it makes the notebooks very large, but is required on some
83 platforms (notably Google's CoLab, where it is automatically enabled unless suppressed
84 by ``config.embed = False``) and needed in cases when the notebook (or converted HTML
85 file) will be moved relative to the video locations. Use-cases include building
86 documentation with Sphinx and JupyterBook. See also the :mod:`manim directive for Sphinx
87 <manim.utils.docbuild.manim_directive>`.
88
89 Examples
90 --------
91
92 First make sure to put ``import manim``, or even ``from manim import *``
93 in a cell and evaluate it. Then, a typical Jupyter notebook cell for Manim
94 could look as follows::
95
96 %%manim -v WARNING --disable_caching -qm BannerExample
97
98 config.media_width = "75%"
99 config.media_embed = True
100
101 class BannerExample(Scene):
102 def construct(self):
103 self.camera.background_color = "#ece6e2"
104 banner_large = ManimBanner(dark_theme=False).scale(0.7)
105 self.play(banner_large.create())
106 self.play(banner_large.expand())
107
108 Evaluating this cell will render and display the ``BannerExample`` scene defined in the body of the cell.
109
110 .. note::
111
112 In case you want to hide the red box containing the output progress bar, the ``progress_bar`` config
113 option should be set to ``None``. This can also be done by passing ``--progress_bar None`` as a
114 CLI flag.
115
116 """
117 if cell:
118 exec(cell, local_ns)
119
120 args = line.split()
121 if not len(args) or "-h" in args or "--help" in args or "--version" in args:
122 main(args, standalone_mode=False, prog_name="manim")
123 return
124
125 modified_args = self.add_additional_args(args)
126 args = main(modified_args, standalone_mode=False, prog_name="manim")
127 with tempconfig(local_ns.get("config", {})):
128 config.digest_args(args)
129
130 renderer = None
131 if config.renderer == "opengl":
132 # Check if the imported mobjects extend the OpenGLMobject class
133 # meaning ConvertToOpenGL did its job
134 if "OpenGLMobject" in map(lambda cls: cls.__name__, Group.mro()):
135 from manim.renderer.opengl_renderer import OpenGLRenderer
136
137 renderer = OpenGLRenderer()
138 else:
139 logger.warning(
140 "Renderer must be set to OpenGL in the configuration file "
141 "before importing Manim! Using cairo renderer instead.",
142 )
143 config.renderer = "cairo"
144
145 try:
146 SceneClass = local_ns[config["scene_names"][0]]
147 scene = SceneClass(renderer=renderer)
148 scene.render()
149 finally:
150 # Shader cache becomes invalid as the context is destroyed
151 shader_program_cache.clear()
152
153 # Close OpenGL window here instead of waiting for the main thread to
154 # finish causing the window to stay open and freeze
155 if renderer is not None and renderer.window is not None:
156 renderer.window.close()
157
158 if config["output_file"] is None:
159 logger.info("No output file produced")
160 return
161
162 local_path = Path(config["output_file"]).relative_to(Path.cwd())
163 tmpfile = (
164 Path(config["media_dir"])
165 / "jupyter"
166 / f"{_generate_file_name()}{local_path.suffix}"
167 )
168
169 if local_path in self.rendered_files:
170 self.rendered_files[local_path].unlink()
171 self.rendered_files[local_path] = tmpfile
172 os.makedirs(tmpfile.parent, exist_ok=True)
173 shutil.copy(local_path, tmpfile)
174
175 file_type = mimetypes.guess_type(config["output_file"])[0]
176 embed = config["media_embed"]
177 if embed is None:
178 # videos need to be embedded when running in google colab.
179 # do this automatically in case config.media_embed has not been
180 # set explicitly.
181 embed = "google.colab" in str(get_ipython())
182
183 if file_type.startswith("image"):
184 result = Image(filename=config["output_file"], embed=embed)
185 else:
186 result = Video(
187 tmpfile,
188 html_attributes=f'controls autoplay loop style="max-width: {config["media_width"]};"',
189 embed=embed,
190 )
191
192 display(result)
193
194 def add_additional_args(self, args: list[str]) -> list[str]:
195 additional_args = ["--jupyter"]
196 # Use webm to support transparency
197 if "-t" in args and "--format" not in args:
198 additional_args += ["--format", "webm"]
199 return additional_args + args[:-1] + [""] + [args[-1]]
200
201
202 def _generate_file_name() -> str:
203 return config["scene_names"][0] + "@" + datetime.now().strftime("%Y-%m-%d@%H-%M-%S")
204
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/manim/utils/ipython_magic.py b/manim/utils/ipython_magic.py
--- a/manim/utils/ipython_magic.py
+++ b/manim/utils/ipython_magic.py
@@ -181,7 +181,7 @@
embed = "google.colab" in str(get_ipython())
if file_type.startswith("image"):
- result = Image(filename=config["output_file"], embed=embed)
+ result = Image(filename=config["output_file"])
else:
result = Video(
tmpfile,
|
{"golden_diff": "diff --git a/manim/utils/ipython_magic.py b/manim/utils/ipython_magic.py\n--- a/manim/utils/ipython_magic.py\n+++ b/manim/utils/ipython_magic.py\n@@ -181,7 +181,7 @@\n embed = \"google.colab\" in str(get_ipython())\n \n if file_type.startswith(\"image\"):\n- result = Image(filename=config[\"output_file\"], embed=embed)\n+ result = Image(filename=config[\"output_file\"])\n else:\n result = Video(\n tmpfile,\n", "issue": "Preview without embed broken in jupyter notebooks.\n## Description\r\n\r\nThe embed feature seems to have broken preview in jupyter notebooks.\r\n\r\ndefault:\r\n<img width=\"1115\" alt=\"Screenshot 2022-03-05 at 3 24 09 PM\" src=\"https://user-images.githubusercontent.com/90276965/156878243-224900ac-8da2-46e5-8ee2-cc9f1852bec8.png\">\r\n\r\nwith media embed:\r\n<img width=\"1115\" alt=\"Screenshot 2022-03-05 at 3 24 30 PM\" src=\"https://user-images.githubusercontent.com/90276965/156878267-b5d77f95-7953-45e9-b155-baac8f19db35.png\">\r\n\r\n## Expected Behaviour\r\nShould preview images even without `media_embed`\r\n\r\n\r\n_Originally posted by @Kiran-Raj-Dev in https://github.com/ManimCommunity/manim/issues/2442#issuecomment-1059732017_\n", "before_files": [{"content": "\"\"\"Utilities for using Manim with IPython (in particular: Jupyter notebooks)\"\"\"\n\nfrom __future__ import annotations\n\nimport mimetypes\nimport os\nimport shutil\nfrom datetime import datetime\nfrom pathlib import Path\nfrom typing import Any\n\nfrom manim import Group, config, logger, tempconfig\nfrom manim.__main__ import main\nfrom manim.renderer.shader import shader_program_cache\n\ntry:\n from IPython import get_ipython\n from IPython.core.interactiveshell import InteractiveShell\n from IPython.core.magic import (\n Magics,\n line_cell_magic,\n magics_class,\n needs_local_scope,\n )\n from IPython.display import Image, Video, display\nexcept ImportError:\n pass\nelse:\n\n @magics_class\n class ManimMagic(Magics):\n def __init__(self, shell: InteractiveShell) -> None:\n super().__init__(shell)\n self.rendered_files = {}\n\n @needs_local_scope\n @line_cell_magic\n def manim(\n self,\n line: str,\n cell: str = None,\n local_ns: dict[str, Any] = None,\n ) -> None:\n r\"\"\"Render Manim scenes contained in IPython cells.\n Works as a line or cell magic.\n\n .. hint::\n\n This line and cell magic works best when used in a JupyterLab\n environment: while all of the functionality is available for\n classic Jupyter notebooks as well, it is possible that videos\n sometimes don't update on repeated execution of the same cell\n if the scene name stays the same.\n\n This problem does not occur when using JupyterLab.\n\n Please refer to `<https://jupyter.org/>`_ for more information about JupyterLab\n and Jupyter notebooks.\n\n Usage in line mode::\n\n %manim [CLI options] MyAwesomeScene\n\n Usage in cell mode::\n\n %%manim [CLI options] MyAwesomeScene\n\n class MyAweseomeScene(Scene):\n def construct(self):\n ...\n\n Run ``%manim --help`` and ``%manim render --help`` for possible command line interface options.\n\n .. note::\n\n The maximal width of the rendered videos that are displayed in the notebook can be\n configured via the ``media_width`` configuration option. The default is set to ``25vw``,\n which is 25% of your current viewport width. To allow the output to become as large\n as possible, set ``config.media_width = \"100%\"``.\n\n The ``media_embed`` option will embed the image/video output in the notebook. 
This is\n generally undesirable as it makes the notebooks very large, but is required on some\n platforms (notably Google's CoLab, where it is automatically enabled unless suppressed\n by ``config.embed = False``) and needed in cases when the notebook (or converted HTML\n file) will be moved relative to the video locations. Use-cases include building\n documentation with Sphinx and JupyterBook. See also the :mod:`manim directive for Sphinx\n <manim.utils.docbuild.manim_directive>`.\n\n Examples\n --------\n\n First make sure to put ``import manim``, or even ``from manim import *``\n in a cell and evaluate it. Then, a typical Jupyter notebook cell for Manim\n could look as follows::\n\n %%manim -v WARNING --disable_caching -qm BannerExample\n\n config.media_width = \"75%\"\n config.media_embed = True\n\n class BannerExample(Scene):\n def construct(self):\n self.camera.background_color = \"#ece6e2\"\n banner_large = ManimBanner(dark_theme=False).scale(0.7)\n self.play(banner_large.create())\n self.play(banner_large.expand())\n\n Evaluating this cell will render and display the ``BannerExample`` scene defined in the body of the cell.\n\n .. note::\n\n In case you want to hide the red box containing the output progress bar, the ``progress_bar`` config\n option should be set to ``None``. This can also be done by passing ``--progress_bar None`` as a\n CLI flag.\n\n \"\"\"\n if cell:\n exec(cell, local_ns)\n\n args = line.split()\n if not len(args) or \"-h\" in args or \"--help\" in args or \"--version\" in args:\n main(args, standalone_mode=False, prog_name=\"manim\")\n return\n\n modified_args = self.add_additional_args(args)\n args = main(modified_args, standalone_mode=False, prog_name=\"manim\")\n with tempconfig(local_ns.get(\"config\", {})):\n config.digest_args(args)\n\n renderer = None\n if config.renderer == \"opengl\":\n # Check if the imported mobjects extend the OpenGLMobject class\n # meaning ConvertToOpenGL did its job\n if \"OpenGLMobject\" in map(lambda cls: cls.__name__, Group.mro()):\n from manim.renderer.opengl_renderer import OpenGLRenderer\n\n renderer = OpenGLRenderer()\n else:\n logger.warning(\n \"Renderer must be set to OpenGL in the configuration file \"\n \"before importing Manim! 
Using cairo renderer instead.\",\n )\n config.renderer = \"cairo\"\n\n try:\n SceneClass = local_ns[config[\"scene_names\"][0]]\n scene = SceneClass(renderer=renderer)\n scene.render()\n finally:\n # Shader cache becomes invalid as the context is destroyed\n shader_program_cache.clear()\n\n # Close OpenGL window here instead of waiting for the main thread to\n # finish causing the window to stay open and freeze\n if renderer is not None and renderer.window is not None:\n renderer.window.close()\n\n if config[\"output_file\"] is None:\n logger.info(\"No output file produced\")\n return\n\n local_path = Path(config[\"output_file\"]).relative_to(Path.cwd())\n tmpfile = (\n Path(config[\"media_dir\"])\n / \"jupyter\"\n / f\"{_generate_file_name()}{local_path.suffix}\"\n )\n\n if local_path in self.rendered_files:\n self.rendered_files[local_path].unlink()\n self.rendered_files[local_path] = tmpfile\n os.makedirs(tmpfile.parent, exist_ok=True)\n shutil.copy(local_path, tmpfile)\n\n file_type = mimetypes.guess_type(config[\"output_file\"])[0]\n embed = config[\"media_embed\"]\n if embed is None:\n # videos need to be embedded when running in google colab.\n # do this automatically in case config.media_embed has not been\n # set explicitly.\n embed = \"google.colab\" in str(get_ipython())\n\n if file_type.startswith(\"image\"):\n result = Image(filename=config[\"output_file\"], embed=embed)\n else:\n result = Video(\n tmpfile,\n html_attributes=f'controls autoplay loop style=\"max-width: {config[\"media_width\"]};\"',\n embed=embed,\n )\n\n display(result)\n\n def add_additional_args(self, args: list[str]) -> list[str]:\n additional_args = [\"--jupyter\"]\n # Use webm to support transparency\n if \"-t\" in args and \"--format\" not in args:\n additional_args += [\"--format\", \"webm\"]\n return additional_args + args[:-1] + [\"\"] + [args[-1]]\n\n\ndef _generate_file_name() -> str:\n return config[\"scene_names\"][0] + \"@\" + datetime.now().strftime(\"%Y-%m-%d@%H-%M-%S\")\n", "path": "manim/utils/ipython_magic.py"}], "after_files": [{"content": "\"\"\"Utilities for using Manim with IPython (in particular: Jupyter notebooks)\"\"\"\n\nfrom __future__ import annotations\n\nimport mimetypes\nimport os\nimport shutil\nfrom datetime import datetime\nfrom pathlib import Path\nfrom typing import Any\n\nfrom manim import Group, config, logger, tempconfig\nfrom manim.__main__ import main\nfrom manim.renderer.shader import shader_program_cache\n\ntry:\n from IPython import get_ipython\n from IPython.core.interactiveshell import InteractiveShell\n from IPython.core.magic import (\n Magics,\n line_cell_magic,\n magics_class,\n needs_local_scope,\n )\n from IPython.display import Image, Video, display\nexcept ImportError:\n pass\nelse:\n\n @magics_class\n class ManimMagic(Magics):\n def __init__(self, shell: InteractiveShell) -> None:\n super().__init__(shell)\n self.rendered_files = {}\n\n @needs_local_scope\n @line_cell_magic\n def manim(\n self,\n line: str,\n cell: str = None,\n local_ns: dict[str, Any] = None,\n ) -> None:\n r\"\"\"Render Manim scenes contained in IPython cells.\n Works as a line or cell magic.\n\n .. 
hint::\n\n This line and cell magic works best when used in a JupyterLab\n environment: while all of the functionality is available for\n classic Jupyter notebooks as well, it is possible that videos\n sometimes don't update on repeated execution of the same cell\n if the scene name stays the same.\n\n This problem does not occur when using JupyterLab.\n\n Please refer to `<https://jupyter.org/>`_ for more information about JupyterLab\n and Jupyter notebooks.\n\n Usage in line mode::\n\n %manim [CLI options] MyAwesomeScene\n\n Usage in cell mode::\n\n %%manim [CLI options] MyAwesomeScene\n\n class MyAweseomeScene(Scene):\n def construct(self):\n ...\n\n Run ``%manim --help`` and ``%manim render --help`` for possible command line interface options.\n\n .. note::\n\n The maximal width of the rendered videos that are displayed in the notebook can be\n configured via the ``media_width`` configuration option. The default is set to ``25vw``,\n which is 25% of your current viewport width. To allow the output to become as large\n as possible, set ``config.media_width = \"100%\"``.\n\n The ``media_embed`` option will embed the image/video output in the notebook. This is\n generally undesirable as it makes the notebooks very large, but is required on some\n platforms (notably Google's CoLab, where it is automatically enabled unless suppressed\n by ``config.embed = False``) and needed in cases when the notebook (or converted HTML\n file) will be moved relative to the video locations. Use-cases include building\n documentation with Sphinx and JupyterBook. See also the :mod:`manim directive for Sphinx\n <manim.utils.docbuild.manim_directive>`.\n\n Examples\n --------\n\n First make sure to put ``import manim``, or even ``from manim import *``\n in a cell and evaluate it. Then, a typical Jupyter notebook cell for Manim\n could look as follows::\n\n %%manim -v WARNING --disable_caching -qm BannerExample\n\n config.media_width = \"75%\"\n config.media_embed = True\n\n class BannerExample(Scene):\n def construct(self):\n self.camera.background_color = \"#ece6e2\"\n banner_large = ManimBanner(dark_theme=False).scale(0.7)\n self.play(banner_large.create())\n self.play(banner_large.expand())\n\n Evaluating this cell will render and display the ``BannerExample`` scene defined in the body of the cell.\n\n .. note::\n\n In case you want to hide the red box containing the output progress bar, the ``progress_bar`` config\n option should be set to ``None``. This can also be done by passing ``--progress_bar None`` as a\n CLI flag.\n\n \"\"\"\n if cell:\n exec(cell, local_ns)\n\n args = line.split()\n if not len(args) or \"-h\" in args or \"--help\" in args or \"--version\" in args:\n main(args, standalone_mode=False, prog_name=\"manim\")\n return\n\n modified_args = self.add_additional_args(args)\n args = main(modified_args, standalone_mode=False, prog_name=\"manim\")\n with tempconfig(local_ns.get(\"config\", {})):\n config.digest_args(args)\n\n renderer = None\n if config.renderer == \"opengl\":\n # Check if the imported mobjects extend the OpenGLMobject class\n # meaning ConvertToOpenGL did its job\n if \"OpenGLMobject\" in map(lambda cls: cls.__name__, Group.mro()):\n from manim.renderer.opengl_renderer import OpenGLRenderer\n\n renderer = OpenGLRenderer()\n else:\n logger.warning(\n \"Renderer must be set to OpenGL in the configuration file \"\n \"before importing Manim! 
Using cairo renderer instead.\",\n )\n config.renderer = \"cairo\"\n\n try:\n SceneClass = local_ns[config[\"scene_names\"][0]]\n scene = SceneClass(renderer=renderer)\n scene.render()\n finally:\n # Shader cache becomes invalid as the context is destroyed\n shader_program_cache.clear()\n\n # Close OpenGL window here instead of waiting for the main thread to\n # finish causing the window to stay open and freeze\n if renderer is not None and renderer.window is not None:\n renderer.window.close()\n\n if config[\"output_file\"] is None:\n logger.info(\"No output file produced\")\n return\n\n local_path = Path(config[\"output_file\"]).relative_to(Path.cwd())\n tmpfile = (\n Path(config[\"media_dir\"])\n / \"jupyter\"\n / f\"{_generate_file_name()}{local_path.suffix}\"\n )\n\n if local_path in self.rendered_files:\n self.rendered_files[local_path].unlink()\n self.rendered_files[local_path] = tmpfile\n os.makedirs(tmpfile.parent, exist_ok=True)\n shutil.copy(local_path, tmpfile)\n\n file_type = mimetypes.guess_type(config[\"output_file\"])[0]\n embed = config[\"media_embed\"]\n if embed is None:\n # videos need to be embedded when running in google colab.\n # do this automatically in case config.media_embed has not been\n # set explicitly.\n embed = \"google.colab\" in str(get_ipython())\n\n if file_type.startswith(\"image\"):\n result = Image(filename=config[\"output_file\"])\n else:\n result = Video(\n tmpfile,\n html_attributes=f'controls autoplay loop style=\"max-width: {config[\"media_width\"]};\"',\n embed=embed,\n )\n\n display(result)\n\n def add_additional_args(self, args: list[str]) -> list[str]:\n additional_args = [\"--jupyter\"]\n # Use webm to support transparency\n if \"-t\" in args and \"--format\" not in args:\n additional_args += [\"--format\", \"webm\"]\n return additional_args + args[:-1] + [\"\"] + [args[-1]]\n\n\ndef _generate_file_name() -> str:\n return config[\"scene_names\"][0] + \"@\" + datetime.now().strftime(\"%Y-%m-%d@%H-%M-%S\")\n", "path": "manim/utils/ipython_magic.py"}]}
| 2,675 | 119 |
gh_patches_debug_32137 | rasdani/github-patches | git_diff | kartoza__prj.app-391 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cannot add new sponsor
I tried adding new sponsors to the QGIS sponsors list, but got the following error:
500
Ooops. Something broke....
The powers that be have been informed.
In the meantime, please form an orderly queue and head to the home page.
If you need assistance, you may reference this error as 1f96dfdab8c642789dcb40867a97e90d.
I used foreign characters, f.e. "ü" and "é" in the contact names, if that matters.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `django_project/base/models/project.py`
Content:
```
1 # coding=utf-8
2 """Project model used by all apps."""
3 import os
4 import logging
5 import string
6 import re
7 from django.core.urlresolvers import reverse
8 from django.utils.text import slugify
9 from django.conf.global_settings import MEDIA_ROOT
10 from django.db import models
11 from django.utils.translation import ugettext_lazy as _
12 from changes.models.version import Version
13 from core.settings.contrib import STOP_WORDS
14 from django.contrib.auth.models import User
15 from django.conf import settings
16 from django.core.exceptions import ValidationError
17
18 logger = logging.getLogger(__name__)
19
20
21 class ApprovedProjectManager(models.Manager):
22 """Custom project manager that shows only approved records."""
23
24 def get_queryset(self):
25 """Query set generator"""
26 return super(
27 ApprovedProjectManager, self).get_queryset().filter(
28 approved=True)
29
30
31 class UnapprovedProjectManager(models.Manager):
32 """Custom project manager that shows only unapproved records."""
33
34 def get_queryset(self):
35 """Query set generator"""
36 return super(
37 UnapprovedProjectManager, self).get_queryset().filter(
38 approved=False)
39
40
41 class PublicProjectManager(models.Manager):
42 """Custom project manager that shows only public and approved projects."""
43
44 def get_queryset(self):
45 """Query set generator"""
46 return super(
47 PublicProjectManager, self).get_queryset().filter(
48 private=False).filter(approved=True)
49
50
51 def validate_gitter_room_name(value):
52 """Ensure user enter proper gitter room name
53
54 :param value: string input
55 :raises: ValidationError
56 """
57 invalid_chars = set(string.punctuation.replace('/', ''))
58 pattern = re.compile('^(\w+\/\w+)$')
59 if any(char in invalid_chars for char in value) \
60 or not pattern.match(value):
61 raise ValidationError(
62 _('%(value)s is not proper gitter room name'),
63 params={'value': value},
64 )
65
66
67 class Project(models.Model):
68 """A project model e.g. QGIS, InaSAFE etc."""
69 name = models.CharField(
70 help_text=_('Name of this project.'),
71 max_length=255,
72 null=False,
73 blank=False,
74 unique=True)
75
76 description = models.CharField(
77 help_text=_('A description for the project'),
78 max_length=500,
79 blank=True,
80 null=True
81 )
82
83 image_file = models.ImageField(
84 help_text=_('A logo image for this project. '
85 'Most browsers support dragging the image directly on to '
86 'the "Choose File" button above.'),
87 upload_to=os.path.join(MEDIA_ROOT, 'images/projects'),
88 blank=True
89 )
90
91 approved = models.BooleanField(
92 help_text=_('Whether this project has been approved for use yet.'),
93 default=False
94 )
95
96 private = models.BooleanField(
97 help_text=_('Only visible to logged-in users?'),
98 default=False
99 )
100
101 owner = models.ForeignKey(User)
102 slug = models.SlugField(unique=True)
103 objects = models.Manager()
104 approved_objects = ApprovedProjectManager()
105 unapproved_objects = UnapprovedProjectManager()
106 public_objects = PublicProjectManager()
107
108 gitter_room = models.CharField(
109 help_text=_('Gitter room name, e.g. gitterhq/sandbox'),
110 max_length=255,
111 null=True,
112 blank=True,
113 validators=[validate_gitter_room_name]
114 )
115
116 # noinspection PyClassicStyleClass
117 class Meta:
118 """Meta class for project."""
119 app_label = 'base'
120 ordering = ['name']
121
122 def save(self, *args, **kwargs):
123 """Overloaded save method.
124
125 :param args:
126 :param kwargs:
127 """
128 if not self.pk:
129 words = self.name.split()
130 filtered_words = [t for t in words if t.lower() not in STOP_WORDS]
131 new_list = unicode(' '.join(filtered_words))
132 self.slug = slugify(new_list)[:50]
133
134 super(Project, self).save(*args, **kwargs)
135
136 def __unicode__(self):
137 return u'%s' % self.name
138
139 def get_absolute_url(self):
140 """Return URL to project detail page
141
142 :return: URL
143 :rtype: str
144
145 """
146 return reverse('project-detail', kwargs={'slug': self.slug})
147
148 def versions(self):
149 """Get all the versions for this project."""
150 qs = Version.objects.filter(project=self).order_by('-padded_version')
151 return qs
152
153 def latest_versions(self):
154 """Get the latest version.
155
156 How many versions returned is determined by the pagination threshold.
157
158 :returns: List of versions.
159 :rtype: list"""
160 return self.versions()[:settings.PROJECT_VERSION_LIST_SIZE]
161
162 @staticmethod
163 def pagination_threshold(self):
164 """Find out how many versions to list per page.
165
166 :returns: The count of items to show per page as defined in
167 settings.PROJECT_VERSION_LIST_SIZE.
168 :rtype: int
169 """
170 return settings.PROJECT_VERSION_LIST_SIZE
171
172 def pagination_threshold_exceeded(self):
173 """Check if project version count exceeds pagination threshold.
174
175 :returns: Flag indicating if there are more versions than
176 self.threshold.
177 :rtype: bool
178 """
179 if self.versions().count() >= settings.PROJECT_VERSION_LIST_SIZE:
180 return True
181 else:
182 return False
183
```
Path: `django_project/changes/models/sponsor.py`
Content:
```
1 # coding=utf-8
2 import os
3 import pytz
4 import logging
5 from django.core.urlresolvers import reverse
6 from django.utils.text import slugify
7 from core.settings.contrib import STOP_WORDS
8 from django.conf.global_settings import MEDIA_ROOT
9 from django.db import models
10 from django.utils.translation import ugettext_lazy as _
11 from django.contrib.auth.models import User
12 from django_countries.fields import CountryField
13
14 __author__ = 'rischan'
15
16 logger = logging.getLogger(__name__)
17 utc = pytz.UTC
18
19
20 class ApprovedSponsorManager(models.Manager):
21 """Custom sponsor manager that shows only approved records."""
22
23 def get_queryset(self):
24 """Query set generator"""
25 return super(
26 ApprovedSponsorManager, self).get_queryset().filter(
27 approved=True)
28
29
30 class UnapprovedSponsorManager(models.Manager):
31 """Custom sponsor manager that shows only unapproved records."""
32
33 def get_queryset(self):
34 """Query set generator"""
35 return super(
36 UnapprovedSponsorManager, self).get_queryset().filter(
37 approved=False)
38
39
40 # noinspection PyUnresolvedReferences
41 class Sponsor(models.Model):
42 """A sponsor model e.g. gui, backend, web site etc."""
43 name = models.CharField(
44 help_text=_('Name of sponsor.'),
45 max_length=255,
46 null=False,
47 blank=False,
48 unique=False) # there is a unique together rule in meta class below
49
50 sponsor_url = models.CharField(
51 help_text='Input the sponsor URL.',
52 max_length=255,
53 null=True,
54 blank=True)
55
56 contact_person = models.CharField(
57 help_text='Input the contact person of sponsor.',
58 max_length=255,
59 null=True,
60 blank=True)
61
62 address = models.TextField(
63 help_text=(
64 'Enter the complete street address for this sponsor. '
65 'Use line breaks to separate address elements and use '
66 'the country field to specify the country.'
67 ),
68 null=True,
69 blank=True)
70
71 country = CountryField(
72 help_text='Select the country for this sponsor',
73 null=True,
74 blank=True)
75
76 sponsor_email = models.CharField(
77 help_text='Input an email of sponsor.',
78 max_length=255,
79 null=True,
80 blank=True)
81
82 agreement = models.FileField(
83 help_text='Attach sponsor agreement',
84 upload_to=os.path.join(MEDIA_ROOT, 'docs'),
85 blank=True)
86
87 logo = models.ImageField(
88 help_text=(
89 'An image of sponsor logo e.g. a splashscreen. '
90 'Most browsers support dragging the image directly on to the '
91 '"Choose File" button above.'),
92 upload_to=os.path.join(MEDIA_ROOT, 'images/projects'),
93 blank=False)
94
95 approved = models.BooleanField(
96 help_text=_(
97 'Whether this sponsor has been approved for use by the '
98 'project owner.'),
99 default=False
100 )
101
102 author = models.ForeignKey(User)
103 slug = models.SlugField()
104 project = models.ForeignKey('base.Project')
105 objects = models.Manager()
106 approved_objects = ApprovedSponsorManager()
107 unapproved_objects = UnapprovedSponsorManager()
108
109 # noinspection PyClassicStyleClass
110 class Meta:
111 """Meta options for the sponsor class."""
112 unique_together = (
113 ('name', 'project'),
114 ('project', 'slug')
115 )
116 app_label = 'changes'
117 ordering = ['name']
118
119 def save(self, *args, **kwargs):
120 if not self.pk:
121 words = self.name.split()
122 filtered_words = [t for t in words if t.lower() not in STOP_WORDS]
123 new_list = ' '.join(filtered_words)
124 self.slug = slugify(new_list)[:50]
125 super(Sponsor, self).save(*args, **kwargs)
126
127 def __unicode__(self):
128 return u'%s' % (self.name)
129
130 def get_absolute_url(self):
131 return reverse('sponsor-detail', kwargs={
132 'slug': self.slug,
133 'project_slug': self.project.slug
134 })
135
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/django_project/base/models/project.py b/django_project/base/models/project.py
--- a/django_project/base/models/project.py
+++ b/django_project/base/models/project.py
@@ -14,6 +14,7 @@
from django.contrib.auth.models import User
from django.conf import settings
from django.core.exceptions import ValidationError
+from unidecode import unidecode
logger = logging.getLogger(__name__)
@@ -128,7 +129,8 @@
if not self.pk:
words = self.name.split()
filtered_words = [t for t in words if t.lower() not in STOP_WORDS]
- new_list = unicode(' '.join(filtered_words))
+ # unidecode() represents special characters (unicode data) in ASCII
+ new_list = unidecode(' '.join(filtered_words))
self.slug = slugify(new_list)[:50]
super(Project, self).save(*args, **kwargs)
diff --git a/django_project/changes/models/sponsor.py b/django_project/changes/models/sponsor.py
--- a/django_project/changes/models/sponsor.py
+++ b/django_project/changes/models/sponsor.py
@@ -10,6 +10,8 @@
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from django_countries.fields import CountryField
+from unidecode import unidecode
+
__author__ = 'rischan'
@@ -120,7 +122,8 @@
if not self.pk:
words = self.name.split()
filtered_words = [t for t in words if t.lower() not in STOP_WORDS]
- new_list = ' '.join(filtered_words)
+ # unidecode() represents special characters (unicode data) in ASCII
+ new_list = unidecode(' '.join(filtered_words))
self.slug = slugify(new_list)[:50]
super(Sponsor, self).save(*args, **kwargs)
|
{"golden_diff": "diff --git a/django_project/base/models/project.py b/django_project/base/models/project.py\n--- a/django_project/base/models/project.py\n+++ b/django_project/base/models/project.py\n@@ -14,6 +14,7 @@\n from django.contrib.auth.models import User\n from django.conf import settings\n from django.core.exceptions import ValidationError\n+from unidecode import unidecode\n \n logger = logging.getLogger(__name__)\n \n@@ -128,7 +129,8 @@\n if not self.pk:\n words = self.name.split()\n filtered_words = [t for t in words if t.lower() not in STOP_WORDS]\n- new_list = unicode(' '.join(filtered_words))\n+ # unidecode() represents special characters (unicode data) in ASCII\n+ new_list = unidecode(' '.join(filtered_words))\n self.slug = slugify(new_list)[:50]\n \n super(Project, self).save(*args, **kwargs)\ndiff --git a/django_project/changes/models/sponsor.py b/django_project/changes/models/sponsor.py\n--- a/django_project/changes/models/sponsor.py\n+++ b/django_project/changes/models/sponsor.py\n@@ -10,6 +10,8 @@\n from django.utils.translation import ugettext_lazy as _\n from django.contrib.auth.models import User\n from django_countries.fields import CountryField\n+from unidecode import unidecode\n+\n \n __author__ = 'rischan'\n \n@@ -120,7 +122,8 @@\n if not self.pk:\n words = self.name.split()\n filtered_words = [t for t in words if t.lower() not in STOP_WORDS]\n- new_list = ' '.join(filtered_words)\n+ # unidecode() represents special characters (unicode data) in ASCII\n+ new_list = unidecode(' '.join(filtered_words))\n self.slug = slugify(new_list)[:50]\n super(Sponsor, self).save(*args, **kwargs)\n", "issue": "Cannot add new sponsor\nI tried adding new sponsors to the QGIS sponsors list, but got the following error:\n\n500\nOoops. Something broke....\n\nThe powers that be have been informed.\nIn the meantime, please form an orderly queue and head to the home page.\n\nIf you need assistance, you may reference this error as 1f96dfdab8c642789dcb40867a97e90d.\n\nI used foreign characters, f.e. 
\"\u00fc\" and \"\u00e9\" in the contact names, if that matters.\n\n", "before_files": [{"content": "# coding=utf-8\n\"\"\"Project model used by all apps.\"\"\"\nimport os\nimport logging\nimport string\nimport re\nfrom django.core.urlresolvers import reverse\nfrom django.utils.text import slugify\nfrom django.conf.global_settings import MEDIA_ROOT\nfrom django.db import models\nfrom django.utils.translation import ugettext_lazy as _\nfrom changes.models.version import Version\nfrom core.settings.contrib import STOP_WORDS\nfrom django.contrib.auth.models import User\nfrom django.conf import settings\nfrom django.core.exceptions import ValidationError\n\nlogger = logging.getLogger(__name__)\n\n\nclass ApprovedProjectManager(models.Manager):\n \"\"\"Custom project manager that shows only approved records.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n ApprovedProjectManager, self).get_queryset().filter(\n approved=True)\n\n\nclass UnapprovedProjectManager(models.Manager):\n \"\"\"Custom project manager that shows only unapproved records.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n UnapprovedProjectManager, self).get_queryset().filter(\n approved=False)\n\n\nclass PublicProjectManager(models.Manager):\n \"\"\"Custom project manager that shows only public and approved projects.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n PublicProjectManager, self).get_queryset().filter(\n private=False).filter(approved=True)\n\n\ndef validate_gitter_room_name(value):\n \"\"\"Ensure user enter proper gitter room name\n\n :param value: string input\n :raises: ValidationError\n \"\"\"\n invalid_chars = set(string.punctuation.replace('/', ''))\n pattern = re.compile('^(\\w+\\/\\w+)$')\n if any(char in invalid_chars for char in value) \\\n or not pattern.match(value):\n raise ValidationError(\n _('%(value)s is not proper gitter room name'),\n params={'value': value},\n )\n\n\nclass Project(models.Model):\n \"\"\"A project model e.g. QGIS, InaSAFE etc.\"\"\"\n name = models.CharField(\n help_text=_('Name of this project.'),\n max_length=255,\n null=False,\n blank=False,\n unique=True)\n\n description = models.CharField(\n help_text=_('A description for the project'),\n max_length=500,\n blank=True,\n null=True\n )\n\n image_file = models.ImageField(\n help_text=_('A logo image for this project. '\n 'Most browsers support dragging the image directly on to '\n 'the \"Choose File\" button above.'),\n upload_to=os.path.join(MEDIA_ROOT, 'images/projects'),\n blank=True\n )\n\n approved = models.BooleanField(\n help_text=_('Whether this project has been approved for use yet.'),\n default=False\n )\n\n private = models.BooleanField(\n help_text=_('Only visible to logged-in users?'),\n default=False\n )\n\n owner = models.ForeignKey(User)\n slug = models.SlugField(unique=True)\n objects = models.Manager()\n approved_objects = ApprovedProjectManager()\n unapproved_objects = UnapprovedProjectManager()\n public_objects = PublicProjectManager()\n\n gitter_room = models.CharField(\n help_text=_('Gitter room name, e.g. 
gitterhq/sandbox'),\n max_length=255,\n null=True,\n blank=True,\n validators=[validate_gitter_room_name]\n )\n\n # noinspection PyClassicStyleClass\n class Meta:\n \"\"\"Meta class for project.\"\"\"\n app_label = 'base'\n ordering = ['name']\n\n def save(self, *args, **kwargs):\n \"\"\"Overloaded save method.\n\n :param args:\n :param kwargs:\n \"\"\"\n if not self.pk:\n words = self.name.split()\n filtered_words = [t for t in words if t.lower() not in STOP_WORDS]\n new_list = unicode(' '.join(filtered_words))\n self.slug = slugify(new_list)[:50]\n\n super(Project, self).save(*args, **kwargs)\n\n def __unicode__(self):\n return u'%s' % self.name\n\n def get_absolute_url(self):\n \"\"\"Return URL to project detail page\n\n :return: URL\n :rtype: str\n\n \"\"\"\n return reverse('project-detail', kwargs={'slug': self.slug})\n\n def versions(self):\n \"\"\"Get all the versions for this project.\"\"\"\n qs = Version.objects.filter(project=self).order_by('-padded_version')\n return qs\n\n def latest_versions(self):\n \"\"\"Get the latest version.\n\n How many versions returned is determined by the pagination threshold.\n\n :returns: List of versions.\n :rtype: list\"\"\"\n return self.versions()[:settings.PROJECT_VERSION_LIST_SIZE]\n\n @staticmethod\n def pagination_threshold(self):\n \"\"\"Find out how many versions to list per page.\n\n :returns: The count of items to show per page as defined in\n settings.PROJECT_VERSION_LIST_SIZE.\n :rtype: int\n \"\"\"\n return settings.PROJECT_VERSION_LIST_SIZE\n\n def pagination_threshold_exceeded(self):\n \"\"\"Check if project version count exceeds pagination threshold.\n\n :returns: Flag indicating if there are more versions than\n self.threshold.\n :rtype: bool\n \"\"\"\n if self.versions().count() >= settings.PROJECT_VERSION_LIST_SIZE:\n return True\n else:\n return False\n", "path": "django_project/base/models/project.py"}, {"content": "# coding=utf-8\nimport os\nimport pytz\nimport logging\nfrom django.core.urlresolvers import reverse\nfrom django.utils.text import slugify\nfrom core.settings.contrib import STOP_WORDS\nfrom django.conf.global_settings import MEDIA_ROOT\nfrom django.db import models\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.contrib.auth.models import User\nfrom django_countries.fields import CountryField\n\n__author__ = 'rischan'\n\nlogger = logging.getLogger(__name__)\nutc = pytz.UTC\n\n\nclass ApprovedSponsorManager(models.Manager):\n \"\"\"Custom sponsor manager that shows only approved records.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n ApprovedSponsorManager, self).get_queryset().filter(\n approved=True)\n\n\nclass UnapprovedSponsorManager(models.Manager):\n \"\"\"Custom sponsor manager that shows only unapproved records.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n UnapprovedSponsorManager, self).get_queryset().filter(\n approved=False)\n\n\n# noinspection PyUnresolvedReferences\nclass Sponsor(models.Model):\n \"\"\"A sponsor model e.g. 
gui, backend, web site etc.\"\"\"\n name = models.CharField(\n help_text=_('Name of sponsor.'),\n max_length=255,\n null=False,\n blank=False,\n unique=False) # there is a unique together rule in meta class below\n\n sponsor_url = models.CharField(\n help_text='Input the sponsor URL.',\n max_length=255,\n null=True,\n blank=True)\n\n contact_person = models.CharField(\n help_text='Input the contact person of sponsor.',\n max_length=255,\n null=True,\n blank=True)\n\n address = models.TextField(\n help_text=(\n 'Enter the complete street address for this sponsor. '\n 'Use line breaks to separate address elements and use '\n 'the country field to specify the country.'\n ),\n null=True,\n blank=True)\n\n country = CountryField(\n help_text='Select the country for this sponsor',\n null=True,\n blank=True)\n\n sponsor_email = models.CharField(\n help_text='Input an email of sponsor.',\n max_length=255,\n null=True,\n blank=True)\n\n agreement = models.FileField(\n help_text='Attach sponsor agreement',\n upload_to=os.path.join(MEDIA_ROOT, 'docs'),\n blank=True)\n\n logo = models.ImageField(\n help_text=(\n 'An image of sponsor logo e.g. a splashscreen. '\n 'Most browsers support dragging the image directly on to the '\n '\"Choose File\" button above.'),\n upload_to=os.path.join(MEDIA_ROOT, 'images/projects'),\n blank=False)\n\n approved = models.BooleanField(\n help_text=_(\n 'Whether this sponsor has been approved for use by the '\n 'project owner.'),\n default=False\n )\n\n author = models.ForeignKey(User)\n slug = models.SlugField()\n project = models.ForeignKey('base.Project')\n objects = models.Manager()\n approved_objects = ApprovedSponsorManager()\n unapproved_objects = UnapprovedSponsorManager()\n\n # noinspection PyClassicStyleClass\n class Meta:\n \"\"\"Meta options for the sponsor class.\"\"\"\n unique_together = (\n ('name', 'project'),\n ('project', 'slug')\n )\n app_label = 'changes'\n ordering = ['name']\n\n def save(self, *args, **kwargs):\n if not self.pk:\n words = self.name.split()\n filtered_words = [t for t in words if t.lower() not in STOP_WORDS]\n new_list = ' '.join(filtered_words)\n self.slug = slugify(new_list)[:50]\n super(Sponsor, self).save(*args, **kwargs)\n\n def __unicode__(self):\n return u'%s' % (self.name)\n\n def get_absolute_url(self):\n return reverse('sponsor-detail', kwargs={\n 'slug': self.slug,\n 'project_slug': self.project.slug\n })\n", "path": "django_project/changes/models/sponsor.py"}], "after_files": [{"content": "# coding=utf-8\n\"\"\"Project model used by all apps.\"\"\"\nimport os\nimport logging\nimport string\nimport re\nfrom django.core.urlresolvers import reverse\nfrom django.utils.text import slugify\nfrom django.conf.global_settings import MEDIA_ROOT\nfrom django.db import models\nfrom django.utils.translation import ugettext_lazy as _\nfrom changes.models.version import Version\nfrom core.settings.contrib import STOP_WORDS\nfrom django.contrib.auth.models import User\nfrom django.conf import settings\nfrom django.core.exceptions import ValidationError\nfrom unidecode import unidecode\n\nlogger = logging.getLogger(__name__)\n\n\nclass ApprovedProjectManager(models.Manager):\n \"\"\"Custom project manager that shows only approved records.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n ApprovedProjectManager, self).get_queryset().filter(\n approved=True)\n\n\nclass UnapprovedProjectManager(models.Manager):\n \"\"\"Custom project manager that shows only unapproved records.\"\"\"\n\n def 
get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n UnapprovedProjectManager, self).get_queryset().filter(\n approved=False)\n\n\nclass PublicProjectManager(models.Manager):\n \"\"\"Custom project manager that shows only public and approved projects.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n PublicProjectManager, self).get_queryset().filter(\n private=False).filter(approved=True)\n\n\ndef validate_gitter_room_name(value):\n \"\"\"Ensure user enter proper gitter room name\n\n :param value: string input\n :raises: ValidationError\n \"\"\"\n invalid_chars = set(string.punctuation.replace('/', ''))\n pattern = re.compile('^(\\w+\\/\\w+)$')\n if any(char in invalid_chars for char in value) \\\n or not pattern.match(value):\n raise ValidationError(\n _('%(value)s is not proper gitter room name'),\n params={'value': value},\n )\n\n\nclass Project(models.Model):\n \"\"\"A project model e.g. QGIS, InaSAFE etc.\"\"\"\n name = models.CharField(\n help_text=_('Name of this project.'),\n max_length=255,\n null=False,\n blank=False,\n unique=True)\n\n description = models.CharField(\n help_text=_('A description for the project'),\n max_length=500,\n blank=True,\n null=True\n )\n\n image_file = models.ImageField(\n help_text=_('A logo image for this project. '\n 'Most browsers support dragging the image directly on to '\n 'the \"Choose File\" button above.'),\n upload_to=os.path.join(MEDIA_ROOT, 'images/projects'),\n blank=True\n )\n\n approved = models.BooleanField(\n help_text=_('Whether this project has been approved for use yet.'),\n default=False\n )\n\n private = models.BooleanField(\n help_text=_('Only visible to logged-in users?'),\n default=False\n )\n\n owner = models.ForeignKey(User)\n slug = models.SlugField(unique=True)\n objects = models.Manager()\n approved_objects = ApprovedProjectManager()\n unapproved_objects = UnapprovedProjectManager()\n public_objects = PublicProjectManager()\n\n gitter_room = models.CharField(\n help_text=_('Gitter room name, e.g. 
gitterhq/sandbox'),\n max_length=255,\n null=True,\n blank=True,\n validators=[validate_gitter_room_name]\n )\n\n # noinspection PyClassicStyleClass\n class Meta:\n \"\"\"Meta class for project.\"\"\"\n app_label = 'base'\n ordering = ['name']\n\n def save(self, *args, **kwargs):\n \"\"\"Overloaded save method.\n\n :param args:\n :param kwargs:\n \"\"\"\n if not self.pk:\n words = self.name.split()\n filtered_words = [t for t in words if t.lower() not in STOP_WORDS]\n # unidecode() represents special characters (unicode data) in ASCII\n new_list = unidecode(' '.join(filtered_words))\n self.slug = slugify(new_list)[:50]\n\n super(Project, self).save(*args, **kwargs)\n\n def __unicode__(self):\n return u'%s' % self.name\n\n def get_absolute_url(self):\n \"\"\"Return URL to project detail page\n\n :return: URL\n :rtype: str\n\n \"\"\"\n return reverse('project-detail', kwargs={'slug': self.slug})\n\n def versions(self):\n \"\"\"Get all the versions for this project.\"\"\"\n qs = Version.objects.filter(project=self).order_by('-padded_version')\n return qs\n\n def latest_versions(self):\n \"\"\"Get the latest version.\n\n How many versions returned is determined by the pagination threshold.\n\n :returns: List of versions.\n :rtype: list\"\"\"\n return self.versions()[:settings.PROJECT_VERSION_LIST_SIZE]\n\n @staticmethod\n def pagination_threshold(self):\n \"\"\"Find out how many versions to list per page.\n\n :returns: The count of items to show per page as defined in\n settings.PROJECT_VERSION_LIST_SIZE.\n :rtype: int\n \"\"\"\n return settings.PROJECT_VERSION_LIST_SIZE\n\n def pagination_threshold_exceeded(self):\n \"\"\"Check if project version count exceeds pagination threshold.\n\n :returns: Flag indicating if there are more versions than\n self.threshold.\n :rtype: bool\n \"\"\"\n if self.versions().count() >= settings.PROJECT_VERSION_LIST_SIZE:\n return True\n else:\n return False\n", "path": "django_project/base/models/project.py"}, {"content": "# coding=utf-8\nimport os\nimport pytz\nimport logging\nfrom django.core.urlresolvers import reverse\nfrom django.utils.text import slugify\nfrom core.settings.contrib import STOP_WORDS\nfrom django.conf.global_settings import MEDIA_ROOT\nfrom django.db import models\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.contrib.auth.models import User\nfrom django_countries.fields import CountryField\nfrom unidecode import unidecode\n\n\n__author__ = 'rischan'\n\nlogger = logging.getLogger(__name__)\nutc = pytz.UTC\n\n\nclass ApprovedSponsorManager(models.Manager):\n \"\"\"Custom sponsor manager that shows only approved records.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n ApprovedSponsorManager, self).get_queryset().filter(\n approved=True)\n\n\nclass UnapprovedSponsorManager(models.Manager):\n \"\"\"Custom sponsor manager that shows only unapproved records.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n UnapprovedSponsorManager, self).get_queryset().filter(\n approved=False)\n\n\n# noinspection PyUnresolvedReferences\nclass Sponsor(models.Model):\n \"\"\"A sponsor model e.g. 
gui, backend, web site etc.\"\"\"\n name = models.CharField(\n help_text=_('Name of sponsor.'),\n max_length=255,\n null=False,\n blank=False,\n unique=False) # there is a unique together rule in meta class below\n\n sponsor_url = models.CharField(\n help_text='Input the sponsor URL.',\n max_length=255,\n null=True,\n blank=True)\n\n contact_person = models.CharField(\n help_text='Input the contact person of sponsor.',\n max_length=255,\n null=True,\n blank=True)\n\n address = models.TextField(\n help_text=(\n 'Enter the complete street address for this sponsor. '\n 'Use line breaks to separate address elements and use '\n 'the country field to specify the country.'\n ),\n null=True,\n blank=True)\n\n country = CountryField(\n help_text='Select the country for this sponsor',\n null=True,\n blank=True)\n\n sponsor_email = models.CharField(\n help_text='Input an email of sponsor.',\n max_length=255,\n null=True,\n blank=True)\n\n agreement = models.FileField(\n help_text='Attach sponsor agreement',\n upload_to=os.path.join(MEDIA_ROOT, 'docs'),\n blank=True)\n\n logo = models.ImageField(\n help_text=(\n 'An image of sponsor logo e.g. a splashscreen. '\n 'Most browsers support dragging the image directly on to the '\n '\"Choose File\" button above.'),\n upload_to=os.path.join(MEDIA_ROOT, 'images/projects'),\n blank=False)\n\n approved = models.BooleanField(\n help_text=_(\n 'Whether this sponsor has been approved for use by the '\n 'project owner.'),\n default=False\n )\n\n author = models.ForeignKey(User)\n slug = models.SlugField()\n project = models.ForeignKey('base.Project')\n objects = models.Manager()\n approved_objects = ApprovedSponsorManager()\n unapproved_objects = UnapprovedSponsorManager()\n\n # noinspection PyClassicStyleClass\n class Meta:\n \"\"\"Meta options for the sponsor class.\"\"\"\n unique_together = (\n ('name', 'project'),\n ('project', 'slug')\n )\n app_label = 'changes'\n ordering = ['name']\n\n def save(self, *args, **kwargs):\n if not self.pk:\n words = self.name.split()\n filtered_words = [t for t in words if t.lower() not in STOP_WORDS]\n # unidecode() represents special characters (unicode data) in ASCII\n new_list = unidecode(' '.join(filtered_words))\n self.slug = slugify(new_list)[:50]\n super(Sponsor, self).save(*args, **kwargs)\n\n def __unicode__(self):\n return u'%s' % (self.name)\n\n def get_absolute_url(self):\n return reverse('sponsor-detail', kwargs={\n 'slug': self.slug,\n 'project_slug': self.project.slug\n })\n", "path": "django_project/changes/models/sponsor.py"}]}
| 3,130 | 427 |
| gh_patches_debug_20609 | rasdani/github-patches | git_diff | svthalia__concrexit-3484 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Merchandise page redesign
### Is your feature request related to a problem? Please describe.
The current merchandise page is a long list with lot's of text. Part of this problem is the board not hiding the merchandise items that are sold out, but I think some other layout than a list would help to improve the look.
### Describe the solution you'd like
View more images side by side, and make the images larger. The text is not very important for the merch at all, so it can be pushed to the background.
### Motivation
The board is getting new merch and would like the page to look better to get people interested in the merch.
### Describe alternatives you've considered
Keep the page as is, because people will buy merch anyway through whatsapp promotion etc.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/merchandise/urls.py`
Content:
```
1 """Defines the routes provided in this package."""
2 from django.urls import include, path
3
4 from . import views
5
6 #: the name of the application
7 app_name = "merchandise"
8
9 #: the urls provided by this package
10 urlpatterns = [
11 path(
12 "association/merchandise/",
13 include(
14 [
15 path("", views.index, name="index"),
16 ]
17 ),
18 )
19 ]
20
```
Path: `website/merchandise/views.py`
Content:
```
1 """The views for the merchandise package."""
2 from django.shortcuts import render
3
4 from merchandise.models import MerchandiseItem
5
6
7 def index(request):
8 """Render the index view.
9
10 :param request: the request object
11 :return: the response
12 """
13 items = MerchandiseItem.objects.all()
14
15 return render(request, "merchandise/index.html", {"items": items})
16
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/website/merchandise/urls.py b/website/merchandise/urls.py
--- a/website/merchandise/urls.py
+++ b/website/merchandise/urls.py
@@ -15,5 +15,6 @@
path("", views.index, name="index"),
]
),
- )
+ ),
+ path("association/merchandise/<int:id>/", views.product_page, name="product"),
]
diff --git a/website/merchandise/views.py b/website/merchandise/views.py
--- a/website/merchandise/views.py
+++ b/website/merchandise/views.py
@@ -1,4 +1,5 @@
"""The views for the merchandise package."""
+from django.http import Http404
from django.shortcuts import render
from merchandise.models import MerchandiseItem
@@ -13,3 +14,14 @@
items = MerchandiseItem.objects.all()
return render(request, "merchandise/index.html", {"items": items})
+
+
+def product_page(request, id):
+ try:
+ product = MerchandiseItem.objects.get(pk=id)
+ except MerchandiseItem.DoesNotExist:
+ raise Http404(
+ "This item may not exists, or is removed. Please check if the link is correct!"
+ )
+
+ return render(request, "merchandise/product_page.html", {"product": product})
|
{"golden_diff": "diff --git a/website/merchandise/urls.py b/website/merchandise/urls.py\n--- a/website/merchandise/urls.py\n+++ b/website/merchandise/urls.py\n@@ -15,5 +15,6 @@\n path(\"\", views.index, name=\"index\"),\n ]\n ),\n- )\n+ ),\n+ path(\"association/merchandise/<int:id>/\", views.product_page, name=\"product\"),\n ]\ndiff --git a/website/merchandise/views.py b/website/merchandise/views.py\n--- a/website/merchandise/views.py\n+++ b/website/merchandise/views.py\n@@ -1,4 +1,5 @@\n \"\"\"The views for the merchandise package.\"\"\"\n+from django.http import Http404\n from django.shortcuts import render\n \n from merchandise.models import MerchandiseItem\n@@ -13,3 +14,14 @@\n items = MerchandiseItem.objects.all()\n \n return render(request, \"merchandise/index.html\", {\"items\": items})\n+\n+\n+def product_page(request, id):\n+ try:\n+ product = MerchandiseItem.objects.get(pk=id)\n+ except MerchandiseItem.DoesNotExist:\n+ raise Http404(\n+ \"This item may not exists, or is removed. Please check if the link is correct!\"\n+ )\n+\n+ return render(request, \"merchandise/product_page.html\", {\"product\": product})\n", "issue": "Merchandise page redesign\n### Is your feature request related to a problem? Please describe.\r\n\r\nThe current merchandise page is a long list with lot's of text. Part of this problem is the board not hiding the merchandise items that are sold out, but I think some other layout than a list would help to improve the look.\r\n\r\n### Describe the solution you'd like\r\n\r\nView more images side by side, and make the images larger. The text is not very important for the merch at all, so it can be pushed to the background.\r\n\r\n### Motivation\r\n\r\nThe board is getting new merch and would like the page to look better to get people interested in the merch.\r\n\r\n### Describe alternatives you've considered\r\n\r\nKeep the page as is, because people will buy merch anyway through whatsapp promotion etc.\r\n\n", "before_files": [{"content": "\"\"\"Defines the routes provided in this package.\"\"\"\nfrom django.urls import include, path\n\nfrom . import views\n\n#: the name of the application\napp_name = \"merchandise\"\n\n#: the urls provided by this package\nurlpatterns = [\n path(\n \"association/merchandise/\",\n include(\n [\n path(\"\", views.index, name=\"index\"),\n ]\n ),\n )\n]\n", "path": "website/merchandise/urls.py"}, {"content": "\"\"\"The views for the merchandise package.\"\"\"\nfrom django.shortcuts import render\n\nfrom merchandise.models import MerchandiseItem\n\n\ndef index(request):\n \"\"\"Render the index view.\n\n :param request: the request object\n :return: the response\n \"\"\"\n items = MerchandiseItem.objects.all()\n\n return render(request, \"merchandise/index.html\", {\"items\": items})\n", "path": "website/merchandise/views.py"}], "after_files": [{"content": "\"\"\"Defines the routes provided in this package.\"\"\"\nfrom django.urls import include, path\n\nfrom . 
import views\n\n#: the name of the application\napp_name = \"merchandise\"\n\n#: the urls provided by this package\nurlpatterns = [\n path(\n \"association/merchandise/\",\n include(\n [\n path(\"\", views.index, name=\"index\"),\n ]\n ),\n ),\n path(\"association/merchandise/<int:id>/\", views.product_page, name=\"product\"),\n]\n", "path": "website/merchandise/urls.py"}, {"content": "\"\"\"The views for the merchandise package.\"\"\"\nfrom django.http import Http404\nfrom django.shortcuts import render\n\nfrom merchandise.models import MerchandiseItem\n\n\ndef index(request):\n \"\"\"Render the index view.\n\n :param request: the request object\n :return: the response\n \"\"\"\n items = MerchandiseItem.objects.all()\n\n return render(request, \"merchandise/index.html\", {\"items\": items})\n\n\ndef product_page(request, id):\n try:\n product = MerchandiseItem.objects.get(pk=id)\n except MerchandiseItem.DoesNotExist:\n raise Http404(\n \"This item may not exists, or is removed. Please check if the link is correct!\"\n )\n\n return render(request, \"merchandise/product_page.html\", {\"product\": product})\n", "path": "website/merchandise/views.py"}]}
| 660 | 315 |
| gh_patches_debug_11616 | rasdani/github-patches | git_diff | dbt-labs__dbt-core-7320 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[CT-2391] [Bug] Following the Python API example.py example raises AttributeError
### Is this a new bug in dbt-core?
- [X] I believe this is a new bug in dbt-core
- [X] I have searched the existing issues, and I could not find an existing issue for this bug
### Current Behavior
I'm trying to work with the python APIs in dbt version 1.5, per the given example.py and hitting some error
`AttributeError: 'Namespace' object has no attribute 'MACRO_DEBUGGING'`
### Expected Behavior
Not throw AttributeError, profile getting created properly
### Steps To Reproduce
1. run these lines (project_dir is a path to a dbt project)
```python
from dbt.cli.main import dbtRunner
from dbt.config.runtime import load_profile, load_project
project_dir = "/Users/roitabach/workspace/small_fix/dbt-data-reliability/integration_tests/"
profile = load_profile(project_dir, {}, target_override="snowflake")
```
### Relevant log output
```shell
This exception is raised:
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
Cell In[16], line 1
----> 1 profile = load_profile(project_dir, cli_vars={"MACRO_DEBUGGING":False},target_override="snowflake")
File ~/workspace/env/elementary/venv/lib/python3.10/site-packages/dbt/config/runtime.py:68, in load_profile(project_root, cli_vars, profile_name_override, target_override, threads_override)
66 raw_profile_name = raw_project.get("profile")
67 profile_renderer = ProfileRenderer(cli_vars)
---> 68 profile_name = profile_renderer.render_value(raw_profile_name)
69 profile = Profile.render(
70 profile_renderer, profile_name, profile_name_override, target_override, threads_override
71 )
72 # Save env_vars encountered in rendering for partial parsing
File ~/workspace/env/elementary/venv/lib/python3.10/site-packages/dbt/config/renderer.py:185, in SecretRenderer.render_value(self, value, keypath)
181 def render_value(self, value: Any, keypath: Optional[Keypath] = None) -> Any:
182 # First, standard Jinja rendering, with special handling for 'secret' environment variables
183 # "{{ env_var('DBT_SECRET_ENV_VAR') }}" -> "$$$DBT_SECRET_START$$$DBT_SECRET_ENV_{VARIABLE_NAME}$$$DBT_SECRET_END$$$"
184 # This prevents Jinja manipulation of secrets via macros/filters that might leak partial/modified values in logs
--> 185 rendered = super().render_value(value, keypath)
186 # Now, detect instances of the placeholder value ($$$DBT_SECRET_START...DBT_SECRET_END$$$)
187 # and replace them with the actual secret value
188 if SECRET_ENV_PREFIX in str(rendered):
File ~/workspace/env/elementary/venv/lib/python3.10/site-packages/dbt/config/renderer.py:42, in BaseRenderer.render_value(self, value, keypath)
40 try:
41 with catch_jinja():
---> 42 return get_rendered(value, self.context, native=True)
43 except CompilationError as exc:
44 msg = f"Could not render {value}: {exc.msg}"
File ~/workspace/env/elementary/venv/lib/python3.10/site-packages/dbt/clients/jinja.py:584, in get_rendered(string, ctx, node, capture_macros, native)
582 if not native and isinstance(string, str) and _HAS_RENDER_CHARS_PAT.search(string) is None:
583 return string
--> 584 template = get_template(
585 string,
586 ctx,
587 node,
588 capture_macros=capture_macros,
589 native=native,
590 )
591 return render_template(template, ctx, node)
File ~/workspace/env/elementary/venv/lib/python3.10/site-packages/dbt/clients/jinja.py:541, in get_template(string, ctx, node, capture_macros, native)
538 env = get_environment(node, capture_macros, native=native)
540 template_source = str(string)
--> 541 return env.from_string(template_source, globals=ctx)
File ~/workspace/env/elementary/venv/lib/python3.10/site-packages/Jinja2-3.1.2-py3.10.egg/jinja2/environment.py:1105, in Environment.from_string(self, source, globals, template_class)
1103 gs = self.make_globals(globals)
1104 cls = template_class or self.template_class
-> 1105 return cls.from_code(self, self.compile(source), gs, None)
File ~/workspace/env/elementary/venv/lib/python3.10/site-packages/Jinja2-3.1.2-py3.10.egg/jinja2/environment.py:766, in Environment.compile(self, source, name, filename, raw, defer_init)
764 if filename is None:
765 filename = "<template>"
--> 766 return self._compile(source, filename)
767 except TemplateSyntaxError:
768 self.handle_exception(source=source_hint)
File ~/workspace/env/elementary/venv/lib/python3.10/site-packages/dbt/clients/jinja.py:102, in MacroFuzzEnvironment._compile(self, source, filename)
94 def _compile(self, source, filename):
95 """Override jinja's compilation to stash the rendered source inside
96 the python linecache for debugging when the appropriate environment
97 variable is set.
(...)
100 WARNING: This can write a ton of data if you aren't careful.
101 """
--> 102 macro_debugging = get_flags().MACRO_DEBUGGING
103 if filename == "<template>" and macro_debugging:
104 write = macro_debugging == "write"
AttributeError: 'Namespace' object has no attribute 'MACRO_DEBUGGING'
```
### Environment
```markdown
- OS: MacOS Monterey 12.5.1 (21G83)
- Python: 3.11.2
- dbt-core: 1.5.0-b5
```
### Which database adapter are you using with dbt?
snowflake
### Additional Context
I tried to comment out the `if` block just to see where I get- got another argparse error-
```
AttributeError: 'Namespace' object has no attribute 'PROFILES_DIR'
```
from this row in `.../site-packages/dbt/config/profile.py`
```
--> 434 raw_profiles = read_profile(flags.PROFILES_DIR)
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/dbt/cli/example.py`
Content:
```
1 from dbt.cli.main import dbtRunner
2 from dbt.config.runtime import load_profile, load_project
3
4 if __name__ == "__main__":
5 project_dir = "/Users/chenyuli/git/jaffle_shop"
6 cli_args = ["run", "--project-dir", project_dir]
7
8 # initialize the dbt runner
9 dbt = dbtRunner()
10 # run the command
11 res = dbt.invoke(cli_args)
12
13 # preload profile and project
14 profile = load_profile(project_dir, {}, "testing-postgres")
15 project = load_project(project_dir, False, profile, {})
16
17 # initialize the runner with pre-loaded profile and project, you can also pass in a preloaded manifest
18 dbt = dbtRunner(profile=profile, project=project)
19 # run the command, this will use the pre-loaded profile and project instead of loading
20 res = dbt.invoke(cli_args)
21
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/core/dbt/cli/example.py b/core/dbt/cli/example.py
deleted file mode 100644
--- a/core/dbt/cli/example.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from dbt.cli.main import dbtRunner
-from dbt.config.runtime import load_profile, load_project
-
-if __name__ == "__main__":
- project_dir = "/Users/chenyuli/git/jaffle_shop"
- cli_args = ["run", "--project-dir", project_dir]
-
- # initialize the dbt runner
- dbt = dbtRunner()
- # run the command
- res = dbt.invoke(cli_args)
-
- # preload profile and project
- profile = load_profile(project_dir, {}, "testing-postgres")
- project = load_project(project_dir, False, profile, {})
-
- # initialize the runner with pre-loaded profile and project, you can also pass in a preloaded manifest
- dbt = dbtRunner(profile=profile, project=project)
- # run the command, this will use the pre-loaded profile and project instead of loading
- res = dbt.invoke(cli_args)
|
{"golden_diff": "diff --git a/core/dbt/cli/example.py b/core/dbt/cli/example.py\ndeleted file mode 100644\n--- a/core/dbt/cli/example.py\n+++ /dev/null\n@@ -1,20 +0,0 @@\n-from dbt.cli.main import dbtRunner\n-from dbt.config.runtime import load_profile, load_project\n-\n-if __name__ == \"__main__\":\n- project_dir = \"/Users/chenyuli/git/jaffle_shop\"\n- cli_args = [\"run\", \"--project-dir\", project_dir]\n-\n- # initialize the dbt runner\n- dbt = dbtRunner()\n- # run the command\n- res = dbt.invoke(cli_args)\n-\n- # preload profile and project\n- profile = load_profile(project_dir, {}, \"testing-postgres\")\n- project = load_project(project_dir, False, profile, {})\n-\n- # initialize the runner with pre-loaded profile and project, you can also pass in a preloaded manifest\n- dbt = dbtRunner(profile=profile, project=project)\n- # run the command, this will use the pre-loaded profile and project instead of loading\n- res = dbt.invoke(cli_args)\n", "issue": "[CT-2391] [Bug] Following the Python API example.py example raises AttributeError\n### Is this a new bug in dbt-core?\r\n\r\n- [X] I believe this is a new bug in dbt-core\r\n- [X] I have searched the existing issues, and I could not find an existing issue for this bug\r\n\r\n### Current Behavior\r\n\r\n I'm trying to work with the python APIs in dbt version 1.5, per the given example.py and hitting some error\r\n\r\n`AttributeError: 'Namespace' object has no attribute 'MACRO_DEBUGGING'`\r\n\r\n\r\n### Expected Behavior\r\n\r\nNot throw AttributeError, profile getting created properly\r\n\r\n### Steps To Reproduce\r\n\r\n1. run these lines (project_dir is a path to a dbt project)\r\n```python\r\nfrom dbt.cli.main import dbtRunner\r\nfrom dbt.config.runtime import load_profile, load_project\r\nproject_dir = \"/Users/roitabach/workspace/small_fix/dbt-data-reliability/integration_tests/\"\r\nprofile = load_profile(project_dir, {}, target_override=\"snowflake\")\r\n```\r\n\r\n\r\n### Relevant log output\r\n\r\n```shell\r\nThis exception is raised:\r\n\r\n---------------------------------------------------------------------------\r\nAttributeError Traceback (most recent call last)\r\nCell In[16], line 1\r\n----> 1 profile = load_profile(project_dir, cli_vars={\"MACRO_DEBUGGING\":False},target_override=\"snowflake\")\r\n\r\nFile ~/workspace/env/elementary/venv/lib/python3.10/site-packages/dbt/config/runtime.py:68, in load_profile(project_root, cli_vars, profile_name_override, target_override, threads_override)\r\n 66 raw_profile_name = raw_project.get(\"profile\")\r\n 67 profile_renderer = ProfileRenderer(cli_vars)\r\n---> 68 profile_name = profile_renderer.render_value(raw_profile_name)\r\n 69 profile = Profile.render(\r\n 70 profile_renderer, profile_name, profile_name_override, target_override, threads_override\r\n 71 )\r\n 72 # Save env_vars encountered in rendering for partial parsing\r\n\r\nFile ~/workspace/env/elementary/venv/lib/python3.10/site-packages/dbt/config/renderer.py:185, in SecretRenderer.render_value(self, value, keypath)\r\n 181 def render_value(self, value: Any, keypath: Optional[Keypath] = None) -> Any:\r\n 182 # First, standard Jinja rendering, with special handling for 'secret' environment variables\r\n 183 # \"{{ env_var('DBT_SECRET_ENV_VAR') }}\" -> \"$$$DBT_SECRET_START$$$DBT_SECRET_ENV_{VARIABLE_NAME}$$$DBT_SECRET_END$$$\"\r\n 184 # This prevents Jinja manipulation of secrets via macros/filters that might leak partial/modified values in logs\r\n--> 185 rendered = super().render_value(value, keypath)\r\n 186 # Now, 
detect instances of the placeholder value ($$$DBT_SECRET_START...DBT_SECRET_END$$$)\r\n 187 # and replace them with the actual secret value\r\n 188 if SECRET_ENV_PREFIX in str(rendered):\r\n\r\nFile ~/workspace/env/elementary/venv/lib/python3.10/site-packages/dbt/config/renderer.py:42, in BaseRenderer.render_value(self, value, keypath)\r\n 40 try:\r\n 41 with catch_jinja():\r\n---> 42 return get_rendered(value, self.context, native=True)\r\n 43 except CompilationError as exc:\r\n 44 msg = f\"Could not render {value}: {exc.msg}\"\r\n\r\nFile ~/workspace/env/elementary/venv/lib/python3.10/site-packages/dbt/clients/jinja.py:584, in get_rendered(string, ctx, node, capture_macros, native)\r\n 582 if not native and isinstance(string, str) and _HAS_RENDER_CHARS_PAT.search(string) is None:\r\n 583 return string\r\n--> 584 template = get_template(\r\n 585 string,\r\n 586 ctx,\r\n 587 node,\r\n 588 capture_macros=capture_macros,\r\n 589 native=native,\r\n 590 )\r\n 591 return render_template(template, ctx, node)\r\n\r\nFile ~/workspace/env/elementary/venv/lib/python3.10/site-packages/dbt/clients/jinja.py:541, in get_template(string, ctx, node, capture_macros, native)\r\n 538 env = get_environment(node, capture_macros, native=native)\r\n 540 template_source = str(string)\r\n--> 541 return env.from_string(template_source, globals=ctx)\r\n\r\nFile ~/workspace/env/elementary/venv/lib/python3.10/site-packages/Jinja2-3.1.2-py3.10.egg/jinja2/environment.py:1105, in Environment.from_string(self, source, globals, template_class)\r\n 1103 gs = self.make_globals(globals)\r\n 1104 cls = template_class or self.template_class\r\n-> 1105 return cls.from_code(self, self.compile(source), gs, None)\r\n\r\nFile ~/workspace/env/elementary/venv/lib/python3.10/site-packages/Jinja2-3.1.2-py3.10.egg/jinja2/environment.py:766, in Environment.compile(self, source, name, filename, raw, defer_init)\r\n 764 if filename is None:\r\n 765 filename = \"<template>\"\r\n--> 766 return self._compile(source, filename)\r\n 767 except TemplateSyntaxError:\r\n 768 self.handle_exception(source=source_hint)\r\n\r\nFile ~/workspace/env/elementary/venv/lib/python3.10/site-packages/dbt/clients/jinja.py:102, in MacroFuzzEnvironment._compile(self, source, filename)\r\n 94 def _compile(self, source, filename):\r\n 95 \"\"\"Override jinja's compilation to stash the rendered source inside\r\n 96 the python linecache for debugging when the appropriate environment\r\n 97 variable is set.\r\n (...)\r\n 100 WARNING: This can write a ton of data if you aren't careful.\r\n 101 \"\"\"\r\n--> 102 macro_debugging = get_flags().MACRO_DEBUGGING\r\n 103 if filename == \"<template>\" and macro_debugging:\r\n 104 write = macro_debugging == \"write\"\r\n\r\nAttributeError: 'Namespace' object has no attribute 'MACRO_DEBUGGING'\r\n```\r\n\r\n### Environment\r\n\r\n```markdown\r\n- OS: MacOS Monterey 12.5.1 (21G83)\r\n- Python: 3.11.2\r\n- dbt-core: 1.5.0-b5\r\n```\r\n\r\n### Which database adapter are you using with dbt?\r\n\r\nsnowflake\r\n\r\n### Additional Context\r\n\r\nI tried to comment out the `if` block just to see where I get- got another argparse error- \r\n```\r\nAttributeError: 'Namespace' object has no attribute 'PROFILES_DIR'\r\n```\r\nfrom this row in `.../site-packages/dbt/config/profile.py`\r\n```\r\n--> 434 raw_profiles = read_profile(flags.PROFILES_DIR)\r\n```\n", "before_files": [{"content": "from dbt.cli.main import dbtRunner\nfrom dbt.config.runtime import load_profile, load_project\n\nif __name__ == \"__main__\":\n project_dir = 
\"/Users/chenyuli/git/jaffle_shop\"\n cli_args = [\"run\", \"--project-dir\", project_dir]\n\n # initialize the dbt runner\n dbt = dbtRunner()\n # run the command\n res = dbt.invoke(cli_args)\n\n # preload profile and project\n profile = load_profile(project_dir, {}, \"testing-postgres\")\n project = load_project(project_dir, False, profile, {})\n\n # initialize the runner with pre-loaded profile and project, you can also pass in a preloaded manifest\n dbt = dbtRunner(profile=profile, project=project)\n # run the command, this will use the pre-loaded profile and project instead of loading\n res = dbt.invoke(cli_args)\n", "path": "core/dbt/cli/example.py"}], "after_files": [{"content": null, "path": "core/dbt/cli/example.py"}]}
| 2,072 | 260 |
| gh_patches_debug_59564 | rasdani/github-patches | git_diff | saulpw__visidata-1921 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Pasting from Clipboard in Windows 10
**Small description**
I cannot get any results pasted from the clipboard onto a visidata cell, selected cell or range of cells.
On the most basic test:
- Create new sheet
- Create 1 cell
- I copy a short string from another app to the clipboard
- trying:
- syspaste-cells or
- syspaste-cells-selected (while selecting a single cell), or
- zP
- I get the following message "INFO: Type "CLIP /?" for usage."
This is a message from Windows regarding its CLIP application.
I have tried this, having multiple clipboard history activated, or deactivated. In both cases I get the same result.
Copying from visidata onto clipboard works alright with zY
**Expected result**
Pasted content in cell
**Actual result with screenshot**
If you get an unexpected error, please include the full stack trace that you get with `Ctrl-E`.

No error captured (no ctrl-E)
**Steps to reproduce with sample data and a .vd**
First try reproducing without any user configuration by using the flag `-N`.
e.g. `echo "abc" | vd -f txt -N`
Same result.
Please attach the commandlog (saved with `Ctrl-D`)
[paste_problem_win.zip](https://github.com/saulpw/visidata/files/11748544/paste_problem_win.zip)
to show the steps that led to the issue.
See [here](http://visidata.org/docs/save-restore/) for more details.
**Additional context**
Please include the version of VisiData and Python.
Visidata v2.11
Python 3.9.7
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `visidata/clipboard.py`
Content:
```
1 from copy import copy, deepcopy
2 import shutil
3 import subprocess
4 import io
5 import sys
6 import tempfile
7 import functools
8 import os
9
10 from visidata import VisiData, vd, asyncthread
11 from visidata import Sheet, Path
12
13 if sys.platform == 'win32':
14 syscopy_cmd_default = 'clip.exe'
15 syspaste_cmd_default = 'clip.exe'
16 elif sys.platform == 'darwin':
17 syscopy_cmd_default = 'pbcopy w'
18 syspaste_cmd_default = 'pbpaste'
19 else:
20 if 'WAYLAND_DISPLAY' in os.environ:
21 syscopy_cmd_default = 'wl-copy'
22 syspaste_cmd_default = 'wl-paste'
23 else:
24 syscopy_cmd_default = 'xclip -selection clipboard -filter' # xsel --clipboard --input
25 syspaste_cmd_default = 'xclip -selection clipboard -o' # xsel --clipboard
26
27 vd.option('clipboard_copy_cmd', syscopy_cmd_default, 'command to copy stdin to system clipboard', sheettype=None)
28 vd.option('clipboard_paste_cmd', syspaste_cmd_default, 'command to send contents of system clipboard to stdout', sheettype=None)
29
30
31 @Sheet.api
32 def copyRows(sheet, rows):
33 vd.memory.cliprows = rows
34 vd.memory.clipcols = list(sheet.visibleCols)
35 if not rows:
36 vd.warning('no %s selected; clipboard emptied' % sheet.rowtype)
37 else:
38 vd.status('copied %d %s to clipboard' % (len(rows), sheet.rowtype))
39
40 @Sheet.api
41 def copyCells(sheet, col, rows):
42 vd.memory.clipcells = [col.getTypedValue(r) for r in rows]
43 if not rows:
44 vd.warning('no %s selected; clipboard emptied' % sheet.rowtype)
45 return
46 vd.status('copied %d %s.%s to clipboard' % (len(rows), sheet.rowtype, col.name))
47
48
49 @Sheet.api
50 def syscopyValue(sheet, val):
51 # pipe val to stdin of clipboard command
52
53 p = subprocess.run(
54 sheet.options.clipboard_copy_cmd.split(),
55 input=val,
56 encoding=sheet.options.encoding,
57 stdout=subprocess.DEVNULL)
58
59 vd.status('copied value to system clipboard')
60
61
62 @Sheet.api
63 def syscopyCells(sheet, cols, rows, filetype=None):
64 filetype = filetype or vd.input("copy %d %s as filetype: " % (len(rows), sheet.rowtype), value=sheet.options.save_filetype or 'tsv')
65 sheet.syscopyCells_async(cols, rows, filetype)
66
67
68 @Sheet.api
69 @asyncthread
70 def syscopyCells_async(sheet, cols, rows, filetype):
71 vs = copy(sheet)
72 vs.rows = rows or vd.fail('no %s selected' % sheet.rowtype)
73 vs.columns = cols
74
75 vd.status(f'copying {vs.nRows} {vs.rowtype} to system clipboard as {filetype}')
76
77 with io.StringIO() as buf:
78 vd.sync(vd.saveSheets(Path(sheet.name+'.'+filetype, fptext=buf), vs))
79 subprocess.run(
80 sheet.options.clipboard_copy_cmd.split(),
81 input=buf.getvalue(),
82 encoding=sheet.options.encoding,
83 stdout=subprocess.DEVNULL)
84
85
86 @VisiData.api
87 def sysclipValue(vd):
88 cmd = vd.options.clipboard_paste_cmd
89 return subprocess.check_output(vd.options.clipboard_paste_cmd.split()).decode('utf-8')
90
91
92 @VisiData.api
93 @asyncthread
94 def pasteFromClipboard(vd, cols, rows):
95 text = vd.getLastArgs() or vd.sysclipValue().strip() or vd.fail('system clipboard is empty')
96
97 vd.addUndoSetValues(cols, rows)
98 lines = text.split('\n')
99 if not lines:
100 vd.warning('nothing to paste')
101 return
102
103 vs = cols[0].sheet
104 newrows = [vs.newRow() for i in range(len(lines)-len(rows))]
105 if newrows:
106 rows.extend(newrows)
107 vs.addRows(newrows)
108
109 for line, r in zip(lines, rows):
110 for v, c in zip(line.split('\t'), cols):
111 c.setValue(r, v)
112
113
114 @Sheet.api
115 def delete_row(sheet, rowidx):
116 if not sheet.defer:
117 oldrow = sheet.rows.pop(rowidx)
118 vd.addUndo(sheet.rows.insert, rowidx, oldrow)
119 # clear the deleted row from selected rows
120 if sheet.isSelected(oldrow):
121 sheet.addUndoSelection()
122 sheet.unselectRow(oldrow)
123 else:
124 oldrow = sheet.rows[rowidx]
125 sheet.rowDeleted(oldrow)
126
127 sheet.setModified()
128 return oldrow
129
130 @Sheet.api
131 def paste_after(sheet, rowidx):
132 if not vd.memory.cliprows: #1793
133 vd.warning('nothing to paste')
134 return
135 to_paste = list(deepcopy(r) for r in reversed(vd.memory.cliprows))
136 sheet.addRows(to_paste, index=rowidx)
137
138
139
140 Sheet.addCommand('y', 'copy-row', 'copyRows([cursorRow])', 'yank (copy) current row to clipboard')
141
142 Sheet.addCommand('p', 'paste-after', 'paste_after(cursorRowIndex)', 'paste clipboard rows after current row')
143 Sheet.addCommand('P', 'paste-before', 'paste_after(cursorRowIndex-1)', 'paste clipboard rows before current row')
144
145 Sheet.addCommand('gy', 'copy-selected', 'copyRows(onlySelectedRows)', 'yank (copy) selected rows to clipboard')
146
147 Sheet.addCommand('zy', 'copy-cell', 'copyCells(cursorCol, [cursorRow]); vd.memo("clipval", cursorCol, cursorRow)', 'yank (copy) current cell to clipboard')
148 Sheet.addCommand('zp', 'paste-cell', 'cursorCol.setValuesTyped([cursorRow], vd.memory.clipval)', 'set contents of current cell to last clipboard value')
149
150 Sheet.addCommand('d', 'delete-row', 'delete_row(cursorRowIndex); defer and cursorDown(1)', 'delete current row')
151 Sheet.addCommand('gd', 'delete-selected', 'deleteSelected()', 'delete selected rows')
152 Sheet.addCommand('zd', 'delete-cell', 'cursorCol.setValues([cursorRow], options.null_value)', 'delete current cell (set to None)')
153 Sheet.addCommand('gzd', 'delete-cells', 'cursorCol.setValues(onlySelectedRows, options.null_value)', 'delete contents of current column for selected rows (set to None)')
154
155 Sheet.bindkey('BUTTON2_PRESSED', 'go-mouse')
156 Sheet.addCommand('BUTTON2_RELEASED', 'syspaste-cells', 'pasteFromClipboard(visibleCols[cursorVisibleColIndex:], rows[cursorRowIndex:])', 'paste from system clipboard to region starting at cursor')
157 Sheet.bindkey('BUTTON2_CLICKED', 'go-mouse')
158 Sheet.bindkey('zP', 'syspaste-cells')
159 Sheet.addCommand('gzP', 'syspaste-cells-selected', 'pasteFromClipboard(visibleCols[cursorVisibleColIndex:], someSelectedRows)', 'paste from system clipboard to selected cells')
160
161 Sheet.addCommand('gzy', 'copy-cells', 'copyCells(cursorCol, onlySelectedRows)', 'yank (copy) contents of current column for selected rows to clipboard')
162 Sheet.addCommand('gzp', 'setcol-clipboard', 'for r, v in zip(onlySelectedRows, itertools.cycle(vd.memory.clipcells or [None])): cursorCol.setValuesTyped([r], v)', 'set cells of current column for selected rows to last clipboard value')
163
164 Sheet.addCommand('Y', 'syscopy-row', 'syscopyCells(visibleCols, [cursorRow])', 'yank (copy) current row to system clipboard (using options.clipboard_copy_cmd)')
165
166 Sheet.addCommand('gY', 'syscopy-selected', 'syscopyCells(visibleCols, onlySelectedRows)', 'yank (copy) selected rows to system clipboard (using options.clipboard_copy_cmd)')
167 Sheet.addCommand('zY', 'syscopy-cell', 'syscopyValue(cursorDisplay)', 'yank (copy) current cell to system clipboard (using options.clipboard_copy_cmd)')
168 Sheet.addCommand('gzY', 'syscopy-cells', 'syscopyCells([cursorCol], onlySelectedRows, filetype="txt")', 'yank (copy) contents of current column from selected rows to system clipboard (using options.clipboard_copy_cmd')
169
170 Sheet.addCommand('x', 'cut-row', 'copyRows([sheet.delete_row(cursorRowIndex)]); defer and cursorDown(1)', 'delete (cut) current row and move it to clipboard')
171 Sheet.addCommand('gx', 'cut-selected', 'copyRows(onlySelectedRows); deleteSelected()', 'delete (cut) selected rows and move them to clipboard')
172 Sheet.addCommand('zx', 'cut-cell', 'copyCells(cursorCol, [cursorRow]); cursorCol.setValues([cursorRow], None)', 'delete (cut) current cell and move it to clipboard')
173 Sheet.addCommand('gzx', 'cut-cells', 'copyCells(cursorCol, onlySelectedRows); cursorCol.setValues(onlySelectedRows, None)', 'delete (cut) contents of current column for selected rows and move them to clipboard')
174
175
176 Sheet.bindkey('KEY_DC', 'delete-cell'),
177 Sheet.bindkey('gKEY_DC', 'delete-cells'),
178
179 vd.addMenuItems('''
180 Edit > Delete > current row > delete-row
181 Edit > Delete > current cell > delete-cell
182 Edit > Delete > selected rows > delete-selected
183 Edit > Delete > selected cells > delete-cells
184 Edit > Copy > current cell > copy-cell
185 Edit > Copy > current row > copy-row
186 Edit > Copy > selected cells > copy-cells
187 Edit > Copy > selected rows > copy-selected
188 Edit > Copy > to system clipboard > current cell > syscopy-cell
189 Edit > Copy > to system clipboard > current row > syscopy-row
190 Edit > Copy > to system clipboard > selected cells > syscopy-cells
191 Edit > Copy > to system clipboard > selected rows > syscopy-selected
192 Edit > Cut > current row > cut-row
193 Edit > Cut > selected cells > cut-selected
194 Edit > Cut > current cell > cut-cell
195 Edit > Paste > row after > paste-after
196 Edit > Paste > row before > paste-before
197 Edit > Paste > into selected cells > setcol-clipboard
198 Edit > Paste > into current cell > paste-cell
199 Edit > Paste > from system clipboard > cells at cursor > syspaste-cells
200 Edit > Paste > from system clipboard > selected cells > syspaste-cells-selected
201 ''')
202
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/visidata/clipboard.py b/visidata/clipboard.py
--- a/visidata/clipboard.py
+++ b/visidata/clipboard.py
@@ -12,7 +12,7 @@
if sys.platform == 'win32':
syscopy_cmd_default = 'clip.exe'
- syspaste_cmd_default = 'clip.exe'
+ syspaste_cmd_default = 'powershell -command Get-Clipboard'
elif sys.platform == 'darwin':
syscopy_cmd_default = 'pbcopy w'
syspaste_cmd_default = 'pbpaste'
|
{"golden_diff": "diff --git a/visidata/clipboard.py b/visidata/clipboard.py\n--- a/visidata/clipboard.py\n+++ b/visidata/clipboard.py\n@@ -12,7 +12,7 @@\n \n if sys.platform == 'win32':\n syscopy_cmd_default = 'clip.exe'\n- syspaste_cmd_default = 'clip.exe'\n+ syspaste_cmd_default = 'powershell -command Get-Clipboard'\n elif sys.platform == 'darwin':\n syscopy_cmd_default = 'pbcopy w'\n syspaste_cmd_default = 'pbpaste'\n", "issue": "Pasting from Clipboard in Windows 10\n**Small description**\r\nI cannot get any results pasted from the clipboard onto a visidata cell, selected cell or range of cells. \r\n\r\nOn the most basic test:\r\n- Create new sheet\r\n- Create 1 cell\r\n- I copy a short string from another app to the clipboard\r\n- trying:\r\n - syspaste-cells or \r\n - syspaste-cells-selected (while selecting a single cell), or\r\n - zP\r\n- I get the following message \"INFO: Type \"CLIP /?\" for usage.\"\r\n\r\nThis is a message from Windows regarding its CLIP application.\r\n\r\nI have tried this, having multiple clipboard history activated, or deactivated. In both cases I get the same result.\r\n\r\nCopying from visidata onto clipboard works alright with zY\r\n\r\n**Expected result**\r\nPasted content in cell\r\n\r\n**Actual result with screenshot**\r\nIf you get an unexpected error, please include the full stack trace that you get with `Ctrl-E`.\r\n\r\n\r\nNo error captured (no ctrl-E)\r\n\r\n**Steps to reproduce with sample data and a .vd**\r\nFirst try reproducing without any user configuration by using the flag `-N`.\r\ne.g. `echo \"abc\" | vd -f txt -N`\r\n\r\nSame result.\r\n\r\nPlease attach the commandlog (saved with `Ctrl-D`)\r\n[paste_problem_win.zip](https://github.com/saulpw/visidata/files/11748544/paste_problem_win.zip)\r\n\r\n to show the steps that led to the issue.\r\nSee [here](http://visidata.org/docs/save-restore/) for more details.\r\n\r\n**Additional context**\r\nPlease include the version of VisiData and Python.\r\nVisidata v2.11\r\nPython 3.9.7\n", "before_files": [{"content": "from copy import copy, deepcopy\nimport shutil\nimport subprocess\nimport io\nimport sys\nimport tempfile\nimport functools\nimport os\n\nfrom visidata import VisiData, vd, asyncthread\nfrom visidata import Sheet, Path\n\nif sys.platform == 'win32':\n syscopy_cmd_default = 'clip.exe'\n syspaste_cmd_default = 'clip.exe'\nelif sys.platform == 'darwin':\n syscopy_cmd_default = 'pbcopy w'\n syspaste_cmd_default = 'pbpaste'\nelse:\n if 'WAYLAND_DISPLAY' in os.environ:\n syscopy_cmd_default = 'wl-copy'\n syspaste_cmd_default = 'wl-paste'\n else:\n syscopy_cmd_default = 'xclip -selection clipboard -filter' # xsel --clipboard --input\n syspaste_cmd_default = 'xclip -selection clipboard -o' # xsel --clipboard\n\nvd.option('clipboard_copy_cmd', syscopy_cmd_default, 'command to copy stdin to system clipboard', sheettype=None)\nvd.option('clipboard_paste_cmd', syspaste_cmd_default, 'command to send contents of system clipboard to stdout', sheettype=None)\n\n\[email protected]\ndef copyRows(sheet, rows):\n vd.memory.cliprows = rows\n vd.memory.clipcols = list(sheet.visibleCols)\n if not rows:\n vd.warning('no %s selected; clipboard emptied' % sheet.rowtype)\n else:\n vd.status('copied %d %s to clipboard' % (len(rows), sheet.rowtype))\n\[email protected]\ndef copyCells(sheet, col, rows):\n vd.memory.clipcells = [col.getTypedValue(r) for r in rows]\n if not rows:\n vd.warning('no %s selected; clipboard emptied' % sheet.rowtype)\n return\n vd.status('copied %d %s.%s to clipboard' % (len(rows), 
sheet.rowtype, col.name))\n\n\[email protected]\ndef syscopyValue(sheet, val):\n # pipe val to stdin of clipboard command\n\n p = subprocess.run(\n sheet.options.clipboard_copy_cmd.split(),\n input=val,\n encoding=sheet.options.encoding,\n stdout=subprocess.DEVNULL)\n\n vd.status('copied value to system clipboard')\n\n\[email protected]\ndef syscopyCells(sheet, cols, rows, filetype=None):\n filetype = filetype or vd.input(\"copy %d %s as filetype: \" % (len(rows), sheet.rowtype), value=sheet.options.save_filetype or 'tsv')\n sheet.syscopyCells_async(cols, rows, filetype)\n\n\[email protected]\n@asyncthread\ndef syscopyCells_async(sheet, cols, rows, filetype):\n vs = copy(sheet)\n vs.rows = rows or vd.fail('no %s selected' % sheet.rowtype)\n vs.columns = cols\n\n vd.status(f'copying {vs.nRows} {vs.rowtype} to system clipboard as {filetype}')\n\n with io.StringIO() as buf:\n vd.sync(vd.saveSheets(Path(sheet.name+'.'+filetype, fptext=buf), vs))\n subprocess.run(\n sheet.options.clipboard_copy_cmd.split(),\n input=buf.getvalue(),\n encoding=sheet.options.encoding,\n stdout=subprocess.DEVNULL)\n\n\[email protected]\ndef sysclipValue(vd):\n cmd = vd.options.clipboard_paste_cmd\n return subprocess.check_output(vd.options.clipboard_paste_cmd.split()).decode('utf-8')\n\n\[email protected]\n@asyncthread\ndef pasteFromClipboard(vd, cols, rows):\n text = vd.getLastArgs() or vd.sysclipValue().strip() or vd.fail('system clipboard is empty')\n\n vd.addUndoSetValues(cols, rows)\n lines = text.split('\\n')\n if not lines:\n vd.warning('nothing to paste')\n return\n\n vs = cols[0].sheet\n newrows = [vs.newRow() for i in range(len(lines)-len(rows))]\n if newrows:\n rows.extend(newrows)\n vs.addRows(newrows)\n\n for line, r in zip(lines, rows):\n for v, c in zip(line.split('\\t'), cols):\n c.setValue(r, v)\n\n\[email protected]\ndef delete_row(sheet, rowidx):\n if not sheet.defer:\n oldrow = sheet.rows.pop(rowidx)\n vd.addUndo(sheet.rows.insert, rowidx, oldrow)\n # clear the deleted row from selected rows\n if sheet.isSelected(oldrow):\n sheet.addUndoSelection()\n sheet.unselectRow(oldrow)\n else:\n oldrow = sheet.rows[rowidx]\n sheet.rowDeleted(oldrow)\n\n sheet.setModified()\n return oldrow\n\[email protected]\ndef paste_after(sheet, rowidx):\n if not vd.memory.cliprows: #1793\n vd.warning('nothing to paste')\n return\n to_paste = list(deepcopy(r) for r in reversed(vd.memory.cliprows))\n sheet.addRows(to_paste, index=rowidx)\n\n\n\nSheet.addCommand('y', 'copy-row', 'copyRows([cursorRow])', 'yank (copy) current row to clipboard')\n\nSheet.addCommand('p', 'paste-after', 'paste_after(cursorRowIndex)', 'paste clipboard rows after current row')\nSheet.addCommand('P', 'paste-before', 'paste_after(cursorRowIndex-1)', 'paste clipboard rows before current row')\n\nSheet.addCommand('gy', 'copy-selected', 'copyRows(onlySelectedRows)', 'yank (copy) selected rows to clipboard')\n\nSheet.addCommand('zy', 'copy-cell', 'copyCells(cursorCol, [cursorRow]); vd.memo(\"clipval\", cursorCol, cursorRow)', 'yank (copy) current cell to clipboard')\nSheet.addCommand('zp', 'paste-cell', 'cursorCol.setValuesTyped([cursorRow], vd.memory.clipval)', 'set contents of current cell to last clipboard value')\n\nSheet.addCommand('d', 'delete-row', 'delete_row(cursorRowIndex); defer and cursorDown(1)', 'delete current row')\nSheet.addCommand('gd', 'delete-selected', 'deleteSelected()', 'delete selected rows')\nSheet.addCommand('zd', 'delete-cell', 'cursorCol.setValues([cursorRow], options.null_value)', 'delete current cell (set to 
None)')\nSheet.addCommand('gzd', 'delete-cells', 'cursorCol.setValues(onlySelectedRows, options.null_value)', 'delete contents of current column for selected rows (set to None)')\n\nSheet.bindkey('BUTTON2_PRESSED', 'go-mouse')\nSheet.addCommand('BUTTON2_RELEASED', 'syspaste-cells', 'pasteFromClipboard(visibleCols[cursorVisibleColIndex:], rows[cursorRowIndex:])', 'paste from system clipboard to region starting at cursor')\nSheet.bindkey('BUTTON2_CLICKED', 'go-mouse')\nSheet.bindkey('zP', 'syspaste-cells')\nSheet.addCommand('gzP', 'syspaste-cells-selected', 'pasteFromClipboard(visibleCols[cursorVisibleColIndex:], someSelectedRows)', 'paste from system clipboard to selected cells')\n\nSheet.addCommand('gzy', 'copy-cells', 'copyCells(cursorCol, onlySelectedRows)', 'yank (copy) contents of current column for selected rows to clipboard')\nSheet.addCommand('gzp', 'setcol-clipboard', 'for r, v in zip(onlySelectedRows, itertools.cycle(vd.memory.clipcells or [None])): cursorCol.setValuesTyped([r], v)', 'set cells of current column for selected rows to last clipboard value')\n\nSheet.addCommand('Y', 'syscopy-row', 'syscopyCells(visibleCols, [cursorRow])', 'yank (copy) current row to system clipboard (using options.clipboard_copy_cmd)')\n\nSheet.addCommand('gY', 'syscopy-selected', 'syscopyCells(visibleCols, onlySelectedRows)', 'yank (copy) selected rows to system clipboard (using options.clipboard_copy_cmd)')\nSheet.addCommand('zY', 'syscopy-cell', 'syscopyValue(cursorDisplay)', 'yank (copy) current cell to system clipboard (using options.clipboard_copy_cmd)')\nSheet.addCommand('gzY', 'syscopy-cells', 'syscopyCells([cursorCol], onlySelectedRows, filetype=\"txt\")', 'yank (copy) contents of current column from selected rows to system clipboard (using options.clipboard_copy_cmd')\n\nSheet.addCommand('x', 'cut-row', 'copyRows([sheet.delete_row(cursorRowIndex)]); defer and cursorDown(1)', 'delete (cut) current row and move it to clipboard')\nSheet.addCommand('gx', 'cut-selected', 'copyRows(onlySelectedRows); deleteSelected()', 'delete (cut) selected rows and move them to clipboard')\nSheet.addCommand('zx', 'cut-cell', 'copyCells(cursorCol, [cursorRow]); cursorCol.setValues([cursorRow], None)', 'delete (cut) current cell and move it to clipboard')\nSheet.addCommand('gzx', 'cut-cells', 'copyCells(cursorCol, onlySelectedRows); cursorCol.setValues(onlySelectedRows, None)', 'delete (cut) contents of current column for selected rows and move them to clipboard')\n\n\nSheet.bindkey('KEY_DC', 'delete-cell'),\nSheet.bindkey('gKEY_DC', 'delete-cells'),\n\nvd.addMenuItems('''\n Edit > Delete > current row > delete-row\n Edit > Delete > current cell > delete-cell\n Edit > Delete > selected rows > delete-selected\n Edit > Delete > selected cells > delete-cells\n Edit > Copy > current cell > copy-cell\n Edit > Copy > current row > copy-row\n Edit > Copy > selected cells > copy-cells\n Edit > Copy > selected rows > copy-selected\n Edit > Copy > to system clipboard > current cell > syscopy-cell\n Edit > Copy > to system clipboard > current row > syscopy-row\n Edit > Copy > to system clipboard > selected cells > syscopy-cells\n Edit > Copy > to system clipboard > selected rows > syscopy-selected\n Edit > Cut > current row > cut-row\n Edit > Cut > selected cells > cut-selected\n Edit > Cut > current cell > cut-cell\n Edit > Paste > row after > paste-after\n Edit > Paste > row before > paste-before\n Edit > Paste > into selected cells > setcol-clipboard\n Edit > Paste > into current cell > paste-cell\n Edit > Paste > from 
system clipboard > cells at cursor > syspaste-cells\n Edit > Paste > from system clipboard > selected cells > syspaste-cells-selected\n''')\n", "path": "visidata/clipboard.py"}], "after_files": [{"content": "from copy import copy, deepcopy\nimport shutil\nimport subprocess\nimport io\nimport sys\nimport tempfile\nimport functools\nimport os\n\nfrom visidata import VisiData, vd, asyncthread\nfrom visidata import Sheet, Path\n\nif sys.platform == 'win32':\n syscopy_cmd_default = 'clip.exe'\n syspaste_cmd_default = 'powershell -command Get-Clipboard'\nelif sys.platform == 'darwin':\n syscopy_cmd_default = 'pbcopy w'\n syspaste_cmd_default = 'pbpaste'\nelse:\n if 'WAYLAND_DISPLAY' in os.environ:\n syscopy_cmd_default = 'wl-copy'\n syspaste_cmd_default = 'wl-paste'\n else:\n syscopy_cmd_default = 'xclip -selection clipboard -filter' # xsel --clipboard --input\n syspaste_cmd_default = 'xclip -selection clipboard -o' # xsel --clipboard\n\nvd.option('clipboard_copy_cmd', syscopy_cmd_default, 'command to copy stdin to system clipboard', sheettype=None)\nvd.option('clipboard_paste_cmd', syspaste_cmd_default, 'command to send contents of system clipboard to stdout', sheettype=None)\n\n\[email protected]\ndef copyRows(sheet, rows):\n vd.memory.cliprows = rows\n vd.memory.clipcols = list(sheet.visibleCols)\n if not rows:\n vd.warning('no %s selected; clipboard emptied' % sheet.rowtype)\n else:\n vd.status('copied %d %s to clipboard' % (len(rows), sheet.rowtype))\n\[email protected]\ndef copyCells(sheet, col, rows):\n vd.memory.clipcells = [col.getTypedValue(r) for r in rows]\n if not rows:\n vd.warning('no %s selected; clipboard emptied' % sheet.rowtype)\n return\n vd.status('copied %d %s.%s to clipboard' % (len(rows), sheet.rowtype, col.name))\n\n\[email protected]\ndef syscopyValue(sheet, val):\n # pipe val to stdin of clipboard command\n\n p = subprocess.run(\n sheet.options.clipboard_copy_cmd.split(),\n input=val,\n encoding=sheet.options.encoding,\n stdout=subprocess.DEVNULL)\n\n vd.status('copied value to system clipboard')\n\n\[email protected]\ndef syscopyCells(sheet, cols, rows, filetype=None):\n filetype = filetype or vd.input(\"copy %d %s as filetype: \" % (len(rows), sheet.rowtype), value=sheet.options.save_filetype or 'tsv')\n sheet.syscopyCells_async(cols, rows, filetype)\n\n\[email protected]\n@asyncthread\ndef syscopyCells_async(sheet, cols, rows, filetype):\n vs = copy(sheet)\n vs.rows = rows or vd.fail('no %s selected' % sheet.rowtype)\n vs.columns = cols\n\n vd.status(f'copying {vs.nRows} {vs.rowtype} to system clipboard as {filetype}')\n\n with io.StringIO() as buf:\n vd.sync(vd.saveSheets(Path(sheet.name+'.'+filetype, fptext=buf), vs))\n subprocess.run(\n sheet.options.clipboard_copy_cmd.split(),\n input=buf.getvalue(),\n encoding=sheet.options.encoding,\n stdout=subprocess.DEVNULL)\n\n\[email protected]\ndef sysclipValue(vd):\n cmd = vd.options.clipboard_paste_cmd\n return subprocess.check_output(vd.options.clipboard_paste_cmd.split()).decode('utf-8')\n\n\[email protected]\n@asyncthread\ndef pasteFromClipboard(vd, cols, rows):\n text = vd.getLastArgs() or vd.sysclipValue().strip() or vd.fail('system clipboard is empty')\n\n vd.addUndoSetValues(cols, rows)\n lines = text.split('\\n')\n if not lines:\n vd.warning('nothing to paste')\n return\n\n vs = cols[0].sheet\n newrows = [vs.newRow() for i in range(len(lines)-len(rows))]\n if newrows:\n rows.extend(newrows)\n vs.addRows(newrows)\n\n for line, r in zip(lines, rows):\n for v, c in zip(line.split('\\t'), cols):\n c.setValue(r, 
v)\n\n\[email protected]\ndef delete_row(sheet, rowidx):\n if not sheet.defer:\n oldrow = sheet.rows.pop(rowidx)\n vd.addUndo(sheet.rows.insert, rowidx, oldrow)\n # clear the deleted row from selected rows\n if sheet.isSelected(oldrow):\n sheet.addUndoSelection()\n sheet.unselectRow(oldrow)\n else:\n oldrow = sheet.rows[rowidx]\n sheet.rowDeleted(oldrow)\n\n sheet.setModified()\n return oldrow\n\[email protected]\ndef paste_after(sheet, rowidx):\n if not vd.memory.cliprows: #1793\n vd.warning('nothing to paste')\n return\n to_paste = list(deepcopy(r) for r in reversed(vd.memory.cliprows))\n sheet.addRows(to_paste, index=rowidx)\n\n\n\nSheet.addCommand('y', 'copy-row', 'copyRows([cursorRow])', 'yank (copy) current row to clipboard')\n\nSheet.addCommand('p', 'paste-after', 'paste_after(cursorRowIndex)', 'paste clipboard rows after current row')\nSheet.addCommand('P', 'paste-before', 'paste_after(cursorRowIndex-1)', 'paste clipboard rows before current row')\n\nSheet.addCommand('gy', 'copy-selected', 'copyRows(onlySelectedRows)', 'yank (copy) selected rows to clipboard')\n\nSheet.addCommand('zy', 'copy-cell', 'copyCells(cursorCol, [cursorRow]); vd.memo(\"clipval\", cursorCol, cursorRow)', 'yank (copy) current cell to clipboard')\nSheet.addCommand('zp', 'paste-cell', 'cursorCol.setValuesTyped([cursorRow], vd.memory.clipval)', 'set contents of current cell to last clipboard value')\n\nSheet.addCommand('d', 'delete-row', 'delete_row(cursorRowIndex); defer and cursorDown(1)', 'delete current row')\nSheet.addCommand('gd', 'delete-selected', 'deleteSelected()', 'delete selected rows')\nSheet.addCommand('zd', 'delete-cell', 'cursorCol.setValues([cursorRow], options.null_value)', 'delete current cell (set to None)')\nSheet.addCommand('gzd', 'delete-cells', 'cursorCol.setValues(onlySelectedRows, options.null_value)', 'delete contents of current column for selected rows (set to None)')\n\nSheet.bindkey('BUTTON2_PRESSED', 'go-mouse')\nSheet.addCommand('BUTTON2_RELEASED', 'syspaste-cells', 'pasteFromClipboard(visibleCols[cursorVisibleColIndex:], rows[cursorRowIndex:])', 'paste from system clipboard to region starting at cursor')\nSheet.bindkey('BUTTON2_CLICKED', 'go-mouse')\nSheet.bindkey('zP', 'syspaste-cells')\nSheet.addCommand('gzP', 'syspaste-cells-selected', 'pasteFromClipboard(visibleCols[cursorVisibleColIndex:], someSelectedRows)', 'paste from system clipboard to selected cells')\n\nSheet.addCommand('gzy', 'copy-cells', 'copyCells(cursorCol, onlySelectedRows)', 'yank (copy) contents of current column for selected rows to clipboard')\nSheet.addCommand('gzp', 'setcol-clipboard', 'for r, v in zip(onlySelectedRows, itertools.cycle(vd.memory.clipcells or [None])): cursorCol.setValuesTyped([r], v)', 'set cells of current column for selected rows to last clipboard value')\n\nSheet.addCommand('Y', 'syscopy-row', 'syscopyCells(visibleCols, [cursorRow])', 'yank (copy) current row to system clipboard (using options.clipboard_copy_cmd)')\n\nSheet.addCommand('gY', 'syscopy-selected', 'syscopyCells(visibleCols, onlySelectedRows)', 'yank (copy) selected rows to system clipboard (using options.clipboard_copy_cmd)')\nSheet.addCommand('zY', 'syscopy-cell', 'syscopyValue(cursorDisplay)', 'yank (copy) current cell to system clipboard (using options.clipboard_copy_cmd)')\nSheet.addCommand('gzY', 'syscopy-cells', 'syscopyCells([cursorCol], onlySelectedRows, filetype=\"txt\")', 'yank (copy) contents of current column from selected rows to system clipboard (using options.clipboard_copy_cmd')\n\nSheet.addCommand('x', 
'cut-row', 'copyRows([sheet.delete_row(cursorRowIndex)]); defer and cursorDown(1)', 'delete (cut) current row and move it to clipboard')\nSheet.addCommand('gx', 'cut-selected', 'copyRows(onlySelectedRows); deleteSelected()', 'delete (cut) selected rows and move them to clipboard')\nSheet.addCommand('zx', 'cut-cell', 'copyCells(cursorCol, [cursorRow]); cursorCol.setValues([cursorRow], None)', 'delete (cut) current cell and move it to clipboard')\nSheet.addCommand('gzx', 'cut-cells', 'copyCells(cursorCol, onlySelectedRows); cursorCol.setValues(onlySelectedRows, None)', 'delete (cut) contents of current column for selected rows and move them to clipboard')\n\n\nSheet.bindkey('KEY_DC', 'delete-cell'),\nSheet.bindkey('gKEY_DC', 'delete-cells'),\n\nvd.addMenuItems('''\n Edit > Delete > current row > delete-row\n Edit > Delete > current cell > delete-cell\n Edit > Delete > selected rows > delete-selected\n Edit > Delete > selected cells > delete-cells\n Edit > Copy > current cell > copy-cell\n Edit > Copy > current row > copy-row\n Edit > Copy > selected cells > copy-cells\n Edit > Copy > selected rows > copy-selected\n Edit > Copy > to system clipboard > current cell > syscopy-cell\n Edit > Copy > to system clipboard > current row > syscopy-row\n Edit > Copy > to system clipboard > selected cells > syscopy-cells\n Edit > Copy > to system clipboard > selected rows > syscopy-selected\n Edit > Cut > current row > cut-row\n Edit > Cut > selected cells > cut-selected\n Edit > Cut > current cell > cut-cell\n Edit > Paste > row after > paste-after\n Edit > Paste > row before > paste-before\n Edit > Paste > into selected cells > setcol-clipboard\n Edit > Paste > into current cell > paste-cell\n Edit > Paste > from system clipboard > cells at cursor > syspaste-cells\n Edit > Paste > from system clipboard > selected cells > syspaste-cells-selected\n''')\n", "path": "visidata/clipboard.py"}]}
| 3,456 | 123 |
gh_patches_debug_33854 | rasdani/github-patches | git_diff | svthalia__concrexit-1551 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Use a different colour for waiting list events
### Is your feature request related to a problem? Please describe.
In the event overview calendar, the dot in front of the event is colored as if you are registered, which is confusing.
### Describe the solution you'd like
Use a different colour for events you're on the waiting list for.
### Motivation
Better user experience.
### Describe alternatives you've considered
There is not really any alternative.
### Additional context
Created based on #1007
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/events/api/calendarjs/serializers.py`
Content:
```
1 from rest_framework.reverse import reverse
2
3 from events import services
4 from events.models import Event
5 from thaliawebsite.api.calendarjs.serializers import CalenderJSSerializer
6
7
8 class EventsCalenderJSSerializer(CalenderJSSerializer):
9 class Meta(CalenderJSSerializer.Meta):
10 model = Event
11
12 def _url(self, instance):
13 return reverse("events:event", kwargs={"pk": instance.id})
14
15 def _class_names(self, instance):
16 class_names = ["regular-event"]
17 if self.context["member"] and services.is_user_registered(
18 self.context["member"], instance
19 ):
20 class_names.append("has-registration")
21 return class_names
22
23
24 class UnpublishedEventsCalenderJSSerializer(CalenderJSSerializer):
25 """See CalenderJSSerializer, customised classes."""
26
27 class Meta(CalenderJSSerializer.Meta):
28 model = Event
29
30 def _class_names(self, instance):
31 return ["unpublished-event"]
32
33 def _url(self, instance):
34 return reverse("admin:events_event_details", kwargs={"pk": instance.id})
35
```
Path: `website/events/services.py`
Content:
```
1 from collections import OrderedDict
2
3 from django.utils import timezone
4 from django.utils.datetime_safe import date
5 from django.utils.translation import gettext_lazy as _, get_language
6
7 from events import emails
8 from events.exceptions import RegistrationError
9 from events.models import EventRegistration, RegistrationInformationField, Event
10 from payments.api.v1.fields import PaymentTypeField
11 from payments.services import create_payment, delete_payment
12 from utils.snippets import datetime_to_lectureyear
13
14
15 def is_user_registered(member, event):
16 """Return if the user is registered for the specified event.
17
18 :param member: the user
19 :param event: the event
20 :return: None if registration is not required or no member else True/False
21 """
22 if not event.registration_required or not member.is_authenticated:
23 return None
24
25 return event.registrations.filter(member=member, date_cancelled=None).count() > 0
26
27
28 def is_user_present(member, event):
29 if not event.registration_required or not member.is_authenticated:
30 return None
31
32 return (
33 event.registrations.filter(
34 member=member, date_cancelled=None, present=True
35 ).count()
36 > 0
37 )
38
39
40 def event_permissions(member, event, name=None):
41 """Return a dictionary with the available event permissions of the user.
42
43 :param member: the user
44 :param event: the event
45 :param name: the name of a non member registration
46 :return: the permission dictionary
47 """
48 perms = {
49 "create_registration": False,
50 "cancel_registration": False,
51 "update_registration": False,
52 "manage_event": is_organiser(member, event),
53 }
54 if not member:
55 return perms
56 if not (member.is_authenticated or name):
57 return perms
58
59 registration = None
60 try:
61 registration = EventRegistration.objects.get(
62 event=event, member=member, name=name
63 )
64 except EventRegistration.DoesNotExist:
65 pass
66
67 perms["create_registration"] = (
68 (registration is None or registration.date_cancelled is not None)
69 and (event.registration_allowed or not event.registration_required)
70 and (name or member.can_attend_events)
71 )
72 perms["cancel_registration"] = (
73 registration is not None
74 and registration.date_cancelled is None
75 and (event.cancellation_allowed or name or not event.registration_required)
76 and registration.payment is None
77 )
78 perms["update_registration"] = (
79 registration is not None
80 and registration.date_cancelled is None
81 and event.has_fields
82 and (event.registration_allowed or not event.registration_required)
83 and (name or member.can_attend_events)
84 )
85 return perms
86
87
88 def is_organiser(member, event):
89 if member and member.is_authenticated:
90 if member.is_superuser or member.has_perm("events.override_organiser"):
91 return True
92
93 if event:
94 return member.get_member_groups().filter(pk=event.organiser.pk).count() != 0
95
96 return False
97
98
99 def create_registration(member, event):
100 """Create a new user registration for an event.
101
102 :param member: the user
103 :param event: the event
104 :return: Return the registration if successful
105 """
106 if event_permissions(member, event)["create_registration"]:
107 registration = None
108 try:
109 registration = EventRegistration.objects.get(event=event, member=member)
110 except EventRegistration.DoesNotExist:
111 pass
112
113 if registration is None:
114 return EventRegistration.objects.create(event=event, member=member)
115 if registration.date_cancelled is not None:
116 if registration.is_late_cancellation():
117 raise RegistrationError(
118 _(
119 "You cannot re-register anymore "
120 "since you've cancelled after the "
121 "deadline."
122 )
123 )
124 registration.date = timezone.now()
125 registration.date_cancelled = None
126 registration.save()
127
128 return registration
129 if event_permissions(member, event)["cancel_registration"]:
130 raise RegistrationError(_("You were already registered."))
131 raise RegistrationError(_("You may not register."))
132
133
134 def cancel_registration(member, event):
135 """Cancel a user registration for an event.
136
137 :param member: the user
138 :param event: the event
139 """
140 registration = None
141 try:
142 registration = EventRegistration.objects.get(event=event, member=member)
143 except EventRegistration.DoesNotExist:
144 pass
145
146 if event_permissions(member, event)["cancel_registration"] and registration:
147 if not registration.queue_position:
148 emails.notify_first_waiting(event)
149
150 if event.send_cancel_email and event.after_cancel_deadline:
151 emails.notify_organiser(event, registration)
152
153 # Note that this doesn"t remove the values for the
154 # information fields that the user entered upon registering.
155 # But this is regarded as a feature, not a bug. Especially
156 # since the values will still appear in the backend.
157 registration.date_cancelled = timezone.now()
158 registration.save()
159 else:
160 raise RegistrationError(_("You are not allowed to deregister for this event."))
161
162
163 def update_registration(
164 member=None, event=None, name=None, registration=None, field_values=None
165 ):
166 """Update a user registration of an event.
167
168 :param request: http request
169 :param member: the user
170 :param event: the event
171 :param name: the name of a registration not associated with a user
172 :param registration: the registration
173 :param field_values: values for the information fields
174 """
175 if not registration:
176 try:
177 registration = EventRegistration.objects.get(
178 event=event, member=member, name=name
179 )
180 except EventRegistration.DoesNotExist as error:
181 raise RegistrationError(
182 _("You are not registered for this event.")
183 ) from error
184 else:
185 member = registration.member
186 event = registration.event
187 name = registration.name
188
189 if (
190 not event_permissions(member, event, name)["update_registration"]
191 or not field_values
192 ):
193 return
194
195 for field_id, field_value in field_values:
196 field = RegistrationInformationField.objects.get(
197 id=field_id.replace("info_field_", "")
198 )
199
200 if (
201 field.type == RegistrationInformationField.INTEGER_FIELD
202 and field_value is None
203 ):
204 field_value = 0
205 elif (
206 field.type == RegistrationInformationField.BOOLEAN_FIELD
207 and field_value is None
208 ):
209 field_value = False
210 elif (
211 field.type == RegistrationInformationField.TEXT_FIELD
212 and field_value is None
213 ):
214 field_value = ""
215
216 field.set_value_for(registration, field_value)
217
218
219 def registration_fields(request, member=None, event=None, registration=None, name=None):
220 """Return information about the registration fields of a registration.
221
222 :param member: the user (optional if registration provided)
223 :param name: the name of a non member registration
224 (optional if registration provided)
225 :param event: the event (optional if registration provided)
226 :param registration: the registration (optional if member & event provided)
227 :return: the fields
228 """
229 if registration is None:
230 try:
231 registration = EventRegistration.objects.get(
232 event=event, member=member, name=name
233 )
234 except EventRegistration.DoesNotExist as error:
235 raise RegistrationError(
236 _("You are not registered for this event.")
237 ) from error
238 except EventRegistration.MultipleObjectsReturned as error:
239 raise RegistrationError(
240 _("Unable to find the right registration.")
241 ) from error
242
243 member = registration.member
244 event = registration.event
245 name = registration.name
246
247 perms = event_permissions(member, event, name)[
248 "update_registration"
249 ] or is_organiser(request.member, event)
250 if perms and registration:
251 information_fields = registration.information_fields
252 fields = OrderedDict()
253
254 for information_field in information_fields:
255 field = information_field["field"]
256
257 fields["info_field_{}".format(field.id)] = {
258 "type": field.type,
259 "label": getattr(field, "{}_{}".format("name", get_language())),
260 "description": getattr(
261 field, "{}_{}".format("description", get_language())
262 ),
263 "value": information_field["value"],
264 "required": field.required,
265 }
266
267 return fields
268 raise RegistrationError(_("You are not allowed to update this registration."))
269
270
271 def update_registration_by_organiser(registration, member, data):
272 if not is_organiser(member, registration.event):
273 raise RegistrationError(_("You are not allowed to update this registration."))
274
275 if "payment" in data:
276 if data["payment"]["type"] == PaymentTypeField.NO_PAYMENT:
277 if registration.payment is not None:
278 delete_payment(registration)
279 else:
280 registration.payment = create_payment(
281 payable=registration,
282 processed_by=member,
283 pay_type=data["payment"]["type"],
284 )
285
286 if "present" in data:
287 registration.present = data["present"]
288
289 registration.save()
290
291
292 def generate_category_statistics():
293 """Generate statistics about events, number of events per category.
294
295 :return: Dict with key, value resp. being category, event count.
296 """
297 year = datetime_to_lectureyear(timezone.now())
298
299 data = {}
300 for i in range(5):
301 year_start = date(year=year - i, month=9, day=1)
302 year_end = date(year=year - i + 1, month=9, day=1)
303 data[str(year - i)] = {
304 str(display): Event.objects.filter(
305 category=key, start__gte=year_start, end__lte=year_end
306 ).count()
307 for key, display in Event.EVENT_CATEGORIES
308 }
309
310 return data
311
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/website/events/api/calendarjs/serializers.py b/website/events/api/calendarjs/serializers.py
--- a/website/events/api/calendarjs/serializers.py
+++ b/website/events/api/calendarjs/serializers.py
@@ -13,12 +13,18 @@
return reverse("events:event", kwargs={"pk": instance.id})
def _class_names(self, instance):
- class_names = ["regular-event"]
if self.context["member"] and services.is_user_registered(
self.context["member"], instance
):
- class_names.append("has-registration")
- return class_names
+ if services.user_registration_pending(self.context["member"], instance):
+ return ["regular-event-pending-registration"]
+ else:
+ return ["regular-event-has-registration"]
+ elif (not instance.registration_required) or instance.registration_allowed:
+ return ["regular-event-registration-open"]
+ else:
+ # I think this handles the case that registration is needed, but not yet possible
+ return ["regular-event-registration-closed"]
class UnpublishedEventsCalenderJSSerializer(CalenderJSSerializer):
diff --git a/website/events/services.py b/website/events/services.py
--- a/website/events/services.py
+++ b/website/events/services.py
@@ -19,12 +19,28 @@
:param event: the event
:return: None if registration is not required or no member else True/False
"""
- if not event.registration_required or not member.is_authenticated:
+ if not member.is_authenticated:
return None
return event.registrations.filter(member=member, date_cancelled=None).count() > 0
+def user_registration_pending(member, event):
+ """Return if the user is in the queue, but not yet registered for, the specific event.
+
+ :param member: the user
+ :param event: the event
+ :return: None if registration is not required or no member else True/False
+ """
+ if not event.registration_required:
+ return False
+ if not member.is_authenticated:
+ return None
+
+ reg = event.registrations.filter(member=member, date_cancelled=None)
+ return len(list(filter(lambda r: r.queue_position, reg))) > 0
+
+
def is_user_present(member, event):
if not event.registration_required or not member.is_authenticated:
return None
|
{"golden_diff": "diff --git a/website/events/api/calendarjs/serializers.py b/website/events/api/calendarjs/serializers.py\n--- a/website/events/api/calendarjs/serializers.py\n+++ b/website/events/api/calendarjs/serializers.py\n@@ -13,12 +13,18 @@\n return reverse(\"events:event\", kwargs={\"pk\": instance.id})\n \n def _class_names(self, instance):\n- class_names = [\"regular-event\"]\n if self.context[\"member\"] and services.is_user_registered(\n self.context[\"member\"], instance\n ):\n- class_names.append(\"has-registration\")\n- return class_names\n+ if services.user_registration_pending(self.context[\"member\"], instance):\n+ return [\"regular-event-pending-registration\"]\n+ else:\n+ return [\"regular-event-has-registration\"]\n+ elif (not instance.registration_required) or instance.registration_allowed:\n+ return [\"regular-event-registration-open\"]\n+ else:\n+ # I think this handles the case that registration is needed, but not yet possible\n+ return [\"regular-event-registration-closed\"]\n \n \n class UnpublishedEventsCalenderJSSerializer(CalenderJSSerializer):\ndiff --git a/website/events/services.py b/website/events/services.py\n--- a/website/events/services.py\n+++ b/website/events/services.py\n@@ -19,12 +19,28 @@\n :param event: the event\n :return: None if registration is not required or no member else True/False\n \"\"\"\n- if not event.registration_required or not member.is_authenticated:\n+ if not member.is_authenticated:\n return None\n \n return event.registrations.filter(member=member, date_cancelled=None).count() > 0\n \n \n+def user_registration_pending(member, event):\n+ \"\"\"Return if the user is in the queue, but not yet registered for, the specific event.\n+\n+ :param member: the user\n+ :param event: the event\n+ :return: None if registration is not required or no member else True/False\n+ \"\"\"\n+ if not event.registration_required:\n+ return False\n+ if not member.is_authenticated:\n+ return None\n+\n+ reg = event.registrations.filter(member=member, date_cancelled=None)\n+ return len(list(filter(lambda r: r.queue_position, reg))) > 0\n+\n+\n def is_user_present(member, event):\n if not event.registration_required or not member.is_authenticated:\n return None\n", "issue": "Use a different colour for waiting list events\n### Is your feature request related to a problem? Please describe.\r\nIn the event overview calendar, the dot in front of the event is colored as if you are registered. 
And this is confusing.\r\n\r\n### Describe the solution you'd like\r\nUse a different colour for events you're on the waiting list for.\r\n\r\n### Motivation\r\nBetter user experience.\r\n\r\n### Describe alternatives you've considered\r\nThere is not really any alternative\r\n\r\n### Additional context\r\nCreated based on #1007\r\n\n", "before_files": [{"content": "from rest_framework.reverse import reverse\n\nfrom events import services\nfrom events.models import Event\nfrom thaliawebsite.api.calendarjs.serializers import CalenderJSSerializer\n\n\nclass EventsCalenderJSSerializer(CalenderJSSerializer):\n class Meta(CalenderJSSerializer.Meta):\n model = Event\n\n def _url(self, instance):\n return reverse(\"events:event\", kwargs={\"pk\": instance.id})\n\n def _class_names(self, instance):\n class_names = [\"regular-event\"]\n if self.context[\"member\"] and services.is_user_registered(\n self.context[\"member\"], instance\n ):\n class_names.append(\"has-registration\")\n return class_names\n\n\nclass UnpublishedEventsCalenderJSSerializer(CalenderJSSerializer):\n \"\"\"See CalenderJSSerializer, customised classes.\"\"\"\n\n class Meta(CalenderJSSerializer.Meta):\n model = Event\n\n def _class_names(self, instance):\n return [\"unpublished-event\"]\n\n def _url(self, instance):\n return reverse(\"admin:events_event_details\", kwargs={\"pk\": instance.id})\n", "path": "website/events/api/calendarjs/serializers.py"}, {"content": "from collections import OrderedDict\n\nfrom django.utils import timezone\nfrom django.utils.datetime_safe import date\nfrom django.utils.translation import gettext_lazy as _, get_language\n\nfrom events import emails\nfrom events.exceptions import RegistrationError\nfrom events.models import EventRegistration, RegistrationInformationField, Event\nfrom payments.api.v1.fields import PaymentTypeField\nfrom payments.services import create_payment, delete_payment\nfrom utils.snippets import datetime_to_lectureyear\n\n\ndef is_user_registered(member, event):\n \"\"\"Return if the user is registered for the specified event.\n\n :param member: the user\n :param event: the event\n :return: None if registration is not required or no member else True/False\n \"\"\"\n if not event.registration_required or not member.is_authenticated:\n return None\n\n return event.registrations.filter(member=member, date_cancelled=None).count() > 0\n\n\ndef is_user_present(member, event):\n if not event.registration_required or not member.is_authenticated:\n return None\n\n return (\n event.registrations.filter(\n member=member, date_cancelled=None, present=True\n ).count()\n > 0\n )\n\n\ndef event_permissions(member, event, name=None):\n \"\"\"Return a dictionary with the available event permissions of the user.\n\n :param member: the user\n :param event: the event\n :param name: the name of a non member registration\n :return: the permission dictionary\n \"\"\"\n perms = {\n \"create_registration\": False,\n \"cancel_registration\": False,\n \"update_registration\": False,\n \"manage_event\": is_organiser(member, event),\n }\n if not member:\n return perms\n if not (member.is_authenticated or name):\n return perms\n\n registration = None\n try:\n registration = EventRegistration.objects.get(\n event=event, member=member, name=name\n )\n except EventRegistration.DoesNotExist:\n pass\n\n perms[\"create_registration\"] = (\n (registration is None or registration.date_cancelled is not None)\n and (event.registration_allowed or not event.registration_required)\n and (name or 
member.can_attend_events)\n )\n perms[\"cancel_registration\"] = (\n registration is not None\n and registration.date_cancelled is None\n and (event.cancellation_allowed or name or not event.registration_required)\n and registration.payment is None\n )\n perms[\"update_registration\"] = (\n registration is not None\n and registration.date_cancelled is None\n and event.has_fields\n and (event.registration_allowed or not event.registration_required)\n and (name or member.can_attend_events)\n )\n return perms\n\n\ndef is_organiser(member, event):\n if member and member.is_authenticated:\n if member.is_superuser or member.has_perm(\"events.override_organiser\"):\n return True\n\n if event:\n return member.get_member_groups().filter(pk=event.organiser.pk).count() != 0\n\n return False\n\n\ndef create_registration(member, event):\n \"\"\"Create a new user registration for an event.\n\n :param member: the user\n :param event: the event\n :return: Return the registration if successful\n \"\"\"\n if event_permissions(member, event)[\"create_registration\"]:\n registration = None\n try:\n registration = EventRegistration.objects.get(event=event, member=member)\n except EventRegistration.DoesNotExist:\n pass\n\n if registration is None:\n return EventRegistration.objects.create(event=event, member=member)\n if registration.date_cancelled is not None:\n if registration.is_late_cancellation():\n raise RegistrationError(\n _(\n \"You cannot re-register anymore \"\n \"since you've cancelled after the \"\n \"deadline.\"\n )\n )\n registration.date = timezone.now()\n registration.date_cancelled = None\n registration.save()\n\n return registration\n if event_permissions(member, event)[\"cancel_registration\"]:\n raise RegistrationError(_(\"You were already registered.\"))\n raise RegistrationError(_(\"You may not register.\"))\n\n\ndef cancel_registration(member, event):\n \"\"\"Cancel a user registration for an event.\n\n :param member: the user\n :param event: the event\n \"\"\"\n registration = None\n try:\n registration = EventRegistration.objects.get(event=event, member=member)\n except EventRegistration.DoesNotExist:\n pass\n\n if event_permissions(member, event)[\"cancel_registration\"] and registration:\n if not registration.queue_position:\n emails.notify_first_waiting(event)\n\n if event.send_cancel_email and event.after_cancel_deadline:\n emails.notify_organiser(event, registration)\n\n # Note that this doesn\"t remove the values for the\n # information fields that the user entered upon registering.\n # But this is regarded as a feature, not a bug. 
Especially\n # since the values will still appear in the backend.\n registration.date_cancelled = timezone.now()\n registration.save()\n else:\n raise RegistrationError(_(\"You are not allowed to deregister for this event.\"))\n\n\ndef update_registration(\n member=None, event=None, name=None, registration=None, field_values=None\n):\n \"\"\"Update a user registration of an event.\n\n :param request: http request\n :param member: the user\n :param event: the event\n :param name: the name of a registration not associated with a user\n :param registration: the registration\n :param field_values: values for the information fields\n \"\"\"\n if not registration:\n try:\n registration = EventRegistration.objects.get(\n event=event, member=member, name=name\n )\n except EventRegistration.DoesNotExist as error:\n raise RegistrationError(\n _(\"You are not registered for this event.\")\n ) from error\n else:\n member = registration.member\n event = registration.event\n name = registration.name\n\n if (\n not event_permissions(member, event, name)[\"update_registration\"]\n or not field_values\n ):\n return\n\n for field_id, field_value in field_values:\n field = RegistrationInformationField.objects.get(\n id=field_id.replace(\"info_field_\", \"\")\n )\n\n if (\n field.type == RegistrationInformationField.INTEGER_FIELD\n and field_value is None\n ):\n field_value = 0\n elif (\n field.type == RegistrationInformationField.BOOLEAN_FIELD\n and field_value is None\n ):\n field_value = False\n elif (\n field.type == RegistrationInformationField.TEXT_FIELD\n and field_value is None\n ):\n field_value = \"\"\n\n field.set_value_for(registration, field_value)\n\n\ndef registration_fields(request, member=None, event=None, registration=None, name=None):\n \"\"\"Return information about the registration fields of a registration.\n\n :param member: the user (optional if registration provided)\n :param name: the name of a non member registration\n (optional if registration provided)\n :param event: the event (optional if registration provided)\n :param registration: the registration (optional if member & event provided)\n :return: the fields\n \"\"\"\n if registration is None:\n try:\n registration = EventRegistration.objects.get(\n event=event, member=member, name=name\n )\n except EventRegistration.DoesNotExist as error:\n raise RegistrationError(\n _(\"You are not registered for this event.\")\n ) from error\n except EventRegistration.MultipleObjectsReturned as error:\n raise RegistrationError(\n _(\"Unable to find the right registration.\")\n ) from error\n\n member = registration.member\n event = registration.event\n name = registration.name\n\n perms = event_permissions(member, event, name)[\n \"update_registration\"\n ] or is_organiser(request.member, event)\n if perms and registration:\n information_fields = registration.information_fields\n fields = OrderedDict()\n\n for information_field in information_fields:\n field = information_field[\"field\"]\n\n fields[\"info_field_{}\".format(field.id)] = {\n \"type\": field.type,\n \"label\": getattr(field, \"{}_{}\".format(\"name\", get_language())),\n \"description\": getattr(\n field, \"{}_{}\".format(\"description\", get_language())\n ),\n \"value\": information_field[\"value\"],\n \"required\": field.required,\n }\n\n return fields\n raise RegistrationError(_(\"You are not allowed to update this registration.\"))\n\n\ndef update_registration_by_organiser(registration, member, data):\n if not is_organiser(member, registration.event):\n raise 
RegistrationError(_(\"You are not allowed to update this registration.\"))\n\n if \"payment\" in data:\n if data[\"payment\"][\"type\"] == PaymentTypeField.NO_PAYMENT:\n if registration.payment is not None:\n delete_payment(registration)\n else:\n registration.payment = create_payment(\n payable=registration,\n processed_by=member,\n pay_type=data[\"payment\"][\"type\"],\n )\n\n if \"present\" in data:\n registration.present = data[\"present\"]\n\n registration.save()\n\n\ndef generate_category_statistics():\n \"\"\"Generate statistics about events, number of events per category.\n\n :return: Dict with key, value resp. being category, event count.\n \"\"\"\n year = datetime_to_lectureyear(timezone.now())\n\n data = {}\n for i in range(5):\n year_start = date(year=year - i, month=9, day=1)\n year_end = date(year=year - i + 1, month=9, day=1)\n data[str(year - i)] = {\n str(display): Event.objects.filter(\n category=key, start__gte=year_start, end__lte=year_end\n ).count()\n for key, display in Event.EVENT_CATEGORIES\n }\n\n return data\n", "path": "website/events/services.py"}], "after_files": [{"content": "from rest_framework.reverse import reverse\n\nfrom events import services\nfrom events.models import Event\nfrom thaliawebsite.api.calendarjs.serializers import CalenderJSSerializer\n\n\nclass EventsCalenderJSSerializer(CalenderJSSerializer):\n class Meta(CalenderJSSerializer.Meta):\n model = Event\n\n def _url(self, instance):\n return reverse(\"events:event\", kwargs={\"pk\": instance.id})\n\n def _class_names(self, instance):\n if self.context[\"member\"] and services.is_user_registered(\n self.context[\"member\"], instance\n ):\n if services.user_registration_pending(self.context[\"member\"], instance):\n return [\"regular-event-pending-registration\"]\n else:\n return [\"regular-event-has-registration\"]\n elif (not instance.registration_required) or instance.registration_allowed:\n return [\"regular-event-registration-open\"]\n else:\n # I think this handles the case that registration is needed, but not yet possible\n return [\"regular-event-registration-closed\"]\n\n\nclass UnpublishedEventsCalenderJSSerializer(CalenderJSSerializer):\n \"\"\"See CalenderJSSerializer, customised classes.\"\"\"\n\n class Meta(CalenderJSSerializer.Meta):\n model = Event\n\n def _class_names(self, instance):\n return [\"unpublished-event\"]\n\n def _url(self, instance):\n return reverse(\"admin:events_event_details\", kwargs={\"pk\": instance.id})\n", "path": "website/events/api/calendarjs/serializers.py"}, {"content": "from collections import OrderedDict\n\nfrom django.utils import timezone\nfrom django.utils.datetime_safe import date\nfrom django.utils.translation import gettext_lazy as _, get_language\n\nfrom events import emails\nfrom events.exceptions import RegistrationError\nfrom events.models import EventRegistration, RegistrationInformationField, Event\nfrom payments.api.v1.fields import PaymentTypeField\nfrom payments.services import create_payment, delete_payment\nfrom utils.snippets import datetime_to_lectureyear\n\n\ndef is_user_registered(member, event):\n \"\"\"Return if the user is registered for the specified event.\n\n :param member: the user\n :param event: the event\n :return: None if registration is not required or no member else True/False\n \"\"\"\n if not member.is_authenticated:\n return None\n\n return event.registrations.filter(member=member, date_cancelled=None).count() > 0\n\n\ndef user_registration_pending(member, event):\n \"\"\"Return if the user is in the queue, but 
not yet registered for, the specific event.\n\n :param member: the user\n :param event: the event\n :return: None if registration is not required or no member else True/False\n \"\"\"\n if not event.registration_required:\n return False\n if not member.is_authenticated:\n return None\n\n reg = event.registrations.filter(member=member, date_cancelled=None)\n return len(list(filter(lambda r: r.queue_position, reg))) > 0\n\n\ndef is_user_present(member, event):\n if not event.registration_required or not member.is_authenticated:\n return None\n\n return (\n event.registrations.filter(\n member=member, date_cancelled=None, present=True\n ).count()\n > 0\n )\n\n\ndef event_permissions(member, event, name=None):\n \"\"\"Return a dictionary with the available event permissions of the user.\n\n :param member: the user\n :param event: the event\n :param name: the name of a non member registration\n :return: the permission dictionary\n \"\"\"\n perms = {\n \"create_registration\": False,\n \"cancel_registration\": False,\n \"update_registration\": False,\n \"manage_event\": is_organiser(member, event),\n }\n if not member:\n return perms\n if not (member.is_authenticated or name):\n return perms\n\n registration = None\n try:\n registration = EventRegistration.objects.get(\n event=event, member=member, name=name\n )\n except EventRegistration.DoesNotExist:\n pass\n\n perms[\"create_registration\"] = (\n (registration is None or registration.date_cancelled is not None)\n and (event.registration_allowed or not event.registration_required)\n and (name or member.can_attend_events)\n )\n perms[\"cancel_registration\"] = (\n registration is not None\n and registration.date_cancelled is None\n and (event.cancellation_allowed or name or not event.registration_required)\n and registration.payment is None\n )\n perms[\"update_registration\"] = (\n registration is not None\n and registration.date_cancelled is None\n and event.has_fields\n and (event.registration_allowed or not event.registration_required)\n and (name or member.can_attend_events)\n )\n return perms\n\n\ndef is_organiser(member, event):\n if member and member.is_authenticated:\n if member.is_superuser or member.has_perm(\"events.override_organiser\"):\n return True\n\n if event:\n return member.get_member_groups().filter(pk=event.organiser.pk).count() != 0\n\n return False\n\n\ndef create_registration(member, event):\n \"\"\"Create a new user registration for an event.\n\n :param member: the user\n :param event: the event\n :return: Return the registration if successful\n \"\"\"\n if event_permissions(member, event)[\"create_registration\"]:\n registration = None\n try:\n registration = EventRegistration.objects.get(event=event, member=member)\n except EventRegistration.DoesNotExist:\n pass\n\n if registration is None:\n return EventRegistration.objects.create(event=event, member=member)\n if registration.date_cancelled is not None:\n if registration.is_late_cancellation():\n raise RegistrationError(\n _(\n \"You cannot re-register anymore \"\n \"since you've cancelled after the \"\n \"deadline.\"\n )\n )\n registration.date = timezone.now()\n registration.date_cancelled = None\n registration.save()\n\n return registration\n if event_permissions(member, event)[\"cancel_registration\"]:\n raise RegistrationError(_(\"You were already registered.\"))\n raise RegistrationError(_(\"You may not register.\"))\n\n\ndef cancel_registration(member, event):\n \"\"\"Cancel a user registration for an event.\n\n :param member: the user\n :param event: the 
event\n \"\"\"\n registration = None\n try:\n registration = EventRegistration.objects.get(event=event, member=member)\n except EventRegistration.DoesNotExist:\n pass\n\n if event_permissions(member, event)[\"cancel_registration\"] and registration:\n if not registration.queue_position:\n emails.notify_first_waiting(event)\n\n if event.send_cancel_email and event.after_cancel_deadline:\n emails.notify_organiser(event, registration)\n\n # Note that this doesn\"t remove the values for the\n # information fields that the user entered upon registering.\n # But this is regarded as a feature, not a bug. Especially\n # since the values will still appear in the backend.\n registration.date_cancelled = timezone.now()\n registration.save()\n else:\n raise RegistrationError(_(\"You are not allowed to deregister for this event.\"))\n\n\ndef update_registration(\n member=None, event=None, name=None, registration=None, field_values=None\n):\n \"\"\"Update a user registration of an event.\n\n :param request: http request\n :param member: the user\n :param event: the event\n :param name: the name of a registration not associated with a user\n :param registration: the registration\n :param field_values: values for the information fields\n \"\"\"\n if not registration:\n try:\n registration = EventRegistration.objects.get(\n event=event, member=member, name=name\n )\n except EventRegistration.DoesNotExist as error:\n raise RegistrationError(\n _(\"You are not registered for this event.\")\n ) from error\n else:\n member = registration.member\n event = registration.event\n name = registration.name\n\n if (\n not event_permissions(member, event, name)[\"update_registration\"]\n or not field_values\n ):\n return\n\n for field_id, field_value in field_values:\n field = RegistrationInformationField.objects.get(\n id=field_id.replace(\"info_field_\", \"\")\n )\n\n if (\n field.type == RegistrationInformationField.INTEGER_FIELD\n and field_value is None\n ):\n field_value = 0\n elif (\n field.type == RegistrationInformationField.BOOLEAN_FIELD\n and field_value is None\n ):\n field_value = False\n elif (\n field.type == RegistrationInformationField.TEXT_FIELD\n and field_value is None\n ):\n field_value = \"\"\n\n field.set_value_for(registration, field_value)\n\n\ndef registration_fields(request, member=None, event=None, registration=None, name=None):\n \"\"\"Return information about the registration fields of a registration.\n\n :param member: the user (optional if registration provided)\n :param name: the name of a non member registration\n (optional if registration provided)\n :param event: the event (optional if registration provided)\n :param registration: the registration (optional if member & event provided)\n :return: the fields\n \"\"\"\n if registration is None:\n try:\n registration = EventRegistration.objects.get(\n event=event, member=member, name=name\n )\n except EventRegistration.DoesNotExist as error:\n raise RegistrationError(\n _(\"You are not registered for this event.\")\n ) from error\n except EventRegistration.MultipleObjectsReturned as error:\n raise RegistrationError(\n _(\"Unable to find the right registration.\")\n ) from error\n\n member = registration.member\n event = registration.event\n name = registration.name\n\n perms = event_permissions(member, event, name)[\n \"update_registration\"\n ] or is_organiser(request.member, event)\n if perms and registration:\n information_fields = registration.information_fields\n fields = OrderedDict()\n\n for information_field in 
information_fields:\n field = information_field[\"field\"]\n\n fields[\"info_field_{}\".format(field.id)] = {\n \"type\": field.type,\n \"label\": getattr(field, \"{}_{}\".format(\"name\", get_language())),\n \"description\": getattr(\n field, \"{}_{}\".format(\"description\", get_language())\n ),\n \"value\": information_field[\"value\"],\n \"required\": field.required,\n }\n\n return fields\n raise RegistrationError(_(\"You are not allowed to update this registration.\"))\n\n\ndef update_registration_by_organiser(registration, member, data):\n if not is_organiser(member, registration.event):\n raise RegistrationError(_(\"You are not allowed to update this registration.\"))\n\n if \"payment\" in data:\n if data[\"payment\"][\"type\"] == PaymentTypeField.NO_PAYMENT:\n if registration.payment is not None:\n delete_payment(registration)\n else:\n registration.payment = create_payment(\n payable=registration,\n processed_by=member,\n pay_type=data[\"payment\"][\"type\"],\n )\n\n if \"present\" in data:\n registration.present = data[\"present\"]\n\n registration.save()\n\n\ndef generate_category_statistics():\n \"\"\"Generate statistics about events, number of events per category.\n\n :return: Dict with key, value resp. being category, event count.\n \"\"\"\n year = datetime_to_lectureyear(timezone.now())\n\n data = {}\n for i in range(5):\n year_start = date(year=year - i, month=9, day=1)\n year_end = date(year=year - i + 1, month=9, day=1)\n data[str(year - i)] = {\n str(display): Event.objects.filter(\n category=key, start__gte=year_start, end__lte=year_end\n ).count()\n for key, display in Event.EVENT_CATEGORIES\n }\n\n return data\n", "path": "website/events/services.py"}]}
| 3,581 | 531 |
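For orientation between records: the golden diff above separates four registration states by CSS class name, so the calendar front end can colour waiting-list events differently from confirmed registrations. Below is a minimal standalone sketch of that mapping; it is not part of the repository, and the colour values a stylesheet might attach to these classes are assumptions.

```python
# Sketch only: mirrors the decision order of the patched _class_names method.
# The hex colours are hypothetical examples of how a stylesheet could use the classes.
CLASS_TO_COLOUR = {
    "regular-event-has-registration": "#e62272",      # registered (assumed colour)
    "regular-event-pending-registration": "#f0a30a",  # on the waiting list (assumed colour)
    "regular-event-registration-open": "#4caf50",     # registration still possible (assumed colour)
    "regular-event-registration-closed": "#9e9e9e",   # registration required but not open (assumed colour)
}


def class_name(is_registered, is_pending, registration_required, registration_allowed):
    """Standalone mirror of the patched serializer logic, for illustration only."""
    if is_registered:
        return "regular-event-pending-registration" if is_pending else "regular-event-has-registration"
    if (not registration_required) or registration_allowed:
        return "regular-event-registration-open"
    return "regular-event-registration-closed"
```

For example, `class_name(True, True, True, True)` returns the pending-registration class, which is what a member sitting on the waiting list would get.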
gh_patches_debug_33960 | rasdani/github-patches | git_diff | tensorflow__addons-187 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Dense Image Warp tests are flaky
Recently we've seen that #53 is causing flaky failures in the CI. See:
https://source.cloud.google.com/results/invocations/8f31faef-505a-440e-b75f-e6edf1071269/targets/tensorflow_addons%2Fubuntu%2Fgpu%2Fpy3%2Fpresubmit/log
Do you mind taking a look when time allows, @WindQAQ?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tensorflow_addons/image/dense_image_warp.py`
Content:
```
1 # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 # ==============================================================================
15 """Image warping using per-pixel flow vectors."""
16 from __future__ import absolute_import
17 from __future__ import division
18 from __future__ import print_function
19
20 import numpy as np
21 import tensorflow as tf
22
23
24 @tf.function
25 def interpolate_bilinear(grid, query_points, indexing="ij", name=None):
26 """Similar to Matlab's interp2 function.
27
28 Finds values for query points on a grid using bilinear interpolation.
29
30 Args:
31 grid: a 4-D float `Tensor` of shape `[batch, height, width, channels]`.
32 query_points: a 3-D float `Tensor` of N points with shape
33 `[batch, N, 2]`.
34 indexing: whether the query points are specified as row and column (ij),
35 or Cartesian coordinates (xy).
36 name: a name for the operation (optional).
37
38 Returns:
39 values: a 3-D `Tensor` with shape `[batch, N, channels]`
40
41 Raises:
42 ValueError: if the indexing mode is invalid, or if the shape of the
43 inputs invalid.
44 """
45 if indexing != "ij" and indexing != "xy":
46 raise ValueError("Indexing mode must be \'ij\' or \'xy\'")
47
48 with tf.name_scope(name or "interpolate_bilinear"):
49 grid = tf.convert_to_tensor(grid)
50 query_points = tf.convert_to_tensor(query_points)
51 shape = grid.get_shape().as_list()
52 if len(shape) != 4:
53 msg = "Grid must be 4 dimensional. Received size: "
54 raise ValueError(msg + str(grid.get_shape()))
55
56 batch_size, height, width, channels = (tf.shape(grid)[0],
57 tf.shape(grid)[1],
58 tf.shape(grid)[2],
59 tf.shape(grid)[3])
60
61 shape = [batch_size, height, width, channels]
62 query_type = query_points.dtype
63 grid_type = grid.dtype
64
65 tf.debugging.assert_equal(
66 len(query_points.get_shape()),
67 3,
68 message="Query points must be 3 dimensional.")
69 tf.debugging.assert_equal(
70 tf.shape(query_points)[2],
71 2,
72 message="Query points must be size 2 in dim 2.")
73
74 num_queries = tf.shape(query_points)[1]
75
76 tf.debugging.assert_greater_equal(
77 height, 2, message="Grid height must be at least 2."),
78 tf.debugging.assert_greater_equal(
79 width, 2, message="Grid width must be at least 2.")
80
81 alphas = []
82 floors = []
83 ceils = []
84 index_order = [0, 1] if indexing == "ij" else [1, 0]
85 unstacked_query_points = tf.unstack(query_points, axis=2)
86
87 for dim in index_order:
88 with tf.name_scope("dim-" + str(dim)):
89 queries = unstacked_query_points[dim]
90
91 size_in_indexing_dimension = shape[dim + 1]
92
93 # max_floor is size_in_indexing_dimension - 2 so that max_floor + 1
94 # is still a valid index into the grid.
95 max_floor = tf.cast(size_in_indexing_dimension - 2, query_type)
96 min_floor = tf.constant(0.0, dtype=query_type)
97 floor = tf.math.minimum(
98 tf.math.maximum(min_floor, tf.math.floor(queries)),
99 max_floor)
100 int_floor = tf.cast(floor, tf.dtypes.int32)
101 floors.append(int_floor)
102 ceil = int_floor + 1
103 ceils.append(ceil)
104
105 # alpha has the same type as the grid, as we will directly use alpha
106 # when taking linear combinations of pixel values from the image.
107 alpha = tf.cast(queries - floor, grid_type)
108 min_alpha = tf.constant(0.0, dtype=grid_type)
109 max_alpha = tf.constant(1.0, dtype=grid_type)
110 alpha = tf.math.minimum(
111 tf.math.maximum(min_alpha, alpha), max_alpha)
112
113 # Expand alpha to [b, n, 1] so we can use broadcasting
114 # (since the alpha values don't depend on the channel).
115 alpha = tf.expand_dims(alpha, 2)
116 alphas.append(alpha)
117
118 tf.debugging.assert_less_equal(
119 tf.cast(batch_size * height * width, dtype=tf.dtypes.float32),
120 np.iinfo(np.int32).max / 8.0,
121 message="The image size or batch size is sufficiently large "
122 "that the linearized addresses used by tf.gather "
123 "may exceed the int32 limit.")
124 flattened_grid = tf.reshape(grid,
125 [batch_size * height * width, channels])
126 batch_offsets = tf.reshape(
127 tf.range(batch_size) * height * width, [batch_size, 1])
128
129 # This wraps tf.gather. We reshape the image data such that the
130 # batch, y, and x coordinates are pulled into the first dimension.
131 # Then we gather. Finally, we reshape the output back. It's possible this
132 # code would be made simpler by using tf.gather_nd.
133 def gather(y_coords, x_coords, name):
134 with tf.name_scope("gather-" + name):
135 linear_coordinates = (
136 batch_offsets + y_coords * width + x_coords)
137 gathered_values = tf.gather(flattened_grid, linear_coordinates)
138 return tf.reshape(gathered_values,
139 [batch_size, num_queries, channels])
140
141 # grab the pixel values in the 4 corners around each query point
142 top_left = gather(floors[0], floors[1], "top_left")
143 top_right = gather(floors[0], ceils[1], "top_right")
144 bottom_left = gather(ceils[0], floors[1], "bottom_left")
145 bottom_right = gather(ceils[0], ceils[1], "bottom_right")
146
147 # now, do the actual interpolation
148 with tf.name_scope("interpolate"):
149 interp_top = alphas[1] * (top_right - top_left) + top_left
150 interp_bottom = alphas[1] * (
151 bottom_right - bottom_left) + bottom_left
152 interp = alphas[0] * (interp_bottom - interp_top) + interp_top
153
154 return interp
155
156
157 @tf.function
158 def dense_image_warp(image, flow, name=None):
159 """Image warping using per-pixel flow vectors.
160
161 Apply a non-linear warp to the image, where the warp is specified by a
162 dense flow field of offset vectors that define the correspondences of
163 pixel values in the output image back to locations in the source image.
164 Specifically, the pixel value at output[b, j, i, c] is
165 images[b, j - flow[b, j, i, 0], i - flow[b, j, i, 1], c].
166
167 The locations specified by this formula do not necessarily map to an int
168 index. Therefore, the pixel value is obtained by bilinear
169 interpolation of the 4 nearest pixels around
170 (b, j - flow[b, j, i, 0], i - flow[b, j, i, 1]). For locations outside
171 of the image, we use the nearest pixel values at the image boundary.
172
173 Args:
174 image: 4-D float `Tensor` with shape `[batch, height, width, channels]`.
175 flow: A 4-D float `Tensor` with shape `[batch, height, width, 2]`.
176 name: A name for the operation (optional).
177
178 Note that image and flow can be of type tf.half, tf.float32, or
179 tf.float64, and do not necessarily have to be the same type.
180
181 Returns:
182 A 4-D float `Tensor` with shape`[batch, height, width, channels]`
183 and same type as input image.
184
185 Raises:
186 ValueError: if height < 2 or width < 2 or the inputs have the wrong
187 number of dimensions.
188 """
189 with tf.name_scope(name or "dense_image_warp"):
190 image = tf.convert_to_tensor(image)
191 flow = tf.convert_to_tensor(flow)
192 batch_size, height, width, channels = (tf.shape(image)[0],
193 tf.shape(image)[1],
194 tf.shape(image)[2],
195 tf.shape(image)[3])
196
197 # The flow is defined on the image grid. Turn the flow into a list of query
198 # points in the grid space.
199 grid_x, grid_y = tf.meshgrid(tf.range(width), tf.range(height))
200 stacked_grid = tf.cast(tf.stack([grid_y, grid_x], axis=2), flow.dtype)
201 batched_grid = tf.expand_dims(stacked_grid, axis=0)
202 query_points_on_grid = batched_grid - flow
203 query_points_flattened = tf.reshape(query_points_on_grid,
204 [batch_size, height * width, 2])
205 # Compute values at the query points, then reshape the result back to the
206 # image grid.
207 interpolated = interpolate_bilinear(image, query_points_flattened)
208 interpolated = tf.reshape(interpolated,
209 [batch_size, height, width, channels])
210 return interpolated
211
```
--- END FILES ---
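For context, a minimal usage sketch of the `dense_image_warp` function defined above; the shapes, the zero flow field, and the import path are illustrative assumptions rather than anything taken from the issue:

```python
import tensorflow as tf
from tensorflow_addons.image import dense_image_warp  # assumed import path

# One 4x4 single-channel image and a per-pixel flow field of the same spatial size.
image = tf.random.uniform([1, 4, 4, 1], dtype=tf.float32)
flow = tf.zeros([1, 4, 4, 2], dtype=tf.float32)  # zero flow -> identity warp

warped = dense_image_warp(image, flow)
print(warped.shape)  # (1, 4, 4, 1); equal to `image` because the flow is zero
```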
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/tensorflow_addons/image/dense_image_warp.py b/tensorflow_addons/image/dense_image_warp.py
--- a/tensorflow_addons/image/dense_image_warp.py
+++ b/tensorflow_addons/image/dense_image_warp.py
@@ -21,7 +21,6 @@
import tensorflow as tf
[email protected]
def interpolate_bilinear(grid, query_points, indexing="ij", name=None):
"""Similar to Matlab's interp2 function.
@@ -48,30 +47,28 @@
with tf.name_scope(name or "interpolate_bilinear"):
grid = tf.convert_to_tensor(grid)
query_points = tf.convert_to_tensor(query_points)
- shape = grid.get_shape().as_list()
- if len(shape) != 4:
+
+ if len(grid.shape) != 4:
msg = "Grid must be 4 dimensional. Received size: "
- raise ValueError(msg + str(grid.get_shape()))
+ raise ValueError(msg + str(grid.shape))
+
+ if len(query_points.shape) != 3:
+ raise ValueError("Query points must be 3 dimensional.")
+
+ grid_shape = tf.shape(grid)
+ query_shape = tf.shape(query_points)
- batch_size, height, width, channels = (tf.shape(grid)[0],
- tf.shape(grid)[1],
- tf.shape(grid)[2],
- tf.shape(grid)[3])
+ batch_size, height, width, channels = (grid_shape[0], grid_shape[1],
+ grid_shape[2], grid_shape[3])
shape = [batch_size, height, width, channels]
+ num_queries = query_shape[1]
+
query_type = query_points.dtype
grid_type = grid.dtype
tf.debugging.assert_equal(
- len(query_points.get_shape()),
- 3,
- message="Query points must be 3 dimensional.")
- tf.debugging.assert_equal(
- tf.shape(query_points)[2],
- 2,
- message="Query points must be size 2 in dim 2.")
-
- num_queries = tf.shape(query_points)[1]
+ query_shape[2], 2, message="Query points must be size 2 in dim 2.")
tf.debugging.assert_greater_equal(
height, 2, message="Grid height must be at least 2."),
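The patch replaces graph-level rank assertions with plain Python checks on the static shape; a minimal sketch of the difference, using an arbitrary tensor:

```python
import tensorflow as tf

x = tf.zeros([2, 3, 4, 1])

# Static shape: an ordinary Python object, so a rank check can raise immediately.
print(len(x.shape))  # 4

# Dynamic shape: a tensor that is only evaluated when the graph runs, so
# assertions built on it become graph ops instead of plain Python errors.
print(tf.shape(x))   # tf.Tensor([2 3 4 1], shape=(4,), dtype=int32)
```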
|
{"golden_diff": "diff --git a/tensorflow_addons/image/dense_image_warp.py b/tensorflow_addons/image/dense_image_warp.py\n--- a/tensorflow_addons/image/dense_image_warp.py\n+++ b/tensorflow_addons/image/dense_image_warp.py\n@@ -21,7 +21,6 @@\n import tensorflow as tf\n \n \[email protected]\n def interpolate_bilinear(grid, query_points, indexing=\"ij\", name=None):\n \"\"\"Similar to Matlab's interp2 function.\n \n@@ -48,30 +47,28 @@\n with tf.name_scope(name or \"interpolate_bilinear\"):\n grid = tf.convert_to_tensor(grid)\n query_points = tf.convert_to_tensor(query_points)\n- shape = grid.get_shape().as_list()\n- if len(shape) != 4:\n+\n+ if len(grid.shape) != 4:\n msg = \"Grid must be 4 dimensional. Received size: \"\n- raise ValueError(msg + str(grid.get_shape()))\n+ raise ValueError(msg + str(grid.shape))\n+\n+ if len(query_points.shape) != 3:\n+ raise ValueError(\"Query points must be 3 dimensional.\")\n+\n+ grid_shape = tf.shape(grid)\n+ query_shape = tf.shape(query_points)\n \n- batch_size, height, width, channels = (tf.shape(grid)[0],\n- tf.shape(grid)[1],\n- tf.shape(grid)[2],\n- tf.shape(grid)[3])\n+ batch_size, height, width, channels = (grid_shape[0], grid_shape[1],\n+ grid_shape[2], grid_shape[3])\n \n shape = [batch_size, height, width, channels]\n+ num_queries = query_shape[1]\n+\n query_type = query_points.dtype\n grid_type = grid.dtype\n \n tf.debugging.assert_equal(\n- len(query_points.get_shape()),\n- 3,\n- message=\"Query points must be 3 dimensional.\")\n- tf.debugging.assert_equal(\n- tf.shape(query_points)[2],\n- 2,\n- message=\"Query points must be size 2 in dim 2.\")\n-\n- num_queries = tf.shape(query_points)[1]\n+ query_shape[2], 2, message=\"Query points must be size 2 in dim 2.\")\n \n tf.debugging.assert_greater_equal(\n height, 2, message=\"Grid height must be at least 2.\"),\n", "issue": "Dense Image Warp tests are flaky\nRecently we've seen that #53 is causing flaky failures in the CI. See:\r\nhttps://source.cloud.google.com/results/invocations/8f31faef-505a-440e-b75f-e6edf1071269/targets/tensorflow_addons%2Fubuntu%2Fgpu%2Fpy3%2Fpresubmit/log\r\n\r\nDo you mind taking a look when time allows @WindQAQ ?\n", "before_files": [{"content": "# Copyright 2019 The TensorFlow Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Image warping using per-pixel flow vectors.\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport numpy as np\nimport tensorflow as tf\n\n\[email protected]\ndef interpolate_bilinear(grid, query_points, indexing=\"ij\", name=None):\n \"\"\"Similar to Matlab's interp2 function.\n\n Finds values for query points on a grid using bilinear interpolation.\n\n Args:\n grid: a 4-D float `Tensor` of shape `[batch, height, width, channels]`.\n query_points: a 3-D float `Tensor` of N points with shape\n `[batch, N, 2]`.\n indexing: whether the query points are specified as row and column (ij),\n or Cartesian coordinates (xy).\n name: a name for the operation (optional).\n\n Returns:\n values: a 3-D `Tensor` with shape `[batch, N, channels]`\n\n Raises:\n ValueError: if the indexing mode is invalid, or if the shape of the\n inputs invalid.\n \"\"\"\n if indexing != \"ij\" and indexing != \"xy\":\n raise ValueError(\"Indexing mode must be \\'ij\\' or \\'xy\\'\")\n\n with tf.name_scope(name or \"interpolate_bilinear\"):\n grid = tf.convert_to_tensor(grid)\n query_points = tf.convert_to_tensor(query_points)\n shape = grid.get_shape().as_list()\n if len(shape) != 4:\n msg = \"Grid must be 4 dimensional. 
Received size: \"\n raise ValueError(msg + str(grid.get_shape()))\n\n batch_size, height, width, channels = (tf.shape(grid)[0],\n tf.shape(grid)[1],\n tf.shape(grid)[2],\n tf.shape(grid)[3])\n\n shape = [batch_size, height, width, channels]\n query_type = query_points.dtype\n grid_type = grid.dtype\n\n tf.debugging.assert_equal(\n len(query_points.get_shape()),\n 3,\n message=\"Query points must be 3 dimensional.\")\n tf.debugging.assert_equal(\n tf.shape(query_points)[2],\n 2,\n message=\"Query points must be size 2 in dim 2.\")\n\n num_queries = tf.shape(query_points)[1]\n\n tf.debugging.assert_greater_equal(\n height, 2, message=\"Grid height must be at least 2.\"),\n tf.debugging.assert_greater_equal(\n width, 2, message=\"Grid width must be at least 2.\")\n\n alphas = []\n floors = []\n ceils = []\n index_order = [0, 1] if indexing == \"ij\" else [1, 0]\n unstacked_query_points = tf.unstack(query_points, axis=2)\n\n for dim in index_order:\n with tf.name_scope(\"dim-\" + str(dim)):\n queries = unstacked_query_points[dim]\n\n size_in_indexing_dimension = shape[dim + 1]\n\n # max_floor is size_in_indexing_dimension - 2 so that max_floor + 1\n # is still a valid index into the grid.\n max_floor = tf.cast(size_in_indexing_dimension - 2, query_type)\n min_floor = tf.constant(0.0, dtype=query_type)\n floor = tf.math.minimum(\n tf.math.maximum(min_floor, tf.math.floor(queries)),\n max_floor)\n int_floor = tf.cast(floor, tf.dtypes.int32)\n floors.append(int_floor)\n ceil = int_floor + 1\n ceils.append(ceil)\n\n # alpha has the same type as the grid, as we will directly use alpha\n # when taking linear combinations of pixel values from the image.\n alpha = tf.cast(queries - floor, grid_type)\n min_alpha = tf.constant(0.0, dtype=grid_type)\n max_alpha = tf.constant(1.0, dtype=grid_type)\n alpha = tf.math.minimum(\n tf.math.maximum(min_alpha, alpha), max_alpha)\n\n # Expand alpha to [b, n, 1] so we can use broadcasting\n # (since the alpha values don't depend on the channel).\n alpha = tf.expand_dims(alpha, 2)\n alphas.append(alpha)\n\n tf.debugging.assert_less_equal(\n tf.cast(batch_size * height * width, dtype=tf.dtypes.float32),\n np.iinfo(np.int32).max / 8.0,\n message=\"The image size or batch size is sufficiently large \"\n \"that the linearized addresses used by tf.gather \"\n \"may exceed the int32 limit.\")\n flattened_grid = tf.reshape(grid,\n [batch_size * height * width, channels])\n batch_offsets = tf.reshape(\n tf.range(batch_size) * height * width, [batch_size, 1])\n\n # This wraps tf.gather. We reshape the image data such that the\n # batch, y, and x coordinates are pulled into the first dimension.\n # Then we gather. Finally, we reshape the output back. 
It's possible this\n # code would be made simpler by using tf.gather_nd.\n def gather(y_coords, x_coords, name):\n with tf.name_scope(\"gather-\" + name):\n linear_coordinates = (\n batch_offsets + y_coords * width + x_coords)\n gathered_values = tf.gather(flattened_grid, linear_coordinates)\n return tf.reshape(gathered_values,\n [batch_size, num_queries, channels])\n\n # grab the pixel values in the 4 corners around each query point\n top_left = gather(floors[0], floors[1], \"top_left\")\n top_right = gather(floors[0], ceils[1], \"top_right\")\n bottom_left = gather(ceils[0], floors[1], \"bottom_left\")\n bottom_right = gather(ceils[0], ceils[1], \"bottom_right\")\n\n # now, do the actual interpolation\n with tf.name_scope(\"interpolate\"):\n interp_top = alphas[1] * (top_right - top_left) + top_left\n interp_bottom = alphas[1] * (\n bottom_right - bottom_left) + bottom_left\n interp = alphas[0] * (interp_bottom - interp_top) + interp_top\n\n return interp\n\n\[email protected]\ndef dense_image_warp(image, flow, name=None):\n \"\"\"Image warping using per-pixel flow vectors.\n\n Apply a non-linear warp to the image, where the warp is specified by a\n dense flow field of offset vectors that define the correspondences of\n pixel values in the output image back to locations in the source image.\n Specifically, the pixel value at output[b, j, i, c] is\n images[b, j - flow[b, j, i, 0], i - flow[b, j, i, 1], c].\n\n The locations specified by this formula do not necessarily map to an int\n index. Therefore, the pixel value is obtained by bilinear\n interpolation of the 4 nearest pixels around\n (b, j - flow[b, j, i, 0], i - flow[b, j, i, 1]). For locations outside\n of the image, we use the nearest pixel values at the image boundary.\n\n Args:\n image: 4-D float `Tensor` with shape `[batch, height, width, channels]`.\n flow: A 4-D float `Tensor` with shape `[batch, height, width, 2]`.\n name: A name for the operation (optional).\n\n Note that image and flow can be of type tf.half, tf.float32, or\n tf.float64, and do not necessarily have to be the same type.\n\n Returns:\n A 4-D float `Tensor` with shape`[batch, height, width, channels]`\n and same type as input image.\n\n Raises:\n ValueError: if height < 2 or width < 2 or the inputs have the wrong\n number of dimensions.\n \"\"\"\n with tf.name_scope(name or \"dense_image_warp\"):\n image = tf.convert_to_tensor(image)\n flow = tf.convert_to_tensor(flow)\n batch_size, height, width, channels = (tf.shape(image)[0],\n tf.shape(image)[1],\n tf.shape(image)[2],\n tf.shape(image)[3])\n\n # The flow is defined on the image grid. Turn the flow into a list of query\n # points in the grid space.\n grid_x, grid_y = tf.meshgrid(tf.range(width), tf.range(height))\n stacked_grid = tf.cast(tf.stack([grid_y, grid_x], axis=2), flow.dtype)\n batched_grid = tf.expand_dims(stacked_grid, axis=0)\n query_points_on_grid = batched_grid - flow\n query_points_flattened = tf.reshape(query_points_on_grid,\n [batch_size, height * width, 2])\n # Compute values at the query points, then reshape the result back to the\n # image grid.\n interpolated = interpolate_bilinear(image, query_points_flattened)\n interpolated = tf.reshape(interpolated,\n [batch_size, height, width, channels])\n return interpolated\n", "path": "tensorflow_addons/image/dense_image_warp.py"}], "after_files": [{"content": "# Copyright 2019 The TensorFlow Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Image warping using per-pixel flow vectors.\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport numpy as np\nimport tensorflow as tf\n\n\ndef interpolate_bilinear(grid, query_points, indexing=\"ij\", name=None):\n \"\"\"Similar to Matlab's interp2 function.\n\n Finds values for query points on a grid using bilinear interpolation.\n\n Args:\n grid: a 4-D float `Tensor` of shape `[batch, height, width, channels]`.\n query_points: a 3-D float `Tensor` of N points with shape\n `[batch, N, 2]`.\n indexing: whether the query points are specified as row and column (ij),\n or Cartesian coordinates (xy).\n name: a name for the operation (optional).\n\n Returns:\n values: a 3-D `Tensor` with shape `[batch, N, channels]`\n\n Raises:\n ValueError: if the indexing mode is invalid, or if the shape of the\n inputs invalid.\n \"\"\"\n if indexing != \"ij\" and indexing != \"xy\":\n raise ValueError(\"Indexing mode must be \\'ij\\' or \\'xy\\'\")\n\n with tf.name_scope(name or \"interpolate_bilinear\"):\n grid = tf.convert_to_tensor(grid)\n query_points = tf.convert_to_tensor(query_points)\n\n if len(grid.shape) != 4:\n msg = \"Grid must be 4 dimensional. 
Received size: \"\n raise ValueError(msg + str(grid.shape))\n\n if len(query_points.shape) != 3:\n raise ValueError(\"Query points must be 3 dimensional.\")\n\n grid_shape = tf.shape(grid)\n query_shape = tf.shape(query_points)\n\n batch_size, height, width, channels = (grid_shape[0], grid_shape[1],\n grid_shape[2], grid_shape[3])\n\n shape = [batch_size, height, width, channels]\n num_queries = query_shape[1]\n\n query_type = query_points.dtype\n grid_type = grid.dtype\n\n tf.debugging.assert_equal(\n query_shape[2], 2, message=\"Query points must be size 2 in dim 2.\")\n\n tf.debugging.assert_greater_equal(\n height, 2, message=\"Grid height must be at least 2.\"),\n tf.debugging.assert_greater_equal(\n width, 2, message=\"Grid width must be at least 2.\")\n\n alphas = []\n floors = []\n ceils = []\n index_order = [0, 1] if indexing == \"ij\" else [1, 0]\n unstacked_query_points = tf.unstack(query_points, axis=2)\n\n for dim in index_order:\n with tf.name_scope(\"dim-\" + str(dim)):\n queries = unstacked_query_points[dim]\n\n size_in_indexing_dimension = shape[dim + 1]\n\n # max_floor is size_in_indexing_dimension - 2 so that max_floor + 1\n # is still a valid index into the grid.\n max_floor = tf.cast(size_in_indexing_dimension - 2, query_type)\n min_floor = tf.constant(0.0, dtype=query_type)\n floor = tf.math.minimum(\n tf.math.maximum(min_floor, tf.math.floor(queries)),\n max_floor)\n int_floor = tf.cast(floor, tf.dtypes.int32)\n floors.append(int_floor)\n ceil = int_floor + 1\n ceils.append(ceil)\n\n # alpha has the same type as the grid, as we will directly use alpha\n # when taking linear combinations of pixel values from the image.\n alpha = tf.cast(queries - floor, grid_type)\n min_alpha = tf.constant(0.0, dtype=grid_type)\n max_alpha = tf.constant(1.0, dtype=grid_type)\n alpha = tf.math.minimum(\n tf.math.maximum(min_alpha, alpha), max_alpha)\n\n # Expand alpha to [b, n, 1] so we can use broadcasting\n # (since the alpha values don't depend on the channel).\n alpha = tf.expand_dims(alpha, 2)\n alphas.append(alpha)\n\n tf.debugging.assert_less_equal(\n tf.cast(batch_size * height * width, dtype=tf.dtypes.float32),\n np.iinfo(np.int32).max / 8.0,\n message=\"The image size or batch size is sufficiently large \"\n \"that the linearized addresses used by tf.gather \"\n \"may exceed the int32 limit.\")\n flattened_grid = tf.reshape(grid,\n [batch_size * height * width, channels])\n batch_offsets = tf.reshape(\n tf.range(batch_size) * height * width, [batch_size, 1])\n\n # This wraps tf.gather. We reshape the image data such that the\n # batch, y, and x coordinates are pulled into the first dimension.\n # Then we gather. Finally, we reshape the output back. 
It's possible this\n # code would be made simpler by using tf.gather_nd.\n def gather(y_coords, x_coords, name):\n with tf.name_scope(\"gather-\" + name):\n linear_coordinates = (\n batch_offsets + y_coords * width + x_coords)\n gathered_values = tf.gather(flattened_grid, linear_coordinates)\n return tf.reshape(gathered_values,\n [batch_size, num_queries, channels])\n\n # grab the pixel values in the 4 corners around each query point\n top_left = gather(floors[0], floors[1], \"top_left\")\n top_right = gather(floors[0], ceils[1], \"top_right\")\n bottom_left = gather(ceils[0], floors[1], \"bottom_left\")\n bottom_right = gather(ceils[0], ceils[1], \"bottom_right\")\n\n # now, do the actual interpolation\n with tf.name_scope(\"interpolate\"):\n interp_top = alphas[1] * (top_right - top_left) + top_left\n interp_bottom = alphas[1] * (\n bottom_right - bottom_left) + bottom_left\n interp = alphas[0] * (interp_bottom - interp_top) + interp_top\n\n return interp\n\n\[email protected]\ndef dense_image_warp(image, flow, name=None):\n \"\"\"Image warping using per-pixel flow vectors.\n\n Apply a non-linear warp to the image, where the warp is specified by a\n dense flow field of offset vectors that define the correspondences of\n pixel values in the output image back to locations in the source image.\n Specifically, the pixel value at output[b, j, i, c] is\n images[b, j - flow[b, j, i, 0], i - flow[b, j, i, 1], c].\n\n The locations specified by this formula do not necessarily map to an int\n index. Therefore, the pixel value is obtained by bilinear\n interpolation of the 4 nearest pixels around\n (b, j - flow[b, j, i, 0], i - flow[b, j, i, 1]). For locations outside\n of the image, we use the nearest pixel values at the image boundary.\n\n Args:\n image: 4-D float `Tensor` with shape `[batch, height, width, channels]`.\n flow: A 4-D float `Tensor` with shape `[batch, height, width, 2]`.\n name: A name for the operation (optional).\n\n Note that image and flow can be of type tf.half, tf.float32, or\n tf.float64, and do not necessarily have to be the same type.\n\n Returns:\n A 4-D float `Tensor` with shape`[batch, height, width, channels]`\n and same type as input image.\n\n Raises:\n ValueError: if height < 2 or width < 2 or the inputs have the wrong\n number of dimensions.\n \"\"\"\n with tf.name_scope(name or \"dense_image_warp\"):\n image = tf.convert_to_tensor(image)\n flow = tf.convert_to_tensor(flow)\n batch_size, height, width, channels = (tf.shape(image)[0],\n tf.shape(image)[1],\n tf.shape(image)[2],\n tf.shape(image)[3])\n\n # The flow is defined on the image grid. Turn the flow into a list of query\n # points in the grid space.\n grid_x, grid_y = tf.meshgrid(tf.range(width), tf.range(height))\n stacked_grid = tf.cast(tf.stack([grid_y, grid_x], axis=2), flow.dtype)\n batched_grid = tf.expand_dims(stacked_grid, axis=0)\n query_points_on_grid = batched_grid - flow\n query_points_flattened = tf.reshape(query_points_on_grid,\n [batch_size, height * width, 2])\n # Compute values at the query points, then reshape the result back to the\n # image grid.\n interpolated = interpolate_bilinear(image, query_points_flattened)\n interpolated = tf.reshape(interpolated,\n [batch_size, height, width, channels])\n return interpolated\n", "path": "tensorflow_addons/image/dense_image_warp.py"}]}
| 3,023 | 522 |
gh_patches_debug_24019 | rasdani/github-patches | git_diff | nilearn__nilearn-2096
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
scipy.misc.imread() replaced by scipy.imageio.imread() in v1.2
`scipy.misc.imread()` was deprecated in SciPy 1.0 and replaced in SciPy 1.2 by `scipy.imageio.imread()`
https://docs.scipy.org/doc/scipy-1.2.1/reference/generated/scipy.misc.imread.html
This is causing failures in CircleCI.
I will work on this once the PR #2076 doctest problem has been addressed, since we need this issue to be resolved before that PR can be merged. I intend to do this today.
--- END ISSUE ---
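To make the requested migration concrete, a minimal sketch of loading and displaying one stimulus image without `scipy.misc.imread`; the file name is a placeholder, and `matplotlib.pyplot.imread` is used here as one possible replacement:

```python
import matplotlib.pyplot as plt

# scipy.misc.imread is gone in SciPy 1.2; plt.imread reads an image file into an array.
img = plt.imread("stimulus.png")  # placeholder path
plt.imshow(img, cmap=plt.cm.gray)
plt.axis("off")
plt.show()
```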
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/02_decoding/plot_haxby_stimuli.py`
Content:
```
1 """
2 Show stimuli of Haxby et al. dataset
3 ===============================================================================
4
5 In this script we plot an overview of the stimuli used in "Distributed
6 and Overlapping Representations of Faces and Objects in Ventral Temporal
7 Cortex" (Science 2001)
8 """
9
10 from scipy.misc import imread
11 import matplotlib.pyplot as plt
12
13 from nilearn import datasets
14 from nilearn.plotting import show
15
16 haxby_dataset = datasets.fetch_haxby(subjects=[], fetch_stimuli=True)
17 stimulus_information = haxby_dataset.stimuli
18
19 for stim_type in sorted(stimulus_information.keys()):
20 if stim_type == b'controls':
21 # skip control images, there are too many
22 continue
23
24 file_names = stimulus_information[stim_type]
25
26 plt.figure()
27 for i in range(48):
28 plt.subplot(6, 8, i + 1)
29 try:
30 plt.imshow(imread(file_names[i]), cmap=plt.cm.gray)
31 except:
32 # just go to the next one if the file is not present
33 pass
34 plt.axis("off")
35 plt.suptitle(stim_type)
36
37 show()
38
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/examples/02_decoding/plot_haxby_stimuli.py b/examples/02_decoding/plot_haxby_stimuli.py
--- a/examples/02_decoding/plot_haxby_stimuli.py
+++ b/examples/02_decoding/plot_haxby_stimuli.py
@@ -7,7 +7,6 @@
Cortex" (Science 2001)
"""
-from scipy.misc import imread
import matplotlib.pyplot as plt
from nilearn import datasets
@@ -16,22 +15,19 @@
haxby_dataset = datasets.fetch_haxby(subjects=[], fetch_stimuli=True)
stimulus_information = haxby_dataset.stimuli
-for stim_type in sorted(stimulus_information.keys()):
- if stim_type == b'controls':
- # skip control images, there are too many
- continue
-
- file_names = stimulus_information[stim_type]
-
- plt.figure()
- for i in range(48):
- plt.subplot(6, 8, i + 1)
- try:
- plt.imshow(imread(file_names[i]), cmap=plt.cm.gray)
- except:
- # just go to the next one if the file is not present
- pass
- plt.axis("off")
- plt.suptitle(stim_type)
+for stim_type in stimulus_information:
+ # skip control images, there are too many
+ if stim_type != 'controls':
+
+ file_names = stimulus_information[stim_type]
+
+ fig, axes = plt.subplots(6, 8)
+ fig.suptitle(stim_type)
+
+ for img_path, ax in zip(file_names, axes.ravel()):
+ ax.imshow(plt.imread(img_path), cmap=plt.cm.gray)
+
+ for ax in axes.ravel():
+ ax.axis("off")
show()
|
{"golden_diff": "diff --git a/examples/02_decoding/plot_haxby_stimuli.py b/examples/02_decoding/plot_haxby_stimuli.py\n--- a/examples/02_decoding/plot_haxby_stimuli.py\n+++ b/examples/02_decoding/plot_haxby_stimuli.py\n@@ -7,7 +7,6 @@\n Cortex\" (Science 2001)\n \"\"\"\n \n-from scipy.misc import imread\n import matplotlib.pyplot as plt\n \n from nilearn import datasets\n@@ -16,22 +15,19 @@\n haxby_dataset = datasets.fetch_haxby(subjects=[], fetch_stimuli=True)\n stimulus_information = haxby_dataset.stimuli\n \n-for stim_type in sorted(stimulus_information.keys()):\n- if stim_type == b'controls':\n- # skip control images, there are too many\n- continue\n-\n- file_names = stimulus_information[stim_type]\n-\n- plt.figure()\n- for i in range(48):\n- plt.subplot(6, 8, i + 1)\n- try:\n- plt.imshow(imread(file_names[i]), cmap=plt.cm.gray)\n- except:\n- # just go to the next one if the file is not present\n- pass\n- plt.axis(\"off\")\n- plt.suptitle(stim_type)\n+for stim_type in stimulus_information:\n+ # skip control images, there are too many\n+ if stim_type != 'controls':\n+\n+ file_names = stimulus_information[stim_type]\n+\n+ fig, axes = plt.subplots(6, 8)\n+ fig.suptitle(stim_type)\n+\n+ for img_path, ax in zip(file_names, axes.ravel()):\n+ ax.imshow(plt.imread(img_path), cmap=plt.cm.gray)\n+\n+ for ax in axes.ravel():\n+ ax.axis(\"off\")\n \n show()\n", "issue": "scipy.misc.imread() replaced by scipy.imageio.imread() in v1.2\n`scipy.misc.imread()` was deprecatd in SciPy 1.0 & replaced in SciPy 1.2 by `scipy.imageio.imread()`\r\n\r\nhttps://docs.scipy.org/doc/scipy-1.2.1/reference/generated/scipy.misc.imread.html\r\n\r\nThis is causing failures in CircleCI. \r\n\r\nI will work on this once PR #2076 doctest problem has been addressed, since we need this issue to be resolved before it can be merged. Intended today.\n", "before_files": [{"content": "\"\"\"\nShow stimuli of Haxby et al. dataset\n===============================================================================\n\nIn this script we plot an overview of the stimuli used in \"Distributed\nand Overlapping Representations of Faces and Objects in Ventral Temporal\nCortex\" (Science 2001)\n\"\"\"\n\nfrom scipy.misc import imread\nimport matplotlib.pyplot as plt\n\nfrom nilearn import datasets\nfrom nilearn.plotting import show\n\nhaxby_dataset = datasets.fetch_haxby(subjects=[], fetch_stimuli=True)\nstimulus_information = haxby_dataset.stimuli\n\nfor stim_type in sorted(stimulus_information.keys()):\n if stim_type == b'controls':\n # skip control images, there are too many\n continue\n\n file_names = stimulus_information[stim_type]\n\n plt.figure()\n for i in range(48):\n plt.subplot(6, 8, i + 1)\n try:\n plt.imshow(imread(file_names[i]), cmap=plt.cm.gray)\n except:\n # just go to the next one if the file is not present\n pass\n plt.axis(\"off\")\n plt.suptitle(stim_type)\n\nshow()\n", "path": "examples/02_decoding/plot_haxby_stimuli.py"}], "after_files": [{"content": "\"\"\"\nShow stimuli of Haxby et al. 
dataset\n===============================================================================\n\nIn this script we plot an overview of the stimuli used in \"Distributed\nand Overlapping Representations of Faces and Objects in Ventral Temporal\nCortex\" (Science 2001)\n\"\"\"\n\nimport matplotlib.pyplot as plt\n\nfrom nilearn import datasets\nfrom nilearn.plotting import show\n\nhaxby_dataset = datasets.fetch_haxby(subjects=[], fetch_stimuli=True)\nstimulus_information = haxby_dataset.stimuli\n\nfor stim_type in stimulus_information:\n # skip control images, there are too many\n if stim_type != 'controls':\n\n file_names = stimulus_information[stim_type]\n\n fig, axes = plt.subplots(6, 8)\n fig.suptitle(stim_type)\n\n for img_path, ax in zip(file_names, axes.ravel()):\n ax.imshow(plt.imread(img_path), cmap=plt.cm.gray)\n\n for ax in axes.ravel():\n ax.axis(\"off\")\n\nshow()\n", "path": "examples/02_decoding/plot_haxby_stimuli.py"}]}
| 708 | 413 |
gh_patches_debug_4779 | rasdani/github-patches | git_diff | zestedesavoir__zds-site-4960
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Inserting the >_< smiley
We get `>_<`
Solution 1: modify the code here: https://github.com/zestedesavoir/zds-site/blob/4ae0431bbf199e318dd6f2b1301ac7b6adc40198/assets/js/editor.js#L132 and check that there is no bug/failure with ">" and "<".
Solution 2: we can add the `X/` alias for this smiley and replace the code in the editor. https://github.com/zestedesavoir/zds-site/blob/56a5b2e8b524848efa2d328c0a46365a44c1d43e/zds/utils/templatetags/smileys_def.py#L26
--- END ISSUE ---
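To illustrate the second proposed solution, a small sketch of how an extra `X/` alias for `pinch.png` would be expanded by the existing symbol-to-image loop; the base URL here is an assumed value:

```python
import os

SMILEYS_BASE_URL = "/static/smileys"  # assumed value for this sketch
SMILEYS_BASE = {
    "pinch.png": (">_<", "X/"),  # the extra alias avoids typing ">" and "<" in the editor
}

smileys = {}
for image_file, symbols in SMILEYS_BASE.items():
    for symbol in symbols:
        smileys[symbol] = os.path.join(SMILEYS_BASE_URL, image_file)

print(smileys["X/"])  # /static/smileys/pinch.png
```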
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `zds/utils/templatetags/smileys_def.py`
Content:
```
1 import os
2 from django.conf import settings
3
4 SMILEYS_BASE_PATH = os.path.join(settings.BASE_DIR, 'dist/smileys')
5 LICENSES_BASE_PATH = os.path.join(settings.BASE_DIR, 'dist/licenses')
6 SMILEYS_BASE_URL = os.path.join(settings.STATIC_URL, 'smileys')
7
8 SMILEYS_BASE = {
9 'smile.png': (':)', ':-)', ),
10 'heureux.png': (':D', ':-D', ),
11 'clin.png': (';)', ';-)', ),
12 'langue.png': (':p', ':P', ':-p', ':-P', ),
13 'rire.gif': (':lol:', ),
14 'unsure.gif': (':euh:', ),
15 'triste.png': (':(', ':-(', ),
16 'huh.png': (':o', ':-o', ':O', ':-O', ),
17 'mechant.png': (':colere2:', ),
18 'blink.gif': ('o_O', 'O_o', ),
19 'hihi.png': ('^^', ),
20 'siffle.png': (':-°', ':°', ),
21 'ange.png': (':ange:', ),
22 'angry.gif': (':colere:', ),
23 'diable.png': (':diable:', ),
24 'magicien.png': (':magicien:', ),
25 'ninja.gif': (':ninja:', ),
26 'pinch.png': ('>_<', ),
27 'pirate.png': (':pirate:', ),
28 'pleure.png': (":'(", ),
29 'rouge.png': (':honte:', ),
30 'soleil.png': (':soleil:', ),
31 'waw.png': (':waw:', ),
32 'zorro.png': (':zorro:', ),
33 'cthulhu.png': ('^(;,;)^', ),
34 }
35
36 smileys = {}
37 for image_file, symbols in SMILEYS_BASE.items():
38 for symbol in symbols:
39 smileys[symbol] = os.path.join(SMILEYS_BASE_URL, image_file)
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/zds/utils/templatetags/smileys_def.py b/zds/utils/templatetags/smileys_def.py
--- a/zds/utils/templatetags/smileys_def.py
+++ b/zds/utils/templatetags/smileys_def.py
@@ -23,7 +23,7 @@
'diable.png': (':diable:', ),
'magicien.png': (':magicien:', ),
'ninja.gif': (':ninja:', ),
- 'pinch.png': ('>_<', ),
+ 'pinch.png': ('>_<', 'X/'),
'pirate.png': (':pirate:', ),
'pleure.png': (":'(", ),
'rouge.png': (':honte:', ),
|
{"golden_diff": "diff --git a/zds/utils/templatetags/smileys_def.py b/zds/utils/templatetags/smileys_def.py\n--- a/zds/utils/templatetags/smileys_def.py\n+++ b/zds/utils/templatetags/smileys_def.py\n@@ -23,7 +23,7 @@\n 'diable.png': (':diable:', ),\n 'magicien.png': (':magicien:', ),\n 'ninja.gif': (':ninja:', ),\n- 'pinch.png': ('>_<', ),\n+ 'pinch.png': ('>_<', 'X/'),\n 'pirate.png': (':pirate:', ),\n 'pleure.png': (\":'(\", ),\n 'rouge.png': (':honte:', ),\n", "issue": " Insertion du smiley >_<\nOn obtient `>_<`\r\n\r\nSolution 1 : Modifier le code ici : https://github.com/zestedesavoir/zds-site/blob/4ae0431bbf199e318dd6f2b1301ac7b6adc40198/assets/js/editor.js#L132 V\u00e9rifier qu'il n'y a pas un bug/fail avec \">\" et \"<\".\r\n\r\nSolution 2 : On peut ajouter l'alias `X/` pour ce smiley et remplacer le code dans l'\u00e9diteur. https://github.com/zestedesavoir/zds-site/blob/56a5b2e8b524848efa2d328c0a46365a44c1d43e/zds/utils/templatetags/smileys_def.py#L26\n", "before_files": [{"content": "import os\nfrom django.conf import settings\n\nSMILEYS_BASE_PATH = os.path.join(settings.BASE_DIR, 'dist/smileys')\nLICENSES_BASE_PATH = os.path.join(settings.BASE_DIR, 'dist/licenses')\nSMILEYS_BASE_URL = os.path.join(settings.STATIC_URL, 'smileys')\n\nSMILEYS_BASE = {\n 'smile.png': (':)', ':-)', ),\n 'heureux.png': (':D', ':-D', ),\n 'clin.png': (';)', ';-)', ),\n 'langue.png': (':p', ':P', ':-p', ':-P', ),\n 'rire.gif': (':lol:', ),\n 'unsure.gif': (':euh:', ),\n 'triste.png': (':(', ':-(', ),\n 'huh.png': (':o', ':-o', ':O', ':-O', ),\n 'mechant.png': (':colere2:', ),\n 'blink.gif': ('o_O', 'O_o', ),\n 'hihi.png': ('^^', ),\n 'siffle.png': (':-\u00b0', ':\u00b0', ),\n 'ange.png': (':ange:', ),\n 'angry.gif': (':colere:', ),\n 'diable.png': (':diable:', ),\n 'magicien.png': (':magicien:', ),\n 'ninja.gif': (':ninja:', ),\n 'pinch.png': ('>_<', ),\n 'pirate.png': (':pirate:', ),\n 'pleure.png': (\":'(\", ),\n 'rouge.png': (':honte:', ),\n 'soleil.png': (':soleil:', ),\n 'waw.png': (':waw:', ),\n 'zorro.png': (':zorro:', ),\n 'cthulhu.png': ('^(;,;)^', ),\n}\n\nsmileys = {}\nfor image_file, symbols in SMILEYS_BASE.items():\n for symbol in symbols:\n smileys[symbol] = os.path.join(SMILEYS_BASE_URL, image_file)\n", "path": "zds/utils/templatetags/smileys_def.py"}], "after_files": [{"content": "import os\nfrom django.conf import settings\n\nSMILEYS_BASE_PATH = os.path.join(settings.BASE_DIR, 'dist/smileys')\nLICENSES_BASE_PATH = os.path.join(settings.BASE_DIR, 'dist/licenses')\nSMILEYS_BASE_URL = os.path.join(settings.STATIC_URL, 'smileys')\n\nSMILEYS_BASE = {\n 'smile.png': (':)', ':-)', ),\n 'heureux.png': (':D', ':-D', ),\n 'clin.png': (';)', ';-)', ),\n 'langue.png': (':p', ':P', ':-p', ':-P', ),\n 'rire.gif': (':lol:', ),\n 'unsure.gif': (':euh:', ),\n 'triste.png': (':(', ':-(', ),\n 'huh.png': (':o', ':-o', ':O', ':-O', ),\n 'mechant.png': (':colere2:', ),\n 'blink.gif': ('o_O', 'O_o', ),\n 'hihi.png': ('^^', ),\n 'siffle.png': (':-\u00b0', ':\u00b0', ),\n 'ange.png': (':ange:', ),\n 'angry.gif': (':colere:', ),\n 'diable.png': (':diable:', ),\n 'magicien.png': (':magicien:', ),\n 'ninja.gif': (':ninja:', ),\n 'pinch.png': ('>_<', 'X/'),\n 'pirate.png': (':pirate:', ),\n 'pleure.png': (\":'(\", ),\n 'rouge.png': (':honte:', ),\n 'soleil.png': (':soleil:', ),\n 'waw.png': (':waw:', ),\n 'zorro.png': (':zorro:', ),\n 'cthulhu.png': ('^(;,;)^', ),\n}\n\nsmileys = {}\nfor image_file, symbols in SMILEYS_BASE.items():\n for symbol in symbols:\n smileys[symbol] = os.path.join(SMILEYS_BASE_URL, 
image_file)\n", "path": "zds/utils/templatetags/smileys_def.py"}]}
| 987 | 176 |
gh_patches_debug_25265 | rasdani/github-patches | git_diff | tinygrad__tinygrad-1562
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Tensor.__eq__() with two bool tensors raises error on Torch backend
This was introduced by #1493
To reproduce:
```
In [24]: (Tensor([1], dtype=dtypes.bool, device="TORCH") == Tensor([1], dtype=dtypes.bool, device="TORCH")).realize()
RuntimeError: Subtraction, the `-` operator, with a bool tensor is not supported. If you are trying to invert a mask, use the `~` or `logical_not()` operator instead.
```
The RuntimeError is raised by PyTorch itself.
--- END ISSUE ---
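For reference, a standalone PyTorch snippet showing the failing operation and the logical-XOR form that is defined for bool tensors; it does not touch tinygrad at all:

```python
import torch

a = torch.tensor([True, False, True])
b = torch.tensor([True, True, False])

# a - b  # raises RuntimeError: subtraction with a bool tensor is not supported
print(torch.logical_xor(a, b))  # tensor([False,  True,  True])
```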
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tinygrad/runtime/ops_torch.py`
Content:
```
1 import torch
2 from typing import Dict, Callable, Optional
3 from tinygrad.ops import UnaryOps, BinaryOps, MovementOps, TernaryOps, Op, Interpreted
4 from tinygrad.helpers import getenv, dtypes, prod, DType
5 from tinygrad.runtime.ops_cpu import base_fxn_for_op, einsum_mulacc
6 from tinygrad.runtime.lib import RawBuffer
7
8 device = torch.device("cuda:0" if torch.cuda.is_available() else ("mps" if getenv("MPS", 0) else "cpu"))
9 type_map = {torch.float64: dtypes.float64, torch.float16: dtypes.float16, torch.float32: dtypes.float32, torch.int8: dtypes.int8, torch.int32: dtypes.int32, torch.int64: dtypes.int64, torch.uint8: dtypes.uint8, torch.bool: dtypes.bool}
10 inverse_type_map = {v:k for k,v in type_map.items()}
11
12 def as_strided(x, arg):
13 if any(i < 0 for i in arg[1]):
14 return torch.as_strided(x.contiguous(), arg[0], tuple(abs(i) for i in arg[1]),
15 arg[2] + sum((s-1)*a if a < 0 else 0 for (s,a) in zip(arg[0], arg[1]))).flip([i for i,a in enumerate(arg[1]) if a < 0])
16 return torch.as_strided(x.contiguous(), arg[0], arg[1], arg[2])
17
18 torch_fxn_for_op: Dict[Op, Callable] = {**base_fxn_for_op, **{
19 UnaryOps.NOOP: lambda x: x.contiguous(), UnaryOps.SQRT: lambda x: x.sqrt(), UnaryOps.EXP2: lambda x: x.exp2(), UnaryOps.LOG2: lambda x: x.log2(), UnaryOps.SIN: torch.sin,
20 UnaryOps.CAST: lambda x,y: (x.view if y[1] else x.type)(next(k for k,v in type_map.items() if v==y[0])),
21 BinaryOps.MAX: torch.maximum, BinaryOps.CMPLT: lambda x,y: (x<y).type(torch.promote_types(x.dtype, y.dtype)),
22 MovementOps.PAD: lambda x, padding: torch.nn.functional.pad(x, [item for sublist in padding[::-1] for item in sublist]),
23 TernaryOps.MULACC: einsum_mulacc(lambda s,a,b: torch.einsum(s, a.float(), b.float()).type(torch.promote_types(a.dtype, b.dtype)), lambda x: x.stride(), lambda x,s: x.expand(s)),
24 TernaryOps.WHERE: lambda x, y, z: torch.where(x != 0, y, z),
25 MovementOps.STRIDE: lambda x, arg: x[tuple(slice(None, None, abs(i)) for i in arg)].flip([i for i,a in enumerate(arg) if a < 0]),
26 MovementOps.EXPAND: lambda x, arg: x.expand(arg), MovementOps.PERMUTE: lambda x, arg: x.permute(arg),
27 MovementOps.AS_STRIDED: as_strided
28 }}
29
30 class RawTorchBuffer(RawBuffer):
31 def __init__(self, size:int, dtype:DType, buf:Optional[torch.Tensor]=None): super().__init__(size, dtype, buf if buf is not None else torch.empty([size], dtype=inverse_type_map[dtype]))
32 @classmethod
33 def fromCPU(cls, x):
34 buf = torch.from_numpy(x if all(s>=0 for s in x.strides) else x.copy()).requires_grad_(False).to(device)
35 return cls(prod(x.shape), type_map[buf.dtype], buf)
36 def toCPU(self): return self._buf.cpu().numpy()
37 TorchBuffer = Interpreted(RawTorchBuffer, torch_fxn_for_op, from_underlying=lambda x: RawTorchBuffer(prod(x.shape), type_map[x.dtype], x))
38
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/tinygrad/runtime/ops_torch.py b/tinygrad/runtime/ops_torch.py
--- a/tinygrad/runtime/ops_torch.py
+++ b/tinygrad/runtime/ops_torch.py
@@ -18,7 +18,7 @@
torch_fxn_for_op: Dict[Op, Callable] = {**base_fxn_for_op, **{
UnaryOps.NOOP: lambda x: x.contiguous(), UnaryOps.SQRT: lambda x: x.sqrt(), UnaryOps.EXP2: lambda x: x.exp2(), UnaryOps.LOG2: lambda x: x.log2(), UnaryOps.SIN: torch.sin,
UnaryOps.CAST: lambda x,y: (x.view if y[1] else x.type)(next(k for k,v in type_map.items() if v==y[0])),
- BinaryOps.MAX: torch.maximum, BinaryOps.CMPLT: lambda x,y: (x<y).type(torch.promote_types(x.dtype, y.dtype)),
+ BinaryOps.MAX: torch.maximum, BinaryOps.CMPLT: lambda x,y: (x<y).type(torch.promote_types(x.dtype, y.dtype)), BinaryOps.SUB: lambda x,y: torch.logical_xor(x, y) if y.dtype is torch.bool else torch.sub(x, y),
MovementOps.PAD: lambda x, padding: torch.nn.functional.pad(x, [item for sublist in padding[::-1] for item in sublist]),
TernaryOps.MULACC: einsum_mulacc(lambda s,a,b: torch.einsum(s, a.float(), b.float()).type(torch.promote_types(a.dtype, b.dtype)), lambda x: x.stride(), lambda x,s: x.expand(s)),
TernaryOps.WHERE: lambda x, y, z: torch.where(x != 0, y, z),
|
{"golden_diff": "diff --git a/tinygrad/runtime/ops_torch.py b/tinygrad/runtime/ops_torch.py\n--- a/tinygrad/runtime/ops_torch.py\n+++ b/tinygrad/runtime/ops_torch.py\n@@ -18,7 +18,7 @@\n torch_fxn_for_op: Dict[Op, Callable] = {**base_fxn_for_op, **{\n UnaryOps.NOOP: lambda x: x.contiguous(), UnaryOps.SQRT: lambda x: x.sqrt(), UnaryOps.EXP2: lambda x: x.exp2(), UnaryOps.LOG2: lambda x: x.log2(), UnaryOps.SIN: torch.sin,\n UnaryOps.CAST: lambda x,y: (x.view if y[1] else x.type)(next(k for k,v in type_map.items() if v==y[0])),\n- BinaryOps.MAX: torch.maximum, BinaryOps.CMPLT: lambda x,y: (x<y).type(torch.promote_types(x.dtype, y.dtype)),\n+ BinaryOps.MAX: torch.maximum, BinaryOps.CMPLT: lambda x,y: (x<y).type(torch.promote_types(x.dtype, y.dtype)), BinaryOps.SUB: lambda x,y: torch.logical_xor(x, y) if y.dtype is torch.bool else torch.sub(x, y),\n MovementOps.PAD: lambda x, padding: torch.nn.functional.pad(x, [item for sublist in padding[::-1] for item in sublist]),\n TernaryOps.MULACC: einsum_mulacc(lambda s,a,b: torch.einsum(s, a.float(), b.float()).type(torch.promote_types(a.dtype, b.dtype)), lambda x: x.stride(), lambda x,s: x.expand(s)),\n TernaryOps.WHERE: lambda x, y, z: torch.where(x != 0, y, z),\n", "issue": "Tensor.__eq__() with two bool tensors raises error on Torch backend\nThis was introduced from #1493\r\n\r\nTo reproduce:\r\n```\r\nIn [24]: (Tensor([1], dtype=dtypes.bool, device=\"TORCH\") == Tensor([1], dtype=dtypes.bool, device=\"TORCH\")).realize()\r\nRuntimeError: Subtraction, the `-` operator, with a bool tensor is not supported. If you are trying to invert a mask, use the `~` or `logical_not()` operator instead.\r\n```\r\nRuntimeError is from pytorch\r\n\r\n\n", "before_files": [{"content": "import torch\nfrom typing import Dict, Callable, Optional\nfrom tinygrad.ops import UnaryOps, BinaryOps, MovementOps, TernaryOps, Op, Interpreted\nfrom tinygrad.helpers import getenv, dtypes, prod, DType\nfrom tinygrad.runtime.ops_cpu import base_fxn_for_op, einsum_mulacc\nfrom tinygrad.runtime.lib import RawBuffer\n\ndevice = torch.device(\"cuda:0\" if torch.cuda.is_available() else (\"mps\" if getenv(\"MPS\", 0) else \"cpu\"))\ntype_map = {torch.float64: dtypes.float64, torch.float16: dtypes.float16, torch.float32: dtypes.float32, torch.int8: dtypes.int8, torch.int32: dtypes.int32, torch.int64: dtypes.int64, torch.uint8: dtypes.uint8, torch.bool: dtypes.bool}\ninverse_type_map = {v:k for k,v in type_map.items()}\n\ndef as_strided(x, arg):\n if any(i < 0 for i in arg[1]):\n return torch.as_strided(x.contiguous(), arg[0], tuple(abs(i) for i in arg[1]),\n arg[2] + sum((s-1)*a if a < 0 else 0 for (s,a) in zip(arg[0], arg[1]))).flip([i for i,a in enumerate(arg[1]) if a < 0])\n return torch.as_strided(x.contiguous(), arg[0], arg[1], arg[2])\n\ntorch_fxn_for_op: Dict[Op, Callable] = {**base_fxn_for_op, **{\n UnaryOps.NOOP: lambda x: x.contiguous(), UnaryOps.SQRT: lambda x: x.sqrt(), UnaryOps.EXP2: lambda x: x.exp2(), UnaryOps.LOG2: lambda x: x.log2(), UnaryOps.SIN: torch.sin,\n UnaryOps.CAST: lambda x,y: (x.view if y[1] else x.type)(next(k for k,v in type_map.items() if v==y[0])),\n BinaryOps.MAX: torch.maximum, BinaryOps.CMPLT: lambda x,y: (x<y).type(torch.promote_types(x.dtype, y.dtype)),\n MovementOps.PAD: lambda x, padding: torch.nn.functional.pad(x, [item for sublist in padding[::-1] for item in sublist]),\n TernaryOps.MULACC: einsum_mulacc(lambda s,a,b: torch.einsum(s, a.float(), b.float()).type(torch.promote_types(a.dtype, b.dtype)), lambda x: x.stride(), lambda x,s: x.expand(s)),\n 
TernaryOps.WHERE: lambda x, y, z: torch.where(x != 0, y, z),\n MovementOps.STRIDE: lambda x, arg: x[tuple(slice(None, None, abs(i)) for i in arg)].flip([i for i,a in enumerate(arg) if a < 0]),\n MovementOps.EXPAND: lambda x, arg: x.expand(arg), MovementOps.PERMUTE: lambda x, arg: x.permute(arg),\n MovementOps.AS_STRIDED: as_strided\n}}\n\nclass RawTorchBuffer(RawBuffer):\n def __init__(self, size:int, dtype:DType, buf:Optional[torch.Tensor]=None): super().__init__(size, dtype, buf if buf is not None else torch.empty([size], dtype=inverse_type_map[dtype]))\n @classmethod\n def fromCPU(cls, x):\n buf = torch.from_numpy(x if all(s>=0 for s in x.strides) else x.copy()).requires_grad_(False).to(device)\n return cls(prod(x.shape), type_map[buf.dtype], buf)\n def toCPU(self): return self._buf.cpu().numpy()\nTorchBuffer = Interpreted(RawTorchBuffer, torch_fxn_for_op, from_underlying=lambda x: RawTorchBuffer(prod(x.shape), type_map[x.dtype], x))\n", "path": "tinygrad/runtime/ops_torch.py"}], "after_files": [{"content": "import torch\nfrom typing import Dict, Callable, Optional\nfrom tinygrad.ops import UnaryOps, BinaryOps, MovementOps, TernaryOps, Op, Interpreted\nfrom tinygrad.helpers import getenv, dtypes, prod, DType\nfrom tinygrad.runtime.ops_cpu import base_fxn_for_op, einsum_mulacc\nfrom tinygrad.runtime.lib import RawBuffer\n\ndevice = torch.device(\"cuda:0\" if torch.cuda.is_available() else (\"mps\" if getenv(\"MPS\", 0) else \"cpu\"))\ntype_map = {torch.float64: dtypes.float64, torch.float16: dtypes.float16, torch.float32: dtypes.float32, torch.int8: dtypes.int8, torch.int32: dtypes.int32, torch.int64: dtypes.int64, torch.uint8: dtypes.uint8, torch.bool: dtypes.bool}\ninverse_type_map = {v:k for k,v in type_map.items()}\n\ndef as_strided(x, arg):\n if any(i < 0 for i in arg[1]):\n return torch.as_strided(x.contiguous(), arg[0], tuple(abs(i) for i in arg[1]),\n arg[2] + sum((s-1)*a if a < 0 else 0 for (s,a) in zip(arg[0], arg[1]))).flip([i for i,a in enumerate(arg[1]) if a < 0])\n return torch.as_strided(x.contiguous(), arg[0], arg[1], arg[2])\n\ntorch_fxn_for_op: Dict[Op, Callable] = {**base_fxn_for_op, **{\n UnaryOps.NOOP: lambda x: x.contiguous(), UnaryOps.SQRT: lambda x: x.sqrt(), UnaryOps.EXP2: lambda x: x.exp2(), UnaryOps.LOG2: lambda x: x.log2(), UnaryOps.SIN: torch.sin,\n UnaryOps.CAST: lambda x,y: (x.view if y[1] else x.type)(next(k for k,v in type_map.items() if v==y[0])),\n BinaryOps.MAX: torch.maximum, BinaryOps.CMPLT: lambda x,y: (x<y).type(torch.promote_types(x.dtype, y.dtype)), BinaryOps.SUB: lambda x,y: torch.logical_xor(x, y) if y.dtype is torch.bool else torch.sub(x, y),\n MovementOps.PAD: lambda x, padding: torch.nn.functional.pad(x, [item for sublist in padding[::-1] for item in sublist]),\n TernaryOps.MULACC: einsum_mulacc(lambda s,a,b: torch.einsum(s, a.float(), b.float()).type(torch.promote_types(a.dtype, b.dtype)), lambda x: x.stride(), lambda x,s: x.expand(s)),\n TernaryOps.WHERE: lambda x, y, z: torch.where(x != 0, y, z),\n MovementOps.STRIDE: lambda x, arg: x[tuple(slice(None, None, abs(i)) for i in arg)].flip([i for i,a in enumerate(arg) if a < 0]),\n MovementOps.EXPAND: lambda x, arg: x.expand(arg), MovementOps.PERMUTE: lambda x, arg: x.permute(arg),\n MovementOps.AS_STRIDED: as_strided\n}}\n\nclass RawTorchBuffer(RawBuffer):\n def __init__(self, size:int, dtype:DType, buf:Optional[torch.Tensor]=None): super().__init__(size, dtype, buf if buf is not None else torch.empty([size], dtype=inverse_type_map[dtype]))\n @classmethod\n def fromCPU(cls, x):\n buf = 
torch.from_numpy(x if all(s>=0 for s in x.strides) else x.copy()).requires_grad_(False).to(device)\n return cls(prod(x.shape), type_map[buf.dtype], buf)\n def toCPU(self): return self._buf.cpu().numpy()\nTorchBuffer = Interpreted(RawTorchBuffer, torch_fxn_for_op, from_underlying=lambda x: RawTorchBuffer(prod(x.shape), type_map[x.dtype], x))\n", "path": "tinygrad/runtime/ops_torch.py"}]}
| 1,313 | 385 |
gh_patches_debug_32291 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-contrib-1253
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add more features for adding HTTP request / response headers to spans.
I already have https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1172 open for this, and I'll be breaking it into smaller pieces at @lzchen's request.
**Is your feature request related to a problem?**
Currently, you can only provide a list of full HTTP request / response header names to be added to the span.
There is also no capacity for header value redaction.
**Describe the solution you'd like**
It would be nice to be able to specify a regex or "all" to get all headers.
Header value redaction is also a must-have for us.
**Describe alternatives you've considered**
I considered doing this in my application, but it makes more sense to add it here.
--- END ISSUE ---
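A rough sketch of the requested behaviour, regex-based header selection plus value redaction, written independently of the instrumentation's actual configuration mechanism; every name below is illustrative:

```python
import re

# Illustrative patterns only; a real setup would read these from environment variables.
CAPTURE = [re.compile(p, re.IGNORECASE) for p in (r"content-type", r"authorization")]
SENSITIVE = re.compile(r"authorization|cookie|set-cookie", re.IGNORECASE)

def captured_headers(headers: dict) -> dict:
    out = {}
    for name, value in headers.items():
        if any(p.fullmatch(name) for p in CAPTURE):
            out[name] = "[REDACTED]" if SENSITIVE.search(name) else value
    return out

print(captured_headers({"Content-Type": "text/html", "Authorization": "Bearer abc"}))
# {'Content-Type': 'text/html', 'Authorization': '[REDACTED]'}
```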
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from os import environ
16 from re import compile as re_compile
17 from re import search
18 from typing import Iterable, List
19 from urllib.parse import urlparse, urlunparse
20
21 from opentelemetry.semconv.trace import SpanAttributes
22
23 OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST = (
24 "OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST"
25 )
26 OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE = (
27 "OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE"
28 )
29
30 # List of recommended metrics attributes
31 _duration_attrs = {
32 SpanAttributes.HTTP_METHOD,
33 SpanAttributes.HTTP_HOST,
34 SpanAttributes.HTTP_SCHEME,
35 SpanAttributes.HTTP_STATUS_CODE,
36 SpanAttributes.HTTP_FLAVOR,
37 SpanAttributes.HTTP_SERVER_NAME,
38 SpanAttributes.NET_HOST_NAME,
39 SpanAttributes.NET_HOST_PORT,
40 }
41
42 _active_requests_count_attrs = {
43 SpanAttributes.HTTP_METHOD,
44 SpanAttributes.HTTP_HOST,
45 SpanAttributes.HTTP_SCHEME,
46 SpanAttributes.HTTP_FLAVOR,
47 SpanAttributes.HTTP_SERVER_NAME,
48 }
49
50
51 class ExcludeList:
52 """Class to exclude certain paths (given as a list of regexes) from tracing requests"""
53
54 def __init__(self, excluded_urls: Iterable[str]):
55 self._excluded_urls = excluded_urls
56 if self._excluded_urls:
57 self._regex = re_compile("|".join(excluded_urls))
58
59 def url_disabled(self, url: str) -> bool:
60 return bool(self._excluded_urls and search(self._regex, url))
61
62
63 _root = r"OTEL_PYTHON_{}"
64
65
66 def get_traced_request_attrs(instrumentation):
67 traced_request_attrs = environ.get(
68 _root.format(f"{instrumentation}_TRACED_REQUEST_ATTRS"), []
69 )
70
71 if traced_request_attrs:
72 traced_request_attrs = [
73 traced_request_attr.strip()
74 for traced_request_attr in traced_request_attrs.split(",")
75 ]
76
77 return traced_request_attrs
78
79
80 def get_excluded_urls(instrumentation: str) -> ExcludeList:
81 # Get instrumentation-specific excluded URLs. If not set, retrieve them
82 # from generic variable.
83 excluded_urls = environ.get(
84 _root.format(f"{instrumentation}_EXCLUDED_URLS"),
85 environ.get(_root.format("EXCLUDED_URLS"), ""),
86 )
87
88 return parse_excluded_urls(excluded_urls)
89
90
91 def parse_excluded_urls(excluded_urls: str) -> ExcludeList:
92 """
93 Small helper to put an arbitrary url list inside of ExcludeList
94 """
95 if excluded_urls:
96 excluded_url_list = [
97 excluded_url.strip() for excluded_url in excluded_urls.split(",")
98 ]
99 else:
100 excluded_url_list = []
101
102 return ExcludeList(excluded_url_list)
103
104
105 def remove_url_credentials(url: str) -> str:
106 """Given a string url, remove the username and password only if it is a valid url"""
107
108 try:
109 parsed = urlparse(url)
110 if all([parsed.scheme, parsed.netloc]): # checks for valid url
111 parsed_url = urlparse(url)
112 netloc = (
113 (":".join(((parsed_url.hostname or ""), str(parsed_url.port))))
114 if parsed_url.port
115 else (parsed_url.hostname or "")
116 )
117 return urlunparse(
118 (
119 parsed_url.scheme,
120 netloc,
121 parsed_url.path,
122 parsed_url.params,
123 parsed_url.query,
124 parsed_url.fragment,
125 )
126 )
127 except ValueError: # an unparsable url was passed
128 pass
129 return url
130
131
132 def normalise_request_header_name(header: str) -> str:
133 key = header.lower().replace("-", "_")
134 return f"http.request.header.{key}"
135
136
137 def normalise_response_header_name(header: str) -> str:
138 key = header.lower().replace("-", "_")
139 return f"http.response.header.{key}"
140
141
142 def get_custom_headers(env_var: str) -> List[str]:
143 custom_headers = environ.get(env_var, [])
144 if custom_headers:
145 custom_headers = [
146 custom_headers.strip()
147 for custom_headers in custom_headers.split(",")
148 ]
149 return custom_headers
150
151
152 def _parse_active_request_count_attrs(req_attrs):
153 active_requests_count_attrs = {
154 key: req_attrs[key]
155 for key in _active_requests_count_attrs.intersection(req_attrs.keys())
156 }
157 return active_requests_count_attrs
158
159
160 def _parse_duration_attrs(req_attrs):
161 duration_attrs = {
162 key: req_attrs[key]
163 for key in _duration_attrs.intersection(req_attrs.keys())
164 }
165 return duration_attrs
166
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py b/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py
--- a/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py
+++ b/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py
@@ -13,6 +13,7 @@
# limitations under the License.
from os import environ
+from re import IGNORECASE as RE_IGNORECASE
from re import compile as re_compile
from re import search
from typing import Iterable, List
@@ -20,6 +21,9 @@
from opentelemetry.semconv.trace import SpanAttributes
+OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS = (
+ "OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS"
+)
OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST = (
"OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST"
)
@@ -60,6 +64,22 @@
return bool(self._excluded_urls and search(self._regex, url))
+class SanitizeValue:
+ """Class to sanitize (remove sensitive data from) certain headers (given as a list of regexes)"""
+
+ def __init__(self, sanitized_fields: Iterable[str]):
+ self._sanitized_fields = sanitized_fields
+ if self._sanitized_fields:
+ self._regex = re_compile("|".join(sanitized_fields), RE_IGNORECASE)
+
+ def sanitize_header_value(self, header: str, value: str) -> str:
+ return (
+ "[REDACTED]"
+ if (self._sanitized_fields and search(self._regex, header))
+ else value
+ )
+
+
_root = r"OTEL_PYTHON_{}"
@@ -90,7 +110,7 @@
def parse_excluded_urls(excluded_urls: str) -> ExcludeList:
"""
- Small helper to put an arbitrary url list inside of ExcludeList
+ Small helper to put an arbitrary url list inside an ExcludeList
"""
if excluded_urls:
excluded_url_list = [
|
{"golden_diff": "diff --git a/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py b/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py\n--- a/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py\n+++ b/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py\n@@ -13,6 +13,7 @@\n # limitations under the License.\n \n from os import environ\n+from re import IGNORECASE as RE_IGNORECASE\n from re import compile as re_compile\n from re import search\n from typing import Iterable, List\n@@ -20,6 +21,9 @@\n \n from opentelemetry.semconv.trace import SpanAttributes\n \n+OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS = (\n+ \"OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS\"\n+)\n OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST = (\n \"OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST\"\n )\n@@ -60,6 +64,22 @@\n return bool(self._excluded_urls and search(self._regex, url))\n \n \n+class SanitizeValue:\n+ \"\"\"Class to sanitize (remove sensitive data from) certain headers (given as a list of regexes)\"\"\"\n+\n+ def __init__(self, sanitized_fields: Iterable[str]):\n+ self._sanitized_fields = sanitized_fields\n+ if self._sanitized_fields:\n+ self._regex = re_compile(\"|\".join(sanitized_fields), RE_IGNORECASE)\n+\n+ def sanitize_header_value(self, header: str, value: str) -> str:\n+ return (\n+ \"[REDACTED]\"\n+ if (self._sanitized_fields and search(self._regex, header))\n+ else value\n+ )\n+\n+\n _root = r\"OTEL_PYTHON_{}\"\n \n \n@@ -90,7 +110,7 @@\n \n def parse_excluded_urls(excluded_urls: str) -> ExcludeList:\n \"\"\"\n- Small helper to put an arbitrary url list inside of ExcludeList\n+ Small helper to put an arbitrary url list inside an ExcludeList\n \"\"\"\n if excluded_urls:\n excluded_url_list = [\n", "issue": "Add more features for adding HTTP request / response headers to spans.\nI already have https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1172 open for this, and I'll be breaking it in to smaller pieces at @lzchen 's request.\r\n\r\n**Is your feature request related to a problem?**\r\nCurrently, you can only provide a list of full HTTP request / response header names to be added to the span.\r\n\r\nThere is also no capacity for header value redaction.\r\n\r\n**Describe the solution you'd like**\r\nIt would be nice to be able to specify a regex or \"all\" to get all headers.\r\n\r\nHeader value redaction is also a must-have for us.\r\n\r\n**Describe alternatives you've considered**\r\nI considered doing this in my application, but it makes more sense to add it here.\r\n\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom os import environ\nfrom re import compile as re_compile\nfrom re import search\nfrom typing import Iterable, List\nfrom urllib.parse import urlparse, urlunparse\n\nfrom opentelemetry.semconv.trace import SpanAttributes\n\nOTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST 
= (\n \"OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST\"\n)\nOTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE = (\n \"OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE\"\n)\n\n# List of recommended metrics attributes\n_duration_attrs = {\n SpanAttributes.HTTP_METHOD,\n SpanAttributes.HTTP_HOST,\n SpanAttributes.HTTP_SCHEME,\n SpanAttributes.HTTP_STATUS_CODE,\n SpanAttributes.HTTP_FLAVOR,\n SpanAttributes.HTTP_SERVER_NAME,\n SpanAttributes.NET_HOST_NAME,\n SpanAttributes.NET_HOST_PORT,\n}\n\n_active_requests_count_attrs = {\n SpanAttributes.HTTP_METHOD,\n SpanAttributes.HTTP_HOST,\n SpanAttributes.HTTP_SCHEME,\n SpanAttributes.HTTP_FLAVOR,\n SpanAttributes.HTTP_SERVER_NAME,\n}\n\n\nclass ExcludeList:\n \"\"\"Class to exclude certain paths (given as a list of regexes) from tracing requests\"\"\"\n\n def __init__(self, excluded_urls: Iterable[str]):\n self._excluded_urls = excluded_urls\n if self._excluded_urls:\n self._regex = re_compile(\"|\".join(excluded_urls))\n\n def url_disabled(self, url: str) -> bool:\n return bool(self._excluded_urls and search(self._regex, url))\n\n\n_root = r\"OTEL_PYTHON_{}\"\n\n\ndef get_traced_request_attrs(instrumentation):\n traced_request_attrs = environ.get(\n _root.format(f\"{instrumentation}_TRACED_REQUEST_ATTRS\"), []\n )\n\n if traced_request_attrs:\n traced_request_attrs = [\n traced_request_attr.strip()\n for traced_request_attr in traced_request_attrs.split(\",\")\n ]\n\n return traced_request_attrs\n\n\ndef get_excluded_urls(instrumentation: str) -> ExcludeList:\n # Get instrumentation-specific excluded URLs. If not set, retrieve them\n # from generic variable.\n excluded_urls = environ.get(\n _root.format(f\"{instrumentation}_EXCLUDED_URLS\"),\n environ.get(_root.format(\"EXCLUDED_URLS\"), \"\"),\n )\n\n return parse_excluded_urls(excluded_urls)\n\n\ndef parse_excluded_urls(excluded_urls: str) -> ExcludeList:\n \"\"\"\n Small helper to put an arbitrary url list inside of ExcludeList\n \"\"\"\n if excluded_urls:\n excluded_url_list = [\n excluded_url.strip() for excluded_url in excluded_urls.split(\",\")\n ]\n else:\n excluded_url_list = []\n\n return ExcludeList(excluded_url_list)\n\n\ndef remove_url_credentials(url: str) -> str:\n \"\"\"Given a string url, remove the username and password only if it is a valid url\"\"\"\n\n try:\n parsed = urlparse(url)\n if all([parsed.scheme, parsed.netloc]): # checks for valid url\n parsed_url = urlparse(url)\n netloc = (\n (\":\".join(((parsed_url.hostname or \"\"), str(parsed_url.port))))\n if parsed_url.port\n else (parsed_url.hostname or \"\")\n )\n return urlunparse(\n (\n parsed_url.scheme,\n netloc,\n parsed_url.path,\n parsed_url.params,\n parsed_url.query,\n parsed_url.fragment,\n )\n )\n except ValueError: # an unparsable url was passed\n pass\n return url\n\n\ndef normalise_request_header_name(header: str) -> str:\n key = header.lower().replace(\"-\", \"_\")\n return f\"http.request.header.{key}\"\n\n\ndef normalise_response_header_name(header: str) -> str:\n key = header.lower().replace(\"-\", \"_\")\n return f\"http.response.header.{key}\"\n\n\ndef get_custom_headers(env_var: str) -> List[str]:\n custom_headers = environ.get(env_var, [])\n if custom_headers:\n custom_headers = [\n custom_headers.strip()\n for custom_headers in custom_headers.split(\",\")\n ]\n return custom_headers\n\n\ndef _parse_active_request_count_attrs(req_attrs):\n active_requests_count_attrs = {\n key: req_attrs[key]\n for key in _active_requests_count_attrs.intersection(req_attrs.keys())\n }\n 
return active_requests_count_attrs\n\n\ndef _parse_duration_attrs(req_attrs):\n duration_attrs = {\n key: req_attrs[key]\n for key in _duration_attrs.intersection(req_attrs.keys())\n }\n return duration_attrs\n", "path": "util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py"}], "after_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom os import environ\nfrom re import IGNORECASE as RE_IGNORECASE\nfrom re import compile as re_compile\nfrom re import search\nfrom typing import Iterable, List\nfrom urllib.parse import urlparse, urlunparse\n\nfrom opentelemetry.semconv.trace import SpanAttributes\n\nOTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS = (\n \"OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS\"\n)\nOTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST = (\n \"OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST\"\n)\nOTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE = (\n \"OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE\"\n)\n\n# List of recommended metrics attributes\n_duration_attrs = {\n SpanAttributes.HTTP_METHOD,\n SpanAttributes.HTTP_HOST,\n SpanAttributes.HTTP_SCHEME,\n SpanAttributes.HTTP_STATUS_CODE,\n SpanAttributes.HTTP_FLAVOR,\n SpanAttributes.HTTP_SERVER_NAME,\n SpanAttributes.NET_HOST_NAME,\n SpanAttributes.NET_HOST_PORT,\n}\n\n_active_requests_count_attrs = {\n SpanAttributes.HTTP_METHOD,\n SpanAttributes.HTTP_HOST,\n SpanAttributes.HTTP_SCHEME,\n SpanAttributes.HTTP_FLAVOR,\n SpanAttributes.HTTP_SERVER_NAME,\n}\n\n\nclass ExcludeList:\n \"\"\"Class to exclude certain paths (given as a list of regexes) from tracing requests\"\"\"\n\n def __init__(self, excluded_urls: Iterable[str]):\n self._excluded_urls = excluded_urls\n if self._excluded_urls:\n self._regex = re_compile(\"|\".join(excluded_urls))\n\n def url_disabled(self, url: str) -> bool:\n return bool(self._excluded_urls and search(self._regex, url))\n\n\nclass SanitizeValue:\n \"\"\"Class to sanitize (remove sensitive data from) certain headers (given as a list of regexes)\"\"\"\n\n def __init__(self, sanitized_fields: Iterable[str]):\n self._sanitized_fields = sanitized_fields\n if self._sanitized_fields:\n self._regex = re_compile(\"|\".join(sanitized_fields), RE_IGNORECASE)\n\n def sanitize_header_value(self, header: str, value: str) -> str:\n return (\n \"[REDACTED]\"\n if (self._sanitized_fields and search(self._regex, header))\n else value\n )\n\n\n_root = r\"OTEL_PYTHON_{}\"\n\n\ndef get_traced_request_attrs(instrumentation):\n traced_request_attrs = environ.get(\n _root.format(f\"{instrumentation}_TRACED_REQUEST_ATTRS\"), []\n )\n\n if traced_request_attrs:\n traced_request_attrs = [\n traced_request_attr.strip()\n for traced_request_attr in traced_request_attrs.split(\",\")\n ]\n\n return traced_request_attrs\n\n\ndef get_excluded_urls(instrumentation: str) -> ExcludeList:\n # Get instrumentation-specific excluded URLs. 
If not set, retrieve them\n # from generic variable.\n excluded_urls = environ.get(\n _root.format(f\"{instrumentation}_EXCLUDED_URLS\"),\n environ.get(_root.format(\"EXCLUDED_URLS\"), \"\"),\n )\n\n return parse_excluded_urls(excluded_urls)\n\n\ndef parse_excluded_urls(excluded_urls: str) -> ExcludeList:\n \"\"\"\n Small helper to put an arbitrary url list inside an ExcludeList\n \"\"\"\n if excluded_urls:\n excluded_url_list = [\n excluded_url.strip() for excluded_url in excluded_urls.split(\",\")\n ]\n else:\n excluded_url_list = []\n\n return ExcludeList(excluded_url_list)\n\n\ndef remove_url_credentials(url: str) -> str:\n \"\"\"Given a string url, remove the username and password only if it is a valid url\"\"\"\n\n try:\n parsed = urlparse(url)\n if all([parsed.scheme, parsed.netloc]): # checks for valid url\n parsed_url = urlparse(url)\n netloc = (\n (\":\".join(((parsed_url.hostname or \"\"), str(parsed_url.port))))\n if parsed_url.port\n else (parsed_url.hostname or \"\")\n )\n return urlunparse(\n (\n parsed_url.scheme,\n netloc,\n parsed_url.path,\n parsed_url.params,\n parsed_url.query,\n parsed_url.fragment,\n )\n )\n except ValueError: # an unparsable url was passed\n pass\n return url\n\n\ndef normalise_request_header_name(header: str) -> str:\n key = header.lower().replace(\"-\", \"_\")\n return f\"http.request.header.{key}\"\n\n\ndef normalise_response_header_name(header: str) -> str:\n key = header.lower().replace(\"-\", \"_\")\n return f\"http.response.header.{key}\"\n\n\ndef get_custom_headers(env_var: str) -> List[str]:\n custom_headers = environ.get(env_var, [])\n if custom_headers:\n custom_headers = [\n custom_headers.strip()\n for custom_headers in custom_headers.split(\",\")\n ]\n return custom_headers\n\n\ndef _parse_active_request_count_attrs(req_attrs):\n active_requests_count_attrs = {\n key: req_attrs[key]\n for key in _active_requests_count_attrs.intersection(req_attrs.keys())\n }\n return active_requests_count_attrs\n\n\ndef _parse_duration_attrs(req_attrs):\n duration_attrs = {\n key: req_attrs[key]\n for key in _duration_attrs.intersection(req_attrs.keys())\n }\n return duration_attrs\n", "path": "util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py"}]}
| 1,912 | 475 |
gh_patches_debug_2683
|
rasdani/github-patches
|
git_diff
|
huggingface__huggingface_hub-790
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support python=3.10
Python 3.10 has been out for a while but we seem to not test for it. What are the roadblocks for us to support 3.10 and maybe deprecate 3.6? (Many packages now support 3.8-3.10 and older versions are not supported anymore).
Ping @LysandreJik @osanseviero maybe?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import find_packages, setup
2
3
4 def get_version() -> str:
5 rel_path = "src/huggingface_hub/__init__.py"
6 with open(rel_path, "r") as fp:
7 for line in fp.read().splitlines():
8 if line.startswith("__version__"):
9 delim = '"' if '"' in line else "'"
10 return line.split(delim)[1]
11 raise RuntimeError("Unable to find version string.")
12
13
14 install_requires = [
15 "filelock",
16 "requests",
17 "tqdm",
18 "pyyaml",
19 "typing-extensions>=3.7.4.3", # to be able to import TypeAlias
20 "importlib_metadata;python_version<'3.8'",
21 "packaging>=20.9",
22 ]
23
24 extras = {}
25
26 extras["torch"] = [
27 "torch",
28 ]
29
30 extras["tensorflow"] = [
31 "tensorflow",
32 "pydot",
33 "graphviz"
34 ]
35
36 extras["testing"] = [
37 "pytest",
38 "datasets",
39 "soundfile",
40 ]
41
42 extras["quality"] = [
43 "black~=22.0",
44 "isort>=5.5.4",
45 "flake8>=3.8.3",
46 ]
47
48 extras["all"] = extras["testing"] + extras["quality"]
49
50 extras["dev"] = extras["all"]
51
52
53 setup(
54 name="huggingface_hub",
55 version=get_version(),
56 author="Hugging Face, Inc.",
57 author_email="[email protected]",
58 description="Client library to download and publish models on the huggingface.co hub",
59 long_description=open("README.md", "r", encoding="utf-8").read(),
60 long_description_content_type="text/markdown",
61 keywords="model-hub machine-learning models natural-language-processing deep-learning pytorch pretrained-models",
62 license="Apache",
63 url="https://github.com/huggingface/huggingface_hub",
64 package_dir={"": "src"},
65 packages=find_packages("src"),
66 extras_require=extras,
67 entry_points={
68 "console_scripts": [
69 "huggingface-cli=huggingface_hub.commands.huggingface_cli:main"
70 ]
71 },
72 python_requires=">=3.6.0",
73 install_requires=install_requires,
74 classifiers=[
75 "Intended Audience :: Developers",
76 "Intended Audience :: Education",
77 "Intended Audience :: Science/Research",
78 "License :: OSI Approved :: Apache Software License",
79 "Operating System :: OS Independent",
80 "Programming Language :: Python :: 3",
81 "Topic :: Scientific/Engineering :: Artificial Intelligence",
82 ],
83 )
84
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -69,7 +69,7 @@
"huggingface-cli=huggingface_hub.commands.huggingface_cli:main"
]
},
- python_requires=">=3.6.0",
+ python_requires=">=3.7.0",
install_requires=install_requires,
classifiers=[
"Intended Audience :: Developers",
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -69,7 +69,7 @@\n \"huggingface-cli=huggingface_hub.commands.huggingface_cli:main\"\n ]\n },\n- python_requires=\">=3.6.0\",\n+ python_requires=\">=3.7.0\",\n install_requires=install_requires,\n classifiers=[\n \"Intended Audience :: Developers\",\n", "issue": "Support python=3.10\nPython 3.10 has been out for a while but we seem to not test for it. What are the roadblocks for us to support 3.10 and maybe deprecate 3.6? (Many packages now support 3.8-3.10 and older versions are not supported anymore).\r\n\r\nPing @LysandreJik @osanseviero maybe?\n", "before_files": [{"content": "from setuptools import find_packages, setup\n\n\ndef get_version() -> str:\n rel_path = \"src/huggingface_hub/__init__.py\"\n with open(rel_path, \"r\") as fp:\n for line in fp.read().splitlines():\n if line.startswith(\"__version__\"):\n delim = '\"' if '\"' in line else \"'\"\n return line.split(delim)[1]\n raise RuntimeError(\"Unable to find version string.\")\n\n\ninstall_requires = [\n \"filelock\",\n \"requests\",\n \"tqdm\",\n \"pyyaml\",\n \"typing-extensions>=3.7.4.3\", # to be able to import TypeAlias\n \"importlib_metadata;python_version<'3.8'\",\n \"packaging>=20.9\",\n]\n\nextras = {}\n\nextras[\"torch\"] = [\n \"torch\",\n]\n\nextras[\"tensorflow\"] = [\n \"tensorflow\",\n \"pydot\",\n \"graphviz\"\n]\n\nextras[\"testing\"] = [\n \"pytest\",\n \"datasets\",\n \"soundfile\",\n]\n\nextras[\"quality\"] = [\n \"black~=22.0\",\n \"isort>=5.5.4\",\n \"flake8>=3.8.3\",\n]\n\nextras[\"all\"] = extras[\"testing\"] + extras[\"quality\"]\n\nextras[\"dev\"] = extras[\"all\"]\n\n\nsetup(\n name=\"huggingface_hub\",\n version=get_version(),\n author=\"Hugging Face, Inc.\",\n author_email=\"[email protected]\",\n description=\"Client library to download and publish models on the huggingface.co hub\",\n long_description=open(\"README.md\", \"r\", encoding=\"utf-8\").read(),\n long_description_content_type=\"text/markdown\",\n keywords=\"model-hub machine-learning models natural-language-processing deep-learning pytorch pretrained-models\",\n license=\"Apache\",\n url=\"https://github.com/huggingface/huggingface_hub\",\n package_dir={\"\": \"src\"},\n packages=find_packages(\"src\"),\n extras_require=extras,\n entry_points={\n \"console_scripts\": [\n \"huggingface-cli=huggingface_hub.commands.huggingface_cli:main\"\n ]\n },\n python_requires=\">=3.6.0\",\n install_requires=install_requires,\n classifiers=[\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Education\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import find_packages, setup\n\n\ndef get_version() -> str:\n rel_path = \"src/huggingface_hub/__init__.py\"\n with open(rel_path, \"r\") as fp:\n for line in fp.read().splitlines():\n if line.startswith(\"__version__\"):\n delim = '\"' if '\"' in line else \"'\"\n return line.split(delim)[1]\n raise RuntimeError(\"Unable to find version string.\")\n\n\ninstall_requires = [\n \"filelock\",\n \"requests\",\n \"tqdm\",\n \"pyyaml\",\n \"typing-extensions>=3.7.4.3\", # to be able to import TypeAlias\n \"importlib_metadata;python_version<'3.8'\",\n \"packaging>=20.9\",\n]\n\nextras = {}\n\nextras[\"torch\"] = [\n 
\"torch\",\n]\n\nextras[\"tensorflow\"] = [\n \"tensorflow\",\n \"pydot\",\n \"graphviz\"\n]\n\nextras[\"testing\"] = [\n \"pytest\",\n \"datasets\",\n \"soundfile\",\n]\n\nextras[\"quality\"] = [\n \"black~=22.0\",\n \"isort>=5.5.4\",\n \"flake8>=3.8.3\",\n]\n\nextras[\"all\"] = extras[\"testing\"] + extras[\"quality\"]\n\nextras[\"dev\"] = extras[\"all\"]\n\n\nsetup(\n name=\"huggingface_hub\",\n version=get_version(),\n author=\"Hugging Face, Inc.\",\n author_email=\"[email protected]\",\n description=\"Client library to download and publish models on the huggingface.co hub\",\n long_description=open(\"README.md\", \"r\", encoding=\"utf-8\").read(),\n long_description_content_type=\"text/markdown\",\n keywords=\"model-hub machine-learning models natural-language-processing deep-learning pytorch pretrained-models\",\n license=\"Apache\",\n url=\"https://github.com/huggingface/huggingface_hub\",\n package_dir={\"\": \"src\"},\n packages=find_packages(\"src\"),\n extras_require=extras,\n entry_points={\n \"console_scripts\": [\n \"huggingface-cli=huggingface_hub.commands.huggingface_cli:main\"\n ]\n },\n python_requires=\">=3.7.0\",\n install_requires=install_requires,\n classifiers=[\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Education\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n ],\n)\n", "path": "setup.py"}]}
| 1,063 | 96 |
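
The one-line fix above raises `python_requires` from `>=3.6.0` to `>=3.7.0`. A small sketch of how that specifier behaves, using the `packaging` library that is already listed in the project's `install_requires`; the interpreter version strings below are arbitrary examples:

```python
# Check which interpreter versions satisfy the declared python_requires bound.
from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=3.7.0")  # the value written into setup.py by the golden diff

for version in ("3.6.15", "3.7.0", "3.10.4"):
    print(version, version in spec)
# 3.6.15 False
# 3.7.0  True
# 3.10.4 True
```
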
gh_patches_debug_28735
|
rasdani/github-patches
|
git_diff
|
dotkom__onlineweb4-2362
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Show if inventory item is available in nibble shop in product listing
## What kind of an issue is this?
- [x] Feature request
## What is the expected behaviour?
There should be an icon in the dashboard product listing page that shows if an item is available for sale in Nibble.
This icon should also be clickable, to toggle whether or not the item should be available for sale in Nibble.
It should be possible to sort the items in the table according to the "available for sale"-state.
## Other information
This is how an event's attendee list looks, and it could probably be done in the same way (look at the rightmost column):

This is to be done in this list:

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `apps/inventory/dashboard/urls.py`
Content:
```
1 # -*- encoding: utf-8 -*-
2
3 from django.conf.urls import url
4
5 from apps.inventory.dashboard import views
6
7 urlpatterns = [
8 url(r'^$', views.index, name='dashboard_inventory_index'),
9 url(r'^statistics/$', views.statistics, name='dashboard_inventory_statistics'),
10 url(r'^statistics/orders/$', views.order_statistics, name='dashboard_inventory_order_statistics'),
11
12 url(r'^category/$', views.category_index, name='dashboard_category_index'),
13 url(r'^category/(?P<category_pk>\d+)/$', views.category_details, name='dashboard_category_details'),
14 url(r'^category/new/$', views.category_new, name='dashboard_category_new'),
15 url(r'^category/(?P<category_pk>\d+)/delete/$', views.category_delete, name='dashboard_category_delete'),
16
17 url(r'^new/$', views.new, name='dashboard_inventory_new'),
18 url(r'^item/(?P<item_pk>\d+)/$', views.details, name='dashboard_inventory_details'),
19 url(r'^item/(?P<item_pk>\d+)/delete/$', views.item_delete, name='dashboard_inventory_delete'),
20 url(r'^item/(?P<item_pk>\d+)/batch/new/$', views.batch_new, name='dashboard_inventory_batch_new'),
21 url(r'^item/(?P<item_pk>\d+)/batch/(?P<batch_pk>\d+)/$', views.batch, name='dashboard_inventory_batch'),
22 url(r'^item/(?P<item_pk>\d+)/batch/(?P<batch_pk>\d+)/delete/$',
23 views.batch_delete,
24 name='dashboard_inventory_batch_delete'),
25 ]
26
```
Path: `apps/inventory/dashboard/views.py`
Content:
```
1 # -*- encoding: utf-8 -*-
2
3 from logging import getLogger
4
5 from django.contrib import messages
6 from django.contrib.auth.decorators import login_required
7 from django.contrib.contenttypes.models import ContentType
8 from django.core.exceptions import PermissionDenied
9 from django.db.models import Count
10 from django.http import JsonResponse
11 from django.shortcuts import get_object_or_404, redirect, render
12 from guardian.decorators import permission_required
13
14 from apps.dashboard.tools import get_base_context, has_access
15 from apps.inventory.dashboard.forms import BatchForm, CategoryForm, ItemForm
16 from apps.inventory.models import Batch, Item, ItemCategory
17 from apps.shop.models import Order
18
19
20 @login_required
21 @permission_required('inventory.view_item', return_403=True)
22 def index(request):
23
24 # Generic check to see if user has access to dashboard. (In Komiteer or superuser)
25 if not has_access(request):
26 raise PermissionDenied
27
28 # Create the base context needed for the sidebar
29 context = get_base_context(request)
30
31 context['items'] = Item.objects.all().order_by('name')
32
33 return render(request, 'inventory/dashboard/index.html', context)
34
35
36 @login_required
37 @permission_required('inventory.add_item', return_403=True)
38 def new(request):
39
40 if not has_access(request):
41 raise PermissionDenied
42
43 # Get base context
44 context = get_base_context(request)
45
46 if request.method == 'POST':
47 inventory_form = ItemForm(request.POST)
48
49 if not inventory_form.is_valid():
50 messages.error(request, 'Noen av de påkrevde feltene inneholder feil.')
51 else:
52 item = inventory_form.save()
53 messages.success(request, 'Varen ble opprettet')
54 return redirect(details, item.id)
55
56 context['form'] = inventory_form
57
58 else:
59 context['form'] = ItemForm()
60
61 return render(request, 'inventory/dashboard/new.html', context)
62
63
64 @login_required
65 @permission_required('inventory.view_item', return_403=True)
66 def details(request, item_pk):
67 # Generic check to see if user has access to dashboard. (In Komiteer or superuser)
68 if not has_access(request):
69 raise PermissionDenied
70
71 # Create the base context needed for the sidebar
72 context = get_base_context(request)
73
74 context['item'] = get_object_or_404(Item, pk=item_pk)
75
76 if request.method == 'POST':
77 if 'inventory.change_item' not in context['user_permissions']:
78 raise PermissionDenied
79
80 item_form = ItemForm(request.POST, instance=context['item'])
81 if not item_form.is_valid():
82 messages.error(request, 'Noen av de påkrevde feltene inneholder feil.')
83 else:
84 item_form.save()
85 messages.success(request, 'Varen ble oppdatert')
86 context['form'] = item_form
87 else:
88 context['form'] = ItemForm(instance=context['item'])
89
90 context['new_batch_form'] = BatchForm()
91
92 context['batch_forms'] = [
93 (b.id, BatchForm(instance=b)) for b in Batch.objects.filter(item=context['item'])
94 ]
95
96 return render(request, 'inventory/dashboard/details.html', context)
97
98
99 @login_required
100 @permission_required('inventory.delete_item', return_403=True)
101 def item_delete(request, item_pk):
102 if not has_access(request):
103 raise PermissionDenied
104
105 item = get_object_or_404(Item, pk=item_pk)
106
107 if request.method == 'POST':
108
109 item.delete()
110
111 messages.success(request, 'Varen %s ble slettet.' % item.name)
112
113 return redirect(index)
114
115 raise PermissionDenied
116
117
118 @login_required
119 @permission_required('inventory.add_batch', return_403=True)
120 def batch_new(request, item_pk):
121 if not has_access(request):
122 raise PermissionDenied
123
124 # Field mapper
125 fieldmap = {
126 'amount': 'Mengde',
127 'expiration_date': 'Utløpsdato',
128 }
129
130 item = get_object_or_404(Item, pk=item_pk)
131
132 if request.method == 'POST':
133 batch_form = BatchForm(request.POST)
134
135 if not batch_form.is_valid():
136 # Dirty hack to display errors since the form is not passed in redirect context
137 error_reply = "Feil i felt:"
138 for field, error in batch_form.errors.items():
139 error_reply += ' ' + fieldmap[field] + ' (' + batch_form.error_class.as_text(error) + '),'
140
141 messages.error(request, error_reply.rstrip(','))
142 else:
143 b = batch_form.save(commit=False)
144 b.item = item
145 b.save()
146 messages.success(request, 'Batchen ble lagt til.')
147
148 return redirect(details, item_pk=item_pk)
149
150 raise PermissionDenied
151
152
153 @login_required
154 @permission_required('inventory.change_batch', return_403=True)
155 def batch(request, item_pk, batch_pk):
156 if not has_access(request):
157 raise PermissionDenied
158
159 # Get base context
160
161 get_object_or_404(Item, pk=item_pk)
162 b = get_object_or_404(Batch, pk=batch_pk)
163
164 if request.method == 'POST':
165 batch_form = BatchForm(request.POST, instance=b)
166
167 if not batch_form.is_valid():
168 messages.error(request, 'Noen av de påkrevde feltene inneholder feil.')
169 else:
170 batch_form.save()
171 messages.success(request, 'Batchen ble oppdatert.')
172
173 return redirect(details, item_pk=item_pk)
174
175 raise PermissionDenied
176
177
178 @login_required
179 @permission_required('inventory.delete_batch', return_403=True)
180 def batch_delete(request, item_pk, batch_pk):
181 if not has_access(request):
182 raise PermissionDenied
183
184 b = get_object_or_404(Batch, pk=batch_pk)
185
186 if request.method == 'POST':
187
188 b.delete()
189 messages.success(request, 'Batchen ble slettet.')
190
191 return redirect(details, item_pk=item_pk)
192
193 raise PermissionDenied
194
195
196 @login_required
197 @permission_required('inventory.view_itemcategory', return_403=True)
198 def category_index(request):
199
200 # Generic check to see if user has access to dashboard. (In Komiteer or superuser)
201 if not has_access(request):
202 raise PermissionDenied
203
204 # Create the base context needed for the sidebar
205 context = get_base_context(request)
206
207 context['categories'] = ItemCategory.objects.all()
208
209 return render(request, 'inventory/dashboard/category_index.html', context)
210
211
212 @login_required
213 @permission_required('inventory.view_itemcategory', return_403=True)
214 def category_details(request, category_pk):
215 # Generic check to see if user has access to dashboard. (In Komiteer or superuser)
216 if not has_access(request):
217 raise PermissionDenied
218
219 # Create the base context needed for the sidebar
220 context = get_base_context(request)
221
222 context['category'] = get_object_or_404(ItemCategory, pk=category_pk)
223
224 if request.method == 'POST':
225
226 form = CategoryForm(request.POST, instance=context['category'])
227 if not form.is_valid():
228 messages.error(request, 'Noen av de påkrevde feltene inneholder feil.')
229 else:
230 form.save()
231 messages.success(request, 'Kategorien ble oppdatert')
232 return redirect(category_index)
233
234 context['form'] = form
235 else:
236 context['form'] = CategoryForm(instance=context['category'])
237
238 return render(request, 'inventory/dashboard/category_new.html', context)
239
240
241 @login_required
242 @permission_required('inventory.add_itemcategory', return_403=True)
243 def category_new(request):
244
245 if not has_access(request):
246 raise PermissionDenied
247
248 # Get base context
249 context = get_base_context(request)
250
251 if request.method == 'POST':
252 form = CategoryForm(request.POST)
253
254 if not form.is_valid():
255 messages.error(request, 'Noen av de påkrevde feltene inneholder feil.')
256 else:
257 form.save()
258 messages.success(request, 'Kategorien ble opprettet')
259 return redirect(category_index)
260
261 context['form'] = form
262 else:
263 context['form'] = CategoryForm()
264
265 return render(request, 'inventory/dashboard/category_new.html', context)
266
267
268 @login_required
269 @permission_required('inventory.delete_itemcategory', return_403=True)
270 def category_delete(request, category_pk):
271 if not has_access(request):
272 raise PermissionDenied
273
274 category = get_object_or_404(ItemCategory, pk=category_pk)
275
276 items = Item.objects.filter(category=category)
277
278 # Removes the category binding to prevent cascading delete
279 for item in items:
280 item.category = None
281 item.save()
282
283 category.delete()
284 messages.success(request, 'Kategorien %s ble slettet.' % category.name)
285 return redirect(category_index)
286
287 raise PermissionDenied
288
289
290 @login_required
291 # @permission_required('inventory.delete_batch', return_403=True)
292 def statistics(request):
293 # if not has_access(request):
294 # raise PermissionDenied
295
296 context = get_base_context(request)
297
298 return render(request, 'inventory/dashboard/statistics.html', context)
299
300
301 @login_required
302 def order_statistics(request):
303 # TODO check permissions
304
305 statistics = dict()
306
307 counts = Order.objects.all().values('object_id', 'content_type').annotate(total=Count('object_id'))
308 item_type = ContentType.objects.get_for_model(Item)
309
310 for count in counts:
311 print(count)
312 if item_type.id == count['content_type']:
313 try:
314 item = Item.objects.get(pk=count['object_id'])
315 except Item.DoesNotExist:
316 getLogger(__name__).error('Item with pk %s does not exist (DoesNotExist error)' % count['object_id'])
317 except KeyError:
318 getLogger(__name__).error('Key "object_id" does not exist')
319 if count['total'] > 0:
320 statistics[item.name] = count['total']
321
322 return JsonResponse(statistics)
323
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/apps/inventory/dashboard/urls.py b/apps/inventory/dashboard/urls.py
--- a/apps/inventory/dashboard/urls.py
+++ b/apps/inventory/dashboard/urls.py
@@ -17,6 +17,7 @@
url(r'^new/$', views.new, name='dashboard_inventory_new'),
url(r'^item/(?P<item_pk>\d+)/$', views.details, name='dashboard_inventory_details'),
url(r'^item/(?P<item_pk>\d+)/delete/$', views.item_delete, name='dashboard_inventory_delete'),
+ url(r'^item/(?P<item_pk>\d+)/change/$', views.item_change_availability, name='dashboard_inventory_change'),
url(r'^item/(?P<item_pk>\d+)/batch/new/$', views.batch_new, name='dashboard_inventory_batch_new'),
url(r'^item/(?P<item_pk>\d+)/batch/(?P<batch_pk>\d+)/$', views.batch, name='dashboard_inventory_batch'),
url(r'^item/(?P<item_pk>\d+)/batch/(?P<batch_pk>\d+)/delete/$',
diff --git a/apps/inventory/dashboard/views.py b/apps/inventory/dashboard/views.py
--- a/apps/inventory/dashboard/views.py
+++ b/apps/inventory/dashboard/views.py
@@ -115,6 +115,23 @@
raise PermissionDenied
+@login_required
+@permission_required('inventory.change_item', return_403=True)
+def item_change_availability(request, item_pk):
+ if not has_access(request):
+ raise PermissionDenied
+
+ item = get_object_or_404(Item, pk=item_pk)
+
+ # AJAX
+ if request.method == 'POST':
+ if request.is_ajax and 'action' in request.POST:
+ item.available = not item.available
+ item.save()
+
+ return JsonResponse({'message': 'OK', 'status': 200})
+
+
@login_required
@permission_required('inventory.add_batch', return_403=True)
def batch_new(request, item_pk):
|
{"golden_diff": "diff --git a/apps/inventory/dashboard/urls.py b/apps/inventory/dashboard/urls.py\n--- a/apps/inventory/dashboard/urls.py\n+++ b/apps/inventory/dashboard/urls.py\n@@ -17,6 +17,7 @@\n url(r'^new/$', views.new, name='dashboard_inventory_new'),\n url(r'^item/(?P<item_pk>\\d+)/$', views.details, name='dashboard_inventory_details'),\n url(r'^item/(?P<item_pk>\\d+)/delete/$', views.item_delete, name='dashboard_inventory_delete'),\n+ url(r'^item/(?P<item_pk>\\d+)/change/$', views.item_change_availability, name='dashboard_inventory_change'),\n url(r'^item/(?P<item_pk>\\d+)/batch/new/$', views.batch_new, name='dashboard_inventory_batch_new'),\n url(r'^item/(?P<item_pk>\\d+)/batch/(?P<batch_pk>\\d+)/$', views.batch, name='dashboard_inventory_batch'),\n url(r'^item/(?P<item_pk>\\d+)/batch/(?P<batch_pk>\\d+)/delete/$',\ndiff --git a/apps/inventory/dashboard/views.py b/apps/inventory/dashboard/views.py\n--- a/apps/inventory/dashboard/views.py\n+++ b/apps/inventory/dashboard/views.py\n@@ -115,6 +115,23 @@\n raise PermissionDenied\n \n \n+@login_required\n+@permission_required('inventory.change_item', return_403=True)\n+def item_change_availability(request, item_pk):\n+ if not has_access(request):\n+ raise PermissionDenied\n+\n+ item = get_object_or_404(Item, pk=item_pk)\n+\n+ # AJAX\n+ if request.method == 'POST':\n+ if request.is_ajax and 'action' in request.POST:\n+ item.available = not item.available\n+ item.save()\n+\n+ return JsonResponse({'message': 'OK', 'status': 200})\n+\n+\n @login_required\n @permission_required('inventory.add_batch', return_403=True)\n def batch_new(request, item_pk):\n", "issue": "Show if inventory item is available in nibble shop in product listing\n## What kind of an issue is this?\r\n\r\n- [x] Feature request\r\n\r\n\r\n## What is the expected behaviour?\r\n\r\nThere should be an icon in the dashboard product listing page that shows if an item is available for sale in Nibble.\r\n\r\nThis icon should also be clickable, to toggle whether or not the item should be available for sale in Nibble.\r\n\r\nIt should be possible to sort the items in the table according to the \"available for sale\"-state.\r\n\r\n\r\n## Other information\r\n\r\nThis is how an event's attendee list looks, and it could probably be done in the same way (look at the rightmost column): \r\n\r\n\r\nThis is to be done in this list:\r\n\r\n\n", "before_files": [{"content": "# -*- encoding: utf-8 -*-\n\nfrom django.conf.urls import url\n\nfrom apps.inventory.dashboard import views\n\nurlpatterns = [\n url(r'^$', views.index, name='dashboard_inventory_index'),\n url(r'^statistics/$', views.statistics, name='dashboard_inventory_statistics'),\n url(r'^statistics/orders/$', views.order_statistics, name='dashboard_inventory_order_statistics'),\n\n url(r'^category/$', views.category_index, name='dashboard_category_index'),\n url(r'^category/(?P<category_pk>\\d+)/$', views.category_details, name='dashboard_category_details'),\n url(r'^category/new/$', views.category_new, name='dashboard_category_new'),\n url(r'^category/(?P<category_pk>\\d+)/delete/$', views.category_delete, name='dashboard_category_delete'),\n\n url(r'^new/$', views.new, name='dashboard_inventory_new'),\n url(r'^item/(?P<item_pk>\\d+)/$', views.details, name='dashboard_inventory_details'),\n url(r'^item/(?P<item_pk>\\d+)/delete/$', views.item_delete, name='dashboard_inventory_delete'),\n url(r'^item/(?P<item_pk>\\d+)/batch/new/$', views.batch_new, name='dashboard_inventory_batch_new'),\n 
url(r'^item/(?P<item_pk>\\d+)/batch/(?P<batch_pk>\\d+)/$', views.batch, name='dashboard_inventory_batch'),\n url(r'^item/(?P<item_pk>\\d+)/batch/(?P<batch_pk>\\d+)/delete/$',\n views.batch_delete,\n name='dashboard_inventory_batch_delete'),\n]\n", "path": "apps/inventory/dashboard/urls.py"}, {"content": "# -*- encoding: utf-8 -*-\n\nfrom logging import getLogger\n\nfrom django.contrib import messages\nfrom django.contrib.auth.decorators import login_required\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.core.exceptions import PermissionDenied\nfrom django.db.models import Count\nfrom django.http import JsonResponse\nfrom django.shortcuts import get_object_or_404, redirect, render\nfrom guardian.decorators import permission_required\n\nfrom apps.dashboard.tools import get_base_context, has_access\nfrom apps.inventory.dashboard.forms import BatchForm, CategoryForm, ItemForm\nfrom apps.inventory.models import Batch, Item, ItemCategory\nfrom apps.shop.models import Order\n\n\n@login_required\n@permission_required('inventory.view_item', return_403=True)\ndef index(request):\n\n # Generic check to see if user has access to dashboard. (In Komiteer or superuser)\n if not has_access(request):\n raise PermissionDenied\n\n # Create the base context needed for the sidebar\n context = get_base_context(request)\n\n context['items'] = Item.objects.all().order_by('name')\n\n return render(request, 'inventory/dashboard/index.html', context)\n\n\n@login_required\n@permission_required('inventory.add_item', return_403=True)\ndef new(request):\n\n if not has_access(request):\n raise PermissionDenied\n\n # Get base context\n context = get_base_context(request)\n\n if request.method == 'POST':\n inventory_form = ItemForm(request.POST)\n\n if not inventory_form.is_valid():\n messages.error(request, 'Noen av de p\u00e5krevde feltene inneholder feil.')\n else:\n item = inventory_form.save()\n messages.success(request, 'Varen ble opprettet')\n return redirect(details, item.id)\n\n context['form'] = inventory_form\n\n else:\n context['form'] = ItemForm()\n\n return render(request, 'inventory/dashboard/new.html', context)\n\n\n@login_required\n@permission_required('inventory.view_item', return_403=True)\ndef details(request, item_pk):\n # Generic check to see if user has access to dashboard. 
(In Komiteer or superuser)\n if not has_access(request):\n raise PermissionDenied\n\n # Create the base context needed for the sidebar\n context = get_base_context(request)\n\n context['item'] = get_object_or_404(Item, pk=item_pk)\n\n if request.method == 'POST':\n if 'inventory.change_item' not in context['user_permissions']:\n raise PermissionDenied\n\n item_form = ItemForm(request.POST, instance=context['item'])\n if not item_form.is_valid():\n messages.error(request, 'Noen av de p\u00e5krevde feltene inneholder feil.')\n else:\n item_form.save()\n messages.success(request, 'Varen ble oppdatert')\n context['form'] = item_form\n else:\n context['form'] = ItemForm(instance=context['item'])\n\n context['new_batch_form'] = BatchForm()\n\n context['batch_forms'] = [\n (b.id, BatchForm(instance=b)) for b in Batch.objects.filter(item=context['item'])\n ]\n\n return render(request, 'inventory/dashboard/details.html', context)\n\n\n@login_required\n@permission_required('inventory.delete_item', return_403=True)\ndef item_delete(request, item_pk):\n if not has_access(request):\n raise PermissionDenied\n\n item = get_object_or_404(Item, pk=item_pk)\n\n if request.method == 'POST':\n\n item.delete()\n\n messages.success(request, 'Varen %s ble slettet.' % item.name)\n\n return redirect(index)\n\n raise PermissionDenied\n\n\n@login_required\n@permission_required('inventory.add_batch', return_403=True)\ndef batch_new(request, item_pk):\n if not has_access(request):\n raise PermissionDenied\n\n # Field mapper\n fieldmap = {\n 'amount': 'Mengde',\n 'expiration_date': 'Utl\u00f8psdato',\n }\n\n item = get_object_or_404(Item, pk=item_pk)\n\n if request.method == 'POST':\n batch_form = BatchForm(request.POST)\n\n if not batch_form.is_valid():\n # Dirty hack to display errors since the form is not passed in redirect context\n error_reply = \"Feil i felt:\"\n for field, error in batch_form.errors.items():\n error_reply += ' ' + fieldmap[field] + ' (' + batch_form.error_class.as_text(error) + '),'\n\n messages.error(request, error_reply.rstrip(','))\n else:\n b = batch_form.save(commit=False)\n b.item = item\n b.save()\n messages.success(request, 'Batchen ble lagt til.')\n\n return redirect(details, item_pk=item_pk)\n\n raise PermissionDenied\n\n\n@login_required\n@permission_required('inventory.change_batch', return_403=True)\ndef batch(request, item_pk, batch_pk):\n if not has_access(request):\n raise PermissionDenied\n\n # Get base context\n\n get_object_or_404(Item, pk=item_pk)\n b = get_object_or_404(Batch, pk=batch_pk)\n\n if request.method == 'POST':\n batch_form = BatchForm(request.POST, instance=b)\n\n if not batch_form.is_valid():\n messages.error(request, 'Noen av de p\u00e5krevde feltene inneholder feil.')\n else:\n batch_form.save()\n messages.success(request, 'Batchen ble oppdatert.')\n\n return redirect(details, item_pk=item_pk)\n\n raise PermissionDenied\n\n\n@login_required\n@permission_required('inventory.delete_batch', return_403=True)\ndef batch_delete(request, item_pk, batch_pk):\n if not has_access(request):\n raise PermissionDenied\n\n b = get_object_or_404(Batch, pk=batch_pk)\n\n if request.method == 'POST':\n\n b.delete()\n messages.success(request, 'Batchen ble slettet.')\n\n return redirect(details, item_pk=item_pk)\n\n raise PermissionDenied\n\n\n@login_required\n@permission_required('inventory.view_itemcategory', return_403=True)\ndef category_index(request):\n\n # Generic check to see if user has access to dashboard. 
(In Komiteer or superuser)\n if not has_access(request):\n raise PermissionDenied\n\n # Create the base context needed for the sidebar\n context = get_base_context(request)\n\n context['categories'] = ItemCategory.objects.all()\n\n return render(request, 'inventory/dashboard/category_index.html', context)\n\n\n@login_required\n@permission_required('inventory.view_itemcategory', return_403=True)\ndef category_details(request, category_pk):\n # Generic check to see if user has access to dashboard. (In Komiteer or superuser)\n if not has_access(request):\n raise PermissionDenied\n\n # Create the base context needed for the sidebar\n context = get_base_context(request)\n\n context['category'] = get_object_or_404(ItemCategory, pk=category_pk)\n\n if request.method == 'POST':\n\n form = CategoryForm(request.POST, instance=context['category'])\n if not form.is_valid():\n messages.error(request, 'Noen av de p\u00e5krevde feltene inneholder feil.')\n else:\n form.save()\n messages.success(request, 'Kategorien ble oppdatert')\n return redirect(category_index)\n\n context['form'] = form\n else:\n context['form'] = CategoryForm(instance=context['category'])\n\n return render(request, 'inventory/dashboard/category_new.html', context)\n\n\n@login_required\n@permission_required('inventory.add_itemcategory', return_403=True)\ndef category_new(request):\n\n if not has_access(request):\n raise PermissionDenied\n\n # Get base context\n context = get_base_context(request)\n\n if request.method == 'POST':\n form = CategoryForm(request.POST)\n\n if not form.is_valid():\n messages.error(request, 'Noen av de p\u00e5krevde feltene inneholder feil.')\n else:\n form.save()\n messages.success(request, 'Kategorien ble opprettet')\n return redirect(category_index)\n\n context['form'] = form\n else:\n context['form'] = CategoryForm()\n\n return render(request, 'inventory/dashboard/category_new.html', context)\n\n\n@login_required\n@permission_required('inventory.delete_itemcategory', return_403=True)\ndef category_delete(request, category_pk):\n if not has_access(request):\n raise PermissionDenied\n\n category = get_object_or_404(ItemCategory, pk=category_pk)\n\n items = Item.objects.filter(category=category)\n\n # Removes the category binding to prevent cascading delete\n for item in items:\n item.category = None\n item.save()\n\n category.delete()\n messages.success(request, 'Kategorien %s ble slettet.' 
% category.name)\n return redirect(category_index)\n\n raise PermissionDenied\n\n\n@login_required\n# @permission_required('inventory.delete_batch', return_403=True)\ndef statistics(request):\n # if not has_access(request):\n # raise PermissionDenied\n\n context = get_base_context(request)\n\n return render(request, 'inventory/dashboard/statistics.html', context)\n\n\n@login_required\ndef order_statistics(request):\n # TODO check permissions\n\n statistics = dict()\n\n counts = Order.objects.all().values('object_id', 'content_type').annotate(total=Count('object_id'))\n item_type = ContentType.objects.get_for_model(Item)\n\n for count in counts:\n print(count)\n if item_type.id == count['content_type']:\n try:\n item = Item.objects.get(pk=count['object_id'])\n except Item.DoesNotExist:\n getLogger(__name__).error('Item with pk %s does not exist (DoesNotExist error)' % count['object_id'])\n except KeyError:\n getLogger(__name__).error('Key \"object_id\" does not exist')\n if count['total'] > 0:\n statistics[item.name] = count['total']\n\n return JsonResponse(statistics)\n", "path": "apps/inventory/dashboard/views.py"}], "after_files": [{"content": "# -*- encoding: utf-8 -*-\n\nfrom django.conf.urls import url\n\nfrom apps.inventory.dashboard import views\n\nurlpatterns = [\n url(r'^$', views.index, name='dashboard_inventory_index'),\n url(r'^statistics/$', views.statistics, name='dashboard_inventory_statistics'),\n url(r'^statistics/orders/$', views.order_statistics, name='dashboard_inventory_order_statistics'),\n\n url(r'^category/$', views.category_index, name='dashboard_category_index'),\n url(r'^category/(?P<category_pk>\\d+)/$', views.category_details, name='dashboard_category_details'),\n url(r'^category/new/$', views.category_new, name='dashboard_category_new'),\n url(r'^category/(?P<category_pk>\\d+)/delete/$', views.category_delete, name='dashboard_category_delete'),\n\n url(r'^new/$', views.new, name='dashboard_inventory_new'),\n url(r'^item/(?P<item_pk>\\d+)/$', views.details, name='dashboard_inventory_details'),\n url(r'^item/(?P<item_pk>\\d+)/delete/$', views.item_delete, name='dashboard_inventory_delete'),\n url(r'^item/(?P<item_pk>\\d+)/change/$', views.item_change_availability, name='dashboard_inventory_change'),\n url(r'^item/(?P<item_pk>\\d+)/batch/new/$', views.batch_new, name='dashboard_inventory_batch_new'),\n url(r'^item/(?P<item_pk>\\d+)/batch/(?P<batch_pk>\\d+)/$', views.batch, name='dashboard_inventory_batch'),\n url(r'^item/(?P<item_pk>\\d+)/batch/(?P<batch_pk>\\d+)/delete/$',\n views.batch_delete,\n name='dashboard_inventory_batch_delete'),\n]\n", "path": "apps/inventory/dashboard/urls.py"}, {"content": "# -*- encoding: utf-8 -*-\n\nfrom logging import getLogger\n\nfrom django.contrib import messages\nfrom django.contrib.auth.decorators import login_required\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.core.exceptions import PermissionDenied\nfrom django.db.models import Count\nfrom django.http import JsonResponse\nfrom django.shortcuts import get_object_or_404, redirect, render\nfrom guardian.decorators import permission_required\n\nfrom apps.dashboard.tools import get_base_context, has_access\nfrom apps.inventory.dashboard.forms import BatchForm, CategoryForm, ItemForm\nfrom apps.inventory.models import Batch, Item, ItemCategory\nfrom apps.shop.models import Order\n\n\n@login_required\n@permission_required('inventory.view_item', return_403=True)\ndef index(request):\n\n # Generic check to see if user has access to dashboard. 
(In Komiteer or superuser)\n if not has_access(request):\n raise PermissionDenied\n\n # Create the base context needed for the sidebar\n context = get_base_context(request)\n\n context['items'] = Item.objects.all().order_by('name')\n\n return render(request, 'inventory/dashboard/index.html', context)\n\n\n@login_required\n@permission_required('inventory.add_item', return_403=True)\ndef new(request):\n\n if not has_access(request):\n raise PermissionDenied\n\n # Get base context\n context = get_base_context(request)\n\n if request.method == 'POST':\n inventory_form = ItemForm(request.POST)\n\n if not inventory_form.is_valid():\n messages.error(request, 'Noen av de p\u00e5krevde feltene inneholder feil.')\n else:\n item = inventory_form.save()\n messages.success(request, 'Varen ble opprettet')\n return redirect(details, item.id)\n\n context['form'] = inventory_form\n\n else:\n context['form'] = ItemForm()\n\n return render(request, 'inventory/dashboard/new.html', context)\n\n\n@login_required\n@permission_required('inventory.view_item', return_403=True)\ndef details(request, item_pk):\n # Generic check to see if user has access to dashboard. (In Komiteer or superuser)\n if not has_access(request):\n raise PermissionDenied\n\n # Create the base context needed for the sidebar\n context = get_base_context(request)\n\n context['item'] = get_object_or_404(Item, pk=item_pk)\n\n if request.method == 'POST':\n if 'inventory.change_item' not in context['user_permissions']:\n raise PermissionDenied\n\n item_form = ItemForm(request.POST, instance=context['item'])\n if not item_form.is_valid():\n messages.error(request, 'Noen av de p\u00e5krevde feltene inneholder feil.')\n else:\n item_form.save()\n messages.success(request, 'Varen ble oppdatert')\n context['form'] = item_form\n else:\n context['form'] = ItemForm(instance=context['item'])\n\n context['new_batch_form'] = BatchForm()\n\n context['batch_forms'] = [\n (b.id, BatchForm(instance=b)) for b in Batch.objects.filter(item=context['item'])\n ]\n\n return render(request, 'inventory/dashboard/details.html', context)\n\n\n@login_required\n@permission_required('inventory.delete_item', return_403=True)\ndef item_delete(request, item_pk):\n if not has_access(request):\n raise PermissionDenied\n\n item = get_object_or_404(Item, pk=item_pk)\n\n if request.method == 'POST':\n\n item.delete()\n\n messages.success(request, 'Varen %s ble slettet.' 
% item.name)\n\n return redirect(index)\n\n raise PermissionDenied\n\n\n@login_required\n@permission_required('inventory.change_item', return_403=True)\ndef item_change_availability(request, item_pk):\n if not has_access(request):\n raise PermissionDenied\n\n item = get_object_or_404(Item, pk=item_pk)\n\n # AJAX\n if request.method == 'POST':\n if request.is_ajax and 'action' in request.POST:\n item.available = not item.available\n item.save()\n\n return JsonResponse({'message': 'OK', 'status': 200})\n\n\n@login_required\n@permission_required('inventory.add_batch', return_403=True)\ndef batch_new(request, item_pk):\n if not has_access(request):\n raise PermissionDenied\n\n # Field mapper\n fieldmap = {\n 'amount': 'Mengde',\n 'expiration_date': 'Utl\u00f8psdato',\n }\n\n item = get_object_or_404(Item, pk=item_pk)\n\n if request.method == 'POST':\n batch_form = BatchForm(request.POST)\n\n if not batch_form.is_valid():\n # Dirty hack to display errors since the form is not passed in redirect context\n error_reply = \"Feil i felt:\"\n for field, error in batch_form.errors.items():\n error_reply += ' ' + fieldmap[field] + ' (' + batch_form.error_class.as_text(error) + '),'\n\n messages.error(request, error_reply.rstrip(','))\n else:\n b = batch_form.save(commit=False)\n b.item = item\n b.save()\n messages.success(request, 'Batchen ble lagt til.')\n\n return redirect(details, item_pk=item_pk)\n\n raise PermissionDenied\n\n\n@login_required\n@permission_required('inventory.change_batch', return_403=True)\ndef batch(request, item_pk, batch_pk):\n if not has_access(request):\n raise PermissionDenied\n\n # Get base context\n\n get_object_or_404(Item, pk=item_pk)\n b = get_object_or_404(Batch, pk=batch_pk)\n\n if request.method == 'POST':\n batch_form = BatchForm(request.POST, instance=b)\n\n if not batch_form.is_valid():\n messages.error(request, 'Noen av de p\u00e5krevde feltene inneholder feil.')\n else:\n batch_form.save()\n messages.success(request, 'Batchen ble oppdatert.')\n\n return redirect(details, item_pk=item_pk)\n\n raise PermissionDenied\n\n\n@login_required\n@permission_required('inventory.delete_batch', return_403=True)\ndef batch_delete(request, item_pk, batch_pk):\n if not has_access(request):\n raise PermissionDenied\n\n b = get_object_or_404(Batch, pk=batch_pk)\n\n if request.method == 'POST':\n\n b.delete()\n messages.success(request, 'Batchen ble slettet.')\n\n return redirect(details, item_pk=item_pk)\n\n raise PermissionDenied\n\n\n@login_required\n@permission_required('inventory.view_itemcategory', return_403=True)\ndef category_index(request):\n\n # Generic check to see if user has access to dashboard. (In Komiteer or superuser)\n if not has_access(request):\n raise PermissionDenied\n\n # Create the base context needed for the sidebar\n context = get_base_context(request)\n\n context['categories'] = ItemCategory.objects.all()\n\n return render(request, 'inventory/dashboard/category_index.html', context)\n\n\n@login_required\n@permission_required('inventory.view_itemcategory', return_403=True)\ndef category_details(request, category_pk):\n # Generic check to see if user has access to dashboard. 
(In Komiteer or superuser)\n if not has_access(request):\n raise PermissionDenied\n\n # Create the base context needed for the sidebar\n context = get_base_context(request)\n\n context['category'] = get_object_or_404(ItemCategory, pk=category_pk)\n\n if request.method == 'POST':\n\n form = CategoryForm(request.POST, instance=context['category'])\n if not form.is_valid():\n messages.error(request, 'Noen av de p\u00e5krevde feltene inneholder feil.')\n else:\n form.save()\n messages.success(request, 'Kategorien ble oppdatert')\n return redirect(category_index)\n\n context['form'] = form\n else:\n context['form'] = CategoryForm(instance=context['category'])\n\n return render(request, 'inventory/dashboard/category_new.html', context)\n\n\n@login_required\n@permission_required('inventory.add_itemcategory', return_403=True)\ndef category_new(request):\n\n if not has_access(request):\n raise PermissionDenied\n\n # Get base context\n context = get_base_context(request)\n\n if request.method == 'POST':\n form = CategoryForm(request.POST)\n\n if not form.is_valid():\n messages.error(request, 'Noen av de p\u00e5krevde feltene inneholder feil.')\n else:\n form.save()\n messages.success(request, 'Kategorien ble opprettet')\n return redirect(category_index)\n\n context['form'] = form\n else:\n context['form'] = CategoryForm()\n\n return render(request, 'inventory/dashboard/category_new.html', context)\n\n\n@login_required\n@permission_required('inventory.delete_itemcategory', return_403=True)\ndef category_delete(request, category_pk):\n if not has_access(request):\n raise PermissionDenied\n\n category = get_object_or_404(ItemCategory, pk=category_pk)\n\n items = Item.objects.filter(category=category)\n\n # Removes the category binding to prevent cascading delete\n for item in items:\n item.category = None\n item.save()\n\n category.delete()\n messages.success(request, 'Kategorien %s ble slettet.' % category.name)\n return redirect(category_index)\n\n raise PermissionDenied\n\n\n@login_required\n# @permission_required('inventory.delete_batch', return_403=True)\ndef statistics(request):\n # if not has_access(request):\n # raise PermissionDenied\n\n context = get_base_context(request)\n\n return render(request, 'inventory/dashboard/statistics.html', context)\n\n\n@login_required\ndef order_statistics(request):\n # TODO check permissions\n\n statistics = dict()\n\n counts = Order.objects.all().values('object_id', 'content_type').annotate(total=Count('object_id'))\n item_type = ContentType.objects.get_for_model(Item)\n\n for count in counts:\n print(count)\n if item_type.id == count['content_type']:\n try:\n item = Item.objects.get(pk=count['object_id'])\n except Item.DoesNotExist:\n getLogger(__name__).error('Item with pk %s does not exist (DoesNotExist error)' % count['object_id'])\n except KeyError:\n getLogger(__name__).error('Key \"object_id\" does not exist')\n if count['total'] > 0:\n statistics[item.name] = count['total']\n\n return JsonResponse(statistics)\n", "path": "apps/inventory/dashboard/views.py"}]}
| 4,005 | 439 |
gh_patches_debug_36073
|
rasdani/github-patches
|
git_diff
|
streamlink__streamlink-5711
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
plugins.wasd: service gone
### Checklist
- [X] This is a [plugin issue](https://streamlink.github.io/plugins.html) and not [a different kind of issue](https://github.com/streamlink/streamlink/issues/new/choose)
- [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink)
- [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22)
- [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master)
### Streamlink version
6.4.2
### Description
A few days ago, the service [gone](https://mts.ru/personal/novosti/2023-12-05/vstrechajte-polzovatelskuyu-videoplatformu-nuum). Now this [nuum.ru](https://nuum.ru).
Though we could easily replace the plugin, but I'm not sure it's worth adding it to upstream, because it's a beta version.
<details>
```diff
diff --git a/src/streamlink/plugins/wasd.py b/src/streamlink/plugins/wasd.py
index 7d61304e..656a16eb 100644
--- a/src/streamlink/plugins/wasd.py
+++ b/src/streamlink/plugins/wasd.py
@@ -16,7 +16,7 @@ log = logging.getLogger(__name__)
@pluginmatcher(re.compile(
- r"https?://(?:www\.)?wasd\.tv/(?P<nickname>[^/]+)/?$",
+ r"https?://(?:www\.)?nuum\.ru/channel/(?P<nickname>[^/]+)/?$",
))
class WASD(Plugin):
_media_schema = validate.Schema({
@@ -53,11 +53,11 @@ class WASD(Plugin):
def _get_streams(self):
nickname = self.match.group("nickname")
- res = self.session.http.get(f"https://wasd.tv/api/channels/nicknames/{nickname}")
+ res = self.session.http.get(f"https://nuum.ru/api/channels/nicknames/{nickname}")
channel_id = self.session.http.json(res, schema=self._api_nicknames_schema)
res = self.session.http.get(
- "https://wasd.tv/api/v2/media-containers",
+ "https://nuum.ru/api/v2/media-containers",
params={
"media_container_status": "RUNNING",
"limit": "1",
```
</details>
### Debug log
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/streamlink/plugins/wasd.py`
Content:
```
1 """
2 $description Russian live-streaming social platform.
3 $url wasd.tv
4 $type live
5 """
6
7 import logging
8 import re
9
10 from streamlink.plugin import Plugin, PluginError, pluginmatcher
11 from streamlink.plugin.api import validate
12 from streamlink.stream.hls import HLSStream
13
14
15 log = logging.getLogger(__name__)
16
17
18 @pluginmatcher(re.compile(
19 r"https?://(?:www\.)?wasd\.tv/(?P<nickname>[^/]+)/?$",
20 ))
21 class WASD(Plugin):
22 _media_schema = validate.Schema({
23 "user_id": int,
24 "media_container_online_status": str,
25 "media_container_status": str,
26 "media_container_streams": [{
27 "stream_media": [{
28 "media_id": int,
29 "media_meta": {
30 "media_url": validate.any(str, None),
31 "media_archive_url": validate.any(str, None),
32 },
33 "media_status": validate.any("STOPPED", "RUNNING"),
34 "media_type": "HLS",
35 }],
36 }],
37 })
38 _api_schema = validate.Schema({
39 "result":
40 validate.any(
41 _media_schema,
42 validate.all(list,
43 validate.get(0),
44 _media_schema),
45 [],
46 ),
47 }, validate.get("result"))
48 _api_nicknames_schema = validate.Schema({
49 "result": {
50 "channel_id": int,
51 },
52 }, validate.get("result"), validate.get("channel_id"))
53
54 def _get_streams(self):
55 nickname = self.match.group("nickname")
56 res = self.session.http.get(f"https://wasd.tv/api/channels/nicknames/{nickname}")
57 channel_id = self.session.http.json(res, schema=self._api_nicknames_schema)
58
59 res = self.session.http.get(
60 "https://wasd.tv/api/v2/media-containers",
61 params={
62 "media_container_status": "RUNNING",
63 "limit": "1",
64 "offset": "0",
65 "channel_id": channel_id,
66 "media_container_type": "SINGLE,COOP",
67 },
68 )
69
70 json_res = self.session.http.json(res, schema=self._api_schema)
71 log.trace("{0!r}".format(json_res))
72 if not json_res:
73 raise PluginError("No data returned from URL={0}".format(res.url))
74
75 for stream in json_res["media_container_streams"]:
76 log.debug("media_container_status: {0}, media_container_online_status: {1}".format(
77 json_res["media_container_status"], json_res["media_container_online_status"]))
78 for stream_media in stream["stream_media"]:
79 if stream_media["media_status"] == "STOPPED":
80 hls_url = stream_media["media_meta"]["media_archive_url"]
81 else:
82 hls_url = stream_media["media_meta"]["media_url"]
83
84 yield from HLSStream.parse_variant_playlist(self.session, hls_url).items()
85
86
87 __plugin__ = WASD
88
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/streamlink/plugins/wasd.py b/src/streamlink/plugins/wasd.py
deleted file mode 100644
--- a/src/streamlink/plugins/wasd.py
+++ /dev/null
@@ -1,87 +0,0 @@
-"""
-$description Russian live-streaming social platform.
-$url wasd.tv
-$type live
-"""
-
-import logging
-import re
-
-from streamlink.plugin import Plugin, PluginError, pluginmatcher
-from streamlink.plugin.api import validate
-from streamlink.stream.hls import HLSStream
-
-
-log = logging.getLogger(__name__)
-
-
-@pluginmatcher(re.compile(
- r"https?://(?:www\.)?wasd\.tv/(?P<nickname>[^/]+)/?$",
-))
-class WASD(Plugin):
- _media_schema = validate.Schema({
- "user_id": int,
- "media_container_online_status": str,
- "media_container_status": str,
- "media_container_streams": [{
- "stream_media": [{
- "media_id": int,
- "media_meta": {
- "media_url": validate.any(str, None),
- "media_archive_url": validate.any(str, None),
- },
- "media_status": validate.any("STOPPED", "RUNNING"),
- "media_type": "HLS",
- }],
- }],
- })
- _api_schema = validate.Schema({
- "result":
- validate.any(
- _media_schema,
- validate.all(list,
- validate.get(0),
- _media_schema),
- [],
- ),
- }, validate.get("result"))
- _api_nicknames_schema = validate.Schema({
- "result": {
- "channel_id": int,
- },
- }, validate.get("result"), validate.get("channel_id"))
-
- def _get_streams(self):
- nickname = self.match.group("nickname")
- res = self.session.http.get(f"https://wasd.tv/api/channels/nicknames/{nickname}")
- channel_id = self.session.http.json(res, schema=self._api_nicknames_schema)
-
- res = self.session.http.get(
- "https://wasd.tv/api/v2/media-containers",
- params={
- "media_container_status": "RUNNING",
- "limit": "1",
- "offset": "0",
- "channel_id": channel_id,
- "media_container_type": "SINGLE,COOP",
- },
- )
-
- json_res = self.session.http.json(res, schema=self._api_schema)
- log.trace("{0!r}".format(json_res))
- if not json_res:
- raise PluginError("No data returned from URL={0}".format(res.url))
-
- for stream in json_res["media_container_streams"]:
- log.debug("media_container_status: {0}, media_container_online_status: {1}".format(
- json_res["media_container_status"], json_res["media_container_online_status"]))
- for stream_media in stream["stream_media"]:
- if stream_media["media_status"] == "STOPPED":
- hls_url = stream_media["media_meta"]["media_archive_url"]
- else:
- hls_url = stream_media["media_meta"]["media_url"]
-
- yield from HLSStream.parse_variant_playlist(self.session, hls_url).items()
-
-
-__plugin__ = WASD
|
{"golden_diff": "diff --git a/src/streamlink/plugins/wasd.py b/src/streamlink/plugins/wasd.py\ndeleted file mode 100644\n--- a/src/streamlink/plugins/wasd.py\n+++ /dev/null\n@@ -1,87 +0,0 @@\n-\"\"\"\n-$description Russian live-streaming social platform.\n-$url wasd.tv\n-$type live\n-\"\"\"\n-\n-import logging\n-import re\n-\n-from streamlink.plugin import Plugin, PluginError, pluginmatcher\n-from streamlink.plugin.api import validate\n-from streamlink.stream.hls import HLSStream\n-\n-\n-log = logging.getLogger(__name__)\n-\n-\n-@pluginmatcher(re.compile(\n- r\"https?://(?:www\\.)?wasd\\.tv/(?P<nickname>[^/]+)/?$\",\n-))\n-class WASD(Plugin):\n- _media_schema = validate.Schema({\n- \"user_id\": int,\n- \"media_container_online_status\": str,\n- \"media_container_status\": str,\n- \"media_container_streams\": [{\n- \"stream_media\": [{\n- \"media_id\": int,\n- \"media_meta\": {\n- \"media_url\": validate.any(str, None),\n- \"media_archive_url\": validate.any(str, None),\n- },\n- \"media_status\": validate.any(\"STOPPED\", \"RUNNING\"),\n- \"media_type\": \"HLS\",\n- }],\n- }],\n- })\n- _api_schema = validate.Schema({\n- \"result\":\n- validate.any(\n- _media_schema,\n- validate.all(list,\n- validate.get(0),\n- _media_schema),\n- [],\n- ),\n- }, validate.get(\"result\"))\n- _api_nicknames_schema = validate.Schema({\n- \"result\": {\n- \"channel_id\": int,\n- },\n- }, validate.get(\"result\"), validate.get(\"channel_id\"))\n-\n- def _get_streams(self):\n- nickname = self.match.group(\"nickname\")\n- res = self.session.http.get(f\"https://wasd.tv/api/channels/nicknames/{nickname}\")\n- channel_id = self.session.http.json(res, schema=self._api_nicknames_schema)\n-\n- res = self.session.http.get(\n- \"https://wasd.tv/api/v2/media-containers\",\n- params={\n- \"media_container_status\": \"RUNNING\",\n- \"limit\": \"1\",\n- \"offset\": \"0\",\n- \"channel_id\": channel_id,\n- \"media_container_type\": \"SINGLE,COOP\",\n- },\n- )\n-\n- json_res = self.session.http.json(res, schema=self._api_schema)\n- log.trace(\"{0!r}\".format(json_res))\n- if not json_res:\n- raise PluginError(\"No data returned from URL={0}\".format(res.url))\n-\n- for stream in json_res[\"media_container_streams\"]:\n- log.debug(\"media_container_status: {0}, media_container_online_status: {1}\".format(\n- json_res[\"media_container_status\"], json_res[\"media_container_online_status\"]))\n- for stream_media in stream[\"stream_media\"]:\n- if stream_media[\"media_status\"] == \"STOPPED\":\n- hls_url = stream_media[\"media_meta\"][\"media_archive_url\"]\n- else:\n- hls_url = stream_media[\"media_meta\"][\"media_url\"]\n-\n- yield from HLSStream.parse_variant_playlist(self.session, hls_url).items()\n-\n-\n-__plugin__ = WASD\n", "issue": "plugins.wasd: service gone\n### Checklist\r\n\r\n- [X] This is a [plugin issue](https://streamlink.github.io/plugins.html) and not [a different kind of issue](https://github.com/streamlink/streamlink/issues/new/choose)\r\n- [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink)\r\n- [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22)\r\n- [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master)\r\n\r\n### Streamlink version\r\n\r\n6.4.2\r\n\r\n### Description\r\n\r\nA few days ago, the service 
[gone](https://mts.ru/personal/novosti/2023-12-05/vstrechajte-polzovatelskuyu-videoplatformu-nuum). Now this [nuum.ru](https://nuum.ru).\r\n\r\nThough we could easily replace the plugin, but I'm not sure it's worth adding it to upstream, because it's a beta version.\r\n<details>\r\n\r\n```diff\r\ndiff --git a/src/streamlink/plugins/wasd.py b/src/streamlink/plugins/wasd.py\r\nindex 7d61304e..656a16eb 100644\r\n--- a/src/streamlink/plugins/wasd.py\r\n+++ b/src/streamlink/plugins/wasd.py\r\n@@ -16,7 +16,7 @@ log = logging.getLogger(__name__)\r\n \r\n \r\n @pluginmatcher(re.compile(\r\n- r\"https?://(?:www\\.)?wasd\\.tv/(?P<nickname>[^/]+)/?$\",\r\n+ r\"https?://(?:www\\.)?nuum\\.ru/channel/(?P<nickname>[^/]+)/?$\",\r\n ))\r\n class WASD(Plugin):\r\n _media_schema = validate.Schema({\r\n@@ -53,11 +53,11 @@ class WASD(Plugin):\r\n \r\n def _get_streams(self):\r\n nickname = self.match.group(\"nickname\")\r\n- res = self.session.http.get(f\"https://wasd.tv/api/channels/nicknames/{nickname}\")\r\n+ res = self.session.http.get(f\"https://nuum.ru/api/channels/nicknames/{nickname}\")\r\n channel_id = self.session.http.json(res, schema=self._api_nicknames_schema)\r\n \r\n res = self.session.http.get(\r\n- \"https://wasd.tv/api/v2/media-containers\",\r\n+ \"https://nuum.ru/api/v2/media-containers\",\r\n params={\r\n \"media_container_status\": \"RUNNING\",\r\n \"limit\": \"1\",\r\n```\r\n</details>\r\n\r\n### Debug log\r\n\r\n\n", "before_files": [{"content": "\"\"\"\n$description Russian live-streaming social platform.\n$url wasd.tv\n$type live\n\"\"\"\n\nimport logging\nimport re\n\nfrom streamlink.plugin import Plugin, PluginError, pluginmatcher\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream.hls import HLSStream\n\n\nlog = logging.getLogger(__name__)\n\n\n@pluginmatcher(re.compile(\n r\"https?://(?:www\\.)?wasd\\.tv/(?P<nickname>[^/]+)/?$\",\n))\nclass WASD(Plugin):\n _media_schema = validate.Schema({\n \"user_id\": int,\n \"media_container_online_status\": str,\n \"media_container_status\": str,\n \"media_container_streams\": [{\n \"stream_media\": [{\n \"media_id\": int,\n \"media_meta\": {\n \"media_url\": validate.any(str, None),\n \"media_archive_url\": validate.any(str, None),\n },\n \"media_status\": validate.any(\"STOPPED\", \"RUNNING\"),\n \"media_type\": \"HLS\",\n }],\n }],\n })\n _api_schema = validate.Schema({\n \"result\":\n validate.any(\n _media_schema,\n validate.all(list,\n validate.get(0),\n _media_schema),\n [],\n ),\n }, validate.get(\"result\"))\n _api_nicknames_schema = validate.Schema({\n \"result\": {\n \"channel_id\": int,\n },\n }, validate.get(\"result\"), validate.get(\"channel_id\"))\n\n def _get_streams(self):\n nickname = self.match.group(\"nickname\")\n res = self.session.http.get(f\"https://wasd.tv/api/channels/nicknames/{nickname}\")\n channel_id = self.session.http.json(res, schema=self._api_nicknames_schema)\n\n res = self.session.http.get(\n \"https://wasd.tv/api/v2/media-containers\",\n params={\n \"media_container_status\": \"RUNNING\",\n \"limit\": \"1\",\n \"offset\": \"0\",\n \"channel_id\": channel_id,\n \"media_container_type\": \"SINGLE,COOP\",\n },\n )\n\n json_res = self.session.http.json(res, schema=self._api_schema)\n log.trace(\"{0!r}\".format(json_res))\n if not json_res:\n raise PluginError(\"No data returned from URL={0}\".format(res.url))\n\n for stream in json_res[\"media_container_streams\"]:\n log.debug(\"media_container_status: {0}, media_container_online_status: {1}\".format(\n json_res[\"media_container_status\"], 
json_res[\"media_container_online_status\"]))\n for stream_media in stream[\"stream_media\"]:\n if stream_media[\"media_status\"] == \"STOPPED\":\n hls_url = stream_media[\"media_meta\"][\"media_archive_url\"]\n else:\n hls_url = stream_media[\"media_meta\"][\"media_url\"]\n\n yield from HLSStream.parse_variant_playlist(self.session, hls_url).items()\n\n\n__plugin__ = WASD\n", "path": "src/streamlink/plugins/wasd.py"}], "after_files": [{"content": null, "path": "src/streamlink/plugins/wasd.py"}]}
| 1,658 | 742 |