problem_id (stringlengths 18-22) | source (stringclasses, 1 value) | task_type (stringclasses, 1 value) | in_source_id (stringlengths 13-58) | prompt (stringlengths 1.1k-25.4k) | golden_diff (stringlengths 145-5.13k) | verification_info (stringlengths 582-39.1k) | num_tokens (int64, 271-4.1k) | num_tokens_diff (int64, 47-1.02k) |
---|---|---|---|---|---|---|---|---|
gh_patches_debug_4181 | rasdani/github-patches | git_diff | learningequality__kolibri-4903 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
When starting Exams, Notifications and Attempt logs can be out of sync
### Observed behavior
1. As a learner, start an exam, but do not do any of the questions
1. After some time, a Notification will be recorded for the exam with a status of "Started"
1. However, querying the class summary API, the 'exam learner status' for that exam will have a status of "NotStarted".
This can lead to an inconsistency, where the Notification will update the in-memory classSummary data and cause the dashboard and reports to show that 1 learner has "started" an exam, but if you were to refresh (and get the on-server class summary data without the updating notification), it will revert to showing 0 learners starting the exam.
### Expected behavior
Since the UI intends to use Notifications to patch class summary in real time, the two notions of "Exam Started" should match to avoid situations like the one described above.
### User-facing consequences
inconsistent / fluctuating values in reports
### Steps to reproduce
<!--
Precise steps that someone else can follow in order to see this behavior
-->
…
### Context
0.12.0 a 7
--- END ISSUE ---
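The mismatch described above comes down to two different predicates for "started". A minimal, self-contained sketch of how they can disagree, and how the patch shown further down aligns them (plain Python; the class and function names are hypothetical simplifications, not Kolibri code):
```python
# Hypothetical simplification of the two "started" predicates described in the issue.
NOT_STARTED, STARTED, COMPLETED = "NotStarted", "Started", "Completed"

class FakeExamLog:
    def __init__(self, closed, attempts):
        self.closed = closed
        self.attempts = attempts  # stands in for exam_log.attemptlogs

def notification_status(log):
    # Per the issue, a "Started" notification is recorded as soon as the exam
    # is opened, even before any question is attempted.
    return COMPLETED if log.closed else STARTED

def summary_status_old(log):
    # Mirrors the class summary serializer's old logic: requires an attempt.
    if log.closed:
        return COMPLETED
    return STARTED if len(log.attempts) > 0 else NOT_STARTED

def summary_status_new(log):
    # Mirrors the patched logic: any open ExamLog counts as started.
    return COMPLETED if log.closed else STARTED

opened_untouched = FakeExamLog(closed=False, attempts=[])
print(notification_status(opened_untouched), summary_status_old(opened_untouched))  # Started NotStarted
print(notification_status(opened_untouched), summary_status_new(opened_untouched))  # Started Started
```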
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kolibri/plugins/coach/class_summary_api.py`
Content:
```
1 from django.db.models import Max
2 from django.db.models import Sum
3 from django.shortcuts import get_object_or_404
4 from rest_framework import serializers
5 from rest_framework import viewsets
6 from rest_framework.response import Response
7
8 from kolibri.core.auth import models as auth_models
9 from kolibri.core.content.models import ContentNode
10 from kolibri.core.exams.models import Exam
11 from kolibri.core.lessons.models import Lesson
12 from kolibri.core.logger import models as logger_models
13 from kolibri.core.notifications.models import LearnerProgressNotification
14 from kolibri.core.notifications.models import NotificationEventType
15
16
17 # Intended to match NotificationEventType
18 NOT_STARTED = "NotStarted"
19 STARTED = "Started"
20 HELP_NEEDED = "HelpNeeded"
21 COMPLETED = "Completed"
22
23
24 def content_status_serializer(lesson_data, learners_data, classroom):
25
26 # First generate a unique set of content node ids from all the lessons
27 lesson_node_ids = set()
28 for lesson in lesson_data:
29 lesson_node_ids |= set(lesson.get("node_ids"))
30
31 # Now create a map of content_id to node_id so that we can map between lessons, and notifications
32 # which use the node id, and summary logs, which use content_id
33 content_map = {n[0]: n[1] for n in ContentNode.objects.filter(id__in=lesson_node_ids).values_list("content_id", "id")}
34
35 # Get all the values we need from the summary logs to be able to summarize current status on the
36 # relevant content items.
37 content_log_values = logger_models.ContentSummaryLog.objects.filter(
38 content_id__in=set(content_map.keys()), user__in=[learner["id"] for learner in learners_data]
39 ).values("user_id", "content_id", "end_timestamp", "time_spent", "progress")
40
41 # In order to make the lookup speedy, generate a unique key for each user/node that we find
42 # listed in the needs help notifications that are relevant. We can then just check
43 # existence of this key in the set in order to see whether this user has been flagged as needing
44 # help.
45 lookup_key = "{user_id}-{node_id}"
46 needs_help = {
47 lookup_key.format(user_id=n[0], node_id=n[1]): n[2] for n in LearnerProgressNotification.objects.filter(
48 classroom_id=classroom.id,
49 notification_event=NotificationEventType.Help,
50 lesson_id__in=[lesson["id"] for lesson in lesson_data],
51 ).values_list("user_id", "contentnode_id", "timestamp")
52 }
53
54 # In case a previously flagged learner has since completed an exercise, check all the completed
55 # notifications also
56 completed = {
57 lookup_key.format(user_id=n[0], node_id=n[1]): n[2] for n in LearnerProgressNotification.objects.filter(
58 classroom_id=classroom.id,
59 notification_event=NotificationEventType.Completed,
60 lesson_id__in=[lesson["id"] for lesson in lesson_data],
61 ).values_list("user_id", "contentnode_id", "timestamp")
62 }
63
64 def get_status(log):
65 """
66 Read the dict from a content summary log values query and return the status
67 In the case that we have found a needs help notification for the user and content node
68 in question, return that they need help, otherwise return status based on their
69 current progress.
70 """
71 content_id = log["content_id"]
72 if content_id in content_map:
73 # Don't try to lookup anything if we don't know the content_id
74 # node_id mapping - might happen if a channel has since been deleted
75 key = lookup_key.format(user_id=log["user_id"], node_id=content_map[content_id])
76 if key in needs_help:
77 # Now check if we have not already registered completion of the content node
78 # or if we have and the timestamp is earlier than that on the needs_help event
79 if key not in completed or completed[key] < needs_help[key]:
80 return HELP_NEEDED
81 if log["progress"] == 1:
82 return COMPLETED
83 elif log["progress"] == 0:
84 return NOT_STARTED
85 return STARTED
86
87 def map_content_logs(log):
88 """
89 Parse the content logs to return objects in the expected format.
90 """
91 return {
92 "learner_id": log["user_id"],
93 "content_id": log["content_id"],
94 "status": get_status(log),
95 "last_activity": log["end_timestamp"],
96 "time_spent": log["time_spent"],
97 }
98
99 return map(map_content_logs, content_log_values)
100
101
102 class ExamStatusSerializer(serializers.ModelSerializer):
103 status = serializers.SerializerMethodField()
104 exam_id = serializers.PrimaryKeyRelatedField(source="exam", read_only=True)
105 learner_id = serializers.PrimaryKeyRelatedField(source="user", read_only=True)
106 last_activity = serializers.CharField()
107 num_correct = serializers.SerializerMethodField()
108
109 def get_status(self, exam_log):
110 if exam_log.closed:
111 return COMPLETED
112 elif exam_log.attemptlogs.values_list("item").count() > 0:
113 return STARTED
114 return NOT_STARTED
115
116 def get_num_correct(self, exam_log):
117 return (
118 exam_log.attemptlogs.values_list('item')
119 .order_by('completion_timestamp')
120 .distinct()
121 .aggregate(Sum('correct'))
122 .get('correct__sum')
123 )
124
125 class Meta:
126 model = logger_models.ExamLog
127 fields = ("exam_id", "learner_id", "status", "last_activity", "num_correct")
128
129
130 class GroupSerializer(serializers.ModelSerializer):
131 member_ids = serializers.SerializerMethodField()
132
133 def get_member_ids(self, group):
134 return group.get_members().values_list("id", flat=True)
135
136 class Meta:
137 model = auth_models.LearnerGroup
138 fields = ("id", "name", "member_ids")
139
140
141 class UserSerializer(serializers.ModelSerializer):
142 name = serializers.CharField(source="full_name")
143
144 class Meta:
145 model = auth_models.FacilityUser
146 fields = ("id", "name", "username")
147
148
149 class LessonNodeIdsField(serializers.Field):
150 def to_representation(self, values):
151 return [value["contentnode_id"] for value in values]
152
153
154 class LessonAssignmentsField(serializers.RelatedField):
155 def to_representation(self, assignment):
156 return assignment.collection.id
157
158
159 class LessonSerializer(serializers.ModelSerializer):
160 active = serializers.BooleanField(source="is_active")
161 node_ids = LessonNodeIdsField(default=[], source="resources")
162
163 # classrooms are in here, and filtered out later
164 groups = LessonAssignmentsField(
165 many=True, read_only=True, source="lesson_assignments"
166 )
167
168 class Meta:
169 model = Lesson
170 fields = ("id", "title", "active", "node_ids", "groups")
171
172
173 class ExamQuestionSourcesField(serializers.Field):
174 def to_representation(self, values):
175 return values
176
177
178 class ExamAssignmentsField(serializers.RelatedField):
179 def to_representation(self, assignment):
180 return assignment.collection.id
181
182
183 class ExamSerializer(serializers.ModelSerializer):
184
185 question_sources = ExamQuestionSourcesField(default=[])
186
187 # classes are in here, and filtered out later
188 groups = ExamAssignmentsField(many=True, read_only=True, source="assignments")
189
190 class Meta:
191 model = Exam
192 fields = ("id", "title", "active", "question_sources", "groups")
193
194
195 class ContentSerializer(serializers.ModelSerializer):
196 node_id = serializers.CharField(source="id")
197
198 class Meta:
199 model = ContentNode
200 fields = ("node_id", "content_id", "title", "kind")
201
202
203 def data(Serializer, queryset):
204 return Serializer(queryset, many=True).data
205
206
207 class ClassSummaryViewSet(viewsets.ViewSet):
208 def retrieve(self, request, pk):
209 classroom = get_object_or_404(auth_models.Classroom, id=pk)
210 query_learners = classroom.get_members()
211 query_lesson = Lesson.objects.filter(collection=pk)
212 query_exams = Exam.objects.filter(collection=pk)
213 query_exam_logs = logger_models.ExamLog.objects.filter(
214 exam__in=query_exams
215 ).annotate(last_activity=Max("attemptlogs__end_timestamp"))
216
217 lesson_data = data(LessonSerializer, query_lesson)
218 exam_data = data(ExamSerializer, query_exams)
219
220 # filter classes out of exam assignments
221 for exam in exam_data:
222 exam["groups"] = [g for g in exam["groups"] if g != pk]
223
224 # filter classes out of lesson assignments
225 for lesson in lesson_data:
226 lesson["groups"] = [g for g in lesson["groups"] if g != pk]
227
228 all_node_ids = set()
229 for lesson in lesson_data:
230 all_node_ids |= set(lesson.get("node_ids"))
231 for exam in exam_data:
232 exam_node_ids = [question['exercise_id'] for question in exam.get("question_sources")]
233 all_node_ids |= set(exam_node_ids)
234
235 query_content = ContentNode.objects.filter(id__in=all_node_ids)
236
237 learners_data = data(UserSerializer, query_learners)
238
239 output = {
240 "id": pk,
241 "name": classroom.name,
242 "coaches": data(UserSerializer, classroom.get_coaches()),
243 "learners": learners_data,
244 "groups": data(GroupSerializer, classroom.get_learner_groups()),
245 "exams": exam_data,
246 "exam_learner_status": data(ExamStatusSerializer, query_exam_logs),
247 "content": data(ContentSerializer, query_content),
248 "content_learner_status": content_status_serializer(lesson_data, learners_data, classroom),
249 "lessons": lesson_data,
250 }
251
252 return Response(output)
253
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/kolibri/plugins/coach/class_summary_api.py b/kolibri/plugins/coach/class_summary_api.py
--- a/kolibri/plugins/coach/class_summary_api.py
+++ b/kolibri/plugins/coach/class_summary_api.py
@@ -108,9 +108,8 @@
def get_status(self, exam_log):
if exam_log.closed:
return COMPLETED
- elif exam_log.attemptlogs.values_list("item").count() > 0:
+ else:
return STARTED
- return NOT_STARTED
def get_num_correct(self, exam_log):
return (
|
{"golden_diff": "diff --git a/kolibri/plugins/coach/class_summary_api.py b/kolibri/plugins/coach/class_summary_api.py\n--- a/kolibri/plugins/coach/class_summary_api.py\n+++ b/kolibri/plugins/coach/class_summary_api.py\n@@ -108,9 +108,8 @@\n def get_status(self, exam_log):\n if exam_log.closed:\n return COMPLETED\n- elif exam_log.attemptlogs.values_list(\"item\").count() > 0:\n+ else:\n return STARTED\n- return NOT_STARTED\n \n def get_num_correct(self, exam_log):\n return (\n", "issue": "When starting Exams, Notifications and Attempt logs can be out of sync\n\r\n### Observed behavior\r\n\r\n1. As a learner, start an exam, but do not do any of the questions\r\n1. After some time, a Notification will be recorded for the exam with a status of \"Started\"\r\n1. However, querying the class summary API, the 'exam learner status' for that exam will have a status of \"NotStarted\".\r\n\r\nThis can lead to an inconsistency, where the Notification will update the in-memory classSummary data and cause the dashboard and reports to show that 1 learner has \"started\" an exam, but if you were to refresh (and get the on-server class summary data without the updating notification), it will revert to showing 0 learners starting the exam.\r\n\r\n### Expected behavior\r\n\r\nSince, the UI intends to use Notifications to patch class summary in real time, the two notions of \"Exam Started\" should match to avoid situations like the on described above.\r\n\r\n\r\n### User-facing consequences\r\n\r\ninconsistent / fluctuating values in reports\r\n\r\n\r\n### Steps to reproduce\r\n<!--\r\nPrecise steps that someone else can follow in order to see this behavior\r\n-->\r\n\r\n\u2026\r\n\r\n### Context\r\n\r\n0.12.0 a 7\n", "before_files": [{"content": "from django.db.models import Max\nfrom django.db.models import Sum\nfrom django.shortcuts import get_object_or_404\nfrom rest_framework import serializers\nfrom rest_framework import viewsets\nfrom rest_framework.response import Response\n\nfrom kolibri.core.auth import models as auth_models\nfrom kolibri.core.content.models import ContentNode\nfrom kolibri.core.exams.models import Exam\nfrom kolibri.core.lessons.models import Lesson\nfrom kolibri.core.logger import models as logger_models\nfrom kolibri.core.notifications.models import LearnerProgressNotification\nfrom kolibri.core.notifications.models import NotificationEventType\n\n\n# Intended to match NotificationEventType\nNOT_STARTED = \"NotStarted\"\nSTARTED = \"Started\"\nHELP_NEEDED = \"HelpNeeded\"\nCOMPLETED = \"Completed\"\n\n\ndef content_status_serializer(lesson_data, learners_data, classroom):\n\n # First generate a unique set of content node ids from all the lessons\n lesson_node_ids = set()\n for lesson in lesson_data:\n lesson_node_ids |= set(lesson.get(\"node_ids\"))\n\n # Now create a map of content_id to node_id so that we can map between lessons, and notifications\n # which use the node id, and summary logs, which use content_id\n content_map = {n[0]: n[1] for n in ContentNode.objects.filter(id__in=lesson_node_ids).values_list(\"content_id\", \"id\")}\n\n # Get all the values we need from the summary logs to be able to summarize current status on the\n # relevant content items.\n content_log_values = logger_models.ContentSummaryLog.objects.filter(\n content_id__in=set(content_map.keys()), user__in=[learner[\"id\"] for learner in learners_data]\n ).values(\"user_id\", \"content_id\", \"end_timestamp\", \"time_spent\", \"progress\")\n\n # In order to make the lookup speedy, generate a unique key 
for each user/node that we find\n # listed in the needs help notifications that are relevant. We can then just check\n # existence of this key in the set in order to see whether this user has been flagged as needing\n # help.\n lookup_key = \"{user_id}-{node_id}\"\n needs_help = {\n lookup_key.format(user_id=n[0], node_id=n[1]): n[2] for n in LearnerProgressNotification.objects.filter(\n classroom_id=classroom.id,\n notification_event=NotificationEventType.Help,\n lesson_id__in=[lesson[\"id\"] for lesson in lesson_data],\n ).values_list(\"user_id\", \"contentnode_id\", \"timestamp\")\n }\n\n # In case a previously flagged learner has since completed an exercise, check all the completed\n # notifications also\n completed = {\n lookup_key.format(user_id=n[0], node_id=n[1]): n[2] for n in LearnerProgressNotification.objects.filter(\n classroom_id=classroom.id,\n notification_event=NotificationEventType.Completed,\n lesson_id__in=[lesson[\"id\"] for lesson in lesson_data],\n ).values_list(\"user_id\", \"contentnode_id\", \"timestamp\")\n }\n\n def get_status(log):\n \"\"\"\n Read the dict from a content summary log values query and return the status\n In the case that we have found a needs help notification for the user and content node\n in question, return that they need help, otherwise return status based on their\n current progress.\n \"\"\"\n content_id = log[\"content_id\"]\n if content_id in content_map:\n # Don't try to lookup anything if we don't know the content_id\n # node_id mapping - might happen if a channel has since been deleted\n key = lookup_key.format(user_id=log[\"user_id\"], node_id=content_map[content_id])\n if key in needs_help:\n # Now check if we have not already registered completion of the content node\n # or if we have and the timestamp is earlier than that on the needs_help event\n if key not in completed or completed[key] < needs_help[key]:\n return HELP_NEEDED\n if log[\"progress\"] == 1:\n return COMPLETED\n elif log[\"progress\"] == 0:\n return NOT_STARTED\n return STARTED\n\n def map_content_logs(log):\n \"\"\"\n Parse the content logs to return objects in the expected format.\n \"\"\"\n return {\n \"learner_id\": log[\"user_id\"],\n \"content_id\": log[\"content_id\"],\n \"status\": get_status(log),\n \"last_activity\": log[\"end_timestamp\"],\n \"time_spent\": log[\"time_spent\"],\n }\n\n return map(map_content_logs, content_log_values)\n\n\nclass ExamStatusSerializer(serializers.ModelSerializer):\n status = serializers.SerializerMethodField()\n exam_id = serializers.PrimaryKeyRelatedField(source=\"exam\", read_only=True)\n learner_id = serializers.PrimaryKeyRelatedField(source=\"user\", read_only=True)\n last_activity = serializers.CharField()\n num_correct = serializers.SerializerMethodField()\n\n def get_status(self, exam_log):\n if exam_log.closed:\n return COMPLETED\n elif exam_log.attemptlogs.values_list(\"item\").count() > 0:\n return STARTED\n return NOT_STARTED\n\n def get_num_correct(self, exam_log):\n return (\n exam_log.attemptlogs.values_list('item')\n .order_by('completion_timestamp')\n .distinct()\n .aggregate(Sum('correct'))\n .get('correct__sum')\n )\n\n class Meta:\n model = logger_models.ExamLog\n fields = (\"exam_id\", \"learner_id\", \"status\", \"last_activity\", \"num_correct\")\n\n\nclass GroupSerializer(serializers.ModelSerializer):\n member_ids = serializers.SerializerMethodField()\n\n def get_member_ids(self, group):\n return group.get_members().values_list(\"id\", flat=True)\n\n class Meta:\n model = auth_models.LearnerGroup\n 
fields = (\"id\", \"name\", \"member_ids\")\n\n\nclass UserSerializer(serializers.ModelSerializer):\n name = serializers.CharField(source=\"full_name\")\n\n class Meta:\n model = auth_models.FacilityUser\n fields = (\"id\", \"name\", \"username\")\n\n\nclass LessonNodeIdsField(serializers.Field):\n def to_representation(self, values):\n return [value[\"contentnode_id\"] for value in values]\n\n\nclass LessonAssignmentsField(serializers.RelatedField):\n def to_representation(self, assignment):\n return assignment.collection.id\n\n\nclass LessonSerializer(serializers.ModelSerializer):\n active = serializers.BooleanField(source=\"is_active\")\n node_ids = LessonNodeIdsField(default=[], source=\"resources\")\n\n # classrooms are in here, and filtered out later\n groups = LessonAssignmentsField(\n many=True, read_only=True, source=\"lesson_assignments\"\n )\n\n class Meta:\n model = Lesson\n fields = (\"id\", \"title\", \"active\", \"node_ids\", \"groups\")\n\n\nclass ExamQuestionSourcesField(serializers.Field):\n def to_representation(self, values):\n return values\n\n\nclass ExamAssignmentsField(serializers.RelatedField):\n def to_representation(self, assignment):\n return assignment.collection.id\n\n\nclass ExamSerializer(serializers.ModelSerializer):\n\n question_sources = ExamQuestionSourcesField(default=[])\n\n # classes are in here, and filtered out later\n groups = ExamAssignmentsField(many=True, read_only=True, source=\"assignments\")\n\n class Meta:\n model = Exam\n fields = (\"id\", \"title\", \"active\", \"question_sources\", \"groups\")\n\n\nclass ContentSerializer(serializers.ModelSerializer):\n node_id = serializers.CharField(source=\"id\")\n\n class Meta:\n model = ContentNode\n fields = (\"node_id\", \"content_id\", \"title\", \"kind\")\n\n\ndef data(Serializer, queryset):\n return Serializer(queryset, many=True).data\n\n\nclass ClassSummaryViewSet(viewsets.ViewSet):\n def retrieve(self, request, pk):\n classroom = get_object_or_404(auth_models.Classroom, id=pk)\n query_learners = classroom.get_members()\n query_lesson = Lesson.objects.filter(collection=pk)\n query_exams = Exam.objects.filter(collection=pk)\n query_exam_logs = logger_models.ExamLog.objects.filter(\n exam__in=query_exams\n ).annotate(last_activity=Max(\"attemptlogs__end_timestamp\"))\n\n lesson_data = data(LessonSerializer, query_lesson)\n exam_data = data(ExamSerializer, query_exams)\n\n # filter classes out of exam assignments\n for exam in exam_data:\n exam[\"groups\"] = [g for g in exam[\"groups\"] if g != pk]\n\n # filter classes out of lesson assignments\n for lesson in lesson_data:\n lesson[\"groups\"] = [g for g in lesson[\"groups\"] if g != pk]\n\n all_node_ids = set()\n for lesson in lesson_data:\n all_node_ids |= set(lesson.get(\"node_ids\"))\n for exam in exam_data:\n exam_node_ids = [question['exercise_id'] for question in exam.get(\"question_sources\")]\n all_node_ids |= set(exam_node_ids)\n\n query_content = ContentNode.objects.filter(id__in=all_node_ids)\n\n learners_data = data(UserSerializer, query_learners)\n\n output = {\n \"id\": pk,\n \"name\": classroom.name,\n \"coaches\": data(UserSerializer, classroom.get_coaches()),\n \"learners\": learners_data,\n \"groups\": data(GroupSerializer, classroom.get_learner_groups()),\n \"exams\": exam_data,\n \"exam_learner_status\": data(ExamStatusSerializer, query_exam_logs),\n \"content\": data(ContentSerializer, query_content),\n \"content_learner_status\": content_status_serializer(lesson_data, learners_data, classroom),\n \"lessons\": lesson_data,\n 
}\n\n return Response(output)\n", "path": "kolibri/plugins/coach/class_summary_api.py"}], "after_files": [{"content": "from django.db.models import Max\nfrom django.db.models import Sum\nfrom django.shortcuts import get_object_or_404\nfrom rest_framework import serializers\nfrom rest_framework import viewsets\nfrom rest_framework.response import Response\n\nfrom kolibri.core.auth import models as auth_models\nfrom kolibri.core.content.models import ContentNode\nfrom kolibri.core.exams.models import Exam\nfrom kolibri.core.lessons.models import Lesson\nfrom kolibri.core.logger import models as logger_models\nfrom kolibri.core.notifications.models import LearnerProgressNotification\nfrom kolibri.core.notifications.models import NotificationEventType\n\n\nNOT_STARTED = \"not_started\"\nSTARTED = \"started\"\nHELP_NEEDED = \"help_needed\"\nCOMPLETED = \"completed\"\n\n\ndef content_status_serializer(lesson_data, learners_data, classroom):\n\n # First generate a unique set of content node ids from all the lessons\n lesson_node_ids = set()\n for lesson in lesson_data:\n lesson_node_ids |= set(lesson.get(\"node_ids\"))\n\n # Now create a map of content_id to node_id so that we can map between lessons, and notifications\n # which use the node id, and summary logs, which use content_id\n content_map = {n[0]: n[1] for n in ContentNode.objects.filter(id__in=lesson_node_ids).values_list(\"content_id\", \"id\")}\n\n # Get all the values we need from the summary logs to be able to summarize current status on the\n # relevant content items.\n content_log_values = logger_models.ContentSummaryLog.objects.filter(\n content_id__in=set(content_map.keys()), user__in=[learner[\"id\"] for learner in learners_data]\n ).values(\"user_id\", \"content_id\", \"end_timestamp\", \"time_spent\", \"progress\")\n\n # In order to make the lookup speedy, generate a unique key for each user/node that we find\n # listed in the needs help notifications that are relevant. 
We can then just check\n # existence of this key in the set in order to see whether this user has been flagged as needing\n # help.\n lookup_key = \"{user_id}-{node_id}\"\n needs_help = {\n lookup_key.format(user_id=n[0], node_id=n[1]): n[2] for n in LearnerProgressNotification.objects.filter(\n classroom_id=classroom.id,\n notification_event=NotificationEventType.Help,\n lesson_id__in=[lesson[\"id\"] for lesson in lesson_data],\n ).values_list(\"user_id\", \"contentnode_id\", \"timestamp\")\n }\n\n # In case a previously flagged learner has since completed an exercise, check all the completed\n # notifications also\n completed = {\n lookup_key.format(user_id=n[0], node_id=n[1]): n[2] for n in LearnerProgressNotification.objects.filter(\n classroom_id=classroom.id,\n notification_event=NotificationEventType.Completed,\n lesson_id__in=[lesson[\"id\"] for lesson in lesson_data],\n ).values_list(\"user_id\", \"contentnode_id\", \"timestamp\")\n }\n\n def get_status(log):\n \"\"\"\n Read the dict from a content summary log values query and return the status\n In the case that we have found a needs help notification for the user and content node\n in question, return that they need help, otherwise return status based on their\n current progress.\n \"\"\"\n content_id = log[\"content_id\"]\n if content_id in content_map:\n # Don't try to lookup anything if we don't know the content_id\n # node_id mapping - might happen if a channel has since been deleted\n key = lookup_key.format(user_id=log[\"user_id\"], node_id=content_map[content_id])\n if key in needs_help:\n # Now check if we have not already registered completion of the content node\n # or if we have and the timestamp is earlier than that on the needs_help event\n if key not in completed or completed[key] < needs_help[key]:\n return HELP_NEEDED\n if log[\"progress\"] == 1:\n return COMPLETED\n elif log[\"progress\"] == 0:\n return NOT_STARTED\n return STARTED\n\n def map_content_logs(log):\n \"\"\"\n Parse the content logs to return objects in the expected format.\n \"\"\"\n return {\n \"learner_id\": log[\"user_id\"],\n \"content_id\": log[\"content_id\"],\n \"status\": get_status(log),\n \"last_activity\": log[\"end_timestamp\"],\n \"time_spent\": log[\"time_spent\"],\n }\n\n return map(map_content_logs, content_log_values)\n\n\nclass ExamStatusSerializer(serializers.ModelSerializer):\n status = serializers.SerializerMethodField()\n exam_id = serializers.PrimaryKeyRelatedField(source=\"exam\", read_only=True)\n learner_id = serializers.PrimaryKeyRelatedField(source=\"user\", read_only=True)\n last_activity = serializers.CharField()\n num_correct = serializers.SerializerMethodField()\n\n def get_status(self, exam_log):\n if exam_log.closed:\n return COMPLETED\n else:\n return STARTED\n\n def get_num_correct(self, exam_log):\n return (\n exam_log.attemptlogs.values_list('item')\n .order_by('completion_timestamp')\n .distinct()\n .aggregate(Sum('correct'))\n .get('correct__sum')\n )\n\n class Meta:\n model = logger_models.ExamLog\n fields = (\"exam_id\", \"learner_id\", \"status\", \"last_activity\", \"num_correct\")\n\n\nclass GroupSerializer(serializers.ModelSerializer):\n member_ids = serializers.SerializerMethodField()\n\n def get_member_ids(self, group):\n return group.get_members().values_list(\"id\", flat=True)\n\n class Meta:\n model = auth_models.LearnerGroup\n fields = (\"id\", \"name\", \"member_ids\")\n\n\nclass UserSerializer(serializers.ModelSerializer):\n name = serializers.CharField(source=\"full_name\")\n\n class Meta:\n 
model = auth_models.FacilityUser\n fields = (\"id\", \"name\", \"username\")\n\n\nclass LessonNodeIdsField(serializers.Field):\n def to_representation(self, values):\n return [value[\"contentnode_id\"] for value in values]\n\n\nclass LessonAssignmentsField(serializers.RelatedField):\n def to_representation(self, assignment):\n return assignment.collection.id\n\n\nclass LessonSerializer(serializers.ModelSerializer):\n active = serializers.BooleanField(source=\"is_active\")\n node_ids = LessonNodeIdsField(default=[], source=\"resources\")\n\n # classrooms are in here, and filtered out later\n groups = LessonAssignmentsField(\n many=True, read_only=True, source=\"lesson_assignments\"\n )\n\n class Meta:\n model = Lesson\n fields = (\"id\", \"title\", \"active\", \"node_ids\", \"groups\")\n\n\nclass ExamQuestionSourcesField(serializers.Field):\n def to_representation(self, values):\n return values\n\n\nclass ExamAssignmentsField(serializers.RelatedField):\n def to_representation(self, assignment):\n return assignment.collection.id\n\n\nclass ExamSerializer(serializers.ModelSerializer):\n\n question_sources = ExamQuestionSourcesField(default=[])\n\n # classes are in here, and filtered out later\n groups = ExamAssignmentsField(many=True, read_only=True, source=\"assignments\")\n\n class Meta:\n model = Exam\n fields = (\"id\", \"title\", \"active\", \"question_sources\", \"groups\")\n\n\nclass ContentSerializer(serializers.ModelSerializer):\n node_id = serializers.CharField(source=\"id\")\n\n class Meta:\n model = ContentNode\n fields = (\"node_id\", \"content_id\", \"title\", \"kind\")\n\n\ndef data(Serializer, queryset):\n return Serializer(queryset, many=True).data\n\n\nclass ClassSummaryViewSet(viewsets.ViewSet):\n def retrieve(self, request, pk):\n classroom = get_object_or_404(auth_models.Classroom, id=pk)\n query_learners = classroom.get_members()\n query_lesson = Lesson.objects.filter(collection=pk)\n query_exams = Exam.objects.filter(collection=pk)\n query_exam_logs = logger_models.ExamLog.objects.filter(\n exam__in=query_exams\n ).annotate(last_activity=Max(\"attemptlogs__end_timestamp\"))\n\n lesson_data = data(LessonSerializer, query_lesson)\n exam_data = data(ExamSerializer, query_exams)\n\n # filter classes out of exam assignments\n for exam in exam_data:\n exam[\"groups\"] = [g for g in exam[\"groups\"] if g != pk]\n\n # filter classes out of lesson assignments\n for lesson in lesson_data:\n lesson[\"groups\"] = [g for g in lesson[\"groups\"] if g != pk]\n\n all_node_ids = set()\n for lesson in lesson_data:\n all_node_ids |= set(lesson.get(\"node_ids\"))\n for exam in exam_data:\n exam_node_ids = [question['exercise_id'] for question in exam.get(\"question_sources\")]\n all_node_ids |= set(exam_node_ids)\n\n query_content = ContentNode.objects.filter(id__in=all_node_ids)\n\n learners_data = data(UserSerializer, query_learners)\n\n output = {\n \"id\": pk,\n \"name\": classroom.name,\n \"coaches\": data(UserSerializer, classroom.get_coaches()),\n \"learners\": learners_data,\n \"groups\": data(GroupSerializer, classroom.get_learner_groups()),\n \"exams\": exam_data,\n \"exam_learner_status\": data(ExamStatusSerializer, query_exam_logs),\n \"content\": data(ContentSerializer, query_content),\n \"content_learner_status\": content_status_serializer(lesson_data, learners_data, classroom),\n \"lessons\": lesson_data,\n }\n\n return Response(output)\n", "path": "kolibri/plugins/coach/class_summary_api.py"}]}
| 3,248 | 133 |
gh_patches_debug_692 | rasdani/github-patches | git_diff | hylang__hy-2312 |
--- BEGIN ISSUE ---
New release
It's time for a new release soon. Here are the things I'd like to get done, or at least try to get done, first. If you think you'll make a PR soon that you'd also like to get in for this release, mention that, too. Volunteers to take these tasks on are also welcome.
- ~#2291~; ~#2292~ - These are more difficult than I thought. I don't think I'm going to make the release wait for them.
- Install bytecode (for Hy and for Hyrule): hylang/hyrule#42; at least partly addresses #1747
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/conf.py`
Content:
```
1 # This file is execfile()d with the current directory set to its containing dir.
2
3 import html
4 import os
5 import re
6 import sys
7 import time
8
9 sys.path.insert(0, os.path.abspath(".."))
10
11 extensions = [
12 "sphinx.ext.napoleon",
13 "sphinx.ext.intersphinx",
14 "sphinx.ext.autodoc",
15 "sphinx.ext.viewcode",
16 "sphinxcontrib.hydomain",
17 ]
18
19 from get_version import __version__ as hy_version
20
21 # Read the Docs might dirty its checkout, so strip the dirty flag.
22 hy_version = re.sub(r"[+.]dirty\Z", "", hy_version)
23
24 templates_path = ["_templates"]
25 source_suffix = ".rst"
26
27 master_doc = "index"
28
29 # General information about the project.
30 project = "hy"
31 copyright = "%s the authors" % time.strftime("%Y")
32
33 # The version info for the project you're documenting, acts as replacement for
34 # |version| and |release|, also used in various other places throughout the
35 # built documents.
36 #
37 # The short X.Y version.
38 version = ".".join(hy_version.split(".")[:-1])
39 # The full version, including alpha/beta/rc tags.
40 release = hy_version
41 hy_descriptive_version = html.escape(hy_version)
42 if "+" in hy_version:
43 hy_descriptive_version += " <strong style='color: red;'>(unstable)</strong>"
44
45 exclude_patterns = ["_build", "coreteam.rst"]
46 add_module_names = True
47
48 pygments_style = "sphinx"
49
50 import sphinx_rtd_theme
51
52 html_theme = "sphinx_rtd_theme"
53 html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
54
55 # Add any paths that contain custom static files (such as style sheets) here,
56 # relative to this directory. They are copied after the builtin static files,
57 # so a file named "default.css" will overwrite the builtin "default.css".
58 html_static_path = ["_static"]
59
60 html_use_smartypants = False
61 html_show_sphinx = False
62
63 html_context = dict(
64 hy_descriptive_version=hy_descriptive_version,
65 has_active_alpha=True,
66 )
67
68 highlight_language = "clojure"
69
70 intersphinx_mapping = dict(
71 py=("https://docs.python.org/3/", None),
72 py3_10=("https://docs.python.org/3.10/", None),
73 hyrule=("https://hyrule.readthedocs.io/en/master/", None),
74 )
75 # ** Generate Cheatsheet
76 import json
77 from itertools import zip_longest
78 from pathlib import Path
79
80
81 def refize(spec):
82 role = ":hy:func:"
83 if isinstance(spec, dict):
84 _name = spec["name"]
85 uri = spec["uri"]
86 if spec.get("internal"):
87 role = ":ref:"
88 else:
89 uri = spec
90 _name = str.split(uri, ".")[-1]
91 return "{}`{} <{}>`".format(role, _name, uri)
92
93
94 def format_refs(refs, indent):
95 args = [iter(map(refize, refs))]
96 ref_groups = zip_longest(*args, fillvalue="")
97 return str.join(
98 " \\\n" + " " * (indent + 3),
99 [str.join(" ", ref_group) for ref_group in ref_groups],
100 )
101
102
103 def format_row(category, divider_loc):
104 return "{title: <{width}} | {methods}".format(
105 width=divider_loc,
106 title=category["name"],
107 methods=format_refs(category["methods"], divider_loc),
108 )
109
110
111 def format_table(table_spec):
112 table_name = table_spec["name"]
113 categories = table_spec["categories"]
114 longest_cat_name = max(len(category["name"]) for category in categories)
115 table = [
116 table_name,
117 "-" * len(table_name),
118 "",
119 "=" * longest_cat_name + " " + "=" * 25,
120 *(format_row(category, longest_cat_name) for category in categories),
121 "=" * longest_cat_name + " " + "=" * 25,
122 "",
123 ]
124 return "\n".join(table)
125
126
127 # Modifications to the cheatsheet should be added in `cheatsheet.json`
128 cheatsheet_spec = json.loads(Path("./docs/cheatsheet.json").read_text())
129 cheatsheet = [
130 "..",
131 " DO NOT MODIFY THIS FILE. IT IS AUTO GENERATED BY ``conf.py``",
132 " If you need to change or add methods, modify ``cheatsheet_spec`` in ``conf.py``",
133 "",
134 ".. _cheatsheet:",
135 "",
136 "Cheatsheet",
137 "==========",
138 "",
139 *map(format_table, cheatsheet_spec),
140 ]
141 Path("./docs/cheatsheet.rst").write_text("\n".join(cheatsheet))
142
143
144 # ** Sphinx App Setup
145
146
147 def setup(app):
148 app.add_css_file("overrides.css")
149
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -61,9 +61,7 @@
html_show_sphinx = False
html_context = dict(
- hy_descriptive_version=hy_descriptive_version,
- has_active_alpha=True,
-)
+ hy_descriptive_version=hy_descriptive_version)
highlight_language = "clojure"
|
{"golden_diff": "diff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -61,9 +61,7 @@\n html_show_sphinx = False\n \n html_context = dict(\n- hy_descriptive_version=hy_descriptive_version,\n- has_active_alpha=True,\n-)\n+ hy_descriptive_version=hy_descriptive_version)\n \n highlight_language = \"clojure\"\n", "issue": "New release\nIt's time for a new release soon. Here are the things I'd like to get done, or at least try to get done, first. If you think you'll make a PR soon that you'd also like to get in for this release, mention that, too. Volunteers to take these tasks on are also welcome.\r\n\r\n- ~#2291~; ~#2292~ - These are more difficult than I thought. I don't think I'm going to make the release wait for them.\r\n- Install bytecode (for Hy and for Hyrule): hylang/hyrule#42; at least partly addresses #1747\n", "before_files": [{"content": "# This file is execfile()d with the current directory set to its containing dir.\n\nimport html\nimport os\nimport re\nimport sys\nimport time\n\nsys.path.insert(0, os.path.abspath(\"..\"))\n\nextensions = [\n \"sphinx.ext.napoleon\",\n \"sphinx.ext.intersphinx\",\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.viewcode\",\n \"sphinxcontrib.hydomain\",\n]\n\nfrom get_version import __version__ as hy_version\n\n# Read the Docs might dirty its checkout, so strip the dirty flag.\nhy_version = re.sub(r\"[+.]dirty\\Z\", \"\", hy_version)\n\ntemplates_path = [\"_templates\"]\nsource_suffix = \".rst\"\n\nmaster_doc = \"index\"\n\n# General information about the project.\nproject = \"hy\"\ncopyright = \"%s the authors\" % time.strftime(\"%Y\")\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = \".\".join(hy_version.split(\".\")[:-1])\n# The full version, including alpha/beta/rc tags.\nrelease = hy_version\nhy_descriptive_version = html.escape(hy_version)\nif \"+\" in hy_version:\n hy_descriptive_version += \" <strong style='color: red;'>(unstable)</strong>\"\n\nexclude_patterns = [\"_build\", \"coreteam.rst\"]\nadd_module_names = True\n\npygments_style = \"sphinx\"\n\nimport sphinx_rtd_theme\n\nhtml_theme = \"sphinx_rtd_theme\"\nhtml_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. 
They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\n\nhtml_use_smartypants = False\nhtml_show_sphinx = False\n\nhtml_context = dict(\n hy_descriptive_version=hy_descriptive_version,\n has_active_alpha=True,\n)\n\nhighlight_language = \"clojure\"\n\nintersphinx_mapping = dict(\n py=(\"https://docs.python.org/3/\", None),\n py3_10=(\"https://docs.python.org/3.10/\", None),\n hyrule=(\"https://hyrule.readthedocs.io/en/master/\", None),\n)\n# ** Generate Cheatsheet\nimport json\nfrom itertools import zip_longest\nfrom pathlib import Path\n\n\ndef refize(spec):\n role = \":hy:func:\"\n if isinstance(spec, dict):\n _name = spec[\"name\"]\n uri = spec[\"uri\"]\n if spec.get(\"internal\"):\n role = \":ref:\"\n else:\n uri = spec\n _name = str.split(uri, \".\")[-1]\n return \"{}`{} <{}>`\".format(role, _name, uri)\n\n\ndef format_refs(refs, indent):\n args = [iter(map(refize, refs))]\n ref_groups = zip_longest(*args, fillvalue=\"\")\n return str.join(\n \" \\\\\\n\" + \" \" * (indent + 3),\n [str.join(\" \", ref_group) for ref_group in ref_groups],\n )\n\n\ndef format_row(category, divider_loc):\n return \"{title: <{width}} | {methods}\".format(\n width=divider_loc,\n title=category[\"name\"],\n methods=format_refs(category[\"methods\"], divider_loc),\n )\n\n\ndef format_table(table_spec):\n table_name = table_spec[\"name\"]\n categories = table_spec[\"categories\"]\n longest_cat_name = max(len(category[\"name\"]) for category in categories)\n table = [\n table_name,\n \"-\" * len(table_name),\n \"\",\n \"=\" * longest_cat_name + \" \" + \"=\" * 25,\n *(format_row(category, longest_cat_name) for category in categories),\n \"=\" * longest_cat_name + \" \" + \"=\" * 25,\n \"\",\n ]\n return \"\\n\".join(table)\n\n\n# Modifications to the cheatsheet should be added in `cheatsheet.json`\ncheatsheet_spec = json.loads(Path(\"./docs/cheatsheet.json\").read_text())\ncheatsheet = [\n \"..\",\n \" DO NOT MODIFY THIS FILE. IT IS AUTO GENERATED BY ``conf.py``\",\n \" If you need to change or add methods, modify ``cheatsheet_spec`` in ``conf.py``\",\n \"\",\n \".. 
_cheatsheet:\",\n \"\",\n \"Cheatsheet\",\n \"==========\",\n \"\",\n *map(format_table, cheatsheet_spec),\n]\nPath(\"./docs/cheatsheet.rst\").write_text(\"\\n\".join(cheatsheet))\n\n\n# ** Sphinx App Setup\n\n\ndef setup(app):\n app.add_css_file(\"overrides.css\")\n", "path": "docs/conf.py"}], "after_files": [{"content": "# This file is execfile()d with the current directory set to its containing dir.\n\nimport html\nimport os\nimport re\nimport sys\nimport time\n\nsys.path.insert(0, os.path.abspath(\"..\"))\n\nextensions = [\n \"sphinx.ext.napoleon\",\n \"sphinx.ext.intersphinx\",\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.viewcode\",\n \"sphinxcontrib.hydomain\",\n]\n\nfrom get_version import __version__ as hy_version\n\n# Read the Docs might dirty its checkout, so strip the dirty flag.\nhy_version = re.sub(r\"[+.]dirty\\Z\", \"\", hy_version)\n\ntemplates_path = [\"_templates\"]\nsource_suffix = \".rst\"\n\nmaster_doc = \"index\"\n\n# General information about the project.\nproject = \"hy\"\ncopyright = \"%s the authors\" % time.strftime(\"%Y\")\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = \".\".join(hy_version.split(\".\")[:-1])\n# The full version, including alpha/beta/rc tags.\nrelease = hy_version\nhy_descriptive_version = html.escape(hy_version)\nif \"+\" in hy_version:\n hy_descriptive_version += \" <strong style='color: red;'>(unstable)</strong>\"\n\nexclude_patterns = [\"_build\", \"coreteam.rst\"]\nadd_module_names = True\n\npygments_style = \"sphinx\"\n\nimport sphinx_rtd_theme\n\nhtml_theme = \"sphinx_rtd_theme\"\nhtml_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. 
They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\n\nhtml_use_smartypants = False\nhtml_show_sphinx = False\n\nhtml_context = dict(\n hy_descriptive_version=hy_descriptive_version)\n\nhighlight_language = \"clojure\"\n\nintersphinx_mapping = dict(\n py=(\"https://docs.python.org/3/\", None),\n py3_10=(\"https://docs.python.org/3.10/\", None),\n hyrule=(\"https://hyrule.readthedocs.io/en/master/\", None),\n)\n# ** Generate Cheatsheet\nimport json\nfrom itertools import zip_longest\nfrom pathlib import Path\n\n\ndef refize(spec):\n role = \":hy:func:\"\n if isinstance(spec, dict):\n _name = spec[\"name\"]\n uri = spec[\"uri\"]\n if spec.get(\"internal\"):\n role = \":ref:\"\n else:\n uri = spec\n _name = str.split(uri, \".\")[-1]\n return \"{}`{} <{}>`\".format(role, _name, uri)\n\n\ndef format_refs(refs, indent):\n args = [iter(map(refize, refs))]\n ref_groups = zip_longest(*args, fillvalue=\"\")\n return str.join(\n \" \\\\\\n\" + \" \" * (indent + 3),\n [str.join(\" \", ref_group) for ref_group in ref_groups],\n )\n\n\ndef format_row(category, divider_loc):\n return \"{title: <{width}} | {methods}\".format(\n width=divider_loc,\n title=category[\"name\"],\n methods=format_refs(category[\"methods\"], divider_loc),\n )\n\n\ndef format_table(table_spec):\n table_name = table_spec[\"name\"]\n categories = table_spec[\"categories\"]\n longest_cat_name = max(len(category[\"name\"]) for category in categories)\n table = [\n table_name,\n \"-\" * len(table_name),\n \"\",\n \"=\" * longest_cat_name + \" \" + \"=\" * 25,\n *(format_row(category, longest_cat_name) for category in categories),\n \"=\" * longest_cat_name + \" \" + \"=\" * 25,\n \"\",\n ]\n return \"\\n\".join(table)\n\n\n# Modifications to the cheatsheet should be added in `cheatsheet.json`\ncheatsheet_spec = json.loads(Path(\"./docs/cheatsheet.json\").read_text())\ncheatsheet = [\n \"..\",\n \" DO NOT MODIFY THIS FILE. IT IS AUTO GENERATED BY ``conf.py``\",\n \" If you need to change or add methods, modify ``cheatsheet_spec`` in ``conf.py``\",\n \"\",\n \".. _cheatsheet:\",\n \"\",\n \"Cheatsheet\",\n \"==========\",\n \"\",\n *map(format_table, cheatsheet_spec),\n]\nPath(\"./docs/cheatsheet.rst\").write_text(\"\\n\".join(cheatsheet))\n\n\n# ** Sphinx App Setup\n\n\ndef setup(app):\n app.add_css_file(\"overrides.css\")\n", "path": "docs/conf.py"}]}
| 1,780 | 89 |
gh_patches_debug_37323 | rasdani/github-patches | git_diff | pyro-ppl__pyro-743 |
--- BEGIN ISSUE ---
Default guide for `Importance` gives non-zero `log_weight`
The default `guide` in `Importance` blocks _all_ sites (and not just those observed). This leads to the `guide_trace` being empty (except for the input and return values). As a result, no samples are reused when replayed on the model _and_ the `guide_trace.log_pdf()` evaluates to `0.0`. The `log_weight` is then equal to the `model_trace.log_pdf()` (which is also evaluated on different samples), which I believe is unintended and incorrect.
The program below illustrates this for a simple univariate Gaussian, where my proposal and target distribution are identical and I would expect the log of the weights to be `0.0`. The latter is only the case when the sites are _explicitly_ exposed.
```python
import torch
from torch.autograd import Variable
import pyro
from pyro import distributions as dist
from pyro import infer
from pyro import poutine
def gaussian():
return pyro.sample('x', dist.normal,
Variable(torch.Tensor([0.0])),
Variable(torch.Tensor([1.0])))
# Using `Importance` with the default `guide`, the `log_weight` is equal to the
# `model_trace.log_pdf()`. That is, the `guide_trace.log_pdf()` (evaluated
# internally) is incorrectly `0.0`.
print('importance_default_guide:')
importance_default_guide = infer.Importance(gaussian, num_samples=10)
for model_trace, log_weight in importance_default_guide._traces():
model_trace_log_pdf = model_trace.log_pdf()
are_equal = log_weight.data[0] == model_trace_log_pdf.data[0]
print(log_weight.data[0], are_equal)
# However, setting the `guide` to expose `x` ensures that it is replayed so
# that the `log_weight` is exactly zero for each sample.
print('importance_exposed_guide:')
importance_exposed_guide = infer.Importance(
gaussian,
guide=poutine.block(gaussian, expose=['x']),
num_samples=10)
for model_trace, log_weight in importance_exposed_guide._traces():
print(log_weight.data[0])
```
```
importance_default_guide:
-0.9368391633033752 True
-1.3421428203582764 True
-0.9189755320549011 True
-2.1423826217651367 True
-2.301940679550171 True
-1.142196774482727 True
-0.9449963569641113 True
-2.7146053314208984 True
-3.420013904571533 True
-1.7994171380996704 True
importance_exposed_guide:
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
```
--- END ISSUE ---
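The numbers in the example above follow directly from how an importance weight is computed: log_weight = model_trace.log_pdf() - guide_trace.log_pdf(). A standalone sketch of that arithmetic for the univariate Gaussian in the issue (plain Python, not Pyro; variable names are illustrative):
```python
# Illustrative arithmetic only; this is not how Pyro's Importance is implemented.
import math
import random

def log_normal_pdf(x, mu=0.0, sigma=1.0):
    return -0.5 * math.log(2 * math.pi * sigma ** 2) - (x - mu) ** 2 / (2 * sigma ** 2)

# Default guide: site "x" is blocked, the guide trace is empty (log-pdf 0.0),
# and the model draws its own, independent sample.
x_model = random.gauss(0.0, 1.0)
log_weight_blocked = log_normal_pdf(x_model) - 0.0   # equals model_trace.log_pdf(), nonzero

# Exposed guide: the model replays the guide's sample, so the identical
# proposal and target densities cancel exactly.
x_guide = random.gauss(0.0, 1.0)
log_weight_exposed = log_normal_pdf(x_guide) - log_normal_pdf(x_guide)   # 0.0

print(log_weight_blocked, log_weight_exposed)
```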
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyro/poutine/block_poutine.py`
Content:
```
1 from __future__ import absolute_import, division, print_function
2
3 from .poutine import Poutine
4
5
6 class BlockPoutine(Poutine):
7 """
8 This Poutine selectively hides pyro primitive sites from the outside world.
9
10 For example, suppose the stochastic function fn has two sample sites "a" and "b".
11 Then any poutine outside of BlockPoutine(fn, hide=["a"])
12 will not be applied to site "a" and will only see site "b":
13
14 >>> fn_inner = TracePoutine(fn)
15 >>> fn_outer = TracePoutine(BlockPoutine(TracePoutine(fn), hide=["a"]))
16 >>> trace_inner = fn_inner.get_trace()
17 >>> trace_outer = fn_outer.get_trace()
18 >>> "a" in trace_inner
19 True
20 >>> "a" in trace_outer
21 False
22 >>> "b" in trace_inner
23 True
24 >>> "b" in trace_outer
25 True
26
27 BlockPoutine has a flexible interface that allows users
28 to specify in several different ways
29 which sites should be hidden or exposed.
30 See the constructor for details.
31 """
32
33 def __init__(self, fn,
34 hide_all=True, expose_all=False,
35 hide=None, expose=None,
36 hide_types=None, expose_types=None):
37 """
38 :param bool hide_all: hide all sites
39 :param bool expose_all: expose all sites normally
40 :param list hide: list of site names to hide, rest will be exposed normally
41 :param list expose: list of site names to expose, rest will be hidden
42 :param list hide_types: list of site types to hide, rest will be exposed normally
43 :param list expose_types: list of site types to expose normally, rest will be hidden
44
45 Constructor for blocking poutine
46 Default behavior: block everything (hide_all == True)
47
48 A site is hidden if at least one of the following holds:
49 1. msg["name"] in hide
50 2. msg["type"] in hide_types
51 3. msg["name"] not in expose and msg["type"] not in expose_types
52 4. hide_all == True
53 """
54 super(BlockPoutine, self).__init__(fn)
55 # first, some sanity checks:
56 # hide_all and expose_all intersect?
57 assert (hide_all is False and expose_all is False) or \
58 (hide_all != expose_all), "cannot hide and expose a site"
59
60 # hide and expose intersect?
61 if hide is None:
62 hide = []
63 else:
64 hide_all = False
65
66 if expose is None:
67 expose = []
68 assert set(hide).isdisjoint(set(expose)), \
69 "cannot hide and expose a site"
70
71 # hide_types and expose_types intersect?
72 if hide_types is None:
73 hide_types = []
74 if expose_types is None:
75 expose_types = []
76 assert set(hide_types).isdisjoint(set(expose_types)), \
77 "cannot hide and expose a site type"
78
79 # now set stuff
80 self.hide_all = hide_all
81 self.expose_all = expose_all
82 self.hide = hide
83 self.expose = expose
84 self.hide_types = hide_types
85 self.expose_types = expose_types
86
87 def _block_up(self, msg):
88 """
89 :param msg: current message at a trace site, after all execution finished.
90 :returns: boolean decision to hide or expose site.
91
92 A site is hidden if at least one of the following holds:
93 1. msg["name"] in self.hide
94 2. msg["type"] in self.hide_types
95 3. msg["name"] not in self.expose and msg["type"] not in self.expose_types
96 4. self.hide_all == True
97 """
98 # handle observes
99 if msg["type"] == "sample" and msg["is_observed"]:
100 msg_type = "observe"
101 else:
102 msg_type = msg["type"]
103
104 # decision rule for hiding:
105 if (msg["name"] in self.hide) or \
106 (msg_type in self.hide_types) or \
107 ((msg["name"] not in self.expose) and
108 (msg_type not in self.expose_types) and self.hide_all): # noqa: E129
109
110 return True
111 # otherwise expose
112 else:
113 return False
114
115 def _pyro_sample(self, msg):
116 """
117 :param msg: current message at a trace site
118 :returns: a sample from the stochastic function at the site.
119
120 Default sample behavior with a side effect.
121 Applies self._block_up to decide whether to hide the site.
122 """
123 ret = super(BlockPoutine, self)._pyro_sample(msg)
124 msg["stop"] = self._block_up(msg)
125 return ret
126
127 def _pyro_param(self, msg):
128 """
129 :param msg: current message at a trace site
130 :returns: the result of querying the parameter store.
131
132 Default param behavior with a side effect.
133 Applies self._block_up to decide whether to hide the site.
134 """
135 ret = super(BlockPoutine, self)._pyro_param(msg)
136 msg["stop"] = self._block_up(msg)
137 return ret
138
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pyro/poutine/block_poutine.py b/pyro/poutine/block_poutine.py
--- a/pyro/poutine/block_poutine.py
+++ b/pyro/poutine/block_poutine.py
@@ -49,7 +49,7 @@
1. msg["name"] in hide
2. msg["type"] in hide_types
3. msg["name"] not in expose and msg["type"] not in expose_types
- 4. hide_all == True
+ 4. hide_all == True and hide, hide_types, and expose_types are all None
"""
super(BlockPoutine, self).__init__(fn)
# first, some sanity checks:
@@ -65,14 +65,23 @@
if expose is None:
expose = []
+ else:
+ hide_all = True
+
assert set(hide).isdisjoint(set(expose)), \
"cannot hide and expose a site"
# hide_types and expose_types intersect?
if hide_types is None:
hide_types = []
+ else:
+ hide_all = False
+
if expose_types is None:
expose_types = []
+ else:
+ hide_all = True
+
assert set(hide_types).isdisjoint(set(expose_types)), \
"cannot hide and expose a site type"
@@ -93,7 +102,7 @@
1. msg["name"] in self.hide
2. msg["type"] in self.hide_types
3. msg["name"] not in self.expose and msg["type"] not in self.expose_types
- 4. self.hide_all == True
+ 4. self.hide_all == True and hide, hide_types, and expose_types are all None
"""
# handle observes
if msg["type"] == "sample" and msg["is_observed"]:
@@ -101,11 +110,13 @@
else:
msg_type = msg["type"]
+ is_not_exposed = (msg["name"] not in self.expose) and \
+ (msg_type not in self.expose_types)
+
# decision rule for hiding:
if (msg["name"] in self.hide) or \
(msg_type in self.hide_types) or \
- ((msg["name"] not in self.expose) and
- (msg_type not in self.expose_types) and self.hide_all): # noqa: E129
+ (is_not_exposed and self.hide_all): # noqa: E129
return True
# otherwise expose
|
{"golden_diff": "diff --git a/pyro/poutine/block_poutine.py b/pyro/poutine/block_poutine.py\n--- a/pyro/poutine/block_poutine.py\n+++ b/pyro/poutine/block_poutine.py\n@@ -49,7 +49,7 @@\n 1. msg[\"name\"] in hide\n 2. msg[\"type\"] in hide_types\n 3. msg[\"name\"] not in expose and msg[\"type\"] not in expose_types\n- 4. hide_all == True\n+ 4. hide_all == True and hide, hide_types, and expose_types are all None\n \"\"\"\n super(BlockPoutine, self).__init__(fn)\n # first, some sanity checks:\n@@ -65,14 +65,23 @@\n \n if expose is None:\n expose = []\n+ else:\n+ hide_all = True\n+\n assert set(hide).isdisjoint(set(expose)), \\\n \"cannot hide and expose a site\"\n \n # hide_types and expose_types intersect?\n if hide_types is None:\n hide_types = []\n+ else:\n+ hide_all = False\n+\n if expose_types is None:\n expose_types = []\n+ else:\n+ hide_all = True\n+\n assert set(hide_types).isdisjoint(set(expose_types)), \\\n \"cannot hide and expose a site type\"\n \n@@ -93,7 +102,7 @@\n 1. msg[\"name\"] in self.hide\n 2. msg[\"type\"] in self.hide_types\n 3. msg[\"name\"] not in self.expose and msg[\"type\"] not in self.expose_types\n- 4. self.hide_all == True\n+ 4. self.hide_all == True and hide, hide_types, and expose_types are all None\n \"\"\"\n # handle observes\n if msg[\"type\"] == \"sample\" and msg[\"is_observed\"]:\n@@ -101,11 +110,13 @@\n else:\n msg_type = msg[\"type\"]\n \n+ is_not_exposed = (msg[\"name\"] not in self.expose) and \\\n+ (msg_type not in self.expose_types)\n+\n # decision rule for hiding:\n if (msg[\"name\"] in self.hide) or \\\n (msg_type in self.hide_types) or \\\n- ((msg[\"name\"] not in self.expose) and\n- (msg_type not in self.expose_types) and self.hide_all): # noqa: E129\n+ (is_not_exposed and self.hide_all): # noqa: E129\n \n return True\n # otherwise expose\n", "issue": "Default guide for `Importance` gives non-zero `log_weight`\nThe default `guide` in `Importance` blocks _all_ sites (and not just those observed). This leads to the `guide_trace` being empty (except for the input and return values). As a result, no samples are reused when replayed on the model _and_ the `guide_trace.log_pdf()` evaluates to `0.0`. The `log_weight` is then equal to the `model_trace.log_pdf()` (which is also evaluated on different samples), which I believe is unintended and incorrect.\r\n\r\nThe program below illustrates this for a simple univariate Gaussian, where my proposal and target distribution are identical and I would expect the log of the weights to be `0.0`. The latter is only the case when the sites are _explicitly_ exposed.\r\n\r\n```python\r\nimport torch\r\nfrom torch.autograd import Variable\r\n\r\nimport pyro\r\nfrom pyro import distributions as dist\r\nfrom pyro import infer\r\nfrom pyro import poutine\r\n\r\n\r\ndef gaussian():\r\n return pyro.sample('x', dist.normal,\r\n Variable(torch.Tensor([0.0])),\r\n Variable(torch.Tensor([1.0])))\r\n\r\n\r\n# Using `Importance` with the default `guide`, the `log_weight` is equal to the\r\n# `model_trace.log_pdf()`. 
That is, the `guide_trace.log_pdf()` (evaluated\r\n# internally) is incorrectly `0.0`.\r\nprint('importance_default_guide:')\r\nimportance_default_guide = infer.Importance(gaussian, num_samples=10)\r\nfor model_trace, log_weight in importance_default_guide._traces():\r\n model_trace_log_pdf = model_trace.log_pdf()\r\n are_equal = log_weight.data[0] == model_trace_log_pdf.data[0]\r\n print(log_weight.data[0], are_equal)\r\n\r\n# However, setting the `guide` to expose `x` ensures that it is replayed so\r\n# that the `log_weight` is exactly zero for each sample.\r\nprint('importance_exposed_guide:')\r\nimportance_exposed_guide = infer.Importance(\r\n gaussian,\r\n guide=poutine.block(gaussian, expose=['x']),\r\n num_samples=10)\r\nfor model_trace, log_weight in importance_exposed_guide._traces():\r\n print(log_weight.data[0])\r\n```\r\n```\r\nimportance_default_guide:\r\n-0.9368391633033752 True\r\n-1.3421428203582764 True\r\n-0.9189755320549011 True\r\n-2.1423826217651367 True\r\n-2.301940679550171 True\r\n-1.142196774482727 True\r\n-0.9449963569641113 True\r\n-2.7146053314208984 True\r\n-3.420013904571533 True\r\n-1.7994171380996704 True\r\nimportance_exposed_guide:\r\n0.0\r\n0.0\r\n0.0\r\n0.0\r\n0.0\r\n0.0\r\n0.0\r\n0.0\r\n0.0\r\n0.0\r\n```\n", "before_files": [{"content": "from __future__ import absolute_import, division, print_function\n\nfrom .poutine import Poutine\n\n\nclass BlockPoutine(Poutine):\n \"\"\"\n This Poutine selectively hides pyro primitive sites from the outside world.\n\n For example, suppose the stochastic function fn has two sample sites \"a\" and \"b\".\n Then any poutine outside of BlockPoutine(fn, hide=[\"a\"])\n will not be applied to site \"a\" and will only see site \"b\":\n\n >>> fn_inner = TracePoutine(fn)\n >>> fn_outer = TracePoutine(BlockPoutine(TracePoutine(fn), hide=[\"a\"]))\n >>> trace_inner = fn_inner.get_trace()\n >>> trace_outer = fn_outer.get_trace()\n >>> \"a\" in trace_inner\n True\n >>> \"a\" in trace_outer\n False\n >>> \"b\" in trace_inner\n True\n >>> \"b\" in trace_outer\n True\n\n BlockPoutine has a flexible interface that allows users\n to specify in several different ways\n which sites should be hidden or exposed.\n See the constructor for details.\n \"\"\"\n\n def __init__(self, fn,\n hide_all=True, expose_all=False,\n hide=None, expose=None,\n hide_types=None, expose_types=None):\n \"\"\"\n :param bool hide_all: hide all sites\n :param bool expose_all: expose all sites normally\n :param list hide: list of site names to hide, rest will be exposed normally\n :param list expose: list of site names to expose, rest will be hidden\n :param list hide_types: list of site types to hide, rest will be exposed normally\n :param list expose_types: list of site types to expose normally, rest will be hidden\n\n Constructor for blocking poutine\n Default behavior: block everything (hide_all == True)\n\n A site is hidden if at least one of the following holds:\n 1. msg[\"name\"] in hide\n 2. msg[\"type\"] in hide_types\n 3. msg[\"name\"] not in expose and msg[\"type\"] not in expose_types\n 4. 
hide_all == True\n \"\"\"\n super(BlockPoutine, self).__init__(fn)\n # first, some sanity checks:\n # hide_all and expose_all intersect?\n assert (hide_all is False and expose_all is False) or \\\n (hide_all != expose_all), \"cannot hide and expose a site\"\n\n # hide and expose intersect?\n if hide is None:\n hide = []\n else:\n hide_all = False\n\n if expose is None:\n expose = []\n assert set(hide).isdisjoint(set(expose)), \\\n \"cannot hide and expose a site\"\n\n # hide_types and expose_types intersect?\n if hide_types is None:\n hide_types = []\n if expose_types is None:\n expose_types = []\n assert set(hide_types).isdisjoint(set(expose_types)), \\\n \"cannot hide and expose a site type\"\n\n # now set stuff\n self.hide_all = hide_all\n self.expose_all = expose_all\n self.hide = hide\n self.expose = expose\n self.hide_types = hide_types\n self.expose_types = expose_types\n\n def _block_up(self, msg):\n \"\"\"\n :param msg: current message at a trace site, after all execution finished.\n :returns: boolean decision to hide or expose site.\n\n A site is hidden if at least one of the following holds:\n 1. msg[\"name\"] in self.hide\n 2. msg[\"type\"] in self.hide_types\n 3. msg[\"name\"] not in self.expose and msg[\"type\"] not in self.expose_types\n 4. self.hide_all == True\n \"\"\"\n # handle observes\n if msg[\"type\"] == \"sample\" and msg[\"is_observed\"]:\n msg_type = \"observe\"\n else:\n msg_type = msg[\"type\"]\n\n # decision rule for hiding:\n if (msg[\"name\"] in self.hide) or \\\n (msg_type in self.hide_types) or \\\n ((msg[\"name\"] not in self.expose) and\n (msg_type not in self.expose_types) and self.hide_all): # noqa: E129\n\n return True\n # otherwise expose\n else:\n return False\n\n def _pyro_sample(self, msg):\n \"\"\"\n :param msg: current message at a trace site\n :returns: a sample from the stochastic function at the site.\n\n Default sample behavior with a side effect.\n Applies self._block_up to decide whether to hide the site.\n \"\"\"\n ret = super(BlockPoutine, self)._pyro_sample(msg)\n msg[\"stop\"] = self._block_up(msg)\n return ret\n\n def _pyro_param(self, msg):\n \"\"\"\n :param msg: current message at a trace site\n :returns: the result of querying the parameter store.\n\n Default param behavior with a side effect.\n Applies self._block_up to decide whether to hide the site.\n \"\"\"\n ret = super(BlockPoutine, self)._pyro_param(msg)\n msg[\"stop\"] = self._block_up(msg)\n return ret\n", "path": "pyro/poutine/block_poutine.py"}], "after_files": [{"content": "from __future__ import absolute_import, division, print_function\n\nfrom .poutine import Poutine\n\n\nclass BlockPoutine(Poutine):\n \"\"\"\n This Poutine selectively hides pyro primitive sites from the outside world.\n\n For example, suppose the stochastic function fn has two sample sites \"a\" and \"b\".\n Then any poutine outside of BlockPoutine(fn, hide=[\"a\"])\n will not be applied to site \"a\" and will only see site \"b\":\n\n >>> fn_inner = TracePoutine(fn)\n >>> fn_outer = TracePoutine(BlockPoutine(TracePoutine(fn), hide=[\"a\"]))\n >>> trace_inner = fn_inner.get_trace()\n >>> trace_outer = fn_outer.get_trace()\n >>> \"a\" in trace_inner\n True\n >>> \"a\" in trace_outer\n False\n >>> \"b\" in trace_inner\n True\n >>> \"b\" in trace_outer\n True\n\n BlockPoutine has a flexible interface that allows users\n to specify in several different ways\n which sites should be hidden or exposed.\n See the constructor for details.\n \"\"\"\n\n def __init__(self, fn,\n hide_all=True, expose_all=False,\n 
hide=None, expose=None,\n hide_types=None, expose_types=None):\n \"\"\"\n :param bool hide_all: hide all sites\n :param bool expose_all: expose all sites normally\n :param list hide: list of site names to hide, rest will be exposed normally\n :param list expose: list of site names to expose, rest will be hidden\n :param list hide_types: list of site types to hide, rest will be exposed normally\n :param list expose_types: list of site types to expose normally, rest will be hidden\n\n Constructor for blocking poutine\n Default behavior: block everything (hide_all == True)\n\n A site is hidden if at least one of the following holds:\n 1. msg[\"name\"] in hide\n 2. msg[\"type\"] in hide_types\n 3. msg[\"name\"] not in expose and msg[\"type\"] not in expose_types\n 4. hide_all == True and hide, hide_types, and expose_types are all None\n \"\"\"\n super(BlockPoutine, self).__init__(fn)\n # first, some sanity checks:\n # hide_all and expose_all intersect?\n assert (hide_all is False and expose_all is False) or \\\n (hide_all != expose_all), \"cannot hide and expose a site\"\n\n # hide and expose intersect?\n if hide is None:\n hide = []\n else:\n hide_all = False\n\n if expose is None:\n expose = []\n else:\n hide_all = True\n\n assert set(hide).isdisjoint(set(expose)), \\\n \"cannot hide and expose a site\"\n\n # hide_types and expose_types intersect?\n if hide_types is None:\n hide_types = []\n else:\n hide_all = False\n\n if expose_types is None:\n expose_types = []\n else:\n hide_all = True\n\n assert set(hide_types).isdisjoint(set(expose_types)), \\\n \"cannot hide and expose a site type\"\n\n # now set stuff\n self.hide_all = hide_all\n self.expose_all = expose_all\n self.hide = hide\n self.expose = expose\n self.hide_types = hide_types\n self.expose_types = expose_types\n\n def _block_up(self, msg):\n \"\"\"\n :param msg: current message at a trace site, after all execution finished.\n :returns: boolean decision to hide or expose site.\n\n A site is hidden if at least one of the following holds:\n 1. msg[\"name\"] in self.hide\n 2. msg[\"type\"] in self.hide_types\n 3. msg[\"name\"] not in self.expose and msg[\"type\"] not in self.expose_types\n 4. self.hide_all == True and hide, hide_types, and expose_types are all None\n \"\"\"\n # handle observes\n if msg[\"type\"] == \"sample\" and msg[\"is_observed\"]:\n msg_type = \"observe\"\n else:\n msg_type = msg[\"type\"]\n\n is_not_exposed = (msg[\"name\"] not in self.expose) and \\\n (msg_type not in self.expose_types)\n\n # decision rule for hiding:\n if (msg[\"name\"] in self.hide) or \\\n (msg_type in self.hide_types) or \\\n (is_not_exposed and self.hide_all): # noqa: E129\n\n return True\n # otherwise expose\n else:\n return False\n\n def _pyro_sample(self, msg):\n \"\"\"\n :param msg: current message at a trace site\n :returns: a sample from the stochastic function at the site.\n\n Default sample behavior with a side effect.\n Applies self._block_up to decide whether to hide the site.\n \"\"\"\n ret = super(BlockPoutine, self)._pyro_sample(msg)\n msg[\"stop\"] = self._block_up(msg)\n return ret\n\n def _pyro_param(self, msg):\n \"\"\"\n :param msg: current message at a trace site\n :returns: the result of querying the parameter store.\n\n Default param behavior with a side effect.\n Applies self._block_up to decide whether to hide the site.\n \"\"\"\n ret = super(BlockPoutine, self)._pyro_param(msg)\n msg[\"stop\"] = self._block_up(msg)\n return ret\n", "path": "pyro/poutine/block_poutine.py"}]}
| 2,453 | 583 |
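
The patch in this record only restructures the hiding predicate in `BlockPoutine._block_up`; the decision rule itself is unchanged. A minimal, self-contained sketch of that rule (illustrative names only, not Pyro's actual poutine API) is:

```python
# Self-contained sketch of the hiding rule shown in the diff; names are
# illustrative and do not correspond to Pyro's real classes or messages.

def should_hide(msg, hide, expose, hide_types, expose_types, hide_all):
    """Return True if the site described by `msg` should be hidden."""
    # Observed sample sites are treated as their own "observe" type.
    if msg["type"] == "sample" and msg.get("is_observed", False):
        msg_type = "observe"
    else:
        msg_type = msg["type"]

    is_not_exposed = msg["name"] not in expose and msg_type not in expose_types

    # Hidden if explicitly hidden by name or type, or if not exposed
    # anywhere while hide_all is in effect.
    return (
        msg["name"] in hide
        or msg_type in hide_types
        or (is_not_exposed and hide_all)
    )


if __name__ == "__main__":
    msg = {"type": "sample", "is_observed": False, "name": "x"}
    print(should_hide(msg, [], ["x"], [], [], hide_all=True))   # False: explicitly exposed
    print(should_hide(msg, [], [], [], [], hide_all=True))      # True: hidden by hide_all
```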
gh_patches_debug_65907
|
rasdani/github-patches
|
git_diff
|
ipython__ipython-3338
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
find_cmd test failure on Windows
I think this is caused by #3301. The [Windows implementation of find_cmd](https://github.com/ipython/ipython/blob/master/IPython/utils/_process_win32.py#L74) expects a command name without an extension, but the test now uses 'python.exe'.
I think that 'python.exe' is a valid command on Windows, so I think we should modify `find_cmd` to allow passing a command with an extension. Alternatively, we could modify the test to strip the extension.
```
======================================================================
ERROR: Make sure we find sys.exectable for python.
----------------------------------------------------------------------
Traceback (most recent call last):
File "S:\Users\slave\Jenkins\shiningpanda\jobs\d5f643a2\virtualenvs\ff035a1d\lib\site-packages\nose\case.py", line 197, in runTest
self.test(*self.arg)
File "S:\Users\slave\Jenkins\shiningpanda\jobs\d5f643a2\virtualenvs\ff035a1d\lib\site-packages\ipython-1.0.dev-py2.7.egg\IPython\utils\tests\test_process.py", line 36, in test_find_cmd_python
nt.assert_equal(find_cmd(python), sys.executable)
File "S:\Users\slave\Jenkins\shiningpanda\jobs\d5f643a2\virtualenvs\ff035a1d\lib\site-packages\ipython-1.0.dev-py2.7.egg\IPython\utils\process.py", line 67, in find_cmd
raise FindCmdError('command could not be found: %s' % cmd)
FindCmdError: command could not be found: python.exe
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `IPython/utils/_process_win32.py`
Content:
```
1 """Windows-specific implementation of process utilities.
2
3 This file is only meant to be imported by process.py, not by end-users.
4 """
5
6 #-----------------------------------------------------------------------------
7 # Copyright (C) 2010-2011 The IPython Development Team
8 #
9 # Distributed under the terms of the BSD License. The full license is in
10 # the file COPYING, distributed as part of this software.
11 #-----------------------------------------------------------------------------
12
13 #-----------------------------------------------------------------------------
14 # Imports
15 #-----------------------------------------------------------------------------
16 from __future__ import print_function
17
18 # stdlib
19 import os
20 import sys
21 import ctypes
22 import msvcrt
23
24 from ctypes import c_int, POINTER
25 from ctypes.wintypes import LPCWSTR, HLOCAL
26 from subprocess import STDOUT
27
28 # our own imports
29 from ._process_common import read_no_interrupt, process_handler, arg_split as py_arg_split
30 from . import py3compat
31 from .encoding import DEFAULT_ENCODING
32
33 #-----------------------------------------------------------------------------
34 # Function definitions
35 #-----------------------------------------------------------------------------
36
37 class AvoidUNCPath(object):
38 """A context manager to protect command execution from UNC paths.
39
40 In the Win32 API, commands can't be invoked with the cwd being a UNC path.
41 This context manager temporarily changes directory to the 'C:' drive on
42 entering, and restores the original working directory on exit.
43
44 The context manager returns the starting working directory *if* it made a
45 change and None otherwise, so that users can apply the necessary adjustment
46 to their system calls in the event of a change.
47
48 Example
49 -------
50 ::
51 cmd = 'dir'
52 with AvoidUNCPath() as path:
53 if path is not None:
54 cmd = '"pushd %s &&"%s' % (path, cmd)
55 os.system(cmd)
56 """
57 def __enter__(self):
58 self.path = os.getcwdu()
59 self.is_unc_path = self.path.startswith(r"\\")
60 if self.is_unc_path:
61 # change to c drive (as cmd.exe cannot handle UNC addresses)
62 os.chdir("C:")
63 return self.path
64 else:
65 # We return None to signal that there was no change in the working
66 # directory
67 return None
68
69 def __exit__(self, exc_type, exc_value, traceback):
70 if self.is_unc_path:
71 os.chdir(self.path)
72
73
74 def _find_cmd(cmd):
75 """Find the full path to a .bat or .exe using the win32api module."""
76 try:
77 from win32api import SearchPath
78 except ImportError:
79 raise ImportError('you need to have pywin32 installed for this to work')
80 else:
81 PATH = os.environ['PATH']
82 extensions = ['.exe', '.com', '.bat', '.py']
83 path = None
84 for ext in extensions:
85 try:
86 path = SearchPath(PATH, cmd + ext)[0]
87 except:
88 pass
89 if path is None:
90 raise OSError("command %r not found" % cmd)
91 else:
92 return path
93
94
95 def _system_body(p):
96 """Callback for _system."""
97 enc = DEFAULT_ENCODING
98 for line in read_no_interrupt(p.stdout).splitlines():
99 line = line.decode(enc, 'replace')
100 print(line, file=sys.stdout)
101 for line in read_no_interrupt(p.stderr).splitlines():
102 line = line.decode(enc, 'replace')
103 print(line, file=sys.stderr)
104
105 # Wait to finish for returncode
106 return p.wait()
107
108
109 def system(cmd):
110 """Win32 version of os.system() that works with network shares.
111
112 Note that this implementation returns None, as meant for use in IPython.
113
114 Parameters
115 ----------
116 cmd : str
117 A command to be executed in the system shell.
118
119 Returns
120 -------
121 None : we explicitly do NOT return the subprocess status code, as this
122 utility is meant to be used extensively in IPython, where any return value
123 would trigger :func:`sys.displayhook` calls.
124 """
125 # The controller provides interactivity with both
126 # stdin and stdout
127 #import _process_win32_controller
128 #_process_win32_controller.system(cmd)
129
130 with AvoidUNCPath() as path:
131 if path is not None:
132 cmd = '"pushd %s &&"%s' % (path, cmd)
133 return process_handler(cmd, _system_body)
134
135 def getoutput(cmd):
136 """Return standard output of executing cmd in a shell.
137
138 Accepts the same arguments as os.system().
139
140 Parameters
141 ----------
142 cmd : str
143 A command to be executed in the system shell.
144
145 Returns
146 -------
147 stdout : str
148 """
149
150 with AvoidUNCPath() as path:
151 if path is not None:
152 cmd = '"pushd %s &&"%s' % (path, cmd)
153 out = process_handler(cmd, lambda p: p.communicate()[0], STDOUT)
154
155 if out is None:
156 out = b''
157 return py3compat.bytes_to_str(out)
158
159 try:
160 CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW
161 CommandLineToArgvW.arg_types = [LPCWSTR, POINTER(c_int)]
162 CommandLineToArgvW.restype = POINTER(LPCWSTR)
163 LocalFree = ctypes.windll.kernel32.LocalFree
164 LocalFree.res_type = HLOCAL
165 LocalFree.arg_types = [HLOCAL]
166
167 def arg_split(commandline, posix=False, strict=True):
168 """Split a command line's arguments in a shell-like manner.
169
170 This is a special version for windows that use a ctypes call to CommandLineToArgvW
171 to do the argv splitting. The posix paramter is ignored.
172
173 If strict=False, process_common.arg_split(...strict=False) is used instead.
174 """
175 #CommandLineToArgvW returns path to executable if called with empty string.
176 if commandline.strip() == "":
177 return []
178 if not strict:
179 # not really a cl-arg, fallback on _process_common
180 return py_arg_split(commandline, posix=posix, strict=strict)
181 argvn = c_int()
182 result_pointer = CommandLineToArgvW(py3compat.cast_unicode(commandline.lstrip()), ctypes.byref(argvn))
183 result_array_type = LPCWSTR * argvn.value
184 result = [arg for arg in result_array_type.from_address(ctypes.addressof(result_pointer.contents))]
185 retval = LocalFree(result_pointer)
186 return result
187 except AttributeError:
188 arg_split = py_arg_split
189
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/IPython/utils/_process_win32.py b/IPython/utils/_process_win32.py
--- a/IPython/utils/_process_win32.py
+++ b/IPython/utils/_process_win32.py
@@ -83,7 +83,7 @@
path = None
for ext in extensions:
try:
- path = SearchPath(PATH, cmd + ext)[0]
+ path = SearchPath(PATH, cmd, ext)[0]
except:
pass
if path is None:
|
{"golden_diff": "diff --git a/IPython/utils/_process_win32.py b/IPython/utils/_process_win32.py\n--- a/IPython/utils/_process_win32.py\n+++ b/IPython/utils/_process_win32.py\n@@ -83,7 +83,7 @@\n path = None\n for ext in extensions:\n try:\n- path = SearchPath(PATH, cmd + ext)[0]\n+ path = SearchPath(PATH, cmd, ext)[0]\n except:\n pass\n if path is None:\n", "issue": "find_cmd test failure on Windows\nI think this is caused by #3301. The [Windows implementation of find_cmd](https://github.com/ipython/ipython/blob/master/IPython/utils/_process_win32.py#L74) expects a command name without an extension, but the test now uses 'python.exe'.\n\nI think that 'python.exe' is a valid command on Windows, so I think we should modify `find_cmd` to allow passing a command with an extension. Alternatively, we could modify the test to strip the extension.\n\n```\n======================================================================\nERROR: Make sure we find sys.exectable for python.\n----------------------------------------------------------------------\nTraceback (most recent call last):\n File \"S:\\Users\\slave\\Jenkins\\shiningpanda\\jobs\\d5f643a2\\virtualenvs\\ff035a1d\\lib\\site-packages\\nose\\case.py\", line 197, in runTest\n self.test(*self.arg)\n File \"S:\\Users\\slave\\Jenkins\\shiningpanda\\jobs\\d5f643a2\\virtualenvs\\ff035a1d\\lib\\site-packages\\ipython-1.0.dev-py2.7.egg\\IPython\\utils\\tests\\test_process.py\", line 36, in test_find_cmd_python\n nt.assert_equal(find_cmd(python), sys.executable)\n File \"S:\\Users\\slave\\Jenkins\\shiningpanda\\jobs\\d5f643a2\\virtualenvs\\ff035a1d\\lib\\site-packages\\ipython-1.0.dev-py2.7.egg\\IPython\\utils\\process.py\", line 67, in find_cmd\n raise FindCmdError('command could not be found: %s' % cmd)\nFindCmdError: command could not be found: python.exe\n```\n\n", "before_files": [{"content": "\"\"\"Windows-specific implementation of process utilities.\n\nThis file is only meant to be imported by process.py, not by end-users.\n\"\"\"\n\n#-----------------------------------------------------------------------------\n# Copyright (C) 2010-2011 The IPython Development Team\n#\n# Distributed under the terms of the BSD License. The full license is in\n# the file COPYING, distributed as part of this software.\n#-----------------------------------------------------------------------------\n\n#-----------------------------------------------------------------------------\n# Imports\n#-----------------------------------------------------------------------------\nfrom __future__ import print_function\n\n# stdlib\nimport os\nimport sys\nimport ctypes\nimport msvcrt\n\nfrom ctypes import c_int, POINTER\nfrom ctypes.wintypes import LPCWSTR, HLOCAL\nfrom subprocess import STDOUT\n\n# our own imports\nfrom ._process_common import read_no_interrupt, process_handler, arg_split as py_arg_split\nfrom . 
import py3compat\nfrom .encoding import DEFAULT_ENCODING\n\n#-----------------------------------------------------------------------------\n# Function definitions\n#-----------------------------------------------------------------------------\n\nclass AvoidUNCPath(object):\n \"\"\"A context manager to protect command execution from UNC paths.\n\n In the Win32 API, commands can't be invoked with the cwd being a UNC path.\n This context manager temporarily changes directory to the 'C:' drive on\n entering, and restores the original working directory on exit.\n\n The context manager returns the starting working directory *if* it made a\n change and None otherwise, so that users can apply the necessary adjustment\n to their system calls in the event of a change.\n\n Example\n -------\n ::\n cmd = 'dir'\n with AvoidUNCPath() as path:\n if path is not None:\n cmd = '\"pushd %s &&\"%s' % (path, cmd)\n os.system(cmd)\n \"\"\"\n def __enter__(self):\n self.path = os.getcwdu()\n self.is_unc_path = self.path.startswith(r\"\\\\\")\n if self.is_unc_path:\n # change to c drive (as cmd.exe cannot handle UNC addresses)\n os.chdir(\"C:\")\n return self.path\n else:\n # We return None to signal that there was no change in the working\n # directory\n return None\n\n def __exit__(self, exc_type, exc_value, traceback):\n if self.is_unc_path:\n os.chdir(self.path)\n\n\ndef _find_cmd(cmd):\n \"\"\"Find the full path to a .bat or .exe using the win32api module.\"\"\"\n try:\n from win32api import SearchPath\n except ImportError:\n raise ImportError('you need to have pywin32 installed for this to work')\n else:\n PATH = os.environ['PATH']\n extensions = ['.exe', '.com', '.bat', '.py']\n path = None\n for ext in extensions:\n try:\n path = SearchPath(PATH, cmd + ext)[0]\n except:\n pass\n if path is None:\n raise OSError(\"command %r not found\" % cmd)\n else:\n return path\n\n\ndef _system_body(p):\n \"\"\"Callback for _system.\"\"\"\n enc = DEFAULT_ENCODING\n for line in read_no_interrupt(p.stdout).splitlines():\n line = line.decode(enc, 'replace')\n print(line, file=sys.stdout)\n for line in read_no_interrupt(p.stderr).splitlines():\n line = line.decode(enc, 'replace')\n print(line, file=sys.stderr)\n\n # Wait to finish for returncode\n return p.wait()\n\n\ndef system(cmd):\n \"\"\"Win32 version of os.system() that works with network shares.\n\n Note that this implementation returns None, as meant for use in IPython.\n\n Parameters\n ----------\n cmd : str\n A command to be executed in the system shell.\n\n Returns\n -------\n None : we explicitly do NOT return the subprocess status code, as this\n utility is meant to be used extensively in IPython, where any return value\n would trigger :func:`sys.displayhook` calls.\n \"\"\"\n # The controller provides interactivity with both\n # stdin and stdout\n #import _process_win32_controller\n #_process_win32_controller.system(cmd)\n\n with AvoidUNCPath() as path:\n if path is not None:\n cmd = '\"pushd %s &&\"%s' % (path, cmd)\n return process_handler(cmd, _system_body)\n\ndef getoutput(cmd):\n \"\"\"Return standard output of executing cmd in a shell.\n\n Accepts the same arguments as os.system().\n\n Parameters\n ----------\n cmd : str\n A command to be executed in the system shell.\n\n Returns\n -------\n stdout : str\n \"\"\"\n\n with AvoidUNCPath() as path:\n if path is not None:\n cmd = '\"pushd %s &&\"%s' % (path, cmd)\n out = process_handler(cmd, lambda p: p.communicate()[0], STDOUT)\n\n if out is None:\n out = b''\n return py3compat.bytes_to_str(out)\n\ntry:\n 
CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW\n CommandLineToArgvW.arg_types = [LPCWSTR, POINTER(c_int)]\n CommandLineToArgvW.restype = POINTER(LPCWSTR)\n LocalFree = ctypes.windll.kernel32.LocalFree\n LocalFree.res_type = HLOCAL\n LocalFree.arg_types = [HLOCAL]\n \n def arg_split(commandline, posix=False, strict=True):\n \"\"\"Split a command line's arguments in a shell-like manner.\n\n This is a special version for windows that use a ctypes call to CommandLineToArgvW\n to do the argv splitting. The posix paramter is ignored.\n \n If strict=False, process_common.arg_split(...strict=False) is used instead.\n \"\"\"\n #CommandLineToArgvW returns path to executable if called with empty string.\n if commandline.strip() == \"\":\n return []\n if not strict:\n # not really a cl-arg, fallback on _process_common\n return py_arg_split(commandline, posix=posix, strict=strict)\n argvn = c_int()\n result_pointer = CommandLineToArgvW(py3compat.cast_unicode(commandline.lstrip()), ctypes.byref(argvn))\n result_array_type = LPCWSTR * argvn.value\n result = [arg for arg in result_array_type.from_address(ctypes.addressof(result_pointer.contents))]\n retval = LocalFree(result_pointer)\n return result\nexcept AttributeError:\n arg_split = py_arg_split\n", "path": "IPython/utils/_process_win32.py"}], "after_files": [{"content": "\"\"\"Windows-specific implementation of process utilities.\n\nThis file is only meant to be imported by process.py, not by end-users.\n\"\"\"\n\n#-----------------------------------------------------------------------------\n# Copyright (C) 2010-2011 The IPython Development Team\n#\n# Distributed under the terms of the BSD License. The full license is in\n# the file COPYING, distributed as part of this software.\n#-----------------------------------------------------------------------------\n\n#-----------------------------------------------------------------------------\n# Imports\n#-----------------------------------------------------------------------------\nfrom __future__ import print_function\n\n# stdlib\nimport os\nimport sys\nimport ctypes\nimport msvcrt\n\nfrom ctypes import c_int, POINTER\nfrom ctypes.wintypes import LPCWSTR, HLOCAL\nfrom subprocess import STDOUT\n\n# our own imports\nfrom ._process_common import read_no_interrupt, process_handler, arg_split as py_arg_split\nfrom . 
import py3compat\nfrom .encoding import DEFAULT_ENCODING\n\n#-----------------------------------------------------------------------------\n# Function definitions\n#-----------------------------------------------------------------------------\n\nclass AvoidUNCPath(object):\n \"\"\"A context manager to protect command execution from UNC paths.\n\n In the Win32 API, commands can't be invoked with the cwd being a UNC path.\n This context manager temporarily changes directory to the 'C:' drive on\n entering, and restores the original working directory on exit.\n\n The context manager returns the starting working directory *if* it made a\n change and None otherwise, so that users can apply the necessary adjustment\n to their system calls in the event of a change.\n\n Example\n -------\n ::\n cmd = 'dir'\n with AvoidUNCPath() as path:\n if path is not None:\n cmd = '\"pushd %s &&\"%s' % (path, cmd)\n os.system(cmd)\n \"\"\"\n def __enter__(self):\n self.path = os.getcwdu()\n self.is_unc_path = self.path.startswith(r\"\\\\\")\n if self.is_unc_path:\n # change to c drive (as cmd.exe cannot handle UNC addresses)\n os.chdir(\"C:\")\n return self.path\n else:\n # We return None to signal that there was no change in the working\n # directory\n return None\n\n def __exit__(self, exc_type, exc_value, traceback):\n if self.is_unc_path:\n os.chdir(self.path)\n\n\ndef _find_cmd(cmd):\n \"\"\"Find the full path to a .bat or .exe using the win32api module.\"\"\"\n try:\n from win32api import SearchPath\n except ImportError:\n raise ImportError('you need to have pywin32 installed for this to work')\n else:\n PATH = os.environ['PATH']\n extensions = ['.exe', '.com', '.bat', '.py']\n path = None\n for ext in extensions:\n try:\n path = SearchPath(PATH, cmd, ext)[0]\n except:\n pass\n if path is None:\n raise OSError(\"command %r not found\" % cmd)\n else:\n return path\n\n\ndef _system_body(p):\n \"\"\"Callback for _system.\"\"\"\n enc = DEFAULT_ENCODING\n for line in read_no_interrupt(p.stdout).splitlines():\n line = line.decode(enc, 'replace')\n print(line, file=sys.stdout)\n for line in read_no_interrupt(p.stderr).splitlines():\n line = line.decode(enc, 'replace')\n print(line, file=sys.stderr)\n\n # Wait to finish for returncode\n return p.wait()\n\n\ndef system(cmd):\n \"\"\"Win32 version of os.system() that works with network shares.\n\n Note that this implementation returns None, as meant for use in IPython.\n\n Parameters\n ----------\n cmd : str\n A command to be executed in the system shell.\n\n Returns\n -------\n None : we explicitly do NOT return the subprocess status code, as this\n utility is meant to be used extensively in IPython, where any return value\n would trigger :func:`sys.displayhook` calls.\n \"\"\"\n # The controller provides interactivity with both\n # stdin and stdout\n #import _process_win32_controller\n #_process_win32_controller.system(cmd)\n\n with AvoidUNCPath() as path:\n if path is not None:\n cmd = '\"pushd %s &&\"%s' % (path, cmd)\n return process_handler(cmd, _system_body)\n\ndef getoutput(cmd):\n \"\"\"Return standard output of executing cmd in a shell.\n\n Accepts the same arguments as os.system().\n\n Parameters\n ----------\n cmd : str\n A command to be executed in the system shell.\n\n Returns\n -------\n stdout : str\n \"\"\"\n\n with AvoidUNCPath() as path:\n if path is not None:\n cmd = '\"pushd %s &&\"%s' % (path, cmd)\n out = process_handler(cmd, lambda p: p.communicate()[0], STDOUT)\n\n if out is None:\n out = b''\n return py3compat.bytes_to_str(out)\n\ntry:\n 
CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW\n CommandLineToArgvW.arg_types = [LPCWSTR, POINTER(c_int)]\n CommandLineToArgvW.restype = POINTER(LPCWSTR)\n LocalFree = ctypes.windll.kernel32.LocalFree\n LocalFree.res_type = HLOCAL\n LocalFree.arg_types = [HLOCAL]\n \n def arg_split(commandline, posix=False, strict=True):\n \"\"\"Split a command line's arguments in a shell-like manner.\n\n This is a special version for windows that use a ctypes call to CommandLineToArgvW\n to do the argv splitting. The posix paramter is ignored.\n \n If strict=False, process_common.arg_split(...strict=False) is used instead.\n \"\"\"\n #CommandLineToArgvW returns path to executable if called with empty string.\n if commandline.strip() == \"\":\n return []\n if not strict:\n # not really a cl-arg, fallback on _process_common\n return py_arg_split(commandline, posix=posix, strict=strict)\n argvn = c_int()\n result_pointer = CommandLineToArgvW(py3compat.cast_unicode(commandline.lstrip()), ctypes.byref(argvn))\n result_array_type = LPCWSTR * argvn.value\n result = [arg for arg in result_array_type.from_address(ctypes.addressof(result_pointer.contents))]\n retval = LocalFree(result_pointer)\n return result\nexcept AttributeError:\n arg_split = py_arg_split\n", "path": "IPython/utils/_process_win32.py"}]}
| 2,524 | 114 |
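
The golden diff above swaps `SearchPath(PATH, cmd + ext)` for `SearchPath(PATH, cmd, ext)`; the Win32 `SearchPath` call appends its extension argument only when the name has no extension of its own, which is why both `python` and `python.exe` can then be resolved. The pure-Python sketch below mimics that lookup behaviour for illustration only — `find_cmd_sketch` is a made-up helper, not the pywin32 API:

```python
# Pure-Python sketch of the lookup semantics the patch relies on: the
# extension is appended only when the command has no extension of its own,
# so "tool" and "tool.exe" both resolve.  Illustration only, not win32api.
import os
import tempfile


def find_cmd_sketch(cmd, path=None, extensions=(".exe", ".com", ".bat", ".py")):
    """Resolve `cmd` against `path`, trying default extensions if needed."""
    search_dirs = (path or os.environ.get("PATH", "")).split(os.pathsep)
    has_extension = bool(os.path.splitext(cmd)[1])
    candidates = [cmd] if has_extension else [cmd + ext for ext in extensions]
    for directory in search_dirs:
        for name in candidates:
            full = os.path.join(directory, name)
            if os.path.isfile(full):
                return full
    raise OSError("command %r not found" % cmd)


if __name__ == "__main__":
    scratch = tempfile.mkdtemp()
    open(os.path.join(scratch, "tool.exe"), "w").close()
    print(find_cmd_sketch("tool", path=scratch))      # found via the extension list
    print(find_cmd_sketch("tool.exe", path=scratch))  # found as-is, no extension added
```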
gh_patches_debug_28222
|
rasdani/github-patches
|
git_diff
|
scikit-hep__awkward-1650
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ak.fields (v2) passes a RecordArray's internal fields by reference
Okay, so I hadn't noticed that Awkward v2's fields are passed by reference, which exposes them to the danger that someone might modify them downstream:
v1:
```python
>>> array = awkward.Array([{"x": 1, "y": 1.1}])
>>> fields = awkward.fields(array)
>>> array
<Array [{x: 1, y: 1.1}] type='1 * {"x": int64, "y": float64}'>
>>> fields
['x', 'y']
>>> fields[0] = "XXX"
>>> fields
['XXX', 'y']
>>> array
<Array [{x: 1, y: 1.1}] type='1 * {"x": int64, "y": float64}'>
```
v2:
```python
>>> array = awkward._v2.Array([{"x": 1, "y": 1.1}])
>>> fields = awkward._v2.fields(array)
>>> array
<Array [{x: 1, y: 1.1}] type='1 * {x: int64, y: float64}'>
>>> fields
['x', 'y']
>>> fields[0] = "XXX"
>>> fields
['XXX', 'y']
>>> array
<Array [{XXX: 1, y: 1.1}] type='1 * {XXX: int64, y: float64}'>
```
It could be fixed [here, in Awkward](https://github.com/scikit-hep/awkward/blob/352b0dead74846ad2a56d385be4694ec87072a08/src/awkward/_v2/contents/recordarray.py#L162), or maybe [here](https://github.com/scikit-hep/awkward/blob/352b0dead74846ad2a56d385be4694ec87072a08/src/awkward/_v2/operations/ak_fields.py#L30) (to only suffer the list-copy when handing it off to a user, so that internal uses can still be by reference).
I'll use this comment to open an issue in Awkward. Once `awkward.fields` is guarded, your `.copy()` can be removed, but it can also be left in place with no consequence beyond a small performance cost.
_Originally posted by @jpivarski in https://github.com/scikit-hep/vector/pull/226#discussion_r958660705_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/awkward/_v2/operations/ak_fields.py`
Content:
```
1 # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
2
3 import awkward as ak
4
5 np = ak.nplike.NumpyMetadata.instance()
6
7
8 def fields(array):
9 """
10 Extracts record fields or tuple slot numbers from `array` (many types
11 supported, including all Awkward Arrays and Records).
12
13 If the array contains nested records, only the outermost record is
14 queried. If it contains tuples instead of records, this function outputs
15 string representations of integers, such as `"0"`, `"1"`, `"2"`, etc.
16 The records or tuples may be within multiple layers of nested lists.
17
18 If the array contains neither tuples nor records, this returns an empty
19 list.
20 """
21 with ak._v2._util.OperationErrorContext(
22 "ak._v2.fields",
23 dict(array=array),
24 ):
25 return _impl(array)
26
27
28 def _impl(array):
29 layout = ak._v2.operations.to_layout(array, allow_record=True, allow_other=False)
30 return layout.fields
31
```
Path: `src/awkward/_v2/operations/ak_parameters.py`
Content:
```
1 # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
2
3 import awkward as ak
4
5 np = ak.nplike.NumpyMetadata.instance()
6
7
8 def parameters(array):
9 """
10 Extracts parameters from the outermost array node of `array` (many types
11 supported, including all Awkward Arrays and Records).
12
13 Parameters are a dict from str to JSON-like objects, usually strings.
14 Every #ak.layout.Content node has a different set of parameters. Some
15 key names are special, such as `"__record__"` and `"__array__"` that name
16 particular records and arrays as capable of supporting special behaviors.
17
18 See #ak.Array and #ak.behavior for a more complete description of
19 behaviors.
20 """
21 with ak._v2._util.OperationErrorContext(
22 "ak._v2.parameters",
23 dict(array=array),
24 ):
25 return _impl(array)
26
27
28 def _impl(array):
29 if isinstance(array, (ak._v2.highlevel.Array, ak._v2.highlevel.Record)):
30 return array.layout.parameters
31
32 elif isinstance(
33 array,
34 (ak._v2.contents.Content, ak._v2.record.Record),
35 ):
36 return array.parameters
37
38 elif isinstance(array, ak._v2.highlevel.ArrayBuilder):
39 return array.snapshot().layout.parameters
40
41 elif isinstance(array, ak.layout.ArrayBuilder):
42 return array.snapshot().parameters
43
44 else:
45 return {}
46
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/awkward/_v2/operations/ak_fields.py b/src/awkward/_v2/operations/ak_fields.py
--- a/src/awkward/_v2/operations/ak_fields.py
+++ b/src/awkward/_v2/operations/ak_fields.py
@@ -27,4 +27,4 @@
def _impl(array):
layout = ak._v2.operations.to_layout(array, allow_record=True, allow_other=False)
- return layout.fields
+ return layout.fields.copy()
diff --git a/src/awkward/_v2/operations/ak_parameters.py b/src/awkward/_v2/operations/ak_parameters.py
--- a/src/awkward/_v2/operations/ak_parameters.py
+++ b/src/awkward/_v2/operations/ak_parameters.py
@@ -1,5 +1,8 @@
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
+import copy
+import numbers
+
import awkward as ak
np = ak.nplike.NumpyMetadata.instance()
@@ -27,13 +30,13 @@
def _impl(array):
if isinstance(array, (ak._v2.highlevel.Array, ak._v2.highlevel.Record)):
- return array.layout.parameters
+ return _copy(array.layout.parameters)
elif isinstance(
array,
(ak._v2.contents.Content, ak._v2.record.Record),
):
- return array.parameters
+ return _copy(array.parameters)
elif isinstance(array, ak._v2.highlevel.ArrayBuilder):
return array.snapshot().layout.parameters
@@ -43,3 +46,10 @@
else:
return {}
+
+
+def _copy(what):
+ if all(isinstance(x, (str, numbers.Real)) for x in what.values()):
+ return what.copy()
+ else:
+ return copy.deepcopy(what)
|
{"golden_diff": "diff --git a/src/awkward/_v2/operations/ak_fields.py b/src/awkward/_v2/operations/ak_fields.py\n--- a/src/awkward/_v2/operations/ak_fields.py\n+++ b/src/awkward/_v2/operations/ak_fields.py\n@@ -27,4 +27,4 @@\n \n def _impl(array):\n layout = ak._v2.operations.to_layout(array, allow_record=True, allow_other=False)\n- return layout.fields\n+ return layout.fields.copy()\ndiff --git a/src/awkward/_v2/operations/ak_parameters.py b/src/awkward/_v2/operations/ak_parameters.py\n--- a/src/awkward/_v2/operations/ak_parameters.py\n+++ b/src/awkward/_v2/operations/ak_parameters.py\n@@ -1,5 +1,8 @@\n # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n \n+import copy\n+import numbers\n+\n import awkward as ak\n \n np = ak.nplike.NumpyMetadata.instance()\n@@ -27,13 +30,13 @@\n \n def _impl(array):\n if isinstance(array, (ak._v2.highlevel.Array, ak._v2.highlevel.Record)):\n- return array.layout.parameters\n+ return _copy(array.layout.parameters)\n \n elif isinstance(\n array,\n (ak._v2.contents.Content, ak._v2.record.Record),\n ):\n- return array.parameters\n+ return _copy(array.parameters)\n \n elif isinstance(array, ak._v2.highlevel.ArrayBuilder):\n return array.snapshot().layout.parameters\n@@ -43,3 +46,10 @@\n \n else:\n return {}\n+\n+\n+def _copy(what):\n+ if all(isinstance(x, (str, numbers.Real)) for x in what.values()):\n+ return what.copy()\n+ else:\n+ return copy.deepcopy(what)\n", "issue": "ak.fields (v2) passes a RecordArray's internal fields by reference\nOkay, so I hadn't noticed that Awkward v2's fields are passed by reference, which exposes them to the danger that someone might modify them downstream:\r\n\r\nv1:\r\n\r\n```python\r\n>>> array = awkward.Array([{\"x\": 1, \"y\": 1.1}])\r\n>>> fields = awkward.fields(array)\r\n>>> array\r\n<Array [{x: 1, y: 1.1}] type='1 * {\"x\": int64, \"y\": float64}'>\r\n>>> fields\r\n['x', 'y']\r\n>>> fields[0] = \"XXX\"\r\n>>> fields\r\n['XXX', 'y']\r\n>>> array\r\n<Array [{x: 1, y: 1.1}] type='1 * {\"x\": int64, \"y\": float64}'>\r\n```\r\n\r\nv2:\r\n\r\n```python\r\n>>> array = awkward._v2.Array([{\"x\": 1, \"y\": 1.1}])\r\n>>> fields = awkward._v2.fields(array)\r\n>>> array\r\n<Array [{x: 1, y: 1.1}] type='1 * {x: int64, y: float64}'>\r\n>>> fields\r\n['x', 'y']\r\n>>> fields[0] = \"XXX\"\r\n>>> fields\r\n['XXX', 'y']\r\n>>> array\r\n<Array [{XXX: 1, y: 1.1}] type='1 * {XXX: int64, y: float64}'>\r\n```\r\n\r\nIt could be fixed [here, in Awkward](https://github.com/scikit-hep/awkward/blob/352b0dead74846ad2a56d385be4694ec87072a08/src/awkward/_v2/contents/recordarray.py#L162), or maybe [here](https://github.com/scikit-hep/awkward/blob/352b0dead74846ad2a56d385be4694ec87072a08/src/awkward/_v2/operations/ak_fields.py#L30) (to only suffer the list-copy when handing it off to a user, so that internal uses can still be by reference).\r\n\r\nI'll use this comment to open an issue in Awkward. 
Once `awkward.fields` is guarded, your `.copy()` can be removed, but it can also not be removed with no consequences but a little performance.\r\n\r\n_Originally posted by @jpivarski in https://github.com/scikit-hep/vector/pull/226#discussion_r958660705_\n", "before_files": [{"content": "# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n\nimport awkward as ak\n\nnp = ak.nplike.NumpyMetadata.instance()\n\n\ndef fields(array):\n \"\"\"\n Extracts record fields or tuple slot numbers from `array` (many types\n supported, including all Awkward Arrays and Records).\n\n If the array contains nested records, only the outermost record is\n queried. If it contains tuples instead of records, this function outputs\n string representations of integers, such as `\"0\"`, `\"1\"`, `\"2\"`, etc.\n The records or tuples may be within multiple layers of nested lists.\n\n If the array contains neither tuples nor records, this returns an empty\n list.\n \"\"\"\n with ak._v2._util.OperationErrorContext(\n \"ak._v2.fields\",\n dict(array=array),\n ):\n return _impl(array)\n\n\ndef _impl(array):\n layout = ak._v2.operations.to_layout(array, allow_record=True, allow_other=False)\n return layout.fields\n", "path": "src/awkward/_v2/operations/ak_fields.py"}, {"content": "# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n\nimport awkward as ak\n\nnp = ak.nplike.NumpyMetadata.instance()\n\n\ndef parameters(array):\n \"\"\"\n Extracts parameters from the outermost array node of `array` (many types\n supported, including all Awkward Arrays and Records).\n\n Parameters are a dict from str to JSON-like objects, usually strings.\n Every #ak.layout.Content node has a different set of parameters. Some\n key names are special, such as `\"__record__\"` and `\"__array__\"` that name\n particular records and arrays as capable of supporting special behaviors.\n\n See #ak.Array and #ak.behavior for a more complete description of\n behaviors.\n \"\"\"\n with ak._v2._util.OperationErrorContext(\n \"ak._v2.parameters\",\n dict(array=array),\n ):\n return _impl(array)\n\n\ndef _impl(array):\n if isinstance(array, (ak._v2.highlevel.Array, ak._v2.highlevel.Record)):\n return array.layout.parameters\n\n elif isinstance(\n array,\n (ak._v2.contents.Content, ak._v2.record.Record),\n ):\n return array.parameters\n\n elif isinstance(array, ak._v2.highlevel.ArrayBuilder):\n return array.snapshot().layout.parameters\n\n elif isinstance(array, ak.layout.ArrayBuilder):\n return array.snapshot().parameters\n\n else:\n return {}\n", "path": "src/awkward/_v2/operations/ak_parameters.py"}], "after_files": [{"content": "# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n\nimport awkward as ak\n\nnp = ak.nplike.NumpyMetadata.instance()\n\n\ndef fields(array):\n \"\"\"\n Extracts record fields or tuple slot numbers from `array` (many types\n supported, including all Awkward Arrays and Records).\n\n If the array contains nested records, only the outermost record is\n queried. 
If it contains tuples instead of records, this function outputs\n string representations of integers, such as `\"0\"`, `\"1\"`, `\"2\"`, etc.\n The records or tuples may be within multiple layers of nested lists.\n\n If the array contains neither tuples nor records, this returns an empty\n list.\n \"\"\"\n with ak._v2._util.OperationErrorContext(\n \"ak._v2.fields\",\n dict(array=array),\n ):\n return _impl(array)\n\n\ndef _impl(array):\n layout = ak._v2.operations.to_layout(array, allow_record=True, allow_other=False)\n return layout.fields.copy()\n", "path": "src/awkward/_v2/operations/ak_fields.py"}, {"content": "# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n\nimport copy\nimport numbers\n\nimport awkward as ak\n\nnp = ak.nplike.NumpyMetadata.instance()\n\n\ndef parameters(array):\n \"\"\"\n Extracts parameters from the outermost array node of `array` (many types\n supported, including all Awkward Arrays and Records).\n\n Parameters are a dict from str to JSON-like objects, usually strings.\n Every #ak.layout.Content node has a different set of parameters. Some\n key names are special, such as `\"__record__\"` and `\"__array__\"` that name\n particular records and arrays as capable of supporting special behaviors.\n\n See #ak.Array and #ak.behavior for a more complete description of\n behaviors.\n \"\"\"\n with ak._v2._util.OperationErrorContext(\n \"ak._v2.parameters\",\n dict(array=array),\n ):\n return _impl(array)\n\n\ndef _impl(array):\n if isinstance(array, (ak._v2.highlevel.Array, ak._v2.highlevel.Record)):\n return _copy(array.layout.parameters)\n\n elif isinstance(\n array,\n (ak._v2.contents.Content, ak._v2.record.Record),\n ):\n return _copy(array.parameters)\n\n elif isinstance(array, ak._v2.highlevel.ArrayBuilder):\n return array.snapshot().layout.parameters\n\n elif isinstance(array, ak.layout.ArrayBuilder):\n return array.snapshot().parameters\n\n else:\n return {}\n\n\ndef _copy(what):\n if all(isinstance(x, (str, numbers.Real)) for x in what.values()):\n return what.copy()\n else:\n return copy.deepcopy(what)\n", "path": "src/awkward/_v2/operations/ak_parameters.py"}]}
| 1,562 | 428 |
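
The fix above is an instance of defensive copying: accessor functions hand back a copy of mutable internals so callers cannot rename a record's fields in place. A stand-alone sketch of the pattern, using a hypothetical `RecordLike` class rather than Awkward itself:

```python
# Minimal illustration of the defensive-copy pattern applied in the fix.
# RecordLike is a hypothetical stand-in, not Awkward's RecordArray.

class RecordLike:
    def __init__(self, fields):
        self._fields = list(fields)

    @property
    def fields(self):
        # Handing out self._fields directly lets callers mutate our state.
        return self._fields


def fields_by_reference(record):
    return record.fields          # aliases the internal list


def fields_by_copy(record):
    return record.fields.copy()   # caller receives an independent list


if __name__ == "__main__":
    rec = RecordLike(["x", "y"])
    aliased = fields_by_reference(rec)
    aliased[0] = "XXX"
    print(rec.fields)             # ['XXX', 'y'] -- internal state was mutated

    rec = RecordLike(["x", "y"])
    copied = fields_by_copy(rec)
    copied[0] = "XXX"
    print(rec.fields)             # ['x', 'y']   -- record left untouched
```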
gh_patches_debug_7366
|
rasdani/github-patches
|
git_diff
|
aws-cloudformation__cfn-lint-2665
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
cfn-lint throws error when !ToJsonString contains int value
### CloudFormation Lint Version
0.76.2
### What operating system are you using?
Ubuntu
### Describe the bug
An unexpected internal error occurs while linting rule E1031 when `ToJsonString` is applied to a numerical value:
```
2023-04-06 20:20:31,922 - cfnlint - DEBUG - Completed linting of file: templates/lambda.yml
E0002 Unknown exception while processing rule E1031: Traceback (most recent call last):
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py", line 320, in run_check
return check(*args)
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py", line 44, in wrapper
results = match_function(self, filename, cfn, *args, **kwargs)
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py", line 202, in matchall
return self.match(cfn) # pylint: disable=E1102
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/functions/ToJsonString.py", line 39, in match
LanguageExtensions.validate_pseudo_parameters(
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py", line 32, in validate_pseudo_parameters
ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py", line 32, in <listcomp>
ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]
TypeError: argument of type 'int' is not iterable
cfn-secrets-stack.yml:1:1
E0002 Unknown exception while processing rule E1031: Traceback (most recent call last):
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py", line 320, in run_check
return check(*args)
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py", line 44, in wrapper
results = match_function(self, filename, cfn, *args, **kwargs)
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py", line 202, in matchall
return self.match(cfn) # pylint: disable=E1102
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/functions/ToJsonString.py", line 39, in match
LanguageExtensions.validate_pseudo_parameters(
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py", line 32, in validate_pseudo_parameters
ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py", line 32, in <listcomp>
ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]
TypeError: argument of type 'int' is not iterable
cfn-secrets-stack.yml:1:1
```
### Expected behavior
A string-quoted int should work the same as a plain int; both are valid JSON.
### Reproduction template
This works
```yaml
Resources:
DeploymentProperties:
Properties:
Description: "testing"
Name: 'Test'
SecretString: !ToJsonString
SomeNumber: '3'
Type: AWS::SecretsManager::Secret
Transform: AWS::LanguageExtensions
```
This does not, with the above error
```yaml
Resources:
DeploymentProperties:
Properties:
Description: "testing"
Name: 'Test'
SecretString: !ToJsonString
SomeNumber: 3
Type: AWS::SecretsManager::Secret
Transform: AWS::LanguageExtensions
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cfnlint/languageExtensions.py`
Content:
```
1 from cfnlint.rules import RuleMatch
2
3
4 class LanguageExtensions:
5 """Class for a CloudFormation languageExtensions"""
6
7 def validate_transform_is_declared(
8 self, has_language_extensions_transform, matches, tree, intrinsic_function
9 ):
10 if not has_language_extensions_transform:
11 message = (
12 "Missing Transform: Declare the AWS::LanguageExtensions Transform globally to enable use"
13 " of the intrinsic function " + intrinsic_function + " at {0}"
14 )
15 matches.append(RuleMatch(tree[:], message.format("/".join(map(str, tree)))))
16 return matches
17
18 def validate_type(self, fn_object_val, matches, tree, intrinsic_function):
19 if not isinstance(fn_object_val, dict) and not isinstance(fn_object_val, list):
20 message = intrinsic_function + " needs a map or a list at {0}"
21 matches.append(RuleMatch(tree[:], message.format("/".join(map(str, tree)))))
22 elif len(fn_object_val) == 0:
23 message = "Invalid value for " + intrinsic_function + " for {0}"
24 matches.append(RuleMatch(tree[:], message.format("/".join(map(str, tree)))))
25 return matches
26
27 def validate_pseudo_parameters(
28 self, fn_object_val, matches, tree, pseudo_params, intrinsic_function
29 ):
30 if isinstance(fn_object_val, dict):
31 ref = "Ref"
32 ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]
33 for ref in ref_list:
34 if ref in pseudo_params:
35 message = (
36 intrinsic_function
37 + " does not support the pseudo parameter "
38 + ref
39 + " for {0}"
40 )
41 matches.append(
42 RuleMatch(tree[:], message.format("/".join(map(str, tree))))
43 )
44 return matches
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/cfnlint/languageExtensions.py b/src/cfnlint/languageExtensions.py
--- a/src/cfnlint/languageExtensions.py
+++ b/src/cfnlint/languageExtensions.py
@@ -29,7 +29,11 @@
):
if isinstance(fn_object_val, dict):
ref = "Ref"
- ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]
+ ref_list = [
+ val[ref]
+ for _, val in fn_object_val.items()
+ if hasattr(val, "__iter__") and ref in val
+ ]
for ref in ref_list:
if ref in pseudo_params:
message = (
|
{"golden_diff": "diff --git a/src/cfnlint/languageExtensions.py b/src/cfnlint/languageExtensions.py\n--- a/src/cfnlint/languageExtensions.py\n+++ b/src/cfnlint/languageExtensions.py\n@@ -29,7 +29,11 @@\n ):\n if isinstance(fn_object_val, dict):\n ref = \"Ref\"\n- ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]\n+ ref_list = [\n+ val[ref]\n+ for _, val in fn_object_val.items()\n+ if hasattr(val, \"__iter__\") and ref in val\n+ ]\n for ref in ref_list:\n if ref in pseudo_params:\n message = (\n", "issue": "cfn-lint throws error when !ToJsonString contains int value\n### CloudFormation Lint Version\n\n0.76.2\n\n### What operating system are you using?\n\nUbuntu\n\n### Describe the bug\n\nUnexpected internal error during linting of rule E1031, involving `ToJsonString` of numerical value\r\n\r\n```\r\n2023-04-06 20:20:31,922 - cfnlint - DEBUG - Completed linting of file: templates/lambda.yml\r\nE0002 Unknown exception while processing rule E1031: Traceback (most recent call last):\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py\", line 320, in run_check\r\n return check(*args)\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py\", line 44, in wrapper\r\n results = match_function(self, filename, cfn, *args, **kwargs)\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py\", line 202, in matchall\r\n return self.match(cfn) # pylint: disable=E1102\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/functions/ToJsonString.py\", line 39, in match\r\n LanguageExtensions.validate_pseudo_parameters(\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py\", line 32, in validate_pseudo_parameters\r\n ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py\", line 32, in <listcomp>\r\n ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]\r\nTypeError: argument of type 'int' is not iterable\r\n\r\ncfn-secrets-stack.yml:1:1\r\n\r\nE0002 Unknown exception while processing rule E1031: Traceback (most recent call last):\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py\", line 320, in run_check\r\n return check(*args)\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py\", line 44, in wrapper\r\n results = match_function(self, filename, cfn, *args, **kwargs)\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py\", line 202, in matchall\r\n return self.match(cfn) # pylint: disable=E1102\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/functions/ToJsonString.py\", line 39, in match\r\n LanguageExtensions.validate_pseudo_parameters(\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py\", line 32, in validate_pseudo_parameters\r\n ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py\", line 32, in <listcomp>\r\n ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]\r\nTypeError: argument of type 'int' is not iterable\r\n\r\ncfn-secrets-stack.yml:1:1\r\n```\n\n### Expected 
behavior\n\nString quoted int should work as well as int, both are valid json\n\n### Reproduction template\n\nThis works\r\n```yaml\r\nResources:\r\n DeploymentProperties:\r\n Properties:\r\n Description: \"testing\"\r\n Name: 'Test'\r\n SecretString: !ToJsonString\r\n SomeNumber: '3'\r\n Type: AWS::SecretsManager::Secret\r\nTransform: AWS::LanguageExtensions\r\n```\r\n\r\nThis does not, with the above error\r\n```yaml\r\nResources:\r\n DeploymentProperties:\r\n Properties:\r\n Description: \"testing\"\r\n Name: 'Test'\r\n SecretString: !ToJsonString\r\n SomeNumber: 3\r\n Type: AWS::SecretsManager::Secret\r\nTransform: AWS::LanguageExtensions\r\n```\r\n\n", "before_files": [{"content": "from cfnlint.rules import RuleMatch\n\n\nclass LanguageExtensions:\n \"\"\"Class for a CloudFormation languageExtensions\"\"\"\n\n def validate_transform_is_declared(\n self, has_language_extensions_transform, matches, tree, intrinsic_function\n ):\n if not has_language_extensions_transform:\n message = (\n \"Missing Transform: Declare the AWS::LanguageExtensions Transform globally to enable use\"\n \" of the intrinsic function \" + intrinsic_function + \" at {0}\"\n )\n matches.append(RuleMatch(tree[:], message.format(\"/\".join(map(str, tree)))))\n return matches\n\n def validate_type(self, fn_object_val, matches, tree, intrinsic_function):\n if not isinstance(fn_object_val, dict) and not isinstance(fn_object_val, list):\n message = intrinsic_function + \" needs a map or a list at {0}\"\n matches.append(RuleMatch(tree[:], message.format(\"/\".join(map(str, tree)))))\n elif len(fn_object_val) == 0:\n message = \"Invalid value for \" + intrinsic_function + \" for {0}\"\n matches.append(RuleMatch(tree[:], message.format(\"/\".join(map(str, tree)))))\n return matches\n\n def validate_pseudo_parameters(\n self, fn_object_val, matches, tree, pseudo_params, intrinsic_function\n ):\n if isinstance(fn_object_val, dict):\n ref = \"Ref\"\n ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]\n for ref in ref_list:\n if ref in pseudo_params:\n message = (\n intrinsic_function\n + \" does not support the pseudo parameter \"\n + ref\n + \" for {0}\"\n )\n matches.append(\n RuleMatch(tree[:], message.format(\"/\".join(map(str, tree))))\n )\n return matches\n", "path": "src/cfnlint/languageExtensions.py"}], "after_files": [{"content": "from cfnlint.rules import RuleMatch\n\n\nclass LanguageExtensions:\n \"\"\"Class for a CloudFormation languageExtensions\"\"\"\n\n def validate_transform_is_declared(\n self, has_language_extensions_transform, matches, tree, intrinsic_function\n ):\n if not has_language_extensions_transform:\n message = (\n \"Missing Transform: Declare the AWS::LanguageExtensions Transform globally to enable use\"\n \" of the intrinsic function \" + intrinsic_function + \" at {0}\"\n )\n matches.append(RuleMatch(tree[:], message.format(\"/\".join(map(str, tree)))))\n return matches\n\n def validate_type(self, fn_object_val, matches, tree, intrinsic_function):\n if not isinstance(fn_object_val, dict) and not isinstance(fn_object_val, list):\n message = intrinsic_function + \" needs a map or a list at {0}\"\n matches.append(RuleMatch(tree[:], message.format(\"/\".join(map(str, tree)))))\n elif len(fn_object_val) == 0:\n message = \"Invalid value for \" + intrinsic_function + \" for {0}\"\n matches.append(RuleMatch(tree[:], message.format(\"/\".join(map(str, tree)))))\n return matches\n\n def validate_pseudo_parameters(\n self, fn_object_val, matches, tree, pseudo_params, 
intrinsic_function\n ):\n if isinstance(fn_object_val, dict):\n ref = \"Ref\"\n ref_list = [\n val[ref]\n for _, val in fn_object_val.items()\n if hasattr(val, \"__iter__\") and ref in val\n ]\n for ref in ref_list:\n if ref in pseudo_params:\n message = (\n intrinsic_function\n + \" does not support the pseudo parameter \"\n + ref\n + \" for {0}\"\n )\n matches.append(\n RuleMatch(tree[:], message.format(\"/\".join(map(str, tree))))\n )\n return matches\n", "path": "src/cfnlint/languageExtensions.py"}]}
| 1,760 | 154 |
gh_patches_debug_38350
|
rasdani/github-patches
|
git_diff
|
quantumlib__Cirq-5098
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Deprecate gateset.accepts_global_phase
**Description of the issue**
Issue requested by @tanujkhattar
xref: https://github.com/quantumlib/Cirq/pull/4697/files/d64eb23319c0eb7664526613b95db368659fb7aa#r766054614
Since global phase has a gate, it can be inspected like any other gate. Therefore gatesets no longer need special handling for global phase.
mypy check is failing on `CI` for sympy `v1.10`
**Description of the issue**
See: https://github.com/quantumlib/Cirq/pull/4936#issuecomment-1060953773
**Cirq version**
`master` version
Docs: Filter out TYPE_CHECKING from public docs
**Description of the issue**
The `TYPE_CHECKING` variable imported from `typing` shows up in API docs (example: https://github.com/quantumlib/Cirq/issues/5150). We should filter it out, since it's not part of the cirq API. Per @dabacon's [comment](https://github.com/quantumlib/Cirq/pull/5229#issuecomment-1093080151), we should be able to do this in `dev_tools/docs/build_api_docs.py`.
Deprecation of abstract/protocol methods
We need a better way to deprecate abstract methods.
Context (#3860):
@balopat This ends up being a breaking change though for any simulator that was overriding `_simulator_iterator`. This is why I had left the call to `_simulator_iterator` there in the original PR #3650. Otherwise I'd have just inlined and deleted `_simulator_iterator` outright: it's a private method and it's unused elsewhere.
It doesn't sound like anyone has been broken by this, so maybe it's okay. (We can just delete `_simulator_iterator` in that case; it's not called by anyone). I think there's still an unmet need for cleanly deprecate abstract methods, or methods that were intended to be overridden. The base class has to continue calling them until the deadline or it will break child classes. I like the error-by-default, but maybe there's a way to silence these errors when retaining the call to a deprecated method is required, like here.
_Originally posted by @daxfohl in https://github.com/quantumlib/Cirq/pull/3860#discussion_r587578684_
--- END ISSUE ---
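Editor's note on the last issue above (deprecating abstract/protocol methods): a common generic pattern is to keep calling the deprecated hook but warn when a subclass still overrides it. The sketch below is illustrative only and is not cirq's actual deprecation helper; `SimulatorBase` and `_simulator_iterator` are used purely as placeholder names.

```python
# Minimal, generic sketch (assumption: plain Python, no cirq-specific machinery).
import warnings


class SimulatorBase:
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # Warn once at class-definition time if a child still overrides the
        # deprecated hook, instead of erroring out when it is eventually removed.
        if '_simulator_iterator' in cls.__dict__:
            warnings.warn(
                f"{cls.__name__} overrides _simulator_iterator, which is deprecated "
                "and will no longer be called in a future release.",
                DeprecationWarning,
                stacklevel=2,
            )
```

This keeps existing child simulators working while surfacing an actionable warning, trading the error-by-default behaviour discussed in the comment for a warning that can be filtered with the standard `warnings` machinery.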
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/qaoa.py`
Content:
```
1 # pylint: disable=wrong-or-nonexistent-copyright-notice
2 """Runs the Quantum Approximate Optimization Algorithm on Max-Cut.
3
4 === EXAMPLE OUTPUT ===
5
6 Example QAOA circuit:
7 0 1 2 3 4 5
8 │ │ │ │ │ │
9 H H H H H H
10 │ │ │ │ │ │
11 ZZ──────────ZZ^(-4/13) │ │ │ │
12 ┌ │ │ │ │ │ │ ┐
13 │ ZZ──────────┼───────────ZZ^(-4/13) │ │ │ │
14 │ │ ZZ──────────┼───────────ZZ^(-4/13) │ │ │
15 └ │ │ │ │ │ │ ┘
16 ┌ │ │ │ │ │ │ ┐
17 │ ZZ──────────┼───────────┼───────────┼───────────ZZ^(-4/13) │ │
18 │ │ ZZ──────────┼───────────┼───────────┼───────────ZZ^(-4/13) │
19 └ │ │ │ │ │ │ ┘
20 Rx(0.151π) Rx(0.151π) ZZ──────────┼───────────ZZ^(-4/13) │
21 │ │ │ │ │ │
22 ZZ──────────ZZ^-0.941 ZZ──────────┼───────────┼───────────ZZ^(-4/13)
23 │ │ │ ZZ──────────ZZ^(-4/13) │
24 ┌ │ │ │ │ │ │ ┐
25 │ │ │ Rx(0.151π) ZZ──────────┼───────────ZZ^(-4/13) │
26 │ │ │ │ │ Rx(0.151π) │ │
27 └ │ │ │ │ │ │ ┘
28 ZZ──────────┼───────────ZZ^-0.941 Rx(0.151π) │ Rx(0.151π)
29 ┌ │ │ │ │ │ │ ┐
30 │ ZZ──────────┼───────────┼───────────┼───────────ZZ^-0.941 │ │
31 │ │ ZZ──────────┼───────────ZZ^-0.941 │ │ │
32 └ │ │ │ │ │ │ ┘
33 Rx(-0.448π) ZZ──────────┼───────────┼───────────┼───────────ZZ^-0.941
34 │ │ ZZ──────────┼───────────ZZ^-0.941 │
35 │ │ │ │ │ │
36 │ Rx(-0.448π) ZZ──────────┼───────────┼───────────ZZ^-0.941
37 │ │ │ ZZ──────────ZZ^-0.941 │
38 ┌ │ │ │ │ │ │ ┐
39 │ │ │ Rx(-0.448π) ZZ──────────┼───────────ZZ^-0.941 │
40 │ │ │ │ │ Rx(-0.448π) │ │
41 └ │ │ │ │ │ │ ┘
42 │ │ │ Rx(-0.448π) │ Rx(-0.448π)
43 │ │ │ │ │ │
44 M('m')──────M───────────M───────────M───────────M───────────M
45 │ │ │ │ │ │
46 Optimizing objective function ...
47 The largest cut value found was 7.
48 The largest possible cut has size 7.
49 The approximation ratio achieved is 1.0.
50 """
51
52 import itertools
53
54 import numpy as np
55 import networkx
56 import scipy.optimize
57
58 import cirq
59
60
61 def main(repetitions=1000, maxiter=50):
62 # Set problem parameters
63 n = 6
64 p = 2
65
66 # Generate a random 3-regular graph on n nodes
67 graph = networkx.random_regular_graph(3, n)
68
69 # Make qubits
70 qubits = cirq.LineQubit.range(n)
71
72 # Print an example circuit
73 betas = np.random.uniform(-np.pi, np.pi, size=p)
74 gammas = np.random.uniform(-np.pi, np.pi, size=p)
75 circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph)
76 print('Example QAOA circuit:')
77 print(circuit.to_text_diagram(transpose=True))
78
79 # Create variables to store the largest cut and cut value found
80 largest_cut_found = None
81 largest_cut_value_found = 0
82
83 # Initialize simulator
84 simulator = cirq.Simulator()
85
86 # Define objective function (we'll use the negative expected cut value)
87
88 def f(x):
89 # Create circuit
90 betas = x[:p]
91 gammas = x[p:]
92 circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph)
93 # Sample bitstrings from circuit
94 result = simulator.run(circuit, repetitions=repetitions)
95 bitstrings = result.measurements['m']
96 # Process bitstrings
97 nonlocal largest_cut_found
98 nonlocal largest_cut_value_found
99 values = cut_values(bitstrings, graph)
100 max_value_index = np.argmax(values)
101 max_value = values[max_value_index]
102 if max_value > largest_cut_value_found:
103 largest_cut_value_found = max_value
104 largest_cut_found = bitstrings[max_value_index]
105 mean = np.mean(values)
106 return -mean
107
108 # Pick an initial guess
109 x0 = np.random.uniform(-np.pi, np.pi, size=2 * p)
110
111 # Optimize f
112 print('Optimizing objective function ...')
113 scipy.optimize.minimize(f, x0, method='Nelder-Mead', options={'maxiter': maxiter})
114
115 # Compute best possible cut value via brute force search
116 all_bitstrings = np.array(list(itertools.product(range(2), repeat=n)))
117 all_values = cut_values(all_bitstrings, graph)
118 max_cut_value = np.max(all_values)
119
120 # Print the results
121 print(f'The largest cut value found was {largest_cut_value_found}.')
122 print(f'The largest possible cut has size {max_cut_value}.')
123 print(f'The approximation ratio achieved is {largest_cut_value_found / max_cut_value}.')
124
125
126 def rzz(rads):
127 """Returns a gate with the matrix exp(-i Z⊗Z rads)."""
128 return cirq.ZZPowGate(exponent=2 * rads / np.pi, global_shift=-0.5)
129
130
131 def qaoa_max_cut_unitary(qubits, betas, gammas, graph): # Nodes should be integers
132 for beta, gamma in zip(betas, gammas):
133 yield (rzz(-0.5 * gamma).on(qubits[i], qubits[j]) for i, j in graph.edges)
134 yield cirq.rx(2 * beta).on_each(*qubits)
135
136
137 def qaoa_max_cut_circuit(qubits, betas, gammas, graph): # Nodes should be integers
138 return cirq.Circuit(
139 # Prepare uniform superposition
140 cirq.H.on_each(*qubits),
141 # Apply QAOA unitary
142 qaoa_max_cut_unitary(qubits, betas, gammas, graph),
143 # Measure
144 cirq.measure(*qubits, key='m'),
145 )
146
147
148 def cut_values(bitstrings, graph):
149 mat = networkx.adjacency_matrix(graph, nodelist=sorted(graph.nodes))
150 vecs = (-1) ** bitstrings
151 vals = 0.5 * np.sum(vecs * (mat @ vecs.T).T, axis=-1)
152 vals = 0.5 * (graph.size() - vals)
153 return vals
154
155
156 if __name__ == '__main__':
157 main()
158
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/examples/qaoa.py b/examples/qaoa.py
--- a/examples/qaoa.py
+++ b/examples/qaoa.py
@@ -58,7 +58,7 @@
import cirq
-def main(repetitions=1000, maxiter=50):
+def main(repetitions=10, maxiter=50, use_boolean_hamiltonian_gate=False):
# Set problem parameters
n = 6
p = 2
@@ -72,7 +72,7 @@
# Print an example circuit
betas = np.random.uniform(-np.pi, np.pi, size=p)
gammas = np.random.uniform(-np.pi, np.pi, size=p)
- circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph)
+ circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph, use_boolean_hamiltonian_gate)
print('Example QAOA circuit:')
print(circuit.to_text_diagram(transpose=True))
@@ -89,7 +89,7 @@
# Create circuit
betas = x[:p]
gammas = x[p:]
- circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph)
+ circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph, use_boolean_hamiltonian_gate)
# Sample bitstrings from circuit
result = simulator.run(circuit, repetitions=repetitions)
bitstrings = result.measurements['m']
@@ -128,18 +128,29 @@
return cirq.ZZPowGate(exponent=2 * rads / np.pi, global_shift=-0.5)
-def qaoa_max_cut_unitary(qubits, betas, gammas, graph): # Nodes should be integers
- for beta, gamma in zip(betas, gammas):
- yield (rzz(-0.5 * gamma).on(qubits[i], qubits[j]) for i, j in graph.edges)
- yield cirq.rx(2 * beta).on_each(*qubits)
-
-
-def qaoa_max_cut_circuit(qubits, betas, gammas, graph): # Nodes should be integers
+def qaoa_max_cut_unitary(
+ qubits, betas, gammas, graph, use_boolean_hamiltonian_gate
+): # Nodes should be integers
+ if use_boolean_hamiltonian_gate:
+ booleans = [f"x{i} ^ x{j}" for i, j in sorted(graph.edges)]
+ param_names = [f"x{i}" for i in range(len(qubits))]
+ for beta, gamma in zip(betas, gammas):
+ yield cirq.BooleanHamiltonianGate(param_names, booleans, 2.0 * gamma).on(*qubits)
+ yield cirq.rx(2 * beta).on_each(*qubits)
+ else:
+ for beta, gamma in zip(betas, gammas):
+ yield (rzz(-0.5 * gamma).on(qubits[i], qubits[j]) for i, j in graph.edges)
+ yield cirq.rx(2 * beta).on_each(*qubits)
+
+
+def qaoa_max_cut_circuit(
+ qubits, betas, gammas, graph, use_boolean_hamiltonian_gate
+): # Nodes should be integers
return cirq.Circuit(
# Prepare uniform superposition
cirq.H.on_each(*qubits),
# Apply QAOA unitary
- qaoa_max_cut_unitary(qubits, betas, gammas, graph),
+ qaoa_max_cut_unitary(qubits, betas, gammas, graph, use_boolean_hamiltonian_gate),
# Measure
cirq.measure(*qubits, key='m'),
)
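A quick smoke test of the patched example might look like the following; the flag name comes straight from the diff above, and the only assumption is that the patched `examples/qaoa.py` is importable as `qaoa`.

```python
# Hypothetical smoke test for the patched examples/qaoa.py (not part of the patch).
import qaoa

# Original ZZPowGate-based path.
qaoa.main(repetitions=10, maxiter=5, use_boolean_hamiltonian_gate=False)

# New cirq.BooleanHamiltonianGate-based path introduced by the patch.
qaoa.main(repetitions=10, maxiter=5, use_boolean_hamiltonian_gate=True)
```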
|
{"golden_diff": "diff --git a/examples/qaoa.py b/examples/qaoa.py\n--- a/examples/qaoa.py\n+++ b/examples/qaoa.py\n@@ -58,7 +58,7 @@\n import cirq\n \n \n-def main(repetitions=1000, maxiter=50):\n+def main(repetitions=10, maxiter=50, use_boolean_hamiltonian_gate=False):\n # Set problem parameters\n n = 6\n p = 2\n@@ -72,7 +72,7 @@\n # Print an example circuit\n betas = np.random.uniform(-np.pi, np.pi, size=p)\n gammas = np.random.uniform(-np.pi, np.pi, size=p)\n- circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph)\n+ circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph, use_boolean_hamiltonian_gate)\n print('Example QAOA circuit:')\n print(circuit.to_text_diagram(transpose=True))\n \n@@ -89,7 +89,7 @@\n # Create circuit\n betas = x[:p]\n gammas = x[p:]\n- circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph)\n+ circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph, use_boolean_hamiltonian_gate)\n # Sample bitstrings from circuit\n result = simulator.run(circuit, repetitions=repetitions)\n bitstrings = result.measurements['m']\n@@ -128,18 +128,29 @@\n return cirq.ZZPowGate(exponent=2 * rads / np.pi, global_shift=-0.5)\n \n \n-def qaoa_max_cut_unitary(qubits, betas, gammas, graph): # Nodes should be integers\n- for beta, gamma in zip(betas, gammas):\n- yield (rzz(-0.5 * gamma).on(qubits[i], qubits[j]) for i, j in graph.edges)\n- yield cirq.rx(2 * beta).on_each(*qubits)\n-\n-\n-def qaoa_max_cut_circuit(qubits, betas, gammas, graph): # Nodes should be integers\n+def qaoa_max_cut_unitary(\n+ qubits, betas, gammas, graph, use_boolean_hamiltonian_gate\n+): # Nodes should be integers\n+ if use_boolean_hamiltonian_gate:\n+ booleans = [f\"x{i} ^ x{j}\" for i, j in sorted(graph.edges)]\n+ param_names = [f\"x{i}\" for i in range(len(qubits))]\n+ for beta, gamma in zip(betas, gammas):\n+ yield cirq.BooleanHamiltonianGate(param_names, booleans, 2.0 * gamma).on(*qubits)\n+ yield cirq.rx(2 * beta).on_each(*qubits)\n+ else:\n+ for beta, gamma in zip(betas, gammas):\n+ yield (rzz(-0.5 * gamma).on(qubits[i], qubits[j]) for i, j in graph.edges)\n+ yield cirq.rx(2 * beta).on_each(*qubits)\n+\n+\n+def qaoa_max_cut_circuit(\n+ qubits, betas, gammas, graph, use_boolean_hamiltonian_gate\n+): # Nodes should be integers\n return cirq.Circuit(\n # Prepare uniform superposition\n cirq.H.on_each(*qubits),\n # Apply QAOA unitary\n- qaoa_max_cut_unitary(qubits, betas, gammas, graph),\n+ qaoa_max_cut_unitary(qubits, betas, gammas, graph, use_boolean_hamiltonian_gate),\n # Measure\n cirq.measure(*qubits, key='m'),\n )\n", "issue": "Deprecate gateset.accepts_global_phase\n**Description of the issue**\r\n\r\nIssue requested by @tanujkhattar \r\n\r\nxref: https://github.com/quantumlib/Cirq/pull/4697/files/d64eb23319c0eb7664526613b95db368659fb7aa#r766054614\r\n\r\nSince global phase has a gate, it can be inspected like any other gate. Therefore gatesets no longer need special handling for global phase.\nmypy check is failing on `CI` for sympy `v1.10`\n**Description of the issue**\r\nSee: https://github.com/quantumlib/Cirq/pull/4936#issuecomment-1060953773\r\n\r\n**Cirq version**\r\n`master` version\r\n\r\n\nDocs: Filter out TYPE_CHECKING from public docs\n**Description of the issue**\r\n\r\nThe `TYPE_CHECKING` variable imported from `typing` shows up in API docs (example: https://github.com/quantumlib/Cirq/issues/5150). We should filter it out, since it's not part of the cirq API. 
Per @dabacon's [comment](https://github.com/quantumlib/Cirq/pull/5229#issuecomment-1093080151), we should be able to do this in `dev_tools/docs/build_api_docs.py`.\r\n\nDeprecation of abstract/protocol methods\nWe need a better way to deprecate abstract methods.\r\n\r\nContext (#3860): \r\n\r\n@balopat This ends up being a breaking change though for any simulator that was overriding `_simulator_iterator`. This is why I had left the call to `_simulator_iterator` there in the original PR #3650. Otherwise I'd have just inlined and deleted `_simulator_iterator` outright: it's a private method and it's unused elsewhere.\r\n\r\nIt doesn't sound like anyone has been broken by this, so maybe it's okay. (We can just delete `_simulator_iterator` in that case; it's not called by anyone). I think there's still an unmet need for cleanly deprecate abstract methods, or methods that were intended to be overridden. The base class has to continue calling them until the deadline or it will break child classes. I like the error-by-default, but maybe there's a way to silence these errors when retaining the call to a deprecated method is required, like here.\r\n\r\n_Originally posted by @daxfohl in https://github.com/quantumlib/Cirq/pull/3860#discussion_r587578684_\n", "before_files": [{"content": "# pylint: disable=wrong-or-nonexistent-copyright-notice\n\"\"\"Runs the Quantum Approximate Optimization Algorithm on Max-Cut.\n\n=== EXAMPLE OUTPUT ===\n\nExample QAOA circuit:\n 0 1 2 3 4 5\n \u2502 \u2502 \u2502 \u2502 \u2502 \u2502\n H H H H H H\n \u2502 \u2502 \u2502 \u2502 \u2502 \u2502\n ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502 \u2502 \u2502 \u2502\n\u250c \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2510\n\u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502 \u2502 \u2502 \u2502\n\u2502 \u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502 \u2502 \u2502\n\u2514 \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2518\n\u250c \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2510\n\u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502 \u2502\n\u2502 \u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502\n\u2514 \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2518\n Rx(0.151\u03c0) Rx(0.151\u03c0) ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502\n \u2502 \u2502 \u2502 \u2502 \u2502 \u2502\n ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13)\n \u2502 \u2502 \u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502\n\u250c \u2502 \u2502 
\u2502 \u2502 \u2502 \u2502 \u2510\n\u2502 \u2502 \u2502 Rx(0.151\u03c0) ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502\n\u2502 \u2502 \u2502 \u2502 \u2502 Rx(0.151\u03c0) \u2502 \u2502\n\u2514 \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2518\n ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 Rx(0.151\u03c0) \u2502 Rx(0.151\u03c0)\n\u250c \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2510\n\u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 \u2502 \u2502\n\u2502 \u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 \u2502 \u2502 \u2502\n\u2514 \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2518\n Rx(-0.448\u03c0) ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941\n \u2502 \u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 \u2502\n \u2502 \u2502 \u2502 \u2502 \u2502 \u2502\n \u2502 Rx(-0.448\u03c0) ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941\n \u2502 \u2502 \u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 \u2502\n\u250c \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2510\n\u2502 \u2502 \u2502 Rx(-0.448\u03c0) ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 \u2502\n\u2502 \u2502 \u2502 \u2502 \u2502 Rx(-0.448\u03c0) \u2502 \u2502\n\u2514 \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2518\n \u2502 \u2502 \u2502 Rx(-0.448\u03c0) \u2502 Rx(-0.448\u03c0)\n \u2502 \u2502 \u2502 \u2502 \u2502 \u2502\n M('m')\u2500\u2500\u2500\u2500\u2500\u2500M\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500M\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500M\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500M\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500M\n \u2502 \u2502 \u2502 \u2502 \u2502 \u2502\nOptimizing objective function ...\nThe largest cut value found was 7.\nThe largest possible cut has size 7.\nThe approximation ratio achieved is 1.0.\n\"\"\"\n\nimport itertools\n\nimport numpy as np\nimport networkx\nimport scipy.optimize\n\nimport cirq\n\n\ndef main(repetitions=1000, maxiter=50):\n # Set problem parameters\n n = 6\n p = 2\n\n # Generate a random 3-regular graph on n nodes\n graph = networkx.random_regular_graph(3, n)\n\n # Make qubits\n qubits = cirq.LineQubit.range(n)\n\n # Print an example circuit\n betas = np.random.uniform(-np.pi, np.pi, size=p)\n gammas = np.random.uniform(-np.pi, np.pi, size=p)\n circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph)\n print('Example QAOA 
circuit:')\n print(circuit.to_text_diagram(transpose=True))\n\n # Create variables to store the largest cut and cut value found\n largest_cut_found = None\n largest_cut_value_found = 0\n\n # Initialize simulator\n simulator = cirq.Simulator()\n\n # Define objective function (we'll use the negative expected cut value)\n\n def f(x):\n # Create circuit\n betas = x[:p]\n gammas = x[p:]\n circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph)\n # Sample bitstrings from circuit\n result = simulator.run(circuit, repetitions=repetitions)\n bitstrings = result.measurements['m']\n # Process bitstrings\n nonlocal largest_cut_found\n nonlocal largest_cut_value_found\n values = cut_values(bitstrings, graph)\n max_value_index = np.argmax(values)\n max_value = values[max_value_index]\n if max_value > largest_cut_value_found:\n largest_cut_value_found = max_value\n largest_cut_found = bitstrings[max_value_index]\n mean = np.mean(values)\n return -mean\n\n # Pick an initial guess\n x0 = np.random.uniform(-np.pi, np.pi, size=2 * p)\n\n # Optimize f\n print('Optimizing objective function ...')\n scipy.optimize.minimize(f, x0, method='Nelder-Mead', options={'maxiter': maxiter})\n\n # Compute best possible cut value via brute force search\n all_bitstrings = np.array(list(itertools.product(range(2), repeat=n)))\n all_values = cut_values(all_bitstrings, graph)\n max_cut_value = np.max(all_values)\n\n # Print the results\n print(f'The largest cut value found was {largest_cut_value_found}.')\n print(f'The largest possible cut has size {max_cut_value}.')\n print(f'The approximation ratio achieved is {largest_cut_value_found / max_cut_value}.')\n\n\ndef rzz(rads):\n \"\"\"Returns a gate with the matrix exp(-i Z\u2297Z rads).\"\"\"\n return cirq.ZZPowGate(exponent=2 * rads / np.pi, global_shift=-0.5)\n\n\ndef qaoa_max_cut_unitary(qubits, betas, gammas, graph): # Nodes should be integers\n for beta, gamma in zip(betas, gammas):\n yield (rzz(-0.5 * gamma).on(qubits[i], qubits[j]) for i, j in graph.edges)\n yield cirq.rx(2 * beta).on_each(*qubits)\n\n\ndef qaoa_max_cut_circuit(qubits, betas, gammas, graph): # Nodes should be integers\n return cirq.Circuit(\n # Prepare uniform superposition\n cirq.H.on_each(*qubits),\n # Apply QAOA unitary\n qaoa_max_cut_unitary(qubits, betas, gammas, graph),\n # Measure\n cirq.measure(*qubits, key='m'),\n )\n\n\ndef cut_values(bitstrings, graph):\n mat = networkx.adjacency_matrix(graph, nodelist=sorted(graph.nodes))\n vecs = (-1) ** bitstrings\n vals = 0.5 * np.sum(vecs * (mat @ vecs.T).T, axis=-1)\n vals = 0.5 * (graph.size() - vals)\n return vals\n\n\nif __name__ == '__main__':\n main()\n", "path": "examples/qaoa.py"}], "after_files": [{"content": "# pylint: disable=wrong-or-nonexistent-copyright-notice\n\"\"\"Runs the Quantum Approximate Optimization Algorithm on Max-Cut.\n\n=== EXAMPLE OUTPUT ===\n\nExample QAOA circuit:\n 0 1 2 3 4 5\n \u2502 \u2502 \u2502 \u2502 \u2502 \u2502\n H H H H H H\n \u2502 \u2502 \u2502 \u2502 \u2502 \u2502\n ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502 \u2502 \u2502 \u2502\n\u250c \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2510\n\u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502 \u2502 \u2502 \u2502\n\u2502 \u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502 \u2502 \u2502\n\u2514 \u2502 \u2502 \u2502 
\u2502 \u2502 \u2502 \u2518\n\u250c \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2510\n\u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502 \u2502\n\u2502 \u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502\n\u2514 \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2518\n Rx(0.151\u03c0) Rx(0.151\u03c0) ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502\n \u2502 \u2502 \u2502 \u2502 \u2502 \u2502\n ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13)\n \u2502 \u2502 \u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502\n\u250c \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2510\n\u2502 \u2502 \u2502 Rx(0.151\u03c0) ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^(-4/13) \u2502\n\u2502 \u2502 \u2502 \u2502 \u2502 Rx(0.151\u03c0) \u2502 \u2502\n\u2514 \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2518\n ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 Rx(0.151\u03c0) \u2502 Rx(0.151\u03c0)\n\u250c \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2510\n\u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 \u2502 \u2502\n\u2502 \u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 \u2502 \u2502 \u2502\n\u2514 \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2518\n Rx(-0.448\u03c0) ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941\n \u2502 \u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 \u2502\n \u2502 \u2502 \u2502 \u2502 \u2502 \u2502\n \u2502 Rx(-0.448\u03c0) ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941\n \u2502 \u2502 \u2502 ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 \u2502\n\u250c \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2510\n\u2502 \u2502 \u2502 Rx(-0.448\u03c0) 
ZZ\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500ZZ^-0.941 \u2502\n\u2502 \u2502 \u2502 \u2502 \u2502 Rx(-0.448\u03c0) \u2502 \u2502\n\u2514 \u2502 \u2502 \u2502 \u2502 \u2502 \u2502 \u2518\n \u2502 \u2502 \u2502 Rx(-0.448\u03c0) \u2502 Rx(-0.448\u03c0)\n \u2502 \u2502 \u2502 \u2502 \u2502 \u2502\n M('m')\u2500\u2500\u2500\u2500\u2500\u2500M\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500M\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500M\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500M\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500M\n \u2502 \u2502 \u2502 \u2502 \u2502 \u2502\nOptimizing objective function ...\nThe largest cut value found was 7.\nThe largest possible cut has size 7.\nThe approximation ratio achieved is 1.0.\n\"\"\"\n\nimport itertools\n\nimport numpy as np\nimport networkx\nimport scipy.optimize\n\nimport cirq\n\n\ndef main(repetitions=10, maxiter=50, use_boolean_hamiltonian_gate=False):\n # Set problem parameters\n n = 6\n p = 2\n\n # Generate a random 3-regular graph on n nodes\n graph = networkx.random_regular_graph(3, n)\n\n # Make qubits\n qubits = cirq.LineQubit.range(n)\n\n # Print an example circuit\n betas = np.random.uniform(-np.pi, np.pi, size=p)\n gammas = np.random.uniform(-np.pi, np.pi, size=p)\n circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph, use_boolean_hamiltonian_gate)\n print('Example QAOA circuit:')\n print(circuit.to_text_diagram(transpose=True))\n\n # Create variables to store the largest cut and cut value found\n largest_cut_found = None\n largest_cut_value_found = 0\n\n # Initialize simulator\n simulator = cirq.Simulator()\n\n # Define objective function (we'll use the negative expected cut value)\n\n def f(x):\n # Create circuit\n betas = x[:p]\n gammas = x[p:]\n circuit = qaoa_max_cut_circuit(qubits, betas, gammas, graph, use_boolean_hamiltonian_gate)\n # Sample bitstrings from circuit\n result = simulator.run(circuit, repetitions=repetitions)\n bitstrings = result.measurements['m']\n # Process bitstrings\n nonlocal largest_cut_found\n nonlocal largest_cut_value_found\n values = cut_values(bitstrings, graph)\n max_value_index = np.argmax(values)\n max_value = values[max_value_index]\n if max_value > largest_cut_value_found:\n largest_cut_value_found = max_value\n largest_cut_found = bitstrings[max_value_index]\n mean = np.mean(values)\n return -mean\n\n # Pick an initial guess\n x0 = np.random.uniform(-np.pi, np.pi, size=2 * p)\n\n # Optimize f\n print('Optimizing objective function ...')\n scipy.optimize.minimize(f, x0, method='Nelder-Mead', options={'maxiter': maxiter})\n\n # Compute best possible cut value via brute force search\n all_bitstrings = np.array(list(itertools.product(range(2), repeat=n)))\n all_values = cut_values(all_bitstrings, graph)\n max_cut_value = np.max(all_values)\n\n # Print the results\n print(f'The largest cut value found was {largest_cut_value_found}.')\n print(f'The largest possible cut has size {max_cut_value}.')\n print(f'The approximation ratio achieved is {largest_cut_value_found / max_cut_value}.')\n\n\ndef rzz(rads):\n \"\"\"Returns a gate with the matrix exp(-i Z\u2297Z rads).\"\"\"\n return cirq.ZZPowGate(exponent=2 * rads / np.pi, global_shift=-0.5)\n\n\ndef qaoa_max_cut_unitary(\n qubits, betas, gammas, graph, use_boolean_hamiltonian_gate\n): # Nodes should be integers\n if use_boolean_hamiltonian_gate:\n booleans = [f\"x{i} ^ x{j}\" for i, 
j in sorted(graph.edges)]\n param_names = [f\"x{i}\" for i in range(len(qubits))]\n for beta, gamma in zip(betas, gammas):\n yield cirq.BooleanHamiltonianGate(param_names, booleans, 2.0 * gamma).on(*qubits)\n yield cirq.rx(2 * beta).on_each(*qubits)\n else:\n for beta, gamma in zip(betas, gammas):\n yield (rzz(-0.5 * gamma).on(qubits[i], qubits[j]) for i, j in graph.edges)\n yield cirq.rx(2 * beta).on_each(*qubits)\n\n\ndef qaoa_max_cut_circuit(\n qubits, betas, gammas, graph, use_boolean_hamiltonian_gate\n): # Nodes should be integers\n return cirq.Circuit(\n # Prepare uniform superposition\n cirq.H.on_each(*qubits),\n # Apply QAOA unitary\n qaoa_max_cut_unitary(qubits, betas, gammas, graph, use_boolean_hamiltonian_gate),\n # Measure\n cirq.measure(*qubits, key='m'),\n )\n\n\ndef cut_values(bitstrings, graph):\n mat = networkx.adjacency_matrix(graph, nodelist=sorted(graph.nodes))\n vecs = (-1) ** bitstrings\n vals = 0.5 * np.sum(vecs * (mat @ vecs.T).T, axis=-1)\n vals = 0.5 * (graph.size() - vals)\n return vals\n\n\nif __name__ == '__main__':\n main()\n", "path": "examples/qaoa.py"}]}
| 3,076 | 865 |
gh_patches_debug_34432
|
rasdani/github-patches
|
git_diff
|
scrapy__scrapy-2314
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
SSL error
I have a lot of errors with SSL websites.
For example, when I call: `scrapy shell https://subscribe.wsj.com/printpack/`
I have this error:
```
2016-10-06 22:15:40 [scrapy] DEBUG: Telnet console listening on 127.0.0.1:6043
2016-10-06 22:15:40 [scrapy] INFO: Spider opened
2016-10-06 22:15:40 [scrapy] DEBUG: Retrying <GET https://subscribe.wsj.com/printpack/> (failed 1 times): [<twisted.python.failure.Failure OpenSSL.SSL.Error: [('SSL routines', 'SSL23_GET_SERVER_HELLO', 'sslv3 alert handshake failure')]>]
2016-10-06 22:15:40 [scrapy] DEBUG: Retrying <GET https://subscribe.wsj.com/printpack/> (failed 2 times): [<twisted.python.failure.Failure OpenSSL.SSL.Error: [('SSL routines', 'SSL23_GET_SERVER_HELLO', 'sslv3 alert handshake failure')]>]
2016-10-06 22:15:40 [scrapy] DEBUG: Gave up retrying <GET https://subscribe.wsj.com/printpack/> (failed 3 times): [<twisted.python.failure.Failure OpenSSL.SSL.Error: [('SSL routines', 'SSL23_GET_SERVER_HELLO', 'sslv3 alert handshake failure')]>]
Traceback (most recent call last):
File "/usr/local/bin/scrapy", line 11, in <module>
sys.exit(execute())
File "/usr/local/lib/python2.7/dist-packages/scrapy/cmdline.py", line 142, in execute
_run_print_help(parser, _run_command, cmd, args, opts)
File "/usr/local/lib/python2.7/dist-packages/scrapy/cmdline.py", line 88, in _run_print_help
func(*a, **kw)
File "/usr/local/lib/python2.7/dist-packages/scrapy/cmdline.py", line 149, in _run_command
cmd.run(args, opts)
File "/usr/local/lib/python2.7/dist-packages/scrapy/commands/shell.py", line 71, in run
shell.start(url=url)
File "/usr/local/lib/python2.7/dist-packages/scrapy/shell.py", line 47, in start
self.fetch(url, spider)
File "/usr/local/lib/python2.7/dist-packages/scrapy/shell.py", line 112, in fetch
reactor, self._schedule, request, spider)
File "/usr/local/lib/python2.7/dist-packages/twisted/internet/threads.py", line 122, in blockingCallFromThread
result.raiseException()
File "<string>", line 2, in raiseException
twisted.web._newclient.ResponseNeverReceived: [<twisted.python.failure.Failure OpenSSL.SSL.Error: [('SSL routines', 'SSL23_GET_SERVER_HELLO', 'sslv3 alert handshake failure')]>]
```
How can I fix that?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scrapy/core/downloader/tls.py`
Content:
```
1 import logging
2 from OpenSSL import SSL
3
4
5 logger = logging.getLogger(__name__)
6
7 METHOD_SSLv3 = 'SSLv3'
8 METHOD_TLS = 'TLS'
9 METHOD_TLSv10 = 'TLSv1.0'
10 METHOD_TLSv11 = 'TLSv1.1'
11 METHOD_TLSv12 = 'TLSv1.2'
12
13 openssl_methods = {
14 METHOD_TLS: SSL.SSLv23_METHOD, # protocol negotiation (recommended)
15 METHOD_SSLv3: SSL.SSLv3_METHOD, # SSL 3 (NOT recommended)
16 METHOD_TLSv10: SSL.TLSv1_METHOD, # TLS 1.0 only
17 METHOD_TLSv11: getattr(SSL, 'TLSv1_1_METHOD', 5), # TLS 1.1 only
18 METHOD_TLSv12: getattr(SSL, 'TLSv1_2_METHOD', 6), # TLS 1.2 only
19 }
20
21 # ClientTLSOptions requires a recent-enough version of Twisted
22 try:
23
24 # taken from twisted/twisted/internet/_sslverify.py
25 try:
26 from OpenSSL.SSL import SSL_CB_HANDSHAKE_DONE, SSL_CB_HANDSHAKE_START
27 except ImportError:
28 SSL_CB_HANDSHAKE_START = 0x10
29 SSL_CB_HANDSHAKE_DONE = 0x20
30
31 from twisted.internet._sslverify import (ClientTLSOptions,
32 _maybeSetHostNameIndication,
33 verifyHostname,
34 VerificationError)
35
36 class ScrapyClientTLSOptions(ClientTLSOptions):
37 """
38 SSL Client connection creator ignoring certificate verification errors
39 (for genuinely invalid certificates or bugs in verification code).
40
41 Same as Twisted's private _sslverify.ClientTLSOptions,
42 except that VerificationError and ValueError exceptions are caught,
43 so that the connection is not closed, only logging warnings.
44 """
45
46 def _identityVerifyingInfoCallback(self, connection, where, ret):
47 if where & SSL_CB_HANDSHAKE_START:
48 _maybeSetHostNameIndication(connection, self._hostnameBytes)
49 elif where & SSL_CB_HANDSHAKE_DONE:
50 try:
51 verifyHostname(connection, self._hostnameASCII)
52 except VerificationError as e:
53 logger.warning(
54 'Remote certificate is not valid for hostname "{}"; {}'.format(
55 self._hostnameASCII, e))
56
57 except ValueError as e:
58 logger.warning(
59 'Ignoring error while verifying certificate '
60 'from host "{}" (exception: {})'.format(
61 self._hostnameASCII, repr(e)))
62
63 except ImportError:
64 # ImportError should not matter for older Twisted versions
65 # as the above is not used in the fallback ScrapyClientContextFactory
66 pass
67
```
Path: `scrapy/core/downloader/contextfactory.py`
Content:
```
1 from OpenSSL import SSL
2 from twisted.internet.ssl import ClientContextFactory
3
4 try:
5
6 from zope.interface.declarations import implementer
7
8 # the following should be available from Twisted 14.0.0
9 from twisted.internet.ssl import (optionsForClientTLS,
10 CertificateOptions,
11 platformTrust)
12
13 from twisted.web.client import BrowserLikePolicyForHTTPS
14 from twisted.web.iweb import IPolicyForHTTPS
15
16 from scrapy.core.downloader.tls import ScrapyClientTLSOptions
17
18
19 @implementer(IPolicyForHTTPS)
20 class ScrapyClientContextFactory(BrowserLikePolicyForHTTPS):
21 """
22 Non-peer-certificate verifying HTTPS context factory
23
24 Default OpenSSL method is TLS_METHOD (also called SSLv23_METHOD)
25 which allows TLS protocol negotiation
26
27 'A TLS/SSL connection established with [this method] may
28 understand the SSLv3, TLSv1, TLSv1.1 and TLSv1.2 protocols.'
29 """
30
31 def __init__(self, method=SSL.SSLv23_METHOD, *args, **kwargs):
32 super(ScrapyClientContextFactory, self).__init__(*args, **kwargs)
33 self._ssl_method = method
34
35 def getCertificateOptions(self):
36 # setting verify=True will require you to provide CAs
37 # to verify against; in other words: it's not that simple
38
39 # backward-compatible SSL/TLS method:
40 #
41 # * this will respect `method` attribute in often recommended
42 # `ScrapyClientContextFactory` subclass
43 # (https://github.com/scrapy/scrapy/issues/1429#issuecomment-131782133)
44 #
45 # * getattr() for `_ssl_method` attribute for context factories
46 # not calling super(..., self).__init__
47 return CertificateOptions(verify=False,
48 method=getattr(self, 'method',
49 getattr(self, '_ssl_method', None)))
50
51 # kept for old-style HTTP/1.0 downloader context twisted calls,
52 # e.g. connectSSL()
53 def getContext(self, hostname=None, port=None):
54 return self.getCertificateOptions().getContext()
55
56 def creatorForNetloc(self, hostname, port):
57 return ScrapyClientTLSOptions(hostname.decode("ascii"), self.getContext())
58
59
60 @implementer(IPolicyForHTTPS)
61 class BrowserLikeContextFactory(ScrapyClientContextFactory):
62 """
63 Twisted-recommended context factory for web clients.
64
65 Quoting http://twistedmatrix.com/documents/current/api/twisted.web.client.Agent.html:
66 "The default is to use a BrowserLikePolicyForHTTPS,
67 so unless you have special requirements you can leave this as-is."
68
69 creatorForNetloc() is the same as BrowserLikePolicyForHTTPS
70 except this context factory allows setting the TLS/SSL method to use.
71
72 Default OpenSSL method is TLS_METHOD (also called SSLv23_METHOD)
73 which allows TLS protocol negotiation.
74 """
75 def creatorForNetloc(self, hostname, port):
76
77 # trustRoot set to platformTrust() will use the platform's root CAs.
78 #
79 # This means that a website like https://www.cacert.org will be rejected
80 # by default, since CAcert.org CA certificate is seldom shipped.
81 return optionsForClientTLS(hostname.decode("ascii"),
82 trustRoot=platformTrust(),
83 extraCertificateOptions={
84 'method': self._ssl_method,
85 })
86
87 except ImportError:
88
89 class ScrapyClientContextFactory(ClientContextFactory):
90 "A SSL context factory which is more permissive against SSL bugs."
91 # see https://github.com/scrapy/scrapy/issues/82
92 # and https://github.com/scrapy/scrapy/issues/26
93 # and https://github.com/scrapy/scrapy/issues/981
94
95 def __init__(self, method=SSL.SSLv23_METHOD):
96 self.method = method
97
98 def getContext(self, hostname=None, port=None):
99 ctx = ClientContextFactory.getContext(self)
100 # Enable all workarounds to SSL bugs as documented by
101 # http://www.openssl.org/docs/ssl/SSL_CTX_set_options.html
102 ctx.set_options(SSL.OP_ALL)
103 return ctx
104
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py
--- a/scrapy/core/downloader/contextfactory.py
+++ b/scrapy/core/downloader/contextfactory.py
@@ -13,7 +13,7 @@
from twisted.web.client import BrowserLikePolicyForHTTPS
from twisted.web.iweb import IPolicyForHTTPS
- from scrapy.core.downloader.tls import ScrapyClientTLSOptions
+ from scrapy.core.downloader.tls import ScrapyClientTLSOptions, DEFAULT_CIPHERS
@implementer(IPolicyForHTTPS)
@@ -46,7 +46,9 @@
# not calling super(..., self).__init__
return CertificateOptions(verify=False,
method=getattr(self, 'method',
- getattr(self, '_ssl_method', None)))
+ getattr(self, '_ssl_method', None)),
+ fixBrokenPeers=True,
+ acceptableCiphers=DEFAULT_CIPHERS)
# kept for old-style HTTP/1.0 downloader context twisted calls,
# e.g. connectSSL()
diff --git a/scrapy/core/downloader/tls.py b/scrapy/core/downloader/tls.py
--- a/scrapy/core/downloader/tls.py
+++ b/scrapy/core/downloader/tls.py
@@ -28,6 +28,7 @@
SSL_CB_HANDSHAKE_START = 0x10
SSL_CB_HANDSHAKE_DONE = 0x20
+ from twisted.internet.ssl import AcceptableCiphers
from twisted.internet._sslverify import (ClientTLSOptions,
_maybeSetHostNameIndication,
verifyHostname,
@@ -60,6 +61,8 @@
'from host "{}" (exception: {})'.format(
self._hostnameASCII, repr(e)))
+ DEFAULT_CIPHERS = AcceptableCiphers.fromOpenSSLCipherString('DEFAULT')
+
except ImportError:
# ImportError should not matter for older Twisted versions
# as the above is not used in the fallback ScrapyClientContextFactory
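For anyone hitting the same handshake failure before this patch is available, a similarly permissive context factory can be wired in from project settings. This is only a sketch assembled from the classes and Twisted APIs already shown above; the `myproject.contextfactory` module path is a placeholder.

```python
# myproject/contextfactory.py -- sketch only, mirroring what the patch does.
from twisted.internet.ssl import AcceptableCiphers, CertificateOptions

from scrapy.core.downloader.contextfactory import ScrapyClientContextFactory


class PermissiveContextFactory(ScrapyClientContextFactory):
    def getCertificateOptions(self):
        # Same idea as the patch: broaden the cipher list and tolerate broken peers.
        return CertificateOptions(
            verify=False,
            method=getattr(self, 'method', getattr(self, '_ssl_method', None)),
            fixBrokenPeers=True,
            acceptableCiphers=AcceptableCiphers.fromOpenSSLCipherString('DEFAULT'),
        )
```

It can then be enabled with the `DOWNLOADER_CLIENTCONTEXTFACTORY` setting, e.g. `DOWNLOADER_CLIENTCONTEXTFACTORY = 'myproject.contextfactory.PermissiveContextFactory'` in `settings.py`.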
|
{"golden_diff": "diff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py\n--- a/scrapy/core/downloader/contextfactory.py\n+++ b/scrapy/core/downloader/contextfactory.py\n@@ -13,7 +13,7 @@\n from twisted.web.client import BrowserLikePolicyForHTTPS\n from twisted.web.iweb import IPolicyForHTTPS\n \n- from scrapy.core.downloader.tls import ScrapyClientTLSOptions\n+ from scrapy.core.downloader.tls import ScrapyClientTLSOptions, DEFAULT_CIPHERS\n \n \n @implementer(IPolicyForHTTPS)\n@@ -46,7 +46,9 @@\n # not calling super(..., self).__init__\n return CertificateOptions(verify=False,\n method=getattr(self, 'method',\n- getattr(self, '_ssl_method', None)))\n+ getattr(self, '_ssl_method', None)),\n+ fixBrokenPeers=True,\n+ acceptableCiphers=DEFAULT_CIPHERS)\n \n # kept for old-style HTTP/1.0 downloader context twisted calls,\n # e.g. connectSSL()\ndiff --git a/scrapy/core/downloader/tls.py b/scrapy/core/downloader/tls.py\n--- a/scrapy/core/downloader/tls.py\n+++ b/scrapy/core/downloader/tls.py\n@@ -28,6 +28,7 @@\n SSL_CB_HANDSHAKE_START = 0x10\n SSL_CB_HANDSHAKE_DONE = 0x20\n \n+ from twisted.internet.ssl import AcceptableCiphers\n from twisted.internet._sslverify import (ClientTLSOptions,\n _maybeSetHostNameIndication,\n verifyHostname,\n@@ -60,6 +61,8 @@\n 'from host \"{}\" (exception: {})'.format(\n self._hostnameASCII, repr(e)))\n \n+ DEFAULT_CIPHERS = AcceptableCiphers.fromOpenSSLCipherString('DEFAULT')\n+\n except ImportError:\n # ImportError should not matter for older Twisted versions\n # as the above is not used in the fallback ScrapyClientContextFactory\n", "issue": "SSL error\nI have lot of errors with SSL websites.\nFor exemple, when I call : `scrapy shell https://subscribe.wsj.com/printpack/`\n\nI have this error : \n\n```\n2016-10-06 22:15:40 [scrapy] DEBUG: Telnet console listening on 127.0.0.1:6043\n2016-10-06 22:15:40 [scrapy] INFO: Spider opened\n2016-10-06 22:15:40 [scrapy] DEBUG: Retrying <GET https://subscribe.wsj.com/printpack/> (failed 1 times): [<twisted.python.failure.Failure OpenSSL.SSL.Error: [('SSL routines', 'SSL23_GET_SERVER_HELLO', 'sslv3 alert handshake failure')]>]\n2016-10-06 22:15:40 [scrapy] DEBUG: Retrying <GET https://subscribe.wsj.com/printpack/> (failed 2 times): [<twisted.python.failure.Failure OpenSSL.SSL.Error: [('SSL routines', 'SSL23_GET_SERVER_HELLO', 'sslv3 alert handshake failure')]>]\n2016-10-06 22:15:40 [scrapy] DEBUG: Gave up retrying <GET https://subscribe.wsj.com/printpack/> (failed 3 times): [<twisted.python.failure.Failure OpenSSL.SSL.Error: [('SSL routines', 'SSL23_GET_SERVER_HELLO', 'sslv3 alert handshake failure')]>]\nTraceback (most recent call last):\n File \"/usr/local/bin/scrapy\", line 11, in <module>\n sys.exit(execute())\n File \"/usr/local/lib/python2.7/dist-packages/scrapy/cmdline.py\", line 142, in execute\n _run_print_help(parser, _run_command, cmd, args, opts)\n File \"/usr/local/lib/python2.7/dist-packages/scrapy/cmdline.py\", line 88, in _run_print_help\n func(*a, **kw)\n File \"/usr/local/lib/python2.7/dist-packages/scrapy/cmdline.py\", line 149, in _run_command\n cmd.run(args, opts)\n File \"/usr/local/lib/python2.7/dist-packages/scrapy/commands/shell.py\", line 71, in run\n shell.start(url=url)\n File \"/usr/local/lib/python2.7/dist-packages/scrapy/shell.py\", line 47, in start\n self.fetch(url, spider)\n File \"/usr/local/lib/python2.7/dist-packages/scrapy/shell.py\", line 112, in fetch\n reactor, self._schedule, request, spider)\n File 
\"/usr/local/lib/python2.7/dist-packages/twisted/internet/threads.py\", line 122, in blockingCallFromThread\n result.raiseException()\n File \"<string>\", line 2, in raiseException\ntwisted.web._newclient.ResponseNeverReceived: [<twisted.python.failure.Failure OpenSSL.SSL.Error: [('SSL routines', 'SSL23_GET_SERVER_HELLO', 'sslv3 alert handshake failure')]>]\n```\n\nHow fix that ?\n\n", "before_files": [{"content": "import logging\nfrom OpenSSL import SSL\n\n\nlogger = logging.getLogger(__name__)\n\nMETHOD_SSLv3 = 'SSLv3'\nMETHOD_TLS = 'TLS'\nMETHOD_TLSv10 = 'TLSv1.0'\nMETHOD_TLSv11 = 'TLSv1.1'\nMETHOD_TLSv12 = 'TLSv1.2'\n\nopenssl_methods = {\n METHOD_TLS: SSL.SSLv23_METHOD, # protocol negotiation (recommended)\n METHOD_SSLv3: SSL.SSLv3_METHOD, # SSL 3 (NOT recommended)\n METHOD_TLSv10: SSL.TLSv1_METHOD, # TLS 1.0 only\n METHOD_TLSv11: getattr(SSL, 'TLSv1_1_METHOD', 5), # TLS 1.1 only\n METHOD_TLSv12: getattr(SSL, 'TLSv1_2_METHOD', 6), # TLS 1.2 only\n}\n\n# ClientTLSOptions requires a recent-enough version of Twisted\ntry:\n\n # taken from twisted/twisted/internet/_sslverify.py\n try:\n from OpenSSL.SSL import SSL_CB_HANDSHAKE_DONE, SSL_CB_HANDSHAKE_START\n except ImportError:\n SSL_CB_HANDSHAKE_START = 0x10\n SSL_CB_HANDSHAKE_DONE = 0x20\n\n from twisted.internet._sslverify import (ClientTLSOptions,\n _maybeSetHostNameIndication,\n verifyHostname,\n VerificationError)\n\n class ScrapyClientTLSOptions(ClientTLSOptions):\n \"\"\"\n SSL Client connection creator ignoring certificate verification errors\n (for genuinely invalid certificates or bugs in verification code).\n\n Same as Twisted's private _sslverify.ClientTLSOptions,\n except that VerificationError and ValueError exceptions are caught,\n so that the connection is not closed, only logging warnings.\n \"\"\"\n\n def _identityVerifyingInfoCallback(self, connection, where, ret):\n if where & SSL_CB_HANDSHAKE_START:\n _maybeSetHostNameIndication(connection, self._hostnameBytes)\n elif where & SSL_CB_HANDSHAKE_DONE:\n try:\n verifyHostname(connection, self._hostnameASCII)\n except VerificationError as e:\n logger.warning(\n 'Remote certificate is not valid for hostname \"{}\"; {}'.format(\n self._hostnameASCII, e))\n\n except ValueError as e:\n logger.warning(\n 'Ignoring error while verifying certificate '\n 'from host \"{}\" (exception: {})'.format(\n self._hostnameASCII, repr(e)))\n\nexcept ImportError:\n # ImportError should not matter for older Twisted versions\n # as the above is not used in the fallback ScrapyClientContextFactory\n pass\n", "path": "scrapy/core/downloader/tls.py"}, {"content": "from OpenSSL import SSL\nfrom twisted.internet.ssl import ClientContextFactory\n\ntry:\n\n from zope.interface.declarations import implementer\n\n # the following should be available from Twisted 14.0.0\n from twisted.internet.ssl import (optionsForClientTLS,\n CertificateOptions,\n platformTrust)\n\n from twisted.web.client import BrowserLikePolicyForHTTPS\n from twisted.web.iweb import IPolicyForHTTPS\n\n from scrapy.core.downloader.tls import ScrapyClientTLSOptions\n\n\n @implementer(IPolicyForHTTPS)\n class ScrapyClientContextFactory(BrowserLikePolicyForHTTPS):\n \"\"\"\n Non-peer-certificate verifying HTTPS context factory\n\n Default OpenSSL method is TLS_METHOD (also called SSLv23_METHOD)\n which allows TLS protocol negotiation\n\n 'A TLS/SSL connection established with [this method] may\n understand the SSLv3, TLSv1, TLSv1.1 and TLSv1.2 protocols.'\n \"\"\"\n\n def __init__(self, method=SSL.SSLv23_METHOD, *args, **kwargs):\n 
super(ScrapyClientContextFactory, self).__init__(*args, **kwargs)\n self._ssl_method = method\n\n def getCertificateOptions(self):\n # setting verify=True will require you to provide CAs\n # to verify against; in other words: it's not that simple\n\n # backward-compatible SSL/TLS method:\n #\n # * this will respect `method` attribute in often recommended\n # `ScrapyClientContextFactory` subclass\n # (https://github.com/scrapy/scrapy/issues/1429#issuecomment-131782133)\n #\n # * getattr() for `_ssl_method` attribute for context factories\n # not calling super(..., self).__init__\n return CertificateOptions(verify=False,\n method=getattr(self, 'method',\n getattr(self, '_ssl_method', None)))\n\n # kept for old-style HTTP/1.0 downloader context twisted calls,\n # e.g. connectSSL()\n def getContext(self, hostname=None, port=None):\n return self.getCertificateOptions().getContext()\n\n def creatorForNetloc(self, hostname, port):\n return ScrapyClientTLSOptions(hostname.decode(\"ascii\"), self.getContext())\n\n\n @implementer(IPolicyForHTTPS)\n class BrowserLikeContextFactory(ScrapyClientContextFactory):\n \"\"\"\n Twisted-recommended context factory for web clients.\n\n Quoting http://twistedmatrix.com/documents/current/api/twisted.web.client.Agent.html:\n \"The default is to use a BrowserLikePolicyForHTTPS,\n so unless you have special requirements you can leave this as-is.\"\n\n creatorForNetloc() is the same as BrowserLikePolicyForHTTPS\n except this context factory allows setting the TLS/SSL method to use.\n\n Default OpenSSL method is TLS_METHOD (also called SSLv23_METHOD)\n which allows TLS protocol negotiation.\n \"\"\"\n def creatorForNetloc(self, hostname, port):\n\n # trustRoot set to platformTrust() will use the platform's root CAs.\n #\n # This means that a website like https://www.cacert.org will be rejected\n # by default, since CAcert.org CA certificate is seldom shipped.\n return optionsForClientTLS(hostname.decode(\"ascii\"),\n trustRoot=platformTrust(),\n extraCertificateOptions={\n 'method': self._ssl_method,\n })\n\nexcept ImportError:\n\n class ScrapyClientContextFactory(ClientContextFactory):\n \"A SSL context factory which is more permissive against SSL bugs.\"\n # see https://github.com/scrapy/scrapy/issues/82\n # and https://github.com/scrapy/scrapy/issues/26\n # and https://github.com/scrapy/scrapy/issues/981\n\n def __init__(self, method=SSL.SSLv23_METHOD):\n self.method = method\n\n def getContext(self, hostname=None, port=None):\n ctx = ClientContextFactory.getContext(self)\n # Enable all workarounds to SSL bugs as documented by\n # http://www.openssl.org/docs/ssl/SSL_CTX_set_options.html\n ctx.set_options(SSL.OP_ALL)\n return ctx\n", "path": "scrapy/core/downloader/contextfactory.py"}], "after_files": [{"content": "import logging\nfrom OpenSSL import SSL\n\n\nlogger = logging.getLogger(__name__)\n\nMETHOD_SSLv3 = 'SSLv3'\nMETHOD_TLS = 'TLS'\nMETHOD_TLSv10 = 'TLSv1.0'\nMETHOD_TLSv11 = 'TLSv1.1'\nMETHOD_TLSv12 = 'TLSv1.2'\n\nopenssl_methods = {\n METHOD_TLS: SSL.SSLv23_METHOD, # protocol negotiation (recommended)\n METHOD_SSLv3: SSL.SSLv3_METHOD, # SSL 3 (NOT recommended)\n METHOD_TLSv10: SSL.TLSv1_METHOD, # TLS 1.0 only\n METHOD_TLSv11: getattr(SSL, 'TLSv1_1_METHOD', 5), # TLS 1.1 only\n METHOD_TLSv12: getattr(SSL, 'TLSv1_2_METHOD', 6), # TLS 1.2 only\n}\n\n# ClientTLSOptions requires a recent-enough version of Twisted\ntry:\n\n # taken from twisted/twisted/internet/_sslverify.py\n try:\n from OpenSSL.SSL import SSL_CB_HANDSHAKE_DONE, SSL_CB_HANDSHAKE_START\n except 
ImportError:\n SSL_CB_HANDSHAKE_START = 0x10\n SSL_CB_HANDSHAKE_DONE = 0x20\n\n from twisted.internet.ssl import AcceptableCiphers\n from twisted.internet._sslverify import (ClientTLSOptions,\n _maybeSetHostNameIndication,\n verifyHostname,\n VerificationError)\n\n class ScrapyClientTLSOptions(ClientTLSOptions):\n \"\"\"\n SSL Client connection creator ignoring certificate verification errors\n (for genuinely invalid certificates or bugs in verification code).\n\n Same as Twisted's private _sslverify.ClientTLSOptions,\n except that VerificationError and ValueError exceptions are caught,\n so that the connection is not closed, only logging warnings.\n \"\"\"\n\n def _identityVerifyingInfoCallback(self, connection, where, ret):\n if where & SSL_CB_HANDSHAKE_START:\n _maybeSetHostNameIndication(connection, self._hostnameBytes)\n elif where & SSL_CB_HANDSHAKE_DONE:\n try:\n verifyHostname(connection, self._hostnameASCII)\n except VerificationError as e:\n logger.warning(\n 'Remote certificate is not valid for hostname \"{}\"; {}'.format(\n self._hostnameASCII, e))\n\n except ValueError as e:\n logger.warning(\n 'Ignoring error while verifying certificate '\n 'from host \"{}\" (exception: {})'.format(\n self._hostnameASCII, repr(e)))\n\n DEFAULT_CIPHERS = AcceptableCiphers.fromOpenSSLCipherString('DEFAULT')\n\nexcept ImportError:\n # ImportError should not matter for older Twisted versions\n # as the above is not used in the fallback ScrapyClientContextFactory\n pass\n", "path": "scrapy/core/downloader/tls.py"}, {"content": "from OpenSSL import SSL\nfrom twisted.internet.ssl import ClientContextFactory\n\ntry:\n\n from zope.interface.declarations import implementer\n\n # the following should be available from Twisted 14.0.0\n from twisted.internet.ssl import (optionsForClientTLS,\n CertificateOptions,\n platformTrust)\n\n from twisted.web.client import BrowserLikePolicyForHTTPS\n from twisted.web.iweb import IPolicyForHTTPS\n\n from scrapy.core.downloader.tls import ScrapyClientTLSOptions, DEFAULT_CIPHERS\n\n\n @implementer(IPolicyForHTTPS)\n class ScrapyClientContextFactory(BrowserLikePolicyForHTTPS):\n \"\"\"\n Non-peer-certificate verifying HTTPS context factory\n\n Default OpenSSL method is TLS_METHOD (also called SSLv23_METHOD)\n which allows TLS protocol negotiation\n\n 'A TLS/SSL connection established with [this method] may\n understand the SSLv3, TLSv1, TLSv1.1 and TLSv1.2 protocols.'\n \"\"\"\n\n def __init__(self, method=SSL.SSLv23_METHOD, *args, **kwargs):\n super(ScrapyClientContextFactory, self).__init__(*args, **kwargs)\n self._ssl_method = method\n\n def getCertificateOptions(self):\n # setting verify=True will require you to provide CAs\n # to verify against; in other words: it's not that simple\n\n # backward-compatible SSL/TLS method:\n #\n # * this will respect `method` attribute in often recommended\n # `ScrapyClientContextFactory` subclass\n # (https://github.com/scrapy/scrapy/issues/1429#issuecomment-131782133)\n #\n # * getattr() for `_ssl_method` attribute for context factories\n # not calling super(..., self).__init__\n return CertificateOptions(verify=False,\n method=getattr(self, 'method',\n getattr(self, '_ssl_method', None)),\n fixBrokenPeers=True,\n acceptableCiphers=DEFAULT_CIPHERS)\n\n # kept for old-style HTTP/1.0 downloader context twisted calls,\n # e.g. 
connectSSL()\n def getContext(self, hostname=None, port=None):\n return self.getCertificateOptions().getContext()\n\n def creatorForNetloc(self, hostname, port):\n return ScrapyClientTLSOptions(hostname.decode(\"ascii\"), self.getContext())\n\n\n @implementer(IPolicyForHTTPS)\n class BrowserLikeContextFactory(ScrapyClientContextFactory):\n \"\"\"\n Twisted-recommended context factory for web clients.\n\n Quoting http://twistedmatrix.com/documents/current/api/twisted.web.client.Agent.html:\n \"The default is to use a BrowserLikePolicyForHTTPS,\n so unless you have special requirements you can leave this as-is.\"\n\n creatorForNetloc() is the same as BrowserLikePolicyForHTTPS\n except this context factory allows setting the TLS/SSL method to use.\n\n Default OpenSSL method is TLS_METHOD (also called SSLv23_METHOD)\n which allows TLS protocol negotiation.\n \"\"\"\n def creatorForNetloc(self, hostname, port):\n\n # trustRoot set to platformTrust() will use the platform's root CAs.\n #\n # This means that a website like https://www.cacert.org will be rejected\n # by default, since CAcert.org CA certificate is seldom shipped.\n return optionsForClientTLS(hostname.decode(\"ascii\"),\n trustRoot=platformTrust(),\n extraCertificateOptions={\n 'method': self._ssl_method,\n })\n\nexcept ImportError:\n\n class ScrapyClientContextFactory(ClientContextFactory):\n \"A SSL context factory which is more permissive against SSL bugs.\"\n # see https://github.com/scrapy/scrapy/issues/82\n # and https://github.com/scrapy/scrapy/issues/26\n # and https://github.com/scrapy/scrapy/issues/981\n\n def __init__(self, method=SSL.SSLv23_METHOD):\n self.method = method\n\n def getContext(self, hostname=None, port=None):\n ctx = ClientContextFactory.getContext(self)\n # Enable all workarounds to SSL bugs as documented by\n # http://www.openssl.org/docs/ssl/SSL_CTX_set_options.html\n ctx.set_options(SSL.OP_ALL)\n return ctx\n", "path": "scrapy/core/downloader/contextfactory.py"}]}
| 2,809 | 440 |
gh_patches_debug_15151
|
rasdani/github-patches
|
git_diff
|
lutris__lutris-3956
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Can't interact with existing winesteam games in the library
```ERROR 2021-12-29 15:17:54,343 [game._get_runner:195]:Unable to import runner winesteam for steven-universe-save-the-light
Traceback (most recent call last):
File "/usr/lib/python3.10/site-packages/lutris/gui/lutriswindow.py", line 436, in update_revealer
self.game_bar = GameBar(game, self.game_actions, self.application)
File "/usr/lib/python3.10/site-packages/lutris/gui/widgets/game_bar.py", line 60, in __init__
self.update_view()
File "/usr/lib/python3.10/site-packages/lutris/gui/widgets/game_bar.py", line 76, in update_view
self.play_button = self.get_play_button()
File "/usr/lib/python3.10/site-packages/lutris/gui/widgets/game_bar.py", line 216, in get_play_button
popover = self.get_popover(self.get_game_buttons(), popover_button)
File "/usr/lib/python3.10/site-packages/lutris/gui/widgets/game_bar.py", line 223, in get_game_buttons
displayed = self.game_actions.get_displayed_entries()
File "/usr/lib/python3.10/site-packages/lutris/game_actions.py", line 106, in get_displayed_entries
"execute-script": bool(self.game.is_installed and self.game.runner.system_config.get("manual_command")),
AttributeError: 'NoneType' object has no attribute 'system_config'```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lutris/game_actions.py`
Content:
```
1 """Handle game specific actions"""
2
3 # Standard Library
4 # pylint: disable=too-many-public-methods
5 import os
6 from gettext import gettext as _
7
8 from gi.repository import Gio
9
10 from lutris.command import MonitoredCommand
11 from lutris.game import Game
12 from lutris.gui import dialogs
13 from lutris.gui.config.add_game import AddGameDialog
14 from lutris.gui.config.edit_game import EditGameConfigDialog
15 from lutris.gui.dialogs.log import LogWindow
16 from lutris.gui.dialogs.uninstall_game import RemoveGameDialog, UninstallGameDialog
17 from lutris.gui.widgets.utils import open_uri
18 from lutris.util import xdgshortcuts
19 from lutris.util.log import logger
20 from lutris.util.system import path_exists
21
22
23 class GameActions:
24 """Regroup a list of callbacks for a game"""
25
26 def __init__(self, application=None, window=None):
27 self.application = application or Gio.Application.get_default()
28 self.window = window
29 self.game_id = None
30 self._game = None
31
32 @property
33 def game(self):
34 if not self._game:
35 self._game = self.application.get_game_by_id(self.game_id)
36 if not self._game:
37 self._game = Game(self.game_id)
38 self._game.connect("game-error", self.window.on_game_error)
39 return self._game
40
41 @property
42 def is_game_running(self):
43 return bool(self.application.get_game_by_id(self.game_id))
44
45 def set_game(self, game=None, game_id=None):
46 if game:
47 self._game = game
48 self.game_id = game.id
49 else:
50 self._game = None
51 self.game_id = game_id
52
53 def get_game_actions(self):
54 """Return a list of game actions and their callbacks"""
55 return [
56 ("play", _("Play"), self.on_game_launch),
57 ("stop", _("Stop"), self.on_game_stop),
58 ("show_logs", _("Show logs"), self.on_show_logs),
59 ("install", _("Install"), self.on_install_clicked),
60 ("add", _("Add installed game"), self.on_add_manually),
61 ("configure", _("Configure"), self.on_edit_game_configuration),
62 ("favorite", _("Add to favorites"), self.on_add_favorite_game),
63 ("deletefavorite", _("Remove from favorites"), self.on_delete_favorite_game),
64 ("execute-script", _("Execute script"), self.on_execute_script_clicked),
65 ("browse", _("Browse files"), self.on_browse_files),
66 (
67 "desktop-shortcut",
68 _("Create desktop shortcut"),
69 self.on_create_desktop_shortcut,
70 ),
71 (
72 "rm-desktop-shortcut",
73 _("Delete desktop shortcut"),
74 self.on_remove_desktop_shortcut,
75 ),
76 (
77 "menu-shortcut",
78 _("Create application menu shortcut"),
79 self.on_create_menu_shortcut,
80 ),
81 (
82 "rm-menu-shortcut",
83 _("Delete application menu shortcut"),
84 self.on_remove_menu_shortcut,
85 ),
86 ("install_more", _("Install another version"), self.on_install_clicked),
87 ("remove", _("Remove"), self.on_remove_game),
88 ("view", _("View on Lutris.net"), self.on_view_game),
89 ("hide", _("Hide game from library"), self.on_hide_game),
90 ("unhide", _("Unhide game from library"), self.on_unhide_game),
91 ]
92
93 def get_displayed_entries(self):
94 """Return a dictionary of actions that should be shown for a game"""
95 return {
96 "add": not self.game.is_installed,
97 "install": not self.game.is_installed,
98 "play": self.game.is_installed and not self.is_game_running,
99 "stop": self.is_game_running,
100 "configure": bool(self.game.is_installed),
101 "browse": self.game.is_installed and self.game.runner_name != "browser",
102 "show_logs": self.game.is_installed,
103 "favorite": not self.game.is_favorite,
104 "deletefavorite": self.game.is_favorite,
105 "install_more": not self.game.service and self.game.is_installed,
106 "execute-script": bool(self.game.is_installed and self.game.runner.system_config.get("manual_command")),
107 "desktop-shortcut": (
108 self.game.is_installed
109 and not xdgshortcuts.desktop_launcher_exists(self.game.slug, self.game.id)
110 ),
111 "menu-shortcut": (
112 self.game.is_installed
113 and not xdgshortcuts.menu_launcher_exists(self.game.slug, self.game.id)
114 ),
115 "rm-desktop-shortcut": bool(
116 self.game.is_installed
117 and xdgshortcuts.desktop_launcher_exists(self.game.slug, self.game.id)
118 ),
119 "rm-menu-shortcut": bool(
120 self.game.is_installed
121 and xdgshortcuts.menu_launcher_exists(self.game.slug, self.game.id)
122 ),
123 "remove": True,
124 "view": True,
125 "hide": self.game.is_installed and not self.game.is_hidden,
126 "unhide": self.game.is_hidden,
127 }
128
129 def on_game_launch(self, *_args):
130 """Launch a game"""
131 self.game.launch()
132
133 def get_running_game(self):
134 ids = self.application.get_running_game_ids()
135 for game_id in ids:
136 if str(game_id) == str(self.game.id):
137 return self.game
138 logger.warning("Game %s not in %s", self.game_id, ids)
139
140 def on_game_stop(self, _caller):
141 """Stops the game"""
142 game = self.get_running_game()
143 if game:
144 game.force_stop()
145
146 def on_show_logs(self, _widget):
147 """Display game log"""
148 _buffer = self.game.log_buffer
149 if not _buffer:
150 logger.info("No log for game %s", self.game)
151 return LogWindow(
152 title=_("Log for {}").format(self.game),
153 buffer=_buffer,
154 application=self.application
155 )
156
157 def on_install_clicked(self, *_args):
158 """Install a game"""
159 # Install the currently selected game in the UI
160 if not self.game.slug:
161 raise RuntimeError("No game to install: %s" % self.game.id)
162 self.game.emit("game-install")
163
164 def on_locate_installed_game(self, _button, game):
165 """Show the user a dialog to import an existing install to a DRM free service
166
167 Params:
168 game (Game): Game instance without a database ID, populated with a fields the service can provides
169 """
170 AddGameDialog(self.window, game=game)
171
172 def on_add_manually(self, _widget, *_args):
173 """Callback that presents the Add game dialog"""
174 return AddGameDialog(self.window, game=self.game, runner=self.game.runner_name)
175
176 def on_edit_game_configuration(self, _widget):
177 """Edit game preferences"""
178 EditGameConfigDialog(self.window, self.game)
179
180 def on_add_favorite_game(self, _widget):
181 """Add to favorite Games list"""
182 self.game.add_to_favorites()
183
184 def on_delete_favorite_game(self, _widget):
185 """delete from favorites"""
186 self.game.remove_from_favorites()
187
188 def on_hide_game(self, _widget):
189 """Add a game to the list of hidden games"""
190 self.game.set_hidden(True)
191
192 def on_unhide_game(self, _widget):
193 """Removes a game from the list of hidden games"""
194 self.game.set_hidden(False)
195
196 def on_execute_script_clicked(self, _widget):
197 """Execute the game's associated script"""
198 manual_command = self.game.runner.system_config.get("manual_command")
199 if path_exists(manual_command):
200 MonitoredCommand(
201 [manual_command],
202 include_processes=[os.path.basename(manual_command)],
203 cwd=self.game.directory,
204 ).start()
205 logger.info("Running %s in the background", manual_command)
206
207 def on_browse_files(self, _widget):
208 """Callback to open a game folder in the file browser"""
209 path = self.game.get_browse_dir()
210 if not path:
211 dialogs.NoticeDialog(_("This game has no installation directory"))
212 elif path_exists(path):
213 open_uri("file://%s" % path)
214 else:
215 dialogs.NoticeDialog(_("Can't open %s \nThe folder doesn't exist.") % path)
216
217 def on_create_menu_shortcut(self, *_args):
218 """Add the selected game to the system's Games menu."""
219 xdgshortcuts.create_launcher(self.game.slug, self.game.id, self.game.name, menu=True)
220
221 def on_create_desktop_shortcut(self, *_args):
222 """Create a desktop launcher for the selected game."""
223 xdgshortcuts.create_launcher(self.game.slug, self.game.id, self.game.name, desktop=True)
224
225 def on_remove_menu_shortcut(self, *_args):
226 """Remove an XDG menu shortcut"""
227 xdgshortcuts.remove_launcher(self.game.slug, self.game.id, menu=True)
228
229 def on_remove_desktop_shortcut(self, *_args):
230 """Remove a .desktop shortcut"""
231 xdgshortcuts.remove_launcher(self.game.slug, self.game.id, desktop=True)
232
233 def on_view_game(self, _widget):
234 """Callback to open a game on lutris.net"""
235 open_uri("https://lutris.net/games/%s" % self.game.slug)
236
237 def on_remove_game(self, *_args):
238 """Callback that present the uninstall dialog to the user"""
239 if self.game.is_installed:
240 UninstallGameDialog(game_id=self.game.id, parent=self.window)
241 else:
242 RemoveGameDialog(game_id=self.game.id, parent=self.window)
243
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/lutris/game_actions.py b/lutris/game_actions.py
--- a/lutris/game_actions.py
+++ b/lutris/game_actions.py
@@ -103,7 +103,10 @@
"favorite": not self.game.is_favorite,
"deletefavorite": self.game.is_favorite,
"install_more": not self.game.service and self.game.is_installed,
- "execute-script": bool(self.game.is_installed and self.game.runner.system_config.get("manual_command")),
+ "execute-script": bool(
+ self.game.is_installed and self.game.runner
+ and self.game.runner.system_config.get("manual_command")
+ ),
"desktop-shortcut": (
self.game.is_installed
and not xdgshortcuts.desktop_launcher_exists(self.game.slug, self.game.id)
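For reference, the guard added above can be exercised in isolation. The sketch below is a minimal stand-alone reproduction of the patched expression; `StubGame` is an illustrative stand-in, not the Lutris `Game` class.
```python
# Stand-alone illustration of the guarded "execute-script" check added above.
# StubGame is a made-up stand-in object, not the Lutris Game class.

class StubGame:
    def __init__(self, runner=None):
        self.is_installed = True
        self.runner = runner  # None when the runner module cannot be imported


def execute_script_entry(game):
    # Mirrors the patched expression in GameActions.get_displayed_entries().
    return bool(
        game.is_installed
        and game.runner
        and game.runner.system_config.get("manual_command")
    )


broken = StubGame(runner=None)       # e.g. a leftover winesteam library entry
print(execute_script_entry(broken))  # False, instead of the AttributeError above
```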
|
{"golden_diff": "diff --git a/lutris/game_actions.py b/lutris/game_actions.py\n--- a/lutris/game_actions.py\n+++ b/lutris/game_actions.py\n@@ -103,7 +103,10 @@\n \"favorite\": not self.game.is_favorite,\n \"deletefavorite\": self.game.is_favorite,\n \"install_more\": not self.game.service and self.game.is_installed,\n- \"execute-script\": bool(self.game.is_installed and self.game.runner.system_config.get(\"manual_command\")),\n+ \"execute-script\": bool(\n+ self.game.is_installed and self.game.runner\n+ and self.game.runner.system_config.get(\"manual_command\")\n+ ),\n \"desktop-shortcut\": (\n self.game.is_installed\n and not xdgshortcuts.desktop_launcher_exists(self.game.slug, self.game.id)\n", "issue": "Can't interact with existing winesteam games in the library\n```ERROR 2021-12-29 15:17:54,343 [game._get_runner:195]:Unable to import runner winesteam for steven-universe-save-the-light\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python3.10/site-packages/lutris/gui/lutriswindow.py\", line 436, in update_revealer\r\n self.game_bar = GameBar(game, self.game_actions, self.application)\r\n File \"/usr/lib/python3.10/site-packages/lutris/gui/widgets/game_bar.py\", line 60, in __init__\r\n self.update_view()\r\n File \"/usr/lib/python3.10/site-packages/lutris/gui/widgets/game_bar.py\", line 76, in update_view\r\n self.play_button = self.get_play_button()\r\n File \"/usr/lib/python3.10/site-packages/lutris/gui/widgets/game_bar.py\", line 216, in get_play_button\r\n popover = self.get_popover(self.get_game_buttons(), popover_button)\r\n File \"/usr/lib/python3.10/site-packages/lutris/gui/widgets/game_bar.py\", line 223, in get_game_buttons\r\n displayed = self.game_actions.get_displayed_entries()\r\n File \"/usr/lib/python3.10/site-packages/lutris/game_actions.py\", line 106, in get_displayed_entries\r\n \"execute-script\": bool(self.game.is_installed and self.game.runner.system_config.get(\"manual_command\")),\r\nAttributeError: 'NoneType' object has no attribute 'system_config'```\n", "before_files": [{"content": "\"\"\"Handle game specific actions\"\"\"\n\n# Standard Library\n# pylint: disable=too-many-public-methods\nimport os\nfrom gettext import gettext as _\n\nfrom gi.repository import Gio\n\nfrom lutris.command import MonitoredCommand\nfrom lutris.game import Game\nfrom lutris.gui import dialogs\nfrom lutris.gui.config.add_game import AddGameDialog\nfrom lutris.gui.config.edit_game import EditGameConfigDialog\nfrom lutris.gui.dialogs.log import LogWindow\nfrom lutris.gui.dialogs.uninstall_game import RemoveGameDialog, UninstallGameDialog\nfrom lutris.gui.widgets.utils import open_uri\nfrom lutris.util import xdgshortcuts\nfrom lutris.util.log import logger\nfrom lutris.util.system import path_exists\n\n\nclass GameActions:\n \"\"\"Regroup a list of callbacks for a game\"\"\"\n\n def __init__(self, application=None, window=None):\n self.application = application or Gio.Application.get_default()\n self.window = window\n self.game_id = None\n self._game = None\n\n @property\n def game(self):\n if not self._game:\n self._game = self.application.get_game_by_id(self.game_id)\n if not self._game:\n self._game = Game(self.game_id)\n self._game.connect(\"game-error\", self.window.on_game_error)\n return self._game\n\n @property\n def is_game_running(self):\n return bool(self.application.get_game_by_id(self.game_id))\n\n def set_game(self, game=None, game_id=None):\n if game:\n self._game = game\n self.game_id = game.id\n else:\n self._game = None\n self.game_id = game_id\n\n def 
get_game_actions(self):\n \"\"\"Return a list of game actions and their callbacks\"\"\"\n return [\n (\"play\", _(\"Play\"), self.on_game_launch),\n (\"stop\", _(\"Stop\"), self.on_game_stop),\n (\"show_logs\", _(\"Show logs\"), self.on_show_logs),\n (\"install\", _(\"Install\"), self.on_install_clicked),\n (\"add\", _(\"Add installed game\"), self.on_add_manually),\n (\"configure\", _(\"Configure\"), self.on_edit_game_configuration),\n (\"favorite\", _(\"Add to favorites\"), self.on_add_favorite_game),\n (\"deletefavorite\", _(\"Remove from favorites\"), self.on_delete_favorite_game),\n (\"execute-script\", _(\"Execute script\"), self.on_execute_script_clicked),\n (\"browse\", _(\"Browse files\"), self.on_browse_files),\n (\n \"desktop-shortcut\",\n _(\"Create desktop shortcut\"),\n self.on_create_desktop_shortcut,\n ),\n (\n \"rm-desktop-shortcut\",\n _(\"Delete desktop shortcut\"),\n self.on_remove_desktop_shortcut,\n ),\n (\n \"menu-shortcut\",\n _(\"Create application menu shortcut\"),\n self.on_create_menu_shortcut,\n ),\n (\n \"rm-menu-shortcut\",\n _(\"Delete application menu shortcut\"),\n self.on_remove_menu_shortcut,\n ),\n (\"install_more\", _(\"Install another version\"), self.on_install_clicked),\n (\"remove\", _(\"Remove\"), self.on_remove_game),\n (\"view\", _(\"View on Lutris.net\"), self.on_view_game),\n (\"hide\", _(\"Hide game from library\"), self.on_hide_game),\n (\"unhide\", _(\"Unhide game from library\"), self.on_unhide_game),\n ]\n\n def get_displayed_entries(self):\n \"\"\"Return a dictionary of actions that should be shown for a game\"\"\"\n return {\n \"add\": not self.game.is_installed,\n \"install\": not self.game.is_installed,\n \"play\": self.game.is_installed and not self.is_game_running,\n \"stop\": self.is_game_running,\n \"configure\": bool(self.game.is_installed),\n \"browse\": self.game.is_installed and self.game.runner_name != \"browser\",\n \"show_logs\": self.game.is_installed,\n \"favorite\": not self.game.is_favorite,\n \"deletefavorite\": self.game.is_favorite,\n \"install_more\": not self.game.service and self.game.is_installed,\n \"execute-script\": bool(self.game.is_installed and self.game.runner.system_config.get(\"manual_command\")),\n \"desktop-shortcut\": (\n self.game.is_installed\n and not xdgshortcuts.desktop_launcher_exists(self.game.slug, self.game.id)\n ),\n \"menu-shortcut\": (\n self.game.is_installed\n and not xdgshortcuts.menu_launcher_exists(self.game.slug, self.game.id)\n ),\n \"rm-desktop-shortcut\": bool(\n self.game.is_installed\n and xdgshortcuts.desktop_launcher_exists(self.game.slug, self.game.id)\n ),\n \"rm-menu-shortcut\": bool(\n self.game.is_installed\n and xdgshortcuts.menu_launcher_exists(self.game.slug, self.game.id)\n ),\n \"remove\": True,\n \"view\": True,\n \"hide\": self.game.is_installed and not self.game.is_hidden,\n \"unhide\": self.game.is_hidden,\n }\n\n def on_game_launch(self, *_args):\n \"\"\"Launch a game\"\"\"\n self.game.launch()\n\n def get_running_game(self):\n ids = self.application.get_running_game_ids()\n for game_id in ids:\n if str(game_id) == str(self.game.id):\n return self.game\n logger.warning(\"Game %s not in %s\", self.game_id, ids)\n\n def on_game_stop(self, _caller):\n \"\"\"Stops the game\"\"\"\n game = self.get_running_game()\n if game:\n game.force_stop()\n\n def on_show_logs(self, _widget):\n \"\"\"Display game log\"\"\"\n _buffer = self.game.log_buffer\n if not _buffer:\n logger.info(\"No log for game %s\", self.game)\n return LogWindow(\n title=_(\"Log for 
{}\").format(self.game),\n buffer=_buffer,\n application=self.application\n )\n\n def on_install_clicked(self, *_args):\n \"\"\"Install a game\"\"\"\n # Install the currently selected game in the UI\n if not self.game.slug:\n raise RuntimeError(\"No game to install: %s\" % self.game.id)\n self.game.emit(\"game-install\")\n\n def on_locate_installed_game(self, _button, game):\n \"\"\"Show the user a dialog to import an existing install to a DRM free service\n\n Params:\n game (Game): Game instance without a database ID, populated with a fields the service can provides\n \"\"\"\n AddGameDialog(self.window, game=game)\n\n def on_add_manually(self, _widget, *_args):\n \"\"\"Callback that presents the Add game dialog\"\"\"\n return AddGameDialog(self.window, game=self.game, runner=self.game.runner_name)\n\n def on_edit_game_configuration(self, _widget):\n \"\"\"Edit game preferences\"\"\"\n EditGameConfigDialog(self.window, self.game)\n\n def on_add_favorite_game(self, _widget):\n \"\"\"Add to favorite Games list\"\"\"\n self.game.add_to_favorites()\n\n def on_delete_favorite_game(self, _widget):\n \"\"\"delete from favorites\"\"\"\n self.game.remove_from_favorites()\n\n def on_hide_game(self, _widget):\n \"\"\"Add a game to the list of hidden games\"\"\"\n self.game.set_hidden(True)\n\n def on_unhide_game(self, _widget):\n \"\"\"Removes a game from the list of hidden games\"\"\"\n self.game.set_hidden(False)\n\n def on_execute_script_clicked(self, _widget):\n \"\"\"Execute the game's associated script\"\"\"\n manual_command = self.game.runner.system_config.get(\"manual_command\")\n if path_exists(manual_command):\n MonitoredCommand(\n [manual_command],\n include_processes=[os.path.basename(manual_command)],\n cwd=self.game.directory,\n ).start()\n logger.info(\"Running %s in the background\", manual_command)\n\n def on_browse_files(self, _widget):\n \"\"\"Callback to open a game folder in the file browser\"\"\"\n path = self.game.get_browse_dir()\n if not path:\n dialogs.NoticeDialog(_(\"This game has no installation directory\"))\n elif path_exists(path):\n open_uri(\"file://%s\" % path)\n else:\n dialogs.NoticeDialog(_(\"Can't open %s \\nThe folder doesn't exist.\") % path)\n\n def on_create_menu_shortcut(self, *_args):\n \"\"\"Add the selected game to the system's Games menu.\"\"\"\n xdgshortcuts.create_launcher(self.game.slug, self.game.id, self.game.name, menu=True)\n\n def on_create_desktop_shortcut(self, *_args):\n \"\"\"Create a desktop launcher for the selected game.\"\"\"\n xdgshortcuts.create_launcher(self.game.slug, self.game.id, self.game.name, desktop=True)\n\n def on_remove_menu_shortcut(self, *_args):\n \"\"\"Remove an XDG menu shortcut\"\"\"\n xdgshortcuts.remove_launcher(self.game.slug, self.game.id, menu=True)\n\n def on_remove_desktop_shortcut(self, *_args):\n \"\"\"Remove a .desktop shortcut\"\"\"\n xdgshortcuts.remove_launcher(self.game.slug, self.game.id, desktop=True)\n\n def on_view_game(self, _widget):\n \"\"\"Callback to open a game on lutris.net\"\"\"\n open_uri(\"https://lutris.net/games/%s\" % self.game.slug)\n\n def on_remove_game(self, *_args):\n \"\"\"Callback that present the uninstall dialog to the user\"\"\"\n if self.game.is_installed:\n UninstallGameDialog(game_id=self.game.id, parent=self.window)\n else:\n RemoveGameDialog(game_id=self.game.id, parent=self.window)\n", "path": "lutris/game_actions.py"}], "after_files": [{"content": "\"\"\"Handle game specific actions\"\"\"\n\n# Standard Library\n# pylint: disable=too-many-public-methods\nimport os\nfrom 
gettext import gettext as _\n\nfrom gi.repository import Gio\n\nfrom lutris.command import MonitoredCommand\nfrom lutris.game import Game\nfrom lutris.gui import dialogs\nfrom lutris.gui.config.add_game import AddGameDialog\nfrom lutris.gui.config.edit_game import EditGameConfigDialog\nfrom lutris.gui.dialogs.log import LogWindow\nfrom lutris.gui.dialogs.uninstall_game import RemoveGameDialog, UninstallGameDialog\nfrom lutris.gui.widgets.utils import open_uri\nfrom lutris.util import xdgshortcuts\nfrom lutris.util.log import logger\nfrom lutris.util.system import path_exists\n\n\nclass GameActions:\n \"\"\"Regroup a list of callbacks for a game\"\"\"\n\n def __init__(self, application=None, window=None):\n self.application = application or Gio.Application.get_default()\n self.window = window\n self.game_id = None\n self._game = None\n\n @property\n def game(self):\n if not self._game:\n self._game = self.application.get_game_by_id(self.game_id)\n if not self._game:\n self._game = Game(self.game_id)\n self._game.connect(\"game-error\", self.window.on_game_error)\n return self._game\n\n @property\n def is_game_running(self):\n return bool(self.application.get_game_by_id(self.game_id))\n\n def set_game(self, game=None, game_id=None):\n if game:\n self._game = game\n self.game_id = game.id\n else:\n self._game = None\n self.game_id = game_id\n\n def get_game_actions(self):\n \"\"\"Return a list of game actions and their callbacks\"\"\"\n return [\n (\"play\", _(\"Play\"), self.on_game_launch),\n (\"stop\", _(\"Stop\"), self.on_game_stop),\n (\"show_logs\", _(\"Show logs\"), self.on_show_logs),\n (\"install\", _(\"Install\"), self.on_install_clicked),\n (\"add\", _(\"Add installed game\"), self.on_add_manually),\n (\"configure\", _(\"Configure\"), self.on_edit_game_configuration),\n (\"favorite\", _(\"Add to favorites\"), self.on_add_favorite_game),\n (\"deletefavorite\", _(\"Remove from favorites\"), self.on_delete_favorite_game),\n (\"execute-script\", _(\"Execute script\"), self.on_execute_script_clicked),\n (\"browse\", _(\"Browse files\"), self.on_browse_files),\n (\n \"desktop-shortcut\",\n _(\"Create desktop shortcut\"),\n self.on_create_desktop_shortcut,\n ),\n (\n \"rm-desktop-shortcut\",\n _(\"Delete desktop shortcut\"),\n self.on_remove_desktop_shortcut,\n ),\n (\n \"menu-shortcut\",\n _(\"Create application menu shortcut\"),\n self.on_create_menu_shortcut,\n ),\n (\n \"rm-menu-shortcut\",\n _(\"Delete application menu shortcut\"),\n self.on_remove_menu_shortcut,\n ),\n (\"install_more\", _(\"Install another version\"), self.on_install_clicked),\n (\"remove\", _(\"Remove\"), self.on_remove_game),\n (\"view\", _(\"View on Lutris.net\"), self.on_view_game),\n (\"hide\", _(\"Hide game from library\"), self.on_hide_game),\n (\"unhide\", _(\"Unhide game from library\"), self.on_unhide_game),\n ]\n\n def get_displayed_entries(self):\n \"\"\"Return a dictionary of actions that should be shown for a game\"\"\"\n return {\n \"add\": not self.game.is_installed,\n \"install\": not self.game.is_installed,\n \"play\": self.game.is_installed and not self.is_game_running,\n \"stop\": self.is_game_running,\n \"configure\": bool(self.game.is_installed),\n \"browse\": self.game.is_installed and self.game.runner_name != \"browser\",\n \"show_logs\": self.game.is_installed,\n \"favorite\": not self.game.is_favorite,\n \"deletefavorite\": self.game.is_favorite,\n \"install_more\": not self.game.service and self.game.is_installed,\n \"execute-script\": bool(\n self.game.is_installed and self.game.runner\n 
and self.game.runner.system_config.get(\"manual_command\")\n ),\n \"desktop-shortcut\": (\n self.game.is_installed\n and not xdgshortcuts.desktop_launcher_exists(self.game.slug, self.game.id)\n ),\n \"menu-shortcut\": (\n self.game.is_installed\n and not xdgshortcuts.menu_launcher_exists(self.game.slug, self.game.id)\n ),\n \"rm-desktop-shortcut\": bool(\n self.game.is_installed\n and xdgshortcuts.desktop_launcher_exists(self.game.slug, self.game.id)\n ),\n \"rm-menu-shortcut\": bool(\n self.game.is_installed\n and xdgshortcuts.menu_launcher_exists(self.game.slug, self.game.id)\n ),\n \"remove\": True,\n \"view\": True,\n \"hide\": self.game.is_installed and not self.game.is_hidden,\n \"unhide\": self.game.is_hidden,\n }\n\n def on_game_launch(self, *_args):\n \"\"\"Launch a game\"\"\"\n self.game.launch()\n\n def get_running_game(self):\n ids = self.application.get_running_game_ids()\n for game_id in ids:\n if str(game_id) == str(self.game.id):\n return self.game\n logger.warning(\"Game %s not in %s\", self.game_id, ids)\n\n def on_game_stop(self, _caller):\n \"\"\"Stops the game\"\"\"\n game = self.get_running_game()\n if game:\n game.force_stop()\n\n def on_show_logs(self, _widget):\n \"\"\"Display game log\"\"\"\n _buffer = self.game.log_buffer\n if not _buffer:\n logger.info(\"No log for game %s\", self.game)\n return LogWindow(\n title=_(\"Log for {}\").format(self.game),\n buffer=_buffer,\n application=self.application\n )\n\n def on_install_clicked(self, *_args):\n \"\"\"Install a game\"\"\"\n # Install the currently selected game in the UI\n if not self.game.slug:\n raise RuntimeError(\"No game to install: %s\" % self.game.id)\n self.game.emit(\"game-install\")\n\n def on_locate_installed_game(self, _button, game):\n \"\"\"Show the user a dialog to import an existing install to a DRM free service\n\n Params:\n game (Game): Game instance without a database ID, populated with a fields the service can provides\n \"\"\"\n AddGameDialog(self.window, game=game)\n\n def on_add_manually(self, _widget, *_args):\n \"\"\"Callback that presents the Add game dialog\"\"\"\n return AddGameDialog(self.window, game=self.game, runner=self.game.runner_name)\n\n def on_edit_game_configuration(self, _widget):\n \"\"\"Edit game preferences\"\"\"\n EditGameConfigDialog(self.window, self.game)\n\n def on_add_favorite_game(self, _widget):\n \"\"\"Add to favorite Games list\"\"\"\n self.game.add_to_favorites()\n\n def on_delete_favorite_game(self, _widget):\n \"\"\"delete from favorites\"\"\"\n self.game.remove_from_favorites()\n\n def on_hide_game(self, _widget):\n \"\"\"Add a game to the list of hidden games\"\"\"\n self.game.set_hidden(True)\n\n def on_unhide_game(self, _widget):\n \"\"\"Removes a game from the list of hidden games\"\"\"\n self.game.set_hidden(False)\n\n def on_execute_script_clicked(self, _widget):\n \"\"\"Execute the game's associated script\"\"\"\n manual_command = self.game.runner.system_config.get(\"manual_command\")\n if path_exists(manual_command):\n MonitoredCommand(\n [manual_command],\n include_processes=[os.path.basename(manual_command)],\n cwd=self.game.directory,\n ).start()\n logger.info(\"Running %s in the background\", manual_command)\n\n def on_browse_files(self, _widget):\n \"\"\"Callback to open a game folder in the file browser\"\"\"\n path = self.game.get_browse_dir()\n if not path:\n dialogs.NoticeDialog(_(\"This game has no installation directory\"))\n elif path_exists(path):\n open_uri(\"file://%s\" % path)\n else:\n dialogs.NoticeDialog(_(\"Can't open %s \\nThe 
folder doesn't exist.\") % path)\n\n def on_create_menu_shortcut(self, *_args):\n \"\"\"Add the selected game to the system's Games menu.\"\"\"\n xdgshortcuts.create_launcher(self.game.slug, self.game.id, self.game.name, menu=True)\n\n def on_create_desktop_shortcut(self, *_args):\n \"\"\"Create a desktop launcher for the selected game.\"\"\"\n xdgshortcuts.create_launcher(self.game.slug, self.game.id, self.game.name, desktop=True)\n\n def on_remove_menu_shortcut(self, *_args):\n \"\"\"Remove an XDG menu shortcut\"\"\"\n xdgshortcuts.remove_launcher(self.game.slug, self.game.id, menu=True)\n\n def on_remove_desktop_shortcut(self, *_args):\n \"\"\"Remove a .desktop shortcut\"\"\"\n xdgshortcuts.remove_launcher(self.game.slug, self.game.id, desktop=True)\n\n def on_view_game(self, _widget):\n \"\"\"Callback to open a game on lutris.net\"\"\"\n open_uri(\"https://lutris.net/games/%s\" % self.game.slug)\n\n def on_remove_game(self, *_args):\n \"\"\"Callback that present the uninstall dialog to the user\"\"\"\n if self.game.is_installed:\n UninstallGameDialog(game_id=self.game.id, parent=self.window)\n else:\n RemoveGameDialog(game_id=self.game.id, parent=self.window)\n", "path": "lutris/game_actions.py"}]}
| 3,251 | 174 |
gh_patches_debug_33567
|
rasdani/github-patches
|
git_diff
|
docker__docker-py-1662
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support updated credential helpers syntax.
https://github.com/docker/compose/issues/4885
--- END ISSUE ---
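The "updated syntax" referred to here is presumably the per-registry `credHelpers` section that newer Docker clients write to `~/.docker/config.json` alongside the older global `credsStore` key. The Python dicts below sketch the two shapes; the registry hostnames and helper names are invented examples.
```python
# Illustrative shapes of ~/.docker/config.json relevant to this issue.
# Registry hostnames and helper names below are invented examples.

# Global credential store: one helper used for every registry.
config_with_creds_store = {
    "auths": {"https://index.docker.io/v1/": {}},
    "credsStore": "secretservice",
}

# Per-registry credential helpers: map a registry to a helper name
# (resolved to a docker-credential-<name> binary), optionally combined
# with a global credsStore fallback.
config_with_cred_helpers = {
    "auths": {"registry.example.com": {}},
    "credHelpers": {"registry.example.com": "examplehelper"},
    "credsStore": "secretservice",
}
```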
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docker/auth.py`
Content:
```
1 import base64
2 import json
3 import logging
4 import os
5
6 import dockerpycreds
7 import six
8
9 from . import errors
10 from .constants import IS_WINDOWS_PLATFORM
11
12 INDEX_NAME = 'docker.io'
13 INDEX_URL = 'https://{0}/v1/'.format(INDEX_NAME)
14 DOCKER_CONFIG_FILENAME = os.path.join('.docker', 'config.json')
15 LEGACY_DOCKER_CONFIG_FILENAME = '.dockercfg'
16 TOKEN_USERNAME = '<token>'
17
18 log = logging.getLogger(__name__)
19
20
21 def resolve_repository_name(repo_name):
22 if '://' in repo_name:
23 raise errors.InvalidRepository(
24 'Repository name cannot contain a scheme ({0})'.format(repo_name)
25 )
26
27 index_name, remote_name = split_repo_name(repo_name)
28 if index_name[0] == '-' or index_name[-1] == '-':
29 raise errors.InvalidRepository(
30 'Invalid index name ({0}). Cannot begin or end with a'
31 ' hyphen.'.format(index_name)
32 )
33 return resolve_index_name(index_name), remote_name
34
35
36 def resolve_index_name(index_name):
37 index_name = convert_to_hostname(index_name)
38 if index_name == 'index.' + INDEX_NAME:
39 index_name = INDEX_NAME
40 return index_name
41
42
43 def get_config_header(client, registry):
44 log.debug('Looking for auth config')
45 if not client._auth_configs:
46 log.debug(
47 "No auth config in memory - loading from filesystem"
48 )
49 client._auth_configs = load_config()
50 authcfg = resolve_authconfig(client._auth_configs, registry)
51 # Do not fail here if no authentication exists for this
52 # specific registry as we can have a readonly pull. Just
53 # put the header if we can.
54 if authcfg:
55 log.debug('Found auth config')
56 # auth_config needs to be a dict in the format used by
57 # auth.py username , password, serveraddress, email
58 return encode_header(authcfg)
59 log.debug('No auth config found')
60 return None
61
62
63 def split_repo_name(repo_name):
64 parts = repo_name.split('/', 1)
65 if len(parts) == 1 or (
66 '.' not in parts[0] and ':' not in parts[0] and parts[0] != 'localhost'
67 ):
68 # This is a docker index repo (ex: username/foobar or ubuntu)
69 return INDEX_NAME, repo_name
70 return tuple(parts)
71
72
73 def resolve_authconfig(authconfig, registry=None):
74 """
75 Returns the authentication data from the given auth configuration for a
76 specific registry. As with the Docker client, legacy entries in the config
77 with full URLs are stripped down to hostnames before checking for a match.
78 Returns None if no match was found.
79 """
80 if 'credsStore' in authconfig:
81 log.debug(
82 'Using credentials store "{0}"'.format(authconfig['credsStore'])
83 )
84 return _resolve_authconfig_credstore(
85 authconfig, registry, authconfig['credsStore']
86 )
87 # Default to the public index server
88 registry = resolve_index_name(registry) if registry else INDEX_NAME
89 log.debug("Looking for auth entry for {0}".format(repr(registry)))
90
91 if registry in authconfig:
92 log.debug("Found {0}".format(repr(registry)))
93 return authconfig[registry]
94
95 for key, config in six.iteritems(authconfig):
96 if resolve_index_name(key) == registry:
97 log.debug("Found {0}".format(repr(key)))
98 return config
99
100 log.debug("No entry found")
101 return None
102
103
104 def _resolve_authconfig_credstore(authconfig, registry, credstore_name):
105 if not registry or registry == INDEX_NAME:
106 # The ecosystem is a little schizophrenic with index.docker.io VS
107 # docker.io - in that case, it seems the full URL is necessary.
108 registry = 'https://index.docker.io/v1/'
109 log.debug("Looking for auth entry for {0}".format(repr(registry)))
110 store = dockerpycreds.Store(credstore_name)
111 try:
112 data = store.get(registry)
113 res = {
114 'ServerAddress': registry,
115 }
116 if data['Username'] == TOKEN_USERNAME:
117 res['IdentityToken'] = data['Secret']
118 else:
119 res.update({
120 'Username': data['Username'],
121 'Password': data['Secret'],
122 })
123 return res
124 except dockerpycreds.CredentialsNotFound as e:
125 log.debug('No entry found')
126 return None
127 except dockerpycreds.StoreError as e:
128 raise errors.DockerException(
129 'Credentials store error: {0}'.format(repr(e))
130 )
131
132
133 def convert_to_hostname(url):
134 return url.replace('http://', '').replace('https://', '').split('/', 1)[0]
135
136
137 def decode_auth(auth):
138 if isinstance(auth, six.string_types):
139 auth = auth.encode('ascii')
140 s = base64.b64decode(auth)
141 login, pwd = s.split(b':', 1)
142 return login.decode('utf8'), pwd.decode('utf8')
143
144
145 def encode_header(auth):
146 auth_json = json.dumps(auth).encode('ascii')
147 return base64.urlsafe_b64encode(auth_json)
148
149
150 def parse_auth(entries, raise_on_error=False):
151 """
152 Parses authentication entries
153
154 Args:
155 entries: Dict of authentication entries.
156 raise_on_error: If set to true, an invalid format will raise
157 InvalidConfigFile
158
159 Returns:
160 Authentication registry.
161 """
162
163 conf = {}
164 for registry, entry in six.iteritems(entries):
165 if not isinstance(entry, dict):
166 log.debug(
167 'Config entry for key {0} is not auth config'.format(registry)
168 )
169 # We sometimes fall back to parsing the whole config as if it was
170 # the auth config by itself, for legacy purposes. In that case, we
171 # fail silently and return an empty conf if any of the keys is not
172 # formatted properly.
173 if raise_on_error:
174 raise errors.InvalidConfigFile(
175 'Invalid configuration for registry {0}'.format(registry)
176 )
177 return {}
178 if 'identitytoken' in entry:
179 log.debug('Found an IdentityToken entry for registry {0}'.format(
180 registry
181 ))
182 conf[registry] = {
183 'IdentityToken': entry['identitytoken']
184 }
185 continue # Other values are irrelevant if we have a token, skip.
186
187 if 'auth' not in entry:
188 # Starting with engine v1.11 (API 1.23), an empty dictionary is
189 # a valid value in the auths config.
190 # https://github.com/docker/compose/issues/3265
191 log.debug(
192 'Auth data for {0} is absent. Client might be using a '
193 'credentials store instead.'
194 )
195 conf[registry] = {}
196 continue
197
198 username, password = decode_auth(entry['auth'])
199 log.debug(
200 'Found entry (registry={0}, username={1})'
201 .format(repr(registry), repr(username))
202 )
203
204 conf[registry] = {
205 'username': username,
206 'password': password,
207 'email': entry.get('email'),
208 'serveraddress': registry,
209 }
210 return conf
211
212
213 def find_config_file(config_path=None):
214 paths = list(filter(None, [
215 config_path, # 1
216 config_path_from_environment(), # 2
217 os.path.join(home_dir(), DOCKER_CONFIG_FILENAME), # 3
218 os.path.join(home_dir(), LEGACY_DOCKER_CONFIG_FILENAME), # 4
219 ]))
220
221 log.debug("Trying paths: {0}".format(repr(paths)))
222
223 for path in paths:
224 if os.path.exists(path):
225 log.debug("Found file at path: {0}".format(path))
226 return path
227
228 log.debug("No config file found")
229
230 return None
231
232
233 def config_path_from_environment():
234 config_dir = os.environ.get('DOCKER_CONFIG')
235 if not config_dir:
236 return None
237 return os.path.join(config_dir, os.path.basename(DOCKER_CONFIG_FILENAME))
238
239
240 def home_dir():
241 """
242 Get the user's home directory, using the same logic as the Docker Engine
243 client - use %USERPROFILE% on Windows, $HOME/getuid on POSIX.
244 """
245 if IS_WINDOWS_PLATFORM:
246 return os.environ.get('USERPROFILE', '')
247 else:
248 return os.path.expanduser('~')
249
250
251 def load_config(config_path=None):
252 """
253 Loads authentication data from a Docker configuration file in the given
254 root directory or if config_path is passed use given path.
255 Lookup priority:
256 explicit config_path parameter > DOCKER_CONFIG environment variable >
257 ~/.docker/config.json > ~/.dockercfg
258 """
259 config_file = find_config_file(config_path)
260
261 if not config_file:
262 return {}
263
264 try:
265 with open(config_file) as f:
266 data = json.load(f)
267 res = {}
268 if data.get('auths'):
269 log.debug("Found 'auths' section")
270 res.update(parse_auth(data['auths'], raise_on_error=True))
271 if data.get('HttpHeaders'):
272 log.debug("Found 'HttpHeaders' section")
273 res.update({'HttpHeaders': data['HttpHeaders']})
274 if data.get('credsStore'):
275 log.debug("Found 'credsStore' section")
276 res.update({'credsStore': data['credsStore']})
277 if res:
278 return res
279 else:
280 log.debug("Couldn't find 'auths' or 'HttpHeaders' sections")
281 f.seek(0)
282 return parse_auth(json.load(f))
283 except (IOError, KeyError, ValueError) as e:
284 # Likely missing new Docker config file or it's in an
285 # unknown format, continue to attempt to read old location
286 # and format.
287 log.debug(e)
288
289 log.debug("Attempting to parse legacy auth file format")
290 try:
291 data = []
292 with open(config_file) as f:
293 for line in f.readlines():
294 data.append(line.strip().split(' = ')[1])
295 if len(data) < 2:
296 # Not enough data
297 raise errors.InvalidConfigFile(
298 'Invalid or empty configuration file!'
299 )
300
301 username, password = decode_auth(data[0])
302 return {
303 INDEX_NAME: {
304 'username': username,
305 'password': password,
306 'email': data[1],
307 'serveraddress': INDEX_URL,
308 }
309 }
310 except Exception as e:
311 log.debug(e)
312 pass
313
314 log.debug("All parsing attempts failed - returning empty config")
315 return {}
316
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/docker/auth.py b/docker/auth.py
--- a/docker/auth.py
+++ b/docker/auth.py
@@ -70,6 +70,15 @@
return tuple(parts)
+def get_credential_store(authconfig, registry):
+ if not registry or registry == INDEX_NAME:
+ registry = 'https://index.docker.io/v1/'
+
+ return authconfig.get('credHelpers', {}).get(registry) or authconfig.get(
+ 'credsStore'
+ )
+
+
def resolve_authconfig(authconfig, registry=None):
"""
Returns the authentication data from the given auth configuration for a
@@ -77,13 +86,17 @@
with full URLs are stripped down to hostnames before checking for a match.
Returns None if no match was found.
"""
- if 'credsStore' in authconfig:
- log.debug(
- 'Using credentials store "{0}"'.format(authconfig['credsStore'])
- )
- return _resolve_authconfig_credstore(
- authconfig, registry, authconfig['credsStore']
- )
+
+ if 'credHelpers' in authconfig or 'credsStore' in authconfig:
+ store_name = get_credential_store(authconfig, registry)
+ if store_name is not None:
+ log.debug(
+ 'Using credentials store "{0}"'.format(store_name)
+ )
+ return _resolve_authconfig_credstore(
+ authconfig, registry, store_name
+ )
+
# Default to the public index server
registry = resolve_index_name(registry) if registry else INDEX_NAME
log.debug("Looking for auth entry for {0}".format(repr(registry)))
@@ -274,6 +287,9 @@
if data.get('credsStore'):
log.debug("Found 'credsStore' section")
res.update({'credsStore': data['credsStore']})
+ if data.get('credHelpers'):
+ log.debug("Found 'credHelpers' section")
+ res.update({'credHelpers': data['credHelpers']})
if res:
return res
else:
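In short, the patched lookup prefers a registry-specific entry under `credHelpers` and falls back to the global `credsStore`. The stand-alone sketch below mirrors `get_credential_store()` from the diff over a plain dict, without importing docker or dockerpycreds; the config values are invented examples.
```python
# Stand-alone sketch of the resolution order introduced by the patch above.

INDEX_NAME = "docker.io"


def get_credential_store(authconfig, registry):
    # The default index is addressed by its full v1 URL, as in the patch.
    if not registry or registry == INDEX_NAME:
        registry = "https://index.docker.io/v1/"
    # A per-registry helper wins; otherwise use the global store, if any.
    return authconfig.get("credHelpers", {}).get(registry) or authconfig.get(
        "credsStore"
    )


authconfig = {
    "credsStore": "secretservice",                             # global fallback
    "credHelpers": {"registry.example.com": "examplehelper"},  # per-registry
}

print(get_credential_store(authconfig, "registry.example.com"))  # examplehelper
print(get_credential_store(authconfig, "other.example.org"))     # secretservice
print(get_credential_store(authconfig, None))                    # secretservice
```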
|
{"golden_diff": "diff --git a/docker/auth.py b/docker/auth.py\n--- a/docker/auth.py\n+++ b/docker/auth.py\n@@ -70,6 +70,15 @@\n return tuple(parts)\n \n \n+def get_credential_store(authconfig, registry):\n+ if not registry or registry == INDEX_NAME:\n+ registry = 'https://index.docker.io/v1/'\n+\n+ return authconfig.get('credHelpers', {}).get(registry) or authconfig.get(\n+ 'credsStore'\n+ )\n+\n+\n def resolve_authconfig(authconfig, registry=None):\n \"\"\"\n Returns the authentication data from the given auth configuration for a\n@@ -77,13 +86,17 @@\n with full URLs are stripped down to hostnames before checking for a match.\n Returns None if no match was found.\n \"\"\"\n- if 'credsStore' in authconfig:\n- log.debug(\n- 'Using credentials store \"{0}\"'.format(authconfig['credsStore'])\n- )\n- return _resolve_authconfig_credstore(\n- authconfig, registry, authconfig['credsStore']\n- )\n+\n+ if 'credHelpers' in authconfig or 'credsStore' in authconfig:\n+ store_name = get_credential_store(authconfig, registry)\n+ if store_name is not None:\n+ log.debug(\n+ 'Using credentials store \"{0}\"'.format(store_name)\n+ )\n+ return _resolve_authconfig_credstore(\n+ authconfig, registry, store_name\n+ )\n+\n # Default to the public index server\n registry = resolve_index_name(registry) if registry else INDEX_NAME\n log.debug(\"Looking for auth entry for {0}\".format(repr(registry)))\n@@ -274,6 +287,9 @@\n if data.get('credsStore'):\n log.debug(\"Found 'credsStore' section\")\n res.update({'credsStore': data['credsStore']})\n+ if data.get('credHelpers'):\n+ log.debug(\"Found 'credHelpers' section\")\n+ res.update({'credHelpers': data['credHelpers']})\n if res:\n return res\n else:\n", "issue": "Support updated credential helpers syntax.\nhttps://github.com/docker/compose/issues/4885\n", "before_files": [{"content": "import base64\nimport json\nimport logging\nimport os\n\nimport dockerpycreds\nimport six\n\nfrom . import errors\nfrom .constants import IS_WINDOWS_PLATFORM\n\nINDEX_NAME = 'docker.io'\nINDEX_URL = 'https://{0}/v1/'.format(INDEX_NAME)\nDOCKER_CONFIG_FILENAME = os.path.join('.docker', 'config.json')\nLEGACY_DOCKER_CONFIG_FILENAME = '.dockercfg'\nTOKEN_USERNAME = '<token>'\n\nlog = logging.getLogger(__name__)\n\n\ndef resolve_repository_name(repo_name):\n if '://' in repo_name:\n raise errors.InvalidRepository(\n 'Repository name cannot contain a scheme ({0})'.format(repo_name)\n )\n\n index_name, remote_name = split_repo_name(repo_name)\n if index_name[0] == '-' or index_name[-1] == '-':\n raise errors.InvalidRepository(\n 'Invalid index name ({0}). Cannot begin or end with a'\n ' hyphen.'.format(index_name)\n )\n return resolve_index_name(index_name), remote_name\n\n\ndef resolve_index_name(index_name):\n index_name = convert_to_hostname(index_name)\n if index_name == 'index.' + INDEX_NAME:\n index_name = INDEX_NAME\n return index_name\n\n\ndef get_config_header(client, registry):\n log.debug('Looking for auth config')\n if not client._auth_configs:\n log.debug(\n \"No auth config in memory - loading from filesystem\"\n )\n client._auth_configs = load_config()\n authcfg = resolve_authconfig(client._auth_configs, registry)\n # Do not fail here if no authentication exists for this\n # specific registry as we can have a readonly pull. 
Just\n # put the header if we can.\n if authcfg:\n log.debug('Found auth config')\n # auth_config needs to be a dict in the format used by\n # auth.py username , password, serveraddress, email\n return encode_header(authcfg)\n log.debug('No auth config found')\n return None\n\n\ndef split_repo_name(repo_name):\n parts = repo_name.split('/', 1)\n if len(parts) == 1 or (\n '.' not in parts[0] and ':' not in parts[0] and parts[0] != 'localhost'\n ):\n # This is a docker index repo (ex: username/foobar or ubuntu)\n return INDEX_NAME, repo_name\n return tuple(parts)\n\n\ndef resolve_authconfig(authconfig, registry=None):\n \"\"\"\n Returns the authentication data from the given auth configuration for a\n specific registry. As with the Docker client, legacy entries in the config\n with full URLs are stripped down to hostnames before checking for a match.\n Returns None if no match was found.\n \"\"\"\n if 'credsStore' in authconfig:\n log.debug(\n 'Using credentials store \"{0}\"'.format(authconfig['credsStore'])\n )\n return _resolve_authconfig_credstore(\n authconfig, registry, authconfig['credsStore']\n )\n # Default to the public index server\n registry = resolve_index_name(registry) if registry else INDEX_NAME\n log.debug(\"Looking for auth entry for {0}\".format(repr(registry)))\n\n if registry in authconfig:\n log.debug(\"Found {0}\".format(repr(registry)))\n return authconfig[registry]\n\n for key, config in six.iteritems(authconfig):\n if resolve_index_name(key) == registry:\n log.debug(\"Found {0}\".format(repr(key)))\n return config\n\n log.debug(\"No entry found\")\n return None\n\n\ndef _resolve_authconfig_credstore(authconfig, registry, credstore_name):\n if not registry or registry == INDEX_NAME:\n # The ecosystem is a little schizophrenic with index.docker.io VS\n # docker.io - in that case, it seems the full URL is necessary.\n registry = 'https://index.docker.io/v1/'\n log.debug(\"Looking for auth entry for {0}\".format(repr(registry)))\n store = dockerpycreds.Store(credstore_name)\n try:\n data = store.get(registry)\n res = {\n 'ServerAddress': registry,\n }\n if data['Username'] == TOKEN_USERNAME:\n res['IdentityToken'] = data['Secret']\n else:\n res.update({\n 'Username': data['Username'],\n 'Password': data['Secret'],\n })\n return res\n except dockerpycreds.CredentialsNotFound as e:\n log.debug('No entry found')\n return None\n except dockerpycreds.StoreError as e:\n raise errors.DockerException(\n 'Credentials store error: {0}'.format(repr(e))\n )\n\n\ndef convert_to_hostname(url):\n return url.replace('http://', '').replace('https://', '').split('/', 1)[0]\n\n\ndef decode_auth(auth):\n if isinstance(auth, six.string_types):\n auth = auth.encode('ascii')\n s = base64.b64decode(auth)\n login, pwd = s.split(b':', 1)\n return login.decode('utf8'), pwd.decode('utf8')\n\n\ndef encode_header(auth):\n auth_json = json.dumps(auth).encode('ascii')\n return base64.urlsafe_b64encode(auth_json)\n\n\ndef parse_auth(entries, raise_on_error=False):\n \"\"\"\n Parses authentication entries\n\n Args:\n entries: Dict of authentication entries.\n raise_on_error: If set to true, an invalid format will raise\n InvalidConfigFile\n\n Returns:\n Authentication registry.\n \"\"\"\n\n conf = {}\n for registry, entry in six.iteritems(entries):\n if not isinstance(entry, dict):\n log.debug(\n 'Config entry for key {0} is not auth config'.format(registry)\n )\n # We sometimes fall back to parsing the whole config as if it was\n # the auth config by itself, for legacy purposes. 
In that case, we\n # fail silently and return an empty conf if any of the keys is not\n # formatted properly.\n if raise_on_error:\n raise errors.InvalidConfigFile(\n 'Invalid configuration for registry {0}'.format(registry)\n )\n return {}\n if 'identitytoken' in entry:\n log.debug('Found an IdentityToken entry for registry {0}'.format(\n registry\n ))\n conf[registry] = {\n 'IdentityToken': entry['identitytoken']\n }\n continue # Other values are irrelevant if we have a token, skip.\n\n if 'auth' not in entry:\n # Starting with engine v1.11 (API 1.23), an empty dictionary is\n # a valid value in the auths config.\n # https://github.com/docker/compose/issues/3265\n log.debug(\n 'Auth data for {0} is absent. Client might be using a '\n 'credentials store instead.'\n )\n conf[registry] = {}\n continue\n\n username, password = decode_auth(entry['auth'])\n log.debug(\n 'Found entry (registry={0}, username={1})'\n .format(repr(registry), repr(username))\n )\n\n conf[registry] = {\n 'username': username,\n 'password': password,\n 'email': entry.get('email'),\n 'serveraddress': registry,\n }\n return conf\n\n\ndef find_config_file(config_path=None):\n paths = list(filter(None, [\n config_path, # 1\n config_path_from_environment(), # 2\n os.path.join(home_dir(), DOCKER_CONFIG_FILENAME), # 3\n os.path.join(home_dir(), LEGACY_DOCKER_CONFIG_FILENAME), # 4\n ]))\n\n log.debug(\"Trying paths: {0}\".format(repr(paths)))\n\n for path in paths:\n if os.path.exists(path):\n log.debug(\"Found file at path: {0}\".format(path))\n return path\n\n log.debug(\"No config file found\")\n\n return None\n\n\ndef config_path_from_environment():\n config_dir = os.environ.get('DOCKER_CONFIG')\n if not config_dir:\n return None\n return os.path.join(config_dir, os.path.basename(DOCKER_CONFIG_FILENAME))\n\n\ndef home_dir():\n \"\"\"\n Get the user's home directory, using the same logic as the Docker Engine\n client - use %USERPROFILE% on Windows, $HOME/getuid on POSIX.\n \"\"\"\n if IS_WINDOWS_PLATFORM:\n return os.environ.get('USERPROFILE', '')\n else:\n return os.path.expanduser('~')\n\n\ndef load_config(config_path=None):\n \"\"\"\n Loads authentication data from a Docker configuration file in the given\n root directory or if config_path is passed use given path.\n Lookup priority:\n explicit config_path parameter > DOCKER_CONFIG environment variable >\n ~/.docker/config.json > ~/.dockercfg\n \"\"\"\n config_file = find_config_file(config_path)\n\n if not config_file:\n return {}\n\n try:\n with open(config_file) as f:\n data = json.load(f)\n res = {}\n if data.get('auths'):\n log.debug(\"Found 'auths' section\")\n res.update(parse_auth(data['auths'], raise_on_error=True))\n if data.get('HttpHeaders'):\n log.debug(\"Found 'HttpHeaders' section\")\n res.update({'HttpHeaders': data['HttpHeaders']})\n if data.get('credsStore'):\n log.debug(\"Found 'credsStore' section\")\n res.update({'credsStore': data['credsStore']})\n if res:\n return res\n else:\n log.debug(\"Couldn't find 'auths' or 'HttpHeaders' sections\")\n f.seek(0)\n return parse_auth(json.load(f))\n except (IOError, KeyError, ValueError) as e:\n # Likely missing new Docker config file or it's in an\n # unknown format, continue to attempt to read old location\n # and format.\n log.debug(e)\n\n log.debug(\"Attempting to parse legacy auth file format\")\n try:\n data = []\n with open(config_file) as f:\n for line in f.readlines():\n data.append(line.strip().split(' = ')[1])\n if len(data) < 2:\n # Not enough data\n raise errors.InvalidConfigFile(\n 'Invalid or 
empty configuration file!'\n )\n\n username, password = decode_auth(data[0])\n return {\n INDEX_NAME: {\n 'username': username,\n 'password': password,\n 'email': data[1],\n 'serveraddress': INDEX_URL,\n }\n }\n except Exception as e:\n log.debug(e)\n pass\n\n log.debug(\"All parsing attempts failed - returning empty config\")\n return {}\n", "path": "docker/auth.py"}], "after_files": [{"content": "import base64\nimport json\nimport logging\nimport os\n\nimport dockerpycreds\nimport six\n\nfrom . import errors\nfrom .constants import IS_WINDOWS_PLATFORM\n\nINDEX_NAME = 'docker.io'\nINDEX_URL = 'https://{0}/v1/'.format(INDEX_NAME)\nDOCKER_CONFIG_FILENAME = os.path.join('.docker', 'config.json')\nLEGACY_DOCKER_CONFIG_FILENAME = '.dockercfg'\nTOKEN_USERNAME = '<token>'\n\nlog = logging.getLogger(__name__)\n\n\ndef resolve_repository_name(repo_name):\n if '://' in repo_name:\n raise errors.InvalidRepository(\n 'Repository name cannot contain a scheme ({0})'.format(repo_name)\n )\n\n index_name, remote_name = split_repo_name(repo_name)\n if index_name[0] == '-' or index_name[-1] == '-':\n raise errors.InvalidRepository(\n 'Invalid index name ({0}). Cannot begin or end with a'\n ' hyphen.'.format(index_name)\n )\n return resolve_index_name(index_name), remote_name\n\n\ndef resolve_index_name(index_name):\n index_name = convert_to_hostname(index_name)\n if index_name == 'index.' + INDEX_NAME:\n index_name = INDEX_NAME\n return index_name\n\n\ndef get_config_header(client, registry):\n log.debug('Looking for auth config')\n if not client._auth_configs:\n log.debug(\n \"No auth config in memory - loading from filesystem\"\n )\n client._auth_configs = load_config()\n authcfg = resolve_authconfig(client._auth_configs, registry)\n # Do not fail here if no authentication exists for this\n # specific registry as we can have a readonly pull. Just\n # put the header if we can.\n if authcfg:\n log.debug('Found auth config')\n # auth_config needs to be a dict in the format used by\n # auth.py username , password, serveraddress, email\n return encode_header(authcfg)\n log.debug('No auth config found')\n return None\n\n\ndef split_repo_name(repo_name):\n parts = repo_name.split('/', 1)\n if len(parts) == 1 or (\n '.' not in parts[0] and ':' not in parts[0] and parts[0] != 'localhost'\n ):\n # This is a docker index repo (ex: username/foobar or ubuntu)\n return INDEX_NAME, repo_name\n return tuple(parts)\n\n\ndef get_credential_store(authconfig, registry):\n if not registry or registry == INDEX_NAME:\n registry = 'https://index.docker.io/v1/'\n\n return authconfig.get('credHelpers', {}).get(registry) or authconfig.get(\n 'credsStore'\n )\n\n\ndef resolve_authconfig(authconfig, registry=None):\n \"\"\"\n Returns the authentication data from the given auth configuration for a\n specific registry. 
As with the Docker client, legacy entries in the config\n with full URLs are stripped down to hostnames before checking for a match.\n Returns None if no match was found.\n \"\"\"\n\n if 'credHelpers' in authconfig or 'credsStore' in authconfig:\n store_name = get_credential_store(authconfig, registry)\n if store_name is not None:\n log.debug(\n 'Using credentials store \"{0}\"'.format(store_name)\n )\n return _resolve_authconfig_credstore(\n authconfig, registry, store_name\n )\n\n # Default to the public index server\n registry = resolve_index_name(registry) if registry else INDEX_NAME\n log.debug(\"Looking for auth entry for {0}\".format(repr(registry)))\n\n if registry in authconfig:\n log.debug(\"Found {0}\".format(repr(registry)))\n return authconfig[registry]\n\n for key, config in six.iteritems(authconfig):\n if resolve_index_name(key) == registry:\n log.debug(\"Found {0}\".format(repr(key)))\n return config\n\n log.debug(\"No entry found\")\n return None\n\n\ndef _resolve_authconfig_credstore(authconfig, registry, credstore_name):\n if not registry or registry == INDEX_NAME:\n # The ecosystem is a little schizophrenic with index.docker.io VS\n # docker.io - in that case, it seems the full URL is necessary.\n registry = 'https://index.docker.io/v1/'\n log.debug(\"Looking for auth entry for {0}\".format(repr(registry)))\n store = dockerpycreds.Store(credstore_name)\n try:\n data = store.get(registry)\n res = {\n 'ServerAddress': registry,\n }\n if data['Username'] == TOKEN_USERNAME:\n res['IdentityToken'] = data['Secret']\n else:\n res.update({\n 'Username': data['Username'],\n 'Password': data['Secret'],\n })\n return res\n except dockerpycreds.CredentialsNotFound as e:\n log.debug('No entry found')\n return None\n except dockerpycreds.StoreError as e:\n raise errors.DockerException(\n 'Credentials store error: {0}'.format(repr(e))\n )\n\n\ndef convert_to_hostname(url):\n return url.replace('http://', '').replace('https://', '').split('/', 1)[0]\n\n\ndef decode_auth(auth):\n if isinstance(auth, six.string_types):\n auth = auth.encode('ascii')\n s = base64.b64decode(auth)\n login, pwd = s.split(b':', 1)\n return login.decode('utf8'), pwd.decode('utf8')\n\n\ndef encode_header(auth):\n auth_json = json.dumps(auth).encode('ascii')\n return base64.urlsafe_b64encode(auth_json)\n\n\ndef parse_auth(entries, raise_on_error=False):\n \"\"\"\n Parses authentication entries\n\n Args:\n entries: Dict of authentication entries.\n raise_on_error: If set to true, an invalid format will raise\n InvalidConfigFile\n\n Returns:\n Authentication registry.\n \"\"\"\n\n conf = {}\n for registry, entry in six.iteritems(entries):\n if not isinstance(entry, dict):\n log.debug(\n 'Config entry for key {0} is not auth config'.format(registry)\n )\n # We sometimes fall back to parsing the whole config as if it was\n # the auth config by itself, for legacy purposes. 
In that case, we\n # fail silently and return an empty conf if any of the keys is not\n # formatted properly.\n if raise_on_error:\n raise errors.InvalidConfigFile(\n 'Invalid configuration for registry {0}'.format(registry)\n )\n return {}\n if 'identitytoken' in entry:\n log.debug('Found an IdentityToken entry for registry {0}'.format(\n registry\n ))\n conf[registry] = {\n 'IdentityToken': entry['identitytoken']\n }\n continue # Other values are irrelevant if we have a token, skip.\n\n if 'auth' not in entry:\n # Starting with engine v1.11 (API 1.23), an empty dictionary is\n # a valid value in the auths config.\n # https://github.com/docker/compose/issues/3265\n log.debug(\n 'Auth data for {0} is absent. Client might be using a '\n 'credentials store instead.'\n )\n conf[registry] = {}\n continue\n\n username, password = decode_auth(entry['auth'])\n log.debug(\n 'Found entry (registry={0}, username={1})'\n .format(repr(registry), repr(username))\n )\n\n conf[registry] = {\n 'username': username,\n 'password': password,\n 'email': entry.get('email'),\n 'serveraddress': registry,\n }\n return conf\n\n\ndef find_config_file(config_path=None):\n paths = list(filter(None, [\n config_path, # 1\n config_path_from_environment(), # 2\n os.path.join(home_dir(), DOCKER_CONFIG_FILENAME), # 3\n os.path.join(home_dir(), LEGACY_DOCKER_CONFIG_FILENAME), # 4\n ]))\n\n log.debug(\"Trying paths: {0}\".format(repr(paths)))\n\n for path in paths:\n if os.path.exists(path):\n log.debug(\"Found file at path: {0}\".format(path))\n return path\n\n log.debug(\"No config file found\")\n\n return None\n\n\ndef config_path_from_environment():\n config_dir = os.environ.get('DOCKER_CONFIG')\n if not config_dir:\n return None\n return os.path.join(config_dir, os.path.basename(DOCKER_CONFIG_FILENAME))\n\n\ndef home_dir():\n \"\"\"\n Get the user's home directory, using the same logic as the Docker Engine\n client - use %USERPROFILE% on Windows, $HOME/getuid on POSIX.\n \"\"\"\n if IS_WINDOWS_PLATFORM:\n return os.environ.get('USERPROFILE', '')\n else:\n return os.path.expanduser('~')\n\n\ndef load_config(config_path=None):\n \"\"\"\n Loads authentication data from a Docker configuration file in the given\n root directory or if config_path is passed use given path.\n Lookup priority:\n explicit config_path parameter > DOCKER_CONFIG environment variable >\n ~/.docker/config.json > ~/.dockercfg\n \"\"\"\n config_file = find_config_file(config_path)\n\n if not config_file:\n return {}\n\n try:\n with open(config_file) as f:\n data = json.load(f)\n res = {}\n if data.get('auths'):\n log.debug(\"Found 'auths' section\")\n res.update(parse_auth(data['auths'], raise_on_error=True))\n if data.get('HttpHeaders'):\n log.debug(\"Found 'HttpHeaders' section\")\n res.update({'HttpHeaders': data['HttpHeaders']})\n if data.get('credsStore'):\n log.debug(\"Found 'credsStore' section\")\n res.update({'credsStore': data['credsStore']})\n if data.get('credHelpers'):\n log.debug(\"Found 'credHelpers' section\")\n res.update({'credHelpers': data['credHelpers']})\n if res:\n return res\n else:\n log.debug(\"Couldn't find 'auths' or 'HttpHeaders' sections\")\n f.seek(0)\n return parse_auth(json.load(f))\n except (IOError, KeyError, ValueError) as e:\n # Likely missing new Docker config file or it's in an\n # unknown format, continue to attempt to read old location\n # and format.\n log.debug(e)\n\n log.debug(\"Attempting to parse legacy auth file format\")\n try:\n data = []\n with open(config_file) as f:\n for line in f.readlines():\n 
data.append(line.strip().split(' = ')[1])\n if len(data) < 2:\n # Not enough data\n raise errors.InvalidConfigFile(\n 'Invalid or empty configuration file!'\n )\n\n username, password = decode_auth(data[0])\n return {\n INDEX_NAME: {\n 'username': username,\n 'password': password,\n 'email': data[1],\n 'serveraddress': INDEX_URL,\n }\n }\n except Exception as e:\n log.debug(e)\n pass\n\n log.debug(\"All parsing attempts failed - returning empty config\")\n return {}\n", "path": "docker/auth.py"}]}
| 3,406 | 461 |
gh_patches_debug_10444
|
rasdani/github-patches
|
git_diff
|
buildbot__buildbot-4576
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
util.RemoteUserAuth raises builtins.TypeError
RemoteUserAuth (nginx HTTP basic auth) fails with a TypeError because the result of the regex match has values of type `bytes`, e.g. `{'username': b'foobar'}`.
Full stack trace:
```python
Traceback (most recent call last):
File "/.../python3.5/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
result = g.send(result)
File "/.../python3.5/site-packages/buildbot/www/auth.py", line 131, in maybeAutoLogin
yield self.updateUserInfo(request)
File "/.../python3.5/site-packages/twisted/internet/defer.py", line 1613, in unwindGenerator
return _cancellableInlineCallbacks(gen)
File "/.../python3.5/site-packages/twisted/internet/defer.py", line 1529, in _cancellableInlineCallbacks
_inlineCallbacks(None, g, status)
--- <exception caught here> ---
File "/.../python3.5/site-packages/twisted/internet/defer.py", line 654, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/.../python3.5/site-packages/buildbot/www/resource.py", line 92, in failHttpError
f.trap(Error)
File "/.../python3.5/site-packages/twisted/python/failure.py", line 439, in trap
self.raiseException()
File "/.../python3.5/site-packages/twisted/python/failure.py", line 467, in raiseException
raise self.value.with_traceback(self.tb)
File "/.../python3.5/site-packages/twisted/internet/defer.py", line 654, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/.../python3.5/site-packages/buildbot/www/resource.py", line 85, in failHttpRedirect
f.trap(Redirect)
File "/.../python3.5/site-packages/twisted/python/failure.py", line 439, in trap
self.raiseException()
File "/.../python3.5/site-packages/twisted/python/failure.py", line 467, in raiseException
raise self.value.with_traceback(self.tb)
File "/.../python3.5/site-packages/buildbot/www/config.py", line 126, in renderIndex
yield self.config['auth'].maybeAutoLogin(request)
File "/.../python3.5/site-packages/buildbot/www/auth.py", line 131, in maybeAutoLogin
yield self.updateUserInfo(request)
File "/.../python3.5/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
result = g.send(result)
File "/.../python3.5/site-packages/buildbot/www/auth.py", line 78, in updateUserInfo
session.updateSession(request)
File "/.../python3.5/site-packages/buildbot/www/service.py", line 110, in updateSession
request.addCookie(cookiename, self.uid, path=b"/",
File "/.../python3.5/site-packages/buildbot/www/service.py", line 137, in uid
return jwt.encode(claims, self.site.session_secret, algorithm=SESSION_SECRET_ALGORITHM)
File "/.../python3.5/site-packages/jwt/api_jwt.py", line 62, in encode
cls=json_encoder
File "/usr/lib/python3.5/json/__init__.py", line 237, in dumps
**kw).encode(obj)
File "/usr/lib/python3.5/json/encoder.py", line 198, in encode
chunks = self.iterencode(o, _one_shot=True)
File "/usr/lib/python3.5/json/encoder.py", line 256, in iterencode
return _iterencode(o, 0)
File "/usr/lib/python3.5/json/encoder.py", line 179, in default
raise TypeError(repr(o) + " is not JSON serializable")
builtins.TypeError: b'foobar' is not JSON serializable
```
The following line [here](https://github.com/buildbot/buildbot/blob/master/master/buildbot/www/auth.py#L127) resolves the problem:
```python
user_info = {k: buildbot.util.bytes2unicode(v) for k, v in res.groupdict().items()}
```
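The failure itself is just the standard `json` encoder refusing to serialize `bytes`; here is a minimal sketch, independent of Buildbot, of the broken and fixed cases (the decode step is roughly what `bytes2unicode` does, and the header value is only an example):
```python
import json

# e.g. what res.groupdict() yields for a REMOTE_USER header of b"foobar@example.org"
user_info = {'username': b'foobar', 'realm': b'example.org'}

try:
    json.dumps(user_info)
except TypeError as exc:
    print(exc)  # bytes values are rejected by the default JSONEncoder

# decoding the bytes values first makes the session claims serializable
decoded = {k: v.decode('utf-8') if isinstance(v, bytes) else v
           for k, v in user_info.items()}
print(json.dumps(decoded))  # {"username": "foobar", "realm": "example.org"}
```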
Sorry if this is a duplicate, but I failed to find a ticket for this issue.
I will create a PR as soon as I have spare time.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `master/buildbot/www/auth.py`
Content:
```
1 # This file is part of Buildbot. Buildbot is free software: you can
2 # redistribute it and/or modify it under the terms of the GNU General Public
3 # License as published by the Free Software Foundation, version 2.
4 #
5 # This program is distributed in the hope that it will be useful, but WITHOUT
6 # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
7 # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
8 # details.
9 #
10 # You should have received a copy of the GNU General Public License along with
11 # this program; if not, write to the Free Software Foundation, Inc., 51
12 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
13 #
14 # Copyright Buildbot Team Members
15
16
17 import re
18 from abc import ABCMeta
19 from abc import abstractmethod
20
21 from twisted.cred.checkers import FilePasswordDB
22 from twisted.cred.checkers import ICredentialsChecker
23 from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
24 from twisted.cred.credentials import IUsernamePassword
25 from twisted.cred.error import UnauthorizedLogin
26 from twisted.cred.portal import IRealm
27 from twisted.cred.portal import Portal
28 from twisted.internet import defer
29 from twisted.web.error import Error
30 from twisted.web.guard import BasicCredentialFactory
31 from twisted.web.guard import DigestCredentialFactory
32 from twisted.web.guard import HTTPAuthSessionWrapper
33 from twisted.web.resource import IResource
34 from zope.interface import implementer
35
36 from buildbot.util import bytes2unicode
37 from buildbot.util import config
38 from buildbot.util import unicode2bytes
39 from buildbot.www import resource
40
41
42 class AuthRootResource(resource.Resource):
43
44 def getChild(self, path, request):
45 # return dynamically generated resources
46 if path == b'login':
47 return self.master.www.auth.getLoginResource()
48 elif path == b'logout':
49 return self.master.www.auth.getLogoutResource()
50 return resource.Resource.getChild(self, path, request)
51
52
53 class AuthBase(config.ConfiguredMixin):
54
55 def __init__(self, userInfoProvider=None):
56 self.userInfoProvider = userInfoProvider
57
58 def reconfigAuth(self, master, new_config):
59 self.master = master
60
61 def maybeAutoLogin(self, request):
62 return defer.succeed(None)
63
64 def getLoginResource(self):
65 raise Error(501, b"not implemented")
66
67 def getLogoutResource(self):
68 return LogoutResource(self.master)
69
70 @defer.inlineCallbacks
71 def updateUserInfo(self, request):
72 session = request.getSession()
73 if self.userInfoProvider is not None:
74 infos = yield self.userInfoProvider.getUserInfo(session.user_info['username'])
75 session.user_info.update(infos)
76 session.updateSession(request)
77
78 def getConfigDict(self):
79 return {'name': type(self).__name__}
80
81
82 class UserInfoProviderBase(config.ConfiguredMixin):
83 name = "noinfo"
84
85 def getUserInfo(self, username):
86 return defer.succeed({'email': username})
87
88
89 class LoginResource(resource.Resource):
90
91 def render_GET(self, request):
92 return self.asyncRenderHelper(request, self.renderLogin)
93
94 @defer.inlineCallbacks
95 def renderLogin(self, request):
96 raise NotImplementedError
97
98
99 class NoAuth(AuthBase):
100 pass
101
102
103 class RemoteUserAuth(AuthBase):
104 header = b"REMOTE_USER"
105 headerRegex = re.compile(br"(?P<username>[^ @]+)@(?P<realm>[^ @]+)")
106
107 def __init__(self, header=None, headerRegex=None, **kwargs):
108 AuthBase.__init__(self, **kwargs)
109 if self.userInfoProvider is None:
110 self.userInfoProvider = UserInfoProviderBase()
111 if header is not None:
112 self.header = unicode2bytes(header)
113 if headerRegex is not None:
114 self.headerRegex = re.compile(unicode2bytes(headerRegex))
115
116 @defer.inlineCallbacks
117 def maybeAutoLogin(self, request):
118 header = request.getHeader(self.header)
119 if header is None:
120 raise Error(403, b"missing http header " + self.header + b". Check your reverse proxy config!")
121 res = self.headerRegex.match(header)
122 if res is None:
123 raise Error(
124 403, b'http header does not match regex! "' + header + b'" not matching ' + self.headerRegex.pattern)
125 session = request.getSession()
126 if session.user_info != dict(res.groupdict()):
127 session.user_info = dict(res.groupdict())
128 yield self.updateUserInfo(request)
129
130
131 @implementer(IRealm)
132 class AuthRealm(object):
133
134 def __init__(self, master, auth):
135 self.auth = auth
136 self.master = master
137
138 def requestAvatar(self, avatarId, mind, *interfaces):
139 if IResource in interfaces:
140 return (IResource,
141 PreAuthenticatedLoginResource(self.master, avatarId),
142 lambda: None)
143 raise NotImplementedError()
144
145
146 class TwistedICredAuthBase(AuthBase):
147
148 def __init__(self, credentialFactories, checkers, **kwargs):
149 AuthBase.__init__(self, **kwargs)
150 if self.userInfoProvider is None:
151 self.userInfoProvider = UserInfoProviderBase()
152 self.credentialFactories = credentialFactories
153 self.checkers = checkers
154
155 def getLoginResource(self):
156 return HTTPAuthSessionWrapper(
157 Portal(AuthRealm(self.master, self), self.checkers),
158 self.credentialFactories)
159
160
161 class HTPasswdAuth(TwistedICredAuthBase):
162
163 def __init__(self, passwdFile, **kwargs):
164 TwistedICredAuthBase.__init__(
165 self,
166 [DigestCredentialFactory(b"md5", b"buildbot"),
167 BasicCredentialFactory(b"buildbot")],
168 [FilePasswordDB(passwdFile)],
169 **kwargs)
170
171
172 class UserPasswordAuth(TwistedICredAuthBase):
173
174 def __init__(self, users, **kwargs):
175 if isinstance(users, dict):
176 users = {user: unicode2bytes(pw) for user, pw in users.items()}
177 elif isinstance(users, list):
178 users = [(user, unicode2bytes(pw)) for user, pw in users]
179 TwistedICredAuthBase.__init__(
180 self,
181 [DigestCredentialFactory(b"md5", b"buildbot"),
182 BasicCredentialFactory(b"buildbot")],
183 [InMemoryUsernamePasswordDatabaseDontUse(**dict(users))],
184 **kwargs)
185
186
187 @implementer(ICredentialsChecker)
188 class CustomAuth(TwistedICredAuthBase):
189 __metaclass__ = ABCMeta
190 credentialInterfaces = [IUsernamePassword]
191
192 def __init__(self, **kwargs):
193 TwistedICredAuthBase.__init__(
194 self,
195 [BasicCredentialFactory(b"buildbot")],
196 [self],
197 **kwargs)
198
199 def requestAvatarId(self, cred):
200 if self.check_credentials(cred.username, cred.password):
201 return defer.succeed(cred.username)
202 return defer.fail(UnauthorizedLogin())
203
204 @abstractmethod
205 def check_credentials(username, password):
206 return False
207
208
209 def _redirect(master, request):
210 url = request.args.get(b"redirect", [b"/"])[0]
211 url = bytes2unicode(url)
212 return resource.Redirect(master.config.buildbotURL + "#" + url)
213
214
215 class PreAuthenticatedLoginResource(LoginResource):
216 # a LoginResource which is already authenticated via a
217 # HTTPAuthSessionWrapper
218
219 def __init__(self, master, username):
220 LoginResource.__init__(self, master)
221 self.username = username
222
223 @defer.inlineCallbacks
224 def renderLogin(self, request):
225 session = request.getSession()
226 session.user_info = dict(username=bytes2unicode(self.username))
227 yield self.master.www.auth.updateUserInfo(request)
228 raise _redirect(self.master, request)
229
230
231 class LogoutResource(resource.Resource):
232
233 def render_GET(self, request):
234 session = request.getSession()
235 session.expire()
236 session.updateSession(request)
237 request.redirect(_redirect(self.master, request).url)
238 return b''
239
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/master/buildbot/www/auth.py b/master/buildbot/www/auth.py
--- a/master/buildbot/www/auth.py
+++ b/master/buildbot/www/auth.py
@@ -123,8 +123,9 @@
raise Error(
403, b'http header does not match regex! "' + header + b'" not matching ' + self.headerRegex.pattern)
session = request.getSession()
- if session.user_info != dict(res.groupdict()):
- session.user_info = dict(res.groupdict())
+ user_info = {k: bytes2unicode(v) for k, v in res.groupdict().items()}
+ if session.user_info != user_info:
+ session.user_info = user_info
yield self.updateUserInfo(request)
|
{"golden_diff": "diff --git a/master/buildbot/www/auth.py b/master/buildbot/www/auth.py\n--- a/master/buildbot/www/auth.py\n+++ b/master/buildbot/www/auth.py\n@@ -123,8 +123,9 @@\n raise Error(\n 403, b'http header does not match regex! \"' + header + b'\" not matching ' + self.headerRegex.pattern)\n session = request.getSession()\n- if session.user_info != dict(res.groupdict()):\n- session.user_info = dict(res.groupdict())\n+ user_info = {k: bytes2unicode(v) for k, v in res.groupdict().items()}\n+ if session.user_info != user_info:\n+ session.user_info = user_info\n yield self.updateUserInfo(request)\n", "issue": "util.RemoteUserAuth raises builtins.TypeError\nRemoteUserAuth (nginx http basic auth) fails with Type error, because the result of the regex match has values of type `bytes`, e.g.: `{'username': b'foobar'}`\r\nFull stacktrace\r\n```python\r\n Traceback (most recent call last):\r\n File \"/.../python3.5/site-packages/twisted/internet/defer.py\", line 1418, in _inlineCallbacks\r\n result = g.send(result)\r\n File \"/.../python3.5/site-packages/buildbot/www/auth.py\", line 131, in maybeAutoLogin\r\n yield self.updateUserInfo(request)\r\n File \"/.../python3.5/site-packages/twisted/internet/defer.py\", line 1613, in unwindGenerator \r\n return _cancellableInlineCallbacks(gen)\r\n File \"/.../python3.5/site-packages/twisted/internet/defer.py\", line 1529, in _cancellableInlineCallbacks\r\n _inlineCallbacks(None, g, status)\r\n --- <exception caught here> ---\r\n File \"/.../python3.5/site-packages/twisted/internet/defer.py\", line 654, in _runCallbacks\r\n current.result = callback(current.result, *args, **kw)\r\n File \"/.../python3.5/site-packages/buildbot/www/resource.py\", line 92, in failHttpError\r\n f.trap(Error)\r\n File \"/.../python3.5/site-packages/twisted/python/failure.py\", line 439, in trap\r\n self.raiseException()\r\n File \"/.../python3.5/site-packages/twisted/python/failure.py\", line 467, in raiseException\r\n raise self.value.with_traceback(self.tb)\r\n File \"/.../python3.5/site-packages/twisted/internet/defer.py\", line 654, in _runCallbacks\r\n current.result = callback(current.result, *args, **kw)\r\n File \"/.../python3.5/site-packages/buildbot/www/resource.py\", line 85, in failHttpRedirect\r\n f.trap(Redirect)\r\n File \"/.../python3.5/site-packages/twisted/python/failure.py\", line 439, in trap\r\n self.raiseException()\r\n File \"/.../python3.5/site-packages/twisted/python/failure.py\", line 467, in raiseException\r\n raise self.value.with_traceback(self.tb)\r\n File \"/.../python3.5/site-packages/buildbot/www/config.py\", line 126, in renderIndex\r\n yield self.config['auth'].maybeAutoLogin(request)\r\n File \"/.../python3.5/site-packages/buildbot/www/auth.py\", line 131, in maybeAutoLogin\r\n yield self.updateUserInfo(request)\r\n File \"/.../python3.5/site-packages/twisted/internet/defer.py\", line 1418, in _inlineCallbacks\r\n result = g.send(result)\r\n File \"/.../python3.5/site-packages/buildbot/www/auth.py\", line 78, in updateUserInfo\r\n session.updateSession(request)\r\n File \"/.../python3.5/site-packages/buildbot/www/service.py\", line 110, in updateSession\r\n request.addCookie(cookiename, self.uid, path=b\"/\",\r\n File \"/.../python3.5/site-packages/buildbot/www/service.py\", line 137, in uid\r\n return jwt.encode(claims, self.site.session_secret, algorithm=SESSION_SECRET_ALGORITHM)\r\n File \"/.../python3.5/site-packages/jwt/api_jwt.py\", line 62, in encode\r\n cls=json_encoder\r\n File \"/usr/lib/python3.5/json/__init__.py\", line 237, 
in dumps\r\n **kw).encode(obj)\r\n File \"/usr/lib/python3.5/json/encoder.py\", line 198, in encode\r\n chunks = self.iterencode(o, _one_shot=True)\r\n File \"/usr/lib/python3.5/json/encoder.py\", line 256, in iterencode\r\n return _iterencode(o, 0)\r\n File \"/usr/lib/python3.5/json/encoder.py\", line 179, in default\r\n raise TypeError(repr(o) + \" is not JSON serializable\")\r\n builtins.TypeError: b'foobar' is not JSON serializable\r\n```\r\nfollowing line [here](https://github.com/buildbot/buildbot/blob/master/master/buildbot/www/auth.py#L127) resolves the problem:\r\n```python\r\nuser_info = {k: buildbot.util.bytes2unicode(v) for k, v in res.groupdict().items()}\r\n```\r\nSorry if this is a duplicate, but I failed to find a ticket for this issue. \r\nI will create a PR as soon as I have spare time.\n", "before_files": [{"content": "# This file is part of Buildbot. Buildbot is free software: you can\n# redistribute it and/or modify it under the terms of the GNU General Public\n# License as published by the Free Software Foundation, version 2.\n#\n# This program is distributed in the hope that it will be useful, but WITHOUT\n# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\n# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more\n# details.\n#\n# You should have received a copy of the GNU General Public License along with\n# this program; if not, write to the Free Software Foundation, Inc., 51\n# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n#\n# Copyright Buildbot Team Members\n\n\nimport re\nfrom abc import ABCMeta\nfrom abc import abstractmethod\n\nfrom twisted.cred.checkers import FilePasswordDB\nfrom twisted.cred.checkers import ICredentialsChecker\nfrom twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse\nfrom twisted.cred.credentials import IUsernamePassword\nfrom twisted.cred.error import UnauthorizedLogin\nfrom twisted.cred.portal import IRealm\nfrom twisted.cred.portal import Portal\nfrom twisted.internet import defer\nfrom twisted.web.error import Error\nfrom twisted.web.guard import BasicCredentialFactory\nfrom twisted.web.guard import DigestCredentialFactory\nfrom twisted.web.guard import HTTPAuthSessionWrapper\nfrom twisted.web.resource import IResource\nfrom zope.interface import implementer\n\nfrom buildbot.util import bytes2unicode\nfrom buildbot.util import config\nfrom buildbot.util import unicode2bytes\nfrom buildbot.www import resource\n\n\nclass AuthRootResource(resource.Resource):\n\n def getChild(self, path, request):\n # return dynamically generated resources\n if path == b'login':\n return self.master.www.auth.getLoginResource()\n elif path == b'logout':\n return self.master.www.auth.getLogoutResource()\n return resource.Resource.getChild(self, path, request)\n\n\nclass AuthBase(config.ConfiguredMixin):\n\n def __init__(self, userInfoProvider=None):\n self.userInfoProvider = userInfoProvider\n\n def reconfigAuth(self, master, new_config):\n self.master = master\n\n def maybeAutoLogin(self, request):\n return defer.succeed(None)\n\n def getLoginResource(self):\n raise Error(501, b\"not implemented\")\n\n def getLogoutResource(self):\n return LogoutResource(self.master)\n\n @defer.inlineCallbacks\n def updateUserInfo(self, request):\n session = request.getSession()\n if self.userInfoProvider is not None:\n infos = yield self.userInfoProvider.getUserInfo(session.user_info['username'])\n session.user_info.update(infos)\n session.updateSession(request)\n\n def getConfigDict(self):\n return 
{'name': type(self).__name__}\n\n\nclass UserInfoProviderBase(config.ConfiguredMixin):\n name = \"noinfo\"\n\n def getUserInfo(self, username):\n return defer.succeed({'email': username})\n\n\nclass LoginResource(resource.Resource):\n\n def render_GET(self, request):\n return self.asyncRenderHelper(request, self.renderLogin)\n\n @defer.inlineCallbacks\n def renderLogin(self, request):\n raise NotImplementedError\n\n\nclass NoAuth(AuthBase):\n pass\n\n\nclass RemoteUserAuth(AuthBase):\n header = b\"REMOTE_USER\"\n headerRegex = re.compile(br\"(?P<username>[^ @]+)@(?P<realm>[^ @]+)\")\n\n def __init__(self, header=None, headerRegex=None, **kwargs):\n AuthBase.__init__(self, **kwargs)\n if self.userInfoProvider is None:\n self.userInfoProvider = UserInfoProviderBase()\n if header is not None:\n self.header = unicode2bytes(header)\n if headerRegex is not None:\n self.headerRegex = re.compile(unicode2bytes(headerRegex))\n\n @defer.inlineCallbacks\n def maybeAutoLogin(self, request):\n header = request.getHeader(self.header)\n if header is None:\n raise Error(403, b\"missing http header \" + self.header + b\". Check your reverse proxy config!\")\n res = self.headerRegex.match(header)\n if res is None:\n raise Error(\n 403, b'http header does not match regex! \"' + header + b'\" not matching ' + self.headerRegex.pattern)\n session = request.getSession()\n if session.user_info != dict(res.groupdict()):\n session.user_info = dict(res.groupdict())\n yield self.updateUserInfo(request)\n\n\n@implementer(IRealm)\nclass AuthRealm(object):\n\n def __init__(self, master, auth):\n self.auth = auth\n self.master = master\n\n def requestAvatar(self, avatarId, mind, *interfaces):\n if IResource in interfaces:\n return (IResource,\n PreAuthenticatedLoginResource(self.master, avatarId),\n lambda: None)\n raise NotImplementedError()\n\n\nclass TwistedICredAuthBase(AuthBase):\n\n def __init__(self, credentialFactories, checkers, **kwargs):\n AuthBase.__init__(self, **kwargs)\n if self.userInfoProvider is None:\n self.userInfoProvider = UserInfoProviderBase()\n self.credentialFactories = credentialFactories\n self.checkers = checkers\n\n def getLoginResource(self):\n return HTTPAuthSessionWrapper(\n Portal(AuthRealm(self.master, self), self.checkers),\n self.credentialFactories)\n\n\nclass HTPasswdAuth(TwistedICredAuthBase):\n\n def __init__(self, passwdFile, **kwargs):\n TwistedICredAuthBase.__init__(\n self,\n [DigestCredentialFactory(b\"md5\", b\"buildbot\"),\n BasicCredentialFactory(b\"buildbot\")],\n [FilePasswordDB(passwdFile)],\n **kwargs)\n\n\nclass UserPasswordAuth(TwistedICredAuthBase):\n\n def __init__(self, users, **kwargs):\n if isinstance(users, dict):\n users = {user: unicode2bytes(pw) for user, pw in users.items()}\n elif isinstance(users, list):\n users = [(user, unicode2bytes(pw)) for user, pw in users]\n TwistedICredAuthBase.__init__(\n self,\n [DigestCredentialFactory(b\"md5\", b\"buildbot\"),\n BasicCredentialFactory(b\"buildbot\")],\n [InMemoryUsernamePasswordDatabaseDontUse(**dict(users))],\n **kwargs)\n\n\n@implementer(ICredentialsChecker)\nclass CustomAuth(TwistedICredAuthBase):\n __metaclass__ = ABCMeta\n credentialInterfaces = [IUsernamePassword]\n\n def __init__(self, **kwargs):\n TwistedICredAuthBase.__init__(\n self,\n [BasicCredentialFactory(b\"buildbot\")],\n [self],\n **kwargs)\n\n def requestAvatarId(self, cred):\n if self.check_credentials(cred.username, cred.password):\n return defer.succeed(cred.username)\n return defer.fail(UnauthorizedLogin())\n\n @abstractmethod\n def 
check_credentials(username, password):\n return False\n\n\ndef _redirect(master, request):\n url = request.args.get(b\"redirect\", [b\"/\"])[0]\n url = bytes2unicode(url)\n return resource.Redirect(master.config.buildbotURL + \"#\" + url)\n\n\nclass PreAuthenticatedLoginResource(LoginResource):\n # a LoginResource which is already authenticated via a\n # HTTPAuthSessionWrapper\n\n def __init__(self, master, username):\n LoginResource.__init__(self, master)\n self.username = username\n\n @defer.inlineCallbacks\n def renderLogin(self, request):\n session = request.getSession()\n session.user_info = dict(username=bytes2unicode(self.username))\n yield self.master.www.auth.updateUserInfo(request)\n raise _redirect(self.master, request)\n\n\nclass LogoutResource(resource.Resource):\n\n def render_GET(self, request):\n session = request.getSession()\n session.expire()\n session.updateSession(request)\n request.redirect(_redirect(self.master, request).url)\n return b''\n", "path": "master/buildbot/www/auth.py"}], "after_files": [{"content": "# This file is part of Buildbot. Buildbot is free software: you can\n# redistribute it and/or modify it under the terms of the GNU General Public\n# License as published by the Free Software Foundation, version 2.\n#\n# This program is distributed in the hope that it will be useful, but WITHOUT\n# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\n# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more\n# details.\n#\n# You should have received a copy of the GNU General Public License along with\n# this program; if not, write to the Free Software Foundation, Inc., 51\n# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n#\n# Copyright Buildbot Team Members\n\n\nimport re\nfrom abc import ABCMeta\nfrom abc import abstractmethod\n\nfrom twisted.cred.checkers import FilePasswordDB\nfrom twisted.cred.checkers import ICredentialsChecker\nfrom twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse\nfrom twisted.cred.credentials import IUsernamePassword\nfrom twisted.cred.error import UnauthorizedLogin\nfrom twisted.cred.portal import IRealm\nfrom twisted.cred.portal import Portal\nfrom twisted.internet import defer\nfrom twisted.web.error import Error\nfrom twisted.web.guard import BasicCredentialFactory\nfrom twisted.web.guard import DigestCredentialFactory\nfrom twisted.web.guard import HTTPAuthSessionWrapper\nfrom twisted.web.resource import IResource\nfrom zope.interface import implementer\n\nfrom buildbot.util import bytes2unicode\nfrom buildbot.util import config\nfrom buildbot.util import unicode2bytes\nfrom buildbot.www import resource\n\n\nclass AuthRootResource(resource.Resource):\n\n def getChild(self, path, request):\n # return dynamically generated resources\n if path == b'login':\n return self.master.www.auth.getLoginResource()\n elif path == b'logout':\n return self.master.www.auth.getLogoutResource()\n return resource.Resource.getChild(self, path, request)\n\n\nclass AuthBase(config.ConfiguredMixin):\n\n def __init__(self, userInfoProvider=None):\n self.userInfoProvider = userInfoProvider\n\n def reconfigAuth(self, master, new_config):\n self.master = master\n\n def maybeAutoLogin(self, request):\n return defer.succeed(None)\n\n def getLoginResource(self):\n raise Error(501, b\"not implemented\")\n\n def getLogoutResource(self):\n return LogoutResource(self.master)\n\n @defer.inlineCallbacks\n def updateUserInfo(self, request):\n session = request.getSession()\n if self.userInfoProvider 
is not None:\n infos = yield self.userInfoProvider.getUserInfo(session.user_info['username'])\n session.user_info.update(infos)\n session.updateSession(request)\n\n def getConfigDict(self):\n return {'name': type(self).__name__}\n\n\nclass UserInfoProviderBase(config.ConfiguredMixin):\n name = \"noinfo\"\n\n def getUserInfo(self, username):\n return defer.succeed({'email': username})\n\n\nclass LoginResource(resource.Resource):\n\n def render_GET(self, request):\n return self.asyncRenderHelper(request, self.renderLogin)\n\n @defer.inlineCallbacks\n def renderLogin(self, request):\n raise NotImplementedError\n\n\nclass NoAuth(AuthBase):\n pass\n\n\nclass RemoteUserAuth(AuthBase):\n header = b\"REMOTE_USER\"\n headerRegex = re.compile(br\"(?P<username>[^ @]+)@(?P<realm>[^ @]+)\")\n\n def __init__(self, header=None, headerRegex=None, **kwargs):\n AuthBase.__init__(self, **kwargs)\n if self.userInfoProvider is None:\n self.userInfoProvider = UserInfoProviderBase()\n if header is not None:\n self.header = unicode2bytes(header)\n if headerRegex is not None:\n self.headerRegex = re.compile(unicode2bytes(headerRegex))\n\n @defer.inlineCallbacks\n def maybeAutoLogin(self, request):\n header = request.getHeader(self.header)\n if header is None:\n raise Error(403, b\"missing http header \" + self.header + b\". Check your reverse proxy config!\")\n res = self.headerRegex.match(header)\n if res is None:\n raise Error(\n 403, b'http header does not match regex! \"' + header + b'\" not matching ' + self.headerRegex.pattern)\n session = request.getSession()\n user_info = {k: bytes2unicode(v) for k, v in res.groupdict().items()}\n if session.user_info != user_info:\n session.user_info = user_info\n yield self.updateUserInfo(request)\n\n\n@implementer(IRealm)\nclass AuthRealm(object):\n\n def __init__(self, master, auth):\n self.auth = auth\n self.master = master\n\n def requestAvatar(self, avatarId, mind, *interfaces):\n if IResource in interfaces:\n return (IResource,\n PreAuthenticatedLoginResource(self.master, avatarId),\n lambda: None)\n raise NotImplementedError()\n\n\nclass TwistedICredAuthBase(AuthBase):\n\n def __init__(self, credentialFactories, checkers, **kwargs):\n AuthBase.__init__(self, **kwargs)\n if self.userInfoProvider is None:\n self.userInfoProvider = UserInfoProviderBase()\n self.credentialFactories = credentialFactories\n self.checkers = checkers\n\n def getLoginResource(self):\n return HTTPAuthSessionWrapper(\n Portal(AuthRealm(self.master, self), self.checkers),\n self.credentialFactories)\n\n\nclass HTPasswdAuth(TwistedICredAuthBase):\n\n def __init__(self, passwdFile, **kwargs):\n TwistedICredAuthBase.__init__(\n self,\n [DigestCredentialFactory(b\"md5\", b\"buildbot\"),\n BasicCredentialFactory(b\"buildbot\")],\n [FilePasswordDB(passwdFile)],\n **kwargs)\n\n\nclass UserPasswordAuth(TwistedICredAuthBase):\n\n def __init__(self, users, **kwargs):\n if isinstance(users, dict):\n users = {user: unicode2bytes(pw) for user, pw in users.items()}\n elif isinstance(users, list):\n users = [(user, unicode2bytes(pw)) for user, pw in users]\n TwistedICredAuthBase.__init__(\n self,\n [DigestCredentialFactory(b\"md5\", b\"buildbot\"),\n BasicCredentialFactory(b\"buildbot\")],\n [InMemoryUsernamePasswordDatabaseDontUse(**dict(users))],\n **kwargs)\n\n\n@implementer(ICredentialsChecker)\nclass CustomAuth(TwistedICredAuthBase):\n __metaclass__ = ABCMeta\n credentialInterfaces = [IUsernamePassword]\n\n def __init__(self, **kwargs):\n TwistedICredAuthBase.__init__(\n self,\n 
[BasicCredentialFactory(b\"buildbot\")],\n [self],\n **kwargs)\n\n def requestAvatarId(self, cred):\n if self.check_credentials(cred.username, cred.password):\n return defer.succeed(cred.username)\n return defer.fail(UnauthorizedLogin())\n\n @abstractmethod\n def check_credentials(username, password):\n return False\n\n\ndef _redirect(master, request):\n url = request.args.get(b\"redirect\", [b\"/\"])[0]\n url = bytes2unicode(url)\n return resource.Redirect(master.config.buildbotURL + \"#\" + url)\n\n\nclass PreAuthenticatedLoginResource(LoginResource):\n # a LoginResource which is already authenticated via a\n # HTTPAuthSessionWrapper\n\n def __init__(self, master, username):\n LoginResource.__init__(self, master)\n self.username = username\n\n @defer.inlineCallbacks\n def renderLogin(self, request):\n session = request.getSession()\n session.user_info = dict(username=bytes2unicode(self.username))\n yield self.master.www.auth.updateUserInfo(request)\n raise _redirect(self.master, request)\n\n\nclass LogoutResource(resource.Resource):\n\n def render_GET(self, request):\n session = request.getSession()\n session.expire()\n session.updateSession(request)\n request.redirect(_redirect(self.master, request).url)\n return b''\n", "path": "master/buildbot/www/auth.py"}]}
| 3,643 | 162 |
gh_patches_debug_36190
|
rasdani/github-patches
|
git_diff
|
vaexio__vaex-1077
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG-REPORT] Multiple selection filters broken in latest version
**Description**
I think I've found an issue with the new internal representation of arrays as arrow arrays. This is in 4.0.0.dev0, so you may already plan to fix it by the next proper release, but I'm flagging it anyway as it seems relatively major.
With a simple test table `data`:
```
import pandas as pd
import numpy as np
import vaex
data = pd.DataFrame({
"A":np.array([1,2,3,4]),
"B":["A","B","C","D"],
"C":[np.datetime64('2019-10-07 09:00:00'),np.datetime64('2019-10-07 10:00:00'),np.datetime64('2019-10-07 11:00:00'),np.datetime64('2019-10-07 12:00:00')]
})
vaex.from_pandas(data).export_arrow('arrow_sample.arrow')
data = vaex.open('arrow_sample.arrow')
```
Normal single clause filters work as you'd expect:
```
>>> data[(data['B'] == 'D')]
# A B C
0 4 'D' Timestamp('2019-10-07 12:00:00')
>>> data[(data['A'] > 2)]
# A B C
0 3 'C' Timestamp('2019-10-07 11:00:00')
1 4 'D' Timestamp('2019-10-07 12:00:00')
```
But when you combine multiple clauses the answer is wrong (this should return a single row):
```
>>> data[(data['B'] == 'D') & (data['A'] > 2)]
# A B C
0 1 'A' Timestamp('2019-10-07 09:00:00')
1 2 'B' Timestamp('2019-10-07 10:00:00')
2 3 'C' Timestamp('2019-10-07 11:00:00')
3 4 'D' Timestamp('2019-10-07 12:00:00')
```
I first noticed this using a much larger table, and in that case it seemed to cause a memory leak (process memory continually rose after the query). I haven't been able to reliably reproduce that case, though, so I'm just reporting the simple case above; it seems likely they're the same bug with two different symptoms. With vaex==3.0.0 the query above returns a single row as you'd expect, so I suspect [this change](https://github.com/vaexio/vaex/pull/984/files) may have caused the issue, although I don't know enough to say how.
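A rough way to narrow this down, assuming `DataFrame.evaluate` still returns plain boolean masks for these expressions, is to compare the per-clause masks with the combined one:
```python
import numpy as np

mask_b = np.asarray(data.evaluate(data['B'] == 'D'))
mask_a = np.asarray(data.evaluate(data['A'] > 2))
mask_both = np.asarray(data.evaluate((data['B'] == 'D') & (data['A'] > 2)))

print(mask_b)     # expected: [False False False  True]
print(mask_a)     # expected: [False False  True  True]
print(mask_both)  # expected: [False False False  True]
# if mask_both comes back all True while the two inputs look right,
# the bug is in how the masks are combined rather than in the comparisons
```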
**Software information**
- Vaex version (`import vaex; vaex.__version__)`: `{'vaex': '4.0.0.dev0', 'vaex-core': '4.0.0a5', 'vaex-viz': '0.5.0.dev0', 'vaex-hdf5': '0.7.0a2', 'vaex-server': '0.4.0.dev0', 'vaex-astro': '0.8.0.dev0', 'vaex-ui': '0.3.0', 'vaex-jupyter': '0.6.0.dev0', 'vaex-ml': '0.11.0a4', 'vaex-graphql': '0.2.0.dev0'}`
- Vaex was installed from source
- OS: Ubuntu 18.04.4 LTS
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `packages/vaex-core/vaex/arrow/numpy_dispatch.py`
Content:
```
1 import numpy as np
2 import pyarrow as pa
3 import pyarrow.compute as pc
4 import vaex
5 from ..expression import _binary_ops, _unary_ops, reversable
6
7
8 def combine_missing(a, b):
9 assert a.offset == 0
10 if a.null_count > 0 or b.null_count > 0:
11 # not optimal
12 nulls = pc.invert(pc.or_(a.is_null(), b.is_null()))
13 assert nulls.offset == 0
14 nulls_buffer = nulls.buffers()[1]
15 # this is not the case: no reason why it should be (TODO: open arrow issue)
16 # assert nulls.buffers()[0] is None
17 else:
18 nulls_buffer = None
19 buffers = a.buffers()
20 return pa.Array.from_buffers(a.type, len(a), [nulls_buffer, buffers[1]])
21
22 class NumpyDispatch:
23 def __init__(self, ar):
24 self._array = ar
25 if isinstance(ar, vaex.column.ColumnStringArrow):
26 ar = pa.array(ar)
27 if isinstance(ar, np.ndarray):
28 self._numpy_array = ar
29 self._arrow_array = None
30 elif isinstance(ar, vaex.array_types.supported_arrow_array_types):
31 self._numpy_array = None
32 self._arrow_array = ar
33 else:
34 raise TypeError(f'Only support numpy and arrow, not {type(ar)}')
35
36 def add_missing(self, ar):
37 if isinstance(ar, np.ndarray):
38 # if we are an arrow array, we upgrade ar to one
39 if isinstance(self._array, vaex.array_types.supported_arrow_array_types):
40 ar = vaex.array_types.to_arrow(ar)
41 ar = combine_missing(ar, self._array)
42 # else: both numpy, handled by numpy
43 else:
44 if isinstance(self._array, vaex.array_types.supported_arrow_array_types):
45 ar = combine_missing(ar, self._array)
46 # else: was numpy, handled by numpy
47 return ar
48
49
50 @property
51 def numpy_array(self):
52 if self._numpy_array is None:
53 import vaex.arrow.convert
54 arrow_array = self._arrow_array
55 arrow_array = vaex.arrow.convert.ensure_not_chunked(arrow_array)
56 buffers = arrow_array.buffers()
57 # for math, we don't care about the nulls
58 if buffers[0] is not None:
59 buffers[0] = None
60 arrow_array = pa.Array.from_buffers(arrow_array.type, len(arrow_array), buffers, offset=arrow_array.offset)
61 self._numpy_array = vaex.array_types.to_numpy(arrow_array)
62 return self._numpy_array
63
64 @property
65 def arrow_array(self):
66 if self._arrow_array is None:
67 # convert lazily, since not all arrow arrays (e.g. lists) can be converted
68 if self._arrow_array is None:
69 self._arrow_array = vaex.array_types.to_arrow(self._numpy_array)
70 return self._arrow_array
71
72 def __eq__(self, rhs):
73 if vaex.array_types.is_string(self.arrow_array):
74 # this does not support scalar input
75 # return pc.equal(self.arrow_array, rhs)
76 return NumpyDispatch(pa.array(vaex.functions.str_equals(self.arrow_array, rhs)))
77 else:
78 if isinstance(rhs, NumpyDispatch):
79 rhs = rhs.numpy_array
80 return NumpyDispatch(pa.array(self.numpy_array == rhs))
81
82 for op in _binary_ops:
83 def closure(op=op):
84 def operator(a, b):
85 a_data = a
86 b_data = b
87 if isinstance(a, NumpyDispatch):
88 a_data = a.numpy_array
89 if isinstance(b, NumpyDispatch):
90 b_data = b.numpy_array
91 result_data = op['op'](a_data, b_data)
92 if isinstance(a, NumpyDispatch):
93 result_data = a.add_missing(result_data)
94 if isinstance(b, NumpyDispatch):
95 result_data = b.add_missing(result_data)
96 return NumpyDispatch(result_data)
97 return operator
98 method_name = '__%s__' % op['name']
99 if op['name'] != "eq":
100 setattr(NumpyDispatch, method_name, closure())
101 # to support e.g. (1 + ...) # to support e.g. (1 + ...)
102 if op['name'] in reversable:
103 def closure(op=op):
104 def operator(b, a):
105 a_data = a
106 b_data = b
107 if isinstance(a, NumpyDispatch):
108 a_data = a.numpy_array
109 if isinstance(b, NumpyDispatch):
110 b_data = b.numpy_array
111 result_data = op['op'](a_data, b_data)
112 if isinstance(a, NumpyDispatch):
113 result_data = a.add_missing(result_data)
114 if isinstance(b, NumpyDispatch):
115 result_data = b.add_missing(result_data)
116 return NumpyDispatch(result_data)
117 return operator
118 method_name = '__r%s__' % op['name']
119 setattr(NumpyDispatch, method_name, closure())
120
121
122 for op in _unary_ops:
123 def closure(op=op):
124 def operator(a):
125 a_data = a.numpy_array
126 result_data = op['op'](a_data)
127 if isinstance(a, NumpyDispatch):
128 result_data = a.add_missing(result_data)
129 return NumpyDispatch(result_data)
130 return operator
131 method_name = '__%s__' % op['name']
132 setattr(NumpyDispatch, method_name, closure())
133
134
135 def wrap(value):
136 if not isinstance(value, NumpyDispatch): # and not isinstance(value, np.ndarray):
137 if isinstance(value, vaex.array_types.supported_array_types + (vaex.column.ColumnStringArrow,)):
138 return NumpyDispatch(value)
139 # for performance reasons we don't visit lists and dicts
140 return value
141
142
143 def unwrap(value):
144 if isinstance(value, NumpyDispatch):
145 return value._array
146 # for performance reasons we don't visit lists and dicts
147 return value
148
149
150 def autowrapper(f):
151 '''Takes a function f, and will unwrap all its arguments and wrap the return value'''
152 def wrapper(*args, **kwargs):
153 args_original = args
154 args = list(map(unwrap, args))
155 kwargs = {k: unwrap(v) for k, v, in kwargs.items()}
156 result = f(*args, **kwargs)
157 if isinstance(result, vaex.array_types.supported_arrow_array_types):
158 result = NumpyDispatch(result)
159 return result
160 return wrapper
161
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/packages/vaex-core/vaex/arrow/numpy_dispatch.py b/packages/vaex-core/vaex/arrow/numpy_dispatch.py
--- a/packages/vaex-core/vaex/arrow/numpy_dispatch.py
+++ b/packages/vaex-core/vaex/arrow/numpy_dispatch.py
@@ -69,15 +69,6 @@
self._arrow_array = vaex.array_types.to_arrow(self._numpy_array)
return self._arrow_array
- def __eq__(self, rhs):
- if vaex.array_types.is_string(self.arrow_array):
- # this does not support scalar input
- # return pc.equal(self.arrow_array, rhs)
- return NumpyDispatch(pa.array(vaex.functions.str_equals(self.arrow_array, rhs)))
- else:
- if isinstance(rhs, NumpyDispatch):
- rhs = rhs.numpy_array
- return NumpyDispatch(pa.array(self.numpy_array == rhs))
for op in _binary_ops:
def closure(op=op):
@@ -88,7 +79,10 @@
a_data = a.numpy_array
if isinstance(b, NumpyDispatch):
b_data = b.numpy_array
- result_data = op['op'](a_data, b_data)
+ if op['name'] == 'eq' and (vaex.array_types.is_string(a_data) or vaex.array_types.is_string(b_data)):
+ result_data = vaex.functions.str_equals(a_data, b_data)
+ else:
+ result_data = op['op'](a_data, b_data)
if isinstance(a, NumpyDispatch):
result_data = a.add_missing(result_data)
if isinstance(b, NumpyDispatch):
@@ -96,8 +90,7 @@
return NumpyDispatch(result_data)
return operator
method_name = '__%s__' % op['name']
- if op['name'] != "eq":
- setattr(NumpyDispatch, method_name, closure())
+ setattr(NumpyDispatch, method_name, closure())
# to support e.g. (1 + ...) # to support e.g. (1 + ...)
if op['name'] in reversable:
def closure(op=op):
@@ -154,7 +147,5 @@
args = list(map(unwrap, args))
kwargs = {k: unwrap(v) for k, v, in kwargs.items()}
result = f(*args, **kwargs)
- if isinstance(result, vaex.array_types.supported_arrow_array_types):
- result = NumpyDispatch(result)
- return result
+ return wrap(result)
return wrapper
|
{"golden_diff": "diff --git a/packages/vaex-core/vaex/arrow/numpy_dispatch.py b/packages/vaex-core/vaex/arrow/numpy_dispatch.py\n--- a/packages/vaex-core/vaex/arrow/numpy_dispatch.py\n+++ b/packages/vaex-core/vaex/arrow/numpy_dispatch.py\n@@ -69,15 +69,6 @@\n self._arrow_array = vaex.array_types.to_arrow(self._numpy_array)\n return self._arrow_array\n \n- def __eq__(self, rhs):\n- if vaex.array_types.is_string(self.arrow_array):\n- # this does not support scalar input\n- # return pc.equal(self.arrow_array, rhs)\n- return NumpyDispatch(pa.array(vaex.functions.str_equals(self.arrow_array, rhs)))\n- else:\n- if isinstance(rhs, NumpyDispatch):\n- rhs = rhs.numpy_array\n- return NumpyDispatch(pa.array(self.numpy_array == rhs))\n \n for op in _binary_ops:\n def closure(op=op):\n@@ -88,7 +79,10 @@\n a_data = a.numpy_array\n if isinstance(b, NumpyDispatch):\n b_data = b.numpy_array\n- result_data = op['op'](a_data, b_data)\n+ if op['name'] == 'eq' and (vaex.array_types.is_string(a_data) or vaex.array_types.is_string(b_data)):\n+ result_data = vaex.functions.str_equals(a_data, b_data)\n+ else:\n+ result_data = op['op'](a_data, b_data)\n if isinstance(a, NumpyDispatch):\n result_data = a.add_missing(result_data)\n if isinstance(b, NumpyDispatch):\n@@ -96,8 +90,7 @@\n return NumpyDispatch(result_data)\n return operator\n method_name = '__%s__' % op['name']\n- if op['name'] != \"eq\":\n- setattr(NumpyDispatch, method_name, closure())\n+ setattr(NumpyDispatch, method_name, closure())\n # to support e.g. (1 + ...) # to support e.g. (1 + ...)\n if op['name'] in reversable:\n def closure(op=op):\n@@ -154,7 +147,5 @@\n args = list(map(unwrap, args))\n kwargs = {k: unwrap(v) for k, v, in kwargs.items()}\n result = f(*args, **kwargs)\n- if isinstance(result, vaex.array_types.supported_arrow_array_types):\n- result = NumpyDispatch(result)\n- return result\n+ return wrap(result)\n return wrapper\n", "issue": "[BUG-REPORT] Multiple selection filters broken in latest version\n**Description**\r\nI think I've found an issue with the new internal representation of arrays as arrow arrays. 
This is in 4.0.0.dev0, so you might have a plan to fix it by the next proper release, but just flagging it anyway as it seems relatively major.\r\n\r\nWith a simple test table `data`:\r\n\r\n```\r\nimport pandas as pd\r\nimport numpy as np\r\nimport vaex\r\n\r\ndata = pd.DataFrame({\r\n \"A\":np.array([1,2,3,4]),\r\n \"B\":[\"A\",\"B\",\"C\",\"D\"],\r\n \"C\":[np.datetime64('2019-10-07 09:00:00'),np.datetime64('2019-10-07 10:00:00'),np.datetime64('2019-10-07 11:00:00'),np.datetime64('2019-10-07 12:00:00')]\r\n })\r\n\r\nvaex.from_pandas(data).export_arrow('arrow_sample.arrow')\r\ndata = vaex.open('arrow_sample.arrow')\r\n```\r\n\r\nNormal single clause filters work as you'd expect:\r\n\r\n```\r\n>>> data[(data['B'] == 'D')]\r\n # A B C\r\n 0 4 'D' Timestamp('2019-10-07 12:00:00')\r\n>>> data[(data['A'] > 2)]\r\n # A B C\r\n 0 3 'C' Timestamp('2019-10-07 11:00:00')\r\n 1 4 'D' Timestamp('2019-10-07 12:00:00')\r\n```\r\n\r\nBut when you combine multiple clauses the answer is wrong (this should return a single row):\r\n\r\n```\r\n>>> data[(data['B'] == 'D') & (data['A'] > 2)]\r\n # A B C\r\n 0 1 'A' Timestamp('2019-10-07 09:00:00')\r\n 1 2 'B' Timestamp('2019-10-07 10:00:00')\r\n 2 3 'C' Timestamp('2019-10-07 11:00:00')\r\n 3 4 'D' Timestamp('2019-10-07 12:00:00')\r\n```\r\n\r\nI first noticed this using a much larger table, and in that case it seemed to cause a memory leak (process memory continually rose after the query) I haven't been able to reliably reproduce this case though, so I'm just reporting the simple case above. It seems likely they're the same bug with two different symptoms. With vaex==3.0.0 the query above returns a single row as you'd expect, so I suspect maybe [this change](https://github.com/vaexio/vaex/pull/984/files) has caused the issue, although I don't know enough to say how?\r\n\r\n**Software information**\r\n - Vaex version (`import vaex; vaex.__version__)`: `{'vaex': '4.0.0.dev0', 'vaex-core': '4.0.0a5', 'vaex-viz': '0.5.0.dev0', 'vaex-hdf5': '0.7.0a2', 'vaex-server': '0.4.0.dev0', 'vaex-astro': '0.8.0.dev0', 'vaex-ui': '0.3.0', 'vaex-jupyter': '0.6.0.dev0', 'vaex-ml': '0.11.0a4', 'vaex-graphql': '0.2.0.dev0'}`\r\n - Vaex was installed from source\r\n - OS: Ubuntu 18.04.4 LTS\r\n\n", "before_files": [{"content": "import numpy as np\nimport pyarrow as pa\nimport pyarrow.compute as pc\nimport vaex\nfrom ..expression import _binary_ops, _unary_ops, reversable\n\n\ndef combine_missing(a, b):\n assert a.offset == 0\n if a.null_count > 0 or b.null_count > 0:\n # not optimal\n nulls = pc.invert(pc.or_(a.is_null(), b.is_null()))\n assert nulls.offset == 0\n nulls_buffer = nulls.buffers()[1]\n # this is not the case: no reason why it should be (TODO: open arrow issue)\n # assert nulls.buffers()[0] is None\n else:\n nulls_buffer = None\n buffers = a.buffers()\n return pa.Array.from_buffers(a.type, len(a), [nulls_buffer, buffers[1]])\n\nclass NumpyDispatch:\n def __init__(self, ar):\n self._array = ar\n if isinstance(ar, vaex.column.ColumnStringArrow):\n ar = pa.array(ar)\n if isinstance(ar, np.ndarray):\n self._numpy_array = ar\n self._arrow_array = None\n elif isinstance(ar, vaex.array_types.supported_arrow_array_types):\n self._numpy_array = None\n self._arrow_array = ar\n else:\n raise TypeError(f'Only support numpy and arrow, not {type(ar)}')\n\n def add_missing(self, ar):\n if isinstance(ar, np.ndarray):\n # if we are an arrow array, we upgrade ar to one\n if isinstance(self._array, vaex.array_types.supported_arrow_array_types):\n ar = vaex.array_types.to_arrow(ar)\n ar 
= combine_missing(ar, self._array)\n # else: both numpy, handled by numpy\n else:\n if isinstance(self._array, vaex.array_types.supported_arrow_array_types):\n ar = combine_missing(ar, self._array)\n # else: was numpy, handled by numpy\n return ar\n\n\n @property\n def numpy_array(self):\n if self._numpy_array is None:\n import vaex.arrow.convert\n arrow_array = self._arrow_array\n arrow_array = vaex.arrow.convert.ensure_not_chunked(arrow_array)\n buffers = arrow_array.buffers()\n # for math, we don't care about the nulls\n if buffers[0] is not None:\n buffers[0] = None\n arrow_array = pa.Array.from_buffers(arrow_array.type, len(arrow_array), buffers, offset=arrow_array.offset)\n self._numpy_array = vaex.array_types.to_numpy(arrow_array)\n return self._numpy_array\n\n @property\n def arrow_array(self):\n if self._arrow_array is None:\n # convert lazily, since not all arrow arrays (e.g. lists) can be converted\n if self._arrow_array is None:\n self._arrow_array = vaex.array_types.to_arrow(self._numpy_array)\n return self._arrow_array\n\n def __eq__(self, rhs):\n if vaex.array_types.is_string(self.arrow_array):\n # this does not support scalar input\n # return pc.equal(self.arrow_array, rhs)\n return NumpyDispatch(pa.array(vaex.functions.str_equals(self.arrow_array, rhs)))\n else:\n if isinstance(rhs, NumpyDispatch):\n rhs = rhs.numpy_array\n return NumpyDispatch(pa.array(self.numpy_array == rhs))\n\nfor op in _binary_ops:\n def closure(op=op):\n def operator(a, b):\n a_data = a\n b_data = b\n if isinstance(a, NumpyDispatch):\n a_data = a.numpy_array\n if isinstance(b, NumpyDispatch):\n b_data = b.numpy_array\n result_data = op['op'](a_data, b_data)\n if isinstance(a, NumpyDispatch):\n result_data = a.add_missing(result_data)\n if isinstance(b, NumpyDispatch):\n result_data = b.add_missing(result_data)\n return NumpyDispatch(result_data)\n return operator\n method_name = '__%s__' % op['name']\n if op['name'] != \"eq\":\n setattr(NumpyDispatch, method_name, closure())\n # to support e.g. (1 + ...) # to support e.g. 
(1 + ...)\n if op['name'] in reversable:\n def closure(op=op):\n def operator(b, a):\n a_data = a\n b_data = b\n if isinstance(a, NumpyDispatch):\n a_data = a.numpy_array\n if isinstance(b, NumpyDispatch):\n b_data = b.numpy_array\n result_data = op['op'](a_data, b_data)\n if isinstance(a, NumpyDispatch):\n result_data = a.add_missing(result_data)\n if isinstance(b, NumpyDispatch):\n result_data = b.add_missing(result_data)\n return NumpyDispatch(result_data)\n return operator\n method_name = '__r%s__' % op['name']\n setattr(NumpyDispatch, method_name, closure())\n\n\nfor op in _unary_ops:\n def closure(op=op):\n def operator(a):\n a_data = a.numpy_array\n result_data = op['op'](a_data)\n if isinstance(a, NumpyDispatch):\n result_data = a.add_missing(result_data)\n return NumpyDispatch(result_data)\n return operator\n method_name = '__%s__' % op['name']\n setattr(NumpyDispatch, method_name, closure())\n\n\ndef wrap(value):\n if not isinstance(value, NumpyDispatch): # and not isinstance(value, np.ndarray):\n if isinstance(value, vaex.array_types.supported_array_types + (vaex.column.ColumnStringArrow,)):\n return NumpyDispatch(value)\n # for performance reasons we don't visit lists and dicts\n return value\n\n\ndef unwrap(value):\n if isinstance(value, NumpyDispatch):\n return value._array\n # for performance reasons we don't visit lists and dicts\n return value\n\n\ndef autowrapper(f):\n '''Takes a function f, and will unwrap all its arguments and wrap the return value'''\n def wrapper(*args, **kwargs):\n args_original = args\n args = list(map(unwrap, args))\n kwargs = {k: unwrap(v) for k, v, in kwargs.items()}\n result = f(*args, **kwargs)\n if isinstance(result, vaex.array_types.supported_arrow_array_types):\n result = NumpyDispatch(result)\n return result\n return wrapper\n", "path": "packages/vaex-core/vaex/arrow/numpy_dispatch.py"}], "after_files": [{"content": "import numpy as np\nimport pyarrow as pa\nimport pyarrow.compute as pc\nimport vaex\nfrom ..expression import _binary_ops, _unary_ops, reversable\n\n\ndef combine_missing(a, b):\n assert a.offset == 0\n if a.null_count > 0 or b.null_count > 0:\n # not optimal\n nulls = pc.invert(pc.or_(a.is_null(), b.is_null()))\n assert nulls.offset == 0\n nulls_buffer = nulls.buffers()[1]\n # this is not the case: no reason why it should be (TODO: open arrow issue)\n # assert nulls.buffers()[0] is None\n else:\n nulls_buffer = None\n buffers = a.buffers()\n return pa.Array.from_buffers(a.type, len(a), [nulls_buffer, buffers[1]])\n\nclass NumpyDispatch:\n def __init__(self, ar):\n self._array = ar\n if isinstance(ar, vaex.column.ColumnStringArrow):\n ar = pa.array(ar)\n if isinstance(ar, np.ndarray):\n self._numpy_array = ar\n self._arrow_array = None\n elif isinstance(ar, vaex.array_types.supported_arrow_array_types):\n self._numpy_array = None\n self._arrow_array = ar\n else:\n raise TypeError(f'Only support numpy and arrow, not {type(ar)}')\n\n def add_missing(self, ar):\n if isinstance(ar, np.ndarray):\n # if we are an arrow array, we upgrade ar to one\n if isinstance(self._array, vaex.array_types.supported_arrow_array_types):\n ar = vaex.array_types.to_arrow(ar)\n ar = combine_missing(ar, self._array)\n # else: both numpy, handled by numpy\n else:\n if isinstance(self._array, vaex.array_types.supported_arrow_array_types):\n ar = combine_missing(ar, self._array)\n # else: was numpy, handled by numpy\n return ar\n\n\n @property\n def numpy_array(self):\n if self._numpy_array is None:\n import vaex.arrow.convert\n arrow_array = 
self._arrow_array\n arrow_array = vaex.arrow.convert.ensure_not_chunked(arrow_array)\n buffers = arrow_array.buffers()\n # for math, we don't care about the nulls\n if buffers[0] is not None:\n buffers[0] = None\n arrow_array = pa.Array.from_buffers(arrow_array.type, len(arrow_array), buffers, offset=arrow_array.offset)\n self._numpy_array = vaex.array_types.to_numpy(arrow_array)\n return self._numpy_array\n\n @property\n def arrow_array(self):\n if self._arrow_array is None:\n # convert lazily, since not all arrow arrays (e.g. lists) can be converted\n if self._arrow_array is None:\n self._arrow_array = vaex.array_types.to_arrow(self._numpy_array)\n return self._arrow_array\n\n\nfor op in _binary_ops:\n def closure(op=op):\n def operator(a, b):\n a_data = a\n b_data = b\n if isinstance(a, NumpyDispatch):\n a_data = a.numpy_array\n if isinstance(b, NumpyDispatch):\n b_data = b.numpy_array\n if op['name'] == 'eq' and (vaex.array_types.is_string(a_data) or vaex.array_types.is_string(b_data)):\n result_data = vaex.functions.str_equals(a_data, b_data)\n else:\n result_data = op['op'](a_data, b_data)\n if isinstance(a, NumpyDispatch):\n result_data = a.add_missing(result_data)\n if isinstance(b, NumpyDispatch):\n result_data = b.add_missing(result_data)\n return NumpyDispatch(result_data)\n return operator\n method_name = '__%s__' % op['name']\n setattr(NumpyDispatch, method_name, closure())\n # to support e.g. (1 + ...) # to support e.g. (1 + ...)\n if op['name'] in reversable:\n def closure(op=op):\n def operator(b, a):\n a_data = a\n b_data = b\n if isinstance(a, NumpyDispatch):\n a_data = a.numpy_array\n if isinstance(b, NumpyDispatch):\n b_data = b.numpy_array\n result_data = op['op'](a_data, b_data)\n if isinstance(a, NumpyDispatch):\n result_data = a.add_missing(result_data)\n if isinstance(b, NumpyDispatch):\n result_data = b.add_missing(result_data)\n return NumpyDispatch(result_data)\n return operator\n method_name = '__r%s__' % op['name']\n setattr(NumpyDispatch, method_name, closure())\n\n\nfor op in _unary_ops:\n def closure(op=op):\n def operator(a):\n a_data = a.numpy_array\n result_data = op['op'](a_data)\n if isinstance(a, NumpyDispatch):\n result_data = a.add_missing(result_data)\n return NumpyDispatch(result_data)\n return operator\n method_name = '__%s__' % op['name']\n setattr(NumpyDispatch, method_name, closure())\n\n\ndef wrap(value):\n if not isinstance(value, NumpyDispatch): # and not isinstance(value, np.ndarray):\n if isinstance(value, vaex.array_types.supported_array_types + (vaex.column.ColumnStringArrow,)):\n return NumpyDispatch(value)\n # for performance reasons we don't visit lists and dicts\n return value\n\n\ndef unwrap(value):\n if isinstance(value, NumpyDispatch):\n return value._array\n # for performance reasons we don't visit lists and dicts\n return value\n\n\ndef autowrapper(f):\n '''Takes a function f, and will unwrap all its arguments and wrap the return value'''\n def wrapper(*args, **kwargs):\n args_original = args\n args = list(map(unwrap, args))\n kwargs = {k: unwrap(v) for k, v, in kwargs.items()}\n result = f(*args, **kwargs)\n return wrap(result)\n return wrapper\n", "path": "packages/vaex-core/vaex/arrow/numpy_dispatch.py"}]}
| 2,974 | 572 |
gh_patches_debug_5272
|
rasdani/github-patches
|
git_diff
|
napari__napari-5850
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Qt6] AttributeError when renaming a layer
## 🐛 Bug
This seems to be yet another small Qt6 issue related to enum name changes.
## To Reproduce
Steps to reproduce the behavior:
1. Launch napari from a Qt6 environment
2. Open a file or otherwise create a new layer
3. Double click the layer to rename it, triggering the exception
```
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
File ~/src/napari/napari/_qt/containers/_layer_delegate.py:141, in LayerDelegate.createEditor(self=<napari._qt.containers._layer_delegate.LayerDelegate object>, parent=<PyQt6.QtWidgets.QWidget object>, option=<PyQt6.QtWidgets.QStyleOptionViewItem object>, index=<PyQt6.QtCore.QModelIndex object>)
138 editor = super().createEditor(parent, option, index)
139 # make sure editor has same alignment as the display name
140 editor.setAlignment(
--> 141 Qt.Alignment(index.data(Qt.ItemDataRole.TextAlignmentRole))
editor = <PyQt6.QtWidgets.QLineEdit object at 0x2aaa3e4d0>
index = <PyQt6.QtCore.QModelIndex object at 0x16b74ee30>
Qt = <class 'PyQt6.QtCore.Qt'>
Qt.ItemDataRole.TextAlignmentRole = <ItemDataRole.TextAlignmentRole: 7>
142 )
143 return editor
AttributeError: type object 'Qt' has no attribute 'Alignment'
```
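For context, the PyQt6 bindings no longer expose the old `Qt.Alignment` flags alias; alignment values are represented by the scoped `Qt.AlignmentFlag` enum instead. A minimal sketch of the difference (the `alignment_value` name is only illustrative, standing in for whatever `index.data(Qt.ItemDataRole.TextAlignmentRole)` returns):

```python
from qtpy.QtCore import Qt  # resolves to PyQt6 in the environment reported below

alignment_value = Qt.AlignmentFlag.AlignLeft.value  # stand-in for the model's TextAlignmentRole data

# Qt5-era spelling -- raises AttributeError under PyQt6:
# alignment = Qt.Alignment(alignment_value)

# Qt6-compatible spelling using the scoped enum:
alignment = Qt.AlignmentFlag(alignment_value)
```

`Qt.AlignmentFlag` also exists under the Qt5 bindings, so switching the call in `LayerDelegate.createEditor` to it should presumably keep both variants working.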
## Expected behavior
Double-clicking the layer should allow editing the layer name and not raise an exception.
## Environment
napari: 0.5.0a2.dev129+g0dfb37b47.d20230518
Platform: macOS-13.3.1-arm64-arm-64bit
System: MacOS 13.3.1
Python: 3.10.9 (main, Feb 3 2023, 15:40:08) [Clang 14.0.0 (clang-1400.0.29.202)]
Qt: 6.5.0
PyQt6:
NumPy: 1.24.1
SciPy: 1.10.0
Dask: 2023.1.1
VisPy: 0.12.3.dev45+dirty
magicgui: 0.6.1
superqt: unknown
in-n-out: 0.1.6
app-model: 0.1.1.dev3+gdf48c9d
npe2: 0.6.2
OpenGL:
- GL version: 2.1 Metal - 83.1
- MAX_TEXTURE_SIZE: 16384
Screens:
- screen 1: resolution 2560x1440, scale 1.0
Settings path:
- /Users/aandersoniii/Library/Application Support/napari/venv_640f7def1935afdf07a142187e645430c6d70fe6/settings.yaml
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `napari/_qt/containers/_layer_delegate.py`
Content:
```
1 """
2 General rendering flow:
3
4 1. The List/Tree view needs to display or edit an index in the model...
5 2. It gets the ``itemDelegate``
6 a. A custom delegate can be set with ``setItemDelegate``
7 b. ``QStyledItemDelegate`` is the default delegate for all Qt item views,
8 and is installed upon them when they are created.
9 3. ``itemDelegate.paint`` is called on the index being displayed
10 4. Each index in the model has various data elements (i.e. name, image, etc..),
11 each of which has a "data role". A model should return the appropriate data
12 for each role by reimplementing ``QAbstractItemModel.data``.
13 a. `QStyledItemDelegate` implements display and editing for the most common
14 datatypes expected by users, including booleans, integers, and strings.
15 b. If the delegate does not support painting of the data types you need or
16 you want to customize the drawing of items, you need to subclass
17 ``QStyledItemDelegate``, and reimplement ``paint()`` and possibly
18 ``sizeHint()``.
19 c. When reimplementing ``paint()``, one typically handles the datatypes
20 they would like to draw and uses the superclass implementation for other
21 types.
22 5. The default implementation of ``QStyledItemDelegate.paint`` paints the item
23 using the view's ``QStyle`` (which is, by default, an OS specific style...
24 but see ``QCommonStyle`` for a generic implementation)
25 a. It is also possible to override the view's style, using either a
26 subclass of ``QCommonStyle``, for a platform-independent look and feel, or
27 ``QProxyStyle``, which let's you override only certain stylistic elements
28 on any platform, falling back to the system default otherwise.
29 b. ``QStyle`` paints various elements using methods like ``drawPrimitive``
30 and ``drawControl``. These can be overridden for very fine control.
31 6. It is hard to use stylesheets with custom ``QStyles``... but it's possible
32 to style sub-controls in ``QAbstractItemView`` (such as ``QTreeView``):
33 https://doc.qt.io/qt-5/stylesheet-reference.html#list-of-sub-controls
34
35 """
36 from __future__ import annotations
37
38 from typing import TYPE_CHECKING
39
40 from qtpy.QtCore import QPoint, QSize, Qt
41 from qtpy.QtGui import QMouseEvent, QPixmap
42 from qtpy.QtWidgets import QStyledItemDelegate
43
44 from napari._app_model.constants import MenuId
45 from napari._app_model.context import get_context
46 from napari._qt._qapp_model import build_qmodel_menu
47 from napari._qt.containers._base_item_model import ItemRole
48 from napari._qt.containers.qt_layer_model import ThumbnailRole
49 from napari._qt.qt_resources import QColoredSVGIcon
50
51 if TYPE_CHECKING:
52 from qtpy import QtCore
53 from qtpy.QtGui import QPainter
54 from qtpy.QtWidgets import QStyleOptionViewItem, QWidget
55
56 from napari.components.layerlist import LayerList
57
58
59 class LayerDelegate(QStyledItemDelegate):
60 """A QItemDelegate specialized for painting Layer objects.
61
62 In Qt's `Model/View architecture
63 <https://doc.qt.io/qt-5/model-view-programming.html>`_. A *delegate* is an
64 object that controls the visual rendering (and editing widgets) of an item
65 in a view. For more, see:
66 https://doc.qt.io/qt-5/model-view-programming.html#delegate-classes
67
68 This class provides the logic required to paint a Layer item in the
69 :class:`napari._qt.containers.QtLayerList`. The `QStyledItemDelegate`
70 super-class provides most of the logic (including display/editing of the
71 layer name, a visibility checkbox, and an icon for the layer type). This
72 subclass provides additional logic for drawing the layer thumbnail, picking
73 the appropriate icon for the layer, and some additional style/UX issues.
74 """
75
76 def paint(
77 self,
78 painter: QPainter,
79 option: QStyleOptionViewItem,
80 index: QtCore.QModelIndex,
81 ):
82 """Paint the item in the model at `index`."""
83 # update the icon based on layer type
84
85 self.get_layer_icon(option, index)
86 # paint the standard itemView (includes name, icon, and vis. checkbox)
87 super().paint(painter, option, index)
88 # paint the thumbnail
89 self._paint_thumbnail(painter, option, index)
90
91 def get_layer_icon(
92 self, option: QStyleOptionViewItem, index: QtCore.QModelIndex
93 ):
94 """Add the appropriate QIcon to the item based on the layer type."""
95 layer = index.data(ItemRole)
96 if layer is None:
97 return
98 if hasattr(layer, 'is_group') and layer.is_group(): # for layer trees
99 expanded = option.widget.isExpanded(index)
100 icon_name = 'folder-open' if expanded else 'folder'
101 else:
102 icon_name = f'new_{layer._type_string}'
103
104 try:
105 icon = QColoredSVGIcon.from_resources(icon_name)
106 except ValueError:
107 return
108 # guessing theme rather than passing it through.
109 bg = option.palette.color(option.palette.ColorRole.Window).red()
110 option.icon = icon.colored(theme='dark' if bg < 128 else 'light')
111 option.decorationSize = QSize(18, 18)
112 option.decorationPosition = (
113 option.Position.Right
114 ) # put icon on the right
115 option.features |= option.ViewItemFeature.HasDecoration
116
117 def _paint_thumbnail(self, painter, option, index):
118 """paint the layer thumbnail."""
119 # paint the thumbnail
120 # MAGICNUMBER: numbers from the margin applied in the stylesheet to
121 # QtLayerTreeView::item
122 thumb_rect = option.rect.translated(-2, 2)
123 h = index.data(Qt.ItemDataRole.SizeHintRole).height() - 4
124 thumb_rect.setWidth(h)
125 thumb_rect.setHeight(h)
126 image = index.data(ThumbnailRole)
127 painter.drawPixmap(thumb_rect, QPixmap.fromImage(image))
128
129 def createEditor(
130 self,
131 parent: QWidget,
132 option: QStyleOptionViewItem,
133 index: QtCore.QModelIndex,
134 ) -> QWidget:
135 """User has double clicked on layer name."""
136 # necessary for geometry, otherwise editor takes up full width.
137 self.get_layer_icon(option, index)
138 editor = super().createEditor(parent, option, index)
139 # make sure editor has same alignment as the display name
140 editor.setAlignment(
141 Qt.Alignment(index.data(Qt.ItemDataRole.TextAlignmentRole))
142 )
143 return editor
144
145 def editorEvent(
146 self,
147 event: QtCore.QEvent,
148 model: QtCore.QAbstractItemModel,
149 option: QStyleOptionViewItem,
150 index: QtCore.QModelIndex,
151 ) -> bool:
152 """Called when an event has occured in the editor.
153
154 This can be used to customize how the delegate handles mouse/key events
155 """
156 if (
157 event.type() == QMouseEvent.MouseButtonRelease
158 and event.button() == Qt.MouseButton.RightButton
159 ):
160 pnt = (
161 event.globalPosition().toPoint()
162 if hasattr(event, "globalPosition")
163 else event.globalPos()
164 )
165
166 self.show_context_menu(index, model, pnt, option.widget)
167
168 # if the user clicks quickly on the visibility checkbox, we *don't*
169 # want it to be interpreted as a double-click. We want the visibilty
170 # to simply be toggled.
171 if event.type() == QMouseEvent.MouseButtonDblClick:
172 self.initStyleOption(option, index)
173 style = option.widget.style()
174 check_rect = style.subElementRect(
175 style.SubElement.SE_ItemViewItemCheckIndicator,
176 option,
177 option.widget,
178 )
179 if check_rect.contains(event.pos()):
180 cur_state = index.data(Qt.ItemDataRole.CheckStateRole)
181 if model.flags(index) & Qt.ItemFlag.ItemIsUserTristate:
182 state = Qt.CheckState((cur_state + 1) % 3)
183 else:
184 state = (
185 Qt.CheckState.Unchecked
186 if cur_state
187 else Qt.CheckState.Checked
188 )
189 return model.setData(
190 index, state, Qt.ItemDataRole.CheckStateRole
191 )
192 # refer all other events to the QStyledItemDelegate
193 return super().editorEvent(event, model, option, index)
194
195 def show_context_menu(self, index, model, pos: QPoint, parent):
196 """Show the layerlist context menu.
197 To add a new item to the menu, update the _LAYER_ACTIONS dict.
198 """
199 if not hasattr(self, '_context_menu'):
200 self._context_menu = build_qmodel_menu(
201 MenuId.LAYERLIST_CONTEXT, parent=parent
202 )
203
204 layer_list: LayerList = model.sourceModel()._root
205 self._context_menu.update_from_context(get_context(layer_list))
206 self._context_menu.exec_(pos)
207
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/napari/_qt/containers/_layer_delegate.py b/napari/_qt/containers/_layer_delegate.py
--- a/napari/_qt/containers/_layer_delegate.py
+++ b/napari/_qt/containers/_layer_delegate.py
@@ -138,7 +138,7 @@
editor = super().createEditor(parent, option, index)
# make sure editor has same alignment as the display name
editor.setAlignment(
- Qt.Alignment(index.data(Qt.ItemDataRole.TextAlignmentRole))
+ Qt.AlignmentFlag(index.data(Qt.ItemDataRole.TextAlignmentRole))
)
return editor
|
{"golden_diff": "diff --git a/napari/_qt/containers/_layer_delegate.py b/napari/_qt/containers/_layer_delegate.py\n--- a/napari/_qt/containers/_layer_delegate.py\n+++ b/napari/_qt/containers/_layer_delegate.py\n@@ -138,7 +138,7 @@\n editor = super().createEditor(parent, option, index)\n # make sure editor has same alignment as the display name\n editor.setAlignment(\n- Qt.Alignment(index.data(Qt.ItemDataRole.TextAlignmentRole))\n+ Qt.AlignmentFlag(index.data(Qt.ItemDataRole.TextAlignmentRole))\n )\n return editor\n", "issue": "[Qt6] AttributeError when renaming a layer\n## \ud83d\udc1b Bug\r\n\r\nThis seems to be yet another small Qt6 issue related to enum name changes.\r\n\r\n## To Reproduce\r\n\r\nSteps to reproduce the behavior:\r\n\r\n1. Launch napari from a Qt6 environment\r\n2. Open a file or otherwise create a new layer\r\n3. Double click the layer to rename it, triggering the exception\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nAttributeError Traceback (most recent call last)\r\nFile ~/src/napari/napari/_qt/containers/_layer_delegate.py:141, in LayerDelegate.createEditor(self=<napari._qt.containers._layer_delegate.LayerDelegate object>, parent=<PyQt6.QtWidgets.QWidget object>, option=<PyQt6.QtWidgets.QStyleOptionViewItem object>, index=<PyQt6.QtCore.QModelIndex object>)\r\n 138 editor = super().createEditor(parent, option, index)\r\n 139 # make sure editor has same alignment as the display name\r\n 140 editor.setAlignment(\r\n--> 141 Qt.Alignment(index.data(Qt.ItemDataRole.TextAlignmentRole))\r\n editor = <PyQt6.QtWidgets.QLineEdit object at 0x2aaa3e4d0>\r\n index = <PyQt6.QtCore.QModelIndex object at 0x16b74ee30>\r\n Qt = <class 'PyQt6.QtCore.Qt'>\r\n Qt.ItemDataRole.TextAlignmentRole = <ItemDataRole.TextAlignmentRole: 7>\r\n 142 )\r\n 143 return editor\r\n\r\nAttributeError: type object 'Qt' has no attribute 'Alignment'\r\n```\r\n\r\n## Expected behavior\r\n\r\nDouble-clicking the layer should allow editing the layer name and not raise an exception.\r\n\r\n## Environment\r\n\r\nnapari: 0.5.0a2.dev129+g0dfb37b47.d20230518\r\nPlatform: macOS-13.3.1-arm64-arm-64bit\r\nSystem: MacOS 13.3.1\r\nPython: 3.10.9 (main, Feb 3 2023, 15:40:08) [Clang 14.0.0 (clang-1400.0.29.202)]\r\nQt: 6.5.0\r\nPyQt6: \r\nNumPy: 1.24.1\r\nSciPy: 1.10.0\r\nDask: 2023.1.1\r\nVisPy: 0.12.3.dev45+dirty\r\nmagicgui: 0.6.1\r\nsuperqt: unknown\r\nin-n-out: 0.1.6\r\napp-model: 0.1.1.dev3+gdf48c9d\r\nnpe2: 0.6.2\r\n\r\nOpenGL:\r\n- GL version: 2.1 Metal - 83.1\r\n- MAX_TEXTURE_SIZE: 16384\r\n\r\nScreens:\r\n- screen 1: resolution 2560x1440, scale 1.0\r\n\r\nSettings path:\r\n- /Users/aandersoniii/Library/Application Support/napari/venv_640f7def1935afdf07a142187e645430c6d70fe6/settings.yaml\n", "before_files": [{"content": "\"\"\"\nGeneral rendering flow:\n\n1. The List/Tree view needs to display or edit an index in the model...\n2. It gets the ``itemDelegate``\n a. A custom delegate can be set with ``setItemDelegate``\n b. ``QStyledItemDelegate`` is the default delegate for all Qt item views,\n and is installed upon them when they are created.\n3. ``itemDelegate.paint`` is called on the index being displayed\n4. Each index in the model has various data elements (i.e. name, image, etc..),\n each of which has a \"data role\". A model should return the appropriate data\n for each role by reimplementing ``QAbstractItemModel.data``.\n a. 
`QStyledItemDelegate` implements display and editing for the most common\n datatypes expected by users, including booleans, integers, and strings.\n b. If the delegate does not support painting of the data types you need or\n you want to customize the drawing of items, you need to subclass\n ``QStyledItemDelegate``, and reimplement ``paint()`` and possibly\n ``sizeHint()``.\n c. When reimplementing ``paint()``, one typically handles the datatypes\n they would like to draw and uses the superclass implementation for other\n types.\n5. The default implementation of ``QStyledItemDelegate.paint`` paints the item\n using the view's ``QStyle`` (which is, by default, an OS specific style...\n but see ``QCommonStyle`` for a generic implementation)\n a. It is also possible to override the view's style, using either a\n subclass of ``QCommonStyle``, for a platform-independent look and feel, or\n ``QProxyStyle``, which let's you override only certain stylistic elements\n on any platform, falling back to the system default otherwise.\n b. ``QStyle`` paints various elements using methods like ``drawPrimitive``\n and ``drawControl``. These can be overridden for very fine control.\n6. It is hard to use stylesheets with custom ``QStyles``... but it's possible\n to style sub-controls in ``QAbstractItemView`` (such as ``QTreeView``):\n https://doc.qt.io/qt-5/stylesheet-reference.html#list-of-sub-controls\n\n\"\"\"\nfrom __future__ import annotations\n\nfrom typing import TYPE_CHECKING\n\nfrom qtpy.QtCore import QPoint, QSize, Qt\nfrom qtpy.QtGui import QMouseEvent, QPixmap\nfrom qtpy.QtWidgets import QStyledItemDelegate\n\nfrom napari._app_model.constants import MenuId\nfrom napari._app_model.context import get_context\nfrom napari._qt._qapp_model import build_qmodel_menu\nfrom napari._qt.containers._base_item_model import ItemRole\nfrom napari._qt.containers.qt_layer_model import ThumbnailRole\nfrom napari._qt.qt_resources import QColoredSVGIcon\n\nif TYPE_CHECKING:\n from qtpy import QtCore\n from qtpy.QtGui import QPainter\n from qtpy.QtWidgets import QStyleOptionViewItem, QWidget\n\n from napari.components.layerlist import LayerList\n\n\nclass LayerDelegate(QStyledItemDelegate):\n \"\"\"A QItemDelegate specialized for painting Layer objects.\n\n In Qt's `Model/View architecture\n <https://doc.qt.io/qt-5/model-view-programming.html>`_. A *delegate* is an\n object that controls the visual rendering (and editing widgets) of an item\n in a view. For more, see:\n https://doc.qt.io/qt-5/model-view-programming.html#delegate-classes\n\n This class provides the logic required to paint a Layer item in the\n :class:`napari._qt.containers.QtLayerList`. The `QStyledItemDelegate`\n super-class provides most of the logic (including display/editing of the\n layer name, a visibility checkbox, and an icon for the layer type). This\n subclass provides additional logic for drawing the layer thumbnail, picking\n the appropriate icon for the layer, and some additional style/UX issues.\n \"\"\"\n\n def paint(\n self,\n painter: QPainter,\n option: QStyleOptionViewItem,\n index: QtCore.QModelIndex,\n ):\n \"\"\"Paint the item in the model at `index`.\"\"\"\n # update the icon based on layer type\n\n self.get_layer_icon(option, index)\n # paint the standard itemView (includes name, icon, and vis. 
checkbox)\n super().paint(painter, option, index)\n # paint the thumbnail\n self._paint_thumbnail(painter, option, index)\n\n def get_layer_icon(\n self, option: QStyleOptionViewItem, index: QtCore.QModelIndex\n ):\n \"\"\"Add the appropriate QIcon to the item based on the layer type.\"\"\"\n layer = index.data(ItemRole)\n if layer is None:\n return\n if hasattr(layer, 'is_group') and layer.is_group(): # for layer trees\n expanded = option.widget.isExpanded(index)\n icon_name = 'folder-open' if expanded else 'folder'\n else:\n icon_name = f'new_{layer._type_string}'\n\n try:\n icon = QColoredSVGIcon.from_resources(icon_name)\n except ValueError:\n return\n # guessing theme rather than passing it through.\n bg = option.palette.color(option.palette.ColorRole.Window).red()\n option.icon = icon.colored(theme='dark' if bg < 128 else 'light')\n option.decorationSize = QSize(18, 18)\n option.decorationPosition = (\n option.Position.Right\n ) # put icon on the right\n option.features |= option.ViewItemFeature.HasDecoration\n\n def _paint_thumbnail(self, painter, option, index):\n \"\"\"paint the layer thumbnail.\"\"\"\n # paint the thumbnail\n # MAGICNUMBER: numbers from the margin applied in the stylesheet to\n # QtLayerTreeView::item\n thumb_rect = option.rect.translated(-2, 2)\n h = index.data(Qt.ItemDataRole.SizeHintRole).height() - 4\n thumb_rect.setWidth(h)\n thumb_rect.setHeight(h)\n image = index.data(ThumbnailRole)\n painter.drawPixmap(thumb_rect, QPixmap.fromImage(image))\n\n def createEditor(\n self,\n parent: QWidget,\n option: QStyleOptionViewItem,\n index: QtCore.QModelIndex,\n ) -> QWidget:\n \"\"\"User has double clicked on layer name.\"\"\"\n # necessary for geometry, otherwise editor takes up full width.\n self.get_layer_icon(option, index)\n editor = super().createEditor(parent, option, index)\n # make sure editor has same alignment as the display name\n editor.setAlignment(\n Qt.Alignment(index.data(Qt.ItemDataRole.TextAlignmentRole))\n )\n return editor\n\n def editorEvent(\n self,\n event: QtCore.QEvent,\n model: QtCore.QAbstractItemModel,\n option: QStyleOptionViewItem,\n index: QtCore.QModelIndex,\n ) -> bool:\n \"\"\"Called when an event has occured in the editor.\n\n This can be used to customize how the delegate handles mouse/key events\n \"\"\"\n if (\n event.type() == QMouseEvent.MouseButtonRelease\n and event.button() == Qt.MouseButton.RightButton\n ):\n pnt = (\n event.globalPosition().toPoint()\n if hasattr(event, \"globalPosition\")\n else event.globalPos()\n )\n\n self.show_context_menu(index, model, pnt, option.widget)\n\n # if the user clicks quickly on the visibility checkbox, we *don't*\n # want it to be interpreted as a double-click. 
We want the visibilty\n # to simply be toggled.\n if event.type() == QMouseEvent.MouseButtonDblClick:\n self.initStyleOption(option, index)\n style = option.widget.style()\n check_rect = style.subElementRect(\n style.SubElement.SE_ItemViewItemCheckIndicator,\n option,\n option.widget,\n )\n if check_rect.contains(event.pos()):\n cur_state = index.data(Qt.ItemDataRole.CheckStateRole)\n if model.flags(index) & Qt.ItemFlag.ItemIsUserTristate:\n state = Qt.CheckState((cur_state + 1) % 3)\n else:\n state = (\n Qt.CheckState.Unchecked\n if cur_state\n else Qt.CheckState.Checked\n )\n return model.setData(\n index, state, Qt.ItemDataRole.CheckStateRole\n )\n # refer all other events to the QStyledItemDelegate\n return super().editorEvent(event, model, option, index)\n\n def show_context_menu(self, index, model, pos: QPoint, parent):\n \"\"\"Show the layerlist context menu.\n To add a new item to the menu, update the _LAYER_ACTIONS dict.\n \"\"\"\n if not hasattr(self, '_context_menu'):\n self._context_menu = build_qmodel_menu(\n MenuId.LAYERLIST_CONTEXT, parent=parent\n )\n\n layer_list: LayerList = model.sourceModel()._root\n self._context_menu.update_from_context(get_context(layer_list))\n self._context_menu.exec_(pos)\n", "path": "napari/_qt/containers/_layer_delegate.py"}], "after_files": [{"content": "\"\"\"\nGeneral rendering flow:\n\n1. The List/Tree view needs to display or edit an index in the model...\n2. It gets the ``itemDelegate``\n a. A custom delegate can be set with ``setItemDelegate``\n b. ``QStyledItemDelegate`` is the default delegate for all Qt item views,\n and is installed upon them when they are created.\n3. ``itemDelegate.paint`` is called on the index being displayed\n4. Each index in the model has various data elements (i.e. name, image, etc..),\n each of which has a \"data role\". A model should return the appropriate data\n for each role by reimplementing ``QAbstractItemModel.data``.\n a. `QStyledItemDelegate` implements display and editing for the most common\n datatypes expected by users, including booleans, integers, and strings.\n b. If the delegate does not support painting of the data types you need or\n you want to customize the drawing of items, you need to subclass\n ``QStyledItemDelegate``, and reimplement ``paint()`` and possibly\n ``sizeHint()``.\n c. When reimplementing ``paint()``, one typically handles the datatypes\n they would like to draw and uses the superclass implementation for other\n types.\n5. The default implementation of ``QStyledItemDelegate.paint`` paints the item\n using the view's ``QStyle`` (which is, by default, an OS specific style...\n but see ``QCommonStyle`` for a generic implementation)\n a. It is also possible to override the view's style, using either a\n subclass of ``QCommonStyle``, for a platform-independent look and feel, or\n ``QProxyStyle``, which let's you override only certain stylistic elements\n on any platform, falling back to the system default otherwise.\n b. ``QStyle`` paints various elements using methods like ``drawPrimitive``\n and ``drawControl``. These can be overridden for very fine control.\n6. It is hard to use stylesheets with custom ``QStyles``... 
but it's possible\n to style sub-controls in ``QAbstractItemView`` (such as ``QTreeView``):\n https://doc.qt.io/qt-5/stylesheet-reference.html#list-of-sub-controls\n\n\"\"\"\nfrom __future__ import annotations\n\nfrom typing import TYPE_CHECKING\n\nfrom qtpy.QtCore import QPoint, QSize, Qt\nfrom qtpy.QtGui import QMouseEvent, QPixmap\nfrom qtpy.QtWidgets import QStyledItemDelegate\n\nfrom napari._app_model.constants import MenuId\nfrom napari._app_model.context import get_context\nfrom napari._qt._qapp_model import build_qmodel_menu\nfrom napari._qt.containers._base_item_model import ItemRole\nfrom napari._qt.containers.qt_layer_model import ThumbnailRole\nfrom napari._qt.qt_resources import QColoredSVGIcon\n\nif TYPE_CHECKING:\n from qtpy import QtCore\n from qtpy.QtGui import QPainter\n from qtpy.QtWidgets import QStyleOptionViewItem, QWidget\n\n from napari.components.layerlist import LayerList\n\n\nclass LayerDelegate(QStyledItemDelegate):\n \"\"\"A QItemDelegate specialized for painting Layer objects.\n\n In Qt's `Model/View architecture\n <https://doc.qt.io/qt-5/model-view-programming.html>`_. A *delegate* is an\n object that controls the visual rendering (and editing widgets) of an item\n in a view. For more, see:\n https://doc.qt.io/qt-5/model-view-programming.html#delegate-classes\n\n This class provides the logic required to paint a Layer item in the\n :class:`napari._qt.containers.QtLayerList`. The `QStyledItemDelegate`\n super-class provides most of the logic (including display/editing of the\n layer name, a visibility checkbox, and an icon for the layer type). This\n subclass provides additional logic for drawing the layer thumbnail, picking\n the appropriate icon for the layer, and some additional style/UX issues.\n \"\"\"\n\n def paint(\n self,\n painter: QPainter,\n option: QStyleOptionViewItem,\n index: QtCore.QModelIndex,\n ):\n \"\"\"Paint the item in the model at `index`.\"\"\"\n # update the icon based on layer type\n\n self.get_layer_icon(option, index)\n # paint the standard itemView (includes name, icon, and vis. 
checkbox)\n super().paint(painter, option, index)\n # paint the thumbnail\n self._paint_thumbnail(painter, option, index)\n\n def get_layer_icon(\n self, option: QStyleOptionViewItem, index: QtCore.QModelIndex\n ):\n \"\"\"Add the appropriate QIcon to the item based on the layer type.\"\"\"\n layer = index.data(ItemRole)\n if layer is None:\n return\n if hasattr(layer, 'is_group') and layer.is_group(): # for layer trees\n expanded = option.widget.isExpanded(index)\n icon_name = 'folder-open' if expanded else 'folder'\n else:\n icon_name = f'new_{layer._type_string}'\n\n try:\n icon = QColoredSVGIcon.from_resources(icon_name)\n except ValueError:\n return\n # guessing theme rather than passing it through.\n bg = option.palette.color(option.palette.ColorRole.Window).red()\n option.icon = icon.colored(theme='dark' if bg < 128 else 'light')\n option.decorationSize = QSize(18, 18)\n option.decorationPosition = (\n option.Position.Right\n ) # put icon on the right\n option.features |= option.ViewItemFeature.HasDecoration\n\n def _paint_thumbnail(self, painter, option, index):\n \"\"\"paint the layer thumbnail.\"\"\"\n # paint the thumbnail\n # MAGICNUMBER: numbers from the margin applied in the stylesheet to\n # QtLayerTreeView::item\n thumb_rect = option.rect.translated(-2, 2)\n h = index.data(Qt.ItemDataRole.SizeHintRole).height() - 4\n thumb_rect.setWidth(h)\n thumb_rect.setHeight(h)\n image = index.data(ThumbnailRole)\n painter.drawPixmap(thumb_rect, QPixmap.fromImage(image))\n\n def createEditor(\n self,\n parent: QWidget,\n option: QStyleOptionViewItem,\n index: QtCore.QModelIndex,\n ) -> QWidget:\n \"\"\"User has double clicked on layer name.\"\"\"\n # necessary for geometry, otherwise editor takes up full width.\n self.get_layer_icon(option, index)\n editor = super().createEditor(parent, option, index)\n # make sure editor has same alignment as the display name\n editor.setAlignment(\n Qt.AlignmentFlag(index.data(Qt.ItemDataRole.TextAlignmentRole))\n )\n return editor\n\n def editorEvent(\n self,\n event: QtCore.QEvent,\n model: QtCore.QAbstractItemModel,\n option: QStyleOptionViewItem,\n index: QtCore.QModelIndex,\n ) -> bool:\n \"\"\"Called when an event has occured in the editor.\n\n This can be used to customize how the delegate handles mouse/key events\n \"\"\"\n if (\n event.type() == QMouseEvent.MouseButtonRelease\n and event.button() == Qt.MouseButton.RightButton\n ):\n pnt = (\n event.globalPosition().toPoint()\n if hasattr(event, \"globalPosition\")\n else event.globalPos()\n )\n\n self.show_context_menu(index, model, pnt, option.widget)\n\n # if the user clicks quickly on the visibility checkbox, we *don't*\n # want it to be interpreted as a double-click. 
We want the visibilty\n # to simply be toggled.\n if event.type() == QMouseEvent.MouseButtonDblClick:\n self.initStyleOption(option, index)\n style = option.widget.style()\n check_rect = style.subElementRect(\n style.SubElement.SE_ItemViewItemCheckIndicator,\n option,\n option.widget,\n )\n if check_rect.contains(event.pos()):\n cur_state = index.data(Qt.ItemDataRole.CheckStateRole)\n if model.flags(index) & Qt.ItemFlag.ItemIsUserTristate:\n state = Qt.CheckState((cur_state + 1) % 3)\n else:\n state = (\n Qt.CheckState.Unchecked\n if cur_state\n else Qt.CheckState.Checked\n )\n return model.setData(\n index, state, Qt.ItemDataRole.CheckStateRole\n )\n # refer all other events to the QStyledItemDelegate\n return super().editorEvent(event, model, option, index)\n\n def show_context_menu(self, index, model, pos: QPoint, parent):\n \"\"\"Show the layerlist context menu.\n To add a new item to the menu, update the _LAYER_ACTIONS dict.\n \"\"\"\n if not hasattr(self, '_context_menu'):\n self._context_menu = build_qmodel_menu(\n MenuId.LAYERLIST_CONTEXT, parent=parent\n )\n\n layer_list: LayerList = model.sourceModel()._root\n self._context_menu.update_from_context(get_context(layer_list))\n self._context_menu.exec_(pos)\n", "path": "napari/_qt/containers/_layer_delegate.py"}]}
| 3,461 | 133 |
gh_patches_debug_3322
|
rasdani/github-patches
|
git_diff
|
holoviz__panel-3100
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Django autoload_handle broken
#### ALL software version info
Panel = 0.13.0a25
Bokeh = 2.4.2
Django = 2.2.14
When loading a Panel app embedded in Django, the `AutoloadJsConsumer` call just hangs. Stepping through the code shows
that an error is raised, which causes it to enter an infinite loop:
```python
> /Users/rditlsc9/miniconda/envs/tethys-vtime/lib/python3.7/site-packages/panel/io/django.py(37)autoload_handle()
-> js = autoload_js_script(resources, session.token, element_id, app_path, absolute_url)
TypeError: autoload_js_script() missing 1 required positional argument: 'absolute_url'
```
It appears that #2919 changed the signature of `autoload_js_script`, but the call to it in `panel/io/django.py:autoload_handle` wasn't updated accordingly.
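For reference, a sketch of the corrected call in `autoload_handle` — the session's document is passed through as the new leading argument, consistent with the patch shown further below:

```python
resources = self.resources(server_url)
js = autoload_js_script(
    session.document,  # document argument required since the signature change
    resources,
    session.token,
    element_id,
    app_path,
    absolute_url,
)
```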
As a side note - is there a better way to get this type of error logged? I wasn't able to see any indication of an error until I stepped through the code in a debugger.
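One generic way to surface such failures while debugging (a sketch using only the standard library, independent of the actual fix) is to log exceptions explicitly in the consumer handler instead of letting them be swallowed:

```python
import logging

logger = logging.getLogger(__name__)

async def autoload_handle(self, body):
    try:
        ...  # existing handler body
    except Exception:
        logger.exception("autoload_handle failed")  # writes the full traceback to the configured log
        raise
```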
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `panel/io/django.py`
Content:
```
1 from urllib.parse import urlparse
2
3 from bokeh.server.django.consumers import DocConsumer, AutoloadJsConsumer
4
5 from .resources import Resources
6 from .server import (
7 autoload_js_script, server_html_page_for_session
8 )
9
10 async def doc_handle(self, body):
11 session = await self._get_session()
12 resources = Resources.from_bokeh(self.application.resources())
13 page = server_html_page_for_session(
14 session, resources=resources, title=session.document.title,
15 template=session.document.template,
16 template_variables=session.document.template_variables
17 )
18 await self.send_response(200, page.encode(), headers=[(b"Content-Type", b"text/html")])
19
20
21 async def autoload_handle(self, body):
22 session = await self._get_session()
23
24 element_id = self.get_argument("bokeh-autoload-element", default=None)
25 if not element_id:
26 raise RuntimeError("No bokeh-autoload-element query parameter")
27
28 app_path = self.get_argument("bokeh-app-path", default="/")
29 absolute_url = self.get_argument("bokeh-absolute-url", default=None)
30
31 if absolute_url:
32 server_url = '{uri.scheme}://{uri.netloc}/'.format(uri=urlparse(absolute_url))
33 else:
34 server_url = None
35
36 resources = self.resources(server_url)
37 js = autoload_js_script(resources, session.token, element_id, app_path, absolute_url)
38
39 headers = [
40 (b"Access-Control-Allow-Headers", b"*"),
41 (b"Access-Control-Allow-Methods", b"PUT, GET, OPTIONS"),
42 (b"Access-Control-Allow-Origin", b"*"),
43 (b"Content-Type", b"application/javascript")
44 ]
45 await self.send_response(200, js.encode(), headers=headers)
46
47
48 DocConsumer.handle = doc_handle
49 AutoloadJsConsumer.handle = autoload_handle
50
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/panel/io/django.py b/panel/io/django.py
--- a/panel/io/django.py
+++ b/panel/io/django.py
@@ -34,7 +34,7 @@
server_url = None
resources = self.resources(server_url)
- js = autoload_js_script(resources, session.token, element_id, app_path, absolute_url)
+ js = autoload_js_script(session.document, resources, session.token, element_id, app_path, absolute_url)
headers = [
(b"Access-Control-Allow-Headers", b"*"),
|
{"golden_diff": "diff --git a/panel/io/django.py b/panel/io/django.py\n--- a/panel/io/django.py\n+++ b/panel/io/django.py\n@@ -34,7 +34,7 @@\n server_url = None\n \n resources = self.resources(server_url)\n- js = autoload_js_script(resources, session.token, element_id, app_path, absolute_url)\n+ js = autoload_js_script(session.document, resources, session.token, element_id, app_path, absolute_url)\n \n headers = [\n (b\"Access-Control-Allow-Headers\", b\"*\"),\n", "issue": "Django autoload_handle broken\n#### ALL software version info\r\nPanel = 0.13.0a25\r\nBokeh = 2.4.2\r\nDjango = 2.2.14\r\n\r\nWhen loading a Panel app embedded in Django, the `AutoloadJsConsumer` call just hangs. After stepping through the code it \r\nappears there is an error, which causes it to enter an eternal loop:\r\n\r\n```python\r\n> /Users/rditlsc9/miniconda/envs/tethys-vtime/lib/python3.7/site-packages/panel/io/django.py(37)autoload_handle()\r\n-> js = autoload_js_script(resources, session.token, element_id, app_path, absolute_url)\r\n\r\nTypeError: autoload_js_script() missing 1 required positional argument: 'absolute_url'\r\n```\r\n\r\nIt appears that #2919 changed the signature of `autoload_js_script`, but the call to it in `panel/io/django.py:autoload_handle` wasn't updated accordingly.\r\n\r\n\r\nAs a side note - is there a better way to get this type of error to log? I wasn't able to see any indication of an error until I stepped through the code in a debugger.\r\n\n", "before_files": [{"content": "from urllib.parse import urlparse\n\nfrom bokeh.server.django.consumers import DocConsumer, AutoloadJsConsumer\n\nfrom .resources import Resources\nfrom .server import (\n autoload_js_script, server_html_page_for_session\n)\n\nasync def doc_handle(self, body):\n session = await self._get_session()\n resources = Resources.from_bokeh(self.application.resources())\n page = server_html_page_for_session(\n session, resources=resources, title=session.document.title,\n template=session.document.template,\n template_variables=session.document.template_variables\n )\n await self.send_response(200, page.encode(), headers=[(b\"Content-Type\", b\"text/html\")])\n\n\nasync def autoload_handle(self, body):\n session = await self._get_session()\n\n element_id = self.get_argument(\"bokeh-autoload-element\", default=None)\n if not element_id:\n raise RuntimeError(\"No bokeh-autoload-element query parameter\")\n\n app_path = self.get_argument(\"bokeh-app-path\", default=\"/\")\n absolute_url = self.get_argument(\"bokeh-absolute-url\", default=None)\n\n if absolute_url:\n server_url = '{uri.scheme}://{uri.netloc}/'.format(uri=urlparse(absolute_url))\n else:\n server_url = None\n\n resources = self.resources(server_url)\n js = autoload_js_script(resources, session.token, element_id, app_path, absolute_url)\n\n headers = [\n (b\"Access-Control-Allow-Headers\", b\"*\"),\n (b\"Access-Control-Allow-Methods\", b\"PUT, GET, OPTIONS\"),\n (b\"Access-Control-Allow-Origin\", b\"*\"),\n (b\"Content-Type\", b\"application/javascript\")\n ]\n await self.send_response(200, js.encode(), headers=headers)\n\n\nDocConsumer.handle = doc_handle\nAutoloadJsConsumer.handle = autoload_handle\n", "path": "panel/io/django.py"}], "after_files": [{"content": "from urllib.parse import urlparse\n\nfrom bokeh.server.django.consumers import DocConsumer, AutoloadJsConsumer\n\nfrom .resources import Resources\nfrom .server import (\n autoload_js_script, server_html_page_for_session\n)\n\nasync def doc_handle(self, body):\n session = await self._get_session()\n 
resources = Resources.from_bokeh(self.application.resources())\n page = server_html_page_for_session(\n session, resources=resources, title=session.document.title,\n template=session.document.template,\n template_variables=session.document.template_variables\n )\n await self.send_response(200, page.encode(), headers=[(b\"Content-Type\", b\"text/html\")])\n\n\nasync def autoload_handle(self, body):\n session = await self._get_session()\n\n element_id = self.get_argument(\"bokeh-autoload-element\", default=None)\n if not element_id:\n raise RuntimeError(\"No bokeh-autoload-element query parameter\")\n\n app_path = self.get_argument(\"bokeh-app-path\", default=\"/\")\n absolute_url = self.get_argument(\"bokeh-absolute-url\", default=None)\n\n if absolute_url:\n server_url = '{uri.scheme}://{uri.netloc}/'.format(uri=urlparse(absolute_url))\n else:\n server_url = None\n\n resources = self.resources(server_url)\n js = autoload_js_script(session.document, resources, session.token, element_id, app_path, absolute_url)\n\n headers = [\n (b\"Access-Control-Allow-Headers\", b\"*\"),\n (b\"Access-Control-Allow-Methods\", b\"PUT, GET, OPTIONS\"),\n (b\"Access-Control-Allow-Origin\", b\"*\"),\n (b\"Content-Type\", b\"application/javascript\")\n ]\n await self.send_response(200, js.encode(), headers=headers)\n\n\nDocConsumer.handle = doc_handle\nAutoloadJsConsumer.handle = autoload_handle\n", "path": "panel/io/django.py"}]}
| 994 | 124 |
gh_patches_debug_875
|
rasdani/github-patches
|
git_diff
|
dbt-labs__dbt-core-5507
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[CT-876] Could we also now remove our upper bound on `MarkupSafe`, which we put in place earlier this year due to incompatibility with Jinja2?
Remove our upper bound on `MarkupSafe`, which we put in place earlier this year due to incompatibility with Jinja2(#4745). Also bump minimum requirement to match [Jinja2's requirements](https://github.com/pallets/jinja/blob/1c4066a4fad5aaeb2ac55809d1d38477cd23a0f6/setup.py#L6).
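For illustration, the entry in question lives in `core/setup.py`'s `install_requires`. Since Jinja2 3.x already declares `MarkupSafe>=2.0` itself, the pin can either be dropped entirely or relaxed to match that minimum — a sketch, not the final patch:

```python
# sketch of the relevant part of core/setup.py
install_requires = [
    "Jinja2==3.1.2",
    # was: "MarkupSafe>=0.23,<2.1" -- the <2.1 cap was added for the earlier Jinja2 incompatibility (#4745)
    # option A: drop the entry and rely on Jinja2's own MarkupSafe>=2.0 requirement
    # option B: keep an explicit, un-capped minimum matching Jinja2:
    "MarkupSafe>=2.0",
    "agate>=1.6,<1.6.4",
    # ... remaining requirements unchanged
]
```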
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/setup.py`
Content:
```
1 #!/usr/bin/env python
2 import os
3 import sys
4
5 if sys.version_info < (3, 7, 2):
6 print("Error: dbt does not support this version of Python.")
7 print("Please upgrade to Python 3.7.2 or higher.")
8 sys.exit(1)
9
10
11 from setuptools import setup
12
13 try:
14 from setuptools import find_namespace_packages
15 except ImportError:
16 # the user has a downlevel version of setuptools.
17 print("Error: dbt requires setuptools v40.1.0 or higher.")
18 print('Please upgrade setuptools with "pip install --upgrade setuptools" ' "and try again")
19 sys.exit(1)
20
21
22 this_directory = os.path.abspath(os.path.dirname(__file__))
23 with open(os.path.join(this_directory, "README.md")) as f:
24 long_description = f.read()
25
26
27 package_name = "dbt-core"
28 package_version = "1.3.0a1"
29 description = """With dbt, data analysts and engineers can build analytics \
30 the way engineers build applications."""
31
32
33 setup(
34 name=package_name,
35 version=package_version,
36 description=description,
37 long_description=long_description,
38 long_description_content_type="text/markdown",
39 author="dbt Labs",
40 author_email="[email protected]",
41 url="https://github.com/dbt-labs/dbt-core",
42 packages=find_namespace_packages(include=["dbt", "dbt.*"]),
43 include_package_data=True,
44 test_suite="test",
45 entry_points={
46 "console_scripts": [
47 "dbt = dbt.main:main",
48 ],
49 },
50 install_requires=[
51 "Jinja2==3.1.2",
52 "MarkupSafe>=0.23,<2.1",
53 "agate>=1.6,<1.6.4",
54 "click>=7.0,<9",
55 "colorama>=0.3.9,<0.4.6",
56 "hologram>=0.0.14,<=0.0.15",
57 "isodate>=0.6,<0.7",
58 "logbook>=1.5,<1.6",
59 "mashumaro[msgpack]==3.0.3",
60 "minimal-snowplow-tracker==0.0.2",
61 "networkx>=2.3,<2.8.1;python_version<'3.8'",
62 "networkx>=2.3,<3;python_version>='3.8'",
63 "packaging>=20.9,<22.0",
64 "sqlparse>=0.2.3,<0.5",
65 "dbt-extractor~=0.4.1",
66 "typing-extensions>=3.7.4",
67 "werkzeug>=1,<3",
68 # the following are all to match snowflake-connector-python
69 "requests<3.0.0",
70 "idna>=2.5,<4",
71 "cffi>=1.9,<2.0.0",
72 "pyyaml>=6.0",
73 ],
74 zip_safe=False,
75 classifiers=[
76 "Development Status :: 5 - Production/Stable",
77 "License :: OSI Approved :: Apache Software License",
78 "Operating System :: Microsoft :: Windows",
79 "Operating System :: MacOS :: MacOS X",
80 "Operating System :: POSIX :: Linux",
81 "Programming Language :: Python :: 3.7",
82 "Programming Language :: Python :: 3.8",
83 "Programming Language :: Python :: 3.9",
84 "Programming Language :: Python :: 3.10",
85 ],
86 python_requires=">=3.7.2",
87 )
88
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/core/setup.py b/core/setup.py
--- a/core/setup.py
+++ b/core/setup.py
@@ -49,7 +49,6 @@
},
install_requires=[
"Jinja2==3.1.2",
- "MarkupSafe>=0.23,<2.1",
"agate>=1.6,<1.6.4",
"click>=7.0,<9",
"colorama>=0.3.9,<0.4.6",
|
{"golden_diff": "diff --git a/core/setup.py b/core/setup.py\n--- a/core/setup.py\n+++ b/core/setup.py\n@@ -49,7 +49,6 @@\n },\n install_requires=[\n \"Jinja2==3.1.2\",\n- \"MarkupSafe>=0.23,<2.1\",\n \"agate>=1.6,<1.6.4\",\n \"click>=7.0,<9\",\n \"colorama>=0.3.9,<0.4.6\",\n", "issue": "[CT-876] Could we also now remove our upper bound on `MarkupSafe`, which we put in place earlier this year due to incompatibility with Jinja2?\nRemove our upper bound on `MarkupSafe`, which we put in place earlier this year due to incompatibility with Jinja2(#4745). Also bump minimum requirement to match [Jinja2's requirements](https://github.com/pallets/jinja/blob/1c4066a4fad5aaeb2ac55809d1d38477cd23a0f6/setup.py#L6).\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\nimport os\nimport sys\n\nif sys.version_info < (3, 7, 2):\n print(\"Error: dbt does not support this version of Python.\")\n print(\"Please upgrade to Python 3.7.2 or higher.\")\n sys.exit(1)\n\n\nfrom setuptools import setup\n\ntry:\n from setuptools import find_namespace_packages\nexcept ImportError:\n # the user has a downlevel version of setuptools.\n print(\"Error: dbt requires setuptools v40.1.0 or higher.\")\n print('Please upgrade setuptools with \"pip install --upgrade setuptools\" ' \"and try again\")\n sys.exit(1)\n\n\nthis_directory = os.path.abspath(os.path.dirname(__file__))\nwith open(os.path.join(this_directory, \"README.md\")) as f:\n long_description = f.read()\n\n\npackage_name = \"dbt-core\"\npackage_version = \"1.3.0a1\"\ndescription = \"\"\"With dbt, data analysts and engineers can build analytics \\\nthe way engineers build applications.\"\"\"\n\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n author=\"dbt Labs\",\n author_email=\"[email protected]\",\n url=\"https://github.com/dbt-labs/dbt-core\",\n packages=find_namespace_packages(include=[\"dbt\", \"dbt.*\"]),\n include_package_data=True,\n test_suite=\"test\",\n entry_points={\n \"console_scripts\": [\n \"dbt = dbt.main:main\",\n ],\n },\n install_requires=[\n \"Jinja2==3.1.2\",\n \"MarkupSafe>=0.23,<2.1\",\n \"agate>=1.6,<1.6.4\",\n \"click>=7.0,<9\",\n \"colorama>=0.3.9,<0.4.6\",\n \"hologram>=0.0.14,<=0.0.15\",\n \"isodate>=0.6,<0.7\",\n \"logbook>=1.5,<1.6\",\n \"mashumaro[msgpack]==3.0.3\",\n \"minimal-snowplow-tracker==0.0.2\",\n \"networkx>=2.3,<2.8.1;python_version<'3.8'\",\n \"networkx>=2.3,<3;python_version>='3.8'\",\n \"packaging>=20.9,<22.0\",\n \"sqlparse>=0.2.3,<0.5\",\n \"dbt-extractor~=0.4.1\",\n \"typing-extensions>=3.7.4\",\n \"werkzeug>=1,<3\",\n # the following are all to match snowflake-connector-python\n \"requests<3.0.0\",\n \"idna>=2.5,<4\",\n \"cffi>=1.9,<2.0.0\",\n \"pyyaml>=6.0\",\n ],\n zip_safe=False,\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: Microsoft :: Windows\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n ],\n python_requires=\">=3.7.2\",\n)\n", "path": "core/setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport os\nimport sys\n\nif sys.version_info < (3, 7, 2):\n print(\"Error: dbt does not support this version of Python.\")\n print(\"Please upgrade to 
Python 3.7.2 or higher.\")\n sys.exit(1)\n\n\nfrom setuptools import setup\n\ntry:\n from setuptools import find_namespace_packages\nexcept ImportError:\n # the user has a downlevel version of setuptools.\n print(\"Error: dbt requires setuptools v40.1.0 or higher.\")\n print('Please upgrade setuptools with \"pip install --upgrade setuptools\" ' \"and try again\")\n sys.exit(1)\n\n\nthis_directory = os.path.abspath(os.path.dirname(__file__))\nwith open(os.path.join(this_directory, \"README.md\")) as f:\n long_description = f.read()\n\n\npackage_name = \"dbt-core\"\npackage_version = \"1.3.0a1\"\ndescription = \"\"\"With dbt, data analysts and engineers can build analytics \\\nthe way engineers build applications.\"\"\"\n\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n author=\"dbt Labs\",\n author_email=\"[email protected]\",\n url=\"https://github.com/dbt-labs/dbt-core\",\n packages=find_namespace_packages(include=[\"dbt\", \"dbt.*\"]),\n include_package_data=True,\n test_suite=\"test\",\n entry_points={\n \"console_scripts\": [\n \"dbt = dbt.main:main\",\n ],\n },\n install_requires=[\n \"Jinja2==3.1.2\",\n \"agate>=1.6,<1.6.4\",\n \"click>=7.0,<9\",\n \"colorama>=0.3.9,<0.4.6\",\n \"hologram>=0.0.14,<=0.0.15\",\n \"isodate>=0.6,<0.7\",\n \"logbook>=1.5,<1.6\",\n \"mashumaro==2.9\",\n \"minimal-snowplow-tracker==0.0.2\",\n \"networkx>=2.3,<2.8.1;python_version<'3.8'\",\n \"networkx>=2.3,<3;python_version>='3.8'\",\n \"packaging>=20.9,<22.0\",\n \"sqlparse>=0.2.3,<0.5\",\n \"dbt-extractor~=0.4.1\",\n \"typing-extensions>=3.7.4\",\n \"werkzeug>=1,<3\",\n # the following are all to match snowflake-connector-python\n \"requests<3.0.0\",\n \"idna>=2.5,<4\",\n \"cffi>=1.9,<2.0.0\",\n ],\n zip_safe=False,\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: Microsoft :: Windows\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n ],\n python_requires=\">=3.7.2\",\n)\n", "path": "core/setup.py"}]}
| 1,343 | 110 |
gh_patches_debug_7583
|
rasdani/github-patches
|
git_diff
|
nextcloud__appstore-260
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Apps without releases should not be rendered on the app list page
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nextcloudappstore/core/views.py`
Content:
```
1 from urllib.parse import urlencode
2
3 from django.contrib.auth.models import User
4 from django.core.exceptions import ObjectDoesNotExist
5 from django.db.models import Q
6 from django.http import HttpResponse
7 from django.shortcuts import get_object_or_404, redirect
8 from django.utils.functional import cached_property
9 from django.utils.translation import get_language, get_language_info
10 from django.views.generic.base import TemplateView
11 from django.views.generic.detail import DetailView
12 from django.views.generic.list import ListView
13 from rest_framework.generics import ListAPIView
14 from semantic_version import Version
15
16 from nextcloudappstore.core.api.v1.serializers import AppRatingSerializer
17 from nextcloudappstore.core.forms import AppRatingForm
18 from nextcloudappstore.core.models import App, Category, AppRating
19 from nextcloudappstore.core.versioning import pad_min_version
20
21
22 def app_description(request, id):
23 app = get_object_or_404(App, id=id)
24 return HttpResponse(app.description, content_type='text/plain')
25
26
27 class AppRatingApi(ListAPIView):
28 serializer_class = AppRatingSerializer
29
30 def get_queryset(self):
31 id = self.kwargs.get('id')
32 app = get_object_or_404(App, id=id)
33 return AppRating.objects.language(self.request.LANGUAGE_CODE).filter(
34 app=app)
35
36
37 class LegalNoticeView(TemplateView):
38 template_name = 'legal.html'
39
40
41 class AppDetailView(DetailView):
42 model = App
43 template_name = 'app/detail.html'
44 slug_field = 'id'
45 slug_url_kwarg = 'id'
46
47 def post(self, request, id):
48 form = AppRatingForm(request.POST, id=id, user=request.user,
49 language_code=request.LANGUAGE_CODE)
50 # there is no way that a rating can be invalid by default
51 if form.is_valid() and request.user.is_authenticated():
52 form.save()
53 return redirect('app-detail', id=id)
54
55 def get_context_data(self, **kwargs):
56 context = super().get_context_data(**kwargs)
57 context['rating_form'] = AppRatingForm()
58 context['user_has_rated_app'] = False
59 if self.request.user.is_authenticated():
60 try:
61 app_rating = AppRating.objects.get(user=self.request.user,
62 app=context['app'])
63 # when accessing an empty comment django-parler tries to
64 # fall back to the default language. However for comments
65 # the default (English) does not always exist. Unfortunately
66 # it throws the same exception as non existing models,
67 # so we need to access it beforehand
68 try:
69 comment = app_rating.comment
70 except AppRating.DoesNotExist:
71 comment = ''
72
73 context['rating_form'] = AppRatingForm(initial={
74 'rating': app_rating.rating,
75 'comment': comment
76 })
77 context['user_has_rated_app'] = True
78 except AppRating.DoesNotExist:
79 pass
80 context['categories'] = Category.objects.all()
81 context['latest_releases_by_platform_v'] = \
82 self.object.latest_releases_by_platform_v()
83 return context
84
85
86 class AppReleasesView(DetailView):
87 model = App
88 template_name = 'app/releases.html'
89 slug_field = 'id'
90 slug_url_kwarg = 'id'
91
92 def get_context_data(self, **kwargs):
93 context = super().get_context_data(**kwargs)
94 context['categories'] = Category.objects.all()
95
96 releases = self.object.releases_by_platform_v()
97 nightlies = self.object.nightly_releases_by_platform_v()
98 versions = set(list(releases.keys()) + list(nightlies.keys()))
99 all_releases = list(map(
100 lambda v: (v, releases.get(v, []) + nightlies.get(v, [])),
101 versions))
102 context['releases_by_platform_v'] = \
103 self._sort_by_platform_v(all_releases)
104
105 return context
106
107 def _sort_by_platform_v(self, releases_by_platform, reverse=True):
108 """Sorts a list of tuples like (<platform version>, [releases]) by
109 platform version.
110
111 :param releases_by_platform: A list of tuples.
112 :param reverse: Descending order if True, ascending otherwise.
113 :return sorted list of tuples.
114 """
115
116 return sorted(releases_by_platform, reverse=reverse,
117 key=lambda v: Version(pad_min_version(v[0])))
118
119
120 class CategoryAppListView(ListView):
121 model = App
122 template_name = 'app/list.html'
123 allow_empty = True
124
125 def get_queryset(self):
126 order_by = self.request.GET.get('order_by', 'rating_overall')
127 ordering = self.request.GET.get('ordering', 'desc')
128 featured = self.request.GET.get('featured', False)
129 maintainer = self.request.GET.get('maintainer', False)
130 sort_columns = []
131
132 allowed_order_by = {'name', 'last_release', 'rating_overall',
133 'rating_recent'}
134 if order_by in allowed_order_by:
135 if order_by == 'name':
136 order_by = 'translations__name'
137 if ordering == 'desc':
138 sort_columns.append('-' + order_by)
139 else:
140 sort_columns.append(order_by)
141
142 lang = get_language_info(get_language())['code']
143 category_id = self.kwargs['id']
144 queryset = App.objects.search(self.search_terms, lang).order_by(
145 *sort_columns)
146 if maintainer:
147 try:
148 user = User.objects.get_by_natural_key(maintainer)
149 queryset = queryset.filter(Q(owner=user) |
150 Q(co_maintainers=user))
151 except ObjectDoesNotExist:
152 return queryset.none()
153 if category_id:
154 queryset = queryset.filter(categories__id=category_id)
155 if featured == "true":
156 queryset = queryset.filter(featured=True)
157 return queryset
158
159 def get_context_data(self, **kwargs):
160 context = super().get_context_data(**kwargs)
161 context['categories'] = Category.objects.all()
162 category_id = self.kwargs['id']
163 if category_id:
164 context['current_category'] = Category.objects.get(id=category_id)
165 if self.search_terms:
166 context['search_query'] = ' '.join(self.search_terms)
167 context['url_params'] = self.url_params
168 return context
169
170 @cached_property
171 def url_params(self):
172 """URL encoded strings with the GET params of the last request.
173
174 Intended for preserving GET params upon clicking a link by including
175 one (and only one) of these strings in the "href" attribute.
176
177 The parameters are divided into three groups: search, filters and
178 ordering. In addition to these three, the returned dict also contains
179 some combinations of them, as specified by the dict keys.
180
181 No leading "?" or "&".
182
183 :return dict with URL encoded strings.
184 """
185
186 search = self._url_params_str('search')
187 filters = self._url_params_str('featured', 'maintainer')
188 ordering = self._url_params_str('order_by', 'ordering')
189
190 return {
191 'search': search,
192 'filters': filters,
193 'ordering': ordering,
194 'search_filters': self._join_url_params_strs(search, filters),
195 'filters_ordering': self._join_url_params_strs(filters, ordering),
196 }
197
198 def _url_params_str(self, *params):
199 args = map(lambda param: (param, self.request.GET.get(param, '')),
200 params)
201 present_args = filter(lambda a: a[1], args)
202 return urlencode(dict(present_args))
203
204 def _join_url_params_strs(self, *strings):
205 return '&'.join(filter(None, strings))
206
207 @cached_property
208 def search_terms(self):
209 return self.request.GET.get('search', '').strip().split()
210
211
212 class AppUploadView(TemplateView):
213 template_name = 'app/upload.html'
214
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/nextcloudappstore/core/views.py b/nextcloudappstore/core/views.py
--- a/nextcloudappstore/core/views.py
+++ b/nextcloudappstore/core/views.py
@@ -142,7 +142,7 @@
lang = get_language_info(get_language())['code']
category_id = self.kwargs['id']
queryset = App.objects.search(self.search_terms, lang).order_by(
- *sort_columns)
+ *sort_columns).filter(releases__gt=0)
if maintainer:
try:
user = User.objects.get_by_natural_key(maintainer)
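
The one-line change above makes the category listing skip apps that have no releases: the queryset gains `.filter(releases__gt=0)` on the reverse release relation. A rough equivalent written with an explicit count, shown only as a sketch (the `releases` relation name is taken from the diff; everything else here is illustrative):

```python
from django.db.models import Count

# Sketch: keep only apps that have at least one release before ordering.
apps_with_releases = (
    App.objects.annotate(release_count=Count("releases"))
    .filter(release_count__gt=0)
)
```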
|
{"golden_diff": "diff --git a/nextcloudappstore/core/views.py b/nextcloudappstore/core/views.py\n--- a/nextcloudappstore/core/views.py\n+++ b/nextcloudappstore/core/views.py\n@@ -142,7 +142,7 @@\n lang = get_language_info(get_language())['code']\n category_id = self.kwargs['id']\n queryset = App.objects.search(self.search_terms, lang).order_by(\n- *sort_columns)\n+ *sort_columns).filter(releases__gt=0)\n if maintainer:\n try:\n user = User.objects.get_by_natural_key(maintainer)\n", "issue": "Apps without releases should not be rendered on the app list page\n\n", "before_files": [{"content": "from urllib.parse import urlencode\n\nfrom django.contrib.auth.models import User\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.db.models import Q\nfrom django.http import HttpResponse\nfrom django.shortcuts import get_object_or_404, redirect\nfrom django.utils.functional import cached_property\nfrom django.utils.translation import get_language, get_language_info\nfrom django.views.generic.base import TemplateView\nfrom django.views.generic.detail import DetailView\nfrom django.views.generic.list import ListView\nfrom rest_framework.generics import ListAPIView\nfrom semantic_version import Version\n\nfrom nextcloudappstore.core.api.v1.serializers import AppRatingSerializer\nfrom nextcloudappstore.core.forms import AppRatingForm\nfrom nextcloudappstore.core.models import App, Category, AppRating\nfrom nextcloudappstore.core.versioning import pad_min_version\n\n\ndef app_description(request, id):\n app = get_object_or_404(App, id=id)\n return HttpResponse(app.description, content_type='text/plain')\n\n\nclass AppRatingApi(ListAPIView):\n serializer_class = AppRatingSerializer\n\n def get_queryset(self):\n id = self.kwargs.get('id')\n app = get_object_or_404(App, id=id)\n return AppRating.objects.language(self.request.LANGUAGE_CODE).filter(\n app=app)\n\n\nclass LegalNoticeView(TemplateView):\n template_name = 'legal.html'\n\n\nclass AppDetailView(DetailView):\n model = App\n template_name = 'app/detail.html'\n slug_field = 'id'\n slug_url_kwarg = 'id'\n\n def post(self, request, id):\n form = AppRatingForm(request.POST, id=id, user=request.user,\n language_code=request.LANGUAGE_CODE)\n # there is no way that a rating can be invalid by default\n if form.is_valid() and request.user.is_authenticated():\n form.save()\n return redirect('app-detail', id=id)\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['rating_form'] = AppRatingForm()\n context['user_has_rated_app'] = False\n if self.request.user.is_authenticated():\n try:\n app_rating = AppRating.objects.get(user=self.request.user,\n app=context['app'])\n # when accessing an empty comment django-parler tries to\n # fall back to the default language. However for comments\n # the default (English) does not always exist. 
Unfortunately\n # it throws the same exception as non existing models,\n # so we need to access it beforehand\n try:\n comment = app_rating.comment\n except AppRating.DoesNotExist:\n comment = ''\n\n context['rating_form'] = AppRatingForm(initial={\n 'rating': app_rating.rating,\n 'comment': comment\n })\n context['user_has_rated_app'] = True\n except AppRating.DoesNotExist:\n pass\n context['categories'] = Category.objects.all()\n context['latest_releases_by_platform_v'] = \\\n self.object.latest_releases_by_platform_v()\n return context\n\n\nclass AppReleasesView(DetailView):\n model = App\n template_name = 'app/releases.html'\n slug_field = 'id'\n slug_url_kwarg = 'id'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['categories'] = Category.objects.all()\n\n releases = self.object.releases_by_platform_v()\n nightlies = self.object.nightly_releases_by_platform_v()\n versions = set(list(releases.keys()) + list(nightlies.keys()))\n all_releases = list(map(\n lambda v: (v, releases.get(v, []) + nightlies.get(v, [])),\n versions))\n context['releases_by_platform_v'] = \\\n self._sort_by_platform_v(all_releases)\n\n return context\n\n def _sort_by_platform_v(self, releases_by_platform, reverse=True):\n \"\"\"Sorts a list of tuples like (<platform version>, [releases]) by\n platform version.\n\n :param releases_by_platform: A list of tuples.\n :param reverse: Descending order if True, ascending otherwise.\n :return sorted list of tuples.\n \"\"\"\n\n return sorted(releases_by_platform, reverse=reverse,\n key=lambda v: Version(pad_min_version(v[0])))\n\n\nclass CategoryAppListView(ListView):\n model = App\n template_name = 'app/list.html'\n allow_empty = True\n\n def get_queryset(self):\n order_by = self.request.GET.get('order_by', 'rating_overall')\n ordering = self.request.GET.get('ordering', 'desc')\n featured = self.request.GET.get('featured', False)\n maintainer = self.request.GET.get('maintainer', False)\n sort_columns = []\n\n allowed_order_by = {'name', 'last_release', 'rating_overall',\n 'rating_recent'}\n if order_by in allowed_order_by:\n if order_by == 'name':\n order_by = 'translations__name'\n if ordering == 'desc':\n sort_columns.append('-' + order_by)\n else:\n sort_columns.append(order_by)\n\n lang = get_language_info(get_language())['code']\n category_id = self.kwargs['id']\n queryset = App.objects.search(self.search_terms, lang).order_by(\n *sort_columns)\n if maintainer:\n try:\n user = User.objects.get_by_natural_key(maintainer)\n queryset = queryset.filter(Q(owner=user) |\n Q(co_maintainers=user))\n except ObjectDoesNotExist:\n return queryset.none()\n if category_id:\n queryset = queryset.filter(categories__id=category_id)\n if featured == \"true\":\n queryset = queryset.filter(featured=True)\n return queryset\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['categories'] = Category.objects.all()\n category_id = self.kwargs['id']\n if category_id:\n context['current_category'] = Category.objects.get(id=category_id)\n if self.search_terms:\n context['search_query'] = ' '.join(self.search_terms)\n context['url_params'] = self.url_params\n return context\n\n @cached_property\n def url_params(self):\n \"\"\"URL encoded strings with the GET params of the last request.\n\n Intended for preserving GET params upon clicking a link by including\n one (and only one) of these strings in the \"href\" attribute.\n\n The parameters are divided into three groups: search, filters and\n 
ordering. In addition to these three, the returned dict also contains\n some combinations of them, as specified by the dict keys.\n\n No leading \"?\" or \"&\".\n\n :return dict with URL encoded strings.\n \"\"\"\n\n search = self._url_params_str('search')\n filters = self._url_params_str('featured', 'maintainer')\n ordering = self._url_params_str('order_by', 'ordering')\n\n return {\n 'search': search,\n 'filters': filters,\n 'ordering': ordering,\n 'search_filters': self._join_url_params_strs(search, filters),\n 'filters_ordering': self._join_url_params_strs(filters, ordering),\n }\n\n def _url_params_str(self, *params):\n args = map(lambda param: (param, self.request.GET.get(param, '')),\n params)\n present_args = filter(lambda a: a[1], args)\n return urlencode(dict(present_args))\n\n def _join_url_params_strs(self, *strings):\n return '&'.join(filter(None, strings))\n\n @cached_property\n def search_terms(self):\n return self.request.GET.get('search', '').strip().split()\n\n\nclass AppUploadView(TemplateView):\n template_name = 'app/upload.html'\n", "path": "nextcloudappstore/core/views.py"}], "after_files": [{"content": "from urllib.parse import urlencode\n\nfrom django.contrib.auth.models import User\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.db.models import Q\nfrom django.http import HttpResponse\nfrom django.shortcuts import get_object_or_404, redirect\nfrom django.utils.functional import cached_property\nfrom django.utils.translation import get_language, get_language_info\nfrom django.views.generic.base import TemplateView\nfrom django.views.generic.detail import DetailView\nfrom django.views.generic.list import ListView\nfrom rest_framework.generics import ListAPIView\nfrom semantic_version import Version\n\nfrom nextcloudappstore.core.api.v1.serializers import AppRatingSerializer\nfrom nextcloudappstore.core.forms import AppRatingForm\nfrom nextcloudappstore.core.models import App, Category, AppRating\nfrom nextcloudappstore.core.versioning import pad_min_version\n\n\ndef app_description(request, id):\n app = get_object_or_404(App, id=id)\n return HttpResponse(app.description, content_type='text/plain')\n\n\nclass AppRatingApi(ListAPIView):\n serializer_class = AppRatingSerializer\n\n def get_queryset(self):\n id = self.kwargs.get('id')\n app = get_object_or_404(App, id=id)\n return AppRating.objects.language(self.request.LANGUAGE_CODE).filter(\n app=app)\n\n\nclass LegalNoticeView(TemplateView):\n template_name = 'legal.html'\n\n\nclass AppDetailView(DetailView):\n model = App\n template_name = 'app/detail.html'\n slug_field = 'id'\n slug_url_kwarg = 'id'\n\n def post(self, request, id):\n form = AppRatingForm(request.POST, id=id, user=request.user,\n language_code=request.LANGUAGE_CODE)\n # there is no way that a rating can be invalid by default\n if form.is_valid() and request.user.is_authenticated():\n form.save()\n return redirect('app-detail', id=id)\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['rating_form'] = AppRatingForm()\n context['user_has_rated_app'] = False\n if self.request.user.is_authenticated():\n try:\n app_rating = AppRating.objects.get(user=self.request.user,\n app=context['app'])\n # when accessing an empty comment django-parler tries to\n # fall back to the default language. However for comments\n # the default (English) does not always exist. 
Unfortunately\n # it throws the same exception as non existing models,\n # so we need to access it beforehand\n try:\n comment = app_rating.comment\n except AppRating.DoesNotExist:\n comment = ''\n\n context['rating_form'] = AppRatingForm(initial={\n 'rating': app_rating.rating,\n 'comment': comment\n })\n context['user_has_rated_app'] = True\n except AppRating.DoesNotExist:\n pass\n context['categories'] = Category.objects.all()\n context['latest_releases_by_platform_v'] = \\\n self.object.latest_releases_by_platform_v()\n return context\n\n\nclass AppReleasesView(DetailView):\n model = App\n template_name = 'app/releases.html'\n slug_field = 'id'\n slug_url_kwarg = 'id'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['categories'] = Category.objects.all()\n\n releases = self.object.releases_by_platform_v()\n nightlies = self.object.nightly_releases_by_platform_v()\n versions = set(list(releases.keys()) + list(nightlies.keys()))\n all_releases = list(map(\n lambda v: (v, releases.get(v, []) + nightlies.get(v, [])),\n versions))\n context['releases_by_platform_v'] = \\\n self._sort_by_platform_v(all_releases)\n\n return context\n\n def _sort_by_platform_v(self, releases_by_platform, reverse=True):\n \"\"\"Sorts a list of tuples like (<platform version>, [releases]) by\n platform version.\n\n :param releases_by_platform: A list of tuples.\n :param reverse: Descending order if True, ascending otherwise.\n :return sorted list of tuples.\n \"\"\"\n\n return sorted(releases_by_platform, reverse=reverse,\n key=lambda v: Version(pad_min_version(v[0])))\n\n\nclass CategoryAppListView(ListView):\n model = App\n template_name = 'app/list.html'\n allow_empty = True\n\n def get_queryset(self):\n order_by = self.request.GET.get('order_by', 'rating_overall')\n ordering = self.request.GET.get('ordering', 'desc')\n featured = self.request.GET.get('featured', False)\n maintainer = self.request.GET.get('maintainer', False)\n sort_columns = []\n\n allowed_order_by = {'name', 'last_release', 'rating_overall',\n 'rating_recent'}\n if order_by in allowed_order_by:\n if order_by == 'name':\n order_by = 'translations__name'\n if ordering == 'desc':\n sort_columns.append('-' + order_by)\n else:\n sort_columns.append(order_by)\n\n lang = get_language_info(get_language())['code']\n category_id = self.kwargs['id']\n queryset = App.objects.search(self.search_terms, lang).order_by(\n *sort_columns).filter(releases__gt=0)\n if maintainer:\n try:\n user = User.objects.get_by_natural_key(maintainer)\n queryset = queryset.filter(Q(owner=user) |\n Q(co_maintainers=user))\n except ObjectDoesNotExist:\n return queryset.none()\n if category_id:\n queryset = queryset.filter(categories__id=category_id)\n if featured == \"true\":\n queryset = queryset.filter(featured=True)\n return queryset\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['categories'] = Category.objects.all()\n category_id = self.kwargs['id']\n if category_id:\n context['current_category'] = Category.objects.get(id=category_id)\n if self.search_terms:\n context['search_query'] = ' '.join(self.search_terms)\n context['url_params'] = self.url_params\n return context\n\n @cached_property\n def url_params(self):\n \"\"\"URL encoded strings with the GET params of the last request.\n\n Intended for preserving GET params upon clicking a link by including\n one (and only one) of these strings in the \"href\" attribute.\n\n The parameters are divided into three groups: 
search, filters and\n ordering. In addition to these three, the returned dict also contains\n some combinations of them, as specified by the dict keys.\n\n No leading \"?\" or \"&\".\n\n :return dict with URL encoded strings.\n \"\"\"\n\n search = self._url_params_str('search')\n filters = self._url_params_str('featured', 'maintainer')\n ordering = self._url_params_str('order_by', 'ordering')\n\n return {\n 'search': search,\n 'filters': filters,\n 'ordering': ordering,\n 'search_filters': self._join_url_params_strs(search, filters),\n 'filters_ordering': self._join_url_params_strs(filters, ordering),\n }\n\n def _url_params_str(self, *params):\n args = map(lambda param: (param, self.request.GET.get(param, '')),\n params)\n present_args = filter(lambda a: a[1], args)\n return urlencode(dict(present_args))\n\n def _join_url_params_strs(self, *strings):\n return '&'.join(filter(None, strings))\n\n @cached_property\n def search_terms(self):\n return self.request.GET.get('search', '').strip().split()\n\n\nclass AppUploadView(TemplateView):\n template_name = 'app/upload.html'\n", "path": "nextcloudappstore/core/views.py"}]}
| 2,474 | 136 |
gh_patches_debug_29931
|
rasdani/github-patches
|
git_diff
|
deepset-ai__haystack-5811
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Change `SentenceTransformersTextEmbedder` to non-batch mode
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `haystack/preview/components/embedders/sentence_transformers_text_embedder.py`
Content:
```
1 from typing import List, Optional, Union, Dict, Any
2
3 from haystack.preview import component, default_to_dict, default_from_dict
4 from haystack.preview.embedding_backends.sentence_transformers_backend import (
5 _SentenceTransformersEmbeddingBackendFactory,
6 )
7
8
9 @component
10 class SentenceTransformersTextEmbedder:
11 """
12 A component for embedding strings using Sentence Transformers models.
13 """
14
15 def __init__(
16 self,
17 model_name_or_path: str = "sentence-transformers/all-mpnet-base-v2",
18 device: Optional[str] = None,
19 use_auth_token: Union[bool, str, None] = None,
20 prefix: str = "",
21 suffix: str = "",
22 batch_size: int = 32,
23 progress_bar: bool = True,
24 normalize_embeddings: bool = False,
25 ):
26 """
27 Create a SentenceTransformersTextEmbedder component.
28
29 :param model_name_or_path: Local path or name of the model in Hugging Face's model hub, such as ``'sentence-transformers/all-mpnet-base-v2'``.
30 :param device: Device (like 'cuda' / 'cpu') that should be used for computation. If None, checks if a GPU can be used.
31 :param use_auth_token: The API token used to download private models from Hugging Face.
32 If this parameter is set to `True`, then the token generated when running
33 `transformers-cli login` (stored in ~/.huggingface) will be used.
34 :param prefix: A string to add to the beginning of each text.
35 :param suffix: A string to add to the end of each text.
36 :param batch_size: Number of strings to encode at once.
37 :param progress_bar: If true, displays progress bar during embedding.
38 :param normalize_embeddings: If set to true, returned vectors will have length 1.
39 """
40
41 self.model_name_or_path = model_name_or_path
42 # TODO: remove device parameter and use Haystack's device management once migrated
43 self.device = device or "cpu"
44 self.use_auth_token = use_auth_token
45 self.prefix = prefix
46 self.suffix = suffix
47 self.batch_size = batch_size
48 self.progress_bar = progress_bar
49 self.normalize_embeddings = normalize_embeddings
50
51 def to_dict(self) -> Dict[str, Any]:
52 """
53 Serialize this component to a dictionary.
54 """
55 return default_to_dict(
56 self,
57 model_name_or_path=self.model_name_or_path,
58 device=self.device,
59 use_auth_token=self.use_auth_token,
60 prefix=self.prefix,
61 suffix=self.suffix,
62 batch_size=self.batch_size,
63 progress_bar=self.progress_bar,
64 normalize_embeddings=self.normalize_embeddings,
65 )
66
67 @classmethod
68 def from_dict(cls, data: Dict[str, Any]) -> "SentenceTransformersTextEmbedder":
69 """
70 Deserialize this component from a dictionary.
71 """
72 return default_from_dict(cls, data)
73
74 def warm_up(self):
75 """
76 Load the embedding backend.
77 """
78 if not hasattr(self, "embedding_backend"):
79 self.embedding_backend = _SentenceTransformersEmbeddingBackendFactory.get_embedding_backend(
80 model_name_or_path=self.model_name_or_path, device=self.device, use_auth_token=self.use_auth_token
81 )
82
83 @component.output_types(embeddings=List[List[float]])
84 def run(self, texts: List[str]):
85 """Embed a list of strings."""
86 if not isinstance(texts, list) or not isinstance(texts[0], str):
87 raise TypeError(
88 "SentenceTransformersTextEmbedder expects a list of strings as input."
89 "In case you want to embed a list of Documents, please use the SentenceTransformersDocumentEmbedder."
90 )
91 if not hasattr(self, "embedding_backend"):
92 raise RuntimeError("The embedding model has not been loaded. Please call warm_up() before running.")
93
94 texts_to_embed = [self.prefix + text + self.suffix for text in texts]
95 embeddings = self.embedding_backend.embed(
96 texts_to_embed,
97 batch_size=self.batch_size,
98 show_progress_bar=self.progress_bar,
99 normalize_embeddings=self.normalize_embeddings,
100 )
101 return {"embeddings": embeddings}
102
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/haystack/preview/components/embedders/sentence_transformers_text_embedder.py b/haystack/preview/components/embedders/sentence_transformers_text_embedder.py
--- a/haystack/preview/components/embedders/sentence_transformers_text_embedder.py
+++ b/haystack/preview/components/embedders/sentence_transformers_text_embedder.py
@@ -80,22 +80,22 @@
model_name_or_path=self.model_name_or_path, device=self.device, use_auth_token=self.use_auth_token
)
- @component.output_types(embeddings=List[List[float]])
- def run(self, texts: List[str]):
- """Embed a list of strings."""
- if not isinstance(texts, list) or not isinstance(texts[0], str):
+ @component.output_types(embedding=List[float])
+ def run(self, text: str):
+ """Embed a string."""
+ if not isinstance(text, str):
raise TypeError(
- "SentenceTransformersTextEmbedder expects a list of strings as input."
+ "SentenceTransformersTextEmbedder expects a string as input."
"In case you want to embed a list of Documents, please use the SentenceTransformersDocumentEmbedder."
)
if not hasattr(self, "embedding_backend"):
raise RuntimeError("The embedding model has not been loaded. Please call warm_up() before running.")
- texts_to_embed = [self.prefix + text + self.suffix for text in texts]
- embeddings = self.embedding_backend.embed(
- texts_to_embed,
+ text_to_embed = self.prefix + text + self.suffix
+ embedding = self.embedding_backend.embed(
+ [text_to_embed],
batch_size=self.batch_size,
show_progress_bar=self.progress_bar,
normalize_embeddings=self.normalize_embeddings,
- )
- return {"embeddings": embeddings}
+ )[0]
+ return {"embedding": embedding}
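
The patch above switches the embedder from batch mode to single-string mode: `run()` now takes one `text: str` and returns a single `embedding: List[float]` instead of a list of lists. A usage sketch of the new call shape (assuming the model can be downloaded and `warm_up()` succeeds in the environment):

```python
from haystack.preview.components.embedders.sentence_transformers_text_embedder import (
    SentenceTransformersTextEmbedder,
)

embedder = SentenceTransformersTextEmbedder()
embedder.warm_up()  # loads the sentence-transformers backend

# Old batch call (pre-patch): embedder.run(texts=["hello world"]) -> {"embeddings": [[...]]}
# New single-text call (post-patch):
result = embedder.run(text="hello world")
vector = result["embedding"]  # a single List[float]
```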
|
{"golden_diff": "diff --git a/haystack/preview/components/embedders/sentence_transformers_text_embedder.py b/haystack/preview/components/embedders/sentence_transformers_text_embedder.py\n--- a/haystack/preview/components/embedders/sentence_transformers_text_embedder.py\n+++ b/haystack/preview/components/embedders/sentence_transformers_text_embedder.py\n@@ -80,22 +80,22 @@\n model_name_or_path=self.model_name_or_path, device=self.device, use_auth_token=self.use_auth_token\n )\n \n- @component.output_types(embeddings=List[List[float]])\n- def run(self, texts: List[str]):\n- \"\"\"Embed a list of strings.\"\"\"\n- if not isinstance(texts, list) or not isinstance(texts[0], str):\n+ @component.output_types(embedding=List[float])\n+ def run(self, text: str):\n+ \"\"\"Embed a string.\"\"\"\n+ if not isinstance(text, str):\n raise TypeError(\n- \"SentenceTransformersTextEmbedder expects a list of strings as input.\"\n+ \"SentenceTransformersTextEmbedder expects a string as input.\"\n \"In case you want to embed a list of Documents, please use the SentenceTransformersDocumentEmbedder.\"\n )\n if not hasattr(self, \"embedding_backend\"):\n raise RuntimeError(\"The embedding model has not been loaded. Please call warm_up() before running.\")\n \n- texts_to_embed = [self.prefix + text + self.suffix for text in texts]\n- embeddings = self.embedding_backend.embed(\n- texts_to_embed,\n+ text_to_embed = self.prefix + text + self.suffix\n+ embedding = self.embedding_backend.embed(\n+ [text_to_embed],\n batch_size=self.batch_size,\n show_progress_bar=self.progress_bar,\n normalize_embeddings=self.normalize_embeddings,\n- )\n- return {\"embeddings\": embeddings}\n+ )[0]\n+ return {\"embedding\": embedding}\n", "issue": "Change `SentenceTransformersTextEmbedder` to non-batch mode\n\n", "before_files": [{"content": "from typing import List, Optional, Union, Dict, Any\n\nfrom haystack.preview import component, default_to_dict, default_from_dict\nfrom haystack.preview.embedding_backends.sentence_transformers_backend import (\n _SentenceTransformersEmbeddingBackendFactory,\n)\n\n\n@component\nclass SentenceTransformersTextEmbedder:\n \"\"\"\n A component for embedding strings using Sentence Transformers models.\n \"\"\"\n\n def __init__(\n self,\n model_name_or_path: str = \"sentence-transformers/all-mpnet-base-v2\",\n device: Optional[str] = None,\n use_auth_token: Union[bool, str, None] = None,\n prefix: str = \"\",\n suffix: str = \"\",\n batch_size: int = 32,\n progress_bar: bool = True,\n normalize_embeddings: bool = False,\n ):\n \"\"\"\n Create a SentenceTransformersTextEmbedder component.\n\n :param model_name_or_path: Local path or name of the model in Hugging Face's model hub, such as ``'sentence-transformers/all-mpnet-base-v2'``.\n :param device: Device (like 'cuda' / 'cpu') that should be used for computation. 
If None, checks if a GPU can be used.\n :param use_auth_token: The API token used to download private models from Hugging Face.\n If this parameter is set to `True`, then the token generated when running\n `transformers-cli login` (stored in ~/.huggingface) will be used.\n :param prefix: A string to add to the beginning of each text.\n :param suffix: A string to add to the end of each text.\n :param batch_size: Number of strings to encode at once.\n :param progress_bar: If true, displays progress bar during embedding.\n :param normalize_embeddings: If set to true, returned vectors will have length 1.\n \"\"\"\n\n self.model_name_or_path = model_name_or_path\n # TODO: remove device parameter and use Haystack's device management once migrated\n self.device = device or \"cpu\"\n self.use_auth_token = use_auth_token\n self.prefix = prefix\n self.suffix = suffix\n self.batch_size = batch_size\n self.progress_bar = progress_bar\n self.normalize_embeddings = normalize_embeddings\n\n def to_dict(self) -> Dict[str, Any]:\n \"\"\"\n Serialize this component to a dictionary.\n \"\"\"\n return default_to_dict(\n self,\n model_name_or_path=self.model_name_or_path,\n device=self.device,\n use_auth_token=self.use_auth_token,\n prefix=self.prefix,\n suffix=self.suffix,\n batch_size=self.batch_size,\n progress_bar=self.progress_bar,\n normalize_embeddings=self.normalize_embeddings,\n )\n\n @classmethod\n def from_dict(cls, data: Dict[str, Any]) -> \"SentenceTransformersTextEmbedder\":\n \"\"\"\n Deserialize this component from a dictionary.\n \"\"\"\n return default_from_dict(cls, data)\n\n def warm_up(self):\n \"\"\"\n Load the embedding backend.\n \"\"\"\n if not hasattr(self, \"embedding_backend\"):\n self.embedding_backend = _SentenceTransformersEmbeddingBackendFactory.get_embedding_backend(\n model_name_or_path=self.model_name_or_path, device=self.device, use_auth_token=self.use_auth_token\n )\n\n @component.output_types(embeddings=List[List[float]])\n def run(self, texts: List[str]):\n \"\"\"Embed a list of strings.\"\"\"\n if not isinstance(texts, list) or not isinstance(texts[0], str):\n raise TypeError(\n \"SentenceTransformersTextEmbedder expects a list of strings as input.\"\n \"In case you want to embed a list of Documents, please use the SentenceTransformersDocumentEmbedder.\"\n )\n if not hasattr(self, \"embedding_backend\"):\n raise RuntimeError(\"The embedding model has not been loaded. 
Please call warm_up() before running.\")\n\n texts_to_embed = [self.prefix + text + self.suffix for text in texts]\n embeddings = self.embedding_backend.embed(\n texts_to_embed,\n batch_size=self.batch_size,\n show_progress_bar=self.progress_bar,\n normalize_embeddings=self.normalize_embeddings,\n )\n return {\"embeddings\": embeddings}\n", "path": "haystack/preview/components/embedders/sentence_transformers_text_embedder.py"}], "after_files": [{"content": "from typing import List, Optional, Union, Dict, Any\n\nfrom haystack.preview import component, default_to_dict, default_from_dict\nfrom haystack.preview.embedding_backends.sentence_transformers_backend import (\n _SentenceTransformersEmbeddingBackendFactory,\n)\n\n\n@component\nclass SentenceTransformersTextEmbedder:\n \"\"\"\n A component for embedding strings using Sentence Transformers models.\n \"\"\"\n\n def __init__(\n self,\n model_name_or_path: str = \"sentence-transformers/all-mpnet-base-v2\",\n device: Optional[str] = None,\n use_auth_token: Union[bool, str, None] = None,\n prefix: str = \"\",\n suffix: str = \"\",\n batch_size: int = 32,\n progress_bar: bool = True,\n normalize_embeddings: bool = False,\n ):\n \"\"\"\n Create a SentenceTransformersTextEmbedder component.\n\n :param model_name_or_path: Local path or name of the model in Hugging Face's model hub, such as ``'sentence-transformers/all-mpnet-base-v2'``.\n :param device: Device (like 'cuda' / 'cpu') that should be used for computation. If None, checks if a GPU can be used.\n :param use_auth_token: The API token used to download private models from Hugging Face.\n If this parameter is set to `True`, then the token generated when running\n `transformers-cli login` (stored in ~/.huggingface) will be used.\n :param prefix: A string to add to the beginning of each text.\n :param suffix: A string to add to the end of each text.\n :param batch_size: Number of strings to encode at once.\n :param progress_bar: If true, displays progress bar during embedding.\n :param normalize_embeddings: If set to true, returned vectors will have length 1.\n \"\"\"\n\n self.model_name_or_path = model_name_or_path\n # TODO: remove device parameter and use Haystack's device management once migrated\n self.device = device or \"cpu\"\n self.use_auth_token = use_auth_token\n self.prefix = prefix\n self.suffix = suffix\n self.batch_size = batch_size\n self.progress_bar = progress_bar\n self.normalize_embeddings = normalize_embeddings\n\n def to_dict(self) -> Dict[str, Any]:\n \"\"\"\n Serialize this component to a dictionary.\n \"\"\"\n return default_to_dict(\n self,\n model_name_or_path=self.model_name_or_path,\n device=self.device,\n use_auth_token=self.use_auth_token,\n prefix=self.prefix,\n suffix=self.suffix,\n batch_size=self.batch_size,\n progress_bar=self.progress_bar,\n normalize_embeddings=self.normalize_embeddings,\n )\n\n @classmethod\n def from_dict(cls, data: Dict[str, Any]) -> \"SentenceTransformersTextEmbedder\":\n \"\"\"\n Deserialize this component from a dictionary.\n \"\"\"\n return default_from_dict(cls, data)\n\n def warm_up(self):\n \"\"\"\n Load the embedding backend.\n \"\"\"\n if not hasattr(self, \"embedding_backend\"):\n self.embedding_backend = _SentenceTransformersEmbeddingBackendFactory.get_embedding_backend(\n model_name_or_path=self.model_name_or_path, device=self.device, use_auth_token=self.use_auth_token\n )\n\n @component.output_types(embedding=List[float])\n def run(self, text: str):\n \"\"\"Embed a string.\"\"\"\n if not isinstance(text, str):\n raise 
TypeError(\n \"SentenceTransformersTextEmbedder expects a string as input.\"\n \"In case you want to embed a list of Documents, please use the SentenceTransformersDocumentEmbedder.\"\n )\n if not hasattr(self, \"embedding_backend\"):\n raise RuntimeError(\"The embedding model has not been loaded. Please call warm_up() before running.\")\n\n text_to_embed = self.prefix + text + self.suffix\n embedding = self.embedding_backend.embed(\n [text_to_embed],\n batch_size=self.batch_size,\n show_progress_bar=self.progress_bar,\n normalize_embeddings=self.normalize_embeddings,\n )[0]\n return {\"embedding\": embedding}\n", "path": "haystack/preview/components/embedders/sentence_transformers_text_embedder.py"}]}
| 1,367 | 412 |
gh_patches_debug_6821
|
rasdani/github-patches
|
git_diff
|
pallets__werkzeug-1741
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Local variables not available in debug console in version 1.0
Take this simple script:
```python
from werkzeug.wrappers import Request, Response

some_global_variable = True

@Request.application
def application(request):
    msg = 'Hello, World!'
    return Response(msg2)

if __name__ == '__main__':
    from werkzeug.serving import run_simple
    run_simple('localhost', 4000, application,
               use_reloader=True, use_debugger=True, use_evalex=True)
```
As expected, the application crashes when it tries to create the response because `msg2` doesn't exist. With version 0.16.1 and Python 3.8, I can view the value of the `msg` variable in the debug console. But if I upgrade to version 1.0, `msg` is no longer defined. Global variables are available, however. Here's a screenshot:

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/werkzeug/debug/console.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 """
3 werkzeug.debug.console
4 ~~~~~~~~~~~~~~~~~~~~~~
5
6 Interactive console support.
7
8 :copyright: 2007 Pallets
9 :license: BSD-3-Clause
10 """
11 import code
12 import sys
13 from types import CodeType
14
15 from ..local import Local
16 from ..utils import escape
17 from .repr import debug_repr
18 from .repr import dump
19 from .repr import helper
20
21
22 _local = Local()
23
24
25 class HTMLStringO(object):
26 """A StringO version that HTML escapes on write."""
27
28 def __init__(self):
29 self._buffer = []
30
31 def isatty(self):
32 return False
33
34 def close(self):
35 pass
36
37 def flush(self):
38 pass
39
40 def seek(self, n, mode=0):
41 pass
42
43 def readline(self):
44 if len(self._buffer) == 0:
45 return ""
46 ret = self._buffer[0]
47 del self._buffer[0]
48 return ret
49
50 def reset(self):
51 val = "".join(self._buffer)
52 del self._buffer[:]
53 return val
54
55 def _write(self, x):
56 if isinstance(x, bytes):
57 x = x.decode("utf-8", "replace")
58 self._buffer.append(x)
59
60 def write(self, x):
61 self._write(escape(x))
62
63 def writelines(self, x):
64 self._write(escape("".join(x)))
65
66
67 class ThreadedStream(object):
68 """Thread-local wrapper for sys.stdout for the interactive console."""
69
70 @staticmethod
71 def push():
72 if not isinstance(sys.stdout, ThreadedStream):
73 sys.stdout = ThreadedStream()
74 _local.stream = HTMLStringO()
75
76 @staticmethod
77 def fetch():
78 try:
79 stream = _local.stream
80 except AttributeError:
81 return ""
82 return stream.reset()
83
84 @staticmethod
85 def displayhook(obj):
86 try:
87 stream = _local.stream
88 except AttributeError:
89 return _displayhook(obj)
90 # stream._write bypasses escaping as debug_repr is
91 # already generating HTML for us.
92 if obj is not None:
93 _local._current_ipy.locals["_"] = obj
94 stream._write(debug_repr(obj))
95
96 def __setattr__(self, name, value):
97 raise AttributeError("read only attribute %s" % name)
98
99 def __dir__(self):
100 return dir(sys.__stdout__)
101
102 def __getattribute__(self, name):
103 if name == "__members__":
104 return dir(sys.__stdout__)
105 try:
106 stream = _local.stream
107 except AttributeError:
108 stream = sys.__stdout__
109 return getattr(stream, name)
110
111 def __repr__(self):
112 return repr(sys.__stdout__)
113
114
115 # add the threaded stream as display hook
116 _displayhook = sys.displayhook
117 sys.displayhook = ThreadedStream.displayhook
118
119
120 class _ConsoleLoader(object):
121 def __init__(self):
122 self._storage = {}
123
124 def register(self, code, source):
125 self._storage[id(code)] = source
126 # register code objects of wrapped functions too.
127 for var in code.co_consts:
128 if isinstance(var, CodeType):
129 self._storage[id(var)] = source
130
131 def get_source_by_code(self, code):
132 try:
133 return self._storage[id(code)]
134 except KeyError:
135 pass
136
137
138 def _wrap_compiler(console):
139 compile = console.compile
140
141 def func(source, filename, symbol):
142 code = compile(source, filename, symbol)
143 console.loader.register(code, source)
144 return code
145
146 console.compile = func
147
148
149 class _InteractiveConsole(code.InteractiveInterpreter):
150 def __init__(self, globals, locals):
151 locals = dict(globals)
152 locals.update(locals)
153 locals["dump"] = dump
154 locals["help"] = helper
155 locals["__loader__"] = self.loader = _ConsoleLoader()
156 code.InteractiveInterpreter.__init__(self, locals)
157 self.more = False
158 self.buffer = []
159 _wrap_compiler(self)
160
161 def runsource(self, source):
162 source = source.rstrip() + "\n"
163 ThreadedStream.push()
164 prompt = "... " if self.more else ">>> "
165 try:
166 source_to_eval = "".join(self.buffer + [source])
167 if code.InteractiveInterpreter.runsource(
168 self, source_to_eval, "<debugger>", "single"
169 ):
170 self.more = True
171 self.buffer.append(source)
172 else:
173 self.more = False
174 del self.buffer[:]
175 finally:
176 output = ThreadedStream.fetch()
177 return prompt + escape(source) + output
178
179 def runcode(self, code):
180 try:
181 exec(code, self.locals)
182 except Exception:
183 self.showtraceback()
184
185 def showtraceback(self):
186 from .tbtools import get_current_traceback
187
188 tb = get_current_traceback(skip=1)
189 sys.stdout._write(tb.render_summary())
190
191 def showsyntaxerror(self, filename=None):
192 from .tbtools import get_current_traceback
193
194 tb = get_current_traceback(skip=4)
195 sys.stdout._write(tb.render_summary())
196
197 def write(self, data):
198 sys.stdout.write(data)
199
200
201 class Console(object):
202 """An interactive console."""
203
204 def __init__(self, globals=None, locals=None):
205 if locals is None:
206 locals = {}
207 if globals is None:
208 globals = {}
209 self._ipy = _InteractiveConsole(globals, locals)
210
211 def eval(self, code):
212 _local._current_ipy = self._ipy
213 old_sys_stdout = sys.stdout
214 try:
215 return self._ipy.runsource(code)
216 finally:
217 sys.stdout = old_sys_stdout
218
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/werkzeug/debug/console.py b/src/werkzeug/debug/console.py
--- a/src/werkzeug/debug/console.py
+++ b/src/werkzeug/debug/console.py
@@ -148,8 +148,9 @@
class _InteractiveConsole(code.InteractiveInterpreter):
def __init__(self, globals, locals):
- locals = dict(globals)
- locals.update(locals)
+ _locals = dict(globals)
+ _locals.update(locals)
+ locals = _locals
locals["dump"] = dump
locals["help"] = helper
locals["__loader__"] = self.loader = _ConsoleLoader()
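
The two replaced lines explain the lost locals: in the old `__init__` the parameter `locals` was immediately rebound to a copy of `globals`, so the following `locals.update(locals)` merged the copy into itself and the frame's real locals never reached the console namespace. A standalone illustration of that shadowing, independent of Werkzeug:

```python
# The parameter names mirror the original signature on purpose.
def broken(globals, locals):
    locals = dict(globals)   # rebinds the name; the passed-in locals are lost
    locals.update(locals)    # updates the copy with itself, a no-op
    return locals

def fixed(globals, locals):
    merged = dict(globals)   # what the patch does via the _locals temporary
    merged.update(locals)
    return merged

print(broken({"some_global_variable": True}, {"msg": "Hello, World!"}))
# -> {'some_global_variable': True}                    ('msg' is missing)
print(fixed({"some_global_variable": True}, {"msg": "Hello, World!"}))
# -> {'some_global_variable': True, 'msg': 'Hello, World!'}
```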
|
{"golden_diff": "diff --git a/src/werkzeug/debug/console.py b/src/werkzeug/debug/console.py\n--- a/src/werkzeug/debug/console.py\n+++ b/src/werkzeug/debug/console.py\n@@ -148,8 +148,9 @@\n \n class _InteractiveConsole(code.InteractiveInterpreter):\n def __init__(self, globals, locals):\n- locals = dict(globals)\n- locals.update(locals)\n+ _locals = dict(globals)\n+ _locals.update(locals)\n+ locals = _locals\n locals[\"dump\"] = dump\n locals[\"help\"] = helper\n locals[\"__loader__\"] = self.loader = _ConsoleLoader()\n", "issue": "Local variables not available in debug console in version 1.0\nTake this simple script:\r\n\r\n```python\r\nfrom werkzeug.wrappers import Request, Response\r\n\r\nsome_global_variable = True\r\n\r\[email protected]\r\ndef application(request):\r\n msg = 'Hello, World!'\r\n return Response(msg2)\r\n\r\nif __name__ == '__main__':\r\n from werkzeug.serving import run_simple\r\n run_simple('localhost', 4000, application,\r\n use_reloader=True, use_debugger=True, use_evalex=True)\r\n```\r\n\r\nAs expected, the application crashes when it tries to create the response because `msg2` doesn't exist. With version 0.16.1 and Python 3.8, I can view the value of the `msg` variable in the debug console. But if I upgrade to version 1.0, `msg` is no longer defined. Global variables are available, however. Here's a screenshot:\r\n\r\n\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\n werkzeug.debug.console\n ~~~~~~~~~~~~~~~~~~~~~~\n\n Interactive console support.\n\n :copyright: 2007 Pallets\n :license: BSD-3-Clause\n\"\"\"\nimport code\nimport sys\nfrom types import CodeType\n\nfrom ..local import Local\nfrom ..utils import escape\nfrom .repr import debug_repr\nfrom .repr import dump\nfrom .repr import helper\n\n\n_local = Local()\n\n\nclass HTMLStringO(object):\n \"\"\"A StringO version that HTML escapes on write.\"\"\"\n\n def __init__(self):\n self._buffer = []\n\n def isatty(self):\n return False\n\n def close(self):\n pass\n\n def flush(self):\n pass\n\n def seek(self, n, mode=0):\n pass\n\n def readline(self):\n if len(self._buffer) == 0:\n return \"\"\n ret = self._buffer[0]\n del self._buffer[0]\n return ret\n\n def reset(self):\n val = \"\".join(self._buffer)\n del self._buffer[:]\n return val\n\n def _write(self, x):\n if isinstance(x, bytes):\n x = x.decode(\"utf-8\", \"replace\")\n self._buffer.append(x)\n\n def write(self, x):\n self._write(escape(x))\n\n def writelines(self, x):\n self._write(escape(\"\".join(x)))\n\n\nclass ThreadedStream(object):\n \"\"\"Thread-local wrapper for sys.stdout for the interactive console.\"\"\"\n\n @staticmethod\n def push():\n if not isinstance(sys.stdout, ThreadedStream):\n sys.stdout = ThreadedStream()\n _local.stream = HTMLStringO()\n\n @staticmethod\n def fetch():\n try:\n stream = _local.stream\n except AttributeError:\n return \"\"\n return stream.reset()\n\n @staticmethod\n def displayhook(obj):\n try:\n stream = _local.stream\n except AttributeError:\n return _displayhook(obj)\n # stream._write bypasses escaping as debug_repr is\n # already generating HTML for us.\n if obj is not None:\n _local._current_ipy.locals[\"_\"] = obj\n stream._write(debug_repr(obj))\n\n def __setattr__(self, name, value):\n raise AttributeError(\"read only attribute %s\" % name)\n\n def __dir__(self):\n return dir(sys.__stdout__)\n\n def __getattribute__(self, name):\n if name == \"__members__\":\n return dir(sys.__stdout__)\n try:\n stream = _local.stream\n except AttributeError:\n stream = sys.__stdout__\n return 
getattr(stream, name)\n\n def __repr__(self):\n return repr(sys.__stdout__)\n\n\n# add the threaded stream as display hook\n_displayhook = sys.displayhook\nsys.displayhook = ThreadedStream.displayhook\n\n\nclass _ConsoleLoader(object):\n def __init__(self):\n self._storage = {}\n\n def register(self, code, source):\n self._storage[id(code)] = source\n # register code objects of wrapped functions too.\n for var in code.co_consts:\n if isinstance(var, CodeType):\n self._storage[id(var)] = source\n\n def get_source_by_code(self, code):\n try:\n return self._storage[id(code)]\n except KeyError:\n pass\n\n\ndef _wrap_compiler(console):\n compile = console.compile\n\n def func(source, filename, symbol):\n code = compile(source, filename, symbol)\n console.loader.register(code, source)\n return code\n\n console.compile = func\n\n\nclass _InteractiveConsole(code.InteractiveInterpreter):\n def __init__(self, globals, locals):\n locals = dict(globals)\n locals.update(locals)\n locals[\"dump\"] = dump\n locals[\"help\"] = helper\n locals[\"__loader__\"] = self.loader = _ConsoleLoader()\n code.InteractiveInterpreter.__init__(self, locals)\n self.more = False\n self.buffer = []\n _wrap_compiler(self)\n\n def runsource(self, source):\n source = source.rstrip() + \"\\n\"\n ThreadedStream.push()\n prompt = \"... \" if self.more else \">>> \"\n try:\n source_to_eval = \"\".join(self.buffer + [source])\n if code.InteractiveInterpreter.runsource(\n self, source_to_eval, \"<debugger>\", \"single\"\n ):\n self.more = True\n self.buffer.append(source)\n else:\n self.more = False\n del self.buffer[:]\n finally:\n output = ThreadedStream.fetch()\n return prompt + escape(source) + output\n\n def runcode(self, code):\n try:\n exec(code, self.locals)\n except Exception:\n self.showtraceback()\n\n def showtraceback(self):\n from .tbtools import get_current_traceback\n\n tb = get_current_traceback(skip=1)\n sys.stdout._write(tb.render_summary())\n\n def showsyntaxerror(self, filename=None):\n from .tbtools import get_current_traceback\n\n tb = get_current_traceback(skip=4)\n sys.stdout._write(tb.render_summary())\n\n def write(self, data):\n sys.stdout.write(data)\n\n\nclass Console(object):\n \"\"\"An interactive console.\"\"\"\n\n def __init__(self, globals=None, locals=None):\n if locals is None:\n locals = {}\n if globals is None:\n globals = {}\n self._ipy = _InteractiveConsole(globals, locals)\n\n def eval(self, code):\n _local._current_ipy = self._ipy\n old_sys_stdout = sys.stdout\n try:\n return self._ipy.runsource(code)\n finally:\n sys.stdout = old_sys_stdout\n", "path": "src/werkzeug/debug/console.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\n werkzeug.debug.console\n ~~~~~~~~~~~~~~~~~~~~~~\n\n Interactive console support.\n\n :copyright: 2007 Pallets\n :license: BSD-3-Clause\n\"\"\"\nimport code\nimport sys\nfrom types import CodeType\n\nfrom ..local import Local\nfrom ..utils import escape\nfrom .repr import debug_repr\nfrom .repr import dump\nfrom .repr import helper\n\n\n_local = Local()\n\n\nclass HTMLStringO(object):\n \"\"\"A StringO version that HTML escapes on write.\"\"\"\n\n def __init__(self):\n self._buffer = []\n\n def isatty(self):\n return False\n\n def close(self):\n pass\n\n def flush(self):\n pass\n\n def seek(self, n, mode=0):\n pass\n\n def readline(self):\n if len(self._buffer) == 0:\n return \"\"\n ret = self._buffer[0]\n del self._buffer[0]\n return ret\n\n def reset(self):\n val = \"\".join(self._buffer)\n del self._buffer[:]\n return val\n\n def _write(self, 
x):\n if isinstance(x, bytes):\n x = x.decode(\"utf-8\", \"replace\")\n self._buffer.append(x)\n\n def write(self, x):\n self._write(escape(x))\n\n def writelines(self, x):\n self._write(escape(\"\".join(x)))\n\n\nclass ThreadedStream(object):\n \"\"\"Thread-local wrapper for sys.stdout for the interactive console.\"\"\"\n\n @staticmethod\n def push():\n if not isinstance(sys.stdout, ThreadedStream):\n sys.stdout = ThreadedStream()\n _local.stream = HTMLStringO()\n\n @staticmethod\n def fetch():\n try:\n stream = _local.stream\n except AttributeError:\n return \"\"\n return stream.reset()\n\n @staticmethod\n def displayhook(obj):\n try:\n stream = _local.stream\n except AttributeError:\n return _displayhook(obj)\n # stream._write bypasses escaping as debug_repr is\n # already generating HTML for us.\n if obj is not None:\n _local._current_ipy.locals[\"_\"] = obj\n stream._write(debug_repr(obj))\n\n def __setattr__(self, name, value):\n raise AttributeError(\"read only attribute %s\" % name)\n\n def __dir__(self):\n return dir(sys.__stdout__)\n\n def __getattribute__(self, name):\n if name == \"__members__\":\n return dir(sys.__stdout__)\n try:\n stream = _local.stream\n except AttributeError:\n stream = sys.__stdout__\n return getattr(stream, name)\n\n def __repr__(self):\n return repr(sys.__stdout__)\n\n\n# add the threaded stream as display hook\n_displayhook = sys.displayhook\nsys.displayhook = ThreadedStream.displayhook\n\n\nclass _ConsoleLoader(object):\n def __init__(self):\n self._storage = {}\n\n def register(self, code, source):\n self._storage[id(code)] = source\n # register code objects of wrapped functions too.\n for var in code.co_consts:\n if isinstance(var, CodeType):\n self._storage[id(var)] = source\n\n def get_source_by_code(self, code):\n try:\n return self._storage[id(code)]\n except KeyError:\n pass\n\n\ndef _wrap_compiler(console):\n compile = console.compile\n\n def func(source, filename, symbol):\n code = compile(source, filename, symbol)\n console.loader.register(code, source)\n return code\n\n console.compile = func\n\n\nclass _InteractiveConsole(code.InteractiveInterpreter):\n def __init__(self, globals, locals):\n _locals = dict(globals)\n _locals.update(locals)\n locals = _locals\n locals[\"dump\"] = dump\n locals[\"help\"] = helper\n locals[\"__loader__\"] = self.loader = _ConsoleLoader()\n code.InteractiveInterpreter.__init__(self, locals)\n self.more = False\n self.buffer = []\n _wrap_compiler(self)\n\n def runsource(self, source):\n source = source.rstrip() + \"\\n\"\n ThreadedStream.push()\n prompt = \"... 
\" if self.more else \">>> \"\n try:\n source_to_eval = \"\".join(self.buffer + [source])\n if code.InteractiveInterpreter.runsource(\n self, source_to_eval, \"<debugger>\", \"single\"\n ):\n self.more = True\n self.buffer.append(source)\n else:\n self.more = False\n del self.buffer[:]\n finally:\n output = ThreadedStream.fetch()\n return prompt + escape(source) + output\n\n def runcode(self, code):\n try:\n exec(code, self.locals)\n except Exception:\n self.showtraceback()\n\n def showtraceback(self):\n from .tbtools import get_current_traceback\n\n tb = get_current_traceback(skip=1)\n sys.stdout._write(tb.render_summary())\n\n def showsyntaxerror(self, filename=None):\n from .tbtools import get_current_traceback\n\n tb = get_current_traceback(skip=4)\n sys.stdout._write(tb.render_summary())\n\n def write(self, data):\n sys.stdout.write(data)\n\n\nclass Console(object):\n \"\"\"An interactive console.\"\"\"\n\n def __init__(self, globals=None, locals=None):\n if locals is None:\n locals = {}\n if globals is None:\n globals = {}\n self._ipy = _InteractiveConsole(globals, locals)\n\n def eval(self, code):\n _local._current_ipy = self._ipy\n old_sys_stdout = sys.stdout\n try:\n return self._ipy.runsource(code)\n finally:\n sys.stdout = old_sys_stdout\n", "path": "src/werkzeug/debug/console.py"}]}
| 2,307 | 145 |
gh_patches_debug_26818
|
rasdani/github-patches
|
git_diff
|
plone__Products.CMFPlone-3836
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Un-remove IContentish from site root class
On https://github.com/plone/Products.CMFPlone/blob/016459fd9d023017e9dc0a0b635bd66099826db1/Products/CMFPlone/Portal.py#L214 I remove IContentish, because having it broke tests and setup code in places that I didn't feel like touching.
Nowadays we have some test failures because of the removal. See https://github.com/plone/plone.restapi/pull/1674#issuecomment-1664193927 for additional details, but suffice to say that `conversation_view` is expected to exist on the site root, but isn't because it's bound to `IContentish`.
--- END ISSUE ---
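As an illustrative aside (not part of the original repository): a minimal zope.interface sketch of why subtracting `IContentish` from the class declaration breaks interface-bound lookups such as `conversation_view`. The interfaces below are simplified stand-ins for the real CMFCore/Plone ones, assuming only that `zope.interface` is installed.
```python
from zope.interface import Interface, implementer, implementedBy, classImplementsOnly


class IContentish(Interface):
    """Stand-in for Products.CMFCore.interfaces.IContentish."""


class ISiteRoot(Interface):
    """Stand-in for a second interface the class keeps."""


@implementer(IContentish, ISiteRoot)
class Site:
    pass


# Subtract IContentish from the class declaration, mirroring the linked Portal.py line.
classImplementsOnly(Site, implementedBy(Site) - IContentish)

site = Site()
# IContentish is no longer provided, so views/adapters registered for it
# (like conversation_view) stop matching on the site root.
print(IContentish.providedBy(site))  # False
print(ISiteRoot.providedBy(site))    # True
```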
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `Products/CMFPlone/Portal.py`
Content:
```
1 from AccessControl import ClassSecurityInfo
2 from AccessControl import Unauthorized
3 from AccessControl.class_init import InitializeClass
4 from Acquisition import aq_base
5 from ComputedAttribute import ComputedAttribute
6 from five.localsitemanager.registry import PersistentComponents
7 from OFS.ObjectManager import REPLACEABLE
8 from plone.base.interfaces.siteroot import IPloneSiteRoot
9 from plone.base.interfaces.syndication import ISyndicatable
10 from plone.base.permissions import AddPortalContent
11 from plone.base.permissions import AddPortalFolders
12 from plone.base.permissions import ListPortalMembers
13 from plone.base.permissions import ModifyPortalContent
14 from plone.base.permissions import ReplyToItem
15 from plone.base.permissions import View
16 from plone.dexterity.content import Container
17 from Products.CMFCore import permissions
18 from Products.CMFCore.interfaces import IContentish
19 from Products.CMFCore.interfaces import ISiteRoot
20 from Products.CMFCore.permissions import AccessContentsInformation
21 from Products.CMFCore.permissions import AddPortalMember
22 from Products.CMFCore.permissions import MailForgottenPassword
23 from Products.CMFCore.permissions import RequestReview
24 from Products.CMFCore.permissions import ReviewPortalContent
25 from Products.CMFCore.permissions import SetOwnPassword
26 from Products.CMFCore.permissions import SetOwnProperties
27 from Products.CMFCore.PortalFolder import PortalFolderBase
28 from Products.CMFCore.PortalObject import PortalObjectBase
29 from Products.CMFCore.Skinnable import SkinnableObjectManager
30 from Products.CMFCore.utils import _checkPermission
31 from Products.CMFCore.utils import getToolByName
32 from Products.CMFPlone import bbb
33 from Products.Five.component.interfaces import IObjectManagerSite
34 from zope.event import notify
35 from zope.interface import classImplementsOnly
36 from zope.interface import implementedBy
37 from zope.interface import implementer
38 from zope.interface.interfaces import ComponentLookupError
39 from zope.traversing.interfaces import BeforeTraverseEvent
40
41
42 if bbb.HAS_ZSERVER:
43 from webdav.NullResource import NullResource
44
45
46 @implementer(IPloneSiteRoot, ISiteRoot, ISyndicatable, IObjectManagerSite)
47 class PloneSite(Container, SkinnableObjectManager):
48 """The Plone site object."""
49
50 security = ClassSecurityInfo()
51 meta_type = portal_type = "Plone Site"
52
53 # Ensure certain attributes come from the correct base class.
54 _checkId = SkinnableObjectManager._checkId
55 manage_main = PortalFolderBase.manage_main
56
57 def __getattr__(self, name):
58 try:
59 # Try DX
60 return super().__getattr__(name)
61 except AttributeError:
62 # Check portal_skins
63 return SkinnableObjectManager.__getattr__(self, name)
64
65 def __setattr__(self, name, obj):
66 # handle re setting an item as an attribute
67 if self._tree is not None and name in self:
68 del self[name]
69 self[name] = obj
70 else:
71 super().__setattr__(name, obj)
72
73 def __delattr__(self, name):
74 try:
75 return super().__delattr__(name)
76 except AttributeError:
77 return self.__delitem__(name)
78
79 # Removes the 'Components Folder'
80
81 manage_options = Container.manage_options[:2] + Container.manage_options[3:]
82
83 __ac_permissions__ = (
84 (AccessContentsInformation, ()),
85 (AddPortalMember, ()),
86 (SetOwnPassword, ()),
87 (SetOwnProperties, ()),
88 (MailForgottenPassword, ()),
89 (RequestReview, ()),
90 (ReviewPortalContent, ()),
91 (AddPortalContent, ()),
92 (AddPortalFolders, ()),
93 (ListPortalMembers, ()),
94 (ReplyToItem, ()),
95 (View, ("isEffective",)),
96 (
97 ModifyPortalContent,
98 (
99 "manage_cutObjects",
100 "manage_pasteObjects",
101 "manage_renameForm",
102 "manage_renameObject",
103 "manage_renameObjects",
104 ),
105 ),
106 )
107
108 # Switch off ZMI ordering interface as it assumes a slightly
109 # different functionality
110 has_order_support = 0
111 management_page_charset = "utf-8"
112 _default_sort_key = "id"
113 _properties = (
114 {"id": "title", "type": "string", "mode": "w"},
115 {"id": "description", "type": "text", "mode": "w"},
116 )
117 title = ""
118 description = ""
119 icon = "misc_/CMFPlone/tool.gif"
120
121 # From PortalObjectBase
122 def __init__(self, id, title=""):
123 super().__init__(id, title=title)
124 components = PersistentComponents("++etc++site")
125 components.__parent__ = self
126 self.setSiteManager(components)
127
128 # From PortalObjectBase
129 def __before_publishing_traverse__(self, arg1, arg2=None):
130 """Pre-traversal hook."""
131 # XXX hack around a bug(?) in BeforeTraverse.MultiHook
132 REQUEST = arg2 or arg1
133
134 try:
135 notify(BeforeTraverseEvent(self, REQUEST))
136 except ComponentLookupError:
137 # allow ZMI access, even if the portal's site manager is missing
138 pass
139 self.setupCurrentSkin(REQUEST)
140
141 super().__before_publishing_traverse__(arg1, arg2)
142
143 # Concept from OFS.OrderSupport
144 @security.protected(permissions.AccessContentsInformation)
145 def tpValues(self):
146 # Return a list of subobjects, used by ZMI tree tag (and only there).
147 # see also https://github.com/plone/Products.CMFPlone/issues/3323
148 return sorted(
149 (
150 obj
151 for obj in self.objectValues()
152 if getattr(aq_base(obj), "isPrincipiaFolderish", False)
153 ),
154 key=lambda obj: obj.getId(),
155 )
156
157 def __browser_default__(self, request):
158 """Set default so we can return whatever we want instead
159 of index_html"""
160 return getToolByName(self, "plone_utils").browserDefault(self)
161
162 def index_html(self):
163 """Acquire if not present."""
164 request = getattr(self, "REQUEST", None)
165 if (
166 request is not None
167 and "REQUEST_METHOD" in request
168 and request.maybe_webdav_client
169 ):
170 method = request["REQUEST_METHOD"]
171 if bbb.HAS_ZSERVER and method in ("PUT",):
172 # Very likely a WebDAV client trying to create something
173 result = NullResource(self, "index_html")
174 setattr(result, "__replaceable__", REPLACEABLE)
175 return result
176 elif method not in ("GET", "HEAD", "POST"):
177 raise AttributeError("index_html")
178 # Acquire from skin.
179 _target = self.__getattr__("index_html")
180 result = aq_base(_target).__of__(self)
181 setattr(result, "__replaceable__", REPLACEABLE)
182 return result
183
184 index_html = ComputedAttribute(index_html, 1)
185
186 def manage_beforeDelete(self, container, item):
187 # Should send out an Event before Site is being deleted.
188 self.removal_inprogress = 1
189 PloneSite.inheritedAttribute("manage_beforeDelete")(self, container, item)
190
191 @security.protected(permissions.DeleteObjects)
192 def manage_delObjects(self, ids=None, REQUEST=None):
193 """We need to enforce security."""
194 if ids is None:
195 ids = []
196 if isinstance(ids, str):
197 ids = [ids]
198 for id in ids:
199 item = self._getOb(id)
200 if not _checkPermission(permissions.DeleteObjects, item):
201 raise Unauthorized("Do not have permissions to remove this object")
202 return PortalObjectBase.manage_delObjects(self, ids, REQUEST=REQUEST)
203
204 def view(self):
205 """Ensure that we get a plain view of the object, via a delegation to
206 __call__(), which is defined in BrowserDefaultMixin
207 """
208 return self()
209
210 @security.protected(permissions.AccessContentsInformation)
211 def folderlistingFolderContents(self, contentFilter=None):
212 """Calls listFolderContents in protected only by ACI so that
213 folder_listing can work without the List folder contents permission.
214
215 This is copied from Archetypes Basefolder and is needed by the
216 reference browser.
217 """
218 return self.listFolderContents(contentFilter)
219
220 def isEffective(self, date):
221 # Override DefaultDublinCoreImpl's test, since we are always viewable.
222 return 1
223
224
225 # Remove the IContentish interface so we don't listen to events that won't
226 # apply to the site root, ie handleUidAnnotationEvent
227 classImplementsOnly(PloneSite, implementedBy(PloneSite) - IContentish)
228
229 InitializeClass(PloneSite)
230
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/Products/CMFPlone/Portal.py b/Products/CMFPlone/Portal.py
--- a/Products/CMFPlone/Portal.py
+++ b/Products/CMFPlone/Portal.py
@@ -15,7 +15,6 @@
from plone.base.permissions import View
from plone.dexterity.content import Container
from Products.CMFCore import permissions
-from Products.CMFCore.interfaces import IContentish
from Products.CMFCore.interfaces import ISiteRoot
from Products.CMFCore.permissions import AccessContentsInformation
from Products.CMFCore.permissions import AddPortalMember
@@ -32,8 +31,6 @@
from Products.CMFPlone import bbb
from Products.Five.component.interfaces import IObjectManagerSite
from zope.event import notify
-from zope.interface import classImplementsOnly
-from zope.interface import implementedBy
from zope.interface import implementer
from zope.interface.interfaces import ComponentLookupError
from zope.traversing.interfaces import BeforeTraverseEvent
@@ -222,8 +219,4 @@
return 1
-# Remove the IContentish interface so we don't listen to events that won't
-# apply to the site root, ie handleUidAnnotationEvent
-classImplementsOnly(PloneSite, implementedBy(PloneSite) - IContentish)
-
InitializeClass(PloneSite)
|
{"golden_diff": "diff --git a/Products/CMFPlone/Portal.py b/Products/CMFPlone/Portal.py\n--- a/Products/CMFPlone/Portal.py\n+++ b/Products/CMFPlone/Portal.py\n@@ -15,7 +15,6 @@\n from plone.base.permissions import View\n from plone.dexterity.content import Container\n from Products.CMFCore import permissions\n-from Products.CMFCore.interfaces import IContentish\n from Products.CMFCore.interfaces import ISiteRoot\n from Products.CMFCore.permissions import AccessContentsInformation\n from Products.CMFCore.permissions import AddPortalMember\n@@ -32,8 +31,6 @@\n from Products.CMFPlone import bbb\n from Products.Five.component.interfaces import IObjectManagerSite\n from zope.event import notify\n-from zope.interface import classImplementsOnly\n-from zope.interface import implementedBy\n from zope.interface import implementer\n from zope.interface.interfaces import ComponentLookupError\n from zope.traversing.interfaces import BeforeTraverseEvent\n@@ -222,8 +219,4 @@\n return 1\n \n \n-# Remove the IContentish interface so we don't listen to events that won't\n-# apply to the site root, ie handleUidAnnotationEvent\n-classImplementsOnly(PloneSite, implementedBy(PloneSite) - IContentish)\n-\n InitializeClass(PloneSite)\n", "issue": "Un-remove IContentish from site root class\nOn https://github.com/plone/Products.CMFPlone/blob/016459fd9d023017e9dc0a0b635bd66099826db1/Products/CMFPlone/Portal.py#L214 I remove IContentish, because having it broke tests and setup code in places that I didn't feel like touching.\r\n\r\nNowadays we have some test failures because of the removal. See https://github.com/plone/plone.restapi/pull/1674#issuecomment-1664193927 for additional details, but suffice to say that `conversation_view` is expected to exist on the site root, but isn't because it's bound to `IContentish`.\n", "before_files": [{"content": "from AccessControl import ClassSecurityInfo\nfrom AccessControl import Unauthorized\nfrom AccessControl.class_init import InitializeClass\nfrom Acquisition import aq_base\nfrom ComputedAttribute import ComputedAttribute\nfrom five.localsitemanager.registry import PersistentComponents\nfrom OFS.ObjectManager import REPLACEABLE\nfrom plone.base.interfaces.siteroot import IPloneSiteRoot\nfrom plone.base.interfaces.syndication import ISyndicatable\nfrom plone.base.permissions import AddPortalContent\nfrom plone.base.permissions import AddPortalFolders\nfrom plone.base.permissions import ListPortalMembers\nfrom plone.base.permissions import ModifyPortalContent\nfrom plone.base.permissions import ReplyToItem\nfrom plone.base.permissions import View\nfrom plone.dexterity.content import Container\nfrom Products.CMFCore import permissions\nfrom Products.CMFCore.interfaces import IContentish\nfrom Products.CMFCore.interfaces import ISiteRoot\nfrom Products.CMFCore.permissions import AccessContentsInformation\nfrom Products.CMFCore.permissions import AddPortalMember\nfrom Products.CMFCore.permissions import MailForgottenPassword\nfrom Products.CMFCore.permissions import RequestReview\nfrom Products.CMFCore.permissions import ReviewPortalContent\nfrom Products.CMFCore.permissions import SetOwnPassword\nfrom Products.CMFCore.permissions import SetOwnProperties\nfrom Products.CMFCore.PortalFolder import PortalFolderBase\nfrom Products.CMFCore.PortalObject import PortalObjectBase\nfrom Products.CMFCore.Skinnable import SkinnableObjectManager\nfrom Products.CMFCore.utils import _checkPermission\nfrom Products.CMFCore.utils import getToolByName\nfrom Products.CMFPlone import bbb\nfrom 
Products.Five.component.interfaces import IObjectManagerSite\nfrom zope.event import notify\nfrom zope.interface import classImplementsOnly\nfrom zope.interface import implementedBy\nfrom zope.interface import implementer\nfrom zope.interface.interfaces import ComponentLookupError\nfrom zope.traversing.interfaces import BeforeTraverseEvent\n\n\nif bbb.HAS_ZSERVER:\n from webdav.NullResource import NullResource\n\n\n@implementer(IPloneSiteRoot, ISiteRoot, ISyndicatable, IObjectManagerSite)\nclass PloneSite(Container, SkinnableObjectManager):\n \"\"\"The Plone site object.\"\"\"\n\n security = ClassSecurityInfo()\n meta_type = portal_type = \"Plone Site\"\n\n # Ensure certain attributes come from the correct base class.\n _checkId = SkinnableObjectManager._checkId\n manage_main = PortalFolderBase.manage_main\n\n def __getattr__(self, name):\n try:\n # Try DX\n return super().__getattr__(name)\n except AttributeError:\n # Check portal_skins\n return SkinnableObjectManager.__getattr__(self, name)\n\n def __setattr__(self, name, obj):\n # handle re setting an item as an attribute\n if self._tree is not None and name in self:\n del self[name]\n self[name] = obj\n else:\n super().__setattr__(name, obj)\n\n def __delattr__(self, name):\n try:\n return super().__delattr__(name)\n except AttributeError:\n return self.__delitem__(name)\n\n # Removes the 'Components Folder'\n\n manage_options = Container.manage_options[:2] + Container.manage_options[3:]\n\n __ac_permissions__ = (\n (AccessContentsInformation, ()),\n (AddPortalMember, ()),\n (SetOwnPassword, ()),\n (SetOwnProperties, ()),\n (MailForgottenPassword, ()),\n (RequestReview, ()),\n (ReviewPortalContent, ()),\n (AddPortalContent, ()),\n (AddPortalFolders, ()),\n (ListPortalMembers, ()),\n (ReplyToItem, ()),\n (View, (\"isEffective\",)),\n (\n ModifyPortalContent,\n (\n \"manage_cutObjects\",\n \"manage_pasteObjects\",\n \"manage_renameForm\",\n \"manage_renameObject\",\n \"manage_renameObjects\",\n ),\n ),\n )\n\n # Switch off ZMI ordering interface as it assumes a slightly\n # different functionality\n has_order_support = 0\n management_page_charset = \"utf-8\"\n _default_sort_key = \"id\"\n _properties = (\n {\"id\": \"title\", \"type\": \"string\", \"mode\": \"w\"},\n {\"id\": \"description\", \"type\": \"text\", \"mode\": \"w\"},\n )\n title = \"\"\n description = \"\"\n icon = \"misc_/CMFPlone/tool.gif\"\n\n # From PortalObjectBase\n def __init__(self, id, title=\"\"):\n super().__init__(id, title=title)\n components = PersistentComponents(\"++etc++site\")\n components.__parent__ = self\n self.setSiteManager(components)\n\n # From PortalObjectBase\n def __before_publishing_traverse__(self, arg1, arg2=None):\n \"\"\"Pre-traversal hook.\"\"\"\n # XXX hack around a bug(?) 
in BeforeTraverse.MultiHook\n REQUEST = arg2 or arg1\n\n try:\n notify(BeforeTraverseEvent(self, REQUEST))\n except ComponentLookupError:\n # allow ZMI access, even if the portal's site manager is missing\n pass\n self.setupCurrentSkin(REQUEST)\n\n super().__before_publishing_traverse__(arg1, arg2)\n\n # Concept from OFS.OrderSupport\n @security.protected(permissions.AccessContentsInformation)\n def tpValues(self):\n # Return a list of subobjects, used by ZMI tree tag (and only there).\n # see also https://github.com/plone/Products.CMFPlone/issues/3323\n return sorted(\n (\n obj\n for obj in self.objectValues()\n if getattr(aq_base(obj), \"isPrincipiaFolderish\", False)\n ),\n key=lambda obj: obj.getId(),\n )\n\n def __browser_default__(self, request):\n \"\"\"Set default so we can return whatever we want instead\n of index_html\"\"\"\n return getToolByName(self, \"plone_utils\").browserDefault(self)\n\n def index_html(self):\n \"\"\"Acquire if not present.\"\"\"\n request = getattr(self, \"REQUEST\", None)\n if (\n request is not None\n and \"REQUEST_METHOD\" in request\n and request.maybe_webdav_client\n ):\n method = request[\"REQUEST_METHOD\"]\n if bbb.HAS_ZSERVER and method in (\"PUT\",):\n # Very likely a WebDAV client trying to create something\n result = NullResource(self, \"index_html\")\n setattr(result, \"__replaceable__\", REPLACEABLE)\n return result\n elif method not in (\"GET\", \"HEAD\", \"POST\"):\n raise AttributeError(\"index_html\")\n # Acquire from skin.\n _target = self.__getattr__(\"index_html\")\n result = aq_base(_target).__of__(self)\n setattr(result, \"__replaceable__\", REPLACEABLE)\n return result\n\n index_html = ComputedAttribute(index_html, 1)\n\n def manage_beforeDelete(self, container, item):\n # Should send out an Event before Site is being deleted.\n self.removal_inprogress = 1\n PloneSite.inheritedAttribute(\"manage_beforeDelete\")(self, container, item)\n\n @security.protected(permissions.DeleteObjects)\n def manage_delObjects(self, ids=None, REQUEST=None):\n \"\"\"We need to enforce security.\"\"\"\n if ids is None:\n ids = []\n if isinstance(ids, str):\n ids = [ids]\n for id in ids:\n item = self._getOb(id)\n if not _checkPermission(permissions.DeleteObjects, item):\n raise Unauthorized(\"Do not have permissions to remove this object\")\n return PortalObjectBase.manage_delObjects(self, ids, REQUEST=REQUEST)\n\n def view(self):\n \"\"\"Ensure that we get a plain view of the object, via a delegation to\n __call__(), which is defined in BrowserDefaultMixin\n \"\"\"\n return self()\n\n @security.protected(permissions.AccessContentsInformation)\n def folderlistingFolderContents(self, contentFilter=None):\n \"\"\"Calls listFolderContents in protected only by ACI so that\n folder_listing can work without the List folder contents permission.\n\n This is copied from Archetypes Basefolder and is needed by the\n reference browser.\n \"\"\"\n return self.listFolderContents(contentFilter)\n\n def isEffective(self, date):\n # Override DefaultDublinCoreImpl's test, since we are always viewable.\n return 1\n\n\n# Remove the IContentish interface so we don't listen to events that won't\n# apply to the site root, ie handleUidAnnotationEvent\nclassImplementsOnly(PloneSite, implementedBy(PloneSite) - IContentish)\n\nInitializeClass(PloneSite)\n", "path": "Products/CMFPlone/Portal.py"}], "after_files": [{"content": "from AccessControl import ClassSecurityInfo\nfrom AccessControl import Unauthorized\nfrom AccessControl.class_init import InitializeClass\nfrom Acquisition 
import aq_base\nfrom ComputedAttribute import ComputedAttribute\nfrom five.localsitemanager.registry import PersistentComponents\nfrom OFS.ObjectManager import REPLACEABLE\nfrom plone.base.interfaces.siteroot import IPloneSiteRoot\nfrom plone.base.interfaces.syndication import ISyndicatable\nfrom plone.base.permissions import AddPortalContent\nfrom plone.base.permissions import AddPortalFolders\nfrom plone.base.permissions import ListPortalMembers\nfrom plone.base.permissions import ModifyPortalContent\nfrom plone.base.permissions import ReplyToItem\nfrom plone.base.permissions import View\nfrom plone.dexterity.content import Container\nfrom Products.CMFCore import permissions\nfrom Products.CMFCore.interfaces import ISiteRoot\nfrom Products.CMFCore.permissions import AccessContentsInformation\nfrom Products.CMFCore.permissions import AddPortalMember\nfrom Products.CMFCore.permissions import MailForgottenPassword\nfrom Products.CMFCore.permissions import RequestReview\nfrom Products.CMFCore.permissions import ReviewPortalContent\nfrom Products.CMFCore.permissions import SetOwnPassword\nfrom Products.CMFCore.permissions import SetOwnProperties\nfrom Products.CMFCore.PortalFolder import PortalFolderBase\nfrom Products.CMFCore.PortalObject import PortalObjectBase\nfrom Products.CMFCore.Skinnable import SkinnableObjectManager\nfrom Products.CMFCore.utils import _checkPermission\nfrom Products.CMFCore.utils import getToolByName\nfrom Products.CMFPlone import bbb\nfrom Products.Five.component.interfaces import IObjectManagerSite\nfrom zope.event import notify\nfrom zope.interface import implementer\nfrom zope.interface.interfaces import ComponentLookupError\nfrom zope.traversing.interfaces import BeforeTraverseEvent\n\n\nif bbb.HAS_ZSERVER:\n from webdav.NullResource import NullResource\n\n\n@implementer(IPloneSiteRoot, ISiteRoot, ISyndicatable, IObjectManagerSite)\nclass PloneSite(Container, SkinnableObjectManager):\n \"\"\"The Plone site object.\"\"\"\n\n security = ClassSecurityInfo()\n meta_type = portal_type = \"Plone Site\"\n\n # Ensure certain attributes come from the correct base class.\n _checkId = SkinnableObjectManager._checkId\n manage_main = PortalFolderBase.manage_main\n\n def __getattr__(self, name):\n try:\n # Try DX\n return super().__getattr__(name)\n except AttributeError:\n # Check portal_skins\n return SkinnableObjectManager.__getattr__(self, name)\n\n def __setattr__(self, name, obj):\n # handle re setting an item as an attribute\n if self._tree is not None and name in self:\n del self[name]\n self[name] = obj\n else:\n super().__setattr__(name, obj)\n\n def __delattr__(self, name):\n try:\n return super().__delattr__(name)\n except AttributeError:\n return self.__delitem__(name)\n\n # Removes the 'Components Folder'\n\n manage_options = Container.manage_options[:2] + Container.manage_options[3:]\n\n __ac_permissions__ = (\n (AccessContentsInformation, ()),\n (AddPortalMember, ()),\n (SetOwnPassword, ()),\n (SetOwnProperties, ()),\n (MailForgottenPassword, ()),\n (RequestReview, ()),\n (ReviewPortalContent, ()),\n (AddPortalContent, ()),\n (AddPortalFolders, ()),\n (ListPortalMembers, ()),\n (ReplyToItem, ()),\n (View, (\"isEffective\",)),\n (\n ModifyPortalContent,\n (\n \"manage_cutObjects\",\n \"manage_pasteObjects\",\n \"manage_renameForm\",\n \"manage_renameObject\",\n \"manage_renameObjects\",\n ),\n ),\n )\n\n # Switch off ZMI ordering interface as it assumes a slightly\n # different functionality\n has_order_support = 0\n management_page_charset = \"utf-8\"\n 
_default_sort_key = \"id\"\n _properties = (\n {\"id\": \"title\", \"type\": \"string\", \"mode\": \"w\"},\n {\"id\": \"description\", \"type\": \"text\", \"mode\": \"w\"},\n )\n title = \"\"\n description = \"\"\n icon = \"misc_/CMFPlone/tool.gif\"\n\n # From PortalObjectBase\n def __init__(self, id, title=\"\"):\n super().__init__(id, title=title)\n components = PersistentComponents(\"++etc++site\")\n components.__parent__ = self\n self.setSiteManager(components)\n\n # From PortalObjectBase\n def __before_publishing_traverse__(self, arg1, arg2=None):\n \"\"\"Pre-traversal hook.\"\"\"\n # XXX hack around a bug(?) in BeforeTraverse.MultiHook\n REQUEST = arg2 or arg1\n\n try:\n notify(BeforeTraverseEvent(self, REQUEST))\n except ComponentLookupError:\n # allow ZMI access, even if the portal's site manager is missing\n pass\n self.setupCurrentSkin(REQUEST)\n\n super().__before_publishing_traverse__(arg1, arg2)\n\n # Concept from OFS.OrderSupport\n @security.protected(permissions.AccessContentsInformation)\n def tpValues(self):\n # Return a list of subobjects, used by ZMI tree tag (and only there).\n # see also https://github.com/plone/Products.CMFPlone/issues/3323\n return sorted(\n (\n obj\n for obj in self.objectValues()\n if getattr(aq_base(obj), \"isPrincipiaFolderish\", False)\n ),\n key=lambda obj: obj.getId(),\n )\n\n def __browser_default__(self, request):\n \"\"\"Set default so we can return whatever we want instead\n of index_html\"\"\"\n return getToolByName(self, \"plone_utils\").browserDefault(self)\n\n def index_html(self):\n \"\"\"Acquire if not present.\"\"\"\n request = getattr(self, \"REQUEST\", None)\n if (\n request is not None\n and \"REQUEST_METHOD\" in request\n and request.maybe_webdav_client\n ):\n method = request[\"REQUEST_METHOD\"]\n if bbb.HAS_ZSERVER and method in (\"PUT\",):\n # Very likely a WebDAV client trying to create something\n result = NullResource(self, \"index_html\")\n setattr(result, \"__replaceable__\", REPLACEABLE)\n return result\n elif method not in (\"GET\", \"HEAD\", \"POST\"):\n raise AttributeError(\"index_html\")\n # Acquire from skin.\n _target = self.__getattr__(\"index_html\")\n result = aq_base(_target).__of__(self)\n setattr(result, \"__replaceable__\", REPLACEABLE)\n return result\n\n index_html = ComputedAttribute(index_html, 1)\n\n def manage_beforeDelete(self, container, item):\n # Should send out an Event before Site is being deleted.\n self.removal_inprogress = 1\n PloneSite.inheritedAttribute(\"manage_beforeDelete\")(self, container, item)\n\n @security.protected(permissions.DeleteObjects)\n def manage_delObjects(self, ids=None, REQUEST=None):\n \"\"\"We need to enforce security.\"\"\"\n if ids is None:\n ids = []\n if isinstance(ids, str):\n ids = [ids]\n for id in ids:\n item = self._getOb(id)\n if not _checkPermission(permissions.DeleteObjects, item):\n raise Unauthorized(\"Do not have permissions to remove this object\")\n return PortalObjectBase.manage_delObjects(self, ids, REQUEST=REQUEST)\n\n def view(self):\n \"\"\"Ensure that we get a plain view of the object, via a delegation to\n __call__(), which is defined in BrowserDefaultMixin\n \"\"\"\n return self()\n\n @security.protected(permissions.AccessContentsInformation)\n def folderlistingFolderContents(self, contentFilter=None):\n \"\"\"Calls listFolderContents in protected only by ACI so that\n folder_listing can work without the List folder contents permission.\n\n This is copied from Archetypes Basefolder and is needed by the\n reference browser.\n \"\"\"\n return 
self.listFolderContents(contentFilter)\n\n def isEffective(self, date):\n # Override DefaultDublinCoreImpl's test, since we are always viewable.\n return 1\n\n\nInitializeClass(PloneSite)\n", "path": "Products/CMFPlone/Portal.py"}]}
| 2,914 | 305 |
gh_patches_debug_6558
|
rasdani/github-patches
|
git_diff
|
strawberry-graphql__strawberry-615
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
strawberry.utils.typing.get_optional_annotation fails when provided an `Optional[Union]`
`strawberry.utils.typing.get_optional_annotation` fails when provided an `Optional[Union]`
```pycon
>>> from typing import Optional, Union
>>> from strawberry.utils.typing import get_optional_annotation
>>> get_optional_annotation(Optional[Union[int, str]])
<class 'int'>
```
This should return `Union[int, str]` instead
--- END ISSUE ---
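As an illustrative aside (not the library's own code): a REPL-style sketch of the reported failure and of the `copy_with`-based repair that the accepted patch further below uses. It relies on private `typing` internals (`__args__`, `copy_with`), so treat it as a demonstration only.
```python
from typing import Optional, Union

annotation = Optional[Union[int, str]]   # typing flattens this to Union[int, str, None]
args = annotation.__args__               # (int, str, NoneType)
non_none = tuple(a for a in args if a is not type(None))

# Taking only the first non-None argument reproduces the bug described above.
buggy_result = non_none[0]
# Rebuilding the Union when several arguments remain preserves the full type.
fixed_result = annotation.copy_with(non_none) if len(non_none) > 1 else non_none[0]

print(buggy_result)   # <class 'int'>
print(fixed_result)   # typing.Union[int, str]
```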
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `strawberry/utils/typing.py`
Content:
```
1 import typing
2 from collections.abc import AsyncGenerator, Callable
3 from typing import Type, TypeVar
4
5
6 try:
7 from typing import ForwardRef # type: ignore
8 except ImportError: # pragma: no cover
9 # ForwardRef is private in python 3.6 and 3.7
10 from typing import _ForwardRef as ForwardRef # type: ignore
11
12
13 def is_list(annotation: Type) -> bool:
14 """Returns True if annotation is a List"""
15
16 annotation_origin = getattr(annotation, "__origin__", None)
17
18 return annotation_origin == list
19
20
21 def is_union(annotation: Type) -> bool:
22 """Returns True if annotation is a Union"""
23
24 annotation_origin = getattr(annotation, "__origin__", None)
25
26 return annotation_origin == typing.Union
27
28
29 def is_optional(annotation: Type) -> bool:
30 """Returns True if the annotation is Optional[SomeType]"""
31
32 # Optionals are represented as unions
33
34 if not is_union(annotation):
35 return False
36
37 types = annotation.__args__
38
39 # A Union to be optional needs to have at least one None type
40 return any([x == None.__class__ for x in types]) # noqa:E711
41
42
43 def get_optional_annotation(annotation: Type) -> Type:
44 types = annotation.__args__
45 non_none_types = [x for x in types if x != None.__class__] # noqa:E711
46
47 return non_none_types[0]
48
49
50 def get_list_annotation(annotation: Type) -> Type:
51 return annotation.__args__[0]
52
53
54 def is_async_generator(annotation: Type) -> bool:
55 return getattr(annotation, "__origin__", None) == AsyncGenerator
56
57
58 def get_async_generator_annotation(annotation: Type) -> Type:
59 return annotation.__args__[0]
60
61
62 def is_generic(annotation: Type) -> bool:
63 """Returns True if the annotation is or extends a generic."""
64 return (
65 isinstance(annotation, type)
66 and issubclass(annotation, typing.Generic) # type:ignore
67 or isinstance(annotation, typing._GenericAlias) # type:ignore
68 and annotation.__origin__
69 not in (
70 list,
71 typing.Union,
72 tuple,
73 typing.ClassVar,
74 AsyncGenerator,
75 )
76 )
77
78
79 def is_type_var(annotation: Type) -> bool:
80 """Returns True if the annotation is a TypeVar."""
81
82 return isinstance(annotation, TypeVar) # type:ignore
83
84
85 def has_type_var(annotation: Type) -> bool:
86 """
87 Returns True if the annotation or any of
88 its argument have a TypeVar as argument.
89 """
90 return any(
91 is_type_var(arg) or has_type_var(arg)
92 for arg in getattr(annotation, "__args__", [])
93 )
94
95
96 def get_parameters(annotation: Type):
97 if (
98 isinstance(annotation, typing._GenericAlias) # type:ignore
99 or isinstance(annotation, type)
100 and issubclass(annotation, typing.Generic) # type:ignore
101 and annotation is not typing.Generic
102 ):
103 return annotation.__parameters__
104 else:
105 return () # pragma: no cover
106
107
108 def get_origin(annotation: Type):
109 if isinstance(annotation, typing._GenericAlias): # type:ignore
110 return (
111 annotation.__origin__
112 if annotation.__origin__ is not typing.ClassVar
113 else None
114 )
115
116 if annotation is typing.Generic: # pragma: no cover
117 return typing.Generic
118
119 return None # pragma: no cover
120
121
122 def get_args(annotation: Type):
123 if isinstance(annotation, typing._GenericAlias): # type:ignore
124 res = annotation.__args__
125
126 if (
127 get_origin(annotation) is Callable and res[0] is not Ellipsis
128 ): # pragma: no cover
129 res = (list(res[:-1]), res[-1])
130
131 return res
132
133 return ()
134
135
136 def is_forward_ref(annotation: Type) -> bool:
137 return isinstance(annotation, ForwardRef)
138
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/strawberry/utils/typing.py b/strawberry/utils/typing.py
--- a/strawberry/utils/typing.py
+++ b/strawberry/utils/typing.py
@@ -42,7 +42,14 @@
def get_optional_annotation(annotation: Type) -> Type:
types = annotation.__args__
- non_none_types = [x for x in types if x != None.__class__] # noqa:E711
+
+ non_none_types = tuple(x for x in types if x != None.__class__) # noqa:E711
+
+ # if we have multiple non none types we want to return a copy of this
+ # type (normally a Union type).
+
+ if len(non_none_types) > 1:
+ return annotation.copy_with(non_none_types)
return non_none_types[0]
|
{"golden_diff": "diff --git a/strawberry/utils/typing.py b/strawberry/utils/typing.py\n--- a/strawberry/utils/typing.py\n+++ b/strawberry/utils/typing.py\n@@ -42,7 +42,14 @@\n \n def get_optional_annotation(annotation: Type) -> Type:\n types = annotation.__args__\n- non_none_types = [x for x in types if x != None.__class__] # noqa:E711\n+\n+ non_none_types = tuple(x for x in types if x != None.__class__) # noqa:E711\n+\n+ # if we have multiple non none types we want to return a copy of this\n+ # type (normally a Union type).\n+\n+ if len(non_none_types) > 1:\n+ return annotation.copy_with(non_none_types)\n \n return non_none_types[0]\n", "issue": "strawberry.utils.typing.get_optional_annotation fails when provided an `Optional[Union]`\n`strawberry.utils.typing.get_optional_annotation` fails when provided an `Optional[Union]`\r\n\r\n```pycon\r\n>>> from typing import Optional, Union\r\n>>> from strawberry.utils.typing import get_optional_annotation\r\n\r\n>>> get_optional_annotation(Optional[Union[int, str]])\r\n<class 'int'>\r\n```\r\nThis should return `Union[int, str]` instead \n", "before_files": [{"content": "import typing\nfrom collections.abc import AsyncGenerator, Callable\nfrom typing import Type, TypeVar\n\n\ntry:\n from typing import ForwardRef # type: ignore\nexcept ImportError: # pragma: no cover\n # ForwardRef is private in python 3.6 and 3.7\n from typing import _ForwardRef as ForwardRef # type: ignore\n\n\ndef is_list(annotation: Type) -> bool:\n \"\"\"Returns True if annotation is a List\"\"\"\n\n annotation_origin = getattr(annotation, \"__origin__\", None)\n\n return annotation_origin == list\n\n\ndef is_union(annotation: Type) -> bool:\n \"\"\"Returns True if annotation is a Union\"\"\"\n\n annotation_origin = getattr(annotation, \"__origin__\", None)\n\n return annotation_origin == typing.Union\n\n\ndef is_optional(annotation: Type) -> bool:\n \"\"\"Returns True if the annotation is Optional[SomeType]\"\"\"\n\n # Optionals are represented as unions\n\n if not is_union(annotation):\n return False\n\n types = annotation.__args__\n\n # A Union to be optional needs to have at least one None type\n return any([x == None.__class__ for x in types]) # noqa:E711\n\n\ndef get_optional_annotation(annotation: Type) -> Type:\n types = annotation.__args__\n non_none_types = [x for x in types if x != None.__class__] # noqa:E711\n\n return non_none_types[0]\n\n\ndef get_list_annotation(annotation: Type) -> Type:\n return annotation.__args__[0]\n\n\ndef is_async_generator(annotation: Type) -> bool:\n return getattr(annotation, \"__origin__\", None) == AsyncGenerator\n\n\ndef get_async_generator_annotation(annotation: Type) -> Type:\n return annotation.__args__[0]\n\n\ndef is_generic(annotation: Type) -> bool:\n \"\"\"Returns True if the annotation is or extends a generic.\"\"\"\n return (\n isinstance(annotation, type)\n and issubclass(annotation, typing.Generic) # type:ignore\n or isinstance(annotation, typing._GenericAlias) # type:ignore\n and annotation.__origin__\n not in (\n list,\n typing.Union,\n tuple,\n typing.ClassVar,\n AsyncGenerator,\n )\n )\n\n\ndef is_type_var(annotation: Type) -> bool:\n \"\"\"Returns True if the annotation is a TypeVar.\"\"\"\n\n return isinstance(annotation, TypeVar) # type:ignore\n\n\ndef has_type_var(annotation: Type) -> bool:\n \"\"\"\n Returns True if the annotation or any of\n its argument have a TypeVar as argument.\n \"\"\"\n return any(\n is_type_var(arg) or has_type_var(arg)\n for arg in getattr(annotation, \"__args__\", [])\n )\n\n\ndef 
get_parameters(annotation: Type):\n if (\n isinstance(annotation, typing._GenericAlias) # type:ignore\n or isinstance(annotation, type)\n and issubclass(annotation, typing.Generic) # type:ignore\n and annotation is not typing.Generic\n ):\n return annotation.__parameters__\n else:\n return () # pragma: no cover\n\n\ndef get_origin(annotation: Type):\n if isinstance(annotation, typing._GenericAlias): # type:ignore\n return (\n annotation.__origin__\n if annotation.__origin__ is not typing.ClassVar\n else None\n )\n\n if annotation is typing.Generic: # pragma: no cover\n return typing.Generic\n\n return None # pragma: no cover\n\n\ndef get_args(annotation: Type):\n if isinstance(annotation, typing._GenericAlias): # type:ignore\n res = annotation.__args__\n\n if (\n get_origin(annotation) is Callable and res[0] is not Ellipsis\n ): # pragma: no cover\n res = (list(res[:-1]), res[-1])\n\n return res\n\n return ()\n\n\ndef is_forward_ref(annotation: Type) -> bool:\n return isinstance(annotation, ForwardRef)\n", "path": "strawberry/utils/typing.py"}], "after_files": [{"content": "import typing\nfrom collections.abc import AsyncGenerator, Callable\nfrom typing import Type, TypeVar\n\n\ntry:\n from typing import ForwardRef # type: ignore\nexcept ImportError: # pragma: no cover\n # ForwardRef is private in python 3.6 and 3.7\n from typing import _ForwardRef as ForwardRef # type: ignore\n\n\ndef is_list(annotation: Type) -> bool:\n \"\"\"Returns True if annotation is a List\"\"\"\n\n annotation_origin = getattr(annotation, \"__origin__\", None)\n\n return annotation_origin == list\n\n\ndef is_union(annotation: Type) -> bool:\n \"\"\"Returns True if annotation is a Union\"\"\"\n\n annotation_origin = getattr(annotation, \"__origin__\", None)\n\n return annotation_origin == typing.Union\n\n\ndef is_optional(annotation: Type) -> bool:\n \"\"\"Returns True if the annotation is Optional[SomeType]\"\"\"\n\n # Optionals are represented as unions\n\n if not is_union(annotation):\n return False\n\n types = annotation.__args__\n\n # A Union to be optional needs to have at least one None type\n return any([x == None.__class__ for x in types]) # noqa:E711\n\n\ndef get_optional_annotation(annotation: Type) -> Type:\n types = annotation.__args__\n\n non_none_types = tuple(x for x in types if x != None.__class__) # noqa:E711\n\n # if we have multiple non none types we want to return a copy of this\n # type (normally a Union type).\n\n if len(non_none_types) > 1:\n return annotation.copy_with(non_none_types)\n\n return non_none_types[0]\n\n\ndef get_list_annotation(annotation: Type) -> Type:\n return annotation.__args__[0]\n\n\ndef is_async_generator(annotation: Type) -> bool:\n return getattr(annotation, \"__origin__\", None) == AsyncGenerator\n\n\ndef get_async_generator_annotation(annotation: Type) -> Type:\n return annotation.__args__[0]\n\n\ndef is_generic(annotation: Type) -> bool:\n \"\"\"Returns True if the annotation is or extends a generic.\"\"\"\n return (\n isinstance(annotation, type)\n and issubclass(annotation, typing.Generic) # type:ignore\n or isinstance(annotation, typing._GenericAlias) # type:ignore\n and annotation.__origin__\n not in (\n list,\n typing.Union,\n tuple,\n typing.ClassVar,\n AsyncGenerator,\n )\n )\n\n\ndef is_type_var(annotation: Type) -> bool:\n \"\"\"Returns True if the annotation is a TypeVar.\"\"\"\n\n return isinstance(annotation, TypeVar) # type:ignore\n\n\ndef has_type_var(annotation: Type) -> bool:\n \"\"\"\n Returns True if the annotation or any of\n its argument have a 
TypeVar as argument.\n \"\"\"\n return any(\n is_type_var(arg) or has_type_var(arg)\n for arg in getattr(annotation, \"__args__\", [])\n )\n\n\ndef get_parameters(annotation: Type):\n if (\n isinstance(annotation, typing._GenericAlias) # type:ignore\n or isinstance(annotation, type)\n and issubclass(annotation, typing.Generic) # type:ignore\n and annotation is not typing.Generic\n ):\n return annotation.__parameters__\n else:\n return () # pragma: no cover\n\n\ndef get_origin(annotation: Type):\n if isinstance(annotation, typing._GenericAlias): # type:ignore\n return (\n annotation.__origin__\n if annotation.__origin__ is not typing.ClassVar\n else None\n )\n\n if annotation is typing.Generic: # pragma: no cover\n return typing.Generic\n\n return None # pragma: no cover\n\n\ndef get_args(annotation: Type):\n if isinstance(annotation, typing._GenericAlias): # type:ignore\n res = annotation.__args__\n\n if (\n get_origin(annotation) is Callable and res[0] is not Ellipsis\n ): # pragma: no cover\n res = (list(res[:-1]), res[-1])\n\n return res\n\n return ()\n\n\ndef is_forward_ref(annotation: Type) -> bool:\n return isinstance(annotation, ForwardRef)\n", "path": "strawberry/utils/typing.py"}]}
| 1,526 | 194 |
gh_patches_debug_32082
|
rasdani/github-patches
|
git_diff
|
aws__aws-cli-900
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Provide better error message for invalid endpoint urls
The error message could provide more context about what exactly went wrong with the request. For example:
```
$ aws s3api list-buckets --endpoint-url example.com
Invalid URL u'/': No schema supplied
```
A better error message would be something like:
```
$ aws s3api list-buckets --endpoint-url example.com
Bad value for --endpoint-url "example.com": scheme is missing. Must be of the form http://<hostname>/ or https://<hostname>/
```
--- END ISSUE ---
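As an illustrative aside (not the CLI's actual code): a standalone Python 3 sketch of the scheme check that the requested error message implies. The real fix, shown in the patch below, routes this through the CLI's own compat `urlparse` and the `--endpoint-url` argument resolution; the helper name here is hypothetical.
```python
from urllib.parse import urlparse


def validate_endpoint_url(value):
    # Reject endpoint URLs that lack a scheme, with a message that names the flag.
    parsed = urlparse(value)
    if not parsed.scheme:
        raise ValueError(
            'Bad value for --endpoint-url "%s": scheme is missing. '
            'Must be of the form http://<hostname>/ or https://<hostname>/' % value)
    return value


print(validate_endpoint_url('https://example.com'))  # passes through unchanged
validate_endpoint_url('example.com')                 # raises ValueError with the friendlier message
```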
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `awscli/compat.py`
Content:
```
1 # Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2
3 # Licensed under the Apache License, Version 2.0 (the "License"). You
4 # may not use this file except in compliance with the License. A copy of
5 # the License is located at
6
7 # http://aws.amazon.com/apache2.0/
8
9 # or in the "license" file accompanying this file. This file is
10 # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 # ANY KIND, either express or implied. See the License for the specific
12 # language governing permissions and limitations under the License.
13 import sys
14 import six
15
16 if six.PY3:
17 import locale
18
19 def get_stdout_text_writer():
20 return sys.stdout
21
22 def compat_open(filename, mode='r', encoding=None):
23 """Back-port open() that accepts an encoding argument.
24
25 In python3 this uses the built in open() and in python2 this
26 uses the io.open() function.
27
28 If the file is not being opened in binary mode, then we'll
29 use locale.getpreferredencoding() to find the preferred
30 encoding.
31
32 """
33 if 'b' not in mode:
34 encoding = locale.getpreferredencoding()
35 return open(filename, mode, encoding=encoding)
36
37 else:
38 import codecs
39 import locale
40 import io
41
42 def get_stdout_text_writer():
43 # In python3, all the sys.stdout/sys.stderr streams are in text
44 # mode. This means they expect unicode, and will encode the
45 # unicode automatically before actually writing to stdout/stderr.
46 # In python2, that's not the case. In order to provide a consistent
47 # interface, we can create a wrapper around sys.stdout that will take
48 # unicode, and automatically encode it to the preferred encoding.
49 # That way consumers can just call get_stdout_text_writer() and write
50 # unicode to the returned stream. Note that get_stdout_text_writer
51 # just returns sys.stdout in the PY3 section above because python3
52 # handles this.
53 return codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
54
55 def compat_open(filename, mode='r', encoding=None):
56 # See docstring for compat_open in the PY3 section above.
57 if 'b' not in mode:
58 encoding = locale.getpreferredencoding()
59 return io.open(filename, mode, encoding=encoding)
60
```
Path: `awscli/customizations/globalargs.py`
Content:
```
1 # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License"). You
4 # may not use this file except in compliance with the License. A copy of
5 # the License is located at
6 #
7 # http://aws.amazon.com/apache2.0/
8 #
9 # or in the "license" file accompanying this file. This file is
10 # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 # ANY KIND, either express or implied. See the License for the specific
12 # language governing permissions and limitations under the License.
13 import sys
14 import os
15
16 import jmespath
17
18
19 def register_parse_global_args(cli):
20 cli.register('top-level-args-parsed', resolve_types)
21 cli.register('top-level-args-parsed', no_sign_request)
22
23
24 def resolve_types(parsed_args, **kwargs):
25 # This emulates the "type" arg from argparse, but does so in a way
26 # that plugins can also hook into this process.
27 _resolve_arg(parsed_args, 'query')
28 _resolve_arg(parsed_args, 'verify_ssl')
29
30
31 def _resolve_arg(parsed_args, name):
32 value = getattr(parsed_args, name, None)
33 if value is not None:
34 new_value = getattr(sys.modules[__name__], '_resolve_%s' % name)(value)
35 setattr(parsed_args, name, new_value)
36
37
38 def _resolve_query(value):
39 try:
40 return jmespath.compile(value)
41 except Exception as e:
42 raise ValueError("Bad value for --query %s: %s" % (value, str(e)))
43
44
45 def _resolve_verify_ssl(value):
46 verify = None
47 if not value:
48 verify = False
49 else:
50 verify = os.environ.get('AWS_CA_BUNDLE')
51 return verify
52
53
54 def no_sign_request(parsed_args, session, **kwargs):
55 if not parsed_args.sign_request:
56 # In order to make signing disabled for all requests
57 # we need to set the signature_version to None for
58 # any service created. This ensures that get_endpoint()
59 # will not look for auth.
60 session.register('service-created', disable_signing)
61
62
63 def disable_signing(service, **kwargs):
64 service.signature_version = None
65
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/awscli/compat.py b/awscli/compat.py
--- a/awscli/compat.py
+++ b/awscli/compat.py
@@ -15,6 +15,7 @@
if six.PY3:
import locale
+ import urllib.parse as urlparse
def get_stdout_text_writer():
return sys.stdout
@@ -38,6 +39,7 @@
import codecs
import locale
import io
+ import urlparse
def get_stdout_text_writer():
# In python3, all the sys.stdout/sys.stderr streams are in text
diff --git a/awscli/customizations/globalargs.py b/awscli/customizations/globalargs.py
--- a/awscli/customizations/globalargs.py
+++ b/awscli/customizations/globalargs.py
@@ -15,6 +15,8 @@
import jmespath
+from awscli.compat import urlparse
+
def register_parse_global_args(cli):
cli.register('top-level-args-parsed', resolve_types)
@@ -26,6 +28,7 @@
# that plugins can also hook into this process.
_resolve_arg(parsed_args, 'query')
_resolve_arg(parsed_args, 'verify_ssl')
+ _resolve_arg(parsed_args, 'endpoint_url')
def _resolve_arg(parsed_args, name):
@@ -51,6 +54,17 @@
return verify
+def _resolve_endpoint_url(value):
+ parsed = urlparse.urlparse(value)
+ # Our http library requires you specify an endpoint url
+ # that contains a scheme, so we'll verify that up front.
+ if not parsed.scheme:
+ raise ValueError('Bad value for --endpoint-url "%s": scheme is '
+ 'missing. Must be of the form '
+ 'http://<hostname>/ or https://<hostname>/' % value)
+ return value
+
+
def no_sign_request(parsed_args, session, **kwargs):
if not parsed_args.sign_request:
# In order to make signing disabled for all requests
|
{"golden_diff": "diff --git a/awscli/compat.py b/awscli/compat.py\n--- a/awscli/compat.py\n+++ b/awscli/compat.py\n@@ -15,6 +15,7 @@\n \n if six.PY3:\n import locale\n+ import urllib.parse as urlparse\n \n def get_stdout_text_writer():\n return sys.stdout\n@@ -38,6 +39,7 @@\n import codecs\n import locale\n import io\n+ import urlparse\n \n def get_stdout_text_writer():\n # In python3, all the sys.stdout/sys.stderr streams are in text\ndiff --git a/awscli/customizations/globalargs.py b/awscli/customizations/globalargs.py\n--- a/awscli/customizations/globalargs.py\n+++ b/awscli/customizations/globalargs.py\n@@ -15,6 +15,8 @@\n \n import jmespath\n \n+from awscli.compat import urlparse\n+\n \n def register_parse_global_args(cli):\n cli.register('top-level-args-parsed', resolve_types)\n@@ -26,6 +28,7 @@\n # that plugins can also hook into this process.\n _resolve_arg(parsed_args, 'query')\n _resolve_arg(parsed_args, 'verify_ssl')\n+ _resolve_arg(parsed_args, 'endpoint_url')\n \n \n def _resolve_arg(parsed_args, name):\n@@ -51,6 +54,17 @@\n return verify\n \n \n+def _resolve_endpoint_url(value):\n+ parsed = urlparse.urlparse(value)\n+ # Our http library requires you specify an endpoint url\n+ # that contains a scheme, so we'll verify that up front.\n+ if not parsed.scheme:\n+ raise ValueError('Bad value for --endpoint-url \"%s\": scheme is '\n+ 'missing. Must be of the form '\n+ 'http://<hostname>/ or https://<hostname>/' % value)\n+ return value\n+\n+\n def no_sign_request(parsed_args, session, **kwargs):\n if not parsed_args.sign_request:\n # In order to make signing disabled for all requests\n", "issue": "Provide better error message for invalid endpoint urls\nThe error message could provide more context about what exactly went wrong with the request. For example:\n\n```\n$ aws s3api list-buckets --endpoint-url example.com\n\nInvalid URL u'/': No schema supplied\n```\n\nA better error message would be something like:\n\n```\n$ aws s3api list-buckets --endpoint-url example.com\n\nBad value for --endpoint-url \"example.com\": scheme is missing. Must be of the form http://<hostname>/ or https://<hostname>/\n```\n\n", "before_files": [{"content": "# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n\n# http://aws.amazon.com/apache2.0/\n\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\nimport sys\nimport six\n\nif six.PY3:\n import locale\n\n def get_stdout_text_writer():\n return sys.stdout\n\n def compat_open(filename, mode='r', encoding=None):\n \"\"\"Back-port open() that accepts an encoding argument.\n\n In python3 this uses the built in open() and in python2 this\n uses the io.open() function.\n\n If the file is not being opened in binary mode, then we'll\n use locale.getpreferredencoding() to find the preferred\n encoding.\n\n \"\"\"\n if 'b' not in mode:\n encoding = locale.getpreferredencoding()\n return open(filename, mode, encoding=encoding)\n\nelse:\n import codecs\n import locale\n import io\n\n def get_stdout_text_writer():\n # In python3, all the sys.stdout/sys.stderr streams are in text\n # mode. 
This means they expect unicode, and will encode the\n # unicode automatically before actually writing to stdout/stderr.\n # In python2, that's not the case. In order to provide a consistent\n # interface, we can create a wrapper around sys.stdout that will take\n # unicode, and automatically encode it to the preferred encoding.\n # That way consumers can just call get_stdout_text_writer() and write\n # unicode to the returned stream. Note that get_stdout_text_writer\n # just returns sys.stdout in the PY3 section above because python3\n # handles this.\n return codecs.getwriter(locale.getpreferredencoding())(sys.stdout)\n\n def compat_open(filename, mode='r', encoding=None):\n # See docstring for compat_open in the PY3 section above.\n if 'b' not in mode:\n encoding = locale.getpreferredencoding()\n return io.open(filename, mode, encoding=encoding)\n", "path": "awscli/compat.py"}, {"content": "# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n#\n# http://aws.amazon.com/apache2.0/\n#\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\nimport sys\nimport os\n\nimport jmespath\n\n\ndef register_parse_global_args(cli):\n cli.register('top-level-args-parsed', resolve_types)\n cli.register('top-level-args-parsed', no_sign_request)\n\n\ndef resolve_types(parsed_args, **kwargs):\n # This emulates the \"type\" arg from argparse, but does so in a way\n # that plugins can also hook into this process.\n _resolve_arg(parsed_args, 'query')\n _resolve_arg(parsed_args, 'verify_ssl')\n\n\ndef _resolve_arg(parsed_args, name):\n value = getattr(parsed_args, name, None)\n if value is not None:\n new_value = getattr(sys.modules[__name__], '_resolve_%s' % name)(value)\n setattr(parsed_args, name, new_value)\n\n\ndef _resolve_query(value):\n try:\n return jmespath.compile(value)\n except Exception as e:\n raise ValueError(\"Bad value for --query %s: %s\" % (value, str(e)))\n\n\ndef _resolve_verify_ssl(value):\n verify = None\n if not value:\n verify = False\n else:\n verify = os.environ.get('AWS_CA_BUNDLE')\n return verify\n\n\ndef no_sign_request(parsed_args, session, **kwargs):\n if not parsed_args.sign_request:\n # In order to make signing disabled for all requests\n # we need to set the signature_version to None for\n # any service created. This ensures that get_endpoint()\n # will not look for auth.\n session.register('service-created', disable_signing)\n\n\ndef disable_signing(service, **kwargs):\n service.signature_version = None\n", "path": "awscli/customizations/globalargs.py"}], "after_files": [{"content": "# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n\n# http://aws.amazon.com/apache2.0/\n\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. 
See the License for the specific\n# language governing permissions and limitations under the License.\nimport sys\nimport six\n\nif six.PY3:\n import locale\n import urllib.parse as urlparse\n\n def get_stdout_text_writer():\n return sys.stdout\n\n def compat_open(filename, mode='r', encoding=None):\n \"\"\"Back-port open() that accepts an encoding argument.\n\n In python3 this uses the built in open() and in python2 this\n uses the io.open() function.\n\n If the file is not being opened in binary mode, then we'll\n use locale.getpreferredencoding() to find the preferred\n encoding.\n\n \"\"\"\n if 'b' not in mode:\n encoding = locale.getpreferredencoding()\n return open(filename, mode, encoding=encoding)\n\nelse:\n import codecs\n import locale\n import io\n import urlparse\n\n def get_stdout_text_writer():\n # In python3, all the sys.stdout/sys.stderr streams are in text\n # mode. This means they expect unicode, and will encode the\n # unicode automatically before actually writing to stdout/stderr.\n # In python2, that's not the case. In order to provide a consistent\n # interface, we can create a wrapper around sys.stdout that will take\n # unicode, and automatically encode it to the preferred encoding.\n # That way consumers can just call get_stdout_text_writer() and write\n # unicode to the returned stream. Note that get_stdout_text_writer\n # just returns sys.stdout in the PY3 section above because python3\n # handles this.\n return codecs.getwriter(locale.getpreferredencoding())(sys.stdout)\n\n def compat_open(filename, mode='r', encoding=None):\n # See docstring for compat_open in the PY3 section above.\n if 'b' not in mode:\n encoding = locale.getpreferredencoding()\n return io.open(filename, mode, encoding=encoding)\n", "path": "awscli/compat.py"}, {"content": "# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n#\n# http://aws.amazon.com/apache2.0/\n#\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. 
See the License for the specific\n# language governing permissions and limitations under the License.\nimport sys\nimport os\n\nimport jmespath\n\nfrom awscli.compat import urlparse\n\n\ndef register_parse_global_args(cli):\n cli.register('top-level-args-parsed', resolve_types)\n cli.register('top-level-args-parsed', no_sign_request)\n\n\ndef resolve_types(parsed_args, **kwargs):\n # This emulates the \"type\" arg from argparse, but does so in a way\n # that plugins can also hook into this process.\n _resolve_arg(parsed_args, 'query')\n _resolve_arg(parsed_args, 'verify_ssl')\n _resolve_arg(parsed_args, 'endpoint_url')\n\n\ndef _resolve_arg(parsed_args, name):\n value = getattr(parsed_args, name, None)\n if value is not None:\n new_value = getattr(sys.modules[__name__], '_resolve_%s' % name)(value)\n setattr(parsed_args, name, new_value)\n\n\ndef _resolve_query(value):\n try:\n return jmespath.compile(value)\n except Exception as e:\n raise ValueError(\"Bad value for --query %s: %s\" % (value, str(e)))\n\n\ndef _resolve_verify_ssl(value):\n verify = None\n if not value:\n verify = False\n else:\n verify = os.environ.get('AWS_CA_BUNDLE')\n return verify\n\n\ndef _resolve_endpoint_url(value):\n parsed = urlparse.urlparse(value)\n # Our http library requires you specify an endpoint url\n # that contains a scheme, so we'll verify that up front.\n if not parsed.scheme:\n raise ValueError('Bad value for --endpoint-url \"%s\": scheme is '\n 'missing. Must be of the form '\n 'http://<hostname>/ or https://<hostname>/' % value)\n return value\n\n\ndef no_sign_request(parsed_args, session, **kwargs):\n if not parsed_args.sign_request:\n # In order to make signing disabled for all requests\n # we need to set the signature_version to None for\n # any service created. This ensures that get_endpoint()\n # will not look for auth.\n session.register('service-created', disable_signing)\n\n\ndef disable_signing(service, **kwargs):\n service.signature_version = None\n", "path": "awscli/customizations/globalargs.py"}]}
| 1,648 | 440 |
gh_patches_debug_6559
|
rasdani/github-patches
|
git_diff
|
aws-cloudformation__cfn-lint-595
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support for multiple CodePipeline OutputArtifacts
*cfn-lint version*: 0.12.1
*Description of issue.*
The CloudFormation linter does not yet support having multiple values for the OutputArtifacts property. When linting a template it gives the following error message:
`E2541 Action "CodeBuild" declares 2 OutputArtifacts which is not in expected range [0, 1].`
```yaml
---
AWSTemplateFormatVersion: 2010-09-09
Resources:
Pipeline:
Type: AWS::CodePipeline::Pipeline
Properties:
Name: pipeline
RoleArn: 'rolearn'
RestartExecutionOnUpdate: true
ArtifactStore:
Location: 'artifactbucket'
Type: S3
Stages:
- Name: Source
Actions:
- Name: SourceRepo
ActionTypeId:
# More info on Possible Values: https://docs.aws.amazon.com/codepipeline/latest/userguide/reference-pipeline-structure.html#action-requirements
Category: Source
Owner: ThirdParty
Provider: GitHub
Version: "1"
Configuration:
Owner: '{{resolve:ssm:/service/github/owner:1}}'
OAuthToken: '{{resolve:ssm:/service/github/token:3}}'
Repo: 'repo'
Branch: 'develop'
PollForSourceChanges: true
OutputArtifacts:
- Name: source
RunOrder: 1
- Name: Build
Actions:
- Name: CodeBuild
ActionTypeId:
Category: Build
Owner: AWS
Provider: CodeBuild
Version: "1"
Configuration:
ProjectName: 'codebuildproject'
InputArtifacts:
- Name: source
OutputArtifacts:
- Name: artifact1
- Name: artifact2 # this goes wrong
RunOrder: 1
```
As additional information a [blog post](https://aws.amazon.com/about-aws/whats-new/2018/08/aws-codebuild-adds-ability-to-create-build-projects-with-multiple-input-sources-and-output-artifacts/) about the release of support for this and the [CloudFormation spec](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codepipeline-pipeline-stages-actions.html).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cfnlint/rules/resources/codepipeline/CodepipelineStageActions.py`
Content:
```
1 """
2 Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3
4 Permission is hereby granted, free of charge, to any person obtaining a copy of this
5 software and associated documentation files (the "Software"), to deal in the Software
6 without restriction, including without limitation the rights to use, copy, modify,
7 merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
8 permit persons to whom the Software is furnished to do so.
9
10 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
11 INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
12 PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
13 HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
14 OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
15 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
16 """
17 from cfnlint import CloudFormationLintRule
18 from cfnlint import RuleMatch
19
20
21 class CodepipelineStageActions(CloudFormationLintRule):
22 """Check if CodePipeline Stage Actions are set up properly."""
23 id = 'E2541'
24 shortdesc = 'CodePipeline Stage Actions'
25 description = 'See if CodePipeline stage actions are set correctly'
26 source_url = 'https://docs.aws.amazon.com/codepipeline/latest/userguide/reference-pipeline-structure.html#pipeline-requirements'
27 tags = ['resources', 'codepipeline']
28
29 VALID_OWNER_STRINGS = {'AWS', 'ThirdParty', 'Custom'}
30
31 CONSTRAINTS = {
32 'AWS': {
33 'Source': {
34 'S3': {
35 'InputArtifactRange': 0,
36 'OutputArtifactRange': 1,
37 },
38 'CodeCommit': {
39 'InputArtifactRange': 0,
40 'OutputArtifactRange': 1,
41 }
42 },
43 'Build': {
44 'CodeBuild': {
45 'InputArtifactRange': 1,
46 'OutputArtifactRange': (0, 1),
47 },
48 },
49 'Test': {
50 'CodeBuild': {
51 'InputArtifactRange': 1,
52 'OutputArtifactRange': (0, 1),
53 }
54 },
55 'Approval': {
56 'Manual': {
57 'InputArtifactRange': 0,
58 'OutputArtifactRange': 0,
59 }
60 },
61 'Deploy': {
62 'CloudFormation': {
63 'InputArtifactRange': (0, 10),
64 'OutputArtifactRange': (0, 1),
65 },
66 'CodeDeploy': {
67 'InputArtifactRange': 1,
68 'OutputArtifactRange': 0,
69 },
70 'ElasticBeanstalk': {
71 'InputArtifactRange': 1,
72 'OutputArtifactRange': 0,
73 },
74 'OpsWorks': {
75 'InputArtifactRange': 1,
76 'OutputArtifactRange': 0,
77 },
78 'ECS': {
79 'InputArtifactRange': 1,
80 'OutputArtifactRange': 0,
81 },
82 },
83 'Invoke': {
84 'Lambda': {
85 'InputArtifactRange': (0, 5),
86 'OutputArtifactRange': (0, 5),
87 }
88 }
89 },
90 'ThirdParty': {
91 'Source': {
92 'GitHub': {
93 'InputArtifactRange': 0,
94 'OutputArtifactRange': 1,
95 }
96 },
97 },
98 }
99
100 KEY_MAP = {
101 'InputArtifacts': 'InputArtifactRange',
102 'OutputArtifacts': 'OutputArtifactRange',
103 }
104
105 def check_artifact_counts(self, action, artifact_type, path):
106 """Check that artifact counts are within valid ranges."""
107 matches = []
108
109 action_type_id = action.get('ActionTypeId')
110 owner = action_type_id.get('Owner')
111 category = action_type_id.get('Category')
112 provider = action_type_id.get('Provider')
113
114 if isinstance(owner, dict) or isinstance(category, dict) or isinstance(provider, dict):
115 self.logger.debug('owner, category, provider need to be strings to validate. Skipping.')
116 return matches
117
118 constraints = self.CONSTRAINTS.get(owner, {}).get(category, {}).get(provider, {})
119 if not constraints:
120 return matches
121 artifact_count = len(action.get(artifact_type, []))
122
123 constraint_key = self.KEY_MAP[artifact_type]
124 if isinstance(constraints[constraint_key], tuple):
125 min_, max_ = constraints[constraint_key]
126 if not (min_ <= artifact_count <= max_):
127 message = (
128 'Action "{action}" declares {number} {artifact_type} which is not in '
129 'expected range [{a}, {b}].'
130 ).format(
131 action=action['Name'],
132 number=artifact_count,
133 artifact_type=artifact_type,
134 a=min_,
135 b=max_
136 )
137 matches.append(RuleMatch(
138 path + [artifact_type],
139 message
140 ))
141 else:
142 if artifact_count != constraints[constraint_key]:
143 message = (
144 'Action "{action}" declares {number} {artifact_type} which is not the '
145 'expected number [{a}].'
146 ).format(
147 action=action['Name'],
148 number=artifact_count,
149 artifact_type=artifact_type,
150 a=constraints[constraint_key]
151 )
152 matches.append(RuleMatch(
153 path + [artifact_type],
154 message
155 ))
156
157 return matches
158
159 def check_owner(self, action, path):
160 """Check that action type owner is valid."""
161 matches = []
162
163 owner = action.get('ActionTypeId').get('Owner')
164 if owner not in self.VALID_OWNER_STRINGS and owner is not None:
165 message = (
166 'For all currently supported action types, the only valid owner '
167 'strings are {owners}'
168 ).format(
169 owners=', '.join(list(self.VALID_OWNER_STRINGS))
170 )
171 matches.append(RuleMatch(
172 path + ['ActionTypeId', 'Owner'],
173 message
174 ))
175
176 return matches
177
178 def check_version(self, action, path):
179 """Check that action type version is valid."""
180 matches = []
181
182 version = action.get('ActionTypeId', {}).get('Version')
183 if isinstance(version, dict):
184 self.logger.debug('Unable to validate version when an object is used. Skipping')
185 elif version != '1':
186 message = 'For all currently supported action types, the only valid version string is "1".'
187 matches.append(RuleMatch(
188 path + ['ActionTypeId', 'Version'],
189 message
190 ))
191 return matches
192
193 def check_names_unique(self, action, path, action_names):
194 """Check that action names are unique."""
195 matches = []
196
197 if action.get('Name') in action_names:
198 message = 'All action names within a stage must be unique. ({name})'.format(
199 name=action.get('Name')
200 )
201 matches.append(RuleMatch(path + ['Name'], message))
202 action_names.add(action.get('Name'))
203
204 return matches
205
206 def match(self, cfn):
207 """Check that stage actions are set up properly."""
208 matches = []
209
210 resources = cfn.get_resource_properties(['AWS::CodePipeline::Pipeline'])
211 for resource in resources:
212 path = resource['Path']
213 properties = resource['Value']
214
215 s_stages = properties.get_safe('Stages', path)
216 for s_stage_v, s_stage_p in s_stages:
217 if not isinstance(s_stage_v, list):
218 self.logger.debug('Stages not list. Should have been caught by generic linting.')
219 return matches
220
221 for l_i_stage, l_i_path in s_stage_v.items_safe(s_stage_p):
222 action_names = set()
223 s_actions = l_i_stage.get_safe('Actions', l_i_path)
224 for s_action_v, s_action_p in s_actions:
225 if not isinstance(s_action_v, list):
226 self.logger.debug('Actions not list. Should have been caught by generic linting.')
227 return matches
228
229 for l_i_a_action, l_i_a_path in s_action_v.items_safe(s_action_p):
230 try:
231 matches.extend(self.check_names_unique(l_i_a_action, l_i_a_path, action_names))
232 matches.extend(self.check_version(l_i_a_action, l_i_a_path))
233 matches.extend(self.check_owner(l_i_a_action, l_i_a_path))
234 matches.extend(self.check_artifact_counts(l_i_a_action, 'InputArtifacts', l_i_a_path))
235 matches.extend(self.check_artifact_counts(l_i_a_action, 'OutputArtifacts', l_i_a_path))
236 except AttributeError as err:
237 self.logger.debug('Got AttributeError. Should have been caught by generic linting. '
238 'Ignoring the error here: %s', str(err))
239
240 return matches
241
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/cfnlint/rules/resources/codepipeline/CodepipelineStageActions.py b/src/cfnlint/rules/resources/codepipeline/CodepipelineStageActions.py
--- a/src/cfnlint/rules/resources/codepipeline/CodepipelineStageActions.py
+++ b/src/cfnlint/rules/resources/codepipeline/CodepipelineStageActions.py
@@ -40,12 +40,6 @@
'OutputArtifactRange': 1,
}
},
- 'Build': {
- 'CodeBuild': {
- 'InputArtifactRange': 1,
- 'OutputArtifactRange': (0, 1),
- },
- },
'Test': {
'CodeBuild': {
'InputArtifactRange': 1,
|
{"golden_diff": "diff --git a/src/cfnlint/rules/resources/codepipeline/CodepipelineStageActions.py b/src/cfnlint/rules/resources/codepipeline/CodepipelineStageActions.py\n--- a/src/cfnlint/rules/resources/codepipeline/CodepipelineStageActions.py\n+++ b/src/cfnlint/rules/resources/codepipeline/CodepipelineStageActions.py\n@@ -40,12 +40,6 @@\n 'OutputArtifactRange': 1,\n }\n },\n- 'Build': {\n- 'CodeBuild': {\n- 'InputArtifactRange': 1,\n- 'OutputArtifactRange': (0, 1),\n- },\n- },\n 'Test': {\n 'CodeBuild': {\n 'InputArtifactRange': 1,\n", "issue": "Support for multiple CodePipeline OutputArtifacts\n*cfn-lint version*: 0.12.1\r\n\r\n*Description of issue.*\r\n\r\nThe CloudFormation linter does not yet support having multiple values for the OutputArtifacts property. When linting a template it gives the following error message:\r\n`E2541 Action \"CodeBuild\" declares 2 OutputArtifacts which is not in expected range [0, 1].`\r\n\r\n```yaml\r\n---\r\nAWSTemplateFormatVersion: 2010-09-09\r\nResources:\r\n Pipeline:\r\n Type: AWS::CodePipeline::Pipeline\r\n Properties:\r\n Name: pipeline\r\n RoleArn: 'rolearn'\r\n RestartExecutionOnUpdate: true\r\n ArtifactStore:\r\n Location: 'artifactbucket'\r\n Type: S3\r\n Stages:\r\n - Name: Source\r\n Actions:\r\n - Name: SourceRepo\r\n ActionTypeId:\r\n # More info on Possible Values: https://docs.aws.amazon.com/codepipeline/latest/userguide/reference-pipeline-structure.html#action-requirements\r\n Category: Source\r\n Owner: ThirdParty\r\n Provider: GitHub\r\n Version: \"1\"\r\n Configuration:\r\n Owner: '{{resolve:ssm:/service/github/owner:1}}'\r\n OAuthToken: '{{resolve:ssm:/service/github/token:3}}'\r\n Repo: 'repo'\r\n Branch: 'develop'\r\n PollForSourceChanges: true\r\n OutputArtifacts:\r\n - Name: source\r\n RunOrder: 1\r\n - Name: Build\r\n Actions:\r\n - Name: CodeBuild\r\n ActionTypeId:\r\n Category: Build\r\n Owner: AWS\r\n Provider: CodeBuild\r\n Version: \"1\"\r\n Configuration:\r\n ProjectName: 'codebuildproject'\r\n InputArtifacts:\r\n - Name: source\r\n OutputArtifacts:\r\n - Name: artifact1\r\n - Name: artifact2 # this goes wrong\r\n RunOrder: 1\r\n```\r\n\r\nAs additional information a [blog post](https://aws.amazon.com/about-aws/whats-new/2018/08/aws-codebuild-adds-ability-to-create-build-projects-with-multiple-input-sources-and-output-artifacts/) about the release of support for this and the [CloudFormation spec](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codepipeline-pipeline-stages-actions.html).\n", "before_files": [{"content": "\"\"\"\n Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\n Permission is hereby granted, free of charge, to any person obtaining a copy of this\n software and associated documentation files (the \"Software\"), to deal in the Software\n without restriction, including without limitation the rights to use, copy, modify,\n merge, publish, distribute, sublicense, and/or sell copies of the Software, and to\n permit persons to whom the Software is furnished to do so.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\"\"\"\nfrom cfnlint import CloudFormationLintRule\nfrom cfnlint import RuleMatch\n\n\nclass CodepipelineStageActions(CloudFormationLintRule):\n \"\"\"Check if CodePipeline Stage Actions are set up properly.\"\"\"\n id = 'E2541'\n shortdesc = 'CodePipeline Stage Actions'\n description = 'See if CodePipeline stage actions are set correctly'\n source_url = 'https://docs.aws.amazon.com/codepipeline/latest/userguide/reference-pipeline-structure.html#pipeline-requirements'\n tags = ['resources', 'codepipeline']\n\n VALID_OWNER_STRINGS = {'AWS', 'ThirdParty', 'Custom'}\n\n CONSTRAINTS = {\n 'AWS': {\n 'Source': {\n 'S3': {\n 'InputArtifactRange': 0,\n 'OutputArtifactRange': 1,\n },\n 'CodeCommit': {\n 'InputArtifactRange': 0,\n 'OutputArtifactRange': 1,\n }\n },\n 'Build': {\n 'CodeBuild': {\n 'InputArtifactRange': 1,\n 'OutputArtifactRange': (0, 1),\n },\n },\n 'Test': {\n 'CodeBuild': {\n 'InputArtifactRange': 1,\n 'OutputArtifactRange': (0, 1),\n }\n },\n 'Approval': {\n 'Manual': {\n 'InputArtifactRange': 0,\n 'OutputArtifactRange': 0,\n }\n },\n 'Deploy': {\n 'CloudFormation': {\n 'InputArtifactRange': (0, 10),\n 'OutputArtifactRange': (0, 1),\n },\n 'CodeDeploy': {\n 'InputArtifactRange': 1,\n 'OutputArtifactRange': 0,\n },\n 'ElasticBeanstalk': {\n 'InputArtifactRange': 1,\n 'OutputArtifactRange': 0,\n },\n 'OpsWorks': {\n 'InputArtifactRange': 1,\n 'OutputArtifactRange': 0,\n },\n 'ECS': {\n 'InputArtifactRange': 1,\n 'OutputArtifactRange': 0,\n },\n },\n 'Invoke': {\n 'Lambda': {\n 'InputArtifactRange': (0, 5),\n 'OutputArtifactRange': (0, 5),\n }\n }\n },\n 'ThirdParty': {\n 'Source': {\n 'GitHub': {\n 'InputArtifactRange': 0,\n 'OutputArtifactRange': 1,\n }\n },\n },\n }\n\n KEY_MAP = {\n 'InputArtifacts': 'InputArtifactRange',\n 'OutputArtifacts': 'OutputArtifactRange',\n }\n\n def check_artifact_counts(self, action, artifact_type, path):\n \"\"\"Check that artifact counts are within valid ranges.\"\"\"\n matches = []\n\n action_type_id = action.get('ActionTypeId')\n owner = action_type_id.get('Owner')\n category = action_type_id.get('Category')\n provider = action_type_id.get('Provider')\n\n if isinstance(owner, dict) or isinstance(category, dict) or isinstance(provider, dict):\n self.logger.debug('owner, category, provider need to be strings to validate. 
Skipping.')\n return matches\n\n constraints = self.CONSTRAINTS.get(owner, {}).get(category, {}).get(provider, {})\n if not constraints:\n return matches\n artifact_count = len(action.get(artifact_type, []))\n\n constraint_key = self.KEY_MAP[artifact_type]\n if isinstance(constraints[constraint_key], tuple):\n min_, max_ = constraints[constraint_key]\n if not (min_ <= artifact_count <= max_):\n message = (\n 'Action \"{action}\" declares {number} {artifact_type} which is not in '\n 'expected range [{a}, {b}].'\n ).format(\n action=action['Name'],\n number=artifact_count,\n artifact_type=artifact_type,\n a=min_,\n b=max_\n )\n matches.append(RuleMatch(\n path + [artifact_type],\n message\n ))\n else:\n if artifact_count != constraints[constraint_key]:\n message = (\n 'Action \"{action}\" declares {number} {artifact_type} which is not the '\n 'expected number [{a}].'\n ).format(\n action=action['Name'],\n number=artifact_count,\n artifact_type=artifact_type,\n a=constraints[constraint_key]\n )\n matches.append(RuleMatch(\n path + [artifact_type],\n message\n ))\n\n return matches\n\n def check_owner(self, action, path):\n \"\"\"Check that action type owner is valid.\"\"\"\n matches = []\n\n owner = action.get('ActionTypeId').get('Owner')\n if owner not in self.VALID_OWNER_STRINGS and owner is not None:\n message = (\n 'For all currently supported action types, the only valid owner '\n 'strings are {owners}'\n ).format(\n owners=', '.join(list(self.VALID_OWNER_STRINGS))\n )\n matches.append(RuleMatch(\n path + ['ActionTypeId', 'Owner'],\n message\n ))\n\n return matches\n\n def check_version(self, action, path):\n \"\"\"Check that action type version is valid.\"\"\"\n matches = []\n\n version = action.get('ActionTypeId', {}).get('Version')\n if isinstance(version, dict):\n self.logger.debug('Unable to validate version when an object is used. Skipping')\n elif version != '1':\n message = 'For all currently supported action types, the only valid version string is \"1\".'\n matches.append(RuleMatch(\n path + ['ActionTypeId', 'Version'],\n message\n ))\n return matches\n\n def check_names_unique(self, action, path, action_names):\n \"\"\"Check that action names are unique.\"\"\"\n matches = []\n\n if action.get('Name') in action_names:\n message = 'All action names within a stage must be unique. ({name})'.format(\n name=action.get('Name')\n )\n matches.append(RuleMatch(path + ['Name'], message))\n action_names.add(action.get('Name'))\n\n return matches\n\n def match(self, cfn):\n \"\"\"Check that stage actions are set up properly.\"\"\"\n matches = []\n\n resources = cfn.get_resource_properties(['AWS::CodePipeline::Pipeline'])\n for resource in resources:\n path = resource['Path']\n properties = resource['Value']\n\n s_stages = properties.get_safe('Stages', path)\n for s_stage_v, s_stage_p in s_stages:\n if not isinstance(s_stage_v, list):\n self.logger.debug('Stages not list. Should have been caught by generic linting.')\n return matches\n\n for l_i_stage, l_i_path in s_stage_v.items_safe(s_stage_p):\n action_names = set()\n s_actions = l_i_stage.get_safe('Actions', l_i_path)\n for s_action_v, s_action_p in s_actions:\n if not isinstance(s_action_v, list):\n self.logger.debug('Actions not list. 
Should have been caught by generic linting.')\n return matches\n\n for l_i_a_action, l_i_a_path in s_action_v.items_safe(s_action_p):\n try:\n matches.extend(self.check_names_unique(l_i_a_action, l_i_a_path, action_names))\n matches.extend(self.check_version(l_i_a_action, l_i_a_path))\n matches.extend(self.check_owner(l_i_a_action, l_i_a_path))\n matches.extend(self.check_artifact_counts(l_i_a_action, 'InputArtifacts', l_i_a_path))\n matches.extend(self.check_artifact_counts(l_i_a_action, 'OutputArtifacts', l_i_a_path))\n except AttributeError as err:\n self.logger.debug('Got AttributeError. Should have been caught by generic linting. '\n 'Ignoring the error here: %s', str(err))\n\n return matches\n", "path": "src/cfnlint/rules/resources/codepipeline/CodepipelineStageActions.py"}], "after_files": [{"content": "\"\"\"\n Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\n Permission is hereby granted, free of charge, to any person obtaining a copy of this\n software and associated documentation files (the \"Software\"), to deal in the Software\n without restriction, including without limitation the rights to use, copy, modify,\n merge, publish, distribute, sublicense, and/or sell copies of the Software, and to\n permit persons to whom the Software is furnished to do so.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\"\"\"\nfrom cfnlint import CloudFormationLintRule\nfrom cfnlint import RuleMatch\n\n\nclass CodepipelineStageActions(CloudFormationLintRule):\n \"\"\"Check if CodePipeline Stage Actions are set up properly.\"\"\"\n id = 'E2541'\n shortdesc = 'CodePipeline Stage Actions'\n description = 'See if CodePipeline stage actions are set correctly'\n source_url = 'https://docs.aws.amazon.com/codepipeline/latest/userguide/reference-pipeline-structure.html#pipeline-requirements'\n tags = ['resources', 'codepipeline']\n\n VALID_OWNER_STRINGS = {'AWS', 'ThirdParty', 'Custom'}\n\n CONSTRAINTS = {\n 'AWS': {\n 'Source': {\n 'S3': {\n 'InputArtifactRange': 0,\n 'OutputArtifactRange': 1,\n },\n 'CodeCommit': {\n 'InputArtifactRange': 0,\n 'OutputArtifactRange': 1,\n }\n },\n 'Test': {\n 'CodeBuild': {\n 'InputArtifactRange': 1,\n 'OutputArtifactRange': (0, 1),\n }\n },\n 'Approval': {\n 'Manual': {\n 'InputArtifactRange': 0,\n 'OutputArtifactRange': 0,\n }\n },\n 'Deploy': {\n 'CloudFormation': {\n 'InputArtifactRange': (0, 10),\n 'OutputArtifactRange': (0, 1),\n },\n 'CodeDeploy': {\n 'InputArtifactRange': 1,\n 'OutputArtifactRange': 0,\n },\n 'ElasticBeanstalk': {\n 'InputArtifactRange': 1,\n 'OutputArtifactRange': 0,\n },\n 'OpsWorks': {\n 'InputArtifactRange': 1,\n 'OutputArtifactRange': 0,\n },\n 'ECS': {\n 'InputArtifactRange': 1,\n 'OutputArtifactRange': 0,\n },\n },\n 'Invoke': {\n 'Lambda': {\n 'InputArtifactRange': (0, 5),\n 'OutputArtifactRange': (0, 5),\n }\n }\n },\n 'ThirdParty': {\n 'Source': {\n 'GitHub': {\n 'InputArtifactRange': 0,\n 'OutputArtifactRange': 1,\n }\n },\n },\n }\n\n KEY_MAP = {\n 'InputArtifacts': 'InputArtifactRange',\n 'OutputArtifacts': 'OutputArtifactRange',\n }\n\n def check_artifact_counts(self, action, 
artifact_type, path):\n \"\"\"Check that artifact counts are within valid ranges.\"\"\"\n matches = []\n\n action_type_id = action.get('ActionTypeId')\n owner = action_type_id.get('Owner')\n category = action_type_id.get('Category')\n provider = action_type_id.get('Provider')\n\n if isinstance(owner, dict) or isinstance(category, dict) or isinstance(provider, dict):\n self.logger.debug('owner, category, provider need to be strings to validate. Skipping.')\n return matches\n\n constraints = self.CONSTRAINTS.get(owner, {}).get(category, {}).get(provider, {})\n if not constraints:\n return matches\n artifact_count = len(action.get(artifact_type, []))\n\n constraint_key = self.KEY_MAP[artifact_type]\n if isinstance(constraints[constraint_key], tuple):\n min_, max_ = constraints[constraint_key]\n if not (min_ <= artifact_count <= max_):\n message = (\n 'Action \"{action}\" declares {number} {artifact_type} which is not in '\n 'expected range [{a}, {b}].'\n ).format(\n action=action['Name'],\n number=artifact_count,\n artifact_type=artifact_type,\n a=min_,\n b=max_\n )\n matches.append(RuleMatch(\n path + [artifact_type],\n message\n ))\n else:\n if artifact_count != constraints[constraint_key]:\n message = (\n 'Action \"{action}\" declares {number} {artifact_type} which is not the '\n 'expected number [{a}].'\n ).format(\n action=action['Name'],\n number=artifact_count,\n artifact_type=artifact_type,\n a=constraints[constraint_key]\n )\n matches.append(RuleMatch(\n path + [artifact_type],\n message\n ))\n\n return matches\n\n def check_owner(self, action, path):\n \"\"\"Check that action type owner is valid.\"\"\"\n matches = []\n\n owner = action.get('ActionTypeId').get('Owner')\n if owner not in self.VALID_OWNER_STRINGS and owner is not None:\n message = (\n 'For all currently supported action types, the only valid owner '\n 'strings are {owners}'\n ).format(\n owners=', '.join(list(self.VALID_OWNER_STRINGS))\n )\n matches.append(RuleMatch(\n path + ['ActionTypeId', 'Owner'],\n message\n ))\n\n return matches\n\n def check_version(self, action, path):\n \"\"\"Check that action type version is valid.\"\"\"\n matches = []\n\n version = action.get('ActionTypeId', {}).get('Version')\n if isinstance(version, dict):\n self.logger.debug('Unable to validate version when an object is used. Skipping')\n elif version != '1':\n message = 'For all currently supported action types, the only valid version string is \"1\".'\n matches.append(RuleMatch(\n path + ['ActionTypeId', 'Version'],\n message\n ))\n return matches\n\n def check_names_unique(self, action, path, action_names):\n \"\"\"Check that action names are unique.\"\"\"\n matches = []\n\n if action.get('Name') in action_names:\n message = 'All action names within a stage must be unique. ({name})'.format(\n name=action.get('Name')\n )\n matches.append(RuleMatch(path + ['Name'], message))\n action_names.add(action.get('Name'))\n\n return matches\n\n def match(self, cfn):\n \"\"\"Check that stage actions are set up properly.\"\"\"\n matches = []\n\n resources = cfn.get_resource_properties(['AWS::CodePipeline::Pipeline'])\n for resource in resources:\n path = resource['Path']\n properties = resource['Value']\n\n s_stages = properties.get_safe('Stages', path)\n for s_stage_v, s_stage_p in s_stages:\n if not isinstance(s_stage_v, list):\n self.logger.debug('Stages not list. 
Should have been caught by generic linting.')\n return matches\n\n for l_i_stage, l_i_path in s_stage_v.items_safe(s_stage_p):\n action_names = set()\n s_actions = l_i_stage.get_safe('Actions', l_i_path)\n for s_action_v, s_action_p in s_actions:\n if not isinstance(s_action_v, list):\n self.logger.debug('Actions not list. Should have been caught by generic linting.')\n return matches\n\n for l_i_a_action, l_i_a_path in s_action_v.items_safe(s_action_p):\n try:\n matches.extend(self.check_names_unique(l_i_a_action, l_i_a_path, action_names))\n matches.extend(self.check_version(l_i_a_action, l_i_a_path))\n matches.extend(self.check_owner(l_i_a_action, l_i_a_path))\n matches.extend(self.check_artifact_counts(l_i_a_action, 'InputArtifacts', l_i_a_path))\n matches.extend(self.check_artifact_counts(l_i_a_action, 'OutputArtifacts', l_i_a_path))\n except AttributeError as err:\n self.logger.debug('Got AttributeError. Should have been caught by generic linting. '\n 'Ignoring the error here: %s', str(err))\n\n return matches\n", "path": "src/cfnlint/rules/resources/codepipeline/CodepipelineStageActions.py"}]}
| 3,273 | 156 |
gh_patches_debug_7632
|
rasdani/github-patches
|
git_diff
|
aws__aws-cli-4308
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
aws emr create-cluster help command returns error
how to reproduce
1. upgrade to awscli 1.16.190 or 1.16.194 or 1.16.196
at the moment it's enough to install via pip either on macOS(1.16.194) or on linux(1.16.196), or using Homebrew(1.16.190) on macOS
```
# on Ubuntu 16.04 linux
$ pip install --upgrade awscli
<... output skipped - but it was successful, no errors ...>
$ aws --version
aws-cli/1.16.196 Python/2.7.12 Linux/4.4.0-97-generic botocore/1.12.186
$ aws emr create-cluster help
[Errno 2] No such file or directory: '/usr/local/lib/python2.7/dist-packages/awscli/examples/emr/create-cluster-synopsis.txt'
#or on macOS just for example using the one installed via Homebrew
$ brew install awscli
<... output skipped - but it was successful, no errors ...>
$ aws --version
aws-cli/1.16.190 Python/3.7.4 Darwin/18.6.0 botocore/1.12.180
$ aws emr create-cluster help
[Errno 2] No such file or directory: '/usr/local/Cellar/awscli/1.16.190/libexec/lib/python3.7/site-packages/awscli/examples/emr/create-cluster-synopsis.txt'
#or on macOS using aws installed via pip3
$ aws emr create-cluster help
[Errno 2] No such file or directory: '/usr/local/lib/python3.7/site-packages/awscli/examples/emr/create-cluster-synopsis.txt'
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2 import codecs
3 import os.path
4 import re
5 import sys
6
7 from setuptools import setup, find_packages
8
9
10 here = os.path.abspath(os.path.dirname(__file__))
11
12
13 def read(*parts):
14 return codecs.open(os.path.join(here, *parts), 'r').read()
15
16
17 def find_version(*file_paths):
18 version_file = read(*file_paths)
19 version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
20 version_file, re.M)
21 if version_match:
22 return version_match.group(1)
23 raise RuntimeError("Unable to find version string.")
24
25
26 requires = ['botocore==1.12.187',
27 'colorama>=0.2.5,<=0.3.9',
28 'docutils>=0.10',
29 'rsa>=3.1.2,<=3.5.0',
30 's3transfer>=0.2.0,<0.3.0']
31
32
33 if sys.version_info[:2] == (2, 6):
34 # For python2.6 we have to require argparse since it
35 # was not in stdlib until 2.7.
36 requires.append('argparse>=1.1')
37
38 # For Python 2.6, we have to require a different verion of PyYAML since the latest
39 # versions dropped support for Python 2.6.
40 requires.append('PyYAML>=3.10,<=3.13')
41 else:
42 requires.append('PyYAML>=3.10,<=5.1')
43
44
45 setup_options = dict(
46 name='awscli',
47 version=find_version("awscli", "__init__.py"),
48 description='Universal Command Line Environment for AWS.',
49 long_description=read('README.rst'),
50 author='Amazon Web Services',
51 url='http://aws.amazon.com/cli/',
52 scripts=['bin/aws', 'bin/aws.cmd',
53 'bin/aws_completer', 'bin/aws_zsh_completer.sh',
54 'bin/aws_bash_completer'],
55 packages=find_packages(exclude=['tests*']),
56 package_data={'awscli': ['data/*.json', 'examples/*/*.rst',
57 'examples/*/*/*.rst', 'topics/*.rst',
58 'topics/*.json']},
59 install_requires=requires,
60 extras_require={
61 ':python_version=="2.6"': [
62 'argparse>=1.1',
63 ]
64 },
65 license="Apache License 2.0",
66 classifiers=[
67 'Development Status :: 5 - Production/Stable',
68 'Intended Audience :: Developers',
69 'Intended Audience :: System Administrators',
70 'Natural Language :: English',
71 'License :: OSI Approved :: Apache Software License',
72 'Programming Language :: Python',
73 'Programming Language :: Python :: 2',
74 'Programming Language :: Python :: 2.6',
75 'Programming Language :: Python :: 2.7',
76 'Programming Language :: Python :: 3',
77 'Programming Language :: Python :: 3.3',
78 'Programming Language :: Python :: 3.4',
79 'Programming Language :: Python :: 3.5',
80 'Programming Language :: Python :: 3.6',
81 'Programming Language :: Python :: 3.7',
82 ],
83 )
84
85 if 'py2exe' in sys.argv:
86 # This will actually give us a py2exe command.
87 import py2exe
88 # And we have some py2exe specific options.
89 setup_options['options'] = {
90 'py2exe': {
91 'optimize': 0,
92 'skip_archive': True,
93 'dll_excludes': ['crypt32.dll'],
94 'packages': ['docutils', 'urllib', 'httplib', 'HTMLParser',
95 'awscli', 'ConfigParser', 'xml.etree', 'pipes'],
96 }
97 }
98 setup_options['console'] = ['bin/aws']
99
100
101 setup(**setup_options)
102
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -54,6 +54,7 @@
'bin/aws_bash_completer'],
packages=find_packages(exclude=['tests*']),
package_data={'awscli': ['data/*.json', 'examples/*/*.rst',
+ 'examples/*/*.txt', 'examples/*/*/*.txt',
'examples/*/*/*.rst', 'topics/*.rst',
'topics/*.json']},
install_requires=requires,
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -54,6 +54,7 @@\n 'bin/aws_bash_completer'],\n packages=find_packages(exclude=['tests*']),\n package_data={'awscli': ['data/*.json', 'examples/*/*.rst',\n+ 'examples/*/*.txt', 'examples/*/*/*.txt',\n 'examples/*/*/*.rst', 'topics/*.rst',\n 'topics/*.json']},\n install_requires=requires,\n", "issue": "aws emr create-cluster help command returns error\nhow to reproduce \r\n\r\n1. upgrade to awscli 1.16.190 or 1.16.194 or 1.16.196\r\nat the moment it's enough to install via pip either on macOS(1.16.194) or on linux(1.16.196), or using Homebrew(1.16.190) on macOS\r\n```\r\n# on Ubuntu 16.04 linux \r\n$ pip install --upgrade awscli\r\n<... output skipped - but it was successful, no errors ...>\r\n\r\n$ aws --version\r\naws-cli/1.16.196 Python/2.7.12 Linux/4.4.0-97-generic botocore/1.12.186\r\n\r\n$ aws emr create-cluster help\r\n\r\n[Errno 2] No such file or directory: '/usr/local/lib/python2.7/dist-packages/awscli/examples/emr/create-cluster-synopsis.txt'\r\n\r\n\r\n\r\n#or on macOS just for example using the one installed via Homebrew\r\n$ brew install awscli\r\n<... output skipped - but it was successful, no errors ...>\r\n\r\n$ aws --version\r\naws-cli/1.16.190 Python/3.7.4 Darwin/18.6.0 botocore/1.12.180\r\n\r\n$ aws emr create-cluster help\r\n[Errno 2] No such file or directory: '/usr/local/Cellar/awscli/1.16.190/libexec/lib/python3.7/site-packages/awscli/examples/emr/create-cluster-synopsis.txt'\r\n\r\n#or on macOS using aws installed via pip3\r\n$ aws emr create-cluster help\r\n\r\n[Errno 2] No such file or directory: '/usr/local/lib/python3.7/site-packages/awscli/examples/emr/create-cluster-synopsis.txt'\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python\nimport codecs\nimport os.path\nimport re\nimport sys\n\nfrom setuptools import setup, find_packages\n\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\ndef read(*parts):\n return codecs.open(os.path.join(here, *parts), 'r').read()\n\n\ndef find_version(*file_paths):\n version_file = read(*file_paths)\n version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file, re.M)\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Unable to find version string.\")\n\n\nrequires = ['botocore==1.12.187',\n 'colorama>=0.2.5,<=0.3.9',\n 'docutils>=0.10',\n 'rsa>=3.1.2,<=3.5.0',\n 's3transfer>=0.2.0,<0.3.0']\n\n\nif sys.version_info[:2] == (2, 6):\n # For python2.6 we have to require argparse since it\n # was not in stdlib until 2.7.\n requires.append('argparse>=1.1')\n\n # For Python 2.6, we have to require a different verion of PyYAML since the latest\n # versions dropped support for Python 2.6.\n requires.append('PyYAML>=3.10,<=3.13')\nelse:\n requires.append('PyYAML>=3.10,<=5.1')\n\n\nsetup_options = dict(\n name='awscli',\n version=find_version(\"awscli\", \"__init__.py\"),\n description='Universal Command Line Environment for AWS.',\n long_description=read('README.rst'),\n author='Amazon Web Services',\n url='http://aws.amazon.com/cli/',\n scripts=['bin/aws', 'bin/aws.cmd',\n 'bin/aws_completer', 'bin/aws_zsh_completer.sh',\n 'bin/aws_bash_completer'],\n packages=find_packages(exclude=['tests*']),\n package_data={'awscli': ['data/*.json', 'examples/*/*.rst',\n 'examples/*/*/*.rst', 'topics/*.rst',\n 'topics/*.json']},\n install_requires=requires,\n extras_require={\n ':python_version==\"2.6\"': [\n 'argparse>=1.1',\n ]\n },\n license=\"Apache License 2.0\",\n classifiers=[\n 'Development 
Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n ],\n)\n\nif 'py2exe' in sys.argv:\n # This will actually give us a py2exe command.\n import py2exe\n # And we have some py2exe specific options.\n setup_options['options'] = {\n 'py2exe': {\n 'optimize': 0,\n 'skip_archive': True,\n 'dll_excludes': ['crypt32.dll'],\n 'packages': ['docutils', 'urllib', 'httplib', 'HTMLParser',\n 'awscli', 'ConfigParser', 'xml.etree', 'pipes'],\n }\n }\n setup_options['console'] = ['bin/aws']\n\n\nsetup(**setup_options)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport codecs\nimport os.path\nimport re\nimport sys\n\nfrom setuptools import setup, find_packages\n\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\ndef read(*parts):\n return codecs.open(os.path.join(here, *parts), 'r').read()\n\n\ndef find_version(*file_paths):\n version_file = read(*file_paths)\n version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file, re.M)\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Unable to find version string.\")\n\n\nrequires = ['botocore==1.12.187',\n 'colorama>=0.2.5,<=0.3.9',\n 'docutils>=0.10',\n 'rsa>=3.1.2,<=3.5.0',\n 's3transfer>=0.2.0,<0.3.0']\n\n\nif sys.version_info[:2] == (2, 6):\n # For python2.6 we have to require argparse since it\n # was not in stdlib until 2.7.\n requires.append('argparse>=1.1')\n\n # For Python 2.6, we have to require a different verion of PyYAML since the latest\n # versions dropped support for Python 2.6.\n requires.append('PyYAML>=3.10,<=3.13')\nelse:\n requires.append('PyYAML>=3.10,<=5.1')\n\n\nsetup_options = dict(\n name='awscli',\n version=find_version(\"awscli\", \"__init__.py\"),\n description='Universal Command Line Environment for AWS.',\n long_description=read('README.rst'),\n author='Amazon Web Services',\n url='http://aws.amazon.com/cli/',\n scripts=['bin/aws', 'bin/aws.cmd',\n 'bin/aws_completer', 'bin/aws_zsh_completer.sh',\n 'bin/aws_bash_completer'],\n packages=find_packages(exclude=['tests*']),\n package_data={'awscli': ['data/*.json', 'examples/*/*.rst',\n 'examples/*/*.txt', 'examples/*/*/*.txt',\n 'examples/*/*/*.rst', 'topics/*.rst',\n 'topics/*.json']},\n install_requires=requires,\n extras_require={\n ':python_version==\"2.6\"': [\n 'argparse>=1.1',\n ]\n },\n license=\"Apache License 2.0\",\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 
3.6',\n 'Programming Language :: Python :: 3.7',\n ],\n)\n\nif 'py2exe' in sys.argv:\n # This will actually give us a py2exe command.\n import py2exe\n # And we have some py2exe specific options.\n setup_options['options'] = {\n 'py2exe': {\n 'optimize': 0,\n 'skip_archive': True,\n 'dll_excludes': ['crypt32.dll'],\n 'packages': ['docutils', 'urllib', 'httplib', 'HTMLParser',\n 'awscli', 'ConfigParser', 'xml.etree', 'pipes'],\n }\n }\n setup_options['console'] = ['bin/aws']\n\n\nsetup(**setup_options)\n", "path": "setup.py"}]}
| 1,723 | 110 |
gh_patches_debug_6935
|
rasdani/github-patches
|
git_diff
|
googleapis__google-auth-library-python-51
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Create system tests for service account-based credentials
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # Copyright 2014 Google Inc.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from setuptools import find_packages
16 from setuptools import setup
17
18
19 DEPENDENCIES = (
20 'pyasn1>=0.1.7',
21 'pyasn1-modules>=0.0.5',
22 'rsa>=3.1.4',
23 'six>=1.9.0',
24 )
25
26
27 with open('README.rst', 'r') as fh:
28 long_description = fh.read()
29
30 setup(
31 name='google-auth',
32 version='0.0.1',
33 author='Google Cloud Platform',
34 author_email='[email protected]',
35 description='Google Authentication Library',
36 long_description=long_description,
37 url='https://github.com/GoogleCloudPlatform/google-auth-library-python',
38 packages=find_packages(exclude='tests'),
39 namespace_packages=('google',),
40 install_requires=DEPENDENCIES,
41 license='Apache 2.0',
42 keywords='google auth oauth client',
43 classifiers=(
44 'Programming Language :: Python :: 2',
45 'Programming Language :: Python :: 2.7',
46 'Programming Language :: Python :: 3',
47 'Programming Language :: Python :: 3.4',
48 'Programming Language :: Python :: 3.5',
49 'Development Status :: 3 - Alpha',
50 'Intended Audience :: Developers',
51 'License :: OSI Approved :: Apache Software License',
52 'Operating System :: POSIX',
53 'Operating System :: Microsoft :: Windows',
54 'Operating System :: MacOS :: MacOS X',
55 'Operating System :: OS Independent',
56 'Topic :: Internet :: WWW/HTTP',
57 ),
58 )
59
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@
description='Google Authentication Library',
long_description=long_description,
url='https://github.com/GoogleCloudPlatform/google-auth-library-python',
- packages=find_packages(exclude='tests'),
+ packages=find_packages(exclude=('tests', 'system_tests')),
namespace_packages=('google',),
install_requires=DEPENDENCIES,
license='Apache 2.0',
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -35,7 +35,7 @@\n description='Google Authentication Library',\n long_description=long_description,\n url='https://github.com/GoogleCloudPlatform/google-auth-library-python',\n- packages=find_packages(exclude='tests'),\n+ packages=find_packages(exclude=('tests', 'system_tests')),\n namespace_packages=('google',),\n install_requires=DEPENDENCIES,\n license='Apache 2.0',\n", "issue": "Create system tests for service account-based credentials\n\n", "before_files": [{"content": "# Copyright 2014 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom setuptools import find_packages\nfrom setuptools import setup\n\n\nDEPENDENCIES = (\n 'pyasn1>=0.1.7',\n 'pyasn1-modules>=0.0.5',\n 'rsa>=3.1.4',\n 'six>=1.9.0',\n)\n\n\nwith open('README.rst', 'r') as fh:\n long_description = fh.read()\n\nsetup(\n name='google-auth',\n version='0.0.1',\n author='Google Cloud Platform',\n author_email='[email protected]',\n description='Google Authentication Library',\n long_description=long_description,\n url='https://github.com/GoogleCloudPlatform/google-auth-library-python',\n packages=find_packages(exclude='tests'),\n namespace_packages=('google',),\n install_requires=DEPENDENCIES,\n license='Apache 2.0',\n keywords='google auth oauth client',\n classifiers=(\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Development Status :: 3 - Alpha',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Operating System :: POSIX',\n 'Operating System :: Microsoft :: Windows',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: OS Independent',\n 'Topic :: Internet :: WWW/HTTP',\n ),\n)\n", "path": "setup.py"}], "after_files": [{"content": "# Copyright 2014 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom setuptools import find_packages\nfrom setuptools import setup\n\n\nDEPENDENCIES = (\n 'pyasn1>=0.1.7',\n 'pyasn1-modules>=0.0.5',\n 'rsa>=3.1.4',\n 'six>=1.9.0',\n)\n\n\nwith open('README.rst', 'r') as fh:\n long_description = fh.read()\n\nsetup(\n name='google-auth',\n version='0.0.1',\n author='Google Cloud Platform',\n author_email='[email protected]',\n description='Google Authentication Library',\n long_description=long_description,\n 
url='https://github.com/GoogleCloudPlatform/google-auth-library-python',\n packages=find_packages(exclude=('tests', 'system_tests')),\n namespace_packages=('google',),\n install_requires=DEPENDENCIES,\n license='Apache 2.0',\n keywords='google auth oauth client',\n classifiers=(\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Development Status :: 3 - Alpha',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Operating System :: POSIX',\n 'Operating System :: Microsoft :: Windows',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: OS Independent',\n 'Topic :: Internet :: WWW/HTTP',\n ),\n)\n", "path": "setup.py"}]}
| 834 | 109 |
gh_patches_debug_18389
|
rasdani/github-patches
|
git_diff
|
akvo__akvo-rsr-1829
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
IATI import
## Test plan
1. Take any IATI file from the [IATI registry](http://iatiregistry.org)
2. Make sure the reporting organisation is in RSR, with the correct IATI Organisation ID and 'Reportable' set to True.
3. In the old admin, add a new IATI import. Either fill in an external URL, or a local file. The user that is selected will get an email with a summary of the import. _Note that for larger files (e.g. > 100 projects), you might see a timeout. However, the import should continue to run in the background._
4. When the import is done, it should show up with status completed, and the mail with import details should have been sent. _Note that files up to 100 projects take about half a minute._
## Issue description
See product design repository: https://github.com/akvo/akvo-product-design/issues/97
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `akvo/rsr/migrations/0032_auto_20151001_0956.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 from __future__ import unicode_literals
3
4 from django.db import models, migrations
5 import akvo.rsr.models.iati_import
6 from django.conf import settings
7 import akvo.rsr.fields
8
9
10 class Migration(migrations.Migration):
11
12 dependencies = [
13 ('rsr', '0031_auto_20150825_1109'),
14 ]
15
16 operations = [
17 migrations.CreateModel(
18 name='IatiImport',
19 fields=[
20 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
21 ('url', models.URLField(verbose_name='url', blank=True)),
22 ('local_file', models.FileField(upload_to=akvo.rsr.models.iati_import.file_path, verbose_name='local file', blank=True)),
23 ('status', models.PositiveSmallIntegerField(default=1, verbose_name='status', choices=[(1, 'pending'), (2, 'retrieving file'), (3, 'import in progress'), (4, 'completed'), (5, 'cancelled')])),
24 ('start_date', models.DateTimeField(null=True, verbose_name='start date', blank=True)),
25 ('end_date', models.DateTimeField(null=True, verbose_name='end date', blank=True)),
26 ],
27 options={
28 'verbose_name': 'IATI import',
29 'verbose_name_plural': 'IATI imports',
30 },
31 bases=(models.Model,),
32 ),
33 migrations.CreateModel(
34 name='IatiImportLog',
35 fields=[
36 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
37 ('severity', models.PositiveSmallIntegerField(default=1, verbose_name='severity', choices=[(0, 'information'), (1, 'critical error'), (2, 'value not saved'), (3, 'value partly saved')])),
38 ('text', akvo.rsr.fields.ValidXMLTextField(verbose_name='text')),
39 ('iati_import', models.ForeignKey(related_name='iati_import_logs', verbose_name='iati_import', to='rsr.IatiImport')),
40 ('project', models.ForeignKey(related_name='iati_project_import_logs', verbose_name='project', blank=True, to='rsr.Project', null=True)),
41 ],
42 options={
43 'verbose_name': 'IATI import log',
44 'verbose_name_plural': 'IATI import logs',
45 },
46 bases=(models.Model,),
47 ),
48 migrations.CreateModel(
49 name='IatiProjectImport',
50 fields=[
51 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
52 ('action', models.PositiveSmallIntegerField(verbose_name='action', choices=[(1, 'create'), (2, 'update')])),
53 ('status', models.PositiveSmallIntegerField(default=1, verbose_name='status', choices=[(1, 'pending'), (2, 'import in progress'), (3, 'completed'), (4, 'cancelled')])),
54 ('start_date', models.DateTimeField(null=True, verbose_name='start date', blank=True)),
55 ('end_date', models.DateTimeField(null=True, verbose_name='end date', blank=True)),
56 ('iati_import', models.ForeignKey(related_name='iati_project_imports', verbose_name='iati_import', to='rsr.IatiImport')),
57 ('project', models.ForeignKey(related_name='iati_project_imports', verbose_name='project', to='rsr.Project')),
58 ],
59 options={
60 'verbose_name': 'IATI project import',
61 'verbose_name_plural': 'IATI project imports',
62 },
63 bases=(models.Model,),
64 ),
65 migrations.AddField(
66 model_name='iatiimport',
67 name='projects',
68 field=models.ManyToManyField(to='rsr.Project', verbose_name='projects', through='rsr.IatiProjectImport', blank=True),
69 preserve_default=True,
70 ),
71 migrations.AddField(
72 model_name='iatiimport',
73 name='user',
74 field=models.ForeignKey(related_name='iati_imports', verbose_name='user', to=settings.AUTH_USER_MODEL),
75 preserve_default=True,
76 ),
77 migrations.AlterField(
78 model_name='budgetitem',
79 name='amount',
80 field=models.DecimalField(null=True, verbose_name='amount', max_digits=14, decimal_places=2, blank=True),
81 preserve_default=True,
82 ),
83 migrations.AlterField(
84 model_name='partnership',
85 name='funding_amount',
86 field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text="The funding amount of the partner.<br>Note that it's only possible to indicate a funding amount for funding partners.", null=True, verbose_name='funding amount', db_index=True),
87 preserve_default=True,
88 ),
89 migrations.AlterField(
90 model_name='planneddisbursement',
91 name='value',
92 field=models.DecimalField(null=True, verbose_name='value', max_digits=14, decimal_places=2, blank=True),
93 preserve_default=True,
94 ),
95 migrations.AlterField(
96 model_name='project',
97 name='budget',
98 field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, verbose_name='project budget', db_index=True),
99 preserve_default=True,
100 ),
101 migrations.AlterField(
102 model_name='project',
103 name='funds',
104 field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, db_index=True),
105 preserve_default=True,
106 ),
107 migrations.AlterField(
108 model_name='project',
109 name='funds_needed',
110 field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, db_index=True),
111 preserve_default=True,
112 ),
113 migrations.AlterField(
114 model_name='transaction',
115 name='value',
116 field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text='Enter the transaction amount.', null=True, verbose_name='value'),
117 preserve_default=True,
118 ),
119 ]
120
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/akvo/rsr/migrations/0032_auto_20151001_0956.py b/akvo/rsr/migrations/0032_auto_20151001_0956.py
--- a/akvo/rsr/migrations/0032_auto_20151001_0956.py
+++ b/akvo/rsr/migrations/0032_auto_20151001_0956.py
@@ -116,4 +116,10 @@
field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text='Enter the transaction amount.', null=True, verbose_name='value'),
preserve_default=True,
),
+ migrations.AlterField(
+ model_name='project',
+ name='iati_activity_id',
+ field=akvo.rsr.fields.ValidXMLCharField(null=True, max_length=100, blank=True, help_text='This should be the official unique IATI Identifier for the project. The identifier consists of the IATI organisation identifier and the (organisations internal) project identifier, e.g. NL-KVK-31156201-TZ1234. (100 characters)<br>Note that \'projects\' in this form are the same as \'activities\' in IATI.<br><a href="http://iatistandard.org/activity-standard/iati-activities/iati-activity/iati-identifier" target="_blank">How to create</a>', verbose_name='IATI Project Identifier', db_index=True),
+ preserve_default=True,
+ ),
]
|
{"golden_diff": "diff --git a/akvo/rsr/migrations/0032_auto_20151001_0956.py b/akvo/rsr/migrations/0032_auto_20151001_0956.py\n--- a/akvo/rsr/migrations/0032_auto_20151001_0956.py\n+++ b/akvo/rsr/migrations/0032_auto_20151001_0956.py\n@@ -116,4 +116,10 @@\n field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text='Enter the transaction amount.', null=True, verbose_name='value'),\n preserve_default=True,\n ),\n+ migrations.AlterField(\n+ model_name='project',\n+ name='iati_activity_id',\n+ field=akvo.rsr.fields.ValidXMLCharField(null=True, max_length=100, blank=True, help_text='This should be the official unique IATI Identifier for the project. The identifier consists of the IATI organisation identifier and the (organisations internal) project identifier, e.g. NL-KVK-31156201-TZ1234. (100 characters)<br>Note that \\'projects\\' in this form are the same as \\'activities\\' in IATI.<br><a href=\"http://iatistandard.org/activity-standard/iati-activities/iati-activity/iati-identifier\" target=\"_blank\">How to create</a>', verbose_name='IATI Project Identifier', db_index=True),\n+ preserve_default=True,\n+ ),\n ]\n", "issue": "IATI import\n## Test plan\n1. Take any IATI file from the [IATI registry](http://iatiregistry.org)\n2. Make sure the reporting organisation is in RSR, with the correct IATI Organisation ID and 'Reportable' set to True.\n3. In the old admin, add a new IATI import. Either fill in an external URL, or a local file. The user that is selected will get an email with a summary of the import. _Note that for larger files (e.g. > 100 projects), you might see a timeout. However, the import should continue to run in the background._\n4. When the import is done, it should show up with status completed, and the mail with import details should have been sent. 
_Note that files up to 100 projects take about half a minute._\n## Issue description\n\nSee product design repository: https://github.com/akvo/akvo-product-design/issues/97\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\nimport akvo.rsr.models.iati_import\nfrom django.conf import settings\nimport akvo.rsr.fields\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('rsr', '0031_auto_20150825_1109'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='IatiImport',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('url', models.URLField(verbose_name='url', blank=True)),\n ('local_file', models.FileField(upload_to=akvo.rsr.models.iati_import.file_path, verbose_name='local file', blank=True)),\n ('status', models.PositiveSmallIntegerField(default=1, verbose_name='status', choices=[(1, 'pending'), (2, 'retrieving file'), (3, 'import in progress'), (4, 'completed'), (5, 'cancelled')])),\n ('start_date', models.DateTimeField(null=True, verbose_name='start date', blank=True)),\n ('end_date', models.DateTimeField(null=True, verbose_name='end date', blank=True)),\n ],\n options={\n 'verbose_name': 'IATI import',\n 'verbose_name_plural': 'IATI imports',\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='IatiImportLog',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('severity', models.PositiveSmallIntegerField(default=1, verbose_name='severity', choices=[(0, 'information'), (1, 'critical error'), (2, 'value not saved'), (3, 'value partly saved')])),\n ('text', akvo.rsr.fields.ValidXMLTextField(verbose_name='text')),\n ('iati_import', models.ForeignKey(related_name='iati_import_logs', verbose_name='iati_import', to='rsr.IatiImport')),\n ('project', models.ForeignKey(related_name='iati_project_import_logs', verbose_name='project', blank=True, to='rsr.Project', null=True)),\n ],\n options={\n 'verbose_name': 'IATI import log',\n 'verbose_name_plural': 'IATI import logs',\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='IatiProjectImport',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('action', models.PositiveSmallIntegerField(verbose_name='action', choices=[(1, 'create'), (2, 'update')])),\n ('status', models.PositiveSmallIntegerField(default=1, verbose_name='status', choices=[(1, 'pending'), (2, 'import in progress'), (3, 'completed'), (4, 'cancelled')])),\n ('start_date', models.DateTimeField(null=True, verbose_name='start date', blank=True)),\n ('end_date', models.DateTimeField(null=True, verbose_name='end date', blank=True)),\n ('iati_import', models.ForeignKey(related_name='iati_project_imports', verbose_name='iati_import', to='rsr.IatiImport')),\n ('project', models.ForeignKey(related_name='iati_project_imports', verbose_name='project', to='rsr.Project')),\n ],\n options={\n 'verbose_name': 'IATI project import',\n 'verbose_name_plural': 'IATI project imports',\n },\n bases=(models.Model,),\n ),\n migrations.AddField(\n model_name='iatiimport',\n name='projects',\n field=models.ManyToManyField(to='rsr.Project', verbose_name='projects', through='rsr.IatiProjectImport', blank=True),\n preserve_default=True,\n ),\n migrations.AddField(\n model_name='iatiimport',\n name='user',\n field=models.ForeignKey(related_name='iati_imports', verbose_name='user', 
to=settings.AUTH_USER_MODEL),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='budgetitem',\n name='amount',\n field=models.DecimalField(null=True, verbose_name='amount', max_digits=14, decimal_places=2, blank=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='partnership',\n name='funding_amount',\n field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text=\"The funding amount of the partner.<br>Note that it's only possible to indicate a funding amount for funding partners.\", null=True, verbose_name='funding amount', db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='planneddisbursement',\n name='value',\n field=models.DecimalField(null=True, verbose_name='value', max_digits=14, decimal_places=2, blank=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='budget',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, verbose_name='project budget', db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='funds',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='funds_needed',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='transaction',\n name='value',\n field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text='Enter the transaction amount.', null=True, verbose_name='value'),\n preserve_default=True,\n ),\n ]\n", "path": "akvo/rsr/migrations/0032_auto_20151001_0956.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\nimport akvo.rsr.models.iati_import\nfrom django.conf import settings\nimport akvo.rsr.fields\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('rsr', '0031_auto_20150825_1109'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='IatiImport',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('url', models.URLField(verbose_name='url', blank=True)),\n ('local_file', models.FileField(upload_to=akvo.rsr.models.iati_import.file_path, verbose_name='local file', blank=True)),\n ('status', models.PositiveSmallIntegerField(default=1, verbose_name='status', choices=[(1, 'pending'), (2, 'retrieving file'), (3, 'import in progress'), (4, 'completed'), (5, 'cancelled')])),\n ('start_date', models.DateTimeField(null=True, verbose_name='start date', blank=True)),\n ('end_date', models.DateTimeField(null=True, verbose_name='end date', blank=True)),\n ],\n options={\n 'verbose_name': 'IATI import',\n 'verbose_name_plural': 'IATI imports',\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='IatiImportLog',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('severity', models.PositiveSmallIntegerField(default=1, verbose_name='severity', choices=[(0, 'information'), (1, 'critical error'), (2, 'value not saved'), (3, 'value partly saved')])),\n ('text', akvo.rsr.fields.ValidXMLTextField(verbose_name='text')),\n ('iati_import', models.ForeignKey(related_name='iati_import_logs', verbose_name='iati_import', 
to='rsr.IatiImport')),\n ('project', models.ForeignKey(related_name='iati_project_import_logs', verbose_name='project', blank=True, to='rsr.Project', null=True)),\n ],\n options={\n 'verbose_name': 'IATI import log',\n 'verbose_name_plural': 'IATI import logs',\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='IatiProjectImport',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('action', models.PositiveSmallIntegerField(verbose_name='action', choices=[(1, 'create'), (2, 'update')])),\n ('status', models.PositiveSmallIntegerField(default=1, verbose_name='status', choices=[(1, 'pending'), (2, 'import in progress'), (3, 'completed'), (4, 'cancelled')])),\n ('start_date', models.DateTimeField(null=True, verbose_name='start date', blank=True)),\n ('end_date', models.DateTimeField(null=True, verbose_name='end date', blank=True)),\n ('iati_import', models.ForeignKey(related_name='iati_project_imports', verbose_name='iati_import', to='rsr.IatiImport')),\n ('project', models.ForeignKey(related_name='iati_project_imports', verbose_name='project', to='rsr.Project')),\n ],\n options={\n 'verbose_name': 'IATI project import',\n 'verbose_name_plural': 'IATI project imports',\n },\n bases=(models.Model,),\n ),\n migrations.AddField(\n model_name='iatiimport',\n name='projects',\n field=models.ManyToManyField(to='rsr.Project', verbose_name='projects', through='rsr.IatiProjectImport', blank=True),\n preserve_default=True,\n ),\n migrations.AddField(\n model_name='iatiimport',\n name='user',\n field=models.ForeignKey(related_name='iati_imports', verbose_name='user', to=settings.AUTH_USER_MODEL),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='budgetitem',\n name='amount',\n field=models.DecimalField(null=True, verbose_name='amount', max_digits=14, decimal_places=2, blank=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='partnership',\n name='funding_amount',\n field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text=\"The funding amount of the partner.<br>Note that it's only possible to indicate a funding amount for funding partners.\", null=True, verbose_name='funding amount', db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='planneddisbursement',\n name='value',\n field=models.DecimalField(null=True, verbose_name='value', max_digits=14, decimal_places=2, blank=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='budget',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, verbose_name='project budget', db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='funds',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='funds_needed',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='transaction',\n name='value',\n field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text='Enter the transaction amount.', null=True, verbose_name='value'),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='iati_activity_id',\n field=akvo.rsr.fields.ValidXMLCharField(null=True, 
max_length=100, blank=True, help_text='This should be the official unique IATI Identifier for the project. The identifier consists of the IATI organisation identifier and the (organisations internal) project identifier, e.g. NL-KVK-31156201-TZ1234. (100 characters)<br>Note that \\'projects\\' in this form are the same as \\'activities\\' in IATI.<br><a href=\"http://iatistandard.org/activity-standard/iati-activities/iati-activity/iati-identifier\" target=\"_blank\">How to create</a>', verbose_name='IATI Project Identifier', db_index=True),\n preserve_default=True,\n ),\n ]\n", "path": "akvo/rsr/migrations/0032_auto_20151001_0956.py"}]}
| 2,020 | 374 |
gh_patches_debug_40596
|
rasdani/github-patches
|
git_diff
|
alltheplaces__alltheplaces-4073
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
StructuredDataSpider's wanted_types duplicates items
If a JSON-LD contains e.g. `"@type":["HomeGoodsStore","FurnitureStore"]`, then StructuredDataSpider should only consider the item once when given `wanted_types = ["HomeGoodsStore", "FurnitureStore"]`.
(If there's really some reason to handle the item once for each of the types, LinkedDataParser can be invoked directly; if the site contains two distinct items, e.g. "Store" and "Pharmacy", then those are expressed as separate items, not two types on the same item.)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `locations/structured_data_spider.py`
Content:
```
1 import re
2
3 from scrapy import Spider
4
5 from locations.linked_data_parser import LinkedDataParser
6 from locations.microdata_parser import MicrodataParser
7
8
9 def extract_email(item, response):
10 for link in response.xpath("//a[contains(@href, 'mailto')]/@href").getall():
11 link = link.strip()
12 if link.startswith("mailto:"):
13 item["email"] = link.replace("mailto:", "")
14 return
15
16
17 def extract_phone(item, response):
18 for link in response.xpath("//a[contains(@href, 'tel')]/@href").getall():
19 link = link.strip()
20 if link.startswith("tel:"):
21
22 item["phone"] = link.replace("tel:", "")
23 return
24
25
26 def extract_twitter(item, response):
27 if twitter := response.xpath('//meta[@name="twitter:site"]/@content').get():
28 item["twitter"] = twitter.strip()
29
30
31 def extract_image(item, response):
32 if image := response.xpath('//meta[@name="twitter:image"]/@content').get():
33 item["image"] = image.strip()
34 return
35 if image := response.xpath('//meta[@name="og:image"]/@content').get():
36 item["image"] = image.strip()
37
38
39 class StructuredDataSpider(Spider):
40
41 wanted_types = []
42 search_for_email = True
43 search_for_phone = True
44 search_for_twitter = True
45 search_for_image = True
46
47 def parse_sd(self, response):
48 MicrodataParser.convert_to_json_ld(response)
49 for wanted_type in self.wanted_types:
50 if item := LinkedDataParser.parse(response, wanted_type):
51
52 if item["ref"] is None:
53 if hasattr(self, "rules"):
54 # Attempt to pull a match from CrawlSpider.rules
55 for rule in getattr(self, "rules"):
56 for allow in rule.link_extractor.allow_res:
57 if match := re.match(allow, response.url):
58 if len(match.groups()) > 0:
59 item["ref"] = match.group(1)
60 elif hasattr(self, "sitemap_rules"):
61 # Attempt to pull a match from SitemapSpider.sitemap_rules
62 for rule in getattr(self, "sitemap_rules"):
63 if match := re.match(rule[0], response.url):
64 if len(match.groups()) > 0:
65 item["ref"] = match.group(1)
66
67 if item["ref"] is None:
68 item["ref"] = response.url
69
70 if self.search_for_email and item["email"] is None:
71 extract_email(item, response)
72
73 if self.search_for_phone and item["phone"] is None:
74 extract_phone(item, response)
75
76 if self.search_for_twitter and item.get("twitter") is None:
77 extract_twitter(item, response)
78
79 if self.search_for_image and item.get("image") is None:
80 extract_image(item, response)
81
82 yield from self.inspect_item(item, response)
83
84 def inspect_item(self, item, response):
85 """Override with any additional processing on the item."""
86 yield item
87
```
Path: `locations/linked_data_parser.py`
Content:
```
1 import json
2
3 from locations.hours import OpeningHours
4 from locations.items import GeojsonPointItem
5
6
7 class LinkedDataParser(object):
8 @staticmethod
9 def iter_linked_data(response):
10 lds = response.xpath('//script[@type="application/ld+json"]//text()').getall()
11 for ld in lds:
12 try:
13 ld_obj = json.loads(ld, strict=False)
14 except json.decoder.JSONDecodeError:
15 continue
16
17 if isinstance(ld_obj, dict):
18 if "@graph" in ld_obj:
19 yield from ld_obj["@graph"]
20 else:
21 yield ld_obj
22 elif isinstance(ld_obj, list):
23 yield from ld_obj
24 else:
25 raise TypeError(ld_obj)
26
27 @staticmethod
28 def find_linked_data(response, wanted_type) -> {}:
29 for ld_obj in LinkedDataParser.iter_linked_data(response):
30 if not ld_obj.get("@type"):
31 continue
32
33 types = ld_obj["@type"]
34
35 if not isinstance(types, list):
36 types = [types]
37
38 for t in types:
39 if LinkedDataParser.check_type(t, wanted_type, default=False):
40 return ld_obj
41
42 @staticmethod
43 def parse_ld(ld) -> GeojsonPointItem:
44 item = GeojsonPointItem()
45
46 if (
47 (geo := ld.get("geo"))
48 or "location" in ld
49 and (geo := ld["location"].get("geo"))
50 ):
51 if isinstance(geo, list):
52 geo = geo[0]
53
54 if LinkedDataParser.check_type(geo.get("@type"), "GeoCoordinates"):
55 item["lat"] = LinkedDataParser.get_clean(geo, "latitude")
56 item["lon"] = LinkedDataParser.get_clean(geo, "longitude")
57
58 item["name"] = LinkedDataParser.get_clean(ld, "name")
59
60 if addr := LinkedDataParser.get_clean(ld, "address"):
61 if isinstance(addr, list):
62 addr = addr[0]
63
64 if isinstance(addr, str):
65 item["addr_full"] = addr
66 elif isinstance(addr, dict):
67 if LinkedDataParser.check_type(addr.get("@type"), "PostalAddress"):
68 item["street_address"] = LinkedDataParser.get_case_insensitive(
69 addr, "streetAddress"
70 )
71 item["city"] = LinkedDataParser.get_case_insensitive(
72 addr, "addressLocality"
73 )
74 item["state"] = LinkedDataParser.get_case_insensitive(
75 addr, "addressRegion"
76 )
77 item["postcode"] = LinkedDataParser.get_case_insensitive(
78 addr, "postalCode"
79 )
80 country = LinkedDataParser.get_case_insensitive(
81 addr, "addressCountry"
82 )
83
84 if isinstance(country, str):
85 item["country"] = country
86 elif isinstance(country, dict):
87 if LinkedDataParser.check_type(country.get("@type"), "Country"):
88 item["country"] = country.get("name")
89
90 # Common mistake to put "telephone" in "address"
91 item["phone"] = LinkedDataParser.get_clean(addr, "telephone")
92
93 if item.get("phone") is None:
94 item["phone"] = LinkedDataParser.get_clean(ld, "telephone")
95
96 if isinstance(item["phone"], list):
97 item["phone"] = item["phone"][0]
98
99 if isinstance(item["phone"], str):
100 item["phone"] = item["phone"].replace("tel:", "")
101
102 item["email"] = LinkedDataParser.get_clean(ld, "email")
103
104 if isinstance(item["email"], str):
105 item["email"] = item["email"].replace("mailto:", "")
106
107 item["website"] = ld.get("url")
108
109 try:
110 oh = OpeningHours()
111 oh.from_linked_data(ld)
112 item["opening_hours"] = oh.as_opening_hours()
113 except:
114 pass
115
116 if image := ld.get("image"):
117 if isinstance(image, list):
118 image = image[0]
119
120 if isinstance(image, str):
121 item["image"] = image
122 elif isinstance(image, dict):
123 if LinkedDataParser.check_type(image.get("@type"), "ImageObject"):
124 item["image"] = image.get("contentUrl")
125
126 item["ref"] = ld.get("branchCode")
127 if item["ref"] is None or item["ref"] == "":
128 item["ref"] = ld.get("@id")
129
130 if item["ref"] == "":
131 item["ref"] = None
132
133 return item
134
135 @staticmethod
136 def parse(response, wanted_type) -> GeojsonPointItem:
137 ld_item = LinkedDataParser.find_linked_data(response, wanted_type)
138 if ld_item:
139 item = LinkedDataParser.parse_ld(ld_item)
140
141 if item["website"] is None:
142 item["website"] = response.url
143 elif item["website"] == "":
144 item["website"] = response.url
145 elif item["website"][0] == "/":
146 item["website"] = response.url
147 elif item["website"].startswith("www"):
148 item["website"] = "https://" + item["website"]
149
150 return item
151
152 @staticmethod
153 def get_clean(obj, key):
154 if value := obj.get(key):
155 if isinstance(value, str):
156 if value == "null":
157 return None
158 return value.strip()
159 return value
160
161 @staticmethod
162 def get_case_insensitive(obj, key):
163 # Prioritise the case correct key
164 if value := LinkedDataParser.get_clean(obj, key):
165 return value
166
167 for real_key in obj:
168 if real_key.lower() == key.lower():
169 return LinkedDataParser.get_clean(obj, real_key)
170
171 @staticmethod
172 def check_type(type: str, wanted_type: str, default: bool = True) -> bool:
173 if default and type is None:
174 return True
175
176 return (
177 type.lower()
178 .replace("http://", "")
179 .replace("https://", "")
180 .replace("schema.org/", "")
181 == wanted_type.lower()
182 )
183
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/locations/linked_data_parser.py b/locations/linked_data_parser.py
--- a/locations/linked_data_parser.py
+++ b/locations/linked_data_parser.py
@@ -35,8 +35,22 @@
if not isinstance(types, list):
types = [types]
- for t in types:
- if LinkedDataParser.check_type(t, wanted_type, default=False):
+ types = [LinkedDataParser.clean_type(t) for t in types]
+
+ if isinstance(wanted_type, list):
+ wanted_types = wanted_type
+ else:
+ wanted_types = [wanted_type]
+
+ wanted_types = [LinkedDataParser.clean_type(t) for t in wanted_types]
+
+ for wanted_type in wanted_types:
+ valid_type = True
+ for t in types:
+ if not t in wanted_types:
+ valid_type = False
+
+ if valid_type:
return ld_obj
@staticmethod
@@ -173,10 +187,13 @@
if default and type is None:
return True
+ return LinkedDataParser.clean_type(type) == wanted_type.lower()
+
+ @staticmethod
+ def clean_type(type: str) -> str:
return (
type.lower()
.replace("http://", "")
.replace("https://", "")
.replace("schema.org/", "")
- == wanted_type.lower()
)
diff --git a/locations/structured_data_spider.py b/locations/structured_data_spider.py
--- a/locations/structured_data_spider.py
+++ b/locations/structured_data_spider.py
@@ -38,7 +38,19 @@
class StructuredDataSpider(Spider):
- wanted_types = []
+ wanted_types = [
+ "LocalBusiness",
+ "Store",
+ "Restaurant",
+ "BankOrCreditUnion",
+ "GroceryStore",
+ "FastFoodRestaurant",
+ "Hotel",
+ "Place",
+ "ClothingStore",
+ "DepartmentStore",
+ "HardwareStore",
+ ]
search_for_email = True
search_for_phone = True
search_for_twitter = True
@@ -47,7 +59,10 @@
def parse_sd(self, response):
MicrodataParser.convert_to_json_ld(response)
for wanted_type in self.wanted_types:
- if item := LinkedDataParser.parse(response, wanted_type):
+ if ld_item := LinkedDataParser.find_linked_data(response, wanted_type):
+ self.pre_process_data(ld_item)
+
+ item = LinkedDataParser.parse_ld(ld_item)
if item["ref"] is None:
if hasattr(self, "rules"):
@@ -79,8 +94,16 @@
if self.search_for_image and item.get("image") is None:
extract_image(item, response)
- yield from self.inspect_item(item, response)
+ yield from self.post_process_item(item, response, ld_item)
+
+ def pre_process_data(self, ld_data, **kwargs):
+ """Override with any pre-processing on the item."""
+ pass
+
+ def post_process_item(self, item, response, ld_data, **kwargs):
+ """Override with any post-processing on the item."""
+ yield from self.inspect_item(item, response)
def inspect_item(self, item, response):
- """Override with any additional processing on the item."""
+ """Deprecated, please use post_process_item(self, item, response, ld_data):"""
yield item
|
{"golden_diff": "diff --git a/locations/linked_data_parser.py b/locations/linked_data_parser.py\n--- a/locations/linked_data_parser.py\n+++ b/locations/linked_data_parser.py\n@@ -35,8 +35,22 @@\n if not isinstance(types, list):\n types = [types]\n \n- for t in types:\n- if LinkedDataParser.check_type(t, wanted_type, default=False):\n+ types = [LinkedDataParser.clean_type(t) for t in types]\n+\n+ if isinstance(wanted_type, list):\n+ wanted_types = wanted_type\n+ else:\n+ wanted_types = [wanted_type]\n+\n+ wanted_types = [LinkedDataParser.clean_type(t) for t in wanted_types]\n+\n+ for wanted_type in wanted_types:\n+ valid_type = True\n+ for t in types:\n+ if not t in wanted_types:\n+ valid_type = False\n+\n+ if valid_type:\n return ld_obj\n \n @staticmethod\n@@ -173,10 +187,13 @@\n if default and type is None:\n return True\n \n+ return LinkedDataParser.clean_type(type) == wanted_type.lower()\n+\n+ @staticmethod\n+ def clean_type(type: str) -> str:\n return (\n type.lower()\n .replace(\"http://\", \"\")\n .replace(\"https://\", \"\")\n .replace(\"schema.org/\", \"\")\n- == wanted_type.lower()\n )\ndiff --git a/locations/structured_data_spider.py b/locations/structured_data_spider.py\n--- a/locations/structured_data_spider.py\n+++ b/locations/structured_data_spider.py\n@@ -38,7 +38,19 @@\n \n class StructuredDataSpider(Spider):\n \n- wanted_types = []\n+ wanted_types = [\n+ \"LocalBusiness\",\n+ \"Store\",\n+ \"Restaurant\",\n+ \"BankOrCreditUnion\",\n+ \"GroceryStore\",\n+ \"FastFoodRestaurant\",\n+ \"Hotel\",\n+ \"Place\",\n+ \"ClothingStore\",\n+ \"DepartmentStore\",\n+ \"HardwareStore\",\n+ ]\n search_for_email = True\n search_for_phone = True\n search_for_twitter = True\n@@ -47,7 +59,10 @@\n def parse_sd(self, response):\n MicrodataParser.convert_to_json_ld(response)\n for wanted_type in self.wanted_types:\n- if item := LinkedDataParser.parse(response, wanted_type):\n+ if ld_item := LinkedDataParser.find_linked_data(response, wanted_type):\n+ self.pre_process_data(ld_item)\n+\n+ item = LinkedDataParser.parse_ld(ld_item)\n \n if item[\"ref\"] is None:\n if hasattr(self, \"rules\"):\n@@ -79,8 +94,16 @@\n if self.search_for_image and item.get(\"image\") is None:\n extract_image(item, response)\n \n- yield from self.inspect_item(item, response)\n+ yield from self.post_process_item(item, response, ld_item)\n+\n+ def pre_process_data(self, ld_data, **kwargs):\n+ \"\"\"Override with any pre-processing on the item.\"\"\"\n+ pass\n+\n+ def post_process_item(self, item, response, ld_data, **kwargs):\n+ \"\"\"Override with any post-processing on the item.\"\"\"\n+ yield from self.inspect_item(item, response)\n \n def inspect_item(self, item, response):\n- \"\"\"Override with any additional processing on the item.\"\"\"\n+ \"\"\"Deprecated, please use post_process_item(self, item, response, ld_data):\"\"\"\n yield item\n", "issue": "StructuredDataSpider's wanted_types duplicates items\nIf a JSON-LD contains e.g. `\"@type\":[\"HomeGoodsStore\",\"FurnitureStore\"]`, then StructuredDataSpider should only consider the item once when given `wanted_types = [\"HomeGoodsStore\", \"FurnitureStore\"]`.\r\n\r\n(If there's really some reason to handle the item once for each of the types, LinkedDataParser can be invoked directly; if the site contains two distinct items, e.g. 
\"Store\" and \"Pharmacy\", then those are expressed as separate items, not two types on the same item.)\n", "before_files": [{"content": "import re\n\nfrom scrapy import Spider\n\nfrom locations.linked_data_parser import LinkedDataParser\nfrom locations.microdata_parser import MicrodataParser\n\n\ndef extract_email(item, response):\n for link in response.xpath(\"//a[contains(@href, 'mailto')]/@href\").getall():\n link = link.strip()\n if link.startswith(\"mailto:\"):\n item[\"email\"] = link.replace(\"mailto:\", \"\")\n return\n\n\ndef extract_phone(item, response):\n for link in response.xpath(\"//a[contains(@href, 'tel')]/@href\").getall():\n link = link.strip()\n if link.startswith(\"tel:\"):\n\n item[\"phone\"] = link.replace(\"tel:\", \"\")\n return\n\n\ndef extract_twitter(item, response):\n if twitter := response.xpath('//meta[@name=\"twitter:site\"]/@content').get():\n item[\"twitter\"] = twitter.strip()\n\n\ndef extract_image(item, response):\n if image := response.xpath('//meta[@name=\"twitter:image\"]/@content').get():\n item[\"image\"] = image.strip()\n return\n if image := response.xpath('//meta[@name=\"og:image\"]/@content').get():\n item[\"image\"] = image.strip()\n\n\nclass StructuredDataSpider(Spider):\n\n wanted_types = []\n search_for_email = True\n search_for_phone = True\n search_for_twitter = True\n search_for_image = True\n\n def parse_sd(self, response):\n MicrodataParser.convert_to_json_ld(response)\n for wanted_type in self.wanted_types:\n if item := LinkedDataParser.parse(response, wanted_type):\n\n if item[\"ref\"] is None:\n if hasattr(self, \"rules\"):\n # Attempt to pull a match from CrawlSpider.rules\n for rule in getattr(self, \"rules\"):\n for allow in rule.link_extractor.allow_res:\n if match := re.match(allow, response.url):\n if len(match.groups()) > 0:\n item[\"ref\"] = match.group(1)\n elif hasattr(self, \"sitemap_rules\"):\n # Attempt to pull a match from SitemapSpider.sitemap_rules\n for rule in getattr(self, \"sitemap_rules\"):\n if match := re.match(rule[0], response.url):\n if len(match.groups()) > 0:\n item[\"ref\"] = match.group(1)\n\n if item[\"ref\"] is None:\n item[\"ref\"] = response.url\n\n if self.search_for_email and item[\"email\"] is None:\n extract_email(item, response)\n\n if self.search_for_phone and item[\"phone\"] is None:\n extract_phone(item, response)\n\n if self.search_for_twitter and item.get(\"twitter\") is None:\n extract_twitter(item, response)\n\n if self.search_for_image and item.get(\"image\") is None:\n extract_image(item, response)\n\n yield from self.inspect_item(item, response)\n\n def inspect_item(self, item, response):\n \"\"\"Override with any additional processing on the item.\"\"\"\n yield item\n", "path": "locations/structured_data_spider.py"}, {"content": "import json\n\nfrom locations.hours import OpeningHours\nfrom locations.items import GeojsonPointItem\n\n\nclass LinkedDataParser(object):\n @staticmethod\n def iter_linked_data(response):\n lds = response.xpath('//script[@type=\"application/ld+json\"]//text()').getall()\n for ld in lds:\n try:\n ld_obj = json.loads(ld, strict=False)\n except json.decoder.JSONDecodeError:\n continue\n\n if isinstance(ld_obj, dict):\n if \"@graph\" in ld_obj:\n yield from ld_obj[\"@graph\"]\n else:\n yield ld_obj\n elif isinstance(ld_obj, list):\n yield from ld_obj\n else:\n raise TypeError(ld_obj)\n\n @staticmethod\n def find_linked_data(response, wanted_type) -> {}:\n for ld_obj in LinkedDataParser.iter_linked_data(response):\n if not ld_obj.get(\"@type\"):\n continue\n\n 
types = ld_obj[\"@type\"]\n\n if not isinstance(types, list):\n types = [types]\n\n for t in types:\n if LinkedDataParser.check_type(t, wanted_type, default=False):\n return ld_obj\n\n @staticmethod\n def parse_ld(ld) -> GeojsonPointItem:\n item = GeojsonPointItem()\n\n if (\n (geo := ld.get(\"geo\"))\n or \"location\" in ld\n and (geo := ld[\"location\"].get(\"geo\"))\n ):\n if isinstance(geo, list):\n geo = geo[0]\n\n if LinkedDataParser.check_type(geo.get(\"@type\"), \"GeoCoordinates\"):\n item[\"lat\"] = LinkedDataParser.get_clean(geo, \"latitude\")\n item[\"lon\"] = LinkedDataParser.get_clean(geo, \"longitude\")\n\n item[\"name\"] = LinkedDataParser.get_clean(ld, \"name\")\n\n if addr := LinkedDataParser.get_clean(ld, \"address\"):\n if isinstance(addr, list):\n addr = addr[0]\n\n if isinstance(addr, str):\n item[\"addr_full\"] = addr\n elif isinstance(addr, dict):\n if LinkedDataParser.check_type(addr.get(\"@type\"), \"PostalAddress\"):\n item[\"street_address\"] = LinkedDataParser.get_case_insensitive(\n addr, \"streetAddress\"\n )\n item[\"city\"] = LinkedDataParser.get_case_insensitive(\n addr, \"addressLocality\"\n )\n item[\"state\"] = LinkedDataParser.get_case_insensitive(\n addr, \"addressRegion\"\n )\n item[\"postcode\"] = LinkedDataParser.get_case_insensitive(\n addr, \"postalCode\"\n )\n country = LinkedDataParser.get_case_insensitive(\n addr, \"addressCountry\"\n )\n\n if isinstance(country, str):\n item[\"country\"] = country\n elif isinstance(country, dict):\n if LinkedDataParser.check_type(country.get(\"@type\"), \"Country\"):\n item[\"country\"] = country.get(\"name\")\n\n # Common mistake to put \"telephone\" in \"address\"\n item[\"phone\"] = LinkedDataParser.get_clean(addr, \"telephone\")\n\n if item.get(\"phone\") is None:\n item[\"phone\"] = LinkedDataParser.get_clean(ld, \"telephone\")\n\n if isinstance(item[\"phone\"], list):\n item[\"phone\"] = item[\"phone\"][0]\n\n if isinstance(item[\"phone\"], str):\n item[\"phone\"] = item[\"phone\"].replace(\"tel:\", \"\")\n\n item[\"email\"] = LinkedDataParser.get_clean(ld, \"email\")\n\n if isinstance(item[\"email\"], str):\n item[\"email\"] = item[\"email\"].replace(\"mailto:\", \"\")\n\n item[\"website\"] = ld.get(\"url\")\n\n try:\n oh = OpeningHours()\n oh.from_linked_data(ld)\n item[\"opening_hours\"] = oh.as_opening_hours()\n except:\n pass\n\n if image := ld.get(\"image\"):\n if isinstance(image, list):\n image = image[0]\n\n if isinstance(image, str):\n item[\"image\"] = image\n elif isinstance(image, dict):\n if LinkedDataParser.check_type(image.get(\"@type\"), \"ImageObject\"):\n item[\"image\"] = image.get(\"contentUrl\")\n\n item[\"ref\"] = ld.get(\"branchCode\")\n if item[\"ref\"] is None or item[\"ref\"] == \"\":\n item[\"ref\"] = ld.get(\"@id\")\n\n if item[\"ref\"] == \"\":\n item[\"ref\"] = None\n\n return item\n\n @staticmethod\n def parse(response, wanted_type) -> GeojsonPointItem:\n ld_item = LinkedDataParser.find_linked_data(response, wanted_type)\n if ld_item:\n item = LinkedDataParser.parse_ld(ld_item)\n\n if item[\"website\"] is None:\n item[\"website\"] = response.url\n elif item[\"website\"] == \"\":\n item[\"website\"] = response.url\n elif item[\"website\"][0] == \"/\":\n item[\"website\"] = response.url\n elif item[\"website\"].startswith(\"www\"):\n item[\"website\"] = \"https://\" + item[\"website\"]\n\n return item\n\n @staticmethod\n def get_clean(obj, key):\n if value := obj.get(key):\n if isinstance(value, str):\n if value == \"null\":\n return None\n return value.strip()\n return 
value\n\n @staticmethod\n def get_case_insensitive(obj, key):\n # Prioritise the case correct key\n if value := LinkedDataParser.get_clean(obj, key):\n return value\n\n for real_key in obj:\n if real_key.lower() == key.lower():\n return LinkedDataParser.get_clean(obj, real_key)\n\n @staticmethod\n def check_type(type: str, wanted_type: str, default: bool = True) -> bool:\n if default and type is None:\n return True\n\n return (\n type.lower()\n .replace(\"http://\", \"\")\n .replace(\"https://\", \"\")\n .replace(\"schema.org/\", \"\")\n == wanted_type.lower()\n )\n", "path": "locations/linked_data_parser.py"}], "after_files": [{"content": "import re\n\nfrom scrapy import Spider\n\nfrom locations.linked_data_parser import LinkedDataParser\nfrom locations.microdata_parser import MicrodataParser\n\n\ndef extract_email(item, response):\n for link in response.xpath(\"//a[contains(@href, 'mailto')]/@href\").getall():\n link = link.strip()\n if link.startswith(\"mailto:\"):\n item[\"email\"] = link.replace(\"mailto:\", \"\")\n return\n\n\ndef extract_phone(item, response):\n for link in response.xpath(\"//a[contains(@href, 'tel')]/@href\").getall():\n link = link.strip()\n if link.startswith(\"tel:\"):\n\n item[\"phone\"] = link.replace(\"tel:\", \"\")\n return\n\n\ndef extract_twitter(item, response):\n if twitter := response.xpath('//meta[@name=\"twitter:site\"]/@content').get():\n item[\"twitter\"] = twitter.strip()\n\n\ndef extract_image(item, response):\n if image := response.xpath('//meta[@name=\"twitter:image\"]/@content').get():\n item[\"image\"] = image.strip()\n return\n if image := response.xpath('//meta[@name=\"og:image\"]/@content').get():\n item[\"image\"] = image.strip()\n\n\nclass StructuredDataSpider(Spider):\n\n wanted_types = [\n \"LocalBusiness\",\n \"Store\",\n \"Restaurant\",\n \"BankOrCreditUnion\",\n \"GroceryStore\",\n \"FastFoodRestaurant\",\n \"Hotel\",\n \"Place\",\n \"ClothingStore\",\n \"DepartmentStore\",\n \"HardwareStore\",\n ]\n search_for_email = True\n search_for_phone = True\n search_for_twitter = True\n search_for_image = True\n\n def parse_sd(self, response):\n MicrodataParser.convert_to_json_ld(response)\n for wanted_type in self.wanted_types:\n if ld_item := LinkedDataParser.find_linked_data(response, wanted_type):\n self.pre_process_data(ld_item)\n\n item = LinkedDataParser.parse_ld(ld_item)\n\n if item[\"ref\"] is None:\n if hasattr(self, \"rules\"):\n # Attempt to pull a match from CrawlSpider.rules\n for rule in getattr(self, \"rules\"):\n for allow in rule.link_extractor.allow_res:\n if match := re.match(allow, response.url):\n if len(match.groups()) > 0:\n item[\"ref\"] = match.group(1)\n elif hasattr(self, \"sitemap_rules\"):\n # Attempt to pull a match from SitemapSpider.sitemap_rules\n for rule in getattr(self, \"sitemap_rules\"):\n if match := re.match(rule[0], response.url):\n if len(match.groups()) > 0:\n item[\"ref\"] = match.group(1)\n\n if item[\"ref\"] is None:\n item[\"ref\"] = response.url\n\n if self.search_for_email and item[\"email\"] is None:\n extract_email(item, response)\n\n if self.search_for_phone and item[\"phone\"] is None:\n extract_phone(item, response)\n\n if self.search_for_twitter and item.get(\"twitter\") is None:\n extract_twitter(item, response)\n\n if self.search_for_image and item.get(\"image\") is None:\n extract_image(item, response)\n\n yield from self.post_process_item(item, response, ld_item)\n\n def pre_process_data(self, ld_data, **kwargs):\n \"\"\"Override with any pre-processing on the item.\"\"\"\n pass\n\n 
def post_process_item(self, item, response, ld_data, **kwargs):\n \"\"\"Override with any post-processing on the item.\"\"\"\n yield from self.inspect_item(item, response)\n\n def inspect_item(self, item, response):\n \"\"\"Deprecated, please use post_process_item(self, item, response, ld_data):\"\"\"\n yield item\n", "path": "locations/structured_data_spider.py"}, {"content": "import json\n\nfrom locations.hours import OpeningHours\nfrom locations.items import GeojsonPointItem\n\n\nclass LinkedDataParser(object):\n @staticmethod\n def iter_linked_data(response):\n lds = response.xpath('//script[@type=\"application/ld+json\"]//text()').getall()\n for ld in lds:\n try:\n ld_obj = json.loads(ld, strict=False)\n except json.decoder.JSONDecodeError:\n continue\n\n if isinstance(ld_obj, dict):\n if \"@graph\" in ld_obj:\n yield from ld_obj[\"@graph\"]\n else:\n yield ld_obj\n elif isinstance(ld_obj, list):\n yield from ld_obj\n else:\n raise TypeError(ld_obj)\n\n @staticmethod\n def find_linked_data(response, wanted_type) -> {}:\n for ld_obj in LinkedDataParser.iter_linked_data(response):\n if not ld_obj.get(\"@type\"):\n continue\n\n types = ld_obj[\"@type\"]\n\n if not isinstance(types, list):\n types = [types]\n\n types = [LinkedDataParser.clean_type(t) for t in types]\n\n if isinstance(wanted_type, list):\n wanted_types = wanted_type\n else:\n wanted_types = [wanted_type]\n\n wanted_types = [LinkedDataParser.clean_type(t) for t in wanted_types]\n\n for wanted_type in wanted_types:\n valid_type = True\n for t in types:\n if not t in wanted_types:\n valid_type = False\n\n if valid_type:\n return ld_obj\n\n @staticmethod\n def parse_ld(ld) -> GeojsonPointItem:\n item = GeojsonPointItem()\n\n if (\n (geo := ld.get(\"geo\"))\n or \"location\" in ld\n and (geo := ld[\"location\"].get(\"geo\"))\n ):\n if isinstance(geo, list):\n geo = geo[0]\n\n if LinkedDataParser.check_type(geo.get(\"@type\"), \"GeoCoordinates\"):\n item[\"lat\"] = LinkedDataParser.get_clean(geo, \"latitude\")\n item[\"lon\"] = LinkedDataParser.get_clean(geo, \"longitude\")\n\n item[\"name\"] = LinkedDataParser.get_clean(ld, \"name\")\n\n if addr := LinkedDataParser.get_clean(ld, \"address\"):\n if isinstance(addr, list):\n addr = addr[0]\n\n if isinstance(addr, str):\n item[\"addr_full\"] = addr\n elif isinstance(addr, dict):\n if LinkedDataParser.check_type(addr.get(\"@type\"), \"PostalAddress\"):\n item[\"street_address\"] = LinkedDataParser.get_case_insensitive(\n addr, \"streetAddress\"\n )\n item[\"city\"] = LinkedDataParser.get_case_insensitive(\n addr, \"addressLocality\"\n )\n item[\"state\"] = LinkedDataParser.get_case_insensitive(\n addr, \"addressRegion\"\n )\n item[\"postcode\"] = LinkedDataParser.get_case_insensitive(\n addr, \"postalCode\"\n )\n country = LinkedDataParser.get_case_insensitive(\n addr, \"addressCountry\"\n )\n\n if isinstance(country, str):\n item[\"country\"] = country\n elif isinstance(country, dict):\n if LinkedDataParser.check_type(country.get(\"@type\"), \"Country\"):\n item[\"country\"] = country.get(\"name\")\n\n # Common mistake to put \"telephone\" in \"address\"\n item[\"phone\"] = LinkedDataParser.get_clean(addr, \"telephone\")\n\n if item.get(\"phone\") is None:\n item[\"phone\"] = LinkedDataParser.get_clean(ld, \"telephone\")\n\n if isinstance(item[\"phone\"], list):\n item[\"phone\"] = item[\"phone\"][0]\n\n if isinstance(item[\"phone\"], str):\n item[\"phone\"] = item[\"phone\"].replace(\"tel:\", \"\")\n\n item[\"email\"] = LinkedDataParser.get_clean(ld, \"email\")\n\n if 
isinstance(item[\"email\"], str):\n item[\"email\"] = item[\"email\"].replace(\"mailto:\", \"\")\n\n item[\"website\"] = ld.get(\"url\")\n\n try:\n oh = OpeningHours()\n oh.from_linked_data(ld)\n item[\"opening_hours\"] = oh.as_opening_hours()\n except:\n pass\n\n if image := ld.get(\"image\"):\n if isinstance(image, list):\n image = image[0]\n\n if isinstance(image, str):\n item[\"image\"] = image\n elif isinstance(image, dict):\n if LinkedDataParser.check_type(image.get(\"@type\"), \"ImageObject\"):\n item[\"image\"] = image.get(\"contentUrl\")\n\n item[\"ref\"] = ld.get(\"branchCode\")\n if item[\"ref\"] is None or item[\"ref\"] == \"\":\n item[\"ref\"] = ld.get(\"@id\")\n\n if item[\"ref\"] == \"\":\n item[\"ref\"] = None\n\n return item\n\n @staticmethod\n def parse(response, wanted_type) -> GeojsonPointItem:\n ld_item = LinkedDataParser.find_linked_data(response, wanted_type)\n if ld_item:\n item = LinkedDataParser.parse_ld(ld_item)\n\n if item[\"website\"] is None:\n item[\"website\"] = response.url\n elif item[\"website\"] == \"\":\n item[\"website\"] = response.url\n elif item[\"website\"][0] == \"/\":\n item[\"website\"] = response.url\n elif item[\"website\"].startswith(\"www\"):\n item[\"website\"] = \"https://\" + item[\"website\"]\n\n return item\n\n @staticmethod\n def get_clean(obj, key):\n if value := obj.get(key):\n if isinstance(value, str):\n if value == \"null\":\n return None\n return value.strip()\n return value\n\n @staticmethod\n def get_case_insensitive(obj, key):\n # Prioritise the case correct key\n if value := LinkedDataParser.get_clean(obj, key):\n return value\n\n for real_key in obj:\n if real_key.lower() == key.lower():\n return LinkedDataParser.get_clean(obj, real_key)\n\n @staticmethod\n def check_type(type: str, wanted_type: str, default: bool = True) -> bool:\n if default and type is None:\n return True\n\n return LinkedDataParser.clean_type(type) == wanted_type.lower()\n\n @staticmethod\n def clean_type(type: str) -> str:\n return (\n type.lower()\n .replace(\"http://\", \"\")\n .replace(\"https://\", \"\")\n .replace(\"schema.org/\", \"\")\n )\n", "path": "locations/linked_data_parser.py"}]}
| 2,936 | 782 |
gh_patches_debug_40166
|
rasdani/github-patches
|
git_diff
|
learningequality__kolibri-2092
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Setup wizard is broken
## Summary
* Submitting the setup wizard returns `{language_code: ["This field is required."]}`
## System information
- Version: 0.6
## How to reproduce
1. Go through setup wizard
## Real-life consequences
Sadness
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kolibri/core/device/serializers.py`
Content:
```
1 from django.db import transaction
2 from django.utils.translation import check_for_language, ugettext_lazy as _
3 from kolibri.auth.constants.facility_presets import choices, mappings
4 from kolibri.auth.constants.role_kinds import ADMIN
5 from kolibri.auth.models import Facility, FacilityUser
6 from kolibri.auth.serializers import FacilitySerializer, FacilityUserSerializer
7 from rest_framework import serializers
8
9 from .models import DevicePermissions, DeviceSettings
10
11
12 class DevicePermissionsSerializer(serializers.ModelSerializer):
13
14 class Meta:
15 model = DevicePermissions
16 fields = (
17 'user', 'is_superuser', 'can_manage_content',
18 )
19
20 class NoFacilityFacilityUserSerializer(FacilityUserSerializer):
21
22 class Meta:
23 model = FacilityUser
24 fields = ('id', 'username', 'full_name', 'password', )
25
26
27 class DeviceProvisionSerializer(serializers.Serializer):
28 facility = FacilitySerializer()
29 preset = serializers.ChoiceField(choices=choices)
30 superuser = NoFacilityFacilityUserSerializer()
31 language_code = serializers.CharField(max_length=15)
32
33 class Meta:
34 fields = ('facility', 'dataset', 'superuser', 'language_code')
35
36 def validate_language_code(self, language_code):
37 """
38 Check that the language_code is supported by Kolibri
39 """
40 if not check_for_language(language_code):
41 raise serializers.ValidationError(_("Language is not supported by Kolibri"))
42 return language_code
43
44 def create(self, validated_data):
45 """
46 Endpoint for initial setup of a device.
47 Expects a value for:
48 default language - the default language of this Kolibri device
49 facility - the required fields for setting up a facility
50 facilitydataset - facility configuration options
51 superuser - the required fields for a facilityuser who will be set as the super user for this device
52 """
53 with transaction.atomic():
54 facility = Facility.objects.create(**validated_data.pop('facility'))
55 preset = validated_data.pop('preset')
56 dataset_data = mappings[preset]
57 for key, value in dataset_data.items():
58 setattr(facility.dataset, key, value)
59 facility.dataset.save()
60 superuser_data = validated_data.pop('superuser')
61 superuser_data['facility'] = facility
62 superuser = FacilityUserSerializer(data=superuser_data).create(superuser_data)
63 facility.add_role(superuser, ADMIN)
64 DevicePermissions.objects.create(user=superuser, is_superuser=True)
65 language_code = validated_data.pop('language_code')
66 device_settings, created = DeviceSettings.objects.get_or_create()
67 device_settings.is_provisioned = True
68 device_settings.language_code = language_code
69 device_settings.save()
70 return {
71 "facility": facility,
72 "preset": preset,
73 "superuser": superuser,
74 "language_code": language_code
75 }
76
```
Path: `kolibri/core/device/models.py`
Content:
```
1 from django.conf import settings
2 from django.db import models
3 from kolibri.auth.models import FacilityUser
4
5 from .permissions import UserCanManageDevicePermissions
6
7
8 class DevicePermissions(models.Model):
9 """
10 This class stores metadata about device permissions for FacilityUsers.
11 """
12
13 permissions = UserCanManageDevicePermissions()
14
15 user = models.OneToOneField(FacilityUser, on_delete=models.CASCADE, related_name='devicepermissions', blank=False, null=False, primary_key=True)
16 is_superuser = models.BooleanField(default=False)
17 can_manage_content = models.BooleanField(default=False)
18
19
20 class DeviceSettings(models.Model):
21 """
22 This class stores data about settings particular to this device
23 """
24
25 is_provisioned = models.BooleanField(default=False)
26 language_code = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)
27
28 def save(self, *args, **kwargs):
29 self.pk = 1
30 super(DeviceSettings, self).save(*args, **kwargs)
31
```
Path: `kolibri/core/device/migrations/0001_initial.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # Generated by Django 1.9.7 on 2017-08-16 23:05
3 from __future__ import unicode_literals
4
5 import django.db.models.deletion
6 from django.conf import settings
7 from django.db import migrations, models
8
9
10 class Migration(migrations.Migration):
11
12 initial = True
13
14 dependencies = [
15 ('kolibriauth', '0003_auto_20170621_0958'),
16 ]
17
18 operations = [
19 migrations.CreateModel(
20 name='DevicePermissions',
21 fields=[
22 ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='devicepermissions', serialize=False, to=settings.AUTH_USER_MODEL)),
23 ('is_superuser', models.BooleanField(default=False)),
24 ('can_manage_content', models.BooleanField(default=False)),
25 ],
26 ),
27 migrations.CreateModel(
28 name='DeviceSettings',
29 fields=[
30 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
31 ('is_provisioned', models.BooleanField(default=False)),
32 ('language_code', models.CharField(default='en', max_length=15)),
33 ],
34 ),
35 ]
36
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/kolibri/core/device/migrations/0001_initial.py b/kolibri/core/device/migrations/0001_initial.py
--- a/kolibri/core/device/migrations/0001_initial.py
+++ b/kolibri/core/device/migrations/0001_initial.py
@@ -29,7 +29,7 @@
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_provisioned', models.BooleanField(default=False)),
- ('language_code', models.CharField(default='en', max_length=15)),
+ ('language_id', models.CharField(default='en', max_length=15)),
],
),
]
diff --git a/kolibri/core/device/models.py b/kolibri/core/device/models.py
--- a/kolibri/core/device/models.py
+++ b/kolibri/core/device/models.py
@@ -23,7 +23,7 @@
"""
is_provisioned = models.BooleanField(default=False)
- language_code = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)
+ language_id = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)
def save(self, *args, **kwargs):
self.pk = 1
diff --git a/kolibri/core/device/serializers.py b/kolibri/core/device/serializers.py
--- a/kolibri/core/device/serializers.py
+++ b/kolibri/core/device/serializers.py
@@ -28,18 +28,18 @@
facility = FacilitySerializer()
preset = serializers.ChoiceField(choices=choices)
superuser = NoFacilityFacilityUserSerializer()
- language_code = serializers.CharField(max_length=15)
+ language_id = serializers.CharField(max_length=15)
class Meta:
- fields = ('facility', 'dataset', 'superuser', 'language_code')
+ fields = ('facility', 'dataset', 'superuser', 'language_id')
- def validate_language_code(self, language_code):
+ def validate_language_id(self, language_id):
"""
- Check that the language_code is supported by Kolibri
+ Check that the language_id is supported by Kolibri
"""
- if not check_for_language(language_code):
+ if not check_for_language(language_id):
raise serializers.ValidationError(_("Language is not supported by Kolibri"))
- return language_code
+ return language_id
def create(self, validated_data):
"""
@@ -62,14 +62,14 @@
superuser = FacilityUserSerializer(data=superuser_data).create(superuser_data)
facility.add_role(superuser, ADMIN)
DevicePermissions.objects.create(user=superuser, is_superuser=True)
- language_code = validated_data.pop('language_code')
+ language_id = validated_data.pop('language_id')
device_settings, created = DeviceSettings.objects.get_or_create()
device_settings.is_provisioned = True
- device_settings.language_code = language_code
+ device_settings.language_id = language_id
device_settings.save()
return {
"facility": facility,
"preset": preset,
"superuser": superuser,
- "language_code": language_code
+ "language_id": language_id
}
|
{"golden_diff": "diff --git a/kolibri/core/device/migrations/0001_initial.py b/kolibri/core/device/migrations/0001_initial.py\n--- a/kolibri/core/device/migrations/0001_initial.py\n+++ b/kolibri/core/device/migrations/0001_initial.py\n@@ -29,7 +29,7 @@\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('is_provisioned', models.BooleanField(default=False)),\n- ('language_code', models.CharField(default='en', max_length=15)),\n+ ('language_id', models.CharField(default='en', max_length=15)),\n ],\n ),\n ]\ndiff --git a/kolibri/core/device/models.py b/kolibri/core/device/models.py\n--- a/kolibri/core/device/models.py\n+++ b/kolibri/core/device/models.py\n@@ -23,7 +23,7 @@\n \"\"\"\n \n is_provisioned = models.BooleanField(default=False)\n- language_code = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)\n+ language_id = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)\n \n def save(self, *args, **kwargs):\n self.pk = 1\ndiff --git a/kolibri/core/device/serializers.py b/kolibri/core/device/serializers.py\n--- a/kolibri/core/device/serializers.py\n+++ b/kolibri/core/device/serializers.py\n@@ -28,18 +28,18 @@\n facility = FacilitySerializer()\n preset = serializers.ChoiceField(choices=choices)\n superuser = NoFacilityFacilityUserSerializer()\n- language_code = serializers.CharField(max_length=15)\n+ language_id = serializers.CharField(max_length=15)\n \n class Meta:\n- fields = ('facility', 'dataset', 'superuser', 'language_code')\n+ fields = ('facility', 'dataset', 'superuser', 'language_id')\n \n- def validate_language_code(self, language_code):\n+ def validate_language_id(self, language_id):\n \"\"\"\n- Check that the language_code is supported by Kolibri\n+ Check that the language_id is supported by Kolibri\n \"\"\"\n- if not check_for_language(language_code):\n+ if not check_for_language(language_id):\n raise serializers.ValidationError(_(\"Language is not supported by Kolibri\"))\n- return language_code\n+ return language_id\n \n def create(self, validated_data):\n \"\"\"\n@@ -62,14 +62,14 @@\n superuser = FacilityUserSerializer(data=superuser_data).create(superuser_data)\n facility.add_role(superuser, ADMIN)\n DevicePermissions.objects.create(user=superuser, is_superuser=True)\n- language_code = validated_data.pop('language_code')\n+ language_id = validated_data.pop('language_id')\n device_settings, created = DeviceSettings.objects.get_or_create()\n device_settings.is_provisioned = True\n- device_settings.language_code = language_code\n+ device_settings.language_id = language_id\n device_settings.save()\n return {\n \"facility\": facility,\n \"preset\": preset,\n \"superuser\": superuser,\n- \"language_code\": language_code\n+ \"language_id\": language_id\n }\n", "issue": "Setup wizard is broken\n## Summary\r\n\r\n* Submitting the setup wizard returns `{language_code: [\"This field is required.\"]}`\r\n\r\n## System information\r\n\r\n - Version: 0.6\r\n\r\n## How to reproduce\r\n\r\n1. 
Go through setup wizard\r\n\r\n## Real-life consequences\r\n\r\nSadness\n", "before_files": [{"content": "from django.db import transaction\nfrom django.utils.translation import check_for_language, ugettext_lazy as _\nfrom kolibri.auth.constants.facility_presets import choices, mappings\nfrom kolibri.auth.constants.role_kinds import ADMIN\nfrom kolibri.auth.models import Facility, FacilityUser\nfrom kolibri.auth.serializers import FacilitySerializer, FacilityUserSerializer\nfrom rest_framework import serializers\n\nfrom .models import DevicePermissions, DeviceSettings\n\n\nclass DevicePermissionsSerializer(serializers.ModelSerializer):\n\n class Meta:\n model = DevicePermissions\n fields = (\n 'user', 'is_superuser', 'can_manage_content',\n )\n\nclass NoFacilityFacilityUserSerializer(FacilityUserSerializer):\n\n class Meta:\n model = FacilityUser\n fields = ('id', 'username', 'full_name', 'password', )\n\n\nclass DeviceProvisionSerializer(serializers.Serializer):\n facility = FacilitySerializer()\n preset = serializers.ChoiceField(choices=choices)\n superuser = NoFacilityFacilityUserSerializer()\n language_code = serializers.CharField(max_length=15)\n\n class Meta:\n fields = ('facility', 'dataset', 'superuser', 'language_code')\n\n def validate_language_code(self, language_code):\n \"\"\"\n Check that the language_code is supported by Kolibri\n \"\"\"\n if not check_for_language(language_code):\n raise serializers.ValidationError(_(\"Language is not supported by Kolibri\"))\n return language_code\n\n def create(self, validated_data):\n \"\"\"\n Endpoint for initial setup of a device.\n Expects a value for:\n default language - the default language of this Kolibri device\n facility - the required fields for setting up a facility\n facilitydataset - facility configuration options\n superuser - the required fields for a facilityuser who will be set as the super user for this device\n \"\"\"\n with transaction.atomic():\n facility = Facility.objects.create(**validated_data.pop('facility'))\n preset = validated_data.pop('preset')\n dataset_data = mappings[preset]\n for key, value in dataset_data.items():\n setattr(facility.dataset, key, value)\n facility.dataset.save()\n superuser_data = validated_data.pop('superuser')\n superuser_data['facility'] = facility\n superuser = FacilityUserSerializer(data=superuser_data).create(superuser_data)\n facility.add_role(superuser, ADMIN)\n DevicePermissions.objects.create(user=superuser, is_superuser=True)\n language_code = validated_data.pop('language_code')\n device_settings, created = DeviceSettings.objects.get_or_create()\n device_settings.is_provisioned = True\n device_settings.language_code = language_code\n device_settings.save()\n return {\n \"facility\": facility,\n \"preset\": preset,\n \"superuser\": superuser,\n \"language_code\": language_code\n }\n", "path": "kolibri/core/device/serializers.py"}, {"content": "from django.conf import settings\nfrom django.db import models\nfrom kolibri.auth.models import FacilityUser\n\nfrom .permissions import UserCanManageDevicePermissions\n\n\nclass DevicePermissions(models.Model):\n \"\"\"\n This class stores metadata about device permissions for FacilityUsers.\n \"\"\"\n\n permissions = UserCanManageDevicePermissions()\n\n user = models.OneToOneField(FacilityUser, on_delete=models.CASCADE, related_name='devicepermissions', blank=False, null=False, primary_key=True)\n is_superuser = models.BooleanField(default=False)\n can_manage_content = models.BooleanField(default=False)\n\n\nclass 
DeviceSettings(models.Model):\n \"\"\"\n This class stores data about settings particular to this device\n \"\"\"\n\n is_provisioned = models.BooleanField(default=False)\n language_code = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)\n\n def save(self, *args, **kwargs):\n self.pk = 1\n super(DeviceSettings, self).save(*args, **kwargs)\n", "path": "kolibri/core/device/models.py"}, {"content": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.7 on 2017-08-16 23:05\nfrom __future__ import unicode_literals\n\nimport django.db.models.deletion\nfrom django.conf import settings\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ('kolibriauth', '0003_auto_20170621_0958'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='DevicePermissions',\n fields=[\n ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='devicepermissions', serialize=False, to=settings.AUTH_USER_MODEL)),\n ('is_superuser', models.BooleanField(default=False)),\n ('can_manage_content', models.BooleanField(default=False)),\n ],\n ),\n migrations.CreateModel(\n name='DeviceSettings',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('is_provisioned', models.BooleanField(default=False)),\n ('language_code', models.CharField(default='en', max_length=15)),\n ],\n ),\n ]\n", "path": "kolibri/core/device/migrations/0001_initial.py"}], "after_files": [{"content": "from django.db import transaction\nfrom django.utils.translation import check_for_language, ugettext_lazy as _\nfrom kolibri.auth.constants.facility_presets import choices, mappings\nfrom kolibri.auth.constants.role_kinds import ADMIN\nfrom kolibri.auth.models import Facility, FacilityUser\nfrom kolibri.auth.serializers import FacilitySerializer, FacilityUserSerializer\nfrom rest_framework import serializers\n\nfrom .models import DevicePermissions, DeviceSettings\n\n\nclass DevicePermissionsSerializer(serializers.ModelSerializer):\n\n class Meta:\n model = DevicePermissions\n fields = (\n 'user', 'is_superuser', 'can_manage_content',\n )\n\nclass NoFacilityFacilityUserSerializer(FacilityUserSerializer):\n\n class Meta:\n model = FacilityUser\n fields = ('id', 'username', 'full_name', 'password', )\n\n\nclass DeviceProvisionSerializer(serializers.Serializer):\n facility = FacilitySerializer()\n preset = serializers.ChoiceField(choices=choices)\n superuser = NoFacilityFacilityUserSerializer()\n language_id = serializers.CharField(max_length=15)\n\n class Meta:\n fields = ('facility', 'dataset', 'superuser', 'language_id')\n\n def validate_language_id(self, language_id):\n \"\"\"\n Check that the language_id is supported by Kolibri\n \"\"\"\n if not check_for_language(language_id):\n raise serializers.ValidationError(_(\"Language is not supported by Kolibri\"))\n return language_id\n\n def create(self, validated_data):\n \"\"\"\n Endpoint for initial setup of a device.\n Expects a value for:\n default language - the default language of this Kolibri device\n facility - the required fields for setting up a facility\n facilitydataset - facility configuration options\n superuser - the required fields for a facilityuser who will be set as the super user for this device\n \"\"\"\n with transaction.atomic():\n facility = Facility.objects.create(**validated_data.pop('facility'))\n preset = validated_data.pop('preset')\n dataset_data = mappings[preset]\n for key, value in 
dataset_data.items():\n setattr(facility.dataset, key, value)\n facility.dataset.save()\n superuser_data = validated_data.pop('superuser')\n superuser_data['facility'] = facility\n superuser = FacilityUserSerializer(data=superuser_data).create(superuser_data)\n facility.add_role(superuser, ADMIN)\n DevicePermissions.objects.create(user=superuser, is_superuser=True)\n language_id = validated_data.pop('language_id')\n device_settings, created = DeviceSettings.objects.get_or_create()\n device_settings.is_provisioned = True\n device_settings.language_id = language_id\n device_settings.save()\n return {\n \"facility\": facility,\n \"preset\": preset,\n \"superuser\": superuser,\n \"language_id\": language_id\n }\n", "path": "kolibri/core/device/serializers.py"}, {"content": "from django.conf import settings\nfrom django.db import models\nfrom kolibri.auth.models import FacilityUser\n\nfrom .permissions import UserCanManageDevicePermissions\n\n\nclass DevicePermissions(models.Model):\n \"\"\"\n This class stores metadata about device permissions for FacilityUsers.\n \"\"\"\n\n permissions = UserCanManageDevicePermissions()\n\n user = models.OneToOneField(FacilityUser, on_delete=models.CASCADE, related_name='devicepermissions', blank=False, null=False, primary_key=True)\n is_superuser = models.BooleanField(default=False)\n can_manage_content = models.BooleanField(default=False)\n\n\nclass DeviceSettings(models.Model):\n \"\"\"\n This class stores data about settings particular to this device\n \"\"\"\n\n is_provisioned = models.BooleanField(default=False)\n language_id = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)\n\n def save(self, *args, **kwargs):\n self.pk = 1\n super(DeviceSettings, self).save(*args, **kwargs)\n", "path": "kolibri/core/device/models.py"}, {"content": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.7 on 2017-08-16 23:05\nfrom __future__ import unicode_literals\n\nimport django.db.models.deletion\nfrom django.conf import settings\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ('kolibriauth', '0003_auto_20170621_0958'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='DevicePermissions',\n fields=[\n ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='devicepermissions', serialize=False, to=settings.AUTH_USER_MODEL)),\n ('is_superuser', models.BooleanField(default=False)),\n ('can_manage_content', models.BooleanField(default=False)),\n ],\n ),\n migrations.CreateModel(\n name='DeviceSettings',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('is_provisioned', models.BooleanField(default=False)),\n ('language_id', models.CharField(default='en', max_length=15)),\n ],\n ),\n ]\n", "path": "kolibri/core/device/migrations/0001_initial.py"}]}
| 1,679 | 718 |
gh_patches_debug_9304
|
rasdani/github-patches
|
git_diff
|
readthedocs__readthedocs.org-1404
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Pip install failing to install sphinx when PYTHON_HOME is set
If the env variable PYTHON_HOME exists, pip install is failing to install sphinx into the virtualenv.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `readthedocs/projects/utils.py`
Content:
```
1 """Utility functions used by projects.
2 """
3 import fnmatch
4 import os
5 import re
6 import subprocess
7 import traceback
8 import logging
9 from httplib2 import Http
10
11 from django.conf import settings
12 from distutils2.version import NormalizedVersion, suggest_normalized_version
13 import redis
14
15 from builds.constants import LATEST
16
17
18 log = logging.getLogger(__name__)
19
20 def version_from_slug(slug, version):
21 from projects import tasks
22 from builds.models import Version
23 from tastyapi import apiv2 as api
24 if getattr(settings, 'DONT_HIT_DB', True):
25 version_data = api.version().get(project=slug, slug=version)['results'][0]
26 v = tasks.make_api_version(version_data)
27 else:
28 v = Version.objects.get(project__slug=slug, slug=version)
29 return v
30
31 def symlink(project, version=LATEST):
32 from projects import symlinks
33 v = version_from_slug(project, version)
34 log.info("Symlinking %s" % v)
35 symlinks.symlink_subprojects(v)
36 symlinks.symlink_cnames(v)
37 symlinks.symlink_translations(v)
38
39 def update_static_metadata(project_pk):
40 """
41 This is here to avoid circular imports in models.py
42 """
43 from projects import tasks
44 tasks.update_static_metadata.delay(project_pk)
45
46 def find_file(file):
47 """Find matching filenames in the current directory and its subdirectories,
48 and return a list of matching filenames.
49 """
50 matches = []
51 for root, dirnames, filenames in os.walk('.'):
52 for filename in fnmatch.filter(filenames, file):
53 matches.append(os.path.join(root, filename))
54 return matches
55
56
57 def run(*commands, **kwargs):
58 """
59 Run one or more commands, and return ``(status, out, err)``.
60 If more than one command is given, then this is equivalent to
61 chaining them together with ``&&``; if all commands succeed, then
62 ``(status, out, err)`` will represent the last successful command.
63 If one command failed, then ``(status, out, err)`` will represent
64 the failed command.
65 """
66 environment = os.environ.copy()
67 environment['READTHEDOCS'] = 'True'
68 if 'DJANGO_SETTINGS_MODULE' in environment:
69 del environment['DJANGO_SETTINGS_MODULE']
70 if 'PYTHONPATH' in environment:
71 del environment['PYTHONPATH']
72 cwd = os.getcwd()
73 if not commands:
74 raise ValueError("run() requires one or more command-line strings")
75 shell = kwargs.get('shell', False)
76
77 for command in commands:
78 if shell:
79 log.info("Running commands in a shell")
80 run_command = command
81 else:
82 run_command = command.split()
83 log.info("Running: '%s' [%s]" % (command, cwd))
84 try:
85 p = subprocess.Popen(run_command, shell=shell, cwd=cwd,
86 stdout=subprocess.PIPE,
87 stderr=subprocess.PIPE, env=environment)
88
89 out, err = p.communicate()
90 ret = p.returncode
91 except:
92 out = ''
93 err = traceback.format_exc()
94 ret = -1
95 log.error("Command failed", exc_info=True)
96
97 return (ret, out, err)
98
99
100 def safe_write(filename, contents):
101 """Write ``contents`` to the given ``filename``. If the filename's
102 directory does not exist, it is created. Contents are written as UTF-8,
103 ignoring any characters that cannot be encoded as UTF-8.
104 """
105 dirname = os.path.dirname(filename)
106 if not os.path.exists(dirname):
107 os.makedirs(dirname)
108 with open(filename, 'w') as fh:
109 fh.write(contents.encode('utf-8', 'ignore'))
110 fh.close()
111
112
113 def mkversion(version_obj):
114 try:
115 if hasattr(version_obj, 'slug'):
116 ver = NormalizedVersion(
117 suggest_normalized_version(version_obj.slug)
118 )
119 else:
120 ver = NormalizedVersion(
121 suggest_normalized_version(version_obj['slug'])
122 )
123 return ver
124 except TypeError:
125 return None
126
127
128 def highest_version(version_list):
129 highest = [None, None]
130 for version in version_list:
131 ver = mkversion(version)
132 if not ver:
133 continue
134 elif highest[1] and ver:
135 # If there's a highest, and no version, we don't need to set
136 # anything
137 if ver > highest[1]:
138 highest = [version, ver]
139 else:
140 highest = [version, ver]
141 return highest
142
143
144 def purge_version(version, mainsite=False, subdomain=False, cname=False):
145 varnish_servers = getattr(settings, 'VARNISH_SERVERS', None)
146 h = Http()
147 if varnish_servers:
148 for server in varnish_servers:
149 if subdomain:
150 #Send a request to the Server, to purge the URL of the Host.
151 host = "%s.readthedocs.org" % version.project.slug
152 headers = {'Host': host}
153 url = "/en/%s/*" % version.slug
154 to_purge = "http://%s%s" % (server, url)
155 log.info("Purging %s on %s" % (url, host))
156 h.request(to_purge, method="PURGE", headers=headers)
157 if mainsite:
158 headers = {'Host': "readthedocs.org"}
159 url = "/docs/%s/en/%s/*" % (version.project.slug, version.slug)
160 to_purge = "http://%s%s" % (server, url)
161 log.info("Purging %s on readthedocs.org" % url)
162 h.request(to_purge, method="PURGE", headers=headers)
163 root_url = "/docs/%s/" % version.project.slug
164 to_purge = "http://%s%s" % (server, root_url)
165 log.info("Purging %s on readthedocs.org" % root_url)
166 h.request(to_purge, method="PURGE", headers=headers)
167 if cname:
168 redis_conn = redis.Redis(**settings.REDIS)
169 for cnamed in redis_conn.smembers('rtd_slug:v1:%s'
170 % version.project.slug):
171 headers = {'Host': cnamed}
172 url = "/en/%s/*" % version.slug
173 to_purge = "http://%s%s" % (server, url)
174 log.info("Purging %s on %s" % (url, cnamed))
175 h.request(to_purge, method="PURGE", headers=headers)
176 root_url = "/"
177 to_purge = "http://%s%s" % (server, root_url)
178 log.info("Purging %s on %s" % (root_url, cnamed))
179 h.request(to_purge, method="PURGE", headers=headers)
180
181
182 class DictObj(object):
183 def __getattr__(self, attr):
184 return self.__dict__.get(attr)
185
186 # Prevent saving the temporary Project instance
187 def _new_save(*args, **kwargs):
188 log.warning("Called save on a non-real object.")
189 return 0
190
191 def make_api_version(version_data):
192 from builds.models import Version
193 for key in ['resource_uri', 'absolute_url', 'downloads']:
194 if key in version_data:
195 del version_data[key]
196 project_data = version_data['project']
197 project = make_api_project(project_data)
198 version_data['project'] = project
199 ver = Version(**version_data)
200 ver.save = _new_save
201
202 return ver
203
204
205 def make_api_project(project_data):
206 from projects.models import Project
207 for key in ['users', 'resource_uri', 'absolute_url', 'downloads', 'main_language_project', 'related_projects']:
208 if key in project_data:
209 del project_data[key]
210 project = Project(**project_data)
211 project.save = _new_save
212 return project
213
214
215 def github_paginate(client, url):
216 """
217 Scans trough all github paginates results and returns the concatenated
218 list of results.
219
220 :param client: requests client instance
221 :param url: start url to get the data from.
222
223 See https://developer.github.com/v3/#pagination
224 """
225 result = []
226 while url:
227 r = session.get(url)
228 result.extend(r.json())
229 url = r.links.get('next')
230 return result
231
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/readthedocs/projects/utils.py b/readthedocs/projects/utils.py
--- a/readthedocs/projects/utils.py
+++ b/readthedocs/projects/utils.py
@@ -69,6 +69,10 @@
del environment['DJANGO_SETTINGS_MODULE']
if 'PYTHONPATH' in environment:
del environment['PYTHONPATH']
+ # Remove PYTHONHOME env variable if set, otherwise pip install of requirements
+ # into virtualenv will install incorrectly
+ if 'PYTHONHOME' in environment:
+ del environment['PYTHONHOME']
cwd = os.getcwd()
if not commands:
raise ValueError("run() requires one or more command-line strings")
|
{"golden_diff": "diff --git a/readthedocs/projects/utils.py b/readthedocs/projects/utils.py\n--- a/readthedocs/projects/utils.py\n+++ b/readthedocs/projects/utils.py\n@@ -69,6 +69,10 @@\n del environment['DJANGO_SETTINGS_MODULE']\n if 'PYTHONPATH' in environment:\n del environment['PYTHONPATH']\n+ # Remove PYTHONHOME env variable if set, otherwise pip install of requirements\n+ # into virtualenv will install incorrectly\n+ if 'PYTHONHOME' in environment:\n+ del environment['PYTHONHOME']\n cwd = os.getcwd()\n if not commands:\n raise ValueError(\"run() requires one or more command-line strings\")\n", "issue": "Pip install failing to install sphinx when PYTHON_HOME is set\nIf the env variable PYTHON_HOME exists, pip install is failing to install spinx into the virtualenv.\n\n", "before_files": [{"content": "\"\"\"Utility functions used by projects.\n\"\"\"\nimport fnmatch\nimport os\nimport re\nimport subprocess\nimport traceback\nimport logging\nfrom httplib2 import Http\n\nfrom django.conf import settings\nfrom distutils2.version import NormalizedVersion, suggest_normalized_version\nimport redis\n\nfrom builds.constants import LATEST\n\n\nlog = logging.getLogger(__name__)\n\ndef version_from_slug(slug, version):\n from projects import tasks\n from builds.models import Version\n from tastyapi import apiv2 as api\n if getattr(settings, 'DONT_HIT_DB', True):\n version_data = api.version().get(project=slug, slug=version)['results'][0]\n v = tasks.make_api_version(version_data)\n else:\n v = Version.objects.get(project__slug=slug, slug=version)\n return v\n\ndef symlink(project, version=LATEST):\n from projects import symlinks\n v = version_from_slug(project, version)\n log.info(\"Symlinking %s\" % v)\n symlinks.symlink_subprojects(v)\n symlinks.symlink_cnames(v)\n symlinks.symlink_translations(v)\n\ndef update_static_metadata(project_pk):\n \"\"\"\n This is here to avoid circular imports in models.py\n \"\"\"\n from projects import tasks\n tasks.update_static_metadata.delay(project_pk)\n\ndef find_file(file):\n \"\"\"Find matching filenames in the current directory and its subdirectories,\n and return a list of matching filenames.\n \"\"\"\n matches = []\n for root, dirnames, filenames in os.walk('.'):\n for filename in fnmatch.filter(filenames, file):\n matches.append(os.path.join(root, filename))\n return matches\n\n\ndef run(*commands, **kwargs):\n \"\"\"\n Run one or more commands, and return ``(status, out, err)``.\n If more than one command is given, then this is equivalent to\n chaining them together with ``&&``; if all commands succeed, then\n ``(status, out, err)`` will represent the last successful command.\n If one command failed, then ``(status, out, err)`` will represent\n the failed command.\n \"\"\"\n environment = os.environ.copy()\n environment['READTHEDOCS'] = 'True'\n if 'DJANGO_SETTINGS_MODULE' in environment:\n del environment['DJANGO_SETTINGS_MODULE']\n if 'PYTHONPATH' in environment:\n del environment['PYTHONPATH']\n cwd = os.getcwd()\n if not commands:\n raise ValueError(\"run() requires one or more command-line strings\")\n shell = kwargs.get('shell', False)\n\n for command in commands:\n if shell:\n log.info(\"Running commands in a shell\")\n run_command = command\n else:\n run_command = command.split()\n log.info(\"Running: '%s' [%s]\" % (command, cwd))\n try:\n p = subprocess.Popen(run_command, shell=shell, cwd=cwd,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, env=environment)\n\n out, err = p.communicate()\n ret = p.returncode\n except:\n out = ''\n err = 
traceback.format_exc()\n ret = -1\n log.error(\"Command failed\", exc_info=True)\n\n return (ret, out, err)\n\n\ndef safe_write(filename, contents):\n \"\"\"Write ``contents`` to the given ``filename``. If the filename's\n directory does not exist, it is created. Contents are written as UTF-8,\n ignoring any characters that cannot be encoded as UTF-8.\n \"\"\"\n dirname = os.path.dirname(filename)\n if not os.path.exists(dirname):\n os.makedirs(dirname)\n with open(filename, 'w') as fh:\n fh.write(contents.encode('utf-8', 'ignore'))\n fh.close()\n\n\ndef mkversion(version_obj):\n try:\n if hasattr(version_obj, 'slug'):\n ver = NormalizedVersion(\n suggest_normalized_version(version_obj.slug)\n )\n else:\n ver = NormalizedVersion(\n suggest_normalized_version(version_obj['slug'])\n )\n return ver\n except TypeError:\n return None\n\n\ndef highest_version(version_list):\n highest = [None, None]\n for version in version_list:\n ver = mkversion(version)\n if not ver:\n continue\n elif highest[1] and ver:\n # If there's a highest, and no version, we don't need to set\n # anything\n if ver > highest[1]:\n highest = [version, ver]\n else:\n highest = [version, ver]\n return highest\n\n\ndef purge_version(version, mainsite=False, subdomain=False, cname=False):\n varnish_servers = getattr(settings, 'VARNISH_SERVERS', None)\n h = Http()\n if varnish_servers:\n for server in varnish_servers:\n if subdomain:\n #Send a request to the Server, to purge the URL of the Host.\n host = \"%s.readthedocs.org\" % version.project.slug\n headers = {'Host': host}\n url = \"/en/%s/*\" % version.slug\n to_purge = \"http://%s%s\" % (server, url)\n log.info(\"Purging %s on %s\" % (url, host))\n h.request(to_purge, method=\"PURGE\", headers=headers)\n if mainsite:\n headers = {'Host': \"readthedocs.org\"}\n url = \"/docs/%s/en/%s/*\" % (version.project.slug, version.slug)\n to_purge = \"http://%s%s\" % (server, url)\n log.info(\"Purging %s on readthedocs.org\" % url)\n h.request(to_purge, method=\"PURGE\", headers=headers)\n root_url = \"/docs/%s/\" % version.project.slug\n to_purge = \"http://%s%s\" % (server, root_url)\n log.info(\"Purging %s on readthedocs.org\" % root_url)\n h.request(to_purge, method=\"PURGE\", headers=headers)\n if cname:\n redis_conn = redis.Redis(**settings.REDIS)\n for cnamed in redis_conn.smembers('rtd_slug:v1:%s'\n % version.project.slug):\n headers = {'Host': cnamed}\n url = \"/en/%s/*\" % version.slug\n to_purge = \"http://%s%s\" % (server, url)\n log.info(\"Purging %s on %s\" % (url, cnamed))\n h.request(to_purge, method=\"PURGE\", headers=headers)\n root_url = \"/\"\n to_purge = \"http://%s%s\" % (server, root_url)\n log.info(\"Purging %s on %s\" % (root_url, cnamed))\n h.request(to_purge, method=\"PURGE\", headers=headers)\n\n\nclass DictObj(object):\n def __getattr__(self, attr):\n return self.__dict__.get(attr)\n\n# Prevent saving the temporary Project instance\ndef _new_save(*args, **kwargs):\n log.warning(\"Called save on a non-real object.\")\n return 0\n\ndef make_api_version(version_data):\n from builds.models import Version\n for key in ['resource_uri', 'absolute_url', 'downloads']:\n if key in version_data:\n del version_data[key]\n project_data = version_data['project']\n project = make_api_project(project_data)\n version_data['project'] = project\n ver = Version(**version_data)\n ver.save = _new_save\n\n return ver\n\n\ndef make_api_project(project_data):\n from projects.models import Project\n for key in ['users', 'resource_uri', 'absolute_url', 'downloads', 
'main_language_project', 'related_projects']:\n if key in project_data:\n del project_data[key]\n project = Project(**project_data)\n project.save = _new_save\n return project\n\n\ndef github_paginate(client, url):\n \"\"\"\n Scans trough all github paginates results and returns the concatenated\n list of results.\n\n :param client: requests client instance\n :param url: start url to get the data from.\n\n See https://developer.github.com/v3/#pagination\n \"\"\"\n result = []\n while url:\n r = session.get(url)\n result.extend(r.json())\n url = r.links.get('next')\n return result\n", "path": "readthedocs/projects/utils.py"}], "after_files": [{"content": "\"\"\"Utility functions used by projects.\n\"\"\"\nimport fnmatch\nimport os\nimport re\nimport subprocess\nimport traceback\nimport logging\nfrom httplib2 import Http\n\nfrom django.conf import settings\nfrom distutils2.version import NormalizedVersion, suggest_normalized_version\nimport redis\n\nfrom builds.constants import LATEST\n\n\nlog = logging.getLogger(__name__)\n\ndef version_from_slug(slug, version):\n from projects import tasks\n from builds.models import Version\n from tastyapi import apiv2 as api\n if getattr(settings, 'DONT_HIT_DB', True):\n version_data = api.version().get(project=slug, slug=version)['results'][0]\n v = tasks.make_api_version(version_data)\n else:\n v = Version.objects.get(project__slug=slug, slug=version)\n return v\n\ndef symlink(project, version=LATEST):\n from projects import symlinks\n v = version_from_slug(project, version)\n log.info(\"Symlinking %s\" % v)\n symlinks.symlink_subprojects(v)\n symlinks.symlink_cnames(v)\n symlinks.symlink_translations(v)\n\ndef update_static_metadata(project_pk):\n \"\"\"\n This is here to avoid circular imports in models.py\n \"\"\"\n from projects import tasks\n tasks.update_static_metadata.delay(project_pk)\n\ndef find_file(file):\n \"\"\"Find matching filenames in the current directory and its subdirectories,\n and return a list of matching filenames.\n \"\"\"\n matches = []\n for root, dirnames, filenames in os.walk('.'):\n for filename in fnmatch.filter(filenames, file):\n matches.append(os.path.join(root, filename))\n return matches\n\n\ndef run(*commands, **kwargs):\n \"\"\"\n Run one or more commands, and return ``(status, out, err)``.\n If more than one command is given, then this is equivalent to\n chaining them together with ``&&``; if all commands succeed, then\n ``(status, out, err)`` will represent the last successful command.\n If one command failed, then ``(status, out, err)`` will represent\n the failed command.\n \"\"\"\n environment = os.environ.copy()\n environment['READTHEDOCS'] = 'True'\n if 'DJANGO_SETTINGS_MODULE' in environment:\n del environment['DJANGO_SETTINGS_MODULE']\n if 'PYTHONPATH' in environment:\n del environment['PYTHONPATH']\n # Remove PYTHONHOME env variable if set, otherwise pip install of requirements\n # into virtualenv will install incorrectly\n if 'PYTHONHOME' in environment:\n del environment['PYTHONHOME']\n cwd = os.getcwd()\n if not commands:\n raise ValueError(\"run() requires one or more command-line strings\")\n shell = kwargs.get('shell', False)\n\n for command in commands:\n if shell:\n log.info(\"Running commands in a shell\")\n run_command = command\n else:\n run_command = command.split()\n log.info(\"Running: '%s' [%s]\" % (command, cwd))\n try:\n p = subprocess.Popen(run_command, shell=shell, cwd=cwd,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, env=environment)\n\n out, err = p.communicate()\n ret = 
p.returncode\n except:\n out = ''\n err = traceback.format_exc()\n ret = -1\n log.error(\"Command failed\", exc_info=True)\n\n return (ret, out, err)\n\n\ndef safe_write(filename, contents):\n \"\"\"Write ``contents`` to the given ``filename``. If the filename's\n directory does not exist, it is created. Contents are written as UTF-8,\n ignoring any characters that cannot be encoded as UTF-8.\n \"\"\"\n dirname = os.path.dirname(filename)\n if not os.path.exists(dirname):\n os.makedirs(dirname)\n with open(filename, 'w') as fh:\n fh.write(contents.encode('utf-8', 'ignore'))\n fh.close()\n\n\ndef mkversion(version_obj):\n try:\n if hasattr(version_obj, 'slug'):\n ver = NormalizedVersion(\n suggest_normalized_version(version_obj.slug)\n )\n else:\n ver = NormalizedVersion(\n suggest_normalized_version(version_obj['slug'])\n )\n return ver\n except TypeError:\n return None\n\n\ndef highest_version(version_list):\n highest = [None, None]\n for version in version_list:\n ver = mkversion(version)\n if not ver:\n continue\n elif highest[1] and ver:\n # If there's a highest, and no version, we don't need to set\n # anything\n if ver > highest[1]:\n highest = [version, ver]\n else:\n highest = [version, ver]\n return highest\n\n\ndef purge_version(version, mainsite=False, subdomain=False, cname=False):\n varnish_servers = getattr(settings, 'VARNISH_SERVERS', None)\n h = Http()\n if varnish_servers:\n for server in varnish_servers:\n if subdomain:\n #Send a request to the Server, to purge the URL of the Host.\n host = \"%s.readthedocs.org\" % version.project.slug\n headers = {'Host': host}\n url = \"/en/%s/*\" % version.slug\n to_purge = \"http://%s%s\" % (server, url)\n log.info(\"Purging %s on %s\" % (url, host))\n h.request(to_purge, method=\"PURGE\", headers=headers)\n if mainsite:\n headers = {'Host': \"readthedocs.org\"}\n url = \"/docs/%s/en/%s/*\" % (version.project.slug, version.slug)\n to_purge = \"http://%s%s\" % (server, url)\n log.info(\"Purging %s on readthedocs.org\" % url)\n h.request(to_purge, method=\"PURGE\", headers=headers)\n root_url = \"/docs/%s/\" % version.project.slug\n to_purge = \"http://%s%s\" % (server, root_url)\n log.info(\"Purging %s on readthedocs.org\" % root_url)\n h.request(to_purge, method=\"PURGE\", headers=headers)\n if cname:\n redis_conn = redis.Redis(**settings.REDIS)\n for cnamed in redis_conn.smembers('rtd_slug:v1:%s'\n % version.project.slug):\n headers = {'Host': cnamed}\n url = \"/en/%s/*\" % version.slug\n to_purge = \"http://%s%s\" % (server, url)\n log.info(\"Purging %s on %s\" % (url, cnamed))\n h.request(to_purge, method=\"PURGE\", headers=headers)\n root_url = \"/\"\n to_purge = \"http://%s%s\" % (server, root_url)\n log.info(\"Purging %s on %s\" % (root_url, cnamed))\n h.request(to_purge, method=\"PURGE\", headers=headers)\n\n\nclass DictObj(object):\n def __getattr__(self, attr):\n return self.__dict__.get(attr)\n\n# Prevent saving the temporary Project instance\ndef _new_save(*args, **kwargs):\n log.warning(\"Called save on a non-real object.\")\n return 0\n\ndef make_api_version(version_data):\n from builds.models import Version\n for key in ['resource_uri', 'absolute_url', 'downloads']:\n if key in version_data:\n del version_data[key]\n project_data = version_data['project']\n project = make_api_project(project_data)\n version_data['project'] = project\n ver = Version(**version_data)\n ver.save = _new_save\n\n return ver\n\n\ndef make_api_project(project_data):\n from projects.models import Project\n for key in ['users', 'resource_uri', 
'absolute_url', 'downloads', 'main_language_project', 'related_projects']:\n if key in project_data:\n del project_data[key]\n project = Project(**project_data)\n project.save = _new_save\n return project\n\n\ndef github_paginate(client, url):\n \"\"\"\n Scans trough all github paginates results and returns the concatenated\n list of results.\n\n :param client: requests client instance\n :param url: start url to get the data from.\n\n See https://developer.github.com/v3/#pagination\n \"\"\"\n result = []\n while url:\n r = session.get(url)\n result.extend(r.json())\n url = r.links.get('next')\n return result\n", "path": "readthedocs/projects/utils.py"}]}
| 2,677 | 144 |
gh_patches_debug_10814
|
rasdani/github-patches
|
git_diff
|
mitmproxy__mitmproxy-1323
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[tests actually pass] Remove harextractor example
If nobody objects, this PR (temporarily) removes the harextractor example, which depends on harparser, a Python2-only library. It's probably worth the time to rewrite this without any dependencies, but first we want to get a Python 3 release out of the door.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/har_extractor.py`
Content:
```
1 """
2 This inline script utilizes harparser.HAR from
3 https://github.com/JustusW/harparser to generate a HAR log object.
4 """
5 import six
6 import sys
7 import pytz
8 from harparser import HAR
9
10 from datetime import datetime
11
12
13 class _HARLog(HAR.log):
14 # The attributes need to be registered here for them to actually be
15 # available later via self. This is due to HAREncodable linking __getattr__
16 # to __getitem__. Anything that is set only in __init__ will just be added
17 # as key/value pair to self.__classes__.
18 __page_list__ = []
19 __page_count__ = 0
20 __page_ref__ = {}
21
22 def __init__(self, page_list=[]):
23 self.__page_list__ = page_list
24 self.__page_count__ = 0
25 self.__page_ref__ = {}
26
27 HAR.log.__init__(self, {"version": "1.2",
28 "creator": {"name": "MITMPROXY HARExtractor",
29 "version": "0.1",
30 "comment": ""},
31 "pages": [],
32 "entries": []})
33
34 def reset(self):
35 self.__init__(self.__page_list__)
36
37 def add(self, obj):
38 if isinstance(obj, HAR.pages):
39 self['pages'].append(obj)
40 if isinstance(obj, HAR.entries):
41 self['entries'].append(obj)
42
43 def create_page_id(self):
44 self.__page_count__ += 1
45 return "autopage_%s" % str(self.__page_count__)
46
47 def set_page_ref(self, page, ref):
48 self.__page_ref__[page] = ref
49
50 def get_page_ref(self, page):
51 return self.__page_ref__.get(page, None)
52
53 def get_page_list(self):
54 return self.__page_list__
55
56
57 def start(context):
58 """
59 On start we create a HARLog instance. You will have to adapt this to
60 suit your actual needs of HAR generation. As it will probably be
61 necessary to cluster logs by IPs or reset them from time to time.
62 """
63 context.dump_file = None
64 if len(sys.argv) > 1:
65 context.dump_file = sys.argv[1]
66 else:
67 raise ValueError(
68 'Usage: -s "har_extractor.py filename" '
69 '(- will output to stdout, filenames ending with .zhar '
70 'will result in compressed har)'
71 )
72 context.HARLog = _HARLog()
73 context.seen_server = set()
74
75
76 def response(context, flow):
77 """
78 Called when a server response has been received. At the time of this
79 message both a request and a response are present and completely done.
80 """
81 # Values are converted from float seconds to int milliseconds later.
82 ssl_time = -.001
83 connect_time = -.001
84 if flow.server_conn not in context.seen_server:
85 # Calculate the connect_time for this server_conn. Afterwards add it to
86 # seen list, in order to avoid the connect_time being present in entries
87 # that use an existing connection.
88 connect_time = (flow.server_conn.timestamp_tcp_setup -
89 flow.server_conn.timestamp_start)
90 context.seen_server.add(flow.server_conn)
91
92 if flow.server_conn.timestamp_ssl_setup is not None:
93 # Get the ssl_time for this server_conn as the difference between
94 # the start of the successful tcp setup and the successful ssl
95 # setup. If no ssl setup has been made it is left as -1 since it
96 # doesn't apply to this connection.
97 ssl_time = (flow.server_conn.timestamp_ssl_setup -
98 flow.server_conn.timestamp_tcp_setup)
99
100 # Calculate the raw timings from the different timestamps present in the
101 # request and response object. For lack of a way to measure it dns timings
102 # can not be calculated. The same goes for HAR blocked: MITMProxy will open
103 # a server connection as soon as it receives the host and port from the
104 # client connection. So the time spent waiting is actually spent waiting
105 # between request.timestamp_end and response.timestamp_start thus it
106 # correlates to HAR wait instead.
107 timings_raw = {
108 'send': flow.request.timestamp_end - flow.request.timestamp_start,
109 'wait': flow.response.timestamp_start - flow.request.timestamp_end,
110 'receive': flow.response.timestamp_end - flow.response.timestamp_start,
111 'connect': connect_time,
112 'ssl': ssl_time
113 }
114
115 # HAR timings are integers in ms, so we have to re-encode the raw timings to
116 # that format.
117 timings = dict([(k, int(1000 * v)) for k, v in six.iteritems(timings_raw)])
118
119 # The full_time is the sum of all timings.
120 # Timings set to -1 will be ignored as per spec.
121 full_time = sum(v for v in timings.values() if v > -1)
122
123 started_date_time = datetime.utcfromtimestamp(
124 flow.request.timestamp_start).replace(tzinfo=pytz.timezone("UTC")).isoformat()
125
126 request_query_string = [{"name": k, "value": v}
127 for k, v in flow.request.query or {}]
128
129 response_body_size = len(flow.response.content)
130 response_body_decoded_size = len(flow.response.get_decoded_content())
131 response_body_compression = response_body_decoded_size - response_body_size
132
133 entry = HAR.entries({
134 "startedDateTime": started_date_time,
135 "time": full_time,
136 "request": {
137 "method": flow.request.method,
138 "url": flow.request.url,
139 "httpVersion": flow.request.http_version,
140 "cookies": format_cookies(flow.request.cookies),
141 "headers": format_headers(flow.request.headers),
142 "queryString": request_query_string,
143 "headersSize": len(str(flow.request.headers)),
144 "bodySize": len(flow.request.content),
145 },
146 "response": {
147 "status": flow.response.status_code,
148 "statusText": flow.response.reason,
149 "httpVersion": flow.response.http_version,
150 "cookies": format_cookies(flow.response.cookies),
151 "headers": format_headers(flow.response.headers),
152 "content": {
153 "size": response_body_size,
154 "compression": response_body_compression,
155 "mimeType": flow.response.headers.get('Content-Type', '')
156 },
157 "redirectURL": flow.response.headers.get('Location', ''),
158 "headersSize": len(str(flow.response.headers)),
159 "bodySize": response_body_size,
160 },
161 "cache": {},
162 "timings": timings,
163 })
164
165 # If the current url is in the page list of context.HARLog or
166 # does not have a referrer, we add it as a new pages object.
167 is_new_page = (
168 flow.request.url in context.HARLog.get_page_list() or
169 flow.request.headers.get('Referer') is None
170 )
171 if is_new_page:
172 page_id = context.HARLog.create_page_id()
173 context.HARLog.add(
174 HAR.pages({
175 "startedDateTime": entry['startedDateTime'],
176 "id": page_id,
177 "title": flow.request.url,
178 "pageTimings": {}
179 })
180 )
181 context.HARLog.set_page_ref(flow.request.url, page_id)
182 entry['pageref'] = page_id
183
184 # Lookup the referer in the page_ref of context.HARLog to point this entries
185 # pageref attribute to the right pages object, then set it as a new
186 # reference to build a reference tree.
187 elif context.HARLog.get_page_ref(flow.request.headers.get('Referer')) is not None:
188 entry['pageref'] = context.HARLog.get_page_ref(
189 flow.request.headers['Referer']
190 )
191 context.HARLog.set_page_ref(
192 flow.request.headers['Referer'], entry['pageref']
193 )
194
195 context.HARLog.add(entry)
196
197
198 def done(context):
199 """
200 Called once on script shutdown, after any other events.
201 """
202 import pprint
203 import json
204
205 json_dump = context.HARLog.json()
206 compressed_json_dump = context.HARLog.compress()
207
208 if context.dump_file == '-':
209 context.log(pprint.pformat(json.loads(json_dump)))
210 elif context.dump_file.endswith('.zhar'):
211 file(context.dump_file, "w").write(compressed_json_dump)
212 else:
213 file(context.dump_file, "w").write(json_dump)
214 context.log(
215 "HAR log finished with %s bytes (%s bytes compressed)" % (
216 len(json_dump), len(compressed_json_dump)
217 )
218 )
219 context.log(
220 "Compression rate is %s%%" % str(
221 100. * len(compressed_json_dump) / len(json_dump)
222 )
223 )
224
225
226 def format_cookies(obj):
227 if obj:
228 return [{"name": k.strip(), "value": v[0]} for k, v in obj.items()]
229 return ""
230
231
232 def format_headers(obj):
233 if obj:
234 return [{"name": k, "value": v} for k, v in obj.fields]
235 return ""
236
237
238 def print_attributes(obj, filter_string=None, hide_privates=False):
239 """
240 Useful helper method to quickly get all attributes of an object and its
241 values.
242 """
243 for attr in dir(obj):
244 if hide_privates and "__" in attr:
245 continue
246 if filter_string is not None and filter_string not in attr:
247 continue
248 value = getattr(obj, attr)
249 print("%s.%s" % ('obj', attr), value, type(value))
250
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/examples/har_extractor.py b/examples/har_extractor.py
--- a/examples/har_extractor.py
+++ b/examples/har_extractor.py
@@ -60,6 +60,12 @@
suit your actual needs of HAR generation. As it will probably be
necessary to cluster logs by IPs or reset them from time to time.
"""
+ if sys.version_info >= (3, 0):
+ raise RuntimeError(
+ "har_extractor.py does not work on Python 3. "
+ "Please check out https://github.com/mitmproxy/mitmproxy/issues/1320 "
+ "if you want to help making this work again."
+ )
context.dump_file = None
if len(sys.argv) > 1:
context.dump_file = sys.argv[1]
|
{"golden_diff": "diff --git a/examples/har_extractor.py b/examples/har_extractor.py\n--- a/examples/har_extractor.py\n+++ b/examples/har_extractor.py\n@@ -60,6 +60,12 @@\n suit your actual needs of HAR generation. As it will probably be\n necessary to cluster logs by IPs or reset them from time to time.\n \"\"\"\n+ if sys.version_info >= (3, 0):\n+ raise RuntimeError(\n+ \"har_extractor.py does not work on Python 3. \"\n+ \"Please check out https://github.com/mitmproxy/mitmproxy/issues/1320 \"\n+ \"if you want to help making this work again.\"\n+ )\n context.dump_file = None\n if len(sys.argv) > 1:\n context.dump_file = sys.argv[1]\n", "issue": "[tests actually pass] Remove harextractor example\nIf nobody objects, this PR (temporarily) removes the harextractor example, which depends on harparser, a Python2-only library. It's probably worth the time to rewrite this without any dependencies, but first we want to get a Python 3 release out of the door.\n\n", "before_files": [{"content": "\"\"\"\n This inline script utilizes harparser.HAR from\n https://github.com/JustusW/harparser to generate a HAR log object.\n\"\"\"\nimport six\nimport sys\nimport pytz\nfrom harparser import HAR\n\nfrom datetime import datetime\n\n\nclass _HARLog(HAR.log):\n # The attributes need to be registered here for them to actually be\n # available later via self. This is due to HAREncodable linking __getattr__\n # to __getitem__. Anything that is set only in __init__ will just be added\n # as key/value pair to self.__classes__.\n __page_list__ = []\n __page_count__ = 0\n __page_ref__ = {}\n\n def __init__(self, page_list=[]):\n self.__page_list__ = page_list\n self.__page_count__ = 0\n self.__page_ref__ = {}\n\n HAR.log.__init__(self, {\"version\": \"1.2\",\n \"creator\": {\"name\": \"MITMPROXY HARExtractor\",\n \"version\": \"0.1\",\n \"comment\": \"\"},\n \"pages\": [],\n \"entries\": []})\n\n def reset(self):\n self.__init__(self.__page_list__)\n\n def add(self, obj):\n if isinstance(obj, HAR.pages):\n self['pages'].append(obj)\n if isinstance(obj, HAR.entries):\n self['entries'].append(obj)\n\n def create_page_id(self):\n self.__page_count__ += 1\n return \"autopage_%s\" % str(self.__page_count__)\n\n def set_page_ref(self, page, ref):\n self.__page_ref__[page] = ref\n\n def get_page_ref(self, page):\n return self.__page_ref__.get(page, None)\n\n def get_page_list(self):\n return self.__page_list__\n\n\ndef start(context):\n \"\"\"\n On start we create a HARLog instance. You will have to adapt this to\n suit your actual needs of HAR generation. As it will probably be\n necessary to cluster logs by IPs or reset them from time to time.\n \"\"\"\n context.dump_file = None\n if len(sys.argv) > 1:\n context.dump_file = sys.argv[1]\n else:\n raise ValueError(\n 'Usage: -s \"har_extractor.py filename\" '\n '(- will output to stdout, filenames ending with .zhar '\n 'will result in compressed har)'\n )\n context.HARLog = _HARLog()\n context.seen_server = set()\n\n\ndef response(context, flow):\n \"\"\"\n Called when a server response has been received. At the time of this\n message both a request and a response are present and completely done.\n \"\"\"\n # Values are converted from float seconds to int milliseconds later.\n ssl_time = -.001\n connect_time = -.001\n if flow.server_conn not in context.seen_server:\n # Calculate the connect_time for this server_conn. 
Afterwards add it to\n # seen list, in order to avoid the connect_time being present in entries\n # that use an existing connection.\n connect_time = (flow.server_conn.timestamp_tcp_setup -\n flow.server_conn.timestamp_start)\n context.seen_server.add(flow.server_conn)\n\n if flow.server_conn.timestamp_ssl_setup is not None:\n # Get the ssl_time for this server_conn as the difference between\n # the start of the successful tcp setup and the successful ssl\n # setup. If no ssl setup has been made it is left as -1 since it\n # doesn't apply to this connection.\n ssl_time = (flow.server_conn.timestamp_ssl_setup -\n flow.server_conn.timestamp_tcp_setup)\n\n # Calculate the raw timings from the different timestamps present in the\n # request and response object. For lack of a way to measure it dns timings\n # can not be calculated. The same goes for HAR blocked: MITMProxy will open\n # a server connection as soon as it receives the host and port from the\n # client connection. So the time spent waiting is actually spent waiting\n # between request.timestamp_end and response.timestamp_start thus it\n # correlates to HAR wait instead.\n timings_raw = {\n 'send': flow.request.timestamp_end - flow.request.timestamp_start,\n 'wait': flow.response.timestamp_start - flow.request.timestamp_end,\n 'receive': flow.response.timestamp_end - flow.response.timestamp_start,\n 'connect': connect_time,\n 'ssl': ssl_time\n }\n\n # HAR timings are integers in ms, so we have to re-encode the raw timings to\n # that format.\n timings = dict([(k, int(1000 * v)) for k, v in six.iteritems(timings_raw)])\n\n # The full_time is the sum of all timings.\n # Timings set to -1 will be ignored as per spec.\n full_time = sum(v for v in timings.values() if v > -1)\n\n started_date_time = datetime.utcfromtimestamp(\n flow.request.timestamp_start).replace(tzinfo=pytz.timezone(\"UTC\")).isoformat()\n\n request_query_string = [{\"name\": k, \"value\": v}\n for k, v in flow.request.query or {}]\n\n response_body_size = len(flow.response.content)\n response_body_decoded_size = len(flow.response.get_decoded_content())\n response_body_compression = response_body_decoded_size - response_body_size\n\n entry = HAR.entries({\n \"startedDateTime\": started_date_time,\n \"time\": full_time,\n \"request\": {\n \"method\": flow.request.method,\n \"url\": flow.request.url,\n \"httpVersion\": flow.request.http_version,\n \"cookies\": format_cookies(flow.request.cookies),\n \"headers\": format_headers(flow.request.headers),\n \"queryString\": request_query_string,\n \"headersSize\": len(str(flow.request.headers)),\n \"bodySize\": len(flow.request.content),\n },\n \"response\": {\n \"status\": flow.response.status_code,\n \"statusText\": flow.response.reason,\n \"httpVersion\": flow.response.http_version,\n \"cookies\": format_cookies(flow.response.cookies),\n \"headers\": format_headers(flow.response.headers),\n \"content\": {\n \"size\": response_body_size,\n \"compression\": response_body_compression,\n \"mimeType\": flow.response.headers.get('Content-Type', '')\n },\n \"redirectURL\": flow.response.headers.get('Location', ''),\n \"headersSize\": len(str(flow.response.headers)),\n \"bodySize\": response_body_size,\n },\n \"cache\": {},\n \"timings\": timings,\n })\n\n # If the current url is in the page list of context.HARLog or\n # does not have a referrer, we add it as a new pages object.\n is_new_page = (\n flow.request.url in context.HARLog.get_page_list() or\n flow.request.headers.get('Referer') is None\n )\n if is_new_page:\n page_id = 
context.HARLog.create_page_id()\n context.HARLog.add(\n HAR.pages({\n \"startedDateTime\": entry['startedDateTime'],\n \"id\": page_id,\n \"title\": flow.request.url,\n \"pageTimings\": {}\n })\n )\n context.HARLog.set_page_ref(flow.request.url, page_id)\n entry['pageref'] = page_id\n\n # Lookup the referer in the page_ref of context.HARLog to point this entries\n # pageref attribute to the right pages object, then set it as a new\n # reference to build a reference tree.\n elif context.HARLog.get_page_ref(flow.request.headers.get('Referer')) is not None:\n entry['pageref'] = context.HARLog.get_page_ref(\n flow.request.headers['Referer']\n )\n context.HARLog.set_page_ref(\n flow.request.headers['Referer'], entry['pageref']\n )\n\n context.HARLog.add(entry)\n\n\ndef done(context):\n \"\"\"\n Called once on script shutdown, after any other events.\n \"\"\"\n import pprint\n import json\n\n json_dump = context.HARLog.json()\n compressed_json_dump = context.HARLog.compress()\n\n if context.dump_file == '-':\n context.log(pprint.pformat(json.loads(json_dump)))\n elif context.dump_file.endswith('.zhar'):\n file(context.dump_file, \"w\").write(compressed_json_dump)\n else:\n file(context.dump_file, \"w\").write(json_dump)\n context.log(\n \"HAR log finished with %s bytes (%s bytes compressed)\" % (\n len(json_dump), len(compressed_json_dump)\n )\n )\n context.log(\n \"Compression rate is %s%%\" % str(\n 100. * len(compressed_json_dump) / len(json_dump)\n )\n )\n\n\ndef format_cookies(obj):\n if obj:\n return [{\"name\": k.strip(), \"value\": v[0]} for k, v in obj.items()]\n return \"\"\n\n\ndef format_headers(obj):\n if obj:\n return [{\"name\": k, \"value\": v} for k, v in obj.fields]\n return \"\"\n\n\ndef print_attributes(obj, filter_string=None, hide_privates=False):\n \"\"\"\n Useful helper method to quickly get all attributes of an object and its\n values.\n \"\"\"\n for attr in dir(obj):\n if hide_privates and \"__\" in attr:\n continue\n if filter_string is not None and filter_string not in attr:\n continue\n value = getattr(obj, attr)\n print(\"%s.%s\" % ('obj', attr), value, type(value))\n", "path": "examples/har_extractor.py"}], "after_files": [{"content": "\"\"\"\n This inline script utilizes harparser.HAR from\n https://github.com/JustusW/harparser to generate a HAR log object.\n\"\"\"\nimport six\nimport sys\nimport pytz\nfrom harparser import HAR\n\nfrom datetime import datetime\n\n\nclass _HARLog(HAR.log):\n # The attributes need to be registered here for them to actually be\n # available later via self. This is due to HAREncodable linking __getattr__\n # to __getitem__. 
Anything that is set only in __init__ will just be added\n # as key/value pair to self.__classes__.\n __page_list__ = []\n __page_count__ = 0\n __page_ref__ = {}\n\n def __init__(self, page_list=[]):\n self.__page_list__ = page_list\n self.__page_count__ = 0\n self.__page_ref__ = {}\n\n HAR.log.__init__(self, {\"version\": \"1.2\",\n \"creator\": {\"name\": \"MITMPROXY HARExtractor\",\n \"version\": \"0.1\",\n \"comment\": \"\"},\n \"pages\": [],\n \"entries\": []})\n\n def reset(self):\n self.__init__(self.__page_list__)\n\n def add(self, obj):\n if isinstance(obj, HAR.pages):\n self['pages'].append(obj)\n if isinstance(obj, HAR.entries):\n self['entries'].append(obj)\n\n def create_page_id(self):\n self.__page_count__ += 1\n return \"autopage_%s\" % str(self.__page_count__)\n\n def set_page_ref(self, page, ref):\n self.__page_ref__[page] = ref\n\n def get_page_ref(self, page):\n return self.__page_ref__.get(page, None)\n\n def get_page_list(self):\n return self.__page_list__\n\n\ndef start(context):\n \"\"\"\n On start we create a HARLog instance. You will have to adapt this to\n suit your actual needs of HAR generation. As it will probably be\n necessary to cluster logs by IPs or reset them from time to time.\n \"\"\"\n if sys.version_info >= (3, 0):\n raise RuntimeError(\n \"har_extractor.py does not work on Python 3. \"\n \"Please check out https://github.com/mitmproxy/mitmproxy/issues/1320 \"\n \"if you want to help making this work again.\"\n )\n context.dump_file = None\n if len(sys.argv) > 1:\n context.dump_file = sys.argv[1]\n else:\n raise ValueError(\n 'Usage: -s \"har_extractor.py filename\" '\n '(- will output to stdout, filenames ending with .zhar '\n 'will result in compressed har)'\n )\n context.HARLog = _HARLog()\n context.seen_server = set()\n\n\ndef response(context, flow):\n \"\"\"\n Called when a server response has been received. At the time of this\n message both a request and a response are present and completely done.\n \"\"\"\n # Values are converted from float seconds to int milliseconds later.\n ssl_time = -.001\n connect_time = -.001\n if flow.server_conn not in context.seen_server:\n # Calculate the connect_time for this server_conn. Afterwards add it to\n # seen list, in order to avoid the connect_time being present in entries\n # that use an existing connection.\n connect_time = (flow.server_conn.timestamp_tcp_setup -\n flow.server_conn.timestamp_start)\n context.seen_server.add(flow.server_conn)\n\n if flow.server_conn.timestamp_ssl_setup is not None:\n # Get the ssl_time for this server_conn as the difference between\n # the start of the successful tcp setup and the successful ssl\n # setup. If no ssl setup has been made it is left as -1 since it\n # doesn't apply to this connection.\n ssl_time = (flow.server_conn.timestamp_ssl_setup -\n flow.server_conn.timestamp_tcp_setup)\n\n # Calculate the raw timings from the different timestamps present in the\n # request and response object. For lack of a way to measure it dns timings\n # can not be calculated. The same goes for HAR blocked: MITMProxy will open\n # a server connection as soon as it receives the host and port from the\n # client connection. 
So the time spent waiting is actually spent waiting\n # between request.timestamp_end and response.timestamp_start thus it\n # correlates to HAR wait instead.\n timings_raw = {\n 'send': flow.request.timestamp_end - flow.request.timestamp_start,\n 'wait': flow.response.timestamp_start - flow.request.timestamp_end,\n 'receive': flow.response.timestamp_end - flow.response.timestamp_start,\n 'connect': connect_time,\n 'ssl': ssl_time\n }\n\n # HAR timings are integers in ms, so we have to re-encode the raw timings to\n # that format.\n timings = dict([(k, int(1000 * v)) for k, v in six.iteritems(timings_raw)])\n\n # The full_time is the sum of all timings.\n # Timings set to -1 will be ignored as per spec.\n full_time = sum(v for v in timings.values() if v > -1)\n\n started_date_time = datetime.utcfromtimestamp(\n flow.request.timestamp_start).replace(tzinfo=pytz.timezone(\"UTC\")).isoformat()\n\n request_query_string = [{\"name\": k, \"value\": v}\n for k, v in flow.request.query or {}]\n\n response_body_size = len(flow.response.content)\n response_body_decoded_size = len(flow.response.get_decoded_content())\n response_body_compression = response_body_decoded_size - response_body_size\n\n entry = HAR.entries({\n \"startedDateTime\": started_date_time,\n \"time\": full_time,\n \"request\": {\n \"method\": flow.request.method,\n \"url\": flow.request.url,\n \"httpVersion\": flow.request.http_version,\n \"cookies\": format_cookies(flow.request.cookies),\n \"headers\": format_headers(flow.request.headers),\n \"queryString\": request_query_string,\n \"headersSize\": len(str(flow.request.headers)),\n \"bodySize\": len(flow.request.content),\n },\n \"response\": {\n \"status\": flow.response.status_code,\n \"statusText\": flow.response.reason,\n \"httpVersion\": flow.response.http_version,\n \"cookies\": format_cookies(flow.response.cookies),\n \"headers\": format_headers(flow.response.headers),\n \"content\": {\n \"size\": response_body_size,\n \"compression\": response_body_compression,\n \"mimeType\": flow.response.headers.get('Content-Type', '')\n },\n \"redirectURL\": flow.response.headers.get('Location', ''),\n \"headersSize\": len(str(flow.response.headers)),\n \"bodySize\": response_body_size,\n },\n \"cache\": {},\n \"timings\": timings,\n })\n\n # If the current url is in the page list of context.HARLog or\n # does not have a referrer, we add it as a new pages object.\n is_new_page = (\n flow.request.url in context.HARLog.get_page_list() or\n flow.request.headers.get('Referer') is None\n )\n if is_new_page:\n page_id = context.HARLog.create_page_id()\n context.HARLog.add(\n HAR.pages({\n \"startedDateTime\": entry['startedDateTime'],\n \"id\": page_id,\n \"title\": flow.request.url,\n \"pageTimings\": {}\n })\n )\n context.HARLog.set_page_ref(flow.request.url, page_id)\n entry['pageref'] = page_id\n\n # Lookup the referer in the page_ref of context.HARLog to point this entries\n # pageref attribute to the right pages object, then set it as a new\n # reference to build a reference tree.\n elif context.HARLog.get_page_ref(flow.request.headers.get('Referer')) is not None:\n entry['pageref'] = context.HARLog.get_page_ref(\n flow.request.headers['Referer']\n )\n context.HARLog.set_page_ref(\n flow.request.headers['Referer'], entry['pageref']\n )\n\n context.HARLog.add(entry)\n\n\ndef done(context):\n \"\"\"\n Called once on script shutdown, after any other events.\n \"\"\"\n import pprint\n import json\n\n json_dump = context.HARLog.json()\n compressed_json_dump = context.HARLog.compress()\n\n 
if context.dump_file == '-':\n context.log(pprint.pformat(json.loads(json_dump)))\n elif context.dump_file.endswith('.zhar'):\n file(context.dump_file, \"w\").write(compressed_json_dump)\n else:\n file(context.dump_file, \"w\").write(json_dump)\n context.log(\n \"HAR log finished with %s bytes (%s bytes compressed)\" % (\n len(json_dump), len(compressed_json_dump)\n )\n )\n context.log(\n \"Compression rate is %s%%\" % str(\n 100. * len(compressed_json_dump) / len(json_dump)\n )\n )\n\n\ndef format_cookies(obj):\n if obj:\n return [{\"name\": k.strip(), \"value\": v[0]} for k, v in obj.items()]\n return \"\"\n\n\ndef format_headers(obj):\n if obj:\n return [{\"name\": k, \"value\": v} for k, v in obj.fields]\n return \"\"\n\n\ndef print_attributes(obj, filter_string=None, hide_privates=False):\n \"\"\"\n Useful helper method to quickly get all attributes of an object and its\n values.\n \"\"\"\n for attr in dir(obj):\n if hide_privates and \"__\" in attr:\n continue\n if filter_string is not None and filter_string not in attr:\n continue\n value = getattr(obj, attr)\n print(\"%s.%s\" % ('obj', attr), value, type(value))\n", "path": "examples/har_extractor.py"}]}
| 3,033 | 176 |
gh_patches_debug_24573
|
rasdani/github-patches
|
git_diff
|
TabbycatDebate__tabbycat-1258
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error message for BP voting ballots crashes with 500
Sentry Issue: [BACKEND-2BV](https://sentry.io/organizations/tabbycat/issues/1252961179/?referrer=github_integration)
```
AttributeError: 'tuple' object has no attribute 'replace'
(6 additional frame(s) were not displayed)
...
File "django/views/generic/base.py", line 97, in dispatch
return handler(request, *args, **kwargs)
File "django/views/generic/base.py", line 158, in get
context = self.get_context_data(**kwargs)
File "options/views.py", line 54, in get_context_data
"configuration to use consensus ballots."))
File "django/utils/translation/__init__.py", line 79, in gettext
return _trans.gettext(message)
File "django/utils/translation/trans_real.py", line 282, in gettext
eol_message = message.replace('\r\n', '\n').replace('\r', '\n')
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tabbycat/options/views.py`
Content:
```
1 import logging
2
3 from django.contrib import messages
4 from django.http import Http404
5 from django.utils.text import slugify
6 from django.utils.translation import gettext as _
7 from django.views.generic import TemplateView
8 from dynamic_preferences.views import PreferenceFormView
9
10 from actionlog.mixins import LogActionMixin
11 from actionlog.models import ActionLogEntry
12 from tournaments.mixins import TournamentMixin
13 from utils.mixins import AdministratorMixin
14 from utils.misc import reverse_tournament
15
16 from .presets import all_presets, get_preferences_data
17 from .forms import tournament_preference_form_builder
18 from .preferences import tournament_preferences_registry
19
20 logger = logging.getLogger(__name__)
21
22
23 class TournamentConfigIndexView(AdministratorMixin, TournamentMixin, TemplateView):
24 template_name = "preferences_index.html"
25
26 def get_preset_options(self):
27 """Returns a list of all preset classes."""
28 preset_options = []
29
30 for preset_class in all_presets():
31 preset_class.slugified_name = slugify(preset_class.__name__)
32 preset_options.append(preset_class)
33
34 preset_options.sort(key=lambda x: (x.show_in_list, x.name))
35 return preset_options
36
37 def get_context_data(self, **kwargs):
38 kwargs["presets"] = self.get_preset_options()
39 t = self.tournament
40 if t.pref('teams_in_debate') == 'bp':
41 if t.pref('ballots_per_debate_prelim') == 'per-adj' or \
42 t.pref('ballots_per_debate_elim') == 'per-adj':
43 error = _(("Your draw rules specify four teams per-debate but ",
44 "your ballot setting specifies that adjudicators ",
45 "submit independent ballots. These settings ",
46 "<strong>are not compatible and will cause results ",
47 "entry to crash</strong>. You need to go back to ",
48 "the Debate Rules settings and change your ",
49 "configuration to use consensus ballots."))
50 messages.error(self.request, error)
51
52 return super().get_context_data(**kwargs)
53
54
55 class TournamentPreferenceFormView(AdministratorMixin, LogActionMixin, TournamentMixin, PreferenceFormView):
56 registry = tournament_preferences_registry
57 section = None
58 template_name = "preferences_section_set.html"
59
60 action_log_type = ActionLogEntry.ACTION_TYPE_OPTIONS_EDIT
61
62 def form_valid(self, *args, **kwargs):
63 messages.success(self.request, _("Tournament options (%(section)s) saved.") % {'section': self.section.verbose_name})
64 return super().form_valid(*args, **kwargs)
65
66 def get_success_url(self):
67 return reverse_tournament('options-tournament-index', self.tournament)
68
69 def get_form_class(self, *args, **kwargs):
70 section = self.kwargs.get('section', None)
71 form_class = tournament_preference_form_builder(instance=self.tournament, section=section)
72 return form_class
73
74
75 class ConfirmTournamentPreferencesView(AdministratorMixin, TournamentMixin, TemplateView):
76 template_name = "preferences_presets_confirm.html"
77
78 def get_selected_preset(self):
79 preset_name = self.kwargs["preset_name"]
80 # Retrieve the class that matches the name
81 selected_presets = [x for x in all_presets() if slugify(x.__name__) == preset_name]
82 if len(selected_presets) == 0:
83 logger.warning("Could not find preset: %s", preset_name)
84             raise Http404("Preset {!r} not found.".format(preset_name))
85 elif len(selected_presets) > 1:
86 logger.warning("Found more than one preset for %s", preset_name)
87 return selected_presets[0]
88
89 def get_context_data(self, **kwargs):
90 selected_preset = self.get_selected_preset()
91 preset_preferences = get_preferences_data(selected_preset, self.tournament)
92 kwargs["preset_title"] = selected_preset.name
93 kwargs["preset_name"] = self.kwargs["preset_name"]
94 kwargs["changed_preferences"] = [p for p in preset_preferences if p['changed']]
95 kwargs["unchanged_preferences"] = [p for p in preset_preferences if not p['changed']]
96 return super().get_context_data(**kwargs)
97
98 def get_template_names(self):
99 if self.request.method == 'GET':
100 return ["preferences_presets_confirm.html"]
101 else:
102 return ["preferences_presets_complete.html"]
103
104 def save_presets(self):
105 selected_preset = self.get_selected_preset()
106 preset_preferences = get_preferences_data(selected_preset, self.tournament)
107
108 for pref in preset_preferences:
109 self.tournament.preferences[pref['key']] = pref['new_value']
110
111 ActionLogEntry.objects.log(type=ActionLogEntry.ACTION_TYPE_OPTIONS_EDIT,
112 user=self.request.user, tournament=self.tournament, content_object=self.tournament)
113 messages.success(self.request, _("Tournament options saved according to preset "
114 "%(name)s.") % {'name': selected_preset.name})
115
116 def post(self, request, *args, **kwargs):
117 context = self.get_context_data(**kwargs)
118 self.save_presets()
119 return self.render_to_response(context)
120
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/tabbycat/options/views.py b/tabbycat/options/views.py
--- a/tabbycat/options/views.py
+++ b/tabbycat/options/views.py
@@ -40,13 +40,13 @@
if t.pref('teams_in_debate') == 'bp':
if t.pref('ballots_per_debate_prelim') == 'per-adj' or \
t.pref('ballots_per_debate_elim') == 'per-adj':
- error = _(("Your draw rules specify four teams per-debate but ",
- "your ballot setting specifies that adjudicators ",
- "submit independent ballots. These settings ",
- "<strong>are not compatible and will cause results ",
- "entry to crash</strong>. You need to go back to ",
- "the Debate Rules settings and change your ",
- "configuration to use consensus ballots."))
+ error = _("Your draw rules specify four teams per-debate but "
+ "your ballot setting specifies that adjudicators "
+ "submit independent ballots. These settings "
+ "<strong>are not compatible and will cause results "
+ "entry to crash</strong>. You need to go back to "
+ "the Debate Rules settings and change your "
+ "configuration to use consensus ballots.")
messages.error(self.request, error)
return super().get_context_data(**kwargs)
|
{"golden_diff": "diff --git a/tabbycat/options/views.py b/tabbycat/options/views.py\n--- a/tabbycat/options/views.py\n+++ b/tabbycat/options/views.py\n@@ -40,13 +40,13 @@\n if t.pref('teams_in_debate') == 'bp':\n if t.pref('ballots_per_debate_prelim') == 'per-adj' or \\\n t.pref('ballots_per_debate_elim') == 'per-adj':\n- error = _((\"Your draw rules specify four teams per-debate but \",\n- \"your ballot setting specifies that adjudicators \",\n- \"submit independent ballots. These settings \",\n- \"<strong>are not compatible and will cause results \",\n- \"entry to crash</strong>. You need to go back to \",\n- \"the Debate Rules settings and change your \",\n- \"configuration to use consensus ballots.\"))\n+ error = _(\"Your draw rules specify four teams per-debate but \"\n+ \"your ballot setting specifies that adjudicators \"\n+ \"submit independent ballots. These settings \"\n+ \"<strong>are not compatible and will cause results \"\n+ \"entry to crash</strong>. You need to go back to \"\n+ \"the Debate Rules settings and change your \"\n+ \"configuration to use consensus ballots.\")\n messages.error(self.request, error)\n \n return super().get_context_data(**kwargs)\n", "issue": "Error message for BP voting ballots crashes with 500\nSentry Issue: [BACKEND-2BV](https://sentry.io/organizations/tabbycat/issues/1252961179/?referrer=github_integration)\n\n```\nAttributeError: 'tuple' object has no attribute 'replace'\n(6 additional frame(s) were not displayed)\n...\n File \"django/views/generic/base.py\", line 97, in dispatch\n return handler(request, *args, **kwargs)\n File \"django/views/generic/base.py\", line 158, in get\n context = self.get_context_data(**kwargs)\n File \"options/views.py\", line 54, in get_context_data\n \"configuration to use consensus ballots.\"))\n File \"django/utils/translation/__init__.py\", line 79, in gettext\n return _trans.gettext(message)\n File \"django/utils/translation/trans_real.py\", line 282, in gettext\n eol_message = message.replace('\\r\\n', '\\n').replace('\\r', '\\n')\n```\n", "before_files": [{"content": "import logging\n\nfrom django.contrib import messages\nfrom django.http import Http404\nfrom django.utils.text import slugify\nfrom django.utils.translation import gettext as _\nfrom django.views.generic import TemplateView\nfrom dynamic_preferences.views import PreferenceFormView\n\nfrom actionlog.mixins import LogActionMixin\nfrom actionlog.models import ActionLogEntry\nfrom tournaments.mixins import TournamentMixin\nfrom utils.mixins import AdministratorMixin\nfrom utils.misc import reverse_tournament\n\nfrom .presets import all_presets, get_preferences_data\nfrom .forms import tournament_preference_form_builder\nfrom .preferences import tournament_preferences_registry\n\nlogger = logging.getLogger(__name__)\n\n\nclass TournamentConfigIndexView(AdministratorMixin, TournamentMixin, TemplateView):\n template_name = \"preferences_index.html\"\n\n def get_preset_options(self):\n \"\"\"Returns a list of all preset classes.\"\"\"\n preset_options = []\n\n for preset_class in all_presets():\n preset_class.slugified_name = slugify(preset_class.__name__)\n preset_options.append(preset_class)\n\n preset_options.sort(key=lambda x: (x.show_in_list, x.name))\n return preset_options\n\n def get_context_data(self, **kwargs):\n kwargs[\"presets\"] = self.get_preset_options()\n t = self.tournament\n if t.pref('teams_in_debate') == 'bp':\n if t.pref('ballots_per_debate_prelim') == 'per-adj' or \\\n t.pref('ballots_per_debate_elim') == 'per-adj':\n error = 
_((\"Your draw rules specify four teams per-debate but \",\n \"your ballot setting specifies that adjudicators \",\n \"submit independent ballots. These settings \",\n \"<strong>are not compatible and will cause results \",\n \"entry to crash</strong>. You need to go back to \",\n \"the Debate Rules settings and change your \",\n \"configuration to use consensus ballots.\"))\n messages.error(self.request, error)\n\n return super().get_context_data(**kwargs)\n\n\nclass TournamentPreferenceFormView(AdministratorMixin, LogActionMixin, TournamentMixin, PreferenceFormView):\n registry = tournament_preferences_registry\n section = None\n template_name = \"preferences_section_set.html\"\n\n action_log_type = ActionLogEntry.ACTION_TYPE_OPTIONS_EDIT\n\n def form_valid(self, *args, **kwargs):\n messages.success(self.request, _(\"Tournament options (%(section)s) saved.\") % {'section': self.section.verbose_name})\n return super().form_valid(*args, **kwargs)\n\n def get_success_url(self):\n return reverse_tournament('options-tournament-index', self.tournament)\n\n def get_form_class(self, *args, **kwargs):\n section = self.kwargs.get('section', None)\n form_class = tournament_preference_form_builder(instance=self.tournament, section=section)\n return form_class\n\n\nclass ConfirmTournamentPreferencesView(AdministratorMixin, TournamentMixin, TemplateView):\n template_name = \"preferences_presets_confirm.html\"\n\n def get_selected_preset(self):\n preset_name = self.kwargs[\"preset_name\"]\n # Retrieve the class that matches the name\n selected_presets = [x for x in all_presets() if slugify(x.__name__) == preset_name]\n if len(selected_presets) == 0:\n logger.warning(\"Could not find preset: %s\", preset_name)\n raise Http404(\"Preset {!r} no found.\".format(preset_name))\n elif len(selected_presets) > 1:\n logger.warning(\"Found more than one preset for %s\", preset_name)\n return selected_presets[0]\n\n def get_context_data(self, **kwargs):\n selected_preset = self.get_selected_preset()\n preset_preferences = get_preferences_data(selected_preset, self.tournament)\n kwargs[\"preset_title\"] = selected_preset.name\n kwargs[\"preset_name\"] = self.kwargs[\"preset_name\"]\n kwargs[\"changed_preferences\"] = [p for p in preset_preferences if p['changed']]\n kwargs[\"unchanged_preferences\"] = [p for p in preset_preferences if not p['changed']]\n return super().get_context_data(**kwargs)\n\n def get_template_names(self):\n if self.request.method == 'GET':\n return [\"preferences_presets_confirm.html\"]\n else:\n return [\"preferences_presets_complete.html\"]\n\n def save_presets(self):\n selected_preset = self.get_selected_preset()\n preset_preferences = get_preferences_data(selected_preset, self.tournament)\n\n for pref in preset_preferences:\n self.tournament.preferences[pref['key']] = pref['new_value']\n\n ActionLogEntry.objects.log(type=ActionLogEntry.ACTION_TYPE_OPTIONS_EDIT,\n user=self.request.user, tournament=self.tournament, content_object=self.tournament)\n messages.success(self.request, _(\"Tournament options saved according to preset \"\n \"%(name)s.\") % {'name': selected_preset.name})\n\n def post(self, request, *args, **kwargs):\n context = self.get_context_data(**kwargs)\n self.save_presets()\n return self.render_to_response(context)\n", "path": "tabbycat/options/views.py"}], "after_files": [{"content": "import logging\n\nfrom django.contrib import messages\nfrom django.http import Http404\nfrom django.utils.text import slugify\nfrom django.utils.translation import gettext as _\nfrom 
django.views.generic import TemplateView\nfrom dynamic_preferences.views import PreferenceFormView\n\nfrom actionlog.mixins import LogActionMixin\nfrom actionlog.models import ActionLogEntry\nfrom tournaments.mixins import TournamentMixin\nfrom utils.mixins import AdministratorMixin\nfrom utils.misc import reverse_tournament\n\nfrom .presets import all_presets, get_preferences_data\nfrom .forms import tournament_preference_form_builder\nfrom .preferences import tournament_preferences_registry\n\nlogger = logging.getLogger(__name__)\n\n\nclass TournamentConfigIndexView(AdministratorMixin, TournamentMixin, TemplateView):\n template_name = \"preferences_index.html\"\n\n def get_preset_options(self):\n \"\"\"Returns a list of all preset classes.\"\"\"\n preset_options = []\n\n for preset_class in all_presets():\n preset_class.slugified_name = slugify(preset_class.__name__)\n preset_options.append(preset_class)\n\n preset_options.sort(key=lambda x: (x.show_in_list, x.name))\n return preset_options\n\n def get_context_data(self, **kwargs):\n kwargs[\"presets\"] = self.get_preset_options()\n t = self.tournament\n if t.pref('teams_in_debate') == 'bp':\n if t.pref('ballots_per_debate_prelim') == 'per-adj' or \\\n t.pref('ballots_per_debate_elim') == 'per-adj':\n error = _(\"Your draw rules specify four teams per-debate but \"\n \"your ballot setting specifies that adjudicators \"\n \"submit independent ballots. These settings \"\n \"<strong>are not compatible and will cause results \"\n \"entry to crash</strong>. You need to go back to \"\n \"the Debate Rules settings and change your \"\n \"configuration to use consensus ballots.\")\n messages.error(self.request, error)\n\n return super().get_context_data(**kwargs)\n\n\nclass TournamentPreferenceFormView(AdministratorMixin, LogActionMixin, TournamentMixin, PreferenceFormView):\n registry = tournament_preferences_registry\n section = None\n template_name = \"preferences_section_set.html\"\n\n action_log_type = ActionLogEntry.ACTION_TYPE_OPTIONS_EDIT\n\n def form_valid(self, *args, **kwargs):\n messages.success(self.request, _(\"Tournament options (%(section)s) saved.\") % {'section': self.section.verbose_name})\n return super().form_valid(*args, **kwargs)\n\n def get_success_url(self):\n return reverse_tournament('options-tournament-index', self.tournament)\n\n def get_form_class(self, *args, **kwargs):\n section = self.kwargs.get('section', None)\n form_class = tournament_preference_form_builder(instance=self.tournament, section=section)\n return form_class\n\n\nclass ConfirmTournamentPreferencesView(AdministratorMixin, TournamentMixin, TemplateView):\n template_name = \"preferences_presets_confirm.html\"\n\n def get_selected_preset(self):\n preset_name = self.kwargs[\"preset_name\"]\n # Retrieve the class that matches the name\n selected_presets = [x for x in all_presets() if slugify(x.__name__) == preset_name]\n if len(selected_presets) == 0:\n logger.warning(\"Could not find preset: %s\", preset_name)\n raise Http404(\"Preset {!r} no found.\".format(preset_name))\n elif len(selected_presets) > 1:\n logger.warning(\"Found more than one preset for %s\", preset_name)\n return selected_presets[0]\n\n def get_context_data(self, **kwargs):\n selected_preset = self.get_selected_preset()\n preset_preferences = get_preferences_data(selected_preset, self.tournament)\n kwargs[\"preset_title\"] = selected_preset.name\n kwargs[\"preset_name\"] = self.kwargs[\"preset_name\"]\n kwargs[\"changed_preferences\"] = [p for p in preset_preferences if p['changed']]\n 
kwargs[\"unchanged_preferences\"] = [p for p in preset_preferences if not p['changed']]\n return super().get_context_data(**kwargs)\n\n def get_template_names(self):\n if self.request.method == 'GET':\n return [\"preferences_presets_confirm.html\"]\n else:\n return [\"preferences_presets_complete.html\"]\n\n def save_presets(self):\n selected_preset = self.get_selected_preset()\n preset_preferences = get_preferences_data(selected_preset, self.tournament)\n\n for pref in preset_preferences:\n self.tournament.preferences[pref['key']] = pref['new_value']\n\n ActionLogEntry.objects.log(type=ActionLogEntry.ACTION_TYPE_OPTIONS_EDIT,\n user=self.request.user, tournament=self.tournament, content_object=self.tournament)\n messages.success(self.request, _(\"Tournament options saved according to preset \"\n \"%(name)s.\") % {'name': selected_preset.name})\n\n def post(self, request, *args, **kwargs):\n context = self.get_context_data(**kwargs)\n self.save_presets()\n return self.render_to_response(context)\n", "path": "tabbycat/options/views.py"}]}
| 1,823 | 296 |
gh_patches_debug_21936
|
rasdani/github-patches
|
git_diff
|
beeware__toga-1373
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Use Alpha Version of Pythonnet
**Description**
A few days ago, Pythonnet published an [alpha version](https://pypi.org/project/pythonnet/3.0.0a1/) of Pythonnet 3.0.
At the moment we use a pinned commit (8d93c39d) of Pythonnet instead of an official release.
If we don't want to wait until an official version of Pythonnet is released (and there is no estimate of when that will happen), I think we should at least use the alpha version.
**Describe alternatives you've considered**
An alternative is to keep the hashed version as it is :)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/winforms/setup.py`
Content:
```
1 #!/usr/bin/env python
2 import re
3
4 from setuptools import setup
5
6 # Version handline needs to be programatic because
7 # we can't import toga_winforms to compute the version;
8 # and to support versioned subpackage dependencies
9 with open('toga_winforms/__init__.py', encoding='utf8') as version_file:
10 version_match = re.search(
11 r"^__version__ = ['\"]([^'\"]*)['\"]",
12 version_file.read(),
13 re.M
14 )
15 if version_match:
16 version = version_match.group(1)
17 else:
18 raise RuntimeError("Unable to find version string.")
19
20 setup(
21 version=version,
22 install_requires=[
23 # The Python.net team hasn't published 2.X wheels for Python 3.9 or 3.10,
24 # and their development effort seems to be focussed on the 3.X branch;
25 # they've indicated they're not planning to make the 2.X branch compatible
26 # with Python 3.10. If we want to be able to support "current" Python,
27 # we need to work off a source release until they formally release 3.0.
28 #
29 # The 8d93c39d hash is, as best as I can work out, what was in the
30 # 3.0.0-preview2021-10-05 release published to nuget - but they didn't
31 # tag anything for that release. That release contained a bug
32 # (https://github.com/pythonnet/pythonnet/issues/1613) that didn't play well
33 # with pip 21.3, so we use 94b1a71c which was released about a month later.
34 'pythonnet @ git+https://github.com/pythonnet/pythonnet@94b1a71c#egg=pythonnet',
35 'toga-core==%s' % version,
36 ],
37 test_suite='tests',
38 test_require=[
39 'toga-dummy==%s' % version,
40 ]
41 )
42
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/winforms/setup.py b/src/winforms/setup.py
--- a/src/winforms/setup.py
+++ b/src/winforms/setup.py
@@ -24,14 +24,11 @@
# and their development effort seems to be focussed on the 3.X branch;
# they've indicated they're not planning to make the 2.X branch compatible
# with Python 3.10. If we want to be able to support "current" Python,
- # we need to work off a source release until they formally release 3.0.
+ # we need to use the 3.0 branch.
#
- # The 8d93c39d hash is, as best as I can work out, what was in the
- # 3.0.0-preview2021-10-05 release published to nuget - but they didn't
- # tag anything for that release. That release contained a bug
- # (https://github.com/pythonnet/pythonnet/issues/1613) that didn't play well
- # with pip 21.3, so we use 94b1a71c which was released about a month later.
- 'pythonnet @ git+https://github.com/pythonnet/pythonnet@94b1a71c#egg=pythonnet',
+ # At time of writing, the most recent (and only) version of Python.net 3.0
+ # that has been released is the alpha version 3.0.0a1.
+ 'pythonnet>=3.0.0a1',
'toga-core==%s' % version,
],
test_suite='tests',
|
{"golden_diff": "diff --git a/src/winforms/setup.py b/src/winforms/setup.py\n--- a/src/winforms/setup.py\n+++ b/src/winforms/setup.py\n@@ -24,14 +24,11 @@\n # and their development effort seems to be focussed on the 3.X branch;\n # they've indicated they're not planning to make the 2.X branch compatible\n # with Python 3.10. If we want to be able to support \"current\" Python,\n- # we need to work off a source release until they formally release 3.0.\n+ # we need to use the 3.0 branch.\n #\n- # The 8d93c39d hash is, as best as I can work out, what was in the\n- # 3.0.0-preview2021-10-05 release published to nuget - but they didn't\n- # tag anything for that release. That release contained a bug\n- # (https://github.com/pythonnet/pythonnet/issues/1613) that didn't play well\n- # with pip 21.3, so we use 94b1a71c which was released about a month later.\n- 'pythonnet @ git+https://github.com/pythonnet/pythonnet@94b1a71c#egg=pythonnet',\n+ # At time of writing, the most recent (and only) version of Python.net 3.0\n+ # that has been released is the alpha version 3.0.0a1.\n+ 'pythonnet>=3.0.0a1',\n 'toga-core==%s' % version,\n ],\n test_suite='tests',\n", "issue": "Use Alpha Version of Pythonnet\n**Description**\r\nPythonnet has released a few days ago an [alpha version](https://pypi.org/project/pythonnet/3.0.0a1/) of Pythonnet 3.0.\r\nATM we use a hashed version (8d93c39d) of Pythonnet instead of an official release.\r\n\r\nIn the case that we don't want to wait until an official version of Pythonnet is released (which we don't have any approximation when this would happen), I think we should at least use the alpha version.\r\n\r\n**Describe alternatives you've considered**\r\nAn alternative is to keep the hashed version as it is :)\n", "before_files": [{"content": "#!/usr/bin/env python\nimport re\n\nfrom setuptools import setup\n\n# Version handline needs to be programatic because\n# we can't import toga_winforms to compute the version;\n# and to support versioned subpackage dependencies\nwith open('toga_winforms/__init__.py', encoding='utf8') as version_file:\n version_match = re.search(\n r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file.read(),\n re.M\n )\n if version_match:\n version = version_match.group(1)\n else:\n raise RuntimeError(\"Unable to find version string.\")\n\nsetup(\n version=version,\n install_requires=[\n # The Python.net team hasn't published 2.X wheels for Python 3.9 or 3.10,\n # and their development effort seems to be focussed on the 3.X branch;\n # they've indicated they're not planning to make the 2.X branch compatible\n # with Python 3.10. If we want to be able to support \"current\" Python,\n # we need to work off a source release until they formally release 3.0.\n #\n # The 8d93c39d hash is, as best as I can work out, what was in the\n # 3.0.0-preview2021-10-05 release published to nuget - but they didn't\n # tag anything for that release. 
That release contained a bug\n # (https://github.com/pythonnet/pythonnet/issues/1613) that didn't play well\n # with pip 21.3, so we use 94b1a71c which was released about a month later.\n 'pythonnet @ git+https://github.com/pythonnet/pythonnet@94b1a71c#egg=pythonnet',\n 'toga-core==%s' % version,\n ],\n test_suite='tests',\n test_require=[\n 'toga-dummy==%s' % version,\n ]\n)\n", "path": "src/winforms/setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport re\n\nfrom setuptools import setup\n\n# Version handline needs to be programatic because\n# we can't import toga_winforms to compute the version;\n# and to support versioned subpackage dependencies\nwith open('toga_winforms/__init__.py', encoding='utf8') as version_file:\n version_match = re.search(\n r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file.read(),\n re.M\n )\n if version_match:\n version = version_match.group(1)\n else:\n raise RuntimeError(\"Unable to find version string.\")\n\nsetup(\n version=version,\n install_requires=[\n # The Python.net team hasn't published 2.X wheels for Python 3.9 or 3.10,\n # and their development effort seems to be focussed on the 3.X branch;\n # they've indicated they're not planning to make the 2.X branch compatible\n # with Python 3.10. If we want to be able to support \"current\" Python,\n # we need to use the 3.0 branch.\n #\n # At time of writing, the most recent (and only) version of Python.net 3.0\n # that has been released is the alpha version 3.0.0a1.\n 'pythonnet>=3.0.0a1',\n 'toga-core==%s' % version,\n ],\n test_suite='tests',\n test_require=[\n 'toga-dummy==%s' % version,\n ]\n)\n", "path": "src/winforms/setup.py"}]}
| 918 | 381 |
gh_patches_debug_40915
|
rasdani/github-patches
|
git_diff
|
Lightning-Universe__lightning-flash-1067
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`TabularClassifier` fails with `embedding_sizes=None` (the default)
## 🐛 Bug
Constructing the model fails when `embedding_sizes` is left as `None` (the default), but passes if you provide embedding sizes as an empty list.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `flash/tabular/regression/model.py`
Content:
```
1 # Copyright The PyTorch Lightning team.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 from functools import partial
15 from typing import Any, Callable, Dict, List, Optional, Tuple, Type
16
17 import torch
18 from torch.nn import functional as F
19
20 from flash.core.data.io.input import DataKeys, ServeInput
21 from flash.core.data.io.input_transform import InputTransform
22 from flash.core.regression import RegressionTask
23 from flash.core.serve import Composition
24 from flash.core.utilities.imports import _TABULAR_AVAILABLE, requires
25 from flash.core.utilities.types import (
26 INPUT_TRANSFORM_TYPE,
27 LR_SCHEDULER_TYPE,
28 METRICS_TYPE,
29 OPTIMIZER_TYPE,
30 OUTPUT_TYPE,
31 )
32 from flash.tabular.input import TabularDeserializer
33
34 if _TABULAR_AVAILABLE:
35 from pytorch_tabnet.tab_network import TabNet
36
37
38 class TabularRegressor(RegressionTask):
39 """The ``TabularRegressor`` is a :class:`~flash.Task` for regression tabular data.
40
41 Args:
42 num_features: Number of columns in table (not including target column).
43 embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings.
44 loss_fn: Loss function for training, defaults to cross entropy.
45 optimizer: Optimizer to use for training.
46 lr_scheduler: The LR scheduler to use during training.
47 metrics: Metrics to compute for training and evaluation. Can either be an metric from the `torchmetrics`
48 package, a custom metric inherenting from `torchmetrics.Metric`, a callable function or a list/dict
49 containing a combination of the aforementioned. In all cases, each metric needs to have the signature
50 `metric(preds,target)` and return a single scalar tensor.
51 learning_rate: Learning rate to use for training.
52 multi_label: Whether the targets are multi-label or not.
53 output: The :class:`~flash.core.data.io.output.Output` to use when formatting prediction outputs.
54 **tabnet_kwargs: Optional additional arguments for the TabNet model, see
55 `pytorch_tabnet <https://dreamquark-ai.github.io/tabnet/_modules/pytorch_tabnet/tab_network.html#TabNet>`_.
56 """
57
58 required_extras: str = "tabular"
59
60 def __init__(
61 self,
62 num_features: int,
63 embedding_sizes: List[Tuple[int, int]] = None,
64 loss_fn: Callable = F.mse_loss,
65 optimizer: OPTIMIZER_TYPE = "Adam",
66 lr_scheduler: LR_SCHEDULER_TYPE = None,
67 metrics: METRICS_TYPE = None,
68 learning_rate: float = 1e-2,
69 output: OUTPUT_TYPE = None,
70 **tabnet_kwargs,
71 ):
72 self.save_hyperparameters()
73
74 cat_dims, cat_emb_dim = zip(*embedding_sizes) if embedding_sizes else ([], [])
75 model = TabNet(
76 input_dim=num_features,
77 output_dim=1,
78 cat_idxs=list(range(len(embedding_sizes))),
79 cat_dims=list(cat_dims),
80 cat_emb_dim=list(cat_emb_dim),
81 **tabnet_kwargs,
82 )
83
84 super().__init__(
85 model=model,
86 loss_fn=loss_fn,
87 optimizer=optimizer,
88 lr_scheduler=lr_scheduler,
89 metrics=metrics,
90 learning_rate=learning_rate,
91 output=output,
92 )
93
94 self.save_hyperparameters()
95
96 def forward(self, x_in) -> torch.Tensor:
97 # TabNet takes single input, x_in is composed of (categorical, numerical)
98 xs = [x for x in x_in if x.numel()]
99 x = torch.cat(xs, dim=1)
100 return self.model(x)[0].flatten()
101
102 def training_step(self, batch: Any, batch_idx: int) -> Any:
103 batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])
104 return super().training_step(batch, batch_idx)
105
106 def validation_step(self, batch: Any, batch_idx: int) -> Any:
107 batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])
108 return super().validation_step(batch, batch_idx)
109
110 def test_step(self, batch: Any, batch_idx: int) -> Any:
111 batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])
112 return super().test_step(batch, batch_idx)
113
114 def predict_step(self, batch: Any, batch_idx: int, dataloader_idx: int = 0) -> Any:
115 batch = batch[DataKeys.INPUT]
116 return self(batch)
117
118 @classmethod
119 def from_data(cls, datamodule, **kwargs) -> "TabularRegressor":
120 model = cls(datamodule.num_features, datamodule.embedding_sizes, **kwargs)
121 return model
122
123 @requires("serve")
124 def serve(
125 self,
126 host: str = "127.0.0.1",
127 port: int = 8000,
128 sanity_check: bool = True,
129 input_cls: Optional[Type[ServeInput]] = TabularDeserializer,
130 transform: INPUT_TRANSFORM_TYPE = InputTransform,
131 transform_kwargs: Optional[Dict] = None,
132 parameters: Optional[Dict[str, Any]] = None,
133 ) -> Composition:
134 return super().serve(
135 host, port, sanity_check, partial(input_cls, parameters=parameters), transform, transform_kwargs
136 )
137
```
Path: `flash/tabular/classification/model.py`
Content:
```
1 # Copyright The PyTorch Lightning team.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 from functools import partial
15 from typing import Any, Callable, Dict, List, Optional, Tuple, Type
16
17 import torch
18 from torch.nn import functional as F
19
20 from flash.core.classification import ClassificationTask, ProbabilitiesOutput
21 from flash.core.data.io.input import DataKeys, ServeInput
22 from flash.core.data.io.input_transform import InputTransform
23 from flash.core.serve import Composition
24 from flash.core.utilities.imports import _TABULAR_AVAILABLE, requires
25 from flash.core.utilities.types import (
26 INPUT_TRANSFORM_TYPE,
27 LR_SCHEDULER_TYPE,
28 METRICS_TYPE,
29 OPTIMIZER_TYPE,
30 OUTPUT_TYPE,
31 )
32 from flash.tabular.input import TabularDeserializer
33
34 if _TABULAR_AVAILABLE:
35 from pytorch_tabnet.tab_network import TabNet
36
37
38 class TabularClassifier(ClassificationTask):
39 """The ``TabularClassifier`` is a :class:`~flash.Task` for classifying tabular data. For more details, see
40 :ref:`tabular_classification`.
41
42 Args:
43 num_features: Number of columns in table (not including target column).
44 num_classes: Number of classes to classify.
45 embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings.
46 loss_fn: Loss function for training, defaults to cross entropy.
47 optimizer: Optimizer to use for training.
48 lr_scheduler: The LR scheduler to use during training.
49 metrics: Metrics to compute for training and evaluation. Can either be an metric from the `torchmetrics`
50 package, a custom metric inherenting from `torchmetrics.Metric`, a callable function or a list/dict
51 containing a combination of the aforementioned. In all cases, each metric needs to have the signature
52 `metric(preds,target)` and return a single scalar tensor. Defaults to :class:`torchmetrics.Accuracy`.
53 learning_rate: Learning rate to use for training.
54 multi_label: Whether the targets are multi-label or not.
55 output: The :class:`~flash.core.data.io.output.Output` to use when formatting prediction outputs.
56 **tabnet_kwargs: Optional additional arguments for the TabNet model, see
57 `pytorch_tabnet <https://dreamquark-ai.github.io/tabnet/_modules/pytorch_tabnet/tab_network.html#TabNet>`_.
58 """
59
60 required_extras: str = "tabular"
61
62 def __init__(
63 self,
64 num_features: int,
65 num_classes: int,
66 embedding_sizes: List[Tuple[int, int]] = None,
67 loss_fn: Callable = F.cross_entropy,
68 optimizer: OPTIMIZER_TYPE = "Adam",
69 lr_scheduler: LR_SCHEDULER_TYPE = None,
70 metrics: METRICS_TYPE = None,
71 learning_rate: float = 1e-2,
72 multi_label: bool = False,
73 output: OUTPUT_TYPE = None,
74 **tabnet_kwargs,
75 ):
76 self.save_hyperparameters()
77
78 cat_dims, cat_emb_dim = zip(*embedding_sizes) if embedding_sizes else ([], [])
79 model = TabNet(
80 input_dim=num_features,
81 output_dim=num_classes,
82 cat_idxs=list(range(len(embedding_sizes))),
83 cat_dims=list(cat_dims),
84 cat_emb_dim=list(cat_emb_dim),
85 **tabnet_kwargs,
86 )
87
88 super().__init__(
89 model=model,
90 loss_fn=loss_fn,
91 optimizer=optimizer,
92 lr_scheduler=lr_scheduler,
93 metrics=metrics,
94 learning_rate=learning_rate,
95 multi_label=multi_label,
96 output=output or ProbabilitiesOutput(),
97 )
98
99 self.save_hyperparameters()
100
101 def forward(self, x_in) -> torch.Tensor:
102 # TabNet takes single input, x_in is composed of (categorical, numerical)
103 xs = [x for x in x_in if x.numel()]
104 x = torch.cat(xs, dim=1)
105 return self.model(x)[0]
106
107 def training_step(self, batch: Any, batch_idx: int) -> Any:
108 batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])
109 return super().training_step(batch, batch_idx)
110
111 def validation_step(self, batch: Any, batch_idx: int) -> Any:
112 batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])
113 return super().validation_step(batch, batch_idx)
114
115 def test_step(self, batch: Any, batch_idx: int) -> Any:
116 batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])
117 return super().test_step(batch, batch_idx)
118
119 def predict_step(self, batch: Any, batch_idx: int, dataloader_idx: int = 0) -> Any:
120 batch = batch[DataKeys.INPUT]
121 return self(batch)
122
123 @classmethod
124 def from_data(cls, datamodule, **kwargs) -> "TabularClassifier":
125 model = cls(datamodule.num_features, datamodule.num_classes, datamodule.embedding_sizes, **kwargs)
126 return model
127
128 @staticmethod
129 def _ci_benchmark_fn(history: List[Dict[str, Any]]):
130 """This function is used only for debugging usage with CI."""
131 assert history[-1]["val_accuracy"] > 0.6, history[-1]["val_accuracy"]
132
133 @requires("serve")
134 def serve(
135 self,
136 host: str = "127.0.0.1",
137 port: int = 8000,
138 sanity_check: bool = True,
139 input_cls: Optional[Type[ServeInput]] = TabularDeserializer,
140 transform: INPUT_TRANSFORM_TYPE = InputTransform,
141 transform_kwargs: Optional[Dict] = None,
142 parameters: Optional[Dict[str, Any]] = None,
143 ) -> Composition:
144 return super().serve(
145 host, port, sanity_check, partial(input_cls, parameters=parameters), transform, transform_kwargs
146 )
147
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/flash/tabular/classification/model.py b/flash/tabular/classification/model.py
--- a/flash/tabular/classification/model.py
+++ b/flash/tabular/classification/model.py
@@ -42,7 +42,8 @@
Args:
num_features: Number of columns in table (not including target column).
num_classes: Number of classes to classify.
- embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings.
+ embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings (or ``None`` if there are no
+ categorical fields in the data).
loss_fn: Loss function for training, defaults to cross entropy.
optimizer: Optimizer to use for training.
lr_scheduler: The LR scheduler to use during training.
@@ -63,7 +64,7 @@
self,
num_features: int,
num_classes: int,
- embedding_sizes: List[Tuple[int, int]] = None,
+ embedding_sizes: Optional[List[Tuple[int, int]]] = None,
loss_fn: Callable = F.cross_entropy,
optimizer: OPTIMIZER_TYPE = "Adam",
lr_scheduler: LR_SCHEDULER_TYPE = None,
@@ -75,7 +76,11 @@
):
self.save_hyperparameters()
- cat_dims, cat_emb_dim = zip(*embedding_sizes) if embedding_sizes else ([], [])
+ if embedding_sizes:
+ cat_dims, cat_emb_dim = zip(*embedding_sizes)
+ else:
+ cat_dims, cat_emb_dim, embedding_sizes = [], [], []
+
model = TabNet(
input_dim=num_features,
output_dim=num_classes,
diff --git a/flash/tabular/regression/model.py b/flash/tabular/regression/model.py
--- a/flash/tabular/regression/model.py
+++ b/flash/tabular/regression/model.py
@@ -40,7 +40,8 @@
Args:
num_features: Number of columns in table (not including target column).
- embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings.
+ embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings (or ``None`` if there are no
+ categorical fields in the data).
loss_fn: Loss function for training, defaults to cross entropy.
optimizer: Optimizer to use for training.
lr_scheduler: The LR scheduler to use during training.
@@ -60,7 +61,7 @@
def __init__(
self,
num_features: int,
- embedding_sizes: List[Tuple[int, int]] = None,
+ embedding_sizes: Optional[List[Tuple[int, int]]] = None,
loss_fn: Callable = F.mse_loss,
optimizer: OPTIMIZER_TYPE = "Adam",
lr_scheduler: LR_SCHEDULER_TYPE = None,
@@ -71,7 +72,11 @@
):
self.save_hyperparameters()
- cat_dims, cat_emb_dim = zip(*embedding_sizes) if embedding_sizes else ([], [])
+ if embedding_sizes:
+ cat_dims, cat_emb_dim = zip(*embedding_sizes)
+ else:
+ cat_dims, cat_emb_dim, embedding_sizes = [], [], []
+
model = TabNet(
input_dim=num_features,
output_dim=1,
|
{"golden_diff": "diff --git a/flash/tabular/classification/model.py b/flash/tabular/classification/model.py\n--- a/flash/tabular/classification/model.py\n+++ b/flash/tabular/classification/model.py\n@@ -42,7 +42,8 @@\n Args:\n num_features: Number of columns in table (not including target column).\n num_classes: Number of classes to classify.\n- embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings.\n+ embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings (or ``None`` if there are no\n+ categorical fields in the data).\n loss_fn: Loss function for training, defaults to cross entropy.\n optimizer: Optimizer to use for training.\n lr_scheduler: The LR scheduler to use during training.\n@@ -63,7 +64,7 @@\n self,\n num_features: int,\n num_classes: int,\n- embedding_sizes: List[Tuple[int, int]] = None,\n+ embedding_sizes: Optional[List[Tuple[int, int]]] = None,\n loss_fn: Callable = F.cross_entropy,\n optimizer: OPTIMIZER_TYPE = \"Adam\",\n lr_scheduler: LR_SCHEDULER_TYPE = None,\n@@ -75,7 +76,11 @@\n ):\n self.save_hyperparameters()\n \n- cat_dims, cat_emb_dim = zip(*embedding_sizes) if embedding_sizes else ([], [])\n+ if embedding_sizes:\n+ cat_dims, cat_emb_dim = zip(*embedding_sizes)\n+ else:\n+ cat_dims, cat_emb_dim, embedding_sizes = [], [], []\n+\n model = TabNet(\n input_dim=num_features,\n output_dim=num_classes,\ndiff --git a/flash/tabular/regression/model.py b/flash/tabular/regression/model.py\n--- a/flash/tabular/regression/model.py\n+++ b/flash/tabular/regression/model.py\n@@ -40,7 +40,8 @@\n \n Args:\n num_features: Number of columns in table (not including target column).\n- embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings.\n+ embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings (or ``None`` if there are no\n+ categorical fields in the data).\n loss_fn: Loss function for training, defaults to cross entropy.\n optimizer: Optimizer to use for training.\n lr_scheduler: The LR scheduler to use during training.\n@@ -60,7 +61,7 @@\n def __init__(\n self,\n num_features: int,\n- embedding_sizes: List[Tuple[int, int]] = None,\n+ embedding_sizes: Optional[List[Tuple[int, int]]] = None,\n loss_fn: Callable = F.mse_loss,\n optimizer: OPTIMIZER_TYPE = \"Adam\",\n lr_scheduler: LR_SCHEDULER_TYPE = None,\n@@ -71,7 +72,11 @@\n ):\n self.save_hyperparameters()\n \n- cat_dims, cat_emb_dim = zip(*embedding_sizes) if embedding_sizes else ([], [])\n+ if embedding_sizes:\n+ cat_dims, cat_emb_dim = zip(*embedding_sizes)\n+ else:\n+ cat_dims, cat_emb_dim, embedding_sizes = [], [], []\n+\n model = TabNet(\n input_dim=num_features,\n output_dim=1,\n", "issue": "`TabularClassifier` fails with `embedding_sizes=None` (the default)\n## \ud83d\udc1b Bug\r\n\r\nPasses if you provide embedding sizes as an empty list.\r\n\n", "before_files": [{"content": "# Copyright The PyTorch Lightning team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom functools import partial\nfrom typing import Any, Callable, 
Dict, List, Optional, Tuple, Type\n\nimport torch\nfrom torch.nn import functional as F\n\nfrom flash.core.data.io.input import DataKeys, ServeInput\nfrom flash.core.data.io.input_transform import InputTransform\nfrom flash.core.regression import RegressionTask\nfrom flash.core.serve import Composition\nfrom flash.core.utilities.imports import _TABULAR_AVAILABLE, requires\nfrom flash.core.utilities.types import (\n INPUT_TRANSFORM_TYPE,\n LR_SCHEDULER_TYPE,\n METRICS_TYPE,\n OPTIMIZER_TYPE,\n OUTPUT_TYPE,\n)\nfrom flash.tabular.input import TabularDeserializer\n\nif _TABULAR_AVAILABLE:\n from pytorch_tabnet.tab_network import TabNet\n\n\nclass TabularRegressor(RegressionTask):\n \"\"\"The ``TabularRegressor`` is a :class:`~flash.Task` for regression tabular data.\n\n Args:\n num_features: Number of columns in table (not including target column).\n embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings.\n loss_fn: Loss function for training, defaults to cross entropy.\n optimizer: Optimizer to use for training.\n lr_scheduler: The LR scheduler to use during training.\n metrics: Metrics to compute for training and evaluation. Can either be an metric from the `torchmetrics`\n package, a custom metric inherenting from `torchmetrics.Metric`, a callable function or a list/dict\n containing a combination of the aforementioned. In all cases, each metric needs to have the signature\n `metric(preds,target)` and return a single scalar tensor.\n learning_rate: Learning rate to use for training.\n multi_label: Whether the targets are multi-label or not.\n output: The :class:`~flash.core.data.io.output.Output` to use when formatting prediction outputs.\n **tabnet_kwargs: Optional additional arguments for the TabNet model, see\n `pytorch_tabnet <https://dreamquark-ai.github.io/tabnet/_modules/pytorch_tabnet/tab_network.html#TabNet>`_.\n \"\"\"\n\n required_extras: str = \"tabular\"\n\n def __init__(\n self,\n num_features: int,\n embedding_sizes: List[Tuple[int, int]] = None,\n loss_fn: Callable = F.mse_loss,\n optimizer: OPTIMIZER_TYPE = \"Adam\",\n lr_scheduler: LR_SCHEDULER_TYPE = None,\n metrics: METRICS_TYPE = None,\n learning_rate: float = 1e-2,\n output: OUTPUT_TYPE = None,\n **tabnet_kwargs,\n ):\n self.save_hyperparameters()\n\n cat_dims, cat_emb_dim = zip(*embedding_sizes) if embedding_sizes else ([], [])\n model = TabNet(\n input_dim=num_features,\n output_dim=1,\n cat_idxs=list(range(len(embedding_sizes))),\n cat_dims=list(cat_dims),\n cat_emb_dim=list(cat_emb_dim),\n **tabnet_kwargs,\n )\n\n super().__init__(\n model=model,\n loss_fn=loss_fn,\n optimizer=optimizer,\n lr_scheduler=lr_scheduler,\n metrics=metrics,\n learning_rate=learning_rate,\n output=output,\n )\n\n self.save_hyperparameters()\n\n def forward(self, x_in) -> torch.Tensor:\n # TabNet takes single input, x_in is composed of (categorical, numerical)\n xs = [x for x in x_in if x.numel()]\n x = torch.cat(xs, dim=1)\n return self.model(x)[0].flatten()\n\n def training_step(self, batch: Any, batch_idx: int) -> Any:\n batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])\n return super().training_step(batch, batch_idx)\n\n def validation_step(self, batch: Any, batch_idx: int) -> Any:\n batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])\n return super().validation_step(batch, batch_idx)\n\n def test_step(self, batch: Any, batch_idx: int) -> Any:\n batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])\n return super().test_step(batch, batch_idx)\n\n def predict_step(self, batch: Any, batch_idx: int, 
dataloader_idx: int = 0) -> Any:\n batch = batch[DataKeys.INPUT]\n return self(batch)\n\n @classmethod\n def from_data(cls, datamodule, **kwargs) -> \"TabularRegressor\":\n model = cls(datamodule.num_features, datamodule.embedding_sizes, **kwargs)\n return model\n\n @requires(\"serve\")\n def serve(\n self,\n host: str = \"127.0.0.1\",\n port: int = 8000,\n sanity_check: bool = True,\n input_cls: Optional[Type[ServeInput]] = TabularDeserializer,\n transform: INPUT_TRANSFORM_TYPE = InputTransform,\n transform_kwargs: Optional[Dict] = None,\n parameters: Optional[Dict[str, Any]] = None,\n ) -> Composition:\n return super().serve(\n host, port, sanity_check, partial(input_cls, parameters=parameters), transform, transform_kwargs\n )\n", "path": "flash/tabular/regression/model.py"}, {"content": "# Copyright The PyTorch Lightning team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom functools import partial\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Type\n\nimport torch\nfrom torch.nn import functional as F\n\nfrom flash.core.classification import ClassificationTask, ProbabilitiesOutput\nfrom flash.core.data.io.input import DataKeys, ServeInput\nfrom flash.core.data.io.input_transform import InputTransform\nfrom flash.core.serve import Composition\nfrom flash.core.utilities.imports import _TABULAR_AVAILABLE, requires\nfrom flash.core.utilities.types import (\n INPUT_TRANSFORM_TYPE,\n LR_SCHEDULER_TYPE,\n METRICS_TYPE,\n OPTIMIZER_TYPE,\n OUTPUT_TYPE,\n)\nfrom flash.tabular.input import TabularDeserializer\n\nif _TABULAR_AVAILABLE:\n from pytorch_tabnet.tab_network import TabNet\n\n\nclass TabularClassifier(ClassificationTask):\n \"\"\"The ``TabularClassifier`` is a :class:`~flash.Task` for classifying tabular data. For more details, see\n :ref:`tabular_classification`.\n\n Args:\n num_features: Number of columns in table (not including target column).\n num_classes: Number of classes to classify.\n embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings.\n loss_fn: Loss function for training, defaults to cross entropy.\n optimizer: Optimizer to use for training.\n lr_scheduler: The LR scheduler to use during training.\n metrics: Metrics to compute for training and evaluation. Can either be an metric from the `torchmetrics`\n package, a custom metric inherenting from `torchmetrics.Metric`, a callable function or a list/dict\n containing a combination of the aforementioned. In all cases, each metric needs to have the signature\n `metric(preds,target)` and return a single scalar tensor. 
Defaults to :class:`torchmetrics.Accuracy`.\n learning_rate: Learning rate to use for training.\n multi_label: Whether the targets are multi-label or not.\n output: The :class:`~flash.core.data.io.output.Output` to use when formatting prediction outputs.\n **tabnet_kwargs: Optional additional arguments for the TabNet model, see\n `pytorch_tabnet <https://dreamquark-ai.github.io/tabnet/_modules/pytorch_tabnet/tab_network.html#TabNet>`_.\n \"\"\"\n\n required_extras: str = \"tabular\"\n\n def __init__(\n self,\n num_features: int,\n num_classes: int,\n embedding_sizes: List[Tuple[int, int]] = None,\n loss_fn: Callable = F.cross_entropy,\n optimizer: OPTIMIZER_TYPE = \"Adam\",\n lr_scheduler: LR_SCHEDULER_TYPE = None,\n metrics: METRICS_TYPE = None,\n learning_rate: float = 1e-2,\n multi_label: bool = False,\n output: OUTPUT_TYPE = None,\n **tabnet_kwargs,\n ):\n self.save_hyperparameters()\n\n cat_dims, cat_emb_dim = zip(*embedding_sizes) if embedding_sizes else ([], [])\n model = TabNet(\n input_dim=num_features,\n output_dim=num_classes,\n cat_idxs=list(range(len(embedding_sizes))),\n cat_dims=list(cat_dims),\n cat_emb_dim=list(cat_emb_dim),\n **tabnet_kwargs,\n )\n\n super().__init__(\n model=model,\n loss_fn=loss_fn,\n optimizer=optimizer,\n lr_scheduler=lr_scheduler,\n metrics=metrics,\n learning_rate=learning_rate,\n multi_label=multi_label,\n output=output or ProbabilitiesOutput(),\n )\n\n self.save_hyperparameters()\n\n def forward(self, x_in) -> torch.Tensor:\n # TabNet takes single input, x_in is composed of (categorical, numerical)\n xs = [x for x in x_in if x.numel()]\n x = torch.cat(xs, dim=1)\n return self.model(x)[0]\n\n def training_step(self, batch: Any, batch_idx: int) -> Any:\n batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])\n return super().training_step(batch, batch_idx)\n\n def validation_step(self, batch: Any, batch_idx: int) -> Any:\n batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])\n return super().validation_step(batch, batch_idx)\n\n def test_step(self, batch: Any, batch_idx: int) -> Any:\n batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])\n return super().test_step(batch, batch_idx)\n\n def predict_step(self, batch: Any, batch_idx: int, dataloader_idx: int = 0) -> Any:\n batch = batch[DataKeys.INPUT]\n return self(batch)\n\n @classmethod\n def from_data(cls, datamodule, **kwargs) -> \"TabularClassifier\":\n model = cls(datamodule.num_features, datamodule.num_classes, datamodule.embedding_sizes, **kwargs)\n return model\n\n @staticmethod\n def _ci_benchmark_fn(history: List[Dict[str, Any]]):\n \"\"\"This function is used only for debugging usage with CI.\"\"\"\n assert history[-1][\"val_accuracy\"] > 0.6, history[-1][\"val_accuracy\"]\n\n @requires(\"serve\")\n def serve(\n self,\n host: str = \"127.0.0.1\",\n port: int = 8000,\n sanity_check: bool = True,\n input_cls: Optional[Type[ServeInput]] = TabularDeserializer,\n transform: INPUT_TRANSFORM_TYPE = InputTransform,\n transform_kwargs: Optional[Dict] = None,\n parameters: Optional[Dict[str, Any]] = None,\n ) -> Composition:\n return super().serve(\n host, port, sanity_check, partial(input_cls, parameters=parameters), transform, transform_kwargs\n )\n", "path": "flash/tabular/classification/model.py"}], "after_files": [{"content": "# Copyright The PyTorch Lightning team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# 
http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom functools import partial\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Type\n\nimport torch\nfrom torch.nn import functional as F\n\nfrom flash.core.data.io.input import DataKeys, ServeInput\nfrom flash.core.data.io.input_transform import InputTransform\nfrom flash.core.regression import RegressionTask\nfrom flash.core.serve import Composition\nfrom flash.core.utilities.imports import _TABULAR_AVAILABLE, requires\nfrom flash.core.utilities.types import (\n INPUT_TRANSFORM_TYPE,\n LR_SCHEDULER_TYPE,\n METRICS_TYPE,\n OPTIMIZER_TYPE,\n OUTPUT_TYPE,\n)\nfrom flash.tabular.input import TabularDeserializer\n\nif _TABULAR_AVAILABLE:\n from pytorch_tabnet.tab_network import TabNet\n\n\nclass TabularRegressor(RegressionTask):\n \"\"\"The ``TabularRegressor`` is a :class:`~flash.Task` for regression tabular data.\n\n Args:\n num_features: Number of columns in table (not including target column).\n embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings (or ``None`` if there are no\n categorical fields in the data).\n loss_fn: Loss function for training, defaults to cross entropy.\n optimizer: Optimizer to use for training.\n lr_scheduler: The LR scheduler to use during training.\n metrics: Metrics to compute for training and evaluation. Can either be an metric from the `torchmetrics`\n package, a custom metric inherenting from `torchmetrics.Metric`, a callable function or a list/dict\n containing a combination of the aforementioned. 
In all cases, each metric needs to have the signature\n `metric(preds,target)` and return a single scalar tensor.\n learning_rate: Learning rate to use for training.\n multi_label: Whether the targets are multi-label or not.\n output: The :class:`~flash.core.data.io.output.Output` to use when formatting prediction outputs.\n **tabnet_kwargs: Optional additional arguments for the TabNet model, see\n `pytorch_tabnet <https://dreamquark-ai.github.io/tabnet/_modules/pytorch_tabnet/tab_network.html#TabNet>`_.\n \"\"\"\n\n required_extras: str = \"tabular\"\n\n def __init__(\n self,\n num_features: int,\n embedding_sizes: Optional[List[Tuple[int, int]]] = None,\n loss_fn: Callable = F.mse_loss,\n optimizer: OPTIMIZER_TYPE = \"Adam\",\n lr_scheduler: LR_SCHEDULER_TYPE = None,\n metrics: METRICS_TYPE = None,\n learning_rate: float = 1e-2,\n output: OUTPUT_TYPE = None,\n **tabnet_kwargs,\n ):\n self.save_hyperparameters()\n\n if embedding_sizes:\n cat_dims, cat_emb_dim = zip(*embedding_sizes)\n else:\n cat_dims, cat_emb_dim, embedding_sizes = [], [], []\n\n model = TabNet(\n input_dim=num_features,\n output_dim=1,\n cat_idxs=list(range(len(embedding_sizes))),\n cat_dims=list(cat_dims),\n cat_emb_dim=list(cat_emb_dim),\n **tabnet_kwargs,\n )\n\n super().__init__(\n model=model,\n loss_fn=loss_fn,\n optimizer=optimizer,\n lr_scheduler=lr_scheduler,\n metrics=metrics,\n learning_rate=learning_rate,\n output=output,\n )\n\n self.save_hyperparameters()\n\n def forward(self, x_in) -> torch.Tensor:\n # TabNet takes single input, x_in is composed of (categorical, numerical)\n xs = [x for x in x_in if x.numel()]\n x = torch.cat(xs, dim=1)\n return self.model(x)[0].flatten()\n\n def training_step(self, batch: Any, batch_idx: int) -> Any:\n batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])\n return super().training_step(batch, batch_idx)\n\n def validation_step(self, batch: Any, batch_idx: int) -> Any:\n batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])\n return super().validation_step(batch, batch_idx)\n\n def test_step(self, batch: Any, batch_idx: int) -> Any:\n batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])\n return super().test_step(batch, batch_idx)\n\n def predict_step(self, batch: Any, batch_idx: int, dataloader_idx: int = 0) -> Any:\n batch = batch[DataKeys.INPUT]\n return self(batch)\n\n @classmethod\n def from_data(cls, datamodule, **kwargs) -> \"TabularRegressor\":\n model = cls(datamodule.num_features, datamodule.embedding_sizes, **kwargs)\n return model\n\n @requires(\"serve\")\n def serve(\n self,\n host: str = \"127.0.0.1\",\n port: int = 8000,\n sanity_check: bool = True,\n input_cls: Optional[Type[ServeInput]] = TabularDeserializer,\n transform: INPUT_TRANSFORM_TYPE = InputTransform,\n transform_kwargs: Optional[Dict] = None,\n parameters: Optional[Dict[str, Any]] = None,\n ) -> Composition:\n return super().serve(\n host, port, sanity_check, partial(input_cls, parameters=parameters), transform, transform_kwargs\n )\n", "path": "flash/tabular/regression/model.py"}, {"content": "# Copyright The PyTorch Lightning team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the 
License for the specific language governing permissions and\n# limitations under the License.\nfrom functools import partial\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Type\n\nimport torch\nfrom torch.nn import functional as F\n\nfrom flash.core.classification import ClassificationTask, ProbabilitiesOutput\nfrom flash.core.data.io.input import DataKeys, ServeInput\nfrom flash.core.data.io.input_transform import InputTransform\nfrom flash.core.serve import Composition\nfrom flash.core.utilities.imports import _TABULAR_AVAILABLE, requires\nfrom flash.core.utilities.types import (\n INPUT_TRANSFORM_TYPE,\n LR_SCHEDULER_TYPE,\n METRICS_TYPE,\n OPTIMIZER_TYPE,\n OUTPUT_TYPE,\n)\nfrom flash.tabular.input import TabularDeserializer\n\nif _TABULAR_AVAILABLE:\n from pytorch_tabnet.tab_network import TabNet\n\n\nclass TabularClassifier(ClassificationTask):\n \"\"\"The ``TabularClassifier`` is a :class:`~flash.Task` for classifying tabular data. For more details, see\n :ref:`tabular_classification`.\n\n Args:\n num_features: Number of columns in table (not including target column).\n num_classes: Number of classes to classify.\n embedding_sizes: List of (num_classes, emb_dim) to form categorical embeddings (or ``None`` if there are no\n categorical fields in the data).\n loss_fn: Loss function for training, defaults to cross entropy.\n optimizer: Optimizer to use for training.\n lr_scheduler: The LR scheduler to use during training.\n metrics: Metrics to compute for training and evaluation. Can either be an metric from the `torchmetrics`\n package, a custom metric inherenting from `torchmetrics.Metric`, a callable function or a list/dict\n containing a combination of the aforementioned. In all cases, each metric needs to have the signature\n `metric(preds,target)` and return a single scalar tensor. 
Defaults to :class:`torchmetrics.Accuracy`.\n learning_rate: Learning rate to use for training.\n multi_label: Whether the targets are multi-label or not.\n output: The :class:`~flash.core.data.io.output.Output` to use when formatting prediction outputs.\n **tabnet_kwargs: Optional additional arguments for the TabNet model, see\n `pytorch_tabnet <https://dreamquark-ai.github.io/tabnet/_modules/pytorch_tabnet/tab_network.html#TabNet>`_.\n \"\"\"\n\n required_extras: str = \"tabular\"\n\n def __init__(\n self,\n num_features: int,\n num_classes: int,\n embedding_sizes: Optional[List[Tuple[int, int]]] = None,\n loss_fn: Callable = F.cross_entropy,\n optimizer: OPTIMIZER_TYPE = \"Adam\",\n lr_scheduler: LR_SCHEDULER_TYPE = None,\n metrics: METRICS_TYPE = None,\n learning_rate: float = 1e-2,\n multi_label: bool = False,\n output: OUTPUT_TYPE = None,\n **tabnet_kwargs,\n ):\n self.save_hyperparameters()\n\n if embedding_sizes:\n cat_dims, cat_emb_dim = zip(*embedding_sizes)\n else:\n cat_dims, cat_emb_dim, embedding_sizes = [], [], []\n\n model = TabNet(\n input_dim=num_features,\n output_dim=num_classes,\n cat_idxs=list(range(len(embedding_sizes))),\n cat_dims=list(cat_dims),\n cat_emb_dim=list(cat_emb_dim),\n **tabnet_kwargs,\n )\n\n super().__init__(\n model=model,\n loss_fn=loss_fn,\n optimizer=optimizer,\n lr_scheduler=lr_scheduler,\n metrics=metrics,\n learning_rate=learning_rate,\n multi_label=multi_label,\n output=output or ProbabilitiesOutput(),\n )\n\n self.save_hyperparameters()\n\n def forward(self, x_in) -> torch.Tensor:\n # TabNet takes single input, x_in is composed of (categorical, numerical)\n xs = [x for x in x_in if x.numel()]\n x = torch.cat(xs, dim=1)\n return self.model(x)[0]\n\n def training_step(self, batch: Any, batch_idx: int) -> Any:\n batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])\n return super().training_step(batch, batch_idx)\n\n def validation_step(self, batch: Any, batch_idx: int) -> Any:\n batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])\n return super().validation_step(batch, batch_idx)\n\n def test_step(self, batch: Any, batch_idx: int) -> Any:\n batch = (batch[DataKeys.INPUT], batch[DataKeys.TARGET])\n return super().test_step(batch, batch_idx)\n\n def predict_step(self, batch: Any, batch_idx: int, dataloader_idx: int = 0) -> Any:\n batch = batch[DataKeys.INPUT]\n return self(batch)\n\n @classmethod\n def from_data(cls, datamodule, **kwargs) -> \"TabularClassifier\":\n model = cls(datamodule.num_features, datamodule.num_classes, datamodule.embedding_sizes, **kwargs)\n return model\n\n @staticmethod\n def _ci_benchmark_fn(history: List[Dict[str, Any]]):\n \"\"\"This function is used only for debugging usage with CI.\"\"\"\n assert history[-1][\"val_accuracy\"] > 0.6, history[-1][\"val_accuracy\"]\n\n @requires(\"serve\")\n def serve(\n self,\n host: str = \"127.0.0.1\",\n port: int = 8000,\n sanity_check: bool = True,\n input_cls: Optional[Type[ServeInput]] = TabularDeserializer,\n transform: INPUT_TRANSFORM_TYPE = InputTransform,\n transform_kwargs: Optional[Dict] = None,\n parameters: Optional[Dict[str, Any]] = None,\n ) -> Composition:\n return super().serve(\n host, port, sanity_check, partial(input_cls, parameters=parameters), transform, transform_kwargs\n )\n", "path": "flash/tabular/classification/model.py"}]}
| 3,555 | 725 |
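The record above patches the Flash TabNet constructors so that `embedding_sizes` may be `None`: the original one-liner only guarded the `zip()` call, while the later `len(embedding_sizes)` still saw `None`. A minimal, framework-free sketch of the failure and of the patched branching follows; the function names are illustrative and not part of the library, and no `pytorch_tabnet` install is assumed.

```python
from typing import List, Optional, Tuple


def build_cat_args_old(embedding_sizes: Optional[List[Tuple[int, int]]]):
    # Pre-patch pattern: the conditional expression only guards zip(), so the
    # later len() call still receives None when no embeddings are given.
    cat_dims, cat_emb_dim = zip(*embedding_sizes) if embedding_sizes else ([], [])
    return list(range(len(embedding_sizes))), list(cat_dims), list(cat_emb_dim)


def build_cat_args_new(embedding_sizes: Optional[List[Tuple[int, int]]]):
    # Patched pattern: rebind embedding_sizes to [] so every later use is valid.
    if embedding_sizes:
        cat_dims, cat_emb_dim = zip(*embedding_sizes)
    else:
        cat_dims, cat_emb_dim, embedding_sizes = [], [], []
    return list(range(len(embedding_sizes))), list(cat_dims), list(cat_emb_dim)


print(build_cat_args_new(None))               # ([], [], [])
print(build_cat_args_new([(10, 4), (7, 3)]))  # ([0, 1], [10, 7], [4, 3])
try:
    build_cat_args_old(None)
except TypeError as exc:
    print("pre-patch pattern:", exc)          # object of type 'NoneType' has no len()
```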
gh_patches_debug_26975
|
rasdani/github-patches
|
git_diff
|
inventree__InvenTree-4151
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[FR] :checkered_flag: Simple API endpoint to change user metadata
### Please verify that this feature request has NOT been suggested before.
- [X] I checked and didn't find similar feature request
### Problem statement
we currently provide an API endpoint for editing user data via a form. Fetching and changing endpoints are different and take different parameters.
### Suggested solution
I think it would be better to provide a RetrieveUpdateAPI endpoint under `/api/user/me` (somewhat a convention). This endpoint could provide and patch all the metadata for a user.
### Describe alternatives you've considered
N/A
### Examples of other systems
_No response_
### Do you want to develop this?
- [X] I want to develop this.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `InvenTree/users/api.py`
Content:
```
1 """DRF API definition for the 'users' app"""
2
3 from django.contrib.auth.models import User
4 from django.core.exceptions import ObjectDoesNotExist
5 from django.urls import include, path, re_path
6
7 from django_filters.rest_framework import DjangoFilterBackend
8 from rest_framework import filters, permissions, status
9 from rest_framework.authtoken.models import Token
10 from rest_framework.response import Response
11 from rest_framework.views import APIView
12
13 from InvenTree.mixins import ListAPI, RetrieveAPI
14 from InvenTree.serializers import UserSerializer
15 from users.models import Owner, RuleSet, check_user_role
16 from users.serializers import OwnerSerializer
17
18
19 class OwnerList(ListAPI):
20 """List API endpoint for Owner model.
21
22 Cannot create.
23 """
24
25 queryset = Owner.objects.all()
26 serializer_class = OwnerSerializer
27
28 def filter_queryset(self, queryset):
29 """Implement text search for the "owner" model.
30
31 Note that an "owner" can be either a group, or a user,
32 so we cannot do a direct text search.
33
34 A "hack" here is to post-process the queryset and simply
35 remove any values which do not match.
36
37 It is not necessarily "efficient" to do it this way,
38 but until we determine a better way, this is what we have...
39 """
40 search_term = str(self.request.query_params.get('search', '')).lower()
41
42 queryset = super().filter_queryset(queryset)
43
44 if not search_term:
45 return queryset
46
47 results = []
48
49 # Extract search term f
50
51 for result in queryset.all():
52 if search_term in result.name().lower():
53 results.append(result)
54
55 return results
56
57
58 class OwnerDetail(RetrieveAPI):
59 """Detail API endpoint for Owner model.
60
61 Cannot edit or delete
62 """
63
64 queryset = Owner.objects.all()
65 serializer_class = OwnerSerializer
66
67
68 class RoleDetails(APIView):
69 """API endpoint which lists the available role permissions for the current user.
70
71 (Requires authentication)
72 """
73
74 permission_classes = [
75 permissions.IsAuthenticated
76 ]
77
78 def get(self, request, *args, **kwargs):
79 """Return the list of roles / permissions available to the current user"""
80 user = request.user
81
82 roles = {}
83
84 for ruleset in RuleSet.RULESET_CHOICES:
85
86 role, text = ruleset
87
88 permissions = []
89
90 for permission in RuleSet.RULESET_PERMISSIONS:
91 if check_user_role(user, role, permission):
92
93 permissions.append(permission)
94
95 if len(permissions) > 0:
96 roles[role] = permissions
97 else:
98 roles[role] = None # pragma: no cover
99
100 data = {
101 'user': user.pk,
102 'username': user.username,
103 'roles': roles,
104 'is_staff': user.is_staff,
105 'is_superuser': user.is_superuser,
106 }
107
108 return Response(data)
109
110
111 class UserDetail(RetrieveAPI):
112 """Detail endpoint for a single user."""
113
114 queryset = User.objects.all()
115 serializer_class = UserSerializer
116 permission_classes = (permissions.IsAuthenticated,)
117
118
119 class UserList(ListAPI):
120 """List endpoint for detail on all users."""
121
122 queryset = User.objects.all()
123 serializer_class = UserSerializer
124 permission_classes = (permissions.IsAuthenticated,)
125
126 filter_backends = [
127 DjangoFilterBackend,
128 filters.SearchFilter,
129 ]
130
131 search_fields = [
132 'first_name',
133 'last_name',
134 'username',
135 ]
136
137
138 class GetAuthToken(APIView):
139 """Return authentication token for an authenticated user."""
140
141 permission_classes = [
142 permissions.IsAuthenticated,
143 ]
144
145 def get(self, request, *args, **kwargs):
146 """Return an API token if the user is authenticated
147
148 - If the user already has a token, return it
149 - Otherwise, create a new token
150 """
151 if request.user.is_authenticated:
152 # Get the user token (or create one if it does not exist)
153 token, created = Token.objects.get_or_create(user=request.user)
154 return Response({
155 'token': token.key,
156 })
157
158 def delete(self, request):
159 """User has requested deletion of API token"""
160 try:
161 request.user.auth_token.delete()
162 return Response({"success": "Successfully logged out."},
163 status=status.HTTP_202_ACCEPTED)
164 except (AttributeError, ObjectDoesNotExist):
165 return Response({"error": "Bad request"},
166 status=status.HTTP_400_BAD_REQUEST)
167
168
169 user_urls = [
170
171 re_path(r'roles/?$', RoleDetails.as_view(), name='api-user-roles'),
172 re_path(r'token/?$', GetAuthToken.as_view(), name='api-token'),
173
174 re_path(r'^owner/', include([
175 path('<int:pk>/', OwnerDetail.as_view(), name='api-owner-detail'),
176 re_path(r'^.*$', OwnerList.as_view(), name='api-owner-list'),
177 ])),
178
179 re_path(r'^(?P<pk>[0-9]+)/?$', UserDetail.as_view(), name='user-detail'),
180 path('', UserList.as_view()),
181 ]
182
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/InvenTree/users/api.py b/InvenTree/users/api.py
--- a/InvenTree/users/api.py
+++ b/InvenTree/users/api.py
@@ -10,7 +10,7 @@
from rest_framework.response import Response
from rest_framework.views import APIView
-from InvenTree.mixins import ListAPI, RetrieveAPI
+from InvenTree.mixins import ListAPI, RetrieveAPI, RetrieveUpdateAPI
from InvenTree.serializers import UserSerializer
from users.models import Owner, RuleSet, check_user_role
from users.serializers import OwnerSerializer
@@ -116,6 +116,14 @@
permission_classes = (permissions.IsAuthenticated,)
+class MeUserDetail(RetrieveUpdateAPI, UserDetail):
+ """Detail endpoint for current user."""
+
+ def get_object(self):
+ """Always return the current user object"""
+ return self.request.user
+
+
class UserList(ListAPI):
"""List endpoint for detail on all users."""
@@ -170,6 +178,7 @@
re_path(r'roles/?$', RoleDetails.as_view(), name='api-user-roles'),
re_path(r'token/?$', GetAuthToken.as_view(), name='api-token'),
+ re_path(r'^me/', MeUserDetail.as_view(), name='api-user-me'),
re_path(r'^owner/', include([
path('<int:pk>/', OwnerDetail.as_view(), name='api-owner-detail'),
|
{"golden_diff": "diff --git a/InvenTree/users/api.py b/InvenTree/users/api.py\n--- a/InvenTree/users/api.py\n+++ b/InvenTree/users/api.py\n@@ -10,7 +10,7 @@\n from rest_framework.response import Response\n from rest_framework.views import APIView\n \n-from InvenTree.mixins import ListAPI, RetrieveAPI\n+from InvenTree.mixins import ListAPI, RetrieveAPI, RetrieveUpdateAPI\n from InvenTree.serializers import UserSerializer\n from users.models import Owner, RuleSet, check_user_role\n from users.serializers import OwnerSerializer\n@@ -116,6 +116,14 @@\n permission_classes = (permissions.IsAuthenticated,)\n \n \n+class MeUserDetail(RetrieveUpdateAPI, UserDetail):\n+ \"\"\"Detail endpoint for current user.\"\"\"\n+\n+ def get_object(self):\n+ \"\"\"Always return the current user object\"\"\"\n+ return self.request.user\n+\n+\n class UserList(ListAPI):\n \"\"\"List endpoint for detail on all users.\"\"\"\n \n@@ -170,6 +178,7 @@\n \n re_path(r'roles/?$', RoleDetails.as_view(), name='api-user-roles'),\n re_path(r'token/?$', GetAuthToken.as_view(), name='api-token'),\n+ re_path(r'^me/', MeUserDetail.as_view(), name='api-user-me'),\n \n re_path(r'^owner/', include([\n path('<int:pk>/', OwnerDetail.as_view(), name='api-owner-detail'),\n", "issue": "[FR] :checkered_flag: Simple API endpoint to change user metadata\n### Please verify that this feature request has NOT been suggested before.\n\n- [X] I checked and didn't find similar feature request\n\n### Problem statement\n\nwe currently provide an API endpoint for editing user data via a form. Fetching and changing endpoints are different and take different parameters.\n\n### Suggested solution\n\nI think it would be better to provide a RetrieveUpdateAPI endpoint under `/api/user/me` (somewhat a convention). This endpoint could provide and patch all the metadata for a user.\n\n### Describe alternatives you've considered\n\nN/A\n\n### Examples of other systems\n\n_No response_\n\n### Do you want to develop this?\n\n- [X] I want to develop this.\n", "before_files": [{"content": "\"\"\"DRF API definition for the 'users' app\"\"\"\n\nfrom django.contrib.auth.models import User\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.urls import include, path, re_path\n\nfrom django_filters.rest_framework import DjangoFilterBackend\nfrom rest_framework import filters, permissions, status\nfrom rest_framework.authtoken.models import Token\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\n\nfrom InvenTree.mixins import ListAPI, RetrieveAPI\nfrom InvenTree.serializers import UserSerializer\nfrom users.models import Owner, RuleSet, check_user_role\nfrom users.serializers import OwnerSerializer\n\n\nclass OwnerList(ListAPI):\n \"\"\"List API endpoint for Owner model.\n\n Cannot create.\n \"\"\"\n\n queryset = Owner.objects.all()\n serializer_class = OwnerSerializer\n\n def filter_queryset(self, queryset):\n \"\"\"Implement text search for the \"owner\" model.\n\n Note that an \"owner\" can be either a group, or a user,\n so we cannot do a direct text search.\n\n A \"hack\" here is to post-process the queryset and simply\n remove any values which do not match.\n\n It is not necessarily \"efficient\" to do it this way,\n but until we determine a better way, this is what we have...\n \"\"\"\n search_term = str(self.request.query_params.get('search', '')).lower()\n\n queryset = super().filter_queryset(queryset)\n\n if not search_term:\n return queryset\n\n results = []\n\n # Extract search term f\n\n for result in 
queryset.all():\n if search_term in result.name().lower():\n results.append(result)\n\n return results\n\n\nclass OwnerDetail(RetrieveAPI):\n \"\"\"Detail API endpoint for Owner model.\n\n Cannot edit or delete\n \"\"\"\n\n queryset = Owner.objects.all()\n serializer_class = OwnerSerializer\n\n\nclass RoleDetails(APIView):\n \"\"\"API endpoint which lists the available role permissions for the current user.\n\n (Requires authentication)\n \"\"\"\n\n permission_classes = [\n permissions.IsAuthenticated\n ]\n\n def get(self, request, *args, **kwargs):\n \"\"\"Return the list of roles / permissions available to the current user\"\"\"\n user = request.user\n\n roles = {}\n\n for ruleset in RuleSet.RULESET_CHOICES:\n\n role, text = ruleset\n\n permissions = []\n\n for permission in RuleSet.RULESET_PERMISSIONS:\n if check_user_role(user, role, permission):\n\n permissions.append(permission)\n\n if len(permissions) > 0:\n roles[role] = permissions\n else:\n roles[role] = None # pragma: no cover\n\n data = {\n 'user': user.pk,\n 'username': user.username,\n 'roles': roles,\n 'is_staff': user.is_staff,\n 'is_superuser': user.is_superuser,\n }\n\n return Response(data)\n\n\nclass UserDetail(RetrieveAPI):\n \"\"\"Detail endpoint for a single user.\"\"\"\n\n queryset = User.objects.all()\n serializer_class = UserSerializer\n permission_classes = (permissions.IsAuthenticated,)\n\n\nclass UserList(ListAPI):\n \"\"\"List endpoint for detail on all users.\"\"\"\n\n queryset = User.objects.all()\n serializer_class = UserSerializer\n permission_classes = (permissions.IsAuthenticated,)\n\n filter_backends = [\n DjangoFilterBackend,\n filters.SearchFilter,\n ]\n\n search_fields = [\n 'first_name',\n 'last_name',\n 'username',\n ]\n\n\nclass GetAuthToken(APIView):\n \"\"\"Return authentication token for an authenticated user.\"\"\"\n\n permission_classes = [\n permissions.IsAuthenticated,\n ]\n\n def get(self, request, *args, **kwargs):\n \"\"\"Return an API token if the user is authenticated\n\n - If the user already has a token, return it\n - Otherwise, create a new token\n \"\"\"\n if request.user.is_authenticated:\n # Get the user token (or create one if it does not exist)\n token, created = Token.objects.get_or_create(user=request.user)\n return Response({\n 'token': token.key,\n })\n\n def delete(self, request):\n \"\"\"User has requested deletion of API token\"\"\"\n try:\n request.user.auth_token.delete()\n return Response({\"success\": \"Successfully logged out.\"},\n status=status.HTTP_202_ACCEPTED)\n except (AttributeError, ObjectDoesNotExist):\n return Response({\"error\": \"Bad request\"},\n status=status.HTTP_400_BAD_REQUEST)\n\n\nuser_urls = [\n\n re_path(r'roles/?$', RoleDetails.as_view(), name='api-user-roles'),\n re_path(r'token/?$', GetAuthToken.as_view(), name='api-token'),\n\n re_path(r'^owner/', include([\n path('<int:pk>/', OwnerDetail.as_view(), name='api-owner-detail'),\n re_path(r'^.*$', OwnerList.as_view(), name='api-owner-list'),\n ])),\n\n re_path(r'^(?P<pk>[0-9]+)/?$', UserDetail.as_view(), name='user-detail'),\n path('', UserList.as_view()),\n]\n", "path": "InvenTree/users/api.py"}], "after_files": [{"content": "\"\"\"DRF API definition for the 'users' app\"\"\"\n\nfrom django.contrib.auth.models import User\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.urls import include, path, re_path\n\nfrom django_filters.rest_framework import DjangoFilterBackend\nfrom rest_framework import filters, permissions, status\nfrom rest_framework.authtoken.models import 
Token\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\n\nfrom InvenTree.mixins import ListAPI, RetrieveAPI, RetrieveUpdateAPI\nfrom InvenTree.serializers import UserSerializer\nfrom users.models import Owner, RuleSet, check_user_role\nfrom users.serializers import OwnerSerializer\n\n\nclass OwnerList(ListAPI):\n \"\"\"List API endpoint for Owner model.\n\n Cannot create.\n \"\"\"\n\n queryset = Owner.objects.all()\n serializer_class = OwnerSerializer\n\n def filter_queryset(self, queryset):\n \"\"\"Implement text search for the \"owner\" model.\n\n Note that an \"owner\" can be either a group, or a user,\n so we cannot do a direct text search.\n\n A \"hack\" here is to post-process the queryset and simply\n remove any values which do not match.\n\n It is not necessarily \"efficient\" to do it this way,\n but until we determine a better way, this is what we have...\n \"\"\"\n search_term = str(self.request.query_params.get('search', '')).lower()\n\n queryset = super().filter_queryset(queryset)\n\n if not search_term:\n return queryset\n\n results = []\n\n # Extract search term f\n\n for result in queryset.all():\n if search_term in result.name().lower():\n results.append(result)\n\n return results\n\n\nclass OwnerDetail(RetrieveAPI):\n \"\"\"Detail API endpoint for Owner model.\n\n Cannot edit or delete\n \"\"\"\n\n queryset = Owner.objects.all()\n serializer_class = OwnerSerializer\n\n\nclass RoleDetails(APIView):\n \"\"\"API endpoint which lists the available role permissions for the current user.\n\n (Requires authentication)\n \"\"\"\n\n permission_classes = [\n permissions.IsAuthenticated\n ]\n\n def get(self, request, *args, **kwargs):\n \"\"\"Return the list of roles / permissions available to the current user\"\"\"\n user = request.user\n\n roles = {}\n\n for ruleset in RuleSet.RULESET_CHOICES:\n\n role, text = ruleset\n\n permissions = []\n\n for permission in RuleSet.RULESET_PERMISSIONS:\n if check_user_role(user, role, permission):\n\n permissions.append(permission)\n\n if len(permissions) > 0:\n roles[role] = permissions\n else:\n roles[role] = None # pragma: no cover\n\n data = {\n 'user': user.pk,\n 'username': user.username,\n 'roles': roles,\n 'is_staff': user.is_staff,\n 'is_superuser': user.is_superuser,\n }\n\n return Response(data)\n\n\nclass UserDetail(RetrieveAPI):\n \"\"\"Detail endpoint for a single user.\"\"\"\n\n queryset = User.objects.all()\n serializer_class = UserSerializer\n permission_classes = (permissions.IsAuthenticated,)\n\n\nclass MeUserDetail(RetrieveUpdateAPI, UserDetail):\n \"\"\"Detail endpoint for current user.\"\"\"\n\n def get_object(self):\n \"\"\"Always return the current user object\"\"\"\n return self.request.user\n\n\nclass UserList(ListAPI):\n \"\"\"List endpoint for detail on all users.\"\"\"\n\n queryset = User.objects.all()\n serializer_class = UserSerializer\n permission_classes = (permissions.IsAuthenticated,)\n\n filter_backends = [\n DjangoFilterBackend,\n filters.SearchFilter,\n ]\n\n search_fields = [\n 'first_name',\n 'last_name',\n 'username',\n ]\n\n\nclass GetAuthToken(APIView):\n \"\"\"Return authentication token for an authenticated user.\"\"\"\n\n permission_classes = [\n permissions.IsAuthenticated,\n ]\n\n def get(self, request, *args, **kwargs):\n \"\"\"Return an API token if the user is authenticated\n\n - If the user already has a token, return it\n - Otherwise, create a new token\n \"\"\"\n if request.user.is_authenticated:\n # Get the user token (or create one if it does not 
exist)\n token, created = Token.objects.get_or_create(user=request.user)\n return Response({\n 'token': token.key,\n })\n\n def delete(self, request):\n \"\"\"User has requested deletion of API token\"\"\"\n try:\n request.user.auth_token.delete()\n return Response({\"success\": \"Successfully logged out.\"},\n status=status.HTTP_202_ACCEPTED)\n except (AttributeError, ObjectDoesNotExist):\n return Response({\"error\": \"Bad request\"},\n status=status.HTTP_400_BAD_REQUEST)\n\n\nuser_urls = [\n\n re_path(r'roles/?$', RoleDetails.as_view(), name='api-user-roles'),\n re_path(r'token/?$', GetAuthToken.as_view(), name='api-token'),\n re_path(r'^me/', MeUserDetail.as_view(), name='api-user-me'),\n\n re_path(r'^owner/', include([\n path('<int:pk>/', OwnerDetail.as_view(), name='api-owner-detail'),\n re_path(r'^.*$', OwnerList.as_view(), name='api-owner-list'),\n ])),\n\n re_path(r'^(?P<pk>[0-9]+)/?$', UserDetail.as_view(), name='user-detail'),\n path('', UserList.as_view()),\n]\n", "path": "InvenTree/users/api.py"}]}
| 1,948 | 321 |
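The golden diff in the record above adds a `MeUserDetail` view and routes it at `/api/user/me`, with `get_object()` always returning `request.user`. The sketch below shows the underlying Django REST Framework idiom in isolation; it assumes a configured Django project with `rest_framework` installed, and the `UserSerializer` here is a hypothetical stand-in for `InvenTree.serializers.UserSerializer`, not InvenTree's actual class.

```python
from django.contrib.auth.models import User
from rest_framework import generics, permissions, serializers


class UserSerializer(serializers.ModelSerializer):
    class Meta:
        model = User
        fields = ["pk", "username", "first_name", "last_name", "email"]


class MeUserDetail(generics.RetrieveUpdateAPIView):
    """Retrieve or update the profile of the requesting user."""

    queryset = User.objects.all()
    serializer_class = UserSerializer
    permission_classes = (permissions.IsAuthenticated,)

    def get_object(self):
        # Ignore any URL kwargs and always operate on the authenticated user,
        # so GET fetches and PATCH updates the caller's own record.
        return self.request.user
```

Because `get_object()` is overridden, the same serializer drives both reads and partial updates, which is the single fetch-and-patch endpoint the issue asks for: a client can GET `/api/user/me` and then PATCH individual fields without knowing its numeric user id.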
gh_patches_debug_47762
|
rasdani/github-patches
|
git_diff
|
Kinto__kinto-2027
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Crash with "in <string>' requires string as left operand, not int"
```
ValidationError: 'minVersion' is a required property
Failed validating 'required' in schema['properties']['versionRange']['items']['properties']['targetApplication']['items']:
{'additionalProperties': False,
'description': 'Target application',
'properties': {'guid': {'description': 'The application unique '
'identifier.',
'enum': ['{ec8030f7-c20a-464f-9b0e-13a3a9e97384}',
'{3550f703-e582-4d05-9a08-453d09bdfdc6}',
'{92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}',
'{aa3c5121-dab2-40e2-81ca-7ea25febc110}'],
'enumNames': ['Firefox',
'Thunderbird',
'Seamonkey',
'Android'],
'title': 'Application id',
'type': 'string'},
'maxVersion': {'$ref': '#/definitions/maxVersion'},
'minVersion': {'$ref': '#/definitions/minVersion'}},
'required': ['guid', 'minVersion', 'maxVersion'],
'title': 'Target application',
'type': 'object'}
On instance['versionRange'][0]['targetApplication'][0]:
{'guid': 'ec8030f7-c20a-464f-9b0e-13a3a9e97384', 'maxVersion': '57.0.*'}
File "kinto/views/records.py", line 73, in process_record
jsonschema.validate(data, schema)
File "jsonschema/validators.py", line 541, in validate
cls(schema, *args, **kwargs).validate(instance)
File "jsonschema/validators.py", line 130, in validate
raise error
TypeError: 'in <string>' requires string as left operand, not int
(11 additional frame(s) were not displayed)
...
File "cornice/service.py", line 494, in wrapper
response = view_()
File "kinto/core/resource/__init__.py", line 463, in put
new_record = self.process_record(post_record, old=existing)
File "kinto/views/records.py", line 81, in process_record
raise_invalid(self.request, name=field, description=e.message)
File "kinto/core/errors.py", line 178, in raise_invalid
response = json_error_handler(request)
File "kinto/core/errors.py", line 149, in json_error_handler
if name in description:
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kinto/core/errors.py`
Content:
```
1 import colander
2 import logging
3 from pyramid import httpexceptions
4 from enum import Enum
5
6 from kinto.core.schema import Any
7 from kinto.core.utils import json, reapply_cors
8
9
10 class ERRORS(Enum):
11 """Predefined errors as specified by the API.
12
13 +-------------+-------+------------------------------------------------+
14 | Status code | Errno | Description |
15 +=============+=======+================================================+
16 | 401 | 104 | Missing Authorization Token |
17 +-------------+-------+------------------------------------------------+
18 | 401 | 105 | Invalid Authorization Token |
19 +-------------+-------+------------------------------------------------+
20 | 400 | 106 | request body was not valid JSON |
21 +-------------+-------+------------------------------------------------+
22 | 400 | 107 | invalid request parameter |
23 +-------------+-------+------------------------------------------------+
24 | 400 | 108 | missing request parameter |
25 +-------------+-------+------------------------------------------------+
26 | 400 | 109 | invalid posted data |
27 +-------------+-------+------------------------------------------------+
28 | 404 | 110 | Invalid Token / id |
29 +-------------+-------+------------------------------------------------+
30 | 404 | 111 | Missing Token / id |
31 +-------------+-------+------------------------------------------------+
32 | 411 | 112 | Content-Length header was not provided |
33 +-------------+-------+------------------------------------------------+
34 | 413 | 113 | Request body too large |
35 +-------------+-------+------------------------------------------------+
36 | 412 | 114 | Resource was modified meanwhile |
37 +-------------+-------+------------------------------------------------+
38 | 405 | 115 | Method not allowed on this end point |
39 +-------------+-------+------------------------------------------------+
40 | 404 | 116 | Requested version not available on this server |
41 +-------------+-------+------------------------------------------------+
42 | 429 | 117 | Client has sent too many requests |
43 +-------------+-------+------------------------------------------------+
44 | 403 | 121 | Resource's access forbidden for this user |
45 +-------------+-------+------------------------------------------------+
46 | 409 | 122 | Another resource violates constraint |
47 +-------------+-------+------------------------------------------------+
48 | 500 | 999 | Internal Server Error |
49 +-------------+-------+------------------------------------------------+
50 | 503 | 201 | Service Temporary unavailable due to high load |
51 +-------------+-------+------------------------------------------------+
52 | 410 | 202 | Service deprecated |
53 +-------------+-------+------------------------------------------------+
54 """
55
56 MISSING_AUTH_TOKEN = 104
57 INVALID_AUTH_TOKEN = 105
58 BADJSON = 106
59 INVALID_PARAMETERS = 107
60 MISSING_PARAMETERS = 108
61 INVALID_POSTED_DATA = 109
62 INVALID_RESOURCE_ID = 110
63 MISSING_RESOURCE = 111
64 MISSING_CONTENT_LENGTH = 112
65 REQUEST_TOO_LARGE = 113
66 MODIFIED_MEANWHILE = 114
67 METHOD_NOT_ALLOWED = 115
68 VERSION_NOT_AVAILABLE = 116
69 CLIENT_REACHED_CAPACITY = 117
70 FORBIDDEN = 121
71 CONSTRAINT_VIOLATED = 122
72 UNDEFINED = 999
73 BACKEND = 201
74 SERVICE_DEPRECATED = 202
75
76
77 class ErrorSchema(colander.MappingSchema):
78 """Payload schema for Kinto errors."""
79
80 code = colander.SchemaNode(colander.Integer())
81 errno = colander.SchemaNode(colander.Integer())
82 error = colander.SchemaNode(colander.String())
83 message = colander.SchemaNode(colander.String(), missing=colander.drop)
84 info = colander.SchemaNode(colander.String(), missing=colander.drop)
85 details = colander.SchemaNode(Any(), missing=colander.drop)
86
87
88 def http_error(
89 httpexception, errno=None, code=None, error=None, message=None, info=None, details=None
90 ):
91 """Return a JSON formated response matching the error HTTP API.
92
93 :param httpexception: Instance of :mod:`~pyramid:pyramid.httpexceptions`
94 :param errno: stable application-level error number (e.g. 109)
95 :param code: matches the HTTP status code (e.g 400)
96 :param error: string description of error type (e.g. "Bad request")
97 :param message: context information (e.g. "Invalid request parameters")
98 :param info: information about error (e.g. URL to troubleshooting)
99 :param details: additional structured details (conflicting object)
100 :returns: the formatted response object
101 :rtype: pyramid.httpexceptions.HTTPException
102 """
103 errno = errno or ERRORS.UNDEFINED
104
105 if isinstance(errno, Enum):
106 errno = errno.value
107
108 body = {
109 "code": code or httpexception.code,
110 "errno": errno,
111 "error": error or httpexception.title,
112 "message": message,
113 "info": info,
114 "details": details or colander.drop,
115 }
116
117 response = httpexception
118 response.errno = errno
119 response.json = ErrorSchema().deserialize(body)
120 response.content_type = "application/json"
121 return response
122
123
124 def json_error_handler(request):
125 """Cornice JSON error handler, returning consistant JSON formatted errors
126 from schema validation errors.
127
128 This is meant to be used is custom services in your applications.
129
130 .. code-block:: python
131
132 upload = Service(name="upload", path='/upload',
133 error_handler=errors.json_error_handler)
134
135 .. warning::
136
137 Only the first error of the list is formatted in the response.
138 (c.f. HTTP API).
139 """
140 errors = request.errors
141 sorted_errors = sorted(errors, key=lambda x: str(x["name"]))
142 # In Cornice, we call error handler if at least one error was set.
143 error = sorted_errors[0]
144 name = error["name"]
145 description = error["description"]
146
147 if isinstance(description, bytes):
148 description = error["description"].decode("utf-8")
149
150 if name is not None:
151 if name in description:
152 message = description
153 else:
154 message = "{name} in {location}: {description}".format_map(error)
155 else:
156 message = "{location}: {description}".format_map(error)
157
158 response = http_error(
159 httpexceptions.HTTPBadRequest(),
160 code=errors.status,
161 errno=ERRORS.INVALID_PARAMETERS.value,
162 error="Invalid parameters",
163 message=message,
164 details=errors,
165 )
166 response.status = errors.status
167 response = reapply_cors(request, response)
168 return response
169
170
171 def raise_invalid(request, location="body", name=None, description=None, **kwargs):
172 """Helper to raise a validation error.
173
174 :param location: location in request (e.g. ``'querystring'``)
175 :param name: field name
176 :param description: detailed description of validation error
177
178 :raises: :class:`~pyramid:pyramid.httpexceptions.HTTPBadRequest`
179 """
180 request.errors.add(location, name, description, **kwargs)
181 response = json_error_handler(request)
182 raise response
183
184
185 def send_alert(request, message=None, url=None, code="soft-eol"):
186 """Helper to add an Alert header to the response.
187
188 :param code: The type of error 'soft-eol', 'hard-eol'
189 :param message: The description message.
190 :param url: The URL for more information, default to the documentation url.
191 """
192 if url is None:
193 url = request.registry.settings["project_docs"]
194
195 request.response.headers["Alert"] = json.dumps({"code": code, "message": message, "url": url})
196
197
198 def request_GET(request):
199 """Catches a UnicodeDecode error in request.GET in case a wrong request was received.
200 Fixing a webob long term issue: https://github.com/Pylons/webob/issues/161
201 """
202 try:
203 return request.GET
204 except UnicodeDecodeError:
205 querystring = request.environ.get("QUERY_STRING", "")
206 logger = logging.getLogger(__name__)
207 logger.warning("Error decoding QUERY_STRING: %s" % request.environ)
208 raise http_error(
209 httpexceptions.HTTPBadRequest(),
210 errno=ERRORS.INVALID_PARAMETERS,
211 message="A request with an incorrect encoding in the querystring was"
212 "received. Please make sure your requests are encoded in UTF-8: %s" % querystring,
213 )
214
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/kinto/core/errors.py b/kinto/core/errors.py
--- a/kinto/core/errors.py
+++ b/kinto/core/errors.py
@@ -148,7 +148,7 @@
description = error["description"].decode("utf-8")
if name is not None:
- if name in description:
+ if str(name) in description:
message = description
else:
message = "{name} in {location}: {description}".format_map(error)
|
{"golden_diff": "diff --git a/kinto/core/errors.py b/kinto/core/errors.py\n--- a/kinto/core/errors.py\n+++ b/kinto/core/errors.py\n@@ -148,7 +148,7 @@\n description = error[\"description\"].decode(\"utf-8\")\n \n if name is not None:\n- if name in description:\n+ if str(name) in description:\n message = description\n else:\n message = \"{name} in {location}: {description}\".format_map(error)\n", "issue": "Crash with \"in <string>' requires string as left operand, not int\"\n```\r\nValidationError: 'minVersion' is a required property\r\n\r\nFailed validating 'required' in schema['properties']['versionRange']['items']['properties']['targetApplication']['items']:\r\n {'additionalProperties': False,\r\n 'description': 'Target application',\r\n 'properties': {'guid': {'description': 'The application unique '\r\n 'identifier.',\r\n 'enum': ['{ec8030f7-c20a-464f-9b0e-13a3a9e97384}',\r\n '{3550f703-e582-4d05-9a08-453d09bdfdc6}',\r\n '{92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}',\r\n '{aa3c5121-dab2-40e2-81ca-7ea25febc110}'],\r\n 'enumNames': ['Firefox',\r\n 'Thunderbird',\r\n 'Seamonkey',\r\n 'Android'],\r\n 'title': 'Application id',\r\n 'type': 'string'},\r\n 'maxVersion': {'$ref': '#/definitions/maxVersion'},\r\n 'minVersion': {'$ref': '#/definitions/minVersion'}},\r\n 'required': ['guid', 'minVersion', 'maxVersion'],\r\n 'title': 'Target application',\r\n 'type': 'object'}\r\n\r\nOn instance['versionRange'][0]['targetApplication'][0]:\r\n {'guid': 'ec8030f7-c20a-464f-9b0e-13a3a9e97384', 'maxVersion': '57.0.*'}\r\n File \"kinto/views/records.py\", line 73, in process_record\r\n jsonschema.validate(data, schema)\r\n File \"jsonschema/validators.py\", line 541, in validate\r\n cls(schema, *args, **kwargs).validate(instance)\r\n File \"jsonschema/validators.py\", line 130, in validate\r\n raise error\r\n\r\nTypeError: 'in <string>' requires string as left operand, not int\r\n(11 additional frame(s) were not displayed)\r\n...\r\n File \"cornice/service.py\", line 494, in wrapper\r\n response = view_()\r\n File \"kinto/core/resource/__init__.py\", line 463, in put\r\n new_record = self.process_record(post_record, old=existing)\r\n File \"kinto/views/records.py\", line 81, in process_record\r\n raise_invalid(self.request, name=field, description=e.message)\r\n File \"kinto/core/errors.py\", line 178, in raise_invalid\r\n response = json_error_handler(request)\r\n File \"kinto/core/errors.py\", line 149, in json_error_handler\r\n if name in description:\r\n```\n", "before_files": [{"content": "import colander\nimport logging\nfrom pyramid import httpexceptions\nfrom enum import Enum\n\nfrom kinto.core.schema import Any\nfrom kinto.core.utils import json, reapply_cors\n\n\nclass ERRORS(Enum):\n \"\"\"Predefined errors as specified by the API.\n\n +-------------+-------+------------------------------------------------+\n | Status code | Errno | Description |\n +=============+=======+================================================+\n | 401 | 104 | Missing Authorization Token |\n +-------------+-------+------------------------------------------------+\n | 401 | 105 | Invalid Authorization Token |\n +-------------+-------+------------------------------------------------+\n | 400 | 106 | request body was not valid JSON |\n +-------------+-------+------------------------------------------------+\n | 400 | 107 | invalid request parameter |\n +-------------+-------+------------------------------------------------+\n | 400 | 108 | missing request parameter |\n 
+-------------+-------+------------------------------------------------+\n | 400 | 109 | invalid posted data |\n +-------------+-------+------------------------------------------------+\n | 404 | 110 | Invalid Token / id |\n +-------------+-------+------------------------------------------------+\n | 404 | 111 | Missing Token / id |\n +-------------+-------+------------------------------------------------+\n | 411 | 112 | Content-Length header was not provided |\n +-------------+-------+------------------------------------------------+\n | 413 | 113 | Request body too large |\n +-------------+-------+------------------------------------------------+\n | 412 | 114 | Resource was modified meanwhile |\n +-------------+-------+------------------------------------------------+\n | 405 | 115 | Method not allowed on this end point |\n +-------------+-------+------------------------------------------------+\n | 404 | 116 | Requested version not available on this server |\n +-------------+-------+------------------------------------------------+\n | 429 | 117 | Client has sent too many requests |\n +-------------+-------+------------------------------------------------+\n | 403 | 121 | Resource's access forbidden for this user |\n +-------------+-------+------------------------------------------------+\n | 409 | 122 | Another resource violates constraint |\n +-------------+-------+------------------------------------------------+\n | 500 | 999 | Internal Server Error |\n +-------------+-------+------------------------------------------------+\n | 503 | 201 | Service Temporary unavailable due to high load |\n +-------------+-------+------------------------------------------------+\n | 410 | 202 | Service deprecated |\n +-------------+-------+------------------------------------------------+\n \"\"\"\n\n MISSING_AUTH_TOKEN = 104\n INVALID_AUTH_TOKEN = 105\n BADJSON = 106\n INVALID_PARAMETERS = 107\n MISSING_PARAMETERS = 108\n INVALID_POSTED_DATA = 109\n INVALID_RESOURCE_ID = 110\n MISSING_RESOURCE = 111\n MISSING_CONTENT_LENGTH = 112\n REQUEST_TOO_LARGE = 113\n MODIFIED_MEANWHILE = 114\n METHOD_NOT_ALLOWED = 115\n VERSION_NOT_AVAILABLE = 116\n CLIENT_REACHED_CAPACITY = 117\n FORBIDDEN = 121\n CONSTRAINT_VIOLATED = 122\n UNDEFINED = 999\n BACKEND = 201\n SERVICE_DEPRECATED = 202\n\n\nclass ErrorSchema(colander.MappingSchema):\n \"\"\"Payload schema for Kinto errors.\"\"\"\n\n code = colander.SchemaNode(colander.Integer())\n errno = colander.SchemaNode(colander.Integer())\n error = colander.SchemaNode(colander.String())\n message = colander.SchemaNode(colander.String(), missing=colander.drop)\n info = colander.SchemaNode(colander.String(), missing=colander.drop)\n details = colander.SchemaNode(Any(), missing=colander.drop)\n\n\ndef http_error(\n httpexception, errno=None, code=None, error=None, message=None, info=None, details=None\n):\n \"\"\"Return a JSON formated response matching the error HTTP API.\n\n :param httpexception: Instance of :mod:`~pyramid:pyramid.httpexceptions`\n :param errno: stable application-level error number (e.g. 109)\n :param code: matches the HTTP status code (e.g 400)\n :param error: string description of error type (e.g. \"Bad request\")\n :param message: context information (e.g. \"Invalid request parameters\")\n :param info: information about error (e.g. 
URL to troubleshooting)\n :param details: additional structured details (conflicting object)\n :returns: the formatted response object\n :rtype: pyramid.httpexceptions.HTTPException\n \"\"\"\n errno = errno or ERRORS.UNDEFINED\n\n if isinstance(errno, Enum):\n errno = errno.value\n\n body = {\n \"code\": code or httpexception.code,\n \"errno\": errno,\n \"error\": error or httpexception.title,\n \"message\": message,\n \"info\": info,\n \"details\": details or colander.drop,\n }\n\n response = httpexception\n response.errno = errno\n response.json = ErrorSchema().deserialize(body)\n response.content_type = \"application/json\"\n return response\n\n\ndef json_error_handler(request):\n \"\"\"Cornice JSON error handler, returning consistant JSON formatted errors\n from schema validation errors.\n\n This is meant to be used is custom services in your applications.\n\n .. code-block:: python\n\n upload = Service(name=\"upload\", path='/upload',\n error_handler=errors.json_error_handler)\n\n .. warning::\n\n Only the first error of the list is formatted in the response.\n (c.f. HTTP API).\n \"\"\"\n errors = request.errors\n sorted_errors = sorted(errors, key=lambda x: str(x[\"name\"]))\n # In Cornice, we call error handler if at least one error was set.\n error = sorted_errors[0]\n name = error[\"name\"]\n description = error[\"description\"]\n\n if isinstance(description, bytes):\n description = error[\"description\"].decode(\"utf-8\")\n\n if name is not None:\n if name in description:\n message = description\n else:\n message = \"{name} in {location}: {description}\".format_map(error)\n else:\n message = \"{location}: {description}\".format_map(error)\n\n response = http_error(\n httpexceptions.HTTPBadRequest(),\n code=errors.status,\n errno=ERRORS.INVALID_PARAMETERS.value,\n error=\"Invalid parameters\",\n message=message,\n details=errors,\n )\n response.status = errors.status\n response = reapply_cors(request, response)\n return response\n\n\ndef raise_invalid(request, location=\"body\", name=None, description=None, **kwargs):\n \"\"\"Helper to raise a validation error.\n\n :param location: location in request (e.g. 
``'querystring'``)\n :param name: field name\n :param description: detailed description of validation error\n\n :raises: :class:`~pyramid:pyramid.httpexceptions.HTTPBadRequest`\n \"\"\"\n request.errors.add(location, name, description, **kwargs)\n response = json_error_handler(request)\n raise response\n\n\ndef send_alert(request, message=None, url=None, code=\"soft-eol\"):\n \"\"\"Helper to add an Alert header to the response.\n\n :param code: The type of error 'soft-eol', 'hard-eol'\n :param message: The description message.\n :param url: The URL for more information, default to the documentation url.\n \"\"\"\n if url is None:\n url = request.registry.settings[\"project_docs\"]\n\n request.response.headers[\"Alert\"] = json.dumps({\"code\": code, \"message\": message, \"url\": url})\n\n\ndef request_GET(request):\n \"\"\"Catches a UnicodeDecode error in request.GET in case a wrong request was received.\n Fixing a webob long term issue: https://github.com/Pylons/webob/issues/161\n \"\"\"\n try:\n return request.GET\n except UnicodeDecodeError:\n querystring = request.environ.get(\"QUERY_STRING\", \"\")\n logger = logging.getLogger(__name__)\n logger.warning(\"Error decoding QUERY_STRING: %s\" % request.environ)\n raise http_error(\n httpexceptions.HTTPBadRequest(),\n errno=ERRORS.INVALID_PARAMETERS,\n message=\"A request with an incorrect encoding in the querystring was\"\n \"received. Please make sure your requests are encoded in UTF-8: %s\" % querystring,\n )\n", "path": "kinto/core/errors.py"}], "after_files": [{"content": "import colander\nimport logging\nfrom pyramid import httpexceptions\nfrom enum import Enum\n\nfrom kinto.core.schema import Any\nfrom kinto.core.utils import json, reapply_cors\n\n\nclass ERRORS(Enum):\n \"\"\"Predefined errors as specified by the API.\n\n +-------------+-------+------------------------------------------------+\n | Status code | Errno | Description |\n +=============+=======+================================================+\n | 401 | 104 | Missing Authorization Token |\n +-------------+-------+------------------------------------------------+\n | 401 | 105 | Invalid Authorization Token |\n +-------------+-------+------------------------------------------------+\n | 400 | 106 | request body was not valid JSON |\n +-------------+-------+------------------------------------------------+\n | 400 | 107 | invalid request parameter |\n +-------------+-------+------------------------------------------------+\n | 400 | 108 | missing request parameter |\n +-------------+-------+------------------------------------------------+\n | 400 | 109 | invalid posted data |\n +-------------+-------+------------------------------------------------+\n | 404 | 110 | Invalid Token / id |\n +-------------+-------+------------------------------------------------+\n | 404 | 111 | Missing Token / id |\n +-------------+-------+------------------------------------------------+\n | 411 | 112 | Content-Length header was not provided |\n +-------------+-------+------------------------------------------------+\n | 413 | 113 | Request body too large |\n +-------------+-------+------------------------------------------------+\n | 412 | 114 | Resource was modified meanwhile |\n +-------------+-------+------------------------------------------------+\n | 405 | 115 | Method not allowed on this end point |\n +-------------+-------+------------------------------------------------+\n | 404 | 116 | Requested version not available on this server |\n 
+-------------+-------+------------------------------------------------+\n | 429 | 117 | Client has sent too many requests |\n +-------------+-------+------------------------------------------------+\n | 403 | 121 | Resource's access forbidden for this user |\n +-------------+-------+------------------------------------------------+\n | 409 | 122 | Another resource violates constraint |\n +-------------+-------+------------------------------------------------+\n | 500 | 999 | Internal Server Error |\n +-------------+-------+------------------------------------------------+\n | 503 | 201 | Service Temporary unavailable due to high load |\n +-------------+-------+------------------------------------------------+\n | 410 | 202 | Service deprecated |\n +-------------+-------+------------------------------------------------+\n \"\"\"\n\n MISSING_AUTH_TOKEN = 104\n INVALID_AUTH_TOKEN = 105\n BADJSON = 106\n INVALID_PARAMETERS = 107\n MISSING_PARAMETERS = 108\n INVALID_POSTED_DATA = 109\n INVALID_RESOURCE_ID = 110\n MISSING_RESOURCE = 111\n MISSING_CONTENT_LENGTH = 112\n REQUEST_TOO_LARGE = 113\n MODIFIED_MEANWHILE = 114\n METHOD_NOT_ALLOWED = 115\n VERSION_NOT_AVAILABLE = 116\n CLIENT_REACHED_CAPACITY = 117\n FORBIDDEN = 121\n CONSTRAINT_VIOLATED = 122\n UNDEFINED = 999\n BACKEND = 201\n SERVICE_DEPRECATED = 202\n\n\nclass ErrorSchema(colander.MappingSchema):\n \"\"\"Payload schema for Kinto errors.\"\"\"\n\n code = colander.SchemaNode(colander.Integer())\n errno = colander.SchemaNode(colander.Integer())\n error = colander.SchemaNode(colander.String())\n message = colander.SchemaNode(colander.String(), missing=colander.drop)\n info = colander.SchemaNode(colander.String(), missing=colander.drop)\n details = colander.SchemaNode(Any(), missing=colander.drop)\n\n\ndef http_error(\n httpexception, errno=None, code=None, error=None, message=None, info=None, details=None\n):\n \"\"\"Return a JSON formated response matching the error HTTP API.\n\n :param httpexception: Instance of :mod:`~pyramid:pyramid.httpexceptions`\n :param errno: stable application-level error number (e.g. 109)\n :param code: matches the HTTP status code (e.g 400)\n :param error: string description of error type (e.g. \"Bad request\")\n :param message: context information (e.g. \"Invalid request parameters\")\n :param info: information about error (e.g. URL to troubleshooting)\n :param details: additional structured details (conflicting object)\n :returns: the formatted response object\n :rtype: pyramid.httpexceptions.HTTPException\n \"\"\"\n errno = errno or ERRORS.UNDEFINED\n\n if isinstance(errno, Enum):\n errno = errno.value\n\n body = {\n \"code\": code or httpexception.code,\n \"errno\": errno,\n \"error\": error or httpexception.title,\n \"message\": message,\n \"info\": info,\n \"details\": details or colander.drop,\n }\n\n response = httpexception\n response.errno = errno\n response.json = ErrorSchema().deserialize(body)\n response.content_type = \"application/json\"\n return response\n\n\ndef json_error_handler(request):\n \"\"\"Cornice JSON error handler, returning consistant JSON formatted errors\n from schema validation errors.\n\n This is meant to be used is custom services in your applications.\n\n .. code-block:: python\n\n upload = Service(name=\"upload\", path='/upload',\n error_handler=errors.json_error_handler)\n\n .. warning::\n\n Only the first error of the list is formatted in the response.\n (c.f. 
HTTP API).\n \"\"\"\n errors = request.errors\n sorted_errors = sorted(errors, key=lambda x: str(x[\"name\"]))\n # In Cornice, we call error handler if at least one error was set.\n error = sorted_errors[0]\n name = error[\"name\"]\n description = error[\"description\"]\n\n if isinstance(description, bytes):\n description = error[\"description\"].decode(\"utf-8\")\n\n if name is not None:\n if str(name) in description:\n message = description\n else:\n message = \"{name} in {location}: {description}\".format_map(error)\n else:\n message = \"{location}: {description}\".format_map(error)\n\n response = http_error(\n httpexceptions.HTTPBadRequest(),\n code=errors.status,\n errno=ERRORS.INVALID_PARAMETERS.value,\n error=\"Invalid parameters\",\n message=message,\n details=errors,\n )\n response.status = errors.status\n response = reapply_cors(request, response)\n return response\n\n\ndef raise_invalid(request, location=\"body\", name=None, description=None, **kwargs):\n \"\"\"Helper to raise a validation error.\n\n :param location: location in request (e.g. ``'querystring'``)\n :param name: field name\n :param description: detailed description of validation error\n\n :raises: :class:`~pyramid:pyramid.httpexceptions.HTTPBadRequest`\n \"\"\"\n request.errors.add(location, name, description, **kwargs)\n response = json_error_handler(request)\n raise response\n\n\ndef send_alert(request, message=None, url=None, code=\"soft-eol\"):\n \"\"\"Helper to add an Alert header to the response.\n\n :param code: The type of error 'soft-eol', 'hard-eol'\n :param message: The description message.\n :param url: The URL for more information, default to the documentation url.\n \"\"\"\n if url is None:\n url = request.registry.settings[\"project_docs\"]\n\n request.response.headers[\"Alert\"] = json.dumps({\"code\": code, \"message\": message, \"url\": url})\n\n\ndef request_GET(request):\n \"\"\"Catches a UnicodeDecode error in request.GET in case a wrong request was received.\n Fixing a webob long term issue: https://github.com/Pylons/webob/issues/161\n \"\"\"\n try:\n return request.GET\n except UnicodeDecodeError:\n querystring = request.environ.get(\"QUERY_STRING\", \"\")\n logger = logging.getLogger(__name__)\n logger.warning(\"Error decoding QUERY_STRING: %s\" % request.environ)\n raise http_error(\n httpexceptions.HTTPBadRequest(),\n errno=ERRORS.INVALID_PARAMETERS,\n message=\"A request with an incorrect encoding in the querystring was\"\n \"received. Please make sure your requests are encoded in UTF-8: %s\" % querystring,\n )\n", "path": "kinto/core/errors.py"}]}
| 3,376 | 108 |
gh_patches_debug_151
|
rasdani/github-patches
|
git_diff
|
pyca__cryptography-2606
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Set a minimum version on setuptools
Apparently it fails in hilarious ways with very very old setuptools (or even distribute). We should set a floor in `setup.py`.
@dstufft do you have opinions on what a reasonable floor would be?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 # This file is dual licensed under the terms of the Apache License, Version
4 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
5 # for complete details.
6
7 from __future__ import absolute_import, division, print_function
8
9 import os
10 import platform
11 import subprocess
12 import sys
13 from distutils.command.build import build
14
15 import pkg_resources
16
17 from setuptools import find_packages, setup
18 from setuptools.command.install import install
19 from setuptools.command.test import test
20
21
22 base_dir = os.path.dirname(__file__)
23 src_dir = os.path.join(base_dir, "src")
24
25 # When executing the setup.py, we need to be able to import ourselves, this
26 # means that we need to add the src/ directory to the sys.path.
27 sys.path.insert(0, src_dir)
28
29 about = {}
30 with open(os.path.join(src_dir, "cryptography", "__about__.py")) as f:
31 exec(f.read(), about)
32
33
34 VECTORS_DEPENDENCY = "cryptography_vectors=={0}".format(about['__version__'])
35
36 requirements = [
37 "idna>=2.0",
38 "pyasn1>=0.1.8",
39 "six>=1.4.1",
40 "setuptools",
41 ]
42 setup_requirements = []
43
44 if sys.version_info < (3, 4):
45 requirements.append("enum34")
46
47 if sys.version_info < (3, 3):
48 requirements.append("ipaddress")
49
50 if platform.python_implementation() == "PyPy":
51 if sys.pypy_version_info < (2, 6):
52 raise RuntimeError(
53 "cryptography 1.0 is not compatible with PyPy < 2.6. Please "
54 "upgrade PyPy to use this library."
55 )
56 else:
57 requirements.append("cffi>=1.1.0")
58 setup_requirements.append("cffi>=1.1.0")
59
60 # If you add a new dep here you probably need to add it in the tox.ini as well
61 test_requirements = [
62 "pytest",
63 "pretend",
64 "iso8601",
65 "hypothesis",
66 "pyasn1_modules",
67 ]
68
69 # If there's no vectors locally that probably means we are in a tarball and
70 # need to go and get the matching vectors package from PyPi
71 if not os.path.exists(os.path.join(base_dir, "vectors/setup.py")):
72 test_requirements.append(VECTORS_DEPENDENCY)
73
74
75 def cc_is_available():
76 return sys.platform == "darwin" and list(map(
77 int, platform.mac_ver()[0].split("."))) >= [10, 8, 0]
78
79
80 backends = [
81 "openssl = cryptography.hazmat.backends.openssl:backend"
82 ]
83
84 if cc_is_available():
85 backends.append(
86 "commoncrypto = cryptography.hazmat.backends.commoncrypto:backend",
87 )
88
89
90 class PyTest(test):
91 def finalize_options(self):
92 test.finalize_options(self)
93 self.test_args = []
94 self.test_suite = True
95
96 # This means there's a vectors/ folder with the package in here.
97 # cd into it, install the vectors package and then refresh sys.path
98 if VECTORS_DEPENDENCY not in test_requirements:
99 subprocess.check_call(
100 [sys.executable, "setup.py", "install"], cwd="vectors"
101 )
102 pkg_resources.get_distribution("cryptography_vectors").activate()
103
104 def run_tests(self):
105 # Import here because in module scope the eggs are not loaded.
106 import pytest
107 test_args = [os.path.join(base_dir, "tests")]
108 errno = pytest.main(test_args)
109 sys.exit(errno)
110
111
112 def keywords_with_side_effects(argv):
113 """
114 Get a dictionary with setup keywords that (can) have side effects.
115
116 :param argv: A list of strings with command line arguments.
117 :returns: A dictionary with keyword arguments for the ``setup()`` function.
118
119 This setup.py script uses the setuptools 'setup_requires' feature because
120 this is required by the cffi package to compile extension modules. The
121 purpose of ``keywords_with_side_effects()`` is to avoid triggering the cffi
122 build process as a result of setup.py invocations that don't need the cffi
123 module to be built (setup.py serves the dual purpose of exposing package
124 metadata).
125
126 All of the options listed by ``python setup.py --help`` that print
127 information should be recognized here. The commands ``clean``,
128 ``egg_info``, ``register``, ``sdist`` and ``upload`` are also recognized.
129 Any combination of these options and commands is also supported.
130
131 This function was originally based on the `setup.py script`_ of SciPy (see
132 also the discussion in `pip issue #25`_).
133
134 .. _pip issue #25: https://github.com/pypa/pip/issues/25
135 .. _setup.py script: https://github.com/scipy/scipy/blob/master/setup.py
136 """
137 no_setup_requires_arguments = (
138 '-h', '--help',
139 '-n', '--dry-run',
140 '-q', '--quiet',
141 '-v', '--verbose',
142 '-V', '--version',
143 '--author',
144 '--author-email',
145 '--classifiers',
146 '--contact',
147 '--contact-email',
148 '--description',
149 '--egg-base',
150 '--fullname',
151 '--help-commands',
152 '--keywords',
153 '--licence',
154 '--license',
155 '--long-description',
156 '--maintainer',
157 '--maintainer-email',
158 '--name',
159 '--no-user-cfg',
160 '--obsoletes',
161 '--platforms',
162 '--provides',
163 '--requires',
164 '--url',
165 'clean',
166 'egg_info',
167 'register',
168 'sdist',
169 'upload',
170 )
171
172 def is_short_option(argument):
173 """Check whether a command line argument is a short option."""
174 return len(argument) >= 2 and argument[0] == '-' and argument[1] != '-'
175
176 def expand_short_options(argument):
177 """Expand combined short options into canonical short options."""
178 return ('-' + char for char in argument[1:])
179
180 def argument_without_setup_requirements(argv, i):
181 """Check whether a command line argument needs setup requirements."""
182 if argv[i] in no_setup_requires_arguments:
183 # Simple case: An argument which is either an option or a command
184 # which doesn't need setup requirements.
185 return True
186 elif (is_short_option(argv[i]) and
187 all(option in no_setup_requires_arguments
188 for option in expand_short_options(argv[i]))):
189 # Not so simple case: Combined short options none of which need
190 # setup requirements.
191 return True
192 elif argv[i - 1:i] == ['--egg-base']:
193 # Tricky case: --egg-info takes an argument which should not make
194 # us use setup_requires (defeating the purpose of this code).
195 return True
196 else:
197 return False
198
199 if all(argument_without_setup_requirements(argv, i)
200 for i in range(1, len(argv))):
201 return {
202 "cmdclass": {
203 "build": DummyBuild,
204 "install": DummyInstall,
205 "test": DummyPyTest,
206 }
207 }
208 else:
209 cffi_modules = [
210 "src/_cffi_src/build_openssl.py:ffi",
211 "src/_cffi_src/build_constant_time.py:ffi",
212 "src/_cffi_src/build_padding.py:ffi",
213 ]
214 if cc_is_available():
215 cffi_modules.append("src/_cffi_src/build_commoncrypto.py:ffi")
216
217 return {
218 "setup_requires": setup_requirements,
219 "cmdclass": {
220 "test": PyTest,
221 },
222 "cffi_modules": cffi_modules
223 }
224
225
226 setup_requires_error = ("Requested setup command that needs 'setup_requires' "
227 "while command line arguments implied a side effect "
228 "free command or option.")
229
230
231 class DummyBuild(build):
232 """
233 This class makes it very obvious when ``keywords_with_side_effects()`` has
234 incorrectly interpreted the command line arguments to ``setup.py build`` as
235 one of the 'side effect free' commands or options.
236 """
237
238 def run(self):
239 raise RuntimeError(setup_requires_error)
240
241
242 class DummyInstall(install):
243 """
244 This class makes it very obvious when ``keywords_with_side_effects()`` has
245 incorrectly interpreted the command line arguments to ``setup.py install``
246 as one of the 'side effect free' commands or options.
247 """
248
249 def run(self):
250 raise RuntimeError(setup_requires_error)
251
252
253 class DummyPyTest(test):
254 """
255 This class makes it very obvious when ``keywords_with_side_effects()`` has
256 incorrectly interpreted the command line arguments to ``setup.py test`` as
257 one of the 'side effect free' commands or options.
258 """
259
260 def run_tests(self):
261 raise RuntimeError(setup_requires_error)
262
263
264 with open(os.path.join(base_dir, "README.rst")) as f:
265 long_description = f.read()
266
267
268 setup(
269 name=about["__title__"],
270 version=about["__version__"],
271
272 description=about["__summary__"],
273 long_description=long_description,
274 license=about["__license__"],
275 url=about["__uri__"],
276
277 author=about["__author__"],
278 author_email=about["__email__"],
279
280 classifiers=[
281 "Intended Audience :: Developers",
282 "License :: OSI Approved :: Apache Software License",
283 "License :: OSI Approved :: BSD License",
284 "Natural Language :: English",
285 "Operating System :: MacOS :: MacOS X",
286 "Operating System :: POSIX",
287 "Operating System :: POSIX :: BSD",
288 "Operating System :: POSIX :: Linux",
289 "Operating System :: Microsoft :: Windows",
290 "Programming Language :: Python",
291 "Programming Language :: Python :: 2",
292 "Programming Language :: Python :: 2.6",
293 "Programming Language :: Python :: 2.7",
294 "Programming Language :: Python :: 3",
295 "Programming Language :: Python :: 3.3",
296 "Programming Language :: Python :: 3.4",
297 "Programming Language :: Python :: 3.5",
298 "Programming Language :: Python :: Implementation :: CPython",
299 "Programming Language :: Python :: Implementation :: PyPy",
300 "Topic :: Security :: Cryptography",
301 ],
302
303 package_dir={"": "src"},
304 packages=find_packages(
305 where="src", exclude=["_cffi_src", "_cffi_src.*", "tests", "tests.*"]
306 ),
307 include_package_data=True,
308
309 install_requires=requirements,
310 tests_require=test_requirements,
311
312 # for cffi
313 zip_safe=False,
314 ext_package="cryptography.hazmat.bindings",
315 entry_points={
316 "cryptography.backends": backends,
317 },
318 **keywords_with_side_effects(sys.argv)
319 )
320
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -37,7 +37,7 @@
"idna>=2.0",
"pyasn1>=0.1.8",
"six>=1.4.1",
- "setuptools",
+ "setuptools>=1.0",
]
setup_requirements = []
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -37,7 +37,7 @@\n \"idna>=2.0\",\n \"pyasn1>=0.1.8\",\n \"six>=1.4.1\",\n- \"setuptools\",\n+ \"setuptools>=1.0\",\n ]\n setup_requirements = []\n", "issue": "Set a minimum version on setuptools\nApparently it fails in hilarious ways with very very old setuptools (or even distribute). We should set a floor in `setup.py`.\n\n@dstufft do you have opinions on what a reasonable floor would be?\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\n# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport os\nimport platform\nimport subprocess\nimport sys\nfrom distutils.command.build import build\n\nimport pkg_resources\n\nfrom setuptools import find_packages, setup\nfrom setuptools.command.install import install\nfrom setuptools.command.test import test\n\n\nbase_dir = os.path.dirname(__file__)\nsrc_dir = os.path.join(base_dir, \"src\")\n\n# When executing the setup.py, we need to be able to import ourselves, this\n# means that we need to add the src/ directory to the sys.path.\nsys.path.insert(0, src_dir)\n\nabout = {}\nwith open(os.path.join(src_dir, \"cryptography\", \"__about__.py\")) as f:\n exec(f.read(), about)\n\n\nVECTORS_DEPENDENCY = \"cryptography_vectors=={0}\".format(about['__version__'])\n\nrequirements = [\n \"idna>=2.0\",\n \"pyasn1>=0.1.8\",\n \"six>=1.4.1\",\n \"setuptools\",\n]\nsetup_requirements = []\n\nif sys.version_info < (3, 4):\n requirements.append(\"enum34\")\n\nif sys.version_info < (3, 3):\n requirements.append(\"ipaddress\")\n\nif platform.python_implementation() == \"PyPy\":\n if sys.pypy_version_info < (2, 6):\n raise RuntimeError(\n \"cryptography 1.0 is not compatible with PyPy < 2.6. 
Please \"\n \"upgrade PyPy to use this library.\"\n )\nelse:\n requirements.append(\"cffi>=1.1.0\")\n setup_requirements.append(\"cffi>=1.1.0\")\n\n# If you add a new dep here you probably need to add it in the tox.ini as well\ntest_requirements = [\n \"pytest\",\n \"pretend\",\n \"iso8601\",\n \"hypothesis\",\n \"pyasn1_modules\",\n]\n\n# If there's no vectors locally that probably means we are in a tarball and\n# need to go and get the matching vectors package from PyPi\nif not os.path.exists(os.path.join(base_dir, \"vectors/setup.py\")):\n test_requirements.append(VECTORS_DEPENDENCY)\n\n\ndef cc_is_available():\n return sys.platform == \"darwin\" and list(map(\n int, platform.mac_ver()[0].split(\".\"))) >= [10, 8, 0]\n\n\nbackends = [\n \"openssl = cryptography.hazmat.backends.openssl:backend\"\n]\n\nif cc_is_available():\n backends.append(\n \"commoncrypto = cryptography.hazmat.backends.commoncrypto:backend\",\n )\n\n\nclass PyTest(test):\n def finalize_options(self):\n test.finalize_options(self)\n self.test_args = []\n self.test_suite = True\n\n # This means there's a vectors/ folder with the package in here.\n # cd into it, install the vectors package and then refresh sys.path\n if VECTORS_DEPENDENCY not in test_requirements:\n subprocess.check_call(\n [sys.executable, \"setup.py\", \"install\"], cwd=\"vectors\"\n )\n pkg_resources.get_distribution(\"cryptography_vectors\").activate()\n\n def run_tests(self):\n # Import here because in module scope the eggs are not loaded.\n import pytest\n test_args = [os.path.join(base_dir, \"tests\")]\n errno = pytest.main(test_args)\n sys.exit(errno)\n\n\ndef keywords_with_side_effects(argv):\n \"\"\"\n Get a dictionary with setup keywords that (can) have side effects.\n\n :param argv: A list of strings with command line arguments.\n :returns: A dictionary with keyword arguments for the ``setup()`` function.\n\n This setup.py script uses the setuptools 'setup_requires' feature because\n this is required by the cffi package to compile extension modules. The\n purpose of ``keywords_with_side_effects()`` is to avoid triggering the cffi\n build process as a result of setup.py invocations that don't need the cffi\n module to be built (setup.py serves the dual purpose of exposing package\n metadata).\n\n All of the options listed by ``python setup.py --help`` that print\n information should be recognized here. The commands ``clean``,\n ``egg_info``, ``register``, ``sdist`` and ``upload`` are also recognized.\n Any combination of these options and commands is also supported.\n\n This function was originally based on the `setup.py script`_ of SciPy (see\n also the discussion in `pip issue #25`_).\n\n .. _pip issue #25: https://github.com/pypa/pip/issues/25\n .. 
_setup.py script: https://github.com/scipy/scipy/blob/master/setup.py\n \"\"\"\n no_setup_requires_arguments = (\n '-h', '--help',\n '-n', '--dry-run',\n '-q', '--quiet',\n '-v', '--verbose',\n '-V', '--version',\n '--author',\n '--author-email',\n '--classifiers',\n '--contact',\n '--contact-email',\n '--description',\n '--egg-base',\n '--fullname',\n '--help-commands',\n '--keywords',\n '--licence',\n '--license',\n '--long-description',\n '--maintainer',\n '--maintainer-email',\n '--name',\n '--no-user-cfg',\n '--obsoletes',\n '--platforms',\n '--provides',\n '--requires',\n '--url',\n 'clean',\n 'egg_info',\n 'register',\n 'sdist',\n 'upload',\n )\n\n def is_short_option(argument):\n \"\"\"Check whether a command line argument is a short option.\"\"\"\n return len(argument) >= 2 and argument[0] == '-' and argument[1] != '-'\n\n def expand_short_options(argument):\n \"\"\"Expand combined short options into canonical short options.\"\"\"\n return ('-' + char for char in argument[1:])\n\n def argument_without_setup_requirements(argv, i):\n \"\"\"Check whether a command line argument needs setup requirements.\"\"\"\n if argv[i] in no_setup_requires_arguments:\n # Simple case: An argument which is either an option or a command\n # which doesn't need setup requirements.\n return True\n elif (is_short_option(argv[i]) and\n all(option in no_setup_requires_arguments\n for option in expand_short_options(argv[i]))):\n # Not so simple case: Combined short options none of which need\n # setup requirements.\n return True\n elif argv[i - 1:i] == ['--egg-base']:\n # Tricky case: --egg-info takes an argument which should not make\n # us use setup_requires (defeating the purpose of this code).\n return True\n else:\n return False\n\n if all(argument_without_setup_requirements(argv, i)\n for i in range(1, len(argv))):\n return {\n \"cmdclass\": {\n \"build\": DummyBuild,\n \"install\": DummyInstall,\n \"test\": DummyPyTest,\n }\n }\n else:\n cffi_modules = [\n \"src/_cffi_src/build_openssl.py:ffi\",\n \"src/_cffi_src/build_constant_time.py:ffi\",\n \"src/_cffi_src/build_padding.py:ffi\",\n ]\n if cc_is_available():\n cffi_modules.append(\"src/_cffi_src/build_commoncrypto.py:ffi\")\n\n return {\n \"setup_requires\": setup_requirements,\n \"cmdclass\": {\n \"test\": PyTest,\n },\n \"cffi_modules\": cffi_modules\n }\n\n\nsetup_requires_error = (\"Requested setup command that needs 'setup_requires' \"\n \"while command line arguments implied a side effect \"\n \"free command or option.\")\n\n\nclass DummyBuild(build):\n \"\"\"\n This class makes it very obvious when ``keywords_with_side_effects()`` has\n incorrectly interpreted the command line arguments to ``setup.py build`` as\n one of the 'side effect free' commands or options.\n \"\"\"\n\n def run(self):\n raise RuntimeError(setup_requires_error)\n\n\nclass DummyInstall(install):\n \"\"\"\n This class makes it very obvious when ``keywords_with_side_effects()`` has\n incorrectly interpreted the command line arguments to ``setup.py install``\n as one of the 'side effect free' commands or options.\n \"\"\"\n\n def run(self):\n raise RuntimeError(setup_requires_error)\n\n\nclass DummyPyTest(test):\n \"\"\"\n This class makes it very obvious when ``keywords_with_side_effects()`` has\n incorrectly interpreted the command line arguments to ``setup.py test`` as\n one of the 'side effect free' commands or options.\n \"\"\"\n\n def run_tests(self):\n raise RuntimeError(setup_requires_error)\n\n\nwith open(os.path.join(base_dir, \"README.rst\")) as f:\n 
long_description = f.read()\n\n\nsetup(\n name=about[\"__title__\"],\n version=about[\"__version__\"],\n\n description=about[\"__summary__\"],\n long_description=long_description,\n license=about[\"__license__\"],\n url=about[\"__uri__\"],\n\n author=about[\"__author__\"],\n author_email=about[\"__email__\"],\n\n classifiers=[\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: Apache Software License\",\n \"License :: OSI Approved :: BSD License\",\n \"Natural Language :: English\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX\",\n \"Operating System :: POSIX :: BSD\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: Microsoft :: Windows\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Topic :: Security :: Cryptography\",\n ],\n\n package_dir={\"\": \"src\"},\n packages=find_packages(\n where=\"src\", exclude=[\"_cffi_src\", \"_cffi_src.*\", \"tests\", \"tests.*\"]\n ),\n include_package_data=True,\n\n install_requires=requirements,\n tests_require=test_requirements,\n\n # for cffi\n zip_safe=False,\n ext_package=\"cryptography.hazmat.bindings\",\n entry_points={\n \"cryptography.backends\": backends,\n },\n **keywords_with_side_effects(sys.argv)\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\n# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport os\nimport platform\nimport subprocess\nimport sys\nfrom distutils.command.build import build\n\nimport pkg_resources\n\nfrom setuptools import find_packages, setup\nfrom setuptools.command.install import install\nfrom setuptools.command.test import test\n\n\nbase_dir = os.path.dirname(__file__)\nsrc_dir = os.path.join(base_dir, \"src\")\n\n# When executing the setup.py, we need to be able to import ourselves, this\n# means that we need to add the src/ directory to the sys.path.\nsys.path.insert(0, src_dir)\n\nabout = {}\nwith open(os.path.join(src_dir, \"cryptography\", \"__about__.py\")) as f:\n exec(f.read(), about)\n\n\nVECTORS_DEPENDENCY = \"cryptography_vectors=={0}\".format(about['__version__'])\n\nrequirements = [\n \"idna>=2.0\",\n \"pyasn1>=0.1.8\",\n \"six>=1.4.1\",\n \"setuptools>=1.0\",\n]\nsetup_requirements = []\n\nif sys.version_info < (3, 4):\n requirements.append(\"enum34\")\n\nif sys.version_info < (3, 3):\n requirements.append(\"ipaddress\")\n\nif platform.python_implementation() == \"PyPy\":\n if sys.pypy_version_info < (2, 6):\n raise RuntimeError(\n \"cryptography 1.0 is not compatible with PyPy < 2.6. 
Please \"\n \"upgrade PyPy to use this library.\"\n )\nelse:\n requirements.append(\"cffi>=1.1.0\")\n setup_requirements.append(\"cffi>=1.1.0\")\n\n# If you add a new dep here you probably need to add it in the tox.ini as well\ntest_requirements = [\n \"pytest\",\n \"pretend\",\n \"iso8601\",\n \"hypothesis\",\n \"pyasn1_modules\",\n]\n\n# If there's no vectors locally that probably means we are in a tarball and\n# need to go and get the matching vectors package from PyPi\nif not os.path.exists(os.path.join(base_dir, \"vectors/setup.py\")):\n test_requirements.append(VECTORS_DEPENDENCY)\n\n\ndef cc_is_available():\n return sys.platform == \"darwin\" and list(map(\n int, platform.mac_ver()[0].split(\".\"))) >= [10, 8, 0]\n\n\nbackends = [\n \"openssl = cryptography.hazmat.backends.openssl:backend\"\n]\n\nif cc_is_available():\n backends.append(\n \"commoncrypto = cryptography.hazmat.backends.commoncrypto:backend\",\n )\n\n\nclass PyTest(test):\n def finalize_options(self):\n test.finalize_options(self)\n self.test_args = []\n self.test_suite = True\n\n # This means there's a vectors/ folder with the package in here.\n # cd into it, install the vectors package and then refresh sys.path\n if VECTORS_DEPENDENCY not in test_requirements:\n subprocess.check_call(\n [sys.executable, \"setup.py\", \"install\"], cwd=\"vectors\"\n )\n pkg_resources.get_distribution(\"cryptography_vectors\").activate()\n\n def run_tests(self):\n # Import here because in module scope the eggs are not loaded.\n import pytest\n test_args = [os.path.join(base_dir, \"tests\")]\n errno = pytest.main(test_args)\n sys.exit(errno)\n\n\ndef keywords_with_side_effects(argv):\n \"\"\"\n Get a dictionary with setup keywords that (can) have side effects.\n\n :param argv: A list of strings with command line arguments.\n :returns: A dictionary with keyword arguments for the ``setup()`` function.\n\n This setup.py script uses the setuptools 'setup_requires' feature because\n this is required by the cffi package to compile extension modules. The\n purpose of ``keywords_with_side_effects()`` is to avoid triggering the cffi\n build process as a result of setup.py invocations that don't need the cffi\n module to be built (setup.py serves the dual purpose of exposing package\n metadata).\n\n All of the options listed by ``python setup.py --help`` that print\n information should be recognized here. The commands ``clean``,\n ``egg_info``, ``register``, ``sdist`` and ``upload`` are also recognized.\n Any combination of these options and commands is also supported.\n\n This function was originally based on the `setup.py script`_ of SciPy (see\n also the discussion in `pip issue #25`_).\n\n .. _pip issue #25: https://github.com/pypa/pip/issues/25\n .. 
_setup.py script: https://github.com/scipy/scipy/blob/master/setup.py\n \"\"\"\n no_setup_requires_arguments = (\n '-h', '--help',\n '-n', '--dry-run',\n '-q', '--quiet',\n '-v', '--verbose',\n '-V', '--version',\n '--author',\n '--author-email',\n '--classifiers',\n '--contact',\n '--contact-email',\n '--description',\n '--egg-base',\n '--fullname',\n '--help-commands',\n '--keywords',\n '--licence',\n '--license',\n '--long-description',\n '--maintainer',\n '--maintainer-email',\n '--name',\n '--no-user-cfg',\n '--obsoletes',\n '--platforms',\n '--provides',\n '--requires',\n '--url',\n 'clean',\n 'egg_info',\n 'register',\n 'sdist',\n 'upload',\n )\n\n def is_short_option(argument):\n \"\"\"Check whether a command line argument is a short option.\"\"\"\n return len(argument) >= 2 and argument[0] == '-' and argument[1] != '-'\n\n def expand_short_options(argument):\n \"\"\"Expand combined short options into canonical short options.\"\"\"\n return ('-' + char for char in argument[1:])\n\n def argument_without_setup_requirements(argv, i):\n \"\"\"Check whether a command line argument needs setup requirements.\"\"\"\n if argv[i] in no_setup_requires_arguments:\n # Simple case: An argument which is either an option or a command\n # which doesn't need setup requirements.\n return True\n elif (is_short_option(argv[i]) and\n all(option in no_setup_requires_arguments\n for option in expand_short_options(argv[i]))):\n # Not so simple case: Combined short options none of which need\n # setup requirements.\n return True\n elif argv[i - 1:i] == ['--egg-base']:\n # Tricky case: --egg-info takes an argument which should not make\n # us use setup_requires (defeating the purpose of this code).\n return True\n else:\n return False\n\n if all(argument_without_setup_requirements(argv, i)\n for i in range(1, len(argv))):\n return {\n \"cmdclass\": {\n \"build\": DummyBuild,\n \"install\": DummyInstall,\n \"test\": DummyPyTest,\n }\n }\n else:\n cffi_modules = [\n \"src/_cffi_src/build_openssl.py:ffi\",\n \"src/_cffi_src/build_constant_time.py:ffi\",\n \"src/_cffi_src/build_padding.py:ffi\",\n ]\n if cc_is_available():\n cffi_modules.append(\"src/_cffi_src/build_commoncrypto.py:ffi\")\n\n return {\n \"setup_requires\": setup_requirements,\n \"cmdclass\": {\n \"test\": PyTest,\n },\n \"cffi_modules\": cffi_modules\n }\n\n\nsetup_requires_error = (\"Requested setup command that needs 'setup_requires' \"\n \"while command line arguments implied a side effect \"\n \"free command or option.\")\n\n\nclass DummyBuild(build):\n \"\"\"\n This class makes it very obvious when ``keywords_with_side_effects()`` has\n incorrectly interpreted the command line arguments to ``setup.py build`` as\n one of the 'side effect free' commands or options.\n \"\"\"\n\n def run(self):\n raise RuntimeError(setup_requires_error)\n\n\nclass DummyInstall(install):\n \"\"\"\n This class makes it very obvious when ``keywords_with_side_effects()`` has\n incorrectly interpreted the command line arguments to ``setup.py install``\n as one of the 'side effect free' commands or options.\n \"\"\"\n\n def run(self):\n raise RuntimeError(setup_requires_error)\n\n\nclass DummyPyTest(test):\n \"\"\"\n This class makes it very obvious when ``keywords_with_side_effects()`` has\n incorrectly interpreted the command line arguments to ``setup.py test`` as\n one of the 'side effect free' commands or options.\n \"\"\"\n\n def run_tests(self):\n raise RuntimeError(setup_requires_error)\n\n\nwith open(os.path.join(base_dir, \"README.rst\")) as f:\n 
long_description = f.read()\n\n\nsetup(\n name=about[\"__title__\"],\n version=about[\"__version__\"],\n\n description=about[\"__summary__\"],\n long_description=long_description,\n license=about[\"__license__\"],\n url=about[\"__uri__\"],\n\n author=about[\"__author__\"],\n author_email=about[\"__email__\"],\n\n classifiers=[\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: Apache Software License\",\n \"License :: OSI Approved :: BSD License\",\n \"Natural Language :: English\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX\",\n \"Operating System :: POSIX :: BSD\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: Microsoft :: Windows\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Topic :: Security :: Cryptography\",\n ],\n\n package_dir={\"\": \"src\"},\n packages=find_packages(\n where=\"src\", exclude=[\"_cffi_src\", \"_cffi_src.*\", \"tests\", \"tests.*\"]\n ),\n include_package_data=True,\n\n install_requires=requirements,\n tests_require=test_requirements,\n\n # for cffi\n zip_safe=False,\n ext_package=\"cryptography.hazmat.bindings\",\n entry_points={\n \"cryptography.backends\": backends,\n },\n **keywords_with_side_effects(sys.argv)\n)\n", "path": "setup.py"}]}
| 3,498 | 84 |
gh_patches_debug_37898
|
rasdani/github-patches
|
git_diff
|
getsentry__sentry-python-1787
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Link errors to OTel spans
### Problem Statement
If you use the Otel instrumentation caught errors are not associated with performance spans coming from otel
### Solution Brainstorm
Make sure errors are associated with the current otel span. See Java implementation for inspiration.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sentry_sdk/integrations/opentelemetry/span_processor.py`
Content:
```
1 from datetime import datetime
2
3 from opentelemetry.context import get_value # type: ignore
4 from opentelemetry.sdk.trace import SpanProcessor # type: ignore
5 from opentelemetry.semconv.trace import SpanAttributes # type: ignore
6 from opentelemetry.trace import ( # type: ignore
7 format_span_id,
8 format_trace_id,
9 SpanContext,
10 Span as OTelSpan,
11 SpanKind,
12 )
13 from sentry_sdk.consts import INSTRUMENTER
14 from sentry_sdk.hub import Hub
15 from sentry_sdk.integrations.opentelemetry.consts import (
16 SENTRY_BAGGAGE_KEY,
17 SENTRY_TRACE_KEY,
18 )
19 from sentry_sdk.tracing import Transaction, Span as SentrySpan
20 from sentry_sdk.utils import Dsn
21 from sentry_sdk._types import MYPY
22
23 from urllib3.util import parse_url as urlparse # type: ignore
24
25 if MYPY:
26 from typing import Any
27 from typing import Dict
28 from typing import Union
29
30 OPEN_TELEMETRY_CONTEXT = "otel"
31
32
33 class SentrySpanProcessor(SpanProcessor): # type: ignore
34 """
35 Converts OTel spans into Sentry spans so they can be sent to the Sentry backend.
36 """
37
38 # The mapping from otel span ids to sentry spans
39 otel_span_map = {} # type: Dict[str, Union[Transaction, OTelSpan]]
40
41 def __new__(cls):
42 # type: () -> SentrySpanProcessor
43 if not hasattr(cls, "instance"):
44 cls.instance = super(SentrySpanProcessor, cls).__new__(cls)
45
46 return cls.instance
47
48 def on_start(self, otel_span, parent_context=None):
49 # type: (OTelSpan, SpanContext) -> None
50 hub = Hub.current
51 if not hub:
52 return
53
54 if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
55 return
56
57 if not otel_span.context.is_valid:
58 return
59
60 if self._is_sentry_span(hub, otel_span):
61 return
62
63 trace_data = self._get_trace_data(otel_span, parent_context)
64
65 parent_span_id = trace_data["parent_span_id"]
66 sentry_parent_span = (
67 self.otel_span_map.get(parent_span_id, None) if parent_span_id else None
68 )
69
70 sentry_span = None
71 if sentry_parent_span:
72 sentry_span = sentry_parent_span.start_child(
73 span_id=trace_data["span_id"],
74 description=otel_span.name,
75 start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
76 instrumenter=INSTRUMENTER.OTEL,
77 )
78 else:
79 sentry_span = hub.start_transaction(
80 name=otel_span.name,
81 span_id=trace_data["span_id"],
82 parent_span_id=parent_span_id,
83 trace_id=trace_data["trace_id"],
84 baggage=trace_data["baggage"],
85 start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
86 instrumenter=INSTRUMENTER.OTEL,
87 )
88
89 self.otel_span_map[trace_data["span_id"]] = sentry_span
90
91 def on_end(self, otel_span):
92 # type: (OTelSpan) -> None
93 hub = Hub.current
94 if not hub:
95 return
96
97 if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
98 return
99
100 if not otel_span.context.is_valid:
101 return
102
103 span_id = format_span_id(otel_span.context.span_id)
104 sentry_span = self.otel_span_map.pop(span_id, None)
105 if not sentry_span:
106 return
107
108 sentry_span.op = otel_span.name
109
110 if isinstance(sentry_span, Transaction):
111 sentry_span.name = otel_span.name
112 sentry_span.set_context(
113 OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span)
114 )
115
116 else:
117 self._update_span_with_otel_data(sentry_span, otel_span)
118
119 sentry_span.finish(
120 end_timestamp=datetime.fromtimestamp(otel_span.end_time / 1e9)
121 )
122
123 def _is_sentry_span(self, hub, otel_span):
124 # type: (Hub, OTelSpan) -> bool
125 """
126 Break infinite loop:
127 HTTP requests to Sentry are caught by OTel and send again to Sentry.
128 """
129 otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)
130 dsn_url = hub.client and Dsn(hub.client.dsn or "").netloc
131
132 if otel_span_url and dsn_url in otel_span_url:
133 return True
134
135 return False
136
137 def _get_otel_context(self, otel_span):
138 # type: (OTelSpan) -> Dict[str, Any]
139 """
140 Returns the OTel context for Sentry.
141 See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context
142 """
143 ctx = {}
144
145 if otel_span.attributes:
146 ctx["attributes"] = dict(otel_span.attributes)
147
148 if otel_span.resource.attributes:
149 ctx["resource"] = dict(otel_span.resource.attributes)
150
151 return ctx
152
153 def _get_trace_data(self, otel_span, parent_context):
154 # type: (OTelSpan, SpanContext) -> Dict[str, Any]
155 """
156 Extracts tracing information from one OTel span and its parent OTel context.
157 """
158 trace_data = {}
159
160 span_id = format_span_id(otel_span.context.span_id)
161 trace_data["span_id"] = span_id
162
163 trace_id = format_trace_id(otel_span.context.trace_id)
164 trace_data["trace_id"] = trace_id
165
166 parent_span_id = (
167 format_span_id(otel_span.parent.span_id) if otel_span.parent else None
168 )
169 trace_data["parent_span_id"] = parent_span_id
170
171 sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context)
172 trace_data["parent_sampled"] = (
173 sentry_trace_data[2] if sentry_trace_data else None
174 )
175
176 baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context)
177 trace_data["baggage"] = baggage
178
179 return trace_data
180
181 def _update_span_with_otel_data(self, sentry_span, otel_span):
182 # type: (SentrySpan, OTelSpan) -> None
183 """
184 Convert OTel span data and update the Sentry span with it.
185 This should eventually happen on the server when ingesting the spans.
186 """
187 for key, val in otel_span.attributes.items():
188 sentry_span.set_data(key, val)
189
190 sentry_span.set_data("otel.kind", otel_span.kind)
191
192 op = otel_span.name
193 description = otel_span.name
194
195 http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD, None)
196 db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM, None)
197
198 if http_method:
199 op = "http"
200
201 if otel_span.kind == SpanKind.SERVER:
202 op += ".server"
203 elif otel_span.kind == SpanKind.CLIENT:
204 op += ".client"
205
206 description = http_method
207
208 peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None)
209 if peer_name:
210 description += " {}".format(peer_name)
211
212 target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None)
213 if target:
214 description += " {}".format(target)
215
216 if not peer_name and not target:
217 url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)
218 if url:
219 parsed_url = urlparse(url)
220 url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
221 description += " {}".format(url)
222
223 status_code = otel_span.attributes.get(
224 SpanAttributes.HTTP_STATUS_CODE, None
225 )
226 if status_code:
227 sentry_span.set_http_status(status_code)
228
229 elif db_query:
230 op = "db"
231 statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None)
232 if statement:
233 description = statement
234
235 sentry_span.op = op
236 sentry_span.description = description
237
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py
--- a/sentry_sdk/integrations/opentelemetry/span_processor.py
+++ b/sentry_sdk/integrations/opentelemetry/span_processor.py
@@ -6,16 +6,22 @@
from opentelemetry.trace import ( # type: ignore
format_span_id,
format_trace_id,
+ get_current_span,
SpanContext,
Span as OTelSpan,
SpanKind,
)
+from opentelemetry.trace.span import ( # type: ignore
+ INVALID_SPAN_ID,
+ INVALID_TRACE_ID,
+)
from sentry_sdk.consts import INSTRUMENTER
from sentry_sdk.hub import Hub
from sentry_sdk.integrations.opentelemetry.consts import (
SENTRY_BAGGAGE_KEY,
SENTRY_TRACE_KEY,
)
+from sentry_sdk.scope import add_global_event_processor
from sentry_sdk.tracing import Transaction, Span as SentrySpan
from sentry_sdk.utils import Dsn
from sentry_sdk._types import MYPY
@@ -26,10 +32,44 @@
from typing import Any
from typing import Dict
from typing import Union
+ from sentry_sdk._types import Event, Hint
OPEN_TELEMETRY_CONTEXT = "otel"
+def link_trace_context_to_error_event(event, otel_span_map):
+ # type: (Event, Dict[str, Union[Transaction, OTelSpan]]) -> Event
+ hub = Hub.current
+ if not hub:
+ return event
+
+ if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+ return event
+
+ if hasattr(event, "type") and event["type"] == "transaction":
+ return event
+
+ otel_span = get_current_span()
+ if not otel_span:
+ return event
+
+ ctx = otel_span.get_span_context()
+ trace_id = format_trace_id(ctx.trace_id)
+ span_id = format_span_id(ctx.span_id)
+
+ if trace_id == INVALID_TRACE_ID or span_id == INVALID_SPAN_ID:
+ return event
+
+ sentry_span = otel_span_map.get(span_id, None)
+ if not sentry_span:
+ return event
+
+ contexts = event.setdefault("contexts", {})
+ contexts.setdefault("trace", {}).update(sentry_span.get_trace_context())
+
+ return event
+
+
class SentrySpanProcessor(SpanProcessor): # type: ignore
"""
Converts OTel spans into Sentry spans so they can be sent to the Sentry backend.
@@ -45,6 +85,13 @@
return cls.instance
+ def __init__(self):
+ # type: () -> None
+ @add_global_event_processor
+ def global_event_processor(event, hint):
+ # type: (Event, Hint) -> Event
+ return link_trace_context_to_error_event(event, self.otel_span_map)
+
def on_start(self, otel_span, parent_context=None):
# type: (OTelSpan, SpanContext) -> None
hub = Hub.current
|
{"golden_diff": "diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py\n--- a/sentry_sdk/integrations/opentelemetry/span_processor.py\n+++ b/sentry_sdk/integrations/opentelemetry/span_processor.py\n@@ -6,16 +6,22 @@\n from opentelemetry.trace import ( # type: ignore\n format_span_id,\n format_trace_id,\n+ get_current_span,\n SpanContext,\n Span as OTelSpan,\n SpanKind,\n )\n+from opentelemetry.trace.span import ( # type: ignore\n+ INVALID_SPAN_ID,\n+ INVALID_TRACE_ID,\n+)\n from sentry_sdk.consts import INSTRUMENTER\n from sentry_sdk.hub import Hub\n from sentry_sdk.integrations.opentelemetry.consts import (\n SENTRY_BAGGAGE_KEY,\n SENTRY_TRACE_KEY,\n )\n+from sentry_sdk.scope import add_global_event_processor\n from sentry_sdk.tracing import Transaction, Span as SentrySpan\n from sentry_sdk.utils import Dsn\n from sentry_sdk._types import MYPY\n@@ -26,10 +32,44 @@\n from typing import Any\n from typing import Dict\n from typing import Union\n+ from sentry_sdk._types import Event, Hint\n \n OPEN_TELEMETRY_CONTEXT = \"otel\"\n \n \n+def link_trace_context_to_error_event(event, otel_span_map):\n+ # type: (Event, Dict[str, Union[Transaction, OTelSpan]]) -> Event\n+ hub = Hub.current\n+ if not hub:\n+ return event\n+\n+ if hub.client and hub.client.options[\"instrumenter\"] != INSTRUMENTER.OTEL:\n+ return event\n+\n+ if hasattr(event, \"type\") and event[\"type\"] == \"transaction\":\n+ return event\n+\n+ otel_span = get_current_span()\n+ if not otel_span:\n+ return event\n+\n+ ctx = otel_span.get_span_context()\n+ trace_id = format_trace_id(ctx.trace_id)\n+ span_id = format_span_id(ctx.span_id)\n+\n+ if trace_id == INVALID_TRACE_ID or span_id == INVALID_SPAN_ID:\n+ return event\n+\n+ sentry_span = otel_span_map.get(span_id, None)\n+ if not sentry_span:\n+ return event\n+\n+ contexts = event.setdefault(\"contexts\", {})\n+ contexts.setdefault(\"trace\", {}).update(sentry_span.get_trace_context())\n+\n+ return event\n+\n+\n class SentrySpanProcessor(SpanProcessor): # type: ignore\n \"\"\"\n Converts OTel spans into Sentry spans so they can be sent to the Sentry backend.\n@@ -45,6 +85,13 @@\n \n return cls.instance\n \n+ def __init__(self):\n+ # type: () -> None\n+ @add_global_event_processor\n+ def global_event_processor(event, hint):\n+ # type: (Event, Hint) -> Event\n+ return link_trace_context_to_error_event(event, self.otel_span_map)\n+\n def on_start(self, otel_span, parent_context=None):\n # type: (OTelSpan, SpanContext) -> None\n hub = Hub.current\n", "issue": "Link errors to OTel spans\n### Problem Statement\n\nIf you use the Otel instrumentation caught errors are not associated with performance spans coming from otel\n\n### Solution Brainstorm\n\nMake sure errors are associated with the current otel span. 
See Java implementation for inspiration.\n", "before_files": [{"content": "from datetime import datetime\n\nfrom opentelemetry.context import get_value # type: ignore\nfrom opentelemetry.sdk.trace import SpanProcessor # type: ignore\nfrom opentelemetry.semconv.trace import SpanAttributes # type: ignore\nfrom opentelemetry.trace import ( # type: ignore\n format_span_id,\n format_trace_id,\n SpanContext,\n Span as OTelSpan,\n SpanKind,\n)\nfrom sentry_sdk.consts import INSTRUMENTER\nfrom sentry_sdk.hub import Hub\nfrom sentry_sdk.integrations.opentelemetry.consts import (\n SENTRY_BAGGAGE_KEY,\n SENTRY_TRACE_KEY,\n)\nfrom sentry_sdk.tracing import Transaction, Span as SentrySpan\nfrom sentry_sdk.utils import Dsn\nfrom sentry_sdk._types import MYPY\n\nfrom urllib3.util import parse_url as urlparse # type: ignore\n\nif MYPY:\n from typing import Any\n from typing import Dict\n from typing import Union\n\nOPEN_TELEMETRY_CONTEXT = \"otel\"\n\n\nclass SentrySpanProcessor(SpanProcessor): # type: ignore\n \"\"\"\n Converts OTel spans into Sentry spans so they can be sent to the Sentry backend.\n \"\"\"\n\n # The mapping from otel span ids to sentry spans\n otel_span_map = {} # type: Dict[str, Union[Transaction, OTelSpan]]\n\n def __new__(cls):\n # type: () -> SentrySpanProcessor\n if not hasattr(cls, \"instance\"):\n cls.instance = super(SentrySpanProcessor, cls).__new__(cls)\n\n return cls.instance\n\n def on_start(self, otel_span, parent_context=None):\n # type: (OTelSpan, SpanContext) -> None\n hub = Hub.current\n if not hub:\n return\n\n if hub.client and hub.client.options[\"instrumenter\"] != INSTRUMENTER.OTEL:\n return\n\n if not otel_span.context.is_valid:\n return\n\n if self._is_sentry_span(hub, otel_span):\n return\n\n trace_data = self._get_trace_data(otel_span, parent_context)\n\n parent_span_id = trace_data[\"parent_span_id\"]\n sentry_parent_span = (\n self.otel_span_map.get(parent_span_id, None) if parent_span_id else None\n )\n\n sentry_span = None\n if sentry_parent_span:\n sentry_span = sentry_parent_span.start_child(\n span_id=trace_data[\"span_id\"],\n description=otel_span.name,\n start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),\n instrumenter=INSTRUMENTER.OTEL,\n )\n else:\n sentry_span = hub.start_transaction(\n name=otel_span.name,\n span_id=trace_data[\"span_id\"],\n parent_span_id=parent_span_id,\n trace_id=trace_data[\"trace_id\"],\n baggage=trace_data[\"baggage\"],\n start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),\n instrumenter=INSTRUMENTER.OTEL,\n )\n\n self.otel_span_map[trace_data[\"span_id\"]] = sentry_span\n\n def on_end(self, otel_span):\n # type: (OTelSpan) -> None\n hub = Hub.current\n if not hub:\n return\n\n if hub.client and hub.client.options[\"instrumenter\"] != INSTRUMENTER.OTEL:\n return\n\n if not otel_span.context.is_valid:\n return\n\n span_id = format_span_id(otel_span.context.span_id)\n sentry_span = self.otel_span_map.pop(span_id, None)\n if not sentry_span:\n return\n\n sentry_span.op = otel_span.name\n\n if isinstance(sentry_span, Transaction):\n sentry_span.name = otel_span.name\n sentry_span.set_context(\n OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span)\n )\n\n else:\n self._update_span_with_otel_data(sentry_span, otel_span)\n\n sentry_span.finish(\n end_timestamp=datetime.fromtimestamp(otel_span.end_time / 1e9)\n )\n\n def _is_sentry_span(self, hub, otel_span):\n # type: (Hub, OTelSpan) -> bool\n \"\"\"\n Break infinite loop:\n HTTP requests to Sentry are caught by OTel and send again to 
Sentry.\n \"\"\"\n otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)\n dsn_url = hub.client and Dsn(hub.client.dsn or \"\").netloc\n\n if otel_span_url and dsn_url in otel_span_url:\n return True\n\n return False\n\n def _get_otel_context(self, otel_span):\n # type: (OTelSpan) -> Dict[str, Any]\n \"\"\"\n Returns the OTel context for Sentry.\n See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context\n \"\"\"\n ctx = {}\n\n if otel_span.attributes:\n ctx[\"attributes\"] = dict(otel_span.attributes)\n\n if otel_span.resource.attributes:\n ctx[\"resource\"] = dict(otel_span.resource.attributes)\n\n return ctx\n\n def _get_trace_data(self, otel_span, parent_context):\n # type: (OTelSpan, SpanContext) -> Dict[str, Any]\n \"\"\"\n Extracts tracing information from one OTel span and its parent OTel context.\n \"\"\"\n trace_data = {}\n\n span_id = format_span_id(otel_span.context.span_id)\n trace_data[\"span_id\"] = span_id\n\n trace_id = format_trace_id(otel_span.context.trace_id)\n trace_data[\"trace_id\"] = trace_id\n\n parent_span_id = (\n format_span_id(otel_span.parent.span_id) if otel_span.parent else None\n )\n trace_data[\"parent_span_id\"] = parent_span_id\n\n sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context)\n trace_data[\"parent_sampled\"] = (\n sentry_trace_data[2] if sentry_trace_data else None\n )\n\n baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context)\n trace_data[\"baggage\"] = baggage\n\n return trace_data\n\n def _update_span_with_otel_data(self, sentry_span, otel_span):\n # type: (SentrySpan, OTelSpan) -> None\n \"\"\"\n Convert OTel span data and update the Sentry span with it.\n This should eventually happen on the server when ingesting the spans.\n \"\"\"\n for key, val in otel_span.attributes.items():\n sentry_span.set_data(key, val)\n\n sentry_span.set_data(\"otel.kind\", otel_span.kind)\n\n op = otel_span.name\n description = otel_span.name\n\n http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD, None)\n db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM, None)\n\n if http_method:\n op = \"http\"\n\n if otel_span.kind == SpanKind.SERVER:\n op += \".server\"\n elif otel_span.kind == SpanKind.CLIENT:\n op += \".client\"\n\n description = http_method\n\n peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None)\n if peer_name:\n description += \" {}\".format(peer_name)\n\n target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None)\n if target:\n description += \" {}\".format(target)\n\n if not peer_name and not target:\n url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)\n if url:\n parsed_url = urlparse(url)\n url = f\"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}\"\n description += \" {}\".format(url)\n\n status_code = otel_span.attributes.get(\n SpanAttributes.HTTP_STATUS_CODE, None\n )\n if status_code:\n sentry_span.set_http_status(status_code)\n\n elif db_query:\n op = \"db\"\n statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None)\n if statement:\n description = statement\n\n sentry_span.op = op\n sentry_span.description = description\n", "path": "sentry_sdk/integrations/opentelemetry/span_processor.py"}], "after_files": [{"content": "from datetime import datetime\n\nfrom opentelemetry.context import get_value # type: ignore\nfrom opentelemetry.sdk.trace import SpanProcessor # type: ignore\nfrom opentelemetry.semconv.trace import SpanAttributes # type: ignore\nfrom opentelemetry.trace import ( # type: 
ignore\n format_span_id,\n format_trace_id,\n get_current_span,\n SpanContext,\n Span as OTelSpan,\n SpanKind,\n)\nfrom opentelemetry.trace.span import ( # type: ignore\n INVALID_SPAN_ID,\n INVALID_TRACE_ID,\n)\nfrom sentry_sdk.consts import INSTRUMENTER\nfrom sentry_sdk.hub import Hub\nfrom sentry_sdk.integrations.opentelemetry.consts import (\n SENTRY_BAGGAGE_KEY,\n SENTRY_TRACE_KEY,\n)\nfrom sentry_sdk.scope import add_global_event_processor\nfrom sentry_sdk.tracing import Transaction, Span as SentrySpan\nfrom sentry_sdk.utils import Dsn\nfrom sentry_sdk._types import MYPY\n\nfrom urllib3.util import parse_url as urlparse # type: ignore\n\nif MYPY:\n from typing import Any\n from typing import Dict\n from typing import Union\n from sentry_sdk._types import Event, Hint\n\nOPEN_TELEMETRY_CONTEXT = \"otel\"\n\n\ndef link_trace_context_to_error_event(event, otel_span_map):\n # type: (Event, Dict[str, Union[Transaction, OTelSpan]]) -> Event\n hub = Hub.current\n if not hub:\n return event\n\n if hub.client and hub.client.options[\"instrumenter\"] != INSTRUMENTER.OTEL:\n return event\n\n if hasattr(event, \"type\") and event[\"type\"] == \"transaction\":\n return event\n\n otel_span = get_current_span()\n if not otel_span:\n return event\n\n ctx = otel_span.get_span_context()\n trace_id = format_trace_id(ctx.trace_id)\n span_id = format_span_id(ctx.span_id)\n\n if trace_id == INVALID_TRACE_ID or span_id == INVALID_SPAN_ID:\n return event\n\n sentry_span = otel_span_map.get(span_id, None)\n if not sentry_span:\n return event\n\n contexts = event.setdefault(\"contexts\", {})\n contexts.setdefault(\"trace\", {}).update(sentry_span.get_trace_context())\n\n return event\n\n\nclass SentrySpanProcessor(SpanProcessor): # type: ignore\n \"\"\"\n Converts OTel spans into Sentry spans so they can be sent to the Sentry backend.\n \"\"\"\n\n # The mapping from otel span ids to sentry spans\n otel_span_map = {} # type: Dict[str, Union[Transaction, OTelSpan]]\n\n def __new__(cls):\n # type: () -> SentrySpanProcessor\n if not hasattr(cls, \"instance\"):\n cls.instance = super(SentrySpanProcessor, cls).__new__(cls)\n\n return cls.instance\n\n def __init__(self):\n # type: () -> None\n @add_global_event_processor\n def global_event_processor(event, hint):\n # type: (Event, Hint) -> Event\n return link_trace_context_to_error_event(event, self.otel_span_map)\n\n def on_start(self, otel_span, parent_context=None):\n # type: (OTelSpan, SpanContext) -> None\n hub = Hub.current\n if not hub:\n return\n\n if hub.client and hub.client.options[\"instrumenter\"] != INSTRUMENTER.OTEL:\n return\n\n if not otel_span.context.is_valid:\n return\n\n if self._is_sentry_span(hub, otel_span):\n return\n\n trace_data = self._get_trace_data(otel_span, parent_context)\n\n parent_span_id = trace_data[\"parent_span_id\"]\n sentry_parent_span = (\n self.otel_span_map.get(parent_span_id, None) if parent_span_id else None\n )\n\n sentry_span = None\n if sentry_parent_span:\n sentry_span = sentry_parent_span.start_child(\n span_id=trace_data[\"span_id\"],\n description=otel_span.name,\n start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),\n instrumenter=INSTRUMENTER.OTEL,\n )\n else:\n sentry_span = hub.start_transaction(\n name=otel_span.name,\n span_id=trace_data[\"span_id\"],\n parent_span_id=parent_span_id,\n trace_id=trace_data[\"trace_id\"],\n baggage=trace_data[\"baggage\"],\n start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),\n instrumenter=INSTRUMENTER.OTEL,\n )\n\n 
self.otel_span_map[trace_data[\"span_id\"]] = sentry_span\n\n def on_end(self, otel_span):\n # type: (OTelSpan) -> None\n hub = Hub.current\n if not hub:\n return\n\n if hub.client and hub.client.options[\"instrumenter\"] != INSTRUMENTER.OTEL:\n return\n\n if not otel_span.context.is_valid:\n return\n\n span_id = format_span_id(otel_span.context.span_id)\n sentry_span = self.otel_span_map.pop(span_id, None)\n if not sentry_span:\n return\n\n sentry_span.op = otel_span.name\n\n if isinstance(sentry_span, Transaction):\n sentry_span.name = otel_span.name\n sentry_span.set_context(\n OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span)\n )\n\n else:\n self._update_span_with_otel_data(sentry_span, otel_span)\n\n sentry_span.finish(\n end_timestamp=datetime.fromtimestamp(otel_span.end_time / 1e9)\n )\n\n def _is_sentry_span(self, hub, otel_span):\n # type: (Hub, OTelSpan) -> bool\n \"\"\"\n Break infinite loop:\n HTTP requests to Sentry are caught by OTel and send again to Sentry.\n \"\"\"\n otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)\n dsn_url = hub.client and Dsn(hub.client.dsn or \"\").netloc\n\n if otel_span_url and dsn_url in otel_span_url:\n return True\n\n return False\n\n def _get_otel_context(self, otel_span):\n # type: (OTelSpan) -> Dict[str, Any]\n \"\"\"\n Returns the OTel context for Sentry.\n See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context\n \"\"\"\n ctx = {}\n\n if otel_span.attributes:\n ctx[\"attributes\"] = dict(otel_span.attributes)\n\n if otel_span.resource.attributes:\n ctx[\"resource\"] = dict(otel_span.resource.attributes)\n\n return ctx\n\n def _get_trace_data(self, otel_span, parent_context):\n # type: (OTelSpan, SpanContext) -> Dict[str, Any]\n \"\"\"\n Extracts tracing information from one OTel span and its parent OTel context.\n \"\"\"\n trace_data = {}\n\n span_id = format_span_id(otel_span.context.span_id)\n trace_data[\"span_id\"] = span_id\n\n trace_id = format_trace_id(otel_span.context.trace_id)\n trace_data[\"trace_id\"] = trace_id\n\n parent_span_id = (\n format_span_id(otel_span.parent.span_id) if otel_span.parent else None\n )\n trace_data[\"parent_span_id\"] = parent_span_id\n\n sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context)\n trace_data[\"parent_sampled\"] = (\n sentry_trace_data[2] if sentry_trace_data else None\n )\n\n baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context)\n trace_data[\"baggage\"] = baggage\n\n return trace_data\n\n def _update_span_with_otel_data(self, sentry_span, otel_span):\n # type: (SentrySpan, OTelSpan) -> None\n \"\"\"\n Convert OTel span data and update the Sentry span with it.\n This should eventually happen on the server when ingesting the spans.\n \"\"\"\n for key, val in otel_span.attributes.items():\n sentry_span.set_data(key, val)\n\n sentry_span.set_data(\"otel.kind\", otel_span.kind)\n\n op = otel_span.name\n description = otel_span.name\n\n http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD, None)\n db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM, None)\n\n if http_method:\n op = \"http\"\n\n if otel_span.kind == SpanKind.SERVER:\n op += \".server\"\n elif otel_span.kind == SpanKind.CLIENT:\n op += \".client\"\n\n description = http_method\n\n peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None)\n if peer_name:\n description += \" {}\".format(peer_name)\n\n target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None)\n if target:\n description += \" 
{}\".format(target)\n\n if not peer_name and not target:\n url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)\n if url:\n parsed_url = urlparse(url)\n url = f\"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}\"\n description += \" {}\".format(url)\n\n status_code = otel_span.attributes.get(\n SpanAttributes.HTTP_STATUS_CODE, None\n )\n if status_code:\n sentry_span.set_http_status(status_code)\n\n elif db_query:\n op = \"db\"\n statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None)\n if statement:\n description = statement\n\n sentry_span.op = op\n sentry_span.description = description\n", "path": "sentry_sdk/integrations/opentelemetry/span_processor.py"}]}
| 2,741 | 710 |
gh_patches_debug_23370 | rasdani/github-patches | git_diff | python-gitlab__python-gitlab-1373 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cannot list package files
## Description of the problem, including code/CLI snippet
[Listing package files](https://docs.gitlab.com/ee/api/packages.html#list-package-files) appears to be unsupported. The API endpoint was introduced in GitLab 11.8.
## Expected Behavior
Listing package files should be possible.
## Actual Behavior
Listing package files is not possible.
## Specifications
- python-gitlab version: 2.6.0
- API version you are using (v3/v4): v4
- Gitlab server version (or gitlab.com): gitlab.com
PR incoming.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `gitlab/v4/objects/packages.py`
Content:
```
1 from gitlab.base import RESTManager, RESTObject
2 from gitlab.mixins import DeleteMixin, GetMixin, ListMixin, ObjectDeleteMixin
3
4
5 __all__ = [
6 "GroupPackage",
7 "GroupPackageManager",
8 "ProjectPackage",
9 "ProjectPackageManager",
10 ]
11
12
13 class GroupPackage(RESTObject):
14 pass
15
16
17 class GroupPackageManager(ListMixin, RESTManager):
18 _path = "/groups/%(group_id)s/packages"
19 _obj_cls = GroupPackage
20 _from_parent_attrs = {"group_id": "id"}
21 _list_filters = (
22 "exclude_subgroups",
23 "order_by",
24 "sort",
25 "package_type",
26 "package_name",
27 )
28
29
30 class ProjectPackage(ObjectDeleteMixin, RESTObject):
31 pass
32
33
34 class ProjectPackageManager(ListMixin, GetMixin, DeleteMixin, RESTManager):
35 _path = "/projects/%(project_id)s/packages"
36 _obj_cls = ProjectPackage
37 _from_parent_attrs = {"project_id": "id"}
38 _list_filters = (
39 "order_by",
40 "sort",
41 "package_type",
42 "package_name",
43 )
44
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/gitlab/v4/objects/packages.py b/gitlab/v4/objects/packages.py
--- a/gitlab/v4/objects/packages.py
+++ b/gitlab/v4/objects/packages.py
@@ -1,12 +1,13 @@
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import DeleteMixin, GetMixin, ListMixin, ObjectDeleteMixin
-
__all__ = [
"GroupPackage",
"GroupPackageManager",
"ProjectPackage",
"ProjectPackageManager",
+ "ProjectPackageFile",
+ "ProjectPackageFileManager",
]
@@ -28,7 +29,7 @@
class ProjectPackage(ObjectDeleteMixin, RESTObject):
- pass
+ _managers = (("package_files", "ProjectPackageFileManager"),)
class ProjectPackageManager(ListMixin, GetMixin, DeleteMixin, RESTManager):
@@ -41,3 +42,13 @@
"package_type",
"package_name",
)
+
+
+class ProjectPackageFile(RESTObject):
+ pass
+
+
+class ProjectPackageFileManager(ListMixin, RESTManager):
+ _path = "/projects/%(project_id)s/packages/%(package_id)s/package_files"
+ _obj_cls = ProjectPackageFile
+ _from_parent_attrs = {"project_id": "project_id", "package_id": "id"}
|
{"golden_diff": "diff --git a/gitlab/v4/objects/packages.py b/gitlab/v4/objects/packages.py\n--- a/gitlab/v4/objects/packages.py\n+++ b/gitlab/v4/objects/packages.py\n@@ -1,12 +1,13 @@\n from gitlab.base import RESTManager, RESTObject\n from gitlab.mixins import DeleteMixin, GetMixin, ListMixin, ObjectDeleteMixin\n \n-\n __all__ = [\n \"GroupPackage\",\n \"GroupPackageManager\",\n \"ProjectPackage\",\n \"ProjectPackageManager\",\n+ \"ProjectPackageFile\",\n+ \"ProjectPackageFileManager\",\n ]\n \n \n@@ -28,7 +29,7 @@\n \n \n class ProjectPackage(ObjectDeleteMixin, RESTObject):\n- pass\n+ _managers = ((\"package_files\", \"ProjectPackageFileManager\"),)\n \n \n class ProjectPackageManager(ListMixin, GetMixin, DeleteMixin, RESTManager):\n@@ -41,3 +42,13 @@\n \"package_type\",\n \"package_name\",\n )\n+\n+\n+class ProjectPackageFile(RESTObject):\n+ pass\n+\n+\n+class ProjectPackageFileManager(ListMixin, RESTManager):\n+ _path = \"/projects/%(project_id)s/packages/%(package_id)s/package_files\"\n+ _obj_cls = ProjectPackageFile\n+ _from_parent_attrs = {\"project_id\": \"project_id\", \"package_id\": \"id\"}\n", "issue": "Cannot list package files\n## Description of the problem, including code/CLI snippet\r\n\r\n[Listing package files](https://docs.gitlab.com/ee/api/packages.html#list-package-files) appears to be unsupported. The API endpoint was introduced in GitLab 11.8.\r\n\r\n## Expected Behavior\r\n\r\nListing package files should be possible.\r\n\r\n## Actual Behavior\r\n\r\nListing package files is not possible.\r\n\r\n## Specifications\r\n\r\n - python-gitlab version: 2.6.0\r\n - API version you are using (v3/v4): v4\r\n - Gitlab server version (or gitlab.com): gitlab.com\r\n\r\n\r\nPR incoming.\n", "before_files": [{"content": "from gitlab.base import RESTManager, RESTObject\nfrom gitlab.mixins import DeleteMixin, GetMixin, ListMixin, ObjectDeleteMixin\n\n\n__all__ = [\n \"GroupPackage\",\n \"GroupPackageManager\",\n \"ProjectPackage\",\n \"ProjectPackageManager\",\n]\n\n\nclass GroupPackage(RESTObject):\n pass\n\n\nclass GroupPackageManager(ListMixin, RESTManager):\n _path = \"/groups/%(group_id)s/packages\"\n _obj_cls = GroupPackage\n _from_parent_attrs = {\"group_id\": \"id\"}\n _list_filters = (\n \"exclude_subgroups\",\n \"order_by\",\n \"sort\",\n \"package_type\",\n \"package_name\",\n )\n\n\nclass ProjectPackage(ObjectDeleteMixin, RESTObject):\n pass\n\n\nclass ProjectPackageManager(ListMixin, GetMixin, DeleteMixin, RESTManager):\n _path = \"/projects/%(project_id)s/packages\"\n _obj_cls = ProjectPackage\n _from_parent_attrs = {\"project_id\": \"id\"}\n _list_filters = (\n \"order_by\",\n \"sort\",\n \"package_type\",\n \"package_name\",\n )\n", "path": "gitlab/v4/objects/packages.py"}], "after_files": [{"content": "from gitlab.base import RESTManager, RESTObject\nfrom gitlab.mixins import DeleteMixin, GetMixin, ListMixin, ObjectDeleteMixin\n\n__all__ = [\n \"GroupPackage\",\n \"GroupPackageManager\",\n \"ProjectPackage\",\n \"ProjectPackageManager\",\n \"ProjectPackageFile\",\n \"ProjectPackageFileManager\",\n]\n\n\nclass GroupPackage(RESTObject):\n pass\n\n\nclass GroupPackageManager(ListMixin, RESTManager):\n _path = \"/groups/%(group_id)s/packages\"\n _obj_cls = GroupPackage\n _from_parent_attrs = {\"group_id\": \"id\"}\n _list_filters = (\n \"exclude_subgroups\",\n \"order_by\",\n \"sort\",\n \"package_type\",\n \"package_name\",\n )\n\n\nclass ProjectPackage(ObjectDeleteMixin, RESTObject):\n _managers = ((\"package_files\", \"ProjectPackageFileManager\"),)\n\n\nclass 
ProjectPackageManager(ListMixin, GetMixin, DeleteMixin, RESTManager):\n _path = \"/projects/%(project_id)s/packages\"\n _obj_cls = ProjectPackage\n _from_parent_attrs = {\"project_id\": \"id\"}\n _list_filters = (\n \"order_by\",\n \"sort\",\n \"package_type\",\n \"package_name\",\n )\n\n\nclass ProjectPackageFile(RESTObject):\n pass\n\n\nclass ProjectPackageFileManager(ListMixin, RESTManager):\n _path = \"/projects/%(project_id)s/packages/%(package_id)s/package_files\"\n _obj_cls = ProjectPackageFile\n _from_parent_attrs = {\"project_id\": \"project_id\", \"package_id\": \"id\"}\n", "path": "gitlab/v4/objects/packages.py"}]}
| 713 | 293 |
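For orientation, the golden diff above wires a `ProjectPackageFileManager` into `ProjectPackage._managers`, so listing files follows python-gitlab's usual manager chain. A minimal usage sketch, assuming a reachable GitLab instance, a valid token, and made-up project and package IDs:

```python
import gitlab

# Hypothetical connection details; substitute a real URL and token.
gl = gitlab.Gitlab("https://gitlab.example.com", private_token="<token>")

project = gl.projects.get(123)        # placeholder project id
package = project.packages.get(456)   # placeholder package id

# With the patch applied, .package_files is a ListMixin manager, so .list()
# issues GET /projects/:id/packages/:package_id/package_files.
for pkg_file in package.package_files.list(all=True):
    # file_name and size are fields returned by the GitLab package_files API.
    print(pkg_file.file_name, pkg_file.size)
```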
gh_patches_debug_1982 | rasdani/github-patches | git_diff | ivy-llc__ivy-14109 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
frombuffer
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ivy/functional/frontends/torch/creation_ops.py`
Content:
```
1 # local
2 import ivy
3 from ivy.func_wrapper import with_unsupported_dtypes
4 from ivy.functional.frontends.torch.func_wrapper import to_ivy_arrays_and_back
5
6
7 @to_ivy_arrays_and_back
8 def empty(
9 *args,
10 size=None,
11 out=None,
12 dtype=None,
13 layout=None,
14 device=None,
15 requires_grad=False,
16 pin_memory=False,
17 memory_format=None,
18 ):
19 if args and size:
20 raise TypeError("empty() got multiple values for argument 'shape'")
21 if size is None:
22 size = args[0] if isinstance(args[0], (tuple, list)) else args
23 return ivy.empty(shape=size, dtype=dtype, device=device, out=out)
24
25
26 @to_ivy_arrays_and_back
27 def full(
28 size,
29 fill_value,
30 *,
31 out=None,
32 dtype=None,
33 layout=None,
34 device=None,
35 requires_grad=None,
36 ):
37 ret = ivy.full(
38 shape=size, fill_value=fill_value, dtype=dtype, device=device, out=out
39 )
40 return ret
41
42
43 @to_ivy_arrays_and_back
44 def ones(*args, size=None, out=None, dtype=None, device=None, requires_grad=False):
45 if args and size:
46 raise TypeError("ones() got multiple values for argument 'shape'")
47 if size is None:
48 size = args[0] if isinstance(args[0], (tuple, list)) else args
49 return ivy.ones(shape=size, dtype=dtype, device=device, out=out)
50
51
52 @to_ivy_arrays_and_back
53 def ones_like_v_0p3p0_to_0p3p1(input, out=None):
54 return ivy.ones_like(input, out=None)
55
56
57 @to_ivy_arrays_and_back
58 def heaviside(input, values, *, out=None):
59 return ivy.heaviside(input, values, out=out)
60
61
62 @to_ivy_arrays_and_back
63 def ones_like_v_0p4p0_and_above(
64 input,
65 *,
66 dtype=None,
67 layout=None,
68 device=None,
69 requires_grad=False,
70 memory_format=None,
71 ):
72 ret = ivy.ones_like(input, dtype=dtype, device=device)
73 return ret
74
75
76 @to_ivy_arrays_and_back
77 def zeros(*args, size=None, out=None, dtype=None, device=None, requires_grad=False):
78 if args and size:
79 raise TypeError("zeros() got multiple values for argument 'shape'")
80 if size is None:
81 size = args[0] if isinstance(args[0], (tuple, list)) else args
82 return ivy.zeros(shape=size, dtype=dtype, device=device, out=out)
83
84
85 @to_ivy_arrays_and_back
86 def zeros_like(
87 input,
88 *,
89 dtype=None,
90 layout=None,
91 device=None,
92 requires_grad=False,
93 memory_format=None,
94 ):
95 ret = ivy.zeros_like(input, dtype=dtype, device=device)
96 return ret
97
98
99 @to_ivy_arrays_and_back
100 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
101 def arange(
102 *args,
103 out=None,
104 dtype=None,
105 layout=None,
106 device=None,
107 requires_grad=False,
108 ):
109 if len(args) == 1:
110 end = args[0]
111 start = 0
112 step = 1
113 elif len(args) == 3:
114 start, end, step = args
115 else:
116 ivy.utils.assertions.check_true(
117 len(args) == 1 or len(args) == 3,
118 "only 1 or 3 positional arguments are supported",
119 )
120 return ivy.arange(start, end, step, dtype=dtype, device=device)
121
122
123 @to_ivy_arrays_and_back
124 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
125 def range(
126 *args,
127 dtype=None,
128 layout=None,
129 device=None,
130 requires_grad=False,
131 ):
132 if len(args) == 1:
133 end = args[0]
134 start = 0
135 step = 1
136 elif len(args) == 3:
137 start, end, step = args
138 else:
139 ivy.utils.assertions.check_true(
140 len(args) == 1 or len(args) == 3,
141 "only 1 or 3 positional arguments are supported",
142 )
143 range_vec = []
144 elem = start
145 while 1:
146 range_vec = range_vec + [elem]
147 elem += step
148 if start == end:
149 break
150 if start < end:
151 if elem > end:
152 break
153 else:
154 if elem < end:
155 break
156 return ivy.array(range_vec, dtype=dtype, device=device)
157
158
159 @to_ivy_arrays_and_back
160 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
161 def linspace(
162 start,
163 end,
164 steps,
165 *,
166 out=None,
167 dtype=None,
168 device=None,
169 layout=None,
170 requires_grad=False,
171 ):
172 ret = ivy.linspace(start, end, num=steps, dtype=dtype, device=device, out=out)
173 return ret
174
175
176 @to_ivy_arrays_and_back
177 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
178 def logspace(
179 start,
180 end,
181 steps,
182 *,
183 base=10.0,
184 out=None,
185 dtype=None,
186 layout=None,
187 device=None,
188 requires_grad=False,
189 ):
190 ret = ivy.logspace(
191 start, end, num=steps, base=base, dtype=dtype, device=device, out=out
192 )
193 return ret
194
195
196 @to_ivy_arrays_and_back
197 def eye(
198 n, m=None, *, out=None, dtype=None, layout=None, device=None, requires_grad=False
199 ):
200 ret = ivy.eye(n_rows=n, n_columns=m, dtype=dtype, device=device, out=out)
201 return ret
202
203
204 @to_ivy_arrays_and_back
205 def from_dlpack(ext_tensor):
206 return ivy.from_dlpack(ext_tensor)
207
208
209 @to_ivy_arrays_and_back
210 def empty_like(
211 input,
212 *,
213 dtype=None,
214 layout=None,
215 device=None,
216 requires_grad=False,
217 memory_format=None,
218 ):
219 ret = ivy.empty_like(input, dtype=dtype, device=device)
220 return ret
221
222
223 @to_ivy_arrays_and_back
224 def full_like(
225 input,
226 fill_value,
227 *,
228 dtype=None,
229 layout=None,
230 device=None,
231 requires_grad=False,
232 memory_format=None,
233 ):
234 return ivy.full_like(input, fill_value, dtype=dtype, device=device)
235
236
237 @to_ivy_arrays_and_back
238 def as_tensor(
239 data,
240 *,
241 dtype=None,
242 device=None,
243 ):
244 return ivy.asarray(data, dtype=dtype, device=device)
245
246
247 @to_ivy_arrays_and_back
248 def from_numpy(data, /):
249 return ivy.asarray(data, dtype=ivy.dtype(data))
250
251
252 from_numpy.supported_dtypes = ("ndarray",)
253
254
255 @to_ivy_arrays_and_back
256 def as_strided(input, size, stride, storage_offset=None):
257 ind = ivy.array([0], dtype=ivy.int64)
258 for i, (size_i, stride_i) in enumerate(zip(size, stride)):
259 r_size = [1] * len(stride)
260 r_size[i] = -1
261 ind = ind + ivy.reshape(ivy.arange(size_i), r_size) * stride_i
262 if storage_offset:
263 ind = ind + storage_offset
264 return ivy.gather(ivy.flatten(input), ind)
265
266
267 @to_ivy_arrays_and_back
268 def tensor(
269 data,
270 *,
271 dtype=None,
272 device=None,
273 requires_grad=False,
274 pin_memory=False,
275 ):
276 return ivy.array(data, dtype=dtype, device=device)
277
278
279 @to_ivy_arrays_and_back
280 def asarray(
281 obj,
282 *,
283 dtype=None,
284 device=None,
285 copy=None,
286 ):
287 return ivy.asarray(obj, copy=copy, dtype=dtype, device=device)
288
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ivy/functional/frontends/torch/creation_ops.py b/ivy/functional/frontends/torch/creation_ops.py
--- a/ivy/functional/frontends/torch/creation_ops.py
+++ b/ivy/functional/frontends/torch/creation_ops.py
@@ -285,3 +285,15 @@
copy=None,
):
return ivy.asarray(obj, copy=copy, dtype=dtype, device=device)
+
+
+@to_ivy_arrays_and_back
+def frombuffer(
+ buffer,
+ *,
+ dtype,
+ count=-1,
+ offset=0,
+ requires_grad=False,
+):
+ return ivy.frombuffer(buffer, dtype=dtype, count=count, offset=offset)
|
{"golden_diff": "diff --git a/ivy/functional/frontends/torch/creation_ops.py b/ivy/functional/frontends/torch/creation_ops.py\n--- a/ivy/functional/frontends/torch/creation_ops.py\n+++ b/ivy/functional/frontends/torch/creation_ops.py\n@@ -285,3 +285,15 @@\n copy=None,\n ):\n return ivy.asarray(obj, copy=copy, dtype=dtype, device=device)\n+\n+\n+@to_ivy_arrays_and_back\n+def frombuffer(\n+ buffer, \n+ *, \n+ dtype,\n+ count=-1,\n+ offset=0,\n+ requires_grad=False,\n+):\n+ return ivy.frombuffer(buffer, dtype=dtype, count=count, offset=offset)\n", "issue": "frombuffer\n\n", "before_files": [{"content": "# local\nimport ivy\nfrom ivy.func_wrapper import with_unsupported_dtypes\nfrom ivy.functional.frontends.torch.func_wrapper import to_ivy_arrays_and_back\n\n\n@to_ivy_arrays_and_back\ndef empty(\n *args,\n size=None,\n out=None,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n pin_memory=False,\n memory_format=None,\n):\n if args and size:\n raise TypeError(\"empty() got multiple values for argument 'shape'\")\n if size is None:\n size = args[0] if isinstance(args[0], (tuple, list)) else args\n return ivy.empty(shape=size, dtype=dtype, device=device, out=out)\n\n\n@to_ivy_arrays_and_back\ndef full(\n size,\n fill_value,\n *,\n out=None,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=None,\n):\n ret = ivy.full(\n shape=size, fill_value=fill_value, dtype=dtype, device=device, out=out\n )\n return ret\n\n\n@to_ivy_arrays_and_back\ndef ones(*args, size=None, out=None, dtype=None, device=None, requires_grad=False):\n if args and size:\n raise TypeError(\"ones() got multiple values for argument 'shape'\")\n if size is None:\n size = args[0] if isinstance(args[0], (tuple, list)) else args\n return ivy.ones(shape=size, dtype=dtype, device=device, out=out)\n\n\n@to_ivy_arrays_and_back\ndef ones_like_v_0p3p0_to_0p3p1(input, out=None):\n return ivy.ones_like(input, out=None)\n\n\n@to_ivy_arrays_and_back\ndef heaviside(input, values, *, out=None):\n return ivy.heaviside(input, values, out=out)\n\n\n@to_ivy_arrays_and_back\ndef ones_like_v_0p4p0_and_above(\n input,\n *,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n memory_format=None,\n):\n ret = ivy.ones_like(input, dtype=dtype, device=device)\n return ret\n\n\n@to_ivy_arrays_and_back\ndef zeros(*args, size=None, out=None, dtype=None, device=None, requires_grad=False):\n if args and size:\n raise TypeError(\"zeros() got multiple values for argument 'shape'\")\n if size is None:\n size = args[0] if isinstance(args[0], (tuple, list)) else args\n return ivy.zeros(shape=size, dtype=dtype, device=device, out=out)\n\n\n@to_ivy_arrays_and_back\ndef zeros_like(\n input,\n *,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n memory_format=None,\n):\n ret = ivy.zeros_like(input, dtype=dtype, device=device)\n return ret\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef arange(\n *args,\n out=None,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n):\n if len(args) == 1:\n end = args[0]\n start = 0\n step = 1\n elif len(args) == 3:\n start, end, step = args\n else:\n ivy.utils.assertions.check_true(\n len(args) == 1 or len(args) == 3,\n \"only 1 or 3 positional arguments are supported\",\n )\n return ivy.arange(start, end, step, dtype=dtype, device=device)\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef range(\n *args,\n dtype=None,\n layout=None,\n 
device=None,\n requires_grad=False,\n):\n if len(args) == 1:\n end = args[0]\n start = 0\n step = 1\n elif len(args) == 3:\n start, end, step = args\n else:\n ivy.utils.assertions.check_true(\n len(args) == 1 or len(args) == 3,\n \"only 1 or 3 positional arguments are supported\",\n )\n range_vec = []\n elem = start\n while 1:\n range_vec = range_vec + [elem]\n elem += step\n if start == end:\n break\n if start < end:\n if elem > end:\n break\n else:\n if elem < end:\n break\n return ivy.array(range_vec, dtype=dtype, device=device)\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef linspace(\n start,\n end,\n steps,\n *,\n out=None,\n dtype=None,\n device=None,\n layout=None,\n requires_grad=False,\n):\n ret = ivy.linspace(start, end, num=steps, dtype=dtype, device=device, out=out)\n return ret\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef logspace(\n start,\n end,\n steps,\n *,\n base=10.0,\n out=None,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n):\n ret = ivy.logspace(\n start, end, num=steps, base=base, dtype=dtype, device=device, out=out\n )\n return ret\n\n\n@to_ivy_arrays_and_back\ndef eye(\n n, m=None, *, out=None, dtype=None, layout=None, device=None, requires_grad=False\n):\n ret = ivy.eye(n_rows=n, n_columns=m, dtype=dtype, device=device, out=out)\n return ret\n\n\n@to_ivy_arrays_and_back\ndef from_dlpack(ext_tensor):\n return ivy.from_dlpack(ext_tensor)\n\n\n@to_ivy_arrays_and_back\ndef empty_like(\n input,\n *,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n memory_format=None,\n):\n ret = ivy.empty_like(input, dtype=dtype, device=device)\n return ret\n\n\n@to_ivy_arrays_and_back\ndef full_like(\n input,\n fill_value,\n *,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n memory_format=None,\n):\n return ivy.full_like(input, fill_value, dtype=dtype, device=device)\n\n\n@to_ivy_arrays_and_back\ndef as_tensor(\n data,\n *,\n dtype=None,\n device=None,\n):\n return ivy.asarray(data, dtype=dtype, device=device)\n\n\n@to_ivy_arrays_and_back\ndef from_numpy(data, /):\n return ivy.asarray(data, dtype=ivy.dtype(data))\n\n\nfrom_numpy.supported_dtypes = (\"ndarray\",)\n\n\n@to_ivy_arrays_and_back\ndef as_strided(input, size, stride, storage_offset=None):\n ind = ivy.array([0], dtype=ivy.int64)\n for i, (size_i, stride_i) in enumerate(zip(size, stride)):\n r_size = [1] * len(stride)\n r_size[i] = -1\n ind = ind + ivy.reshape(ivy.arange(size_i), r_size) * stride_i\n if storage_offset:\n ind = ind + storage_offset\n return ivy.gather(ivy.flatten(input), ind)\n\n\n@to_ivy_arrays_and_back\ndef tensor(\n data,\n *,\n dtype=None,\n device=None,\n requires_grad=False,\n pin_memory=False,\n):\n return ivy.array(data, dtype=dtype, device=device)\n\n\n@to_ivy_arrays_and_back\ndef asarray(\n obj,\n *,\n dtype=None,\n device=None,\n copy=None,\n):\n return ivy.asarray(obj, copy=copy, dtype=dtype, device=device)\n", "path": "ivy/functional/frontends/torch/creation_ops.py"}], "after_files": [{"content": "# local\nimport ivy\nfrom ivy.func_wrapper import with_unsupported_dtypes\nfrom ivy.functional.frontends.torch.func_wrapper import to_ivy_arrays_and_back\n\n\n@to_ivy_arrays_and_back\ndef empty(\n *args,\n size=None,\n out=None,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n pin_memory=False,\n memory_format=None,\n):\n if args and size:\n raise TypeError(\"empty() got multiple values for 
argument 'shape'\")\n if size is None:\n size = args[0] if isinstance(args[0], (tuple, list)) else args\n return ivy.empty(shape=size, dtype=dtype, device=device, out=out)\n\n\n@to_ivy_arrays_and_back\ndef full(\n size,\n fill_value,\n *,\n out=None,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=None,\n):\n ret = ivy.full(\n shape=size, fill_value=fill_value, dtype=dtype, device=device, out=out\n )\n return ret\n\n\n@to_ivy_arrays_and_back\ndef ones(*args, size=None, out=None, dtype=None, device=None, requires_grad=False):\n if args and size:\n raise TypeError(\"ones() got multiple values for argument 'shape'\")\n if size is None:\n size = args[0] if isinstance(args[0], (tuple, list)) else args\n return ivy.ones(shape=size, dtype=dtype, device=device, out=out)\n\n\n@to_ivy_arrays_and_back\ndef ones_like_v_0p3p0_to_0p3p1(input, out=None):\n return ivy.ones_like(input, out=None)\n\n\n@to_ivy_arrays_and_back\ndef heaviside(input, values, *, out=None):\n return ivy.heaviside(input, values, out=out)\n\n\n@to_ivy_arrays_and_back\ndef ones_like_v_0p4p0_and_above(\n input,\n *,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n memory_format=None,\n):\n ret = ivy.ones_like(input, dtype=dtype, device=device)\n return ret\n\n\n@to_ivy_arrays_and_back\ndef zeros(*args, size=None, out=None, dtype=None, device=None, requires_grad=False):\n if args and size:\n raise TypeError(\"zeros() got multiple values for argument 'shape'\")\n if size is None:\n size = args[0] if isinstance(args[0], (tuple, list)) else args\n return ivy.zeros(shape=size, dtype=dtype, device=device, out=out)\n\n\n@to_ivy_arrays_and_back\ndef zeros_like(\n input,\n *,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n memory_format=None,\n):\n ret = ivy.zeros_like(input, dtype=dtype, device=device)\n return ret\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef arange(\n *args,\n out=None,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n):\n if len(args) == 1:\n end = args[0]\n start = 0\n step = 1\n elif len(args) == 3:\n start, end, step = args\n else:\n ivy.utils.assertions.check_true(\n len(args) == 1 or len(args) == 3,\n \"only 1 or 3 positional arguments are supported\",\n )\n return ivy.arange(start, end, step, dtype=dtype, device=device)\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef range(\n *args,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n):\n if len(args) == 1:\n end = args[0]\n start = 0\n step = 1\n elif len(args) == 3:\n start, end, step = args\n else:\n ivy.utils.assertions.check_true(\n len(args) == 1 or len(args) == 3,\n \"only 1 or 3 positional arguments are supported\",\n )\n range_vec = []\n elem = start\n while 1:\n range_vec = range_vec + [elem]\n elem += step\n if start == end:\n break\n if start < end:\n if elem > end:\n break\n else:\n if elem < end:\n break\n return ivy.array(range_vec, dtype=dtype, device=device)\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef linspace(\n start,\n end,\n steps,\n *,\n out=None,\n dtype=None,\n device=None,\n layout=None,\n requires_grad=False,\n):\n ret = ivy.linspace(start, end, num=steps, dtype=dtype, device=device, out=out)\n return ret\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef logspace(\n start,\n end,\n steps,\n *,\n 
base=10.0,\n out=None,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n):\n ret = ivy.logspace(\n start, end, num=steps, base=base, dtype=dtype, device=device, out=out\n )\n return ret\n\n\n@to_ivy_arrays_and_back\ndef eye(\n n, m=None, *, out=None, dtype=None, layout=None, device=None, requires_grad=False\n):\n ret = ivy.eye(n_rows=n, n_columns=m, dtype=dtype, device=device, out=out)\n return ret\n\n\n@to_ivy_arrays_and_back\ndef from_dlpack(ext_tensor):\n return ivy.from_dlpack(ext_tensor)\n\n\n@to_ivy_arrays_and_back\ndef empty_like(\n input,\n *,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n memory_format=None,\n):\n ret = ivy.empty_like(input, dtype=dtype, device=device)\n return ret\n\n\n@to_ivy_arrays_and_back\ndef full_like(\n input,\n fill_value,\n *,\n dtype=None,\n layout=None,\n device=None,\n requires_grad=False,\n memory_format=None,\n):\n return ivy.full_like(input, fill_value, dtype=dtype, device=device)\n\n\n@to_ivy_arrays_and_back\ndef as_tensor(\n data,\n *,\n dtype=None,\n device=None,\n):\n return ivy.asarray(data, dtype=dtype, device=device)\n\n\n@to_ivy_arrays_and_back\ndef from_numpy(data, /):\n return ivy.asarray(data, dtype=ivy.dtype(data))\n\n\nfrom_numpy.supported_dtypes = (\"ndarray\",)\n\n\n@to_ivy_arrays_and_back\ndef as_strided(input, size, stride, storage_offset=None):\n ind = ivy.array([0], dtype=ivy.int64)\n for i, (size_i, stride_i) in enumerate(zip(size, stride)):\n r_size = [1] * len(stride)\n r_size[i] = -1\n ind = ind + ivy.reshape(ivy.arange(size_i), r_size) * stride_i\n if storage_offset:\n ind = ind + storage_offset\n return ivy.gather(ivy.flatten(input), ind)\n\n\n@to_ivy_arrays_and_back\ndef tensor(\n data,\n *,\n dtype=None,\n device=None,\n requires_grad=False,\n pin_memory=False,\n):\n return ivy.array(data, dtype=dtype, device=device)\n\n\n@to_ivy_arrays_and_back\ndef asarray(\n obj,\n *,\n dtype=None,\n device=None,\n copy=None,\n):\n return ivy.asarray(obj, copy=copy, dtype=dtype, device=device)\n\n\n@to_ivy_arrays_and_back\ndef frombuffer(\n buffer, \n *, \n dtype,\n count=-1,\n offset=0,\n requires_grad=False,\n):\n return ivy.frombuffer(buffer, dtype=dtype, count=count, offset=offset)\n", "path": "ivy/functional/frontends/torch/creation_ops.py"}]}
| 2,807 | 167 |
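The ivy golden diff above adds a thin `frombuffer` wrapper that forwards to `ivy.frombuffer` and ignores `requires_grad`, mirroring `torch.frombuffer`'s keyword-only signature. A rough usage sketch, assuming the torch frontend re-exports `creation_ops` as usual and that a backend such as numpy is installed:

```python
import ivy
import ivy.functional.frontends.torch as torch_frontend

ivy.set_backend("numpy")  # any installed backend should work

# Six bytes that decode to the int16 values 1, 2, 3 on a little-endian machine.
buf = bytearray([1, 0, 2, 0, 3, 0])

x = torch_frontend.frombuffer(buf, dtype=ivy.int16)
print(x)  # expected: a frontend tensor containing [1, 2, 3]
```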
gh_patches_debug_43206 | rasdani/github-patches | git_diff | conan-io__conan-center-index-1675 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[request] icu/66.1
### Package Details
* Package Name/Version: **icu/66.1**
* Changelog: **https://github.com/unicode-org/icu/releases/tag/release-66-1**
The above mentioned version is newly released by the upstream project and not yet available as a recipe. Please add this version.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `recipes/icu/all/conanfile.py`
Content:
```
1 import os
2 import glob
3 import platform
4 import shutil
5 from conans import ConanFile, tools, AutoToolsBuildEnvironment
6 from conans.tools import Version
7
8
9 class ICUBase(ConanFile):
10 name = "icu"
11 homepage = "http://site.icu-project.org"
12 license = "ICU"
13 description = "ICU is a mature, widely used set of C/C++ and Java libraries " \
14 "providing Unicode and Globalization support for software applications."
15 url = "https://github.com/conan-io/conan-center-index"
16 topics = ("conan", "icu", "icu4c", "i see you", "unicode")
17 _source_subfolder = "source_subfolder"
18 _build_subfolder = "build_subfolder"
19 _env_build = None
20 settings = "os", "arch", "compiler", "build_type"
21 options = {"shared": [True, False],
22 "fPIC": [True, False],
23 "data_packaging": ["files", "archive", "library", "static"],
24 "with_unit_tests": [True, False],
25 "silent": [True, False],
26 "with_dyload": [True, False]}
27 default_options = {"shared": False,
28 "fPIC": True,
29 "data_packaging": "archive",
30 "with_unit_tests": False,
31 "silent": True,
32 "with_dyload": True}
33
34 @property
35 def _is_msvc(self):
36 return self.settings.compiler == "Visual Studio"
37
38 @property
39 def _is_mingw(self):
40 return self.settings.os == "Windows" and self.settings.compiler == "gcc"
41
42 def build_requirements(self):
43 if tools.os_info.is_windows and "CONAN_BASH_PATH" not in os.environ and \
44 tools.os_info.detect_windows_subsystem() != "msys2":
45 self.build_requires("msys2/20190524")
46
47 def source(self):
48 tools.get(**self.conan_data["sources"][self.version])
49 os.rename("icu", self._source_subfolder)
50
51 def _workaround_icu_20545(self):
52 if tools.os_info.is_windows:
53 # https://unicode-org.atlassian.net/projects/ICU/issues/ICU-20545
54 srcdir = os.path.join(self.build_folder, self._source_subfolder, "source")
55 makeconv_cpp = os.path.join(srcdir, "tools", "makeconv", "makeconv.cpp")
56 tools.replace_in_file(makeconv_cpp,
57 "pathBuf.appendPathPart(arg, localError);",
58 "pathBuf.append('/', localError); pathBuf.append(arg, localError);")
59
60 def build(self):
61 for filename in glob.glob("patches/*.patch"):
62 self.output.info('applying patch "%s"' % filename)
63 tools.patch(base_path=self._source_subfolder, patch_file=filename)
64
65 if self._is_msvc:
66 run_configure_icu_file = os.path.join(self._source_subfolder, 'source', 'runConfigureICU')
67
68 flags = "-%s" % self.settings.compiler.runtime
69 if self.settings.get_safe("build_type") in ['Debug', 'RelWithDebInfo'] and Version(self.settings.compiler.version) >= "12":
70 flags += " -FS"
71 tools.replace_in_file(run_configure_icu_file, "-MDd", flags)
72 tools.replace_in_file(run_configure_icu_file, "-MD", flags)
73
74 self._workaround_icu_20545()
75
76 self._env_build = AutoToolsBuildEnvironment(self)
77 if not self.options.get_safe("shared"):
78 self._env_build.defines.append("U_STATIC_IMPLEMENTATION")
79 if tools.is_apple_os(self.settings.os):
80 self._env_build.defines.append("_DARWIN_C_SOURCE")
81 if self.settings.get_safe("os.version"):
82 self._env_build.flags.append(tools.apple_deployment_target_flag(self.settings.os,
83 self.settings.os.version))
84
85 if "msys2" in self.deps_user_info:
86 self._env_build.vars["PYTHON"] = tools.unix_path(os.path.join(self.deps_env_info["msys2"].MSYS_BIN, "python"), tools.MSYS2)
87
88 build_dir = os.path.join(self.build_folder, self._source_subfolder, 'build')
89 os.mkdir(build_dir)
90
91 with tools.vcvars(self.settings) if self._is_msvc else tools.no_op():
92 with tools.environment_append(self._env_build.vars):
93 with tools.chdir(build_dir):
94 # workaround for https://unicode-org.atlassian.net/browse/ICU-20531
95 os.makedirs(os.path.join("data", "out", "tmp"))
96
97 self.run(self._build_config_cmd, win_bash=tools.os_info.is_windows)
98 if self.options.get_safe("silent"):
99 silent = '--silent' if self.options.silent else 'VERBOSE=1'
100 else:
101 silent = '--silent'
102 command = "make {silent} -j {cpu_count}".format(silent=silent,
103 cpu_count=tools.cpu_count())
104 self.run(command, win_bash=tools.os_info.is_windows)
105 if self.options.get_safe("with_unit_tests"):
106 command = "make {silent} check".format(silent=silent)
107 self.run(command, win_bash=tools.os_info.is_windows)
108 command = "make {silent} install".format(silent=silent)
109 self.run(command, win_bash=tools.os_info.is_windows)
110
111 self._install_name_tool()
112
113 def package(self):
114 if self._is_msvc:
115 for dll in glob.glob(os.path.join(self.package_folder, 'lib', '*.dll')):
116 shutil.move(dll, os.path.join(self.package_folder, 'bin'))
117
118 self.copy("LICENSE", dst="licenses", src=os.path.join(self.source_folder, self._source_subfolder))
119 tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
120 tools.rmdir(os.path.join(self.package_folder, "share"))
121
122 @staticmethod
123 def detected_os():
124 if tools.OSInfo().is_macos:
125 return "Macos"
126 if tools.OSInfo().is_windows:
127 return "Windows"
128 return platform.system()
129
130 @property
131 def cross_building(self):
132 if tools.cross_building(self.settings):
133 if self.settings.os == self.detected_os():
134 if self.settings.arch == "x86" and tools.detected_architecture() == "x86_64":
135 return False
136 return True
137 return False
138
139 @property
140 def build_config_args(self):
141 prefix = self.package_folder.replace('\\', '/')
142 platform = {("Windows", "Visual Studio"): "Cygwin/MSVC",
143 ("Windows", "gcc"): "MinGW",
144 ("AIX", "gcc"): "AIX/GCC",
145 ("AIX", "xlc"): "AIX",
146 ("SunOS", "gcc"): "Solaris/GCC",
147 ("Linux", "gcc"): "Linux/gcc",
148 ("Linux", "clang"): "Linux",
149 ("Macos", "gcc"): "MacOSX",
150 ("Macos", "clang"): "MacOSX",
151 ("Macos", "apple-clang"): "MacOSX"}.get((str(self.settings.os),
152 str(self.settings.compiler)))
153 arch64 = ['x86_64', 'sparcv9', 'ppc64']
154 bits = "64" if self.settings.arch in arch64 else "32"
155 args = [platform,
156 "--prefix={0}".format(prefix),
157 "--with-library-bits={0}".format(bits),
158 "--disable-samples",
159 "--disable-layout",
160 "--disable-layoutex"]
161
162 if not self.options.with_dyload:
163 args += ["--disable-dyload"]
164
165 if self.cross_building:
166 if self._env_build.build:
167 args.append("--build=%s" % self._env_build.build)
168 if self._env_build.host:
169 args.append("--host=%s" % self._env_build.host)
170 if self._env_build.target:
171 args.append("--target=%s" % self._env_build.target)
172
173 if self.options.get_safe("data_packaging"):
174 args.append("--with-data-packaging={0}".format(self.options.data_packaging))
175 else:
176 args.append("--with-data-packaging=static")
177 datadir = os.path.join(self.package_folder, "lib")
178 datadir = datadir.replace("\\", "/") if tools.os_info.is_windows else datadir
179 args.append("--datarootdir=%s" % datadir) # do not use share
180 bindir = os.path.join(self.package_folder, "bin")
181 bindir = bindir.replace("\\", "/") if tools.os_info.is_windows else bindir
182 args.append("--sbindir=%s" % bindir)
183
184 if self._is_mingw:
185 mingw_chost = 'i686-w64-mingw32' if self.settings.arch == 'x86' else 'x86_64-w64-mingw32'
186 args.extend(["--build={0}".format(mingw_chost),
187 "--host={0}".format(mingw_chost)])
188
189 if self.settings.get_safe("build_type") == "Debug":
190 args.extend(["--disable-release", "--enable-debug"])
191 if self.options.get_safe("shared"):
192 args.extend(["--disable-static", "--enable-shared"])
193 else:
194 args.extend(["--enable-static", "--disable-shared"])
195 if not self.options.get_safe("with_unit_tests"):
196 args.append('--disable-tests')
197 return args
198
199 @property
200 def _build_config_cmd(self):
201 return "../source/runConfigureICU %s" % " ".join(self.build_config_args)
202
203 def _install_name_tool(self):
204 if tools.is_apple_os(self.settings.os):
205 with tools.chdir(os.path.join(self.package_folder, 'lib')):
206 for dylib in glob.glob('*icu*.{0}.dylib'.format(self.version)):
207 command = 'install_name_tool -id {0} {1}'.format(os.path.basename(dylib), dylib)
208 self.output.info(command)
209 self.run(command)
210
211 def package_id(self):
212 del self.info.options.with_unit_tests # ICU unit testing shouldn't affect the package's ID
213 del self.info.options.silent # Verbosity doesn't affect package's ID
214
215 def config_options(self):
216 if self.settings.os == "Windows":
217 del self.options.fPIC
218
219 def package_info(self):
220 self.cpp_info.names['cmake_find_package'] = 'ICU'
221 self.cpp_info.names['cmake_find_package_multi'] = 'ICU'
222
223 def lib_name(lib):
224 name = lib
225 if self.settings.os == "Windows":
226 if not self.options.shared:
227 name = 's' + name
228 if self.settings.build_type == "Debug":
229 name += 'd'
230 return name
231
232 libs = ['icuin' if self.settings.os == "Windows" else 'icui18n',
233 'icuio', 'icutest', 'icutu', 'icuuc',
234 'icudt' if self.settings.os == "Windows" else 'icudata']
235 self.cpp_info.libs = [lib_name(lib) for lib in libs]
236 self.cpp_info.bindirs.append('lib')
237
238 data_dir_name = self.name
239 if self.settings.os == "Windows" and self.settings.build_type == "Debug":
240 data_dir_name += 'd'
241 data_dir = os.path.join(self.package_folder, 'lib', data_dir_name, self.version)
242 vtag = self.version.split('.')[0]
243 data_file = "icudt{v}l.dat".format(v=vtag)
244 data_path = os.path.join(data_dir, data_file).replace('\\', '/')
245 if self.options.get_safe("data_packaging") in ["files", "archive"]:
246 self.env_info.ICU_DATA.append(data_path)
247
248 if not self.options.shared:
249 self.cpp_info.defines.append("U_STATIC_IMPLEMENTATION")
250 if self.settings.os == 'Linux' and self.options.with_dyload:
251 self.cpp_info.libs.append('dl')
252
253 if self.settings.os == 'Windows':
254 self.cpp_info.libs.append('advapi32')
255
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/recipes/icu/all/conanfile.py b/recipes/icu/all/conanfile.py
--- a/recipes/icu/all/conanfile.py
+++ b/recipes/icu/all/conanfile.py
@@ -18,6 +18,7 @@
_build_subfolder = "build_subfolder"
_env_build = None
settings = "os", "arch", "compiler", "build_type"
+ exports = ["patches/*.patch"]
options = {"shared": [True, False],
"fPIC": [True, False],
"data_packaging": ["files", "archive", "library", "static"],
@@ -58,10 +59,8 @@
"pathBuf.append('/', localError); pathBuf.append(arg, localError);")
def build(self):
- for filename in glob.glob("patches/*.patch"):
- self.output.info('applying patch "%s"' % filename)
- tools.patch(base_path=self._source_subfolder, patch_file=filename)
-
+ for p in self.conan_data["patches"][self.version]:
+ tools.patch(**p)
if self._is_msvc:
run_configure_icu_file = os.path.join(self._source_subfolder, 'source', 'runConfigureICU')
@@ -111,31 +110,13 @@
self._install_name_tool()
def package(self):
- if self._is_msvc:
- for dll in glob.glob(os.path.join(self.package_folder, 'lib', '*.dll')):
- shutil.move(dll, os.path.join(self.package_folder, 'bin'))
+ for dll in glob.glob(os.path.join(self.package_folder, 'lib', '*.dll')):
+ shutil.move(dll, os.path.join(self.package_folder, 'bin'))
self.copy("LICENSE", dst="licenses", src=os.path.join(self.source_folder, self._source_subfolder))
tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
tools.rmdir(os.path.join(self.package_folder, "share"))
- @staticmethod
- def detected_os():
- if tools.OSInfo().is_macos:
- return "Macos"
- if tools.OSInfo().is_windows:
- return "Windows"
- return platform.system()
-
- @property
- def cross_building(self):
- if tools.cross_building(self.settings):
- if self.settings.os == self.detected_os():
- if self.settings.arch == "x86" and tools.detected_architecture() == "x86_64":
- return False
- return True
- return False
-
@property
def build_config_args(self):
prefix = self.package_folder.replace('\\', '/')
@@ -157,12 +138,13 @@
"--with-library-bits={0}".format(bits),
"--disable-samples",
"--disable-layout",
- "--disable-layoutex"]
+ "--disable-layoutex",
+ "--disable-extras"]
if not self.options.with_dyload:
args += ["--disable-dyload"]
- if self.cross_building:
+ if tools.cross_building(self.settings, skip_x64_x86=True):
if self._env_build.build:
args.append("--build=%s" % self._env_build.build)
if self._env_build.host:
@@ -247,8 +229,10 @@
if not self.options.shared:
self.cpp_info.defines.append("U_STATIC_IMPLEMENTATION")
- if self.settings.os == 'Linux' and self.options.with_dyload:
- self.cpp_info.libs.append('dl')
+ if self.settings.os == 'Linux':
+ if self.options.with_dyload:
+ self.cpp_info.system_libs.append('dl')
+ self.cpp_info.system_libs.append('pthread')
if self.settings.os == 'Windows':
- self.cpp_info.libs.append('advapi32')
+ self.cpp_info.system_libs.append('advapi32')
|
{"golden_diff": "diff --git a/recipes/icu/all/conanfile.py b/recipes/icu/all/conanfile.py\n--- a/recipes/icu/all/conanfile.py\n+++ b/recipes/icu/all/conanfile.py\n@@ -18,6 +18,7 @@\n _build_subfolder = \"build_subfolder\"\n _env_build = None\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n+ exports = [\"patches/*.patch\"]\n options = {\"shared\": [True, False],\n \"fPIC\": [True, False],\n \"data_packaging\": [\"files\", \"archive\", \"library\", \"static\"],\n@@ -58,10 +59,8 @@\n \"pathBuf.append('/', localError); pathBuf.append(arg, localError);\")\n \n def build(self):\n- for filename in glob.glob(\"patches/*.patch\"):\n- self.output.info('applying patch \"%s\"' % filename)\n- tools.patch(base_path=self._source_subfolder, patch_file=filename)\n-\n+ for p in self.conan_data[\"patches\"][self.version]:\n+ tools.patch(**p)\n if self._is_msvc:\n run_configure_icu_file = os.path.join(self._source_subfolder, 'source', 'runConfigureICU')\n \n@@ -111,31 +110,13 @@\n self._install_name_tool()\n \n def package(self):\n- if self._is_msvc:\n- for dll in glob.glob(os.path.join(self.package_folder, 'lib', '*.dll')):\n- shutil.move(dll, os.path.join(self.package_folder, 'bin'))\n+ for dll in glob.glob(os.path.join(self.package_folder, 'lib', '*.dll')):\n+ shutil.move(dll, os.path.join(self.package_folder, 'bin'))\n \n self.copy(\"LICENSE\", dst=\"licenses\", src=os.path.join(self.source_folder, self._source_subfolder))\n tools.rmdir(os.path.join(self.package_folder, \"lib\", \"pkgconfig\"))\n tools.rmdir(os.path.join(self.package_folder, \"share\"))\n \n- @staticmethod\n- def detected_os():\n- if tools.OSInfo().is_macos:\n- return \"Macos\"\n- if tools.OSInfo().is_windows:\n- return \"Windows\"\n- return platform.system()\n-\n- @property\n- def cross_building(self):\n- if tools.cross_building(self.settings):\n- if self.settings.os == self.detected_os():\n- if self.settings.arch == \"x86\" and tools.detected_architecture() == \"x86_64\":\n- return False\n- return True\n- return False\n-\n @property\n def build_config_args(self):\n prefix = self.package_folder.replace('\\\\', '/')\n@@ -157,12 +138,13 @@\n \"--with-library-bits={0}\".format(bits),\n \"--disable-samples\",\n \"--disable-layout\",\n- \"--disable-layoutex\"]\n+ \"--disable-layoutex\",\n+ \"--disable-extras\"]\n \n if not self.options.with_dyload:\n args += [\"--disable-dyload\"]\n \n- if self.cross_building:\n+ if tools.cross_building(self.settings, skip_x64_x86=True):\n if self._env_build.build:\n args.append(\"--build=%s\" % self._env_build.build)\n if self._env_build.host:\n@@ -247,8 +229,10 @@\n \n if not self.options.shared:\n self.cpp_info.defines.append(\"U_STATIC_IMPLEMENTATION\")\n- if self.settings.os == 'Linux' and self.options.with_dyload:\n- self.cpp_info.libs.append('dl')\n+ if self.settings.os == 'Linux':\n+ if self.options.with_dyload:\n+ self.cpp_info.system_libs.append('dl')\n+ self.cpp_info.system_libs.append('pthread')\n \n if self.settings.os == 'Windows':\n- self.cpp_info.libs.append('advapi32')\n+ self.cpp_info.system_libs.append('advapi32')\n", "issue": "[request] icu/66.1\n### Package Details\r\n * Package Name/Version: **icu/66.1**\r\n * Changelog: **https://github.com/unicode-org/icu/releases/tag/release-66-1**\r\n\r\n\r\nThe above mentioned version is newly released by the upstream project and not yet available as a recipe. 
Please add this version.\r\n\n", "before_files": [{"content": "import os\nimport glob\nimport platform\nimport shutil\nfrom conans import ConanFile, tools, AutoToolsBuildEnvironment\nfrom conans.tools import Version\n\n\nclass ICUBase(ConanFile):\n name = \"icu\"\n homepage = \"http://site.icu-project.org\"\n license = \"ICU\"\n description = \"ICU is a mature, widely used set of C/C++ and Java libraries \" \\\n \"providing Unicode and Globalization support for software applications.\"\n url = \"https://github.com/conan-io/conan-center-index\"\n topics = (\"conan\", \"icu\", \"icu4c\", \"i see you\", \"unicode\")\n _source_subfolder = \"source_subfolder\"\n _build_subfolder = \"build_subfolder\"\n _env_build = None\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n options = {\"shared\": [True, False],\n \"fPIC\": [True, False],\n \"data_packaging\": [\"files\", \"archive\", \"library\", \"static\"],\n \"with_unit_tests\": [True, False],\n \"silent\": [True, False],\n \"with_dyload\": [True, False]}\n default_options = {\"shared\": False,\n \"fPIC\": True,\n \"data_packaging\": \"archive\",\n \"with_unit_tests\": False,\n \"silent\": True,\n \"with_dyload\": True}\n\n @property\n def _is_msvc(self):\n return self.settings.compiler == \"Visual Studio\"\n\n @property\n def _is_mingw(self):\n return self.settings.os == \"Windows\" and self.settings.compiler == \"gcc\"\n\n def build_requirements(self):\n if tools.os_info.is_windows and \"CONAN_BASH_PATH\" not in os.environ and \\\n tools.os_info.detect_windows_subsystem() != \"msys2\":\n self.build_requires(\"msys2/20190524\")\n\n def source(self):\n tools.get(**self.conan_data[\"sources\"][self.version])\n os.rename(\"icu\", self._source_subfolder)\n\n def _workaround_icu_20545(self):\n if tools.os_info.is_windows:\n # https://unicode-org.atlassian.net/projects/ICU/issues/ICU-20545\n srcdir = os.path.join(self.build_folder, self._source_subfolder, \"source\")\n makeconv_cpp = os.path.join(srcdir, \"tools\", \"makeconv\", \"makeconv.cpp\")\n tools.replace_in_file(makeconv_cpp,\n \"pathBuf.appendPathPart(arg, localError);\",\n \"pathBuf.append('/', localError); pathBuf.append(arg, localError);\")\n\n def build(self):\n for filename in glob.glob(\"patches/*.patch\"):\n self.output.info('applying patch \"%s\"' % filename)\n tools.patch(base_path=self._source_subfolder, patch_file=filename)\n\n if self._is_msvc:\n run_configure_icu_file = os.path.join(self._source_subfolder, 'source', 'runConfigureICU')\n\n flags = \"-%s\" % self.settings.compiler.runtime\n if self.settings.get_safe(\"build_type\") in ['Debug', 'RelWithDebInfo'] and Version(self.settings.compiler.version) >= \"12\":\n flags += \" -FS\"\n tools.replace_in_file(run_configure_icu_file, \"-MDd\", flags)\n tools.replace_in_file(run_configure_icu_file, \"-MD\", flags)\n\n self._workaround_icu_20545()\n\n self._env_build = AutoToolsBuildEnvironment(self)\n if not self.options.get_safe(\"shared\"):\n self._env_build.defines.append(\"U_STATIC_IMPLEMENTATION\")\n if tools.is_apple_os(self.settings.os):\n self._env_build.defines.append(\"_DARWIN_C_SOURCE\")\n if self.settings.get_safe(\"os.version\"):\n self._env_build.flags.append(tools.apple_deployment_target_flag(self.settings.os,\n self.settings.os.version))\n\n if \"msys2\" in self.deps_user_info:\n self._env_build.vars[\"PYTHON\"] = tools.unix_path(os.path.join(self.deps_env_info[\"msys2\"].MSYS_BIN, \"python\"), tools.MSYS2)\n\n build_dir = os.path.join(self.build_folder, self._source_subfolder, 'build')\n 
os.mkdir(build_dir)\n\n with tools.vcvars(self.settings) if self._is_msvc else tools.no_op():\n with tools.environment_append(self._env_build.vars):\n with tools.chdir(build_dir):\n # workaround for https://unicode-org.atlassian.net/browse/ICU-20531\n os.makedirs(os.path.join(\"data\", \"out\", \"tmp\"))\n\n self.run(self._build_config_cmd, win_bash=tools.os_info.is_windows)\n if self.options.get_safe(\"silent\"):\n silent = '--silent' if self.options.silent else 'VERBOSE=1'\n else:\n silent = '--silent'\n command = \"make {silent} -j {cpu_count}\".format(silent=silent,\n cpu_count=tools.cpu_count())\n self.run(command, win_bash=tools.os_info.is_windows)\n if self.options.get_safe(\"with_unit_tests\"):\n command = \"make {silent} check\".format(silent=silent)\n self.run(command, win_bash=tools.os_info.is_windows)\n command = \"make {silent} install\".format(silent=silent)\n self.run(command, win_bash=tools.os_info.is_windows)\n\n self._install_name_tool()\n\n def package(self):\n if self._is_msvc:\n for dll in glob.glob(os.path.join(self.package_folder, 'lib', '*.dll')):\n shutil.move(dll, os.path.join(self.package_folder, 'bin'))\n\n self.copy(\"LICENSE\", dst=\"licenses\", src=os.path.join(self.source_folder, self._source_subfolder))\n tools.rmdir(os.path.join(self.package_folder, \"lib\", \"pkgconfig\"))\n tools.rmdir(os.path.join(self.package_folder, \"share\"))\n\n @staticmethod\n def detected_os():\n if tools.OSInfo().is_macos:\n return \"Macos\"\n if tools.OSInfo().is_windows:\n return \"Windows\"\n return platform.system()\n\n @property\n def cross_building(self):\n if tools.cross_building(self.settings):\n if self.settings.os == self.detected_os():\n if self.settings.arch == \"x86\" and tools.detected_architecture() == \"x86_64\":\n return False\n return True\n return False\n\n @property\n def build_config_args(self):\n prefix = self.package_folder.replace('\\\\', '/')\n platform = {(\"Windows\", \"Visual Studio\"): \"Cygwin/MSVC\",\n (\"Windows\", \"gcc\"): \"MinGW\",\n (\"AIX\", \"gcc\"): \"AIX/GCC\",\n (\"AIX\", \"xlc\"): \"AIX\",\n (\"SunOS\", \"gcc\"): \"Solaris/GCC\",\n (\"Linux\", \"gcc\"): \"Linux/gcc\",\n (\"Linux\", \"clang\"): \"Linux\",\n (\"Macos\", \"gcc\"): \"MacOSX\",\n (\"Macos\", \"clang\"): \"MacOSX\",\n (\"Macos\", \"apple-clang\"): \"MacOSX\"}.get((str(self.settings.os),\n str(self.settings.compiler)))\n arch64 = ['x86_64', 'sparcv9', 'ppc64']\n bits = \"64\" if self.settings.arch in arch64 else \"32\"\n args = [platform,\n \"--prefix={0}\".format(prefix),\n \"--with-library-bits={0}\".format(bits),\n \"--disable-samples\",\n \"--disable-layout\",\n \"--disable-layoutex\"]\n \n if not self.options.with_dyload:\n args += [\"--disable-dyload\"]\n\n if self.cross_building:\n if self._env_build.build:\n args.append(\"--build=%s\" % self._env_build.build)\n if self._env_build.host:\n args.append(\"--host=%s\" % self._env_build.host)\n if self._env_build.target:\n args.append(\"--target=%s\" % self._env_build.target)\n\n if self.options.get_safe(\"data_packaging\"):\n args.append(\"--with-data-packaging={0}\".format(self.options.data_packaging))\n else:\n args.append(\"--with-data-packaging=static\")\n datadir = os.path.join(self.package_folder, \"lib\")\n datadir = datadir.replace(\"\\\\\", \"/\") if tools.os_info.is_windows else datadir\n args.append(\"--datarootdir=%s\" % datadir) # do not use share\n bindir = os.path.join(self.package_folder, \"bin\")\n bindir = bindir.replace(\"\\\\\", \"/\") if tools.os_info.is_windows else bindir\n 
args.append(\"--sbindir=%s\" % bindir)\n\n if self._is_mingw:\n mingw_chost = 'i686-w64-mingw32' if self.settings.arch == 'x86' else 'x86_64-w64-mingw32'\n args.extend([\"--build={0}\".format(mingw_chost),\n \"--host={0}\".format(mingw_chost)])\n\n if self.settings.get_safe(\"build_type\") == \"Debug\":\n args.extend([\"--disable-release\", \"--enable-debug\"])\n if self.options.get_safe(\"shared\"):\n args.extend([\"--disable-static\", \"--enable-shared\"])\n else:\n args.extend([\"--enable-static\", \"--disable-shared\"])\n if not self.options.get_safe(\"with_unit_tests\"):\n args.append('--disable-tests')\n return args\n\n @property\n def _build_config_cmd(self):\n return \"../source/runConfigureICU %s\" % \" \".join(self.build_config_args)\n\n def _install_name_tool(self):\n if tools.is_apple_os(self.settings.os):\n with tools.chdir(os.path.join(self.package_folder, 'lib')):\n for dylib in glob.glob('*icu*.{0}.dylib'.format(self.version)):\n command = 'install_name_tool -id {0} {1}'.format(os.path.basename(dylib), dylib)\n self.output.info(command)\n self.run(command)\n\n def package_id(self):\n del self.info.options.with_unit_tests # ICU unit testing shouldn't affect the package's ID\n del self.info.options.silent # Verbosity doesn't affect package's ID\n\n def config_options(self):\n if self.settings.os == \"Windows\":\n del self.options.fPIC\n\n def package_info(self):\n self.cpp_info.names['cmake_find_package'] = 'ICU'\n self.cpp_info.names['cmake_find_package_multi'] = 'ICU'\n\n def lib_name(lib):\n name = lib\n if self.settings.os == \"Windows\":\n if not self.options.shared:\n name = 's' + name\n if self.settings.build_type == \"Debug\":\n name += 'd'\n return name\n\n libs = ['icuin' if self.settings.os == \"Windows\" else 'icui18n',\n 'icuio', 'icutest', 'icutu', 'icuuc',\n 'icudt' if self.settings.os == \"Windows\" else 'icudata']\n self.cpp_info.libs = [lib_name(lib) for lib in libs]\n self.cpp_info.bindirs.append('lib')\n\n data_dir_name = self.name\n if self.settings.os == \"Windows\" and self.settings.build_type == \"Debug\":\n data_dir_name += 'd'\n data_dir = os.path.join(self.package_folder, 'lib', data_dir_name, self.version)\n vtag = self.version.split('.')[0]\n data_file = \"icudt{v}l.dat\".format(v=vtag)\n data_path = os.path.join(data_dir, data_file).replace('\\\\', '/')\n if self.options.get_safe(\"data_packaging\") in [\"files\", \"archive\"]:\n self.env_info.ICU_DATA.append(data_path)\n\n if not self.options.shared:\n self.cpp_info.defines.append(\"U_STATIC_IMPLEMENTATION\")\n if self.settings.os == 'Linux' and self.options.with_dyload:\n self.cpp_info.libs.append('dl')\n\n if self.settings.os == 'Windows':\n self.cpp_info.libs.append('advapi32')\n", "path": "recipes/icu/all/conanfile.py"}], "after_files": [{"content": "import os\nimport glob\nimport platform\nimport shutil\nfrom conans import ConanFile, tools, AutoToolsBuildEnvironment\nfrom conans.tools import Version\n\n\nclass ICUBase(ConanFile):\n name = \"icu\"\n homepage = \"http://site.icu-project.org\"\n license = \"ICU\"\n description = \"ICU is a mature, widely used set of C/C++ and Java libraries \" \\\n \"providing Unicode and Globalization support for software applications.\"\n url = \"https://github.com/conan-io/conan-center-index\"\n topics = (\"conan\", \"icu\", \"icu4c\", \"i see you\", \"unicode\")\n _source_subfolder = \"source_subfolder\"\n _build_subfolder = \"build_subfolder\"\n _env_build = None\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n exports = 
[\"patches/*.patch\"]\n options = {\"shared\": [True, False],\n \"fPIC\": [True, False],\n \"data_packaging\": [\"files\", \"archive\", \"library\", \"static\"],\n \"with_unit_tests\": [True, False],\n \"silent\": [True, False],\n \"with_dyload\": [True, False]}\n default_options = {\"shared\": False,\n \"fPIC\": True,\n \"data_packaging\": \"archive\",\n \"with_unit_tests\": False,\n \"silent\": True,\n \"with_dyload\": True}\n\n @property\n def _is_msvc(self):\n return self.settings.compiler == \"Visual Studio\"\n\n @property\n def _is_mingw(self):\n return self.settings.os == \"Windows\" and self.settings.compiler == \"gcc\"\n\n def build_requirements(self):\n if tools.os_info.is_windows and \"CONAN_BASH_PATH\" not in os.environ and \\\n tools.os_info.detect_windows_subsystem() != \"msys2\":\n self.build_requires(\"msys2/20190524\")\n\n def source(self):\n tools.get(**self.conan_data[\"sources\"][self.version])\n os.rename(\"icu\", self._source_subfolder)\n\n def _workaround_icu_20545(self):\n if tools.os_info.is_windows:\n # https://unicode-org.atlassian.net/projects/ICU/issues/ICU-20545\n srcdir = os.path.join(self.build_folder, self._source_subfolder, \"source\")\n makeconv_cpp = os.path.join(srcdir, \"tools\", \"makeconv\", \"makeconv.cpp\")\n tools.replace_in_file(makeconv_cpp,\n \"pathBuf.appendPathPart(arg, localError);\",\n \"pathBuf.append('/', localError); pathBuf.append(arg, localError);\")\n\n def build(self):\n for p in self.conan_data[\"patches\"][self.version]:\n tools.patch(**p)\n if self._is_msvc:\n run_configure_icu_file = os.path.join(self._source_subfolder, 'source', 'runConfigureICU')\n\n flags = \"-%s\" % self.settings.compiler.runtime\n if self.settings.get_safe(\"build_type\") in ['Debug', 'RelWithDebInfo'] and Version(self.settings.compiler.version) >= \"12\":\n flags += \" -FS\"\n tools.replace_in_file(run_configure_icu_file, \"-MDd\", flags)\n tools.replace_in_file(run_configure_icu_file, \"-MD\", flags)\n\n self._workaround_icu_20545()\n\n self._env_build = AutoToolsBuildEnvironment(self)\n if not self.options.get_safe(\"shared\"):\n self._env_build.defines.append(\"U_STATIC_IMPLEMENTATION\")\n if tools.is_apple_os(self.settings.os):\n self._env_build.defines.append(\"_DARWIN_C_SOURCE\")\n if self.settings.get_safe(\"os.version\"):\n self._env_build.flags.append(tools.apple_deployment_target_flag(self.settings.os,\n self.settings.os.version))\n\n if \"msys2\" in self.deps_user_info:\n self._env_build.vars[\"PYTHON\"] = tools.unix_path(os.path.join(self.deps_env_info[\"msys2\"].MSYS_BIN, \"python\"), tools.MSYS2)\n\n build_dir = os.path.join(self.build_folder, self._source_subfolder, 'build')\n os.mkdir(build_dir)\n\n with tools.vcvars(self.settings) if self._is_msvc else tools.no_op():\n with tools.environment_append(self._env_build.vars):\n with tools.chdir(build_dir):\n # workaround for https://unicode-org.atlassian.net/browse/ICU-20531\n os.makedirs(os.path.join(\"data\", \"out\", \"tmp\"))\n\n self.run(self._build_config_cmd, win_bash=tools.os_info.is_windows)\n if self.options.get_safe(\"silent\"):\n silent = '--silent' if self.options.silent else 'VERBOSE=1'\n else:\n silent = '--silent'\n command = \"make {silent} -j {cpu_count}\".format(silent=silent,\n cpu_count=tools.cpu_count())\n self.run(command, win_bash=tools.os_info.is_windows)\n if self.options.get_safe(\"with_unit_tests\"):\n command = \"make {silent} check\".format(silent=silent)\n self.run(command, win_bash=tools.os_info.is_windows)\n command = \"make {silent} 
install\".format(silent=silent)\n self.run(command, win_bash=tools.os_info.is_windows)\n\n self._install_name_tool()\n\n def package(self):\n for dll in glob.glob(os.path.join(self.package_folder, 'lib', '*.dll')):\n shutil.move(dll, os.path.join(self.package_folder, 'bin'))\n\n self.copy(\"LICENSE\", dst=\"licenses\", src=os.path.join(self.source_folder, self._source_subfolder))\n tools.rmdir(os.path.join(self.package_folder, \"lib\", \"pkgconfig\"))\n tools.rmdir(os.path.join(self.package_folder, \"share\"))\n\n @property\n def build_config_args(self):\n prefix = self.package_folder.replace('\\\\', '/')\n platform = {(\"Windows\", \"Visual Studio\"): \"Cygwin/MSVC\",\n (\"Windows\", \"gcc\"): \"MinGW\",\n (\"AIX\", \"gcc\"): \"AIX/GCC\",\n (\"AIX\", \"xlc\"): \"AIX\",\n (\"SunOS\", \"gcc\"): \"Solaris/GCC\",\n (\"Linux\", \"gcc\"): \"Linux/gcc\",\n (\"Linux\", \"clang\"): \"Linux\",\n (\"Macos\", \"gcc\"): \"MacOSX\",\n (\"Macos\", \"clang\"): \"MacOSX\",\n (\"Macos\", \"apple-clang\"): \"MacOSX\"}.get((str(self.settings.os),\n str(self.settings.compiler)))\n arch64 = ['x86_64', 'sparcv9', 'ppc64']\n bits = \"64\" if self.settings.arch in arch64 else \"32\"\n args = [platform,\n \"--prefix={0}\".format(prefix),\n \"--with-library-bits={0}\".format(bits),\n \"--disable-samples\",\n \"--disable-layout\",\n \"--disable-layoutex\",\n \"--disable-extras\"]\n \n if not self.options.with_dyload:\n args += [\"--disable-dyload\"]\n\n if tools.cross_building(self.settings, skip_x64_x86=True):\n if self._env_build.build:\n args.append(\"--build=%s\" % self._env_build.build)\n if self._env_build.host:\n args.append(\"--host=%s\" % self._env_build.host)\n if self._env_build.target:\n args.append(\"--target=%s\" % self._env_build.target)\n\n if self.options.get_safe(\"data_packaging\"):\n args.append(\"--with-data-packaging={0}\".format(self.options.data_packaging))\n else:\n args.append(\"--with-data-packaging=static\")\n datadir = os.path.join(self.package_folder, \"lib\")\n datadir = datadir.replace(\"\\\\\", \"/\") if tools.os_info.is_windows else datadir\n args.append(\"--datarootdir=%s\" % datadir) # do not use share\n bindir = os.path.join(self.package_folder, \"bin\")\n bindir = bindir.replace(\"\\\\\", \"/\") if tools.os_info.is_windows else bindir\n args.append(\"--sbindir=%s\" % bindir)\n\n if self._is_mingw:\n mingw_chost = 'i686-w64-mingw32' if self.settings.arch == 'x86' else 'x86_64-w64-mingw32'\n args.extend([\"--build={0}\".format(mingw_chost),\n \"--host={0}\".format(mingw_chost)])\n\n if self.settings.get_safe(\"build_type\") == \"Debug\":\n args.extend([\"--disable-release\", \"--enable-debug\"])\n if self.options.get_safe(\"shared\"):\n args.extend([\"--disable-static\", \"--enable-shared\"])\n else:\n args.extend([\"--enable-static\", \"--disable-shared\"])\n if not self.options.get_safe(\"with_unit_tests\"):\n args.append('--disable-tests')\n return args\n\n @property\n def _build_config_cmd(self):\n return \"../source/runConfigureICU %s\" % \" \".join(self.build_config_args)\n\n def _install_name_tool(self):\n if tools.is_apple_os(self.settings.os):\n with tools.chdir(os.path.join(self.package_folder, 'lib')):\n for dylib in glob.glob('*icu*.{0}.dylib'.format(self.version)):\n command = 'install_name_tool -id {0} {1}'.format(os.path.basename(dylib), dylib)\n self.output.info(command)\n self.run(command)\n\n def package_id(self):\n del self.info.options.with_unit_tests # ICU unit testing shouldn't affect the package's ID\n del self.info.options.silent # Verbosity doesn't affect 
package's ID\n\n def config_options(self):\n if self.settings.os == \"Windows\":\n del self.options.fPIC\n\n def package_info(self):\n self.cpp_info.names['cmake_find_package'] = 'ICU'\n self.cpp_info.names['cmake_find_package_multi'] = 'ICU'\n\n def lib_name(lib):\n name = lib\n if self.settings.os == \"Windows\":\n if not self.options.shared:\n name = 's' + name\n if self.settings.build_type == \"Debug\":\n name += 'd'\n return name\n\n libs = ['icuin' if self.settings.os == \"Windows\" else 'icui18n',\n 'icuio', 'icutest', 'icutu', 'icuuc',\n 'icudt' if self.settings.os == \"Windows\" else 'icudata']\n self.cpp_info.libs = [lib_name(lib) for lib in libs]\n self.cpp_info.bindirs.append('lib')\n\n data_dir_name = self.name\n if self.settings.os == \"Windows\" and self.settings.build_type == \"Debug\":\n data_dir_name += 'd'\n data_dir = os.path.join(self.package_folder, 'lib', data_dir_name, self.version)\n vtag = self.version.split('.')[0]\n data_file = \"icudt{v}l.dat\".format(v=vtag)\n data_path = os.path.join(data_dir, data_file).replace('\\\\', '/')\n if self.options.get_safe(\"data_packaging\") in [\"files\", \"archive\"]:\n self.env_info.ICU_DATA.append(data_path)\n\n if not self.options.shared:\n self.cpp_info.defines.append(\"U_STATIC_IMPLEMENTATION\")\n if self.settings.os == 'Linux':\n if self.options.with_dyload:\n self.cpp_info.system_libs.append('dl')\n self.cpp_info.system_libs.append('pthread')\n\n if self.settings.os == 'Windows':\n self.cpp_info.system_libs.append('advapi32')\n", "path": "recipes/icu/all/conanfile.py"}]}
| 3,677 | 881 |
gh_patches_debug_55160
|
rasdani/github-patches
|
git_diff
|
spack__spack-43770
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Installation issue: nettle fails to build due to undocumented openssl dependency?
### Steps to reproduce the issue
```console
$ spack spec -I <spec>
Input spec
--------------------------------
- nettle
Concretized
--------------------------------
- [email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected]~guile build_system=generic arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected]+cxx build_system=autotools libs=shared,static patches=69ad2e2 arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected] build_system=autotools patches=35c4492,7793209,a49dd5b arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected]+cpanm+opcode+open+shared+threads build_system=generic patches=714e4d1 arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected]+cxx~docs+stl build_system=autotools patches=26090f4,b231fcc arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected]~debug~pic+shared build_system=generic arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected] build_system=autotools patches=bbf97f1 arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected]~symlinks+termlib abi=none build_system=autotools arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected]+compat+opt build_system=autotools arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected]+sigsegv build_system=autotools patches=9dc5fbd,bfdffa7 arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected] build_system=autotools libs=shared,static arch=linux-centos7-x86_64
[+] ^[email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64
```
### Error message
<details><summary>Error message</summary>
<pre>
==> nettle: Executing phase: 'build'
==> Error: ProcessError: Command exited with status 2:
'make' '-j16' 'V=1'
...
1151 nettle-openssl.o: In function `openssl_md5_init':
>> 1152 /localData/000scratch/rowanw/spack-stage/spack-stage-nettle-3.9.1-bv6yy4efn7x73ybk5at6bly7tplvvul
5/spack-src/examples/nettle-openssl.c:408: undefined reference to `EVP_MD_CTX_new'
1153 nettle-openssl.o: In function `openssl_sha1_init':
>> 1154 /localData/000scratch/rowanw/spack-stage/spack-stage-nettle-3.9.1-bv6yy4efn7x73ybk5at6bly7tplvvul
5/spack-src/examples/nettle-openssl.c:409: undefined reference to `EVP_MD_CTX_new'
>> 1155 collect2: error: ld returned 1 exit status
>> 1156 make[1]: *** [Makefile:100: nettle-benchmark] Error 1
</pre></details>
### Information on your system
* **Spack:** 0.21.0 (c35700db51bfc673798643697df3ef0e8a5177f1)
* **Python:** 3.8.18
* **Platform:** linux-centos7-ivybridge
* **Concretizer:** clingo
### Additional information
A [quick google](https://stackoverflow.com/questions/46768071/openssl-linking-undefined-reference-evp-md-ctx-new-and-fre) of the error message suggests this is due to linking against an old openssl version, which checks out as I'm running on centos 7 and the default system libcrypto does not include the missing symbol while a newer version does:
```
$ ls -al /lib64/libcrypto.so
lrwxrwxrwx 1 root root 19 Apr 11 2023 /lib64/libcrypto.so -> libcrypto.so.1.0.2k
$ nm --dynamic /lib64/libcrypto.so.1.0.2k |grep EVP_MD_CTX_new
$ nm --dynamic /lib64/libcrypto.so.1.1.1k |grep EVP_MD_CTX_new
000000000015be20 T EVP_MD_CTX_new
```
Obviously spack shouldn't be relying on the system library; the nettle package doesn't specify any kind of dependency on openssl so that seems like a bug.
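For illustration, one way the recipe could avoid pulling in any libcrypto at all is to disable nettle's openssl example glue instead of depending on openssl — a minimal sketch, assuming nettle's `--disable-openssl` configure flag (abridged class body, not the full package):

```python
# Sketch only: stop the examples/nettle-openssl.c benchmark code from being
# built, so nothing links against the (old) system libcrypto.
from spack.package import *


class Nettle(AutotoolsPackage, GNUMirrorPackage):
    depends_on("gmp")
    depends_on("m4", type="build")

    def configure_args(self):
        # --disable-openssl removes the only consumer of EVP_MD_CTX_new seen
        # in the failing link step above.
        return ["--disable-openssl", "CFLAGS={0}".format(self.compiler.c99_flag)]
```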
### General information
- [X] I have run `spack debug report` and reported the version of Spack/Python/Platform
- [X] I have run `spack maintainers <name-of-the-package>` and **@mentioned** any maintainers
- [X] I have uploaded the build log and environment files
- [X] I have searched the issues of this repo and believe this is not a duplicate
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `var/spack/repos/builtin/packages/nettle/package.py`
Content:
```
1 # Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
2 # Spack Project Developers. See the top-level COPYRIGHT file for details.
3 #
4 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
5
6 from spack.package import *
7
8
9 class Nettle(AutotoolsPackage, GNUMirrorPackage):
10 """The Nettle package contains the low-level cryptographic library
11 that is designed to fit easily in many contexts."""
12
13 homepage = "https://www.lysator.liu.se/~nisse/nettle/"
14 gnu_mirror_path = "nettle/nettle-3.3.tar.gz"
15
16 license("GPL-2.0-or-later OR LGPL-3.0-or-later")
17
18 version("3.9.1", sha256="ccfeff981b0ca71bbd6fbcb054f407c60ffb644389a5be80d6716d5b550c6ce3")
19 version("3.8.1", sha256="364f3e2b77cd7dcde83fd7c45219c834e54b0c75e428b6f894a23d12dd41cbfe")
20 version("3.4.1", sha256="f941cf1535cd5d1819be5ccae5babef01f6db611f9b5a777bae9c7604b8a92ad")
21 version("3.4", sha256="ae7a42df026550b85daca8389b6a60ba6313b0567f374392e54918588a411e94")
22 version("3.3", sha256="46942627d5d0ca11720fec18d81fc38f7ef837ea4197c1f630e71ce0d470b11e")
23 version("3.2", sha256="ea4283def236413edab5a4cf9cf32adf540c8df1b9b67641cfc2302fca849d97")
24 version("2.7.1", sha256="bc71ebd43435537d767799e414fce88e521b7278d48c860651216e1fc6555b40")
25 version("2.7", sha256="c294ea133c05382cc2effb1734d49f4abeb1ad8515543a333de49a11422cd4d6")
26
27 depends_on("gmp")
28 depends_on("m4", type="build")
29 depends_on("openssl")
30
31 def configure_args(self):
32 return ["CFLAGS={0}".format(self.compiler.c99_flag)]
33
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/var/spack/repos/builtin/packages/nettle/package.py b/var/spack/repos/builtin/packages/nettle/package.py
--- a/var/spack/repos/builtin/packages/nettle/package.py
+++ b/var/spack/repos/builtin/packages/nettle/package.py
@@ -26,7 +26,11 @@
depends_on("gmp")
depends_on("m4", type="build")
- depends_on("openssl")
+
+ def flag_handler(self, name, flags):
+ if name == "cflags":
+ flags.append(self.compiler.c99_flag)
+ return (flags, None, None)
def configure_args(self):
- return ["CFLAGS={0}".format(self.compiler.c99_flag)]
+ return ["--disable-openssl"]
|
{"golden_diff": "diff --git a/var/spack/repos/builtin/packages/nettle/package.py b/var/spack/repos/builtin/packages/nettle/package.py\n--- a/var/spack/repos/builtin/packages/nettle/package.py\n+++ b/var/spack/repos/builtin/packages/nettle/package.py\n@@ -26,7 +26,11 @@\n \n depends_on(\"gmp\")\n depends_on(\"m4\", type=\"build\")\n- depends_on(\"openssl\")\n+\n+ def flag_handler(self, name, flags):\n+ if name == \"cflags\":\n+ flags.append(self.compiler.c99_flag)\n+ return (flags, None, None)\n \n def configure_args(self):\n- return [\"CFLAGS={0}\".format(self.compiler.c99_flag)]\n+ return [\"--disable-openssl\"]\n", "issue": "Installation issue: nettle fails to build due to undocumented openssl dependency?\n### Steps to reproduce the issue\n\n```console\r\n$ spack spec -I <spec>\r\nInput spec\r\n--------------------------------\r\n - nettle\r\n\r\nConcretized\r\n--------------------------------\r\n - [email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected]~guile build_system=generic arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected]+cxx build_system=autotools libs=shared,static patches=69ad2e2 arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected] build_system=autotools patches=35c4492,7793209,a49dd5b arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected]+cpanm+opcode+open+shared+threads build_system=generic patches=714e4d1 arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected]+cxx~docs+stl build_system=autotools patches=26090f4,b231fcc arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected]~debug~pic+shared build_system=generic arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected] build_system=autotools patches=bbf97f1 arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected]~symlinks+termlib abi=none build_system=autotools arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected]+compat+opt build_system=autotools arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected]+sigsegv build_system=autotools patches=9dc5fbd,bfdffa7 arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected] build_system=autotools libs=shared,static arch=linux-centos7-x86_64\r\n[+] ^[email protected]%[email protected] build_system=autotools arch=linux-centos7-x86_64\r\n```\r\n\n\n### Error message\n\n<details><summary>Error message</summary>\r\n\r\n<pre>\r\n==> nettle: Executing phase: 'build'\r\n==> Error: ProcessError: Command exited with status 2:\r\n 'make' '-j16' 'V=1'\r\n...\r\n 1151 nettle-openssl.o: In function `openssl_md5_init':\r\n >> 1152 /localData/000scratch/rowanw/spack-stage/spack-stage-nettle-3.9.1-bv6yy4efn7x73ybk5at6bly7tplvvul\r\n 5/spack-src/examples/nettle-openssl.c:408: undefined reference to `EVP_MD_CTX_new'\r\n 1153 nettle-openssl.o: In function `openssl_sha1_init':\r\n >> 1154 /localData/000scratch/rowanw/spack-stage/spack-stage-nettle-3.9.1-bv6yy4efn7x73ybk5at6bly7tplvvul\r\n 5/spack-src/examples/nettle-openssl.c:409: 
undefined reference to `EVP_MD_CTX_new'\r\n >> 1155 collect2: error: ld returned 1 exit status\r\n >> 1156 make[1]: *** [Makefile:100: nettle-benchmark] Error 1\r\n</pre></details>\r\n\n\n### Information on your system\n\n* **Spack:** 0.21.0 (c35700db51bfc673798643697df3ef0e8a5177f1)\r\n* **Python:** 3.8.18\r\n* **Platform:** linux-centos7-ivybridge\r\n* **Concretizer:** clingo\n\n### Additional information\n\nA [quick google](https://stackoverflow.com/questions/46768071/openssl-linking-undefined-reference-evp-md-ctx-new-and-fre) of the error message suggests this is due to linking against an old openssl version, which checks out as I'm running on centos 7 and the default system libcrypto does not include the missing symbol while a newer version does:\r\n\r\n```\r\n$ ls -al /lib64/libcrypto.so\r\nlrwxrwxrwx 1 root root 19 Apr 11 2023 /lib64/libcrypto.so -> libcrypto.so.1.0.2k\r\n\r\n$ nm --dynamic /lib64/libcrypto.so.1.0.2k |grep EVP_MD_CTX_new\r\n\r\n$ nm --dynamic /lib64/libcrypto.so.1.1.1k |grep EVP_MD_CTX_new\r\n000000000015be20 T EVP_MD_CTX_new\r\n```\r\n\r\nObviously spack shouldn't be relying on the system library; the nettle package doesn't specify any kind of dependency on openssl so that seems like a bug.\n\n### General information\n\n- [X] I have run `spack debug report` and reported the version of Spack/Python/Platform\n- [X] I have run `spack maintainers <name-of-the-package>` and **@mentioned** any maintainers\n- [X] I have uploaded the build log and environment files\n- [X] I have searched the issues of this repo and believe this is not a duplicate\n", "before_files": [{"content": "# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other\n# Spack Project Developers. See the top-level COPYRIGHT file for details.\n#\n# SPDX-License-Identifier: (Apache-2.0 OR MIT)\n\nfrom spack.package import *\n\n\nclass Nettle(AutotoolsPackage, GNUMirrorPackage):\n \"\"\"The Nettle package contains the low-level cryptographic library\n that is designed to fit easily in many contexts.\"\"\"\n\n homepage = \"https://www.lysator.liu.se/~nisse/nettle/\"\n gnu_mirror_path = \"nettle/nettle-3.3.tar.gz\"\n\n license(\"GPL-2.0-or-later OR LGPL-3.0-or-later\")\n\n version(\"3.9.1\", sha256=\"ccfeff981b0ca71bbd6fbcb054f407c60ffb644389a5be80d6716d5b550c6ce3\")\n version(\"3.8.1\", sha256=\"364f3e2b77cd7dcde83fd7c45219c834e54b0c75e428b6f894a23d12dd41cbfe\")\n version(\"3.4.1\", sha256=\"f941cf1535cd5d1819be5ccae5babef01f6db611f9b5a777bae9c7604b8a92ad\")\n version(\"3.4\", sha256=\"ae7a42df026550b85daca8389b6a60ba6313b0567f374392e54918588a411e94\")\n version(\"3.3\", sha256=\"46942627d5d0ca11720fec18d81fc38f7ef837ea4197c1f630e71ce0d470b11e\")\n version(\"3.2\", sha256=\"ea4283def236413edab5a4cf9cf32adf540c8df1b9b67641cfc2302fca849d97\")\n version(\"2.7.1\", sha256=\"bc71ebd43435537d767799e414fce88e521b7278d48c860651216e1fc6555b40\")\n version(\"2.7\", sha256=\"c294ea133c05382cc2effb1734d49f4abeb1ad8515543a333de49a11422cd4d6\")\n\n depends_on(\"gmp\")\n depends_on(\"m4\", type=\"build\")\n depends_on(\"openssl\")\n\n def configure_args(self):\n return [\"CFLAGS={0}\".format(self.compiler.c99_flag)]\n", "path": "var/spack/repos/builtin/packages/nettle/package.py"}], "after_files": [{"content": "# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other\n# Spack Project Developers. 
See the top-level COPYRIGHT file for details.\n#\n# SPDX-License-Identifier: (Apache-2.0 OR MIT)\n\nfrom spack.package import *\n\n\nclass Nettle(AutotoolsPackage, GNUMirrorPackage):\n \"\"\"The Nettle package contains the low-level cryptographic library\n that is designed to fit easily in many contexts.\"\"\"\n\n homepage = \"https://www.lysator.liu.se/~nisse/nettle/\"\n gnu_mirror_path = \"nettle/nettle-3.3.tar.gz\"\n\n license(\"GPL-2.0-or-later OR LGPL-3.0-or-later\")\n\n version(\"3.9.1\", sha256=\"ccfeff981b0ca71bbd6fbcb054f407c60ffb644389a5be80d6716d5b550c6ce3\")\n version(\"3.8.1\", sha256=\"364f3e2b77cd7dcde83fd7c45219c834e54b0c75e428b6f894a23d12dd41cbfe\")\n version(\"3.4.1\", sha256=\"f941cf1535cd5d1819be5ccae5babef01f6db611f9b5a777bae9c7604b8a92ad\")\n version(\"3.4\", sha256=\"ae7a42df026550b85daca8389b6a60ba6313b0567f374392e54918588a411e94\")\n version(\"3.3\", sha256=\"46942627d5d0ca11720fec18d81fc38f7ef837ea4197c1f630e71ce0d470b11e\")\n version(\"3.2\", sha256=\"ea4283def236413edab5a4cf9cf32adf540c8df1b9b67641cfc2302fca849d97\")\n version(\"2.7.1\", sha256=\"bc71ebd43435537d767799e414fce88e521b7278d48c860651216e1fc6555b40\")\n version(\"2.7\", sha256=\"c294ea133c05382cc2effb1734d49f4abeb1ad8515543a333de49a11422cd4d6\")\n\n depends_on(\"gmp\")\n depends_on(\"m4\", type=\"build\")\n\n def flag_handler(self, name, flags):\n if name == \"cflags\":\n flags.append(self.compiler.c99_flag)\n return (flags, None, None)\n\n def configure_args(self):\n return [\"--disable-openssl\"]\n", "path": "var/spack/repos/builtin/packages/nettle/package.py"}]}
| 2,712 | 170 |
gh_patches_debug_21918
|
rasdani/github-patches
|
git_diff
|
pre-commit__pre-commit-79
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Skip hook if there are no files to run for it.
This blocks adding `flake8` as a hook as it explodes when there are no files.
This will also be a bit of a performance hack.
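A minimal sketch of the guard this implies (names assumed from pre-commit's per-hook runner, not a final implementation):

```python
# Hypothetical early-out: resolve the hook's matching files once and skip the
# hook entirely when the list is empty, reporting it instead of running it.
filenames = get_filenames(hook['files'], hook['exclude'])
if not filenames:
    write('{0} (no files to check) Skipped\n'.format(hook['name']))
    return 0
```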
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pre_commit/commands.py`
Content:
```
1 from __future__ import print_function
2
3 import logging
4 import os
5 import pkg_resources
6 import shutil
7 import stat
8 import subprocess
9 import sys
10 from asottile.ordereddict import OrderedDict
11 from asottile.yaml import ordered_dump
12 from asottile.yaml import ordered_load
13 from plumbum import local
14
15 import pre_commit.constants as C
16 from pre_commit import git
17 from pre_commit import color
18 from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
19 from pre_commit.clientlib.validate_config import load_config
20 from pre_commit.jsonschema_extensions import remove_defaults
21 from pre_commit.logging_handler import LoggingHandler
22 from pre_commit.repository import Repository
23 from pre_commit.staged_files_only import staged_files_only
24
25
26 logger = logging.getLogger('pre_commit')
27
28 COLS = int(subprocess.Popen(['tput', 'cols'], stdout=subprocess.PIPE).communicate()[0])
29
30 PASS_FAIL_LENGTH = 6
31
32
33 def install(runner):
34 """Install the pre-commit hooks."""
35 pre_commit_file = pkg_resources.resource_filename('pre_commit', 'resources/pre-commit.sh')
36 with open(runner.pre_commit_path, 'w') as pre_commit_file_obj:
37 pre_commit_file_obj.write(open(pre_commit_file).read())
38
39 original_mode = os.stat(runner.pre_commit_path).st_mode
40 os.chmod(
41 runner.pre_commit_path,
42 original_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH,
43 )
44
45 print('pre-commit installed at {0}'.format(runner.pre_commit_path))
46
47 return 0
48
49
50 def uninstall(runner):
51 """Uninstall the pre-commit hooks."""
52 if os.path.exists(runner.pre_commit_path):
53 os.remove(runner.pre_commit_path)
54 print('pre-commit uninstalled')
55 return 0
56
57
58 class RepositoryCannotBeUpdatedError(RuntimeError):
59 pass
60
61
62 def _update_repository(repo_config):
63 """Updates a repository to the tip of `master`. If the repository cannot
64 be updated because a hook that is configured does not exist in `master`,
65 this raises a RepositoryCannotBeUpdatedError
66
67 Args:
68 repo_config - A config for a repository
69 """
70 repo = Repository(repo_config)
71
72 with repo.in_checkout():
73 local['git']['fetch']()
74 head_sha = local['git']['rev-parse', 'origin/master']().strip()
75
76 # Don't bother trying to update if our sha is the same
77 if head_sha == repo_config['sha']:
78 return repo_config
79
80 # Construct a new config with the head sha
81 new_config = OrderedDict(repo_config)
82 new_config['sha'] = head_sha
83 new_repo = Repository(new_config)
84
85 # See if any of our hooks were deleted with the new commits
86 hooks = set(repo.hooks.keys())
87 hooks_missing = hooks - (hooks & set(new_repo.manifest.keys()))
88 if hooks_missing:
89 raise RepositoryCannotBeUpdatedError(
90 'Cannot update because the tip of master is missing these hooks:\n'
91 '{0}'.format(', '.join(sorted(hooks_missing)))
92 )
93
94 return remove_defaults([new_config], CONFIG_JSON_SCHEMA)[0]
95
96
97 def autoupdate(runner):
98 """Auto-update the pre-commit config to the latest versions of repos."""
99 retv = 0
100 output_configs = []
101 changed = False
102
103 input_configs = load_config(
104 runner.config_file_path,
105 load_strategy=ordered_load,
106 )
107
108 for repo_config in input_configs:
109 print('Updating {0}...'.format(repo_config['repo']), end='')
110 try:
111 new_repo_config = _update_repository(repo_config)
112 except RepositoryCannotBeUpdatedError as error:
113 print(error.args[0])
114 output_configs.append(repo_config)
115 retv = 1
116 continue
117
118 if new_repo_config['sha'] != repo_config['sha']:
119 changed = True
120 print(
121 'updating {0} -> {1}.'.format(
122 repo_config['sha'], new_repo_config['sha'],
123 )
124 )
125 output_configs.append(new_repo_config)
126 else:
127 print('already up to date.')
128 output_configs.append(repo_config)
129
130 if changed:
131 with open(runner.config_file_path, 'w') as config_file:
132 config_file.write(
133 ordered_dump(output_configs, **C.YAML_DUMP_KWARGS)
134 )
135
136 return retv
137
138
139 def clean(runner):
140 if os.path.exists(runner.hooks_workspace_path):
141 shutil.rmtree(runner.hooks_workspace_path)
142 print('Cleaned {0}.'.format(runner.hooks_workspace_path))
143 return 0
144
145
146 def _run_single_hook(runner, repository, hook_id, args, write):
147 if args.all_files:
148 get_filenames = git.get_all_files_matching
149 else:
150 get_filenames = git.get_staged_files_matching
151
152 hook = repository.hooks[hook_id]
153
154 # Print the hook and the dots first in case the hook takes hella long to
155 # run.
156 write(
157 '{0}{1}'.format(
158 hook['name'],
159 '.' * (COLS - len(hook['name']) - PASS_FAIL_LENGTH - 6),
160 ),
161 )
162 sys.stdout.flush()
163
164 retcode, stdout, stderr = repository.run_hook(
165 runner.cmd_runner,
166 hook_id,
167 get_filenames(hook['files'], hook['exclude']),
168 )
169
170 if retcode != repository.hooks[hook_id]['expected_return_value']:
171 retcode = 1
172 print_color = color.RED
173 pass_fail = 'Failed'
174 else:
175 retcode = 0
176 print_color = color.GREEN
177 pass_fail = 'Passed'
178
179 write(color.format_color(pass_fail, print_color, args.color) + '\n')
180
181 if (stdout or stderr) and (retcode or args.verbose):
182 write('\n')
183 for output in (stdout, stderr):
184 if output.strip():
185 write(output.strip() + '\n')
186 write('\n')
187
188 return retcode
189
190
191 def _run_hooks(runner, args, write):
192 """Actually run the hooks."""
193 retval = 0
194
195 for repo in runner.repositories:
196 for hook_id in repo.hooks:
197 retval |= _run_single_hook(runner, repo, hook_id, args, write=write)
198
199 return retval
200
201
202 def _run_hook(runner, hook_id, args, write):
203 for repo in runner.repositories:
204 if hook_id in repo.hooks:
205 return _run_single_hook(runner, repo, hook_id, args, write=write)
206 else:
207 write('No hook with id `{0}`\n'.format(hook_id))
208 return 1
209
210
211 def run(runner, args, write=sys.stdout.write):
212 # Set up our logging handler
213 logger.addHandler(LoggingHandler(args.color))
214 logger.setLevel(logging.INFO)
215
216 with staged_files_only(runner.cmd_runner):
217 if args.hook:
218 return _run_hook(runner, args.hook, args, write=write)
219 else:
220 return _run_hooks(runner, args, write=write)
221
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pre_commit/commands.py b/pre_commit/commands.py
--- a/pre_commit/commands.py
+++ b/pre_commit/commands.py
@@ -151,6 +151,26 @@
hook = repository.hooks[hook_id]
+ filenames = get_filenames(hook['files'], hook['exclude'])
+ if not filenames:
+ no_files_msg = '(no files to check) '
+ skipped_msg = 'Skipped'
+ write(
+ '{0}{1}{2}{3}\n'.format(
+ hook['name'],
+ '.' * (
+ COLS -
+ len(hook['name']) -
+ len(no_files_msg) -
+ len(skipped_msg) -
+ 6
+ ),
+ no_files_msg,
+ color.format_color(skipped_msg, color.TURQUOISE, args.color),
+ )
+ )
+ return 0
+
# Print the hook and the dots first in case the hook takes hella long to
# run.
write(
@@ -164,7 +184,7 @@
retcode, stdout, stderr = repository.run_hook(
runner.cmd_runner,
hook_id,
- get_filenames(hook['files'], hook['exclude']),
+ filenames,
)
if retcode != repository.hooks[hook_id]['expected_return_value']:
|
{"golden_diff": "diff --git a/pre_commit/commands.py b/pre_commit/commands.py\n--- a/pre_commit/commands.py\n+++ b/pre_commit/commands.py\n@@ -151,6 +151,26 @@\n \n hook = repository.hooks[hook_id]\n \n+ filenames = get_filenames(hook['files'], hook['exclude'])\n+ if not filenames:\n+ no_files_msg = '(no files to check) '\n+ skipped_msg = 'Skipped'\n+ write(\n+ '{0}{1}{2}{3}\\n'.format(\n+ hook['name'],\n+ '.' * (\n+ COLS -\n+ len(hook['name']) -\n+ len(no_files_msg) -\n+ len(skipped_msg) -\n+ 6\n+ ),\n+ no_files_msg,\n+ color.format_color(skipped_msg, color.TURQUOISE, args.color),\n+ )\n+ )\n+ return 0\n+\n # Print the hook and the dots first in case the hook takes hella long to\n # run.\n write(\n@@ -164,7 +184,7 @@\n retcode, stdout, stderr = repository.run_hook(\n runner.cmd_runner,\n hook_id,\n- get_filenames(hook['files'], hook['exclude']),\n+ filenames,\n )\n \n if retcode != repository.hooks[hook_id]['expected_return_value']:\n", "issue": "Skip hook if there are no files to run for it.\nThis blocks adding `flake8` as a hook as it explodes when there are no files.\n\nThis will also be a bit of a performance hack.\n\n", "before_files": [{"content": "from __future__ import print_function\n\nimport logging\nimport os\nimport pkg_resources\nimport shutil\nimport stat\nimport subprocess\nimport sys\nfrom asottile.ordereddict import OrderedDict\nfrom asottile.yaml import ordered_dump\nfrom asottile.yaml import ordered_load\nfrom plumbum import local\n\nimport pre_commit.constants as C\nfrom pre_commit import git\nfrom pre_commit import color\nfrom pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA\nfrom pre_commit.clientlib.validate_config import load_config\nfrom pre_commit.jsonschema_extensions import remove_defaults\nfrom pre_commit.logging_handler import LoggingHandler\nfrom pre_commit.repository import Repository\nfrom pre_commit.staged_files_only import staged_files_only\n\n\nlogger = logging.getLogger('pre_commit')\n\nCOLS = int(subprocess.Popen(['tput', 'cols'], stdout=subprocess.PIPE).communicate()[0])\n\nPASS_FAIL_LENGTH = 6\n\n\ndef install(runner):\n \"\"\"Install the pre-commit hooks.\"\"\"\n pre_commit_file = pkg_resources.resource_filename('pre_commit', 'resources/pre-commit.sh')\n with open(runner.pre_commit_path, 'w') as pre_commit_file_obj:\n pre_commit_file_obj.write(open(pre_commit_file).read())\n\n original_mode = os.stat(runner.pre_commit_path).st_mode\n os.chmod(\n runner.pre_commit_path,\n original_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH,\n )\n\n print('pre-commit installed at {0}'.format(runner.pre_commit_path))\n\n return 0\n\n\ndef uninstall(runner):\n \"\"\"Uninstall the pre-commit hooks.\"\"\"\n if os.path.exists(runner.pre_commit_path):\n os.remove(runner.pre_commit_path)\n print('pre-commit uninstalled')\n return 0\n\n\nclass RepositoryCannotBeUpdatedError(RuntimeError):\n pass\n\n\ndef _update_repository(repo_config):\n \"\"\"Updates a repository to the tip of `master`. 
If the repository cannot\n be updated because a hook that is configured does not exist in `master`,\n this raises a RepositoryCannotBeUpdatedError\n\n Args:\n repo_config - A config for a repository\n \"\"\"\n repo = Repository(repo_config)\n\n with repo.in_checkout():\n local['git']['fetch']()\n head_sha = local['git']['rev-parse', 'origin/master']().strip()\n\n # Don't bother trying to update if our sha is the same\n if head_sha == repo_config['sha']:\n return repo_config\n\n # Construct a new config with the head sha\n new_config = OrderedDict(repo_config)\n new_config['sha'] = head_sha\n new_repo = Repository(new_config)\n\n # See if any of our hooks were deleted with the new commits\n hooks = set(repo.hooks.keys())\n hooks_missing = hooks - (hooks & set(new_repo.manifest.keys()))\n if hooks_missing:\n raise RepositoryCannotBeUpdatedError(\n 'Cannot update because the tip of master is missing these hooks:\\n'\n '{0}'.format(', '.join(sorted(hooks_missing)))\n )\n\n return remove_defaults([new_config], CONFIG_JSON_SCHEMA)[0]\n\n\ndef autoupdate(runner):\n \"\"\"Auto-update the pre-commit config to the latest versions of repos.\"\"\"\n retv = 0\n output_configs = []\n changed = False\n\n input_configs = load_config(\n runner.config_file_path,\n load_strategy=ordered_load,\n )\n\n for repo_config in input_configs:\n print('Updating {0}...'.format(repo_config['repo']), end='')\n try:\n new_repo_config = _update_repository(repo_config)\n except RepositoryCannotBeUpdatedError as error:\n print(error.args[0])\n output_configs.append(repo_config)\n retv = 1\n continue\n\n if new_repo_config['sha'] != repo_config['sha']:\n changed = True\n print(\n 'updating {0} -> {1}.'.format(\n repo_config['sha'], new_repo_config['sha'],\n )\n )\n output_configs.append(new_repo_config)\n else:\n print('already up to date.')\n output_configs.append(repo_config)\n\n if changed:\n with open(runner.config_file_path, 'w') as config_file:\n config_file.write(\n ordered_dump(output_configs, **C.YAML_DUMP_KWARGS)\n )\n\n return retv\n\n\ndef clean(runner):\n if os.path.exists(runner.hooks_workspace_path):\n shutil.rmtree(runner.hooks_workspace_path)\n print('Cleaned {0}.'.format(runner.hooks_workspace_path))\n return 0\n\n\ndef _run_single_hook(runner, repository, hook_id, args, write):\n if args.all_files:\n get_filenames = git.get_all_files_matching\n else:\n get_filenames = git.get_staged_files_matching\n\n hook = repository.hooks[hook_id]\n\n # Print the hook and the dots first in case the hook takes hella long to\n # run.\n write(\n '{0}{1}'.format(\n hook['name'],\n '.' 
* (COLS - len(hook['name']) - PASS_FAIL_LENGTH - 6),\n ),\n )\n sys.stdout.flush()\n\n retcode, stdout, stderr = repository.run_hook(\n runner.cmd_runner,\n hook_id,\n get_filenames(hook['files'], hook['exclude']),\n )\n\n if retcode != repository.hooks[hook_id]['expected_return_value']:\n retcode = 1\n print_color = color.RED\n pass_fail = 'Failed'\n else:\n retcode = 0\n print_color = color.GREEN\n pass_fail = 'Passed'\n\n write(color.format_color(pass_fail, print_color, args.color) + '\\n')\n\n if (stdout or stderr) and (retcode or args.verbose):\n write('\\n')\n for output in (stdout, stderr):\n if output.strip():\n write(output.strip() + '\\n')\n write('\\n')\n\n return retcode\n\n\ndef _run_hooks(runner, args, write):\n \"\"\"Actually run the hooks.\"\"\"\n retval = 0\n\n for repo in runner.repositories:\n for hook_id in repo.hooks:\n retval |= _run_single_hook(runner, repo, hook_id, args, write=write)\n\n return retval\n\n\ndef _run_hook(runner, hook_id, args, write):\n for repo in runner.repositories:\n if hook_id in repo.hooks:\n return _run_single_hook(runner, repo, hook_id, args, write=write)\n else:\n write('No hook with id `{0}`\\n'.format(hook_id))\n return 1\n\n\ndef run(runner, args, write=sys.stdout.write):\n # Set up our logging handler\n logger.addHandler(LoggingHandler(args.color))\n logger.setLevel(logging.INFO)\n\n with staged_files_only(runner.cmd_runner):\n if args.hook:\n return _run_hook(runner, args.hook, args, write=write)\n else:\n return _run_hooks(runner, args, write=write)\n", "path": "pre_commit/commands.py"}], "after_files": [{"content": "from __future__ import print_function\n\nimport logging\nimport os\nimport pkg_resources\nimport shutil\nimport stat\nimport subprocess\nimport sys\nfrom asottile.ordereddict import OrderedDict\nfrom asottile.yaml import ordered_dump\nfrom asottile.yaml import ordered_load\nfrom plumbum import local\n\nimport pre_commit.constants as C\nfrom pre_commit import git\nfrom pre_commit import color\nfrom pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA\nfrom pre_commit.clientlib.validate_config import load_config\nfrom pre_commit.jsonschema_extensions import remove_defaults\nfrom pre_commit.logging_handler import LoggingHandler\nfrom pre_commit.repository import Repository\nfrom pre_commit.staged_files_only import staged_files_only\n\n\nlogger = logging.getLogger('pre_commit')\n\nCOLS = int(subprocess.Popen(['tput', 'cols'], stdout=subprocess.PIPE).communicate()[0])\n\nPASS_FAIL_LENGTH = 6\n\n\ndef install(runner):\n \"\"\"Install the pre-commit hooks.\"\"\"\n pre_commit_file = pkg_resources.resource_filename('pre_commit', 'resources/pre-commit.sh')\n with open(runner.pre_commit_path, 'w') as pre_commit_file_obj:\n pre_commit_file_obj.write(open(pre_commit_file).read())\n\n original_mode = os.stat(runner.pre_commit_path).st_mode\n os.chmod(\n runner.pre_commit_path,\n original_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH,\n )\n\n print('pre-commit installed at {0}'.format(runner.pre_commit_path))\n\n return 0\n\n\ndef uninstall(runner):\n \"\"\"Uninstall the pre-commit hooks.\"\"\"\n if os.path.exists(runner.pre_commit_path):\n os.remove(runner.pre_commit_path)\n print('pre-commit uninstalled')\n return 0\n\n\nclass RepositoryCannotBeUpdatedError(RuntimeError):\n pass\n\n\ndef _update_repository(repo_config):\n \"\"\"Updates a repository to the tip of `master`. 
If the repository cannot\n be updated because a hook that is configured does not exist in `master`,\n this raises a RepositoryCannotBeUpdatedError\n\n Args:\n repo_config - A config for a repository\n \"\"\"\n repo = Repository(repo_config)\n\n with repo.in_checkout():\n local['git']['fetch']()\n head_sha = local['git']['rev-parse', 'origin/master']().strip()\n\n # Don't bother trying to update if our sha is the same\n if head_sha == repo_config['sha']:\n return repo_config\n\n # Construct a new config with the head sha\n new_config = OrderedDict(repo_config)\n new_config['sha'] = head_sha\n new_repo = Repository(new_config)\n\n # See if any of our hooks were deleted with the new commits\n hooks = set(repo.hooks.keys())\n hooks_missing = hooks - (hooks & set(new_repo.manifest.keys()))\n if hooks_missing:\n raise RepositoryCannotBeUpdatedError(\n 'Cannot update because the tip of master is missing these hooks:\\n'\n '{0}'.format(', '.join(sorted(hooks_missing)))\n )\n\n return remove_defaults([new_config], CONFIG_JSON_SCHEMA)[0]\n\n\ndef autoupdate(runner):\n \"\"\"Auto-update the pre-commit config to the latest versions of repos.\"\"\"\n retv = 0\n output_configs = []\n changed = False\n\n input_configs = load_config(\n runner.config_file_path,\n load_strategy=ordered_load,\n )\n\n for repo_config in input_configs:\n print('Updating {0}...'.format(repo_config['repo']), end='')\n try:\n new_repo_config = _update_repository(repo_config)\n except RepositoryCannotBeUpdatedError as error:\n print(error.args[0])\n output_configs.append(repo_config)\n retv = 1\n continue\n\n if new_repo_config['sha'] != repo_config['sha']:\n changed = True\n print(\n 'updating {0} -> {1}.'.format(\n repo_config['sha'], new_repo_config['sha'],\n )\n )\n output_configs.append(new_repo_config)\n else:\n print('already up to date.')\n output_configs.append(repo_config)\n\n if changed:\n with open(runner.config_file_path, 'w') as config_file:\n config_file.write(\n ordered_dump(output_configs, **C.YAML_DUMP_KWARGS)\n )\n\n return retv\n\n\ndef clean(runner):\n if os.path.exists(runner.hooks_workspace_path):\n shutil.rmtree(runner.hooks_workspace_path)\n print('Cleaned {0}.'.format(runner.hooks_workspace_path))\n return 0\n\n\ndef _run_single_hook(runner, repository, hook_id, args, write):\n if args.all_files:\n get_filenames = git.get_all_files_matching\n else:\n get_filenames = git.get_staged_files_matching\n\n hook = repository.hooks[hook_id]\n\n filenames = get_filenames(hook['files'], hook['exclude'])\n if not filenames:\n no_files_msg = '(no files to check) '\n skipped_msg = 'Skipped'\n write(\n '{0}{1}{2}{3}\\n'.format(\n hook['name'],\n '.' * (\n COLS -\n len(hook['name']) -\n len(no_files_msg) -\n len(skipped_msg) -\n 6\n ),\n no_files_msg,\n color.format_color(skipped_msg, color.TURQUOISE, args.color),\n )\n )\n return 0\n\n # Print the hook and the dots first in case the hook takes hella long to\n # run.\n write(\n '{0}{1}'.format(\n hook['name'],\n '.' 
* (COLS - len(hook['name']) - PASS_FAIL_LENGTH - 6),\n ),\n )\n sys.stdout.flush()\n\n retcode, stdout, stderr = repository.run_hook(\n runner.cmd_runner,\n hook_id,\n filenames,\n )\n\n if retcode != repository.hooks[hook_id]['expected_return_value']:\n retcode = 1\n print_color = color.RED\n pass_fail = 'Failed'\n else:\n retcode = 0\n print_color = color.GREEN\n pass_fail = 'Passed'\n\n write(color.format_color(pass_fail, print_color, args.color) + '\\n')\n\n if (stdout or stderr) and (retcode or args.verbose):\n write('\\n')\n for output in (stdout, stderr):\n if output.strip():\n write(output.strip() + '\\n')\n write('\\n')\n\n return retcode\n\n\ndef _run_hooks(runner, args, write):\n \"\"\"Actually run the hooks.\"\"\"\n retval = 0\n\n for repo in runner.repositories:\n for hook_id in repo.hooks:\n retval |= _run_single_hook(runner, repo, hook_id, args, write=write)\n\n return retval\n\n\ndef _run_hook(runner, hook_id, args, write):\n for repo in runner.repositories:\n if hook_id in repo.hooks:\n return _run_single_hook(runner, repo, hook_id, args, write=write)\n else:\n write('No hook with id `{0}`\\n'.format(hook_id))\n return 1\n\n\ndef run(runner, args, write=sys.stdout.write):\n # Set up our logging handler\n logger.addHandler(LoggingHandler(args.color))\n logger.setLevel(logging.INFO)\n\n with staged_files_only(runner.cmd_runner):\n if args.hook:\n return _run_hook(runner, args.hook, args, write=write)\n else:\n return _run_hooks(runner, args, write=write)\n", "path": "pre_commit/commands.py"}]}
| 2,380 | 305 |
gh_patches_debug_31142
|
rasdani/github-patches
|
git_diff
|
ansible-collections__community.general-8238
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
bitwarden_secrets_manager: Handle rate limits
### Summary
I'm not finding any official documentation on it yet, but Bitwarden's Secrets Manager seems to have a rate limit of 5 requests per second. When the rate limit is hit, the lookup fails with an error: 429 Too Many Requests; "Slow down! Too many requests. Try again in 1s."
### Issue Type
Bug Report
### Component Name
bitwarden_secrets_manager
### Ansible Version
```console (paste below)
$ ansible --version
ansible [core 2.16.1]
config file = /mnt/ansible/ansible.cfg
configured module search path = ['/home/matta/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.11/site-packages/ansible
ansible collection location = /mnt/ansible/collections
executable location = /usr/bin/ansible
python version = 3.11.9 (main, Apr 14 2024, 13:40:00) [GCC 13.2.1 20231014] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
```
### Community.general Version
```console (paste below)
$ ansible-galaxy collection list community.general
# /mnt/ansible/collections/ansible_collections
Collection Version
----------------- -------
community.general 8.5.0
# /usr/lib/python3.11/site-packages/ansible_collections
Collection Version
----------------- -------
community.general 7.5.1
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
COLLECTIONS_PATHS(/mnt/ansible/ansible.cfg) = ['/mnt/ansible/collections']
CONFIG_FILE() = /mnt/ansible/ansible.cfg
DEFAULT_FORKS(/mnt/ansible/ansible.cfg) = 10
DEFAULT_HOST_LIST(/mnt/ansible/ansible.cfg) = ['/mnt/ansible/inventory']
DEFAULT_MANAGED_STR(/mnt/ansible/ansible.cfg) = This file is managed by Ansible. Do not modify directly!%n
template: {file}
date: %Y-%m-%d %H:%M:%S
user: {uid}
host: {host}
DISPLAY_SKIPPED_HOSTS(/mnt/ansible/ansible.cfg) = False
EDITOR(env: EDITOR) = vim
INTERPRETER_PYTHON(/mnt/ansible/ansible.cfg) = auto_silent
PAGER(env: PAGER) = less
```
### OS / Environment
Alpine Linux 3.19
### Steps to Reproduce
```yaml (paste below)
---
- name: Bitwarden Secrets Manager Rate Limit Reproduction
hosts:
- xen01
- xen02
- xen03
- xen04
- xen05
- xen06
become: false
gather_facts: false
tasks:
- debug:
var: "{{ lookup('community.general.bitwarden_secrets_manager', '<secret id here>').value }}"
```
### Expected Results
I would expect the module to handle the 429 error with a back-off and retry until it succeeds
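For illustration, a back-off/retry wrapper around the `bws` invocation could look roughly like this — a sketch only; `run_bws_command`, the retry budget, and the error matching are assumptions, not the plugin's actual internals:

```python
import time


def fetch_secret_with_backoff(secret_id, run_bws_command, retries=5, base_delay=1.0):
    """Retry the bws call while the Secrets Manager API answers 429."""
    for attempt in range(retries):
        rc, out, err = run_bws_command(["secret", "get", secret_id])
        if rc == 0:
            return out
        if "429" not in err or attempt == retries - 1:
            raise RuntimeError(err)
        time.sleep(base_delay * (attempt + 1))  # back off: 1s, 2s, 3s, ...
```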
### Actual Results
```console (paste below)
PLAY [Bitwarden Secrets Manager Rate Limit Reproduction] ******************************************************************************************************************************************************************************************************************
TASK [debug] **************************************************************************************************************************************************************************************************************************************************************
fatal: [xen01]: FAILED! => {"msg": "Error: \n 0: Received error message from server: [429 Too Many Requests] {\"message\":\"Slow down! Too many requests. Try again in 1s.\",\"validationErrors\":null,\"exceptionMessage\":null,\"exceptionStackTrace\":null,\"innerExceptionMessage\":null,\"object\":\"error\"}\n\nLocation:\n /home/matta/alpine-package-repository/main/bws/src/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bws-0.4.0/src/main.rs:334\n\nBacktrace omitted. Run with RUST_BACKTRACE=1 environment variable to display it.\nRun with RUST_BACKTRACE=full to include source snippets.\n"}
ok: [xen03] => {
"this-is-a-test-secret": "{{this-is-a-test-secret}}"
}
ok: [xen04] => {
"this-is-a-test-secret": "{{this-is-a-test-secret}}"
}
ok: [xen05] => {
"this-is-a-test-secret": "{{this-is-a-test-secret}}"
}
ok: [xen06] => {
"this-is-a-test-secret": "{{this-is-a-test-secret}}"
}
ok: [xen02] => {
"this-is-a-test-secret": "{{this-is-a-test-secret}}"
}
PLAY RECAP ****************************************************************************************************************************************************************************************************************************************************************
xen01 : ok=0 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
xen02 : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
xen03 : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
xen04 : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
xen05 : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
xen06 : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `plugins/lookup/bitwarden_secrets_manager.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # Copyright (c) 2023, jantari (https://github.com/jantari)
3 # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
4 # SPDX-License-Identifier: GPL-3.0-or-later
5 from __future__ import (absolute_import, division, print_function)
6
7 __metaclass__ = type
8
9 DOCUMENTATION = """
10 name: bitwarden_secrets_manager
11 author:
12 - jantari (@jantari)
13 requirements:
14 - bws (command line utility)
15 short_description: Retrieve secrets from Bitwarden Secrets Manager
16 version_added: 7.2.0
17 description:
18 - Retrieve secrets from Bitwarden Secrets Manager.
19 options:
20 _terms:
21 description: Secret ID(s) to fetch values for.
22 required: true
23 type: list
24 elements: str
25 bws_access_token:
26 description: The BWS access token to use for this lookup.
27 env:
28 - name: BWS_ACCESS_TOKEN
29 required: true
30 type: str
31 """
32
33 EXAMPLES = """
34 - name: Get a secret relying on the BWS_ACCESS_TOKEN environment variable for authentication
35 ansible.builtin.debug:
36 msg: >-
37 {{ lookup("community.general.bitwarden_secrets_manager", "2bc23e48-4932-40de-a047-5524b7ddc972") }}
38
39 - name: Get a secret passing an explicit access token for authentication
40 ansible.builtin.debug:
41 msg: >-
42 {{
43 lookup(
44 "community.general.bitwarden_secrets_manager",
45 "2bc23e48-4932-40de-a047-5524b7ddc972",
46 bws_access_token="9.4f570d14-4b54-42f5-bc07-60f4450b1db5.YmluYXJ5LXNvbWV0aGluZy0xMjMK:d2h5IGhlbGxvIHRoZXJlCg=="
47 )
48 }}
49
50 - name: Get two different secrets each using a different access token for authentication
51 ansible.builtin.debug:
52 msg:
53 - '{{ lookup("community.general.bitwarden_secrets_manager", "2bc23e48-4932-40de-a047-5524b7ddc972", bws_access_token=token1) }}'
54 - '{{ lookup("community.general.bitwarden_secrets_manager", "9d89af4c-eb5d-41f5-bb0f-4ae81215c768", bws_access_token=token2) }}'
55 vars:
56 token1: "9.4f570d14-4b54-42f5-bc07-60f4450b1db5.YmluYXJ5LXNvbWV0aGluZy0xMjMK:d2h5IGhlbGxvIHRoZXJlCg=="
57 token2: "1.69b72797-6ea9-4687-a11e-848e41a30ae6.YW5zaWJsZSBpcyBncmVhdD8K:YW5zaWJsZSBpcyBncmVhdAo="
58
59 - name: Get just the value of a secret
60 ansible.builtin.debug:
61 msg: >-
62 {{ lookup("community.general.bitwarden_secrets_manager", "2bc23e48-4932-40de-a047-5524b7ddc972").value }}
63 """
64
65 RETURN = """
66 _raw:
67 description: List containing one or more secrets.
68 type: list
69 elements: dict
70 """
71
72 from subprocess import Popen, PIPE
73
74 from ansible.errors import AnsibleLookupError
75 from ansible.module_utils.common.text.converters import to_text
76 from ansible.parsing.ajson import AnsibleJSONDecoder
77 from ansible.plugins.lookup import LookupBase
78
79
80 class BitwardenSecretsManagerException(AnsibleLookupError):
81 pass
82
83
84 class BitwardenSecretsManager(object):
85 def __init__(self, path='bws'):
86 self._cli_path = path
87
88 @property
89 def cli_path(self):
90 return self._cli_path
91
92 def _run(self, args, stdin=None):
93 p = Popen([self.cli_path] + args, stdout=PIPE, stderr=PIPE, stdin=PIPE)
94 out, err = p.communicate(stdin)
95 rc = p.wait()
96 return to_text(out, errors='surrogate_or_strict'), to_text(err, errors='surrogate_or_strict'), rc
97
98 def get_secret(self, secret_id, bws_access_token):
99 """Get and return the secret with the given secret_id.
100 """
101
102 # Prepare set of params for Bitwarden Secrets Manager CLI
103 # Color output was not always disabled correctly with the default 'auto' setting so explicitly disable it.
104 params = [
105 '--color', 'no',
106 '--access-token', bws_access_token,
107 'get', 'secret', secret_id
108 ]
109
110 out, err, rc = self._run(params)
111 if rc != 0:
112 raise BitwardenSecretsManagerException(to_text(err))
113
114 return AnsibleJSONDecoder().raw_decode(out)[0]
115
116
117 class LookupModule(LookupBase):
118 def run(self, terms, variables=None, **kwargs):
119 self.set_options(var_options=variables, direct=kwargs)
120 bws_access_token = self.get_option('bws_access_token')
121
122 return [_bitwarden_secrets_manager.get_secret(term, bws_access_token) for term in terms]
123
124
125 _bitwarden_secrets_manager = BitwardenSecretsManager()
126
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/plugins/lookup/bitwarden_secrets_manager.py b/plugins/lookup/bitwarden_secrets_manager.py
--- a/plugins/lookup/bitwarden_secrets_manager.py
+++ b/plugins/lookup/bitwarden_secrets_manager.py
@@ -70,6 +70,7 @@
"""
from subprocess import Popen, PIPE
+from time import sleep
from ansible.errors import AnsibleLookupError
from ansible.module_utils.common.text.converters import to_text
@@ -84,11 +85,29 @@
class BitwardenSecretsManager(object):
def __init__(self, path='bws'):
self._cli_path = path
+ self._max_retries = 3
+ self._retry_delay = 1
@property
def cli_path(self):
return self._cli_path
+ def _run_with_retry(self, args, stdin=None, retries=0):
+ out, err, rc = self._run(args, stdin)
+
+ if rc != 0:
+ if retries >= self._max_retries:
+ raise BitwardenSecretsManagerException("Max retries exceeded. Unable to retrieve secret.")
+
+ if "Too many requests" in err:
+ delay = self._retry_delay * (2 ** retries)
+ sleep(delay)
+ return self._run_with_retry(args, stdin, retries + 1)
+ else:
+ raise BitwardenSecretsManagerException(f"Command failed with return code {rc}: {err}")
+
+ return out, err, rc
+
def _run(self, args, stdin=None):
p = Popen([self.cli_path] + args, stdout=PIPE, stderr=PIPE, stdin=PIPE)
out, err = p.communicate(stdin)
@@ -107,7 +126,7 @@
'get', 'secret', secret_id
]
- out, err, rc = self._run(params)
+ out, err, rc = self._run_with_retry(params)
if rc != 0:
raise BitwardenSecretsManagerException(to_text(err))
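
The diff above amounts to a retry loop with exponential backoff, triggered when the `bws` CLI reports "Too many requests". As a rough standalone sketch of that pattern (assuming a hypothetical `run_cli(args)` callable that returns `(stdout, stderr, returncode)`, not the collection's real plumbing), it could be reduced to:

```python
from time import sleep

MAX_RETRIES = 3
BASE_DELAY = 1  # seconds; doubled on each retry (1s, 2s, 4s)


def run_with_retry(run_cli, args, retries=0):
    # run_cli is a hypothetical stand-in for invoking the bws CLI
    out, err, rc = run_cli(args)
    if rc != 0:
        if retries >= MAX_RETRIES:
            raise RuntimeError("Max retries exceeded: %s" % err)
        if "Too many requests" in err:
            # back off before retrying the rate-limited call
            sleep(BASE_DELAY * (2 ** retries))
            return run_with_retry(run_cli, args, retries + 1)
        raise RuntimeError("Command failed (rc=%d): %s" % (rc, err))
    return out, err, rc
```

With at most 3 retries and a 1-second base delay, a burst of lookups backs off for up to 1+2+4 = 7 seconds before giving up, which should give enough headroom for the roughly 5-requests-per-second limit described in the issue.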
|
{"golden_diff": "diff --git a/plugins/lookup/bitwarden_secrets_manager.py b/plugins/lookup/bitwarden_secrets_manager.py\n--- a/plugins/lookup/bitwarden_secrets_manager.py\n+++ b/plugins/lookup/bitwarden_secrets_manager.py\n@@ -70,6 +70,7 @@\n \"\"\"\n \n from subprocess import Popen, PIPE\n+from time import sleep\n \n from ansible.errors import AnsibleLookupError\n from ansible.module_utils.common.text.converters import to_text\n@@ -84,11 +85,29 @@\n class BitwardenSecretsManager(object):\n def __init__(self, path='bws'):\n self._cli_path = path\n+ self._max_retries = 3\n+ self._retry_delay = 1\n \n @property\n def cli_path(self):\n return self._cli_path\n \n+ def _run_with_retry(self, args, stdin=None, retries=0):\n+ out, err, rc = self._run(args, stdin)\n+\n+ if rc != 0:\n+ if retries >= self._max_retries:\n+ raise BitwardenSecretsManagerException(\"Max retries exceeded. Unable to retrieve secret.\")\n+\n+ if \"Too many requests\" in err:\n+ delay = self._retry_delay * (2 ** retries)\n+ sleep(delay)\n+ return self._run_with_retry(args, stdin, retries + 1)\n+ else:\n+ raise BitwardenSecretsManagerException(f\"Command failed with return code {rc}: {err}\")\n+\n+ return out, err, rc\n+\n def _run(self, args, stdin=None):\n p = Popen([self.cli_path] + args, stdout=PIPE, stderr=PIPE, stdin=PIPE)\n out, err = p.communicate(stdin)\n@@ -107,7 +126,7 @@\n 'get', 'secret', secret_id\n ]\n \n- out, err, rc = self._run(params)\n+ out, err, rc = self._run_with_retry(params)\n if rc != 0:\n raise BitwardenSecretsManagerException(to_text(err))\n", "issue": "bitwarden_secrets_manager: Handle rate limits\n### Summary\n\nI'm not finding any official documentation on it yet but Bitwarden's Secret Manager seems to have a rate limit of 5 requests per second. When the rate limit is hit, the lookup fails with an error: 429 Too Many Requests; Slow down! Too many requests. Try again in 1s.\r\n\n\n### Issue Type\n\nBug Report\n\n### Component Name\n\nbitwarden_secret_manager\n\n### Ansible Version\n\n```console (paste below)\r\n$ ansible --version\r\nansible [core 2.16.1]\r\n config file = /mnt/ansible/ansible.cfg\r\n configured module search path = ['/home/matta/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']\r\n ansible python module location = /usr/lib/python3.11/site-packages/ansible\r\n ansible collection location = /mnt/ansible/collections\r\n executable location = /usr/bin/ansible\r\n python version = 3.11.9 (main, Apr 14 2024, 13:40:00) [GCC 13.2.1 20231014] (/usr/bin/python3)\r\n jinja version = 3.1.2\r\n libyaml = True\r\n```\r\n\n\n### Community.general Version\n\n```console (paste below)\r\n$ ansible-galaxy collection list community.general\r\n\r\n# /mnt/ansible/collections/ansible_collections\r\nCollection Version\r\n----------------- -------\r\ncommunity.general 8.5.0\r\n\r\n# /usr/lib/python3.11/site-packages/ansible_collections\r\nCollection Version\r\n----------------- -------\r\ncommunity.general 7.5.1\r\n```\r\n\n\n### Configuration\n\n```console (paste below)\r\n$ ansible-config dump --only-changed\r\nCOLLECTIONS_PATHS(/mnt/ansible/ansible.cfg) = ['/mnt/ansible/collections']\r\nCONFIG_FILE() = /mnt/ansible/ansible.cfg\r\nDEFAULT_FORKS(/mnt/ansible/ansible.cfg) = 10\r\nDEFAULT_HOST_LIST(/mnt/ansible/ansible.cfg) = ['/mnt/ansible/inventory']\r\nDEFAULT_MANAGED_STR(/mnt/ansible/ansible.cfg) = This file is managed by Ansible. 
Do not modify directly!%n\r\ntemplate: {file}\r\ndate: %Y-%m-%d %H:%M:%S\r\nuser: {uid}\r\nhost: {host}\r\nDISPLAY_SKIPPED_HOSTS(/mnt/ansible/ansible.cfg) = False\r\nEDITOR(env: EDITOR) = vim\r\nINTERPRETER_PYTHON(/mnt/ansible/ansible.cfg) = auto_silent\r\nPAGER(env: PAGER) = less\r\n```\r\n\n\n### OS / Environment\n\nAlpine Linux 3.19\n\n### Steps to Reproduce\n\n<!--- Paste example playbooks or commands between quotes below -->\r\n```yaml (paste below)\r\n---\r\n- name: Bitwarden Secrets Manager Rate Limit Reproduction\r\n hosts:\r\n - xen01\r\n - xen02\r\n - xen03\r\n - xen04\r\n - xen05\r\n - xen06\r\n become: false\r\n gather_facts: false\r\n tasks:\r\n - debug:\r\n var: \"{{ lookup('community.general.bitwarden_secrets_manager', '<secret id here>').value }}\"\r\n```\r\n\n\n### Expected Results\n\nI would expect the module to handle the 429 error with a back-off and retry until it succeeds\n\n### Actual Results\n\n```console (paste below)\r\nPLAY [Bitwarden Secrets Manager Rate Limit Reproduction] ******************************************************************************************************************************************************************************************************************\r\nTASK [debug] **************************************************************************************************************************************************************************************************************************************************************\r\nfatal: [xen01]: FAILED! => {\"msg\": \"Error: \\n 0: Received error message from server: [429 Too Many Requests] {\\\"message\\\":\\\"Slow down! Too many requests. Try again in 1s.\\\",\\\"validationErrors\\\":null,\\\"exceptionMessage\\\":null,\\\"exceptionStackTrace\\\":null,\\\"innerExceptionMessage\\\":null,\\\"object\\\":\\\"error\\\"}\\n\\nLocation:\\n /home/matta/alpine-package-repository/main/bws/src/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bws-0.4.0/src/main.rs:334\\n\\nBacktrace omitted. 
Run with RUST_BACKTRACE=1 environment variable to display it.\\nRun with RUST_BACKTRACE=full to include source snippets.\\n\"}\r\nok: [xen03] => {\r\n \"this-is-a-test-secret\": \"{{this-is-a-test-secret}}\"\r\n}\r\nok: [xen04] => {\r\n \"this-is-a-test-secret\": \"{{this-is-a-test-secret}}\"\r\n}\r\nok: [xen05] => {\r\n \"this-is-a-test-secret\": \"{{this-is-a-test-secret}}\"\r\n}\r\nok: [xen06] => {\r\n \"this-is-a-test-secret\": \"{{this-is-a-test-secret}}\"\r\n}\r\nok: [xen02] => {\r\n \"this-is-a-test-secret\": \"{{this-is-a-test-secret}}\"\r\n}\r\n\r\nPLAY RECAP ****************************************************************************************************************************************************************************************************************************************************************\r\nxen01 : ok=0 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0\r\nxen02 : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0\r\nxen03 : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0\r\nxen04 : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0\r\nxen05 : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0\r\nxen06 : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0\r\n```\r\n\n\n### Code of Conduct\n\n- [X] I agree to follow the Ansible Code of Conduct\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright (c) 2023, jantari (https://github.com/jantari)\n# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)\n# SPDX-License-Identifier: GPL-3.0-or-later\nfrom __future__ import (absolute_import, division, print_function)\n\n__metaclass__ = type\n\nDOCUMENTATION = \"\"\"\n name: bitwarden_secrets_manager\n author:\n - jantari (@jantari)\n requirements:\n - bws (command line utility)\n short_description: Retrieve secrets from Bitwarden Secrets Manager\n version_added: 7.2.0\n description:\n - Retrieve secrets from Bitwarden Secrets Manager.\n options:\n _terms:\n description: Secret ID(s) to fetch values for.\n required: true\n type: list\n elements: str\n bws_access_token:\n description: The BWS access token to use for this lookup.\n env:\n - name: BWS_ACCESS_TOKEN\n required: true\n type: str\n\"\"\"\n\nEXAMPLES = \"\"\"\n- name: Get a secret relying on the BWS_ACCESS_TOKEN environment variable for authentication\n ansible.builtin.debug:\n msg: >-\n {{ lookup(\"community.general.bitwarden_secrets_manager\", \"2bc23e48-4932-40de-a047-5524b7ddc972\") }}\n\n- name: Get a secret passing an explicit access token for authentication\n ansible.builtin.debug:\n msg: >-\n {{\n lookup(\n \"community.general.bitwarden_secrets_manager\",\n \"2bc23e48-4932-40de-a047-5524b7ddc972\",\n bws_access_token=\"9.4f570d14-4b54-42f5-bc07-60f4450b1db5.YmluYXJ5LXNvbWV0aGluZy0xMjMK:d2h5IGhlbGxvIHRoZXJlCg==\"\n )\n }}\n\n- name: Get two different secrets each using a different access token for authentication\n ansible.builtin.debug:\n msg:\n - '{{ lookup(\"community.general.bitwarden_secrets_manager\", \"2bc23e48-4932-40de-a047-5524b7ddc972\", bws_access_token=token1) }}'\n - '{{ lookup(\"community.general.bitwarden_secrets_manager\", \"9d89af4c-eb5d-41f5-bb0f-4ae81215c768\", bws_access_token=token2) }}'\n vars:\n token1: \"9.4f570d14-4b54-42f5-bc07-60f4450b1db5.YmluYXJ5LXNvbWV0aGluZy0xMjMK:d2h5IGhlbGxvIHRoZXJlCg==\"\n token2: \"1.69b72797-6ea9-4687-a11e-848e41a30ae6.YW5zaWJsZSBpcyBncmVhdD8K:YW5zaWJsZSBpcyBncmVhdAo=\"\n\n- 
name: Get just the value of a secret\n ansible.builtin.debug:\n msg: >-\n {{ lookup(\"community.general.bitwarden_secrets_manager\", \"2bc23e48-4932-40de-a047-5524b7ddc972\").value }}\n\"\"\"\n\nRETURN = \"\"\"\n _raw:\n description: List containing one or more secrets.\n type: list\n elements: dict\n\"\"\"\n\nfrom subprocess import Popen, PIPE\n\nfrom ansible.errors import AnsibleLookupError\nfrom ansible.module_utils.common.text.converters import to_text\nfrom ansible.parsing.ajson import AnsibleJSONDecoder\nfrom ansible.plugins.lookup import LookupBase\n\n\nclass BitwardenSecretsManagerException(AnsibleLookupError):\n pass\n\n\nclass BitwardenSecretsManager(object):\n def __init__(self, path='bws'):\n self._cli_path = path\n\n @property\n def cli_path(self):\n return self._cli_path\n\n def _run(self, args, stdin=None):\n p = Popen([self.cli_path] + args, stdout=PIPE, stderr=PIPE, stdin=PIPE)\n out, err = p.communicate(stdin)\n rc = p.wait()\n return to_text(out, errors='surrogate_or_strict'), to_text(err, errors='surrogate_or_strict'), rc\n\n def get_secret(self, secret_id, bws_access_token):\n \"\"\"Get and return the secret with the given secret_id.\n \"\"\"\n\n # Prepare set of params for Bitwarden Secrets Manager CLI\n # Color output was not always disabled correctly with the default 'auto' setting so explicitly disable it.\n params = [\n '--color', 'no',\n '--access-token', bws_access_token,\n 'get', 'secret', secret_id\n ]\n\n out, err, rc = self._run(params)\n if rc != 0:\n raise BitwardenSecretsManagerException(to_text(err))\n\n return AnsibleJSONDecoder().raw_decode(out)[0]\n\n\nclass LookupModule(LookupBase):\n def run(self, terms, variables=None, **kwargs):\n self.set_options(var_options=variables, direct=kwargs)\n bws_access_token = self.get_option('bws_access_token')\n\n return [_bitwarden_secrets_manager.get_secret(term, bws_access_token) for term in terms]\n\n\n_bitwarden_secrets_manager = BitwardenSecretsManager()\n", "path": "plugins/lookup/bitwarden_secrets_manager.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright (c) 2023, jantari (https://github.com/jantari)\n# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)\n# SPDX-License-Identifier: GPL-3.0-or-later\nfrom __future__ import (absolute_import, division, print_function)\n\n__metaclass__ = type\n\nDOCUMENTATION = \"\"\"\n name: bitwarden_secrets_manager\n author:\n - jantari (@jantari)\n requirements:\n - bws (command line utility)\n short_description: Retrieve secrets from Bitwarden Secrets Manager\n version_added: 7.2.0\n description:\n - Retrieve secrets from Bitwarden Secrets Manager.\n options:\n _terms:\n description: Secret ID(s) to fetch values for.\n required: true\n type: list\n elements: str\n bws_access_token:\n description: The BWS access token to use for this lookup.\n env:\n - name: BWS_ACCESS_TOKEN\n required: true\n type: str\n\"\"\"\n\nEXAMPLES = \"\"\"\n- name: Get a secret relying on the BWS_ACCESS_TOKEN environment variable for authentication\n ansible.builtin.debug:\n msg: >-\n {{ lookup(\"community.general.bitwarden_secrets_manager\", \"2bc23e48-4932-40de-a047-5524b7ddc972\") }}\n\n- name: Get a secret passing an explicit access token for authentication\n ansible.builtin.debug:\n msg: >-\n {{\n lookup(\n \"community.general.bitwarden_secrets_manager\",\n \"2bc23e48-4932-40de-a047-5524b7ddc972\",\n 
bws_access_token=\"9.4f570d14-4b54-42f5-bc07-60f4450b1db5.YmluYXJ5LXNvbWV0aGluZy0xMjMK:d2h5IGhlbGxvIHRoZXJlCg==\"\n )\n }}\n\n- name: Get two different secrets each using a different access token for authentication\n ansible.builtin.debug:\n msg:\n - '{{ lookup(\"community.general.bitwarden_secrets_manager\", \"2bc23e48-4932-40de-a047-5524b7ddc972\", bws_access_token=token1) }}'\n - '{{ lookup(\"community.general.bitwarden_secrets_manager\", \"9d89af4c-eb5d-41f5-bb0f-4ae81215c768\", bws_access_token=token2) }}'\n vars:\n token1: \"9.4f570d14-4b54-42f5-bc07-60f4450b1db5.YmluYXJ5LXNvbWV0aGluZy0xMjMK:d2h5IGhlbGxvIHRoZXJlCg==\"\n token2: \"1.69b72797-6ea9-4687-a11e-848e41a30ae6.YW5zaWJsZSBpcyBncmVhdD8K:YW5zaWJsZSBpcyBncmVhdAo=\"\n\n- name: Get just the value of a secret\n ansible.builtin.debug:\n msg: >-\n {{ lookup(\"community.general.bitwarden_secrets_manager\", \"2bc23e48-4932-40de-a047-5524b7ddc972\").value }}\n\"\"\"\n\nRETURN = \"\"\"\n _raw:\n description: List containing one or more secrets.\n type: list\n elements: dict\n\"\"\"\n\nfrom subprocess import Popen, PIPE\nfrom time import sleep\n\nfrom ansible.errors import AnsibleLookupError\nfrom ansible.module_utils.common.text.converters import to_text\nfrom ansible.parsing.ajson import AnsibleJSONDecoder\nfrom ansible.plugins.lookup import LookupBase\n\n\nclass BitwardenSecretsManagerException(AnsibleLookupError):\n pass\n\n\nclass BitwardenSecretsManager(object):\n def __init__(self, path='bws'):\n self._cli_path = path\n self._max_retries = 3\n self._retry_delay = 1\n\n @property\n def cli_path(self):\n return self._cli_path\n\n def _run_with_retry(self, args, stdin=None, retries=0):\n out, err, rc = self._run(args, stdin)\n\n if rc != 0:\n if retries >= self._max_retries:\n raise BitwardenSecretsManagerException(\"Max retries exceeded. Unable to retrieve secret.\")\n\n if \"Too many requests\" in err:\n delay = self._retry_delay * (2 ** retries)\n sleep(delay)\n return self._run_with_retry(args, stdin, retries + 1)\n else:\n raise BitwardenSecretsManagerException(f\"Command failed with return code {rc}: {err}\")\n\n return out, err, rc\n\n def _run(self, args, stdin=None):\n p = Popen([self.cli_path] + args, stdout=PIPE, stderr=PIPE, stdin=PIPE)\n out, err = p.communicate(stdin)\n rc = p.wait()\n return to_text(out, errors='surrogate_or_strict'), to_text(err, errors='surrogate_or_strict'), rc\n\n def get_secret(self, secret_id, bws_access_token):\n \"\"\"Get and return the secret with the given secret_id.\n \"\"\"\n\n # Prepare set of params for Bitwarden Secrets Manager CLI\n # Color output was not always disabled correctly with the default 'auto' setting so explicitly disable it.\n params = [\n '--color', 'no',\n '--access-token', bws_access_token,\n 'get', 'secret', secret_id\n ]\n\n out, err, rc = self._run_with_retry(params)\n if rc != 0:\n raise BitwardenSecretsManagerException(to_text(err))\n\n return AnsibleJSONDecoder().raw_decode(out)[0]\n\n\nclass LookupModule(LookupBase):\n def run(self, terms, variables=None, **kwargs):\n self.set_options(var_options=variables, direct=kwargs)\n bws_access_token = self.get_option('bws_access_token')\n\n return [_bitwarden_secrets_manager.get_secret(term, bws_access_token) for term in terms]\n\n\n_bitwarden_secrets_manager = BitwardenSecretsManager()\n", "path": "plugins/lookup/bitwarden_secrets_manager.py"}]}
| 3,204 | 462 |
gh_patches_debug_29144 | rasdani/github-patches | git_diff | qtile__qtile-2235 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
No filename provided Image widget causes QTile to crash.
# The issue
When no filename argument, OR an invalid filename is provided for the Image widget, Qtile seems to crash, and needs to be killed to restart. You are obviously not supposed to provide a non-existent image, but I have doubts that it crashing is intended behavior.
What I am describing here as a "crash" is no keyboard input being accepted, and windows from *all* other workspaces being displayed on the workspace you are currently on. If this is not actually a crash, I apologize, but regardless, Qtile becomes unusable until the process is killed and I am kicked back to my Display Manager.
# Steps to reproduce
In your bar, create a new ``Image`` widget somewhere inside. Either provide a path to an image that does not exist, or do not provide one period.
# Qtile version
This is the commit hash of the version I am running.
6c4d0557124989d46ffb2bb24f4468db687fcdb2
# Stack traces
No stack traces from xsession-errors, or the Qtile log are produced, however I have traced the error (through using the logger provided in the module's file) to the ``_configure`` method of the Image widget, and it seems to be coming from the line: ``base._Widget._configure(self, qtile, bar)``
# Configuration
https://pastebin.com/qxBq6yPn
If there is any information I got wrong here, or some other bit of information I can provide that will help this issue get solved, I will try my best.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `libqtile/widget/image.py`
Content:
```
1 # Copyright (c) 2013 dequis
2 # Copyright (c) 2014 Sean Vig
3 # Copyright (c) 2014 Adi Sieker
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining a copy
6 # of this software and associated documentation files (the "Software"), to deal
7 # in the Software without restriction, including without limitation the rights
8 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 # copies of the Software, and to permit persons to whom the Software is
10 # furnished to do so, subject to the following conditions:
11 #
12 # The above copyright notice and this permission notice shall be included in
13 # all copies or substantial portions of the Software.
14 #
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 # SOFTWARE.
22 import os
23
24 from libqtile import bar
25 from libqtile.images import Img
26 from libqtile.log_utils import logger
27 from libqtile.widget import base
28
29
30 class Image(base._Widget, base.MarginMixin):
31 """Display a PNG image on the bar"""
32 orientations = base.ORIENTATION_BOTH
33 defaults = [
34 ("scale", True, "Enable/Disable image scaling"),
35 ("rotate", 0.0, "rotate the image in degrees counter-clockwise"),
36 ("filename", None, "Image filename. Can contain '~'"),
37 ]
38
39 def __init__(self, length=bar.CALCULATED, width=None, **config):
40 # 'width' was replaced by 'length' since the widget can be installed in
41 # vertical bars
42 if width is not None:
43 logger.warning('width kwarg or positional argument is '
44 'deprecated. Please use length.')
45 length = width
46
47 base._Widget.__init__(self, length, **config)
48 self.add_defaults(Image.defaults)
49 self.add_defaults(base.MarginMixin.defaults)
50
51 # make the default 0 instead
52 self._variable_defaults["margin"] = 0
53
54 def _configure(self, qtile, bar):
55 base._Widget._configure(self, qtile, bar)
56
57 if not self.filename:
58 raise ValueError("Filename not set!")
59
60 self.filename = os.path.expanduser(self.filename)
61
62 if not os.path.exists(self.filename):
63 raise ValueError("File does not exist: {}".format(self.filename))
64
65 img = Img.from_path(self.filename)
66 self.img = img
67 img.theta = self.rotate
68 if not self.scale:
69 return
70 if self.bar.horizontal:
71 new_height = self.bar.height - (self.margin_y * 2)
72 img.resize(height=new_height)
73 else:
74 new_width = self.bar.width - (self.margin_x * 2)
75 img.resize(width=new_width)
76
77 def draw(self):
78 self.drawer.clear(self.background or self.bar.background)
79 self.drawer.ctx.save()
80 self.drawer.ctx.translate(self.margin_x, self.margin_y)
81 self.drawer.ctx.set_source(self.img.pattern)
82 self.drawer.ctx.paint()
83 self.drawer.ctx.restore()
84
85 if self.bar.horizontal:
86 self.drawer.draw(offsetx=self.offset, width=self.width)
87 else:
88 self.drawer.draw(offsety=self.offset, height=self.width)
89
90 def calculate_length(self):
91 if self.bar.horizontal:
92 return self.img.width + (self.margin_x * 2)
93 else:
94 return self.img.height + (self.margin_y * 2)
95
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/libqtile/widget/image.py b/libqtile/widget/image.py
--- a/libqtile/widget/image.py
+++ b/libqtile/widget/image.py
@@ -53,14 +53,17 @@
def _configure(self, qtile, bar):
base._Widget._configure(self, qtile, bar)
+ self.img = None
if not self.filename:
- raise ValueError("Filename not set!")
+ logger.warning("Image filename not set!")
+ return
self.filename = os.path.expanduser(self.filename)
if not os.path.exists(self.filename):
- raise ValueError("File does not exist: {}".format(self.filename))
+ logger.warning("Image does not exist: {}".format(self.filename))
+ return
img = Img.from_path(self.filename)
self.img = img
@@ -75,6 +78,9 @@
img.resize(width=new_width)
def draw(self):
+ if self.img is None:
+ return
+
self.drawer.clear(self.background or self.bar.background)
self.drawer.ctx.save()
self.drawer.ctx.translate(self.margin_x, self.margin_y)
@@ -88,6 +94,9 @@
self.drawer.draw(offsety=self.offset, height=self.width)
def calculate_length(self):
+ if self.img is None:
+ return 0
+
if self.bar.horizontal:
return self.img.width + (self.margin_x * 2)
else:
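
The patch above is a log-and-degrade pattern: bad configuration is downgraded from an exception to a warning, `self.img` stays `None`, and the drawing paths bail out early. A minimal, self-contained sketch of just the validation step (using a hypothetical `safe_image_path` helper rather than qtile's real widget classes) might look like:

```python
import logging
import os

logger = logging.getLogger(__name__)


def safe_image_path(filename):
    """Return an expanded, existing path, or None instead of raising."""
    if not filename:
        logger.warning("Image filename not set!")
        return None
    path = os.path.expanduser(filename)
    if not os.path.exists(path):
        logger.warning("Image does not exist: %s", path)
        return None
    return path
```

Callers then treat `None` as "nothing to draw": skip painting and report a length of 0, which is what the `draw` and `calculate_length` guards in the diff do.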
|
{"golden_diff": "diff --git a/libqtile/widget/image.py b/libqtile/widget/image.py\n--- a/libqtile/widget/image.py\n+++ b/libqtile/widget/image.py\n@@ -53,14 +53,17 @@\n \n def _configure(self, qtile, bar):\n base._Widget._configure(self, qtile, bar)\n+ self.img = None\n \n if not self.filename:\n- raise ValueError(\"Filename not set!\")\n+ logger.warning(\"Image filename not set!\")\n+ return\n \n self.filename = os.path.expanduser(self.filename)\n \n if not os.path.exists(self.filename):\n- raise ValueError(\"File does not exist: {}\".format(self.filename))\n+ logger.warning(\"Image does not exist: {}\".format(self.filename))\n+ return\n \n img = Img.from_path(self.filename)\n self.img = img\n@@ -75,6 +78,9 @@\n img.resize(width=new_width)\n \n def draw(self):\n+ if self.img is None:\n+ return\n+\n self.drawer.clear(self.background or self.bar.background)\n self.drawer.ctx.save()\n self.drawer.ctx.translate(self.margin_x, self.margin_y)\n@@ -88,6 +94,9 @@\n self.drawer.draw(offsety=self.offset, height=self.width)\n \n def calculate_length(self):\n+ if self.img is None:\n+ return 0\n+\n if self.bar.horizontal:\n return self.img.width + (self.margin_x * 2)\n else:\n", "issue": "No filename provided Image widget causes QTile to crash.\n# The issue\r\nWhen no filename argument, OR an invalid filename is provided for the Image widget, Qtile seems to crash, and needs to be killed to restart. You are obviously not supposed to provide a non-existant image, but I have doubts that it crashing is intended behavior. \r\n\r\nWhat I am describing here as a \"crash\" is no keyboard input being accepted, and windows from *all* other workspaces being displayed on the workspace you are currently on. If this is not actually a crash, I apologize, but regardless, Qtile becomes unusable until the process is killed and I am kicked back to my Display Manager.\r\n\r\n# Steps to reproduce\r\nIn your bar, create a new ``Image`` widget somewhere inside. 
Either provide a path to an image that does not exist, or do not provide one period.\r\n\r\n# Qtile version\r\nThis is the commit hash of the version I am running.\r\n6c4d0557124989d46ffb2bb24f4468db687fcdb2\r\n\r\n# Stack traces\r\nNo stack traces from xsession-errors, or the Qtile log are produced, however I have traced the error (through using the logger provided in the module's file) to the ``_configure`` method of the Image widget, and it seems to be coming the line: ``base._Widget._configure(self, qtile, bar)``\r\n\r\n# Configuration\r\nhttps://pastebin.com/qxBq6yPn\r\n\r\nIf there is any information I got wrong here, or some other bit of information I can provide that will help this issue get solved, I will try my best.\n", "before_files": [{"content": "# Copyright (c) 2013 dequis\n# Copyright (c) 2014 Sean Vig\n# Copyright (c) 2014 Adi Sieker\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\nimport os\n\nfrom libqtile import bar\nfrom libqtile.images import Img\nfrom libqtile.log_utils import logger\nfrom libqtile.widget import base\n\n\nclass Image(base._Widget, base.MarginMixin):\n \"\"\"Display a PNG image on the bar\"\"\"\n orientations = base.ORIENTATION_BOTH\n defaults = [\n (\"scale\", True, \"Enable/Disable image scaling\"),\n (\"rotate\", 0.0, \"rotate the image in degrees counter-clockwise\"),\n (\"filename\", None, \"Image filename. Can contain '~'\"),\n ]\n\n def __init__(self, length=bar.CALCULATED, width=None, **config):\n # 'width' was replaced by 'length' since the widget can be installed in\n # vertical bars\n if width is not None:\n logger.warning('width kwarg or positional argument is '\n 'deprecated. 
Please use length.')\n length = width\n\n base._Widget.__init__(self, length, **config)\n self.add_defaults(Image.defaults)\n self.add_defaults(base.MarginMixin.defaults)\n\n # make the default 0 instead\n self._variable_defaults[\"margin\"] = 0\n\n def _configure(self, qtile, bar):\n base._Widget._configure(self, qtile, bar)\n\n if not self.filename:\n raise ValueError(\"Filename not set!\")\n\n self.filename = os.path.expanduser(self.filename)\n\n if not os.path.exists(self.filename):\n raise ValueError(\"File does not exist: {}\".format(self.filename))\n\n img = Img.from_path(self.filename)\n self.img = img\n img.theta = self.rotate\n if not self.scale:\n return\n if self.bar.horizontal:\n new_height = self.bar.height - (self.margin_y * 2)\n img.resize(height=new_height)\n else:\n new_width = self.bar.width - (self.margin_x * 2)\n img.resize(width=new_width)\n\n def draw(self):\n self.drawer.clear(self.background or self.bar.background)\n self.drawer.ctx.save()\n self.drawer.ctx.translate(self.margin_x, self.margin_y)\n self.drawer.ctx.set_source(self.img.pattern)\n self.drawer.ctx.paint()\n self.drawer.ctx.restore()\n\n if self.bar.horizontal:\n self.drawer.draw(offsetx=self.offset, width=self.width)\n else:\n self.drawer.draw(offsety=self.offset, height=self.width)\n\n def calculate_length(self):\n if self.bar.horizontal:\n return self.img.width + (self.margin_x * 2)\n else:\n return self.img.height + (self.margin_y * 2)\n", "path": "libqtile/widget/image.py"}], "after_files": [{"content": "# Copyright (c) 2013 dequis\n# Copyright (c) 2014 Sean Vig\n# Copyright (c) 2014 Adi Sieker\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\nimport os\n\nfrom libqtile import bar\nfrom libqtile.images import Img\nfrom libqtile.log_utils import logger\nfrom libqtile.widget import base\n\n\nclass Image(base._Widget, base.MarginMixin):\n \"\"\"Display a PNG image on the bar\"\"\"\n orientations = base.ORIENTATION_BOTH\n defaults = [\n (\"scale\", True, \"Enable/Disable image scaling\"),\n (\"rotate\", 0.0, \"rotate the image in degrees counter-clockwise\"),\n (\"filename\", None, \"Image filename. Can contain '~'\"),\n ]\n\n def __init__(self, length=bar.CALCULATED, width=None, **config):\n # 'width' was replaced by 'length' since the widget can be installed in\n # vertical bars\n if width is not None:\n logger.warning('width kwarg or positional argument is '\n 'deprecated. 
Please use length.')\n length = width\n\n base._Widget.__init__(self, length, **config)\n self.add_defaults(Image.defaults)\n self.add_defaults(base.MarginMixin.defaults)\n\n # make the default 0 instead\n self._variable_defaults[\"margin\"] = 0\n\n def _configure(self, qtile, bar):\n base._Widget._configure(self, qtile, bar)\n self.img = None\n\n if not self.filename:\n logger.warning(\"Image filename not set!\")\n return\n\n self.filename = os.path.expanduser(self.filename)\n\n if not os.path.exists(self.filename):\n logger.warning(\"Image does not exist: {}\".format(self.filename))\n return\n\n img = Img.from_path(self.filename)\n self.img = img\n img.theta = self.rotate\n if not self.scale:\n return\n if self.bar.horizontal:\n new_height = self.bar.height - (self.margin_y * 2)\n img.resize(height=new_height)\n else:\n new_width = self.bar.width - (self.margin_x * 2)\n img.resize(width=new_width)\n\n def draw(self):\n if self.img is None:\n return\n\n self.drawer.clear(self.background or self.bar.background)\n self.drawer.ctx.save()\n self.drawer.ctx.translate(self.margin_x, self.margin_y)\n self.drawer.ctx.set_source(self.img.pattern)\n self.drawer.ctx.paint()\n self.drawer.ctx.restore()\n\n if self.bar.horizontal:\n self.drawer.draw(offsetx=self.offset, width=self.width)\n else:\n self.drawer.draw(offsety=self.offset, height=self.width)\n\n def calculate_length(self):\n if self.img is None:\n return 0\n\n if self.bar.horizontal:\n return self.img.width + (self.margin_x * 2)\n else:\n return self.img.height + (self.margin_y * 2)\n", "path": "libqtile/widget/image.py"}]}
| 1,608 | 319 |
gh_patches_debug_31175 | rasdani/github-patches | git_diff | dotkom__onlineweb4-1388 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
SplashYear matching query does not exist
https://github.com/dotKom/onlineweb4/blob/develop/apps/splash/models.py#L11 tries to get a SplashYear, but there are no objects matching the query, so it triggers an exception.
> DoesNotExist: SplashYear matching query does not exist.
How to quick-fix: Create a SplashYear which satisfies the matching query.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `apps/events/utils.py`
Content:
```
1 #-*- coding: utf-8 -*-
2 from datetime import timedelta
3
4 from django.conf import settings
5 from django.core.mail import send_mail
6 from django.utils import timezone
7
8 from django.core.signing import Signer, BadSignature
9 from django.http import HttpResponse
10 from django.utils import timezone
11 from filebrowser.base import FileObject
12 from filebrowser.settings import VERSIONS
13
14 from apps.authentication.models import OnlineUser as User
15 from apps.events.models import Event
16 from apps.splash.models import SplashYear
17
18 import icalendar
19
20
21 def get_group_restricted_events(user):
22 """ Returns a queryset of events with attendance_event that a user has access to """
23 types_allowed = []
24
25 groups = user.groups.all()
26
27 if reduce(lambda r, g: g.name in ['Hovedstyret', 'dotKom'] or r, groups, False):
28 return Event.objects.filter(attendance_event__isnull=False)
29
30 for group in groups:
31 if group.name == 'arrKom':
32 types_allowed.append(1) # sosialt
33 types_allowed.append(4) # utflukt
34
35 if group.name == 'bedKom':
36 types_allowed.append(2) # bedriftspresentasjon
37
38 if group.name == 'fagKom':
39 types_allowed.append(3) # kurs
40
41 return Event.objects.filter(attendance_event__isnull=False, event_type__in=types_allowed)
42
43
44 def handle_waitlist_bump(event, host, attendees, payment=None):
45
46 title = u'Du har fått plass på %s' % (event.title)
47
48 extended_deadline = timezone.now() + timedelta(days=2)
49 message = u'Du har stått på venteliste for arrangementet "%s" og har nå fått plass.\n' % (unicode(event.title))
50
51 if payment:
52 if payment.payment_type == 1: #Instant
53 for attendee in attendees:
54 payment.create_payment_delay(attendee.user, extended_deadline)
55 message += u"Dette arrangementet krever betaling og du må betale innen 48 timer."
56
57 elif payment.payment_type == 2: #Deadline
58 if payment.deadline > extended_deadline: #More than 2 days left of payment deadline
59 message += u"Dette arrangementet krever betaling og fristen for og betale er %s" % (payment.deadline.strftime('%-d %B %Y kl: %H:%M'))
60 else: #The deadline is in less than 2 days
61 for attendee in attendees:
62 payment.create_payment_delay(attendee.user, extended_deadline)
63 message += u"Dette arrangementet krever betaling og du har 48 timer på å betale"
64
65 elif payment.payment_type == 3: #Delay
66 deadline = timezone.now() + timedelta(days=payment.delay)
67 for attendee in attendees:
68 payment.create_payment_delay(attendee.user, deadline)
69 message += u"Dette arrangementet krever betaling og du må betale innen %d dager." % (payment.delay)
70 if len(payment.prices()) == 1:
71 message += u"\nPrisen for dette arrangementet er %skr." % (payment.prices()[0].price)
72 # elif len(payment.prices()) >= 2:
73 # message += u"\nDette arrangementet har flere prisklasser:"
74 # for payment_price in payment.prices():
75 # message += "\n%s: %skr" % (payment_price.description, payment_price.price)
76 else:
77 message += u"Det kreves ingen ekstra handling fra deg med mindre du vil melde deg av."
78
79 message += u"\n\nFor mer info:"
80 message += u"\nhttp://%s%s" % (host, event.get_absolute_url())
81
82 for attendee in attendees:
83 send_mail(title, message, settings.DEFAULT_FROM_EMAIL, [attendee.user.email])
84
85
86 class Calendar(object):
87 def __init__(self):
88 self.cal = icalendar.Calendar()
89 # Filename served by webserver
90 self.filename = 'online'
91 # Required ical info
92 self.cal.add('prodid', '-//Online//Onlineweb//EN')
93 self.cal.add('version', '2.0')
94
95 def add_event(self, event):
96 raise NotImplementedError
97
98 def add_events(self, events):
99 for event in events:
100 self.add_event(event)
101
102 def output(self):
103 """Return icalendar as text"""
104 return self.cal.to_ical()
105
106 def response(self):
107 """Returns a response object"""
108 response = HttpResponse(self.cal.to_ical(), content_type='text/calendar')
109 response['Content-Type'] = 'text/calendar; charset=utf-8'
110 response['Content-Disposition'] = 'attachment; filename=' + self.filename + '.ics'
111 return response
112
113
114 class EventCalendar(Calendar):
115 def user(self, user):
116 """
117 Personalized calendar
118 This calendar is publicly available, but the url is not guessable so data should not be leaked to everyone
119 """
120 signer = Signer()
121 try:
122 username = signer.unsign(user)
123 user = User.objects.get(username=username)
124 except (BadSignature, User.DoesNotExist):
125 user = None
126 if user:
127 # Getting all events that the user has/is participating in
128 self.add_events(Event.objects.filter(
129 attendance_event__attendees__user=user
130 ).order_by('event_start').prefetch_related(
131 'attendance_event', 'attendance_event__attendees'
132 ))
133 self.filename = username
134
135 def event(self, event_id):
136 """Single event"""
137 try:
138 self.add_event(Event.objects.get(id=event_id))
139 except Event.DoesNotExist:
140 pass
141 self.filename = str(event_id)
142
143 def events(self):
144 """All events that haven't ended yet"""
145 self.add_events(Event.objects.filter(event_end__gt=timezone.now()).order_by('event_start'))
146 self.filename = 'events'
147
148 def add_event(self, event):
149 cal_event = icalendar.Event()
150
151 cal_event.add('dtstart', event.event_start)
152 cal_event.add('dtend', event.event_end)
153 cal_event.add('location', event.location)
154 cal_event.add('summary', event.title)
155 cal_event.add('description', event.ingress_short)
156 cal_event.add('uid', 'event-' + str(event.id) + '@online.ntnu.no')
157
158 self.cal.add_component(cal_event)
159
160
161 class SplashCalendar(Calendar):
162 def add_event(self, event):
163 cal_event = icalendar.Event()
164 cal_event.add('dtstart', event.start_time)
165 cal_event.add('dtend', event.end_time)
166 cal_event.add('summary', event.title)
167 cal_event.add('description', event.content)
168 cal_event.add('uid', 'splash-' + str(event.id) + '@online.ntnu.no')
169
170 self.cal.add_component(cal_event)
171
172 def events(self):
173 self.add_events(SplashYear.objects.current().splash_events.all())
174 self.filename = 'events'
175
176
177 def find_image_versions(event):
178 img = event.image
179 img_strings = []
180
181 for ver in VERSIONS.keys():
182 if ver.startswith('events_'):
183 img_strings.append(img.version_generate(ver).url)
184
185 return img_strings
186
```
Path: `apps/splash/views.py`
Content:
```
1 from django.shortcuts import render
2 from apps.splash.models import SplashYear
3
4 from apps.events.utils import SplashCalendar
5
6
7 def index(request):
8 splash_year = SplashYear.objects.current()
9
10 splash_year.events = _merge_events(splash_year.splash_events.all())
11
12 return render(request, 'splash/base.html', {'splash_year': splash_year})
13
14
15 def calendar_export(request):
16 calendar = SplashCalendar()
17 calendar.events()
18 return calendar.response()
19
20
21 # And I'm really sorry for this ...
22 def _merge_events(splash_events):
23 events = []
24
25 for event in splash_events:
26 if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
27 events[-1].append(event)
28 else:
29 events.append([event])
30
31 return events
32
```
Path: `apps/splash/models.py`
Content:
```
1 from django.db import models
2
3 from django_extensions.db.models import TimeStampedModel
4 import datetime
5 import reversion
6
7
8 class SplashYearManager(models.Manager):
9 def current(self):
10 # I'm really sorry ...
11 return self.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
12
13
14 class SplashYear(models.Model):
15 title = models.CharField(u'title', max_length=100)
16 start_date = models.DateField(u'start_date')
17
18 objects = SplashYearManager()
19
20 def __unicode__(self):
21 return self.title
22
23 class Meta(object):
24 ordering = ('-start_date',)
25
26
27 class SplashEvent(TimeStampedModel, models.Model):
28 title = models.CharField(u'title', max_length=100)
29 content = models.TextField(u'content')
30 start_time = models.DateTimeField()
31 end_time = models.DateTimeField()
32 splash_year = models.ForeignKey('SplashYear', related_name='splash_events')
33
34 def __unicode__(self):
35 return self.title
36
37 class Meta(object):
38 ordering = ('start_time',)
39
40 reversion.register(SplashYear)
41 reversion.register(SplashEvent)
42
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/apps/events/utils.py b/apps/events/utils.py
--- a/apps/events/utils.py
+++ b/apps/events/utils.py
@@ -170,7 +170,7 @@
self.cal.add_component(cal_event)
def events(self):
- self.add_events(SplashYear.objects.current().splash_events.all())
+ self.add_events(SplashYear.objects.current_events())
self.filename = 'events'
diff --git a/apps/splash/models.py b/apps/splash/models.py
--- a/apps/splash/models.py
+++ b/apps/splash/models.py
@@ -8,7 +8,14 @@
class SplashYearManager(models.Manager):
def current(self):
# I'm really sorry ...
- return self.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
+ half_a_year_ago = str(datetime.date.today() - datetime.timedelta(180))
+ return self.filter(start_date__gt=half_a_year_ago).first()
+
+ def current_events(self):
+ current_splash = self.current()
+ if current_splash:
+ return current_splash.events()
+ return self.none()
class SplashYear(models.Model):
@@ -17,6 +24,9 @@
objects = SplashYearManager()
+ def events(self):
+ return self.splash_events.all()
+
def __unicode__(self):
return self.title
diff --git a/apps/splash/views.py b/apps/splash/views.py
--- a/apps/splash/views.py
+++ b/apps/splash/views.py
@@ -1,3 +1,4 @@
+from django.http import Http404
from django.shortcuts import render
from apps.splash.models import SplashYear
@@ -6,8 +7,10 @@
def index(request):
splash_year = SplashYear.objects.current()
+ if not splash_year:
+ raise Http404
- splash_year.events = _merge_events(splash_year.splash_events.all())
+ splash_year.events = _merge_events(splash_year.events())
return render(request, 'splash/base.html', {'splash_year': splash_year})
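
The fix above replaces the brittle `.get(...)` with `.filter(...).first()`, which yields `None` when no `SplashYear` matches, and lets the view translate that into a 404 instead of an unhandled `DoesNotExist`. A condensed sketch of the same idea (assuming a generic Django manager/queryset with a `start_date` field, not the app's exact models) could be:

```python
import datetime

from django.http import Http404


def current_splash_year(queryset):
    # .first() returns None instead of raising DoesNotExist
    half_a_year_ago = datetime.date.today() - datetime.timedelta(days=180)
    return queryset.filter(start_date__gt=half_a_year_ago).first()


def require_splash_year(queryset):
    splash_year = current_splash_year(queryset)
    if splash_year is None:
        raise Http404  # surface "no current splash" as a 404, not a 500
    return splash_year
```

The same `None` check also protects the calendar export, since `current_events()` in the patch falls back to an empty queryset when there is no current splash year.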
|
{"golden_diff": "diff --git a/apps/events/utils.py b/apps/events/utils.py\n--- a/apps/events/utils.py\n+++ b/apps/events/utils.py\n@@ -170,7 +170,7 @@\n self.cal.add_component(cal_event)\n \n def events(self):\n- self.add_events(SplashYear.objects.current().splash_events.all())\n+ self.add_events(SplashYear.objects.current_events())\n self.filename = 'events'\n \n \ndiff --git a/apps/splash/models.py b/apps/splash/models.py\n--- a/apps/splash/models.py\n+++ b/apps/splash/models.py\n@@ -8,7 +8,14 @@\n class SplashYearManager(models.Manager):\n def current(self):\n # I'm really sorry ...\n- return self.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))\n+ half_a_year_ago = str(datetime.date.today() - datetime.timedelta(180))\n+ return self.filter(start_date__gt=half_a_year_ago).first()\n+\n+ def current_events(self):\n+ current_splash = self.current()\n+ if current_splash:\n+ return current_splash.events()\n+ return self.none()\n \n \n class SplashYear(models.Model):\n@@ -17,6 +24,9 @@\n \n objects = SplashYearManager()\n \n+ def events(self):\n+ return self.splash_events.all()\n+\n def __unicode__(self):\n return self.title\n \ndiff --git a/apps/splash/views.py b/apps/splash/views.py\n--- a/apps/splash/views.py\n+++ b/apps/splash/views.py\n@@ -1,3 +1,4 @@\n+from django.http import Http404\n from django.shortcuts import render\n from apps.splash.models import SplashYear\n \n@@ -6,8 +7,10 @@\n \n def index(request):\n splash_year = SplashYear.objects.current()\n+ if not splash_year:\n+ raise Http404\n \n- splash_year.events = _merge_events(splash_year.splash_events.all())\n+ splash_year.events = _merge_events(splash_year.events())\n \n return render(request, 'splash/base.html', {'splash_year': splash_year})\n", "issue": "SplashYear matching query does not exist\nhttps://github.com/dotKom/onlineweb4/blob/develop/apps/splash/models.py#L11 tries to get a SplashYear, but there are no objects matching the query, so it triggers an exception. 
\n\n> DoesNotExist: SplashYear matching query does not exist.\n\nHow to quick-fix: Create a SplashYear which satisfies the matching query.\n\n", "before_files": [{"content": "#-*- coding: utf-8 -*-\nfrom datetime import timedelta\n\nfrom django.conf import settings\nfrom django.core.mail import send_mail\nfrom django.utils import timezone\n\nfrom django.core.signing import Signer, BadSignature\nfrom django.http import HttpResponse\nfrom django.utils import timezone\nfrom filebrowser.base import FileObject\nfrom filebrowser.settings import VERSIONS\n\nfrom apps.authentication.models import OnlineUser as User\nfrom apps.events.models import Event\nfrom apps.splash.models import SplashYear\n\nimport icalendar\n\n\ndef get_group_restricted_events(user):\n \"\"\" Returns a queryset of events with attendance_event that a user has access to \"\"\"\n types_allowed = []\n\n groups = user.groups.all()\n\n if reduce(lambda r, g: g.name in ['Hovedstyret', 'dotKom'] or r, groups, False):\n return Event.objects.filter(attendance_event__isnull=False)\n\n for group in groups:\n if group.name == 'arrKom':\n types_allowed.append(1) # sosialt\n types_allowed.append(4) # utflukt\n\n if group.name == 'bedKom':\n types_allowed.append(2) # bedriftspresentasjon\n\n if group.name == 'fagKom':\n types_allowed.append(3) # kurs\n\n return Event.objects.filter(attendance_event__isnull=False, event_type__in=types_allowed)\n\n\ndef handle_waitlist_bump(event, host, attendees, payment=None):\n\n title = u'Du har f\u00e5tt plass p\u00e5 %s' % (event.title)\n\n extended_deadline = timezone.now() + timedelta(days=2)\n message = u'Du har st\u00e5tt p\u00e5 venteliste for arrangementet \"%s\" og har n\u00e5 f\u00e5tt plass.\\n' % (unicode(event.title))\n\n if payment:\n if payment.payment_type == 1: #Instant\n for attendee in attendees:\n payment.create_payment_delay(attendee.user, extended_deadline)\n message += u\"Dette arrangementet krever betaling og du m\u00e5 betale innen 48 timer.\"\n\n elif payment.payment_type == 2: #Deadline\n if payment.deadline > extended_deadline: #More than 2 days left of payment deadline\n message += u\"Dette arrangementet krever betaling og fristen for og betale er %s\" % (payment.deadline.strftime('%-d %B %Y kl: %H:%M'))\n else: #The deadline is in less than 2 days\n for attendee in attendees:\n payment.create_payment_delay(attendee.user, extended_deadline)\n message += u\"Dette arrangementet krever betaling og du har 48 timer p\u00e5 \u00e5 betale\"\n\n elif payment.payment_type == 3: #Delay\n deadline = timezone.now() + timedelta(days=payment.delay)\n for attendee in attendees:\n payment.create_payment_delay(attendee.user, deadline)\n message += u\"Dette arrangementet krever betaling og du m\u00e5 betale innen %d dager.\" % (payment.delay)\n if len(payment.prices()) == 1:\n message += u\"\\nPrisen for dette arrangementet er %skr.\" % (payment.prices()[0].price)\n # elif len(payment.prices()) >= 2:\n # message += u\"\\nDette arrangementet har flere prisklasser:\"\n # for payment_price in payment.prices():\n # message += \"\\n%s: %skr\" % (payment_price.description, payment_price.price)\n else:\n message += u\"Det kreves ingen ekstra handling fra deg med mindre du vil melde deg av.\"\n\n message += u\"\\n\\nFor mer info:\"\n message += u\"\\nhttp://%s%s\" % (host, event.get_absolute_url())\n\n for attendee in attendees:\n send_mail(title, message, settings.DEFAULT_FROM_EMAIL, [attendee.user.email])\n\n\nclass Calendar(object):\n def __init__(self):\n self.cal = icalendar.Calendar()\n # 
Filename served by webserver\n self.filename = 'online'\n # Required ical info\n self.cal.add('prodid', '-//Online//Onlineweb//EN')\n self.cal.add('version', '2.0')\n\n def add_event(self, event):\n raise NotImplementedError\n\n def add_events(self, events):\n for event in events:\n self.add_event(event)\n\n def output(self):\n \"\"\"Return icalendar as text\"\"\"\n return self.cal.to_ical()\n\n def response(self):\n \"\"\"Returns a response object\"\"\"\n response = HttpResponse(self.cal.to_ical(), content_type='text/calendar')\n response['Content-Type'] = 'text/calendar; charset=utf-8'\n response['Content-Disposition'] = 'attachment; filename=' + self.filename + '.ics'\n return response\n\n\nclass EventCalendar(Calendar):\n def user(self, user):\n \"\"\"\n Personalized calendar\n This calendar is publicly available, but the url is not guessable so data should not be leaked to everyone\n \"\"\"\n signer = Signer()\n try:\n username = signer.unsign(user)\n user = User.objects.get(username=username)\n except (BadSignature, User.DoesNotExist):\n user = None\n if user:\n # Getting all events that the user has/is participating in\n self.add_events(Event.objects.filter(\n attendance_event__attendees__user=user\n ).order_by('event_start').prefetch_related(\n 'attendance_event', 'attendance_event__attendees'\n ))\n self.filename = username\n\n def event(self, event_id):\n \"\"\"Single event\"\"\"\n try:\n self.add_event(Event.objects.get(id=event_id))\n except Event.DoesNotExist:\n pass\n self.filename = str(event_id)\n\n def events(self):\n \"\"\"All events that haven't ended yet\"\"\"\n self.add_events(Event.objects.filter(event_end__gt=timezone.now()).order_by('event_start'))\n self.filename = 'events'\n\n def add_event(self, event):\n cal_event = icalendar.Event()\n\n cal_event.add('dtstart', event.event_start)\n cal_event.add('dtend', event.event_end)\n cal_event.add('location', event.location)\n cal_event.add('summary', event.title)\n cal_event.add('description', event.ingress_short)\n cal_event.add('uid', 'event-' + str(event.id) + '@online.ntnu.no')\n\n self.cal.add_component(cal_event)\n\n\nclass SplashCalendar(Calendar):\n def add_event(self, event):\n cal_event = icalendar.Event()\n cal_event.add('dtstart', event.start_time)\n cal_event.add('dtend', event.end_time)\n cal_event.add('summary', event.title)\n cal_event.add('description', event.content)\n cal_event.add('uid', 'splash-' + str(event.id) + '@online.ntnu.no')\n\n self.cal.add_component(cal_event)\n\n def events(self):\n self.add_events(SplashYear.objects.current().splash_events.all())\n self.filename = 'events'\n\n\ndef find_image_versions(event):\n img = event.image\n img_strings = []\n\n for ver in VERSIONS.keys():\n if ver.startswith('events_'):\n img_strings.append(img.version_generate(ver).url)\n\n return img_strings\n", "path": "apps/events/utils.py"}, {"content": "from django.shortcuts import render\nfrom apps.splash.models import SplashYear\n\nfrom apps.events.utils import SplashCalendar\n\n\ndef index(request):\n splash_year = SplashYear.objects.current()\n\n splash_year.events = _merge_events(splash_year.splash_events.all())\n\n return render(request, 'splash/base.html', {'splash_year': splash_year})\n\n\ndef calendar_export(request):\n calendar = SplashCalendar()\n calendar.events()\n return calendar.response()\n\n\n# And I'm really sorry for this ...\ndef _merge_events(splash_events):\n events = []\n\n for event in splash_events:\n if len(events) > 0 and event.start_time.strftime('%d-%m') == 
events[-1][0].start_time.strftime('%d-%m'):\n events[-1].append(event)\n else:\n events.append([event])\n\n return events\n", "path": "apps/splash/views.py"}, {"content": "from django.db import models\n\nfrom django_extensions.db.models import TimeStampedModel\nimport datetime\nimport reversion\n\n\nclass SplashYearManager(models.Manager):\n def current(self):\n # I'm really sorry ...\n return self.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))\n\n\nclass SplashYear(models.Model):\n title = models.CharField(u'title', max_length=100)\n start_date = models.DateField(u'start_date')\n\n objects = SplashYearManager()\n\n def __unicode__(self):\n return self.title\n\n class Meta(object):\n ordering = ('-start_date',)\n\n\nclass SplashEvent(TimeStampedModel, models.Model):\n title = models.CharField(u'title', max_length=100)\n content = models.TextField(u'content')\n start_time = models.DateTimeField()\n end_time = models.DateTimeField()\n splash_year = models.ForeignKey('SplashYear', related_name='splash_events')\n\n def __unicode__(self):\n return self.title\n\n class Meta(object):\n ordering = ('start_time',)\n\nreversion.register(SplashYear)\nreversion.register(SplashEvent)\n", "path": "apps/splash/models.py"}], "after_files": [{"content": "#-*- coding: utf-8 -*-\nfrom datetime import timedelta\n\nfrom django.conf import settings\nfrom django.core.mail import send_mail\nfrom django.utils import timezone\n\nfrom django.core.signing import Signer, BadSignature\nfrom django.http import HttpResponse\nfrom django.utils import timezone\nfrom filebrowser.base import FileObject\nfrom filebrowser.settings import VERSIONS\n\nfrom apps.authentication.models import OnlineUser as User\nfrom apps.events.models import Event\nfrom apps.splash.models import SplashYear\n\nimport icalendar\n\n\ndef get_group_restricted_events(user):\n \"\"\" Returns a queryset of events with attendance_event that a user has access to \"\"\"\n types_allowed = []\n\n groups = user.groups.all()\n\n if reduce(lambda r, g: g.name in ['Hovedstyret', 'dotKom'] or r, groups, False):\n return Event.objects.filter(attendance_event__isnull=False)\n\n for group in groups:\n if group.name == 'arrKom':\n types_allowed.append(1) # sosialt\n types_allowed.append(4) # utflukt\n\n if group.name == 'bedKom':\n types_allowed.append(2) # bedriftspresentasjon\n\n if group.name == 'fagKom':\n types_allowed.append(3) # kurs\n\n return Event.objects.filter(attendance_event__isnull=False, event_type__in=types_allowed)\n\n\ndef handle_waitlist_bump(event, host, attendees, payment=None):\n\n title = u'Du har f\u00e5tt plass p\u00e5 %s' % (event.title)\n\n extended_deadline = timezone.now() + timedelta(days=2)\n message = u'Du har st\u00e5tt p\u00e5 venteliste for arrangementet \"%s\" og har n\u00e5 f\u00e5tt plass.\\n' % (unicode(event.title))\n\n if payment:\n if payment.payment_type == 1: #Instant\n for attendee in attendees:\n payment.create_payment_delay(attendee.user, extended_deadline)\n message += u\"Dette arrangementet krever betaling og du m\u00e5 betale innen 48 timer.\"\n\n elif payment.payment_type == 2: #Deadline\n if payment.deadline > extended_deadline: #More than 2 days left of payment deadline\n message += u\"Dette arrangementet krever betaling og fristen for og betale er %s\" % (payment.deadline.strftime('%-d %B %Y kl: %H:%M'))\n else: #The deadline is in less than 2 days\n for attendee in attendees:\n payment.create_payment_delay(attendee.user, extended_deadline)\n message += u\"Dette arrangementet krever 
betaling og du har 48 timer p\u00e5 \u00e5 betale\"\n\n elif payment.payment_type == 3: #Delay\n deadline = timezone.now() + timedelta(days=payment.delay)\n for attendee in attendees:\n payment.create_payment_delay(attendee.user, deadline)\n message += u\"Dette arrangementet krever betaling og du m\u00e5 betale innen %d dager.\" % (payment.delay)\n if len(payment.prices()) == 1:\n message += u\"\\nPrisen for dette arrangementet er %skr.\" % (payment.prices()[0].price)\n # elif len(payment.prices()) >= 2:\n # message += u\"\\nDette arrangementet har flere prisklasser:\"\n # for payment_price in payment.prices():\n # message += \"\\n%s: %skr\" % (payment_price.description, payment_price.price)\n else:\n message += u\"Det kreves ingen ekstra handling fra deg med mindre du vil melde deg av.\"\n\n message += u\"\\n\\nFor mer info:\"\n message += u\"\\nhttp://%s%s\" % (host, event.get_absolute_url())\n\n for attendee in attendees:\n send_mail(title, message, settings.DEFAULT_FROM_EMAIL, [attendee.user.email])\n\n\nclass Calendar(object):\n def __init__(self):\n self.cal = icalendar.Calendar()\n # Filename served by webserver\n self.filename = 'online'\n # Required ical info\n self.cal.add('prodid', '-//Online//Onlineweb//EN')\n self.cal.add('version', '2.0')\n\n def add_event(self, event):\n raise NotImplementedError\n\n def add_events(self, events):\n for event in events:\n self.add_event(event)\n\n def output(self):\n \"\"\"Return icalendar as text\"\"\"\n return self.cal.to_ical()\n\n def response(self):\n \"\"\"Returns a response object\"\"\"\n response = HttpResponse(self.cal.to_ical(), content_type='text/calendar')\n response['Content-Type'] = 'text/calendar; charset=utf-8'\n response['Content-Disposition'] = 'attachment; filename=' + self.filename + '.ics'\n return response\n\n\nclass EventCalendar(Calendar):\n def user(self, user):\n \"\"\"\n Personalized calendar\n This calendar is publicly available, but the url is not guessable so data should not be leaked to everyone\n \"\"\"\n signer = Signer()\n try:\n username = signer.unsign(user)\n user = User.objects.get(username=username)\n except (BadSignature, User.DoesNotExist):\n user = None\n if user:\n # Getting all events that the user has/is participating in\n self.add_events(Event.objects.filter(\n attendance_event__attendees__user=user\n ).order_by('event_start').prefetch_related(\n 'attendance_event', 'attendance_event__attendees'\n ))\n self.filename = username\n\n def event(self, event_id):\n \"\"\"Single event\"\"\"\n try:\n self.add_event(Event.objects.get(id=event_id))\n except Event.DoesNotExist:\n pass\n self.filename = str(event_id)\n\n def events(self):\n \"\"\"All events that haven't ended yet\"\"\"\n self.add_events(Event.objects.filter(event_end__gt=timezone.now()).order_by('event_start'))\n self.filename = 'events'\n\n def add_event(self, event):\n cal_event = icalendar.Event()\n\n cal_event.add('dtstart', event.event_start)\n cal_event.add('dtend', event.event_end)\n cal_event.add('location', event.location)\n cal_event.add('summary', event.title)\n cal_event.add('description', event.ingress_short)\n cal_event.add('uid', 'event-' + str(event.id) + '@online.ntnu.no')\n\n self.cal.add_component(cal_event)\n\n\nclass SplashCalendar(Calendar):\n def add_event(self, event):\n cal_event = icalendar.Event()\n cal_event.add('dtstart', event.start_time)\n cal_event.add('dtend', event.end_time)\n cal_event.add('summary', event.title)\n cal_event.add('description', event.content)\n cal_event.add('uid', 'splash-' + str(event.id) + 
'@online.ntnu.no')\n\n self.cal.add_component(cal_event)\n\n def events(self):\n self.add_events(SplashYear.objects.current_events())\n self.filename = 'events'\n\n\ndef find_image_versions(event):\n img = event.image\n img_strings = []\n\n for ver in VERSIONS.keys():\n if ver.startswith('events_'):\n img_strings.append(img.version_generate(ver).url)\n\n return img_strings\n", "path": "apps/events/utils.py"}, {"content": "from django.http import Http404\nfrom django.shortcuts import render\nfrom apps.splash.models import SplashYear\n\nfrom apps.events.utils import SplashCalendar\n\n\ndef index(request):\n splash_year = SplashYear.objects.current()\n if not splash_year:\n raise Http404\n\n splash_year.events = _merge_events(splash_year.events())\n\n return render(request, 'splash/base.html', {'splash_year': splash_year})\n\n\ndef calendar_export(request):\n calendar = SplashCalendar()\n calendar.events()\n return calendar.response()\n\n\n# And I'm really sorry for this ...\ndef _merge_events(splash_events):\n events = []\n\n for event in splash_events:\n if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):\n events[-1].append(event)\n else:\n events.append([event])\n\n return events\n", "path": "apps/splash/views.py"}, {"content": "from django.db import models\n\nfrom django_extensions.db.models import TimeStampedModel\nimport datetime\nimport reversion\n\n\nclass SplashYearManager(models.Manager):\n def current(self):\n # I'm really sorry ...\n half_a_year_ago = str(datetime.date.today() - datetime.timedelta(180))\n return self.filter(start_date__gt=half_a_year_ago).first()\n\n def current_events(self):\n current_splash = self.current()\n if current_splash:\n return current_splash.events()\n return self.none()\n\n\nclass SplashYear(models.Model):\n title = models.CharField(u'title', max_length=100)\n start_date = models.DateField(u'start_date')\n\n objects = SplashYearManager()\n\n def events(self):\n return self.splash_events.all()\n\n def __unicode__(self):\n return self.title\n\n class Meta(object):\n ordering = ('-start_date',)\n\n\nclass SplashEvent(TimeStampedModel, models.Model):\n title = models.CharField(u'title', max_length=100)\n content = models.TextField(u'content')\n start_time = models.DateTimeField()\n end_time = models.DateTimeField()\n splash_year = models.ForeignKey('SplashYear', related_name='splash_events')\n\n def __unicode__(self):\n return self.title\n\n class Meta(object):\n ordering = ('start_time',)\n\nreversion.register(SplashYear)\nreversion.register(SplashEvent)\n", "path": "apps/splash/models.py"}]}
| 2,948 | 464 |
gh_patches_debug_12480
|
rasdani/github-patches
|
git_diff
|
wagtail__wagtail-7023
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Wagtail API pages endpoint responds with a 500 error when entering an invalid value to a filter
### Issue Summary
When querying the Wagtail API and using filters, invalid filter values cause the server to respond with a 500 error.
Given a `BlogPage` with an `author` ForeignKey.
e.g. `/api/v2/pages/?type=blog.BlogPage&author=a`
This returns a 500 error.
### Steps to Reproduce
1. Create any page type that has a foreign key
2. Query that page type using the Wagtail API, and pass an invalid value to a filter (e.g. a letter to a foreign key field)
3. Server Error 500
Any other relevant information. For example, why do you consider this a bug and what did you expect to happen instead?
* This is not a server error. It's a client error.
* We already have this error message for invalid IDs:
```
HTTP 400 Bad Request
Allow: GET, HEAD, OPTIONS
Content-Type: application/json
Vary: Accept
{
"message": "field filter error. 'a' is not a valid value for id (invalid literal for int() with base 10: 'a')"
}
```
* A 400 error is expected.
### Technical details
* Python version: 3.8.6.
* Django version: 3.1.
* Wagtail version: 2.12.
* Browser version: Firefox 87
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `wagtail/api/v2/filters.py`
Content:
```
1 from django.conf import settings
2 from django.db import models
3 from django.shortcuts import get_object_or_404
4 from rest_framework.filters import BaseFilterBackend
5 from taggit.managers import TaggableManager
6
7 from wagtail.core.models import Locale, Page
8 from wagtail.search.backends import get_search_backend
9 from wagtail.search.backends.base import FilterFieldError, OrderByFieldError
10
11 from .utils import BadRequestError, parse_boolean
12
13
14 class FieldsFilter(BaseFilterBackend):
15 def filter_queryset(self, request, queryset, view):
16 """
17 This performs field level filtering on the result set
18 Eg: ?title=James Joyce
19 """
20 fields = set(view.get_available_fields(queryset.model, db_fields_only=True))
21
22 # Locale is a database field, but we provide a separate filter for it
23 if 'locale' in fields:
24 fields.remove('locale')
25
26 for field_name, value in request.GET.items():
27 if field_name in fields:
28 try:
29 field = queryset.model._meta.get_field(field_name)
30 except LookupError:
31 field = None
32
33 # Convert value into python
34 try:
35 if isinstance(field, (models.BooleanField, models.NullBooleanField)):
36 value = parse_boolean(value)
37 elif isinstance(field, (models.IntegerField, models.AutoField)):
38 value = int(value)
39 except ValueError as e:
40 raise BadRequestError("field filter error. '%s' is not a valid value for %s (%s)" % (
41 value,
42 field_name,
43 str(e)
44 ))
45
46 if isinstance(field, TaggableManager):
47 for tag in value.split(','):
48 queryset = queryset.filter(**{field_name + '__name': tag})
49
50 # Stick a message on the queryset to indicate that tag filtering has been performed
51 # This will let the do_search method know that it must raise an error as searching
52 # and tag filtering at the same time is not supported
53 queryset._filtered_by_tag = True
54 else:
55 queryset = queryset.filter(**{field_name: value})
56
57 return queryset
58
59
60 class OrderingFilter(BaseFilterBackend):
61 def filter_queryset(self, request, queryset, view):
62 """
63 This applies ordering to the result set
64 Eg: ?order=title
65
66 It also supports reverse ordering
67 Eg: ?order=-title
68
69 And random ordering
70 Eg: ?order=random
71 """
72 if 'order' in request.GET:
73 order_by = request.GET['order']
74
75 # Random ordering
76 if order_by == 'random':
77 # Prevent ordering by random with offset
78 if 'offset' in request.GET:
79 raise BadRequestError("random ordering with offset is not supported")
80
81 return queryset.order_by('?')
82
83 # Check if reverse ordering is set
84 if order_by.startswith('-'):
85 reverse_order = True
86 order_by = order_by[1:]
87 else:
88 reverse_order = False
89
90 # Add ordering
91 if order_by in view.get_available_fields(queryset.model):
92 queryset = queryset.order_by(order_by)
93 else:
94 # Unknown field
95 raise BadRequestError("cannot order by '%s' (unknown field)" % order_by)
96
97 # Reverse order
98 if reverse_order:
99 queryset = queryset.reverse()
100
101 return queryset
102
103
104 class SearchFilter(BaseFilterBackend):
105 def filter_queryset(self, request, queryset, view):
106 """
107 This performs a full-text search on the result set
108 Eg: ?search=James Joyce
109 """
110 search_enabled = getattr(settings, 'WAGTAILAPI_SEARCH_ENABLED', True)
111
112 if 'search' in request.GET:
113 if not search_enabled:
114 raise BadRequestError("search is disabled")
115
116 # Searching and filtering by tag at the same time is not supported
117 if getattr(queryset, '_filtered_by_tag', False):
118 raise BadRequestError("filtering by tag with a search query is not supported")
119
120 search_query = request.GET['search']
121 search_operator = request.GET.get('search_operator', None)
122 order_by_relevance = 'order' not in request.GET
123
124 sb = get_search_backend()
125 try:
126 queryset = sb.search(search_query, queryset, operator=search_operator, order_by_relevance=order_by_relevance)
127 except FilterFieldError as e:
128 raise BadRequestError("cannot filter by '{}' while searching (field is not indexed)".format(e.field_name))
129 except OrderByFieldError as e:
130 raise BadRequestError("cannot order by '{}' while searching (field is not indexed)".format(e.field_name))
131
132 return queryset
133
134
135 class ChildOfFilter(BaseFilterBackend):
136 """
137 Implements the ?child_of filter used to filter the results to only contain
138 pages that are direct children of the specified page.
139 """
140 def filter_queryset(self, request, queryset, view):
141 if 'child_of' in request.GET:
142 try:
143 parent_page_id = int(request.GET['child_of'])
144 if parent_page_id < 0:
145 raise ValueError()
146
147 parent_page = view.get_base_queryset().get(id=parent_page_id)
148 except ValueError:
149 if request.GET['child_of'] == 'root':
150 parent_page = view.get_root_page()
151 else:
152 raise BadRequestError("child_of must be a positive integer")
153 except Page.DoesNotExist:
154 raise BadRequestError("parent page doesn't exist")
155
156 queryset = queryset.child_of(parent_page)
157
158 # Save the parent page on the queryset. This is required for the page
159 # explorer, which needs to pass the parent page into
160 # `construct_explorer_page_queryset` hook functions
161 queryset._filtered_by_child_of = parent_page
162
163 return queryset
164
165
166 class DescendantOfFilter(BaseFilterBackend):
167 """
168 Implements the ?decendant_of filter which limits the set of pages to a
169 particular branch of the page tree.
170 """
171 def filter_queryset(self, request, queryset, view):
172 if 'descendant_of' in request.GET:
173 if hasattr(queryset, '_filtered_by_child_of'):
174 raise BadRequestError("filtering by descendant_of with child_of is not supported")
175 try:
176 parent_page_id = int(request.GET['descendant_of'])
177 if parent_page_id < 0:
178 raise ValueError()
179
180 parent_page = view.get_base_queryset().get(id=parent_page_id)
181 except ValueError:
182 if request.GET['descendant_of'] == 'root':
183 parent_page = view.get_root_page()
184 else:
185 raise BadRequestError("descendant_of must be a positive integer")
186 except Page.DoesNotExist:
187 raise BadRequestError("ancestor page doesn't exist")
188
189 queryset = queryset.descendant_of(parent_page)
190
191 return queryset
192
193
194 class TranslationOfFilter(BaseFilterBackend):
195 """
196 Implements the ?translation_of filter which limits the set of pages to translations
197 of a page.
198 """
199 def filter_queryset(self, request, queryset, view):
200 if 'translation_of' in request.GET:
201 try:
202 page_id = int(request.GET['translation_of'])
203 if page_id < 0:
204 raise ValueError()
205
206 page = view.get_base_queryset().get(id=page_id)
207 except ValueError:
208 if request.GET['translation_of'] == 'root':
209 page = view.get_root_page()
210 else:
211 raise BadRequestError("translation_of must be a positive integer")
212 except Page.DoesNotExist:
213 raise BadRequestError("translation_of page doesn't exist")
214
215 _filtered_by_child_of = getattr(queryset, '_filtered_by_child_of', None)
216
217 queryset = queryset.translation_of(page)
218
219 if _filtered_by_child_of:
220 queryset._filtered_by_child_of = _filtered_by_child_of
221
222 return queryset
223
224
225 class LocaleFilter(BaseFilterBackend):
226 """
227 Implements the ?locale filter which limits the set of pages to a
228 particular locale.
229 """
230 def filter_queryset(self, request, queryset, view):
231 if 'locale' in request.GET:
232 _filtered_by_child_of = getattr(queryset, '_filtered_by_child_of', None)
233
234 locale = get_object_or_404(Locale, language_code=request.GET['locale'])
235 queryset = queryset.filter(locale=locale)
236
237 if _filtered_by_child_of:
238 queryset._filtered_by_child_of = _filtered_by_child_of
239
240 return queryset
241
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/wagtail/api/v2/filters.py b/wagtail/api/v2/filters.py
--- a/wagtail/api/v2/filters.py
+++ b/wagtail/api/v2/filters.py
@@ -36,6 +36,8 @@
value = parse_boolean(value)
elif isinstance(field, (models.IntegerField, models.AutoField)):
value = int(value)
+ elif isinstance(field, models.ForeignKey):
+ value = field.target_field.get_prep_value(value)
except ValueError as e:
raise BadRequestError("field filter error. '%s' is not a valid value for %s (%s)" % (
value,
|
{"golden_diff": "diff --git a/wagtail/api/v2/filters.py b/wagtail/api/v2/filters.py\n--- a/wagtail/api/v2/filters.py\n+++ b/wagtail/api/v2/filters.py\n@@ -36,6 +36,8 @@\n value = parse_boolean(value)\n elif isinstance(field, (models.IntegerField, models.AutoField)):\n value = int(value)\n+ elif isinstance(field, models.ForeignKey):\n+ value = field.target_field.get_prep_value(value)\n except ValueError as e:\n raise BadRequestError(\"field filter error. '%s' is not a valid value for %s (%s)\" % (\n value,\n", "issue": "Wagtail API pages endpoint responds with a 500 error when entering an invalid value to a filter\n### Issue Summary\r\n\r\nWhen querying the Wagtail API and using filters, invalid filter values cause the server to respond with a 500 error.\r\n\r\nGiven a `BlogPage` with an `author` ForeignKey.\r\n\r\ne.g. `/api/v2/pages/?type=blog.BlogPage&author=a`\r\n\r\nThis returns a 500 error.\r\n\r\n### Steps to Reproduce\r\n\r\n1. Create any page type that has a foreign key\r\n2. Query that page type using the Wagtail API, and pass an invalid value to a filter (e.g. a letter to a foreign key field)\r\n3. Server Error 500\r\n\r\nAny other relevant information. For example, why do you consider this a bug and what did you expect to happen instead?\r\n\r\n* This is not a server error. It's a client error.\r\n* We already have this error message for invalid IDs:\r\n\r\n```\r\nHTTP 400 Bad Request\r\nAllow: GET, HEAD, OPTIONS\r\nContent-Type: application/json\r\nVary: Accept\r\n\r\n{\r\n \"message\": \"field filter error. 'a' is not a valid value for id (invalid literal for int() with base 10: 'a')\"\r\n}\r\n```\r\n\r\n* A 400 error is expected.\r\n\r\n\r\n### Technical details\r\n\r\n* Python version: 3.8.6.\r\n* Django version: 3.1.\r\n* Wagtail version: 2.12.\r\n* Browser version: Firefox 87\r\n\n", "before_files": [{"content": "from django.conf import settings\nfrom django.db import models\nfrom django.shortcuts import get_object_or_404\nfrom rest_framework.filters import BaseFilterBackend\nfrom taggit.managers import TaggableManager\n\nfrom wagtail.core.models import Locale, Page\nfrom wagtail.search.backends import get_search_backend\nfrom wagtail.search.backends.base import FilterFieldError, OrderByFieldError\n\nfrom .utils import BadRequestError, parse_boolean\n\n\nclass FieldsFilter(BaseFilterBackend):\n def filter_queryset(self, request, queryset, view):\n \"\"\"\n This performs field level filtering on the result set\n Eg: ?title=James Joyce\n \"\"\"\n fields = set(view.get_available_fields(queryset.model, db_fields_only=True))\n\n # Locale is a database field, but we provide a separate filter for it\n if 'locale' in fields:\n fields.remove('locale')\n\n for field_name, value in request.GET.items():\n if field_name in fields:\n try:\n field = queryset.model._meta.get_field(field_name)\n except LookupError:\n field = None\n\n # Convert value into python\n try:\n if isinstance(field, (models.BooleanField, models.NullBooleanField)):\n value = parse_boolean(value)\n elif isinstance(field, (models.IntegerField, models.AutoField)):\n value = int(value)\n except ValueError as e:\n raise BadRequestError(\"field filter error. 
'%s' is not a valid value for %s (%s)\" % (\n value,\n field_name,\n str(e)\n ))\n\n if isinstance(field, TaggableManager):\n for tag in value.split(','):\n queryset = queryset.filter(**{field_name + '__name': tag})\n\n # Stick a message on the queryset to indicate that tag filtering has been performed\n # This will let the do_search method know that it must raise an error as searching\n # and tag filtering at the same time is not supported\n queryset._filtered_by_tag = True\n else:\n queryset = queryset.filter(**{field_name: value})\n\n return queryset\n\n\nclass OrderingFilter(BaseFilterBackend):\n def filter_queryset(self, request, queryset, view):\n \"\"\"\n This applies ordering to the result set\n Eg: ?order=title\n\n It also supports reverse ordering\n Eg: ?order=-title\n\n And random ordering\n Eg: ?order=random\n \"\"\"\n if 'order' in request.GET:\n order_by = request.GET['order']\n\n # Random ordering\n if order_by == 'random':\n # Prevent ordering by random with offset\n if 'offset' in request.GET:\n raise BadRequestError(\"random ordering with offset is not supported\")\n\n return queryset.order_by('?')\n\n # Check if reverse ordering is set\n if order_by.startswith('-'):\n reverse_order = True\n order_by = order_by[1:]\n else:\n reverse_order = False\n\n # Add ordering\n if order_by in view.get_available_fields(queryset.model):\n queryset = queryset.order_by(order_by)\n else:\n # Unknown field\n raise BadRequestError(\"cannot order by '%s' (unknown field)\" % order_by)\n\n # Reverse order\n if reverse_order:\n queryset = queryset.reverse()\n\n return queryset\n\n\nclass SearchFilter(BaseFilterBackend):\n def filter_queryset(self, request, queryset, view):\n \"\"\"\n This performs a full-text search on the result set\n Eg: ?search=James Joyce\n \"\"\"\n search_enabled = getattr(settings, 'WAGTAILAPI_SEARCH_ENABLED', True)\n\n if 'search' in request.GET:\n if not search_enabled:\n raise BadRequestError(\"search is disabled\")\n\n # Searching and filtering by tag at the same time is not supported\n if getattr(queryset, '_filtered_by_tag', False):\n raise BadRequestError(\"filtering by tag with a search query is not supported\")\n\n search_query = request.GET['search']\n search_operator = request.GET.get('search_operator', None)\n order_by_relevance = 'order' not in request.GET\n\n sb = get_search_backend()\n try:\n queryset = sb.search(search_query, queryset, operator=search_operator, order_by_relevance=order_by_relevance)\n except FilterFieldError as e:\n raise BadRequestError(\"cannot filter by '{}' while searching (field is not indexed)\".format(e.field_name))\n except OrderByFieldError as e:\n raise BadRequestError(\"cannot order by '{}' while searching (field is not indexed)\".format(e.field_name))\n\n return queryset\n\n\nclass ChildOfFilter(BaseFilterBackend):\n \"\"\"\n Implements the ?child_of filter used to filter the results to only contain\n pages that are direct children of the specified page.\n \"\"\"\n def filter_queryset(self, request, queryset, view):\n if 'child_of' in request.GET:\n try:\n parent_page_id = int(request.GET['child_of'])\n if parent_page_id < 0:\n raise ValueError()\n\n parent_page = view.get_base_queryset().get(id=parent_page_id)\n except ValueError:\n if request.GET['child_of'] == 'root':\n parent_page = view.get_root_page()\n else:\n raise BadRequestError(\"child_of must be a positive integer\")\n except Page.DoesNotExist:\n raise BadRequestError(\"parent page doesn't exist\")\n\n queryset = queryset.child_of(parent_page)\n\n # Save the parent 
page on the queryset. This is required for the page\n # explorer, which needs to pass the parent page into\n # `construct_explorer_page_queryset` hook functions\n queryset._filtered_by_child_of = parent_page\n\n return queryset\n\n\nclass DescendantOfFilter(BaseFilterBackend):\n \"\"\"\n Implements the ?decendant_of filter which limits the set of pages to a\n particular branch of the page tree.\n \"\"\"\n def filter_queryset(self, request, queryset, view):\n if 'descendant_of' in request.GET:\n if hasattr(queryset, '_filtered_by_child_of'):\n raise BadRequestError(\"filtering by descendant_of with child_of is not supported\")\n try:\n parent_page_id = int(request.GET['descendant_of'])\n if parent_page_id < 0:\n raise ValueError()\n\n parent_page = view.get_base_queryset().get(id=parent_page_id)\n except ValueError:\n if request.GET['descendant_of'] == 'root':\n parent_page = view.get_root_page()\n else:\n raise BadRequestError(\"descendant_of must be a positive integer\")\n except Page.DoesNotExist:\n raise BadRequestError(\"ancestor page doesn't exist\")\n\n queryset = queryset.descendant_of(parent_page)\n\n return queryset\n\n\nclass TranslationOfFilter(BaseFilterBackend):\n \"\"\"\n Implements the ?translation_of filter which limits the set of pages to translations\n of a page.\n \"\"\"\n def filter_queryset(self, request, queryset, view):\n if 'translation_of' in request.GET:\n try:\n page_id = int(request.GET['translation_of'])\n if page_id < 0:\n raise ValueError()\n\n page = view.get_base_queryset().get(id=page_id)\n except ValueError:\n if request.GET['translation_of'] == 'root':\n page = view.get_root_page()\n else:\n raise BadRequestError(\"translation_of must be a positive integer\")\n except Page.DoesNotExist:\n raise BadRequestError(\"translation_of page doesn't exist\")\n\n _filtered_by_child_of = getattr(queryset, '_filtered_by_child_of', None)\n\n queryset = queryset.translation_of(page)\n\n if _filtered_by_child_of:\n queryset._filtered_by_child_of = _filtered_by_child_of\n\n return queryset\n\n\nclass LocaleFilter(BaseFilterBackend):\n \"\"\"\n Implements the ?locale filter which limits the set of pages to a\n particular locale.\n \"\"\"\n def filter_queryset(self, request, queryset, view):\n if 'locale' in request.GET:\n _filtered_by_child_of = getattr(queryset, '_filtered_by_child_of', None)\n\n locale = get_object_or_404(Locale, language_code=request.GET['locale'])\n queryset = queryset.filter(locale=locale)\n\n if _filtered_by_child_of:\n queryset._filtered_by_child_of = _filtered_by_child_of\n\n return queryset\n", "path": "wagtail/api/v2/filters.py"}], "after_files": [{"content": "from django.conf import settings\nfrom django.db import models\nfrom django.shortcuts import get_object_or_404\nfrom rest_framework.filters import BaseFilterBackend\nfrom taggit.managers import TaggableManager\n\nfrom wagtail.core.models import Locale, Page\nfrom wagtail.search.backends import get_search_backend\nfrom wagtail.search.backends.base import FilterFieldError, OrderByFieldError\n\nfrom .utils import BadRequestError, parse_boolean\n\n\nclass FieldsFilter(BaseFilterBackend):\n def filter_queryset(self, request, queryset, view):\n \"\"\"\n This performs field level filtering on the result set\n Eg: ?title=James Joyce\n \"\"\"\n fields = set(view.get_available_fields(queryset.model, db_fields_only=True))\n\n # Locale is a database field, but we provide a separate filter for it\n if 'locale' in fields:\n fields.remove('locale')\n\n for field_name, value in request.GET.items():\n if 
field_name in fields:\n try:\n field = queryset.model._meta.get_field(field_name)\n except LookupError:\n field = None\n\n # Convert value into python\n try:\n if isinstance(field, (models.BooleanField, models.NullBooleanField)):\n value = parse_boolean(value)\n elif isinstance(field, (models.IntegerField, models.AutoField)):\n value = int(value)\n elif isinstance(field, models.ForeignKey):\n value = field.target_field.get_prep_value(value)\n except ValueError as e:\n raise BadRequestError(\"field filter error. '%s' is not a valid value for %s (%s)\" % (\n value,\n field_name,\n str(e)\n ))\n\n if isinstance(field, TaggableManager):\n for tag in value.split(','):\n queryset = queryset.filter(**{field_name + '__name': tag})\n\n # Stick a message on the queryset to indicate that tag filtering has been performed\n # This will let the do_search method know that it must raise an error as searching\n # and tag filtering at the same time is not supported\n queryset._filtered_by_tag = True\n else:\n queryset = queryset.filter(**{field_name: value})\n\n return queryset\n\n\nclass OrderingFilter(BaseFilterBackend):\n def filter_queryset(self, request, queryset, view):\n \"\"\"\n This applies ordering to the result set\n Eg: ?order=title\n\n It also supports reverse ordering\n Eg: ?order=-title\n\n And random ordering\n Eg: ?order=random\n \"\"\"\n if 'order' in request.GET:\n order_by = request.GET['order']\n\n # Random ordering\n if order_by == 'random':\n # Prevent ordering by random with offset\n if 'offset' in request.GET:\n raise BadRequestError(\"random ordering with offset is not supported\")\n\n return queryset.order_by('?')\n\n # Check if reverse ordering is set\n if order_by.startswith('-'):\n reverse_order = True\n order_by = order_by[1:]\n else:\n reverse_order = False\n\n # Add ordering\n if order_by in view.get_available_fields(queryset.model):\n queryset = queryset.order_by(order_by)\n else:\n # Unknown field\n raise BadRequestError(\"cannot order by '%s' (unknown field)\" % order_by)\n\n # Reverse order\n if reverse_order:\n queryset = queryset.reverse()\n\n return queryset\n\n\nclass SearchFilter(BaseFilterBackend):\n def filter_queryset(self, request, queryset, view):\n \"\"\"\n This performs a full-text search on the result set\n Eg: ?search=James Joyce\n \"\"\"\n search_enabled = getattr(settings, 'WAGTAILAPI_SEARCH_ENABLED', True)\n\n if 'search' in request.GET:\n if not search_enabled:\n raise BadRequestError(\"search is disabled\")\n\n # Searching and filtering by tag at the same time is not supported\n if getattr(queryset, '_filtered_by_tag', False):\n raise BadRequestError(\"filtering by tag with a search query is not supported\")\n\n search_query = request.GET['search']\n search_operator = request.GET.get('search_operator', None)\n order_by_relevance = 'order' not in request.GET\n\n sb = get_search_backend()\n try:\n queryset = sb.search(search_query, queryset, operator=search_operator, order_by_relevance=order_by_relevance)\n except FilterFieldError as e:\n raise BadRequestError(\"cannot filter by '{}' while searching (field is not indexed)\".format(e.field_name))\n except OrderByFieldError as e:\n raise BadRequestError(\"cannot order by '{}' while searching (field is not indexed)\".format(e.field_name))\n\n return queryset\n\n\nclass ChildOfFilter(BaseFilterBackend):\n \"\"\"\n Implements the ?child_of filter used to filter the results to only contain\n pages that are direct children of the specified page.\n \"\"\"\n def filter_queryset(self, request, queryset, view):\n 
if 'child_of' in request.GET:\n try:\n parent_page_id = int(request.GET['child_of'])\n if parent_page_id < 0:\n raise ValueError()\n\n parent_page = view.get_base_queryset().get(id=parent_page_id)\n except ValueError:\n if request.GET['child_of'] == 'root':\n parent_page = view.get_root_page()\n else:\n raise BadRequestError(\"child_of must be a positive integer\")\n except Page.DoesNotExist:\n raise BadRequestError(\"parent page doesn't exist\")\n\n queryset = queryset.child_of(parent_page)\n\n # Save the parent page on the queryset. This is required for the page\n # explorer, which needs to pass the parent page into\n # `construct_explorer_page_queryset` hook functions\n queryset._filtered_by_child_of = parent_page\n\n return queryset\n\n\nclass DescendantOfFilter(BaseFilterBackend):\n \"\"\"\n Implements the ?decendant_of filter which limits the set of pages to a\n particular branch of the page tree.\n \"\"\"\n def filter_queryset(self, request, queryset, view):\n if 'descendant_of' in request.GET:\n if hasattr(queryset, '_filtered_by_child_of'):\n raise BadRequestError(\"filtering by descendant_of with child_of is not supported\")\n try:\n parent_page_id = int(request.GET['descendant_of'])\n if parent_page_id < 0:\n raise ValueError()\n\n parent_page = view.get_base_queryset().get(id=parent_page_id)\n except ValueError:\n if request.GET['descendant_of'] == 'root':\n parent_page = view.get_root_page()\n else:\n raise BadRequestError(\"descendant_of must be a positive integer\")\n except Page.DoesNotExist:\n raise BadRequestError(\"ancestor page doesn't exist\")\n\n queryset = queryset.descendant_of(parent_page)\n\n return queryset\n\n\nclass TranslationOfFilter(BaseFilterBackend):\n \"\"\"\n Implements the ?translation_of filter which limits the set of pages to translations\n of a page.\n \"\"\"\n def filter_queryset(self, request, queryset, view):\n if 'translation_of' in request.GET:\n try:\n page_id = int(request.GET['translation_of'])\n if page_id < 0:\n raise ValueError()\n\n page = view.get_base_queryset().get(id=page_id)\n except ValueError:\n if request.GET['translation_of'] == 'root':\n page = view.get_root_page()\n else:\n raise BadRequestError(\"translation_of must be a positive integer\")\n except Page.DoesNotExist:\n raise BadRequestError(\"translation_of page doesn't exist\")\n\n _filtered_by_child_of = getattr(queryset, '_filtered_by_child_of', None)\n\n queryset = queryset.translation_of(page)\n\n if _filtered_by_child_of:\n queryset._filtered_by_child_of = _filtered_by_child_of\n\n return queryset\n\n\nclass LocaleFilter(BaseFilterBackend):\n \"\"\"\n Implements the ?locale filter which limits the set of pages to a\n particular locale.\n \"\"\"\n def filter_queryset(self, request, queryset, view):\n if 'locale' in request.GET:\n _filtered_by_child_of = getattr(queryset, '_filtered_by_child_of', None)\n\n locale = get_object_or_404(Locale, language_code=request.GET['locale'])\n queryset = queryset.filter(locale=locale)\n\n if _filtered_by_child_of:\n queryset._filtered_by_child_of = _filtered_by_child_of\n\n return queryset\n", "path": "wagtail/api/v2/filters.py"}]}
| 2,960 | 140 |
gh_patches_debug_2346
|
rasdani/github-patches
|
git_diff
|
mesonbuild__meson-10268
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
-Dbuildtype=release fails on CompCert
**Describe the bug**
Building with CompCert on release mode fails.
The error message is as follows:
```
ccomp: error: Unknown option `-03'
```
Note that this unknown option is "dash zero three" and not "dash O three". Maybe someone accidentally typed a zero where they wanted a letter "O"?
**To Reproduce**
The bug seems to trigger with any program.
Download this Meson "Hello, world!" program: [meson-compcert.zip](https://github.com/mesonbuild/meson/files/8468156/meson-compcert.zip).
Try to build it with:
```console
$ CC=ccomp meson setup -Dbuildtype=release build && meson compile -C build
```
**Expected behavior**
A successful build.
**system parameters**
I'm running Meson 0.62 from PyPi and my CompCert is the binary package of version 3.9 from the OpenBSD ports system.
My ninja is version 1.10.2 and my python is version 3.8.12.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mesonbuild/compilers/mixins/compcert.py`
Content:
```
1 # Copyright 2012-2019 The Meson development team
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6
7 # http://www.apache.org/licenses/LICENSE-2.0
8
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Representations specific to the CompCert C compiler family."""
16
17 import os
18 import re
19 import typing as T
20
21 if T.TYPE_CHECKING:
22 from ...environment import Environment
23 from ...compilers.compilers import Compiler
24 else:
25 # This is a bit clever, for mypy we pretend that these mixins descend from
26 # Compiler, so we get all of the methods and attributes defined for us, but
27 # for runtime we make them descend from object (which all classes normally
28 # do). This gives up DRYer type checking, with no runtime impact
29 Compiler = object
30
31 ccomp_buildtype_args = {
32 'plain': [''],
33 'debug': ['-O0', '-g'],
34 'debugoptimized': ['-O0', '-g'],
35 'release': ['-03'],
36 'minsize': ['-Os'],
37 'custom': ['-Obranchless'],
38 } # type: T.Dict[str, T.List[str]]
39
40 ccomp_optimization_args = {
41 '0': ['-O0'],
42 'g': ['-O0'],
43 '1': ['-O1'],
44 '2': ['-O2'],
45 '3': ['-O3'],
46 's': ['-Os']
47 } # type: T.Dict[str, T.List[str]]
48
49 ccomp_debug_args = {
50 False: [],
51 True: ['-g']
52 } # type: T.Dict[bool, T.List[str]]
53
54 # As of CompCert 20.04, these arguments should be passed to the underlying gcc linker (via -WUl,<arg>)
55 # There are probably (many) more, but these are those used by picolibc
56 ccomp_args_to_wul = [
57 r"^-ffreestanding$",
58 r"^-r$"
59 ] # type: T.List[str]
60
61 class CompCertCompiler(Compiler):
62
63 id = 'ccomp'
64
65 def __init__(self) -> None:
66 # Assembly
67 self.can_compile_suffixes.add('s')
68 default_warn_args = [] # type: T.List[str]
69 self.warn_args = {'0': [],
70 '1': default_warn_args,
71 '2': default_warn_args + [],
72 '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]
73
74 def get_always_args(self) -> T.List[str]:
75 return []
76
77 def get_pic_args(self) -> T.List[str]:
78 # As of now, CompCert does not support PIC
79 return []
80
81 def get_buildtype_args(self, buildtype: str) -> T.List[str]:
82 return ccomp_buildtype_args[buildtype]
83
84 def get_pch_suffix(self) -> str:
85 return 'pch'
86
87 def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
88 return []
89
90 def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:
91 "Always returns a copy that can be independently mutated"
92 patched_args = [] # type: T.List[str]
93 for arg in args:
94 added = 0
95 for ptrn in ccomp_args_to_wul:
96 if re.match(ptrn, arg):
97 patched_args.append('-WUl,' + arg)
98 added = 1
99 if not added:
100 patched_args.append(arg)
101 return patched_args
102
103 def thread_flags(self, env: 'Environment') -> T.List[str]:
104 return []
105
106 def get_preprocess_only_args(self) -> T.List[str]:
107 return ['-E']
108
109 def get_compile_only_args(self) -> T.List[str]:
110 return ['-c']
111
112 def get_coverage_args(self) -> T.List[str]:
113 return []
114
115 def get_no_stdinc_args(self) -> T.List[str]:
116 return ['-nostdinc']
117
118 def get_no_stdlib_link_args(self) -> T.List[str]:
119 return ['-nostdlib']
120
121 def get_optimization_args(self, optimization_level: str) -> T.List[str]:
122 return ccomp_optimization_args[optimization_level]
123
124 def get_debug_args(self, is_debug: bool) -> T.List[str]:
125 return ccomp_debug_args[is_debug]
126
127 def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
128 for idx, i in enumerate(parameter_list):
129 if i[:9] == '-I':
130 parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
131
132 return parameter_list
133
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/mesonbuild/compilers/mixins/compcert.py b/mesonbuild/compilers/mixins/compcert.py
--- a/mesonbuild/compilers/mixins/compcert.py
+++ b/mesonbuild/compilers/mixins/compcert.py
@@ -32,7 +32,7 @@
'plain': [''],
'debug': ['-O0', '-g'],
'debugoptimized': ['-O0', '-g'],
- 'release': ['-03'],
+ 'release': ['-O3'],
'minsize': ['-Os'],
'custom': ['-Obranchless'],
} # type: T.Dict[str, T.List[str]]
|
{"golden_diff": "diff --git a/mesonbuild/compilers/mixins/compcert.py b/mesonbuild/compilers/mixins/compcert.py\n--- a/mesonbuild/compilers/mixins/compcert.py\n+++ b/mesonbuild/compilers/mixins/compcert.py\n@@ -32,7 +32,7 @@\n 'plain': [''],\n 'debug': ['-O0', '-g'],\n 'debugoptimized': ['-O0', '-g'],\n- 'release': ['-03'],\n+ 'release': ['-O3'],\n 'minsize': ['-Os'],\n 'custom': ['-Obranchless'],\n } # type: T.Dict[str, T.List[str]]\n", "issue": "-Dbuildtype=release fails on CompCert\n**Describe the bug**\r\nBuilding with CompCert on release mode fails. \r\nThe error message is as follows:\r\n```\r\nccomp: error: Unknown option `-03'\r\n```\r\nNote that this unknown option is \"dash zero three\" and not \"dash O three\". Maybe someone accidentally typed a zero where they wanted a letter \"O\"?\r\n\r\n**To Reproduce**\r\nThe bug seems to trigger with any program.\r\nDownload this Meson \"Hello, world!\" program: [meson-compcert.zip](https://github.com/mesonbuild/meson/files/8468156/meson-compcert.zip).\r\nTry to build it with:\r\n```console\r\n$ CC=ccomp meson setup -Dbuildtype=release build && meson compile -C build\r\n```\r\n\r\n**Expected behavior**\r\nA successful build.\r\n\r\n**system parameters**\r\nI'm running Meson 0.62 from PyPi and my CompCert is the binary package of version 3.9 from the OpenBSD ports system.\r\nMy ninja is version 1.10.2 and my python is version 3.8.12.\n", "before_files": [{"content": "# Copyright 2012-2019 The Meson development team\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Representations specific to the CompCert C compiler family.\"\"\"\n\nimport os\nimport re\nimport typing as T\n\nif T.TYPE_CHECKING:\n from ...environment import Environment\n from ...compilers.compilers import Compiler\nelse:\n # This is a bit clever, for mypy we pretend that these mixins descend from\n # Compiler, so we get all of the methods and attributes defined for us, but\n # for runtime we make them descend from object (which all classes normally\n # do). 
This gives up DRYer type checking, with no runtime impact\n Compiler = object\n\nccomp_buildtype_args = {\n 'plain': [''],\n 'debug': ['-O0', '-g'],\n 'debugoptimized': ['-O0', '-g'],\n 'release': ['-03'],\n 'minsize': ['-Os'],\n 'custom': ['-Obranchless'],\n} # type: T.Dict[str, T.List[str]]\n\nccomp_optimization_args = {\n '0': ['-O0'],\n 'g': ['-O0'],\n '1': ['-O1'],\n '2': ['-O2'],\n '3': ['-O3'],\n 's': ['-Os']\n} # type: T.Dict[str, T.List[str]]\n\nccomp_debug_args = {\n False: [],\n True: ['-g']\n} # type: T.Dict[bool, T.List[str]]\n\n# As of CompCert 20.04, these arguments should be passed to the underlying gcc linker (via -WUl,<arg>)\n# There are probably (many) more, but these are those used by picolibc\nccomp_args_to_wul = [\n r\"^-ffreestanding$\",\n r\"^-r$\"\n] # type: T.List[str]\n\nclass CompCertCompiler(Compiler):\n\n id = 'ccomp'\n\n def __init__(self) -> None:\n # Assembly\n self.can_compile_suffixes.add('s')\n default_warn_args = [] # type: T.List[str]\n self.warn_args = {'0': [],\n '1': default_warn_args,\n '2': default_warn_args + [],\n '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]\n\n def get_always_args(self) -> T.List[str]:\n return []\n\n def get_pic_args(self) -> T.List[str]:\n # As of now, CompCert does not support PIC\n return []\n\n def get_buildtype_args(self, buildtype: str) -> T.List[str]:\n return ccomp_buildtype_args[buildtype]\n\n def get_pch_suffix(self) -> str:\n return 'pch'\n\n def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:\n return []\n\n def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:\n \"Always returns a copy that can be independently mutated\"\n patched_args = [] # type: T.List[str]\n for arg in args:\n added = 0\n for ptrn in ccomp_args_to_wul:\n if re.match(ptrn, arg):\n patched_args.append('-WUl,' + arg)\n added = 1\n if not added:\n patched_args.append(arg)\n return patched_args\n\n def thread_flags(self, env: 'Environment') -> T.List[str]:\n return []\n\n def get_preprocess_only_args(self) -> T.List[str]:\n return ['-E']\n\n def get_compile_only_args(self) -> T.List[str]:\n return ['-c']\n\n def get_coverage_args(self) -> T.List[str]:\n return []\n\n def get_no_stdinc_args(self) -> T.List[str]:\n return ['-nostdinc']\n\n def get_no_stdlib_link_args(self) -> T.List[str]:\n return ['-nostdlib']\n\n def get_optimization_args(self, optimization_level: str) -> T.List[str]:\n return ccomp_optimization_args[optimization_level]\n\n def get_debug_args(self, is_debug: bool) -> T.List[str]:\n return ccomp_debug_args[is_debug]\n\n def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:\n for idx, i in enumerate(parameter_list):\n if i[:9] == '-I':\n parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))\n\n return parameter_list\n", "path": "mesonbuild/compilers/mixins/compcert.py"}], "after_files": [{"content": "# Copyright 2012-2019 The Meson development team\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the 
License.\n\n\"\"\"Representations specific to the CompCert C compiler family.\"\"\"\n\nimport os\nimport re\nimport typing as T\n\nif T.TYPE_CHECKING:\n from ...environment import Environment\n from ...compilers.compilers import Compiler\nelse:\n # This is a bit clever, for mypy we pretend that these mixins descend from\n # Compiler, so we get all of the methods and attributes defined for us, but\n # for runtime we make them descend from object (which all classes normally\n # do). This gives up DRYer type checking, with no runtime impact\n Compiler = object\n\nccomp_buildtype_args = {\n 'plain': [''],\n 'debug': ['-O0', '-g'],\n 'debugoptimized': ['-O0', '-g'],\n 'release': ['-O3'],\n 'minsize': ['-Os'],\n 'custom': ['-Obranchless'],\n} # type: T.Dict[str, T.List[str]]\n\nccomp_optimization_args = {\n '0': ['-O0'],\n 'g': ['-O0'],\n '1': ['-O1'],\n '2': ['-O2'],\n '3': ['-O3'],\n 's': ['-Os']\n} # type: T.Dict[str, T.List[str]]\n\nccomp_debug_args = {\n False: [],\n True: ['-g']\n} # type: T.Dict[bool, T.List[str]]\n\n# As of CompCert 20.04, these arguments should be passed to the underlying gcc linker (via -WUl,<arg>)\n# There are probably (many) more, but these are those used by picolibc\nccomp_args_to_wul = [\n r\"^-ffreestanding$\",\n r\"^-r$\"\n] # type: T.List[str]\n\nclass CompCertCompiler(Compiler):\n\n id = 'ccomp'\n\n def __init__(self) -> None:\n # Assembly\n self.can_compile_suffixes.add('s')\n default_warn_args = [] # type: T.List[str]\n self.warn_args = {'0': [],\n '1': default_warn_args,\n '2': default_warn_args + [],\n '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]\n\n def get_always_args(self) -> T.List[str]:\n return []\n\n def get_pic_args(self) -> T.List[str]:\n # As of now, CompCert does not support PIC\n return []\n\n def get_buildtype_args(self, buildtype: str) -> T.List[str]:\n return ccomp_buildtype_args[buildtype]\n\n def get_pch_suffix(self) -> str:\n return 'pch'\n\n def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:\n return []\n\n def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:\n \"Always returns a copy that can be independently mutated\"\n patched_args = [] # type: T.List[str]\n for arg in args:\n added = 0\n for ptrn in ccomp_args_to_wul:\n if re.match(ptrn, arg):\n patched_args.append('-WUl,' + arg)\n added = 1\n if not added:\n patched_args.append(arg)\n return patched_args\n\n def thread_flags(self, env: 'Environment') -> T.List[str]:\n return []\n\n def get_preprocess_only_args(self) -> T.List[str]:\n return ['-E']\n\n def get_compile_only_args(self) -> T.List[str]:\n return ['-c']\n\n def get_coverage_args(self) -> T.List[str]:\n return []\n\n def get_no_stdinc_args(self) -> T.List[str]:\n return ['-nostdinc']\n\n def get_no_stdlib_link_args(self) -> T.List[str]:\n return ['-nostdlib']\n\n def get_optimization_args(self, optimization_level: str) -> T.List[str]:\n return ccomp_optimization_args[optimization_level]\n\n def get_debug_args(self, is_debug: bool) -> T.List[str]:\n return ccomp_debug_args[is_debug]\n\n def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:\n for idx, i in enumerate(parameter_list):\n if i[:9] == '-I':\n parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))\n\n return parameter_list\n", "path": "mesonbuild/compilers/mixins/compcert.py"}]}
| 1,916 | 153 |
gh_patches_debug_37602
|
rasdani/github-patches
|
git_diff
|
arviz-devs__arviz-625
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove load_data and save_data functions before 0.4
`load_data` and `save_data` are currently deprecated (after 0.3.1 release). They need to be removed after 0.4 (assuming next release is going to be 0.3.2).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `arviz/data/__init__.py`
Content:
```
1 """Code for loading and manipulating data structures."""
2 from .inference_data import InferenceData, concat
3 from .io_netcdf import from_netcdf, to_netcdf, load_data, save_data
4 from .datasets import load_arviz_data, list_datasets, clear_data_home
5 from .base import numpy_to_data_array, dict_to_dataset
6 from .converters import convert_to_dataset, convert_to_inference_data
7 from .io_cmdstan import from_cmdstan
8 from .io_dict import from_dict
9 from .io_pymc3 import from_pymc3
10 from .io_pystan import from_pystan
11 from .io_emcee import from_emcee
12 from .io_pyro import from_pyro
13 from .io_tfp import from_tfp
14
15 __all__ = [
16 "InferenceData",
17 "concat",
18 "load_arviz_data",
19 "list_datasets",
20 "clear_data_home",
21 "numpy_to_data_array",
22 "dict_to_dataset",
23 "convert_to_dataset",
24 "convert_to_inference_data",
25 "from_pymc3",
26 "from_pystan",
27 "from_emcee",
28 "from_cmdstan",
29 "from_dict",
30 "from_pyro",
31 "from_tfp",
32 "from_netcdf",
33 "to_netcdf",
34 "load_data",
35 "save_data",
36 ]
37
```
Path: `arviz/data/io_netcdf.py`
Content:
```
1 """Input and output support for data."""
2 import warnings
3 from .inference_data import InferenceData
4 from .converters import convert_to_inference_data
5
6
7 def from_netcdf(filename):
8 """Load netcdf file back into an arviz.InferenceData.
9
10 Parameters
11 ----------
12 filename : str
13 name or path of the file to load trace
14 """
15 return InferenceData.from_netcdf(filename)
16
17
18 def to_netcdf(data, filename, *, group="posterior", coords=None, dims=None):
19 """Save dataset as a netcdf file.
20
21 WARNING: Only idempotent in case `data` is InferenceData
22
23 Parameters
24 ----------
25 data : InferenceData, or any object accepted by `convert_to_inference_data`
26 Object to be saved
27 filename : str
28 name or path of the file to load trace
29 group : str (optional)
30 In case `data` is not InferenceData, this is the group it will be saved to
31 coords : dict (optional)
32 See `convert_to_inference_data`
33 dims : dict (optional)
34 See `convert_to_inference_data`
35
36 Returns
37 -------
38 str
39 filename saved to
40 """
41 inference_data = convert_to_inference_data(data, group=group, coords=coords, dims=dims)
42 file_name = inference_data.to_netcdf(filename)
43 return file_name
44
45
46 def load_data(filename):
47 """Load netcdf file back into an arviz.InferenceData.
48
49 Parameters
50 ----------
51 filename : str
52 name or path of the file to load trace
53
54 Note
55 ----
56 This function is deprecated and will be removed in 0.4.
57 Use `from_netcdf` instead.
58 """
59 warnings.warn(
60 "The 'load_data' function is deprecated as of 0.3.2, use 'from_netcdf' instead",
61 DeprecationWarning,
62 )
63 return from_netcdf(filename=filename)
64
65
66 def save_data(data, filename, *, group="posterior", coords=None, dims=None):
67 """Save dataset as a netcdf file.
68
69 WARNING: Only idempotent in case `data` is InferenceData
70
71 Parameters
72 ----------
73 data : InferenceData, or any object accepted by `convert_to_inference_data`
74 Object to be saved
75 filename : str
76 name or path of the file to load trace
77 group : str (optional)
78 In case `data` is not InferenceData, this is the group it will be saved to
79 coords : dict (optional)
80 See `convert_to_inference_data`
81 dims : dict (optional)
82 See `convert_to_inference_data`
83
84 Returns
85 -------
86 str
87 filename saved to
88
89 Note
90 ----
91 This function is deprecated and will be removed in 0.4.
92 Use `to_netcdf` instead.
93 """
94 warnings.warn(
95 "The 'save_data' function is deprecated as of 0.3.2, use 'to_netcdf' instead",
96 DeprecationWarning,
97 )
98 return to_netcdf(data=data, filename=filename, group=group, coords=coords, dims=dims)
99
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/arviz/data/__init__.py b/arviz/data/__init__.py
--- a/arviz/data/__init__.py
+++ b/arviz/data/__init__.py
@@ -1,6 +1,6 @@
"""Code for loading and manipulating data structures."""
from .inference_data import InferenceData, concat
-from .io_netcdf import from_netcdf, to_netcdf, load_data, save_data
+from .io_netcdf import from_netcdf, to_netcdf
from .datasets import load_arviz_data, list_datasets, clear_data_home
from .base import numpy_to_data_array, dict_to_dataset
from .converters import convert_to_dataset, convert_to_inference_data
@@ -31,6 +31,4 @@
"from_tfp",
"from_netcdf",
"to_netcdf",
- "load_data",
- "save_data",
]
diff --git a/arviz/data/io_netcdf.py b/arviz/data/io_netcdf.py
--- a/arviz/data/io_netcdf.py
+++ b/arviz/data/io_netcdf.py
@@ -1,5 +1,5 @@
"""Input and output support for data."""
-import warnings
+
from .inference_data import InferenceData
from .converters import convert_to_inference_data
@@ -41,58 +41,3 @@
inference_data = convert_to_inference_data(data, group=group, coords=coords, dims=dims)
file_name = inference_data.to_netcdf(filename)
return file_name
-
-
-def load_data(filename):
- """Load netcdf file back into an arviz.InferenceData.
-
- Parameters
- ----------
- filename : str
- name or path of the file to load trace
-
- Note
- ----
- This function is deprecated and will be removed in 0.4.
- Use `from_netcdf` instead.
- """
- warnings.warn(
- "The 'load_data' function is deprecated as of 0.3.2, use 'from_netcdf' instead",
- DeprecationWarning,
- )
- return from_netcdf(filename=filename)
-
-
-def save_data(data, filename, *, group="posterior", coords=None, dims=None):
- """Save dataset as a netcdf file.
-
- WARNING: Only idempotent in case `data` is InferenceData
-
- Parameters
- ----------
- data : InferenceData, or any object accepted by `convert_to_inference_data`
- Object to be saved
- filename : str
- name or path of the file to load trace
- group : str (optional)
- In case `data` is not InferenceData, this is the group it will be saved to
- coords : dict (optional)
- See `convert_to_inference_data`
- dims : dict (optional)
- See `convert_to_inference_data`
-
- Returns
- -------
- str
- filename saved to
-
- Note
- ----
- This function is deprecated and will be removed in 0.4.
- Use `to_netcdf` instead.
- """
- warnings.warn(
- "The 'save_data' function is deprecated as of 0.3.2, use 'to_netcdf' instead",
- DeprecationWarning,
- )
- return to_netcdf(data=data, filename=filename, group=group, coords=coords, dims=dims)
|
{"golden_diff": "diff --git a/arviz/data/__init__.py b/arviz/data/__init__.py\n--- a/arviz/data/__init__.py\n+++ b/arviz/data/__init__.py\n@@ -1,6 +1,6 @@\n \"\"\"Code for loading and manipulating data structures.\"\"\"\n from .inference_data import InferenceData, concat\n-from .io_netcdf import from_netcdf, to_netcdf, load_data, save_data\n+from .io_netcdf import from_netcdf, to_netcdf\n from .datasets import load_arviz_data, list_datasets, clear_data_home\n from .base import numpy_to_data_array, dict_to_dataset\n from .converters import convert_to_dataset, convert_to_inference_data\n@@ -31,6 +31,4 @@\n \"from_tfp\",\n \"from_netcdf\",\n \"to_netcdf\",\n- \"load_data\",\n- \"save_data\",\n ]\ndiff --git a/arviz/data/io_netcdf.py b/arviz/data/io_netcdf.py\n--- a/arviz/data/io_netcdf.py\n+++ b/arviz/data/io_netcdf.py\n@@ -1,5 +1,5 @@\n \"\"\"Input and output support for data.\"\"\"\n-import warnings\n+\n from .inference_data import InferenceData\n from .converters import convert_to_inference_data\n \n@@ -41,58 +41,3 @@\n inference_data = convert_to_inference_data(data, group=group, coords=coords, dims=dims)\n file_name = inference_data.to_netcdf(filename)\n return file_name\n-\n-\n-def load_data(filename):\n- \"\"\"Load netcdf file back into an arviz.InferenceData.\n-\n- Parameters\n- ----------\n- filename : str\n- name or path of the file to load trace\n-\n- Note\n- ----\n- This function is deprecated and will be removed in 0.4.\n- Use `from_netcdf` instead.\n- \"\"\"\n- warnings.warn(\n- \"The 'load_data' function is deprecated as of 0.3.2, use 'from_netcdf' instead\",\n- DeprecationWarning,\n- )\n- return from_netcdf(filename=filename)\n-\n-\n-def save_data(data, filename, *, group=\"posterior\", coords=None, dims=None):\n- \"\"\"Save dataset as a netcdf file.\n-\n- WARNING: Only idempotent in case `data` is InferenceData\n-\n- Parameters\n- ----------\n- data : InferenceData, or any object accepted by `convert_to_inference_data`\n- Object to be saved\n- filename : str\n- name or path of the file to load trace\n- group : str (optional)\n- In case `data` is not InferenceData, this is the group it will be saved to\n- coords : dict (optional)\n- See `convert_to_inference_data`\n- dims : dict (optional)\n- See `convert_to_inference_data`\n-\n- Returns\n- -------\n- str\n- filename saved to\n-\n- Note\n- ----\n- This function is deprecated and will be removed in 0.4.\n- Use `to_netcdf` instead.\n- \"\"\"\n- warnings.warn(\n- \"The 'save_data' function is deprecated as of 0.3.2, use 'to_netcdf' instead\",\n- DeprecationWarning,\n- )\n- return to_netcdf(data=data, filename=filename, group=group, coords=coords, dims=dims)\n", "issue": "Remove load_data and save_data functions before 0.4\n`load_data` and `save_data` are currently deprecated (after 0.3.1 release). 
They need to be removed after 0.4 (assuming next release is going to be 0.3.2).\n", "before_files": [{"content": "\"\"\"Code for loading and manipulating data structures.\"\"\"\nfrom .inference_data import InferenceData, concat\nfrom .io_netcdf import from_netcdf, to_netcdf, load_data, save_data\nfrom .datasets import load_arviz_data, list_datasets, clear_data_home\nfrom .base import numpy_to_data_array, dict_to_dataset\nfrom .converters import convert_to_dataset, convert_to_inference_data\nfrom .io_cmdstan import from_cmdstan\nfrom .io_dict import from_dict\nfrom .io_pymc3 import from_pymc3\nfrom .io_pystan import from_pystan\nfrom .io_emcee import from_emcee\nfrom .io_pyro import from_pyro\nfrom .io_tfp import from_tfp\n\n__all__ = [\n \"InferenceData\",\n \"concat\",\n \"load_arviz_data\",\n \"list_datasets\",\n \"clear_data_home\",\n \"numpy_to_data_array\",\n \"dict_to_dataset\",\n \"convert_to_dataset\",\n \"convert_to_inference_data\",\n \"from_pymc3\",\n \"from_pystan\",\n \"from_emcee\",\n \"from_cmdstan\",\n \"from_dict\",\n \"from_pyro\",\n \"from_tfp\",\n \"from_netcdf\",\n \"to_netcdf\",\n \"load_data\",\n \"save_data\",\n]\n", "path": "arviz/data/__init__.py"}, {"content": "\"\"\"Input and output support for data.\"\"\"\nimport warnings\nfrom .inference_data import InferenceData\nfrom .converters import convert_to_inference_data\n\n\ndef from_netcdf(filename):\n \"\"\"Load netcdf file back into an arviz.InferenceData.\n\n Parameters\n ----------\n filename : str\n name or path of the file to load trace\n \"\"\"\n return InferenceData.from_netcdf(filename)\n\n\ndef to_netcdf(data, filename, *, group=\"posterior\", coords=None, dims=None):\n \"\"\"Save dataset as a netcdf file.\n\n WARNING: Only idempotent in case `data` is InferenceData\n\n Parameters\n ----------\n data : InferenceData, or any object accepted by `convert_to_inference_data`\n Object to be saved\n filename : str\n name or path of the file to load trace\n group : str (optional)\n In case `data` is not InferenceData, this is the group it will be saved to\n coords : dict (optional)\n See `convert_to_inference_data`\n dims : dict (optional)\n See `convert_to_inference_data`\n\n Returns\n -------\n str\n filename saved to\n \"\"\"\n inference_data = convert_to_inference_data(data, group=group, coords=coords, dims=dims)\n file_name = inference_data.to_netcdf(filename)\n return file_name\n\n\ndef load_data(filename):\n \"\"\"Load netcdf file back into an arviz.InferenceData.\n\n Parameters\n ----------\n filename : str\n name or path of the file to load trace\n\n Note\n ----\n This function is deprecated and will be removed in 0.4.\n Use `from_netcdf` instead.\n \"\"\"\n warnings.warn(\n \"The 'load_data' function is deprecated as of 0.3.2, use 'from_netcdf' instead\",\n DeprecationWarning,\n )\n return from_netcdf(filename=filename)\n\n\ndef save_data(data, filename, *, group=\"posterior\", coords=None, dims=None):\n \"\"\"Save dataset as a netcdf file.\n\n WARNING: Only idempotent in case `data` is InferenceData\n\n Parameters\n ----------\n data : InferenceData, or any object accepted by `convert_to_inference_data`\n Object to be saved\n filename : str\n name or path of the file to load trace\n group : str (optional)\n In case `data` is not InferenceData, this is the group it will be saved to\n coords : dict (optional)\n See `convert_to_inference_data`\n dims : dict (optional)\n See `convert_to_inference_data`\n\n Returns\n -------\n str\n filename saved to\n\n Note\n ----\n This function is deprecated and will be 
removed in 0.4.\n Use `to_netcdf` instead.\n \"\"\"\n warnings.warn(\n \"The 'save_data' function is deprecated as of 0.3.2, use 'to_netcdf' instead\",\n DeprecationWarning,\n )\n return to_netcdf(data=data, filename=filename, group=group, coords=coords, dims=dims)\n", "path": "arviz/data/io_netcdf.py"}], "after_files": [{"content": "\"\"\"Code for loading and manipulating data structures.\"\"\"\nfrom .inference_data import InferenceData, concat\nfrom .io_netcdf import from_netcdf, to_netcdf\nfrom .datasets import load_arviz_data, list_datasets, clear_data_home\nfrom .base import numpy_to_data_array, dict_to_dataset\nfrom .converters import convert_to_dataset, convert_to_inference_data\nfrom .io_cmdstan import from_cmdstan\nfrom .io_dict import from_dict\nfrom .io_pymc3 import from_pymc3\nfrom .io_pystan import from_pystan\nfrom .io_emcee import from_emcee\nfrom .io_pyro import from_pyro\nfrom .io_tfp import from_tfp\n\n__all__ = [\n \"InferenceData\",\n \"concat\",\n \"load_arviz_data\",\n \"list_datasets\",\n \"clear_data_home\",\n \"numpy_to_data_array\",\n \"dict_to_dataset\",\n \"convert_to_dataset\",\n \"convert_to_inference_data\",\n \"from_pymc3\",\n \"from_pystan\",\n \"from_emcee\",\n \"from_cmdstan\",\n \"from_dict\",\n \"from_pyro\",\n \"from_tfp\",\n \"from_netcdf\",\n \"to_netcdf\",\n]\n", "path": "arviz/data/__init__.py"}, {"content": "\"\"\"Input and output support for data.\"\"\"\n\nfrom .inference_data import InferenceData\nfrom .converters import convert_to_inference_data\n\n\ndef from_netcdf(filename):\n \"\"\"Load netcdf file back into an arviz.InferenceData.\n\n Parameters\n ----------\n filename : str\n name or path of the file to load trace\n \"\"\"\n return InferenceData.from_netcdf(filename)\n\n\ndef to_netcdf(data, filename, *, group=\"posterior\", coords=None, dims=None):\n \"\"\"Save dataset as a netcdf file.\n\n WARNING: Only idempotent in case `data` is InferenceData\n\n Parameters\n ----------\n data : InferenceData, or any object accepted by `convert_to_inference_data`\n Object to be saved\n filename : str\n name or path of the file to load trace\n group : str (optional)\n In case `data` is not InferenceData, this is the group it will be saved to\n coords : dict (optional)\n See `convert_to_inference_data`\n dims : dict (optional)\n See `convert_to_inference_data`\n\n Returns\n -------\n str\n filename saved to\n \"\"\"\n inference_data = convert_to_inference_data(data, group=group, coords=coords, dims=dims)\n file_name = inference_data.to_netcdf(filename)\n return file_name\n", "path": "arviz/data/io_netcdf.py"}]}
| 1,590 | 782 |
gh_patches_debug_32009
|
rasdani/github-patches
|
git_diff
|
pwndbg__pwndbg-1368
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Use the `gh` cli tool to create issues from `bugreport` if it's installed
Github has a CLI tool that can do things like create issues: https://cli.github.com/
When running pwndbg on a server, I can't do `bugreport --run-browser` because there's no X server running. I have to go through the issue creation process manually, copying and pasting the output. We could automate all of this using `gh issue create`.
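Roughly, something like the sketch below would do it (assuming `gh` is installed and authenticated; the helper name and the temp-file handling are only illustrative, this is not actual pwndbg code):
```
import shutil
import subprocess
from tempfile import NamedTemporaryFile

def create_issue_with_gh(report_body):
    """Open a GitHub issue via the `gh` CLI; return False if `gh` is not installed."""
    if shutil.which("gh") is None:
        return False  # fall back to the manual copy/paste flow
    with NamedTemporaryFile("w", suffix=".md") as f:
        f.write(report_body)
        f.flush()
        # `gh issue create` can read the issue body from a file
        subprocess.check_call(["gh", "issue", "create", "--body-file", f.name])
    return True
```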
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pwndbg/commands/version.py`
Content:
```
1 """
2 Displays gdb, python and pwndbg versions.
3 """
4
5
6 import argparse
7 import sys
8 from platform import platform
9 from subprocess import check_output
10 from urllib.parse import quote
11
12 import gdb
13
14 import pwndbg
15 import pwndbg.commands
16 import pwndbg.ida
17 from pwndbg.color import message
18
19
20 def _gdb_version():
21 try:
22 return gdb.VERSION # GDB >= 8.1 (or earlier?)
23 except AttributeError:
24 return gdb.execute("show version", to_string=True).split("\n")[0]
25
26
27 def _py_version():
28 return sys.version.replace("\n", " ")
29
30
31 def capstone_version():
32 try:
33 import capstone
34
35 return ".".join(map(str, capstone.cs_version()))
36 except ImportError:
37 return "not found"
38
39
40 def unicorn_version():
41 try:
42 import unicorn
43
44 return unicorn.__version__
45 except ImportError:
46 return "not found"
47
48
49 def all_versions():
50 gdb_str = "Gdb: %s" % _gdb_version()
51 py_str = "Python: %s" % _py_version()
52 pwndbg_str = "Pwndbg: %s" % pwndbg.__version__
53
54 capstone_str = "Capstone: %s" % capstone_version()
55 unicorn_str = "Unicorn: %s" % unicorn_version()
56
57 all_versions = (gdb_str, py_str, pwndbg_str, capstone_str, unicorn_str)
58
59 ida_versions = pwndbg.ida.get_ida_versions()
60
61 if ida_versions is not None:
62 ida_version = "IDA PRO: %s" % ida_versions["ida"]
63 ida_py_ver = "IDA Py: %s" % ida_versions["python"]
64 ida_hr_ver = "Hexrays: %s" % ida_versions["hexrays"]
65 all_versions += (ida_version, ida_py_ver, ida_hr_ver)
66 return all_versions
67
68
69 @pwndbg.commands.ArgparsedCommand("Displays gdb, python and pwndbg versions.")
70 def version():
71 """
72 Displays gdb, python and pwndbg versions.
73 """
74 print("\n".join(map(message.system, all_versions())))
75
76
77 bugreport_parser = argparse.ArgumentParser(
78 description="""
79 Generate bugreport
80 """
81 )
82 bugreport_parser.add_argument(
83 "--run-browser", "-b", action="store_true", help="Open browser on github/issues/new"
84 )
85
86
87 @pwndbg.commands.ArgparsedCommand(bugreport_parser)
88 def bugreport(run_browser=False):
89 ISSUE_TEMPLATE = """
90 <!--
91 Before reporting a new issue, make sure that we do not have any duplicates already open.
92 If there is one it might be good to take part in the discussion there.
93
94 Please make sure you have checked that the issue persists on LATEST pwndbg version.
95
96 Below is a template for BUG REPORTS.
97 Don't include it if this is a FEATURE REQUEST.
98 -->
99
100
101 ### Description
102
103 <!--
104 Briefly describe the problem you are having in a few paragraphs.
105 -->
106
107 ### Steps to reproduce
108
109 <!--
110 What do we have to do to reproduce the problem?
111 If this is connected to particular C/asm code or a binary,
112 please provide the binary or if possible, a smallest C code that reproduces the issue.
113 -->
114
115 Gdb session history:
116 ```
117 {gdb_history}
118 ```
119
120 ### My setup
121
122 <!--
123 Show us your gdb/python/pwndbg/OS/IDA Pro version (depending on your case).
124
125 NOTE: We are currently testing Pwndbg only on Ubuntu installations but it should work fine on other distros as well.
126
127 This can be displayed in pwndbg through `version` command.
128
129 If it is somehow unavailable, use:
130 * `show version` - for gdb
131 * `py import sys; print(sys.version)` - for python
132 * pwndbg version/git commit id
133 -->
134
135 ```
136 {setup}
137 ```"""
138
139 gdb_config = gdb.execute("show configuration", to_string=True).split("\n")
140 all_info = all_versions()
141
142 current_setup = "Platform: %s\n" % platform()
143 current_setup += "\n".join(all_info)
144 current_setup += "\n" + "\n".join(gdb_config)
145
146 # get saved history size (not including current gdb session)
147 gdb_history_file = gdb.execute("show history filename", to_string=True)
148 gdb_history_file = gdb_history_file[
149 gdb_history_file.index('"') + 1 : gdb_history_file.rindex('"')
150 ]
151 gdb_history_len = 0
152 try:
153 with open(gdb_history_file, "r") as f:
154 gdb_history_len = len(f.readlines())
155 except FileNotFoundError:
156 pass
157
158 max_command_no = (
159 int(gdb.execute("show commands", to_string=True).split("\n")[-2].split()[0]) - 1
160 )
161 show_command_size = 10 # 'show command' returns 10 commands
162 gdb_current_session_history = {}
163 current_command_no = gdb_history_len + 1
164
165 while current_command_no <= max_command_no:
166 cmds = gdb.execute(
167 "show commands " + str(current_command_no + (show_command_size // 2) + 1),
168 to_string=True,
169 ).split("\n")[:-1]
170 for cmd in cmds:
171 cmd_no, cmd = cmd.split(maxsplit=1)
172 cmd_no = int(cmd_no)
173 if cmd_no <= gdb_history_len:
174 continue
175 if current_command_no > max_command_no:
176 break
177 gdb_current_session_history[cmd_no] = cmd
178 current_command_no += 1
179
180 gdb_current_session_history = (v for (k, v) in sorted(gdb_current_session_history.items()))
181 gdb_current_session_history = "\n".join(gdb_current_session_history)
182
183 issue_bugreport = ISSUE_TEMPLATE.format(
184 gdb_history=gdb_current_session_history, setup=current_setup
185 )
186 print(issue_bugreport)
187
188 please_please_submit = "Please submit the bugreport generated above at "
189 github_issue_url = "https://github.com/pwndbg/pwndbg/issues/new"
190 github_issue_body = "?body=" + quote(issue_bugreport)
191
192 if run_browser:
193 try:
194 check_output(["xdg-open", github_issue_url + github_issue_body])
195 except Exception:
196 print(please_please_submit + github_issue_url)
197 else:
198 print(please_please_submit + github_issue_url)
199
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pwndbg/commands/version.py b/pwndbg/commands/version.py
--- a/pwndbg/commands/version.py
+++ b/pwndbg/commands/version.py
@@ -4,9 +4,12 @@
import argparse
+import os
import sys
from platform import platform
+from subprocess import check_call
from subprocess import check_output
+from tempfile import NamedTemporaryFile
from urllib.parse import quote
import gdb
@@ -79,13 +82,17 @@
Generate bugreport
"""
)
-bugreport_parser.add_argument(
+bugreport_group = bugreport_parser.add_mutually_exclusive_group()
+bugreport_group.add_argument(
"--run-browser", "-b", action="store_true", help="Open browser on github/issues/new"
)
+bugreport_group.add_argument(
+ "--use-gh", "-g", action="store_true", help="Create issue using Github CLI"
+)
@pwndbg.commands.ArgparsedCommand(bugreport_parser)
-def bugreport(run_browser=False):
+def bugreport(run_browser=False, use_gh=False):
ISSUE_TEMPLATE = """
<!--
Before reporting a new issue, make sure that we do not have any duplicates already open.
@@ -189,7 +196,17 @@
github_issue_url = "https://github.com/pwndbg/pwndbg/issues/new"
github_issue_body = "?body=" + quote(issue_bugreport)
- if run_browser:
+ if use_gh:
+ try:
+ with NamedTemporaryFile("w", delete=True) as f:
+ f.write(issue_bugreport)
+ f.flush()
+ check_call([os.environ.get("EDITOR", "vi"), f.name])
+ check_call(["gh", "issue", "create", "--body-file", f.name])
+ except Exception:
+ print(please_please_submit + github_issue_url)
+ raise
+ elif run_browser:
try:
check_output(["xdg-open", github_issue_url + github_issue_body])
except Exception:
|
{"golden_diff": "diff --git a/pwndbg/commands/version.py b/pwndbg/commands/version.py\n--- a/pwndbg/commands/version.py\n+++ b/pwndbg/commands/version.py\n@@ -4,9 +4,12 @@\n \n \n import argparse\n+import os\n import sys\n from platform import platform\n+from subprocess import check_call\n from subprocess import check_output\n+from tempfile import NamedTemporaryFile\n from urllib.parse import quote\n \n import gdb\n@@ -79,13 +82,17 @@\n Generate bugreport\n \"\"\"\n )\n-bugreport_parser.add_argument(\n+bugreport_group = bugreport_parser.add_mutually_exclusive_group()\n+bugreport_group.add_argument(\n \"--run-browser\", \"-b\", action=\"store_true\", help=\"Open browser on github/issues/new\"\n )\n+bugreport_group.add_argument(\n+ \"--use-gh\", \"-g\", action=\"store_true\", help=\"Create issue using Github CLI\"\n+)\n \n \n @pwndbg.commands.ArgparsedCommand(bugreport_parser)\n-def bugreport(run_browser=False):\n+def bugreport(run_browser=False, use_gh=False):\n ISSUE_TEMPLATE = \"\"\"\n <!--\n Before reporting a new issue, make sure that we do not have any duplicates already open.\n@@ -189,7 +196,17 @@\n github_issue_url = \"https://github.com/pwndbg/pwndbg/issues/new\"\n github_issue_body = \"?body=\" + quote(issue_bugreport)\n \n- if run_browser:\n+ if use_gh:\n+ try:\n+ with NamedTemporaryFile(\"w\", delete=True) as f:\n+ f.write(issue_bugreport)\n+ f.flush()\n+ check_call([os.environ.get(\"EDITOR\", \"vi\"), f.name])\n+ check_call([\"gh\", \"issue\", \"create\", \"--body-file\", f.name])\n+ except Exception:\n+ print(please_please_submit + github_issue_url)\n+ raise\n+ elif run_browser:\n try:\n check_output([\"xdg-open\", github_issue_url + github_issue_body])\n except Exception:\n", "issue": "Use the `gh` cli tool to create issues from `bugreport` if it's installed\nGithub has a CLI tool that can do things like create issues: https://cli.github.com/\r\n\r\nWhen running pwndbg on a server, I can't do `bugreport --run-browser` because there's no X server running. I have to go through the issue creation process manually, copying and pasting the output. 
We could automate all of this using `gh issue create`.\n", "before_files": [{"content": "\"\"\"\nDisplays gdb, python and pwndbg versions.\n\"\"\"\n\n\nimport argparse\nimport sys\nfrom platform import platform\nfrom subprocess import check_output\nfrom urllib.parse import quote\n\nimport gdb\n\nimport pwndbg\nimport pwndbg.commands\nimport pwndbg.ida\nfrom pwndbg.color import message\n\n\ndef _gdb_version():\n try:\n return gdb.VERSION # GDB >= 8.1 (or earlier?)\n except AttributeError:\n return gdb.execute(\"show version\", to_string=True).split(\"\\n\")[0]\n\n\ndef _py_version():\n return sys.version.replace(\"\\n\", \" \")\n\n\ndef capstone_version():\n try:\n import capstone\n\n return \".\".join(map(str, capstone.cs_version()))\n except ImportError:\n return \"not found\"\n\n\ndef unicorn_version():\n try:\n import unicorn\n\n return unicorn.__version__\n except ImportError:\n return \"not found\"\n\n\ndef all_versions():\n gdb_str = \"Gdb: %s\" % _gdb_version()\n py_str = \"Python: %s\" % _py_version()\n pwndbg_str = \"Pwndbg: %s\" % pwndbg.__version__\n\n capstone_str = \"Capstone: %s\" % capstone_version()\n unicorn_str = \"Unicorn: %s\" % unicorn_version()\n\n all_versions = (gdb_str, py_str, pwndbg_str, capstone_str, unicorn_str)\n\n ida_versions = pwndbg.ida.get_ida_versions()\n\n if ida_versions is not None:\n ida_version = \"IDA PRO: %s\" % ida_versions[\"ida\"]\n ida_py_ver = \"IDA Py: %s\" % ida_versions[\"python\"]\n ida_hr_ver = \"Hexrays: %s\" % ida_versions[\"hexrays\"]\n all_versions += (ida_version, ida_py_ver, ida_hr_ver)\n return all_versions\n\n\[email protected](\"Displays gdb, python and pwndbg versions.\")\ndef version():\n \"\"\"\n Displays gdb, python and pwndbg versions.\n \"\"\"\n print(\"\\n\".join(map(message.system, all_versions())))\n\n\nbugreport_parser = argparse.ArgumentParser(\n description=\"\"\"\n Generate bugreport\n \"\"\"\n)\nbugreport_parser.add_argument(\n \"--run-browser\", \"-b\", action=\"store_true\", help=\"Open browser on github/issues/new\"\n)\n\n\[email protected](bugreport_parser)\ndef bugreport(run_browser=False):\n ISSUE_TEMPLATE = \"\"\"\n<!--\nBefore reporting a new issue, make sure that we do not have any duplicates already open.\nIf there is one it might be good to take part in the discussion there.\n\nPlease make sure you have checked that the issue persists on LATEST pwndbg version.\n\nBelow is a template for BUG REPORTS.\nDon't include it if this is a FEATURE REQUEST.\n-->\n\n\n### Description\n\n<!--\nBriefly describe the problem you are having in a few paragraphs.\n-->\n\n### Steps to reproduce\n\n<!--\nWhat do we have to do to reproduce the problem?\nIf this is connected to particular C/asm code or a binary,\nplease provide the binary or if possible, a smallest C code that reproduces the issue.\n-->\n\nGdb session history:\n```\n{gdb_history}\n```\n\n### My setup\n\n<!--\nShow us your gdb/python/pwndbg/OS/IDA Pro version (depending on your case).\n\nNOTE: We are currently testing Pwndbg only on Ubuntu installations but it should work fine on other distros as well.\n\nThis can be displayed in pwndbg through `version` command.\n\nIf it is somehow unavailable, use:\n* `show version` - for gdb\n* `py import sys; print(sys.version)` - for python\n* pwndbg version/git commit id\n-->\n\n```\n{setup}\n```\"\"\"\n\n gdb_config = gdb.execute(\"show configuration\", to_string=True).split(\"\\n\")\n all_info = all_versions()\n\n current_setup = \"Platform: %s\\n\" % platform()\n current_setup += \"\\n\".join(all_info)\n current_setup 
+= \"\\n\" + \"\\n\".join(gdb_config)\n\n # get saved history size (not including current gdb session)\n gdb_history_file = gdb.execute(\"show history filename\", to_string=True)\n gdb_history_file = gdb_history_file[\n gdb_history_file.index('\"') + 1 : gdb_history_file.rindex('\"')\n ]\n gdb_history_len = 0\n try:\n with open(gdb_history_file, \"r\") as f:\n gdb_history_len = len(f.readlines())\n except FileNotFoundError:\n pass\n\n max_command_no = (\n int(gdb.execute(\"show commands\", to_string=True).split(\"\\n\")[-2].split()[0]) - 1\n )\n show_command_size = 10 # 'show command' returns 10 commands\n gdb_current_session_history = {}\n current_command_no = gdb_history_len + 1\n\n while current_command_no <= max_command_no:\n cmds = gdb.execute(\n \"show commands \" + str(current_command_no + (show_command_size // 2) + 1),\n to_string=True,\n ).split(\"\\n\")[:-1]\n for cmd in cmds:\n cmd_no, cmd = cmd.split(maxsplit=1)\n cmd_no = int(cmd_no)\n if cmd_no <= gdb_history_len:\n continue\n if current_command_no > max_command_no:\n break\n gdb_current_session_history[cmd_no] = cmd\n current_command_no += 1\n\n gdb_current_session_history = (v for (k, v) in sorted(gdb_current_session_history.items()))\n gdb_current_session_history = \"\\n\".join(gdb_current_session_history)\n\n issue_bugreport = ISSUE_TEMPLATE.format(\n gdb_history=gdb_current_session_history, setup=current_setup\n )\n print(issue_bugreport)\n\n please_please_submit = \"Please submit the bugreport generated above at \"\n github_issue_url = \"https://github.com/pwndbg/pwndbg/issues/new\"\n github_issue_body = \"?body=\" + quote(issue_bugreport)\n\n if run_browser:\n try:\n check_output([\"xdg-open\", github_issue_url + github_issue_body])\n except Exception:\n print(please_please_submit + github_issue_url)\n else:\n print(please_please_submit + github_issue_url)\n", "path": "pwndbg/commands/version.py"}], "after_files": [{"content": "\"\"\"\nDisplays gdb, python and pwndbg versions.\n\"\"\"\n\n\nimport argparse\nimport os\nimport sys\nfrom platform import platform\nfrom subprocess import check_call\nfrom subprocess import check_output\nfrom tempfile import NamedTemporaryFile\nfrom urllib.parse import quote\n\nimport gdb\n\nimport pwndbg\nimport pwndbg.commands\nimport pwndbg.ida\nfrom pwndbg.color import message\n\n\ndef _gdb_version():\n try:\n return gdb.VERSION # GDB >= 8.1 (or earlier?)\n except AttributeError:\n return gdb.execute(\"show version\", to_string=True).split(\"\\n\")[0]\n\n\ndef _py_version():\n return sys.version.replace(\"\\n\", \" \")\n\n\ndef capstone_version():\n try:\n import capstone\n\n return \".\".join(map(str, capstone.cs_version()))\n except ImportError:\n return \"not found\"\n\n\ndef unicorn_version():\n try:\n import unicorn\n\n return unicorn.__version__\n except ImportError:\n return \"not found\"\n\n\ndef all_versions():\n gdb_str = \"Gdb: %s\" % _gdb_version()\n py_str = \"Python: %s\" % _py_version()\n pwndbg_str = \"Pwndbg: %s\" % pwndbg.__version__\n\n capstone_str = \"Capstone: %s\" % capstone_version()\n unicorn_str = \"Unicorn: %s\" % unicorn_version()\n\n all_versions = (gdb_str, py_str, pwndbg_str, capstone_str, unicorn_str)\n\n ida_versions = pwndbg.ida.get_ida_versions()\n\n if ida_versions is not None:\n ida_version = \"IDA PRO: %s\" % ida_versions[\"ida\"]\n ida_py_ver = \"IDA Py: %s\" % ida_versions[\"python\"]\n ida_hr_ver = \"Hexrays: %s\" % ida_versions[\"hexrays\"]\n all_versions += (ida_version, ida_py_ver, ida_hr_ver)\n return all_versions\n\n\[email 
protected](\"Displays gdb, python and pwndbg versions.\")\ndef version():\n \"\"\"\n Displays gdb, python and pwndbg versions.\n \"\"\"\n print(\"\\n\".join(map(message.system, all_versions())))\n\n\nbugreport_parser = argparse.ArgumentParser(\n description=\"\"\"\n Generate bugreport\n \"\"\"\n)\nbugreport_group = bugreport_parser.add_mutually_exclusive_group()\nbugreport_group.add_argument(\n \"--run-browser\", \"-b\", action=\"store_true\", help=\"Open browser on github/issues/new\"\n)\nbugreport_group.add_argument(\n \"--use-gh\", \"-g\", action=\"store_true\", help=\"Create issue using Github CLI\"\n)\n\n\[email protected](bugreport_parser)\ndef bugreport(run_browser=False, use_gh=False):\n ISSUE_TEMPLATE = \"\"\"\n<!--\nBefore reporting a new issue, make sure that we do not have any duplicates already open.\nIf there is one it might be good to take part in the discussion there.\n\nPlease make sure you have checked that the issue persists on LATEST pwndbg version.\n\nBelow is a template for BUG REPORTS.\nDon't include it if this is a FEATURE REQUEST.\n-->\n\n\n### Description\n\n<!--\nBriefly describe the problem you are having in a few paragraphs.\n-->\n\n### Steps to reproduce\n\n<!--\nWhat do we have to do to reproduce the problem?\nIf this is connected to particular C/asm code or a binary,\nplease provide the binary or if possible, a smallest C code that reproduces the issue.\n-->\n\nGdb session history:\n```\n{gdb_history}\n```\n\n### My setup\n\n<!--\nShow us your gdb/python/pwndbg/OS/IDA Pro version (depending on your case).\n\nNOTE: We are currently testing Pwndbg only on Ubuntu installations but it should work fine on other distros as well.\n\nThis can be displayed in pwndbg through `version` command.\n\nIf it is somehow unavailable, use:\n* `show version` - for gdb\n* `py import sys; print(sys.version)` - for python\n* pwndbg version/git commit id\n-->\n\n```\n{setup}\n```\"\"\"\n\n gdb_config = gdb.execute(\"show configuration\", to_string=True).split(\"\\n\")\n all_info = all_versions()\n\n current_setup = \"Platform: %s\\n\" % platform()\n current_setup += \"\\n\".join(all_info)\n current_setup += \"\\n\" + \"\\n\".join(gdb_config)\n\n # get saved history size (not including current gdb session)\n gdb_history_file = gdb.execute(\"show history filename\", to_string=True)\n gdb_history_file = gdb_history_file[\n gdb_history_file.index('\"') + 1 : gdb_history_file.rindex('\"')\n ]\n gdb_history_len = 0\n try:\n with open(gdb_history_file, \"r\") as f:\n gdb_history_len = len(f.readlines())\n except FileNotFoundError:\n pass\n\n max_command_no = (\n int(gdb.execute(\"show commands\", to_string=True).split(\"\\n\")[-2].split()[0]) - 1\n )\n show_command_size = 10 # 'show command' returns 10 commands\n gdb_current_session_history = {}\n current_command_no = gdb_history_len + 1\n\n while current_command_no <= max_command_no:\n cmds = gdb.execute(\n \"show commands \" + str(current_command_no + (show_command_size // 2) + 1),\n to_string=True,\n ).split(\"\\n\")[:-1]\n for cmd in cmds:\n cmd_no, cmd = cmd.split(maxsplit=1)\n cmd_no = int(cmd_no)\n if cmd_no <= gdb_history_len:\n continue\n if current_command_no > max_command_no:\n break\n gdb_current_session_history[cmd_no] = cmd\n current_command_no += 1\n\n gdb_current_session_history = (v for (k, v) in sorted(gdb_current_session_history.items()))\n gdb_current_session_history = \"\\n\".join(gdb_current_session_history)\n\n issue_bugreport = ISSUE_TEMPLATE.format(\n gdb_history=gdb_current_session_history, setup=current_setup\n 
)\n print(issue_bugreport)\n\n please_please_submit = \"Please submit the bugreport generated above at \"\n github_issue_url = \"https://github.com/pwndbg/pwndbg/issues/new\"\n github_issue_body = \"?body=\" + quote(issue_bugreport)\n\n if use_gh:\n try:\n with NamedTemporaryFile(\"w\", delete=True) as f:\n f.write(issue_bugreport)\n f.flush()\n check_call([os.environ.get(\"EDITOR\", \"vi\"), f.name])\n check_call([\"gh\", \"issue\", \"create\", \"--body-file\", f.name])\n except Exception:\n print(please_please_submit + github_issue_url)\n raise\n elif run_browser:\n try:\n check_output([\"xdg-open\", github_issue_url + github_issue_body])\n except Exception:\n print(please_please_submit + github_issue_url)\n else:\n print(please_please_submit + github_issue_url)\n", "path": "pwndbg/commands/version.py"}]}
| 2,243 | 444 |
gh_patches_debug_21489
|
rasdani/github-patches
|
git_diff
|
pyinstaller__pyinstaller-3106
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
OS X ImportError: _sysconfigdata_m_darwin_ not found
# Description
When running a standalone executable that was built with PyInstaller on OS X, I receive the following error:
```
[7943] LOADER: Running pyiboot01_bootstrap.py
[7943] LOADER: Running pyi_rth_multiprocessing.py
[7943] LOADER: Running pyi_rth_pkgres.py
Traceback (most recent call last):
File "site-packages/PyInstaller/loader/rthooks/pyi_rth_pkgres.py", line 11, in <module>
File "/Users/addisonelliott/anaconda3/lib/python3.5/site-packages/PyInstaller/loader/pyimod03_importers.py", line 631, in exec_module
exec(bytecode, module.__dict__)
File "site-packages/pkg_resources/__init__.py", line 995, in <module>
File "site-packages/pkg_resources/__init__.py", line 998, in Environment
File "site-packages/pkg_resources/__init__.py", line 284, in get_supported_platform
File "site-packages/pkg_resources/__init__.py", line 480, in get_build_platform
File "sysconfig.py", line 688, in get_platform
File "sysconfig.py", line 549, in get_config_vars
File "sysconfig.py", line 420, in _init_posix
ImportError: No module named '_sysconfigdata_m_darwin_'
```
The issue is specific to OS X. I tested on Windows as well as Ubuntu (using WSL) and the issue was not present on either OS. Note that I can build the executable, but the error occurs at **run time**.
# Build Setup
* OS: Mac OS X 10.11 El Capitan
* Platform: Darwin-15.6.0-x86_64-i386-64bit
* Python: 3.5.4 using Anaconda
* PyInstaller: Tested with develop(3.4), 3.2.1, and 3.3 and issue occurs on all
# Example Setup
I have found that importing numpy in my case will trigger the error. If you comment out the import, it works fine. Create a Python script, paste the following code and then create an executable from it.
```
import numpy
print('Hello world')
```
And here is the command I am using to compile the code:
```
sudo pyinstaller -y --debug --clean --onedir main.py
```
# What I have discovered
I have discovered that using the hidden-import argument fixes the issue. The goal, however, is to create/edit a hook so that the fix is applied in a more permanent way.
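Concretely, the workaround is along these lines (the module name is taken from the traceback above; only the extra flag differs from my original command):
```
sudo pyinstaller -y --debug --clean --onedir --hidden-import _sysconfigdata_m_darwin_ main.py
```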
The failing import happens in CPython's sysconfig module: https://github.com/python/cpython/blob/master/Lib/sysconfig.py#L339
On Linux, the imported module is _sysconfigdata, which is odd because that doesn't match the GitHub source. I looked at sysconfig.py on Linux and it had some outdated code that just imported the _sysconfigdata module and did not look at the ABI, OS, etc.
Most likely, some alteration will need to be made to the sysconfig hook: https://github.com/pyinstaller/pyinstaller/blob/develop/PyInstaller/hooks/hook-sysconfig.py
I am going to attempt to create a PR for this. Currently, my questions are:
* What does the sysconfig hook load now?
* Did something change in Python that caused the _sysconfigdata module to be different?
* If so, what version did this occur?
**Edit:**
Here is some of the information I have discovered. For reasons I don't know, the CPython GitHub does not match up with the Python implementation from Anaconda. For example, Python 3.5.4 in Anaconda backports the sysconfig changes in 3.6. This does not match up with CPython on Github because the sysconfig changes are not present until 3.6.
This is ultimately what is causing the issue. The sysconfig hook for PyInstaller assumes that only Python versions 3.6 and up will contain the _get_sysconfigdata_name() function. Since this also occurs in 3.5.4 for Anaconda, there is an issue.
Below is the sysconfig.py file for my Anaconda 3.5.4 environment:
```
def _init_posix(vars):
"""Initialize the module as appropriate for POSIX systems."""
# _sysconfigdata is generated at build time, see _generate_posix_vars()
# _PYTHON_SYSCONFIGDATA_NAME support backported from Python 3.6
name = _get_sysconfigdata_name()
_temp = __import__(name, globals(), locals(), ['build_time_vars'], 0)
build_time_vars = _temp.build_time_vars
vars.update(build_time_vars)
```
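In other words, a feature check on sysconfig itself (rather than a Python-version check) would also cover this backport. As a rough sketch of what the hook condition could look like, not the final code:
```
import sysconfig

from PyInstaller.compat import is_win

# Check for the function itself instead of requiring Python >= 3.6,
# since Anaconda backports _get_sysconfigdata_name() to some 3.5.x builds.
if not is_win and hasattr(sysconfig, '_get_sysconfigdata_name'):
    hiddenimports = [sysconfig._get_sysconfigdata_name()]
```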
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `PyInstaller/hooks/hook-sysconfig.py`
Content:
```
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2005-2017, PyInstaller Development Team.
3 #
4 # Distributed under the terms of the GNU General Public License with exception
5 # for distributing bootloader.
6 #
7 # The full license is in the file COPYING.txt, distributed with this software.
8 #-----------------------------------------------------------------------------
9
10
11 # The 'sysconfig' module requires Makefile and pyconfig.h files from
12 # Python installation. 'sysconfig' parses these files to get some
13 # information from them.
14 # TODO Verify that bundling Makefile and pyconfig.h is still required for Python 3.
15
16 import sysconfig
17 import os
18
19 from PyInstaller.utils.hooks import relpath_to_config_or_make
20 from PyInstaller.compat import is_py36, is_win
21
22 _CONFIG_H = sysconfig.get_config_h_filename()
23 if hasattr(sysconfig, 'get_makefile_filename'):
24 # sysconfig.get_makefile_filename is missing in Python < 2.7.9
25 _MAKEFILE = sysconfig.get_makefile_filename()
26 else:
27 _MAKEFILE = sysconfig._get_makefile_filename()
28
29
30 datas = [(_CONFIG_H, relpath_to_config_or_make(_CONFIG_H))]
31
32 # The Makefile does not exist on all platforms, eg. on Windows
33 if os.path.exists(_MAKEFILE):
34 datas.append((_MAKEFILE, relpath_to_config_or_make(_MAKEFILE)))
35
36 if is_py36 and not is_win:
37 # Python 3.6 uses additional modules like
38 # `_sysconfigdata_m_linux_x86_64-linux-gnu`, see
39 # https://github.com/python/cpython/blob/3.6/Lib/sysconfig.py#L417
40 hiddenimports = [sysconfig._get_sysconfigdata_name()]
41
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/PyInstaller/hooks/hook-sysconfig.py b/PyInstaller/hooks/hook-sysconfig.py
--- a/PyInstaller/hooks/hook-sysconfig.py
+++ b/PyInstaller/hooks/hook-sysconfig.py
@@ -17,7 +17,7 @@
import os
from PyInstaller.utils.hooks import relpath_to_config_or_make
-from PyInstaller.compat import is_py36, is_win
+from PyInstaller.compat import is_win
_CONFIG_H = sysconfig.get_config_h_filename()
if hasattr(sysconfig, 'get_makefile_filename'):
@@ -33,8 +33,10 @@
if os.path.exists(_MAKEFILE):
datas.append((_MAKEFILE, relpath_to_config_or_make(_MAKEFILE)))
-if is_py36 and not is_win:
+if not is_win and hasattr(sysconfig, '_get_sysconfigdata_name'):
# Python 3.6 uses additional modules like
# `_sysconfigdata_m_linux_x86_64-linux-gnu`, see
# https://github.com/python/cpython/blob/3.6/Lib/sysconfig.py#L417
+ # Note: Some versions of Anaconda backport this feature to before 3.6.
+ # See issue #3105
hiddenimports = [sysconfig._get_sysconfigdata_name()]
|
{"golden_diff": "diff --git a/PyInstaller/hooks/hook-sysconfig.py b/PyInstaller/hooks/hook-sysconfig.py\n--- a/PyInstaller/hooks/hook-sysconfig.py\n+++ b/PyInstaller/hooks/hook-sysconfig.py\n@@ -17,7 +17,7 @@\n import os\n \n from PyInstaller.utils.hooks import relpath_to_config_or_make\n-from PyInstaller.compat import is_py36, is_win\n+from PyInstaller.compat import is_win\n \n _CONFIG_H = sysconfig.get_config_h_filename()\n if hasattr(sysconfig, 'get_makefile_filename'):\n@@ -33,8 +33,10 @@\n if os.path.exists(_MAKEFILE):\n datas.append((_MAKEFILE, relpath_to_config_or_make(_MAKEFILE)))\n \n-if is_py36 and not is_win:\n+if not is_win and hasattr(sysconfig, '_get_sysconfigdata_name'):\n # Python 3.6 uses additional modules like\n # `_sysconfigdata_m_linux_x86_64-linux-gnu`, see\n # https://github.com/python/cpython/blob/3.6/Lib/sysconfig.py#L417\n+ # Note: Some versions of Anaconda backport this feature to before 3.6.\n+ # See issue #3105\n hiddenimports = [sysconfig._get_sysconfigdata_name()]\n", "issue": "OS X ImportError: _sysconfigdata_m_darwin_ not found\n# Description\r\nWhen running a standalone executable that was built with PyInstaller on OS X, I receive the following error:\r\n```\r\n[7943] LOADER: Running pyiboot01_bootstrap.py\r\n[7943] LOADER: Running pyi_rth_multiprocessing.py\r\n[7943] LOADER: Running pyi_rth_pkgres.py\r\nTraceback (most recent call last):\r\n File \"site-packages/PyInstaller/loader/rthooks/pyi_rth_pkgres.py\", line 11, in <module>\r\n File \"/Users/addisonelliott/anaconda3/lib/python3.5/site-packages/PyInstaller/loader/pyimod03_importers.py\", line 631, in exec_module\r\n exec(bytecode, module.__dict__)\r\n File \"site-packages/pkg_resources/__init__.py\", line 995, in <module>\r\n File \"site-packages/pkg_resources/__init__.py\", line 998, in Environment\r\n File \"site-packages/pkg_resources/__init__.py\", line 284, in get_supported_platform\r\n File \"site-packages/pkg_resources/__init__.py\", line 480, in get_build_platform\r\n File \"sysconfig.py\", line 688, in get_platform\r\n File \"sysconfig.py\", line 549, in get_config_vars\r\n File \"sysconfig.py\", line 420, in _init_posix\r\nImportError: No module named '_sysconfigdata_m_darwin_'\r\n```\r\nIssue is specific to OS X. I tested on Windows as well as Ubuntu (using WSL) and the issue was not present on either OS. Note that I can build the executable but the error occurs on **run-time**.\r\n\r\n# Build Setup\r\n* OS: Mac OS X 10.11 El Capitan\r\n* Platform: Darwin-15.6.0-x86_64-i386-64bit\r\n* Python: 3.5.4 using Anaconda\r\n* PyInstaller: Tested with develop(3.4), 3.2.1, and 3.3 and issue occurs on all\r\n\r\n# Example Setup\r\nI have found that importing numpy in my case will trigger the error. If you comment out the import, it works fine. Create a Python script, paste the following code and then create an executable from it.\r\n```\r\nimport numpy\r\n\r\nprint('Hello world')\r\n```\r\n\r\nAnd here is the command I am using to compile the code: \r\n```\r\nsudo pyinstaller -y --debug --clean --onedir main.py\r\n```\r\n\r\n# What I have discovered\r\nI have discovered that using the hidden-import argument, this fixes the issue. The goal is to create/edit a hook and apply this fix in a more permanent method however.\r\n\r\nThe failed import module occurs in CPython: https://github.com/python/cpython/blob/master/Lib/sysconfig.py#L339\r\n\r\nIn Linux, the imported module is _sysconfigdata which is odd because that doesn't match the Github. 
I looked at sysconfig.py in Linux and it had some outdated code that just imported _sysconfigdata module and did not look at ABI, OS, etc.\r\n\r\nSome type of alteration will need to be done to the sysconfig hook most likely: https://github.com/pyinstaller/pyinstaller/blob/develop/PyInstaller/hooks/hook-sysconfig.py\r\n\r\nI am going to attempt to create a PR for this. Currently, my questions are:\r\n* What does the sysconfig hook load now?\r\n* Did something change in Python that caused the _sysconfigdata module to be different?\r\n* If so, what version did this occur?\r\n\r\n**Edit:**\r\nHere is some of the information I have discovered. For reasons I don't know, the CPython GitHub does not match up with the Python implementation from Anaconda. For example, Python 3.5.4 in Anaconda backports the sysconfig changes in 3.6. This does not match up with CPython on Github because the sysconfig changes are not present until 3.6.\r\n\r\nThis is ultimately what is causing the issue. The sysconfig hook for PyInstaller assumes that only Python versions 3.6 and up will contain the _get_sysconfigdata_name() function. Since this also occurs in 3.5.4 for Anaconda, there is an issue.\r\n\r\nBelow is the sysconfig.py file for my Anaconda 3.5.4 environment:\r\n```\r\ndef _init_posix(vars):\r\n \"\"\"Initialize the module as appropriate for POSIX systems.\"\"\"\r\n # _sysconfigdata is generated at build time, see _generate_posix_vars()\r\n # _PYTHON_SYSCONFIGDATA_NAME support backported from Python 3.6\r\n name = _get_sysconfigdata_name()\r\n _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0)\r\n build_time_vars = _temp.build_time_vars\r\n vars.update(build_time_vars)\r\n```\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2005-2017, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\n\n# The 'sysconfig' module requires Makefile and pyconfig.h files from\n# Python installation. 'sysconfig' parses these files to get some\n# information from them.\n# TODO Verify that bundling Makefile and pyconfig.h is still required for Python 3.\n\nimport sysconfig\nimport os\n\nfrom PyInstaller.utils.hooks import relpath_to_config_or_make\nfrom PyInstaller.compat import is_py36, is_win\n\n_CONFIG_H = sysconfig.get_config_h_filename()\nif hasattr(sysconfig, 'get_makefile_filename'):\n # sysconfig.get_makefile_filename is missing in Python < 2.7.9\n _MAKEFILE = sysconfig.get_makefile_filename()\nelse:\n _MAKEFILE = sysconfig._get_makefile_filename()\n\n\ndatas = [(_CONFIG_H, relpath_to_config_or_make(_CONFIG_H))]\n\n# The Makefile does not exist on all platforms, eg. 
on Windows\nif os.path.exists(_MAKEFILE):\n datas.append((_MAKEFILE, relpath_to_config_or_make(_MAKEFILE)))\n\nif is_py36 and not is_win:\n # Python 3.6 uses additional modules like\n # `_sysconfigdata_m_linux_x86_64-linux-gnu`, see\n # https://github.com/python/cpython/blob/3.6/Lib/sysconfig.py#L417\n hiddenimports = [sysconfig._get_sysconfigdata_name()]\n", "path": "PyInstaller/hooks/hook-sysconfig.py"}], "after_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2005-2017, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\n\n# The 'sysconfig' module requires Makefile and pyconfig.h files from\n# Python installation. 'sysconfig' parses these files to get some\n# information from them.\n# TODO Verify that bundling Makefile and pyconfig.h is still required for Python 3.\n\nimport sysconfig\nimport os\n\nfrom PyInstaller.utils.hooks import relpath_to_config_or_make\nfrom PyInstaller.compat import is_win\n\n_CONFIG_H = sysconfig.get_config_h_filename()\nif hasattr(sysconfig, 'get_makefile_filename'):\n # sysconfig.get_makefile_filename is missing in Python < 2.7.9\n _MAKEFILE = sysconfig.get_makefile_filename()\nelse:\n _MAKEFILE = sysconfig._get_makefile_filename()\n\n\ndatas = [(_CONFIG_H, relpath_to_config_or_make(_CONFIG_H))]\n\n# The Makefile does not exist on all platforms, eg. on Windows\nif os.path.exists(_MAKEFILE):\n datas.append((_MAKEFILE, relpath_to_config_or_make(_MAKEFILE)))\n\nif not is_win and hasattr(sysconfig, '_get_sysconfigdata_name'):\n # Python 3.6 uses additional modules like\n # `_sysconfigdata_m_linux_x86_64-linux-gnu`, see\n # https://github.com/python/cpython/blob/3.6/Lib/sysconfig.py#L417\n # Note: Some versions of Anaconda backport this feature to before 3.6.\n # See issue #3105\n hiddenimports = [sysconfig._get_sysconfigdata_name()]\n", "path": "PyInstaller/hooks/hook-sysconfig.py"}]}
| 1,775 | 293 |
gh_patches_debug_20899
|
rasdani/github-patches
|
git_diff
|
PaddlePaddle__PaddleDetection-365
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error reported when running sensitivity analysis
Following the documentation, I executed the command below:
python3 slim/sensitive/sensitive.py -c configs/yolov3_mobilenet_v1_voc.yml -o weights=https://paddlemodels.bj.bcebos.com/object_detection/yolov3_mobilenet_v1_voc.tar --pruned_params "yolo_block.0.0.0.conv.weights,yolo_block.0.0.1.conv.weights,yolo_block.0.1.0.conv.weights,yolo_block.0.1.1.conv.weights,yolo_block.0.2.conv.weights,yolo_block.0.tip.conv.weights,yolo_block.1.0.0.conv.weights,yolo_block.1.0.1.conv.weights,yolo_block.1.1.0.conv.weights,yolo_block.1.1.1.conv.weights,yolo_block.1.2.conv.weights,yolo_block.1.tip.conv.weights,yolo_block.2.0.0.conv.weights,yolo_block.2.0.1.conv.weights,yolo_block.2.1.0.conv.weights,yolo_block.2.1.1.conv.weights,yolo_block.2.2.conv.weights,yolo_block.2.tip.conv.weights" --sensitivities_file "./demo.data"
The following error is reported: 2020-03-18 14:40:01,847-INFO: pruned params: ['yolo_block.0.0.0.conv.weights', 'yolo_block.0.0.1.conv.weights', 'yolo_block.0.1.0.conv.weights', 'yolo_block.0.1.1.conv.weights', 'yolo_block.0.2.conv.weights', 'yolo_block.0.tip.conv.weights', 'yolo_block.1.0.0.conv.weights', 'yolo_block.1.0.1.conv.weights', 'yolo_block.1.1.0.conv.weights', 'yolo_block.1.1.1.conv.weights', 'yolo_block.1.2.conv.weights', 'yolo_block.1.tip.conv.weights', 'yolo_block.2.0.0.conv.weights', 'yolo_block.2.0.1.conv.weights', 'yolo_block.2.1.0.conv.weights', 'yolo_block.2.1.1.conv.weights', 'yolo_block.2.2.conv.weights', 'yolo_block.2.tip.conv.weights']
2020-03-18 14:40:01,847-INFO: pruned ratios: [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
I0318 14:40:01.860719 14780 parallel_executor.cc:440] The Program will be executed on CUDA using ParallelExecutor, 1 cards are used, so 1 programs are executed in parallel.
I0318 14:40:01.868165 14780 build_strategy.cc:365] SeqOnlyAllReduceOps:0, num_trainers:1
I0318 14:40:01.874647 14780 parallel_executor.cc:307] Inplace strategy is enabled, when build_strategy.enable_inplace = True
I0318 14:40:01.879664 14780 parallel_executor.cc:375] Garbage collection strategy is enabled, when FLAGS_eager_delete_tensor_gb = 0
2020-03-18 14:40:02,443-INFO: Test iter 0
2020-03-18 14:40:10,603-INFO: Test iter 100
2020-03-18 14:40:21,551-INFO: Test iter 200
2020-03-18 14:40:28,985-INFO: Test iter 300
2020-03-18 14:39:54,955-INFO: Test iter 400
2020-03-18 14:40:02,477-INFO: Test iter 500
2020-03-18 14:40:09,807-INFO: Test iter 600
2020-03-18 14:40:11,114-INFO: Test finish iter 619
2020-03-18 14:40:11,114-INFO: Total number of images: 4952, inference time: 534.683212877132 fps.
2020-03-18 14:40:11,115-INFO: Start evaluate...
Traceback (most recent call last):
File "slim/sensitive/sensitive.py", line 214, in <module>
main()
File "slim/sensitive/sensitive.py", line 172, in main
pruned_ratios=pruned_ratios)
File "/usr/local/lib/python3.5/dist-packages/paddleslim/prune/sensitive.py", line 86, in sensitivity
baseline = eval_func(graph.program)
File "slim/sensitive/sensitive.py", line 154, in test
dataset=dataset)
File "/home/chenchaocun/PaddleDetection_slim/slim/sensitive/ppdet/utils/eval_utils.py", line 222, in eval_results
map_type=map_type)
File "/home/chenchaocun/PaddleDetection_slim/slim/sensitive/ppdet/utils/voc_eval.py", line 72, in bbox_eval
gt_boxes = t['gt_bbox'][0]
KeyError: 'gt_bbox'
This voc_eval.py has never produced an error when evaluating other tasks; could you advise what is causing this problem?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `slim/sensitive/sensitive.py`
Content:
```
1 # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from __future__ import absolute_import
16 from __future__ import division
17 from __future__ import print_function
18
19 import os
20 import time
21 import numpy as np
22 import datetime
23 from collections import deque
24
25
26 def set_paddle_flags(**kwargs):
27 for key, value in kwargs.items():
28 if os.environ.get(key, None) is None:
29 os.environ[key] = str(value)
30
31
32 # NOTE(paddle-dev): All of these flags should be set before
33 # `import paddle`. Otherwise, it would not take any effect.
34 set_paddle_flags(
35 FLAGS_eager_delete_tensor_gb=0, # enable GC to save memory
36 )
37
38 from paddle import fluid
39 from ppdet.experimental import mixed_precision_context
40 from ppdet.core.workspace import load_config, merge_config, create
41 #from ppdet.data.data_feed import create_reader
42
43 from ppdet.data.reader import create_reader
44
45 from ppdet.utils.cli import print_total_cfg
46 from ppdet.utils import dist_utils
47 from ppdet.utils.eval_utils import parse_fetches, eval_run, eval_results
48 from ppdet.utils.stats import TrainingStats
49 from ppdet.utils.cli import ArgsParser
50 from ppdet.utils.check import check_gpu, check_version
51 import ppdet.utils.checkpoint as checkpoint
52 from ppdet.modeling.model_input import create_feed
53 from paddleslim.prune import sensitivity
54 import logging
55 FORMAT = '%(asctime)s-%(levelname)s: %(message)s'
56 logging.basicConfig(level=logging.INFO, format=FORMAT)
57 logger = logging.getLogger(__name__)
58
59
60 def main():
61 env = os.environ
62
63 print("FLAGS.config: {}".format(FLAGS.config))
64 cfg = load_config(FLAGS.config)
65 assert 'architecture' in cfg
66 main_arch = cfg.architecture
67
68 merge_config(FLAGS.opt)
69
70 print_total_cfg(cfg)
71
72 place = fluid.CUDAPlace(0)
73 exe = fluid.Executor(place)
74
75 # build program
76 startup_prog = fluid.Program()
77 eval_prog = fluid.Program()
78 with fluid.program_guard(eval_prog, startup_prog):
79 with fluid.unique_name.guard():
80 model = create(main_arch)
81 inputs_def = cfg['EvalReader']['inputs_def']
82 feed_vars, eval_loader = model.build_inputs(**inputs_def)
83 fetches = model.eval(feed_vars)
84 eval_prog = eval_prog.clone(True)
85
86 if FLAGS.print_params:
87 print(
88 "-------------------------All parameters in current graph----------------------"
89 )
90 for block in eval_prog.blocks:
91 for param in block.all_parameters():
92 print("parameter name: {}\tshape: {}".format(param.name,
93 param.shape))
94 print(
95 "------------------------------------------------------------------------------"
96 )
97 return
98
99 eval_reader = create_reader(cfg.EvalReader)
100 eval_loader.set_sample_list_generator(eval_reader, place)
101
102 # parse eval fetches
103 extra_keys = []
104 if cfg.metric == 'COCO':
105 extra_keys = ['im_info', 'im_id', 'im_shape']
106 if cfg.metric == 'VOC':
107 extra_keys = ['gt_box', 'gt_label', 'is_difficult']
108 if cfg.metric == 'WIDERFACE':
109 extra_keys = ['im_id', 'im_shape', 'gt_box']
110 eval_keys, eval_values, eval_cls = parse_fetches(fetches, eval_prog,
111 extra_keys)
112
113 exe.run(startup_prog)
114
115 fuse_bn = getattr(model.backbone, 'norm_type', None) == 'affine_channel'
116
117 ignore_params = cfg.finetune_exclude_pretrained_params \
118 if 'finetune_exclude_pretrained_params' in cfg else []
119
120 start_iter = 0
121
122 if cfg.weights:
123 checkpoint.load_params(exe, eval_prog, cfg.weights)
124 else:
125 logger.warn("Please set cfg.weights to load trained model.")
126
127 # whether output bbox is normalized in model output layer
128 is_bbox_normalized = False
129 if hasattr(model, 'is_bbox_normalized') and \
130 callable(model.is_bbox_normalized):
131 is_bbox_normalized = model.is_bbox_normalized()
132
133 # if map_type not set, use default 11point, only use in VOC eval
134 map_type = cfg.map_type if 'map_type' in cfg else '11point'
135
136 def test(program):
137
138 compiled_eval_prog = fluid.compiler.CompiledProgram(program)
139
140 results = eval_run(exe, compiled_eval_prog, eval_loader, eval_keys,
141 eval_values, eval_cls)
142 resolution = None
143 if 'mask' in results[0]:
144 resolution = model.mask_head.resolution
145 dataset = cfg['EvalReader']['dataset']
146 box_ap_stats = eval_results(
147 results,
148 cfg.metric,
149 cfg.num_classes,
150 resolution,
151 is_bbox_normalized,
152 FLAGS.output_eval,
153 map_type,
154 dataset=dataset)
155 return box_ap_stats[0]
156
157 pruned_params = FLAGS.pruned_params
158
159 assert (
160 FLAGS.pruned_params is not None
161 ), "FLAGS.pruned_params is empty!!! Please set it by '--pruned_params' option."
162 pruned_params = FLAGS.pruned_params.strip().split(",")
163 logger.info("pruned params: {}".format(pruned_params))
164 pruned_ratios = [float(n) for n in FLAGS.pruned_ratios.strip().split(" ")]
165 logger.info("pruned ratios: {}".format(pruned_ratios))
166 sensitivity(
167 eval_prog,
168 place,
169 pruned_params,
170 test,
171 sensitivities_file=FLAGS.sensitivities_file,
172 pruned_ratios=pruned_ratios)
173
174
175 if __name__ == '__main__':
176 parser = ArgsParser()
177 parser.add_argument(
178 "--output_eval",
179 default=None,
180 type=str,
181 help="Evaluation directory, default is current directory.")
182 parser.add_argument(
183 "-d",
184 "--dataset_dir",
185 default=None,
186 type=str,
187 help="Dataset path, same as DataFeed.dataset.dataset_dir")
188 parser.add_argument(
189 "-s",
190 "--sensitivities_file",
191 default="sensitivities.data",
192 type=str,
193 help="The file used to save sensitivities.")
194 parser.add_argument(
195 "-p",
196 "--pruned_params",
197 default=None,
198 type=str,
199 help="The parameters to be pruned when calculating sensitivities.")
200 parser.add_argument(
201 "-r",
202 "--pruned_ratios",
203 default="0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9",
204 type=str,
205 help="The ratios pruned iteratively for each parameter when calculating sensitivities."
206 )
207 parser.add_argument(
208 "-P",
209 "--print_params",
210 default=False,
211 action='store_true',
212 help="Whether to only print the parameters' names and shapes.")
213 FLAGS = parser.parse_args()
214 main()
215
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/slim/sensitive/sensitive.py b/slim/sensitive/sensitive.py
--- a/slim/sensitive/sensitive.py
+++ b/slim/sensitive/sensitive.py
@@ -82,7 +82,6 @@
feed_vars, eval_loader = model.build_inputs(**inputs_def)
fetches = model.eval(feed_vars)
eval_prog = eval_prog.clone(True)
-
if FLAGS.print_params:
print(
"-------------------------All parameters in current graph----------------------"
@@ -104,7 +103,7 @@
if cfg.metric == 'COCO':
extra_keys = ['im_info', 'im_id', 'im_shape']
if cfg.metric == 'VOC':
- extra_keys = ['gt_box', 'gt_label', 'is_difficult']
+ extra_keys = ['gt_bbox', 'gt_class', 'is_difficult']
if cfg.metric == 'WIDERFACE':
extra_keys = ['im_id', 'im_shape', 'gt_box']
eval_keys, eval_values, eval_cls = parse_fetches(fetches, eval_prog,
|
{"golden_diff": "diff --git a/slim/sensitive/sensitive.py b/slim/sensitive/sensitive.py\n--- a/slim/sensitive/sensitive.py\n+++ b/slim/sensitive/sensitive.py\n@@ -82,7 +82,6 @@\n feed_vars, eval_loader = model.build_inputs(**inputs_def)\n fetches = model.eval(feed_vars)\n eval_prog = eval_prog.clone(True)\n-\n if FLAGS.print_params:\n print(\n \"-------------------------All parameters in current graph----------------------\"\n@@ -104,7 +103,7 @@\n if cfg.metric == 'COCO':\n extra_keys = ['im_info', 'im_id', 'im_shape']\n if cfg.metric == 'VOC':\n- extra_keys = ['gt_box', 'gt_label', 'is_difficult']\n+ extra_keys = ['gt_bbox', 'gt_class', 'is_difficult']\n if cfg.metric == 'WIDERFACE':\n extra_keys = ['im_id', 'im_shape', 'gt_box']\n eval_keys, eval_values, eval_cls = parse_fetches(fetches, eval_prog,\n", "issue": "\u8dd1\u654f\u611f\u5ea6\u5206\u6790\u65f6\u62a5\u9519\n\u6309\u6587\u6863\u6267\u884c\u5982\u4e0b\u6307\u4ee4\uff1a\r\npython3 slim/sensitive/sensitive.py -c configs/yolov3_mobilenet_v1_voc.yml -o weights=https://paddlemodels.bj.bcebos.com/object_detection/yolov3_mobilenet_v1_voc.tar --pruned_params \"yolo_block.0.0.0.conv.weights,yolo_block.0.0.1.conv.weights,yolo_block.0.1.0.conv.weights,yolo_block.0.1.1.conv.weights,yolo_block.0.2.conv.weights,yolo_block.0.tip.conv.weights,yolo_block.1.0.0.conv.weights,yolo_block.1.0.1.conv.weights,yolo_block.1.1.0.conv.weights,yolo_block.1.1.1.conv.weights,yolo_block.1.2.conv.weights,yolo_block.1.tip.conv.weights,yolo_block.2.0.0.conv.weights,yolo_block.2.0.1.conv.weights,yolo_block.2.1.0.conv.weights,yolo_block.2.1.1.conv.weights,yolo_block.2.2.conv.weights,yolo_block.2.tip.conv.weights\" --sensitivities_file \"./demo.data\"\r\n\u62a5\u4ee5\u4e0b\u9519\u8bef\uff1a2020-03-18 14:40:01,847-INFO: pruned params: ['yolo_block.0.0.0.conv.weights', 'yolo_block.0.0.1.conv.weights', 'yolo_block.0.1.0.conv.weights', 'yolo_block.0.1.1.conv.weights', 'yolo_block.0.2.conv.weights', 'yolo_block.0.tip.conv.weights', 'yolo_block.1.0.0.conv.weights', 'yolo_block.1.0.1.conv.weights', 'yolo_block.1.1.0.conv.weights', 'yolo_block.1.1.1.conv.weights', 'yolo_block.1.2.conv.weights', 'yolo_block.1.tip.conv.weights', 'yolo_block.2.0.0.conv.weights', 'yolo_block.2.0.1.conv.weights', 'yolo_block.2.1.0.conv.weights', 'yolo_block.2.1.1.conv.weights', 'yolo_block.2.2.conv.weights', 'yolo_block.2.tip.conv.weights']\r\n2020-03-18 14:40:01,847-INFO: pruned ratios: [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]\r\nI0318 14:40:01.860719 14780 parallel_executor.cc:440] The Program will be executed on CUDA using ParallelExecutor, 1 cards are used, so 1 programs are executed in parallel.\r\nI0318 14:40:01.868165 14780 build_strategy.cc:365] SeqOnlyAllReduceOps:0, num_trainers:1\r\nI0318 14:40:01.874647 14780 parallel_executor.cc:307] Inplace strategy is enabled, when build_strategy.enable_inplace = True\r\nI0318 14:40:01.879664 14780 parallel_executor.cc:375] Garbage collection strategy is enabled, when FLAGS_eager_delete_tensor_gb = 0\r\n2020-03-18 14:40:02,443-INFO: Test iter 0\r\n2020-03-18 14:40:10,603-INFO: Test iter 100\r\n2020-03-18 14:40:21,551-INFO: Test iter 200\r\n2020-03-18 14:40:28,985-INFO: Test iter 300\r\n2020-03-18 14:39:54,955-INFO: Test iter 400\r\n2020-03-18 14:40:02,477-INFO: Test iter 500\r\n2020-03-18 14:40:09,807-INFO: Test iter 600\r\n2020-03-18 14:40:11,114-INFO: Test finish iter 619\r\n2020-03-18 14:40:11,114-INFO: Total number of images: 4952, inference time: 534.683212877132 fps.\r\n2020-03-18 14:40:11,115-INFO: Start evaluate...\r\nTraceback (most 
recent call last):\r\n File \"slim/sensitive/sensitive.py\", line 214, in <module>\r\n main()\r\n File \"slim/sensitive/sensitive.py\", line 172, in main\r\n pruned_ratios=pruned_ratios)\r\n File \"/usr/local/lib/python3.5/dist-packages/paddleslim/prune/sensitive.py\", line 86, in sensitivity\r\n baseline = eval_func(graph.program)\r\n File \"slim/sensitive/sensitive.py\", line 154, in test\r\n dataset=dataset)\r\n File \"/home/chenchaocun/PaddleDetection_slim/slim/sensitive/ppdet/utils/eval_utils.py\", line 222, in eval_results\r\n map_type=map_type)\r\n File \"/home/chenchaocun/PaddleDetection_slim/slim/sensitive/ppdet/utils/voc_eval.py\", line 72, in bbox_eval\r\n gt_boxes = t['gt_bbox'][0]\r\nKeyError: 'gt_bbox'\r\n\u8fd9\u4e2avoc_eval.py\u5728\u5176\u4ed6\u4efb\u52a1\u9a8c\u8bc1\u65f6\u6ca1\u6709\u51fa\u73b0\u8fc7\u9519\u8bef\uff0c\u8bf7\u8bf7\u6559\u4e0b\u8fd9\u662f\u5565\u95ee\u9898\u5bfc\u81f4\u7684\uff1f\n", "before_files": [{"content": "# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport os\nimport time\nimport numpy as np\nimport datetime\nfrom collections import deque\n\n\ndef set_paddle_flags(**kwargs):\n for key, value in kwargs.items():\n if os.environ.get(key, None) is None:\n os.environ[key] = str(value)\n\n\n# NOTE(paddle-dev): All of these flags should be set before\n# `import paddle`. 
Otherwise, it would not take any effect.\nset_paddle_flags(\n FLAGS_eager_delete_tensor_gb=0, # enable GC to save memory\n)\n\nfrom paddle import fluid\nfrom ppdet.experimental import mixed_precision_context\nfrom ppdet.core.workspace import load_config, merge_config, create\n#from ppdet.data.data_feed import create_reader\n\nfrom ppdet.data.reader import create_reader\n\nfrom ppdet.utils.cli import print_total_cfg\nfrom ppdet.utils import dist_utils\nfrom ppdet.utils.eval_utils import parse_fetches, eval_run, eval_results\nfrom ppdet.utils.stats import TrainingStats\nfrom ppdet.utils.cli import ArgsParser\nfrom ppdet.utils.check import check_gpu, check_version\nimport ppdet.utils.checkpoint as checkpoint\nfrom ppdet.modeling.model_input import create_feed\nfrom paddleslim.prune import sensitivity\nimport logging\nFORMAT = '%(asctime)s-%(levelname)s: %(message)s'\nlogging.basicConfig(level=logging.INFO, format=FORMAT)\nlogger = logging.getLogger(__name__)\n\n\ndef main():\n env = os.environ\n\n print(\"FLAGS.config: {}\".format(FLAGS.config))\n cfg = load_config(FLAGS.config)\n assert 'architecture' in cfg\n main_arch = cfg.architecture\n\n merge_config(FLAGS.opt)\n\n print_total_cfg(cfg)\n\n place = fluid.CUDAPlace(0)\n exe = fluid.Executor(place)\n\n # build program\n startup_prog = fluid.Program()\n eval_prog = fluid.Program()\n with fluid.program_guard(eval_prog, startup_prog):\n with fluid.unique_name.guard():\n model = create(main_arch)\n inputs_def = cfg['EvalReader']['inputs_def']\n feed_vars, eval_loader = model.build_inputs(**inputs_def)\n fetches = model.eval(feed_vars)\n eval_prog = eval_prog.clone(True)\n\n if FLAGS.print_params:\n print(\n \"-------------------------All parameters in current graph----------------------\"\n )\n for block in eval_prog.blocks:\n for param in block.all_parameters():\n print(\"parameter name: {}\\tshape: {}\".format(param.name,\n param.shape))\n print(\n \"------------------------------------------------------------------------------\"\n )\n return\n\n eval_reader = create_reader(cfg.EvalReader)\n eval_loader.set_sample_list_generator(eval_reader, place)\n\n # parse eval fetches\n extra_keys = []\n if cfg.metric == 'COCO':\n extra_keys = ['im_info', 'im_id', 'im_shape']\n if cfg.metric == 'VOC':\n extra_keys = ['gt_box', 'gt_label', 'is_difficult']\n if cfg.metric == 'WIDERFACE':\n extra_keys = ['im_id', 'im_shape', 'gt_box']\n eval_keys, eval_values, eval_cls = parse_fetches(fetches, eval_prog,\n extra_keys)\n\n exe.run(startup_prog)\n\n fuse_bn = getattr(model.backbone, 'norm_type', None) == 'affine_channel'\n\n ignore_params = cfg.finetune_exclude_pretrained_params \\\n if 'finetune_exclude_pretrained_params' in cfg else []\n\n start_iter = 0\n\n if cfg.weights:\n checkpoint.load_params(exe, eval_prog, cfg.weights)\n else:\n logger.warn(\"Please set cfg.weights to load trained model.\")\n\n # whether output bbox is normalized in model output layer\n is_bbox_normalized = False\n if hasattr(model, 'is_bbox_normalized') and \\\n callable(model.is_bbox_normalized):\n is_bbox_normalized = model.is_bbox_normalized()\n\n # if map_type not set, use default 11point, only use in VOC eval\n map_type = cfg.map_type if 'map_type' in cfg else '11point'\n\n def test(program):\n\n compiled_eval_prog = fluid.compiler.CompiledProgram(program)\n\n results = eval_run(exe, compiled_eval_prog, eval_loader, eval_keys,\n eval_values, eval_cls)\n resolution = None\n if 'mask' in results[0]:\n resolution = model.mask_head.resolution\n dataset = 
cfg['EvalReader']['dataset']\n box_ap_stats = eval_results(\n results,\n cfg.metric,\n cfg.num_classes,\n resolution,\n is_bbox_normalized,\n FLAGS.output_eval,\n map_type,\n dataset=dataset)\n return box_ap_stats[0]\n\n pruned_params = FLAGS.pruned_params\n\n assert (\n FLAGS.pruned_params is not None\n ), \"FLAGS.pruned_params is empty!!! Please set it by '--pruned_params' option.\"\n pruned_params = FLAGS.pruned_params.strip().split(\",\")\n logger.info(\"pruned params: {}\".format(pruned_params))\n pruned_ratios = [float(n) for n in FLAGS.pruned_ratios.strip().split(\" \")]\n logger.info(\"pruned ratios: {}\".format(pruned_ratios))\n sensitivity(\n eval_prog,\n place,\n pruned_params,\n test,\n sensitivities_file=FLAGS.sensitivities_file,\n pruned_ratios=pruned_ratios)\n\n\nif __name__ == '__main__':\n parser = ArgsParser()\n parser.add_argument(\n \"--output_eval\",\n default=None,\n type=str,\n help=\"Evaluation directory, default is current directory.\")\n parser.add_argument(\n \"-d\",\n \"--dataset_dir\",\n default=None,\n type=str,\n help=\"Dataset path, same as DataFeed.dataset.dataset_dir\")\n parser.add_argument(\n \"-s\",\n \"--sensitivities_file\",\n default=\"sensitivities.data\",\n type=str,\n help=\"The file used to save sensitivities.\")\n parser.add_argument(\n \"-p\",\n \"--pruned_params\",\n default=None,\n type=str,\n help=\"The parameters to be pruned when calculating sensitivities.\")\n parser.add_argument(\n \"-r\",\n \"--pruned_ratios\",\n default=\"0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9\",\n type=str,\n help=\"The ratios pruned iteratively for each parameter when calculating sensitivities.\"\n )\n parser.add_argument(\n \"-P\",\n \"--print_params\",\n default=False,\n action='store_true',\n help=\"Whether to only print the parameters' names and shapes.\")\n FLAGS = parser.parse_args()\n main()\n", "path": "slim/sensitive/sensitive.py"}], "after_files": [{"content": "# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport os\nimport time\nimport numpy as np\nimport datetime\nfrom collections import deque\n\n\ndef set_paddle_flags(**kwargs):\n for key, value in kwargs.items():\n if os.environ.get(key, None) is None:\n os.environ[key] = str(value)\n\n\n# NOTE(paddle-dev): All of these flags should be set before\n# `import paddle`. 
Otherwise, it would not take any effect.\nset_paddle_flags(\n FLAGS_eager_delete_tensor_gb=0, # enable GC to save memory\n)\n\nfrom paddle import fluid\nfrom ppdet.experimental import mixed_precision_context\nfrom ppdet.core.workspace import load_config, merge_config, create\n#from ppdet.data.data_feed import create_reader\n\nfrom ppdet.data.reader import create_reader\n\nfrom ppdet.utils.cli import print_total_cfg\nfrom ppdet.utils import dist_utils\nfrom ppdet.utils.eval_utils import parse_fetches, eval_run, eval_results\nfrom ppdet.utils.stats import TrainingStats\nfrom ppdet.utils.cli import ArgsParser\nfrom ppdet.utils.check import check_gpu, check_version\nimport ppdet.utils.checkpoint as checkpoint\nfrom ppdet.modeling.model_input import create_feed\nfrom paddleslim.prune import sensitivity\nimport logging\nFORMAT = '%(asctime)s-%(levelname)s: %(message)s'\nlogging.basicConfig(level=logging.INFO, format=FORMAT)\nlogger = logging.getLogger(__name__)\n\n\ndef main():\n env = os.environ\n\n print(\"FLAGS.config: {}\".format(FLAGS.config))\n cfg = load_config(FLAGS.config)\n assert 'architecture' in cfg\n main_arch = cfg.architecture\n\n merge_config(FLAGS.opt)\n\n print_total_cfg(cfg)\n\n place = fluid.CUDAPlace(0)\n exe = fluid.Executor(place)\n\n # build program\n startup_prog = fluid.Program()\n eval_prog = fluid.Program()\n with fluid.program_guard(eval_prog, startup_prog):\n with fluid.unique_name.guard():\n model = create(main_arch)\n inputs_def = cfg['EvalReader']['inputs_def']\n feed_vars, eval_loader = model.build_inputs(**inputs_def)\n fetches = model.eval(feed_vars)\n eval_prog = eval_prog.clone(True)\n if FLAGS.print_params:\n print(\n \"-------------------------All parameters in current graph----------------------\"\n )\n for block in eval_prog.blocks:\n for param in block.all_parameters():\n print(\"parameter name: {}\\tshape: {}\".format(param.name,\n param.shape))\n print(\n \"------------------------------------------------------------------------------\"\n )\n return\n\n eval_reader = create_reader(cfg.EvalReader)\n eval_loader.set_sample_list_generator(eval_reader, place)\n\n # parse eval fetches\n extra_keys = []\n if cfg.metric == 'COCO':\n extra_keys = ['im_info', 'im_id', 'im_shape']\n if cfg.metric == 'VOC':\n extra_keys = ['gt_bbox', 'gt_class', 'is_difficult']\n if cfg.metric == 'WIDERFACE':\n extra_keys = ['im_id', 'im_shape', 'gt_box']\n eval_keys, eval_values, eval_cls = parse_fetches(fetches, eval_prog,\n extra_keys)\n\n exe.run(startup_prog)\n\n fuse_bn = getattr(model.backbone, 'norm_type', None) == 'affine_channel'\n\n ignore_params = cfg.finetune_exclude_pretrained_params \\\n if 'finetune_exclude_pretrained_params' in cfg else []\n\n start_iter = 0\n\n if cfg.weights:\n checkpoint.load_params(exe, eval_prog, cfg.weights)\n else:\n logger.warn(\"Please set cfg.weights to load trained model.\")\n\n # whether output bbox is normalized in model output layer\n is_bbox_normalized = False\n if hasattr(model, 'is_bbox_normalized') and \\\n callable(model.is_bbox_normalized):\n is_bbox_normalized = model.is_bbox_normalized()\n\n # if map_type not set, use default 11point, only use in VOC eval\n map_type = cfg.map_type if 'map_type' in cfg else '11point'\n\n def test(program):\n\n compiled_eval_prog = fluid.compiler.CompiledProgram(program)\n\n results = eval_run(exe, compiled_eval_prog, eval_loader, eval_keys,\n eval_values, eval_cls)\n resolution = None\n if 'mask' in results[0]:\n resolution = model.mask_head.resolution\n dataset = 
cfg['EvalReader']['dataset']\n box_ap_stats = eval_results(\n results,\n cfg.metric,\n cfg.num_classes,\n resolution,\n is_bbox_normalized,\n FLAGS.output_eval,\n map_type,\n dataset=dataset)\n return box_ap_stats[0]\n\n pruned_params = FLAGS.pruned_params\n\n assert (\n FLAGS.pruned_params is not None\n ), \"FLAGS.pruned_params is empty!!! Please set it by '--pruned_params' option.\"\n pruned_params = FLAGS.pruned_params.strip().split(\",\")\n logger.info(\"pruned params: {}\".format(pruned_params))\n pruned_ratios = [float(n) for n in FLAGS.pruned_ratios.strip().split(\" \")]\n logger.info(\"pruned ratios: {}\".format(pruned_ratios))\n sensitivity(\n eval_prog,\n place,\n pruned_params,\n test,\n sensitivities_file=FLAGS.sensitivities_file,\n pruned_ratios=pruned_ratios)\n\n\nif __name__ == '__main__':\n parser = ArgsParser()\n parser.add_argument(\n \"--output_eval\",\n default=None,\n type=str,\n help=\"Evaluation directory, default is current directory.\")\n parser.add_argument(\n \"-d\",\n \"--dataset_dir\",\n default=None,\n type=str,\n help=\"Dataset path, same as DataFeed.dataset.dataset_dir\")\n parser.add_argument(\n \"-s\",\n \"--sensitivities_file\",\n default=\"sensitivities.data\",\n type=str,\n help=\"The file used to save sensitivities.\")\n parser.add_argument(\n \"-p\",\n \"--pruned_params\",\n default=None,\n type=str,\n help=\"The parameters to be pruned when calculating sensitivities.\")\n parser.add_argument(\n \"-r\",\n \"--pruned_ratios\",\n default=\"0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9\",\n type=str,\n help=\"The ratios pruned iteratively for each parameter when calculating sensitivities.\"\n )\n parser.add_argument(\n \"-P\",\n \"--print_params\",\n default=False,\n action='store_true',\n help=\"Whether to only print the parameters' names and shapes.\")\n FLAGS = parser.parse_args()\n main()\n", "path": "slim/sensitive/sensitive.py"}]}
| 3,799 | 236 |
gh_patches_debug_15681
|
rasdani/github-patches
|
git_diff
|
TheAlgorithms__Python-1461
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
DIRECTORY.md not being updated by TravisCI
- [x] .travis.yml isn't updating DIRECTORY.md automatically as it should
- [x] scripts/build_directory_md.py needs can have some minor changes too. #1461
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scripts/build_directory_md.py`
Content:
```
1 #!/usr/bin/env python3
2
3 import os
4 from typing import Iterator
5
6 URL_BASE = "https://github.com/TheAlgorithms/Python/blob/master"
7
8
9 def good_filepaths(top_dir: str = ".") -> Iterator[str]:
10 for dirpath, dirnames, filenames in os.walk(top_dir):
11 dirnames[:] = [d for d in dirnames if d != "scripts" and d[0] not in "._"]
12 for filename in filenames:
13 if filename == "__init__.py":
14 continue
15 if os.path.splitext(filename)[1] in (".py", ".ipynb"):
16 yield os.path.join(dirpath, filename).lstrip("./")
17
18
19 def md_prefix(i):
20 return f"{i * ' '}*" if i else "##"
21
22
23 def print_path(old_path: str, new_path: str) -> str:
24 old_parts = old_path.split(os.sep)
25 for i, new_part in enumerate(new_path.split(os.sep)):
26 if i + 1 > len(old_parts) or old_parts[i] != new_part:
27 if new_part:
28 print(f"{md_prefix(i)} {new_part.replace('_', ' ').title()}")
29 return new_path
30
31
32 def print_directory_md(top_dir: str = ".") -> None:
33 old_path = ""
34 for filepath in sorted(good_filepaths()):
35 filepath, filename = os.path.split(filepath)
36 if filepath != old_path:
37 old_path = print_path(old_path, filepath)
38 indent = (filepath.count(os.sep) + 1) if filepath else 0
39 url = "/".join((URL_BASE, filepath, filename)).replace(" ", "%20")
40 filename = os.path.splitext(filename.replace("_", " "))[0]
41 print(f"{md_prefix(indent)} [{filename}]({url})")
42
43
44 if __name__ == "__main__":
45 print_directory_md(".")
46
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/scripts/build_directory_md.py b/scripts/build_directory_md.py
--- a/scripts/build_directory_md.py
+++ b/scripts/build_directory_md.py
@@ -17,7 +17,7 @@
def md_prefix(i):
- return f"{i * ' '}*" if i else "##"
+ return f"{i * ' '}*" if i else "\n##"
def print_path(old_path: str, new_path: str) -> str:
@@ -37,7 +37,7 @@
old_path = print_path(old_path, filepath)
indent = (filepath.count(os.sep) + 1) if filepath else 0
url = "/".join((URL_BASE, filepath, filename)).replace(" ", "%20")
- filename = os.path.splitext(filename.replace("_", " "))[0]
+ filename = os.path.splitext(filename.replace("_", " ").title())[0]
print(f"{md_prefix(indent)} [{filename}]({url})")
|
{"golden_diff": "diff --git a/scripts/build_directory_md.py b/scripts/build_directory_md.py\n--- a/scripts/build_directory_md.py\n+++ b/scripts/build_directory_md.py\n@@ -17,7 +17,7 @@\n \n \n def md_prefix(i):\n- return f\"{i * ' '}*\" if i else \"##\"\n+ return f\"{i * ' '}*\" if i else \"\\n##\"\n \n \n def print_path(old_path: str, new_path: str) -> str:\n@@ -37,7 +37,7 @@\n old_path = print_path(old_path, filepath)\n indent = (filepath.count(os.sep) + 1) if filepath else 0\n url = \"/\".join((URL_BASE, filepath, filename)).replace(\" \", \"%20\")\n- filename = os.path.splitext(filename.replace(\"_\", \" \"))[0]\n+ filename = os.path.splitext(filename.replace(\"_\", \" \").title())[0]\n print(f\"{md_prefix(indent)} [{filename}]({url})\")\n", "issue": "DIRECTORY.md not being updated by TravisCI\n- [x] .travis.yml isn't updating DIRECTORY.md automatically as it should\r\n- [x] scripts/build_directory_md.py needs can have some minor changes too. #1461\n", "before_files": [{"content": "#!/usr/bin/env python3\n\nimport os\nfrom typing import Iterator\n\nURL_BASE = \"https://github.com/TheAlgorithms/Python/blob/master\"\n\n\ndef good_filepaths(top_dir: str = \".\") -> Iterator[str]:\n for dirpath, dirnames, filenames in os.walk(top_dir):\n dirnames[:] = [d for d in dirnames if d != \"scripts\" and d[0] not in \"._\"]\n for filename in filenames:\n if filename == \"__init__.py\":\n continue\n if os.path.splitext(filename)[1] in (\".py\", \".ipynb\"):\n yield os.path.join(dirpath, filename).lstrip(\"./\")\n\n\ndef md_prefix(i):\n return f\"{i * ' '}*\" if i else \"##\"\n\n\ndef print_path(old_path: str, new_path: str) -> str:\n old_parts = old_path.split(os.sep)\n for i, new_part in enumerate(new_path.split(os.sep)):\n if i + 1 > len(old_parts) or old_parts[i] != new_part:\n if new_part:\n print(f\"{md_prefix(i)} {new_part.replace('_', ' ').title()}\")\n return new_path\n\n\ndef print_directory_md(top_dir: str = \".\") -> None:\n old_path = \"\"\n for filepath in sorted(good_filepaths()):\n filepath, filename = os.path.split(filepath)\n if filepath != old_path:\n old_path = print_path(old_path, filepath)\n indent = (filepath.count(os.sep) + 1) if filepath else 0\n url = \"/\".join((URL_BASE, filepath, filename)).replace(\" \", \"%20\")\n filename = os.path.splitext(filename.replace(\"_\", \" \"))[0]\n print(f\"{md_prefix(indent)} [{filename}]({url})\")\n\n\nif __name__ == \"__main__\":\n print_directory_md(\".\")\n", "path": "scripts/build_directory_md.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\nimport os\nfrom typing import Iterator\n\nURL_BASE = \"https://github.com/TheAlgorithms/Python/blob/master\"\n\n\ndef good_filepaths(top_dir: str = \".\") -> Iterator[str]:\n for dirpath, dirnames, filenames in os.walk(top_dir):\n dirnames[:] = [d for d in dirnames if d != \"scripts\" and d[0] not in \"._\"]\n for filename in filenames:\n if filename == \"__init__.py\":\n continue\n if os.path.splitext(filename)[1] in (\".py\", \".ipynb\"):\n yield os.path.join(dirpath, filename).lstrip(\"./\")\n\n\ndef md_prefix(i):\n return f\"{i * ' '}*\" if i else \"\\n##\"\n\n\ndef print_path(old_path: str, new_path: str) -> str:\n old_parts = old_path.split(os.sep)\n for i, new_part in enumerate(new_path.split(os.sep)):\n if i + 1 > len(old_parts) or old_parts[i] != new_part:\n if new_part:\n print(f\"{md_prefix(i)} {new_part.replace('_', ' ').title()}\")\n return new_path\n\n\ndef print_directory_md(top_dir: str = \".\") -> None:\n old_path = \"\"\n for filepath in sorted(good_filepaths()):\n 
filepath, filename = os.path.split(filepath)\n if filepath != old_path:\n old_path = print_path(old_path, filepath)\n indent = (filepath.count(os.sep) + 1) if filepath else 0\n url = \"/\".join((URL_BASE, filepath, filename)).replace(\" \", \"%20\")\n filename = os.path.splitext(filename.replace(\"_\", \" \").title())[0]\n print(f\"{md_prefix(indent)} [{filename}]({url})\")\n\n\nif __name__ == \"__main__\":\n print_directory_md(\".\")\n", "path": "scripts/build_directory_md.py"}]}
| 796 | 213 |
gh_patches_debug_28663
|
rasdani/github-patches
|
git_diff
|
ray-project__ray-8177
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Ray async api is not working with uvloop.
<!--Please include [tune], [rllib], [autoscaler] etc. in the issue title if relevant-->
### What is the problem?
Current Ray async api uses asyncio event loop's internal attribute to identify if the loop is running in the current current thread.
```python3
loop = asyncio.get_event_loop()
if loop.is_running():
if loop._thread_id != threading.get_ident():
# If the loop is runing outside current thread, we actually need
# to do this to make sure the context is initialized.
asyncio.run_coroutine_threadsafe(_async_init(), loop=loop)
```
This causes a problem when we uses Ray APIs inside Fast API because Fast API uses uvloop as its main event loop, and uvloop doesn't have `_thread_id` attribute.
@simon-mo Any good idea to fix this? It doesn't seem to be trivial. What about we do async_init() whenever asyncio loop is created in a different thread instead of checking if the event loop's thread id? I assume the only use case where asyncio loop is defined in a different thread is only inside async actor?
### Reproduction (REQUIRED)
Please provide a script that can be run to reproduce the issue. The script should have **no external library dependencies** (i.e., use fake or mock data / environments):
```python3
import time
import asyncio
import ray
import psutil
from fastapi import FastAPI, APIRouter
app = FastAPI(
title="API template",
description="Template to build upon for API serving and distributed computation",
version="0.1.0",
openapi_url="/openapi.json",
docs_url="/docs",
)
@app.on_event("startup")
def startup_event():
ray.init(num_cpus=2)
@app.on_event("shutdown")
def shutdown_event():
ray.shutdown()
@app.get('/async')
async def non_seq_async_process():
"""
async distributed execution
"""
@ray.remote
def slow_function(i):
time.sleep(i)
return i
start_time = time.time()
# result_ids = []
# for i in range(10, 60, 10):
# result_ids.append(slow_function.remote(i))
# results = ray.get(result_ids)
results = await asyncio.wait([slow_function.remote(i) for i in range(10, 60, 10)])
duration = time.time() - start_time
out = "Executing the for loop took {:.3f} seconds.\n".format(duration)
out += f"The results are: {results}\n"
```
If we cannot run your script, we cannot fix your issue.
- [x] I have verified my script runs in a clean environment and reproduces the issue.
- [x] I have verified the issue also occurs with the [latest wheels](https://docs.ray.io/en/latest/installation.html).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `python/ray/experimental/async_api.py`
Content:
```
1 # Note: asyncio is only compatible with Python 3
2
3 import asyncio
4 import threading
5
6 import ray
7 from ray.experimental.async_plasma import PlasmaEventHandler
8 from ray.services import logger
9
10 handler = None
11
12
13 async def _async_init():
14 global handler
15 if handler is None:
16 worker = ray.worker.global_worker
17 loop = asyncio.get_event_loop()
18 handler = PlasmaEventHandler(loop, worker)
19 worker.core_worker.set_plasma_added_callback(handler)
20 logger.debug("AsyncPlasma Connection Created!")
21
22
23 def init():
24 """
25 Initialize synchronously.
26 """
27 assert ray.is_initialized(), "Please call ray.init before async_api.init"
28
29 # Noop when handler is set.
30 if handler is not None:
31 return
32
33 loop = asyncio.get_event_loop()
34 if loop.is_running():
35 if loop._thread_id != threading.get_ident():
36 # If the loop is runing outside current thread, we actually need
37 # to do this to make sure the context is initialized.
38 asyncio.run_coroutine_threadsafe(_async_init(), loop=loop)
39 else:
40 async_init_done = asyncio.get_event_loop().create_task(
41 _async_init())
42 # Block until the async init finishes.
43 async_init_done.done()
44 else:
45 asyncio.get_event_loop().run_until_complete(_async_init())
46
47
48 def as_future(object_id):
49 """Turn an object_id into a Future object.
50
51 Args:
52 object_id: A Ray object_id.
53
54 Returns:
55 PlasmaObjectFuture: A future object that waits the object_id.
56 """
57 if handler is None:
58 init()
59 return handler.as_future(object_id)
60
61
62 def shutdown():
63 """Manually shutdown the async API.
64
65 Cancels all related tasks and all the socket transportation.
66 """
67 global handler
68 if handler is not None:
69 handler.close()
70 handler = None
71
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/python/ray/experimental/async_api.py b/python/ray/experimental/async_api.py
--- a/python/ray/experimental/async_api.py
+++ b/python/ray/experimental/async_api.py
@@ -1,7 +1,4 @@
-# Note: asyncio is only compatible with Python 3
-
import asyncio
-import threading
import ray
from ray.experimental.async_plasma import PlasmaEventHandler
@@ -10,7 +7,10 @@
handler = None
-async def _async_init():
+def init():
+ """Initialize plasma event handlers for asyncio support."""
+ assert ray.is_initialized(), "Please call ray.init before async_api.init"
+
global handler
if handler is None:
worker = ray.worker.global_worker
@@ -20,31 +20,6 @@
logger.debug("AsyncPlasma Connection Created!")
-def init():
- """
- Initialize synchronously.
- """
- assert ray.is_initialized(), "Please call ray.init before async_api.init"
-
- # Noop when handler is set.
- if handler is not None:
- return
-
- loop = asyncio.get_event_loop()
- if loop.is_running():
- if loop._thread_id != threading.get_ident():
- # If the loop is runing outside current thread, we actually need
- # to do this to make sure the context is initialized.
- asyncio.run_coroutine_threadsafe(_async_init(), loop=loop)
- else:
- async_init_done = asyncio.get_event_loop().create_task(
- _async_init())
- # Block until the async init finishes.
- async_init_done.done()
- else:
- asyncio.get_event_loop().run_until_complete(_async_init())
-
-
def as_future(object_id):
"""Turn an object_id into a Future object.
|
{"golden_diff": "diff --git a/python/ray/experimental/async_api.py b/python/ray/experimental/async_api.py\n--- a/python/ray/experimental/async_api.py\n+++ b/python/ray/experimental/async_api.py\n@@ -1,7 +1,4 @@\n-# Note: asyncio is only compatible with Python 3\n-\n import asyncio\n-import threading\n \n import ray\n from ray.experimental.async_plasma import PlasmaEventHandler\n@@ -10,7 +7,10 @@\n handler = None\n \n \n-async def _async_init():\n+def init():\n+ \"\"\"Initialize plasma event handlers for asyncio support.\"\"\"\n+ assert ray.is_initialized(), \"Please call ray.init before async_api.init\"\n+\n global handler\n if handler is None:\n worker = ray.worker.global_worker\n@@ -20,31 +20,6 @@\n logger.debug(\"AsyncPlasma Connection Created!\")\n \n \n-def init():\n- \"\"\"\n- Initialize synchronously.\n- \"\"\"\n- assert ray.is_initialized(), \"Please call ray.init before async_api.init\"\n-\n- # Noop when handler is set.\n- if handler is not None:\n- return\n-\n- loop = asyncio.get_event_loop()\n- if loop.is_running():\n- if loop._thread_id != threading.get_ident():\n- # If the loop is runing outside current thread, we actually need\n- # to do this to make sure the context is initialized.\n- asyncio.run_coroutine_threadsafe(_async_init(), loop=loop)\n- else:\n- async_init_done = asyncio.get_event_loop().create_task(\n- _async_init())\n- # Block until the async init finishes.\n- async_init_done.done()\n- else:\n- asyncio.get_event_loop().run_until_complete(_async_init())\n-\n-\n def as_future(object_id):\n \"\"\"Turn an object_id into a Future object.\n", "issue": "Ray async api is not working with uvloop.\n<!--Please include [tune], [rllib], [autoscaler] etc. in the issue title if relevant-->\r\n\r\n### What is the problem?\r\n\r\nCurrent Ray async api uses asyncio event loop's internal attribute to identify if the loop is running in the current current thread.\r\n\r\n```python3\r\n loop = asyncio.get_event_loop()\r\n if loop.is_running():\r\n if loop._thread_id != threading.get_ident():\r\n # If the loop is runing outside current thread, we actually need\r\n # to do this to make sure the context is initialized.\r\n asyncio.run_coroutine_threadsafe(_async_init(), loop=loop)\r\n```\r\nThis causes a problem when we uses Ray APIs inside Fast API because Fast API uses uvloop as its main event loop, and uvloop doesn't have `_thread_id` attribute.\r\n\r\n@simon-mo Any good idea to fix this? It doesn't seem to be trivial. What about we do async_init() whenever asyncio loop is created in a different thread instead of checking if the event loop's thread id? I assume the only use case where asyncio loop is defined in a different thread is only inside async actor? \r\n\r\n### Reproduction (REQUIRED)\r\nPlease provide a script that can be run to reproduce the issue. 
The script should have **no external library dependencies** (i.e., use fake or mock data / environments):\r\n\r\n```python3\r\nimport time\r\nimport asyncio \r\n\u200b\r\nimport ray\r\nimport psutil\r\nfrom fastapi import FastAPI, APIRouter\r\n\u200b\r\n\u200b\r\napp = FastAPI(\r\n title=\"API template\",\r\n description=\"Template to build upon for API serving and distributed computation\",\r\n version=\"0.1.0\",\r\n openapi_url=\"/openapi.json\",\r\n docs_url=\"/docs\",\r\n)\r\n\u200b\r\[email protected]_event(\"startup\")\r\ndef startup_event():\r\n ray.init(num_cpus=2)\r\n\u200b\r\n\u200b\r\n\u200b\r\[email protected]_event(\"shutdown\")\r\ndef shutdown_event():\r\n ray.shutdown()\r\n\u200b\r\n\u200b\r\[email protected]('/async')\r\nasync def non_seq_async_process():\r\n \"\"\"\r\n async distributed execution\r\n \"\"\"\r\n @ray.remote\r\n def slow_function(i):\r\n time.sleep(i)\r\n return i\r\n\u200b\r\n start_time = time.time()\r\n\u200b\r\n # result_ids = []\r\n # for i in range(10, 60, 10):\r\n # result_ids.append(slow_function.remote(i))\r\n \r\n # results = ray.get(result_ids)\r\n\u200b\r\n results = await asyncio.wait([slow_function.remote(i) for i in range(10, 60, 10)])\r\n\u200b\r\n \r\n duration = time.time() - start_time\r\n out = \"Executing the for loop took {:.3f} seconds.\\n\".format(duration)\r\n out += f\"The results are: {results}\\n\"\r\n\r\n```\r\n\r\nIf we cannot run your script, we cannot fix your issue.\r\n\r\n- [x] I have verified my script runs in a clean environment and reproduces the issue.\r\n- [x] I have verified the issue also occurs with the [latest wheels](https://docs.ray.io/en/latest/installation.html).\r\n\n", "before_files": [{"content": "# Note: asyncio is only compatible with Python 3\n\nimport asyncio\nimport threading\n\nimport ray\nfrom ray.experimental.async_plasma import PlasmaEventHandler\nfrom ray.services import logger\n\nhandler = None\n\n\nasync def _async_init():\n global handler\n if handler is None:\n worker = ray.worker.global_worker\n loop = asyncio.get_event_loop()\n handler = PlasmaEventHandler(loop, worker)\n worker.core_worker.set_plasma_added_callback(handler)\n logger.debug(\"AsyncPlasma Connection Created!\")\n\n\ndef init():\n \"\"\"\n Initialize synchronously.\n \"\"\"\n assert ray.is_initialized(), \"Please call ray.init before async_api.init\"\n\n # Noop when handler is set.\n if handler is not None:\n return\n\n loop = asyncio.get_event_loop()\n if loop.is_running():\n if loop._thread_id != threading.get_ident():\n # If the loop is runing outside current thread, we actually need\n # to do this to make sure the context is initialized.\n asyncio.run_coroutine_threadsafe(_async_init(), loop=loop)\n else:\n async_init_done = asyncio.get_event_loop().create_task(\n _async_init())\n # Block until the async init finishes.\n async_init_done.done()\n else:\n asyncio.get_event_loop().run_until_complete(_async_init())\n\n\ndef as_future(object_id):\n \"\"\"Turn an object_id into a Future object.\n\n Args:\n object_id: A Ray object_id.\n\n Returns:\n PlasmaObjectFuture: A future object that waits the object_id.\n \"\"\"\n if handler is None:\n init()\n return handler.as_future(object_id)\n\n\ndef shutdown():\n \"\"\"Manually shutdown the async API.\n\n Cancels all related tasks and all the socket transportation.\n \"\"\"\n global handler\n if handler is not None:\n handler.close()\n handler = None\n", "path": "python/ray/experimental/async_api.py"}], "after_files": [{"content": "import asyncio\n\nimport ray\nfrom 
ray.experimental.async_plasma import PlasmaEventHandler\nfrom ray.services import logger\n\nhandler = None\n\n\ndef init():\n \"\"\"Initialize plasma event handlers for asyncio support.\"\"\"\n assert ray.is_initialized(), \"Please call ray.init before async_api.init\"\n\n global handler\n if handler is None:\n worker = ray.worker.global_worker\n loop = asyncio.get_event_loop()\n handler = PlasmaEventHandler(loop, worker)\n worker.core_worker.set_plasma_added_callback(handler)\n logger.debug(\"AsyncPlasma Connection Created!\")\n\n\ndef as_future(object_id):\n \"\"\"Turn an object_id into a Future object.\n\n Args:\n object_id: A Ray object_id.\n\n Returns:\n PlasmaObjectFuture: A future object that waits the object_id.\n \"\"\"\n if handler is None:\n init()\n return handler.as_future(object_id)\n\n\ndef shutdown():\n \"\"\"Manually shutdown the async API.\n\n Cancels all related tasks and all the socket transportation.\n \"\"\"\n global handler\n if handler is not None:\n handler.close()\n handler = None\n", "path": "python/ray/experimental/async_api.py"}]}
| 1,449 | 399 |
gh_patches_debug_27254
|
rasdani/github-patches
|
git_diff
|
nextcloud__appstore-272
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Create documentation section for explaining certificates, signing and how it all works together
App devs need a very quick tutorial/walkthrough in the docs on how to generate a new certificate pair, request the public cert to be signed, registering an app id, revoking certs (like registering certs: post it on our issue tracker) and signing apps.
Also some background needs to be provided on how the whole certificate mechanism works.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nextcloudappstore/core/forms.py`
Content:
```
1 from django.forms import Form, CharField, Textarea, ChoiceField, RadioSelect, \
2 BooleanField
3 from django.utils.translation import ugettext_lazy as _ # type: ignore
4
5 from nextcloudappstore.core.models import App, AppRating
6
7 RATING_CHOICES = (
8 (0.0, _('Bad')),
9 (0.5, _('Ok')),
10 (1.0, _('Good'))
11 )
12
13
14 class AppReleaseUploadForm(Form):
15 download = CharField(label=_('Download link (tar.gz)'), max_length=256)
16 signature = CharField(widget=Textarea, label=_('SHA512 signature'),
17 help_text=_(
18 'Hint: can be calculated by executing the '
19 'following command: openssl dgst -sha512 -sign '
20 '/path/to/private-cert.key /path/to/app.tar.gz '
21 '| openssl base64'))
22 nightly = BooleanField(label=_('Nightly'))
23
24
25 class AppRatingForm(Form):
26 def __init__(self, *args, **kwargs):
27 self._id = kwargs.pop('id', None)
28 self._user = kwargs.pop('user', None)
29 self._language_code = kwargs.pop('language_code', None)
30 super().__init__(*args, **kwargs)
31
32 rating = ChoiceField(initial=0.5, choices=RATING_CHOICES,
33 widget=RadioSelect)
34 comment = CharField(widget=Textarea, required=False,
35 label=_('Review'))
36
37 class Meta:
38 fields = ('rating', 'comment')
39
40 def save(self):
41 app = App.objects.get(id=self._id)
42 app_rating, created = AppRating.objects.get_or_create(user=self._user,
43 app=app)
44 app_rating.rating = self.cleaned_data['rating']
45 app_rating.set_current_language(self._language_code)
46 app_rating.comment = self.cleaned_data['comment']
47 app_rating.save()
48
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/nextcloudappstore/core/forms.py b/nextcloudappstore/core/forms.py
--- a/nextcloudappstore/core/forms.py
+++ b/nextcloudappstore/core/forms.py
@@ -13,15 +13,32 @@
class AppReleaseUploadForm(Form):
download = CharField(label=_('Download link (tar.gz)'), max_length=256)
- signature = CharField(widget=Textarea, label=_('SHA512 signature'),
- help_text=_(
- 'Hint: can be calculated by executing the '
- 'following command: openssl dgst -sha512 -sign '
- '/path/to/private-cert.key /path/to/app.tar.gz '
- '| openssl base64'))
+ signature = CharField(
+ widget=Textarea,
+ label=_('SHA512 signature'),
+ help_text=_(
+ 'Hint: can be calculated by executing the '
+ 'following command: openssl dgst -sha512 -sign '
+ '~/.nextcloud/certificates/APP_ID.key '
+ '/path/to/app.tar.gz | openssl base64'))
nightly = BooleanField(label=_('Nightly'))
+class AppRegisterForm(Form):
+ certificate = CharField(
+ widget=Textarea(attrs={'pattern': '-----BEGIN CERTIFICATE-----.*'}),
+ label=_('Public certificate'),
+ help_text=_(
+ 'Usually stored in ~/.nextcloud/certificates/APP_ID.crt'))
+ signature = CharField(
+ widget=Textarea,
+ label=_('SHA512 signature'),
+ help_text=_(
+ 'Hint: can be calculated by executing the '
+ 'following command: echo -n "APP_ID" | openssl dgst -sha512 -sign '
+ '~/.nextcloud/certificates/APP_ID.key | openssl base64'))
+
+
class AppRatingForm(Form):
def __init__(self, *args, **kwargs):
self._id = kwargs.pop('id', None)
|
{"golden_diff": "diff --git a/nextcloudappstore/core/forms.py b/nextcloudappstore/core/forms.py\n--- a/nextcloudappstore/core/forms.py\n+++ b/nextcloudappstore/core/forms.py\n@@ -13,15 +13,32 @@\n \n class AppReleaseUploadForm(Form):\n download = CharField(label=_('Download link (tar.gz)'), max_length=256)\n- signature = CharField(widget=Textarea, label=_('SHA512 signature'),\n- help_text=_(\n- 'Hint: can be calculated by executing the '\n- 'following command: openssl dgst -sha512 -sign '\n- '/path/to/private-cert.key /path/to/app.tar.gz '\n- '| openssl base64'))\n+ signature = CharField(\n+ widget=Textarea,\n+ label=_('SHA512 signature'),\n+ help_text=_(\n+ 'Hint: can be calculated by executing the '\n+ 'following command: openssl dgst -sha512 -sign '\n+ '~/.nextcloud/certificates/APP_ID.key '\n+ '/path/to/app.tar.gz | openssl base64'))\n nightly = BooleanField(label=_('Nightly'))\n \n \n+class AppRegisterForm(Form):\n+ certificate = CharField(\n+ widget=Textarea(attrs={'pattern': '-----BEGIN CERTIFICATE-----.*'}),\n+ label=_('Public certificate'),\n+ help_text=_(\n+ 'Usually stored in ~/.nextcloud/certificates/APP_ID.crt'))\n+ signature = CharField(\n+ widget=Textarea,\n+ label=_('SHA512 signature'),\n+ help_text=_(\n+ 'Hint: can be calculated by executing the '\n+ 'following command: echo -n \"APP_ID\" | openssl dgst -sha512 -sign '\n+ '~/.nextcloud/certificates/APP_ID.key | openssl base64'))\n+\n+\n class AppRatingForm(Form):\n def __init__(self, *args, **kwargs):\n self._id = kwargs.pop('id', None)\n", "issue": "Create documentation section for explaining certificates, signing and how it all works together\nApp devs need a very quick tutorial/walkthrough in the docs on how to generate a new certificate pair, request the public cert to be signed, registering an app id, revoking certs (like registering certs: post it on our issue tracker) and signing apps.\n\nAlso some background needs to be provided on how the whole certificate mechanism works.\n\n", "before_files": [{"content": "from django.forms import Form, CharField, Textarea, ChoiceField, RadioSelect, \\\n BooleanField\nfrom django.utils.translation import ugettext_lazy as _ # type: ignore\n\nfrom nextcloudappstore.core.models import App, AppRating\n\nRATING_CHOICES = (\n (0.0, _('Bad')),\n (0.5, _('Ok')),\n (1.0, _('Good'))\n)\n\n\nclass AppReleaseUploadForm(Form):\n download = CharField(label=_('Download link (tar.gz)'), max_length=256)\n signature = CharField(widget=Textarea, label=_('SHA512 signature'),\n help_text=_(\n 'Hint: can be calculated by executing the '\n 'following command: openssl dgst -sha512 -sign '\n '/path/to/private-cert.key /path/to/app.tar.gz '\n '| openssl base64'))\n nightly = BooleanField(label=_('Nightly'))\n\n\nclass AppRatingForm(Form):\n def __init__(self, *args, **kwargs):\n self._id = kwargs.pop('id', None)\n self._user = kwargs.pop('user', None)\n self._language_code = kwargs.pop('language_code', None)\n super().__init__(*args, **kwargs)\n\n rating = ChoiceField(initial=0.5, choices=RATING_CHOICES,\n widget=RadioSelect)\n comment = CharField(widget=Textarea, required=False,\n label=_('Review'))\n\n class Meta:\n fields = ('rating', 'comment')\n\n def save(self):\n app = App.objects.get(id=self._id)\n app_rating, created = AppRating.objects.get_or_create(user=self._user,\n app=app)\n app_rating.rating = self.cleaned_data['rating']\n app_rating.set_current_language(self._language_code)\n app_rating.comment = self.cleaned_data['comment']\n app_rating.save()\n", "path": 
"nextcloudappstore/core/forms.py"}], "after_files": [{"content": "from django.forms import Form, CharField, Textarea, ChoiceField, RadioSelect, \\\n BooleanField\nfrom django.utils.translation import ugettext_lazy as _ # type: ignore\n\nfrom nextcloudappstore.core.models import App, AppRating\n\nRATING_CHOICES = (\n (0.0, _('Bad')),\n (0.5, _('Ok')),\n (1.0, _('Good'))\n)\n\n\nclass AppReleaseUploadForm(Form):\n download = CharField(label=_('Download link (tar.gz)'), max_length=256)\n signature = CharField(\n widget=Textarea,\n label=_('SHA512 signature'),\n help_text=_(\n 'Hint: can be calculated by executing the '\n 'following command: openssl dgst -sha512 -sign '\n '~/.nextcloud/certificates/APP_ID.key '\n '/path/to/app.tar.gz | openssl base64'))\n nightly = BooleanField(label=_('Nightly'))\n\n\nclass AppRegisterForm(Form):\n certificate = CharField(\n widget=Textarea(attrs={'pattern': '-----BEGIN CERTIFICATE-----.*'}),\n label=_('Public certificate'),\n help_text=_(\n 'Usually stored in ~/.nextcloud/certificates/APP_ID.crt'))\n signature = CharField(\n widget=Textarea,\n label=_('SHA512 signature'),\n help_text=_(\n 'Hint: can be calculated by executing the '\n 'following command: echo -n \"APP_ID\" | openssl dgst -sha512 -sign '\n '~/.nextcloud/certificates/APP_ID.key | openssl base64'))\n\n\nclass AppRatingForm(Form):\n def __init__(self, *args, **kwargs):\n self._id = kwargs.pop('id', None)\n self._user = kwargs.pop('user', None)\n self._language_code = kwargs.pop('language_code', None)\n super().__init__(*args, **kwargs)\n\n rating = ChoiceField(initial=0.5, choices=RATING_CHOICES,\n widget=RadioSelect)\n comment = CharField(widget=Textarea, required=False,\n label=_('Review'))\n\n class Meta:\n fields = ('rating', 'comment')\n\n def save(self):\n app = App.objects.get(id=self._id)\n app_rating, created = AppRating.objects.get_or_create(user=self._user,\n app=app)\n app_rating.rating = self.cleaned_data['rating']\n app_rating.set_current_language(self._language_code)\n app_rating.comment = self.cleaned_data['comment']\n app_rating.save()\n", "path": "nextcloudappstore/core/forms.py"}]}
| 828 | 438 |
gh_patches_debug_36615
|
rasdani/github-patches
|
git_diff
|
pyqtgraph__pyqtgraph-2796
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug in slider parameter
<!-- In the following, please describe your issue in detail! -->
<!-- If some sections do not apply, just remove them. -->
### Short description
<!-- This should summarize the issue. -->
Bug that precludes use of suffix in SliderParameter
### Code to reproduce
<!-- Please provide a minimal working example that reproduces the issue in the code block below.
Ideally, this should be a full example someone else could run without additional setup. -->
```python
import pyqtgraph as pg
import numpy as np
from pyqtgraph.parametertree import (Parameter, ParameterTree, registerParameterType)
...
colorParams = [
dict(name='Color Settings', type='group', children=[
dict(name='Opacity', type='slider', limits=[0,100], value = 60.0, suffix='%'),
]),
]
self.parameters = pg.parametertree.Parameter.create(name='Analysis Settings', type='group', children=colorParams)
...
```
### Expected behavior
<!-- What should happen? -->
Slider should show a number 0 - 100, followed by a '%' suffix
### Real behavior
<!-- What happens? -->
Application crashes, as it refers to a non-existing method
```
An error occurred?
Post the full traceback inside these 'code fences'!
AttributeError
'SliderParameterItem' object has no attribute '_updateLabel'
```
### Solution
1. Please delete line 32, containing ``` self._updateLabel(self.slider.value())``` as this method does not exist
2. After ```layout = QHBoxLayout()``` Please add a line containing ```layout.setContentsMargins(0, 0, 0, 0)```
3. In doing so, the height of the slider item stays equal to the other items (=20), now there is a *bulge*
### Additional context
None
Bug in slider parameter
<!-- In the following, please describe your issue in detail! -->
<!-- If some sections do not apply, just remove them. -->
### Short description
<!-- This should summarize the issue. -->
Bug that precludes use of suffix in SliderParameter
### Code to reproduce
<!-- Please provide a minimal working example that reproduces the issue in the code block below.
Ideally, this should be a full example someone else could run without additional setup. -->
```python
import pyqtgraph as pg
import numpy as np
from pyqtgraph.parametertree import (Parameter, ParameterTree, registerParameterType)
...
colorParams = [
dict(name='Color Settings', type='group', children=[
dict(name='Opacity', type='slider', limits=[0,100], value = 60.0, suffix='%'),
]),
]
self.parameters = pg.parametertree.Parameter.create(name='Analysis Settings', type='group', children=colorParams)
...
```
### Expected behavior
<!-- What should happen? -->
Slider should show a number 0 - 100, followed by a '%' suffix
### Real behavior
<!-- What happens? -->
Application crashes, as it refers to a non-existing method
```
An error occurred?
Post the full traceback inside these 'code fences'!
AttributeError
'SliderParameterItem' object has no attribute '_updateLabel'
```
### Solution
1. Please delete line 32, containing ``` self._updateLabel(self.slider.value())``` as this method does not exist
2. After ```layout = QHBoxLayout()``` Please add a line containing ```layout.setContentsMargins(0, 0, 0, 0)```
3. In doing so, the height of the slider item stays equal to the other items (=20), now there is a *bulge*
### Additional context
None
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyqtgraph/parametertree/parameterTypes/slider.py`
Content:
```
1 import numpy as np
2
3 from ...Qt import QtCore, QtWidgets
4 from ..Parameter import Parameter
5 from .basetypes import Emitter, WidgetParameterItem
6
7
8 class SliderParameterItem(WidgetParameterItem):
9 slider: QtWidgets.QSlider
10 span: np.ndarray
11 charSpan: np.ndarray
12
13 def __init__(self, param, depth):
14 # Bind emitter to self to avoid garbage collection
15 self.emitter = Emitter()
16 self.sigChanging = self.emitter.sigChanging
17 self._suffix = None
18 super().__init__(param, depth)
19
20 def updateDisplayLabel(self, value=None):
21 if value is None:
22 value = self.param.value()
23 value = str(value)
24 if self._suffix is None:
25 suffixTxt = ''
26 else:
27 suffixTxt = f' {self._suffix}'
28 self.displayLabel.setText(value + suffixTxt)
29
30 def setSuffix(self, suffix):
31 self._suffix = suffix
32 self._updateLabel(self.slider.value())
33
34 def makeWidget(self):
35 param = self.param
36 opts = param.opts
37 opts.setdefault('limits', [0, 0])
38 self._suffix = opts.get('suffix')
39
40 self.slider = QtWidgets.QSlider()
41 self.slider.setOrientation(QtCore.Qt.Orientation.Horizontal)
42 lbl = QtWidgets.QLabel()
43 lbl.setAlignment(QtCore.Qt.AlignmentFlag.AlignLeft)
44
45 w = QtWidgets.QWidget()
46 layout = QtWidgets.QHBoxLayout()
47 w.setLayout(layout)
48 layout.addWidget(lbl)
49 layout.addWidget(self.slider)
50
51 def setValue(v):
52 self.slider.setValue(self.spanToSliderValue(v))
53
54 def getValue():
55 return self.span[self.slider.value()].item()
56
57 def vChanged(v):
58 lbl.setText(self.prettyTextValue(v))
59
60 self.slider.valueChanged.connect(vChanged)
61
62 def onMove(pos):
63 self.sigChanging.emit(self, self.span[pos].item())
64
65 self.slider.sliderMoved.connect(onMove)
66
67 w.setValue = setValue
68 w.value = getValue
69 w.sigChanged = self.slider.valueChanged
70 w.sigChanging = self.sigChanging
71 self.optsChanged(param, opts)
72 return w
73
74 def spanToSliderValue(self, v):
75 return int(np.argmin(np.abs(self.span - v)))
76
77 def prettyTextValue(self, v):
78 if self._suffix is None:
79 suffixTxt = ''
80 else:
81 suffixTxt = f' {self._suffix}'
82 format_ = self.param.opts.get('format', None)
83 cspan = self.charSpan
84 if format_ is None:
85 format_ = f'{{0:>{cspan.dtype.itemsize}}}{suffixTxt}'
86 return format_.format(cspan[v].decode())
87
88 def optsChanged(self, param, opts):
89 try:
90 super().optsChanged(param, opts)
91 except AttributeError:
92 # This may trigger while building the parameter before the widget is fully constructed.
93 # This is fine, since errors are from the parent scope which will stabilize after the widget is
94 # constructed anyway
95 pass
96 span = opts.get('span', None)
97 if span is None:
98 step = opts.get('step', 1)
99 start, stop = opts.get('limits', param.opts['limits'])
100 # Add a bit to 'stop' since python slicing excludes the last value
101 span = np.arange(start, stop + step, step)
102 precision = opts.get('precision', 2)
103 if precision is not None:
104 span = span.round(precision)
105 self.span = span
106 self.charSpan = np.char.array(span)
107 w = self.slider
108 w.setMinimum(0)
109 w.setMaximum(len(span) - 1)
110 if 'suffix' in opts:
111 self.setSuffix(opts['suffix'])
112 self.slider.valueChanged.emit(self.slider.value())
113
114 def limitsChanged(self, param, limits):
115 self.optsChanged(param, dict(limits=limits))
116
117
118 class SliderParameter(Parameter):
119 """
120 ============== ========================================================
121 **Options**
122 limits [start, stop] numbers
123 step: Defaults to 1, the spacing between each slider tick
124 span: Instead of limits + step, span can be set to specify
125 the range of slider options (e.g. np.linspace(-pi, pi, 100))
126 format: Format string to determine number of decimals to show, etc.
127 Defaults to display based on span dtype
128 precision: int number of decimals to keep for float tick spaces
129 ============== ========================================================
130 """
131 itemClass = SliderParameterItem
132
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pyqtgraph/parametertree/parameterTypes/slider.py b/pyqtgraph/parametertree/parameterTypes/slider.py
--- a/pyqtgraph/parametertree/parameterTypes/slider.py
+++ b/pyqtgraph/parametertree/parameterTypes/slider.py
@@ -20,6 +20,7 @@
def updateDisplayLabel(self, value=None):
if value is None:
value = self.param.value()
+ self.sliderLabel.setText(self.prettyTextValue(self.slider.value()))
value = str(value)
if self._suffix is None:
suffixTxt = ''
@@ -27,9 +28,13 @@
suffixTxt = f' {self._suffix}'
self.displayLabel.setText(value + suffixTxt)
+
def setSuffix(self, suffix):
self._suffix = suffix
- self._updateLabel(self.slider.value())
+ # This may be called during widget construction in which case there is no
+ # displayLabel yet
+ if hasattr(self, 'displayLabel'):
+ self.updateDisplayLabel(self.slider.value())
def makeWidget(self):
param = self.param
@@ -39,7 +44,7 @@
self.slider = QtWidgets.QSlider()
self.slider.setOrientation(QtCore.Qt.Orientation.Horizontal)
- lbl = QtWidgets.QLabel()
+ lbl = self.sliderLabel = QtWidgets.QLabel()
lbl.setAlignment(QtCore.Qt.AlignmentFlag.AlignLeft)
w = QtWidgets.QWidget()
@@ -54,10 +59,7 @@
def getValue():
return self.span[self.slider.value()].item()
- def vChanged(v):
- lbl.setText(self.prettyTextValue(v))
-
- self.slider.valueChanged.connect(vChanged)
+ self.slider.valueChanged.connect(self.updateDisplayLabel)
def onMove(pos):
self.sigChanging.emit(self, self.span[pos].item())
@@ -109,7 +111,6 @@
w.setMaximum(len(span) - 1)
if 'suffix' in opts:
self.setSuffix(opts['suffix'])
- self.slider.valueChanged.emit(self.slider.value())
def limitsChanged(self, param, limits):
self.optsChanged(param, dict(limits=limits))
|
{"golden_diff": "diff --git a/pyqtgraph/parametertree/parameterTypes/slider.py b/pyqtgraph/parametertree/parameterTypes/slider.py\n--- a/pyqtgraph/parametertree/parameterTypes/slider.py\n+++ b/pyqtgraph/parametertree/parameterTypes/slider.py\n@@ -20,6 +20,7 @@\n def updateDisplayLabel(self, value=None):\n if value is None:\n value = self.param.value()\n+ self.sliderLabel.setText(self.prettyTextValue(self.slider.value()))\n value = str(value)\n if self._suffix is None:\n suffixTxt = ''\n@@ -27,9 +28,13 @@\n suffixTxt = f' {self._suffix}'\n self.displayLabel.setText(value + suffixTxt)\n \n+\n def setSuffix(self, suffix):\n self._suffix = suffix\n- self._updateLabel(self.slider.value())\n+ # This may be called during widget construction in which case there is no\n+ # displayLabel yet\n+ if hasattr(self, 'displayLabel'):\n+ self.updateDisplayLabel(self.slider.value())\n \n def makeWidget(self):\n param = self.param\n@@ -39,7 +44,7 @@\n \n self.slider = QtWidgets.QSlider()\n self.slider.setOrientation(QtCore.Qt.Orientation.Horizontal)\n- lbl = QtWidgets.QLabel()\n+ lbl = self.sliderLabel = QtWidgets.QLabel()\n lbl.setAlignment(QtCore.Qt.AlignmentFlag.AlignLeft)\n \n w = QtWidgets.QWidget()\n@@ -54,10 +59,7 @@\n def getValue():\n return self.span[self.slider.value()].item()\n \n- def vChanged(v):\n- lbl.setText(self.prettyTextValue(v))\n-\n- self.slider.valueChanged.connect(vChanged)\n+ self.slider.valueChanged.connect(self.updateDisplayLabel)\n \n def onMove(pos):\n self.sigChanging.emit(self, self.span[pos].item())\n@@ -109,7 +111,6 @@\n w.setMaximum(len(span) - 1)\n if 'suffix' in opts:\n self.setSuffix(opts['suffix'])\n- self.slider.valueChanged.emit(self.slider.value())\n \n def limitsChanged(self, param, limits):\n self.optsChanged(param, dict(limits=limits))\n", "issue": "Bug in slider parameter\n<!-- In the following, please describe your issue in detail! -->\r\n<!-- If some sections do not apply, just remove them. -->\r\n\r\n### Short description\r\n<!-- This should summarize the issue. -->\r\nBug that precludes use of suffix in SliderParameter\r\n### Code to reproduce\r\n<!-- Please provide a minimal working example that reproduces the issue in the code block below.\r\n Ideally, this should be a full example someone else could run without additional setup. -->\r\n\r\n```python\r\nimport pyqtgraph as pg\r\nimport numpy as np\r\nfrom pyqtgraph.parametertree import (Parameter, ParameterTree, registerParameterType)\r\n\r\n ...\r\n colorParams = [\r\n dict(name='Color Settings', type='group', children=[\r\n dict(name='Opacity', type='slider', limits=[0,100], value = 60.0, suffix='%'),\r\n ]),\r\n ]\r\n self.parameters = pg.parametertree.Parameter.create(name='Analysis Settings', type='group', children=colorParams)\r\n ...\r\n```\r\n\r\n### Expected behavior\r\n<!-- What should happen? -->\r\nSlider should show a number 0 - 100, followed by a '%' suffix\r\n\r\n### Real behavior\r\n<!-- What happens? -->\r\nApplication crashes, as it refers to a non-existing method\r\n```\r\nAn error occurred?\r\nPost the full traceback inside these 'code fences'!\r\n\r\nAttributeError\r\n'SliderParameterItem' object has no attribute '_updateLabel'\r\n```\r\n\r\n### Solution\r\n 1. Please delete line 32, containing ``` self._updateLabel(self.slider.value())``` as this method does not exist\r\n 2. After ```layout = QHBoxLayout()``` Please add a line containing ```layout.setContentsMargins(0, 0, 0, 0)```\r\n 3. 
In doing so, the height of the slider item stays equal to the other items (=20), now there is a *bulge*\r\n\r\n### Additional context\r\nNone\nBug in slider parameter\n<!-- In the following, please describe your issue in detail! -->\r\n<!-- If some sections do not apply, just remove them. -->\r\n\r\n### Short description\r\n<!-- This should summarize the issue. -->\r\nBug that precludes use of suffix in SliderParameter\r\n### Code to reproduce\r\n<!-- Please provide a minimal working example that reproduces the issue in the code block below.\r\n Ideally, this should be a full example someone else could run without additional setup. -->\r\n\r\n```python\r\nimport pyqtgraph as pg\r\nimport numpy as np\r\nfrom pyqtgraph.parametertree import (Parameter, ParameterTree, registerParameterType)\r\n\r\n ...\r\n colorParams = [\r\n dict(name='Color Settings', type='group', children=[\r\n dict(name='Opacity', type='slider', limits=[0,100], value = 60.0, suffix='%'),\r\n ]),\r\n ]\r\n self.parameters = pg.parametertree.Parameter.create(name='Analysis Settings', type='group', children=colorParams)\r\n ...\r\n```\r\n\r\n### Expected behavior\r\n<!-- What should happen? -->\r\nSlider should show a number 0 - 100, followed by a '%' suffix\r\n\r\n### Real behavior\r\n<!-- What happens? -->\r\nApplication crashes, as it refers to a non-existing method\r\n```\r\nAn error occurred?\r\nPost the full traceback inside these 'code fences'!\r\n\r\nAttributeError\r\n'SliderParameterItem' object has no attribute '_updateLabel'\r\n```\r\n\r\n### Solution\r\n 1. Please delete line 32, containing ``` self._updateLabel(self.slider.value())``` as this method does not exist\r\n 2. After ```layout = QHBoxLayout()``` Please add a line containing ```layout.setContentsMargins(0, 0, 0, 0)```\r\n 3. 
In doing so, the height of the slider item stays equal to the other items (=20), now there is a *bulge*\r\n\r\n### Additional context\r\nNone\n", "before_files": [{"content": "import numpy as np\n\nfrom ...Qt import QtCore, QtWidgets\nfrom ..Parameter import Parameter\nfrom .basetypes import Emitter, WidgetParameterItem\n\n\nclass SliderParameterItem(WidgetParameterItem):\n slider: QtWidgets.QSlider\n span: np.ndarray\n charSpan: np.ndarray\n\n def __init__(self, param, depth):\n # Bind emitter to self to avoid garbage collection\n self.emitter = Emitter()\n self.sigChanging = self.emitter.sigChanging\n self._suffix = None\n super().__init__(param, depth)\n\n def updateDisplayLabel(self, value=None):\n if value is None:\n value = self.param.value()\n value = str(value)\n if self._suffix is None:\n suffixTxt = ''\n else:\n suffixTxt = f' {self._suffix}'\n self.displayLabel.setText(value + suffixTxt)\n\n def setSuffix(self, suffix):\n self._suffix = suffix\n self._updateLabel(self.slider.value())\n\n def makeWidget(self):\n param = self.param\n opts = param.opts\n opts.setdefault('limits', [0, 0])\n self._suffix = opts.get('suffix')\n\n self.slider = QtWidgets.QSlider()\n self.slider.setOrientation(QtCore.Qt.Orientation.Horizontal)\n lbl = QtWidgets.QLabel()\n lbl.setAlignment(QtCore.Qt.AlignmentFlag.AlignLeft)\n\n w = QtWidgets.QWidget()\n layout = QtWidgets.QHBoxLayout()\n w.setLayout(layout)\n layout.addWidget(lbl)\n layout.addWidget(self.slider)\n\n def setValue(v):\n self.slider.setValue(self.spanToSliderValue(v))\n\n def getValue():\n return self.span[self.slider.value()].item()\n\n def vChanged(v):\n lbl.setText(self.prettyTextValue(v))\n\n self.slider.valueChanged.connect(vChanged)\n\n def onMove(pos):\n self.sigChanging.emit(self, self.span[pos].item())\n\n self.slider.sliderMoved.connect(onMove)\n\n w.setValue = setValue\n w.value = getValue\n w.sigChanged = self.slider.valueChanged\n w.sigChanging = self.sigChanging\n self.optsChanged(param, opts)\n return w\n\n def spanToSliderValue(self, v):\n return int(np.argmin(np.abs(self.span - v)))\n\n def prettyTextValue(self, v):\n if self._suffix is None:\n suffixTxt = ''\n else:\n suffixTxt = f' {self._suffix}'\n format_ = self.param.opts.get('format', None)\n cspan = self.charSpan\n if format_ is None:\n format_ = f'{{0:>{cspan.dtype.itemsize}}}{suffixTxt}'\n return format_.format(cspan[v].decode())\n\n def optsChanged(self, param, opts):\n try:\n super().optsChanged(param, opts)\n except AttributeError:\n # This may trigger while building the parameter before the widget is fully constructed.\n # This is fine, since errors are from the parent scope which will stabilize after the widget is\n # constructed anyway\n pass\n span = opts.get('span', None)\n if span is None:\n step = opts.get('step', 1)\n start, stop = opts.get('limits', param.opts['limits'])\n # Add a bit to 'stop' since python slicing excludes the last value\n span = np.arange(start, stop + step, step)\n precision = opts.get('precision', 2)\n if precision is not None:\n span = span.round(precision)\n self.span = span\n self.charSpan = np.char.array(span)\n w = self.slider\n w.setMinimum(0)\n w.setMaximum(len(span) - 1)\n if 'suffix' in opts:\n self.setSuffix(opts['suffix'])\n self.slider.valueChanged.emit(self.slider.value())\n\n def limitsChanged(self, param, limits):\n self.optsChanged(param, dict(limits=limits))\n\n\nclass SliderParameter(Parameter):\n \"\"\"\n ============== ========================================================\n **Options**\n limits [start, stop] 
numbers\n step: Defaults to 1, the spacing between each slider tick\n span: Instead of limits + step, span can be set to specify\n the range of slider options (e.g. np.linspace(-pi, pi, 100))\n format: Format string to determine number of decimals to show, etc.\n Defaults to display based on span dtype\n precision: int number of decimals to keep for float tick spaces\n ============== ========================================================\n \"\"\"\n itemClass = SliderParameterItem\n", "path": "pyqtgraph/parametertree/parameterTypes/slider.py"}], "after_files": [{"content": "import numpy as np\n\nfrom ...Qt import QtCore, QtWidgets\nfrom ..Parameter import Parameter\nfrom .basetypes import Emitter, WidgetParameterItem\n\n\nclass SliderParameterItem(WidgetParameterItem):\n slider: QtWidgets.QSlider\n span: np.ndarray\n charSpan: np.ndarray\n\n def __init__(self, param, depth):\n # Bind emitter to self to avoid garbage collection\n self.emitter = Emitter()\n self.sigChanging = self.emitter.sigChanging\n self._suffix = None\n super().__init__(param, depth)\n\n def updateDisplayLabel(self, value=None):\n if value is None:\n value = self.param.value()\n self.sliderLabel.setText(self.prettyTextValue(self.slider.value()))\n value = str(value)\n if self._suffix is None:\n suffixTxt = ''\n else:\n suffixTxt = f' {self._suffix}'\n self.displayLabel.setText(value + suffixTxt)\n\n\n def setSuffix(self, suffix):\n self._suffix = suffix\n # This may be called during widget construction in which case there is no\n # displayLabel yet\n if hasattr(self, 'displayLabel'):\n self.updateDisplayLabel(self.slider.value())\n\n def makeWidget(self):\n param = self.param\n opts = param.opts\n opts.setdefault('limits', [0, 0])\n self._suffix = opts.get('suffix')\n\n self.slider = QtWidgets.QSlider()\n self.slider.setOrientation(QtCore.Qt.Orientation.Horizontal)\n lbl = self.sliderLabel = QtWidgets.QLabel()\n lbl.setAlignment(QtCore.Qt.AlignmentFlag.AlignLeft)\n\n w = QtWidgets.QWidget()\n layout = QtWidgets.QHBoxLayout()\n w.setLayout(layout)\n layout.addWidget(lbl)\n layout.addWidget(self.slider)\n\n def setValue(v):\n self.slider.setValue(self.spanToSliderValue(v))\n\n def getValue():\n return self.span[self.slider.value()].item()\n\n self.slider.valueChanged.connect(self.updateDisplayLabel)\n\n def onMove(pos):\n self.sigChanging.emit(self, self.span[pos].item())\n\n self.slider.sliderMoved.connect(onMove)\n\n w.setValue = setValue\n w.value = getValue\n w.sigChanged = self.slider.valueChanged\n w.sigChanging = self.sigChanging\n self.optsChanged(param, opts)\n return w\n\n def spanToSliderValue(self, v):\n return int(np.argmin(np.abs(self.span - v)))\n\n def prettyTextValue(self, v):\n if self._suffix is None:\n suffixTxt = ''\n else:\n suffixTxt = f' {self._suffix}'\n format_ = self.param.opts.get('format', None)\n cspan = self.charSpan\n if format_ is None:\n format_ = f'{{0:>{cspan.dtype.itemsize}}}{suffixTxt}'\n return format_.format(cspan[v].decode())\n\n def optsChanged(self, param, opts):\n try:\n super().optsChanged(param, opts)\n except AttributeError:\n # This may trigger while building the parameter before the widget is fully constructed.\n # This is fine, since errors are from the parent scope which will stabilize after the widget is\n # constructed anyway\n pass\n span = opts.get('span', None)\n if span is None:\n step = opts.get('step', 1)\n start, stop = opts.get('limits', param.opts['limits'])\n # Add a bit to 'stop' since python slicing excludes the last value\n span = np.arange(start, stop + 
step, step)\n precision = opts.get('precision', 2)\n if precision is not None:\n span = span.round(precision)\n self.span = span\n self.charSpan = np.char.array(span)\n w = self.slider\n w.setMinimum(0)\n w.setMaximum(len(span) - 1)\n if 'suffix' in opts:\n self.setSuffix(opts['suffix'])\n\n def limitsChanged(self, param, limits):\n self.optsChanged(param, dict(limits=limits))\n\n\nclass SliderParameter(Parameter):\n \"\"\"\n ============== ========================================================\n **Options**\n limits [start, stop] numbers\n step: Defaults to 1, the spacing between each slider tick\n span: Instead of limits + step, span can be set to specify\n the range of slider options (e.g. np.linspace(-pi, pi, 100))\n format: Format string to determine number of decimals to show, etc.\n Defaults to display based on span dtype\n precision: int number of decimals to keep for float tick spaces\n ============== ========================================================\n \"\"\"\n itemClass = SliderParameterItem\n", "path": "pyqtgraph/parametertree/parameterTypes/slider.py"}]}
| 2,325 | 479 |
gh_patches_debug_11591
|
rasdani/github-patches
|
git_diff
|
pypa__virtualenv-2722
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
zipapp virtual environment creation fails if zipapp path is symlinked
# Issue
On a fresh new system, trying to use the zipapp to setup a virtualenv when the pyz files is referenced by a symlink, it fails saying `KeyError: "There is no item named 'nv/create/via_global_ref/_virtualenv.py' in the archive"`
## But it works on my machine?
Yes, if you ever used the virtualenv (zipapp?) successfully, it creates files in `~/Library/Application Support/virtualenv` (on macOS) or `~/.local/share/virtualenv/` (on Linux) and the existence of these files (unzip and wheel) makes the zipapp work. By deleting that directory, you should be able to reproduce the problem. This was first discovered on travis-ci, where the machine is new every time.
# Environment
- OS: macOS 19.6.0 Darwin Kernel Version 19.6.0: Mon Aug 31 22:12:52 PDT 2020; root:xnu-6153.141.2~1/RELEASE_X86_64 x86_64.
- Python 3.7.7. (from brew), also tried 3.7.9
Also reproduced on
- OS: fedora:31
- Python 3.7.9
# Output of the virtual environment creation
```console
bash-3.2$ (cd /var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/foo; python3 /var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz -vvv --with-traceback /var/folders/g0/944401t11gz1jl7vxt
nzqs_m0001yj/T/foo/pipenv-2020.6.2)
206 setup logging to NOTSET [DEBUG report:43]
219 find interpreter for spec PythonSpec(path=/usr/local/opt/python/bin/python3.7) [INFO builtin:44]
219 proposed PythonInfo(spec=CPython3.7.7.final.0-64, exe=/usr/local/opt/python/bin/python3.7, platform=darwin, version='3.7.7 (default, Mar 10 2020, 15:43:33) \n[Clang 11.0.0 (clang-1100.0.33.17)]', encoding_fs_io=utf-8-utf-8) [INFO builtin:50]
219 accepted PythonInfo(spec=CPython3.7.7.final.0-64, exe=/usr/local/opt/python/bin/python3.7, platform=darwin, version='3.7.7 (default, Mar 10 2020, 15:43:33) \n[Clang 11.0.0 (clang-1100.0.33.17)]', encoding_fs_io=utf-8-utf-8) [DEBUG builtin:52]
224 filesystem is not case-sensitive [DEBUG info:28]
263 create virtual environment via CPython3Posix(dest=/private/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/foo/pipenv-2020.6.2, clear=False, global=False) [INFO session:52]
263 write /private/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/foo/pipenv-2020.6.2/pyvenv.cfg [DEBUG pyenv_cfg:34]
263 home = /usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7 [DEBUG pyenv_cfg:38]
263 implementation = CPython [DEBUG pyenv_cfg:38]
263 version_info = 3.7.7.final.0 [DEBUG pyenv_cfg:38]
263 virtualenv = 20.0.31 [DEBUG pyenv_cfg:38]
263 include-system-site-packages = false [DEBUG pyenv_cfg:38]
264 base-prefix = /usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7 [DEBUG pyenv_cfg:38]
264 base-exec-prefix = /usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7 [DEBUG pyenv_cfg:38]
264 base-executable = /usr/local/opt/python/bin/python3.7 [DEBUG pyenv_cfg:38]
264 remove file /private/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/foo/pipenv-2020.6.2/bin/python [DEBUG _sync:38]
264 symlink /usr/local/opt/python/bin/python3.7 to /private/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/foo/pipenv-2020.6.2/bin/python [DEBUG _sync:44]
265 Attempting to acquire lock 4396469136 on /Users/a-andy/Library/Application Support/virtualenv/unzip/20.0.31/_virtualenv.py.lock [DEBUG filelock:270]
265 Lock 4396469136 acquired on /Users/a-andy/Library/Application Support/virtualenv/unzip/20.0.31/_virtualenv.py.lock [INFO filelock:274]
265 extract /var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/via_global_ref/_virtualenv.py to /Users/a-andy/Library/Application Support/virtualenv/unzip/20.0.31/_virtualenv.py [DEBUG zipapp:19]
267 Attempting to release lock 4396469136 on /Users/a-andy/Library/Application Support/virtualenv/unzip/20.0.31/_virtualenv.py.lock [DEBUG filelock:315]
267 Lock 4396469136 released on /Users/a-andy/Library/Application Support/virtualenv/unzip/20.0.31/_virtualenv.py.lock [INFO filelock:318]
Traceback (most recent call last):
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/__main__.py", line 168, in <module>
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/__main__.py", line 164, in run
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/__main__.py", line 16, in run
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/run/__init__.py", line 28, in cli_run
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/run/session.py", line 46, in run
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/run/session.py", line 53, in _create
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/creator.py", line 157, in run
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/via_global_ref/builtin/via_global_self_do.py", line 86, in create
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/via_global_ref/api.py", line 88, in create
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/via_global_ref/api.py", line 91, in install_patch
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py", line 27, in env_patch_text
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/via_global_ref/api.py", line 102, in env_patch_text
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/contextlib.py", line 112, in __enter__
return next(self.gen)
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/app_data/base.py", line 54, in ensure_extracted
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/contextlib.py", line 112, in __enter__
return next(self.gen)
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/app_data/via_disk_folder.py", line 81, in extract
File "/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/util/zipapp.py", line 22, in extract
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/zipfile.py", line 1431, in getinfo
'There is no item named %r in the archive' % name)
KeyError: "There is no item named 'nv/create/via_global_ref/_virtualenv.py' in the archive"
```
**Note:** Notice `/virtuale` is missing from the KeyError, the exact length `/private` which is the difference between the TMPDIR and the real path of the TMPDIR on macOS
On Fedora, the last line becomes:
```console
KeyError: "There is no item named '/virtualenv/create/via_global_ref/_virtualenv.py' in the archive"
```
# Steps to reproduce
1. `docker run -it --rm fedora:31`
1. `mkdir -p /tmp/foo/bar`
1. `ln -s /tmp/foo/bar /tmp/foo/bar2`
1. `curl -L -o /tmp/foo/bar/virtualenv.pyz https://bootstrap.pypa.io/virtualenv/3.7/virtualenv.pyz`
1. `python3 /tmp/foo/bar2/virtualenv.pyz /tmp/foo/blah`
Here are some one-liners to reproduce the error
```
docker run -it --rm fedora:32 bash -c 'REAL=/tmp/foo/car; SYM=/tmp/foo/bbb2; mkdir -p "${REAL}"; ln -s "${REAL}" "${SYM}"; curl -L -o "${REAL}/virtualenv.pyz" https://bootstrap.pypa.io/virtualenv/3.8/virtualenv.pyz; python3 "${SYM}/virtualenv.pyz" /tmp/blah'
```
```
docker run -it --rm fedora:32 bash -c 'REAL=/tmp/foo/car; SYM=/tmp/foo/bbb2; mkdir -p "${REAL}"; ln -s "${REAL}" "${SYM}"; curl -L -o "${REAL}/virtualenv.pyz" https://bootstrap.pypa.io/virtualenv/3.8/virtualenv.pyz; python3 "${SYM}/virtualenv.pyz" /tmp/blah'
```
Not all values of REAL/SYM fail. It appears if the length of the symlink path and realpath differ, it fails. If they are the exact same length, no error occurs
**Fail**
REAL | SYM
--|--
/tmp/foo/bar|/tmp/bar/bar2
/tmp/foo/bar|/tmp/foo/bc
/tmp/foo/car|/tmp/foo/bb
/tmp/bb|/tmp/aaa
**Does not fail**
REAL | SYM
--|--
/tmp/foo|/tmp/bar
/tmp/foo/bar|/tmp/foo/abc
/tmp/foo/car|/tmp/foo/bbb
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/virtualenv/util/zipapp.py`
Content:
```
1 from __future__ import annotations
2
3 import logging
4 import os
5 import zipfile
6
7 from virtualenv.info import IS_WIN, ROOT
8
9
10 def read(full_path):
11 sub_file = _get_path_within_zip(full_path)
12 with zipfile.ZipFile(ROOT, "r") as zip_file, zip_file.open(sub_file) as file_handler:
13 return file_handler.read().decode("utf-8")
14
15
16 def extract(full_path, dest):
17 logging.debug("extract %s to %s", full_path, dest)
18 sub_file = _get_path_within_zip(full_path)
19 with zipfile.ZipFile(ROOT, "r") as zip_file:
20 info = zip_file.getinfo(sub_file)
21 info.filename = dest.name
22 zip_file.extract(info, str(dest.parent))
23
24
25 def _get_path_within_zip(full_path):
26 full_path = os.path.abspath(str(full_path))
27 sub_file = full_path[len(ROOT) + 1 :]
28 if IS_WIN:
29 # paths are always UNIX separators, even on Windows, though __file__ still follows platform default
30 sub_file = sub_file.replace(os.sep, "/")
31 return sub_file
32
33
34 __all__ = [
35 "extract",
36 "read",
37 ]
38
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/virtualenv/util/zipapp.py b/src/virtualenv/util/zipapp.py
--- a/src/virtualenv/util/zipapp.py
+++ b/src/virtualenv/util/zipapp.py
@@ -23,8 +23,12 @@
def _get_path_within_zip(full_path):
- full_path = os.path.abspath(str(full_path))
- sub_file = full_path[len(ROOT) + 1 :]
+ full_path = os.path.realpath(os.path.abspath(str(full_path)))
+ prefix = f"{ROOT}{os.sep}"
+ if not full_path.startswith(prefix):
+ msg = f"full_path={full_path} should start with prefix={prefix}."
+ raise RuntimeError(msg)
+ sub_file = full_path[len(prefix) :]
if IS_WIN:
# paths are always UNIX separators, even on Windows, though __file__ still follows platform default
sub_file = sub_file.replace(os.sep, "/")
|
{"golden_diff": "diff --git a/src/virtualenv/util/zipapp.py b/src/virtualenv/util/zipapp.py\n--- a/src/virtualenv/util/zipapp.py\n+++ b/src/virtualenv/util/zipapp.py\n@@ -23,8 +23,12 @@\n \n \n def _get_path_within_zip(full_path):\n- full_path = os.path.abspath(str(full_path))\n- sub_file = full_path[len(ROOT) + 1 :]\n+ full_path = os.path.realpath(os.path.abspath(str(full_path)))\n+ prefix = f\"{ROOT}{os.sep}\"\n+ if not full_path.startswith(prefix):\n+ msg = f\"full_path={full_path} should start with prefix={prefix}.\"\n+ raise RuntimeError(msg)\n+ sub_file = full_path[len(prefix) :]\n if IS_WIN:\n # paths are always UNIX separators, even on Windows, though __file__ still follows platform default\n sub_file = sub_file.replace(os.sep, \"/\")\n", "issue": "zipapp virtual environment creation fails if zipapp path is symlinked\n# Issue\r\n\r\nOn a fresh new system, trying to use the zipapp to setup a virtualenv when the pyz files is referenced by a symlink, it fails saying `KeyError: \"There is no item named 'nv/create/via_global_ref/_virtualenv.py' in the archive\"`\r\n\r\n## But it works on my machine?\r\n\r\nYes, if you ever used the virtualenv (zipapp?) successfully, it creates files in `~/Library/Application Support/virtualenv` (on macOS) or `~/.local/share/virtualenv/` (on Linux) and the existence of these files (unzip and wheel) makes the zipapp work. By deleting that directory, you should be able to reproduce the problem. This was first discovered on travis-ci, where the machine is new every time.\r\n\r\n# Environment\r\n\r\n- OS: macOS 19.6.0 Darwin Kernel Version 19.6.0: Mon Aug 31 22:12:52 PDT 2020; root:xnu-6153.141.2~1/RELEASE_X86_64 x86_64.\r\n- Python 3.7.7. (from brew), also tried 3.7.9\r\n\r\nAlso reproduced on\r\n- OS: fedora:31\r\n- Python 3.7.9\r\n\r\n# Output of the virtual environment creation\r\n\r\n```console\r\nbash-3.2$ (cd /var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/foo; python3 /var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz -vvv --with-traceback /var/folders/g0/944401t11gz1jl7vxt\r\nnzqs_m0001yj/T/foo/pipenv-2020.6.2)\r\n206 setup logging to NOTSET [DEBUG report:43]\r\n219 find interpreter for spec PythonSpec(path=/usr/local/opt/python/bin/python3.7) [INFO builtin:44]\r\n219 proposed PythonInfo(spec=CPython3.7.7.final.0-64, exe=/usr/local/opt/python/bin/python3.7, platform=darwin, version='3.7.7 (default, Mar 10 2020, 15:43:33) \\n[Clang 11.0.0 (clang-1100.0.33.17)]', encoding_fs_io=utf-8-utf-8) [INFO builtin:50]\r\n219 accepted PythonInfo(spec=CPython3.7.7.final.0-64, exe=/usr/local/opt/python/bin/python3.7, platform=darwin, version='3.7.7 (default, Mar 10 2020, 15:43:33) \\n[Clang 11.0.0 (clang-1100.0.33.17)]', encoding_fs_io=utf-8-utf-8) [DEBUG builtin:52]\r\n224 filesystem is not case-sensitive [DEBUG info:28]\r\n263 create virtual environment via CPython3Posix(dest=/private/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/foo/pipenv-2020.6.2, clear=False, global=False) [INFO session:52]\r\n263 write /private/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/foo/pipenv-2020.6.2/pyvenv.cfg [DEBUG pyenv_cfg:34]\r\n263 home = /usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7 [DEBUG pyenv_cfg:38]\r\n263 implementation = CPython [DEBUG pyenv_cfg:38]\r\n263 version_info = 3.7.7.final.0 [DEBUG pyenv_cfg:38]\r\n263 virtualenv = 20.0.31 [DEBUG pyenv_cfg:38]\r\n263 include-system-site-packages = false [DEBUG pyenv_cfg:38]\r\n264 base-prefix = /usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7 
[DEBUG pyenv_cfg:38]\r\n264 base-exec-prefix = /usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7 [DEBUG pyenv_cfg:38]\r\n264 base-executable = /usr/local/opt/python/bin/python3.7 [DEBUG pyenv_cfg:38]\r\n264 remove file /private/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/foo/pipenv-2020.6.2/bin/python [DEBUG _sync:38]\r\n264 symlink /usr/local/opt/python/bin/python3.7 to /private/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/foo/pipenv-2020.6.2/bin/python [DEBUG _sync:44]\r\n265 Attempting to acquire lock 4396469136 on /Users/a-andy/Library/Application Support/virtualenv/unzip/20.0.31/_virtualenv.py.lock [DEBUG filelock:270]\r\n265 Lock 4396469136 acquired on /Users/a-andy/Library/Application Support/virtualenv/unzip/20.0.31/_virtualenv.py.lock [INFO filelock:274]\r\n265 extract /var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/via_global_ref/_virtualenv.py to /Users/a-andy/Library/Application Support/virtualenv/unzip/20.0.31/_virtualenv.py [DEBUG zipapp:19]\r\n267 Attempting to release lock 4396469136 on /Users/a-andy/Library/Application Support/virtualenv/unzip/20.0.31/_virtualenv.py.lock [DEBUG filelock:315]\r\n267 Lock 4396469136 released on /Users/a-andy/Library/Application Support/virtualenv/unzip/20.0.31/_virtualenv.py.lock [INFO filelock:318]\r\nTraceback (most recent call last):\r\n File \"/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/runpy.py\", line 193, in _run_module_as_main\r\n \"__main__\", mod_spec)\r\n File \"/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/runpy.py\", line 85, in _run_code\r\n exec(code, run_globals)\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/__main__.py\", line 168, in <module>\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/__main__.py\", line 164, in run\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/__main__.py\", line 16, in run\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/run/__init__.py\", line 28, in cli_run\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/run/session.py\", line 46, in run\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/run/session.py\", line 53, in _create\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/creator.py\", line 157, in run\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/via_global_ref/builtin/via_global_self_do.py\", line 86, in create\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/via_global_ref/api.py\", line 88, in create\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/via_global_ref/api.py\", line 91, in install_patch\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py\", line 27, in env_patch_text\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/create/via_global_ref/api.py\", line 102, in env_patch_text\r\n File 
\"/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/contextlib.py\", line 112, in __enter__\r\n return next(self.gen)\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/app_data/base.py\", line 54, in ensure_extracted\r\n File \"/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/contextlib.py\", line 112, in __enter__\r\n return next(self.gen)\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/app_data/via_disk_folder.py\", line 81, in extract\r\n File \"/var/folders/g0/944401t11gz1jl7vxtnzqs_m0001yj/T/tmp.7cHdIsKw/virtualenv.pyz/virtualenv/util/zipapp.py\", line 22, in extract\r\n\r\n File \"/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/zipfile.py\", line 1431, in getinfo\r\n 'There is no item named %r in the archive' % name)\r\nKeyError: \"There is no item named 'nv/create/via_global_ref/_virtualenv.py' in the archive\"\r\n```\r\n\r\n**Note:** Notice `/virtuale` is missing from the KeyError, the exact length `/private` which is the difference between the TMPDIR and the real path of the TMPDIR on macOS\r\n\r\nOn Fedora, the last line becomes:\r\n\r\n```console\r\nKeyError: \"There is no item named '/virtualenv/create/via_global_ref/_virtualenv.py' in the archive\"\r\n```\r\n\r\n# Steps to reproduce\r\n\r\n1. `docker run -it --rm fedora:31`\r\n1. `mkdir -p /tmp/foo/bar`\r\n1. `ln -s /tmp/foo/bar /tmp/foo/bar2`\r\n1. `curl -L -o /tmp/foo/bar/virtualenv.pyz https://bootstrap.pypa.io/virtualenv/3.7/virtualenv.pyz`\r\n1. `python3 /tmp/foo/bar2/virtualenv.pyz /tmp/foo/blah`\r\n\r\nHere are some one-liners to reproduce the error\r\n\r\n```\r\ndocker run -it --rm fedora:32 bash -c 'REAL=/tmp/foo/car; SYM=/tmp/foo/bbb2; mkdir -p \"${REAL}\"; ln -s \"${REAL}\" \"${SYM}\"; curl -L -o \"${REAL}/virtualenv.pyz\" https://bootstrap.pypa.io/virtualenv/3.8/virtualenv.pyz; python3 \"${SYM}/virtualenv.pyz\" /tmp/blah'\r\n```\r\n\r\n```\r\ndocker run -it --rm fedora:32 bash -c 'REAL=/tmp/foo/car; SYM=/tmp/foo/bbb2; mkdir -p \"${REAL}\"; ln -s \"${REAL}\" \"${SYM}\"; curl -L -o \"${REAL}/virtualenv.pyz\" https://bootstrap.pypa.io/virtualenv/3.8/virtualenv.pyz; python3 \"${SYM}/virtualenv.pyz\" /tmp/blah'\r\n```\r\n\r\nNot all values of REAL/SYM fail. It appears if the length of the symlink path and realpath differ, it fails. 
If they are the exact same length, no error occurs\r\n\r\n**Fail**\r\n\r\nREAL | SYM\r\n--|--\r\n/tmp/foo/bar|/tmp/bar/bar2\r\n/tmp/foo/bar|/tmp/foo/bc\r\n/tmp/foo/car|/tmp/foo/bb\r\n/tmp/bb|/tmp/aaa\r\n\r\n**Does not fail**\r\n\r\nREAL | SYM\r\n--|--\r\n/tmp/foo|/tmp/bar\r\n/tmp/foo/bar|/tmp/foo/abc\r\n/tmp/foo/car|/tmp/foo/bbb\n", "before_files": [{"content": "from __future__ import annotations\n\nimport logging\nimport os\nimport zipfile\n\nfrom virtualenv.info import IS_WIN, ROOT\n\n\ndef read(full_path):\n sub_file = _get_path_within_zip(full_path)\n with zipfile.ZipFile(ROOT, \"r\") as zip_file, zip_file.open(sub_file) as file_handler:\n return file_handler.read().decode(\"utf-8\")\n\n\ndef extract(full_path, dest):\n logging.debug(\"extract %s to %s\", full_path, dest)\n sub_file = _get_path_within_zip(full_path)\n with zipfile.ZipFile(ROOT, \"r\") as zip_file:\n info = zip_file.getinfo(sub_file)\n info.filename = dest.name\n zip_file.extract(info, str(dest.parent))\n\n\ndef _get_path_within_zip(full_path):\n full_path = os.path.abspath(str(full_path))\n sub_file = full_path[len(ROOT) + 1 :]\n if IS_WIN:\n # paths are always UNIX separators, even on Windows, though __file__ still follows platform default\n sub_file = sub_file.replace(os.sep, \"/\")\n return sub_file\n\n\n__all__ = [\n \"extract\",\n \"read\",\n]\n", "path": "src/virtualenv/util/zipapp.py"}], "after_files": [{"content": "from __future__ import annotations\n\nimport logging\nimport os\nimport zipfile\n\nfrom virtualenv.info import IS_WIN, ROOT\n\n\ndef read(full_path):\n sub_file = _get_path_within_zip(full_path)\n with zipfile.ZipFile(ROOT, \"r\") as zip_file, zip_file.open(sub_file) as file_handler:\n return file_handler.read().decode(\"utf-8\")\n\n\ndef extract(full_path, dest):\n logging.debug(\"extract %s to %s\", full_path, dest)\n sub_file = _get_path_within_zip(full_path)\n with zipfile.ZipFile(ROOT, \"r\") as zip_file:\n info = zip_file.getinfo(sub_file)\n info.filename = dest.name\n zip_file.extract(info, str(dest.parent))\n\n\ndef _get_path_within_zip(full_path):\n full_path = os.path.realpath(os.path.abspath(str(full_path)))\n prefix = f\"{ROOT}{os.sep}\"\n if not full_path.startswith(prefix):\n msg = f\"full_path={full_path} should start with prefix={prefix}.\"\n raise RuntimeError(msg)\n sub_file = full_path[len(prefix) :]\n if IS_WIN:\n # paths are always UNIX separators, even on Windows, though __file__ still follows platform default\n sub_file = sub_file.replace(os.sep, \"/\")\n return sub_file\n\n\n__all__ = [\n \"extract\",\n \"read\",\n]\n", "path": "src/virtualenv/util/zipapp.py"}]}
| 3,970 | 207 |
gh_patches_debug_1798
|
rasdani/github-patches
|
git_diff
|
mkdocs__mkdocs-2606
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support terminal coloring on Windows
E.g. we might use the [colorama](https://pypi.org/project/colorama/) library, otherwise the output looks like this in cmd.exe:
```
←[33mWARNING - ←[0mmkdocs_autorefs.plugin: cookbook-reference.md: Could not find cross-reference target '['a']'
←[33mWARNING - ←[0mmkdocs_autorefs.plugin: cookbook-reference.md: Could not find cross-reference target '['a']'
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mkdocs/__main__.py`
Content:
```
1 #!/usr/bin/env python
2
3 import os
4 import sys
5 import logging
6 import click
7 import textwrap
8 import shutil
9
10 from mkdocs import __version__
11 from mkdocs import utils
12 from mkdocs import config
13 from mkdocs.commands import build, gh_deploy, new, serve
14
15 log = logging.getLogger(__name__)
16
17
18 class ColorFormatter(logging.Formatter):
19 colors = {
20 'CRITICAL': 'red',
21 'ERROR': 'red',
22 'WARNING': 'yellow',
23 'DEBUG': 'blue'
24 }
25
26 text_wrapper = textwrap.TextWrapper(
27 width=shutil.get_terminal_size(fallback=(0, 0)).columns,
28 replace_whitespace=False,
29 break_long_words=False,
30 break_on_hyphens=False,
31 initial_indent=' '*12,
32 subsequent_indent=' '*12
33 )
34
35 def format(self, record):
36 message = super().format(record)
37 prefix = f'{record.levelname:<8} - '
38 if record.levelname in self.colors:
39 prefix = click.style(prefix, fg=self.colors[record.levelname])
40 if self.text_wrapper.width:
41 # Only wrap text if a terminal width was detected
42 msg = '\n'.join(
43 self.text_wrapper.fill(line)
44 for line in message.splitlines()
45 )
46 # Prepend prefix after wrapping so that color codes don't affect length
47 return prefix + msg[12:]
48 return prefix + message
49
50
51 class State:
52 ''' Maintain logging level.'''
53
54 def __init__(self, log_name='mkdocs', level=logging.INFO):
55 self.logger = logging.getLogger(log_name)
56 # Don't restrict level on logger; use handler
57 self.logger.setLevel(1)
58 self.logger.propagate = False
59
60 self.stream = logging.StreamHandler()
61 self.stream.setFormatter(ColorFormatter())
62 self.stream.setLevel(level)
63 self.stream.name = 'MkDocsStreamHandler'
64 self.logger.addHandler(self.stream)
65
66 # Add CountHandler for strict mode
67 self.counter = utils.log_counter
68 self.counter.setLevel(logging.WARNING)
69 self.logger.addHandler(self.counter)
70
71
72 pass_state = click.make_pass_decorator(State, ensure=True)
73
74 clean_help = "Remove old files from the site_dir before building (the default)."
75 config_help = "Provide a specific MkDocs config"
76 dev_addr_help = ("IP address and port to serve documentation locally (default: "
77 "localhost:8000)")
78 strict_help = ("Enable strict mode. This will cause MkDocs to abort the build "
79 "on any warnings.")
80 theme_help = "The theme to use when building your documentation."
81 theme_choices = utils.get_theme_names()
82 site_dir_help = "The directory to output the result of the documentation build."
83 use_directory_urls_help = "Use directory URLs when building pages (the default)."
84 reload_help = "Enable the live reloading in the development server (this is the default)"
85 no_reload_help = "Disable the live reloading in the development server."
86 dirty_reload_help = "Enable the live reloading in the development server, but only re-build files that have changed"
87 commit_message_help = ("A commit message to use when committing to the "
88 "Github Pages remote branch. Commit {sha} and MkDocs {version} are available as expansions")
89 remote_branch_help = ("The remote branch to commit to for Github Pages. This "
90 "overrides the value specified in config")
91 remote_name_help = ("The remote name to commit to for Github Pages. This "
92 "overrides the value specified in config")
93 force_help = "Force the push to the repository."
94 ignore_version_help = "Ignore check that build is not being deployed with an older version of MkDocs."
95 watch_theme_help = ("Include the theme in list of files to watch for live reloading. "
96 "Ignored when live reload is not used.")
97 shell_help = "Use the shell when invoking Git."
98
99
100 def add_options(opts):
101 def inner(f):
102 for i in reversed(opts):
103 f = i(f)
104 return f
105
106 return inner
107
108
109 def verbose_option(f):
110 def callback(ctx, param, value):
111 state = ctx.ensure_object(State)
112 if value:
113 state.stream.setLevel(logging.DEBUG)
114 return click.option('-v', '--verbose',
115 is_flag=True,
116 expose_value=False,
117 help='Enable verbose output',
118 callback=callback)(f)
119
120
121 def quiet_option(f):
122 def callback(ctx, param, value):
123 state = ctx.ensure_object(State)
124 if value:
125 state.stream.setLevel(logging.ERROR)
126 return click.option('-q', '--quiet',
127 is_flag=True,
128 expose_value=False,
129 help='Silence warnings',
130 callback=callback)(f)
131
132
133 common_options = add_options([quiet_option, verbose_option])
134 common_config_options = add_options([
135 click.option('-f', '--config-file', type=click.File('rb'), help=config_help),
136 # Don't override config value if user did not specify --strict flag
137 # Conveniently, load_config drops None values
138 click.option('-s', '--strict', is_flag=True, default=None, help=strict_help),
139 click.option('-t', '--theme', type=click.Choice(theme_choices), help=theme_help),
140 # As with --strict, set the default to None so that this doesn't incorrectly
141 # override the config file
142 click.option('--use-directory-urls/--no-directory-urls', is_flag=True, default=None, help=use_directory_urls_help)
143 ])
144
145 PYTHON_VERSION = sys.version[:3]
146
147 PKG_DIR = os.path.dirname(os.path.abspath(__file__))
148
149
150 @click.group(context_settings={'help_option_names': ['-h', '--help']})
151 @click.version_option(
152 __version__,
153 '-V', '--version',
154 message=f'%(prog)s, version %(version)s from { PKG_DIR } (Python { PYTHON_VERSION })'
155 )
156 @common_options
157 def cli():
158 """
159 MkDocs - Project documentation with Markdown.
160 """
161
162
163 @cli.command(name="serve")
164 @click.option('-a', '--dev-addr', help=dev_addr_help, metavar='<IP:PORT>')
165 @click.option('--livereload', 'livereload', flag_value='livereload', help=reload_help, default=True)
166 @click.option('--no-livereload', 'livereload', flag_value='no-livereload', help=no_reload_help)
167 @click.option('--dirtyreload', 'livereload', flag_value='dirty', help=dirty_reload_help)
168 @click.option('--watch-theme', help=watch_theme_help, is_flag=True)
169 @common_config_options
170 @common_options
171 def serve_command(dev_addr, livereload, **kwargs):
172 """Run the builtin development server"""
173 serve.serve(dev_addr=dev_addr, livereload=livereload, **kwargs)
174
175
176 @cli.command(name="build")
177 @click.option('-c', '--clean/--dirty', is_flag=True, default=True, help=clean_help)
178 @common_config_options
179 @click.option('-d', '--site-dir', type=click.Path(), help=site_dir_help)
180 @common_options
181 def build_command(clean, **kwargs):
182 """Build the MkDocs documentation"""
183 build.build(config.load_config(**kwargs), dirty=not clean)
184
185
186 @cli.command(name="gh-deploy")
187 @click.option('-c', '--clean/--dirty', is_flag=True, default=True, help=clean_help)
188 @click.option('-m', '--message', help=commit_message_help)
189 @click.option('-b', '--remote-branch', help=remote_branch_help)
190 @click.option('-r', '--remote-name', help=remote_name_help)
191 @click.option('--force', is_flag=True, help=force_help)
192 @click.option('--ignore-version', is_flag=True, help=ignore_version_help)
193 @click.option('--shell', is_flag=True, help=shell_help)
194 @common_config_options
195 @click.option('-d', '--site-dir', type=click.Path(), help=site_dir_help)
196 @common_options
197 def gh_deploy_command(clean, message, remote_branch, remote_name, force, ignore_version, shell, **kwargs):
198 """Deploy your documentation to GitHub Pages"""
199 cfg = config.load_config(
200 remote_branch=remote_branch,
201 remote_name=remote_name,
202 **kwargs
203 )
204 build.build(cfg, dirty=not clean)
205 gh_deploy.gh_deploy(cfg, message=message, force=force, ignore_version=ignore_version, shell=shell)
206
207
208 @cli.command(name="new")
209 @click.argument("project_directory")
210 @common_options
211 def new_command(project_directory):
212 """Create a new MkDocs project"""
213 new.new(project_directory)
214
215
216 if __name__ == '__main__': # pragma: no cover
217 cli()
218
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/mkdocs/__main__.py b/mkdocs/__main__.py
--- a/mkdocs/__main__.py
+++ b/mkdocs/__main__.py
@@ -12,6 +12,15 @@
from mkdocs import config
from mkdocs.commands import build, gh_deploy, new, serve
+
+if sys.platform.startswith("win"):
+ try:
+ import colorama
+ except ImportError:
+ pass
+ else:
+ colorama.init()
+
log = logging.getLogger(__name__)
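
For context, a minimal standalone sketch of the pattern the patch above applies, assuming only the optional `colorama` package; the sample string at the end is illustrative and not taken from MkDocs:

```
import sys

# On Windows, translate ANSI colour codes into console API calls so coloured
# log prefixes render correctly in cmd.exe; elsewhere this block is a no-op.
if sys.platform.startswith("win"):
    try:
        import colorama
    except ImportError:
        pass  # optional dependency: without it the escape codes print literally
    else:
        colorama.init()

print("\033[33mWARNING -\033[0m shown in yellow where ANSI colours are supported")
```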
|
{"golden_diff": "diff --git a/mkdocs/__main__.py b/mkdocs/__main__.py\n--- a/mkdocs/__main__.py\n+++ b/mkdocs/__main__.py\n@@ -12,6 +12,15 @@\n from mkdocs import config\n from mkdocs.commands import build, gh_deploy, new, serve\n \n+\n+if sys.platform.startswith(\"win\"):\n+ try:\n+ import colorama\n+ except ImportError:\n+ pass\n+ else:\n+ colorama.init()\n+\n log = logging.getLogger(__name__)\n", "issue": "Support terminal coloring on Windows\nE.g. we might use the [colorama](https://pypi.org/project/colorama/) library, otherwise the output looks like this in cmd.exe:\r\n\r\n```\r\n\u2190[33mWARNING - \u2190[0mmkdocs_autorefs.plugin: cookbook-reference.md: Could not find cross-reference target '['a']'\r\n\u2190[33mWARNING - \u2190[0mmkdocs_autorefs.plugin: cookbook-reference.md: Could not find cross-reference target '['a']'\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python\n\nimport os\nimport sys\nimport logging\nimport click\nimport textwrap\nimport shutil\n\nfrom mkdocs import __version__\nfrom mkdocs import utils\nfrom mkdocs import config\nfrom mkdocs.commands import build, gh_deploy, new, serve\n\nlog = logging.getLogger(__name__)\n\n\nclass ColorFormatter(logging.Formatter):\n colors = {\n 'CRITICAL': 'red',\n 'ERROR': 'red',\n 'WARNING': 'yellow',\n 'DEBUG': 'blue'\n }\n\n text_wrapper = textwrap.TextWrapper(\n width=shutil.get_terminal_size(fallback=(0, 0)).columns,\n replace_whitespace=False,\n break_long_words=False,\n break_on_hyphens=False,\n initial_indent=' '*12,\n subsequent_indent=' '*12\n )\n\n def format(self, record):\n message = super().format(record)\n prefix = f'{record.levelname:<8} - '\n if record.levelname in self.colors:\n prefix = click.style(prefix, fg=self.colors[record.levelname])\n if self.text_wrapper.width:\n # Only wrap text if a terminal width was detected\n msg = '\\n'.join(\n self.text_wrapper.fill(line)\n for line in message.splitlines()\n )\n # Prepend prefix after wrapping so that color codes don't affect length\n return prefix + msg[12:]\n return prefix + message\n\n\nclass State:\n ''' Maintain logging level.'''\n\n def __init__(self, log_name='mkdocs', level=logging.INFO):\n self.logger = logging.getLogger(log_name)\n # Don't restrict level on logger; use handler\n self.logger.setLevel(1)\n self.logger.propagate = False\n\n self.stream = logging.StreamHandler()\n self.stream.setFormatter(ColorFormatter())\n self.stream.setLevel(level)\n self.stream.name = 'MkDocsStreamHandler'\n self.logger.addHandler(self.stream)\n\n # Add CountHandler for strict mode\n self.counter = utils.log_counter\n self.counter.setLevel(logging.WARNING)\n self.logger.addHandler(self.counter)\n\n\npass_state = click.make_pass_decorator(State, ensure=True)\n\nclean_help = \"Remove old files from the site_dir before building (the default).\"\nconfig_help = \"Provide a specific MkDocs config\"\ndev_addr_help = (\"IP address and port to serve documentation locally (default: \"\n \"localhost:8000)\")\nstrict_help = (\"Enable strict mode. 
This will cause MkDocs to abort the build \"\n \"on any warnings.\")\ntheme_help = \"The theme to use when building your documentation.\"\ntheme_choices = utils.get_theme_names()\nsite_dir_help = \"The directory to output the result of the documentation build.\"\nuse_directory_urls_help = \"Use directory URLs when building pages (the default).\"\nreload_help = \"Enable the live reloading in the development server (this is the default)\"\nno_reload_help = \"Disable the live reloading in the development server.\"\ndirty_reload_help = \"Enable the live reloading in the development server, but only re-build files that have changed\"\ncommit_message_help = (\"A commit message to use when committing to the \"\n \"Github Pages remote branch. Commit {sha} and MkDocs {version} are available as expansions\")\nremote_branch_help = (\"The remote branch to commit to for Github Pages. This \"\n \"overrides the value specified in config\")\nremote_name_help = (\"The remote name to commit to for Github Pages. This \"\n \"overrides the value specified in config\")\nforce_help = \"Force the push to the repository.\"\nignore_version_help = \"Ignore check that build is not being deployed with an older version of MkDocs.\"\nwatch_theme_help = (\"Include the theme in list of files to watch for live reloading. \"\n \"Ignored when live reload is not used.\")\nshell_help = \"Use the shell when invoking Git.\"\n\n\ndef add_options(opts):\n def inner(f):\n for i in reversed(opts):\n f = i(f)\n return f\n\n return inner\n\n\ndef verbose_option(f):\n def callback(ctx, param, value):\n state = ctx.ensure_object(State)\n if value:\n state.stream.setLevel(logging.DEBUG)\n return click.option('-v', '--verbose',\n is_flag=True,\n expose_value=False,\n help='Enable verbose output',\n callback=callback)(f)\n\n\ndef quiet_option(f):\n def callback(ctx, param, value):\n state = ctx.ensure_object(State)\n if value:\n state.stream.setLevel(logging.ERROR)\n return click.option('-q', '--quiet',\n is_flag=True,\n expose_value=False,\n help='Silence warnings',\n callback=callback)(f)\n\n\ncommon_options = add_options([quiet_option, verbose_option])\ncommon_config_options = add_options([\n click.option('-f', '--config-file', type=click.File('rb'), help=config_help),\n # Don't override config value if user did not specify --strict flag\n # Conveniently, load_config drops None values\n click.option('-s', '--strict', is_flag=True, default=None, help=strict_help),\n click.option('-t', '--theme', type=click.Choice(theme_choices), help=theme_help),\n # As with --strict, set the default to None so that this doesn't incorrectly\n # override the config file\n click.option('--use-directory-urls/--no-directory-urls', is_flag=True, default=None, help=use_directory_urls_help)\n])\n\nPYTHON_VERSION = sys.version[:3]\n\nPKG_DIR = os.path.dirname(os.path.abspath(__file__))\n\n\[email protected](context_settings={'help_option_names': ['-h', '--help']})\[email protected]_option(\n __version__,\n '-V', '--version',\n message=f'%(prog)s, version %(version)s from { PKG_DIR } (Python { PYTHON_VERSION })'\n)\n@common_options\ndef cli():\n \"\"\"\n MkDocs - Project documentation with Markdown.\n \"\"\"\n\n\[email protected](name=\"serve\")\[email protected]('-a', '--dev-addr', help=dev_addr_help, metavar='<IP:PORT>')\[email protected]('--livereload', 'livereload', flag_value='livereload', help=reload_help, default=True)\[email protected]('--no-livereload', 'livereload', flag_value='no-livereload', help=no_reload_help)\[email protected]('--dirtyreload', 
'livereload', flag_value='dirty', help=dirty_reload_help)\[email protected]('--watch-theme', help=watch_theme_help, is_flag=True)\n@common_config_options\n@common_options\ndef serve_command(dev_addr, livereload, **kwargs):\n \"\"\"Run the builtin development server\"\"\"\n serve.serve(dev_addr=dev_addr, livereload=livereload, **kwargs)\n\n\[email protected](name=\"build\")\[email protected]('-c', '--clean/--dirty', is_flag=True, default=True, help=clean_help)\n@common_config_options\[email protected]('-d', '--site-dir', type=click.Path(), help=site_dir_help)\n@common_options\ndef build_command(clean, **kwargs):\n \"\"\"Build the MkDocs documentation\"\"\"\n build.build(config.load_config(**kwargs), dirty=not clean)\n\n\[email protected](name=\"gh-deploy\")\[email protected]('-c', '--clean/--dirty', is_flag=True, default=True, help=clean_help)\[email protected]('-m', '--message', help=commit_message_help)\[email protected]('-b', '--remote-branch', help=remote_branch_help)\[email protected]('-r', '--remote-name', help=remote_name_help)\[email protected]('--force', is_flag=True, help=force_help)\[email protected]('--ignore-version', is_flag=True, help=ignore_version_help)\[email protected]('--shell', is_flag=True, help=shell_help)\n@common_config_options\[email protected]('-d', '--site-dir', type=click.Path(), help=site_dir_help)\n@common_options\ndef gh_deploy_command(clean, message, remote_branch, remote_name, force, ignore_version, shell, **kwargs):\n \"\"\"Deploy your documentation to GitHub Pages\"\"\"\n cfg = config.load_config(\n remote_branch=remote_branch,\n remote_name=remote_name,\n **kwargs\n )\n build.build(cfg, dirty=not clean)\n gh_deploy.gh_deploy(cfg, message=message, force=force, ignore_version=ignore_version, shell=shell)\n\n\[email protected](name=\"new\")\[email protected](\"project_directory\")\n@common_options\ndef new_command(project_directory):\n \"\"\"Create a new MkDocs project\"\"\"\n new.new(project_directory)\n\n\nif __name__ == '__main__': # pragma: no cover\n cli()\n", "path": "mkdocs/__main__.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\nimport os\nimport sys\nimport logging\nimport click\nimport textwrap\nimport shutil\n\nfrom mkdocs import __version__\nfrom mkdocs import utils\nfrom mkdocs import config\nfrom mkdocs.commands import build, gh_deploy, new, serve\n\n\nif sys.platform.startswith(\"win\"):\n try:\n import colorama\n except ImportError:\n pass\n else:\n colorama.init()\n\nlog = logging.getLogger(__name__)\n\n\nclass ColorFormatter(logging.Formatter):\n colors = {\n 'CRITICAL': 'red',\n 'ERROR': 'red',\n 'WARNING': 'yellow',\n 'DEBUG': 'blue'\n }\n\n text_wrapper = textwrap.TextWrapper(\n width=shutil.get_terminal_size(fallback=(0, 0)).columns,\n replace_whitespace=False,\n break_long_words=False,\n break_on_hyphens=False,\n initial_indent=' '*12,\n subsequent_indent=' '*12\n )\n\n def format(self, record):\n message = super().format(record)\n prefix = f'{record.levelname:<8} - '\n if record.levelname in self.colors:\n prefix = click.style(prefix, fg=self.colors[record.levelname])\n if self.text_wrapper.width:\n # Only wrap text if a terminal width was detected\n msg = '\\n'.join(\n self.text_wrapper.fill(line)\n for line in message.splitlines()\n )\n # Prepend prefix after wrapping so that color codes don't affect length\n return prefix + msg[12:]\n return prefix + message\n\n\nclass State:\n ''' Maintain logging level.'''\n\n def __init__(self, log_name='mkdocs', level=logging.INFO):\n self.logger = logging.getLogger(log_name)\n # 
Don't restrict level on logger; use handler\n self.logger.setLevel(1)\n self.logger.propagate = False\n\n self.stream = logging.StreamHandler()\n self.stream.setFormatter(ColorFormatter())\n self.stream.setLevel(level)\n self.stream.name = 'MkDocsStreamHandler'\n self.logger.addHandler(self.stream)\n\n # Add CountHandler for strict mode\n self.counter = utils.log_counter\n self.counter.setLevel(logging.WARNING)\n self.logger.addHandler(self.counter)\n\n\npass_state = click.make_pass_decorator(State, ensure=True)\n\nclean_help = \"Remove old files from the site_dir before building (the default).\"\nconfig_help = \"Provide a specific MkDocs config\"\ndev_addr_help = (\"IP address and port to serve documentation locally (default: \"\n \"localhost:8000)\")\nstrict_help = (\"Enable strict mode. This will cause MkDocs to abort the build \"\n \"on any warnings.\")\ntheme_help = \"The theme to use when building your documentation.\"\ntheme_choices = utils.get_theme_names()\nsite_dir_help = \"The directory to output the result of the documentation build.\"\nuse_directory_urls_help = \"Use directory URLs when building pages (the default).\"\nreload_help = \"Enable the live reloading in the development server (this is the default)\"\nno_reload_help = \"Disable the live reloading in the development server.\"\ndirty_reload_help = \"Enable the live reloading in the development server, but only re-build files that have changed\"\ncommit_message_help = (\"A commit message to use when committing to the \"\n \"Github Pages remote branch. Commit {sha} and MkDocs {version} are available as expansions\")\nremote_branch_help = (\"The remote branch to commit to for Github Pages. This \"\n \"overrides the value specified in config\")\nremote_name_help = (\"The remote name to commit to for Github Pages. This \"\n \"overrides the value specified in config\")\nforce_help = \"Force the push to the repository.\"\nignore_version_help = \"Ignore check that build is not being deployed with an older version of MkDocs.\"\nwatch_theme_help = (\"Include the theme in list of files to watch for live reloading. 
\"\n \"Ignored when live reload is not used.\")\nshell_help = \"Use the shell when invoking Git.\"\n\n\ndef add_options(opts):\n def inner(f):\n for i in reversed(opts):\n f = i(f)\n return f\n\n return inner\n\n\ndef verbose_option(f):\n def callback(ctx, param, value):\n state = ctx.ensure_object(State)\n if value:\n state.stream.setLevel(logging.DEBUG)\n return click.option('-v', '--verbose',\n is_flag=True,\n expose_value=False,\n help='Enable verbose output',\n callback=callback)(f)\n\n\ndef quiet_option(f):\n def callback(ctx, param, value):\n state = ctx.ensure_object(State)\n if value:\n state.stream.setLevel(logging.ERROR)\n return click.option('-q', '--quiet',\n is_flag=True,\n expose_value=False,\n help='Silence warnings',\n callback=callback)(f)\n\n\ncommon_options = add_options([quiet_option, verbose_option])\ncommon_config_options = add_options([\n click.option('-f', '--config-file', type=click.File('rb'), help=config_help),\n # Don't override config value if user did not specify --strict flag\n # Conveniently, load_config drops None values\n click.option('-s', '--strict', is_flag=True, default=None, help=strict_help),\n click.option('-t', '--theme', type=click.Choice(theme_choices), help=theme_help),\n # As with --strict, set the default to None so that this doesn't incorrectly\n # override the config file\n click.option('--use-directory-urls/--no-directory-urls', is_flag=True, default=None, help=use_directory_urls_help)\n])\n\nPYTHON_VERSION = sys.version[:3]\n\nPKG_DIR = os.path.dirname(os.path.abspath(__file__))\n\n\[email protected](context_settings={'help_option_names': ['-h', '--help']})\[email protected]_option(\n __version__,\n '-V', '--version',\n message=f'%(prog)s, version %(version)s from { PKG_DIR } (Python { PYTHON_VERSION })'\n)\n@common_options\ndef cli():\n \"\"\"\n MkDocs - Project documentation with Markdown.\n \"\"\"\n\n\[email protected](name=\"serve\")\[email protected]('-a', '--dev-addr', help=dev_addr_help, metavar='<IP:PORT>')\[email protected]('--livereload', 'livereload', flag_value='livereload', help=reload_help, default=True)\[email protected]('--no-livereload', 'livereload', flag_value='no-livereload', help=no_reload_help)\[email protected]('--dirtyreload', 'livereload', flag_value='dirty', help=dirty_reload_help)\[email protected]('--watch-theme', help=watch_theme_help, is_flag=True)\n@common_config_options\n@common_options\ndef serve_command(dev_addr, livereload, **kwargs):\n \"\"\"Run the builtin development server\"\"\"\n serve.serve(dev_addr=dev_addr, livereload=livereload, **kwargs)\n\n\[email protected](name=\"build\")\[email protected]('-c', '--clean/--dirty', is_flag=True, default=True, help=clean_help)\n@common_config_options\[email protected]('-d', '--site-dir', type=click.Path(), help=site_dir_help)\n@common_options\ndef build_command(clean, **kwargs):\n \"\"\"Build the MkDocs documentation\"\"\"\n build.build(config.load_config(**kwargs), dirty=not clean)\n\n\[email protected](name=\"gh-deploy\")\[email protected]('-c', '--clean/--dirty', is_flag=True, default=True, help=clean_help)\[email protected]('-m', '--message', help=commit_message_help)\[email protected]('-b', '--remote-branch', help=remote_branch_help)\[email protected]('-r', '--remote-name', help=remote_name_help)\[email protected]('--force', is_flag=True, help=force_help)\[email protected]('--ignore-version', is_flag=True, help=ignore_version_help)\[email protected]('--shell', is_flag=True, help=shell_help)\n@common_config_options\[email protected]('-d', '--site-dir', 
type=click.Path(), help=site_dir_help)\n@common_options\ndef gh_deploy_command(clean, message, remote_branch, remote_name, force, ignore_version, shell, **kwargs):\n \"\"\"Deploy your documentation to GitHub Pages\"\"\"\n cfg = config.load_config(\n remote_branch=remote_branch,\n remote_name=remote_name,\n **kwargs\n )\n build.build(cfg, dirty=not clean)\n gh_deploy.gh_deploy(cfg, message=message, force=force, ignore_version=ignore_version, shell=shell)\n\n\[email protected](name=\"new\")\[email protected](\"project_directory\")\n@common_options\ndef new_command(project_directory):\n \"\"\"Create a new MkDocs project\"\"\"\n new.new(project_directory)\n\n\nif __name__ == '__main__': # pragma: no cover\n cli()\n", "path": "mkdocs/__main__.py"}]}
| 2,752 | 119 |
gh_patches_debug_26500
|
rasdani/github-patches
|
git_diff
|
pypa__setuptools-555
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
upload command doesn't prompt for password; raises TypeError
# Problem statement
If the `~/.pypirc` file does not contain a password like so:
``` ini
[distutils]
index-servers =
pypitest
[pypitest]
repository = https://testpypi.python.org/pypi
username = my_username
; Note the lack of a password
```
Then uploading the package
```
python setup.py sdist upload -r pypitest
```
Fails to prompt the user for his password and instead raises a TypeError (output truncated)
```
running upload
Traceback (most recent call last):
File "setup.py", line 16, in <module>
keywords=["test", "hello"]
File "/usr/lib/python2.7/distutils/core.py", line 151, in setup
dist.run_commands()
File "/usr/lib/python2.7/distutils/dist.py", line 953, in run_commands
self.run_command(cmd)
File "/usr/lib/python2.7/distutils/dist.py", line 972, in run_command
cmd_obj.run()
File "/usr/lib/python2.7/distutils/command/upload.py", line 60, in run
self.upload_file(command, pyversion, filename)
File "/usr/lib/python2.7/distutils/command/upload.py", line 135, in upload_file
self.password)
TypeError: cannot concatenate 'str' and 'NoneType' objects
```
**This is different** from the behavior of the `register` command, which prompts the user for a password before continuing.
```
python setup.py sdist register -r pypitest
```
(output truncated)
```
Creating tar archive
removing 'HelloPyPi-0.0.1.dev0' (and everything under it)
running register
Password:
```
> Note that the `register` and the `upload` command exhibit the proper behavior **if you store your password in `~/.pypirc`**, but not if the password is omitted.
# Okay, so...?
I am aware that you can run
```
python setup.py sdist register -r pypitest upload -r pypitest
```
As a workaround, but it stands to reason that **if you can register a package without uploading it, then you should also be able to upload a package without registering it**, regardless of whether a password has been specified in your `~/.pypirc` file.
# Steps to reproduce
1. Remove your pypi password from `~/.pypirc`
2. Find a project that you wish to upload to a pypi server (I used [my example repository](https://github.com/brookskindle/hellopypi) for this)
3. Run `python setup.py sdist upload -r target_pypi_server`
# Setuptools version
setuptools (20.9.0) -- from `pip list` in my virtualenv
--- END ISSUE ---
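
A small self-contained sketch of the prompting behaviour the reporter expects, using only the standard-library `getpass` module; the function name `obtain_password` is invented for illustration and is not part of setuptools or distutils:

```
import getpass


def obtain_password(configured_password=None):
    """Return the configured password, prompting on the tty if it is missing."""
    if configured_password:
        return configured_password
    try:
        return getpass.getpass("Password: ")
    except (EOFError, KeyboardInterrupt):
        return None


if __name__ == "__main__":
    # With no password configured, the user is prompted instead of hitting a
    # TypeError when the credentials are concatenated later.
    print(bool(obtain_password()))
```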
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setuptools/command/upload.py`
Content:
```
1 from distutils.command import upload as orig
2
3
4 class upload(orig.upload):
5 """
6 Override default upload behavior to look up password
7 in the keyring if available.
8 """
9
10 def finalize_options(self):
11 orig.upload.finalize_options(self)
12 self.password or self._load_password_from_keyring()
13
14 def _load_password_from_keyring(self):
15 """
16 Attempt to load password from keyring. Suppress Exceptions.
17 """
18 try:
19 keyring = __import__('keyring')
20 self.password = keyring.get_password(self.repository,
21 self.username)
22 except Exception:
23 pass
24
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setuptools/command/upload.py b/setuptools/command/upload.py
--- a/setuptools/command/upload.py
+++ b/setuptools/command/upload.py
@@ -3,13 +3,18 @@
class upload(orig.upload):
"""
- Override default upload behavior to look up password
- in the keyring if available.
+ Override default upload behavior to obtain password
+ in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
- self.password or self._load_password_from_keyring()
+ # Attempt to obtain password. Short circuit evaluation at the first
+ # sign of success.
+ self.password = (
+ self.password or self._load_password_from_keyring() or
+ self._prompt_for_password()
+ )
def _load_password_from_keyring(self):
"""
@@ -17,7 +22,22 @@
"""
try:
keyring = __import__('keyring')
- self.password = keyring.get_password(self.repository,
- self.username)
+ password = keyring.get_password(self.repository, self.username)
except Exception:
- pass
+ password = None
+ finally:
+ return password
+
+ def _prompt_for_password(self):
+ """
+ Prompt for a password on the tty. Suppress Exceptions.
+ """
+ password = None
+ try:
+ import getpass
+ while not password:
+ password = getpass.getpass()
+ except (Exception, KeyboardInterrupt):
+ password = None
+ finally:
+ return password
|
{"golden_diff": "diff --git a/setuptools/command/upload.py b/setuptools/command/upload.py\n--- a/setuptools/command/upload.py\n+++ b/setuptools/command/upload.py\n@@ -3,13 +3,18 @@\n \n class upload(orig.upload):\n \"\"\"\n- Override default upload behavior to look up password\n- in the keyring if available.\n+ Override default upload behavior to obtain password\n+ in a variety of different ways.\n \"\"\"\n \n def finalize_options(self):\n orig.upload.finalize_options(self)\n- self.password or self._load_password_from_keyring()\n+ # Attempt to obtain password. Short circuit evaluation at the first\n+ # sign of success.\n+ self.password = (\n+ self.password or self._load_password_from_keyring() or\n+ self._prompt_for_password()\n+ )\n \n def _load_password_from_keyring(self):\n \"\"\"\n@@ -17,7 +22,22 @@\n \"\"\"\n try:\n keyring = __import__('keyring')\n- self.password = keyring.get_password(self.repository,\n- self.username)\n+ password = keyring.get_password(self.repository, self.username)\n except Exception:\n- pass\n+ password = None\n+ finally:\n+ return password\n+\n+ def _prompt_for_password(self):\n+ \"\"\"\n+ Prompt for a password on the tty. Suppress Exceptions.\n+ \"\"\"\n+ password = None\n+ try:\n+ import getpass\n+ while not password:\n+ password = getpass.getpass()\n+ except (Exception, KeyboardInterrupt):\n+ password = None\n+ finally:\n+ return password\n", "issue": "upload command doesn't prompt for password; raises TypeError\n# Problem statement\n\nIf the `~/.pypirc` file does not contain a password like so:\n\n``` ini\n[distutils]\nindex-servers = \n pypitest\n\n[pypitest]\nrepository = https://testpypi.python.org/pypi\nusername = my_username\n; Note the lack of a password\n```\n\nThen uploading the package\n\n```\npython setup.py sdist upload -r pypitest\n```\n\nFails to prompt the user for his password and instead raises a TypeError (output truncated)\n\n```\nrunning upload\nTraceback (most recent call last):\n File \"setup.py\", line 16, in <module>\n keywords=[\"test\", \"hello\"]\n File \"/usr/lib/python2.7/distutils/core.py\", line 151, in setup\n dist.run_commands()\n File \"/usr/lib/python2.7/distutils/dist.py\", line 953, in run_commands\n self.run_command(cmd)\n File \"/usr/lib/python2.7/distutils/dist.py\", line 972, in run_command\n cmd_obj.run()\n File \"/usr/lib/python2.7/distutils/command/upload.py\", line 60, in run\n self.upload_file(command, pyversion, filename)\n File \"/usr/lib/python2.7/distutils/command/upload.py\", line 135, in upload_file\n self.password)\nTypeError: cannot concatenate 'str' and 'NoneType' objects\n```\n\n**This is different** than the behavior of the `register` command, which prompts the user for a password before continuing.\n\n```\npython setup.py sdist register -r pypitest\n```\n\n(output truncated)\n\n```\nCreating tar archive\nremoving 'HelloPyPi-0.0.1.dev0' (and everything under it)\nrunning register\nPassword: \n```\n\n> Note that the `register` and the `upload` command exhibit the proper behavior **if you store your password in `~/.pypirc`**, but not if the password is omitted.\n# Okay, so...?\n\nI am aware that you can run\n\n```\npython setup.py sdist register -r pypitest upload -r pypitest\n```\n\nAs a workaround, but it stands to reason that **if you can register a package without uploading it, then you should also be able to upload a package without registering it**, regardless of if a password has been specified in your `~/.pypirc` file.\n# Steps to reproduce\n1. Remove your pypi password from `~/.pypirc`\n2. 
Find a project that you wish to upload to a pypi server (I used [my example repository](https://github.com/brookskindle/hellopypi) for this)\n3. Run `python setup.py sdist upload -r target_pypi_server`\n# Setuptools version\n\nsetuptools (20.9.0) -- from `pip list` in my virtualenv\n\n", "before_files": [{"content": "from distutils.command import upload as orig\n\n\nclass upload(orig.upload):\n \"\"\"\n Override default upload behavior to look up password\n in the keyring if available.\n \"\"\"\n\n def finalize_options(self):\n orig.upload.finalize_options(self)\n self.password or self._load_password_from_keyring()\n\n def _load_password_from_keyring(self):\n \"\"\"\n Attempt to load password from keyring. Suppress Exceptions.\n \"\"\"\n try:\n keyring = __import__('keyring')\n self.password = keyring.get_password(self.repository,\n self.username)\n except Exception:\n pass\n", "path": "setuptools/command/upload.py"}], "after_files": [{"content": "from distutils.command import upload as orig\n\n\nclass upload(orig.upload):\n \"\"\"\n Override default upload behavior to obtain password\n in a variety of different ways.\n \"\"\"\n\n def finalize_options(self):\n orig.upload.finalize_options(self)\n # Attempt to obtain password. Short circuit evaluation at the first\n # sign of success.\n self.password = (\n self.password or self._load_password_from_keyring() or\n self._prompt_for_password()\n )\n\n def _load_password_from_keyring(self):\n \"\"\"\n Attempt to load password from keyring. Suppress Exceptions.\n \"\"\"\n try:\n keyring = __import__('keyring')\n password = keyring.get_password(self.repository, self.username)\n except Exception:\n password = None\n finally:\n return password\n\n def _prompt_for_password(self):\n \"\"\"\n Prompt for a password on the tty. Suppress Exceptions.\n \"\"\"\n password = None\n try:\n import getpass\n while not password:\n password = getpass.getpass()\n except (Exception, KeyboardInterrupt):\n password = None\n finally:\n return password\n", "path": "setuptools/command/upload.py"}]}
| 1,059 | 353 |
gh_patches_debug_15619
|
rasdani/github-patches
|
git_diff
|
readthedocs__readthedocs.org-7002
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Better pattern to use PYTEST_OPTIONS
In #4095 we introduced `PYTEST_OPTIONS` to define a set of environment-dependent options. This way, the options used only in tests can be extended or overridden from outside (for example, from the corporate repo).
Although I like the approach, I had to write a hack to detect whether we are running in `readthedocs` or `readthedocsinc`, so that `pytest` knows which of these option sets to respect.
The ugly code is at https://github.com/rtfd/readthedocs.org/pull/4095#discussion_r198927773 and we need to find a better pattern for this.
--- END ISSUE ---
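
One possible shape for a cleaner pattern, sketched as an assumption rather than the project's actual solution: pick the option set from an explicit environment variable instead of probing for the corporate package. The variable name `RTD_TEST_ENVIRONMENT` and the option sets below are illustrative only:

```
import os

# Hypothetical option sets keyed by environment name; in practice each
# repository's own conftest.py would define just its own entry.
PYTEST_OPTION_SETS = {
    "readthedocs": (
        ("community", True),
        ("corporate", False),
        ("environment", "readthedocs"),
    ),
    "readthedocsinc": (),
}


def pytest_configure(config):
    env = os.environ.get("RTD_TEST_ENVIRONMENT", "readthedocs")
    for option, value in PYTEST_OPTION_SETS.get(env, ()):
        setattr(config.option, option, value)
```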
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `readthedocs/conftest.py`
Content:
```
1 import pytest
2 from rest_framework.test import APIClient
3
4
5 try:
6 # TODO: this file is read/executed even when called from ``readthedocsinc``,
7 # so it's overriding the options that we are defining in the ``conftest.py``
8 # from the corporate site. We need to find a better way to avoid this.
9 import readthedocsinc
10 PYTEST_OPTIONS = ()
11 except ImportError:
12 PYTEST_OPTIONS = (
13 # Options to set test environment
14 ('community', True),
15 ('corporate', False),
16 ('environment', 'readthedocs'),
17 )
18
19
20 def pytest_configure(config):
21 for option, value in PYTEST_OPTIONS:
22 setattr(config.option, option, value)
23
24
25 @pytest.fixture(autouse=True)
26 def settings_modification(settings):
27 settings.CELERY_ALWAYS_EAGER = True
28
29
30 @pytest.fixture
31 def api_client():
32 return APIClient()
33
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/readthedocs/conftest.py b/readthedocs/conftest.py
--- a/readthedocs/conftest.py
+++ b/readthedocs/conftest.py
@@ -1,32 +1,6 @@
import pytest
from rest_framework.test import APIClient
-
-try:
- # TODO: this file is read/executed even when called from ``readthedocsinc``,
- # so it's overriding the options that we are defining in the ``conftest.py``
- # from the corporate site. We need to find a better way to avoid this.
- import readthedocsinc
- PYTEST_OPTIONS = ()
-except ImportError:
- PYTEST_OPTIONS = (
- # Options to set test environment
- ('community', True),
- ('corporate', False),
- ('environment', 'readthedocs'),
- )
-
-
-def pytest_configure(config):
- for option, value in PYTEST_OPTIONS:
- setattr(config.option, option, value)
-
-
[email protected](autouse=True)
-def settings_modification(settings):
- settings.CELERY_ALWAYS_EAGER = True
-
-
@pytest.fixture
def api_client():
return APIClient()
|
{"golden_diff": "diff --git a/readthedocs/conftest.py b/readthedocs/conftest.py\n--- a/readthedocs/conftest.py\n+++ b/readthedocs/conftest.py\n@@ -1,32 +1,6 @@\n import pytest\n from rest_framework.test import APIClient\n \n-\n-try:\n- # TODO: this file is read/executed even when called from ``readthedocsinc``,\n- # so it's overriding the options that we are defining in the ``conftest.py``\n- # from the corporate site. We need to find a better way to avoid this.\n- import readthedocsinc\n- PYTEST_OPTIONS = ()\n-except ImportError:\n- PYTEST_OPTIONS = (\n- # Options to set test environment\n- ('community', True),\n- ('corporate', False),\n- ('environment', 'readthedocs'),\n- )\n-\n-\n-def pytest_configure(config):\n- for option, value in PYTEST_OPTIONS:\n- setattr(config.option, option, value)\n-\n-\[email protected](autouse=True)\n-def settings_modification(settings):\n- settings.CELERY_ALWAYS_EAGER = True\n-\n-\n @pytest.fixture\n def api_client():\n return APIClient()\n", "issue": "Better pattern to use PYTEST_OPTIONS\nIn #4095 we incorporate the usage of `PYTEST_OPTIONS` to define a set of options to be environment-dependent. This way, we can extend/override these options used only in tests from outside (for example, corporate repo).\r\n\r\nAlthough I like it, I had to write a hack to know if we are running in `readthedocs` or `readthedocsinc` to know which of these options has to be respected by `pytest`.\r\n\r\nThe ugly code is at https://github.com/rtfd/readthedocs.org/pull/4095#discussion_r198927773 and we need to find a better pattern for this.\n", "before_files": [{"content": "import pytest\nfrom rest_framework.test import APIClient\n\n\ntry:\n # TODO: this file is read/executed even when called from ``readthedocsinc``,\n # so it's overriding the options that we are defining in the ``conftest.py``\n # from the corporate site. We need to find a better way to avoid this.\n import readthedocsinc\n PYTEST_OPTIONS = ()\nexcept ImportError:\n PYTEST_OPTIONS = (\n # Options to set test environment\n ('community', True),\n ('corporate', False),\n ('environment', 'readthedocs'),\n )\n\n\ndef pytest_configure(config):\n for option, value in PYTEST_OPTIONS:\n setattr(config.option, option, value)\n\n\[email protected](autouse=True)\ndef settings_modification(settings):\n settings.CELERY_ALWAYS_EAGER = True\n\n\[email protected]\ndef api_client():\n return APIClient()\n", "path": "readthedocs/conftest.py"}], "after_files": [{"content": "import pytest\nfrom rest_framework.test import APIClient\n\[email protected]\ndef api_client():\n return APIClient()\n", "path": "readthedocs/conftest.py"}]}
| 664 | 263 |
gh_patches_debug_5965
|
rasdani/github-patches
|
git_diff
|
wagtail__wagtail-940
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Dropping Python 3.2 support
Python 3.2 is quite old and many projects are dropping support for it (`libsass` and `treebeard` both have already). Should we consider dropping support as well?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 import sys, os
4
5 from wagtail.wagtailcore import __version__
6
7
8 try:
9 from setuptools import setup, find_packages
10 except ImportError:
11 from distutils.core import setup
12
13
14 # Hack to prevent "TypeError: 'NoneType' object is not callable" error
15 # in multiprocessing/util.py _exit_function when setup.py exits
16 # (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
17 try:
18 import multiprocessing
19 except ImportError:
20 pass
21
22
23 # Disable parallel builds, because Pillow 2.5.3 does some crazy monkeypatching of
24 # the build process on multicore systems, which breaks installation of libsass
25 os.environ['MAX_CONCURRENCY'] = '1'
26
27 PY3 = sys.version_info[0] == 3
28
29
30 install_requires = [
31 "Django>=1.7.0,<1.8",
32 "django-compressor>=1.4",
33 "django-libsass>=0.2",
34 "django-modelcluster>=0.4",
35 "django-taggit==0.12.2",
36 "django-treebeard==2.0",
37 "Pillow>=2.6.1",
38 "beautifulsoup4>=4.3.2",
39 "html5lib==0.999",
40 "Unidecode>=0.04.14",
41 "six>=1.7.0",
42 'requests>=2.0.0',
43 "Willow==0.1",
44 ]
45
46
47 if not PY3:
48 install_requires += [
49 "unicodecsv>=0.9.4"
50 ]
51
52
53 setup(
54 name='wagtail',
55 version=__version__,
56 description='A Django content management system focused on flexibility and user experience',
57 author='Matthew Westcott',
58 author_email='[email protected]',
59 url='http://wagtail.io/',
60 packages=find_packages(),
61 include_package_data=True,
62 license='BSD',
63 long_description=open('README.rst').read(),
64 classifiers=[
65 'Development Status :: 5 - Production/Stable',
66 'Environment :: Web Environment',
67 'Intended Audience :: Developers',
68 'License :: OSI Approved :: BSD License',
69 'Operating System :: OS Independent',
70 'Programming Language :: Python',
71 'Programming Language :: Python :: 2',
72 'Programming Language :: Python :: 2.7',
73 'Programming Language :: Python :: 3',
74 'Programming Language :: Python :: 3.2',
75 'Programming Language :: Python :: 3.3',
76 'Programming Language :: Python :: 3.4',
77 'Framework :: Django',
78 'Topic :: Internet :: WWW/HTTP :: Site Management',
79 ],
80 install_requires=install_requires,
81 entry_points="""
82 [console_scripts]
83 wagtail=wagtail.bin.wagtail:main
84 """,
85 zip_safe=False,
86 )
87
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -71,7 +71,6 @@
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -71,7 +71,6 @@\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n- 'Programming Language :: Python :: 3.2',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Framework :: Django',\n", "issue": "Dropping Python 3.2 support\nPython 3.2 is quite old and many projects are dropping support for it (`libsass` and `treebeard` both have already). Should we consider dropping support as well?\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\nimport sys, os\n\nfrom wagtail.wagtailcore import __version__\n\n\ntry:\n from setuptools import setup, find_packages\nexcept ImportError:\n from distutils.core import setup\n\n\n# Hack to prevent \"TypeError: 'NoneType' object is not callable\" error\n# in multiprocessing/util.py _exit_function when setup.py exits\n# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)\ntry:\n import multiprocessing\nexcept ImportError:\n pass\n\n\n# Disable parallel builds, because Pillow 2.5.3 does some crazy monkeypatching of\n# the build process on multicore systems, which breaks installation of libsass\nos.environ['MAX_CONCURRENCY'] = '1'\n\nPY3 = sys.version_info[0] == 3\n\n\ninstall_requires = [\n \"Django>=1.7.0,<1.8\",\n \"django-compressor>=1.4\",\n \"django-libsass>=0.2\",\n \"django-modelcluster>=0.4\",\n \"django-taggit==0.12.2\",\n \"django-treebeard==2.0\",\n \"Pillow>=2.6.1\",\n \"beautifulsoup4>=4.3.2\",\n \"html5lib==0.999\",\n \"Unidecode>=0.04.14\",\n \"six>=1.7.0\",\n 'requests>=2.0.0',\n \"Willow==0.1\",\n]\n\n\nif not PY3:\n install_requires += [\n \"unicodecsv>=0.9.4\"\n ]\n\n\nsetup(\n name='wagtail',\n version=__version__,\n description='A Django content management system focused on flexibility and user experience',\n author='Matthew Westcott',\n author_email='[email protected]',\n url='http://wagtail.io/',\n packages=find_packages(),\n include_package_data=True,\n license='BSD',\n long_description=open('README.rst').read(),\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.2',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Framework :: Django',\n 'Topic :: Internet :: WWW/HTTP :: Site Management',\n ],\n install_requires=install_requires,\n entry_points=\"\"\"\n [console_scripts]\n wagtail=wagtail.bin.wagtail:main\n \"\"\",\n zip_safe=False,\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\nimport sys, os\n\nfrom wagtail.wagtailcore import __version__\n\n\ntry:\n from setuptools import setup, find_packages\nexcept ImportError:\n from distutils.core import setup\n\n\n# Hack to prevent \"TypeError: 'NoneType' object is not callable\" error\n# in multiprocessing/util.py _exit_function when setup.py exits\n# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)\ntry:\n import multiprocessing\nexcept ImportError:\n pass\n\n\n# Disable parallel builds, because Pillow 2.5.3 does some crazy monkeypatching of\n# the build process on multicore systems, which breaks installation of 
libsass\nos.environ['MAX_CONCURRENCY'] = '1'\n\nPY3 = sys.version_info[0] == 3\n\n\ninstall_requires = [\n \"Django>=1.7.0,<1.8\",\n \"django-compressor>=1.4\",\n \"django-libsass>=0.2\",\n \"django-modelcluster>=0.4\",\n \"django-taggit==0.12.2\",\n \"django-treebeard==2.0\",\n \"Pillow>=2.6.1\",\n \"beautifulsoup4>=4.3.2\",\n \"html5lib==0.999\",\n \"Unidecode>=0.04.14\",\n \"six>=1.7.0\",\n 'requests>=2.0.0',\n \"Willow==0.1\",\n]\n\n\nif not PY3:\n install_requires += [\n \"unicodecsv>=0.9.4\"\n ]\n\n\nsetup(\n name='wagtail',\n version=__version__,\n description='A Django content management system focused on flexibility and user experience',\n author='Matthew Westcott',\n author_email='[email protected]',\n url='http://wagtail.io/',\n packages=find_packages(),\n include_package_data=True,\n license='BSD',\n long_description=open('README.rst').read(),\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Framework :: Django',\n 'Topic :: Internet :: WWW/HTTP :: Site Management',\n ],\n install_requires=install_requires,\n entry_points=\"\"\"\n [console_scripts]\n wagtail=wagtail.bin.wagtail:main\n \"\"\",\n zip_safe=False,\n)\n", "path": "setup.py"}]}
| 1,110 | 107 |
gh_patches_debug_8392
|
rasdani/github-patches
|
git_diff
|
PokemonGoF__PokemonGo-Bot-5122
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Incense being used when false
### Expected Behavior
Don't use Incense when it is set to false in the config
### Actual Behavior
The bot uses Incense even though it is set to false in the config
### Your FULL config.json (remove your username, password, gmapkey and any other private info)
http://pastebin.com/YEHMRMiE
### Output when issue occurred
[2016-09-02 15:43:55] [UseIncense] [INFO] [use_incense] Using Ordinary incense. 8 incense remaining
### Steps to Reproduce
Run bot with Incense false in config
### Other Information
OS: Linux
Branch: Dev
Git Commit: 1cc9da7a79c421f11a4b13359f6a6c1abfcd061a
Python Version: 2.7.12
Any other relevant files/configs (e.g. path files):
config.json
--- END ISSUE ---
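
A short sketch of the guard the report is asking for, trimmed to the one method that matters; the class below is a stand-in for illustration, not the bot's real task implementation:

```
import time


class UseIncenseSketch:
    def __init__(self, use_incense=False):
        self.use_incense = use_incense
        self.start_time = 0

    def _should_run(self):
        # Respect the config flag before any inventory or timing checks.
        if not self.use_incense:
            return False
        return self.start_time == 0 or time.time() - self.start_time >= 1800


print(UseIncenseSketch(use_incense=False)._should_run())  # prints False
```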
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pokemongo_bot/cell_workers/use_incense.py`
Content:
```
1 import time
2 from pokemongo_bot.base_task import BaseTask
3 from pokemongo_bot.worker_result import WorkerResult
4 from pokemongo_bot.item_list import Item
5 from pokemongo_bot import inventory
6
7 class UseIncense(BaseTask):
8 SUPPORTED_TASK_API_VERSION = 1
9
10 def initialize(self):
11 self.start_time = 0
12 self.use_incense = self.config.get('use_incense', False)
13 self.use_order = self.config.get('use_order', {})
14 self._update_inventory()
15
16 self.types = {
17 401: "Ordinary",
18 402: "Spicy",
19 403: "Cool",
20 404: "Floral"
21 }
22
23 def _get_type(self):
24 for order in self.use_order:
25 if order == "ordinary" and self.incense_ordinary_count > 0:
26 return Item.ITEM_INCENSE_ORDINARY.value
27 if order == "spicy" and self.incense_spicy_count > 0:
28 return Item.ITEM_INCENSE_SPICY.value
29 if order == "cool" and self.incense_cool_count > 0:
30 return Item.ITEM_INCENSE_COOL.value
31 if order == "floral" and self.incense_floral_count > 0:
32 return Item.ITEM_INCENSE_FLORAL.value
33
34 return Item.ITEM_INCENSE_ORDINARY.value
35
36 def _update_inventory(self):
37 self.incense_ordinary_count = inventory.items().get(Item.ITEM_INCENSE_ORDINARY.value).count
38 self.incense_spicy_count = inventory.items().get(Item.ITEM_INCENSE_SPICY.value).count
39 self.incense_cool_count = inventory.items().get(Item.ITEM_INCENSE_COOL.value).count
40 self.incense_floral_count = inventory.items().get(Item.ITEM_INCENSE_FLORAL.value).count
41
42 def _has_count(self):
43 return self.incense_ordinary_count > 0 or self.incense_spicy_count > 0 or self.incense_cool_count > 0 or self.incense_floral_count > 0
44
45 def _should_run(self):
46 if self._has_count() > 0 and self.start_time == 0:
47 return True
48
49 using_incense = time.time() - self.start_time < 1800
50 if not using_incense:
51 self._update_inventory()
52 if self._has_count() and self.use_incense:
53 return True
54
55 def work(self):
56 if self._should_run():
57 self.start_time = time.time()
58 type = self._get_type()
59 response_dict = self.bot.api.use_incense(incense_type=type)
60 result = response_dict.get('responses', {}).get('USE_INCENSE', {}).get('result', 0)
61 if result is 1:
62 self.emit_event(
63 'use_incense',
64 formatted="Using {type} incense. {incense_count} incense remaining",
65 data={
66 'type': self.types.get(type, 'Unknown'),
67 'incense_count': inventory.items().get(type).count
68 }
69 )
70 else:
71 self.emit_event(
72 'use_incense',
73 formatted="Unable to use incense {type}. {incense_count} incense remaining",
74 data={
75 'type': self.types.get(type, 'Unknown'),
76 'incense_count': inventory.items().get(type).count
77 }
78 )
79
80 return WorkerResult.SUCCESS
81
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pokemongo_bot/cell_workers/use_incense.py b/pokemongo_bot/cell_workers/use_incense.py
--- a/pokemongo_bot/cell_workers/use_incense.py
+++ b/pokemongo_bot/cell_workers/use_incense.py
@@ -42,7 +42,10 @@
def _has_count(self):
return self.incense_ordinary_count > 0 or self.incense_spicy_count > 0 or self.incense_cool_count > 0 or self.incense_floral_count > 0
- def _should_run(self):
+ def _should_run(self):
+ if not self.use_incense:
+ return False
+
if self._has_count() > 0 and self.start_time == 0:
return True
|
{"golden_diff": "diff --git a/pokemongo_bot/cell_workers/use_incense.py b/pokemongo_bot/cell_workers/use_incense.py\n--- a/pokemongo_bot/cell_workers/use_incense.py\n+++ b/pokemongo_bot/cell_workers/use_incense.py\n@@ -42,7 +42,10 @@\n def _has_count(self):\n return self.incense_ordinary_count > 0 or self.incense_spicy_count > 0 or self.incense_cool_count > 0 or self.incense_floral_count > 0\n \n- def _should_run(self): \n+ def _should_run(self):\n+ if not self.use_incense:\n+ return False\n+\n if self._has_count() > 0 and self.start_time == 0:\n return True\n", "issue": "Incense being used when false\n### Expected Behavior\n\nDon't use Incense when set to false in config\n### Actual Behavior\n\nBot using incense when set to false in config\n### Your FULL config.json (remove your username, password, gmapkey and any other private info)\n\nhttp://pastebin.com/YEHMRMiE\n### Output when issue occurred\n\n[2016-09-02 15:43:55] [UseIncense] [INFO] [use_incense] Using Ordinary incense. 8 incense remaining\n### Steps to Reproduce\n\nRun bot with Incense false in config\n### Other Information\n\nOS: Linux\nBranch: Dev\nGit Commit: 1cc9da7a79c421f11a4b13359f6a6c1abfcd061a\nPython Version: 2.7.12\nAny other relevant files/configs (eg: path files) \nconfig.json\n\n", "before_files": [{"content": "import time\nfrom pokemongo_bot.base_task import BaseTask\nfrom pokemongo_bot.worker_result import WorkerResult\nfrom pokemongo_bot.item_list import Item\nfrom pokemongo_bot import inventory\n\nclass UseIncense(BaseTask):\n SUPPORTED_TASK_API_VERSION = 1\n\n def initialize(self):\n self.start_time = 0\n self.use_incense = self.config.get('use_incense', False)\n self.use_order = self.config.get('use_order', {})\n self._update_inventory()\n \n self.types = {\n 401: \"Ordinary\",\n 402: \"Spicy\",\n 403: \"Cool\",\n 404: \"Floral\"\n }\n \n def _get_type(self):\n for order in self.use_order:\n if order == \"ordinary\" and self.incense_ordinary_count > 0:\n return Item.ITEM_INCENSE_ORDINARY.value\n if order == \"spicy\" and self.incense_spicy_count > 0:\n return Item.ITEM_INCENSE_SPICY.value\n if order == \"cool\" and self.incense_cool_count > 0:\n return Item.ITEM_INCENSE_COOL.value\n if order == \"floral\" and self.incense_floral_count > 0:\n return Item.ITEM_INCENSE_FLORAL.value\n \n return Item.ITEM_INCENSE_ORDINARY.value \n \n def _update_inventory(self):\n self.incense_ordinary_count = inventory.items().get(Item.ITEM_INCENSE_ORDINARY.value).count \n self.incense_spicy_count = inventory.items().get(Item.ITEM_INCENSE_SPICY.value).count\n self.incense_cool_count = inventory.items().get(Item.ITEM_INCENSE_COOL.value).count \n self.incense_floral_count = inventory.items().get(Item.ITEM_INCENSE_FLORAL.value).count \n \n def _has_count(self):\n return self.incense_ordinary_count > 0 or self.incense_spicy_count > 0 or self.incense_cool_count > 0 or self.incense_floral_count > 0\n \n def _should_run(self): \n if self._has_count() > 0 and self.start_time == 0:\n return True \n \n using_incense = time.time() - self.start_time < 1800\n if not using_incense: \n self._update_inventory()\n if self._has_count() and self.use_incense:\n return True\n\n def work(self):\n if self._should_run():\n self.start_time = time.time()\n type = self._get_type() \n response_dict = self.bot.api.use_incense(incense_type=type)\n result = response_dict.get('responses', {}).get('USE_INCENSE', {}).get('result', 0)\n if result is 1:\n self.emit_event(\n 'use_incense',\n formatted=\"Using {type} incense. 
{incense_count} incense remaining\",\n data={\n 'type': self.types.get(type, 'Unknown'),\n 'incense_count': inventory.items().get(type).count\n }\n )\n else:\n self.emit_event(\n 'use_incense',\n formatted=\"Unable to use incense {type}. {incense_count} incense remaining\",\n data={\n 'type': self.types.get(type, 'Unknown'),\n 'incense_count': inventory.items().get(type).count\n }\n )\n \n return WorkerResult.SUCCESS\n", "path": "pokemongo_bot/cell_workers/use_incense.py"}], "after_files": [{"content": "import time\nfrom pokemongo_bot.base_task import BaseTask\nfrom pokemongo_bot.worker_result import WorkerResult\nfrom pokemongo_bot.item_list import Item\nfrom pokemongo_bot import inventory\n\nclass UseIncense(BaseTask):\n SUPPORTED_TASK_API_VERSION = 1\n\n def initialize(self):\n self.start_time = 0\n self.use_incense = self.config.get('use_incense', False)\n self.use_order = self.config.get('use_order', {})\n self._update_inventory()\n \n self.types = {\n 401: \"Ordinary\",\n 402: \"Spicy\",\n 403: \"Cool\",\n 404: \"Floral\"\n }\n \n def _get_type(self):\n for order in self.use_order:\n if order == \"ordinary\" and self.incense_ordinary_count > 0:\n return Item.ITEM_INCENSE_ORDINARY.value\n if order == \"spicy\" and self.incense_spicy_count > 0:\n return Item.ITEM_INCENSE_SPICY.value\n if order == \"cool\" and self.incense_cool_count > 0:\n return Item.ITEM_INCENSE_COOL.value\n if order == \"floral\" and self.incense_floral_count > 0:\n return Item.ITEM_INCENSE_FLORAL.value\n \n return Item.ITEM_INCENSE_ORDINARY.value \n \n def _update_inventory(self):\n self.incense_ordinary_count = inventory.items().get(Item.ITEM_INCENSE_ORDINARY.value).count \n self.incense_spicy_count = inventory.items().get(Item.ITEM_INCENSE_SPICY.value).count\n self.incense_cool_count = inventory.items().get(Item.ITEM_INCENSE_COOL.value).count \n self.incense_floral_count = inventory.items().get(Item.ITEM_INCENSE_FLORAL.value).count \n \n def _has_count(self):\n return self.incense_ordinary_count > 0 or self.incense_spicy_count > 0 or self.incense_cool_count > 0 or self.incense_floral_count > 0\n \n def _should_run(self):\n if not self.use_incense:\n return False\n\n if self._has_count() > 0 and self.start_time == 0:\n return True \n \n using_incense = time.time() - self.start_time < 1800\n if not using_incense: \n self._update_inventory()\n if self._has_count() and self.use_incense:\n return True\n\n def work(self):\n if self._should_run():\n self.start_time = time.time()\n type = self._get_type() \n response_dict = self.bot.api.use_incense(incense_type=type)\n result = response_dict.get('responses', {}).get('USE_INCENSE', {}).get('result', 0)\n if result is 1:\n self.emit_event(\n 'use_incense',\n formatted=\"Using {type} incense. {incense_count} incense remaining\",\n data={\n 'type': self.types.get(type, 'Unknown'),\n 'incense_count': inventory.items().get(type).count\n }\n )\n else:\n self.emit_event(\n 'use_incense',\n formatted=\"Unable to use incense {type}. {incense_count} incense remaining\",\n data={\n 'type': self.types.get(type, 'Unknown'),\n 'incense_count': inventory.items().get(type).count\n }\n )\n \n return WorkerResult.SUCCESS\n", "path": "pokemongo_bot/cell_workers/use_incense.py"}]}
| 1,378 | 177 |
gh_patches_debug_32415
|
rasdani/github-patches
|
git_diff
|
vllm-project__vllm-4368
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Feature]: Cannot use FlashAttention backend for Volta and Turing GPUs. (but FlashAttention v1.0.9 supports Turing GPU.)
### 🚀 The feature, motivation and pitch
Turing GPUs can use FlashAttention v1.0.9, which reduces VRAM usage significantly.
FlashAttention actually has no plan to support Turing GPUs in FlashAttention v2.
So please support FlashAttention v1.0.9. Thanks a lot!
Many users with 8*2080ti machines need this help.
### Alternatives
_No response_
### Additional context
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `vllm/attention/selector.py`
Content:
```
1 import enum
2 import os
3 from functools import lru_cache
4 from typing import Type
5
6 import torch
7
8 from vllm.attention.backends.abstract import AttentionBackend
9 from vllm.logger import init_logger
10 from vllm.utils import is_cpu, is_hip
11
12 logger = init_logger(__name__)
13
14 VLLM_ATTENTION_BACKEND = "VLLM_ATTENTION_BACKEND"
15
16
17 class _Backend(enum.Enum):
18 FLASH_ATTN = enum.auto()
19 XFORMERS = enum.auto()
20 ROCM_FLASH = enum.auto()
21 TORCH_SDPA = enum.auto()
22
23
24 @lru_cache(maxsize=None)
25 def get_attn_backend(dtype: torch.dtype) -> Type[AttentionBackend]:
26 backend = _which_attn_to_use(dtype)
27 if backend == _Backend.FLASH_ATTN:
28 logger.info("Using FlashAttention backend.")
29 from vllm.attention.backends.flash_attn import ( # noqa: F401
30 FlashAttentionBackend)
31 return FlashAttentionBackend
32 elif backend == _Backend.XFORMERS:
33 logger.info("Using XFormers backend.")
34 from vllm.attention.backends.xformers import ( # noqa: F401
35 XFormersBackend)
36 return XFormersBackend
37 elif backend == _Backend.ROCM_FLASH:
38 logger.info("Using ROCmFlashAttention backend.")
39 from vllm.attention.backends.rocm_flash_attn import ( # noqa: F401
40 ROCmFlashAttentionBackend)
41 return ROCmFlashAttentionBackend
42 elif backend == _Backend.TORCH_SDPA:
43 logger.info("Using Torch SDPA backend.")
44 from vllm.attention.backends.torch_sdpa import TorchSDPABackend
45 return TorchSDPABackend
46 else:
47 raise ValueError("Invalid attention backend.")
48
49
50 def _which_attn_to_use(dtype: torch.dtype) -> _Backend:
51 """Returns which flash attention backend to use."""
52 if is_cpu():
53 return _Backend.TORCH_SDPA
54
55 if is_hip():
56 # AMD GPUs.
57 if torch.cuda.get_device_capability()[0] != 9:
58 # not Instinct series GPUs.
59 logger.info("flash_atten is not supported on NAVI GPUs.")
60 return _Backend.ROCM_FLASH
61
62 # NVIDIA GPUs.
63 if torch.cuda.get_device_capability()[0] < 8:
64 # Volta and Turing NVIDIA GPUs.
65 logger.info("Cannot use FlashAttention backend for Volta and Turing "
66 "GPUs.")
67 return _Backend.XFORMERS
68
69 if dtype not in (torch.float16, torch.bfloat16):
70 logger.info("Cannot use FlashAttention backend for dtype other than "
71 "torch.float16 or torch.bfloat16.")
72 return _Backend.XFORMERS
73
74 try:
75 import flash_attn # noqa: F401
76 except ImportError:
77 logger.info(
78 "Cannot use FlashAttention backend because the flash_attn package "
79 "is not found. Please install it for better performance.")
80 return _Backend.XFORMERS
81
82 backend_by_env_var = os.getenv(VLLM_ATTENTION_BACKEND)
83 if backend_by_env_var is not None:
84 return _Backend[backend_by_env_var]
85
86 # Default case.
87 return _Backend.FLASH_ATTN
88
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/vllm/attention/selector.py b/vllm/attention/selector.py
--- a/vllm/attention/selector.py
+++ b/vllm/attention/selector.py
@@ -25,7 +25,7 @@
def get_attn_backend(dtype: torch.dtype) -> Type[AttentionBackend]:
backend = _which_attn_to_use(dtype)
if backend == _Backend.FLASH_ATTN:
- logger.info("Using FlashAttention backend.")
+ logger.info("Using FlashAttention-2 backend.")
from vllm.attention.backends.flash_attn import ( # noqa: F401
FlashAttentionBackend)
return FlashAttentionBackend
@@ -62,12 +62,12 @@
# NVIDIA GPUs.
if torch.cuda.get_device_capability()[0] < 8:
# Volta and Turing NVIDIA GPUs.
- logger.info("Cannot use FlashAttention backend for Volta and Turing "
+ logger.info("Cannot use FlashAttention-2 backend for Volta and Turing "
"GPUs.")
return _Backend.XFORMERS
if dtype not in (torch.float16, torch.bfloat16):
- logger.info("Cannot use FlashAttention backend for dtype other than "
+ logger.info("Cannot use FlashAttention-2 backend for dtype other than "
"torch.float16 or torch.bfloat16.")
return _Backend.XFORMERS
@@ -75,8 +75,8 @@
import flash_attn # noqa: F401
except ImportError:
logger.info(
- "Cannot use FlashAttention backend because the flash_attn package "
- "is not found. Please install it for better performance.")
+ "Cannot use FlashAttention-2 backend because the flash_attn "
+ "package is not found. Please install it for better performance.")
return _Backend.XFORMERS
backend_by_env_var = os.getenv(VLLM_ATTENTION_BACKEND)
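
As background for the diff above, the backend decision rests on `torch.cuda.get_device_capability()`. The snippet below is an illustrative sketch only (not part of the patch); it assumes a PyTorch build with CUDA support and simply mirrors the capability check that `_which_attn_to_use()` performs.

```python
import torch

def describe_flash_attn_support() -> str:
    # Illustrative only: mirrors the compute-capability check used by _which_attn_to_use().
    if not torch.cuda.is_available():
        return "No CUDA device available; the CPU (Torch SDPA) path applies."
    major, minor = torch.cuda.get_device_capability()
    if major < 8:
        # Volta (7.0) and Turing (7.5) GPUs: FlashAttention-2 is unsupported, xFormers is used.
        return f"Compute capability {major}.{minor}: falls back to xFormers."
    return f"Compute capability {major}.{minor}: eligible for FlashAttention-2."

print(describe_flash_attn_support())
```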
|
{"golden_diff": "diff --git a/vllm/attention/selector.py b/vllm/attention/selector.py\n--- a/vllm/attention/selector.py\n+++ b/vllm/attention/selector.py\n@@ -25,7 +25,7 @@\n def get_attn_backend(dtype: torch.dtype) -> Type[AttentionBackend]:\n backend = _which_attn_to_use(dtype)\n if backend == _Backend.FLASH_ATTN:\n- logger.info(\"Using FlashAttention backend.\")\n+ logger.info(\"Using FlashAttention-2 backend.\")\n from vllm.attention.backends.flash_attn import ( # noqa: F401\n FlashAttentionBackend)\n return FlashAttentionBackend\n@@ -62,12 +62,12 @@\n # NVIDIA GPUs.\n if torch.cuda.get_device_capability()[0] < 8:\n # Volta and Turing NVIDIA GPUs.\n- logger.info(\"Cannot use FlashAttention backend for Volta and Turing \"\n+ logger.info(\"Cannot use FlashAttention-2 backend for Volta and Turing \"\n \"GPUs.\")\n return _Backend.XFORMERS\n \n if dtype not in (torch.float16, torch.bfloat16):\n- logger.info(\"Cannot use FlashAttention backend for dtype other than \"\n+ logger.info(\"Cannot use FlashAttention-2 backend for dtype other than \"\n \"torch.float16 or torch.bfloat16.\")\n return _Backend.XFORMERS\n \n@@ -75,8 +75,8 @@\n import flash_attn # noqa: F401\n except ImportError:\n logger.info(\n- \"Cannot use FlashAttention backend because the flash_attn package \"\n- \"is not found. Please install it for better performance.\")\n+ \"Cannot use FlashAttention-2 backend because the flash_attn \"\n+ \"package is not found. Please install it for better performance.\")\n return _Backend.XFORMERS\n \n backend_by_env_var = os.getenv(VLLM_ATTENTION_BACKEND)\n", "issue": "[Feature]: Cannot use FlashAttention backend for Volta and Turing GPUs. (but FlashAttention v1.0.9 supports Turing GPU.)\n### \ud83d\ude80 The feature, motivation and pitch\r\n\r\nTuring GPU can use FlashAttention v1.0.9 which can reduce use of vram significantly.\r\n\r\nFlashAttention has no plan to support Turing GPU in FlashAttention v2 actually.\r\nso please support FlashAttention v1.0.9. 
thanks a lot!\r\n\r\nmany friends having 8*2080ti need this help.\r\n\r\n### Alternatives\r\n\r\n_No response_\r\n\r\n### Additional context\r\n\r\n_No response_\n", "before_files": [{"content": "import enum\nimport os\nfrom functools import lru_cache\nfrom typing import Type\n\nimport torch\n\nfrom vllm.attention.backends.abstract import AttentionBackend\nfrom vllm.logger import init_logger\nfrom vllm.utils import is_cpu, is_hip\n\nlogger = init_logger(__name__)\n\nVLLM_ATTENTION_BACKEND = \"VLLM_ATTENTION_BACKEND\"\n\n\nclass _Backend(enum.Enum):\n FLASH_ATTN = enum.auto()\n XFORMERS = enum.auto()\n ROCM_FLASH = enum.auto()\n TORCH_SDPA = enum.auto()\n\n\n@lru_cache(maxsize=None)\ndef get_attn_backend(dtype: torch.dtype) -> Type[AttentionBackend]:\n backend = _which_attn_to_use(dtype)\n if backend == _Backend.FLASH_ATTN:\n logger.info(\"Using FlashAttention backend.\")\n from vllm.attention.backends.flash_attn import ( # noqa: F401\n FlashAttentionBackend)\n return FlashAttentionBackend\n elif backend == _Backend.XFORMERS:\n logger.info(\"Using XFormers backend.\")\n from vllm.attention.backends.xformers import ( # noqa: F401\n XFormersBackend)\n return XFormersBackend\n elif backend == _Backend.ROCM_FLASH:\n logger.info(\"Using ROCmFlashAttention backend.\")\n from vllm.attention.backends.rocm_flash_attn import ( # noqa: F401\n ROCmFlashAttentionBackend)\n return ROCmFlashAttentionBackend\n elif backend == _Backend.TORCH_SDPA:\n logger.info(\"Using Torch SDPA backend.\")\n from vllm.attention.backends.torch_sdpa import TorchSDPABackend\n return TorchSDPABackend\n else:\n raise ValueError(\"Invalid attention backend.\")\n\n\ndef _which_attn_to_use(dtype: torch.dtype) -> _Backend:\n \"\"\"Returns which flash attention backend to use.\"\"\"\n if is_cpu():\n return _Backend.TORCH_SDPA\n\n if is_hip():\n # AMD GPUs.\n if torch.cuda.get_device_capability()[0] != 9:\n # not Instinct series GPUs.\n logger.info(\"flash_atten is not supported on NAVI GPUs.\")\n return _Backend.ROCM_FLASH\n\n # NVIDIA GPUs.\n if torch.cuda.get_device_capability()[0] < 8:\n # Volta and Turing NVIDIA GPUs.\n logger.info(\"Cannot use FlashAttention backend for Volta and Turing \"\n \"GPUs.\")\n return _Backend.XFORMERS\n\n if dtype not in (torch.float16, torch.bfloat16):\n logger.info(\"Cannot use FlashAttention backend for dtype other than \"\n \"torch.float16 or torch.bfloat16.\")\n return _Backend.XFORMERS\n\n try:\n import flash_attn # noqa: F401\n except ImportError:\n logger.info(\n \"Cannot use FlashAttention backend because the flash_attn package \"\n \"is not found. 
Please install it for better performance.\")\n return _Backend.XFORMERS\n\n backend_by_env_var = os.getenv(VLLM_ATTENTION_BACKEND)\n if backend_by_env_var is not None:\n return _Backend[backend_by_env_var]\n\n # Default case.\n return _Backend.FLASH_ATTN\n", "path": "vllm/attention/selector.py"}], "after_files": [{"content": "import enum\nimport os\nfrom functools import lru_cache\nfrom typing import Type\n\nimport torch\n\nfrom vllm.attention.backends.abstract import AttentionBackend\nfrom vllm.logger import init_logger\nfrom vllm.utils import is_cpu, is_hip\n\nlogger = init_logger(__name__)\n\nVLLM_ATTENTION_BACKEND = \"VLLM_ATTENTION_BACKEND\"\n\n\nclass _Backend(enum.Enum):\n FLASH_ATTN = enum.auto()\n XFORMERS = enum.auto()\n ROCM_FLASH = enum.auto()\n TORCH_SDPA = enum.auto()\n\n\n@lru_cache(maxsize=None)\ndef get_attn_backend(dtype: torch.dtype) -> Type[AttentionBackend]:\n backend = _which_attn_to_use(dtype)\n if backend == _Backend.FLASH_ATTN:\n logger.info(\"Using FlashAttention-2 backend.\")\n from vllm.attention.backends.flash_attn import ( # noqa: F401\n FlashAttentionBackend)\n return FlashAttentionBackend\n elif backend == _Backend.XFORMERS:\n logger.info(\"Using XFormers backend.\")\n from vllm.attention.backends.xformers import ( # noqa: F401\n XFormersBackend)\n return XFormersBackend\n elif backend == _Backend.ROCM_FLASH:\n logger.info(\"Using ROCmFlashAttention backend.\")\n from vllm.attention.backends.rocm_flash_attn import ( # noqa: F401\n ROCmFlashAttentionBackend)\n return ROCmFlashAttentionBackend\n elif backend == _Backend.TORCH_SDPA:\n logger.info(\"Using Torch SDPA backend.\")\n from vllm.attention.backends.torch_sdpa import TorchSDPABackend\n return TorchSDPABackend\n else:\n raise ValueError(\"Invalid attention backend.\")\n\n\ndef _which_attn_to_use(dtype: torch.dtype) -> _Backend:\n \"\"\"Returns which flash attention backend to use.\"\"\"\n if is_cpu():\n return _Backend.TORCH_SDPA\n\n if is_hip():\n # AMD GPUs.\n if torch.cuda.get_device_capability()[0] != 9:\n # not Instinct series GPUs.\n logger.info(\"flash_atten is not supported on NAVI GPUs.\")\n return _Backend.ROCM_FLASH\n\n # NVIDIA GPUs.\n if torch.cuda.get_device_capability()[0] < 8:\n # Volta and Turing NVIDIA GPUs.\n logger.info(\"Cannot use FlashAttention-2 backend for Volta and Turing \"\n \"GPUs.\")\n return _Backend.XFORMERS\n\n if dtype not in (torch.float16, torch.bfloat16):\n logger.info(\"Cannot use FlashAttention-2 backend for dtype other than \"\n \"torch.float16 or torch.bfloat16.\")\n return _Backend.XFORMERS\n\n try:\n import flash_attn # noqa: F401\n except ImportError:\n logger.info(\n \"Cannot use FlashAttention-2 backend because the flash_attn \"\n \"package is not found. Please install it for better performance.\")\n return _Backend.XFORMERS\n\n backend_by_env_var = os.getenv(VLLM_ATTENTION_BACKEND)\n if backend_by_env_var is not None:\n return _Backend[backend_by_env_var]\n\n # Default case.\n return _Backend.FLASH_ATTN\n", "path": "vllm/attention/selector.py"}]}
| 1,268 | 426 |
gh_patches_debug_22037
|
rasdani/github-patches
|
git_diff
|
netbox-community__netbox-9826
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add Contacts field to Virtual Machines table view
### NetBox version
v3.2.7
### Feature type
Change to existing functionality
### Proposed functionality
I would suggest adding a Contacts field to the Virtual Machines table view/export, similar to what we have in Devices.
Currently, in Devices, the "Configure Table" dialog makes it possible to select "Contacts" as a column, but that column is not available in Virtual Machines.
### Use case
When browsing through or exporting Virtual Machines it would be nice to be able to see who the owner/contact is.
### Database changes
_No response_
### External dependencies
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `netbox/virtualization/tables/virtualmachines.py`
Content:
```
1 import django_tables2 as tables
2
3 from dcim.tables.devices import BaseInterfaceTable
4 from netbox.tables import NetBoxTable, columns
5 from tenancy.tables import TenancyColumnsMixin
6 from virtualization.models import VirtualMachine, VMInterface
7
8 __all__ = (
9 'VirtualMachineTable',
10 'VirtualMachineVMInterfaceTable',
11 'VMInterfaceTable',
12 )
13
14 VMINTERFACE_BUTTONS = """
15 {% if perms.ipam.add_ipaddress %}
16 <a href="{% url 'ipam:ipaddress_add' %}?vminterface={{ record.pk }}&return_url={% url 'virtualization:virtualmachine_interfaces' pk=object.pk %}" class="btn btn-sm btn-success" title="Add IP Address">
17 <i class="mdi mdi-plus-thick" aria-hidden="true"></i>
18 </a>
19 {% endif %}
20 """
21
22
23 #
24 # Virtual machines
25 #
26
27 class VirtualMachineTable(TenancyColumnsMixin, NetBoxTable):
28 name = tables.Column(
29 order_by=('_name',),
30 linkify=True
31 )
32 status = columns.ChoiceFieldColumn()
33 cluster = tables.Column(
34 linkify=True
35 )
36 role = columns.ColoredLabelColumn()
37 comments = columns.MarkdownColumn()
38 primary_ip4 = tables.Column(
39 linkify=True,
40 verbose_name='IPv4 Address'
41 )
42 primary_ip6 = tables.Column(
43 linkify=True,
44 verbose_name='IPv6 Address'
45 )
46 primary_ip = tables.Column(
47 linkify=True,
48 order_by=('primary_ip4', 'primary_ip6'),
49 verbose_name='IP Address'
50 )
51 tags = columns.TagColumn(
52 url_name='virtualization:virtualmachine_list'
53 )
54
55 class Meta(NetBoxTable.Meta):
56 model = VirtualMachine
57 fields = (
58 'pk', 'id', 'name', 'status', 'cluster', 'role', 'tenant', 'tenant_group', 'platform', 'vcpus', 'memory', 'disk',
59 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'tags', 'created', 'last_updated',
60 )
61 default_columns = (
62 'pk', 'name', 'status', 'cluster', 'role', 'tenant', 'vcpus', 'memory', 'disk', 'primary_ip',
63 )
64
65
66 #
67 # VM components
68 #
69
70 class VMInterfaceTable(BaseInterfaceTable):
71 virtual_machine = tables.Column(
72 linkify=True
73 )
74 name = tables.Column(
75 linkify=True
76 )
77 vrf = tables.Column(
78 linkify=True
79 )
80 contacts = columns.ManyToManyColumn(
81 linkify_item=True
82 )
83 tags = columns.TagColumn(
84 url_name='virtualization:vminterface_list'
85 )
86
87 class Meta(NetBoxTable.Meta):
88 model = VMInterface
89 fields = (
90 'pk', 'id', 'name', 'virtual_machine', 'enabled', 'mac_address', 'mtu', 'mode', 'description', 'tags',
91 'vrf', 'ip_addresses', 'fhrp_groups', 'untagged_vlan', 'tagged_vlans', 'contacts', 'created',
92 'last_updated',
93 )
94 default_columns = ('pk', 'name', 'virtual_machine', 'enabled', 'description')
95
96
97 class VirtualMachineVMInterfaceTable(VMInterfaceTable):
98 parent = tables.Column(
99 linkify=True
100 )
101 bridge = tables.Column(
102 linkify=True
103 )
104 actions = columns.ActionsColumn(
105 actions=('edit', 'delete'),
106 extra_buttons=VMINTERFACE_BUTTONS
107 )
108
109 class Meta(NetBoxTable.Meta):
110 model = VMInterface
111 fields = (
112 'pk', 'id', 'name', 'enabled', 'parent', 'bridge', 'mac_address', 'mtu', 'mode', 'description', 'tags',
113 'ip_addresses', 'fhrp_groups', 'untagged_vlan', 'tagged_vlans', 'actions',
114 )
115 default_columns = ('pk', 'name', 'enabled', 'mac_address', 'mtu', 'mode', 'description', 'ip_addresses')
116 row_attrs = {
117 'data-name': lambda record: record.name,
118 }
119
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/netbox/virtualization/tables/virtualmachines.py b/netbox/virtualization/tables/virtualmachines.py
--- a/netbox/virtualization/tables/virtualmachines.py
+++ b/netbox/virtualization/tables/virtualmachines.py
@@ -48,6 +48,9 @@
order_by=('primary_ip4', 'primary_ip6'),
verbose_name='IP Address'
)
+ contacts = columns.ManyToManyColumn(
+ linkify_item=True
+ )
tags = columns.TagColumn(
url_name='virtualization:virtualmachine_list'
)
@@ -56,7 +59,7 @@
model = VirtualMachine
fields = (
'pk', 'id', 'name', 'status', 'cluster', 'role', 'tenant', 'tenant_group', 'platform', 'vcpus', 'memory', 'disk',
- 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'tags', 'created', 'last_updated',
+ 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'contacts', 'tags', 'created', 'last_updated',
)
default_columns = (
'pk', 'name', 'status', 'cluster', 'role', 'tenant', 'vcpus', 'memory', 'disk', 'primary_ip',
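
The added column reuses the `ManyToManyColumn` pattern already present on `VMInterfaceTable` in the same file. The snippet below is an isolated, illustrative sketch of that pattern; the table name and trimmed field list are made up for illustration and are not NetBox code.

```python
import django_tables2 as tables

from netbox.tables import NetBoxTable, columns
from virtualization.models import VirtualMachine

# Illustrative only: a stripped-down table showing the contacts column in isolation.
class VirtualMachineContactsTable(NetBoxTable):
    name = tables.Column(linkify=True)
    # linkify_item=True renders each related contact as a link,
    # matching the column the patch adds to VirtualMachineTable.
    contacts = columns.ManyToManyColumn(linkify_item=True)

    class Meta(NetBoxTable.Meta):
        model = VirtualMachine
        fields = ("pk", "name", "contacts")
        default_columns = ("pk", "name", "contacts")
```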
|
{"golden_diff": "diff --git a/netbox/virtualization/tables/virtualmachines.py b/netbox/virtualization/tables/virtualmachines.py\n--- a/netbox/virtualization/tables/virtualmachines.py\n+++ b/netbox/virtualization/tables/virtualmachines.py\n@@ -48,6 +48,9 @@\n order_by=('primary_ip4', 'primary_ip6'),\n verbose_name='IP Address'\n )\n+ contacts = columns.ManyToManyColumn(\n+ linkify_item=True\n+ )\n tags = columns.TagColumn(\n url_name='virtualization:virtualmachine_list'\n )\n@@ -56,7 +59,7 @@\n model = VirtualMachine\n fields = (\n 'pk', 'id', 'name', 'status', 'cluster', 'role', 'tenant', 'tenant_group', 'platform', 'vcpus', 'memory', 'disk',\n- 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'tags', 'created', 'last_updated',\n+ 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'contacts', 'tags', 'created', 'last_updated',\n )\n default_columns = (\n 'pk', 'name', 'status', 'cluster', 'role', 'tenant', 'vcpus', 'memory', 'disk', 'primary_ip',\n", "issue": "Add Contacts field to Virtual Machines table view\n### NetBox version\n\nv3.2.7\n\n### Feature type\n\nChange to existing functionality\n\n### Proposed functionality\n\nI would suggest to add contacts field to Virtual Machines table view/export, similarly to what we have in Devices. \r\nCurrently in Devices in the \"Configure Table\" it's possible to select \"Contacts\" as a column, but it's not available in Virtual Machines. \n\n### Use case\n\nWhen browsing through or exporting Virtual Machines it would be nice to be able to see who the owner/contact is. \n\n### Database changes\n\n_No response_\n\n### External dependencies\n\n_No response_\n", "before_files": [{"content": "import django_tables2 as tables\n\nfrom dcim.tables.devices import BaseInterfaceTable\nfrom netbox.tables import NetBoxTable, columns\nfrom tenancy.tables import TenancyColumnsMixin\nfrom virtualization.models import VirtualMachine, VMInterface\n\n__all__ = (\n 'VirtualMachineTable',\n 'VirtualMachineVMInterfaceTable',\n 'VMInterfaceTable',\n)\n\nVMINTERFACE_BUTTONS = \"\"\"\n{% if perms.ipam.add_ipaddress %}\n <a href=\"{% url 'ipam:ipaddress_add' %}?vminterface={{ record.pk }}&return_url={% url 'virtualization:virtualmachine_interfaces' pk=object.pk %}\" class=\"btn btn-sm btn-success\" title=\"Add IP Address\">\n <i class=\"mdi mdi-plus-thick\" aria-hidden=\"true\"></i>\n </a>\n{% endif %}\n\"\"\"\n\n\n#\n# Virtual machines\n#\n\nclass VirtualMachineTable(TenancyColumnsMixin, NetBoxTable):\n name = tables.Column(\n order_by=('_name',),\n linkify=True\n )\n status = columns.ChoiceFieldColumn()\n cluster = tables.Column(\n linkify=True\n )\n role = columns.ColoredLabelColumn()\n comments = columns.MarkdownColumn()\n primary_ip4 = tables.Column(\n linkify=True,\n verbose_name='IPv4 Address'\n )\n primary_ip6 = tables.Column(\n linkify=True,\n verbose_name='IPv6 Address'\n )\n primary_ip = tables.Column(\n linkify=True,\n order_by=('primary_ip4', 'primary_ip6'),\n verbose_name='IP Address'\n )\n tags = columns.TagColumn(\n url_name='virtualization:virtualmachine_list'\n )\n\n class Meta(NetBoxTable.Meta):\n model = VirtualMachine\n fields = (\n 'pk', 'id', 'name', 'status', 'cluster', 'role', 'tenant', 'tenant_group', 'platform', 'vcpus', 'memory', 'disk',\n 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'tags', 'created', 'last_updated',\n )\n default_columns = (\n 'pk', 'name', 'status', 'cluster', 'role', 'tenant', 'vcpus', 'memory', 'disk', 'primary_ip',\n )\n\n\n#\n# VM components\n#\n\nclass VMInterfaceTable(BaseInterfaceTable):\n 
virtual_machine = tables.Column(\n linkify=True\n )\n name = tables.Column(\n linkify=True\n )\n vrf = tables.Column(\n linkify=True\n )\n contacts = columns.ManyToManyColumn(\n linkify_item=True\n )\n tags = columns.TagColumn(\n url_name='virtualization:vminterface_list'\n )\n\n class Meta(NetBoxTable.Meta):\n model = VMInterface\n fields = (\n 'pk', 'id', 'name', 'virtual_machine', 'enabled', 'mac_address', 'mtu', 'mode', 'description', 'tags',\n 'vrf', 'ip_addresses', 'fhrp_groups', 'untagged_vlan', 'tagged_vlans', 'contacts', 'created',\n 'last_updated',\n )\n default_columns = ('pk', 'name', 'virtual_machine', 'enabled', 'description')\n\n\nclass VirtualMachineVMInterfaceTable(VMInterfaceTable):\n parent = tables.Column(\n linkify=True\n )\n bridge = tables.Column(\n linkify=True\n )\n actions = columns.ActionsColumn(\n actions=('edit', 'delete'),\n extra_buttons=VMINTERFACE_BUTTONS\n )\n\n class Meta(NetBoxTable.Meta):\n model = VMInterface\n fields = (\n 'pk', 'id', 'name', 'enabled', 'parent', 'bridge', 'mac_address', 'mtu', 'mode', 'description', 'tags',\n 'ip_addresses', 'fhrp_groups', 'untagged_vlan', 'tagged_vlans', 'actions',\n )\n default_columns = ('pk', 'name', 'enabled', 'mac_address', 'mtu', 'mode', 'description', 'ip_addresses')\n row_attrs = {\n 'data-name': lambda record: record.name,\n }\n", "path": "netbox/virtualization/tables/virtualmachines.py"}], "after_files": [{"content": "import django_tables2 as tables\n\nfrom dcim.tables.devices import BaseInterfaceTable\nfrom netbox.tables import NetBoxTable, columns\nfrom tenancy.tables import TenancyColumnsMixin\nfrom virtualization.models import VirtualMachine, VMInterface\n\n__all__ = (\n 'VirtualMachineTable',\n 'VirtualMachineVMInterfaceTable',\n 'VMInterfaceTable',\n)\n\nVMINTERFACE_BUTTONS = \"\"\"\n{% if perms.ipam.add_ipaddress %}\n <a href=\"{% url 'ipam:ipaddress_add' %}?vminterface={{ record.pk }}&return_url={% url 'virtualization:virtualmachine_interfaces' pk=object.pk %}\" class=\"btn btn-sm btn-success\" title=\"Add IP Address\">\n <i class=\"mdi mdi-plus-thick\" aria-hidden=\"true\"></i>\n </a>\n{% endif %}\n\"\"\"\n\n\n#\n# Virtual machines\n#\n\nclass VirtualMachineTable(TenancyColumnsMixin, NetBoxTable):\n name = tables.Column(\n order_by=('_name',),\n linkify=True\n )\n status = columns.ChoiceFieldColumn()\n cluster = tables.Column(\n linkify=True\n )\n role = columns.ColoredLabelColumn()\n comments = columns.MarkdownColumn()\n primary_ip4 = tables.Column(\n linkify=True,\n verbose_name='IPv4 Address'\n )\n primary_ip6 = tables.Column(\n linkify=True,\n verbose_name='IPv6 Address'\n )\n primary_ip = tables.Column(\n linkify=True,\n order_by=('primary_ip4', 'primary_ip6'),\n verbose_name='IP Address'\n )\n contacts = columns.ManyToManyColumn(\n linkify_item=True\n )\n tags = columns.TagColumn(\n url_name='virtualization:virtualmachine_list'\n )\n\n class Meta(NetBoxTable.Meta):\n model = VirtualMachine\n fields = (\n 'pk', 'id', 'name', 'status', 'cluster', 'role', 'tenant', 'tenant_group', 'platform', 'vcpus', 'memory', 'disk',\n 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'contacts', 'tags', 'created', 'last_updated',\n )\n default_columns = (\n 'pk', 'name', 'status', 'cluster', 'role', 'tenant', 'vcpus', 'memory', 'disk', 'primary_ip',\n )\n\n\n#\n# VM components\n#\n\nclass VMInterfaceTable(BaseInterfaceTable):\n virtual_machine = tables.Column(\n linkify=True\n )\n name = tables.Column(\n linkify=True\n )\n vrf = tables.Column(\n linkify=True\n )\n contacts = columns.ManyToManyColumn(\n 
linkify_item=True\n )\n tags = columns.TagColumn(\n url_name='virtualization:vminterface_list'\n )\n\n class Meta(NetBoxTable.Meta):\n model = VMInterface\n fields = (\n 'pk', 'id', 'name', 'virtual_machine', 'enabled', 'mac_address', 'mtu', 'mode', 'description', 'tags',\n 'vrf', 'ip_addresses', 'fhrp_groups', 'untagged_vlan', 'tagged_vlans', 'contacts', 'created',\n 'last_updated',\n )\n default_columns = ('pk', 'name', 'virtual_machine', 'enabled', 'description')\n\n\nclass VirtualMachineVMInterfaceTable(VMInterfaceTable):\n parent = tables.Column(\n linkify=True\n )\n bridge = tables.Column(\n linkify=True\n )\n actions = columns.ActionsColumn(\n actions=('edit', 'delete'),\n extra_buttons=VMINTERFACE_BUTTONS\n )\n\n class Meta(NetBoxTable.Meta):\n model = VMInterface\n fields = (\n 'pk', 'id', 'name', 'enabled', 'parent', 'bridge', 'mac_address', 'mtu', 'mode', 'description', 'tags',\n 'ip_addresses', 'fhrp_groups', 'untagged_vlan', 'tagged_vlans', 'actions',\n )\n default_columns = ('pk', 'name', 'enabled', 'mac_address', 'mtu', 'mode', 'description', 'ip_addresses')\n row_attrs = {\n 'data-name': lambda record: record.name,\n }\n", "path": "netbox/virtualization/tables/virtualmachines.py"}]}
| 1,539 | 296 |
gh_patches_debug_35385
|
rasdani/github-patches
|
git_diff
|
svthalia__concrexit-3176
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Site administration option is shown for all users
### Describe the bug
I just noticed that the site administration button in the menu is shown for everyone (I think): https://github.com/svthalia/concrexit/blob/master/website/thaliawebsite/templates/base.html#L184
Clicking it results in a looped redirect, so it doesn't work either.

### How to reproduce
Steps to reproduce the behaviour:
1. Go to the website
2. Check the menu
### Expected behaviour
Do not show the menu item
### Screenshots
If applicable, add screenshots to help explain your problem.
### Additional context
Add any other context about the problem here.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/thaliawebsite/views.py`
Content:
```
1 """General views for the website."""
2
3 from django.contrib.admin.views.decorators import staff_member_required
4 from django.contrib.auth.views import LoginView, PasswordResetView
5 from django.http import HttpResponse, HttpResponseForbidden
6 from django.utils.decorators import method_decorator
7 from django.views.generic import ListView, TemplateView
8 from django.views.generic.base import View
9
10 from django_ratelimit.decorators import ratelimit
11
12
13 class IndexView(TemplateView):
14 template_name = "index.html"
15
16
17 @method_decorator(staff_member_required, "dispatch")
18 class TestCrashView(View):
19 """Test view to intentionally crash to test the error handling."""
20
21 def dispatch(self, request, *args, **kwargs) -> HttpResponse:
22 if not request.user.is_superuser:
23 return HttpResponseForbidden("This is not for you")
24 raise Exception("Test exception")
25
26
27 class PagedView(ListView):
28 """A ListView with automatic pagination."""
29
30 def get_context_data(self, **kwargs) -> dict:
31 context = super().get_context_data(**kwargs)
32 page = context["page_obj"].number
33 paginator = context["paginator"]
34
35 # Show the two pages before and after the current page
36 page_range_start = max(1, page - 2)
37 page_range_stop = min(page + 3, paginator.num_pages + 1)
38
39 # Add extra pages if we show less than 5 pages
40 page_range_start = min(page_range_start, page_range_stop - 5)
41 page_range_start = max(1, page_range_start)
42
43 # Add extra pages if we still show less than 5 pages
44 page_range_stop = max(page_range_stop, page_range_start + 5)
45 page_range_stop = min(page_range_stop, paginator.num_pages + 1)
46
47 page_range = range(page_range_start, page_range_stop)
48
49 context.update(
50 {
51 "page_range": page_range,
52 }
53 )
54
55 return context
56
57
58 class RateLimitedPasswordResetView(PasswordResetView):
59 @method_decorator(ratelimit(key="ip", rate="5/h"))
60 def post(self, request, *args, **kwargs):
61 return super().post(request, *args, **kwargs)
62
63
64 class RateLimitedLoginView(LoginView):
65 @method_decorator(ratelimit(key="ip", rate="30/h"))
66 @method_decorator(ratelimit(key="post:username", rate="30/h"))
67 def post(self, request, *args, **kwargs):
68 return super().post(request, *args, **kwargs)
69
70
71 def rate_limited_view(request, *args, **kwargs):
72 return HttpResponse("You are rate limited", status=429)
73
```
Path: `website/thaliawebsite/urls.py`
Content:
```
1 """Thalia's root URL Configuration.
2
3 The ``urlpatterns`` list routes URLs to views. For more information please see:
4 https://docs.djangoproject.com/en/dev/topics/http/urls/
5
6 Examples:
7 * Function views
8
9 1. Add an import: ``from my_app import views``
10 2. Add a URL to ``urlpatterns``: ``path('', views.home, name='home')``
11
12 * Class-based views
13
14 1. Add an import: ``from other_app.views import Home``
15 2. Add a URL to urlpatterns: ``path('', Home.as_view(), name='home')``
16
17 * Including another URLconf
18
19 1. Import the ``include()`` function::
20
21 from django.conf.urls import url, include
22
23 2. Add a URL to urlpatterns: ``path('blog/', include('blog.urls'))``
24 """
25
26 import os.path
27
28 from django.conf import settings
29 from django.conf.urls import include
30 from django.conf.urls.static import static
31 from django.contrib import admin
32 from django.contrib.sitemaps.views import sitemap
33 from django.urls import path, re_path
34 from django.views.generic import RedirectView
35 from django.views.i18n import JavaScriptCatalog
36
37 import debug_toolbar
38 from oauth2_provider.urls import base_urlpatterns
39 from oauth2_provider.views import (
40 AuthorizedTokenDeleteView,
41 AuthorizedTokensListView,
42 ConnectDiscoveryInfoView,
43 JwksInfoView,
44 UserInfoView,
45 )
46
47 from activemembers.sitemaps import sitemap as activemembers_sitemap
48 from documents.sitemaps import sitemap as documents_sitemap
49 from education.sitemaps import sitemap as education_sitemap
50 from events.sitemaps import sitemap as events_sitemap
51 from members.sitemaps import sitemap as members_sitemap
52 from partners.sitemaps import sitemap as partners_sitemap
53 from singlepages.sitemaps import sitemap as singlepages_sitemap
54 from thabloid.sitemaps import sitemap as thabloid_sitemap
55 from thaliawebsite.forms import AuthenticationForm
56 from thaliawebsite.views import (
57 IndexView,
58 RateLimitedLoginView,
59 RateLimitedPasswordResetView,
60 TestCrashView,
61 )
62 from utils.media.views import private_media
63
64 from .sitemaps import StaticViewSitemap
65
66 __all__ = ["urlpatterns"]
67
68 THALIA_SITEMAP = {
69 "main-static": StaticViewSitemap,
70 }
71 THALIA_SITEMAP.update(activemembers_sitemap)
72 THALIA_SITEMAP.update(members_sitemap)
73 THALIA_SITEMAP.update(documents_sitemap)
74 THALIA_SITEMAP.update(thabloid_sitemap)
75 THALIA_SITEMAP.update(partners_sitemap)
76 THALIA_SITEMAP.update(education_sitemap)
77 THALIA_SITEMAP.update(events_sitemap)
78 THALIA_SITEMAP.update(singlepages_sitemap)
79
80 urlpatterns = [
81 path(
82 "admin/login/",
83 RedirectView.as_view(url="/user/login", query_string=True),
84 name="login-redirect",
85 ),
86 path("admin/", admin.site.urls),
87 path("", IndexView.as_view(), name="index"),
88 # Default helpers
89 path(
90 "",
91 include(
92 (
93 [
94 path(
95 "user/oauth/",
96 include(
97 base_urlpatterns
98 + [
99 path(
100 "authorised-apps/",
101 AuthorizedTokensListView.as_view(),
102 name="authorized-token-list",
103 ),
104 path(
105 "authorised-apps/<int:pk>/delete/",
106 AuthorizedTokenDeleteView.as_view(),
107 name="authorized-token-delete",
108 ),
109 path(
110 "keys/",
111 JwksInfoView.as_view(),
112 name="jwks-info",
113 ),
114 path(
115 "info/",
116 UserInfoView.as_view(),
117 name="user-info",
118 ),
119 ]
120 ),
121 ),
122 path(
123 ".well-known/openid-configuration/",
124 ConnectDiscoveryInfoView.as_view(),
125 name="oidc-connect-discovery-info",
126 ),
127 ],
128 "oauth2_provider",
129 ),
130 namespace="oauth2_provider",
131 ),
132 ),
133 path(
134 "user/",
135 include(
136 [
137 path(
138 "login/",
139 RateLimitedLoginView.as_view(
140 authentication_form=AuthenticationForm,
141 redirect_authenticated_user=True,
142 ),
143 name="login",
144 ),
145 path(
146 "password_reset/",
147 RateLimitedPasswordResetView.as_view(),
148 name="password_reset",
149 ),
150 path("", include("django.contrib.auth.urls")),
151 ]
152 ),
153 ),
154 path(
155 "i18n/",
156 include(
157 [
158 path("", include("django.conf.urls.i18n")),
159 path("js/", JavaScriptCatalog.as_view(), name="javascript-catalog"),
160 ]
161 ),
162 ),
163 # Apps
164 path("", include("singlepages.urls")),
165 path("", include("merchandise.urls")),
166 path("", include("thabloid.urls")),
167 path("", include("registrations.urls")),
168 path("", include("newsletters.urls")),
169 path("", include("announcements.urls")),
170 path("", include("pushnotifications.urls")),
171 path("", include("facedetection.urls")),
172 path("", include("photos.urls")),
173 path("", include("members.urls")),
174 path("", include("payments.urls")),
175 path("", include("education.urls")),
176 path("", include("activemembers.urls")),
177 path("", include("documents.urls")),
178 path("", include("events.urls")),
179 path("", include("pizzas.urls")),
180 path("", include("partners.urls")),
181 path("", include("sales.urls")),
182 path("api/", include("thaliawebsite.api.urls")),
183 # Sitemap
184 path(
185 "sitemap.xml",
186 sitemap,
187 {"sitemaps": THALIA_SITEMAP},
188 name="django.contrib.sitemaps.views.sitemap",
189 ),
190 # Dependencies
191 path("tinymce/", include("tinymce.urls")),
192 path("__debug__/", include(debug_toolbar.urls)),
193 # Provide something to test error handling. Limited to admins.
194 path("crash/", TestCrashView.as_view()),
195 # Custom media paths
196 re_path(
197 r"^media/private/(?P<request_path>.*)$", private_media, name="private-media"
198 ),
199 path("", include("shortlinks.urls")),
200 re_path(r"^fp/", include("django_drf_filepond.urls")),
201 ] + static(
202 settings.PUBLIC_MEDIA_URL,
203 document_root=os.path.join(settings.MEDIA_ROOT, settings.PUBLIC_MEDIA_LOCATION),
204 )
205
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/website/thaliawebsite/urls.py b/website/thaliawebsite/urls.py
--- a/website/thaliawebsite/urls.py
+++ b/website/thaliawebsite/urls.py
@@ -31,7 +31,6 @@
from django.contrib import admin
from django.contrib.sitemaps.views import sitemap
from django.urls import path, re_path
-from django.views.generic import RedirectView
from django.views.i18n import JavaScriptCatalog
import debug_toolbar
@@ -58,6 +57,7 @@
RateLimitedLoginView,
RateLimitedPasswordResetView,
TestCrashView,
+ admin_unauthorized_view,
)
from utils.media.views import private_media
@@ -80,7 +80,7 @@
urlpatterns = [
path(
"admin/login/",
- RedirectView.as_view(url="/user/login", query_string=True),
+ admin_unauthorized_view,
name="login-redirect",
),
path("admin/", admin.site.urls),
diff --git a/website/thaliawebsite/views.py b/website/thaliawebsite/views.py
--- a/website/thaliawebsite/views.py
+++ b/website/thaliawebsite/views.py
@@ -2,7 +2,9 @@
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.views import LoginView, PasswordResetView
+from django.core.exceptions import PermissionDenied
from django.http import HttpResponse, HttpResponseForbidden
+from django.shortcuts import redirect
from django.utils.decorators import method_decorator
from django.views.generic import ListView, TemplateView
from django.views.generic.base import View
@@ -70,3 +72,17 @@
def rate_limited_view(request, *args, **kwargs):
return HttpResponse("You are rate limited", status=429)
+
+
+def admin_unauthorized_view(request):
+ if not request.member:
+ url = "/user/login"
+ args = request.META.get("QUERY_STRING", "")
+ if args:
+ url = f"{url}?{args}"
+ return redirect(url)
+ elif not request.member.is_staff:
+ # user is logged in but not authorized
+ raise PermissionDenied("You are not allowed to access the administration page.")
+ else:
+ return redirect(request.GET.get("next", "/"))
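
A rough sketch of how the new `admin_unauthorized_view` could be exercised follows. It is illustrative only: `RequestFactory` does not run the middleware that normally sets `request.member`, so that attribute is stubbed here, and the helper name is made up.

```python
from django.test import RequestFactory

from thaliawebsite.views import admin_unauthorized_view

def _request(member, query=""):
    path = "/admin/login/" + (f"?{query}" if query else "")
    request = RequestFactory().get(path)
    request.member = member  # normally populated by concrexit's member middleware
    return request

# Anonymous visitors are redirected to the login page with the query string preserved.
response = admin_unauthorized_view(_request(member=None, query="next=/admin/"))
assert response.status_code == 302
assert response.url == "/user/login?next=/admin/"
```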
|
{"golden_diff": "diff --git a/website/thaliawebsite/urls.py b/website/thaliawebsite/urls.py\n--- a/website/thaliawebsite/urls.py\n+++ b/website/thaliawebsite/urls.py\n@@ -31,7 +31,6 @@\n from django.contrib import admin\n from django.contrib.sitemaps.views import sitemap\n from django.urls import path, re_path\n-from django.views.generic import RedirectView\n from django.views.i18n import JavaScriptCatalog\n \n import debug_toolbar\n@@ -58,6 +57,7 @@\n RateLimitedLoginView,\n RateLimitedPasswordResetView,\n TestCrashView,\n+ admin_unauthorized_view,\n )\n from utils.media.views import private_media\n \n@@ -80,7 +80,7 @@\n urlpatterns = [\n path(\n \"admin/login/\",\n- RedirectView.as_view(url=\"/user/login\", query_string=True),\n+ admin_unauthorized_view,\n name=\"login-redirect\",\n ),\n path(\"admin/\", admin.site.urls),\ndiff --git a/website/thaliawebsite/views.py b/website/thaliawebsite/views.py\n--- a/website/thaliawebsite/views.py\n+++ b/website/thaliawebsite/views.py\n@@ -2,7 +2,9 @@\n \n from django.contrib.admin.views.decorators import staff_member_required\n from django.contrib.auth.views import LoginView, PasswordResetView\n+from django.core.exceptions import PermissionDenied\n from django.http import HttpResponse, HttpResponseForbidden\n+from django.shortcuts import redirect\n from django.utils.decorators import method_decorator\n from django.views.generic import ListView, TemplateView\n from django.views.generic.base import View\n@@ -70,3 +72,17 @@\n \n def rate_limited_view(request, *args, **kwargs):\n return HttpResponse(\"You are rate limited\", status=429)\n+\n+\n+def admin_unauthorized_view(request):\n+ if not request.member:\n+ url = \"/user/login\"\n+ args = request.META.get(\"QUERY_STRING\", \"\")\n+ if args:\n+ url = f\"{url}?{args}\"\n+ return redirect(url)\n+ elif not request.member.is_staff:\n+ # user is logged in but not authorized\n+ raise PermissionDenied(\"You are not allowed to access the administration page.\")\n+ else:\n+ return redirect(request.GET.get(\"next\", \"/\"))\n", "issue": "Site administration option is shown for all users\n### Describe the bug\r\nI just noticed that the site administration button in the menu is shown for everyone (I think): https://github.com/svthalia/concrexit/blob/master/website/thaliawebsite/templates/base.html#L184\r\n\r\nClicking it results in a looped redirect, so it doesn't work either.\r\n\r\n\r\n\r\n\r\n\r\n### How to reproduce\r\nSteps to reproduce the behaviour:\r\n1. Go to the website\r\n2. 
Check the menu\r\n\r\n### Expected behaviour\r\nDo not show the menu item\r\n\r\n### Screenshots\r\nIf applicable, add screenshots to help explain your problem.\r\n\r\n### Additional context\r\nAdd any other context about the problem here.\r\n\n", "before_files": [{"content": "\"\"\"General views for the website.\"\"\"\n\nfrom django.contrib.admin.views.decorators import staff_member_required\nfrom django.contrib.auth.views import LoginView, PasswordResetView\nfrom django.http import HttpResponse, HttpResponseForbidden\nfrom django.utils.decorators import method_decorator\nfrom django.views.generic import ListView, TemplateView\nfrom django.views.generic.base import View\n\nfrom django_ratelimit.decorators import ratelimit\n\n\nclass IndexView(TemplateView):\n template_name = \"index.html\"\n\n\n@method_decorator(staff_member_required, \"dispatch\")\nclass TestCrashView(View):\n \"\"\"Test view to intentionally crash to test the error handling.\"\"\"\n\n def dispatch(self, request, *args, **kwargs) -> HttpResponse:\n if not request.user.is_superuser:\n return HttpResponseForbidden(\"This is not for you\")\n raise Exception(\"Test exception\")\n\n\nclass PagedView(ListView):\n \"\"\"A ListView with automatic pagination.\"\"\"\n\n def get_context_data(self, **kwargs) -> dict:\n context = super().get_context_data(**kwargs)\n page = context[\"page_obj\"].number\n paginator = context[\"paginator\"]\n\n # Show the two pages before and after the current page\n page_range_start = max(1, page - 2)\n page_range_stop = min(page + 3, paginator.num_pages + 1)\n\n # Add extra pages if we show less than 5 pages\n page_range_start = min(page_range_start, page_range_stop - 5)\n page_range_start = max(1, page_range_start)\n\n # Add extra pages if we still show less than 5 pages\n page_range_stop = max(page_range_stop, page_range_start + 5)\n page_range_stop = min(page_range_stop, paginator.num_pages + 1)\n\n page_range = range(page_range_start, page_range_stop)\n\n context.update(\n {\n \"page_range\": page_range,\n }\n )\n\n return context\n\n\nclass RateLimitedPasswordResetView(PasswordResetView):\n @method_decorator(ratelimit(key=\"ip\", rate=\"5/h\"))\n def post(self, request, *args, **kwargs):\n return super().post(request, *args, **kwargs)\n\n\nclass RateLimitedLoginView(LoginView):\n @method_decorator(ratelimit(key=\"ip\", rate=\"30/h\"))\n @method_decorator(ratelimit(key=\"post:username\", rate=\"30/h\"))\n def post(self, request, *args, **kwargs):\n return super().post(request, *args, **kwargs)\n\n\ndef rate_limited_view(request, *args, **kwargs):\n return HttpResponse(\"You are rate limited\", status=429)\n", "path": "website/thaliawebsite/views.py"}, {"content": "\"\"\"Thalia's root URL Configuration.\n\nThe ``urlpatterns`` list routes URLs to views. For more information please see:\nhttps://docs.djangoproject.com/en/dev/topics/http/urls/\n\nExamples:\n* Function views\n\n 1. Add an import: ``from my_app import views``\n 2. Add a URL to ``urlpatterns``: ``path('', views.home, name='home')``\n\n* Class-based views\n\n 1. Add an import: ``from other_app.views import Home``\n 2. Add a URL to urlpatterns: ``path('', Home.as_view(), name='home')``\n\n* Including another URLconf\n\n 1. Import the ``include()`` function::\n\n from django.conf.urls import url, include\n\n 2. 
Add a URL to urlpatterns: ``path('blog/', include('blog.urls'))``\n\"\"\"\n\nimport os.path\n\nfrom django.conf import settings\nfrom django.conf.urls import include\nfrom django.conf.urls.static import static\nfrom django.contrib import admin\nfrom django.contrib.sitemaps.views import sitemap\nfrom django.urls import path, re_path\nfrom django.views.generic import RedirectView\nfrom django.views.i18n import JavaScriptCatalog\n\nimport debug_toolbar\nfrom oauth2_provider.urls import base_urlpatterns\nfrom oauth2_provider.views import (\n AuthorizedTokenDeleteView,\n AuthorizedTokensListView,\n ConnectDiscoveryInfoView,\n JwksInfoView,\n UserInfoView,\n)\n\nfrom activemembers.sitemaps import sitemap as activemembers_sitemap\nfrom documents.sitemaps import sitemap as documents_sitemap\nfrom education.sitemaps import sitemap as education_sitemap\nfrom events.sitemaps import sitemap as events_sitemap\nfrom members.sitemaps import sitemap as members_sitemap\nfrom partners.sitemaps import sitemap as partners_sitemap\nfrom singlepages.sitemaps import sitemap as singlepages_sitemap\nfrom thabloid.sitemaps import sitemap as thabloid_sitemap\nfrom thaliawebsite.forms import AuthenticationForm\nfrom thaliawebsite.views import (\n IndexView,\n RateLimitedLoginView,\n RateLimitedPasswordResetView,\n TestCrashView,\n)\nfrom utils.media.views import private_media\n\nfrom .sitemaps import StaticViewSitemap\n\n__all__ = [\"urlpatterns\"]\n\nTHALIA_SITEMAP = {\n \"main-static\": StaticViewSitemap,\n}\nTHALIA_SITEMAP.update(activemembers_sitemap)\nTHALIA_SITEMAP.update(members_sitemap)\nTHALIA_SITEMAP.update(documents_sitemap)\nTHALIA_SITEMAP.update(thabloid_sitemap)\nTHALIA_SITEMAP.update(partners_sitemap)\nTHALIA_SITEMAP.update(education_sitemap)\nTHALIA_SITEMAP.update(events_sitemap)\nTHALIA_SITEMAP.update(singlepages_sitemap)\n\nurlpatterns = [\n path(\n \"admin/login/\",\n RedirectView.as_view(url=\"/user/login\", query_string=True),\n name=\"login-redirect\",\n ),\n path(\"admin/\", admin.site.urls),\n path(\"\", IndexView.as_view(), name=\"index\"),\n # Default helpers\n path(\n \"\",\n include(\n (\n [\n path(\n \"user/oauth/\",\n include(\n base_urlpatterns\n + [\n path(\n \"authorised-apps/\",\n AuthorizedTokensListView.as_view(),\n name=\"authorized-token-list\",\n ),\n path(\n \"authorised-apps/<int:pk>/delete/\",\n AuthorizedTokenDeleteView.as_view(),\n name=\"authorized-token-delete\",\n ),\n path(\n \"keys/\",\n JwksInfoView.as_view(),\n name=\"jwks-info\",\n ),\n path(\n \"info/\",\n UserInfoView.as_view(),\n name=\"user-info\",\n ),\n ]\n ),\n ),\n path(\n \".well-known/openid-configuration/\",\n ConnectDiscoveryInfoView.as_view(),\n name=\"oidc-connect-discovery-info\",\n ),\n ],\n \"oauth2_provider\",\n ),\n namespace=\"oauth2_provider\",\n ),\n ),\n path(\n \"user/\",\n include(\n [\n path(\n \"login/\",\n RateLimitedLoginView.as_view(\n authentication_form=AuthenticationForm,\n redirect_authenticated_user=True,\n ),\n name=\"login\",\n ),\n path(\n \"password_reset/\",\n RateLimitedPasswordResetView.as_view(),\n name=\"password_reset\",\n ),\n path(\"\", include(\"django.contrib.auth.urls\")),\n ]\n ),\n ),\n path(\n \"i18n/\",\n include(\n [\n path(\"\", include(\"django.conf.urls.i18n\")),\n path(\"js/\", JavaScriptCatalog.as_view(), name=\"javascript-catalog\"),\n ]\n ),\n ),\n # Apps\n path(\"\", include(\"singlepages.urls\")),\n path(\"\", include(\"merchandise.urls\")),\n path(\"\", include(\"thabloid.urls\")),\n path(\"\", include(\"registrations.urls\")),\n path(\"\", 
include(\"newsletters.urls\")),\n path(\"\", include(\"announcements.urls\")),\n path(\"\", include(\"pushnotifications.urls\")),\n path(\"\", include(\"facedetection.urls\")),\n path(\"\", include(\"photos.urls\")),\n path(\"\", include(\"members.urls\")),\n path(\"\", include(\"payments.urls\")),\n path(\"\", include(\"education.urls\")),\n path(\"\", include(\"activemembers.urls\")),\n path(\"\", include(\"documents.urls\")),\n path(\"\", include(\"events.urls\")),\n path(\"\", include(\"pizzas.urls\")),\n path(\"\", include(\"partners.urls\")),\n path(\"\", include(\"sales.urls\")),\n path(\"api/\", include(\"thaliawebsite.api.urls\")),\n # Sitemap\n path(\n \"sitemap.xml\",\n sitemap,\n {\"sitemaps\": THALIA_SITEMAP},\n name=\"django.contrib.sitemaps.views.sitemap\",\n ),\n # Dependencies\n path(\"tinymce/\", include(\"tinymce.urls\")),\n path(\"__debug__/\", include(debug_toolbar.urls)),\n # Provide something to test error handling. Limited to admins.\n path(\"crash/\", TestCrashView.as_view()),\n # Custom media paths\n re_path(\n r\"^media/private/(?P<request_path>.*)$\", private_media, name=\"private-media\"\n ),\n path(\"\", include(\"shortlinks.urls\")),\n re_path(r\"^fp/\", include(\"django_drf_filepond.urls\")),\n] + static(\n settings.PUBLIC_MEDIA_URL,\n document_root=os.path.join(settings.MEDIA_ROOT, settings.PUBLIC_MEDIA_LOCATION),\n)\n", "path": "website/thaliawebsite/urls.py"}], "after_files": [{"content": "\"\"\"General views for the website.\"\"\"\n\nfrom django.contrib.admin.views.decorators import staff_member_required\nfrom django.contrib.auth.views import LoginView, PasswordResetView\nfrom django.core.exceptions import PermissionDenied\nfrom django.http import HttpResponse, HttpResponseForbidden\nfrom django.shortcuts import redirect\nfrom django.utils.decorators import method_decorator\nfrom django.views.generic import ListView, TemplateView\nfrom django.views.generic.base import View\n\nfrom django_ratelimit.decorators import ratelimit\n\n\nclass IndexView(TemplateView):\n template_name = \"index.html\"\n\n\n@method_decorator(staff_member_required, \"dispatch\")\nclass TestCrashView(View):\n \"\"\"Test view to intentionally crash to test the error handling.\"\"\"\n\n def dispatch(self, request, *args, **kwargs) -> HttpResponse:\n if not request.user.is_superuser:\n return HttpResponseForbidden(\"This is not for you\")\n raise Exception(\"Test exception\")\n\n\nclass PagedView(ListView):\n \"\"\"A ListView with automatic pagination.\"\"\"\n\n def get_context_data(self, **kwargs) -> dict:\n context = super().get_context_data(**kwargs)\n page = context[\"page_obj\"].number\n paginator = context[\"paginator\"]\n\n # Show the two pages before and after the current page\n page_range_start = max(1, page - 2)\n page_range_stop = min(page + 3, paginator.num_pages + 1)\n\n # Add extra pages if we show less than 5 pages\n page_range_start = min(page_range_start, page_range_stop - 5)\n page_range_start = max(1, page_range_start)\n\n # Add extra pages if we still show less than 5 pages\n page_range_stop = max(page_range_stop, page_range_start + 5)\n page_range_stop = min(page_range_stop, paginator.num_pages + 1)\n\n page_range = range(page_range_start, page_range_stop)\n\n context.update(\n {\n \"page_range\": page_range,\n }\n )\n\n return context\n\n\nclass RateLimitedPasswordResetView(PasswordResetView):\n @method_decorator(ratelimit(key=\"ip\", rate=\"5/h\"))\n def post(self, request, *args, **kwargs):\n return super().post(request, *args, **kwargs)\n\n\nclass 
RateLimitedLoginView(LoginView):\n @method_decorator(ratelimit(key=\"ip\", rate=\"30/h\"))\n @method_decorator(ratelimit(key=\"post:username\", rate=\"30/h\"))\n def post(self, request, *args, **kwargs):\n return super().post(request, *args, **kwargs)\n\n\ndef rate_limited_view(request, *args, **kwargs):\n return HttpResponse(\"You are rate limited\", status=429)\n\n\ndef admin_unauthorized_view(request):\n if not request.member:\n url = \"/user/login\"\n args = request.META.get(\"QUERY_STRING\", \"\")\n if args:\n url = f\"{url}?{args}\"\n return redirect(url)\n elif not request.member.is_staff:\n # user is logged in but not authorized\n raise PermissionDenied(\"You are not allowed to access the administration page.\")\n else:\n return redirect(request.GET.get(\"next\", \"/\"))\n", "path": "website/thaliawebsite/views.py"}, {"content": "\"\"\"Thalia's root URL Configuration.\n\nThe ``urlpatterns`` list routes URLs to views. For more information please see:\nhttps://docs.djangoproject.com/en/dev/topics/http/urls/\n\nExamples:\n* Function views\n\n 1. Add an import: ``from my_app import views``\n 2. Add a URL to ``urlpatterns``: ``path('', views.home, name='home')``\n\n* Class-based views\n\n 1. Add an import: ``from other_app.views import Home``\n 2. Add a URL to urlpatterns: ``path('', Home.as_view(), name='home')``\n\n* Including another URLconf\n\n 1. Import the ``include()`` function::\n\n from django.conf.urls import url, include\n\n 2. Add a URL to urlpatterns: ``path('blog/', include('blog.urls'))``\n\"\"\"\n\nimport os.path\n\nfrom django.conf import settings\nfrom django.conf.urls import include\nfrom django.conf.urls.static import static\nfrom django.contrib import admin\nfrom django.contrib.sitemaps.views import sitemap\nfrom django.urls import path, re_path\nfrom django.views.i18n import JavaScriptCatalog\n\nimport debug_toolbar\nfrom oauth2_provider.urls import base_urlpatterns\nfrom oauth2_provider.views import (\n AuthorizedTokenDeleteView,\n AuthorizedTokensListView,\n ConnectDiscoveryInfoView,\n JwksInfoView,\n UserInfoView,\n)\n\nfrom activemembers.sitemaps import sitemap as activemembers_sitemap\nfrom documents.sitemaps import sitemap as documents_sitemap\nfrom education.sitemaps import sitemap as education_sitemap\nfrom events.sitemaps import sitemap as events_sitemap\nfrom members.sitemaps import sitemap as members_sitemap\nfrom partners.sitemaps import sitemap as partners_sitemap\nfrom singlepages.sitemaps import sitemap as singlepages_sitemap\nfrom thabloid.sitemaps import sitemap as thabloid_sitemap\nfrom thaliawebsite.forms import AuthenticationForm\nfrom thaliawebsite.views import (\n IndexView,\n RateLimitedLoginView,\n RateLimitedPasswordResetView,\n TestCrashView,\n admin_unauthorized_view,\n)\nfrom utils.media.views import private_media\n\nfrom .sitemaps import StaticViewSitemap\n\n__all__ = [\"urlpatterns\"]\n\nTHALIA_SITEMAP = {\n \"main-static\": StaticViewSitemap,\n}\nTHALIA_SITEMAP.update(activemembers_sitemap)\nTHALIA_SITEMAP.update(members_sitemap)\nTHALIA_SITEMAP.update(documents_sitemap)\nTHALIA_SITEMAP.update(thabloid_sitemap)\nTHALIA_SITEMAP.update(partners_sitemap)\nTHALIA_SITEMAP.update(education_sitemap)\nTHALIA_SITEMAP.update(events_sitemap)\nTHALIA_SITEMAP.update(singlepages_sitemap)\n\nurlpatterns = [\n path(\n \"admin/login/\",\n admin_unauthorized_view,\n name=\"login-redirect\",\n ),\n path(\"admin/\", admin.site.urls),\n path(\"\", IndexView.as_view(), name=\"index\"),\n # Default helpers\n path(\n \"\",\n include(\n (\n [\n path(\n 
\"user/oauth/\",\n include(\n base_urlpatterns\n + [\n path(\n \"authorised-apps/\",\n AuthorizedTokensListView.as_view(),\n name=\"authorized-token-list\",\n ),\n path(\n \"authorised-apps/<int:pk>/delete/\",\n AuthorizedTokenDeleteView.as_view(),\n name=\"authorized-token-delete\",\n ),\n path(\n \"keys/\",\n JwksInfoView.as_view(),\n name=\"jwks-info\",\n ),\n path(\n \"info/\",\n UserInfoView.as_view(),\n name=\"user-info\",\n ),\n ]\n ),\n ),\n path(\n \".well-known/openid-configuration/\",\n ConnectDiscoveryInfoView.as_view(),\n name=\"oidc-connect-discovery-info\",\n ),\n ],\n \"oauth2_provider\",\n ),\n namespace=\"oauth2_provider\",\n ),\n ),\n path(\n \"user/\",\n include(\n [\n path(\n \"login/\",\n RateLimitedLoginView.as_view(\n authentication_form=AuthenticationForm,\n redirect_authenticated_user=True,\n ),\n name=\"login\",\n ),\n path(\n \"password_reset/\",\n RateLimitedPasswordResetView.as_view(),\n name=\"password_reset\",\n ),\n path(\"\", include(\"django.contrib.auth.urls\")),\n ]\n ),\n ),\n path(\n \"i18n/\",\n include(\n [\n path(\"\", include(\"django.conf.urls.i18n\")),\n path(\"js/\", JavaScriptCatalog.as_view(), name=\"javascript-catalog\"),\n ]\n ),\n ),\n # Apps\n path(\"\", include(\"singlepages.urls\")),\n path(\"\", include(\"merchandise.urls\")),\n path(\"\", include(\"thabloid.urls\")),\n path(\"\", include(\"registrations.urls\")),\n path(\"\", include(\"newsletters.urls\")),\n path(\"\", include(\"announcements.urls\")),\n path(\"\", include(\"pushnotifications.urls\")),\n path(\"\", include(\"facedetection.urls\")),\n path(\"\", include(\"photos.urls\")),\n path(\"\", include(\"members.urls\")),\n path(\"\", include(\"payments.urls\")),\n path(\"\", include(\"education.urls\")),\n path(\"\", include(\"activemembers.urls\")),\n path(\"\", include(\"documents.urls\")),\n path(\"\", include(\"events.urls\")),\n path(\"\", include(\"pizzas.urls\")),\n path(\"\", include(\"partners.urls\")),\n path(\"\", include(\"sales.urls\")),\n path(\"api/\", include(\"thaliawebsite.api.urls\")),\n # Sitemap\n path(\n \"sitemap.xml\",\n sitemap,\n {\"sitemaps\": THALIA_SITEMAP},\n name=\"django.contrib.sitemaps.views.sitemap\",\n ),\n # Dependencies\n path(\"tinymce/\", include(\"tinymce.urls\")),\n path(\"__debug__/\", include(debug_toolbar.urls)),\n # Provide something to test error handling. Limited to admins.\n path(\"crash/\", TestCrashView.as_view()),\n # Custom media paths\n re_path(\n r\"^media/private/(?P<request_path>.*)$\", private_media, name=\"private-media\"\n ),\n path(\"\", include(\"shortlinks.urls\")),\n re_path(r\"^fp/\", include(\"django_drf_filepond.urls\")),\n] + static(\n settings.PUBLIC_MEDIA_URL,\n document_root=os.path.join(settings.MEDIA_ROOT, settings.PUBLIC_MEDIA_LOCATION),\n)\n", "path": "website/thaliawebsite/urls.py"}]}
| 3,076 | 503 |
gh_patches_debug_40593
|
rasdani/github-patches
|
git_diff
|
zulip__zulip-18065
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
test-documentation: Fix output spam from external links
Currently, test-documentation run in a development environment (i.e. without `--skip-external-links`) prints like 2 screenfuls of errors like this:
```
2021-04-01 10:20:38 [documentation_crawler] ERROR: Please check link: https://github.com/zulip/zulip/commit/49dbd85a8985b12666087f9ea36acb6f7da0aa4f
2021-04-01 10:20:38 [documentation_crawler] ERROR: Please check link: https://github.com/zulip/zulip-desktop
2021-04-01 10:20:38 [documentation_crawler] ERROR: Please check link: https://github.com/zulip/zulip/issues/10976
```
I imagine this is really confusing for anyone new to our ReadTheDocs documentation.
Most of these are 429 errors because GitHub doesn't want automation hitting their servers all the time; we could probably just suppress most of them that fit a pattern that we expect to be statically correct (E.g. anything in github.com/zulip/ that is not a tree path).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py`
Content:
```
1 import json
2 import os
3 import re
4 from typing import Callable, Iterator, List, Optional, Union
5
6 import scrapy
7 from scrapy.http import Request, Response
8 from scrapy.linkextractors import IGNORED_EXTENSIONS
9 from scrapy.linkextractors.lxmlhtml import LxmlLinkExtractor
10 from scrapy.spidermiddlewares.httperror import HttpError
11 from scrapy.utils.url import url_has_any_extension
12 from twisted.python.failure import Failure
13
14 EXCLUDED_URLS = [
15 # Google calendar returns 404s on HEAD requests unconditionally
16 "https://calendar.google.com/calendar/[email protected]",
17 # Returns 409 errors to HEAD requests frequently
18 "https://medium.freecodecamp.org/",
19 # Returns 404 to HEAD requests unconditionally
20 "https://www.git-tower.com/blog/command-line-cheat-sheet/",
21 "https://marketplace.visualstudio.com/items?itemName=rafaelmaiolla.remote-vscode",
22 # Requires authentication
23 "https://circleci.com/gh/zulip/zulip/tree/master",
24 "https://circleci.com/gh/zulip/zulip/16617",
25 "https://www.linkedin.com/company/zulip-project",
26 # Returns 403 errors to HEAD requests
27 "https://giphy.com",
28 "https://giphy.com/apps/giphycapture",
29 "https://www.udemy.com/course/the-complete-react-native-and-redux-course/",
30 ]
31
32 VNU_IGNORE = [
33 # Real errors that should be fixed.
34 r"Duplicate ID “[^”]*”\.",
35 r"The first occurrence of ID “[^”]*” was here\.",
36 r"Attribute “markdown” not allowed on element “div” at this point\.",
37 r"No “p” element in scope but a “p” end tag seen\.",
38 r"Element “div” not allowed as child of element “ul” in this context\. "
39 + r"\(Suppressing further errors from this subtree\.\)",
40 # Warnings that are probably less important.
41 r"The “type” attribute is unnecessary for JavaScript resources\.",
42 ]
43 VNU_IGNORE_REGEX = re.compile(r"|".join(VNU_IGNORE))
44
45 DEPLOY_ROOT = os.path.abspath(os.path.join(__file__, "../../../../../.."))
46
47 ZULIP_SERVER_GITHUB_FILE_URL_PREFIX = "https://github.com/zulip/zulip/blob/master"
48 ZULIP_SERVER_GITHUB_DIRECTORY_URL_PREFIX = "https://github.com/zulip/zulip/tree/master"
49
50
51 class BaseDocumentationSpider(scrapy.Spider):
52 name: Optional[str] = None
53 # Exclude domain address.
54 deny_domains: List[str] = []
55 start_urls: List[str] = []
56 deny: List[str] = []
57 file_extensions: List[str] = ["." + ext for ext in IGNORED_EXTENSIONS]
58 tags = ("a", "area", "img")
59 attrs = ("href", "src")
60
61 def _has_extension(self, url: str) -> bool:
62 return url_has_any_extension(url, self.file_extensions)
63
64 def _is_external_url(self, url: str) -> bool:
65 return url.startswith("http") or self._has_extension(url)
66
67 def check_existing(self, response: Response) -> None:
68 self.log(response)
69
70 def _is_external_link(self, url: str) -> bool:
71 if url.startswith("https://chat.zulip.org"):
72 # Since most chat.zulip.org URLs will be links to specific
73 # logged-in content that the spider cannot verify, or the
74 # homepage, there's no need to check those (which can
75 # cause errors when chat.zulip.org is being updated).
76 return True
77 if "zulip.readthedocs" in url or "zulip.com" in url or "zulip.org" in url:
78 # We want CI to check any links to Zulip sites.
79 return False
80 if (len(url) > 4 and url[:4] == "file") or ("localhost" in url):
81 # We also want CI to check any links to built documentation.
82 return False
83 if url.startswith(ZULIP_SERVER_GITHUB_FILE_URL_PREFIX) or url.startswith(
84 ZULIP_SERVER_GITHUB_DIRECTORY_URL_PREFIX
85 ):
86 # We can verify these links directly in the local git repo without making any requests to GitHub servers.
87 return False
88 if "github.com/zulip" in url:
89 # We want to check these links but due to rate limiting from GitHub, these checks often
90 # fail in the CI. Thus, we should treat these as external links for now.
91 # TODO: Figure out how to test github.com/zulip links in CI.
92 return True
93 return True
94
95 def check_fragment(self, response: Response) -> None:
96 self.log(response)
97 xpath_template = "//*[@id='{fragment}' or @name='{fragment}']"
98 m = re.match(r".+\#(?P<fragment>.*)$", response.request.url) # Get fragment value.
99 if not m:
100 return
101 fragment = m.group("fragment")
102 # Check fragment existing on response page.
103 if not response.selector.xpath(xpath_template.format(fragment=fragment)):
104 self.logger.error(
105 "Fragment #%s is not found on page %s", fragment, response.request.url
106 )
107
108 def _vnu_callback(self, url: str) -> Callable[[Response], None]:
109 def callback(response: Response) -> None:
110 vnu_out = json.loads(response.text)
111 for message in vnu_out["messages"]:
112 if not VNU_IGNORE_REGEX.fullmatch(message["message"]):
113 self.logger.error(
114 '"%s":%d.%d-%d.%d: %s: %s',
115 url,
116 message.get("firstLine", message["lastLine"]),
117 message.get("firstColumn", message["lastColumn"]),
118 message["lastLine"],
119 message["lastColumn"],
120 message["type"],
121 message["message"],
122 )
123
124 return callback
125
126 def _make_requests(self, url: str) -> Iterator[Request]:
127 # These URLs are for Zulip's webapp, which with recent changes
128 # can be accessible without login an account. While we do
129 # crawl documentation served by the webapp (E.g. /help/), we
130 # don't want to crawl the webapp itself, so we exclude these.
131 if (
132 url in ["http://localhost:9981/", "http://localhost:9981"]
133 or url.startswith("http://localhost:9981/#")
134 or url.startswith("http://localhost:9981#")
135 ):
136 return
137
138 callback: Callable[[Response], Optional[Iterator[Request]]] = self.parse
139 dont_filter = False
140 method = "GET"
141 if self._is_external_url(url):
142 callback = self.check_existing
143 method = "HEAD"
144
145 if url.startswith(ZULIP_SERVER_GITHUB_FILE_URL_PREFIX):
146 file_path = url.replace(ZULIP_SERVER_GITHUB_FILE_URL_PREFIX, DEPLOY_ROOT)
147 hash_index = file_path.find("#")
148 if hash_index != -1:
149 file_path = file_path[:hash_index]
150 if not os.path.isfile(file_path):
151 self.logger.error(
152 "There is no local file associated with the GitHub URL: %s", url
153 )
154 return
155 elif url.startswith(ZULIP_SERVER_GITHUB_DIRECTORY_URL_PREFIX):
156 dir_path = url.replace(ZULIP_SERVER_GITHUB_DIRECTORY_URL_PREFIX, DEPLOY_ROOT)
157 if not os.path.isdir(dir_path):
158 self.logger.error(
159 "There is no local directory associated with the GitHub URL: %s", url
160 )
161 return
162 elif "#" in url:
163 dont_filter = True
164 callback = self.check_fragment
165 if getattr(self, "skip_external", False) and self._is_external_link(url):
166 return
167 yield Request(
168 url,
169 method=method,
170 callback=callback,
171 dont_filter=dont_filter,
172 errback=self.error_callback,
173 )
174
175 def start_requests(self) -> Iterator[Request]:
176 for url in self.start_urls:
177 yield from self._make_requests(url)
178
179 def parse(self, response: Response) -> Iterator[Request]:
180 self.log(response)
181
182 if getattr(self, "validate_html", False):
183 yield Request(
184 "http://127.0.0.1:9988/?out=json",
185 method="POST",
186 headers={"Content-Type": response.headers["Content-Type"]},
187 body=response.body,
188 callback=self._vnu_callback(response.url),
189 errback=self.error_callback,
190 )
191
192 for link in LxmlLinkExtractor(
193 deny_domains=self.deny_domains,
194 deny_extensions=["doc"],
195 tags=self.tags,
196 attrs=self.attrs,
197 deny=self.deny,
198 canonicalize=False,
199 ).extract_links(response):
200 yield from self._make_requests(link.url)
201
202 def retry_request_with_get(self, request: Request) -> Iterator[Request]:
203 request.method = "GET"
204 request.dont_filter = True
205 yield request
206
207 def exclude_error(self, url: str) -> bool:
208 return url in EXCLUDED_URLS
209
210 def error_callback(self, failure: Failure) -> Optional[Union[Failure, Iterator[Request]]]:
211 if isinstance(failure.value, HttpError):
212 response = failure.value.response
213 if self.exclude_error(response.url):
214 return None
215 if response.status == 405 and response.request.method == "HEAD":
216 # Method 'HEAD' not allowed, repeat request with 'GET'
217 return self.retry_request_with_get(response.request)
218 self.logger.error("Please check link: %s", response.request.url)
219
220 return failure
221
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py b/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py
--- a/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py
+++ b/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py
@@ -2,6 +2,7 @@
import os
import re
from typing import Callable, Iterator, List, Optional, Union
+from urllib.parse import urlparse
import scrapy
from scrapy.http import Request, Response
@@ -11,6 +12,15 @@
from scrapy.utils.url import url_has_any_extension
from twisted.python.failure import Failure
+EXCLUDED_DOMAINS = [
+ # Returns 429 Rate-Limited Errors
+ "github.com",
+ "gist.github.com",
+ # Returns 503 Errors
+ "www.amazon.com",
+ "gitlab.com",
+]
+
EXCLUDED_URLS = [
# Google calendar returns 404s on HEAD requests unconditionally
"https://calendar.google.com/calendar/[email protected]",
@@ -19,6 +29,8 @@
# Returns 404 to HEAD requests unconditionally
"https://www.git-tower.com/blog/command-line-cheat-sheet/",
"https://marketplace.visualstudio.com/items?itemName=rafaelmaiolla.remote-vscode",
+ "https://www.transifex.com/zulip/zulip/announcements/",
+ "https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-ssh",
# Requires authentication
"https://circleci.com/gh/zulip/zulip/tree/master",
"https://circleci.com/gh/zulip/zulip/16617",
@@ -164,6 +176,10 @@
callback = self.check_fragment
if getattr(self, "skip_external", False) and self._is_external_link(url):
return
+ if urlparse(url).netloc in EXCLUDED_DOMAINS:
+ return
+ if url in EXCLUDED_URLS:
+ return
yield Request(
url,
method=method,
@@ -204,13 +220,12 @@
request.dont_filter = True
yield request
- def exclude_error(self, url: str) -> bool:
- return url in EXCLUDED_URLS
-
def error_callback(self, failure: Failure) -> Optional[Union[Failure, Iterator[Request]]]:
if isinstance(failure.value, HttpError):
response = failure.value.response
- if self.exclude_error(response.url):
+ # Hack: The filtering above does not catch this URL,
+ # likely due to a redirect.
+ if urlparse(response.url).netloc == "idmsa.apple.com":
return None
if response.status == 405 and response.request.method == "HEAD":
# Method 'HEAD' not allowed, repeat request with 'GET'
|
{"golden_diff": "diff --git a/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py b/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py\n--- a/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py\n+++ b/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py\n@@ -2,6 +2,7 @@\n import os\n import re\n from typing import Callable, Iterator, List, Optional, Union\n+from urllib.parse import urlparse\n \n import scrapy\n from scrapy.http import Request, Response\n@@ -11,6 +12,15 @@\n from scrapy.utils.url import url_has_any_extension\n from twisted.python.failure import Failure\n \n+EXCLUDED_DOMAINS = [\n+ # Returns 429 Rate-Limited Errors\n+ \"github.com\",\n+ \"gist.github.com\",\n+ # Returns 503 Errors\n+ \"www.amazon.com\",\n+ \"gitlab.com\",\n+]\n+\n EXCLUDED_URLS = [\n # Google calendar returns 404s on HEAD requests unconditionally\n \"https://calendar.google.com/calendar/[email protected]\",\n@@ -19,6 +29,8 @@\n # Returns 404 to HEAD requests unconditionally\n \"https://www.git-tower.com/blog/command-line-cheat-sheet/\",\n \"https://marketplace.visualstudio.com/items?itemName=rafaelmaiolla.remote-vscode\",\n+ \"https://www.transifex.com/zulip/zulip/announcements/\",\n+ \"https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-ssh\",\n # Requires authentication\n \"https://circleci.com/gh/zulip/zulip/tree/master\",\n \"https://circleci.com/gh/zulip/zulip/16617\",\n@@ -164,6 +176,10 @@\n callback = self.check_fragment\n if getattr(self, \"skip_external\", False) and self._is_external_link(url):\n return\n+ if urlparse(url).netloc in EXCLUDED_DOMAINS:\n+ return\n+ if url in EXCLUDED_URLS:\n+ return\n yield Request(\n url,\n method=method,\n@@ -204,13 +220,12 @@\n request.dont_filter = True\n yield request\n \n- def exclude_error(self, url: str) -> bool:\n- return url in EXCLUDED_URLS\n-\n def error_callback(self, failure: Failure) -> Optional[Union[Failure, Iterator[Request]]]:\n if isinstance(failure.value, HttpError):\n response = failure.value.response\n- if self.exclude_error(response.url):\n+ # Hack: The filtering above does not catch this URL,\n+ # likely due to a redirect.\n+ if urlparse(response.url).netloc == \"idmsa.apple.com\":\n return None\n if response.status == 405 and response.request.method == \"HEAD\":\n # Method 'HEAD' not allowed, repeat request with 'GET'\n", "issue": "test-documentation: Fix output spam from external links\nCurrently, test-documentation run in a development environment (i.e. without `--skip-external-links`) prints like 2 screenfuls of errors like this:\r\n```\r\n2021-04-01 10:20:38 [documentation_crawler] ERROR: Please check link: https://github.com/zulip/zulip/commit/49dbd85a8985b12666087f9ea36acb6f7da0aa4f\r\n2021-04-01 10:20:38 [documentation_crawler] ERROR: Please check link: https://github.com/zulip/zulip-desktop\r\n2021-04-01 10:20:38 [documentation_crawler] ERROR: Please check link: https://github.com/zulip/zulip/issues/10976\r\n```\r\n\r\nI imagine this is really confusing for anyone new to our ReadTheDocs documentation.\r\n\r\nMost of these are 429 errors because GitHub doesn't want automation hitting their servers all the time; we could probably just suppress most of them that fit a pattern that we expect to be statically correct (E.g. 
anything in github.com/zulip/ that is not a tree path).\n", "before_files": [{"content": "import json\nimport os\nimport re\nfrom typing import Callable, Iterator, List, Optional, Union\n\nimport scrapy\nfrom scrapy.http import Request, Response\nfrom scrapy.linkextractors import IGNORED_EXTENSIONS\nfrom scrapy.linkextractors.lxmlhtml import LxmlLinkExtractor\nfrom scrapy.spidermiddlewares.httperror import HttpError\nfrom scrapy.utils.url import url_has_any_extension\nfrom twisted.python.failure import Failure\n\nEXCLUDED_URLS = [\n # Google calendar returns 404s on HEAD requests unconditionally\n \"https://calendar.google.com/calendar/[email protected]\",\n # Returns 409 errors to HEAD requests frequently\n \"https://medium.freecodecamp.org/\",\n # Returns 404 to HEAD requests unconditionally\n \"https://www.git-tower.com/blog/command-line-cheat-sheet/\",\n \"https://marketplace.visualstudio.com/items?itemName=rafaelmaiolla.remote-vscode\",\n # Requires authentication\n \"https://circleci.com/gh/zulip/zulip/tree/master\",\n \"https://circleci.com/gh/zulip/zulip/16617\",\n \"https://www.linkedin.com/company/zulip-project\",\n # Returns 403 errors to HEAD requests\n \"https://giphy.com\",\n \"https://giphy.com/apps/giphycapture\",\n \"https://www.udemy.com/course/the-complete-react-native-and-redux-course/\",\n]\n\nVNU_IGNORE = [\n # Real errors that should be fixed.\n r\"Duplicate ID \u201c[^\u201d]*\u201d\\.\",\n r\"The first occurrence of ID \u201c[^\u201d]*\u201d was here\\.\",\n r\"Attribute \u201cmarkdown\u201d not allowed on element \u201cdiv\u201d at this point\\.\",\n r\"No \u201cp\u201d element in scope but a \u201cp\u201d end tag seen\\.\",\n r\"Element \u201cdiv\u201d not allowed as child of element \u201cul\u201d in this context\\. \"\n + r\"\\(Suppressing further errors from this subtree\\.\\)\",\n # Warnings that are probably less important.\n r\"The \u201ctype\u201d attribute is unnecessary for JavaScript resources\\.\",\n]\nVNU_IGNORE_REGEX = re.compile(r\"|\".join(VNU_IGNORE))\n\nDEPLOY_ROOT = os.path.abspath(os.path.join(__file__, \"../../../../../..\"))\n\nZULIP_SERVER_GITHUB_FILE_URL_PREFIX = \"https://github.com/zulip/zulip/blob/master\"\nZULIP_SERVER_GITHUB_DIRECTORY_URL_PREFIX = \"https://github.com/zulip/zulip/tree/master\"\n\n\nclass BaseDocumentationSpider(scrapy.Spider):\n name: Optional[str] = None\n # Exclude domain address.\n deny_domains: List[str] = []\n start_urls: List[str] = []\n deny: List[str] = []\n file_extensions: List[str] = [\".\" + ext for ext in IGNORED_EXTENSIONS]\n tags = (\"a\", \"area\", \"img\")\n attrs = (\"href\", \"src\")\n\n def _has_extension(self, url: str) -> bool:\n return url_has_any_extension(url, self.file_extensions)\n\n def _is_external_url(self, url: str) -> bool:\n return url.startswith(\"http\") or self._has_extension(url)\n\n def check_existing(self, response: Response) -> None:\n self.log(response)\n\n def _is_external_link(self, url: str) -> bool:\n if url.startswith(\"https://chat.zulip.org\"):\n # Since most chat.zulip.org URLs will be links to specific\n # logged-in content that the spider cannot verify, or the\n # homepage, there's no need to check those (which can\n # cause errors when chat.zulip.org is being updated).\n return True\n if \"zulip.readthedocs\" in url or \"zulip.com\" in url or \"zulip.org\" in url:\n # We want CI to check any links to Zulip sites.\n return False\n if (len(url) > 4 and url[:4] == \"file\") or (\"localhost\" in url):\n # We also want CI to check any links to built documentation.\n return 
False\n if url.startswith(ZULIP_SERVER_GITHUB_FILE_URL_PREFIX) or url.startswith(\n ZULIP_SERVER_GITHUB_DIRECTORY_URL_PREFIX\n ):\n # We can verify these links directly in the local git repo without making any requests to GitHub servers.\n return False\n if \"github.com/zulip\" in url:\n # We want to check these links but due to rate limiting from GitHub, these checks often\n # fail in the CI. Thus, we should treat these as external links for now.\n # TODO: Figure out how to test github.com/zulip links in CI.\n return True\n return True\n\n def check_fragment(self, response: Response) -> None:\n self.log(response)\n xpath_template = \"//*[@id='{fragment}' or @name='{fragment}']\"\n m = re.match(r\".+\\#(?P<fragment>.*)$\", response.request.url) # Get fragment value.\n if not m:\n return\n fragment = m.group(\"fragment\")\n # Check fragment existing on response page.\n if not response.selector.xpath(xpath_template.format(fragment=fragment)):\n self.logger.error(\n \"Fragment #%s is not found on page %s\", fragment, response.request.url\n )\n\n def _vnu_callback(self, url: str) -> Callable[[Response], None]:\n def callback(response: Response) -> None:\n vnu_out = json.loads(response.text)\n for message in vnu_out[\"messages\"]:\n if not VNU_IGNORE_REGEX.fullmatch(message[\"message\"]):\n self.logger.error(\n '\"%s\":%d.%d-%d.%d: %s: %s',\n url,\n message.get(\"firstLine\", message[\"lastLine\"]),\n message.get(\"firstColumn\", message[\"lastColumn\"]),\n message[\"lastLine\"],\n message[\"lastColumn\"],\n message[\"type\"],\n message[\"message\"],\n )\n\n return callback\n\n def _make_requests(self, url: str) -> Iterator[Request]:\n # These URLs are for Zulip's webapp, which with recent changes\n # can be accessible without login an account. While we do\n # crawl documentation served by the webapp (E.g. 
/help/), we\n # don't want to crawl the webapp itself, so we exclude these.\n if (\n url in [\"http://localhost:9981/\", \"http://localhost:9981\"]\n or url.startswith(\"http://localhost:9981/#\")\n or url.startswith(\"http://localhost:9981#\")\n ):\n return\n\n callback: Callable[[Response], Optional[Iterator[Request]]] = self.parse\n dont_filter = False\n method = \"GET\"\n if self._is_external_url(url):\n callback = self.check_existing\n method = \"HEAD\"\n\n if url.startswith(ZULIP_SERVER_GITHUB_FILE_URL_PREFIX):\n file_path = url.replace(ZULIP_SERVER_GITHUB_FILE_URL_PREFIX, DEPLOY_ROOT)\n hash_index = file_path.find(\"#\")\n if hash_index != -1:\n file_path = file_path[:hash_index]\n if not os.path.isfile(file_path):\n self.logger.error(\n \"There is no local file associated with the GitHub URL: %s\", url\n )\n return\n elif url.startswith(ZULIP_SERVER_GITHUB_DIRECTORY_URL_PREFIX):\n dir_path = url.replace(ZULIP_SERVER_GITHUB_DIRECTORY_URL_PREFIX, DEPLOY_ROOT)\n if not os.path.isdir(dir_path):\n self.logger.error(\n \"There is no local directory associated with the GitHub URL: %s\", url\n )\n return\n elif \"#\" in url:\n dont_filter = True\n callback = self.check_fragment\n if getattr(self, \"skip_external\", False) and self._is_external_link(url):\n return\n yield Request(\n url,\n method=method,\n callback=callback,\n dont_filter=dont_filter,\n errback=self.error_callback,\n )\n\n def start_requests(self) -> Iterator[Request]:\n for url in self.start_urls:\n yield from self._make_requests(url)\n\n def parse(self, response: Response) -> Iterator[Request]:\n self.log(response)\n\n if getattr(self, \"validate_html\", False):\n yield Request(\n \"http://127.0.0.1:9988/?out=json\",\n method=\"POST\",\n headers={\"Content-Type\": response.headers[\"Content-Type\"]},\n body=response.body,\n callback=self._vnu_callback(response.url),\n errback=self.error_callback,\n )\n\n for link in LxmlLinkExtractor(\n deny_domains=self.deny_domains,\n deny_extensions=[\"doc\"],\n tags=self.tags,\n attrs=self.attrs,\n deny=self.deny,\n canonicalize=False,\n ).extract_links(response):\n yield from self._make_requests(link.url)\n\n def retry_request_with_get(self, request: Request) -> Iterator[Request]:\n request.method = \"GET\"\n request.dont_filter = True\n yield request\n\n def exclude_error(self, url: str) -> bool:\n return url in EXCLUDED_URLS\n\n def error_callback(self, failure: Failure) -> Optional[Union[Failure, Iterator[Request]]]:\n if isinstance(failure.value, HttpError):\n response = failure.value.response\n if self.exclude_error(response.url):\n return None\n if response.status == 405 and response.request.method == \"HEAD\":\n # Method 'HEAD' not allowed, repeat request with 'GET'\n return self.retry_request_with_get(response.request)\n self.logger.error(\"Please check link: %s\", response.request.url)\n\n return failure\n", "path": "tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py"}], "after_files": [{"content": "import json\nimport os\nimport re\nfrom typing import Callable, Iterator, List, Optional, Union\nfrom urllib.parse import urlparse\n\nimport scrapy\nfrom scrapy.http import Request, Response\nfrom scrapy.linkextractors import IGNORED_EXTENSIONS\nfrom scrapy.linkextractors.lxmlhtml import LxmlLinkExtractor\nfrom scrapy.spidermiddlewares.httperror import HttpError\nfrom scrapy.utils.url import url_has_any_extension\nfrom twisted.python.failure import Failure\n\nEXCLUDED_DOMAINS = [\n # Returns 429 Rate-Limited Errors\n \"github.com\",\n 
\"gist.github.com\",\n # Returns 503 Errors\n \"www.amazon.com\",\n \"gitlab.com\",\n]\n\nEXCLUDED_URLS = [\n # Google calendar returns 404s on HEAD requests unconditionally\n \"https://calendar.google.com/calendar/[email protected]\",\n # Returns 409 errors to HEAD requests frequently\n \"https://medium.freecodecamp.org/\",\n # Returns 404 to HEAD requests unconditionally\n \"https://www.git-tower.com/blog/command-line-cheat-sheet/\",\n \"https://marketplace.visualstudio.com/items?itemName=rafaelmaiolla.remote-vscode\",\n \"https://www.transifex.com/zulip/zulip/announcements/\",\n \"https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-ssh\",\n # Requires authentication\n \"https://circleci.com/gh/zulip/zulip/tree/master\",\n \"https://circleci.com/gh/zulip/zulip/16617\",\n \"https://www.linkedin.com/company/zulip-project\",\n # Returns 403 errors to HEAD requests\n \"https://giphy.com\",\n \"https://giphy.com/apps/giphycapture\",\n \"https://www.udemy.com/course/the-complete-react-native-and-redux-course/\",\n]\n\nVNU_IGNORE = [\n # Real errors that should be fixed.\n r\"Duplicate ID \u201c[^\u201d]*\u201d\\.\",\n r\"The first occurrence of ID \u201c[^\u201d]*\u201d was here\\.\",\n r\"Attribute \u201cmarkdown\u201d not allowed on element \u201cdiv\u201d at this point\\.\",\n r\"No \u201cp\u201d element in scope but a \u201cp\u201d end tag seen\\.\",\n r\"Element \u201cdiv\u201d not allowed as child of element \u201cul\u201d in this context\\. \"\n + r\"\\(Suppressing further errors from this subtree\\.\\)\",\n # Warnings that are probably less important.\n r\"The \u201ctype\u201d attribute is unnecessary for JavaScript resources\\.\",\n]\nVNU_IGNORE_REGEX = re.compile(r\"|\".join(VNU_IGNORE))\n\nDEPLOY_ROOT = os.path.abspath(os.path.join(__file__, \"../../../../../..\"))\n\nZULIP_SERVER_GITHUB_FILE_URL_PREFIX = \"https://github.com/zulip/zulip/blob/master\"\nZULIP_SERVER_GITHUB_DIRECTORY_URL_PREFIX = \"https://github.com/zulip/zulip/tree/master\"\n\n\nclass BaseDocumentationSpider(scrapy.Spider):\n name: Optional[str] = None\n # Exclude domain address.\n deny_domains: List[str] = []\n start_urls: List[str] = []\n deny: List[str] = []\n file_extensions: List[str] = [\".\" + ext for ext in IGNORED_EXTENSIONS]\n tags = (\"a\", \"area\", \"img\")\n attrs = (\"href\", \"src\")\n\n def _has_extension(self, url: str) -> bool:\n return url_has_any_extension(url, self.file_extensions)\n\n def _is_external_url(self, url: str) -> bool:\n return url.startswith(\"http\") or self._has_extension(url)\n\n def check_existing(self, response: Response) -> None:\n self.log(response)\n\n def _is_external_link(self, url: str) -> bool:\n if url.startswith(\"https://chat.zulip.org\"):\n # Since most chat.zulip.org URLs will be links to specific\n # logged-in content that the spider cannot verify, or the\n # homepage, there's no need to check those (which can\n # cause errors when chat.zulip.org is being updated).\n return True\n if \"zulip.readthedocs\" in url or \"zulip.com\" in url or \"zulip.org\" in url:\n # We want CI to check any links to Zulip sites.\n return False\n if (len(url) > 4 and url[:4] == \"file\") or (\"localhost\" in url):\n # We also want CI to check any links to built documentation.\n return False\n if url.startswith(ZULIP_SERVER_GITHUB_FILE_URL_PREFIX) or url.startswith(\n ZULIP_SERVER_GITHUB_DIRECTORY_URL_PREFIX\n ):\n # We can verify these links directly in the local git repo without making any requests to GitHub servers.\n return False\n if \"github.com/zulip\" in 
url:\n # We want to check these links but due to rate limiting from GitHub, these checks often\n # fail in the CI. Thus, we should treat these as external links for now.\n # TODO: Figure out how to test github.com/zulip links in CI.\n return True\n return True\n\n def check_fragment(self, response: Response) -> None:\n self.log(response)\n xpath_template = \"//*[@id='{fragment}' or @name='{fragment}']\"\n m = re.match(r\".+\\#(?P<fragment>.*)$\", response.request.url) # Get fragment value.\n if not m:\n return\n fragment = m.group(\"fragment\")\n # Check fragment existing on response page.\n if not response.selector.xpath(xpath_template.format(fragment=fragment)):\n self.logger.error(\n \"Fragment #%s is not found on page %s\", fragment, response.request.url\n )\n\n def _vnu_callback(self, url: str) -> Callable[[Response], None]:\n def callback(response: Response) -> None:\n vnu_out = json.loads(response.text)\n for message in vnu_out[\"messages\"]:\n if not VNU_IGNORE_REGEX.fullmatch(message[\"message\"]):\n self.logger.error(\n '\"%s\":%d.%d-%d.%d: %s: %s',\n url,\n message.get(\"firstLine\", message[\"lastLine\"]),\n message.get(\"firstColumn\", message[\"lastColumn\"]),\n message[\"lastLine\"],\n message[\"lastColumn\"],\n message[\"type\"],\n message[\"message\"],\n )\n\n return callback\n\n def _make_requests(self, url: str) -> Iterator[Request]:\n # These URLs are for Zulip's webapp, which with recent changes\n # can be accessible without login an account. While we do\n # crawl documentation served by the webapp (E.g. /help/), we\n # don't want to crawl the webapp itself, so we exclude these.\n if (\n url in [\"http://localhost:9981/\", \"http://localhost:9981\"]\n or url.startswith(\"http://localhost:9981/#\")\n or url.startswith(\"http://localhost:9981#\")\n ):\n return\n\n callback: Callable[[Response], Optional[Iterator[Request]]] = self.parse\n dont_filter = False\n method = \"GET\"\n if self._is_external_url(url):\n callback = self.check_existing\n method = \"HEAD\"\n\n if url.startswith(ZULIP_SERVER_GITHUB_FILE_URL_PREFIX):\n file_path = url.replace(ZULIP_SERVER_GITHUB_FILE_URL_PREFIX, DEPLOY_ROOT)\n hash_index = file_path.find(\"#\")\n if hash_index != -1:\n file_path = file_path[:hash_index]\n if not os.path.isfile(file_path):\n self.logger.error(\n \"There is no local file associated with the GitHub URL: %s\", url\n )\n return\n elif url.startswith(ZULIP_SERVER_GITHUB_DIRECTORY_URL_PREFIX):\n dir_path = url.replace(ZULIP_SERVER_GITHUB_DIRECTORY_URL_PREFIX, DEPLOY_ROOT)\n if not os.path.isdir(dir_path):\n self.logger.error(\n \"There is no local directory associated with the GitHub URL: %s\", url\n )\n return\n elif \"#\" in url:\n dont_filter = True\n callback = self.check_fragment\n if getattr(self, \"skip_external\", False) and self._is_external_link(url):\n return\n if urlparse(url).netloc in EXCLUDED_DOMAINS:\n return\n if url in EXCLUDED_URLS:\n return\n yield Request(\n url,\n method=method,\n callback=callback,\n dont_filter=dont_filter,\n errback=self.error_callback,\n )\n\n def start_requests(self) -> Iterator[Request]:\n for url in self.start_urls:\n yield from self._make_requests(url)\n\n def parse(self, response: Response) -> Iterator[Request]:\n self.log(response)\n\n if getattr(self, \"validate_html\", False):\n yield Request(\n \"http://127.0.0.1:9988/?out=json\",\n method=\"POST\",\n headers={\"Content-Type\": response.headers[\"Content-Type\"]},\n body=response.body,\n callback=self._vnu_callback(response.url),\n errback=self.error_callback,\n )\n\n for 
link in LxmlLinkExtractor(\n deny_domains=self.deny_domains,\n deny_extensions=[\"doc\"],\n tags=self.tags,\n attrs=self.attrs,\n deny=self.deny,\n canonicalize=False,\n ).extract_links(response):\n yield from self._make_requests(link.url)\n\n def retry_request_with_get(self, request: Request) -> Iterator[Request]:\n request.method = \"GET\"\n request.dont_filter = True\n yield request\n\n def error_callback(self, failure: Failure) -> Optional[Union[Failure, Iterator[Request]]]:\n if isinstance(failure.value, HttpError):\n response = failure.value.response\n # Hack: The filtering above does not catch this URL,\n # likely due to a redirect.\n if urlparse(response.url).netloc == \"idmsa.apple.com\":\n return None\n if response.status == 405 and response.request.method == \"HEAD\":\n # Method 'HEAD' not allowed, repeat request with 'GET'\n return self.retry_request_with_get(response.request)\n self.logger.error(\"Please check link: %s\", response.request.url)\n\n return failure\n", "path": "tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py"}]}
| 3,235 | 675 |
gh_patches_debug_33335
|
rasdani/github-patches
|
git_diff
|
Kinto__kinto-1173
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Should we allow cache entries without ttl?
Or define a max_ttl setting that is always applied ?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kinto/core/cache/memory.py`
Content:
```
1 import logging
2
3 from kinto.core.cache import CacheBase
4 from kinto.core.utils import msec_time
5 from kinto.core.decorators import synchronized
6
7
8 logger = logging.getLogger(__name__)
9
10
11 class Cache(CacheBase):
12 """Cache backend implementation in local process memory.
13
14 Enable in configuration::
15
16 kinto.cache_backend = kinto.core.cache.memory
17
18 :noindex:
19 """
20
21 def __init__(self, *args, **kwargs):
22 super().__init__(*args, **kwargs)
23 self.flush()
24
25 def initialize_schema(self, dry_run=False):
26 # Nothing to do.
27 pass
28
29 def flush(self):
30 self._created_at = {}
31 self._ttl = {}
32 self._store = {}
33 self._quota = 0
34
35 def _clean_expired(self):
36 current = msec_time()
37 expired = [k for k, v in self._ttl.items() if current >= v]
38 for expired_item_key in expired:
39 self.delete(expired_item_key[len(self.prefix):])
40
41 def _clean_oversized(self):
42 if self._quota < self.max_size_bytes:
43 return
44
45 for key, value in sorted(self._created_at.items(), key=lambda k: k[1]):
46 if self._quota < (self.max_size_bytes * 0.8):
47 break
48 self.delete(key[len(self.prefix):])
49
50 @synchronized
51 def ttl(self, key):
52 ttl = self._ttl.get(self.prefix + key)
53 if ttl is not None:
54 return (ttl - msec_time()) / 1000.0
55 return -1
56
57 @synchronized
58 def expire(self, key, ttl):
59 self._ttl[self.prefix + key] = msec_time() + int(ttl * 1000.0)
60
61 @synchronized
62 def set(self, key, value, ttl=None):
63 if isinstance(value, bytes):
64 raise TypeError("a string-like object is required, not 'bytes'")
65 self._clean_expired()
66 self._clean_oversized()
67 if ttl is not None:
68 self.expire(key, ttl)
69 else:
70 logger.warning("No TTL for cache key '{}'".format(key))
71 item_key = self.prefix + key
72 self._store[item_key] = value
73 self._created_at[item_key] = msec_time()
74 self._quota += size_of(item_key, value)
75
76 @synchronized
77 def get(self, key):
78 self._clean_expired()
79 return self._store.get(self.prefix + key)
80
81 @synchronized
82 def delete(self, key):
83 key = self.prefix + key
84 self._ttl.pop(key, None)
85 self._created_at.pop(key, None)
86 value = self._store.pop(key, None)
87 self._quota -= size_of(key, value)
88
89
90 def load_from_config(config):
91 settings = config.get_settings()
92 return Cache(cache_prefix=settings['cache_prefix'],
93 cache_max_size_bytes=settings['cache_max_size_bytes'])
94
95
96 def size_of(key, value):
97 # Key used for ttl, created_at and store.
98 # Int size is 24 bytes one for ttl and one for created_at values
99 return len(key) * 3 + len(str(value)) + 24 * 2
100
```
Path: `kinto/core/cache/postgresql/__init__.py`
Content:
```
1 import logging
2 import os
3
4 from kinto.core.cache import CacheBase
5 from kinto.core.storage.postgresql.client import create_from_config
6 from kinto.core.utils import json
7
8
9 logger = logging.getLogger(__name__)
10
11
12 class Cache(CacheBase):
13 """Cache backend using PostgreSQL.
14
15 Enable in configuration::
16
17 kinto.cache_backend = kinto.core.cache.postgresql
18
19 Database location URI can be customized::
20
21 kinto.cache_url = postgres://user:[email protected]:5432/dbname
22
23 Alternatively, username and password could also rely on system user ident
24 or even specified in :file:`~/.pgpass` (*see PostgreSQL documentation*).
25
26 .. note::
27
28 Some tables and indices are created when ``kinto migrate`` is run.
29 This requires some privileges on the database, or some error will
30 be raised.
31
32 **Alternatively**, the schema can be initialized outside the
33 python application, using the SQL file located in
34 :file:`kinto/core/cache/postgresql/schema.sql`. This allows to
35 distinguish schema manipulation privileges from schema usage.
36
37
38 A connection pool is enabled by default::
39
40 kinto.cache_pool_size = 10
41 kinto.cache_maxoverflow = 10
42 kinto.cache_max_backlog = -1
43 kinto.cache_pool_recycle = -1
44 kinto.cache_pool_timeout = 30
45 kinto.cache_poolclass =
46 kinto.core.storage.postgresql.pool.QueuePoolWithMaxBacklog
47
48 The ``max_backlog`` limits the number of threads that can be in the queue
49 waiting for a connection. Once this limit has been reached, any further
50 attempts to acquire a connection will be rejected immediately, instead of
51 locking up all threads by keeping them waiting in the queue.
52
53 See `dedicated section in SQLAlchemy documentation
54 <http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html>`_
55 for default values and behaviour.
56
57 .. note::
58
59 Using a `dedicated connection pool <http://pgpool.net>`_ is still
60 recommended to allow load balancing, replication or limit the number
61 of connections used in a multi-process deployment.
62
63 :noindex:
64 """ # NOQA
65 def __init__(self, client, *args, **kwargs):
66 super().__init__(*args, **kwargs)
67 self.client = client
68
69 def initialize_schema(self, dry_run=False):
70 # Check if cache table exists.
71 query = """
72 SELECT 1
73 FROM information_schema.tables
74 WHERE table_name = 'cache';
75 """
76 with self.client.connect(readonly=True) as conn:
77 result = conn.execute(query)
78 if result.rowcount > 0:
79 logger.info("PostgreSQL cache schema is up-to-date.")
80 return
81
82 # Create schema
83 here = os.path.abspath(os.path.dirname(__file__))
84 sql_file = os.path.join(here, 'schema.sql')
85
86 if dry_run:
87 logger.info("Create cache schema from '{}'".format(sql_file))
88 return
89
90 # Since called outside request, force commit.
91 with open(sql_file) as f:
92 schema = f.read()
93 with self.client.connect(force_commit=True) as conn:
94 conn.execute(schema)
95 logger.info('Created PostgreSQL cache tables')
96
97 def flush(self):
98 query = """
99 DELETE FROM cache;
100 """
101 # Since called outside request (e.g. tests), force commit.
102 with self.client.connect(force_commit=True) as conn:
103 conn.execute(query)
104 logger.debug('Flushed PostgreSQL cache tables')
105
106 def ttl(self, key):
107 query = """
108 SELECT EXTRACT(SECOND FROM (ttl - now())) AS ttl
109 FROM cache
110 WHERE key = :key
111 AND ttl IS NOT NULL;
112 """
113 with self.client.connect(readonly=True) as conn:
114 result = conn.execute(query, dict(key=self.prefix + key))
115 if result.rowcount > 0:
116 return result.fetchone()['ttl']
117 return -1
118
119 def expire(self, key, ttl):
120 query = """
121 UPDATE cache SET ttl = sec2ttl(:ttl) WHERE key = :key;
122 """
123 with self.client.connect() as conn:
124 conn.execute(query, dict(ttl=ttl, key=self.prefix + key))
125
126 def set(self, key, value, ttl=None):
127 if isinstance(value, bytes):
128 raise TypeError("a string-like object is required, not 'bytes'")
129
130 if ttl is None:
131 logger.warning("No TTL for cache key '{}'".format(key))
132 query = """
133 INSERT INTO cache (key, value, ttl)
134 VALUES (:key, :value, sec2ttl(:ttl))
135 ON CONFLICT (key) DO UPDATE
136 SET value = :value,
137 ttl = sec2ttl(:ttl);
138 """
139 value = json.dumps(value)
140 with self.client.connect() as conn:
141 conn.execute(query, dict(key=self.prefix + key,
142 value=value, ttl=ttl))
143
144 def get(self, key):
145 purge = "DELETE FROM cache WHERE ttl IS NOT NULL AND now() > ttl;"
146 query = "SELECT value FROM cache WHERE key = :key;"
147 with self.client.connect() as conn:
148 conn.execute(purge)
149 result = conn.execute(query, dict(key=self.prefix + key))
150 if result.rowcount > 0:
151 value = result.fetchone()['value']
152 return json.loads(value)
153
154 def delete(self, key):
155 query = "DELETE FROM cache WHERE key = :key"
156 with self.client.connect() as conn:
157 conn.execute(query, dict(key=self.prefix + key))
158
159
160 def load_from_config(config):
161 settings = config.get_settings()
162 client = create_from_config(config, prefix='cache_', with_transaction=False)
163 return Cache(client=client, cache_prefix=settings['cache_prefix'])
164
```
Path: `kinto/core/cache/__init__.py`
Content:
```
1 import logging
2 import random
3
4
5 logger = logging.getLogger(__name__)
6
7
8 _HEARTBEAT_DELETE_RATE = 0.5
9 _HEARTBEAT_KEY = '__heartbeat__'
10 _HEARTBEAT_TTL_SECONDS = 3600
11
12
13 class CacheBase:
14
15 def __init__(self, *args, **kwargs):
16 self.prefix = kwargs['cache_prefix']
17 self.max_size_bytes = kwargs.get('cache_max_size_bytes')
18
19 def initialize_schema(self, dry_run=False):
20 """Create every necessary objects (like tables or indices) in the
21 backend.
22
23 This is executed when the ``kinto migrate`` command is run.
24
25 :param bool dry_run: simulate instead of executing the operations.
26 """
27 raise NotImplementedError
28
29 def flush(self):
30 """Delete every values."""
31 raise NotImplementedError
32
33 def ttl(self, key):
34 """Obtain the expiration value of the specified `key`.
35
36 :param str key: key
37 :returns: number of seconds or negative if no TTL.
38 :rtype: float
39 """
40 raise NotImplementedError
41
42 def expire(self, key, ttl):
43 """Set the expiration value `ttl` for the specified `key`.
44
45 :param str key: key
46 :param float ttl: number of seconds
47 """
48 raise NotImplementedError
49
50 def set(self, key, value, ttl=None):
51 """Store a value with the specified `key`. If `ttl` is provided,
52 set an expiration value.
53
54 :param str key: key
55 :param str value: value to store
56 :param float ttl: expire after number of seconds
57 """
58 raise NotImplementedError
59
60 def get(self, key):
61 """Obtain the value of the specified `key`.
62
63 :param str key: key
64 :returns: the stored value or None if missing.
65 :rtype: str
66 """
67 raise NotImplementedError
68
69 def delete(self, key):
70 """Delete the value of the specified `key`.
71
72 :param str key: key
73 """
74 raise NotImplementedError
75
76
77 def heartbeat(backend):
78 def ping(request):
79 """Test that cache backend is operationnal.
80
81 :param request: current request object
82 :type request: :class:`~pyramid:pyramid.request.Request`
83 :returns: ``True`` is everything is ok, ``False`` otherwise.
84 :rtype: bool
85 """
86 # No specific case for readonly mode because the cache should
87 # continue to work in that mode.
88 try:
89 if random.SystemRandom().random() < _HEARTBEAT_DELETE_RATE:
90 backend.delete(_HEARTBEAT_KEY)
91 else:
92 backend.set(_HEARTBEAT_KEY, 'alive', _HEARTBEAT_TTL_SECONDS)
93 return True
94 except:
95 logger.exception("Heartbeat Failure")
96 return False
97
98 return ping
99
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/kinto/core/cache/__init__.py b/kinto/core/cache/__init__.py
--- a/kinto/core/cache/__init__.py
+++ b/kinto/core/cache/__init__.py
@@ -47,9 +47,8 @@
"""
raise NotImplementedError
- def set(self, key, value, ttl=None):
- """Store a value with the specified `key`. If `ttl` is provided,
- set an expiration value.
+ def set(self, key, value, ttl):
+ """Store a value with the specified `key`.
:param str key: key
:param str value: value to store
diff --git a/kinto/core/cache/memory.py b/kinto/core/cache/memory.py
--- a/kinto/core/cache/memory.py
+++ b/kinto/core/cache/memory.py
@@ -59,15 +59,12 @@
self._ttl[self.prefix + key] = msec_time() + int(ttl * 1000.0)
@synchronized
- def set(self, key, value, ttl=None):
+ def set(self, key, value, ttl):
if isinstance(value, bytes):
raise TypeError("a string-like object is required, not 'bytes'")
self._clean_expired()
self._clean_oversized()
- if ttl is not None:
- self.expire(key, ttl)
- else:
- logger.warning("No TTL for cache key '{}'".format(key))
+ self.expire(key, ttl)
item_key = self.prefix + key
self._store[item_key] = value
self._created_at[item_key] = msec_time()
diff --git a/kinto/core/cache/postgresql/__init__.py b/kinto/core/cache/postgresql/__init__.py
--- a/kinto/core/cache/postgresql/__init__.py
+++ b/kinto/core/cache/postgresql/__init__.py
@@ -123,12 +123,10 @@
with self.client.connect() as conn:
conn.execute(query, dict(ttl=ttl, key=self.prefix + key))
- def set(self, key, value, ttl=None):
+ def set(self, key, value, ttl):
if isinstance(value, bytes):
raise TypeError("a string-like object is required, not 'bytes'")
- if ttl is None:
- logger.warning("No TTL for cache key '{}'".format(key))
query = """
INSERT INTO cache (key, value, ttl)
VALUES (:key, :value, sec2ttl(:ttl))
|
{"golden_diff": "diff --git a/kinto/core/cache/__init__.py b/kinto/core/cache/__init__.py\n--- a/kinto/core/cache/__init__.py\n+++ b/kinto/core/cache/__init__.py\n@@ -47,9 +47,8 @@\n \"\"\"\n raise NotImplementedError\n \n- def set(self, key, value, ttl=None):\n- \"\"\"Store a value with the specified `key`. If `ttl` is provided,\n- set an expiration value.\n+ def set(self, key, value, ttl):\n+ \"\"\"Store a value with the specified `key`.\n \n :param str key: key\n :param str value: value to store\ndiff --git a/kinto/core/cache/memory.py b/kinto/core/cache/memory.py\n--- a/kinto/core/cache/memory.py\n+++ b/kinto/core/cache/memory.py\n@@ -59,15 +59,12 @@\n self._ttl[self.prefix + key] = msec_time() + int(ttl * 1000.0)\n \n @synchronized\n- def set(self, key, value, ttl=None):\n+ def set(self, key, value, ttl):\n if isinstance(value, bytes):\n raise TypeError(\"a string-like object is required, not 'bytes'\")\n self._clean_expired()\n self._clean_oversized()\n- if ttl is not None:\n- self.expire(key, ttl)\n- else:\n- logger.warning(\"No TTL for cache key '{}'\".format(key))\n+ self.expire(key, ttl)\n item_key = self.prefix + key\n self._store[item_key] = value\n self._created_at[item_key] = msec_time()\ndiff --git a/kinto/core/cache/postgresql/__init__.py b/kinto/core/cache/postgresql/__init__.py\n--- a/kinto/core/cache/postgresql/__init__.py\n+++ b/kinto/core/cache/postgresql/__init__.py\n@@ -123,12 +123,10 @@\n with self.client.connect() as conn:\n conn.execute(query, dict(ttl=ttl, key=self.prefix + key))\n \n- def set(self, key, value, ttl=None):\n+ def set(self, key, value, ttl):\n if isinstance(value, bytes):\n raise TypeError(\"a string-like object is required, not 'bytes'\")\n \n- if ttl is None:\n- logger.warning(\"No TTL for cache key '{}'\".format(key))\n query = \"\"\"\n INSERT INTO cache (key, value, ttl)\n VALUES (:key, :value, sec2ttl(:ttl))\n", "issue": "Should we allow cache entries without ttl?\nOr define a max_ttl setting that is always applied ?\nShould we allow cache entries without ttl?\nOr define a max_ttl setting that is always applied ?\n", "before_files": [{"content": "import logging\n\nfrom kinto.core.cache import CacheBase\nfrom kinto.core.utils import msec_time\nfrom kinto.core.decorators import synchronized\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass Cache(CacheBase):\n \"\"\"Cache backend implementation in local process memory.\n\n Enable in configuration::\n\n kinto.cache_backend = kinto.core.cache.memory\n\n :noindex:\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.flush()\n\n def initialize_schema(self, dry_run=False):\n # Nothing to do.\n pass\n\n def flush(self):\n self._created_at = {}\n self._ttl = {}\n self._store = {}\n self._quota = 0\n\n def _clean_expired(self):\n current = msec_time()\n expired = [k for k, v in self._ttl.items() if current >= v]\n for expired_item_key in expired:\n self.delete(expired_item_key[len(self.prefix):])\n\n def _clean_oversized(self):\n if self._quota < self.max_size_bytes:\n return\n\n for key, value in sorted(self._created_at.items(), key=lambda k: k[1]):\n if self._quota < (self.max_size_bytes * 0.8):\n break\n self.delete(key[len(self.prefix):])\n\n @synchronized\n def ttl(self, key):\n ttl = self._ttl.get(self.prefix + key)\n if ttl is not None:\n return (ttl - msec_time()) / 1000.0\n return -1\n\n @synchronized\n def expire(self, key, ttl):\n self._ttl[self.prefix + key] = msec_time() + int(ttl * 1000.0)\n\n @synchronized\n def set(self, key, value, ttl=None):\n 
if isinstance(value, bytes):\n raise TypeError(\"a string-like object is required, not 'bytes'\")\n self._clean_expired()\n self._clean_oversized()\n if ttl is not None:\n self.expire(key, ttl)\n else:\n logger.warning(\"No TTL for cache key '{}'\".format(key))\n item_key = self.prefix + key\n self._store[item_key] = value\n self._created_at[item_key] = msec_time()\n self._quota += size_of(item_key, value)\n\n @synchronized\n def get(self, key):\n self._clean_expired()\n return self._store.get(self.prefix + key)\n\n @synchronized\n def delete(self, key):\n key = self.prefix + key\n self._ttl.pop(key, None)\n self._created_at.pop(key, None)\n value = self._store.pop(key, None)\n self._quota -= size_of(key, value)\n\n\ndef load_from_config(config):\n settings = config.get_settings()\n return Cache(cache_prefix=settings['cache_prefix'],\n cache_max_size_bytes=settings['cache_max_size_bytes'])\n\n\ndef size_of(key, value):\n # Key used for ttl, created_at and store.\n # Int size is 24 bytes one for ttl and one for created_at values\n return len(key) * 3 + len(str(value)) + 24 * 2\n", "path": "kinto/core/cache/memory.py"}, {"content": "import logging\nimport os\n\nfrom kinto.core.cache import CacheBase\nfrom kinto.core.storage.postgresql.client import create_from_config\nfrom kinto.core.utils import json\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass Cache(CacheBase):\n \"\"\"Cache backend using PostgreSQL.\n\n Enable in configuration::\n\n kinto.cache_backend = kinto.core.cache.postgresql\n\n Database location URI can be customized::\n\n kinto.cache_url = postgres://user:[email protected]:5432/dbname\n\n Alternatively, username and password could also rely on system user ident\n or even specified in :file:`~/.pgpass` (*see PostgreSQL documentation*).\n\n .. note::\n\n Some tables and indices are created when ``kinto migrate`` is run.\n This requires some privileges on the database, or some error will\n be raised.\n\n **Alternatively**, the schema can be initialized outside the\n python application, using the SQL file located in\n :file:`kinto/core/cache/postgresql/schema.sql`. This allows to\n distinguish schema manipulation privileges from schema usage.\n\n\n A connection pool is enabled by default::\n\n kinto.cache_pool_size = 10\n kinto.cache_maxoverflow = 10\n kinto.cache_max_backlog = -1\n kinto.cache_pool_recycle = -1\n kinto.cache_pool_timeout = 30\n kinto.cache_poolclass =\n kinto.core.storage.postgresql.pool.QueuePoolWithMaxBacklog\n\n The ``max_backlog`` limits the number of threads that can be in the queue\n waiting for a connection. Once this limit has been reached, any further\n attempts to acquire a connection will be rejected immediately, instead of\n locking up all threads by keeping them waiting in the queue.\n\n See `dedicated section in SQLAlchemy documentation\n <http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html>`_\n for default values and behaviour.\n\n .. 
note::\n\n Using a `dedicated connection pool <http://pgpool.net>`_ is still\n recommended to allow load balancing, replication or limit the number\n of connections used in a multi-process deployment.\n\n :noindex:\n \"\"\" # NOQA\n def __init__(self, client, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.client = client\n\n def initialize_schema(self, dry_run=False):\n # Check if cache table exists.\n query = \"\"\"\n SELECT 1\n FROM information_schema.tables\n WHERE table_name = 'cache';\n \"\"\"\n with self.client.connect(readonly=True) as conn:\n result = conn.execute(query)\n if result.rowcount > 0:\n logger.info(\"PostgreSQL cache schema is up-to-date.\")\n return\n\n # Create schema\n here = os.path.abspath(os.path.dirname(__file__))\n sql_file = os.path.join(here, 'schema.sql')\n\n if dry_run:\n logger.info(\"Create cache schema from '{}'\".format(sql_file))\n return\n\n # Since called outside request, force commit.\n with open(sql_file) as f:\n schema = f.read()\n with self.client.connect(force_commit=True) as conn:\n conn.execute(schema)\n logger.info('Created PostgreSQL cache tables')\n\n def flush(self):\n query = \"\"\"\n DELETE FROM cache;\n \"\"\"\n # Since called outside request (e.g. tests), force commit.\n with self.client.connect(force_commit=True) as conn:\n conn.execute(query)\n logger.debug('Flushed PostgreSQL cache tables')\n\n def ttl(self, key):\n query = \"\"\"\n SELECT EXTRACT(SECOND FROM (ttl - now())) AS ttl\n FROM cache\n WHERE key = :key\n AND ttl IS NOT NULL;\n \"\"\"\n with self.client.connect(readonly=True) as conn:\n result = conn.execute(query, dict(key=self.prefix + key))\n if result.rowcount > 0:\n return result.fetchone()['ttl']\n return -1\n\n def expire(self, key, ttl):\n query = \"\"\"\n UPDATE cache SET ttl = sec2ttl(:ttl) WHERE key = :key;\n \"\"\"\n with self.client.connect() as conn:\n conn.execute(query, dict(ttl=ttl, key=self.prefix + key))\n\n def set(self, key, value, ttl=None):\n if isinstance(value, bytes):\n raise TypeError(\"a string-like object is required, not 'bytes'\")\n\n if ttl is None:\n logger.warning(\"No TTL for cache key '{}'\".format(key))\n query = \"\"\"\n INSERT INTO cache (key, value, ttl)\n VALUES (:key, :value, sec2ttl(:ttl))\n ON CONFLICT (key) DO UPDATE\n SET value = :value,\n ttl = sec2ttl(:ttl);\n \"\"\"\n value = json.dumps(value)\n with self.client.connect() as conn:\n conn.execute(query, dict(key=self.prefix + key,\n value=value, ttl=ttl))\n\n def get(self, key):\n purge = \"DELETE FROM cache WHERE ttl IS NOT NULL AND now() > ttl;\"\n query = \"SELECT value FROM cache WHERE key = :key;\"\n with self.client.connect() as conn:\n conn.execute(purge)\n result = conn.execute(query, dict(key=self.prefix + key))\n if result.rowcount > 0:\n value = result.fetchone()['value']\n return json.loads(value)\n\n def delete(self, key):\n query = \"DELETE FROM cache WHERE key = :key\"\n with self.client.connect() as conn:\n conn.execute(query, dict(key=self.prefix + key))\n\n\ndef load_from_config(config):\n settings = config.get_settings()\n client = create_from_config(config, prefix='cache_', with_transaction=False)\n return Cache(client=client, cache_prefix=settings['cache_prefix'])\n", "path": "kinto/core/cache/postgresql/__init__.py"}, {"content": "import logging\nimport random\n\n\nlogger = logging.getLogger(__name__)\n\n\n_HEARTBEAT_DELETE_RATE = 0.5\n_HEARTBEAT_KEY = '__heartbeat__'\n_HEARTBEAT_TTL_SECONDS = 3600\n\n\nclass CacheBase:\n\n def __init__(self, *args, **kwargs):\n self.prefix = 
kwargs['cache_prefix']\n self.max_size_bytes = kwargs.get('cache_max_size_bytes')\n\n def initialize_schema(self, dry_run=False):\n \"\"\"Create every necessary objects (like tables or indices) in the\n backend.\n\n This is executed when the ``kinto migrate`` command is run.\n\n :param bool dry_run: simulate instead of executing the operations.\n \"\"\"\n raise NotImplementedError\n\n def flush(self):\n \"\"\"Delete every values.\"\"\"\n raise NotImplementedError\n\n def ttl(self, key):\n \"\"\"Obtain the expiration value of the specified `key`.\n\n :param str key: key\n :returns: number of seconds or negative if no TTL.\n :rtype: float\n \"\"\"\n raise NotImplementedError\n\n def expire(self, key, ttl):\n \"\"\"Set the expiration value `ttl` for the specified `key`.\n\n :param str key: key\n :param float ttl: number of seconds\n \"\"\"\n raise NotImplementedError\n\n def set(self, key, value, ttl=None):\n \"\"\"Store a value with the specified `key`. If `ttl` is provided,\n set an expiration value.\n\n :param str key: key\n :param str value: value to store\n :param float ttl: expire after number of seconds\n \"\"\"\n raise NotImplementedError\n\n def get(self, key):\n \"\"\"Obtain the value of the specified `key`.\n\n :param str key: key\n :returns: the stored value or None if missing.\n :rtype: str\n \"\"\"\n raise NotImplementedError\n\n def delete(self, key):\n \"\"\"Delete the value of the specified `key`.\n\n :param str key: key\n \"\"\"\n raise NotImplementedError\n\n\ndef heartbeat(backend):\n def ping(request):\n \"\"\"Test that cache backend is operationnal.\n\n :param request: current request object\n :type request: :class:`~pyramid:pyramid.request.Request`\n :returns: ``True`` is everything is ok, ``False`` otherwise.\n :rtype: bool\n \"\"\"\n # No specific case for readonly mode because the cache should\n # continue to work in that mode.\n try:\n if random.SystemRandom().random() < _HEARTBEAT_DELETE_RATE:\n backend.delete(_HEARTBEAT_KEY)\n else:\n backend.set(_HEARTBEAT_KEY, 'alive', _HEARTBEAT_TTL_SECONDS)\n return True\n except:\n logger.exception(\"Heartbeat Failure\")\n return False\n\n return ping\n", "path": "kinto/core/cache/__init__.py"}], "after_files": [{"content": "import logging\n\nfrom kinto.core.cache import CacheBase\nfrom kinto.core.utils import msec_time\nfrom kinto.core.decorators import synchronized\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass Cache(CacheBase):\n \"\"\"Cache backend implementation in local process memory.\n\n Enable in configuration::\n\n kinto.cache_backend = kinto.core.cache.memory\n\n :noindex:\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.flush()\n\n def initialize_schema(self, dry_run=False):\n # Nothing to do.\n pass\n\n def flush(self):\n self._created_at = {}\n self._ttl = {}\n self._store = {}\n self._quota = 0\n\n def _clean_expired(self):\n current = msec_time()\n expired = [k for k, v in self._ttl.items() if current >= v]\n for expired_item_key in expired:\n self.delete(expired_item_key[len(self.prefix):])\n\n def _clean_oversized(self):\n if self._quota < self.max_size_bytes:\n return\n\n for key, value in sorted(self._created_at.items(), key=lambda k: k[1]):\n if self._quota < (self.max_size_bytes * 0.8):\n break\n self.delete(key[len(self.prefix):])\n\n @synchronized\n def ttl(self, key):\n ttl = self._ttl.get(self.prefix + key)\n if ttl is not None:\n return (ttl - msec_time()) / 1000.0\n return -1\n\n @synchronized\n def expire(self, key, ttl):\n self._ttl[self.prefix + 
key] = msec_time() + int(ttl * 1000.0)\n\n @synchronized\n def set(self, key, value, ttl):\n if isinstance(value, bytes):\n raise TypeError(\"a string-like object is required, not 'bytes'\")\n self._clean_expired()\n self._clean_oversized()\n self.expire(key, ttl)\n item_key = self.prefix + key\n self._store[item_key] = value\n self._created_at[item_key] = msec_time()\n self._quota += size_of(item_key, value)\n\n @synchronized\n def get(self, key):\n self._clean_expired()\n return self._store.get(self.prefix + key)\n\n @synchronized\n def delete(self, key):\n key = self.prefix + key\n self._ttl.pop(key, None)\n self._created_at.pop(key, None)\n value = self._store.pop(key, None)\n self._quota -= size_of(key, value)\n\n\ndef load_from_config(config):\n settings = config.get_settings()\n return Cache(cache_prefix=settings['cache_prefix'],\n cache_max_size_bytes=settings['cache_max_size_bytes'])\n\n\ndef size_of(key, value):\n # Key used for ttl, created_at and store.\n # Int size is 24 bytes one for ttl and one for created_at values\n return len(key) * 3 + len(str(value)) + 24 * 2\n", "path": "kinto/core/cache/memory.py"}, {"content": "import logging\nimport os\n\nfrom kinto.core.cache import CacheBase\nfrom kinto.core.storage.postgresql.client import create_from_config\nfrom kinto.core.utils import json\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass Cache(CacheBase):\n \"\"\"Cache backend using PostgreSQL.\n\n Enable in configuration::\n\n kinto.cache_backend = kinto.core.cache.postgresql\n\n Database location URI can be customized::\n\n kinto.cache_url = postgres://user:[email protected]:5432/dbname\n\n Alternatively, username and password could also rely on system user ident\n or even specified in :file:`~/.pgpass` (*see PostgreSQL documentation*).\n\n .. note::\n\n Some tables and indices are created when ``kinto migrate`` is run.\n This requires some privileges on the database, or some error will\n be raised.\n\n **Alternatively**, the schema can be initialized outside the\n python application, using the SQL file located in\n :file:`kinto/core/cache/postgresql/schema.sql`. This allows to\n distinguish schema manipulation privileges from schema usage.\n\n\n A connection pool is enabled by default::\n\n kinto.cache_pool_size = 10\n kinto.cache_maxoverflow = 10\n kinto.cache_max_backlog = -1\n kinto.cache_pool_recycle = -1\n kinto.cache_pool_timeout = 30\n kinto.cache_poolclass =\n kinto.core.storage.postgresql.pool.QueuePoolWithMaxBacklog\n\n The ``max_backlog`` limits the number of threads that can be in the queue\n waiting for a connection. Once this limit has been reached, any further\n attempts to acquire a connection will be rejected immediately, instead of\n locking up all threads by keeping them waiting in the queue.\n\n See `dedicated section in SQLAlchemy documentation\n <http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html>`_\n for default values and behaviour.\n\n .. 
note::\n\n Using a `dedicated connection pool <http://pgpool.net>`_ is still\n recommended to allow load balancing, replication or limit the number\n of connections used in a multi-process deployment.\n\n :noindex:\n \"\"\" # NOQA\n def __init__(self, client, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.client = client\n\n def initialize_schema(self, dry_run=False):\n # Check if cache table exists.\n query = \"\"\"\n SELECT 1\n FROM information_schema.tables\n WHERE table_name = 'cache';\n \"\"\"\n with self.client.connect(readonly=True) as conn:\n result = conn.execute(query)\n if result.rowcount > 0:\n logger.info(\"PostgreSQL cache schema is up-to-date.\")\n return\n\n # Create schema\n here = os.path.abspath(os.path.dirname(__file__))\n sql_file = os.path.join(here, 'schema.sql')\n\n if dry_run:\n logger.info(\"Create cache schema from '{}'\".format(sql_file))\n return\n\n # Since called outside request, force commit.\n with open(sql_file) as f:\n schema = f.read()\n with self.client.connect(force_commit=True) as conn:\n conn.execute(schema)\n logger.info('Created PostgreSQL cache tables')\n\n def flush(self):\n query = \"\"\"\n DELETE FROM cache;\n \"\"\"\n # Since called outside request (e.g. tests), force commit.\n with self.client.connect(force_commit=True) as conn:\n conn.execute(query)\n logger.debug('Flushed PostgreSQL cache tables')\n\n def ttl(self, key):\n query = \"\"\"\n SELECT EXTRACT(SECOND FROM (ttl - now())) AS ttl\n FROM cache\n WHERE key = :key\n AND ttl IS NOT NULL;\n \"\"\"\n with self.client.connect(readonly=True) as conn:\n result = conn.execute(query, dict(key=self.prefix + key))\n if result.rowcount > 0:\n return result.fetchone()['ttl']\n return -1\n\n def expire(self, key, ttl):\n query = \"\"\"\n UPDATE cache SET ttl = sec2ttl(:ttl) WHERE key = :key;\n \"\"\"\n with self.client.connect() as conn:\n conn.execute(query, dict(ttl=ttl, key=self.prefix + key))\n\n def set(self, key, value, ttl):\n if isinstance(value, bytes):\n raise TypeError(\"a string-like object is required, not 'bytes'\")\n\n query = \"\"\"\n INSERT INTO cache (key, value, ttl)\n VALUES (:key, :value, sec2ttl(:ttl))\n ON CONFLICT (key) DO UPDATE\n SET value = :value,\n ttl = sec2ttl(:ttl);\n \"\"\"\n value = json.dumps(value)\n with self.client.connect() as conn:\n conn.execute(query, dict(key=self.prefix + key,\n value=value, ttl=ttl))\n\n def get(self, key):\n purge = \"DELETE FROM cache WHERE ttl IS NOT NULL AND now() > ttl;\"\n query = \"SELECT value FROM cache WHERE key = :key;\"\n with self.client.connect() as conn:\n conn.execute(purge)\n result = conn.execute(query, dict(key=self.prefix + key))\n if result.rowcount > 0:\n value = result.fetchone()['value']\n return json.loads(value)\n\n def delete(self, key):\n query = \"DELETE FROM cache WHERE key = :key\"\n with self.client.connect() as conn:\n conn.execute(query, dict(key=self.prefix + key))\n\n\ndef load_from_config(config):\n settings = config.get_settings()\n client = create_from_config(config, prefix='cache_', with_transaction=False)\n return Cache(client=client, cache_prefix=settings['cache_prefix'])\n", "path": "kinto/core/cache/postgresql/__init__.py"}, {"content": "import logging\nimport random\n\n\nlogger = logging.getLogger(__name__)\n\n\n_HEARTBEAT_DELETE_RATE = 0.5\n_HEARTBEAT_KEY = '__heartbeat__'\n_HEARTBEAT_TTL_SECONDS = 3600\n\n\nclass CacheBase:\n\n def __init__(self, *args, **kwargs):\n self.prefix = kwargs['cache_prefix']\n self.max_size_bytes = kwargs.get('cache_max_size_bytes')\n\n def 
initialize_schema(self, dry_run=False):\n \"\"\"Create every necessary objects (like tables or indices) in the\n backend.\n\n This is executed when the ``kinto migrate`` command is run.\n\n :param bool dry_run: simulate instead of executing the operations.\n \"\"\"\n raise NotImplementedError\n\n def flush(self):\n \"\"\"Delete every values.\"\"\"\n raise NotImplementedError\n\n def ttl(self, key):\n \"\"\"Obtain the expiration value of the specified `key`.\n\n :param str key: key\n :returns: number of seconds or negative if no TTL.\n :rtype: float\n \"\"\"\n raise NotImplementedError\n\n def expire(self, key, ttl):\n \"\"\"Set the expiration value `ttl` for the specified `key`.\n\n :param str key: key\n :param float ttl: number of seconds\n \"\"\"\n raise NotImplementedError\n\n def set(self, key, value, ttl):\n \"\"\"Store a value with the specified `key`.\n\n :param str key: key\n :param str value: value to store\n :param float ttl: expire after number of seconds\n \"\"\"\n raise NotImplementedError\n\n def get(self, key):\n \"\"\"Obtain the value of the specified `key`.\n\n :param str key: key\n :returns: the stored value or None if missing.\n :rtype: str\n \"\"\"\n raise NotImplementedError\n\n def delete(self, key):\n \"\"\"Delete the value of the specified `key`.\n\n :param str key: key\n \"\"\"\n raise NotImplementedError\n\n\ndef heartbeat(backend):\n def ping(request):\n \"\"\"Test that cache backend is operationnal.\n\n :param request: current request object\n :type request: :class:`~pyramid:pyramid.request.Request`\n :returns: ``True`` is everything is ok, ``False`` otherwise.\n :rtype: bool\n \"\"\"\n # No specific case for readonly mode because the cache should\n # continue to work in that mode.\n try:\n if random.SystemRandom().random() < _HEARTBEAT_DELETE_RATE:\n backend.delete(_HEARTBEAT_KEY)\n else:\n backend.set(_HEARTBEAT_KEY, 'alive', _HEARTBEAT_TTL_SECONDS)\n return True\n except:\n logger.exception(\"Heartbeat Failure\")\n return False\n\n return ping\n", "path": "kinto/core/cache/__init__.py"}]}
| 3,692 | 557 |
gh_patches_debug_29584
|
rasdani/github-patches
|
git_diff
|
matrix-org__synapse-3157
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
synapse fails to start under Twisted >= 18.4
Looks like we are relying on a private identifier which has been removed in Twisted 18.4:
```
Traceback (most recent call last):
File "/usr/lib/python2.7/runpy.py", line 174, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "/usr/lib/python2.7/runpy.py", line 72, in _run_code
exec code in run_globals
File "/home/rav/work/synapse/synapse/app/homeserver.py", line 31, in <module>
from synapse.crypto import context_factory
File "synapse/crypto/context_factory.py", line 17, in <module>
from twisted.internet._sslverify import _OpenSSLECCurve, _defaultCurveName
ImportError: cannot import name _OpenSSLECCurve
```
synapse fails to start under Twisted >= 18.4
Looks like we are relying on a private identifier which has been removed in Twisted 18.4:
```
Traceback (most recent call last):
File "/usr/lib/python2.7/runpy.py", line 174, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "/usr/lib/python2.7/runpy.py", line 72, in _run_code
exec code in run_globals
File "/home/rav/work/synapse/synapse/app/homeserver.py", line 31, in <module>
from synapse.crypto import context_factory
File "synapse/crypto/context_factory.py", line 17, in <module>
from twisted.internet._sslverify import _OpenSSLECCurve, _defaultCurveName
ImportError: cannot import name _OpenSSLECCurve
```
--- END ISSUE ---
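For orientation, the failing import pulls in a private Twisted helper (`_OpenSSLECCurve`) that newer Twisted releases no longer ship. The golden patch further down in this record switches to public pyOpenSSL APIs instead; a minimal sketch of that approach (assuming pyOpenSSL >= 0.15, where `crypto.get_elliptic_curve` is available, and a hypothetical helper name) looks like:

```python
from OpenSSL import SSL, crypto


def enable_ecdh(context, curve_name="prime256v1"):
    # Use public pyOpenSSL calls rather than the removed
    # twisted.internet._sslverify._OpenSSLECCurve helper.
    try:
        curve = crypto.get_elliptic_curve(curve_name)
        context.set_tmp_ecdh(curve)
    except Exception:
        # If the curve is unavailable, TLS still works, just without
        # ECDHE from this curve.
        pass


ctx = SSL.Context(SSL.SSLv23_METHOD)
enable_ecdh(ctx)
```

The `curve_name` default is assumed here to match Twisted's historical `_defaultCurveName`; treat it as an illustrative value rather than a documented constant.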
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `synapse/crypto/context_factory.py`
Content:
```
1 # Copyright 2014-2016 OpenMarket Ltd
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from twisted.internet import ssl
16 from OpenSSL import SSL
17 from twisted.internet._sslverify import _OpenSSLECCurve, _defaultCurveName
18
19 import logging
20
21 logger = logging.getLogger(__name__)
22
23
24 class ServerContextFactory(ssl.ContextFactory):
25 """Factory for PyOpenSSL SSL contexts that are used to handle incoming
26 connections and to make connections to remote servers."""
27
28 def __init__(self, config):
29 self._context = SSL.Context(SSL.SSLv23_METHOD)
30 self.configure_context(self._context, config)
31
32 @staticmethod
33 def configure_context(context, config):
34 try:
35 _ecCurve = _OpenSSLECCurve(_defaultCurveName)
36 _ecCurve.addECKeyToContext(context)
37 except Exception:
38 logger.exception("Failed to enable elliptic curve for TLS")
39 context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
40 context.use_certificate_chain_file(config.tls_certificate_file)
41
42 if not config.no_tls:
43 context.use_privatekey(config.tls_private_key)
44
45 context.load_tmp_dh(config.tls_dh_params_path)
46 context.set_cipher_list("!ADH:HIGH+kEDH:!AECDH:HIGH+kEECDH")
47
48 def getContext(self):
49 return self._context
50
```
Path: `synapse/python_dependencies.py`
Content:
```
1 # Copyright 2015, 2016 OpenMarket Ltd
2 # Copyright 2017 Vector Creations Ltd
3 # Copyright 2018 New Vector Ltd
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
16
17 import logging
18 from distutils.version import LooseVersion
19
20 logger = logging.getLogger(__name__)
21
22 # this dict maps from python package name to a list of modules we expect it to
23 # provide.
24 #
25 # the key is a "requirement specifier", as used as a parameter to `pip
26 # install`[1], or an `install_requires` argument to `setuptools.setup` [2].
27 #
28 # the value is a sequence of strings; each entry should be the name of the
29 # python module, optionally followed by a version assertion which can be either
30 # ">=<ver>" or "==<ver>".
31 #
32 # [1] https://pip.pypa.io/en/stable/reference/pip_install/#requirement-specifiers.
33 # [2] https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-dependencies
34 REQUIREMENTS = {
35 "jsonschema>=2.5.1": ["jsonschema>=2.5.1"],
36 "frozendict>=0.4": ["frozendict"],
37 "unpaddedbase64>=1.1.0": ["unpaddedbase64>=1.1.0"],
38 "canonicaljson>=1.1.3": ["canonicaljson>=1.1.3"],
39 "signedjson>=1.0.0": ["signedjson>=1.0.0"],
40 "pynacl>=1.2.1": ["nacl>=1.2.1", "nacl.bindings"],
41 "service_identity>=1.0.0": ["service_identity>=1.0.0"],
42
43 # we break under Twisted 18.4
44 # (https://github.com/matrix-org/synapse/issues/3135)
45 "Twisted>=16.0.0,<18.4": ["twisted>=16.0.0"],
46
47 "pyopenssl>=0.14": ["OpenSSL>=0.14"],
48 "pyyaml": ["yaml"],
49 "pyasn1": ["pyasn1"],
50 "daemonize": ["daemonize"],
51 "bcrypt": ["bcrypt>=3.1.0"],
52 "pillow": ["PIL"],
53 "pydenticon": ["pydenticon"],
54 "blist": ["blist"],
55 "pysaml2>=3.0.0": ["saml2>=3.0.0"],
56 "pymacaroons-pynacl": ["pymacaroons"],
57 "msgpack-python>=0.3.0": ["msgpack"],
58 "phonenumbers>=8.2.0": ["phonenumbers"],
59 "six": ["six"],
60 }
61 CONDITIONAL_REQUIREMENTS = {
62 "web_client": {
63 "matrix_angular_sdk>=0.6.8": ["syweb>=0.6.8"],
64 },
65 "preview_url": {
66 "netaddr>=0.7.18": ["netaddr"],
67 },
68 "email.enable_notifs": {
69 "Jinja2>=2.8": ["Jinja2>=2.8"],
70 "bleach>=1.4.2": ["bleach>=1.4.2"],
71 },
72 "matrix-synapse-ldap3": {
73 "matrix-synapse-ldap3>=0.1": ["ldap_auth_provider"],
74 },
75 "psutil": {
76 "psutil>=2.0.0": ["psutil>=2.0.0"],
77 },
78 "affinity": {
79 "affinity": ["affinity"],
80 },
81 }
82
83
84 def requirements(config=None, include_conditional=False):
85 reqs = REQUIREMENTS.copy()
86 if include_conditional:
87 for _, req in CONDITIONAL_REQUIREMENTS.items():
88 reqs.update(req)
89 return reqs
90
91
92 def github_link(project, version, egg):
93 return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg)
94
95
96 DEPENDENCY_LINKS = {
97 }
98
99
100 class MissingRequirementError(Exception):
101 def __init__(self, message, module_name, dependency):
102 super(MissingRequirementError, self).__init__(message)
103 self.module_name = module_name
104 self.dependency = dependency
105
106
107 def check_requirements(config=None):
108 """Checks that all the modules needed by synapse have been correctly
109 installed and are at the correct version"""
110 for dependency, module_requirements in (
111 requirements(config, include_conditional=False).items()):
112 for module_requirement in module_requirements:
113 if ">=" in module_requirement:
114 module_name, required_version = module_requirement.split(">=")
115 version_test = ">="
116 elif "==" in module_requirement:
117 module_name, required_version = module_requirement.split("==")
118 version_test = "=="
119 else:
120 module_name = module_requirement
121 version_test = None
122
123 try:
124 module = __import__(module_name)
125 except ImportError:
126 logging.exception(
127 "Can't import %r which is part of %r",
128 module_name, dependency
129 )
130 raise MissingRequirementError(
131 "Can't import %r which is part of %r"
132 % (module_name, dependency), module_name, dependency
133 )
134 version = getattr(module, "__version__", None)
135 file_path = getattr(module, "__file__", None)
136 logger.info(
137 "Using %r version %r from %r to satisfy %r",
138 module_name, version, file_path, dependency
139 )
140
141 if version_test == ">=":
142 if version is None:
143 raise MissingRequirementError(
144 "Version of %r isn't set as __version__ of module %r"
145 % (dependency, module_name), module_name, dependency
146 )
147 if LooseVersion(version) < LooseVersion(required_version):
148 raise MissingRequirementError(
149 "Version of %r in %r is too old. %r < %r"
150 % (dependency, file_path, version, required_version),
151 module_name, dependency
152 )
153 elif version_test == "==":
154 if version is None:
155 raise MissingRequirementError(
156 "Version of %r isn't set as __version__ of module %r"
157 % (dependency, module_name), module_name, dependency
158 )
159 if LooseVersion(version) != LooseVersion(required_version):
160 raise MissingRequirementError(
161 "Unexpected version of %r in %r. %r != %r"
162 % (dependency, file_path, version, required_version),
163 module_name, dependency
164 )
165
166
167 def list_requirements():
168 result = []
169 linked = []
170 for link in DEPENDENCY_LINKS.values():
171 egg = link.split("#egg=")[1]
172 linked.append(egg.split('-')[0])
173 result.append(link)
174 for requirement in requirements(include_conditional=True):
175 is_linked = False
176 for link in linked:
177 if requirement.replace('-', '_').startswith(link):
178 is_linked = True
179 if not is_linked:
180 result.append(requirement)
181 return result
182
183
184 if __name__ == "__main__":
185 import sys
186 sys.stdout.writelines(req + "\n" for req in list_requirements())
187
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -13,8 +13,8 @@
# limitations under the License.
from twisted.internet import ssl
-from OpenSSL import SSL
-from twisted.internet._sslverify import _OpenSSLECCurve, _defaultCurveName
+from OpenSSL import SSL, crypto
+from twisted.internet._sslverify import _defaultCurveName
import logging
@@ -32,8 +32,9 @@
@staticmethod
def configure_context(context, config):
try:
- _ecCurve = _OpenSSLECCurve(_defaultCurveName)
- _ecCurve.addECKeyToContext(context)
+ _ecCurve = crypto.get_elliptic_curve(_defaultCurveName)
+ context.set_tmp_ecdh(_ecCurve)
+
except Exception:
logger.exception("Failed to enable elliptic curve for TLS")
context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -39,12 +39,11 @@
"signedjson>=1.0.0": ["signedjson>=1.0.0"],
"pynacl>=1.2.1": ["nacl>=1.2.1", "nacl.bindings"],
"service_identity>=1.0.0": ["service_identity>=1.0.0"],
+ "Twisted>=16.0.0": ["twisted>=16.0.0"],
- # we break under Twisted 18.4
- # (https://github.com/matrix-org/synapse/issues/3135)
- "Twisted>=16.0.0,<18.4": ["twisted>=16.0.0"],
+ # We use crypto.get_elliptic_curve which is only supported in >=0.15
+ "pyopenssl>=0.15": ["OpenSSL>=0.15"],
- "pyopenssl>=0.14": ["OpenSSL>=0.14"],
"pyyaml": ["yaml"],
"pyasn1": ["pyasn1"],
"daemonize": ["daemonize"],
|
{"golden_diff": "diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py\n--- a/synapse/crypto/context_factory.py\n+++ b/synapse/crypto/context_factory.py\n@@ -13,8 +13,8 @@\n # limitations under the License.\n \n from twisted.internet import ssl\n-from OpenSSL import SSL\n-from twisted.internet._sslverify import _OpenSSLECCurve, _defaultCurveName\n+from OpenSSL import SSL, crypto\n+from twisted.internet._sslverify import _defaultCurveName\n \n import logging\n \n@@ -32,8 +32,9 @@\n @staticmethod\n def configure_context(context, config):\n try:\n- _ecCurve = _OpenSSLECCurve(_defaultCurveName)\n- _ecCurve.addECKeyToContext(context)\n+ _ecCurve = crypto.get_elliptic_curve(_defaultCurveName)\n+ context.set_tmp_ecdh(_ecCurve)\n+\n except Exception:\n logger.exception(\"Failed to enable elliptic curve for TLS\")\n context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)\ndiff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py\n--- a/synapse/python_dependencies.py\n+++ b/synapse/python_dependencies.py\n@@ -39,12 +39,11 @@\n \"signedjson>=1.0.0\": [\"signedjson>=1.0.0\"],\n \"pynacl>=1.2.1\": [\"nacl>=1.2.1\", \"nacl.bindings\"],\n \"service_identity>=1.0.0\": [\"service_identity>=1.0.0\"],\n+ \"Twisted>=16.0.0\": [\"twisted>=16.0.0\"],\n \n- # we break under Twisted 18.4\n- # (https://github.com/matrix-org/synapse/issues/3135)\n- \"Twisted>=16.0.0,<18.4\": [\"twisted>=16.0.0\"],\n+ # We use crypto.get_elliptic_curve which is only supported in >=0.15\n+ \"pyopenssl>=0.15\": [\"OpenSSL>=0.15\"],\n \n- \"pyopenssl>=0.14\": [\"OpenSSL>=0.14\"],\n \"pyyaml\": [\"yaml\"],\n \"pyasn1\": [\"pyasn1\"],\n \"daemonize\": [\"daemonize\"],\n", "issue": "synapse fails to start under Twisted >= 18.4\nLooks like we are relying on a private identifier which has been removed in Twisted 18.4:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python2.7/runpy.py\", line 174, in _run_module_as_main\r\n \"__main__\", fname, loader, pkg_name)\r\n File \"/usr/lib/python2.7/runpy.py\", line 72, in _run_code\r\n exec code in run_globals\r\n File \"/home/rav/work/synapse/synapse/app/homeserver.py\", line 31, in <module>\r\n from synapse.crypto import context_factory\r\n File \"synapse/crypto/context_factory.py\", line 17, in <module>\r\n from twisted.internet._sslverify import _OpenSSLECCurve, _defaultCurveName\r\nImportError: cannot import name _OpenSSLECCurve\r\n```\nsynapse fails to start under Twisted >= 18.4\nLooks like we are relying on a private identifier which has been removed in Twisted 18.4:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python2.7/runpy.py\", line 174, in _run_module_as_main\r\n \"__main__\", fname, loader, pkg_name)\r\n File \"/usr/lib/python2.7/runpy.py\", line 72, in _run_code\r\n exec code in run_globals\r\n File \"/home/rav/work/synapse/synapse/app/homeserver.py\", line 31, in <module>\r\n from synapse.crypto import context_factory\r\n File \"synapse/crypto/context_factory.py\", line 17, in <module>\r\n from twisted.internet._sslverify import _OpenSSLECCurve, _defaultCurveName\r\nImportError: cannot import name _OpenSSLECCurve\r\n```\n", "before_files": [{"content": "# Copyright 2014-2016 OpenMarket Ltd\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, 
software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom twisted.internet import ssl\nfrom OpenSSL import SSL\nfrom twisted.internet._sslverify import _OpenSSLECCurve, _defaultCurveName\n\nimport logging\n\nlogger = logging.getLogger(__name__)\n\n\nclass ServerContextFactory(ssl.ContextFactory):\n \"\"\"Factory for PyOpenSSL SSL contexts that are used to handle incoming\n connections and to make connections to remote servers.\"\"\"\n\n def __init__(self, config):\n self._context = SSL.Context(SSL.SSLv23_METHOD)\n self.configure_context(self._context, config)\n\n @staticmethod\n def configure_context(context, config):\n try:\n _ecCurve = _OpenSSLECCurve(_defaultCurveName)\n _ecCurve.addECKeyToContext(context)\n except Exception:\n logger.exception(\"Failed to enable elliptic curve for TLS\")\n context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)\n context.use_certificate_chain_file(config.tls_certificate_file)\n\n if not config.no_tls:\n context.use_privatekey(config.tls_private_key)\n\n context.load_tmp_dh(config.tls_dh_params_path)\n context.set_cipher_list(\"!ADH:HIGH+kEDH:!AECDH:HIGH+kEECDH\")\n\n def getContext(self):\n return self._context\n", "path": "synapse/crypto/context_factory.py"}, {"content": "# Copyright 2015, 2016 OpenMarket Ltd\n# Copyright 2017 Vector Creations Ltd\n# Copyright 2018 New Vector Ltd\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\nfrom distutils.version import LooseVersion\n\nlogger = logging.getLogger(__name__)\n\n# this dict maps from python package name to a list of modules we expect it to\n# provide.\n#\n# the key is a \"requirement specifier\", as used as a parameter to `pip\n# install`[1], or an `install_requires` argument to `setuptools.setup` [2].\n#\n# the value is a sequence of strings; each entry should be the name of the\n# python module, optionally followed by a version assertion which can be either\n# \">=<ver>\" or \"==<ver>\".\n#\n# [1] https://pip.pypa.io/en/stable/reference/pip_install/#requirement-specifiers.\n# [2] https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-dependencies\nREQUIREMENTS = {\n \"jsonschema>=2.5.1\": [\"jsonschema>=2.5.1\"],\n \"frozendict>=0.4\": [\"frozendict\"],\n \"unpaddedbase64>=1.1.0\": [\"unpaddedbase64>=1.1.0\"],\n \"canonicaljson>=1.1.3\": [\"canonicaljson>=1.1.3\"],\n \"signedjson>=1.0.0\": [\"signedjson>=1.0.0\"],\n \"pynacl>=1.2.1\": [\"nacl>=1.2.1\", \"nacl.bindings\"],\n \"service_identity>=1.0.0\": [\"service_identity>=1.0.0\"],\n\n # we break under Twisted 18.4\n # (https://github.com/matrix-org/synapse/issues/3135)\n \"Twisted>=16.0.0,<18.4\": [\"twisted>=16.0.0\"],\n\n \"pyopenssl>=0.14\": [\"OpenSSL>=0.14\"],\n \"pyyaml\": [\"yaml\"],\n \"pyasn1\": [\"pyasn1\"],\n \"daemonize\": [\"daemonize\"],\n \"bcrypt\": [\"bcrypt>=3.1.0\"],\n \"pillow\": 
[\"PIL\"],\n \"pydenticon\": [\"pydenticon\"],\n \"blist\": [\"blist\"],\n \"pysaml2>=3.0.0\": [\"saml2>=3.0.0\"],\n \"pymacaroons-pynacl\": [\"pymacaroons\"],\n \"msgpack-python>=0.3.0\": [\"msgpack\"],\n \"phonenumbers>=8.2.0\": [\"phonenumbers\"],\n \"six\": [\"six\"],\n}\nCONDITIONAL_REQUIREMENTS = {\n \"web_client\": {\n \"matrix_angular_sdk>=0.6.8\": [\"syweb>=0.6.8\"],\n },\n \"preview_url\": {\n \"netaddr>=0.7.18\": [\"netaddr\"],\n },\n \"email.enable_notifs\": {\n \"Jinja2>=2.8\": [\"Jinja2>=2.8\"],\n \"bleach>=1.4.2\": [\"bleach>=1.4.2\"],\n },\n \"matrix-synapse-ldap3\": {\n \"matrix-synapse-ldap3>=0.1\": [\"ldap_auth_provider\"],\n },\n \"psutil\": {\n \"psutil>=2.0.0\": [\"psutil>=2.0.0\"],\n },\n \"affinity\": {\n \"affinity\": [\"affinity\"],\n },\n}\n\n\ndef requirements(config=None, include_conditional=False):\n reqs = REQUIREMENTS.copy()\n if include_conditional:\n for _, req in CONDITIONAL_REQUIREMENTS.items():\n reqs.update(req)\n return reqs\n\n\ndef github_link(project, version, egg):\n return \"https://github.com/%s/tarball/%s/#egg=%s\" % (project, version, egg)\n\n\nDEPENDENCY_LINKS = {\n}\n\n\nclass MissingRequirementError(Exception):\n def __init__(self, message, module_name, dependency):\n super(MissingRequirementError, self).__init__(message)\n self.module_name = module_name\n self.dependency = dependency\n\n\ndef check_requirements(config=None):\n \"\"\"Checks that all the modules needed by synapse have been correctly\n installed and are at the correct version\"\"\"\n for dependency, module_requirements in (\n requirements(config, include_conditional=False).items()):\n for module_requirement in module_requirements:\n if \">=\" in module_requirement:\n module_name, required_version = module_requirement.split(\">=\")\n version_test = \">=\"\n elif \"==\" in module_requirement:\n module_name, required_version = module_requirement.split(\"==\")\n version_test = \"==\"\n else:\n module_name = module_requirement\n version_test = None\n\n try:\n module = __import__(module_name)\n except ImportError:\n logging.exception(\n \"Can't import %r which is part of %r\",\n module_name, dependency\n )\n raise MissingRequirementError(\n \"Can't import %r which is part of %r\"\n % (module_name, dependency), module_name, dependency\n )\n version = getattr(module, \"__version__\", None)\n file_path = getattr(module, \"__file__\", None)\n logger.info(\n \"Using %r version %r from %r to satisfy %r\",\n module_name, version, file_path, dependency\n )\n\n if version_test == \">=\":\n if version is None:\n raise MissingRequirementError(\n \"Version of %r isn't set as __version__ of module %r\"\n % (dependency, module_name), module_name, dependency\n )\n if LooseVersion(version) < LooseVersion(required_version):\n raise MissingRequirementError(\n \"Version of %r in %r is too old. %r < %r\"\n % (dependency, file_path, version, required_version),\n module_name, dependency\n )\n elif version_test == \"==\":\n if version is None:\n raise MissingRequirementError(\n \"Version of %r isn't set as __version__ of module %r\"\n % (dependency, module_name), module_name, dependency\n )\n if LooseVersion(version) != LooseVersion(required_version):\n raise MissingRequirementError(\n \"Unexpected version of %r in %r. 
%r != %r\"\n % (dependency, file_path, version, required_version),\n module_name, dependency\n )\n\n\ndef list_requirements():\n result = []\n linked = []\n for link in DEPENDENCY_LINKS.values():\n egg = link.split(\"#egg=\")[1]\n linked.append(egg.split('-')[0])\n result.append(link)\n for requirement in requirements(include_conditional=True):\n is_linked = False\n for link in linked:\n if requirement.replace('-', '_').startswith(link):\n is_linked = True\n if not is_linked:\n result.append(requirement)\n return result\n\n\nif __name__ == \"__main__\":\n import sys\n sys.stdout.writelines(req + \"\\n\" for req in list_requirements())\n", "path": "synapse/python_dependencies.py"}], "after_files": [{"content": "# Copyright 2014-2016 OpenMarket Ltd\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom twisted.internet import ssl\nfrom OpenSSL import SSL, crypto\nfrom twisted.internet._sslverify import _defaultCurveName\n\nimport logging\n\nlogger = logging.getLogger(__name__)\n\n\nclass ServerContextFactory(ssl.ContextFactory):\n \"\"\"Factory for PyOpenSSL SSL contexts that are used to handle incoming\n connections and to make connections to remote servers.\"\"\"\n\n def __init__(self, config):\n self._context = SSL.Context(SSL.SSLv23_METHOD)\n self.configure_context(self._context, config)\n\n @staticmethod\n def configure_context(context, config):\n try:\n _ecCurve = crypto.get_elliptic_curve(_defaultCurveName)\n context.set_tmp_ecdh(_ecCurve)\n\n except Exception:\n logger.exception(\"Failed to enable elliptic curve for TLS\")\n context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)\n context.use_certificate_chain_file(config.tls_certificate_file)\n\n if not config.no_tls:\n context.use_privatekey(config.tls_private_key)\n\n context.load_tmp_dh(config.tls_dh_params_path)\n context.set_cipher_list(\"!ADH:HIGH+kEDH:!AECDH:HIGH+kEECDH\")\n\n def getContext(self):\n return self._context\n", "path": "synapse/crypto/context_factory.py"}, {"content": "# Copyright 2015, 2016 OpenMarket Ltd\n# Copyright 2017 Vector Creations Ltd\n# Copyright 2018 New Vector Ltd\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\nfrom distutils.version import LooseVersion\n\nlogger = logging.getLogger(__name__)\n\n# this dict maps from python package name to a list of modules we expect it to\n# provide.\n#\n# the key is a \"requirement specifier\", as used as a parameter to `pip\n# install`[1], or an `install_requires` argument to `setuptools.setup` [2].\n#\n# the value is a sequence of strings; 
each entry should be the name of the\n# python module, optionally followed by a version assertion which can be either\n# \">=<ver>\" or \"==<ver>\".\n#\n# [1] https://pip.pypa.io/en/stable/reference/pip_install/#requirement-specifiers.\n# [2] https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-dependencies\nREQUIREMENTS = {\n \"jsonschema>=2.5.1\": [\"jsonschema>=2.5.1\"],\n \"frozendict>=0.4\": [\"frozendict\"],\n \"unpaddedbase64>=1.1.0\": [\"unpaddedbase64>=1.1.0\"],\n \"canonicaljson>=1.1.3\": [\"canonicaljson>=1.1.3\"],\n \"signedjson>=1.0.0\": [\"signedjson>=1.0.0\"],\n \"pynacl>=1.2.1\": [\"nacl>=1.2.1\", \"nacl.bindings\"],\n \"service_identity>=1.0.0\": [\"service_identity>=1.0.0\"],\n \"Twisted>=16.0.0\": [\"twisted>=16.0.0\"],\n\n # We use crypto.get_elliptic_curve which is only supported in >=0.15\n \"pyopenssl>=0.15\": [\"OpenSSL>=0.15\"],\n\n \"pyyaml\": [\"yaml\"],\n \"pyasn1\": [\"pyasn1\"],\n \"daemonize\": [\"daemonize\"],\n \"bcrypt\": [\"bcrypt>=3.1.0\"],\n \"pillow\": [\"PIL\"],\n \"pydenticon\": [\"pydenticon\"],\n \"blist\": [\"blist\"],\n \"pysaml2>=3.0.0\": [\"saml2>=3.0.0\"],\n \"pymacaroons-pynacl\": [\"pymacaroons\"],\n \"msgpack-python>=0.3.0\": [\"msgpack\"],\n \"phonenumbers>=8.2.0\": [\"phonenumbers\"],\n \"six\": [\"six\"],\n}\nCONDITIONAL_REQUIREMENTS = {\n \"web_client\": {\n \"matrix_angular_sdk>=0.6.8\": [\"syweb>=0.6.8\"],\n },\n \"preview_url\": {\n \"netaddr>=0.7.18\": [\"netaddr\"],\n },\n \"email.enable_notifs\": {\n \"Jinja2>=2.8\": [\"Jinja2>=2.8\"],\n \"bleach>=1.4.2\": [\"bleach>=1.4.2\"],\n },\n \"matrix-synapse-ldap3\": {\n \"matrix-synapse-ldap3>=0.1\": [\"ldap_auth_provider\"],\n },\n \"psutil\": {\n \"psutil>=2.0.0\": [\"psutil>=2.0.0\"],\n },\n \"affinity\": {\n \"affinity\": [\"affinity\"],\n },\n}\n\n\ndef requirements(config=None, include_conditional=False):\n reqs = REQUIREMENTS.copy()\n if include_conditional:\n for _, req in CONDITIONAL_REQUIREMENTS.items():\n reqs.update(req)\n return reqs\n\n\ndef github_link(project, version, egg):\n return \"https://github.com/%s/tarball/%s/#egg=%s\" % (project, version, egg)\n\n\nDEPENDENCY_LINKS = {\n}\n\n\nclass MissingRequirementError(Exception):\n def __init__(self, message, module_name, dependency):\n super(MissingRequirementError, self).__init__(message)\n self.module_name = module_name\n self.dependency = dependency\n\n\ndef check_requirements(config=None):\n \"\"\"Checks that all the modules needed by synapse have been correctly\n installed and are at the correct version\"\"\"\n for dependency, module_requirements in (\n requirements(config, include_conditional=False).items()):\n for module_requirement in module_requirements:\n if \">=\" in module_requirement:\n module_name, required_version = module_requirement.split(\">=\")\n version_test = \">=\"\n elif \"==\" in module_requirement:\n module_name, required_version = module_requirement.split(\"==\")\n version_test = \"==\"\n else:\n module_name = module_requirement\n version_test = None\n\n try:\n module = __import__(module_name)\n except ImportError:\n logging.exception(\n \"Can't import %r which is part of %r\",\n module_name, dependency\n )\n raise MissingRequirementError(\n \"Can't import %r which is part of %r\"\n % (module_name, dependency), module_name, dependency\n )\n version = getattr(module, \"__version__\", None)\n file_path = getattr(module, \"__file__\", None)\n logger.info(\n \"Using %r version %r from %r to satisfy %r\",\n module_name, version, file_path, dependency\n )\n\n if version_test == 
\">=\":\n if version is None:\n raise MissingRequirementError(\n \"Version of %r isn't set as __version__ of module %r\"\n % (dependency, module_name), module_name, dependency\n )\n if LooseVersion(version) < LooseVersion(required_version):\n raise MissingRequirementError(\n \"Version of %r in %r is too old. %r < %r\"\n % (dependency, file_path, version, required_version),\n module_name, dependency\n )\n elif version_test == \"==\":\n if version is None:\n raise MissingRequirementError(\n \"Version of %r isn't set as __version__ of module %r\"\n % (dependency, module_name), module_name, dependency\n )\n if LooseVersion(version) != LooseVersion(required_version):\n raise MissingRequirementError(\n \"Unexpected version of %r in %r. %r != %r\"\n % (dependency, file_path, version, required_version),\n module_name, dependency\n )\n\n\ndef list_requirements():\n result = []\n linked = []\n for link in DEPENDENCY_LINKS.values():\n egg = link.split(\"#egg=\")[1]\n linked.append(egg.split('-')[0])\n result.append(link)\n for requirement in requirements(include_conditional=True):\n is_linked = False\n for link in linked:\n if requirement.replace('-', '_').startswith(link):\n is_linked = True\n if not is_linked:\n result.append(requirement)\n return result\n\n\nif __name__ == \"__main__\":\n import sys\n sys.stdout.writelines(req + \"\\n\" for req in list_requirements())\n", "path": "synapse/python_dependencies.py"}]}
| 3,341 | 536 |
gh_patches_debug_26762
|
rasdani/github-patches
|
git_diff
|
pytorch__ignite-1312
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Improve Canberra metric
## 🚀 Feature
The actual implementation of the Canberra metric does not use the absolute value of the terms in the denominator. Moreover, `sklearn` can be used in the test.
See https://arxiv.org/pdf/1411.7474.pdf
See https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.DistanceMetric.html
--- END ISSUE ---
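Concretely, the metric should be CM = sum_j |A_j - P_j| / (|A_j| + |P_j|); without the absolute values in the denominator, mixed-sign inputs can cancel and divide by zero. A minimal standalone sketch of the corrected update (a hypothetical helper mirroring the torch-based fix shown later in this record; NaN and 0/0 handling is not addressed here):

```python
import torch


def canberra_sum(y_pred: torch.Tensor, y: torch.Tensor) -> float:
    # |A_j - P_j| / (|A_j| + |P_j|), summed over all elements.
    errors = torch.abs(y - y_pred) / (torch.abs(y) + torch.abs(y_pred))
    return torch.sum(errors).item()


# With a negative ground-truth value the absolute values in the
# denominator matter: (A_j + P_j) would be zero for the first element.
y = torch.tensor([-1.0, 2.0])
y_pred = torch.tensor([1.0, 2.0])
print(canberra_sum(y_pred, y))  # 1.0
```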
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ignite/contrib/metrics/regression/canberra_metric.py`
Content:
```
1 import torch
2
3 from ignite.contrib.metrics.regression._base import _BaseRegression
4
5
6 class CanberraMetric(_BaseRegression):
7 r"""
8 Calculates the Canberra Metric.
9
10 :math:`\text{CM} = \sum_{j=1}^n\frac{|A_j - P_j|}{A_j + P_j}`
11
12 where, :math:`A_j` is the ground truth and :math:`P_j` is the predicted value.
13
14 More details can be found in `Botchkarev 2018`__.
15
16 - ``update`` must receive output of the form ``(y_pred, y)`` or ``{'y_pred': y_pred, 'y': y}``.
17 - `y` and `y_pred` must be of same shape `(N, )` or `(N, 1)`.
18
19 __ https://arxiv.org/abs/1809.03006
20 """
21
22 def reset(self):
23 self._sum_of_errors = 0.0
24
25 def _update(self, output):
26 y_pred, y = output
27 errors = torch.abs(y.view_as(y_pred) - y_pred) / (y_pred + y.view_as(y_pred))
28 self._sum_of_errors += torch.sum(errors).item()
29
30 def compute(self):
31 return self._sum_of_errors
32
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ignite/contrib/metrics/regression/canberra_metric.py b/ignite/contrib/metrics/regression/canberra_metric.py
--- a/ignite/contrib/metrics/regression/canberra_metric.py
+++ b/ignite/contrib/metrics/regression/canberra_metric.py
@@ -7,16 +7,19 @@
r"""
Calculates the Canberra Metric.
- :math:`\text{CM} = \sum_{j=1}^n\frac{|A_j - P_j|}{A_j + P_j}`
+ :math:`\text{CM} = \sum_{j=1}^n\frac{|A_j - P_j|}{|A_j| + |P_j|}`
where, :math:`A_j` is the ground truth and :math:`P_j` is the predicted value.
- More details can be found in `Botchkarev 2018`__.
+ More details can be found in `Botchkarev 2018`_ or `scikit-learn distance metrics`_
- ``update`` must receive output of the form ``(y_pred, y)`` or ``{'y_pred': y_pred, 'y': y}``.
- `y` and `y_pred` must be of same shape `(N, )` or `(N, 1)`.
- __ https://arxiv.org/abs/1809.03006
+ .. _Botchkarev 2018: https://arxiv.org/abs/1809.03006
+ .. _scikit-learn distance metrics:
+ https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.DistanceMetric.html
+
"""
def reset(self):
@@ -24,7 +27,7 @@
def _update(self, output):
y_pred, y = output
- errors = torch.abs(y.view_as(y_pred) - y_pred) / (y_pred + y.view_as(y_pred))
+ errors = torch.abs(y.view_as(y_pred) - y_pred) / (torch.abs(y_pred) + torch.abs(y.view_as(y_pred)))
self._sum_of_errors += torch.sum(errors).item()
def compute(self):
|
{"golden_diff": "diff --git a/ignite/contrib/metrics/regression/canberra_metric.py b/ignite/contrib/metrics/regression/canberra_metric.py\n--- a/ignite/contrib/metrics/regression/canberra_metric.py\n+++ b/ignite/contrib/metrics/regression/canberra_metric.py\n@@ -7,16 +7,19 @@\n r\"\"\"\n Calculates the Canberra Metric.\n \n- :math:`\\text{CM} = \\sum_{j=1}^n\\frac{|A_j - P_j|}{A_j + P_j}`\n+ :math:`\\text{CM} = \\sum_{j=1}^n\\frac{|A_j - P_j|}{|A_j| + |P_j|}`\n \n where, :math:`A_j` is the ground truth and :math:`P_j` is the predicted value.\n \n- More details can be found in `Botchkarev 2018`__.\n+ More details can be found in `Botchkarev 2018`_ or `scikit-learn distance metrics`_\n \n - ``update`` must receive output of the form ``(y_pred, y)`` or ``{'y_pred': y_pred, 'y': y}``.\n - `y` and `y_pred` must be of same shape `(N, )` or `(N, 1)`.\n \n- __ https://arxiv.org/abs/1809.03006\n+ .. _Botchkarev 2018: https://arxiv.org/abs/1809.03006\n+ .. _scikit-learn distance metrics:\n+ https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.DistanceMetric.html\n+\n \"\"\"\n \n def reset(self):\n@@ -24,7 +27,7 @@\n \n def _update(self, output):\n y_pred, y = output\n- errors = torch.abs(y.view_as(y_pred) - y_pred) / (y_pred + y.view_as(y_pred))\n+ errors = torch.abs(y.view_as(y_pred) - y_pred) / (torch.abs(y_pred) + torch.abs(y.view_as(y_pred)))\n self._sum_of_errors += torch.sum(errors).item()\n \n def compute(self):\n", "issue": "Improve Canberra metric\n## \ud83d\ude80 Feature\r\n\r\nActual implementation of Canberra metric does not use absolute value on terms in denominator. Moreover, `sklearn` can be used in test.\r\n\r\nSee https://arxiv.org/pdf/1411.7474.pdf \r\n\r\nSee https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.DistanceMetric.html\r\n\n", "before_files": [{"content": "import torch\n\nfrom ignite.contrib.metrics.regression._base import _BaseRegression\n\n\nclass CanberraMetric(_BaseRegression):\n r\"\"\"\n Calculates the Canberra Metric.\n\n :math:`\\text{CM} = \\sum_{j=1}^n\\frac{|A_j - P_j|}{A_j + P_j}`\n\n where, :math:`A_j` is the ground truth and :math:`P_j` is the predicted value.\n\n More details can be found in `Botchkarev 2018`__.\n\n - ``update`` must receive output of the form ``(y_pred, y)`` or ``{'y_pred': y_pred, 'y': y}``.\n - `y` and `y_pred` must be of same shape `(N, )` or `(N, 1)`.\n\n __ https://arxiv.org/abs/1809.03006\n \"\"\"\n\n def reset(self):\n self._sum_of_errors = 0.0\n\n def _update(self, output):\n y_pred, y = output\n errors = torch.abs(y.view_as(y_pred) - y_pred) / (y_pred + y.view_as(y_pred))\n self._sum_of_errors += torch.sum(errors).item()\n\n def compute(self):\n return self._sum_of_errors\n", "path": "ignite/contrib/metrics/regression/canberra_metric.py"}], "after_files": [{"content": "import torch\n\nfrom ignite.contrib.metrics.regression._base import _BaseRegression\n\n\nclass CanberraMetric(_BaseRegression):\n r\"\"\"\n Calculates the Canberra Metric.\n\n :math:`\\text{CM} = \\sum_{j=1}^n\\frac{|A_j - P_j|}{|A_j| + |P_j|}`\n\n where, :math:`A_j` is the ground truth and :math:`P_j` is the predicted value.\n\n More details can be found in `Botchkarev 2018`_ or `scikit-learn distance metrics`_\n\n - ``update`` must receive output of the form ``(y_pred, y)`` or ``{'y_pred': y_pred, 'y': y}``.\n - `y` and `y_pred` must be of same shape `(N, )` or `(N, 1)`.\n\n .. _Botchkarev 2018: https://arxiv.org/abs/1809.03006\n .. 
_scikit-learn distance metrics:\n https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.DistanceMetric.html\n\n \"\"\"\n\n def reset(self):\n self._sum_of_errors = 0.0\n\n def _update(self, output):\n y_pred, y = output\n errors = torch.abs(y.view_as(y_pred) - y_pred) / (torch.abs(y_pred) + torch.abs(y.view_as(y_pred)))\n self._sum_of_errors += torch.sum(errors).item()\n\n def compute(self):\n return self._sum_of_errors\n", "path": "ignite/contrib/metrics/regression/canberra_metric.py"}]}
| 691 | 506 |
gh_patches_debug_16031
|
rasdani/github-patches
|
git_diff
|
pyqtgraph__pyqtgraph-2130
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
examples/ScatterPlotWidget.py Color Map throws exception
<!-- In the following, please describe your issue in detail! -->
<!-- If some of the sections do not apply, just remove them. -->
### Short description
<!-- This should summarize the issue. -->
examples/ScatterPlotWidget.py, which makes use of Parameter Trees, throws an exception since #1919 (4bf1866c2a28b237ca8ca06ac668686b92ccf967 from bisection)
### Code to reproduce
<!-- Please provide a minimal working example that reproduces the issue in the code block below.
Ideally, this should be a full example someone else could run without additional setup. -->
run ```examples/ScatterPlotWidget.py```
select ```Color Map``` -> ```Add Mapping``` -> ```x_pos```
an exception will be thrown
### Expected behavior
<!-- What should happen? -->
no exception thrown
### Real behavior
<!-- What happens? -->
following exception thrown
```
Traceback (most recent call last):
File "d:\github\pyqtgraph\pyqtgraph\widgets\ScatterPlotWidget.py", line 168, in updatePlot
colors = np.array([fn.mkBrush(*x) for x in self.colorMap.map(data)])
File "d:\github\pyqtgraph\pyqtgraph\widgets\ColorMapWidget.py", line 150, in map
colors2 = item.map(data)
File "d:\github\pyqtgraph\pyqtgraph\widgets\ColorMapWidget.py", line 216, in map
colors = cmap.map(scaled, mode='float')
AttributeError: 'NoneType' object has no attribute 'map'
```
### Tested environment(s)
* PyQtGraph version: 0.12.3 <!-- output of pyqtgraph.__version__ -->
* Qt Python binding: PySide6 6.2.0 <!-- output of pyqtgraph.Qt.VERSION_INFO -->
* Python version: 3.8.10
* NumPy version: 1.21.2 <!-- output of numpy.__version__ -->
* Operating system: Windows 10
* Installation method: pip install -e . <!-- e.g. pip, conda, system packages, ... -->
### Additional context
--- END ISSUE ---
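The traceback indicates that `self.value()` on the 'colormap' item returns `None` after the parameter-tree rework in #1919, so `cmap.map(...)` fails. The golden patch later in this record resolves this by deriving `RangeColorMapItem` from the dedicated colormap parameter class; purely as an illustration of the failure mode, a defensive guard (a hypothetical helper, not the actual fix) would look like:

```python
import numpy as np


def safe_cmap_map(cmap, scaled, fallback_rgba=(1.0, 1.0, 1.0, 1.0)):
    # Guard against value() returning None instead of a pyqtgraph ColorMap.
    if cmap is None:
        return np.tile(np.asarray(fallback_rgba, dtype=float), (len(scaled), 1))
    return cmap.map(scaled, mode='float')
```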
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyqtgraph/widgets/ColorMapWidget.py`
Content:
```
1 from collections import OrderedDict
2
3 import numpy as np
4
5 from .. import functions as fn
6 from .. import parametertree as ptree
7 from ..Qt import QtCore
8
9 __all__ = ['ColorMapWidget']
10
11 class ColorMapWidget(ptree.ParameterTree):
12 """
13 This class provides a widget allowing the user to customize color mapping
14 for multi-column data. Given a list of field names, the user may specify
15 multiple criteria for assigning colors to each record in a numpy record array.
16 Multiple criteria are evaluated and combined into a single color for each
17 record by user-defined compositing methods.
18
19 For simpler color mapping using a single gradient editor, see
20 :class:`GradientWidget <pyqtgraph.GradientWidget>`
21 """
22 sigColorMapChanged = QtCore.Signal(object)
23
24 def __init__(self, parent=None):
25 ptree.ParameterTree.__init__(self, parent=parent, showHeader=False)
26
27 self.params = ColorMapParameter()
28 self.setParameters(self.params)
29 self.params.sigTreeStateChanged.connect(self.mapChanged)
30
31 ## wrap a couple methods
32 self.setFields = self.params.setFields
33 self.map = self.params.map
34
35 def mapChanged(self):
36 self.sigColorMapChanged.emit(self)
37
38 def widgetGroupInterface(self):
39 return (self.sigColorMapChanged, self.saveState, self.restoreState)
40
41 def saveState(self):
42 return self.params.saveState()
43
44 def restoreState(self, state):
45 self.params.restoreState(state)
46
47 def addColorMap(self, name):
48 """Add a new color mapping and return the created parameter.
49 """
50 return self.params.addNew(name)
51
52
53 class ColorMapParameter(ptree.types.GroupParameter):
54 sigColorMapChanged = QtCore.Signal(object)
55
56 def __init__(self):
57 self.fields = {}
58 ptree.types.GroupParameter.__init__(self, name='Color Map', addText='Add Mapping..', addList=[])
59 self.sigTreeStateChanged.connect(self.mapChanged)
60
61 def mapChanged(self):
62 self.sigColorMapChanged.emit(self)
63
64 def addNew(self, name):
65 fieldSpec = self.fields[name]
66
67 mode = fieldSpec.get('mode', 'range')
68 if mode == 'range':
69 item = RangeColorMapItem(name, self.fields[name])
70 elif mode == 'enum':
71 item = EnumColorMapItem(name, self.fields[name])
72
73 defaults = fieldSpec.get('defaults', {})
74 for k, v in defaults.items():
75 if k == 'colormap':
76 if mode == 'range':
77 item.setValue(v)
78 elif mode == 'enum':
79 children = item.param('Values').children()
80 for i, child in enumerate(children):
81 try:
82 child.setValue(v[i])
83 except IndexError:
84 continue
85 else:
86 item[k] = v
87
88 self.addChild(item)
89 return item
90
91 def fieldNames(self):
92 return list(self.fields.keys())
93
94 def setFields(self, fields):
95 """
96 Set the list of fields to be used by the mapper.
97
98 The format of *fields* is::
99
100 [ (fieldName, {options}), ... ]
101
102 ============== ============================================================
103 Field Options:
104 mode Either 'range' or 'enum' (default is range). For 'range',
105 The user may specify a gradient of colors to be applied
106 linearly across a specific range of values. For 'enum',
107 the user specifies a single color for each unique value
108 (see *values* option).
109 units String indicating the units of the data for this field.
110 values List of unique values for which the user may assign a
111 color when mode=='enum'. Optionally may specify a dict
112 instead {value: name}.
113 defaults Dict of default values to apply to color map items when
114 they are created. Valid keys are 'colormap' to provide
115 a default color map, or otherwise they a string or tuple
116 indicating the parameter to be set, such as 'Operation' or
117 ('Channels..', 'Red').
118 ============== ============================================================
119 """
120 self.fields = OrderedDict(fields)
121 #self.fields = fields
122 #self.fields.sort()
123 names = self.fieldNames()
124 self.setAddList(names)
125
126 def map(self, data, mode='byte'):
127 """
128 Return an array of colors corresponding to *data*.
129
130 ============== =================================================================
131 **Arguments:**
132 data A numpy record array where the fields in data.dtype match those
133 defined by a prior call to setFields().
134 mode Either 'byte' or 'float'. For 'byte', the method returns an array
135 of dtype ubyte with values scaled 0-255. For 'float', colors are
136 returned as 0.0-1.0 float values.
137 ============== =================================================================
138 """
139 if isinstance(data, dict):
140 data = np.array([tuple(data.values())], dtype=[(k, float) for k in data.keys()])
141
142 colors = np.zeros((len(data),4))
143 for item in self.children():
144 if not item['Enabled']:
145 continue
146 chans = item.param('Channels..')
147 mask = np.empty((len(data), 4), dtype=bool)
148 for i,f in enumerate(['Red', 'Green', 'Blue', 'Alpha']):
149 mask[:,i] = chans[f]
150
151 colors2 = item.map(data)
152
153 op = item['Operation']
154 if op == 'Add':
155 colors[mask] = colors[mask] + colors2[mask]
156 elif op == 'Multiply':
157 colors[mask] *= colors2[mask]
158 elif op == 'Overlay':
159 a = colors2[:,3:4]
160 c3 = colors * (1-a) + colors2 * a
161 c3[:,3:4] = colors[:,3:4] + (1-colors[:,3:4]) * a
162 colors = c3
163 elif op == 'Set':
164 colors[mask] = colors2[mask]
165
166 colors = fn.clip_array(colors, 0., 1.)
167 if mode == 'byte':
168 colors = (colors * 255).astype(np.ubyte)
169
170 return colors
171
172 def saveState(self):
173 items = OrderedDict()
174 for item in self:
175 itemState = item.saveState(filter='user')
176 itemState['field'] = item.fieldName
177 items[item.name()] = itemState
178 state = {'fields': self.fields, 'items': items}
179 return state
180
181 def restoreState(self, state):
182 if 'fields' in state:
183 self.setFields(state['fields'])
184 for name, itemState in state['items'].items():
185 item = self.addNew(itemState['field'])
186 item.restoreState(itemState)
187
188
189 class RangeColorMapItem(ptree.types.SimpleParameter):
190 mapType = 'range'
191
192 def __init__(self, name, opts):
193 self.fieldName = name
194 units = opts.get('units', '')
195 ptree.types.SimpleParameter.__init__(self,
196 name=name, autoIncrementName=True, type='colormap', removable=True, renamable=True,
197 children=[
198 #dict(name="Field", type='list', value=name, limits=fields),
199 dict(name='Min', type='float', value=0.0, suffix=units, siPrefix=True),
200 dict(name='Max', type='float', value=1.0, suffix=units, siPrefix=True),
201 dict(name='Operation', type='list', value='Overlay', limits=['Overlay', 'Add', 'Multiply', 'Set']),
202 dict(name='Channels..', type='group', expanded=False, children=[
203 dict(name='Red', type='bool', value=True),
204 dict(name='Green', type='bool', value=True),
205 dict(name='Blue', type='bool', value=True),
206 dict(name='Alpha', type='bool', value=True),
207 ]),
208 dict(name='Enabled', type='bool', value=True),
209 dict(name='NaN', type='color'),
210 ])
211
212 def map(self, data):
213 data = data[self.fieldName]
214
215 scaled = fn.clip_array((data-self['Min']) / (self['Max']-self['Min']), 0, 1)
216 cmap = self.value()
217 colors = cmap.map(scaled, mode='float')
218
219 mask = np.invert(np.isfinite(data))
220 nanColor = self['NaN']
221 nanColor = nanColor.getRgbF()
222 colors[mask] = nanColor
223
224 return colors
225
226 class EnumColorMapItem(ptree.types.GroupParameter):
227 mapType = 'enum'
228
229 def __init__(self, name, opts):
230 self.fieldName = name
231 vals = opts.get('values', [])
232 if isinstance(vals, list):
233 vals = OrderedDict([(v,str(v)) for v in vals])
234 childs = []
235 for val,vname in vals.items():
236 ch = ptree.Parameter.create(name=vname, type='color')
237 ch.maskValue = val
238 childs.append(ch)
239
240 ptree.types.GroupParameter.__init__(self,
241 name=name, autoIncrementName=True, removable=True, renamable=True,
242 children=[
243 dict(name='Values', type='group', children=childs),
244 dict(name='Operation', type='list', value='Overlay', limits=['Overlay', 'Add', 'Multiply', 'Set']),
245 dict(name='Channels..', type='group', expanded=False, children=[
246 dict(name='Red', type='bool', value=True),
247 dict(name='Green', type='bool', value=True),
248 dict(name='Blue', type='bool', value=True),
249 dict(name='Alpha', type='bool', value=True),
250 ]),
251 dict(name='Enabled', type='bool', value=True),
252 dict(name='Default', type='color'),
253 ])
254
255 def map(self, data):
256 data = data[self.fieldName]
257 colors = np.empty((len(data), 4))
258 default = np.array(self['Default'].getRgbF())
259 colors[:] = default
260
261 for v in self.param('Values'):
262 mask = data == v.maskValue
263 c = np.array(v.value().getRgbF())
264 colors[mask] = c
265 #scaled = np.clip((data-self['Min']) / (self['Max']-self['Min']), 0, 1)
266 #cmap = self.value()
267 #colors = cmap.map(scaled, mode='float')
268
269 #mask = np.isnan(data) | np.isinf(data)
270 #nanColor = self['NaN']
271 #nanColor = nanColor.getRgbF()
272 #colors[mask] = nanColor
273
274 return colors
275
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pyqtgraph/widgets/ColorMapWidget.py b/pyqtgraph/widgets/ColorMapWidget.py
--- a/pyqtgraph/widgets/ColorMapWidget.py
+++ b/pyqtgraph/widgets/ColorMapWidget.py
@@ -186,13 +186,13 @@
item.restoreState(itemState)
-class RangeColorMapItem(ptree.types.SimpleParameter):
+class RangeColorMapItem(ptree.types.ColorMapParameter):
mapType = 'range'
def __init__(self, name, opts):
self.fieldName = name
units = opts.get('units', '')
- ptree.types.SimpleParameter.__init__(self,
+ ptree.types.ColorMapParameter.__init__(self,
name=name, autoIncrementName=True, type='colormap', removable=True, renamable=True,
children=[
#dict(name="Field", type='list', value=name, limits=fields),
|
{"golden_diff": "diff --git a/pyqtgraph/widgets/ColorMapWidget.py b/pyqtgraph/widgets/ColorMapWidget.py\n--- a/pyqtgraph/widgets/ColorMapWidget.py\n+++ b/pyqtgraph/widgets/ColorMapWidget.py\n@@ -186,13 +186,13 @@\n item.restoreState(itemState)\n \n \n-class RangeColorMapItem(ptree.types.SimpleParameter):\n+class RangeColorMapItem(ptree.types.ColorMapParameter):\n mapType = 'range'\n \n def __init__(self, name, opts):\n self.fieldName = name\n units = opts.get('units', '')\n- ptree.types.SimpleParameter.__init__(self, \n+ ptree.types.ColorMapParameter.__init__(self,\n name=name, autoIncrementName=True, type='colormap', removable=True, renamable=True, \n children=[\n #dict(name=\"Field\", type='list', value=name, limits=fields),\n", "issue": "examples/ScatterPlotWidget.py Color Map throws exception\n<!-- In the following, please describe your issue in detail! -->\r\n<!-- If some of the sections do not apply, just remove them. -->\r\n\r\n### Short description\r\n<!-- This should summarize the issue. -->\r\nexamples/ScatterPlotWidget.py which makes use of Parameter Trees throws exception since #1919 (4bf1866c2a28b237ca8ca06ac668686b92ccf967 from bisection)\r\n\r\n### Code to reproduce\r\n<!-- Please provide a minimal working example that reproduces the issue in the code block below.\r\n Ideally, this should be a full example someone else could run without additional setup. -->\r\nrun ```examples/ScatterPlotWidget.py```\r\nselect ```Color Map``` -> ```Add Mapping``` -> ```x_pos```\r\nan exception will be thrown\r\n\r\n### Expected behavior\r\n<!-- What should happen? -->\r\nno exception thrown\r\n\r\n### Real behavior\r\n<!-- What happens? -->\r\nfollowing exception thrown\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"d:\\github\\pyqtgraph\\pyqtgraph\\widgets\\ScatterPlotWidget.py\", line 168, in updatePlot\r\n colors = np.array([fn.mkBrush(*x) for x in self.colorMap.map(data)])\r\n File \"d:\\github\\pyqtgraph\\pyqtgraph\\widgets\\ColorMapWidget.py\", line 150, in map\r\n colors2 = item.map(data)\r\n File \"d:\\github\\pyqtgraph\\pyqtgraph\\widgets\\ColorMapWidget.py\", line 216, in map\r\n colors = cmap.map(scaled, mode='float')\r\nAttributeError: 'NoneType' object has no attribute 'map'\r\n```\r\n\r\n### Tested environment(s)\r\n\r\n * PyQtGraph version: 0.12.3 <!-- output of pyqtgraph.__version__ -->\r\n * Qt Python binding: PySide6 6.2.0 <!-- output of pyqtgraph.Qt.VERSION_INFO -->\r\n * Python version: 3.8.10\r\n * NumPy version: 1.21.2 <!-- output of numpy.__version__ -->\r\n * Operating system: Windows 10\r\n * Installation method: pip install -e . <!-- e.g. pip, conda, system packages, ... -->\r\n\r\n### Additional context\r\n\n", "before_files": [{"content": "from collections import OrderedDict\n\nimport numpy as np\n\nfrom .. import functions as fn\nfrom .. import parametertree as ptree\nfrom ..Qt import QtCore\n\n__all__ = ['ColorMapWidget']\n\nclass ColorMapWidget(ptree.ParameterTree):\n \"\"\"\n This class provides a widget allowing the user to customize color mapping\n for multi-column data. 
Given a list of field names, the user may specify\n multiple criteria for assigning colors to each record in a numpy record array.\n Multiple criteria are evaluated and combined into a single color for each\n record by user-defined compositing methods.\n \n For simpler color mapping using a single gradient editor, see \n :class:`GradientWidget <pyqtgraph.GradientWidget>`\n \"\"\"\n sigColorMapChanged = QtCore.Signal(object)\n \n def __init__(self, parent=None):\n ptree.ParameterTree.__init__(self, parent=parent, showHeader=False)\n \n self.params = ColorMapParameter()\n self.setParameters(self.params)\n self.params.sigTreeStateChanged.connect(self.mapChanged)\n \n ## wrap a couple methods \n self.setFields = self.params.setFields\n self.map = self.params.map\n\n def mapChanged(self):\n self.sigColorMapChanged.emit(self)\n\n def widgetGroupInterface(self):\n return (self.sigColorMapChanged, self.saveState, self.restoreState)\n\n def saveState(self):\n return self.params.saveState()\n\n def restoreState(self, state):\n self.params.restoreState(state)\n \n def addColorMap(self, name):\n \"\"\"Add a new color mapping and return the created parameter.\n \"\"\"\n return self.params.addNew(name)\n\n\nclass ColorMapParameter(ptree.types.GroupParameter):\n sigColorMapChanged = QtCore.Signal(object)\n \n def __init__(self):\n self.fields = {}\n ptree.types.GroupParameter.__init__(self, name='Color Map', addText='Add Mapping..', addList=[])\n self.sigTreeStateChanged.connect(self.mapChanged)\n \n def mapChanged(self):\n self.sigColorMapChanged.emit(self)\n \n def addNew(self, name):\n fieldSpec = self.fields[name]\n \n mode = fieldSpec.get('mode', 'range') \n if mode == 'range':\n item = RangeColorMapItem(name, self.fields[name])\n elif mode == 'enum':\n item = EnumColorMapItem(name, self.fields[name])\n\n defaults = fieldSpec.get('defaults', {})\n for k, v in defaults.items():\n if k == 'colormap':\n if mode == 'range':\n item.setValue(v)\n elif mode == 'enum':\n children = item.param('Values').children()\n for i, child in enumerate(children):\n try:\n child.setValue(v[i])\n except IndexError:\n continue\n else:\n item[k] = v\n\n self.addChild(item)\n return item\n \n def fieldNames(self):\n return list(self.fields.keys())\n \n def setFields(self, fields):\n \"\"\"\n Set the list of fields to be used by the mapper. \n \n The format of *fields* is::\n \n [ (fieldName, {options}), ... ]\n \n ============== ============================================================\n Field Options:\n mode Either 'range' or 'enum' (default is range). For 'range', \n The user may specify a gradient of colors to be applied \n linearly across a specific range of values. For 'enum', \n the user specifies a single color for each unique value\n (see *values* option).\n units String indicating the units of the data for this field.\n values List of unique values for which the user may assign a \n color when mode=='enum'. Optionally may specify a dict \n instead {value: name}.\n defaults Dict of default values to apply to color map items when\n they are created. 
Valid keys are 'colormap' to provide\n a default color map, or otherwise they a string or tuple\n indicating the parameter to be set, such as 'Operation' or\n ('Channels..', 'Red').\n ============== ============================================================\n \"\"\"\n self.fields = OrderedDict(fields)\n #self.fields = fields\n #self.fields.sort()\n names = self.fieldNames()\n self.setAddList(names)\n \n def map(self, data, mode='byte'):\n \"\"\"\n Return an array of colors corresponding to *data*. \n \n ============== =================================================================\n **Arguments:**\n data A numpy record array where the fields in data.dtype match those\n defined by a prior call to setFields().\n mode Either 'byte' or 'float'. For 'byte', the method returns an array\n of dtype ubyte with values scaled 0-255. For 'float', colors are\n returned as 0.0-1.0 float values.\n ============== =================================================================\n \"\"\"\n if isinstance(data, dict):\n data = np.array([tuple(data.values())], dtype=[(k, float) for k in data.keys()])\n\n colors = np.zeros((len(data),4))\n for item in self.children():\n if not item['Enabled']:\n continue\n chans = item.param('Channels..')\n mask = np.empty((len(data), 4), dtype=bool)\n for i,f in enumerate(['Red', 'Green', 'Blue', 'Alpha']):\n mask[:,i] = chans[f]\n \n colors2 = item.map(data)\n \n op = item['Operation']\n if op == 'Add':\n colors[mask] = colors[mask] + colors2[mask]\n elif op == 'Multiply':\n colors[mask] *= colors2[mask]\n elif op == 'Overlay':\n a = colors2[:,3:4]\n c3 = colors * (1-a) + colors2 * a\n c3[:,3:4] = colors[:,3:4] + (1-colors[:,3:4]) * a\n colors = c3\n elif op == 'Set':\n colors[mask] = colors2[mask] \n \n colors = fn.clip_array(colors, 0., 1.)\n if mode == 'byte':\n colors = (colors * 255).astype(np.ubyte)\n \n return colors\n \n def saveState(self):\n items = OrderedDict()\n for item in self:\n itemState = item.saveState(filter='user')\n itemState['field'] = item.fieldName\n items[item.name()] = itemState\n state = {'fields': self.fields, 'items': items}\n return state\n\n def restoreState(self, state):\n if 'fields' in state:\n self.setFields(state['fields'])\n for name, itemState in state['items'].items():\n item = self.addNew(itemState['field'])\n item.restoreState(itemState)\n \n \nclass RangeColorMapItem(ptree.types.SimpleParameter):\n mapType = 'range'\n \n def __init__(self, name, opts):\n self.fieldName = name\n units = opts.get('units', '')\n ptree.types.SimpleParameter.__init__(self, \n name=name, autoIncrementName=True, type='colormap', removable=True, renamable=True, \n children=[\n #dict(name=\"Field\", type='list', value=name, limits=fields),\n dict(name='Min', type='float', value=0.0, suffix=units, siPrefix=True),\n dict(name='Max', type='float', value=1.0, suffix=units, siPrefix=True),\n dict(name='Operation', type='list', value='Overlay', limits=['Overlay', 'Add', 'Multiply', 'Set']),\n dict(name='Channels..', type='group', expanded=False, children=[\n dict(name='Red', type='bool', value=True),\n dict(name='Green', type='bool', value=True),\n dict(name='Blue', type='bool', value=True),\n dict(name='Alpha', type='bool', value=True),\n ]),\n dict(name='Enabled', type='bool', value=True),\n dict(name='NaN', type='color'),\n ])\n\n def map(self, data):\n data = data[self.fieldName]\n \n scaled = fn.clip_array((data-self['Min']) / (self['Max']-self['Min']), 0, 1)\n cmap = self.value()\n colors = cmap.map(scaled, mode='float')\n \n mask = 
np.invert(np.isfinite(data))\n nanColor = self['NaN']\n nanColor = nanColor.getRgbF()\n colors[mask] = nanColor\n \n return colors \n\nclass EnumColorMapItem(ptree.types.GroupParameter):\n mapType = 'enum'\n \n def __init__(self, name, opts):\n self.fieldName = name\n vals = opts.get('values', [])\n if isinstance(vals, list):\n vals = OrderedDict([(v,str(v)) for v in vals]) \n childs = []\n for val,vname in vals.items():\n ch = ptree.Parameter.create(name=vname, type='color')\n ch.maskValue = val\n childs.append(ch)\n \n ptree.types.GroupParameter.__init__(self, \n name=name, autoIncrementName=True, removable=True, renamable=True, \n children=[\n dict(name='Values', type='group', children=childs),\n dict(name='Operation', type='list', value='Overlay', limits=['Overlay', 'Add', 'Multiply', 'Set']),\n dict(name='Channels..', type='group', expanded=False, children=[\n dict(name='Red', type='bool', value=True),\n dict(name='Green', type='bool', value=True),\n dict(name='Blue', type='bool', value=True),\n dict(name='Alpha', type='bool', value=True),\n ]),\n dict(name='Enabled', type='bool', value=True),\n dict(name='Default', type='color'),\n ])\n \n def map(self, data):\n data = data[self.fieldName]\n colors = np.empty((len(data), 4))\n default = np.array(self['Default'].getRgbF())\n colors[:] = default\n \n for v in self.param('Values'):\n mask = data == v.maskValue\n c = np.array(v.value().getRgbF())\n colors[mask] = c\n #scaled = np.clip((data-self['Min']) / (self['Max']-self['Min']), 0, 1)\n #cmap = self.value()\n #colors = cmap.map(scaled, mode='float')\n \n #mask = np.isnan(data) | np.isinf(data)\n #nanColor = self['NaN']\n #nanColor = nanColor.getRgbF()\n #colors[mask] = nanColor\n \n return colors\n", "path": "pyqtgraph/widgets/ColorMapWidget.py"}], "after_files": [{"content": "from collections import OrderedDict\n\nimport numpy as np\n\nfrom .. import functions as fn\nfrom .. import parametertree as ptree\nfrom ..Qt import QtCore\n\n__all__ = ['ColorMapWidget']\n\nclass ColorMapWidget(ptree.ParameterTree):\n \"\"\"\n This class provides a widget allowing the user to customize color mapping\n for multi-column data. 
Given a list of field names, the user may specify\n multiple criteria for assigning colors to each record in a numpy record array.\n Multiple criteria are evaluated and combined into a single color for each\n record by user-defined compositing methods.\n \n For simpler color mapping using a single gradient editor, see \n :class:`GradientWidget <pyqtgraph.GradientWidget>`\n \"\"\"\n sigColorMapChanged = QtCore.Signal(object)\n \n def __init__(self, parent=None):\n ptree.ParameterTree.__init__(self, parent=parent, showHeader=False)\n \n self.params = ColorMapParameter()\n self.setParameters(self.params)\n self.params.sigTreeStateChanged.connect(self.mapChanged)\n \n ## wrap a couple methods \n self.setFields = self.params.setFields\n self.map = self.params.map\n\n def mapChanged(self):\n self.sigColorMapChanged.emit(self)\n\n def widgetGroupInterface(self):\n return (self.sigColorMapChanged, self.saveState, self.restoreState)\n\n def saveState(self):\n return self.params.saveState()\n\n def restoreState(self, state):\n self.params.restoreState(state)\n \n def addColorMap(self, name):\n \"\"\"Add a new color mapping and return the created parameter.\n \"\"\"\n return self.params.addNew(name)\n\n\nclass ColorMapParameter(ptree.types.GroupParameter):\n sigColorMapChanged = QtCore.Signal(object)\n \n def __init__(self):\n self.fields = {}\n ptree.types.GroupParameter.__init__(self, name='Color Map', addText='Add Mapping..', addList=[])\n self.sigTreeStateChanged.connect(self.mapChanged)\n \n def mapChanged(self):\n self.sigColorMapChanged.emit(self)\n \n def addNew(self, name):\n fieldSpec = self.fields[name]\n \n mode = fieldSpec.get('mode', 'range') \n if mode == 'range':\n item = RangeColorMapItem(name, self.fields[name])\n elif mode == 'enum':\n item = EnumColorMapItem(name, self.fields[name])\n\n defaults = fieldSpec.get('defaults', {})\n for k, v in defaults.items():\n if k == 'colormap':\n if mode == 'range':\n item.setValue(v)\n elif mode == 'enum':\n children = item.param('Values').children()\n for i, child in enumerate(children):\n try:\n child.setValue(v[i])\n except IndexError:\n continue\n else:\n item[k] = v\n\n self.addChild(item)\n return item\n \n def fieldNames(self):\n return list(self.fields.keys())\n \n def setFields(self, fields):\n \"\"\"\n Set the list of fields to be used by the mapper. \n \n The format of *fields* is::\n \n [ (fieldName, {options}), ... ]\n \n ============== ============================================================\n Field Options:\n mode Either 'range' or 'enum' (default is range). For 'range', \n The user may specify a gradient of colors to be applied \n linearly across a specific range of values. For 'enum', \n the user specifies a single color for each unique value\n (see *values* option).\n units String indicating the units of the data for this field.\n values List of unique values for which the user may assign a \n color when mode=='enum'. Optionally may specify a dict \n instead {value: name}.\n defaults Dict of default values to apply to color map items when\n they are created. 
Valid keys are 'colormap' to provide\n a default color map, or otherwise they a string or tuple\n indicating the parameter to be set, such as 'Operation' or\n ('Channels..', 'Red').\n ============== ============================================================\n \"\"\"\n self.fields = OrderedDict(fields)\n #self.fields = fields\n #self.fields.sort()\n names = self.fieldNames()\n self.setAddList(names)\n \n def map(self, data, mode='byte'):\n \"\"\"\n Return an array of colors corresponding to *data*. \n \n ============== =================================================================\n **Arguments:**\n data A numpy record array where the fields in data.dtype match those\n defined by a prior call to setFields().\n mode Either 'byte' or 'float'. For 'byte', the method returns an array\n of dtype ubyte with values scaled 0-255. For 'float', colors are\n returned as 0.0-1.0 float values.\n ============== =================================================================\n \"\"\"\n if isinstance(data, dict):\n data = np.array([tuple(data.values())], dtype=[(k, float) for k in data.keys()])\n\n colors = np.zeros((len(data),4))\n for item in self.children():\n if not item['Enabled']:\n continue\n chans = item.param('Channels..')\n mask = np.empty((len(data), 4), dtype=bool)\n for i,f in enumerate(['Red', 'Green', 'Blue', 'Alpha']):\n mask[:,i] = chans[f]\n \n colors2 = item.map(data)\n \n op = item['Operation']\n if op == 'Add':\n colors[mask] = colors[mask] + colors2[mask]\n elif op == 'Multiply':\n colors[mask] *= colors2[mask]\n elif op == 'Overlay':\n a = colors2[:,3:4]\n c3 = colors * (1-a) + colors2 * a\n c3[:,3:4] = colors[:,3:4] + (1-colors[:,3:4]) * a\n colors = c3\n elif op == 'Set':\n colors[mask] = colors2[mask] \n \n colors = fn.clip_array(colors, 0., 1.)\n if mode == 'byte':\n colors = (colors * 255).astype(np.ubyte)\n \n return colors\n \n def saveState(self):\n items = OrderedDict()\n for item in self:\n itemState = item.saveState(filter='user')\n itemState['field'] = item.fieldName\n items[item.name()] = itemState\n state = {'fields': self.fields, 'items': items}\n return state\n\n def restoreState(self, state):\n if 'fields' in state:\n self.setFields(state['fields'])\n for name, itemState in state['items'].items():\n item = self.addNew(itemState['field'])\n item.restoreState(itemState)\n \n \nclass RangeColorMapItem(ptree.types.ColorMapParameter):\n mapType = 'range'\n \n def __init__(self, name, opts):\n self.fieldName = name\n units = opts.get('units', '')\n ptree.types.ColorMapParameter.__init__(self,\n name=name, autoIncrementName=True, type='colormap', removable=True, renamable=True, \n children=[\n #dict(name=\"Field\", type='list', value=name, limits=fields),\n dict(name='Min', type='float', value=0.0, suffix=units, siPrefix=True),\n dict(name='Max', type='float', value=1.0, suffix=units, siPrefix=True),\n dict(name='Operation', type='list', value='Overlay', limits=['Overlay', 'Add', 'Multiply', 'Set']),\n dict(name='Channels..', type='group', expanded=False, children=[\n dict(name='Red', type='bool', value=True),\n dict(name='Green', type='bool', value=True),\n dict(name='Blue', type='bool', value=True),\n dict(name='Alpha', type='bool', value=True),\n ]),\n dict(name='Enabled', type='bool', value=True),\n dict(name='NaN', type='color'),\n ])\n\n def map(self, data):\n data = data[self.fieldName]\n \n scaled = fn.clip_array((data-self['Min']) / (self['Max']-self['Min']), 0, 1)\n cmap = self.value()\n colors = cmap.map(scaled, mode='float')\n \n mask = 
np.invert(np.isfinite(data))\n nanColor = self['NaN']\n nanColor = nanColor.getRgbF()\n colors[mask] = nanColor\n \n return colors \n\nclass EnumColorMapItem(ptree.types.GroupParameter):\n mapType = 'enum'\n \n def __init__(self, name, opts):\n self.fieldName = name\n vals = opts.get('values', [])\n if isinstance(vals, list):\n vals = OrderedDict([(v,str(v)) for v in vals]) \n childs = []\n for val,vname in vals.items():\n ch = ptree.Parameter.create(name=vname, type='color')\n ch.maskValue = val\n childs.append(ch)\n \n ptree.types.GroupParameter.__init__(self, \n name=name, autoIncrementName=True, removable=True, renamable=True, \n children=[\n dict(name='Values', type='group', children=childs),\n dict(name='Operation', type='list', value='Overlay', limits=['Overlay', 'Add', 'Multiply', 'Set']),\n dict(name='Channels..', type='group', expanded=False, children=[\n dict(name='Red', type='bool', value=True),\n dict(name='Green', type='bool', value=True),\n dict(name='Blue', type='bool', value=True),\n dict(name='Alpha', type='bool', value=True),\n ]),\n dict(name='Enabled', type='bool', value=True),\n dict(name='Default', type='color'),\n ])\n \n def map(self, data):\n data = data[self.fieldName]\n colors = np.empty((len(data), 4))\n default = np.array(self['Default'].getRgbF())\n colors[:] = default\n \n for v in self.param('Values'):\n mask = data == v.maskValue\n c = np.array(v.value().getRgbF())\n colors[mask] = c\n #scaled = np.clip((data-self['Min']) / (self['Max']-self['Min']), 0, 1)\n #cmap = self.value()\n #colors = cmap.map(scaled, mode='float')\n \n #mask = np.isnan(data) | np.isinf(data)\n #nanColor = self['NaN']\n #nanColor = nanColor.getRgbF()\n #colors[mask] = nanColor\n \n return colors\n", "path": "pyqtgraph/widgets/ColorMapWidget.py"}]}
| 3,775 | 202 |
gh_patches_debug_38642
|
rasdani/github-patches
|
git_diff
|
horovod__horovod-275
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
DistributedOptimizer cannot be used when underlying optimizer overwrites compute_gradients and apply_gradients
In our use case we have a TensorFlow optimizer that explicitly defines `compute_gradients` and `apply_gradients` functions. The current version of `DistributedOptimizer` cannot be used in our case because it does not overwrite `apply_gradients` to use` self._optimizer.apply_gradients` and it calls (lines 178-179 of `horovod/tensorflow/__init__.py`)
```python
gradients = super(DistributedOptimizer, self).compute_gradients(*args, **kwargs)
```
instead of `gradients = self._optimizer.compute_gradients(*args, **kwargs)`.
Is it possible to update `DistributedOptimizer` to work in our use case and similar situations when `compute_gradients` and `apply_gradients` are redefined? As far as I can see simply changing lines 178-179 of `horovod/tensorflow/__init__.py` to
```python
gradients = self._optimizer.compute_gradients(*args, **kwargs)
```
and adding additional method:
```python
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
"""Calls this same method on the underlying optimizer."""
return self._optimizer.apply_gradients(grads_and_vars, global_step, name)
```
should make it work.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `horovod/__init__.py`
Content:
```
1 __version__ = '0.13.0'
2
```
Path: `horovod/tensorflow/__init__.py`
Content:
```
1 # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2 # Modifications copyright (C) 2017 Uber Technologies, Inc.
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15 # ==============================================================================
16 # pylint: disable=g-short-docstring-punctuation
17 """## Communicating Between Processes with MPI
18
19 TensorFlow natively provides inter-device communication through send and
20 receive ops and inter-node communication through Distributed TensorFlow, based
21 on the same send and receive abstractions. On HPC clusters where Infiniband or
22 other high-speed node interconnects are available, these can end up being
23 insufficient for synchronous data-parallel training (without asynchronous
24 gradient descent). This module implements a variety of MPI ops which can take
25 advantage of hardware-specific MPI libraries for efficient communication.
26 """
27
28 from __future__ import absolute_import
29 from __future__ import division
30 from __future__ import print_function
31
32 from horovod.common import init
33 from horovod.common import size
34 from horovod.common import local_size
35 from horovod.common import rank
36 from horovod.common import local_rank
37 from horovod.common import mpi_threads_supported
38 from horovod.common import check_extension
39
40 check_extension('horovod.tensorflow', 'HOROVOD_WITH_TENSORFLOW', __file__, 'mpi_lib')
41
42 from horovod.tensorflow.mpi_ops import allgather
43 from horovod.tensorflow.mpi_ops import broadcast
44 from horovod.tensorflow.mpi_ops import _allreduce
45
46 import tensorflow as tf
47
48
49 def allreduce(tensor, average=True, device_dense='', device_sparse=''):
50 """Perform an allreduce on a tf.Tensor or tf.IndexedSlices.
51
52 Arguments:
53 tensor: tf.Tensor, tf.Variable, or tf.IndexedSlices to reduce.
54 The shape of the input must be identical across all ranks.
55 average: If True, computes the average over all ranks.
56 Otherwise, computes the sum over all ranks.
57 device_dense: Device to be used for dense tensors. Uses GPU by default
58 if Horovod was build with HOROVOD_GPU_ALLREDUCE.
59 device_sparse: Device to be used for sparse tensors. Uses GPU by default
60 if Horovod was build with HOROVOD_GPU_ALLGATHER.
61
62 This function performs a bandwidth-optimal ring allreduce on the input
63 tensor. If the input is an tf.IndexedSlices, the function instead does an
64 allgather on the values and the indices, effectively doing an allreduce on
65 the represented tensor.
66 """
67 if isinstance(tensor, tf.IndexedSlices):
68 with tf.device(device_sparse):
69 # For IndexedSlices, do two allgathers intead of an allreduce.
70 horovod_size = tf.cast(size(), tensor.values.dtype)
71 values = allgather(tensor.values)
72 indices = allgather(tensor.indices)
73
74 # To make this operation into an average, divide all gathered values by
75 # the Horovod size.
76 new_values = tf.div(values, horovod_size) if average else values
77 return tf.IndexedSlices(new_values, indices,
78 dense_shape=tensor.dense_shape)
79 else:
80 with tf.device(device_dense):
81 horovod_size = tf.cast(size(), tensor.dtype)
82 summed_tensor = _allreduce(tensor)
83 new_tensor = (tf.div(summed_tensor, horovod_size)
84 if average else summed_tensor)
85 return new_tensor
86
87
88 def broadcast_global_variables(root_rank):
89 """Broadcasts all global variables from root rank to all other processes.
90
91 Arguments:
92 root_rank: rank of the process from which global variables will be broadcasted
93 to all other processes.
94 """
95 return tf.group(*[tf.assign(var, broadcast(var, root_rank))
96 for var in tf.global_variables()])
97
98
99 class BroadcastGlobalVariablesHook(tf.train.SessionRunHook):
100 """
101 SessionRunHook that will broadcast all global variables from root rank
102 to all other processes during initialization.
103
104 This is necessary to ensure consistent initialization of all workers when
105 training is started with random weights or restored from a checkpoint.
106 """
107
108 def __init__(self, root_rank, device=''):
109 """Construct a new BroadcastGlobalVariablesHook that will broadcast all
110 global variables from root rank to all other processes during initialization.
111
112 Args:
113 root_rank:
114 Rank that will send data, other ranks will receive data.
115 device:
116 Device to be used for broadcasting. Uses GPU by default
117 if Horovod was build with HOROVOD_GPU_BROADCAST.
118 """
119 super(BroadcastGlobalVariablesHook, self).__init__()
120 self.root_rank = root_rank
121 self.bcast_op = None
122 self.device = device
123
124 def begin(self):
125 if not self.bcast_op or self.bcast_op.graph != tf.get_default_graph():
126 with tf.device(self.device):
127 self.bcast_op = broadcast_global_variables(self.root_rank)
128
129 def after_create_session(self, session, coord):
130 session.run(self.bcast_op)
131
132
133 class DistributedOptimizer(tf.train.Optimizer):
134 """An optimizer that wraps another tf.Optimizer, using an allreduce to
135 average gradient values before applying gradients to model weights."""
136
137 def __init__(self, optimizer, name=None, use_locking=False, device_dense='',
138 device_sparse=''):
139 """Construct a new DistributedOptimizer, which uses another optimizer
140 under the hood for computing single-process gradient values and
141 applying gradient updates after the gradient values have been averaged
142 across all the Horovod ranks.
143
144 Args:
145 optimizer:
146 Optimizer to use for computing gradients and applying updates.
147 name:
148 Optional name prefix for the operations created when applying
149 gradients. Defaults to "Distributed" followed by the provided
150 optimizer type.
151 use_locking:
152 Whether to use locking when updating variables.
153 See Optimizer.__init__ for more info.
154 device_dense:
155 Device to be used for dense tensors. Uses GPU by default
156 if Horovod was build with HOROVOD_GPU_ALLREDUCE.
157 device_sparse:
158 Device to be used for sparse tensors. Uses GPU by default
159 if Horovod was build with HOROVOD_GPU_ALLGATHER.
160 """
161 if name is None:
162 name = "Distributed{}".format(type(optimizer).__name__)
163
164 self._optimizer = optimizer
165 self._device_dense = device_dense
166 self._device_sparse = device_sparse
167 super(DistributedOptimizer, self).__init__(
168 name=name, use_locking=use_locking)
169
170 def compute_gradients(self, *args, **kwargs):
171 """Compute gradients of all trainable variables.
172
173 See Optimizer.compute_gradients() for more info.
174
175 In DistributedOptimizer, compute_gradients() is overriden to also
176 allreduce the gradients before returning them.
177 """
178 gradients = (super(DistributedOptimizer, self)
179 .compute_gradients(*args, **kwargs))
180 if size() > 1:
181 averaged_gradients = []
182 with tf.name_scope(self._name + "_Allreduce"):
183 for grad, var in gradients:
184 if grad is not None:
185 avg_grad = allreduce(grad, device_dense=self._device_dense,
186 device_sparse=self._device_sparse)
187 averaged_gradients.append((avg_grad, var))
188 else:
189 averaged_gradients.append((None, var))
190 return averaged_gradients
191 else:
192 return gradients
193
194 def _apply_dense(self, *args, **kwargs):
195 """Calls this same method on the underlying optimizer."""
196 return self._optimizer._apply_dense(*args, **kwargs)
197
198 def _resource_apply_dense(self, *args, **kwargs):
199 """Calls this same method on the underlying optimizer."""
200 return self._optimizer._resource_apply_dense(*args, **kwargs)
201
202 def _resource_apply_sparse_duplicate_indices(self, *args, **kwargs):
203 """Calls this same method on the underlying optimizer."""
204 return self._optimizer._resource_apply_sparse_duplicate_indices(*args, **kwargs)
205
206 def _resource_apply_sparse(self, *args, **kwargs):
207 """Calls this same method on the underlying optimizer."""
208 return self._optimizer._resource_apply_sparse(*args, **kwargs)
209
210 def _apply_sparse_duplicate_indices(self, *args, **kwargs):
211 """Calls this same method on the underlying optimizer."""
212 return self._optimizer._apply_sparse_duplicate_indices(*args, **kwargs)
213
214 def _apply_sparse(self, *args, **kwargs):
215 """Calls this same method on the underlying optimizer."""
216 return self._optimizer._apply_sparse(*args, **kwargs)
217
218 def _prepare(self, *args, **kwargs):
219 """Calls this same method on the underlying optimizer."""
220 return self._optimizer._prepare(*args, **kwargs)
221
222 def _create_slots(self, *args, **kwargs):
223 """Calls this same method on the underlying optimizer."""
224 return self._optimizer._create_slots(*args, **kwargs)
225
226 def _valid_dtypes(self, *args, **kwargs):
227 """Calls this same method on the underlying optimizer."""
228 return self._optimizer._valid_dtypes(*args, **kwargs)
229
230 def _finish(self, *args, **kwargs):
231 """Calls this same method on the underlying optimizer."""
232 return self._optimizer._finish(*args, **kwargs)
233
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/horovod/__init__.py b/horovod/__init__.py
--- a/horovod/__init__.py
+++ b/horovod/__init__.py
@@ -1 +1 @@
-__version__ = '0.13.0'
+__version__ = '0.13.1'
diff --git a/horovod/tensorflow/__init__.py b/horovod/tensorflow/__init__.py
--- a/horovod/tensorflow/__init__.py
+++ b/horovod/tensorflow/__init__.py
@@ -175,8 +175,7 @@
In DistributedOptimizer, compute_gradients() is overriden to also
allreduce the gradients before returning them.
"""
- gradients = (super(DistributedOptimizer, self)
- .compute_gradients(*args, **kwargs))
+ gradients = self._optimizer.compute_gradients(*args, **kwargs)
if size() > 1:
averaged_gradients = []
with tf.name_scope(self._name + "_Allreduce"):
@@ -191,42 +190,18 @@
else:
return gradients
- def _apply_dense(self, *args, **kwargs):
+ def apply_gradients(self, *args, **kwargs):
"""Calls this same method on the underlying optimizer."""
- return self._optimizer._apply_dense(*args, **kwargs)
+ return self._optimizer.apply_gradients(*args, **kwargs)
- def _resource_apply_dense(self, *args, **kwargs):
+ def get_slot(self, *args, **kwargs):
"""Calls this same method on the underlying optimizer."""
- return self._optimizer._resource_apply_dense(*args, **kwargs)
+ return self._optimizer.get_slot(*args, **kwargs)
- def _resource_apply_sparse_duplicate_indices(self, *args, **kwargs):
+ def get_slot_names(self, *args, **kwargs):
"""Calls this same method on the underlying optimizer."""
- return self._optimizer._resource_apply_sparse_duplicate_indices(*args, **kwargs)
+ return self._optimizer.get_slot_names(*args, **kwargs)
- def _resource_apply_sparse(self, *args, **kwargs):
+ def variables(self, *args, **kwargs):
"""Calls this same method on the underlying optimizer."""
- return self._optimizer._resource_apply_sparse(*args, **kwargs)
-
- def _apply_sparse_duplicate_indices(self, *args, **kwargs):
- """Calls this same method on the underlying optimizer."""
- return self._optimizer._apply_sparse_duplicate_indices(*args, **kwargs)
-
- def _apply_sparse(self, *args, **kwargs):
- """Calls this same method on the underlying optimizer."""
- return self._optimizer._apply_sparse(*args, **kwargs)
-
- def _prepare(self, *args, **kwargs):
- """Calls this same method on the underlying optimizer."""
- return self._optimizer._prepare(*args, **kwargs)
-
- def _create_slots(self, *args, **kwargs):
- """Calls this same method on the underlying optimizer."""
- return self._optimizer._create_slots(*args, **kwargs)
-
- def _valid_dtypes(self, *args, **kwargs):
- """Calls this same method on the underlying optimizer."""
- return self._optimizer._valid_dtypes(*args, **kwargs)
-
- def _finish(self, *args, **kwargs):
- """Calls this same method on the underlying optimizer."""
- return self._optimizer._finish(*args, **kwargs)
+ return self._optimizer.variables(*args, **kwargs)
|
{"golden_diff": "diff --git a/horovod/__init__.py b/horovod/__init__.py\n--- a/horovod/__init__.py\n+++ b/horovod/__init__.py\n@@ -1 +1 @@\n-__version__ = '0.13.0'\n+__version__ = '0.13.1'\ndiff --git a/horovod/tensorflow/__init__.py b/horovod/tensorflow/__init__.py\n--- a/horovod/tensorflow/__init__.py\n+++ b/horovod/tensorflow/__init__.py\n@@ -175,8 +175,7 @@\n In DistributedOptimizer, compute_gradients() is overriden to also\n allreduce the gradients before returning them.\n \"\"\"\n- gradients = (super(DistributedOptimizer, self)\n- .compute_gradients(*args, **kwargs))\n+ gradients = self._optimizer.compute_gradients(*args, **kwargs)\n if size() > 1:\n averaged_gradients = []\n with tf.name_scope(self._name + \"_Allreduce\"):\n@@ -191,42 +190,18 @@\n else:\n return gradients\n \n- def _apply_dense(self, *args, **kwargs):\n+ def apply_gradients(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n- return self._optimizer._apply_dense(*args, **kwargs)\n+ return self._optimizer.apply_gradients(*args, **kwargs)\n \n- def _resource_apply_dense(self, *args, **kwargs):\n+ def get_slot(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n- return self._optimizer._resource_apply_dense(*args, **kwargs)\n+ return self._optimizer.get_slot(*args, **kwargs)\n \n- def _resource_apply_sparse_duplicate_indices(self, *args, **kwargs):\n+ def get_slot_names(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n- return self._optimizer._resource_apply_sparse_duplicate_indices(*args, **kwargs)\n+ return self._optimizer.get_slot_names(*args, **kwargs)\n \n- def _resource_apply_sparse(self, *args, **kwargs):\n+ def variables(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n- return self._optimizer._resource_apply_sparse(*args, **kwargs)\n-\n- def _apply_sparse_duplicate_indices(self, *args, **kwargs):\n- \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n- return self._optimizer._apply_sparse_duplicate_indices(*args, **kwargs)\n-\n- def _apply_sparse(self, *args, **kwargs):\n- \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n- return self._optimizer._apply_sparse(*args, **kwargs)\n-\n- def _prepare(self, *args, **kwargs):\n- \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n- return self._optimizer._prepare(*args, **kwargs)\n-\n- def _create_slots(self, *args, **kwargs):\n- \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n- return self._optimizer._create_slots(*args, **kwargs)\n-\n- def _valid_dtypes(self, *args, **kwargs):\n- \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n- return self._optimizer._valid_dtypes(*args, **kwargs)\n-\n- def _finish(self, *args, **kwargs):\n- \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n- return self._optimizer._finish(*args, **kwargs)\n+ return self._optimizer.variables(*args, **kwargs)\n", "issue": "DistributedOptimizer cannot be used when underlying optimizer overwrites compute_gradients and apply_gradients\nIn our use case we have a TensorFlow optimizer that explicitly defines `compute_gradients` and `apply_gradients` functions. 
The current version of `DistributedOptimizer` cannot be used in our case because it does not overwrite `apply_gradients` to use` self._optimizer.apply_gradients` and it calls (lines 178-179 of `horovod/tensorflow/__init__.py`)\r\n```python\r\ngradients = super(DistributedOptimizer, self).compute_gradients(*args, **kwargs)\r\n```\r\ninstead of `gradients = self._optimizer.compute_gradients(*args, **kwargs)`.\r\n\r\nIs it possible to update `DistributedOptimizer` to work in our use case and similar situations when `compute_gradients` and `apply_gradients` are redefined? As far as I can see simply changing lines 178-179 of `horovod/tensorflow/__init__.py` to \r\n```python\r\ngradients = self._optimizer.compute_gradients(*args, **kwargs)\r\n```\r\nand adding additional method:\r\n```python\r\n def apply_gradients(self, grads_and_vars, global_step=None, name=None):\r\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\r\n return self._optimizer.apply_gradients(grads_and_vars, global_step, name)\r\n```\r\nshould make it work.\n", "before_files": [{"content": "__version__ = '0.13.0'\n", "path": "horovod/__init__.py"}, {"content": "# Copyright 2016 The TensorFlow Authors. All Rights Reserved.\n# Modifications copyright (C) 2017 Uber Technologies, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n# pylint: disable=g-short-docstring-punctuation\n\"\"\"## Communicating Between Processes with MPI\n\nTensorFlow natively provides inter-device communication through send and\nreceive ops and inter-node communication through Distributed TensorFlow, based\non the same send and receive abstractions. On HPC clusters where Infiniband or\nother high-speed node interconnects are available, these can end up being\ninsufficient for synchronous data-parallel training (without asynchronous\ngradient descent). 
This module implements a variety of MPI ops which can take\nadvantage of hardware-specific MPI libraries for efficient communication.\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom horovod.common import init\nfrom horovod.common import size\nfrom horovod.common import local_size\nfrom horovod.common import rank\nfrom horovod.common import local_rank\nfrom horovod.common import mpi_threads_supported\nfrom horovod.common import check_extension\n\ncheck_extension('horovod.tensorflow', 'HOROVOD_WITH_TENSORFLOW', __file__, 'mpi_lib')\n\nfrom horovod.tensorflow.mpi_ops import allgather\nfrom horovod.tensorflow.mpi_ops import broadcast\nfrom horovod.tensorflow.mpi_ops import _allreduce\n\nimport tensorflow as tf\n\n\ndef allreduce(tensor, average=True, device_dense='', device_sparse=''):\n \"\"\"Perform an allreduce on a tf.Tensor or tf.IndexedSlices.\n\n Arguments:\n tensor: tf.Tensor, tf.Variable, or tf.IndexedSlices to reduce.\n The shape of the input must be identical across all ranks.\n average: If True, computes the average over all ranks.\n Otherwise, computes the sum over all ranks.\n device_dense: Device to be used for dense tensors. Uses GPU by default\n if Horovod was build with HOROVOD_GPU_ALLREDUCE.\n device_sparse: Device to be used for sparse tensors. Uses GPU by default\n if Horovod was build with HOROVOD_GPU_ALLGATHER.\n\n This function performs a bandwidth-optimal ring allreduce on the input\n tensor. If the input is an tf.IndexedSlices, the function instead does an\n allgather on the values and the indices, effectively doing an allreduce on\n the represented tensor.\n \"\"\"\n if isinstance(tensor, tf.IndexedSlices):\n with tf.device(device_sparse):\n # For IndexedSlices, do two allgathers intead of an allreduce.\n horovod_size = tf.cast(size(), tensor.values.dtype)\n values = allgather(tensor.values)\n indices = allgather(tensor.indices)\n\n # To make this operation into an average, divide all gathered values by\n # the Horovod size.\n new_values = tf.div(values, horovod_size) if average else values\n return tf.IndexedSlices(new_values, indices,\n dense_shape=tensor.dense_shape)\n else:\n with tf.device(device_dense):\n horovod_size = tf.cast(size(), tensor.dtype)\n summed_tensor = _allreduce(tensor)\n new_tensor = (tf.div(summed_tensor, horovod_size)\n if average else summed_tensor)\n return new_tensor\n\n\ndef broadcast_global_variables(root_rank):\n \"\"\"Broadcasts all global variables from root rank to all other processes.\n\n Arguments:\n root_rank: rank of the process from which global variables will be broadcasted\n to all other processes.\n \"\"\"\n return tf.group(*[tf.assign(var, broadcast(var, root_rank))\n for var in tf.global_variables()])\n\n\nclass BroadcastGlobalVariablesHook(tf.train.SessionRunHook):\n \"\"\"\n SessionRunHook that will broadcast all global variables from root rank\n to all other processes during initialization.\n\n This is necessary to ensure consistent initialization of all workers when\n training is started with random weights or restored from a checkpoint.\n \"\"\"\n\n def __init__(self, root_rank, device=''):\n \"\"\"Construct a new BroadcastGlobalVariablesHook that will broadcast all\n global variables from root rank to all other processes during initialization.\n\n Args:\n root_rank:\n Rank that will send data, other ranks will receive data.\n device:\n Device to be used for broadcasting. 
Uses GPU by default\n if Horovod was build with HOROVOD_GPU_BROADCAST.\n \"\"\"\n super(BroadcastGlobalVariablesHook, self).__init__()\n self.root_rank = root_rank\n self.bcast_op = None\n self.device = device\n\n def begin(self):\n if not self.bcast_op or self.bcast_op.graph != tf.get_default_graph():\n with tf.device(self.device):\n self.bcast_op = broadcast_global_variables(self.root_rank)\n\n def after_create_session(self, session, coord):\n session.run(self.bcast_op)\n\n\nclass DistributedOptimizer(tf.train.Optimizer):\n \"\"\"An optimizer that wraps another tf.Optimizer, using an allreduce to\n average gradient values before applying gradients to model weights.\"\"\"\n\n def __init__(self, optimizer, name=None, use_locking=False, device_dense='',\n device_sparse=''):\n \"\"\"Construct a new DistributedOptimizer, which uses another optimizer\n under the hood for computing single-process gradient values and\n applying gradient updates after the gradient values have been averaged\n across all the Horovod ranks.\n\n Args:\n optimizer:\n Optimizer to use for computing gradients and applying updates.\n name:\n Optional name prefix for the operations created when applying\n gradients. Defaults to \"Distributed\" followed by the provided\n optimizer type.\n use_locking:\n Whether to use locking when updating variables.\n See Optimizer.__init__ for more info.\n device_dense:\n Device to be used for dense tensors. Uses GPU by default\n if Horovod was build with HOROVOD_GPU_ALLREDUCE.\n device_sparse:\n Device to be used for sparse tensors. Uses GPU by default\n if Horovod was build with HOROVOD_GPU_ALLGATHER.\n \"\"\"\n if name is None:\n name = \"Distributed{}\".format(type(optimizer).__name__)\n\n self._optimizer = optimizer\n self._device_dense = device_dense\n self._device_sparse = device_sparse\n super(DistributedOptimizer, self).__init__(\n name=name, use_locking=use_locking)\n\n def compute_gradients(self, *args, **kwargs):\n \"\"\"Compute gradients of all trainable variables.\n\n See Optimizer.compute_gradients() for more info.\n\n In DistributedOptimizer, compute_gradients() is overriden to also\n allreduce the gradients before returning them.\n \"\"\"\n gradients = (super(DistributedOptimizer, self)\n .compute_gradients(*args, **kwargs))\n if size() > 1:\n averaged_gradients = []\n with tf.name_scope(self._name + \"_Allreduce\"):\n for grad, var in gradients:\n if grad is not None:\n avg_grad = allreduce(grad, device_dense=self._device_dense,\n device_sparse=self._device_sparse)\n averaged_gradients.append((avg_grad, var))\n else:\n averaged_gradients.append((None, var))\n return averaged_gradients\n else:\n return gradients\n\n def _apply_dense(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer._apply_dense(*args, **kwargs)\n\n def _resource_apply_dense(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer._resource_apply_dense(*args, **kwargs)\n\n def _resource_apply_sparse_duplicate_indices(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer._resource_apply_sparse_duplicate_indices(*args, **kwargs)\n\n def _resource_apply_sparse(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer._resource_apply_sparse(*args, **kwargs)\n\n def _apply_sparse_duplicate_indices(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying 
optimizer.\"\"\"\n return self._optimizer._apply_sparse_duplicate_indices(*args, **kwargs)\n\n def _apply_sparse(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer._apply_sparse(*args, **kwargs)\n\n def _prepare(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer._prepare(*args, **kwargs)\n\n def _create_slots(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer._create_slots(*args, **kwargs)\n\n def _valid_dtypes(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer._valid_dtypes(*args, **kwargs)\n\n def _finish(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer._finish(*args, **kwargs)\n", "path": "horovod/tensorflow/__init__.py"}], "after_files": [{"content": "__version__ = '0.13.1'\n", "path": "horovod/__init__.py"}, {"content": "# Copyright 2016 The TensorFlow Authors. All Rights Reserved.\n# Modifications copyright (C) 2017 Uber Technologies, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n# pylint: disable=g-short-docstring-punctuation\n\"\"\"## Communicating Between Processes with MPI\n\nTensorFlow natively provides inter-device communication through send and\nreceive ops and inter-node communication through Distributed TensorFlow, based\non the same send and receive abstractions. On HPC clusters where Infiniband or\nother high-speed node interconnects are available, these can end up being\ninsufficient for synchronous data-parallel training (without asynchronous\ngradient descent). 
This module implements a variety of MPI ops which can take\nadvantage of hardware-specific MPI libraries for efficient communication.\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom horovod.common import init\nfrom horovod.common import size\nfrom horovod.common import local_size\nfrom horovod.common import rank\nfrom horovod.common import local_rank\nfrom horovod.common import mpi_threads_supported\nfrom horovod.common import check_extension\n\ncheck_extension('horovod.tensorflow', 'HOROVOD_WITH_TENSORFLOW', __file__, 'mpi_lib')\n\nfrom horovod.tensorflow.mpi_ops import allgather\nfrom horovod.tensorflow.mpi_ops import broadcast\nfrom horovod.tensorflow.mpi_ops import _allreduce\n\nimport tensorflow as tf\n\n\ndef allreduce(tensor, average=True, device_dense='', device_sparse=''):\n \"\"\"Perform an allreduce on a tf.Tensor or tf.IndexedSlices.\n\n Arguments:\n tensor: tf.Tensor, tf.Variable, or tf.IndexedSlices to reduce.\n The shape of the input must be identical across all ranks.\n average: If True, computes the average over all ranks.\n Otherwise, computes the sum over all ranks.\n device_dense: Device to be used for dense tensors. Uses GPU by default\n if Horovod was build with HOROVOD_GPU_ALLREDUCE.\n device_sparse: Device to be used for sparse tensors. Uses GPU by default\n if Horovod was build with HOROVOD_GPU_ALLGATHER.\n\n This function performs a bandwidth-optimal ring allreduce on the input\n tensor. If the input is an tf.IndexedSlices, the function instead does an\n allgather on the values and the indices, effectively doing an allreduce on\n the represented tensor.\n \"\"\"\n if isinstance(tensor, tf.IndexedSlices):\n with tf.device(device_sparse):\n # For IndexedSlices, do two allgathers intead of an allreduce.\n horovod_size = tf.cast(size(), tensor.values.dtype)\n values = allgather(tensor.values)\n indices = allgather(tensor.indices)\n\n # To make this operation into an average, divide all gathered values by\n # the Horovod size.\n new_values = tf.div(values, horovod_size) if average else values\n return tf.IndexedSlices(new_values, indices,\n dense_shape=tensor.dense_shape)\n else:\n with tf.device(device_dense):\n horovod_size = tf.cast(size(), tensor.dtype)\n summed_tensor = _allreduce(tensor)\n new_tensor = (tf.div(summed_tensor, horovod_size)\n if average else summed_tensor)\n return new_tensor\n\n\ndef broadcast_global_variables(root_rank):\n \"\"\"Broadcasts all global variables from root rank to all other processes.\n\n Arguments:\n root_rank: rank of the process from which global variables will be broadcasted\n to all other processes.\n \"\"\"\n return tf.group(*[tf.assign(var, broadcast(var, root_rank))\n for var in tf.global_variables()])\n\n\nclass BroadcastGlobalVariablesHook(tf.train.SessionRunHook):\n \"\"\"\n SessionRunHook that will broadcast all global variables from root rank\n to all other processes during initialization.\n\n This is necessary to ensure consistent initialization of all workers when\n training is started with random weights or restored from a checkpoint.\n \"\"\"\n\n def __init__(self, root_rank, device=''):\n \"\"\"Construct a new BroadcastGlobalVariablesHook that will broadcast all\n global variables from root rank to all other processes during initialization.\n\n Args:\n root_rank:\n Rank that will send data, other ranks will receive data.\n device:\n Device to be used for broadcasting. 
Uses GPU by default\n if Horovod was build with HOROVOD_GPU_BROADCAST.\n \"\"\"\n super(BroadcastGlobalVariablesHook, self).__init__()\n self.root_rank = root_rank\n self.bcast_op = None\n self.device = device\n\n def begin(self):\n if not self.bcast_op or self.bcast_op.graph != tf.get_default_graph():\n with tf.device(self.device):\n self.bcast_op = broadcast_global_variables(self.root_rank)\n\n def after_create_session(self, session, coord):\n session.run(self.bcast_op)\n\n\nclass DistributedOptimizer(tf.train.Optimizer):\n \"\"\"An optimizer that wraps another tf.Optimizer, using an allreduce to\n average gradient values before applying gradients to model weights.\"\"\"\n\n def __init__(self, optimizer, name=None, use_locking=False, device_dense='',\n device_sparse=''):\n \"\"\"Construct a new DistributedOptimizer, which uses another optimizer\n under the hood for computing single-process gradient values and\n applying gradient updates after the gradient values have been averaged\n across all the Horovod ranks.\n\n Args:\n optimizer:\n Optimizer to use for computing gradients and applying updates.\n name:\n Optional name prefix for the operations created when applying\n gradients. Defaults to \"Distributed\" followed by the provided\n optimizer type.\n use_locking:\n Whether to use locking when updating variables.\n See Optimizer.__init__ for more info.\n device_dense:\n Device to be used for dense tensors. Uses GPU by default\n if Horovod was build with HOROVOD_GPU_ALLREDUCE.\n device_sparse:\n Device to be used for sparse tensors. Uses GPU by default\n if Horovod was build with HOROVOD_GPU_ALLGATHER.\n \"\"\"\n if name is None:\n name = \"Distributed{}\".format(type(optimizer).__name__)\n\n self._optimizer = optimizer\n self._device_dense = device_dense\n self._device_sparse = device_sparse\n super(DistributedOptimizer, self).__init__(\n name=name, use_locking=use_locking)\n\n def compute_gradients(self, *args, **kwargs):\n \"\"\"Compute gradients of all trainable variables.\n\n See Optimizer.compute_gradients() for more info.\n\n In DistributedOptimizer, compute_gradients() is overriden to also\n allreduce the gradients before returning them.\n \"\"\"\n gradients = self._optimizer.compute_gradients(*args, **kwargs)\n if size() > 1:\n averaged_gradients = []\n with tf.name_scope(self._name + \"_Allreduce\"):\n for grad, var in gradients:\n if grad is not None:\n avg_grad = allreduce(grad, device_dense=self._device_dense,\n device_sparse=self._device_sparse)\n averaged_gradients.append((avg_grad, var))\n else:\n averaged_gradients.append((None, var))\n return averaged_gradients\n else:\n return gradients\n\n def apply_gradients(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer.apply_gradients(*args, **kwargs)\n\n def get_slot(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer.get_slot(*args, **kwargs)\n\n def get_slot_names(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer.get_slot_names(*args, **kwargs)\n\n def variables(self, *args, **kwargs):\n \"\"\"Calls this same method on the underlying optimizer.\"\"\"\n return self._optimizer.variables(*args, **kwargs)\n", "path": "horovod/tensorflow/__init__.py"}]}
| 3,251 | 794 |
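A short usage sketch for the Horovod record that closes above: it wires together `hvd.DistributedOptimizer` and `BroadcastGlobalVariablesHook` exactly as the excerpted module's docstrings describe. The toy variable, learning rate, and step count are illustrative assumptions rather than part of the record, the sketch targets the TF1-era `tf.train` API the excerpt is written against, and it would normally be launched with one process per device under `mpirun`.

```python
import tensorflow as tf
import horovod.tensorflow as hvd

hvd.init()

# Toy objective with a single trainable scalar, so the sketch stays self-contained.
w = tf.Variable(1.0, name="w")
loss = tf.square(w - 3.0)

# Wrap any tf.train optimizer; compute_gradients() allreduces gradients when size() > 1.
opt = hvd.DistributedOptimizer(tf.train.GradientDescentOptimizer(0.1))
train_op = opt.minimize(loss)

# Broadcast rank 0's initial variables so every worker starts from the same state.
hooks = [hvd.BroadcastGlobalVariablesHook(0)]
with tf.train.MonitoredTrainingSession(hooks=hooks) as sess:
    for _ in range(10):
        sess.run(train_op)
```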
gh_patches_debug_30935
|
rasdani/github-patches
|
git_diff
|
ansible-collections__community.vmware-1958
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Feature: vmware_guest_tools_upgrade to be applied without forcing reboot
<!--- Verify first that your feature was not already discussed on GitHub -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
<!--- Describe the new feature/improvement briefly below -->
vmware_guest_tools_upgrade should support shutdown as an option
##### ISSUE TYPE
- Feature Idea
- Users may wish to apply the latest vmtools without rebooting immediately.
##### COMPONENT NAME
<!--- Write the short name of the module, plugin, task or feature below, use your best guess if unsure -->
- https://github.com/ansible-collections/community.vmware/blob/main/plugins/module_utils/vmware.py#L968
##### ADDITIONAL INFORMATION
<!--- Describe how the feature would be used, why it is needed and what it would solve -->
- This allows the flexibility to set the desired state and then schedule the reboot in a production environment.
<!--- Paste example playbooks or commands between quotes below -->
```yaml
---
- name: Upgrade VMtools
hosts: "{{ ansible_hosts }}"
gather_facts: false
become: true
tasks:
- name: Upgrade VMtools
community.vmware.vmware_guest_tools_upgrade:
hostname: "{{ vcenter_url }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
datacenter: "{{ datacenter_name }}"
name: "{{ inventory_hostname.split('_')[0] }}"
validate_certs: "{{ vcenter_cert_validation }}"
allow_reboot: false
become: false
delegate_to: localhost
```
<!--- HINT: You can also paste gist.github.com links for larger files -->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `plugins/modules/vmware_guest_tools_upgrade.py`
Content:
```
1 #!/usr/bin/python
2 # -*- coding: utf-8 -*-
3
4 # Copyright: (c) 2018, Mike Klebolt <[email protected]>
5 # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
6 # SPDX-License-Identifier: GPL-3.0-or-later
7
8 from __future__ import (absolute_import, division, print_function)
9 __metaclass__ = type
10
11
12 DOCUMENTATION = r'''
13 ---
14 module: vmware_guest_tools_upgrade
15 short_description: Module to upgrade VMTools
16 description:
17 - This module upgrades the VMware Tools on Windows and Linux guests and reboots them.
18 notes:
19 - "In order to upgrade VMTools, please power on virtual machine before hand - either 'manually' or
20 using module M(community.vmware.vmware_guest_powerstate)."
21 options:
22 name:
23 description:
24 - Name of the virtual machine to work with.
25 - 'This is required if O(uuid) or O(moid) is not supplied.'
26 type: str
27 name_match:
28 description:
29 - If multiple virtual machines matching the name, use the first or last found.
30 default: 'first'
31 choices: ['first', 'last']
32 type: str
33 uuid:
34 description:
35 - "UUID of the instance to manage if known, this is VMware's unique identifier."
36 - This is required if O(name) or O(moid) is not supplied.
37 type: str
38 moid:
39 description:
40 - Managed Object ID of the instance to manage if known, this is a unique identifier only within a single vCenter instance.
41 - This is required if O(name) or O(uuid) is not supplied.
42 type: str
43 folder:
44 description:
45 - Destination folder, absolute or relative path to find an existing guest.
46 - This is required, if O(name) is supplied.
47 - "The folder should include the datacenter. ESX's datacenter is ha-datacenter"
48 - 'Examples:'
49 - ' folder: /ha-datacenter/vm'
50 - ' folder: ha-datacenter/vm'
51 - ' folder: /datacenter1/vm'
52 - ' folder: datacenter1/vm'
53 - ' folder: /datacenter1/vm/folder1'
54 - ' folder: datacenter1/vm/folder1'
55 - ' folder: /folder1/datacenter1/vm'
56 - ' folder: folder1/datacenter1/vm'
57 - ' folder: /folder1/datacenter1/vm/folder2'
58 type: str
59 datacenter:
60 description:
61 - Destination datacenter where the virtual machine exists.
62 required: true
63 type: str
64 force_upgrade:
65 description:
66 - This flag overrides the guest operating system detection and forcibly upgrade VMware tools or open-vm-tools.
67 - "This is useful when VMware tools is too old and unable to detect the 'guestFamily' value."
68 - 'Using this flag may sometime give unexpected results since module will override the default'
69 - "behaviour of 'guestFamily' detection."
70 default: false
71 type: bool
72 required: false
73 extends_documentation_fragment:
74 - community.vmware.vmware.documentation
75
76 author:
77 - Mike Klebolt (@MikeKlebolt) <[email protected]>
78 '''
79
80 EXAMPLES = r'''
81 - name: Get VM UUID
82 vmware_guest_facts:
83 hostname: "{{ vcenter_hostname }}"
84 username: "{{ vcenter_username }}"
85 password: "{{ vcenter_password }}"
86 datacenter: "{{ datacenter }}"
87 folder: "/{{datacenter}}/vm"
88 name: "{{ vm_name }}"
89 delegate_to: localhost
90 register: vm_facts
91
92 - name: Upgrade VMware Tools using uuid
93 community.vmware.vmware_guest_tools_upgrade:
94 hostname: "{{ vcenter_hostname }}"
95 username: "{{ vcenter_username }}"
96 password: "{{ vcenter_password }}"
97 datacenter: "{{ datacenter_name }}"
98 uuid: "{{ vm_facts.instance.hw_product_uuid }}"
99 delegate_to: localhost
100
101 - name: Upgrade VMware Tools using MoID
102 community.vmware.vmware_guest_tools_upgrade:
103 hostname: "{{ vcenter_hostname }}"
104 username: "{{ vcenter_username }}"
105 password: "{{ vcenter_password }}"
106 datacenter: "{{ datacenter_name }}"
107 moid: vm-42
108 delegate_to: localhost
109 '''
110
111 RETURN = r''' # '''
112
113
114 from ansible.module_utils.basic import AnsibleModule
115 from ansible_collections.community.vmware.plugins.module_utils.vmware import PyVmomi, vmware_argument_spec, wait_for_task
116 from ansible.module_utils._text import to_native
117
118
119 class PyVmomiHelper(PyVmomi):
120 def __init__(self, module):
121 super(PyVmomiHelper, self).__init__(module)
122
123 def upgrade_tools(self, vm):
124 result = {'failed': False, 'changed': False, 'msg': ''}
125 # Exit if VMware tools is already up to date
126 if vm.guest.toolsStatus == "toolsOk":
127 result.update(
128 changed=False,
129 msg="VMware tools is already up to date",
130 )
131 return result
132
133 # Fail if VM is not powered on
134 elif vm.summary.runtime.powerState != "poweredOn":
135 result.update(
136 failed=True,
137 msg="VM must be powered on to upgrade tools",
138 )
139 return result
140
141 # Fail if VMware tools is either not running or not installed
142 elif vm.guest.toolsStatus in ["toolsNotRunning", "toolsNotInstalled"]:
143 result.update(
144 failed=True,
145 msg="VMware tools is either not running or not installed",
146 )
147 return result
148
149 # If vmware tools is out of date, check major OS family
150 # Upgrade tools on Linux and Windows guests
151 elif vm.guest.toolsStatus == "toolsOld":
152 try:
153 force = self.module.params.get('force_upgrade')
154 if force or vm.guest.guestFamily in ["linuxGuest", "windowsGuest"]:
155 task = vm.UpgradeTools()
156 changed, err_msg = wait_for_task(task)
157 result.update(changed=changed, msg=to_native(err_msg))
158 else:
159 result.update(msg='Guest Operating System is other than Linux and Windows.')
160 return result
161 except Exception as exc:
162 result.update(
163 failed=True,
164 msg='Error while upgrading VMware tools %s' % to_native(exc),
165 )
166 return result
167 else:
168 result.update(
169 failed=True,
170 msg="VMware tools could not be upgraded",
171 )
172 return result
173
174
175 def main():
176 argument_spec = vmware_argument_spec()
177 argument_spec.update(
178 name=dict(type='str'),
179 name_match=dict(type='str', choices=['first', 'last'], default='first'),
180 uuid=dict(type='str'),
181 moid=dict(type='str'),
182 folder=dict(type='str'),
183 datacenter=dict(type='str', required=True),
184 force_upgrade=dict(type='bool', default=False),
185 )
186 module = AnsibleModule(
187 argument_spec=argument_spec,
188 required_one_of=[
189 ['name', 'uuid', 'moid']
190 ]
191 )
192
193 if module.params['folder']:
194 # FindByInventoryPath() does not require an absolute path
195 # so we should leave the input folder path unmodified
196 module.params['folder'] = module.params['folder'].rstrip('/')
197
198 pyv = PyVmomiHelper(module)
199 # Check if the VM exists before continuing
200 vm = pyv.get_vm()
201
202 # VM already exists
203 if vm:
204 try:
205 result = pyv.upgrade_tools(vm)
206 if result['changed']:
207 module.exit_json(changed=result['changed'])
208 elif result['failed']:
209 module.fail_json(msg=result['msg'])
210 else:
211 module.exit_json(msg=result['msg'], changed=result['changed'])
212 except Exception as exc:
213 module.fail_json(msg='Unknown error: %s' % to_native(exc))
214 else:
215 vm_id = module.params.get('uuid') or module.params.get('name') or module.params.get('moid')
216 module.fail_json(msg='Unable to find VM %s' % vm_id)
217
218
219 if __name__ == '__main__':
220 main()
221
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/plugins/modules/vmware_guest_tools_upgrade.py b/plugins/modules/vmware_guest_tools_upgrade.py
--- a/plugins/modules/vmware_guest_tools_upgrade.py
+++ b/plugins/modules/vmware_guest_tools_upgrade.py
@@ -70,6 +70,12 @@
default: false
type: bool
required: false
+ installer_options:
+ version_added: '4.1.0'
+ description:
+ - Command line options passed to the installer to modify the installation procedure for tools.
+ type: str
+ required: false
extends_documentation_fragment:
- community.vmware.vmware.documentation
@@ -151,8 +157,12 @@
elif vm.guest.toolsStatus == "toolsOld":
try:
force = self.module.params.get('force_upgrade')
+ installer_options = self.module.params.get('installer_options')
if force or vm.guest.guestFamily in ["linuxGuest", "windowsGuest"]:
- task = vm.UpgradeTools()
+ if installer_options is not None:
+ task = vm.UpgradeTools(installer_options)
+ else:
+ task = vm.UpgradeTools()
changed, err_msg = wait_for_task(task)
result.update(changed=changed, msg=to_native(err_msg))
else:
@@ -182,6 +192,7 @@
folder=dict(type='str'),
datacenter=dict(type='str', required=True),
force_upgrade=dict(type='bool', default=False),
+ installer_options=dict(type='str'),
)
module = AnsibleModule(
argument_spec=argument_spec,
|
{"golden_diff": "diff --git a/plugins/modules/vmware_guest_tools_upgrade.py b/plugins/modules/vmware_guest_tools_upgrade.py\n--- a/plugins/modules/vmware_guest_tools_upgrade.py\n+++ b/plugins/modules/vmware_guest_tools_upgrade.py\n@@ -70,6 +70,12 @@\n default: false\n type: bool\n required: false\n+ installer_options:\n+ version_added: '4.1.0'\n+ description:\n+ - Command line options passed to the installer to modify the installation procedure for tools.\n+ type: str\n+ required: false\n extends_documentation_fragment:\n - community.vmware.vmware.documentation\n \n@@ -151,8 +157,12 @@\n elif vm.guest.toolsStatus == \"toolsOld\":\n try:\n force = self.module.params.get('force_upgrade')\n+ installer_options = self.module.params.get('installer_options')\n if force or vm.guest.guestFamily in [\"linuxGuest\", \"windowsGuest\"]:\n- task = vm.UpgradeTools()\n+ if installer_options is not None:\n+ task = vm.UpgradeTools(installer_options)\n+ else:\n+ task = vm.UpgradeTools()\n changed, err_msg = wait_for_task(task)\n result.update(changed=changed, msg=to_native(err_msg))\n else:\n@@ -182,6 +192,7 @@\n folder=dict(type='str'),\n datacenter=dict(type='str', required=True),\n force_upgrade=dict(type='bool', default=False),\n+ installer_options=dict(type='str'),\n )\n module = AnsibleModule(\n argument_spec=argument_spec,\n", "issue": "Feature: vmware_guest_tools_upgrade to be applied without forcing reboot\n<!--- Verify first that your feature was not already discussed on GitHub -->\r\n<!--- Complete *all* sections as described, this form is processed automatically -->\r\n\r\n##### SUMMARY\r\n<!--- Describe the new feature/improvement briefly below -->\r\nvmware_guest_tools_upgrade should support shutdown as an option\r\n\r\n##### ISSUE TYPE\r\n- Feature Idea\r\n- Users may wish to apply the latest vmtools without rebooting immediately.\r\n\r\n##### COMPONENT NAME\r\n<!--- Write the short name of the module, plugin, task or feature below, use your best guess if unsure -->\r\n- https://github.com/ansible-collections/community.vmware/blob/main/plugins/module_utils/vmware.py#L968\r\n\r\n##### ADDITIONAL INFORMATION\r\n<!--- Describe how the feature would be used, why it is needed and what it would solve -->\r\n- This allows the flexibility to set the desired state and then schedule the reboot in a production environment.\r\n<!--- Paste example playbooks or commands between quotes below -->\r\n```yaml\r\n---\r\n- name: Upgrade VMtools\r\n hosts: \"{{ ansible_hosts }}\"\r\n gather_facts: false\r\n become: true\r\n tasks:\r\n\r\n - name: Upgrade VMtools\r\n community.vmware.vmware_guest_tools_upgrade:\r\n hostname: \"{{ vcenter_url }}\"\r\n username: \"{{ vcenter_username }}\"\r\n password: \"{{ vcenter_password }}\"\r\n datacenter: \"{{ datacenter_name }}\"\r\n name: \"{{ inventory_hostname.split('_')[0] }}\"\r\n validate_certs: \"{{ vcenter_cert_validation }}\"\r\n allow_reboot: false\r\n become: false\r\n delegate_to: localhost\r\n```\r\n\r\n<!--- HINT: You can also paste gist.github.com links for larger files -->\r\n\n", "before_files": [{"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n# Copyright: (c) 2018, Mike Klebolt <[email protected]>\n# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)\n# SPDX-License-Identifier: GPL-3.0-or-later\n\nfrom __future__ import (absolute_import, division, print_function)\n__metaclass__ = type\n\n\nDOCUMENTATION = r'''\n---\nmodule: vmware_guest_tools_upgrade\nshort_description: Module to upgrade 
VMTools\ndescription:\n - This module upgrades the VMware Tools on Windows and Linux guests and reboots them.\nnotes:\n - \"In order to upgrade VMTools, please power on virtual machine before hand - either 'manually' or\n using module M(community.vmware.vmware_guest_powerstate).\"\noptions:\n name:\n description:\n - Name of the virtual machine to work with.\n - 'This is required if O(uuid) or O(moid) is not supplied.'\n type: str\n name_match:\n description:\n - If multiple virtual machines matching the name, use the first or last found.\n default: 'first'\n choices: ['first', 'last']\n type: str\n uuid:\n description:\n - \"UUID of the instance to manage if known, this is VMware's unique identifier.\"\n - This is required if O(name) or O(moid) is not supplied.\n type: str\n moid:\n description:\n - Managed Object ID of the instance to manage if known, this is a unique identifier only within a single vCenter instance.\n - This is required if O(name) or O(uuid) is not supplied.\n type: str\n folder:\n description:\n - Destination folder, absolute or relative path to find an existing guest.\n - This is required, if O(name) is supplied.\n - \"The folder should include the datacenter. ESX's datacenter is ha-datacenter\"\n - 'Examples:'\n - ' folder: /ha-datacenter/vm'\n - ' folder: ha-datacenter/vm'\n - ' folder: /datacenter1/vm'\n - ' folder: datacenter1/vm'\n - ' folder: /datacenter1/vm/folder1'\n - ' folder: datacenter1/vm/folder1'\n - ' folder: /folder1/datacenter1/vm'\n - ' folder: folder1/datacenter1/vm'\n - ' folder: /folder1/datacenter1/vm/folder2'\n type: str\n datacenter:\n description:\n - Destination datacenter where the virtual machine exists.\n required: true\n type: str\n force_upgrade:\n description:\n - This flag overrides the guest operating system detection and forcibly upgrade VMware tools or open-vm-tools.\n - \"This is useful when VMware tools is too old and unable to detect the 'guestFamily' value.\"\n - 'Using this flag may sometime give unexpected results since module will override the default'\n - \"behaviour of 'guestFamily' detection.\"\n default: false\n type: bool\n required: false\nextends_documentation_fragment:\n- community.vmware.vmware.documentation\n\nauthor:\n - Mike Klebolt (@MikeKlebolt) <[email protected]>\n'''\n\nEXAMPLES = r'''\n- name: Get VM UUID\n vmware_guest_facts:\n hostname: \"{{ vcenter_hostname }}\"\n username: \"{{ vcenter_username }}\"\n password: \"{{ vcenter_password }}\"\n datacenter: \"{{ datacenter }}\"\n folder: \"/{{datacenter}}/vm\"\n name: \"{{ vm_name }}\"\n delegate_to: localhost\n register: vm_facts\n\n- name: Upgrade VMware Tools using uuid\n community.vmware.vmware_guest_tools_upgrade:\n hostname: \"{{ vcenter_hostname }}\"\n username: \"{{ vcenter_username }}\"\n password: \"{{ vcenter_password }}\"\n datacenter: \"{{ datacenter_name }}\"\n uuid: \"{{ vm_facts.instance.hw_product_uuid }}\"\n delegate_to: localhost\n\n- name: Upgrade VMware Tools using MoID\n community.vmware.vmware_guest_tools_upgrade:\n hostname: \"{{ vcenter_hostname }}\"\n username: \"{{ vcenter_username }}\"\n password: \"{{ vcenter_password }}\"\n datacenter: \"{{ datacenter_name }}\"\n moid: vm-42\n delegate_to: localhost\n'''\n\nRETURN = r''' # '''\n\n\nfrom ansible.module_utils.basic import AnsibleModule\nfrom ansible_collections.community.vmware.plugins.module_utils.vmware import PyVmomi, vmware_argument_spec, wait_for_task\nfrom ansible.module_utils._text import to_native\n\n\nclass PyVmomiHelper(PyVmomi):\n def __init__(self, module):\n 
super(PyVmomiHelper, self).__init__(module)\n\n def upgrade_tools(self, vm):\n result = {'failed': False, 'changed': False, 'msg': ''}\n # Exit if VMware tools is already up to date\n if vm.guest.toolsStatus == \"toolsOk\":\n result.update(\n changed=False,\n msg=\"VMware tools is already up to date\",\n )\n return result\n\n # Fail if VM is not powered on\n elif vm.summary.runtime.powerState != \"poweredOn\":\n result.update(\n failed=True,\n msg=\"VM must be powered on to upgrade tools\",\n )\n return result\n\n # Fail if VMware tools is either not running or not installed\n elif vm.guest.toolsStatus in [\"toolsNotRunning\", \"toolsNotInstalled\"]:\n result.update(\n failed=True,\n msg=\"VMware tools is either not running or not installed\",\n )\n return result\n\n # If vmware tools is out of date, check major OS family\n # Upgrade tools on Linux and Windows guests\n elif vm.guest.toolsStatus == \"toolsOld\":\n try:\n force = self.module.params.get('force_upgrade')\n if force or vm.guest.guestFamily in [\"linuxGuest\", \"windowsGuest\"]:\n task = vm.UpgradeTools()\n changed, err_msg = wait_for_task(task)\n result.update(changed=changed, msg=to_native(err_msg))\n else:\n result.update(msg='Guest Operating System is other than Linux and Windows.')\n return result\n except Exception as exc:\n result.update(\n failed=True,\n msg='Error while upgrading VMware tools %s' % to_native(exc),\n )\n return result\n else:\n result.update(\n failed=True,\n msg=\"VMware tools could not be upgraded\",\n )\n return result\n\n\ndef main():\n argument_spec = vmware_argument_spec()\n argument_spec.update(\n name=dict(type='str'),\n name_match=dict(type='str', choices=['first', 'last'], default='first'),\n uuid=dict(type='str'),\n moid=dict(type='str'),\n folder=dict(type='str'),\n datacenter=dict(type='str', required=True),\n force_upgrade=dict(type='bool', default=False),\n )\n module = AnsibleModule(\n argument_spec=argument_spec,\n required_one_of=[\n ['name', 'uuid', 'moid']\n ]\n )\n\n if module.params['folder']:\n # FindByInventoryPath() does not require an absolute path\n # so we should leave the input folder path unmodified\n module.params['folder'] = module.params['folder'].rstrip('/')\n\n pyv = PyVmomiHelper(module)\n # Check if the VM exists before continuing\n vm = pyv.get_vm()\n\n # VM already exists\n if vm:\n try:\n result = pyv.upgrade_tools(vm)\n if result['changed']:\n module.exit_json(changed=result['changed'])\n elif result['failed']:\n module.fail_json(msg=result['msg'])\n else:\n module.exit_json(msg=result['msg'], changed=result['changed'])\n except Exception as exc:\n module.fail_json(msg='Unknown error: %s' % to_native(exc))\n else:\n vm_id = module.params.get('uuid') or module.params.get('name') or module.params.get('moid')\n module.fail_json(msg='Unable to find VM %s' % vm_id)\n\n\nif __name__ == '__main__':\n main()\n", "path": "plugins/modules/vmware_guest_tools_upgrade.py"}], "after_files": [{"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n# Copyright: (c) 2018, Mike Klebolt <[email protected]>\n# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)\n# SPDX-License-Identifier: GPL-3.0-or-later\n\nfrom __future__ import (absolute_import, division, print_function)\n__metaclass__ = type\n\n\nDOCUMENTATION = r'''\n---\nmodule: vmware_guest_tools_upgrade\nshort_description: Module to upgrade VMTools\ndescription:\n - This module upgrades the VMware Tools on Windows and Linux guests and reboots them.\nnotes:\n - 
\"In order to upgrade VMTools, please power on virtual machine before hand - either 'manually' or\n using module M(community.vmware.vmware_guest_powerstate).\"\noptions:\n name:\n description:\n - Name of the virtual machine to work with.\n - 'This is required if O(uuid) or O(moid) is not supplied.'\n type: str\n name_match:\n description:\n - If multiple virtual machines matching the name, use the first or last found.\n default: 'first'\n choices: ['first', 'last']\n type: str\n uuid:\n description:\n - \"UUID of the instance to manage if known, this is VMware's unique identifier.\"\n - This is required if O(name) or O(moid) is not supplied.\n type: str\n moid:\n description:\n - Managed Object ID of the instance to manage if known, this is a unique identifier only within a single vCenter instance.\n - This is required if O(name) or O(uuid) is not supplied.\n type: str\n folder:\n description:\n - Destination folder, absolute or relative path to find an existing guest.\n - This is required, if O(name) is supplied.\n - \"The folder should include the datacenter. ESX's datacenter is ha-datacenter\"\n - 'Examples:'\n - ' folder: /ha-datacenter/vm'\n - ' folder: ha-datacenter/vm'\n - ' folder: /datacenter1/vm'\n - ' folder: datacenter1/vm'\n - ' folder: /datacenter1/vm/folder1'\n - ' folder: datacenter1/vm/folder1'\n - ' folder: /folder1/datacenter1/vm'\n - ' folder: folder1/datacenter1/vm'\n - ' folder: /folder1/datacenter1/vm/folder2'\n type: str\n datacenter:\n description:\n - Destination datacenter where the virtual machine exists.\n required: true\n type: str\n force_upgrade:\n description:\n - This flag overrides the guest operating system detection and forcibly upgrade VMware tools or open-vm-tools.\n - \"This is useful when VMware tools is too old and unable to detect the 'guestFamily' value.\"\n - 'Using this flag may sometime give unexpected results since module will override the default'\n - \"behaviour of 'guestFamily' detection.\"\n default: false\n type: bool\n required: false\n installer_options:\n version_added: '4.1.0'\n description:\n - Command line options passed to the installer to modify the installation procedure for tools.\n type: str\n required: false\nextends_documentation_fragment:\n- community.vmware.vmware.documentation\n\nauthor:\n - Mike Klebolt (@MikeKlebolt) <[email protected]>\n'''\n\nEXAMPLES = r'''\n- name: Get VM UUID\n vmware_guest_facts:\n hostname: \"{{ vcenter_hostname }}\"\n username: \"{{ vcenter_username }}\"\n password: \"{{ vcenter_password }}\"\n datacenter: \"{{ datacenter }}\"\n folder: \"/{{datacenter}}/vm\"\n name: \"{{ vm_name }}\"\n delegate_to: localhost\n register: vm_facts\n\n- name: Upgrade VMware Tools using uuid\n community.vmware.vmware_guest_tools_upgrade:\n hostname: \"{{ vcenter_hostname }}\"\n username: \"{{ vcenter_username }}\"\n password: \"{{ vcenter_password }}\"\n datacenter: \"{{ datacenter_name }}\"\n uuid: \"{{ vm_facts.instance.hw_product_uuid }}\"\n delegate_to: localhost\n\n- name: Upgrade VMware Tools using MoID\n community.vmware.vmware_guest_tools_upgrade:\n hostname: \"{{ vcenter_hostname }}\"\n username: \"{{ vcenter_username }}\"\n password: \"{{ vcenter_password }}\"\n datacenter: \"{{ datacenter_name }}\"\n moid: vm-42\n delegate_to: localhost\n'''\n\nRETURN = r''' # '''\n\n\nfrom ansible.module_utils.basic import AnsibleModule\nfrom ansible_collections.community.vmware.plugins.module_utils.vmware import PyVmomi, vmware_argument_spec, wait_for_task\nfrom ansible.module_utils._text import to_native\n\n\nclass 
PyVmomiHelper(PyVmomi):\n def __init__(self, module):\n super(PyVmomiHelper, self).__init__(module)\n\n def upgrade_tools(self, vm):\n result = {'failed': False, 'changed': False, 'msg': ''}\n # Exit if VMware tools is already up to date\n if vm.guest.toolsStatus == \"toolsOk\":\n result.update(\n changed=False,\n msg=\"VMware tools is already up to date\",\n )\n return result\n\n # Fail if VM is not powered on\n elif vm.summary.runtime.powerState != \"poweredOn\":\n result.update(\n failed=True,\n msg=\"VM must be powered on to upgrade tools\",\n )\n return result\n\n # Fail if VMware tools is either not running or not installed\n elif vm.guest.toolsStatus in [\"toolsNotRunning\", \"toolsNotInstalled\"]:\n result.update(\n failed=True,\n msg=\"VMware tools is either not running or not installed\",\n )\n return result\n\n # If vmware tools is out of date, check major OS family\n # Upgrade tools on Linux and Windows guests\n elif vm.guest.toolsStatus == \"toolsOld\":\n try:\n force = self.module.params.get('force_upgrade')\n installer_options = self.module.params.get('installer_options')\n if force or vm.guest.guestFamily in [\"linuxGuest\", \"windowsGuest\"]:\n if installer_options is not None:\n task = vm.UpgradeTools(installer_options)\n else:\n task = vm.UpgradeTools()\n changed, err_msg = wait_for_task(task)\n result.update(changed=changed, msg=to_native(err_msg))\n else:\n result.update(msg='Guest Operating System is other than Linux and Windows.')\n return result\n except Exception as exc:\n result.update(\n failed=True,\n msg='Error while upgrading VMware tools %s' % to_native(exc),\n )\n return result\n else:\n result.update(\n failed=True,\n msg=\"VMware tools could not be upgraded\",\n )\n return result\n\n\ndef main():\n argument_spec = vmware_argument_spec()\n argument_spec.update(\n name=dict(type='str'),\n name_match=dict(type='str', choices=['first', 'last'], default='first'),\n uuid=dict(type='str'),\n moid=dict(type='str'),\n folder=dict(type='str'),\n datacenter=dict(type='str', required=True),\n force_upgrade=dict(type='bool', default=False),\n installer_options=dict(type='str'),\n )\n module = AnsibleModule(\n argument_spec=argument_spec,\n required_one_of=[\n ['name', 'uuid', 'moid']\n ]\n )\n\n if module.params['folder']:\n # FindByInventoryPath() does not require an absolute path\n # so we should leave the input folder path unmodified\n module.params['folder'] = module.params['folder'].rstrip('/')\n\n pyv = PyVmomiHelper(module)\n # Check if the VM exists before continuing\n vm = pyv.get_vm()\n\n # VM already exists\n if vm:\n try:\n result = pyv.upgrade_tools(vm)\n if result['changed']:\n module.exit_json(changed=result['changed'])\n elif result['failed']:\n module.fail_json(msg=result['msg'])\n else:\n module.exit_json(msg=result['msg'], changed=result['changed'])\n except Exception as exc:\n module.fail_json(msg='Unknown error: %s' % to_native(exc))\n else:\n vm_id = module.params.get('uuid') or module.params.get('name') or module.params.get('moid')\n module.fail_json(msg='Unable to find VM %s' % vm_id)\n\n\nif __name__ == '__main__':\n main()\n", "path": "plugins/modules/vmware_guest_tools_upgrade.py"}]}
| 2,971 | 353 |
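For the record above, a hedged playbook sketch of the `installer_options` parameter that the golden diff introduces. The parameter is forwarded unchanged to `UpgradeTools()`; the Windows-style silent/no-reboot switches shown here are only an illustration and depend on the guest OS and tools version.

```yaml
- name: Upgrade VMware Tools without an immediate reboot (illustrative)
  community.vmware.vmware_guest_tools_upgrade:
    hostname: "{{ vcenter_hostname }}"
    username: "{{ vcenter_username }}"
    password: "{{ vcenter_password }}"
    datacenter: "{{ datacenter_name }}"
    name: "{{ vm_name }}"
    # Forwarded verbatim to vm.UpgradeTools(); exact switches vary by guest.
    installer_options: '/S /v "/qn REBOOT=R"'
  delegate_to: localhost
```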
gh_patches_debug_8220
|
rasdani/github-patches
|
git_diff
|
SciTools__cartopy-157
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Path <-> LineString vs Polygon conversion
Consider the following:
```
p = Path([[0, 0], [0, 2], [0, 0]])
print cpatch.path_to_geos(p)
```
In this case, the result should be a LineString, but the following should be a polygon:
```
p = Path([[0, 0], [0, 2], [2, 2], [2, 0], [0, 0]])
print cpatch.path_to_geos(p)
```
Update `cartopy.mpl_integration.patch` (possibly renaming it) to handle these cases in the best possible way (which way is, as yet, unclear).
Add tests for these cases.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lib/cartopy/mpl_integration/patch.py`
Content:
```
1 # (C) British Crown Copyright 2011 - 2012, Met Office
2 #
3 # This file is part of cartopy.
4 #
5 # cartopy is free software: you can redistribute it and/or modify it under
6 # the terms of the GNU Lesser General Public License as published by the
7 # Free Software Foundation, either version 3 of the License, or
8 # (at your option) any later version.
9 #
10 # cartopy is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public License
16 # along with cartopy. If not, see <http://www.gnu.org/licenses/>.
17 """
18 Provides shapely geometry <-> matplotlib path support.
19
20 See also `Shapely Geometric Objects <http://toblerity.github.com/shapely/manual.html#geometric-objects>`_
21 and `Matplotlib Path API <http://matplotlib.org/api/path_api.html>`_.
22
23 """
24
25 import numpy as np
26 import matplotlib.path
27 from matplotlib.path import Path
28 from shapely.geometry.collection import GeometryCollection
29 from shapely.geometry.linestring import LineString
30 from shapely.geometry.point import Point
31 from shapely.geometry.polygon import Polygon
32 from shapely.geometry.multilinestring import MultiLineString
33 from shapely.geometry.multipoint import MultiPoint
34 from shapely.geometry.multipolygon import MultiPolygon
35
36
37 def geos_to_path(shape):
38 """
39 Creates a list of :class:`matplotlib.path.Path` objects that describe
40 a shape.
41
42 Args:
43
44 * shape
45 A list, tuple or single instance of any of the following
46 types: :class:`shapely.geometry.point.Point`,
47 :class:`shapely.geometry.linestring.LineString`,
48 :class:`shapely.geometry.polygon.Polygon`,
49 :class:`shapely.geometry.multipoint.MultiPoint`,
50 :class:`shapely.geometry.multipolygon.MultiPolygon`,
51 :class:`shapely.geometry.multilinestring.MultiLineString`,
52 :class:`shapely.geometry.collection.GeometryCollection`,
53 or any type with a _as_mpl_path() method.
54
55 Returns:
56 A list of :class:`matplotlib.path.Path` objects.
57
58 """
59 if isinstance(shape, (list, tuple)):
60 paths = []
61 for shp in shape:
62 paths.extend(geos_to_path(shp))
63 return paths
64
65 if isinstance(shape, (LineString, Point)):
66 return [Path(np.vstack(shape.xy).T)]
67 elif isinstance(shape, Polygon):
68 def poly_codes(poly):
69 codes = np.ones(len(poly.xy[0])) * Path.LINETO
70 codes[0] = Path.MOVETO
71 return codes
72
73 vertices = np.concatenate([np.array(shape.exterior.xy)] +
74 [np.array(ring.xy) for ring in
75 shape.interiors], 1).T
76 codes = np.concatenate([poly_codes(shape.exterior)] +
77 [poly_codes(ring) for ring in shape.interiors])
78 return [Path(vertices, codes)]
79 elif isinstance(shape, (MultiPolygon, GeometryCollection, MultiLineString,
80 MultiPoint)):
81 paths = []
82 for geom in shape.geoms:
83 paths.extend(geos_to_path(geom))
84 return paths
85 elif hasattr(shape, '_as_mpl_path'):
86 vertices, codes = shape._as_mpl_path()
87 return [Path(vertices, codes)]
88 else:
89 raise ValueError('Unsupported shape type {}.'.format(type(shape)))
90
91
92 def path_segments(path, transform=None, remove_nans=False, clip=None,
93 quantize=False, simplify=False, curves=False,
94 stroke_width=1.0, snap=False):
95 """
96 Creates an array of vertices and a corresponding array of codes from a
97 :class:`matplotlib.path.Path`.
98
99 Args:
100
101 * path
102 A :class:`matplotlib.path.Path` instance.
103
104 Kwargs:
105 See :func:`matplotlib.path.iter_segments` for details of the keyword
106 arguments.
107
108 Returns:
109 A (vertices, codes) tuple, where vertices is a numpy array of
110 coordinates, and codes is a numpy array of matplotlib path codes.
111 See :class:`matplotlib.path.Path` for information on the types of
112 codes and their meanings.
113
114 """
115 # XXX assigned to avoid a ValueError inside the mpl C code...
116 a = transform, remove_nans, clip, quantize, simplify, curves
117
118 # Series of cleanups and conversions to the path e.g. it
119 # can convert curved segments to line segments.
120 vertices, codes = matplotlib.path.cleanup_path(path, transform,
121 remove_nans, clip,
122 snap, stroke_width,
123 simplify, curves)
124
125 # Remove the final vertex (with code 0)
126 return vertices[:-1, :], codes[:-1]
127
128
129 def path_to_geos(path):
130 """
131 Creates a list of Shapely geometric objects from a
132 :class:`matplotlib.path.Path`.
133
134 Args:
135
136 * path
137 A :class:`matplotlib.path.Path` instance.
138
139 Returns:
140 A list of :class:`shapely.geometry.polygon.Polygon`,
141 :class:`shapely.geometry.linestring.LineString` and/or
142 :class:`shapely.geometry.multilinestring.MultiLineString` instances.
143
144 """
145 # Convert path into numpy array of vertices (and associated codes)
146 path_verts, path_codes = path_segments(path, curves=False)
147
148 # Split into subarrays such that each subarray consists of connected
149 # line segments based on the start of each one being marked by a
150 # matplotlib MOVETO code.
151 verts_split_inds = np.where(path_codes == Path.MOVETO)[0]
152 verts_split = np.split(path_verts, verts_split_inds)
153 codes_split = np.split(path_codes, verts_split_inds)
154
155 # Iterate through the vertices generating a list of
156 # (external_geom, [internal_polygons]) tuples.
157 collection = []
158 for path_verts, path_codes in zip(verts_split, codes_split):
159 if len(path_verts) == 0:
160 continue
161
162 # XXX A path can be given which does not end with close poly, in that
163 # situation, we have to guess?
164 # XXX Implement a point
165
166 if path_verts.shape[0] > 2 and (path_codes[-1] == Path.CLOSEPOLY or
167 all(path_verts[0, :] == path_verts[-1, :])):
168 if path_codes[-1] == Path.CLOSEPOLY:
169 geom = Polygon(path_verts[:-1, :])
170 else:
171 geom = Polygon(path_verts)
172 else:
173 geom = LineString(path_verts)
174
175 # If geom is a Polygon and is contained within the last geom in collection,
176 # add it to its list of internal polygons, otherwise simple append it as a
177 # new external geom.
178 if (len(collection) > 0 and
179 isinstance(collection[-1][0], Polygon) and
180 isinstance(geom, Polygon) and
181 collection[-1][0].contains(geom.exterior)):
182 collection[-1][1].append(geom.exterior)
183 else:
184 collection.append((geom, []))
185
186 # Convert each (external_geom, [internal_polygons]) pair into a
187 # a shapely Polygon that encapsulates the internal polygons, if the
188 # external geom is a LineSting leave it alone.
189 geom_collection = []
190 for external_geom, internal_polys in collection:
191 if internal_polys:
192 # XXX worry about islands within lakes
193 geom = Polygon(external_geom.exterior, internal_polys)
194 else:
195 geom = external_geom
196 geom_collection.append(geom)
197
198 # If the geom_collection only contains LineStrings combine them
199 # into a single MultiLinestring.
200 if geom_collection and all(isinstance(geom, LineString) for
201 geom in geom_collection):
202 geom_collection = [MultiLineString(geom_collection)]
203
204 # Remove any zero area Polygons
205 result = filter(lambda geom: (isinstance(geom, Polygon) and geom.area != 0) or
206 not isinstance(geom, Polygon), geom_collection)
207
208 return result
209
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/lib/cartopy/mpl_integration/patch.py b/lib/cartopy/mpl_integration/patch.py
--- a/lib/cartopy/mpl_integration/patch.py
+++ b/lib/cartopy/mpl_integration/patch.py
@@ -202,7 +202,9 @@
geom_collection = [MultiLineString(geom_collection)]
# Remove any zero area Polygons
- result = filter(lambda geom: (isinstance(geom, Polygon) and geom.area != 0) or
- not isinstance(geom, Polygon), geom_collection)
-
+ not_zero_poly = lambda geom: ((isinstance(geom, Polygon) and
+ not geom._is_empty and geom.area != 0) or
+ not isinstance(geom, Polygon))
+ result = filter(not_zero_poly, geom_collection)
+
return result
|
{"golden_diff": "diff --git a/lib/cartopy/mpl_integration/patch.py b/lib/cartopy/mpl_integration/patch.py\n--- a/lib/cartopy/mpl_integration/patch.py\n+++ b/lib/cartopy/mpl_integration/patch.py\n@@ -202,7 +202,9 @@\n geom_collection = [MultiLineString(geom_collection)]\n \n # Remove any zero area Polygons\n- result = filter(lambda geom: (isinstance(geom, Polygon) and geom.area != 0) or\n- not isinstance(geom, Polygon), geom_collection)\n- \n+ not_zero_poly = lambda geom: ((isinstance(geom, Polygon) and\n+ not geom._is_empty and geom.area != 0) or\n+ not isinstance(geom, Polygon))\n+ result = filter(not_zero_poly, geom_collection)\n+\n return result\n", "issue": "Path <-> LineString vs Polygon conversion\nConsider the following:\n\n```\np = Path([[0, 0], [0, 2], [0, 0]])\nprint cpatch.path_to_geos(p)\n```\n\nIn this case, the result should be a LineString, but the following should be a polygon:\n\n```\np = Path([[0, 0], [0, 2], [2, 2], [2, 0], [0, 0]])\nprint cpatch.path_to_geos(p)\n```\n\nUpdate `cartopy.mpl_integration.patch` (possibly renaming it) to handle these cases in the best possible way (which way is, as yet, unclear). \n\nAdd tests for these cases.\n\n", "before_files": [{"content": "# (C) British Crown Copyright 2011 - 2012, Met Office\n#\n# This file is part of cartopy.\n#\n# cartopy is free software: you can redistribute it and/or modify it under\n# the terms of the GNU Lesser General Public License as published by the\n# Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# cartopy is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with cartopy. 
If not, see <http://www.gnu.org/licenses/>.\n\"\"\"\nProvides shapely geometry <-> matplotlib path support.\n\nSee also `Shapely Geometric Objects <http://toblerity.github.com/shapely/manual.html#geometric-objects>`_\nand `Matplotlib Path API <http://matplotlib.org/api/path_api.html>`_.\n\n\"\"\"\n\nimport numpy as np\nimport matplotlib.path\nfrom matplotlib.path import Path\nfrom shapely.geometry.collection import GeometryCollection\nfrom shapely.geometry.linestring import LineString\nfrom shapely.geometry.point import Point\nfrom shapely.geometry.polygon import Polygon\nfrom shapely.geometry.multilinestring import MultiLineString\nfrom shapely.geometry.multipoint import MultiPoint\nfrom shapely.geometry.multipolygon import MultiPolygon\n\n\ndef geos_to_path(shape):\n \"\"\"\n Creates a list of :class:`matplotlib.path.Path` objects that describe\n a shape.\n\n Args:\n\n * shape\n A list, tuple or single instance of any of the following\n types: :class:`shapely.geometry.point.Point`,\n :class:`shapely.geometry.linestring.LineString`,\n :class:`shapely.geometry.polygon.Polygon`,\n :class:`shapely.geometry.multipoint.MultiPoint`,\n :class:`shapely.geometry.multipolygon.MultiPolygon`,\n :class:`shapely.geometry.multilinestring.MultiLineString`,\n :class:`shapely.geometry.collection.GeometryCollection`,\n or any type with a _as_mpl_path() method.\n\n Returns:\n A list of :class:`matplotlib.path.Path` objects.\n\n \"\"\"\n if isinstance(shape, (list, tuple)):\n paths = []\n for shp in shape:\n paths.extend(geos_to_path(shp))\n return paths\n\n if isinstance(shape, (LineString, Point)):\n return [Path(np.vstack(shape.xy).T)]\n elif isinstance(shape, Polygon):\n def poly_codes(poly):\n codes = np.ones(len(poly.xy[0])) * Path.LINETO\n codes[0] = Path.MOVETO\n return codes\n \n vertices = np.concatenate([np.array(shape.exterior.xy)] +\n [np.array(ring.xy) for ring in\n shape.interiors], 1).T\n codes = np.concatenate([poly_codes(shape.exterior)] +\n [poly_codes(ring) for ring in shape.interiors])\n return [Path(vertices, codes)]\n elif isinstance(shape, (MultiPolygon, GeometryCollection, MultiLineString,\n MultiPoint)):\n paths = []\n for geom in shape.geoms:\n paths.extend(geos_to_path(geom))\n return paths\n elif hasattr(shape, '_as_mpl_path'):\n vertices, codes = shape._as_mpl_path()\n return [Path(vertices, codes)]\n else:\n raise ValueError('Unsupported shape type {}.'.format(type(shape)))\n\n\ndef path_segments(path, transform=None, remove_nans=False, clip=None,\n quantize=False, simplify=False, curves=False,\n stroke_width=1.0, snap=False):\n \"\"\"\n Creates an array of vertices and a corresponding array of codes from a\n :class:`matplotlib.path.Path`.\n\n Args:\n\n * path\n A :class:`matplotlib.path.Path` instance.\n\n Kwargs:\n See :func:`matplotlib.path.iter_segments` for details of the keyword\n arguments.\n\n Returns:\n A (vertices, codes) tuple, where vertices is a numpy array of\n coordinates, and codes is a numpy array of matplotlib path codes.\n See :class:`matplotlib.path.Path` for information on the types of\n codes and their meanings.\n\n \"\"\"\n # XXX assigned to avoid a ValueError inside the mpl C code...\n a = transform, remove_nans, clip, quantize, simplify, curves\n\n # Series of cleanups and conversions to the path e.g. 
it\n # can convert curved segments to line segments.\n vertices, codes = matplotlib.path.cleanup_path(path, transform,\n remove_nans, clip,\n snap, stroke_width,\n simplify, curves)\n \n # Remove the final vertex (with code 0)\n return vertices[:-1, :], codes[:-1]\n\n\ndef path_to_geos(path):\n \"\"\"\n Creates a list of Shapely geometric objects from a\n :class:`matplotlib.path.Path`.\n\n Args:\n\n * path\n A :class:`matplotlib.path.Path` instance.\n\n Returns:\n A list of :class:`shapely.geometry.polygon.Polygon`,\n :class:`shapely.geometry.linestring.LineString` and/or\n :class:`shapely.geometry.multilinestring.MultiLineString` instances.\n\n \"\"\"\n # Convert path into numpy array of vertices (and associated codes)\n path_verts, path_codes = path_segments(path, curves=False)\n \n # Split into subarrays such that each subarray consists of connected\n # line segments based on the start of each one being marked by a\n # matplotlib MOVETO code.\n verts_split_inds = np.where(path_codes == Path.MOVETO)[0]\n verts_split = np.split(path_verts, verts_split_inds)\n codes_split = np.split(path_codes, verts_split_inds)\n \n # Iterate through the vertices generating a list of\n # (external_geom, [internal_polygons]) tuples.\n collection = []\n for path_verts, path_codes in zip(verts_split, codes_split):\n if len(path_verts) == 0:\n continue\n\n # XXX A path can be given which does not end with close poly, in that\n # situation, we have to guess?\n # XXX Implement a point\n \n if path_verts.shape[0] > 2 and (path_codes[-1] == Path.CLOSEPOLY or\n all(path_verts[0, :] == path_verts[-1, :])):\n if path_codes[-1] == Path.CLOSEPOLY:\n geom = Polygon(path_verts[:-1, :])\n else:\n geom = Polygon(path_verts)\n else:\n geom = LineString(path_verts)\n\n # If geom is a Polygon and is contained within the last geom in collection,\n # add it to its list of internal polygons, otherwise simple append it as a\n # new external geom.\n if (len(collection) > 0 and \n isinstance(collection[-1][0], Polygon) and\n isinstance(geom, Polygon) and\n collection[-1][0].contains(geom.exterior)):\n collection[-1][1].append(geom.exterior)\n else:\n collection.append((geom, []))\n \n # Convert each (external_geom, [internal_polygons]) pair into a\n # a shapely Polygon that encapsulates the internal polygons, if the\n # external geom is a LineSting leave it alone.\n geom_collection = []\n for external_geom, internal_polys in collection:\n if internal_polys:\n # XXX worry about islands within lakes\n geom = Polygon(external_geom.exterior, internal_polys)\n else:\n geom = external_geom\n geom_collection.append(geom)\n \n # If the geom_collection only contains LineStrings combine them\n # into a single MultiLinestring.\n if geom_collection and all(isinstance(geom, LineString) for\n geom in geom_collection):\n geom_collection = [MultiLineString(geom_collection)]\n\n # Remove any zero area Polygons\n result = filter(lambda geom: (isinstance(geom, Polygon) and geom.area != 0) or\n not isinstance(geom, Polygon), geom_collection)\n \n return result\n", "path": "lib/cartopy/mpl_integration/patch.py"}], "after_files": [{"content": "# (C) British Crown Copyright 2011 - 2012, Met Office\n#\n# This file is part of cartopy.\n#\n# cartopy is free software: you can redistribute it and/or modify it under\n# the terms of the GNU Lesser General Public License as published by the\n# Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# cartopy is distributed in the hope that it will be useful,\n# 
but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with cartopy. If not, see <http://www.gnu.org/licenses/>.\n\"\"\"\nProvides shapely geometry <-> matplotlib path support.\n\nSee also `Shapely Geometric Objects <http://toblerity.github.com/shapely/manual.html#geometric-objects>`_\nand `Matplotlib Path API <http://matplotlib.org/api/path_api.html>`_.\n\n\"\"\"\n\nimport numpy as np\nimport matplotlib.path\nfrom matplotlib.path import Path\nfrom shapely.geometry.collection import GeometryCollection\nfrom shapely.geometry.linestring import LineString\nfrom shapely.geometry.point import Point\nfrom shapely.geometry.polygon import Polygon\nfrom shapely.geometry.multilinestring import MultiLineString\nfrom shapely.geometry.multipoint import MultiPoint\nfrom shapely.geometry.multipolygon import MultiPolygon\n\n\ndef geos_to_path(shape):\n \"\"\"\n Creates a list of :class:`matplotlib.path.Path` objects that describe\n a shape.\n\n Args:\n\n * shape\n A list, tuple or single instance of any of the following\n types: :class:`shapely.geometry.point.Point`,\n :class:`shapely.geometry.linestring.LineString`,\n :class:`shapely.geometry.polygon.Polygon`,\n :class:`shapely.geometry.multipoint.MultiPoint`,\n :class:`shapely.geometry.multipolygon.MultiPolygon`,\n :class:`shapely.geometry.multilinestring.MultiLineString`,\n :class:`shapely.geometry.collection.GeometryCollection`,\n or any type with a _as_mpl_path() method.\n\n Returns:\n A list of :class:`matplotlib.path.Path` objects.\n\n \"\"\"\n if isinstance(shape, (list, tuple)):\n paths = []\n for shp in shape:\n paths.extend(geos_to_path(shp))\n return paths\n\n if isinstance(shape, (LineString, Point)):\n return [Path(np.vstack(shape.xy).T)]\n elif isinstance(shape, Polygon):\n def poly_codes(poly):\n codes = np.ones(len(poly.xy[0])) * Path.LINETO\n codes[0] = Path.MOVETO\n return codes\n \n vertices = np.concatenate([np.array(shape.exterior.xy)] +\n [np.array(ring.xy) for ring in\n shape.interiors], 1).T\n codes = np.concatenate([poly_codes(shape.exterior)] +\n [poly_codes(ring) for ring in shape.interiors])\n return [Path(vertices, codes)]\n elif isinstance(shape, (MultiPolygon, GeometryCollection, MultiLineString,\n MultiPoint)):\n paths = []\n for geom in shape.geoms:\n paths.extend(geos_to_path(geom))\n return paths\n elif hasattr(shape, '_as_mpl_path'):\n vertices, codes = shape._as_mpl_path()\n return [Path(vertices, codes)]\n else:\n raise ValueError('Unsupported shape type {}.'.format(type(shape)))\n\n\ndef path_segments(path, transform=None, remove_nans=False, clip=None,\n quantize=False, simplify=False, curves=False,\n stroke_width=1.0, snap=False):\n \"\"\"\n Creates an array of vertices and a corresponding array of codes from a\n :class:`matplotlib.path.Path`.\n\n Args:\n\n * path\n A :class:`matplotlib.path.Path` instance.\n\n Kwargs:\n See :func:`matplotlib.path.iter_segments` for details of the keyword\n arguments.\n\n Returns:\n A (vertices, codes) tuple, where vertices is a numpy array of\n coordinates, and codes is a numpy array of matplotlib path codes.\n See :class:`matplotlib.path.Path` for information on the types of\n codes and their meanings.\n\n \"\"\"\n # XXX assigned to avoid a ValueError inside the mpl C code...\n a = transform, remove_nans, clip, quantize, simplify, curves\n\n # Series of cleanups and 
conversions to the path e.g. it\n # can convert curved segments to line segments.\n vertices, codes = matplotlib.path.cleanup_path(path, transform,\n remove_nans, clip,\n snap, stroke_width,\n simplify, curves)\n \n # Remove the final vertex (with code 0)\n return vertices[:-1, :], codes[:-1]\n\n\ndef path_to_geos(path):\n \"\"\"\n Creates a list of Shapely geometric objects from a\n :class:`matplotlib.path.Path`.\n\n Args:\n\n * path\n A :class:`matplotlib.path.Path` instance.\n\n Returns:\n A list of :class:`shapely.geometry.polygon.Polygon`,\n :class:`shapely.geometry.linestring.LineString` and/or\n :class:`shapely.geometry.multilinestring.MultiLineString` instances.\n\n \"\"\"\n # Convert path into numpy array of vertices (and associated codes)\n path_verts, path_codes = path_segments(path, curves=False)\n \n # Split into subarrays such that each subarray consists of connected\n # line segments based on the start of each one being marked by a\n # matplotlib MOVETO code.\n verts_split_inds = np.where(path_codes == Path.MOVETO)[0]\n verts_split = np.split(path_verts, verts_split_inds)\n codes_split = np.split(path_codes, verts_split_inds)\n \n # Iterate through the vertices generating a list of\n # (external_geom, [internal_polygons]) tuples.\n collection = []\n for path_verts, path_codes in zip(verts_split, codes_split):\n if len(path_verts) == 0:\n continue\n\n # XXX A path can be given which does not end with close poly, in that\n # situation, we have to guess?\n # XXX Implement a point\n \n if path_verts.shape[0] > 2 and (path_codes[-1] == Path.CLOSEPOLY or\n all(path_verts[0, :] == path_verts[-1, :])):\n if path_codes[-1] == Path.CLOSEPOLY:\n geom = Polygon(path_verts[:-1, :])\n else:\n geom = Polygon(path_verts)\n else:\n geom = LineString(path_verts)\n\n # If geom is a Polygon and is contained within the last geom in collection,\n # add it to its list of internal polygons, otherwise simple append it as a\n # new external geom.\n if (len(collection) > 0 and \n isinstance(collection[-1][0], Polygon) and\n isinstance(geom, Polygon) and\n collection[-1][0].contains(geom.exterior)):\n collection[-1][1].append(geom.exterior)\n else:\n collection.append((geom, []))\n \n # Convert each (external_geom, [internal_polygons]) pair into a\n # a shapely Polygon that encapsulates the internal polygons, if the\n # external geom is a LineSting leave it alone.\n geom_collection = []\n for external_geom, internal_polys in collection:\n if internal_polys:\n # XXX worry about islands within lakes\n geom = Polygon(external_geom.exterior, internal_polys)\n else:\n geom = external_geom\n geom_collection.append(geom)\n \n # If the geom_collection only contains LineStrings combine them\n # into a single MultiLinestring.\n if geom_collection and all(isinstance(geom, LineString) for\n geom in geom_collection):\n geom_collection = [MultiLineString(geom_collection)]\n\n # Remove any zero area Polygons\n not_zero_poly = lambda geom: ((isinstance(geom, Polygon) and\n not geom._is_empty and geom.area != 0) or\n not isinstance(geom, Polygon))\n result = filter(not_zero_poly, geom_collection)\n\n return result\n", "path": "lib/cartopy/mpl_integration/patch.py"}]}
| 2,730 | 184 |
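A small Shapely-only sketch of the distinction the cartopy record above turns on: a ring that collapses onto a line has zero area and is better treated as a LineString, while the square ring is a genuine Polygon. The filter mirrors the idea of the golden diff (which checks the private `geom._is_empty`); the public `is_empty` attribute is used here instead, and the geometries are built directly rather than via `path_to_geos`.

```python
from shapely.geometry import LineString, Polygon

square = Polygon([(0, 0), (0, 2), (2, 2), (2, 0)])   # area 4.0
degenerate = LineString([(0, 0), (0, 2), (0, 0)])     # the collapsed ring

geoms = [square, degenerate]
# Keep non-polygons, and polygons that are neither empty nor zero-area.
kept = [g for g in geoms
        if not isinstance(g, Polygon) or (not g.is_empty and g.area != 0)]
print(kept)  # both survive; an empty or zero-area Polygon would be dropped
```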
gh_patches_debug_6593
|
rasdani/github-patches
|
git_diff
|
enthought__chaco-540
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ScatterInspector does not deselect all on index == None
I've been playing around with the scattertoggle.py example. Maybe I'm missing something, but the ScatterInspector seems to fail to deselect all points when clicking on whitespace on the plot (that is, index == None). Something along the lines of this seems to fix it.
``` python
class PatchedScatterInspector(ScatterInspector):
def _deselect(self, index=None):
plot = self.component
if index:
super(PatchedScatterInspector, self)._deselect(index)
else:
for name in ('index', 'value'):
if not hasattr(plot, name):
continue
md = getattr(plot, name).metadata
md[self.selection_metadata_name] = []
getattr(plot, name).metadata_changed = True
return
```
Cheers,
-A
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `chaco/tools/scatter_inspector.py`
Content:
```
1 """ Defines the ScatterInspector tool class.
2 """
3
4 # Enthought library imports
5 from traits.api import Any, Bool, Enum, Event, HasStrictTraits, Str
6
7 # Local, relative imports
8 from .select_tool import SelectTool
9
10 HOVER_EVENT = "hover"
11
12 SELECT_EVENT = "select"
13
14 DESELECT_EVENT = "deselect"
15
16
17 class ScatterInspectorEvent(HasStrictTraits):
18 #: Is it a hover event or a selection event?
19 event_type = Enum([HOVER_EVENT, SELECT_EVENT, DESELECT_EVENT])
20
21 #: What index was involved?
22 event_index = Any
23
24
25 class ScatterInspector(SelectTool):
26 """ A tool for inspecting scatter plots.
27
28 It writes the index of the point under the cursor to the metadata of the
29 index and value data sources, and allows clicking to select the point.
30 Other components can listen for metadata updates on the data sources.
31
32 By default, it writes the index of the point under the cursor to the
33 "hover" key in metadata, and the index of a clicked point to "selection".
34 """
35
36 #: If persistent_hover is False, then a point will be de-hovered as soon as
37 #: the mouse leaves its hit-testing area. If persistent_hover is True, then
38 #: a point does no de-hover until another point get hover focus.
39 persistent_hover = Bool(False)
40
41 #: The names of the data source metadata for hover and selection events.
42 hover_metadata_name = Str('hover')
43 selection_metadata_name = Str('selections')
44
45 #: This tool emits events when hover or selection changes
46 inspector_event = Event(ScatterInspectorEvent)
47
48 # -------------------------------------------------------------------------
49 # Override/configure inherited traits
50 # -------------------------------------------------------------------------
51
52 #: This tool is not visible
53 visible = False
54
55 #: This tool does not have a visual representation
56 draw_mode = "none"
57
58 def normal_mouse_move(self, event):
59 """ Handles the mouse moving when the tool is in the 'normal' state.
60
61 If the cursor is within **threshold** of a data point, the method
62 writes the index to the plot's data sources' "hover" metadata.
63
64 This method emits a ScatterInspectorEvent when a new scatter point is
65 hovered over and when the mouse leaves that point.
66 """
67 plot = self.component
68 index = plot.map_index((event.x, event.y), threshold=self.threshold)
69 insp_event = ScatterInspectorEvent(event_type=HOVER_EVENT,
70 event_index=index)
71 if index is not None:
72 old = plot.index.metadata.get(self.hover_metadata_name, None)
73 plot.index.metadata[self.hover_metadata_name] = [index]
74 if old != [index]:
75 self.inspector_event = insp_event
76 if hasattr(plot, "value"):
77 plot.value.metadata[self.hover_metadata_name] = [index]
78 elif not self.persistent_hover:
79 old = plot.index.metadata.pop(self.hover_metadata_name, None)
80 if old:
81 self.inspector_event = insp_event
82 if hasattr(plot, "value"):
83 plot.value.metadata.pop(self.hover_metadata_name, None)
84
85 return
86
87 def _get_selection_state(self, event):
88 plot = self.component
89 index = plot.map_index((event.x, event.y), threshold=self.threshold)
90
91 already_selected = False
92 for name in ('index', 'value'):
93 if not hasattr(plot, name):
94 continue
95 md = getattr(plot, name).metadata
96 if md is None or self.selection_metadata_name not in md:
97 continue
98 if index in md[self.selection_metadata_name]:
99 already_selected = True
100 break
101 return already_selected, (index is not None)
102
103 def _get_selection_token(self, event):
104 plot = self.component
105 index = plot.map_index((event.x, event.y), threshold=self.threshold)
106 return index
107
108 def _deselect(self, index=None):
109 """ Deselects a particular index. If no index is given, then
110 deselects all points.
111 """
112 plot = self.component
113 insp_event = ScatterInspectorEvent(event_type=DESELECT_EVENT,
114 event_index=index)
115 for name in ('index', 'value'):
116 if not hasattr(plot, name):
117 continue
118 md = getattr(plot, name).metadata
119 if self.selection_metadata_name not in md:
120 pass
121 elif index in md[self.selection_metadata_name]:
122 new_list = md[self.selection_metadata_name][:]
123 new_list.remove(index)
124 md[self.selection_metadata_name] = new_list
125 # Only issue 1 event:
126 if name == 'index':
127 self.inspector_event = insp_event
128 return
129
130 def _select(self, index, append=True):
131 plot = self.component
132 insp_event = ScatterInspectorEvent(event_type=SELECT_EVENT,
133 event_index=index)
134 for name in ('index', 'value'):
135 if not hasattr(plot, name):
136 continue
137 md = getattr(plot, name).metadata
138 selection = md.get(self.selection_metadata_name, None)
139
140 # If no existing selection
141 if selection is None:
142 md[self.selection_metadata_name] = [index]
143 # check for list-like object supporting append
144 else:
145 if append:
146 if index not in md[self.selection_metadata_name]:
147 new_list = md[self.selection_metadata_name] + [index]
148 md[self.selection_metadata_name] = new_list
149 # Manually trigger the metadata_changed event on
150 # the datasource. Datasources only automatically
151 # fire notifications when the values inside the
152 # metadata dict change, but they do not listen
153 # for further changes on those values.
154 # DEPRECATED: use self.inspector_event instead:
155 getattr(plot, name).metadata_changed = True
156 else:
157 md[self.selection_metadata_name] = [index]
158
159 # Test to only issue 1 event per selection, not 1 per axis:
160 if name == 'index':
161 self.inspector_event = insp_event
162
163 return
164
165
166 # EOF
167
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/chaco/tools/scatter_inspector.py b/chaco/tools/scatter_inspector.py
--- a/chaco/tools/scatter_inspector.py
+++ b/chaco/tools/scatter_inspector.py
@@ -125,6 +125,11 @@
# Only issue 1 event:
if name == 'index':
self.inspector_event = insp_event
+ elif index is None:
+ md[self.selection_metadata_name] = []
+ # Only issue 1 event:
+ if name == 'index':
+ self.inspector_event = insp_event
return
def _select(self, index, append=True):
|
{"golden_diff": "diff --git a/chaco/tools/scatter_inspector.py b/chaco/tools/scatter_inspector.py\n--- a/chaco/tools/scatter_inspector.py\n+++ b/chaco/tools/scatter_inspector.py\n@@ -125,6 +125,11 @@\n # Only issue 1 event:\n if name == 'index':\n self.inspector_event = insp_event\n+ elif index is None:\n+ md[self.selection_metadata_name] = []\n+ # Only issue 1 event:\n+ if name == 'index':\n+ self.inspector_event = insp_event\n return\n \n def _select(self, index, append=True):\n", "issue": "ScatterInspector does not deselect all on index == None\nI've been playing around with the scattertoggle.py example. Maybe I'm missing something, but the ScatterInspector seems to fail to deselect all points when clicking on whitespace on the plot (that is, index == None). Something along the lines of this seems to fix it.\n\n``` python\nclass PatchedScatterInspector(ScatterInspector):\n def _deselect(self, index=None):\n plot = self.component\n if index:\n super(PatchedScatterInspector, self)._deselect(index)\n else:\n for name in ('index', 'value'):\n if not hasattr(plot, name):\n continue\n md = getattr(plot, name).metadata\n md[self.selection_metadata_name] = []\n getattr(plot, name).metadata_changed = True\n return\n```\n\nCheers,\n\n-A\n\n", "before_files": [{"content": "\"\"\" Defines the ScatterInspector tool class.\n\"\"\"\n\n# Enthought library imports\nfrom traits.api import Any, Bool, Enum, Event, HasStrictTraits, Str\n\n# Local, relative imports\nfrom .select_tool import SelectTool\n\nHOVER_EVENT = \"hover\"\n\nSELECT_EVENT = \"select\"\n\nDESELECT_EVENT = \"deselect\"\n\n\nclass ScatterInspectorEvent(HasStrictTraits):\n #: Is it a hover event or a selection event?\n event_type = Enum([HOVER_EVENT, SELECT_EVENT, DESELECT_EVENT])\n\n #: What index was involved?\n event_index = Any\n\n\nclass ScatterInspector(SelectTool):\n \"\"\" A tool for inspecting scatter plots.\n\n It writes the index of the point under the cursor to the metadata of the\n index and value data sources, and allows clicking to select the point.\n Other components can listen for metadata updates on the data sources.\n\n By default, it writes the index of the point under the cursor to the\n \"hover\" key in metadata, and the index of a clicked point to \"selection\".\n \"\"\"\n\n #: If persistent_hover is False, then a point will be de-hovered as soon as\n #: the mouse leaves its hit-testing area. 
If persistent_hover is True, then\n #: a point does no de-hover until another point get hover focus.\n persistent_hover = Bool(False)\n\n #: The names of the data source metadata for hover and selection events.\n hover_metadata_name = Str('hover')\n selection_metadata_name = Str('selections')\n\n #: This tool emits events when hover or selection changes\n inspector_event = Event(ScatterInspectorEvent)\n\n # -------------------------------------------------------------------------\n # Override/configure inherited traits\n # -------------------------------------------------------------------------\n\n #: This tool is not visible\n visible = False\n\n #: This tool does not have a visual representation\n draw_mode = \"none\"\n\n def normal_mouse_move(self, event):\n \"\"\" Handles the mouse moving when the tool is in the 'normal' state.\n\n If the cursor is within **threshold** of a data point, the method\n writes the index to the plot's data sources' \"hover\" metadata.\n\n This method emits a ScatterInspectorEvent when a new scatter point is\n hovered over and when the mouse leaves that point.\n \"\"\"\n plot = self.component\n index = plot.map_index((event.x, event.y), threshold=self.threshold)\n insp_event = ScatterInspectorEvent(event_type=HOVER_EVENT,\n event_index=index)\n if index is not None:\n old = plot.index.metadata.get(self.hover_metadata_name, None)\n plot.index.metadata[self.hover_metadata_name] = [index]\n if old != [index]:\n self.inspector_event = insp_event\n if hasattr(plot, \"value\"):\n plot.value.metadata[self.hover_metadata_name] = [index]\n elif not self.persistent_hover:\n old = plot.index.metadata.pop(self.hover_metadata_name, None)\n if old:\n self.inspector_event = insp_event\n if hasattr(plot, \"value\"):\n plot.value.metadata.pop(self.hover_metadata_name, None)\n\n return\n\n def _get_selection_state(self, event):\n plot = self.component\n index = plot.map_index((event.x, event.y), threshold=self.threshold)\n\n already_selected = False\n for name in ('index', 'value'):\n if not hasattr(plot, name):\n continue\n md = getattr(plot, name).metadata\n if md is None or self.selection_metadata_name not in md:\n continue\n if index in md[self.selection_metadata_name]:\n already_selected = True\n break\n return already_selected, (index is not None)\n\n def _get_selection_token(self, event):\n plot = self.component\n index = plot.map_index((event.x, event.y), threshold=self.threshold)\n return index\n\n def _deselect(self, index=None):\n \"\"\" Deselects a particular index. 
If no index is given, then\n deselects all points.\n \"\"\"\n plot = self.component\n insp_event = ScatterInspectorEvent(event_type=DESELECT_EVENT,\n event_index=index)\n for name in ('index', 'value'):\n if not hasattr(plot, name):\n continue\n md = getattr(plot, name).metadata\n if self.selection_metadata_name not in md:\n pass\n elif index in md[self.selection_metadata_name]:\n new_list = md[self.selection_metadata_name][:]\n new_list.remove(index)\n md[self.selection_metadata_name] = new_list\n # Only issue 1 event:\n if name == 'index':\n self.inspector_event = insp_event\n return\n\n def _select(self, index, append=True):\n plot = self.component\n insp_event = ScatterInspectorEvent(event_type=SELECT_EVENT,\n event_index=index)\n for name in ('index', 'value'):\n if not hasattr(plot, name):\n continue\n md = getattr(plot, name).metadata\n selection = md.get(self.selection_metadata_name, None)\n\n # If no existing selection\n if selection is None:\n md[self.selection_metadata_name] = [index]\n # check for list-like object supporting append\n else:\n if append:\n if index not in md[self.selection_metadata_name]:\n new_list = md[self.selection_metadata_name] + [index]\n md[self.selection_metadata_name] = new_list\n # Manually trigger the metadata_changed event on\n # the datasource. Datasources only automatically\n # fire notifications when the values inside the\n # metadata dict change, but they do not listen\n # for further changes on those values.\n # DEPRECATED: use self.inspector_event instead:\n getattr(plot, name).metadata_changed = True\n else:\n md[self.selection_metadata_name] = [index]\n\n # Test to only issue 1 event per selection, not 1 per axis:\n if name == 'index':\n self.inspector_event = insp_event\n\n return\n\n\n# EOF\n", "path": "chaco/tools/scatter_inspector.py"}], "after_files": [{"content": "\"\"\" Defines the ScatterInspector tool class.\n\"\"\"\n\n# Enthought library imports\nfrom traits.api import Any, Bool, Enum, Event, HasStrictTraits, Str\n\n# Local, relative imports\nfrom .select_tool import SelectTool\n\nHOVER_EVENT = \"hover\"\n\nSELECT_EVENT = \"select\"\n\nDESELECT_EVENT = \"deselect\"\n\n\nclass ScatterInspectorEvent(HasStrictTraits):\n #: Is it a hover event or a selection event?\n event_type = Enum([HOVER_EVENT, SELECT_EVENT, DESELECT_EVENT])\n\n #: What index was involved?\n event_index = Any\n\n\nclass ScatterInspector(SelectTool):\n \"\"\" A tool for inspecting scatter plots.\n\n It writes the index of the point under the cursor to the metadata of the\n index and value data sources, and allows clicking to select the point.\n Other components can listen for metadata updates on the data sources.\n\n By default, it writes the index of the point under the cursor to the\n \"hover\" key in metadata, and the index of a clicked point to \"selection\".\n \"\"\"\n\n #: If persistent_hover is False, then a point will be de-hovered as soon as\n #: the mouse leaves its hit-testing area. 
If persistent_hover is True, then\n #: a point does no de-hover until another point get hover focus.\n persistent_hover = Bool(False)\n\n #: The names of the data source metadata for hover and selection events.\n hover_metadata_name = Str('hover')\n selection_metadata_name = Str('selections')\n\n #: This tool emits events when hover or selection changes\n inspector_event = Event(ScatterInspectorEvent)\n\n # -------------------------------------------------------------------------\n # Override/configure inherited traits\n # -------------------------------------------------------------------------\n\n #: This tool is not visible\n visible = False\n\n #: This tool does not have a visual representation\n draw_mode = \"none\"\n\n def normal_mouse_move(self, event):\n \"\"\" Handles the mouse moving when the tool is in the 'normal' state.\n\n If the cursor is within **threshold** of a data point, the method\n writes the index to the plot's data sources' \"hover\" metadata.\n\n This method emits a ScatterInspectorEvent when a new scatter point is\n hovered over and when the mouse leaves that point.\n \"\"\"\n plot = self.component\n index = plot.map_index((event.x, event.y), threshold=self.threshold)\n insp_event = ScatterInspectorEvent(event_type=HOVER_EVENT,\n event_index=index)\n if index is not None:\n old = plot.index.metadata.get(self.hover_metadata_name, None)\n plot.index.metadata[self.hover_metadata_name] = [index]\n if old != [index]:\n self.inspector_event = insp_event\n if hasattr(plot, \"value\"):\n plot.value.metadata[self.hover_metadata_name] = [index]\n elif not self.persistent_hover:\n old = plot.index.metadata.pop(self.hover_metadata_name, None)\n if old:\n self.inspector_event = insp_event\n if hasattr(plot, \"value\"):\n plot.value.metadata.pop(self.hover_metadata_name, None)\n\n return\n\n def _get_selection_state(self, event):\n plot = self.component\n index = plot.map_index((event.x, event.y), threshold=self.threshold)\n\n already_selected = False\n for name in ('index', 'value'):\n if not hasattr(plot, name):\n continue\n md = getattr(plot, name).metadata\n if md is None or self.selection_metadata_name not in md:\n continue\n if index in md[self.selection_metadata_name]:\n already_selected = True\n break\n return already_selected, (index is not None)\n\n def _get_selection_token(self, event):\n plot = self.component\n index = plot.map_index((event.x, event.y), threshold=self.threshold)\n return index\n\n def _deselect(self, index=None):\n \"\"\" Deselects a particular index. 
If no index is given, then\n deselects all points.\n \"\"\"\n plot = self.component\n insp_event = ScatterInspectorEvent(event_type=DESELECT_EVENT,\n event_index=index)\n for name in ('index', 'value'):\n if not hasattr(plot, name):\n continue\n md = getattr(plot, name).metadata\n if self.selection_metadata_name not in md:\n pass\n elif index in md[self.selection_metadata_name]:\n new_list = md[self.selection_metadata_name][:]\n new_list.remove(index)\n md[self.selection_metadata_name] = new_list\n # Only issue 1 event:\n if name == 'index':\n self.inspector_event = insp_event\n elif index is None:\n md[self.selection_metadata_name] = []\n # Only issue 1 event:\n if name == 'index':\n self.inspector_event = insp_event\n return\n\n def _select(self, index, append=True):\n plot = self.component\n insp_event = ScatterInspectorEvent(event_type=SELECT_EVENT,\n event_index=index)\n for name in ('index', 'value'):\n if not hasattr(plot, name):\n continue\n md = getattr(plot, name).metadata\n selection = md.get(self.selection_metadata_name, None)\n\n # If no existing selection\n if selection is None:\n md[self.selection_metadata_name] = [index]\n # check for list-like object supporting append\n else:\n if append:\n if index not in md[self.selection_metadata_name]:\n new_list = md[self.selection_metadata_name] + [index]\n md[self.selection_metadata_name] = new_list\n # Manually trigger the metadata_changed event on\n # the datasource. Datasources only automatically\n # fire notifications when the values inside the\n # metadata dict change, but they do not listen\n # for further changes on those values.\n # DEPRECATED: use self.inspector_event instead:\n getattr(plot, name).metadata_changed = True\n else:\n md[self.selection_metadata_name] = [index]\n\n # Test to only issue 1 event per selection, not 1 per axis:\n if name == 'index':\n self.inspector_event = insp_event\n\n return\n\n\n# EOF\n", "path": "chaco/tools/scatter_inspector.py"}]}
| 2,119 | 143 |
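A minimal standalone sketch of the deselect-all behaviour the patch above adds, assuming only that a data source keeps a `'selections'` list in its `metadata` dict; `FakeDataSource` and the `deselect` helper are hypothetical stand-ins for illustration, not Chaco APIs.

```python
class FakeDataSource:
    """Hypothetical stand-in for a Chaco data source; only the metadata dict matters here."""
    def __init__(self):
        self.metadata = {"selections": [0, 2, 5]}
        self.metadata_changed = False


def deselect(source, index=None, key="selections"):
    """Mirror the patched _deselect logic: drop one index, or clear all when index is None."""
    if index is None:
        source.metadata[key] = []  # clicking on whitespace clears every selection
    elif index in source.metadata.get(key, []):
        source.metadata[key] = [i for i in source.metadata[key] if i != index]
    source.metadata_changed = True


src = FakeDataSource()
deselect(src, index=2)
print(src.metadata["selections"])  # [0, 5]
deselect(src)
print(src.metadata["selections"])  # []
```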
gh_patches_debug_23131
|
rasdani/github-patches
|
git_diff
|
saleor__saleor-5489
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Email is not completely translated
The main part of the email ("You're receiving this e-mail...") is not translated. The parts after it are properly translated. I've tried it with different languages (German and Russian).
However, the text in text/plain is completely correctly translated:
```
Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 8bit
Hi!
Sie bekommen diese E-Mail weil Sie oder jemand anders ein Passwort für Ihren Benutzeraccount auf localhost:8000 angefordert hat.
Sie können diese Nachricht ignorieren, wenn sie es nicht selber angefordert haben.
Klicken Sie unten um Ihr Passwort zurückzusetzen.
http://localhost:8000/de/account/password/reset/MQ/5ez-e13a9786c6c548232f8b/
Dies ist eine automatisch generierte E-Mail, bitte antworten Sie nicht auf diese E-Mail-Adresse.
Mit freundlichen Grüßen, Saleor e-commerce
```
### Steps to reproduce the problem
1. go to password reset page (e.g. /de/account/password/reset/)
2. provide email and click on send
### What I expected to happen
a fully translated email
### Screenshots
<img width="712" alt="email" src="https://user-images.githubusercontent.com/15652645/77230292-7c6b6900-6b93-11ea-94ae-c7c1be7587d2.png">
**System information**
Operating system: macOS Catalina & Windows 10
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `saleor/order/utils.py`
Content:
```
1 from functools import wraps
2
3 from django.conf import settings
4 from django.db import transaction
5 from django.utils import timezone
6 from prices import Money, TaxedMoney
7
8 from ..account.models import User
9 from ..core.taxes import zero_money
10 from ..core.weight import zero_weight
11 from ..discount.models import NotApplicable, Voucher, VoucherType
12 from ..discount.utils import get_products_voucher_discount, validate_voucher_in_order
13 from ..extensions.manager import get_extensions_manager
14 from ..order import OrderStatus
15 from ..order.models import Order, OrderLine
16 from ..product.utils.digital_products import get_default_digital_content_settings
17 from ..shipping.models import ShippingMethod
18 from ..warehouse.availability import check_stock_quantity
19 from ..warehouse.management import allocate_stock, deallocate_stock, increase_stock
20 from . import events
21
22
23 def get_order_country(order: Order) -> str:
24 """Return country to which order will be shipped."""
25 address = order.billing_address
26 if order.is_shipping_required():
27 address = order.shipping_address
28 if address is None:
29 return settings.DEFAULT_COUNTRY
30 return address.country.code
31
32
33 def order_line_needs_automatic_fulfillment(line: OrderLine) -> bool:
34 """Check if given line is digital and should be automatically fulfilled."""
35 digital_content_settings = get_default_digital_content_settings()
36 default_automatic_fulfillment = digital_content_settings["automatic_fulfillment"]
37 content = line.variant.digital_content if line.variant else None
38 if not content:
39 return False
40 if default_automatic_fulfillment and content.use_default_settings:
41 return True
42 if content.automatic_fulfillment:
43 return True
44 return False
45
46
47 def order_needs_automatic_fullfilment(order: Order) -> bool:
48 """Check if order has digital products which should be automatically fulfilled."""
49 for line in order.lines.digital():
50 if order_line_needs_automatic_fulfillment(line):
51 return True
52 return False
53
54
55 def update_voucher_discount(func):
56 """Recalculate order discount amount based on order voucher."""
57
58 @wraps(func)
59 def decorator(*args, **kwargs):
60 if kwargs.pop("update_voucher_discount", True):
61 order = args[0]
62 try:
63 discount = get_voucher_discount_for_order(order)
64 except NotApplicable:
65 discount = zero_money(order.currency)
66 order.discount = discount
67 return func(*args, **kwargs)
68
69 return decorator
70
71
72 @update_voucher_discount
73 def recalculate_order(order: Order, **kwargs):
74 """Recalculate and assign total price of order.
75
76 Total price is a sum of items in order and order shipping price minus
77 discount amount.
78
79 Voucher discount amount is recalculated by default. To avoid this, pass
80 update_voucher_discount argument set to False.
81 """
82 # avoid using prefetched order lines
83 lines = [OrderLine.objects.get(pk=line.pk) for line in order]
84 prices = [line.get_total() for line in lines]
85 total = sum(prices, order.shipping_price)
86 # discount amount can't be greater than order total
87 order.discount_amount = min(order.discount_amount, total.gross.amount)
88 if order.discount:
89 total -= order.discount
90 order.total = total
91 order.save(
92 update_fields=[
93 "discount_amount",
94 "total_net_amount",
95 "total_gross_amount",
96 "currency",
97 ]
98 )
99 recalculate_order_weight(order)
100
101
102 def recalculate_order_weight(order):
103 """Recalculate order weights."""
104 weight = zero_weight()
105 for line in order:
106 if line.variant:
107 weight += line.variant.get_weight() * line.quantity
108 order.weight = weight
109 order.save(update_fields=["weight"])
110
111
112 def update_order_prices(order, discounts):
113 """Update prices in order with given discounts and proper taxes."""
114 manager = get_extensions_manager()
115 for line in order: # type: OrderLine
116 if line.variant:
117 unit_price = line.variant.get_price(discounts)
118 unit_price = TaxedMoney(unit_price, unit_price)
119 line.unit_price = unit_price
120 line.save(
121 update_fields=[
122 "currency",
123 "unit_price_net_amount",
124 "unit_price_gross_amount",
125 ]
126 )
127
128 price = manager.calculate_order_line_unit(line)
129 if price != line.unit_price:
130 line.unit_price = price
131 if price.tax and price.net:
132 line.tax_rate = price.tax / price.net
133 line.save()
134
135 if order.shipping_method:
136 order.shipping_price = manager.calculate_order_shipping(order)
137 order.save(
138 update_fields=[
139 "shipping_price_net_amount",
140 "shipping_price_gross_amount",
141 "currency",
142 ]
143 )
144
145 recalculate_order(order)
146
147
148 def update_order_status(order):
149 """Update order status depending on fulfillments."""
150 quantity_fulfilled = order.quantity_fulfilled
151 total_quantity = order.get_total_quantity()
152
153 if quantity_fulfilled <= 0:
154 status = OrderStatus.UNFULFILLED
155 elif quantity_fulfilled < total_quantity:
156 status = OrderStatus.PARTIALLY_FULFILLED
157 else:
158 status = OrderStatus.FULFILLED
159
160 if status != order.status:
161 order.status = status
162 order.save(update_fields=["status"])
163
164
165 @transaction.atomic
166 def add_variant_to_order(
167 order,
168 variant,
169 quantity,
170 discounts=None,
171 allow_overselling=False,
172 track_inventory=True,
173 ):
174 """Add total_quantity of variant to order.
175
176 Returns an order line the variant was added to.
177
178 By default, raises InsufficientStock exception if quantity could not be
179 fulfilled. This can be disabled by setting `allow_overselling` to True.
180 """
181 country = get_order_country(order)
182 if not allow_overselling:
183 check_stock_quantity(variant, country, quantity)
184
185 try:
186 line = order.lines.get(variant=variant)
187 line.quantity += quantity
188 line.save(update_fields=["quantity"])
189 except OrderLine.DoesNotExist:
190 unit_price = variant.get_price(discounts)
191 unit_price = TaxedMoney(net=unit_price, gross=unit_price)
192 product = variant.product
193 product_name = str(product)
194 variant_name = str(variant)
195 translated_product_name = str(product.translated)
196 translated_variant_name = str(variant.translated)
197 if translated_product_name == product_name:
198 translated_product_name = ""
199 if translated_variant_name == variant_name:
200 translated_variant_name = ""
201 line = order.lines.create(
202 product_name=product_name,
203 variant_name=variant_name,
204 translated_product_name=translated_product_name,
205 translated_variant_name=translated_variant_name,
206 product_sku=variant.sku,
207 is_shipping_required=variant.is_shipping_required(),
208 quantity=quantity,
209 unit_price=unit_price,
210 variant=variant,
211 )
212 manager = get_extensions_manager()
213 unit_price = manager.calculate_order_line_unit(line)
214 line.unit_price = unit_price
215 line.tax_rate = unit_price.tax / unit_price.net
216 line.save(
217 update_fields=[
218 "currency",
219 "unit_price_net_amount",
220 "unit_price_gross_amount",
221 "tax_rate",
222 ]
223 )
224
225 if variant.track_inventory and track_inventory:
226 allocate_stock(variant, country, quantity)
227 return line
228
229
230 def add_gift_card_to_order(order, gift_card, total_price_left):
231 """Add gift card to order.
232
233 Return a total price left after applying the gift cards.
234 """
235 if total_price_left > zero_money(total_price_left.currency):
236 order.gift_cards.add(gift_card)
237 if total_price_left < gift_card.current_balance:
238 gift_card.current_balance = gift_card.current_balance - total_price_left
239 total_price_left = zero_money(total_price_left.currency)
240 else:
241 total_price_left = total_price_left - gift_card.current_balance
242 gift_card.current_balance_amount = 0
243 gift_card.last_used_on = timezone.now()
244 gift_card.save(update_fields=["current_balance_amount", "last_used_on"])
245 return total_price_left
246
247
248 def change_order_line_quantity(user, line, old_quantity, new_quantity):
249 """Change the quantity of ordered items in a order line."""
250 if new_quantity:
251 line.quantity = new_quantity
252 line.save(update_fields=["quantity"])
253 else:
254 delete_order_line(line)
255
256 quantity_diff = old_quantity - new_quantity
257
258 # Create the removal event
259 if quantity_diff > 0:
260 events.draft_order_removed_products_event(
261 order=line.order, user=user, order_lines=[(quantity_diff, line)]
262 )
263 elif quantity_diff < 0:
264 events.draft_order_added_products_event(
265 order=line.order, user=user, order_lines=[(quantity_diff * -1, line)]
266 )
267
268
269 def delete_order_line(line):
270 """Delete an order line from an order."""
271 line.delete()
272
273
274 def restock_order_lines(order):
275 """Return ordered products to corresponding stocks."""
276 country = get_order_country(order)
277
278 for line in order:
279 if line.variant and line.variant.track_inventory:
280 if line.quantity_unfulfilled > 0:
281 deallocate_stock(line.variant, country, line.quantity_unfulfilled)
282 if line.quantity_fulfilled > 0:
283 increase_stock(line.variant, country, line.quantity_fulfilled)
284
285 if line.quantity_fulfilled > 0:
286 line.quantity_fulfilled = 0
287 line.save(update_fields=["quantity_fulfilled"])
288
289
290 def restock_fulfillment_lines(fulfillment):
291 """Return fulfilled products to corresponding stocks."""
292 country = get_order_country(fulfillment.order)
293 for line in fulfillment:
294 if line.order_line.variant and line.order_line.variant.track_inventory:
295 increase_stock(
296 line.order_line.variant, country, line.quantity, allocate=True
297 )
298
299
300 def sum_order_totals(qs):
301 zero = Money(0, currency=settings.DEFAULT_CURRENCY)
302 taxed_zero = TaxedMoney(zero, zero)
303 return sum([order.total for order in qs], taxed_zero)
304
305
306 def get_valid_shipping_methods_for_order(order: Order):
307 return ShippingMethod.objects.applicable_shipping_methods_for_instance(
308 order, price=order.get_subtotal().gross
309 )
310
311
312 def get_products_voucher_discount_for_order(voucher: Voucher) -> Money:
313 """Calculate products discount value for a voucher, depending on its type."""
314 prices = None
315 if not prices:
316 msg = "This offer is only valid for selected items."
317 raise NotApplicable(msg)
318 return get_products_voucher_discount(voucher, prices)
319
320
321 def get_voucher_discount_for_order(order: Order) -> Money:
322 """Calculate discount value depending on voucher and discount types.
323
324 Raise NotApplicable if voucher of given type cannot be applied.
325 """
326 if not order.voucher:
327 return zero_money(order.currency)
328 validate_voucher_in_order(order)
329 subtotal = order.get_subtotal()
330 if order.voucher.type == VoucherType.ENTIRE_ORDER:
331 return order.voucher.get_discount_amount_for(subtotal.gross)
332 if order.voucher.type == VoucherType.SHIPPING:
333 return order.voucher.get_discount_amount_for(order.shipping_price)
334 if order.voucher.type == VoucherType.SPECIFIC_PRODUCT:
335 return get_products_voucher_discount_for_order(order.voucher)
336 raise NotImplementedError("Unknown discount type")
337
338
339 def match_orders_with_new_user(user: User) -> None:
340 Order.objects.confirmed().filter(user_email=user.email, user=None).update(user=user)
341
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/saleor/order/utils.py b/saleor/order/utils.py
--- a/saleor/order/utils.py
+++ b/saleor/order/utils.py
@@ -1,9 +1,15 @@
from functools import wraps
+from uuid import uuid4
from django.conf import settings
+from django.contrib.staticfiles import finders as static_finders
+from django.core.files.base import ContentFile
+from django.core.files.storage import default_storage
from django.db import transaction
+from django.template.loader import get_template
from django.utils import timezone
from prices import Money, TaxedMoney
+from weasyprint import HTML
from ..account.models import User
from ..core.taxes import zero_money
@@ -338,3 +344,12 @@
def match_orders_with_new_user(user: User) -> None:
Order.objects.confirmed().filter(user_email=user.email, user=None).update(user=user)
+
+
+def generate_invoice_pdf_for_order(invoice):
+ logo_path = static_finders.find("images/logo-light.svg")
+ rendered_template = get_template("invoice.html").render(
+ {"invoice": invoice, "order": invoice.order, "logo_path": f"file://{logo_path}"}
+ )
+ content_file = ContentFile(HTML(string=rendered_template).write_pdf())
+ return default_storage.save(f"{uuid4()}.pdf", content_file)
|
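For reference, a small sketch of the rendering approach used in the diff above: WeasyPrint turns an HTML string into PDF bytes, which are then saved through Django's default storage. The markup and filename here are placeholders, and Django settings must already be configured for `default_storage` to work.

```python
from django.core.files.base import ContentFile
from django.core.files.storage import default_storage
from weasyprint import HTML

html = "<h1>Invoice #42</h1><p>Total: 10.00 EUR</p>"  # placeholder markup
pdf_bytes = HTML(string=html).write_pdf()             # render the HTML string to PDF bytes
saved_path = default_storage.save("invoice-42.pdf", ContentFile(pdf_bytes))
print(saved_path)
```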
{"golden_diff": "diff --git a/saleor/order/utils.py b/saleor/order/utils.py\n--- a/saleor/order/utils.py\n+++ b/saleor/order/utils.py\n@@ -1,9 +1,15 @@\n from functools import wraps\n+from uuid import uuid4\n \n from django.conf import settings\n+from django.contrib.staticfiles import finders as static_finders\n+from django.core.files.base import ContentFile\n+from django.core.files.storage import default_storage\n from django.db import transaction\n+from django.template.loader import get_template\n from django.utils import timezone\n from prices import Money, TaxedMoney\n+from weasyprint import HTML\n \n from ..account.models import User\n from ..core.taxes import zero_money\n@@ -338,3 +344,12 @@\n \n def match_orders_with_new_user(user: User) -> None:\n Order.objects.confirmed().filter(user_email=user.email, user=None).update(user=user)\n+\n+\n+def generate_invoice_pdf_for_order(invoice):\n+ logo_path = static_finders.find(\"images/logo-light.svg\")\n+ rendered_template = get_template(\"invoice.html\").render(\n+ {\"invoice\": invoice, \"order\": invoice.order, \"logo_path\": f\"file://{logo_path}\"}\n+ )\n+ content_file = ContentFile(HTML(string=rendered_template).write_pdf())\n+ return default_storage.save(f\"{uuid4()}.pdf\", content_file)\n", "issue": "Email is not completely translated\nThe main part of the email(\"You're receiving this e-mail...\") is not translated. The parts after are properly translated. However, the text in text/plain is completely correctly translated. I've tried it with different languages (german and russian).\r\n\r\nHowever, the text in text/plain is completely correctly translated:\r\n```\r\nContent-Type: text/plain; charset=\"utf-8\"\r\nMIME-Version: 1.0\r\nContent-Transfer-Encoding: 8bit\r\n\r\n\r\n\r\nHi!\r\n\r\nSie bekommen diese E-Mail weil Sie oder jemand anders ein Passwort f\u00fcr Ihren Benutzeraccount auf localhost:8000 angefordert hat.\r\nSie k\u00f6nnen diese Nachricht ignorieren, wenn sie es nicht selber angefordert haben.\r\nKlicken Sie unten um Ihr Passwort zur\u00fcckzusetzen.\r\n\r\n\r\nhttp://localhost:8000/de/account/password/reset/MQ/5ez-e13a9786c6c548232f8b/\r\n\r\n\r\nDies ist eine automatisch generierte E-Mail, bitte antworten Sie nicht auf diese E-Mail-Adresse.\r\nMit freundlichen Gr\u00fc\u00dfen, Saleor e-commerce\r\n```\r\n\r\n### Steps to reproduce the problem\r\n1. go to password reset page (e.g. /de/account/password/reset/)\r\n2. 
provide email and click on send\r\n\r\n### What I expected to happen\r\na fully translated email\r\n\r\n### Screenshots\r\n<img width=\"712\" alt=\"email\" src=\"https://user-images.githubusercontent.com/15652645/77230292-7c6b6900-6b93-11ea-94ae-c7c1be7587d2.png\">\r\n\r\n**System information**\r\nOperating system: macOS Catalina & Windows 10\r\n\n", "before_files": [{"content": "from functools import wraps\n\nfrom django.conf import settings\nfrom django.db import transaction\nfrom django.utils import timezone\nfrom prices import Money, TaxedMoney\n\nfrom ..account.models import User\nfrom ..core.taxes import zero_money\nfrom ..core.weight import zero_weight\nfrom ..discount.models import NotApplicable, Voucher, VoucherType\nfrom ..discount.utils import get_products_voucher_discount, validate_voucher_in_order\nfrom ..extensions.manager import get_extensions_manager\nfrom ..order import OrderStatus\nfrom ..order.models import Order, OrderLine\nfrom ..product.utils.digital_products import get_default_digital_content_settings\nfrom ..shipping.models import ShippingMethod\nfrom ..warehouse.availability import check_stock_quantity\nfrom ..warehouse.management import allocate_stock, deallocate_stock, increase_stock\nfrom . import events\n\n\ndef get_order_country(order: Order) -> str:\n \"\"\"Return country to which order will be shipped.\"\"\"\n address = order.billing_address\n if order.is_shipping_required():\n address = order.shipping_address\n if address is None:\n return settings.DEFAULT_COUNTRY\n return address.country.code\n\n\ndef order_line_needs_automatic_fulfillment(line: OrderLine) -> bool:\n \"\"\"Check if given line is digital and should be automatically fulfilled.\"\"\"\n digital_content_settings = get_default_digital_content_settings()\n default_automatic_fulfillment = digital_content_settings[\"automatic_fulfillment\"]\n content = line.variant.digital_content if line.variant else None\n if not content:\n return False\n if default_automatic_fulfillment and content.use_default_settings:\n return True\n if content.automatic_fulfillment:\n return True\n return False\n\n\ndef order_needs_automatic_fullfilment(order: Order) -> bool:\n \"\"\"Check if order has digital products which should be automatically fulfilled.\"\"\"\n for line in order.lines.digital():\n if order_line_needs_automatic_fulfillment(line):\n return True\n return False\n\n\ndef update_voucher_discount(func):\n \"\"\"Recalculate order discount amount based on order voucher.\"\"\"\n\n @wraps(func)\n def decorator(*args, **kwargs):\n if kwargs.pop(\"update_voucher_discount\", True):\n order = args[0]\n try:\n discount = get_voucher_discount_for_order(order)\n except NotApplicable:\n discount = zero_money(order.currency)\n order.discount = discount\n return func(*args, **kwargs)\n\n return decorator\n\n\n@update_voucher_discount\ndef recalculate_order(order: Order, **kwargs):\n \"\"\"Recalculate and assign total price of order.\n\n Total price is a sum of items in order and order shipping price minus\n discount amount.\n\n Voucher discount amount is recalculated by default. 
To avoid this, pass\n update_voucher_discount argument set to False.\n \"\"\"\n # avoid using prefetched order lines\n lines = [OrderLine.objects.get(pk=line.pk) for line in order]\n prices = [line.get_total() for line in lines]\n total = sum(prices, order.shipping_price)\n # discount amount can't be greater than order total\n order.discount_amount = min(order.discount_amount, total.gross.amount)\n if order.discount:\n total -= order.discount\n order.total = total\n order.save(\n update_fields=[\n \"discount_amount\",\n \"total_net_amount\",\n \"total_gross_amount\",\n \"currency\",\n ]\n )\n recalculate_order_weight(order)\n\n\ndef recalculate_order_weight(order):\n \"\"\"Recalculate order weights.\"\"\"\n weight = zero_weight()\n for line in order:\n if line.variant:\n weight += line.variant.get_weight() * line.quantity\n order.weight = weight\n order.save(update_fields=[\"weight\"])\n\n\ndef update_order_prices(order, discounts):\n \"\"\"Update prices in order with given discounts and proper taxes.\"\"\"\n manager = get_extensions_manager()\n for line in order: # type: OrderLine\n if line.variant:\n unit_price = line.variant.get_price(discounts)\n unit_price = TaxedMoney(unit_price, unit_price)\n line.unit_price = unit_price\n line.save(\n update_fields=[\n \"currency\",\n \"unit_price_net_amount\",\n \"unit_price_gross_amount\",\n ]\n )\n\n price = manager.calculate_order_line_unit(line)\n if price != line.unit_price:\n line.unit_price = price\n if price.tax and price.net:\n line.tax_rate = price.tax / price.net\n line.save()\n\n if order.shipping_method:\n order.shipping_price = manager.calculate_order_shipping(order)\n order.save(\n update_fields=[\n \"shipping_price_net_amount\",\n \"shipping_price_gross_amount\",\n \"currency\",\n ]\n )\n\n recalculate_order(order)\n\n\ndef update_order_status(order):\n \"\"\"Update order status depending on fulfillments.\"\"\"\n quantity_fulfilled = order.quantity_fulfilled\n total_quantity = order.get_total_quantity()\n\n if quantity_fulfilled <= 0:\n status = OrderStatus.UNFULFILLED\n elif quantity_fulfilled < total_quantity:\n status = OrderStatus.PARTIALLY_FULFILLED\n else:\n status = OrderStatus.FULFILLED\n\n if status != order.status:\n order.status = status\n order.save(update_fields=[\"status\"])\n\n\[email protected]\ndef add_variant_to_order(\n order,\n variant,\n quantity,\n discounts=None,\n allow_overselling=False,\n track_inventory=True,\n):\n \"\"\"Add total_quantity of variant to order.\n\n Returns an order line the variant was added to.\n\n By default, raises InsufficientStock exception if quantity could not be\n fulfilled. 
This can be disabled by setting `allow_overselling` to True.\n \"\"\"\n country = get_order_country(order)\n if not allow_overselling:\n check_stock_quantity(variant, country, quantity)\n\n try:\n line = order.lines.get(variant=variant)\n line.quantity += quantity\n line.save(update_fields=[\"quantity\"])\n except OrderLine.DoesNotExist:\n unit_price = variant.get_price(discounts)\n unit_price = TaxedMoney(net=unit_price, gross=unit_price)\n product = variant.product\n product_name = str(product)\n variant_name = str(variant)\n translated_product_name = str(product.translated)\n translated_variant_name = str(variant.translated)\n if translated_product_name == product_name:\n translated_product_name = \"\"\n if translated_variant_name == variant_name:\n translated_variant_name = \"\"\n line = order.lines.create(\n product_name=product_name,\n variant_name=variant_name,\n translated_product_name=translated_product_name,\n translated_variant_name=translated_variant_name,\n product_sku=variant.sku,\n is_shipping_required=variant.is_shipping_required(),\n quantity=quantity,\n unit_price=unit_price,\n variant=variant,\n )\n manager = get_extensions_manager()\n unit_price = manager.calculate_order_line_unit(line)\n line.unit_price = unit_price\n line.tax_rate = unit_price.tax / unit_price.net\n line.save(\n update_fields=[\n \"currency\",\n \"unit_price_net_amount\",\n \"unit_price_gross_amount\",\n \"tax_rate\",\n ]\n )\n\n if variant.track_inventory and track_inventory:\n allocate_stock(variant, country, quantity)\n return line\n\n\ndef add_gift_card_to_order(order, gift_card, total_price_left):\n \"\"\"Add gift card to order.\n\n Return a total price left after applying the gift cards.\n \"\"\"\n if total_price_left > zero_money(total_price_left.currency):\n order.gift_cards.add(gift_card)\n if total_price_left < gift_card.current_balance:\n gift_card.current_balance = gift_card.current_balance - total_price_left\n total_price_left = zero_money(total_price_left.currency)\n else:\n total_price_left = total_price_left - gift_card.current_balance\n gift_card.current_balance_amount = 0\n gift_card.last_used_on = timezone.now()\n gift_card.save(update_fields=[\"current_balance_amount\", \"last_used_on\"])\n return total_price_left\n\n\ndef change_order_line_quantity(user, line, old_quantity, new_quantity):\n \"\"\"Change the quantity of ordered items in a order line.\"\"\"\n if new_quantity:\n line.quantity = new_quantity\n line.save(update_fields=[\"quantity\"])\n else:\n delete_order_line(line)\n\n quantity_diff = old_quantity - new_quantity\n\n # Create the removal event\n if quantity_diff > 0:\n events.draft_order_removed_products_event(\n order=line.order, user=user, order_lines=[(quantity_diff, line)]\n )\n elif quantity_diff < 0:\n events.draft_order_added_products_event(\n order=line.order, user=user, order_lines=[(quantity_diff * -1, line)]\n )\n\n\ndef delete_order_line(line):\n \"\"\"Delete an order line from an order.\"\"\"\n line.delete()\n\n\ndef restock_order_lines(order):\n \"\"\"Return ordered products to corresponding stocks.\"\"\"\n country = get_order_country(order)\n\n for line in order:\n if line.variant and line.variant.track_inventory:\n if line.quantity_unfulfilled > 0:\n deallocate_stock(line.variant, country, line.quantity_unfulfilled)\n if line.quantity_fulfilled > 0:\n increase_stock(line.variant, country, line.quantity_fulfilled)\n\n if line.quantity_fulfilled > 0:\n line.quantity_fulfilled = 0\n line.save(update_fields=[\"quantity_fulfilled\"])\n\n\ndef 
restock_fulfillment_lines(fulfillment):\n \"\"\"Return fulfilled products to corresponding stocks.\"\"\"\n country = get_order_country(fulfillment.order)\n for line in fulfillment:\n if line.order_line.variant and line.order_line.variant.track_inventory:\n increase_stock(\n line.order_line.variant, country, line.quantity, allocate=True\n )\n\n\ndef sum_order_totals(qs):\n zero = Money(0, currency=settings.DEFAULT_CURRENCY)\n taxed_zero = TaxedMoney(zero, zero)\n return sum([order.total for order in qs], taxed_zero)\n\n\ndef get_valid_shipping_methods_for_order(order: Order):\n return ShippingMethod.objects.applicable_shipping_methods_for_instance(\n order, price=order.get_subtotal().gross\n )\n\n\ndef get_products_voucher_discount_for_order(voucher: Voucher) -> Money:\n \"\"\"Calculate products discount value for a voucher, depending on its type.\"\"\"\n prices = None\n if not prices:\n msg = \"This offer is only valid for selected items.\"\n raise NotApplicable(msg)\n return get_products_voucher_discount(voucher, prices)\n\n\ndef get_voucher_discount_for_order(order: Order) -> Money:\n \"\"\"Calculate discount value depending on voucher and discount types.\n\n Raise NotApplicable if voucher of given type cannot be applied.\n \"\"\"\n if not order.voucher:\n return zero_money(order.currency)\n validate_voucher_in_order(order)\n subtotal = order.get_subtotal()\n if order.voucher.type == VoucherType.ENTIRE_ORDER:\n return order.voucher.get_discount_amount_for(subtotal.gross)\n if order.voucher.type == VoucherType.SHIPPING:\n return order.voucher.get_discount_amount_for(order.shipping_price)\n if order.voucher.type == VoucherType.SPECIFIC_PRODUCT:\n return get_products_voucher_discount_for_order(order.voucher)\n raise NotImplementedError(\"Unknown discount type\")\n\n\ndef match_orders_with_new_user(user: User) -> None:\n Order.objects.confirmed().filter(user_email=user.email, user=None).update(user=user)\n", "path": "saleor/order/utils.py"}], "after_files": [{"content": "from functools import wraps\nfrom uuid import uuid4\n\nfrom django.conf import settings\nfrom django.contrib.staticfiles import finders as static_finders\nfrom django.core.files.base import ContentFile\nfrom django.core.files.storage import default_storage\nfrom django.db import transaction\nfrom django.template.loader import get_template\nfrom django.utils import timezone\nfrom prices import Money, TaxedMoney\nfrom weasyprint import HTML\n\nfrom ..account.models import User\nfrom ..core.taxes import zero_money\nfrom ..core.weight import zero_weight\nfrom ..discount.models import NotApplicable, Voucher, VoucherType\nfrom ..discount.utils import get_products_voucher_discount, validate_voucher_in_order\nfrom ..extensions.manager import get_extensions_manager\nfrom ..order import OrderStatus\nfrom ..order.models import Order, OrderLine\nfrom ..product.utils.digital_products import get_default_digital_content_settings\nfrom ..shipping.models import ShippingMethod\nfrom ..warehouse.availability import check_stock_quantity\nfrom ..warehouse.management import allocate_stock, deallocate_stock, increase_stock\nfrom . 
import events\n\n\ndef get_order_country(order: Order) -> str:\n \"\"\"Return country to which order will be shipped.\"\"\"\n address = order.billing_address\n if order.is_shipping_required():\n address = order.shipping_address\n if address is None:\n return settings.DEFAULT_COUNTRY\n return address.country.code\n\n\ndef order_line_needs_automatic_fulfillment(line: OrderLine) -> bool:\n \"\"\"Check if given line is digital and should be automatically fulfilled.\"\"\"\n digital_content_settings = get_default_digital_content_settings()\n default_automatic_fulfillment = digital_content_settings[\"automatic_fulfillment\"]\n content = line.variant.digital_content if line.variant else None\n if not content:\n return False\n if default_automatic_fulfillment and content.use_default_settings:\n return True\n if content.automatic_fulfillment:\n return True\n return False\n\n\ndef order_needs_automatic_fullfilment(order: Order) -> bool:\n \"\"\"Check if order has digital products which should be automatically fulfilled.\"\"\"\n for line in order.lines.digital():\n if order_line_needs_automatic_fulfillment(line):\n return True\n return False\n\n\ndef update_voucher_discount(func):\n \"\"\"Recalculate order discount amount based on order voucher.\"\"\"\n\n @wraps(func)\n def decorator(*args, **kwargs):\n if kwargs.pop(\"update_voucher_discount\", True):\n order = args[0]\n try:\n discount = get_voucher_discount_for_order(order)\n except NotApplicable:\n discount = zero_money(order.currency)\n order.discount = discount\n return func(*args, **kwargs)\n\n return decorator\n\n\n@update_voucher_discount\ndef recalculate_order(order: Order, **kwargs):\n \"\"\"Recalculate and assign total price of order.\n\n Total price is a sum of items in order and order shipping price minus\n discount amount.\n\n Voucher discount amount is recalculated by default. 
To avoid this, pass\n update_voucher_discount argument set to False.\n \"\"\"\n # avoid using prefetched order lines\n lines = [OrderLine.objects.get(pk=line.pk) for line in order]\n prices = [line.get_total() for line in lines]\n total = sum(prices, order.shipping_price)\n # discount amount can't be greater than order total\n order.discount_amount = min(order.discount_amount, total.gross.amount)\n if order.discount:\n total -= order.discount\n order.total = total\n order.save(\n update_fields=[\n \"discount_amount\",\n \"total_net_amount\",\n \"total_gross_amount\",\n \"currency\",\n ]\n )\n recalculate_order_weight(order)\n\n\ndef recalculate_order_weight(order):\n \"\"\"Recalculate order weights.\"\"\"\n weight = zero_weight()\n for line in order:\n if line.variant:\n weight += line.variant.get_weight() * line.quantity\n order.weight = weight\n order.save(update_fields=[\"weight\"])\n\n\ndef update_order_prices(order, discounts):\n \"\"\"Update prices in order with given discounts and proper taxes.\"\"\"\n manager = get_extensions_manager()\n for line in order: # type: OrderLine\n if line.variant:\n unit_price = line.variant.get_price(discounts)\n unit_price = TaxedMoney(unit_price, unit_price)\n line.unit_price = unit_price\n line.save(\n update_fields=[\n \"currency\",\n \"unit_price_net_amount\",\n \"unit_price_gross_amount\",\n ]\n )\n\n price = manager.calculate_order_line_unit(line)\n if price != line.unit_price:\n line.unit_price = price\n if price.tax and price.net:\n line.tax_rate = price.tax / price.net\n line.save()\n\n if order.shipping_method:\n order.shipping_price = manager.calculate_order_shipping(order)\n order.save(\n update_fields=[\n \"shipping_price_net_amount\",\n \"shipping_price_gross_amount\",\n \"currency\",\n ]\n )\n\n recalculate_order(order)\n\n\ndef update_order_status(order):\n \"\"\"Update order status depending on fulfillments.\"\"\"\n quantity_fulfilled = order.quantity_fulfilled\n total_quantity = order.get_total_quantity()\n\n if quantity_fulfilled <= 0:\n status = OrderStatus.UNFULFILLED\n elif quantity_fulfilled < total_quantity:\n status = OrderStatus.PARTIALLY_FULFILLED\n else:\n status = OrderStatus.FULFILLED\n\n if status != order.status:\n order.status = status\n order.save(update_fields=[\"status\"])\n\n\[email protected]\ndef add_variant_to_order(\n order,\n variant,\n quantity,\n discounts=None,\n allow_overselling=False,\n track_inventory=True,\n):\n \"\"\"Add total_quantity of variant to order.\n\n Returns an order line the variant was added to.\n\n By default, raises InsufficientStock exception if quantity could not be\n fulfilled. 
This can be disabled by setting `allow_overselling` to True.\n \"\"\"\n country = get_order_country(order)\n if not allow_overselling:\n check_stock_quantity(variant, country, quantity)\n\n try:\n line = order.lines.get(variant=variant)\n line.quantity += quantity\n line.save(update_fields=[\"quantity\"])\n except OrderLine.DoesNotExist:\n unit_price = variant.get_price(discounts)\n unit_price = TaxedMoney(net=unit_price, gross=unit_price)\n product = variant.product\n product_name = str(product)\n variant_name = str(variant)\n translated_product_name = str(product.translated)\n translated_variant_name = str(variant.translated)\n if translated_product_name == product_name:\n translated_product_name = \"\"\n if translated_variant_name == variant_name:\n translated_variant_name = \"\"\n line = order.lines.create(\n product_name=product_name,\n variant_name=variant_name,\n translated_product_name=translated_product_name,\n translated_variant_name=translated_variant_name,\n product_sku=variant.sku,\n is_shipping_required=variant.is_shipping_required(),\n quantity=quantity,\n unit_price=unit_price,\n variant=variant,\n )\n manager = get_extensions_manager()\n unit_price = manager.calculate_order_line_unit(line)\n line.unit_price = unit_price\n line.tax_rate = unit_price.tax / unit_price.net\n line.save(\n update_fields=[\n \"currency\",\n \"unit_price_net_amount\",\n \"unit_price_gross_amount\",\n \"tax_rate\",\n ]\n )\n\n if variant.track_inventory and track_inventory:\n allocate_stock(variant, country, quantity)\n return line\n\n\ndef add_gift_card_to_order(order, gift_card, total_price_left):\n \"\"\"Add gift card to order.\n\n Return a total price left after applying the gift cards.\n \"\"\"\n if total_price_left > zero_money(total_price_left.currency):\n order.gift_cards.add(gift_card)\n if total_price_left < gift_card.current_balance:\n gift_card.current_balance = gift_card.current_balance - total_price_left\n total_price_left = zero_money(total_price_left.currency)\n else:\n total_price_left = total_price_left - gift_card.current_balance\n gift_card.current_balance_amount = 0\n gift_card.last_used_on = timezone.now()\n gift_card.save(update_fields=[\"current_balance_amount\", \"last_used_on\"])\n return total_price_left\n\n\ndef change_order_line_quantity(user, line, old_quantity, new_quantity):\n \"\"\"Change the quantity of ordered items in a order line.\"\"\"\n if new_quantity:\n line.quantity = new_quantity\n line.save(update_fields=[\"quantity\"])\n else:\n delete_order_line(line)\n\n quantity_diff = old_quantity - new_quantity\n\n # Create the removal event\n if quantity_diff > 0:\n events.draft_order_removed_products_event(\n order=line.order, user=user, order_lines=[(quantity_diff, line)]\n )\n elif quantity_diff < 0:\n events.draft_order_added_products_event(\n order=line.order, user=user, order_lines=[(quantity_diff * -1, line)]\n )\n\n\ndef delete_order_line(line):\n \"\"\"Delete an order line from an order.\"\"\"\n line.delete()\n\n\ndef restock_order_lines(order):\n \"\"\"Return ordered products to corresponding stocks.\"\"\"\n country = get_order_country(order)\n\n for line in order:\n if line.variant and line.variant.track_inventory:\n if line.quantity_unfulfilled > 0:\n deallocate_stock(line.variant, country, line.quantity_unfulfilled)\n if line.quantity_fulfilled > 0:\n increase_stock(line.variant, country, line.quantity_fulfilled)\n\n if line.quantity_fulfilled > 0:\n line.quantity_fulfilled = 0\n line.save(update_fields=[\"quantity_fulfilled\"])\n\n\ndef 
restock_fulfillment_lines(fulfillment):\n \"\"\"Return fulfilled products to corresponding stocks.\"\"\"\n country = get_order_country(fulfillment.order)\n for line in fulfillment:\n if line.order_line.variant and line.order_line.variant.track_inventory:\n increase_stock(\n line.order_line.variant, country, line.quantity, allocate=True\n )\n\n\ndef sum_order_totals(qs):\n zero = Money(0, currency=settings.DEFAULT_CURRENCY)\n taxed_zero = TaxedMoney(zero, zero)\n return sum([order.total for order in qs], taxed_zero)\n\n\ndef get_valid_shipping_methods_for_order(order: Order):\n return ShippingMethod.objects.applicable_shipping_methods_for_instance(\n order, price=order.get_subtotal().gross\n )\n\n\ndef get_products_voucher_discount_for_order(voucher: Voucher) -> Money:\n \"\"\"Calculate products discount value for a voucher, depending on its type.\"\"\"\n prices = None\n if not prices:\n msg = \"This offer is only valid for selected items.\"\n raise NotApplicable(msg)\n return get_products_voucher_discount(voucher, prices)\n\n\ndef get_voucher_discount_for_order(order: Order) -> Money:\n \"\"\"Calculate discount value depending on voucher and discount types.\n\n Raise NotApplicable if voucher of given type cannot be applied.\n \"\"\"\n if not order.voucher:\n return zero_money(order.currency)\n validate_voucher_in_order(order)\n subtotal = order.get_subtotal()\n if order.voucher.type == VoucherType.ENTIRE_ORDER:\n return order.voucher.get_discount_amount_for(subtotal.gross)\n if order.voucher.type == VoucherType.SHIPPING:\n return order.voucher.get_discount_amount_for(order.shipping_price)\n if order.voucher.type == VoucherType.SPECIFIC_PRODUCT:\n return get_products_voucher_discount_for_order(order.voucher)\n raise NotImplementedError(\"Unknown discount type\")\n\n\ndef match_orders_with_new_user(user: User) -> None:\n Order.objects.confirmed().filter(user_email=user.email, user=None).update(user=user)\n\n\ndef generate_invoice_pdf_for_order(invoice):\n logo_path = static_finders.find(\"images/logo-light.svg\")\n rendered_template = get_template(\"invoice.html\").render(\n {\"invoice\": invoice, \"order\": invoice.order, \"logo_path\": f\"file://{logo_path}\"}\n )\n content_file = ContentFile(HTML(string=rendered_template).write_pdf())\n return default_storage.save(f\"{uuid4()}.pdf\", content_file)\n", "path": "saleor/order/utils.py"}]}
| 4,010 | 304 |
| gh_patches_debug_27727 | rasdani/github-patches | git_diff | bokeh__bokeh-8854 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG] exports do not work with Firefox webdriver
Firefox webdriver does not currently support logs: https://github.com/SeleniumHQ/selenium/issues/2972
It makes it impossible to export figures with Firefox as a webdriver.
Fixing this issue may allow to fix this one: https://github.com/bokeh/bokeh/issues/8176
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bokeh/io/export.py`
Content:
```
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
3 # All rights reserved.
4 #
5 # The full license is in the file LICENSE.txt, distributed with this software.
6 #-----------------------------------------------------------------------------
7 '''
8
9 '''
10
11 #-----------------------------------------------------------------------------
12 # Boilerplate
13 #-----------------------------------------------------------------------------
14 from __future__ import absolute_import, division, print_function, unicode_literals
15
16 import logging
17 log = logging.getLogger(__name__)
18
19 #-----------------------------------------------------------------------------
20 # Imports
21 #-----------------------------------------------------------------------------
22
23 # Standard library imports
24 import os
25 import io
26 import warnings
27 from os.path import abspath
28 from tempfile import mkstemp
29
30 # External imports
31 from six import raise_from, b
32
33 # Bokeh imports
34 from ..embed import file_html
35 from ..resources import INLINE
36 from ..util.dependencies import import_required
37 from ..util.string import decode_utf8
38 from .util import default_filename
39
40 #-----------------------------------------------------------------------------
41 # Globals and constants
42 #-----------------------------------------------------------------------------
43
44 __all__ = (
45 'create_webdriver',
46 'export_png',
47 'export_svgs',
48 'get_layout_html',
49 'get_screenshot_as_png',
50 'get_svgs',
51 'terminate_webdriver',
52 'webdriver_control',
53 )
54
55 #-----------------------------------------------------------------------------
56 # General API
57 #-----------------------------------------------------------------------------
58
59 def export_png(obj, filename=None, height=None, width=None, webdriver=None):
60 ''' Export the ``LayoutDOM`` object or document as a PNG.
61
62 If the filename is not given, it is derived from the script name (e.g.
63 ``/foo/myplot.py`` will create ``/foo/myplot.png``)
64
65 Args:
66 obj (LayoutDOM or Document) : a Layout (Row/Column), Plot or Widget
67 object or Document to export.
68
69 filename (str, optional) : filename to save document under (default: None)
70 If None, infer from the filename.
71
72 height (int) : the desired height of the exported layout obj only if
73 it's a Plot instance. Otherwise the height kwarg is ignored.
74
75 width (int) : the desired width of the exported layout obj only if
76 it's a Plot instance. Otherwise the width kwarg is ignored.
77
78 webdriver (selenium.webdriver) : a selenium webdriver instance to use
79 to export the image.
80
81 Returns:
82 filename (str) : the filename where the static file is saved.
83
84 If you would like to access an Image object directly, rather than save a
85 file to disk, use the lower-level :func:`~bokeh.io.export.get_screenshot_as_png`
86 function.
87
88 .. warning::
89 Responsive sizing_modes may generate layouts with unexpected size and
90 aspect ratios. It is recommended to use the default ``fixed`` sizing mode.
91
92 '''
93
94 image = get_screenshot_as_png(obj, height=height, width=width, driver=webdriver)
95
96 if filename is None:
97 filename = default_filename("png")
98
99 if image.width == 0 or image.height == 0:
100 raise ValueError("unable to save an empty image")
101
102 image.save(filename)
103
104 return abspath(filename)
105
106 def export_svgs(obj, filename=None, height=None, width=None, webdriver=None):
107 ''' Export the SVG-enabled plots within a layout. Each plot will result
108 in a distinct SVG file.
109
110 If the filename is not given, it is derived from the script name
111 (e.g. ``/foo/myplot.py`` will create ``/foo/myplot.svg``)
112
113 Args:
114 obj (LayoutDOM object) : a Layout (Row/Column), Plot or Widget object to display
115
116 filename (str, optional) : filename to save document under (default: None)
117 If None, infer from the filename.
118
119 height (int) : the desired height of the exported layout obj only if
120 it's a Plot instance. Otherwise the height kwarg is ignored.
121
122 width (int) : the desired width of the exported layout obj only if
123 it's a Plot instance. Otherwise the width kwarg is ignored.
124
125 webdriver (selenium.webdriver) : a selenium webdriver instance to use
126 to export the image.
127
128 Returns:
129 filenames (list(str)) : the list of filenames where the SVGs files are
130 saved.
131
132 .. warning::
133 Responsive sizing_modes may generate layouts with unexpected size and
134 aspect ratios. It is recommended to use the default ``fixed`` sizing mode.
135
136 '''
137 svgs = get_svgs(obj, height=height, width=width, driver=webdriver)
138
139 if len(svgs) == 0:
140 log.warning("No SVG Plots were found.")
141 return
142
143 if filename is None:
144 filename = default_filename("svg")
145
146 filenames = []
147
148 for i, svg in enumerate(svgs):
149 if i == 0:
150 filename = filename
151 else:
152 idx = filename.find(".svg")
153 filename = filename[:idx] + "_{}".format(i) + filename[idx:]
154
155 with io.open(filename, mode="w", encoding="utf-8") as f:
156 f.write(svg)
157
158 filenames.append(filename)
159
160 return filenames
161
162 #-----------------------------------------------------------------------------
163 # Dev API
164 #-----------------------------------------------------------------------------
165
166 # this is part of the API for this module
167 from .webdriver import webdriver_control
168 from .webdriver import terminate_webdriver # for back compat
169
170 def create_webdriver():
171 ''' Create a new webdriver.
172
173 .. note ::
174 Here for compatibility. Prefer methods on the webdriver_control
175 object.
176
177 '''
178 return webdriver_control.create()
179
180 def get_screenshot_as_png(obj, driver=None, **kwargs):
181 ''' Get a screenshot of a ``LayoutDOM`` object.
182
183 Args:
184 obj (LayoutDOM or Document) : a Layout (Row/Column), Plot or Widget
185 object or Document to export.
186
187 driver (selenium.webdriver) : a selenium webdriver instance to use
188 to export the image.
189
190 Returns:
191 cropped_image (PIL.Image.Image) : a pillow image loaded from PNG.
192
193 .. warning::
194 Responsive sizing_modes may generate layouts with unexpected size and
195 aspect ratios. It is recommended to use the default ``fixed`` sizing mode.
196
197 '''
198 Image = import_required('PIL.Image',
199 'To use bokeh.io.export_png you need pillow ' +
200 '("conda install pillow" or "pip install pillow")')
201
202 with _tmp_html() as tmp:
203 html = get_layout_html(obj, **kwargs)
204 with io.open(tmp.path, mode="w", encoding="utf-8") as file:
205 file.write(decode_utf8(html))
206
207 web_driver = driver if driver is not None else webdriver_control.get()
208
209 web_driver.get("file:///" + tmp.path)
210 web_driver.maximize_window()
211
212 ## resize for PhantomJS compat
213 web_driver.execute_script("document.body.style.width = '100%';")
214
215 wait_until_render_complete(web_driver)
216
217 png = web_driver.get_screenshot_as_png()
218
219 b_rect = web_driver.execute_script(_BOUNDING_RECT_SCRIPT)
220
221 image = Image.open(io.BytesIO(png))
222 cropped_image = _crop_image(image, **b_rect)
223
224 return cropped_image
225
226 def get_svgs(obj, driver=None, **kwargs):
227 '''
228
229 '''
230 with _tmp_html() as tmp:
231 html = get_layout_html(obj, **kwargs)
232 with io.open(tmp.path, mode="wb") as file:
233 file.write(b(html))
234
235 web_driver = driver if driver is not None else webdriver_control.get()
236
237 web_driver.get("file:///" + tmp.path)
238
239 wait_until_render_complete(web_driver)
240
241 svgs = web_driver.execute_script(_SVG_SCRIPT)
242
243 return svgs
244
245 def get_layout_html(obj, resources=INLINE, **kwargs):
246 '''
247
248 '''
249 resize = False
250 if kwargs.get('height') is not None or kwargs.get('width') is not None:
251 # Defer this import, it is expensive
252 from ..models.plots import Plot
253 if not isinstance(obj, Plot):
254 warnings.warn("Export method called with height or width kwargs on a non-Plot layout. The size values will be ignored.")
255 else:
256 resize = True
257 old_height = obj.plot_height
258 old_width = obj.plot_width
259 obj.plot_height = kwargs.get('height', old_height)
260 obj.plot_width = kwargs.get('width', old_width)
261
262 try:
263 html = file_html(obj, resources, title="", suppress_callback_warning=True, _always_new=True)
264 finally:
265 if resize:
266 obj.plot_height = old_height
267 obj.plot_width = old_width
268
269 return html
270
271 def wait_until_render_complete(driver):
272 '''
273
274 '''
275 from selenium.webdriver.support.ui import WebDriverWait
276 from selenium.common.exceptions import TimeoutException
277
278 def is_bokeh_loaded(driver):
279 return driver.execute_script('''
280 const b = window.Bokeh;
281 return b && b.documents && b.documents.length > 0;
282 ''')
283
284 try:
285 WebDriverWait(driver, 5, poll_frequency=0.1).until(is_bokeh_loaded)
286 except TimeoutException as e:
287 raise_from(RuntimeError('Bokeh was not loaded in time. Something may have gone wrong.'), e)
288
289 driver.execute_script(_WAIT_SCRIPT)
290
291 def is_bokeh_render_complete(driver):
292 return driver.execute_script('return window._bokeh_render_complete;')
293
294 try:
295 WebDriverWait(driver, 5, poll_frequency=0.1).until(is_bokeh_render_complete)
296 except TimeoutException:
297 log.warning("The webdriver raised a TimeoutException while waiting for "
298 "a 'bokeh:idle' event to signify that the layout has rendered. "
299 "Something may have gone wrong.")
300 finally:
301 browser_logs = driver.get_log('browser')
302 messages = [ l.get("message") for l in browser_logs if l.get('level') in ['WARNING', 'ERROR', 'SEVERE'] ]
303 if len(messages) > 0:
304 log.warning("There were browser warnings and/or errors that may have affected your export")
305 for message in messages:
306 log.warning(message)
307
308 #-----------------------------------------------------------------------------
309 # Private API
310 #-----------------------------------------------------------------------------
311
312 _BOUNDING_RECT_SCRIPT = """
313 return document.getElementsByClassName('bk-root')[0].children[0].getBoundingClientRect()
314 """
315
316 _SVG_SCRIPT = """
317 var serialized_svgs = [];
318 var svgs = document.getElementsByClassName('bk-root')[0].getElementsByTagName("svg");
319 for (var i = 0; i < svgs.length; i++) {
320 var source = (new XMLSerializer()).serializeToString(svgs[i]);
321 serialized_svgs.push(source);
322 };
323 return serialized_svgs
324 """
325
326 _WAIT_SCRIPT = """
327 // add private window prop to check that render is complete
328 window._bokeh_render_complete = false;
329 function done() {
330 window._bokeh_render_complete = true;
331 }
332
333 var doc = window.Bokeh.documents[0];
334
335 if (doc.is_idle)
336 done();
337 else
338 doc.idle.connect(done);
339 """
340
341 def _crop_image(image, left=0, top=0, right=0, bottom=0, **kwargs):
342 ''' Crop the border from the layout
343
344 '''
345 return image.crop((left, top, right, bottom))
346
347 class _TempFile(object):
348
349 _closed = False
350
351 def __init__(self, prefix="tmp", suffix=""):
352 self.fd, self.path = mkstemp(prefix=prefix, suffix=suffix)
353
354 def __enter__(self):
355 return self
356
357 def __exit__(self, exc, value, tb):
358 self.close()
359
360 def __del__(self):
361 self.close()
362
363 def close(self):
364 if self._closed:
365 return
366
367 try:
368 os.close(self.fd)
369 except (OSError, IOError):
370 pass
371 finally:
372 self.fd = None
373
374 try:
375 os.unlink(self.path)
376 except (OSError, IOError):
377 pass
378 finally:
379 self.path = None
380
381 self._closed = True
382
383 def _tmp_html():
384 return _TempFile(prefix="bokeh", suffix=".html")
385
386 #-----------------------------------------------------------------------------
387 # Code
388 #-----------------------------------------------------------------------------
389
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/bokeh/io/export.py b/bokeh/io/export.py
--- a/bokeh/io/export.py
+++ b/bokeh/io/export.py
@@ -274,6 +274,7 @@
'''
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import TimeoutException
+ from selenium.webdriver import Firefox
def is_bokeh_loaded(driver):
return driver.execute_script('''
@@ -298,12 +299,14 @@
"a 'bokeh:idle' event to signify that the layout has rendered. "
"Something may have gone wrong.")
finally:
- browser_logs = driver.get_log('browser')
- messages = [ l.get("message") for l in browser_logs if l.get('level') in ['WARNING', 'ERROR', 'SEVERE'] ]
- if len(messages) > 0:
- log.warning("There were browser warnings and/or errors that may have affected your export")
- for message in messages:
- log.warning(message)
+ # Firefox webdriver does not currently support logs
+ if not isinstance(driver, Firefox):
+ browser_logs = driver.get_log('browser')
+ messages = [ l.get("message") for l in browser_logs if l.get('level') in ['WARNING', 'ERROR', 'SEVERE'] ]
+ if len(messages) > 0:
+ log.warning("There were browser warnings and/or errors that may have affected your export")
+ for message in messages:
+ log.warning(message)
#-----------------------------------------------------------------------------
# Private API
|
{"golden_diff": "diff --git a/bokeh/io/export.py b/bokeh/io/export.py\n--- a/bokeh/io/export.py\n+++ b/bokeh/io/export.py\n@@ -274,6 +274,7 @@\n '''\n from selenium.webdriver.support.ui import WebDriverWait\n from selenium.common.exceptions import TimeoutException\n+ from selenium.webdriver import Firefox\n \n def is_bokeh_loaded(driver):\n return driver.execute_script('''\n@@ -298,12 +299,14 @@\n \"a 'bokeh:idle' event to signify that the layout has rendered. \"\n \"Something may have gone wrong.\")\n finally:\n- browser_logs = driver.get_log('browser')\n- messages = [ l.get(\"message\") for l in browser_logs if l.get('level') in ['WARNING', 'ERROR', 'SEVERE'] ]\n- if len(messages) > 0:\n- log.warning(\"There were browser warnings and/or errors that may have affected your export\")\n- for message in messages:\n- log.warning(message)\n+ # Firefox webdriver does not currently support logs\n+ if not isinstance(driver, Firefox):\n+ browser_logs = driver.get_log('browser')\n+ messages = [ l.get(\"message\") for l in browser_logs if l.get('level') in ['WARNING', 'ERROR', 'SEVERE'] ]\n+ if len(messages) > 0:\n+ log.warning(\"There were browser warnings and/or errors that may have affected your export\")\n+ for message in messages:\n+ log.warning(message)\n \n #-----------------------------------------------------------------------------\n # Private API\n", "issue": "[BUG] exports do not work with Firefox webdriver\nFirefox webdriver does not currently support logs: https://github.com/SeleniumHQ/selenium/issues/2972\r\n\r\nIt makes it impossible to export figures with Firefox as a webdriver.\r\nFixing this issue may allow to fix this one: https://github.com/bokeh/bokeh/issues/8176\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.\n# All rights reserved.\n#\n# The full license is in the file LICENSE.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n'''\n\n'''\n\n#-----------------------------------------------------------------------------\n# Boilerplate\n#-----------------------------------------------------------------------------\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport logging\nlog = logging.getLogger(__name__)\n\n#-----------------------------------------------------------------------------\n# Imports\n#-----------------------------------------------------------------------------\n\n# Standard library imports\nimport os\nimport io\nimport warnings\nfrom os.path import abspath\nfrom tempfile import mkstemp\n\n# External imports\nfrom six import raise_from, b\n\n# Bokeh imports\nfrom ..embed import file_html\nfrom ..resources import INLINE\nfrom ..util.dependencies import import_required\nfrom ..util.string import decode_utf8\nfrom .util import default_filename\n\n#-----------------------------------------------------------------------------\n# Globals and constants\n#-----------------------------------------------------------------------------\n\n__all__ = (\n 'create_webdriver',\n 'export_png',\n 'export_svgs',\n 'get_layout_html',\n 'get_screenshot_as_png',\n 'get_svgs',\n 'terminate_webdriver',\n 'webdriver_control',\n)\n\n#-----------------------------------------------------------------------------\n# General API\n#-----------------------------------------------------------------------------\n\ndef export_png(obj, filename=None, 
height=None, width=None, webdriver=None):\n ''' Export the ``LayoutDOM`` object or document as a PNG.\n\n If the filename is not given, it is derived from the script name (e.g.\n ``/foo/myplot.py`` will create ``/foo/myplot.png``)\n\n Args:\n obj (LayoutDOM or Document) : a Layout (Row/Column), Plot or Widget\n object or Document to export.\n\n filename (str, optional) : filename to save document under (default: None)\n If None, infer from the filename.\n\n height (int) : the desired height of the exported layout obj only if\n it's a Plot instance. Otherwise the height kwarg is ignored.\n\n width (int) : the desired width of the exported layout obj only if\n it's a Plot instance. Otherwise the width kwarg is ignored.\n\n webdriver (selenium.webdriver) : a selenium webdriver instance to use\n to export the image.\n\n Returns:\n filename (str) : the filename where the static file is saved.\n\n If you would like to access an Image object directly, rather than save a\n file to disk, use the lower-level :func:`~bokeh.io.export.get_screenshot_as_png`\n function.\n\n .. warning::\n Responsive sizing_modes may generate layouts with unexpected size and\n aspect ratios. It is recommended to use the default ``fixed`` sizing mode.\n\n '''\n\n image = get_screenshot_as_png(obj, height=height, width=width, driver=webdriver)\n\n if filename is None:\n filename = default_filename(\"png\")\n\n if image.width == 0 or image.height == 0:\n raise ValueError(\"unable to save an empty image\")\n\n image.save(filename)\n\n return abspath(filename)\n\ndef export_svgs(obj, filename=None, height=None, width=None, webdriver=None):\n ''' Export the SVG-enabled plots within a layout. Each plot will result\n in a distinct SVG file.\n\n If the filename is not given, it is derived from the script name\n (e.g. ``/foo/myplot.py`` will create ``/foo/myplot.svg``)\n\n Args:\n obj (LayoutDOM object) : a Layout (Row/Column), Plot or Widget object to display\n\n filename (str, optional) : filename to save document under (default: None)\n If None, infer from the filename.\n\n height (int) : the desired height of the exported layout obj only if\n it's a Plot instance. Otherwise the height kwarg is ignored.\n\n width (int) : the desired width of the exported layout obj only if\n it's a Plot instance. Otherwise the width kwarg is ignored.\n\n webdriver (selenium.webdriver) : a selenium webdriver instance to use\n to export the image.\n\n Returns:\n filenames (list(str)) : the list of filenames where the SVGs files are\n saved.\n\n .. warning::\n Responsive sizing_modes may generate layouts with unexpected size and\n aspect ratios. 
It is recommended to use the default ``fixed`` sizing mode.\n\n '''\n svgs = get_svgs(obj, height=height, width=width, driver=webdriver)\n\n if len(svgs) == 0:\n log.warning(\"No SVG Plots were found.\")\n return\n\n if filename is None:\n filename = default_filename(\"svg\")\n\n filenames = []\n\n for i, svg in enumerate(svgs):\n if i == 0:\n filename = filename\n else:\n idx = filename.find(\".svg\")\n filename = filename[:idx] + \"_{}\".format(i) + filename[idx:]\n\n with io.open(filename, mode=\"w\", encoding=\"utf-8\") as f:\n f.write(svg)\n\n filenames.append(filename)\n\n return filenames\n\n#-----------------------------------------------------------------------------\n# Dev API\n#-----------------------------------------------------------------------------\n\n# this is part of the API for this module\nfrom .webdriver import webdriver_control\nfrom .webdriver import terminate_webdriver # for back compat\n\ndef create_webdriver():\n ''' Create a new webdriver.\n\n .. note ::\n Here for compatibility. Prefer methods on the webdriver_control\n object.\n\n '''\n return webdriver_control.create()\n\ndef get_screenshot_as_png(obj, driver=None, **kwargs):\n ''' Get a screenshot of a ``LayoutDOM`` object.\n\n Args:\n obj (LayoutDOM or Document) : a Layout (Row/Column), Plot or Widget\n object or Document to export.\n\n driver (selenium.webdriver) : a selenium webdriver instance to use\n to export the image.\n\n Returns:\n cropped_image (PIL.Image.Image) : a pillow image loaded from PNG.\n\n .. warning::\n Responsive sizing_modes may generate layouts with unexpected size and\n aspect ratios. It is recommended to use the default ``fixed`` sizing mode.\n\n '''\n Image = import_required('PIL.Image',\n 'To use bokeh.io.export_png you need pillow ' +\n '(\"conda install pillow\" or \"pip install pillow\")')\n\n with _tmp_html() as tmp:\n html = get_layout_html(obj, **kwargs)\n with io.open(tmp.path, mode=\"w\", encoding=\"utf-8\") as file:\n file.write(decode_utf8(html))\n\n web_driver = driver if driver is not None else webdriver_control.get()\n\n web_driver.get(\"file:///\" + tmp.path)\n web_driver.maximize_window()\n\n ## resize for PhantomJS compat\n web_driver.execute_script(\"document.body.style.width = '100%';\")\n\n wait_until_render_complete(web_driver)\n\n png = web_driver.get_screenshot_as_png()\n\n b_rect = web_driver.execute_script(_BOUNDING_RECT_SCRIPT)\n\n image = Image.open(io.BytesIO(png))\n cropped_image = _crop_image(image, **b_rect)\n\n return cropped_image\n\ndef get_svgs(obj, driver=None, **kwargs):\n '''\n\n '''\n with _tmp_html() as tmp:\n html = get_layout_html(obj, **kwargs)\n with io.open(tmp.path, mode=\"wb\") as file:\n file.write(b(html))\n\n web_driver = driver if driver is not None else webdriver_control.get()\n\n web_driver.get(\"file:///\" + tmp.path)\n\n wait_until_render_complete(web_driver)\n\n svgs = web_driver.execute_script(_SVG_SCRIPT)\n\n return svgs\n\ndef get_layout_html(obj, resources=INLINE, **kwargs):\n '''\n\n '''\n resize = False\n if kwargs.get('height') is not None or kwargs.get('width') is not None:\n # Defer this import, it is expensive\n from ..models.plots import Plot\n if not isinstance(obj, Plot):\n warnings.warn(\"Export method called with height or width kwargs on a non-Plot layout. 
The size values will be ignored.\")\n else:\n resize = True\n old_height = obj.plot_height\n old_width = obj.plot_width\n obj.plot_height = kwargs.get('height', old_height)\n obj.plot_width = kwargs.get('width', old_width)\n\n try:\n html = file_html(obj, resources, title=\"\", suppress_callback_warning=True, _always_new=True)\n finally:\n if resize:\n obj.plot_height = old_height\n obj.plot_width = old_width\n\n return html\n\ndef wait_until_render_complete(driver):\n '''\n\n '''\n from selenium.webdriver.support.ui import WebDriverWait\n from selenium.common.exceptions import TimeoutException\n\n def is_bokeh_loaded(driver):\n return driver.execute_script('''\n const b = window.Bokeh;\n return b && b.documents && b.documents.length > 0;\n ''')\n\n try:\n WebDriverWait(driver, 5, poll_frequency=0.1).until(is_bokeh_loaded)\n except TimeoutException as e:\n raise_from(RuntimeError('Bokeh was not loaded in time. Something may have gone wrong.'), e)\n\n driver.execute_script(_WAIT_SCRIPT)\n\n def is_bokeh_render_complete(driver):\n return driver.execute_script('return window._bokeh_render_complete;')\n\n try:\n WebDriverWait(driver, 5, poll_frequency=0.1).until(is_bokeh_render_complete)\n except TimeoutException:\n log.warning(\"The webdriver raised a TimeoutException while waiting for \"\n \"a 'bokeh:idle' event to signify that the layout has rendered. \"\n \"Something may have gone wrong.\")\n finally:\n browser_logs = driver.get_log('browser')\n messages = [ l.get(\"message\") for l in browser_logs if l.get('level') in ['WARNING', 'ERROR', 'SEVERE'] ]\n if len(messages) > 0:\n log.warning(\"There were browser warnings and/or errors that may have affected your export\")\n for message in messages:\n log.warning(message)\n\n#-----------------------------------------------------------------------------\n# Private API\n#-----------------------------------------------------------------------------\n\n_BOUNDING_RECT_SCRIPT = \"\"\"\nreturn document.getElementsByClassName('bk-root')[0].children[0].getBoundingClientRect()\n\"\"\"\n\n_SVG_SCRIPT = \"\"\"\nvar serialized_svgs = [];\nvar svgs = document.getElementsByClassName('bk-root')[0].getElementsByTagName(\"svg\");\nfor (var i = 0; i < svgs.length; i++) {\n var source = (new XMLSerializer()).serializeToString(svgs[i]);\n serialized_svgs.push(source);\n};\nreturn serialized_svgs\n\"\"\"\n\n_WAIT_SCRIPT = \"\"\"\n// add private window prop to check that render is complete\nwindow._bokeh_render_complete = false;\nfunction done() {\n window._bokeh_render_complete = true;\n}\n\nvar doc = window.Bokeh.documents[0];\n\nif (doc.is_idle)\n done();\nelse\n doc.idle.connect(done);\n\"\"\"\n\ndef _crop_image(image, left=0, top=0, right=0, bottom=0, **kwargs):\n ''' Crop the border from the layout\n\n '''\n return image.crop((left, top, right, bottom))\n\nclass _TempFile(object):\n\n _closed = False\n\n def __init__(self, prefix=\"tmp\", suffix=\"\"):\n self.fd, self.path = mkstemp(prefix=prefix, suffix=suffix)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc, value, tb):\n self.close()\n\n def __del__(self):\n self.close()\n\n def close(self):\n if self._closed:\n return\n\n try:\n os.close(self.fd)\n except (OSError, IOError):\n pass\n finally:\n self.fd = None\n\n try:\n os.unlink(self.path)\n except (OSError, IOError):\n pass\n finally:\n self.path = None\n\n self._closed = True\n\ndef _tmp_html():\n return _TempFile(prefix=\"bokeh\", suffix=\".html\")\n\n#-----------------------------------------------------------------------------\n# 
Code\n#-----------------------------------------------------------------------------\n", "path": "bokeh/io/export.py"}], "after_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.\n# All rights reserved.\n#\n# The full license is in the file LICENSE.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n'''\n\n'''\n\n#-----------------------------------------------------------------------------\n# Boilerplate\n#-----------------------------------------------------------------------------\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport logging\nlog = logging.getLogger(__name__)\n\n#-----------------------------------------------------------------------------\n# Imports\n#-----------------------------------------------------------------------------\n\n# Standard library imports\nimport os\nimport io\nimport warnings\nfrom os.path import abspath\nfrom tempfile import mkstemp\n\n# External imports\nfrom six import raise_from, b\n\n# Bokeh imports\nfrom ..embed import file_html\nfrom ..resources import INLINE\nfrom ..util.dependencies import import_required\nfrom ..util.string import decode_utf8\nfrom .util import default_filename\n\n#-----------------------------------------------------------------------------\n# Globals and constants\n#-----------------------------------------------------------------------------\n\n__all__ = (\n 'create_webdriver',\n 'export_png',\n 'export_svgs',\n 'get_layout_html',\n 'get_screenshot_as_png',\n 'get_svgs',\n 'terminate_webdriver',\n 'webdriver_control',\n)\n\n#-----------------------------------------------------------------------------\n# General API\n#-----------------------------------------------------------------------------\n\ndef export_png(obj, filename=None, height=None, width=None, webdriver=None):\n ''' Export the ``LayoutDOM`` object or document as a PNG.\n\n If the filename is not given, it is derived from the script name (e.g.\n ``/foo/myplot.py`` will create ``/foo/myplot.png``)\n\n Args:\n obj (LayoutDOM or Document) : a Layout (Row/Column), Plot or Widget\n object or Document to export.\n\n filename (str, optional) : filename to save document under (default: None)\n If None, infer from the filename.\n\n height (int) : the desired height of the exported layout obj only if\n it's a Plot instance. Otherwise the height kwarg is ignored.\n\n width (int) : the desired width of the exported layout obj only if\n it's a Plot instance. Otherwise the width kwarg is ignored.\n\n webdriver (selenium.webdriver) : a selenium webdriver instance to use\n to export the image.\n\n Returns:\n filename (str) : the filename where the static file is saved.\n\n If you would like to access an Image object directly, rather than save a\n file to disk, use the lower-level :func:`~bokeh.io.export.get_screenshot_as_png`\n function.\n\n .. warning::\n Responsive sizing_modes may generate layouts with unexpected size and\n aspect ratios. 
It is recommended to use the default ``fixed`` sizing mode.\n\n '''\n\n image = get_screenshot_as_png(obj, height=height, width=width, driver=webdriver)\n\n if filename is None:\n filename = default_filename(\"png\")\n\n if image.width == 0 or image.height == 0:\n raise ValueError(\"unable to save an empty image\")\n\n image.save(filename)\n\n return abspath(filename)\n\ndef export_svgs(obj, filename=None, height=None, width=None, webdriver=None):\n ''' Export the SVG-enabled plots within a layout. Each plot will result\n in a distinct SVG file.\n\n If the filename is not given, it is derived from the script name\n (e.g. ``/foo/myplot.py`` will create ``/foo/myplot.svg``)\n\n Args:\n obj (LayoutDOM object) : a Layout (Row/Column), Plot or Widget object to display\n\n filename (str, optional) : filename to save document under (default: None)\n If None, infer from the filename.\n\n height (int) : the desired height of the exported layout obj only if\n it's a Plot instance. Otherwise the height kwarg is ignored.\n\n width (int) : the desired width of the exported layout obj only if\n it's a Plot instance. Otherwise the width kwarg is ignored.\n\n webdriver (selenium.webdriver) : a selenium webdriver instance to use\n to export the image.\n\n Returns:\n filenames (list(str)) : the list of filenames where the SVGs files are\n saved.\n\n .. warning::\n Responsive sizing_modes may generate layouts with unexpected size and\n aspect ratios. It is recommended to use the default ``fixed`` sizing mode.\n\n '''\n svgs = get_svgs(obj, height=height, width=width, driver=webdriver)\n\n if len(svgs) == 0:\n log.warning(\"No SVG Plots were found.\")\n return\n\n if filename is None:\n filename = default_filename(\"svg\")\n\n filenames = []\n\n for i, svg in enumerate(svgs):\n if i == 0:\n filename = filename\n else:\n idx = filename.find(\".svg\")\n filename = filename[:idx] + \"_{}\".format(i) + filename[idx:]\n\n with io.open(filename, mode=\"w\", encoding=\"utf-8\") as f:\n f.write(svg)\n\n filenames.append(filename)\n\n return filenames\n\n#-----------------------------------------------------------------------------\n# Dev API\n#-----------------------------------------------------------------------------\n\n# this is part of the API for this module\nfrom .webdriver import webdriver_control\nfrom .webdriver import terminate_webdriver # for back compat\n\ndef create_webdriver():\n ''' Create a new webdriver.\n\n .. note ::\n Here for compatibility. Prefer methods on the webdriver_control\n object.\n\n '''\n return webdriver_control.create()\n\ndef get_screenshot_as_png(obj, driver=None, **kwargs):\n ''' Get a screenshot of a ``LayoutDOM`` object.\n\n Args:\n obj (LayoutDOM or Document) : a Layout (Row/Column), Plot or Widget\n object or Document to export.\n\n driver (selenium.webdriver) : a selenium webdriver instance to use\n to export the image.\n\n Returns:\n cropped_image (PIL.Image.Image) : a pillow image loaded from PNG.\n\n .. warning::\n Responsive sizing_modes may generate layouts with unexpected size and\n aspect ratios. 
It is recommended to use the default ``fixed`` sizing mode.\n\n '''\n Image = import_required('PIL.Image',\n 'To use bokeh.io.export_png you need pillow ' +\n '(\"conda install pillow\" or \"pip install pillow\")')\n\n with _tmp_html() as tmp:\n html = get_layout_html(obj, **kwargs)\n with io.open(tmp.path, mode=\"w\", encoding=\"utf-8\") as file:\n file.write(decode_utf8(html))\n\n web_driver = driver if driver is not None else webdriver_control.get()\n\n web_driver.get(\"file:///\" + tmp.path)\n web_driver.maximize_window()\n\n ## resize for PhantomJS compat\n web_driver.execute_script(\"document.body.style.width = '100%';\")\n\n wait_until_render_complete(web_driver)\n\n png = web_driver.get_screenshot_as_png()\n\n b_rect = web_driver.execute_script(_BOUNDING_RECT_SCRIPT)\n\n image = Image.open(io.BytesIO(png))\n cropped_image = _crop_image(image, **b_rect)\n\n return cropped_image\n\ndef get_svgs(obj, driver=None, **kwargs):\n '''\n\n '''\n with _tmp_html() as tmp:\n html = get_layout_html(obj, **kwargs)\n with io.open(tmp.path, mode=\"wb\") as file:\n file.write(b(html))\n\n web_driver = driver if driver is not None else webdriver_control.get()\n\n web_driver.get(\"file:///\" + tmp.path)\n\n wait_until_render_complete(web_driver)\n\n svgs = web_driver.execute_script(_SVG_SCRIPT)\n\n return svgs\n\ndef get_layout_html(obj, resources=INLINE, **kwargs):\n '''\n\n '''\n resize = False\n if kwargs.get('height') is not None or kwargs.get('width') is not None:\n # Defer this import, it is expensive\n from ..models.plots import Plot\n if not isinstance(obj, Plot):\n warnings.warn(\"Export method called with height or width kwargs on a non-Plot layout. The size values will be ignored.\")\n else:\n resize = True\n old_height = obj.plot_height\n old_width = obj.plot_width\n obj.plot_height = kwargs.get('height', old_height)\n obj.plot_width = kwargs.get('width', old_width)\n\n try:\n html = file_html(obj, resources, title=\"\", suppress_callback_warning=True, _always_new=True)\n finally:\n if resize:\n obj.plot_height = old_height\n obj.plot_width = old_width\n\n return html\n\ndef wait_until_render_complete(driver):\n '''\n\n '''\n from selenium.webdriver.support.ui import WebDriverWait\n from selenium.common.exceptions import TimeoutException\n from selenium.webdriver import Firefox\n\n def is_bokeh_loaded(driver):\n return driver.execute_script('''\n const b = window.Bokeh;\n return b && b.documents && b.documents.length > 0;\n ''')\n\n try:\n WebDriverWait(driver, 5, poll_frequency=0.1).until(is_bokeh_loaded)\n except TimeoutException as e:\n raise_from(RuntimeError('Bokeh was not loaded in time. Something may have gone wrong.'), e)\n\n driver.execute_script(_WAIT_SCRIPT)\n\n def is_bokeh_render_complete(driver):\n return driver.execute_script('return window._bokeh_render_complete;')\n\n try:\n WebDriverWait(driver, 5, poll_frequency=0.1).until(is_bokeh_render_complete)\n except TimeoutException:\n log.warning(\"The webdriver raised a TimeoutException while waiting for \"\n \"a 'bokeh:idle' event to signify that the layout has rendered. 
\"\n \"Something may have gone wrong.\")\n finally:\n # Firefox webdriver does not currently support logs\n if not isinstance(driver, Firefox):\n browser_logs = driver.get_log('browser')\n messages = [ l.get(\"message\") for l in browser_logs if l.get('level') in ['WARNING', 'ERROR', 'SEVERE'] ]\n if len(messages) > 0:\n log.warning(\"There were browser warnings and/or errors that may have affected your export\")\n for message in messages:\n log.warning(message)\n\n#-----------------------------------------------------------------------------\n# Private API\n#-----------------------------------------------------------------------------\n\n_BOUNDING_RECT_SCRIPT = \"\"\"\nreturn document.getElementsByClassName('bk-root')[0].children[0].getBoundingClientRect()\n\"\"\"\n\n_SVG_SCRIPT = \"\"\"\nvar serialized_svgs = [];\nvar svgs = document.getElementsByClassName('bk-root')[0].getElementsByTagName(\"svg\");\nfor (var i = 0; i < svgs.length; i++) {\n var source = (new XMLSerializer()).serializeToString(svgs[i]);\n serialized_svgs.push(source);\n};\nreturn serialized_svgs\n\"\"\"\n\n_WAIT_SCRIPT = \"\"\"\n// add private window prop to check that render is complete\nwindow._bokeh_render_complete = false;\nfunction done() {\n window._bokeh_render_complete = true;\n}\n\nvar doc = window.Bokeh.documents[0];\n\nif (doc.is_idle)\n done();\nelse\n doc.idle.connect(done);\n\"\"\"\n\ndef _crop_image(image, left=0, top=0, right=0, bottom=0, **kwargs):\n ''' Crop the border from the layout\n\n '''\n return image.crop((left, top, right, bottom))\n\nclass _TempFile(object):\n\n _closed = False\n\n def __init__(self, prefix=\"tmp\", suffix=\"\"):\n self.fd, self.path = mkstemp(prefix=prefix, suffix=suffix)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc, value, tb):\n self.close()\n\n def __del__(self):\n self.close()\n\n def close(self):\n if self._closed:\n return\n\n try:\n os.close(self.fd)\n except (OSError, IOError):\n pass\n finally:\n self.fd = None\n\n try:\n os.unlink(self.path)\n except (OSError, IOError):\n pass\n finally:\n self.path = None\n\n self._closed = True\n\ndef _tmp_html():\n return _TempFile(prefix=\"bokeh\", suffix=\".html\")\n\n#-----------------------------------------------------------------------------\n# Code\n#-----------------------------------------------------------------------------\n", "path": "bokeh/io/export.py"}]}
| 4,004 | 341 |
| gh_patches_debug_58004 | rasdani/github-patches | git_diff | CiviWiki__OpenCiviWiki-980 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Restore SessionAuthenticationMiddleware
We aim to move away from having a heavy JavaScript front-end, preferring instead to use Django templates (and sprinkles of JS where needed). This means we can use SessionAuthenticationMiddleware.
This will also require restoring the default authentication classes in `settings.py`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `project/core/settings.py`
Content:
```
1 """
2 Django settings for civiwiki project.
3 Darius Calliet May 12, 2016
4
5 Production settings file to select proper environment variables.
6 """
7 import os
8
9 # False if not in os.environ
10 DEBUG = os.getenv("DEBUG", False)
11
12 # defaults to second value if not found in os.environ
13 DJANGO_HOST = os.getenv("DJANGO_HOST", "LOCALHOST")
14
15 BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
16 SECRET_KEY = os.getenv("DJANGO_SECRET_KEY", "TEST_KEY_FOR_DEVELOPMENT")
17 ALLOWED_HOSTS = [".herokuapp.com", ".civiwiki.org", "127.0.0.1", "localhost", "0.0.0.0"]
18
19 INSTALLED_APPS = (
20 "django.contrib.admin",
21 "django.contrib.auth",
22 "django.contrib.contenttypes",
23 "django.contrib.sessions",
24 "django.contrib.messages",
25 "django.contrib.staticfiles",
26 "django_extensions",
27 "storages",
28 "core", # TODO: consider removing this, if we can move the decorators, etc. to an actual app
29 "api",
30 "rest_framework",
31 "accounts",
32 "threads",
33 "frontend_views",
34 "notifications",
35 "corsheaders",
36 "taggit",
37 )
38
39 MIDDLEWARE = [
40 "corsheaders.middleware.CorsMiddleware",
41 "django.middleware.security.SecurityMiddleware",
42 "whitenoise.middleware.WhiteNoiseMiddleware",
43 "django.contrib.sessions.middleware.SessionMiddleware",
44 "django.middleware.common.CommonMiddleware",
45 "django.middleware.csrf.CsrfViewMiddleware",
46 "django.contrib.auth.middleware.AuthenticationMiddleware",
47 # 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
48 "django.contrib.messages.middleware.MessageMiddleware",
49 "django.middleware.clickjacking.XFrameOptionsMiddleware",
50 ]
51
52 CSRF_USE_SESSIONS = (
53 True # Store the CSRF token in the users session instead of in a cookie
54 )
55
56 CORS_ORIGIN_ALLOW_ALL = True
57 ROOT_URLCONF = "core.urls"
58 LOGIN_URL = "/login"
59
60 # SSL Setup
61 if DJANGO_HOST != "LOCALHOST":
62 SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
63 SECURE_SSL_REDIRECT = True
64 SESSION_COOKIE_SECURE = True
65 CSRF_COOKIE_SECURE = True
66
67 # Internationalization & Localization
68 LANGUAGE_CODE = "en-us"
69 TIME_ZONE = "UTC"
70 USE_I18N = True
71 USE_L10N = True
72 USE_TZ = True
73
74 TEMPLATES = [
75 {
76 "BACKEND": "django.template.backends.django.DjangoTemplates",
77 "DIRS": [
78 os.path.join(BASE_DIR, "threads/templates/threads"), os.path.join(BASE_DIR, "accounts/templates/accounts")
79 ], # TODO: Add non-webapp template directory
80 "APP_DIRS": True,
81 "OPTIONS": {
82 "context_processors": [
83 "django.template.context_processors.debug",
84 "django.template.context_processors.request",
85 "django.contrib.auth.context_processors.auth",
86 "django.contrib.messages.context_processors.messages",
87 ],
88 },
89 },
90 ]
91
92 WSGI_APPLICATION = "core.wsgi.application"
93
94 # Apex Contact for Production Errors
95 ADMINS = [("Development Team", "[email protected]")]
96
97 # AWS S3 Setup
98 if "AWS_STORAGE_BUCKET_NAME" not in os.environ:
99 MEDIA_URL = "/media/"
100 MEDIA_ROOT = os.path.join(BASE_DIR, "media")
101 else:
102 AWS_STORAGE_BUCKET_NAME = os.getenv("AWS_STORAGE_BUCKET_NAME")
103 AWS_S3_ACCESS_KEY_ID = os.getenv("AWS_S3_ACCESS_KEY_ID")
104 AWS_S3_SECRET_ACCESS_KEY = os.getenv("AWS_S3_SECRET_ACCESS_KEY")
105 DEFAULT_FILE_STORAGE = "storages.backends.s3boto.S3BotoStorage"
106 AWS_S3_SECURE_URLS = False
107 AWS_QUERYSTRING_AUTH = False
108
109 STATIC_URL = "/static/"
110 STATICFILES_DIRS = (os.path.join(BASE_DIR, "threads/templates/static"),)
111 STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
112
113 # TODO: re-organize and simplify staticfiles settings
114 if "CIVIWIKI_LOCAL_NAME" not in os.environ:
115 STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
116
117 # Use DATABASE_URL in production
118 DATABASE_URL = os.getenv("DATABASE_URL")
119
120 if DATABASE_URL is not None:
121 DATABASES = {"default": DATABASE_URL}
122 else:
123 # Default to sqlite for simplicity in development
124 DATABASES = {
125 "default": {
126 "ENGINE": "django.db.backends.sqlite3",
127 "NAME": BASE_DIR + "/" + "db.sqlite3",
128 }
129 }
130
131 # Email Backend Setup
132 if "EMAIL_HOST" not in os.environ:
133 EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
134 EMAIL_HOST_USER = "[email protected]"
135 else:
136 EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
137 EMAIL_HOST = os.getenv("EMAIL_HOST")
138 EMAIL_PORT = os.getenv("EMAIL_PORT")
139 EMAIL_HOST_USER = os.getenv("EMAIL_HOST_USER")
140 EMAIL_HOST_PASSWORD = os.getenv("EMAIL_HOST_PASSWORD")
141 EMAIL_USE_SSL = True
142 DEFAULT_FROM_EMAIL = EMAIL_HOST
143
144 # Notification API Settings
145 NOTIFICATIONS_SOFT_DELETE = True
146 NOTIFICATIONS_USE_JSONFIELD = True
147
148 # Django REST API Settings
149 DEFAULT_RENDERER_CLASSES = ("rest_framework.renderers.JSONRenderer",)
150
151 DEFAULT_AUTHENTICATION_CLASSES = ("rest_framework.authentication.BasicAuthentication",)
152
153 if DEBUG:
154 # Browsable HTML - Enabled only in Debug mode (dev)
155 DEFAULT_RENDERER_CLASSES = DEFAULT_RENDERER_CLASSES + (
156 "rest_framework.renderers.BrowsableAPIRenderer",
157 )
158
159 DEFAULT_AUTHENTICATION_CLASSES = (
160 "api.authentication.CsrfExemptSessionAuthentication",
161 ) + DEFAULT_AUTHENTICATION_CLASSES
162
163 REST_FRAMEWORK = {
164 "DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
165 "DEFAULT_RENDERER_CLASSES": DEFAULT_RENDERER_CLASSES,
166 "DEFAULT_AUTHENTICATION_CLASSES": DEFAULT_AUTHENTICATION_CLASSES,
167 }
168
169 # CORS Settings
170 CORS_ORIGIN_ALLOW_ALL = True
171
172 # Custom User model
173 AUTH_USER_MODEL = 'accounts.User'
174
175 APPEND_SLASH = False
176
177 DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
178
179 LOGIN_REDIRECT_URL = '/'
180
181 AUTH_PASSWORD_VALIDATORS = [
182 {
183 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
184 },
185 {
186 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
187 'OPTIONS': {
188 'min_length': 8,
189 }
190 },
191 {
192 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
193 },
194 {
195 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
196 },
197 ]
198
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/project/core/settings.py b/project/core/settings.py
--- a/project/core/settings.py
+++ b/project/core/settings.py
@@ -148,7 +148,7 @@
# Django REST API Settings
DEFAULT_RENDERER_CLASSES = ("rest_framework.renderers.JSONRenderer",)
-DEFAULT_AUTHENTICATION_CLASSES = ("rest_framework.authentication.BasicAuthentication",)
+DEFAULT_AUTHENTICATION_CLASSES = ("rest_framework.authentication.SessionAuthentication",)
if DEBUG:
# Browsable HTML - Enabled only in Debug mode (dev)
|
{"golden_diff": "diff --git a/project/core/settings.py b/project/core/settings.py\n--- a/project/core/settings.py\n+++ b/project/core/settings.py\n@@ -148,7 +148,7 @@\n # Django REST API Settings\n DEFAULT_RENDERER_CLASSES = (\"rest_framework.renderers.JSONRenderer\",)\n \n-DEFAULT_AUTHENTICATION_CLASSES = (\"rest_framework.authentication.BasicAuthentication\",)\n+DEFAULT_AUTHENTICATION_CLASSES = (\"rest_framework.authentication.SessionAuthentication\",)\n \n if DEBUG:\n # Browsable HTML - Enabled only in Debug mode (dev)\n", "issue": "Restore SessionAuthenticationMiddleware\nWe aim to move away from having a heavy JavaScript front-end, preferring instead to use Django templates (and sprinkles of JS where needed). This means we can use SessionAuthenticationMiddleware.\r\n\r\nThis will also require restoring the default authentication classes in `settings.py`\n", "before_files": [{"content": "\"\"\"\nDjango settings for civiwiki project.\nDarius Calliet May 12, 2016\n\nProduction settings file to select proper environment variables.\n\"\"\"\nimport os\n\n# False if not in os.environ\nDEBUG = os.getenv(\"DEBUG\", False)\n\n# defaults to second value if not found in os.environ\nDJANGO_HOST = os.getenv(\"DJANGO_HOST\", \"LOCALHOST\")\n\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\nSECRET_KEY = os.getenv(\"DJANGO_SECRET_KEY\", \"TEST_KEY_FOR_DEVELOPMENT\")\nALLOWED_HOSTS = [\".herokuapp.com\", \".civiwiki.org\", \"127.0.0.1\", \"localhost\", \"0.0.0.0\"]\n\nINSTALLED_APPS = (\n \"django.contrib.admin\",\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"django.contrib.staticfiles\",\n \"django_extensions\",\n \"storages\",\n \"core\", # TODO: consider removing this, if we can move the decorators, etc. 
to an actual app\n \"api\",\n \"rest_framework\",\n \"accounts\",\n \"threads\",\n \"frontend_views\",\n \"notifications\",\n \"corsheaders\",\n \"taggit\",\n)\n\nMIDDLEWARE = [\n \"corsheaders.middleware.CorsMiddleware\",\n \"django.middleware.security.SecurityMiddleware\",\n \"whitenoise.middleware.WhiteNoiseMiddleware\",\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n # 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n]\n\nCSRF_USE_SESSIONS = (\n True # Store the CSRF token in the users session instead of in a cookie\n)\n\nCORS_ORIGIN_ALLOW_ALL = True\nROOT_URLCONF = \"core.urls\"\nLOGIN_URL = \"/login\"\n\n# SSL Setup\nif DJANGO_HOST != \"LOCALHOST\":\n SECURE_PROXY_SSL_HEADER = (\"HTTP_X_FORWARDED_PROTO\", \"https\")\n SECURE_SSL_REDIRECT = True\n SESSION_COOKIE_SECURE = True\n CSRF_COOKIE_SECURE = True\n\n# Internationalization & Localization\nLANGUAGE_CODE = \"en-us\"\nTIME_ZONE = \"UTC\"\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [\n os.path.join(BASE_DIR, \"threads/templates/threads\"), os.path.join(BASE_DIR, \"accounts/templates/accounts\")\n ], # TODO: Add non-webapp template directory\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n ],\n },\n },\n]\n\nWSGI_APPLICATION = \"core.wsgi.application\"\n\n# Apex Contact for Production Errors\nADMINS = [(\"Development Team\", \"[email protected]\")]\n\n# AWS S3 Setup\nif \"AWS_STORAGE_BUCKET_NAME\" not in os.environ:\n MEDIA_URL = \"/media/\"\n MEDIA_ROOT = os.path.join(BASE_DIR, \"media\")\nelse:\n AWS_STORAGE_BUCKET_NAME = os.getenv(\"AWS_STORAGE_BUCKET_NAME\")\n AWS_S3_ACCESS_KEY_ID = os.getenv(\"AWS_S3_ACCESS_KEY_ID\")\n AWS_S3_SECRET_ACCESS_KEY = os.getenv(\"AWS_S3_SECRET_ACCESS_KEY\")\n DEFAULT_FILE_STORAGE = \"storages.backends.s3boto.S3BotoStorage\"\n AWS_S3_SECURE_URLS = False\n AWS_QUERYSTRING_AUTH = False\n\nSTATIC_URL = \"/static/\"\nSTATICFILES_DIRS = (os.path.join(BASE_DIR, \"threads/templates/static\"),)\nSTATIC_ROOT = os.path.join(BASE_DIR, \"staticfiles\")\n\n# TODO: re-organize and simplify staticfiles settings\nif \"CIVIWIKI_LOCAL_NAME\" not in os.environ:\n STATICFILES_STORAGE = \"whitenoise.storage.CompressedManifestStaticFilesStorage\"\n\n# Use DATABASE_URL in production\nDATABASE_URL = os.getenv(\"DATABASE_URL\")\n\nif DATABASE_URL is not None:\n DATABASES = {\"default\": DATABASE_URL}\nelse:\n # Default to sqlite for simplicity in development\n DATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.sqlite3\",\n \"NAME\": BASE_DIR + \"/\" + \"db.sqlite3\",\n }\n }\n\n# Email Backend Setup\nif \"EMAIL_HOST\" not in os.environ:\n EMAIL_BACKEND = \"django.core.mail.backends.console.EmailBackend\"\n EMAIL_HOST_USER = \"[email protected]\"\nelse:\n EMAIL_BACKEND = \"django.core.mail.backends.smtp.EmailBackend\"\n EMAIL_HOST = os.getenv(\"EMAIL_HOST\")\n EMAIL_PORT = os.getenv(\"EMAIL_PORT\")\n EMAIL_HOST_USER = os.getenv(\"EMAIL_HOST_USER\")\n EMAIL_HOST_PASSWORD = 
os.getenv(\"EMAIL_HOST_PASSWORD\")\n EMAIL_USE_SSL = True\n DEFAULT_FROM_EMAIL = EMAIL_HOST\n\n# Notification API Settings\nNOTIFICATIONS_SOFT_DELETE = True\nNOTIFICATIONS_USE_JSONFIELD = True\n\n# Django REST API Settings\nDEFAULT_RENDERER_CLASSES = (\"rest_framework.renderers.JSONRenderer\",)\n\nDEFAULT_AUTHENTICATION_CLASSES = (\"rest_framework.authentication.BasicAuthentication\",)\n\nif DEBUG:\n # Browsable HTML - Enabled only in Debug mode (dev)\n DEFAULT_RENDERER_CLASSES = DEFAULT_RENDERER_CLASSES + (\n \"rest_framework.renderers.BrowsableAPIRenderer\",\n )\n\n DEFAULT_AUTHENTICATION_CLASSES = (\n \"api.authentication.CsrfExemptSessionAuthentication\",\n ) + DEFAULT_AUTHENTICATION_CLASSES\n\nREST_FRAMEWORK = {\n \"DEFAULT_PERMISSION_CLASSES\": (\"rest_framework.permissions.IsAuthenticated\",),\n \"DEFAULT_RENDERER_CLASSES\": DEFAULT_RENDERER_CLASSES,\n \"DEFAULT_AUTHENTICATION_CLASSES\": DEFAULT_AUTHENTICATION_CLASSES,\n}\n\n# CORS Settings\nCORS_ORIGIN_ALLOW_ALL = True\n\n# Custom User model\nAUTH_USER_MODEL = 'accounts.User'\n\nAPPEND_SLASH = False\n\nDEFAULT_AUTO_FIELD = 'django.db.models.AutoField'\n\nLOGIN_REDIRECT_URL = '/'\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',\n 'OPTIONS': {\n 'min_length': 8,\n }\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',\n },\n]\n", "path": "project/core/settings.py"}], "after_files": [{"content": "\"\"\"\nDjango settings for civiwiki project.\nDarius Calliet May 12, 2016\n\nProduction settings file to select proper environment variables.\n\"\"\"\nimport os\n\n# False if not in os.environ\nDEBUG = os.getenv(\"DEBUG\", False)\n\n# defaults to second value if not found in os.environ\nDJANGO_HOST = os.getenv(\"DJANGO_HOST\", \"LOCALHOST\")\n\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\nSECRET_KEY = os.getenv(\"DJANGO_SECRET_KEY\", \"TEST_KEY_FOR_DEVELOPMENT\")\nALLOWED_HOSTS = [\".herokuapp.com\", \".civiwiki.org\", \"127.0.0.1\", \"localhost\", \"0.0.0.0\"]\n\nINSTALLED_APPS = (\n \"django.contrib.admin\",\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"django.contrib.staticfiles\",\n \"django_extensions\",\n \"storages\",\n \"core\", # TODO: consider removing this, if we can move the decorators, etc. 
to an actual app\n \"api\",\n \"rest_framework\",\n \"accounts\",\n \"threads\",\n \"frontend_views\",\n \"notifications\",\n \"corsheaders\",\n \"taggit\",\n)\n\nMIDDLEWARE = [\n \"corsheaders.middleware.CorsMiddleware\",\n \"django.middleware.security.SecurityMiddleware\",\n \"whitenoise.middleware.WhiteNoiseMiddleware\",\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n # 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n]\n\nCSRF_USE_SESSIONS = (\n True # Store the CSRF token in the users session instead of in a cookie\n)\n\nCORS_ORIGIN_ALLOW_ALL = True\nROOT_URLCONF = \"core.urls\"\nLOGIN_URL = \"/login\"\n\n# SSL Setup\nif DJANGO_HOST != \"LOCALHOST\":\n SECURE_PROXY_SSL_HEADER = (\"HTTP_X_FORWARDED_PROTO\", \"https\")\n SECURE_SSL_REDIRECT = True\n SESSION_COOKIE_SECURE = True\n CSRF_COOKIE_SECURE = True\n\n# Internationalization & Localization\nLANGUAGE_CODE = \"en-us\"\nTIME_ZONE = \"UTC\"\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [\n os.path.join(BASE_DIR, \"threads/templates/threads\"), os.path.join(BASE_DIR, \"accounts/templates/accounts\")\n ], # TODO: Add non-webapp template directory\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n ],\n },\n },\n]\n\nWSGI_APPLICATION = \"core.wsgi.application\"\n\n# Apex Contact for Production Errors\nADMINS = [(\"Development Team\", \"[email protected]\")]\n\n# AWS S3 Setup\nif \"AWS_STORAGE_BUCKET_NAME\" not in os.environ:\n MEDIA_URL = \"/media/\"\n MEDIA_ROOT = os.path.join(BASE_DIR, \"media\")\nelse:\n AWS_STORAGE_BUCKET_NAME = os.getenv(\"AWS_STORAGE_BUCKET_NAME\")\n AWS_S3_ACCESS_KEY_ID = os.getenv(\"AWS_S3_ACCESS_KEY_ID\")\n AWS_S3_SECRET_ACCESS_KEY = os.getenv(\"AWS_S3_SECRET_ACCESS_KEY\")\n DEFAULT_FILE_STORAGE = \"storages.backends.s3boto.S3BotoStorage\"\n AWS_S3_SECURE_URLS = False\n AWS_QUERYSTRING_AUTH = False\n\nSTATIC_URL = \"/static/\"\nSTATICFILES_DIRS = (os.path.join(BASE_DIR, \"threads/templates/static\"),)\nSTATIC_ROOT = os.path.join(BASE_DIR, \"staticfiles\")\n\n# TODO: re-organize and simplify staticfiles settings\nif \"CIVIWIKI_LOCAL_NAME\" not in os.environ:\n STATICFILES_STORAGE = \"whitenoise.storage.CompressedManifestStaticFilesStorage\"\n\n# Use DATABASE_URL in production\nDATABASE_URL = os.getenv(\"DATABASE_URL\")\n\nif DATABASE_URL is not None:\n DATABASES = {\"default\": DATABASE_URL}\nelse:\n # Default to sqlite for simplicity in development\n DATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.sqlite3\",\n \"NAME\": BASE_DIR + \"/\" + \"db.sqlite3\",\n }\n }\n\n# Email Backend Setup\nif \"EMAIL_HOST\" not in os.environ:\n EMAIL_BACKEND = \"django.core.mail.backends.console.EmailBackend\"\n EMAIL_HOST_USER = \"[email protected]\"\nelse:\n EMAIL_BACKEND = \"django.core.mail.backends.smtp.EmailBackend\"\n EMAIL_HOST = os.getenv(\"EMAIL_HOST\")\n EMAIL_PORT = os.getenv(\"EMAIL_PORT\")\n EMAIL_HOST_USER = os.getenv(\"EMAIL_HOST_USER\")\n EMAIL_HOST_PASSWORD = 
os.getenv(\"EMAIL_HOST_PASSWORD\")\n EMAIL_USE_SSL = True\n DEFAULT_FROM_EMAIL = EMAIL_HOST\n\n# Notification API Settings\nNOTIFICATIONS_SOFT_DELETE = True\nNOTIFICATIONS_USE_JSONFIELD = True\n\n# Django REST API Settings\nDEFAULT_RENDERER_CLASSES = (\"rest_framework.renderers.JSONRenderer\",)\n\nDEFAULT_AUTHENTICATION_CLASSES = (\"rest_framework.authentication.SessionAuthentication\",)\n\nif DEBUG:\n # Browsable HTML - Enabled only in Debug mode (dev)\n DEFAULT_RENDERER_CLASSES = DEFAULT_RENDERER_CLASSES + (\n \"rest_framework.renderers.BrowsableAPIRenderer\",\n )\n\n DEFAULT_AUTHENTICATION_CLASSES = (\n \"api.authentication.CsrfExemptSessionAuthentication\",\n ) + DEFAULT_AUTHENTICATION_CLASSES\n\nREST_FRAMEWORK = {\n \"DEFAULT_PERMISSION_CLASSES\": (\"rest_framework.permissions.IsAuthenticated\",),\n \"DEFAULT_RENDERER_CLASSES\": DEFAULT_RENDERER_CLASSES,\n \"DEFAULT_AUTHENTICATION_CLASSES\": DEFAULT_AUTHENTICATION_CLASSES,\n}\n\n# CORS Settings\nCORS_ORIGIN_ALLOW_ALL = True\n\n# Custom User model\nAUTH_USER_MODEL = 'accounts.User'\n\nAPPEND_SLASH = False\n\nDEFAULT_AUTO_FIELD = 'django.db.models.AutoField'\n\nLOGIN_REDIRECT_URL = '/'\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',\n 'OPTIONS': {\n 'min_length': 8,\n }\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',\n },\n]\n", "path": "project/core/settings.py"}]}
| 2,213 | 108 |
gh_patches_debug_38223
|
rasdani/github-patches
|
git_diff
|
pypi__warehouse-432
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Implement X-PyPI-Last-Serial for the File View
The view that serves files doesn't implement the `X-PyPI-Last-Serial` header, however bandersnatch needs that header, so we need to implement it.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `warehouse/packaging/views.py`
Content:
```
1 # Licensed under the Apache License, Version 2.0 (the "License");
2 # you may not use this file except in compliance with the License.
3 # You may obtain a copy of the License at
4 #
5 # http://www.apache.org/licenses/LICENSE-2.0
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 # See the License for the specific language governing permissions and
11 # limitations under the License.
12
13 import fs.errors
14
15 from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound
16 from pyramid.response import FileIter, Response
17 from pyramid.view import view_config
18 from sqlalchemy.orm.exc import NoResultFound
19
20 from warehouse.accounts.models import User
21 from warehouse.cache.http import cache_control
22 from warehouse.cache.origin import origin_cache
23 from warehouse.packaging.interfaces import IDownloadStatService
24 from warehouse.packaging.models import Release, File, Role
25
26
27 @view_config(
28 route_name="packaging.project",
29 renderer="packaging/detail.html",
30 decorator=[
31 cache_control(1 * 24 * 60 * 60), # 1 day
32 origin_cache(7 * 24 * 60 * 60), # 7 days
33 ],
34 )
35 def project_detail(project, request):
36 if project.name != request.matchdict.get("name", project.name):
37 return HTTPMovedPermanently(
38 request.current_route_url(name=project.name),
39 )
40
41 try:
42 release = project.releases.order_by(
43 Release._pypi_ordering.desc()
44 ).limit(1).one()
45 except NoResultFound:
46 raise HTTPNotFound from None
47
48 return release_detail(release, request)
49
50
51 @view_config(
52 route_name="packaging.release",
53 renderer="packaging/detail.html",
54 decorator=[
55 cache_control(7 * 24 * 60 * 60), # 7 days
56 origin_cache(30 * 24 * 60 * 60), # 30 days
57 ],
58 )
59 def release_detail(release, request):
60 project = release.project
61
62 if project.name != request.matchdict.get("name", project.name):
63 return HTTPMovedPermanently(
64 request.current_route_url(name=project.name),
65 )
66
67 # Get all of the registered versions for this Project, in order of newest
68 # to oldest.
69 all_releases = (
70 project.releases
71 .with_entities(Release.version, Release.created)
72 .order_by(Release._pypi_ordering.desc())
73 .all()
74 )
75
76 # Get all of the maintainers for this project.
77 maintainers = [
78 r.user
79 for r in (
80 request.db.query(Role)
81 .join(User)
82 .filter(Role.project == project)
83 .distinct(User.username)
84 .order_by(User.username)
85 .all()
86 )
87 ]
88
89 stats_svc = request.find_service(IDownloadStatService)
90
91 return {
92 "project": project,
93 "release": release,
94 "files": release.files.all(),
95 "all_releases": all_releases,
96 "maintainers": maintainers,
97 "download_stats": {
98 "daily": stats_svc.get_daily_stats(project.name),
99 "weekly": stats_svc.get_weekly_stats(project.name),
100 "monthly": stats_svc.get_monthly_stats(project.name),
101 },
102 }
103
104
105 @view_config(
106 route_name="packaging.file",
107 decorator=[
108 cache_control(365 * 24 * 60 * 60), # 1 year
109 ],
110 )
111 def packages(request):
112 # The amount of logic that we can do in this view is very limited, this
113 # view needs to be able to be handled by Fastly directly hitting S3 instead
114 # of actually hitting this view. This more or less means that we're limited
115 # to just setting headers and serving the actual file. In addition the
116 # headers that we can set, have to be able to be determined at file upload
117 # time instead of dynamically.
118
119 # Grab the path of the file that we're attempting to serve
120 path = request.matchdict["path"]
121
122 # We need to look up the File that is associated with this path, either the
123 # package path or the pgp path. If that doesn't exist then we'll bail out
124 # early with a 404.
125 try:
126 file_ = (
127 request.db.query(File)
128 .filter((File.path == path) | (File.pgp_path == path))
129 .one()
130 )
131 except NoResultFound:
132 raise HTTPNotFound from None
133
134 # If this request is for a PGP signature, and the file doesn't have a PGP
135 # signature, then we can go ahead and 404 now before hitting the file
136 # storage.
137 if path == file_.pgp_path and not file_.has_pgp_signature:
138 raise HTTPNotFound
139
140 # We also need to get the X-PyPI-Last-Serial for the project associated
141 # with this file. Bandersnatch (and other mirroring clients) will use this
142 # to determine what kind of action to take if the MD5 hash does not match
143 # what it expected.
144 # TODO: Get the X-PyPI-Last-Serial number for this.
145
146 # Try to open the file, streaming if possible, and if this file doesn't
147 # exist then we'll return a 404 error. However we'll log an error because
148 # if the database thinks we have a file, then a file should exist here.
149 try:
150 # TODO: We need to use mode="rb" here because this is a binary file
151 # and we don't want Python to attempt to decode it. However S3FS
152 # checks explicitly for mode="r-" to support streaming access.
153 # We need to get S3FS so that it support rb- as well as r-.
154 f = request.registry["filesystems"]["packages"].open(path, mode="rb")
155 except fs.errors.ResourceNotFoundError:
156 # TODO: Log an error here, this file doesn't exists for some reason,
157 # but it should because the database thinks it should.
158 raise HTTPNotFound from None
159
160 # If the path we're accessing is the path for the package itself, as
161 # opposed to the path for the signature, then we can include a
162 # Content-Length header.
163 content_length = None
164 if path == file_.path:
165 content_length = file_.size
166
167 return Response(
168 # If we have a wsgi.file_wrapper, we'll want to use that so that, if
169 # possible, this will use an optimized method of sending. Otherwise
170 # we'll just use Pyramid's FileIter as a fallback.
171 app_iter=request.environ.get("wsgi.file_wrapper", FileIter)(f),
172 # We use application/octet-stream instead of something nicer because
173 # different HTTP libraries will treat different combinations of
174 # Content-Type and Content-Encoding differently. The only thing that
175 # works sanely across all things without having something in the middle
176 # decide it can decompress the result to "help" the end user is with
177 # Content-Type: applicaton/octet-stream and no Content-Encoding.
178 content_type="application/octet-stream",
179 content_encoding=None,
180 # We need to specify an ETag for this response. Since ETags compared
181 # between URLs have no meaning (and thus, is safe for two URLs to share
182 # the same ETag) we will just use the MD5 hash of the package as our
183 # ETag.
184 etag=file_.md5_digest,
185 # Similarly to the ETag header, we'll just use the date that the file
186 # was uploaded as the Last-Modified header.
187 last_modified=file_.upload_time,
188 # If we have a Content-Length, we'll go ahead and use it here to
189 # hopefully enable the server and clients alike to be smarter about how
190 # they handle downloading this response.
191 content_length=content_length,
192 )
193
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -15,13 +15,14 @@
from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound
from pyramid.response import FileIter, Response
from pyramid.view import view_config
+from sqlalchemy import func
from sqlalchemy.orm.exc import NoResultFound
from warehouse.accounts.models import User
from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.interfaces import IDownloadStatService
-from warehouse.packaging.models import Release, File, Role
+from warehouse.packaging.models import Release, File, Role, JournalEntry
@view_config(
@@ -137,12 +138,6 @@
if path == file_.pgp_path and not file_.has_pgp_signature:
raise HTTPNotFound
- # We also need to get the X-PyPI-Last-Serial for the project associated
- # with this file. Bandersnatch (and other mirroring clients) will use this
- # to determine what kind of action to take if the MD5 hash does not match
- # what it expected.
- # TODO: Get the X-PyPI-Last-Serial number for this.
-
# Try to open the file, streaming if possible, and if this file doesn't
# exist then we'll return a 404 error. However we'll log an error because
# if the database thinks we have a file, then a file should exist here.
@@ -164,7 +159,7 @@
if path == file_.path:
content_length = file_.size
- return Response(
+ resp = Response(
# If we have a wsgi.file_wrapper, we'll want to use that so that, if
# possible, this will use an optimized method of sending. Otherwise
# we'll just use Pyramid's FileIter as a fallback.
@@ -190,3 +185,16 @@
# they handle downloading this response.
content_length=content_length,
)
+
+ # We also need to get the X-PyPI-Last-Serial for the project associated
+ # with this file. Bandersnatch (and other mirroring clients) will use this
+ # to determine what kind of action to take if the MD5 hash does not match
+ # what it expected.
+ serial = (
+ request.db.query(func.max(JournalEntry.id))
+ .filter(JournalEntry.name == file_.name)
+ .scalar()
+ )
+ resp.headers["X-PyPI-Last-Serial"] = serial or 0
+
+ return resp
|
{"golden_diff": "diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py\n--- a/warehouse/packaging/views.py\n+++ b/warehouse/packaging/views.py\n@@ -15,13 +15,14 @@\n from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound\n from pyramid.response import FileIter, Response\n from pyramid.view import view_config\n+from sqlalchemy import func\n from sqlalchemy.orm.exc import NoResultFound\n \n from warehouse.accounts.models import User\n from warehouse.cache.http import cache_control\n from warehouse.cache.origin import origin_cache\n from warehouse.packaging.interfaces import IDownloadStatService\n-from warehouse.packaging.models import Release, File, Role\n+from warehouse.packaging.models import Release, File, Role, JournalEntry\n \n \n @view_config(\n@@ -137,12 +138,6 @@\n if path == file_.pgp_path and not file_.has_pgp_signature:\n raise HTTPNotFound\n \n- # We also need to get the X-PyPI-Last-Serial for the project associated\n- # with this file. Bandersnatch (and other mirroring clients) will use this\n- # to determine what kind of action to take if the MD5 hash does not match\n- # what it expected.\n- # TODO: Get the X-PyPI-Last-Serial number for this.\n-\n # Try to open the file, streaming if possible, and if this file doesn't\n # exist then we'll return a 404 error. However we'll log an error because\n # if the database thinks we have a file, then a file should exist here.\n@@ -164,7 +159,7 @@\n if path == file_.path:\n content_length = file_.size\n \n- return Response(\n+ resp = Response(\n # If we have a wsgi.file_wrapper, we'll want to use that so that, if\n # possible, this will use an optimized method of sending. Otherwise\n # we'll just use Pyramid's FileIter as a fallback.\n@@ -190,3 +185,16 @@\n # they handle downloading this response.\n content_length=content_length,\n )\n+\n+ # We also need to get the X-PyPI-Last-Serial for the project associated\n+ # with this file. 
Bandersnatch (and other mirroring clients) will use this\n+ # to determine what kind of action to take if the MD5 hash does not match\n+ # what it expected.\n+ serial = (\n+ request.db.query(func.max(JournalEntry.id))\n+ .filter(JournalEntry.name == file_.name)\n+ .scalar()\n+ )\n+ resp.headers[\"X-PyPI-Last-Serial\"] = serial or 0\n+\n+ return resp\n", "issue": "Implement X-PyPI-Last-Serial for the File View\nThe view that serves files doesn't implement the `X-PyPI-Last-Serial` header, however bandersnatch needs that header, so we need to implement it.\n\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport fs.errors\n\nfrom pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound\nfrom pyramid.response import FileIter, Response\nfrom pyramid.view import view_config\nfrom sqlalchemy.orm.exc import NoResultFound\n\nfrom warehouse.accounts.models import User\nfrom warehouse.cache.http import cache_control\nfrom warehouse.cache.origin import origin_cache\nfrom warehouse.packaging.interfaces import IDownloadStatService\nfrom warehouse.packaging.models import Release, File, Role\n\n\n@view_config(\n route_name=\"packaging.project\",\n renderer=\"packaging/detail.html\",\n decorator=[\n cache_control(1 * 24 * 60 * 60), # 1 day\n origin_cache(7 * 24 * 60 * 60), # 7 days\n ],\n)\ndef project_detail(project, request):\n if project.name != request.matchdict.get(\"name\", project.name):\n return HTTPMovedPermanently(\n request.current_route_url(name=project.name),\n )\n\n try:\n release = project.releases.order_by(\n Release._pypi_ordering.desc()\n ).limit(1).one()\n except NoResultFound:\n raise HTTPNotFound from None\n\n return release_detail(release, request)\n\n\n@view_config(\n route_name=\"packaging.release\",\n renderer=\"packaging/detail.html\",\n decorator=[\n cache_control(7 * 24 * 60 * 60), # 7 days\n origin_cache(30 * 24 * 60 * 60), # 30 days\n ],\n)\ndef release_detail(release, request):\n project = release.project\n\n if project.name != request.matchdict.get(\"name\", project.name):\n return HTTPMovedPermanently(\n request.current_route_url(name=project.name),\n )\n\n # Get all of the registered versions for this Project, in order of newest\n # to oldest.\n all_releases = (\n project.releases\n .with_entities(Release.version, Release.created)\n .order_by(Release._pypi_ordering.desc())\n .all()\n )\n\n # Get all of the maintainers for this project.\n maintainers = [\n r.user\n for r in (\n request.db.query(Role)\n .join(User)\n .filter(Role.project == project)\n .distinct(User.username)\n .order_by(User.username)\n .all()\n )\n ]\n\n stats_svc = request.find_service(IDownloadStatService)\n\n return {\n \"project\": project,\n \"release\": release,\n \"files\": release.files.all(),\n \"all_releases\": all_releases,\n \"maintainers\": maintainers,\n \"download_stats\": {\n \"daily\": stats_svc.get_daily_stats(project.name),\n \"weekly\": stats_svc.get_weekly_stats(project.name),\n \"monthly\": stats_svc.get_monthly_stats(project.name),\n },\n 
}\n\n\n@view_config(\n route_name=\"packaging.file\",\n decorator=[\n cache_control(365 * 24 * 60 * 60), # 1 year\n ],\n)\ndef packages(request):\n # The amount of logic that we can do in this view is very limited, this\n # view needs to be able to be handled by Fastly directly hitting S3 instead\n # of actually hitting this view. This more or less means that we're limited\n # to just setting headers and serving the actual file. In addition the\n # headers that we can set, have to be able to be determined at file upload\n # time instead of dynamically.\n\n # Grab the path of the file that we're attempting to serve\n path = request.matchdict[\"path\"]\n\n # We need to look up the File that is associated with this path, either the\n # package path or the pgp path. If that doesn't exist then we'll bail out\n # early with a 404.\n try:\n file_ = (\n request.db.query(File)\n .filter((File.path == path) | (File.pgp_path == path))\n .one()\n )\n except NoResultFound:\n raise HTTPNotFound from None\n\n # If this request is for a PGP signature, and the file doesn't have a PGP\n # signature, then we can go ahead and 404 now before hitting the file\n # storage.\n if path == file_.pgp_path and not file_.has_pgp_signature:\n raise HTTPNotFound\n\n # We also need to get the X-PyPI-Last-Serial for the project associated\n # with this file. Bandersnatch (and other mirroring clients) will use this\n # to determine what kind of action to take if the MD5 hash does not match\n # what it expected.\n # TODO: Get the X-PyPI-Last-Serial number for this.\n\n # Try to open the file, streaming if possible, and if this file doesn't\n # exist then we'll return a 404 error. However we'll log an error because\n # if the database thinks we have a file, then a file should exist here.\n try:\n # TODO: We need to use mode=\"rb\" here because this is a binary file\n # and we don't want Python to attempt to decode it. However S3FS\n # checks explicitly for mode=\"r-\" to support streaming access.\n # We need to get S3FS so that it support rb- as well as r-.\n f = request.registry[\"filesystems\"][\"packages\"].open(path, mode=\"rb\")\n except fs.errors.ResourceNotFoundError:\n # TODO: Log an error here, this file doesn't exists for some reason,\n # but it should because the database thinks it should.\n raise HTTPNotFound from None\n\n # If the path we're accessing is the path for the package itself, as\n # opposed to the path for the signature, then we can include a\n # Content-Length header.\n content_length = None\n if path == file_.path:\n content_length = file_.size\n\n return Response(\n # If we have a wsgi.file_wrapper, we'll want to use that so that, if\n # possible, this will use an optimized method of sending. Otherwise\n # we'll just use Pyramid's FileIter as a fallback.\n app_iter=request.environ.get(\"wsgi.file_wrapper\", FileIter)(f),\n # We use application/octet-stream instead of something nicer because\n # different HTTP libraries will treat different combinations of\n # Content-Type and Content-Encoding differently. The only thing that\n # works sanely across all things without having something in the middle\n # decide it can decompress the result to \"help\" the end user is with\n # Content-Type: applicaton/octet-stream and no Content-Encoding.\n content_type=\"application/octet-stream\",\n content_encoding=None,\n # We need to specify an ETag for this response. 
Since ETags compared\n # between URLs have no meaning (and thus, is safe for two URLs to share\n # the same ETag) we will just use the MD5 hash of the package as our\n # ETag.\n etag=file_.md5_digest,\n # Similarly to the ETag header, we'll just use the date that the file\n # was uploaded as the Last-Modified header.\n last_modified=file_.upload_time,\n # If we have a Content-Length, we'll go ahead and use it here to\n # hopefully enable the server and clients alike to be smarter about how\n # they handle downloading this response.\n content_length=content_length,\n )\n", "path": "warehouse/packaging/views.py"}], "after_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport fs.errors\n\nfrom pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound\nfrom pyramid.response import FileIter, Response\nfrom pyramid.view import view_config\nfrom sqlalchemy import func\nfrom sqlalchemy.orm.exc import NoResultFound\n\nfrom warehouse.accounts.models import User\nfrom warehouse.cache.http import cache_control\nfrom warehouse.cache.origin import origin_cache\nfrom warehouse.packaging.interfaces import IDownloadStatService\nfrom warehouse.packaging.models import Release, File, Role, JournalEntry\n\n\n@view_config(\n route_name=\"packaging.project\",\n renderer=\"packaging/detail.html\",\n decorator=[\n cache_control(1 * 24 * 60 * 60), # 1 day\n origin_cache(7 * 24 * 60 * 60), # 7 days\n ],\n)\ndef project_detail(project, request):\n if project.name != request.matchdict.get(\"name\", project.name):\n return HTTPMovedPermanently(\n request.current_route_url(name=project.name),\n )\n\n try:\n release = project.releases.order_by(\n Release._pypi_ordering.desc()\n ).limit(1).one()\n except NoResultFound:\n raise HTTPNotFound from None\n\n return release_detail(release, request)\n\n\n@view_config(\n route_name=\"packaging.release\",\n renderer=\"packaging/detail.html\",\n decorator=[\n cache_control(7 * 24 * 60 * 60), # 7 days\n origin_cache(30 * 24 * 60 * 60), # 30 days\n ],\n)\ndef release_detail(release, request):\n project = release.project\n\n if project.name != request.matchdict.get(\"name\", project.name):\n return HTTPMovedPermanently(\n request.current_route_url(name=project.name),\n )\n\n # Get all of the registered versions for this Project, in order of newest\n # to oldest.\n all_releases = (\n project.releases\n .with_entities(Release.version, Release.created)\n .order_by(Release._pypi_ordering.desc())\n .all()\n )\n\n # Get all of the maintainers for this project.\n maintainers = [\n r.user\n for r in (\n request.db.query(Role)\n .join(User)\n .filter(Role.project == project)\n .distinct(User.username)\n .order_by(User.username)\n .all()\n )\n ]\n\n stats_svc = request.find_service(IDownloadStatService)\n\n return {\n \"project\": project,\n \"release\": release,\n \"files\": release.files.all(),\n \"all_releases\": all_releases,\n \"maintainers\": maintainers,\n \"download_stats\": {\n \"daily\": stats_svc.get_daily_stats(project.name),\n \"weekly\": 
stats_svc.get_weekly_stats(project.name),\n \"monthly\": stats_svc.get_monthly_stats(project.name),\n },\n }\n\n\n@view_config(\n route_name=\"packaging.file\",\n decorator=[\n cache_control(365 * 24 * 60 * 60), # 1 year\n ],\n)\ndef packages(request):\n # The amount of logic that we can do in this view is very limited, this\n # view needs to be able to be handled by Fastly directly hitting S3 instead\n # of actually hitting this view. This more or less means that we're limited\n # to just setting headers and serving the actual file. In addition the\n # headers that we can set, have to be able to be determined at file upload\n # time instead of dynamically.\n\n # Grab the path of the file that we're attempting to serve\n path = request.matchdict[\"path\"]\n\n # We need to look up the File that is associated with this path, either the\n # package path or the pgp path. If that doesn't exist then we'll bail out\n # early with a 404.\n try:\n file_ = (\n request.db.query(File)\n .filter((File.path == path) | (File.pgp_path == path))\n .one()\n )\n except NoResultFound:\n raise HTTPNotFound from None\n\n # If this request is for a PGP signature, and the file doesn't have a PGP\n # signature, then we can go ahead and 404 now before hitting the file\n # storage.\n if path == file_.pgp_path and not file_.has_pgp_signature:\n raise HTTPNotFound\n\n # Try to open the file, streaming if possible, and if this file doesn't\n # exist then we'll return a 404 error. However we'll log an error because\n # if the database thinks we have a file, then a file should exist here.\n try:\n # TODO: We need to use mode=\"rb\" here because this is a binary file\n # and we don't want Python to attempt to decode it. However S3FS\n # checks explicitly for mode=\"r-\" to support streaming access.\n # We need to get S3FS so that it support rb- as well as r-.\n f = request.registry[\"filesystems\"][\"packages\"].open(path, mode=\"rb\")\n except fs.errors.ResourceNotFoundError:\n # TODO: Log an error here, this file doesn't exists for some reason,\n # but it should because the database thinks it should.\n raise HTTPNotFound from None\n\n # If the path we're accessing is the path for the package itself, as\n # opposed to the path for the signature, then we can include a\n # Content-Length header.\n content_length = None\n if path == file_.path:\n content_length = file_.size\n\n resp = Response(\n # If we have a wsgi.file_wrapper, we'll want to use that so that, if\n # possible, this will use an optimized method of sending. Otherwise\n # we'll just use Pyramid's FileIter as a fallback.\n app_iter=request.environ.get(\"wsgi.file_wrapper\", FileIter)(f),\n # We use application/octet-stream instead of something nicer because\n # different HTTP libraries will treat different combinations of\n # Content-Type and Content-Encoding differently. The only thing that\n # works sanely across all things without having something in the middle\n # decide it can decompress the result to \"help\" the end user is with\n # Content-Type: applicaton/octet-stream and no Content-Encoding.\n content_type=\"application/octet-stream\",\n content_encoding=None,\n # We need to specify an ETag for this response. 
Since ETags compared\n # between URLs have no meaning (and thus, is safe for two URLs to share\n # the same ETag) we will just use the MD5 hash of the package as our\n # ETag.\n etag=file_.md5_digest,\n # Similarly to the ETag header, we'll just use the date that the file\n # was uploaded as the Last-Modified header.\n last_modified=file_.upload_time,\n # If we have a Content-Length, we'll go ahead and use it here to\n # hopefully enable the server and clients alike to be smarter about how\n # they handle downloading this response.\n content_length=content_length,\n )\n\n # We also need to get the X-PyPI-Last-Serial for the project associated\n # with this file. Bandersnatch (and other mirroring clients) will use this\n # to determine what kind of action to take if the MD5 hash does not match\n # what it expected.\n serial = (\n request.db.query(func.max(JournalEntry.id))\n .filter(JournalEntry.name == file_.name)\n .scalar()\n )\n resp.headers[\"X-PyPI-Last-Serial\"] = serial or 0\n\n return resp\n", "path": "warehouse/packaging/views.py"}]}
| 2,555 | 614 |
gh_patches_debug_27545
|
rasdani/github-patches
|
git_diff
|
encode__uvicorn-227
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error integrating with Channels if 'lifespan' is not specified in router
I'm not entirely sure if I should be posting this here or on `channels`.
I'm using v0.3.12 which I believe has already introduced the new `lifespan` protocol defined in asgiref. But this causes an error with `channels`' router
```bash
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/uvicorn/lifespan.py", line 29, in run
await self.asgi(self.receive, self.send)
File "/usr/local/lib/python3.6/site-packages/uvicorn/middleware/message_logger.py", line 51, in __call__
inner = self.app(self.scope)
File "/usr/local/lib/python3.6/site-packages/channels/routing.py", line 58, in __call__
raise ValueError("No application configured for scope type %r" % scope["type"])
ValueError: No application configured for scope type 'lifespan'
```
My `routing.py` file looks like this:
```python
application = ProtocolTypeRouter({
# Empty for now (http->django views is added by default)
'websocket': JWTWebsocketMiddleware(
URLRouter(urlpatterns)
)
})
```
**EDIT**: Sorry my workaround wasn't actually working as you'll need at least one `path` in the `URLRouter`, so I've removed it.
To temporarily get around this, I had to downgrade to `v0.3.9`.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `uvicorn/middleware/message_logger.py`
Content:
```
1 import logging
2
3 PLACEHOLDER_FORMAT = {
4 'body': '<{length} bytes>',
5 'bytes': '<{length} bytes>',
6 'text': '<{length} chars>',
7 'headers': '<...>',
8 }
9
10
11 def message_with_placeholders(message):
12 """
13 Return an ASGI message, with any body-type content omitted and replaced
14 with a placeholder.
15 """
16 new_message = message.copy()
17 for attr in PLACEHOLDER_FORMAT.keys():
18 if message.get(attr) is not None:
19 content = message[attr]
20 placeholder = PLACEHOLDER_FORMAT[attr].format(length=len(content))
21 new_message[attr] = placeholder
22 return new_message
23
24
25 class MessageLoggerMiddleware:
26 def __init__(self, app):
27 self.task_counter = 0
28 self.app = app
29 self.logger = logging.getLogger("uvicorn")
30
31 def __call__(self, scope):
32 self.task_counter += 1
33 return MessageLoggerResponder(scope, self.app, self.logger, self.task_counter)
34
35
36 class MessageLoggerResponder:
37 def __init__(self, scope, app, logger, task_counter):
38 self.scope = scope
39 self.app = app
40 self.logger = logger
41 self.task_counter = task_counter
42 self.client_addr = scope.get('client')
43
44 async def __call__(self, receive, send):
45 self._receive = receive
46 self._send = send
47 logged_scope = message_with_placeholders(self.scope)
48 log_text = '%s - ASGI [%d] Started %s'
49 self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)
50 try:
51 inner = self.app(self.scope)
52 await inner(self.receive, self.send)
53 except:
54 log_text = '%s - ASGI [%d] Raised exception'
55 self.logger.debug(log_text, self.client_addr, self.task_counter)
56 raise
57 else:
58 log_text = '%s - ASGI [%d] Completed'
59 self.logger.debug(log_text, self.client_addr, self.task_counter)
60
61 async def receive(self):
62 message = await self._receive()
63 logged_message = message_with_placeholders(message)
64 log_text = '%s - ASGI [%d] Sent %s'
65 self.logger.debug(log_text, self.client_addr, self.task_counter, logged_message)
66 return message
67
68 async def send(self, message):
69 logged_message = message_with_placeholders(message)
70 log_text = '%s - ASGI [%d] Received %s'
71 self.logger.debug(log_text, self.client_addr, self.task_counter, logged_message)
72 await self._send(message)
73
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/uvicorn/middleware/message_logger.py b/uvicorn/middleware/message_logger.py
--- a/uvicorn/middleware/message_logger.py
+++ b/uvicorn/middleware/message_logger.py
@@ -36,20 +36,27 @@
class MessageLoggerResponder:
def __init__(self, scope, app, logger, task_counter):
self.scope = scope
- self.app = app
self.logger = logger
self.task_counter = task_counter
self.client_addr = scope.get('client')
+ logged_scope = message_with_placeholders(scope)
+ log_text = '%s - ASGI [%d] Initialized %s'
+ self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)
+ try:
+ self.inner = app(scope)
+ except:
+ log_text = '%s - ASGI [%d] Raised exception'
+ self.logger.debug(log_text, self.client_addr, self.task_counter)
+ raise
+
async def __call__(self, receive, send):
self._receive = receive
self._send = send
- logged_scope = message_with_placeholders(self.scope)
- log_text = '%s - ASGI [%d] Started %s'
- self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)
+ log_text = '%s - ASGI [%d] Started task'
+ self.logger.debug(log_text, self.client_addr, self.task_counter)
try:
- inner = self.app(self.scope)
- await inner(self.receive, self.send)
+ await self.inner(self.receive, self.send)
except:
log_text = '%s - ASGI [%d] Raised exception'
self.logger.debug(log_text, self.client_addr, self.task_counter)
|
{"golden_diff": "diff --git a/uvicorn/middleware/message_logger.py b/uvicorn/middleware/message_logger.py\n--- a/uvicorn/middleware/message_logger.py\n+++ b/uvicorn/middleware/message_logger.py\n@@ -36,20 +36,27 @@\n class MessageLoggerResponder:\n def __init__(self, scope, app, logger, task_counter):\n self.scope = scope\n- self.app = app\n self.logger = logger\n self.task_counter = task_counter\n self.client_addr = scope.get('client')\n \n+ logged_scope = message_with_placeholders(scope)\n+ log_text = '%s - ASGI [%d] Initialized %s'\n+ self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)\n+ try:\n+ self.inner = app(scope)\n+ except:\n+ log_text = '%s - ASGI [%d] Raised exception'\n+ self.logger.debug(log_text, self.client_addr, self.task_counter)\n+ raise\n+\n async def __call__(self, receive, send):\n self._receive = receive\n self._send = send\n- logged_scope = message_with_placeholders(self.scope)\n- log_text = '%s - ASGI [%d] Started %s'\n- self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)\n+ log_text = '%s - ASGI [%d] Started task'\n+ self.logger.debug(log_text, self.client_addr, self.task_counter)\n try:\n- inner = self.app(self.scope)\n- await inner(self.receive, self.send)\n+ await self.inner(self.receive, self.send)\n except:\n log_text = '%s - ASGI [%d] Raised exception'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n", "issue": "Error integrating with Channels if 'lifespan' is not specified in router\nI'm not entirely sure if I should be posting this here or on `channels`.\r\n\r\nI'm using v0.3.12 which I believe has already introduced the new `lifespan` protocol defined in asgiref. But this causes an error with `channels`' router\r\n\r\n```bash\r\nTraceback (most recent call last):\r\n File \"/usr/local/lib/python3.6/site-packages/uvicorn/lifespan.py\", line 29, in run\r\n await self.asgi(self.receive, self.send)\r\n File \"/usr/local/lib/python3.6/site-packages/uvicorn/middleware/message_logger.py\", line 51, in __call__\r\n inner = self.app(self.scope)\r\n File \"/usr/local/lib/python3.6/site-packages/channels/routing.py\", line 58, in __call__\r\n raise ValueError(\"No application configured for scope type %r\" % scope[\"type\"])\r\nValueError: No application configured for scope type 'lifespan'\r\n```\r\n\r\nMy `routing.py` file looks like this:\r\n\r\n```python\r\napplication = ProtocolTypeRouter({\r\n # Empty for now (http->django views is added by default)\r\n 'websocket': JWTWebsocketMiddleware(\r\n URLRouter(urlpatterns)\r\n )\r\n})\r\n```\r\n\r\n**EDIT**: Sorry my workaround wasn't actually working as you'll need at least one `path` in the `URLRouter`, so I've removed it.\r\n\r\nTo temporarily get around this, I had to downgrade to `v0.3.9`.\n", "before_files": [{"content": "import logging\n\nPLACEHOLDER_FORMAT = {\n 'body': '<{length} bytes>',\n 'bytes': '<{length} bytes>',\n 'text': '<{length} chars>',\n 'headers': '<...>',\n}\n\n\ndef message_with_placeholders(message):\n \"\"\"\n Return an ASGI message, with any body-type content omitted and replaced\n with a placeholder.\n \"\"\"\n new_message = message.copy()\n for attr in PLACEHOLDER_FORMAT.keys():\n if message.get(attr) is not None:\n content = message[attr]\n placeholder = PLACEHOLDER_FORMAT[attr].format(length=len(content))\n new_message[attr] = placeholder\n return new_message\n\n\nclass MessageLoggerMiddleware:\n def __init__(self, app):\n self.task_counter = 0\n self.app = app\n self.logger = logging.getLogger(\"uvicorn\")\n\n 
def __call__(self, scope):\n self.task_counter += 1\n return MessageLoggerResponder(scope, self.app, self.logger, self.task_counter)\n\n\nclass MessageLoggerResponder:\n def __init__(self, scope, app, logger, task_counter):\n self.scope = scope\n self.app = app\n self.logger = logger\n self.task_counter = task_counter\n self.client_addr = scope.get('client')\n\n async def __call__(self, receive, send):\n self._receive = receive\n self._send = send\n logged_scope = message_with_placeholders(self.scope)\n log_text = '%s - ASGI [%d] Started %s'\n self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)\n try:\n inner = self.app(self.scope)\n await inner(self.receive, self.send)\n except:\n log_text = '%s - ASGI [%d] Raised exception'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n raise\n else:\n log_text = '%s - ASGI [%d] Completed'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n\n async def receive(self):\n message = await self._receive()\n logged_message = message_with_placeholders(message)\n log_text = '%s - ASGI [%d] Sent %s'\n self.logger.debug(log_text, self.client_addr, self.task_counter, logged_message)\n return message\n\n async def send(self, message):\n logged_message = message_with_placeholders(message)\n log_text = '%s - ASGI [%d] Received %s'\n self.logger.debug(log_text, self.client_addr, self.task_counter, logged_message)\n await self._send(message)\n", "path": "uvicorn/middleware/message_logger.py"}], "after_files": [{"content": "import logging\n\nPLACEHOLDER_FORMAT = {\n 'body': '<{length} bytes>',\n 'bytes': '<{length} bytes>',\n 'text': '<{length} chars>',\n 'headers': '<...>',\n}\n\n\ndef message_with_placeholders(message):\n \"\"\"\n Return an ASGI message, with any body-type content omitted and replaced\n with a placeholder.\n \"\"\"\n new_message = message.copy()\n for attr in PLACEHOLDER_FORMAT.keys():\n if message.get(attr) is not None:\n content = message[attr]\n placeholder = PLACEHOLDER_FORMAT[attr].format(length=len(content))\n new_message[attr] = placeholder\n return new_message\n\n\nclass MessageLoggerMiddleware:\n def __init__(self, app):\n self.task_counter = 0\n self.app = app\n self.logger = logging.getLogger(\"uvicorn\")\n\n def __call__(self, scope):\n self.task_counter += 1\n return MessageLoggerResponder(scope, self.app, self.logger, self.task_counter)\n\n\nclass MessageLoggerResponder:\n def __init__(self, scope, app, logger, task_counter):\n self.scope = scope\n self.logger = logger\n self.task_counter = task_counter\n self.client_addr = scope.get('client')\n\n logged_scope = message_with_placeholders(scope)\n log_text = '%s - ASGI [%d] Initialized %s'\n self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)\n try:\n self.inner = app(scope)\n except:\n log_text = '%s - ASGI [%d] Raised exception'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n raise\n\n async def __call__(self, receive, send):\n self._receive = receive\n self._send = send\n log_text = '%s - ASGI [%d] Started task'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n try:\n await self.inner(self.receive, self.send)\n except:\n log_text = '%s - ASGI [%d] Raised exception'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n raise\n else:\n log_text = '%s - ASGI [%d] Completed'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n\n async def receive(self):\n message = await self._receive()\n logged_message = 
message_with_placeholders(message)\n log_text = '%s - ASGI [%d] Sent %s'\n self.logger.debug(log_text, self.client_addr, self.task_counter, logged_message)\n return message\n\n async def send(self, message):\n logged_message = message_with_placeholders(message)\n log_text = '%s - ASGI [%d] Received %s'\n self.logger.debug(log_text, self.client_addr, self.task_counter, logged_message)\n await self._send(message)\n", "path": "uvicorn/middleware/message_logger.py"}]}
| 1,285 | 389 |
gh_patches_debug_25857
|
rasdani/github-patches
|
git_diff
|
certbot__certbot-1534
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Implement --manual-public-ip-logging-ok
Not having such flags slows down my development considerably.
https://github.com/letsencrypt/letsencrypt/pull/1125#issuecomment-150897419
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `letsencrypt/plugins/manual.py`
Content:
```
1 """Manual plugin."""
2 import os
3 import logging
4 import pipes
5 import shutil
6 import signal
7 import socket
8 import subprocess
9 import sys
10 import tempfile
11 import time
12
13 import zope.component
14 import zope.interface
15
16 from acme import challenges
17
18 from letsencrypt import errors
19 from letsencrypt import interfaces
20 from letsencrypt.plugins import common
21
22
23 logger = logging.getLogger(__name__)
24
25
26 class Authenticator(common.Plugin):
27 """Manual Authenticator.
28
29 This plugin requires user's manual intervention in setting up a HTTP
30 server for solving http-01 challenges and thus does not need to be
31 run as a privileged process. Alternatively shows instructions on how
32 to use Python's built-in HTTP server.
33
34 .. todo:: Support for `~.challenges.TLSSNI01`.
35
36 """
37 zope.interface.implements(interfaces.IAuthenticator)
38 zope.interface.classProvides(interfaces.IPluginFactory)
39 hidden = True
40
41 description = "Manually configure an HTTP server"
42
43 MESSAGE_TEMPLATE = """\
44 Make sure your web server displays the following content at
45 {uri} before continuing:
46
47 {validation}
48
49 Content-Type header MUST be set to {ct}.
50
51 If you don't have HTTP server configured, you can run the following
52 command on the target server (as root):
53
54 {command}
55 """
56
57 # a disclaimer about your current IP being transmitted to Let's Encrypt's servers.
58 IP_DISCLAIMER = """\
59 NOTE: The IP of this machine will be publicly logged as having requested this certificate. \
60 If you're running letsencrypt in manual mode on a machine that is not your server, \
61 please ensure you're okay with that.
62
63 Are you OK with your IP being logged?
64 """
65
66 # "cd /tmp/letsencrypt" makes sure user doesn't serve /root,
67 # separate "public_html" ensures that cert.pem/key.pem are not
68 # served and makes it more obvious that Python command will serve
69 # anything recursively under the cwd
70
71 CMD_TEMPLATE = """\
72 mkdir -p {root}/public_html/{achall.URI_ROOT_PATH}
73 cd {root}/public_html
74 printf "%s" {validation} > {achall.URI_ROOT_PATH}/{encoded_token}
75 # run only once per server:
76 $(command -v python2 || command -v python2.7 || command -v python2.6) -c \\
77 "import BaseHTTPServer, SimpleHTTPServer; \\
78 SimpleHTTPServer.SimpleHTTPRequestHandler.extensions_map = {{'': '{ct}'}}; \\
79 s = BaseHTTPServer.HTTPServer(('', {port}), SimpleHTTPServer.SimpleHTTPRequestHandler); \\
80 s.serve_forever()" """
81 """Command template."""
82
83 def __init__(self, *args, **kwargs):
84 super(Authenticator, self).__init__(*args, **kwargs)
85 self._root = (tempfile.mkdtemp() if self.conf("test-mode")
86 else "/tmp/letsencrypt")
87 self._httpd = None
88
89 @classmethod
90 def add_parser_arguments(cls, add):
91 add("test-mode", action="store_true",
92 help="Test mode. Executes the manual command in subprocess.")
93
94 def prepare(self): # pylint: disable=missing-docstring,no-self-use
95 pass # pragma: no cover
96
97 def more_info(self): # pylint: disable=missing-docstring,no-self-use
98 return ("This plugin requires user's manual intervention in setting "
99 "up an HTTP server for solving http-01 challenges and thus "
100 "does not need to be run as a privileged process. "
101 "Alternatively shows instructions on how to use Python's "
102 "built-in HTTP server.")
103
104 def get_chall_pref(self, domain):
105 # pylint: disable=missing-docstring,no-self-use,unused-argument
106 return [challenges.HTTP01]
107
108 def perform(self, achalls): # pylint: disable=missing-docstring
109 responses = []
110 # TODO: group achalls by the same socket.gethostbyname(_ex)
111 # and prompt only once per server (one "echo -n" per domain)
112 for achall in achalls:
113 responses.append(self._perform_single(achall))
114 return responses
115
116 @classmethod
117 def _test_mode_busy_wait(cls, port):
118 while True:
119 time.sleep(1)
120 sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
121 try:
122 sock.connect(("localhost", port))
123 except socket.error: # pragma: no cover
124 pass
125 else:
126 break
127 finally:
128 sock.close()
129
130 def _perform_single(self, achall):
131 # same path for each challenge response would be easier for
132 # users, but will not work if multiple domains point at the
133 # same server: default command doesn't support virtual hosts
134 response, validation = achall.response_and_validation()
135
136 port = (response.port if self.config.http01_port is None
137 else int(self.config.http01_port))
138 command = self.CMD_TEMPLATE.format(
139 root=self._root, achall=achall, response=response,
140 # TODO(kuba): pipes still necessary?
141 validation=pipes.quote(validation),
142 encoded_token=achall.chall.encode("token"),
143 ct=achall.CONTENT_TYPE, port=port)
144 if self.conf("test-mode"):
145 logger.debug("Test mode. Executing the manual command: %s", command)
146 # sh shipped with OS X does't support echo -n, but supports printf
147 try:
148 self._httpd = subprocess.Popen(
149 command,
150 # don't care about setting stdout and stderr,
151 # we're in test mode anyway
152 shell=True,
153 executable=None,
154 # "preexec_fn" is UNIX specific, but so is "command"
155 preexec_fn=os.setsid)
156 except OSError as error: # ValueError should not happen!
157 logger.debug(
158 "Couldn't execute manual command: %s", error, exc_info=True)
159 return False
160 logger.debug("Manual command running as PID %s.", self._httpd.pid)
161 # give it some time to bootstrap, before we try to verify
162 # (cert generation in case of simpleHttpS might take time)
163 self._test_mode_busy_wait(port)
164 if self._httpd.poll() is not None:
165 raise errors.Error("Couldn't execute manual command")
166 else:
167 if not zope.component.getUtility(interfaces.IDisplay).yesno(
168 self.IP_DISCLAIMER, "Yes", "No"):
169 raise errors.PluginError("Must agree to IP logging to proceed")
170
171 self._notify_and_wait(self.MESSAGE_TEMPLATE.format(
172 validation=validation, response=response,
173 uri=achall.chall.uri(achall.domain),
174 ct=achall.CONTENT_TYPE, command=command))
175
176 if response.simple_verify(
177 achall.chall, achall.domain,
178 achall.account_key.public_key(), self.config.http01_port):
179 return response
180 else:
181 logger.error(
182 "Self-verify of challenge failed, authorization abandoned.")
183 if self.conf("test-mode") and self._httpd.poll() is not None:
184 # simply verify cause command failure...
185 return False
186 return None
187
188 def _notify_and_wait(self, message): # pylint: disable=no-self-use
189 # TODO: IDisplay wraps messages, breaking the command
190 #answer = zope.component.getUtility(interfaces.IDisplay).notification(
191 # message=message, height=25, pause=True)
192 sys.stdout.write(message)
193 raw_input("Press ENTER to continue")
194
195 def cleanup(self, achalls):
196 # pylint: disable=missing-docstring,no-self-use,unused-argument
197 if self.conf("test-mode"):
198 assert self._httpd is not None, (
199 "cleanup() must be called after perform()")
200 if self._httpd.poll() is None:
201 logger.debug("Terminating manual command process")
202 os.killpg(self._httpd.pid, signal.SIGTERM)
203 else:
204 logger.debug("Manual command process already terminated "
205 "with %s code", self._httpd.returncode)
206 shutil.rmtree(self._root)
207
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/letsencrypt/plugins/manual.py b/letsencrypt/plugins/manual.py
--- a/letsencrypt/plugins/manual.py
+++ b/letsencrypt/plugins/manual.py
@@ -90,6 +90,8 @@
def add_parser_arguments(cls, add):
add("test-mode", action="store_true",
help="Test mode. Executes the manual command in subprocess.")
+ add("public-ip-logging-ok", action="store_true",
+ help="Automatically allows public IP logging.")
def prepare(self): # pylint: disable=missing-docstring,no-self-use
pass # pragma: no cover
@@ -164,9 +166,10 @@
if self._httpd.poll() is not None:
raise errors.Error("Couldn't execute manual command")
else:
- if not zope.component.getUtility(interfaces.IDisplay).yesno(
- self.IP_DISCLAIMER, "Yes", "No"):
- raise errors.PluginError("Must agree to IP logging to proceed")
+ if not self.conf("public-ip-logging-ok"):
+ if not zope.component.getUtility(interfaces.IDisplay).yesno(
+ self.IP_DISCLAIMER, "Yes", "No"):
+ raise errors.PluginError("Must agree to IP logging to proceed")
self._notify_and_wait(self.MESSAGE_TEMPLATE.format(
validation=validation, response=response,
|
{"golden_diff": "diff --git a/letsencrypt/plugins/manual.py b/letsencrypt/plugins/manual.py\n--- a/letsencrypt/plugins/manual.py\n+++ b/letsencrypt/plugins/manual.py\n@@ -90,6 +90,8 @@\n def add_parser_arguments(cls, add):\n add(\"test-mode\", action=\"store_true\",\n help=\"Test mode. Executes the manual command in subprocess.\")\n+ add(\"public-ip-logging-ok\", action=\"store_true\",\n+ help=\"Automatically allows public IP logging.\")\n \n def prepare(self): # pylint: disable=missing-docstring,no-self-use\n pass # pragma: no cover\n@@ -164,9 +166,10 @@\n if self._httpd.poll() is not None:\n raise errors.Error(\"Couldn't execute manual command\")\n else:\n- if not zope.component.getUtility(interfaces.IDisplay).yesno(\n- self.IP_DISCLAIMER, \"Yes\", \"No\"):\n- raise errors.PluginError(\"Must agree to IP logging to proceed\")\n+ if not self.conf(\"public-ip-logging-ok\"):\n+ if not zope.component.getUtility(interfaces.IDisplay).yesno(\n+ self.IP_DISCLAIMER, \"Yes\", \"No\"):\n+ raise errors.PluginError(\"Must agree to IP logging to proceed\")\n \n self._notify_and_wait(self.MESSAGE_TEMPLATE.format(\n validation=validation, response=response,\n", "issue": "Implement --manual-public-ip-logging-ok\nNot having such flags slows down my development considerably.\n\nhttps://github.com/letsencrypt/letsencrypt/pull/1125#issuecomment-150897419\n\n", "before_files": [{"content": "\"\"\"Manual plugin.\"\"\"\nimport os\nimport logging\nimport pipes\nimport shutil\nimport signal\nimport socket\nimport subprocess\nimport sys\nimport tempfile\nimport time\n\nimport zope.component\nimport zope.interface\n\nfrom acme import challenges\n\nfrom letsencrypt import errors\nfrom letsencrypt import interfaces\nfrom letsencrypt.plugins import common\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass Authenticator(common.Plugin):\n \"\"\"Manual Authenticator.\n\n This plugin requires user's manual intervention in setting up a HTTP\n server for solving http-01 challenges and thus does not need to be\n run as a privileged process. Alternatively shows instructions on how\n to use Python's built-in HTTP server.\n\n .. todo:: Support for `~.challenges.TLSSNI01`.\n\n \"\"\"\n zope.interface.implements(interfaces.IAuthenticator)\n zope.interface.classProvides(interfaces.IPluginFactory)\n hidden = True\n\n description = \"Manually configure an HTTP server\"\n\n MESSAGE_TEMPLATE = \"\"\"\\\nMake sure your web server displays the following content at\n{uri} before continuing:\n\n{validation}\n\nContent-Type header MUST be set to {ct}.\n\nIf you don't have HTTP server configured, you can run the following\ncommand on the target server (as root):\n\n{command}\n\"\"\"\n\n # a disclaimer about your current IP being transmitted to Let's Encrypt's servers.\n IP_DISCLAIMER = \"\"\"\\\nNOTE: The IP of this machine will be publicly logged as having requested this certificate. 
\\\nIf you're running letsencrypt in manual mode on a machine that is not your server, \\\nplease ensure you're okay with that.\n\nAre you OK with your IP being logged?\n\"\"\"\n\n # \"cd /tmp/letsencrypt\" makes sure user doesn't serve /root,\n # separate \"public_html\" ensures that cert.pem/key.pem are not\n # served and makes it more obvious that Python command will serve\n # anything recursively under the cwd\n\n CMD_TEMPLATE = \"\"\"\\\nmkdir -p {root}/public_html/{achall.URI_ROOT_PATH}\ncd {root}/public_html\nprintf \"%s\" {validation} > {achall.URI_ROOT_PATH}/{encoded_token}\n# run only once per server:\n$(command -v python2 || command -v python2.7 || command -v python2.6) -c \\\\\n\"import BaseHTTPServer, SimpleHTTPServer; \\\\\nSimpleHTTPServer.SimpleHTTPRequestHandler.extensions_map = {{'': '{ct}'}}; \\\\\ns = BaseHTTPServer.HTTPServer(('', {port}), SimpleHTTPServer.SimpleHTTPRequestHandler); \\\\\ns.serve_forever()\" \"\"\"\n \"\"\"Command template.\"\"\"\n\n def __init__(self, *args, **kwargs):\n super(Authenticator, self).__init__(*args, **kwargs)\n self._root = (tempfile.mkdtemp() if self.conf(\"test-mode\")\n else \"/tmp/letsencrypt\")\n self._httpd = None\n\n @classmethod\n def add_parser_arguments(cls, add):\n add(\"test-mode\", action=\"store_true\",\n help=\"Test mode. Executes the manual command in subprocess.\")\n\n def prepare(self): # pylint: disable=missing-docstring,no-self-use\n pass # pragma: no cover\n\n def more_info(self): # pylint: disable=missing-docstring,no-self-use\n return (\"This plugin requires user's manual intervention in setting \"\n \"up an HTTP server for solving http-01 challenges and thus \"\n \"does not need to be run as a privileged process. \"\n \"Alternatively shows instructions on how to use Python's \"\n \"built-in HTTP server.\")\n\n def get_chall_pref(self, domain):\n # pylint: disable=missing-docstring,no-self-use,unused-argument\n return [challenges.HTTP01]\n\n def perform(self, achalls): # pylint: disable=missing-docstring\n responses = []\n # TODO: group achalls by the same socket.gethostbyname(_ex)\n # and prompt only once per server (one \"echo -n\" per domain)\n for achall in achalls:\n responses.append(self._perform_single(achall))\n return responses\n\n @classmethod\n def _test_mode_busy_wait(cls, port):\n while True:\n time.sleep(1)\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n try:\n sock.connect((\"localhost\", port))\n except socket.error: # pragma: no cover\n pass\n else:\n break\n finally:\n sock.close()\n\n def _perform_single(self, achall):\n # same path for each challenge response would be easier for\n # users, but will not work if multiple domains point at the\n # same server: default command doesn't support virtual hosts\n response, validation = achall.response_and_validation()\n\n port = (response.port if self.config.http01_port is None\n else int(self.config.http01_port))\n command = self.CMD_TEMPLATE.format(\n root=self._root, achall=achall, response=response,\n # TODO(kuba): pipes still necessary?\n validation=pipes.quote(validation),\n encoded_token=achall.chall.encode(\"token\"),\n ct=achall.CONTENT_TYPE, port=port)\n if self.conf(\"test-mode\"):\n logger.debug(\"Test mode. 
Executing the manual command: %s\", command)\n # sh shipped with OS X does't support echo -n, but supports printf\n try:\n self._httpd = subprocess.Popen(\n command,\n # don't care about setting stdout and stderr,\n # we're in test mode anyway\n shell=True,\n executable=None,\n # \"preexec_fn\" is UNIX specific, but so is \"command\"\n preexec_fn=os.setsid)\n except OSError as error: # ValueError should not happen!\n logger.debug(\n \"Couldn't execute manual command: %s\", error, exc_info=True)\n return False\n logger.debug(\"Manual command running as PID %s.\", self._httpd.pid)\n # give it some time to bootstrap, before we try to verify\n # (cert generation in case of simpleHttpS might take time)\n self._test_mode_busy_wait(port)\n if self._httpd.poll() is not None:\n raise errors.Error(\"Couldn't execute manual command\")\n else:\n if not zope.component.getUtility(interfaces.IDisplay).yesno(\n self.IP_DISCLAIMER, \"Yes\", \"No\"):\n raise errors.PluginError(\"Must agree to IP logging to proceed\")\n\n self._notify_and_wait(self.MESSAGE_TEMPLATE.format(\n validation=validation, response=response,\n uri=achall.chall.uri(achall.domain),\n ct=achall.CONTENT_TYPE, command=command))\n\n if response.simple_verify(\n achall.chall, achall.domain,\n achall.account_key.public_key(), self.config.http01_port):\n return response\n else:\n logger.error(\n \"Self-verify of challenge failed, authorization abandoned.\")\n if self.conf(\"test-mode\") and self._httpd.poll() is not None:\n # simply verify cause command failure...\n return False\n return None\n\n def _notify_and_wait(self, message): # pylint: disable=no-self-use\n # TODO: IDisplay wraps messages, breaking the command\n #answer = zope.component.getUtility(interfaces.IDisplay).notification(\n # message=message, height=25, pause=True)\n sys.stdout.write(message)\n raw_input(\"Press ENTER to continue\")\n\n def cleanup(self, achalls):\n # pylint: disable=missing-docstring,no-self-use,unused-argument\n if self.conf(\"test-mode\"):\n assert self._httpd is not None, (\n \"cleanup() must be called after perform()\")\n if self._httpd.poll() is None:\n logger.debug(\"Terminating manual command process\")\n os.killpg(self._httpd.pid, signal.SIGTERM)\n else:\n logger.debug(\"Manual command process already terminated \"\n \"with %s code\", self._httpd.returncode)\n shutil.rmtree(self._root)\n", "path": "letsencrypt/plugins/manual.py"}], "after_files": [{"content": "\"\"\"Manual plugin.\"\"\"\nimport os\nimport logging\nimport pipes\nimport shutil\nimport signal\nimport socket\nimport subprocess\nimport sys\nimport tempfile\nimport time\n\nimport zope.component\nimport zope.interface\n\nfrom acme import challenges\n\nfrom letsencrypt import errors\nfrom letsencrypt import interfaces\nfrom letsencrypt.plugins import common\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass Authenticator(common.Plugin):\n \"\"\"Manual Authenticator.\n\n This plugin requires user's manual intervention in setting up a HTTP\n server for solving http-01 challenges and thus does not need to be\n run as a privileged process. Alternatively shows instructions on how\n to use Python's built-in HTTP server.\n\n .. 
todo:: Support for `~.challenges.TLSSNI01`.\n\n \"\"\"\n zope.interface.implements(interfaces.IAuthenticator)\n zope.interface.classProvides(interfaces.IPluginFactory)\n hidden = True\n\n description = \"Manually configure an HTTP server\"\n\n MESSAGE_TEMPLATE = \"\"\"\\\nMake sure your web server displays the following content at\n{uri} before continuing:\n\n{validation}\n\nContent-Type header MUST be set to {ct}.\n\nIf you don't have HTTP server configured, you can run the following\ncommand on the target server (as root):\n\n{command}\n\"\"\"\n\n # a disclaimer about your current IP being transmitted to Let's Encrypt's servers.\n IP_DISCLAIMER = \"\"\"\\\nNOTE: The IP of this machine will be publicly logged as having requested this certificate. \\\nIf you're running letsencrypt in manual mode on a machine that is not your server, \\\nplease ensure you're okay with that.\n\nAre you OK with your IP being logged?\n\"\"\"\n\n # \"cd /tmp/letsencrypt\" makes sure user doesn't serve /root,\n # separate \"public_html\" ensures that cert.pem/key.pem are not\n # served and makes it more obvious that Python command will serve\n # anything recursively under the cwd\n\n CMD_TEMPLATE = \"\"\"\\\nmkdir -p {root}/public_html/{achall.URI_ROOT_PATH}\ncd {root}/public_html\nprintf \"%s\" {validation} > {achall.URI_ROOT_PATH}/{encoded_token}\n# run only once per server:\n$(command -v python2 || command -v python2.7 || command -v python2.6) -c \\\\\n\"import BaseHTTPServer, SimpleHTTPServer; \\\\\nSimpleHTTPServer.SimpleHTTPRequestHandler.extensions_map = {{'': '{ct}'}}; \\\\\ns = BaseHTTPServer.HTTPServer(('', {port}), SimpleHTTPServer.SimpleHTTPRequestHandler); \\\\\ns.serve_forever()\" \"\"\"\n \"\"\"Command template.\"\"\"\n\n def __init__(self, *args, **kwargs):\n super(Authenticator, self).__init__(*args, **kwargs)\n self._root = (tempfile.mkdtemp() if self.conf(\"test-mode\")\n else \"/tmp/letsencrypt\")\n self._httpd = None\n\n @classmethod\n def add_parser_arguments(cls, add):\n add(\"test-mode\", action=\"store_true\",\n help=\"Test mode. Executes the manual command in subprocess.\")\n add(\"public-ip-logging-ok\", action=\"store_true\",\n help=\"Automatically allows public IP logging.\")\n\n def prepare(self): # pylint: disable=missing-docstring,no-self-use\n pass # pragma: no cover\n\n def more_info(self): # pylint: disable=missing-docstring,no-self-use\n return (\"This plugin requires user's manual intervention in setting \"\n \"up an HTTP server for solving http-01 challenges and thus \"\n \"does not need to be run as a privileged process. 
\"\n \"Alternatively shows instructions on how to use Python's \"\n \"built-in HTTP server.\")\n\n def get_chall_pref(self, domain):\n # pylint: disable=missing-docstring,no-self-use,unused-argument\n return [challenges.HTTP01]\n\n def perform(self, achalls): # pylint: disable=missing-docstring\n responses = []\n # TODO: group achalls by the same socket.gethostbyname(_ex)\n # and prompt only once per server (one \"echo -n\" per domain)\n for achall in achalls:\n responses.append(self._perform_single(achall))\n return responses\n\n @classmethod\n def _test_mode_busy_wait(cls, port):\n while True:\n time.sleep(1)\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n try:\n sock.connect((\"localhost\", port))\n except socket.error: # pragma: no cover\n pass\n else:\n break\n finally:\n sock.close()\n\n def _perform_single(self, achall):\n # same path for each challenge response would be easier for\n # users, but will not work if multiple domains point at the\n # same server: default command doesn't support virtual hosts\n response, validation = achall.response_and_validation()\n\n port = (response.port if self.config.http01_port is None\n else int(self.config.http01_port))\n command = self.CMD_TEMPLATE.format(\n root=self._root, achall=achall, response=response,\n # TODO(kuba): pipes still necessary?\n validation=pipes.quote(validation),\n encoded_token=achall.chall.encode(\"token\"),\n ct=achall.CONTENT_TYPE, port=port)\n if self.conf(\"test-mode\"):\n logger.debug(\"Test mode. Executing the manual command: %s\", command)\n # sh shipped with OS X does't support echo -n, but supports printf\n try:\n self._httpd = subprocess.Popen(\n command,\n # don't care about setting stdout and stderr,\n # we're in test mode anyway\n shell=True,\n executable=None,\n # \"preexec_fn\" is UNIX specific, but so is \"command\"\n preexec_fn=os.setsid)\n except OSError as error: # ValueError should not happen!\n logger.debug(\n \"Couldn't execute manual command: %s\", error, exc_info=True)\n return False\n logger.debug(\"Manual command running as PID %s.\", self._httpd.pid)\n # give it some time to bootstrap, before we try to verify\n # (cert generation in case of simpleHttpS might take time)\n self._test_mode_busy_wait(port)\n if self._httpd.poll() is not None:\n raise errors.Error(\"Couldn't execute manual command\")\n else:\n if not self.conf(\"public-ip-logging-ok\"):\n if not zope.component.getUtility(interfaces.IDisplay).yesno(\n self.IP_DISCLAIMER, \"Yes\", \"No\"):\n raise errors.PluginError(\"Must agree to IP logging to proceed\")\n\n self._notify_and_wait(self.MESSAGE_TEMPLATE.format(\n validation=validation, response=response,\n uri=achall.chall.uri(achall.domain),\n ct=achall.CONTENT_TYPE, command=command))\n\n if response.simple_verify(\n achall.chall, achall.domain,\n achall.account_key.public_key(), self.config.http01_port):\n return response\n else:\n logger.error(\n \"Self-verify of challenge failed, authorization abandoned.\")\n if self.conf(\"test-mode\") and self._httpd.poll() is not None:\n # simply verify cause command failure...\n return False\n return None\n\n def _notify_and_wait(self, message): # pylint: disable=no-self-use\n # TODO: IDisplay wraps messages, breaking the command\n #answer = zope.component.getUtility(interfaces.IDisplay).notification(\n # message=message, height=25, pause=True)\n sys.stdout.write(message)\n raw_input(\"Press ENTER to continue\")\n\n def cleanup(self, achalls):\n # pylint: disable=missing-docstring,no-self-use,unused-argument\n if 
self.conf(\"test-mode\"):\n assert self._httpd is not None, (\n \"cleanup() must be called after perform()\")\n if self._httpd.poll() is None:\n logger.debug(\"Terminating manual command process\")\n os.killpg(self._httpd.pid, signal.SIGTERM)\n else:\n logger.debug(\"Manual command process already terminated \"\n \"with %s code\", self._httpd.returncode)\n shutil.rmtree(self._root)\n", "path": "letsencrypt/plugins/manual.py"}]}
| 2,560 | 301 |
gh_patches_debug_25872 | rasdani/github-patches | git_diff | google__clusterfuzz-3140 |
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Make Centipede handle custom ASAN options that libFuzzer supports
See https://bugs.chromium.org/p/chromium/issues/detail?id=1451080#c4
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/clusterfuzz/_internal/bot/fuzzers/centipede/engine.py`
Content:
```
1 # Copyright 2022 Google LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 """Centipede engine interface."""
15
16 from collections import namedtuple
17 import os
18 import pathlib
19 import re
20 import shutil
21
22 from clusterfuzz._internal.bot.fuzzers import dictionary_manager
23 from clusterfuzz._internal.bot.fuzzers import engine_common
24 from clusterfuzz._internal.bot.fuzzers import utils as fuzzer_utils
25 from clusterfuzz._internal.metrics import logs
26 from clusterfuzz._internal.system import environment
27 from clusterfuzz._internal.system import new_process
28 from clusterfuzz.fuzz import engine
29
30 _CLEAN_EXIT_SECS = 10
31 _SERVER_COUNT = 1
32 _RSS_LIMIT = 4096
33 _ADDRESS_SPACE_LIMIT = 4096
34 _TIMEOUT_PER_INPUT_FUZZ = 25
35 _TIMEOUT_PER_INPUT_REPR = 60
36 _DEFAULT_ARGUMENTS = [
37 '--exit_on_crash=1',
38 f'--fork_server={_SERVER_COUNT}',
39 f'--rss_limit_mb={_RSS_LIMIT}',
40 f'--address_space_limit_mb={_ADDRESS_SPACE_LIMIT}',
41 ]
42
43 CRASH_REGEX = re.compile(r'[sS]aving input to:?\s*(.*)')
44 _CRASH_LOG_PREFIX = 'CRASH LOG: '
45 TargetBinaries = namedtuple('TargetBinaries', ['unsanitized', 'sanitized'])
46
47
48 class CentipedeError(Exception):
49 """Base exception class."""
50
51
52 def _get_runner(target_path):
53 """Gets the Centipede runner."""
54 centipede_path = pathlib.Path(target_path).parent / 'centipede'
55 if not centipede_path.exists():
56 raise CentipedeError('Centipede not found in build')
57
58 centipede_path = str(centipede_path)
59 if environment.get_value('USE_UNSHARE'):
60 return new_process.UnicodeModifierRunner(centipede_path)
61 return new_process.UnicodeProcessRunner(centipede_path)
62
63
64 def _get_reproducer_path(log, reproducers_dir):
65 """Gets the reproducer path, if any."""
66 crash_match = CRASH_REGEX.search(log)
67 if not crash_match:
68 return None
69 tmp_crash_path = pathlib.Path(crash_match.group(1))
70 crash_path = pathlib.Path(reproducers_dir) / tmp_crash_path.name
71 shutil.copy(tmp_crash_path, crash_path)
72 return crash_path
73
74
75 class Engine(engine.Engine):
76 """Centipede engine implementation."""
77
78 @property
79 def name(self):
80 return 'centipede'
81
82 # pylint: disable=unused-argument
83 def prepare(self, corpus_dir, target_path, build_dir):
84 """Prepares for a fuzzing session, by generating options.
85
86 Args:
87 corpus_dir: The main corpus directory.
88 target_path: Path to the target.
89 build_dir: Path to the build directory.
90
91 Returns:
92 A FuzzOptions object.
93 """
94 arguments = []
95 dict_path = pathlib.Path(
96 dictionary_manager.get_default_dictionary_path(target_path))
97 if dict_path.exists():
98 arguments.append(f'--dictionary={dict_path}')
99
100 # Directory workdir saves:
101 # 1. Centipede-readable corpus file;
102 # 2. Centipede-readable feature file;
103 # 3. Crash reproducing inputs.
104 workdir = self._create_temp_dir('workdir')
105 arguments.append(f'--workdir={workdir}')
106
107 # Directory corpus_dir saves the corpus files required by ClusterFuzz.
108 arguments.append(f'--corpus_dir={corpus_dir}')
109
110 target_binaries = self._get_binary_paths(target_path)
111 if target_binaries.unsanitized is None:
112 # Assuming the only binary is always sanitized (e.g., from Chrome).
113 arguments.append(f'--binary={target_binaries.sanitized}')
114 logs.log_warn('Unable to find unsanitized target binary.')
115 else:
116 arguments.append(f'--binary={target_binaries.unsanitized}')
117 arguments.append(f'--extra_binaries={target_binaries.sanitized}')
118
119 arguments.append(f'--timeout_per_input={_TIMEOUT_PER_INPUT_FUZZ}')
120
121 arguments.extend(_DEFAULT_ARGUMENTS)
122
123 return engine.FuzzOptions(corpus_dir, arguments, {})
124
125 def _get_binary_paths(self, target_path):
126 """Gets the paths to the main and auxiliary binaries based on |target_path|
127 Args:
128 target_path: Path to the main target in a string.
129
130 Returns:
131 A named tuple containing paths to both target binaries as pathlib.Path.
132 """
133 # Centipede expects one or two target binaries:
134 # |-------------------------------------------------------|
135 # | | main target path | auxiliary target path |
136 # |-------------------------------------------------------|
137 # | 1 binary | sanitized | - |
138 # |-------------------------------------------------------|
139 # | 2 binaries | unsanitized | sanitized |
140 # |-------------------------------------------------------|
141
142 main_target_path = pathlib.Path(target_path)
143 auxiliary_target_path = self._get_auxiliary_target_path(target_path)
144
145 if main_target_path.exists() and auxiliary_target_path.exists():
146 # 2 binaries were provided.
147 target_binaries = TargetBinaries(main_target_path, auxiliary_target_path)
148 elif main_target_path.exists():
149 # 1 binary was provided.
150 target_binaries = TargetBinaries(None, main_target_path)
151 else:
152 assert not auxiliary_target_path.exists()
153 raise RuntimeError('No fuzz target: Centipede cannot find main target '
154 f'{main_target_path}, or auxiliary target '
155 f'{auxiliary_target_path}.')
156
157 return target_binaries
158
159 def _get_auxiliary_target_path(self, target_path):
160 """Gets the auxiliary target path based on the main |target_path|.
161 When exists, it points to the sanitized binary, which is required by fuzzing
162 (as an auxiliary) and crash reproduction.
163
164 Args:
165 target_path: Path to the main target in a string.
166
167 Returns:
168 Path to the auxiliary binary as a pathlib.Path.
169 """
170 # Assuming they will be in child dirs named by fuzzer_utils.EXTRA_BUILD_DIR.
171 build_dir = environment.get_value('BUILD_DIR')
172 auxiliary_target_name = pathlib.Path(target_path).name
173 auxiliary_target_path = pathlib.Path(
174 build_dir, fuzzer_utils.EXTRA_BUILD_DIR, auxiliary_target_name)
175 return auxiliary_target_path
176
177 def fuzz(self, target_path, options, reproducers_dir, max_time): # pylint: disable=unused-argument
178 """Runs a fuzz session.
179
180 Args:
181 target_path: Path to the target.
182 options: The FuzzOptions object returned by prepare().
183 reproducers_dir: The directory to put reproducers in when crashes
184 are found.
185 max_time: Maximum allowed time for the fuzzing to run.
186
187 Returns:
188 A FuzzResult object.
189 """
190 runner = _get_runner(target_path)
191 timeout = max_time + _CLEAN_EXIT_SECS
192 fuzz_result = runner.run_and_wait(
193 additional_args=options.arguments, timeout=timeout)
194 fuzz_result.output = Engine.trim_logs(fuzz_result.output)
195
196 reproducer_path = _get_reproducer_path(fuzz_result.output, reproducers_dir)
197 crashes = []
198 if reproducer_path:
199 crashes.append(
200 engine.Crash(
201 str(reproducer_path), fuzz_result.output, [],
202 int(fuzz_result.time_executed)))
203
204 # Stats report is not available in Centipede yet.
205 stats = None
206 return engine.FuzzResult(fuzz_result.output, fuzz_result.command, crashes,
207 stats, fuzz_result.time_executed)
208
209 @staticmethod
210 def trim_logs(fuzz_log):
211 """ Strips the 'CRASH LOG:' prefix that breaks stacktrace parsing.
212
213 Args:
214 fuzz_result: The ProcessResult returned by running fuzzer binary.
215 """
216 trimmed_log_lines = [
217 line[len(_CRASH_LOG_PREFIX):]
218 if line.startswith(_CRASH_LOG_PREFIX) else line
219 for line in fuzz_log.splitlines()
220 ]
221 return '\n'.join(trimmed_log_lines)
222
223 def reproduce(self, target_path, input_path, arguments, max_time): # pylint: disable=unused-argument
224 """Reproduces a crash given an input.
225
226 Args:
227 target_path: Path to the target.
228 input_path: Path to the reproducer input.
229 arguments: Additional arguments needed for reproduction.
230 max_time: Maximum allowed time for the reproduction.
231
232 Returns:
233 A ReproduceResult.
234 """
235 target_binaries = self._get_binary_paths(target_path)
236 sanitized_target = str(target_binaries.sanitized)
237
238 existing_runner_flags = os.environ.get('CENTIPEDE_RUNNER_FLAGS')
239 if not existing_runner_flags:
240 os.environ['CENTIPEDE_RUNNER_FLAGS'] = (
241 f':rss_limit_mb={_RSS_LIMIT}'
242 f':timeout_per_input={_TIMEOUT_PER_INPUT_REPR}:')
243
244 runner = new_process.UnicodeProcessRunner(sanitized_target, [input_path])
245 result = runner.run_and_wait(timeout=max_time)
246
247 if existing_runner_flags:
248 os.environ['CENTIPEDE_RUNNER_FLAGS'] = existing_runner_flags
249 else:
250 os.unsetenv('CENTIPEDE_RUNNER_FLAGS')
251 result.output = Engine.trim_logs(result.output)
252
253 return engine.ReproduceResult(result.command, result.return_code,
254 result.time_executed, result.output)
255
256 def _create_temp_dir(self, name):
257 """Creates temporary directory for fuzzing."""
258 new_directory = pathlib.Path(fuzzer_utils.get_temp_dir(), name)
259 engine_common.recreate_directory(new_directory)
260 return new_directory
261
262 def minimize_corpus(self, target_path, arguments, input_dirs, output_dir,
263 reproducers_dir, max_time):
264 """Runs corpus minimization.
265 Args:
266 target_path: Path to the target.
267 arguments: Additional arguments needed for corpus minimization.
268 input_dirs: Input corpora.
269 output_dir: Output directory to place minimized corpus.
270 reproducers_dir: The directory to put reproducers in when crashes are
271 found.
272 max_time: Maximum allowed time for the minimization.
273
274 Returns:
275 A FuzzResult object.
276 """
277 raise NotImplementedError
278
279 def minimize_testcase(self, target_path, arguments, input_path, output_path,
280 max_time):
281 """Minimizes a testcase.
282 Args:
283 target_path: Path to the target.
284 arguments: Additional arguments needed for testcase minimization.
285 input_path: Path to the reproducer input.
286 output_path: Path to the minimized output.
287 max_time: Maximum allowed time for the minimization.
288 Returns:
289 A ReproduceResult.
290 Raises:
291 TimeoutError: If the testcase minimization exceeds max_time.
292 """
293 raise NotImplementedError
294
295 def cleanse(self, target_path, arguments, input_path, output_path, max_time):
296 """Cleanses a testcase.
297 Args:
298 target_path: Path to the target.
299 arguments: Additional arguments needed for testcase cleanse.
300 input_path: Path to the reproducer input.
301 output_path: Path to the cleansed output.
302 max_time: Maximum allowed time for the cleanse.
303 Returns:
304 A ReproduceResult.
305 Raises:
306 TimeoutError: If the cleanse exceeds max_time.
307 """
308 raise NotImplementedError
309
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/clusterfuzz/_internal/bot/fuzzers/centipede/engine.py b/src/clusterfuzz/_internal/bot/fuzzers/centipede/engine.py
--- a/src/clusterfuzz/_internal/bot/fuzzers/centipede/engine.py
+++ b/src/clusterfuzz/_internal/bot/fuzzers/centipede/engine.py
@@ -72,6 +72,15 @@
return crash_path
+def _set_sanitizer_options(fuzzer_path):
+ """Sets sanitizer options based on .options file overrides."""
+ engine_common.process_sanitizer_options_overrides(fuzzer_path)
+ sanitizer_options_var = environment.get_current_memory_tool_var()
+ sanitizer_options = environment.get_memory_tool_options(
+ sanitizer_options_var, {})
+ environment.set_memory_tool_options(sanitizer_options_var, sanitizer_options)
+
+
class Engine(engine.Engine):
"""Centipede engine implementation."""
@@ -188,6 +197,7 @@
A FuzzResult object.
"""
runner = _get_runner(target_path)
+ _set_sanitizer_options(target_path)
timeout = max_time + _CLEAN_EXIT_SECS
fuzz_result = runner.run_and_wait(
additional_args=options.arguments, timeout=timeout)
@@ -232,6 +242,7 @@
Returns:
A ReproduceResult.
"""
+ _set_sanitizer_options(target_path)
target_binaries = self._get_binary_paths(target_path)
sanitized_target = str(target_binaries.sanitized)
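A note on the golden diff above: it reuses ClusterFuzz's existing helpers (`engine_common.process_sanitizer_options_overrides` plus the `environment` memory-tool getter/setter pair) so that per-target `.options` overrides reach Centipede through the same path the libFuzzer engine already uses. As a rough standalone illustration of that flow — the INI-style `.options` file with an `[asan]` section, the helper name, and the parsing details below are assumptions for illustration, not ClusterFuzz's actual code — the override step amounts to merging those keys into `ASAN_OPTIONS` before the sanitized binary is launched:

```python
# Illustrative sketch only -- not ClusterFuzz's real implementation.
import configparser
import os


def apply_asan_overrides(options_path):
    """Merge [asan] keys from a fuzz target's .options file into ASAN_OPTIONS."""
    parser = configparser.ConfigParser()
    parser.read(options_path)
    if not parser.has_section('asan'):
        return
    # Start from whatever ASAN_OPTIONS already holds, then apply the overrides.
    current = dict(
        item.split('=', 1)
        for item in os.environ.get('ASAN_OPTIONS', '').split(':')
        if '=' in item)
    current.update(dict(parser.items('asan')))
    os.environ['ASAN_OPTIONS'] = ':'.join(
        f'{key}={value}' for key, value in current.items())


# Hypothetical usage: call before running the sanitized Centipede binary.
# apply_asan_overrides('/build/my_fuzzer.options')
```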
|
{"golden_diff": "diff --git a/src/clusterfuzz/_internal/bot/fuzzers/centipede/engine.py b/src/clusterfuzz/_internal/bot/fuzzers/centipede/engine.py\n--- a/src/clusterfuzz/_internal/bot/fuzzers/centipede/engine.py\n+++ b/src/clusterfuzz/_internal/bot/fuzzers/centipede/engine.py\n@@ -72,6 +72,15 @@\n return crash_path\n \n \n+def _set_sanitizer_options(fuzzer_path):\n+ \"\"\"Sets sanitizer options based on .options file overrides.\"\"\"\n+ engine_common.process_sanitizer_options_overrides(fuzzer_path)\n+ sanitizer_options_var = environment.get_current_memory_tool_var()\n+ sanitizer_options = environment.get_memory_tool_options(\n+ sanitizer_options_var, {})\n+ environment.set_memory_tool_options(sanitizer_options_var, sanitizer_options)\n+\n+\n class Engine(engine.Engine):\n \"\"\"Centipede engine implementation.\"\"\"\n \n@@ -188,6 +197,7 @@\n A FuzzResult object.\n \"\"\"\n runner = _get_runner(target_path)\n+ _set_sanitizer_options(target_path)\n timeout = max_time + _CLEAN_EXIT_SECS\n fuzz_result = runner.run_and_wait(\n additional_args=options.arguments, timeout=timeout)\n@@ -232,6 +242,7 @@\n Returns:\n A ReproduceResult.\n \"\"\"\n+ _set_sanitizer_options(target_path)\n target_binaries = self._get_binary_paths(target_path)\n sanitized_target = str(target_binaries.sanitized)\n", "issue": "Make Centipede handle custom ASAN options that libFuzzer supports\nSee https://bugs.chromium.org/p/chromium/issues/detail?id=1451080#c4\n", "before_files": [{"content": "# Copyright 2022 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Centipede engine interface.\"\"\"\n\nfrom collections import namedtuple\nimport os\nimport pathlib\nimport re\nimport shutil\n\nfrom clusterfuzz._internal.bot.fuzzers import dictionary_manager\nfrom clusterfuzz._internal.bot.fuzzers import engine_common\nfrom clusterfuzz._internal.bot.fuzzers import utils as fuzzer_utils\nfrom clusterfuzz._internal.metrics import logs\nfrom clusterfuzz._internal.system import environment\nfrom clusterfuzz._internal.system import new_process\nfrom clusterfuzz.fuzz import engine\n\n_CLEAN_EXIT_SECS = 10\n_SERVER_COUNT = 1\n_RSS_LIMIT = 4096\n_ADDRESS_SPACE_LIMIT = 4096\n_TIMEOUT_PER_INPUT_FUZZ = 25\n_TIMEOUT_PER_INPUT_REPR = 60\n_DEFAULT_ARGUMENTS = [\n '--exit_on_crash=1',\n f'--fork_server={_SERVER_COUNT}',\n f'--rss_limit_mb={_RSS_LIMIT}',\n f'--address_space_limit_mb={_ADDRESS_SPACE_LIMIT}',\n]\n\nCRASH_REGEX = re.compile(r'[sS]aving input to:?\\s*(.*)')\n_CRASH_LOG_PREFIX = 'CRASH LOG: '\nTargetBinaries = namedtuple('TargetBinaries', ['unsanitized', 'sanitized'])\n\n\nclass CentipedeError(Exception):\n \"\"\"Base exception class.\"\"\"\n\n\ndef _get_runner(target_path):\n \"\"\"Gets the Centipede runner.\"\"\"\n centipede_path = pathlib.Path(target_path).parent / 'centipede'\n if not centipede_path.exists():\n raise CentipedeError('Centipede not found in build')\n\n centipede_path = str(centipede_path)\n if environment.get_value('USE_UNSHARE'):\n return new_process.UnicodeModifierRunner(centipede_path)\n return 
new_process.UnicodeProcessRunner(centipede_path)\n\n\ndef _get_reproducer_path(log, reproducers_dir):\n \"\"\"Gets the reproducer path, if any.\"\"\"\n crash_match = CRASH_REGEX.search(log)\n if not crash_match:\n return None\n tmp_crash_path = pathlib.Path(crash_match.group(1))\n crash_path = pathlib.Path(reproducers_dir) / tmp_crash_path.name\n shutil.copy(tmp_crash_path, crash_path)\n return crash_path\n\n\nclass Engine(engine.Engine):\n \"\"\"Centipede engine implementation.\"\"\"\n\n @property\n def name(self):\n return 'centipede'\n\n # pylint: disable=unused-argument\n def prepare(self, corpus_dir, target_path, build_dir):\n \"\"\"Prepares for a fuzzing session, by generating options.\n\n Args:\n corpus_dir: The main corpus directory.\n target_path: Path to the target.\n build_dir: Path to the build directory.\n\n Returns:\n A FuzzOptions object.\n \"\"\"\n arguments = []\n dict_path = pathlib.Path(\n dictionary_manager.get_default_dictionary_path(target_path))\n if dict_path.exists():\n arguments.append(f'--dictionary={dict_path}')\n\n # Directory workdir saves:\n # 1. Centipede-readable corpus file;\n # 2. Centipede-readable feature file;\n # 3. Crash reproducing inputs.\n workdir = self._create_temp_dir('workdir')\n arguments.append(f'--workdir={workdir}')\n\n # Directory corpus_dir saves the corpus files required by ClusterFuzz.\n arguments.append(f'--corpus_dir={corpus_dir}')\n\n target_binaries = self._get_binary_paths(target_path)\n if target_binaries.unsanitized is None:\n # Assuming the only binary is always sanitized (e.g., from Chrome).\n arguments.append(f'--binary={target_binaries.sanitized}')\n logs.log_warn('Unable to find unsanitized target binary.')\n else:\n arguments.append(f'--binary={target_binaries.unsanitized}')\n arguments.append(f'--extra_binaries={target_binaries.sanitized}')\n\n arguments.append(f'--timeout_per_input={_TIMEOUT_PER_INPUT_FUZZ}')\n\n arguments.extend(_DEFAULT_ARGUMENTS)\n\n return engine.FuzzOptions(corpus_dir, arguments, {})\n\n def _get_binary_paths(self, target_path):\n \"\"\"Gets the paths to the main and auxiliary binaries based on |target_path|\n Args:\n target_path: Path to the main target in a string.\n\n Returns:\n A named tuple containing paths to both target binaries as pathlib.Path.\n \"\"\"\n # Centipede expects one or two target binaries:\n # |-------------------------------------------------------|\n # | | main target path | auxiliary target path |\n # |-------------------------------------------------------|\n # | 1 binary | sanitized | - |\n # |-------------------------------------------------------|\n # | 2 binaries | unsanitized | sanitized |\n # |-------------------------------------------------------|\n\n main_target_path = pathlib.Path(target_path)\n auxiliary_target_path = self._get_auxiliary_target_path(target_path)\n\n if main_target_path.exists() and auxiliary_target_path.exists():\n # 2 binaries were provided.\n target_binaries = TargetBinaries(main_target_path, auxiliary_target_path)\n elif main_target_path.exists():\n # 1 binary was provided.\n target_binaries = TargetBinaries(None, main_target_path)\n else:\n assert not auxiliary_target_path.exists()\n raise RuntimeError('No fuzz target: Centipede cannot find main target '\n f'{main_target_path}, or auxiliary target '\n f'{auxiliary_target_path}.')\n\n return target_binaries\n\n def _get_auxiliary_target_path(self, target_path):\n \"\"\"Gets the auxiliary target path based on the main |target_path|.\n When exists, it points to the sanitized binary, which is 
required by fuzzing\n (as an auxiliary) and crash reproduction.\n\n Args:\n target_path: Path to the main target in a string.\n\n Returns:\n Path to the auxiliary binary as a pathlib.Path.\n \"\"\"\n # Assuming they will be in child dirs named by fuzzer_utils.EXTRA_BUILD_DIR.\n build_dir = environment.get_value('BUILD_DIR')\n auxiliary_target_name = pathlib.Path(target_path).name\n auxiliary_target_path = pathlib.Path(\n build_dir, fuzzer_utils.EXTRA_BUILD_DIR, auxiliary_target_name)\n return auxiliary_target_path\n\n def fuzz(self, target_path, options, reproducers_dir, max_time): # pylint: disable=unused-argument\n \"\"\"Runs a fuzz session.\n\n Args:\n target_path: Path to the target.\n options: The FuzzOptions object returned by prepare().\n reproducers_dir: The directory to put reproducers in when crashes\n are found.\n max_time: Maximum allowed time for the fuzzing to run.\n\n Returns:\n A FuzzResult object.\n \"\"\"\n runner = _get_runner(target_path)\n timeout = max_time + _CLEAN_EXIT_SECS\n fuzz_result = runner.run_and_wait(\n additional_args=options.arguments, timeout=timeout)\n fuzz_result.output = Engine.trim_logs(fuzz_result.output)\n\n reproducer_path = _get_reproducer_path(fuzz_result.output, reproducers_dir)\n crashes = []\n if reproducer_path:\n crashes.append(\n engine.Crash(\n str(reproducer_path), fuzz_result.output, [],\n int(fuzz_result.time_executed)))\n\n # Stats report is not available in Centipede yet.\n stats = None\n return engine.FuzzResult(fuzz_result.output, fuzz_result.command, crashes,\n stats, fuzz_result.time_executed)\n\n @staticmethod\n def trim_logs(fuzz_log):\n \"\"\" Strips the 'CRASH LOG:' prefix that breaks stacktrace parsing.\n\n Args:\n fuzz_result: The ProcessResult returned by running fuzzer binary.\n \"\"\"\n trimmed_log_lines = [\n line[len(_CRASH_LOG_PREFIX):]\n if line.startswith(_CRASH_LOG_PREFIX) else line\n for line in fuzz_log.splitlines()\n ]\n return '\\n'.join(trimmed_log_lines)\n\n def reproduce(self, target_path, input_path, arguments, max_time): # pylint: disable=unused-argument\n \"\"\"Reproduces a crash given an input.\n\n Args:\n target_path: Path to the target.\n input_path: Path to the reproducer input.\n arguments: Additional arguments needed for reproduction.\n max_time: Maximum allowed time for the reproduction.\n\n Returns:\n A ReproduceResult.\n \"\"\"\n target_binaries = self._get_binary_paths(target_path)\n sanitized_target = str(target_binaries.sanitized)\n\n existing_runner_flags = os.environ.get('CENTIPEDE_RUNNER_FLAGS')\n if not existing_runner_flags:\n os.environ['CENTIPEDE_RUNNER_FLAGS'] = (\n f':rss_limit_mb={_RSS_LIMIT}'\n f':timeout_per_input={_TIMEOUT_PER_INPUT_REPR}:')\n\n runner = new_process.UnicodeProcessRunner(sanitized_target, [input_path])\n result = runner.run_and_wait(timeout=max_time)\n\n if existing_runner_flags:\n os.environ['CENTIPEDE_RUNNER_FLAGS'] = existing_runner_flags\n else:\n os.unsetenv('CENTIPEDE_RUNNER_FLAGS')\n result.output = Engine.trim_logs(result.output)\n\n return engine.ReproduceResult(result.command, result.return_code,\n result.time_executed, result.output)\n\n def _create_temp_dir(self, name):\n \"\"\"Creates temporary directory for fuzzing.\"\"\"\n new_directory = pathlib.Path(fuzzer_utils.get_temp_dir(), name)\n engine_common.recreate_directory(new_directory)\n return new_directory\n\n def minimize_corpus(self, target_path, arguments, input_dirs, output_dir,\n reproducers_dir, max_time):\n \"\"\"Runs corpus minimization.\n Args:\n target_path: Path to the target.\n 
arguments: Additional arguments needed for corpus minimization.\n input_dirs: Input corpora.\n output_dir: Output directory to place minimized corpus.\n reproducers_dir: The directory to put reproducers in when crashes are\n found.\n max_time: Maximum allowed time for the minimization.\n\n Returns:\n A FuzzResult object.\n \"\"\"\n raise NotImplementedError\n\n def minimize_testcase(self, target_path, arguments, input_path, output_path,\n max_time):\n \"\"\"Minimizes a testcase.\n Args:\n target_path: Path to the target.\n arguments: Additional arguments needed for testcase minimization.\n input_path: Path to the reproducer input.\n output_path: Path to the minimized output.\n max_time: Maximum allowed time for the minimization.\n Returns:\n A ReproduceResult.\n Raises:\n TimeoutError: If the testcase minimization exceeds max_time.\n \"\"\"\n raise NotImplementedError\n\n def cleanse(self, target_path, arguments, input_path, output_path, max_time):\n \"\"\"Cleanses a testcase.\n Args:\n target_path: Path to the target.\n arguments: Additional arguments needed for testcase cleanse.\n input_path: Path to the reproducer input.\n output_path: Path to the cleansed output.\n max_time: Maximum allowed time for the cleanse.\n Returns:\n A ReproduceResult.\n Raises:\n TimeoutError: If the cleanse exceeds max_time.\n \"\"\"\n raise NotImplementedError\n", "path": "src/clusterfuzz/_internal/bot/fuzzers/centipede/engine.py"}], "after_files": [{"content": "# Copyright 2022 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Centipede engine interface.\"\"\"\n\nfrom collections import namedtuple\nimport os\nimport pathlib\nimport re\nimport shutil\n\nfrom clusterfuzz._internal.bot.fuzzers import dictionary_manager\nfrom clusterfuzz._internal.bot.fuzzers import engine_common\nfrom clusterfuzz._internal.bot.fuzzers import utils as fuzzer_utils\nfrom clusterfuzz._internal.metrics import logs\nfrom clusterfuzz._internal.system import environment\nfrom clusterfuzz._internal.system import new_process\nfrom clusterfuzz.fuzz import engine\n\n_CLEAN_EXIT_SECS = 10\n_SERVER_COUNT = 1\n_RSS_LIMIT = 4096\n_ADDRESS_SPACE_LIMIT = 4096\n_TIMEOUT_PER_INPUT_FUZZ = 25\n_TIMEOUT_PER_INPUT_REPR = 60\n_DEFAULT_ARGUMENTS = [\n '--exit_on_crash=1',\n f'--fork_server={_SERVER_COUNT}',\n f'--rss_limit_mb={_RSS_LIMIT}',\n f'--address_space_limit_mb={_ADDRESS_SPACE_LIMIT}',\n]\n\nCRASH_REGEX = re.compile(r'[sS]aving input to:?\\s*(.*)')\n_CRASH_LOG_PREFIX = 'CRASH LOG: '\nTargetBinaries = namedtuple('TargetBinaries', ['unsanitized', 'sanitized'])\n\n\nclass CentipedeError(Exception):\n \"\"\"Base exception class.\"\"\"\n\n\ndef _get_runner(target_path):\n \"\"\"Gets the Centipede runner.\"\"\"\n centipede_path = pathlib.Path(target_path).parent / 'centipede'\n if not centipede_path.exists():\n raise CentipedeError('Centipede not found in build')\n\n centipede_path = str(centipede_path)\n if environment.get_value('USE_UNSHARE'):\n return new_process.UnicodeModifierRunner(centipede_path)\n return 
new_process.UnicodeProcessRunner(centipede_path)\n\n\ndef _get_reproducer_path(log, reproducers_dir):\n \"\"\"Gets the reproducer path, if any.\"\"\"\n crash_match = CRASH_REGEX.search(log)\n if not crash_match:\n return None\n tmp_crash_path = pathlib.Path(crash_match.group(1))\n crash_path = pathlib.Path(reproducers_dir) / tmp_crash_path.name\n shutil.copy(tmp_crash_path, crash_path)\n return crash_path\n\n\ndef _set_sanitizer_options(fuzzer_path):\n \"\"\"Sets sanitizer options based on .options file overrides.\"\"\"\n engine_common.process_sanitizer_options_overrides(fuzzer_path)\n sanitizer_options_var = environment.get_current_memory_tool_var()\n sanitizer_options = environment.get_memory_tool_options(\n sanitizer_options_var, {})\n environment.set_memory_tool_options(sanitizer_options_var, sanitizer_options)\n\n\nclass Engine(engine.Engine):\n \"\"\"Centipede engine implementation.\"\"\"\n\n @property\n def name(self):\n return 'centipede'\n\n # pylint: disable=unused-argument\n def prepare(self, corpus_dir, target_path, build_dir):\n \"\"\"Prepares for a fuzzing session, by generating options.\n\n Args:\n corpus_dir: The main corpus directory.\n target_path: Path to the target.\n build_dir: Path to the build directory.\n\n Returns:\n A FuzzOptions object.\n \"\"\"\n arguments = []\n dict_path = pathlib.Path(\n dictionary_manager.get_default_dictionary_path(target_path))\n if dict_path.exists():\n arguments.append(f'--dictionary={dict_path}')\n\n # Directory workdir saves:\n # 1. Centipede-readable corpus file;\n # 2. Centipede-readable feature file;\n # 3. Crash reproducing inputs.\n workdir = self._create_temp_dir('workdir')\n arguments.append(f'--workdir={workdir}')\n\n # Directory corpus_dir saves the corpus files required by ClusterFuzz.\n arguments.append(f'--corpus_dir={corpus_dir}')\n\n target_binaries = self._get_binary_paths(target_path)\n if target_binaries.unsanitized is None:\n # Assuming the only binary is always sanitized (e.g., from Chrome).\n arguments.append(f'--binary={target_binaries.sanitized}')\n logs.log_warn('Unable to find unsanitized target binary.')\n else:\n arguments.append(f'--binary={target_binaries.unsanitized}')\n arguments.append(f'--extra_binaries={target_binaries.sanitized}')\n\n arguments.append(f'--timeout_per_input={_TIMEOUT_PER_INPUT_FUZZ}')\n\n arguments.extend(_DEFAULT_ARGUMENTS)\n\n return engine.FuzzOptions(corpus_dir, arguments, {})\n\n def _get_binary_paths(self, target_path):\n \"\"\"Gets the paths to the main and auxiliary binaries based on |target_path|\n Args:\n target_path: Path to the main target in a string.\n\n Returns:\n A named tuple containing paths to both target binaries as pathlib.Path.\n \"\"\"\n # Centipede expects one or two target binaries:\n # |-------------------------------------------------------|\n # | | main target path | auxiliary target path |\n # |-------------------------------------------------------|\n # | 1 binary | sanitized | - |\n # |-------------------------------------------------------|\n # | 2 binaries | unsanitized | sanitized |\n # |-------------------------------------------------------|\n\n main_target_path = pathlib.Path(target_path)\n auxiliary_target_path = self._get_auxiliary_target_path(target_path)\n\n if main_target_path.exists() and auxiliary_target_path.exists():\n # 2 binaries were provided.\n target_binaries = TargetBinaries(main_target_path, auxiliary_target_path)\n elif main_target_path.exists():\n # 1 binary was provided.\n target_binaries = TargetBinaries(None, main_target_path)\n 
else:\n assert not auxiliary_target_path.exists()\n raise RuntimeError('No fuzz target: Centipede cannot find main target '\n f'{main_target_path}, or auxiliary target '\n f'{auxiliary_target_path}.')\n\n return target_binaries\n\n def _get_auxiliary_target_path(self, target_path):\n \"\"\"Gets the auxiliary target path based on the main |target_path|.\n When exists, it points to the sanitized binary, which is required by fuzzing\n (as an auxiliary) and crash reproduction.\n\n Args:\n target_path: Path to the main target in a string.\n\n Returns:\n Path to the auxiliary binary as a pathlib.Path.\n \"\"\"\n # Assuming they will be in child dirs named by fuzzer_utils.EXTRA_BUILD_DIR.\n build_dir = environment.get_value('BUILD_DIR')\n auxiliary_target_name = pathlib.Path(target_path).name\n auxiliary_target_path = pathlib.Path(\n build_dir, fuzzer_utils.EXTRA_BUILD_DIR, auxiliary_target_name)\n return auxiliary_target_path\n\n def fuzz(self, target_path, options, reproducers_dir, max_time): # pylint: disable=unused-argument\n \"\"\"Runs a fuzz session.\n\n Args:\n target_path: Path to the target.\n options: The FuzzOptions object returned by prepare().\n reproducers_dir: The directory to put reproducers in when crashes\n are found.\n max_time: Maximum allowed time for the fuzzing to run.\n\n Returns:\n A FuzzResult object.\n \"\"\"\n runner = _get_runner(target_path)\n _set_sanitizer_options(target_path)\n timeout = max_time + _CLEAN_EXIT_SECS\n fuzz_result = runner.run_and_wait(\n additional_args=options.arguments, timeout=timeout)\n fuzz_result.output = Engine.trim_logs(fuzz_result.output)\n\n reproducer_path = _get_reproducer_path(fuzz_result.output, reproducers_dir)\n crashes = []\n if reproducer_path:\n crashes.append(\n engine.Crash(\n str(reproducer_path), fuzz_result.output, [],\n int(fuzz_result.time_executed)))\n\n # Stats report is not available in Centipede yet.\n stats = None\n return engine.FuzzResult(fuzz_result.output, fuzz_result.command, crashes,\n stats, fuzz_result.time_executed)\n\n @staticmethod\n def trim_logs(fuzz_log):\n \"\"\" Strips the 'CRASH LOG:' prefix that breaks stacktrace parsing.\n\n Args:\n fuzz_result: The ProcessResult returned by running fuzzer binary.\n \"\"\"\n trimmed_log_lines = [\n line[len(_CRASH_LOG_PREFIX):]\n if line.startswith(_CRASH_LOG_PREFIX) else line\n for line in fuzz_log.splitlines()\n ]\n return '\\n'.join(trimmed_log_lines)\n\n def reproduce(self, target_path, input_path, arguments, max_time): # pylint: disable=unused-argument\n \"\"\"Reproduces a crash given an input.\n\n Args:\n target_path: Path to the target.\n input_path: Path to the reproducer input.\n arguments: Additional arguments needed for reproduction.\n max_time: Maximum allowed time for the reproduction.\n\n Returns:\n A ReproduceResult.\n \"\"\"\n _set_sanitizer_options(target_path)\n target_binaries = self._get_binary_paths(target_path)\n sanitized_target = str(target_binaries.sanitized)\n\n existing_runner_flags = os.environ.get('CENTIPEDE_RUNNER_FLAGS')\n if not existing_runner_flags:\n os.environ['CENTIPEDE_RUNNER_FLAGS'] = (\n f':rss_limit_mb={_RSS_LIMIT}'\n f':timeout_per_input={_TIMEOUT_PER_INPUT_REPR}:')\n\n runner = new_process.UnicodeProcessRunner(sanitized_target, [input_path])\n result = runner.run_and_wait(timeout=max_time)\n\n if existing_runner_flags:\n os.environ['CENTIPEDE_RUNNER_FLAGS'] = existing_runner_flags\n else:\n os.unsetenv('CENTIPEDE_RUNNER_FLAGS')\n result.output = Engine.trim_logs(result.output)\n\n return 
engine.ReproduceResult(result.command, result.return_code,\n result.time_executed, result.output)\n\n def _create_temp_dir(self, name):\n \"\"\"Creates temporary directory for fuzzing.\"\"\"\n new_directory = pathlib.Path(fuzzer_utils.get_temp_dir(), name)\n engine_common.recreate_directory(new_directory)\n return new_directory\n\n def minimize_corpus(self, target_path, arguments, input_dirs, output_dir,\n reproducers_dir, max_time):\n \"\"\"Runs corpus minimization.\n Args:\n target_path: Path to the target.\n arguments: Additional arguments needed for corpus minimization.\n input_dirs: Input corpora.\n output_dir: Output directory to place minimized corpus.\n reproducers_dir: The directory to put reproducers in when crashes are\n found.\n max_time: Maximum allowed time for the minimization.\n\n Returns:\n A FuzzResult object.\n \"\"\"\n raise NotImplementedError\n\n def minimize_testcase(self, target_path, arguments, input_path, output_path,\n max_time):\n \"\"\"Minimizes a testcase.\n Args:\n target_path: Path to the target.\n arguments: Additional arguments needed for testcase minimization.\n input_path: Path to the reproducer input.\n output_path: Path to the minimized output.\n max_time: Maximum allowed time for the minimization.\n Returns:\n A ReproduceResult.\n Raises:\n TimeoutError: If the testcase minimization exceeds max_time.\n \"\"\"\n raise NotImplementedError\n\n def cleanse(self, target_path, arguments, input_path, output_path, max_time):\n \"\"\"Cleanses a testcase.\n Args:\n target_path: Path to the target.\n arguments: Additional arguments needed for testcase cleanse.\n input_path: Path to the reproducer input.\n output_path: Path to the cleansed output.\n max_time: Maximum allowed time for the cleanse.\n Returns:\n A ReproduceResult.\n Raises:\n TimeoutError: If the cleanse exceeds max_time.\n \"\"\"\n raise NotImplementedError\n", "path": "src/clusterfuzz/_internal/bot/fuzzers/centipede/engine.py"}]}
| 3,667 | 338 |
gh_patches_debug_2522
|
rasdani/github-patches
|
git_diff
|
googleapis__python-bigquery-189
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Packaging: prep for 1.0.0 release of `google-resumable-media-python`.
See: https://github.com/googleapis/google-resumable-media-python/issues/138
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # Copyright 2018 Google LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import io
16 import os
17
18 import setuptools
19
20
21 # Package metadata.
22
23 name = "google-cloud-bigquery"
24 description = "Google BigQuery API client library"
25 version = "1.26.0"
26 # Should be one of:
27 # 'Development Status :: 3 - Alpha'
28 # 'Development Status :: 4 - Beta'
29 # 'Development Status :: 5 - Production/Stable'
30 release_status = "Development Status :: 5 - Production/Stable"
31 dependencies = [
32 'enum34; python_version < "3.4"',
33 "google-api-core >= 1.21.0, < 2.0dev",
34 "google-cloud-core >= 1.1.0, < 2.0dev",
35 "google-resumable-media >= 0.5.0, < 0.6dev",
36 "six >=1.13.0,< 2.0.0dev",
37 ]
38 extras = {
39 "bqstorage": [
40 "google-cloud-bigquery-storage >= 1.0.0, <2.0.0dev",
41 # Due to an issue in pip's dependency resolver, the `grpc` extra is not
42 # installed, even though `google-cloud-bigquery-storage` specifies it
43 # as `google-api-core[grpc]`. We thus need to explicitly specify it here.
44 # See: https://github.com/googleapis/python-bigquery/issues/83
45 "grpcio >= 1.8.2, < 2.0dev",
46 "pyarrow>=0.16.0, < 2.0dev",
47 ],
48 "pandas": ["pandas>=0.17.1"],
49 # Exclude PyArrow dependency from Windows Python 2.7.
50 'pyarrow: platform_system != "Windows" or python_version >= "3.4"': [
51 # Bad Linux release for 0.14.0.
52 # https://issues.apache.org/jira/browse/ARROW-5868
53 "pyarrow>=0.4.1, != 0.14.0"
54 ],
55 "tqdm": ["tqdm >= 4.0.0, <5.0.0dev"],
56 "fastparquet": [
57 "fastparquet",
58 "python-snappy",
59 # llvmlite >= 0.32.0 cannot be installed on Python 3.5 and below
60 # (building the wheel fails), thus needs to be restricted.
61 # See: https://github.com/googleapis/python-bigquery/issues/78
62 "llvmlite <= 0.31.0",
63 ],
64 }
65
66 all_extras = []
67
68 for extra in extras:
69 if extra == "fastparquet":
70 # Skip fastparquet from "all" because it is redundant with pyarrow and
71 # creates a dependency on pre-release versions of numpy. See:
72 # https://github.com/googleapis/google-cloud-python/issues/8549
73 continue
74 all_extras.extend(extras[extra])
75
76 extras["all"] = all_extras
77
78 # Setup boilerplate below this line.
79
80 package_root = os.path.abspath(os.path.dirname(__file__))
81
82 readme_filename = os.path.join(package_root, "README.rst")
83 with io.open(readme_filename, encoding="utf-8") as readme_file:
84 readme = readme_file.read()
85
86 # Only include packages under the 'google' namespace. Do not include tests,
87 # benchmarks, etc.
88 packages = [
89 package for package in setuptools.find_packages() if package.startswith("google")
90 ]
91
92 # Determine which namespaces are needed.
93 namespaces = ["google"]
94 if "google.cloud" in packages:
95 namespaces.append("google.cloud")
96
97
98 setuptools.setup(
99 name=name,
100 version=version,
101 description=description,
102 long_description=readme,
103 author="Google LLC",
104 author_email="[email protected]",
105 license="Apache 2.0",
106 url="https://github.com/googleapis/python-bigquery",
107 classifiers=[
108 release_status,
109 "Intended Audience :: Developers",
110 "License :: OSI Approved :: Apache Software License",
111 "Programming Language :: Python",
112 "Programming Language :: Python :: 2",
113 "Programming Language :: Python :: 2.7",
114 "Programming Language :: Python :: 3",
115 "Programming Language :: Python :: 3.5",
116 "Programming Language :: Python :: 3.6",
117 "Programming Language :: Python :: 3.7",
118 "Programming Language :: Python :: 3.8",
119 "Operating System :: OS Independent",
120 "Topic :: Internet",
121 ],
122 platforms="Posix; MacOS X; Windows",
123 packages=packages,
124 namespace_packages=namespaces,
125 install_requires=dependencies,
126 extras_require=extras,
127 python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
128 include_package_data=True,
129 zip_safe=False,
130 )
131
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -32,7 +32,7 @@
'enum34; python_version < "3.4"',
"google-api-core >= 1.21.0, < 2.0dev",
"google-cloud-core >= 1.1.0, < 2.0dev",
- "google-resumable-media >= 0.5.0, < 0.6dev",
+ "google-resumable-media >= 0.5.0, < 2.0dev",
"six >=1.13.0,< 2.0.0dev",
]
extras = {
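For context, the one-line fix above simply widens the allowed `google-resumable-media` range so the upcoming 1.0.0 release can resolve. A quick sanity check of the relaxed specifier — a standalone sketch using the third-party `packaging` library, not part of this repository's test suite; the version strings are illustrative:

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

old_pin = SpecifierSet(">=0.5.0,<0.6dev")   # previous upper bound
new_pin = SpecifierSet(">=0.5.0,<2.0dev")   # relaxed upper bound

for candidate in ("0.5.1", "1.0.0"):
    version = Version(candidate)
    print(candidate, "old pin:", version in old_pin, "new pin:", version in new_pin)
# 1.0.0 is rejected by the old upper bound but accepted by the new one.
```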
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -32,7 +32,7 @@\n 'enum34; python_version < \"3.4\"',\n \"google-api-core >= 1.21.0, < 2.0dev\",\n \"google-cloud-core >= 1.1.0, < 2.0dev\",\n- \"google-resumable-media >= 0.5.0, < 0.6dev\",\n+ \"google-resumable-media >= 0.5.0, < 2.0dev\",\n \"six >=1.13.0,< 2.0.0dev\",\n ]\n extras = {\n", "issue": "Packaging: prep for 1.0.0 release of `google-resumable-media-python`.\nSee: https://github.com/googleapis/google-resumable-media-python/issues/138\n", "before_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport io\nimport os\n\nimport setuptools\n\n\n# Package metadata.\n\nname = \"google-cloud-bigquery\"\ndescription = \"Google BigQuery API client library\"\nversion = \"1.26.0\"\n# Should be one of:\n# 'Development Status :: 3 - Alpha'\n# 'Development Status :: 4 - Beta'\n# 'Development Status :: 5 - Production/Stable'\nrelease_status = \"Development Status :: 5 - Production/Stable\"\ndependencies = [\n 'enum34; python_version < \"3.4\"',\n \"google-api-core >= 1.21.0, < 2.0dev\",\n \"google-cloud-core >= 1.1.0, < 2.0dev\",\n \"google-resumable-media >= 0.5.0, < 0.6dev\",\n \"six >=1.13.0,< 2.0.0dev\",\n]\nextras = {\n \"bqstorage\": [\n \"google-cloud-bigquery-storage >= 1.0.0, <2.0.0dev\",\n # Due to an issue in pip's dependency resolver, the `grpc` extra is not\n # installed, even though `google-cloud-bigquery-storage` specifies it\n # as `google-api-core[grpc]`. We thus need to explicitly specify it here.\n # See: https://github.com/googleapis/python-bigquery/issues/83\n \"grpcio >= 1.8.2, < 2.0dev\",\n \"pyarrow>=0.16.0, < 2.0dev\",\n ],\n \"pandas\": [\"pandas>=0.17.1\"],\n # Exclude PyArrow dependency from Windows Python 2.7.\n 'pyarrow: platform_system != \"Windows\" or python_version >= \"3.4\"': [\n # Bad Linux release for 0.14.0.\n # https://issues.apache.org/jira/browse/ARROW-5868\n \"pyarrow>=0.4.1, != 0.14.0\"\n ],\n \"tqdm\": [\"tqdm >= 4.0.0, <5.0.0dev\"],\n \"fastparquet\": [\n \"fastparquet\",\n \"python-snappy\",\n # llvmlite >= 0.32.0 cannot be installed on Python 3.5 and below\n # (building the wheel fails), thus needs to be restricted.\n # See: https://github.com/googleapis/python-bigquery/issues/78\n \"llvmlite <= 0.31.0\",\n ],\n}\n\nall_extras = []\n\nfor extra in extras:\n if extra == \"fastparquet\":\n # Skip fastparquet from \"all\" because it is redundant with pyarrow and\n # creates a dependency on pre-release versions of numpy. See:\n # https://github.com/googleapis/google-cloud-python/issues/8549\n continue\n all_extras.extend(extras[extra])\n\nextras[\"all\"] = all_extras\n\n# Setup boilerplate below this line.\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, \"README.rst\")\nwith io.open(readme_filename, encoding=\"utf-8\") as readme_file:\n readme = readme_file.read()\n\n# Only include packages under the 'google' namespace. 
Do not include tests,\n# benchmarks, etc.\npackages = [\n package for package in setuptools.find_packages() if package.startswith(\"google\")\n]\n\n# Determine which namespaces are needed.\nnamespaces = [\"google\"]\nif \"google.cloud\" in packages:\n namespaces.append(\"google.cloud\")\n\n\nsetuptools.setup(\n name=name,\n version=version,\n description=description,\n long_description=readme,\n author=\"Google LLC\",\n author_email=\"[email protected]\",\n license=\"Apache 2.0\",\n url=\"https://github.com/googleapis/python-bigquery\",\n classifiers=[\n release_status,\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet\",\n ],\n platforms=\"Posix; MacOS X; Windows\",\n packages=packages,\n namespace_packages=namespaces,\n install_requires=dependencies,\n extras_require=extras,\n python_requires=\">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*\",\n include_package_data=True,\n zip_safe=False,\n)\n", "path": "setup.py"}], "after_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport io\nimport os\n\nimport setuptools\n\n\n# Package metadata.\n\nname = \"google-cloud-bigquery\"\ndescription = \"Google BigQuery API client library\"\nversion = \"1.26.0\"\n# Should be one of:\n# 'Development Status :: 3 - Alpha'\n# 'Development Status :: 4 - Beta'\n# 'Development Status :: 5 - Production/Stable'\nrelease_status = \"Development Status :: 5 - Production/Stable\"\ndependencies = [\n 'enum34; python_version < \"3.4\"',\n \"google-api-core >= 1.21.0, < 2.0dev\",\n \"google-cloud-core >= 1.1.0, < 2.0dev\",\n \"google-resumable-media >= 0.5.0, < 2.0dev\",\n \"six >=1.13.0,< 2.0.0dev\",\n]\nextras = {\n \"bqstorage\": [\n \"google-cloud-bigquery-storage >= 1.0.0, <2.0.0dev\",\n # Due to an issue in pip's dependency resolver, the `grpc` extra is not\n # installed, even though `google-cloud-bigquery-storage` specifies it\n # as `google-api-core[grpc]`. 
We thus need to explicitly specify it here.\n # See: https://github.com/googleapis/python-bigquery/issues/83\n \"grpcio >= 1.8.2, < 2.0dev\",\n \"pyarrow>=0.16.0, < 2.0dev\",\n ],\n \"pandas\": [\"pandas>=0.17.1\"],\n # Exclude PyArrow dependency from Windows Python 2.7.\n 'pyarrow: platform_system != \"Windows\" or python_version >= \"3.4\"': [\n # Bad Linux release for 0.14.0.\n # https://issues.apache.org/jira/browse/ARROW-5868\n \"pyarrow>=0.4.1, != 0.14.0\"\n ],\n \"tqdm\": [\"tqdm >= 4.0.0, <5.0.0dev\"],\n \"fastparquet\": [\n \"fastparquet\",\n \"python-snappy\",\n # llvmlite >= 0.32.0 cannot be installed on Python 3.5 and below\n # (building the wheel fails), thus needs to be restricted.\n # See: https://github.com/googleapis/python-bigquery/issues/78\n \"llvmlite <= 0.31.0\",\n ],\n}\n\nall_extras = []\n\nfor extra in extras:\n if extra == \"fastparquet\":\n # Skip fastparquet from \"all\" because it is redundant with pyarrow and\n # creates a dependency on pre-release versions of numpy. See:\n # https://github.com/googleapis/google-cloud-python/issues/8549\n continue\n all_extras.extend(extras[extra])\n\nextras[\"all\"] = all_extras\n\n# Setup boilerplate below this line.\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, \"README.rst\")\nwith io.open(readme_filename, encoding=\"utf-8\") as readme_file:\n readme = readme_file.read()\n\n# Only include packages under the 'google' namespace. Do not include tests,\n# benchmarks, etc.\npackages = [\n package for package in setuptools.find_packages() if package.startswith(\"google\")\n]\n\n# Determine which namespaces are needed.\nnamespaces = [\"google\"]\nif \"google.cloud\" in packages:\n namespaces.append(\"google.cloud\")\n\n\nsetuptools.setup(\n name=name,\n version=version,\n description=description,\n long_description=readme,\n author=\"Google LLC\",\n author_email=\"[email protected]\",\n license=\"Apache 2.0\",\n url=\"https://github.com/googleapis/python-bigquery\",\n classifiers=[\n release_status,\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet\",\n ],\n platforms=\"Posix; MacOS X; Windows\",\n packages=packages,\n namespace_packages=namespaces,\n install_requires=dependencies,\n extras_require=extras,\n python_requires=\">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*\",\n include_package_data=True,\n zip_safe=False,\n)\n", "path": "setup.py"}]}
| 1,780 | 156 |
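The record above only relaxes the `google-resumable-media` pin from `< 0.6dev` to `< 2.0dev` in `setup.py`. As a quick sanity check (illustrative only, not part of the repository, and assuming the third-party `packaging` library is available), the relaxed specifier admits a 1.0.0 release while the old one rejects it:

```python
# Illustrative check only; requires the `packaging` library.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

old_pin = SpecifierSet(">= 0.5.0, < 0.6dev")   # before the patch
new_pin = SpecifierSet(">= 0.5.0, < 2.0dev")   # after the patch
release = Version("1.0.0")                      # google-resumable-media 1.0.0

print(release in old_pin)  # False: the old pin blocks the 1.0.0 release
print(release in new_pin)  # True: the relaxed pin accepts it
```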
gh_patches_debug_2916
|
rasdani/github-patches
|
git_diff
|
getsentry__sentry-python-1554
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Redis integration tests have side effects
### How do you use Sentry?
Self-hosted/on-premise
### Version
1.9.2
### Steps to Reproduce
While working on https://github.com/getsentry/sentry-python/pull/1543, I noticed the following:
1. Checked out `sentry-sdk` for development.
2. Installed redis:
```
fakeredis==1.9.0
redis==3.5.3
redis-py-cluster==2.1.3
````
3. Run redis integration tests twice, in different order:
```bash
# first rediscluster, then redis
pytest 'tests/integrations/rediscluster/test_rediscluster.py::test_rediscluster_basic[RedisCluster]' tests/integrations/redis/test_redis.py::test_basic
# first redis, then rediscluster
   pytest tests/integrations/redis/test_redis.py::test_basic 'tests/integrations/rediscluster/test_rediscluster.py::test_rediscluster_basic[RedisCluster]'
   ```
### Expected Result
Both test runs pass.
### Actual Result
The second test run
```bash
pytest tests/integrations/redis/test_redis.py::test_basic 'tests/integrations/rediscluster/test_rediscluster.py::test_rediscluster_basic[RedisCluster]'
```
fails with
```pytest
tests/integrations/redis/test_redis.py . [ 50%]
tests/integrations/rediscluster/test_rediscluster.py F [100%]
============================================================================================================================================ FAILURES =============================================================================================================================================
______________________________________________________________________________________________________________________________ test_rediscluster_basic[RedisCluster] ______________________________________________________________________________________________________________________________
tests/integrations/rediscluster/test_rediscluster.py:29: in test_rediscluster_basic
(crumb,) = event["breadcrumbs"]["values"]
E ValueError: not enough values to unpack (expected 1, got 0)
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sentry_sdk/integrations/redis.py`
Content:
```
1 from __future__ import absolute_import
2
3 from sentry_sdk import Hub
4 from sentry_sdk.utils import capture_internal_exceptions, logger
5 from sentry_sdk.integrations import Integration, DidNotEnable
6
7 from sentry_sdk._types import MYPY
8
9 if MYPY:
10 from typing import Any, Sequence
11
12 _SINGLE_KEY_COMMANDS = frozenset(
13 ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"]
14 )
15 _MULTI_KEY_COMMANDS = frozenset(["del", "touch", "unlink"])
16
17 #: Trim argument lists to this many values
18 _MAX_NUM_ARGS = 10
19
20
21 def patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn):
22 # type: (Any, bool, Any) -> None
23 old_execute = pipeline_cls.execute
24
25 def sentry_patched_execute(self, *args, **kwargs):
26 # type: (Any, *Any, **Any) -> Any
27 hub = Hub.current
28
29 if hub.get_integration(RedisIntegration) is None:
30 return old_execute(self, *args, **kwargs)
31
32 with hub.start_span(op="redis", description="redis.pipeline.execute") as span:
33 with capture_internal_exceptions():
34 span.set_tag("redis.is_cluster", is_cluster)
35 transaction = self.transaction if not is_cluster else False
36 span.set_tag("redis.transaction", transaction)
37
38 commands = []
39 for i, arg in enumerate(self.command_stack):
40 if i > _MAX_NUM_ARGS:
41 break
42 command_args = []
43 for j, command_arg in enumerate(get_command_args_fn(arg)):
44 if j > 0:
45 command_arg = repr(command_arg)
46 command_args.append(command_arg)
47 commands.append(" ".join(command_args))
48
49 span.set_data(
50 "redis.commands",
51 {"count": len(self.command_stack), "first_ten": commands},
52 )
53
54 return old_execute(self, *args, **kwargs)
55
56 pipeline_cls.execute = sentry_patched_execute
57
58
59 def _get_redis_command_args(command):
60 # type: (Any) -> Sequence[Any]
61 return command[0]
62
63
64 def _parse_rediscluster_command(command):
65 # type: (Any) -> Sequence[Any]
66 return command.args
67
68
69 def _patch_rediscluster():
70 # type: () -> None
71 try:
72 import rediscluster # type: ignore
73 except ImportError:
74 return
75
76 patch_redis_client(rediscluster.RedisCluster, is_cluster=True)
77
78 # up to v1.3.6, __version__ attribute is a tuple
79 # from v2.0.0, __version__ is a string and VERSION a tuple
80 version = getattr(rediscluster, "VERSION", rediscluster.__version__)
81
82 # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
83 # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
84 if (0, 2, 0) < version < (2, 0, 0):
85 pipeline_cls = rediscluster.StrictClusterPipeline
86 patch_redis_client(rediscluster.StrictRedisCluster, is_cluster=True)
87 else:
88 pipeline_cls = rediscluster.ClusterPipeline
89
90 patch_redis_pipeline(pipeline_cls, True, _parse_rediscluster_command)
91
92
93 class RedisIntegration(Integration):
94 identifier = "redis"
95
96 @staticmethod
97 def setup_once():
98 # type: () -> None
99 try:
100 import redis
101 except ImportError:
102 raise DidNotEnable("Redis client not installed")
103
104 patch_redis_client(redis.StrictRedis, is_cluster=False)
105 patch_redis_pipeline(redis.client.Pipeline, False, _get_redis_command_args)
106 try:
107 strict_pipeline = redis.client.StrictPipeline # type: ignore
108 except AttributeError:
109 pass
110 else:
111 patch_redis_pipeline(strict_pipeline, False, _get_redis_command_args)
112
113 try:
114 import rb.clients # type: ignore
115 except ImportError:
116 pass
117 else:
118 patch_redis_client(rb.clients.FanoutClient, is_cluster=False)
119 patch_redis_client(rb.clients.MappingClient, is_cluster=False)
120 patch_redis_client(rb.clients.RoutingClient, is_cluster=False)
121
122 try:
123 _patch_rediscluster()
124 except Exception:
125 logger.exception("Error occurred while patching `rediscluster` library")
126
127
128 def patch_redis_client(cls, is_cluster):
129 # type: (Any, bool) -> None
130 """
131 This function can be used to instrument custom redis client classes or
132 subclasses.
133 """
134
135 old_execute_command = cls.execute_command
136
137 def sentry_patched_execute_command(self, name, *args, **kwargs):
138 # type: (Any, str, *Any, **Any) -> Any
139 hub = Hub.current
140
141 if hub.get_integration(RedisIntegration) is None:
142 return old_execute_command(self, name, *args, **kwargs)
143
144 description = name
145
146 with capture_internal_exceptions():
147 description_parts = [name]
148 for i, arg in enumerate(args):
149 if i > _MAX_NUM_ARGS:
150 break
151
152 description_parts.append(repr(arg))
153
154 description = " ".join(description_parts)
155
156 with hub.start_span(op="redis", description=description) as span:
157 span.set_tag("redis.is_cluster", is_cluster)
158 if name:
159 span.set_tag("redis.command", name)
160
161 if name and args:
162 name_low = name.lower()
163 if (name_low in _SINGLE_KEY_COMMANDS) or (
164 name_low in _MULTI_KEY_COMMANDS and len(args) == 1
165 ):
166 span.set_tag("redis.key", args[0])
167
168 return old_execute_command(self, name, *args, **kwargs)
169
170 cls.execute_command = sentry_patched_execute_command
171
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py
--- a/sentry_sdk/integrations/redis.py
+++ b/sentry_sdk/integrations/redis.py
@@ -131,7 +131,6 @@
This function can be used to instrument custom redis client classes or
subclasses.
"""
-
old_execute_command = cls.execute_command
def sentry_patched_execute_command(self, name, *args, **kwargs):
|
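The hunk above only removes a blank line inside `patch_redis_client`, so the functional part of the fix is not visible in this record. Below is a loose, generic illustration (toy code, not Sentry SDK code) of why monkeypatching module-level classes, as these integrations do, can leak state across test modules when it is applied more than once:

```python
# Toy example only: repeated patching of a module-level class stacks wrappers,
# so state created while one test module runs is still visible to later tests.
class Client:
    def execute_command(self, name, *args):
        return (name, args)

def patch(cls):
    old = cls.execute_command
    def wrapper(self, name, *args):
        wrapper.call_count += 1          # state that outlives a single test
        return old(self, name, *args)
    wrapper.call_count = 0
    cls.execute_command = wrapper

patch(Client)
patch(Client)                            # a second "setup" wraps the first wrapper
Client().execute_command("GET", "key")
print(Client.execute_command.call_count) # 1 on the outer wrapper; the inner wrapper also ran
```

Restoring the original attribute in test teardown, or guarding against double patching, avoids this kind of ordering dependence.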
{"golden_diff": "diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py\n--- a/sentry_sdk/integrations/redis.py\n+++ b/sentry_sdk/integrations/redis.py\n@@ -131,7 +131,6 @@\n This function can be used to instrument custom redis client classes or\n subclasses.\n \"\"\"\n-\n old_execute_command = cls.execute_command\n \n def sentry_patched_execute_command(self, name, *args, **kwargs):\n", "issue": "Redis integration tests have side effects\n### How do you use Sentry?\n\nSelf-hosted/on-premise\n\n### Version\n\n1.9.2\n\n### Steps to Reproduce\n\nWhile working on https://github.com/getsentry/sentry-python/pull/1543, I noticed the following:\r\n\r\n1. Checked out `sentry-sdk` for development.\r\n2. Installed redis:\r\n ```\r\n fakeredis==1.9.0\r\n redis==3.5.3\r\n redis-py-cluster==2.1.3\r\n ````\r\n3. Run redis integration tests twice, in different order:\r\n ```bash\r\n # first rediscluster, then redis\r\n pytest 'tests/integrations/rediscluster/test_rediscluster.py::test_rediscluster_basic[RedisCluster]' tests/integrations/redis/test_redis.py::test_basic\r\n # first redis, then rediscluster\r\n pytest tests/integrations/redis/test_redis.py::test_basic 'tests/integrations/rediscluster/test_rediscluster.py::test_rediscluster_basic[RedisCluster]'\n\n### Expected Result\n\nBoth test runs pass.\n\n### Actual Result\n\nThe second test run\r\n\r\n```bash\r\npytest tests/integrations/redis/test_redis.py::test_basic 'tests/integrations/rediscluster/test_rediscluster.py::test_rediscluster_basic[RedisCluster]' \r\n```\r\n\r\nfails with \r\n\r\n```pytest\r\ntests/integrations/redis/test_redis.py . [ 50%]\r\ntests/integrations/rediscluster/test_rediscluster.py F [100%]\r\n\r\n============================================================================================================================================ FAILURES =============================================================================================================================================\r\n______________________________________________________________________________________________________________________________ test_rediscluster_basic[RedisCluster] ______________________________________________________________________________________________________________________________\r\ntests/integrations/rediscluster/test_rediscluster.py:29: in test_rediscluster_basic\r\n (crumb,) = event[\"breadcrumbs\"][\"values\"]\r\nE ValueError: not enough values to unpack (expected 1, got 0)\r\n```\n", "before_files": [{"content": "from __future__ import absolute_import\n\nfrom sentry_sdk import Hub\nfrom sentry_sdk.utils import capture_internal_exceptions, logger\nfrom sentry_sdk.integrations import Integration, DidNotEnable\n\nfrom sentry_sdk._types import MYPY\n\nif MYPY:\n from typing import Any, Sequence\n\n_SINGLE_KEY_COMMANDS = frozenset(\n [\"decr\", \"decrby\", \"get\", \"incr\", \"incrby\", \"pttl\", \"set\", \"setex\", \"setnx\", \"ttl\"]\n)\n_MULTI_KEY_COMMANDS = frozenset([\"del\", \"touch\", \"unlink\"])\n\n#: Trim argument lists to this many values\n_MAX_NUM_ARGS = 10\n\n\ndef patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn):\n # type: (Any, bool, Any) -> None\n old_execute = pipeline_cls.execute\n\n def sentry_patched_execute(self, *args, **kwargs):\n # type: (Any, *Any, **Any) -> Any\n hub = Hub.current\n\n if hub.get_integration(RedisIntegration) is None:\n return old_execute(self, *args, **kwargs)\n\n with hub.start_span(op=\"redis\", description=\"redis.pipeline.execute\") as 
span:\n with capture_internal_exceptions():\n span.set_tag(\"redis.is_cluster\", is_cluster)\n transaction = self.transaction if not is_cluster else False\n span.set_tag(\"redis.transaction\", transaction)\n\n commands = []\n for i, arg in enumerate(self.command_stack):\n if i > _MAX_NUM_ARGS:\n break\n command_args = []\n for j, command_arg in enumerate(get_command_args_fn(arg)):\n if j > 0:\n command_arg = repr(command_arg)\n command_args.append(command_arg)\n commands.append(\" \".join(command_args))\n\n span.set_data(\n \"redis.commands\",\n {\"count\": len(self.command_stack), \"first_ten\": commands},\n )\n\n return old_execute(self, *args, **kwargs)\n\n pipeline_cls.execute = sentry_patched_execute\n\n\ndef _get_redis_command_args(command):\n # type: (Any) -> Sequence[Any]\n return command[0]\n\n\ndef _parse_rediscluster_command(command):\n # type: (Any) -> Sequence[Any]\n return command.args\n\n\ndef _patch_rediscluster():\n # type: () -> None\n try:\n import rediscluster # type: ignore\n except ImportError:\n return\n\n patch_redis_client(rediscluster.RedisCluster, is_cluster=True)\n\n # up to v1.3.6, __version__ attribute is a tuple\n # from v2.0.0, __version__ is a string and VERSION a tuple\n version = getattr(rediscluster, \"VERSION\", rediscluster.__version__)\n\n # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0\n # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst\n if (0, 2, 0) < version < (2, 0, 0):\n pipeline_cls = rediscluster.StrictClusterPipeline\n patch_redis_client(rediscluster.StrictRedisCluster, is_cluster=True)\n else:\n pipeline_cls = rediscluster.ClusterPipeline\n\n patch_redis_pipeline(pipeline_cls, True, _parse_rediscluster_command)\n\n\nclass RedisIntegration(Integration):\n identifier = \"redis\"\n\n @staticmethod\n def setup_once():\n # type: () -> None\n try:\n import redis\n except ImportError:\n raise DidNotEnable(\"Redis client not installed\")\n\n patch_redis_client(redis.StrictRedis, is_cluster=False)\n patch_redis_pipeline(redis.client.Pipeline, False, _get_redis_command_args)\n try:\n strict_pipeline = redis.client.StrictPipeline # type: ignore\n except AttributeError:\n pass\n else:\n patch_redis_pipeline(strict_pipeline, False, _get_redis_command_args)\n\n try:\n import rb.clients # type: ignore\n except ImportError:\n pass\n else:\n patch_redis_client(rb.clients.FanoutClient, is_cluster=False)\n patch_redis_client(rb.clients.MappingClient, is_cluster=False)\n patch_redis_client(rb.clients.RoutingClient, is_cluster=False)\n\n try:\n _patch_rediscluster()\n except Exception:\n logger.exception(\"Error occurred while patching `rediscluster` library\")\n\n\ndef patch_redis_client(cls, is_cluster):\n # type: (Any, bool) -> None\n \"\"\"\n This function can be used to instrument custom redis client classes or\n subclasses.\n \"\"\"\n\n old_execute_command = cls.execute_command\n\n def sentry_patched_execute_command(self, name, *args, **kwargs):\n # type: (Any, str, *Any, **Any) -> Any\n hub = Hub.current\n\n if hub.get_integration(RedisIntegration) is None:\n return old_execute_command(self, name, *args, **kwargs)\n\n description = name\n\n with capture_internal_exceptions():\n description_parts = [name]\n for i, arg in enumerate(args):\n if i > _MAX_NUM_ARGS:\n break\n\n description_parts.append(repr(arg))\n\n description = \" \".join(description_parts)\n\n with hub.start_span(op=\"redis\", description=description) as span:\n span.set_tag(\"redis.is_cluster\", is_cluster)\n if name:\n 
span.set_tag(\"redis.command\", name)\n\n if name and args:\n name_low = name.lower()\n if (name_low in _SINGLE_KEY_COMMANDS) or (\n name_low in _MULTI_KEY_COMMANDS and len(args) == 1\n ):\n span.set_tag(\"redis.key\", args[0])\n\n return old_execute_command(self, name, *args, **kwargs)\n\n cls.execute_command = sentry_patched_execute_command\n", "path": "sentry_sdk/integrations/redis.py"}], "after_files": [{"content": "from __future__ import absolute_import\n\nfrom sentry_sdk import Hub\nfrom sentry_sdk.utils import capture_internal_exceptions, logger\nfrom sentry_sdk.integrations import Integration, DidNotEnable\n\nfrom sentry_sdk._types import MYPY\n\nif MYPY:\n from typing import Any, Sequence\n\n_SINGLE_KEY_COMMANDS = frozenset(\n [\"decr\", \"decrby\", \"get\", \"incr\", \"incrby\", \"pttl\", \"set\", \"setex\", \"setnx\", \"ttl\"]\n)\n_MULTI_KEY_COMMANDS = frozenset([\"del\", \"touch\", \"unlink\"])\n\n#: Trim argument lists to this many values\n_MAX_NUM_ARGS = 10\n\n\ndef patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn):\n # type: (Any, bool, Any) -> None\n old_execute = pipeline_cls.execute\n\n def sentry_patched_execute(self, *args, **kwargs):\n # type: (Any, *Any, **Any) -> Any\n hub = Hub.current\n\n if hub.get_integration(RedisIntegration) is None:\n return old_execute(self, *args, **kwargs)\n\n with hub.start_span(op=\"redis\", description=\"redis.pipeline.execute\") as span:\n with capture_internal_exceptions():\n span.set_tag(\"redis.is_cluster\", is_cluster)\n transaction = self.transaction if not is_cluster else False\n span.set_tag(\"redis.transaction\", transaction)\n\n commands = []\n for i, arg in enumerate(self.command_stack):\n if i > _MAX_NUM_ARGS:\n break\n command_args = []\n for j, command_arg in enumerate(get_command_args_fn(arg)):\n if j > 0:\n command_arg = repr(command_arg)\n command_args.append(command_arg)\n commands.append(\" \".join(command_args))\n\n span.set_data(\n \"redis.commands\",\n {\"count\": len(self.command_stack), \"first_ten\": commands},\n )\n\n return old_execute(self, *args, **kwargs)\n\n pipeline_cls.execute = sentry_patched_execute\n\n\ndef _get_redis_command_args(command):\n # type: (Any) -> Sequence[Any]\n return command[0]\n\n\ndef _parse_rediscluster_command(command):\n # type: (Any) -> Sequence[Any]\n return command.args\n\n\ndef _patch_rediscluster():\n # type: () -> None\n try:\n import rediscluster # type: ignore\n except ImportError:\n return\n\n patch_redis_client(rediscluster.RedisCluster, is_cluster=True)\n\n # up to v1.3.6, __version__ attribute is a tuple\n # from v2.0.0, __version__ is a string and VERSION a tuple\n version = getattr(rediscluster, \"VERSION\", rediscluster.__version__)\n\n # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0\n # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst\n if (0, 2, 0) < version < (2, 0, 0):\n pipeline_cls = rediscluster.StrictClusterPipeline\n patch_redis_client(rediscluster.StrictRedisCluster, is_cluster=True)\n else:\n pipeline_cls = rediscluster.ClusterPipeline\n\n patch_redis_pipeline(pipeline_cls, True, _parse_rediscluster_command)\n\n\nclass RedisIntegration(Integration):\n identifier = \"redis\"\n\n @staticmethod\n def setup_once():\n # type: () -> None\n try:\n import redis\n except ImportError:\n raise DidNotEnable(\"Redis client not installed\")\n\n patch_redis_client(redis.StrictRedis, is_cluster=False)\n patch_redis_pipeline(redis.client.Pipeline, False, _get_redis_command_args)\n try:\n strict_pipeline = 
redis.client.StrictPipeline # type: ignore\n except AttributeError:\n pass\n else:\n patch_redis_pipeline(strict_pipeline, False, _get_redis_command_args)\n\n try:\n import rb.clients # type: ignore\n except ImportError:\n pass\n else:\n patch_redis_client(rb.clients.FanoutClient, is_cluster=False)\n patch_redis_client(rb.clients.MappingClient, is_cluster=False)\n patch_redis_client(rb.clients.RoutingClient, is_cluster=False)\n\n try:\n _patch_rediscluster()\n except Exception:\n logger.exception(\"Error occurred while patching `rediscluster` library\")\n\n\ndef patch_redis_client(cls, is_cluster):\n # type: (Any, bool) -> None\n \"\"\"\n This function can be used to instrument custom redis client classes or\n subclasses.\n \"\"\"\n old_execute_command = cls.execute_command\n\n def sentry_patched_execute_command(self, name, *args, **kwargs):\n # type: (Any, str, *Any, **Any) -> Any\n hub = Hub.current\n\n if hub.get_integration(RedisIntegration) is None:\n return old_execute_command(self, name, *args, **kwargs)\n\n description = name\n\n with capture_internal_exceptions():\n description_parts = [name]\n for i, arg in enumerate(args):\n if i > _MAX_NUM_ARGS:\n break\n\n description_parts.append(repr(arg))\n\n description = \" \".join(description_parts)\n\n with hub.start_span(op=\"redis\", description=description) as span:\n span.set_tag(\"redis.is_cluster\", is_cluster)\n if name:\n span.set_tag(\"redis.command\", name)\n\n if name and args:\n name_low = name.lower()\n if (name_low in _SINGLE_KEY_COMMANDS) or (\n name_low in _MULTI_KEY_COMMANDS and len(args) == 1\n ):\n span.set_tag(\"redis.key\", args[0])\n\n return old_execute_command(self, name, *args, **kwargs)\n\n cls.execute_command = sentry_patched_execute_command\n", "path": "sentry_sdk/integrations/redis.py"}]}
| 2,347 | 109 |
gh_patches_debug_6181
|
rasdani/github-patches
|
git_diff
|
scrapy__scrapy-2816
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
DNSCACHE_ENABLED=False not working
Originally reported by @softwarevamp on [StackOverflow](https://stackoverflow.com/questions/44877296/scrapy-with-dnscache-enabled-false-not-working):
> When I run scrapy shell with `DNSCACHE_ENABLED=False` I got
```
KeyError: 'dictionary is empty'
twisted.internet.error.DNSLookupError: DNS lookup failed: no results for hostname lookup: www.mydomain.com.
```
```
2017-07-03 03:09:12 [twisted] CRITICAL: while looking up www.mydomain.com with <scrapy.resolver.CachingThreadedResolver object at 0x3fd0050>
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/twisted/internet/defer.py", line 653, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/usr/lib64/python2.7/site-packages/scrapy/resolver.py", line 29, in _cache_result
dnscache[name] = result
File "/usr/lib64/python2.7/site-packages/scrapy/utils/datatypes.py", line 305, in __setitem__
self.popitem(last=False)
File "/usr/lib64/python2.7/collections.py", line 159, in popitem
raise KeyError('dictionary is empty')
KeyError: 'dictionary is empty'
2017-07-03 03:09:12 [scrapy.downloadermiddlewares.retry] DEBUG: Gave up retrying <GET //www.mydomain.com/> (failed 3 times): DNS lookup failed: no results for hostname lookup: www.mydomain.com.
Traceback (most recent call last):
File "/usr/bin/scrapy", line 11, in <module>
sys.exit(execute())
File "/usr/lib64/python2.7/site-packages/scrapy/cmdline.py", line 149, in execute
_run_print_help(parser, _run_command, cmd, args, opts)
File "/usr/lib64/python2.7/site-packages/scrapy/cmdline.py", line 89, in _run_print_help
func(*a, **kw)
File "/usr/lib64/python2.7/site-packages/scrapy/cmdline.py", line 156, in _run_command
cmd.run(args, opts)
File "/usr/lib64/python2.7/site-packages/scrapy/commands/shell.py", line 73, in run
shell.start(url=url, redirect=not opts.no_redirect)
File "/usr/lib64/python2.7/site-packages/scrapy/shell.py", line 48, in start
self.fetch(url, spider, redirect=redirect)
File "/usr/lib64/python2.7/site-packages/scrapy/shell.py", line 115, in fetch
reactor, self._schedule, request, spider)
File "/usr/lib64/python2.7/site-packages/twisted/internet/threads.py", line 122, in blockingCallFromThread
result.raiseException()
File "<string>", line 2, in raiseException
twisted.internet.error.DNSLookupError: DNS lookup failed: no results for hostname lookup: www.mydomain.com.
```
> Any thoughts welcome
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scrapy/resolver.py`
Content:
```
1 from twisted.internet import defer
2 from twisted.internet.base import ThreadedResolver
3
4 from scrapy.utils.datatypes import LocalCache
5
6 # TODO: cache misses
7
8 dnscache = LocalCache(10000)
9
10 class CachingThreadedResolver(ThreadedResolver):
11 def __init__(self, reactor, cache_size, timeout):
12 super(CachingThreadedResolver, self).__init__(reactor)
13 dnscache.limit = cache_size
14 self.timeout = timeout
15
16 def getHostByName(self, name, timeout=None):
17 if name in dnscache:
18 return defer.succeed(dnscache[name])
19 # in Twisted<=16.6, getHostByName() is always called with
20 # a default timeout of 60s (actually passed as (1, 3, 11, 45) tuple),
21 # so the input argument above is simply overridden
22 # to enforce Scrapy's DNS_TIMEOUT setting's value
23 timeout = (self.timeout,)
24 d = super(CachingThreadedResolver, self).getHostByName(name, timeout)
25 d.addCallback(self._cache_result, name)
26 return d
27
28 def _cache_result(self, result, name):
29 dnscache[name] = result
30 return result
31
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/scrapy/resolver.py b/scrapy/resolver.py
--- a/scrapy/resolver.py
+++ b/scrapy/resolver.py
@@ -22,7 +22,8 @@
# to enforce Scrapy's DNS_TIMEOUT setting's value
timeout = (self.timeout,)
d = super(CachingThreadedResolver, self).getHostByName(name, timeout)
- d.addCallback(self._cache_result, name)
+ if dnscache.limit:
+ d.addCallback(self._cache_result, name)
return d
def _cache_result(self, result, name):
|
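The traceback in this record comes from the resolver's `LocalCache` being created with `limit=0` when `DNSCACHE_ENABLED=False`, so the first cached lookup tries to evict from an empty `OrderedDict`. The self-contained sketch below (simplified, for illustration only) mirrors the relevant part of `scrapy.utils.datatypes.LocalCache` and reproduces the `KeyError`; the patch above sidesteps it by not registering the caching callback when `dnscache.limit` is falsy:

```python
# Minimal reproduction, mirroring scrapy.utils.datatypes.LocalCache behaviour.
from collections import OrderedDict

class LocalCache(OrderedDict):
    """Dictionary with a maximum number of keys (simplified)."""
    def __init__(self, limit=None):
        super().__init__()
        self.limit = limit

    def __setitem__(self, key, value):
        while len(self) >= self.limit:       # with limit == 0 this pops from an empty dict
            self.popitem(last=False)
        super().__setitem__(key, value)

dnscache = LocalCache(limit=0)               # DNSCACHE_ENABLED=False -> cache_size 0
try:
    dnscache["www.mydomain.com"] = "93.184.216.34"
except KeyError as exc:
    print("reproduced:", exc)                # KeyError: 'dictionary is empty'
```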
{"golden_diff": "diff --git a/scrapy/resolver.py b/scrapy/resolver.py\n--- a/scrapy/resolver.py\n+++ b/scrapy/resolver.py\n@@ -22,7 +22,8 @@\n # to enforce Scrapy's DNS_TIMEOUT setting's value\n timeout = (self.timeout,)\n d = super(CachingThreadedResolver, self).getHostByName(name, timeout)\n- d.addCallback(self._cache_result, name)\n+ if dnscache.limit:\n+ d.addCallback(self._cache_result, name)\n return d\n \n def _cache_result(self, result, name):\n", "issue": "DNSCACHE_ENABLED=False not working\nOriginally reported by @softwarevamp on [StackOverflow](https://stackoverflow.com/questions/44877296/scrapy-with-dnscache-enabled-false-not-working):\r\n\r\n> When i run scrapy shell with `DNSCACHE_ENABLED=False` got\r\n```\r\nKeyError: 'dictionary is empty'\r\ntwisted.internet.error.DNSLookupError: DNS lookup failed: no results for hostname lookup: www.mydomain.com.\r\n```\r\n\r\n```\r\n 2017-07-03 03:09:12 [twisted] CRITICAL: while looking up www.mydomain.com with <scrapy.resolver.CachingThreadedResolver object at 0x3fd0050>\r\n Traceback (most recent call last):\r\n File \"/usr/lib64/python2.7/site-packages/twisted/internet/defer.py\", line 653, in _runCallbacks\r\n current.result = callback(current.result, *args, **kw)\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/resolver.py\", line 29, in _cache_result\r\n dnscache[name] = result\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/utils/datatypes.py\", line 305, in __setitem__\r\n self.popitem(last=False)\r\n File \"/usr/lib64/python2.7/collections.py\", line 159, in popitem\r\n raise KeyError('dictionary is empty')\r\n KeyError: 'dictionary is empty'\r\n 2017-07-03 03:09:12 [scrapy.downloadermiddlewares.retry] DEBUG: Gave up retrying <GET //www.mydomain.com/> (failed 3 times): DNS lookup failed: no results for hostname lookup: www.mydomain.com.\r\n Traceback (most recent call last):\r\n File \"/usr/bin/scrapy\", line 11, in <module>\r\n sys.exit(execute())\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/cmdline.py\", line 149, in execute\r\n _run_print_help(parser, _run_command, cmd, args, opts)\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/cmdline.py\", line 89, in _run_print_help\r\n func(*a, **kw)\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/cmdline.py\", line 156, in _run_command\r\n cmd.run(args, opts)\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/commands/shell.py\", line 73, in run\r\n shell.start(url=url, redirect=not opts.no_redirect)\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/shell.py\", line 48, in start\r\n self.fetch(url, spider, redirect=redirect)\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/shell.py\", line 115, in fetch\r\n reactor, self._schedule, request, spider)\r\n File \"/usr/lib64/python2.7/site-packages/twisted/internet/threads.py\", line 122, in blockingCallFromThread\r\n result.raiseException()\r\n File \"<string>\", line 2, in raiseException\r\n twisted.internet.error.DNSLookupError: DNS lookup failed: no results for hostname lookup: www.mydomain.com.\r\n```\r\n\r\n> Any thoughts welcome\n", "before_files": [{"content": "from twisted.internet import defer\nfrom twisted.internet.base import ThreadedResolver\n\nfrom scrapy.utils.datatypes import LocalCache\n\n# TODO: cache misses\n\ndnscache = LocalCache(10000)\n\nclass CachingThreadedResolver(ThreadedResolver):\n def __init__(self, reactor, cache_size, timeout):\n super(CachingThreadedResolver, self).__init__(reactor)\n dnscache.limit = cache_size\n self.timeout = timeout\n\n def getHostByName(self, name, 
timeout=None):\n if name in dnscache:\n return defer.succeed(dnscache[name])\n # in Twisted<=16.6, getHostByName() is always called with\n # a default timeout of 60s (actually passed as (1, 3, 11, 45) tuple),\n # so the input argument above is simply overridden\n # to enforce Scrapy's DNS_TIMEOUT setting's value\n timeout = (self.timeout,)\n d = super(CachingThreadedResolver, self).getHostByName(name, timeout)\n d.addCallback(self._cache_result, name)\n return d\n\n def _cache_result(self, result, name):\n dnscache[name] = result\n return result\n", "path": "scrapy/resolver.py"}], "after_files": [{"content": "from twisted.internet import defer\nfrom twisted.internet.base import ThreadedResolver\n\nfrom scrapy.utils.datatypes import LocalCache\n\n# TODO: cache misses\n\ndnscache = LocalCache(10000)\n\nclass CachingThreadedResolver(ThreadedResolver):\n def __init__(self, reactor, cache_size, timeout):\n super(CachingThreadedResolver, self).__init__(reactor)\n dnscache.limit = cache_size\n self.timeout = timeout\n\n def getHostByName(self, name, timeout=None):\n if name in dnscache:\n return defer.succeed(dnscache[name])\n # in Twisted<=16.6, getHostByName() is always called with\n # a default timeout of 60s (actually passed as (1, 3, 11, 45) tuple),\n # so the input argument above is simply overridden\n # to enforce Scrapy's DNS_TIMEOUT setting's value\n timeout = (self.timeout,)\n d = super(CachingThreadedResolver, self).getHostByName(name, timeout)\n if dnscache.limit:\n d.addCallback(self._cache_result, name)\n return d\n\n def _cache_result(self, result, name):\n dnscache[name] = result\n return result\n", "path": "scrapy/resolver.py"}]}
| 1,334 | 130 |
gh_patches_debug_24499
|
rasdani/github-patches
|
git_diff
|
pre-commit__pre-commit-797
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
python_venv language fails to use python3 interpreter and is using python2.7 instead
Apparently pre-commit failed to use python3 interpreter when I tried to add a hook and thus failed because venv module was not installed on default python2.7!
```
$ pre-commit try-repo ../python-license-check [19:55:27]
[INFO] Initializing environment for ../python-license-check.
===============================================================================
Using config:
===============================================================================
repos:
- repo: ../python-license-check
rev: 4048cf3844dbbf45690c153a7da7f532585ec87c
hooks:
- id: liccheck
===============================================================================
[INFO] Installing environment for ../python-license-check.
[INFO] Once installed this environment will be reused.
[INFO] This may take a few minutes...
An unexpected error has occurred: CalledProcessError: Command: ('/Users/ssbarnea/.pyenv/versions/2.7.14/bin/python2.7', '-mvenv', '/var/folders/br/99tfdvcs3vvfwdk69z7f0xmc0000gn/T/tmpayl0P5/repoHa7_qe/py_venv-python2.7')
Return code: 1
Expected return code: 0
Output: (none)
Errors:
/Users/ssbarnea/.pyenv/versions/2.7.14/bin/python2.7: No module named venv
Check the log at /Users/ssbarnea/.cache/pre-commit/pre-commit.log
FAIL: 1
ssbarnea@smac: ~/os/jira master ⚡ $ cat ../python-license-check/.pre-commit-hooks.yaml [19:55:34]
- id: liccheck
name: Validates dependency licenses for Python packages
description: This validator validates a pre-commit hooks manifest file
entry: liccheck -s setup.cfg -r requirements.txt
language: python_venv
```
Based on the documentation I was expecting to see pre-commit using the `python3` executable for calling venv module.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pre_commit/languages/python_venv.py`
Content:
```
1 from __future__ import unicode_literals
2
3 import os.path
4
5 from pre_commit.languages import python
6 from pre_commit.util import CalledProcessError
7 from pre_commit.util import cmd_output
8
9
10 ENVIRONMENT_DIR = 'py_venv'
11
12
13 def orig_py_exe(exe): # pragma: no cover (platform specific)
14 """A -mvenv virtualenv made from a -mvirtualenv virtualenv installs
15 packages to the incorrect location. Attempt to find the _original_ exe
16 and invoke `-mvenv` from there.
17
18 See:
19 - https://github.com/pre-commit/pre-commit/issues/755
20 - https://github.com/pypa/virtualenv/issues/1095
21 - https://bugs.python.org/issue30811
22 """
23 try:
24 prefix_script = 'import sys; print(sys.real_prefix)'
25 _, prefix, _ = cmd_output(exe, '-c', prefix_script)
26 prefix = prefix.strip()
27 except CalledProcessError:
28 # not created from -mvirtualenv
29 return exe
30
31 if os.name == 'nt':
32 expected = os.path.join(prefix, 'python.exe')
33 else:
34 expected = os.path.join(prefix, 'bin', os.path.basename(exe))
35
36 if os.path.exists(expected):
37 return expected
38 else:
39 return exe
40
41
42 def make_venv(envdir, python):
43 cmd_output(orig_py_exe(python), '-mvenv', envdir, cwd='/')
44
45
46 get_default_version = python.get_default_version
47 _interface = python.py_interface(ENVIRONMENT_DIR, make_venv)
48 in_env, healthy, run_hook, install_environment = _interface
49
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pre_commit/languages/python_venv.py b/pre_commit/languages/python_venv.py
--- a/pre_commit/languages/python_venv.py
+++ b/pre_commit/languages/python_venv.py
@@ -1,6 +1,7 @@
from __future__ import unicode_literals
import os.path
+import sys
from pre_commit.languages import python
from pre_commit.util import CalledProcessError
@@ -10,6 +11,13 @@
ENVIRONMENT_DIR = 'py_venv'
+def get_default_version(): # pragma: no cover (version specific)
+ if sys.version_info < (3,):
+ return 'python3'
+ else:
+ return python.get_default_version()
+
+
def orig_py_exe(exe): # pragma: no cover (platform specific)
"""A -mvenv virtualenv made from a -mvirtualenv virtualenv installs
packages to the incorrect location. Attempt to find the _original_ exe
@@ -43,6 +51,5 @@
cmd_output(orig_py_exe(python), '-mvenv', envdir, cwd='/')
-get_default_version = python.get_default_version
_interface = python.py_interface(ENVIRONMENT_DIR, make_venv)
in_env, healthy, run_hook, install_environment = _interface
|
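The patch above makes the `python_venv` language fall back to a `python3` interpreter when pre-commit itself runs under Python 2, since the `venv` module only ships with Python 3. The stand-alone check below is not part of pre-commit and assumes both interpreters are on `PATH`; it just demonstrates the asymmetry behind the reported `No module named venv` error:

```python
# Stand-alone check (assumes python2.7 and python3 executables are on PATH).
import subprocess

for exe in ("python2.7", "python3"):
    try:
        result = subprocess.run([exe, "-c", "import venv"], capture_output=True)
        print(exe, "has venv:", result.returncode == 0)   # python2.7 -> False, python3 -> True
    except FileNotFoundError:
        print(exe, "not found on PATH")
```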
{"golden_diff": "diff --git a/pre_commit/languages/python_venv.py b/pre_commit/languages/python_venv.py\n--- a/pre_commit/languages/python_venv.py\n+++ b/pre_commit/languages/python_venv.py\n@@ -1,6 +1,7 @@\n from __future__ import unicode_literals\n \n import os.path\n+import sys\n \n from pre_commit.languages import python\n from pre_commit.util import CalledProcessError\n@@ -10,6 +11,13 @@\n ENVIRONMENT_DIR = 'py_venv'\n \n \n+def get_default_version(): # pragma: no cover (version specific)\n+ if sys.version_info < (3,):\n+ return 'python3'\n+ else:\n+ return python.get_default_version()\n+\n+\n def orig_py_exe(exe): # pragma: no cover (platform specific)\n \"\"\"A -mvenv virtualenv made from a -mvirtualenv virtualenv installs\n packages to the incorrect location. Attempt to find the _original_ exe\n@@ -43,6 +51,5 @@\n cmd_output(orig_py_exe(python), '-mvenv', envdir, cwd='/')\n \n \n-get_default_version = python.get_default_version\n _interface = python.py_interface(ENVIRONMENT_DIR, make_venv)\n in_env, healthy, run_hook, install_environment = _interface\n", "issue": "python_venv language fails to use python3 interpreter and is using python2.7 instead\nApparently pre-commit failed to use python3 interpreter when I tried to add a hook and thus failed because venv module was not installed on default python2.7!\r\n\r\n```\r\n$ pre-commit try-repo ../python-license-check [19:55:27]\r\n[INFO] Initializing environment for ../python-license-check.\r\n===============================================================================\r\nUsing config:\r\n===============================================================================\r\nrepos:\r\n- repo: ../python-license-check\r\n rev: 4048cf3844dbbf45690c153a7da7f532585ec87c\r\n hooks:\r\n - id: liccheck\r\n===============================================================================\r\n[INFO] Installing environment for ../python-license-check.\r\n[INFO] Once installed this environment will be reused.\r\n[INFO] This may take a few minutes...\r\nAn unexpected error has occurred: CalledProcessError: Command: ('/Users/ssbarnea/.pyenv/versions/2.7.14/bin/python2.7', '-mvenv', '/var/folders/br/99tfdvcs3vvfwdk69z7f0xmc0000gn/T/tmpayl0P5/repoHa7_qe/py_venv-python2.7')\r\nReturn code: 1\r\nExpected return code: 0\r\nOutput: (none)\r\nErrors:\r\n /Users/ssbarnea/.pyenv/versions/2.7.14/bin/python2.7: No module named venv\r\n\r\n\r\nCheck the log at /Users/ssbarnea/.cache/pre-commit/pre-commit.log\r\nFAIL: 1\r\nssbarnea@smac: ~/os/jira master \u26a1 $ cat ../python-license-check/.pre-commit-hooks.yaml [19:55:34]\r\n- id: liccheck\r\n name: Validates dependency licenses for Python packages\r\n description: This validator validates a pre-commit hooks manifest file\r\n entry: liccheck -s setup.cfg -r requirements.txt\r\n language: python_venv\r\n```\r\n\r\nBased on the documentation I was expecting to see pre-commit using the `python3` executable for calling venv module. \n", "before_files": [{"content": "from __future__ import unicode_literals\n\nimport os.path\n\nfrom pre_commit.languages import python\nfrom pre_commit.util import CalledProcessError\nfrom pre_commit.util import cmd_output\n\n\nENVIRONMENT_DIR = 'py_venv'\n\n\ndef orig_py_exe(exe): # pragma: no cover (platform specific)\n \"\"\"A -mvenv virtualenv made from a -mvirtualenv virtualenv installs\n packages to the incorrect location. 
Attempt to find the _original_ exe\n and invoke `-mvenv` from there.\n\n See:\n - https://github.com/pre-commit/pre-commit/issues/755\n - https://github.com/pypa/virtualenv/issues/1095\n - https://bugs.python.org/issue30811\n \"\"\"\n try:\n prefix_script = 'import sys; print(sys.real_prefix)'\n _, prefix, _ = cmd_output(exe, '-c', prefix_script)\n prefix = prefix.strip()\n except CalledProcessError:\n # not created from -mvirtualenv\n return exe\n\n if os.name == 'nt':\n expected = os.path.join(prefix, 'python.exe')\n else:\n expected = os.path.join(prefix, 'bin', os.path.basename(exe))\n\n if os.path.exists(expected):\n return expected\n else:\n return exe\n\n\ndef make_venv(envdir, python):\n cmd_output(orig_py_exe(python), '-mvenv', envdir, cwd='/')\n\n\nget_default_version = python.get_default_version\n_interface = python.py_interface(ENVIRONMENT_DIR, make_venv)\nin_env, healthy, run_hook, install_environment = _interface\n", "path": "pre_commit/languages/python_venv.py"}], "after_files": [{"content": "from __future__ import unicode_literals\n\nimport os.path\nimport sys\n\nfrom pre_commit.languages import python\nfrom pre_commit.util import CalledProcessError\nfrom pre_commit.util import cmd_output\n\n\nENVIRONMENT_DIR = 'py_venv'\n\n\ndef get_default_version(): # pragma: no cover (version specific)\n if sys.version_info < (3,):\n return 'python3'\n else:\n return python.get_default_version()\n\n\ndef orig_py_exe(exe): # pragma: no cover (platform specific)\n \"\"\"A -mvenv virtualenv made from a -mvirtualenv virtualenv installs\n packages to the incorrect location. Attempt to find the _original_ exe\n and invoke `-mvenv` from there.\n\n See:\n - https://github.com/pre-commit/pre-commit/issues/755\n - https://github.com/pypa/virtualenv/issues/1095\n - https://bugs.python.org/issue30811\n \"\"\"\n try:\n prefix_script = 'import sys; print(sys.real_prefix)'\n _, prefix, _ = cmd_output(exe, '-c', prefix_script)\n prefix = prefix.strip()\n except CalledProcessError:\n # not created from -mvirtualenv\n return exe\n\n if os.name == 'nt':\n expected = os.path.join(prefix, 'python.exe')\n else:\n expected = os.path.join(prefix, 'bin', os.path.basename(exe))\n\n if os.path.exists(expected):\n return expected\n else:\n return exe\n\n\ndef make_venv(envdir, python):\n cmd_output(orig_py_exe(python), '-mvenv', envdir, cwd='/')\n\n\n_interface = python.py_interface(ENVIRONMENT_DIR, make_venv)\nin_env, healthy, run_hook, install_environment = _interface\n", "path": "pre_commit/languages/python_venv.py"}]}
| 1,195 | 288 |
gh_patches_debug_56429
|
rasdani/github-patches
|
git_diff
|
pwr-Solaar__Solaar-1085
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Either documentation or behavior with a second Unifying Receiver - How to specify?
Arch Linux
solaar-git 1.0.5rc2.r0.g8d01e28-1
2021 Feb 20
Suppose you have, for instance, a second Unifying Receiver that you want to pair with a particular device, perhaps before moving the second receiver and the paired device to another machine.
To specify and pair that particular device with a specific Unifying Receiver, when there is more than one receiver, and consulting the embedded documentation, compare:
```
$ solaar pair --help
usage: solaar pair [-h] [receiver]
positional arguments:
receiver select a certain receiver when more than one is present
optional arguments:
-h, --help show this help message and exit
The Logitech Unifying Receiver supports up to 6 paired devices at the same time.
```
versus:
```
$ solaar --help
usage: solaar [-h] [-d] [-D PATH] [--restart-on-wake-up] [-w {show,hide,only}] [-b {regular,symbolic,solaar}] [-V] [--help-actions] ...
positional arguments:
{show,probe,config,pair,unpair}
optional actions to perform
optional arguments:
-h, --help show this help message and exit
-d, --debug print logging messages, for debugging purposes (may be repeated for extra verbosity)
-D PATH, --hidraw PATH
unifying receiver to use; the first detected receiver if unspecified. Example: /dev/hidraw2
--restart-on-wake-up restart Solaar on sleep wake-up (experimental)
-w {show,hide,only}, --window {show,hide,only}
start with window showing / hidden / only (no tray icon)
-b {regular,symbolic,solaar}, --battery-icons {regular,symbolic,solaar}
prefer regular battery / symbolic battery / solaar icons
-V, --version show program's version number and exit
--help-actions print help for the optional actions
```
Note that, currently, the first approach fails - assuming that the receiver is selected using its Serial number, found with `solaar show` - giving:
```
$ solaar pair xxxxxxxx
solaar: error: Traceback (most recent call last):
File "/usr/lib/python3.9/site-packages/solaar/cli/__init__.py", line 202, in run
m.run(c, args, _find_receiver, _find_device)
File "/usr/lib/python3.9/site-packages/solaar/cli/pair.py", line 35, in run
receiver = find_receiver(receiver_name)
TypeError: _find_receiver() missing 1 required positional argument: 'name'
```
Well, no, the "1 required positional argument" was not "missing", it was just not recognized. Using "Device path" as the name, instead of "Serial", produces the same failed result.
Instead, the second approach does work:
```
$ solaar -D /dev/hidraw0 pair
Pairing: turn your new device on (timing out in 20 seconds).
Paired device 1: Wireless Mouse M525 (M525) [4013:xxxxxxxx]
```
So, what is the preferred behavior? If the device path switch approach is preferred, then the `solaar pair --help` usage message should be revised. If, instead, the original `solaar pair [-h] [receiver]` approach is preferred, then that would suggest that the device path switch approach should be reverted. I expect that there was a good reason to implement the device path switch approach, which maybe implies that the original `solaar pair [-h] [receiver]` approach is now deprecated.
It just seems that `/usr/lib/python3.9/site-packages/solaar/cli/pair.py` is still expecting `find_receiver(receiver_name)`, so, a little confusing. I haven't tried to grok the code, but I do notice there is still:
```
/usr/lib/python3.9/site-packages/solaar/cli/__init__.py
def _find_receiver(receivers, name):
assert receivers
assert name
for r in receivers:
if name in r.name.lower() or (r.serial is not None and name == r.serial.lower()):
return r
```
Maybe someone can take a look at all this and see what is going on?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lib/solaar/cli/pair.py`
Content:
```
1 # -*- python-mode -*-
2 # -*- coding: UTF-8 -*-
3
4 ## Copyright (C) 2012-2013 Daniel Pavel
5 ##
6 ## This program is free software; you can redistribute it and/or modify
7 ## it under the terms of the GNU General Public License as published by
8 ## the Free Software Foundation; either version 2 of the License, or
9 ## (at your option) any later version.
10 ##
11 ## This program is distributed in the hope that it will be useful,
12 ## but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ## GNU General Public License for more details.
15 ##
16 ## You should have received a copy of the GNU General Public License along
17 ## with this program; if not, write to the Free Software Foundation, Inc.,
18 ## 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20 from __future__ import absolute_import, division, print_function, unicode_literals
21
22 from time import time as _timestamp
23
24 from logitech_receiver import base as _base
25 from logitech_receiver import hidpp10 as _hidpp10
26 from logitech_receiver import notifications as _notifications
27 from logitech_receiver import status as _status
28
29
30 def run(receivers, args, find_receiver, _ignore):
31 assert receivers
32
33 if args.receiver:
34 receiver_name = args.receiver.lower()
35 receiver = find_receiver(receiver_name)
36 if not receiver:
37 raise Exception("no receiver found matching '%s'" % receiver_name)
38 else:
39 receiver = receivers[0]
40
41 assert receiver
42 receiver.status = _status.ReceiverStatus(receiver, lambda *args, **kwargs: None)
43
44 # check if it's necessary to set the notification flags
45 old_notification_flags = _hidpp10.get_notification_flags(receiver) or 0
46 if not (old_notification_flags & _hidpp10.NOTIFICATION_FLAG.wireless):
47 _hidpp10.set_notification_flags(receiver, old_notification_flags | _hidpp10.NOTIFICATION_FLAG.wireless)
48
49 # get all current devices
50 known_devices = [dev.number for dev in receiver]
51
52 class _HandleWithNotificationHook(int):
53 def notifications_hook(self, n):
54 nonlocal known_devices
55 assert n
56 if n.devnumber == 0xFF:
57 _notifications.process(receiver, n)
58 elif n.sub_id == 0x41 and len(n.data) == _base._SHORT_MESSAGE_SIZE - 4:
59 kd, known_devices = known_devices, None # only process one connection notification
60 if kd is not None:
61 if n.devnumber not in kd:
62 receiver.status.new_device = receiver.register_new_device(n.devnumber, n)
63 elif receiver.re_pairs:
64 del receiver[n.devnumber] # get rid of information on device re-paired away
65 receiver.status.new_device = receiver.register_new_device(n.devnumber, n)
66
67 timeout = 20 # seconds
68 receiver.handle = _HandleWithNotificationHook(receiver.handle)
69
70 receiver.set_lock(False, timeout=timeout)
71 print('Pairing: turn your new device on (timing out in', timeout, 'seconds).')
72
73 # the lock-open notification may come slightly later, wait for it a bit
74 pairing_start = _timestamp()
75 patience = 5 # seconds
76
77 while receiver.status.lock_open or _timestamp() - pairing_start < patience:
78 n = _base.read(receiver.handle)
79 if n:
80 n = _base.make_notification(*n)
81 if n:
82 receiver.handle.notifications_hook(n)
83
84 if not (old_notification_flags & _hidpp10.NOTIFICATION_FLAG.wireless):
85 # only clear the flags if they weren't set before, otherwise a
86 # concurrently running Solaar app might stop working properly
87 _hidpp10.set_notification_flags(receiver, old_notification_flags)
88
89 if receiver.status.new_device:
90 dev = receiver.status.new_device
91 print('Paired device %d: %s (%s) [%s:%s]' % (dev.number, dev.name, dev.codename, dev.wpid, dev.serial))
92 else:
93 error = receiver.status.get(_status.KEYS.ERROR)
94 if error:
95 raise Exception('pairing failed: %s' % error)
96 else:
97 print('Paired a device') # this is better than an error
98
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/lib/solaar/cli/pair.py b/lib/solaar/cli/pair.py
--- a/lib/solaar/cli/pair.py
+++ b/lib/solaar/cli/pair.py
@@ -32,7 +32,7 @@
if args.receiver:
receiver_name = args.receiver.lower()
- receiver = find_receiver(receiver_name)
+ receiver = find_receiver(receivers, receiver_name)
if not receiver:
raise Exception("no receiver found matching '%s'" % receiver_name)
else:
|
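The failure in this record is a plain arity mismatch: `_find_receiver(receivers, name)` takes two positional arguments, but `pair.py` passed only the name, and the patch threads `receivers` through. A toy reproduction follows (names assumed, loosely mirroring `solaar/cli/__init__.py`; the receiver objects are stand-in dicts, not Solaar classes):

```python
# Toy reproduction only; receivers are stand-in dicts rather than Solaar objects.
def _find_receiver(receivers, name):
    for r in receivers:
        if name in r["name"].lower() or name == r["serial"].lower():
            return r

receivers = [{"name": "Unifying Receiver", "serial": "ABCD1234"}]

try:
    _find_receiver("abcd1234")                 # old pair.py call: one argument only
except TypeError as exc:
    print(exc)                                 # ... missing 1 required positional argument: 'name'

print(_find_receiver(receivers, "abcd1234"))   # patched call: the receiver is found
```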
{"golden_diff": "diff --git a/lib/solaar/cli/pair.py b/lib/solaar/cli/pair.py\n--- a/lib/solaar/cli/pair.py\n+++ b/lib/solaar/cli/pair.py\n@@ -32,7 +32,7 @@\n \n if args.receiver:\n receiver_name = args.receiver.lower()\n- receiver = find_receiver(receiver_name)\n+ receiver = find_receiver(receivers, receiver_name)\n if not receiver:\n raise Exception(\"no receiver found matching '%s'\" % receiver_name)\n else:\n", "issue": "Either documentation or behavior with a second Unifying Receiver - How to specify?\nArch Linux\r\nsolaar-git 1.0.5rc2.r0.g8d01e28-1\r\n2021 Feb 20\r\n\r\nSuppose you have, for instance, a second Unifying Receiver that you want to pair with a particular device, perhaps before moving the second receiver and the paired device to another machine.\r\n\r\nTo specify and pair that particular device with a specific Unifying Receiver, when there is more than one receiver, and consulting the embedded documentation, compare:\r\n```\r\n$ solaar pair --help\r\nusage: solaar pair [-h] [receiver]\r\n\r\npositional arguments:\r\n receiver select a certain receiver when more than one is present\r\n\r\noptional arguments:\r\n -h, --help show this help message and exit\r\n\r\nThe Logitech Unifying Receiver supports up to 6 paired devices at the same time.1 Feb 20\r\n```\r\nversus:\r\n```\r\n$ solaar --help\r\nusage: solaar [-h] [-d] [-D PATH] [--restart-on-wake-up] [-w {show,hide,only}] [-b {regular,symbolic,solaar}] [-V] [--help-actions] ...\r\n\r\npositional arguments:\r\n {show,probe,config,pair,unpair}\r\n optional actions to perform\r\n\r\noptional arguments:\r\n -h, --help show this help message and exit\r\n -d, --debug print logging messages, for debugging purposes (may be repeated for extra verbosity)\r\n -D PATH, --hidraw PATH\r\n unifying receiver to use; the first detected receiver if unspecified. Example: /dev/hidraw2\r\n --restart-on-wake-up restart Solaar on sleep wake-up (experimental)\r\n -w {show,hide,only}, --window {show,hide,only}\r\n start with window showing / hidden / only (no tray icon)\r\n -b {regular,symbolic,solaar}, --battery-icons {regular,symbolic,solaar}\r\n prefer regular battery / symbolic battery / solaar icons\r\n -V, --version show program's version number and exit\r\n --help-actions print help for the optional actions\r\n```\r\n\r\nNote that, currently, the first approach fails - assuming that the receiver is selected using its Serial number, found with `solaar show` - giving:\r\n```\r\n$ solaar pair xxxxxxxx\r\nsolaar: error: Traceback (most recent call last):\r\n File \"/usr/lib/python3.9/site-packages/solaar/cli/__init__.py\", line 202, in run\r\n m.run(c, args, _find_receiver, _find_device)\r\n File \"/usr/lib/python3.9/site-packages/solaar/cli/pair.py\", line 35, in run\r\n receiver = find_receiver(receiver_name)\r\nTypeError: _find_receiver() missing 1 required positional argument: 'name'\r\n```\r\nWell, no, the \"1 required positional argument\" was not \"missing\", it was just not recognized. Using \"Device path\" as the name, instead of \"Serial\", produces the same failed result.\r\n\r\nInstead, the second approach does work:\r\n```\r\n$ solaar -D /dev/hidraw0 pair\r\nPairing: turn your new device on (timing out in 20 seconds).\r\nPaired device 1: Wireless Mouse M525 (M525) [4013:xxxxxxxx]\r\n```\r\n\r\nSo, what is the preferred behavior? If the device path switch approach is preferred, then the `solaar pair --help` usage message should be revised. 
If, instead, the original `solaar pair [-h] [receiver]` approach is preferred, then that would suggest that the device path switch approach should be reverted. I expect that there was a good reason to implement the device path switch approach, which maybe implies that the original `solaar pair [-h] [receiver]` approach is now deprecated.\r\n\r\nIt just seems that `/usr/lib/python3.9/site-packages/solaar/cli/pair.py` is still expecting `find_receiver(receiver_name)`, so, a little confusing. I haven't tried to grok the code, but I do notice there is still:\r\n```\r\n/usr/lib/python3.9/site-packages/solaar/cli/__init__.py\r\ndef _find_receiver(receivers, name):\r\n assert receivers\r\n assert name\r\n\r\n for r in receivers:\r\n if name in r.name.lower() or (r.serial is not None and name == r.serial.lower()):\r\n return r\r\n```\r\n\r\nMaybe someone can take a look at all this and see what is going on?\n", "before_files": [{"content": "# -*- python-mode -*-\n# -*- coding: UTF-8 -*-\n\n## Copyright (C) 2012-2013 Daniel Pavel\n##\n## This program is free software; you can redistribute it and/or modify\n## it under the terms of the GNU General Public License as published by\n## the Free Software Foundation; either version 2 of the License, or\n## (at your option) any later version.\n##\n## This program is distributed in the hope that it will be useful,\n## but WITHOUT ANY WARRANTY; without even the implied warranty of\n## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n## GNU General Public License for more details.\n##\n## You should have received a copy of the GNU General Public License along\n## with this program; if not, write to the Free Software Foundation, Inc.,\n## 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom time import time as _timestamp\n\nfrom logitech_receiver import base as _base\nfrom logitech_receiver import hidpp10 as _hidpp10\nfrom logitech_receiver import notifications as _notifications\nfrom logitech_receiver import status as _status\n\n\ndef run(receivers, args, find_receiver, _ignore):\n assert receivers\n\n if args.receiver:\n receiver_name = args.receiver.lower()\n receiver = find_receiver(receiver_name)\n if not receiver:\n raise Exception(\"no receiver found matching '%s'\" % receiver_name)\n else:\n receiver = receivers[0]\n\n assert receiver\n receiver.status = _status.ReceiverStatus(receiver, lambda *args, **kwargs: None)\n\n # check if it's necessary to set the notification flags\n old_notification_flags = _hidpp10.get_notification_flags(receiver) or 0\n if not (old_notification_flags & _hidpp10.NOTIFICATION_FLAG.wireless):\n _hidpp10.set_notification_flags(receiver, old_notification_flags | _hidpp10.NOTIFICATION_FLAG.wireless)\n\n # get all current devices\n known_devices = [dev.number for dev in receiver]\n\n class _HandleWithNotificationHook(int):\n def notifications_hook(self, n):\n nonlocal known_devices\n assert n\n if n.devnumber == 0xFF:\n _notifications.process(receiver, n)\n elif n.sub_id == 0x41 and len(n.data) == _base._SHORT_MESSAGE_SIZE - 4:\n kd, known_devices = known_devices, None # only process one connection notification\n if kd is not None:\n if n.devnumber not in kd:\n receiver.status.new_device = receiver.register_new_device(n.devnumber, n)\n elif receiver.re_pairs:\n del receiver[n.devnumber] # get rid of information on device re-paired away\n receiver.status.new_device = receiver.register_new_device(n.devnumber, n)\n\n 
timeout = 20 # seconds\n receiver.handle = _HandleWithNotificationHook(receiver.handle)\n\n receiver.set_lock(False, timeout=timeout)\n print('Pairing: turn your new device on (timing out in', timeout, 'seconds).')\n\n # the lock-open notification may come slightly later, wait for it a bit\n pairing_start = _timestamp()\n patience = 5 # seconds\n\n while receiver.status.lock_open or _timestamp() - pairing_start < patience:\n n = _base.read(receiver.handle)\n if n:\n n = _base.make_notification(*n)\n if n:\n receiver.handle.notifications_hook(n)\n\n if not (old_notification_flags & _hidpp10.NOTIFICATION_FLAG.wireless):\n # only clear the flags if they weren't set before, otherwise a\n # concurrently running Solaar app might stop working properly\n _hidpp10.set_notification_flags(receiver, old_notification_flags)\n\n if receiver.status.new_device:\n dev = receiver.status.new_device\n print('Paired device %d: %s (%s) [%s:%s]' % (dev.number, dev.name, dev.codename, dev.wpid, dev.serial))\n else:\n error = receiver.status.get(_status.KEYS.ERROR)\n if error:\n raise Exception('pairing failed: %s' % error)\n else:\n print('Paired a device') # this is better than an error\n", "path": "lib/solaar/cli/pair.py"}], "after_files": [{"content": "# -*- python-mode -*-\n# -*- coding: UTF-8 -*-\n\n## Copyright (C) 2012-2013 Daniel Pavel\n##\n## This program is free software; you can redistribute it and/or modify\n## it under the terms of the GNU General Public License as published by\n## the Free Software Foundation; either version 2 of the License, or\n## (at your option) any later version.\n##\n## This program is distributed in the hope that it will be useful,\n## but WITHOUT ANY WARRANTY; without even the implied warranty of\n## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n## GNU General Public License for more details.\n##\n## You should have received a copy of the GNU General Public License along\n## with this program; if not, write to the Free Software Foundation, Inc.,\n## 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom time import time as _timestamp\n\nfrom logitech_receiver import base as _base\nfrom logitech_receiver import hidpp10 as _hidpp10\nfrom logitech_receiver import notifications as _notifications\nfrom logitech_receiver import status as _status\n\n\ndef run(receivers, args, find_receiver, _ignore):\n assert receivers\n\n if args.receiver:\n receiver_name = args.receiver.lower()\n receiver = find_receiver(receivers, receiver_name)\n if not receiver:\n raise Exception(\"no receiver found matching '%s'\" % receiver_name)\n else:\n receiver = receivers[0]\n\n assert receiver\n receiver.status = _status.ReceiverStatus(receiver, lambda *args, **kwargs: None)\n\n # check if it's necessary to set the notification flags\n old_notification_flags = _hidpp10.get_notification_flags(receiver) or 0\n if not (old_notification_flags & _hidpp10.NOTIFICATION_FLAG.wireless):\n _hidpp10.set_notification_flags(receiver, old_notification_flags | _hidpp10.NOTIFICATION_FLAG.wireless)\n\n # get all current devices\n known_devices = [dev.number for dev in receiver]\n\n class _HandleWithNotificationHook(int):\n def notifications_hook(self, n):\n nonlocal known_devices\n assert n\n if n.devnumber == 0xFF:\n _notifications.process(receiver, n)\n elif n.sub_id == 0x41 and len(n.data) == _base._SHORT_MESSAGE_SIZE - 4:\n kd, known_devices = known_devices, None # only process one connection notification\n if kd is not None:\n if n.devnumber not in kd:\n receiver.status.new_device = receiver.register_new_device(n.devnumber, n)\n elif receiver.re_pairs:\n del receiver[n.devnumber] # get rid of information on device re-paired away\n receiver.status.new_device = receiver.register_new_device(n.devnumber, n)\n\n timeout = 20 # seconds\n receiver.handle = _HandleWithNotificationHook(receiver.handle)\n\n receiver.set_lock(False, timeout=timeout)\n print('Pairing: turn your new device on (timing out in', timeout, 'seconds).')\n\n # the lock-open notification may come slightly later, wait for it a bit\n pairing_start = _timestamp()\n patience = 5 # seconds\n\n while receiver.status.lock_open or _timestamp() - pairing_start < patience:\n n = _base.read(receiver.handle)\n if n:\n n = _base.make_notification(*n)\n if n:\n receiver.handle.notifications_hook(n)\n\n if not (old_notification_flags & _hidpp10.NOTIFICATION_FLAG.wireless):\n # only clear the flags if they weren't set before, otherwise a\n # concurrently running Solaar app might stop working properly\n _hidpp10.set_notification_flags(receiver, old_notification_flags)\n\n if receiver.status.new_device:\n dev = receiver.status.new_device\n print('Paired device %d: %s (%s) [%s:%s]' % (dev.number, dev.name, dev.codename, dev.wpid, dev.serial))\n else:\n error = receiver.status.get(_status.KEYS.ERROR)\n if error:\n raise Exception('pairing failed: %s' % error)\n else:\n print('Paired a device') # this is better than an error\n", "path": "lib/solaar/cli/pair.py"}]}
| 2,388 | 115 |
gh_patches_debug_18504
|
rasdani/github-patches
|
git_diff
|
open-mmlab__mmdetection-2296
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ImportError: cannot import name 'CARAFENAIVE' from 'mmdet.ops.carafe'
The module name 'CARAFENAIVE' in file 'mmdet.ops.carafe.grad_check.py' shoud be 'CARAFENaive'. When I run this command 'python mmdet/ops/carafe/grad_check.py', the following error is reported: ImportError: cannot import name 'CARAFENAIVE' from 'mmdet.ops.carafe'.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mmdet/ops/carafe/grad_check.py`
Content:
```
1 import os.path as osp
2 import sys
3
4 import mmcv
5 import torch
6 from torch.autograd import gradcheck
7
8 sys.path.append(osp.abspath(osp.join(__file__, '../../')))
9 from mmdet.ops.carafe import CARAFENAIVE # noqa: E402, isort:skip
10 from mmdet.ops.carafe import carafe_naive # noqa: E402, isort:skip
11 from mmdet.ops.carafe import carafe, CARAFE # noqa: E402, isort:skip
12
13 feat = torch.randn(2, 64, 3, 3, requires_grad=True, device='cuda:0').double()
14 mask = torch.randn(
15 2, 100, 6, 6, requires_grad=True, device='cuda:0').sigmoid().double()
16
17 print('Gradcheck for carafe...')
18 test = gradcheck(CARAFE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)
19 print(test)
20
21 print('Gradcheck for carafe naive...')
22 test = gradcheck(CARAFENAIVE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)
23 print(test)
24
25 feat = torch.randn(
26 2, 1024, 100, 100, requires_grad=True, device='cuda:0').float()
27 mask = torch.randn(
28 2, 25, 200, 200, requires_grad=True, device='cuda:0').sigmoid().float()
29 loop_num = 500
30
31 time_forward = 0
32 time_backward = 0
33 bar = mmcv.ProgressBar(loop_num)
34 timer = mmcv.Timer()
35 for i in range(loop_num):
36 x = carafe(feat.clone(), mask.clone(), 5, 1, 2)
37 torch.cuda.synchronize()
38 time_forward += timer.since_last_check()
39 x.sum().backward(retain_graph=True)
40 torch.cuda.synchronize()
41 time_backward += timer.since_last_check()
42 bar.update()
43 print('\nCARAFE time forward: {} ms/iter | time backward: {} ms/iter'.format(
44 (time_forward + 1e-3) * 1e3 / loop_num,
45 (time_backward + 1e-3) * 1e3 / loop_num))
46
47 time_naive_forward = 0
48 time_naive_backward = 0
49 bar = mmcv.ProgressBar(loop_num)
50 timer = mmcv.Timer()
51 for i in range(loop_num):
52 x = carafe_naive(feat.clone(), mask.clone(), 5, 1, 2)
53 torch.cuda.synchronize()
54 time_naive_forward += timer.since_last_check()
55 x.sum().backward(retain_graph=True)
56 torch.cuda.synchronize()
57 time_naive_backward += timer.since_last_check()
58 bar.update()
59 print('\nCARAFE naive time forward: {} ms/iter | time backward: {} ms/iter'.
60 format((time_naive_forward + 1e-3) * 1e3 / loop_num,
61 (time_naive_backward + 1e-3) * 1e3 / loop_num))
62
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/mmdet/ops/carafe/grad_check.py b/mmdet/ops/carafe/grad_check.py
--- a/mmdet/ops/carafe/grad_check.py
+++ b/mmdet/ops/carafe/grad_check.py
@@ -6,9 +6,8 @@
from torch.autograd import gradcheck
sys.path.append(osp.abspath(osp.join(__file__, '../../')))
-from mmdet.ops.carafe import CARAFENAIVE # noqa: E402, isort:skip
-from mmdet.ops.carafe import carafe_naive # noqa: E402, isort:skip
-from mmdet.ops.carafe import carafe, CARAFE # noqa: E402, isort:skip
+from mmdet.ops.carafe import CARAFE, CARAFENaive # noqa: E402, isort:skip
+from mmdet.ops.carafe import carafe, carafe_naive # noqa: E402, isort:skip
feat = torch.randn(2, 64, 3, 3, requires_grad=True, device='cuda:0').double()
mask = torch.randn(
@@ -19,7 +18,7 @@
print(test)
print('Gradcheck for carafe naive...')
-test = gradcheck(CARAFENAIVE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)
+test = gradcheck(CARAFENaive(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)
print(test)
feat = torch.randn(
|
{"golden_diff": "diff --git a/mmdet/ops/carafe/grad_check.py b/mmdet/ops/carafe/grad_check.py\n--- a/mmdet/ops/carafe/grad_check.py\n+++ b/mmdet/ops/carafe/grad_check.py\n@@ -6,9 +6,8 @@\n from torch.autograd import gradcheck\n \n sys.path.append(osp.abspath(osp.join(__file__, '../../')))\n-from mmdet.ops.carafe import CARAFENAIVE # noqa: E402, isort:skip\n-from mmdet.ops.carafe import carafe_naive # noqa: E402, isort:skip\n-from mmdet.ops.carafe import carafe, CARAFE # noqa: E402, isort:skip\n+from mmdet.ops.carafe import CARAFE, CARAFENaive # noqa: E402, isort:skip\n+from mmdet.ops.carafe import carafe, carafe_naive # noqa: E402, isort:skip\n \n feat = torch.randn(2, 64, 3, 3, requires_grad=True, device='cuda:0').double()\n mask = torch.randn(\n@@ -19,7 +18,7 @@\n print(test)\n \n print('Gradcheck for carafe naive...')\n-test = gradcheck(CARAFENAIVE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)\n+test = gradcheck(CARAFENaive(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)\n print(test)\n \n feat = torch.randn(\n", "issue": "ImportError: cannot import name 'CARAFENAIVE' from 'mmdet.ops.carafe'\nThe module name 'CARAFENAIVE' in file 'mmdet.ops.carafe.grad_check.py' shoud be 'CARAFENaive'. When I run this command 'python mmdet/ops/carafe/grad_check.py', the following error is reported: ImportError: cannot import name 'CARAFENAIVE' from 'mmdet.ops.carafe'.\n", "before_files": [{"content": "import os.path as osp\nimport sys\n\nimport mmcv\nimport torch\nfrom torch.autograd import gradcheck\n\nsys.path.append(osp.abspath(osp.join(__file__, '../../')))\nfrom mmdet.ops.carafe import CARAFENAIVE # noqa: E402, isort:skip\nfrom mmdet.ops.carafe import carafe_naive # noqa: E402, isort:skip\nfrom mmdet.ops.carafe import carafe, CARAFE # noqa: E402, isort:skip\n\nfeat = torch.randn(2, 64, 3, 3, requires_grad=True, device='cuda:0').double()\nmask = torch.randn(\n 2, 100, 6, 6, requires_grad=True, device='cuda:0').sigmoid().double()\n\nprint('Gradcheck for carafe...')\ntest = gradcheck(CARAFE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)\nprint(test)\n\nprint('Gradcheck for carafe naive...')\ntest = gradcheck(CARAFENAIVE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)\nprint(test)\n\nfeat = torch.randn(\n 2, 1024, 100, 100, requires_grad=True, device='cuda:0').float()\nmask = torch.randn(\n 2, 25, 200, 200, requires_grad=True, device='cuda:0').sigmoid().float()\nloop_num = 500\n\ntime_forward = 0\ntime_backward = 0\nbar = mmcv.ProgressBar(loop_num)\ntimer = mmcv.Timer()\nfor i in range(loop_num):\n x = carafe(feat.clone(), mask.clone(), 5, 1, 2)\n torch.cuda.synchronize()\n time_forward += timer.since_last_check()\n x.sum().backward(retain_graph=True)\n torch.cuda.synchronize()\n time_backward += timer.since_last_check()\n bar.update()\nprint('\\nCARAFE time forward: {} ms/iter | time backward: {} ms/iter'.format(\n (time_forward + 1e-3) * 1e3 / loop_num,\n (time_backward + 1e-3) * 1e3 / loop_num))\n\ntime_naive_forward = 0\ntime_naive_backward = 0\nbar = mmcv.ProgressBar(loop_num)\ntimer = mmcv.Timer()\nfor i in range(loop_num):\n x = carafe_naive(feat.clone(), mask.clone(), 5, 1, 2)\n torch.cuda.synchronize()\n time_naive_forward += timer.since_last_check()\n x.sum().backward(retain_graph=True)\n torch.cuda.synchronize()\n time_naive_backward += timer.since_last_check()\n bar.update()\nprint('\\nCARAFE naive time forward: {} ms/iter | time backward: {} ms/iter'.\n format((time_naive_forward + 1e-3) * 1e3 / loop_num,\n (time_naive_backward + 1e-3) * 1e3 / loop_num))\n", "path": 
"mmdet/ops/carafe/grad_check.py"}], "after_files": [{"content": "import os.path as osp\nimport sys\n\nimport mmcv\nimport torch\nfrom torch.autograd import gradcheck\n\nsys.path.append(osp.abspath(osp.join(__file__, '../../')))\nfrom mmdet.ops.carafe import CARAFE, CARAFENaive # noqa: E402, isort:skip\nfrom mmdet.ops.carafe import carafe, carafe_naive # noqa: E402, isort:skip\n\nfeat = torch.randn(2, 64, 3, 3, requires_grad=True, device='cuda:0').double()\nmask = torch.randn(\n 2, 100, 6, 6, requires_grad=True, device='cuda:0').sigmoid().double()\n\nprint('Gradcheck for carafe...')\ntest = gradcheck(CARAFE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)\nprint(test)\n\nprint('Gradcheck for carafe naive...')\ntest = gradcheck(CARAFENaive(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)\nprint(test)\n\nfeat = torch.randn(\n 2, 1024, 100, 100, requires_grad=True, device='cuda:0').float()\nmask = torch.randn(\n 2, 25, 200, 200, requires_grad=True, device='cuda:0').sigmoid().float()\nloop_num = 500\n\ntime_forward = 0\ntime_backward = 0\nbar = mmcv.ProgressBar(loop_num)\ntimer = mmcv.Timer()\nfor i in range(loop_num):\n x = carafe(feat.clone(), mask.clone(), 5, 1, 2)\n torch.cuda.synchronize()\n time_forward += timer.since_last_check()\n x.sum().backward(retain_graph=True)\n torch.cuda.synchronize()\n time_backward += timer.since_last_check()\n bar.update()\nprint('\\nCARAFE time forward: {} ms/iter | time backward: {} ms/iter'.format(\n (time_forward + 1e-3) * 1e3 / loop_num,\n (time_backward + 1e-3) * 1e3 / loop_num))\n\ntime_naive_forward = 0\ntime_naive_backward = 0\nbar = mmcv.ProgressBar(loop_num)\ntimer = mmcv.Timer()\nfor i in range(loop_num):\n x = carafe_naive(feat.clone(), mask.clone(), 5, 1, 2)\n torch.cuda.synchronize()\n time_naive_forward += timer.since_last_check()\n x.sum().backward(retain_graph=True)\n torch.cuda.synchronize()\n time_naive_backward += timer.since_last_check()\n bar.update()\nprint('\\nCARAFE naive time forward: {} ms/iter | time backward: {} ms/iter'.\n format((time_naive_forward + 1e-3) * 1e3 / loop_num,\n (time_naive_backward + 1e-3) * 1e3 / loop_num))\n", "path": "mmdet/ops/carafe/grad_check.py"}]}
| 1,191 | 381 |
gh_patches_debug_19435
|
rasdani/github-patches
|
git_diff
|
Pylons__pyramid-3457
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
pyramid.exceptions.ConfigurationConflictError: <exception str() failed>
**Describe the bug**
While building an app I caused an error who's traceback ending the the following line:
```
pyramid.exceptions.ConfigurationConflictError: <exception str() failed>
```
What caused the error in the first place was that I copied a python package containing my views to a new package called controllers and forgot to delete the original views package. I assume that the configurator failed while trying to commit the config.scan(). Since I couldn't find any information online about the above traceback message I assume that this is probably just some internal problem with Pyramid.
**To Reproduce**
download from: https://github.com/benkawecki/pypi/tree/error
after setting up run
```
pserve development.ini
```
**Expected behavior**
I expect there to be an error message.
**Screenshots**
Screenshot of the error message:
<img width="489" alt="screen shot 2019-01-15 at 10 02 44 pm" src="https://user-images.githubusercontent.com/39999125/51224413-c57eb800-1913-11e9-9e0f-b25878a479f5.png">
Screenshot of installed packages:
<img width="488" alt="screen shot 2019-01-15 at 10 24 42 pm" src="https://user-images.githubusercontent.com/39999125/51224563-8b61e600-1914-11e9-9b04-42936f94d4bd.png">
**Additional context**
I'm looking to help out in open-source more this year so if this is an easy fix I would love to see if I can do it!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/pyramid/exceptions.py`
Content:
```
1 from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPForbidden
2
3 NotFound = HTTPNotFound # bw compat
4 Forbidden = HTTPForbidden # bw compat
5
6 CR = '\n'
7
8
9 class BadCSRFOrigin(HTTPBadRequest):
10 """
11 This exception indicates the request has failed cross-site request forgery
12 origin validation.
13 """
14
15 title = "Bad CSRF Origin"
16 explanation = (
17 "Access is denied. This server can not verify that the origin or "
18 "referrer of your request matches the current site. Either your "
19 "browser supplied the wrong Origin or Referrer or it did not supply "
20 "one at all."
21 )
22
23
24 class BadCSRFToken(HTTPBadRequest):
25 """
26 This exception indicates the request has failed cross-site request
27 forgery token validation.
28 """
29
30 title = 'Bad CSRF Token'
31 explanation = (
32 'Access is denied. This server can not verify that your cross-site '
33 'request forgery token belongs to your login session. Either you '
34 'supplied the wrong cross-site request forgery token or your session '
35 'no longer exists. This may be due to session timeout or because '
36 'browser is not supplying the credentials required, as can happen '
37 'when the browser has cookies turned off.'
38 )
39
40
41 class PredicateMismatch(HTTPNotFound):
42 """
43 This exception is raised by multiviews when no view matches
44 all given predicates.
45
46 This exception subclasses the :class:`HTTPNotFound` exception for a
47 specific reason: if it reaches the main exception handler, it should
48 be treated as :class:`HTTPNotFound`` by any exception view
49 registrations. Thus, typically, this exception will not be seen
50 publicly.
51
52 However, this exception will be raised if the predicates of all
53 views configured to handle another exception context cannot be
54 successfully matched. For instance, if a view is configured to
55 handle a context of ``HTTPForbidden`` and the configured with
56 additional predicates, then :class:`PredicateMismatch` will be
57 raised if:
58
59 * An original view callable has raised :class:`HTTPForbidden` (thus
60 invoking an exception view); and
61 * The given request fails to match all predicates for said
62 exception view associated with :class:`HTTPForbidden`.
63
64 The same applies to any type of exception being handled by an
65 exception view.
66 """
67
68
69 class URLDecodeError(UnicodeDecodeError):
70 """
71 This exception is raised when :app:`Pyramid` cannot
72 successfully decode a URL or a URL path segment. This exception
73 behaves just like the Python builtin
74 :exc:`UnicodeDecodeError`. It is a subclass of the builtin
75 :exc:`UnicodeDecodeError` exception only for identity purposes,
76 mostly so an exception view can be registered when a URL cannot be
77 decoded.
78 """
79
80
81 class ConfigurationError(Exception):
82 """ Raised when inappropriate input values are supplied to an API
83 method of a :term:`Configurator`"""
84
85
86 class ConfigurationConflictError(ConfigurationError):
87 """ Raised when a configuration conflict is detected during action
88 processing"""
89
90 def __init__(self, conflicts):
91 self._conflicts = conflicts
92
93 def __str__(self):
94 r = ["Conflicting configuration actions"]
95 items = sorted(self._conflicts.items())
96 for discriminator, infos in items:
97 r.append(" For: %s" % (discriminator,))
98 for info in infos:
99 for line in str(info).rstrip().split(CR):
100 r.append(" " + line)
101
102 return CR.join(r)
103
104
105 class ConfigurationExecutionError(ConfigurationError):
106 """An error occurred during execution of a configuration action
107 """
108
109 def __init__(self, etype, evalue, info):
110 self.etype, self.evalue, self.info = etype, evalue, info
111
112 def __str__(self):
113 return "%s: %s\n in:\n %s" % (self.etype, self.evalue, self.info)
114
115
116 class CyclicDependencyError(Exception):
117 """ The exception raised when the Pyramid topological sorter detects a
118 cyclic dependency."""
119
120 def __init__(self, cycles):
121 self.cycles = cycles
122
123 def __str__(self):
124 L = []
125 cycles = self.cycles
126 for cycle in cycles:
127 dependent = cycle
128 dependees = cycles[cycle]
129 L.append('%r sorts before %r' % (dependent, dependees))
130 msg = 'Implicit ordering cycle:' + '; '.join(L)
131 return msg
132
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/pyramid/exceptions.py b/src/pyramid/exceptions.py
--- a/src/pyramid/exceptions.py
+++ b/src/pyramid/exceptions.py
@@ -3,8 +3,6 @@
NotFound = HTTPNotFound # bw compat
Forbidden = HTTPForbidden # bw compat
-CR = '\n'
-
class BadCSRFOrigin(HTTPBadRequest):
"""
@@ -92,14 +90,13 @@
def __str__(self):
r = ["Conflicting configuration actions"]
- items = sorted(self._conflicts.items())
- for discriminator, infos in items:
+ for discriminator, infos in self._conflicts.items():
r.append(" For: %s" % (discriminator,))
for info in infos:
- for line in str(info).rstrip().split(CR):
+ for line in str(info).rstrip().split('\n'):
r.append(" " + line)
- return CR.join(r)
+ return '\n'.join(r)
class ConfigurationExecutionError(ConfigurationError):
|
{"golden_diff": "diff --git a/src/pyramid/exceptions.py b/src/pyramid/exceptions.py\n--- a/src/pyramid/exceptions.py\n+++ b/src/pyramid/exceptions.py\n@@ -3,8 +3,6 @@\n NotFound = HTTPNotFound # bw compat\n Forbidden = HTTPForbidden # bw compat\n \n-CR = '\\n'\n-\n \n class BadCSRFOrigin(HTTPBadRequest):\n \"\"\"\n@@ -92,14 +90,13 @@\n \n def __str__(self):\n r = [\"Conflicting configuration actions\"]\n- items = sorted(self._conflicts.items())\n- for discriminator, infos in items:\n+ for discriminator, infos in self._conflicts.items():\n r.append(\" For: %s\" % (discriminator,))\n for info in infos:\n- for line in str(info).rstrip().split(CR):\n+ for line in str(info).rstrip().split('\\n'):\n r.append(\" \" + line)\n \n- return CR.join(r)\n+ return '\\n'.join(r)\n \n \n class ConfigurationExecutionError(ConfigurationError):\n", "issue": "pyramid.exceptions.ConfigurationConflictError: <exception str() failed>\n**Describe the bug**\r\nWhile building an app I caused an error who's traceback ending the the following line:\r\n```\r\npyramid.exceptions.ConfigurationConflictError: <exception str() failed>\r\n```\r\nWhat caused the error in the first place was that I copied a python package containing my views to a new package called controllers and forgot to delete the original views package. I assume that the configurator failed while trying to commit the config.scan(). Since I couldn't find any information online about the above traceback message I assume that this is probably just some internal problem with Pyramid. \r\n**To Reproduce**\r\ndownload from: https://github.com/benkawecki/pypi/tree/error\r\nafter setting up run \r\n``` \r\npserve development.ini\r\n```\r\n\r\n**Expected behavior**\r\nI expect there to be an error message. \r\n\r\n**Screenshots**\r\nScreenshot of the error message:\r\n<img width=\"489\" alt=\"screen shot 2019-01-15 at 10 02 44 pm\" src=\"https://user-images.githubusercontent.com/39999125/51224413-c57eb800-1913-11e9-9e0f-b25878a479f5.png\">\r\nScreenshot of installed packages:\r\n<img width=\"488\" alt=\"screen shot 2019-01-15 at 10 24 42 pm\" src=\"https://user-images.githubusercontent.com/39999125/51224563-8b61e600-1914-11e9-9b04-42936f94d4bd.png\">\r\n\r\n\r\n\r\n**Additional context**\r\nI'm looking to help out in open-source more this year so if this is an easy fix I would love to see if I can do it!\r\n\n", "before_files": [{"content": "from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPForbidden\n\nNotFound = HTTPNotFound # bw compat\nForbidden = HTTPForbidden # bw compat\n\nCR = '\\n'\n\n\nclass BadCSRFOrigin(HTTPBadRequest):\n \"\"\"\n This exception indicates the request has failed cross-site request forgery\n origin validation.\n \"\"\"\n\n title = \"Bad CSRF Origin\"\n explanation = (\n \"Access is denied. This server can not verify that the origin or \"\n \"referrer of your request matches the current site. Either your \"\n \"browser supplied the wrong Origin or Referrer or it did not supply \"\n \"one at all.\"\n )\n\n\nclass BadCSRFToken(HTTPBadRequest):\n \"\"\"\n This exception indicates the request has failed cross-site request\n forgery token validation.\n \"\"\"\n\n title = 'Bad CSRF Token'\n explanation = (\n 'Access is denied. This server can not verify that your cross-site '\n 'request forgery token belongs to your login session. Either you '\n 'supplied the wrong cross-site request forgery token or your session '\n 'no longer exists. 
This may be due to session timeout or because '\n 'browser is not supplying the credentials required, as can happen '\n 'when the browser has cookies turned off.'\n )\n\n\nclass PredicateMismatch(HTTPNotFound):\n \"\"\"\n This exception is raised by multiviews when no view matches\n all given predicates.\n\n This exception subclasses the :class:`HTTPNotFound` exception for a\n specific reason: if it reaches the main exception handler, it should\n be treated as :class:`HTTPNotFound`` by any exception view\n registrations. Thus, typically, this exception will not be seen\n publicly.\n\n However, this exception will be raised if the predicates of all\n views configured to handle another exception context cannot be\n successfully matched. For instance, if a view is configured to\n handle a context of ``HTTPForbidden`` and the configured with\n additional predicates, then :class:`PredicateMismatch` will be\n raised if:\n\n * An original view callable has raised :class:`HTTPForbidden` (thus\n invoking an exception view); and\n * The given request fails to match all predicates for said\n exception view associated with :class:`HTTPForbidden`.\n\n The same applies to any type of exception being handled by an\n exception view.\n \"\"\"\n\n\nclass URLDecodeError(UnicodeDecodeError):\n \"\"\"\n This exception is raised when :app:`Pyramid` cannot\n successfully decode a URL or a URL path segment. This exception\n behaves just like the Python builtin\n :exc:`UnicodeDecodeError`. It is a subclass of the builtin\n :exc:`UnicodeDecodeError` exception only for identity purposes,\n mostly so an exception view can be registered when a URL cannot be\n decoded.\n \"\"\"\n\n\nclass ConfigurationError(Exception):\n \"\"\" Raised when inappropriate input values are supplied to an API\n method of a :term:`Configurator`\"\"\"\n\n\nclass ConfigurationConflictError(ConfigurationError):\n \"\"\" Raised when a configuration conflict is detected during action\n processing\"\"\"\n\n def __init__(self, conflicts):\n self._conflicts = conflicts\n\n def __str__(self):\n r = [\"Conflicting configuration actions\"]\n items = sorted(self._conflicts.items())\n for discriminator, infos in items:\n r.append(\" For: %s\" % (discriminator,))\n for info in infos:\n for line in str(info).rstrip().split(CR):\n r.append(\" \" + line)\n\n return CR.join(r)\n\n\nclass ConfigurationExecutionError(ConfigurationError):\n \"\"\"An error occurred during execution of a configuration action\n \"\"\"\n\n def __init__(self, etype, evalue, info):\n self.etype, self.evalue, self.info = etype, evalue, info\n\n def __str__(self):\n return \"%s: %s\\n in:\\n %s\" % (self.etype, self.evalue, self.info)\n\n\nclass CyclicDependencyError(Exception):\n \"\"\" The exception raised when the Pyramid topological sorter detects a\n cyclic dependency.\"\"\"\n\n def __init__(self, cycles):\n self.cycles = cycles\n\n def __str__(self):\n L = []\n cycles = self.cycles\n for cycle in cycles:\n dependent = cycle\n dependees = cycles[cycle]\n L.append('%r sorts before %r' % (dependent, dependees))\n msg = 'Implicit ordering cycle:' + '; '.join(L)\n return msg\n", "path": "src/pyramid/exceptions.py"}], "after_files": [{"content": "from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPForbidden\n\nNotFound = HTTPNotFound # bw compat\nForbidden = HTTPForbidden # bw compat\n\n\nclass BadCSRFOrigin(HTTPBadRequest):\n \"\"\"\n This exception indicates the request has failed cross-site request forgery\n origin validation.\n \"\"\"\n\n title = \"Bad CSRF 
Origin\"\n explanation = (\n \"Access is denied. This server can not verify that the origin or \"\n \"referrer of your request matches the current site. Either your \"\n \"browser supplied the wrong Origin or Referrer or it did not supply \"\n \"one at all.\"\n )\n\n\nclass BadCSRFToken(HTTPBadRequest):\n \"\"\"\n This exception indicates the request has failed cross-site request\n forgery token validation.\n \"\"\"\n\n title = 'Bad CSRF Token'\n explanation = (\n 'Access is denied. This server can not verify that your cross-site '\n 'request forgery token belongs to your login session. Either you '\n 'supplied the wrong cross-site request forgery token or your session '\n 'no longer exists. This may be due to session timeout or because '\n 'browser is not supplying the credentials required, as can happen '\n 'when the browser has cookies turned off.'\n )\n\n\nclass PredicateMismatch(HTTPNotFound):\n \"\"\"\n This exception is raised by multiviews when no view matches\n all given predicates.\n\n This exception subclasses the :class:`HTTPNotFound` exception for a\n specific reason: if it reaches the main exception handler, it should\n be treated as :class:`HTTPNotFound`` by any exception view\n registrations. Thus, typically, this exception will not be seen\n publicly.\n\n However, this exception will be raised if the predicates of all\n views configured to handle another exception context cannot be\n successfully matched. For instance, if a view is configured to\n handle a context of ``HTTPForbidden`` and the configured with\n additional predicates, then :class:`PredicateMismatch` will be\n raised if:\n\n * An original view callable has raised :class:`HTTPForbidden` (thus\n invoking an exception view); and\n * The given request fails to match all predicates for said\n exception view associated with :class:`HTTPForbidden`.\n\n The same applies to any type of exception being handled by an\n exception view.\n \"\"\"\n\n\nclass URLDecodeError(UnicodeDecodeError):\n \"\"\"\n This exception is raised when :app:`Pyramid` cannot\n successfully decode a URL or a URL path segment. This exception\n behaves just like the Python builtin\n :exc:`UnicodeDecodeError`. 
It is a subclass of the builtin\n :exc:`UnicodeDecodeError` exception only for identity purposes,\n mostly so an exception view can be registered when a URL cannot be\n decoded.\n \"\"\"\n\n\nclass ConfigurationError(Exception):\n \"\"\" Raised when inappropriate input values are supplied to an API\n method of a :term:`Configurator`\"\"\"\n\n\nclass ConfigurationConflictError(ConfigurationError):\n \"\"\" Raised when a configuration conflict is detected during action\n processing\"\"\"\n\n def __init__(self, conflicts):\n self._conflicts = conflicts\n\n def __str__(self):\n r = [\"Conflicting configuration actions\"]\n for discriminator, infos in self._conflicts.items():\n r.append(\" For: %s\" % (discriminator,))\n for info in infos:\n for line in str(info).rstrip().split('\\n'):\n r.append(\" \" + line)\n\n return '\\n'.join(r)\n\n\nclass ConfigurationExecutionError(ConfigurationError):\n \"\"\"An error occurred during execution of a configuration action\n \"\"\"\n\n def __init__(self, etype, evalue, info):\n self.etype, self.evalue, self.info = etype, evalue, info\n\n def __str__(self):\n return \"%s: %s\\n in:\\n %s\" % (self.etype, self.evalue, self.info)\n\n\nclass CyclicDependencyError(Exception):\n \"\"\" The exception raised when the Pyramid topological sorter detects a\n cyclic dependency.\"\"\"\n\n def __init__(self, cycles):\n self.cycles = cycles\n\n def __str__(self):\n L = []\n cycles = self.cycles\n for cycle in cycles:\n dependent = cycle\n dependees = cycles[cycle]\n L.append('%r sorts before %r' % (dependent, dependees))\n msg = 'Implicit ordering cycle:' + '; '.join(L)\n return msg\n", "path": "src/pyramid/exceptions.py"}]}
| 1,953 | 229 |
gh_patches_debug_14167
|
rasdani/github-patches
|
git_diff
|
fidals__shopelectro-917
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
models.py:85: Explore and doc what is vendor_code.
The puzzle `910-f547fee5` from #910 has to be resolved:
https://github.com/fidals/shopelectro/blob/e7de650f01b749f584f374923d7863083cc83b3c/shopelectro/models.py#L85-L85
The puzzle was created by duker33 on 28-Jun-19.
Estimate: 30 minutes, role: DEV.
If you have any technical questions, don't ask me, submit new tickets instead. The task will be \"done\" when the problem is fixed and the text of the puzzle is _removed_ from the source code. Here is more about [PDD](http://www.yegor256.com/2009/03/04/pdd.html) and [about me](http://www.yegor256.com/2017/04/05/pdd-in-action.html).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `shopelectro/models.py`
Content:
```
1 import enum
2 import random
3 import string
4 import typing
5 from uuid import uuid4
6
7 from django.conf import settings
8 from django.db import models
9 from django.urls import reverse
10 from django.utils.translation import ugettext_lazy as _
11
12 from catalog import models as catalog_models
13 from ecommerce import models as ecommerce_models
14 from pages import models as pages_models
15
16
17 def randomize_slug(slug: str) -> str:
18 slug_hash = ''.join(
19 random.choices(string.ascii_lowercase, k=settings.SLUG_HASH_SIZE)
20 )
21 return f'{slug}_{slug_hash}'
22
23
24 class SECategoryQuerySet(catalog_models.CategoryQuerySet):
25 def get_categories_tree_with_pictures(self) -> 'SECategoryQuerySet':
26 categories_with_pictures = (
27 self
28 .filter(products__page__images__isnull=False)
29 .distinct()
30 )
31
32 return categories_with_pictures.get_ancestors(include_self=True)
33
34
35 class SECategoryManager(
36 catalog_models.CategoryManager.from_queryset(SECategoryQuerySet)
37 ):
38 pass
39
40
41 class Category(catalog_models.AbstractCategory, pages_models.SyncPageMixin):
42
43 objects = SECategoryManager()
44 uuid = models.UUIDField(default=uuid4, editable=False)
45
46 @classmethod
47 def get_default_parent(cls):
48 return pages_models.CustomPage.objects.filter(slug='catalog').first()
49
50 @property
51 def image(self):
52 products = self.products.all()
53 return products[0].image if products else None
54
55 def get_absolute_url(self):
56 return reverse('category', args=(self.page.slug,))
57
58
59 class Product(
60 catalog_models.AbstractProduct,
61 catalog_models.AbstractPosition,
62 pages_models.SyncPageMixin
63 ):
64
65 # That's why we are needed to explicitly add objects manager here
66 # because of Django special managers behaviour.
67 # Se se#480 for details.
68 objects = catalog_models.ProductManager()
69
70 category = models.ForeignKey(
71 Category,
72 on_delete=models.CASCADE,
73 null=True,
74 related_name='products',
75 verbose_name=_('category'),
76 )
77
78 tags = models.ManyToManyField(
79 'Tag',
80 related_name='products',
81 blank=True,
82 verbose_name=_('tags'),
83 )
84
85 # @todo #910:30m Explore and doc what is vendor_code.
86 vendor_code = models.SmallIntegerField(verbose_name=_('vendor_code'))
87 uuid = models.UUIDField(default=uuid4, editable=False)
88 purchase_price = models.FloatField(
89 default=0, verbose_name=_('purchase_price'))
90 wholesale_small = models.FloatField(
91 default=0, verbose_name=_('wholesale_small'))
92 wholesale_medium = models.FloatField(
93 default=0, verbose_name=_('wholesale_medium'))
94 wholesale_large = models.FloatField(
95 default=0, verbose_name=_('wholesale_large'))
96
97 in_pack = models.PositiveSmallIntegerField(
98 default=1,
99 verbose_name=_('in pack'),
100 )
101
102 def get_absolute_url(self):
103 return reverse('product', args=(self.vendor_code,))
104
105 @property
106 def average_rate(self):
107 """Return rounded to first decimal averaged rating."""
108 rating = self.product_feedbacks.aggregate(
109 avg=models.Avg('rating')).get('avg', 0)
110 return round(rating, 1)
111
112 @property
113 def feedback_count(self):
114 return self.product_feedbacks.count()
115
116 @property
117 def feedback(self):
118 return self.product_feedbacks.all().order_by('-date')
119
120 def get_params(self):
121 return Tag.objects.filter_by_products([self]).group_tags()
122
123 def get_brand_name(self) -> str:
124 brand: typing.Optional['Tag'] = Tag.objects.get_brands([self]).get(self)
125 return brand.name if brand else ''
126
127
128 class ProductFeedback(models.Model):
129 product = models.ForeignKey(
130 Product, on_delete=models.CASCADE, null=True,
131 related_name='product_feedbacks'
132 )
133
134 date = models.DateTimeField(
135 auto_now=True, db_index=True, verbose_name=_('date'))
136 name = models.CharField(
137 max_length=255, db_index=True, verbose_name=_('name'))
138 rating = models.PositiveSmallIntegerField(
139 default=1, db_index=True, verbose_name=_('rating'))
140 dignities = models.TextField(
141 default='', blank=True, verbose_name=_('dignities'))
142 limitations = models.TextField(
143 default='', blank=True, verbose_name=_('limitations'))
144 general = models.TextField(
145 default='', blank=True, verbose_name=_('limitations'))
146
147
148 class ItemsEnum(enum.EnumMeta):
149 """
150 Provide dict-like `items` method.
151
152 https://docs.python.org/3/library/enum.html#enum-classes
153 """
154
155 def items(self):
156 return [(i.name, i.value) for i in self]
157
158 def __repr__(self):
159 fields = ', '.join(i.name for i in self)
160 return f"<enum '{self.__name__}: {fields}'>"
161
162
163 class PaymentOptions(enum.Enum, metaclass=ItemsEnum):
164 cash = 'Наличные'
165 cashless = 'Безналичные и денежные переводы'
166 AC = 'Банковская карта'
167 PC = 'Яндекс.Деньги'
168 GP = 'Связной (терминал)'
169 AB = 'Альфа-Клик'
170
171 @staticmethod
172 def default():
173 return PaymentOptions.cash
174
175
176 class Order(ecommerce_models.Order):
177 address = models.TextField(blank=True, default='')
178 payment_type = models.CharField(
179 max_length=255,
180 choices=PaymentOptions.items(),
181 default=PaymentOptions.default().name,
182 )
183 comment = models.TextField(blank=True, default='')
184 # total price - total purchase price
185 revenue = models.FloatField(default=0, null=True, verbose_name=_('revenue'))
186
187 @property
188 def payment_type_label(self):
189 """Return label for an order's payment option."""
190 return PaymentOptions[self.payment_type].value
191
192 def set_positions(self, cart):
193 """
194 Save cart's state into Order instance.
195
196 @todo #589:60m Create Cart model.
197 See details here: https://github.com/fidals/shopelectro/pull/590#discussion_r222544672
198 """
199 self.revenue = cart.total_revenue()
200 self.save()
201 for id_, position in cart:
202 self.positions.create(
203 order=self,
204 product_id=id_,
205 vendor_code=position['vendor_code'],
206 name=position['name'],
207 price=position['price'],
208 quantity=position['quantity'],
209 )
210 return self
211
212
213 class CategoryPage(pages_models.ModelPage):
214 """Create proxy model for Admin."""
215
216 class Meta(pages_models.ModelPage.Meta): # Ignore PycodestyleBear (E303)
217 proxy = True
218
219 # noinspection PyTypeChecker
220 objects = pages_models.ModelPage.create_model_page_managers(Category)
221
222
223 class ProductPage(pages_models.ModelPage):
224 """Create proxy model for Admin."""
225
226 class Meta(pages_models.ModelPage.Meta): # Ignore PycodestyleBear (E303)
227 proxy = True
228
229 # noinspection PyTypeChecker
230 objects = (
231 pages_models.ModelPage
232 .create_model_page_managers(Product)
233 )
234
235
236 class TagGroupManager(models.Manager):
237
238 def get_pack(self):
239 return self.get_queryset().get(uuid=settings.PACK_GROUP_UUID)
240
241
242 class TagGroup(catalog_models.TagGroup):
243
244 objects = TagGroupManager()
245
246
247 class TagQuerySet(catalog_models.TagQuerySet):
248
249 def products(self):
250 ids = self.values_list('products__id', flat=True)
251 return Product.objects.filter(id__in=ids).distinct()
252
253
254 class TagManager(catalog_models.TagManager.from_queryset(TagQuerySet)):
255
256 def get_packs(self):
257 return TagGroup.objects.get_pack().tags.all()
258
259
260 class Tag(catalog_models.Tag):
261 group = models.ForeignKey(
262 TagGroup, on_delete=models.CASCADE, null=True, related_name='tags',
263 )
264
265 objects = TagManager()
266
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/shopelectro/models.py b/shopelectro/models.py
--- a/shopelectro/models.py
+++ b/shopelectro/models.py
@@ -82,7 +82,13 @@
verbose_name=_('tags'),
)
- # @todo #910:30m Explore and doc what is vendor_code.
+ # `vendor_code` is a code that refers to the particular stock keeping unit (SKU).
+ # You can treat it as public unique id. Use it to publicly identify a product.
+
+ # We bring codes from 1C database and sync with products.
+ # We doesn't use the id field instead, because it is auto-increment sequence,
+ # that can't be changed easily. We decided to avoid that complexity.
+ # https://www.postgresql.org/docs/current/functions-sequence.html
vendor_code = models.SmallIntegerField(verbose_name=_('vendor_code'))
uuid = models.UUIDField(default=uuid4, editable=False)
purchase_price = models.FloatField(
|
{"golden_diff": "diff --git a/shopelectro/models.py b/shopelectro/models.py\n--- a/shopelectro/models.py\n+++ b/shopelectro/models.py\n@@ -82,7 +82,13 @@\n verbose_name=_('tags'),\n )\n \n- # @todo #910:30m Explore and doc what is vendor_code.\n+ # `vendor_code` is a code that refers to the particular stock keeping unit (SKU).\n+ # You can treat it as public unique id. Use it to publicly identify a product.\n+\n+ # We bring codes from 1C database and sync with products.\n+ # We doesn't use the id field instead, because it is auto-increment sequence,\n+ # that can't be changed easily. We decided to avoid that complexity.\n+ # https://www.postgresql.org/docs/current/functions-sequence.html\n vendor_code = models.SmallIntegerField(verbose_name=_('vendor_code'))\n uuid = models.UUIDField(default=uuid4, editable=False)\n purchase_price = models.FloatField(\n", "issue": "models.py:85: Explore and doc what is vendor_code.\nThe puzzle `910-f547fee5` from #910 has to be resolved: \n\nhttps://github.com/fidals/shopelectro/blob/e7de650f01b749f584f374923d7863083cc83b3c/shopelectro/models.py#L85-L85\n\nThe puzzle was created by duker33 on 28-Jun-19. \n\nEstimate: 30 minutes, role: DEV. \n\nIf you have any technical questions, don't ask me, submit new tickets instead. The task will be \\\"done\\\" when the problem is fixed and the text of the puzzle is _removed_ from the source code. Here is more about [PDD](http://www.yegor256.com/2009/03/04/pdd.html) and [about me](http://www.yegor256.com/2017/04/05/pdd-in-action.html). \n\n", "before_files": [{"content": "import enum\nimport random\nimport string\nimport typing\nfrom uuid import uuid4\n\nfrom django.conf import settings\nfrom django.db import models\nfrom django.urls import reverse\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom catalog import models as catalog_models\nfrom ecommerce import models as ecommerce_models\nfrom pages import models as pages_models\n\n\ndef randomize_slug(slug: str) -> str:\n slug_hash = ''.join(\n random.choices(string.ascii_lowercase, k=settings.SLUG_HASH_SIZE)\n )\n return f'{slug}_{slug_hash}'\n\n\nclass SECategoryQuerySet(catalog_models.CategoryQuerySet):\n def get_categories_tree_with_pictures(self) -> 'SECategoryQuerySet':\n categories_with_pictures = (\n self\n .filter(products__page__images__isnull=False)\n .distinct()\n )\n\n return categories_with_pictures.get_ancestors(include_self=True)\n\n\nclass SECategoryManager(\n catalog_models.CategoryManager.from_queryset(SECategoryQuerySet)\n):\n pass\n\n\nclass Category(catalog_models.AbstractCategory, pages_models.SyncPageMixin):\n\n objects = SECategoryManager()\n uuid = models.UUIDField(default=uuid4, editable=False)\n\n @classmethod\n def get_default_parent(cls):\n return pages_models.CustomPage.objects.filter(slug='catalog').first()\n\n @property\n def image(self):\n products = self.products.all()\n return products[0].image if products else None\n\n def get_absolute_url(self):\n return reverse('category', args=(self.page.slug,))\n\n\nclass Product(\n catalog_models.AbstractProduct,\n catalog_models.AbstractPosition,\n pages_models.SyncPageMixin\n):\n\n # That's why we are needed to explicitly add objects manager here\n # because of Django special managers behaviour.\n # Se se#480 for details.\n objects = catalog_models.ProductManager()\n\n category = models.ForeignKey(\n Category,\n on_delete=models.CASCADE,\n null=True,\n related_name='products',\n verbose_name=_('category'),\n )\n\n tags = models.ManyToManyField(\n 'Tag',\n 
related_name='products',\n blank=True,\n verbose_name=_('tags'),\n )\n\n # @todo #910:30m Explore and doc what is vendor_code.\n vendor_code = models.SmallIntegerField(verbose_name=_('vendor_code'))\n uuid = models.UUIDField(default=uuid4, editable=False)\n purchase_price = models.FloatField(\n default=0, verbose_name=_('purchase_price'))\n wholesale_small = models.FloatField(\n default=0, verbose_name=_('wholesale_small'))\n wholesale_medium = models.FloatField(\n default=0, verbose_name=_('wholesale_medium'))\n wholesale_large = models.FloatField(\n default=0, verbose_name=_('wholesale_large'))\n\n in_pack = models.PositiveSmallIntegerField(\n default=1,\n verbose_name=_('in pack'),\n )\n\n def get_absolute_url(self):\n return reverse('product', args=(self.vendor_code,))\n\n @property\n def average_rate(self):\n \"\"\"Return rounded to first decimal averaged rating.\"\"\"\n rating = self.product_feedbacks.aggregate(\n avg=models.Avg('rating')).get('avg', 0)\n return round(rating, 1)\n\n @property\n def feedback_count(self):\n return self.product_feedbacks.count()\n\n @property\n def feedback(self):\n return self.product_feedbacks.all().order_by('-date')\n\n def get_params(self):\n return Tag.objects.filter_by_products([self]).group_tags()\n\n def get_brand_name(self) -> str:\n brand: typing.Optional['Tag'] = Tag.objects.get_brands([self]).get(self)\n return brand.name if brand else ''\n\n\nclass ProductFeedback(models.Model):\n product = models.ForeignKey(\n Product, on_delete=models.CASCADE, null=True,\n related_name='product_feedbacks'\n )\n\n date = models.DateTimeField(\n auto_now=True, db_index=True, verbose_name=_('date'))\n name = models.CharField(\n max_length=255, db_index=True, verbose_name=_('name'))\n rating = models.PositiveSmallIntegerField(\n default=1, db_index=True, verbose_name=_('rating'))\n dignities = models.TextField(\n default='', blank=True, verbose_name=_('dignities'))\n limitations = models.TextField(\n default='', blank=True, verbose_name=_('limitations'))\n general = models.TextField(\n default='', blank=True, verbose_name=_('limitations'))\n\n\nclass ItemsEnum(enum.EnumMeta):\n \"\"\"\n Provide dict-like `items` method.\n\n https://docs.python.org/3/library/enum.html#enum-classes\n \"\"\"\n\n def items(self):\n return [(i.name, i.value) for i in self]\n\n def __repr__(self):\n fields = ', '.join(i.name for i in self)\n return f\"<enum '{self.__name__}: {fields}'>\"\n\n\nclass PaymentOptions(enum.Enum, metaclass=ItemsEnum):\n cash = '\u041d\u0430\u043b\u0438\u0447\u043d\u044b\u0435'\n cashless = '\u0411\u0435\u0437\u043d\u0430\u043b\u0438\u0447\u043d\u044b\u0435 \u0438 \u0434\u0435\u043d\u0435\u0436\u043d\u044b\u0435 \u043f\u0435\u0440\u0435\u0432\u043e\u0434\u044b'\n AC = '\u0411\u0430\u043d\u043a\u043e\u0432\u0441\u043a\u0430\u044f \u043a\u0430\u0440\u0442\u0430'\n PC = '\u042f\u043d\u0434\u0435\u043a\u0441.\u0414\u0435\u043d\u044c\u0433\u0438'\n GP = '\u0421\u0432\u044f\u0437\u043d\u043e\u0439 (\u0442\u0435\u0440\u043c\u0438\u043d\u0430\u043b)'\n AB = '\u0410\u043b\u044c\u0444\u0430-\u041a\u043b\u0438\u043a'\n\n @staticmethod\n def default():\n return PaymentOptions.cash\n\n\nclass Order(ecommerce_models.Order):\n address = models.TextField(blank=True, default='')\n payment_type = models.CharField(\n max_length=255,\n choices=PaymentOptions.items(),\n default=PaymentOptions.default().name,\n )\n comment = models.TextField(blank=True, default='')\n # total price - total purchase price\n revenue = models.FloatField(default=0, null=True, 
verbose_name=_('revenue'))\n\n @property\n def payment_type_label(self):\n \"\"\"Return label for an order's payment option.\"\"\"\n return PaymentOptions[self.payment_type].value\n\n def set_positions(self, cart):\n \"\"\"\n Save cart's state into Order instance.\n\n @todo #589:60m Create Cart model.\n See details here: https://github.com/fidals/shopelectro/pull/590#discussion_r222544672\n \"\"\"\n self.revenue = cart.total_revenue()\n self.save()\n for id_, position in cart:\n self.positions.create(\n order=self,\n product_id=id_,\n vendor_code=position['vendor_code'],\n name=position['name'],\n price=position['price'],\n quantity=position['quantity'],\n )\n return self\n\n\nclass CategoryPage(pages_models.ModelPage):\n \"\"\"Create proxy model for Admin.\"\"\"\n\n class Meta(pages_models.ModelPage.Meta): # Ignore PycodestyleBear (E303)\n proxy = True\n\n # noinspection PyTypeChecker\n objects = pages_models.ModelPage.create_model_page_managers(Category)\n\n\nclass ProductPage(pages_models.ModelPage):\n \"\"\"Create proxy model for Admin.\"\"\"\n\n class Meta(pages_models.ModelPage.Meta): # Ignore PycodestyleBear (E303)\n proxy = True\n\n # noinspection PyTypeChecker\n objects = (\n pages_models.ModelPage\n .create_model_page_managers(Product)\n )\n\n\nclass TagGroupManager(models.Manager):\n\n def get_pack(self):\n return self.get_queryset().get(uuid=settings.PACK_GROUP_UUID)\n\n\nclass TagGroup(catalog_models.TagGroup):\n\n objects = TagGroupManager()\n\n\nclass TagQuerySet(catalog_models.TagQuerySet):\n\n def products(self):\n ids = self.values_list('products__id', flat=True)\n return Product.objects.filter(id__in=ids).distinct()\n\n\nclass TagManager(catalog_models.TagManager.from_queryset(TagQuerySet)):\n\n def get_packs(self):\n return TagGroup.objects.get_pack().tags.all()\n\n\nclass Tag(catalog_models.Tag):\n group = models.ForeignKey(\n TagGroup, on_delete=models.CASCADE, null=True, related_name='tags',\n )\n\n objects = TagManager()\n", "path": "shopelectro/models.py"}], "after_files": [{"content": "import enum\nimport random\nimport string\nimport typing\nfrom uuid import uuid4\n\nfrom django.conf import settings\nfrom django.db import models\nfrom django.urls import reverse\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom catalog import models as catalog_models\nfrom ecommerce import models as ecommerce_models\nfrom pages import models as pages_models\n\n\ndef randomize_slug(slug: str) -> str:\n slug_hash = ''.join(\n random.choices(string.ascii_lowercase, k=settings.SLUG_HASH_SIZE)\n )\n return f'{slug}_{slug_hash}'\n\n\nclass SECategoryQuerySet(catalog_models.CategoryQuerySet):\n def get_categories_tree_with_pictures(self) -> 'SECategoryQuerySet':\n categories_with_pictures = (\n self\n .filter(products__page__images__isnull=False)\n .distinct()\n )\n\n return categories_with_pictures.get_ancestors(include_self=True)\n\n\nclass SECategoryManager(\n catalog_models.CategoryManager.from_queryset(SECategoryQuerySet)\n):\n pass\n\n\nclass Category(catalog_models.AbstractCategory, pages_models.SyncPageMixin):\n\n objects = SECategoryManager()\n uuid = models.UUIDField(default=uuid4, editable=False)\n\n @classmethod\n def get_default_parent(cls):\n return pages_models.CustomPage.objects.filter(slug='catalog').first()\n\n @property\n def image(self):\n products = self.products.all()\n return products[0].image if products else None\n\n def get_absolute_url(self):\n return reverse('category', args=(self.page.slug,))\n\n\nclass Product(\n 
catalog_models.AbstractProduct,\n catalog_models.AbstractPosition,\n pages_models.SyncPageMixin\n):\n\n # That's why we are needed to explicitly add objects manager here\n # because of Django special managers behaviour.\n # Se se#480 for details.\n objects = catalog_models.ProductManager()\n\n category = models.ForeignKey(\n Category,\n on_delete=models.CASCADE,\n null=True,\n related_name='products',\n verbose_name=_('category'),\n )\n\n tags = models.ManyToManyField(\n 'Tag',\n related_name='products',\n blank=True,\n verbose_name=_('tags'),\n )\n\n # `vendor_code` is a code that refers to the particular stock keeping unit (SKU).\n # You can treat it as public unique id. Use it to publicly identify a product.\n\n # We bring codes from 1C database and sync with products.\n # We doesn't use the id field instead, because it is auto-increment sequence,\n # that can't be changed easily. We decided to avoid that complexity.\n # https://www.postgresql.org/docs/current/functions-sequence.html\n vendor_code = models.SmallIntegerField(verbose_name=_('vendor_code'))\n uuid = models.UUIDField(default=uuid4, editable=False)\n purchase_price = models.FloatField(\n default=0, verbose_name=_('purchase_price'))\n wholesale_small = models.FloatField(\n default=0, verbose_name=_('wholesale_small'))\n wholesale_medium = models.FloatField(\n default=0, verbose_name=_('wholesale_medium'))\n wholesale_large = models.FloatField(\n default=0, verbose_name=_('wholesale_large'))\n\n in_pack = models.PositiveSmallIntegerField(\n default=1,\n verbose_name=_('in pack'),\n )\n\n def get_absolute_url(self):\n return reverse('product', args=(self.vendor_code,))\n\n @property\n def average_rate(self):\n \"\"\"Return rounded to first decimal averaged rating.\"\"\"\n rating = self.product_feedbacks.aggregate(\n avg=models.Avg('rating')).get('avg', 0)\n return round(rating, 1)\n\n @property\n def feedback_count(self):\n return self.product_feedbacks.count()\n\n @property\n def feedback(self):\n return self.product_feedbacks.all().order_by('-date')\n\n def get_params(self):\n return Tag.objects.filter_by_products([self]).group_tags()\n\n def get_brand_name(self) -> str:\n brand: typing.Optional['Tag'] = Tag.objects.get_brands([self]).get(self)\n return brand.name if brand else ''\n\n\nclass ProductFeedback(models.Model):\n product = models.ForeignKey(\n Product, on_delete=models.CASCADE, null=True,\n related_name='product_feedbacks'\n )\n\n date = models.DateTimeField(\n auto_now=True, db_index=True, verbose_name=_('date'))\n name = models.CharField(\n max_length=255, db_index=True, verbose_name=_('name'))\n rating = models.PositiveSmallIntegerField(\n default=1, db_index=True, verbose_name=_('rating'))\n dignities = models.TextField(\n default='', blank=True, verbose_name=_('dignities'))\n limitations = models.TextField(\n default='', blank=True, verbose_name=_('limitations'))\n general = models.TextField(\n default='', blank=True, verbose_name=_('limitations'))\n\n\nclass ItemsEnum(enum.EnumMeta):\n \"\"\"\n Provide dict-like `items` method.\n\n https://docs.python.org/3/library/enum.html#enum-classes\n \"\"\"\n\n def items(self):\n return [(i.name, i.value) for i in self]\n\n def __repr__(self):\n fields = ', '.join(i.name for i in self)\n return f\"<enum '{self.__name__}: {fields}'>\"\n\n\nclass PaymentOptions(enum.Enum, metaclass=ItemsEnum):\n cash = '\u041d\u0430\u043b\u0438\u0447\u043d\u044b\u0435'\n cashless = '\u0411\u0435\u0437\u043d\u0430\u043b\u0438\u0447\u043d\u044b\u0435 \u0438 
\u0434\u0435\u043d\u0435\u0436\u043d\u044b\u0435 \u043f\u0435\u0440\u0435\u0432\u043e\u0434\u044b'\n AC = '\u0411\u0430\u043d\u043a\u043e\u0432\u0441\u043a\u0430\u044f \u043a\u0430\u0440\u0442\u0430'\n PC = '\u042f\u043d\u0434\u0435\u043a\u0441.\u0414\u0435\u043d\u044c\u0433\u0438'\n GP = '\u0421\u0432\u044f\u0437\u043d\u043e\u0439 (\u0442\u0435\u0440\u043c\u0438\u043d\u0430\u043b)'\n AB = '\u0410\u043b\u044c\u0444\u0430-\u041a\u043b\u0438\u043a'\n\n @staticmethod\n def default():\n return PaymentOptions.cash\n\n\nclass Order(ecommerce_models.Order):\n address = models.TextField(blank=True, default='')\n payment_type = models.CharField(\n max_length=255,\n choices=PaymentOptions.items(),\n default=PaymentOptions.default().name,\n )\n comment = models.TextField(blank=True, default='')\n # total price - total purchase price\n revenue = models.FloatField(default=0, null=True, verbose_name=_('revenue'))\n\n @property\n def payment_type_label(self):\n \"\"\"Return label for an order's payment option.\"\"\"\n return PaymentOptions[self.payment_type].value\n\n def set_positions(self, cart):\n \"\"\"\n Save cart's state into Order instance.\n\n @todo #589:60m Create Cart model.\n See details here: https://github.com/fidals/shopelectro/pull/590#discussion_r222544672\n \"\"\"\n self.revenue = cart.total_revenue()\n self.save()\n for id_, position in cart:\n self.positions.create(\n order=self,\n product_id=id_,\n vendor_code=position['vendor_code'],\n name=position['name'],\n price=position['price'],\n quantity=position['quantity'],\n )\n return self\n\n\nclass CategoryPage(pages_models.ModelPage):\n \"\"\"Create proxy model for Admin.\"\"\"\n\n class Meta(pages_models.ModelPage.Meta): # Ignore PycodestyleBear (E303)\n proxy = True\n\n # noinspection PyTypeChecker\n objects = pages_models.ModelPage.create_model_page_managers(Category)\n\n\nclass ProductPage(pages_models.ModelPage):\n \"\"\"Create proxy model for Admin.\"\"\"\n\n class Meta(pages_models.ModelPage.Meta): # Ignore PycodestyleBear (E303)\n proxy = True\n\n # noinspection PyTypeChecker\n objects = (\n pages_models.ModelPage\n .create_model_page_managers(Product)\n )\n\n\nclass TagGroupManager(models.Manager):\n\n def get_pack(self):\n return self.get_queryset().get(uuid=settings.PACK_GROUP_UUID)\n\n\nclass TagGroup(catalog_models.TagGroup):\n\n objects = TagGroupManager()\n\n\nclass TagQuerySet(catalog_models.TagQuerySet):\n\n def products(self):\n ids = self.values_list('products__id', flat=True)\n return Product.objects.filter(id__in=ids).distinct()\n\n\nclass TagManager(catalog_models.TagManager.from_queryset(TagQuerySet)):\n\n def get_packs(self):\n return TagGroup.objects.get_pack().tags.all()\n\n\nclass Tag(catalog_models.Tag):\n group = models.ForeignKey(\n TagGroup, on_delete=models.CASCADE, null=True, related_name='tags',\n )\n\n objects = TagManager()\n", "path": "shopelectro/models.py"}]}
| 2,918 | 225 |
gh_patches_debug_37837
|
rasdani/github-patches
|
git_diff
|
kserve__kserve-3173
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Configurable timeouts for InferenceGraph
/kind feature
**Describe the solution you'd like**
InferenceServices in a Serverless deployment of KServe allow you to configure the Knative `timeoutSeconds` field by way of the `timeout` field in ComponentExtensionSpec. We'd like the same feature for an InferenceGraph.
**Anything else you would like to add:**
If we cannot update the InferenceGraph spec at this time, I would request the InferenceGraph controller at the very least be able to check the existing InferenceServices for Knative-backed components and match the highest `timeoutSeconds`.
Thanks for your consideration!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `python/kserve/kserve/models/v1alpha1_inference_graph_spec.py`
Content:
```
1 # Copyright 2023 The KServe Authors.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 # coding: utf-8
16
17 """
18 KServe
19
20 Python SDK for KServe # noqa: E501
21
22 The version of the OpenAPI document: v0.1
23 Generated by: https://openapi-generator.tech
24 """
25
26
27 import pprint
28 import re # noqa: F401
29
30 import six
31
32 from kserve.configuration import Configuration
33
34
35 class V1alpha1InferenceGraphSpec(object):
36 """NOTE: This class is auto generated by OpenAPI Generator.
37 Ref: https://openapi-generator.tech
38
39 Do not edit the class manually.
40 """
41
42 """
43 Attributes:
44 openapi_types (dict): The key is attribute name
45 and the value is attribute type.
46 attribute_map (dict): The key is attribute name
47 and the value is json key in definition.
48 """
49 openapi_types = {
50 'affinity': 'V1Affinity',
51 'nodes': 'dict(str, V1alpha1InferenceRouter)',
52 'resources': 'V1ResourceRequirements'
53 }
54
55 attribute_map = {
56 'affinity': 'affinity',
57 'nodes': 'nodes',
58 'resources': 'resources'
59 }
60
61 def __init__(self, affinity=None, nodes=None, resources=None, local_vars_configuration=None): # noqa: E501
62 """V1alpha1InferenceGraphSpec - a model defined in OpenAPI""" # noqa: E501
63 if local_vars_configuration is None:
64 local_vars_configuration = Configuration()
65 self.local_vars_configuration = local_vars_configuration
66
67 self._affinity = None
68 self._nodes = None
69 self._resources = None
70 self.discriminator = None
71
72 if affinity is not None:
73 self.affinity = affinity
74 self.nodes = nodes
75 if resources is not None:
76 self.resources = resources
77
78 @property
79 def affinity(self):
80 """Gets the affinity of this V1alpha1InferenceGraphSpec. # noqa: E501
81
82
83 :return: The affinity of this V1alpha1InferenceGraphSpec. # noqa: E501
84 :rtype: V1Affinity
85 """
86 return self._affinity
87
88 @affinity.setter
89 def affinity(self, affinity):
90 """Sets the affinity of this V1alpha1InferenceGraphSpec.
91
92
93 :param affinity: The affinity of this V1alpha1InferenceGraphSpec. # noqa: E501
94 :type: V1Affinity
95 """
96
97 self._affinity = affinity
98
99 @property
100 def nodes(self):
101 """Gets the nodes of this V1alpha1InferenceGraphSpec. # noqa: E501
102
103 Map of InferenceGraph router nodes Each node defines the router which can be different routing types # noqa: E501
104
105 :return: The nodes of this V1alpha1InferenceGraphSpec. # noqa: E501
106 :rtype: dict(str, V1alpha1InferenceRouter)
107 """
108 return self._nodes
109
110 @nodes.setter
111 def nodes(self, nodes):
112 """Sets the nodes of this V1alpha1InferenceGraphSpec.
113
114 Map of InferenceGraph router nodes Each node defines the router which can be different routing types # noqa: E501
115
116 :param nodes: The nodes of this V1alpha1InferenceGraphSpec. # noqa: E501
117 :type: dict(str, V1alpha1InferenceRouter)
118 """
119 if self.local_vars_configuration.client_side_validation and nodes is None: # noqa: E501
120 raise ValueError("Invalid value for `nodes`, must not be `None`") # noqa: E501
121
122 self._nodes = nodes
123
124 @property
125 def resources(self):
126 """Gets the resources of this V1alpha1InferenceGraphSpec. # noqa: E501
127
128
129 :return: The resources of this V1alpha1InferenceGraphSpec. # noqa: E501
130 :rtype: V1ResourceRequirements
131 """
132 return self._resources
133
134 @resources.setter
135 def resources(self, resources):
136 """Sets the resources of this V1alpha1InferenceGraphSpec.
137
138
139 :param resources: The resources of this V1alpha1InferenceGraphSpec. # noqa: E501
140 :type: V1ResourceRequirements
141 """
142
143 self._resources = resources
144
145 def to_dict(self):
146 """Returns the model properties as a dict"""
147 result = {}
148
149 for attr, _ in six.iteritems(self.openapi_types):
150 value = getattr(self, attr)
151 if isinstance(value, list):
152 result[attr] = list(map(
153 lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
154 value
155 ))
156 elif hasattr(value, "to_dict"):
157 result[attr] = value.to_dict()
158 elif isinstance(value, dict):
159 result[attr] = dict(map(
160 lambda item: (item[0], item[1].to_dict())
161 if hasattr(item[1], "to_dict") else item,
162 value.items()
163 ))
164 else:
165 result[attr] = value
166
167 return result
168
169 def to_str(self):
170 """Returns the string representation of the model"""
171 return pprint.pformat(self.to_dict())
172
173 def __repr__(self):
174 """For `print` and `pprint`"""
175 return self.to_str()
176
177 def __eq__(self, other):
178 """Returns true if both objects are equal"""
179 if not isinstance(other, V1alpha1InferenceGraphSpec):
180 return False
181
182 return self.to_dict() == other.to_dict()
183
184 def __ne__(self, other):
185 """Returns true if both objects are not equal"""
186 if not isinstance(other, V1alpha1InferenceGraphSpec):
187 return True
188
189 return self.to_dict() != other.to_dict()
190
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/python/kserve/kserve/models/v1alpha1_inference_graph_spec.py b/python/kserve/kserve/models/v1alpha1_inference_graph_spec.py
--- a/python/kserve/kserve/models/v1alpha1_inference_graph_spec.py
+++ b/python/kserve/kserve/models/v1alpha1_inference_graph_spec.py
@@ -49,16 +49,18 @@
openapi_types = {
'affinity': 'V1Affinity',
'nodes': 'dict(str, V1alpha1InferenceRouter)',
- 'resources': 'V1ResourceRequirements'
+ 'resources': 'V1ResourceRequirements',
+ 'timeout': 'int'
}
attribute_map = {
'affinity': 'affinity',
'nodes': 'nodes',
- 'resources': 'resources'
+ 'resources': 'resources',
+ 'timeout': 'timeout'
}
- def __init__(self, affinity=None, nodes=None, resources=None, local_vars_configuration=None): # noqa: E501
+ def __init__(self, affinity=None, nodes=None, resources=None, timeout=None, local_vars_configuration=None): # noqa: E501
"""V1alpha1InferenceGraphSpec - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
@@ -67,6 +69,7 @@
self._affinity = None
self._nodes = None
self._resources = None
+ self._timeout = None
self.discriminator = None
if affinity is not None:
@@ -74,6 +77,8 @@
self.nodes = nodes
if resources is not None:
self.resources = resources
+ if timeout is not None:
+ self.timeout = timeout
@property
def affinity(self):
@@ -142,6 +147,29 @@
self._resources = resources
+ @property
+ def timeout(self):
+ """Gets the timeout of this V1alpha1InferenceGraphSpec. # noqa: E501
+
+ TimeoutSeconds specifies the number of seconds to wait before timing out a request to the component. # noqa: E501
+
+ :return: The timeout of this V1alpha1InferenceGraphSpec. # noqa: E501
+ :rtype: int
+ """
+ return self._timeout
+
+ @timeout.setter
+ def timeout(self, timeout):
+ """Sets the timeout of this V1alpha1InferenceGraphSpec.
+
+ TimeoutSeconds specifies the number of seconds to wait before timing out a request to the component. # noqa: E501
+
+ :param timeout: The timeout of this V1alpha1InferenceGraphSpec. # noqa: E501
+ :type: int
+ """
+
+ self._timeout = timeout
+
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
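
For a quick sanity check of the patched model, a minimal usage sketch (assuming a kserve SDK build that includes this change; the `V1alpha1InferenceRouter` import path and its `router_type` argument follow the generated-client conventions and are assumptions, not taken from the files above):

```python
from kserve.models.v1alpha1_inference_graph_spec import V1alpha1InferenceGraphSpec
from kserve.models.v1alpha1_inference_router import V1alpha1InferenceRouter

# A single-node graph spec carrying the new field; 300 is an illustrative value
# that would map onto the Knative route's timeoutSeconds.
spec = V1alpha1InferenceGraphSpec(
    nodes={"root": V1alpha1InferenceRouter(router_type="Sequence")},
    timeout=300,
)
print(spec.to_dict())  # the dict now includes 'timeout': 300 next to 'nodes'
```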
|
{"golden_diff": "diff --git a/python/kserve/kserve/models/v1alpha1_inference_graph_spec.py b/python/kserve/kserve/models/v1alpha1_inference_graph_spec.py\n--- a/python/kserve/kserve/models/v1alpha1_inference_graph_spec.py\n+++ b/python/kserve/kserve/models/v1alpha1_inference_graph_spec.py\n@@ -49,16 +49,18 @@\n openapi_types = {\n 'affinity': 'V1Affinity',\n 'nodes': 'dict(str, V1alpha1InferenceRouter)',\n- 'resources': 'V1ResourceRequirements'\n+ 'resources': 'V1ResourceRequirements',\n+ 'timeout': 'int'\n }\n \n attribute_map = {\n 'affinity': 'affinity',\n 'nodes': 'nodes',\n- 'resources': 'resources'\n+ 'resources': 'resources',\n+ 'timeout': 'timeout'\n }\n \n- def __init__(self, affinity=None, nodes=None, resources=None, local_vars_configuration=None): # noqa: E501\n+ def __init__(self, affinity=None, nodes=None, resources=None, timeout=None, local_vars_configuration=None): # noqa: E501\n \"\"\"V1alpha1InferenceGraphSpec - a model defined in OpenAPI\"\"\" # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n@@ -67,6 +69,7 @@\n self._affinity = None\n self._nodes = None\n self._resources = None\n+ self._timeout = None\n self.discriminator = None\n \n if affinity is not None:\n@@ -74,6 +77,8 @@\n self.nodes = nodes\n if resources is not None:\n self.resources = resources\n+ if timeout is not None:\n+ self.timeout = timeout\n \n @property\n def affinity(self):\n@@ -142,6 +147,29 @@\n \n self._resources = resources\n \n+ @property\n+ def timeout(self):\n+ \"\"\"Gets the timeout of this V1alpha1InferenceGraphSpec. # noqa: E501\n+\n+ TimeoutSeconds specifies the number of seconds to wait before timing out a request to the component. # noqa: E501\n+\n+ :return: The timeout of this V1alpha1InferenceGraphSpec. # noqa: E501\n+ :rtype: int\n+ \"\"\"\n+ return self._timeout\n+\n+ @timeout.setter\n+ def timeout(self, timeout):\n+ \"\"\"Sets the timeout of this V1alpha1InferenceGraphSpec.\n+\n+ TimeoutSeconds specifies the number of seconds to wait before timing out a request to the component. # noqa: E501\n+\n+ :param timeout: The timeout of this V1alpha1InferenceGraphSpec. # noqa: E501\n+ :type: int\n+ \"\"\"\n+\n+ self._timeout = timeout\n+\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n", "issue": "Configurable timeouts for InferenceGraph\n/kind feature\r\n\r\n**Describe the solution you'd like**\r\n\r\nInferenceServices in a Serverless deployment of KServe allow you to configure the Knative `timeoutSeconds` field by way of the `timeout` field in ComponentExtensionSpec. 
We'd like the same feature for an InferenceGraph.\r\n\r\n**Anything else you would like to add:**\r\n\r\nIf we cannot update the InfereceGraph spec at this time, I would request the InferenceGraph controller at the very least be able to check the existing InferenceServices for Knative-backed components and match the highest `timeoutSeconds`.\r\n\r\nThanks for your consideration!\n", "before_files": [{"content": "# Copyright 2023 The KServe Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# coding: utf-8\n\n\"\"\"\n KServe\n\n Python SDK for KServe # noqa: E501\n\n The version of the OpenAPI document: v0.1\n Generated by: https://openapi-generator.tech\n\"\"\"\n\n\nimport pprint\nimport re # noqa: F401\n\nimport six\n\nfrom kserve.configuration import Configuration\n\n\nclass V1alpha1InferenceGraphSpec(object):\n \"\"\"NOTE: This class is auto generated by OpenAPI Generator.\n Ref: https://openapi-generator.tech\n\n Do not edit the class manually.\n \"\"\"\n\n \"\"\"\n Attributes:\n openapi_types (dict): The key is attribute name\n and the value is attribute type.\n attribute_map (dict): The key is attribute name\n and the value is json key in definition.\n \"\"\"\n openapi_types = {\n 'affinity': 'V1Affinity',\n 'nodes': 'dict(str, V1alpha1InferenceRouter)',\n 'resources': 'V1ResourceRequirements'\n }\n\n attribute_map = {\n 'affinity': 'affinity',\n 'nodes': 'nodes',\n 'resources': 'resources'\n }\n\n def __init__(self, affinity=None, nodes=None, resources=None, local_vars_configuration=None): # noqa: E501\n \"\"\"V1alpha1InferenceGraphSpec - a model defined in OpenAPI\"\"\" # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n self.local_vars_configuration = local_vars_configuration\n\n self._affinity = None\n self._nodes = None\n self._resources = None\n self.discriminator = None\n\n if affinity is not None:\n self.affinity = affinity\n self.nodes = nodes\n if resources is not None:\n self.resources = resources\n\n @property\n def affinity(self):\n \"\"\"Gets the affinity of this V1alpha1InferenceGraphSpec. # noqa: E501\n\n\n :return: The affinity of this V1alpha1InferenceGraphSpec. # noqa: E501\n :rtype: V1Affinity\n \"\"\"\n return self._affinity\n\n @affinity.setter\n def affinity(self, affinity):\n \"\"\"Sets the affinity of this V1alpha1InferenceGraphSpec.\n\n\n :param affinity: The affinity of this V1alpha1InferenceGraphSpec. # noqa: E501\n :type: V1Affinity\n \"\"\"\n\n self._affinity = affinity\n\n @property\n def nodes(self):\n \"\"\"Gets the nodes of this V1alpha1InferenceGraphSpec. # noqa: E501\n\n Map of InferenceGraph router nodes Each node defines the router which can be different routing types # noqa: E501\n\n :return: The nodes of this V1alpha1InferenceGraphSpec. 
# noqa: E501\n :rtype: dict(str, V1alpha1InferenceRouter)\n \"\"\"\n return self._nodes\n\n @nodes.setter\n def nodes(self, nodes):\n \"\"\"Sets the nodes of this V1alpha1InferenceGraphSpec.\n\n Map of InferenceGraph router nodes Each node defines the router which can be different routing types # noqa: E501\n\n :param nodes: The nodes of this V1alpha1InferenceGraphSpec. # noqa: E501\n :type: dict(str, V1alpha1InferenceRouter)\n \"\"\"\n if self.local_vars_configuration.client_side_validation and nodes is None: # noqa: E501\n raise ValueError(\"Invalid value for `nodes`, must not be `None`\") # noqa: E501\n\n self._nodes = nodes\n\n @property\n def resources(self):\n \"\"\"Gets the resources of this V1alpha1InferenceGraphSpec. # noqa: E501\n\n\n :return: The resources of this V1alpha1InferenceGraphSpec. # noqa: E501\n :rtype: V1ResourceRequirements\n \"\"\"\n return self._resources\n\n @resources.setter\n def resources(self, resources):\n \"\"\"Sets the resources of this V1alpha1InferenceGraphSpec.\n\n\n :param resources: The resources of this V1alpha1InferenceGraphSpec. # noqa: E501\n :type: V1ResourceRequirements\n \"\"\"\n\n self._resources = resources\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n\n for attr, _ in six.iteritems(self.openapi_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(\n lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x,\n value\n ))\n elif hasattr(value, \"to_dict\"):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(\n lambda item: (item[0], item[1].to_dict())\n if hasattr(item[1], \"to_dict\") else item,\n value.items()\n ))\n else:\n result[attr] = value\n\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, V1alpha1InferenceGraphSpec):\n return False\n\n return self.to_dict() == other.to_dict()\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n if not isinstance(other, V1alpha1InferenceGraphSpec):\n return True\n\n return self.to_dict() != other.to_dict()\n", "path": "python/kserve/kserve/models/v1alpha1_inference_graph_spec.py"}], "after_files": [{"content": "# Copyright 2023 The KServe Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# coding: utf-8\n\n\"\"\"\n KServe\n\n Python SDK for KServe # noqa: E501\n\n The version of the OpenAPI document: v0.1\n Generated by: https://openapi-generator.tech\n\"\"\"\n\n\nimport pprint\nimport re # noqa: F401\n\nimport six\n\nfrom kserve.configuration import Configuration\n\n\nclass V1alpha1InferenceGraphSpec(object):\n \"\"\"NOTE: This class is auto generated by OpenAPI Generator.\n Ref: https://openapi-generator.tech\n\n Do not edit the class manually.\n 
\"\"\"\n\n \"\"\"\n Attributes:\n openapi_types (dict): The key is attribute name\n and the value is attribute type.\n attribute_map (dict): The key is attribute name\n and the value is json key in definition.\n \"\"\"\n openapi_types = {\n 'affinity': 'V1Affinity',\n 'nodes': 'dict(str, V1alpha1InferenceRouter)',\n 'resources': 'V1ResourceRequirements',\n 'timeout': 'int'\n }\n\n attribute_map = {\n 'affinity': 'affinity',\n 'nodes': 'nodes',\n 'resources': 'resources',\n 'timeout': 'timeout'\n }\n\n def __init__(self, affinity=None, nodes=None, resources=None, timeout=None, local_vars_configuration=None): # noqa: E501\n \"\"\"V1alpha1InferenceGraphSpec - a model defined in OpenAPI\"\"\" # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n self.local_vars_configuration = local_vars_configuration\n\n self._affinity = None\n self._nodes = None\n self._resources = None\n self._timeout = None\n self.discriminator = None\n\n if affinity is not None:\n self.affinity = affinity\n self.nodes = nodes\n if resources is not None:\n self.resources = resources\n if timeout is not None:\n self.timeout = timeout\n\n @property\n def affinity(self):\n \"\"\"Gets the affinity of this V1alpha1InferenceGraphSpec. # noqa: E501\n\n\n :return: The affinity of this V1alpha1InferenceGraphSpec. # noqa: E501\n :rtype: V1Affinity\n \"\"\"\n return self._affinity\n\n @affinity.setter\n def affinity(self, affinity):\n \"\"\"Sets the affinity of this V1alpha1InferenceGraphSpec.\n\n\n :param affinity: The affinity of this V1alpha1InferenceGraphSpec. # noqa: E501\n :type: V1Affinity\n \"\"\"\n\n self._affinity = affinity\n\n @property\n def nodes(self):\n \"\"\"Gets the nodes of this V1alpha1InferenceGraphSpec. # noqa: E501\n\n Map of InferenceGraph router nodes Each node defines the router which can be different routing types # noqa: E501\n\n :return: The nodes of this V1alpha1InferenceGraphSpec. # noqa: E501\n :rtype: dict(str, V1alpha1InferenceRouter)\n \"\"\"\n return self._nodes\n\n @nodes.setter\n def nodes(self, nodes):\n \"\"\"Sets the nodes of this V1alpha1InferenceGraphSpec.\n\n Map of InferenceGraph router nodes Each node defines the router which can be different routing types # noqa: E501\n\n :param nodes: The nodes of this V1alpha1InferenceGraphSpec. # noqa: E501\n :type: dict(str, V1alpha1InferenceRouter)\n \"\"\"\n if self.local_vars_configuration.client_side_validation and nodes is None: # noqa: E501\n raise ValueError(\"Invalid value for `nodes`, must not be `None`\") # noqa: E501\n\n self._nodes = nodes\n\n @property\n def resources(self):\n \"\"\"Gets the resources of this V1alpha1InferenceGraphSpec. # noqa: E501\n\n\n :return: The resources of this V1alpha1InferenceGraphSpec. # noqa: E501\n :rtype: V1ResourceRequirements\n \"\"\"\n return self._resources\n\n @resources.setter\n def resources(self, resources):\n \"\"\"Sets the resources of this V1alpha1InferenceGraphSpec.\n\n\n :param resources: The resources of this V1alpha1InferenceGraphSpec. # noqa: E501\n :type: V1ResourceRequirements\n \"\"\"\n\n self._resources = resources\n\n @property\n def timeout(self):\n \"\"\"Gets the timeout of this V1alpha1InferenceGraphSpec. # noqa: E501\n\n TimeoutSeconds specifies the number of seconds to wait before timing out a request to the component. # noqa: E501\n\n :return: The timeout of this V1alpha1InferenceGraphSpec. 
# noqa: E501\n :rtype: int\n \"\"\"\n return self._timeout\n\n @timeout.setter\n def timeout(self, timeout):\n \"\"\"Sets the timeout of this V1alpha1InferenceGraphSpec.\n\n TimeoutSeconds specifies the number of seconds to wait before timing out a request to the component. # noqa: E501\n\n :param timeout: The timeout of this V1alpha1InferenceGraphSpec. # noqa: E501\n :type: int\n \"\"\"\n\n self._timeout = timeout\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n\n for attr, _ in six.iteritems(self.openapi_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(\n lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x,\n value\n ))\n elif hasattr(value, \"to_dict\"):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(\n lambda item: (item[0], item[1].to_dict())\n if hasattr(item[1], \"to_dict\") else item,\n value.items()\n ))\n else:\n result[attr] = value\n\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, V1alpha1InferenceGraphSpec):\n return False\n\n return self.to_dict() == other.to_dict()\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n if not isinstance(other, V1alpha1InferenceGraphSpec):\n return True\n\n return self.to_dict() != other.to_dict()\n", "path": "python/kserve/kserve/models/v1alpha1_inference_graph_spec.py"}]}
| 2,316 | 685 |
gh_patches_debug_23283
|
rasdani/github-patches
|
git_diff
|
conda__conda-build-1685
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Encoding issues on Windows build in the fix_staged_scripts step
Hello.
I'm trying to build the [waf package on windows](https://github.com/conda-forge/waf-feedstock/pull/4), but I'm having some trouble with ```conda_build``` in the ```fix_staged_scripts``` step. I couldn't find any similar issue. I'll follow with some details:
While building, there is a file named ```waf``` that is meant to be moved to the ```%SCRIPTS%``` folder. The problem seems to be that ```conda-build``` tries to read the file to do some checking, but fails to do so because of the file encoding (https://github.com/conda/conda-build/blob/master/conda_build/windows.py#L45)
I believe the waf file should be opened with encoding='iso-8859-1'. Making a local change that replaces the open call with the code below builds the package successfully, but is not a solution:
```
with open(join(scripts_dir, fn), encoding='iso-8859-1') as f:
```
Any suggestions here? I'm unsure how this case should be handled.
I'll leave some suggestions that occurred to me:
1- Fix this at package level, by avoiding running into the ```fix_staged_scripts``` function somehow. (I don't like this solution because it won't really fix anything besides this specific package)
2- Do some try...catch with several encodings near the presented code above
3- Have a metadata somewhere (maybe in the recipe?) to handle the file encoding. Fallback to the current case if none.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conda_build/windows.py`
Content:
```
1 from __future__ import absolute_import, division, print_function
2
3 import os
4 import sys
5 from os.path import isdir, join, dirname, isfile
6
7 import bs4
8 # importing setuptools patches distutils so that it knows how to find VC for python 2.7
9 import setuptools # noqa
10 # Leverage the hard work done by setuptools/distutils to find vcvarsall using
11 # either the registry or the VS**COMNTOOLS environment variable
12 from distutils.msvc9compiler import find_vcvarsall as distutils_find_vcvarsall
13 from distutils.msvc9compiler import Reg, WINSDK_BASE
14
15 from .conda_interface import bits
16
17 from conda_build import environ
18 from conda_build.utils import check_call_env, root_script_dir, path_prepended, copy_into
19
20
21 assert sys.platform == 'win32'
22
23
24 VS_VERSION_STRING = {
25 '8.0': 'Visual Studio 8 2005',
26 '9.0': 'Visual Studio 9 2008',
27 '10.0': 'Visual Studio 10 2010',
28 '11.0': 'Visual Studio 11 2012',
29 '12.0': 'Visual Studio 12 2013',
30 '14.0': 'Visual Studio 14 2015'
31 }
32
33
34 def fix_staged_scripts(scripts_dir):
35 """
36 Fixes scripts which have been installed unix-style to have a .bat
37 helper
38 """
39 if not isdir(scripts_dir):
40 return
41 for fn in os.listdir(scripts_dir):
42 # process all the extensionless files
43 if not isfile(join(scripts_dir, fn)) or '.' in fn:
44 continue
45
46 with open(join(scripts_dir, fn)) as f:
47 line = bs4.UnicodeDammit(f.readline()).unicode_markup.lower()
48 # If it's a #!python script
49 if not (line.startswith('#!') and 'python' in line.lower()):
50 continue
51 print('Adjusting unix-style #! script %s, '
52 'and adding a .bat file for it' % fn)
53 # copy it with a .py extension (skipping that first #! line)
54 with open(join(scripts_dir, fn + '-script.py'), 'w') as fo:
55 fo.write(f.read())
56 # now create the .exe file
57 copy_into(join(dirname(__file__), 'cli-%d.exe' % bits),
58 join(scripts_dir, fn + '.exe'))
59
60 # remove the original script
61 os.remove(join(scripts_dir, fn))
62
63
64 def build_vcvarsall_vs_path(version):
65 """
66 Given the Visual Studio version, returns the default path to the
67 Microsoft Visual Studio vcvarsall.bat file.
68 Expected versions are of the form {9.0, 10.0, 12.0, 14.0}
69 """
70 # Set up a load of paths that can be imported from the tests
71 if 'ProgramFiles(x86)' in os.environ:
72 PROGRAM_FILES_PATH = os.environ['ProgramFiles(x86)']
73 else:
74 PROGRAM_FILES_PATH = os.environ['ProgramFiles']
75
76 flatversion = str(version).replace('.', '')
77 vstools = "VS{0}COMNTOOLS".format(flatversion)
78
79 if vstools in os.environ:
80 return os.path.join(os.environ[vstools], '..\\..\\VC\\vcvarsall.bat')
81 else:
82 # prefer looking at env var; fall back to program files defaults
83 return os.path.join(PROGRAM_FILES_PATH,
84 'Microsoft Visual Studio {}'.format(version), 'VC',
85 'vcvarsall.bat')
86
87
88 def msvc_env_cmd(bits, config, override=None):
89 arch_selector = 'x86' if bits == 32 else 'amd64'
90
91 msvc_env_lines = []
92
93 version = None
94 if override is not None:
95 version = override
96
97 # The DISTUTILS_USE_SDK variable tells distutils to not try and validate
98 # the MSVC compiler. For < 3.5 this still forcibly looks for 'cl.exe'.
99 # For > 3.5 it literally just skips the validation logic.
100 # See distutils _msvccompiler.py and msvc9compiler.py / msvccompiler.py
101 # for more information.
102 msvc_env_lines.append('set DISTUTILS_USE_SDK=1')
103 # This is also required to hit the 'don't validate' logic on < 3.5.
104 # For > 3.5 this is ignored.
105 msvc_env_lines.append('set MSSdk=1')
106
107 if not version:
108 if config.PY3K and config.use_MSVC2015:
109 version = '14.0'
110 elif config.PY3K:
111 version = '10.0'
112 else:
113 version = '9.0'
114
115 if float(version) >= 14.0:
116 # For Python 3.5+, ensure that we link with the dynamic runtime. See
117 # http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info
118 msvc_env_lines.append('set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\\vcruntime{0}.dll'.format(
119 version.replace('.', '')))
120
121 vcvarsall_vs_path = build_vcvarsall_vs_path(version)
122
123 def build_vcvarsall_cmd(cmd, arch=arch_selector):
124 # Default argument `arch_selector` is defined above
125 return 'call "{cmd}" {arch}'.format(cmd=cmd, arch=arch)
126
127 msvc_env_lines.append('set "VS_VERSION={}"'.format(version))
128 msvc_env_lines.append('set "VS_MAJOR={}"'.format(version.split('.')[0]))
129 msvc_env_lines.append('set "VS_YEAR={}"'.format(VS_VERSION_STRING[version][-4:]))
130 msvc_env_lines.append('set "CMAKE_GENERATOR={}"'.format(VS_VERSION_STRING[version] +
131 {64: ' Win64', 32: ''}[bits]))
132 # tell msys2 to ignore path conversions for issue-causing windows-style flags in build
133 # See https://github.com/conda-forge/icu-feedstock/pull/5
134 msvc_env_lines.append('set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out"')
135 msvc_env_lines.append('set "MSYS2_ENV_CONV_EXCL=CL"')
136 if version == '10.0':
137 try:
138 WIN_SDK_71_PATH = Reg.get_value(os.path.join(WINSDK_BASE, 'v7.1'),
139 'installationfolder')
140 WIN_SDK_71_BAT_PATH = os.path.join(WIN_SDK_71_PATH, 'Bin', 'SetEnv.cmd')
141
142 win_sdk_arch = '/Release /x86' if bits == 32 else '/Release /x64'
143 win_sdk_cmd = build_vcvarsall_cmd(WIN_SDK_71_BAT_PATH, arch=win_sdk_arch)
144
145 # There are two methods of building Python 3.3 and 3.4 extensions (both
146 # of which required Visual Studio 2010 - as explained in the Python wiki
147 # https://wiki.python.org/moin/WindowsCompilers)
148 # 1) Use the Windows SDK 7.1
149 # 2) Use Visual Studio 2010 (any edition)
150 # However, VS2010 never shipped with a 64-bit compiler, so in this case
151 # **only** option (1) applies. For this reason, we always try and
152 # activate the Windows SDK first. Unfortunately, unsuccessfully setting
153 # up the environment does **not EXIT 1** and therefore we must fall
154 # back to attempting to set up VS2010.
155 # DelayedExpansion is required for the SetEnv.cmd
156 msvc_env_lines.append('Setlocal EnableDelayedExpansion')
157 msvc_env_lines.append(win_sdk_cmd)
158 # If the WindowsSDKDir environment variable has not been successfully
159 # set then try activating VS2010
160 msvc_env_lines.append('if not "%WindowsSDKDir%" == "{}" ( {} )'.format(
161 WIN_SDK_71_PATH, build_vcvarsall_cmd(vcvarsall_vs_path)))
162 # sdk is not installed. Fall back to only trying VS 2010
163 except KeyError:
164 msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
165 elif version == '9.0':
166 # Get the Visual Studio 2008 path (not the Visual C++ for Python path)
167 # and get the 'vcvars64.bat' from inside the bin (in the directory above
168 # that returned by distutils_find_vcvarsall)
169 try:
170 VCVARS64_VS9_BAT_PATH = os.path.join(os.path.dirname(distutils_find_vcvarsall(9)),
171 'bin', 'vcvars64.bat')
172 # there's an exception if VS or the VC compiler for python are not actually installed.
173 except (KeyError, TypeError):
174 VCVARS64_VS9_BAT_PATH = None
175
176 error1 = 'if errorlevel 1 {}'
177
178 # Prefer VS9 proper over Microsoft Visual C++ Compiler for Python 2.7
179 msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
180 # The Visual Studio 2008 Express edition does not properly contain
181 # the amd64 build files, so we call the vcvars64.bat manually,
182 # rather than using the vcvarsall.bat which would try and call the
183 # missing bat file.
184 if arch_selector == 'amd64' and VCVARS64_VS9_BAT_PATH:
185 msvc_env_lines.append(error1.format(
186 build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH)))
187 # Otherwise, fall back to icrosoft Visual C++ Compiler for Python 2.7+
188 # by using the logic provided by setuptools
189 msvc_env_lines.append(error1.format(
190 build_vcvarsall_cmd(distutils_find_vcvarsall(9))))
191 else:
192 # Visual Studio 14 or otherwise
193 msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
194
195 return '\n'.join(msvc_env_lines) + '\n'
196
197
198 def build(m, bld_bat, config):
199 with path_prepended(config.build_prefix):
200 env = environ.get_dict(config=config, m=m)
201 env["CONDA_BUILD_STATE"] = "BUILD"
202
203 for name in 'BIN', 'INC', 'LIB':
204 path = env['LIBRARY_' + name]
205 if not isdir(path):
206 os.makedirs(path)
207
208 src_dir = config.work_dir
209 if os.path.isfile(bld_bat):
210 with open(bld_bat) as fi:
211 data = fi.read()
212 with open(join(src_dir, 'bld.bat'), 'w') as fo:
213 # more debuggable with echo on
214 fo.write('@echo on\n')
215 for key, value in env.items():
216 fo.write('set "{key}={value}"\n'.format(key=key, value=value))
217 fo.write(msvc_env_cmd(bits=bits, config=config,
218 override=m.get_value('build/msvc_compiler', None)))
219 # Reset echo on, because MSVC scripts might have turned it off
220 fo.write('@echo on\n')
221 fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"]))
222 fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"]))
223 if config.activate:
224 fo.write('call "{conda_root}\\activate.bat" "{prefix}"\n'.format(
225 conda_root=root_script_dir,
226 prefix=config.build_prefix))
227 fo.write("REM ===== end generated header =====\n")
228 fo.write(data)
229
230 cmd = ['cmd.exe', '/c', 'bld.bat']
231 check_call_env(cmd, cwd=src_dir)
232
233 fix_staged_scripts(join(config.build_prefix, 'Scripts'))
234
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/conda_build/windows.py b/conda_build/windows.py
--- a/conda_build/windows.py
+++ b/conda_build/windows.py
@@ -43,15 +43,16 @@
if not isfile(join(scripts_dir, fn)) or '.' in fn:
continue
- with open(join(scripts_dir, fn)) as f:
- line = bs4.UnicodeDammit(f.readline()).unicode_markup.lower()
+ # read as binary file to ensure we don't run into encoding errors, see #1632
+ with open(join(scripts_dir, fn), 'rb') as f:
+ line = f.readline()
# If it's a #!python script
- if not (line.startswith('#!') and 'python' in line.lower()):
+ if not (line.startswith(b'#!') and b'python' in line.lower()):
continue
print('Adjusting unix-style #! script %s, '
'and adding a .bat file for it' % fn)
# copy it with a .py extension (skipping that first #! line)
- with open(join(scripts_dir, fn + '-script.py'), 'w') as fo:
+ with open(join(scripts_dir, fn + '-script.py'), 'wb') as fo:
fo.write(f.read())
# now create the .exe file
copy_into(join(dirname(__file__), 'cli-%d.exe' % bits),
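
To illustrate why the patch switches to binary mode, here is a small standalone sketch of the same shebang check (the helper name is illustrative and not part of conda-build):

```python
from os.path import join

def is_python_shebang_script(scripts_dir, fn):
    # Reading raw bytes avoids UnicodeDecodeError no matter how the script
    # body is encoded (the waf script, for instance, contains latin-1 bytes).
    with open(join(scripts_dir, fn), 'rb') as f:
        first_line = f.readline()
    return first_line.startswith(b'#!') and b'python' in first_line.lower()
```

Because the check never decodes the file, the encoding of the rest of the script is irrelevant, which is exactly what resolves the waf build failure.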
|
{"golden_diff": "diff --git a/conda_build/windows.py b/conda_build/windows.py\n--- a/conda_build/windows.py\n+++ b/conda_build/windows.py\n@@ -43,15 +43,16 @@\n if not isfile(join(scripts_dir, fn)) or '.' in fn:\n continue\n \n- with open(join(scripts_dir, fn)) as f:\n- line = bs4.UnicodeDammit(f.readline()).unicode_markup.lower()\n+ # read as binary file to ensure we don't run into encoding errors, see #1632\n+ with open(join(scripts_dir, fn), 'rb') as f:\n+ line = f.readline()\n # If it's a #!python script\n- if not (line.startswith('#!') and 'python' in line.lower()):\n+ if not (line.startswith(b'#!') and b'python' in line.lower()):\n continue\n print('Adjusting unix-style #! script %s, '\n 'and adding a .bat file for it' % fn)\n # copy it with a .py extension (skipping that first #! line)\n- with open(join(scripts_dir, fn + '-script.py'), 'w') as fo:\n+ with open(join(scripts_dir, fn + '-script.py'), 'wb') as fo:\n fo.write(f.read())\n # now create the .exe file\n copy_into(join(dirname(__file__), 'cli-%d.exe' % bits),\n", "issue": "Encoding issues on Windows build in the fix_staged_scripts step\nHello.\r\n\r\nI'm trying to build the [waf package on windows](https://github.com/conda-forge/waf-feedstock/pull/4), but I'm having some trouble with ```conda_build``` in the ```fix_staged_scripts``` step. I couldn't find any similar issue. I'll follow with some details:\r\n\r\nWhile building, there is a file named ```waf``` that is meant to be moved to the ```%SCRIPTS%``` folder. The problem seems to be that ```conda-build``` tries to read the file to do some checking, but fails to do so because of the file encoding (https://github.com/conda/conda-build/blob/master/conda_build/windows.py#L45)\r\n\r\nI believe the waf file should be opened with encoding='iso-8859-1'. Making a local change replacing with the code bellow builds the package successfully, but is not a solution:\r\n\r\n```\r\nwith open(join(scripts_dir, fn), encoding='iso-8859-1') as f:\r\n```\r\n\r\nAny suggestions here? I'm unsure how this case should be handled.\r\nI'll leave some suggestions that occurred me:\r\n\r\n1- Fix this at package level, by avoiding running into the ```fix_staged_scripts``` function, somehow. (I don't like this solution because it'll not really fix anything besides this specific package)\r\n\r\n2- Do some try...catch with several encodings near the presented code above\r\n\r\n3- Have a metadata somewhere (maybe in the recipe?) to handle the file encoding. 
Fallback to the current case if none.\n", "before_files": [{"content": "from __future__ import absolute_import, division, print_function\n\nimport os\nimport sys\nfrom os.path import isdir, join, dirname, isfile\n\nimport bs4\n# importing setuptools patches distutils so that it knows how to find VC for python 2.7\nimport setuptools # noqa\n# Leverage the hard work done by setuptools/distutils to find vcvarsall using\n# either the registry or the VS**COMNTOOLS environment variable\nfrom distutils.msvc9compiler import find_vcvarsall as distutils_find_vcvarsall\nfrom distutils.msvc9compiler import Reg, WINSDK_BASE\n\nfrom .conda_interface import bits\n\nfrom conda_build import environ\nfrom conda_build.utils import check_call_env, root_script_dir, path_prepended, copy_into\n\n\nassert sys.platform == 'win32'\n\n\nVS_VERSION_STRING = {\n '8.0': 'Visual Studio 8 2005',\n '9.0': 'Visual Studio 9 2008',\n '10.0': 'Visual Studio 10 2010',\n '11.0': 'Visual Studio 11 2012',\n '12.0': 'Visual Studio 12 2013',\n '14.0': 'Visual Studio 14 2015'\n}\n\n\ndef fix_staged_scripts(scripts_dir):\n \"\"\"\n Fixes scripts which have been installed unix-style to have a .bat\n helper\n \"\"\"\n if not isdir(scripts_dir):\n return\n for fn in os.listdir(scripts_dir):\n # process all the extensionless files\n if not isfile(join(scripts_dir, fn)) or '.' in fn:\n continue\n\n with open(join(scripts_dir, fn)) as f:\n line = bs4.UnicodeDammit(f.readline()).unicode_markup.lower()\n # If it's a #!python script\n if not (line.startswith('#!') and 'python' in line.lower()):\n continue\n print('Adjusting unix-style #! script %s, '\n 'and adding a .bat file for it' % fn)\n # copy it with a .py extension (skipping that first #! line)\n with open(join(scripts_dir, fn + '-script.py'), 'w') as fo:\n fo.write(f.read())\n # now create the .exe file\n copy_into(join(dirname(__file__), 'cli-%d.exe' % bits),\n join(scripts_dir, fn + '.exe'))\n\n # remove the original script\n os.remove(join(scripts_dir, fn))\n\n\ndef build_vcvarsall_vs_path(version):\n \"\"\"\n Given the Visual Studio version, returns the default path to the\n Microsoft Visual Studio vcvarsall.bat file.\n Expected versions are of the form {9.0, 10.0, 12.0, 14.0}\n \"\"\"\n # Set up a load of paths that can be imported from the tests\n if 'ProgramFiles(x86)' in os.environ:\n PROGRAM_FILES_PATH = os.environ['ProgramFiles(x86)']\n else:\n PROGRAM_FILES_PATH = os.environ['ProgramFiles']\n\n flatversion = str(version).replace('.', '')\n vstools = \"VS{0}COMNTOOLS\".format(flatversion)\n\n if vstools in os.environ:\n return os.path.join(os.environ[vstools], '..\\\\..\\\\VC\\\\vcvarsall.bat')\n else:\n # prefer looking at env var; fall back to program files defaults\n return os.path.join(PROGRAM_FILES_PATH,\n 'Microsoft Visual Studio {}'.format(version), 'VC',\n 'vcvarsall.bat')\n\n\ndef msvc_env_cmd(bits, config, override=None):\n arch_selector = 'x86' if bits == 32 else 'amd64'\n\n msvc_env_lines = []\n\n version = None\n if override is not None:\n version = override\n\n # The DISTUTILS_USE_SDK variable tells distutils to not try and validate\n # the MSVC compiler. 
For < 3.5 this still forcibly looks for 'cl.exe'.\n # For > 3.5 it literally just skips the validation logic.\n # See distutils _msvccompiler.py and msvc9compiler.py / msvccompiler.py\n # for more information.\n msvc_env_lines.append('set DISTUTILS_USE_SDK=1')\n # This is also required to hit the 'don't validate' logic on < 3.5.\n # For > 3.5 this is ignored.\n msvc_env_lines.append('set MSSdk=1')\n\n if not version:\n if config.PY3K and config.use_MSVC2015:\n version = '14.0'\n elif config.PY3K:\n version = '10.0'\n else:\n version = '9.0'\n\n if float(version) >= 14.0:\n # For Python 3.5+, ensure that we link with the dynamic runtime. See\n # http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info\n msvc_env_lines.append('set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\\\\vcruntime{0}.dll'.format(\n version.replace('.', '')))\n\n vcvarsall_vs_path = build_vcvarsall_vs_path(version)\n\n def build_vcvarsall_cmd(cmd, arch=arch_selector):\n # Default argument `arch_selector` is defined above\n return 'call \"{cmd}\" {arch}'.format(cmd=cmd, arch=arch)\n\n msvc_env_lines.append('set \"VS_VERSION={}\"'.format(version))\n msvc_env_lines.append('set \"VS_MAJOR={}\"'.format(version.split('.')[0]))\n msvc_env_lines.append('set \"VS_YEAR={}\"'.format(VS_VERSION_STRING[version][-4:]))\n msvc_env_lines.append('set \"CMAKE_GENERATOR={}\"'.format(VS_VERSION_STRING[version] +\n {64: ' Win64', 32: ''}[bits]))\n # tell msys2 to ignore path conversions for issue-causing windows-style flags in build\n # See https://github.com/conda-forge/icu-feedstock/pull/5\n msvc_env_lines.append('set \"MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out\"')\n msvc_env_lines.append('set \"MSYS2_ENV_CONV_EXCL=CL\"')\n if version == '10.0':\n try:\n WIN_SDK_71_PATH = Reg.get_value(os.path.join(WINSDK_BASE, 'v7.1'),\n 'installationfolder')\n WIN_SDK_71_BAT_PATH = os.path.join(WIN_SDK_71_PATH, 'Bin', 'SetEnv.cmd')\n\n win_sdk_arch = '/Release /x86' if bits == 32 else '/Release /x64'\n win_sdk_cmd = build_vcvarsall_cmd(WIN_SDK_71_BAT_PATH, arch=win_sdk_arch)\n\n # There are two methods of building Python 3.3 and 3.4 extensions (both\n # of which required Visual Studio 2010 - as explained in the Python wiki\n # https://wiki.python.org/moin/WindowsCompilers)\n # 1) Use the Windows SDK 7.1\n # 2) Use Visual Studio 2010 (any edition)\n # However, VS2010 never shipped with a 64-bit compiler, so in this case\n # **only** option (1) applies. For this reason, we always try and\n # activate the Windows SDK first. Unfortunately, unsuccessfully setting\n # up the environment does **not EXIT 1** and therefore we must fall\n # back to attempting to set up VS2010.\n # DelayedExpansion is required for the SetEnv.cmd\n msvc_env_lines.append('Setlocal EnableDelayedExpansion')\n msvc_env_lines.append(win_sdk_cmd)\n # If the WindowsSDKDir environment variable has not been successfully\n # set then try activating VS2010\n msvc_env_lines.append('if not \"%WindowsSDKDir%\" == \"{}\" ( {} )'.format(\n WIN_SDK_71_PATH, build_vcvarsall_cmd(vcvarsall_vs_path)))\n # sdk is not installed. 
Fall back to only trying VS 2010\n except KeyError:\n msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))\n elif version == '9.0':\n # Get the Visual Studio 2008 path (not the Visual C++ for Python path)\n # and get the 'vcvars64.bat' from inside the bin (in the directory above\n # that returned by distutils_find_vcvarsall)\n try:\n VCVARS64_VS9_BAT_PATH = os.path.join(os.path.dirname(distutils_find_vcvarsall(9)),\n 'bin', 'vcvars64.bat')\n # there's an exception if VS or the VC compiler for python are not actually installed.\n except (KeyError, TypeError):\n VCVARS64_VS9_BAT_PATH = None\n\n error1 = 'if errorlevel 1 {}'\n\n # Prefer VS9 proper over Microsoft Visual C++ Compiler for Python 2.7\n msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))\n # The Visual Studio 2008 Express edition does not properly contain\n # the amd64 build files, so we call the vcvars64.bat manually,\n # rather than using the vcvarsall.bat which would try and call the\n # missing bat file.\n if arch_selector == 'amd64' and VCVARS64_VS9_BAT_PATH:\n msvc_env_lines.append(error1.format(\n build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH)))\n # Otherwise, fall back to icrosoft Visual C++ Compiler for Python 2.7+\n # by using the logic provided by setuptools\n msvc_env_lines.append(error1.format(\n build_vcvarsall_cmd(distutils_find_vcvarsall(9))))\n else:\n # Visual Studio 14 or otherwise\n msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))\n\n return '\\n'.join(msvc_env_lines) + '\\n'\n\n\ndef build(m, bld_bat, config):\n with path_prepended(config.build_prefix):\n env = environ.get_dict(config=config, m=m)\n env[\"CONDA_BUILD_STATE\"] = \"BUILD\"\n\n for name in 'BIN', 'INC', 'LIB':\n path = env['LIBRARY_' + name]\n if not isdir(path):\n os.makedirs(path)\n\n src_dir = config.work_dir\n if os.path.isfile(bld_bat):\n with open(bld_bat) as fi:\n data = fi.read()\n with open(join(src_dir, 'bld.bat'), 'w') as fo:\n # more debuggable with echo on\n fo.write('@echo on\\n')\n for key, value in env.items():\n fo.write('set \"{key}={value}\"\\n'.format(key=key, value=value))\n fo.write(msvc_env_cmd(bits=bits, config=config,\n override=m.get_value('build/msvc_compiler', None)))\n # Reset echo on, because MSVC scripts might have turned it off\n fo.write('@echo on\\n')\n fo.write('set \"INCLUDE={};%INCLUDE%\"\\n'.format(env[\"LIBRARY_INC\"]))\n fo.write('set \"LIB={};%LIB%\"\\n'.format(env[\"LIBRARY_LIB\"]))\n if config.activate:\n fo.write('call \"{conda_root}\\\\activate.bat\" \"{prefix}\"\\n'.format(\n conda_root=root_script_dir,\n prefix=config.build_prefix))\n fo.write(\"REM ===== end generated header =====\\n\")\n fo.write(data)\n\n cmd = ['cmd.exe', '/c', 'bld.bat']\n check_call_env(cmd, cwd=src_dir)\n\n fix_staged_scripts(join(config.build_prefix, 'Scripts'))\n", "path": "conda_build/windows.py"}], "after_files": [{"content": "from __future__ import absolute_import, division, print_function\n\nimport os\nimport sys\nfrom os.path import isdir, join, dirname, isfile\n\nimport bs4\n# importing setuptools patches distutils so that it knows how to find VC for python 2.7\nimport setuptools # noqa\n# Leverage the hard work done by setuptools/distutils to find vcvarsall using\n# either the registry or the VS**COMNTOOLS environment variable\nfrom distutils.msvc9compiler import find_vcvarsall as distutils_find_vcvarsall\nfrom distutils.msvc9compiler import Reg, WINSDK_BASE\n\nfrom .conda_interface import bits\n\nfrom conda_build import environ\nfrom conda_build.utils import check_call_env, 
root_script_dir, path_prepended, copy_into\n\n\nassert sys.platform == 'win32'\n\n\nVS_VERSION_STRING = {\n '8.0': 'Visual Studio 8 2005',\n '9.0': 'Visual Studio 9 2008',\n '10.0': 'Visual Studio 10 2010',\n '11.0': 'Visual Studio 11 2012',\n '12.0': 'Visual Studio 12 2013',\n '14.0': 'Visual Studio 14 2015'\n}\n\n\ndef fix_staged_scripts(scripts_dir):\n \"\"\"\n Fixes scripts which have been installed unix-style to have a .bat\n helper\n \"\"\"\n if not isdir(scripts_dir):\n return\n for fn in os.listdir(scripts_dir):\n # process all the extensionless files\n if not isfile(join(scripts_dir, fn)) or '.' in fn:\n continue\n\n # read as binary file to ensure we don't run into encoding errors, see #1632\n with open(join(scripts_dir, fn), 'rb') as f:\n line = f.readline()\n # If it's a #!python script\n if not (line.startswith(b'#!') and b'python' in line.lower()):\n continue\n print('Adjusting unix-style #! script %s, '\n 'and adding a .bat file for it' % fn)\n # copy it with a .py extension (skipping that first #! line)\n with open(join(scripts_dir, fn + '-script.py'), 'wb') as fo:\n fo.write(f.read())\n # now create the .exe file\n copy_into(join(dirname(__file__), 'cli-%d.exe' % bits),\n join(scripts_dir, fn + '.exe'))\n\n # remove the original script\n os.remove(join(scripts_dir, fn))\n\n\ndef build_vcvarsall_vs_path(version):\n \"\"\"\n Given the Visual Studio version, returns the default path to the\n Microsoft Visual Studio vcvarsall.bat file.\n Expected versions are of the form {9.0, 10.0, 12.0, 14.0}\n \"\"\"\n # Set up a load of paths that can be imported from the tests\n if 'ProgramFiles(x86)' in os.environ:\n PROGRAM_FILES_PATH = os.environ['ProgramFiles(x86)']\n else:\n PROGRAM_FILES_PATH = os.environ['ProgramFiles']\n\n flatversion = str(version).replace('.', '')\n vstools = \"VS{0}COMNTOOLS\".format(flatversion)\n\n if vstools in os.environ:\n return os.path.join(os.environ[vstools], '..\\\\..\\\\VC\\\\vcvarsall.bat')\n else:\n # prefer looking at env var; fall back to program files defaults\n return os.path.join(PROGRAM_FILES_PATH,\n 'Microsoft Visual Studio {}'.format(version), 'VC',\n 'vcvarsall.bat')\n\n\ndef msvc_env_cmd(bits, config, override=None):\n arch_selector = 'x86' if bits == 32 else 'amd64'\n\n msvc_env_lines = []\n\n version = None\n if override is not None:\n version = override\n\n # The DISTUTILS_USE_SDK variable tells distutils to not try and validate\n # the MSVC compiler. For < 3.5 this still forcibly looks for 'cl.exe'.\n # For > 3.5 it literally just skips the validation logic.\n # See distutils _msvccompiler.py and msvc9compiler.py / msvccompiler.py\n # for more information.\n msvc_env_lines.append('set DISTUTILS_USE_SDK=1')\n # This is also required to hit the 'don't validate' logic on < 3.5.\n # For > 3.5 this is ignored.\n msvc_env_lines.append('set MSSdk=1')\n\n if not version:\n if config.PY3K and config.use_MSVC2015:\n version = '14.0'\n elif config.PY3K:\n version = '10.0'\n else:\n version = '9.0'\n\n if float(version) >= 14.0:\n # For Python 3.5+, ensure that we link with the dynamic runtime. 
See\n # http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info\n msvc_env_lines.append('set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\\\\vcruntime{0}.dll'.format(\n version.replace('.', '')))\n\n vcvarsall_vs_path = build_vcvarsall_vs_path(version)\n\n def build_vcvarsall_cmd(cmd, arch=arch_selector):\n # Default argument `arch_selector` is defined above\n return 'call \"{cmd}\" {arch}'.format(cmd=cmd, arch=arch)\n\n msvc_env_lines.append('set \"VS_VERSION={}\"'.format(version))\n msvc_env_lines.append('set \"VS_MAJOR={}\"'.format(version.split('.')[0]))\n msvc_env_lines.append('set \"VS_YEAR={}\"'.format(VS_VERSION_STRING[version][-4:]))\n msvc_env_lines.append('set \"CMAKE_GENERATOR={}\"'.format(VS_VERSION_STRING[version] +\n {64: ' Win64', 32: ''}[bits]))\n # tell msys2 to ignore path conversions for issue-causing windows-style flags in build\n # See https://github.com/conda-forge/icu-feedstock/pull/5\n msvc_env_lines.append('set \"MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out;%MSYS2_ARG_CONV_EXCL%\"')\n msvc_env_lines.append('set \"MSYS2_ENV_CONV_EXCL=CL\"')\n if version == '10.0':\n try:\n WIN_SDK_71_PATH = Reg.get_value(os.path.join(WINSDK_BASE, 'v7.1'),\n 'installationfolder')\n WIN_SDK_71_BAT_PATH = os.path.join(WIN_SDK_71_PATH, 'Bin', 'SetEnv.cmd')\n\n win_sdk_arch = '/Release /x86' if bits == 32 else '/Release /x64'\n win_sdk_cmd = build_vcvarsall_cmd(WIN_SDK_71_BAT_PATH, arch=win_sdk_arch)\n\n # There are two methods of building Python 3.3 and 3.4 extensions (both\n # of which required Visual Studio 2010 - as explained in the Python wiki\n # https://wiki.python.org/moin/WindowsCompilers)\n # 1) Use the Windows SDK 7.1\n # 2) Use Visual Studio 2010 (any edition)\n # However, VS2010 never shipped with a 64-bit compiler, so in this case\n # **only** option (1) applies. For this reason, we always try and\n # activate the Windows SDK first. Unfortunately, unsuccessfully setting\n # up the environment does **not EXIT 1** and therefore we must fall\n # back to attempting to set up VS2010.\n # DelayedExpansion is required for the SetEnv.cmd\n msvc_env_lines.append('Setlocal EnableDelayedExpansion')\n msvc_env_lines.append(win_sdk_cmd)\n # If the WindowsSDKDir environment variable has not been successfully\n # set then try activating VS2010\n msvc_env_lines.append('if not \"%WindowsSDKDir%\" == \"{}\" ( {} )'.format(\n WIN_SDK_71_PATH, build_vcvarsall_cmd(vcvarsall_vs_path)))\n # sdk is not installed. 
Fall back to only trying VS 2010\n except KeyError:\n msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))\n elif version == '9.0':\n # Get the Visual Studio 2008 path (not the Visual C++ for Python path)\n # and get the 'vcvars64.bat' from inside the bin (in the directory above\n # that returned by distutils_find_vcvarsall)\n try:\n VCVARS64_VS9_BAT_PATH = os.path.join(os.path.dirname(distutils_find_vcvarsall(9)),\n 'bin', 'vcvars64.bat')\n # there's an exception if VS or the VC compiler for python are not actually installed.\n except (KeyError, TypeError):\n VCVARS64_VS9_BAT_PATH = None\n\n error1 = 'if errorlevel 1 {}'\n\n # Prefer VS9 proper over Microsoft Visual C++ Compiler for Python 2.7\n msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))\n # The Visual Studio 2008 Express edition does not properly contain\n # the amd64 build files, so we call the vcvars64.bat manually,\n # rather than using the vcvarsall.bat which would try and call the\n # missing bat file.\n if arch_selector == 'amd64' and VCVARS64_VS9_BAT_PATH:\n msvc_env_lines.append(error1.format(\n build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH)))\n # Otherwise, fall back to icrosoft Visual C++ Compiler for Python 2.7+\n # by using the logic provided by setuptools\n msvc_env_lines.append(error1.format(\n build_vcvarsall_cmd(distutils_find_vcvarsall(9))))\n else:\n # Visual Studio 14 or otherwise\n msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))\n\n return '\\n'.join(msvc_env_lines) + '\\n'\n\n\ndef build(m, bld_bat, config):\n with path_prepended(config.build_prefix):\n env = environ.get_dict(config=config, m=m)\n env[\"CONDA_BUILD_STATE\"] = \"BUILD\"\n\n for name in 'BIN', 'INC', 'LIB':\n path = env['LIBRARY_' + name]\n if not isdir(path):\n os.makedirs(path)\n\n src_dir = config.work_dir\n if os.path.isfile(bld_bat):\n with open(bld_bat) as fi:\n data = fi.read()\n with open(join(src_dir, 'bld.bat'), 'w') as fo:\n # more debuggable with echo on\n fo.write('@echo on\\n')\n for key, value in env.items():\n fo.write('set \"{key}={value}\"\\n'.format(key=key, value=value))\n fo.write(msvc_env_cmd(bits=bits, config=config,\n override=m.get_value('build/msvc_compiler', None)))\n # Reset echo on, because MSVC scripts might have turned it off\n fo.write('@echo on\\n')\n fo.write('set \"INCLUDE={};%INCLUDE%\"\\n'.format(env[\"LIBRARY_INC\"]))\n fo.write('set \"LIB={};%LIB%\"\\n'.format(env[\"LIBRARY_LIB\"]))\n if config.activate:\n fo.write('call \"{conda_root}\\\\activate.bat\" \"{prefix}\"\\n'.format(\n conda_root=root_script_dir,\n prefix=config.build_prefix))\n fo.write(\"REM ===== end generated header =====\\n\")\n fo.write(data)\n\n cmd = ['cmd.exe', '/c', 'bld.bat']\n check_call_env(cmd, cwd=src_dir)\n\n fix_staged_scripts(join(config.build_prefix, 'Scripts'))\n", "path": "conda_build/windows.py"}]}
| 3,851 | 316 |
gh_patches_debug_32909
|
rasdani/github-patches
|
git_diff
|
python-discord__bot-1538
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Stop an in-progress stream if a user's role is revoked
If a user's streaming permission is removed via the revoke stream command, we want to stop any stream that may be in-progress.
If a user's video role drops off due to the normal schedule, then we don't need to do this.
`discord.ActivityType.streaming` shows whether a user is streaming.
This implementation should work even if the member doesn't have the video role anymore.
Concept approved by @MrHemlock
--- END ISSUE ---
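As background, one way to detect an in-progress stream with discord.py is sketched below (it relies on the `Member.voice` / `ActivityType` API and is illustrative only, not the repository's implementation):
```python
import discord


def is_streaming(member: discord.Member) -> bool:
    # A member using "Go Live" in a voice channel exposes `self_stream` on their
    # voice state; an external (e.g. Twitch) stream also shows up as a streaming
    # activity on the member.
    if member.voice and member.voice.self_stream:
        return True
    return any(
        activity.type == discord.ActivityType.streaming
        for activity in member.activities
    )
```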
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bot/exts/moderation/stream.py`
Content:
```
1 import logging
2 from datetime import timedelta, timezone
3 from operator import itemgetter
4
5 import arrow
6 import discord
7 from arrow import Arrow
8 from async_rediscache import RedisCache
9 from discord.ext import commands
10
11 from bot.bot import Bot
12 from bot.constants import Colours, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES, VideoPermission
13 from bot.converters import Expiry
14 from bot.pagination import LinePaginator
15 from bot.utils.scheduling import Scheduler
16 from bot.utils.time import format_infraction_with_duration
17
18 log = logging.getLogger(__name__)
19
20
21 class Stream(commands.Cog):
22 """Grant and revoke streaming permissions from members."""
23
24 # Stores tasks to remove streaming permission
25 # RedisCache[discord.Member.id, UtcPosixTimestamp]
26 task_cache = RedisCache()
27
28 def __init__(self, bot: Bot):
29 self.bot = bot
30 self.scheduler = Scheduler(self.__class__.__name__)
31 self.reload_task = self.bot.loop.create_task(self._reload_tasks_from_redis())
32
33 def cog_unload(self) -> None:
34 """Cancel all scheduled tasks."""
35 self.reload_task.cancel()
36 self.reload_task.add_done_callback(lambda _: self.scheduler.cancel_all())
37
38 async def _revoke_streaming_permission(self, member: discord.Member) -> None:
39 """Remove the streaming permission from the given Member."""
40 await self.task_cache.delete(member.id)
41 await member.remove_roles(discord.Object(Roles.video), reason="Streaming access revoked")
42
43 async def _reload_tasks_from_redis(self) -> None:
44 """Reload outstanding tasks from redis on startup, delete the task if the member has since left the server."""
45 await self.bot.wait_until_guild_available()
46 items = await self.task_cache.items()
47 for key, value in items:
48 member = self.bot.get_guild(Guild.id).get_member(key)
49
50 if not member:
51 # Member isn't found in the cache
52 try:
53 member = await self.bot.get_guild(Guild.id).fetch_member(key)
54 except discord.errors.NotFound:
55 log.debug(
56 f"Member {key} left the guild before we could schedule "
57 "the revoking of their streaming permissions."
58 )
59 await self.task_cache.delete(key)
60 continue
61 except discord.HTTPException:
62 log.exception(f"Exception while trying to retrieve member {key} from Discord.")
63 continue
64
65 revoke_time = Arrow.utcfromtimestamp(value)
66 log.debug(f"Scheduling {member} ({member.id}) to have streaming permission revoked at {revoke_time}")
67 self.scheduler.schedule_at(
68 revoke_time,
69 key,
70 self._revoke_streaming_permission(member)
71 )
72
73 @commands.command(aliases=("streaming",))
74 @commands.has_any_role(*MODERATION_ROLES)
75 async def stream(self, ctx: commands.Context, member: discord.Member, duration: Expiry = None) -> None:
76 """
77 Temporarily grant streaming permissions to a member for a given duration.
78
79 A unit of time should be appended to the duration.
80 Units (∗case-sensitive):
81 \u2003`y` - years
82 \u2003`m` - months∗
83 \u2003`w` - weeks
84 \u2003`d` - days
85 \u2003`h` - hours
86 \u2003`M` - minutes∗
87 \u2003`s` - seconds
88
89 Alternatively, an ISO 8601 timestamp can be provided for the duration.
90 """
91 log.trace(f"Attempting to give temporary streaming permission to {member} ({member.id}).")
92
93 if duration is None:
94 # Use default duration and convert back to datetime as Embed.timestamp doesn't support Arrow
95 duration = arrow.utcnow() + timedelta(minutes=VideoPermission.default_permission_duration)
96 duration = duration.datetime
97 elif duration.tzinfo is None:
98 # Make duration tz-aware.
99 # ISODateTime could already include tzinfo, this check is so it isn't overwritten.
100 duration.replace(tzinfo=timezone.utc)
101
102 # Check if the member already has streaming permission
103 already_allowed = any(Roles.video == role.id for role in member.roles)
104 if already_allowed:
105 await ctx.send(f"{Emojis.cross_mark} {member.mention} can already stream.")
106 log.debug(f"{member} ({member.id}) already has permission to stream.")
107 return
108
109 # Schedule task to remove streaming permission from Member and add it to task cache
110 self.scheduler.schedule_at(duration, member.id, self._revoke_streaming_permission(member))
111 await self.task_cache.set(member.id, duration.timestamp())
112
113 await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted")
114
115 # Use embed as embed timestamps do timezone conversions.
116 embed = discord.Embed(
117 description=f"{Emojis.check_mark} {member.mention} can now stream.",
118 colour=Colours.soft_green
119 )
120 embed.set_footer(text=f"Streaming permission has been given to {member} until")
121 embed.timestamp = duration
122
123 # Mention in content as mentions in embeds don't ping
124 await ctx.send(content=member.mention, embed=embed)
125
126 # Convert here for nicer logging
127 revoke_time = format_infraction_with_duration(str(duration))
128 log.debug(f"Successfully gave {member} ({member.id}) permission to stream until {revoke_time}.")
129
130 @commands.command(aliases=("pstream",))
131 @commands.has_any_role(*MODERATION_ROLES)
132 async def permanentstream(self, ctx: commands.Context, member: discord.Member) -> None:
133 """Permanently grants the given member the permission to stream."""
134 log.trace(f"Attempting to give permanent streaming permission to {member} ({member.id}).")
135
136 # Check if the member already has streaming permission
137 if any(Roles.video == role.id for role in member.roles):
138 if member.id in self.scheduler:
139 # Member has temp permission, so cancel the task to revoke later and delete from cache
140 self.scheduler.cancel(member.id)
141 await self.task_cache.delete(member.id)
142
143 await ctx.send(f"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.")
144 log.debug(
145 f"Successfully upgraded temporary streaming permission for {member} ({member.id}) to permanent."
146 )
147 return
148
149 await ctx.send(f"{Emojis.cross_mark} This member can already stream.")
150 log.debug(f"{member} ({member.id}) already had permanent streaming permission.")
151 return
152
153 await member.add_roles(discord.Object(Roles.video), reason="Permanent streaming access granted")
154 await ctx.send(f"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.")
155 log.debug(f"Successfully gave {member} ({member.id}) permanent streaming permission.")
156
157 @commands.command(aliases=("unstream", "rstream"))
158 @commands.has_any_role(*MODERATION_ROLES)
159 async def revokestream(self, ctx: commands.Context, member: discord.Member) -> None:
160 """Revoke the permission to stream from the given member."""
161 log.trace(f"Attempting to remove streaming permission from {member} ({member.id}).")
162
163 # Check if the member already has streaming permission
164 if any(Roles.video == role.id for role in member.roles):
165 if member.id in self.scheduler:
166 # Member has temp permission, so cancel the task to revoke later and delete from cache
167 self.scheduler.cancel(member.id)
168 await self.task_cache.delete(member.id)
169 await self._revoke_streaming_permission(member)
170
171 await ctx.send(f"{Emojis.check_mark} Revoked the permission to stream from {member.mention}.")
172 log.debug(f"Successfully revoked streaming permission from {member} ({member.id}).")
173 return
174
175 await ctx.send(f"{Emojis.cross_mark} This member doesn't have video permissions to remove!")
176 log.debug(f"{member} ({member.id}) didn't have the streaming permission to remove!")
177
178 @commands.command(aliases=('lstream',))
179 @commands.has_any_role(*MODERATION_ROLES)
180 async def liststream(self, ctx: commands.Context) -> None:
181 """Lists all non-staff users who have permission to stream."""
182 non_staff_members_with_stream = [
183 member
184 for member in ctx.guild.get_role(Roles.video).members
185 if not any(role.id in STAFF_ROLES for role in member.roles)
186 ]
187
188 # List of tuples (UtcPosixTimestamp, str)
189 # So that the list can be sorted on the UtcPosixTimestamp before the message is passed to the paginator.
190 streamer_info = []
191 for member in non_staff_members_with_stream:
192 if revoke_time := await self.task_cache.get(member.id):
193 # Member only has temporary streaming perms
194 revoke_delta = Arrow.utcfromtimestamp(revoke_time).humanize()
195 message = f"{member.mention} will have stream permissions revoked {revoke_delta}."
196 else:
197 message = f"{member.mention} has permanent streaming permissions."
198
199 # If revoke_time is None use max timestamp to force sort to put them at the end
200 streamer_info.append(
201 (revoke_time or Arrow.max.timestamp(), message)
202 )
203
204 if streamer_info:
205 # Sort based on duration left of streaming perms
206 streamer_info.sort(key=itemgetter(0))
207
208 # Only output the message in the pagination
209 lines = [line[1] for line in streamer_info]
210 embed = discord.Embed(
211 title=f"Members with streaming permission (`{len(lines)}` total)",
212 colour=Colours.soft_green
213 )
214 await LinePaginator.paginate(lines, ctx, embed, max_size=400, empty=False)
215 else:
216 await ctx.send("No members with stream permissions found.")
217
218
219 def setup(bot: Bot) -> None:
220 """Loads the Stream cog."""
221 bot.add_cog(Stream(bot))
222
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py
--- a/bot/exts/moderation/stream.py
+++ b/bot/exts/moderation/stream.py
@@ -70,6 +70,28 @@
self._revoke_streaming_permission(member)
)
+ async def _suspend_stream(self, ctx: commands.Context, member: discord.Member) -> None:
+ """Suspend a member's stream."""
+ await self.bot.wait_until_guild_available()
+ voice_state = member.voice
+
+ if not voice_state:
+ return
+
+ # If the user is streaming.
+ if voice_state.self_stream:
+ # End user's stream by moving them to AFK voice channel and back.
+ original_vc = voice_state.channel
+ await member.move_to(ctx.guild.afk_channel)
+ await member.move_to(original_vc)
+
+ # Notify.
+ await ctx.send(f"{member.mention}'s stream has been suspended!")
+ log.debug(f"Successfully suspended stream from {member} ({member.id}).")
+ return
+
+ log.debug(f"No stream found to suspend from {member} ({member.id}).")
+
@commands.command(aliases=("streaming",))
@commands.has_any_role(*MODERATION_ROLES)
async def stream(self, ctx: commands.Context, member: discord.Member, duration: Expiry = None) -> None:
@@ -170,10 +192,12 @@
await ctx.send(f"{Emojis.check_mark} Revoked the permission to stream from {member.mention}.")
log.debug(f"Successfully revoked streaming permission from {member} ({member.id}).")
- return
- await ctx.send(f"{Emojis.cross_mark} This member doesn't have video permissions to remove!")
- log.debug(f"{member} ({member.id}) didn't have the streaming permission to remove!")
+ else:
+ await ctx.send(f"{Emojis.cross_mark} This member doesn't have video permissions to remove!")
+ log.debug(f"{member} ({member.id}) didn't have the streaming permission to remove!")
+
+ await self._suspend_stream(ctx, member)
@commands.command(aliases=('lstream',))
@commands.has_any_role(*MODERATION_ROLES)
|
{"golden_diff": "diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py\n--- a/bot/exts/moderation/stream.py\n+++ b/bot/exts/moderation/stream.py\n@@ -70,6 +70,28 @@\n self._revoke_streaming_permission(member)\n )\n \n+ async def _suspend_stream(self, ctx: commands.Context, member: discord.Member) -> None:\n+ \"\"\"Suspend a member's stream.\"\"\"\n+ await self.bot.wait_until_guild_available()\n+ voice_state = member.voice\n+\n+ if not voice_state:\n+ return\n+\n+ # If the user is streaming.\n+ if voice_state.self_stream:\n+ # End user's stream by moving them to AFK voice channel and back.\n+ original_vc = voice_state.channel\n+ await member.move_to(ctx.guild.afk_channel)\n+ await member.move_to(original_vc)\n+\n+ # Notify.\n+ await ctx.send(f\"{member.mention}'s stream has been suspended!\")\n+ log.debug(f\"Successfully suspended stream from {member} ({member.id}).\")\n+ return\n+\n+ log.debug(f\"No stream found to suspend from {member} ({member.id}).\")\n+\n @commands.command(aliases=(\"streaming\",))\n @commands.has_any_role(*MODERATION_ROLES)\n async def stream(self, ctx: commands.Context, member: discord.Member, duration: Expiry = None) -> None:\n@@ -170,10 +192,12 @@\n \n await ctx.send(f\"{Emojis.check_mark} Revoked the permission to stream from {member.mention}.\")\n log.debug(f\"Successfully revoked streaming permission from {member} ({member.id}).\")\n- return\n \n- await ctx.send(f\"{Emojis.cross_mark} This member doesn't have video permissions to remove!\")\n- log.debug(f\"{member} ({member.id}) didn't have the streaming permission to remove!\")\n+ else:\n+ await ctx.send(f\"{Emojis.cross_mark} This member doesn't have video permissions to remove!\")\n+ log.debug(f\"{member} ({member.id}) didn't have the streaming permission to remove!\")\n+\n+ await self._suspend_stream(ctx, member)\n \n @commands.command(aliases=('lstream',))\n @commands.has_any_role(*MODERATION_ROLES)\n", "issue": "Stop an in-progress stream if a user's role is revoked\nIf a user's streaming permission is removed via the revoke stream command, we want to stop any stream that may be in-progress.\r\n\r\nIf a user's video role drops off due to the normal schedule, then we don't need to do this.\r\n\r\n`discord.ActivityType.streaming` shows whether a user is streaming.\r\n\r\nThis implementation should work even if the member don't have the video role anymore.\r\n\r\nConcept approved by @MrHemlock \n", "before_files": [{"content": "import logging\nfrom datetime import timedelta, timezone\nfrom operator import itemgetter\n\nimport arrow\nimport discord\nfrom arrow import Arrow\nfrom async_rediscache import RedisCache\nfrom discord.ext import commands\n\nfrom bot.bot import Bot\nfrom bot.constants import Colours, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES, VideoPermission\nfrom bot.converters import Expiry\nfrom bot.pagination import LinePaginator\nfrom bot.utils.scheduling import Scheduler\nfrom bot.utils.time import format_infraction_with_duration\n\nlog = logging.getLogger(__name__)\n\n\nclass Stream(commands.Cog):\n \"\"\"Grant and revoke streaming permissions from members.\"\"\"\n\n # Stores tasks to remove streaming permission\n # RedisCache[discord.Member.id, UtcPosixTimestamp]\n task_cache = RedisCache()\n\n def __init__(self, bot: Bot):\n self.bot = bot\n self.scheduler = Scheduler(self.__class__.__name__)\n self.reload_task = self.bot.loop.create_task(self._reload_tasks_from_redis())\n\n def cog_unload(self) -> None:\n \"\"\"Cancel all scheduled tasks.\"\"\"\n 
self.reload_task.cancel()\n self.reload_task.add_done_callback(lambda _: self.scheduler.cancel_all())\n\n async def _revoke_streaming_permission(self, member: discord.Member) -> None:\n \"\"\"Remove the streaming permission from the given Member.\"\"\"\n await self.task_cache.delete(member.id)\n await member.remove_roles(discord.Object(Roles.video), reason=\"Streaming access revoked\")\n\n async def _reload_tasks_from_redis(self) -> None:\n \"\"\"Reload outstanding tasks from redis on startup, delete the task if the member has since left the server.\"\"\"\n await self.bot.wait_until_guild_available()\n items = await self.task_cache.items()\n for key, value in items:\n member = self.bot.get_guild(Guild.id).get_member(key)\n\n if not member:\n # Member isn't found in the cache\n try:\n member = await self.bot.get_guild(Guild.id).fetch_member(key)\n except discord.errors.NotFound:\n log.debug(\n f\"Member {key} left the guild before we could schedule \"\n \"the revoking of their streaming permissions.\"\n )\n await self.task_cache.delete(key)\n continue\n except discord.HTTPException:\n log.exception(f\"Exception while trying to retrieve member {key} from Discord.\")\n continue\n\n revoke_time = Arrow.utcfromtimestamp(value)\n log.debug(f\"Scheduling {member} ({member.id}) to have streaming permission revoked at {revoke_time}\")\n self.scheduler.schedule_at(\n revoke_time,\n key,\n self._revoke_streaming_permission(member)\n )\n\n @commands.command(aliases=(\"streaming\",))\n @commands.has_any_role(*MODERATION_ROLES)\n async def stream(self, ctx: commands.Context, member: discord.Member, duration: Expiry = None) -> None:\n \"\"\"\n Temporarily grant streaming permissions to a member for a given duration.\n\n A unit of time should be appended to the duration.\n Units (\u2217case-sensitive):\n \\u2003`y` - years\n \\u2003`m` - months\u2217\n \\u2003`w` - weeks\n \\u2003`d` - days\n \\u2003`h` - hours\n \\u2003`M` - minutes\u2217\n \\u2003`s` - seconds\n\n Alternatively, an ISO 8601 timestamp can be provided for the duration.\n \"\"\"\n log.trace(f\"Attempting to give temporary streaming permission to {member} ({member.id}).\")\n\n if duration is None:\n # Use default duration and convert back to datetime as Embed.timestamp doesn't support Arrow\n duration = arrow.utcnow() + timedelta(minutes=VideoPermission.default_permission_duration)\n duration = duration.datetime\n elif duration.tzinfo is None:\n # Make duration tz-aware.\n # ISODateTime could already include tzinfo, this check is so it isn't overwritten.\n duration.replace(tzinfo=timezone.utc)\n\n # Check if the member already has streaming permission\n already_allowed = any(Roles.video == role.id for role in member.roles)\n if already_allowed:\n await ctx.send(f\"{Emojis.cross_mark} {member.mention} can already stream.\")\n log.debug(f\"{member} ({member.id}) already has permission to stream.\")\n return\n\n # Schedule task to remove streaming permission from Member and add it to task cache\n self.scheduler.schedule_at(duration, member.id, self._revoke_streaming_permission(member))\n await self.task_cache.set(member.id, duration.timestamp())\n\n await member.add_roles(discord.Object(Roles.video), reason=\"Temporary streaming access granted\")\n\n # Use embed as embed timestamps do timezone conversions.\n embed = discord.Embed(\n description=f\"{Emojis.check_mark} {member.mention} can now stream.\",\n colour=Colours.soft_green\n )\n embed.set_footer(text=f\"Streaming permission has been given to {member} until\")\n embed.timestamp = 
duration\n\n # Mention in content as mentions in embeds don't ping\n await ctx.send(content=member.mention, embed=embed)\n\n # Convert here for nicer logging\n revoke_time = format_infraction_with_duration(str(duration))\n log.debug(f\"Successfully gave {member} ({member.id}) permission to stream until {revoke_time}.\")\n\n @commands.command(aliases=(\"pstream\",))\n @commands.has_any_role(*MODERATION_ROLES)\n async def permanentstream(self, ctx: commands.Context, member: discord.Member) -> None:\n \"\"\"Permanently grants the given member the permission to stream.\"\"\"\n log.trace(f\"Attempting to give permanent streaming permission to {member} ({member.id}).\")\n\n # Check if the member already has streaming permission\n if any(Roles.video == role.id for role in member.roles):\n if member.id in self.scheduler:\n # Member has temp permission, so cancel the task to revoke later and delete from cache\n self.scheduler.cancel(member.id)\n await self.task_cache.delete(member.id)\n\n await ctx.send(f\"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.\")\n log.debug(\n f\"Successfully upgraded temporary streaming permission for {member} ({member.id}) to permanent.\"\n )\n return\n\n await ctx.send(f\"{Emojis.cross_mark} This member can already stream.\")\n log.debug(f\"{member} ({member.id}) already had permanent streaming permission.\")\n return\n\n await member.add_roles(discord.Object(Roles.video), reason=\"Permanent streaming access granted\")\n await ctx.send(f\"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.\")\n log.debug(f\"Successfully gave {member} ({member.id}) permanent streaming permission.\")\n\n @commands.command(aliases=(\"unstream\", \"rstream\"))\n @commands.has_any_role(*MODERATION_ROLES)\n async def revokestream(self, ctx: commands.Context, member: discord.Member) -> None:\n \"\"\"Revoke the permission to stream from the given member.\"\"\"\n log.trace(f\"Attempting to remove streaming permission from {member} ({member.id}).\")\n\n # Check if the member already has streaming permission\n if any(Roles.video == role.id for role in member.roles):\n if member.id in self.scheduler:\n # Member has temp permission, so cancel the task to revoke later and delete from cache\n self.scheduler.cancel(member.id)\n await self.task_cache.delete(member.id)\n await self._revoke_streaming_permission(member)\n\n await ctx.send(f\"{Emojis.check_mark} Revoked the permission to stream from {member.mention}.\")\n log.debug(f\"Successfully revoked streaming permission from {member} ({member.id}).\")\n return\n\n await ctx.send(f\"{Emojis.cross_mark} This member doesn't have video permissions to remove!\")\n log.debug(f\"{member} ({member.id}) didn't have the streaming permission to remove!\")\n\n @commands.command(aliases=('lstream',))\n @commands.has_any_role(*MODERATION_ROLES)\n async def liststream(self, ctx: commands.Context) -> None:\n \"\"\"Lists all non-staff users who have permission to stream.\"\"\"\n non_staff_members_with_stream = [\n member\n for member in ctx.guild.get_role(Roles.video).members\n if not any(role.id in STAFF_ROLES for role in member.roles)\n ]\n\n # List of tuples (UtcPosixTimestamp, str)\n # So that the list can be sorted on the UtcPosixTimestamp before the message is passed to the paginator.\n streamer_info = []\n for member in non_staff_members_with_stream:\n if revoke_time := await self.task_cache.get(member.id):\n # Member only has temporary streaming perms\n revoke_delta = 
Arrow.utcfromtimestamp(revoke_time).humanize()\n message = f\"{member.mention} will have stream permissions revoked {revoke_delta}.\"\n else:\n message = f\"{member.mention} has permanent streaming permissions.\"\n\n # If revoke_time is None use max timestamp to force sort to put them at the end\n streamer_info.append(\n (revoke_time or Arrow.max.timestamp(), message)\n )\n\n if streamer_info:\n # Sort based on duration left of streaming perms\n streamer_info.sort(key=itemgetter(0))\n\n # Only output the message in the pagination\n lines = [line[1] for line in streamer_info]\n embed = discord.Embed(\n title=f\"Members with streaming permission (`{len(lines)}` total)\",\n colour=Colours.soft_green\n )\n await LinePaginator.paginate(lines, ctx, embed, max_size=400, empty=False)\n else:\n await ctx.send(\"No members with stream permissions found.\")\n\n\ndef setup(bot: Bot) -> None:\n \"\"\"Loads the Stream cog.\"\"\"\n bot.add_cog(Stream(bot))\n", "path": "bot/exts/moderation/stream.py"}], "after_files": [{"content": "import logging\nfrom datetime import timedelta, timezone\nfrom operator import itemgetter\n\nimport arrow\nimport discord\nfrom arrow import Arrow\nfrom async_rediscache import RedisCache\nfrom discord.ext import commands\n\nfrom bot.bot import Bot\nfrom bot.constants import Colours, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES, VideoPermission\nfrom bot.converters import Expiry\nfrom bot.pagination import LinePaginator\nfrom bot.utils.scheduling import Scheduler\nfrom bot.utils.time import format_infraction_with_duration\n\nlog = logging.getLogger(__name__)\n\n\nclass Stream(commands.Cog):\n \"\"\"Grant and revoke streaming permissions from members.\"\"\"\n\n # Stores tasks to remove streaming permission\n # RedisCache[discord.Member.id, UtcPosixTimestamp]\n task_cache = RedisCache()\n\n def __init__(self, bot: Bot):\n self.bot = bot\n self.scheduler = Scheduler(self.__class__.__name__)\n self.reload_task = self.bot.loop.create_task(self._reload_tasks_from_redis())\n\n def cog_unload(self) -> None:\n \"\"\"Cancel all scheduled tasks.\"\"\"\n self.reload_task.cancel()\n self.reload_task.add_done_callback(lambda _: self.scheduler.cancel_all())\n\n async def _revoke_streaming_permission(self, member: discord.Member) -> None:\n \"\"\"Remove the streaming permission from the given Member.\"\"\"\n await self.task_cache.delete(member.id)\n await member.remove_roles(discord.Object(Roles.video), reason=\"Streaming access revoked\")\n\n async def _reload_tasks_from_redis(self) -> None:\n \"\"\"Reload outstanding tasks from redis on startup, delete the task if the member has since left the server.\"\"\"\n await self.bot.wait_until_guild_available()\n items = await self.task_cache.items()\n for key, value in items:\n member = self.bot.get_guild(Guild.id).get_member(key)\n\n if not member:\n # Member isn't found in the cache\n try:\n member = await self.bot.get_guild(Guild.id).fetch_member(key)\n except discord.errors.NotFound:\n log.debug(\n f\"Member {key} left the guild before we could schedule \"\n \"the revoking of their streaming permissions.\"\n )\n await self.task_cache.delete(key)\n continue\n except discord.HTTPException:\n log.exception(f\"Exception while trying to retrieve member {key} from Discord.\")\n continue\n\n revoke_time = Arrow.utcfromtimestamp(value)\n log.debug(f\"Scheduling {member} ({member.id}) to have streaming permission revoked at {revoke_time}\")\n self.scheduler.schedule_at(\n revoke_time,\n key,\n self._revoke_streaming_permission(member)\n )\n\n 
async def _suspend_stream(self, ctx: commands.Context, member: discord.Member) -> None:\n \"\"\"Suspend a member's stream.\"\"\"\n await self.bot.wait_until_guild_available()\n voice_state = member.voice\n\n if not voice_state:\n return\n\n # If the user is streaming.\n if voice_state.self_stream:\n # End user's stream by moving them to AFK voice channel and back.\n original_vc = voice_state.channel\n await member.move_to(ctx.guild.afk_channel)\n await member.move_to(original_vc)\n\n # Notify.\n await ctx.send(f\"{member.mention}'s stream has been suspended!\")\n log.debug(f\"Successfully suspended stream from {member} ({member.id}).\")\n return\n\n log.debug(f\"No stream found to suspend from {member} ({member.id}).\")\n\n @commands.command(aliases=(\"streaming\",))\n @commands.has_any_role(*MODERATION_ROLES)\n async def stream(self, ctx: commands.Context, member: discord.Member, duration: Expiry = None) -> None:\n \"\"\"\n Temporarily grant streaming permissions to a member for a given duration.\n\n A unit of time should be appended to the duration.\n Units (\u2217case-sensitive):\n \\u2003`y` - years\n \\u2003`m` - months\u2217\n \\u2003`w` - weeks\n \\u2003`d` - days\n \\u2003`h` - hours\n \\u2003`M` - minutes\u2217\n \\u2003`s` - seconds\n\n Alternatively, an ISO 8601 timestamp can be provided for the duration.\n \"\"\"\n log.trace(f\"Attempting to give temporary streaming permission to {member} ({member.id}).\")\n\n if duration is None:\n # Use default duration and convert back to datetime as Embed.timestamp doesn't support Arrow\n duration = arrow.utcnow() + timedelta(minutes=VideoPermission.default_permission_duration)\n duration = duration.datetime\n elif duration.tzinfo is None:\n # Make duration tz-aware.\n # ISODateTime could already include tzinfo, this check is so it isn't overwritten.\n duration.replace(tzinfo=timezone.utc)\n\n # Check if the member already has streaming permission\n already_allowed = any(Roles.video == role.id for role in member.roles)\n if already_allowed:\n await ctx.send(f\"{Emojis.cross_mark} {member.mention} can already stream.\")\n log.debug(f\"{member} ({member.id}) already has permission to stream.\")\n return\n\n # Schedule task to remove streaming permission from Member and add it to task cache\n self.scheduler.schedule_at(duration, member.id, self._revoke_streaming_permission(member))\n await self.task_cache.set(member.id, duration.timestamp())\n\n await member.add_roles(discord.Object(Roles.video), reason=\"Temporary streaming access granted\")\n\n # Use embed as embed timestamps do timezone conversions.\n embed = discord.Embed(\n description=f\"{Emojis.check_mark} {member.mention} can now stream.\",\n colour=Colours.soft_green\n )\n embed.set_footer(text=f\"Streaming permission has been given to {member} until\")\n embed.timestamp = duration\n\n # Mention in content as mentions in embeds don't ping\n await ctx.send(content=member.mention, embed=embed)\n\n # Convert here for nicer logging\n revoke_time = format_infraction_with_duration(str(duration))\n log.debug(f\"Successfully gave {member} ({member.id}) permission to stream until {revoke_time}.\")\n\n @commands.command(aliases=(\"pstream\",))\n @commands.has_any_role(*MODERATION_ROLES)\n async def permanentstream(self, ctx: commands.Context, member: discord.Member) -> None:\n \"\"\"Permanently grants the given member the permission to stream.\"\"\"\n log.trace(f\"Attempting to give permanent streaming permission to {member} ({member.id}).\")\n\n # Check if the member already has streaming 
permission\n if any(Roles.video == role.id for role in member.roles):\n if member.id in self.scheduler:\n # Member has temp permission, so cancel the task to revoke later and delete from cache\n self.scheduler.cancel(member.id)\n await self.task_cache.delete(member.id)\n\n await ctx.send(f\"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.\")\n log.debug(\n f\"Successfully upgraded temporary streaming permission for {member} ({member.id}) to permanent.\"\n )\n return\n\n await ctx.send(f\"{Emojis.cross_mark} This member can already stream.\")\n log.debug(f\"{member} ({member.id}) already had permanent streaming permission.\")\n return\n\n await member.add_roles(discord.Object(Roles.video), reason=\"Permanent streaming access granted\")\n await ctx.send(f\"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.\")\n log.debug(f\"Successfully gave {member} ({member.id}) permanent streaming permission.\")\n\n @commands.command(aliases=(\"unstream\", \"rstream\"))\n @commands.has_any_role(*MODERATION_ROLES)\n async def revokestream(self, ctx: commands.Context, member: discord.Member) -> None:\n \"\"\"Revoke the permission to stream from the given member.\"\"\"\n log.trace(f\"Attempting to remove streaming permission from {member} ({member.id}).\")\n\n # Check if the member already has streaming permission\n if any(Roles.video == role.id for role in member.roles):\n if member.id in self.scheduler:\n # Member has temp permission, so cancel the task to revoke later and delete from cache\n self.scheduler.cancel(member.id)\n await self.task_cache.delete(member.id)\n await self._revoke_streaming_permission(member)\n\n await ctx.send(f\"{Emojis.check_mark} Revoked the permission to stream from {member.mention}.\")\n log.debug(f\"Successfully revoked streaming permission from {member} ({member.id}).\")\n\n else:\n await ctx.send(f\"{Emojis.cross_mark} This member doesn't have video permissions to remove!\")\n log.debug(f\"{member} ({member.id}) didn't have the streaming permission to remove!\")\n\n await self._suspend_stream(ctx, member)\n\n @commands.command(aliases=('lstream',))\n @commands.has_any_role(*MODERATION_ROLES)\n async def liststream(self, ctx: commands.Context) -> None:\n \"\"\"Lists all non-staff users who have permission to stream.\"\"\"\n non_staff_members_with_stream = [\n member\n for member in ctx.guild.get_role(Roles.video).members\n if not any(role.id in STAFF_ROLES for role in member.roles)\n ]\n\n # List of tuples (UtcPosixTimestamp, str)\n # So that the list can be sorted on the UtcPosixTimestamp before the message is passed to the paginator.\n streamer_info = []\n for member in non_staff_members_with_stream:\n if revoke_time := await self.task_cache.get(member.id):\n # Member only has temporary streaming perms\n revoke_delta = Arrow.utcfromtimestamp(revoke_time).humanize()\n message = f\"{member.mention} will have stream permissions revoked {revoke_delta}.\"\n else:\n message = f\"{member.mention} has permanent streaming permissions.\"\n\n # If revoke_time is None use max timestamp to force sort to put them at the end\n streamer_info.append(\n (revoke_time or Arrow.max.timestamp(), message)\n )\n\n if streamer_info:\n # Sort based on duration left of streaming perms\n streamer_info.sort(key=itemgetter(0))\n\n # Only output the message in the pagination\n lines = [line[1] for line in streamer_info]\n embed = discord.Embed(\n title=f\"Members with streaming permission (`{len(lines)}` total)\",\n 
colour=Colours.soft_green\n )\n await LinePaginator.paginate(lines, ctx, embed, max_size=400, empty=False)\n else:\n await ctx.send(\"No members with stream permissions found.\")\n\n\ndef setup(bot: Bot) -> None:\n \"\"\"Loads the Stream cog.\"\"\"\n bot.add_cog(Stream(bot))\n", "path": "bot/exts/moderation/stream.py"}]}
| 3,046 | 505 |
gh_patches_debug_9269
|
rasdani/github-patches
|
git_diff
|
autogluon__autogluon-2915
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Update scikit-learn-intelex version
- [ ] Check if scikit-learn-intelex can be upgraded.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tabular/setup.py`
Content:
```
1 #!/usr/bin/env python
2 ###########################
3 # This code block is a HACK (!), but is necessary to avoid code duplication. Do NOT alter these lines.
4 import os
5 from setuptools import setup
6 import importlib.util
7 filepath = os.path.abspath(os.path.dirname(__file__))
8 filepath_import = os.path.join(filepath, '..', 'core', 'src', 'autogluon', 'core', '_setup_utils.py')
9 spec = importlib.util.spec_from_file_location("ag_min_dependencies", filepath_import)
10 ag = importlib.util.module_from_spec(spec)
11 # Identical to `from autogluon.core import _setup_utils as ag`, but works without `autogluon.core` being installed.
12 spec.loader.exec_module(ag)
13 ###########################
14
15 import sys
16
17 version = ag.load_version_file()
18 version = ag.update_version(version)
19
20 submodule = 'tabular'
21 install_requires = [
22 # version ranges added in ag.get_dependency_version_ranges()
23 'numpy', # version range defined in `core/_setup_utils.py`
24 'scipy', # version range defined in `core/_setup_utils.py`
25 'pandas', # version range defined in `core/_setup_utils.py`
26 'scikit-learn', # version range defined in `core/_setup_utils.py`
27 'networkx', # version range defined in `core/_setup_utils.py`
28 f'{ag.PACKAGE_NAME}.core=={version}',
29 f'{ag.PACKAGE_NAME}.features=={version}',
30 ]
31
32 extras_require = {
33 'lightgbm': [
34 'lightgbm>=3.3,<3.4',
35 ],
36 'catboost': [
37 'catboost>=1.0,<1.2',
38 ],
39 # FIXME: Debug why xgboost 1.6 has 4x+ slower inference on multiclass datasets compared to 1.4
40 # It is possibly only present on MacOS, haven't tested linux.
41 # XGBoost made API breaking changes in 1.6 with custom metric and callback support, so we don't support older versions.
42 'xgboost': [
43 'xgboost>=1.6,<1.8',
44 ],
45 'fastai': [
46 'torch>=1.9,<1.14',
47 'fastai>=2.3.1,<2.8',
48 ],
49 'ray': [
50 f'{ag.PACKAGE_NAME}.core[all]=={version}',
51 ],
52 'skex': [
53 'scikit-learn-intelex>=2021.6,<2021.8',
54 ],
55 'imodels': [
56 'imodels>=1.3.10,<1.4.0', # 1.3.8/1.3.9 either remove/renamed attribute `complexity_` causing failures. https://github.com/csinva/imodels/issues/147
57 ],
58 'vowpalwabbit': [
59 # FIXME: 9.5+ causes VW to save an empty model which always predicts 0. Confirmed on MacOS (Intel CPU). Unknown how to fix.
60 'vowpalwabbit>=9,<9.5',
61 ],
62 'skl2onnx': [
63 'skl2onnx>=1.13.0,<1.14.0',
64 # For macOS, there isn't a onnxruntime-gpu package installed with skl2onnx.
65 # Therefore, we install onnxruntime explicitly here just for macOS.
66 'onnxruntime>=1.13.0,<1.14.0'
67 ] if sys.platform == 'darwin' else [
68 'skl2onnx>=1.13.0,<1.14.0'
69 ]
70 }
71
72 all_requires = []
73 # TODO: Consider adding 'skex' to 'all'
74 for extra_package in ['lightgbm', 'catboost', 'xgboost', 'fastai', 'ray']:
75 all_requires += extras_require[extra_package]
76 all_requires = list(set(all_requires))
77 extras_require['all'] = all_requires
78
79
80 test_requires = []
81 for test_package in ['imodels', 'vowpalwabbit', 'skl2onnx']:
82 test_requires += extras_require[test_package]
83 extras_require['tests'] = test_requires
84 install_requires = ag.get_dependency_version_ranges(install_requires)
85
86 if __name__ == '__main__':
87 ag.create_version_file(version=version, submodule=submodule)
88 setup_args = ag.default_setup_args(version=version, submodule=submodule)
89 setup(
90 install_requires=install_requires,
91 extras_require=extras_require,
92 **setup_args,
93 )
94
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/tabular/setup.py b/tabular/setup.py
--- a/tabular/setup.py
+++ b/tabular/setup.py
@@ -50,7 +50,8 @@
f'{ag.PACKAGE_NAME}.core[all]=={version}',
],
'skex': [
- 'scikit-learn-intelex>=2021.6,<2021.8',
+ # Note: 2021.7 released on Sep 2022, version 2022.x doesn't exist (went directly from 2021.7 to 2023.0)
+ 'scikit-learn-intelex>=2021.7,<2023.1',
],
'imodels': [
'imodels>=1.3.10,<1.4.0', # 1.3.8/1.3.9 either remove/renamed attribute `complexity_` causing failures. https://github.com/csinva/imodels/issues/147
|
{"golden_diff": "diff --git a/tabular/setup.py b/tabular/setup.py\n--- a/tabular/setup.py\n+++ b/tabular/setup.py\n@@ -50,7 +50,8 @@\n f'{ag.PACKAGE_NAME}.core[all]=={version}',\n ],\n 'skex': [\n- 'scikit-learn-intelex>=2021.6,<2021.8',\n+ # Note: 2021.7 released on Sep 2022, version 2022.x doesn't exist (went directly from 2021.7 to 2023.0)\n+ 'scikit-learn-intelex>=2021.7,<2023.1',\n ],\n 'imodels': [\n 'imodels>=1.3.10,<1.4.0', # 1.3.8/1.3.9 either remove/renamed attribute `complexity_` causing failures. https://github.com/csinva/imodels/issues/147\n", "issue": "Update scikit-learn-intelex version\n- [ ] Check if scikit-learn-intelex can be upgraded.\n", "before_files": [{"content": "#!/usr/bin/env python\n###########################\n# This code block is a HACK (!), but is necessary to avoid code duplication. Do NOT alter these lines.\nimport os\nfrom setuptools import setup\nimport importlib.util\nfilepath = os.path.abspath(os.path.dirname(__file__))\nfilepath_import = os.path.join(filepath, '..', 'core', 'src', 'autogluon', 'core', '_setup_utils.py')\nspec = importlib.util.spec_from_file_location(\"ag_min_dependencies\", filepath_import)\nag = importlib.util.module_from_spec(spec)\n# Identical to `from autogluon.core import _setup_utils as ag`, but works without `autogluon.core` being installed.\nspec.loader.exec_module(ag)\n###########################\n\nimport sys\n\nversion = ag.load_version_file()\nversion = ag.update_version(version)\n\nsubmodule = 'tabular'\ninstall_requires = [\n # version ranges added in ag.get_dependency_version_ranges()\n 'numpy', # version range defined in `core/_setup_utils.py`\n 'scipy', # version range defined in `core/_setup_utils.py`\n 'pandas', # version range defined in `core/_setup_utils.py`\n 'scikit-learn', # version range defined in `core/_setup_utils.py`\n 'networkx', # version range defined in `core/_setup_utils.py`\n f'{ag.PACKAGE_NAME}.core=={version}',\n f'{ag.PACKAGE_NAME}.features=={version}',\n]\n\nextras_require = {\n 'lightgbm': [\n 'lightgbm>=3.3,<3.4',\n ],\n 'catboost': [\n 'catboost>=1.0,<1.2',\n ],\n # FIXME: Debug why xgboost 1.6 has 4x+ slower inference on multiclass datasets compared to 1.4\n # It is possibly only present on MacOS, haven't tested linux.\n # XGBoost made API breaking changes in 1.6 with custom metric and callback support, so we don't support older versions.\n 'xgboost': [\n 'xgboost>=1.6,<1.8',\n ],\n 'fastai': [\n 'torch>=1.9,<1.14',\n 'fastai>=2.3.1,<2.8',\n ],\n 'ray': [\n f'{ag.PACKAGE_NAME}.core[all]=={version}',\n ],\n 'skex': [\n 'scikit-learn-intelex>=2021.6,<2021.8',\n ],\n 'imodels': [\n 'imodels>=1.3.10,<1.4.0', # 1.3.8/1.3.9 either remove/renamed attribute `complexity_` causing failures. https://github.com/csinva/imodels/issues/147\n ],\n 'vowpalwabbit': [\n # FIXME: 9.5+ causes VW to save an empty model which always predicts 0. Confirmed on MacOS (Intel CPU). 
Unknown how to fix.\n 'vowpalwabbit>=9,<9.5',\n ],\n 'skl2onnx': [\n 'skl2onnx>=1.13.0,<1.14.0',\n # For macOS, there isn't a onnxruntime-gpu package installed with skl2onnx.\n # Therefore, we install onnxruntime explicitly here just for macOS.\n 'onnxruntime>=1.13.0,<1.14.0'\n ] if sys.platform == 'darwin' else [\n 'skl2onnx>=1.13.0,<1.14.0'\n ]\n}\n\nall_requires = []\n# TODO: Consider adding 'skex' to 'all'\nfor extra_package in ['lightgbm', 'catboost', 'xgboost', 'fastai', 'ray']:\n all_requires += extras_require[extra_package]\nall_requires = list(set(all_requires))\nextras_require['all'] = all_requires\n\n\ntest_requires = []\nfor test_package in ['imodels', 'vowpalwabbit', 'skl2onnx']:\n test_requires += extras_require[test_package]\nextras_require['tests'] = test_requires\ninstall_requires = ag.get_dependency_version_ranges(install_requires)\n\nif __name__ == '__main__':\n ag.create_version_file(version=version, submodule=submodule)\n setup_args = ag.default_setup_args(version=version, submodule=submodule)\n setup(\n install_requires=install_requires,\n extras_require=extras_require,\n **setup_args,\n )\n", "path": "tabular/setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n###########################\n# This code block is a HACK (!), but is necessary to avoid code duplication. Do NOT alter these lines.\nimport os\nfrom setuptools import setup\nimport importlib.util\nfilepath = os.path.abspath(os.path.dirname(__file__))\nfilepath_import = os.path.join(filepath, '..', 'core', 'src', 'autogluon', 'core', '_setup_utils.py')\nspec = importlib.util.spec_from_file_location(\"ag_min_dependencies\", filepath_import)\nag = importlib.util.module_from_spec(spec)\n# Identical to `from autogluon.core import _setup_utils as ag`, but works without `autogluon.core` being installed.\nspec.loader.exec_module(ag)\n###########################\n\nimport sys\n\nversion = ag.load_version_file()\nversion = ag.update_version(version)\n\nsubmodule = 'tabular'\ninstall_requires = [\n # version ranges added in ag.get_dependency_version_ranges()\n 'numpy', # version range defined in `core/_setup_utils.py`\n 'scipy', # version range defined in `core/_setup_utils.py`\n 'pandas', # version range defined in `core/_setup_utils.py`\n 'scikit-learn', # version range defined in `core/_setup_utils.py`\n 'networkx', # version range defined in `core/_setup_utils.py`\n f'{ag.PACKAGE_NAME}.core=={version}',\n f'{ag.PACKAGE_NAME}.features=={version}',\n]\n\nextras_require = {\n 'lightgbm': [\n 'lightgbm>=3.3,<3.4',\n ],\n 'catboost': [\n 'catboost>=1.0,<1.2',\n ],\n # FIXME: Debug why xgboost 1.6 has 4x+ slower inference on multiclass datasets compared to 1.4\n # It is possibly only present on MacOS, haven't tested linux.\n # XGBoost made API breaking changes in 1.6 with custom metric and callback support, so we don't support older versions.\n 'xgboost': [\n 'xgboost>=1.6,<1.8',\n ],\n 'fastai': [\n 'torch>=1.9,<1.14',\n 'fastai>=2.3.1,<2.8',\n ],\n 'ray': [\n f'{ag.PACKAGE_NAME}.core[all]=={version}',\n ],\n 'skex': [\n # Note: 2021.7 released on Sep 2022, version 2022.x doesn't exist (went directly from 2021.7 to 2023.0)\n 'scikit-learn-intelex>=2021.7,<2023.1',\n ],\n 'imodels': [\n 'imodels>=1.3.10,<1.4.0', # 1.3.8/1.3.9 either remove/renamed attribute `complexity_` causing failures. https://github.com/csinva/imodels/issues/147\n ],\n 'vowpalwabbit': [\n # FIXME: 9.5+ causes VW to save an empty model which always predicts 0. Confirmed on MacOS (Intel CPU). 
Unknown how to fix.\n 'vowpalwabbit>=9,<9.5',\n ],\n 'skl2onnx': [\n 'skl2onnx>=1.13.0,<1.14.0',\n # For macOS, there isn't a onnxruntime-gpu package installed with skl2onnx.\n # Therefore, we install onnxruntime explicitly here just for macOS.\n 'onnxruntime>=1.13.0,<1.14.0'\n ] if sys.platform == 'darwin' else [\n 'skl2onnx>=1.13.0,<1.14.0'\n ]\n}\n\nall_requires = []\n# TODO: Consider adding 'skex' to 'all'\nfor extra_package in ['lightgbm', 'catboost', 'xgboost', 'fastai', 'ray']:\n all_requires += extras_require[extra_package]\nall_requires = list(set(all_requires))\nextras_require['all'] = all_requires\n\n\ntest_requires = []\nfor test_package in ['imodels', 'vowpalwabbit', 'skl2onnx']:\n test_requires += extras_require[test_package]\nextras_require['tests'] = test_requires\ninstall_requires = ag.get_dependency_version_ranges(install_requires)\n\nif __name__ == '__main__':\n ag.create_version_file(version=version, submodule=submodule)\n setup_args = ag.default_setup_args(version=version, submodule=submodule)\n setup(\n install_requires=install_requires,\n extras_require=extras_require,\n **setup_args,\n )\n", "path": "tabular/setup.py"}]}
| 1,462 | 237 |
gh_patches_debug_2274
|
rasdani/github-patches
|
git_diff
|
svthalia__concrexit-1844
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Event (registration) status message in the API
### Is your feature request related to a problem? Please describe.
Currently, the event status messages (like 'you cannot cancel your registration without having to pay a fine') are hardcoded, and whenever we update them, we must also update the app.
### Describe the solution you'd like
Put the message in the API
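For instance, a minimal sketch of exposing such a message through a DRF serializer (the serializer name, field name, and model attribute here are only illustrative):
```python
from rest_framework import serializers

from events.models import Event


class EventStatusSerializer(serializers.ModelSerializer):
    registration_status_message = serializers.SerializerMethodField()

    class Meta:
        model = Event
        fields = ("pk", "registration_status_message")

    def get_registration_status_message(self, instance):
        # e.g. "You cannot cancel your registration without having to pay a fine."
        return getattr(instance, "cancel_too_late_message", "")
```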
### Additional context
Also checkout #1381
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/events/api/v2/serializers/event.py`
Content:
```
1 from rest_framework import serializers
2
3 from activemembers.api.v2.serializers.member_group import MemberGroupSerializer
4 from announcements.api.v2.serializers import SlideSerializer
5 from documents.api.v2.serializers.document import DocumentSerializer
6 from events import services
7 from events.api.v2.serializers.event_registration import EventRegistrationSerializer
8 from events.models import Event, EventRegistration
9 from thaliawebsite.api.v2.serializers import CleanedHTMLSerializer
10 from utils.snippets import create_google_maps_url
11
12
13 class EventSerializer(serializers.ModelSerializer):
14 """Serializer for events."""
15
16 class Meta:
17 model = Event
18 fields = (
19 "pk",
20 "title",
21 "description",
22 "start",
23 "end",
24 "category",
25 "registration_start",
26 "registration_end",
27 "cancel_deadline",
28 "optional_registrations",
29 "location",
30 "price",
31 "fine",
32 "num_participants",
33 "max_participants",
34 "no_registration_message",
35 "has_fields",
36 "food_event",
37 "maps_url",
38 "user_permissions",
39 "user_registration",
40 "organiser",
41 "slide",
42 "documents",
43 )
44
45 description = CleanedHTMLSerializer()
46 organiser = MemberGroupSerializer()
47 user_registration = serializers.SerializerMethodField("_user_registration")
48 num_participants = serializers.SerializerMethodField("_num_participants")
49 maps_url = serializers.SerializerMethodField("_maps_url")
50 price = serializers.DecimalField(max_digits=5, decimal_places=2)
51 fine = serializers.DecimalField(max_digits=5, decimal_places=2)
52 slide = SlideSerializer()
53 documents = DocumentSerializer(many=True)
54 user_permissions = serializers.SerializerMethodField("_user_permissions")
55
56 def _user_registration(self, instance):
57 try:
58 if self.context["request"].member:
59 reg = instance.eventregistration_set.get(
60 member=self.context["request"].member, date_cancelled=None
61 )
62 return EventRegistrationSerializer(
63 reg,
64 context=self.context,
65 fields=("pk", "present", "queue_position", "date", "payment"),
66 ).data
67 except EventRegistration.DoesNotExist:
68 pass
69 return None
70
71 def _num_participants(self, instance):
72 if (
73 instance.max_participants
74 and instance.participants.count() > instance.max_participants
75 ):
76 return instance.max_participants
77 return instance.participants.count()
78
79 def _user_permissions(self, instance):
80 member = self.context["request"].member
81 return services.event_permissions(member, instance)
82
83 def _maps_url(self, instance):
84 return create_google_maps_url(instance.map_location, zoom=13, size="450x250")
85
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/website/events/api/v2/serializers/event.py b/website/events/api/v2/serializers/event.py
--- a/website/events/api/v2/serializers/event.py
+++ b/website/events/api/v2/serializers/event.py
@@ -32,6 +32,7 @@
"num_participants",
"max_participants",
"no_registration_message",
+ "cancel_too_late_message",
"has_fields",
"food_event",
"maps_url",
|
{"golden_diff": "diff --git a/website/events/api/v2/serializers/event.py b/website/events/api/v2/serializers/event.py\n--- a/website/events/api/v2/serializers/event.py\n+++ b/website/events/api/v2/serializers/event.py\n@@ -32,6 +32,7 @@\n \"num_participants\",\n \"max_participants\",\n \"no_registration_message\",\n+ \"cancel_too_late_message\",\n \"has_fields\",\n \"food_event\",\n \"maps_url\",\n", "issue": "Event (registration) status message in the API\n### Is your feature request related to a problem? Please describe.\r\nCurrently, the event status messages (like 'you cannot cancel your registration without having to pay a fine') are hardcoded and whenever we update them, we must also update the app\r\n\r\n### Describe the solution you'd like\r\nPut the message in the API\r\n\r\n### Additional context\r\nAlso checkout #1381 \n", "before_files": [{"content": "from rest_framework import serializers\n\nfrom activemembers.api.v2.serializers.member_group import MemberGroupSerializer\nfrom announcements.api.v2.serializers import SlideSerializer\nfrom documents.api.v2.serializers.document import DocumentSerializer\nfrom events import services\nfrom events.api.v2.serializers.event_registration import EventRegistrationSerializer\nfrom events.models import Event, EventRegistration\nfrom thaliawebsite.api.v2.serializers import CleanedHTMLSerializer\nfrom utils.snippets import create_google_maps_url\n\n\nclass EventSerializer(serializers.ModelSerializer):\n \"\"\"Serializer for events.\"\"\"\n\n class Meta:\n model = Event\n fields = (\n \"pk\",\n \"title\",\n \"description\",\n \"start\",\n \"end\",\n \"category\",\n \"registration_start\",\n \"registration_end\",\n \"cancel_deadline\",\n \"optional_registrations\",\n \"location\",\n \"price\",\n \"fine\",\n \"num_participants\",\n \"max_participants\",\n \"no_registration_message\",\n \"has_fields\",\n \"food_event\",\n \"maps_url\",\n \"user_permissions\",\n \"user_registration\",\n \"organiser\",\n \"slide\",\n \"documents\",\n )\n\n description = CleanedHTMLSerializer()\n organiser = MemberGroupSerializer()\n user_registration = serializers.SerializerMethodField(\"_user_registration\")\n num_participants = serializers.SerializerMethodField(\"_num_participants\")\n maps_url = serializers.SerializerMethodField(\"_maps_url\")\n price = serializers.DecimalField(max_digits=5, decimal_places=2)\n fine = serializers.DecimalField(max_digits=5, decimal_places=2)\n slide = SlideSerializer()\n documents = DocumentSerializer(many=True)\n user_permissions = serializers.SerializerMethodField(\"_user_permissions\")\n\n def _user_registration(self, instance):\n try:\n if self.context[\"request\"].member:\n reg = instance.eventregistration_set.get(\n member=self.context[\"request\"].member, date_cancelled=None\n )\n return EventRegistrationSerializer(\n reg,\n context=self.context,\n fields=(\"pk\", \"present\", \"queue_position\", \"date\", \"payment\"),\n ).data\n except EventRegistration.DoesNotExist:\n pass\n return None\n\n def _num_participants(self, instance):\n if (\n instance.max_participants\n and instance.participants.count() > instance.max_participants\n ):\n return instance.max_participants\n return instance.participants.count()\n\n def _user_permissions(self, instance):\n member = self.context[\"request\"].member\n return services.event_permissions(member, instance)\n\n def _maps_url(self, instance):\n return create_google_maps_url(instance.map_location, zoom=13, size=\"450x250\")\n", "path": "website/events/api/v2/serializers/event.py"}], 
"after_files": [{"content": "from rest_framework import serializers\n\nfrom activemembers.api.v2.serializers.member_group import MemberGroupSerializer\nfrom announcements.api.v2.serializers import SlideSerializer\nfrom documents.api.v2.serializers.document import DocumentSerializer\nfrom events import services\nfrom events.api.v2.serializers.event_registration import EventRegistrationSerializer\nfrom events.models import Event, EventRegistration\nfrom thaliawebsite.api.v2.serializers import CleanedHTMLSerializer\nfrom utils.snippets import create_google_maps_url\n\n\nclass EventSerializer(serializers.ModelSerializer):\n \"\"\"Serializer for events.\"\"\"\n\n class Meta:\n model = Event\n fields = (\n \"pk\",\n \"title\",\n \"description\",\n \"start\",\n \"end\",\n \"category\",\n \"registration_start\",\n \"registration_end\",\n \"cancel_deadline\",\n \"optional_registrations\",\n \"location\",\n \"price\",\n \"fine\",\n \"num_participants\",\n \"max_participants\",\n \"no_registration_message\",\n \"cancel_too_late_message\",\n \"has_fields\",\n \"food_event\",\n \"maps_url\",\n \"user_permissions\",\n \"user_registration\",\n \"organiser\",\n \"slide\",\n \"documents\",\n )\n\n description = CleanedHTMLSerializer()\n organiser = MemberGroupSerializer()\n user_registration = serializers.SerializerMethodField(\"_user_registration\")\n num_participants = serializers.SerializerMethodField(\"_num_participants\")\n maps_url = serializers.SerializerMethodField(\"_maps_url\")\n price = serializers.DecimalField(max_digits=5, decimal_places=2)\n fine = serializers.DecimalField(max_digits=5, decimal_places=2)\n slide = SlideSerializer()\n documents = DocumentSerializer(many=True)\n user_permissions = serializers.SerializerMethodField(\"_user_permissions\")\n\n def _user_registration(self, instance):\n try:\n if self.context[\"request\"].member:\n reg = instance.eventregistration_set.get(\n member=self.context[\"request\"].member, date_cancelled=None\n )\n return EventRegistrationSerializer(\n reg,\n context=self.context,\n fields=(\"pk\", \"present\", \"queue_position\", \"date\", \"payment\"),\n ).data\n except EventRegistration.DoesNotExist:\n pass\n return None\n\n def _num_participants(self, instance):\n if (\n instance.max_participants\n and instance.participants.count() > instance.max_participants\n ):\n return instance.max_participants\n return instance.participants.count()\n\n def _user_permissions(self, instance):\n member = self.context[\"request\"].member\n return services.event_permissions(member, instance)\n\n def _maps_url(self, instance):\n return create_google_maps_url(instance.map_location, zoom=13, size=\"450x250\")\n", "path": "website/events/api/v2/serializers/event.py"}]}
| 1,077 | 111 |
gh_patches_debug_33003
|
rasdani/github-patches
|
git_diff
|
getredash__redash-1335
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Gmail is merging [OK] and [Triggered] alert emails
### Issue Summary
In Gmail the alert emails from redash are merged on the same thread, which makes it hard to know if an alert was triggered or solved.
Here is the explanation of the behavior of Gmail:
https://www.quora.com/Why-when-I-edit-subject-in-email-and-add-the-symbols-at-the-beginning-of-the-current-subject-to-keep-email-in-the-same-thread-the-email-sometimes-get-divided-in-a-new-thread-and-sometimes-doesnt/answer/Vineet-Chawla
### Steps to Reproduce
1. Setup an alert, register to receive email updates on a gmail address
2. Trigger the alert
3. Change the query and re-run it so that redash sends the email starting with [OK]
4. In gmail you will see the two emails under the subject starting with [Triggered] like this screenshot: 
5. The expectation is to have two threads, or to have the information about whether the alert was triggered or solved in the body of the email
### Technical details:
- Redash Version: 0.9.2+b1536
--- END ISSUE ---
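For context on why the two messages end up in one conversation: Gmail threads mail by a normalized form of the subject line, and short bracketed prefixes such as `[OK]` or `[Triggered]` are among the parts it discards, so both alert subjects collapse to the same thread key. A rough sketch of that normalization (an approximation for illustration, not Gmail's actual, undocumented algorithm):
```py
import re

def normalized_subject(subject):
    # Drop reply/forward markers and short bracketed tags such as [OK] or [Triggered];
    # Gmail's real heuristic is undocumented, so this is only an approximation.
    stripped = re.sub(r'^\s*((re|fwd?):\s*|\[[^\]]{1,20}\]\s*)+', '', subject, flags=re.IGNORECASE)
    return stripped.strip().lower()

print(normalized_subject("[Triggered] High error rate"))  # high error rate
print(normalized_subject("[OK] High error rate"))         # high error rate -> same thread key
```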
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `redash/destinations/email.py`
Content:
```
1 import logging
2
3 from flask_mail import Message
4 from redash import models, mail
5 from redash.destinations import *
6
7
8 class Email(BaseDestination):
9
10 @classmethod
11 def configuration_schema(cls):
12 return {
13 "type": "object",
14 "properties": {
15 "addresses": {
16 "type": "string"
17 },
18 },
19 "required": ["addresses"]
20 }
21
22 @classmethod
23 def icon(cls):
24 return 'fa-envelope'
25
26 def notify(self, alert, query, user, new_state, app, host, options):
27 recipients = [email for email in options.get('addresses', '').split(',') if email]
28
29 if not recipients:
30 logging.warning("No emails given. Skipping send.")
31
32 html = """
33 Check <a href="{host}/alerts/{alert_id}">alert</a> / check <a href="{host}/queries/{query_id}">query</a>.
34 """.format(host=host, alert_id=alert.id, query_id=query.id)
35 logging.debug("Notifying: %s", recipients)
36
37 try:
38 with app.app_context():
39 message = Message(
40 recipients=recipients,
41 subject="[{1}] {0}".format(alert.name.encode('utf-8', 'ignore'), new_state.upper()),
42 html=html
43 )
44 mail.send(message)
45 except Exception:
46 logging.exception("Mail send error.")
47
48 register(Email)
49
```
Path: `redash/settings.py`
Content:
```
1 import json
2 import os
3 import urlparse
4 from funcy import distinct, remove
5
6
7 def parse_db_url(url):
8 url_parts = urlparse.urlparse(url)
9 connection = {'threadlocals': True}
10
11 if url_parts.hostname and not url_parts.path:
12 connection['name'] = url_parts.hostname
13 else:
14 connection['name'] = url_parts.path[1:]
15 connection['host'] = url_parts.hostname
16 connection['port'] = url_parts.port
17 connection['user'] = url_parts.username
18 connection['password'] = url_parts.password
19
20 return connection
21
22
23 def fix_assets_path(path):
24 fullpath = os.path.join(os.path.dirname(__file__), path)
25 return fullpath
26
27
28 def array_from_string(str):
29 array = str.split(',')
30 if "" in array:
31 array.remove("")
32
33 return array
34
35
36 def set_from_string(str):
37 return set(array_from_string(str))
38
39
40 def parse_boolean(str):
41 return json.loads(str.lower())
42
43
44 def all_settings():
45 from types import ModuleType
46
47 settings = {}
48 for name, item in globals().iteritems():
49 if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
50 settings[name] = item
51
52 return settings
53
54
55 NAME = os.environ.get('REDASH_NAME', 'Redash')
56 LOGO_URL = os.environ.get('REDASH_LOGO_URL', '/images/redash_icon_small.png')
57
58 REDIS_URL = os.environ.get('REDASH_REDIS_URL', os.environ.get('REDIS_URL', "redis://localhost:6379/0"))
59 PROXIES_COUNT = int(os.environ.get('REDASH_PROXIES_COUNT', "1"))
60
61 STATSD_HOST = os.environ.get('REDASH_STATSD_HOST', "127.0.0.1")
62 STATSD_PORT = int(os.environ.get('REDASH_STATSD_PORT', "8125"))
63 STATSD_PREFIX = os.environ.get('REDASH_STATSD_PREFIX', "redash")
64 STATSD_USE_TAGS = parse_boolean(os.environ.get('REDASH_STATSD_USE_TAGS', "false"))
65
66 # Connection settings for re:dash's own database (where we store the queries, results, etc)
67 DATABASE_CONFIG = parse_db_url(os.environ.get("REDASH_DATABASE_URL", os.environ.get('DATABASE_URL', "postgresql://postgres")))
68
69 # Celery related settings
70 CELERY_BROKER = os.environ.get("REDASH_CELERY_BROKER", REDIS_URL)
71 CELERY_BACKEND = os.environ.get("REDASH_CELERY_BACKEND", CELERY_BROKER)
72 CELERY_TASK_RESULT_EXPIRES = int(os.environ.get('REDASH_CELERY_TASK_RESULT_EXPIRES', 3600))
73
74 # The following enables periodic job (every 5 minutes) of removing unused query results.
75 QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "true"))
76 QUERY_RESULTS_CLEANUP_COUNT = int(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_COUNT", "100"))
77 QUERY_RESULTS_CLEANUP_MAX_AGE = int(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_MAX_AGE", "7"))
78
79 SCHEMAS_REFRESH_SCHEDULE = int(os.environ.get("REDASH_SCHEMAS_REFRESH_SCHEDULE", 30))
80
81 AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "api_key")
82 PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
83 ENFORCE_HTTPS = parse_boolean(os.environ.get("REDASH_ENFORCE_HTTPS", "false"))
84
85 MULTI_ORG = parse_boolean(os.environ.get("REDASH_MULTI_ORG", "false"))
86
87 GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
88 GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
89 GOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET
90
91 SAML_METADATA_URL = os.environ.get("REDASH_SAML_METADATA_URL", "")
92 SAML_LOGIN_ENABLED = SAML_METADATA_URL != ""
93 SAML_CALLBACK_SERVER_NAME = os.environ.get("REDASH_SAML_CALLBACK_SERVER_NAME", "")
94
95 # Enables the use of an externally-provided and trusted remote user via an HTTP
96 # header. The "user" must be an email address.
97 #
98 # By default the trusted header is X-Forwarded-Remote-User. You can change
99 # this by setting REDASH_REMOTE_USER_HEADER.
100 #
101 # Enabling this authentication method is *potentially dangerous*, and it is
102 # your responsibility to ensure that only a trusted frontend (usually on the
103 # same server) can talk to the redash backend server, otherwise people will be
104 # able to login as anyone they want by directly talking to the redash backend.
105 # You must *also* ensure that any special header in the original request is
106 # removed or always overwritten by your frontend, otherwise your frontend may
107 # pass it through to the backend unchanged.
108 #
109 # Note that redash will only check the remote user once, upon the first need
110 # for a login, and then set a cookie which keeps the user logged in. Dropping
111 # the remote user header after subsequent requests won't automatically log the
112 # user out. Doing so could be done with further work, but usually it's
113 # unnecessary.
114 #
115 # If you also set REDASH_PASSWORD_LOGIN_ENABLED to false, then your
116 # authentication will be seamless. Otherwise a link will be presented on the
117 # login page to trigger remote user auth.
118 REMOTE_USER_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_REMOTE_USER_LOGIN_ENABLED", "false"))
119 REMOTE_USER_HEADER = os.environ.get("REDASH_REMOTE_USER_HEADER", "X-Forwarded-Remote-User")
120
121 # Usually it will be a single path, but we allow to specify additional ones to override the default assets. Only the
122 # last one will be used for Flask templates.
123 STATIC_ASSETS_PATHS = [fix_assets_path(path) for path in os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/app/").split(',')]
124
125 JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600 * 6))
126 COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
127 SESSION_COOKIE_SECURE = parse_boolean(os.environ.get("REDASH_SESSION_COOKIE_SECURE") or str(ENFORCE_HTTPS))
128
129 LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
130
131 # Mail settings:
132 MAIL_SERVER = os.environ.get('REDASH_MAIL_SERVER', 'localhost')
133 MAIL_PORT = int(os.environ.get('REDASH_MAIL_PORT', 25))
134 MAIL_USE_TLS = parse_boolean(os.environ.get('REDASH_MAIL_USE_TLS', 'false'))
135 MAIL_USE_SSL = parse_boolean(os.environ.get('REDASH_MAIL_USE_SSL', 'false'))
136 MAIL_USERNAME = os.environ.get('REDASH_MAIL_USERNAME', None)
137 MAIL_PASSWORD = os.environ.get('REDASH_MAIL_PASSWORD', None)
138 MAIL_DEFAULT_SENDER = os.environ.get('REDASH_MAIL_DEFAULT_SENDER', None)
139 MAIL_MAX_EMAILS = os.environ.get('REDASH_MAIL_MAX_EMAILS', None)
140 MAIL_ASCII_ATTACHMENTS = parse_boolean(os.environ.get('REDASH_MAIL_ASCII_ATTACHMENTS', 'false'))
141
142 HOST = os.environ.get('REDASH_HOST', '')
143
144 # CORS settings for the Query Result API (and possbily future external APIs).
145 # In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN
146 # to the calling domain (or domains in a comma separated list).
147 ACCESS_CONTROL_ALLOW_ORIGIN = set_from_string(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN", ""))
148 ACCESS_CONTROL_ALLOW_CREDENTIALS = parse_boolean(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_CREDENTIALS", "false"))
149 ACCESS_CONTROL_REQUEST_METHOD = os.environ.get("REDASH_CORS_ACCESS_CONTROL_REQUEST_METHOD", "GET, POST, PUT")
150 ACCESS_CONTROL_ALLOW_HEADERS = os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS", "Content-Type")
151
152 # Query Runners
153 default_query_runners = [
154 'redash.query_runner.big_query',
155 'redash.query_runner.google_spreadsheets',
156 'redash.query_runner.graphite',
157 'redash.query_runner.mongodb',
158 'redash.query_runner.mysql',
159 'redash.query_runner.pg',
160 'redash.query_runner.url',
161 'redash.query_runner.influx_db',
162 'redash.query_runner.elasticsearch',
163 'redash.query_runner.presto',
164 'redash.query_runner.hive_ds',
165 'redash.query_runner.impala_ds',
166 'redash.query_runner.vertica',
167 'redash.query_runner.treasuredata',
168 'redash.query_runner.sqlite',
169 'redash.query_runner.dynamodb_sql',
170 'redash.query_runner.mssql',
171 'redash.query_runner.jql'
172 ]
173
174 enabled_query_runners = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join(default_query_runners)))
175 additional_query_runners = array_from_string(os.environ.get("REDASH_ADDITIONAL_QUERY_RUNNERS", ""))
176 disabled_query_runners = array_from_string(os.environ.get("REDASH_DISABLED_QUERY_RUNNERS", ""))
177
178 QUERY_RUNNERS = remove(set(disabled_query_runners), distinct(enabled_query_runners + additional_query_runners))
179
180 # Destinations
181 default_destinations = [
182 'redash.destinations.email',
183 'redash.destinations.slack',
184 'redash.destinations.webhook',
185 'redash.destinations.hipchat',
186 ]
187
188 enabled_destinations = array_from_string(os.environ.get("REDASH_ENABLED_DESTINATIONS", ",".join(default_destinations)))
189 additional_destinations = array_from_string(os.environ.get("REDASH_ADDITIONAL_DESTINATIONS", ""))
190
191 DESTINATIONS = distinct(enabled_destinations + additional_destinations)
192
193 EVENT_REPORTING_WEBHOOKS = array_from_string(os.environ.get("REDASH_EVENT_REPORTING_WEBHOOKS", ""))
194
195 # Support for Sentry (http://getsentry.com/). Just set your Sentry DSN to enable it:
196 SENTRY_DSN = os.environ.get("REDASH_SENTRY_DSN", "")
197
198 # Client side toggles:
199 ALLOW_SCRIPTS_IN_USER_INPUT = parse_boolean(os.environ.get("REDASH_ALLOW_SCRIPTS_IN_USER_INPUT", "false"))
200 DATE_FORMAT = os.environ.get("REDASH_DATE_FORMAT", "DD/MM/YY")
201
202 # Features:
203 FEATURE_ALLOW_ALL_TO_EDIT_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_ALLOW_ALL_TO_EDIT", "true"))
204 VERSION_CHECK = parse_boolean(os.environ.get("REDASH_VERSION_CHECK", "true"))
205 FEATURE_DISABLE_REFRESH_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_DISABLE_REFRESH_QUERIES", "false"))
206 FEATURE_SHOW_QUERY_RESULTS_COUNT = parse_boolean(os.environ.get("REDASH_FEATURE_SHOW_QUERY_RESULTS_COUNT", "true"))
207
208 # BigQuery
209 BIGQUERY_HTTP_TIMEOUT = int(os.environ.get("REDASH_BIGQUERY_HTTP_TIMEOUT", "600"))
210
211 # Enhance schema fetching
212 SCHEMA_RUN_TABLE_SIZE_CALCULATIONS = parse_boolean(os.environ.get("REDASH_SCHEMA_RUN_TABLE_SIZE_CALCULATIONS", "false"))
213
214 # Allow Parameters in Embeds
215 # WARNING: With this option enabled, Redash reads query parameters from the request URL (risk of SQL injection!)
216 ALLOW_PARAMETERS_IN_EMBEDS = parse_boolean(os.environ.get("REDASH_ALLOW_PARAMETERS_IN_EMBEDS", "false"))
217
218 ### Common Client config
219 COMMON_CLIENT_CONFIG = {
220 'allowScriptsInUserInput': ALLOW_SCRIPTS_IN_USER_INPUT,
221 'dateFormat': DATE_FORMAT,
222 'dateTimeFormat': "{0} HH:mm".format(DATE_FORMAT),
223 'allowAllToEditQueries': FEATURE_ALLOW_ALL_TO_EDIT_QUERIES,
224 'mailSettingsMissing': MAIL_DEFAULT_SENDER is None,
225 'logoUrl': LOGO_URL
226 }
227
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/redash/destinations/email.py b/redash/destinations/email.py
--- a/redash/destinations/email.py
+++ b/redash/destinations/email.py
@@ -1,7 +1,7 @@
import logging
from flask_mail import Message
-from redash import models, mail
+from redash import mail, settings
from redash.destinations import *
@@ -15,6 +15,11 @@
"addresses": {
"type": "string"
},
+ "subject_template": {
+ "type": "string",
+ "default": settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE,
+ "title": "Subject Template"
+ }
},
"required": ["addresses"]
}
@@ -36,9 +41,12 @@
try:
with app.app_context():
+ alert_name = alert.name.encode('utf-8', 'ignore')
+ state = new_state.upper()
+ subject_template = options.get('subject_template', settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE)
message = Message(
recipients=recipients,
- subject="[{1}] {0}".format(alert.name.encode('utf-8', 'ignore'), new_state.upper()),
+ subject=subject_template.format(alert_name=alert_name, state=state),
html=html
)
mail.send(message)
diff --git a/redash/settings.py b/redash/settings.py
--- a/redash/settings.py
+++ b/redash/settings.py
@@ -141,6 +141,8 @@
HOST = os.environ.get('REDASH_HOST', '')
+ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE = os.environ.get('REDASH_ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE', "({state}) {alert_name}")
+
# CORS settings for the Query Result API (and possbily future external APIs).
# In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN
# to the calling domain (or domains in a comma separated list).
|
{"golden_diff": "diff --git a/redash/destinations/email.py b/redash/destinations/email.py\n--- a/redash/destinations/email.py\n+++ b/redash/destinations/email.py\n@@ -1,7 +1,7 @@\n import logging\n \n from flask_mail import Message\n-from redash import models, mail\n+from redash import mail, settings\n from redash.destinations import *\n \n \n@@ -15,6 +15,11 @@\n \"addresses\": {\n \"type\": \"string\"\n },\n+ \"subject_template\": {\n+ \"type\": \"string\",\n+ \"default\": settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE,\n+ \"title\": \"Subject Template\"\n+ }\n },\n \"required\": [\"addresses\"]\n }\n@@ -36,9 +41,12 @@\n \n try:\n with app.app_context():\n+ alert_name = alert.name.encode('utf-8', 'ignore')\n+ state = new_state.upper()\n+ subject_template = options.get('subject_template', settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE)\n message = Message(\n recipients=recipients,\n- subject=\"[{1}] {0}\".format(alert.name.encode('utf-8', 'ignore'), new_state.upper()),\n+ subject=subject_template.format(alert_name=alert_name, state=state),\n html=html\n )\n mail.send(message)\ndiff --git a/redash/settings.py b/redash/settings.py\n--- a/redash/settings.py\n+++ b/redash/settings.py\n@@ -141,6 +141,8 @@\n \n HOST = os.environ.get('REDASH_HOST', '')\n \n+ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE = os.environ.get('REDASH_ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE', \"({state}) {alert_name}\")\n+\n # CORS settings for the Query Result API (and possbily future external APIs).\n # In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN\n # to the calling domain (or domains in a comma separated list).\n", "issue": "Gmail is merging [OK] and [Triggered] alert emails\n### Issue Summary\n\nIn Gmail the alert emails from redash are merged on the same thread, which makes it hard to know if an alert was triggered or solved.\n\nHere is the explanation of the behavior of Gmail:\nhttps://www.quora.com/Why-when-I-edit-subject-in-email-and-add-the-symbols-at-the-beginning-of-the-current-subject-to-keep-email-in-the-same-thread-the-email-sometimes-get-divided-in-a-new-thread-and-sometimes-doesnt/answer/Vineet-Chawla\n### Steps to Reproduce\n1. Setup an alert, register to receive email updates on a gmail address\n2. Trigger the alert\n3. Change the query and re-run it so that redash sends the email starting with [OK]\n4. In gmail you will see the two emails under the subject starting with [Triggered] like this screenshot: \n5. The expectation is to have two threads, or to have the information about whether the alert was triggered or solved in the body of the email\n### Technical details:\n- Redash Version: 0.9.2+b1536\n\n", "before_files": [{"content": "import logging\n\nfrom flask_mail import Message\nfrom redash import models, mail\nfrom redash.destinations import *\n\n\nclass Email(BaseDestination):\n\n @classmethod\n def configuration_schema(cls):\n return {\n \"type\": \"object\",\n \"properties\": {\n \"addresses\": {\n \"type\": \"string\"\n },\n },\n \"required\": [\"addresses\"]\n }\n\n @classmethod\n def icon(cls):\n return 'fa-envelope'\n\n def notify(self, alert, query, user, new_state, app, host, options):\n recipients = [email for email in options.get('addresses', '').split(',') if email]\n\n if not recipients:\n logging.warning(\"No emails given. 
Skipping send.\")\n\n html = \"\"\"\n Check <a href=\"{host}/alerts/{alert_id}\">alert</a> / check <a href=\"{host}/queries/{query_id}\">query</a>.\n \"\"\".format(host=host, alert_id=alert.id, query_id=query.id)\n logging.debug(\"Notifying: %s\", recipients)\n\n try:\n with app.app_context():\n message = Message(\n recipients=recipients,\n subject=\"[{1}] {0}\".format(alert.name.encode('utf-8', 'ignore'), new_state.upper()),\n html=html\n )\n mail.send(message)\n except Exception:\n logging.exception(\"Mail send error.\")\n\nregister(Email)\n", "path": "redash/destinations/email.py"}, {"content": "import json\nimport os\nimport urlparse\nfrom funcy import distinct, remove\n\n\ndef parse_db_url(url):\n url_parts = urlparse.urlparse(url)\n connection = {'threadlocals': True}\n\n if url_parts.hostname and not url_parts.path:\n connection['name'] = url_parts.hostname\n else:\n connection['name'] = url_parts.path[1:]\n connection['host'] = url_parts.hostname\n connection['port'] = url_parts.port\n connection['user'] = url_parts.username\n connection['password'] = url_parts.password\n\n return connection\n\n\ndef fix_assets_path(path):\n fullpath = os.path.join(os.path.dirname(__file__), path)\n return fullpath\n\n\ndef array_from_string(str):\n array = str.split(',')\n if \"\" in array:\n array.remove(\"\")\n\n return array\n\n\ndef set_from_string(str):\n return set(array_from_string(str))\n\n\ndef parse_boolean(str):\n return json.loads(str.lower())\n\n\ndef all_settings():\n from types import ModuleType\n\n settings = {}\n for name, item in globals().iteritems():\n if not callable(item) and not name.startswith(\"__\") and not isinstance(item, ModuleType):\n settings[name] = item\n\n return settings\n\n\nNAME = os.environ.get('REDASH_NAME', 'Redash')\nLOGO_URL = os.environ.get('REDASH_LOGO_URL', '/images/redash_icon_small.png')\n\nREDIS_URL = os.environ.get('REDASH_REDIS_URL', os.environ.get('REDIS_URL', \"redis://localhost:6379/0\"))\nPROXIES_COUNT = int(os.environ.get('REDASH_PROXIES_COUNT', \"1\"))\n\nSTATSD_HOST = os.environ.get('REDASH_STATSD_HOST', \"127.0.0.1\")\nSTATSD_PORT = int(os.environ.get('REDASH_STATSD_PORT', \"8125\"))\nSTATSD_PREFIX = os.environ.get('REDASH_STATSD_PREFIX', \"redash\")\nSTATSD_USE_TAGS = parse_boolean(os.environ.get('REDASH_STATSD_USE_TAGS', \"false\"))\n\n# Connection settings for re:dash's own database (where we store the queries, results, etc)\nDATABASE_CONFIG = parse_db_url(os.environ.get(\"REDASH_DATABASE_URL\", os.environ.get('DATABASE_URL', \"postgresql://postgres\")))\n\n# Celery related settings\nCELERY_BROKER = os.environ.get(\"REDASH_CELERY_BROKER\", REDIS_URL)\nCELERY_BACKEND = os.environ.get(\"REDASH_CELERY_BACKEND\", CELERY_BROKER)\nCELERY_TASK_RESULT_EXPIRES = int(os.environ.get('REDASH_CELERY_TASK_RESULT_EXPIRES', 3600))\n\n# The following enables periodic job (every 5 minutes) of removing unused query results.\nQUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get(\"REDASH_QUERY_RESULTS_CLEANUP_ENABLED\", \"true\"))\nQUERY_RESULTS_CLEANUP_COUNT = int(os.environ.get(\"REDASH_QUERY_RESULTS_CLEANUP_COUNT\", \"100\"))\nQUERY_RESULTS_CLEANUP_MAX_AGE = int(os.environ.get(\"REDASH_QUERY_RESULTS_CLEANUP_MAX_AGE\", \"7\"))\n\nSCHEMAS_REFRESH_SCHEDULE = int(os.environ.get(\"REDASH_SCHEMAS_REFRESH_SCHEDULE\", 30))\n\nAUTH_TYPE = os.environ.get(\"REDASH_AUTH_TYPE\", \"api_key\")\nPASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get(\"REDASH_PASSWORD_LOGIN_ENABLED\", \"true\"))\nENFORCE_HTTPS = 
parse_boolean(os.environ.get(\"REDASH_ENFORCE_HTTPS\", \"false\"))\n\nMULTI_ORG = parse_boolean(os.environ.get(\"REDASH_MULTI_ORG\", \"false\"))\n\nGOOGLE_CLIENT_ID = os.environ.get(\"REDASH_GOOGLE_CLIENT_ID\", \"\")\nGOOGLE_CLIENT_SECRET = os.environ.get(\"REDASH_GOOGLE_CLIENT_SECRET\", \"\")\nGOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET\n\nSAML_METADATA_URL = os.environ.get(\"REDASH_SAML_METADATA_URL\", \"\")\nSAML_LOGIN_ENABLED = SAML_METADATA_URL != \"\"\nSAML_CALLBACK_SERVER_NAME = os.environ.get(\"REDASH_SAML_CALLBACK_SERVER_NAME\", \"\")\n\n# Enables the use of an externally-provided and trusted remote user via an HTTP\n# header. The \"user\" must be an email address.\n#\n# By default the trusted header is X-Forwarded-Remote-User. You can change\n# this by setting REDASH_REMOTE_USER_HEADER.\n#\n# Enabling this authentication method is *potentially dangerous*, and it is\n# your responsibility to ensure that only a trusted frontend (usually on the\n# same server) can talk to the redash backend server, otherwise people will be\n# able to login as anyone they want by directly talking to the redash backend.\n# You must *also* ensure that any special header in the original request is\n# removed or always overwritten by your frontend, otherwise your frontend may\n# pass it through to the backend unchanged.\n#\n# Note that redash will only check the remote user once, upon the first need\n# for a login, and then set a cookie which keeps the user logged in. Dropping\n# the remote user header after subsequent requests won't automatically log the\n# user out. Doing so could be done with further work, but usually it's\n# unnecessary.\n#\n# If you also set REDASH_PASSWORD_LOGIN_ENABLED to false, then your\n# authentication will be seamless. Otherwise a link will be presented on the\n# login page to trigger remote user auth.\nREMOTE_USER_LOGIN_ENABLED = parse_boolean(os.environ.get(\"REDASH_REMOTE_USER_LOGIN_ENABLED\", \"false\"))\nREMOTE_USER_HEADER = os.environ.get(\"REDASH_REMOTE_USER_HEADER\", \"X-Forwarded-Remote-User\")\n\n# Usually it will be a single path, but we allow to specify additional ones to override the default assets. 
Only the\n# last one will be used for Flask templates.\nSTATIC_ASSETS_PATHS = [fix_assets_path(path) for path in os.environ.get(\"REDASH_STATIC_ASSETS_PATH\", \"../rd_ui/app/\").split(',')]\n\nJOB_EXPIRY_TIME = int(os.environ.get(\"REDASH_JOB_EXPIRY_TIME\", 3600 * 6))\nCOOKIE_SECRET = os.environ.get(\"REDASH_COOKIE_SECRET\", \"c292a0a3aa32397cdb050e233733900f\")\nSESSION_COOKIE_SECURE = parse_boolean(os.environ.get(\"REDASH_SESSION_COOKIE_SECURE\") or str(ENFORCE_HTTPS))\n\nLOG_LEVEL = os.environ.get(\"REDASH_LOG_LEVEL\", \"INFO\")\n\n# Mail settings:\nMAIL_SERVER = os.environ.get('REDASH_MAIL_SERVER', 'localhost')\nMAIL_PORT = int(os.environ.get('REDASH_MAIL_PORT', 25))\nMAIL_USE_TLS = parse_boolean(os.environ.get('REDASH_MAIL_USE_TLS', 'false'))\nMAIL_USE_SSL = parse_boolean(os.environ.get('REDASH_MAIL_USE_SSL', 'false'))\nMAIL_USERNAME = os.environ.get('REDASH_MAIL_USERNAME', None)\nMAIL_PASSWORD = os.environ.get('REDASH_MAIL_PASSWORD', None)\nMAIL_DEFAULT_SENDER = os.environ.get('REDASH_MAIL_DEFAULT_SENDER', None)\nMAIL_MAX_EMAILS = os.environ.get('REDASH_MAIL_MAX_EMAILS', None)\nMAIL_ASCII_ATTACHMENTS = parse_boolean(os.environ.get('REDASH_MAIL_ASCII_ATTACHMENTS', 'false'))\n\nHOST = os.environ.get('REDASH_HOST', '')\n\n# CORS settings for the Query Result API (and possbily future external APIs).\n# In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN\n# to the calling domain (or domains in a comma separated list).\nACCESS_CONTROL_ALLOW_ORIGIN = set_from_string(os.environ.get(\"REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN\", \"\"))\nACCESS_CONTROL_ALLOW_CREDENTIALS = parse_boolean(os.environ.get(\"REDASH_CORS_ACCESS_CONTROL_ALLOW_CREDENTIALS\", \"false\"))\nACCESS_CONTROL_REQUEST_METHOD = os.environ.get(\"REDASH_CORS_ACCESS_CONTROL_REQUEST_METHOD\", \"GET, POST, PUT\")\nACCESS_CONTROL_ALLOW_HEADERS = os.environ.get(\"REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS\", \"Content-Type\")\n\n# Query Runners\ndefault_query_runners = [\n 'redash.query_runner.big_query',\n 'redash.query_runner.google_spreadsheets',\n 'redash.query_runner.graphite',\n 'redash.query_runner.mongodb',\n 'redash.query_runner.mysql',\n 'redash.query_runner.pg',\n 'redash.query_runner.url',\n 'redash.query_runner.influx_db',\n 'redash.query_runner.elasticsearch',\n 'redash.query_runner.presto',\n 'redash.query_runner.hive_ds',\n 'redash.query_runner.impala_ds',\n 'redash.query_runner.vertica',\n 'redash.query_runner.treasuredata',\n 'redash.query_runner.sqlite',\n 'redash.query_runner.dynamodb_sql',\n 'redash.query_runner.mssql',\n 'redash.query_runner.jql'\n]\n\nenabled_query_runners = array_from_string(os.environ.get(\"REDASH_ENABLED_QUERY_RUNNERS\", \",\".join(default_query_runners)))\nadditional_query_runners = array_from_string(os.environ.get(\"REDASH_ADDITIONAL_QUERY_RUNNERS\", \"\"))\ndisabled_query_runners = array_from_string(os.environ.get(\"REDASH_DISABLED_QUERY_RUNNERS\", \"\"))\n\nQUERY_RUNNERS = remove(set(disabled_query_runners), distinct(enabled_query_runners + additional_query_runners))\n\n# Destinations\ndefault_destinations = [\n 'redash.destinations.email',\n 'redash.destinations.slack',\n 'redash.destinations.webhook',\n 'redash.destinations.hipchat',\n]\n\nenabled_destinations = array_from_string(os.environ.get(\"REDASH_ENABLED_DESTINATIONS\", \",\".join(default_destinations)))\nadditional_destinations = array_from_string(os.environ.get(\"REDASH_ADDITIONAL_DESTINATIONS\", \"\"))\n\nDESTINATIONS = distinct(enabled_destinations + additional_destinations)\n\nEVENT_REPORTING_WEBHOOKS 
= array_from_string(os.environ.get(\"REDASH_EVENT_REPORTING_WEBHOOKS\", \"\"))\n\n# Support for Sentry (http://getsentry.com/). Just set your Sentry DSN to enable it:\nSENTRY_DSN = os.environ.get(\"REDASH_SENTRY_DSN\", \"\")\n\n# Client side toggles:\nALLOW_SCRIPTS_IN_USER_INPUT = parse_boolean(os.environ.get(\"REDASH_ALLOW_SCRIPTS_IN_USER_INPUT\", \"false\"))\nDATE_FORMAT = os.environ.get(\"REDASH_DATE_FORMAT\", \"DD/MM/YY\")\n\n# Features:\nFEATURE_ALLOW_ALL_TO_EDIT_QUERIES = parse_boolean(os.environ.get(\"REDASH_FEATURE_ALLOW_ALL_TO_EDIT\", \"true\"))\nVERSION_CHECK = parse_boolean(os.environ.get(\"REDASH_VERSION_CHECK\", \"true\"))\nFEATURE_DISABLE_REFRESH_QUERIES = parse_boolean(os.environ.get(\"REDASH_FEATURE_DISABLE_REFRESH_QUERIES\", \"false\"))\nFEATURE_SHOW_QUERY_RESULTS_COUNT = parse_boolean(os.environ.get(\"REDASH_FEATURE_SHOW_QUERY_RESULTS_COUNT\", \"true\"))\n\n# BigQuery\nBIGQUERY_HTTP_TIMEOUT = int(os.environ.get(\"REDASH_BIGQUERY_HTTP_TIMEOUT\", \"600\"))\n\n# Enhance schema fetching\nSCHEMA_RUN_TABLE_SIZE_CALCULATIONS = parse_boolean(os.environ.get(\"REDASH_SCHEMA_RUN_TABLE_SIZE_CALCULATIONS\", \"false\"))\n\n# Allow Parameters in Embeds\n# WARNING: With this option enabled, Redash reads query parameters from the request URL (risk of SQL injection!)\nALLOW_PARAMETERS_IN_EMBEDS = parse_boolean(os.environ.get(\"REDASH_ALLOW_PARAMETERS_IN_EMBEDS\", \"false\"))\n\n### Common Client config\nCOMMON_CLIENT_CONFIG = {\n 'allowScriptsInUserInput': ALLOW_SCRIPTS_IN_USER_INPUT,\n 'dateFormat': DATE_FORMAT,\n 'dateTimeFormat': \"{0} HH:mm\".format(DATE_FORMAT),\n 'allowAllToEditQueries': FEATURE_ALLOW_ALL_TO_EDIT_QUERIES,\n 'mailSettingsMissing': MAIL_DEFAULT_SENDER is None,\n 'logoUrl': LOGO_URL\n}\n", "path": "redash/settings.py"}], "after_files": [{"content": "import logging\n\nfrom flask_mail import Message\nfrom redash import mail, settings\nfrom redash.destinations import *\n\n\nclass Email(BaseDestination):\n\n @classmethod\n def configuration_schema(cls):\n return {\n \"type\": \"object\",\n \"properties\": {\n \"addresses\": {\n \"type\": \"string\"\n },\n \"subject_template\": {\n \"type\": \"string\",\n \"default\": settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE,\n \"title\": \"Subject Template\"\n }\n },\n \"required\": [\"addresses\"]\n }\n\n @classmethod\n def icon(cls):\n return 'fa-envelope'\n\n def notify(self, alert, query, user, new_state, app, host, options):\n recipients = [email for email in options.get('addresses', '').split(',') if email]\n\n if not recipients:\n logging.warning(\"No emails given. 
Skipping send.\")\n\n html = \"\"\"\n Check <a href=\"{host}/alerts/{alert_id}\">alert</a> / check <a href=\"{host}/queries/{query_id}\">query</a>.\n \"\"\".format(host=host, alert_id=alert.id, query_id=query.id)\n logging.debug(\"Notifying: %s\", recipients)\n\n try:\n with app.app_context():\n alert_name = alert.name.encode('utf-8', 'ignore')\n state = new_state.upper()\n subject_template = options.get('subject_template', settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE)\n message = Message(\n recipients=recipients,\n subject=subject_template.format(alert_name=alert_name, state=state),\n html=html\n )\n mail.send(message)\n except Exception:\n logging.exception(\"Mail send error.\")\n\nregister(Email)\n", "path": "redash/destinations/email.py"}, {"content": "import json\nimport os\nimport urlparse\nfrom funcy import distinct, remove\n\n\ndef parse_db_url(url):\n url_parts = urlparse.urlparse(url)\n connection = {'threadlocals': True}\n\n if url_parts.hostname and not url_parts.path:\n connection['name'] = url_parts.hostname\n else:\n connection['name'] = url_parts.path[1:]\n connection['host'] = url_parts.hostname\n connection['port'] = url_parts.port\n connection['user'] = url_parts.username\n connection['password'] = url_parts.password\n\n return connection\n\n\ndef fix_assets_path(path):\n fullpath = os.path.join(os.path.dirname(__file__), path)\n return fullpath\n\n\ndef array_from_string(str):\n array = str.split(',')\n if \"\" in array:\n array.remove(\"\")\n\n return array\n\n\ndef set_from_string(str):\n return set(array_from_string(str))\n\n\ndef parse_boolean(str):\n return json.loads(str.lower())\n\n\ndef all_settings():\n from types import ModuleType\n\n settings = {}\n for name, item in globals().iteritems():\n if not callable(item) and not name.startswith(\"__\") and not isinstance(item, ModuleType):\n settings[name] = item\n\n return settings\n\n\nNAME = os.environ.get('REDASH_NAME', 'Redash')\nLOGO_URL = os.environ.get('REDASH_LOGO_URL', '/images/redash_icon_small.png')\n\nREDIS_URL = os.environ.get('REDASH_REDIS_URL', os.environ.get('REDIS_URL', \"redis://localhost:6379/0\"))\nPROXIES_COUNT = int(os.environ.get('REDASH_PROXIES_COUNT', \"1\"))\n\nSTATSD_HOST = os.environ.get('REDASH_STATSD_HOST', \"127.0.0.1\")\nSTATSD_PORT = int(os.environ.get('REDASH_STATSD_PORT', \"8125\"))\nSTATSD_PREFIX = os.environ.get('REDASH_STATSD_PREFIX', \"redash\")\nSTATSD_USE_TAGS = parse_boolean(os.environ.get('REDASH_STATSD_USE_TAGS', \"false\"))\n\n# Connection settings for re:dash's own database (where we store the queries, results, etc)\nDATABASE_CONFIG = parse_db_url(os.environ.get(\"REDASH_DATABASE_URL\", os.environ.get('DATABASE_URL', \"postgresql://postgres\")))\n\n# Celery related settings\nCELERY_BROKER = os.environ.get(\"REDASH_CELERY_BROKER\", REDIS_URL)\nCELERY_BACKEND = os.environ.get(\"REDASH_CELERY_BACKEND\", CELERY_BROKER)\nCELERY_TASK_RESULT_EXPIRES = int(os.environ.get('REDASH_CELERY_TASK_RESULT_EXPIRES', 3600))\n\n# The following enables periodic job (every 5 minutes) of removing unused query results.\nQUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get(\"REDASH_QUERY_RESULTS_CLEANUP_ENABLED\", \"true\"))\nQUERY_RESULTS_CLEANUP_COUNT = int(os.environ.get(\"REDASH_QUERY_RESULTS_CLEANUP_COUNT\", \"100\"))\nQUERY_RESULTS_CLEANUP_MAX_AGE = int(os.environ.get(\"REDASH_QUERY_RESULTS_CLEANUP_MAX_AGE\", \"7\"))\n\nSCHEMAS_REFRESH_SCHEDULE = int(os.environ.get(\"REDASH_SCHEMAS_REFRESH_SCHEDULE\", 30))\n\nAUTH_TYPE = os.environ.get(\"REDASH_AUTH_TYPE\", 
\"api_key\")\nPASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get(\"REDASH_PASSWORD_LOGIN_ENABLED\", \"true\"))\nENFORCE_HTTPS = parse_boolean(os.environ.get(\"REDASH_ENFORCE_HTTPS\", \"false\"))\n\nMULTI_ORG = parse_boolean(os.environ.get(\"REDASH_MULTI_ORG\", \"false\"))\n\nGOOGLE_CLIENT_ID = os.environ.get(\"REDASH_GOOGLE_CLIENT_ID\", \"\")\nGOOGLE_CLIENT_SECRET = os.environ.get(\"REDASH_GOOGLE_CLIENT_SECRET\", \"\")\nGOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET\n\nSAML_METADATA_URL = os.environ.get(\"REDASH_SAML_METADATA_URL\", \"\")\nSAML_LOGIN_ENABLED = SAML_METADATA_URL != \"\"\nSAML_CALLBACK_SERVER_NAME = os.environ.get(\"REDASH_SAML_CALLBACK_SERVER_NAME\", \"\")\n\n# Enables the use of an externally-provided and trusted remote user via an HTTP\n# header. The \"user\" must be an email address.\n#\n# By default the trusted header is X-Forwarded-Remote-User. You can change\n# this by setting REDASH_REMOTE_USER_HEADER.\n#\n# Enabling this authentication method is *potentially dangerous*, and it is\n# your responsibility to ensure that only a trusted frontend (usually on the\n# same server) can talk to the redash backend server, otherwise people will be\n# able to login as anyone they want by directly talking to the redash backend.\n# You must *also* ensure that any special header in the original request is\n# removed or always overwritten by your frontend, otherwise your frontend may\n# pass it through to the backend unchanged.\n#\n# Note that redash will only check the remote user once, upon the first need\n# for a login, and then set a cookie which keeps the user logged in. Dropping\n# the remote user header after subsequent requests won't automatically log the\n# user out. Doing so could be done with further work, but usually it's\n# unnecessary.\n#\n# If you also set REDASH_PASSWORD_LOGIN_ENABLED to false, then your\n# authentication will be seamless. Otherwise a link will be presented on the\n# login page to trigger remote user auth.\nREMOTE_USER_LOGIN_ENABLED = parse_boolean(os.environ.get(\"REDASH_REMOTE_USER_LOGIN_ENABLED\", \"false\"))\nREMOTE_USER_HEADER = os.environ.get(\"REDASH_REMOTE_USER_HEADER\", \"X-Forwarded-Remote-User\")\n\n# Usually it will be a single path, but we allow to specify additional ones to override the default assets. 
Only the\n# last one will be used for Flask templates.\nSTATIC_ASSETS_PATHS = [fix_assets_path(path) for path in os.environ.get(\"REDASH_STATIC_ASSETS_PATH\", \"../rd_ui/app/\").split(',')]\n\nJOB_EXPIRY_TIME = int(os.environ.get(\"REDASH_JOB_EXPIRY_TIME\", 3600 * 6))\nCOOKIE_SECRET = os.environ.get(\"REDASH_COOKIE_SECRET\", \"c292a0a3aa32397cdb050e233733900f\")\nSESSION_COOKIE_SECURE = parse_boolean(os.environ.get(\"REDASH_SESSION_COOKIE_SECURE\") or str(ENFORCE_HTTPS))\n\nLOG_LEVEL = os.environ.get(\"REDASH_LOG_LEVEL\", \"INFO\")\n\n# Mail settings:\nMAIL_SERVER = os.environ.get('REDASH_MAIL_SERVER', 'localhost')\nMAIL_PORT = int(os.environ.get('REDASH_MAIL_PORT', 25))\nMAIL_USE_TLS = parse_boolean(os.environ.get('REDASH_MAIL_USE_TLS', 'false'))\nMAIL_USE_SSL = parse_boolean(os.environ.get('REDASH_MAIL_USE_SSL', 'false'))\nMAIL_USERNAME = os.environ.get('REDASH_MAIL_USERNAME', None)\nMAIL_PASSWORD = os.environ.get('REDASH_MAIL_PASSWORD', None)\nMAIL_DEFAULT_SENDER = os.environ.get('REDASH_MAIL_DEFAULT_SENDER', None)\nMAIL_MAX_EMAILS = os.environ.get('REDASH_MAIL_MAX_EMAILS', None)\nMAIL_ASCII_ATTACHMENTS = parse_boolean(os.environ.get('REDASH_MAIL_ASCII_ATTACHMENTS', 'false'))\n\nHOST = os.environ.get('REDASH_HOST', '')\n\nALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE = os.environ.get('REDASH_ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE', \"({state}) {alert_name}\")\n\n# CORS settings for the Query Result API (and possbily future external APIs).\n# In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN\n# to the calling domain (or domains in a comma separated list).\nACCESS_CONTROL_ALLOW_ORIGIN = set_from_string(os.environ.get(\"REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN\", \"\"))\nACCESS_CONTROL_ALLOW_CREDENTIALS = parse_boolean(os.environ.get(\"REDASH_CORS_ACCESS_CONTROL_ALLOW_CREDENTIALS\", \"false\"))\nACCESS_CONTROL_REQUEST_METHOD = os.environ.get(\"REDASH_CORS_ACCESS_CONTROL_REQUEST_METHOD\", \"GET, POST, PUT\")\nACCESS_CONTROL_ALLOW_HEADERS = os.environ.get(\"REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS\", \"Content-Type\")\n\n# Query Runners\ndefault_query_runners = [\n 'redash.query_runner.big_query',\n 'redash.query_runner.google_spreadsheets',\n 'redash.query_runner.graphite',\n 'redash.query_runner.mongodb',\n 'redash.query_runner.mysql',\n 'redash.query_runner.pg',\n 'redash.query_runner.url',\n 'redash.query_runner.influx_db',\n 'redash.query_runner.elasticsearch',\n 'redash.query_runner.presto',\n 'redash.query_runner.hive_ds',\n 'redash.query_runner.impala_ds',\n 'redash.query_runner.vertica',\n 'redash.query_runner.treasuredata',\n 'redash.query_runner.sqlite',\n 'redash.query_runner.dynamodb_sql',\n 'redash.query_runner.mssql',\n 'redash.query_runner.jql'\n]\n\nenabled_query_runners = array_from_string(os.environ.get(\"REDASH_ENABLED_QUERY_RUNNERS\", \",\".join(default_query_runners)))\nadditional_query_runners = array_from_string(os.environ.get(\"REDASH_ADDITIONAL_QUERY_RUNNERS\", \"\"))\ndisabled_query_runners = array_from_string(os.environ.get(\"REDASH_DISABLED_QUERY_RUNNERS\", \"\"))\n\nQUERY_RUNNERS = remove(set(disabled_query_runners), distinct(enabled_query_runners + additional_query_runners))\n\n# Destinations\ndefault_destinations = [\n 'redash.destinations.email',\n 'redash.destinations.slack',\n 'redash.destinations.webhook',\n 'redash.destinations.hipchat',\n]\n\nenabled_destinations = array_from_string(os.environ.get(\"REDASH_ENABLED_DESTINATIONS\", \",\".join(default_destinations)))\nadditional_destinations = 
array_from_string(os.environ.get(\"REDASH_ADDITIONAL_DESTINATIONS\", \"\"))\n\nDESTINATIONS = distinct(enabled_destinations + additional_destinations)\n\nEVENT_REPORTING_WEBHOOKS = array_from_string(os.environ.get(\"REDASH_EVENT_REPORTING_WEBHOOKS\", \"\"))\n\n# Support for Sentry (http://getsentry.com/). Just set your Sentry DSN to enable it:\nSENTRY_DSN = os.environ.get(\"REDASH_SENTRY_DSN\", \"\")\n\n# Client side toggles:\nALLOW_SCRIPTS_IN_USER_INPUT = parse_boolean(os.environ.get(\"REDASH_ALLOW_SCRIPTS_IN_USER_INPUT\", \"false\"))\nDATE_FORMAT = os.environ.get(\"REDASH_DATE_FORMAT\", \"DD/MM/YY\")\n\n# Features:\nFEATURE_ALLOW_ALL_TO_EDIT_QUERIES = parse_boolean(os.environ.get(\"REDASH_FEATURE_ALLOW_ALL_TO_EDIT\", \"true\"))\nVERSION_CHECK = parse_boolean(os.environ.get(\"REDASH_VERSION_CHECK\", \"true\"))\nFEATURE_DISABLE_REFRESH_QUERIES = parse_boolean(os.environ.get(\"REDASH_FEATURE_DISABLE_REFRESH_QUERIES\", \"false\"))\nFEATURE_SHOW_QUERY_RESULTS_COUNT = parse_boolean(os.environ.get(\"REDASH_FEATURE_SHOW_QUERY_RESULTS_COUNT\", \"true\"))\n\n# BigQuery\nBIGQUERY_HTTP_TIMEOUT = int(os.environ.get(\"REDASH_BIGQUERY_HTTP_TIMEOUT\", \"600\"))\n\n# Enhance schema fetching\nSCHEMA_RUN_TABLE_SIZE_CALCULATIONS = parse_boolean(os.environ.get(\"REDASH_SCHEMA_RUN_TABLE_SIZE_CALCULATIONS\", \"false\"))\n\n# Allow Parameters in Embeds\n# WARNING: With this option enabled, Redash reads query parameters from the request URL (risk of SQL injection!)\nALLOW_PARAMETERS_IN_EMBEDS = parse_boolean(os.environ.get(\"REDASH_ALLOW_PARAMETERS_IN_EMBEDS\", \"false\"))\n\n### Common Client config\nCOMMON_CLIENT_CONFIG = {\n 'allowScriptsInUserInput': ALLOW_SCRIPTS_IN_USER_INPUT,\n 'dateFormat': DATE_FORMAT,\n 'dateTimeFormat': \"{0} HH:mm\".format(DATE_FORMAT),\n 'allowAllToEditQueries': FEATURE_ALLOW_ALL_TO_EDIT_QUERIES,\n 'mailSettingsMissing': MAIL_DEFAULT_SENDER is None,\n 'logoUrl': LOGO_URL\n}\n", "path": "redash/settings.py"}]}
| 4,072 | 433 |
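The accepted fix above sidesteps the threading problem by making the alert subject configurable: the email destination gains a `subject_template` option, backed by a new `ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE` setting that defaults to `({state}) {alert_name}`, so a deployment can choose a format that Gmail keeps in separate threads. A minimal sketch of how the template expands (names taken from the diff above):
```py
subject_template = "({state}) {alert_name}"  # default added to redash/settings.py in the patch

for state in ("TRIGGERED", "OK"):
    print(subject_template.format(alert_name="High error rate", state=state))
# (TRIGGERED) High error rate
# (OK) High error rate
```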
gh_patches_debug_49253
|
rasdani/github-patches
|
git_diff
|
pyro-ppl__pyro-1903
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cannot delete constrained parameter [bug]
### **Issue Description**
Deleting a constrained parameter throws an error.
In the function `param_store.__delitem__()`, the line
`unconstrained_value = constrained_value.unconstrained()`
throws
`AttributeError: 'Tensor' object has no attribute 'unconstrained'`
### **Environment**
OS: Windows 8.1
Python Version: 3.6.8
Pytorch Version: 1.1.0
Pyro Version: 0.3.3
This error was also present using Pytorch 1.0 and Pyro 0.3.1.
### **Code Snippet**
```py
import torch
import pyro
from torch.distributions import constraints
param_store = pyro.get_param_store()
a = pyro.param('a', torch.ones(3))
print(param_store.keys()) #dict_keys(['a'])
param_store.__delitem__('a') #Works fine
print(param_store.keys()) #dict_keys([])
b = pyro.param('b', torch.ones(3), constraint=constraints.positive)
print(param_store.keys()) #dict_keys(['b'])
param_store.__delitem__('b') #AttributeError: 'Tensor' object has no attribute 'unconstrained'
```
--- END ISSUE ---
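For context, the failure comes from what the store actually holds (see `pyro/params/param_store.py` below): `ParamStoreDict._params` maps each name to the *unconstrained* tensor, and only the constrained tensor re-derived by `__getitem__` gets an `unconstrained` weakref attribute attached, so the value popped inside `__delitem__` has nothing of that name to call. A small sketch of the relationship (it pokes at private attributes purely for illustration):
```py
import torch
import pyro
from torch.distributions import constraints

param_store = pyro.get_param_store()
b = pyro.param('b', torch.ones(3), constraint=constraints.positive)

stored = param_store._params['b']    # raw unconstrained tensor (log-space for a positive constraint)
print(b)                             # constrained value: ones
print(stored)                        # unconstrained value: zeros
print(b.unconstrained() is stored)   # True: weakref attached by __getitem__
```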
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyro/params/param_store.py`
Content:
```
1 from __future__ import absolute_import, division, print_function
2
3 import re
4 import warnings
5 import weakref
6
7 import torch
8 from torch.distributions import constraints, transform_to
9
10
11 class ParamStoreDict(object):
12 """
13 Global store for parameters in Pyro. This is basically a key-value store.
14 The typical user interacts with the ParamStore primarily through the
15 primitive `pyro.param`.
16
17 See `Intro Part II <http://pyro.ai/examples/intro_part_ii.html>`_ for further discussion
18 and `SVI Part I <http://pyro.ai/examples/svi_part_i.html>`_ for some examples.
19
20 Some things to bear in mind when using parameters in Pyro:
21
22 - parameters must be assigned unique names
23 - the `init_tensor` argument to `pyro.param` is only used the first time that a given (named)
24 parameter is registered with Pyro.
25 - for this reason, a user may need to use the `clear()` method if working in a REPL in order to
26 get the desired behavior. this method can also be invoked with `pyro.clear_param_store()`.
27 - the internal name of a parameter within a PyTorch `nn.Module` that has been registered with
28 Pyro is prepended with the Pyro name of the module. so nothing prevents the user from having
29 two different modules each of which contains a parameter named `weight`. by contrast, a user
30 can only have one top-level parameter named `weight` (outside of any module).
31 - parameters can be saved and loaded from disk using `save` and `load`.
32 """
33
34 # -------------------------------------------------------------------------------
35 # New dict-like interface
36
37 def __init__(self):
38 """
39 initialize ParamStore data structures
40 """
41 self._params = {} # dictionary from param name to param
42 self._param_to_name = {} # dictionary from unconstrained param to param name
43 self._constraints = {} # dictionary from param name to constraint object
44
45 def clear(self):
46 """
47 Clear the ParamStore
48 """
49 self._params = {}
50 self._param_to_name = {}
51 self._constraints = {}
52
53 def items(self):
54 """
55 Iterate over ``(name, constrained_param)`` pairs.
56 """
57 for name in self._params:
58 yield name, self[name]
59
60 def keys(self):
61 """
62 Iterate over param names.
63 """
64 return self._params.keys()
65
66 def values(self):
67 """
68 Iterate over constrained parameter values.
69 """
70 for name, constrained_param in self.items():
71 yield constrained_param
72
73 def __bool__(self):
74 return bool(self._params)
75
76 def __len__(self):
77 return len(self._params)
78
79 def __contains__(self, name):
80 return name in self._params
81
82 def __iter__(self):
83 """
84 Iterate over param names.
85 """
86 return iter(self.keys())
87
88 def __delitem__(self, name):
89 """
90 Remove a parameter from the param store.
91 """
92 constrained_value = self._params.pop(name)
93 unconstrained_value = constrained_value.unconstrained()
94 self._param_to_name.pop(unconstrained_value)
95 self._constraints.pop(name)
96
97 def __getitem__(self, name):
98 """
99 Get the constrained value of a named parameter.
100 """
101 unconstrained_value = self._params[name]
102
103 # compute the constrained value
104 constraint = self._constraints[name]
105 constrained_value = transform_to(constraint)(unconstrained_value)
106 constrained_value.unconstrained = weakref.ref(unconstrained_value)
107
108 return constrained_value
109
110 def __setitem__(self, name, new_constrained_value):
111 """
112 Set the constrained value of an existing parameter, or the value of a
113 new unconstrained parameter. To declare a new parameter with
114 constraint, use :meth:`setdefault`.
115 """
116 # store constraint, defaulting to unconstrained
117 constraint = self._constraints.setdefault(name, constraints.real)
118
119 # compute the unconstrained value
120 with torch.no_grad():
121 # FIXME should we .detach() the new_constrained_value?
122 unconstrained_value = transform_to(constraint).inv(new_constrained_value)
123 unconstrained_value = unconstrained_value.contiguous()
124 unconstrained_value.requires_grad_(True)
125
126 # store a bidirectional mapping between name and unconstrained tensor
127 self._params[name] = unconstrained_value
128 self._param_to_name[unconstrained_value] = name
129
130 def setdefault(self, name, init_constrained_value, constraint=constraints.real):
131 """
132 Retrieve a constrained parameter value from the if it exists, otherwise
133 set the initial value. Note that this is a little fancier than
134 :meth:`dict.setdefault`.
135
136 If the parameter already exists, ``init_constrained_tensor`` will be ignored. To avoid
137 expensive creation of ``init_constrained_tensor`` you can wrap it in a ``lambda`` that
138 will only be evaluated if the parameter does not already exist::
139
140 param_store.get("foo", lambda: (0.001 * torch.randn(1000, 1000)).exp(),
141 constraint=constraints.positive)
142
143 :param str name: parameter name
144 :param init_constrained_value: initial constrained value
145 :type init_constrained_value: torch.Tensor or callable returning a torch.Tensor
146 :param constraint: torch constraint object
147 :type constraint: torch.distributions.constraints.Constraint
148 :returns: constrained parameter value
149 :rtype: torch.Tensor
150 """
151 if name not in self._params:
152 # set the constraint
153 self._constraints[name] = constraint
154
155 # evaluate the lazy value
156 if callable(init_constrained_value):
157 init_constrained_value = init_constrained_value()
158
159 # set the initial value
160 self[name] = init_constrained_value
161
162 # get the param, which is guaranteed to exist
163 return self[name]
164
165 # -------------------------------------------------------------------------------
166 # Old non-dict interface
167
168 def named_parameters(self):
169 """
170 Returns an iterator over ``(name, unconstrained_value)`` tuples for
171 each parameter in the ParamStore.
172 """
173 return self._params.items()
174
175 def get_all_param_names(self):
176 warnings.warn("ParamStore.get_all_param_names() is deprecated; use .keys() instead.",
177 DeprecationWarning)
178 return self.keys()
179
180 def replace_param(self, param_name, new_param, old_param):
181 warnings.warn("ParamStore.replace_param() is deprecated; use .__setitem__() instead.",
182 DeprecationWarning)
183 assert self._params[param_name] is old_param.unconstrained()
184 self[param_name] = new_param
185
186 def get_param(self, name, init_tensor=None, constraint=constraints.real, event_dim=None):
187 """
188 Get parameter from its name. If it does not yet exist in the
189 ParamStore, it will be created and stored.
190 The Pyro primitive `pyro.param` dispatches to this method.
191
192 :param name: parameter name
193 :type name: str
194 :param init_tensor: initial tensor
195 :type init_tensor: torch.Tensor
196 :param constraint: torch constraint
197 :type constraint: torch.distributions.constraints.Constraint
198 :param int event_dim: (ignored)
199 :returns: parameter
200 :rtype: torch.Tensor
201 """
202 if init_tensor is None:
203 return self[name]
204 else:
205 return self.setdefault(name, init_tensor, constraint)
206
207 def match(self, name):
208 """
209 Get all parameters that match regex. The parameter must exist.
210
211 :param name: regular expression
212 :type name: str
213 :returns: dict with key param name and value torch Tensor
214 """
215 pattern = re.compile(name)
216 return {name: self[name] for name in self if pattern.match(name)}
217
218 def param_name(self, p):
219 """
220 Get parameter name from parameter
221
222 :param p: parameter
223 :returns: parameter name
224 """
225 return self._param_to_name.get(p)
226
227 def get_state(self):
228 """
229 Get the ParamStore state.
230 """
231 state = {
232 'params': self._params,
233 'constraints': self._constraints,
234 }
235 return state
236
237 def set_state(self, state):
238 """
239 Set the ParamStore state using state from a previous get_state() call
240 """
241 assert isinstance(state, dict), "malformed ParamStore state"
242 assert set(state.keys()) == set(['params', 'constraints']), \
243 "malformed ParamStore keys {}".format(state.keys())
244
245 for param_name, param in state['params'].items():
246 self._params[param_name] = param
247 self._param_to_name[param] = param_name
248
249 for param_name, constraint in state['constraints'].items():
250 if isinstance(constraint, type(constraints.real)):
251 # Work around lack of hash & equality comparison on constraints.
252 constraint = constraints.real
253 self._constraints[param_name] = constraint
254
255 def save(self, filename):
256 """
257 Save parameters to disk
258
259 :param filename: file name to save to
260 :type filename: str
261 """
262 with open(filename, "wb") as output_file:
263 torch.save(self.get_state(), output_file)
264
265 def load(self, filename, map_location=None):
266 """
267 Loads parameters from disk
268
269 .. note::
270
271 If using :meth:`pyro.module` on parameters loaded from
272 disk, be sure to set the ``update_module_params`` flag::
273
274 pyro.get_param_store().load('saved_params.save')
275 pyro.module('module', nn, update_module_params=True)
276
277 :param filename: file name to load from
278 :type filename: str
279 :param map_location: specifies how to remap storage locations
280 :type map_location: function, torch.device, string or a dict
281 """
282 with open(filename, "rb") as input_file:
283 state = torch.load(input_file, map_location)
284 self.set_state(state)
285
286
287 # used to create fully-formed param names, e.g. mymodule$$$mysubmodule.weight
288 _MODULE_NAMESPACE_DIVIDER = "$$$"
289
290
291 def param_with_module_name(pyro_name, param_name):
292 return _MODULE_NAMESPACE_DIVIDER.join([pyro_name, param_name])
293
294
295 def module_from_param_with_module_name(param_name):
296 return param_name.split(_MODULE_NAMESPACE_DIVIDER)[0]
297
298
299 def user_param_name(param_name):
300 if _MODULE_NAMESPACE_DIVIDER in param_name:
301 return param_name.split(_MODULE_NAMESPACE_DIVIDER)[1]
302 return param_name
303
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pyro/params/param_store.py b/pyro/params/param_store.py
--- a/pyro/params/param_store.py
+++ b/pyro/params/param_store.py
@@ -89,8 +89,7 @@
"""
Remove a parameter from the param store.
"""
- constrained_value = self._params.pop(name)
- unconstrained_value = constrained_value.unconstrained()
+ unconstrained_value = self._params.pop(name)
self._param_to_name.pop(unconstrained_value)
self._constraints.pop(name)
|
{"golden_diff": "diff --git a/pyro/params/param_store.py b/pyro/params/param_store.py\n--- a/pyro/params/param_store.py\n+++ b/pyro/params/param_store.py\n@@ -89,8 +89,7 @@\n \"\"\"\n Remove a parameter from the param store.\n \"\"\"\n- constrained_value = self._params.pop(name)\n- unconstrained_value = constrained_value.unconstrained()\n+ unconstrained_value = self._params.pop(name)\n self._param_to_name.pop(unconstrained_value)\n self._constraints.pop(name)\n", "issue": "Cannot delete constrained parameter [bug]\n### **Issue Description**\r\nDeleting a constrained parameter throws an error.\r\nIn the function `param_store.__delitem__()`, the line \r\n`unconstrained_value = constrained_value.unconstrained()`\r\nthrows\r\n`AttributeError: 'Tensor' object has no attribute 'unconstrained'`\r\n\r\n### **Environment**\r\nOS: Windows 8.1\r\nPython Version: 3.6.8\r\nPytorch Version: 1.1.0\r\nPyro Version: 0.3.3\r\n\r\nThis error was also present using Pytorch 1.0 and Pyro 0.3.1.\r\n### **Code Snippet**\r\n```py\r\nimport torch\r\nimport pyro\r\nfrom torch.distributions import constraints\r\n\r\nparam_store = pyro.get_param_store()\r\na = pyro.param('a', torch.ones(3))\r\nprint(param_store.keys()) #dict_keys(['a'])\r\nparam_store.__delitem__('a') #Works fine\r\nprint(param_store.keys()) #dict_keys([])\r\nb = pyro.param('b', torch.ones(3), constraint=constraints.positive)\r\nprint(param_store.keys()) #dict_keys(['b'])\r\nparam_store.__delitem__('b') #AttributeError: 'Tensor' object has no attribute 'unconstrained'\r\n```\n", "before_files": [{"content": "from __future__ import absolute_import, division, print_function\n\nimport re\nimport warnings\nimport weakref\n\nimport torch\nfrom torch.distributions import constraints, transform_to\n\n\nclass ParamStoreDict(object):\n \"\"\"\n Global store for parameters in Pyro. This is basically a key-value store.\n The typical user interacts with the ParamStore primarily through the\n primitive `pyro.param`.\n\n See `Intro Part II <http://pyro.ai/examples/intro_part_ii.html>`_ for further discussion\n and `SVI Part I <http://pyro.ai/examples/svi_part_i.html>`_ for some examples.\n\n Some things to bear in mind when using parameters in Pyro:\n\n - parameters must be assigned unique names\n - the `init_tensor` argument to `pyro.param` is only used the first time that a given (named)\n parameter is registered with Pyro.\n - for this reason, a user may need to use the `clear()` method if working in a REPL in order to\n get the desired behavior. this method can also be invoked with `pyro.clear_param_store()`.\n - the internal name of a parameter within a PyTorch `nn.Module` that has been registered with\n Pyro is prepended with the Pyro name of the module. so nothing prevents the user from having\n two different modules each of which contains a parameter named `weight`. 
by contrast, a user\n can only have one top-level parameter named `weight` (outside of any module).\n - parameters can be saved and loaded from disk using `save` and `load`.\n \"\"\"\n\n # -------------------------------------------------------------------------------\n # New dict-like interface\n\n def __init__(self):\n \"\"\"\n initialize ParamStore data structures\n \"\"\"\n self._params = {} # dictionary from param name to param\n self._param_to_name = {} # dictionary from unconstrained param to param name\n self._constraints = {} # dictionary from param name to constraint object\n\n def clear(self):\n \"\"\"\n Clear the ParamStore\n \"\"\"\n self._params = {}\n self._param_to_name = {}\n self._constraints = {}\n\n def items(self):\n \"\"\"\n Iterate over ``(name, constrained_param)`` pairs.\n \"\"\"\n for name in self._params:\n yield name, self[name]\n\n def keys(self):\n \"\"\"\n Iterate over param names.\n \"\"\"\n return self._params.keys()\n\n def values(self):\n \"\"\"\n Iterate over constrained parameter values.\n \"\"\"\n for name, constrained_param in self.items():\n yield constrained_param\n\n def __bool__(self):\n return bool(self._params)\n\n def __len__(self):\n return len(self._params)\n\n def __contains__(self, name):\n return name in self._params\n\n def __iter__(self):\n \"\"\"\n Iterate over param names.\n \"\"\"\n return iter(self.keys())\n\n def __delitem__(self, name):\n \"\"\"\n Remove a parameter from the param store.\n \"\"\"\n constrained_value = self._params.pop(name)\n unconstrained_value = constrained_value.unconstrained()\n self._param_to_name.pop(unconstrained_value)\n self._constraints.pop(name)\n\n def __getitem__(self, name):\n \"\"\"\n Get the constrained value of a named parameter.\n \"\"\"\n unconstrained_value = self._params[name]\n\n # compute the constrained value\n constraint = self._constraints[name]\n constrained_value = transform_to(constraint)(unconstrained_value)\n constrained_value.unconstrained = weakref.ref(unconstrained_value)\n\n return constrained_value\n\n def __setitem__(self, name, new_constrained_value):\n \"\"\"\n Set the constrained value of an existing parameter, or the value of a\n new unconstrained parameter. To declare a new parameter with\n constraint, use :meth:`setdefault`.\n \"\"\"\n # store constraint, defaulting to unconstrained\n constraint = self._constraints.setdefault(name, constraints.real)\n\n # compute the unconstrained value\n with torch.no_grad():\n # FIXME should we .detach() the new_constrained_value?\n unconstrained_value = transform_to(constraint).inv(new_constrained_value)\n unconstrained_value = unconstrained_value.contiguous()\n unconstrained_value.requires_grad_(True)\n\n # store a bidirectional mapping between name and unconstrained tensor\n self._params[name] = unconstrained_value\n self._param_to_name[unconstrained_value] = name\n\n def setdefault(self, name, init_constrained_value, constraint=constraints.real):\n \"\"\"\n Retrieve a constrained parameter value from the if it exists, otherwise\n set the initial value. Note that this is a little fancier than\n :meth:`dict.setdefault`.\n\n If the parameter already exists, ``init_constrained_tensor`` will be ignored. 
To avoid\n expensive creation of ``init_constrained_tensor`` you can wrap it in a ``lambda`` that\n will only be evaluated if the parameter does not already exist::\n\n param_store.get(\"foo\", lambda: (0.001 * torch.randn(1000, 1000)).exp(),\n constraint=constraints.positive)\n\n :param str name: parameter name\n :param init_constrained_value: initial constrained value\n :type init_constrained_value: torch.Tensor or callable returning a torch.Tensor\n :param constraint: torch constraint object\n :type constraint: torch.distributions.constraints.Constraint\n :returns: constrained parameter value\n :rtype: torch.Tensor\n \"\"\"\n if name not in self._params:\n # set the constraint\n self._constraints[name] = constraint\n\n # evaluate the lazy value\n if callable(init_constrained_value):\n init_constrained_value = init_constrained_value()\n\n # set the initial value\n self[name] = init_constrained_value\n\n # get the param, which is guaranteed to exist\n return self[name]\n\n # -------------------------------------------------------------------------------\n # Old non-dict interface\n\n def named_parameters(self):\n \"\"\"\n Returns an iterator over ``(name, unconstrained_value)`` tuples for\n each parameter in the ParamStore.\n \"\"\"\n return self._params.items()\n\n def get_all_param_names(self):\n warnings.warn(\"ParamStore.get_all_param_names() is deprecated; use .keys() instead.\",\n DeprecationWarning)\n return self.keys()\n\n def replace_param(self, param_name, new_param, old_param):\n warnings.warn(\"ParamStore.replace_param() is deprecated; use .__setitem__() instead.\",\n DeprecationWarning)\n assert self._params[param_name] is old_param.unconstrained()\n self[param_name] = new_param\n\n def get_param(self, name, init_tensor=None, constraint=constraints.real, event_dim=None):\n \"\"\"\n Get parameter from its name. If it does not yet exist in the\n ParamStore, it will be created and stored.\n The Pyro primitive `pyro.param` dispatches to this method.\n\n :param name: parameter name\n :type name: str\n :param init_tensor: initial tensor\n :type init_tensor: torch.Tensor\n :param constraint: torch constraint\n :type constraint: torch.distributions.constraints.Constraint\n :param int event_dim: (ignored)\n :returns: parameter\n :rtype: torch.Tensor\n \"\"\"\n if init_tensor is None:\n return self[name]\n else:\n return self.setdefault(name, init_tensor, constraint)\n\n def match(self, name):\n \"\"\"\n Get all parameters that match regex. 
The parameter must exist.\n\n :param name: regular expression\n :type name: str\n :returns: dict with key param name and value torch Tensor\n \"\"\"\n pattern = re.compile(name)\n return {name: self[name] for name in self if pattern.match(name)}\n\n def param_name(self, p):\n \"\"\"\n Get parameter name from parameter\n\n :param p: parameter\n :returns: parameter name\n \"\"\"\n return self._param_to_name.get(p)\n\n def get_state(self):\n \"\"\"\n Get the ParamStore state.\n \"\"\"\n state = {\n 'params': self._params,\n 'constraints': self._constraints,\n }\n return state\n\n def set_state(self, state):\n \"\"\"\n Set the ParamStore state using state from a previous get_state() call\n \"\"\"\n assert isinstance(state, dict), \"malformed ParamStore state\"\n assert set(state.keys()) == set(['params', 'constraints']), \\\n \"malformed ParamStore keys {}\".format(state.keys())\n\n for param_name, param in state['params'].items():\n self._params[param_name] = param\n self._param_to_name[param] = param_name\n\n for param_name, constraint in state['constraints'].items():\n if isinstance(constraint, type(constraints.real)):\n # Work around lack of hash & equality comparison on constraints.\n constraint = constraints.real\n self._constraints[param_name] = constraint\n\n def save(self, filename):\n \"\"\"\n Save parameters to disk\n\n :param filename: file name to save to\n :type filename: str\n \"\"\"\n with open(filename, \"wb\") as output_file:\n torch.save(self.get_state(), output_file)\n\n def load(self, filename, map_location=None):\n \"\"\"\n Loads parameters from disk\n\n .. note::\n\n If using :meth:`pyro.module` on parameters loaded from\n disk, be sure to set the ``update_module_params`` flag::\n\n pyro.get_param_store().load('saved_params.save')\n pyro.module('module', nn, update_module_params=True)\n\n :param filename: file name to load from\n :type filename: str\n :param map_location: specifies how to remap storage locations\n :type map_location: function, torch.device, string or a dict\n \"\"\"\n with open(filename, \"rb\") as input_file:\n state = torch.load(input_file, map_location)\n self.set_state(state)\n\n\n# used to create fully-formed param names, e.g. mymodule$$$mysubmodule.weight\n_MODULE_NAMESPACE_DIVIDER = \"$$$\"\n\n\ndef param_with_module_name(pyro_name, param_name):\n return _MODULE_NAMESPACE_DIVIDER.join([pyro_name, param_name])\n\n\ndef module_from_param_with_module_name(param_name):\n return param_name.split(_MODULE_NAMESPACE_DIVIDER)[0]\n\n\ndef user_param_name(param_name):\n if _MODULE_NAMESPACE_DIVIDER in param_name:\n return param_name.split(_MODULE_NAMESPACE_DIVIDER)[1]\n return param_name\n", "path": "pyro/params/param_store.py"}], "after_files": [{"content": "from __future__ import absolute_import, division, print_function\n\nimport re\nimport warnings\nimport weakref\n\nimport torch\nfrom torch.distributions import constraints, transform_to\n\n\nclass ParamStoreDict(object):\n \"\"\"\n Global store for parameters in Pyro. 
This is basically a key-value store.\n The typical user interacts with the ParamStore primarily through the\n primitive `pyro.param`.\n\n See `Intro Part II <http://pyro.ai/examples/intro_part_ii.html>`_ for further discussion\n and `SVI Part I <http://pyro.ai/examples/svi_part_i.html>`_ for some examples.\n\n Some things to bear in mind when using parameters in Pyro:\n\n - parameters must be assigned unique names\n - the `init_tensor` argument to `pyro.param` is only used the first time that a given (named)\n parameter is registered with Pyro.\n - for this reason, a user may need to use the `clear()` method if working in a REPL in order to\n get the desired behavior. this method can also be invoked with `pyro.clear_param_store()`.\n - the internal name of a parameter within a PyTorch `nn.Module` that has been registered with\n Pyro is prepended with the Pyro name of the module. so nothing prevents the user from having\n two different modules each of which contains a parameter named `weight`. by contrast, a user\n can only have one top-level parameter named `weight` (outside of any module).\n - parameters can be saved and loaded from disk using `save` and `load`.\n \"\"\"\n\n # -------------------------------------------------------------------------------\n # New dict-like interface\n\n def __init__(self):\n \"\"\"\n initialize ParamStore data structures\n \"\"\"\n self._params = {} # dictionary from param name to param\n self._param_to_name = {} # dictionary from unconstrained param to param name\n self._constraints = {} # dictionary from param name to constraint object\n\n def clear(self):\n \"\"\"\n Clear the ParamStore\n \"\"\"\n self._params = {}\n self._param_to_name = {}\n self._constraints = {}\n\n def items(self):\n \"\"\"\n Iterate over ``(name, constrained_param)`` pairs.\n \"\"\"\n for name in self._params:\n yield name, self[name]\n\n def keys(self):\n \"\"\"\n Iterate over param names.\n \"\"\"\n return self._params.keys()\n\n def values(self):\n \"\"\"\n Iterate over constrained parameter values.\n \"\"\"\n for name, constrained_param in self.items():\n yield constrained_param\n\n def __bool__(self):\n return bool(self._params)\n\n def __len__(self):\n return len(self._params)\n\n def __contains__(self, name):\n return name in self._params\n\n def __iter__(self):\n \"\"\"\n Iterate over param names.\n \"\"\"\n return iter(self.keys())\n\n def __delitem__(self, name):\n \"\"\"\n Remove a parameter from the param store.\n \"\"\"\n unconstrained_value = self._params.pop(name)\n self._param_to_name.pop(unconstrained_value)\n self._constraints.pop(name)\n\n def __getitem__(self, name):\n \"\"\"\n Get the constrained value of a named parameter.\n \"\"\"\n unconstrained_value = self._params[name]\n\n # compute the constrained value\n constraint = self._constraints[name]\n constrained_value = transform_to(constraint)(unconstrained_value)\n constrained_value.unconstrained = weakref.ref(unconstrained_value)\n\n return constrained_value\n\n def __setitem__(self, name, new_constrained_value):\n \"\"\"\n Set the constrained value of an existing parameter, or the value of a\n new unconstrained parameter. 
To declare a new parameter with\n constraint, use :meth:`setdefault`.\n \"\"\"\n # store constraint, defaulting to unconstrained\n constraint = self._constraints.setdefault(name, constraints.real)\n\n # compute the unconstrained value\n with torch.no_grad():\n # FIXME should we .detach() the new_constrained_value?\n unconstrained_value = transform_to(constraint).inv(new_constrained_value)\n unconstrained_value = unconstrained_value.contiguous()\n unconstrained_value.requires_grad_(True)\n\n # store a bidirectional mapping between name and unconstrained tensor\n self._params[name] = unconstrained_value\n self._param_to_name[unconstrained_value] = name\n\n def setdefault(self, name, init_constrained_value, constraint=constraints.real):\n \"\"\"\n Retrieve a constrained parameter value from the if it exists, otherwise\n set the initial value. Note that this is a little fancier than\n :meth:`dict.setdefault`.\n\n If the parameter already exists, ``init_constrained_tensor`` will be ignored. To avoid\n expensive creation of ``init_constrained_tensor`` you can wrap it in a ``lambda`` that\n will only be evaluated if the parameter does not already exist::\n\n param_store.get(\"foo\", lambda: (0.001 * torch.randn(1000, 1000)).exp(),\n constraint=constraints.positive)\n\n :param str name: parameter name\n :param init_constrained_value: initial constrained value\n :type init_constrained_value: torch.Tensor or callable returning a torch.Tensor\n :param constraint: torch constraint object\n :type constraint: torch.distributions.constraints.Constraint\n :returns: constrained parameter value\n :rtype: torch.Tensor\n \"\"\"\n if name not in self._params:\n # set the constraint\n self._constraints[name] = constraint\n\n # evaluate the lazy value\n if callable(init_constrained_value):\n init_constrained_value = init_constrained_value()\n\n # set the initial value\n self[name] = init_constrained_value\n\n # get the param, which is guaranteed to exist\n return self[name]\n\n # -------------------------------------------------------------------------------\n # Old non-dict interface\n\n def named_parameters(self):\n \"\"\"\n Returns an iterator over ``(name, unconstrained_value)`` tuples for\n each parameter in the ParamStore.\n \"\"\"\n return self._params.items()\n\n def get_all_param_names(self):\n warnings.warn(\"ParamStore.get_all_param_names() is deprecated; use .keys() instead.\",\n DeprecationWarning)\n return self.keys()\n\n def replace_param(self, param_name, new_param, old_param):\n warnings.warn(\"ParamStore.replace_param() is deprecated; use .__setitem__() instead.\",\n DeprecationWarning)\n assert self._params[param_name] is old_param.unconstrained()\n self[param_name] = new_param\n\n def get_param(self, name, init_tensor=None, constraint=constraints.real, event_dim=None):\n \"\"\"\n Get parameter from its name. If it does not yet exist in the\n ParamStore, it will be created and stored.\n The Pyro primitive `pyro.param` dispatches to this method.\n\n :param name: parameter name\n :type name: str\n :param init_tensor: initial tensor\n :type init_tensor: torch.Tensor\n :param constraint: torch constraint\n :type constraint: torch.distributions.constraints.Constraint\n :param int event_dim: (ignored)\n :returns: parameter\n :rtype: torch.Tensor\n \"\"\"\n if init_tensor is None:\n return self[name]\n else:\n return self.setdefault(name, init_tensor, constraint)\n\n def match(self, name):\n \"\"\"\n Get all parameters that match regex. 
The parameter must exist.\n\n :param name: regular expression\n :type name: str\n :returns: dict with key param name and value torch Tensor\n \"\"\"\n pattern = re.compile(name)\n return {name: self[name] for name in self if pattern.match(name)}\n\n def param_name(self, p):\n \"\"\"\n Get parameter name from parameter\n\n :param p: parameter\n :returns: parameter name\n \"\"\"\n return self._param_to_name.get(p)\n\n def get_state(self):\n \"\"\"\n Get the ParamStore state.\n \"\"\"\n state = {\n 'params': self._params,\n 'constraints': self._constraints,\n }\n return state\n\n def set_state(self, state):\n \"\"\"\n Set the ParamStore state using state from a previous get_state() call\n \"\"\"\n assert isinstance(state, dict), \"malformed ParamStore state\"\n assert set(state.keys()) == set(['params', 'constraints']), \\\n \"malformed ParamStore keys {}\".format(state.keys())\n\n for param_name, param in state['params'].items():\n self._params[param_name] = param\n self._param_to_name[param] = param_name\n\n for param_name, constraint in state['constraints'].items():\n if isinstance(constraint, type(constraints.real)):\n # Work around lack of hash & equality comparison on constraints.\n constraint = constraints.real\n self._constraints[param_name] = constraint\n\n def save(self, filename):\n \"\"\"\n Save parameters to disk\n\n :param filename: file name to save to\n :type filename: str\n \"\"\"\n with open(filename, \"wb\") as output_file:\n torch.save(self.get_state(), output_file)\n\n def load(self, filename, map_location=None):\n \"\"\"\n Loads parameters from disk\n\n .. note::\n\n If using :meth:`pyro.module` on parameters loaded from\n disk, be sure to set the ``update_module_params`` flag::\n\n pyro.get_param_store().load('saved_params.save')\n pyro.module('module', nn, update_module_params=True)\n\n :param filename: file name to load from\n :type filename: str\n :param map_location: specifies how to remap storage locations\n :type map_location: function, torch.device, string or a dict\n \"\"\"\n with open(filename, \"rb\") as input_file:\n state = torch.load(input_file, map_location)\n self.set_state(state)\n\n\n# used to create fully-formed param names, e.g. mymodule$$$mysubmodule.weight\n_MODULE_NAMESPACE_DIVIDER = \"$$$\"\n\n\ndef param_with_module_name(pyro_name, param_name):\n return _MODULE_NAMESPACE_DIVIDER.join([pyro_name, param_name])\n\n\ndef module_from_param_with_module_name(param_name):\n return param_name.split(_MODULE_NAMESPACE_DIVIDER)[0]\n\n\ndef user_param_name(param_name):\n if _MODULE_NAMESPACE_DIVIDER in param_name:\n return param_name.split(_MODULE_NAMESPACE_DIVIDER)[1]\n return param_name\n", "path": "pyro/params/param_store.py"}]}
| 3,622 | 123 |
gh_patches_debug_25304
|
rasdani/github-patches
|
git_diff
|
learningequality__kolibri-8744
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Home page - The 'Continue learning on your own' section is not displayed
## Observed behavior
The 'Continue learning on your own' section is not displayed at the Home page even though I have completed all assigned class resources and have some non-class resources in progress.
## Expected behavior
The 'Continue learning on your own' section should be displayed as specified in [Notion](https://www.notion.so/Home-Page-QA-8ffb1011fa034e21bc1f52a2aad585ac).
## Steps to reproduce the issue
1. Install the following [build.](https://github.com/learningequality/kolibri/releases/tag/v0.15.0-beta2)
2. As a Coach create a new Learner user, enroll that user to a Class and assign a lesson with resources.
3. Sign in with the Learner user and complete the assigned resources.
4. Start looking at some non-classes resources so that those will get marked as being in progress.
5. Go back to the Home page and look for the 'Continue learning on your own' section
## Additional information

## Usage Details
- OS: Windows 10
- Browser: Chrome
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kolibri/plugins/learn/viewsets.py`
Content:
```
1 from django.db.models import Count
2 from django.db.models import OuterRef
3 from django.db.models import Q
4 from django.db.models import Subquery
5 from django.db.models import Sum
6 from django.db.models.fields import IntegerField
7 from rest_framework.permissions import IsAuthenticated
8 from rest_framework.response import Response
9 from rest_framework.views import APIView
10
11 from kolibri.core.api import ReadOnlyValuesViewset
12 from kolibri.core.auth.api import KolibriAuthPermissionsFilter
13 from kolibri.core.auth.models import Classroom
14 from kolibri.core.auth.models import Facility
15 from kolibri.core.content.api import ContentNodeProgressViewset
16 from kolibri.core.content.api import ContentNodeViewset
17 from kolibri.core.content.api import UserContentNodeViewset
18 from kolibri.core.exams.models import Exam
19 from kolibri.core.lessons.models import Lesson
20 from kolibri.core.logger.models import AttemptLog
21 from kolibri.core.logger.models import MasteryLog
22
23
24 contentnode_progress_viewset = ContentNodeProgressViewset()
25 contentnode_viewset = ContentNodeViewset()
26 user_contentnode_viewset = UserContentNodeViewset()
27
28
29 class LearnStateView(APIView):
30 def get(self, request, format=None):
31 if request.user.is_anonymous():
32 default_facility = Facility.get_default_facility()
33 can_download_content = (
34 default_facility.dataset.show_download_button_in_learn
35 if default_facility
36 else True
37 )
38 return Response(
39 {
40 "in_classes": False,
41 "can_download_content": can_download_content,
42 }
43 )
44 return Response(
45 {
46 "in_classes": request.user.memberships.exists(),
47 "can_download_content": request.user.dataset.show_download_button_in_learn,
48 }
49 )
50
51
52 class LearnerClassroomViewset(ReadOnlyValuesViewset):
53 """
54 Returns all Classrooms for which the requesting User is a member,
55 along with all associated assignments.
56 """
57
58 filter_backends = (KolibriAuthPermissionsFilter,)
59 permission_classes = (IsAuthenticated,)
60
61 values = ("id", "name")
62
63 def get_queryset(self):
64 if self.request.user.is_anonymous():
65 return Classroom.objects.none()
66 return Classroom.objects.filter(membership__user=self.request.user)
67
68 def consolidate(self, items, queryset):
69 if not items:
70 return items
71 lessons = (
72 Lesson.objects.filter(
73 lesson_assignments__collection__membership__user=self.request.user,
74 is_active=True,
75 collection__in=(c["id"] for c in items),
76 )
77 .distinct()
78 .values(
79 "description", "id", "is_active", "title", "resources", "collection"
80 )
81 )
82 lesson_contentnode_ids = set()
83 for lesson in lessons:
84 lesson_contentnode_ids |= {
85 resource["contentnode_id"] for resource in lesson["resources"]
86 }
87
88 contentnode_progress = contentnode_progress_viewset.serialize_list(
89 self.request, {"ids": lesson_contentnode_ids}
90 )
91
92 contentnodes = contentnode_viewset.serialize_list(
93 self.request, {"ids": lesson_contentnode_ids}
94 )
95
96 progress_map = {l["content_id"]: l["progress"] for l in contentnode_progress}
97
98 contentnode_map = {c["id"]: c for c in contentnodes}
99
100 for lesson in lessons:
101 lesson["progress"] = {
102 "resource_progress": sum(
103 (
104 progress_map[resource["content_id"]]
105 for resource in lesson["resources"]
106 if resource["content_id"] in progress_map
107 )
108 ),
109 "total_resources": len(lesson["resources"]),
110 }
111 for resource in lesson["resources"]:
112 resource["progress"] = progress_map.get(resource["content_id"], 0)
113 resource["contentnode"] = contentnode_map.get(
114 resource["contentnode_id"], None
115 )
116
117 user_masterylog_content_ids = MasteryLog.objects.filter(
118 user=self.request.user
119 ).values("summarylog__content_id")
120
121 exams = (
122 Exam.objects.filter(
123 assignments__collection__membership__user=self.request.user,
124 collection__in=(c["id"] for c in items),
125 )
126 .filter(Q(active=True) | Q(id__in=user_masterylog_content_ids))
127 .annotate(
128 closed=Subquery(
129 MasteryLog.objects.filter(
130 summarylog__content_id=OuterRef("id"), user=self.request.user
131 ).values("complete")[:1]
132 ),
133 score=Subquery(
134 AttemptLog.objects.filter(
135 sessionlog__content_id=OuterRef("id"), user=self.request.user
136 )
137 .order_by()
138 .values_list("item")
139 .distinct()
140 .values("masterylog")
141 .annotate(total_correct=Sum("correct"))
142 .values("total_correct"),
143 output_field=IntegerField(),
144 ),
145 answer_count=Subquery(
146 AttemptLog.objects.filter(
147 sessionlog__content_id=OuterRef("id"), user=self.request.user
148 )
149 .order_by()
150 .values_list("item")
151 .distinct()
152 .values("masterylog")
153 .annotate(total_complete=Count("id"))
154 .values("total_complete"),
155 output_field=IntegerField(),
156 ),
157 )
158 .distinct()
159 .values(
160 "collection",
161 "active",
162 "archive",
163 "id",
164 "question_count",
165 "title",
166 "closed",
167 "answer_count",
168 "score",
169 )
170 )
171
172 for exam in exams:
173 closed = exam.pop("closed")
174 score = exam.pop("score")
175 answer_count = exam.pop("answer_count")
176 if closed is not None:
177 exam["progress"] = {
178 "closed": closed,
179 "score": score,
180 "answer_count": answer_count,
181 "started": True,
182 }
183 else:
184 exam["progress"] = {
185 "score": None,
186 "answer_count": None,
187 "closed": None,
188 "started": False,
189 }
190 out_items = []
191 for item in items:
192 item["assignments"] = {
193 "exams": [exam for exam in exams if exam["collection"] == item["id"]],
194 "lessons": [
195 lesson for lesson in lessons if lesson["collection"] == item["id"]
196 ],
197 }
198 out_items.append(item)
199 return out_items
200
201
202 learner_classroom_viewset = LearnerClassroomViewset()
203
204
205 def _resumable_resources(classrooms):
206 for classroom in classrooms:
207 for lesson in classroom["assignments"]["lessons"]:
208 for resource in lesson["resources"]:
209 yield resource["progress"] > 0
210
211
212 class LearnHomePageHydrationView(APIView):
213 def get(self, request, format=None):
214 classrooms = []
215 resumable_resources = []
216 resumable_resources_progress = []
217 if not request.user.is_anonymous():
218 classrooms = learner_classroom_viewset.serialize_list(request)
219 if not classrooms or not any(_resumable_resources(classrooms)):
220 resumable_resources = user_contentnode_viewset.serialize_list(
221 request, {"resume": True, "max_results": 12}
222 )
223 resumable_resources_progress = (
224 contentnode_progress_viewset.serialize_list(
225 request, {"resume": True, "max_results": 12}
226 )
227 )
228
229 return Response(
230 {
231 "classrooms": classrooms,
232 "resumable_resources": resumable_resources,
233 "resumable_resources_progress": resumable_resources_progress,
234 }
235 )
236
237
238 def _map_lesson_classroom(item):
239 return {
240 "id": item.pop("collection__id"),
241 "name": item.pop("collection__name"),
242 "parent": item.pop("collection__parent_id"),
243 }
244
245
246 class LearnerLessonViewset(ReadOnlyValuesViewset):
247 """
248 Special Viewset for Learners to view Lessons to which they are assigned.
249 The core Lesson Viewset is locked down to Admin users only.
250 """
251
252 permission_classes = (IsAuthenticated,)
253
254 values = (
255 "id",
256 "title",
257 "description",
258 "resources",
259 "is_active",
260 "collection",
261 "collection__id",
262 "collection__name",
263 "collection__parent_id",
264 )
265
266 field_map = {"classroom": _map_lesson_classroom}
267
268 def get_queryset(self):
269 if self.request.user.is_anonymous():
270 return Lesson.objects.none()
271 return Lesson.objects.filter(
272 lesson_assignments__collection__membership__user=self.request.user,
273 is_active=True,
274 )
275
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/kolibri/plugins/learn/viewsets.py b/kolibri/plugins/learn/viewsets.py
--- a/kolibri/plugins/learn/viewsets.py
+++ b/kolibri/plugins/learn/viewsets.py
@@ -85,12 +85,20 @@
resource["contentnode_id"] for resource in lesson["resources"]
}
- contentnode_progress = contentnode_progress_viewset.serialize_list(
- self.request, {"ids": lesson_contentnode_ids}
+ contentnode_progress = (
+ contentnode_progress_viewset.serialize_list(
+ self.request, {"ids": lesson_contentnode_ids}
+ )
+ if lesson_contentnode_ids
+ else []
)
- contentnodes = contentnode_viewset.serialize_list(
- self.request, {"ids": lesson_contentnode_ids}
+ contentnodes = (
+ contentnode_viewset.serialize_list(
+ self.request, {"ids": lesson_contentnode_ids}
+ )
+ if lesson_contentnode_ids
+ else []
)
progress_map = {l["content_id"]: l["progress"] for l in contentnode_progress}
@@ -206,7 +214,7 @@
for classroom in classrooms:
for lesson in classroom["assignments"]["lessons"]:
for resource in lesson["resources"]:
- yield resource["progress"] > 0
+ yield 0 < resource["progress"] < 1
class LearnHomePageHydrationView(APIView):
|
{"golden_diff": "diff --git a/kolibri/plugins/learn/viewsets.py b/kolibri/plugins/learn/viewsets.py\n--- a/kolibri/plugins/learn/viewsets.py\n+++ b/kolibri/plugins/learn/viewsets.py\n@@ -85,12 +85,20 @@\n resource[\"contentnode_id\"] for resource in lesson[\"resources\"]\n }\n \n- contentnode_progress = contentnode_progress_viewset.serialize_list(\n- self.request, {\"ids\": lesson_contentnode_ids}\n+ contentnode_progress = (\n+ contentnode_progress_viewset.serialize_list(\n+ self.request, {\"ids\": lesson_contentnode_ids}\n+ )\n+ if lesson_contentnode_ids\n+ else []\n )\n \n- contentnodes = contentnode_viewset.serialize_list(\n- self.request, {\"ids\": lesson_contentnode_ids}\n+ contentnodes = (\n+ contentnode_viewset.serialize_list(\n+ self.request, {\"ids\": lesson_contentnode_ids}\n+ )\n+ if lesson_contentnode_ids\n+ else []\n )\n \n progress_map = {l[\"content_id\"]: l[\"progress\"] for l in contentnode_progress}\n@@ -206,7 +214,7 @@\n for classroom in classrooms:\n for lesson in classroom[\"assignments\"][\"lessons\"]:\n for resource in lesson[\"resources\"]:\n- yield resource[\"progress\"] > 0\n+ yield 0 < resource[\"progress\"] < 1\n \n \n class LearnHomePageHydrationView(APIView):\n", "issue": "Home page - The 'Continue learning on your own' section is not displayed\n## Observed behavior\r\nThe 'Continue learning on your own' section is not displayed at the Home page even though I have completed all assigned class resources and have some non-class resources in progress.\r\n\r\n## Expected behavior\r\nThe 'Continue learning on your own' section should be displayed as specified in [Notion](https://www.notion.so/Home-Page-QA-8ffb1011fa034e21bc1f52a2aad585ac).\r\n\r\n## Steps to reproduce the issue\r\n1. Install the following [build.](https://github.com/learningequality/kolibri/releases/tag/v0.15.0-beta2)\r\n2. As a Coach create a new Learner user, enroll that user to a Class and assign a lesson with resources.\r\n3. Sign in with the Learner user and complete the assigned resources.\r\n4. Start looking at some non-classes resources so that those will get marked as being in progress.\r\n5. 
Go back to the Home page and look for the 'Continue learning on your own' section\r\n\r\n## Additional information\r\n\r\n\r\n\r\n## Usage Details\r\n - OS: Windows 10\r\n - Browser: Chrome\n", "before_files": [{"content": "from django.db.models import Count\nfrom django.db.models import OuterRef\nfrom django.db.models import Q\nfrom django.db.models import Subquery\nfrom django.db.models import Sum\nfrom django.db.models.fields import IntegerField\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\n\nfrom kolibri.core.api import ReadOnlyValuesViewset\nfrom kolibri.core.auth.api import KolibriAuthPermissionsFilter\nfrom kolibri.core.auth.models import Classroom\nfrom kolibri.core.auth.models import Facility\nfrom kolibri.core.content.api import ContentNodeProgressViewset\nfrom kolibri.core.content.api import ContentNodeViewset\nfrom kolibri.core.content.api import UserContentNodeViewset\nfrom kolibri.core.exams.models import Exam\nfrom kolibri.core.lessons.models import Lesson\nfrom kolibri.core.logger.models import AttemptLog\nfrom kolibri.core.logger.models import MasteryLog\n\n\ncontentnode_progress_viewset = ContentNodeProgressViewset()\ncontentnode_viewset = ContentNodeViewset()\nuser_contentnode_viewset = UserContentNodeViewset()\n\n\nclass LearnStateView(APIView):\n def get(self, request, format=None):\n if request.user.is_anonymous():\n default_facility = Facility.get_default_facility()\n can_download_content = (\n default_facility.dataset.show_download_button_in_learn\n if default_facility\n else True\n )\n return Response(\n {\n \"in_classes\": False,\n \"can_download_content\": can_download_content,\n }\n )\n return Response(\n {\n \"in_classes\": request.user.memberships.exists(),\n \"can_download_content\": request.user.dataset.show_download_button_in_learn,\n }\n )\n\n\nclass LearnerClassroomViewset(ReadOnlyValuesViewset):\n \"\"\"\n Returns all Classrooms for which the requesting User is a member,\n along with all associated assignments.\n \"\"\"\n\n filter_backends = (KolibriAuthPermissionsFilter,)\n permission_classes = (IsAuthenticated,)\n\n values = (\"id\", \"name\")\n\n def get_queryset(self):\n if self.request.user.is_anonymous():\n return Classroom.objects.none()\n return Classroom.objects.filter(membership__user=self.request.user)\n\n def consolidate(self, items, queryset):\n if not items:\n return items\n lessons = (\n Lesson.objects.filter(\n lesson_assignments__collection__membership__user=self.request.user,\n is_active=True,\n collection__in=(c[\"id\"] for c in items),\n )\n .distinct()\n .values(\n \"description\", \"id\", \"is_active\", \"title\", \"resources\", \"collection\"\n )\n )\n lesson_contentnode_ids = set()\n for lesson in lessons:\n lesson_contentnode_ids |= {\n resource[\"contentnode_id\"] for resource in lesson[\"resources\"]\n }\n\n contentnode_progress = contentnode_progress_viewset.serialize_list(\n self.request, {\"ids\": lesson_contentnode_ids}\n )\n\n contentnodes = contentnode_viewset.serialize_list(\n self.request, {\"ids\": lesson_contentnode_ids}\n )\n\n progress_map = {l[\"content_id\"]: l[\"progress\"] for l in contentnode_progress}\n\n contentnode_map = {c[\"id\"]: c for c in contentnodes}\n\n for lesson in lessons:\n lesson[\"progress\"] = {\n \"resource_progress\": sum(\n (\n progress_map[resource[\"content_id\"]]\n for resource in lesson[\"resources\"]\n if resource[\"content_id\"] in progress_map\n )\n ),\n \"total_resources\": 
len(lesson[\"resources\"]),\n }\n for resource in lesson[\"resources\"]:\n resource[\"progress\"] = progress_map.get(resource[\"content_id\"], 0)\n resource[\"contentnode\"] = contentnode_map.get(\n resource[\"contentnode_id\"], None\n )\n\n user_masterylog_content_ids = MasteryLog.objects.filter(\n user=self.request.user\n ).values(\"summarylog__content_id\")\n\n exams = (\n Exam.objects.filter(\n assignments__collection__membership__user=self.request.user,\n collection__in=(c[\"id\"] for c in items),\n )\n .filter(Q(active=True) | Q(id__in=user_masterylog_content_ids))\n .annotate(\n closed=Subquery(\n MasteryLog.objects.filter(\n summarylog__content_id=OuterRef(\"id\"), user=self.request.user\n ).values(\"complete\")[:1]\n ),\n score=Subquery(\n AttemptLog.objects.filter(\n sessionlog__content_id=OuterRef(\"id\"), user=self.request.user\n )\n .order_by()\n .values_list(\"item\")\n .distinct()\n .values(\"masterylog\")\n .annotate(total_correct=Sum(\"correct\"))\n .values(\"total_correct\"),\n output_field=IntegerField(),\n ),\n answer_count=Subquery(\n AttemptLog.objects.filter(\n sessionlog__content_id=OuterRef(\"id\"), user=self.request.user\n )\n .order_by()\n .values_list(\"item\")\n .distinct()\n .values(\"masterylog\")\n .annotate(total_complete=Count(\"id\"))\n .values(\"total_complete\"),\n output_field=IntegerField(),\n ),\n )\n .distinct()\n .values(\n \"collection\",\n \"active\",\n \"archive\",\n \"id\",\n \"question_count\",\n \"title\",\n \"closed\",\n \"answer_count\",\n \"score\",\n )\n )\n\n for exam in exams:\n closed = exam.pop(\"closed\")\n score = exam.pop(\"score\")\n answer_count = exam.pop(\"answer_count\")\n if closed is not None:\n exam[\"progress\"] = {\n \"closed\": closed,\n \"score\": score,\n \"answer_count\": answer_count,\n \"started\": True,\n }\n else:\n exam[\"progress\"] = {\n \"score\": None,\n \"answer_count\": None,\n \"closed\": None,\n \"started\": False,\n }\n out_items = []\n for item in items:\n item[\"assignments\"] = {\n \"exams\": [exam for exam in exams if exam[\"collection\"] == item[\"id\"]],\n \"lessons\": [\n lesson for lesson in lessons if lesson[\"collection\"] == item[\"id\"]\n ],\n }\n out_items.append(item)\n return out_items\n\n\nlearner_classroom_viewset = LearnerClassroomViewset()\n\n\ndef _resumable_resources(classrooms):\n for classroom in classrooms:\n for lesson in classroom[\"assignments\"][\"lessons\"]:\n for resource in lesson[\"resources\"]:\n yield resource[\"progress\"] > 0\n\n\nclass LearnHomePageHydrationView(APIView):\n def get(self, request, format=None):\n classrooms = []\n resumable_resources = []\n resumable_resources_progress = []\n if not request.user.is_anonymous():\n classrooms = learner_classroom_viewset.serialize_list(request)\n if not classrooms or not any(_resumable_resources(classrooms)):\n resumable_resources = user_contentnode_viewset.serialize_list(\n request, {\"resume\": True, \"max_results\": 12}\n )\n resumable_resources_progress = (\n contentnode_progress_viewset.serialize_list(\n request, {\"resume\": True, \"max_results\": 12}\n )\n )\n\n return Response(\n {\n \"classrooms\": classrooms,\n \"resumable_resources\": resumable_resources,\n \"resumable_resources_progress\": resumable_resources_progress,\n }\n )\n\n\ndef _map_lesson_classroom(item):\n return {\n \"id\": item.pop(\"collection__id\"),\n \"name\": item.pop(\"collection__name\"),\n \"parent\": item.pop(\"collection__parent_id\"),\n }\n\n\nclass LearnerLessonViewset(ReadOnlyValuesViewset):\n \"\"\"\n Special Viewset for Learners to 
view Lessons to which they are assigned.\n The core Lesson Viewset is locked down to Admin users only.\n \"\"\"\n\n permission_classes = (IsAuthenticated,)\n\n values = (\n \"id\",\n \"title\",\n \"description\",\n \"resources\",\n \"is_active\",\n \"collection\",\n \"collection__id\",\n \"collection__name\",\n \"collection__parent_id\",\n )\n\n field_map = {\"classroom\": _map_lesson_classroom}\n\n def get_queryset(self):\n if self.request.user.is_anonymous():\n return Lesson.objects.none()\n return Lesson.objects.filter(\n lesson_assignments__collection__membership__user=self.request.user,\n is_active=True,\n )\n", "path": "kolibri/plugins/learn/viewsets.py"}], "after_files": [{"content": "from django.db.models import Count\nfrom django.db.models import OuterRef\nfrom django.db.models import Q\nfrom django.db.models import Subquery\nfrom django.db.models import Sum\nfrom django.db.models.fields import IntegerField\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\n\nfrom kolibri.core.api import ReadOnlyValuesViewset\nfrom kolibri.core.auth.api import KolibriAuthPermissionsFilter\nfrom kolibri.core.auth.models import Classroom\nfrom kolibri.core.auth.models import Facility\nfrom kolibri.core.content.api import ContentNodeProgressViewset\nfrom kolibri.core.content.api import ContentNodeViewset\nfrom kolibri.core.content.api import UserContentNodeViewset\nfrom kolibri.core.exams.models import Exam\nfrom kolibri.core.lessons.models import Lesson\nfrom kolibri.core.logger.models import AttemptLog\nfrom kolibri.core.logger.models import MasteryLog\n\n\ncontentnode_progress_viewset = ContentNodeProgressViewset()\ncontentnode_viewset = ContentNodeViewset()\nuser_contentnode_viewset = UserContentNodeViewset()\n\n\nclass LearnStateView(APIView):\n def get(self, request, format=None):\n if request.user.is_anonymous():\n default_facility = Facility.get_default_facility()\n can_download_content = (\n default_facility.dataset.show_download_button_in_learn\n if default_facility\n else True\n )\n return Response(\n {\n \"in_classes\": False,\n \"can_download_content\": can_download_content,\n }\n )\n return Response(\n {\n \"in_classes\": request.user.memberships.exists(),\n \"can_download_content\": request.user.dataset.show_download_button_in_learn,\n }\n )\n\n\nclass LearnerClassroomViewset(ReadOnlyValuesViewset):\n \"\"\"\n Returns all Classrooms for which the requesting User is a member,\n along with all associated assignments.\n \"\"\"\n\n filter_backends = (KolibriAuthPermissionsFilter,)\n permission_classes = (IsAuthenticated,)\n\n values = (\"id\", \"name\")\n\n def get_queryset(self):\n if self.request.user.is_anonymous():\n return Classroom.objects.none()\n return Classroom.objects.filter(membership__user=self.request.user)\n\n def consolidate(self, items, queryset):\n if not items:\n return items\n lessons = (\n Lesson.objects.filter(\n lesson_assignments__collection__membership__user=self.request.user,\n is_active=True,\n collection__in=(c[\"id\"] for c in items),\n )\n .distinct()\n .values(\n \"description\", \"id\", \"is_active\", \"title\", \"resources\", \"collection\"\n )\n )\n lesson_contentnode_ids = set()\n for lesson in lessons:\n lesson_contentnode_ids |= {\n resource[\"contentnode_id\"] for resource in lesson[\"resources\"]\n }\n\n contentnode_progress = (\n contentnode_progress_viewset.serialize_list(\n self.request, {\"ids\": lesson_contentnode_ids}\n )\n if lesson_contentnode_ids\n 
else []\n )\n\n contentnodes = (\n contentnode_viewset.serialize_list(\n self.request, {\"ids\": lesson_contentnode_ids}\n )\n if lesson_contentnode_ids\n else []\n )\n\n progress_map = {l[\"content_id\"]: l[\"progress\"] for l in contentnode_progress}\n\n contentnode_map = {c[\"id\"]: c for c in contentnodes}\n\n for lesson in lessons:\n lesson[\"progress\"] = {\n \"resource_progress\": sum(\n (\n progress_map[resource[\"content_id\"]]\n for resource in lesson[\"resources\"]\n if resource[\"content_id\"] in progress_map\n )\n ),\n \"total_resources\": len(lesson[\"resources\"]),\n }\n for resource in lesson[\"resources\"]:\n resource[\"progress\"] = progress_map.get(resource[\"content_id\"], 0)\n resource[\"contentnode\"] = contentnode_map.get(\n resource[\"contentnode_id\"], None\n )\n\n user_masterylog_content_ids = MasteryLog.objects.filter(\n user=self.request.user\n ).values(\"summarylog__content_id\")\n\n exams = (\n Exam.objects.filter(\n assignments__collection__membership__user=self.request.user,\n collection__in=(c[\"id\"] for c in items),\n )\n .filter(Q(active=True) | Q(id__in=user_masterylog_content_ids))\n .annotate(\n closed=Subquery(\n MasteryLog.objects.filter(\n summarylog__content_id=OuterRef(\"id\"), user=self.request.user\n ).values(\"complete\")[:1]\n ),\n score=Subquery(\n AttemptLog.objects.filter(\n sessionlog__content_id=OuterRef(\"id\"), user=self.request.user\n )\n .order_by()\n .values_list(\"item\")\n .distinct()\n .values(\"masterylog\")\n .annotate(total_correct=Sum(\"correct\"))\n .values(\"total_correct\"),\n output_field=IntegerField(),\n ),\n answer_count=Subquery(\n AttemptLog.objects.filter(\n sessionlog__content_id=OuterRef(\"id\"), user=self.request.user\n )\n .order_by()\n .values_list(\"item\")\n .distinct()\n .values(\"masterylog\")\n .annotate(total_complete=Count(\"id\"))\n .values(\"total_complete\"),\n output_field=IntegerField(),\n ),\n )\n .distinct()\n .values(\n \"collection\",\n \"active\",\n \"archive\",\n \"id\",\n \"question_count\",\n \"title\",\n \"closed\",\n \"answer_count\",\n \"score\",\n )\n )\n\n for exam in exams:\n closed = exam.pop(\"closed\")\n score = exam.pop(\"score\")\n answer_count = exam.pop(\"answer_count\")\n if closed is not None:\n exam[\"progress\"] = {\n \"closed\": closed,\n \"score\": score,\n \"answer_count\": answer_count,\n \"started\": True,\n }\n else:\n exam[\"progress\"] = {\n \"score\": None,\n \"answer_count\": None,\n \"closed\": None,\n \"started\": False,\n }\n out_items = []\n for item in items:\n item[\"assignments\"] = {\n \"exams\": [exam for exam in exams if exam[\"collection\"] == item[\"id\"]],\n \"lessons\": [\n lesson for lesson in lessons if lesson[\"collection\"] == item[\"id\"]\n ],\n }\n out_items.append(item)\n return out_items\n\n\nlearner_classroom_viewset = LearnerClassroomViewset()\n\n\ndef _resumable_resources(classrooms):\n for classroom in classrooms:\n for lesson in classroom[\"assignments\"][\"lessons\"]:\n for resource in lesson[\"resources\"]:\n yield 0 < resource[\"progress\"] < 1\n\n\nclass LearnHomePageHydrationView(APIView):\n def get(self, request, format=None):\n classrooms = []\n resumable_resources = []\n resumable_resources_progress = []\n if not request.user.is_anonymous():\n classrooms = learner_classroom_viewset.serialize_list(request)\n if not classrooms or not any(_resumable_resources(classrooms)):\n resumable_resources = user_contentnode_viewset.serialize_list(\n request, {\"resume\": True, \"max_results\": 12}\n )\n resumable_resources_progress = (\n 
contentnode_progress_viewset.serialize_list(\n request, {\"resume\": True, \"max_results\": 12}\n )\n )\n\n return Response(\n {\n \"classrooms\": classrooms,\n \"resumable_resources\": resumable_resources,\n \"resumable_resources_progress\": resumable_resources_progress,\n }\n )\n\n\ndef _map_lesson_classroom(item):\n return {\n \"id\": item.pop(\"collection__id\"),\n \"name\": item.pop(\"collection__name\"),\n \"parent\": item.pop(\"collection__parent_id\"),\n }\n\n\nclass LearnerLessonViewset(ReadOnlyValuesViewset):\n \"\"\"\n Special Viewset for Learners to view Lessons to which they are assigned.\n The core Lesson Viewset is locked down to Admin users only.\n \"\"\"\n\n permission_classes = (IsAuthenticated,)\n\n values = (\n \"id\",\n \"title\",\n \"description\",\n \"resources\",\n \"is_active\",\n \"collection\",\n \"collection__id\",\n \"collection__name\",\n \"collection__parent_id\",\n )\n\n field_map = {\"classroom\": _map_lesson_classroom}\n\n def get_queryset(self):\n if self.request.user.is_anonymous():\n return Lesson.objects.none()\n return Lesson.objects.filter(\n lesson_assignments__collection__membership__user=self.request.user,\n is_active=True,\n )\n", "path": "kolibri/plugins/learn/viewsets.py"}]}
| 3,129 | 319 |
gh_patches_debug_28
|
rasdani/github-patches
|
git_diff
|
open-telemetry__opentelemetry-python-1889
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Run tests on Windows in CI
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/getting_started/flask_example.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 # flask_example.py
16 import flask
17 import requests
18
19 from opentelemetry import trace
20 from opentelemetry.instrumentation.flask import FlaskInstrumentor
21 from opentelemetry.instrumentation.requests import RequestsInstrumentor
22 from opentelemetry.sdk.trace import TracerProvider
23 from opentelemetry.sdk.trace.export import (
24 BatchSpanProcessor,
25 ConsoleSpanExporter,
26 )
27
28 trace.set_tracer_provider(TracerProvider())
29 trace.get_tracer_provider().add_span_processor(
30 BatchSpanProcessor(ConsoleSpanExporter())
31 )
32
33 app = flask.Flask(__name__)
34 FlaskInstrumentor().instrument_app(app)
35 RequestsInstrumentor().instrument()
36
37 tracer = trace.get_tracer(__name__)
38
39
40 @app.route("/")
41 def hello():
42 with tracer.start_as_current_span("example-request"):
43 requests.get("http://www.example.com")
44 return "hello"
45
46
47 app.run(debug=True, port=5000)
48
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/docs/getting_started/flask_example.py b/docs/getting_started/flask_example.py
--- a/docs/getting_started/flask_example.py
+++ b/docs/getting_started/flask_example.py
@@ -44,4 +44,4 @@
return "hello"
-app.run(debug=True, port=5000)
+app.run(port=5000)
|
{"golden_diff": "diff --git a/docs/getting_started/flask_example.py b/docs/getting_started/flask_example.py\n--- a/docs/getting_started/flask_example.py\n+++ b/docs/getting_started/flask_example.py\n@@ -44,4 +44,4 @@\n return \"hello\"\n \n \n-app.run(debug=True, port=5000)\n+app.run(port=5000)\n", "issue": "Run tests on Windows in CI\n\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# flask_example.py\nimport flask\nimport requests\n\nfrom opentelemetry import trace\nfrom opentelemetry.instrumentation.flask import FlaskInstrumentor\nfrom opentelemetry.instrumentation.requests import RequestsInstrumentor\nfrom opentelemetry.sdk.trace import TracerProvider\nfrom opentelemetry.sdk.trace.export import (\n BatchSpanProcessor,\n ConsoleSpanExporter,\n)\n\ntrace.set_tracer_provider(TracerProvider())\ntrace.get_tracer_provider().add_span_processor(\n BatchSpanProcessor(ConsoleSpanExporter())\n)\n\napp = flask.Flask(__name__)\nFlaskInstrumentor().instrument_app(app)\nRequestsInstrumentor().instrument()\n\ntracer = trace.get_tracer(__name__)\n\n\[email protected](\"/\")\ndef hello():\n with tracer.start_as_current_span(\"example-request\"):\n requests.get(\"http://www.example.com\")\n return \"hello\"\n\n\napp.run(debug=True, port=5000)\n", "path": "docs/getting_started/flask_example.py"}], "after_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# flask_example.py\nimport flask\nimport requests\n\nfrom opentelemetry import trace\nfrom opentelemetry.instrumentation.flask import FlaskInstrumentor\nfrom opentelemetry.instrumentation.requests import RequestsInstrumentor\nfrom opentelemetry.sdk.trace import TracerProvider\nfrom opentelemetry.sdk.trace.export import (\n BatchSpanProcessor,\n ConsoleSpanExporter,\n)\n\ntrace.set_tracer_provider(TracerProvider())\ntrace.get_tracer_provider().add_span_processor(\n BatchSpanProcessor(ConsoleSpanExporter())\n)\n\napp = flask.Flask(__name__)\nFlaskInstrumentor().instrument_app(app)\nRequestsInstrumentor().instrument()\n\ntracer = trace.get_tracer(__name__)\n\n\[email protected](\"/\")\ndef hello():\n with tracer.start_as_current_span(\"example-request\"):\n requests.get(\"http://www.example.com\")\n return \"hello\"\n\n\napp.run(port=5000)\n", "path": "docs/getting_started/flask_example.py"}]}
| 672 | 84 |
gh_patches_debug_43703
|
rasdani/github-patches
|
git_diff
|
huggingface__optimum-1660
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Whisper-v3 ValueError: Transformers now supports natively BetterTransformer optimizations
### System Info
```shell
Nvidia Docker Container 23.12
xFormers 0.0.24+6600003.d20240116
memory_efficient_attention.cutlassF: available
memory_efficient_attention.cutlassB: available
memory_efficient_attention.decoderF: available
[email protected]: available
[email protected]: available
memory_efficient_attention.smallkF: available
memory_efficient_attention.smallkB: available
memory_efficient_attention.tritonflashattF: unavailable
memory_efficient_attention.tritonflashattB: unavailable
memory_efficient_attention.triton_splitKF: available
indexing.scaled_index_addF: available
indexing.scaled_index_addB: available
indexing.index_select: available
swiglu.dual_gemm_silu: available
swiglu.gemm_fused_operand_sum: available
swiglu.fused.p.cpp: available
is_triton_available: True
pytorch.version: 2.2.0a0+81ea7a4
pytorch.cuda: available
gpu.compute_capability: 8.9
gpu.name: NVIDIA GeForce RTX 4090
dcgm_profiler: unavailable
build.info: available
build.cuda_version: 1203
build.python_version: 3.10.12
build.torch_version: 2.2.0a0+81ea7a4
build.env.TORCH_CUDA_ARCH_LIST: 5.2 6.0 6.1 7.0 7.2 7.5 8.0 8.6 8.7 9.0+PTX
build.env.XFORMERS_BUILD_TYPE: None
build.env.XFORMERS_ENABLE_DEBUG_ASSERTIONS: None
build.env.NVCC_FLAGS: None
build.env.XFORMERS_PACKAGE_FROM: None
build.nvcc_version: 12.3.107
source.privacy: open source
```
### Who can help?
_No response_
### Information
- [X] The official example scripts
- [ ] My own modified scripts
### Tasks
- [X] An officially supported task in the `examples` folder (such as GLUE/SQuAD, ...)
- [ ] My own task or dataset (give details below)
### Reproduction (minimal, reproducible, runnable)
Use the code from:
https://huggingface.co/spaces/primeline/whisper-german/blob/main/app.py
```bash
python app.py
Traceback (most recent call last):
File "/app/app.py", line 23, in <module>
model = model.to_bettertransformer()
File "/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py", line 4314, in to_bettertransformer
return BetterTransformer.transform(self)
File "/usr/lib/python3.10/contextlib.py", line 79, in inner
return func(*args, **kwds)
File "/usr/local/lib/python3.10/dist-packages/optimum/bettertransformer/transformation.py", line 211, in transform
raise ValueError(
ValueError: Transformers now supports natively BetterTransformer optimizations (torch.nn.functional.scaled_dot_product_attention) for the model type whisper. Please upgrade to transformers>=4.36 and torch>=2.1.1 to use it. Details: https://huggingface.co/docs/transformers/perf_infer_gpu_one#flashattention-and-memory-efficient-attention-through-pytorchs-scaleddotproductattention
```
### Expected behavior
Everything works as expected, i.e. `model.to_bettertransformer()` runs without raising the `ValueError` above.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `optimum/utils/import_utils.py`
Content:
```
1 # Copyright 2022 The HuggingFace Team. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 """Import utilities."""
15
16 import importlib.util
17 import inspect
18 import sys
19 from collections import OrderedDict
20 from contextlib import contextmanager
21 from typing import Tuple, Union
22
23 import numpy as np
24 import packaging
25 from transformers.utils import is_torch_available
26
27
28 def _is_package_available(pkg_name: str, return_version: bool = False) -> Union[Tuple[bool, str], bool]:
29 # Check we're not importing a "pkg_name" directory somewhere but the actual library by trying to grab the version
30 package_exists = importlib.util.find_spec(pkg_name) is not None
31 package_version = "N/A"
32 if package_exists:
33 try:
34 package_version = importlib.metadata.version(pkg_name)
35 package_exists = True
36 except importlib.metadata.PackageNotFoundError:
37 package_exists = False
38 if return_version:
39 return package_exists, package_version
40 else:
41 return package_exists
42
43
44 # The package importlib_metadata is in a different place, depending on the python version.
45 if sys.version_info < (3, 8):
46 import importlib_metadata
47 else:
48 import importlib.metadata as importlib_metadata
49
50
51 TORCH_MINIMUM_VERSION = packaging.version.parse("1.11.0")
52 TRANSFORMERS_MINIMUM_VERSION = packaging.version.parse("4.25.0")
53 DIFFUSERS_MINIMUM_VERSION = packaging.version.parse("0.18.0")
54 AUTOGPTQ_MINIMUM_VERSION = packaging.version.parse("0.4.99") # Allows 0.5.0.dev0
55
56
57 # This is the minimal required version to support some ONNX Runtime features
58 ORT_QUANTIZE_MINIMUM_VERSION = packaging.version.parse("1.4.0")
59
60
61 _onnx_available = _is_package_available("onnx")
62
63 # importlib.metadata.version seem to not be robust with the ONNX Runtime extensions (`onnxruntime-gpu`, etc.)
64 _onnxruntime_available = importlib.util.find_spec("onnxruntime") is not None
65
66 _pydantic_available = _is_package_available("pydantic")
67 _accelerate_available = _is_package_available("accelerate")
68 _diffusers_available = _is_package_available("diffusers")
69 _auto_gptq_available = _is_package_available("auto_gptq")
70 _timm_available = _is_package_available("timm")
71 _sentence_transformers_available = _is_package_available("sentence_transformers")
72
73 torch_version = None
74 if is_torch_available():
75 torch_version = packaging.version.parse(importlib_metadata.version("torch"))
76
77 _is_torch_onnx_support_available = is_torch_available() and (
78 TORCH_MINIMUM_VERSION.major,
79 TORCH_MINIMUM_VERSION.minor,
80 ) <= (
81 torch_version.major,
82 torch_version.minor,
83 )
84
85
86 _diffusers_version = None
87 if _diffusers_available:
88 try:
89 _diffusers_version = importlib_metadata.version("diffusers")
90 except importlib_metadata.PackageNotFoundError:
91 _diffusers_available = False
92
93
94 def is_torch_onnx_support_available():
95 return _is_torch_onnx_support_available
96
97
98 def is_onnx_available():
99 return _onnx_available
100
101
102 def is_onnxruntime_available():
103 try:
104 # Try to import the source file of onnxruntime - if you run the tests from `tests` the function gets
105 # confused since there a folder named `onnxruntime` in `tests`. Therefore, `_onnxruntime_available`
106 # will be set to `True` even if not installed.
107 mod = importlib.import_module("onnxruntime")
108 inspect.getsourcefile(mod)
109 except Exception:
110 return False
111 return _onnxruntime_available
112
113
114 def is_pydantic_available():
115 return _pydantic_available
116
117
118 def is_accelerate_available():
119 return _accelerate_available
120
121
122 def is_diffusers_available():
123 return _diffusers_available
124
125
126 def is_timm_available():
127 return _timm_available
128
129
130 def is_sentence_transformers_available():
131 return _sentence_transformers_available
132
133
134 def is_auto_gptq_available():
135 if _auto_gptq_available:
136 version_autogptq = packaging.version.parse(importlib_metadata.version("auto_gptq"))
137 if AUTOGPTQ_MINIMUM_VERSION < version_autogptq:
138 return True
139 else:
140 raise ImportError(
141 f"Found an incompatible version of auto-gptq. Found version {version_autogptq}, but only version above {AUTOGPTQ_MINIMUM_VERSION} are supported"
142 )
143
144
145 @contextmanager
146 def check_if_pytorch_greater(target_version: str, message: str):
147 r"""
148 A context manager that does nothing except checking if the PyTorch version is greater than `pt_version`
149 """
150 import torch
151
152 if not packaging.version.parse(torch.__version__) >= packaging.version.parse(target_version):
153 raise ImportError(
154 f"Found an incompatible version of PyTorch. Found version {torch.__version__}, but only {target_version} and above are supported. {message}"
155 )
156 try:
157 yield
158 finally:
159 pass
160
161
162 def check_if_transformers_greater(target_version: Union[str, packaging.version.Version]) -> bool:
163 """
164 Checks whether the current install of transformers is greater than or equal to the target version.
165
166 Args:
167 target_version (`Union[str, packaging.version.Version]`): version used as the reference for comparison.
168
169 Returns:
170 bool: whether the check is True or not.
171 """
172 import transformers
173
174 if isinstance(target_version, str):
175 target_version = packaging.version.parse(target_version)
176
177 return packaging.version.parse(transformers.__version__) >= target_version
178
179
180 def check_if_diffusers_greater(target_version: str) -> bool:
181 """
182 Checks whether the current install of diffusers is greater than or equal to the target version.
183
184 Args:
185 target_version (str): version used as the reference for comparison.
186
187 Returns:
188 bool: whether the check is True or not.
189 """
190 if not _diffusers_available:
191 return False
192
193 return packaging.version.parse(_diffusers_version) >= packaging.version.parse(target_version)
194
195
196 @contextmanager
197 def require_numpy_strictly_lower(version: str, message: str):
198 if not packaging.version.parse(np.__version__) < packaging.version.parse(version):
199 raise ImportError(
200 f"Found an incompatible version of numpy. Found version {np.__version__}, but expected numpy<{version}. {message}"
201 )
202 try:
203 yield
204 finally:
205 pass
206
207
208 DIFFUSERS_IMPORT_ERROR = """
209 {0} requires the diffusers library but it was not found in your environment. You can install it with pip: `pip install
210 diffusers`. Please note that you may need to restart your runtime after installation.
211 """
212
213 TRANSFORMERS_IMPORT_ERROR = """requires the transformers>={0} library but it was not found in your environment. You can install it with pip: `pip install
214 -U transformers`. Please note that you may need to restart your runtime after installation.
215 """
216
217 BACKENDS_MAPPING = OrderedDict(
218 [
219 ("diffusers", (is_diffusers_available, DIFFUSERS_IMPORT_ERROR)),
220 (
221 "transformers_431",
222 (lambda: check_if_transformers_greater("4.31"), "{0} " + TRANSFORMERS_IMPORT_ERROR.format("4.31")),
223 ),
224 (
225 "transformers_432",
226 (lambda: check_if_transformers_greater("4.32"), "{0} " + TRANSFORMERS_IMPORT_ERROR.format("4.32")),
227 ),
228 (
229 "transformers_434",
230 (lambda: check_if_transformers_greater("4.34"), "{0} " + TRANSFORMERS_IMPORT_ERROR.format("4.34")),
231 ),
232 ]
233 )
234
235
236 def requires_backends(obj, backends):
237 if not isinstance(backends, (list, tuple)):
238 backends = [backends]
239
240 name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
241 checks = (BACKENDS_MAPPING[backend] for backend in backends)
242 failed = [msg.format(name) for available, msg in checks if not available()]
243 if failed:
244 raise ImportError("".join(failed))
245
246
247 # Copied from: https://github.com/huggingface/transformers/blob/v4.26.0/src/transformers/utils/import_utils.py#L1041
248 class DummyObject(type):
249 """
250 Metaclass for the dummy objects. Any class inheriting from it will return the ImportError generated by
251 `requires_backend` each time a user tries to access any method of that class.
252 """
253
254 def __getattr__(cls, key):
255 if key.startswith("_"):
256 return super().__getattr__(cls, key)
257 requires_backends(cls, cls._backends)
258
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/optimum/utils/import_utils.py b/optimum/utils/import_utils.py
--- a/optimum/utils/import_utils.py
+++ b/optimum/utils/import_utils.py
@@ -21,7 +21,7 @@
from typing import Tuple, Union
import numpy as np
-import packaging
+from packaging import version
from transformers.utils import is_torch_available
@@ -48,14 +48,14 @@
import importlib.metadata as importlib_metadata
-TORCH_MINIMUM_VERSION = packaging.version.parse("1.11.0")
-TRANSFORMERS_MINIMUM_VERSION = packaging.version.parse("4.25.0")
-DIFFUSERS_MINIMUM_VERSION = packaging.version.parse("0.18.0")
-AUTOGPTQ_MINIMUM_VERSION = packaging.version.parse("0.4.99") # Allows 0.5.0.dev0
+TORCH_MINIMUM_VERSION = version.parse("1.11.0")
+TRANSFORMERS_MINIMUM_VERSION = version.parse("4.25.0")
+DIFFUSERS_MINIMUM_VERSION = version.parse("0.18.0")
+AUTOGPTQ_MINIMUM_VERSION = version.parse("0.4.99") # Allows 0.5.0.dev0
# This is the minimal required version to support some ONNX Runtime features
-ORT_QUANTIZE_MINIMUM_VERSION = packaging.version.parse("1.4.0")
+ORT_QUANTIZE_MINIMUM_VERSION = version.parse("1.4.0")
_onnx_available = _is_package_available("onnx")
@@ -72,7 +72,7 @@
torch_version = None
if is_torch_available():
- torch_version = packaging.version.parse(importlib_metadata.version("torch"))
+ torch_version = version.parse(importlib_metadata.version("torch"))
_is_torch_onnx_support_available = is_torch_available() and (
TORCH_MINIMUM_VERSION.major,
@@ -133,7 +133,7 @@
def is_auto_gptq_available():
if _auto_gptq_available:
- version_autogptq = packaging.version.parse(importlib_metadata.version("auto_gptq"))
+ version_autogptq = version.parse(importlib_metadata.version("auto_gptq"))
if AUTOGPTQ_MINIMUM_VERSION < version_autogptq:
return True
else:
@@ -149,7 +149,7 @@
"""
import torch
- if not packaging.version.parse(torch.__version__) >= packaging.version.parse(target_version):
+ if not version.parse(torch.__version__) >= version.parse(target_version):
raise ImportError(
f"Found an incompatible version of PyTorch. Found version {torch.__version__}, but only {target_version} and above are supported. {message}"
)
@@ -159,7 +159,7 @@
pass
-def check_if_transformers_greater(target_version: Union[str, packaging.version.Version]) -> bool:
+def check_if_transformers_greater(target_version: Union[str, version.Version]) -> bool:
"""
Checks whether the current install of transformers is greater than or equal to the target version.
@@ -172,9 +172,9 @@
import transformers
if isinstance(target_version, str):
- target_version = packaging.version.parse(target_version)
+ target_version = version.parse(target_version)
- return packaging.version.parse(transformers.__version__) >= target_version
+ return version.parse(transformers.__version__) >= target_version
def check_if_diffusers_greater(target_version: str) -> bool:
@@ -190,12 +190,12 @@
if not _diffusers_available:
return False
- return packaging.version.parse(_diffusers_version) >= packaging.version.parse(target_version)
+ return version.parse(_diffusers_version) >= version.parse(target_version)
@contextmanager
-def require_numpy_strictly_lower(version: str, message: str):
- if not packaging.version.parse(np.__version__) < packaging.version.parse(version):
+def require_numpy_strictly_lower(package_version: str, message: str):
+ if not version.parse(np.__version__) < version.parse(package_version):
raise ImportError(
f"Found an incompatible version of numpy. Found version {np.__version__}, but expected numpy<{version}. {message}"
)
|
{"golden_diff": "diff --git a/optimum/utils/import_utils.py b/optimum/utils/import_utils.py\n--- a/optimum/utils/import_utils.py\n+++ b/optimum/utils/import_utils.py\n@@ -21,7 +21,7 @@\n from typing import Tuple, Union\n \n import numpy as np\n-import packaging\n+from packaging import version\n from transformers.utils import is_torch_available\n \n \n@@ -48,14 +48,14 @@\n import importlib.metadata as importlib_metadata\n \n \n-TORCH_MINIMUM_VERSION = packaging.version.parse(\"1.11.0\")\n-TRANSFORMERS_MINIMUM_VERSION = packaging.version.parse(\"4.25.0\")\n-DIFFUSERS_MINIMUM_VERSION = packaging.version.parse(\"0.18.0\")\n-AUTOGPTQ_MINIMUM_VERSION = packaging.version.parse(\"0.4.99\") # Allows 0.5.0.dev0\n+TORCH_MINIMUM_VERSION = version.parse(\"1.11.0\")\n+TRANSFORMERS_MINIMUM_VERSION = version.parse(\"4.25.0\")\n+DIFFUSERS_MINIMUM_VERSION = version.parse(\"0.18.0\")\n+AUTOGPTQ_MINIMUM_VERSION = version.parse(\"0.4.99\") # Allows 0.5.0.dev0\n \n \n # This is the minimal required version to support some ONNX Runtime features\n-ORT_QUANTIZE_MINIMUM_VERSION = packaging.version.parse(\"1.4.0\")\n+ORT_QUANTIZE_MINIMUM_VERSION = version.parse(\"1.4.0\")\n \n \n _onnx_available = _is_package_available(\"onnx\")\n@@ -72,7 +72,7 @@\n \n torch_version = None\n if is_torch_available():\n- torch_version = packaging.version.parse(importlib_metadata.version(\"torch\"))\n+ torch_version = version.parse(importlib_metadata.version(\"torch\"))\n \n _is_torch_onnx_support_available = is_torch_available() and (\n TORCH_MINIMUM_VERSION.major,\n@@ -133,7 +133,7 @@\n \n def is_auto_gptq_available():\n if _auto_gptq_available:\n- version_autogptq = packaging.version.parse(importlib_metadata.version(\"auto_gptq\"))\n+ version_autogptq = version.parse(importlib_metadata.version(\"auto_gptq\"))\n if AUTOGPTQ_MINIMUM_VERSION < version_autogptq:\n return True\n else:\n@@ -149,7 +149,7 @@\n \"\"\"\n import torch\n \n- if not packaging.version.parse(torch.__version__) >= packaging.version.parse(target_version):\n+ if not version.parse(torch.__version__) >= version.parse(target_version):\n raise ImportError(\n f\"Found an incompatible version of PyTorch. Found version {torch.__version__}, but only {target_version} and above are supported. 
{message}\"\n )\n@@ -159,7 +159,7 @@\n pass\n \n \n-def check_if_transformers_greater(target_version: Union[str, packaging.version.Version]) -> bool:\n+def check_if_transformers_greater(target_version: Union[str, version.Version]) -> bool:\n \"\"\"\n Checks whether the current install of transformers is greater than or equal to the target version.\n \n@@ -172,9 +172,9 @@\n import transformers\n \n if isinstance(target_version, str):\n- target_version = packaging.version.parse(target_version)\n+ target_version = version.parse(target_version)\n \n- return packaging.version.parse(transformers.__version__) >= target_version\n+ return version.parse(transformers.__version__) >= target_version\n \n \n def check_if_diffusers_greater(target_version: str) -> bool:\n@@ -190,12 +190,12 @@\n if not _diffusers_available:\n return False\n \n- return packaging.version.parse(_diffusers_version) >= packaging.version.parse(target_version)\n+ return version.parse(_diffusers_version) >= version.parse(target_version)\n \n \n @contextmanager\n-def require_numpy_strictly_lower(version: str, message: str):\n- if not packaging.version.parse(np.__version__) < packaging.version.parse(version):\n+def require_numpy_strictly_lower(package_version: str, message: str):\n+ if not version.parse(np.__version__) < version.parse(package_version):\n raise ImportError(\n f\"Found an incompatible version of numpy. Found version {np.__version__}, but expected numpy<{version}. {message}\"\n )\n", "issue": "Whisper-v3 ValueError: Transformers now supports natively BetterTransformer optimizations\n### System Info\n\n```shell\nNvidia Docker Container 23.12\r\n\r\n\r\nxFormers 0.0.24+6600003.d20240116\r\nmemory_efficient_attention.cutlassF: available\r\nmemory_efficient_attention.cutlassB: available\r\nmemory_efficient_attention.decoderF: available\r\[email protected]: available\r\[email protected]: available\r\nmemory_efficient_attention.smallkF: available\r\nmemory_efficient_attention.smallkB: available\r\nmemory_efficient_attention.tritonflashattF: unavailable\r\nmemory_efficient_attention.tritonflashattB: unavailable\r\nmemory_efficient_attention.triton_splitKF: available\r\nindexing.scaled_index_addF: available\r\nindexing.scaled_index_addB: available\r\nindexing.index_select: available\r\nswiglu.dual_gemm_silu: available\r\nswiglu.gemm_fused_operand_sum: available\r\nswiglu.fused.p.cpp: available\r\nis_triton_available: True\r\npytorch.version: 2.2.0a0+81ea7a4\r\npytorch.cuda: available\r\ngpu.compute_capability: 8.9\r\ngpu.name: NVIDIA GeForce RTX 4090\r\ndcgm_profiler: unavailable\r\nbuild.info: available\r\nbuild.cuda_version: 1203\r\nbuild.python_version: 3.10.12\r\nbuild.torch_version: 2.2.0a0+81ea7a4\r\nbuild.env.TORCH_CUDA_ARCH_LIST: 5.2 6.0 6.1 7.0 7.2 7.5 8.0 8.6 8.7 9.0+PTX\r\nbuild.env.XFORMERS_BUILD_TYPE: None\r\nbuild.env.XFORMERS_ENABLE_DEBUG_ASSERTIONS: None\r\nbuild.env.NVCC_FLAGS: None\r\nbuild.env.XFORMERS_PACKAGE_FROM: None\r\nbuild.nvcc_version: 12.3.107\r\nsource.privacy: open source\n```\n\n\n### Who can help?\n\n_No response_\n\n### Information\n\n- [X] The official example scripts\n- [ ] My own modified scripts\n\n### Tasks\n\n- [X] An officially supported task in the `examples` folder (such as GLUE/SQuAD, ...)\n- [ ] My own task or dataset (give details below)\n\n### Reproduction (minimal, reproducible, runnable)\n\nuse code:\r\n\r\nhttps://huggingface.co/spaces/primeline/whisper-german/blob/main/app.py\r\n\r\n```bash\r\npython app.py \r\nTraceback (most recent call last):\r\n File \"/app/app.py\", line 
23, in <module>\r\n model = model.to_bettertransformer()\r\n File \"/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py\", line 4314, in to_bettertransformer\r\n return BetterTransformer.transform(self)\r\n File \"/usr/lib/python3.10/contextlib.py\", line 79, in inner\r\n return func(*args, **kwds)\r\n File \"/usr/local/lib/python3.10/dist-packages/optimum/bettertransformer/transformation.py\", line 211, in transform\r\n raise ValueError(\r\nValueError: Transformers now supports natively BetterTransformer optimizations (torch.nn.functional.scaled_dot_product_attention) for the model type whisper. Please upgrade to transformers>=4.36 and torch>=2.1.1 to use it. Details: https://huggingface.co/docs/transformers/perf_infer_gpu_one#flashattention-and-memory-efficient-attention-through-pytorchs-scaleddotproductattention\r\n```\n\n### Expected behavior\n\neverything is ok.\n", "before_files": [{"content": "# Copyright 2022 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Import utilities.\"\"\"\n\nimport importlib.util\nimport inspect\nimport sys\nfrom collections import OrderedDict\nfrom contextlib import contextmanager\nfrom typing import Tuple, Union\n\nimport numpy as np\nimport packaging\nfrom transformers.utils import is_torch_available\n\n\ndef _is_package_available(pkg_name: str, return_version: bool = False) -> Union[Tuple[bool, str], bool]:\n # Check we're not importing a \"pkg_name\" directory somewhere but the actual library by trying to grab the version\n package_exists = importlib.util.find_spec(pkg_name) is not None\n package_version = \"N/A\"\n if package_exists:\n try:\n package_version = importlib.metadata.version(pkg_name)\n package_exists = True\n except importlib.metadata.PackageNotFoundError:\n package_exists = False\n if return_version:\n return package_exists, package_version\n else:\n return package_exists\n\n\n# The package importlib_metadata is in a different place, depending on the python version.\nif sys.version_info < (3, 8):\n import importlib_metadata\nelse:\n import importlib.metadata as importlib_metadata\n\n\nTORCH_MINIMUM_VERSION = packaging.version.parse(\"1.11.0\")\nTRANSFORMERS_MINIMUM_VERSION = packaging.version.parse(\"4.25.0\")\nDIFFUSERS_MINIMUM_VERSION = packaging.version.parse(\"0.18.0\")\nAUTOGPTQ_MINIMUM_VERSION = packaging.version.parse(\"0.4.99\") # Allows 0.5.0.dev0\n\n\n# This is the minimal required version to support some ONNX Runtime features\nORT_QUANTIZE_MINIMUM_VERSION = packaging.version.parse(\"1.4.0\")\n\n\n_onnx_available = _is_package_available(\"onnx\")\n\n# importlib.metadata.version seem to not be robust with the ONNX Runtime extensions (`onnxruntime-gpu`, etc.)\n_onnxruntime_available = importlib.util.find_spec(\"onnxruntime\") is not None\n\n_pydantic_available = _is_package_available(\"pydantic\")\n_accelerate_available = _is_package_available(\"accelerate\")\n_diffusers_available = _is_package_available(\"diffusers\")\n_auto_gptq_available = 
_is_package_available(\"auto_gptq\")\n_timm_available = _is_package_available(\"timm\")\n_sentence_transformers_available = _is_package_available(\"sentence_transformers\")\n\ntorch_version = None\nif is_torch_available():\n torch_version = packaging.version.parse(importlib_metadata.version(\"torch\"))\n\n_is_torch_onnx_support_available = is_torch_available() and (\n TORCH_MINIMUM_VERSION.major,\n TORCH_MINIMUM_VERSION.minor,\n) <= (\n torch_version.major,\n torch_version.minor,\n)\n\n\n_diffusers_version = None\nif _diffusers_available:\n try:\n _diffusers_version = importlib_metadata.version(\"diffusers\")\n except importlib_metadata.PackageNotFoundError:\n _diffusers_available = False\n\n\ndef is_torch_onnx_support_available():\n return _is_torch_onnx_support_available\n\n\ndef is_onnx_available():\n return _onnx_available\n\n\ndef is_onnxruntime_available():\n try:\n # Try to import the source file of onnxruntime - if you run the tests from `tests` the function gets\n # confused since there a folder named `onnxruntime` in `tests`. Therefore, `_onnxruntime_available`\n # will be set to `True` even if not installed.\n mod = importlib.import_module(\"onnxruntime\")\n inspect.getsourcefile(mod)\n except Exception:\n return False\n return _onnxruntime_available\n\n\ndef is_pydantic_available():\n return _pydantic_available\n\n\ndef is_accelerate_available():\n return _accelerate_available\n\n\ndef is_diffusers_available():\n return _diffusers_available\n\n\ndef is_timm_available():\n return _timm_available\n\n\ndef is_sentence_transformers_available():\n return _sentence_transformers_available\n\n\ndef is_auto_gptq_available():\n if _auto_gptq_available:\n version_autogptq = packaging.version.parse(importlib_metadata.version(\"auto_gptq\"))\n if AUTOGPTQ_MINIMUM_VERSION < version_autogptq:\n return True\n else:\n raise ImportError(\n f\"Found an incompatible version of auto-gptq. Found version {version_autogptq}, but only version above {AUTOGPTQ_MINIMUM_VERSION} are supported\"\n )\n\n\n@contextmanager\ndef check_if_pytorch_greater(target_version: str, message: str):\n r\"\"\"\n A context manager that does nothing except checking if the PyTorch version is greater than `pt_version`\n \"\"\"\n import torch\n\n if not packaging.version.parse(torch.__version__) >= packaging.version.parse(target_version):\n raise ImportError(\n f\"Found an incompatible version of PyTorch. Found version {torch.__version__}, but only {target_version} and above are supported. 
{message}\"\n )\n try:\n yield\n finally:\n pass\n\n\ndef check_if_transformers_greater(target_version: Union[str, packaging.version.Version]) -> bool:\n \"\"\"\n Checks whether the current install of transformers is greater than or equal to the target version.\n\n Args:\n target_version (`Union[str, packaging.version.Version]`): version used as the reference for comparison.\n\n Returns:\n bool: whether the check is True or not.\n \"\"\"\n import transformers\n\n if isinstance(target_version, str):\n target_version = packaging.version.parse(target_version)\n\n return packaging.version.parse(transformers.__version__) >= target_version\n\n\ndef check_if_diffusers_greater(target_version: str) -> bool:\n \"\"\"\n Checks whether the current install of diffusers is greater than or equal to the target version.\n\n Args:\n target_version (str): version used as the reference for comparison.\n\n Returns:\n bool: whether the check is True or not.\n \"\"\"\n if not _diffusers_available:\n return False\n\n return packaging.version.parse(_diffusers_version) >= packaging.version.parse(target_version)\n\n\n@contextmanager\ndef require_numpy_strictly_lower(version: str, message: str):\n if not packaging.version.parse(np.__version__) < packaging.version.parse(version):\n raise ImportError(\n f\"Found an incompatible version of numpy. Found version {np.__version__}, but expected numpy<{version}. {message}\"\n )\n try:\n yield\n finally:\n pass\n\n\nDIFFUSERS_IMPORT_ERROR = \"\"\"\n{0} requires the diffusers library but it was not found in your environment. You can install it with pip: `pip install\ndiffusers`. Please note that you may need to restart your runtime after installation.\n\"\"\"\n\nTRANSFORMERS_IMPORT_ERROR = \"\"\"requires the transformers>={0} library but it was not found in your environment. You can install it with pip: `pip install\n-U transformers`. Please note that you may need to restart your runtime after installation.\n\"\"\"\n\nBACKENDS_MAPPING = OrderedDict(\n [\n (\"diffusers\", (is_diffusers_available, DIFFUSERS_IMPORT_ERROR)),\n (\n \"transformers_431\",\n (lambda: check_if_transformers_greater(\"4.31\"), \"{0} \" + TRANSFORMERS_IMPORT_ERROR.format(\"4.31\")),\n ),\n (\n \"transformers_432\",\n (lambda: check_if_transformers_greater(\"4.32\"), \"{0} \" + TRANSFORMERS_IMPORT_ERROR.format(\"4.32\")),\n ),\n (\n \"transformers_434\",\n (lambda: check_if_transformers_greater(\"4.34\"), \"{0} \" + TRANSFORMERS_IMPORT_ERROR.format(\"4.34\")),\n ),\n ]\n)\n\n\ndef requires_backends(obj, backends):\n if not isinstance(backends, (list, tuple)):\n backends = [backends]\n\n name = obj.__name__ if hasattr(obj, \"__name__\") else obj.__class__.__name__\n checks = (BACKENDS_MAPPING[backend] for backend in backends)\n failed = [msg.format(name) for available, msg in checks if not available()]\n if failed:\n raise ImportError(\"\".join(failed))\n\n\n# Copied from: https://github.com/huggingface/transformers/blob/v4.26.0/src/transformers/utils/import_utils.py#L1041\nclass DummyObject(type):\n \"\"\"\n Metaclass for the dummy objects. Any class inheriting from it will return the ImportError generated by\n `requires_backend` each time a user tries to access any method of that class.\n \"\"\"\n\n def __getattr__(cls, key):\n if key.startswith(\"_\"):\n return super().__getattr__(cls, key)\n requires_backends(cls, cls._backends)\n", "path": "optimum/utils/import_utils.py"}], "after_files": [{"content": "# Copyright 2022 The HuggingFace Team. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Import utilities.\"\"\"\n\nimport importlib.util\nimport inspect\nimport sys\nfrom collections import OrderedDict\nfrom contextlib import contextmanager\nfrom typing import Tuple, Union\n\nimport numpy as np\nfrom packaging import version\nfrom transformers.utils import is_torch_available\n\n\ndef _is_package_available(pkg_name: str, return_version: bool = False) -> Union[Tuple[bool, str], bool]:\n # Check we're not importing a \"pkg_name\" directory somewhere but the actual library by trying to grab the version\n package_exists = importlib.util.find_spec(pkg_name) is not None\n package_version = \"N/A\"\n if package_exists:\n try:\n package_version = importlib.metadata.version(pkg_name)\n package_exists = True\n except importlib.metadata.PackageNotFoundError:\n package_exists = False\n if return_version:\n return package_exists, package_version\n else:\n return package_exists\n\n\n# The package importlib_metadata is in a different place, depending on the python version.\nif sys.version_info < (3, 8):\n import importlib_metadata\nelse:\n import importlib.metadata as importlib_metadata\n\n\nTORCH_MINIMUM_VERSION = version.parse(\"1.11.0\")\nTRANSFORMERS_MINIMUM_VERSION = version.parse(\"4.25.0\")\nDIFFUSERS_MINIMUM_VERSION = version.parse(\"0.18.0\")\nAUTOGPTQ_MINIMUM_VERSION = version.parse(\"0.4.99\") # Allows 0.5.0.dev0\n\n\n# This is the minimal required version to support some ONNX Runtime features\nORT_QUANTIZE_MINIMUM_VERSION = version.parse(\"1.4.0\")\n\n\n_onnx_available = _is_package_available(\"onnx\")\n\n# importlib.metadata.version seem to not be robust with the ONNX Runtime extensions (`onnxruntime-gpu`, etc.)\n_onnxruntime_available = importlib.util.find_spec(\"onnxruntime\") is not None\n\n_pydantic_available = _is_package_available(\"pydantic\")\n_accelerate_available = _is_package_available(\"accelerate\")\n_diffusers_available = _is_package_available(\"diffusers\")\n_auto_gptq_available = _is_package_available(\"auto_gptq\")\n_timm_available = _is_package_available(\"timm\")\n_sentence_transformers_available = _is_package_available(\"sentence_transformers\")\n\ntorch_version = None\nif is_torch_available():\n torch_version = version.parse(importlib_metadata.version(\"torch\"))\n\n_is_torch_onnx_support_available = is_torch_available() and (\n TORCH_MINIMUM_VERSION.major,\n TORCH_MINIMUM_VERSION.minor,\n) <= (\n torch_version.major,\n torch_version.minor,\n)\n\n\n_diffusers_version = None\nif _diffusers_available:\n try:\n _diffusers_version = importlib_metadata.version(\"diffusers\")\n except importlib_metadata.PackageNotFoundError:\n _diffusers_available = False\n\n\ndef is_torch_onnx_support_available():\n return _is_torch_onnx_support_available\n\n\ndef is_onnx_available():\n return _onnx_available\n\n\ndef is_onnxruntime_available():\n try:\n # Try to import the source file of onnxruntime - if you run the tests from `tests` the function gets\n # confused since there a folder named 
`onnxruntime` in `tests`. Therefore, `_onnxruntime_available`\n # will be set to `True` even if not installed.\n mod = importlib.import_module(\"onnxruntime\")\n inspect.getsourcefile(mod)\n except Exception:\n return False\n return _onnxruntime_available\n\n\ndef is_pydantic_available():\n return _pydantic_available\n\n\ndef is_accelerate_available():\n return _accelerate_available\n\n\ndef is_diffusers_available():\n return _diffusers_available\n\n\ndef is_timm_available():\n return _timm_available\n\n\ndef is_sentence_transformers_available():\n return _sentence_transformers_available\n\n\ndef is_auto_gptq_available():\n if _auto_gptq_available:\n version_autogptq = version.parse(importlib_metadata.version(\"auto_gptq\"))\n if AUTOGPTQ_MINIMUM_VERSION < version_autogptq:\n return True\n else:\n raise ImportError(\n f\"Found an incompatible version of auto-gptq. Found version {version_autogptq}, but only version above {AUTOGPTQ_MINIMUM_VERSION} are supported\"\n )\n\n\n@contextmanager\ndef check_if_pytorch_greater(target_version: str, message: str):\n r\"\"\"\n A context manager that does nothing except checking if the PyTorch version is greater than `pt_version`\n \"\"\"\n import torch\n\n if not version.parse(torch.__version__) >= version.parse(target_version):\n raise ImportError(\n f\"Found an incompatible version of PyTorch. Found version {torch.__version__}, but only {target_version} and above are supported. {message}\"\n )\n try:\n yield\n finally:\n pass\n\n\ndef check_if_transformers_greater(target_version: Union[str, version.Version]) -> bool:\n \"\"\"\n Checks whether the current install of transformers is greater than or equal to the target version.\n\n Args:\n target_version (`Union[str, packaging.version.Version]`): version used as the reference for comparison.\n\n Returns:\n bool: whether the check is True or not.\n \"\"\"\n import transformers\n\n if isinstance(target_version, str):\n target_version = version.parse(target_version)\n\n return version.parse(transformers.__version__) >= target_version\n\n\ndef check_if_diffusers_greater(target_version: str) -> bool:\n \"\"\"\n Checks whether the current install of diffusers is greater than or equal to the target version.\n\n Args:\n target_version (str): version used as the reference for comparison.\n\n Returns:\n bool: whether the check is True or not.\n \"\"\"\n if not _diffusers_available:\n return False\n\n return version.parse(_diffusers_version) >= version.parse(target_version)\n\n\n@contextmanager\ndef require_numpy_strictly_lower(package_version: str, message: str):\n if not version.parse(np.__version__) < version.parse(package_version):\n raise ImportError(\n f\"Found an incompatible version of numpy. Found version {np.__version__}, but expected numpy<{version}. {message}\"\n )\n try:\n yield\n finally:\n pass\n\n\nDIFFUSERS_IMPORT_ERROR = \"\"\"\n{0} requires the diffusers library but it was not found in your environment. You can install it with pip: `pip install\ndiffusers`. Please note that you may need to restart your runtime after installation.\n\"\"\"\n\nTRANSFORMERS_IMPORT_ERROR = \"\"\"requires the transformers>={0} library but it was not found in your environment. You can install it with pip: `pip install\n-U transformers`. 
Please note that you may need to restart your runtime after installation.\n\"\"\"\n\nBACKENDS_MAPPING = OrderedDict(\n [\n (\"diffusers\", (is_diffusers_available, DIFFUSERS_IMPORT_ERROR)),\n (\n \"transformers_431\",\n (lambda: check_if_transformers_greater(\"4.31\"), \"{0} \" + TRANSFORMERS_IMPORT_ERROR.format(\"4.31\")),\n ),\n (\n \"transformers_432\",\n (lambda: check_if_transformers_greater(\"4.32\"), \"{0} \" + TRANSFORMERS_IMPORT_ERROR.format(\"4.32\")),\n ),\n (\n \"transformers_434\",\n (lambda: check_if_transformers_greater(\"4.34\"), \"{0} \" + TRANSFORMERS_IMPORT_ERROR.format(\"4.34\")),\n ),\n ]\n)\n\n\ndef requires_backends(obj, backends):\n if not isinstance(backends, (list, tuple)):\n backends = [backends]\n\n name = obj.__name__ if hasattr(obj, \"__name__\") else obj.__class__.__name__\n checks = (BACKENDS_MAPPING[backend] for backend in backends)\n failed = [msg.format(name) for available, msg in checks if not available()]\n if failed:\n raise ImportError(\"\".join(failed))\n\n\n# Copied from: https://github.com/huggingface/transformers/blob/v4.26.0/src/transformers/utils/import_utils.py#L1041\nclass DummyObject(type):\n \"\"\"\n Metaclass for the dummy objects. Any class inheriting from it will return the ImportError generated by\n `requires_backend` each time a user tries to access any method of that class.\n \"\"\"\n\n def __getattr__(cls, key):\n if key.startswith(\"_\"):\n return super().__getattr__(cls, key)\n requires_backends(cls, cls._backends)\n", "path": "optimum/utils/import_utils.py"}]}
| 3,813 | 945 |
gh_patches_debug_37092
|
rasdani/github-patches
|
git_diff
|
PennyLaneAI__pennylane-4800
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG] compile pre-decomposes at most once
### Expected behavior
I would expect that if I do qml.compile() of a template and its adjoint template, the decompositions would be equivalent but adjoined. However, the depth of the decomposition is different:
<img width="300" alt="Screenshot 2023-11-07 at 13:58:38" src="https://github.com/PennyLaneAI/pennylane/assets/65235481/b6d7525e-e751-47ea-824e-d1bda5701f73">
<img width="300" alt="Screenshot 2023-11-07 at 13:58:49" src="https://github.com/PennyLaneAI/pennylane/assets/65235481/967a1e09-0203-4501-b0c2-3c2e345cc5d7">
As we can see, in one case it expands the QFT and in the other it does not. I would have liked them to be equivalent, so that both cases show (QFT or QFT^t).
### Actual behavior
Explained above
### Additional information
_No response_
### Source code
```shell
import pennylane as qml
dev = qml.device("default.qubit")
@qml.qnode(dev)
@qml.compile()
def circuit():
#qml.QuantumPhaseEstimation(qml.PauliZ(0), estimation_wires = range(1,3))
qml.adjoint(qml.QuantumPhaseEstimation)(qml.PauliZ(0), estimation_wires = range(1,3))
return qml.state()
qml.draw_mpl(circuit)()
```
### Tracebacks
_No response_
### System information
```shell
Name: PennyLane
Version: 0.33.0
Summary: PennyLane is a Python quantum machine learning library by Xanadu Inc.
Home-page: https://github.com/PennyLaneAI/pennylane
Author:
Author-email:
License: Apache License 2.0
Location: /usr/local/lib/python3.10/dist-packages
Requires: appdirs, autograd, autoray, cachetools, networkx, numpy, pennylane-lightning, requests, rustworkx, scipy, semantic-version, toml, typing-extensions
Required-by: PennyLane-Lightning
Platform info: Linux-5.15.120+-x86_64-with-glibc2.35
Python version: 3.10.12
Numpy version: 1.23.5
Scipy version: 1.11.3
Installed devices:
```
### Existing GitHub issues
- [X] I have searched existing GitHub issues to make sure the issue does not already exist.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pennylane/transforms/compile.py`
Content:
```
1 # Copyright 2018-2021 Xanadu Quantum Technologies Inc.
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6
7 # http://www.apache.org/licenses/LICENSE-2.0
8
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 """Code for the high-level quantum function transform that executes compilation."""
15 # pylint: disable=too-many-branches
16 from functools import partial
17 from typing import Sequence, Callable
18
19 from pennylane.queuing import QueuingManager
20 from pennylane.ops import __all__ as all_ops
21 from pennylane.tape import QuantumTape
22 from pennylane.transforms.core import transform, TransformDispatcher
23 from pennylane.transforms.optimization import (
24 cancel_inverses,
25 commute_controlled,
26 merge_rotations,
27 remove_barrier,
28 )
29
30
31 default_pipeline = [commute_controlled, cancel_inverses, merge_rotations, remove_barrier]
32
33
34 @transform
35 def compile(
36 tape: QuantumTape, pipeline=None, basis_set=None, num_passes=1, expand_depth=5
37 ) -> (Sequence[QuantumTape], Callable):
38 """Compile a circuit by applying a series of transforms to a quantum function.
39
40 The default set of transforms includes (in order):
41
42 - pushing all commuting single-qubit gates as far right as possible
43 (:func:`~pennylane.transforms.commute_controlled`)
44 - cancellation of adjacent inverse gates
45 (:func:`~pennylane.transforms.cancel_inverses`)
46 - merging adjacent rotations of the same type
47 (:func:`~pennylane.transforms.merge_rotations`)
48
49 Args:
50 tape (QNode or QuantumTape or Callable): A quantum circuit.
51 pipeline (list[Callable]): A list of
52 tape and/or quantum function transforms to apply.
53 basis_set (list[str]): A list of basis gates. When expanding the tape,
54 expansion will continue until gates in the specific set are
55 reached. If no basis set is specified, no expansion will be done.
56 num_passes (int): The number of times to apply the set of transforms in
57 ``pipeline``. The default is to perform each transform once;
58 however, doing so may produce a new circuit where applying the set
59 of transforms again may yield further improvement, so the number of
60 such passes can be adjusted.
61 expand_depth (int): When ``basis_set`` is specified, the depth to use
62 for tape expansion into the basis gates.
63
64 Returns:
65 qnode (QNode) or quantum function (Callable) or tuple[List[QuantumTape], function]: The compiled circuit. The output type is explained in :func:`qml.transform <pennylane.transform>`.
66
67 **Example**
68
69 >>> dev = qml.device('default.qubit', wires=[0, 1, 2])
70
71 You can apply the transform directly on a :class:`QNode`:
72
73 .. code-block:: python
74
75 @compile
76 @qml.qnode(device=dev)
77 def circuit(x, y, z):
78 qml.Hadamard(wires=0)
79 qml.Hadamard(wires=1)
80 qml.Hadamard(wires=2)
81 qml.RZ(z, wires=2)
82 qml.CNOT(wires=[2, 1])
83 qml.RX(z, wires=0)
84 qml.CNOT(wires=[1, 0])
85 qml.RX(x, wires=0)
86 qml.CNOT(wires=[1, 0])
87 qml.RZ(-z, wires=2)
88 qml.RX(y, wires=2)
89 qml.PauliY(wires=2)
90 qml.CY(wires=[1, 2])
91 return qml.expval(qml.PauliZ(wires=0))
92
93 The default compilation pipeline is applied before execution.
94
95 Consider the following quantum function:
96
97 .. code-block:: python
98
99 def qfunc(x, y, z):
100 qml.Hadamard(wires=0)
101 qml.Hadamard(wires=1)
102 qml.Hadamard(wires=2)
103 qml.RZ(z, wires=2)
104 qml.CNOT(wires=[2, 1])
105 qml.RX(z, wires=0)
106 qml.CNOT(wires=[1, 0])
107 qml.RX(x, wires=0)
108 qml.CNOT(wires=[1, 0])
109 qml.RZ(-z, wires=2)
110 qml.RX(y, wires=2)
111 qml.PauliY(wires=2)
112 qml.CY(wires=[1, 2])
113 return qml.expval(qml.PauliZ(wires=0))
114
115 Visually, the original function looks like this:
116
117 >>> qnode = qml.QNode(qfunc, dev)
118 >>> print(qml.draw(qnode)(0.2, 0.3, 0.4))
119 0: ──H──RX(0.40)────╭X──────────RX(0.20)─╭X────┤ <Z>
120 1: ──H───────────╭X─╰●───────────────────╰●─╭●─┤
121 2: ──H──RZ(0.40)─╰●──RZ(-0.40)──RX(0.30)──Y─╰Y─┤
122
123 We can compile it down to a smaller set of gates using the ``qml.compile``
124 transform.
125
126 >>> compiled_qfunc = qml.compile(qfunc)
127 >>> compiled_qnode = qml.QNode(compiled_qfunc, dev)
128 >>> print(qml.draw(compiled_qnode)(0.2, 0.3, 0.4))
129 0: ──H──RX(0.60)─────────────────┤ <Z>
130 1: ──H─╭X──────────────────╭●────┤
131 2: ──H─╰●─────────RX(0.30)─╰Y──Y─┤
132
133 You can change up the set of transforms by passing a custom ``pipeline`` to
134 ``qml.compile``. The pipeline is a list of transform functions. Furthermore,
135 you can specify a number of passes (repetitions of the pipeline), and a list
136 of gates into which the compiler will first attempt to decompose the
137 existing operations prior to applying any optimization transforms.
138
139 .. code-block:: python3
140
141 compiled_qfunc = qml.compile(
142 pipeline=[
143 partial(qml.transforms.commute_controlled, direction="left"),
144 partial(qml.transforms.merge_rotations, atol=1e-6),
145 qml.transforms.cancel_inverses
146 ],
147 basis_set=["CNOT", "RX", "RY", "RZ"],
148 num_passes=2
149 )(qfunc)
150
151 compiled_qnode = qml.QNode(compiled_qfunc, dev)
152
153 print(qml.draw(compiled_qnode)(0.2, 0.3, 0.4))
154
155 .. code-block::
156
157 0: ──RZ(1.57)──RX(1.57)──RZ(1.57)──RX(0.60)─────────────────────────────────────────────────────
158 1: ──RZ(1.57)──RX(1.57)──RZ(1.57)─╭X─────────RZ(1.57)─────────────────────────────────────────╭●
159 2: ──RZ(1.57)──RX(1.57)──RZ(1.57)─╰●─────────RX(0.30)──RZ(1.57)──RY(3.14)──RZ(1.57)──RY(1.57)─╰X
160
161 ────────────────┤ <Z>
162 ─────────────╭●─┤
163 ───RY(-1.57)─╰X─┤
164
165 """
166 # Ensure that everything in the pipeline is a valid qfunc or tape transform
167 if pipeline is None:
168 pipeline = default_pipeline
169 else:
170 for p in pipeline:
171 p_func = p.func if isinstance(p, partial) else p
172 if not isinstance(p_func, TransformDispatcher):
173 raise ValueError("Invalid transform function {p} passed to compile.")
174
175 if num_passes < 1 or not isinstance(num_passes, int):
176 raise ValueError("Number of passes must be an integer with value at least 1.")
177
178 # Expand the tape; this is done to unroll any templates that may be present,
179 # as well as to decompose over a specified basis set
180 # First, though, we have to stop whatever tape may be recording so that we
181 # don't queue anything as a result of the expansion or transform pipeline
182
183 with QueuingManager.stop_recording():
184 if basis_set is not None:
185 expanded_tape = tape.expand(
186 depth=expand_depth, stop_at=lambda obj: obj.name in basis_set
187 )
188 else:
189 # Expands out anything that is not a single operation (i.e., the templates)
190 # expand barriers when `only_visual=True`
191 def stop_at(obj):
192 return (obj.name in all_ops) and (not getattr(obj, "only_visual", False))
193
194 expanded_tape = tape.expand(stop_at=stop_at)
195
196 # Apply the full set of compilation transforms num_passes times
197 for _ in range(num_passes):
198 for transf in pipeline:
199 tapes, _ = transf(expanded_tape)
200 expanded_tape = tapes[0]
201
202 new_tape = type(tape)(
203 expanded_tape.operations, expanded_tape.measurements, shots=expanded_tape.shots
204 )
205
206 def null_postprocessing(results):
207 """A postprocesing function returned by a transform that only converts the batch of results
208 into a result for a single ``QuantumTape``.
209 """
210 return results[0]
211
212 return [new_tape], null_postprocessing
213
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pennylane/transforms/compile.py b/pennylane/transforms/compile.py
--- a/pennylane/transforms/compile.py
+++ b/pennylane/transforms/compile.py
@@ -52,14 +52,15 @@
tape and/or quantum function transforms to apply.
basis_set (list[str]): A list of basis gates. When expanding the tape,
expansion will continue until gates in the specific set are
- reached. If no basis set is specified, no expansion will be done.
+ reached. If no basis set is specified, a default of
+ ``pennylane.ops.__all__`` will be used. This decomposes templates and
+ operator arithmetic.
num_passes (int): The number of times to apply the set of transforms in
``pipeline``. The default is to perform each transform once;
however, doing so may produce a new circuit where applying the set
of transforms again may yield further improvement, so the number of
such passes can be adjusted.
- expand_depth (int): When ``basis_set`` is specified, the depth to use
- for tape expansion into the basis gates.
+ expand_depth (int): The depth to use for tape expansion into the basis gates.
Returns:
qnode (QNode) or quantum function (Callable) or tuple[List[QuantumTape], function]: The compiled circuit. The output type is explained in :func:`qml.transform <pennylane.transform>`.
@@ -181,17 +182,12 @@
# don't queue anything as a result of the expansion or transform pipeline
with QueuingManager.stop_recording():
- if basis_set is not None:
- expanded_tape = tape.expand(
- depth=expand_depth, stop_at=lambda obj: obj.name in basis_set
- )
- else:
- # Expands out anything that is not a single operation (i.e., the templates)
- # expand barriers when `only_visual=True`
- def stop_at(obj):
- return (obj.name in all_ops) and (not getattr(obj, "only_visual", False))
-
- expanded_tape = tape.expand(stop_at=stop_at)
+ basis_set = basis_set or all_ops
+
+ def stop_at(obj):
+ return obj.name in basis_set and (not getattr(obj, "only_visual", False))
+
+ expanded_tape = tape.expand(depth=expand_depth, stop_at=stop_at)
# Apply the full set of compilation transforms num_passes times
for _ in range(num_passes):
|
{"golden_diff": "diff --git a/pennylane/transforms/compile.py b/pennylane/transforms/compile.py\n--- a/pennylane/transforms/compile.py\n+++ b/pennylane/transforms/compile.py\n@@ -52,14 +52,15 @@\n tape and/or quantum function transforms to apply.\n basis_set (list[str]): A list of basis gates. When expanding the tape,\n expansion will continue until gates in the specific set are\n- reached. If no basis set is specified, no expansion will be done.\n+ reached. If no basis set is specified, a default of\n+ ``pennylane.ops.__all__`` will be used. This decomposes templates and\n+ operator arithmetic.\n num_passes (int): The number of times to apply the set of transforms in\n ``pipeline``. The default is to perform each transform once;\n however, doing so may produce a new circuit where applying the set\n of transforms again may yield further improvement, so the number of\n such passes can be adjusted.\n- expand_depth (int): When ``basis_set`` is specified, the depth to use\n- for tape expansion into the basis gates.\n+ expand_depth (int): The depth to use for tape expansion into the basis gates.\n \n Returns:\n qnode (QNode) or quantum function (Callable) or tuple[List[QuantumTape], function]: The compiled circuit. The output type is explained in :func:`qml.transform <pennylane.transform>`.\n@@ -181,17 +182,12 @@\n # don't queue anything as a result of the expansion or transform pipeline\n \n with QueuingManager.stop_recording():\n- if basis_set is not None:\n- expanded_tape = tape.expand(\n- depth=expand_depth, stop_at=lambda obj: obj.name in basis_set\n- )\n- else:\n- # Expands out anything that is not a single operation (i.e., the templates)\n- # expand barriers when `only_visual=True`\n- def stop_at(obj):\n- return (obj.name in all_ops) and (not getattr(obj, \"only_visual\", False))\n-\n- expanded_tape = tape.expand(stop_at=stop_at)\n+ basis_set = basis_set or all_ops\n+\n+ def stop_at(obj):\n+ return obj.name in basis_set and (not getattr(obj, \"only_visual\", False))\n+\n+ expanded_tape = tape.expand(depth=expand_depth, stop_at=stop_at)\n \n # Apply the full set of compilation transforms num_passes times\n for _ in range(num_passes):\n", "issue": "[BUG] compile pre-decomposes at most once\n### Expected behavior\r\n\r\nI would expect that if I do qml.compile() of a template and its adjoint template, the decompositions would be equivalent but adjoined. However the depth of the decomposition is different:\r\n\r\n<img width=\"300\" alt=\"Captura de pantalla 2023-11-07 a las 13 58 38\" src=\"https://github.com/PennyLaneAI/pennylane/assets/65235481/b6d7525e-e751-47ea-824e-d1bda5701f73\">\r\n\r\n<img width=\"300\" alt=\"Captura de pantalla 2023-11-07 a las 13 58 49\" src=\"https://github.com/PennyLaneAI/pennylane/assets/65235481/967a1e09-0203-4501-b0c2-3c2e345cc5d7\">\r\n\r\nAs we can see in one case it expands the QFT and in another it does not. 
I would have liked that they were equivalent so in both cases it shows (QFT or QFT^t)\r\n\r\n\r\n\r\n### Actual behavior\r\n\r\nExplained above\r\n\r\n### Additional information\r\n\r\n_No response_\r\n\r\n### Source code\r\n\r\n```shell\r\nimport pennylane as qml\r\n\r\ndev = qml.device(\"default.qubit\")\r\n\r\[email protected](dev)\r\[email protected]()\r\ndef circuit():\r\n #qml.QuantumPhaseEstimation(qml.PauliZ(0), estimation_wires = range(1,3))\r\n qml.adjoint(qml.QuantumPhaseEstimation)(qml.PauliZ(0), estimation_wires = range(1,3))\r\n return qml.state()\r\n\r\nqml.draw_mpl(circuit)()\r\n```\r\n\r\n\r\n### Tracebacks\r\n\r\n_No response_\r\n\r\n### System information\r\n\r\n```shell\r\nName: PennyLane\r\nVersion: 0.33.0\r\nSummary: PennyLane is a Python quantum machine learning library by Xanadu Inc.\r\nHome-page: https://github.com/PennyLaneAI/pennylane\r\nAuthor: \r\nAuthor-email: \r\nLicense: Apache License 2.0\r\nLocation: /usr/local/lib/python3.10/dist-packages\r\nRequires: appdirs, autograd, autoray, cachetools, networkx, numpy, pennylane-lightning, requests, rustworkx, scipy, semantic-version, toml, typing-extensions\r\nRequired-by: PennyLane-Lightning\r\n\r\nPlatform info: Linux-5.15.120+-x86_64-with-glibc2.35\r\nPython version: 3.10.12\r\nNumpy version: 1.23.5\r\nScipy version: 1.11.3\r\nInstalled devices:\r\n```\r\n\r\n\r\n### Existing GitHub issues\r\n\r\n- [X] I have searched existing GitHub issues to make sure the issue does not already exist.\n", "before_files": [{"content": "# Copyright 2018-2021 Xanadu Quantum Technologies Inc.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Code for the high-level quantum function transform that executes compilation.\"\"\"\n# pylint: disable=too-many-branches\nfrom functools import partial\nfrom typing import Sequence, Callable\n\nfrom pennylane.queuing import QueuingManager\nfrom pennylane.ops import __all__ as all_ops\nfrom pennylane.tape import QuantumTape\nfrom pennylane.transforms.core import transform, TransformDispatcher\nfrom pennylane.transforms.optimization import (\n cancel_inverses,\n commute_controlled,\n merge_rotations,\n remove_barrier,\n)\n\n\ndefault_pipeline = [commute_controlled, cancel_inverses, merge_rotations, remove_barrier]\n\n\n@transform\ndef compile(\n tape: QuantumTape, pipeline=None, basis_set=None, num_passes=1, expand_depth=5\n) -> (Sequence[QuantumTape], Callable):\n \"\"\"Compile a circuit by applying a series of transforms to a quantum function.\n\n The default set of transforms includes (in order):\n\n - pushing all commuting single-qubit gates as far right as possible\n (:func:`~pennylane.transforms.commute_controlled`)\n - cancellation of adjacent inverse gates\n (:func:`~pennylane.transforms.cancel_inverses`)\n - merging adjacent rotations of the same type\n (:func:`~pennylane.transforms.merge_rotations`)\n\n Args:\n tape (QNode or QuantumTape or Callable): A quantum circuit.\n pipeline (list[Callable]): A list of\n tape and/or quantum function transforms to apply.\n basis_set (list[str]): 
A list of basis gates. When expanding the tape,\n expansion will continue until gates in the specific set are\n reached. If no basis set is specified, no expansion will be done.\n num_passes (int): The number of times to apply the set of transforms in\n ``pipeline``. The default is to perform each transform once;\n however, doing so may produce a new circuit where applying the set\n of transforms again may yield further improvement, so the number of\n such passes can be adjusted.\n expand_depth (int): When ``basis_set`` is specified, the depth to use\n for tape expansion into the basis gates.\n\n Returns:\n qnode (QNode) or quantum function (Callable) or tuple[List[QuantumTape], function]: The compiled circuit. The output type is explained in :func:`qml.transform <pennylane.transform>`.\n\n **Example**\n\n >>> dev = qml.device('default.qubit', wires=[0, 1, 2])\n\n You can apply the transform directly on a :class:`QNode`:\n\n .. code-block:: python\n\n @compile\n @qml.qnode(device=dev)\n def circuit(x, y, z):\n qml.Hadamard(wires=0)\n qml.Hadamard(wires=1)\n qml.Hadamard(wires=2)\n qml.RZ(z, wires=2)\n qml.CNOT(wires=[2, 1])\n qml.RX(z, wires=0)\n qml.CNOT(wires=[1, 0])\n qml.RX(x, wires=0)\n qml.CNOT(wires=[1, 0])\n qml.RZ(-z, wires=2)\n qml.RX(y, wires=2)\n qml.PauliY(wires=2)\n qml.CY(wires=[1, 2])\n return qml.expval(qml.PauliZ(wires=0))\n\n The default compilation pipeline is applied before execution.\n\n Consider the following quantum function:\n\n .. code-block:: python\n\n def qfunc(x, y, z):\n qml.Hadamard(wires=0)\n qml.Hadamard(wires=1)\n qml.Hadamard(wires=2)\n qml.RZ(z, wires=2)\n qml.CNOT(wires=[2, 1])\n qml.RX(z, wires=0)\n qml.CNOT(wires=[1, 0])\n qml.RX(x, wires=0)\n qml.CNOT(wires=[1, 0])\n qml.RZ(-z, wires=2)\n qml.RX(y, wires=2)\n qml.PauliY(wires=2)\n qml.CY(wires=[1, 2])\n return qml.expval(qml.PauliZ(wires=0))\n\n Visually, the original function looks like this:\n\n >>> qnode = qml.QNode(qfunc, dev)\n >>> print(qml.draw(qnode)(0.2, 0.3, 0.4))\n 0: \u2500\u2500H\u2500\u2500RX(0.40)\u2500\u2500\u2500\u2500\u256dX\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500RX(0.20)\u2500\u256dX\u2500\u2500\u2500\u2500\u2524 <Z>\n 1: \u2500\u2500H\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256dX\u2500\u2570\u25cf\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2570\u25cf\u2500\u256d\u25cf\u2500\u2524\n 2: \u2500\u2500H\u2500\u2500RZ(0.40)\u2500\u2570\u25cf\u2500\u2500RZ(-0.40)\u2500\u2500RX(0.30)\u2500\u2500Y\u2500\u2570Y\u2500\u2524\n\n We can compile it down to a smaller set of gates using the ``qml.compile``\n transform.\n\n >>> compiled_qfunc = qml.compile(qfunc)\n >>> compiled_qnode = qml.QNode(compiled_qfunc, dev)\n >>> print(qml.draw(compiled_qnode)(0.2, 0.3, 0.4))\n 0: \u2500\u2500H\u2500\u2500RX(0.60)\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2524 <Z>\n 1: \u2500\u2500H\u2500\u256dX\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256d\u25cf\u2500\u2500\u2500\u2500\u2524\n 2: \u2500\u2500H\u2500\u2570\u25cf\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500RX(0.30)\u2500\u2570Y\u2500\u2500Y\u2500\u2524\n\n You can change up the set of transforms by passing a custom ``pipeline`` to\n ``qml.compile``. The pipeline is a list of transform functions. 
Furthermore,\n you can specify a number of passes (repetitions of the pipeline), and a list\n of gates into which the compiler will first attempt to decompose the\n existing operations prior to applying any optimization transforms.\n\n .. code-block:: python3\n\n compiled_qfunc = qml.compile(\n pipeline=[\n partial(qml.transforms.commute_controlled, direction=\"left\"),\n partial(qml.transforms.merge_rotations, atol=1e-6),\n qml.transforms.cancel_inverses\n ],\n basis_set=[\"CNOT\", \"RX\", \"RY\", \"RZ\"],\n num_passes=2\n )(qfunc)\n\n compiled_qnode = qml.QNode(compiled_qfunc, dev)\n\n print(qml.draw(compiled_qnode)(0.2, 0.3, 0.4))\n\n .. code-block::\n\n 0: \u2500\u2500RZ(1.57)\u2500\u2500RX(1.57)\u2500\u2500RZ(1.57)\u2500\u2500RX(0.60)\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n 1: \u2500\u2500RZ(1.57)\u2500\u2500RX(1.57)\u2500\u2500RZ(1.57)\u2500\u256dX\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500RZ(1.57)\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256d\u25cf\n 2: \u2500\u2500RZ(1.57)\u2500\u2500RX(1.57)\u2500\u2500RZ(1.57)\u2500\u2570\u25cf\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500RX(0.30)\u2500\u2500RZ(1.57)\u2500\u2500RY(3.14)\u2500\u2500RZ(1.57)\u2500\u2500RY(1.57)\u2500\u2570X\n\n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2524 <Z>\n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256d\u25cf\u2500\u2524\n \u2500\u2500\u2500RY(-1.57)\u2500\u2570X\u2500\u2524\n\n \"\"\"\n # Ensure that everything in the pipeline is a valid qfunc or tape transform\n if pipeline is None:\n pipeline = default_pipeline\n else:\n for p in pipeline:\n p_func = p.func if isinstance(p, partial) else p\n if not isinstance(p_func, TransformDispatcher):\n raise ValueError(\"Invalid transform function {p} passed to compile.\")\n\n if num_passes < 1 or not isinstance(num_passes, int):\n raise ValueError(\"Number of passes must be an integer with value at least 1.\")\n\n # Expand the tape; this is done to unroll any templates that may be present,\n # as well as to decompose over a specified basis set\n # First, though, we have to stop whatever tape may be recording so that we\n # don't queue anything as a result of the expansion or transform pipeline\n\n with QueuingManager.stop_recording():\n if basis_set is not None:\n expanded_tape = tape.expand(\n depth=expand_depth, stop_at=lambda obj: obj.name in basis_set\n )\n else:\n # Expands out anything that is not a single operation (i.e., the templates)\n # expand barriers when `only_visual=True`\n def stop_at(obj):\n return (obj.name in all_ops) and (not getattr(obj, \"only_visual\", False))\n\n expanded_tape = tape.expand(stop_at=stop_at)\n\n # Apply the full set of compilation transforms num_passes times\n for _ in range(num_passes):\n for transf in pipeline:\n tapes, _ = transf(expanded_tape)\n expanded_tape = tapes[0]\n\n new_tape = type(tape)(\n expanded_tape.operations, expanded_tape.measurements, shots=expanded_tape.shots\n )\n\n def null_postprocessing(results):\n \"\"\"A 
postprocesing function returned by a transform that only converts the batch of results\n into a result for a single ``QuantumTape``.\n \"\"\"\n return results[0]\n\n return [new_tape], null_postprocessing\n", "path": "pennylane/transforms/compile.py"}], "after_files": [{"content": "# Copyright 2018-2021 Xanadu Quantum Technologies Inc.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Code for the high-level quantum function transform that executes compilation.\"\"\"\n# pylint: disable=too-many-branches\nfrom functools import partial\nfrom typing import Sequence, Callable\n\nfrom pennylane.queuing import QueuingManager\nfrom pennylane.ops import __all__ as all_ops\nfrom pennylane.tape import QuantumTape\nfrom pennylane.transforms.core import transform, TransformDispatcher\nfrom pennylane.transforms.optimization import (\n cancel_inverses,\n commute_controlled,\n merge_rotations,\n remove_barrier,\n)\n\n\ndefault_pipeline = [commute_controlled, cancel_inverses, merge_rotations, remove_barrier]\n\n\n@transform\ndef compile(\n tape: QuantumTape, pipeline=None, basis_set=None, num_passes=1, expand_depth=5\n) -> (Sequence[QuantumTape], Callable):\n \"\"\"Compile a circuit by applying a series of transforms to a quantum function.\n\n The default set of transforms includes (in order):\n\n - pushing all commuting single-qubit gates as far right as possible\n (:func:`~pennylane.transforms.commute_controlled`)\n - cancellation of adjacent inverse gates\n (:func:`~pennylane.transforms.cancel_inverses`)\n - merging adjacent rotations of the same type\n (:func:`~pennylane.transforms.merge_rotations`)\n\n Args:\n tape (QNode or QuantumTape or Callable): A quantum circuit.\n pipeline (list[Callable]): A list of\n tape and/or quantum function transforms to apply.\n basis_set (list[str]): A list of basis gates. When expanding the tape,\n expansion will continue until gates in the specific set are\n reached. If no basis set is specified, a default of\n ``pennylane.ops.__all__`` will be used. This decomposes templates and\n operator arithmetic.\n num_passes (int): The number of times to apply the set of transforms in\n ``pipeline``. The default is to perform each transform once;\n however, doing so may produce a new circuit where applying the set\n of transforms again may yield further improvement, so the number of\n such passes can be adjusted.\n expand_depth (int): The depth to use for tape expansion into the basis gates.\n\n Returns:\n qnode (QNode) or quantum function (Callable) or tuple[List[QuantumTape], function]: The compiled circuit. The output type is explained in :func:`qml.transform <pennylane.transform>`.\n\n **Example**\n\n >>> dev = qml.device('default.qubit', wires=[0, 1, 2])\n\n You can apply the transform directly on a :class:`QNode`:\n\n .. 
code-block:: python\n\n @compile\n @qml.qnode(device=dev)\n def circuit(x, y, z):\n qml.Hadamard(wires=0)\n qml.Hadamard(wires=1)\n qml.Hadamard(wires=2)\n qml.RZ(z, wires=2)\n qml.CNOT(wires=[2, 1])\n qml.RX(z, wires=0)\n qml.CNOT(wires=[1, 0])\n qml.RX(x, wires=0)\n qml.CNOT(wires=[1, 0])\n qml.RZ(-z, wires=2)\n qml.RX(y, wires=2)\n qml.PauliY(wires=2)\n qml.CY(wires=[1, 2])\n return qml.expval(qml.PauliZ(wires=0))\n\n The default compilation pipeline is applied before execution.\n\n Consider the following quantum function:\n\n .. code-block:: python\n\n def qfunc(x, y, z):\n qml.Hadamard(wires=0)\n qml.Hadamard(wires=1)\n qml.Hadamard(wires=2)\n qml.RZ(z, wires=2)\n qml.CNOT(wires=[2, 1])\n qml.RX(z, wires=0)\n qml.CNOT(wires=[1, 0])\n qml.RX(x, wires=0)\n qml.CNOT(wires=[1, 0])\n qml.RZ(-z, wires=2)\n qml.RX(y, wires=2)\n qml.PauliY(wires=2)\n qml.CY(wires=[1, 2])\n return qml.expval(qml.PauliZ(wires=0))\n\n Visually, the original function looks like this:\n\n >>> qnode = qml.QNode(qfunc, dev)\n >>> print(qml.draw(qnode)(0.2, 0.3, 0.4))\n 0: \u2500\u2500H\u2500\u2500RX(0.40)\u2500\u2500\u2500\u2500\u256dX\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500RX(0.20)\u2500\u256dX\u2500\u2500\u2500\u2500\u2524 <Z>\n 1: \u2500\u2500H\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256dX\u2500\u2570\u25cf\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2570\u25cf\u2500\u256d\u25cf\u2500\u2524\n 2: \u2500\u2500H\u2500\u2500RZ(0.40)\u2500\u2570\u25cf\u2500\u2500RZ(-0.40)\u2500\u2500RX(0.30)\u2500\u2500Y\u2500\u2570Y\u2500\u2524\n\n We can compile it down to a smaller set of gates using the ``qml.compile``\n transform.\n\n >>> compiled_qfunc = qml.compile(qfunc)\n >>> compiled_qnode = qml.QNode(compiled_qfunc, dev)\n >>> print(qml.draw(compiled_qnode)(0.2, 0.3, 0.4))\n 0: \u2500\u2500H\u2500\u2500RX(0.60)\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2524 <Z>\n 1: \u2500\u2500H\u2500\u256dX\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256d\u25cf\u2500\u2500\u2500\u2500\u2524\n 2: \u2500\u2500H\u2500\u2570\u25cf\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500RX(0.30)\u2500\u2570Y\u2500\u2500Y\u2500\u2524\n\n You can change up the set of transforms by passing a custom ``pipeline`` to\n ``qml.compile``. The pipeline is a list of transform functions. Furthermore,\n you can specify a number of passes (repetitions of the pipeline), and a list\n of gates into which the compiler will first attempt to decompose the\n existing operations prior to applying any optimization transforms.\n\n .. code-block:: python3\n\n compiled_qfunc = qml.compile(\n pipeline=[\n partial(qml.transforms.commute_controlled, direction=\"left\"),\n partial(qml.transforms.merge_rotations, atol=1e-6),\n qml.transforms.cancel_inverses\n ],\n basis_set=[\"CNOT\", \"RX\", \"RY\", \"RZ\"],\n num_passes=2\n )(qfunc)\n\n compiled_qnode = qml.QNode(compiled_qfunc, dev)\n\n print(qml.draw(compiled_qnode)(0.2, 0.3, 0.4))\n\n .. 
code-block::\n\n 0: \u2500\u2500RZ(1.57)\u2500\u2500RX(1.57)\u2500\u2500RZ(1.57)\u2500\u2500RX(0.60)\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n 1: \u2500\u2500RZ(1.57)\u2500\u2500RX(1.57)\u2500\u2500RZ(1.57)\u2500\u256dX\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500RZ(1.57)\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256d\u25cf\n 2: \u2500\u2500RZ(1.57)\u2500\u2500RX(1.57)\u2500\u2500RZ(1.57)\u2500\u2570\u25cf\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500RX(0.30)\u2500\u2500RZ(1.57)\u2500\u2500RY(3.14)\u2500\u2500RZ(1.57)\u2500\u2500RY(1.57)\u2500\u2570X\n\n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2524 <Z>\n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256d\u25cf\u2500\u2524\n \u2500\u2500\u2500RY(-1.57)\u2500\u2570X\u2500\u2524\n\n \"\"\"\n # Ensure that everything in the pipeline is a valid qfunc or tape transform\n if pipeline is None:\n pipeline = default_pipeline\n else:\n for p in pipeline:\n p_func = p.func if isinstance(p, partial) else p\n if not isinstance(p_func, TransformDispatcher):\n raise ValueError(\"Invalid transform function {p} passed to compile.\")\n\n if num_passes < 1 or not isinstance(num_passes, int):\n raise ValueError(\"Number of passes must be an integer with value at least 1.\")\n\n # Expand the tape; this is done to unroll any templates that may be present,\n # as well as to decompose over a specified basis set\n # First, though, we have to stop whatever tape may be recording so that we\n # don't queue anything as a result of the expansion or transform pipeline\n\n with QueuingManager.stop_recording():\n basis_set = basis_set or all_ops\n\n def stop_at(obj):\n return obj.name in basis_set and (not getattr(obj, \"only_visual\", False))\n\n expanded_tape = tape.expand(depth=expand_depth, stop_at=stop_at)\n\n # Apply the full set of compilation transforms num_passes times\n for _ in range(num_passes):\n for transf in pipeline:\n tapes, _ = transf(expanded_tape)\n expanded_tape = tapes[0]\n\n new_tape = type(tape)(\n expanded_tape.operations, expanded_tape.measurements, shots=expanded_tape.shots\n )\n\n def null_postprocessing(results):\n \"\"\"A postprocesing function returned by a transform that only converts the batch of results\n into a result for a single ``QuantumTape``.\n \"\"\"\n return results[0]\n\n return [new_tape], null_postprocessing\n", "path": "pennylane/transforms/compile.py"}]}
| 3,770 | 572 |
gh_patches_debug_25501
|
rasdani/github-patches
|
git_diff
|
azavea__raster-vision-784
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Plugin configuration values cannot handle list of values
The plugin component of the configuration file is set up to be parsed as a JSON list. However, a multi-item list causes a parse exception when Everett parses the configuration.
E.g.:
```
[PLUGINS]
modules = [ "tests.test_plugin" ]
```
works, while
```
[PLUGINS]
modules = [ "tests.test_plugin", "tests.test_plugin" ]
```
does not, and errors with:
```
File "/opt/src/rastervision/registry.py", line 179, in initialize_config
verbosity=verbosity)
File "/opt/src/rastervision/rv_config.py", line 146, in __init__
ConfigIniEnv(config_file_locations),
File "/usr/local/lib/python3.5/dist-packages/everett/manager.py", line 602, in __init__
self.cfg.update(ConfigIniEnv.parse_ini_file(path))
File "/usr/local/lib/python3.5/dist-packages/everett/manager.py", line 606, in parse_ini_file
cfgobj = ConfigObj(path)
File "/usr/local/lib/python3.5/dist-packages/configobj.py", line 1229, in __init__
self._load(infile, configspec)
File "/usr/local/lib/python3.5/dist-packages/configobj.py", line 1318, in _load
raise error
File "<string>", line None
configobj.ParseError: Parse error in value at line 2.
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `rastervision/plugin.py`
Content:
```
1 import os
2 import json
3 import importlib
4
5 from pluginbase import PluginBase
6
7 import rastervision as rv
8 from rastervision.protos.plugin_pb2 import PluginConfig as PluginConfigMsg
9 from rastervision.utils.files import download_if_needed
10
11
12 class PluginError(Exception):
13 pass
14
15
16 class PluginRegistry:
17 @staticmethod
18 def get_instance():
19 return rv._registry._get_plugin_registry()
20
21 def __init__(self, plugin_config, rv_home):
22 """Initializes this plugin registry.
23
24 A plugin registry is passed to plugins in a call
25 to their "register_plugin" method.
26
27 Args:
28 plugin_config - the everett ConfigManager for the plugin
29 section of the application configuration.
30 """
31 self.plugin_root_dir = os.path.join(rv_home, 'plugins')
32 self.config_builders = {}
33 self.default_raster_sources = []
34 self.default_vector_sources = []
35 self.default_label_sources = []
36 self.default_label_stores = []
37 self.default_evaluators = []
38 self.experiment_runners = {}
39 self.filesystems = []
40
41 plugin_files = json.loads(plugin_config('files', default='[]'))
42 self._load_from_files(plugin_files)
43 self.plugin_files = plugin_files
44
45 plugin_modules = json.loads(plugin_config('modules', default='[]'))
46 self._load_from_modules(plugin_modules)
47 self.plugin_modules = plugin_modules
48
49 def _load_plugin(self, plugin, identifier):
50 # Check the plugin is valid
51 if not hasattr(plugin, 'register_plugin'):
52 raise PluginError('Plugin at {} does not have '
53 '"register_plugin" method.'.format(identifier))
54
55 register_method = getattr(plugin, 'register_plugin')
56 if not callable(register_method):
57 raise PluginError('Plugin at {} has a '
58 '"register_plugin" attribute, '
59 'but it is not callable'.format(identifier))
60
61 # TODO: Log loading plugin.
62 register_method(self)
63
64 def _load_from_files(self, plugin_paths):
65 if not plugin_paths:
66 return
67
68 self.plugin_sources = []
69
70 plugin_base = PluginBase(package='rastervision.plugins')
71 for uri in plugin_paths:
72 plugin_name = os.path.splitext(os.path.basename(uri))[0]
73 plugin_path = os.path.join(self.plugin_root_dir, plugin_name)
74 fs = rv._registry.get_file_system(uri, search_plugins=False)
75 local_path = download_if_needed(uri, plugin_path, fs=fs)
76 local_dir = os.path.dirname(local_path)
77
78 plugin_source = plugin_base.make_plugin_source(
79 searchpath=[local_dir])
80
81 # We're required to hang onto the source
82 # to keep it from getting GC'd.
83 self.plugin_sources.append(plugin_source)
84
85 self._load_plugin(plugin_source.load_plugin(plugin_name), uri)
86
87 def _load_from_modules(self, plugin_modules):
88 if not plugin_modules:
89 return
90
91 for module in plugin_modules:
92 plugin = importlib.import_module(module)
93 self._load_plugin(plugin, module)
94
95 def add_plugins_from_proto(self, plugin_msg):
96 new_plugin_files = list(
97 set(plugin_msg.plugin_uris) - set(self.plugin_files))
98 self._load_from_files(new_plugin_files)
99 self.plugin_files.extend(new_plugin_files)
100
101 new_plugin_modules = list(
102 set(plugin_msg.plugin_modules) - set(self.plugin_modules))
103 self._load_from_modules(new_plugin_modules)
104 self.plugin_modules.extend(new_plugin_modules)
105
106 def to_proto(self):
107 """Returns a protobuf message that records the
108 plugin sources for plugins that are currently loaded
109 in the registry.
110 """
111 return PluginConfigMsg(
112 plugin_uris=self.plugin_files, plugin_modules=self.plugin_modules)
113
114 def register_config_builder(self, group, key, builder_class):
115 """Registers a ConfigBuilder as a plugin.
116
117 Args:
118 group - The Config group, e.g. rv.BACKEND, rv.TASK.
119 key - The key used for this plugin. This will be used to
120 construct the builder in a ".builder(key)" call.
121 builder_class - The subclass of ConfigBuilder that builds
122 the Config for this plugin.
123 """
124 if (group, key) in self.config_builders:
125 raise PluginError('ConfigBuilder already registered for group '
126 '{} and key {}'.format(group, key))
127 self.config_builders[(group, key)] = builder_class
128
129 def register_default_raster_source(self, provider_class):
130 """Registers a RasterSourceDefaultProvider for use as a plugin."""
131
132 self.default_raster_sources.append(provider_class)
133
134 def register_default_vector_source(self, provider_class):
135 """Registers a VectorSourceDefaultProvider for use as a plugin."""
136 self.default_vector_sources.append(provider_class)
137
138 def register_default_label_source(self, provider_class):
139 """Registers a LabelSourceDefaultProvider for use as a plugin."""
140 self.default_label_sources.append(provider_class)
141
142 def register_default_label_store(self, provider_class):
143 """Registers a LabelStoreDefaultProvider for use as a plugin."""
144 self.default_label_stores.append(provider_class)
145
146 def register_default_evaluator(self, provider_class):
147 """Registers an EvaluatorDefaultProvider for use as a plugin."""
148 self.default_evaluators.append(provider_class)
149
150 def register_experiment_runner(self, runner_key, runner_class):
151 """Registers an ExperimentRunner as a plugin.
152
153 Args:
154 runner_key - The key used to reference this plugin runner.
155 This is a string that will match the command line
156 argument used to reference this runner; e.g. if the
157 key is "FOO_RUNNER", then users can use the runner
158 by issuing a "rastervision run foo_runner ..." command.
159 runner_class - The class of the ExperimentRunner plugin.
160 """
161 if runner_key in self.experiment_runners:
162 raise PluginError('ExperimentRunner already registered for '
163 'key {}'.format(runner_key))
164 self.experiment_runners[runner_key] = runner_class
165
166 def register_filesystem(self, filesystem_class):
167 """Registers a FileSystem as a plugin."""
168 self.filesystems.append(filesystem_class)
169
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/rastervision/plugin.py b/rastervision/plugin.py
--- a/rastervision/plugin.py
+++ b/rastervision/plugin.py
@@ -13,6 +13,24 @@
pass
+def load_conf_list(s):
+ """Loads a list of items from the config.
+
+ Lists should be comma separated.
+
+ This takes into account that previous versions of Raster Vision
+ allowed for a `[ "module" ]` like syntax, even though that didn't
+ work for multi-value lists.
+ """
+ try:
+ # A comma separated list of values will be transformed to
+ # having a list-like string, with ' instead of ". Replacing
+ # single quotes with double quotes lets us parse it as a JSON list.
+ return json.loads(s.replace("'", '"'))
+ except json.JSONDecodeError:
+ return list(map(lambda x: x.strip(), s.split(',')))
+
+
class PluginRegistry:
@staticmethod
def get_instance():
@@ -38,11 +56,11 @@
self.experiment_runners = {}
self.filesystems = []
- plugin_files = json.loads(plugin_config('files', default='[]'))
+ plugin_files = load_conf_list(plugin_config('files', default='[]'))
self._load_from_files(plugin_files)
self.plugin_files = plugin_files
- plugin_modules = json.loads(plugin_config('modules', default='[]'))
+ plugin_modules = load_conf_list(plugin_config('modules', default='[]'))
self._load_from_modules(plugin_modules)
self.plugin_modules = plugin_modules
|
{"golden_diff": "diff --git a/rastervision/plugin.py b/rastervision/plugin.py\n--- a/rastervision/plugin.py\n+++ b/rastervision/plugin.py\n@@ -13,6 +13,24 @@\n pass\n \n \n+def load_conf_list(s):\n+ \"\"\"Loads a list of items from the config.\n+\n+ Lists should be comma separated.\n+\n+ This takes into account that previous versions of Raster Vision\n+ allowed for a `[ \"module\" ]` like syntax, even though that didn't\n+ work for multi-value lists.\n+ \"\"\"\n+ try:\n+ # A comma separated list of values will be transformed to\n+ # having a list-like string, with ' instead of \". Replacing\n+ # single quotes with double quotes lets us parse it as a JSON list.\n+ return json.loads(s.replace(\"'\", '\"'))\n+ except json.JSONDecodeError:\n+ return list(map(lambda x: x.strip(), s.split(',')))\n+\n+\n class PluginRegistry:\n @staticmethod\n def get_instance():\n@@ -38,11 +56,11 @@\n self.experiment_runners = {}\n self.filesystems = []\n \n- plugin_files = json.loads(plugin_config('files', default='[]'))\n+ plugin_files = load_conf_list(plugin_config('files', default='[]'))\n self._load_from_files(plugin_files)\n self.plugin_files = plugin_files\n \n- plugin_modules = json.loads(plugin_config('modules', default='[]'))\n+ plugin_modules = load_conf_list(plugin_config('modules', default='[]'))\n self._load_from_modules(plugin_modules)\n self.plugin_modules = plugin_modules\n", "issue": "Plugin configuration values cannot handle list of values\nThe plugin component of the configuration file is set up to be parsed as a JSON list. However, a multi-item list causes a parse exception when Everett parses the configuration.\r\n\r\nE.g.:\r\n\r\n```\r\n[PLUGINS]\r\nmodules = [ \"tests.test_plugin\" ]\r\n```\r\n\r\nworks, while\r\n\r\n```\r\n[PLUGINS]\r\nmodules = [ \"tests.test_plugin\", \"tests.test_plugin\" ]\r\n```\r\n\r\ndoes not, and errors with:\r\n\r\n```\r\n File \"/opt/src/rastervision/registry.py\", line 179, in initialize_config\r\n verbosity=verbosity)\r\n File \"/opt/src/rastervision/rv_config.py\", line 146, in __init__\r\n ConfigIniEnv(config_file_locations),\r\n File \"/usr/local/lib/python3.5/dist-packages/everett/manager.py\", line 602, in __init__\r\n self.cfg.update(ConfigIniEnv.parse_ini_file(path))\r\n File \"/usr/local/lib/python3.5/dist-packages/everett/manager.py\", line 606, in parse_ini_file\r\n cfgobj = ConfigObj(path)\r\n File \"/usr/local/lib/python3.5/dist-packages/configobj.py\", line 1229, in __init__\r\n self._load(infile, configspec)\r\n File \"/usr/local/lib/python3.5/dist-packages/configobj.py\", line 1318, in _load\r\n raise error\r\n File \"<string>\", line None\r\nconfigobj.ParseError: Parse error in value at line 2.\r\n```\n", "before_files": [{"content": "import os\nimport json\nimport importlib\n\nfrom pluginbase import PluginBase\n\nimport rastervision as rv\nfrom rastervision.protos.plugin_pb2 import PluginConfig as PluginConfigMsg\nfrom rastervision.utils.files import download_if_needed\n\n\nclass PluginError(Exception):\n pass\n\n\nclass PluginRegistry:\n @staticmethod\n def get_instance():\n return rv._registry._get_plugin_registry()\n\n def __init__(self, plugin_config, rv_home):\n \"\"\"Initializes this plugin registry.\n\n A plugin registry is passed to plugins in a call\n to their \"register_plugin\" method.\n\n Args:\n plugin_config - the everett ConfigManager for the plugin\n section of the application configuration.\n \"\"\"\n self.plugin_root_dir = os.path.join(rv_home, 'plugins')\n self.config_builders = {}\n self.default_raster_sources = []\n 
self.default_vector_sources = []\n self.default_label_sources = []\n self.default_label_stores = []\n self.default_evaluators = []\n self.experiment_runners = {}\n self.filesystems = []\n\n plugin_files = json.loads(plugin_config('files', default='[]'))\n self._load_from_files(plugin_files)\n self.plugin_files = plugin_files\n\n plugin_modules = json.loads(plugin_config('modules', default='[]'))\n self._load_from_modules(plugin_modules)\n self.plugin_modules = plugin_modules\n\n def _load_plugin(self, plugin, identifier):\n # Check the plugin is valid\n if not hasattr(plugin, 'register_plugin'):\n raise PluginError('Plugin at {} does not have '\n '\"register_plugin\" method.'.format(identifier))\n\n register_method = getattr(plugin, 'register_plugin')\n if not callable(register_method):\n raise PluginError('Plugin at {} has a '\n '\"register_plugin\" attribute, '\n 'but it is not callable'.format(identifier))\n\n # TODO: Log loading plugin.\n register_method(self)\n\n def _load_from_files(self, plugin_paths):\n if not plugin_paths:\n return\n\n self.plugin_sources = []\n\n plugin_base = PluginBase(package='rastervision.plugins')\n for uri in plugin_paths:\n plugin_name = os.path.splitext(os.path.basename(uri))[0]\n plugin_path = os.path.join(self.plugin_root_dir, plugin_name)\n fs = rv._registry.get_file_system(uri, search_plugins=False)\n local_path = download_if_needed(uri, plugin_path, fs=fs)\n local_dir = os.path.dirname(local_path)\n\n plugin_source = plugin_base.make_plugin_source(\n searchpath=[local_dir])\n\n # We're required to hang onto the source\n # to keep it from getting GC'd.\n self.plugin_sources.append(plugin_source)\n\n self._load_plugin(plugin_source.load_plugin(plugin_name), uri)\n\n def _load_from_modules(self, plugin_modules):\n if not plugin_modules:\n return\n\n for module in plugin_modules:\n plugin = importlib.import_module(module)\n self._load_plugin(plugin, module)\n\n def add_plugins_from_proto(self, plugin_msg):\n new_plugin_files = list(\n set(plugin_msg.plugin_uris) - set(self.plugin_files))\n self._load_from_files(new_plugin_files)\n self.plugin_files.extend(new_plugin_files)\n\n new_plugin_modules = list(\n set(plugin_msg.plugin_modules) - set(self.plugin_modules))\n self._load_from_modules(new_plugin_modules)\n self.plugin_modules.extend(new_plugin_modules)\n\n def to_proto(self):\n \"\"\"Returns a protobuf message that records the\n plugin sources for plugins that are currently loaded\n in the registry.\n \"\"\"\n return PluginConfigMsg(\n plugin_uris=self.plugin_files, plugin_modules=self.plugin_modules)\n\n def register_config_builder(self, group, key, builder_class):\n \"\"\"Registers a ConfigBuilder as a plugin.\n\n Args:\n group - The Config group, e.g. rv.BACKEND, rv.TASK.\n key - The key used for this plugin. 
This will be used to\n construct the builder in a \".builder(key)\" call.\n builder_class - The subclass of ConfigBuilder that builds\n the Config for this plugin.\n \"\"\"\n if (group, key) in self.config_builders:\n raise PluginError('ConfigBuilder already registered for group '\n '{} and key {}'.format(group, key))\n self.config_builders[(group, key)] = builder_class\n\n def register_default_raster_source(self, provider_class):\n \"\"\"Registers a RasterSourceDefaultProvider for use as a plugin.\"\"\"\n\n self.default_raster_sources.append(provider_class)\n\n def register_default_vector_source(self, provider_class):\n \"\"\"Registers a VectorSourceDefaultProvider for use as a plugin.\"\"\"\n self.default_vector_sources.append(provider_class)\n\n def register_default_label_source(self, provider_class):\n \"\"\"Registers a LabelSourceDefaultProvider for use as a plugin.\"\"\"\n self.default_label_sources.append(provider_class)\n\n def register_default_label_store(self, provider_class):\n \"\"\"Registers a LabelStoreDefaultProvider for use as a plugin.\"\"\"\n self.default_label_stores.append(provider_class)\n\n def register_default_evaluator(self, provider_class):\n \"\"\"Registers an EvaluatorDefaultProvider for use as a plugin.\"\"\"\n self.default_evaluators.append(provider_class)\n\n def register_experiment_runner(self, runner_key, runner_class):\n \"\"\"Registers an ExperimentRunner as a plugin.\n\n Args:\n runner_key - The key used to reference this plugin runner.\n This is a string that will match the command line\n argument used to reference this runner; e.g. if the\n key is \"FOO_RUNNER\", then users can use the runner\n by issuing a \"rastervision run foo_runner ...\" command.\n runner_class - The class of the ExperimentRunner plugin.\n \"\"\"\n if runner_key in self.experiment_runners:\n raise PluginError('ExperimentRunner already registered for '\n 'key {}'.format(runner_key))\n self.experiment_runners[runner_key] = runner_class\n\n def register_filesystem(self, filesystem_class):\n \"\"\"Registers a FileSystem as a plugin.\"\"\"\n self.filesystems.append(filesystem_class)\n", "path": "rastervision/plugin.py"}], "after_files": [{"content": "import os\nimport json\nimport importlib\n\nfrom pluginbase import PluginBase\n\nimport rastervision as rv\nfrom rastervision.protos.plugin_pb2 import PluginConfig as PluginConfigMsg\nfrom rastervision.utils.files import download_if_needed\n\n\nclass PluginError(Exception):\n pass\n\n\ndef load_conf_list(s):\n \"\"\"Loads a list of items from the config.\n\n Lists should be comma separated.\n\n This takes into account that previous versions of Raster Vision\n allowed for a `[ \"module\" ]` like syntax, even though that didn't\n work for multi-value lists.\n \"\"\"\n try:\n # A comma separated list of values will be transformed to\n # having a list-like string, with ' instead of \". 
Replacing\n # single quotes with double quotes lets us parse it as a JSON list.\n return json.loads(s.replace(\"'\", '\"'))\n except json.JSONDecodeError:\n return list(map(lambda x: x.strip(), s.split(',')))\n\n\nclass PluginRegistry:\n @staticmethod\n def get_instance():\n return rv._registry._get_plugin_registry()\n\n def __init__(self, plugin_config, rv_home):\n \"\"\"Initializes this plugin registry.\n\n A plugin registry is passed to plugins in a call\n to their \"register_plugin\" method.\n\n Args:\n plugin_config - the everett ConfigManager for the plugin\n section of the application configuration.\n \"\"\"\n self.plugin_root_dir = os.path.join(rv_home, 'plugins')\n self.config_builders = {}\n self.default_raster_sources = []\n self.default_vector_sources = []\n self.default_label_sources = []\n self.default_label_stores = []\n self.default_evaluators = []\n self.experiment_runners = {}\n self.filesystems = []\n\n plugin_files = load_conf_list(plugin_config('files', default='[]'))\n self._load_from_files(plugin_files)\n self.plugin_files = plugin_files\n\n plugin_modules = load_conf_list(plugin_config('modules', default='[]'))\n self._load_from_modules(plugin_modules)\n self.plugin_modules = plugin_modules\n\n def _load_plugin(self, plugin, identifier):\n # Check the plugin is valid\n if not hasattr(plugin, 'register_plugin'):\n raise PluginError('Plugin at {} does not have '\n '\"register_plugin\" method.'.format(identifier))\n\n register_method = getattr(plugin, 'register_plugin')\n if not callable(register_method):\n raise PluginError('Plugin at {} has a '\n '\"register_plugin\" attribute, '\n 'but it is not callable'.format(identifier))\n\n # TODO: Log loading plugin.\n register_method(self)\n\n def _load_from_files(self, plugin_paths):\n if not plugin_paths:\n return\n\n self.plugin_sources = []\n\n plugin_base = PluginBase(package='rastervision.plugins')\n for uri in plugin_paths:\n plugin_name = os.path.splitext(os.path.basename(uri))[0]\n plugin_path = os.path.join(self.plugin_root_dir, plugin_name)\n fs = rv._registry.get_file_system(uri, search_plugins=False)\n local_path = download_if_needed(uri, plugin_path, fs=fs)\n local_dir = os.path.dirname(local_path)\n\n plugin_source = plugin_base.make_plugin_source(\n searchpath=[local_dir])\n\n # We're required to hang onto the source\n # to keep it from getting GC'd.\n self.plugin_sources.append(plugin_source)\n\n self._load_plugin(plugin_source.load_plugin(plugin_name), uri)\n\n def _load_from_modules(self, plugin_modules):\n if not plugin_modules:\n return\n\n for module in plugin_modules:\n plugin = importlib.import_module(module)\n self._load_plugin(plugin, module)\n\n def add_plugins_from_proto(self, plugin_msg):\n new_plugin_files = list(\n set(plugin_msg.plugin_uris) - set(self.plugin_files))\n self._load_from_files(new_plugin_files)\n self.plugin_files.extend(new_plugin_files)\n\n new_plugin_modules = list(\n set(plugin_msg.plugin_modules) - set(self.plugin_modules))\n self._load_from_modules(new_plugin_modules)\n self.plugin_modules.extend(new_plugin_modules)\n\n def to_proto(self):\n \"\"\"Returns a protobuf message that records the\n plugin sources for plugins that are currently loaded\n in the registry.\n \"\"\"\n return PluginConfigMsg(\n plugin_uris=self.plugin_files, plugin_modules=self.plugin_modules)\n\n def register_config_builder(self, group, key, builder_class):\n \"\"\"Registers a ConfigBuilder as a plugin.\n\n Args:\n group - The Config group, e.g. 
rv.BACKEND, rv.TASK.\n key - The key used for this plugin. This will be used to\n construct the builder in a \".builder(key)\" call.\n builder_class - The subclass of ConfigBuilder that builds\n the Config for this plugin.\n \"\"\"\n if (group, key) in self.config_builders:\n raise PluginError('ConfigBuilder already registered for group '\n '{} and key {}'.format(group, key))\n self.config_builders[(group, key)] = builder_class\n\n def register_default_raster_source(self, provider_class):\n \"\"\"Registers a RasterSourceDefaultProvider for use as a plugin.\"\"\"\n\n self.default_raster_sources.append(provider_class)\n\n def register_default_vector_source(self, provider_class):\n \"\"\"Registers a VectorSourceDefaultProvider for use as a plugin.\"\"\"\n self.default_vector_sources.append(provider_class)\n\n def register_default_label_source(self, provider_class):\n \"\"\"Registers a LabelSourceDefaultProvider for use as a plugin.\"\"\"\n self.default_label_sources.append(provider_class)\n\n def register_default_label_store(self, provider_class):\n \"\"\"Registers a LabelStoreDefaultProvider for use as a plugin.\"\"\"\n self.default_label_stores.append(provider_class)\n\n def register_default_evaluator(self, provider_class):\n \"\"\"Registers an EvaluatorDefaultProvider for use as a plugin.\"\"\"\n self.default_evaluators.append(provider_class)\n\n def register_experiment_runner(self, runner_key, runner_class):\n \"\"\"Registers an ExperimentRunner as a plugin.\n\n Args:\n runner_key - The key used to reference this plugin runner.\n This is a string that will match the command line\n argument used to reference this runner; e.g. if the\n key is \"FOO_RUNNER\", then users can use the runner\n by issuing a \"rastervision run foo_runner ...\" command.\n runner_class - The class of the ExperimentRunner plugin.\n \"\"\"\n if runner_key in self.experiment_runners:\n raise PluginError('ExperimentRunner already registered for '\n 'key {}'.format(runner_key))\n self.experiment_runners[runner_key] = runner_class\n\n def register_filesystem(self, filesystem_class):\n \"\"\"Registers a FileSystem as a plugin.\"\"\"\n self.filesystems.append(filesystem_class)\n", "path": "rastervision/plugin.py"}]}
| 2,294 | 356 |
gh_patches_debug_14093
|
rasdani/github-patches
|
git_diff
|
Kinto__kinto-1925
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove deprecation warning: Resource `mapping` is deprecated, use `schema`
```
tests/core/resource/test_viewset.py::ViewSetTest::test_a_default_schema_is_added_when_method_doesnt_match
/home/mathieu/Code/Mozilla/kinto/kinto/core/resource/viewset.py:160: DeprecationWarning: Resource `mapping` is deprecated, use `schema`
warnings.warn(message, DeprecationWarning)
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kinto/core/resource/viewset.py`
Content:
```
1 import functools
2 import warnings
3
4 import colander
5 from cornice.validators import colander_validator
6 from pyramid.settings import asbool
7
8 from kinto.core import authorization
9
10 from .schema import (
11 PermissionsSchema,
12 RequestSchema,
13 PayloadRequestSchema,
14 PatchHeaderSchema,
15 CollectionQuerySchema,
16 CollectionGetQuerySchema,
17 RecordGetQuerySchema,
18 RecordSchema,
19 ResourceReponses,
20 ShareableResourseResponses,
21 )
22
23
24 CONTENT_TYPES = ["application/json"]
25
26 PATCH_CONTENT_TYPES = ["application/merge-patch+json"]
27
28
29 class StrictSchema(colander.MappingSchema):
30 @staticmethod
31 def schema_type():
32 return colander.Mapping(unknown="raise")
33
34
35 class PartialSchema(colander.MappingSchema):
36 @staticmethod
37 def schema_type():
38 return colander.Mapping(unknown="ignore")
39
40
41 class SimpleSchema(colander.MappingSchema):
42 @staticmethod
43 def schema_type():
44 return colander.Mapping(unknown="preserve")
45
46
47 class ViewSet:
48 """The default ViewSet object.
49
50 A viewset contains all the information needed to register
51 any resource in the Cornice registry.
52
53 It provides the same features as ``cornice.resource()``, except
54 that it is much more flexible and extensible.
55 """
56
57 service_name = "{resource_name}-{endpoint_type}"
58 collection_path = "/{resource_name}s"
59 record_path = "/{resource_name}s/{{id}}"
60
61 collection_methods = ("GET", "POST", "DELETE")
62 record_methods = ("GET", "PUT", "PATCH", "DELETE")
63
64 readonly_methods = ("GET", "OPTIONS", "HEAD")
65
66 factory = authorization.RouteFactory
67
68 responses = ResourceReponses()
69
70 service_arguments = {"description": "Collection of {resource_name}"}
71
72 default_arguments = {
73 "permission": authorization.PRIVATE,
74 "accept": CONTENT_TYPES,
75 "schema": RequestSchema(),
76 }
77
78 default_post_arguments = {"content_type": CONTENT_TYPES, "schema": PayloadRequestSchema()}
79
80 default_put_arguments = {"content_type": CONTENT_TYPES, "schema": PayloadRequestSchema()}
81
82 default_patch_arguments = {
83 "content_type": CONTENT_TYPES + PATCH_CONTENT_TYPES,
84 "schema": PayloadRequestSchema().bind(header=PatchHeaderSchema()),
85 }
86
87 default_collection_arguments = {
88 "schema": RequestSchema().bind(querystring=CollectionQuerySchema())
89 }
90 collection_get_arguments = {
91 "schema": RequestSchema().bind(querystring=CollectionGetQuerySchema()),
92 "cors_headers": (
93 "Next-Page",
94 "Total-Records",
95 "Last-Modified",
96 "ETag",
97 "Cache-Control",
98 "Expires",
99 "Pragma",
100 ),
101 }
102 collection_post_arguments = {"schema": PayloadRequestSchema()}
103 default_record_arguments = {}
104 record_get_arguments = {
105 "schema": RequestSchema().bind(querystring=RecordGetQuerySchema()),
106 "cors_headers": ("Last-Modified", "ETag", "Cache-Control", "Expires", "Pragma"),
107 }
108
109 def __init__(self, **kwargs):
110 self.update(**kwargs)
111 self.record_arguments = functools.partial(self.get_view_arguments, "record")
112 self.collection_arguments = functools.partial(self.get_view_arguments, "collection")
113
114 def update(self, **kwargs):
115 """Update viewset attributes with provided values."""
116 self.__dict__.update(**kwargs)
117
118 def get_view_arguments(self, endpoint_type, resource_cls, method):
119 """Return the Pyramid/Cornice view arguments for the given endpoint
120 type and method.
121
122 :param str endpoint_type: either "collection" or "record".
123 :param resource_cls: the resource class.
124 :param str method: the HTTP method.
125 """
126 args = {**self.default_arguments}
127 default_arguments = getattr(self, f"default_{endpoint_type}_arguments")
128 args.update(**default_arguments)
129
130 by_http_verb = f"default_{method.lower()}_arguments"
131 method_args = getattr(self, by_http_verb, {})
132 args.update(**method_args)
133
134 by_method = f"{endpoint_type}_{method.lower()}_arguments"
135 endpoint_args = getattr(self, by_method, {})
136 args.update(**endpoint_args)
137
138 request_schema = args.get("schema", RequestSchema())
139 record_schema = self.get_record_schema(resource_cls, method)
140 request_schema = request_schema.bind(body=record_schema)
141 response_schemas = self.responses.get_and_bind(endpoint_type, method, record=record_schema)
142
143 args["schema"] = request_schema
144 args["response_schemas"] = response_schemas
145
146 validators = args.get("validators", [])
147 validators.append(colander_validator)
148 args["validators"] = validators
149
150 return args
151
152 def get_record_schema(self, resource_cls, method):
153 """Return the Cornice schema for the given method.
154 """
155 if method.lower() in ("patch", "delete"):
156 resource_schema = SimpleSchema
157 else:
158 resource_schema = resource_cls.schema
159 if hasattr(resource_cls, "mapping"):
160 message = "Resource `mapping` is deprecated, use `schema`"
161 warnings.warn(message, DeprecationWarning)
162 resource_schema = resource_cls.mapping.__class__
163
164 record_schema = RecordSchema().bind(data=resource_schema())
165
166 return record_schema
167
168 def get_view(self, endpoint_type, method):
169 """Return the view method name located on the resource object, for the
170 given type and method.
171
172 * For collections, this will be "collection_{method|lower}
173 * For records, this will be "{method|lower}.
174 """
175 if endpoint_type == "record":
176 return method.lower()
177 return f"{endpoint_type}_{method.lower()}"
178
179 def get_name(self, resource_cls):
180 """Returns the name of the resource.
181 """
182 # Provided on viewset during registration.
183 if "name" in self.__dict__:
184 return self.__dict__["name"]
185
186 # Attribute on resource class (but not @property)
187 has_class_attr = hasattr(resource_cls, "name") and not callable(resource_cls.name)
188 if has_class_attr:
189 return resource_cls.name
190
191 # Use classname
192 return resource_cls.__name__.lower()
193
194 def get_service_name(self, endpoint_type, resource_cls):
195 """Returns the name of the service, depending a given type and
196 resource.
197 """
198 return self.service_name.format(
199 resource_name=self.get_name(resource_cls), endpoint_type=endpoint_type
200 )
201
202 def get_service_arguments(self):
203 return {**self.service_arguments}
204
205 def is_endpoint_enabled(self, endpoint_type, resource_name, method, settings):
206 """Returns if the given endpoint is enabled or not.
207
208 Uses the settings to tell so.
209 """
210 readonly_enabled = asbool(settings.get("readonly"))
211 readonly_method = method.lower() in [m.lower() for m in self.readonly_methods]
212 if readonly_enabled and not readonly_method:
213 return False
214
215 setting_enabled = f"{endpoint_type}_{resource_name}_{method.lower()}_enabled"
216 return asbool(settings.get(setting_enabled, True))
217
218
219 class ShareableViewSet(ViewSet):
220 """A ShareableViewSet will register the given resource with a schema
221 that supports permissions.
222
223 The views will rely on dynamic permissions (e.g. create with PUT if
224 record does not exist), and solicit the cliquet RouteFactory.
225 """
226
227 responses = ShareableResourseResponses()
228
229 def get_record_schema(self, resource_cls, method):
230 """Return the Cornice schema for the given method.
231 """
232 record_schema = super(ShareableViewSet, self).get_record_schema(resource_cls, method)
233 allowed_permissions = resource_cls.permissions
234 permissions = PermissionsSchema(
235 name="permissions", missing=colander.drop, permissions=allowed_permissions
236 )
237 record_schema = record_schema.bind(permissions=permissions)
238 return record_schema
239
240 def get_view_arguments(self, endpoint_type, resource_cls, method):
241 args = super().get_view_arguments(endpoint_type, resource_cls, method)
242 args["permission"] = authorization.DYNAMIC
243 return args
244
245 def get_service_arguments(self):
246 args = super().get_service_arguments()
247 args["factory"] = self.factory
248 return args
249
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/kinto/core/resource/viewset.py b/kinto/core/resource/viewset.py
--- a/kinto/core/resource/viewset.py
+++ b/kinto/core/resource/viewset.py
@@ -1,5 +1,4 @@
import functools
-import warnings
import colander
from cornice.validators import colander_validator
@@ -156,10 +155,6 @@
resource_schema = SimpleSchema
else:
resource_schema = resource_cls.schema
- if hasattr(resource_cls, "mapping"):
- message = "Resource `mapping` is deprecated, use `schema`"
- warnings.warn(message, DeprecationWarning)
- resource_schema = resource_cls.mapping.__class__
record_schema = RecordSchema().bind(data=resource_schema())
|
{"golden_diff": "diff --git a/kinto/core/resource/viewset.py b/kinto/core/resource/viewset.py\n--- a/kinto/core/resource/viewset.py\n+++ b/kinto/core/resource/viewset.py\n@@ -1,5 +1,4 @@\n import functools\n-import warnings\n \n import colander\n from cornice.validators import colander_validator\n@@ -156,10 +155,6 @@\n resource_schema = SimpleSchema\n else:\n resource_schema = resource_cls.schema\n- if hasattr(resource_cls, \"mapping\"):\n- message = \"Resource `mapping` is deprecated, use `schema`\"\n- warnings.warn(message, DeprecationWarning)\n- resource_schema = resource_cls.mapping.__class__\n \n record_schema = RecordSchema().bind(data=resource_schema())\n", "issue": "Remove deprecation warning: Resource `mapping` is deprecated, use `schema`\n```\r\ntests/core/resource/test_viewset.py::ViewSetTest::test_a_default_schema_is_added_when_method_doesnt_match\r\n /home/mathieu/Code/Mozilla/kinto/kinto/core/resource/viewset.py:160: DeprecationWarning: Resource `mapping` is deprecated, use `schema`\r\n warnings.warn(message, DeprecationWarning)\r\n```\r\n\n", "before_files": [{"content": "import functools\nimport warnings\n\nimport colander\nfrom cornice.validators import colander_validator\nfrom pyramid.settings import asbool\n\nfrom kinto.core import authorization\n\nfrom .schema import (\n PermissionsSchema,\n RequestSchema,\n PayloadRequestSchema,\n PatchHeaderSchema,\n CollectionQuerySchema,\n CollectionGetQuerySchema,\n RecordGetQuerySchema,\n RecordSchema,\n ResourceReponses,\n ShareableResourseResponses,\n)\n\n\nCONTENT_TYPES = [\"application/json\"]\n\nPATCH_CONTENT_TYPES = [\"application/merge-patch+json\"]\n\n\nclass StrictSchema(colander.MappingSchema):\n @staticmethod\n def schema_type():\n return colander.Mapping(unknown=\"raise\")\n\n\nclass PartialSchema(colander.MappingSchema):\n @staticmethod\n def schema_type():\n return colander.Mapping(unknown=\"ignore\")\n\n\nclass SimpleSchema(colander.MappingSchema):\n @staticmethod\n def schema_type():\n return colander.Mapping(unknown=\"preserve\")\n\n\nclass ViewSet:\n \"\"\"The default ViewSet object.\n\n A viewset contains all the information needed to register\n any resource in the Cornice registry.\n\n It provides the same features as ``cornice.resource()``, except\n that it is much more flexible and extensible.\n \"\"\"\n\n service_name = \"{resource_name}-{endpoint_type}\"\n collection_path = \"/{resource_name}s\"\n record_path = \"/{resource_name}s/{{id}}\"\n\n collection_methods = (\"GET\", \"POST\", \"DELETE\")\n record_methods = (\"GET\", \"PUT\", \"PATCH\", \"DELETE\")\n\n readonly_methods = (\"GET\", \"OPTIONS\", \"HEAD\")\n\n factory = authorization.RouteFactory\n\n responses = ResourceReponses()\n\n service_arguments = {\"description\": \"Collection of {resource_name}\"}\n\n default_arguments = {\n \"permission\": authorization.PRIVATE,\n \"accept\": CONTENT_TYPES,\n \"schema\": RequestSchema(),\n }\n\n default_post_arguments = {\"content_type\": CONTENT_TYPES, \"schema\": PayloadRequestSchema()}\n\n default_put_arguments = {\"content_type\": CONTENT_TYPES, \"schema\": PayloadRequestSchema()}\n\n default_patch_arguments = {\n \"content_type\": CONTENT_TYPES + PATCH_CONTENT_TYPES,\n \"schema\": PayloadRequestSchema().bind(header=PatchHeaderSchema()),\n }\n\n default_collection_arguments = {\n \"schema\": RequestSchema().bind(querystring=CollectionQuerySchema())\n }\n collection_get_arguments = {\n \"schema\": RequestSchema().bind(querystring=CollectionGetQuerySchema()),\n \"cors_headers\": (\n \"Next-Page\",\n 
\"Total-Records\",\n \"Last-Modified\",\n \"ETag\",\n \"Cache-Control\",\n \"Expires\",\n \"Pragma\",\n ),\n }\n collection_post_arguments = {\"schema\": PayloadRequestSchema()}\n default_record_arguments = {}\n record_get_arguments = {\n \"schema\": RequestSchema().bind(querystring=RecordGetQuerySchema()),\n \"cors_headers\": (\"Last-Modified\", \"ETag\", \"Cache-Control\", \"Expires\", \"Pragma\"),\n }\n\n def __init__(self, **kwargs):\n self.update(**kwargs)\n self.record_arguments = functools.partial(self.get_view_arguments, \"record\")\n self.collection_arguments = functools.partial(self.get_view_arguments, \"collection\")\n\n def update(self, **kwargs):\n \"\"\"Update viewset attributes with provided values.\"\"\"\n self.__dict__.update(**kwargs)\n\n def get_view_arguments(self, endpoint_type, resource_cls, method):\n \"\"\"Return the Pyramid/Cornice view arguments for the given endpoint\n type and method.\n\n :param str endpoint_type: either \"collection\" or \"record\".\n :param resource_cls: the resource class.\n :param str method: the HTTP method.\n \"\"\"\n args = {**self.default_arguments}\n default_arguments = getattr(self, f\"default_{endpoint_type}_arguments\")\n args.update(**default_arguments)\n\n by_http_verb = f\"default_{method.lower()}_arguments\"\n method_args = getattr(self, by_http_verb, {})\n args.update(**method_args)\n\n by_method = f\"{endpoint_type}_{method.lower()}_arguments\"\n endpoint_args = getattr(self, by_method, {})\n args.update(**endpoint_args)\n\n request_schema = args.get(\"schema\", RequestSchema())\n record_schema = self.get_record_schema(resource_cls, method)\n request_schema = request_schema.bind(body=record_schema)\n response_schemas = self.responses.get_and_bind(endpoint_type, method, record=record_schema)\n\n args[\"schema\"] = request_schema\n args[\"response_schemas\"] = response_schemas\n\n validators = args.get(\"validators\", [])\n validators.append(colander_validator)\n args[\"validators\"] = validators\n\n return args\n\n def get_record_schema(self, resource_cls, method):\n \"\"\"Return the Cornice schema for the given method.\n \"\"\"\n if method.lower() in (\"patch\", \"delete\"):\n resource_schema = SimpleSchema\n else:\n resource_schema = resource_cls.schema\n if hasattr(resource_cls, \"mapping\"):\n message = \"Resource `mapping` is deprecated, use `schema`\"\n warnings.warn(message, DeprecationWarning)\n resource_schema = resource_cls.mapping.__class__\n\n record_schema = RecordSchema().bind(data=resource_schema())\n\n return record_schema\n\n def get_view(self, endpoint_type, method):\n \"\"\"Return the view method name located on the resource object, for the\n given type and method.\n\n * For collections, this will be \"collection_{method|lower}\n * For records, this will be \"{method|lower}.\n \"\"\"\n if endpoint_type == \"record\":\n return method.lower()\n return f\"{endpoint_type}_{method.lower()}\"\n\n def get_name(self, resource_cls):\n \"\"\"Returns the name of the resource.\n \"\"\"\n # Provided on viewset during registration.\n if \"name\" in self.__dict__:\n return self.__dict__[\"name\"]\n\n # Attribute on resource class (but not @property)\n has_class_attr = hasattr(resource_cls, \"name\") and not callable(resource_cls.name)\n if has_class_attr:\n return resource_cls.name\n\n # Use classname\n return resource_cls.__name__.lower()\n\n def get_service_name(self, endpoint_type, resource_cls):\n \"\"\"Returns the name of the service, depending a given type and\n resource.\n \"\"\"\n return self.service_name.format(\n 
resource_name=self.get_name(resource_cls), endpoint_type=endpoint_type\n )\n\n def get_service_arguments(self):\n return {**self.service_arguments}\n\n def is_endpoint_enabled(self, endpoint_type, resource_name, method, settings):\n \"\"\"Returns if the given endpoint is enabled or not.\n\n Uses the settings to tell so.\n \"\"\"\n readonly_enabled = asbool(settings.get(\"readonly\"))\n readonly_method = method.lower() in [m.lower() for m in self.readonly_methods]\n if readonly_enabled and not readonly_method:\n return False\n\n setting_enabled = f\"{endpoint_type}_{resource_name}_{method.lower()}_enabled\"\n return asbool(settings.get(setting_enabled, True))\n\n\nclass ShareableViewSet(ViewSet):\n \"\"\"A ShareableViewSet will register the given resource with a schema\n that supports permissions.\n\n The views will rely on dynamic permissions (e.g. create with PUT if\n record does not exist), and solicit the cliquet RouteFactory.\n \"\"\"\n\n responses = ShareableResourseResponses()\n\n def get_record_schema(self, resource_cls, method):\n \"\"\"Return the Cornice schema for the given method.\n \"\"\"\n record_schema = super(ShareableViewSet, self).get_record_schema(resource_cls, method)\n allowed_permissions = resource_cls.permissions\n permissions = PermissionsSchema(\n name=\"permissions\", missing=colander.drop, permissions=allowed_permissions\n )\n record_schema = record_schema.bind(permissions=permissions)\n return record_schema\n\n def get_view_arguments(self, endpoint_type, resource_cls, method):\n args = super().get_view_arguments(endpoint_type, resource_cls, method)\n args[\"permission\"] = authorization.DYNAMIC\n return args\n\n def get_service_arguments(self):\n args = super().get_service_arguments()\n args[\"factory\"] = self.factory\n return args\n", "path": "kinto/core/resource/viewset.py"}], "after_files": [{"content": "import functools\n\nimport colander\nfrom cornice.validators import colander_validator\nfrom pyramid.settings import asbool\n\nfrom kinto.core import authorization\n\nfrom .schema import (\n PermissionsSchema,\n RequestSchema,\n PayloadRequestSchema,\n PatchHeaderSchema,\n CollectionQuerySchema,\n CollectionGetQuerySchema,\n RecordGetQuerySchema,\n RecordSchema,\n ResourceReponses,\n ShareableResourseResponses,\n)\n\n\nCONTENT_TYPES = [\"application/json\"]\n\nPATCH_CONTENT_TYPES = [\"application/merge-patch+json\"]\n\n\nclass StrictSchema(colander.MappingSchema):\n @staticmethod\n def schema_type():\n return colander.Mapping(unknown=\"raise\")\n\n\nclass PartialSchema(colander.MappingSchema):\n @staticmethod\n def schema_type():\n return colander.Mapping(unknown=\"ignore\")\n\n\nclass SimpleSchema(colander.MappingSchema):\n @staticmethod\n def schema_type():\n return colander.Mapping(unknown=\"preserve\")\n\n\nclass ViewSet:\n \"\"\"The default ViewSet object.\n\n A viewset contains all the information needed to register\n any resource in the Cornice registry.\n\n It provides the same features as ``cornice.resource()``, except\n that it is much more flexible and extensible.\n \"\"\"\n\n service_name = \"{resource_name}-{endpoint_type}\"\n collection_path = \"/{resource_name}s\"\n record_path = \"/{resource_name}s/{{id}}\"\n\n collection_methods = (\"GET\", \"POST\", \"DELETE\")\n record_methods = (\"GET\", \"PUT\", \"PATCH\", \"DELETE\")\n\n readonly_methods = (\"GET\", \"OPTIONS\", \"HEAD\")\n\n factory = authorization.RouteFactory\n\n responses = ResourceReponses()\n\n service_arguments = {\"description\": \"Collection of {resource_name}\"}\n\n 
default_arguments = {\n \"permission\": authorization.PRIVATE,\n \"accept\": CONTENT_TYPES,\n \"schema\": RequestSchema(),\n }\n\n default_post_arguments = {\"content_type\": CONTENT_TYPES, \"schema\": PayloadRequestSchema()}\n\n default_put_arguments = {\"content_type\": CONTENT_TYPES, \"schema\": PayloadRequestSchema()}\n\n default_patch_arguments = {\n \"content_type\": CONTENT_TYPES + PATCH_CONTENT_TYPES,\n \"schema\": PayloadRequestSchema().bind(header=PatchHeaderSchema()),\n }\n\n default_collection_arguments = {\n \"schema\": RequestSchema().bind(querystring=CollectionQuerySchema())\n }\n collection_get_arguments = {\n \"schema\": RequestSchema().bind(querystring=CollectionGetQuerySchema()),\n \"cors_headers\": (\n \"Next-Page\",\n \"Total-Records\",\n \"Last-Modified\",\n \"ETag\",\n \"Cache-Control\",\n \"Expires\",\n \"Pragma\",\n ),\n }\n collection_post_arguments = {\"schema\": PayloadRequestSchema()}\n default_record_arguments = {}\n record_get_arguments = {\n \"schema\": RequestSchema().bind(querystring=RecordGetQuerySchema()),\n \"cors_headers\": (\"Last-Modified\", \"ETag\", \"Cache-Control\", \"Expires\", \"Pragma\"),\n }\n\n def __init__(self, **kwargs):\n self.update(**kwargs)\n self.record_arguments = functools.partial(self.get_view_arguments, \"record\")\n self.collection_arguments = functools.partial(self.get_view_arguments, \"collection\")\n\n def update(self, **kwargs):\n \"\"\"Update viewset attributes with provided values.\"\"\"\n self.__dict__.update(**kwargs)\n\n def get_view_arguments(self, endpoint_type, resource_cls, method):\n \"\"\"Return the Pyramid/Cornice view arguments for the given endpoint\n type and method.\n\n :param str endpoint_type: either \"collection\" or \"record\".\n :param resource_cls: the resource class.\n :param str method: the HTTP method.\n \"\"\"\n args = {**self.default_arguments}\n default_arguments = getattr(self, f\"default_{endpoint_type}_arguments\")\n args.update(**default_arguments)\n\n by_http_verb = f\"default_{method.lower()}_arguments\"\n method_args = getattr(self, by_http_verb, {})\n args.update(**method_args)\n\n by_method = f\"{endpoint_type}_{method.lower()}_arguments\"\n endpoint_args = getattr(self, by_method, {})\n args.update(**endpoint_args)\n\n request_schema = args.get(\"schema\", RequestSchema())\n record_schema = self.get_record_schema(resource_cls, method)\n request_schema = request_schema.bind(body=record_schema)\n response_schemas = self.responses.get_and_bind(endpoint_type, method, record=record_schema)\n\n args[\"schema\"] = request_schema\n args[\"response_schemas\"] = response_schemas\n\n validators = args.get(\"validators\", [])\n validators.append(colander_validator)\n args[\"validators\"] = validators\n\n return args\n\n def get_record_schema(self, resource_cls, method):\n \"\"\"Return the Cornice schema for the given method.\n \"\"\"\n if method.lower() in (\"patch\", \"delete\"):\n resource_schema = SimpleSchema\n else:\n resource_schema = resource_cls.schema\n\n record_schema = RecordSchema().bind(data=resource_schema())\n\n return record_schema\n\n def get_view(self, endpoint_type, method):\n \"\"\"Return the view method name located on the resource object, for the\n given type and method.\n\n * For collections, this will be \"collection_{method|lower}\n * For records, this will be \"{method|lower}.\n \"\"\"\n if endpoint_type == \"record\":\n return method.lower()\n return f\"{endpoint_type}_{method.lower()}\"\n\n def get_name(self, resource_cls):\n \"\"\"Returns the name of the resource.\n 
\"\"\"\n # Provided on viewset during registration.\n if \"name\" in self.__dict__:\n return self.__dict__[\"name\"]\n\n # Attribute on resource class (but not @property)\n has_class_attr = hasattr(resource_cls, \"name\") and not callable(resource_cls.name)\n if has_class_attr:\n return resource_cls.name\n\n # Use classname\n return resource_cls.__name__.lower()\n\n def get_service_name(self, endpoint_type, resource_cls):\n \"\"\"Returns the name of the service, depending a given type and\n resource.\n \"\"\"\n return self.service_name.format(\n resource_name=self.get_name(resource_cls), endpoint_type=endpoint_type\n )\n\n def get_service_arguments(self):\n return {**self.service_arguments}\n\n def is_endpoint_enabled(self, endpoint_type, resource_name, method, settings):\n \"\"\"Returns if the given endpoint is enabled or not.\n\n Uses the settings to tell so.\n \"\"\"\n readonly_enabled = asbool(settings.get(\"readonly\"))\n readonly_method = method.lower() in [m.lower() for m in self.readonly_methods]\n if readonly_enabled and not readonly_method:\n return False\n\n setting_enabled = f\"{endpoint_type}_{resource_name}_{method.lower()}_enabled\"\n return asbool(settings.get(setting_enabled, True))\n\n\nclass ShareableViewSet(ViewSet):\n \"\"\"A ShareableViewSet will register the given resource with a schema\n that supports permissions.\n\n The views will rely on dynamic permissions (e.g. create with PUT if\n record does not exist), and solicit the cliquet RouteFactory.\n \"\"\"\n\n responses = ShareableResourseResponses()\n\n def get_record_schema(self, resource_cls, method):\n \"\"\"Return the Cornice schema for the given method.\n \"\"\"\n record_schema = super(ShareableViewSet, self).get_record_schema(resource_cls, method)\n allowed_permissions = resource_cls.permissions\n permissions = PermissionsSchema(\n name=\"permissions\", missing=colander.drop, permissions=allowed_permissions\n )\n record_schema = record_schema.bind(permissions=permissions)\n return record_schema\n\n def get_view_arguments(self, endpoint_type, resource_cls, method):\n args = super().get_view_arguments(endpoint_type, resource_cls, method)\n args[\"permission\"] = authorization.DYNAMIC\n return args\n\n def get_service_arguments(self):\n args = super().get_service_arguments()\n args[\"factory\"] = self.factory\n return args\n", "path": "kinto/core/resource/viewset.py"}]}
| 2,748 | 165 |