chunk["choices"]["text"]
Browse files
app.py
CHANGED
@@ -132,7 +132,7 @@ Here's the explanation of the solution:
|
|
132 |
"""
|
133 |
generated_text = ""
|
134 |
for chunk in llm(full_prompt, stream=True, **generation_kwargs):
|
135 |
-
generated_text += chunk["choices"]["text"]
|
136 |
|
137 |
return {"explanation": generated_text}
|
138 |
|
|
|
132 |
"""
|
133 |
generated_text = ""
|
134 |
for chunk in llm(full_prompt, stream=True, **generation_kwargs):
|
135 |
+
generated_text += chunk["choices"][0]["text"]
|
136 |
|
137 |
return {"explanation": generated_text}
|
138 |
|