Update backend/query_llm.py
Changed file: backend/query_llm.py (+2 −2)
```diff
@@ -112,8 +112,8 @@ def generate_qwen(formatted_prompt: str, history: str):
         system="You are a helpful assistant.",
         api_name="/model_chat"
     )
-    print(response)
-    return response
+    print('Response:',response)
+    return response[1][0][1]
```