update

routes/llm.py  +7 -1

routes/llm.py CHANGED
@@ -128,10 +128,16 @@ async def chat_stream(
             stop=[],
         )
 
+        headers = {
+            "Content-Type": "text/event-stream",
+            "Cache-Control": "no-cache",
+            "Connection": "keep-alive",
+            "Access-Control-Allow-Origin": "*",
+        }
         return StreamingResponse(
             sse_generator(request, llm_api, system_prompt.text, predict_params, dataset_service, entity_service),
             media_type="text/event-stream",
-            headers=
+            headers=headers
         )
     except Exception as e:
         logger.error(f"Error in SSE chat stream: {str(e)}", stack_info=True)
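For context, a minimal sketch of the pattern this commit applies, assuming a standalone FastAPI app and a trivial async generator in place of the repo's sse_generator (the route path and generator name here are hypothetical):

    import asyncio

    from fastapi import FastAPI
    from fastapi.responses import StreamingResponse

    app = FastAPI()


    async def demo_event_stream():
        # Hypothetical stand-in for sse_generator: emit a few SSE-formatted events.
        for i in range(3):
            yield f"data: chunk {i}\n\n"
            await asyncio.sleep(0.1)


    @app.get("/chat/stream")
    async def chat_stream_demo():
        # Same header set the commit introduces. "Content-Type" duplicates the
        # media_type argument, and the wildcard CORS origin mirrors the commit's
        # choice rather than a general recommendation.
        headers = {
            "Content-Type": "text/event-stream",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Access-Control-Allow-Origin": "*",
        }
        return StreamingResponse(
            demo_event_stream(),
            media_type="text/event-stream",
            headers=headers,
        )

"Cache-Control: no-cache" keeps intermediaries from caching the event stream, and "Connection: keep-alive" signals that the response is long-lived, both of which help SSE clients receive events as they are produced.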