Commit: "Update endpoints.py" (+65 additions, −5 deletions). The diff below shows the changed sections of endpoints.py: chat sessions are now persisted to the analysis collection during streaming, a new GET /chats endpoint lists a user's chat history, voice chats are stored, and the patient-delete handler's log/response messages mention chats.
@@ -1,7 +1,7 @@
|
|
1 |
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Query, Form
|
2 |
from fastapi.responses import StreamingResponse, JSONResponse
|
3 |
from fastapi.encoders import jsonable_encoder
|
4 |
-
from typing import Optional
|
5 |
from models import ChatRequest, VoiceOutputRequest, RiskLevel
|
6 |
from auth import get_current_user
|
7 |
from utils import clean_text_response
|
@@ -87,15 +87,61 @@ def create_router(agent, logger, patients_collection, analysis_collection, users
|
|
87 |
)
|
88 |
|
89 |
text = agent.tokenizer.decode(output["sequences"][0][input_ids.shape[1]:], skip_special_tokens=True)
|
90 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
91 |
yield chunk + " "
|
92 |
await asyncio.sleep(0.05)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
93 |
except Exception as e:
|
94 |
logger.error(f"Streaming error: {e}")
|
95 |
yield f"⚠️ Error: {e}"
|
96 |
|
97 |
return StreamingResponse(token_stream(), media_type="text/plain")
|
98 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
99 |
@router.post("/voice/transcribe")
|
100 |
async def transcribe_voice(
|
101 |
audio: UploadFile = File(...),
|
@@ -163,6 +209,20 @@ def create_router(agent, logger, patients_collection, analysis_collection, users
|
|
163 |
|
164 |
audio_data = text_to_speech(chat_response, language.split('-')[0])
|
165 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
166 |
return StreamingResponse(
|
167 |
io.BytesIO(audio_data),
|
168 |
media_type="audio/mpeg",
|
@@ -262,15 +322,15 @@ def create_router(agent, logger, patients_collection, analysis_collection, users
|
|
262 |
if patient.get("created_by") != current_user["email"] and not current_user.get("is_admin", False):
|
263 |
raise HTTPException(status_code=403, detail="Not authorized to delete this patient")
|
264 |
|
265 |
-
# Delete all analyses associated with this patient
|
266 |
await analysis_collection.delete_many({"patient_id": patient_id})
|
267 |
-
logger.info(f"Deleted analyses for patient {patient_id}")
|
268 |
|
269 |
# Delete the patient
|
270 |
await patients_collection.delete_one({"fhir_id": patient_id})
|
271 |
logger.info(f"Patient {patient_id} deleted successfully")
|
272 |
|
273 |
-
return {"status": "success", "message": f"Patient {patient_id} and associated analyses deleted"}
|
274 |
|
275 |
except HTTPException:
|
276 |
raise
|
|
|
1 |
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Query, Form
|
2 |
from fastapi.responses import StreamingResponse, JSONResponse
|
3 |
from fastapi.encoders import jsonable_encoder
|
4 |
+
from typing import Optional, List
|
5 |
from models import ChatRequest, VoiceOutputRequest, RiskLevel
|
6 |
from auth import get_current_user
|
7 |
from utils import clean_text_response
|
|
|
87 |
)
|
88 |
|
89 |
text = agent.tokenizer.decode(output["sequences"][0][input_ids.shape[1]:], skip_special_tokens=True)
|
90 |
+
cleaned_text = clean_text_response(text)
|
91 |
+
full_response = ""
|
92 |
+
|
93 |
+
# Store chat session in database
|
94 |
+
chat_entry = {
|
95 |
+
"user_id": current_user["email"],
|
96 |
+
"patient_id": request.patient_id if request.patient_id else None,
|
97 |
+
"message": request.message,
|
98 |
+
"response": cleaned_text,
|
99 |
+
"chat_type": "chat",
|
100 |
+
"timestamp": datetime.utcnow(),
|
101 |
+
"temperature": request.temperature,
|
102 |
+
"max_new_tokens": request.max_new_tokens
|
103 |
+
}
|
104 |
+
result = await analysis_collection.insert_one(chat_entry)
|
105 |
+
chat_entry["_id"] = str(result.inserted_id)
|
106 |
+
|
107 |
+
for chunk in cleaned_text.split():
|
108 |
+
full_response += chunk + " "
|
109 |
yield chunk + " "
|
110 |
await asyncio.sleep(0.05)
|
111 |
+
|
112 |
+
# Update chat entry with full response
|
113 |
+
await analysis_collection.update_one(
|
114 |
+
{"_id": result.inserted_id},
|
115 |
+
{"$set": {"response": full_response}}
|
116 |
+
)
|
117 |
+
|
118 |
except Exception as e:
|
119 |
logger.error(f"Streaming error: {e}")
|
120 |
yield f"⚠️ Error: {e}"
|
121 |
|
122 |
return StreamingResponse(token_stream(), media_type="text/plain")
|
123 |
|
124 |
+
@router.get("/chats")
|
125 |
+
async def get_chats(
|
126 |
+
current_user: dict = Depends(get_current_user)
|
127 |
+
):
|
128 |
+
logger.info(f"Fetching chats for {current_user['email']}")
|
129 |
+
try:
|
130 |
+
chats = await analysis_collection.find({"user_id": current_user["email"], "chat_type": "chat"}).sort("timestamp", -1).to_list(length=100)
|
131 |
+
return [
|
132 |
+
{
|
133 |
+
"id": str(chat["_id"]),
|
134 |
+
"title": chat.get("message", "Untitled Chat")[:30], # First 30 chars of message as title
|
135 |
+
"timestamp": chat["timestamp"].isoformat(),
|
136 |
+
"message": chat["message"],
|
137 |
+
"response": chat["response"]
|
138 |
+
}
|
139 |
+
for chat in chats
|
140 |
+
]
|
141 |
+
except Exception as e:
|
142 |
+
logger.error(f"Error fetching chats: {e}")
|
143 |
+
raise HTTPException(status_code=500, detail="Failed to retrieve chats")
|
144 |
+
|
145 |
@router.post("/voice/transcribe")
|
146 |
async def transcribe_voice(
|
147 |
audio: UploadFile = File(...),
|
|
|
209 |
|
210 |
audio_data = text_to_speech(chat_response, language.split('-')[0])
|
211 |
|
212 |
+
# Store voice chat in database
|
213 |
+
chat_entry = {
|
214 |
+
"user_id": current_user["email"],
|
215 |
+
"patient_id": None,
|
216 |
+
"message": user_message,
|
217 |
+
"response": chat_response,
|
218 |
+
"chat_type": "voice_chat",
|
219 |
+
"timestamp": datetime.utcnow(),
|
220 |
+
"temperature": temperature,
|
221 |
+
"max_new_tokens": max_new_tokens
|
222 |
+
}
|
223 |
+
result = await analysis_collection.insert_one(chat_entry)
|
224 |
+
chat_entry["_id"] = str(result.inserted_id)
|
225 |
+
|
226 |
return StreamingResponse(
|
227 |
io.BytesIO(audio_data),
|
228 |
media_type="audio/mpeg",
|
|
|
322 |
if patient.get("created_by") != current_user["email"] and not current_user.get("is_admin", False):
|
323 |
raise HTTPException(status_code=403, detail="Not authorized to delete this patient")
|
324 |
|
325 |
+
# Delete all analyses and chats associated with this patient
|
326 |
await analysis_collection.delete_many({"patient_id": patient_id})
|
327 |
+
logger.info(f"Deleted analyses and chats for patient {patient_id}")
|
328 |
|
329 |
# Delete the patient
|
330 |
await patients_collection.delete_one({"fhir_id": patient_id})
|
331 |
logger.info(f"Patient {patient_id} deleted successfully")
|
332 |
|
333 |
+
return {"status": "success", "message": f"Patient {patient_id} and associated analyses/chats deleted"}
|
334 |
|
335 |
except HTTPException:
|
336 |
raise
|