Ali2206 committed
Commit fa9b390 · verified
1 Parent(s): e3305a8

Update app.py

Files changed (1)
  1. app.py +27 -14
app.py CHANGED
@@ -3,12 +3,13 @@ import sys
 import json
 import logging
 import re
+import hashlib
 from datetime import datetime
 from typing import List, Dict, Optional, Tuple
 from enum import Enum
 
 from fastapi import FastAPI, HTTPException
-from fastapi.responses import StreamingResponse
+from fastapi.responses import StreamingResponse
 from fastapi.middleware.cors import CORSMiddleware
 from pydantic import BaseModel
 import asyncio
@@ -22,7 +23,7 @@ logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(
 logger = logging.getLogger("TxAgentAPI")
 
 # App
-app = FastAPI(title="TxAgent API", version="2.2.0")  # Version bump for new features
+app = FastAPI(title="TxAgent API", version="2.2.1")  # Version bump for hash-based analysis
 
 app.add_middleware(
     CORSMiddleware,
@@ -169,13 +170,26 @@ def serialize_patient(patient: dict) -> dict:
     patient_copy["_id"] = str(patient_copy["_id"])
     return patient_copy
 
+def compute_patient_data_hash(patient: dict) -> str:
+    """Compute SHA-256 hash of patient data."""
+    serialized = json.dumps(patient, sort_keys=True)  # Sort keys for consistent hashing
+    return hashlib.sha256(serialized.encode()).hexdigest()
+
 async def analyze_patient(patient: dict):
     try:
         serialized = serialize_patient(patient)
-        doc = json.dumps(serialized, indent=2)
-        logger.info(f"🧾 Analyzing patient: {serialized.get('fhir_id')}")
+        patient_id = serialized.get("fhir_id")
+        patient_hash = compute_patient_data_hash(serialized)
+        logger.info(f"🧾 Analyzing patient: {patient_id}")
+
+        # Check if analysis exists and hash matches
+        existing_analysis = await analysis_collection.find_one({"patient_id": patient_id})
+        if existing_analysis and existing_analysis.get("data_hash") == patient_hash:
+            logger.info(f"✅ No changes in patient data for {patient_id}, skipping analysis")
+            return  # Skip analysis if data hasn't changed
 
         # Main clinical analysis
+        doc = json.dumps(serialized, indent=2)
         message = (
             "You are a clinical decision support AI.\n\n"
             "Given the patient document below:\n"
@@ -197,26 +211,27 @@ async def analyze_patient(patient: dict):
             "factors": risk_factors
         }
 
-        # Store analysis
+        # Store analysis with data hash
        analysis_doc = {
-            "patient_id": serialized.get("fhir_id"),
+            "patient_id": patient_id,
             "timestamp": datetime.utcnow(),
             "summary": structured,
             "suicide_risk": suicide_risk,
-            "raw": raw
+            "raw": raw,
+            "data_hash": patient_hash  # Store the hash
        }
 
        await analysis_collection.update_one(
-            {"patient_id": serialized.get("fhir_id")},
+            {"patient_id": patient_id},
            {"$set": analysis_doc},
            upsert=True
        )
 
        # Create alert if risk is above threshold
        if risk_level in [RiskLevel.MODERATE, RiskLevel.HIGH, RiskLevel.SEVERE]:
-            await create_alert(serialized.get("fhir_id"), suicide_risk)
+            await create_alert(patient_id, suicide_risk)
 
-        logger.info(f"✅ Stored analysis for patient {serialized.get('fhir_id')}")
+        logger.info(f"✅ Stored analysis for patient {patient_id}")
 
     except Exception as e:
         logger.error(f"Error analyzing patient: {e}")
@@ -250,7 +265,7 @@ async def startup_event():
     db = get_mongo_client()["cps_db"]
     patients_collection = db["patients"]
     analysis_collection = db["patient_analysis_results"]
-    alerts_collection = db["clinical_alerts"]  # New collection for alerts
+    alerts_collection = db["clinical_alerts"]
     logger.info("📡 Connected to MongoDB")
 
     asyncio.create_task(analyze_all_patients())
@@ -260,7 +275,7 @@ async def status():
     return {
         "status": "running",
         "timestamp": datetime.utcnow().isoformat(),
-        "version": "2.2.0"
+        "version": "2.2.1"
     }
 
 @app.get("/patients/analysis-results")
@@ -295,8 +310,6 @@ async def get_patient_analysis_results(name: Optional[str] = Query(None)):
         logger.error(f"Error fetching analysis results: {e}")
         raise HTTPException(status_code=500, detail="Failed to retrieve analysis results")
 
-
-
 @app.post("/chat-stream")
 async def chat_stream_endpoint(request: ChatRequest):
     async def token_stream():
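The substance of this commit is the hash-based skip: re-analysis only runs when the serialized patient document has actually changed. A minimal, self-contained sketch of that pattern follows; the plain dict store is an assumption standing in for analysis_collection, which in the app is an async MongoDB collection queried with find_one.

import hashlib
import json

def compute_patient_data_hash(patient: dict) -> str:
    """SHA-256 over a canonical JSON dump; sort_keys keeps the digest stable."""
    serialized = json.dumps(patient, sort_keys=True)
    return hashlib.sha256(serialized.encode()).hexdigest()

# Stand-in store: patient_id -> previously stored analysis document.
analysis_store: dict = {}

def needs_analysis(patient_id: str, patient: dict) -> bool:
    """True only if the patient data changed since the last stored analysis."""
    current_hash = compute_patient_data_hash(patient)
    existing = analysis_store.get(patient_id)
    return not (existing and existing.get("data_hash") == current_hash)

if __name__ == "__main__":
    patient = {"fhir_id": "p1", "conditions": ["example-condition"]}
    print(needs_analysis("p1", patient))   # True: nothing stored yet
    analysis_store["p1"] = {"data_hash": compute_patient_data_hash(patient)}
    print(needs_analysis("p1", patient))   # False: data unchanged, analysis skipped
    patient["conditions"].append("new-entry")
    print(needs_analysis("p1", patient))   # True: data changed, re-run analysis

Sorting keys before hashing keeps the digest independent of key order, so any change to the values themselves (including nested fields) yields a new digest and triggers a fresh analysis and upsert.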
 
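The restored from fastapi.responses import StreamingResponse serves the /chat-stream endpoint, whose body is only partially visible in the last hunk. A hedged sketch of how such an endpoint typically wraps an async generator; the token source here is a placeholder, not the app's actual model call.

import asyncio
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from pydantic import BaseModel

app = FastAPI()

class ChatRequest(BaseModel):
    message: str

@app.post("/chat-stream")
async def chat_stream_endpoint(request: ChatRequest):
    async def token_stream():
        # Placeholder token source; the real endpoint streams model output.
        for token in request.message.split():
            yield token + " "
            await asyncio.sleep(0)  # hand control back to the event loop between tokens
    return StreamingResponse(token_stream(), media_type="text/plain")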