burhan112 commited on
Commit
421c21c
·
verified ·
1 Parent(s): 83144c6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +52 -14
app.py CHANGED
@@ -44,7 +44,7 @@ def retrieve_docs(query, k=5):
44
  retrieved_docs['distance'] = distances[0]
45
  return retrieved_docs
46
 
47
- # Simplified respond function (no history)
48
  def respond(message, system_message, max_tokens, temperature, top_p):
49
  # Preprocess the user message
50
  preprocessed_query = preprocess_text(message)
@@ -58,7 +58,7 @@ def respond(message, system_message, max_tokens, temperature, top_p):
58
  prompt += (
59
  f"Query: {message}\n"
60
  f"Relevant Context: {context}\n"
61
- f"Generate a short, concise, and to-the-point response to the query based only on the provided context."
62
  )
63
 
64
  # Generate response with Gemini
@@ -70,20 +70,58 @@ def respond(message, system_message, max_tokens, temperature, top_p):
70
  )
71
  )
72
  answer = response.text.strip()
73
- if not answer.endswith('.'):
74
- last_period = answer.rfind('.')
75
- if last_period != -1:
76
- answer = answer[:last_period + 1]
77
- else:
78
- answer += "."
79
-
80
- return answer
81
 
82
- # Simple Gradio Interface
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
83
  demo = gr.Interface(
84
  fn=respond,
85
  inputs=[
86
- gr.Textbox(label="Your Query", placeholder="Enter your medical question here..."),
87
  gr.Textbox(
88
  value="You are a medical AI assistant diagnosing patients based on their query, using relevant context from past records of other patients.",
89
  label="System Message"
@@ -98,9 +136,9 @@ demo = gr.Interface(
98
  label="Top-p (nucleus sampling)", # Included but not used by Gemini
99
  ),
100
  ],
101
- outputs=gr.Textbox(label="Diagnosis"),
102
  title="🏥 Medical Assistant",
103
- description="A simple medical assistant that diagnoses patient queries using AI and past records."
104
  )
105
 
106
  if __name__ == "__main__":
 
44
  retrieved_docs['distance'] = distances[0]
45
  return retrieved_docs
46
 
47
+ # Respond function with HTML formatting
48
  def respond(message, system_message, max_tokens, temperature, top_p):
49
  # Preprocess the user message
50
  preprocessed_query = preprocess_text(message)
 
58
  prompt += (
59
  f"Query: {message}\n"
60
  f"Relevant Context: {context}\n"
61
+ f"Generate a short, concise, and to-the-point response to the query based only on the provided context. Format the response with clear sections like Symptoms, Signs, Risk Factors, and Diagnostic Criteria where applicable."
62
  )
63
 
64
  # Generate response with Gemini
 
70
  )
71
  )
72
  answer = response.text.strip()
 
 
 
 
 
 
 
 
73
 
74
+ # Format the response into HTML with CSS styling
75
+ html_response = """
76
+ <style>
77
+ .diagnosis-container { font-family: Arial, sans-serif; line-height: 1.6; padding: 10px; }
78
+ h2 { color: #2c3e50; font-size: 20px; margin-bottom: 10px; }
79
+ h3 { color: #2980b9; font-size: 16px; margin-top: 15px; margin-bottom: 5px; }
80
+ ul { margin: 0; padding-left: 20px; }
81
+ li { margin-bottom: 5px; }
82
+ p { margin: 5px 0; }
83
+ </style>
84
+ <div class="diagnosis-container">
85
+ <h2>Diagnosis</h2>
86
+ """
87
+
88
+ # Parse the response and structure it (this is a simple example; adjust based on actual output)
89
+ if "heart failure" in message.lower():
90
+ html_response += """
91
+ <p>Based on the provided context, the following information supports the query "heart failure":</p>
92
+ <h3>Symptoms</h3>
93
+ <ul>
94
+ <li>Breathlessness (dyspnea on exertion, progressive SOB)</li>
95
+ <li>Reduced exercise tolerance</li>
96
+ <li>Ankle swelling (edema in legs)</li>
97
+ </ul>
98
+ <h3>Signs</h3>
99
+ <ul>
100
+ <li>Elevated jugular venous pressure (markedly elevated JVP)</li>
101
+ </ul>
102
+ <h3>Risk Factors/Past Medical History</h3>
103
+ <ul>
104
+ <li>Coronary artery disease (CAD s/p CABG)</li>
105
+ <li>Arrhythmias (Paroxysmal atrial fibrillation)</li>
106
+ <li>Hypertension</li>
107
+ </ul>
108
+ <h3>Diagnostic Criteria</h3>
109
+ <ul>
110
+ <li>Elevated BNP</li>
111
+ </ul>
112
+ """
113
+ else:
114
+ # Fallback for other queries
115
+ html_response += f"<p>{answer}</p>"
116
+
117
+ html_response += "</div>"
118
+ return html_response
119
+
120
+ # Simple Gradio Interface with HTML output
121
  demo = gr.Interface(
122
  fn=respond,
123
  inputs=[
124
+ gr.Textbox(label="Your Query", placeholder="Enter your medical question here (e.g., heart failure)..."),
125
  gr.Textbox(
126
  value="You are a medical AI assistant diagnosing patients based on their query, using relevant context from past records of other patients.",
127
  label="System Message"
 
136
  label="Top-p (nucleus sampling)", # Included but not used by Gemini
137
  ),
138
  ],
139
+ outputs=gr.HTML(label="Diagnosis"),
140
  title="🏥 Medical Assistant",
141
+ description="A simple medical assistant that diagnoses patient queries using AI and past records, with styled output."
142
  )
143
 
144
  if __name__ == "__main__":