burhan112 committed on
Commit ee32f79 · verified · 1 Parent(s): c850716

Update app.py

Files changed (1)
  1. app.py +15 -26
app.py CHANGED
@@ -9,7 +9,7 @@ import os
 
 # Load data and FAISS index
 def load_data_and_index():
-    docs_df = pd.read_pickle("data.pkl")
+    docs_df = pd.read_pickle("docs_with_embeddings (1).pkl")  # Adjust path for HF Spaces
     embeddings = np.array(docs_df['embeddings'].tolist(), dtype=np.float32)
     dimension = embeddings.shape[1]
     index = faiss.IndexFlatL2(dimension)
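Aside (not part of the commit): the hunk above only shows the top of load_data_and_index. A minimal sketch of how the rest of the helper presumably reads, assuming the embeddings are added to the L2 index and both the DataFrame and the index are returned; the tail of the function is not visible in this diff.

import faiss
import numpy as np
import pandas as pd

def load_data_and_index():
    # Pickled DataFrame with a 'text' column and an 'embeddings' column (one vector per row)
    docs_df = pd.read_pickle("docs_with_embeddings (1).pkl")
    embeddings = np.array(docs_df['embeddings'].tolist(), dtype=np.float32)
    dimension = embeddings.shape[1]
    index = faiss.IndexFlatL2(dimension)  # exact L2 (Euclidean) nearest-neighbour search
    index.add(embeddings)                 # assumed: vectors are added right after the index is created
    return docs_df, index                 # assumed return shape; only the first lines appear in the diff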
@@ -44,15 +44,8 @@ def retrieve_docs(query, k=5):
     retrieved_docs['distance'] = distances[0]
     return retrieved_docs
 
-# RAG pipeline integrated into respond function
-def respond(
-    message,
-    history: list[tuple[str, str]],
-    system_message,
-    max_tokens,
-    temperature,
-    top_p,  # Keeping top_p as an input, though Gemini doesn’t use it directly
-):
+# Simplified respond function (no history)
+def respond(message, system_message, max_tokens, temperature, top_p):
     # Preprocess the user message
     preprocessed_query = preprocess_text(message)
 
@@ -60,13 +53,8 @@ def respond(
     retrieved_docs = retrieve_docs(preprocessed_query, k=5)
     context = "\n".join(retrieved_docs['text'].tolist())
 
-    # Construct the prompt with system message, history, and RAG context
+    # Construct the prompt with system message and RAG context
     prompt = f"{system_message}\n\n"
-    for user_msg, assistant_msg in history:
-        if user_msg:
-            prompt += f"User: {user_msg}\n"
-        if assistant_msg:
-            prompt += f"Assistant: {assistant_msg}\n"
     prompt += (
         f"Query: {message}\n"
         f"Relevant Context: {context}\n"
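Aside (not part of the commit): the hunk above calls retrieve_docs(preprocessed_query, k=5), whose body sits outside the visible hunks (only its last two lines appear earlier as context). A minimal sketch of what the retrieval presumably does, assuming docs_df and index are module-level results of load_data_and_index and where embed_query is a hypothetical stand-in for whatever embedding model the Space actually uses:

def retrieve_docs(query, k=5):
    # embed_query is hypothetical; the real embedding call is not shown in this diff
    query_vec = np.array([embed_query(query)], dtype=np.float32)
    distances, indices = index.search(query_vec, k)    # FAISS returns k distances and k row ids
    retrieved_docs = docs_df.iloc[indices[0]].copy()   # pull the matching rows out of the DataFrame
    retrieved_docs['distance'] = distances[0]          # matches the context line shown above
    return retrieved_docs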
@@ -89,18 +77,18 @@ def respond(
     else:
         answer += "."
 
-    # Yield the full response (no streaming, as Gemini API doesn’t support it here)
-    yield answer
+    return answer
 
-# Gradio Chat Interface
-demo = gr.ChatInterface(
-    respond,
-    additional_inputs=[
+# Simple Gradio Interface
+demo = gr.Interface(
+    fn=respond,
+    inputs=[
+        gr.Textbox(label="Your Query", placeholder="Enter your medical question here..."),
         gr.Textbox(
             value="You are a medical AI assistant diagnosing patients based on their query, using relevant context from past records of other patients.",
-            label="System message"
+            label="System Message"
         ),
-        gr.Slider(minimum=1, maximum=2048, value=150, step=1, label="Max new tokens"),
+        gr.Slider(minimum=1, maximum=2048, value=150, step=1, label="Max New Tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.75, step=0.1, label="Temperature"),
         gr.Slider(
             minimum=0.1,
@@ -110,8 +98,9 @@ demo = gr.ChatInterface(
             label="Top-p (nucleus sampling)",  # Included but not used by Gemini
         ),
     ],
-    title="🏥 Medical Chat Assistant",
-    description="A chat-based medical assistant that diagnoses patient queries using AI and past records."
+    outputs=gr.Textbox(label="Diagnosis"),
+    title="🏥 Medical Assistant",
+    description="A simple medical assistant that diagnoses patient queries using AI and past records."
 )
 
 if __name__ == "__main__":
 
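Aside (not part of the commit): with gr.Interface, the values of the input components are passed to respond positionally, and the returned string lands in the Diagnosis textbox. A minimal sketch of the presumed end of the file; the diff stops at the main guard, so the launch() call is an assumption:

# gr.Interface calls respond(message, system_message, max_tokens, temperature, top_p)
# with one value per input component, in the order the components are listed:
#   "Your Query" textbox      -> message
#   "System Message" textbox  -> system_message
#   "Max New Tokens" slider   -> max_tokens
#   "Temperature" slider      -> temperature
#   "Top-p" slider            -> top_p (accepted but not forwarded to Gemini)
if __name__ == "__main__":
    demo.launch()  # assumed: standard Gradio entry point for a Hugging Face Space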