datnguyentien204 committed
Commit c32b90c · verified · 1 Parent(s): 63ea0d4

Update pages/🤖 Medical Question Answering.py

pages/🤖 Medical Question Answering.py CHANGED
@@ -53,14 +53,13 @@ if "document_messages" not in st.session_state:
 
 def get_conversational_chain():
     prompt_template = """
-    Answer the question as detailed as possible from the provided context, make sure to provide all the details, if the answer is not in
-    provided context just say, "answer is not available in the context", don't provide the wrong answer\n\n
+    Answer the question as detailed as possible, but only if it relates to lung diseases or conditions. If the question is unrelated to lung diseases, respond with "This question is not related to lung diseases, so I cannot provide an answer." If the answer is not in the provided context, just say, "answer is not available in the context", and do not provide a wrong answer.\n\n
 
     Context:\n {context}?\n
     Question: \n{question}\n
 
     Answer:
     """
-    model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.1)
+    model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.8)
     prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
     chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
     return chain
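
For context, a minimal sketch of how the updated chain might be invoked elsewhere on this Streamlit page, assuming the app embeds its document chunks with Google embeddings and retrieves them via FAISS; the helper name, sample arguments, and embedding model below are assumptions and do not appear in this commit.

# Hypothetical usage sketch -- only get_conversational_chain() comes from this
# commit; the FAISS retrieval, embedding model, and helper name are assumptions.
from langchain_google_genai import GoogleGenerativeAIEmbeddings
from langchain_community.vectorstores import FAISS

def answer_user_question(user_question: str, text_chunks: list[str]) -> str:
    # Embed the page's text chunks and retrieve the ones most similar to the question.
    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
    index = FAISS.from_texts(text_chunks, embedding=embeddings)
    docs = index.similarity_search(user_question)

    # The "stuff" chain packs the retrieved docs into {context} and fills {question};
    # with the updated prompt, questions unrelated to lung diseases get a refusal string.
    chain = get_conversational_chain()
    response = chain(
        {"input_documents": docs, "question": user_question},
        return_only_outputs=True,
    )
    return response["output_text"]

Note that raising the temperature from 0.1 to 0.8 trades deterministic output for more varied wording in the generated answers, while the stricter prompt narrows the scope to lung-related questions.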