Tanmay09516 committed (verified)
Commit: a70133d
Parent(s): 4d37355

Update app.py

Files changed (1): app.py (+12, -6)
app.py CHANGED
@@ -38,13 +38,19 @@ def chat_with_langassist(query: str):
 
     # Construct the prompt with context and question
     prompt = (
-        "You are LangAssist, a knowledgeable assistant for the LangChain Python Library. "
-        "Given the following context from the documentation, provide a helpful answer to the user's question.\n\n"
-        "Context:\n{context}\n\n"
-        "Question: {question}\n\n"
-        "Answer:"
+        # "You are LangAssist, a knowledgeable assistant for the LangChain Python Library. "
+        # "Given the following context from the documentation, provide a helpful answer to the user's question.\n\n"
+        # "Context:\n{context}\n\n"
+        # "Question: {question}\n\n"
+        # "Answer:"
+        "You are LangChat, a knowledgeable assistant for the LangChain Python Library. "
+        "Given the following context from the documentation, provide a helpful answer to the user's question. \n\n"
+        "Context:\n{context}\n\n"
+        "You can ignore the context if the question is a simple chat like Hi, hello, and just respond in a normal manner as LangChat, otherwise use the context to answer the query."
+        "If you can't find the answer from the sources, mention that clearly instead of making up an answer.\n\n"
+        "Question: {question}\n\n"
+        "Answer:"
     ).format(context=context, question=query)
-
     # Generate an answer using the language model
     try:
         answer = llm.invoke(prompt).content.strip()
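
For orientation, here is a minimal sketch of how chat_with_langassist might read after this commit. The ChatOpenAI model choice and the retrieve_context helper are assumptions for illustration only; the diff does not show how app.py actually configures retrieval or the language model.

    # Sketch only: model setup and retrieval are assumed, not taken from the diff.
    from langchain_openai import ChatOpenAI

    llm = ChatOpenAI(model="gpt-4o-mini")  # hypothetical model choice


    def retrieve_context(query: str) -> str:
        # Placeholder for app.py's documentation retrieval step (assumption).
        return "LangChain is a framework for building LLM-powered applications."


    def chat_with_langassist(query: str):
        context = retrieve_context(query)

        # Construct the prompt with context and question (prompt text from this commit)
        prompt = (
            "You are LangChat, a knowledgeable assistant for the LangChain Python Library. "
            "Given the following context from the documentation, provide a helpful answer to the user's question.\n\n"
            "Context:\n{context}\n\n"
            "You can ignore the context if the question is a simple chat like Hi, hello, and just respond "
            "in a normal manner as LangChat, otherwise use the context to answer the query. "
            "If you can't find the answer from the sources, mention that clearly instead of making up an answer.\n\n"
            "Question: {question}\n\n"
            "Answer:"
        ).format(context=context, question=query)

        # Generate an answer using the language model
        try:
            answer = llm.invoke(prompt).content.strip()
        except Exception as exc:
            answer = f"Error generating answer: {exc}"
        return answer

The net effect of the change is that the assistant now identifies as LangChat, handles simple greetings without forcing documentation context into the reply, and is instructed to say when the sources do not contain the answer rather than inventing one.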