# app.py
import os
import warnings

import gradio as gr
from dotenv import load_dotenv
from langchain_groq import ChatGroq

from nomic_embeddings import EmbeddingsModel
from qdrant_search import QdrantSearch

load_dotenv()
warnings.filterwarnings("ignore", category=FutureWarning)
os.environ["TOKENIZERS_PARALLELISM"] = "FALSE"
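
# Expected environment variables (loaded from a local .env file by load_dotenv,
# or supplied by the hosting platform's secrets): QDRANT_CLOUD_URL and
# QDRANT_API_KEY for the vector store below, plus a Groq API key (typically
# GROQ_API_KEY) for ChatGroq.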
# Initialize global variables
collection_names = ["docs_v1_2", "docs_v2_2", "docs_v3_2"]
limit = 5
llm = ChatGroq(model="mixtral-8x7b-32768")
embeddings = EmbeddingsModel()
search = QdrantSearch(
    qdrant_url=os.environ["QDRANT_CLOUD_URL"],
    api_key=os.environ["QDRANT_API_KEY"],
    embeddings=embeddings,
)
# Define the query processing function
def chat_with_langassist(query: str):
    if not query.strip():
        return "Query cannot be empty.", []

    # Retrieve relevant documents from Qdrant
    retrieved_docs = search.query_multiple_collections(query, collection_names, limit)

    # Prepare the context from the retrieved documents
    context = "\n".join([doc['text'] for doc in retrieved_docs])

    # Construct the prompt with context and question
    prompt = (
        "You are LangAssist, a knowledgeable assistant for the LangChain Python Library. "
        "Given the following context from the documentation, provide a helpful answer to the user's question.\n\n"
        "Context:\n{context}\n\n"
        "Question: {question}\n\n"
        "Answer:"
    ).format(context=context, question=query)

    # Generate an answer using the language model
    try:
        answer = llm.invoke(prompt).content.strip()
    except Exception as e:
        return f"Error: {str(e)}", []

    # Prepare sources
    sources = [
        {
            "source": doc['source'],
            "text": doc['text']
        }
        for doc in retrieved_docs
    ]
    return answer, sources
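
# Note: the sources returned by chat_with_langassist are not surfaced in the
# Gradio UI below. A minimal, optional sketch for folding them into the
# displayed answer (assuming each source dict keeps the 'source' key used
# above):
def format_answer_with_sources(answer: str, sources: list) -> str:
    if not sources:
        return answer
    # De-duplicate source identifiers and list them under the answer.
    unique_sources = sorted({doc["source"] for doc in sources})
    references = "\n".join(f"- {src}" for src in unique_sources)
    return f"{answer}\n\nSources:\n{references}"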
# Define the Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("<h1>LangAssist Chat</h1>")
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def respond(message, chat_history):
        answer, _sources = chat_with_langassist(message)
        chat_history.append((message, answer))
        # Return values must match the outputs wired below: (chatbot, msg).
        return chat_history, gr.update(value='')

    msg.submit(respond, [msg, chatbot], [chatbot, msg])
    clear.click(lambda: None, None, chatbot)
# Run the Gradio app
if __name__ == "__main__":
    demo.launch()
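    # demo.launch() serves the app locally by default; on Hugging Face Spaces
    # the app is launched automatically. Passing share=True (a standard Gradio
    # option) provides a temporary public link when running outside Spaces.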