|
import openai |
|
from langchain.vectorstores import Chroma |
|
from langchain.embeddings import OpenAIEmbeddings |
|
from retriever import * |
|
from chain import * |
|
import gradio as gr |
|
|
|
def load_embeddings_database_from_disk(persistence_directory, embeddings_generator):
    """Reload a previously persisted Chroma vector store.

    Opens the Chroma database stored under ``persistence_directory``.
    The same embedding function that was used when the database was
    originally built must be supplied, otherwise query embeddings will
    not match the stored vectors.

    Args:
        persistence_directory (str): Path of the on-disk Chroma store.
        embeddings_generator (obj): Embedding function used at creation time.

    Returns:
        Chroma: The vector database loaded from disk.
    """
    return Chroma(
        persist_directory=persistence_directory,
        embedding_function=embeddings_generator,
    )
|
|
|
|
|
|
|
# Directory holding the persisted Chroma vector store (created elsewhere).
persistence_directory = 'db'

# NOTE(review): openai.api_key is read here but never assigned in this file —
# presumably set by an imported module or environment hook; confirm before deploy.
embeddings_generator = OpenAIEmbeddings(openai_api_key = openai.api_key)

# Reopen the on-disk vector store with the same embedding function it was built with.
vector_database = load_embeddings_database_from_disk(persistence_directory, embeddings_generator)
# Number of documents to retrieve per query.
topk_documents = 2

# Helpers imported via `from retriever import *` / `from chain import *`;
# their exact behavior is defined in those modules.
retriever = initialize_document_retriever(topk_documents, vector_database)
qa_chain = create_question_answering_chain(retriever)
|
|
|
|
|
def add_text(history, text):
    """Record the user's message in the chat history.

    Appends ``(text, None)`` — the answer slot is filled in later by
    ``bot`` — and clears/disables the textbox until the reply arrives.

    Returns:
        tuple: (updated history, gr.update clearing and disabling the input).
    """
    updated_history = [*history, (text, None)]
    return updated_history, gr.update(value="", interactive=False)
|
|
|
|
|
def bot(query):
    """Answer the most recent user message in the chat history.

    Runs the retrieval-QA chain on the last question and writes the
    response into the answer slot of that history entry.

    Args:
        query: Chat history as passed by Gradio — assumed to be a list of
            [user_message, bot_message] pairs whose last entry is mutable.

    Returns:
        The same history object with the final answer filled in.
    """
    latest_question = query[-1][0]
    answer = qa_chain.run({"query": latest_question})
    query[-1][1] = answer
    return query
|
|
|
|
|
# Gradio UI: a chatbot pane plus a single textbox wired to the QA chain.
# NOTE(review): `.style(...)` and float `scale` values were removed in Gradio 4.x —
# this layout assumes an older 3.x release; confirm the pinned gradio version.
with gr.Blocks() as demo:
    # Chat transcript display; starts empty.
    chatbot = gr.Chatbot([], elem_id="Retrieval Augmented Question Answering").style(height=750)

    with gr.Row():
        with gr.Column(scale=0.95):
            txt = gr.Textbox(
                show_label=False,
                placeholder="Enter text and press enter",
            ).style(container=False)

    # On submit: append the question and disable the box, then run the QA chain.
    txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
        bot, chatbot, chatbot
    )
    # Re-enable the textbox once the bot has answered.
    txt_msg.then(lambda: gr.update(interactive=True), None, txt, queue=False)


demo.launch()
|
|