import os

import gradio as gr
import openai
import torch
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma

# Load the pre-trained embedding model, using the GPU when one is available
device = "cuda" if torch.cuda.is_available() else "cpu"
model_name = 'intfloat/e5-small'
embedding_model = HuggingFaceEmbeddings(model_name=model_name, model_kwargs={'device': device})

# Load the persisted Chroma vector store; ./docs/chroma/ must already contain a
# collection indexed with the same embedding model used above
persist_directory = './docs/chroma/'
vectordb = Chroma(persist_directory=persist_directory, embedding_function=embedding_model)

# OpenAI API key (set the OPENAI_API_KEY environment variable, or replace the fallback below)
openai.api_key = os.environ.get("OPENAI_API_KEY", "your-api-key")

def retrieve_documents(question, k=5):
    """Retrieve the top-k most similar document chunks from the Chroma vector store."""
    docs = vectordb.similarity_search(question, k=k)
    return [doc.page_content for doc in docs]

def generate_response(question, context):
    """Generate an answer with OpenAI GPT-4, grounded in the retrieved context."""
    full_prompt = f"Context: {context}\n\nQuestion: {question}"
    # Uses the legacy chat-completions interface; requires the openai<1.0 SDK
    # (openai>=1.0 exposes the equivalent call as client.chat.completions.create)
    response = openai.ChatCompletion.create(
        model="gpt-4",
        messages=[{"role": "user", "content": full_prompt}],
        max_tokens=300,
        temperature=0.7
    )
    return response['choices'][0]['message']['content'].strip()

def rag_pipeline(question):
    """Full RAG Pipeline - Retrieve Docs & Generate Response"""
    retrieved_docs = retrieve_documents(question, k=5)
    context = " ".join(retrieved_docs)
    response = generate_response(question, context)
    return response, retrieved_docs

def gradio_interface(question):
    response, retrieved_docs = rag_pipeline(question)
    return response, "\n\n".join(retrieved_docs)

# Create the Gradio app
iface = gr.Interface(
    fn=gradio_interface,
    inputs=gr.Textbox(label="Enter your question"),
    outputs=[gr.Textbox(label="Generated Response"), gr.Textbox(label="Retrieved Documents")],
    title="RAG-Based Question Answering System",
    description="Enter a question and retrieve relevant documents along with the AI-generated response."
)

if __name__ == "__main__":
    iface.launch()