from langchain.chains import RetrievalQA

def generate_response(llm, vector_store, question, relevant_docs):
    # Build a retrieval-based question-answering chain. Note that RetrievalQA
    # performs its own retrieval through the vector store's retriever, so the
    # pre-fetched relevant_docs are not passed into the chain here.
    qa_chain = RetrievalQA.from_chain_type(
        llm=llm,
        retriever=vector_store.as_retriever(),
        return_source_documents=True
    )
    # RetrievalQA expects its input under the "query" key and, with
    # return_source_documents=True, returns both the answer and the sources.
    result = qa_chain.invoke({"query": question})
    response = result['result']
    source_docs = result['source_documents']
    return response, source_docs
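

# Usage sketch (hypothetical, not part of the original function): assumes `llm`
# is any LangChain chat model and `vector_store` is a LangChain vector store
# (e.g. FAISS) already populated with your document chunks. The question text
# and metadata key below are illustrative placeholders.
def _example_usage(llm, vector_store):
    question = "What does the report say about quarterly revenue?"
    # Pre-fetch the top matching chunks; the chain inside generate_response
    # retrieves its own documents, so this is only needed if you want the
    # matches separately (e.g. for logging or display).
    relevant_docs = vector_store.similarity_search(question, k=4)
    answer, sources = generate_response(llm, vector_store, question, relevant_docs)
    print(answer)
    for doc in sources:
        print(doc.metadata.get("source", "unknown"))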