import gradio as gr
from langchain.vectorstores import FAISS
from langchain.embeddings import HuggingFaceEmbeddings

# Load the embedding model
embedding_model = HuggingFaceEmbeddings(model_name="sentence-transformers/LaBSE")

# Load the saved FAISS vectorstores
# (vectorstore is loaded here but not queried in search_query below)
vectorstore = FAISS.load_local("faiss_index", embedding_model, allow_dangerous_deserialization=True)
manual_vectorstore = FAISS.load_local("faiss_manual_index", embedding_model, allow_dangerous_deserialization=True)
problems_vectorstore = FAISS.load_local("faiss_problems_index", embedding_model, allow_dangerous_deserialization=True)
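# --- Hypothetical index-building sketch (assumption, not part of the original app) ---
# The app assumes "faiss_index", "faiss_manual_index" and "faiss_problems_index" already
# exist on disk. A minimal sketch of how such an index could be built with the same
# embedding model is shown below; `build_faiss_index` and the example `manual_texts`
# variable are placeholders, not code from the original Space.
def build_faiss_index(texts, index_path):
    """Embed a list of strings and persist them as a local FAISS index."""
    index = FAISS.from_texts(texts, embedding_model)
    index.save_local(index_path)

# Example usage (hypothetical data):
# build_faiss_index(manual_texts, "faiss_manual_index")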
def search_query(query):
    # Search the manuals
    manual_results = manual_vectorstore.similarity_search(query, k=2)
    manual_output = "\n\n".join([doc.page_content for doc in manual_results])

    # Search the known problems
    problems_results = problems_vectorstore.similarity_search(query, k=2)
    problems_output = "\n\n".join([doc.page_content for doc in problems_results])

    # Return the results as two separate outputs
    return manual_output, problems_output
examples = [
    ["How to change the knife?"],
    ["What are the safety precautions for using the machine?"],
    ["How can I get help with the machine?"]
]
# Gradio interface
iface = gr.Interface(
    fn=search_query,
    inputs=gr.Textbox(lines=2, placeholder="Enter your question here..."),
    outputs=[
        gr.Textbox(label="Manual Results"),
        gr.Textbox(label="Issues Results")
    ],
    examples=examples,
    title="Manual Querying System",
    description="Enter a question to get relevant information extracted from the manual and the most common related issues."
)
# Launch the app
iface.launch()