# NOTE(review): the lines that were here ("Spaces: Sleeping", file size, commit
# hashes, and a line-number gutter) were Hugging Face Space page-scrape residue,
# not Python source. Preserved as this comment so the module parses.
import gradio as gr
from langchain.vectorstores import FAISS
from langchain.embeddings import HuggingFaceEmbeddings
# Load the embedding model (LaBSE: multilingual sentence embeddings).
embedding_model = HuggingFaceEmbeddings(model_name="sentence-transformers/LaBSE")
# Load the saved FAISS vectorstores from local index directories.
# NOTE(review): allow_dangerous_deserialization=True unpickles index metadata —
# only safe because these index files are produced locally; do not point these
# paths at untrusted data.
vectorstore = FAISS.load_local("faiss_index", embedding_model, allow_dangerous_deserialization=True)
manual_vectorstore = FAISS.load_local("faiss_manual_index", embedding_model, allow_dangerous_deserialization=True)
problems_vectorstore = FAISS.load_local("faiss_problems_index", embedding_model, allow_dangerous_deserialization=True)
def search_query(query, k=2):
    """Search the manual and problems indexes for text relevant to *query*.

    Args:
        query: Free-text question to embed and match against the indexes.
        k: Number of nearest documents to retrieve from each index
           (default 2, matching the original hard-coded value).

    Returns:
        A ``(manual_output, problems_output)`` tuple of strings, each the
        matched documents' page content joined by blank lines — one entry
        per Gradio output textbox.
    """
    # Search the manuals index.
    manual_results = manual_vectorstore.similarity_search(query, k=k)
    manual_output = "\n\n".join(doc.page_content for doc in manual_results)
    # Search the known-problems index.
    problems_results = problems_vectorstore.similarity_search(query, k=k)
    problems_output = "\n\n".join(doc.page_content for doc in problems_results)
    # Return both results so Gradio can display them in separate boxes.
    return manual_output, problems_output
# Sample questions shown as clickable examples in the Gradio UI.
# Gradio expects one inner list per input component, hence the wrapping.
examples = [
    [question]
    for question in (
        "How to change the knife?",
        "What are the safety precautions for using the machine?",
        "How can I get help with the machine?",
    )
]
# Gradio interface: one free-text question in, two result panes out
# (manual matches and related known issues), wired to search_query.
iface = gr.Interface(
    fn=search_query,
    inputs=gr.Textbox(lines=2, placeholder="Enter your question here..."),
    outputs=[
        # Order matches search_query's return tuple: (manual, problems).
        gr.Textbox(label="Manual Results"),
        gr.Textbox(label="Issues Results")
    ],
    examples=examples,
    title="Manual Querying System",
    description="Enter a question to get relevant information extracted from the manual and the most common related issues."
)
# Start the Gradio app (blocks until the server is stopped).
# Fix: removed the trailing " |" page-scrape artifact that made this
# line a syntax error in the original file.
iface.launch()