import gradio as gr
from langchain_community.vectorstores import FAISS
from langchain_community.embeddings import HuggingFaceEmbeddings
import zipfile
import os
import openai
import torch
# Read the OpenAI API key from the environment.
openai.api_key = os.getenv("OPENAI_API_KEY")

# ZIP archives and target directories for the manual and problems indexes.
zip_path_m = "faiss_manual_index.zip"
faiss_manual_index = "faiss_manual_index"
zip_path_p = "faiss_problems_index.zip"
faiss_problems_index = "faiss_problems_index"

# Unpack each archive the first time the app runs (skip if already extracted).
for archive, target_dir in ((zip_path_m, faiss_manual_index), (zip_path_p, faiss_problems_index)):
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
        if os.path.exists(archive):
            with zipfile.ZipFile(archive, 'r') as zf:
                zf.extractall(target_dir)

# Multilingual sentence-embedding model shared by both indexes.
embedding_model = HuggingFaceEmbeddings(model_name="sentence-transformers/LaBSE")

# Load the two FAISS vectorstores from disk.
# NOTE(review): allow_dangerous_deserialization unpickles index metadata —
# safe only because these archives ship with the app, not from users.
manual_vectorstore = FAISS.load_local(faiss_manual_index, embedding_model, allow_dangerous_deserialization=True)
problems_vectorstore = FAISS.load_local(faiss_problems_index, embedding_model, allow_dangerous_deserialization=True)
# Funzione per la ricerca e il riassunto
def search_and_summarize(query):
    """Search both FAISS indexes for *query* and summarize the hits with OpenAI.

    Parameters
    ----------
    query : str
        Free-text question typed by the user.

    Returns
    -------
    tuple[str, str, str]
        (manual excerpts, problem excerpts, OpenAI-generated summary).
    """
    # Top-2 nearest chunks from each index, joined as plain text.
    manual_results = manual_vectorstore.similarity_search(query, k=2)
    manual_output = "\n\n".join(doc.page_content for doc in manual_results)
    problems_results = problems_vectorstore.similarity_search(query, k=2)
    problems_output = "\n\n".join(doc.page_content for doc in problems_results)
    combined_text = f"Manual Results:\n{manual_output}\n\nProblems Results:\n{problems_output}"

    # Prompt (in Italian) asking for a summary of the retrieved passages.
    input_text = f"Riassumi le seguenti informazioni:\n{combined_text}\n\nRiassunto:"

    # BUGFIX: text-davinci-003 was retired by OpenAI (Jan 2024); its documented
    # drop-in replacement on the Completions endpoint is gpt-3.5-turbo-instruct.
    # `model=` is the supported parameter name (`engine=` is deprecated).
    response = openai.Completion.create(
        model="gpt-3.5-turbo-instruct",
        prompt=input_text,
        max_tokens=300,
        temperature=0.7,
    )
    summary = response.choices[0].text.strip()

    return manual_output, problems_output, summary
# Interfaccia Gradio
# Canned questions shown under the input box.
example_questions = [
    ["How to change the knife?"],
    ["What are the safety precautions for using the machine?"],
    ["How can I get help with the machine?"],
]

# Gradio UI: one question box in, three text panels out
# (manual hits, problem hits, OpenAI summary).
iface = gr.Interface(
    fn=search_and_summarize,
    inputs=gr.Textbox(lines=2, placeholder="Enter your question here..."),
    outputs=[
        gr.Textbox(label="Manual Results"),
        gr.Textbox(label="Issues Results"),
        gr.Textbox(label="Summary by OpenAI"),
    ],
    examples=example_questions,
    title="Manual Querying System with OpenAI Summarization",
    description="Enter a question to get information from the manual and the common issues, summarized by OpenAI.",
)

# Start the Gradio app.
iface.launch()