import gradio as gr
from huggingface_hub import InferenceClient

"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
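
# Note: `client` is created here but not called inside `reply` below. For reference,
# a minimal sketch of how the hosted model could be queried, assuming a recent
# `huggingface_hub` release with chat-completion support:
#
#   completion = client.chat_completion(
#       messages=[{"role": "user", "content": "Hello!"}],
#       max_tokens=128,
#   )
#   print(completion.choices[0].message.content)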

from utils import Translation, search_error_in_excel
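
# The `utils` module is not shown here. Based on how it is used below, it is assumed
# to expose roughly the following interface (hypothetical sketch, not the actual code):
#
#   class Translation:
#       @staticmethod
#       def detect_language(text: str) -> str: ...   # e.g. returns a language code such as "en"
#       @staticmethod
#       def translatef(text: str, target_lang: str) -> str: ...
#
#   def search_error_in_excel(query: str) -> tuple[str, str]:
#       """Returns (response_text, infotype), where infotype may be "protocolo"."""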

def reply(message, history):
    # Detect the original language of the incoming message
    original_lang = Translation.detect_language(message)
    # Translate the message into Spanish
    translated_message = Translation.translatef(message, "es")
    # Look up the matching information in the Excel sheet
    excel_response, infotype = search_error_in_excel(translated_message)
    # Translate the response back into the original language
    if original_lang != "es":
        # response_translator = Translation(excel_response, original_lang)
        if infotype == "protocolo":
            final_response = Translation.translatef(excel_response, original_lang)
        else:
            final_response = excel_response
    else:
        final_response = excel_response
    return final_response
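
# Example flow (hypothetical input): reply("I get error E-102 on the device", [])
# detects "en", translates the message to Spanish, looks it up in the Excel sheet,
# and, if the match is of type "protocolo", translates the answer back to English.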

# Set up the chatbot interface
demo = gr.ChatInterface(fn=reply, title="Multilingual-TedCas Bot")
demo.launch(share=False)