import gradio as gr
from typing import List, Dict
from gradio_client import Client
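
# Bilingual (English/Portuguese) Gradio chat UI that forwards each message to the
# "aifeifei798/feifei-chat" Space, requesting the Llama 3.3 70B Instruct model.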
def create_chat_app():
    # Language configurations
    TRANSLATIONS = {
        "en": {
            "title": "🤖 Chat with Llama 3.3 70B",
            "description": """
            This is a chatbot based on the Llama 3.3 70B model. To use:
            1. Type your message in the field below
            2. Adjust parameters as needed
            3. Press Enter to send
            """,
            "system_message": "You are a helpful and friendly assistant based on the Llama 3.3 70B model.",
            "system_message_label": "System Message",
            "max_tokens_label": "Maximum Tokens",
            "temperature_label": "Temperature",
            "top_p_label": "Top-p (Nucleus Sampling)",
            "message_placeholder": "Type your message here...",
            "info_section": """
            ### ℹ️ Information
            - Model: Llama 3.3 70B Instruct
            - Language: English/Portuguese
            - Hosting: Hugging Face Spaces

            For best performance, adjust the parameters according to your needs.
            """,
            "error_message": "Sorry, an error occurred: {}\nPlease check your connection and settings.",
            "examples": [
                "Hello! How are you?",
                "Can you explain what artificial intelligence is?",
                "What is the capital of Brazil?",
                "Help me write Python code to calculate Fibonacci."
            ]
        },
        "pt": {
            "title": "🤖 Chat com Llama 3.3 70B em Português",
            "description": """
            Este é um chatbot baseado no modelo Llama 3.3 70B. Para usar:
            1. Digite sua mensagem no campo abaixo
            2. Ajuste os parâmetros conforme necessário
            3. Pressione Enter para enviar
            """,
            "system_message": "Você é um assistente amigável e prestativo que responde em português. Você é baseado no modelo Llama 3.3 70B.",
            "system_message_label": "Mensagem do Sistema",
            "max_tokens_label": "Máximo de Tokens",
            "temperature_label": "Temperatura",
            "top_p_label": "Top-p (Amostragem Nucleus)",
            "message_placeholder": "Digite sua mensagem aqui...",
            "info_section": """
            ### ℹ️ Informações
            - Modelo: Llama 3.3 70B Instruct
            - Idioma: Português/Inglês
            - Hospedagem: Hugging Face Spaces

            Para melhor desempenho, ajuste os parâmetros de acordo com suas necessidades.
            """,
            "error_message": "Desculpe, ocorreu um erro: {}\nPor favor, verifique sua conexão e configurações.",
            "examples": [
                "Olá! Como você está?",
                "Pode me explicar o que é inteligência artificial?",
                "Qual é a capital do Brasil?",
                "Me ajude a escrever um código em Python para calcular Fibonacci."
            ]
        }
    }
    def respond(
        message: str,
        chat_history: List[Dict],
        system_message: str,
        max_tokens: int,
        temperature: float,
        top_p: float,
        language: str,
    ):
        """Send the user's message to the backend Space and return the updated history."""
        try:
            client = Client("aifeifei798/feifei-chat")

            # Format the system prompt and prior turns as a single text prompt
            formatted_message = f"{system_message}\n\nConversation history:\n"
            for msg in chat_history:
                formatted_message += f"{msg['role']}: {msg['content']}\n"
            formatted_message += f"User: {message}"

            message_payload = {
                "text": formatted_message,
                "files": []
            }

            # NOTE: max_tokens, temperature and top_p come from the UI but are not
            # forwarded in the call below, so they currently have no effect.
            response = client.predict(
                message=message_payload,
                feifei_select=True,
                additional_dropdown="meta-llama/Llama-3.3-70B-Instruct",
                image_mod="pixtral",
                api_name="/chat"
            )

            # Update chat history in the messages format
            chat_history.extend([
                {"role": "user", "content": message},
                {"role": "assistant", "content": response}
            ])
            return chat_history, ""
        except Exception as e:
            error_msg = TRANSLATIONS[language]["error_message"].format(str(e))
            chat_history.append({"role": "assistant", "content": error_msg})
            return chat_history, ""
    with gr.Blocks(theme=gr.themes.Soft()) as demo:
        current_language = gr.State("en")

        gr.Markdown(TRANSLATIONS["en"]["title"])
        gr.Markdown(TRANSLATIONS["en"]["description"])

        with gr.Group():
            chatbot = gr.Chatbot(
                value=[],
                height=400,
                type="messages"  # Use the new messages format
            )
            message = gr.Textbox(
                placeholder=TRANSLATIONS["en"]["message_placeholder"],
                lines=3
            )

        with gr.Accordion("Settings", open=False):
            system_message = gr.Textbox(
                value=TRANSLATIONS["en"]["system_message"],
                label=TRANSLATIONS["en"]["system_message_label"]
            )
            with gr.Row():
                max_tokens = gr.Slider(
                    minimum=1,
                    maximum=4096,
                    value=2048,
                    step=1,
                    label=TRANSLATIONS["en"]["max_tokens_label"]
                )
                temperature = gr.Slider(
                    minimum=0.1,
                    maximum=2.0,
                    value=0.7,
                    step=0.1,
                    label=TRANSLATIONS["en"]["temperature_label"]
                )
                top_p = gr.Slider(
                    minimum=0.1,
                    maximum=1.0,
                    value=0.95,
                    step=0.05,
                    label=TRANSLATIONS["en"]["top_p_label"]
                )

        with gr.Row():
            language_selector = gr.Radio(
                choices=["en", "pt"],
                value="en",
                label="Language/Idioma",
                interactive=True
            )
            clear = gr.Button("Clear")

        gr.Markdown(TRANSLATIONS["en"]["info_section"])

        gr.Examples(
            examples=TRANSLATIONS["en"]["examples"],
            inputs=message
        )
        # Event handlers
        message.submit(
            respond,
            [message, chatbot, system_message, max_tokens, temperature, top_p, language_selector],
            [chatbot, message]
        )
        clear.click(lambda: ([], ""), outputs=[chatbot, message])

        # Update interface text when the language changes; use gr.update so we can
        # change placeholders and labels instead of overwriting component values
        def update_language(lang):
            trans = TRANSLATIONS[lang]
            return (
                gr.update(placeholder=trans["message_placeholder"]),
                gr.update(value=trans["system_message"], label=trans["system_message_label"]),
                gr.update(label=trans["max_tokens_label"]),
                gr.update(label=trans["temperature_label"]),
                gr.update(label=trans["top_p_label"]),
            )

        language_selector.change(
            update_language,
            inputs=[language_selector],
            outputs=[message, system_message, max_tokens, temperature, top_p]
        )
    return demo

if __name__ == "__main__":
    demo = create_chat_app()
    demo.launch(share=False)