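"""Bilingual (English/Portuguese) Gradio chat UI for Llama 3.3 70B Instruct.

The app does not run the model locally; it forwards each message to the
aifeifei798/feifei-chat Space through gradio_client and displays the reply.
"""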
import gradio as gr
from typing import List, Dict
from gradio_client import Client
def create_chat_app():
# Language configurations
TRANSLATIONS = {
"en": {
"title": "🤖 Chat with Llama 3.3 70B",
"description": """
This is a chatbot based on the Llama 3.3 70B model. To use:
1. Type your message in the field below
2. Adjust parameters as needed
3. Press Enter to send
""",
"system_message": "You are a helpful and friendly assistant based on the Llama 3.3 70B model.",
"system_message_label": "System Message",
"max_tokens_label": "Maximum Tokens",
"temperature_label": "Temperature",
"top_p_label": "Top-p (Nucleus Sampling)",
"message_placeholder": "Type your message here...",
"info_section": """
### ℹ️ Information
- Model: Llama 3.3 70B Instruct
- Language: English/Portuguese
- Hosting: Hugging Face Spaces
For best performance, adjust the parameters according to your needs.
""",
"error_message": "Sorry, an error occurred: {}\nPlease check your connection and settings.",
"examples": [
"Hello! How are you?",
"Can you explain what artificial intelligence is?",
"What is the capital of Brazil?",
"Help me write a Python code to calculate Fibonacci."
]
},
"pt": {
"title": "🤖 Chat com Llama 3.3 70B em Português",
"description": """
Este é um chatbot baseado no modelo Llama 3.3 70B. Para usar:
1. Digite sua mensagem no campo abaixo
2. Ajuste os parâmetros conforme necessário
3. Pressione Enter para enviar
""",
"system_message": "Você é um assistente amigável e prestativo que responde em português. Você é baseado no modelo Llama 3.3 70B.",
"system_message_label": "Mensagem do Sistema",
"max_tokens_label": "Máximo de Tokens",
"temperature_label": "Temperatura",
"top_p_label": "Top-p (Amostragem Nucleus)",
"message_placeholder": "Digite sua mensagem aqui...",
"info_section": """
### ℹ️ Informações
- Modelo: Llama 3.3 70B Instruct
- Idioma: Português/Inglês
- Hospedagem: Hugging Face Spaces
Para melhor desempenho, ajuste os parâmetros de acordo com suas necessidades.
""",
"error_message": "Desculpe, ocorreu um erro: {}\nPor favor, verifique sua conexão e configurações.",
"examples": [
"Olá! Como você está?",
"Pode me explicar o que é inteligência artificial?",
"Qual é a capital do Brasil?",
"Me ajude a escrever um código em Python para calcular fibonacci."
]
}
}
def respond(
message: str,
chat_history: List[Dict],
system_message: str,
max_tokens: int,
temperature: float,
top_p: float,
language: str,
):
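        """Forward the user message (with system prompt and prior turns) to the
        remote Space and return the updated history plus an empty textbox."""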
try:
client = Client("aifeifei798/feifei-chat")
# Format conversation history
formatted_message = f"{system_message}\n\nConversation history:\n"
for msg in chat_history:
formatted_message += f"{msg['role']}: {msg['content']}\n"
formatted_message += f"User: {message}"
message_payload = {
"text": formatted_message,
"files": []
}
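            # Note: max_tokens, temperature and top_p configure the UI only; the
            # remote /chat endpoint is not known to accept them, so they are not
            # forwarded in this call.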
response = client.predict(
message=message_payload,
feifei_select=True,
additional_dropdown="meta-llama/Llama-3.3-70B-Instruct",
image_mod="pixtral",
api_name="/chat"
)
# Update chat history in the new format
chat_history.extend([
{"role": "user", "content": message},
{"role": "assistant", "content": response}
])
return chat_history, ""
except Exception as e:
error_msg = TRANSLATIONS[language]["error_message"].format(str(e))
chat_history.append({"role": "assistant", "content": error_msg})
return chat_history, ""
with gr.Blocks(theme=gr.themes.Soft()) as demo:
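        # Currently informational only: the handlers read the language radio directly.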
current_language = gr.State("en")
gr.Markdown(TRANSLATIONS["en"]["title"])
gr.Markdown(TRANSLATIONS["en"]["description"])
with gr.Group():
chatbot = gr.Chatbot(
value=[],
height=400,
type="messages" # Use the new messages format
)
message = gr.Textbox(
placeholder=TRANSLATIONS["en"]["message_placeholder"],
lines=3
)
with gr.Accordion("Settings", open=False):
system_message = gr.Textbox(
value=TRANSLATIONS["en"]["system_message"],
label=TRANSLATIONS["en"]["system_message_label"]
)
with gr.Row():
max_tokens = gr.Slider(
minimum=1,
maximum=4096,
value=2048,
step=1,
label=TRANSLATIONS["en"]["max_tokens_label"]
)
temperature = gr.Slider(
minimum=0.1,
maximum=2.0,
value=0.7,
step=0.1,
label=TRANSLATIONS["en"]["temperature_label"]
)
top_p = gr.Slider(
minimum=0.1,
maximum=1.0,
value=0.95,
step=0.05,
label=TRANSLATIONS["en"]["top_p_label"]
)
with gr.Row():
language_selector = gr.Radio(
choices=["en", "pt"],
value="en",
label="Language/Idioma",
interactive=True
)
clear = gr.Button("Clear")
gr.Markdown(TRANSLATIONS["en"]["info_section"])
gr.Examples(
examples=TRANSLATIONS["en"]["examples"],
inputs=message
)
# Event handlers
message.submit(
respond,
[message, chatbot, system_message, max_tokens, temperature, top_p, language_selector],
[chatbot, message]
)
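        # Reset both the conversation history and the input box.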
clear.click(lambda: ([], ""), outputs=[chatbot, message])
        # Update interface text when the language changes. Returning gr.update(...)
        # lets us change labels and placeholders, not just component values.
        def update_language(lang):
            trans = TRANSLATIONS[lang]
            return (
                gr.update(placeholder=trans["message_placeholder"]),
                gr.update(value=trans["system_message"], label=trans["system_message_label"]),
                gr.update(label=trans["max_tokens_label"]),
                gr.update(label=trans["temperature_label"]),
                gr.update(label=trans["top_p_label"])
            )
        language_selector.change(
            update_language,
            inputs=[language_selector],
            outputs=[message, system_message, max_tokens, temperature, top_p]
        )
return demo
if __name__ == "__main__":
demo = create_chat_app()
    demo.launch(share=False)