Update app.py
app.py
CHANGED
@@ -70,17 +70,23 @@ def chatbot_submit(message, chat_history, system_message, max_tokens_val, temper
 
     # Call the NVIDIA API to generate a response
     chat_history = call_nvidia_api(chat_history, max_tokens_val, temperature_val, top_p_val)
-    return chat_history
 
-    #
-
-
-
-
+    # Extract only the assistant's message from the response
+    if chat_history and chat_history[-1]["role"] == "assistant":
+        assistant_message = chat_history[-1]["content"]
+    else:
+        assistant_message = "Desculpe, ocorreu um erro ao gerar a resposta."
+
+    return assistant_message
 
 
 with gr.Blocks() as demo:
     chat_history_state = gr.State([])
+    system_msg = gr.Textbox(BASE_SYSTEM_MESSAGE, label="System Message", placeholder="System prompt.", lines=5)
+    max_tokens = gr.Slider(20, 1024, label="Max Tokens", step=20, value=1024)
+    temperature = gr.Slider(0.0, 1.0, label="Temperature", step=0.1, value=0.2)
+    top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.7)
+
     chatbot = gr.ChatInterface(
         fn=chatbot_submit,
         additional_inputs=[system_msg, max_tokens, temperature, top_p],