Update app.py
app.py CHANGED
@@ -22,21 +22,10 @@ def clear_chat():
     chat_history_state.value = []
     chatbot.textbox.value = ""
 
-def user(message, history):
-    """Updates the chat history with the user message."""
-    print(f"User message: {message}")
-    history = history or []
-    history.append({"role": "user", "content": message})
-    return history
-
 def call_nvidia_api(history, system_message, max_tokens, temperature, top_p):
     """Calls the NVIDIA API to generate a response."""
-    # Transform the history from a list of lists into the format expected by the API
     messages = [{"role": "system", "content": system_message}]
-    for msg in history:
-        messages.append({"role": "user", "content": msg[0]})
-        if msg[1]:  # Make sure we do not add empty assistant messages
-            messages.append({"role": "assistant", "content": msg[1]})
+    messages.extend([{"role": "user", "content": h[0]} for h in history])
 
     payload = {
         "messages": messages,
@@ -45,6 +34,7 @@ def call_nvidia_api(history, system_message, max_tokens, temperature, top_p):
         "max_tokens": max_tokens,
         "stream": False
     }
+    print(f"Payload enviado: {payload}")
     session = requests.Session()
     response = session.post(INVOKE_URL, headers=headers, json=payload)
     while response.status_code == 202:
@@ -53,26 +43,27 @@ def call_nvidia_api(history, system_message, max_tokens, temperature, top_p):
         response = session.get(fetch_url, headers=headers)
     response.raise_for_status()
     response_body = response.json()
+    print(f"Payload recebido: {response_body}")
     if response_body.get("choices"):
         assistant_message = response_body["choices"][0]["message"]["content"]
-
-
+        return assistant_message
+    else:
+        return "Desculpe, ocorreu um erro ao gerar a resposta."
 
 def chatbot_submit(message, chat_history, system_message, max_tokens_val, temperature_val, top_p_val):
     """Submits the user message to the chatbot and updates the chat history."""
-
-
+    print("Updating chatbot...")
+
+    # Add the user message to the history for display
+    chat_history.append([message, ""])
 
     # Call the NVIDIA API to generate a response
-
+    assistant_message = call_nvidia_api(chat_history, system_message, max_tokens_val, temperature_val, top_p_val)
 
-    #
-
-        assistant_message = updated_history[-1][1]
-    else:
-        assistant_message = "Desculpe, ocorreu um erro ao gerar a resposta."
+    # Update the history with the assistant's response
+    chat_history[-1][1] = assistant_message
 
-    return assistant_message,
+    return assistant_message, chat_history
 
 chat_history_state = gr.State([])
 system_msg = gr.Textbox(BASE_SYSTEM_MESSAGE, label="System Message", placeholder="System prompt.", lines=5)