Update app.py
app.py
CHANGED
@@ -63,7 +63,20 @@ def chat(history, system_message, max_tokens, temperature, top_p, top_k, repetit
     print("Starting chat...")
     updated_history = call_nvidia_api(history, max_tokens, temperature, top_p)
     return updated_history, ""
-
+
+def update_chatbot(message, chat_history, system_message, max_tokens, temperature, top_p):
+    if not chat_history or (chat_history and chat_history[-1]["role"] != "user"):
+        chat_history = user(message, chat_history, system_message if not chat_history else None)
+    else:
+        chat_history = user(message, chat_history)
+    chat_history = call_nvidia_api(chat_history, max_tokens, temperature, top_p)
+
+    formatted_chat_history = []
+    for msg in chat_history:
+        if msg["role"] == "user" or msg["role"] == "assistant":
+            formatted_chat_history.append([msg["content"].strip()])
+
+    return formatted_chat_history, chat_history
 # Gradio interface setup
 with gr.Blocks() as demo:
     with gr.Row():
@@ -88,46 +101,32 @@ with gr.Blocks() as demo:
     <p> <strong>HF Created by:</strong> @artificialguybr (<a href="https://twitter.com/artificialguybr">Twitter</a>)</p>
     <p> <strong>Discover more:</strong> <a href="https://artificialguy.com">artificialguy.com</a></p>
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    # Adjust the submit button call to include the new inputs
-    submit.click(
-        fn=update_chatbot,
-        inputs=[message, chat_history_state, system_msg, max_tokens, temperature, top_p],  # include the new inputs here
-        outputs=[chatbot, chat_history_state, message]
-    )
-
-
-    clear.click(
-        fn=clear_chat,
-        inputs=[chat_history_state, message],
-        outputs=[chat_history_state, message]
-    )
+    gr.Markdown(description)
+    chatbot = gr.Chatbot()
+    message = gr.Textbox(label="What do you want to chat about?", placeholder="Ask me anything.", lines=3)
+    system_msg = gr.Textbox(BASE_SYSTEM_MESSAGE, label="System Message", placeholder="System prompt.", lines=5, visible=False)
+    max_tokens = gr.Slider(20, 1024, label="Max Tokens", step=20, value=1024, interactive=True, visible=False)
+    temperature = gr.Slider(0.0, 1.0, label="Temperature", step=0.1, value=0.2, interactive=True, visible=False)
+    top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.7, interactive=True, visible=False)
+    chat_history_state = gr.State([])
+
+    submit = gr.Button(value="Send message")
+    clear = gr.Button(value="New topic")
+
+    additional_inputs = gr.Accordion("Additional Inputs", open=False)
+    with additional_inputs:
+        gr.Row([system_msg, max_tokens, temperature, top_p])
+
+    submit.click(
+        fn=update_chatbot,
+        inputs=[message, chat_history_state, system_msg, max_tokens, temperature, top_p],
+        outputs=[chatbot, chat_history_state]
+    )
+
+    clear.click(
+        fn=clear_chat,
+        inputs=[chat_history_state],
+        outputs=[chatbot, chat_history_state]
+    )
 
 demo.launch()
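For context on the new wiring: submit.click now feeds update_chatbot, and clear.click now maps clear_chat onto the Chatbot component and the history State, but the user and clear_chat helpers themselves are not shown in this diff. Below is a minimal sketch of what they would need to look like for these call sites to work; the bodies are hypothetical and inferred only from the calls above, not taken from the commit.

def user(message, chat_history, system_message=None):
    # Assumed helper: append the incoming user turn (and, on the first turn,
    # an optional system prompt) to the OpenAI-style role/content history
    # that call_nvidia_api consumes. Signature inferred from update_chatbot.
    chat_history = chat_history or []
    if system_message:
        chat_history.append({"role": "system", "content": system_message})
    chat_history.append({"role": "user", "content": message})
    return chat_history

def clear_chat(chat_history_state):
    # Assumed helper: return one value per registered output, i.e. an empty
    # Chatbot value and an empty history State, matching
    # outputs=[chatbot, chat_history_state] on clear.click.
    return [], []

Gradio maps returned values to the registered outputs positionally, so clear_chat's two empty lists reset both the visible chat and the stored history in a single click.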