Update app.py
Browse files
app.py
CHANGED
@@ -62,19 +62,19 @@ def call_nvidia_api(history, max_tokens, temperature, top_p):
|
|
62 |
def update_chatbot(message, chat_history,
                   system_message=None, max_tokens_val=None,
                   temperature_val=None, top_p_val=None):
    """Append the user's message to the history and generate a model reply.

    Parameters
    ----------
    message : str
        The user's new message.
    chat_history : list[dict]
        OpenAI-style message dicts (``{"role": ..., "content": ...}``).
    system_message, max_tokens_val, temperature_val, top_p_val : optional
        Live values supplied by ``gr.ChatInterface`` via ``additional_inputs``.
        When omitted (legacy two-argument call), each falls back to the
        component's ``.value``.

    Returns
    -------
    list[dict]
        The updated chat history including the assistant's reply.
    """
    print("Updating chatbot...")

    # BUG FIX: reading ``component.value`` at call time yields the component's
    # *initial* value, not what the user currently has in the UI. Gradio passes
    # the live values as extra positional args when the components are listed
    # in ``additional_inputs`` — accept them as parameters and only fall back
    # to ``.value`` for backward-compatible two-argument calls.
    if system_message is None:
        system_message = system_msg.value
    if max_tokens_val is None:
        max_tokens_val = max_tokens.value
    if temperature_val is None:
        temperature_val = temperature.value
    if top_p_val is None:
        top_p_val = top_p.value

    # Attach the system message only when the last entry is not already a
    # pending user turn (i.e. we are starting a fresh exchange).
    if not chat_history or chat_history[-1]["role"] != "user":
        chat_history = user(message, chat_history, system_message)
    else:
        chat_history = user(message, chat_history)

    chat_history = call_nvidia_api(chat_history, max_tokens_val,
                                   temperature_val, top_p_val)
    return chat_history
|
75 |
|
76 |
-
|
77 |
-
|
78 |
# Gradio interface components shared by the chat callback.
# System prompt editor (seeded with the base system message) and the
# max-token budget slider.
system_msg = gr.Textbox(
    value=BASE_SYSTEM_MESSAGE,
    label="System Message",
    placeholder="System prompt.",
    lines=5,
)
max_tokens = gr.Slider(
    minimum=20,
    maximum=1024,
    label="Max Tokens",
    step=20,
    value=1024,
)
|
@@ -84,7 +84,7 @@ top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.7)
|
|
84 |
with gr.Blocks() as demo:
|
85 |
chat_history_state = gr.State([])
|
86 |
chatbot = gr.ChatInterface(
|
87 |
-
fn=update_chatbot,
|
88 |
additional_inputs=[system_msg, max_tokens, temperature, top_p],
|
89 |
title="LLAMA 70B Free Demo",
|
90 |
description="""
|
|
|
62 |
def update_chatbot(message, chat_history,
                   system_message=None, max_tokens_val=None,
                   temperature_val=None, top_p_val=None):
    """Append the user's message to the history and generate a model reply.

    Parameters
    ----------
    message : str
        The user's new message.
    chat_history : list[dict]
        OpenAI-style message dicts (``{"role": ..., "content": ...}``).
    system_message, max_tokens_val, temperature_val, top_p_val : optional
        Live values supplied by ``gr.ChatInterface`` via ``additional_inputs``.
        When omitted (legacy two-argument call), each falls back to the
        component's ``.value``.

    Returns
    -------
    list[dict]
        The updated chat history including the assistant's reply.
    """
    print("Updating chatbot...")

    # BUG FIX: ``component.value`` is only the component's *initial* value.
    # The ChatInterface wires these components as ``additional_inputs``, so
    # their live values arrive as extra positional arguments — accept them as
    # parameters (the previous lambda wrapper silently discarded them) and
    # fall back to ``.value`` only for backward-compatible two-argument calls.
    if system_message is None:
        system_message = system_msg.value
    if max_tokens_val is None:
        max_tokens_val = max_tokens.value
    if temperature_val is None:
        temperature_val = temperature.value
    if top_p_val is None:
        top_p_val = top_p.value

    # Attach the system message only when the last entry is not already a
    # pending user turn (i.e. we are starting a fresh exchange).
    if not chat_history or chat_history[-1]["role"] != "user":
        chat_history = user(message, chat_history, system_message)
    else:
        chat_history = user(message, chat_history)

    chat_history = call_nvidia_api(chat_history, max_tokens_val,
                                   temperature_val, top_p_val)
    return chat_history
|
77 |
|
|
|
|
|
78 |
# Gradio interface components shared by the chat callback.
# System prompt editor (seeded with the base system message) and the
# max-token budget slider.
system_msg = gr.Textbox(
    value=BASE_SYSTEM_MESSAGE,
    label="System Message",
    placeholder="System prompt.",
    lines=5,
)
max_tokens = gr.Slider(
    minimum=20,
    maximum=1024,
    label="Max Tokens",
    step=20,
    value=1024,
)
|
|
|
84 |
with gr.Blocks() as demo:
|
85 |
chat_history_state = gr.State([])
|
86 |
chatbot = gr.ChatInterface(
|
87 |
+
fn=lambda message, history: update_chatbot(message, history),
|
88 |
additional_inputs=[system_msg, max_tokens, temperature, top_p],
|
89 |
title="LLAMA 70B Free Demo",
|
90 |
description="""
|