Update app.py
app.py CHANGED
@@ -78,8 +78,6 @@ max_tokens = gr.Slider(20, 1024, label="Max Tokens", step=20, value=1024)
 temperature = gr.Slider(0.0, 1.0, label="Temperature", step=0.1, value=0.2)
 top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.7)

-
-# Gradio interface setup
 with gr.Blocks() as demo:
     with gr.Row():
         with gr.Column():
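Note: the ChatInterface wiring added in the next hunk also passes a system_msg component that is not visible in either hunk, so it is presumably defined alongside these sliders. A minimal sketch of what that definition might look like, with the label and default prompt being assumptions rather than anything taken from this commit:

# Hypothetical definition of the system_msg component referenced by the new
# ChatInterface wiring below; label and default text are guesses.
system_msg = gr.Textbox(label="System Prompt", value="You are a helpful assistant.", lines=2)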
@@ -100,24 +98,23 @@ with gr.Blocks() as demo:
     <p> <strong>HF Created by:</strong> @artificialguybr (<a href="https://twitter.com/artificialguybr">Twitter</a>)</p>
     <p> <strong>Discover more:</strong> <a href="https://artificialguy.com">artificialguy.com</a></p>
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    )
+    gr.Markdown(description)
+    chatbox = gr.Textbox(label="What do you want to chat about?", placeholder="Ask me anything.", lines=3)
+    chat_history_state = gr.State([])
+
+    chatbot = gr.ChatInterface(
+        fn=lambda message, history: update_chatbot(message, history, system_msg.value, max_tokens.value, temperature.value, top_p.value),
+        textbox=chatbox,  # Use the chatbox created within the Blocks context
+        additional_inputs=[system_msg, max_tokens, temperature, top_p],
+        title="LLAMA 2 70B Chatbot",
+        submit_btn="Submit",
+        clear_btn="🗑️ Clear",
+    )
+
+    chatbot.clear(
+        fn=clear_chat,
+        inputs=[chat_history_state, chatbox],
+        outputs=[chat_history_state, chatbox]
+    )

 demo.launch()
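Neither update_chatbot nor clear_chat appears in this diff, so the sketch below only illustrates the signatures that the new wiring seems to assume. The function bodies are placeholders (an echo instead of a real model call) and are not taken from the repository.

def update_chatbot(message, history, system_msg, max_tokens, temperature, top_p):
    # Assumed signature, mirroring the lambda passed to gr.ChatInterface.
    # A real implementation would call the LLAMA 2 70B backend with these
    # generation settings; here an echo stands in for the model call.
    return f"(echo) {message}"

def clear_chat(chat_history_state, chat_message):
    # Mirrors inputs/outputs=[chat_history_state, chatbox]: reset the stored
    # history and empty the textbox.
    return [], ""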