Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -239,16 +239,31 @@ def generate_response(message, history, max_tokens, temperature, top_p):
     yield "No response generated."

 def chat_interface(message, history, max_tokens, temperature, top_p):
-    """Main chat interface with improved streaming"""
+    """Main chat interface with improved streaming for messages format"""
     if not message.strip():
         return history, ""

-    # Add user message to history
-    history.append(
+    # Add user message to history (messages format)
+    history.append({"role": "user", "content": message})

     # Generate response with streaming
-
-
+    # Convert messages format to tuples for generate_response compatibility
+    history_tuples = []
+    for i in range(0, len(history) - 1, 2):  # Process pairs
+        user_msg = history[i] if i < len(history) else None
+        assistant_msg = history[i + 1] if i + 1 < len(history) else None
+
+        if user_msg and user_msg.get("role") == "user":
+            user_content = user_msg.get("content", "")
+            assistant_content = assistant_msg.get("content", "") if assistant_msg and assistant_msg.get("role") == "assistant" else ""
+            history_tuples.append([user_content, assistant_content])
+
+    # Add assistant message placeholder
+    history.append({"role": "assistant", "content": ""})
+
+    # Generate response with streaming
+    for partial_response in generate_response(message, history_tuples, max_tokens, temperature, top_p):
+        history[-1]["content"] = partial_response
         yield history, ""

     return history, ""
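The conversion added above assumes strictly alternating user/assistant turns, with the just-appended user message left unpaired at the end. A minimal standalone sketch of that same messages-to-tuples step, runnable outside the Space (the helper name messages_to_tuples is illustrative, not part of app.py):

def messages_to_tuples(history):
    """Collapse role/content dicts into [user, assistant] pairs, mirroring the loop added in chat_interface."""
    pairs = []
    for i in range(0, len(history) - 1, 2):  # assumes user/assistant alternation
        user_msg = history[i]
        assistant_msg = history[i + 1] if i + 1 < len(history) else None
        if user_msg.get("role") == "user":
            assistant_content = (
                assistant_msg.get("content", "")
                if assistant_msg and assistant_msg.get("role") == "assistant"
                else ""
            )
            pairs.append([user_msg.get("content", ""), assistant_content])
    return pairs

example = [
    {"role": "user", "content": "Hi"},
    {"role": "assistant", "content": "Hello!"},
    {"role": "user", "content": "Explain top-p sampling."},  # current turn; app.py passes it separately as `message`
]
print(messages_to_tuples(example))  # [['Hi', 'Hello!']] -- the trailing user turn is skipped by the loop bound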
@@ -610,10 +625,10 @@ with gr.Blocks(
     chatbot = gr.Chatbot(
         [],
         elem_id="chatbot",
-        bubble_full_width=False,
         height=650,
         show_copy_button=True,
         show_share_button=True,
+        type='messages',  # Use openai-style messages format
         avatar_images=(
             "https://raw.githubusercontent.com/gradio-app/gradio/main/gradio/themes/utils/profile_avatar.png",
             "🤖"
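With type='messages', the Chatbot value is a list of role/content dicts rather than [user, assistant] pairs, which is exactly the shape chat_interface now appends; the dropped bubble_full_width argument is deprecated in recent Gradio releases. An illustrative value (the message text is made up):

history = [
    {"role": "user", "content": "What does top-p do?"},
    {"role": "assistant", "content": "It samples from the smallest token set whose probabilities sum to p."},
]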
@@ -623,8 +638,7 @@ with gr.Blocks(
         latex_delimiters=[
             {"left": "$$", "right": "$$", "display": True},
             {"left": "$", "right": "$", "display": False}
-        ]
-        elem_classes=["modern-chatbot"]
+        ]
     )

     # Input Section
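As rendered in this diff, each removed elem_classes argument here and in the hunks below follows the preceding keyword argument with no separating comma, which is a SyntaxError inside a call; the commit resolves it by dropping the argument rather than adding the comma. A minimal reproduction under that assumption (hypothetical, not from app.py):

import gradio as gr

# Invalid: missing comma between keyword arguments -> SyntaxError at import time
# btn = gr.Button(
#     "🗑️ Clear",
#     variant="secondary",
#     size="sm"
#     elem_classes=["clear-button"]
# )

# Valid alternatives: keep the comma, or drop the argument as this commit does
btn = gr.Button("🗑️ Clear", variant="secondary", size="sm", elem_classes=["clear-button"])
btn_without_classes = gr.Button("🗑️ Clear", variant="secondary", size="sm")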
@@ -637,21 +651,18 @@ with gr.Blocks(
             autofocus=True,
             scale=8,
             lines=1,
-            max_lines=5
-            elem_classes=["modern-input"]
+            max_lines=5
         )
         with gr.Column(scale=1, min_width=120):
             send_btn = gr.Button(
                 "🚀 Send",
                 variant="primary",
-                size="lg"
-                elem_classes=["send-button"]
+                size="lg"
             )
             clear_btn = gr.Button(
                 "🗑️ Clear",
                 variant="secondary",
-                size="sm"
-                elem_classes=["clear-button"]
+                size="sm"
             )

     # Settings Sidebar
@@ -669,8 +680,7 @@ with gr.Blocks(
                 value=2048,
                 step=1,
                 label="🎯 Max Tokens",
-                info="Maximum number of tokens to generate"
-                elem_classes=["modern-slider"]
+                info="Maximum number of tokens to generate"
             )

             temperature = gr.Slider(
@@ -679,8 +689,7 @@ with gr.Blocks(
                 value=0.7,
                 step=0.1,
                 label="🌡️ Temperature",
-                info="Controls randomness in generation"
-                elem_classes=["modern-slider"]
+                info="Controls randomness in generation"
             )

             top_p = gr.Slider(
@@ -689,16 +698,14 @@ with gr.Blocks(
                 value=0.9,
                 step=0.05,
                 label="🎲 Top-p (Nucleus Sampling)",
-                info="Controls diversity of generation"
-                elem_classes=["modern-slider"]
+                info="Controls diversity of generation"
             )

             with gr.Row():
                 stop_btn = gr.Button(
                     "⏹️ Stop Generation",
                     variant="stop",
-                    size="sm"
-                    elem_classes=["stop-button"]
+                    size="sm"
                 )

             # Model Information Panel
@@ -755,8 +762,7 @@ with gr.Blocks(
         ],
         inputs=msg,
         label="",
-        examples_per_page=6
-        elem_classes=["modern-examples"]
+        examples_per_page=6
     )

     # Event handlers
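The diff stops just before the event handlers, which are unchanged in this commit and not shown. For orientation only, a hypothetical wiring consistent with the components above; the max-tokens slider's variable name is an assumption, since its assignment line is outside these hunks:

# Sketch only: sits inside the existing `with gr.Blocks(...)` context, after
# chatbot, msg, send_btn, clear_btn, stop_btn and the sliders are created.
# Assumes the max-tokens slider is bound to a variable named `max_tokens`.
# chat_interface is a generator yielding (history, ""), so it streams into the
# chatbot while clearing the textbox on each update.
submit_event = msg.submit(
    chat_interface,
    inputs=[msg, chatbot, max_tokens, temperature, top_p],
    outputs=[chatbot, msg],
)
click_event = send_btn.click(
    chat_interface,
    inputs=[msg, chatbot, max_tokens, temperature, top_p],
    outputs=[chatbot, msg],
)
clear_btn.click(lambda: ([], ""), outputs=[chatbot, msg])
stop_btn.click(None, cancels=[submit_event, click_event])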
|