Update app.py
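The change adds a generate_thinking_html() helper and has the streaming branches of generate() yield gr.HTML snippets, so the chat shows a "Thinking..." label, the partial response, and an animated progress bar while tokens arrive; the finished response is still yielded as plain text at the end.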
app.py CHANGED
@@ -129,6 +129,27 @@ def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
         seed = random.randint(0, MAX_SEED)
     return seed

+def generate_thinking_html(buffer: str) -> str:
+    """
+    Return an HTML snippet with a "Thinking..." label, an animated progress bar,
+    and the current buffered text.
+    """
+    return f'''
+    <div style="display: flex; align-items: center;">
+        <span style="margin-right: 10px; font-weight: bold;">Thinking...</span>
+        <div style="flex: 1; margin-right: 10px; white-space: pre-wrap;">{buffer}</div>
+        <div style="width: 110px; height: 5px; background: #e0e0e0; position: relative; overflow: hidden;">
+            <div style="width: 100%; height: 100%; background: #1890ff; animation: progressAnimation 1.5s linear infinite;"></div>
+        </div>
+    </div>
+    <style>
+    @keyframes progressAnimation {{
+        0% {{ transform: translateX(-100%); }}
+        100% {{ transform: translateX(100%); }}
+    }}
+    </style>
+    '''
+
 @spaces.GPU(duration=60, enable_queue=True)
 def generate_image_fn(
     prompt: str,
@@ -256,14 +277,14 @@ def generate(
         thread.start()

         buffer = ""
-        yield
+        # Initial yield: progress bar with no text yet.
+        yield gr.HTML(generate_thinking_html(buffer))
         for new_text in streamer:
             buffer += new_text
             buffer = buffer.replace("<|im_end|>", "")
             time.sleep(0.01)
-            yield buffer
+            yield gr.HTML(generate_thinking_html(buffer))
     else:
-
         input_ids = tokenizer.apply_chat_template(conversation, add_generation_prompt=True, return_tensors="pt")
         if input_ids.shape[1] > MAX_INPUT_TOKEN_LENGTH:
             input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:]
@@ -285,11 +306,15 @@ def generate(
         t.start()

         outputs = []
+        # Initial yield: progress bar with no text yet.
+        yield gr.HTML(generate_thinking_html(""))
         for new_text in streamer:
             outputs.append(new_text)
-
+            current_text = "".join(outputs)
+            yield gr.HTML(generate_thinking_html(current_text))

         final_response = "".join(outputs)
+        # Final update: yield the final response as plain text.
         yield final_response

         # If TTS was requested, convert the final response to speech.
@@ -313,7 +338,6 @@ demo = gr.ChatInterface(
         ["@image Chocolate dripping from a donut against a yellow background, in the style of brocore, hyper-realistic"],
         ["Write a Python function to check if a number is prime."],
         ["@tts2 What causes rainbows to form?"],
-
     ],
     cache_examples=False,
     type="messages",
@@ -326,4 +350,4 @@ demo = gr.ChatInterface(
 )

 if __name__ == "__main__":
-    demo.queue(max_size=20).launch(share=True)
+    demo.queue(max_size=20).launch(share=True)
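For context, here is a minimal, self-contained sketch of the pattern the diff relies on: a Gradio streaming chat function that yields gr.HTML progress snippets while text accumulates, then finishes with a plain-text yield. It is not the Space's full app.py; the word-by-word streamer and the simplified thinking_html helper are stand-ins, and it assumes a Gradio version whose ChatInterface accepts component values from a generator, as the change above relies on.

import time
import gradio as gr

def thinking_html(buffer: str) -> str:
    # Simplified stand-in for the generate_thinking_html helper in the diff above.
    return f'<div><b>Thinking...</b><div style="white-space: pre-wrap;">{buffer}</div></div>'

def chat_fn(message, history):
    # Stand-in for the Space's token streamer: emit the reply word by word.
    words = ("Streamed reply to: " + message).split()
    buffer = ""
    yield gr.HTML(thinking_html(buffer))      # progress view before any text arrives
    for word in words:
        buffer += word + " "
        time.sleep(0.05)
        yield gr.HTML(thinking_html(buffer))  # progress view plus the partial text
    yield buffer.strip()                      # final update as plain text

demo = gr.ChatInterface(chat_fn, type="messages")

if __name__ == "__main__":
    demo.queue(max_size=20).launch()

ChatInterface treats each yield from the generator as a replacement for the in-progress reply, so the HTML progress view is overwritten by the final plain-text answer, matching the diff's final "yield final_response".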