Update app.py
app.py CHANGED

@@ -56,17 +56,12 @@ def bot_streaming(message, history, max_new_tokens=250):
     inputs = processor(text=texts, images=images, return_tensors="pt").to("cuda")
 
     generation_kwargs = dict(inputs, max_new_tokens=max_new_tokens)
-    generated_text = ""
 
-
-
-
-
-
-        buffer += new_text
-        generated_text_without_prompt = buffer
-        time.sleep(0.01)
-        yield buffer
+    with torch.no_grad():
+        output = model.generate(**inputs, **generation_kwargs)
+    output_text = processor.decode(output[0][inputs['input_ids'].shape[1]:]).replace('<|eot_id|>', '')
+
+    yield output_text
 
 
 demo = gr.ChatInterface(fn=bot_streaming, title="LLaVA-CoT",
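In short, the update removes the incremental streaming path, which yielded a growing buffer with a short time.sleep(0.01) between chunks, and replaces it with a single blocking model.generate call under torch.no_grad() that decodes everything after the prompt and yields the full reply once. One caveat: the committed hunk expands inputs twice at the generate call, once directly and once through generation_kwargs (built as dict(inputs, max_new_tokens=max_new_tokens)), and Python rejects duplicate keyword arguments, so that call fails with a TypeError as written. Below is a minimal sketch of the intended non-streaming path; the helper name generate_reply is made up for illustration, and model, processor, texts, and images are assumed to be the objects the rest of app.py already builds.

import torch

def generate_reply(model, processor, texts, images, max_new_tokens=250):
    # Hypothetical helper; model, processor, texts, and images come from the
    # app's existing setup code and are not defined in this hunk.
    inputs = processor(text=texts, images=images, return_tensors="pt").to("cuda")

    # One blocking generate call instead of a background streaming thread.
    with torch.no_grad():
        output = model.generate(**inputs, max_new_tokens=max_new_tokens)

    # Keep only the newly generated tokens (everything after the prompt)
    # and strip the end-of-turn marker.
    reply = processor.decode(output[0][inputs["input_ids"].shape[1]:])
    return reply.replace("<|eot_id|>", "")

Passing max_new_tokens directly, instead of re-expanding the processor inputs through generation_kwargs, keeps the call equivalent to the committed version while avoiding the duplicate-keyword collision.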