dragonjump committed
Commit 8f52987 · Parent(s): 0f6bee9
udp
README.md
CHANGED
@@ -31,7 +31,7 @@ curl -G "https://lseanlon-qwen25-api.hf.space/predict" \
 
 
 
-curl -G "https://lseanlon-qwen25-api.hf.space/predict" \
+curl -G "https://lseanlon-qwen25-api.hf.space/chat" \
    --data-urlencode "prompt=why is the sky blue?"
 
 
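For reference, a hypothetical Python equivalent of the updated README example, calling the same /chat endpoint with the same prompt parameter. The response is printed as raw text because the README does not document a response schema; the timeout value is an arbitrary choice.

```python
# Sketch of the README's curl call using the requests library (assumption:
# the Space accepts plain GET requests exactly as the curl example shows).
import requests

resp = requests.get(
    "https://lseanlon-qwen25-api.hf.space/chat",
    params={"prompt": "why is the sky blue?"},  # same as --data-urlencode
    timeout=120,
)
print(resp.text)
```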
main.py
CHANGED
@@ -54,10 +54,8 @@ def chat( prompt: str = Query(...)):
         {"role": "user", "content": [ {"type": "text", "text": prompt}]},
     ]
     text = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
-    video_inputs = process_vision_info(messages)
     inputs = processor(
         text=[text],
-        videos=video_inputs,
         padding=True,
         return_tensors="pt",
     ).to(model.device)
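Taken together, the commit drops the video path from the /chat route, leaving a text-only endpoint. Below is a minimal sketch of how that route might look after this change; `model` and `processor` are assumed to be loaded elsewhere in main.py (not shown in the diff), and the generation/decoding step at the end is a typical inference pattern added here as an assumption, not part of the commit.

```python
# Minimal sketch of the text-only /chat route after this commit.
# Assumptions: `processor` and `model` are created earlier in main.py
# (e.g. via .from_pretrained(...)); the generate/decode step is not in the diff.
from fastapi import FastAPI, Query

app = FastAPI()  # assumed to exist in main.py


@app.get("/chat")
def chat(prompt: str = Query(...)):
    # Wrap the user prompt in the chat message format the processor expects.
    messages = [
        {"role": "user", "content": [{"type": "text", "text": prompt}]},
    ]
    text = processor.apply_chat_template(
        messages, tokenize=False, add_generation_prompt=True
    )
    # Text-only inputs: the process_vision_info()/videos= path was removed.
    inputs = processor(
        text=[text],
        padding=True,
        return_tensors="pt",
    ).to(model.device)
    # Assumed generation/decoding step (not shown in the diff).
    output_ids = model.generate(**inputs, max_new_tokens=256)
    trimmed = [out[len(inp):] for inp, out in zip(inputs.input_ids, output_ids)]
    return {"response": processor.batch_decode(trimmed, skip_special_tokens=True)[0]}
```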