Spaces:
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -6,7 +6,7 @@ from decord import cpu, VideoReader, bridge
|
|
6 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
7 |
from transformers import BitsAndBytesConfig
|
8 |
|
9 |
-
MODEL_PATH = "THUDM/cogvlm2-llama3-chat-19B"
|
10 |
DEVICE = 'cuda' if torch.cuda.is_available() else 'cpu'
|
11 |
TORCH_TYPE = torch.bfloat16 if torch.cuda.is_available() and torch.cuda.get_device_capability()[0] >= 8 else torch.float16
|
12 |
|
@@ -265,8 +265,6 @@ Please respond with the most likely delay reason based on the analysis in the fo
|
|
265 |
- **Visual Evidence**: [Describe specific observations from the video that support your decision]
|
266 |
- **Reasoning**: [Explain why this delay reason best matches the observed evidence]
|
267 |
- **Alternative Analysis**: [Briefly explain why other reasons are less likely]
|
268 |
-
|
269 |
-
Important: Ensure your analysis is grounded in visual observations, avoiding assumptions. Provide a clear explanation if no relevant evidence is observed.
|
270 |
"""
|
271 |
|
272 |
|
|
|
6 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
7 |
from transformers import BitsAndBytesConfig
|
8 |
|
9 |
+
MODEL_PATH = "THUDM/cogvlm2-video-llama3-chat"
|
10 |
DEVICE = 'cuda' if torch.cuda.is_available() else 'cpu'
|
11 |
TORCH_TYPE = torch.bfloat16 if torch.cuda.is_available() and torch.cuda.get_device_capability()[0] >= 8 else torch.float16
|
12 |
|
|
|
265 |
- **Visual Evidence**: [Describe specific observations from the video that support your decision]
|
266 |
- **Reasoning**: [Explain why this delay reason best matches the observed evidence]
|
267 |
- **Alternative Analysis**: [Briefly explain why other reasons are less likely]
|
|
|
|
|
268 |
"""
|
269 |
|
270 |
|