@torch.autocast(device_type="cuda", dtype=torch.bfloat16)
app.py
CHANGED
@@ -95,8 +95,7 @@ def get_som_response(instruction, image_som):
 
     inputs = magma_processor(images=[image_som], texts=prompt, return_tensors="pt")
     inputs['pixel_values'] = inputs['pixel_values'].unsqueeze(0)
-    #
-    inputs['pixel_values'] = inputs['pixel_values'].to(torch.bfloat16)
+    # inputs['pixel_values'] = inputs['pixel_values'].to(torch.bfloat16)
     inputs['image_sizes'] = inputs['image_sizes'].unsqueeze(0)
     inputs = inputs.to("cuda")
 
@@ -156,7 +155,7 @@ def get_qa_response(instruction, image):
 
 @spaces.GPU
 @torch.inference_mode()
-
+@torch.autocast(device_type="cuda", dtype=torch.bfloat16)
 def process(
     image_input,
     box_threshold,
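The change swaps the explicit .to(torch.bfloat16) cast on pixel_values for a torch.autocast decorator on process, so eligible CUDA ops inside the function run in bfloat16 automatically. A minimal sketch of that pattern in isolation, with an illustrative run_model function and model/pixel_values names that are not from app.py:

import torch

# torch.autocast can also be applied as a decorator; matmul-like CUDA ops
# inside the wrapped function then run in bfloat16 without manual casts.
@torch.inference_mode()
@torch.autocast(device_type="cuda", dtype=torch.bfloat16)
def run_model(model, pixel_values):
    # The input can stay float32; autocast picks the dtype per op.
    # Only the device move still has to be done by hand.
    return model(pixel_values.to("cuda"))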