Fix max_memory keys (use integer 0) or drop max_memory
app.py (CHANGED)
```diff
@@ -46,7 +46,7 @@ model = Llama4ForConditionalGeneration.from_pretrained(
     torch_dtype=torch.bfloat16,
     device_map="auto",
     max_memory={  # cap GPU usage to ~11 GiB
-        "0": "11GiB",
+        0: "11GiB",
         "cpu": "200GiB"
     },
     quantization_config=quant_config,
```
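For context, `max_memory` is forwarded to Accelerate's device-map planner, whose keys are device identifiers: integer CUDA indices (`0`, `1`, ...) plus the strings `"cpu"` and `"disk"`. A string key like `"0"` is not treated as GPU 0, so the intended 11 GiB cap would not apply; hence the integer key here. The alternative named in the commit title also works: dropping `max_memory` entirely lets `device_map="auto"` plan against whatever memory is available. Below is a minimal sketch of the corrected call; the checkpoint name and the quantization setup are assumptions for illustration, since the rest of app.py is not shown in this diff.

```python
import torch
from transformers import BitsAndBytesConfig, Llama4ForConditionalGeneration

# Assumed quantization config; the original quant_config is not shown in the diff.
quant_config = BitsAndBytesConfig(load_in_4bit=True)

model = Llama4ForConditionalGeneration.from_pretrained(
    "meta-llama/Llama-4-Scout-17B-16E-Instruct",  # assumed checkpoint for illustration
    torch_dtype=torch.bfloat16,
    device_map="auto",
    # max_memory keys must be integer GPU indices or the strings "cpu"/"disk";
    # the string "0" (the bug this commit fixes) would not match GPU 0.
    max_memory={
        0: "11GiB",       # cap GPU 0 at ~11 GiB
        "cpu": "200GiB",  # allow offload to CPU RAM
    },
    quantization_config=quant_config,
)
```

After loading, inspecting `model.hf_device_map` shows which device each module landed on, which is a quick way to confirm the GPU cap actually took effect.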