0.18 bfloat to float
app.py CHANGED
@@ -62,7 +62,7 @@ def load_model_a(model_id):
     tokenizer_a = AutoTokenizer.from_pretrained(model_id)
     model_a = AutoModelForCausalLM.from_pretrained(
         model_id,
-        torch_dtype=torch.bfloat16,
+        torch_dtype=torch.float16,
         device_map="auto",
         trust_remote_code=True,
     ).eval()
@@ -79,7 +79,7 @@ def load_model_b(model_id):
     tokenizer_b = AutoTokenizer.from_pretrained(model_id)
     model_b = AutoModelForCausalLM.from_pretrained(
         model_id,
-        torch_dtype=torch.bfloat16,
+        torch_dtype=torch.float16,
         device_map="auto",
         trust_remote_code=True,
     ).eval()
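A likely cause of the original runtime error is that bfloat16 is only supported on newer GPUs (Ampere and later), so loading with torch.bfloat16 fails or degrades on older hardware such as a T4, and hard-coding float16 sidesteps that. A minimal sketch of a more portable loader is shown below; the consolidated load_model name and the dtype check are assumptions and not part of app.py, while the loading arguments mirror the diff above.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

def load_model(model_id):
    # Hypothetical helper (not in app.py): pick bfloat16 when the GPU
    # supports it, otherwise fall back to float16, instead of hard-coding
    # one dtype as the commit does.
    if torch.cuda.is_available() and torch.cuda.is_bf16_supported():
        dtype = torch.bfloat16
    else:
        dtype = torch.float16

    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        torch_dtype=dtype,
        device_map="auto",
        trust_remote_code=True,
    ).eval()
    return tokenizer, model

Hard-coding float16, as the commit does, keeps the Space running on older GPUs, but a runtime check like the one above would retain bfloat16's wider dynamic range on hardware that supports it.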