Update app.py
app.py CHANGED
@@ -3,12 +3,13 @@ import torch
 from peft import PeftModel
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
+
 # Load tokenizer
-tokenizer = AutoTokenizer.from_pretrained("
+tokenizer = AutoTokenizer.from_pretrained("hackergeek98/gemma-finetuned")
 
 # Load base model on CPU with optimizations
 base_model = AutoModelForCausalLM.from_pretrained(
-    "
+    "hackergeek98/gemma-finetuned",
     torch_dtype=torch.bfloat16,  # Efficient memory usage
     low_cpu_mem_usage=True
 )
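The hunk imports PeftModel but does not show where the adapter is attached. A minimal sketch of how the rest of app.py might continue, reusing torch, tokenizer, and base_model from the lines above and assuming the adapter weights also live at hackergeek98/gemma-finetuned (the prompt and generation settings below are illustrative assumptions, not part of this commit):

# Sketch only: attach the PEFT adapter to the base model (adapter repo id is an assumption)
model = PeftModel.from_pretrained(base_model, "hackergeek98/gemma-finetuned")
model.eval()

# Run a short CPU generation to check that the model loads and responds
prompt = "Hello, how are you?"
inputs = tokenizer(prompt, return_tensors="pt")
with torch.no_grad():
    outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))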