Update app.py
app.py (CHANGED)
@@ -8,11 +8,11 @@ from transformers import pipeline,AutoModelForCausalLM, AutoTokenizer
 import torch


-model_name = "
-access_token = base64.b64decode('aGZfekJxa0pXZEh1bm90UEJXek1mdkdOc096WXdIVVZvYkRwcg==') # Replace with your token
+model_name = "bmas10/OG_llama2"
+#access_token = base64.b64decode('aGZfekJxa0pXZEh1bm90UEJXek1mdkdOc096WXdIVVZvYkRwcg==') # Replace with your token

 tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=access_token)
-model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto"
+model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")#, use_auth_token=access_token)
 # Load model and tokenizer


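Note that the new revision comments out the access_token assignment but still passes use_auth_token=access_token to AutoTokenizer.from_pretrained, so that line would raise a NameError as written. Below is a minimal sketch of how the loading block could look once the token is dropped entirely; the model name comes from the diff, while the generation snippet at the end is only an assumption about how app.py might use the model.

    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer

    model_name = "bmas10/OG_llama2"  # public repo named in the diff, so no token is needed

    # Load tokenizer and model without authentication
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype=torch.float16,  # half precision, as in the diff
        device_map="auto",          # let accelerate place layers on available devices
    )

    # Hypothetical usage: generate a short completion
    inputs = tokenizer("Hello, world", return_tensors="pt").to(model.device)
    outputs = model.generate(**inputs, max_new_tokens=50)
    print(tokenizer.decode(outputs[0], skip_special_tokens=True))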