Update app.py
app.py CHANGED

@@ -6,10 +6,10 @@ from threading import Thread
 
 # Loading the tokenizer and model from Hugging Face's model hub.
 # model_name_or_path = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
-model_name_or_path = "
+model_name_or_path = "Flmc/DISC-MedLLM"
 tokenizer = AutoTokenizer.from_pretrained(model_name_or_path,trust_remote_code=True)
 # model = AutoModelForCausalLM.from_pretrained(model_name,trust_remote_code=True)
-model = AutoModel.from_pretrained(model_name_or_path, trust_remote_code=True)
+model = AutoModel.from_pretrained(model_name_or_path, trust_remote_code=True)
 
 # using CUDA for an optimal experience
 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
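
For readers who want the change in context rather than as a diff hunk, below is a minimal, self-contained sketch of the loading pattern the updated lines rely on. The imports, the .to(device) call, and eval() are assumptions about the surrounding app.py, which this hunk does not show; the model id is taken from the added line.

# Sketch of the load-and-place pattern used in the updated app.py.
# Assumption: torch and transformers are imported at the top of the file;
# with trust_remote_code=True, the checkpoint's own modeling code decides
# what class AutoModel actually instantiates.
import torch
from transformers import AutoModel, AutoTokenizer

model_name_or_path = "Flmc/DISC-MedLLM"  # model id from the "+" line in the diff

# Load tokenizer and model; trust_remote_code=True allows the repo's
# custom modeling code to run, which this checkpoint requires.
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, trust_remote_code=True)
model = AutoModel.from_pretrained(model_name_or_path, trust_remote_code=True)

# Prefer CUDA when available, otherwise fall back to CPU, and move the model there.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = model.to(device)
model.eval()

How a reply is actually generated from here depends on the interface exposed by the checkpoint's remote code (many chat-style checkpoints ship a chat helper rather than relying on plain generate); the hunk does not show that part, so it is left out of the sketch.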