Update app.py
app.py
CHANGED
@@ -21,7 +21,7 @@ prompt = [
 
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 
-inputs = tokenizer.apply_chat_template(prompt, tokenize=True, add_generation_prompt=True, return_tensors="pt")
+inputs = tokenizer.apply_chat_template(prompt, tokenize=True, add_generation_prompt=True, return_tensors="pt") #.cuda()
 
 model = AutoModelForCausalLM.from_pretrained(
     model_id,
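The appended "#.cuda()" comment hints that the tokenized inputs may later be moved to the GPU before generation. A minimal sketch of how that line could be used with conditional device placement (the model_id value and the prompt contents below are placeholders, not taken from app.py):

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "your-model-id"  # placeholder; app.py defines the real model_id earlier
prompt = [{"role": "user", "content": "Hello!"}]  # hypothetical chat prompt for illustration

tokenizer = AutoTokenizer.from_pretrained(model_id)
inputs = tokenizer.apply_chat_template(
    prompt, tokenize=True, add_generation_prompt=True, return_tensors="pt"
)

# Move the token ids to the GPU only when one is available,
# mirroring the commented-out .cuda() hint in the diff.
if torch.cuda.is_available():
    inputs = inputs.cuda()

model = AutoModelForCausalLM.from_pretrained(model_id)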