Pipatpong committed on
Commit a7e39c6 · 1 Parent(s): bf4b616

modified: app.py

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -8,7 +8,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 checkpoint = "Pipatpong/vcm_santa"
 device = "cuda" if torch.cuda.is_available() else "cpu"
 tokenizer = AutoTokenizer.from_pretrained(checkpoint, trust_remote_code=True)
-model = AutoModelForCausalLM.from_pretrained(checkpoint, trust_remote_code=True, device_map="auto", load_in_8bit=True)
+model = AutoModelForCausalLM.from_pretrained(checkpoint, trust_remote_code=True, device_map="auto")
 
 def generate(text, max_length, num_return_sequences=1):
     inputs = tokenizer.encode(text, padding=False, add_special_tokens=False, return_tensors="pt")
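The change drops `load_in_8bit=True`, so the checkpoint now loads in full precision and no longer depends on `bitsandbytes` or a CUDA device, which is the usual reason for removing the flag on CPU-only hardware. If 8-bit loading is still wanted when a GPU is present, a minimal sketch using the `BitsAndBytesConfig` API from `transformers` could look like the following; the branching logic and parameter choices here are illustrative, not part of the committed app.py.

```python
# Sketch: re-enable 8-bit loading only when CUDA (and bitsandbytes) are usable,
# otherwise fall back to the plain full-precision load from the updated app.py.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

checkpoint = "Pipatpong/vcm_santa"
tokenizer = AutoTokenizer.from_pretrained(checkpoint, trust_remote_code=True)

if torch.cuda.is_available():
    # 8-bit quantization reduces GPU memory use; requires the bitsandbytes package.
    quant_config = BitsAndBytesConfig(load_in_8bit=True)
    model = AutoModelForCausalLM.from_pretrained(
        checkpoint,
        trust_remote_code=True,
        device_map="auto",
        quantization_config=quant_config,
    )
else:
    # CPU path: full-precision load, matching the line added in this commit.
    model = AutoModelForCausalLM.from_pretrained(
        checkpoint,
        trust_remote_code=True,
    )
```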