cloneQ committed (verified)
Commit f9b63c0 · Parent(s): a71b5e6

Update app.py

Files changed (1): app.py (+2 -2)
app.py CHANGED
@@ -30,7 +30,7 @@ from transformers.utils import logging
 from transformers import AutoTokenizer, AutoModelForCausalLM  # isort: skip
 
 logger = logging.get_logger(__name__)
-model_name_or_path="internlm/internlm2_5-1_8b-chat"
+model_name_or_path="cloneQ/my_personal_assistant"
 
 @dataclass
 class GenerationConfig:
@@ -186,7 +186,7 @@ def load_model():
     # trust_remote_code=True).to(torch.bfloat16).cuda())
     model = AutoModelForCausalLM.from_pretrained(
         model_name_or_path,
-        trust_remote_code=True).to(torch.float32)
+        trust_remote_code=True).to("cpu")
     tokenizer = AutoTokenizer.from_pretrained(model_name_or_path,
                                               trust_remote_code=True)
     return model, tokenizer
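
For context, a minimal sketch of how the module-level setup and the patched load_model() read after this commit. Only the lines visible in the two hunks above are taken from the file; the exact layout of the rest of app.py is an assumption.

from transformers import AutoTokenizer, AutoModelForCausalLM  # isort: skip
from transformers.utils import logging

logger = logging.get_logger(__name__)
model_name_or_path = "cloneQ/my_personal_assistant"


def load_model():
    # Post-commit behaviour: load the fine-tuned checkpoint and keep it on the
    # CPU (the bfloat16/.cuda() path stays commented out in the original file).
    model = AutoModelForCausalLM.from_pretrained(
        model_name_or_path,
        trust_remote_code=True).to("cpu")
    tokenizer = AutoTokenizer.from_pretrained(model_name_or_path,
                                              trust_remote_code=True)
    return model, tokenizer

Note the semantics of the change: .to(torch.float32) casts the model's dtype, while .to("cpu") only sets the device, so after this commit the weights stay in whatever dtype from_pretrained loads them with.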