pabloce committed on
Commit
57c740c
·
verified ·
1 Parent(s): f9547b0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -4
app.py CHANGED
@@ -28,12 +28,20 @@ def respond(
28
  ):
29
  torch.set_default_device("cuda")
30
 
31
- model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2", torch_dtype="auto",load_in_4bit=True,trust_remote_code=True)
32
- tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2", trust_remote_code=True)
33
-
 
 
 
 
 
 
 
34
  history_transformer_format = history + [[message, ""]]
35
 
36
- messages = system_prompt + "".join(["".join(["\n[INST]" + item[0], "[/INST]\n" + item[1] + "</s>"]) for item in history_transformer_format])
 
37
  input_ids = tokenizer([messages], return_tensors="pt").to('cuda')
38
  streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
39
  generate_kwargs = dict(
 
28
  ):
29
  torch.set_default_device("cuda")
30
 
31
+ tokenizer = AutoTokenizer.from_pretrained(
32
+ "cognitivecomputations/dolphin-2.8-mistral-7b-v02",
33
+ trust_remote_code=True
34
+ )
35
+ model = AutoModelForCausalLM.from_pretrained(
36
+ "cognitivecomputations/dolphin-2.8-mistral-7b-v02",
37
+ torch_dtype="auto",
38
+ load_in_4bit=True,
39
+ trust_remote_code=True
40
+ )
41
  history_transformer_format = history + [[message, ""]]
42
 
43
+ system_prompt = "<|im_start|>system\nYou are Dolphin, a helpful AI assistant.<|im_end|>"
44
+ messages = system_prompt + "".join(["".join(["\n<|im_start|>user\n" + item[0], "<|im_end|>\n<|im_start|>assistant\n" + item[1]]) for item in history_transformer_format])
45
  input_ids = tokenizer([messages], return_tensors="pt").to('cuda')
46
  streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
47
  generate_kwargs = dict(