AFischer1985 committed commit 0cb4458 (verified) · 1 Parent(s): 6a168f4

Update model

Files changed (1): run.py (+4 -2)
run.py CHANGED
@@ -24,7 +24,8 @@ from huggingface_hub import InferenceClient #multimodal_response
 # Specify models for text generation and embeddings
 #---------------------------------------------------
 
-myModel="mistralai/Mixtral-8x7b-instruct-v0.1"
+#myModel="mistralai/Mixtral-8x7b-instruct-v0.1"
+myModel="princeton-nlp/gemma-2-9b-it-SimPO"
 #mod="mistralai/Mixtral-8x7b-instruct-v0.1"
 #tok=AutoTokenizer.from_pretrained(mod) #,token="hf_...")
 #cha=[{"role":"system","content":"A"},{"role":"user","content":"B"},{"role":"assistant","content":"C"}]
@@ -81,7 +82,8 @@ def format_prompt0(message, history):
 #-------------------------------------------------------------------------
 
 def format_prompt(message, history=None, system=None, RAGAddon=None, system2=None, zeichenlimit=None,historylimit=4, removeHTML=False,
-                  startOfString="<s>", template0=" [INST] {system} [/INST] </s>",template1=" [INST] {message} [/INST]",template2=" {response}</s>"):
+                  startOfString="",template0="<start_of_turn>user\n{system}<end_of_turn>\n<start_of_turn>model\n<end_of_turn>\n",template1="<start_of_turn>user\n{message}<end_of_turn>\n<start_of_turn>model\n",template2="<end_of_turn>\n"):
+                  #startOfString="<s>", template0=" [INST] {system} [/INST] </s>",template1=" [INST] {message} [/INST]",template2=" {response}</s>"):
     if zeichenlimit is None: zeichenlimit=1000000000 # :-)
     prompt = ""
     if RAGAddon is not None:
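
The commit swaps both the target model and the hand-built chat template: Mixtral's "[INST] ... [/INST]" markup is replaced by Gemma's <start_of_turn>/<end_of_turn> turn markers, and startOfString drops the "<s>" BOS token. Below is a minimal, self-contained sketch of how such template strings can be assembled into a prompt and sent through huggingface_hub's InferenceClient. The build_prompt helper and its history loop are illustrative assumptions, not the actual body of format_prompt in run.py (which additionally handles RAGAddon, historylimit, and HTML removal).

from huggingface_hub import InferenceClient

# Template strings as introduced by this commit (Gemma-style turn markers).
startOfString = ""
template0 = "<start_of_turn>user\n{system}<end_of_turn>\n<start_of_turn>model\n<end_of_turn>\n"
template1 = "<start_of_turn>user\n{message}<end_of_turn>\n<start_of_turn>model\n"
template2 = "<end_of_turn>\n"  # closes each completed assistant turn

def build_prompt(message, history=None, system=None):
    # Illustrative re-implementation, NOT the run.py original: concatenate an
    # optional system turn, prior (user, assistant) pairs, and the new user
    # message, leaving the final model turn open for generation.
    prompt = startOfString
    if system:
        prompt += template0.format(system=system)
    for user_msg, assistant_msg in (history or []):
        prompt += template1.format(message=user_msg)
        prompt += assistant_msg + template2
    prompt += template1.format(message=message)
    return prompt

# Hypothetical usage with the newly configured model; run.py may call the
# Inference API differently.
client = InferenceClient("princeton-nlp/gemma-2-9b-it-SimPO")
prompt = build_prompt("Summarize RAG in one sentence.",
                      system="You are a concise assistant.")
print(client.text_generation(prompt, max_new_tokens=128))

One detail worth noting: unlike the old Mistral-style template2 (" {response}</s>"), the new template2 contains no {response} placeholder, so the assistant text is presumably appended before the closing <end_of_turn> marker elsewhere in format_prompt; the sketch above does exactly that.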