skoneru committed on
Commit
092a44d
·
verified ·
1 Parent(s): ff1f54d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -2
app.py CHANGED
@@ -121,15 +121,24 @@ def get_menu():
121
  def reply_bot(message, history):
122
  menu = get_menu()
123
 
 
 
124
 
125
  client = InferenceClient(model="https://8cc9-141-3-25-29.ngrok-free.app")
126
 
127
- prompt = "<s>[INST] <<SYS>>\nYou are multilingual chat bot that helps deciding what to eat ina german canteen. In the canteen there are different lines with names. Based on the menu and question, you suggest the user which line they should go to. You respond really briefly and do not generate long responses\n<</SYS>>\n\nMenu:\n" + menu + "\n" + message + " [/INST]"
 
 
 
 
 
 
 
128
 
129
  try:
130
  #answer = client.text_generation(prompt=prompt, max_new_tokens=512)
131
  answer = ""
132
- for token in client.text_generation(prompt=prompt, max_new_tokens=512, stream=True):
133
  answer+=token
134
  yield answer
135
  except:
 
121
  def reply_bot(message, history):
122
  menu = get_menu()
123
 
124
+ history = history[-2:]
125
+
126
 
127
  client = InferenceClient(model="https://8cc9-141-3-25-29.ngrok-free.app")
128
 
129
+ system_prompt = "<s>[INST] <<SYS>>\nYou are multilingual chat bot that helps deciding what to eat ina german canteen. In the canteen there are different lines with names. Based on the menu and question, you suggest the user which line they should go to. You respond really briefly and do not generate long responses\n<</SYS>>\n\nMenu:\n" + menu + "\n"
130
+
131
+ for human, ai in history:
132
+ system_prompt += human + " [/INST]" + ai + "</s><s>[INST]\n"
133
+
134
+ curr_prompt = system_prompt + message
135
+
136
+ #prompt = "<s>[INST] <<SYS>>\nYou are multilingual chat bot that helps deciding what to eat ina german canteen. In the canteen there are different lines with names. Based on the menu and question, you suggest the user which line they should go to. You respond really briefly and do not generate long responses\n<</SYS>>\n\nMenu:\n" + menu + "\n" + message + " [/INST]"
137
 
138
  try:
139
  #answer = client.text_generation(prompt=prompt, max_new_tokens=512)
140
  answer = ""
141
+ for token in client.text_generation(prompt=curr_prompt, max_new_tokens=512, stream=True):
142
  answer+=token
143
  yield answer
144
  except: