skoneru committed on
Commit
f82cef4
·
verified ·
1 Parent(s): b46c336

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -9
app.py CHANGED
@@ -119,7 +119,6 @@ def get_menu():
119
  return menu
120
 
121
  def reply_bot(message, history):
122
- print(history)
123
  menu = get_menu()
124
 
125
  history = []
@@ -130,15 +129,16 @@ def reply_bot(message, history):
130
  system_prompt = "<s>[INST] <<SYS>>\nYou are multilingual chat bot that helps deciding what to eat in a german canteen. In the canteen, there are different lines with names. Based on the menu and question, you suggest the user which line they should go to. You respond really briefly and do not generate long responses.\n<</SYS>>\n\nMenu:\n" + menu + "\n"
131
 
132
  curr_prompt = system_prompt + message + " [/INST]"
133
- #if len(history) != 0:
134
- # for human, ai in history:
135
- # system_prompt += human + " [/INST]" + ai + "</s><s>[INST]\n"
136
- #
137
- # curr_prompt = system_prompt + message + " [/INST]"
138
- #else:
139
- # curr_prompt = "<s>[INST] <<SYS>>\nYou are multilingual chat bot that helps deciding what to eat ina german canteen. In the canteen there are different lines with names. Based on the menu and question, you suggest the user which line they should go to. You respond really briefly and do not generate long responses. You respond in the same language user talks to you\n<</SYS>>\n\nMenu:\n" + menu + "\n" + message + " [/INST]"
140
- #
141
  try:
 
142
  #answer = client.text_generation(prompt=prompt, max_new_tokens=512)
143
  answer = ""
144
  for token in client.text_generation(prompt=curr_prompt, max_new_tokens=512, stream=True):
 
119
  return menu
120
 
121
  def reply_bot(message, history):
 
122
  menu = get_menu()
123
 
124
  history = []
 
129
  system_prompt = "<s>[INST] <<SYS>>\nYou are multilingual chat bot that helps deciding what to eat in a german canteen. In the canteen, there are different lines with names. Based on the menu and question, you suggest the user which line they should go to. You respond really briefly and do not generate long responses.\n<</SYS>>\n\nMenu:\n" + menu + "\n"
130
 
131
  curr_prompt = system_prompt + message + " [/INST]"
132
+ if len(history) != 0:
133
+ for human, ai in history:
134
+ system_prompt += human + " [/INST]" + ai + "</s><s>[INST]\n"
135
+
136
+ curr_prompt = system_prompt + message + " [/INST]"
137
+ else:
138
+ curr_prompt = "<s>[INST] <<SYS>>\nYou are multilingual chat bot that helps deciding what to eat ina german canteen. In the canteen there are different lines with names. Based on the menu and question, you suggest the user which line they should go to. You respond really briefly and do not generate long responses. You respond in the same language user talks to you\n<</SYS>>\n\nMenu:\n" + menu + "\n" + message + " [/INST]"
139
+
140
  try:
141
+ print(curr_prompt)
142
  #answer = client.text_generation(prompt=prompt, max_new_tokens=512)
143
  answer = ""
144
  for token in client.text_generation(prompt=curr_prompt, max_new_tokens=512, stream=True):