skoneru committed
Commit 471ea72 · verified · 1 Parent(s): 0c03e37

Update app.py

Files changed (1)
  1. app.py +12 -11
app.py CHANGED
@@ -126,18 +126,19 @@ def reply_bot(message, history):
 
     client = InferenceClient(model="https://8cc9-141-3-25-29.ngrok-free.app")
 
-    system_prompt = "<s>[INST] <<SYS>>\nYou are multilingual chat bot that helps deciding what to eat ina german canteen. In the canteen there are different lines with names. Based on the menu and question, you suggest the user which line they should go to. You respond really briefly and do not generate long responses. You respond in the same language user talks to you\n<</SYS>>\n\nMenu:\n" + menu + "\n"
-
-    if len(history) != 0:
-        for human, ai in history:
-            system_prompt += human + " [/INST]" + ai + "</s><s>[INST]\n"
-
-        curr_prompt = system_prompt + message + " [/INST]"
-    else:
-        curr_prompt = "<s>[INST] <<SYS>>\nYou are multilingual chat bot that helps deciding what to eat ina german canteen. In the canteen there are different lines with names. Based on the menu and question, you suggest the user which line they should go to. You respond really briefly and do not generate long responses. You respond in the same language user talks to you\n<</SYS>>\n\nMenu:\n" + menu + "\n" + message + " [/INST]"
-
+    system_prompt = "<s>[INST] <<SYS>>\nYou are multilingual chat bot that helps deciding what to eat in a german canteen. In the canteen, there are different lines with names. Based on the menu and question, you suggest the user which line they should go to. You respond really briefly and do not generate long responses.\n<</SYS>>\n\nMenu:\n" + menu + "\n"
+
+    curr_prompt = system_prompt + message + " [/INST]"
+    #if len(history) != 0:
+    #    for human, ai in history:
+    #        system_prompt += human + " [/INST]" + ai + "</s><s>[INST]\n"
+    #
+    #    curr_prompt = system_prompt + message + " [/INST]"
+    #else:
+    #    curr_prompt = "<s>[INST] <<SYS>>\nYou are multilingual chat bot that helps deciding what to eat ina german canteen. In the canteen there are different lines with names. Based on the menu and question, you suggest the user which line they should go to. You respond really briefly and do not generate long responses. You respond in the same language user talks to you\n<</SYS>>\n\nMenu:\n" + menu + "\n" + message + " [/INST]"
+    #
     try:
-        #answer = client.text_generation(prompt=prompt, max_new_tokens=512)
+        #answer = client.text_generation(prompt=prompt, max_new_tokens=512)
         answer = ""
         for token in client.text_generation(prompt=curr_prompt, max_new_tokens=512, stream=True):
             answer+=token
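
For context, here is a minimal sketch of what reply_bot looks like after this commit. It only assembles the Llama-2 [INST]/<<SYS>> prompt from the system instruction, the menu, and the latest user message, then streams tokens from the endpoint. The placeholder menu string, the return statement, and the except-branch fallback are assumptions for illustration; they are not visible in this hunk.

# Sketch of reply_bot after this commit (assumed surrounding code marked as such).
from huggingface_hub import InferenceClient

menu = "Linie 1: Spaghetti Bolognese\nLinie 2: Gemüsecurry"  # placeholder menu, assumed module-level string

def reply_bot(message, history):
    client = InferenceClient(model="https://8cc9-141-3-25-29.ngrok-free.app")

    # Llama-2 chat format: the system instruction sits in a <<SYS>> block inside [INST].
    system_prompt = (
        "<s>[INST] <<SYS>>\n"
        "You are multilingual chat bot that helps deciding what to eat in a german canteen. "
        "In the canteen, there are different lines with names. Based on the menu and question, "
        "you suggest the user which line they should go to. You respond really briefly and do "
        "not generate long responses.\n"
        "<</SYS>>\n\nMenu:\n" + menu + "\n"
    )
    curr_prompt = system_prompt + message + " [/INST]"

    try:
        # Stream tokens from the text-generation endpoint and accumulate them into one reply.
        answer = ""
        for token in client.text_generation(prompt=curr_prompt, max_new_tokens=512, stream=True):
            answer += token
        return answer  # assumed: the hunk ends before the function's return
    except Exception:
        # Placeholder fallback; the original error handling is not shown in this hunk.
        return "Sorry, the canteen assistant is unavailable right now."

Commenting out the history block means each turn is answered from the system prompt, the menu, and the latest message alone, which keeps the prompt short at the cost of conversational memory.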