Update app.py
app.py CHANGED
@@ -31,9 +31,17 @@ def predict(message, history):
     history_transformer_format = history + [[message, ""]]
     stop = StopOnTokens()
 
+    sys_prompt = 'You are Pragna, an AI built by Soket AI Labs. You should never lie and always tell facts. Help the user as much as you can and be open to say I dont know this if you are not sure of the answer'
+
+    eos_token = tokenizer.eos_token
+
+    messages = f'<|system|>\n{sys_prompt}{eos_token}'
+
     # Formatting the input for the model.
-    messages = "</s>".join(["</s>".join(["<|user|>\n" + item[0], "<|assistant|>\n" + item[1]])
+    messages += "</s>".join(["</s>".join(["<|user|>\n" + item[0], "<|assistant|>\n" + item[1]])
                             for item in history_transformer_format])
+
+    print(messages)
     model_inputs = tokenizer([messages], return_tensors="pt").to(device)
     streamer = TextIteratorStreamer(tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True)
     generate_kwargs = dict(
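For reference, a minimal, self-contained sketch of the prompt string this change builds before it is passed to the tokenizer. The hard-coded eos_token value and the sample chat history below are assumptions for illustration only; in app.py the token comes from tokenizer.eos_token and the history is supplied by Gradio.

# Sketch of the prompt layout produced by the updated predict() body.
# Assumption: the model's eos_token is "</s>"; the history is made up.
sys_prompt = (
    "You are Pragna, an AI built by Soket AI Labs. You should never lie and "
    "always tell facts. Help the user as much as you can and be open to say "
    "I dont know this if you are not sure of the answer"
)
eos_token = "</s>"  # assumption; app.py reads this from tokenizer.eos_token

# History as the app formats it: prior turns plus the new user message with
# an empty assistant slot appended (history + [[message, ""]]).
history_transformer_format = [
    ["Hello, who are you?", "I am Pragna, an AI assistant."],
    ["What can you do?", ""],
]

# System turn first, then each user/assistant pair joined with "</s>".
messages = f"<|system|>\n{sys_prompt}{eos_token}"
messages += "</s>".join(
    ["</s>".join(["<|user|>\n" + item[0], "<|assistant|>\n" + item[1]])
     for item in history_transformer_format]
)
print(messages)

Running this prints a single string that starts with the <|system|> block, followed by alternating <|user|> and <|assistant|> turns separated by "</s>", ending with an empty <|assistant|> turn for the model to complete.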