Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -54,11 +54,19 @@ def stream_chat(
 ):
     print(f'message: {message}')
     print(f'history: {history}')
+
+    conversation = []
+    for prompt, answer in history:
+        conversation.extend([
+            {"role": "user", "content": prompt},
+            {"role": "assistant", "content": answer},
+        ])
+
     torch.manual_seed(0)
     resp, history = model.chat(
         tokenizer,
         query = message,
-        history =
+        history = conversation,
         max_length = max_new_tokens,
         do_sample = False if temperature == 0 else True,
         top_p = top_p,
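The substance of the change: Gradio hands `stream_chat` its `history` as a list of (user, assistant) pairs, while the diff suggests this model's `model.chat()` expects a list of role/content message dicts, so the commit flattens the pairs into a `conversation` list before the call. Below is a minimal, self-contained sketch of that conversion; the helper name `pairs_to_messages` is illustrative and not part of the Space's code.

    # Sketch of the conversion this commit adds, assuming Gradio-style
    # (user, assistant) tuple history. Helper name is hypothetical.
    def pairs_to_messages(history):
        """Flatten (prompt, answer) pairs into role/content message dicts."""
        conversation = []
        for prompt, answer in history:
            conversation.extend([
                {"role": "user", "content": prompt},
                {"role": "assistant", "content": answer},
            ])
        return conversation

    # Example: two completed turns become four messages, in order.
    assert pairs_to_messages([("hi", "hello"), ("2+2?", "4")]) == [
        {"role": "user", "content": "hi"},
        {"role": "assistant", "content": "hello"},
        {"role": "user", "content": "2+2?"},
        {"role": "assistant", "content": "4"},
    ]

Note also the pre-existing `do_sample = False if temperature == 0 else True` guard: at temperature 0 the call falls back to greedy decoding, presumably because sampling with a zero temperature is undefined (it would divide the logits by zero).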