Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
def respond(message, history, max_tokens, temperature, top_p):
    """Stream a chat completion for *message*, using *history* as context.

    Builds the message list (system prompt, replayed history turns, then the
    new user message) and streams the model's reply, yielding the accumulated
    text after every received token so the UI can render it incrementally.

    Parameters
    ----------
    message : str
        The new user message.
    history : iterable of (str, str)
        Prior (user_message, assistant_message) pairs from the chat UI.
        NOTE(review): assumes tuple-style history, not the newer
        messages-dict format — confirm against the Gradio version in use.
    max_tokens : int
        Maximum number of tokens to generate.
    temperature : float
        Sampling temperature passed through to the inference client.
    top_p : float
        Nucleus-sampling probability mass.

    Yields
    ------
    str
        The response text accumulated so far (grows with each chunk).
    """
    # `system_prompt` and `client` are module-level objects defined
    # elsewhere in app.py.
    messages = [{"role": "system", "content": system_prompt}]

    # Replay prior turns so the model sees the whole conversation.
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})

    messages.append({"role": "user", "content": message})

    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        # FIX: streamed responses include chunks whose delta.content is None
        # (e.g. the final stop chunk); the original `response += token` would
        # raise TypeError on those. Coerce None to "".
        token = chunk.choices[0].delta.content or ""
        response += token
        yield response