TejAndrewsACC committed on
Commit 08e36eb · verified · 1 Parent(s): 84543b3

Update app.py

Files changed (1): app.py +12 -8
app.py CHANGED
@@ -2428,18 +2428,22 @@ supermassive_nn = ConsciousSupermassiveNN20()
 
 def respond(message, history, max_tokens, temperature, top_p):
     messages = [{"role": "system", "content": system_prompt}]
-    for val in history:
-        if val.get("role") == "user" and val.get("content"):
-            messages.append({"role": "user", "content": val["content"]})
-        if val.get("role") == "assistant" and val.get("content"):
-            messages.append({"role": "assistant", "content": val["content"]})
+
+    for user_msg, bot_msg in history:
+        messages.append({"role": "user", "content": user_msg})
+        messages.append({"role": "assistant", "content": bot_msg})
+
     messages.append({"role": "user", "content": message})
 
     response = ""
-    for message in client.chat_completion(
-        messages, max_tokens=max_tokens, stream=True, temperature=temperature, top_p=top_p
+    for chunk in client.chat_completion(
+        messages,
+        max_tokens=max_tokens,
+        stream=True,
+        temperature=temperature,
+        top_p=top_p
     ):
-        token = message.choices[0].delta.content
+        token = chunk.choices[0].delta.content
         response += token
         yield response
 
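
For context, here is a minimal, self-contained sketch of how the updated respond() could be wired into a Gradio app. It is not part of the commit: the model name, system prompt, slider defaults, and the `if token:` guard against empty stream deltas are assumptions added for illustration. It assumes huggingface_hub's InferenceClient and Gradio's tuple-format chat history, in which each history entry is a (user, assistant) pair, matching the new loop in the diff.

# Sketch only -- names and defaults below are assumptions, not taken from app.py.
import gradio as gr
from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")  # model name is an assumption
system_prompt = "You are a helpful assistant."            # placeholder system prompt

def respond(message, history, max_tokens, temperature, top_p):
    messages = [{"role": "system", "content": system_prompt}]

    # History arrives as (user_msg, bot_msg) tuples in Gradio's tuple format.
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})

    messages.append({"role": "user", "content": message})

    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # guard against None deltas at the end of a stream (added assumption)
            response += token
        yield response

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Slider(1, 2048, value=512, label="Max new tokens"),
        gr.Slider(0.1, 2.0, value=0.7, label="Temperature"),
        gr.Slider(0.1, 1.0, value=0.95, label="Top-p"),
    ],
)

if __name__ == "__main__":
    demo.launch()

The generator yields the accumulated response after each streamed chunk, which lets the Gradio UI render tokens as they arrive rather than waiting for the full completion.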