richardkimsm89 committed on
Commit
2619751
·
verified ·
1 Parent(s): 743689c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -3
app.py CHANGED
@@ -16,20 +16,21 @@ def fn(
16
  top_p,
17
  ):
18
  #messages = [{"role": "system", "content": system_prompt}]
19
- messages = [{"role": "user", "content": prompt}]
 
20
 
21
  #for val in history:
22
  # if val[0]:
23
  # messages.append({"role": "user", "content": val[0]})
24
  # if val[1]:
25
  # messages.append({"role": "assistant", "content": val[1]})
26
- history.append({"role": "user", "content": prompt})
27
 
28
  #messages.append({"role": "user", "content": prompt})
29
 
30
  stream = client.chat.completions.create(
31
  model = model,
32
- messages = messages,
 
33
  max_tokens = max_tokens,
34
  temperature = temperature,
35
  top_p = top_p,
 
16
  top_p,
17
  ):
18
  #messages = [{"role": "system", "content": system_prompt}]
19
+ #messages = [{"role": "user", "content": prompt}]
20
+ history.append({"role": "user", "content": prompt})
21
 
22
  #for val in history:
23
  # if val[0]:
24
  # messages.append({"role": "user", "content": val[0]})
25
  # if val[1]:
26
  # messages.append({"role": "assistant", "content": val[1]})
 
27
 
28
  #messages.append({"role": "user", "content": prompt})
29
 
30
  stream = client.chat.completions.create(
31
  model = model,
32
+ #messages = messages,
33
+ messages = history,
34
  max_tokens = max_tokens,
35
  temperature = temperature,
36
  top_p = top_p,