palbha committed on
Commit e5b235a · verified · 1 Parent(s): 42a1748

Update app.py

Files changed (1)
  1. app.py +10 -9
app.py CHANGED
@@ -38,15 +38,16 @@ def respond(message, history: list[tuple[str, str]], system_message, max_tokens,
     # If streaming is not supported, you can simply do:
     # complete_response = agent.chat_completion(messages, max_tokens, temperature, top_p)
     # yield complete_response
-    for token in agent.chat_completion(
-        messages,
-        max_tokens=max_tokens,
-        temperature=temperature,
-        top_p=top_p,
-        stream=True  # set to False if your agent does not support streaming
-    ):
-        complete_response += token
-        yield complete_response
+    # for token in agent.chat_completion(
+    #     messages,
+    #     max_tokens=max_tokens,
+    #     temperature=temperature,
+    #     top_p=top_p,
+    #     stream=True  # set to False if your agent does not support streaming
+    # ):
+    #     complete_response += token
+    complete_response = agent.run(messages)
+    yield complete_response
 
 # Step 3: Create the Gradio ChatInterface.
 demo = gr.ChatInterface(
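For context, here is a minimal sketch of how the respond function might look after this commit: the streamed chat_completion() loop is commented out and the whole reply comes from a single agent.run() call, yielded once. Only the two added lines (complete_response = agent.run(messages) and yield complete_response) come from the commit itself; the stub agent, the message-building code, and the ChatInterface controls below are assumptions, since they fall outside the visible hunk.

import gradio as gr

# Hypothetical stand-in for the real `agent` defined elsewhere in app.py;
# it only needs a run() method that accepts a list of chat messages.
class _EchoAgent:
    def run(self, messages):
        return f"(echo) {messages[-1]['content']}"

agent = _EchoAgent()


def respond(message, history: list[tuple[str, str]], system_message, max_tokens, temperature, top_p):
    # Build the message list the agent expects: system prompt, prior turns, new user message.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # After this commit the response is produced in one non-streaming call
    # and yielded once; max_tokens/temperature/top_p are no longer passed on.
    complete_response = agent.run(messages)
    yield complete_response


# Step 3: Create the Gradio ChatInterface. The additional inputs below mirror the
# usual Gradio chat template; the exact controls are assumptions, as the hunk cuts off here.
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"),
    ],
)

if __name__ == "__main__":
    demo.launch()

Because respond still yields (once) instead of returning, it keeps the generator interface gr.ChatInterface accepts, but the UI now shows the reply only after agent.run() finishes rather than token by token.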