Pinkstack committed on
Commit
00f746f
·
verified ·
1 Parent(s): 4aa6128

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +29 -26
app.py CHANGED
@@ -11,22 +11,23 @@ def respond(
11
  max_tokens: int,
12
  temperature: float,
13
  top_p: float,
 
14
  ) -> Iterator[str]:
15
  messages = [{"role": "system", "content": system_message}]
16
-
17
  # Add history to messages
18
  for user_msg, assistant_msg in history:
19
  if user_msg:
20
  messages.append({"role": "user", "content": user_msg})
21
  if assistant_msg:
22
  messages.append({"role": "assistant", "content": assistant_msg})
23
-
24
  # Add current message
25
  messages.append({"role": "user", "content": message})
26
-
27
  # Initialize response
28
  response = ""
29
-
30
  # Stream the response
31
  try:
32
  for chunk in client.chat_completion(
@@ -36,6 +37,8 @@ def respond(
36
  temperature=temperature,
37
  top_p=top_p,
38
  ):
 
 
39
  if chunk.choices[0].delta.content is not None:
40
  token = chunk.choices[0].delta.content
41
  response += token
@@ -81,15 +84,12 @@ details[open] summary:after {
81
 
82
  # Create Gradio interface
83
  with gr.Blocks(css=css) as demo:
84
- gr.Markdown("## Chat with <span style='color: #d026ff'>Superthoughts</span> lite! (1.7B)")
85
  gr.Markdown("**Note:** First response may take a moment to initialize. Subsequent responses will be faster.")
86
-
87
- chatbot = gr.Chatbot(height=600, type='messages')
88
- with gr.Row():
89
- msg = gr.Textbox(label="Your message", placeholder="Type your message here...", scale=9)
90
- stop = gr.Button("Stop", scale=1)
91
-
92
-
93
  with gr.Accordion("Advanced Settings", open=False):
94
  system_message = gr.Textbox(
95
  value="You must act in a conversational matter and always include <think> ... </think> <output> </output> tokens.",
@@ -119,39 +119,42 @@ with gr.Blocks(css=css) as demo:
119
 
120
  def user(user_message: str, history: list) -> tuple[str, list]:
121
  """Add user message to history"""
122
- return "", history + [{"role": "user", "content": user_message}, {"role": "assistant", "content": None}]
123
 
124
- def bot(history: list, system_message: str, max_tokens: int, temperature: float, top_p: float) -> Iterator[list]:
125
  """Generate and stream bot responses"""
126
- user_message = history[-2]["content"] # Get the last user message
127
- for partial_response in respond(user_message, history[:-2], system_message, max_tokens, temperature, top_p):
128
- history[-1]["content"] = partial_response
 
 
129
  yield history
130
 
131
- # Create event handlers
132
- submit_event = msg.submit(
133
  user,
134
  [msg, chatbot],
135
  [msg, chatbot],
136
  queue=False
137
  ).then(
138
  bot,
139
- [chatbot, system_message, max_tokens, temperature, top_p],
140
  chatbot
141
  )
142
-
143
- # Set up stop button
144
- stop.click(fn=None, inputs=None, outputs=None, cancels=[submit_event])
145
 
146
  # Add a clear button
147
  clear = gr.Button("Clear Conversation")
148
  clear.click(lambda: None, None, chatbot, queue=False)
149
-
 
 
 
 
150
  # Add disclaimer
151
  gr.Markdown(
152
  """
153
  ---
154
- ⚠️ **Disclaimer:** Superthoughts may make mistakes. Always verify important information.
155
  This chat interface is intended for testing and experimentation purposes only.
156
  """
157
  )
@@ -159,4 +162,4 @@ with gr.Blocks(css=css) as demo:
159
  # Launch the interface
160
  if __name__ == "__main__":
161
  demo.queue()
162
- demo.launch(share=True)
 
11
  max_tokens: int,
12
  temperature: float,
13
  top_p: float,
14
+ stop_event: gr.EventData,
15
  ) -> Iterator[str]:
16
  messages = [{"role": "system", "content": system_message}]
17
+
18
  # Add history to messages
19
  for user_msg, assistant_msg in history:
20
  if user_msg:
21
  messages.append({"role": "user", "content": user_msg})
22
  if assistant_msg:
23
  messages.append({"role": "assistant", "content": assistant_msg})
24
+
25
  # Add current message
26
  messages.append({"role": "user", "content": message})
27
+
28
  # Initialize response
29
  response = ""
30
+
31
  # Stream the response
32
  try:
33
  for chunk in client.chat_completion(
 
37
  temperature=temperature,
38
  top_p=top_p,
39
  ):
40
+ if stop_event.originator.get("clicked"):
41
+ break
42
  if chunk.choices[0].delta.content is not None:
43
  token = chunk.choices[0].delta.content
44
  response += token
 
84
 
85
  # Create Gradio interface
86
  with gr.Blocks(css=css) as demo:
87
+ gr.Markdown("## Chat with Superthoughts lite! (1.7B)")
88
  gr.Markdown("**Note:** First response may take a moment to initialize. Subsequent responses will be faster.")
89
+
90
+ chatbot = gr.Chatbot(height=600)
91
+ msg = gr.Textbox(label="Your message", placeholder="Type your message here...")
92
+
 
 
 
93
  with gr.Accordion("Advanced Settings", open=False):
94
  system_message = gr.Textbox(
95
  value="You must act in a conversational matter and always include <think> ... </think> <output> </output> tokens.",
 
119
 
120
  def user(user_message: str, history: list) -> tuple[str, list]:
121
  """Add user message to history"""
122
+ return "", history + [[user_message, None]]
123
 
124
+ def bot(history: list, system_message: str, max_tokens: int, temperature: float, top_p: float, stop_event: gr.EventData) -> Iterator[list]:
125
  """Generate and stream bot responses"""
126
+ user_message, _ = history[-1]
127
+ history[-1][1] = "" # Initialize bot's response
128
+
129
+ for partial_response in respond(user_message, history[:-1], system_message, max_tokens, temperature, top_p, stop_event):
130
+ history[-1][1] = partial_response
131
  yield history
132
 
133
+ # Set up chat message handling
134
+ msg.submit(
135
  user,
136
  [msg, chatbot],
137
  [msg, chatbot],
138
  queue=False
139
  ).then(
140
  bot,
141
+ [chatbot, system_message, max_tokens, temperature, top_p, gr.EventData()],
142
  chatbot
143
  )
 
 
 
144
 
145
  # Add a clear button
146
  clear = gr.Button("Clear Conversation")
147
  clear.click(lambda: None, None, chatbot, queue=False)
148
+
149
+ # Add a stop button
150
+ stop_button = gr.Button("Stop")
151
+ stop_button.click(lambda: gr.EventData(clicked=True), outputs=None, queue=False)
152
+
153
  # Add disclaimer
154
  gr.Markdown(
155
  """
156
  ---
157
+ ⚠️ **Disclaimer:** Superthoughts may make mistakes. Always verify important information.
158
  This chat interface is intended for testing and experimentation purposes only.
159
  """
160
  )
 
162
  # Launch the interface
163
  if __name__ == "__main__":
164
  demo.queue()
165
+ demo.launch(share=True)