ajsbsd committed
Commit b797037 · verified · 1 Parent(s): ee2d859

Update app.py

Files changed (1)
  1. app.py +9 -14
app.py CHANGED
@@ -75,10 +75,9 @@ def predict_chat(message: str, history: list):
         yield "Error: Model or tokenizer failed to load. Please check the Space logs for details."
         return
 
-    messages = [{"role": "system", "content": "You are a friendly chatbot."}]
-    for human_msg, ai_msg in history:
-        messages.append({"role": "user", "content": human_msg})
-        messages.append({"role": "assistant", "content": ai_msg})
+    # history is already in the 'messages' format if type='messages' is set on chatbot
+    # It contains dictionaries with 'role' and 'content'
+    messages = [{"role": "system", "content": "You are a friendly chatbot."}] + history
     messages.append({"role": "user", "content": message})
 
     generated_text = ""
@@ -131,18 +130,16 @@ def predict_chat(message: str, history: list):
 if __name__ == "__main__":
     load_model_for_zerocpu()
 
-    initial_chatbot_message = (
+    # Initial message for the chatbot in the 'messages' format
+    initial_messages = [{"role": "assistant", "content":
         "Hello! I'm an AI assistant. I'm currently running in a CPU-only "
         "environment for efficient demonstration. How can I help you today?"
-    )
+    }]
 
-    # Use gr.ChatInterface directly without gr.Blocks wrapper for simplicity
-    # This often works better when ChatInterface is the sole component
     demo = gr.ChatInterface(
         fn=predict_chat,
-        # Define the chatbot here, with type='messages'
-        chatbot=gr.Chatbot(height=500, type='messages',
-                           value=[[None, initial_chatbot_message]]),  # Set initial message directly here
+        # Define the chatbot here, with type='messages' and initial value in the correct format
+        chatbot=gr.Chatbot(height=500, type='messages', value=initial_messages),
         textbox=gr.Textbox(
             placeholder="Ask me a question...",
             container=False,
@@ -162,9 +159,7 @@ if __name__ == "__main__":
             ["What's the best way to stay motivated?"],
         ],
         cache_examples=False,
-        # Gradio 4.x has `clear_btn` directly on ChatInterface again
-        # but if this causes issues, you might need to revert to a gr.ClearButton() below
-        clear_btn="Clear Chat"  # Re-added clear_btn as it seems to be supported again in latest Gradio versions
+        clear_btn="Clear Chat"
     )
 
     demo.launch()
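
The core of this change is switching predict_chat from the old (user, assistant) tuple-pair history to Gradio's 'messages' format, so the system prompt is prepended with a single list concatenation instead of the removed per-pair loop. The sketch below is not part of the commit; build_messages and the sample history are illustrative only, assuming the Chatbot is created with type='messages' as in the diff above.

# Minimal sketch (not from this commit) of the prompt-building logic the new code relies on.
# With gr.Chatbot(type='messages'), Gradio passes `history` as a list of
# {"role": ..., "content": ...} dicts, so no per-(user, assistant) tuple loop is needed.

SYSTEM_PROMPT = {"role": "system", "content": "You are a friendly chatbot."}

def build_messages(message: str, history: list) -> list:
    # Hypothetical helper mirroring the updated lines in predict_chat.
    messages = [SYSTEM_PROMPT] + history  # history already holds role/content dicts
    messages.append({"role": "user", "content": message})
    return messages

# Example turn: the initial assistant greeting plus one earlier exchange.
history = [
    {"role": "assistant", "content": "Hello! I'm an AI assistant. How can I help you today?"},
    {"role": "user", "content": "What is the capital of France?"},
    {"role": "assistant", "content": "The capital of France is Paris."},
]
messages = build_messages("And what's a fun fact about it?", history)
assert messages[0]["role"] == "system" and messages[-1]["role"] == "user"

The same role/content structure is what the new initial_messages list provides, which is why the plain initial_chatbot_message string (and the [[None, ...]] value) was replaced by a messages-format greeting passed to gr.Chatbot(type='messages', value=initial_messages).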