Luigi committed on
Commit 6c77ec7 · 1 Parent(s): 14564aa

fix for multi-turn conv.

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -152,7 +152,7 @@ st.caption(f"Powered by `llama.cpp` | Model: {selected_model['filename']}")
 user_input = st.chat_input("Ask something...")
 
 if user_input:
-    if len(st.session_state.chat_history) % 2 == 1:
+    if st.session_state.chat_history and st.session_state.chat_history[-1]["role"] == "user":
         st.warning("Please wait for the assistant to respond before sending another message.")
     else:
         st.session_state.chat_history.append({"role": "user", "content": user_input})
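Why the new guard is more robust: the old parity check assumes the history alternates strictly user/assistant, so any extra entry (for example a seeded greeting or system message) throws the count off, while checking the role of the last entry only blocks input while an assistant reply is actually pending. A minimal sketch of the difference, not taken from app.py; the seeded system message below is a hypothetical scenario, and only the shape of chat_history (a list of {"role", "content"} dicts) is assumed from the diff:

# Illustration of the guards used before and after this commit.
# chat_history mirrors st.session_state.chat_history: a list of
# {"role": ..., "content": ...} dicts (assumed from the diff above).

def old_guard(chat_history):
    # Old check: odd length is taken to mean "assistant has not replied yet",
    # which only holds if messages alternate user/assistant exactly.
    return len(chat_history) % 2 == 1

def new_guard(chat_history):
    # New check: block only while the most recent message is from the user.
    return bool(chat_history) and chat_history[-1]["role"] == "user"

# Hypothetical history seeded with a system message (not confirmed in app.py):
history = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hi"},
    {"role": "assistant", "content": "Hello!"},
]

print(old_guard(history))  # True  -> would wrongly block the next user turn
print(new_guard(history))  # False -> correctly accepts a new message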