import os

import streamlit as st
from embedchain import App

# Cache downloaded Hugging Face models in a local "models" directory.
os.environ["HF_HOME"] = "models"


@st.cache_resource
def embedchain_bot():
    # Build the Embedchain app once and reuse it across Streamlit reruns.
    return App.from_config(config_path="./config_main.yaml")


st.title("💬 Chatbot")
st.caption("🚀 An Embedchain app created by Anurag Shukla (IRLP Lab) for DA-IICT!")

# Initialise the chat history with a greeting from the assistant.
if "messages" not in st.session_state:
    st.session_state.messages = [
        {
            "role": "assistant",
            "content": "Hi! I'm a chatbot. I can answer questions and learn new things!",
        }
    ]

# Replay the conversation so far.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("Ask me anything!"):
    app = embedchain_bot()

    # Disabled: let users add new sources to the knowledge base via "/add <source>".
    # if prompt.startswith("/add"):
    #     with st.chat_message("user"):
    #         st.markdown(prompt)
    #     st.session_state.messages.append({"role": "user", "content": prompt})
    #     prompt = prompt.replace("/add", "").strip()
    #     with st.chat_message("assistant"):
    #         message_placeholder = st.empty()
    #         message_placeholder.markdown("Adding to knowledge base...")
    #         app.add(prompt)
    #         message_placeholder.markdown(f"Added {prompt} to knowledge base!")
    #         st.session_state.messages.append(
    #             {"role": "assistant", "content": f"Added {prompt} to knowledge base!"}
    #         )
    #     st.stop()

    # Show the user's message and record it in the history.
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Query the Embedchain app and replace the placeholder with the answer.
    with st.chat_message("assistant"):
        msg_placeholder = st.empty()
        msg_placeholder.markdown("Thinking...")
        print("Querying the Agent.")
        full_response = app.query(prompt.lower())
        # Keep only the text after the final "Answer:" marker, if present.
        full_response = full_response.rpartition("Answer:")[-1].strip()
        print(f"Answer:\n\n{full_response}")
        msg_placeholder.markdown(full_response)
        st.session_state.messages.append(
            {"role": "assistant", "content": full_response}
        )
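

# ---------------------------------------------------------------------------
# Usage notes (assumptions, not part of the original script):
#
# The app is launched like any other Streamlit script, e.g.
#
#     streamlit run chatbot.py
#
# where "chatbot.py" is an assumed filename for this file.
#
# The Embedchain app is built from ./config_main.yaml, which is not shown
# here. A minimal, hypothetical config using Hugging Face models (consistent
# with HF_HOME being set above) might look roughly like:
#
#     llm:
#       provider: huggingface
#       config:
#         model: google/flan-t5-large                     # placeholder model
#     embedder:
#       provider: huggingface
#       config:
#         model: sentence-transformers/all-MiniLM-L6-v2   # placeholder model
#
# The exact keys and supported providers depend on the Embedchain version.
# ---------------------------------------------------------------------------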