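"""Streamlit chat front end for an Embedchain question-answering bot.

The Embedchain app is built from ./config_main.yaml and cached across Streamlit
reruns. Launch with `streamlit run <this file>` (the actual filename is not shown here).
"""
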
import os
import streamlit as st

from embedchain import App

os.environ["HF_HOME"] = "models"  # keep Hugging Face model downloads in ./models instead of the default cache


@st.cache_resource
def embedchain_bot():
    """Build the Embedchain app from the YAML config (cached for the session)."""
    return App.from_config(config_path="./config_main.yaml")
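
# NOTE: config_main.yaml itself is not part of this file. As an illustrative
# sketch only (an assumed layout, not the actual config), an Embedchain YAML
# config pairs llm, embedder, and vectordb sections, each with a provider and
# a config block:
#
#   llm:
#     provider: huggingface
#     config:
#       model: "google/flan-t5-large"        # hypothetical model choice
#       prompt: |
#         Use the following context to answer the query.
#         Context: $context
#         Query: $query
#         Answer:
#   embedder:
#     provider: huggingface
#     config:
#       model: "sentence-transformers/all-mpnet-base-v2"   # hypothetical
#   vectordb:
#     provider: chroma
#     config:
#       dir: db
#
# A prompt template ending in "Answer:" is what the rpartition("Answer:")
# call further down relies on when trimming the model output.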


st.title("💬 Chatbot")
st.caption("🚀 An Embedchain app created by Anurag Shukla (IRLP Lab) for DA-IICT!")
if "messages" not in st.session_state:
    st.session_state.messages = [
        {
            "role": "assistant",
            "content": """
        Hi! I'm a chatbot. I can answer questions and learn new things!\n
        """,
        }
    ]

for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("Ask me anything!"):
    app = embedchain_bot()  # cached across reruns by st.cache_resource

    # if prompt.startswith("/add"):
    #     with st.chat_message("user"):
    #         st.markdown(prompt)
    #         st.session_state.messages.append({"role": "user", "content": prompt})
    #     prompt = prompt.replace("/add", "").strip()
    #     with st.chat_message("assistant"):
    #         message_placeholder = st.empty()
    #         message_placeholder.markdown("Adding to knowledge base...")
    #         app.add(prompt)
    #         message_placeholder.markdown(f"Added {prompt} to knowledge base!")
    #         st.session_state.messages.append({"role": "assistant", "content": f"Added {prompt} to knowledge base!"})
    #         st.stop()

    with st.chat_message("user"):
        st.markdown(prompt)
        st.session_state.messages.append({"role": "user", "content": prompt})

    with st.chat_message("assistant"):
        msg_placeholder = st.empty()
        msg_placeholder.markdown("Thinking...")

        print("Querying the Agent.")
        full_response = app.query(prompt.lower())
        full_response = full_response.rpartition("Answer:")[-1].strip()
        print(f"Answer:\n\n{full_response}")

        msg_placeholder.markdown(full_response)
        st.session_state.messages.append(
            {"role": "assistant", "content": full_response}
        )