import os
import time

import streamlit as st
from dotenv import load_dotenv

from qa_loader import load_qa_and_create_vectorstore
from rag_chain import generate_response

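# Load environment variables (e.g. API keys) from a local .env file.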
load_dotenv()

st.set_page_config(page_title="Vistula University AI Assistant", layout="centered")

st.title("Vistula University AI Assistant")
st.write("Ask me anything about Vistula University!")

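# Cache the retriever so the vector store is built only once, not on every rerun.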
@st.cache_resource
def get_retriever():
    return load_qa_and_create_vectorstore()

retriever = get_retriever()
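
# load_qa_and_create_vectorstore() may return a (retriever, ...) tuple; keep only the retriever.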
if isinstance(retriever, tuple):
    retriever = retriever[0]

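# Keep the conversation in st.session_state so it survives Streamlit reruns.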
if "chat_history" not in st.session_state:
|
|
st.session_state.chat_history = []
|
|
|
|
|
|
st.write("### ποΈ Chat History")
|
|
|
|
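# Replay earlier turns so the full conversation stays visible after each rerun.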
for entry in st.session_state.chat_history:
    with st.chat_message("user"):
        st.write(entry["question"])
    with st.chat_message("assistant"):
        st.write(entry["answer"])

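# Chat input box, rendered at the bottom of the page.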
query = st.chat_input("Ask your question about Vistula University!")

if query:
    with st.spinner("Thinking..."):
        response = generate_response(retriever, query)

    st.session_state.chat_history.append({
        "question": query,
        "answer": response
    })

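    # Show the new exchange, revealing the answer word by word to simulate streaming.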
    with st.chat_message("user"):
        st.write(query)
    with st.chat_message("assistant"):
        placeholder = st.empty()
        current_text = ""

        for word in response.split():
            current_text += word + " "
            placeholder.write(current_text)
            time.sleep(0.05)