import streamlit as st
from PIL import Image
import time
import streamlit_analytics
from dotenv import load_dotenv
import pickle
from huggingface_hub import Repository
from PyPDF2 import PdfReader
from streamlit_extras.add_vertical_space import add_vertical_space
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import FAISS
from langchain.llms import OpenAI
from langchain.chains.question_answering import load_qa_chain
from langchain.callbacks import get_openai_callback
import os

#st.set_page_config(layout="wide")


# Set the page config to make the sidebar start in the collapsed state
st.set_page_config(initial_sidebar_state="collapsed")

# Step 1: Clone the Dataset Repository
repo = Repository(
    local_dir="Private_Book",  # Local directory to clone the repository into
    repo_type="dataset",  # This is a dataset repository
    clone_from="Anne31415/Private_Book",  # Dataset repository on the Hugging Face Hub
    token=os.environ["HUB_TOKEN"]  # Authenticate with the HUB_TOKEN secret
)
repo.git_pull()  # Pull the latest changes (if any)
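
# If only the single PDF is needed, a lighter-weight alternative (sketch, not wired in
# here) is huggingface_hub.hf_hub_download, which fetches one file instead of cloning
# the whole dataset repository:
#
#     from huggingface_hub import hf_hub_download
#     pdf_path = hf_hub_download(
#         repo_id="Anne31415/Private_Book",
#         filename="KOMBI_all2.pdf",
#         repo_type="dataset",
#         token=os.environ["HUB_TOKEN"],
#     )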

# Step 2: Load the PDF File
pdf_path = "Private_Book/KOMBI_all2.pdf"  # Path to the PDF inside the cloned dataset repository

with st.sidebar:
    st.title('BinDoc GmbH')
    st.markdown("Experience revolutionary interaction with BinDocs Chat App, leveraging state-of-the-art AI technology.")
    
    add_vertical_space(1)  # Adjust as per the desired spacing
    
    st.markdown("""
    Hello! I’m here to assist you with:<br><br>
    📘 **Glossary Inquiries:**<br>
    I can clarify terms like "DiGA", "AOP", or "BfArM", providing clear and concise explanations to help you understand our content better.<br><br>
    🆘 **Help Page Navigation:**<br>
    Ask me if you forgot your password or want to know more about topics related to the platform.<br><br>
    📰 **Latest Whitepapers Insights:**<br>
    Curious about our recent publications? Feel free to ask about our latest whitepapers!<br><br>
    """, unsafe_allow_html=True)
    
    add_vertical_space(1)  # Adjust as per the desired spacing

    st.write('Made with ❤️ by BinDoc GmbH')

    # Read the OpenAI API key from the environment (the LangChain OpenAI classes
    # pick up OPENAI_API_KEY on their own).
    api_key = os.getenv("OPENAI_API_KEY")

# Cache the loaded vector store; persist="disk" keeps the cached result across sessions.
@st.cache_data(persist="disk")
def load_vector_store(file_path, store_name, force_reload=False):
    # Check if we need to force reload the vector store (e.g., when the PDF changes)
    if force_reload or not os.path.exists(f"{store_name}.pkl"):
        text_splitter = RecursiveCharacterTextSplitter(
            chunk_size=1000,
            chunk_overlap=200,
            length_function=len
        )
        
        text = load_pdf_text(file_path)
        chunks = text_splitter.split_text(text=text)
        
        embeddings = OpenAIEmbeddings()
        VectorStore = FAISS.from_texts(chunks, embedding=embeddings)
        with open(f"{store_name}.pkl", "wb") as f:
            pickle.dump(VectorStore, f)
    else:
        with open(f"{store_name}.pkl", "rb") as f:
            VectorStore = pickle.load(f)

    return VectorStore
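
# Sketch of an alternative persistence path (not wired in): the LangChain FAISS wrapper
# provides its own save_local/load_local, which tends to survive library upgrades better
# than pickling the whole object.
#
#     def load_vector_store_native(file_path, store_dir, force_reload=False):
#         embeddings = OpenAIEmbeddings()
#         if force_reload or not os.path.isdir(store_dir):
#             chunks = RecursiveCharacterTextSplitter(
#                 chunk_size=1000, chunk_overlap=200, length_function=len
#             ).split_text(load_pdf_text(file_path))
#             store = FAISS.from_texts(chunks, embedding=embeddings)
#             store.save_local(store_dir)
#         else:
#             # Newer LangChain releases may additionally require
#             # allow_dangerous_deserialization=True here.
#             store = FAISS.load_local(store_dir, embeddings)
#         return store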

# Utility function to load text from a PDF
def load_pdf_text(file_path):
    pdf_reader = PdfReader(file_path)
    text = ""
    for page in pdf_reader.pages:
        text += page.extract_text() or ""  # Add fallback for pages where text extraction fails
    return text

def load_chatbot():
    return load_qa_chain(llm=OpenAI(), chain_type="stuff")
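
# chain_type="stuff" concatenates all retrieved chunks into a single prompt, which is
# fine for the k=3 chunks of ~1000 characters used below. A more deterministic variant
# (sketch, not used by default) would pin the sampling temperature:
#
#     def load_chatbot_deterministic():
#         return load_qa_chain(llm=OpenAI(temperature=0), chain_type="stuff")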

def main():
    try:
        hide_streamlit_style = """
                <style>
                #MainMenu {visibility: hidden;}
                footer {visibility: hidden;}
                </style>
                """
        st.markdown(hide_streamlit_style, unsafe_allow_html=True)
    
        # Create columns for layout
        col1, col2 = st.columns([3, 1])  # Adjust the ratio to your liking

        with col1:
            st.title("Welcome to BinDocs ChatBot!")

        with col2:
            # Display the logo in the right column (top-right corner of the page)
            image = Image.open('BinDoc Logo (Quadratisch).png')
            st.image(image, use_column_width='always')

    
    
    
        # Start tracking user interactions
        with streamlit_analytics.track():
            if not os.path.exists(pdf_path):
                st.error("File not found. Please check the file path.")
                return
    
            VectorStore = load_vector_store(pdf_path, "my_vector_store", force_reload=False)
    
    
            if "chat_history" not in st.session_state:
                st.session_state['chat_history'] = []
    
            display_chat_history(st.session_state['chat_history'])
    
            st.write("<!-- Start Spacer -->", unsafe_allow_html=True)
            st.write("<div style='flex: 1;'></div>", unsafe_allow_html=True)
            st.write("<!-- End Spacer -->", unsafe_allow_html=True)
    
            new_messages_placeholder = st.empty()
    
            query = st.text_input("Ask questions about your PDF file (in any preferred language):")

            add_vertical_space(2)  # Adjust as per the desired spacing
            
            # Create two columns for the buttons
            col1, col2 = st.columns(2)
            
            with col1:
                if st.button("Was kann ich mit dem Prognose-Analyse-Tool machen?"):
                    query = "Was kann ich mit dem Prognose-Analyse-Tool machen?"
                if st.button("Was sagt mir die Farbe der Balken der Bevölkerungsentwicklung?"):
                    query = "Was sagt mir die Farbe der Balken der Bevölkerungsentwicklung?"
                if st.button("Ich habe mein Meta-Password vergessen, wie kann ich es zurĂŒcksetzen?"):
                    query = "Ich habe mein Meta-Password vergessen, wie kann ich es zurĂŒcksetzen?"

            
            with col2:
                if st.button("Dies ist eine reine Test Frage, welche aber eine ausreichende LĂ€nge hat."):
                    query = "Dies ist eine reine Test Frage, welche aber eine ausreichende LĂ€nge hat."
                if st.button("Was sagt mir denn generell die wundervolle Bevölkerungsentwicklung?"):
                    query = "Was sagt mir denn generell die wundervolle Bevölkerungsentwicklung?"
                if st.button("Ob ich hier wohl viel schreibe, dass die Fragen vom Layout her passen?"):
                    query = "Ob ich hier wohl viel schreibe, dass die Fragen vom Layout her passen?"

        
            if query:
                st.session_state['chat_history'].append(("User", query, "new"))

                # Start timing
                start_time = time.time()
                
                with st.spinner('Bot is thinking...'):
                    # Use the (cached) VectorStore loaded earlier in this run
                    chain = load_chatbot()
                    docs = VectorStore.similarity_search(query=query, k=3)
                    with get_openai_callback() as cb:
                        response = chain.run(input_documents=docs, question=query)

                        
                # Stop timing
                end_time = time.time()
                
                # Calculate duration
                duration = end_time - start_time

                # You can use Streamlit's text function to display the timing
                st.text(f"Response time: {duration:.2f} seconds")
    
                st.session_state['chat_history'].append(("Bot", response, "new"))
    
    
                # Display new messages at the bottom
                new_messages = st.session_state['chat_history'][-2:]
                for chat in new_messages:
                    background_color = "#ffeecf" if chat[2] == "new" else "#ffeecf" if chat[0] == "User" else "#ffeecf"
                    new_messages_placeholder.markdown(f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)
    
    
                # Reset the local query variable. Note that this does not clear the
                # text_input widget itself, which keeps its own state between reruns.
                query = ""
    
            # Mark all messages as old after displaying
            st.session_state['chat_history'] = [(sender, msg, "old") for sender, msg, _ in st.session_state['chat_history']]

    except Exception as e:
        st.error(f"Upsi, an unexpected error occurred: {e}")
        # Optionally log the exception details to a file or error tracking service


def display_chat_history(chat_history):
    for chat in chat_history:
        background_color = "#ffeecf" if chat[2] == "new" else "#ffeecf" if chat[0] == "User" else "#ffeecf"
        st.markdown(f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)


if __name__ == "__main__":
    main()