import streamlit as st
import pickle
import os
from PyPDF2 import PdfReader
from streamlit_extras.add_vertical_space import add_vertical_space
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import FAISS
from langchain.llms import OpenAI
from langchain.chains.question_answering import load_qa_chain
from langchain.callbacks import get_openai_callback

# Sidebar contents
with st.sidebar:
    st.title(':orange_book: BinDoc GmbH')

    # API key input (the entered text is masked)
    api_key = st.text_input('Enter your OpenAI API Key:', type='password')

    if api_key:
        os.environ['OPENAI_API_KEY'] = api_key
    else:
        st.warning('API key is required to proceed.')

    st.markdown("Experience the future of document interaction with the revolutionary")
    st.markdown("**BinDocs Chat App**.")
    st.markdown("Harnessing the power of a Large Language Model and AI technology,")
    st.markdown("this innovative platform redefines PDF engagement,")
    st.markdown("enabling dynamic conversations that bridge the gap between")
    st.markdown("human and machine intelligence.")

    add_vertical_space(3)  # Add more vertical space between text blocks
    st.write('Made with ❤️ by Anne')


def load_pdf(file_path):
    """Extract text from the uploaded PDF, split it into chunks, and return a FAISS vector store.

    The vector store is pickled to disk under the PDF's name so repeated questions
    against the same file do not re-embed the document.
    """
    pdf_reader = PdfReader(file_path)
    text = ""
    for page in pdf_reader.pages:
        text += page.extract_text()

    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=1000,
        chunk_overlap=200,
        length_function=len
    )
    chunks = text_splitter.split_text(text=text)

    store_name = file_path.name[:-4]  # strip the ".pdf" extension

    if os.path.exists(f"{store_name}.pkl"):
        # Reuse the cached vector store if one exists for this file
        with open(f"{store_name}.pkl", "rb") as f:
            VectorStore = pickle.load(f)
    else:
        embeddings = OpenAIEmbeddings()  # Reads the API key from the OPENAI_API_KEY env var
        VectorStore = FAISS.from_texts(chunks, embedding=embeddings)
        with open(f"{store_name}.pkl", "wb") as f:
            pickle.dump(VectorStore, f)

    return VectorStore


def load_chatbot():
    # "stuff" chain: all retrieved chunks are stuffed into a single prompt
    return load_qa_chain(llm=OpenAI(), chain_type="stuff")


def display_chat_history(chat_history):
    for chat in chat_history:
        # New messages are highlighted; older ones are colored by sender
        background_color = "#FFA07A" if chat[2] == "new" else "#acf" if chat[0] == "User" else "#caf"
        st.markdown(
            f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>",
            unsafe_allow_html=True
        )


def main():
    st.title("BinDocs Chat App")

    if "chat_history" not in st.session_state:
        st.session_state['chat_history'] = []

    display_chat_history(st.session_state['chat_history'])

    st.write("<!-- Start Spacer -->", unsafe_allow_html=True)
    st.write("<div style='flex: 1;'></div>", unsafe_allow_html=True)
    st.write("<!-- End Spacer -->", unsafe_allow_html=True)

    new_messages_placeholder = st.empty()

    pdf = st.file_uploader("Upload your PDF", type="pdf")

    if pdf is not None:
        query = st.text_input("Ask questions about your PDF file (in any preferred language):")

        if st.button("Ask") or (query and query != st.session_state.get('last_input', '')):
            st.session_state['last_input'] = query  # Save the current query as the last input
            st.session_state['chat_history'].append(("User", query, "new"))

            loading_message = st.empty()
            loading_message.text('Bot is thinking...')

            VectorStore = load_pdf(pdf)
            chain = load_chatbot()
            docs = VectorStore.similarity_search(query=query, k=3)
            with get_openai_callback() as cb:
                response = chain.run(input_documents=docs, question=query)

            st.session_state['chat_history'].append(("Bot", response, "new"))

            # Display the latest user/bot exchange at the bottom
            new_messages = st.session_state['chat_history'][-2:]
            for chat in new_messages:
                background_color = "#FFA07A" if chat[2] == "new" else "#acf" if chat[0] == "User" else "#caf"
                new_messages_placeholder.markdown(
                    f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>",
                    unsafe_allow_html=True
                )

            # Attempt to scroll to the latest response (note: Streamlit may not execute injected scripts)
            st.write("<script>document.getElementById('response').scrollIntoView();</script>", unsafe_allow_html=True)

            loading_message.empty()

            # Reset the local query variable (note: this does not clear the text_input widget itself)
            query = ""

    # Mark all messages as old after displaying
    st.session_state['chat_history'] = [(sender, msg, "old") for sender, msg, _ in st.session_state['chat_history']]


if __name__ == "__main__":
    main()