"""
creator: Lewis Kamau Kimaru
Function: chat with pdf documents in different languages
"""
from langchain.text_splitter import CharacterTextSplitter
from langchain.embeddings import HuggingFaceBgeEmbeddings
from langchain.vectorstores import FAISS
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain
from langchain.llms import HuggingFaceHub
from typing import Union
from dotenv import load_dotenv
from PyPDF2 import PdfReader
import streamlit as st
import requests
import json
import os
# Authentication
import streamlit_authenticator as stauth
import yaml
from yaml.loader import SafeLoader
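# The authenticator below reads config.yaml. As a rough sketch (field names
# follow the streamlit-authenticator examples; the values are illustrative
# assumptions, not taken from this repo), the file is expected to look like:
#
#   credentials:
#     usernames:
#       some_user:
#         email: user@example.com
#         name: Some User
#         password: <hashed password>
#   cookie:
#     name: some_cookie_name
#     key: some_signature_key
#     expiry_days: 30
#   preauthorized:
#     emails:
#       - invited@example.com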
# Page configuration (must run before any other Streamlit command)
st.set_page_config(page_title="SemaNaPDF", page_icon="📚")

with open('config.yaml') as file:
    config = yaml.load(file, Loader=SafeLoader)

authenticator = stauth.Authenticate(
    config['credentials'],
    config['cookie']['name'],
    config['cookie']['key'],
    config['cookie']['expiry_days'],
    config['preauthorized']
)

name, authentication_status, username = authenticator.login('Login', 'main')
if authentication_status:
    authenticator.logout('Logout', 'main', key='unique_key')
    st.write(f'Welcome *{name}*')
    st.title('Some content')
elif authentication_status is False:
    st.error('Username/password is incorrect')
elif authentication_status is None:
    st.warning('Please enter your username and password')
if authentication_status:
    try:
        if authenticator.reset_password(username, 'Reset password'):
            st.success('Password modified successfully')
    except Exception as e:
        st.error(e)

try:
    if authenticator.register_user('Register user', preauthorization=False):
        st.success('User registered successfully')
except Exception as e:
    st.error(e)

try:
    username_forgot_pw, email_forgot_password, random_password = authenticator.forgot_password('Forgot password')
    if username_forgot_pw:
        st.success('New password sent securely')
        # Random password to be transferred to the user securely
    else:
        st.error('Username not found')
except Exception as e:
    st.error(e)

try:
    username_forgot_username, email_forgot_username = authenticator.forgot_username('Forgot username')
    if username_forgot_username:
        st.success('Username sent securely')
        # Username to be transferred to the user securely
    else:
        st.error('Email not found')
except Exception as e:
    st.error(e)

if authentication_status:
    try:
        if authenticator.update_user_details(username, 'Update user details'):
            st.success('Entries updated successfully')
    except Exception as e:
        st.error(e)

with open('config.yaml', 'w') as file:
    yaml.dump(config, file, default_flow_style=False)
# set this key as an environment variable
os.environ["HUGGINGFACEHUB_API_TOKEN"] = st.secrets['huggingface_token']
# Sema Translator
Public_Url = 'https://lewiskimaru-helloworld.hf.space' #endpoint
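# translate() below posts to two routes on this Space:
#   /translate_enter/  - the caller supplies source_lang explicitly
#   /translate_detect/ - the service detects the source language itself
# Both routes are assumed to return JSON containing 'translated_text'
# (plus 'source_language' for the detect route), as read in the code below.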
def translate(userinput, target_lang, source_lang=None):
    """Translate userinput into target_lang, detecting the source language if not given."""
    if source_lang:
        url = f"{Public_Url}/translate_enter/"
        data = {
            "userinput": userinput,
            "source_lang": source_lang,
            "target_lang": target_lang,
        }
        response = requests.post(url, json=data)
        result = response.json()
        translation = result['translated_text']
    else:
        url = f"{Public_Url}/translate_detect/"
        data = {
            "userinput": userinput,
            "target_lang": target_lang,
        }
        response = requests.post(url, json=data)
        result = response.json()
        source_lang = result['source_language']
        translation = result['translated_text']
    return source_lang, translation
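# Illustrative use (not from the original file): translate a question into
# English with automatic source-language detection, using the same
# FLORES/NLLB-style code ('eng_Latn') that main() passes below:
#   detected_lang, english_query = translate("Hati hii inahusu nini?", "eng_Latn")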
def get_pdf_text(pdf: Union[str, bytes, bytearray]) -> str:
    """Extract and concatenate the text of every page in the uploaded PDF."""
    reader = PdfReader(pdf)
    pdf_text = ''
    for page in reader.pages:
        text = page.extract_text()
        if text:
            pdf_text += text
    return pdf_text
def get_text_chunks(text: str) -> list:
    text_splitter = CharacterTextSplitter(
        separator="\n", chunk_size=1500, chunk_overlap=300, length_function=len
    )
    chunks = text_splitter.split_text(text)
    return chunks
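# Note: chunk_size=1500 with chunk_overlap=300 is measured in characters
# (length_function=len), so consecutive chunks share up to ~300 characters
# and passages that straddle a chunk boundary are less likely to be lost.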
def get_vectorstore(text_chunks: list) -> FAISS:
    model = "sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
    encode_kwargs = {
        "normalize_embeddings": True
    }  # set True to compute cosine similarity
    embeddings = HuggingFaceBgeEmbeddings(
        model_name=model, encode_kwargs=encode_kwargs, model_kwargs={"device": "cpu"}
    )
    vectorstore = FAISS.from_texts(texts=text_chunks, embedding=embeddings)
    return vectorstore
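# Because normalize_embeddings=True yields unit-length vectors, ranking by
# FAISS's default L2 distance is equivalent to ranking by cosine similarity,
# which matches the "cosine similarity" intent noted above.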
def get_conversation_chain(vectorstore: FAISS) -> ConversationalRetrievalChain:
    llm = HuggingFaceHub(
        repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
        # repo_id="TheBloke/Mixtral-8x7B-Instruct-v0.1-GGUF",
        model_kwargs={"temperature": 0.5, "max_length": 1048},
    )
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
    conversation_chain = ConversationalRetrievalChain.from_llm(
        llm=llm, retriever=vectorstore.as_retriever(), memory=memory
    )
    return conversation_chain
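# The chain returned here is stored in st.session_state.conversation and is
# invoked in main() as conversation({"question": ...}); the
# ConversationBufferMemory keeps the running exchange under 'chat_history',
# which the chain uses to condense follow-up questions before retrieval.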
st.markdown("""
<style> div.stSpinner > div {
    text-align: center;
    align-items: center;
    justify-content: center;
}
</style>""", unsafe_allow_html=True)
def main():
    st.title("SemaNaPDF📚")
    # upload file
    pdf = st.file_uploader("Upload a PDF Document", type="pdf")
    if pdf is not None:
        with st.spinner(""):
            # get pdf text
            raw_text = get_pdf_text(pdf)
            # get the text chunks
            text_chunks = get_text_chunks(raw_text)
            # create vector store
            vectorstore = get_vectorstore(text_chunks)
            # create conversation chain
            st.session_state.conversation = get_conversation_chain(vectorstore)
        st.info("done")

    # show user input
    if "messages" not in st.session_state:
        st.session_state.messages = []

    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    if user_question := st.chat_input("Ask your document anything ......?"):
        with st.chat_message("user"):
            st.markdown(user_question)
        # detect the user's language and translate the question into English
        user_langd, Queryd = translate(user_question, 'eng_Latn')
        st.session_state.messages.append({"role": "user", "content": user_question})
        response = st.session_state.conversation({"question": Queryd})
        st.session_state.chat_history = response["chat_history"]
        # translate the English answer back into the user's language
        output = translate(response['answer'], user_langd, 'eng_Latn')[1]
        with st.chat_message("assistant"):
            st.markdown(output)
        st.session_state.messages.append({"role": "assistant", "content": response['answer']})

    # Signature
    st.markdown(
        """
        <div style="position: fixed; bottom: 0; right: 0; padding: 10px;">
            <a href="https://kamaukimaru.vercel.app" target="_blank" style="font-size: 12px; color: #269129; text-decoration: none;">©2023 Lewis Kimaru. All rights reserved.</a>
        </div>
        """,
        unsafe_allow_html=True
    )
if __name__ == '__main__':
    main()