pankajsingh3012 committed on
Commit
3ae2cd6
·
verified ·
1 Parent(s): 314246b

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -133
app.py DELETED
@@ -1,133 +0,0 @@
1
- #importing libraries
2
- import streamlit as st
3
- from PyPDF2 import PdfReader
4
- from langchain.text_splitter import RecursiveCharacterTextSplitter
5
- import os
6
- from langchain_google_genai import GoogleGenerativeAIEmbeddings
7
- import google.generativeai as genai
8
- from langchain.vectorstores import FAISS
9
- from langchain_google_genai import ChatGoogleGenerativeAI
10
- from langchain.chains.question_answering import load_qa_chain
11
- from langchain.prompts import PromptTemplate
12
- from dotenv import load_dotenv
13
- import base64
14
-
15
- load_dotenv()
16
-
17
- #get api key
18
- os.getenv("GOOGLE_API_KEY")
19
- genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
20
-
21
-
22
-
23
-
24
-
25
- #pdf read and convert into raw text
26
- def get_pdf_text(pdf_docs):
27
- text=""
28
- for pdf in pdf_docs:
29
- pdf_reader= PdfReader(pdf)
30
- for page in pdf_reader.pages:
31
- text+= page.extract_text()
32
- return text
33
-
34
-
35
- #making chunks of text
36
- def get_text_chunks(text):
37
- text_splitter = RecursiveCharacterTextSplitter(chunk_size=10000, chunk_overlap=1000)
38
- chunks = text_splitter.split_text(text)
39
- return chunks
40
-
41
- #create embeddings and store in vector database
42
- def get_vector_store(text_chunks):
43
- embeddings = GoogleGenerativeAIEmbeddings(model = "models/embedding-001")
44
- vector_store = FAISS.from_texts(text_chunks, embedding=embeddings)
45
- vector_store.save_local("faiss_index")
46
-
47
- #define chain
48
- def get_conversational_chain():
49
-
50
- prompt_template = """
51
- Answer the question as detailed as possible from the provided context, make sure to provide all the details, if the answer is not in
52
- provided context just say, "answer is not available in the context", don't provide the wrong answer\n\n
53
- Context:\n {context}?\n
54
- Question: \n{question}\n
55
-
56
- Answer:
57
- """
58
-
59
- model = ChatGoogleGenerativeAI(model="gemini-pro",
60
- temperature=0.3)
61
-
62
- prompt = PromptTemplate(template = prompt_template, input_variables = ["context", "question"])
63
- chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
64
-
65
- return chain
66
-
67
-
68
- #take user input
69
- def user_input(user_question):
70
- embeddings = GoogleGenerativeAIEmbeddings(model = "models/embedding-001")
71
-
72
- new_db = FAISS.load_local("faiss_index", embeddings)
73
- docs = new_db.similarity_search(user_question)
74
-
75
- chain = get_conversational_chain()
76
-
77
-
78
- response = chain(
79
- {"input_documents":docs, "question": user_question}
80
- , return_only_outputs=True)
81
-
82
- print(response)
83
- st.write("Reply: ", response["output_text"])
84
-
85
-
86
-
87
- #steamlit interface
88
- def main():
89
- titleimg = "bg.jpeg"
90
-
91
- # impliment background formating
92
- def set_bg_hack(main_bg):
93
- # set bg name
94
- main_bg_ext = "jpeg"
95
- st.markdown(
96
- f"""
97
- <style>
98
- .stApp {{
99
- background: url(data:image/{main_bg_ext};base64,{base64.b64encode(open(main_bg, "rb").read()).decode()});
100
- background-repeat: no-repeat;
101
- background-position: right 50% bottom 95% ;
102
- background-size: cover;
103
- background-attachment: scroll;
104
- }}
105
- </style>
106
- """,
107
- unsafe_allow_html=True,
108
- )
109
-
110
- set_bg_hack(titleimg)
111
-
112
- st.set_page_config("Chat PDF")
113
- st.header("Chat with PDF 💁")
114
-
115
- user_question = st.text_input("Ask a Question from the PDF Files")
116
-
117
- if user_question:
118
- user_input(user_question)
119
-
120
- with st.sidebar:
121
- st.title("Menu:")
122
- pdf_docs = st.file_uploader("Upload your PDF Files and Click on the Submit & Process Button", accept_multiple_files=True)
123
- if st.button("Submit & Process"):
124
- with st.spinner("Processing..."):
125
- raw_text = get_pdf_text(pdf_docs)
126
- text_chunks = get_text_chunks(raw_text)
127
- get_vector_store(text_chunks)
128
- st.success("Done")
129
-
130
-
131
-
132
- if __name__ == "__main__":
133
- main()