Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -134,19 +134,21 @@ def load_vector_store(file_path, store_name, force_reload=False):
|
|
134 |
|
135 |
|
136 |
|
137 |
-
|
138 |
def load_pdf_text(file_path):
    """Extract and concatenate the text of every page of a PDF.

    Args:
        file_path: Path or file-like object accepted by ``PdfReader``.

    Returns:
        str: Text of all pages concatenated in page order; an empty
        string if no page has an extractable text layer.
    """
    pdf_reader = PdfReader(file_path)
    # "".join over a generator avoids the quadratic cost of repeated
    # string +=; extract_text() may return None for image-only pages,
    # hence the `or ""` guard.
    return "".join(page.extract_text() or "" for page in pdf_reader.pages)
|
144 |
|
|
|
|
|
145 |
def load_chatbot():
    """Build the question-answering chain used by the app.

    Returns:
        A ``"stuff"``-type ``load_qa_chain`` backed by the OpenAI
        ``gpt-3.5-turbo-instruct`` completion model.
    """
    # Commented-out legacy variant (default OpenAI model) removed;
    # the explicit model name below is the active configuration.
    return load_qa_chain(llm=OpenAI(model_name="gpt-3.5-turbo-instruct"), chain_type="stuff")
|
148 |
|
149 |
|
|
|
150 |
def display_chat_history(chat_history):
|
151 |
for chat in chat_history:
|
152 |
background_color = "#ffeecf" if chat[2] == "new" else "#ffeecf" if chat[0] == "User" else "#ffeecf"
|
|
|
134 |
|
135 |
|
136 |
|
137 |
+
@st.cache_resource
# NOTE(review): for a plain-str return value, ``st.cache_data`` is the
# decorator Streamlit recommends; ``cache_resource`` works here only
# because str is immutable — confirm before switching.
def load_pdf_text(file_path):
    """Extract and concatenate the text of every page of a PDF.

    Cached by Streamlit so repeated reruns with the same ``file_path``
    do not re-parse the document.

    Args:
        file_path: Path or file-like object accepted by ``PdfReader``.

    Returns:
        str: Text of all pages concatenated in page order; an empty
        string if no page has an extractable text layer.
    """
    pdf_reader = PdfReader(file_path)
    # "".join over a generator avoids the quadratic cost of repeated
    # string +=; extract_text() may return None for image-only pages,
    # hence the `or ""` guard.
    return "".join(page.extract_text() or "" for page in pdf_reader.pages)
|
144 |
|
145 |
+
|
146 |
+
@st.cache_resource
def load_chatbot(model_name="gpt-3.5-turbo-instruct"):
    """Create (and cache) the question-answering chain for the app.

    ``st.cache_resource`` builds the chain once per distinct
    ``model_name`` instead of on every Streamlit rerun.

    Args:
        model_name: OpenAI completion model backing the chain.
            Defaults to ``"gpt-3.5-turbo-instruct"`` (the previously
            hard-coded value, so existing callers are unaffected).

    Returns:
        A ``"stuff"``-type ``load_qa_chain`` chain.
    """
    return load_qa_chain(llm=OpenAI(model_name=model_name), chain_type="stuff")
|
149 |
|
150 |
|
151 |
+
|
152 |
def display_chat_history(chat_history):
|
153 |
for chat in chat_history:
|
154 |
background_color = "#ffeecf" if chat[2] == "new" else "#ffeecf" if chat[0] == "User" else "#ffeecf"
|