Commit b8a64cb · 1 Parent(s): d7b3b8a
require index, select all indices by default, swap submit buttons
app.py CHANGED

@@ -203,7 +203,7 @@ def run_search(
         metadata = qdoc.metadata
         # print(metadata)
         data = BaseModel(
-            index=index,
+            index=metadata.get("index"),
             id=metadata.get("id"),
             title=metadata.get("title"),
             ctime=metadata.get("ctime"),
@@ -217,11 +217,18 @@ def run_search(
 with st.form("my_form"):
     st.title("Document Search")
     query = st.text_area(label="query")
-    index_list = st.multiselect(
+    index_list = st.multiselect(
+        label="index",
+        options=INDEX_NAMES,
+        default=INDEX_NAMES,
+        placeholder="Select index",
+    )
 
     submit_col1, submit_col2 = st.columns(2)
-    searched =
-    if
+    searched = submit_col2.form_submit_button("Search")
+    if not index_list:
+        st.error("Please select at least one index.")
+    if searched and index_list:
         st.divider()
         st.header("Search Results")
         st.divider()
@@ -240,8 +247,8 @@ with st.form("my_form"):
             st.write(text)
             st.write("score:", score, "Date:", ctime.date(), "User:", user)
             st.divider()
-    qa_searched =
-    if qa_searched:
+    qa_searched = submit_col1.form_submit_button("Q&A by OpenAI")
+    if qa_searched and index_list:
         st.divider()
         st.header("Answer by OpenAI GPT-3")
         st.divider()
@@ -256,8 +263,8 @@ with st.form("my_form"):
         st.write(answer)
         st.markdown(html, unsafe_allow_html=True)
         st.divider()
-    if torch.cuda.is_available():
-        qa_searched_vicuna =
+    if torch.cuda.is_available() and index_list:
+        qa_searched_vicuna = submit_col1.form_submit_button("Answer by Vicuna")
         if qa_searched_vicuna:
             st.divider()
             st.header("Answer by Vicuna-13b-v1.5")
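For context, the pattern this commit introduces can be seen in isolation: a required index multiselect that pre-selects every option, plus multiple submit buttons sharing one st.form. The sketch below is a minimal standalone illustration, not the full app; the INDEX_NAMES values and the st.write bodies are hypothetical stand-ins for the real index names and the search/Q&A calls, and the multiselect placeholder argument assumes a recent Streamlit release.

import streamlit as st

INDEX_NAMES = ["mail", "wiki", "docs"]  # hypothetical index names for illustration

with st.form("demo_form"):
    st.title("Document Search")
    query = st.text_area(label="query")
    index_list = st.multiselect(
        label="index",
        options=INDEX_NAMES,
        default=INDEX_NAMES,        # select all indices by default
        placeholder="Select index",
    )
    submit_col1, submit_col2 = st.columns(2)
    searched = submit_col2.form_submit_button("Search")
    qa_searched = submit_col1.form_submit_button("Q&A by OpenAI")
    if not index_list:
        st.error("Please select at least one index.")  # index is required
    if searched and index_list:
        st.write("would run the search over", index_list, "for:", query)  # stand-in for run_search
    if qa_searched and index_list:
        st.write("would run Q&A over", index_list, "for:", query)  # stand-in for the OpenAI call

Streamlit allows several form_submit_button calls inside one form, so the Search and Q&A buttons can live in separate columns while still submitting the same query and index selection.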