Fecalisboa committed
Commit 1010bff · verified · 1 Parent(s): fa8e56a

Update app.py

Files changed (1)
  1. app.py +7 -6
app.py CHANGED
@@ -31,7 +31,8 @@ def load_doc(list_file_path, chunk_size, chunk_overlap):
     return doc_splits
 
 # Create vector database
-def create_db(splits, collection_name, db_type):
+def create_db(list_file_path, chunk_size, chunk_overlap, db_type):
+    splits = load_doc(list_file_path, chunk_size, chunk_overlap)
     embedding = HuggingFaceEmbeddings()
 
     if db_type == "ChromaDB":
@@ -40,7 +41,7 @@ def create_db(splits, collection_name, db_type):
             documents=splits,
             embedding=embedding,
             client=new_client,
-            collection_name=collection_name,
+            collection_name="default_collection",
         )
     elif db_type == "FAISS":
         vectordb = FAISS.from_documents(
@@ -56,12 +57,12 @@ def create_db(splits, collection_name, db_type):
         vectordb = Milvus.from_documents(
             documents=splits,
             embedding=embedding,
-            collection_name=collection_name,
+            collection_name="default_collection",
         )
     else:
         raise ValueError(f"Unsupported vector database type: {db_type}")
 
-    return vectordb
+    return vectordb, "default_collection", "Vector database created successfully"
 
 # Initialize langchain LLM chain
 def initialize_llmchain(llm_model, temperature, max_tokens, top_k, vector_db, initial_prompt, progress=gr.Progress()):
@@ -251,13 +252,13 @@ def demo():
         clear_btn_no_doc = gr.ClearButton([msg_no_doc, chatbot_no_doc], value="Clear conversation")
 
         # Preprocessing events
-        db_btn.click(initialize_database,
+        db_btn.click(create_db,
                      inputs=[document, slider_chunk_size, slider_chunk_overlap, db_type_radio],
                      outputs=[vector_db, collection_name, db_progress])
         set_prompt_btn.click(lambda prompt: gr.update(value=prompt),
                              inputs=prompt_input,
                              outputs=initial_prompt)
-        qachain_btn.click(initialize_LLM,
+        qachain_btn.click(initialize_llmchain,
                           inputs=[llm_btn, slider_temperature, slider_maxtokens, slider_topk, vector_db, initial_prompt],
                           outputs=[qa_chain, llm_progress]).then(lambda:[None,"",0,"",0,"",0],
                           inputs=None,
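For reference, the reworked create_db reads roughly as follows once the hunks above are applied. Only the lines visible in the diff are certain; the imports, the load_doc body, the Chroma client setup (new_client), and the exact FAISS/Milvus branch boundaries fall outside the diff and are sketched here as assumptions based on the standard LangChain APIs.

# Sketch only: anything not shown in the diff is an assumption.
import chromadb
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma, FAISS, Milvus
from langchain.text_splitter import RecursiveCharacterTextSplitter

def load_doc(list_file_path, chunk_size, chunk_overlap):
    # Assumed loader; only its signature is visible in the first hunk header.
    pages = [page for path in list_file_path for page in PyPDFLoader(path).load()]
    splitter = RecursiveCharacterTextSplitter(chunk_size=chunk_size, chunk_overlap=chunk_overlap)
    return splitter.split_documents(pages)

def create_db(list_file_path, chunk_size, chunk_overlap, db_type):
    # New in this commit: create_db chunks the documents itself instead of
    # receiving pre-split docs and a collection name from the caller.
    splits = load_doc(list_file_path, chunk_size, chunk_overlap)
    embedding = HuggingFaceEmbeddings()

    if db_type == "ChromaDB":
        new_client = chromadb.EphemeralClient()  # assumed; the client setup is outside the diff
        vectordb = Chroma.from_documents(
            documents=splits,
            embedding=embedding,
            client=new_client,
            collection_name="default_collection",  # hard-coded name replaces the old parameter
        )
    elif db_type == "FAISS":
        vectordb = FAISS.from_documents(
            documents=splits,
            embedding=embedding,
        )
    elif db_type == "Milvus":  # assumed condition; the elif line itself is outside the diff
        vectordb = Milvus.from_documents(
            documents=splits,
            embedding=embedding,
            collection_name="default_collection",
        )
    else:
        raise ValueError(f"Unsupported vector database type: {db_type}")

    # Three return values so the click handler can fill
    # [vector_db, collection_name, db_progress] in one go.
    return vectordb, "default_collection", "Vector database created successfully"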
 
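Wiring db_btn.click directly to create_db works because Gradio unpacks a handler's returned tuple into the listed output components in order, which is why the function now returns three values. Below is a minimal, self-contained sketch of that mechanism; the component types and labels are assumptions rather than taken from app.py, and create_db is replaced by a stub with the same return shape.

import gradio as gr

def create_db_stub(list_file_path, chunk_size, chunk_overlap, db_type):
    # Stand-in for create_db with the same (vectordb, collection_name, status) return shape.
    return f"fake-{db_type}-index", "default_collection", "Vector database created successfully"

with gr.Blocks() as demo:
    document = gr.File(label="Upload PDF documents", file_count="multiple")
    slider_chunk_size = gr.Slider(100, 1000, value=600, label="Chunk size")
    slider_chunk_overlap = gr.Slider(10, 200, value=40, label="Chunk overlap")
    db_type_radio = gr.Radio(["ChromaDB", "FAISS", "Milvus"], value="ChromaDB", label="Vector database")
    db_btn = gr.Button("Generate vector database")

    vector_db = gr.State()        # receives the first returned value (the vectordb object)
    collection_name = gr.State()  # receives "default_collection"
    db_progress = gr.Textbox(label="Initialization status")  # receives the status string

    # The 3-tuple returned by the handler is unpacked into these outputs, in order.
    db_btn.click(create_db_stub,
                 inputs=[document, slider_chunk_size, slider_chunk_overlap, db_type_radio],
                 outputs=[vector_db, collection_name, db_progress])

if __name__ == "__main__":
    demo.launch()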