ArturG9 committed
Commit 1eac1eb (verified) · 1 Parent(s): 96078d7

Update app.py

Files changed (1)
  1. app.py +18 -3
app.py CHANGED
@@ -60,12 +60,16 @@ def get_text_chunks(text):
     return chunks
 
 def create_conversational_rag_chain(vectorstore):
-    retriever = retriever_from_chroma(docs, hf, "mmr", 6)
+
+    script_dir = os.path.dirname(os.path.abspath(__file__))
+    model_path = os.path.join(script_dir, 'qwen2-0_5b-instruct-q4_0.gguf')
+
+    retriever = vectorstore.as_retriever(search_type='mmr', search_kwargs={"k": 7})
 
     callback_manager = CallbackManager([StreamingStdOutCallbackHandler()])
 
     llm = llamacpp.LlamaCpp(
-        model_path=os.path.join(script_dir, 'qwen2-0_5b-instruct-q4_0.gguf'),
+        model_path=os.path.join(model_path),
         n_gpu_layers=1,
         temperature=0.1,
         top_p=0.9,
@@ -110,8 +114,19 @@ def create_conversational_rag_chain(vectorstore):
 def main():
     """Main function for the Streamlit app."""
     # Initialize chat history if not already present in session state
+    script_dir = os.path.dirname(os.path.abspath(__file__))
+    data_path = os.path.join(script_dir, "data/")
+
+
+
+    documents = []
+    documents = load_txt_documents()
+    docs = split_docs(documents, 350, 40)
+
+    vectorstore = get_vectorstore(docs)
+
     msgs = st.session_state.get("chat_history", StreamlitChatMessageHistory(key="special_app_key"))
-    chain_with_history = create_conversational_rag_chain()
+    chain_with_history = create_conversational_rag_chain(vectorstore)
 
     st.title("Conversational RAG Chatbot")
 
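In short, the commit has create_conversational_rag_chain resolve the GGUF model path itself and build its retriever from the vectorstore passed in, while main() now loads and splits the text documents, builds the vectorstore, and hands it to the chain. For context, the new retriever line is LangChain's standard as_retriever API with maximal-marginal-relevance (MMR) search. Below is a minimal sketch of that call in isolation; it assumes a Chroma vectorstore built with HuggingFace embeddings, and the embedding model name, docs variable, and query string are illustrative rather than taken from this commit.

# Sketch only: approximates what the app's get_vectorstore helper plus the new
# as_retriever call do, under the assumptions stated above.
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma

embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")  # assumed model
vectorstore = Chroma.from_documents(documents=docs, embedding=embeddings)  # docs = split document chunks

# MMR diversifies the k=7 retrieved chunks instead of returning pure nearest neighbours.
retriever = vectorstore.as_retriever(search_type="mmr", search_kwargs={"k": 7})
context_docs = retriever.invoke("What is this app about?")

Compared with the removed retriever_from_chroma(docs, hf, "mmr", 6) helper, this keeps the retriever tied to whatever vectorstore the caller supplies and raises k from 6 to 7.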