ArturG9 committed on
Commit
92bd8e5
·
verified ·
1 Parent(s): e42decc

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -19
app.py CHANGED
@@ -27,7 +27,7 @@ lang_api_key = os.getenv("lang_api_key")
27
  os.environ["LANGCHAIN_TRACING_V2"] = "true"
28
  os.environ["LANGCHAIN_ENDPOINT"] = "https://api.langchain.plus"
29
  os.environ["LANGCHAIN_API_KEY"] = lang_api_key
30
- os.environ["LANGCHAIN_PROJECT"] = "Chat with multiple PDFs"
31
 
32
 
33
 
@@ -43,21 +43,6 @@ def create_retriever_from_chroma(vectorstore_path="docs/chroma/", search_type='m
43
  encode_kwargs=encode_kwargs
44
  )
45
 
46
-
47
- llm = llamacpp.LlamaCpp(
48
- model_path='qwen2-0_5b-instruct-q4_0.gguf',
49
- n_gpu_layers=0,
50
- temperature=0.1,
51
- top_p=0.9,
52
- n_ctx=22000,
53
- n_batch=2000,
54
- max_tokens=200,
55
- repeat_penalty=1.7,
56
- last_n_tokens_size = 1500,
57
- # callback_manager=callback_manager,
58
- verbose=False,
59
- )
60
-
61
 
62
  # Check if vectorstore exists
63
  if os.path.exists(vectorstore_path) and os.listdir(vectorstore_path):
@@ -107,7 +92,7 @@ def main():
107
  st.markdown("Hi, I am Qwen, chat mmodel, based on respublic of Lithuania law document. Write you question and press enter to start chat.")
108
 
109
 
110
- retriever = create_retriever_from_chroma(vectorstore_path="docs/chroma/", search_type='mmr', k=12, chunk_size=120, chunk_overlap=20)
111
  if user_question := st.text_input("Ask a question about your documents:"):
112
  handle_userinput(user_question,retriever)
113
 
@@ -153,9 +138,9 @@ def create_conversational_rag_chain(retriever):
153
  callback_manager = CallbackManager([StreamingStdOutCallbackHandler()])
154
 
155
  llm = llamacpp.LlamaCpp(
156
- model_path = "qwen2-0_5b-instruct-q4_0.gguf",
157
  n_gpu_layers=0,
158
- temperature=0.4,
159
  top_p=0.9,
160
  n_ctx=22000,
161
  n_batch=2000,
 
27
  os.environ["LANGCHAIN_TRACING_V2"] = "true"
28
  os.environ["LANGCHAIN_ENDPOINT"] = "https://api.langchain.plus"
29
  os.environ["LANGCHAIN_API_KEY"] = lang_api_key
30
+ os.environ["LANGCHAIN_PROJECT"] = "Lithuanian_Law_RAG_QA"
31
 
32
 
33
 
 
43
  encode_kwargs=encode_kwargs
44
  )
45
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
46
 
47
  # Check if vectorstore exists
48
  if os.path.exists(vectorstore_path) and os.listdir(vectorstore_path):
 
92
  st.markdown("Hi, I am Qwen, chat mmodel, based on respublic of Lithuania law document. Write you question and press enter to start chat.")
93
 
94
 
95
+ retriever = create_retriever_from_chroma(vectorstore_path="docs/chroma/", search_type='mmr', k=12, chunk_size=300, chunk_overlap=20)
96
  if user_question := st.text_input("Ask a question about your documents:"):
97
  handle_userinput(user_question,retriever)
98
 
 
138
  callback_manager = CallbackManager([StreamingStdOutCallbackHandler()])
139
 
140
  llm = llamacpp.LlamaCpp(
141
+ model_path = "qwen2-0_5b-instruct-q8_0.gguf",
142
  n_gpu_layers=0,
143
+ temperature=0.2,
144
  top_p=0.9,
145
  n_ctx=22000,
146
  n_batch=2000,