Nioooor committed
Commit 27f347a · verified · 1 Parent(s): ee4d97b

Rename app1.py to app.py

Files changed (1)
  1. app1.py → app.py  +0 -15

app1.py → app.py RENAMED
@@ -8,7 +8,6 @@ from langchain.vectorstores import Chroma
 from langchain_community.embeddings import HuggingFaceEmbeddings
 from langchain_groq import ChatGroq
 from langchain.chains import RetrievalQA
-import zipfile

 # Load the .env file (if using it)
 load_dotenv()
@@ -18,20 +17,6 @@ groq_api_key = os.getenv("GROQ_API_KEY")
 @st.cache_resource # Singleton, prevent multiple initializations
 def init_chain():

-    # Specify the path to the .zip file
-    zip_file_path = "cspc_db.zip"
-
-    # Specify the directory where you want to extract the files
-    extract_to_path = "cspc_db"
-
-    # Check if the destination directory exists, and if not, create it
-    if not os.path.exists(extract_to_path):
-        os.makedirs(extract_to_path)
-
-    # Unzip the file
-    with zipfile.ZipFile(zip_file_path, 'r') as zip_ref:
-        zip_ref.extractall(extract_to_path)
-
     model_kwargs = {'trust_remote_code': True}
     embedding = HuggingFaceEmbeddings(model_name='nomic-ai/nomic-embed-text-v1.5', model_kwargs=model_kwargs)
     llm = ChatGroq(groq_api_key=groq_api_key, model_name="llama3-70b-8192", temperature=0.2)
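For context: the hunk ends before the rest of init_chain(), so the retriever and QA-chain wiring is not shown in this commit. The sketch below is a hypothetical reconstruction, not code from the diff. It assumes the Chroma index is already persisted under the cspc_db directory (the folder the removed code used to unzip into) and a standard RetrievalQA setup built from the imports visible at the top of the file.

# Hypothetical sketch only -- not part of this commit's diff.
# Assumes the Chroma index is already persisted under "cspc_db" and that the
# chain is a plain RetrievalQA over that store.
import os

import streamlit as st
from dotenv import load_dotenv
from langchain.vectorstores import Chroma
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_groq import ChatGroq
from langchain.chains import RetrievalQA

# Load the .env file (if using it) and read the Groq key, as in app.py
load_dotenv()
groq_api_key = os.getenv("GROQ_API_KEY")

@st.cache_resource  # Singleton, prevent multiple initializations
def init_chain():
    model_kwargs = {'trust_remote_code': True}
    embedding = HuggingFaceEmbeddings(model_name='nomic-ai/nomic-embed-text-v1.5', model_kwargs=model_kwargs)
    llm = ChatGroq(groq_api_key=groq_api_key, model_name="llama3-70b-8192", temperature=0.2)

    # Open the persisted vector store directly; no unzip step is needed anymore.
    vectordb = Chroma(persist_directory="cspc_db", embedding_function=embedding)

    # Assumed wiring: retriever + LLM in a stuff-type RetrievalQA chain.
    qa_chain = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=vectordb.as_retriever(),
    )
    return qa_chain

The rename itself is presumably so the file matches the default app.py entry point expected by the hosting platform; locally the app can still be started with streamlit run app.py.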
 