tstone87 committed on
Commit cbe5279 · verified · 1 Parent(s): 4611fb0

Update app.py

Files changed (1)
  1. app.py +16 -7
app.py CHANGED
@@ -1,15 +1,16 @@
 import os
+import urllib.parse
 import fitz # PyMuPDF for PDF reading
 import faiss
 import numpy as np
 import gradio as gr
 from sentence_transformers import SentenceTransformer
-from huggingface_hub import hf_hub_download
+from huggingface_hub import hf_hub_download, InferenceClient
 
 # 🔹 Hugging Face Space Repository Details
 HF_REPO_ID = "tstone87/ccr-colorado"
 
-# 🔹 Load Embedding Model (Better for QA Retrieval)
+# 🔹 Load Embedding Model (Optimized for QA Retrieval)
 model = SentenceTransformer("multi-qa-mpnet-base-dot-v1")
 
 # 🔹 Define PDF Directory and Chunk Size
@@ -19,19 +20,28 @@ CHUNK_SIZE = 2500 # Larger chunks for better context
 # 🔹 Ensure Directory Exists
 os.makedirs(PDF_DIR, exist_ok=True)
 
-# 🔹 Function to Download PDFs from Hugging Face Space (Handles LFS Storage)
+# 🔹 Function to Download PDFs from Hugging Face Space (Handles Spaces)
 def download_pdfs():
     pdf_files = [
         "SNAP 10 CCR 2506-1 .pdf",
         "Med 10 CCR 2505-10 8.100.pdf",
     ]
-
+
     for pdf_file in pdf_files:
         pdf_path = os.path.join(PDF_DIR, pdf_file)
+
         if not os.path.exists(pdf_path): # Download if not already present
             print(f"📥 Downloading {pdf_file}...")
-            hf_hub_download(repo_id=HF_REPO_ID, filename=pdf_file, local_dir=PDF_DIR, force_download=True)
-
+
+            # URL encode spaces correctly
+            encoded_filename = urllib.parse.quote(pdf_file)
+
+            try:
+                hf_hub_download(repo_id=HF_REPO_ID, filename=encoded_filename, local_dir=PDF_DIR, force_download=True)
+                print(f"✅ Successfully downloaded {pdf_file}")
+            except Exception as e:
+                print(f"❌ Error downloading {pdf_file}: {e}")
+
     print("✅ All PDFs downloaded.")
 
 # 🔹 Function to Extract Text from PDFs
@@ -79,7 +89,6 @@ def search_policy(query, top_k=3):
     return "\n\n".join([chunks[i] for i in indices[0] if i < len(chunks)])
 
 # 🔹 Hugging Face LLM Client
-from huggingface_hub import InferenceClient
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
 # 🔹 Function to Handle Chat Responses
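For reference, below is a minimal standalone sketch of the download step this commit changes. It is not the committed code: it assumes the PDFs are stored directly in the tstone87/ccr-colorado Space repo (hence repo_type="space") and uses a hypothetical local folder "pdfs" for PDF_DIR, whose real value lives elsewhere in app.py. Since hf_hub_download URL-escapes filenames internally, the sketch passes each filename exactly as stored, spaces included.

import os
from huggingface_hub import hf_hub_download

HF_REPO_ID = "tstone87/ccr-colorado"
PDF_DIR = "pdfs"  # assumption: local target folder for illustration only
PDF_FILES = [
    "SNAP 10 CCR 2506-1 .pdf",
    "Med 10 CCR 2505-10 8.100.pdf",
]

def download_pdfs() -> None:
    """Download each PDF once, reporting success or failure per file."""
    os.makedirs(PDF_DIR, exist_ok=True)
    for pdf_file in PDF_FILES:
        pdf_path = os.path.join(PDF_DIR, pdf_file)
        if os.path.exists(pdf_path):
            continue  # already present, skip
        print(f"📥 Downloading {pdf_file}...")
        try:
            # repo_type="space" is an assumption: the files are fetched from the Space repo itself.
            hf_hub_download(
                repo_id=HF_REPO_ID,
                repo_type="space",
                filename=pdf_file,
                local_dir=PDF_DIR,
            )
            print(f"✅ Successfully downloaded {pdf_file}")
        except Exception as e:
            print(f"❌ Error downloading {pdf_file}: {e}")

if __name__ == "__main__":
    download_pdfs()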