Update app.py
app.py CHANGED
@@ -4,8 +4,13 @@ from transformers import pipeline
 from sentence_transformers import SentenceTransformer, util
 import PyPDF2
 
-# Set up logging
-
+# Set up logging with immediate flushing
+logger = logging.getLogger()
+logger.setLevel(logging.INFO)
+handler = logging.FileHandler('support_bot_log.txt', mode='a', buffering=1)  # Line-buffered for immediate writes
+formatter = logging.Formatter('%(asctime)s - %(message)s')
+handler.setFormatter(formatter)
+logger.addHandler(handler)
 
 # Load models
 qa_model = pipeline("question-answering", model="distilbert-base-uncased-distilled-squad")
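Note (not part of this commit): the standard library's logging.FileHandler does not accept a buffering argument, so the line added above raises a TypeError as soon as app.py is imported. FileHandler inherits StreamHandler.emit, which already flushes after every record, so the "immediate writes" behaviour the comment describes is available without that argument. A minimal sketch of an equivalent setup:

    import logging

    # Equivalent setup without the unsupported `buffering` keyword; each record
    # is flushed to disk by StreamHandler.emit() as soon as it is logged.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    handler = logging.FileHandler('support_bot_log.txt', mode='a')
    handler.setFormatter(logging.Formatter('%(asctime)s - %(message)s'))
    logger.addHandler(handler)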
@@ -50,18 +55,22 @@ def find_relevant_section(query, sections, section_embeddings):
     logging.info(f"No good keyword match found. Returning default fallback response.")
     return "I don’t have enough information to answer that."
 
-# Process the uploaded file
+# Process the uploaded file with detailed logging
 def process_file(file, state):
     if file is None:
+        logging.info("No file uploaded.")
         return [("Bot", "Please upload a file.")], state
 
     file_path = file.name
     if file_path.lower().endswith(".pdf"):
+        logging.info(f"Uploaded PDF file: {file_path}")
         text = extract_text_from_pdf(file_path)
     elif file_path.lower().endswith(".txt"):
+        logging.info(f"Uploaded TXT file: {file_path}")
         with open(file_path, 'r', encoding='utf-8') as f:
             text = f.read()
     else:
+        logging.error(f"Unsupported file format: {file_path}")
         return [("Bot", "Unsupported file format. Please upload a PDF or TXT file.")], state
 
     sections = text.split('\n\n')
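extract_text_from_pdf is called in this hunk but not shown in the diff. For orientation, a hypothetical implementation using the PyPDF2 import from the first hunk (assuming PyPDF2 ≥ 2.0, where the reader class is PdfReader) could look like:

    import PyPDF2

    def extract_text_from_pdf(file_path):
        # Hypothetical helper, not the code in app.py: concatenate the text of every page.
        pages = []
        with open(file_path, 'rb') as f:
            reader = PyPDF2.PdfReader(f)
            for page in reader.pages:
                pages.append(page.extract_text() or "")
        return "\n".join(pages)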
@@ -80,6 +89,7 @@ def process_file(file, state):
 def handle_input(user_input, state):
     if state['mode'] == 'waiting_for_upload':
         state['chat_history'].append(("Bot", "Please upload a file first."))
+        logging.info("User attempted to interact without uploading a file.")
     elif state['mode'] == 'waiting_for_query':
         query = user_input
         state['current_query'] = query
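handle_input relies on a Gradio state dict with 'mode', 'chat_history' and 'current_query' keys; its initialisation is not part of this diff. A hypothetical initial value consistent with the keys and modes used above:

    # Hypothetical initial state (not shown in this diff) matching the keys used in handle_input.
    state = gr.State({
        'mode': 'waiting_for_upload',
        'chat_history': [],
        'current_query': None,
    })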
@@ -140,6 +150,7 @@ with gr.Blocks() as demo:
     chat = gr.Chatbot()
     user_input = gr.Textbox(label="Your query or feedback")
     submit_btn = gr.Button("Submit")
+    log_file = gr.File(label="Download Log File", value="support_bot_log.txt")  # Added for log download
 
     # Process file upload
     file_upload.upload(process_file, inputs=[file_upload, state], outputs=[chat, state])
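The FileHandler from the first hunk opens support_bot_log.txt in append mode when it is constructed (see the note there about the buffering argument), so the file exists by the time the new gr.File component is built and can be offered for download immediately. Depending on the Gradio version, the component may cache a copy of the file when the interface is rendered, in which case its value would need to be refreshed after interactions for the newest log lines to appear in the download.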