Update app.py
app.py CHANGED
@@ -1,9 +1,9 @@
 import gradio as gr
-from huggingface_hub import InferenceClient
 import requests
-from bs4 import BeautifulSoup
-from
+from bs4 import BeautifulSoup, Comment
+from llama_cpp import Llama
 
+# Function to extract visible text from a webpage
 def tag_visible(element):
     if element.parent.name in ['style', 'script', 'head', 'title', 'meta', '[document]']:
         return False
@@ -12,48 +12,49 @@ def tag_visible(element):
     return True
 
 def get_text_from_url(url):
-    response = requests.get(url)
+    response = requests.get(url, timeout=10)
     soup = BeautifulSoup(response.text, 'html.parser')
     texts = soup.find_all(text=True)
     visible_texts = filter(tag_visible, texts)
-    return "
+    return " ".join(t.strip() for t in visible_texts)
 
-# Pre-fetch and truncate homepage text
+# Pre-fetch and truncate homepage text
 text_list = []
 homepage_url = "https://sites.google.com/view/abhilashnandy/home/"
 extensions = ["", "pmrf-profile-page"]
+
 for ext in extensions:
-
-
-
+    try:
+        full_text = get_text_from_url(homepage_url + ext)
+        truncated_text = full_text[:2000]  # Using first 2000 characters for more context
+        text_list.append(truncated_text)
+    except Exception as e:
+        text_list.append(f"Error fetching {homepage_url+ext}: {str(e)}")
 
-
-    "You are a QA chatbot to answer queries (in less than 30 words) on my homepage. "
-    "Context: " + " ".join(text_list)
-)
+CONTEXT = " ".join(text_list)
 
-#
-
-client = InferenceClient(model="google/flan-t5-base")
+# Load the Mistral model (low-latency, CPU optimized)
+llm = Llama(model_path="mistral-7b-instruct-v0.1.Q4_K_M.gguf", n_ctx=4096, n_threads=6, verbose=False)
 
+# Function to answer queries
 def answer_query(query):
-
-
-
-
-
-
-
-
-
-
-
+    prompt = (
+        "You are an AI chatbot answering queries based on the homepage of Abhilash Nandy. "
+        "Your responses should be concise (under 30 words) and directly relevant to the provided context.\n\n"
+        f"Context: {CONTEXT}\n\nUser: {query}\nAI:"
+    )
+
+    response = llm(prompt, max_tokens=50, stop=["\nUser:", "\nAI:"], echo=False)
+
+    return response["choices"][0]["text"].strip()
+
+# Gradio Interface
 iface = gr.Interface(
     fn=answer_query,
-    inputs=gr.Textbox(lines=2, placeholder="
+    inputs=gr.Textbox(lines=2, placeholder="Ask a question about Abhilash Nandy..."),
     outputs="text",
     title="Homepage QA Chatbot",
-    description="
+    description="Ask me anything about Abhilash Nandy's homepage."
 )
 
 if __name__ == '__main__':
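
Note: the updated script expects the quantized weights file mistral-7b-instruct-v0.1.Q4_K_M.gguf to already exist in the working directory; the commit itself does not add it. Below is a minimal sketch of fetching a GGUF file from the Hub before constructing the Llama object, assuming the weights come from the TheBloke/Mistral-7B-Instruct-v0.1-GGUF repository (that repo choice is an assumption, not something this commit specifies).

# Sketch: download the GGUF weights, then load them with llama_cpp.
# Assumption: the file is pulled from TheBloke/Mistral-7B-Instruct-v0.1-GGUF;
# the commit only references a local path.
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

model_path = hf_hub_download(
    repo_id="TheBloke/Mistral-7B-Instruct-v0.1-GGUF",  # assumed source repo
    filename="mistral-7b-instruct-v0.1.Q4_K_M.gguf",
)

# Same constructor arguments as in the commit.
llm = Llama(model_path=model_path, n_ctx=4096, n_threads=6, verbose=False)

# Quick smoke test of the completion call shape used by answer_query().
out = llm("User: Who maintains this homepage?\nAI:", max_tokens=50, echo=False)
print(out["choices"][0]["text"].strip())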