Initial Space setup
app.py
CHANGED
@@ -34,14 +34,13 @@ SYSTEM_PROMPT = (
 )
 
 def respond(query):
-    #
+    # Build the prompt correctly by concatenating
     prompt = (
         SYSTEM_PROMPT
-        f"question: {query}\n"
-        "answer:"
+        + f"question: {query}\n"
+        + "answer:"
     )
 
-    # 6) Generate
     out = chatbot(
         prompt,
         max_new_tokens=128,
@@ -51,18 +50,19 @@ def respond(query):
         pad_token_id=tokenizer.eos_token_id
     )[0]["generated_text"]
 
-    #
+    # Strip off the “answer:” prefix
    if "answer:" in out:
         reply = out.split("answer:", 1)[1].strip()
     else:
         reply = out.strip()
 
-    #
+    # Fallback for unknowns
     if len(reply) < 15 or "don't know" in reply.lower() or "sorry" in reply.lower():
         return "Sorry, I don’t have that info—please contact [email protected]."
 
     return reply
 
+
 # 9) Gradio UI
 with gr.Blocks() as demo:
     gr.Markdown("# 🤖 BayEdger FAQ Chatbot")
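
Note: the hunks above only change the body of respond(); everything it relies on (chatbot, tokenizer, SYSTEM_PROMPT) is defined elsewhere in app.py and is not part of this commit. Below is a minimal sketch of that assumed setup, with an illustrative MODEL_ID and prompt text (both assumptions, not from the diff), plus a demonstration of why the "+" operators added in hunk 1 are needed: Python only auto-concatenates adjacent string literals, so a variable followed directly by an f-string is a SyntaxError.

# Sketch of the context respond() assumes; MODEL_ID and the prompt wording
# are illustrative placeholders, since the diff only shows respond()'s body.
from transformers import AutoTokenizer, pipeline

MODEL_ID = "distilgpt2"  # assumption: any causal-LM checkpoint works the same way
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
chatbot = pipeline("text-generation", model=MODEL_ID, tokenizer=tokenizer)

SYSTEM_PROMPT = (
    # assumption: the real FAQ/instruction text sits above line 34 in app.py
    "You are the BayEdger FAQ assistant. Answer using only the FAQ below.\n"
)

# Why hunk 1 adds "+": adjacent string *literals* concatenate implicitly,
# but a name followed by a literal does not.
query = "What services does BayEdger offer?"
# prompt = (SYSTEM_PROMPT f"question: {query}\n" "answer:")      -> SyntaxError
prompt = SYSTEM_PROMPT + f"question: {query}\n" + "answer:"      # -> OK
print(prompt)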