# BioGPT-chatbot / app.py
# Uploaded by Blaiseboy ("Upload 2 files", commit 79cca78, verified)
import gradio as gr
import os
from medical_chatbot import ColabBioGPTChatbot
# Instantiate the chatbot with CPU settings for HF Spaces
# (use_gpu=False, use_8bit=False — presumably because Spaces CPU hardware
# has no GPU/bitsandbytes support; confirm against ColabBioGPTChatbot docs).
chatbot = ColabBioGPTChatbot(use_gpu=False, use_8bit=False)
# Set to True by upload_and_initialize() once a medical file has been
# loaded successfully; generate_response() refuses to answer until then.
medical_file_uploaded = False
def upload_and_initialize(file):
    """Load an uploaded medical .txt file into the chatbot and toggle the UI.

    Parameters:
        file: the Gradio file upload value — either a tempfile-like object
            exposing ``.name`` or a plain path string (or None if nothing
            was uploaded).

    Returns:
        A 4-tuple of (status message, Chatbot, Textbox, Button) matching the
        ``outputs`` wired to ``upload_btn.click``: chat widgets are made
        visible only when the data loads successfully.
    """
    global medical_file_uploaded

    # Components returned when the chat UI should stay hidden.
    hidden_ui = (
        gr.Chatbot(visible=False),
        gr.Textbox(visible=False),
        gr.Button(visible=False),
    )

    if file is None:
        return ("❌ Please upload a medical .txt file.", *hidden_ui)

    # Handle the file path correctly for Gradio: newer versions pass a path
    # string, older ones a tempfile wrapper with a .name attribute.
    file_path = file.name if hasattr(file, 'name') else file

    try:
        success = chatbot.load_medical_data(file_path)
    except Exception:
        # A malformed/unreadable file should surface as a status message in
        # the UI, not crash the event handler.
        success = False

    if success:
        medical_file_uploaded = True
        model_name = type(chatbot.model).__name__ if chatbot.model else "Fallback Model"
        status = f"✅ Medical data processed successfully!\n📦 Model in use: {model_name}"
        return (
            status,
            gr.Chatbot(visible=True),
            gr.Textbox(visible=True),
            gr.Button(visible=True),
        )
    return ("❌ Failed to process uploaded file.", *hidden_ui)
def generate_response(user_input):
    """Answer *user_input* via the chatbot, or warn if no data is loaded yet.

    Requires a prior successful upload_and_initialize() call; until then a
    warning string is returned instead of a model answer.
    """
    if medical_file_uploaded:
        return chatbot.chat(user_input)
    return "⚠️ Please upload and initialize medical data first."
# Assemble the Gradio interface: file upload row, status box, and an
# initially-hidden chat area that upload_and_initialize() reveals.
with gr.Blocks(title="🩺 Pediatric Medical Assistant") as demo:
    gr.Markdown("## 🩺 Pediatric Medical Assistant\nUpload a medical .txt file and start chatting.")

    with gr.Row():
        file_input = gr.File(label="📁 Upload Medical File", file_types=[".txt"])
        upload_btn = gr.Button("📤 Upload and Initialize")

    upload_output = gr.Textbox(label="System Status", interactive=False)

    # Chat widgets start hidden; they become visible after a successful upload.
    chatbot_ui = gr.Chatbot(label="🧠 Chat History", visible=False)
    user_input = gr.Textbox(
        placeholder="Ask a pediatric health question...",
        lines=2,
        show_label=False,
        visible=False
    )
    submit_btn = gr.Button("Send", visible=False)

    upload_btn.click(
        fn=upload_and_initialize,
        inputs=[file_input],
        outputs=[upload_output, chatbot_ui, user_input, submit_btn],
    )

    def on_submit(message, history):
        """Append (question, answer) to the chat history and clear the textbox."""
        if not message.strip():
            # Blank/whitespace input: just clear the box, keep history intact.
            return "", history
        history.append((message, generate_response(message)))
        return "", history

    # Both pressing Enter in the textbox and clicking Send submit a question.
    for trigger in (user_input.submit, submit_btn.click):
        trigger(
            fn=on_submit,
            inputs=[user_input, chatbot_ui],
            outputs=[user_input, chatbot_ui],
        )
# Entry point: launch settings tailored for Hugging Face Spaces.
if __name__ == "__main__":
    launch_options = {
        "share": False,            # Spaces hosts the app itself; no share link needed
        "server_name": "0.0.0.0",  # listen on all interfaces so the Space proxy can connect
        "server_port": 7860,       # standard port expected by HF Spaces
        "show_error": True,        # surface detailed errors in the UI for debugging
    }
    demo.launch(**launch_options)