# Lake-Chatbot / app.py
from gradio_client import Client
import gradio as gr
import random
# Hidden server configuration
SERVERS = [
"https://huggingface.co/spaces/BICORP/GOGOGOGO",
"https://huggingface.co/spaces/BICORP/server-2",
"https://huggingface.co/spaces/BICORP/server-3",
"https://huggingface.co/spaces/BICORP/server-4",
"https://huggingface.co/spaces/BICORP/server-5",
"https://huggingface.co/spaces/BICORP/server-6"
]
MODELS = [
"Lake 1 Flash",
"Lake 1 Base",
"Lake 1 Advanced",
"Lake 2 Chat [Closed Alpha]",
"Lake 2 Base [Closed Beta]"
]
PRESETS = ["Fast", "Normal", "Quality", "Unreal Performance"]
def get_random_server():
"""Randomly select from available servers"""
return random.choice(SERVERS)
def get_model_info(model_name: str) -> str:
"""Fetch model specs with retry logic"""
max_retries = 2
for _ in range(max_retries):
try:
client = Client(get_random_server())
return client.predict(model_name, api_name="/get_model_info")
        except Exception:
            continue
return "⚠️ Failed to load specifications. Please try again later."
def handle_chat(message: str, history, model: str, preset: str):
"""Process chat messages with automatic server selection"""
try:
client = Client(get_random_server())
result = client.predict(
message,
model,
preset,
api_name="/chat"
)
return result
    except Exception:
        return "⚠️ Service unavailable. Please try your request again."
def respond(message, history, model, preset):
"""
Append the user's message and model's response to the conversation history.
Returns an empty string (to clear the input textbox) and the updated chat history.
"""
history = history or []
response = handle_chat(message, history, model, preset)
history.append((message, response))
return "", history
with gr.Blocks(title="BI Corp AI Assistant", theme="soft") as demo:
gr.Markdown("# <center>πŸ”οΈ BI Corp AI Assistant</center>")
gr.Markdown("### <center>Enterprise-Grade AI Solutions</center>")
with gr.Row():
with gr.Column(scale=1):
model_dropdown = gr.Dropdown(
label="πŸ€– Model Selection",
choices=MODELS,
value=MODELS[0],
interactive=True
)
preset_dropdown = gr.Dropdown(
label="βš™οΈ Performance Preset",
choices=PRESETS,
value=PRESETS[0],
interactive=True
)
model_info = gr.Markdown(
value=get_model_info(MODELS[0]),
label="πŸ“ Model Specifications"
)
with gr.Column(scale=3):
            # Keep the Chatbot height modest so the input textbox stays visible.
chatbot = gr.Chatbot(
height=300,
label="πŸ’¬ Conversation",
show_copy_button=True
)
message_input = gr.Textbox(
placeholder="Type your message...",
container=True,
scale=7,
autofocus=True
)
send_button = gr.Button("πŸš€ Send", variant="primary")
# Update the model specifications when a different model is selected.
model_dropdown.change(
fn=get_model_info,
inputs=model_dropdown,
outputs=model_info,
queue=False
)
# Wire the Send button and Enter key in the Textbox to process chat messages.
send_button.click(
fn=respond,
inputs=[message_input, chatbot, model_dropdown, preset_dropdown],
outputs=[message_input, chatbot],
queue=True
)
# Allow the Enter key in the textbox to trigger the same function.
message_input.submit(
fn=respond,
inputs=[message_input, chatbot, model_dropdown, preset_dropdown],
outputs=[message_input, chatbot],
queue=True
)
# Clear history button to reset the conversation.
clear_btn = gr.Button("🧹 Clear History")
clear_btn.click(
fn=lambda: ("", []),
inputs=[],
outputs=[message_input, chatbot],
queue=False
)
# Initialize model specifications on app load.
demo.load(
fn=lambda: get_model_info(MODELS[0]),
outputs=model_info,
queue=False
)
if __name__ == "__main__":
demo.launch()
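    # Optional (an assumption, not part of the original configuration):
    # demo.launch(share=True) would also expose a temporary public Gradio link
    # when running locally.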