from gradio_client import Client
import gradio as gr
import random

# Hidden server configuration
SERVERS = [
    "https://huggingface.co/spaces/BICORP/GOGOGOGO",
    "https://huggingface.co/spaces/BICORP/server-2",
    "https://huggingface.co/spaces/BICORP/server-3",
    "https://huggingface.co/spaces/BICORP/server-4",
    "https://huggingface.co/spaces/BICORP/server-5",
    "https://huggingface.co/spaces/BICORP/server-6"
]

MODELS = [
    "Lake 1 Flash",
    "Lake 1 Base",
    "Lake 1 Advanced",
    "Lake 2 Chat [Closed Alpha]",
    "Lake 2 Base [Closed Beta]"
]

PRESETS = ["Fast", "Normal", "Quality", "Unreal Performance"]


def get_random_server():
    """Randomly select from available servers."""
    return random.choice(SERVERS)


def get_model_info(model_name: str) -> str:
    """Fetch model specs with retry logic."""
    max_retries = 2
    for _ in range(max_retries):
        try:
            client = Client(get_random_server())
            return client.predict(model_name, api_name="/get_model_info")
        except Exception:
            # Try another randomly selected server on the next attempt
            continue
    return "⚠️ Failed to load specifications. Please try again later."


def handle_chat(message: str, history, model: str, preset: str):
    """Process chat messages with automatic server selection."""
    try:
        client = Client(get_random_server())
        result = client.predict(
            message,
            model,
            preset,
            api_name="/chat"
        )
        return result
    except Exception:
        return "⚠️ Service unavailable. Please try your request again."


def respond(message, history, model, preset):
    """
    Append the user's message and the model's response to the conversation history.

    Returns an empty string (to clear the input textbox) and the updated chat history.
    """
    history = history or []
    response = handle_chat(message, history, model, preset)
    history.append((message, response))
    return "", history


with gr.Blocks(title="BI Corp AI Assistant", theme="soft") as demo:
    gr.Markdown("#