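"""Lake-Chatbot Gradio app.

Forwards each chat message to one of several mirror server Spaces
(picked at random) via gradio_client and returns the reply.
"""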
import random
import gradio as gr
from gradio_client import Client
# List of server endpoints
servers = [
"BICORP/GOGOGOGO", # Server 1
"BICORP/server-2", # Server 2
"BICORP/server-3", # Server 3
"BICORP/server-4", # Server 4
"BICORP/server-5", # Server 5
"BICORP/server-6" # Server 6
]
# Define presets for each model
presets = {
"Fast": "Fast",
"Normal": "Normal",
"Quality": "Quality",
"Unreal Performance": "Unreal Performance"
}
# Forward the user's message to a randomly chosen server and return its reply
def respond(message, history: list, selected_model, selected_preset):
    # Randomly select a server to spread requests across the mirrors
    server = random.choice(servers)
    client = Client(server)
    # Wrap the message in chat-message format (kept for reference; the /chat
    # endpoint below only needs the raw message string)
    messages = [{"role": "user", "content": message}]
    # Get the response from the model
    try:
        response = client.predict(
            message=message,            # Required parameter: the user's message
            param_2=selected_model,     # Model selection
            param_3=selected_preset,    # Preset selection
            api_name="/chat"            # Chat endpoint; must match the server's API name
        )
        return response                 # Return the model's reply
    except Exception as e:
        return f"Error: {str(e)}"       # Return the error message to the chat
# Model display names (pseudonyms) paired with their underlying model names
model_choices = [
    ("Lake 1 Base", "Lake 1 Base")  # Only one model is available at the moment
]
# Pseudonyms shown in the model-selection dropdown
pseudonyms = [model[0] for model in model_choices]
# Wrapper that passes the selected pseudonym and preset through to respond
def respond_with_pseudonym(message, history: list, selected_model, selected_preset):
    # Call the existing respond function
    response = respond(message, history, selected_model, selected_preset)
    return response
# Gradio Chat Interface
demo = gr.ChatInterface(
    fn=respond_with_pseudonym,
    additional_inputs=[
        gr.Dropdown(choices=pseudonyms, label="Select Model", value=pseudonyms[0]),      # Pseudonym selection dropdown
        gr.Dropdown(choices=list(presets.keys()), label="Select Preset", value="Fast")   # Preset selection dropdown
    ],
)
if __name__ == "__main__":
    demo.launch()
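# Usage note: run locally with `python app.py`; passing share=True to
# demo.launch() would also create a temporary public Gradio link.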