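"""Gradio chat front-end that relays each user message to a randomly
chosen backend Space via gradio_client and returns the backend's reply."""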
import random
import gradio as gr
from gradio_client import Client
# List of server endpoints
servers = [
    "BICORP/GOGOGOGO",  # Server 1
    "BICORP/server-2",  # Server 2
    "BICORP/server-3",  # Server 3
    "BICORP/server-4",  # Server 4
    "BICORP/server-5",  # Server 5
    "BICORP/server-6",  # Server 6
]
# Define presets for each model
presets = {
    "Fast": "Fast",
    "Normal": "Normal",
    "Quality": "Quality",
    "Unreal Performance": "Unreal Performance",
}
# Function to respond to user input
def respond(message, history: list, selected_model, selected_preset):
    # Randomly select a server to spread load across the backend Spaces
    server = random.choice(servers)
    client = Client(server)
    # Note: the chat history supplied by gr.ChatInterface is not forwarded;
    # the backend only receives the latest message.
    try:
        response = client.predict(
            message=message,           # Required parameter: the user's message
            param_2=selected_model,    # Model selection
            param_3=selected_preset,   # Preset selection
            api_name="/chat"           # Endpoint exposed by the backend Space
        )
        return response
    except Exception as e:
        return f"Error: {str(e)}"  # Surface the error message in the chat
# Model names and their pseudonyms (display name, backend name)
model_choices = [
    ("Lake 1 Base", "Lake 1 Base")  # Only one model is currently available
]
# Display names shown in the model dropdown
pseudonyms = [model[0] for model in model_choices]
# Function to handle model selection and pseudonyms
def respond_with_pseudonym(message, history: list, selected_model, selected_preset):
    # Delegate to the existing respond function
    return respond(message, history, selected_model, selected_preset)
# Gradio Chat Interface
demo = gr.ChatInterface(
    fn=respond_with_pseudonym,
    additional_inputs=[
        gr.Dropdown(choices=pseudonyms, label="Select Model", value=pseudonyms[0]),       # Pseudonym selection dropdown
        gr.Dropdown(choices=list(presets.keys()), label="Select Preset", value="Fast"),   # Preset selection dropdown
    ],
)
if __name__ == "__main__":
    demo.launch()