import gradio as gr
import sambanova_gradio

# Define a function to load the selected model from the SambaNova registry
def load_model(model_choice):
    model = gr.load(
        name=model_choice,
        src=sambanova_gradio.registry
    )
    return model

# Available model choices
model_choices = [
    "Meta-Llama-3.2-1B-Instruct",
    "Meta-Llama-3.2-3B-Instruct",
    "Meta-Llama-3.1-8B-Instruct",
    "Meta-Llama-3.1-405B-Instruct",
    "Meta-Llama-3.1-70B-Instruct"
]

# Create Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# Meta-Llama Model Selector")

    # Dropdown for selecting the model
    model_choice = gr.Dropdown(
        choices=model_choices,
        label="Choose a Meta-Llama Model",
        value="Meta-Llama-3.2-3B-Instruct"  # default value
    )

    # Textbox for user input
    input_box = gr.Textbox(
        label="Input",
        placeholder="Ask a question..."
    )

    # Example prompts that can be clicked to fill the input box
    gr.Examples(
        examples=["Explain quantum gravity to a 5-year old.",
                  "How many R are there in the word Strawberry?"],
        inputs=input_box
    )

    # Output for the model's response
    output_box = gr.Textbox(label="Output")

    # Button to generate the output
    generate_button = gr.Button("Generate Response")

    # Action when button is clicked: load the chosen model and run the prompt
    def generate_response(model_choice, user_input):
        model = load_model(model_choice)
        output = model.predict(user_input)
        return output

    generate_button.click(
        generate_response,
        inputs=[model_choice, input_box],
        outputs=output_box
    )

# Launch the Gradio app
demo.launch(share=True)
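
To try the app locally rather than on Spaces, you need the gradio and sambanova-gradio packages plus a SambaNova Cloud API key. A minimal setup sketch follows, assuming sambanova_gradio picks up the key from the SAMBANOVA_API_KEY environment variable (check the package's documentation for the exact variable name):

# Install dependencies (run once in your shell):
#   pip install gradio sambanova-gradio
import os

# Assumption: sambanova_gradio reads the API key from this environment variable.
# The value below is a placeholder, not a real key.
os.environ["SAMBANOVA_API_KEY"] = "your-sambanova-api-key"

# Then save the app above as app.py and start it with:
#   python app.py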