import gradio as gr
from transformers import pipeline

def model_inference(model_name, task, input_data):
    try:
        # Load the model pipeline dynamically based on user selection
        model_pipeline = pipeline(task, model=model_name)
        # Perform the inference
        result = model_pipeline(input_data, max_length=100)
        # Handle different output formats
        if isinstance(result, list):
            return result[0]['generated_text'] if 'generated_text' in result[0] else str(result)
        return result
    except Exception as e:
        # Return error message to the user interface
        return f"An error occurred: {str(e)}"

def setup_interface():
    """Build the Gradio Blocks UI for the model playground.

    Returns:
        The assembled (unlaunched) gr.Blocks demo.
    """
    # UI task label -> candidate model checkpoints.
    models = {
        "Text Generation": ["gpt2", "EleutherAI/gpt-neo-2.7B"],
        "Text Classification": ["bert-base-uncased", "roberta-base"],
        "Token Classification": ["dbmdz/bert-large-cased-finetuned-conll03-english"]
    }

    # UI task label -> transformers pipeline task id.
    tasks = {
        "Text Generation": "text-generation",
        "Text Classification": "text-classification",
        "Token Classification": "token-classification"
    }

    default_task = "Text Generation"

    with gr.Blocks() as demo:
        gr.Markdown("### Hugging Face Model Playground")
        with gr.Row():
            selected_task = gr.Dropdown(
                label="Select Task",
                choices=list(models.keys()),
                value=default_task,
            )
            # Pre-select the first model so the inference callback always
            # receives a valid model name.
            model_name = gr.Dropdown(
                label="Select Model",
                choices=models[default_task],
                value=models[default_task][0],
            )
        input_data = gr.Textbox(label="Input", placeholder="Type here...")
        output = gr.Textbox(label="Output", placeholder="Results will appear here...")

        def update_models(task):
            # gr.update works on Gradio 3.x and 4.x; gr.Dropdown.update was
            # removed in 4.x. Also reset the value so a stale selection from
            # the previous task can't linger in the dropdown.
            return gr.update(choices=models[task], value=models[task][0])

        # Update the model dropdown whenever the task selection changes.
        selected_task.change(fn=update_models, inputs=selected_task, outputs=model_name)

        def run_inference(model, task_label, text):
            # Translate the UI label into the pipeline task id expected by
            # transformers before delegating to the inference helper.
            return model_inference(model, tasks.get(task_label, task_label), text)

        # Run model inference when the input text changes.
        input_data.change(fn=run_inference, inputs=[model_name, selected_task, input_data], outputs=output)

    return demo

if __name__ == "__main__":
    # Script entry point: build the playground UI and serve it locally.
    demo = setup_interface()
    demo.launch()