# Gradio demo Space: exposes several Hugging Face NLP pipelines
# (question answering, zero-shot classification, translation,
# text generation, summarization) behind a simple tabbed UI.
import gradio as gr
from transformers import pipeline
# Lazy loading: Define functions to load models only when needed
def load_qa_model():
    """Create an extractive question-answering pipeline (SQuAD-tuned BERT)."""
    checkpoint = "bert-large-uncased-whole-word-masking-finetuned-squad"
    return pipeline("question-answering", model=checkpoint)
def load_classifier_model():
    """Create a zero-shot classification pipeline (DeBERTa-v3 base)."""
    checkpoint = "MoritzLaurer/deberta-v3-base-zeroshot-v1.1-all-33"
    return pipeline("zero-shot-classification", model=checkpoint)
def load_translator_model(target_language):
    """Build an English-to-``target_language`` translation pipeline.

    The task string (e.g. ``translation_en_to_fr``) lets transformers pick
    the default model for that language pair.

    Bug fixed: the original passed the *task* string as ``model=`` (not a
    valid model identifier) while hard-coding the task itself to
    ``translation_en_to_nl``, so ``target_language`` never took effect.
    """
    return pipeline(f"translation_en_to_{target_language}")
def load_generator_model():
    """Create a GPT-Neo 2.7B text-generation pipeline."""
    checkpoint = "EleutherAI/gpt-neo-2.7B"
    return pipeline("text-generation", model=checkpoint, tokenizer=checkpoint)
def load_summarizer_model():
    """Create a summarization pipeline (BART large, CNN/DailyMail-tuned)."""
    checkpoint = "facebook/bart-large-cnn"
    return pipeline("summarization", model=checkpoint)
# Define functions to process inputs
def process_qa(context, question):
    """Extract the answer to *question* from *context* via the QA pipeline."""
    result = load_qa_model()(context=context, question=question)
    return result["answer"]
def process_classifier(text, labels):
    """Return the highest-scoring label for *text* among candidate *labels*."""
    result = load_classifier_model()(text, labels)
    # The pipeline sorts labels by score; the first is the best match.
    return result["labels"][0]
def process_translation(text, target_language):
    """Translate English *text* into *target_language* and return the string."""
    outputs = load_translator_model(target_language)(text)
    return outputs[0]["translation_text"]
def process_generation(prompt):
    """Continue *prompt* with generated text (max_length=50 tokens)."""
    outputs = load_generator_model()(prompt, max_length=50)
    return outputs[0]["generated_text"]
def process_summarization(text):
    """Summarize *text* deterministically (40-150 tokens, no sampling)."""
    outputs = load_summarizer_model()(
        text, max_length=150, min_length=40, do_sample=False
    )
    return outputs[0]["summary_text"]
# Gradio Interface
# Gradio Interface.
#
# Fixes over the original wiring:
#  * Component ``.value`` was read at build time (always the initial value,
#    never the user's input), so the conditional widgets and all callback
#    reads were dead code; inputs now flow through
#    ``Button.click(fn, inputs=..., outputs=...)``.
#  * Callbacks mutated components with ``.update(...)`` instead of
#    returning the new value, so the output box never changed; they now
#    return the result.
#  * ``eval(f"process_{task.lower()}")`` built invalid identifiers for
#    multi-word tasks ("question answering", "zero-shot classification")
#    and used eval needlessly; replaced with explicit dispatch.
with gr.Blocks() as demo:
    gr.Markdown("Choose an NLP task and input the required text.")

    TASKS = [
        "Question Answering",
        "Zero-Shot Classification",
        "Translation",
        "Text Generation",
        "Summarization",
    ]

    def run_task(task, text, extra):
        """Dispatch (task, text, extra) to the matching ``process_*`` helper.

        ``extra`` is task-dependent: the question for QA, comma-separated
        candidate labels for classification, the target-language code for
        translation; it is ignored by generation and summarization.
        """
        if task == "Question Answering":
            return process_qa(text, extra)
        if task == "Zero-Shot Classification":
            labels = [label.strip() for label in extra.split(",") if label.strip()]
            return process_classifier(text, labels)
        if task == "Translation":
            return process_translation(text, extra or "nl")
        if task == "Text Generation":
            return process_generation(text)
        if task == "Summarization":
            return process_summarization(text)
        return "Please select a task."

    with gr.Tab("Single Models"):
        gr.Markdown("This tab is for single models demonstration.")
        # Single models interface.
        task_select_single = gr.Dropdown(TASKS, label="Select Task")
        input_text_single = gr.Textbox(label="Input")
        # One generic extra field replaces the original build-time
        # conditional widgets, which could never react to the dropdown.
        extra_single = gr.Textbox(
            label="Extra (question / comma-separated labels / target language)"
        )
        output_text_single = gr.Textbox(label="Output")
        execute_button_single = gr.Button("Execute")
        execute_button_single.click(
            run_task,
            inputs=[task_select_single, input_text_single, extra_single],
            outputs=output_text_single,
        )

    with gr.Tab("Multi-model"):
        gr.Markdown("This tab is for multi-model demonstration.")
        # Multi-model interface.
        task_select_multi = gr.Dropdown(TASKS, label="Select Task")
        input_text_multi = gr.Textbox(label="Input")
        output_text_multi = gr.Textbox(label="Output")
        execute_button_multi = gr.Button("Execute")

        def execute_task_multi(task, text):
            """Run *task* on *text*; translation defaults to Dutch ("nl")."""
            if task == "Translation":
                return process_translation(text, "nl")  # Default to Dutch translation
            return run_task(task, text, "")

        execute_button_multi.click(
            execute_task_multi,
            inputs=[task_select_multi, input_text_multi],
            outputs=output_text_multi,
        )

demo.launch()