import gradio as gr
from transformers import pipeline
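# Gradio demo that exposes several Hugging Face pipelines in a tabbed UI:
# a "Single Models" tab with one panel per task (question answering, zero-shot
# classification, English-to-French translation, sentiment analysis, summarization),
# and a "Multi-Model" tab that chains summarization with translation.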

# Hugging Face pipelines (each downloads its model weights from the Hub on first run)
qa_pipeline = pipeline("question-answering", model="deepset/roberta-base-squad2")
classification_pipeline = pipeline("zero-shot-classification", model="facebook/bart-large-mnli")
translation_pipeline = pipeline("translation", model="Helsinki-NLP/opus-mt-en-fr")
sentiment_pipeline = pipeline("text-classification", model="distilbert-base-uncased-finetuned-sst-2-english")  # DistilBERT fine-tuned on SST-2 for sentiment analysis
summarization_pipeline = pipeline("summarization", model="facebook/bart-large-cnn")

# Helper functions wrapping each pipeline

def answer_question(context, question):
    return qa_pipeline(question=question, context=context)["answer"]

def classify_text(text, labels):
    labels = [label.strip() for label in labels.split(",")]
    results = classification_pipeline(text, candidate_labels=labels)
    return {label: round(score, 4) for label, score in zip(results["labels"], results["scores"])}

def translate_text(text):
    return translation_pipeline(text)[0]['translation_text'] if text else "No translation available"

def classify_sentiment(text):
    results = sentiment_pipeline(text)
    return ", ".join(f"{result['label']}: {result['score']:.4f}" for result in results)

def summarize_text(text):
    result = summarization_pipeline(text, max_length=60)
    return result[0]['summary_text'] if result else "No summary available"

def multi_model_interaction(text):
    # Summarize the input text, then translate the summary into French
    summary = summarize_text(text)
    translated_summary = translate_text(summary)

    return {
        "Summary (English)": summary,
        "Summary (French)": translated_summary,
    }



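# UI layout: two tabs built with gr.Blocks; each column pairs input components with
# an output component and a button whose click handler calls the matching helper above.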
with gr.Blocks() as demo:
    with gr.Tab("Single Models"):
        with gr.Column():
            gr.Markdown("### Question Answering")
            context = gr.Textbox(label="Context")
            question = gr.Textbox(label="Question")
            answer_output = gr.Text(label="Answer")
            gr.Button("Answer").click(answer_question, inputs=[context, question], outputs=answer_output)

        with gr.Column():
            gr.Markdown("### Zero-Shot Classification")
            text_zsc = gr.Textbox(label="Text")
            labels = gr.Textbox(label="Labels (comma separated)")
            classification_result = gr.JSON(label="Classification Results")
            gr.Button("Classify").click(classify_text, inputs=[text_zsc, labels], outputs=classification_result)

        with gr.Column():
            gr.Markdown("### Translation")
            text_to_translate = gr.Textbox(label="Text")
            translated_text = gr.Text(label="Translated Text")
            gr.Button("Translate").click(translate_text, inputs=[text_to_translate], outputs=translated_text)

        with gr.Column():
            gr.Markdown("### Sentiment Analysis")
            text_for_sentiment = gr.Textbox(label="Text for Sentiment Analysis")
            sentiment_result = gr.Text(label="Sentiment")
            gr.Button("Classify Sentiment").click(classify_sentiment, inputs=[text_for_sentiment], outputs=sentiment_result)

        with gr.Column():
            gr.Markdown("### Summarization")
            text_to_summarize = gr.Textbox(label="Text")
            summary = gr.Text(label="Summary")
            gr.Button("Summarize").click(summarize_text, inputs=[text_to_summarize], outputs=summary)

    with gr.Tab("Multi-Model"):
        gr.Markdown("### Multi-Model")
        input_text = gr.Textbox(label="Enter Text for Multi-Model Analysis")
        multi_output = gr.JSON(label="Results")
        gr.Button("Process").click(multi_model_interaction, inputs=[input_text], outputs=multi_output)

demo.launch(share=True, debug=True)
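# Note: share=True additionally creates a temporary public Gradio link, and debug=True
# keeps the server in the foreground so tracebacks from the handlers are printed.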