import gradio as gr
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Mapping from answer label to the model's output class index.
CLASSES = {
    'yes': 0,
    'irrelevant': 1,
    'no': 2,
}

checkpoint_path = "MrPio/TheSeagullStory-nli-deberta-v3-base"
model = AutoModelForSequenceClassification.from_pretrained(checkpoint_path)
model.eval()
tokenizer = AutoTokenizer.from_pretrained(checkpoint_path)

# Load the story as a single line of text: it serves as the NLI premise.
story = open('story.txt').read().replace("\n\n", "\n").replace("\n", " ").strip()


def ask(question):
    # Tokenize the (premise, hypothesis) pair and return PyTorch tensors.
    inputs = tokenizer(story, question, truncation=True, padding=True, return_tensors="pt")
    with torch.no_grad():
        logits = model(**inputs).logits
    # Convert logits to class probabilities for the Label component.
    prediction = torch.softmax(logits, dim=-1).squeeze()
    return {c: prediction[i].item() for c, i in CLASSES.items()}


demo = gr.Interface(
    ask,
    inputs=[gr.Textbox(value="", label="Your question, as an affirmative sentence:")],
    outputs=[gr.Label(label="Answer", num_top_classes=3)],
    title="The Seagull Story",
    description="“ Albert and Dave find themselves on the pier. They go to a nearby restaurant where Albert "
                "orders seagull meat. The waiter promptly serves Albert the meal. After taking a bite, he realizes "
                "something. Albert pulls a gun out of his ruined jacket and shoots himself. ”\n\n"
                "Why did Albert shoot himself?\n\n"
                "Can you unravel the truth behind this epilogue by asking only yes/no questions?",
    article='Please refrain from embarrassing DeBERTa with ridiculous questions.',
    examples=[
        'Albert shot himself for a reason',
        'Dave has a watch on his wrist',
        'Albert and Dave came to the pier on their own',
    ],
)

if __name__ == "__main__":
    demo.launch()