"""Gradio demo for "The Seagull Story": answer yes/no questions by running an
NLI cross-encoder (DeBERTa-v3) with the story as premise and the user's
question as hypothesis."""

import gradio as gr
import torch
from transformers import AutoModelForSequenceClassification, DebertaV2Tokenizer

# Prefer GPU when available; everything (model + inputs) must live on this device.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# NLI label -> index into the model's output logits.
CLASSES = {
    'yes': 0,
    'irrelevant': 1,
    'no': 2,
}

tokenizer = DebertaV2Tokenizer.from_pretrained('cross-encoder/nli-deberta-v3-base', do_lower_case=True)
model = AutoModelForSequenceClassification.from_pretrained('MrPio/TheSeagullStory-nli-deberta-v3-base')
model.eval()
# FIX: the original moved only the *inputs* to `device` while the model stayed
# on CPU, which raises a device-mismatch error whenever CUDA is available.
model.to(device)

# Load the premise text; collapse paragraphs into one single-line string.
# FIX: use a context manager (the original leaked the file handle) and pin the
# encoding so the result doesn't depend on the platform default.
with open('story.txt', encoding='utf-8') as story_file:
    story = story_file.read().replace("\n\n", "\n").replace("\n", " ").strip()

if torch.cuda.is_available():
    model.half()  # fp16 inference on GPU to cut memory use


def ask(question):
    """Score `question` against the story.

    Returns a dict mapping each label in CLASSES ('yes' / 'irrelevant' / 'no')
    to its softmax probability, rounded to 3 decimals — the shape gr.Label expects.
    """
    with torch.no_grad():
        # Tokenize the (premise, hypothesis) pair; FIX: renamed `input` -> `inputs`
        # so the builtin `input` is no longer shadowed.
        inputs = tokenizer(story, question, truncation=True, padding=True, return_tensors="pt")
        inputs = {key: value.to(device) for key, value in inputs.items()}
        output = model(**inputs)
        prediction = torch.softmax(output.logits, 1).squeeze()
        print(prediction)  # debug trace of the raw probability tensor
        return {c: round(prediction[i].item(), 3) for c, i in CLASSES.items()}


gradio = gr.Interface(
    ask,
    inputs=[gr.Textbox(value="", label="Your question, as an affirmative sentence:")],
    outputs=[gr.Label(label="Answer", num_top_classes=3)],
    title="The Seagull Story",
    description="“ Albert and Dave find themselves on the pier. They go to a nearby restaurant where Albert orders "
                "seagull meat. The waiter promptly serves Albert the meal. After taking a bite, he realizes "
                "something. Albert pulls a gun out of his ruined jacket and shoots himself. ”\n\nWhy did Albert shoot "
                "himself?\n\nCan you unravel the truth behind this epilogue by asking only yes/no questions?\n\nPlease be specific about the time period you have in mind with your question.",
    article='Please refrain from embarrassing DeBERTa with dumb questions.\n\nCheck the repository for more detail: https://github.com/MrPio/The-Seagull-Story',
    examples=['Albert shoot himself for a reason',
              'Dave has a watch on his wrist',
              'Albert and Dave came to the pier on their own']
)

if __name__ == "__main__":
    gradio.launch(share=True)