Load the model and tokenizer with the `transformers` library and serve them through a small Gradio interface:

```python
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Load the fine-tuned RoBERTa fake-news classifier and its tokenizer.
tokenizer = AutoTokenizer.from_pretrained("hamzab/roberta-fake-news-classification")
model = AutoModelForSequenceClassification.from_pretrained("hamzab/roberta-fake-news-classification")
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)

def predict_fake(title, text):
    # The model expects the title and body joined with special marker tokens.
    input_str = "<title>" + title + "<content>" + text + "<end>"
    inputs = tokenizer(input_str, max_length=512, padding="max_length",
                       truncation=True, return_tensors="pt")
    with torch.no_grad():
        output = model(inputs["input_ids"].to(device),
                       attention_mask=inputs["attention_mask"].to(device))
    # Softmax over the two logits; index 0 = Fake, index 1 = Real.
    probs = torch.softmax(output.logits, dim=-1)[0]
    return dict(zip(["Fake", "Real"], [p.item() for p in probs]))

# Gradio UI: a headline box, a content box, and a label showing both class probabilities.
gr.Interface(fn=predict_fake,
             inputs=[gr.Textbox(lines=1, label="headline"), gr.Textbox(lines=6, label="content")],
             outputs=gr.Label()).launch(share=True)
```
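If you only need programmatic predictions rather than the web UI, you can call `predict_fake` directly once the model and tokenizer are loaded as above. The snippet below is a minimal sketch; the sample headline and article body are made-up placeholders, not real news items.

```python
# Minimal sketch: call predict_fake directly instead of launching the Gradio app.
# The inputs below are hypothetical examples for illustration only.
sample_title = "Example headline about a breaking story"
sample_text = "Example article body describing the story in a few sentences."

scores = predict_fake(sample_title, sample_text)   # dict like {"Fake": <prob>, "Real": <prob>}
print(scores)
print("Predicted label:", max(scores, key=scores.get))
```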