from transformers import AutoTokenizer, AutoModelForSequenceClassification
from scipy.special import expit
import numpy as np
import os
import gradio as gr

# Set up the model: use the TOKEN env var if set, otherwise fall back to the
# locally cached Hugging Face credentials (use_auth_token=True).
auth_token = os.environ.get("TOKEN") or True
tokenizer = AutoTokenizer.from_pretrained(
    "guidecare/feelings_and_issues",
    use_auth_token=auth_token,
)
model = AutoModelForSequenceClassification.from_pretrained(
    "guidecare/feelings_and_issues",
    use_auth_token=auth_token,
)
all_label_names = list(model.config.id2label.values())


def predict(text):
    inputs = tokenizer([text], return_tensors="pt", padding=True)
    # Sigmoid (expit) scores each label independently (multi-label),
    # rather than normalizing across labels with a softmax.
    probs = expit(model(**inputs).logits.detach().numpy())
    d = {label: float(np.round(p, 2)) for label, p in zip(all_label_names, probs[0])}
    print(d)
    return d


iface = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="label",
    # examples=["This test tomorrow is really freaking me out."]
)
iface.launch()