from transformers import AutoTokenizer, AutoModelForSequenceClassification
from scipy.special import expit
import gradio as gr
import numpy as np
import os
# set up model
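# use the TOKEN secret if it is set, otherwise fall back to the locally cached HF credentials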
auth_token = os.environ.get("TOKEN") or True
tokenizer = AutoTokenizer.from_pretrained("guidecare/feelings_and_issues", use_auth_token=auth_token)
model = AutoModelForSequenceClassification.from_pretrained("guidecare/feelings_and_issues", use_auth_token=auth_token)
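
# one label name per output logit, taken from the model config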
all_label_names = list(model.config.id2label.values())
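
# score one text: expit (the logistic sigmoid) turns each logit into an
# independent probability, so labels are scored multi-label style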
def predict(text):
    probs = expit(model(**tokenizer([text], return_tensors="pt", padding=True)).logits.detach().numpy())
    # return a {label: probability} dict, which gr.Label renders directly
    return {label: float(p) for label, p in zip(all_label_names, probs[0])}

iface = gr.Interface(
    fn=predict,
    inputs=gr.Textbox(label="What is going on with you"),
    outputs=gr.Label(label="Our predictions"),
    examples=[["This test tomorrow is really freaking me out."]],
)
iface.launch()