from transformers import RobertaTokenizer, AutoModelForSequenceClassification
from scipy.special import expit
import numpy as np
import os
import gradio as gr

# Load the tokenizer and model from the Hugging Face Hub; the access token is
# read from the TOKEN environment variable (the repo may be private or gated).
authtoken = os.environ.get("TOKEN")
tokenizer = RobertaTokenizer.from_pretrained("guidecare/feelings_and_issues_large_v2", token=authtoken)
tokenizer.do_lower_case = True
model = AutoModelForSequenceClassification.from_pretrained("guidecare/feelings_and_issues_large_v2", token=authtoken, use_safetensors=True)
# Label names come in prefixed groups: issue_*, feeling_*, harm_*, sentiment_*.
all_label_names = list(model.config.id2label.values())


def predict(text):
    # Tokenize, run the model, and apply a sigmoid (expit) to each logit:
    # this is a multi-label model, so scores are independent and do not sum to 1.
    inputs = tokenizer([text], return_tensors="pt", padding=True)
    probs = expit(model(**inputs).logits.detach().numpy())
    # Cast numpy scalars to plain Python floats (rounded to 2 decimals) so the
    # Gradio Label output can serialize them cleanly.
    probs = [float(np.round(i, 2)) for i in probs[0]]
    # Break the predictions out into issue, feeling, harm and sentiment groups.
    zipped_list = list(zip(all_label_names, probs))
    print(text, zipped_list)  # log raw scores for debugging
    issues = [(i, j) for i, j in zipped_list if i.startswith('issue')]
    feelings = [(i, j) for i, j in zipped_list if i.startswith('feeling')]
    harm = [(i, j) for i, j in zipped_list if i.startswith('harm')]
    sentiment = [(i, j) for i, j in zipped_list if i.startswith('sentiment')]
    # Sort each group by score, highest first.
    issues = sorted(issues, key=lambda x: x[1], reverse=True)
    feelings = sorted(feelings, key=lambda x: x[1], reverse=True)
    harm = sorted(harm, key=lambda x: x[1], reverse=True)
    sentiment = sorted(sentiment, key=lambda x: x[1], reverse=True)
    # Combine the groups and return a flat {label: score} dict for gr.Label.
    top = issues + feelings + harm + sentiment
    d = {i: j for i, j in top}
    return d
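
# Illustrative call (the label names and scores below are made-up placeholders,
# shown only to sketch the return shape: a flat {label: probability} dict):
#   predict("This test tomorrow is really freaking me out.")
#   might return something like {"issue_anxiety": 0.87, "feeling_worried": 0.91, ...}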


iface = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="label",
    # examples=["This test tomorrow is really freaking me out."]
)

iface.launch()
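
# Optional: when running locally, iface.launch(share=True) creates a temporary
# public URL (a standard Gradio option, not specific to this app).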