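# Usage example for the Djacon/rubert-tiny2-russian-emotion-detection model:
# load the fine-tuned classifier and its tokenizer, then report the probability
# of each emotion detected in an input text.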
import torch
from transformers import BertForSequenceClassification, AutoTokenizer

# path = 'Djacon/rubert-tiny2-russian-emotion-detection'
path = './model/'
model = BertForSequenceClassification.from_pretrained(path)
tokenizer = AutoTokenizer.from_pretrained(path)

LABELS = ['Joy', 'Interest', 'Surprise', 'Sadness', 'Anger', 'Disgust', 'Fear',
          'Guilt', 'Neutral']


# Predict the probability of each emotion in a text
@torch.no_grad()
def predict_emotions(text):
    # Tokenize the input, truncating to the model's 512-token limit
    inputs = tokenizer(text, max_length=512, truncation=True,
                       return_tensors='pt')
    inputs = inputs.to(model.device)

    # Forward pass, then convert the logits into per-class probabilities
    outputs = model(**inputs)
    pred = torch.nn.functional.softmax(outputs.logits, dim=1)

    # Map each label to its probability as a percentage, sorted highest first
    scores = pred[0].tolist()
    emotions = {label: round(100 * score, 3)
                for label, score in zip(LABELS, scores)}
    return '\n'.join(f'{k}: {v}' for k, v in sorted(emotions.items(),
                                                    key=lambda x: -x[1]))


def test():
    # Smoke test: run a single prediction to verify the model loads and infers
    predict_emotions('I am so happy now!')
    print('\n>>> Emotion Detection successfully initialized! <<<\n')


test()
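

# Minimal usage sketch (the sample sentence below is illustrative and not part
# of the original script; it assumes the model files are available at `path`):
#
#   print(predict_emotions('Today has been a wonderful day!'))
#   # Prints one "<Label>: <percentage>" line per emotion, sorted from most
#   # to least likely.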