from transformers import AutoTokenizer, AutoModelForSequenceClassification
from scipy.special import expit
import numpy as np
import os
import gradio as gr
import requests
from datetime import datetime
# Set up the model: read the HF access token from the environment, falling back
# to True (i.e. use locally cached credentials) if it is not set.
authtoken = os.environ.get("TOKEN") or True
tokenizer = AutoTokenizer.from_pretrained("guidecare/feelings_and_issues", use_auth_token=authtoken)
model = AutoModelForSequenceClassification.from_pretrained("guidecare/feelings_and_issues", use_auth_token=authtoken)
all_label_names = list(model.config.id2label.values())
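# The model is multi-label; label names carry a group prefix (e.g. "issue...",
# "feeling...", "harm..."), which predict() uses below to split the scores.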
def predict(text):
    # run the model and squash logits to independent per-label probabilities
    probs = expit(model(**tokenizer([text], return_tensors="pt", padding=True)).logits.detach().numpy())
    # convert numpy values to plain Python floats (rounded) so they serialize cleanly
    probs = [float(np.round(i, 2)) for i in probs[0]]
    # pair each label name with its probability
    zipped_list = list(zip(all_label_names, probs))
    print(text, zipped_list)
    # break out issue, feeling, and harm (other groups, e.g. sentiment, are not surfaced)
    issues = [(i, j) for i, j in zipped_list if i.startswith('issue')]
    feelings = [(i, j) for i, j in zipped_list if i.startswith('feeling')]
    harm = [(i, j) for i, j in zipped_list if i.startswith('harm')]
    # keep the top predictions in each group
    issues = sorted(issues, key=lambda x: x[1])[::-1][:3]
    feelings = sorted(feelings, key=lambda x: x[1])[::-1][:3]
    harm = sorted(harm, key=lambda x: x[1])[::-1][:1]
    # the combined top labels are what gets logged and returned
    top = issues + feelings + harm
    logToNotion(text, top)
    d = {i: j for i, j in top}
    return d
def logToNotion(text, top):
    # log the input text and top predictions as a new page in a Notion database
    url = "https://api.notion.com/v1/pages"
    payload = {
        "parent": {
            "database_id": "4a220773ac694851811e87f4571ec41d"
        },
        "properties": {
            # page title: the current timestamp
            "title": {
                "title": [{
                    "text": {
                        "content": datetime.now().strftime("%d/%m/%Y %H:%M:%S")
                    }
                }]
            },
            # the raw input text
            "input": {
                "rich_text": [{
                    "text": {
                        "content": text
                    }
                }]
            },
            # the top (label, probability) pairs
            "output": {
                "rich_text": [{
                    "text": {
                        "content": ", ".join(str(x) for x in top)
                    }
                }]
            }
        }
    }
    headers = {
        "Accept": "application/json",
        "Notion-Version": "2022-02-22",
        "Content-Type": "application/json",
        "Authorization": "Bearer " + os.environ.get("NotionToken")
    }
    response = requests.post(url, json=payload, headers=headers)
iface = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="label",
    # examples=["This test tomorrow is really freaking me out."]
)
iface.launch()