# Scraped from a Hugging Face Space page (status banner read "Runtime error").
# Prompt-based sentiment scoring with a masked language model:
# the model fills a [MASK] slot with a sentiment word ("good"/"bad").
from transformers import AutoModelForMaskedLM, AutoTokenizer
import torch

# Checkpoint shared by the tokenizer and the Prompting helper below.
model_path = "bert-large-uncased"
tokenizer = AutoTokenizer.from_pretrained(model_path)

# Load Prompting class (project-local helper wrapping the masked-LM).
from prompt import Prompting

prompting = Prompting(model=model_path)
# Template appended to the input text; the mask token is the slot the
# model scores for "good" vs "bad".
prompt = ". Because it was " + prompting.tokenizer.mask_token + "."
def predict(text):
    """Classify *text* as POSITIVE or NEGATIVE via masked-LM prompting.

    The probability that the mask resolves to "good" (vs "bad") for the
    bare prompt alone is used as a baseline threshold; the same score for
    ``text + prompt`` is compared against it.

    Returns a single-entry dict mapping "POSITIVE" or "NEGATIVE" to the
    score for "good" (as returned by ``compute_tokens_prob``, i.e. a
    tensor element — presumably a 0-d tensor; confirm against Prompting).
    """
    # Baseline: P("good") for the prompt with no preceding text.
    threshold = prompting.compute_tokens_prob(
        prompt, token_list1=["good"], token_list2=["bad"]
    )[0].item()
    res = prompting.compute_tokens_prob(
        text + prompt, token_list1=["good"], token_list2=["bad"]
    )
    if res[0] > threshold:
        return {"POSITIVE": res[0]}
    return {"NEGATIVE": res[0]}