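# Gradio demo (inferred from the code below): predicts semantic (Binder-style)
# feature norms for a word in context, using layer-8 contextual embeddings from
# BERT or ALBERT fed into a trained feed-forward classifier.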
import gradio as gr
import torch
from minicons import cwe
from huggingface_hub import hf_hub_download
import os
from model import FFNModule, FeatureNormPredictor, FFNParams, TrainingParams
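# FeatureNormPredictor (and the classes it depends on) are expected to come from
# a local model.py in this Space; the checkpoint downloads below read a Hugging
# Face access token from the TOKEN environment variable.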
def predict(Word, Sentence, modelname):
    # Map each UI label to (base language model, classifier checkpoint name)
    models = {
        'Bert Layer 8 to Binder': ('bert-base-uncased', 'bert8_to_binder'),
        'Albert Layer 8 to Binder': ('albert-xxlarge-v2', 'albert8_to_binder_opt_stop'),
    }
    if modelname not in models:
        return "invalid input: please select a model"
    if Word not in Sentence:
        return "invalid input: word not in sentence"
    model_name = models[modelname][1]
    lm = cwe.CWE(models[modelname][0])
    # Download the classifier checkpoint and its feature-label file from the Hub
    model_path = hf_hub_download("jwalanthi/semantic-feature-classifiers", model_name + ".ckpt", use_auth_token=os.environ['TOKEN'])
    label_path = hf_hub_download("jwalanthi/semantic-feature-classifiers", model_name + ".txt", use_auth_token=os.environ['TOKEN'])
    model = FeatureNormPredictor.load_from_checkpoint(
        checkpoint_path=model_path,
        map_location=None,
    )
    model.eval()
    with open(label_path, "r") as file:
        labels = [line.rstrip() for line in file.readlines()]
    # Extract the contextual embedding for the word in its sentence and predict feature norms
    data = (Sentence, Word)
    emb = lm.extract_representation(data, layer=8)
    pred = torch.nn.functional.relu(model(emb))
    pred = pred.squeeze(0)
    pred_list = pred.detach().numpy().tolist()
    # Report only the features with positive predicted values
    output = [labels[i] + '\t\t\t\t\t\t\t' + str(pred_list[i]) for i in range(len(labels)) if pred_list[i] > 0.0]
    return "All Positive Predicted Values:\n" + "\n".join(output)
demo = gr.Interface(
    fn=predict,
    inputs=[
        "text",
        "text",
        gr.Radio(["Bert Layer 8 to Binder", "Albert Layer 8 to Binder"]),
    ],
    outputs=["text"],
)
if __name__ == "__main__":
    demo.launch()
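# Untested usage sketch: with TOKEN set and model.py present, predict() can also
# be called directly, e.g.
#   os.environ["TOKEN"] = "<your HF access token>"   # placeholder value
#   print(predict("dog", "the dog chased the ball", "Bert Layer 8 to Binder"))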