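# Gradio demo: classify a text prompt with a multi-head sequence classification
# model, using the BAAI/bge-m3 tokenizer for input encoding.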
import gradio as gr
from transformers import AutoTokenizer
from model import MultiHeadClassification
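
# The second argument presumably maps each classification head to its number
# of labels (an assumption based on the custom MultiHeadClassification API).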
model = MultiHeadClassification.from_pretrained(
    'philipp-zettl/multi-head-sequence-classification-model',
    {"GGU": 3, "sentiment": 3}
)
tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-m3')


def generate(prompt):
    # Tokenize as PyTorch tensors so the encoding can be fed straight to the model.
    inputs = tokenizer([prompt], return_tensors='pt')
    return model(**inputs)


demo = gr.Interface(
    generate,
    inputs="text",
    outputs="label",
)
demo.launch()