PodcastNER / app.py
import gradio as gr
import torch
from transformers import AutoConfig, AutoTokenizer, GPTJForCausalLM, pipeline
# load the fp16 model and its configuration
model = GPTJForCausalLM.from_pretrained("hackathon-somos-nlp-2023/bertin-gpt-j-6b-ner-es", torch_dtype=torch.float16)
config = AutoConfig.from_pretrained("hackathon-somos-nlp-2023/bertin-gpt-j-6b-ner-es", name_or_path="adapter_model.bin")
# load tokenizer
tokenizer = AutoTokenizer.from_pretrained("hackathon-somos-nlp-2023/bertin-gpt-j-6b-ner-es")
# create the text-generation pipeline on GPU
pipe = pipeline("text-generation", model=model, config=config, tokenizer=tokenizer, device=0)
def predict(text):
    # the pipeline returns a list of generations; return the text of the first one
    return pipe(f"text: {text}, entities:")[0]["generated_text"]
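# A minimal post-processing sketch (an assumption, not part of the original app):
# assuming the model echoes the prompt and appends the recognized entities after
# the "entities:" marker, this helper keeps only that entity portion.
def extract_entities(generated_text):
    # split on the last "entities:" marker; fall back to the full text if absent
    parts = generated_text.rsplit("entities:", 1)
    return parts[1].strip() if len(parts) > 1 else generated_text.strip()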
iface = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="text",
    examples=[["Yo hoy voy a hablar de mujeres en el mundo del arte, porque me ha leído un libro fantástico que se llama Historia del arte sin hombres, de Katie Hesel."]],
)
iface.launch()