File size: 681 Bytes
eadae5d
5026ee5
 
eadae5d
 
 
 
 
 
4ab024c
67feba3
 
eadae5d
 
 
5026ee5
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
"""Streamlit repro app for a fastT5 import failure.

Renders the environment's installed packages (`pip freeze`) in the app
FIRST, then attempts the fastT5 import that is expected to fail — so the
package list is visible alongside the error. It then exports a tiny
random mT5 checkpoint to ONNX and runs one generation as a smoke test.
"""
import subprocess
import streamlit as st

# Show the installed packages so the broken environment is inspectable in
# the app. subprocess.run replaces the manual Popen/communicate pair and
# check=True raises immediately if `pip freeze` itself fails.
freeze = subprocess.run(["pip", "freeze"], stdout=subprocess.PIPE, check=True)
st.code(freeze.stdout.decode("utf-8"))

MODEL_NAME = "stas/mt5-tiny-random"

# NOTE: these imports are deliberately NOT at the top of the file — the
# fastT5 import is the failure being reproduced, and it must run only
# after the package list above has been rendered.
from transformers import AutoTokenizer
from fastT5 import export_and_get_onnx_model, get_onnx_model  # this line will throw an error

# Export the model to ONNX and run a single round-trip generation to
# confirm the stack works end to end (only reached if the import succeeds).
model = export_and_get_onnx_model(MODEL_NAME)
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)

tokenized = tokenizer("Will this work?", return_tensors="pt")
tokens = model.generate(
  input_ids=tokenized["input_ids"],
  attention_mask=tokenized["attention_mask"],
)
st.write(tokenizer.decode(tokens.squeeze(), skip_special_tokens=True))