import gradio as gr
import numpy as np
import torch

from models import *
from test import inference_img

device = 'cpu'

# Load the StyleMatte model and its pretrained weights.
model = StyleMatte()
model = model.to(device)
checkpoint = "stylematte.pth"
state_dict = torch.load(checkpoint, map_location=device)
model.load_state_dict(state_dict)
model.eval()
def predict(inp):
    """Predict an alpha matte for the input image and return the matte and the extracted foreground."""
    print("***********Inference****************")
    # inference_img is assumed to return a single-channel HxW alpha matte in [0, 1].
    mask = inference_img(model, inp)
    print("***********Inference finished****************")
    inp_np = np.asarray(inp)
    mask_np = np.asarray(mask)
    # Broadcast the matte over the RGB channels to cut out the foreground.
    fg = np.uint8(mask_np[..., None] * inp_np)
    return [mask_np, fg]
print("MODEL LOADED")
print("************************************")
iface = gr.Interface(fn=predict,
inputs=gr.Image(type="numpy"),
outputs=[gr.Image(type="numpy"),gr.Image(type="numpy")],
examples=["./logo.jpeg"])
print("****************Interface created******************")
iface.launch() |