# -*- coding: utf-8 -*-
# @Author : wenshao
# @Email : [email protected]
# @Project : FasterLivePortrait
# @FileName: stitching_model.py

import torch
from torch.cuda import nvtx

from .base_model import BaseModel
from .predictor import numpy_to_torch_dtype_dict


class StitchingModel(BaseModel):
    """
    Wrapper around the LivePortrait stitching network, which predicts the
    keypoint offsets used to blend the animated crop back into the source
    frame. Engine loading and prediction plumbing come from BaseModel; this
    class only customises input and output processing.
    """

    def __init__(self, **kwargs):
        super(StitchingModel, self).__init__(**kwargs)

    def input_process(self, *data):
        # Pass the first positional argument straight through; the caller is
        # expected to provide the already-assembled network input.
        input = data[0]
        return input

    def output_process(self, *data):
        # Only the first network output is used.
        return data[0]

    def predict_trt(self, *data):
        nvtx.range_push("forward")
        feed_dict = {}
        for i, inp in enumerate(self.predictor.inputs):
            if isinstance(data[i], torch.Tensor):
                feed_dict[inp['name']] = data[i]
            else:
                # numpy input: move it onto the device with the dtype the engine expects
                feed_dict[inp['name']] = torch.from_numpy(data[i]).to(
                    device=self.device,
                    dtype=numpy_to_torch_dtype_dict[inp['dtype']])
        preds_dict = self.predictor.predict(feed_dict, self.cudaStream)
        outs = []
        for i, out in enumerate(self.predictor.outputs):
            outs.append(preds_dict[out["name"]].cpu().numpy())
        nvtx.range_pop()
        return outs

    def predict(self, *data):
        data = self.input_process(*data)
        # Dispatch to the TensorRT path or to the generic predictor (e.g. ONNX Runtime).
        if self.predict_type == "trt":
            preds = self.predict_trt(data)
        else:
            preds = self.predictor.predict(data)
        outputs = self.output_process(*preds)
        return outputs
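

if __name__ == "__main__":
    # Minimal usage sketch. The constructor kwargs below ("predict_type",
    # "model_path") and the (1, 126) input shape are assumptions about how
    # BaseModel and the stitching engine are configured in this project;
    # adapt them to the actual config before running.
    import numpy as np

    model = StitchingModel(
        predict_type="trt",                        # "trt" takes the TensorRT path above (assumed kwarg name)
        model_path="./checkpoints/stitching.trt",  # hypothetical engine path
    )
    # Flattened source + driving keypoints; the shape is illustrative only.
    dummy_input = np.random.randn(1, 126).astype(np.float32)
    delta = model.predict(dummy_input)
    print(delta.shape)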