Spaces:
Runtime error
Runtime error
File size: 649 Bytes
5081207 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 |
from functools import lru_cache

import torch
from transformers import GLPNImageProcessor, GLPNForDepthEstimation

from image_resize import resize_img
@lru_cache(maxsize=1)
def _load_glpn():
    """Load and cache the GLPN-NYU processor/model pair.

    Loading from the hub is expensive (download + weight init), so do it
    exactly once per process instead of on every depth_detection() call.
    """
    feature_extractor = GLPNImageProcessor.from_pretrained("vinvino02/glpn-nyu")
    model = GLPNForDepthEstimation.from_pretrained("vinvino02/glpn-nyu")
    model.eval()  # inference only; disables dropout/batchnorm training behavior
    return feature_extractor, model

def depth_detection(image, pad=16):
    """Estimate a depth map for *image* with the GLPN-NYU model.

    Parameters
    ----------
    image : input image accepted by ``resize_img`` (presumably a PIL image —
        TODO confirm against ``image_resize.resize_img``).
    pad : int, default 16
        Number of border pixels cropped from each edge of the depth map
        (GLPN predictions are unreliable near the borders). ``pad=0``
        disables cropping.

    Returns
    -------
    tuple
        ``(new_img, output)`` where ``new_img`` is the resized input image
        and ``output`` is a 2-D ``numpy.ndarray`` of depth values scaled
        by 1000 (model units * 1000 — presumably millimeters; verify).
    """
    feature_extractor, model = _load_glpn()
    new_img = resize_img(image)
    inputs = feature_extractor(images=new_img, return_tensors="pt")
    with torch.no_grad():  # inference: no autograd graph needed
        outputs = model(**inputs)
    predicted_depth = outputs.predicted_depth
    output = predicted_depth.squeeze().cpu().numpy() * 1000.0
    # Guard pad=0: output[0:-0, 0:-0] would be an EMPTY array, not a no-op.
    if pad > 0:
        output = output[pad:-pad, pad:-pad]
    return new_img, output
|