Fix custom depth image (#74)
- Fix custom depth image (486a7bf11569184a7284f30eebb0b3e0b55f2171)
Co-authored-by: André Knörig <[email protected]>
app.py
CHANGED
@@ -36,8 +36,8 @@ def predict(input_image, prompt, negative_prompt, steps, num_samples, scale, see
     depth_image = pad_image(depth_image)
     depth_image = depth_image.resize((512, 512))
     depth = np.array(depth_image.convert("L"))
+    depth = np.expand_dims(depth, 0)
     depth = depth.astype(np.float32) / 255.0
-    depth = depth[None, None]
     depth = torch.from_numpy(depth)
     init_image = input_image.convert("RGB")
     image = pad_image(init_image)  # resize to integer multiple of 32
@@ -46,7 +46,7 @@ def predict(input_image, prompt, negative_prompt, steps, num_samples, scale, see
         image=image,
         prompt=prompt,
         negative_prompt=negative_prompt,
-
+        depth_map=depth,
         seed=seed,
         strength=strength,
         num_inference_steps=steps,
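
For context, a minimal sketch of the tensor shapes the old and new preprocessing produce. It uses only NumPy and torch, is not part of app.py, and the zero-filled 512x512 array is just a stand-in for the padded, resized grayscale depth image built earlier in predict():

import numpy as np
import torch

# Stand-in for the 512x512 grayscale ("L" mode, uint8) depth image
depth = np.zeros((512, 512), dtype=np.uint8)

# Old path (removed line): two extra axes -> 4-D tensor
old = depth.astype(np.float32) / 255.0
old = old[None, None]
print(torch.from_numpy(old).shape)   # torch.Size([1, 1, 512, 512])

# New path (this commit): a single batch axis -> 3-D tensor,
# which the second hunk then passes to the pipeline call as depth_map=depth
new = np.expand_dims(depth, 0)
new = new.astype(np.float32) / 255.0
print(torch.from_numpy(new).shape)   # torch.Size([1, 512, 512])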