yamildiego committed on
Commit
1a01558
·
1 Parent(s): 3efa11b
Files changed (1) hide show
  1. handler.py +12 -12
handler.py CHANGED
@@ -61,8 +61,8 @@ CONTROLNET_MAPPING = {
61
  class EndpointHandler():
62
  def __init__(self, path=""):
63
  # define default controlnet id and load controlnet
64
- #self.control_type = "depth"
65
- #self.controlnet = ControlNetModel.from_pretrained(CONTROLNET_MAPPING[self.control_type]["model_id"],torch_dtype=dtype).to(device)
66
 
67
  #processor = AutoProcessor.from_pretrained("CompVis/stable-diffusion-safety-checker")
68
 
@@ -71,10 +71,10 @@ class EndpointHandler():
71
  #self.stable_diffusion_id = "runwayml/stable-diffusion-v1-5"
72
  self.stable_diffusion_id = "Lykon/dreamshaper-8"
73
 
74
- #self.pipe = StableDiffusionControlNetPipeline.from_pretrained(self.stable_diffusion_id,
75
- # controlnet=self.controlnet,
76
- # torch_dtype=dtype,
77
- # safety_checker=StableDiffusionSafetyChecker.from_pretrained("CompVis/stable-diffusion-safety-checker", torch_dtype=dtype)).to("cuda")
78
  # Define Generator with seed
79
  self.generator = torch.Generator(device=device.type).manual_seed(3)
80
 
@@ -92,12 +92,12 @@ class EndpointHandler():
92
  return {"error": "Please provide a prompt and base64 encoded image."}
93
 
94
  # Check if a new controlnet is provided
95
- #if controlnet_type is not None and controlnet_type != self.control_type:
96
- # print(f"changing controlnet from {self.control_type} to {controlnet_type} using {CONTROLNET_MAPPING[controlnet_type]['model_id']} model")
97
- # self.control_type = controlnet_type
98
- # self.controlnet = ControlNetModel.from_pretrained(CONTROLNET_MAPPING[self.control_type]["model_id"],
99
- # torch_dtype=dtype).to(device)
100
- # self.pipe.controlnet = self.controlnet
101
 
102
 
103
  targets = [self.pipe.vae, self.pipe.unet]
 
61
  class EndpointHandler():
62
  def __init__(self, path=""):
63
  # define default controlnet id and load controlnet
64
+ self.control_type = "depth"
65
+ self.controlnet = ControlNetModel.from_pretrained(CONTROLNET_MAPPING[self.control_type]["model_id"],torch_dtype=dtype).to(device)
66
 
67
  #processor = AutoProcessor.from_pretrained("CompVis/stable-diffusion-safety-checker")
68
 
 
71
  #self.stable_diffusion_id = "runwayml/stable-diffusion-v1-5"
72
  self.stable_diffusion_id = "Lykon/dreamshaper-8"
73
 
74
+ self.pipe = StableDiffusionControlNetPipeline.from_pretrained(self.stable_diffusion_id,
75
+ #controlnet=self.controlnet,
76
+ torch_dtype=dtype,
77
+ safety_checker=StableDiffusionSafetyChecker.from_pretrained("CompVis/stable-diffusion-safety-checker", torch_dtype=dtype)).to("cuda")
78
  # Define Generator with seed
79
  self.generator = torch.Generator(device=device.type).manual_seed(3)
80
 
 
92
  return {"error": "Please provide a prompt and base64 encoded image."}
93
 
94
  # Check if a new controlnet is provided
95
+ if controlnet_type is not None and controlnet_type != self.control_type:
96
+ print(f"changing controlnet from {self.control_type} to {controlnet_type} using {CONTROLNET_MAPPING[controlnet_type]['model_id']} model")
97
+ self.control_type = controlnet_type
98
+ self.controlnet = ControlNetModel.from_pretrained(CONTROLNET_MAPPING[self.control_type]["model_id"],
99
+ torch_dtype=dtype).to(device)
100
+ self.pipe.controlnet = self.controlnet
101
 
102
 
103
  targets = [self.pipe.vae, self.pipe.unet]