wanghuging committed
Commit 73cf98c · 1 Parent(s): 8aa723f

Update app.py

Files changed (1): app.py (+9 -8)
app.py CHANGED
@@ -73,6 +73,7 @@ else:
     # )
 
     # t2i_pipe.load_lora_weights("wanghuging/skin_demo", weight_name="skin_demo.safetensors")
+    t2i_pipe.safety_checker = lambda images, clip_input: (images, False)
     t2i_pipe.to(device=torch_device, dtype=torch_dtype).to(device)
     t2i_pipe.set_progress_bar_config(disable=True)
     i2i_pipe.to(device=torch_device, dtype=torch_dtype).to(device)
@@ -120,14 +121,14 @@ async def predict(init_image, prompt, strength, steps, seed=1231231):
         output_type="pil",
     )
     print(f"Pipe took {time.time() - last_time} seconds")
-    nsfw_content_detected = (
-        results.nsfw_content_detected[0]
-        if "nsfw_content_detected" in results
-        else False
-    )
-    if nsfw_content_detected:
-        gr.Warning("NSFW content detected.")
-        return Image.new("RGB", (512, 512))
+    # nsfw_content_detected = (
+    #     results.nsfw_content_detected[0]
+    #     if "nsfw_content_detected" in results
+    #     else False
+    # )
+    # if nsfw_content_detected:
+    #     gr.Warning("NSFW content detected.")
+    #     return Image.new("RGB", (512, 512))
     return results.images[0]
 
 
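Net effect: the commit disables NSFW filtering in both places. The new stub assigned to t2i_pipe.safety_checker returns the generated images unchanged with a falsy NSFW flag, and the post-generation handling in predict (which raised a gr.Warning and returned a blank 512x512 image for flagged results) is commented out. Below is a minimal, self-contained sketch of the same stub pattern, assuming a StableDiffusionPipeline-style checker interface; the model id and variable names are illustrative, not taken from this repo.

import torch
from diffusers import StableDiffusionPipeline

# Illustrative checkpoint; any model that ships a safety checker works.
pipe = StableDiffusionPipeline.from_pretrained(
    "CompVis/stable-diffusion-v1-4", torch_dtype=torch.float16
)

# The pipeline calls safety_checker(images=..., clip_input=...) and expects
# (images, has_nsfw_concept) back, so returning the images untouched with a
# falsy flag makes the check a no-op. Some diffusers versions iterate over
# the flag per image, so a per-image list is a safer variant of the
# commit's lambda:
pipe.safety_checker = lambda images, clip_input: (images, [False] * len(images))

An equivalent route, where the pipeline class supports it, is to skip loading the checker entirely at load time: StableDiffusionPipeline.from_pretrained(..., safety_checker=None, requires_safety_checker=False).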