Update app.py
app.py CHANGED
@@ -221,7 +221,7 @@ def save_image(img):
     img.save(unique_name,optimize=False,compress_level=0)
     return unique_name
 
-def randomize_seed_fn(
+def randomize_seed_fn() -> int:
     seed = random.randint(0, MAX_SEED)
     return seed
 
@@ -266,7 +266,7 @@ def generate_30(
     #global models
     #pipe = models[model_choice]
     pipe.vae.vae_scale_factor=vae_scale
-    seed = int(randomize_seed_fn(
+    seed = int(randomize_seed_fn())
     generator = torch.Generator(device='cuda').manual_seed(seed)
     #prompt, negative_prompt = apply_style(style_selection, prompt, negative_prompt)
     options = {
@@ -328,7 +328,7 @@ def generate_60(
     #global models
     #pipe = models[model_choice]
     pipe.vae.vae_scale_factor=vae_scale
-    seed = int(randomize_seed_fn(
+    seed = int(randomize_seed_fn())
     generator = torch.Generator(device='cuda').manual_seed(seed)
     #prompt, negative_prompt = apply_style(style_selection, prompt, negative_prompt)
     options = {
@@ -390,7 +390,7 @@ def generate_90(
     #global models
     #pipe = models[model_choice]
     pipe.vae.vae_scale_factor=vae_scale
-    seed = int(randomize_seed_fn(
+    seed = int(randomize_seed_fn())
     generator = torch.Generator(device='cuda').manual_seed(seed)
     #prompt, negative_prompt = apply_style(style_selection, prompt, negative_prompt)
     options = {
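For context, a minimal sketch of what the updated helper and its call sites do after this commit. It only restates the lines shown in the diff above; the MAX_SEED value and the surrounding imports are assumptions for illustration, since they are defined elsewhere in app.py and not part of this change:

import random
import torch

MAX_SEED = 2**32 - 1  # assumed bound for illustration; app.py defines its own MAX_SEED

def randomize_seed_fn() -> int:
    # After this commit the helper takes no arguments and always returns a fresh random seed.
    seed = random.randint(0, MAX_SEED)
    return seed

# Call-site pattern shared by generate_30, generate_60 and generate_90:
seed = int(randomize_seed_fn())
generator = torch.Generator(device='cuda').manual_seed(seed)  # requires a CUDA device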