Update app.py
app.py CHANGED
@@ -63,8 +63,6 @@ BATCH_SIZE = int(os.getenv("BATCH_SIZE", "1"))
 
 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 
-generator = torch.Generator(device=device)
-
 style_list = [
     {
         "name": "3840 x 2160",
@@ -167,7 +165,7 @@ def generate(
     global models
     pipe = models[model_choice]
     seed = int(randomize_seed_fn(seed, randomize_seed))
-    generator.manual_seed(seed)
+    generator = torch.Generator(device=device).manual_seed(seed)
 
     prompt, negative_prompt = apply_style(style_selection, prompt, negative_prompt)
 
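Net effect of the change: the module-level torch.Generator that was shared across requests is removed, and generate() now builds a freshly seeded generator on every call. A minimal sketch of why that matters, assuming the generator is later handed to the image pipeline (sample_noise below is a hypothetical stand-in, not a function in app.py):

import torch

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

def sample_noise(seed: int) -> torch.Tensor:
    # Create and seed the generator inside the call, as the new code does;
    # each call is deterministic for its seed and no shared state is mutated.
    generator = torch.Generator(device=device).manual_seed(seed)
    return torch.randn(4, generator=generator, device=device)

# Same seed -> identical results, independent of call order or other callers.
assert torch.equal(sample_noise(42), sample_noise(42))

With the old module-level generator, another request could advance the shared RNG state between manual_seed(seed) and the pipeline call, so a given seed was only reproducible when requests ran strictly one at a time.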