ford442 committed · verified
Commit 230ae3a · 1 Parent(s): 24676cb

Update app.py

Files changed (1)
  app.py +5 -3
app.py CHANGED
@@ -98,7 +98,9 @@ FTP_PASS = os.getenv("FTP_PASS")
98
  FTP_DIR = os.getenv("FTP_DIR")
99
 
100
  # os.putenv('TORCH_LINALG_PREFER_CUSOLVER','1')
 
101
  os.putenv('HF_HUB_ENABLE_HF_TRANSFER','1')
 
102
  device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
103
 
104
  upscaler = UpscaleWithModel.from_pretrained("Kim2091/ClearRealityV1").to(torch.device("cuda:0"))
@@ -295,7 +297,7 @@ def generate_30(
295
  progress=gr.Progress(track_tqdm=True) # Add progress as a keyword argument
296
  ):
297
  seed = random.randint(0, MAX_SEED)
298
- generator = torch.Generator(device='cuda').manual_seed(seed)
299
  options = {
300
  "prompt": [prompt],
301
  "negative_prompt": [negative_prompt],
@@ -346,7 +348,7 @@ def generate_60(
346
  progress=gr.Progress(track_tqdm=True) # Add progress as a keyword argument
347
  ):
348
  seed = random.randint(0, MAX_SEED)
349
- generator = torch.Generator(device='cuda').manual_seed(seed)
350
  options = {
351
  "prompt": [prompt],
352
  "negative_prompt": [negative_prompt],
@@ -387,7 +389,7 @@ def generate_90(
387
  progress=gr.Progress(track_tqdm=True) # Add progress as a keyword argument
388
  ):
389
  seed = random.randint(0, MAX_SEED)
390
- generator = torch.Generator(device='cuda').manual_seed(seed)
391
  options = {
392
  "prompt": [prompt],
393
  "negative_prompt": [negative_prompt],
 
98
  FTP_DIR = os.getenv("FTP_DIR")
99
 
100
  # os.putenv('TORCH_LINALG_PREFER_CUSOLVER','1')
101
+
102
  os.putenv('HF_HUB_ENABLE_HF_TRANSFER','1')
103
+
104
  device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
105
 
106
  upscaler = UpscaleWithModel.from_pretrained("Kim2091/ClearRealityV1").to(torch.device("cuda:0"))
 
297
  progress=gr.Progress(track_tqdm=True) # Add progress as a keyword argument
298
  ):
299
  seed = random.randint(0, MAX_SEED)
300
+ generator = torch.Generator(device='cpu').manual_seed(seed)
301
  options = {
302
  "prompt": [prompt],
303
  "negative_prompt": [negative_prompt],
 
348
  progress=gr.Progress(track_tqdm=True) # Add progress as a keyword argument
349
  ):
350
  seed = random.randint(0, MAX_SEED)
351
+ generator = torch.Generator(device='cpu').manual_seed(seed)
352
  options = {
353
  "prompt": [prompt],
354
  "negative_prompt": [negative_prompt],
 
389
  progress=gr.Progress(track_tqdm=True) # Add progress as a keyword argument
390
  ):
391
  seed = random.randint(0, MAX_SEED)
392
+ generator = torch.Generator(device='cpu').manual_seed(seed)
393
  options = {
394
  "prompt": [prompt],
395
  "negative_prompt": [negative_prompt],