ford442 committed
Commit ab9b03a · verified · 1 Parent(s): 645d6e9

Update app.py

Files changed (1): app.py (+1, -4)
app.py CHANGED
@@ -32,6 +32,7 @@ torch.backends.cudnn.deterministic = False
 torch.backends.cudnn.benchmark = False
 #torch.backends.cuda.preferred_blas_library="cublas"
 #torch.backends.cuda.preferred_linalg_library="cusolver"
+torch.set_float32_matmul_precision("highest")
 
 hftoken = os.getenv("HF_AUTH_TOKEN")
 
@@ -111,7 +112,6 @@ def infer_30(
     pipe.text_encoder=text_encoder
     pipe.text_encoder_2=text_encoder_2
     pipe.text_encoder_3=text_encoder_3
-    torch.set_float32_matmul_precision("highest")
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
     print('-- generating image --')
@@ -161,7 +161,6 @@ def infer_60(
     pipe.text_encoder=text_encoder
     pipe.text_encoder_2=text_encoder_2
     pipe.text_encoder_3=text_encoder_3
-    torch.set_float32_matmul_precision("highest")
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
     print('-- generating image --')
@@ -210,7 +209,6 @@ def infer_90(
     pipe.text_encoder=text_encoder
     pipe.text_encoder_2=text_encoder_2
     pipe.text_encoder_3=text_encoder_3
-    torch.set_float32_matmul_precision("highest")
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
     print('-- generating image --')
@@ -256,7 +254,6 @@ def infer_100(
     num_inference_steps,
     progress=gr.Progress(track_tqdm=True),
 ):
-    torch.set_float32_matmul_precision("highest")
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
     print('-- generating image --')
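The net effect of the change: torch.set_float32_matmul_precision("highest") is now called once at module level instead of separately inside infer_30, infer_60, infer_90, and infer_100. A minimal sketch (not part of app.py; infer_sketch is a hypothetical stand-in) of why the per-call invocations were redundant: the precision flag is a process-wide PyTorch setting, so a single call at import time covers every float32 matmul that follows.

import torch

# torch.set_float32_matmul_precision configures a global, process-wide flag,
# so one call at import time applies to all later float32 matmuls.
torch.set_float32_matmul_precision("highest")

def infer_sketch():
    # Hypothetical stand-in for the app's infer_* functions: no per-call
    # precision setting is needed; the module-level call above already applies.
    a = torch.randn(4, 4)
    b = torch.randn(4, 4)
    return a @ b

print(torch.get_float32_matmul_precision())  # -> 'highest'
infer_sketch()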