Nick088 committed on
Commit
7022e7b
·
verified ·
1 Parent(s): 2e9789d

Fix example error caused by example caching

Browse files
Files changed (1) hide show
  1. app.py +6 -2
app.py CHANGED
@@ -20,6 +20,8 @@ else:
20
 
21
  MAX_SEED = numpy.iinfo(numpy.int32).max
22
 
 
 
23
  # Global dictionary to store pipelines
24
  PIPELINES = {}
25
 
@@ -1236,6 +1238,7 @@ with gr.Blocks(theme=theme, css=css) as demo:
1236
 
1237
  gr.Examples(
1238
  examples=examples_arena,
 
1239
  inputs=[
1240
  prompt,
1241
  negative_prompt,
@@ -1280,7 +1283,7 @@ with gr.Blocks(theme=theme, css=css) as demo:
1280
  decoder_guidance_scale_d,
1281
  ],
1282
  outputs=[result_1, result_2, result_3, result_4],
1283
- fn=generate_arena_images,
1284
  )
1285
 
1286
  gr.on(
@@ -1531,6 +1534,7 @@ with gr.Blocks(theme=theme, css=css) as demo:
1531
 
1532
  gr.Examples(
1533
  examples=examples_individual,
 
1534
  inputs=[
1535
  prompt,
1536
  model_choice,
@@ -1547,7 +1551,7 @@ with gr.Blocks(theme=theme, css=css) as demo:
1547
  decoder_guidance_scale,
1548
  ],
1549
  outputs=[result],
1550
- fn=generate_individual_image,
1551
  )
1552
 
1553
  gr.on(
 
20
 
21
  MAX_SEED = numpy.iinfo(numpy.int32).max
22
 
23
+ CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "0") == "1"
24
+
25
  # Global dictionary to store pipelines
26
  PIPELINES = {}
27
 
 
1238
 
1239
  gr.Examples(
1240
  examples=examples_arena,
1241
+ fn=generate_arena_images,
1242
  inputs=[
1243
  prompt,
1244
  negative_prompt,
 
1283
  decoder_guidance_scale_d,
1284
  ],
1285
  outputs=[result_1, result_2, result_3, result_4],
1286
+ cache_examples=CACHE_EXAMPLES
1287
  )
1288
 
1289
  gr.on(
 
1534
 
1535
  gr.Examples(
1536
  examples=examples_individual,
1537
+ fn=generate_individual_image,
1538
  inputs=[
1539
  prompt,
1540
  model_choice,
 
1551
  decoder_guidance_scale,
1552
  ],
1553
  outputs=[result],
1554
+ cache_examples=CACHE_EXAMPLES
1555
  )
1556
 
1557
  gr.on(