RanM committed on
Commit
02161de
·
verified ·
1 Parent(s): 789e6b5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -15
app.py CHANGED
@@ -1,9 +1,7 @@
1
  import gradio as gr
2
- import torch
3
  from diffusers import AutoPipelineForText2Image
4
  from io import BytesIO
5
  from generate_propmts import generate_prompt
6
- from concurrent.futures import ThreadPoolExecutor
7
  import asyncio
8
 
9
  # Load the model once outside of the function
@@ -41,20 +39,11 @@ async def process_prompt(sentence_mapping, character_dict, selected_style):
41
  prompts.append((paragraph_number, prompt))
42
  print(f"Generated prompt for paragraph {paragraph_number}: {prompt}")
43
 
44
- loop = asyncio.get_event_loop()
45
- tasks = []
46
-
47
- with ThreadPoolExecutor() as executor:
48
- for paragraph_number, prompt in prompts:
49
- tasks.append(loop.run_in_executor(executor, generate_image, prompt))
50
 
51
- for paragraph_number, task in zip(sentence_mapping.keys(), await asyncio.gather(*tasks)):
52
- try:
53
- image = task
54
- if image:
55
- images[paragraph_number] = image
56
- except Exception as e:
57
- print(f"Error processing paragraph {paragraph_number}: {e}")
58
 
59
  return images
60
 
 
1
  import gradio as gr
 
2
  from diffusers import AutoPipelineForText2Image
3
  from io import BytesIO
4
  from generate_propmts import generate_prompt
 
5
  import asyncio
6
 
7
  # Load the model once outside of the function
 
39
  prompts.append((paragraph_number, prompt))
40
  print(f"Generated prompt for paragraph {paragraph_number}: {prompt}")
41
 
42
+ tasks = [generate_image(prompt) for _, prompt in prompts]
 
 
 
 
 
43
 
44
+ for (paragraph_number, _), image_bytes in zip(prompts, await asyncio.gather(*tasks)):
45
+ if image_bytes:
46
+ images[paragraph_number] = image_bytes
 
 
 
 
47
 
48
  return images
49