charliebaby2023 committed on
Commit
e1f04d1
·
verified ·
1 Parent(s): 7fde020

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -4
app.py CHANGED
@@ -2,6 +2,7 @@ import gradio as gr
2
  from random import randint
3
  from all_models import models
4
  from datetime import datetime
 
5
 
6
  now2 = 0
7
  kii=" mohawk femboy racecar driver ";
@@ -43,13 +44,27 @@ def gen_fn(model_str, prompt, negative_prompt):
43
  if model_str == 'NA':
44
  return None
45
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
46
 
47
 
48
 
49
 
50
- noise = str(randint(0, 9999))
51
- combined_prompt = f'{prompt} {model_str} {negative_prompt} {noise}'
52
- print(f"Generating with prompt: {combined_prompt}") # Debug line
53
 
54
  # result = models_load[model_str](f'{prompt} {negative_prompt} {noise}')
55
 
@@ -64,7 +79,7 @@ def gen_fn(model_str, prompt, negative_prompt):
64
 
65
 
66
 
67
- return models_load[model_str](f'{prompt} {negative_prompt} {noise}')
68
 
69
 
70
 
 
2
  from random import randint
3
  from all_models import models
4
  from datetime import datetime
5
+ from concurrent.futures import ThreadPoolExecutor
6
 
7
  now2 = 0
8
  kii=" mohawk femboy racecar driver ";
 
44
  if model_str == 'NA':
45
  return None
46
 
47
+ executor = ThreadPoolExecutor(max_workers=num_models)
48
+
49
+ def gen_fn(model_str, prompt, negative_prompt):
50
+ if model_str == 'NA':
51
+ return None
52
+
53
+ noise = str(randint(0, 9999))
54
+ combined_prompt = f'{prompt} {model_str} {negative_prompt} {noise}'
55
+ print(f"Generating with prompt: {combined_prompt}")
56
+
57
+ # Use the executor to run models in parallel
58
+ future = executor.submit(models_load[model_str], f'{prompt} {negative_prompt} {noise}')
59
+ result = future.result()
60
+ return result
61
 
62
 
63
 
64
 
65
+ # noise = str(randint(0, 9999))
66
+ # combined_prompt = f'{prompt} {model_str} {negative_prompt} {noise}'
67
+ # print(f"Generating with prompt: {combined_prompt}") # Debug line
68
 
69
  # result = models_load[model_str](f'{prompt} {negative_prompt} {noise}')
70
 
 
79
 
80
 
81
 
82
+ # return models_load[model_str](f'{prompt} {negative_prompt} {noise}')
83
 
84
 
85