phenomenon1981 committed on
Commit 24b84b4 · 1 Parent(s): 510a8df

Update app.py

Files changed (1)
  1. app.py +16 -3
app.py CHANGED
@@ -1,10 +1,12 @@
 from transformers import pipeline, set_seed
 import gradio as grad, random, re
 
+
 gpt2_pipe = pipeline('text-generation', model='Gustavosta/MagicPrompt-Stable-Diffusion', tokenizer='gpt2')
 with open("ideas.txt", "r") as f:
     line = f.readlines()
 
+
 def generate(starting_text):
     seed = random.randint(100, 1000000)
     set_seed(seed)
@@ -13,14 +15,23 @@ def generate(starting_text):
     starting_text: str = line[random.randrange(0, len(line))].replace("\n", "").lower().capitalize()
     starting_text: str = re.sub(r"[,:\-–.!;?_]", '', starting_text)
 
-    response = gpt2_pipe(starting_text, max_length=(len(starting_text) + random.randint(60, 90)), num_return_sequences=1)
+    response = gpt2_pipe(starting_text, max_length=(len(starting_text) + random.randint(70, 90)), num_return_sequences=1)
+    response_list = []
     for x in response:
         resp = x['generated_text'].strip()
         if resp != starting_text and len(resp) > (len(starting_text) + 4) and resp.endswith((":", "-", "—")) is False:
-            return resp
+            response_list.append(resp+'\n')
+
+    response_end = "\n".join(response_list)
+    response_end = re.sub('[^ ]+\.[^ ]+','', response_end)
+    response_end = response_end.replace("<", "").replace(">", "")
+
+    if response_end != "":
+        return response_end
+
 
 txt = grad.Textbox(lines=1, label="Initial Text", placeholder="English Text here")
-out = grad.Textbox(lines=1, label="Generated Prompt")
+out = grad.Textbox(lines=4, label="Generated Prompts")
 
 title = "Stable Diffusion Prompt Generator"
 description = 'This is a demo of the model series: "MagicPrompt", in this case, aimed at: "Stable Diffusion". To use it, simply submit your text. To learn more about the model, [click here](https://huggingface.co/Gustavosta/MagicPrompt-Stable-Diffusion).<br>'
@@ -33,3 +44,5 @@ grad.Interface(fn=generate,
               article='',
               allow_flagging='never',
               theme="default").launch(enable_queue=True, debug=True)
+
+
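For context, a minimal standalone sketch of the post-processing this commit adds to generate(). The candidate strings below are hypothetical stand-ins for gpt2_pipe output that passed the existing filter; the cleanup steps mirror the diff: join the accepted completions, strip tokens containing a period, and remove angle brackets before returning the combined text.

import re

# Hypothetical stand-ins for 'generated_text' entries accepted by the filter in generate().
candidates = [
    "Portrait of a wizard, intricate, highly detailed, sharp focus",
    "Portrait of a wizard, trending on artstation.com, <by greg rutkowski>",
]

response_list = [resp + '\n' for resp in candidates]

# Cleanup steps introduced in this commit:
response_end = "\n".join(response_list)
response_end = re.sub(r'[^ ]+\.[^ ]+', '', response_end)      # drop tokens that contain a period (e.g. URLs, file names)
response_end = response_end.replace("<", "").replace(">", "")  # strip stray angle brackets

if response_end != "":
    print(response_end)

Because generate() now returns this joined, possibly multi-line string instead of a single resp, the output Textbox is widened to lines=4 and relabeled "Generated Prompts".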