jbilcke-hf HF staff committed on
Commit
504fb8a
·
1 Parent(s): 611d1ae

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -4
app.py CHANGED
@@ -5,7 +5,9 @@ import os
5
  import random
6
  import gradio as gr
7
  import numpy as np
8
- import PIL.Image
 
 
9
  import torch
10
  from diffusers import LCMScheduler, AutoPipelineForText2Image
11
 
@@ -52,7 +54,7 @@ def generate(prompt: str,
52
  if not use_negative_prompt:
53
  negative_prompt = None # type: ignore
54
 
55
- return pipe(prompt=prompt,
56
  negative_prompt=negative_prompt,
57
  width=width,
58
  height=height,
@@ -60,6 +62,14 @@ def generate(prompt: str,
60
  num_inference_steps=num_inference_steps,
61
  generator=generator,
62
  output_type='pil').images[0]
 
 
 
 
 
 
 
 
63
 
64
  with gr.Blocks() as demo:
65
  gr.HTML("""
@@ -81,7 +91,7 @@ with gr.Blocks() as demo:
81
  placeholder='Enter your prompt',
82
  container=False,
83
  )
84
- result = gr.Image(label='Result', show_label=False)
85
 
86
  use_negative_prompt = gr.Checkbox(label='Use negative prompt', value=False)
87
  negative_prompt = gr.Text(
@@ -156,4 +166,4 @@ with gr.Blocks() as demo:
156
  api_name='run',
157
  )
158
 
159
- demo.queue(max_size=16).launch()
 
5
  import random
6
  import gradio as gr
7
  import numpy as np
8
+ from PIL import Image
9
+ import base64
10
+ import io
11
  import torch
12
  from diffusers import LCMScheduler, AutoPipelineForText2Image
13
 
 
54
  if not use_negative_prompt:
55
  negative_prompt = None # type: ignore
56
 
57
+ image = pipe(prompt=prompt,
58
  negative_prompt=negative_prompt,
59
  width=width,
60
  height=height,
 
62
  num_inference_steps=num_inference_steps,
63
  generator=generator,
64
  output_type='pil').images[0]
65
+
66
+ # Convert PIL image to a byte stream
67
+ buffered = io.BytesIO()
68
+ image.save(buffered, format="PNG")
69
+
70
+ # Encode to base64
71
+ img_str = base64.b64encode(buffered.getvalue()).decode()
72
+ return img_str
73
 
74
  with gr.Blocks() as demo:
75
  gr.HTML("""
 
91
  placeholder='Enter your prompt',
92
  container=False,
93
  )
94
+ result = gr.Image(label='Result', show_label=False, type="base64")
95
 
96
  use_negative_prompt = gr.Checkbox(label='Use negative prompt', value=False)
97
  negative_prompt = gr.Text(
 
166
  api_name='run',
167
  )
168
 
169
+ demo.queue(max_size=32).launch()