DonImages committed on
Commit
79d9e62
·
verified ·
1 Parent(s): c93b55a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +98 -34
app.py CHANGED
@@ -71,39 +71,103 @@ def infer(
71
  return f"Error: {e}", seed # Return error to Gradio interface
72
 
73
  # ... (rest of your Gradio code - examples, CSS, etc. - same as before)
74
-
75
- # 4. Image generation function (now decorated)
76
- @GPU(duration=65) # Only if in HF Space
77
- def generate_image(prompt):
78
- global pipeline
79
- if pipeline is None:
80
- print("Error: Pipeline is None (model not loaded)") # Log this specifically
81
- return "Error: Model not loaded!"
82
-
83
- try:
84
- print("Starting image generation...") # Log before the image generation
85
- image = pipeline(prompt).images[0]
86
- print("Image generated successfully!")
87
- return image
88
- except Exception as e:
89
- error_message = f"Error during image generation: {type(e).__name__}: {e}" # Include exception type
90
- print(f"Full Error Details:\n{error_message}") # Print full details
91
- return error_message # Return error message to Gradio
92
- except RuntimeError as re:
93
- error_message = f"Runtime Error during image generation: {type(re).__name__}: {re}" # Include exception type
94
- print(f"Full Runtime Error Details:\n{error_message}") # Print full details
95
- return error_message # Return error message to Gradio
96
-
97
- # 5. Gradio interface
98
- with gr.Blocks() as demo:
99
- prompt_input = gr.Textbox(label="Prompt")
100
- image_output = gr.Image(label="Generated Image")
101
- generate_button = gr.Button("Generate")
102
-
103
- generate_button.click(
104
- fn=generate_image,
105
- inputs=prompt_input,
106
- outputs=image_output,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
107
  )
108
 
109
- demo.launch()
 
 
 
71
  return f"Error: {e}", seed # Return error to Gradio interface
72
 
73
  # ... (rest of your Gradio code - examples, CSS, etc. - same as before)
74
+ examples = [
75
+ "A capybara wearing a suit holding a sign that reads Hello World",
76
+ ]
77
+
78
+ css = """
79
+ #col-container {
80
+ margin: 0 auto;
81
+ max-width: 640px;
82
+ }
83
+ """
84
+
85
+ with gr.Blocks(css=css) as demo:
86
+ with gr.Column(elem_id="col-container"):
87
+ gr.Markdown(" # [Stable Diffusion 3.5 Large (8B)](https://huggingface.co/stabilityai/stable-diffusion-3.5-large)")
88
+ gr.Markdown("[Learn more](https://stability.ai/news/introducing-stable-diffusion-3-5) about the Stable Diffusion 3.5 series. Try on [Stability AI API](https://platform.stability.ai/docs/api-reference#tag/Generate/paths/~1v2beta~1stable-image~1generate~1sd3/post), or [download model](https://huggingface.co/stabilityai/stable-diffusion-3.5-large) to run locally with ComfyUI or diffusers.")
89
+ with gr.Row():
90
+ prompt = gr.Text(
91
+ label="Prompt",
92
+ show_label=False,
93
+ max_lines=1,
94
+ placeholder="Enter your prompt",
95
+ container=False,
96
+ )
97
+
98
+ run_button = gr.Button("Run", scale=0, variant="primary")
99
+
100
+ result = gr.Image(label="Result", show_label=False)
101
+
102
+ with gr.Accordion("Advanced Settings", open=False):
103
+ negative_prompt = gr.Text(
104
+ label="Negative prompt",
105
+ max_lines=1,
106
+ placeholder="Enter a negative prompt",
107
+ visible=False,
108
+ )
109
+
110
+ seed = gr.Slider(
111
+ label="Seed",
112
+ minimum=0,
113
+ maximum=MAX_SEED,
114
+ step=1,
115
+ value=0,
116
+ )
117
+
118
+ randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
119
+
120
+ with gr.Row():
121
+ width = gr.Slider(
122
+ label="Width",
123
+ minimum=512,
124
+ maximum=MAX_IMAGE_SIZE,
125
+ step=32,
126
+ value=1024,
127
+ )
128
+
129
+ height = gr.Slider(
130
+ label="Height",
131
+ minimum=512,
132
+ maximum=MAX_IMAGE_SIZE,
133
+ step=32,
134
+ value=1024,
135
+ )
136
+
137
+ with gr.Row():
138
+ guidance_scale = gr.Slider(
139
+ label="Guidance scale",
140
+ minimum=0.0,
141
+ maximum=7.5,
142
+ step=0.1,
143
+ value=4.5,
144
+ )
145
+
146
+ num_inference_steps = gr.Slider(
147
+ label="Number of inference steps",
148
+ minimum=1,
149
+ maximum=50,
150
+ step=1,
151
+ value=40,
152
+ )
153
+
154
+ gr.Examples(examples=examples, inputs=[prompt], outputs=[result, seed], fn=infer, cache_examples=True, cache_mode="lazy")
155
+ gr.on(
156
+ triggers=[run_button.click, prompt.submit],
157
+ fn=infer,
158
+ inputs=[
159
+ prompt,
160
+ negative_prompt,
161
+ seed,
162
+ randomize_seed,
163
+ width,
164
+ height,
165
+ guidance_scale,
166
+ num_inference_steps,
167
+ ],
168
+ outputs=[result, seed],
169
  )
170
 
171
+ if __name__ == "__main__":
172
+ demo.launch()
173
+