Spaces:
Running
on
Zero
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
|
@@ -88,10 +88,8 @@ pipe = StableDiffusion3Pipeline(
|
|
| 88 |
)
|
| 89 |
pipe = pipe.to(device, dtype=torch_dtype)
|
| 90 |
|
| 91 |
-
# The rest of the code is from the official SD3.5 space
|
| 92 |
-
|
| 93 |
MAX_SEED = np.iinfo(np.int32).max
|
| 94 |
-
MAX_IMAGE_SIZE =
|
| 95 |
|
| 96 |
@spaces.GPU(duration=65)
|
| 97 |
def infer(
|
|
@@ -124,7 +122,7 @@ def infer(
|
|
| 124 |
|
| 125 |
|
| 126 |
examples = [
|
| 127 |
-
|
| 128 |
]
|
| 129 |
|
| 130 |
css = """
|
|
@@ -136,8 +134,8 @@ css = """
|
|
| 136 |
|
| 137 |
with gr.Blocks(css=css) as demo:
|
| 138 |
with gr.Column(elem_id="col-container"):
|
| 139 |
-
gr.Markdown(" # [
|
| 140 |
-
gr.Markdown("[
|
| 141 |
with gr.Row():
|
| 142 |
prompt = gr.Text(
|
| 143 |
label="Prompt",
|
|
@@ -204,6 +202,7 @@ with gr.Blocks(css=css) as demo:
|
|
| 204 |
)
|
| 205 |
|
| 206 |
gr.Examples(examples=examples, inputs=[prompt], outputs=[result, seed], fn=infer, cache_examples=True, cache_mode="lazy")
|
|
|
|
| 207 |
gr.on(
|
| 208 |
triggers=[run_button.click, prompt.submit],
|
| 209 |
fn=infer,
|
|
|
|
| 88 |
)
|
| 89 |
pipe = pipe.to(device, dtype=torch_dtype)
|
| 90 |
|
|
|
|
|
|
|
| 91 |
MAX_SEED = np.iinfo(np.int32).max
|
| 92 |
+
MAX_IMAGE_SIZE = 1536
|
| 93 |
|
| 94 |
@spaces.GPU(duration=65)
|
| 95 |
def infer(
|
|
|
|
| 122 |
|
| 123 |
|
| 124 |
examples = [
|
| 125 |
+
"An astronaut encounters an alien on the moon, photograph",
|
| 126 |
]
|
| 127 |
|
| 128 |
css = """
|
|
|
|
| 134 |
|
| 135 |
with gr.Blocks(css=css) as demo:
|
| 136 |
with gr.Column(elem_id="col-container"):
|
| 137 |
+
gr.Markdown(" # [Absynth 2.0](https://huggingface.co/DoctorDiffusion/Absynth-2.0) by [DoctorDiffusion](https://civitai.com/user/doctor_diffusion)")
|
| 138 |
+
gr.Markdown("Finetuned from [Stable Diffusion 3.5 Large (8B)](https://huggingface.co/stabilityai/stable-diffusion-3.5-large) by [Stability AI](https://stability.ai/news/introducing-stable-diffusion-3-5).")
|
| 139 |
with gr.Row():
|
| 140 |
prompt = gr.Text(
|
| 141 |
label="Prompt",
|
|
|
|
| 202 |
)
|
| 203 |
|
| 204 |
gr.Examples(examples=examples, inputs=[prompt], outputs=[result, seed], fn=infer, cache_examples=True, cache_mode="lazy")
|
| 205 |
+
|
| 206 |
gr.on(
|
| 207 |
triggers=[run_button.click, prompt.submit],
|
| 208 |
fn=infer,
|