AlPrompt / app.py
AlStable's picture
Update app.py
3ae8213
raw
history blame
1.95 kB
import os
import gradio as gr
import logging
# Configure root logging once at import time; WARNING-level messages are used
# throughout this script as lightweight trace output.
logging.basicConfig(format='%(process)d-%(levelname)s-%(message)s')
logging.warning(' - app.py started')
# Hub token injected by the Space's secrets; None when running without one.
API_KEY=os.environ.get('HUGGING_FACE_HUB_TOKEN', None)
# Footer markdown shown below the Gradio interface.
article = """---
This space was created using [SD Space Creator](https://huggingface.co/spaces/anzorq/sd-space-creator)."""
class Model:
    """Lightweight record describing a hosted Stable Diffusion variant.

    Attributes:
        name:   Human-readable model name.
        path:   Hub path of the model (``models/<org>/<repo>``).
        prefix: Style trigger phrase used to select this model from a prompt.
        pipe_t2i / pipe_i2i: Pipeline slots, unset (``None``) until loaded.
    """

    def __init__(self, name, path="", prefix=""):
        self.name, self.path, self.prefix = name, path, prefix
        # Pipelines are created lazily elsewhere; start both unset.
        self.pipe_t2i = self.pipe_i2i = None
# Registry of selectable models; each prefix is the style phrase a prompt
# must contain for selectModel() to route to that model.
models = [
Model("Marvel","models/ItsJayQz/Marvel_WhatIf_Diffusion", "whatif style"),
Model("Cyberpunk Anime Diffusion", "models/DGSpitzer/Cyberpunk-Anime-Diffusion", "dgs illustration style"),
Model("Guan Yu Diffusion", "models/DGSpitzer/Guan-Yu-Diffusion", "Guan-Yu style"),
Model("Portrait plus", "models/wavymulder/portraitplus", "portrait+ style"),
Model("classic Disney", "models/nitrosocke/classic-anim-diffusion", "classic disney style"),
Model("SD21","models/stabilityai/stable-diffusion-2-1", "sd21 default style")
]
# Default model handed to gr.Interface.load below.
custom_model = "models/stabilityai/stable-diffusion-2-1"
def selectModel(message):
    """Return the Hub path of the model whose style prefix occurs in *message*.

    The prompt is matched case-insensitively against each registered model's
    ``prefix``; the first match wins. If no prefix is found the last model in
    ``models`` (SD 2.1) is returned as the default.

    Args:
        message: The user's prompt text.

    Returns:
        str: The ``models/<org>/<repo>`` path of the selected model.
    """
    text = message.lower()
    for model in models:
        # Compare both sides lowercased: some prefixes contain capitals
        # (e.g. "Guan-Yu style") and would otherwise never match the
        # already-lowercased prompt.
        if model.prefix.lower() in text:
            logging.warning(' c_model = ' + model.path)
            return model.path
    # No prefix matched — fall back explicitly to the last registered model
    # (the original code relied on the leaked loop variable for this).
    fallback = models[-1].path
    logging.warning(' - c_model = ' + fallback)
    return fallback
# Build the Space UI. gr.Interface.load proxies a model hosted on the Hub
# (here `custom_model`) into a ready-made interface.
# NOTE(review): passing both `fn=` and `name=` to Interface.load looks
# suspect — `load` normally takes the repo name, not a callable; confirm
# against the Gradio version pinned for this Space.
sandbox = gr.Interface.load(
fn= selectModel,
name= custom_model,
title="""AlStable sandbox""",
inputs = gr.Textbox(label="Prompt", show_label=False, max_lines=2, placeholder="Enter your prompt", elem_id="input-prompt"),
description="""Demo for <a href="https://huggingface.co/stabilityai/stable-diffusion-2-1">AlStable</a> Stable Diffusion model.""",
article=article,
api_key=API_KEY
)
# Queue requests (up to 20 concurrent workers) and start the server.
sandbox.queue(concurrency_count=20).launch()