aiqtech commited on
Commit
23f819d
ยท
verified ยท
1 Parent(s): 9e2e6b7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +118 -636
app.py CHANGED
@@ -1,498 +1,86 @@
1
- import os
2
  import spaces
3
  import gradio as gr
4
  import numpy as np
 
5
  from PIL import Image
6
  import random
7
- from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler
 
8
  import torch
9
- from transformers import pipeline as transformers_pipeline
10
- import re
11
- from cohere import ClientV2 # Changed from HuggingFace to Cohere
12
 
13
- # ------------------------------------------------------------
14
- # DEVICE SETUP
15
- # ------------------------------------------------------------
16
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
17
 
18
- # ------------------------------------------------------------
19
- # STABLE DIFFUSION XL PIPELINE
20
- # ------------------------------------------------------------
21
  pipe = StableDiffusionXLPipeline.from_pretrained(
22
  "votepurchase/waiREALCN_v14",
23
  torch_dtype=torch.float16,
24
- variant="fp16",
25
- use_safetensors=True,
26
  )
 
27
  pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
28
  pipe.to(device)
29
 
30
- # Force important sub-modules to fp16 for VRAM efficiency
31
- for sub in (pipe.text_encoder, pipe.text_encoder_2, pipe.vae, pipe.unet):
32
- sub.to(torch.float16)
33
-
34
- # ------------------------------------------------------------
35
- # INITIALIZE COHERE CLIENT FOR TRANSLATIONS AND PROMPT GENERATION
36
- # ------------------------------------------------------------
37
- coh_api_key = os.getenv("COH_API")
38
- if not coh_api_key:
39
- print("[WARNING] COH_API environment variable not found. LLM features will not work.")
40
- coh_client = None
41
- else:
42
- try:
43
- coh_client = ClientV2(api_key=coh_api_key)
44
- print("[INFO] Cohere client initialized successfully.")
45
- except Exception as e:
46
- print(f"[ERROR] Failed to initialize Cohere client: {str(e)}")
47
- coh_client = None
48
-
49
-
50
- # 1. ๋น„์˜์–ด ๋ฌธ์ž ๊ฐ์ง€ ์ •๊ทœ์‹์„ ๋” ๋ช…ํ™•ํ•˜๊ฒŒ ์ˆ˜์ •
51
- # ํ•œ๊ธ€, ์ผ๋ณธ์–ด, ์ค‘๊ตญ์–ด๋ฅผ ๋ช…์‹œ์ ์œผ๋กœ ํฌํ•จ
52
- non_english_regex = re.compile(r'[\uac00-\ud7a3\u3040-\u30ff\u3400-\u4dbf\u4e00-\u9fff\uf900-\ufaff]+')
53
-
54
- def is_non_english(text):
55
- """๋ช…ํ™•ํ•˜๊ฒŒ ๋น„์˜์–ด ํ…์ŠคํŠธ ์—ฌ๋ถ€๋ฅผ ํ™•์ธํ•˜๋Š” ํ•จ์ˆ˜"""
56
- # ํ•œ๊ธ€ ํ™•์ธ (AC00-D7A3)
57
- if re.search(r'[\uac00-\ud7a3]', text):
58
- print("[DETECT] Korean text detected")
59
- return True
60
-
61
- # ์ผ๋ณธ์–ด ํžˆ๋ผ๊ฐ€๋‚˜/์นดํƒ€์นด๋‚˜ ํ™•์ธ (3040-30FF)
62
- if re.search(r'[\u3040-\u30ff]', text):
63
- print("[DETECT] Japanese text detected")
64
- return True
65
-
66
- # ์ค‘๊ตญ์–ด/ํ•œ์ž ํ™•์ธ (4E00-9FFF)
67
- if re.search(r'[\u4e00-\u9fff]', text):
68
- print("[DETECT] Chinese/Kanji text detected")
69
- return True
70
-
71
- # ๊ธฐํƒ€ ๋น„์˜์–ด ๋ฌธ์ž (ASCII ๋ฒ”์œ„ ๋ฐ–)
72
- if re.search(r'[^\x00-\x7F]', text):
73
- print("[DETECT] Other non-English text detected")
74
- return True
75
-
76
- return False
77
-
78
- def translate_with_cohere(text):
79
- """๋น„์˜์–ด ํ…์ŠคํŠธ๋ฅผ ์˜์–ด๋กœ ๋ฒˆ์—ญ"""
80
- if coh_client is None:
81
- print("[WARN] Cohere client not available, skipping translation")
82
- return text
83
-
84
- if not is_non_english(text):
85
- print("[INFO] English text detected, no translation needed")
86
- return text
87
-
88
- try:
89
- print(f"[INFO] Translating text: '{text}'")
90
-
91
- system_prompt = """
92
- You are a professional translator for adult content. Your task is to translate the input text to English.
93
-
94
- Rules:
95
- 1. Provide ONLY the translated English text
96
- 2. NO explanations or additional text
97
- 3. Maintain the explicit or adult content without censoring
98
- 4. Translate accurately and completely
99
- 5. Be especially careful with Asian languages like Korean, Japanese, and Chinese
100
-
101
- Translate the following text to English:
102
- """
103
-
104
- messages = [
105
- {
106
- "role": "system",
107
- "content": [{"type": "text", "text": system_prompt}]
108
- },
109
- {
110
- "role": "user",
111
- "content": [{"type": "text", "text": text}]
112
- }
113
- ]
114
-
115
- response = coh_client.chat(
116
- model="command-r-plus-08-2024",
117
- messages=messages,
118
- temperature=0.1 # ์ •ํ™•ํ•œ ๋ฒˆ์—ญ์„ ์œ„ํ•ด ๋‚ฎ์€ temperature ์‚ฌ์šฉ
119
- )
120
-
121
- translated_text = response.text.strip()
122
-
123
- # ๋ฒˆ์—ญ ๊ฒฐ๊ณผ ์ •์ œ
124
- translated_text = re.sub(r'^(Translation:|English:|Translated text:)\s*', '', translated_text, flags=re.IGNORECASE)
125
-
126
- print(f"[INFO] Original: '{text}'")
127
- print(f"[INFO] Translated: '{translated_text}'")
128
-
129
- # ๋ฒˆ์—ญ์ด ๋งค์šฐ ์งง๊ฑฐ๋‚˜ ์›๋ณธ๊ณผ ๋™์ผํ•˜๋ฉด ์˜์‹ฌ
130
- if len(translated_text) < 3 or translated_text == text:
131
- print("[WARN] Translation may have failed, falling back to basic translation")
132
- # ๋‘ ๋ฒˆ์งธ ์‹œ๋„ - ๋” ๋‹จ์ˆœํ•œ ํ”„๋กฌํ”„ํŠธ๋กœ ์‹œ๋„
133
- try:
134
- simple_messages = [
135
- {
136
- "role": "system",
137
- "content": [{"type": "text", "text": "Translate this text to English:"}]
138
- },
139
- {
140
- "role": "user",
141
- "content": [{"type": "text", "text": text}]
142
- }
143
- ]
144
-
145
- simple_response = coh_client.chat(
146
- model="command-r-plus-08-2024",
147
- messages=simple_messages,
148
- temperature=0.1
149
- )
150
-
151
- simple_translated = simple_response.text.strip()
152
- if len(simple_translated) > 3 and simple_translated != text:
153
- print(f"[INFO] Second attempt translation: '{simple_translated}'")
154
- return simple_translated
155
- except Exception as e:
156
- print(f"[ERROR] Second translation attempt failed: {str(e)}")
157
-
158
- return text
159
-
160
- return translated_text
161
- except Exception as e:
162
- print(f"[ERROR] Translation failed: {str(e)}")
163
- import traceback
164
- traceback.print_exc()
165
- return text # ๋ฒˆ์—ญ ์‹คํŒจ ์‹œ ์›๋ณธ ๋ฐ˜ํ™˜
166
-
167
-
168
-
169
- # ------------------------------------------------------------
170
- # EXAMPLES (Hidden from UI but used for RANDOM button)
171
- # ------------------------------------------------------------
172
- prompt_examples = [
173
- "The shy college girl, with glasses and a tight plaid skirt, nervously approaches her professor",
174
- "Her skirt rose a little higher with each gentle push, a soft blush of blush spreading across her cheeks as she felt the satisfying warmth of his breath on her cheek.",
175
- "a girl in a school uniform having her skirt pulled up by a boy, and then being fucked",
176
- "Moody mature anime scene of two lovers fuck under neon rain, sensual atmosphere",
177
- "Moody mature anime scene of two lovers kissing under neon rain, sensual atmosphere",
178
- "The girl sits on the boy's lap by the window, his hands resting on her waist. She is unbuttoning his shirt, her expression focused and intense.",
179
- "A girl with long, black hair is sleeping on her desk in the classroom. Her skirt has ridden up, revealing her thighs, and a trail of drool escapes her slightly parted lips.",
180
- "The waves rolled gently, a slow, sweet kiss of the lip, a slow, slow build of anticipation as their toes bumped gently โ€“ a slow, sweet kiss of the lip, a promise of more to come.",
181
- "Her elegant silk gown swayed gracefully as she approached him, the delicate fabric brushing against her legs. A warm blush spread across her cheeks as she felt his breath on her face.",
182
- "Her white blouse and light cotton skirt rose a little higher with each gentle push, a soft blush spreading across her cheeks as she felt the satisfying warmth of his breath on her cheek.",
183
- "A woman in a business suit having her skirt lifted by a man, and then being sexually assaulted.",
184
- "The older woman sits on the man's lap by the fireplace, his hands resting on her hips. She is unbuttoning his vest, her expression focused and intense. He takes control of the situation as she finishes unbuttoning his shirt, pushing her onto her back and begins to have sex with her.",
185
- "There is a woman with long black hair. Her face features alluring eyes and full lips, with a slender figure adorned in black lace lingerie. She lies on the bed, loosening her lingerie strap with one hand while seductively glancing downward.",
186
- "In a dimly lit room, the same woman teases with her dark, flowing hair, now covering her voluptuous breasts, while a black garter belt accentuates her thighs. She sits on the sofa, leaning back, lifting one leg to expose her most private areas through the sheer lingerie.",
187
- "A woman with glasses, lying on the bed in just her bra, spreads her legs wide, revealing all! She wears a sultry expression, gazing directly at the viewer with her brown eyes, her short black hair cascading over the pillow. Her slim figure, accentuated by the lacy lingerie, exudes a seductive aura.",
188
- "A soft focus on the girl's face, eyes closed, biting her lip, as her roommate performs oral pleasure, the experienced woman's hair cascading between her thighs.",
189
- "A woman in a blue hanbok sits on a wooden floor, her legs folded beneath her, gazing out of a window, the sunlight highlighting the graceful lines of her clothing.",
190
- "The couple, immersed in a wooden outdoor bath, share an intimate moment, her wet kimono clinging to her curves, his hands exploring her body beneath the water's surface.",
191
- "A steamy shower scene, the twins embrace under the warm water, their soapy hands gliding over each other's curves, their passion intensifying as they explore uncharted territories.",
192
- "The teacher, with a firm grip, pins the student against the blackboard, her skirt hiked up, exposing her delicate lace panties. Their heavy breathing echoes in the quiet room as they share an intense, intimate moment.",
193
- "After hours, the girl sits on top of the teacher's lap, riding him on the classroom floor, her hair cascading over her face as she moves with increasing intensity, their bodies glistening with sweat.",
194
- "In the dimly lit dorm room, the roommates lay entangled in a passionate embrace, their naked bodies glistening with sweat, as the experienced woman teaches her lover the art of kissing and touching.",
195
- "The once-innocent student, now confident, takes charge, straddling her lover on the couch, their bare skin illuminated by the warm glow of the sunset through the window.",
196
- "A close-up of the secretary's hand unzipping her boss's dress shirt, her fingers gently caressing his chest, their eyes locked in a heated embrace in the supply closet.",
197
- "The secretary, in a tight pencil skirt and silk blouse, leans back on the boss's desk, her legs wrapped around his waist, her blouse unbuttoned, revealing her lace bra, as he passionately kisses her, his hands exploring her body.",
198
- "On the living room couch, one twin sits astride her sister's lap, their lips locked in a passionate kiss, their hands tangled in each other's hair, unraveling a new level of intimacy.",
199
- "In a dimly lit chamber, the dominant woman, dressed in a leather corset and thigh-high boots, stands tall, her hand gripping her submissive partner's hair, his eyes closed in submission as she instructs him to please her.",
200
- "The dominant, in a sheer lace bodysuit, sits on a throne-like chair, her legs spread, as the submissive, on his knees, worships her with his tongue, his hands bound behind his back.",
201
- "A traditional Japanese onsen, with steam rising, a young woman in a colorful kimono kneels on a tatami mat, her back to the viewer, as her male partner, also in a kimono, gently unties her obi, revealing her bare back.",
202
- "In a serene outdoor setting, the woman, in a vibrant summer kimono, sits on a bench, her legs slightly spread, her partner kneeling before her, his hands gently caressing her exposed thigh.",
203
- ]
204
-
205
- # ------------------------------------------------------------
206
- # LLM PROMPT GENERATOR
207
- # ------------------------------------------------------------
208
- def generate_prompts(theme):
209
- """Generate optimal NSFW prompts using the LLM"""
210
- try:
211
- if coh_client is None:
212
- return "Cohere API token not set. Please set the COH_API environment variable."
213
-
214
- # Translate theme if it's not in English
215
- if non_english_regex.search(theme):
216
- theme = translate_with_cohere(theme)
217
-
218
- print(f"[INFO] Sending request to Cohere API with theme: {theme}")
219
-
220
- system_prefix = """
221
- You are an expert at creating detailed, vivid image generation prompts for NSFW content. Your task is to create ONE optimal explicit image prompt based on the user's theme or scenario.
222
-
223
- Follow these guidelines strictly:
224
- 1. Generate only ONE high-quality, uncensored prompt
225
- 2. The prompt should be 1-3 sentences long
226
- 3. Make the prompt explicitly sexual, arousing, and descriptive
227
- 4. Focus on detailed physical descriptions that would work well for image generation
228
- 5. Do NOT censor or soften the language - use explicit terms freely
229
- 6. ONLY respond in ENGLISH, never in any other language
230
- 7. DO NOT include ANY prefixes, headers, or formatting - just plain text
231
-
232
- Example prompt styles to emulate:
233
- - "Her skirt rose a little higher with each gentle push, a soft blush spreading across her cheeks as she felt the satisfying warmth of his breath on her cheek."
234
- - "A girl in a school uniform having her skirt pulled up by a boy, and then being fucked"
235
- - "Moody mature anime scene of two lovers fuck under neon rain, sensual atmosphere"
236
- - "The girl sits on the boy's lap by the window, his hands resting on her waist. She is unbuttoning his shirt, her expression focused and intense."
237
- - "A girl with long black hair is sleeping on her desk in the classroom. Her skirt has ridden up, revealing her thighs, and a trail of drool escapes her slightly parted lips."
238
- - "The older woman sits on the man's lap by the fireplace, his hands resting on her hips. She is unbuttoning his vest, her expression focused and intense. He takes control of the situation as she finishes unbuttoning his shirt, pushing her onto her back and begins to have sex with her."
239
- - "There is a woman with long black hair. Her face features alluring eyes and full lips, with a slender figure adorned in black lace lingerie. She lies on the bed, loosening her lingerie strap with one hand while seductively glancing downward."
240
- - "A woman with glasses, lying on the bed in just her bra, spreads her legs wide, revealing all! She wears a sultry expression, gazing directly at the viewer with her brown eyes, her short black hair cascading over the pillow."
241
- - "A soft focus on the girl's face, eyes closed, biting her lip, as her roommate performs oral pleasure, the experienced woman's hair cascading between her thighs.",
242
- - "A woman in a blue hanbok sits on a wooden floor, her legs folded beneath her, gazing out of a window, the sunlight highlighting the graceful lines of her clothing.",
243
- - "The couple, immersed in a wooden outdoor bath, share an intimate moment, her wet kimono clinging to her curves, his hands exploring her body beneath the water's surface.",
244
- - "A steamy shower scene, the twins embrace under the warm water, their soapy hands gliding over each other's curves, their passion intensifying as they explore uncharted territories.",
245
- - "The teacher, with a firm grip, pins the student against the blackboard, her skirt hiked up, exposing her delicate lace panties. Their heavy breathing echoes in the quiet room as they share an intense, intimate moment.",
246
- - "After hours, the girl sits on top of the teacher's lap, riding him on the classroom floor, her hair cascading over her face as she moves with increasing intensity, their bodies glistening with sweat.",
247
- - "In the dimly lit dorm room, the roommates lay entangled in a passionate embrace, their naked bodies glistening with sweat, as the experienced woman teaches her lover the art of kissing and touching.",
248
- - "The once-innocent student, now confident, takes charge, straddling her lover on the couch, their bare skin illuminated by the warm glow of the sunset through the window.",
249
- - "A close-up of the secretary's hand unzipping her boss's dress shirt, her fingers gently caressing his chest, their eyes locked in a heated embrace in the supply closet.",
250
- - "The secretary, in a tight pencil skirt and silk blouse, leans back on the boss's desk, her legs wrapped around his waist, her blouse unbuttoned, revealing her lace bra, as he passionately kisses her, his hands exploring her body.",
251
- - "On the living room couch, one twin sits astride her sister's lap, their lips locked in a passionate kiss, their hands tangled in each other's hair, unraveling a new level of intimacy.",
252
- - "In a dimly lit chamber, the dominant woman, dressed in a leather corset and thigh-high boots, stands tall, her hand gripping her submissive partner's hair, his eyes closed in submission as she instructs him to please her.",
253
- - "The dominant, in a sheer lace bodysuit, sits on a throne-like chair, her legs spread, as the submissive, on his knees, worships her with his tongue, his hands bound behind his back.",
254
- - "A traditional Japanese onsen, with steam rising, a young woman in a colorful kimono kneels on a tatami mat, her back to the viewer, as her male partner, also in a kimono, gently unties her obi, revealing her bare back.",
255
- - "In a serene outdoor setting, the woman, in a vibrant summer kimono, sits on a bench, her legs slightly spread, her partner kneeling before her, his hands gently caressing her exposed thigh.",
256
-
257
- Respond ONLY with the single prompt text in ENGLISH with NO PREFIXES of any kind.
258
- """
259
-
260
- # Format messages for Cohere API
261
- messages = [
262
- {
263
- "role": "system",
264
- "content": [{"type": "text", "text": system_prefix}]
265
- },
266
- {
267
- "role": "user",
268
- "content": [{"type": "text", "text": theme}]
269
- }
270
- ]
271
-
272
- # Generate response using Cohere
273
- response = coh_client.chat(
274
- model="command-r-plus-08-2024",
275
- messages=messages,
276
- temperature=0.8
277
- )
278
-
279
- # Extract only the text content without any debug information
280
- if hasattr(response, 'text'):
281
- generated_prompt = response.text
282
- else:
283
- # Handle different response formats
284
- try:
285
- # Try to extract just the text content from the response
286
- response_str = str(response)
287
- # If it's a complex object with nested structure
288
- if 'text=' in response_str:
289
- text_match = re.search(r"text=['\"]([^'\"]+)['\"]", response_str)
290
- if text_match:
291
- generated_prompt = text_match.group(1)
292
- else:
293
- generated_prompt = response_str
294
- else:
295
- generated_prompt = response_str
296
- except:
297
- generated_prompt = str(response)
298
-
299
- # FORCE translation to English if there's any non-English content
300
- if non_english_regex.search(generated_prompt):
301
- print("[INFO] Translating non-English prompt to English")
302
- generated_prompt = translate_with_cohere(generated_prompt)
303
-
304
- # Clean the prompt
305
- generated_prompt = re.sub(r'^AI๐Ÿผ:\s*', '', generated_prompt)
306
- generated_prompt = re.sub(r'^\d+[\.\)]\s*', '', generated_prompt)
307
- generated_prompt = re.sub(r'^(Prompt|Response|Result|Output):\s*', '', generated_prompt)
308
- generated_prompt = re.sub(r'^["\']+|["\']+$', '', generated_prompt)
309
- generated_prompt = generated_prompt.strip()
310
- generated_prompt = re.sub(r'\s+', ' ', generated_prompt)
311
-
312
- print(f"[INFO] Generated prompt: {generated_prompt}")
313
-
314
- # Final verification - check length and ensure it's English
315
- if len(generated_prompt) > 10:
316
- return generated_prompt
317
- else:
318
- return "Failed to generate a valid prompt"
319
-
320
- except Exception as e:
321
- print(f"[ERROR] Prompt generation failed: {str(e)}")
322
- import traceback
323
- traceback.print_exc()
324
- return f"Error generating prompt: {str(e)}"
325
-
326
-
327
-
328
 
329
- # ------------------------------------------------------------
330
- # SDXL INFERENCE WRAPPER
331
- # ------------------------------------------------------------
332
  MAX_SEED = np.iinfo(np.int32).max
333
  MAX_IMAGE_SIZE = 1216
334
 
 
 
 
 
 
 
 
 
 
 
335
  @spaces.GPU
336
  def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
337
- """
338
- ์ค‘์š”: ํ”„๋กฌํ”„ํŠธ ํ…์ŠคํŠธ์— ํ•œ๊ธ€์ด๋‚˜ ๋‹ค๋ฅธ ๋น„์˜์–ด ๋ฌธ์ž๊ฐ€ ์žˆ์œผ๋ฉด ๋ฐ˜๋“œ์‹œ ์˜์–ด๋กœ ๋ฒˆ์—ญํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค.
339
- """
340
- print(f"[DEBUG] Original prompt received: '{prompt}'")
341
- print(f"[DEBUG] Original negative prompt received: '{negative_prompt}'")
342
-
343
- # ํ•œ๊ธ€/๋น„์˜์–ด ๊ฐ์ง€ ๋ฐ ๋ฒˆ์—ญ (prompt)
344
- has_korean = bool(re.search(r'[\uac00-\ud7a3]', prompt))
345
- has_non_english = bool(re.search(r'[^\x00-\x7F]', prompt))
346
-
347
- if has_korean or has_non_english:
348
- print(f"[ALERT] ๋น„์˜์–ด ํ”„๋กฌํ”„ํŠธ ๊ฐ์ง€๋จ: '{prompt}'")
349
-
350
- # Cohere๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์ง์ ‘ ๋ฒˆ์—ญ
351
- if coh_client:
352
- try:
353
- # ๋ฒˆ์—ญ์šฉ ์‹œ์Šคํ…œ ํ”„๋กฌํ”„ํŠธ
354
- trans_system = "You are a translator. Translate the following text to English accurately. Only provide the translation, no comments or explanations."
355
-
356
- # ๋ฒˆ์—ญ ์š”์ฒญ
357
- trans_response = coh_client.chat(
358
- model="command-r-plus-08-2024",
359
- messages=[
360
- {"role": "system", "content": [{"type": "text", "text": trans_system}]},
361
- {"role": "user", "content": [{"type": "text", "text": prompt}]}
362
- ],
363
- temperature=0.1
364
- )
365
-
366
- # ์‘๋‹ต ์ฒ˜๋ฆฌ - ๋‹ค์–‘ํ•œ ์†์„ฑ ์ ‘๊ทผ ๋ฐฉ๋ฒ• ์‹œ๋„
367
- translated_prompt = None
368
-
369
- # ๋ฐฉ๋ฒ• 1: response.text
370
- try:
371
- if hasattr(trans_response, 'text'):
372
- translated_prompt = trans_response.text
373
- print("[DEBUG] ๋ฐฉ๋ฒ• 1 (text ์†์„ฑ) ์„ฑ๊ณต")
374
- except:
375
- pass
376
-
377
- # ๋ฐฉ๋ฒ• 2: response.response
378
- if translated_prompt is None:
379
- try:
380
- if hasattr(trans_response, 'response'):
381
- translated_prompt = trans_response.response
382
- print("[DEBUG] ๋ฐฉ๋ฒ• 2 (response ์†์„ฑ) ์„ฑ๊ณต")
383
- except:
384
- pass
385
-
386
- # ๋ฐฉ๋ฒ• 3: response dictionary access
387
- if translated_prompt is None:
388
- try:
389
- # ์‘๋‹ต์ด dictionary์ธ ๊ฒฝ์šฐ
390
- if isinstance(trans_response, dict) and 'text' in trans_response:
391
- translated_prompt = trans_response['text']
392
- print("[DEBUG] ๋ฐฉ๋ฒ• 3 (dictionary access) ์„ฑ๊ณต")
393
- except:
394
- pass
395
-
396
- # ๋ฐฉ๋ฒ• 4: ๋ฌธ์ž์—ด ๋ณ€ํ™˜ ํ›„ ํŒŒ์‹ฑ
397
- if translated_prompt is None:
398
- try:
399
- response_str = str(trans_response)
400
- print(f"[DEBUG] Response structure: {response_str[:200]}...")
401
-
402
- # text= ํŒจํ„ด ์ฐพ๊ธฐ
403
- match = re.search(r"text=['\"](.*?)['\"]", response_str)
404
- if match:
405
- translated_prompt = match.group(1)
406
- print("[DEBUG] ๋ฐฉ๋ฒ• 4 (์ •๊ทœ์‹ ํŒŒ์‹ฑ) ์„ฑ๊ณต")
407
-
408
- # content ํŒจํ„ด ์ฐพ๊ธฐ
409
- if not translated_prompt and 'content=' in response_str:
410
- match = re.search(r"content=['\"](.*?)['\"]", response_str)
411
- if match:
412
- translated_prompt = match.group(1)
413
- print("[DEBUG] ๋ฐฉ๋ฒ• 4.1 (content ์ •๊ทœ์‹) ์„ฑ๊ณต")
414
- except Exception as parse_err:
415
- print(f"[DEBUG] ์ •๊ทœ์‹ ํŒŒ์‹ฑ ์˜ค๋ฅ˜: {parse_err}")
416
-
417
- # ์ตœ์ข… ๊ฒฐ๊ณผ ํ™•์ธ
418
- if translated_prompt:
419
- translated_prompt = translated_prompt.strip()
420
- print(f"[SUCCESS] ๋ฒˆ์—ญ๋จ: '{prompt}' -> '{translated_prompt}'")
421
- prompt = translated_prompt
422
- else:
423
- # ๋งˆ์ง€๋ง‰ ์ˆ˜๋‹จ: ์ „์ฒด ์‘๋‹ต ๊ตฌ์กฐ ๋กœ๊น…
424
- print(f"[DEBUG] Full response type: {type(trans_response)}")
425
- print(f"[DEBUG] Full response dir: {dir(trans_response)}")
426
- print(f"[DEBUG] Could not extract translation, keeping original prompt")
427
- except Exception as e:
428
- print(f"[ERROR] ํ”„๋กฌํ”„ํŠธ ๋ฒˆ์—ญ ์‹คํŒจ: {str(e)}")
429
- import traceback
430
- traceback.print_exc()
431
- # ๋ฒˆ์—ญ ์‹คํŒจ ์‹œ ์›๋ณธ ์œ ์ง€
432
 
433
- # ํ•œ๊ธ€/๋น„์˜์–ด ๊ฐ์ง€ ๋ฐ ๋ฒˆ์—ญ (negative_prompt)
434
- has_korean = bool(re.search(r'[\uac00-\ud7a3]', negative_prompt))
435
- has_non_english = bool(re.search(r'[^\x00-\x7F]', negative_prompt))
436
-
437
- if has_korean or has_non_english:
438
- print(f"[ALERT] ๋น„์˜์–ด ๋„ค๊ฑฐํ‹ฐ๋ธŒ ํ”„๋กฌํ”„ํŠธ ๊ฐ์ง€๋จ: '{negative_prompt}'")
439
-
440
- # Cohere๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์ง์ ‘ ๋ฒˆ์—ญ (์œ„์™€ ๋™์ผํ•œ ๋ฐฉ์‹์œผ๋กœ)
441
- if coh_client:
442
- try:
443
- trans_system = "You are a translator. Translate the following text to English accurately. Only provide the translation, no comments or explanations."
444
-
445
- trans_response = coh_client.chat(
446
- model="command-r-plus-08-2024",
447
- messages=[
448
- {"role": "system", "content": [{"type": "text", "text": trans_system}]},
449
- {"role": "user", "content": [{"type": "text", "text": negative_prompt}]}
450
- ],
451
- temperature=0.1
452
- )
453
-
454
- # ๋‹ค์–‘ํ•œ ๋ฐฉ๋ฒ•์œผ๋กœ ์‘๋‹ต ์ฒ˜๋ฆฌ (ํ”„๋กฌํ”„ํŠธ ์ฒ˜๋ฆฌ์™€ ๋™์ผ)
455
- translated_negative = None
456
-
457
- # ๊ฐ์ข… ์ ‘๊ทผ ๋ฐฉ๋ฒ• (๋™์ผํ•œ ๋กœ์ง ์ ์šฉ)
458
- try:
459
- if hasattr(trans_response, 'text'):
460
- translated_negative = trans_response.text
461
- elif hasattr(trans_response, 'response'):
462
- translated_negative = trans_response.response
463
- elif isinstance(trans_response, dict) and 'text' in trans_response:
464
- translated_negative = trans_response['text']
465
- else:
466
- response_str = str(trans_response)
467
- match = re.search(r"text=['\"](.*?)['\"]", response_str)
468
- if match:
469
- translated_negative = match.group(1)
470
- elif 'content=' in response_str:
471
- match = re.search(r"content=['\"](.*?)['\"]", response_str)
472
- if match:
473
- translated_negative = match.group(1)
474
- except Exception as parse_err:
475
- print(f"[DEBUG] ๋„ค๊ฑฐํ‹ฐ๋ธŒ ํŒŒ์‹ฑ ์˜ค๋ฅ˜: {parse_err}")
476
-
477
- if translated_negative:
478
- translated_negative = translated_negative.strip()
479
- print(f"[SUCCESS] ๋„ค๊ฑฐํ‹ฐ๋ธŒ ๋ฒˆ์—ญ๋จ: '{negative_prompt}' -> '{translated_negative}'")
480
- negative_prompt = translated_negative
481
- except Exception as e:
482
- print(f"[ERROR] ๋„ค๊ฑฐํ‹ฐ๋ธŒ ํ”„๋กฌํ”„ํŠธ ๋ฒˆ์—ญ ์‹คํŒจ: {str(e)}")
483
-
484
- print(f"[INFO] ์ตœ์ข… ์‚ฌ์šฉ๋  ํ”„๋กฌํ”„ํŠธ: '{prompt}'")
485
- print(f"[INFO] ์ตœ์ข… ์‚ฌ์šฉ๋  ๋„ค๊ฑฐํ‹ฐ๋ธŒ ํ”„๋กฌํ”„ํŠธ: '{negative_prompt}'")
486
-
487
- if len(prompt.split()) > 60:
488
- print("[WARN] Prompt >60 words โ€” CLIP may truncate it.")
489
-
490
  if randomize_seed:
491
  seed = random.randint(0, MAX_SEED)
492
 
493
  generator = torch.Generator(device=device).manual_seed(seed)
494
-
495
  try:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
496
  output_image = pipe(
497
  prompt=prompt,
498
  negative_prompt=negative_prompt,
@@ -500,204 +88,98 @@ def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance
500
  num_inference_steps=num_inference_steps,
501
  width=width,
502
  height=height,
503
- generator=generator,
504
  ).images[0]
505
- return output_image, seed
 
506
  except RuntimeError as e:
507
- print(f"[ERROR] Diffusion failed โ†’ {e}")
508
- return Image.new("RGB", (width, height), color=(0, 0, 0)), seed
 
 
509
 
510
- # Function to select a random example prompt
511
- def get_random_prompt():
512
- return random.choice(prompt_examples)
513
 
514
- # ------------------------------------------------------------
515
- # UI LAYOUT + THEME (Enhanced Visual Design)
516
- # ------------------------------------------------------------
517
  css = """
518
- body {background: linear-gradient(135deg, #f2e6ff 0%, #e6f0ff 100%); color: #222; font-family: 'Noto Sans', sans-serif;}
519
- #col-container {margin: 0 auto; max-width: 768px; padding: 15px; background: rgba(255, 255, 255, 0.8); border-radius: 15px; box-shadow: 0 8px 32px rgba(31, 38, 135, 0.2);}
520
- .gr-button {background: #7fbdf6; color: #fff; border-radius: 8px; transition: all 0.3s ease; font-weight: bold;}
521
- .gr-button:hover {background: #5a9ae6; transform: translateY(-2px); box-shadow: 0 5px 15px rgba(0,0,0,0.1);}
522
- #prompt-box textarea {font-size: 1.1rem; height: 9rem !important; background: #fff; color: #222; border-radius: 10px; border: 1px solid #d1c1e0;}
523
- .boost-btn {background: #ff7eb6; margin-top: 5px;}
524
- .boost-btn:hover {background: #ff5aa5;}
525
- .random-btn {background: #9966ff; margin-top: 5px;}
526
- .random-btn:hover {background: #8040ff;}
527
- .container {animation: fadeIn 0.5s ease-in-out;}
528
- .title {color: #6600cc; text-shadow: 1px 1px 2px rgba(0,0,0,0.1);}
529
- .gr-form {border: none !important; background: transparent !important;}
530
- .gr-input {border-radius: 8px !important;}
531
- .gr-slider {height: 12px !important;}
532
- .gr-slider .handle {height: 20px !important; width: 20px !important;}
533
- .panel {border-radius: 12px; overflow: hidden; box-shadow: 0 4px 15px rgba(0,0,0,0.1);}
534
- .gr-image {border-radius: 12px; overflow: hidden; transition: all 0.3s ease;}
535
- .gr-image:hover {transform: scale(1.02); box-shadow: 0 8px 25px rgba(0,0,0,0.15);}
536
- @keyframes fadeIn {
537
- from {opacity: 0; transform: translateY(20px);}
538
- to {opacity: 1; transform: translateY(0);}
539
  }
540
- .gr-accordion {border-radius: 10px; overflow: hidden; transition: all 0.3s ease;}
541
- .gr-accordion:hover {box-shadow: 0 5px 15px rgba(0,0,0,0.1);}
542
  """
543
 
 
544
 
545
- author_note = (
546
- "**โ„น๏ธ This code provides a test environment to experiment with the censorship limits and technical possibilities of AI-based adult content generation. It serves as an educational tool that offers researchers and creators the opportunity to test various prompt methods and understand the ethical boundaries of image generation technology. Uncensored, natural motion video generation service has also been developed but will be released in the future. You can enter prompts in any language - Korean, English, Japanese, Chinese, or any other language - and our AI will automatically translate and generate images based on your input.**"
547
- )
548
-
549
-
550
- # Function to boost prompt with LLM
551
- def boost_prompt(keyword):
552
- if not keyword or keyword.strip() == "":
553
- return "Please enter a keyword or theme first"
554
-
555
- if coh_client is None:
556
- return "Cohere API token not set. Please set the COH_API environment variable."
557
-
558
- print(f"[INFO] Generating boosted prompt for keyword: {keyword}")
559
- prompt = generate_prompts(keyword)
560
-
561
- # Final verification that we're only returning valid content
562
- if isinstance(prompt, str) and len(prompt) > 10 and not prompt.startswith("Error") and not prompt.startswith("Failed"):
563
- return prompt.strip()
564
- else:
565
- return "Failed to generate a suitable prompt. Please try again with a different keyword."
566
-
567
 
568
- with gr.Blocks(css=css, theme=gr.themes.Soft()) as demo:
569
- gr.Markdown(
570
- f"""
571
- ## ๐Ÿ–Œ๏ธ NSFW Uncensored Text & Imagery: AI Limits Explorer
572
- {author_note}
573
- """, elem_classes=["title"]
574
- )
575
-
576
- with gr.Group(elem_classes="model-description"):
577
- gr.HTML("""
578
- <p>
579
- <strong>Models Use cases: </strong><br>
580
- </p>
581
- <div style="display: flex; justify-content: center; align-items: center; gap: 10px; flex-wrap: wrap; margin-top: 10px; margin-bottom: 20px;">
582
-
583
- <a href="https://huggingface.co/spaces/Heartsync/FREE-NSFW-HUB" target="_blank">
584
- <img src="https://img.shields.io/static/v1?label=huggingface&message=FREE%20NSFW%20HUB&color=%230000ff&labelColor=%23800080&logo=huggingface&logoColor=%23ffa500&style=for-the-badge" alt="badge">
585
- </a>
586
- <a href="https://huggingface.co/spaces/Heartsync/NSFW-Uncensored-Real" target="_blank">
587
- <img src="https://img.shields.io/static/v1?label=Text%20to%20Image%28Real%29&message=NSFW%20Uncensored&color=%230000ff&labelColor=%23800080&logo=Huggingface&logoColor=%23ffa500&style=for-the-badge" alt="badge">
588
- </a>
589
- <a href="https://huggingface.co/spaces/Heartsync/Novel-NSFW" target="_blank">
590
- <img src="https://img.shields.io/static/v1?label=NOVEL%20GENERATOR&message=NSFW%20Uncensored&color=%23ffc0cb&labelColor=%23ffff00&logo=huggingface&logoColor=%23ffa500&style=for-the-badge" alt="badge">
591
- </a>
592
- <a href="https://huggingface.co/spaces/Heartsync/adult" target="_blank">
593
- <img src="https://img.shields.io/static/v1?label=Text%20to%20Image%20to%20Video&message=ADULT&color=%23ff00ff&labelColor=%23000080&logo=Huggingface&logoColor=%23ffa500&style=for-the-badge" alt="badge">
594
- </a>
595
-
596
- <a href="https://huggingface.co/spaces/Heartsync/wan2-1-fast-security" target="_blank">
597
- <img src="https://img.shields.io/static/v1?label=Image%20to%20Video&message=Wan%202.1%20I2V%20Fast&color=%23ffa500&labelColor=%23000080&logo=huggingface&logoColor=white&style=for-the-badge" alt="badge">
598
- </a>
599
-
600
- <a href="https://huggingface.co/spaces/Heartsync/NSFW-Uncensored-video" target="_blank">
601
- <img src="https://img.shields.io/static/v1?label=Image%20to%20Video&message=NSFW%20Uncensored&color=%230000ff&labelColor=%23800080&logo=Huggingface&logoColor=%23ffa500&style=for-the-badge" alt="badge">
602
- </a>
603
- <a href="https://huggingface.co/spaces/Heartsync/NSFW-Uncensored-video2" target="_blank">
604
- <img src="https://img.shields.io/static/v1?label=Image%20to%20Video(Mirror)&message=NSFW%20Uncensored&color=%230000ff&labelColor=%23800080&logo=Huggingface&logoColor=%23ffa500&style=for-the-badge" alt="badge">
605
- </a>
606
- <a href="https://huggingface.co/spaces/Heartsync/NSFW-Uncensored" target="_blank">
607
- <img src="https://img.shields.io/static/v1?label=Text%20to%20Image%28Anime%29&message=NSFW%20Uncensored&color=%230000ff&labelColor=%23800080&logo=Huggingface&logoColor=%23ffa500&style=for-the-badge" alt="badge">
608
- </a>
609
-
610
- </div>
611
- <p>
612
- <small style="opacity: 0.8;">High-quality image generation powered by StableDiffusionXL with video generation capability. Supports long prompts and various artistic styles.</small>
613
- </p>
614
- """)
615
-
616
- # Create state variables to store the current image
617
- current_image = gr.State(None)
618
- current_seed = gr.State(0)
619
-
620
- with gr.Column(elem_id="col-container", elem_classes=["container", "panel"]):
621
- # Add keyword input and boost button
622
- with gr.Row():
623
- keyword_input = gr.Text(
624
- label="Keyword Input",
625
- show_label=True,
626
- max_lines=1,
627
- placeholder="Enter a keyword or theme in any language to generate an optimal prompt",
628
- value="random",
629
- )
630
- boost_button = gr.Button("BOOST", elem_classes=["boost-btn"])
631
- random_button = gr.Button("RANDOM", elem_classes=["random-btn"])
632
-
633
  with gr.Row():
634
  prompt = gr.Text(
635
  label="Prompt",
636
- elem_id="prompt-box",
637
- show_label=True,
638
- max_lines=3, # Increased to 3 lines (3x original)
639
- placeholder="Enter your prompt in any language (Korean, English, Japanese, etc.)",
640
  )
641
- run_button = gr.Button("Generate", scale=0)
642
 
643
- # Image output area
644
- result = gr.Image(label="Generated Image", elem_classes=["gr-image"])
 
 
 
645
 
646
- with gr.Accordion("Advanced Settings", open=False, elem_classes=["gr-accordion"]):
647
  negative_prompt = gr.Text(
648
  label="Negative prompt",
649
  max_lines=1,
650
- placeholder="Enter a negative prompt in any language",
651
- value="text, talk bubble, low quality, watermark, signature",
 
 
 
 
 
 
 
 
652
  )
653
 
654
- seed = gr.Slider(label="Seed", minimum=0, maximum=MAX_SEED, step=1, value=0)
655
  randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
656
 
657
  with gr.Row():
658
- width = gr.Slider(label="Width", minimum=256, maximum=MAX_IMAGE_SIZE, step=32, value=1024)
659
- height = gr.Slider(label="Height", minimum=256, maximum=MAX_IMAGE_SIZE, step=32, value=1024)
 
 
 
 
 
660
 
661
- with gr.Row():
662
- guidance_scale = gr.Slider(label="Guidance scale", minimum=0.0, maximum=20.0, step=0.1, value=7)
663
- num_inference_steps = gr.Slider(label="Inference steps", minimum=1, maximum=28, step=1, value=28)
 
 
 
 
664
 
665
- # Define a function to store the generated image in state
666
- def update_image_state(img, seed_val):
667
- return img, seed_val
 
 
 
 
 
668
 
669
- # Connect boost button to generate prompt
670
- boost_button.click(
671
- fn=boost_prompt,
672
- inputs=[keyword_input],
673
- outputs=[prompt]
674
- )
675
-
676
- # Connect random button to insert random example
677
- random_button.click(
678
- fn=get_random_prompt,
679
- inputs=[],
680
- outputs=[prompt]
681
- )
682
 
683
- # Connect image generation button
684
  run_button.click(
685
  fn=infer,
686
- inputs=[
687
- prompt,
688
- negative_prompt,
689
- seed,
690
- randomize_seed,
691
- width,
692
- height,
693
- guidance_scale,
694
- num_inference_steps,
695
- ],
696
- outputs=[result, current_seed]
697
- ).then(
698
- fn=update_image_state,
699
- inputs=[result, current_seed],
700
- outputs=[current_image, current_seed]
701
  )
702
 
703
- demo.queue().launch()
 
 
1
import spaces
import gradio as gr
import numpy as np
import PIL.Image
from PIL import Image
import random
from diffusers import StableDiffusionXLPipeline
from diffusers import EulerAncestralDiscreteScheduler
import torch
from compel import Compel, ReturnedEmbeddingsType

# Use CUDA when available; everything below (pipeline, generator) follows this device.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Make sure to use torch.float16 consistently throughout the pipeline
pipe = StableDiffusionXLPipeline.from_pretrained(
    "votepurchase/waiREALCN_v14",
    torch_dtype=torch.float16,
    variant="fp16",  # Explicitly use fp16 variant
    use_safetensors=True  # Use safetensors if available
)

# Swap the default scheduler for Euler Ancestral before moving to the device.
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
pipe.to(device)

# Force all components to use the same dtype (avoids fp16/fp32 mismatch errors
# between the text encoders, VAE and UNet during inference).
pipe.text_encoder.to(torch.float16)
pipe.text_encoder_2.to(torch.float16)
pipe.vae.to(torch.float16)
pipe.unet.to(torch.float16)

# Initialize Compel for long-prompt processing: both SDXL tokenizers/encoders
# are registered, pooled embeddings come only from the second encoder, and
# truncate_long_prompts=False lets prompts exceed CLIP's 77-token window.
compel = Compel(
    tokenizer=[pipe.tokenizer, pipe.tokenizer_2],
    text_encoder=[pipe.text_encoder, pipe.text_encoder_2],
    returned_embeddings_type=ReturnedEmbeddingsType.PENULTIMATE_HIDDEN_STATES_NON_NORMALIZED,
    requires_pooled=[False, True],
    truncate_long_prompts=False
)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
39
 
 
 
 
40
MAX_SEED = np.iinfo(np.int32).max
MAX_IMAGE_SIZE = 1216


def process_long_prompt(prompt, negative_prompt=""):
    """Encode a (possibly long) prompt pair with Compel.

    Returns a ``(conditioning, pooled)`` pair of embedding tensors on
    success, or ``(None, None)`` when Compel fails for any reason, so the
    caller can fall back to the pipeline's standard text encoding.
    """
    try:
        # Compel stacks both prompts: row 0 = positive, row 1 = negative.
        conditioning_embeds, pooled_embeds = compel([prompt, negative_prompt])
    except Exception as e:  # deliberate best-effort: any failure -> fallback
        print(f"Long prompt processing failed: {e}, falling back to standard processing")
        return None, None
    return conditioning_embeds, pooled_embeds
53
@spaces.GPU
def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
    """Generate one SDXL image for *prompt*.

    Long prompts (> 60 words or > 300 characters) are first encoded with
    Compel via ``process_long_prompt`` so CLIP's 77-token limit does not
    truncate them; if that encoding fails, the pipeline's standard text
    processing is used instead.

    Returns a PIL.Image — on RuntimeError a black placeholder image of the
    requested size is returned instead of raising.
    """
    # Heuristic for when CLIP truncation would matter (this replaced the old
    # 60-word warning from a previous revision of this file).
    use_long_prompt = len(prompt.split()) > 60 or len(prompt) > 300

    if randomize_seed:
        seed = random.randint(0, MAX_SEED)

    # Explicit per-call generator so results are reproducible for a given seed.
    # NOTE(review): the (possibly randomized) seed is not returned, so the UI
    # cannot display which seed was used — confirm this is intentional.
    generator = torch.Generator(device=device).manual_seed(seed)

    try:
        # Try Compel-based long-prompt processing first if the prompt is long.
        if use_long_prompt:
            print("Using long prompt processing...")
            conditioning, pooled = process_long_prompt(prompt, negative_prompt)

            if conditioning is not None:
                # Compel stacked both prompts: row 0 is the positive prompt,
                # row 1 the negative prompt; slice to keep the batch dim.
                output_image = pipe(
                    prompt_embeds=conditioning[0:1],
                    pooled_prompt_embeds=pooled[0:1],
                    negative_prompt_embeds=conditioning[1:2],
                    negative_pooled_prompt_embeds=pooled[1:2],
                    guidance_scale=guidance_scale,
                    num_inference_steps=num_inference_steps,
                    width=width,
                    height=height,
                    generator=generator
                ).images[0]
                return output_image

        # Fall back to standard processing (short prompt, or Compel failed).
        output_image = pipe(
            prompt=prompt,
            negative_prompt=negative_prompt,
            guidance_scale=guidance_scale,
            num_inference_steps=num_inference_steps,
            width=width,
            height=height,
            generator=generator
        ).images[0]

        return output_image
    except RuntimeError as e:
        # e.g. CUDA OOM — log and hand the UI a blank frame instead of crashing.
        print(f"Error during generation: {e}")
        # Return a blank image with error message
        error_img = Image.new('RGB', (width, height), color=(0, 0, 0))
        return error_img
100
 
 
 
 
101
 
 
 
 
102
# Minimal page styling: center the main column and cap its width.
css = """
#col-container {
    margin: 0 auto;
    max-width: 520px;
}
"""
108
 
109
# ---------------------------------------------------------------
# Gradio UI: prompt row, result image, and an "Advanced Settings"
# accordion wired to infer(). Launched with a request queue.
# ---------------------------------------------------------------
with gr.Blocks(css=css) as demo:

    with gr.Column(elem_id="col-container"):

        with gr.Row():
            prompt = gr.Text(
                label="Prompt",
                show_label=False,
                max_lines=1,
                placeholder="Enter your prompt (long prompts are automatically supported)",
                container=False,
            )

            run_button = gr.Button("Run", scale=0)

        result = gr.Image(label="Result", show_label=False)

        with gr.Accordion("Advanced Settings", open=False):

            negative_prompt = gr.Text(
                label="Negative prompt",
                max_lines=1,
                placeholder="Enter a negative prompt",
                value="nsfw, (low quality, worst quality:1.2), very displeasing, 3d, watermark, signature, ugly, poorly drawn"
            )

            # Seed slider is only honored when "Randomize seed" is unchecked.
            seed = gr.Slider(
                label="Seed",
                minimum=0,
                maximum=MAX_SEED,
                step=1,
                value=0,
            )

            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)

            with gr.Row():
                # Dimensions are capped at MAX_IMAGE_SIZE and stepped by 32
                # (SDXL requires sizes divisible by the VAE scale factor).
                width = gr.Slider(
                    label="Width",
                    minimum=256,
                    maximum=MAX_IMAGE_SIZE,
                    step=32,
                    value=1024,
                )

                height = gr.Slider(
                    label="Height",
                    minimum=256,
                    maximum=MAX_IMAGE_SIZE,
                    step=32,
                    value=1024,
                )

            with gr.Row():
                guidance_scale = gr.Slider(
                    label="Guidance scale",
                    minimum=0.0,
                    maximum=20.0,
                    step=0.1,
                    value=7,
                )

                num_inference_steps = gr.Slider(
                    label="Number of inference steps",
                    minimum=1,
                    maximum=28,
                    step=1,
                    value=28,
                )

        # Argument order must match infer()'s signature exactly.
        run_button.click(
            fn=infer,
            inputs=[prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
            outputs=[result]
        )

demo.queue().launch()