Spaces: Running on Zero
Upload folder using huggingface_hub

Files changed:
- app.py +2 -0
- modules/SD15/SDToken.py +1 -1
app.py
CHANGED
@@ -3,6 +3,7 @@ import gradio as gr
 import sys
 import os
 from PIL import Image
+import spaces
 sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))

 from modules.user.pipeline import pipeline
@@ -36,6 +37,7 @@ def load_generated_images():
         return []
     return batch_images

+@spaces.GPU
 def generate_images(
     prompt: str,
     width: int = 512,
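The app.py change wires the Space for ZeroGPU: the spaces package is imported and the GPU-bound entry point is wrapped with @spaces.GPU, so a GPU is attached only while generate_images runs. Below is a minimal sketch of that pattern; the diffusers pipeline and the Gradio interface are illustrative stand-ins, not the repo's modules.user.pipeline code.

# Minimal sketch of the ZeroGPU pattern the diff adopts. The model backend
# (diffusers) and the interface layout are assumptions; the real app uses
# modules.user.pipeline.
import gradio as gr
import spaces
import torch
from diffusers import StableDiffusionPipeline

# The model can be loaded and moved to CUDA at import time; on a ZeroGPU
# Space the device is only actually attached while a @spaces.GPU call runs.
pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
)
pipe.to("cuda")

@spaces.GPU  # request a GPU slot for the duration of this call
def generate(prompt: str, width: int = 512, height: int = 512):
    return pipe(prompt, width=width, height=height).images[0]

demo = gr.Interface(
    fn=generate,
    inputs=[
        gr.Textbox(label="Prompt"),
        gr.Slider(256, 1024, value=512, step=64, label="Width"),
        gr.Slider(256, 1024, value=512, step=64, label="Height"),
    ],
    outputs=gr.Image(label="Result"),
)
demo.launch()

If a single call needs more than the default allotment, the decorator also accepts a duration, e.g. @spaces.GPU(duration=120).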
modules/SD15/SDToken.py
CHANGED
@@ -237,7 +237,7 @@ class SDTokenizer:
             `min_length` (int, optional): The minimum length of the input. Defaults to None.
         """
         if tokenizer_path is None:
-            tokenizer_path =
+            tokenizer_path = "_internal/sd1_tokenizer/"
         self.tokenizer = tokenizer_class.from_pretrained(tokenizer_path)
         self.max_length = max_length
         self.min_length = min_length
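The SDToken.py change gives tokenizer_path a concrete default pointing at the tokenizer files bundled in the repo. A small sketch of that fallback, assuming a CLIP tokenizer backend; the helper name and the __file__-based resolution are illustrative, while the diff itself simply assigns the relative path shown above.

# Sketch of the default-path fallback introduced above. CLIPTokenizer and the
# resolution against this file's directory are assumptions for the sketch.
import os
from transformers import CLIPTokenizer

def load_tokenizer(tokenizer_path=None, tokenizer_class=CLIPTokenizer):
    if tokenizer_path is None:
        # Resolve the bundled tokenizer relative to the source tree so the
        # fallback works regardless of the current working directory.
        tokenizer_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "_internal", "sd1_tokenizer"
        )
    return tokenizer_class.from_pretrained(tokenizer_path)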