Spaces:
Running
on
Zero
Running
on
Zero
Upload folder using huggingface_hub
Browse files
- modules/SD15/SDToken.py +7 -2
modules/SD15/SDToken.py
CHANGED
@@ -244,8 +244,13 @@ class SDTokenizer:
|
|
244 |
if not os.path.exists(tokenizer_path):
|
245 |
raise ValueError(f"Tokenizer path does not exist: {tokenizer_path}")
|
246 |
|
247 |
-
try:
|
248 |
-
|
|
|
|
|
|
|
|
|
|
|
249 |
except Exception as e:
|
250 |
raise RuntimeError(f"Failed to load tokenizer from {tokenizer_path}: {str(e)}")
|
251 |
|
|
|
244 |
if not os.path.exists(tokenizer_path):
|
245 |
raise ValueError(f"Tokenizer path does not exist: {tokenizer_path}")
|
246 |
|
247 |
+
try:
|
248 |
+
if tokenizer_path is None:
|
249 |
+
# Use pre-bundled tokenizer
|
250 |
+
self.tokenizer = CLIPTokenizerFast.from_pretrained("openai/clip-vit-large-patch14")
|
251 |
+
else:
|
252 |
+
# Try local tokenizer files
|
253 |
+
self.tokenizer = CLIPTokenizerFast.from_pretrained(tokenizer_path)
|
254 |
except Exception as e:
|
255 |
raise RuntimeError(f"Failed to load tokenizer from {tokenizer_path}: {str(e)}")
|
256 |
|