Update app.py
Browse files
app.py
CHANGED
@@ -12,9 +12,12 @@ openai.api_key = None # Will be set by the user through the UI
# Check if a CUDA-capable GPU is available; drives both model placement and
# which model variants get instantiated below.
CUDA_AVAILABLE = torch.cuda.is_available()

# Initialize the TTS models: always build a CPU model (key False); additionally
# build a GPU model (key True) only when CUDA is available. Models are put in
# eval mode since this app only runs inference.
models = {
    gpu: KModel().to('cuda' if gpu else 'cpu').eval()
    for gpu in [False] + ([True] if CUDA_AVAILABLE else [])
}

# Build one KPipeline per supported language code. model=False defers model
# attachment — the shared `models` dict above supplies the actual model.
# NOTE: `pipelines` must be defined BEFORE the lexicon overrides below; the
# original version referenced pipelines['a'] without ever defining it.
pipelines = {
    lang_code: KPipeline(lang_code=lang_code, model=False)
    for lang_code in ['a', 'b', 'e', 'f', 'h', 'i', 'j', 'p', 'z']
}

# Load lexicon overrides for specific languages so "kokoro" is pronounced
# correctly ('a' and 'b' presumably are the two English variants — TODO confirm
# against KPipeline's lang_code documentation).
pipelines['a'].g2p.lexicon.golds['kokoro'] = 'kˈOkəɹO'
pipelines['b'].g2p.lexicon.golds['kokoro'] = 'kˈQkəɹQ'