Upload app.py
app.py CHANGED
@@ -14,7 +14,7 @@ pipelines = {lang_code: KPipeline(lang_code=lang_code, model=False) for lang_cod
 pipelines['a'].g2p.lexicon.golds['kokoro'] = 'kˈOkəɹO'
 pipelines['b'].g2p.lexicon.golds['kokoro'] = 'kˈQkəɹQ'
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=30)
 def forward_gpu(ps, ref_s, speed):
     return models[True](ps, ref_s, speed)
 
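For context on the changed decorator: `@spaces.GPU` comes from the `spaces` package used on ZeroGPU Spaces. It attaches a GPU only while the decorated function runs, and `duration` declares the per-call time budget in seconds (set to 30 here). A minimal standalone sketch of the pattern follows; the function body and tensor are illustrative and not taken from app.py:

import spaces
import torch

@spaces.GPU(duration=30)  # request a ZeroGPU device for up to ~30 s per call
def gpu_square(x: torch.Tensor) -> torch.Tensor:
    # CUDA is available inside the decorated call; do the work on the GPU
    # and move the result back before the device is released.
    return (x.to('cuda') ** 2).cpu()

# e.g. gpu_square(torch.ones(3)) from a Gradio event handler

Declaring a duration close to the real inference time is generally preferable, since shorter requested durations tend to get higher priority in the ZeroGPU queue.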
@@ -70,6 +70,7 @@ def generate_all(text, voice='af_heart', speed=1, use_gpu=CUDA_AVAILABLE):
             else:
                 raise gr.Error(e)
         yield 24000, audio.numpy()
+        yield 24000, torch.zeros(1).numpy()
 
 random_texts = {}
 for lang in ['en']:
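For context on the added yield: `generate_all` is a generator handler, and in Gradio each yielded `(sample_rate, samples)` pair can be played back chunk by chunk by a streaming `gr.Audio` output; the new `torch.zeros(1)` line yields a one-sample silent chunk after the synthesized audio. A minimal sketch of that streaming pattern, assuming a streaming Audio output wired to a button; the sine-tone generator and component names are illustrative, not from app.py:

import gradio as gr
import numpy as np

SR = 24000  # sample rate used throughout app.py

def stream_chunks():
    # Generator handler: each yield is a (sample_rate, samples) tuple that a
    # streaming gr.Audio output plays as it arrives. The 440 Hz tone stands
    # in for the model's audio chunks.
    t = np.linspace(0, 1, SR, endpoint=False)
    for _ in range(3):
        yield SR, (0.2 * np.sin(2 * np.pi * 440 * t)).astype(np.float32)
    # One-sample silent chunk, mirroring the torch.zeros(1) yield added above.
    yield SR, np.zeros(1, dtype=np.float32)

with gr.Blocks() as demo:
    audio = gr.Audio(streaming=True, autoplay=True)
    gr.Button('Play').click(stream_chunks, inputs=None, outputs=audio)

if __name__ == '__main__':
    demo.launch()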