Sync from GitHub repo
This Space is synced from the GitHub repo: https://github.com/SWivid/F5-TTS. Please submit contributions to the Space there.
- pyproject.toml +1 -1
- src/f5_tts/model/backbones/dit.py +1 -2
pyproject.toml
CHANGED
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "f5-tts"
-version = "1.0.
+version = "1.0.2"
 description = "F5-TTS: A Fairytaler that Fakes Fluent and Faithful Speech with Flow Matching"
 readme = "README.md"
 license = {text = "MIT License"}
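The only change here is the package version bump. As a quick sanity check (a minimal sketch, not part of the commit), the version declared in pyproject.toml can be read back from an installed f5-tts package with the standard library:

from importlib.metadata import version

# Reads the version metadata that pyproject.toml declares for the installed
# f5-tts package; after this bump it should report "1.0.2".
print(version("f5-tts"))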
src/f5_tts/model/backbones/dit.py
CHANGED
@@ -219,8 +219,7 @@ class DiT(nn.Module):
 
         for block in self.transformer_blocks:
             if self.checkpoint_activations:
-                #
-                # After PyTorch 2.4, we must pass the use_reentrant explicitly
+                # https://pytorch.org/docs/stable/checkpoint.html#torch.utils.checkpoint.checkpoint
                 x = torch.utils.checkpoint.checkpoint(self.ckpt_wrapper(block), x, t, mask, rope, use_reentrant=False)
             else:
                 x = block(x, t, mask=mask, rope=rope)
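For context on the checkpointed call, the sketch below shows what use_reentrant=False does in isolation: torch.utils.checkpoint.checkpoint recomputes a block's activations during the backward pass instead of storing them, trading compute for memory. This is an illustrative example only, not the actual DiT code; TinyBlock is a made-up stand-in for the real transformer blocks, which also take t, mask, and rope.

import torch
import torch.nn as nn
from torch.utils.checkpoint import checkpoint

class TinyBlock(nn.Module):
    # Hypothetical stand-in for a transformer block.
    def __init__(self, dim: int = 64):
        super().__init__()
        self.ff = nn.Sequential(nn.Linear(dim, dim * 4), nn.GELU(), nn.Linear(dim * 4, dim))

    def forward(self, x):
        return x + self.ff(x)

blocks = nn.ModuleList(TinyBlock() for _ in range(4))
x = torch.randn(2, 16, 64, requires_grad=True)

for block in blocks:
    # Non-reentrant checkpointing: intermediate activations inside the block
    # are recomputed during backward rather than kept in memory.
    x = checkpoint(block, x, use_reentrant=False)

x.sum().backward()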