from pathlib import Path
import shutil

import datasets


def main():
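    # Don't reload or write cached copies of the transformed datasets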
    datasets.disable_caching()

    # Divide the MP3 files by language. Hugging Face requires each subset and its metadata to be in
    # a separate directory. However, for backwards compatibility, we also want to keep the top-level
    # "audio" directory and replace the MP3 files with symlinks into the subsets.
    subsets_dir = Path("subsets")
    if subsets_dir.exists():
        shutil.rmtree(subsets_dir)
    subsets_dir.mkdir()

    # Back up the directory with the original MP3 files
    if not Path("audio_orig").exists():
        Path("audio").rename("audio_orig")
    elif Path("audio").exists():
        shutil.rmtree("audio")
    Path("audio").mkdir(exist_ok=True)

    # Create language subsets and:
    # - hard link the files from audio_orig into the subsets
    # - add symlinks from the top-level audio directory into the subsets
    for config_name in ["all", "en", "es", "de", "fr"]:
        subset_dir = Path(".") if config_name == "all" else subsets_dir / config_name
        subset_dir.mkdir(exist_ok=True)

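        # Load the test split for this config via the repository's loading script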
        dataset = datasets.load_dataset(
            "./loader.py",
            config_name,
            trust_remote_code=True,
            split="test",
        )
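        # Record where each MP3 lives relative to this subset's metadata file: the "all"
        # config points into the per-language subsets, the language configs point at
        # their own audio/ directory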
        if config_name == "all":
            dataset = dataset.add_column(
                "file_name",
                [
                    str(Path("subsets") / lg / "audio" / f"{n}.mp3")
                    for lg, n in zip(dataset["language"], dataset["name"])
                ],
            )
        else:
            dataset = dataset.add_column(
                "file_name", [str(Path("audio") / f"{n}.mp3") for n in dataset["name"]]
            )
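        # Drop the decoded audio column; file_name already points at the MP3 on disk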
        dataset = dataset.remove_columns("audio")

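        # Write the subset's metadata as metadata.jsonl, keyed by the file_name column set above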
        dataset.to_json(subset_dir / "metadata.jsonl")

        if config_name != "all":
            (subset_dir / "audio").mkdir()
            for name in dataset["name"]:
                (subset_dir / "audio" / f"{name}.mp3").hardlink_to(
                    Path("audio_orig") / f"{name}.mp3"
                )
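                # The symlink target is resolved relative to the symlink's own directory
                # ("audio/"), hence the leading ".."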
                (Path("audio") / f"{name}.mp3").symlink_to(
                    Path("..") / subset_dir / "audio" / f"{name}.mp3"
                )


if __name__ == "__main__":
    main()