Commit
·
f41c899
1
Parent(s):
db90693
ruff format
Browse files — atlas-export.py: +16 −22
atlas-export.py
CHANGED
@@ -73,57 +73,51 @@ def check_gpu_available() -> bool:
|
|
73 |
|
74 |
|
75 |
def sample_dataset_to_parquet(
|
76 |
-
dataset_id: str,
|
77 |
-
sample_size: int,
|
78 |
split: str = "train",
|
79 |
-
trust_remote_code: bool = False
|
80 |
) -> tuple[Path, Path]:
|
81 |
"""Sample dataset and save to local parquet file."""
|
82 |
from datasets import load_dataset
|
83 |
-
|
84 |
logger.info(f"Pre-sampling {sample_size} examples from {dataset_id}...")
|
85 |
-
|
86 |
# Load with streaming to avoid loading entire dataset
|
87 |
ds = load_dataset(
|
88 |
-
dataset_id,
|
89 |
-
streaming=True,
|
90 |
-
split=split,
|
91 |
-
trust_remote_code=trust_remote_code
|
92 |
)
|
93 |
-
|
94 |
# Shuffle and sample
|
95 |
ds = ds.shuffle(seed=42)
|
96 |
sampled_ds = ds.take(sample_size)
|
97 |
-
|
98 |
# Create temporary directory and parquet file
|
99 |
temp_dir = Path(tempfile.mkdtemp(prefix="atlas_data_"))
|
100 |
parquet_path = temp_dir / "data.parquet"
|
101 |
-
|
102 |
logger.info(f"Saving sampled data to temporary file...")
|
103 |
sampled_ds.to_parquet(str(parquet_path))
|
104 |
-
|
105 |
file_size = parquet_path.stat().st_size / (1024 * 1024) # MB
|
106 |
logger.info(f"Created {file_size:.1f}MB parquet file with {sample_size} samples")
|
107 |
-
|
108 |
return parquet_path, temp_dir
|
109 |
|
110 |
|
111 |
def build_atlas_command(args) -> tuple[list, str, Optional[Path]]:
|
112 |
"""Build the embedding-atlas command with all parameters."""
|
113 |
temp_data_dir = None
|
114 |
-
|
115 |
# If sampling is requested, pre-sample the dataset
|
116 |
if args.sample:
|
117 |
parquet_path, temp_data_dir = sample_dataset_to_parquet(
|
118 |
-
args.dataset_id,
|
119 |
-
args.sample,
|
120 |
-
args.split,
|
121 |
-
args.trust_remote_code
|
122 |
)
|
123 |
dataset_input = str(parquet_path)
|
124 |
else:
|
125 |
dataset_input = args.dataset_id
|
126 |
-
|
127 |
# Use uvx to run embedding-atlas with required dependencies
|
128 |
# Include hf-transfer for faster downloads when HF_HUB_ENABLE_HF_TRANSFER is set
|
129 |
cmd = [
|
@@ -452,7 +446,7 @@ def main():
|
|
452 |
logger.info(f"Using temporary directory: {output_dir}")
|
453 |
|
454 |
temp_data_dir = None # Initialize to avoid UnboundLocalError
|
455 |
-
|
456 |
try:
|
457 |
# Build and run embedding-atlas command
|
458 |
cmd, export_path, temp_data_dir = build_atlas_command(args)
|
@@ -499,7 +493,7 @@ def main():
|
|
499 |
if temp_dir and not args.local_only:
|
500 |
shutil.rmtree(temp_dir)
|
501 |
logger.info("Cleaned up temporary files")
|
502 |
-
|
503 |
# Also clean up the data sampling directory
|
504 |
if temp_data_dir and temp_data_dir.exists():
|
505 |
shutil.rmtree(temp_data_dir)
|
|
|
73 |
|
74 |
|
75 |
def sample_dataset_to_parquet(
|
76 |
+
dataset_id: str,
|
77 |
+
sample_size: int,
|
78 |
split: str = "train",
|
79 |
+
trust_remote_code: bool = False,
|
80 |
) -> tuple[Path, Path]:
|
81 |
"""Sample dataset and save to local parquet file."""
|
82 |
from datasets import load_dataset
|
83 |
+
|
84 |
logger.info(f"Pre-sampling {sample_size} examples from {dataset_id}...")
|
85 |
+
|
86 |
# Load with streaming to avoid loading entire dataset
|
87 |
ds = load_dataset(
|
88 |
+
dataset_id, streaming=True, split=split, trust_remote_code=trust_remote_code
|
|
|
|
|
|
|
89 |
)
|
90 |
+
|
91 |
# Shuffle and sample
|
92 |
ds = ds.shuffle(seed=42)
|
93 |
sampled_ds = ds.take(sample_size)
|
94 |
+
|
95 |
# Create temporary directory and parquet file
|
96 |
temp_dir = Path(tempfile.mkdtemp(prefix="atlas_data_"))
|
97 |
parquet_path = temp_dir / "data.parquet"
|
98 |
+
|
99 |
logger.info(f"Saving sampled data to temporary file...")
|
100 |
sampled_ds.to_parquet(str(parquet_path))
|
101 |
+
|
102 |
file_size = parquet_path.stat().st_size / (1024 * 1024) # MB
|
103 |
logger.info(f"Created {file_size:.1f}MB parquet file with {sample_size} samples")
|
104 |
+
|
105 |
return parquet_path, temp_dir
|
106 |
|
107 |
|
108 |
def build_atlas_command(args) -> tuple[list, str, Optional[Path]]:
|
109 |
"""Build the embedding-atlas command with all parameters."""
|
110 |
temp_data_dir = None
|
111 |
+
|
112 |
# If sampling is requested, pre-sample the dataset
|
113 |
if args.sample:
|
114 |
parquet_path, temp_data_dir = sample_dataset_to_parquet(
|
115 |
+
args.dataset_id, args.sample, args.split, args.trust_remote_code
|
|
|
|
|
|
|
116 |
)
|
117 |
dataset_input = str(parquet_path)
|
118 |
else:
|
119 |
dataset_input = args.dataset_id
|
120 |
+
|
121 |
# Use uvx to run embedding-atlas with required dependencies
|
122 |
# Include hf-transfer for faster downloads when HF_HUB_ENABLE_HF_TRANSFER is set
|
123 |
cmd = [
|
|
|
446 |
logger.info(f"Using temporary directory: {output_dir}")
|
447 |
|
448 |
temp_data_dir = None # Initialize to avoid UnboundLocalError
|
449 |
+
|
450 |
try:
|
451 |
# Build and run embedding-atlas command
|
452 |
cmd, export_path, temp_data_dir = build_atlas_command(args)
|
|
|
493 |
if temp_dir and not args.local_only:
|
494 |
shutil.rmtree(temp_dir)
|
495 |
logger.info("Cleaned up temporary files")
|
496 |
+
|
497 |
# Also clean up the data sampling directory
|
498 |
if temp_data_dir and temp_data_dir.exists():
|
499 |
shutil.rmtree(temp_data_dir)
|