"""Upload a large tarball to a Hugging Face dataset repo as ~1GB chunks,
or download the chunks back and reassemble them.

Usage:
    python 1_hf_up_and_download.py upload
    python 1_hf_up_and_download.py download
"""
import argparse
import math
import os
import subprocess
import time
from pathlib import Path

from huggingface_hub import create_repo, hf_hub_download, list_repo_files, upload_file


def split_large_file(file_path, chunk_size_mb=1000):
    """Split a large file into chunks of at most `chunk_size_mb` MB."""
    file_path = Path(file_path)
    file_size = os.path.getsize(file_path) / (1024 * 1024)

    if file_size <= chunk_size_mb:
        print(f"File {file_path.name} is {file_size:.2f}MB, no need to split.")
        return [file_path]

    # Note: for "pdfs.tar.gz", Path.stem is "pdfs.tar" (only the final suffix
    # is stripped), so chunks end up named like "pdfs.tar_part_aa".
    chunks_dir = file_path.parent / f"{file_path.stem}_chunks"
    os.makedirs(chunks_dir, exist_ok=True)

    num_chunks = math.ceil(file_size / chunk_size_mb)
    print(f"Splitting {file_path.name} ({file_size:.2f}MB) into {num_chunks} chunks...")

    # The `split` utility appends suffixes "aa", "ab", ..., so a lexicographic
    # sort (and `cat <prefix>_part_*`) recovers the original byte order.
    chunk_prefix = chunks_dir / file_path.stem
    subprocess.run(
        ["split", "-b", f"{chunk_size_mb}m", str(file_path), f"{chunk_prefix}_part_"],
        check=True,  # raise instead of silently continuing if `split` fails
    )

    chunk_files = sorted(chunks_dir.glob(f"{file_path.stem}_part_*"))
    print(f"Created {len(chunk_files)} chunk files in {chunks_dir}")
    return chunk_files


def upload_files(api_token, repo_id):
    """Create the dataset repo (if needed), then upload this script and the
    tarball chunks."""
    try:
        create_repo(
            repo_id=repo_id,
            token=api_token,
            repo_type="dataset",
            private=False,
        )
        print(f"Created repository: {repo_id}")
    except Exception as e:
        print(f"Repository already exists or error occurred: {e}")

    # Give the Hub a moment to register a freshly created repo.
    time.sleep(5)

    try:
        script_path = "1_hf_up_and_download.py"
        print(f"Uploading script: {script_path}")
        upload_file(
            repo_id=repo_id,
            path_or_fileobj=script_path,
            path_in_repo=script_path,
            token=api_token,
            repo_type="dataset",
        )
        print(f"Uploaded {script_path} to {repo_id}/{script_path}")
    except Exception as e:
        print(f"Upload failed for script: {e}")

    local_file = "pdfs.tar.gz"
    chunk_files = split_large_file(local_file)

    # Upload each chunk individually so one failure doesn't abort the rest.
    for i, chunk_file in enumerate(chunk_files):
        try:
            repo_file = chunk_file.name
            print(f"Uploading chunk {i + 1}/{len(chunk_files)}: {repo_file}")
            upload_file(
                repo_id=repo_id,
                path_or_fileobj=str(chunk_file),
                path_in_repo=repo_file,
                token=api_token,
                repo_type="dataset",
            )
            print(f"Uploaded {chunk_file} to {repo_id}/{repo_file}")
        except Exception as e:
            print(f"Upload failed for {chunk_file}: {e}")


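# A possible alternative (assuming a recent huggingface_hub release): push the
# whole chunk directory in one call with upload_folder, e.g.
#
#     from huggingface_hub import upload_folder
#     upload_folder(repo_id=repo_id, folder_path=str(chunk_files[0].parent),
#                   repo_type="dataset", token=api_token)
#
# The per-file loop above keeps retries and failure reporting per chunk.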
def download_files(api_token, repo_id):
    """Download the tarball chunks (or the single tarball) from the repo."""
    try:
        files = list_repo_files(repo_id=repo_id, repo_type="dataset", token=api_token)

        # Chunks are uploaded under names like "pdfs.tar_part_aa" (Path.stem
        # keeps the ".tar" suffix), so match on that prefix.
        chunk_files = [f for f in files if f.startswith("pdfs.tar_part_")]

        if chunk_files:
            print(f"Found {len(chunk_files)} chunk files. Downloading...")
            os.makedirs("chunks", exist_ok=True)

            for file in chunk_files:
                downloaded_path = hf_hub_download(
                    repo_id=repo_id,
                    filename=file,
                    token=api_token,
                    repo_type="dataset",
                    local_dir="chunks",
                    # Deprecated and ignored on recent huggingface_hub
                    # releases; kept for compatibility with older ones.
                    local_dir_use_symlinks=False,
                )
                print(f"Downloaded {file} to {downloaded_path}")

            print("To combine chunks, use: cat chunks/pdfs.tar_part_* > pdfs.tar.gz")
            return
    except Exception as e:
        print(f"Error checking for chunk files: {e}")

    # Fall back to a single-file download (for repos uploaded without chunking).
    try:
        downloaded_path = hf_hub_download(
            repo_id=repo_id,
            filename="pdfs.tar.gz",
            token=api_token,
            repo_type="dataset",
            local_dir=".",
            local_dir_use_symlinks=False,
        )
        print(f"Downloaded pdfs.tar.gz file to {downloaded_path}")
    except Exception as e:
        print(f"Download failed: {e}")


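# Optional helper (not wired into the CLI): a pure-Python, cross-platform
# equivalent of the `cat chunks/pdfs.tar_part_* > pdfs.tar.gz` step suggested
# above, for machines without `cat`. The name and defaults are this sketch's own.
def combine_chunks(chunks_dir="chunks", pattern="pdfs.tar_part_*", output="pdfs.tar.gz"):
    """Concatenate downloaded chunks (in sorted order) into a single file."""
    chunk_paths = sorted(Path(chunks_dir).glob(pattern))
    with open(output, "wb") as out:
        for chunk_path in chunk_paths:
            # Stream in 1MB blocks to keep memory usage flat for large chunks.
            with open(chunk_path, "rb") as src:
                while block := src.read(1024 * 1024):
                    out.write(block)
    print(f"Combined {len(chunk_paths)} chunks into {output}")

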
def main():
    parser = argparse.ArgumentParser(
        description="Upload or download files to/from a remote Hugging Face dataset."
    )
    parser.add_argument(
        "operation",
        choices=["upload", "download"],
        help="Specify the operation: upload or download.",
    )
    args = parser.parse_args()

    # Resolve a token: environment variables first, then the token cached
    # by `huggingface-cli login`.
    API_TOKEN = os.environ.get("HUGGINGFACE_API_TOKEN")
    if not API_TOKEN:
        API_TOKEN = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
    if not API_TOKEN:
        try:
            from huggingface_hub.constants import HF_TOKEN_PATH

            if os.path.exists(HF_TOKEN_PATH):
                with open(HF_TOKEN_PATH, "r") as f:
                    API_TOKEN = f.read().strip()
        except ImportError:
            pass

    if not API_TOKEN:
        raise ValueError(
            "No Hugging Face API token found. Please set the HUGGINGFACE_API_TOKEN "
            "environment variable or log in with `huggingface-cli login`."
        )

    username = "liuganghuggingface"
    repo_id = f"{username}/polymer_semantic_pdfs"

    if args.operation == "upload":
        upload_files(API_TOKEN, repo_id)
    elif args.operation == "download":
        download_files(API_TOKEN, repo_id)


if __name__ == "__main__":
    main()