import os

from datasets import Dataset
from huggingface_hub import HfApi

# Local copy of the CoLeaf dataset: one sub-directory per label.
coleaf_dir = "/home/goya/CV_Plant_Disease/Datasets/CoLeaf_dataset"

# Target dataset repo on the Hugging Face Hub and a short description for the card.
dataset_name = "bhugxer/CoLeafLabels"
dataset_description = "CoLeaf dataset for fine-tuning Stable Diffusion"

# Collect every image path together with its label (the sub-directory name).
image_paths = []
labels = []

api_token = os.environ.get("HUGGINGFACE_API_TOKEN")
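# Optional guard (assumes the token is always supplied through this environment
# variable rather than a cached `huggingface-cli login`): fail early with a
# clear message instead of at the first Hub call.
if api_token is None:
    raise RuntimeError("HUGGINGFACE_API_TOKEN environment variable is not set")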

# Walk the dataset directory: each sub-directory name is used as the label for
# every image file inside it.
for label in os.listdir(coleaf_dir):
    label_dir = os.path.join(coleaf_dir, label)
    if os.path.isdir(label_dir):
        for image_file in os.listdir(label_dir):
            image_path = os.path.join(label_dir, image_file)
            image_paths.append(image_path)
            labels.append(label)

# Start from a path/label table; the image bytes are attached below.
dataset = Dataset.from_dict({"image_path": image_paths, "label": labels})


def load_image(example):
    """Read the raw image bytes for a single example."""
    with open(example["image_path"], "rb") as f:
        image_data = f.read()
    return {"image": image_data}


# Attach the raw bytes of every image, using four worker processes.
dataset = dataset.map(load_image, batched=False, num_proc=4)

# Drop the local file paths before publishing.
dataset = dataset.remove_columns("image_path")

# Authenticated client for the Hugging Face Hub.
api = HfApi(token=api_token)
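# Make sure the target dataset repo exists before uploading into it (assumes it
# may not have been created yet); exist_ok=True keeps the script re-runnable.
api.create_repo(repo_id=dataset_name, repo_type="dataset", exist_ok=True)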

# Upload the contents of the current working directory to the dataset repo.
# The client above is already authenticated, so no extra token argument is needed.
api.upload_folder(
    folder_path=".",
    repo_id=dataset_name,
    repo_type="dataset",
    ignore_patterns=["**/.*", "**/__pycache__"],
)

dataset_card = f"""
# {dataset_name}

{dataset_description}

## Dataset Structure

- `image`: The image data.
- `label`: The label or text description of the image.

## Dataset Info

- Number of examples: {len(dataset)}
- Image format: Various (PNG, JPEG, etc.)

## License

[Insert license information here]

## Citation

[Insert citation information here]
"""

# Write the dataset card locally, then upload it as the repo's README.
with open("README.md", "w") as f:
    f.write(dataset_card)

api.upload_file(
    path_or_fileobj="README.md",
    path_in_repo="README.md",
    repo_id=dataset_name,
    repo_type="dataset",
)
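
# The folder upload above only publishes files from the working directory, not
# the processed in-memory `dataset`. If the processed examples themselves should
# also live in the repo (an assumption, not required by the steps above), the
# Dataset object can be pushed directly:
dataset.push_to_hub(dataset_name, token=api_token)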