|
import json
import os
from typing import List, Optional, TYPE_CHECKING

import requests
import tqdm
|
|
|
|
|
def img_root_path(img_id: str):
    """Return the grandparent directory of *img_id*.

    E.g. "root/images/pic.jpg" -> "root" (the dataset root two levels up).
    """
    parent_dir = os.path.dirname(img_id)
    return os.path.dirname(parent_dir)
|
|
|
|
|
if TYPE_CHECKING: |
|
from .dataset_tools_config_modules import DatasetSyncCollectionConfig |
|
|
|
# File extensions (lowercase, with leading dot) treated as images when
# scanning local directories; compared against os.path.splitext(...)[1].lower().
img_exts = ['.jpg', '.jpeg', '.webp', '.png']
|
|
|
class Photo:
    """Record describing one photo on a remote host (Pexels or Unsplash).

    Attributes:
        id: Host-assigned identifier, normalized to ``str``.
        host: Source service name, e.g. "pexels" or "unsplash".
        width: Original image width in pixels.
        height: Original image height in pixels.
        url: Download URL (may carry resize query parameters).
        filename: File name to save the image under locally.
    """

    def __init__(self, id, host, width, height, url, filename):
        # APIs return numeric ids; normalize so comparisons with
        # filename-derived ids (strings) behave consistently.
        self.id = str(id)
        self.host = host
        self.width = width
        self.height = height
        self.url = url
        self.filename = filename

    def __repr__(self):
        # Compact repr for logging/debugging; url/filename omitted for brevity.
        return (f"{type(self).__name__}(id={self.id!r}, host={self.host!r}, "
                f"width={self.width!r}, height={self.height!r})")
|
|
|
|
|
def get_desired_size(img_width: int, img_height: int, min_width: int, min_height: int):
    """Return the smallest (width, height) preserving aspect ratio while
    satisfying BOTH minimum dimensions.

    Args:
        img_width: Source image width in pixels (must be > 0).
        img_height: Source image height in pixels (must be > 0).
        min_width: Minimum acceptable output width.
        min_height: Minimum acceptable output height.

    Returns:
        ``(new_width, new_height)`` as ints (truncated toward zero).

    Note: the previous implementation scaled only one axis to its own
    minimum (height for landscape, width otherwise), which could leave the
    other axis below its minimum whenever min_width != min_height. Taking
    the larger of the two scale factors guarantees both constraints and is
    identical to the old behavior when min_width == min_height.
    """
    scale = max(min_width / img_width, min_height / img_height)
    return int(img_width * scale), int(img_height * scale)
|
|
|
|
|
def get_pexels_images(config: 'DatasetSyncCollectionConfig') -> List[Photo]:
    """Fetch every photo in the configured Pexels collection.

    Walks the collection's paginated media endpoint until no next page is
    reported, then wraps each entry in a :class:`Photo` whose url requests
    a resized rendition sized via ``get_desired_size``.
    """
    headers = {"Authorization": f"{config.api_key}"}
    page_url = f"https://api.pexels.com/v1/collections/{config.collection_id}?page=1&per_page=80&type=photos"

    media = []
    while page_url:
        response = requests.get(page_url, headers=headers)
        response.raise_for_status()
        payload = response.json()
        media.extend(payload['media'])
        # A missing or empty 'next_page' ends the pagination loop.
        page_url = payload.get('next_page')

    photos = []
    for item in media:
        src_width, src_height = item['width'], item['height']
        target_w, target_h = get_desired_size(src_width, src_height, config.min_width, config.min_height)
        original_url = item['src']['original']
        photos.append(Photo(
            id=item['id'],
            host="pexels",
            width=src_width,
            height=src_height,
            url=f"{original_url}?auto=compress&cs=tinysrgb&h={target_h}&w={target_w}",
            filename=os.path.basename(original_url),
        ))
    return photos
|
|
|
|
|
def _parse_link_header(link_header: str) -> dict:
    """Parse an RFC 8288 ``Link`` response header into a {rel: url} dict.

    Example input:
        '<https://api...?page=2>; rel="next", <https://api...?page=9>; rel="last"'
    Entries without a rel parameter are skipped.
    """
    links = {}
    for entry in link_header.split(','):
        parts = entry.split(';')
        if len(parts) < 2:
            continue
        url = parts[0].strip().strip('<>')
        rel = parts[1].strip()
        if rel.startswith('rel='):
            rel = rel[len('rel='):].strip('"')
        links[rel] = url
    return links


def _page_param(url: str) -> int:
    """Return the integer value of the ``page`` query parameter of *url*."""
    query = url.split('?', 1)[1]
    params = dict(pair.split('=', 1) for pair in query.split('&'))
    return int(params['page'])


def get_unsplash_images(config: 'DatasetSyncCollectionConfig') -> List[Photo]:
    """Fetch every photo in the configured Unsplash collection.

    Unsplash paginates via the ``Link`` response header; the last page
    number is derived from the rel="last" URL and the remaining pages are
    fetched with a tqdm progress bar.

    Note: the previous implementation keyed the parsed header on the
    malformed string ``'rel="last'`` and raised KeyError whenever a Link
    header lacked a rel="last" entry; a missing entry now simply means
    there are no further pages.
    """
    headers = {
        "Authorization": f"Client-ID {config.api_key}"
    }

    url = f"https://api.unsplash.com/collections/{config.collection_id}/photos?page=1&per_page=30"
    response = requests.get(url, headers=headers)
    response.raise_for_status()

    all_images = response.json()

    last_page = 1
    link_value = response.headers.get('Link')
    if link_value:
        links = _parse_link_header(link_value)
        if 'last' in links:
            last_page = _page_param(links['last'])

    for page in tqdm.tqdm(range(2, last_page + 1)):
        url = f"https://api.unsplash.com/collections/{config.collection_id}/photos?page={page}&per_page=30"
        response = requests.get(url, headers=headers)
        response.raise_for_status()
        all_images.extend(response.json())

    photos = []
    for image in all_images:
        new_width, new_height = get_desired_size(image['width'], image['height'], config.min_width, config.min_height)
        photos.append(Photo(
            id=image['id'],
            host="unsplash",
            width=image['width'],
            height=image['height'],
            # 'raw' URLs already carry query params, so append with '&'.
            url=f"{image['urls']['raw']}&w={new_width}",
            filename=f"{image['id']}.jpg",
        ))
    return photos
|
|
|
|
|
def get_img_paths(dir_path: str):
    """Return full paths of image files directly inside *dir_path*.

    Creates the directory if it does not exist. Extensions are matched
    case-insensitively against the module's supported image types
    (inlined copy of the module-level ``img_exts`` list).
    """
    os.makedirs(dir_path, exist_ok=True)
    return [
        os.path.join(dir_path, name)
        for name in os.listdir(dir_path)
        if os.path.splitext(name)[1].lower() in ('.jpg', '.jpeg', '.webp', '.png')
    ]
|
|
|
|
|
def get_local_image_ids(dir_path: str):
    """Return the set of ids of images already present in *dir_path*.

    The id is everything before the FIRST dot of each image file name
    (e.g. "12345.jpg" -> "12345"). The directory is created if missing.
    """
    os.makedirs(dir_path, exist_ok=True)
    return {os.path.basename(path).split('.')[0] for path in get_img_paths(dir_path)}
|
|
|
|
|
def get_local_image_file_names(dir_path: str):
    """Return the set of image file names (with extension) in *dir_path*.

    The directory is created if missing; only files whose extension is a
    known image type are included (via ``get_img_paths``).
    """
    os.makedirs(dir_path, exist_ok=True)
    return {os.path.basename(path) for path in get_img_paths(dir_path)}
|
|
|
|
|
def download_image(photo: "Photo", dir_path: str, min_width: int = 1024, min_height: int = 1024,
                   timeout: float = 60.0):
    """Download *photo* into *dir_path* as ``photo.filename``.

    Args:
        photo: Remote photo record (uses .width, .height, .url, .filename, .id).
        dir_path: Destination directory; created if missing.
        min_width: Reject images narrower than this.
        min_height: Reject images shorter than this.
        timeout: Per-request timeout in seconds. Previously no timeout was
            set, so a stalled connection could hang the sync forever.

    Raises:
        ValueError: if the source image is smaller than the minimums.
        requests.HTTPError: on a non-2xx response.
        requests.Timeout: if the download exceeds *timeout*.
    """
    img_width = photo.width
    img_height = photo.height

    if img_width < min_width or img_height < min_height:
        raise ValueError(f"Skipping {photo.id} because it is too small: {img_width}x{img_height}")

    img_response = requests.get(photo.url, timeout=timeout)
    img_response.raise_for_status()
    os.makedirs(dir_path, exist_ok=True)

    filename = os.path.join(dir_path, photo.filename)
    with open(filename, 'wb') as file:
        file.write(img_response.content)
|
|
|
|
|
def update_caption(img_path: str):
    """Migrate a legacy ``<name>.txt`` caption sidecar for *img_path* into a
    ``<name>.json`` sidecar of the form ``{"caption": "..."}``.

    No-op when the .json sidecar already exists. When no .txt sidecar
    exists, a .json with an empty caption is written.

    Fixes two defects in the previous version:
    - ``os.remove`` on the .txt ran unconditionally, raising
      FileNotFoundError for images that never had a caption.
    - The JSON was built with a raw f-string, producing invalid JSON for
      captions containing quotes, backslashes, or newlines; ``json.dump``
      escapes them correctly.
    """
    dir_path = os.path.dirname(img_path)
    base_name = os.path.splitext(os.path.basename(img_path))[0]
    json_path = os.path.join(dir_path, f"{base_name}.json")
    txt_path = os.path.join(dir_path, f"{base_name}.txt")

    if os.path.exists(json_path):
        # Already migrated; leave everything (including any .txt) alone,
        # matching the original early-return behavior.
        return

    caption = ""
    if os.path.exists(txt_path):
        with open(txt_path, 'r') as file:
            caption = file.read()

    with open(json_path, 'w') as file:
        json.dump({"caption": caption}, file)

    # Only remove the legacy sidecar when it actually existed.
    if os.path.exists(txt_path):
        os.remove(txt_path)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|