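"""Download files and metadata from Wikimedia Commons.

The script exposes two typer commands (defined at the bottom of the file):

* ``download INPUT_FILE OUTPUT_FOLDER`` reads a JSONL file with one
  ``{"url": ...}`` object per line and downloads each URL into
  ``OUTPUT_FOLDER`` using 50 concurrent workers.
* ``meta INPUT_FILE OUTPUT_FILE`` reads the same JSONL layout, extracts the
  file titles from the URLs, queries the Commons ``imageinfo``/``extmetadata``
  API in batches of 50, and writes one JSON object per file to ``OUTPUT_FILE``.
"""
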
import pathlib
from urllib.parse import unquote
import anyio
import anyio.streams
import anyio.streams.file
import anyio.streams.memory
import anyio.to_thread
import httpx
import orjson
import tqdm
import typer
from loguru import logger


async def download_file(url: str, session: httpx.AsyncClient, path: pathlib.Path):
    """Stream `url` into a temporary file next to `path`, retrying up to 10 times."""
    tries = 10
    success = False
    if path.exists():
        return
    tmp = path.with_stem(path.stem + "_tmp")
    # A temp file with the same name already exists, so another worker is
    # presumably downloading this file right now; skip it.
    if tmp.exists():
        return
    while tries > 0:
        try:
            async with session.stream("GET", url) as stream, await anyio.open_file(
                tmp, "wb"
            ) as f:
                if stream.status_code not in [302, 200, 404]:
                    tries -= 1
                    await anyio.sleep(5)
                    logger.warning(
                        f"Failed to download: {path.name}: {stream.status_code}"
                    )
                elif stream.status_code == 404:
                    logger.warning(f"404 {path.name}, breaking.")
                    break
                else:
                    async for bytes_data in stream.aiter_bytes():
                        await f.write(bytes_data)
                    success = True
                    break
        except Exception as e:
            logger.exception(e)
            tries -= 1
            await anyio.sleep(5)
    if not success and tmp.exists():
        tmp.unlink()
    elif not tmp.exists():
        logger.warning("Temp file unexpectedly disappeared?")
    else:
        try:
            tmp.rename(path)
        except Exception as e:
            logger.warning(f"{path.exists(), tmp.exists()} Hotfix Exception: {e}")
        else:
            logger.info(f"{path.name} Downloaded")


async def batched_meta_worker(
    queue: anyio.streams.memory.MemoryObjectReceiveStream,
    write: anyio.streams.memory.MemoryObjectSendStream,
):
    """Query the Commons API for extmetadata of each received batch of titles."""
    root_url = "https://commons.wikimedia.org/w/api.php"
    session = httpx.AsyncClient()
    while True:
        chunk_titles: list[str] = await queue.receive()
        if chunk_titles is None:
            break
        query = {
            "action": "query",
            "format": "json",
            "formatversion": 2,
            "prop": "imageinfo",
            "titles": "|".join([f"File:{file}" for file in chunk_titles]),
            "iiprop": "extmetadata",
            "iiextmetadatalanguage": "en",
            "uselang": "content",
        }
        tries = 10
        r = None
        while tries > 0:
            try:
                r = await session.get(root_url, params=query)
                break
            except Exception as e:
                tries -= 1
                logger.warning(e)
                await anyio.sleep(2)
        if r is None:
            logger.warning("Giving up on chunk after repeated request failures.")
            continue
        if r.status_code == 414:
            logger.warning(
                f'Request too long: {len("|".join([f"File:{file}" for file in chunk_titles]))}'
            )
        if r.status_code not in range(200, 300):
            logger.warning(f"Caught: {r.status_code}")
            continue
        response = r.json()
        # logger.debug(response["query"]["pages"])
        for response_page in response["query"]["pages"]:
            if (
                "missing" in response_page and response_page["missing"]
            ) or "imageinfo" not in response_page:
                await write.send(
                    {
                        "url": f"https://commons.wikimedia.org/wiki/{response_page['title']}",
                        "miss": True,
                        "meta": None,
                        "wkdata": None,
                    }
                )
                continue
            # logger.debug(response_page)
            ext = response_page["imageinfo"][0]["extmetadata"]
            root = f"https://commons.wikimedia.org/wiki/{response_page['title']}"
            await write.send(
                {
                    "url": root,
                    "miss": False,
                    "meta": ext,
                    "wkdata": f'M{response_page["pageid"]}',
                }
            )


async def file_worker(
    queue: anyio.streams.memory.MemoryObjectReceiveStream, output_folder: pathlib.Path
):
    """Receive URLs from `queue` and download each one into `output_folder`."""
    session = httpx.AsyncClient(follow_redirects=True)
    while True:
        url: str = await queue.receive()
        if url is None:
            break
        filename = unquote(url.split("/")[-1]).replace("_", " ")
        if len(filename) > 128:
            # Truncate over-long names while keeping the file extension.
            trunc_stem = pathlib.Path(filename).stem[:128].rstrip()
            filename = pathlib.Path(filename).with_stem(trunc_stem).name
        await download_file(url, session, output_folder / filename)


async def download_async(file: pathlib.Path, output_folder: pathlib.Path):
    """Read one JSON object per line from `file` and download every `url` field."""
    concurrent = 50
    async with anyio.create_task_group() as task_group, await anyio.open_file(
        file, "rb"
    ) as fp:
        url_send, url_recv = anyio.create_memory_object_stream[str | None](
            max_buffer_size=100
        )
        for _ in range(concurrent):
            task_group.start_soon(file_worker, url_recv, output_folder)
        async for line in fp:
            load = await anyio.to_thread.run_sync(orjson.loads, line)
            await url_send.send(load["url"])
        # One sentinel per worker so every worker exits its receive loop.
        for _ in range(concurrent):
            await url_send.send(None)


async def meta_writer(
    output_file: pathlib.Path,
    inputstream: anyio.streams.memory.MemoryObjectReceiveStream,
):
    """Serialize received metadata dicts to `output_file`, one JSON object per line."""
    pbar = tqdm.tqdm()
    async with await anyio.open_file(output_file, "wb") as fp:
        while True:
            data: dict = await inputstream.receive()
            if data is None:
                break
            dump_bytes: bytes = await anyio.to_thread.run_sync(
                orjson.dumps, data, None, orjson.OPT_APPEND_NEWLINE
            )
            await fp.write(dump_bytes)
            pbar.update(1)


async def fetch_meta(file: pathlib.Path, output_file: pathlib.Path):
    """Read URLs from `file`, batch their file titles, and fetch Commons metadata for them."""
    async with anyio.create_task_group() as task_group, await anyio.open_file(
        file, "rb"
    ) as fp:
        title_send, title_recv = anyio.create_memory_object_stream[list[str] | None](
            max_buffer_size=100
        )
        response_send, response_recv = anyio.create_memory_object_stream[dict | None](
            max_buffer_size=100
        )
        for _ in range(5):
            task_group.start_soon(batched_meta_worker, title_recv, response_send)
        task_group.start_soon(meta_writer, output_file, response_recv)
        chunk = []
        async for line in fp:
            load = await anyio.to_thread.run_sync(orjson.loads, line)
            chunk.append(unquote(load["url"].split("/")[-1].replace("_", " ")))
            if len(chunk) >= 50:
                await title_send.send(chunk)
                chunk = []
        # Flush the final partial chunk, then shut down the workers and the writer.
        if chunk:
            await title_send.send(chunk)
        for _ in range(5):
            await title_send.send(None)
        await response_send.send(None)


app = typer.Typer(pretty_exceptions_enable=False)


@app.command()
def download(input_file: pathlib.Path, output_folder: pathlib.Path):
    """Download every URL listed in INPUT_FILE (JSONL) into OUTPUT_FOLDER."""
    logger.add("logs/download-wiki.log", rotation="10 MB")
    output_folder = output_folder.resolve()
    if not output_folder.is_dir():
        output_folder.mkdir(exist_ok=True, parents=True)
    anyio.run(download_async, input_file, output_folder, backend="trio")


@app.command()
@logger.catch()
def meta(input_file: pathlib.Path, output_file: pathlib.Path):
    """Fetch Commons metadata for every URL listed in INPUT_FILE and write it to OUTPUT_FILE."""
    logger.add("logs/download-wiki.log", rotation="10 MB")
    anyio.run(fetch_meta, input_file, output_file, backend="trio")


if __name__ == "__main__":
    app()
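
# Example invocations (illustrative only; the file names below are placeholders,
# and the script is assumed to be saved as download_wiki.py; the input JSONL is
# expected to contain one {"url": ...} object per line, as parsed above):
#
#   python download_wiki.py meta commons_urls.jsonl commons_meta.jsonl
#   python download_wiki.py download commons_urls.jsonl ./commons_files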