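"""Index subreddits from Reddit JSONL dumps and filter dumps by subreddit.

Commands: `preindex` extracts subreddit records from each dump file,
`combine` merges them into a diskcache index, `selection` exports
subreddits above a subscriber minimum, `percentile` reports subscriber
percentiles, and `filter-folder` splits dumps into per-subreddit files.
"""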
from io import BufferedWriter
import multiprocessing
import pathlib

import diskcache
from loguru import logger
import msgspec
import orjson
import rich
import tqdm
import typer

from RedditModels import RedditSubmit

app = typer.Typer()

GB = 2**30


def read_lines_jsonl(file_name, chunk_size=GB // 2):
    """Stream newline-delimited records from a file in large binary chunks.

    Yields (line, offset) pairs. The offset comes from file_handle.tell() and
    points at the end of the current chunk, not at the individual line.
    """
    with open(file_name, "rb") as file_handle:
        buffer = b""
        while True:
            chunk = file_handle.read(chunk_size)
            if not chunk:
                break
            lines = (buffer + chunk).split(b"\n")
            # The last element may be a partial line; carry it into the next chunk.
            for line in lines[:-1]:
                yield line.strip(), file_handle.tell()
            buffer = lines[-1]
        if buffer.strip():
            # Emit the trailing line when the file does not end with a newline.
            yield buffer.strip(), file_handle.tell()
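
# Usage sketch (hypothetical dump file name):
#   for raw_line, offset in read_lines_jsonl("RS_2023-01.jsonl"):
#       record = orjson.loads(raw_line)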


def preindex_processor(input_file: pathlib.Path):
    # Extract the subreddit record from every submission in one dump file,
    # deduplicated by subreddit id, and write them to a per-file JSONL index.
    decoder = msgspec.json.Decoder()
    encoder = msgspec.json.Encoder()
    with open(f"IndexSubReddit/{input_file.stem}.jsonl", "wb") as f:
        buffer = {}
        for line, _offset in read_lines_jsonl(input_file):
            submission = msgspec.convert(decoder.decode(line), RedditSubmit)
            if submission.sub.id:
                buffer[submission.sub.id] = msgspec.to_builtins(submission.sub)
            if len(buffer) > 100_000:
                for sub in buffer.values():
                    f.write(encoder.encode(sub) + b"\n")
                logger.debug(f"PartTxn: {len(buffer)} {input_file}")
                buffer = {}
        # Flush whatever remains after the last partial batch.
        for sub in buffer.values():
            f.write(encoder.encode(sub) + b"\n")
        logger.debug(f"FinalTxn: {len(buffer)} {input_file}")


def error_cb(err):
    # Called by Pool.apply_async when a worker raises; logs the exception.
    logger.exception(err)


@app.command()
def preindex(submission_folder: pathlib.Path):
    # exist_ok=True makes a separate is_dir() check unnecessary.
    pathlib.Path("IndexSubReddit").mkdir(parents=True, exist_ok=True)
    with multiprocessing.Pool(processes=32) as pooled:
        results = [
            pooled.apply_async(
                preindex_processor, args=(file,), error_callback=error_cb
            )
            for file in sorted(submission_folder.iterdir())
        ]
        for result in results:
            result.wait()
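
# apply_async results carry no return value here; failures surface through
# error_cb, and the final wait() loop just blocks until every file is done.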


@app.command()
def combine():
    # Merge the per-file indexes into one diskcache index keyed by name.
    indexes = sorted(pathlib.Path("IndexSubReddit").iterdir())
    decoder = msgspec.json.Decoder()
    main_index = diskcache.Index("IndexSubReddit/main")
    for index in indexes:
        # Skip the diskcache directory itself; only merge the .jsonl files.
        if not (index.is_file() and index.suffix == ".jsonl"):
            continue
        buffer = []
        for line, _offset in tqdm.tqdm(read_lines_jsonl(index), desc=index.name):
            sub_json = decoder.decode(line)
            k = sub_json["name"]
            indexed_subreddit = main_index.get(k)
            if (
                indexed_subreddit
                and indexed_subreddit["subs"]
                and sub_json["subs"] is None
            ):
                # The stored record has a subscriber count and the new one
                # does not; keep the stored record.
                continue
            elif (
                indexed_subreddit
                and indexed_subreddit != sub_json
                and sub_json["type"] is None
            ):
                # The new record lost its type; keep the stored record.
                continue
            elif indexed_subreddit and indexed_subreddit != sub_json:
                if indexed_subreddit["type"] is None and sub_json["type"] is not None:
                    pass  # New record adds a type; overwrite below.
                elif indexed_subreddit["subs"] is None and sub_json["subs"]:
                    pass  # New record adds a subscriber count; overwrite below.
                elif (
                    indexed_subreddit["subs"] is not None
                    and sub_json["subs"] is not None
                    and sub_json["subs"] != indexed_subreddit["subs"]
                ):
                    pass  # Subscriber count changed; overwrite below.
                else:
                    logger.debug(f"{indexed_subreddit} {sub_json}")
            buffer.append((k, sub_json))
            if len(buffer) > 100_000:
                with main_index.transact():
                    for sub_key, sub in buffer:
                        main_index[sub_key] = sub
                logger.debug(f"PartTxn: {len(buffer)} {index}")
                buffer = []
        with main_index.transact():
            for sub_key, sub in buffer:
                main_index[sub_key] = sub
        logger.debug(f"FinalTxn: {len(buffer)} {index}")


@app.command()
def selection(submin: int):
    # Export every subreddit with more than `submin` subscribers to JSONL.
    main_index = diskcache.Index("IndexSubReddit/main")
    with open("sub_selects.jsonl", "wb") as fp:
        for v in tqdm.tqdm(main_index.values(), total=len(main_index)):
            if v["subs"] and v["subs"] > submin:
                # OPT_APPEND_NEWLINE keeps the output valid JSONL.
                fp.write(orjson.dumps(v, option=orjson.OPT_APPEND_NEWLINE))


@app.command()
def percentile():
    # numpy is only needed for this report, so import it lazily.
    import numpy

    if not pathlib.Path("sub_count.json").exists():
        main_index = diskcache.Index("IndexSubReddit/main")
        subscribers = []
        logger.add("percentile_{time}.log")
        for v in tqdm.tqdm(main_index.values(), total=len(main_index)):
            if v["subs"]:
                subscribers.append(v["subs"])
                logger.debug(f"Add {v}")
            else:
                logger.debug(f"NoAdd {v}")
        # Cache the raw counts so later runs skip the full index scan.
        pathlib.Path("sub_count.json").write_bytes(orjson.dumps(subscribers))
    else:
        subscribers = orjson.loads(pathlib.Path("sub_count.json").read_bytes())
    # Ignore subreddits with at most one subscriber.
    nuarr = numpy.array([i for i in subscribers if i > 1])

    def filter_and_percentile(p: float):
        min_subs = numpy.percentile(nuarr, p)
        selected_subs = nuarr[nuarr > min_subs].size
        rich.print(
            f"{selected_subs}/{nuarr.size} ({selected_subs / nuarr.size:.2%}) "
            f"[Min Subs: {min_subs}] @ {p} Percentile"
        )

    for pct in [95, 90, 75, 50, 25, 10, 5]:
        filter_and_percentile(pct)
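
# numpy.percentile(nuarr, p) returns the subscriber count below which roughly
# p% of subreddits fall; the report counts how many sit strictly above it.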


@app.command()
def filter_folder(
    comment_sub_folder: pathlib.Path, output_folder: pathlib.Path, mode: str
):
    decoder = msgspec.json.Decoder()
    subreddit_names = set()

    if mode not in ("Submission", "Comments"):
        raise ValueError(f"Expecting one of ['Submission', 'Comments']. Got: {mode}")
    # Load the allow-list of subreddits produced by `selection`.
    with open("sub_selects.jsonl", "rb") as f:
        for line in f:
            subreddit = decoder.decode(line)
            if (
                subreddit["type"] != "user"
                and subreddit["subs"]
                and subreddit["subs"] > 0
            ):
                subreddit_names.add(subreddit["name"])
    logger.debug(f"Filtering: {len(subreddit_names)}")
    subreddit_fps: dict[str, BufferedWriter] = {}
    output_folder.mkdir(parents=True, exist_ok=True)
    for file in sorted(comment_sub_folder.iterdir()):
        writ = 0
        for data, _offset in tqdm.tqdm(read_lines_jsonl(file), desc=file.name):
            submission = decoder.decode(data)
            subname = (submission.get("sub") or {}).get("name")

            if "parent_id" not in submission and mode == "Comments":
                raise Exception(
                    "User Error: Expecting `parent_id` for Comments. Did you accidentally point to a submission folder?"
                )
            if "link_flair" not in submission and mode == "Submission":
                raise Exception(
                    "User Error: Expecting `link_flair` for Submissions. Did you accidentally point to a comments folder?"
                )
            if subname not in subreddit_names:
                continue
            out_path = output_folder / f"{subname}_{mode}.jsonl"
            if subname in subreddit_fps:
                writer = subreddit_fps[subname]
            else:
                # Only skip a pre-existing file if we have not opened it in
                # this run; otherwise every record after the first would be
                # dropped as soon as the file appears on disk.
                if out_path.exists():
                    continue
                writer = subreddit_fps[subname] = open(out_path, "wb")
            # read_lines_jsonl strips the newline, so restore one per record.
            if not data.endswith(b"\n"):
                data += b"\n"
            writer.write(data)
            writ += 1
        logger.debug(f"Written {writ} from {file.name}")
    for k, v in subreddit_fps.items():
        logger.debug(f"{k}: Written {v.tell()} bytes.")
        v.close()
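
# Typical end-to-end order (hypothetical script name and paths; typer turns
# filter_folder into the `filter-folder` command):
#   python reddit_subs.py preindex ./dumps/submissions
#   python reddit_subs.py combine
#   python reddit_subs.py selection 1000
#   python reddit_subs.py percentile
#   python reddit_subs.py filter-folder ./dumps/comments ./filtered Comments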


if __name__ == "__main__":
    app()