import multiprocessing
import pathlib
import random
import traceback
import urllib.parse

import orjson
import tqdm
import typer
from lxml import etree
from sqlitedict import SqliteDict

app = typer.Typer()
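
# Processing pipeline, as wired up by the commands below:
#   stackstack       -> decadence per site folder -> parsed/<site>_raw.jsonl
#   convo_stackstack -> convo_processor per file  -> convo/<stem>_convo.jsonl
#   qa_stackstack    -> qa_processor per file     -> qa/<stem>_qa.jsonl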


def fast_iter(context, func, *args, **kwargs):
    """
    http://lxml.de/parsing.html#modifying-the-tree
    Based on Liza Daly's fast_iter
    http://www.ibm.com/developerworks/xml/library/x-hiperfparse/
    See also http://effbot.org/zone/element-iterparse.htm
    """
    for event, elem in context:
        func(elem, *args, **kwargs)
        elem.clear()
        for ancestor in elem.xpath("ancestor-or-self::*"):
            while ancestor.getprevious() is not None:
                del ancestor.getparent()[0]
    del context
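

# Small helpers (added for readability; they mirror the inline expressions
# that were repeated throughout the row processors below).
def clean_text(elem):
    """Decode a percent-encoded Text attribute and normalise CRLF line endings."""
    return urllib.parse.unquote(elem.get("Text", "")).replace("\r\n", "\n")


def int_attr(element, name, default=0):
    """Read an optional integer attribute from a row element, with a default."""
    value = element.get(name)
    return int(value) if value else default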


@app.command()
def decadence(stack_folder: pathlib.Path, output_file: pathlib.Path):
    post_history = stack_folder / "PostHistory.xml"
    if not post_history.exists():
        raise FileNotFoundError(f"{post_history} does not exist")

    print("Working on", stack_folder.name)
    # SqliteDict won't create the parent directory on its own.
    pathlib.Path("tmp").mkdir(exist_ok=True)
    sql_dict_posts = SqliteDict(
        f"tmp/{stack_folder.name}.hist.tmp.sqlite", flag="n", tablename="dict_hist"
    )
    try:
        with tqdm.tqdm() as pbar:
            ctx = etree.iterparse(str(post_history), tag="row")
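
            # PostHistoryTypeId values handled below, per the Stack Exchange
            # data dump schema: 1-3 initial title/body/tags, 4-6 edited
            # title/body/tags, 7-9 rollback title/body/tags. The moderation
            # events map onto boolean flags on the record:
            FLAG_EVENTS = {
                10: ("closed", True),
                11: ("closed", False),
                12: ("deleted", True),
                13: ("deleted", False),
                14: ("locked", True),
                15: ("locked", False),
            }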

            def element_processor2(elem):
                p_type = int(elem.get("PostHistoryTypeId"))
                pid: str = elem.get("PostId")
                if p_type in range(1, 4):
                    # Initial revision of the title, body, or tags.
                    post_data = sql_dict_posts[pid] if pid in sql_dict_posts else {}
                    if p_type == 1:
                        post_data["title"] = clean_text(elem)
                    elif p_type == 2:
                        post_data["body"] = clean_text(elem)
                    elif p_type == 3:
                        post_data["tags"] = clean_text(elem)
                    sql_dict_posts[pid] = post_data
                    pbar.update(1)
                elif p_type in range(4, 10):
                    # Edit or rollback of the title, body, or tags.
                    if pid in sql_dict_posts:
                        post_data = sql_dict_posts[pid]
                    else:
                        # Edit seen before any initial revision: mark the record partial.
                        post_data = {"partial": True}
                    if p_type in [4, 7]:
                        post_data["title"] = clean_text(elem)
                    elif p_type in [5, 8]:
                        post_data["body"] = clean_text(elem)
                    elif p_type in [6, 9]:
                        post_data["tags"] = clean_text(elem)
                    sql_dict_posts[pid] = post_data
                    pbar.update(1)
                elif p_type == 35:
                    if pid in sql_dict_posts:
                        post_data = sql_dict_posts[pid]
                    else:
                        print(
                            f"[W] {pid}, {stack_folder.name} requested a redirect, but the redirect target doesn't seem to exist?"
                        )
                        post_data = {}
                    if not post_data.get("partial", False):
                        print(f"[I] {pid}, {stack_folder.name} not partial?")
                        return
                    print(post_data)
                elif p_type in FLAG_EVENTS:
                    if pid not in sql_dict_posts:
                        return
                    post_data = sql_dict_posts[pid]
                    if not post_data:
                        raise Exception(f"Empty history record for post {pid}")
                    key, value = FLAG_EVENTS[p_type]
                    post_data[key] = value
                    sql_dict_posts[pid] = post_data
                    pbar.update(1)

                # Commit periodically so pending writes don't pile up in memory.
                if pbar.n % 5000 == 0 and pbar.n != 0:
                    sql_dict_posts.commit()

            fast_iter(ctx, element_processor2)
    except Exception as e:
        print("[!] ERR:", "".join(traceback.format_exception(e)))
    # Drop posts with no usable body text and posts that were deleted.
    flagged = set()
    for k, v in sql_dict_posts.items():
        if not v.get("body", "").strip():
            flagged.add(k)
        if v.get("deleted", False):
            flagged.add(k)
    for fg in flagged:
        del sql_dict_posts[fg]
    sql_dict_posts.sync()
    print("Reconstruct done!")
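
    # Second pass: read Posts.xml and attach question metadata to the
    # reconstructed text (PostTypeId 1 is a question in the dump schema).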
    posts = stack_folder / "Posts.xml"
    if not posts.exists():
        raise FileNotFoundError(f"{posts} does not exist")

    main_qn_posts = {}
    try:
        with tqdm.tqdm() as bar:
            ctx2 = etree.iterparse(str(posts), tag="row")

            def element_processor(element):
                pid = element.get("Id")
                p_type = int(element.get("PostTypeId"))

                if p_type == 1:
                    if pid not in sql_dict_posts:
                        main_qn_posts[pid] = {"Stub": True}
                        print(
                            f"[!] Question: {int(element.get('Id'))} {stack_folder.name} does not exist but is referenced?"
                        )
                        return
                    main_qn_posts[pid] = sql_dict_posts[pid]
                    main_qn_posts[pid]["Stub"] = False
                    main_qn_posts[pid]["Id"] = pid
                    main_qn_posts[pid]["CreationDate"] = element.get("CreationDate")
                    main_qn_posts[pid]["Score"] = int(element.get("Score"))
                    accepted = element.get("AcceptedAnswerId")
                    main_qn_posts[pid]["Accepted"] = int(accepted) if accepted else None
                    main_qn_posts[pid]["Counts"] = {
                        "Views": int_attr(element, "ViewCount"),
                        "Answers": int_attr(element, "AnswerCount"),
                        "Comments": int_attr(element, "CommentCount"),
                    }
                    bar.update(1)

            fast_iter(ctx2, element_processor)
    except Exception as e:
        print("[!] ERR:", "".join(traceback.format_exception(e)))

    print("2nd Pass Posts Done")
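
    # Third pass: read Posts.xml again and attach each answer (PostTypeId 2)
    # to its parent question, carrying the reconstructed body along.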
    try:
        with tqdm.tqdm() as bar:
            ctx3 = etree.iterparse(str(posts), tag="row")

            def element_processor3(element):
                pid = element.get("Id")
                p_type = int(element.get("PostTypeId"))
                if p_type == 2:
                    parent_id = element.get("ParentId")
                    if parent_id not in main_qn_posts:
                        print(
                            f"[!] Answer: {int(element.get('Id'))} {stack_folder.name} has no parent attached to it!"
                        )
                        return
                    answers = main_qn_posts[parent_id].setdefault("answers", [])
                    if pid not in sql_dict_posts:
                        print(
                            f"[!] Answer: {int(element.get('Id'))} {stack_folder.name} has no PostHistory.xml record associated with it!"
                        )
                        return
                    rec_answer = sql_dict_posts[pid]
                    rec_answer["Id"] = int(element.get("Id"))
                    rec_answer["Score"] = int_attr(element, "Score")
                    rec_answer["Counts"] = {
                        "Views": int_attr(element, "ViewCount"),
                        "Comments": int_attr(element, "CommentCount"),
                    }
                    answers.append(rec_answer)

            fast_iter(ctx3, element_processor3)
    except Exception as e:
        print("[!] ERR:", "".join(traceback.format_exception(e)))
    print("3rd Pass done!")
    try:
        # Write one question record per line (JSONL), then drop the temp db.
        with open(output_file, "wb") as f:
            for v in main_qn_posts.values():
                f.write(orjson.dumps(v) + b"\n")
        sql_dict_posts.close()
        pathlib.Path(f"tmp/{stack_folder.name}.hist.tmp.sqlite").unlink()
    except Exception as e:
        print("[!] ERR:", "".join(traceback.format_exception(e)))
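

# Each conversation record pairs the question (title plus body) as the user
# turn with one non-negatively-scored answer as the assistant turn.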
@app.command()
def convo_processor(base_file: pathlib.Path, qa_file: pathlib.Path):
    with open(qa_file, "wb") as fw, open(base_file, "rb") as f:
        for line in f:
            data = orjson.loads(line)
            if not data.get("body") or not data.get("title"):
                continue
            # Avoid repeating the title when the body already starts with it.
            if data["body"].lower().startswith(data["title"].lower()):
                question = f'{data["title"].strip()}\n\n{data["body"][len(data["title"]):].strip()}'
            else:
                question = f'{data["title"].strip()}\n\n{data["body"].strip()}'
            for answer in data.get("answers", []):
                if "Score" not in answer:
                    print("no score?", data)
                    continue
                if answer["Score"] >= 0:
                    zz = {
                        "conversation": [
                            {"sender": "user", "message": question},
                            # Use the answer text, not the whole answer dict.
                            {"sender": "assistant", "message": answer["body"]},
                        ],
                        "meta": {
                            "q_score": data["Score"],
                            "a_score": answer["Score"],
                            "s_score": data["Score"] + answer["Score"],
                        },
                    }
                    fw.write(orjson.dumps(zz) + b"\n")
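

# The QA variant renders each pair through a random template drawn from the
# formats file, which is expected to hold a JSON list of strings containing
# {question} and {answer} placeholders.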
@app.command()
def qa_processor(base_file: pathlib.Path, qa_file: pathlib.Path, formats: pathlib.Path):
    question_formats = orjson.loads(formats.read_bytes())
    with open(qa_file, "wb") as fw, open(base_file, "rb") as f:
        for line in f:
            data = orjson.loads(line)
            if not data.get("body") or not data.get("title"):
                continue
            if data["body"].lower().startswith(data["title"].lower()):
                question = f'{data["title"].strip()}\n\n{data["body"][len(data["title"]):].strip()}'
            else:
                question = f'{data["title"].strip()}\n\n{data["body"].strip()}'
            for answer in data.get("answers", []):
                if "Score" not in answer:
                    print("no score?", data)
                    continue
                if answer["Score"] >= 0:
                    fmt = random.choice(question_formats)
                    fmt = fmt.format(question=question, answer=answer["body"])
                    zz = {
                        "text": fmt,
                        "meta": {
                            "q_score": data["Score"],
                            "a_score": answer["Score"],
                        },
                    }
                    fw.write(orjson.dumps(zz) + b"\n")


def err_handler(e):
    # Single-argument print_exception requires Python 3.10+.
    traceback.print_exception(e)


@app.command()
def convo_stackstack(folder: pathlib.Path):
    # Workers write here; create the directory up front so open() doesn't fail.
    pathlib.Path("convo").mkdir(exist_ok=True)
    with multiprocessing.Pool(processes=64) as pool:
        fn = []
        for item in folder.iterdir():
            if item.is_file() and item.suffix == ".jsonl":
                fn.append(
                    pool.apply_async(
                        convo_processor,
                        (item, pathlib.Path("convo") / f"{item.stem}_convo.jsonl"),
                        error_callback=err_handler,
                    )
                )
        for f in fn:
            f.wait()
        pool.close()
        pool.join()


@app.command()
def qa_stackstack(folder: pathlib.Path):
    pathlib.Path("qa").mkdir(exist_ok=True)
    with multiprocessing.Pool(processes=64) as pool:
        fn = []
        for item in folder.iterdir():
            if item.is_file() and item.suffix == ".jsonl":
                fn.append(
                    pool.apply_async(
                        qa_processor,
                        (
                            item,
                            pathlib.Path("qa") / f"{item.stem}_qa.jsonl",
                            pathlib.Path("staccato_format.json"),
                        ),
                        error_callback=err_handler,
                    )
                )
        for f in fn:
            f.wait()
        pool.close()
        pool.join()


@app.command()
def stackstack(folder: pathlib.Path):
    pathlib.Path("parsed").mkdir(exist_ok=True)
    with multiprocessing.Pool(processes=64) as pool:
        fn = []
        for item in folder.iterdir():
            # Skip the meta.* site dumps; only process the main sites.
            if item.is_dir() and "meta." not in item.name:
                fn.append(
                    pool.apply_async(
                        decadence,
                        (item, pathlib.Path("parsed") / f"{item.name}_raw.jsonl"),
                        error_callback=err_handler,
                    )
                )
        for f in fn:
            f.wait()
        pool.close()
        pool.join()


if __name__ == "__main__":
    # Guard the entry point; with multiprocessing, an unguarded app() call
    # would be re-executed in every spawned worker.
    app()