import json
import os


def read_and_process_dataset(file_path, num_shards, output_dir):
    """Split a JSONL dataset into num_shards roughly equal shard files."""
    try:
        with open(file_path, 'r') as file:
            # First pass: count the lines to size the shards, then rewind.
            total_lines = sum(1 for _ in file)
            shard_size = max(1, total_lines // num_shards)
            file.seek(0)

            current_shard = 0
            current_line = 0
            shard = []

            for line in file:
                # Fill the current shard to its quota; the final shard absorbs
                # any remainder so at most num_shards files are written.
                if (current_line < shard_size * (current_shard + 1)
                        or current_shard == num_shards - 1):
                    shard.append(json.loads(line))
                else:
                    write_shard(shard, current_shard, output_dir)
                    shard = [json.loads(line)]
                    current_shard += 1
                current_line += 1

            # Flush the final shard.
            if shard:
                write_shard(shard, current_shard, output_dir)

        print(f"Dataset processed and sharded successfully into {current_shard + 1} parts.")
    except Exception as e:
        print(f"Error processing dataset from {file_path}: {e}")


def write_shard(shard, shard_index, output_dir):
    """Write one shard's records to a JSONL file in output_dir."""
    # Create the output directory on first use; exist_ok avoids a race
    # between an existence check and the creation.
    os.makedirs(output_dir, exist_ok=True)

    shard_file = os.path.join(output_dir, f'shard_{shard_index + 1}.jsonl')
    with open(shard_file, 'w') as file:
        for item in shard:
            json.dump(item, file)
            file.write('\n')
    print(f"Shard {shard_index + 1} written to {shard_file}.")


def main():
    input_file = r"C:\Users\MeMyself\reddit_question_best_answers\processed\synthesized_dataset.jsonl"
    output_dir = r"C:\Users\MeMyself\reddit_question_best_answers\processed"
    num_shards = 33

    read_and_process_dataset(input_file, num_shards, output_dir)


if __name__ == "__main__":
    main()
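# Sanity check (a minimal sketch, assuming the same output_dir and the
# "shard_*.jsonl" naming used by write_shard above): count the shard files
# and their total records to confirm no lines were dropped.
#
#   import glob
#   paths = sorted(glob.glob(os.path.join(output_dir, "shard_*.jsonl")))
#   total = 0
#   for p in paths:
#       with open(p) as f:
#           total += sum(1 for _ in f)
#   print(f"{len(paths)} shards, {total} records")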