|
import json |
|
import os |
|
|
|
def read_dataset(file_path):
    """Load a JSONL file into a list of parsed records.

    Args:
        file_path: Path to a JSON-lines file (one JSON object per line).

    Returns:
        List of parsed objects; an empty list if the file cannot be
        read or contains malformed JSON (the error is printed, not raised).
    """
    try:
        with open(file_path, 'r', encoding='utf-8') as file:
            # Skip blank lines so trailing newlines don't abort the whole load.
            data = [json.loads(line) for line in file if line.strip()]
        print(f"Dataset loaded successfully from {file_path}.")
        return data
    # Narrow the catch: OSError covers missing/unreadable files,
    # JSONDecodeError covers malformed lines. Anything else is a bug
    # and should surface instead of being swallowed.
    except (OSError, json.JSONDecodeError) as e:
        print(f"Error reading dataset from {file_path}: {e}")
        return []
|
|
|
def shard_dataset(dataset, num_shards):
    """Split dataset into exactly num_shards contiguous, near-equal parts.

    The first ``len(dataset) % num_shards`` shards receive one extra item,
    so shard sizes differ by at most one. Order is preserved.

    Args:
        dataset: Sequence of records to split.
        num_shards: Desired number of shards (must be >= 1).

    Returns:
        A list of exactly ``num_shards`` lists (some may be empty when
        ``num_shards > len(dataset)``).
    """
    # NOTE: the previous slicing approach raised ValueError when
    # num_shards > len(dataset) (step of 0) and could return MORE than
    # num_shards shards when the remainder spilled into several extra
    # chunks. divmod-based sizing guarantees exactly num_shards parts.
    base, extra = divmod(len(dataset), num_shards)
    shards = []
    start = 0
    for i in range(num_shards):
        size = base + (1 if i < extra else 0)
        shards.append(dataset[start:start + size])
        start += size
    print(f"Dataset sharded into {num_shards} parts.")
    return shards
|
|
|
def write_shards(shards, output_dir):
    """Write each shard as a JSONL file named shard_<k>.jsonl (1-based).

    Args:
        shards: Iterable of shards, each an iterable of JSON-serializable
            records.
        output_dir: Directory to write into; created if missing.
    """
    # exist_ok avoids the check-then-create race of `if not exists: makedirs`.
    os.makedirs(output_dir, exist_ok=True)

    for i, shard in enumerate(shards):
        shard_file = os.path.join(output_dir, f'shard_{i+1}.jsonl')
        with open(shard_file, 'w', encoding='utf-8') as file:
            for item in shard:
                json.dump(item, file)
                file.write('\n')
        print(f"Shard {i+1} written to {shard_file}.")
|
|
|
def main():
    """Run the pipeline end to end: load the JSONL dataset, shard it,
    and write the shards out, reporting progress on stdout."""
    input_file = 'path_to_processed_dataset.jsonl'
    output_dir = 'sharded_dataset'
    num_shards = 33

    dataset = read_dataset(input_file)
    # Guard clause: nothing to do when the load failed or the file was empty.
    if not dataset:
        print("No dataset to process.")
        return
    write_shards(shard_dataset(dataset, num_shards), output_dir)
    print("All shards have been successfully written.")


if __name__ == "__main__":
    main()
|
|