Tonic committed on
Commit
a25b795
·
1 Parent(s): cdde8a4

Create shard.py

Browse files
Files changed (1) hide show
  1. shard.py +49 -0
shard.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import os
3
+
4
def read_dataset(file_path):
    """Load a JSON-lines dataset into a list of records.

    Parameters
    ----------
    file_path : str
        Path to a .jsonl file containing one JSON object per line.

    Returns
    -------
    list
        The parsed records, or an empty list when the file cannot be
        read or contains invalid JSON (best-effort: errors are reported
        on stdout, matching the original contract).
    """
    try:
        # Explicit UTF-8: don't depend on the platform default encoding.
        with open(file_path, 'r', encoding='utf-8') as file:
            # Skip blank lines so a trailing newline (common in .jsonl
            # files) does not crash json.loads.
            data = [json.loads(line) for line in file if line.strip()]
        print(f"Dataset loaded successfully from {file_path}.")
        return data
    except (OSError, json.JSONDecodeError) as e:
        # Narrowed from a bare `except Exception` so programming errors
        # are no longer silently swallowed; I/O and parse failures keep
        # the original best-effort behavior.
        print(f"Error reading dataset from {file_path}: {e}")
        return []
13
+
14
def shard_dataset(dataset, num_shards):
    """Split *dataset* into at most *num_shards* contiguous shards.

    The dataset is cut into slices of ``len(dataset) // num_shards``
    items; any remainder is folded into the final shard so no record is
    dropped.

    Parameters
    ----------
    dataset : list
        Records to split.
    num_shards : int
        Desired number of shards; must be positive.

    Returns
    -------
    list[list]
        At most ``num_shards`` slices covering every input record (fewer
        when the dataset has fewer than ``num_shards`` records).

    Raises
    ------
    ValueError
        If ``num_shards`` is not positive.
    """
    if num_shards <= 0:
        raise ValueError("num_shards must be a positive integer")
    # max(1, ...) fixes a crash in the original: when the dataset has
    # fewer items than num_shards, len(dataset) // num_shards is 0 and
    # range(0, n, 0) raises ValueError.
    shard_size = max(1, len(dataset) // num_shards)
    shards = [dataset[i:i + shard_size] for i in range(0, len(dataset), shard_size)]
    if len(shards) > num_shards:
        # The remainder produced exactly one extra (short) shard; merge
        # it into the last kept shard so exactly num_shards remain.
        shards[num_shards - 1].extend(shards.pop())
    # Report the actual shard count (the original always printed
    # num_shards even when fewer shards were produced).
    print(f"Dataset sharded into {len(shards)} parts.")
    return shards
21
+
22
def write_shards(shards, output_dir):
    """Write each shard to its own JSON-lines file in *output_dir*.

    Files are named ``shard_1.jsonl`` .. ``shard_N.jsonl``, one JSON
    object per line.

    Parameters
    ----------
    shards : list[list]
        Shards as produced by ``shard_dataset``; each item must be
        JSON-serializable.
    output_dir : str
        Directory to write into; created if it does not exist.
    """
    if not os.path.exists(output_dir):
        # exist_ok=True closes the check-then-create race of the
        # original exists()/makedirs() pair (another process may create
        # the directory in between).
        os.makedirs(output_dir, exist_ok=True)
        print(f"Created output directory at {output_dir}.")

    for i, shard in enumerate(shards, start=1):
        shard_file = os.path.join(output_dir, f'shard_{i}.jsonl')
        # Explicit UTF-8 so output does not depend on the platform
        # default encoding.
        with open(shard_file, 'w', encoding='utf-8') as file:
            for item in shard:
                json.dump(item, file)
                file.write('\n')
        print(f"Shard {i} written to {shard_file}.")
34
+
35
def main():
    """Entry point: load the dataset, shard it, and write the shards."""
    input_file = 'path_to_processed_dataset.jsonl'  # Update with your processed dataset file path
    output_dir = 'sharded_dataset'  # Update with your output directory for shards
    num_shards = 33

    dataset = read_dataset(input_file)
    if not dataset:
        # Nothing loaded (missing file or read error) — bail out early.
        print("No dataset to process.")
        return
    write_shards(shard_dataset(dataset, num_shards), output_dir)
    print("All shards have been successfully written.")


if __name__ == "__main__":
    main()