Commit 2c7ef16
1 Parent(s): f2c5483
Maurice Weber committed

catch broken zips

Files changed (1): RedPajama-Data-V2.py (+36, -22)
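The change wraps the gzip readers in _handle_tail and _handle_head_middle in try/except gzip.BadGzipFile blocks, so a shard with a corrupt archive is logged and skipped instead of aborting example generation.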
RedPajama-Data-V2.py CHANGED
@@ -318,15 +318,22 @@ class RedPajamaV2(datasets.GeneratorBasedBuilder):
         yield from self._handle_head_middle(base_tag, doc_file, qs_file, dupe_file)
 
     def _handle_tail(self, base_tag, doc_file, qs_file, dupe_file):
-        with gzip.open(doc_file, "rt", encoding="utf-8") as df:
-            for row, doc in enumerate(df):
-                doc_id = f"{base_tag}.json.gz/{row}"
-                try:
-                    yield self.handle_record("tail", doc_id, doc, None, None)
-                except Exception as e:
-                    logger.warning(f'failed handling row {row} in {doc_file}')
-                    traceback.print_exc()
-                    continue
+        try:
+            with gzip.open(doc_file, "rt", encoding="utf-8") as df:
+                for row, doc in enumerate(df):
+                    doc_id = f"{base_tag}.json.gz/{row}"
+                    try:
+                        yield self.handle_record("tail", doc_id, doc, None, None)
+                    except Exception as e:
+                        logger.warning(f'failed handling row {row} in {doc_file}')
+                        traceback.print_exc()
+                        continue
+
+        except gzip.BadGzipFile as e:
+            # skip broken gzip files
+            print(f'BadGzipFile: {doc_file, qs_file}')
+            traceback.print_exc()
+            return
 
     def _handle_head_middle(
         self, base_tag, doc_file, qs_file, dupe_file
@@ -345,19 +352,26 @@ class RedPajamaV2(datasets.GeneratorBasedBuilder):
             logger.warning(f'no duplicate ids found for {base_tag}')
             duplicates = set()
 
-        with gzip.open(doc_file, "rt", encoding="utf-8") as df:
-            with gzip.open(qs_file, "rt", encoding="utf-8") as qf:
-                for row, (doc, qs) in enumerate(zip(df, qf)):
-                    doc_id = f"{base_tag}.json.gz/{row}"
-
-                    try:
-                        yield self.handle_record(
-                            "head_middle", doc_id, doc, qs, is_duplicate=doc_id in duplicates
-                        )
-                    except Exception as e:
-                        logger.warning(f'failed handling row {row} in {doc_file} ({qs_file})')
-                        traceback.print_exc()
-                        continue
+        try:
+            with gzip.open(doc_file, "rt", encoding="utf-8") as df:
+                with gzip.open(qs_file, "rt", encoding="utf-8") as qf:
+                    for row, (doc, qs) in enumerate(zip(df, qf)):
+                        doc_id = f"{base_tag}.json.gz/{row}"
+
+                        try:
+                            yield self.handle_record(
+                                "head_middle", doc_id, doc, qs, is_duplicate=doc_id in duplicates
+                            )
+                        except Exception as e:
+                            logger.warning(f'failed handling row {row} in {doc_file} ({qs_file})')
+                            traceback.print_exc()
+                            continue
+
+        except gzip.BadGzipFile as e:
+            # skip broken gzip files
+            print(f'BadGzipFile: {doc_file, qs_file}')
+            traceback.print_exc()
+            return
 
     @staticmethod
     def handle_record(part, doc_id, doc, qs, is_duplicate=None):
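
For reference, here is a minimal standalone sketch of the pattern this commit introduces (names such as iter_records and the shard path are hypothetical, not part of the dataset script). gzip decompresses lazily, so a corrupt archive raises while iterating, not when gzip.open() returns; the except therefore has to wrap the whole read loop, as in the commit. The sketch additionally catches EOFError, which a truncated gzip stream raises; the commit itself catches only gzip.BadGzipFile.

# Standalone sketch of the "catch broken zips" pattern (hypothetical names).
import gzip
import traceback


def iter_records(doc_file):
    try:
        with gzip.open(doc_file, "rt", encoding="utf-8") as df:
            for row, line in enumerate(df):
                # One record per line, mirroring the .json.gz shard layout.
                yield f"{doc_file}/{row}", line
    except (gzip.BadGzipFile, EOFError):
        # Bad magic bytes raise gzip.BadGzipFile; a truncated stream raises
        # EOFError (the commit catches only the former). Either way, log and
        # skip the whole shard instead of aborting the run.
        print(f"skipping broken shard: {doc_file}")
        traceback.print_exc()
        return


if __name__ == "__main__":
    # Write a file that is not valid gzip to exercise the skip path.
    with open("broken-shard.json.gz", "wb") as fh:
        fh.write(b"this is not gzip data")

    for doc_id, line in iter_records("broken-shard.json.gz"):
        print(doc_id, line)  # never reached for the broken shard

Because the handlers are generators, the bare return simply ends iteration for the broken shard, letting the builder move on to the next file.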