pere committed on
Commit 32843dd · 1 Parent(s): b3ddbe2

dataloader

Files changed (1)
  1. nbnn_language_detection.py +5 -1
nbnn_language_detection.py CHANGED
@@ -7,6 +7,7 @@ class MyDataset(DatasetBuilder):
     VERSION = "0.1.0"
 
     def _info(self):
+        print("Calling _info")
         return DatasetInfo(
             features=Features({
                 'text': Value('string'),
@@ -16,13 +17,15 @@ class MyDataset(DatasetBuilder):
         )
 
     def _split_generators(self, dl_manager):
+        print("Calling _split_generators")
        urls = {
             'train': 'https://huggingface.co/datasets/NbAiLab/nbnn_language_detection/resolve/main/train.jsonl',
             'dev': 'https://huggingface.co/datasets/NbAiLab/nbnn_language_detection/resolve/main/dev.jsonl',
             'test': 'https://huggingface.co/datasets/NbAiLab/nbnn_language_detection/resolve/main/test.jsonl',
         }
-
+
         downloaded_files = dl_manager.download(urls)
+        print(f"Downloaded files: {downloaded_files}")
 
         return [
             SplitGenerator(name=split, gen_kwargs={'filepath': downloaded_files[split]})
@@ -30,6 +33,7 @@ class MyDataset(DatasetBuilder):
         ]
 
     def _generate_examples(self, filepath):
+        print(f"Calling _generate_examples with filepath: {filepath}")
         with open(filepath, 'r') as f:
             for id, line in enumerate(f):
                 data = json.loads(line)
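
For reference, the print() calls added here run whenever the builder is executed by the datasets library, so they appear when the dataset is loaded. Below is a minimal usage sketch, not part of this commit; it assumes the standard datasets.load_dataset API and the train/dev/test split names implied by the URLs in the diff.

from datasets import load_dataset

# Loading the dataset runs _info(), _split_generators() and _generate_examples(),
# so the debug print() statements added in this commit show up on stdout.
dataset = load_dataset("NbAiLab/nbnn_language_detection")

print(dataset)              # splits built by the loading script (train/dev/test assumed)
print(dataset["train"][0])  # first example, e.g. a dict with a 'text' field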