Datasets:
Unable to load the dataset with streaming=True
I previously loaded the dataset in the same way as shown below, but now I get this error. Why?
raw_dataset = load_dataset("facebook/voxpopuli", "en", split="train", streaming=True, trust_remote_code=True)
raw_dataset = raw_dataset.shuffle(buffer_size=10000, seed=42)
max_samples = 10000 # Adjust based on your compute resources
raw_dataset = raw_dataset.take(max_samples)
UnicodeDecodeError Traceback (most recent call last)
in <cell line: 5>()
----> 5 raw_dataset = load_dataset("facebook/voxpopuli", "en", split="train", streaming=True, trust_remote_code=True)
6 raw_dataset = raw_dataset.shuffle(buffer_size=10000, seed=42)
7
/usr/local/lib/python3.10/dist-packages/datasets/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, verification_mode, keep_in_memory, save_infos, revision, token, streaming, num_proc, storage_options, trust_remote_code, **config_kwargs)
2078 # Return iterable dataset in case of streaming
2079 if streaming:
-> 2080 return builder_instance.as_streaming_dataset(split=split)
2081
2082 # Download and prepare data
/usr/local/lib/python3.10/dist-packages/datasets/builder.py in as_streaming_dataset(self, split, base_path)
1263 )
1264 self._check_manual_download(dl_manager)
-> 1265 splits_generators = {sg.name: sg for sg in self._split_generators(dl_manager)}
1266 # By default, return all splits
1267 if split is None:
~/.cache/huggingface/modules/datasets_modules/datasets/facebook--voxpopuli/b5ff837284f0778eefe0f642734e142d8c3f574eba8c9c8a4b13602297f73604/voxpopuli.py in _split_generators(self, dl_manager)
119 n_shards_path = dl_manager.download_and_extract(_N_SHARDS_FILE)
120 with open(n_shards_path) as f:
--> 121 n_shards = json.load(f)
122
123 if self.config.name == "en_accented":
/usr/lib/python3.10/json/init.py in load(fp, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)
291 kwarg; otherwise JSONDecoder
is used.
292 """
--> 293 return loads(fp.read(),
294 cls=cls, object_hook=object_hook,
295 parse_float=parse_float, parse_int=parse_int,
/usr/local/lib/python3.10/dist-packages/datasets/utils/file_utils.py in read_with_retries(*args, **kwargs)
825 for retry in range(1, max_retries + 1):
826 try:
--> 827 out = read(*args, **kwargs)
828 break
829 except (
/usr/lib/python3.10/codecs.py in decode(self, input, final)
320 # decode input (taking the buffer into account)
321 data = self.buffer + input
--> 322 (result, consumed) = self._buffer_decode(data, self.errors, final)
323 # keep undecoded input until the next call
324 self.buffer = data[consumed:]
UnicodeDecodeError: 'utf-8' codec can't decode byte 0x8b in position 1: invalid start byte
I'm also experiencing this with datasets[audio] v3.4.1, Python 3.11, and transformers v4.50.0.
If I install transformers==4.49.0 instead of the latest version, loading the dataset with streaming=True works.
Yeah, it's working.