Datasets:
peterdavidfagan
committed on
Create load_tfds_example.py
Browse files- load_tfds_example.py +37 -0
load_tfds_example.py
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Download, extract, and inspect the transporter_networks TFDS dataset.

Fetches a compressed TFDS archive from the Hugging Face Hub, unpacks it
into DATA_DIR, deletes the archive, and prints a few fields from each
episode as a quick sanity check of the data.
"""
import os
import tarfile

import tensorflow_datasets as tfds
from huggingface_hub import hf_hub_download

DATA_DIR = "/home/robot"
FILENAME = "data.tar.xz"
EXTRACTED_FILENAME = "data"
FILEPATH = os.path.join(DATA_DIR, FILENAME)
EXTRACTED_FILEPATH = os.path.join(DATA_DIR, EXTRACTED_FILENAME)

# Download the archive from the Hugging Face Hub. Use the path returned by
# hf_hub_download instead of re-deriving it from DATA_DIR/FILENAME, so the
# script stays correct even if the hub client changes its local_dir layout.
archive_path = hf_hub_download(
    repo_id="peterdavidfagan/transporter_networks",
    repo_type="dataset",
    filename=FILENAME,
    local_dir=DATA_DIR,
)

# Uncompress the archive, then remove it to free disk space.
# NOTE(review): the archive comes from a remote source; on interpreters that
# support it, pass filter="data" (PEP 706) to extractall to guard against
# path-traversal members — confirm the target Python version before adding.
with tarfile.open(archive_path, "r:xz") as tar:
    tar.extractall(path=DATA_DIR)
os.remove(archive_path)

# Load the extracted TFDS dataset directly from its on-disk directory.
ds = tfds.builder_from_directory(EXTRACTED_FILEPATH).as_dataset()["train"]

# Basic inspection: print the element spec, then a few per-episode and
# per-step fields to verify the dataset decodes correctly.
print(ds.element_spec)
for eps in ds:
    print(eps["extrinsics"])
    for step in eps["steps"]:
        print(step["is_first"])
        print(step["is_last"])
        print(step["is_terminal"])
        print(step["action"])