from datasets import load_dataset
import argparse
from pathlib import Path
import json
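
# Streams the MatSynth dataset ("gvecchio/MatSynth") from the Hugging Face Hub
# and writes each material to its own folder: metadata.json plus one PNG per
# texture map, under <base_dir>/<split>/<category>/<name>/.
# Example invocation (the script name here is illustrative):
#   python download_matsynth.py --base_dir ./matsynth
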
def process_batch(examples):
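    # Identity function: a placeholder hook where per-item processing could go.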
    return examples


if __name__ == "__main__":
    # Create argument parser
    parser = argparse.ArgumentParser(description="Download dataset.")
    parser.add_argument("--base_dir", required=True, help="Directory to save the downloaded files.")
    args = parser.parse_args()

    base_dir = Path(args.base_dir)
    base_dir.mkdir(exist_ok=True, parents=True)

    # Load dataset in streaming mode so items are fetched on demand
    # instead of downloading the full archive up front
    ds = load_dataset(
        "gvecchio/MatSynth",
        streaming=True,
    )

    # Set up dummy processing (identity map applied item by item)
    ds = ds.map(process_batch, batched=False, batch_size=1)

    for split in ds:
        for item in ds[split]:
            name = item["name"]
            dest_dir = base_dir / split / item["metadata"]["category"] / name
            dest_dir.mkdir(exist_ok=True, parents=True)

            # Save metadata; physical_size is cast to str so the dict is JSON-serializable
            with open(dest_dir / "metadata.json", "w") as f:
                item["metadata"]["physical_size"] = str(
                    item["metadata"]["physical_size"]
                )
                json.dump(item["metadata"], f, indent=4)

            # Save images
            item["basecolor"].save(dest_dir / "basecolor.png")
            item["diffuse"].save(dest_dir / "diffuse.png")
            item["displacement"].save(dest_dir / "displacement.png")
            item["specular"].save(dest_dir / "specular.png")
            item["height"].save(dest_dir / "height.png")
            item["metallic"].save(dest_dir / "metallic.png")
            item["normal"].save(dest_dir / "normal.png")
            item["opacity"].save(dest_dir / "opacity.png")
            item["roughness"].save(dest_dir / "roughness.png")

            # blend_mask is optional, so only save it when present
            if item["blend_mask"] is not None:
                item["blend_mask"].save(dest_dir / "blend_mask.png")