# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Dungeons and Data: A Large-Scale NetHack Dataset. """
import glob
import h5py
import json
import os
import datasets
# from datasets.download.streaming_download_manager import xopen
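
# Expected repository layout (an inference from `_split_generators` below; the
# layout is not documented anywhere else in this script):
#   data/data-<config>-any.hdf5      - one HDF5 group per episode, keyed by gameid
#   data/metadata-<config>-any.json  - list of per-episode metadata dicts, in the same order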
_CITATION = """\
"""

_DESCRIPTION = """\
3 billion state-action-score transitions from 100,000 trajectories collected from the symbolic-bot winner of the NetHack Challenge 2021.
"""

_HOMEPAGE = ""

_LICENSE = ""
# _TOTAL_EPISODES = 1934
# _URLS = {
#     "data": [f"data/{i}.hdf5" for i in range(1, _TOTAL_EPISODES)],
#     "metadata": ["metadata.json"],
# }


class NleHfDataset(datasets.GeneratorBasedBuilder):
    """Dungeons and Data: A Large-Scale NetHack Dataset."""

    VERSION = datasets.Version("1.0.0")

    # BUILDER_CONFIGS = [
    #     datasets.BuilderConfig(name="data", version=VERSION, description="Data for all episodes"),
    #     datasets.BuilderConfig(name="metadata", version=VERSION, description="Metadata for all episodes"),
    # ]
    # DEFAULT_CONFIG_NAME = "metadata"
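    # Since no BUILDER_CONFIGS are declared, the config name is whatever the
    # caller passes as `name=` to `load_dataset`; `_split_generators` below
    # interpolates it into the data/metadata file names.
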
def _info(self):
features = datasets.Features(
{
"data": {
"tty_chars": datasets.Array3D(shape=(None, 24, 80), dtype="uint8"),
"tty_colors": datasets.Array3D(shape=(None, 24, 80), dtype="int8"),
"tty_cursor": datasets.Array2D(shape=(None, 2), dtype="int16"),
"actions": datasets.Sequence(datasets.Value("int16")),
"rewards": datasets.Sequence(datasets.Value("int32")),
"dones": datasets.Sequence(datasets.Value("bool")),
},
"metadata": {
"gameid": datasets.Value("int32"),
"version": datasets.Value("string"),
"points": datasets.Value("int32"),
"deathdnum": datasets.Value("int32"),
"deathlev": datasets.Value("int32"),
"maxlvl": datasets.Value("int32"),
"hp": datasets.Value("int32"),
"maxhp": datasets.Value("int32"),
"deaths": datasets.Value("int32"),
"deathdate": datasets.Value("int32"),
"birthdate": datasets.Value("int32"),
"uid": datasets.Value("int32"),
"role": datasets.Value("string"),
"race": datasets.Value("string"),
"gender": datasets.Value("string"),
"align": datasets.Value("string"),
"name": datasets.Value("string"),
"death": datasets.Value("string"),
"conduct": datasets.Value("string"),
"turns": datasets.Value("int32"),
"achieve": datasets.Value("string"),
"realtime": datasets.Value("int64"),
"starttime": datasets.Value("int64"),
"endtime": datasets.Value("int64"),
"gender0": datasets.Value("string"),
"align0": datasets.Value("string"),
"flags": datasets.Value("string")
}
}
)
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=features,
homepage=_HOMEPAGE,
license=_LICENSE,
citation=_CITATION,
)

    def _split_generators(self, dl_manager):
        # One data/metadata file pair per config name.
        # data_file = dl_manager.download_and_extract(f"data/data-{self.config.name}-any.hdf5.zip")
        data_file = dl_manager.download(f"data/data-{self.config.name}-any.hdf5")
        metadata_file = dl_manager.download(f"data/metadata-{self.config.name}-any.json")
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN,
gen_kwargs={
"data_file": data_file,
"metadata_file": metadata_file,
"dl_manager": dl_manager
}
)
]
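
    # Each example yielded below pairs the raw per-episode arrays ("data") with
    # one metadata record ("metadata"); the metadata fields mirror NetHack's
    # xlogfile entries (gameid, role, race, death, turns, ...).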
    def _generate_examples(self, data_file, metadata_file, dl_manager):
        with h5py.File(data_file, "r") as df, open(metadata_file, "r") as f:
            meta = json.load(f)
            for i, (ep_key, ep_meta) in enumerate(zip(df["/"], meta)):
                # Sanity check: the HDF5 groups and the metadata records are
                # expected to be aligned, both keyed by the NetHack gameid.
                assert int(ep_key) == int(ep_meta["gameid"])
yield i, {
"data": {
"tty_chars": df[f"{ep_key}/tty_chars"][()],
"tty_colors": df[f"{ep_key}/tty_colors"][()],
"tty_cursor": df[f"{ep_key}/tty_cursor"][()],
"actions": df[f"{ep_key}/actions"][()],
"rewards": df[f"{ep_key}/rewards"][()],
"dones": df[f"{ep_key}/dones"][()]
},
"metadata": ep_meta
}
# if self.config.name == "metadata":
# assert len(filepaths) == 1
# assert not dl_manager.is_streaming
# yield from self.__generate_metadata(filepaths[0])
# else:
# yield from self.__generate_data(filepaths, dl_manager)
# def __generate_metadata(self, filepath):
# with open(filepath, "r") as f:
# data = json.load(f)
# for i, line in enumerate(data):
# yield i, line
# def __generate_data(self, filepaths, dl_manager):
# for i, filepath in enumerate(filepaths):
# if dl_manager.is_streaming:
# filepath = xopen(filepath, "rb")
# with h5py.File(filepath, "r") as f:
# yield i, {
# "tty_chars": f["tty_chars"][()],
# "tty_colors": f["tty_colors"][()],
# "tty_cursor": f["tty_cursor"][()],
# "actions": f["actions"][()],
# "rewards": f["rewards"][()],
# "dones": f["dones"][()]
# }
# if dl_manager.is_streaming:
# filepath.close()
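

if __name__ == "__main__":
    # Minimal usage sketch, not part of the builder itself. Assumptions: this
    # script is loaded via script-based loading (removed in `datasets` 3.0, so
    # an earlier 2.x release is needed, and recent 2.x releases also require
    # trust_remote_code=True), and a config name such as "v0" (a placeholder,
    # not defined in this file) matches local files data/data-v0-any.hdf5 and
    # data/metadata-v0-any.json next to the script.
    dataset = datasets.load_dataset(__file__, name="v0", split="train", trust_remote_code=True)
    sample = dataset[0]
    print(sample["metadata"]["gameid"], len(sample["data"]["actions"]))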