# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO: Address all TODOs and remove all explanatory comments
"""TODO: Add a description here."""

import pickle

import numpy as np

import datasets
from datasets import Value

# TODO: Add BibTeX citation
# Find for instance the citation on arxiv or on the dataset repo/website
_CITATION = """\
@InProceedings{huggingface:dataset,
title = {A great new dataset},
author={huggingface, Inc.},
year={2020}
}
"""

# TODO: Add description of the dataset here
# You can copy an official description
_DESCRIPTION = """\
This new dataset is designed to solve this great NLP task and is crafted with a lot of care.
"""

# TODO: Add a link to an official homepage for the dataset here
_HOMEPAGE = ""

# TODO: Add the licence for the dataset here if you can find it
_LICENSE = ""

# Datasets dtypes of the additional (metadata) features that can be requested
feature_dtype = {'s2_num_days': Value('int16'),
                 'gedi_num_days': Value('uint16'),
                 'lat': Value('float32'),
                 'lon': Value('float32'),
                 'agbd_se': Value('float32'),
                 'elev_lowes': Value('float32'),
                 'leaf_off_f': Value('uint8'),
                 'pft_class': Value('uint8'),
                 'region_cla': Value('uint8'),
                 'rh98': Value('float32'),
                 'sensitivity': Value('float32'),
                 'solar_elev': Value('float32'),
                 'urban_prop': Value('uint8')}


def encode_lat_lon(lat, lon):
    """
    Encode the latitude and longitude into sin/cosine values. We use a simple
    WRAP positional encoding, as in Mac Aodha et al. (2019).

    Args:
    - lat (float): the latitude
    - lon (float): the longitude

    Returns:
    - (lat_cos, lat_sin, lon_cos, lon_sin) (tuple): the sin/cosine values for the latitude and longitude
    """

    # The latitude goes from -90 to 90
    lat_cos, lat_sin = np.cos(np.pi * lat / 90), np.sin(np.pi * lat / 90)
    # The longitude goes from -180 to 180
    lon_cos, lon_sin = np.cos(np.pi * lon / 180), np.sin(np.pi * lon / 180)

    # Now we put everything in the [0,1] range
    lat_cos, lat_sin = (lat_cos + 1) / 2, (lat_sin + 1) / 2
    lon_cos, lon_sin = (lon_cos + 1) / 2, (lon_sin + 1) / 2

    return lat_cos, lat_sin, lon_cos, lon_sin
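
# Illustrative example (not part of the original script): encode_lat_lon maps a
# coordinate pair onto the unit circle and rescales it to [0,1], so the two ends
# of the longitude range (-180 and 180) receive identical encodings.
"""
Example usage:
lat_cos, lat_sin, lon_cos, lon_sin = encode_lat_lon(47.37, 8.54)  # scalars in [0,1]
lat_cos, lat_sin, lon_cos, lon_sin = encode_lat_lon(np.array([0.0, 45.0]), np.array([-180.0, 180.0]))  # element-wise
"""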
def encode_coords(central_lat, central_lon, patch_size, resolution=10):
    """
    This function computes the latitude and longitude of every pixel in a patch, from the
    latitude and longitude of the central pixel. It then encodes these values into sin/cosine
    values, and scales the results to [0,1].

    Args:
    - central_lat (float): the latitude of the central pixel
    - central_lon (float): the longitude of the central pixel
    - patch_size (tuple): the size of the patch
    - resolution (int): the resolution of the patch, in meters per pixel

    Returns:
    - (lat_cos, lat_sin, lon_cos, lon_sin) (tuple): the sin/cosine values for the latitude and longitude
    """

    # Initialize arrays to store latitude and longitude coordinates
    i_indices, j_indices = np.indices(patch_size)

    # Calculate the distance offset in meters for each pixel
    offset_lat = (i_indices - patch_size[0] // 2) * resolution
    offset_lon = (j_indices - patch_size[1] // 2) * resolution

    # Calculate the latitude and longitude for each pixel, approximating the Earth
    # as a sphere of radius 6,371 km
    latitudes = central_lat + (offset_lat / 6371000) * (180 / np.pi)
    longitudes = central_lon + (offset_lon / 6371000) * (180 / np.pi) / np.cos(central_lat * np.pi / 180)

    lat_cos, lat_sin, lon_cos, lon_sin = encode_lat_lon(latitudes, longitudes)

    return lat_cos, lat_sin, lon_cos, lon_sin


"""
Example usage:
lat_cos, lat_sin, lon_cos, lon_sin = encode_coords(lat, lon, self.patch_size)
lat_cos, lat_sin, lon_cos, lon_sin = lat_cos[..., np.newaxis], lat_sin[..., np.newaxis], lon_cos[..., np.newaxis], lon_sin[..., np.newaxis]
"""

#########################################################################################################################
# Denormalizer

def denormalize_data(data, norm_values, norm_strat='pct'):
    """
    Denormalize the data, inverting one of the following normalization strategies:
    - mean_std: the data was standardized (mean subtracted, divided by the standard deviation)
    - pct: the data was scaled with the 1st and 99th percentiles
    - min_max: the data was scaled with the minimum and maximum values

    Args:
    - data (np.array): the data to denormalize
    - norm_values (dict): the normalization values
    - norm_strat (str): the normalization strategy

    Returns:
    - data (np.array): the denormalized data
    """
    if norm_strat == 'mean_std':
        mean, std = norm_values['mean'], norm_values['std']
        data = data * std + mean
    elif norm_strat == 'pct':
        p1, p99 = norm_values['p1'], norm_values['p99']
        data = data * (p99 - p1) + p1
    elif norm_strat == 'min_max':
        min_val, max_val = norm_values['min'], norm_values['max']
        data = data * (max_val - min_val) + min_val
    else:
        raise ValueError(f'De-normalization strategy `{norm_strat}` is not valid.')
    return data
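
# Illustrative example (not part of the original script): inverting the default
# percentile ('pct') strategy, under which a stored value x was computed from the
# raw value as x = (raw - p1) / (p99 - p1).
"""
Example usage:
norm_values = {'p1': 0.0, 'p99': 2000.0}  # hypothetical band statistics
raw = denormalize_data(np.array([0.0, 0.5, 1.0]), norm_values, norm_strat='pct')
# raw is array([   0., 1000., 2000.])
"""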
def denormalize_bands(bands_data, norm_values, order, norm_strat='pct'):
    """
    This function denormalizes the bands data using the normalization values and strategy.

    Args:
    - bands_data (np.array): the bands data to denormalize, with the band axis first
    - norm_values (dict): the normalization values
    - order (list): the order of the bands
    - norm_strat (str): the normalization strategy

    Returns:
    - bands_data (np.array): the denormalized bands data
    """
    for i, band in enumerate(order):
        band_norm = norm_values[band]
        # The patches are stored channels-first, so we index the band axis first
        bands_data[i] = denormalize_data(bands_data[i], band_norm, norm_strat)
    return bands_data


def decode_lc(encoded_lc, mode='cos'):
    """
    Decode the land cover (LC) classes from their sin/cosine encoding, which scaled
    the data to [0,1].

    Args:
    - encoded_lc (np.array): the encoded LC values
    - mode (str): the encoding that was used, either `cos` or `sin`

    Returns:
    - lc (np.array): the decoded LC classes
    """
    if mode == 'cos':
        lc = 100 * np.arccos(2 * encoded_lc - 1) / (2 * np.pi)
    elif mode == 'sin':
        lc = 100 * np.arcsin(2 * encoded_lc - 1) / (2 * np.pi)
    else:
        raise ValueError(f'Mode `{mode}` is not valid.')
    return lc
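
# Illustrative round-trip example (not part of the original script): a land cover
# class c in [0,100], encoded as (cos(2 * pi * c / 100) + 1) / 2, is recovered by
# decode_lc. Since arccos alone only covers half the range, the pipeline stores
# both a 'cos' and a 'sin' channel to resolve the class unambiguously.
"""
Example usage:
encoded = (np.cos(2 * np.pi * 42 / 100) + 1) / 2
lc = decode_lc(encoded, mode='cos')  # 42.0
"""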
class NewDataset(datasets.GeneratorBasedBuilder):

    def __init__(self, *args, additional_features=[], normalize_data=True, patch_size=15, **kwargs):
        self.inner_dataset_kwargs = kwargs
        self._is_streaming = False
        self.patch_size = patch_size
        self.normalize_data = normalize_data
        self.additional_features = additional_features
        super().__init__(*args, **kwargs)

    VERSION = datasets.Version("1.1.0")

    BUILDER_CONFIGS = [
        datasets.BuilderConfig(name="default", version=VERSION, description="Normalized data"),
        datasets.BuilderConfig(name="unnormalized", version=VERSION, description="Unnormalized data"),
    ]

    DEFAULT_CONFIG_NAME = "default"

    def as_streaming_dataset(self, split=None, base_path=None):
        self._is_streaming = True
        return super().as_streaming_dataset(split=split, base_path=base_path)

    def _info(self):
        with open('statistics.pkl', 'rb') as f:
            self.norm_values = pickle.load(f)

        all_features = {
            'input': datasets.Sequence(datasets.Sequence(datasets.Sequence(datasets.Value('float32')))),
            'label': Value('float32')
        }
        for feat in self.additional_features:
            all_features[feat] = feature_dtype[feat]
        features = datasets.Features(all_features)

        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        self.original_dataset = datasets.load_dataset("prs-eth/AGBD_raw", streaming=self._is_streaming)
        return [
            datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"split": "train"}),
            datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={"split": "val"}),
            datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"split": "test"}),
        ]

    def _generate_examples(self, split):
        for i, d in enumerate(self.original_dataset[split]):

            patch = np.asarray(d["input"])
            if not self.normalize_data:
                # The patches are stored normalized; undo the normalization channel by channel
                patch[:12] = denormalize_bands(patch[:12], self.norm_values['S2_bands'],
                                               ['B01', 'B02', 'B03', 'B04', 'B05', 'B06',
                                                'B07', 'B08', 'B8A', 'B09', 'B11', 'B12'])
                patch[12:14] = denormalize_bands(patch[12:14], self.norm_values['ALOS_bands'], ['HH', 'HV'])
                patch[14] = denormalize_data(patch[14], self.norm_values['CH']['ch'])
                patch[15] = denormalize_data(patch[15], self.norm_values['CH']['std'])
                patch[16] = decode_lc(patch[16], 'cos')
                patch[17] = decode_lc(patch[17], 'sin')
                patch[18] = patch[18] * 100  # this channel was stored divided by 100
                patch[19] = denormalize_data(patch[19], self.norm_values['DEM'])

            # Encode the patch coordinates into four [0,1] channels
            lat, lon = d["metadata"]["lat"], d["metadata"]["lon"]
            latlon_patch = np.stack(encode_coords(lat, lon, (self.patch_size, self.patch_size)), axis=0)

            # Center-crop the patch to the requested size
            start_x = (patch.shape[1] - self.patch_size) // 2
            start_y = (patch.shape[2] - self.patch_size) // 2
            patch = patch[:, start_x:start_x + self.patch_size, start_y:start_y + self.patch_size]

            # Insert the coordinate channels after the Sentinel-2 bands
            patch = np.concatenate([patch[:12], latlon_patch, patch[12:]], 0)

            data = {'input': patch, 'label': d["label"]}
            for feat in self.additional_features:
                data[feat] = d["metadata"][feat]

            yield i, data
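
# Illustrative example (not part of the original script): loading this builder with
# the `datasets` library. The filename `AGBD.py` is an assumption, and the channel
# count assumes the 20 stored channels handled in `_generate_examples` above.
"""
Example usage:
from datasets import load_dataset

dataset = load_dataset('AGBD.py', streaming=True, normalize_data=True,
                       patch_size=15, additional_features=['lat', 'lon'])
sample = next(iter(dataset['train']))
# sample['input'] has shape (24, 15, 15): 12 Sentinel-2 bands, 4 lat/lon encoding
# channels, 2 ALOS bands, CH and CH std, LC cos/sin, the /100-scaled channel, and DEM
# sample['label'] is the scalar regression target
"""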