# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AGBD: dataset loading script pairing GEDI aboveground biomass density (AGBD) labels
with Sentinel-2, ALOS PALSAR, canopy height, land-cover and DEM image patches."""
import numpy as np
import datasets
from datasets import Value
# TODO: Add the BibTeX citation for the AGBD dataset paper
_CITATION = """\
@InProceedings{huggingface:dataset,
  title  = {A great new dataset},
  author = {huggingface, Inc.},
  year   = {2020}
}
"""
_DESCRIPTION = """\
AGBD is a machine-learning-ready dataset for aboveground biomass density (AGBD) estimation.
Each example pairs a GEDI biomass footprint (the 'label' field) with a multi-channel image
patch centered on it: Sentinel-2 L2A bands, ALOS PALSAR backscatter (HH/HV), canopy height
and its standard deviation, land-cover, and a DEM.
"""
# TODO: Add a link to an official homepage for the dataset here
_HOMEPAGE = ""
# TODO: Add the licence for the dataset here if you can find it
_LICENSE = ""
feature_dtype = {'s2_num_days': Value('int16'),
'gedi_num_days': Value('uint16'),
'lat': Value('float32'),
'lon': Value('float32'),
"agbd_se": Value('float32'),
"elev_lowes": Value('float32'),
"leaf_off_f": Value('uint8'),
"pft_class": Value('uint8'),
"region_cla": Value('uint8'),
"rh98": Value('float32'),
"sensitivity": Value('float32'),
"solar_elev": Value('float32'),
"urban_prop":Value('uint8')}
norm_values = {'ALOS_bands': {
'HH': {'mean': -10.381429, 'std': 8.561741, 'min': -83.0, 'max': 13.329468, 'p1': -19.542107, 'p99': -2.402588},
'HV': {'mean': -16.722847, 'std': 8.718428, 'min': -83.0, 'max': 11.688309, 'p1': -29.285168, 'p99': -8.773987}},
'S2_bands': {'B01': {'mean': 0.12478869, 'std': 0.024433358, 'min': 1e-04, 'max': 1.8808, 'p1': 0.0787,
'p99': 0.1946},
'B02': {'mean': 0.13480005, 'std': 0.02822557, 'min': 1e-04, 'max': 2.1776, 'p1': 0.0925,
'p99': 0.2216},
'B03': {'mean': 0.16031432, 'std': 0.032037303, 'min': 1e-04, 'max': 2.12, 'p1': 0.1035,
'p99': 0.2556},
'B04': {'mean': 0.1532097, 'std': 0.038628064, 'min': 1e-04, 'max': 2.0032, 'p1': 0.1023,
'p99': 0.2816},
'B05': {'mean': 0.20312776, 'std': 0.04205057, 'min': 0.0422, 'max': 1.7502, 'p1': 0.1178,
'p99': 0.319},
'B06': {'mean': 0.32636437, 'std': 0.07139242, 'min': 0.0502, 'max': 1.7245, 'p1': 0.1633,
'p99': 0.519},
'B07': {'mean': 0.36605212, 'std': 0.08555025, 'min': 0.0616, 'max': 1.7149, 'p1': 0.1776,
'p99': 0.6076},
'B08': {'mean': 0.3811653, 'std': 0.092815965, 'min': 1e-04, 'max': 1.7488, 'p1': 0.1691,
'p99': 0.646},
'B8A': {'mean': 0.3910436, 'std': 0.0896364, 'min': 0.055, 'max': 1.688, 'p1': 0.1871,
'p99': 0.6386},
'B09': {'mean': 0.3910644, 'std': 0.0836445, 'min': 0.0012, 'max': 1.7915, 'p1': 0.2124,
'p99': 0.6241},
'B11': {'mean': 0.2917373, 'std': 0.07472579, 'min': 0.0953, 'max': 1.648, 'p1': 0.1334,
'p99': 0.4827},
'B12': {'mean': 0.21169408, 'std': 0.05880649, 'min': 0.0975, 'max': 1.6775, 'p1': 0.115,
'p99': 0.3872}},
'CH': {'ch': {'mean': 9.736144, 'std': 9.493601, 'min': 0.0, 'max': 61.0, 'p1': 0.0, 'p99': 38.0},
'std': {'mean': 7.9882116, 'std': 4.549494, 'min': 0.0, 'max': 254.0, 'p1': 0.0, 'p99': 18.0}},
'DEM': {'mean': 604.63727, 'std': 588.02094, 'min': -82.0, 'max': 5205.0, 'p1': 507.0, 'p99': 450.0},
'Sentinel_metadata': {
'S2_vegetation_score': {'mean': 89.168724, 'std': 17.17321, 'min': 20.0, 'max': 100.0, 'p1': 29.0,
'p99': 100.0},
'S2_date': {'mean': 299.1638, 'std': 192.87402, 'min': -165.0, 'max': 623.0, 'p1': 253.0,
                             'p99': 277.0}},
               'GEDI': {
'agbd': {'mean': 66.97266, 'std': 98.66588, 'min': 0.0, 'max': 499.99985, 'p1': 0.9703503, 'p99': 163.46234},
'agbd_se': {'mean': 8.360701, 'std': 4.211524, 'min': 2.981795, 'max': 25.041483, 'p1': 2.9830396,
'p99': 8.612499},
'rh98': {'mean': 12.074685, 'std': 10.276359, 'min': -1.1200076, 'max': 111.990005, 'p1': 2.3599916,
'p99': 6.9500012},
'date': {'mean': 361.7431, 'std': 175.37294, 'min': 0.0, 'max': 624.0, 'p1': 360.0, 'p99': 146.0}}}
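# NOTE: helper sketch, not part of the original pipeline. It shows the forward
# normalization that the statistics above are assumed to parameterize; the
# `denormalize_data` function further below inverts each branch.
# e.g. normalize_data(50.0, norm_values['GEDI']['agbd'], 'pct') ~ 0.30
def normalize_data(data, norm_values, norm_strat='pct'):
    if norm_strat == 'mean_std':
        return (data - norm_values['mean']) / norm_values['std']
    elif norm_strat == 'pct':
        return (data - norm_values['p1']) / (norm_values['p99'] - norm_values['p1'])
    elif norm_strat == 'min_max':
        return (data - norm_values['min']) / (norm_values['max'] - norm_values['min'])
    raise ValueError(f'Normalization strategy `{norm_strat}` is not valid.')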
def encode_lat_lon(lat, lon):
"""
    Encode the latitude and longitude into sin/cosine values, using the simple WRAP
    positional encoding of Mac Aodha et al. (2019).
Args:
- lat (float): the latitude
- lon (float): the longitude
Returns:
- (lat_cos, lat_sin, lon_cos, lon_sin) (tuple): the sin/cosine values for the latitude and longitude
"""
# The latitude goes from -90 to 90
lat_cos, lat_sin = np.cos(np.pi * lat / 90), np.sin(np.pi * lat / 90)
# The longitude goes from -180 to 180
lon_cos, lon_sin = np.cos(np.pi * lon / 180), np.sin(np.pi * lon / 180)
# Now we put everything in the [0,1] range
lat_cos, lat_sin = (lat_cos + 1) / 2, (lat_sin + 1) / 2
lon_cos, lon_sin = (lon_cos + 1) / 2, (lon_sin + 1) / 2
return lat_cos, lat_sin, lon_cos, lon_sin
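# Example: the equator / prime-meridian point maps to
#   encode_lat_lon(0.0, 0.0) -> (1.0, 0.5, 1.0, 0.5)
# since cos(0) = 1 and sin(0) = 0, rescaled from [-1, 1] to [0, 1].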
def encode_coords(central_lat, central_lon, patch_size, resolution=10):
"""
This function computes the latitude and longitude of a patch, from the latitude and longitude of its central pixel.
It then encodes these values into sin/cosine values, and scales the results to [0,1].
Args:
- central_lat (float): the latitude of the central pixel
- central_lon (float): the longitude of the central pixel
- patch_size (tuple): the size of the patch
    - resolution (int): the ground resolution of the patch, in meters per pixel
Returns:
- (lat_cos, lat_sin, lon_cos, lon_sin) (tuple): the sin/cosine values for the latitude and longitude
"""
    # Pixel index grids over the patch
    i_indices, j_indices = np.indices(patch_size)
    # Offset of each pixel from the central pixel, in meters
    offset_lat = (i_indices - patch_size[0] // 2) * resolution
    offset_lon = (j_indices - patch_size[1] // 2) * resolution
    # Convert the meter offsets to degrees (Earth radius ~6371 km), correcting the
    # longitude spacing for the latitude of the patch center
    latitudes = central_lat + (offset_lat / 6371000) * (180 / np.pi)
    longitudes = central_lon + (offset_lon / 6371000) * (180 / np.pi) / np.cos(central_lat * np.pi / 180)
lat_cos, lat_sin, lon_cos, lon_sin = encode_lat_lon(latitudes, longitudes)
return lat_cos, lat_sin, lon_cos, lon_sin
"""
Example usage:
lat_cos, lat_sin, lon_cos, lon_sin = encode_coords(lat, lon, self.patch_size)
lat_cos, lat_sin, lon_cos, lon_sin = lat_cos[..., np.newaxis], lat_sin[..., np.newaxis], lon_cos[..., np.newaxis], lon_sin[..., np.newaxis]
"""
#########################################################################################################################
# Denormalizer
def denormalize_data(data, norm_values, norm_strat='pct'):
"""
    De-normalize data that was normalized with one of the following strategies:
    - mean_std: inverse of (x - mean) / std
    - pct: inverse of (x - p1) / (p99 - p1)
    - min_max: inverse of (x - min) / (max - min)
    Args:
    - data (np.array): the normalized data
    - norm_values (dict): the normalization statistics
    - norm_strat (str): the normalization strategy
    Returns:
    - data (np.array): the de-normalized data
"""
    if norm_strat == 'mean_std':
        mean, std = norm_values['mean'], norm_values['std']
        # Inverse of (x - mean) / std
        data = data * std + mean
    elif norm_strat == 'pct':
        p1, p99 = norm_values['p1'], norm_values['p99']
        # Inverse of (x - p1) / (p99 - p1)
        data = data * (p99 - p1) + p1
    elif norm_strat == 'min_max':
        min_val, max_val = norm_values['min'], norm_values['max']
        # Inverse of (x - min) / (max - min)
        data = data * (max_val - min_val) + min_val
    else:
        raise ValueError(f'De-normalization strategy `{norm_strat}` is not valid.')
return data
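# Example: recover a physical AGBD value from a pct-normalized one, using the GEDI
# statistics defined above:
#   denormalize_data(0.5, norm_values['GEDI']['agbd'], 'pct')
#   = 0.5 * (163.46234 - 0.9703503) + 0.9703503 ~ 82.22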
def denormalize_bands(bands_data, norm_values, order, norm_strat='pct'):
"""
    De-normalize channel-first bands data using the per-band statistics and strategy.
    Args:
    - bands_data (np.array): the bands data to de-normalize, of shape (C, H, W)
    - norm_values (dict): the per-band normalization statistics
    - order (list): the order of the bands along the channel axis
    - norm_strat (str): the normalization strategy
    Returns:
    - bands_data (np.array): the de-normalized bands data
    """
    for i, band in enumerate(order):
        band_norm = norm_values[band]
        # Index the channel axis first, matching the (C, H, W) patches this script passes in
        bands_data[i] = denormalize_data(bands_data[i], band_norm, norm_strat)
return bands_data
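# Minimal usage sketch (shapes assumed): de-normalize a channel-first ALOS patch back
# to backscatter values with the statistics defined above.
#   alos = np.random.rand(2, 15, 15).astype('float32')  # normalized (HH, HV) channels
#   alos = denormalize_bands(alos, norm_values['ALOS_bands'], ['HH', 'HV'])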
"""
def decode_lc(encoded_lc, mode='cos'):
# Encode the LC classes with sin/cosine values and scale the data to [0,1]
if mode == 'cos':
lc = 100 * np.arccos(2 * encoded_lc - 1) / (2 * np.pi)
elif mode == 'sin':
lc = 100 * np.arcsin(2 * encoded_lc - 1) / (2 * np.pi)
else:
raise ValueError(f'Mode `{mode}` is not valid.')
return lc
"""
def recover_lc_map(lc_cos, lc_sin):
# Convert lc_cos and lc_sin back to the range of the original sin and cos values
lc_cos = 2 * lc_cos - 1
lc_sin = 2 * lc_sin - 1
    # arccos only returns angles in [0, pi]; use the sine channel to disambiguate
    # between the two candidate angles theta and 2*pi - theta
    theta_cos = np.arccos(lc_cos)
    sin_theta_cos = np.sin(theta_cos)
    check = np.isclose(sin_theta_cos, lc_sin, atol=1e-8)
    theta = np.where(check, theta_cos, 2 * np.pi - theta_cos)
    # Convert the angle theta back to the land-cover map; decoded values that are not
    # a multiple of 10 are shifted by 100, undoing the wrap-around of classes above
    # 100 in the encoding
    lc_map = np.round((theta / (2 * np.pi)) * 100)
    lc_map = np.where(lc_map % 10 != 0, lc_map + 100, lc_map)
    return lc_map
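# Round-trip sketch, assuming the encoding shown in decode_lc above
# (class c stored as cos/sin of 2*pi*c/100, rescaled to [0, 1]), e.g. for c = 40:
#   enc_cos = (np.cos(2 * np.pi * 40 / 100) + 1) / 2
#   enc_sin = (np.sin(2 * np.pi * 40 / 100) + 1) / 2
#   recover_lc_map(enc_cos, enc_sin)  # -> 40.0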
class NewDataset(datasets.GeneratorBasedBuilder):
def __init__(self, *args, additional_features=[], normalize_data=True, patch_size=15, **kwargs):
self.inner_dataset_kwargs = kwargs
self._is_streaming = False
self.patch_size = patch_size
self.normalize_data = normalize_data
self.additional_features = additional_features
super().__init__(*args, **kwargs)
VERSION = datasets.Version("1.1.0")
BUILDER_CONFIGS = [
datasets.BuilderConfig(name="default", version=VERSION, description="Normalized data"),
datasets.BuilderConfig(name="unnormalized", version=VERSION, description="Unnormalized data"),
]
DEFAULT_CONFIG_NAME = "default"
def as_streaming_dataset(self, split=None, base_path=None):
self._is_streaming = True
return super().as_streaming_dataset(split=split, base_path=base_path)
def _info(self):
all_features = {
'input': datasets.Sequence(datasets.Sequence(datasets.Sequence(datasets.Value('float32')))),
'label': Value('float32')
}
for feat in self.additional_features:
all_features[feat] = feature_dtype[feat]
features = datasets.Features(all_features)
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=features,
homepage=_HOMEPAGE,
license=_LICENSE,
citation=_CITATION,
)
def _split_generators(self, dl_manager):
self.original_dataset = datasets.load_dataset("prs-eth/AGBD_raw", streaming=self._is_streaming)
return [
datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"split": "train"}),
datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={"split": "val"}),
datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"split": "test"}),
]
    def _generate_examples(self, split):
        for i, d in enumerate(self.original_dataset[split]):
            patch = np.asarray(d["input"])
            if not self.normalize_data:
                # Undo the normalization of every channel group
                patch[:12] = denormalize_bands(patch[:12], norm_values['S2_bands'],
                                               ['B01', 'B02', 'B03', 'B04', 'B05', 'B06',
                                                'B07', 'B08', 'B8A', 'B09', 'B11', 'B12'])
                patch[12:14] = denormalize_bands(patch[12:14], norm_values['ALOS_bands'], ['HH', 'HV'])
                patch[14] = denormalize_data(patch[14], norm_values['CH']['ch'])
                patch[15] = denormalize_data(patch[15], norm_values['CH']['std'])
                # Recover the land-cover class map from its sin/cos encoding
                lc_cos, lc_sin = patch[16], patch[17]
                lc = recover_lc_map(lc_cos, lc_sin)
                patch[16] = lc
                patch[17] = lc
                # Channel 18 is stored scaled to [0, 1]; bring it back to percent
                patch[18] = patch[18] * 100
                patch[19] = denormalize_data(patch[19], norm_values['DEM'])
            # Encode the footprint's coordinates into four sin/cos channels
            lat, lon = d["metadata"]["lat"], d["metadata"]["lon"]
            latlon_patch = np.asarray(encode_coords(lat, lon, (self.patch_size, self.patch_size)))
            # Center-crop the patch to the requested spatial size
            start_x = (patch.shape[1] - self.patch_size) // 2
            start_y = (patch.shape[2] - self.patch_size) // 2
            patch = patch[:, start_x:start_x + self.patch_size, start_y:start_y + self.patch_size]
            # Insert the coordinate channels after the 12 Sentinel-2 bands
            patch = np.concatenate([patch[:12], latlon_patch, patch[12:]], 0)
            data = {'input': patch, 'label': d["label"]}
            for feat in self.additional_features:
                data[feat] = d["metadata"][feat]
            yield i, data
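# Usage sketch (the repo id 'prs-eth/AGBD' is assumed from this file's location):
#   import datasets
#   ds = datasets.load_dataset('prs-eth/AGBD', 'default', trust_remote_code=True,
#                              additional_features=['lat', 'lon'])
#   sample = next(iter(ds['train']))  # {'input': patch, 'label': agbd, 'lat': ..., 'lon': ...}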