"""
prep_input.py
This script reads in MERRA2 SLV,LND data in combination with
flux station data together with HLS chips to produce a CSV with
aggregate data values that can be used to train for GPP flux prediction.
Author: Besart Mujeci, Srija Chakraborty, Christopher Phillips
Usage:
python prep_input.py
"""
import os
import shutil
from datetime import datetime, timedelta
from pathlib import Path

import netCDF4 as nc
import numpy as np
import pandas as pd
import rclone
# --- --- ---
def convert_HLS_date(chip_name):
    """
    Extract the acquisition date from an HLS tile name.

    The 4th dot-separated field of an HLS name starts with a 7-character
    timestamp, e.g. "2020100" = year 2020, day-of-year 100.

    Args:
        chip_name (str): basename of an HLS file
            (e.g. "HLS.S30.T10TEK.2020100T190311.v2.0....").
    Returns:
        datetime: acquisition date of the tile.
    """
    hls_date = chip_name.split('.')[3][:7]
    year = int(hls_date[:4])
    day = int(hls_date[4:])
    # Day-of-year is 1-based, so offset from Jan 1 by (day - 1).
    return datetime(year, 1, 1) + timedelta(days=day - 1)
# --- --- ---
# --- --- --- Set up rclone and get chips and merra files
rawdir = ''
merradir = ''
cfg = ""

def _list_remote(config, remote_dir):
    """Return the file names under remote_dir via `rclone ls`.

    `rclone ls` emits one "<size> <name>" row per file; keep only the name.
    """
    result = rclone.with_config(config).run_cmd("ls", extra_args=[f"{remote_dir}/"])
    output_lines = result['out'].decode('utf-8').splitlines()
    return [line.split(maxsplit=1)[1] for line in output_lines if line]

# HLS chip names and MERRA-2 file names available on the remote
file_list = _list_remote(cfg, rawdir)
merras = _list_remote(cfg, merradir)
# --- --- ---
# --- --- --- Set up paths
# Location of station tile list
station_file = './TILED_filtered_flux_sites_2018_2021.csv'
# Location to save the input file
spath = './all_inputs.csv'
# Remote output directory the finished CSV is uploaded to (filled in at run time)
odir = ''
# --- --- ---
# --- --- --- Get station information
# stations maps station ID -> (ID, lon, lat, col3, col4) parsed from the CSV.
# NOTE(review): columns 9/8 are presumably lon/lat — confirm against the CSV header.
stations = {}
with open(station_file, 'r') as fn:
    next(fn, None)  # skip the header row
    for line in fn:
        dummy = line.split(',')
        stations[dummy[1].strip()] = (dummy[1], float(dummy[9]), float(dummy[8]), dummy[3], dummy[4])
# File names of the per-station flux records on local disk
flux_nets = os.listdir("./fluxnets/flux_sites_2018_2021/")
# --- --- ---
# Sort the HLS chips so processing order is deterministic
chips = sorted(file_list)
# Chips skipped for missing/low-quality data (diagnostic only)
skipped = []
# Open the aggregate CSV and emit its header row; data rows are appended in the loop below
out_fn = open(spath, 'w')
out_fn.write('Chip,Station,T2MIN,T2MAX,T2MEAN,TSMDEWMEAN,GWETROOT,LHLAND,SHLAND,SWLAND,PARDFLAND,PRECTOTLAND,GPP')
# And loop over them
# Loop over every chip, building one CSV row per chip
for chip in chips:
    # Pull the chip down locally
    rclone.with_config(cfg).copy(f"{rawdir}/{chip}", f"./{chip}")
    # Parse tile and station identifiers out of the HLS file name
    chip_name = chip.split('/')[-1]
    tile = chip_name.split('.')[2][1:]
    station_name = chip_name.split('.')[6].split("_")[0]
    try:  # Skip tiles for which no station exists
        station = stations[station_name]
    except KeyError:
        print(f"exception - {('station dict indexing', station_name, tile)}")
        skipped.append(('station dict indexing', station_name, tile))
        continue
    # Fixed: was `helpers.convert_HLS_date` but no `helpers` module is imported;
    # the function is defined at the top of this file.
    date = convert_HLS_date(chip_name)
    day_key = date.strftime("%Y%m%d")  # hoisted: used for flux lookup and both MERRA file matches
    # Locate station from tile and pull in the daily reference value
    try:  # Skip tiles for which no station data is available
        # Renamed from `station_file` to avoid shadowing the module-level CSV path
        flux_file = [fluxnet for fluxnet in flux_nets if station[0] in fluxnet][0]
        flux_df = pd.read_csv(f"genai-usra-east/impact/fluxnets/flux_sites_2018_2021/{flux_file}")
    except (IndexError, OSError):
        print(f"exception - {('station exception', station_name, tile)}")
        skipped.append(('station exception', station_name, tile))
        continue
    flux_times = np.array(flux_df.TIMESTAMP, dtype='str')
    flux_gpp = np.array(flux_df.GPP_NT_VUT_REF)
    try:  # Skip if cannot find CO2 data
        quality_flag = np.array(flux_df.NEE_VUT_REF_QC)
        # QC flag is a quality fraction; require at least 0.6 for this day
        if quality_flag[flux_times == day_key][0] >= 0.6:
            co2 = flux_gpp[flux_times == day_key][0]
        else:  # Quality not met, skip
            print(f"co2 quality not met for - {('co2', station_name, tile)}")
            skipped.append(('co2', station_name, tile))
            continue
    except (AttributeError, IndexError, KeyError):
        print(f"co2 quality not met for - {('co2 exception', station_name, tile)}")
        skipped.append(('co2 exception', station_name, tile))
        continue
    # Pull MERRA-2 SLV data for temperature and dew point
    merra_file = [file for file in merras if "slv" in file and day_key in file][0]
    rclone.with_config(cfg).copy(f"{merradir}/{merra_file}", f"./merra/")
    merra_fn = nc.Dataset(f'./merra/{merra_file}')
    # Nearest MERRA-2 grid point to the station (station = (id, lon, lat, ...))
    mlons = merra_fn.variables['lon'][:]
    mlats = merra_fn.variables['lat'][:]
    xind = np.argmin((mlons - station[1])**2)
    yind = np.argmin((mlats - station[2])**2)
    # Daily stats over the time dimension (axis 0) at the station grid cell
    tmax = np.max(merra_fn.variables['T2M'], keepdims=True, axis=0)
    tmin = np.min(merra_fn.variables['T2M'], keepdims=True, axis=0)
    tmean = np.nanmean(merra_fn.variables['T2M'][:, yind, xind])
    tmax = tmax[0, yind, xind]
    tmin = tmin[0, yind, xind]
    tdewmean = np.nanmean(merra_fn.variables['T2MDEW'][:, yind, xind])
    merra_fn.close()  # close the dataset before deleting its file
    shutil.rmtree(Path("./merra"))
    # Pull MERRA-2 LND data for land-surface variables
    merra_file = [file for file in merras if "lnd" in file and day_key in file][0]
    rclone.with_config(cfg).copy(f"{merradir}/{merra_file}", f"./merra/")
    merra_fn = nc.Dataset(f'./merra/{merra_file}')
    # Re-derive the nearest grid point (LND grid may differ from SLV)
    mlons = merra_fn.variables['lon'][:]
    mlats = merra_fn.variables['lat'][:]
    xind = np.argmin((mlons - station[1])**2)
    yind = np.argmin((mlats - station[2])**2)
    # Daily means of the land-surface variables at the station grid cell
    GWETROOT = np.nanmean(merra_fn.variables['GWETROOT'][:, yind, xind])
    LHLAND = np.nanmean(merra_fn.variables['LHLAND'][:, yind, xind])
    SHLAND = np.nanmean(merra_fn.variables['SHLAND'][:, yind, xind])
    PARDFLAND = np.nanmean(merra_fn.variables['PARDFLAND'][:, yind, xind])
    PRECTOTLAND = np.nanmean(merra_fn.variables['PRECTOTLAND'][:, yind, xind])
    SWLAND = np.nanmean(merra_fn.variables['SWLAND'][:, yind, xind])
    merra_fn.close()
    shutil.rmtree(Path("./merra"))
    # NOTE(review): the chip was copied to ./{chip} but this removes ./{tile} —
    # confirm those name the same local path.
    shutil.rmtree(Path(f"./{tile}"))
    # Save chip name, MERRA-2 values, and Ameriflux measurement to data file.
    # Fixed: PARDFLAND/PRECTOTLAND used `:2f` (missing dot), which printed
    # 6 decimal places instead of the 2 used by every other column.
    out_fn.write(f'\n{chip_name},{station[0]},{tmin:.2f},{tmax:.2f},{tmean:.2f},{tdewmean:.2f},{GWETROOT:.2f},{LHLAND:.2f},{SHLAND:.2f},{SWLAND:.2f},{PARDFLAND:.2f},{PRECTOTLAND:.2f},{co2}')
# Close the file
out_fn.close()
# Upload the finished CSV to the remote output directory
rclone.with_config(cfg).copy(f"{spath}", f"{odir}")
print("DONE")