# NDVI_PERG / app.py
import os
from datetime import datetime
import ee
import json
import geemap
import numpy as np
import geemap.foliumap as gee_folium
import leafmap.foliumap as leaf_folium
import streamlit as st
import pandas as pd
import geopandas as gpd
from shapely.ops import transform
from functools import reduce
import plotly.express as px
import branca.colormap as cm
import folium
import pyproj
from io import StringIO, BytesIO
import requests
import kml2geojson
print(geemap.__version__)
print(folium.__version__)
print(geemap.Report())
st.set_page_config(layout="wide")
m = st.markdown(
"""
<style>
div.stButton > button:first-child {
background-color: #006400;
color:#ffffff;
}
</style>""",
unsafe_allow_html=True,
)
# Logo
st.write(
f"""
<div style="display: flex; justify-content: space-between; align-items: center;">
<img src="https://huggingface.co/spaces/SustainabilityLabIITGN/NDVI_PERG/resolve/main/Final_IITGN-Logo-symmetric-Color.png" style="width: 10%; margin-right: auto;">
<img src="https://huggingface.co/spaces/SustainabilityLabIITGN/NDVI_PERG/resolve/main/IFS.jpg" style="width: 10%; margin-left: auto;">
</div>
""",
unsafe_allow_html=True,
)
# Title
# make title in center
st.markdown(
f"""
<h1 style="text-align: center;">Vrinda (वृन्दा): Interactive Vegetation Index Analyzer</h1>
""",
unsafe_allow_html=True,
)
############################################
# Hyperparameters
############################################
st.write("<h2><div style='text-align: center;'>User Inputs</div></h2>", unsafe_allow_html=True)
# Input: GeoJSON/KML file
file_url = st.query_params.get("file_url", None)
if file_url is None:
    file_url = st.file_uploader("Upload KML/GeoJSON file", type=["geojson", "kml", "shp"])
def show_credits():
    # Add credits
    st.write(
        """
        <div style="display: flex; justify-content: center; align-items: center; margin-top: 20px;">
        <p style="text-align: left;">This tool is developed by <a href="https://sustainability-lab.github.io/">Sustainability Lab</a>, <a href="https://www.iitgn.ac.in/">IIT Gandhinagar</a> and supported by <a href="https://forests.gujarat.gov.in/">Gujarat Forest Department</a></p>""",
        unsafe_allow_html=True,
    )


if file_url is None:
    st.warning(
        "Please provide a KML or GeoJSON URL as a query parameter, e.g., `https://sustainabilitylabiitgn-ndvi-perg.hf.space?file_url=<your_file_url>` or upload a file."
    )
    show_credits()
    st.stop()
with st.expander("Advanced Settings"):
    st.write("Select the vegetation indices to calculate:")
    all_veg_indices = ["NDVI", "EVI", "EVI2"]
    formulas = {
        "NDVI": r"$\frac{NIR - Red}{NIR + Red}$",
        "EVI": r"$G \times \frac{NIR - Red}{NIR + C1 \times Red - C2 \times Blue + L}$",
        "EVI2": r"$G \times \frac{NIR - Red}{NIR + L + C \times Red}$",
    }
    defaults = [True, False, False]
    veg_indices = []
    for veg_index, default in zip(all_veg_indices, defaults):
        if st.checkbox(f"{veg_index} = {formulas[veg_index]}", value=default):
            veg_indices.append(veg_index)

    st.write("Select the parameters for the EVI/EVI2 calculation (defaults are as per EVI's Wikipedia page)")
    cols = st.columns(5)
    evi_vars = {}
    for col, name, default in zip(cols, ["G", "C1", "C2", "L", "C"], [2.5, 6, 7.5, 1, 2.4]):
        value = col.number_input(f"{name}", value=default)
        evi_vars[name] = value
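    # With the defaults above (G=2.5, C1=6, C2=7.5, L=1 for EVI and G=2.5, L=1, C=2.4 for EVI2),
    # these are the commonly used coefficient values; see add_indices() below for the band math.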
############################################
# Functions
############################################
# Load the input geometry from an uploaded file or a URL (handles Google Drive links, and both KML and GeoJSON content)
def get_gdf_from_file_url(file_url):
    if isinstance(file_url, str):
        if file_url.startswith("https://drive.google.com/file/d/"):
            ID = file_url.replace("https://drive.google.com/file/d/", "").split("/")[0]
            file_url = f"https://drive.google.com/uc?id={ID}"
        elif file_url.startswith("https://drive.google.com/open?id="):
            ID = file_url.replace("https://drive.google.com/open?id=", "")
            file_url = f"https://drive.google.com/uc?id={ID}"
        response = requests.get(file_url)
        bytes_data = BytesIO(response.content)
        string_data = response.text
    else:
        bytes_data = BytesIO(file_url.getvalue())
        string_data = file_url.getvalue().decode("utf-8")

    if string_data.startswith("<?xml"):
        geojson = kml2geojson.convert(bytes_data)
        features = geojson[0]["features"]
        epsg = 4326
        input_gdf = gpd.GeoDataFrame.from_features(features, crs=f"EPSG:{epsg}")
    else:
        input_gdf = gpd.read_file(bytes_data)
    return input_gdf
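# For example, a share link like
#   https://drive.google.com/file/d/<ID>/view
# is rewritten above to the direct-download form
#   https://drive.google.com/uc?id=<ID>
# before being fetched with requests.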
# Find the best-suited statewise EPSG code for the given geometry
def find_best_epsg(geometry):
    if geometry.geom_type == "Polygon":
        centroid = geometry.centroid
    else:
        st.error("Geometry is not a Polygon!")
        st.stop()
    common_epsg_codes = [
        7756,  # Andhra Pradesh
        7757,  # Arunachal Pradesh
        7758,  # Assam
        7759,  # Bihar
        7760,  # Delhi
        7761,  # Gujarat
        7762,  # Haryana
        7763,  # Himachal Pradesh
        7764,  # Jammu and Kashmir
        7765,  # Jharkhand
        7766,  # Madhya Pradesh
        7767,  # Maharashtra
        7768,  # Manipur
        7769,  # Meghalaya
        7770,  # Nagaland
        7772,  # Orissa
        7773,  # Punjab
        7774,  # Rajasthan
        7775,  # Uttar Pradesh
        7776,  # Uttaranchal
        7777,  # A&N
        7778,  # Chhattisgarh
        7779,  # Goa
        7780,  # Karnataka
        7781,  # Kerala
        7782,  # Lakshadweep
        7783,  # Mizoram
        7784,  # Sikkim
        7785,  # Tamil Nadu
        7786,  # Tripura
        7787,  # West Bengal
        7771,  # NE India
        7755,  # India
    ]
    for epsg in common_epsg_codes:
        crs = pyproj.CRS.from_epsg(epsg)
        area_of_use = crs.area_of_use.bounds  # Get the bounding box of the area of use
        # Check if the centroid of the polygon lies within the bounds of the CRS
        if (area_of_use[0] <= centroid.x <= area_of_use[2]) and (area_of_use[1] <= centroid.y <= area_of_use[3]):
            return epsg  # Return the best suitable EPSG code
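# The loop above relies on pyproj's area-of-use metadata: the first state-level CRS whose bounding
# box contains the polygon centroid is returned, with the pan-India codes (7771 NE India, 7755 India)
# listed last as fallbacks.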
def daterange_str_to_dates(daterange_str):
    start_date, end_date = daterange_str.split("-")
    start_date = pd.to_datetime(start_date)
    end_date = pd.to_datetime(end_date)
    return start_date, end_date


def daterange_dates_to_str(start_date, end_date):
    return f"{start_date.strftime('%Y/%m/%d')}-{end_date.strftime('%Y/%m/%d')}"


def daterange_str_to_year(daterange_str):
    start_date, _ = daterange_str.split("-")
    year = pd.to_datetime(start_date).year
    return year


def shape_3d_to_2d(shape):
    if shape.has_z:
        return transform(lambda x, y, z: (x, y), shape)
    else:
        return shape


def preprocess_gdf(gdf):
    gdf["geometry"] = gdf["geometry"].apply(shape_3d_to_2d)
    gdf["geometry"] = gdf.buffer(0)  # Fixes some invalid geometries
    return gdf


def to_best_crs(gdf):
    best_epsg_code = find_best_epsg(gdf["geometry"].iloc[0])
    gdf = gdf.to_crs(epsg=best_epsg_code)
    return gdf


def is_valid_polygon(geometry_gdf):
    geometry = geometry_gdf.geometry.item()
    return (geometry.type == "Polygon") and (not geometry.is_empty)
def add_geometry_to_maps(map_list, opacity=0.0):
    for m in map_list:
        m.add_gdf(
            buffer_geometry_gdf,
            layer_name="Geometry Buffer",
            style_function=lambda x: {"color": "red", "fillOpacity": opacity, "fillColor": "red"},
        )
        m.add_gdf(
            geometry_gdf,
            layer_name="Geometry",
            style_function=lambda x: {"color": "blue", "fillOpacity": opacity, "fillColor": "blue"},
        )
def get_dem_slope_maps(buffer_ee_geometry):
    # Create the map for DEM
    dem_map = gee_folium.Map(controls={"scale": "bottomleft"})
    dem_map.add_tile_layer(
        wayback_mapping[latest_date], name=f"Esri Wayback - {latest_date.replace('-', '/')}", attribution="Esri"
    )
    dem_layer = ee.Image("USGS/SRTMGL1_003")
    # Set the target resolution to 10 meters
    target_resolution = 10
    dem_layer = (
        dem_layer.resample("bilinear").reproject(crs="EPSG:4326", scale=target_resolution).clip(buffer_ee_geometry)
    )

    # Generate contour lines using elevation thresholds
    terrain = ee.Algorithms.Terrain(dem_layer)
    contour_interval = 1
    contours = (
        terrain.select("elevation").subtract(terrain.select("elevation").mod(contour_interval)).rename("contours")
    )
    # Calculate the minimum and maximum values
    stats = contours.reduceRegion(reducer=ee.Reducer.minMax(), scale=10, maxPixels=1e13)
    max_value = stats.get("contours_max").getInfo()
    min_value = stats.get("contours_min").getInfo()
    vis_params = {"min": min_value, "max": max_value, "palette": ["blue", "green", "yellow", "red"]}
    dem_map.addLayer(contours, vis_params, "Contours")

    # Create a colormap
    cmap = cm.LinearColormap(colors=vis_params["palette"], vmin=vis_params["min"], vmax=vis_params["max"])
    tick_size = int((max_value - min_value) / 4)
    dem_map.add_legend(
        title="Elevation (m)",
        legend_dict={
            "{}-{} m".format(min_value, min_value + tick_size): "#0000FF",
            "{}-{} m".format(min_value + tick_size, min_value + 2 * tick_size): "#00FF00",
            "{}-{} m".format(min_value + 2 * tick_size, min_value + 3 * tick_size): "#FFFF00",
            "{}-{} m".format(min_value + 3 * tick_size, max_value): "#FF0000",
        },
        position="bottomright",
        draggable=False,
    )

    # Create the map for Slope
    slope_map = gee_folium.Map(controls={"scale": "bottomleft"})
    slope_map.add_tile_layer(
        wayback_mapping[latest_date], name=f"Esri Wayback - {latest_date.replace('-', '/')}", attribution="Esri"
    )
    # Calculate slope from the DEM
    slope_layer = (
        ee.Terrain.slope(
            ee.Image("USGS/SRTMGL1_003").resample("bilinear").reproject(crs="EPSG:4326", scale=target_resolution)
        )
        .clip(buffer_ee_geometry)
        .rename("slope")
    )
    # Calculate the minimum and maximum values
    stats = slope_layer.reduceRegion(reducer=ee.Reducer.minMax(), scale=10, maxPixels=1e13)
    max_value = int(stats.get("slope_max").getInfo())
    min_value = int(stats.get("slope_min").getInfo())
    vis_params = {"min": min_value, "max": max_value, "palette": ["blue", "green", "yellow", "red"]}
    slope_map.addLayer(slope_layer, vis_params, "Slope Layer")

    # Create a colormap
    colormap = cm.LinearColormap(colors=vis_params["palette"], vmin=vis_params["min"], vmax=vis_params["max"])
    tick_size = int((max_value - min_value) / 4)
    slope_map.add_legend(
        title="Slope (degrees)",
        legend_dict={
            "{}-{} deg".format(min_value, min_value + tick_size): "#0000FF",
            "{}-{} deg".format(min_value + tick_size, min_value + 2 * tick_size): "#00FF00",
            "{}-{} deg".format(min_value + 2 * tick_size, min_value + 3 * tick_size): "#FFFF00",
            "{}-{} deg".format(min_value + 3 * tick_size, max_value): "#FF0000",
        },
        position="bottomright",
        draggable=False,
    )
    return dem_map, slope_map
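# Note: USGS/SRTMGL1_003 is a ~30 m DEM; the bilinear resample/reproject above only regrids it to
# the 10 m target_resolution for display and does not add real detail.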
def add_indices(image, nir_band, red_band, blue_band):
    # Add a negated cloud-probability band (MSK_CLDPRB * -1)
    neg_cloud = image.select("MSK_CLDPRB").multiply(-1).rename("Neg_MSK_CLDPRB")
    # Scale Sentinel-2 SR digital numbers (0-10000) to reflectance (0-1)
    nir = image.select(nir_band).divide(10000)
    red = image.select(red_band).divide(10000)
    blue = image.select(blue_band).divide(10000)
    numerator = nir.subtract(red)
    ndvi = numerator.divide(nir.add(red)).rename("NDVI").clamp(-1, 1)
    # EVI formula taken from: https://en.wikipedia.org/wiki/Enhanced_vegetation_index
    denominator = nir.add(red.multiply(evi_vars["C1"])).subtract(blue.multiply(evi_vars["C2"])).add(evi_vars["L"])
    evi = numerator.divide(denominator).multiply(evi_vars["G"]).rename("EVI").clamp(-1, 1)
    evi2 = (
        numerator.divide(nir.add(evi_vars["L"]).add(red.multiply(evi_vars["C"])))
        .multiply(evi_vars["G"])
        .rename("EVI2")
        .clamp(-1, 1)
    )
    return image.addBands([neg_cloud, ndvi, evi, evi2])
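# With the default parameters this reduces to the familiar forms, e.g.
#   EVI  = 2.5 * (NIR - Red) / (NIR + 6 * Red - 7.5 * Blue + 1)
#   EVI2 = 2.5 * (NIR - Red) / (NIR + 1 + 2.4 * Red)
# and all three indices are clamped to [-1, 1].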
def get_histogram(image, geometry, bins):
    # Get image values as a list
    values = image.reduceRegion(
        reducer=ee.Reducer.toList(),
        geometry=geometry,
        scale=10,
        maxPixels=1e13,
    ).get("NDVI")
    # Convert values to a NumPy array
    values_array = np.array(values.getInfo())
    # Compute the histogram on the given bins
    hist, bin_edges = np.histogram(values_array, bins=bins)
    return hist, bin_edges
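# Note: reduceRegion with ee.Reducer.toList() pulls every NDVI pixel value inside the geometry to
# the client at 10 m scale and bins it locally with NumPy; this is fine for field-scale polygons
# but may be slow or hit request limits for very large areas.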
def process_date(daterange, satellite, veg_indices):
    start_date, end_date = daterange
    daterange_str = daterange_dates_to_str(start_date, end_date)
    prefix = f"Processing {satellite} - {daterange_str}"
    try:
        attrs = satellites[satellite]
        collection = attrs["collection"]
        collection = collection.filterBounds(buffer_ee_geometry)
        collection = collection.filterDate(start_date, end_date)
        bucket = {}
        for veg_index in veg_indices:
            mosaic_veg_index = collection.qualityMosaic(veg_index)
            fc = geemap.zonal_stats(
                mosaic_veg_index, ee_feature_collection, scale=attrs["scale"], return_fc=True
            ).getInfo()
            mean_veg_index = fc["features"][0]["properties"][veg_index]
            bucket[veg_index] = mean_veg_index
            fc = geemap.zonal_stats(
                mosaic_veg_index, buffer_ee_feature_collection, scale=attrs["scale"], return_fc=True
            ).getInfo()
            buffer_mean_veg_index = fc["features"][0]["properties"][veg_index]
            bucket[f"{veg_index}_buffer"] = buffer_mean_veg_index
            bucket[f"{veg_index}_ratio"] = mean_veg_index / buffer_mean_veg_index
            bucket[f"mosaic_{veg_index}"] = mosaic_veg_index

        # Visualization mosaics: max-NDVI, median, and least-cloudy
        bucket["mosaic_visual_max_ndvi"] = collection.qualityMosaic("NDVI")
        bucket["mosaic_visual_median"] = collection.median()
        bucket["image_visual_least_cloud"] = collection.sort("CLOUDY_PIXEL_PERCENTAGE").first()
        if satellite == "COPERNICUS/S2_SR_HARMONIZED":
            cloud_mask_probability = fc["features"][0]["properties"]["MSK_CLDPRB"] / 100
        else:
            cloud_mask_probability = None
        bucket["Cloud (0 to 1)"] = cloud_mask_probability
        result_df.loc[daterange_str, list(bucket.keys())] = list(bucket.values())
        count = collection.size().getInfo()
        suffix = f" - Processed {count} images"
        write_info(f"{prefix}{suffix}")
    except Exception as e:
        print(e)
        suffix = " - Imagery not available"
        write_info(f"{prefix}{suffix}")
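# For each year-window, process_date stores in result_df: the mean index inside the polygon, the
# mean in the surrounding buffer ring, their ratio (a rough "greener than surroundings" signal),
# the per-index quality mosaics, the visualization mosaics, and a cloud-probability estimate,
# all keyed by the date-range string.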
def write_info(info):
    st.write(f"<span style='color:#006400;'>{info}</span>", unsafe_allow_html=True)
############################################
# One time setup
############################################
def one_time_setup():
    credentials_path = os.path.expanduser("~/.config/earthengine/credentials")
    if os.path.exists(credentials_path):
        pass  # Earth Engine credentials already exist
    elif "EE" in os.environ:  # write the credentials to the file
        ee_credentials = os.environ.get("EE")
        os.makedirs(os.path.dirname(credentials_path), exist_ok=True)
        with open(credentials_path, "w") as f:
            f.write(ee_credentials)
    else:
        raise ValueError(
            f"Earth Engine credentials not found at {credentials_path} or in the environment variable 'EE'"
        )

    ee.Initialize()
    satellites = {
        "COPERNICUS/S2_SR_HARMONIZED": {
            "scale": 10,
            "collection": ee.ImageCollection("COPERNICUS/S2_SR_HARMONIZED")
            .select(
                ["B2", "B4", "B8", "MSK_CLDPRB", "TCI_R", "TCI_G", "TCI_B"],
                ["Blue", "Red", "NIR", "MSK_CLDPRB", "R", "G", "B"],
            )
            .map(lambda image: add_indices(image, nir_band="NIR", red_band="Red", blue_band="Blue")),
        },
    }
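    # The .select() call above renames the Sentinel-2 bands B2/B4/B8 to Blue/Red/NIR (and the
    # TCI_* true-color bands to R/G/B) so that add_indices() can work with band names rather than
    # band numbers.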
    st.session_state.satellites = satellites
    with open("wayback_imagery.json") as f:
        st.session_state.wayback_mapping = json.load(f)


if "one_time_setup_done" not in st.session_state:
    one_time_setup()
    st.session_state.one_time_setup_done = True

satellites = st.session_state.satellites
wayback_mapping = st.session_state.wayback_mapping
############################################
# App
############################################
# Input: Satellite Sources
st.markdown(f"Satellite source: `{list(satellites.keys())[0]}`")
satellite_selected = {}
for satellite in satellites:
    satellite_selected[satellite] = satellite
# Date range input
max_year = datetime.now().year
jan_1 = pd.to_datetime(f"{max_year}/01/01", format="%Y/%m/%d")
dec_31 = pd.to_datetime(f"{max_year}/12/31", format="%Y/%m/%d")
nov_15 = pd.to_datetime(f"{max_year}/11/15", format="%Y/%m/%d")
dec_15 = pd.to_datetime(f"{max_year}/12/15", format="%Y/%m/%d")
input_daterange = st.date_input(
"Date Range (Ignore year. App will compute indices for this date range in each year starting from \"Minimum Year\" to \"Maximum Year\")", (nov_15, dec_15), jan_1, dec_31
)
cols = st.columns(2)
with cols[0]:
    min_year = int(st.number_input("Minimum Year", value=2019, min_value=2015, step=1))
with cols[1]:
    max_year = int(st.number_input("Maximum Year", value=max_year, min_value=2015, step=1))
buffer = st.number_input("Buffer (m)", value=50, min_value=0, step=1)
input_gdf = get_gdf_from_file_url(file_url)
input_gdf = preprocess_gdf(input_gdf)
if len(input_gdf) > 1:
    st.warning("Only the first valid polygon in the uploaded file will be processed; all other geometries will be ignored.")

# input_geometry_idx = st.selectbox("Select the geometry", input_gdf.index, format_func=format_fn)
for i in range(len(input_gdf)):
    geometry_gdf = input_gdf[input_gdf.index == i]
    if is_valid_polygon(geometry_gdf):
        break
else:
    st.error("No polygon found in the uploaded file. Please check the file.")
    st.stop()

geometry_gdf = to_best_crs(geometry_gdf)
outer_geometry_gdf = geometry_gdf.copy()
outer_geometry_gdf["geometry"] = outer_geometry_gdf["geometry"].buffer(buffer)
buffer_geometry_gdf = (
    outer_geometry_gdf.difference(geometry_gdf).reset_index().drop(columns="index")
)  # reset_index() turns the GeoSeries back into a GeoDataFrame
buffer_geometry_gdf["Name"] = "Buffer"
# Derived Inputs
ee_geometry = ee.Geometry(geometry_gdf.to_crs(4326).geometry.item().__geo_interface__)
ee_feature_collection = ee.FeatureCollection(ee_geometry)
buffer_ee_geometry = ee.Geometry(buffer_geometry_gdf.to_crs(4326).geometry.item().__geo_interface__)
buffer_ee_feature_collection = ee.FeatureCollection(buffer_ee_geometry)
outer_ee_geometry = ee.Geometry(outer_geometry_gdf.to_crs(4326).geometry.item().__geo_interface__)
outer_ee_feature_collection = ee.FeatureCollection(outer_ee_geometry)
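# Three related regions are used below: ee_geometry (the input polygon), buffer_ee_geometry (the
# ring of width `buffer` metres around it), and outer_ee_geometry (polygon plus ring), each wrapped
# as an ee.Geometry / ee.FeatureCollection for zonal statistics and clipping.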
# visualize the geometry
m = leaf_folium.Map()
keys = list(wayback_mapping.keys())
latest_date = sorted(keys, key=lambda x: pd.to_datetime(x))[-1]
m.add_tile_layer(
wayback_mapping[latest_date], name=f"Esri Wayback - {latest_date.replace('-', '/')}", attribution="Esri"
)
# m.add_layer(buffer_ee_feature_collection)
add_geometry_to_maps([m], opacity=0.3)
write_info(
f"""
<div style="text-align: center;">
Latest Esri Imagery - {latest_date.replace('-', '/')}
</div>
"""
)
m.to_streamlit()
# Generate stats
stats_df = pd.DataFrame(
{
"Area (m^2)": geometry_gdf.area.item(),
"Perimeter (m)": geometry_gdf.length.item(),
"Points": json.loads(geometry_gdf.to_crs(4326).to_json())["features"][0]["geometry"]["coordinates"],
}
)
st.write("<h3><div style='text-align: center;'>Geometry Metrics</div></h3>", unsafe_allow_html=True)
# st.markdown(
# f"""| Metric | Value |
# | --- | --- |
# | Area (m^2) | {stats_df['Area (m^2)'].item():.2f} m^2 = {stats_df['Area (m^2)'].item()/10000:.2f} ha |
# | Perimeter (m) | {stats_df['Perimeter (m)'].item():.2f} m |
# """
# )
st.markdown(
f"""
<div style="display: flex; justify-content: center;">
<table style="border-collapse: collapse; width: 75%; text-align: center;">
<tr>
<th style="border: 1px solid black; padding: 8px;">Metric</th>
<th style="border: 1px solid black; padding: 8px;">Value</th>
</tr>
<tr>
<td style="border: 1px solid black; padding: 8px;">Area</td>
<td style="border: 1px solid black; padding: 8px;">{stats_df['Area (m^2)'].item()/10000:.2f} ha</td>
</tr>
<tr>
<td style="border: 1px solid black; padding: 8px;">Perimeter</td>
<td style="border: 1px solid black; padding: 8px;">{stats_df['Perimeter (m)'].item():.2f} m</td>
</tr>
</table>
</div>
""",
unsafe_allow_html=True
)
stats_csv = stats_df.to_csv(index=False)
st.download_button("Download Geometry Metrics", stats_csv, "geometry_metrics.csv", "text/csv", use_container_width=True)
# Submit
submit = st.button("Calculate Vegetation Indices", use_container_width=True)
if submit:
    st.write("<h2><div style='text-align: center;'>Results</div></h2>", unsafe_allow_html=True)
    if not any(satellite_selected.values()):
        st.error("Please select at least one satellite source")
        st.stop()

    # Create one (start, end) date range per year using the selected day/month window
    start_day = input_daterange[0].day
    start_month = input_daterange[0].month
    end_day = input_daterange[1].day
    end_month = input_daterange[1].month
    dates = []
    for year in range(min_year, max_year + 1):
        start_date = pd.to_datetime(f"{year}-{start_month:02d}-{start_day:02d}")
        end_date = pd.to_datetime(f"{year}-{end_month:02d}-{end_day:02d}")
        dates.append((start_date, end_date))
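    # For example, with the default 15 Nov - 15 Dec window, `dates` holds one pair per year:
    # (2019-11-15, 2019-12-15), (2020-11-15, 2020-12-15), and so on up to the maximum year.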
    result_df = pd.DataFrame()
    for satellite, attrs in satellites.items():
        if not satellite_selected[satellite]:
            continue
        with st.spinner(f"Processing {satellite} ..."):
            progress_bar = st.progress(0)
            for i, daterange in enumerate(dates):
                process_date(daterange, satellite, veg_indices)
                progress_bar.progress((i + 1) / len(dates))

    st.session_state.result = result_df
print("Printing result...")
if "result" in st.session_state:
result_df = st.session_state.result
print(result_df.columns)
# drop rows with all NaN values
result_df = result_df.dropna(how="all")
# drop columns with all NaN values
result_df = result_df.dropna(axis=1, how="all")
print(result_df.columns)
print(result_df.head(2))
# df.reset_index(inplace=True)
# df.index = pd.to_datetime(df["index"], format="%Y-%m")
for column in result_df.columns:
result_df[column] = pd.to_numeric(result_df[column], errors="ignore")
df_numeric = result_df.select_dtypes(include=["float64"])
st.write(df_numeric)
df_numeric_csv = df_numeric.to_csv(index=True)
st.download_button(
"Download Time Series Data", df_numeric_csv, "vegetation_indices.csv", "text/csv", use_container_width=True
)
df_numeric.index = [daterange_str_to_year(daterange) for daterange in df_numeric.index]
for veg_index in veg_indices:
fig = px.line(df_numeric, y=[veg_index, f"{veg_index}_buffer", f"{veg_index}_ratio"], markers=True)
fig.update_layout(xaxis=dict(tickvals=df_numeric.index, ticktext=df_numeric.index))
st.plotly_chart(fig)
st.write(
"<h3><div style='text-align: center;'>DEM and Slope from SRTM at 30m resolution</div></h3>",
unsafe_allow_html=True,
)
cols = st.columns(2)
dem_map, slope_map = get_dem_slope_maps(ee.Geometry(geometry_gdf.to_crs(4326).geometry.item().__geo_interface__))
for col, param_map, title in zip(cols, [dem_map, slope_map], ["DEM Map", "Slope Map"]):
with col:
param_map.add_gdf(
geometry_gdf,
layer_name="Geometry",
style_function=lambda x: {"color": "blue", "fillOpacity": 0.0, "fillColor": "blue"},
)
write_info(f"""<div style="text-align: center;">{title}</div>""")
param_map.addLayerControl()
param_map.to_streamlit()
st.write(
"<h3><div style='text-align: center;'>Visual Comparison between Two Years</div></h3>", unsafe_allow_html=True
)
cols = st.columns(2)
with cols[0]:
year_1 = st.selectbox("Year 1", result_df.index, index=0, format_func=lambda x: daterange_str_to_year(x))
with cols[1]:
year_2 = st.selectbox(
"Year 2", result_df.index, index=len(result_df.index) - 1, format_func=lambda x: daterange_str_to_year(x)
)
vis_params = {"min": 0, "max": 1, "palette": ["white", "green"]} # Example visualisation for Sentinel-2
# Create a colormap and name it as NDVI
colormap = cm.LinearColormap(colors=vis_params["palette"], vmin=vis_params["min"], vmax=vis_params["max"])
for veg_index in veg_indices:
st.write(f"<h3><div style='text-align: center;'>{veg_index}</div></h3>", unsafe_allow_html=True)
cols = st.columns(2)
for col, daterange_str in zip(cols, [year_1, year_2]):
mosaic = result_df.loc[daterange_str, f"mosaic_{veg_index}"]
with col:
m = gee_folium.Map()
m.add_tile_layer(wayback_mapping[latest_date], name=f"Esri Wayback - {latest_date.replace('-', '/')}", attribution="Esri")
veg_index_layer = gee_folium.ee_tile_layer(mosaic, {"bands": [veg_index], "min": 0, "max": 1})
if satellite == "COPERNICUS/S2_SR_HARMONIZED":
min_all = 0
max_all = 255
else:
raise ValueError(f"Unknown satellite: {satellite}")
if veg_index=='NDVI':
bins=[-1, 0, 0.1, 0.2, 0.3, 0.4, 0.5, 1]
histogram, bin_edges = get_histogram(mosaic.select(veg_index), ee_geometry, bins)
total_pix = np.sum(histogram)
formatted_histogram = [f"{h*100/total_pix:.2f}" for h in histogram]
print(histogram, bin_edges, bins, formatted_histogram)
m.add_legend(title="NDVI Class/Value",
legend_dict={'<0:Waterbody ({}%)'.format(formatted_histogram[0]): '#0000FF',
'0-0.1: Open ({}%)'.format(formatted_histogram[1]): '#FF0000',
'0.1-0.2: Highly Degraded ({}%)'.format(formatted_histogram[2]):'#FFFF00',
'0.2-0.3: Degraded ({}%)'.format(formatted_histogram[3]): '#FFA500',
'0.3-0.4: Moderately Degraded ({}%)'.format(formatted_histogram[4]): '#00FE00',
'0.4-0.5: Dense ({}%)'.format(formatted_histogram[5]): '#00A400',
'>0.5: Very Dense ({}%)'.format(formatted_histogram[6]): '#006D00',
},
position='bottomright', draggable=False)
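                    # The percentages in the legend above are the share of polygon pixels falling
                    # in each NDVI class, computed from the histogram over ee_geometry.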
                    ndvi_vis_params = {
                        "min": -0.1,
                        "max": 0.6,
                        "palette": ["#0000FF", "#FF0000", "#FFFF00", "#FFA500", "#00FE00", "#00A400", "#006D00"],
                    }
                    m.add_layer(mosaic.select(veg_index).clip(outer_ee_geometry), ndvi_vis_params)
                    # add colorbar
                    # m.add_colorbar(colors=["#000000", "#00FF00"], vmin=0.0, vmax=1.0)
                if veg_index != "NDVI":
                    m.add_layer(mosaic.select(veg_index).clip(outer_ee_geometry), vis_params)
                    m.add_child(colormap)
                add_geometry_to_maps([m])
                m.to_streamlit()

    for name, key in zip(
        ["RGB (Least Cloud Tile Crop)", "RGB (Max NDVI Mosaic)"],
        ["image_visual_least_cloud", "mosaic_visual_max_ndvi"],
    ):
        st.write(f"<h3><div style='text-align: center;'>{name}</div></h3>", unsafe_allow_html=True)
        cols = st.columns(2)
        for col, daterange_str in zip(cols, [year_1, year_2]):
            start_date, end_date = daterange_str_to_dates(daterange_str)
            mid_date = start_date + (end_date - start_date) / 2
            esri_date = min(wayback_mapping.keys(), key=lambda x: abs(pd.to_datetime(x) - mid_date))
            with col:
                m = gee_folium.Map()
                visual_mosaic = result_df.loc[daterange_str, key]
                # visual_layer = gee_folium.ee_tile_layer(mosaic, {"bands": ["R", "G", "B"], "min": min_all, "max": max_all})
                m.add_layer(visual_mosaic.select(["R", "G", "B"]))
                add_geometry_to_maps([m])
                m.to_streamlit()

    st.write("<h3><div style='text-align: center;'>Esri RGB Imagery</div></h3>", unsafe_allow_html=True)
    cols = st.columns(2)
    for col, daterange_str in zip(cols, [year_1, year_2]):
        start_date, end_date = daterange_str_to_dates(daterange_str)
        mid_date = start_date + (end_date - start_date) / 2
        esri_date = min(wayback_mapping.keys(), key=lambda x: abs(pd.to_datetime(x) - mid_date))
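        # esri_date is the Wayback snapshot whose capture date is closest to the midpoint of the
        # selected window, so the basemap roughly matches the period being analysed.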
        with col:
            m = leaf_folium.Map()
            m.add_tile_layer(wayback_mapping[esri_date], name=f"Esri Wayback Imagery - {esri_date}", attribution="Esri")
            add_geometry_to_maps([m])
            write_info(
                f"""
                <div style="text-align: center;">
                    Esri Imagery - {esri_date.replace('-', '/')}
                </div>
                """
            )
            m.to_streamlit()

show_credits()