import os from datetime import datetime import ee import json import geemap import numpy as np import geemap.foliumap as gee_folium import leafmap.foliumap as leaf_folium import streamlit as st import pandas as pd import geopandas as gpd from shapely.ops import transform from functools import reduce import plotly.express as px import branca.colormap as cm import folium import pyproj from io import StringIO, BytesIO import requests import kml2geojson print(geemap.__version__) print(folium.__version__) print(geemap.Report()) st.set_page_config(layout="wide") m = st.markdown( """ """, unsafe_allow_html=True, ) # Logo st.write( f"""
""", unsafe_allow_html=True, ) # Title # make title in center st.markdown( f"""

Vrinda (वृन्दा): Interactive Vegetation Index Analyzer

""", unsafe_allow_html=True, ) ############################################ # Hyperparameters ############################################ st.write("

User Inputs

", unsafe_allow_html=True) # Input: GeoJSON/KML file file_url = st.query_params.get("file_url", None) if file_url is None: file_url = st.file_uploader("Upload KML/GeoJSON file", type=["geojson", "kml", "shp"]) def show_credits(): # Add credits st.write( """

This tool is developed by Sustainability Lab, IIT Gandhinagar and supported by Gujarat Forest Department

""", unsafe_allow_html=True, ) if file_url is None: st.warning( "Please provide a KML or GeoJSON URL as a query parameter, e.g., `https://sustainabilitylabiitgn-ndvi-perg.hf.space?file_url=` or upload a file." ) show_credits() st.stop() with st.expander("Advanced Settings"): st.write("Select the vegetation indices to calculate:") all_veg_indices = ["NDVI", "EVI", "EVI2"] formulas = { "NDVI": r"$\frac{NIR - Red}{NIR + Red}$", "EVI": r"$G \times \frac{NIR - Red}{NIR + C1 \times Red - C2 \times Blue + L}$", "EVI2": r"$G \times \frac{NIR - Red}{NIR + L + C \times Red}$", } defaults = [True, False, False] veg_indices = [] for veg_index, default in zip(all_veg_indices, defaults): if st.checkbox(f"{veg_index} = {formulas[veg_index]}", value=default): veg_indices.append(veg_index) st.write("Select the parameters for the EVI/EVI2 calculation (default is as per EVI's Wikipedia page)") cols = st.columns(5) evi_vars = {} for col, name, default in zip(cols, ["G", "C1", "C2", "L", "C"], [2.5, 6, 7.5, 1, 2.4]): value = col.number_input(f"{name}", value=default) evi_vars[name] = value ############################################ # Functions ############################################ # Function of find best suited statewise EPSG code def get_gdf_from_file_url(file_url): if isinstance(file_url, str): if file_url.startswith("https://drive.google.com/file/d/"): ID = file_url.replace("https://drive.google.com/file/d/", "").split("/")[0] file_url = f"https://drive.google.com/uc?id={ID}" elif file_url.startswith("https://drive.google.com/open?id="): ID = file_url.replace("https://drive.google.com/open?id=", "") file_url = f"https://drive.google.com/uc?id={ID}" response = requests.get(file_url) bytes_data = BytesIO(response.content) string_data = response.text else: bytes_data = BytesIO(file_url.getvalue()) string_data = file_url.getvalue().decode("utf-8") if string_data.startswith("{info}", unsafe_allow_html=True) ############################################ # One time setup ############################################ def one_time_setup(): credentials_path = os.path.expanduser("~/.config/earthengine/credentials") if os.path.exists(credentials_path): pass # Earth Engine credentials already exist elif "EE" in os.environ: # write the credentials to the file ee_credentials = os.environ.get("EE") os.makedirs(os.path.dirname(credentials_path), exist_ok=True) with open(credentials_path, "w") as f: f.write(ee_credentials) else: raise ValueError( f"Earth Engine credentials not found at {credentials_path} or in the environment variable 'EE'" ) ee.Initialize() satellites = { "COPERNICUS/S2_SR_HARMONIZED": { "scale": 10, "collection": ee.ImageCollection("COPERNICUS/S2_SR_HARMONIZED") .select( ["B2", "B4", "B8", "MSK_CLDPRB", "TCI_R", "TCI_G", "TCI_B"], ["Blue", "Red", "NIR", "MSK_CLDPRB", "R", "G", "B"], ) .map(lambda image: add_indices(image, nir_band="NIR", red_band="Red", blue_band="Blue")), }, } st.session_state.satellites = satellites with open("wayback_imagery.json") as f: st.session_state.wayback_mapping = json.load(f) if "one_time_setup_done" not in st.session_state: one_time_setup() st.session_state.one_time_setup_done = True satellites = st.session_state.satellites wayback_mapping = st.session_state.wayback_mapping ############################################ # App ############################################ # Input: Satellite Sources st.markdown(f"Satellite source: `{list(satellites.keys())[0]}`") satellite_selected = {} for satellite in satellites: satellite_selected[satellite] = satellite # Date range input max_year = datetime.now().year jan_1 = pd.to_datetime(f"{max_year}/01/01", format="%Y/%m/%d") dec_31 = pd.to_datetime(f"{max_year}/12/31", format="%Y/%m/%d") nov_15 = pd.to_datetime(f"{max_year}/11/15", format="%Y/%m/%d") dec_15 = pd.to_datetime(f"{max_year}/12/15", format="%Y/%m/%d") input_daterange = st.date_input( "Date Range (Ignore year. App will compute indices for this date range in each year starting from \"Minimum Year\" to \"Maximum Year\")", (nov_15, dec_15), jan_1, dec_31 ) cols = st.columns(2) with cols[0]: min_year = int(st.number_input("Minimum Year", value=2019, min_value=2015, step=1)) with cols[1]: max_year = int(st.number_input("Maximum Year", value=max_year, min_value=2015, step=1)) buffer = st.number_input("Buffer (m)", value=50, min_value=0, step=1) input_gdf = get_gdf_from_file_url(file_url) input_gdf = preprocess_gdf(input_gdf) if len(input_gdf) > 1: st.warning(f"Only the first polygon in the KML will be processed; all other geometries will be ignored.") # input_geometry_idx = st.selectbox("Select the geometry", input_gdf.index, format_func=format_fn) for i in range(len(input_gdf)): geometry_gdf = input_gdf[input_gdf.index == i] if is_valid_polygon(geometry_gdf): break else: st.error(f"No polygon found inside KML. Please check the KML file.") st.stop() geometry_gdf = to_best_crs(geometry_gdf) outer_geometry_gdf = geometry_gdf.copy() outer_geometry_gdf["geometry"] = outer_geometry_gdf["geometry"].buffer(buffer) buffer_geometry_gdf = ( outer_geometry_gdf.difference(geometry_gdf).reset_index().drop(columns="index") ) # reset index forces GeoSeries to GeoDataFrame buffer_geometry_gdf["Name"] = "Buffer" # Derived Inputs ee_geometry = ee.Geometry(geometry_gdf.to_crs(4326).geometry.item().__geo_interface__) ee_feature_collection = ee.FeatureCollection(ee_geometry) buffer_ee_geometry = ee.Geometry(buffer_geometry_gdf.to_crs(4326).geometry.item().__geo_interface__) buffer_ee_feature_collection = ee.FeatureCollection(buffer_ee_geometry) outer_ee_geometry = ee.Geometry(outer_geometry_gdf.to_crs(4326).geometry.item().__geo_interface__) outer_ee_feature_collection = ee.FeatureCollection(outer_ee_geometry) # visualize the geometry m = leaf_folium.Map() keys = list(wayback_mapping.keys()) latest_date = sorted(keys, key=lambda x: pd.to_datetime(x))[-1] m.add_tile_layer( wayback_mapping[latest_date], name=f"Esri Wayback - {latest_date.replace('-', '/')}", attribution="Esri" ) # m.add_layer(buffer_ee_feature_collection) add_geometry_to_maps([m], opacity=0.3) write_info( f"""
Latest Esri Imagery - {latest_date.replace('-', '/')}
""" ) m.to_streamlit() # Generate stats stats_df = pd.DataFrame( { "Area (m^2)": geometry_gdf.area.item(), "Perimeter (m)": geometry_gdf.length.item(), "Points": json.loads(geometry_gdf.to_crs(4326).to_json())["features"][0]["geometry"]["coordinates"], } ) st.write("

Geometry Metrics

", unsafe_allow_html=True) # st.markdown( # f"""| Metric | Value | # | --- | --- | # | Area (m^2) | {stats_df['Area (m^2)'].item():.2f} m^2 = {stats_df['Area (m^2)'].item()/10000:.2f} ha | # | Perimeter (m) | {stats_df['Perimeter (m)'].item():.2f} m | # """ # ) st.markdown( f"""
Metric Value
Area {stats_df['Area (m^2)'].item()/10000:.2f} ha
Perimeter {stats_df['Perimeter (m)'].item():.2f} m
""", unsafe_allow_html=True ) stats_csv = stats_df.to_csv(index=False) st.download_button("Download Geometry Metrics", stats_csv, "geometry_metrics.csv", "text/csv", use_container_width=True) # Submit submit = st.button("Calculate Vegetation Indices", use_container_width=True) if submit: st.write("

Results

", unsafe_allow_html=True) if not any(satellite_selected.values()): st.error("Please select at least one satellite source") st.stop() # Create range start_day = input_daterange[0].day start_month = input_daterange[0].month end_day = input_daterange[1].day end_month = input_daterange[1].month dates = [] for year in range(min_year, max_year + 1): start_date = pd.to_datetime(f"{year}-{start_month:02d}-{start_day:02d}") end_date = pd.to_datetime(f"{year}-{end_month:02d}-{end_day:02d}") dates.append((start_date, end_date)) result_df = pd.DataFrame() for satellite, attrs in satellites.items(): if not satellite_selected[satellite]: continue with st.spinner(f"Processing {satellite} ..."): progress_bar = st.progress(0) for i, daterange in enumerate(dates): process_date(daterange, satellite, veg_indices) progress_bar.progress((i + 1) / len(dates)) st.session_state.result = result_df print("Printing result...") if "result" in st.session_state: result_df = st.session_state.result print(result_df.columns) # drop rows with all NaN values result_df = result_df.dropna(how="all") # drop columns with all NaN values result_df = result_df.dropna(axis=1, how="all") print(result_df.columns) print(result_df.head(2)) # df.reset_index(inplace=True) # df.index = pd.to_datetime(df["index"], format="%Y-%m") for column in result_df.columns: result_df[column] = pd.to_numeric(result_df[column], errors="ignore") df_numeric = result_df.select_dtypes(include=["float64"]) st.write(df_numeric) df_numeric_csv = df_numeric.to_csv(index=True) st.download_button( "Download Time Series Data", df_numeric_csv, "vegetation_indices.csv", "text/csv", use_container_width=True ) df_numeric.index = [daterange_str_to_year(daterange) for daterange in df_numeric.index] for veg_index in veg_indices: fig = px.line(df_numeric, y=[veg_index, f"{veg_index}_buffer", f"{veg_index}_ratio"], markers=True) fig.update_layout(xaxis=dict(tickvals=df_numeric.index, ticktext=df_numeric.index)) st.plotly_chart(fig) st.write( "

DEM and Slope from SRTM at 30m resolution

", unsafe_allow_html=True, ) cols = st.columns(2) dem_map, slope_map = get_dem_slope_maps(ee.Geometry(geometry_gdf.to_crs(4326).geometry.item().__geo_interface__)) for col, param_map, title in zip(cols, [dem_map, slope_map], ["DEM Map", "Slope Map"]): with col: param_map.add_gdf( geometry_gdf, layer_name="Geometry", style_function=lambda x: {"color": "blue", "fillOpacity": 0.0, "fillColor": "blue"}, ) write_info(f"""
{title}
""") param_map.addLayerControl() param_map.to_streamlit() st.write( "

Visual Comparison between Two Years

", unsafe_allow_html=True ) cols = st.columns(2) with cols[0]: year_1 = st.selectbox("Year 1", result_df.index, index=0, format_func=lambda x: daterange_str_to_year(x)) with cols[1]: year_2 = st.selectbox( "Year 2", result_df.index, index=len(result_df.index) - 1, format_func=lambda x: daterange_str_to_year(x) ) vis_params = {"min": 0, "max": 1, "palette": ["white", "green"]} # Example visualisation for Sentinel-2 # Create a colormap and name it as NDVI colormap = cm.LinearColormap(colors=vis_params["palette"], vmin=vis_params["min"], vmax=vis_params["max"]) for veg_index in veg_indices: st.write(f"

{veg_index}

", unsafe_allow_html=True) cols = st.columns(2) for col, daterange_str in zip(cols, [year_1, year_2]): mosaic = result_df.loc[daterange_str, f"mosaic_{veg_index}"] with col: m = gee_folium.Map() m.add_tile_layer(wayback_mapping[latest_date], name=f"Esri Wayback - {latest_date.replace('-', '/')}", attribution="Esri") veg_index_layer = gee_folium.ee_tile_layer(mosaic, {"bands": [veg_index], "min": 0, "max": 1}) if satellite == "COPERNICUS/S2_SR_HARMONIZED": min_all = 0 max_all = 255 else: raise ValueError(f"Unknown satellite: {satellite}") if veg_index=='NDVI': bins=[-1, 0, 0.1, 0.2, 0.3, 0.4, 0.5, 1] histogram, bin_edges = get_histogram(mosaic.select(veg_index), ee_geometry, bins) total_pix = np.sum(histogram) formatted_histogram = [f"{h*100/total_pix:.2f}" for h in histogram] print(histogram, bin_edges, bins, formatted_histogram) m.add_legend(title="NDVI Class/Value", legend_dict={'<0:Waterbody ({}%)'.format(formatted_histogram[0]): '#0000FF', '0-0.1: Open ({}%)'.format(formatted_histogram[1]): '#FF0000', '0.1-0.2: Highly Degraded ({}%)'.format(formatted_histogram[2]):'#FFFF00', '0.2-0.3: Degraded ({}%)'.format(formatted_histogram[3]): '#FFA500', '0.3-0.4: Moderately Degraded ({}%)'.format(formatted_histogram[4]): '#00FE00', '0.4-0.5: Dense ({}%)'.format(formatted_histogram[5]): '#00A400', '>0.5: Very Dense ({}%)'.format(formatted_histogram[6]): '#006D00', }, position='bottomright', draggable=False) ndvi_vis_params = {'min': -0.1, 'max': 0.6, 'palette': ['#0000FF', '#FF0000', '#FFFF00', '#FFA500', '#00FE00', '#00A400', '#006D00']} m.add_layer(mosaic.select(veg_index).clip(outer_ee_geometry), ndvi_vis_params) # add colorbar # m.add_colorbar(colors=["#000000", "#00FF00"], vmin=0.0, vmax=1.0) if veg_index!='NDVI': m.add_layer(mosaic.select(veg_index).clip(outer_ee_geometry), vis_params) m.add_child(colormap) add_geometry_to_maps([m]) m.to_streamlit() for name, key in zip( ["RGB (Least Cloud Tile Crop)", "RGB (Max NDVI Mosaic)"], ["image_visual_least_cloud", "mosaic_visual_max_ndvi"], ): st.write(f"

{name}

", unsafe_allow_html=True) cols = st.columns(2) for col, daterange_str in zip(cols, [year_1, year_2]): start_date, end_date = daterange_str_to_dates(daterange_str) mid_date = start_date + (end_date - start_date) / 2 esri_date = min(wayback_mapping.keys(), key=lambda x: abs(pd.to_datetime(x) - mid_date)) with col: m = gee_folium.Map() visual_mosaic = result_df.loc[daterange_str, key] # visual_layer = gee_folium.ee_tile_layer(mosaic, {"bands": ["R", "G", "B"], "min": min_all, "max": max_all}) m.add_layer(visual_mosaic.select(["R", "G", "B"])) add_geometry_to_maps([m]) m.to_streamlit() st.write("

Esri RGB Imagery

", unsafe_allow_html=True) cols = st.columns(2) for col, daterange_str in zip(cols, [year_1, year_2]): start_date, end_date = daterange_str_to_dates(daterange_str) mid_date = start_date + (end_date - start_date) / 2 esri_date = min(wayback_mapping.keys(), key=lambda x: abs(pd.to_datetime(x) - mid_date)) with col: m = leaf_folium.Map() m.add_tile_layer(wayback_mapping[esri_date], name=f"Esri Wayback Imagery - {esri_date}", attribution="Esri") add_geometry_to_maps([m]) write_info( f"""
Esri Imagery - {esri_date.replace('-', '/')}
""" ) m.to_streamlit() show_credits()