push latest changes
- .streamlit/config.toml +2 -0
- Final_IITGN-Logo-symmetric-Color.png +0 -0
- IFS.jpg +0 -0
- IITGN_GFD.png +0 -0
- app.py +271 -223
- gzn.html +0 -0
- sandbox.ipynb +0 -0
.streamlit/config.toml
ADDED
@@ -0,0 +1,2 @@
+[theme]
+backgroundColor="#FFFFFF"
Final_IITGN-Logo-symmetric-Color.png
ADDED
IFS.jpg
ADDED
IITGN_GFD.png
ADDED
app.py
CHANGED
@@ -1,8 +1,7 @@
 import os
-import
+from datetime import datetime
 import ee
 import json
-import geojson
 import geemap
 import numpy as np
 import geemap.foliumap as gee_folium
@@ -13,13 +12,70 @@ import geopandas as gpd
 from shapely.ops import transform
 from functools import reduce
 import plotly.express as px
+import branca.colormap as cm
 
 st.set_page_config(layout="wide")
+m = st.markdown("""
+<style>
+div.stButton > button:first-child {
+    background-color: #006400;
+    color:#ffffff;
+}
+</style>""", unsafe_allow_html=True)
+
+# Logo
+cols = st.columns([1, 7, 1])
+with cols[0]:
+    st.image("Final_IITGN-Logo-symmetric-Color.png")
+with cols[-1]:
+    st.image("IFS.jpg")
 
+# Title
+# make title in center
+with cols[1]:
+    st.markdown(
+        f"""
+        <h1 style="text-align: center;">Vrinda (वृन्दा): Interactive Vegetation Index Analyzer</h1>
+        """,
+        unsafe_allow_html=True,
+    )
+
+############################################
+# Hyperparameters
+############################################
+st.write("<h3><div style='text-align: center;'>User Inputs</div></h3>", unsafe_allow_html=True)
+
+st.write("Select the vegetation indices to calculate:")
+all_veg_indices = ["NDVI", "EVI", "EVI2"]
+veg_indices = []
+for veg_index in all_veg_indices:
+    if st.checkbox(veg_index, value=True):
+        veg_indices.append(veg_index)
+
+st.write("Select the parameters for the EVI/EVI2 calculation (default is as per EVI's Wikipedia page)")
+cols = st.columns(5)
+evi_vars = {}
+for col, name, default in zip(cols, ["G", "C1", "C2", "L", "C"], [2.5, 6, 7.5, 1, 2.4]):
+    value = col.number_input(f'{name}', value=default)
+    evi_vars[name] = value
 
 ############################################
 # Functions
 ############################################
+def daterange_str_to_dates(daterange_str):
+    start_date, end_date = daterange_str.split("-")
+    start_date = pd.to_datetime(start_date)
+    end_date = pd.to_datetime(end_date)
+    return start_date, end_date
+
+def daterange_dates_to_str(start_date, end_date):
+    return f"{start_date.strftime('%Y/%m/%d')}-{end_date.strftime('%Y/%m/%d')}"
+
+def daterange_str_to_year(daterange_str):
+    start_date, _ = daterange_str.split("-")
+    year = pd.to_datetime(start_date).year
+    return year
+
 def shape_3d_to_2d(shape):
     if shape.has_z:
         return transform(lambda x, y, z: (x, y), shape)
@@ -27,68 +83,78 @@ def shape_3d_to_2d(shape):
     return shape
 
 def preprocess_gdf(gdf):
-    gdf = gdf.to_crs(epsg=
-    gdf = gdf[["Name", "geometry"]]
+    gdf = gdf.to_crs(epsg=7761) # epsg for Gujarat
    gdf["geometry"] = gdf["geometry"].apply(shape_3d_to_2d)
     return gdf
 
-def
+def check_valid_geometry(geometry_gdf):
+    geometry = geometry_gdf.geometry.item()
+    if geometry.type != "Polygon":
+        st.error(
+            f"Selected geometry is of type '{geometry.type}'. Please provide a 'Polygon' geometry."
+        )
+        st.stop()
+
+def add_geometry_to_maps(map_list):
+    for m in map_list:
+        m.add_gdf(buffer_geometry_gdf, layer_name="Geometry Buffer", style_function=lambda x: {"color": "red", "fillOpacity": 0.0})
+        m.add_gdf(geometry_gdf, layer_name="Geometry", style_function=lambda x: {"color": "blue", "fillOpacity": 0.0})
+
+def add_indices(image, nir_band, red_band, blue_band):
+    # Add negative cloud
+    neg_cloud = image.select("MSK_CLDPRB").multiply(-1).rename("Neg_MSK_CLDPRB")
+    nir = image.select(nir_band).divide(10000)
+    red = image.select(red_band).divide(10000)
+    blue = image.select(blue_band).divide(10000)
+    numerator = nir.subtract(red)
+    ndvi = (numerator).divide(nir.add(red)).rename("NDVI").clamp(-1, 1)
+    # EVI formula taken from: https://en.wikipedia.org/wiki/Enhanced_vegetation_index
+
+    denominator = nir.add(red.multiply(evi_vars['C1'])).subtract(blue.multiply(evi_vars['C2'])).add(evi_vars['L']).add(0.1)
+    evi = numerator.divide(denominator).multiply(evi_vars['G']).rename("EVI").clamp(-1, 1)
+    evi2 = numerator.divide(nir.add(evi_vars['L']).add(red.multiply(evi_vars['C']))).multiply(evi_vars['G']).rename("EVI2").clamp(-1, 1)
+    return image.addBands([neg_cloud, ndvi, evi, evi2])
 
-def process_date(
+def process_date(daterange, satellite, veg_indices):
+    start_date, end_date = daterange
     try:
         attrs = satellites[satellite]
         collection = attrs["collection"]
-        collection = collection.filterBounds(
-        end_date = start_date + pd.DateOffset(months=1)
-        write_info(f"Processing {satellite} - {start_date} to {end_date}")
+        collection = collection.filterBounds(buffer_ee_geometry)
+        daterange_str = daterange_dates_to_str(start_date, end_date)
+        write_info(f"Processing {satellite} - {daterange_str}")
         collection = collection.filterDate(start_date, end_date)
+
+        bucket = {}
+        for veg_index in veg_indices:
+            mosaic_veg_index = collection.qualityMosaic(veg_index)
+            fc = geemap.zonal_stats(
+                mosaic_veg_index, ee_feature_collection, scale=attrs["scale"], return_fc=True
+            ).getInfo()
+            mean_veg_index = fc["features"][0]["properties"][veg_index]
+            bucket[veg_index] = mean_veg_index
+            fc = geemap.zonal_stats(
+                mosaic_veg_index, buffer_ee_feature_collection, scale=attrs["scale"], return_fc=True
+            ).getInfo()
+            buffer_mean_veg_index = fc["features"][0]["properties"][veg_index]
+            bucket[f"{veg_index}_buffer"] = buffer_mean_veg_index
+            bucket[f"{veg_index}_ratio"] = mean_veg_index / buffer_mean_veg_index
+            bucket[f"mosaic_{veg_index}"] = mosaic_veg_index
+
+        # Get median mosaic
+        bucket["mosaic_visual"] = collection.qualityMosaic("NDVI")
+
         if satellite == "COPERNICUS/S2_SR_HARMONIZED":
             cloud_mask_probability = fc["features"][0]["properties"]["MSK_CLDPRB"] / 100
         else:
             cloud_mask_probability = None
+        bucket["Cloud (0 to 1)"] = cloud_mask_probability
+        result_df.loc[daterange_str, list(bucket.keys())] = list(bucket.values())
     except Exception as e:
         print(e)
-        mosaic = None
-        mean_ndvi = None
-        cloud_mask_probability = None
-    return mosaic, mean_ndvi, cloud_mask_probability
-
-
-def postprocess_df(df, name):
-    df = df.T
-    df = df.reset_index()
-    ndvi_df = df[df["index"].str.contains("NDVI")]
-    ndvi_df["index"] = pd.to_datetime(ndvi_df["index"], format="%Y-%m_NDVI")
-    ndvi_df = ndvi_df.rename(columns={"index": "Date", 0: name})
-
-    cloud_mask_probability = df[df["index"].str.contains("MSK_CLDPRB")]
-    cloud_mask_probability["index"] = pd.to_datetime(
-        cloud_mask_probability["index"], format="%Y-%m_MSK_CLDPRB"
-    )
-    cloud_mask_probability = cloud_mask_probability.rename(
-        columns={"index": "Date", 0: f"{name}_cloud_proba"}
-    )
-    # normalize
-    cloud_mask_probability[f"{name}_cloud_proba"] = (
-        cloud_mask_probability[f"{name}_cloud_proba"] / 100
-    )
-    df = pd.merge(ndvi_df, cloud_mask_probability, on="Date", how="outer")
-    return df
-
 
 def write_info(info):
-    st.write(f"<span style='color:#
+    st.write(f"<span style='color:#006400;'>{info}</span>", unsafe_allow_html=True)
 
 
 ############################################
@@ -113,255 +179,237 @@ def one_time_setup():
     ee.Initialize()
 
     satellites = {
-        # "LANDSAT/LC08/C02/T1_TOA": {
-        # "scale": 30,
-        # "collection": ee.ImageCollection("LANDSAT/LC08/C02/T1_TOA")
-        # .select(["B2", "B3", "B4", "B5"], ["B", "G", "R", "NIR"])
-        # .map(lambda image: calculate_ndvi(image, nir_band="NIR", red_band="R")),
-        # },
         "COPERNICUS/S2_SR_HARMONIZED": {
             "scale": 10,
             "collection": ee.ImageCollection("COPERNICUS/S2_SR_HARMONIZED")
             .select(
-                ["B4", "B8", "MSK_CLDPRB", "TCI_R", "TCI_G", "TCI_B"],
-                ["Red", "NIR", "MSK_CLDPRB", "R", "G", "B"],
+                ["B2", "B4", "B8", "MSK_CLDPRB", "TCI_R", "TCI_G", "TCI_B"],
+                ["Blue", "Red", "NIR", "MSK_CLDPRB", "R", "G", "B"],
             )
-            .map(lambda image:
+            .map(lambda image: add_indices(image, nir_band="NIR", red_band="Red", blue_band="Blue")),
         },
-        # "LANDSAT/LC09/C02/T1_L2": {
-        # "scale": 30,
-        # "collection": ee.ImageCollection("LANDSAT/LC09/C02/T1_L2")
-        # .select(["SR_B2", "SR_B3", "SR_B4", "SR_B5"], ["B", "G", "R", "NIR"])
-        # .map(lambda image: calculate_ndvi(image, nir_band="NIR", red_band="R")),
-        # },
-        # "LANDSAT/LC08/C02/T1_L2": {
-        # "scale": 30,
-        # "collection": ee.ImageCollection("LANDSAT/LC08/C02/T1_L2")
-        # .select(["SR_B2", "SR_B3", "SR_B4", "SR_B5"], ["B", "G", "R", "NIR"])
-        # .map(lambda image: calculate_ndvi(image, nir_band="NIR", red_band="R")),
-        # },
-        # "LANDSAT/LE07/C02/T1_L2": {
-        # "scale": 30,
-        # "collection": ee.ImageCollection("LANDSAT/LE07/C02/T1_L2")
-        # .select(["SR_B2", "SR_B3", "SR_B4", "SR_B5"], ["B", "G", "R", "NIR"])
-        # .map(lambda image: calculate_ndvi(image, nir_band="NIR", red_band="R")),
-        # },
     }
     st.session_state.satellites = satellites
     with open("wayback_imagery.json") as f:
         st.session_state.wayback_mapping = json.load(f)
 
-
 if "one_time_setup_done" not in st.session_state:
     one_time_setup()
     st.session_state.one_time_setup_done = True
-else:
-    satellites = st.session_state.satellites
-    wayback_mapping = st.session_state.wayback_mapping
 
+satellites = st.session_state.satellites
+wayback_mapping = st.session_state.wayback_mapping
 
 ############################################
 # App
 ############################################
 
-#
-    """,
-    unsafe_allow_html=True,
-)
+# Input: Satellite Sources
+st.write("Select the satellite sources:")
+satellite_selected = {}
+for satellite in satellites:
+    satellite_selected[satellite] = st.checkbox(satellite, value=True, disabled=True)
 
-#
+# Date range input
+jan_1 = pd.to_datetime("2024/01/01", format="%Y/%m/%d")
+dec_31 = pd.to_datetime("2024/12/31", format="%Y/%m/%d")
+nov_15 = pd.to_datetime("2024/11/15", format="%Y/%m/%d")
+dec_15 = pd.to_datetime("2024/12/15", format="%Y/%m/%d")
+input_daterange = st.date_input("Date Range (Ignore year. App will compute indices for all possible years)", (nov_15, dec_15), jan_1, dec_31)
+min_year = int(st.number_input("Minimum Year", value=2010, min_value=2010, step=1))
+max_year = int(st.number_input("Maximum Year", value=datetime.now().year, min_value=2010, step=1))
 
 # Input: GeoJSON/KML file
-if
+input_file = st.file_uploader("Upload KML/GeoJSON file", type=["geojson", "kml", "shp"])
+if input_file is None:
     st.stop()
+buffer = st.number_input("Buffer (m)", value=150, min_value=0, step=1)
 
+input_gdf = preprocess_gdf(gpd.read_file(input_file))
 
 # Input: Geometry
-    st.stop()
+def format_fn(x):
+    return input_gdf.drop(columns=["geometry"]).loc[x].to_dict()
+input_geometry_idx = st.selectbox("Select the geometry", input_gdf.index, format_func=format_fn)
+geometry_gdf = input_gdf[input_gdf.index == input_geometry_idx]
+buffer_geometry_gdf = geometry_gdf.copy()
+buffer_geometry_gdf["geometry"] = buffer_geometry_gdf["geometry"].buffer(buffer)
+check_valid_geometry(geometry_gdf)
 
 # Derived Inputs
-ee_geometry = ee.Geometry(selected_geometry)
-_, lonlat = ee_geometry.centroid().getInfo().values()
-lon, lat = lonlat
+ee_geometry = ee.Geometry(geometry_gdf.to_crs(4326).geometry.item().__geo_interface__)
 ee_feature_collection = ee.FeatureCollection(ee_geometry)
-epsg = f"EPSG:326{zone}"
-selected_geometry_gdf = selected_geometry_gdf.to_crs(epsg)
-area = selected_geometry_gdf.area.values[0]
-perimeter = selected_geometry_gdf.length.values[0]
-
-stats_df = pd.DataFrame(
-    {
-        "Area (km^2)": [f"{area/1e6:.2f}"],
-        "Perimeter (km)": [f"{perimeter/1e3:.2f}"],
-        "Centroid (lat, lon)": [f"{lat:.6f}, {lon:.6f}"],
-        "Points": np.array(selected_geometry['coordinates']).tolist(),
-    }
-)
+buffer_ee_geometry = ee.Geometry(buffer_geometry_gdf.to_crs(4326).geometry.item().__geo_interface__)
+buffer_ee_feature_collection = ee.FeatureCollection(buffer_ee_geometry)
 
 # visualize the geometry
 m = leaf_folium.Map()
 keys = list(wayback_mapping.keys())
 latest_date = sorted(keys, key=lambda x: pd.to_datetime(x))[-1]
-m.add_tile_layer(wayback_mapping[latest_date], name=f"Esri Wayback - {latest_date}", attribution="Esri")
-m
-write_info(f"
+m.add_tile_layer(wayback_mapping[latest_date], name=f"Esri Wayback - {latest_date.replace('-', '/')}", attribution="Esri")
+add_geometry_to_maps([m])
+write_info(f"""
+<div style="text-align: center;">
+Latest Esri Imagery - {latest_date.replace('-', '/')}
+</div>
+""")
 m.to_streamlit()
 
-st.
-satellite_selected = {}
-for satellite in satellites:
-    satellite_selected[satellite] = st.checkbox(satellite, value=True)
+# Generate stats
+stats_df = pd.DataFrame(
+    {
+        "Area (m^2)": geometry_gdf.area.item(),
+        "Perimeter (m)": geometry_gdf.length.item(),
+        "Points": json.loads(geometry_gdf.to_crs(4326).to_json())['features'][0]['geometry']['coordinates'],
+    }
+)
+st.write("<h3><div style='text-align: center;'>Geometry Metrics</div></h3>", unsafe_allow_html=True)
+st.markdown(f"""| Metric | Value |
+| --- | --- |
+| Area (m^2) | {stats_df['Area (m^2)'].item():.2f} m^2 = {stats_df['Area (m^2)'].item()/10000:.2f} ha |
+| Perimeter (m) | {stats_df['Perimeter (m)'].item():.2f} m |
+| Points | {stats_df['Points'][0]} |
+""")
+
+stats_csv = stats_df.to_csv(index=False)
+st.download_button("Download Geometry Metrics", stats_csv, "geometry_metrics.csv", "text/csv", use_container_width=True)
 
 # Submit
-submit = st.button("
+submit = st.button("Calculate Vegetation Indices", use_container_width=True)
+
+st.write("<h2><div style='text-align: center;'>Results</div></h2>", unsafe_allow_html=True)
+
 if submit:
     if not any(satellite_selected.values()):
         st.error("Please select at least one satellite source")
         st.stop()
 
-    # Create
-    )
+    # Create range
+    start_day = input_daterange[0].day
+    start_month = input_daterange[0].month
+    end_day = input_daterange[1].day
+    end_month = input_daterange[1].month
+
+    dates = []
+    for year in range(min_year, max_year+1):
+        start_date = pd.to_datetime(f"{year}-{start_month:02d}-{start_day:02d}")
+        end_date = pd.to_datetime(f"{year}-{end_month:02d}-{end_day:02d}")
+        dates.append((start_date, end_date))
+
+    result_df = pd.DataFrame()
     for satellite, attrs in satellites.items():
         if not satellite_selected[satellite]:
             continue
 
         with st.spinner(f"Processing {satellite} ..."):
             progress_bar = st.progress(0)
-            for i,
-                result[satellite][date] = {
-                    "mosaic": mosaic,
-                    "mean_ndvi": mean_ndvi,
-                    "cloud_mask_probability": cloud_proba,
-                }
+            for i, daterange in enumerate(dates):
+                process_date(daterange, satellite, veg_indices)
                 progress_bar.progress((i + 1) / len(dates))
 
-    st.session_state.result =
+    st.session_state.result = result_df
 
+print("Printing result...")
 if "result" in st.session_state:
-    for satellite, satellite_result in result.items():
-        satellite_df = pd.DataFrame(satellite_result).T
-        satellite_df.rename(
-            columns={
-                "mean_ndvi": f"NDVI_{satellite}",
-                "mosaic": f"Mosaic_{satellite}",
-                "cloud_mask_probability": f"Cloud_{satellite}",
-            },
-            inplace=True,
-        )
-        # drop rows with all NaN values
-        satellite_df = satellite_df.dropna(how="all")
-        # drop columns with all NaN values
-        satellite_df = satellite_df.dropna(axis=1, how="all")
-        df_list.append(satellite_df)
-
-    # merge outer on index of the dataframes
-    df = reduce(
-        lambda left, right: pd.merge(
-            left, right, left_index=True, right_index=True, how="outer"
-        ),
-        df_list,
-    )
-    df.reset_index(inplace=True)
-    df.index = pd.to_datetime(df["index"], format="%Y-%m")
-    for column in df.columns:
-        df[column] = pd.to_numeric(df[column], errors="ignore")
+    result_df = st.session_state.result
+    print(result_df.columns)
 
+    # drop rows with all NaN values
+    result_df = result_df.dropna(how="all")
+    # drop columns with all NaN values
+    result_df = result_df.dropna(axis=1, how="all")
+    print(result_df.columns)
+    print(result_df.head(2))
+
+    # df.reset_index(inplace=True)
+    # df.index = pd.to_datetime(df["index"], format="%Y-%m")
+    for column in result_df.columns:
+        result_df[column] = pd.to_numeric(result_df[column], errors="ignore")
+
+    df_numeric = result_df.select_dtypes(include=["float64"])
     st.write(df_numeric)
-    # give streamlit option to download the data
-    csv = df_numeric.to_csv()
-    st.download_button("Download Time Series", csv, "data.csv", "text/csv")
 
+    df_numeric_csv = df_numeric.to_csv(index=True)
+    st.download_button("Download Time Series Data", df_numeric_csv, "vegetation_indices.csv", "text/csv", use_container_width=True)
+
+    df_numeric.index = [daterange_str_to_year(daterange) for daterange in df_numeric.index]
+    for veg_index in veg_indices:
+        fig = px.line(df_numeric, y=[veg_index, f"{veg_index}_buffer"], markers=True)
+        fig.update_layout(xaxis=dict(tickvals=df_numeric.index, ticktext=df_numeric.index))
+        st.plotly_chart(fig)
 
-    st.
-    write_info(f"Centroid of the selected geometry (lat, lon): ({lat}, {lon})")
+    st.write("<h3><div style='text-align: center;'>Visual Comparison between Two Years</div></h3>", unsafe_allow_html=True)
     cols = st.columns(2)
+
     with cols[0]:
+        year_1 = st.selectbox("Year 1", result_df.index, index=0, format_func=lambda x: daterange_str_to_year(x))
     with cols[1]:
+        year_2 = st.selectbox("Year 2", result_df.index, index=len(result_df.index) - 1, format_func=lambda x: daterange_str_to_year(x))
+
+    vis_params = {'min': 0, 'max': 1, 'palette': ['white', 'green']} # Example visualization for Sentinel-2
+
+    # Create a colormap and name it as NDVI
+    colormap = cm.LinearColormap(
+        colors=vis_params['palette'],
+        vmin=vis_params['min'],
+        vmax=vis_params['max']
+    )
 
-    for
-        mosaic =
+    for veg_index in veg_indices:
+        st.write(f"<h3><div style='text-align: center;'>{veg_index}</div></h3>", unsafe_allow_html=True)
+        cols = st.columns(2)
+        for col, daterange_str in zip(cols, [year_1, year_2]):
+            mosaic = result_df.loc[daterange_str, f"mosaic_{veg_index}"]
             with col:
+                m = gee_folium.Map()
+                veg_index_layer = gee_folium.ee_tile_layer(mosaic, {"bands": [veg_index], "min": 0, "max": 1})
 
                 if satellite == "COPERNICUS/S2_SR_HARMONIZED":
                     min_all = 0
                     max_all = 255
                 else:
                     raise ValueError(f"Unknown satellite: {satellite}")
-                visual_layer = gee_folium.ee_tile_layer(mosaic, {"bands": ["R", "G", "B"], "min": min_all, "max": max_all})
 
+                m.add_layer(
+                    mosaic.select(veg_index), vis_params
                 )
                 # add colorbar
-    )
+                # m.add_colorbar(colors=["#000000", "#00FF00"], vmin=0.0, vmax=1.0)
+                add_geometry_to_maps([m])
+                m.add_child(colormap)
+                m.to_streamlit()
 
+    st.write("<h3><div style='text-align: center;'>RGB</div></h3>", unsafe_allow_html=True)
+    cols = st.columns(2)
+    for col, daterange_str in zip(cols, [year_1, year_2]):
+        start_date, end_date = daterange_str_to_dates(daterange_str)
+        mid_date = start_date + (end_date - start_date) / 2
+        esri_date = min(wayback_mapping.keys(), key=lambda x: abs(pd.to_datetime(x) - mid_date))
+        with col:
+            m = gee_folium.Map()
+            visual_mosaic = result_df.loc[daterange_str, "mosaic_visual"]
+            # visual_layer = gee_folium.ee_tile_layer(mosaic, {"bands": ["R", "G", "B"], "min": min_all, "max": max_all})
+
+            m.add_layer(
+                visual_mosaic.select(["R", "G", "B"])
+            )
+            add_geometry_to_maps([m])
+            m.to_streamlit()
+
+    st.write("<h3><div style='text-align: center;'>Esri RGB Imagery</div></h3>", unsafe_allow_html=True)
+    cols = st.columns(2)
+    for col, daterange_str in zip(cols, [year_1, year_2]):
+        start_date, end_date = daterange_str_to_dates(daterange_str)
+        mid_date = start_date + (end_date - start_date) / 2
+        esri_date = min(wayback_mapping.keys(), key=lambda x: abs(pd.to_datetime(x) - mid_date))
        with col:
             m = leaf_folium.Map()
             m.add_tile_layer(wayback_mapping[esri_date], name=f"Esri Wayback Imagery - {esri_date}", attribution="Esri")
-            m
-            write_info(f"
+            add_geometry_to_maps([m])
+            write_info(f"""
+            <div style="text-align: center;">
+            Esri Imagery - {esri_date.replace('-', '/')}
+            </div>
+            """)
+            m.to_streamlit()
gzn.html
DELETED
File without changes
sandbox.ipynb
CHANGED
The diff for this file is too large to render.