# -*- coding: utf-8 -*-
"""
Created on Thu Jun 8 03:39:02 2023

@author: mritchey
"""
import pandas as pd
import numpy as np
import streamlit as st
from geopy.extra.rate_limiter import RateLimiter
from geopy.geocoders import Nominatim
import folium
from streamlit_folium import st_folium
from vincenty import vincenty

st.set_page_config(layout="wide")

def convert_df(df):
    """Encode a DataFrame as UTF-8 CSV bytes for st.download_button."""
    return df.to_csv(index=False).encode('utf-8')

def get_data(file='hail2010-20230920_significant_bulk_all.parquet'):
    """Load the bulk significant-hail data (2010 through 2023-09-20)."""
    return pd.read_parquet(file)
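
# Note: in a deployed Streamlit app, get_data and convert_df are natural
# candidates for caching so the parquet file is not re-read on every rerun.
# A sketch, assuming the runtime's Streamlit version provides st.cache_data:
#
#     @st.cache_data
#     def get_data(file='hail2010-20230920_significant_bulk_all.parquet'):
#         return pd.read_parquet(file)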

def map_perimeters(address, lat, lon):
    """Build a folium map centered on the geocoded address."""
    m = folium.Map(location=[lat, lon],
                   zoom_start=6,
                   height=400)
    folium.Marker(
        location=[lat, lon],
        tooltip=f'Address: {address}',
    ).add_to(m)
    return m

def distance(x):
    """Vincenty distance in miles between (lat1, lon1, lat2, lon2)."""
    left_coords = (x[0], x[1])
    right_coords = (x[2], x[3])
    return vincenty(left_coords, right_coords, miles=True)
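
# Example (hypothetical coordinates): two points 0.12 degrees of latitude
# apart on the same meridian are roughly 8.3 miles apart:
#     distance([32.78, -96.80, 32.90, -96.80])  # -> ~8.3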

def geocode(address):
    """Geocode via the Census Bureau API, falling back to Nominatim."""
    try:
        address2 = address.replace(' ', '+').replace(',', '%2C')
        df = pd.read_json(
            f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={address2}&benchmark=2020&format=json')
        # Pull the coordinates of the first address match.
        results = df.iloc[:1, 0][0][0]['coordinates']
        lat, lon = results['y'], results['x']
    except Exception:
        # Fall back to Nominatim, rate-limited to one request per second.
        geolocator = Nominatim(user_agent="GTA Lookup")
        geocode_limited = RateLimiter(geolocator.geocode, min_delay_seconds=1)
        location = geocode_limited(address)
        lat, lon = location.latitude, location.longitude
    return lat, lon
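
# Example (values approximate): geocode("Dallas, TX") -> (32.78, -96.80)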

# Sidebar
address = st.sidebar.text_input(
    "Address", "Dallas, TX")
date = st.sidebar.date_input("Loss Date", pd.Timestamp(2023, 7, 14), key='date')
df_hail = get_data()

# Geocode address
lat, lon = geocode(address)

# Filter data to a ~1-degree bounding box around the address (a cheap
# pre-filter before computing exact distances), then to reports on or
# before the loss date.
df_hail_cut = df_hail.query("@lat-1 <= LAT <= @lat+1 and @lon-1 <= LON <= @lon+1")
df_hail_cut = df_hail_cut.query("Date_est <= @date")
df_hail_cut["Lat_address"] = lat
df_hail_cut["Lon_address"] = lon

# Exact distance from each report to the address; keep reports within 10 miles.
df_hail_cut['Miles to Hail'] = [
    distance(i) for i in df_hail_cut[['LAT', 'LON', 'Lat_address', 'Lon_address']].values]
df_hail_cut['MAXSIZE'] = df_hail_cut['MAXSIZE'].round(2)
df_hail_cut = df_hail_cut.query("`Miles to Hail` < 10")

# Bucket each report by its distance to the address.
df_hail_cut['Category'] = np.where(df_hail_cut['Miles to Hail'] < .25, "At Location",
                          np.where(df_hail_cut['Miles to Hail'] < 1, "Within 1 Mile",
                          np.where(df_hail_cut['Miles to Hail'] < 3, "Within 3 Miles",
                          np.where(df_hail_cut['Miles to Hail'] < 10, "Within 10 Miles", 'Other'))))
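
# The nested np.where above is a distance binning; an equivalent sketch with
# pd.cut (note pd.cut uses right-closed bins, so boundary handling differs
# slightly from the strict '<' comparisons, and anything >= 10 miles would
# become NaN rather than 'Other' -- moot here since the data is already
# filtered to under 10 miles):
#
#     df_hail_cut['Category'] = pd.cut(
#         df_hail_cut['Miles to Hail'],
#         bins=[-np.inf, .25, 1, 3, 10],
#         labels=["At Location", "Within 1 Mile",
#                 "Within 3 Miles", "Within 10 Miles"])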

# Pivot: one row per date, max reported hail size in each distance band.
df_hail_cut_group = pd.pivot_table(df_hail_cut, index='Date_est',
                                   columns='Category',
                                   values='MAXSIZE',
                                   aggfunc='max')
cols = df_hail_cut_group.columns
cols_focus = ['At Location', "Within 1 Mile", "Within 3 Miles", "Within 10 Miles"]

# Ensure all four distance-band columns exist even when a band had no reports.
missing_cols = set(cols_focus) - set(cols)
for c in missing_cols:
    df_hail_cut_group[c] = np.nan

df_hail_cut_group2 = df_hail_cut_group[cols_focus].copy()

# Cascade maxima outward: hail observed at a narrower radius also counts for
# every wider radius, so each band is at least as large as the band inside it.
for i in range(3):
    df_hail_cut_group2[cols_focus[i+1]] = np.where(df_hail_cut_group2[cols_focus[i+1]].fillna(0) <
                                                   df_hail_cut_group2[cols_focus[i]].fillna(0),
                                                   df_hail_cut_group2[cols_focus[i]],
                                                   df_hail_cut_group2[cols_focus[i+1]])
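
# Worked example (hypothetical sizes): if a date has At Location = 1.00 and
# Within 1 Mile = NaN, the pass above lifts Within 1 Mile to 1.00, and the
# value keeps propagating outward through Within 3 and Within 10 Miles.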

df_hail_cut_group2 = df_hail_cut_group2.sort_index(ascending=False)

# Map data
m = map_perimeters(address, lat, lon)

# Display
col1, col2 = st.columns((3, 2))

with col1:
    st.header('Estimated Maximum Hail Size')
    st.write('Data from 2010 to 2023-09-20')
    df_hail_cut_group2  # Streamlit "magic" renders the bare DataFrame
    csv2 = convert_df(df_hail_cut_group2.reset_index())
    st.download_button(
        label="Download data as CSV",
        data=csv2,
        file_name=f'{address}_{date}.csv',
        mime='text/csv')

with col2:
    st.header('Map')
    st_folium(m, height=400)