Spaces:
Runtime error
Runtime error
Commit
·
c79d2fb
1
Parent(s):
8f92b12
Upload 2 files
Browse files- app.py +237 -0
- requirements.txt +16 -0
app.py
ADDED
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
"""
|
3 |
+
Created on Fri Oct 14 10:35:25 2022
|
4 |
+
|
5 |
+
@author: mritchey
|
6 |
+
"""
|
7 |
+
# streamlit run "C:\Users\mritchey\.spyder-py3\Python Scripts\streamlit projects\hail\hail pngs 2.py"
|
8 |
+
|
9 |
+
from tqdm import tqdm
|
10 |
+
from joblib import Parallel, delayed
|
11 |
+
import rasterio
|
12 |
+
import glob
|
13 |
+
from PIL import Image
|
14 |
+
import streamlit as st
|
15 |
+
import os
|
16 |
+
import branca.colormap as cm
|
17 |
+
import folium
|
18 |
+
from streamlit_folium import st_folium
|
19 |
+
import numpy as np
|
20 |
+
import pandas as pd
|
21 |
+
import plotly.express as px
|
22 |
+
from geopy.extra.rate_limiter import RateLimiter
|
23 |
+
from geopy.geocoders import Nominatim
|
24 |
+
import rioxarray
|
25 |
+
import xarray as xr
|
26 |
+
import warnings
|
27 |
+
warnings.filterwarnings("ignore")
|
28 |
+
|
29 |
+
@st.cache_data
|
30 |
+
def convert_df(df):
    """Serialize *df* to UTF-8 CSV bytes (no index) for st.download_button.

    Cached via the @st.cache_data decorator applied at the call site above.
    """
    # index=False is the idiomatic spelling of the original index=0.
    return df.to_csv(index=False).encode('utf-8')
|
32 |
+
|
33 |
+
def geocode(address):
    """Geocode a street address to latitude/longitude.

    Tries the US Census onelineaddress API first; on any failure falls
    back to Nominatim. Returns a one-row DataFrame with columns
    'Lat' and 'Lon'.
    """
    try:
        # Minimal URL-encoding for the Census geocoder query string.
        query = address.replace(' ', '+').replace(',', '%2C')
        df = pd.read_json(
            f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={query}&benchmark=2020&format=json')
        coords = df.iloc[:1, 0][0][0]['coordinates']
        lat, lon = coords['y'], coords['x']
    except Exception:
        # Fallback geocoder. The original constructed a RateLimiter but
        # then bypassed it (and shadowed this function's name with it);
        # use the rate-limited wrapper as intended (Nominatim policy:
        # max 1 request/second).
        geolocator = Nominatim(user_agent="GTA Lookup")
        limited_geocode = RateLimiter(geolocator.geocode, min_delay_seconds=1)
        location = limited_geocode(address)
        lat, lon = location.latitude, location.longitude
    return pd.DataFrame({'Lat': lat, 'Lon': lon}, index=[0])
|
46 |
+
|
47 |
+
|
48 |
+
def map_folium(data, zoom=12):
    """Build a folium map with an address marker and *data* overlaid
    as a semi-transparent image.

    NOTE(review): reads the module-level globals lat, lon, address,
    bottom, left, top and right — they must be set before calling.
    """
    fmap = folium.Map(location=[lat, lon], zoom_start=zoom, height=300)

    marker = folium.Marker(location=[lat, lon], popup=address)
    marker.add_to(fmap)

    # folium.GeoJson(gdf['buffer']).add_to(m)
    overlay = folium.raster_layers.ImageOverlay(
        data,
        opacity=0.8,
        bounds=[[bottom, left], [top, right]],
    )
    overlay.add_to(fmap)
    return fmap
|
58 |
+
|
59 |
+
@st.cache_data
|
60 |
+
def crop_hail_jpg_filter(f, crop_coords, scaling_factor=255):
    """Crop one hail PNG and return its date and scaled pixel array.

    f              : file path; chars [-19:-11] are the YYYYMMDD date.
    crop_coords    : PIL crop box (left, upper, right, lower) in pixels.
    scaling_factor : divisor applied to the raw pixel values.

    Returns (date, array) when the crop contains any nonzero pixels,
    otherwise None (implicitly) so empty tiles can be filtered out.
    """
    date = f[-19:-11]
    # FIX: close the image file handle (the original leaked it). The
    # np.array() call forces the pixel read while the file is still open.
    with Image.open(f) as img:
        pixels = np.array(img.crop(crop_coords)) / scaling_factor
    if pixels.sum() > 0:
        return date, pixels
|
67 |
+
|
68 |
+
@st.cache_data
|
69 |
+
def get_data(start_date, end_date, crop_coords=None):
    """Load and crop all hail PNGs dated within [start_date, end_date].

    start_date, end_date : inclusive YYYYMMDD strings.
    crop_coords          : PIL crop box; defaults to the module-level
                           crop_coords for backward compatibility.

    NOTE(review): this function is cached with @st.cache_data, which keys
    only on the arguments — the original read the global crop_coords, so
    changing the address/radius did NOT invalidate the cache. Pass
    crop_coords explicitly at the call site to fix that.
    """
    if crop_coords is None:
        crop_coords = globals()['crop_coords']

    files = glob.glob(
        r'png\2022\**\*.png', recursive=True)

    # Filter filenames to the requested date window (date is embedded
    # at a fixed position in the filename).
    file_dates = np.array([int(f[-19:-11]) for f in files])
    in_window = (file_dates >= int(start_date)) & (file_dates <= int(end_date))
    files = np.array(files)[in_window]

    # Thread-parallel crop; crop_hail_jpg_filter returns None for
    # all-zero tiles, which are dropped here.
    results = Parallel(n_jobs=96, prefer='threads')(
        delayed(crop_hail_jpg_filter)(f, crop_coords) for f in tqdm(files))
    return [r for r in results if r is not None]
|
83 |
+
|
84 |
+
#Set up 2 Columns
# Wide page layout with two side-by-side panels:
# col1 = folium map, col2 = plotly chart + CSV download.
st.set_page_config(layout="wide")
col1, col2 = st.columns((2))
|
87 |
+
|
88 |
+
|
89 |
+
|
90 |
+
# Input Values — sidebar widgets controlling the search.
address = st.sidebar.text_input("Address", "123 Main Street, Dallas, TX 75126")
d = st.sidebar.date_input("Date", pd.Timestamp(2022, 2, 1))
days_within = st.sidebar.selectbox('Days Within:', ('30', '90', '180', 'Day Of',))
circle_radius = st.sidebar.selectbox('Box Radius (Miles)', (5, 10, 25))

# Larger search boxes get a smaller (more zoomed-out) folium zoom level.
zoom_dic = {5: 12, 10: 11, 25: 10}
zoom = zoom_dic[circle_radius]

# Build the inclusive YYYYMMDD date window around the chosen date.
# 'Day Of' is not numeric, so int() raises ValueError (narrowed from the
# original bare except) and we fall back to a single-day window.
try:
    days_within = int(days_within)
    start_date = (d - pd.Timedelta(days=days_within)).strftime('%Y%m%d')
    end_date = (d + pd.Timedelta(days=days_within + 1)).strftime('%Y%m%d')
except ValueError:
    days_within = 0
    start_date = end_date = d.strftime('%Y%m%d')
|
109 |
+
#Geocode and get Data
result = geocode(address)
lat, lon = result.values[0]

# Reference raster: supplies the affine transform used to convert
# lon/lat into pixel row/col coordinates.
# NOTE(review): xr.open_rasterio is deprecated in newer xarray releases
# (rioxarray.open_rasterio is the replacement) — works with the pinned
# versions in requirements.txt.
ds_stage = xr.open_rasterio('hail_stage.grib2')
transform = ds_stage.rio.transform()

row, col = rasterio.transform.rowcol(transform, lon, lat)

# center=row,col
# 1.6 pixels per mile — presumably the raster is ~1-mile resolution;
# TODO confirm against the GRIB metadata.
radius = int(np.ceil(circle_radius*1.6))
# PIL-style crop box (left, upper, right, lower), inclusive of center.
crop_coords = col-radius, row-radius, col+radius+1, row+radius+1

# Get Data
results=get_data(start_date,end_date)

# Pixelwise maximum across all dates; 0.0393701 presumably converts
# mm to inches — TODO confirm source units.
# np.max on an empty list raises ValueError -> fall back to a zero grid
# matching the crop size.
try:
    max_values = np.max(np.array([i[1] for i in results]), axis=0)*0.0393701
except:
    max_values=np.zeros(shape=(2*radius+1,2*radius+1))
|
131 |
+
|
132 |
+
# Bin Data — hail-size (inches) histogram bin edges, labels, and colors.
bin_edges = [0, 0.1, 0.2, 0.4, 0.8, 1.2, 1.6, 2, 3, 4, np.inf]
bin_names = ["<0.1", "0.1-0.2", "0.2-0.4", "0.4-0.8", "0.8-1.2", "1.2-1.6", "1.6-2",
             "2-3", "3-4", ">4"]
colors_values = ['#ffffff', '#ffff00', '#d1ab00', '#ff9b00', '#fe0000', '#cd0000', '#ff30ce',
                 '#ff30cd', '#9a009b', '#4a4d4c']
color_discrete_map = dict(zip(bin_names, colors_values))

# One histogram per date: count of pixels falling into each size bin.
all_data = []
for date, mat in results:
    mat = mat*0.0393701  # presumably mm -> inches; TODO confirm units
    hist_values, _ = np.histogram(mat, bins=bin_edges)
    df = pd.DataFrame({'bin_edges': bin_edges[:-1],
                       'Bin': bin_names,
                       'Values': hist_values},
                      )
    df['Date'] = pd.to_datetime(date)
    all_data.append(df)

# Stacked bar of nonzero bins per date. pd.concat raises ValueError when
# all_data is empty (no hail in the window) — narrowed from the original
# bare except; `fig` is then simply never defined and the UI section
# skips the chart.
try:
    final = pd.concat(all_data)
    final = final.query('bin_edges!=0')
    fig = px.bar(final, x="Date", y="Values", color="Bin",
                 barmode="stack",
                 color_discrete_map=color_discrete_map)
except ValueError:
    pass
|
159 |
+
|
160 |
+
|
161 |
+
# Crop the raster using the bounds
# Band 0 of the stage raster, sliced to the same pixel window as the PNG
# crops, then overwritten with the computed maxima — keeps the DataArray's
# georeferencing so rio.bounds() below returns the crop's geographic extent.
cropped_data = ds_stage[0][row-radius:row+radius+1, col-radius:col+radius+1]
cropped_data.values = max_values
|
164 |
+
|
165 |
+
# Max Values Bin for RGB
|
166 |
+
|
167 |
+
def hex_to_rgb(hex_code):
    """Convert '#rrggbb' (leading '#' optional) to an (r, g, b) tuple of ints."""
    code = hex_code.lstrip('#')
    return tuple(int(code[pos:pos + 2], 16) for pos in (0, 2, 4))
|
171 |
+
|
172 |
+
def hex_to_rgba(hex_code, alpha=.8):
    """Convert a hex color to an (r, g, b, a) tuple with 0-255 alpha.

    Pure white ('#ffffff', the "<0.1" bin) is forced fully transparent
    so empty cells do not paint over the basemap.
    """
    if hex_code == '#ffffff':
        alpha = 0.0
    return hex_to_rgb(hex_code) + (int(alpha * 255),)
|
179 |
+
|
180 |
+
|
181 |
+
# Max-value bin -> RGBA image for the folium overlay.
# np.flipud: the raster's row 0 is the northern edge, but ImageOverlay
# draws row 0 at the southern bound, so flip vertically first.
bin_indices = np.digitize(np.flipud(max_values), bin_edges)-1
bin_colors = np.take(colors_values, bin_indices)

# FIX: the original reshaped to (shape[0], shape[0], 4), which only
# worked because the crop happens to be square — use both axes.
max_values_rgb = np.array([hex_to_rgba(i) for i in bin_colors.flatten()]).reshape(
    max_values.shape[0], max_values.shape[1], 4)

#Mapping
img = max_values_rgb.astype('uint8')

# Geographic extent of the crop; map_folium reads these globals for the
# overlay bounds.
boundary = cropped_data.rio.bounds()
left, bottom, right, top = boundary

# img[img < 0.0] = np.nan
# Step colormap matching the bin colors. NOTE(review): currently built
# but never attached to the map (a legend candidate); kept for parity.
colormap = cm.StepColormap(colors=list(color_discrete_map.values()),
                           index=bin_edges,
                           )

m = map_folium(img, zoom)
|
211 |
+
|
212 |
+
|
213 |
+
# Left panel: the folium map with the hail overlay.
with col1:
    st.title('Hail Mesh')
    st_folium(m, height=500)


# Right panel: stacked-bar chart + CSV download. When there was no hail
# in the window, `fig`/`final` were never defined; the resulting
# NameError is caught and the panel is left empty (deliberate
# best-effort — narrowed from the original bare except).
with col2:
    st.title('Hail')
    try:
        st.plotly_chart(fig)
        csv = convert_df(final)
        st.download_button(
            label="Download data as CSV",
            data=csv,
            file_name='data.csv',
            mime='text/csv')
    except Exception:
        pass

# st.bokeh_chart(hv.render(nice_plot*points_lat_lon, backend='bokeh'),use_container_width=True)


# Hide Streamlit's default hamburger menu and footer chrome.
st.markdown(""" <style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
</style> """, unsafe_allow_html=True)
|
requirements.txt
ADDED
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
branca==0.5.0
|
2 |
+
folium==0.12.1
|
3 |
+
geopy==2.2.0
|
4 |
+
joblib==1.2.0
|
5 |
+
numpy==1.23.5
|
6 |
+
pandas==1.4.2
|
7 |
+
Pillow==10.0.0
|
8 |
+
plotly==5.15.0
|
9 |
+
rasterio==1.2.10
|
10 |
+
rioxarray==0.12.2
|
11 |
+
streamlit==1.24.0
|
12 |
+
streamlit_folium==0.6.15
|
13 |
+
tqdm==4.64.0
|
14 |
+
xarray==2022.3.0
|
15 |
+
# plotly==5.7.0  (garbled older pin; superseded by plotly==5.15.0 above)
|
16 |
+
# streamlit==1.20.0  (garbled older pin; superseded by streamlit==1.24.0 above)
|