Commit · 71a7e73
Parent(s): d59170a
Upload app.py
app.py ADDED
@@ -0,0 +1,302 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 6 09:56:29 2022

@author: mritchey
"""
# streamlit run "C:\Users\mritchey\.spyder-py3\Python Scripts\streamlit projects\mrms\mrms_hail2 buffer.py"

import plotly.express as px
import os
from PIL import Image
from joblib import Parallel, delayed
import pandas as pd
import streamlit as st
from geopy.extra.rate_limiter import RateLimiter
from geopy.geocoders import Nominatim
import folium
from streamlit_folium import st_folium
import math
import geopandas as gpd
from skimage.io import imread
from streamlit_plotly_events import plotly_events
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import rasterio
import rioxarray
import numpy as np

# The MRMS endpoints are queried with verify=False below; silence the resulting warnings.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)


@st.cache
def geocode(address, buffer_size):
    # Geocode with the Census Bureau service first; fall back to Nominatim on any failure.
    try:
        address2 = address.replace(' ', '+').replace(',', '%2C')
        df = pd.read_json(
            f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={address2}&benchmark=2020&format=json')
        results = df.iloc[:1, 0][0][0]['coordinates']
        lat, lon = results['y'], results['x']
    except Exception:
        geolocator = Nominatim(user_agent="GTA Lookup")
        geocode_limited = RateLimiter(geolocator.geocode, min_delay_seconds=1)
        location = geocode_limited(address)
        lat, lon = location.latitude, location.longitude

    df = pd.DataFrame({'Lat': [lat], 'Lon': [lon]})
    gdf = gpd.GeoDataFrame(
        df, geometry=gpd.points_from_xy(df.Lon, df.Lat, crs=4326))
    # Buffer in Web Mercator meters (approximate miles-to-meters factor), then back to lat/lon.
    gdf['buffer'] = gdf['geometry'].to_crs(
        3857).buffer(buffer_size/2*2580).to_crs(4326)
    return gdf


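# A hedged usage sketch (not executed by the app): geocode() returns a one-row
# GeoDataFrame holding the geocoded point plus a buffer polygon around it, e.g.
#   gdf = geocode("123 Main Street, Cincinnati, OH 43215", buffer_size=10)
#   gdf.loc[0, 'geometry']   # shapely Point in EPSG:4326
#   gdf.loc[0, 'buffer']     # buffer polygon, built in EPSG:3857 and reprojected back
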
@st.cache
def get_pngs(date):
    year, month, day = date[:4], date[4:6], date[6:]
    url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/render_multi_domain_product_layer.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/&prod_root={prod_root}&qperate_pal_option=0&qpe_pal_option=0&year={year}&month={month}&day={day}&hour={hour}&minute={minute}&clon={lon}&clat={lat}&zoom={zoom}&width=920&height=630'
    data = imread(url)[:, :, :3]
    data2 = data.reshape(630*920, 3)
    data2_df = pd.DataFrame(data2, columns=['R', 'G', 'B'])
    data2_df2 = pd.merge(data2_df, lut[['R', 'G', 'B', 'Hail Scale', 'Hail Scale In']], on=['R', 'G', 'B'],
                         how='left')[['Hail Scale', 'Hail Scale In']]
    data2_df2['Date'] = date
    return data2_df2.reset_index()


@st.cache
def get_pngs_parallel(dates):
    results1 = Parallel(n_jobs=32, prefer="threads")(
        delayed(get_pngs)(i) for i in dates)
    return results1


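# How the lookup above recovers hail sizes (illustrative sketch; the RGB triple is made
# up, not taken from the actual MRMS palette):
#   pd.merge(pd.DataFrame([[255, 0, 0]], columns=['R', 'G', 'B']),
#            lut[['R', 'G', 'B', 'Hail Scale', 'Hail Scale In']],
#            on=['R', 'G', 'B'], how='left')
# Pixels whose color is not in 'hail scale.csv' come back as NaN and drop out of the
# `Hail Scale`>4 filter used for the bar chart below.
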
@st.cache
def png_data(date):
    year, month, day = date[:4], date[4:6], date[6:]
    url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/render_multi_domain_product_layer.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/&prod_root={prod_root}&qperate_pal_option=0&qpe_pal_option=0&year={year}&month={month}&day={day}&hour={hour}&minute={minute}&clon={lon}&clat={lat}&zoom={zoom}&width=920&height=630'
    data = imread(url)
    return data


@st.cache(allow_output_mutation=True)
def map_folium(data, gdf):
    m = folium.Map(location=[lat, lon], zoom_start=zoom, height=300)
    folium.Marker(
        location=[lat, lon],
        popup=address).add_to(m)

    folium.GeoJson(gdf['buffer']).add_to(m)
    folium.raster_layers.ImageOverlay(
        data, opacity=0.8, bounds=bounds).add_to(m)
    return m


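# Note on the overlay: folium.raster_layers.ImageOverlay takes bounds in the form
# [[lat_min, lon_min], [lat_max, lon_max]], which is the shape of the list returned by
# lat_lon_to_bounds() below, so the 920x630 MRMS render is stretched over the same view.
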
def to_radians(degrees):
    return degrees * math.pi / 180


def lat_lon_to_bounds(lat, lng, zoom, width, height):
    earth_cir_m = 40075016.686
    degreesPerMeter = 360 / earth_cir_m
    m_pixel_ew = earth_cir_m / math.pow(2, zoom + 8)
    m_pixel_ns = earth_cir_m / \
        math.pow(2, zoom + 8) * math.cos(to_radians(lat))

    shift_m_ew = width/2 * m_pixel_ew
    shift_m_ns = height/2 * m_pixel_ns

    shift_deg_ew = shift_m_ew * degreesPerMeter
    shift_deg_ns = shift_m_ns * degreesPerMeter

    return [[lat-shift_deg_ns, lng-shift_deg_ew], [lat+shift_deg_ns, lng+shift_deg_ew]]


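# Worked example of the scale above (equator, zoom 10): meters per pixel east-west is
# 40075016.686 / 2**(10 + 8) ≈ 152.9 m, so a 920-px-wide render spans roughly
# 920 * 152.9 m ≈ 140.6 km; the north-south term is additionally scaled by cos(lat).
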
def image_to_geotiff(bounds, input_file_path, output_file_path='template.tiff'):
    # bounds arrives as [[south, west], [north, east]] from lat_lon_to_bounds().
    south, west, north, east = tuple(
        [item for sublist in bounds for item in sublist])
    bands = [1, 2, 3]
    with rasterio.open(input_file_path, 'r') as dataset:
        data = dataset.read(bands)
    transform = rasterio.transform.from_bounds(west, south, east, north,
                                               height=data.shape[1],
                                               width=data.shape[2])
    crs = 'EPSG:4326'

    with rasterio.open(output_file_path, 'w', driver='GTiff',
                       height=data.shape[1],
                       width=data.shape[2],
                       count=3, dtype=data.dtype, nodata=0,
                       transform=transform, crs=crs,
                       compress='lzw') as dst:
        dst.write(data, indexes=bands)


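# Hedged sketch of what from_bounds() produces: an affine transform whose origin is the
# north-west corner of the bounding box, e.g.
#   t = rasterio.transform.from_bounds(-85.0, 38.0, -84.0, 39.0, width=920, height=630)
#   t * (0, 0)    # -> (-85.0, 39.0), the upper-left corner in lon/lat
# (these coordinates are illustrative, not the app's actual bounds).
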
def get_mask(bounds, buffer_size):
    # Re-render the PNG, georeference it, and mark every pixel within the buffer distance.
    year, month, day = date[:4], date[4:6], date[6:]
    url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/render_multi_domain_product_layer.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/&prod_root={prod_root}&qperate_pal_option=0&qpe_pal_option=0&year={year}&month={month}&day={day}&hour={hour}&minute={minute}&clon={lon}&clat={lat}&zoom={zoom}&width=920&height=630'
    img_data = requests.get(url, verify=False).content
    input_file_path = f'image_name_{date}_{var}.png'
    output_file_path = 'template.tiff'
    with open(input_file_path, 'wb') as handler:
        handler.write(img_data)

    image_to_geotiff(bounds, input_file_path, output_file_path)
    rds = rioxarray.open_rasterio(output_file_path)
    # rds.plot.imshow()

    rds = rds.assign_coords(distance=(haversine(rds.x, rds.y, lon, lat)))
    # haversine() returns kilometers and buffer_size is in miles, so convert before comparing.
    mask = rds['distance'].values <= buffer_size * 1.60934
    mask = np.transpose(np.stack([mask, mask, mask]), (1, 2, 0))
    return mask


def haversine(lon1, lat1, lon2, lat2):
    # convert decimal degrees to radians
    lon1 = np.deg2rad(lon1)
    lon2 = np.deg2rad(lon2)
    lat1 = np.deg2rad(lat1)
    lat2 = np.deg2rad(lat2)

    # haversine formula
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = np.sin(dlat/2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2)**2
    c = 2 * np.arcsin(np.sqrt(a))
    r = 6371
    return c * r


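# Sanity check on units (haversine returns kilometers, since r = 6371 km):
#   haversine(-84.5, 39.0, -84.5, 40.0)   # ≈ 111.19, i.e. one degree of latitude
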
# Set Columns
st.set_page_config(layout="wide")
col1, col2, col3 = st.columns((3, 3, 1))

# Input Data
zoom = 10
_ = st.sidebar.text_input(
    "Claim Number", "836-xxxxxxx")
address = st.sidebar.text_input(
    "Address", "123 Main Street, Cincinnati, OH 43215")

date = st.sidebar.date_input("Date", pd.Timestamp(
    2022, 7, 6), key='date').strftime('%Y%m%d')
d = pd.Timestamp(date)
days_within = st.sidebar.selectbox('Within Days:', (5, 30, 60, 90, 180))
var = 'Hail'
var_input = 'hails&product=MESHMAX1440M'
mask_select = st.sidebar.radio('Only Show Buffer Data:', ("No", "Yes"))
buffer_size = st.sidebar.radio('Buffer Size (miles):', (5, 10, 15))

year, month, day = date[:4], date[4:6], date[6:]
hour = 23
minute = 30

prod_root = var_input[var_input.find('=')+1:]

# Geocode
gdf = geocode(address, buffer_size)
lat, lon = tuple(gdf[['Lat', 'Lon']].values[0])

# Get Value
url = 'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/get_multi_domain_rect_binary_value.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/'\
    + f'&prod_root={prod_root}&lon={lon}&lat={lat}&year={year}&month={month}&day={day}&hour={hour}&minute={minute}'

response = requests.get(url, verify=False).json()
qvs_values = pd.DataFrame(response, index=[0])[
    ['qvs_value', 'qvs_units']].values[0]
qvs_value = qvs_values[0]
qvs_unit = qvs_values[1]

# Get PNG Focus
data = png_data(date)

# Legend
legend = Image.open('hail scale3b.png')

# Get PNG Max
start_date, end_date = d - \
    pd.Timedelta(days=days_within), d+pd.Timedelta(days=days_within)
dates = pd.date_range(start_date,
                      end_date).strftime('%Y%m%d')
lut = pd.read_csv('hail scale.csv')
bounds = lat_lon_to_bounds(lat, lon, zoom, 920, 630)


results1 = get_pngs_parallel(dates)
# results1 = Parallel(n_jobs=32, prefer="threads")(delayed(get_pngs)(i) for i in dates)
results = pd.concat(results1)
max_data = results.groupby('index')[['Hail Scale']].max()

max_data2 = pd.merge(max_data,
                     lut[['R', 'G', 'B', 'Hail Scale']],
                     on=['Hail Scale'],
                     how='left')[['R', 'G', 'B']]

data_max = max_data2.values.reshape(630, 920, 3)

# Masked Data
if mask_select == "Yes":
    mask = get_mask(bounds, buffer_size)
    mask1 = mask[:, :, 0].reshape(630*920)
    results = pd.concat([i[mask1] for i in results1])
    data_max = data_max*mask


# Bar
bar = results.query("`Hail Scale`>4").groupby(
    ['Date', 'Hail Scale In'])['index'].count().reset_index()
bar['Date'] = pd.to_datetime(bar['Date'])

bar = bar.reset_index()
bar.columns = ['level_0', 'Date', 'Hail Scale In', 'count']
bar['Hail Scale In'] = bar['Hail Scale In'].astype(str)
bar = bar.sort_values('Hail Scale In', ascending=True)

color_discrete_map = lut[['Hail Scale In', 'c_code']].sort_values(
    'Hail Scale In', ascending=True).astype(str)
color_discrete_map = color_discrete_map.set_index(
    'Hail Scale In').to_dict()['c_code']

fig = px.bar(bar, x="Date", y="count", color="Hail Scale In",
             barmode='stack',
             color_discrete_map=color_discrete_map)

# Submit Url to New Tab
url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/index.php?web_exec_mode=run&menu=menu_config.txt&year={year}&month={month}&day={day}&hour=23&minute=30&time_mode=static&zoom=9&clon={lon}&clat={lat}&base=0&overlays=1&mping_mode=0&product_type={var_input}&qpe_pal_option=0&opacity=.75&looping_active=off&num_frames=6&frame_step=200&seconds_step=600'


# Map Focus
m = map_folium(data, gdf)
# Map Max
m_max = map_folium(data_max, gdf)

with st.container():
    col1, col2, col3 = st.columns((1, 2, 2))
    with col1:
        link = f'[Go To MRMS Site]({url})'
        st.markdown(link, unsafe_allow_html=True)
        st.image(legend)
    with col2:
        st.header(f'{var} on {pd.Timestamp(date).strftime("%D")}')
        st_folium(m, height=300)
    with col3:
        st.header(
            f'Max from {start_date.strftime("%D")} to {end_date.strftime("%D")}')
        st_folium(m_max, height=300)

try:
    selected_points = plotly_events(fig, click_event=True, hover_event=False)
    date2 = pd.Timestamp(selected_points[0]['x']).strftime('%Y%m%d')
    data2 = png_data(date2)
    m3 = map_folium(data2, gdf)
    st.header(f'{var} on {pd.Timestamp(date2).strftime("%D")}')
    st_folium(m3, height=300)
except Exception:
    # Nothing clicked on the bar chart yet, so skip the drill-down map.
    pass


st.markdown(""" <style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
</style> """, unsafe_allow_html=True)
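# Third-party packages imported by this script (a requirements.txt for the Space would
# need to cover these; exact version pins are not part of this commit): plotly, Pillow,
# joblib, pandas, streamlit, geopy, folium, streamlit-folium, geopandas, scikit-image,
# streamlit-plotly-events, requests, rasterio, rioxarray, numpy.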