mattritchey committed on
Commit
88909b9
·
1 Parent(s): 7e91c2f

Upload 4 files

Browse files
Files changed (4) hide show
  1. app.py +300 -0
  2. hail scale.csv +16 -0
  3. hail scale2.png +0 -0
  4. requirements.txt +15 -0
app.py ADDED
@@ -0,0 +1,300 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Created on Tue Dec 6 09:56:29 2022
4
+
5
+ @author: mritchey
6
+ """
7
+ #streamlit run "C:\Users\mritchey\.spyder-py3\Python Scripts\streamlit projects\mrms\mrms_hail2 buffer.py"
8
+
9
+ import plotly.express as px
10
+ import os
11
+ from PIL import Image
12
+ from joblib import Parallel, delayed
13
+ import pandas as pd
14
+ import streamlit as st
15
+ from geopy.extra.rate_limiter import RateLimiter
16
+ from geopy.geocoders import Nominatim
17
+ import folium
18
+ from streamlit_folium import st_folium
19
+ import math
20
+ import geopandas as gpd
21
+ from skimage.io import imread
22
+ from streamlit_plotly_events import plotly_events
23
+ import requests
24
+ from requests.packages.urllib3.exceptions import InsecureRequestWarning
25
+ import rasterio
26
+ import rioxarray
27
+ import numpy as np
28
+
29
+
30
@st.cache
def geocode(address, buffer_size):
    """Geocode an address and return a one-row GeoDataFrame with a buffer.

    Tries the US Census geocoder first; on any failure falls back to
    Nominatim (OpenStreetMap).

    Parameters
    ----------
    address : str
        Free-form street address.
    buffer_size : int or float
        Buffer radius selected in the sidebar (miles).

    Returns
    -------
    geopandas.GeoDataFrame
        Columns 'Lat', 'Lon', a point 'geometry' (EPSG:4326), and a
        'buffer' polygon column.
    """
    try:
        # Census geocoder wants '+' for spaces and '%2C' for commas.
        encoded = address.replace(' ', '+').replace(',', '%2C')
        df = pd.read_json(
            f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={encoded}&benchmark=2020&format=json')
        coords = df.iloc[:1, 0][0][0]['coordinates']
        lat, lon = coords['y'], coords['x']
    except Exception:  # narrowed from bare except: never trap SystemExit/KeyboardInterrupt
        geolocator = Nominatim(user_agent="GTA Lookup")
        # Actually call through the rate-limited wrapper (the original built
        # it but then bypassed it), respecting Nominatim's usage policy.
        geocode_limited = RateLimiter(geolocator.geocode, min_delay_seconds=1)
        location = geocode_limited(address)
        lat, lon = location.latitude, location.longitude

    df = pd.DataFrame({'Lat': [lat], 'Lon': [lon]})
    gdf = gpd.GeoDataFrame(
        df, geometry=gpd.points_from_xy(df.Lon, df.Lat, crs=4326))
    # Buffer in Web Mercator meters, then back to WGS84.
    # NOTE(review): 2580 looks like an approximate miles->meters factor
    # adjusted for Mercator distortion at this latitude -- confirm intent
    # (exact conversion is 1609.34 m/mile).
    gdf['buffer'] = gdf['geometry'].to_crs(
        3857).buffer(buffer_size/2*2580).to_crs(4326)
    return gdf
50
+
51
+
52
@st.cache
def get_pngs(date):
    """Fetch one day's rendered MRMS hail PNG and score each pixel.

    Maps every pixel's RGB triple to 'Hail Scale' / 'Hail Scale In' via the
    module-level ``lut`` lookup table. Relies on module globals: prod_root,
    hour, minute, lon, lat, zoom, lut. The rendered image is 920x630.

    Returns a DataFrame with columns: index, 'Hail Scale',
    'Hail Scale In', 'Date'.
    """
    year, month, day = date[:4], date[4:6], date[6:]
    url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/render_multi_domain_product_layer.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/&prod_root={prod_root}&qperate_pal_option=0&qpe_pal_option=0&year={year}&month={month}&day={day}&hour={hour}&minute={minute}&clon={lon}&clat={lat}&zoom={zoom}&width=920&height=630'
    # Drop any alpha channel, then flatten to one row per pixel.
    rgb = imread(url)[:, :, :3]
    flat = pd.DataFrame(rgb.reshape(630*920, 3), columns=['R', 'G', 'B'])
    scored = flat.merge(
        lut[['R', 'G', 'B', 'Hail Scale', 'Hail Scale In']],
        on=['R', 'G', 'B'],
        how='left')[['Hail Scale', 'Hail Scale In']]
    scored['Date'] = date
    return scored.reset_index()
63
+
64
+
65
@st.cache
def get_pngs_parallel(dates):
    """Fetch the daily hail PNG frames concurrently.

    Uses a 32-worker thread pool (downloads are I/O bound) and returns the
    per-date DataFrames from get_pngs in input order.
    """
    jobs = (delayed(get_pngs)(day) for day in dates)
    return Parallel(n_jobs=32, prefer="threads")(jobs)
70
+
71
+
72
@st.cache
def png_data(date):
    """Download the rendered MRMS radar PNG for a YYYYMMDD date string.

    Relies on module globals prod_root, hour, minute, lon, lat, zoom.
    Returns the image as a numpy array (full channels, 630x920 pixels).
    """
    year, month, day = date[:4], date[4:6], date[6:]
    url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/render_multi_domain_product_layer.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/&prod_root={prod_root}&qperate_pal_option=0&qpe_pal_option=0&year={year}&month={month}&day={day}&hour={hour}&minute={minute}&clon={lon}&clat={lat}&zoom={zoom}&width=920&height=630'
    return imread(url)
78
+
79
+
80
@st.cache(allow_output_mutation=True)
def map_folium(data, gdf):
    """Build a folium map: marker at the geocoded point, buffer outline,
    and the radar image overlaid at the precomputed viewport bounds.

    Relies on module globals lat, lon, zoom, address, bounds.
    """
    fmap = folium.Map(location=[lat, lon], zoom_start=zoom, height=300)
    folium.Marker(location=[lat, lon], popup=address).add_to(fmap)
    folium.GeoJson(gdf['buffer']).add_to(fmap)
    overlay = folium.raster_layers.ImageOverlay(data, opacity=0.8, bounds=bounds)
    overlay.add_to(fmap)
    return fmap
91
+
92
+
93
def to_radians(degrees):
    """Convert an angle from degrees to radians (stdlib math.radians)."""
    return math.radians(degrees)
95
+
96
+
97
def lat_lon_to_bounds(lat, lng, zoom, width, height):
    """Approximate the geographic bounding box of a rendered map view.

    For a ``width`` x ``height`` pixel viewport centered at (lat, lng) at a
    slippy-map ``zoom`` level, returns ``[[south, west], [north, east]]``
    in decimal degrees (the format folium's ImageOverlay expects).

    Uses the web-mercator ground resolution: circumference / 2**(zoom + 8)
    meters per pixel, scaled by cos(lat) in the north-south direction.
    """
    EARTH_CIRCUMFERENCE_M = 40075016.686
    degrees_per_meter = 360 / EARTH_CIRCUMFERENCE_M

    # Meters per pixel east-west; north-south shrinks with latitude.
    m_pixel_ew = EARTH_CIRCUMFERENCE_M / math.pow(2, zoom + 8)
    m_pixel_ns = EARTH_CIRCUMFERENCE_M / \
        math.pow(2, zoom + 8) * math.cos(math.radians(lat))

    # Half-extent of the viewport in meters, then degrees.
    shift_m_ew = width/2 * m_pixel_ew
    shift_m_ns = height/2 * m_pixel_ns

    shift_deg_ew = shift_m_ew * degrees_per_meter
    shift_deg_ns = shift_m_ns * degrees_per_meter

    return [[lat-shift_deg_ns, lng-shift_deg_ew], [lat+shift_deg_ns, lng+shift_deg_ew]]
111
+
112
+
113
def image_to_geotiff(bounds, input_file_path, output_file_path='template.tiff'):
    """Georeference a plain image into a 3-band GeoTIFF.

    Parameters
    ----------
    bounds : list
        ``[[south, west], [north, east]]`` extent of the image (EPSG:4326).
    input_file_path : str
        Path of the source raster (the downloaded radar PNG).
    output_file_path : str
        Destination GeoTIFF path (default 'template.tiff').
    """
    south, west, north, east = tuple(
        [item for sublist in bounds for item in sublist])
    bands = [1, 2, 3]
    # Context manager closes the source dataset deterministically
    # (the original opened it and never closed the handle).
    with rasterio.open(input_file_path, 'r') as src:
        data = src.read(bands)
    transform = rasterio.transform.from_bounds(west, south, east, north,
                                               height=data.shape[1],
                                               width=data.shape[2])
    crs = {'init': 'epsg:4326'}

    with rasterio.open(output_file_path, 'w', driver='GTiff',
                       height=data.shape[1],
                       width=data.shape[2],
                       count=3, dtype=data.dtype, nodata=0,
                       transform=transform, crs=crs,
                       compress='lzw') as dst:
        dst.write(data, indexes=bands)
131
+
132
+
133
def get_mask(bounds, buffer_size):
    """Boolean (H, W, 3) mask of pixels within buffer_size of (lon, lat).

    Downloads the rendered PNG for the module-level ``date``, georeferences
    it via image_to_geotiff, then compares each pixel center's great-circle
    distance from the geocoded point against buffer_size.

    NOTE(review): buffer_size is chosen in miles in the sidebar, but
    haversine() returns kilometers -- confirm the intended units.
    """
    year, month, day = date[:4], date[4:6], date[6:]
    url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/render_multi_domain_product_layer.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/&prod_root={prod_root}&qperate_pal_option=0&qpe_pal_option=0&year={year}&month={month}&day={day}&hour={hour}&minute={minute}&clon={lon}&clat={lat}&zoom={zoom}&width=920&height=630'
    # verify=False disables TLS certificate validation for this request.
    png_bytes = requests.get(url, verify=False).content
    input_file_path = f'image_name_{date}_{var}.png'
    output_file_path = 'template.tiff'
    with open(input_file_path, 'wb') as fh:
        fh.write(png_bytes)

    image_to_geotiff(bounds, input_file_path, output_file_path)
    raster = rioxarray.open_rasterio(output_file_path)

    # Attach a per-pixel distance coordinate, then threshold it.
    raster = raster.assign_coords(distance=(haversine(raster.x, raster.y, lon, lat)))
    within = raster['distance'].values <= buffer_size
    # Replicate to 3 channels and move channels last: (H, W, 3).
    return np.transpose(np.stack([within, within, within]), (1, 2, 0))
150
+
151
+
152
def haversine(lon1, lat1, lon2, lat2):
    """Great-circle distance in kilometers between two points.

    Inputs are decimal degrees; scalars or numpy arrays (broadcasting
    applies), so it can score a whole raster grid at once.
    """
    # Work in radians throughout.
    lon1, lat1, lon2, lat2 = (np.deg2rad(v) for v in (lon1, lat1, lon2, lat2))

    # Haversine formula for the central angle.
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = np.sin(dlat/2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2)**2
    central_angle = 2 * np.arcsin(np.sqrt(a))

    EARTH_RADIUS_KM = 6371  # mean Earth radius
    return central_angle * EARTH_RADIUS_KM
166
+
167
+
168
# --- Page setup ---------------------------------------------------------
st.set_page_config(layout="wide")
# (Removed a redundant st.columns((3)) call that was immediately overwritten.)
col1, col2, col3 = st.columns((3, 3, 1))

# --- Sidebar inputs ------------------------------------------------------
zoom = 10
address = st.sidebar.text_input(
    "Address", "123 Main Street, Cincinnati, OH 43215")

date = st.sidebar.date_input("Date", pd.Timestamp(
    2022, 7, 6), key='date').strftime('%Y%m%d')
d = pd.Timestamp(date)
days_within = st.sidebar.selectbox('Within Days:', (5, 30, 60))
var = 'Hail'
var_input = 'hails&product=MESHMAX1440M'
mask_select = st.sidebar.radio('Only Show Buffer Data:', ("No", "Yes"))
buffer_size = st.sidebar.radio('Buffer Size (miles):', (5, 10, 15))

year, month, day = date[:4], date[4:6], date[6:]
hour = 23
minute = 30

# Product root is everything after '=' in the product query fragment.
prod_root = var_input[var_input.find('=')+1:]

# --- Geocode -------------------------------------------------------------
gdf = geocode(address, buffer_size)
lat, lon = tuple(gdf[['Lat', 'Lon']].values[0])

# --- Point value from the MRMS QVS service --------------------------------
url = 'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/get_multi_domain_rect_binary_value.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/'\
    + f'&prod_root={prod_root}&lon={lon}&lat={lat}&year={year}&month={month}&day={day}&hour={hour}&minute={minute}'

# NOTE(review): verify=False disables TLS certificate validation for this
# public feed -- consider re-enabling or pinning if the endpoint supports it.
response = requests.get(url, verify=False).json()
qvs_values = pd.DataFrame(response, index=[0])[
    ['qvs_value', 'qvs_units']].values[0]
qvs_value = qvs_values[0]
qvs_unit = qvs_values[1]

# --- PNG for the focus date ------------------------------------------------
data = png_data(date)

# --- Legend image ------------------------------------------------------------
legend = Image.open('hail scale2.png')

# --- PNG frames for the surrounding window, reduced to a per-pixel max -------
start_date, end_date = d - \
    pd.Timedelta(days=days_within), d+pd.Timedelta(days=days_within)
dates = pd.date_range(start_date,
                      end_date).strftime('%Y%m%d')
lut = pd.read_csv('hail scale.csv')
bounds = lat_lon_to_bounds(lat, lon, zoom, 920, 630)


results1 = get_pngs_parallel(dates)
results = pd.concat(results1)
# Per-pixel maximum hail scale across the whole date window.
max_data = results.groupby('index')[['Hail Scale']].max()

# Map the max scale values back to display colors.
max_data2 = pd.merge(max_data,
                     lut[['R', 'G', 'B', 'Hail Scale']],
                     on=['Hail Scale'],
                     how='left')[['R', 'G', 'B']]

data_max = max_data2.values.reshape(630, 920, 3)

# --- Optionally blank out everything outside the buffer circle ---------------
if mask_select == "Yes":
    mask = get_mask(bounds, buffer_size)
    mask1 = mask[:, :, 0].reshape(630*920)
    results = pd.concat([i[mask1] for i in results1])
    data_max = data_max*mask


# --- Daily hail-size bar chart ------------------------------------------------
# Only count pixels above hail scale 4 (i.e. meaningful hail).
bar = results.query("`Hail Scale`>4").groupby(
    ['Date', 'Hail Scale In'])['index'].count().reset_index()
bar['Date'] = pd.to_datetime(bar['Date'])

bar = bar.reset_index()
bar.columns = ['level_0', 'Date', 'Hail Scale In', 'count']
bar['Hail Scale In'] = bar['Hail Scale In'].astype(str)
bar = bar.sort_values('Hail Scale In', ascending=True)

# Hail-size -> hex-color mapping for consistent bar colors.
color_discrete_map = lut[['Hail Scale In', 'c_code']].sort_values(
    'Hail Scale In', ascending=True).astype(str)
color_discrete_map = color_discrete_map.set_index(
    'Hail Scale In').to_dict()['c_code']

fig = px.bar(bar, x="Date", y="count", color="Hail Scale In",
             barmode='stack',
             color_discrete_map=color_discrete_map)

# --- Link to the interactive MRMS viewer for the same spot/time ---------------
url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/index.php?web_exec_mode=run&menu=menu_config.txt&year={year}&month={month}&day={day}&hour=23&minute=30&time_mode=static&zoom=9&clon={lon}&clat={lat}&base=0&overlays=1&mping_mode=0&product_type={var_input}&qpe_pal_option=0&opacity=.75&looping_active=off&num_frames=6&frame_step=200&seconds_step=600'


# --- Maps: focus date and window maximum ---------------------------------------
m = map_folium(data, gdf)
m_max = map_folium(data_max, gdf)

with st.container():
    col1, col2, col3 = st.columns((1, 2, 2))
    with col1:
        link = f'[Go To MRMS Site]({url})'
        st.markdown(link, unsafe_allow_html=True)
        st.image(legend)
    with col2:
        st.header(f'{var} on {pd.Timestamp(date).strftime("%D")}')
        st_folium(m, height=300)
    with col3:
        st.header(
            f'Max from {start_date.strftime("%D")} to {end_date.strftime("%D")}')
        st_folium(m_max, height=300)

# Clicking a bar re-renders the map for that day. plotly_events returns an
# empty list until something is clicked, so index/key errors are expected here.
try:
    selected_points = plotly_events(fig, click_event=True, hover_event=False)
    date2 = pd.Timestamp(selected_points[0]['x']).strftime('%Y%m%d')
    data2 = png_data(date2)
    m3 = map_folium(data2, gdf)
    st.header(f'{var} on {pd.Timestamp(date2).strftime("%D")}')
    st_folium(m3, height=300)
except Exception:  # narrowed from bare except: don't swallow KeyboardInterrupt
    pass


st.markdown(""" <style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
</style> """, unsafe_allow_html=True)
hail scale.csv ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ c_code,occurence,RGB,R,G,B,Hail Scale,Hail Scale In,R+G
2
+ #01A0F6,408,"(1, 160, 246)",1,160,246,2,0.08,78546
3
+ #BE55DC,408,"(190, 85, 220)",190,85,220,100,3.94,3545724
4
+ #FF9000,408,"(255, 144, 0)",255,144,0,30,1.18,36608
5
+ #00C800,408,"(0, 200, 0)",0,200,0,8,0.31,199
6
+ #FF00FF,396,"(255, 0, 255)",255,0,255,75,2.95,-65536
7
+ #C00000,396,"(192, 0, 0)",192,0,0,50,1.97,-193
8
+ #FF0000,396,"(255, 0, 0)",255,0,0,40,1.57,-256
9
+ #E7C000,396,"(231, 192, 0)",231,192,0,20,0.79,44312
10
+ #FFFF00,396,"(255, 255, 0)",255,255,0,15,0.59,65024
11
+ #009000,396,"(0, 144, 0)",0,144,0,10,0.39,143
12
+ #00FF00,396,"(0, 255, 0)",0,255,0,6,0.24,254
13
+ #0000F6,396,"(0, 0, 246)",0,0,246,4,0.16,-247
14
+ #00ECEC,336,"(0, 236, 236)",0,236,236,1,0.04,55695
15
+ #7E32A7,60,"(126, 50, 167)",126,50,167,200,7.87,1045464
16
+ #7F7F7F,,"(127, 127, 127)",127,127,127,0,0,2064384
hail scale2.png ADDED
requirements.txt ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ folium==0.12.1
2
+ geopandas==0.10.2
3
+ geopy==2.2.0
4
+ joblib==1.1.0
5
+ numpy==1.21.5
6
+ pandas==1.4.2
7
+ Pillow==9.4.0
8
+ plotly==5.7.0
9
+ rasterio==1.2.10
10
+ requests==2.27.1
11
+ rioxarray==0.12.2
12
+ scikit_image==0.19.2
13
+ streamlit==1.4.0
14
+ streamlit_folium==0.6.15
15
+ streamlit_plotly_events==0.0.6