mattritchey committed on
Commit ace7fa1 · 0 Parent(s)

Duplicate from mattritchey/multipage
.gitattributes ADDED
@@ -0,0 +1,34 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
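These are the stock Hugging Face Spaces LFS rules: each pattern routes the matching binary format through Git LFS (filter=lfs diff=lfs merge=lfs), and the -text flag stops Git from treating those files as diffable text.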
README.md ADDED
@@ -0,0 +1,13 @@
+ ---
+ title: Multipage
+ emoji: 😻
+ colorFrom: indigo
+ colorTo: indigo
+ sdk: streamlit
+ sdk_version: 1.17.0
+ app_file: app.py
+ pinned: false
+ duplicated_from: mattritchey/multipage
+ ---
+
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py ADDED
@@ -0,0 +1,35 @@
+ # -*- coding: utf-8 -*-
+ """
+ Created on Thu Mar 30 08:44:14 2023
+
+ @author: mritchey
+ """
+ # streamlit run "C:\Users\mritchey\.spyder-py3\Python Scripts\streamlit projects\multipage_app\Homepage.py"
+ import streamlit as st
+
+ st.set_page_config(
+     page_title="Multipage App",
+     # page_icon="👋",
+ )
+
+
+
+ st.title("Select A Page in Upper Left")
+ st.header("Hail")
+ # url_hail='https://mattritchey-multipage.hf.space/Hail'
+ # link = f'[Hail]({url_hail})'
+ # st.markdown(link, unsafe_allow_html=True)
+
+ st.header("Hail Plus")
+ st.header("Wind (High Resolution): RTMA")
+ st.header("Wind (Low Resolution): ERA")
+ st.sidebar.success("Select a page above.")
+
+ # if "my_input" not in st.session_state:
+ #     st.session_state["my_input"] = ""
+
+ # my_input = st.text_input("Input a text here", st.session_state["my_input"])
+ # submit = st.button("Submit")
+ # if submit:
+ #     st.session_state["my_input"] = my_input
+ #     st.write("You have entered: ", my_input)
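Streamlit's multipage convention does the navigation here: every script under pages/ is picked up automatically and listed in the sidebar, so this homepage only needs to name the pages. The st.header() calls above are plain text rather than links; the commented-out url_hail block is an earlier experiment with markdown links, and the commented-out session_state block is leftover scaffolding for sharing input across pages.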
hail scale.csv ADDED
@@ -0,0 +1,16 @@
+ c_code,occurence,RGB,R,G,B,Hail Scale,Hail Scale In,R+G
+ #01A0F6,408,"(1, 160, 246)",1,160,246,2,0.08,78546
+ #BE55DC,408,"(190, 85, 220)",190,85,220,100,3.94,3545724
+ #FF9000,408,"(255, 144, 0)",255,144,0,30,1.18,36608
+ #00C800,408,"(0, 200, 0)",0,200,0,8,0.31,199
+ #FF00FF,396,"(255, 0, 255)",255,0,255,75,2.95,-65536
+ #C00000,396,"(192, 0, 0)",192,0,0,50,1.97,-193
+ #FF0000,396,"(255, 0, 0)",255,0,0,40,1.57,-256
+ #E7C000,396,"(231, 192, 0)",231,192,0,20,0.79,44312
+ #FFFF00,396,"(255, 255, 0)",255,255,0,15,0.59,65024
+ #009000,396,"(0, 144, 0)",0,144,0,10,0.39,143
+ #00FF00,396,"(0, 255, 0)",0,255,0,6,0.24,254
+ #0000F6,396,"(0, 0, 246)",0,0,246,4,0.16,-247
+ #00ECEC,336,"(0, 236, 236)",0,236,236,1,0.04,55695
+ #7E32A7,60,"(126, 50, 167)",126,50,167,200,7.87,1045464
+ #7F7F7F,,"(127, 127, 127)",127,127,127,0,0,2064384
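This CSV is the color lookup table for the hail pages: each MRMS legend color maps to a hail-scale value and a size in inches ("Hail Scale In"). A minimal sketch (illustrative pixel values, not real MRMS output) of how the pages use it, mirroring the merge in get_pngs():

```python
import pandas as pd

lut = pd.read_csv('hail scale.csv')  # columns include R, G, B, Hail Scale In

# Three hypothetical pixels: red (1.57 in), yellow (0.59 in), and an
# unmapped background color that comes back NaN after the left join.
pixels = pd.DataFrame({'R': [255, 255, 12], 'G': [0, 255, 34], 'B': [0, 0, 56]})

sizes = pd.merge(pixels, lut[['R', 'G', 'B', 'Hail Scale In']],
                 on=['R', 'G', 'B'], how='left')
print(sizes)
```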
hail scale3b.png ADDED
pages/Hail Plus.py ADDED
@@ -0,0 +1,375 @@
+ # -*- coding: utf-8 -*-
+ """
+ Created on Tue Dec 6 09:56:29 2022
+
+ @author: mritchey
+ """
+ #streamlit run "C:\Users\mritchey\.spyder-py3\Python Scripts\streamlit projects\mrms\mrms_all buffer.py"
+
+ import plotly.express as px
+
+ from joblib import Parallel, delayed
+ import pandas as pd
+ import streamlit as st
+ from geopy.extra.rate_limiter import RateLimiter
+ from geopy.geocoders import Nominatim
+ import folium
+ from streamlit_folium import st_folium
+ import math
+ import geopandas as gpd
+ from skimage.io import imread
+ from streamlit_plotly_events import plotly_events
+ import requests
+ import rasterio
+ import rioxarray
+ import numpy as np
+ import base64
+ import re
+
+
+ @st.cache
+ def geocode(address, buffer_size):
+     try:
+         address2 = address.replace(' ', '+').replace(',', '%2C')
+         df = pd.read_json(
+             f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={address2}&benchmark=2020&format=json')
+         results = df.iloc[:1, 0][0][0]['coordinates']
+         lat, lon = results['y'], results['x']
+     except:
+         geolocator = Nominatim(user_agent="GTA Lookup")
+         geocode = RateLimiter(geolocator.geocode, min_delay_seconds=1)
+         location = geolocator.geocode(address)
+         lat, lon = location.latitude, location.longitude
+
+     df = pd.DataFrame({'Lat': [lat], 'Lon': [lon]})
+     gdf = gpd.GeoDataFrame(
+         df, geometry=gpd.points_from_xy(df.Lon, df.Lat, crs=4326))
+     gdf['buffer'] = gdf['geometry'].to_crs(
+         3857).buffer(buffer_size/2*2580).to_crs(4326)
+     return gdf
+
+
+ def get_pngs(date):
+     year, month, day = date[:4], date[4:6], date[6:]
+     url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/render_multi_domain_product_layer.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/&prod_root={prod_root}&qperate_pal_option=0&qpe_pal_option=0&year={year}&month={month}&day={day}&hour={hour}&minute={minute}&clon={lon}&clat={lat}&zoom={zoom}&width=920&height=630'
+     data = imread(url)[:, :, :3]
+     data2 = data.reshape(630*920, 3)
+     data2_df = pd.DataFrame(data2, columns=['R', 'G', 'B'])
+     data2_df2 = pd.merge(data2_df, lut[['R', 'G', 'B', 'Value', ]], on=['R', 'G', 'B'],
+                          how='left')[['Value', ]]
+     data2_df2['Date'] = date
+     return data2_df2.reset_index()
+
+
+ @st.cache
+ def get_pngs_parallel(dates):
+     results1 = Parallel(n_jobs=32, prefer="threads")(
+         delayed(get_pngs)(i) for i in dates)
+     return results1
+
+
+ def png_data(date):
+     year, month, day = date[:4], date[4:6], date[6:]
+     url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/render_multi_domain_product_layer.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/&prod_root={prod_root}&qperate_pal_option=0&qpe_pal_option=0&year={year}&month={month}&day={day}&hour={hour}&minute={minute}&clon={lon}&clat={lat}&zoom={zoom}&width=920&height=630'
+     data = imread(url)
+     return data
+
+
+ @st.cache(allow_output_mutation=True)
+ def map_folium(data, gdf):
+     m = folium.Map(location=[lat, lon], zoom_start=zoom, height=300)
+     folium.Marker(
+         location=[lat, lon],
+         popup=address).add_to(m)
+
+     folium.GeoJson(gdf['buffer']).add_to(m)
+     folium.raster_layers.ImageOverlay(
+         data, opacity=0.8, bounds=bounds).add_to(m)
+     return m
+
+
+ def to_radians(degrees):
+     return degrees * math.pi / 180
+
+
+ def lat_lon_to_bounds(lat, lng, zoom, width, height):
+     earth_cir_m = 40075016.686
+     degreesPerMeter = 360 / earth_cir_m
+     m_pixel_ew = earth_cir_m / math.pow(2, zoom + 8)
+     m_pixel_ns = earth_cir_m / \
+         math.pow(2, zoom + 8) * math.cos(to_radians(lat))
+
+     shift_m_ew = width/2 * m_pixel_ew
+     shift_m_ns = height/2 * m_pixel_ns
+
+     shift_deg_ew = shift_m_ew * degreesPerMeter
+     shift_deg_ns = shift_m_ns * degreesPerMeter
+
+     return [[lat-shift_deg_ns, lng-shift_deg_ew], [lat+shift_deg_ns, lng+shift_deg_ew]]
+
+
+ def image_to_geotiff(bounds, input_file_path, output_file_path='template.tiff'):
+     south, west, north, east = tuple(
+         [item for sublist in bounds for item in sublist])
+     dataset = rasterio.open(input_file_path, 'r')
+     bands = [1, 2, 3]
+     data = dataset.read(bands)
+     transform = rasterio.transform.from_bounds(west, south, east, north,
+                                                height=data.shape[1],
+                                                width=data.shape[2])
+     crs = {'init': 'epsg:4326'}
+
+     with rasterio.open(output_file_path, 'w', driver='GTiff',
+                        height=data.shape[1],
+                        width=data.shape[2],
+                        count=3, dtype=data.dtype, nodata=0,
+                        transform=transform, crs=crs,
+                        compress='lzw') as dst:
+         dst.write(data, indexes=bands)
+
+
+ def get_mask(bounds, buffer_size):
+     year, month, day = date[:4], date[4:6], date[6:]
+     url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/render_multi_domain_product_layer.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/&prod_root={prod_root}&qperate_pal_option=0&qpe_pal_option=0&year={year}&month={month}&day={day}&hour={hour}&minute={minute}&clon={lon}&clat={lat}&zoom={zoom}&width=920&height=630'
+     img_data = requests.get(url, verify=False).content
+     input_file_path = f'image_name_{date}_{var}.png'
+     output_file_path = 'template.tiff'
+     with open(input_file_path, 'wb') as handler:
+         handler.write(img_data)
+
+     image_to_geotiff(bounds, input_file_path, output_file_path)
+     rds = rioxarray.open_rasterio(output_file_path)
+     # rds.plot.imshow()
+     rds = rds.assign_coords(distance=(haversine(rds.x, rds.y, lon, lat)))
+     mask = rds['distance'].values <= buffer_size
+     mask = np.transpose(np.stack([mask, mask, mask]), (1, 2, 0))
+     return mask
+
+
+ def haversine(lon1, lat1, lon2, lat2):
+     # convert decimal degrees to radians
+     lon1 = np.deg2rad(lon1)
+     lon2 = np.deg2rad(lon2)
+     lat1 = np.deg2rad(lat1)
+     lat2 = np.deg2rad(lat2)
+
+     # haversine formula
+     dlon = lon2 - lon1
+     dlat = lat2 - lat1
+     a = np.sin(dlat/2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2)**2
+     c = 2 * np.arcsin(np.sqrt(a))
+     r = 6371
+     return c * r
+
+
+ def render_svg(svg):
+     """Renders the given svg string."""
+     b64 = base64.b64encode(svg.encode('utf-8')).decode("utf-8")
+     html = r'<img src="data:image/svg+xml;base64,%s"/>' % b64
+     st.write(html, unsafe_allow_html=True)
+
+
+ def rgb_to_hex(rgb):
+     return '#'+'%02x%02x%02x' % rgb
+
+
+ def get_legend_lut(prod_root):
+     url_legend = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/shared/fetch_svg_legend_via_config.php?web_resources_dir=/var/www/html/qvs/product_viewer/resources/&config_name=title_and_legend_config.txt&product={prod_root}'
+     r = requests.get(url_legend)  # Get the webpage
+     svg = r.content.decode()  # Decoded response content with the svg string
+
+     if svg.find('size="16">mm</text>') > 0:
+         svg = svg.replace('size="16">mm</text>', 'size="16">in</text>')
+         beg_string = '"13">'
+         end_string = '</text>'
+         res = re.findall('%s(.*)%s' % (beg_string, end_string), svg)
+         for mm in res:
+             inc = round(float(mm)*0.0393701, 2)
+             svg = svg.replace(f'{beg_string}{mm}{end_string}',
+                               f'{beg_string}{str(inc)}{end_string}')
+
+     elif svg.find('font-size="12">') > 0:
+         beg_string = '"12">'
+         end_string = '</text>'
+
+     else:
+         beg_string = '"13">'
+         end_string = '</text>'
+
+     #Make LUT
+     values = re.findall('%s(.*)%s' % (beg_string, end_string), svg)
+
+     beg_string, end_string = 'fill="rgb(', ')" />'
+     rgb = re.findall('%s(.*)%s' % (beg_string, end_string), svg)
+     rgb = [eval(i[0]) for i in rgb]
+
+     beg_string, end_string = 'style="fill:rgb(', ');" />'
+     rgb2 = re.findall('%s(.*)%s' % (beg_string, end_string), svg)
+     rgb2 = [eval(i[0]) for i in rgb2]
+
+     rgb = rgb2+rgb
+
+     lut = pd.DataFrame({'Value': values,
+                         'RGB': rgb})
+     lut['R'], lut['G'], lut['B'] = lut['RGB'].str
+     lut[['R', 'G', 'B']] = lut[['R', 'G', 'B']].astype('uint8')
+     lut['Value'] = lut['Value'].astype(float)
+     lut['hex'] = lut['RGB'].apply(rgb_to_hex)
+     return svg, lut
+
+
+ #Set Columns
+ st.set_page_config(layout="wide")
+
+
+ #Input Data
+ zoom = 10
+ address = st.sidebar.text_input(
+     "Address", "123 Main Street, Columbus, OH 43215")
+ var = st.sidebar.selectbox(
+     'Product:', ('Hail', 'Flooding', 'Rain: Radar', 'Rain: Multi Sensor', 'Tornado'))
+
+ date = st.sidebar.date_input("Date", pd.Timestamp(
+     2022, 9, 8), key='date').strftime('%Y%m%d')
+ d = pd.Timestamp(date)
+ days_within = st.sidebar.selectbox('Within Days:', (5, 30, 60))
+
+ mask_select = st.sidebar.radio('Only Show Buffer Data:', ("No", "Yes"))
+ buffer_size = st.sidebar.radio('Buffer Size (miles):', (5, 10, 15))
+
+ year, month, day = date[:4], date[4:6], date[6:]
+ hour = 23
+ minute = 0
+
+
+ #Select Variable
+ if var == 'Hail':
+     var_input = 'hails&product=MESHMAX1440M'
+ elif var == 'Flooding':
+     var_input = 'flash&product=FL_ARI24H'
+ elif var == 'Rain: Radar':
+     var_input = 'q3rads&product=Q3EVAP24H'
+ elif var == 'Rain: Multi Sensor':
+     var_input = 'q3mss&product=P1_Q3MS24H'
+ elif var == 'Tornado':
+     var_input = 'azsh&product=RT1440M'
+
+ prod_root = var_input[var_input.find('=')+1:]
+
+ #Geocode
+ gdf = geocode(address, buffer_size)
+ lat, lon = tuple(gdf[['Lat', 'Lon']].values[0])
+
+ #Get Value
+ url = 'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/get_multi_domain_rect_binary_value.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/'\
+     + f'&prod_root={prod_root}&lon={lon}&lat={lat}&year={year}&month={month}&day={day}&hour={hour}&minute={minute}'
+
+ response = requests.get(url, verify=False).json()
+ qvs_values = pd.DataFrame(response, index=[0])[
+     ['qvs_value', 'qvs_units']].values[0]
+ qvs_value = qvs_values[0]
+ qvs_unit = qvs_values[1]
+
+ #Get PNG Focus
+ data = png_data(date)
+
+ #Get PNG Max
+ start_date, end_date = d - \
+     pd.Timedelta(days=days_within), d+pd.Timedelta(days=days_within)
+ dates = pd.date_range(start_date,
+                       end_date).strftime('%Y%m%d')
+ #Get SVG and Lut
+ svg, lut = get_legend_lut(prod_root)
+
+ bounds = lat_lon_to_bounds(lat, lon, zoom, 920, 630)
+
+ results1 = get_pngs_parallel(dates)
+ # results1 = Parallel(n_jobs=32, prefer="threads")(delayed(get_pngs)(i) for i in dates)
+ results = pd.concat(results1).fillna(0)
+ max_data = results.groupby('index')[['Value']].max()
+
+ max_data2 = pd.merge(max_data,
+                      lut[['R', 'G', 'B', 'Value']],
+                      on=['Value'],
+                      how='left')[['R', 'G', 'B']]
+
+ data_max = max_data2.values.reshape(630, 920, 3)
+
+ #Masked Data
+ if mask_select == "Yes":
+     mask = get_mask(bounds, buffer_size)
+     mask1 = mask[:, :, 0].reshape(630*920)
+     results = pd.concat([i[mask1] for i in results1])
+     data_max = data_max*mask
+ else:
+     pass
+
+
+ #Bar
+ if var == 'Tornado':
+     bar = results.query("Value>.006").groupby(
+         ['Date', 'Value'])['index'].count().reset_index()
+ else:
+     bar = results.query("Value>.2").groupby(['Date', 'Value'])[
+         'index'].count().reset_index()
+
+ bar['Date'] = pd.to_datetime(bar['Date'])
+
+ bar = bar.reset_index()
+ bar.columns = ['level_0', 'Date', 'Value', 'count']
+ bar = bar.sort_values('Value', ascending=True)
+ bar['Value'] = bar['Value'].astype(str)
+
+
+ color_discrete_map = lut[['Value', 'hex']].sort_values(
+     'Value', ascending=True).astype(str)
+ color_discrete_map = color_discrete_map.set_index(
+     'Value').to_dict()['hex']
+
+ fig = px.bar(bar, x="Date", y="count", color="Value",
+              barmode='stack',
+              color_discrete_map=color_discrete_map)
+
+ #Submit Url to New Tab
+ url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/index.php?web_exec_mode=run&menu=menu_config.txt&year={year}&month={month}&day={day}&hour=23&minute=30&time_mode=static&zoom=9&clon={lon}&clat={lat}&base=0&overlays=1&mping_mode=0&product_type={var_input}&qpe_pal_option=0&opacity=.75&looping_active=off&num_frames=6&frame_step=200&seconds_step=600'
+
+
+ #Map Focus
+ m = map_folium(data, gdf)
+ #Map Max
+ m_max = map_folium(data_max, gdf)
+
+ with st.container():
+     col1, col2 = st.columns(2)
+
+     with col1:
+         st.header(f'{var} on {pd.Timestamp(date).strftime("%D")}')
+         st_folium(m, height=300)
+     with col2:
+         st.header(
+             f'Max from {start_date.strftime("%D")} to {end_date.strftime("%D")}')
+         st_folium(m_max, height=300)
+
+ with st.container():
+     col1, col2, col3 = st.columns((1, 10, 6))
+     with col1:
+         render_svg(svg)
+     with col2:
+         link = f'[Go To MRMS Site]({url})'
+         st.markdown(link, unsafe_allow_html=True)
+         selected_points = plotly_events(
+             fig, click_event=True, hover_event=False)
+     with col3:
+         try:
+             date2 = pd.Timestamp(selected_points[0]['x']).strftime('%Y%m%d')
+             data2 = png_data(date2)
+             m3 = map_folium(data2, gdf)
+             st.header(f'{var} on {pd.Timestamp(date2).strftime("%D")}')
+             st_folium(m3, height=300)
+         except:
+             pass
+
+ st.markdown(""" <style>
+ #MainMenu {visibility: hidden;}
+ footer {visibility: hidden;}
+ </style> """, unsafe_allow_html=True)
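Two notes on the geometry helpers above. First, haversine() returns great-circle distance in kilometres (Earth radius r = 6371), while the sidebar buffer is labelled in miles, so the distance <= buffer_size comparison in get_mask() appears to mix units. Second, a quick sanity sketch of the formula itself (arbitrary test coordinates, not app data):

```python
import numpy as np

def haversine(lon1, lat1, lon2, lat2):
    # same formula as the page above: great-circle distance in km
    lon1, lon2 = np.deg2rad(lon1), np.deg2rad(lon2)
    lat1, lat2 = np.deg2rad(lat1), np.deg2rad(lat2)
    dlon, dlat = lon2 - lon1, lat2 - lat1
    a = np.sin(dlat / 2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2)**2
    return 2 * np.arcsin(np.sqrt(a)) * 6371

print(haversine(-83.0, 40.0, -83.0, 40.0))  # identical points -> 0.0
print(haversine(0.0, 0.0, 1.0, 0.0))        # 1 degree of longitude at the
                                            # equator -> ~111.2 km
```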
pages/Hail.py ADDED
@@ -0,0 +1,306 @@
+
+
+ # -*- coding: utf-8 -*-
+ """
+ Created on Tue Dec 6 09:56:29 2022
+
+ @author: mritchey
+ """
+ #streamlit run "C:\Users\mritchey\.spyder-py3\Python Scripts\streamlit projects\mrms\mrms_hail2 buffer.py"
+
+ import plotly.express as px
+ import os
+ from PIL import Image
+ from joblib import Parallel, delayed
+ import pandas as pd
+ import streamlit as st
+ from geopy.extra.rate_limiter import RateLimiter
+ from geopy.geocoders import Nominatim
+ import folium
+ from streamlit_folium import st_folium
+ import math
+ import geopandas as gpd
+ from skimage.io import imread
+ from streamlit_plotly_events import plotly_events
+ import requests
+ from requests.packages.urllib3.exceptions import InsecureRequestWarning
+ import rasterio
+ import rioxarray
+ import numpy as np
+
+
+ @st.cache
+ def geocode(address, buffer_size):
+     try:
+         address2 = address.replace(' ', '+').replace(',', '%2C')
+         df = pd.read_json(
+             f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={address2}&benchmark=2020&format=json')
+         results = df.iloc[:1, 0][0][0]['coordinates']
+         lat, lon = results['y'], results['x']
+     except:
+         geolocator = Nominatim(user_agent="GTA Lookup")
+         geocode = RateLimiter(geolocator.geocode, min_delay_seconds=1)
+         location = geolocator.geocode(address)
+         lat, lon = location.latitude, location.longitude
+
+     df = pd.DataFrame({'Lat': [lat], 'Lon': [lon]})
+     gdf = gpd.GeoDataFrame(
+         df, geometry=gpd.points_from_xy(df.Lon, df.Lat, crs=4326))
+     gdf['buffer'] = gdf['geometry'].to_crs(
+         3857).buffer(buffer_size/2*2580).to_crs(4326)
+     return gdf
+
+
+ @st.cache
+ def get_pngs(date):
+     year, month, day = date[:4], date[4:6], date[6:]
+     url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/render_multi_domain_product_layer.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/&prod_root={prod_root}&qperate_pal_option=0&qpe_pal_option=0&year={year}&month={month}&day={day}&hour={hour}&minute={minute}&clon={lon}&clat={lat}&zoom={zoom}&width=920&height=630'
+     data = imread(url)[:, :, :3]
+     data2 = data.reshape(630*920, 3)
+     data2_df = pd.DataFrame(data2, columns=['R', 'G', 'B'])
+     data2_df2 = pd.merge(data2_df, lut[['R', 'G', 'B', 'Hail Scale', 'Hail Scale In']], on=['R', 'G', 'B'],
+                          how='left')[['Hail Scale', 'Hail Scale In']]
+     data2_df2['Date'] = date
+     return data2_df2.reset_index()
+
+
+ @st.cache
+ def get_pngs_parallel(dates):
+     results1 = Parallel(n_jobs=32, prefer="threads")(
+         delayed(get_pngs)(i) for i in dates)
+     return results1
+
+
+ @st.cache
+ def png_data(date):
+     year, month, day = date[:4], date[4:6], date[6:]
+     url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/render_multi_domain_product_layer.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/&prod_root={prod_root}&qperate_pal_option=0&qpe_pal_option=0&year={year}&month={month}&day={day}&hour={hour}&minute={minute}&clon={lon}&clat={lat}&zoom={zoom}&width=920&height=630'
+     data = imread(url)
+     return data
+
+
+ @st.cache(allow_output_mutation=True)
+ def map_folium(data, gdf):
+     m = folium.Map(location=[lat, lon], zoom_start=zoom, height=300)
+     folium.Marker(
+         location=[lat, lon],
+         popup=address).add_to(m)
+
+     folium.GeoJson(gdf['buffer']).add_to(m)
+     folium.raster_layers.ImageOverlay(
+         data, opacity=0.8, bounds=bounds).add_to(m)
+     return m
+
+
+ def to_radians(degrees):
+     return degrees * math.pi / 180
+
+
+ def lat_lon_to_bounds(lat, lng, zoom, width, height):
+     earth_cir_m = 40075016.686
+     degreesPerMeter = 360 / earth_cir_m
+     m_pixel_ew = earth_cir_m / math.pow(2, zoom + 8)
+     m_pixel_ns = earth_cir_m / \
+         math.pow(2, zoom + 8) * math.cos(to_radians(lat))
+
+     shift_m_ew = width/2 * m_pixel_ew
+     shift_m_ns = height/2 * m_pixel_ns
+
+     shift_deg_ew = shift_m_ew * degreesPerMeter
+     shift_deg_ns = shift_m_ns * degreesPerMeter
+
+     return [[lat-shift_deg_ns, lng-shift_deg_ew], [lat+shift_deg_ns, lng+shift_deg_ew]]
+
+
+ def image_to_geotiff(bounds, input_file_path, output_file_path='template.tiff'):
+     south, west, north, east = tuple(
+         [item for sublist in bounds for item in sublist])
+     dataset = rasterio.open(input_file_path, 'r')
+     bands = [1, 2, 3]
+     data = dataset.read(bands)
+     transform = rasterio.transform.from_bounds(west, south, east, north,
+                                                height=data.shape[1],
+                                                width=data.shape[2])
+     crs = {'init': 'epsg:4326'}
+
+     with rasterio.open(output_file_path, 'w', driver='GTiff',
+                        height=data.shape[1],
+                        width=data.shape[2],
+                        count=3, dtype=data.dtype, nodata=0,
+                        transform=transform, crs=crs,
+                        compress='lzw') as dst:
+         dst.write(data, indexes=bands)
+
+
+ def get_mask(bounds, buffer_size):
+     year, month, day = date[:4], date[4:6], date[6:]
+     url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/render_multi_domain_product_layer.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/&prod_root={prod_root}&qperate_pal_option=0&qpe_pal_option=0&year={year}&month={month}&day={day}&hour={hour}&minute={minute}&clon={lon}&clat={lat}&zoom={zoom}&width=920&height=630'
+     img_data = requests.get(url, verify=False).content
+     input_file_path = f'image_name_{date}_{var}.png'
+     output_file_path = 'template.tiff'
+     with open(input_file_path, 'wb') as handler:
+         handler.write(img_data)
+
+     image_to_geotiff(bounds, input_file_path, output_file_path)
+     rds = rioxarray.open_rasterio(output_file_path)
+     # rds.plot.imshow()
+
+     rds = rds.assign_coords(distance=(haversine(rds.x, rds.y, lon, lat)))
+     mask = rds['distance'].values <= buffer_size
+     mask = np.transpose(np.stack([mask, mask, mask]), (1, 2, 0))
+     return mask
+
+
+ def haversine(lon1, lat1, lon2, lat2):
+     # convert decimal degrees to radians
+     lon1 = np.deg2rad(lon1)
+     lon2 = np.deg2rad(lon2)
+     lat1 = np.deg2rad(lat1)
+     lat2 = np.deg2rad(lat2)
+
+     # haversine formula
+     dlon = lon2 - lon1
+     dlat = lat2 - lat1
+     a = np.sin(dlat/2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2)**2
+     c = 2 * np.arcsin(np.sqrt(a))
+     r = 6371
+     return c * r
+
+
+ #Set Columns
+ st.set_page_config(layout="wide")
+ col1, col2, col3 = st.columns((3))
+ col1, col2, col3 = st.columns((3, 3, 1))
+
+ #Input Data
+ zoom = 10
+ _ = st.sidebar.text_input(
+     "Claim Number", "836-xxxxxxx")
+ address = st.sidebar.text_input(
+     "Address", "123 Main Street, Cincinnati, OH 43215")
+
+ date = st.sidebar.date_input("Date", pd.Timestamp(
+     2022, 7, 6), key='date').strftime('%Y%m%d')
+ d = pd.Timestamp(date)
+ days_within = st.sidebar.selectbox('Within Days:', (5, 30, 60, 90, 180))
+ var = 'Hail'
+ var_input = 'hails&product=MESHMAX1440M'
+ mask_select = st.sidebar.radio('Only Show Buffer Data:', ("No", "Yes"))
+ buffer_size = st.sidebar.radio('Buffer Size (miles):', (5, 10, 15))
+
+ year, month, day = date[:4], date[4:6], date[6:]
+ hour = 23
+ minute = 30
+
+ prod_root = var_input[var_input.find('=')+1:]
+
+ #Geocode
+ gdf = geocode(address, buffer_size)
+ lat, lon = tuple(gdf[['Lat', 'Lon']].values[0])
+
+ #Get Value
+ url = 'https://mrms.nssl.noaa.gov/qvs/product_viewer/local/get_multi_domain_rect_binary_value.php?mode=run&cpp_exec_dir=/home/metop/web/specific/opv/&web_resources_dir=/var/www/html/qvs/product_viewer/resources/'\
+     + f'&prod_root={prod_root}&lon={lon}&lat={lat}&year={year}&month={month}&day={day}&hour={hour}&minute={minute}'
+
+ response = requests.get(url, verify=False).json()
+ qvs_values = pd.DataFrame(response, index=[0])[
+     ['qvs_value', 'qvs_units']].values[0]
+ qvs_value = qvs_values[0]
+ qvs_unit = qvs_values[1]
+
+ #Get PNG Focus
+ data = png_data(date)
+
+ #Legend
+ legend = Image.open('hail scale3b.png')
+
+ #Get PNG Max
+ start_date, end_date = d - \
+     pd.Timedelta(days=days_within), d+pd.Timedelta(days=days_within)
+ dates = pd.date_range(start_date,
+                       end_date).strftime('%Y%m%d')
+ lut = pd.read_csv('hail scale.csv')
+ bounds = lat_lon_to_bounds(lat, lon, zoom, 920, 630)
+
+
+ results1 = get_pngs_parallel(dates)
+ # results1 = Parallel(n_jobs=32, prefer="threads")(delayed(get_pngs)(i) for i in dates)
+ results = pd.concat(results1)
+ max_data = results.groupby('index')[['Hail Scale']].max()
+
+ max_data2 = pd.merge(max_data,
+                      lut[['R', 'G', 'B', 'Hail Scale']],
+                      on=['Hail Scale'],
+                      how='left')[['R', 'G', 'B']]
+
+ data_max = max_data2.values.reshape(630, 920, 3)
+
+ #Masked Data
+ if mask_select == "Yes":
+     mask = get_mask(bounds, buffer_size)
+     mask1 = mask[:, :, 0].reshape(630*920)
+     results = pd.concat([i[mask1] for i in results1])
+     data_max = data_max*mask
+ else:
+     pass
+
+
+ #Bar
+ bar = results.query("`Hail Scale`>4").groupby(
+     ['Date', 'Hail Scale In'])['index'].count().reset_index()
+ bar['Date'] = pd.to_datetime(bar['Date'])
+
+ bar = bar.reset_index()
+ bar.columns = ['level_0', 'Date', 'Hail Scale In', 'count']
+ bar['Hail Scale In'] = bar['Hail Scale In'].astype(str)
+ bar = bar.sort_values('Hail Scale In', ascending=True)
+
+ color_discrete_map = lut[['Hail Scale In', 'c_code']].sort_values(
+     'Hail Scale In', ascending=True).astype(str)
+ color_discrete_map = color_discrete_map.set_index(
+     'Hail Scale In').to_dict()['c_code']
+
+ fig = px.bar(bar, x="Date", y="count", color="Hail Scale In",
+              barmode='stack',
+              color_discrete_map=color_discrete_map)
+
+ #Submit Url to New Tab
+ url = f'https://mrms.nssl.noaa.gov/qvs/product_viewer/index.php?web_exec_mode=run&menu=menu_config.txt&year={year}&month={month}&day={day}&hour=23&minute=30&time_mode=static&zoom=9&clon={lon}&clat={lat}&base=0&overlays=1&mping_mode=0&product_type={var_input}&qpe_pal_option=0&opacity=.75&looping_active=off&num_frames=6&frame_step=200&seconds_step=600'
+
+ #Map Focus
+ m = map_folium(data, gdf)
+ #Map Max
+ m_max = map_folium(data_max, gdf)
+
+ with st.container():
+     col1, col2, col3 = st.columns((1, 2, 2))
+     with col1:
+         link = f'[Go To MRMS Site]({url})'
+         st.markdown(link, unsafe_allow_html=True)
+         st.image(legend)
+     with col2:
+         st.header(f'{var} on {pd.Timestamp(date).strftime("%D")}')
+         st_folium(m, height=300)
+     with col3:
+         st.header(
+             f'Max from {start_date.strftime("%D")} to {end_date.strftime("%D")}')
+         st_folium(m_max, height=300)
+
+ try:
+     selected_points = plotly_events(fig, click_event=True, hover_event=False)
+     date2 = pd.Timestamp(selected_points[0]['x']).strftime('%Y%m%d')
+     data2 = png_data(date2)
+     m3 = map_folium(data2, gdf)
+     st.header(f'{var} on {pd.Timestamp(date2).strftime("%D")}')
+     st_folium(m3, height=300)
+ except:
+     pass
+
+
+ st.markdown(""" <style>
+ #MainMenu {visibility: hidden;}
+ footer {visibility: hidden;}
+ </style> """, unsafe_allow_html=True)
+
+
+
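The bounding box that both hail pages hand to folium comes from standard Web Mercator tile math: at zoom level z, one screen pixel covers earth_circumference / 2**(z + 8) metres. A short sketch reproducing the numbers lat_lon_to_bounds() works with at the hard-coded zoom = 10:

```python
import math

earth_cir_m = 40075016.686             # same constant as lat_lon_to_bounds()
zoom = 10
m_per_px = earth_cir_m / math.pow(2, zoom + 8)

print(round(m_per_px, 1))              # ~152.9 m per pixel at zoom 10
print(round(920 * m_per_px / 1e3, 1))  # ~140.6 km east-west for the 920 px render
print(round(630 * m_per_px / 1e3, 1))  # ~96.3 km north-south, before the
                                       # cos(lat) correction on the N-S term
```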
pages/Wind (High Resolution): RTMA.py ADDED
@@ -0,0 +1,221 @@
+ # -*- coding: utf-8 -*-
+ """
+ Created on Fri Oct 14 10:35:25 2022
+ @author: mritchey
+ """
+
+ import datetime
+ import glob
+ import os
+ import urllib.request
+ import branca.colormap as cm
+ import folium
+ import numpy as np
+ import pandas as pd
+ import plotly.express as px
+ import rasterio
+ import rioxarray
+ import streamlit as st
+ from geopy.extra.rate_limiter import RateLimiter
+ from geopy.geocoders import Nominatim
+ from joblib import Parallel, delayed
+ from matplotlib import colors as colors
+ from streamlit_folium import st_folium
+ from threading import Thread
+
+
+ def download_file_get_data(url, rows, columns):
+     file = urllib.request.urlretrieve(url, url[-23:])[0]
+     rds = rioxarray.open_rasterio(file)
+     wind_mph = rds.rio.reproject("EPSG:4326")[0, rows, columns].values*2.23694
+     time = url[-15:-11]
+     return [wind_mph, time]
+
+
+ def threading(df_input, func_input):
+     starttime = time.time()
+     tasks_thread = df_input
+     results_thread = []
+
+     def thread_func(value_input):
+         response = func_input(value_input)
+         results_thread.append(response)
+         return True
+
+     threads = []
+     for i in range(len(tasks_thread)):
+         process = Thread(target=thread_func, args=[tasks_thread[i]])
+         process.start()
+         threads.append(process)
+
+     for process in threads:
+         process.join()
+     print(f'Time: {str(round((time.time()-starttime)/60,5))} Minutes')
+     return results_thread
+
+
+ def mapvalue2color(value, cmap):
+     if np.isnan(value):
+         return (1, 0, 0, 0)
+     else:
+         return colors.to_rgba(cmap(value), 0.7)
+
+
+ def geocode(address):
+     try:
+         address2 = address.replace(' ', '+').replace(',', '%2C')
+         df = pd.read_json(
+             f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={address2}&benchmark=2020&format=json')
+         results = df.iloc[:1, 0][0][0]['coordinates']
+         lat, lon = results['y'], results['x']
+     except:
+         geolocator = Nominatim(user_agent="GTA Lookup")
+         geocode = RateLimiter(geolocator.geocode, min_delay_seconds=1)
+         location = geolocator.geocode(address)
+         lat, lon = location.latitude, location.longitude
+     return lat, lon
+
+
+ @st.cache
+ def get_grib_data(url, d, t):
+     file = urllib.request.urlretrieve(url, f'{d}{t}{type_wind}.grib2')[0]
+     return file
+
+
+ # @st.cache
+ def graph_entire_day(d, rows, columns):
+     year, month, day = d[:4], d[4:6], d[6:8]
+     times = [f'0{str(i)}'[-2:] for i in range(0, 24)]
+     urls = [
+         f'https://mtarchive.geol.iastate.edu/{year}/{month}/{day}/grib2/ncep/RTMA/{d}{t}00_{type_wind.upper()}.grib2' for t in times]
+
+     results = Parallel(n_jobs=4)(
+         delayed(download_file_get_data)(i, rows, columns) for i in urls)
+
+     df_all = pd.DataFrame(results, columns=['MPH', 'Time'])
+     df_all['MPH'] = df_all['MPH'].round(2)
+     df_all['Time'] = pd.to_datetime(d+df_all['Time'], format='%Y%m%d%H%M')
+     return df_all
+
+
+ @st.cache
+ def convert_df(df):
+     return df.to_csv(index=0).encode('utf-8')
+
+ # try:
+ #     for i in glob.glob('*.grib2'):
+ #         try:
+ #             os.remove(i)
+ #         except:
+ #             pass
+ # except:
+ #     pass
+
+
+ st.set_page_config(layout="wide")
+ col1, col2 = st.columns((2))
+
+ address = st.sidebar.text_input(
+     "Address", "123 Main Street, Columbus, OH 43215")
+ d = st.sidebar.date_input(
+     "Date", pd.Timestamp(2022, 9, 28)).strftime('%Y%m%d')
+
+ time = st.sidebar.selectbox('Time:', ('12 AM', '6 AM', '12 PM', '6 PM',))
+ type_wind = st.sidebar.selectbox('Type:', ('Gust', 'Wind'))
+ entire_day = st.sidebar.radio(
+     'Graph Entire Day (Takes a Bit):', ('No', 'Yes'))
+
+ if time[-2:] == 'PM' and int(time[:2].strip()) < 12:
+     t = datetime.time(int(time[:2].strip())+12, 00).strftime('%H')+'00'
+ elif time[-2:] == 'AM' and int(time[:2].strip()) == 12:
+     t = '0000'
+ else:
+     t = datetime.time(int(time[:2].strip()), 00).strftime('%H')+'00'
+
+ year, month, day = d[:4], d[4:6], d[6:8]
+
+ url = f'https://mtarchive.geol.iastate.edu/{year}/{month}/{day}/grib2/ncep/RTMA/{d}{t}_{type_wind.upper()}.grib2'
+ file = get_grib_data(url, d, t)
+
+ lat, lon = geocode(address)
+
+ rds = rioxarray.open_rasterio(file)
+ projected = rds.rio.reproject("EPSG:4326")
+ wind_mph = projected.sel(x=lon, y=lat, method="nearest").values*2.23694
+
+ affine = projected.rio.transform()
+
+ rows, columns = rasterio.transform.rowcol(affine, lon, lat)
+
+ size = 40
+
+ projected2 = projected[0, rows-size:rows+size, columns-size:columns+size]
+
+ img = projected2.values*2.23694
+ boundary = projected2.rio.bounds()
+ left, bottom, right, top = boundary
+
+ img[img < 0.0] = np.nan
+
+ clat = (bottom + top)/2
+ clon = (left + right)/2
+
+ vmin = np.floor(np.nanmin(img))
+ vmax = np.ceil(np.nanmax(img))
+
+ colormap = cm.LinearColormap(
+     colors=['blue', 'lightblue', 'red'], vmin=vmin, vmax=vmax)
+
+ m = folium.Map(location=[lat, lon], zoom_start=9, height=500)
+
+ folium.Marker(
+     location=[lat, lon],
+     popup=f"{wind_mph[0].round(2)} MPH").add_to(m)
+
+ folium.raster_layers.ImageOverlay(
+     image=img,
+     name='Wind Speed Map',
+     opacity=.8,
+     bounds=[[bottom, left], [top, right]],
+     colormap=lambda value: mapvalue2color(value, colormap)
+ ).add_to(m)
+
+
+ folium.LayerControl().add_to(m)
+ colormap.caption = 'Wind Speed: MPH'
+ m.add_child(colormap)
+
+ with col1:
+     st.title('RTMA Model')
+     url_error = 'https://mattritchey-rtma.hf.space/'
+     link = f'[If RTMA not working click here]({url_error})'
+     st.markdown(link, unsafe_allow_html=True)
+
+     st.write(
+         f"{type_wind.title()} Speed: {wind_mph[0].round(2)} MPH at {time} UTC")
+     st_folium(m, height=500)
+
+
+ if entire_day == 'Yes':
+     df_all = graph_entire_day(d, rows, columns)
+     fig = px.line(df_all, x="Time", y="MPH")
+     with col2:
+         st.title('Analysis')
+         st.plotly_chart(fig)
+
+         csv = convert_df(df_all)
+
+         st.download_button(
+             label="Download data as CSV",
+             data=csv,
+             file_name=f'{d}.csv',
+             mime='text/csv')
+ else:
+     pass
+
+
+
+ st.markdown(""" <style>
+ #MainMenu {visibility: hidden;}
+ footer {visibility: hidden;}
+ </style> """, unsafe_allow_html=True)
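Two small conversions drive this page: RTMA wind fields come in m/s and are scaled by 2.23694 (1 m/s = 2.23694 mph), and the sidebar's 12-hour labels are folded into the 'HHMM' strings used to build the Iowa State archive URLs. A sketch of both (the helper name to_hhmm is mine, not the page's):

```python
import datetime

def to_mph(mps):
    # 1 m/s = 2.23694 mph, the factor applied to the reprojected raster
    return mps * 2.23694

print(round(to_mph(10), 2))  # 22.37 mph

def to_hhmm(label):
    # label like '6 PM', as produced by the sidebar selectbox
    hour = int(label[:2].strip())
    if label[-2:] == 'PM' and hour < 12:
        hour += 12
    elif label[-2:] == 'AM' and hour == 12:
        hour = 0
    return datetime.time(hour, 0).strftime('%H') + '00'

print(to_hhmm('6 PM'))   # '1800'
print(to_hhmm('12 AM'))  # '0000'
```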
pages/Wind (Low Resolution): ERA.py ADDED
@@ -0,0 +1,240 @@
+ # -*- coding: utf-8 -*-
+ """
+ Created on Fri Oct 14 10:35:25 2022
+
+ @author: mritchey
+ """
+ # streamlit run "C:\Users\mritchey\.spyder-py3\Python Scripts\streamlit projects\ERA\ERA2.py"
+ import datetime
+ import glob
+ import os
+ import branca.colormap as cm
+ import folium
+ import numpy as np
+ import pandas as pd
+ import plotly.express as px
+ import streamlit as st
+ from geopy.extra.rate_limiter import RateLimiter
+ from geopy.geocoders import Nominatim
+ from matplotlib import colors as colors
+ from streamlit_folium import st_folium
+ import rioxarray
+ import xarray as xr
+ import cdsapi
+ import os
+
+
+
+ def mapvalue2color(value, cmap):
+     if np.isnan(value):
+         return (1, 0, 0, 0)
+     else:
+         return colors.to_rgba(cmap(value), 0.7)
+
+
+ def geocode(address):
+     try:
+         address2 = address.replace(' ', '+').replace(',', '%2C')
+         df = pd.read_json(
+             f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={address2}&benchmark=2020&format=json')
+         results = df.iloc[:1, 0][0][0]['coordinates']
+         lat, lon = results['y'], results['x']
+     except:
+         geolocator = Nominatim(user_agent="GTA Lookup")
+         geocode = RateLimiter(geolocator.geocode, min_delay_seconds=1)
+         location = geolocator.geocode(address)
+         lat, lon = location.latitude, location.longitude
+     return lat, lon
+
+
+ def graph_within_date_range(d, number_days_range):
+     year, month, day = d[:4], d[4:6], d[6:8]
+     date = pd.Timestamp(d)
+     start_date, end_date = date - \
+         pd.Timedelta(days=number_days_range), date + \
+         pd.Timedelta(days=number_days_range+1)
+     start_date = start_date.strftime("%Y-%m-%d")
+     end_date = end_date.strftime("%Y-%m-%d")
+     url = f'https://archive-api.open-meteo.com/v1/archive?latitude={lat}&longitude={lon}&start_date={start_date}&end_date={end_date}&hourly=temperature_2m,precipitation,windspeed_10m,windgusts_10m&models=best_match&temperature_unit=fahrenheit&windspeed_unit=mph&precipitation_unit=inch'
+     df = pd.read_json(url).reset_index()
+     data = pd.DataFrame({c['index']: c['hourly'] for r, c in df.iterrows()})
+     data['time'] = pd.to_datetime(data['time'])
+     data['date'] = pd.to_datetime(data['time'].dt.date)
+     data = data.query("temperature_2m==temperature_2m")
+
+     data_agg = data.groupby(['date']).agg({'temperature_2m': ['min', 'mean', 'max'],
+                                            'precipitation': ['sum'],
+                                            'windspeed_10m': ['min', 'mean', 'max'],
+                                            'windgusts_10m': ['min', 'mean', 'max']
+                                            })
+     data_agg.columns = data_agg.columns.to_series().str.join('_')
+     data_agg = data_agg.query("temperature_2m_min==temperature_2m_min")
+     return data.drop(columns=['date']), data_agg
+
+
+ @st.cache(allow_output_mutation=True)
+ def get_era5_data(year, month, day):
+     c = cdsapi.Client(key=os.environ['key'],
+                       url="https://cds.climate.copernicus.eu/api/v2")
+
+     c.retrieve(
+         'reanalysis-era5-single-levels',
+         {
+             'product_type': 'reanalysis',
+             'variable': ['10m_u_component_of_wind', '10m_v_component_of_wind',
+                          'instantaneous_10m_wind_gust',
+                          '2m_temperature', 'total_precipitation'],
+             'year': year,
+             'month': [month],
+             'day': [day],
+             'time': ['00:00', '06:00', '12:00', '18:00'],
+             'area': [49.5, -125, 24.5, -66.5, ],
+             'format': 'netcdf',
+         },
+         'data.nc')
+
+
+ @st.cache
+ def convert_df(df):
+     return df.to_csv(index=0).encode('utf-8')
+
+
+ try:
+     for i in glob.glob('*.grib2'):
+         os.remove(i)
+ except:
+     pass
+
+ st.set_page_config(layout="wide")
+ col1, col2 = st.columns((2))
+
+ address = st.sidebar.text_input(
+     "Address", "123 Main Street, Columbus, OH 43215")
+ date = st.sidebar.date_input(
+     "Date", pd.Timestamp(2022, 9, 28))
+ d = date.strftime('%Y%m%d')
+ date = date.strftime('%Y-%m-%d')
+ time = st.sidebar.selectbox('Time (UTC):', ('12 AM', '6 AM', '12 PM', '6 PM',))
+ type_var = st.sidebar.selectbox(
+     'Type:', ('Gust', 'Wind', 'Temp', 'Precipitation'))
+ number_days_range = st.sidebar.selectbox(
+     'Within Day Range:', (5, 10, 30, 90, 180))
+ hourly_daily = st.sidebar.radio('Aggregate Data', ('Hourly', 'Daily'))
+
+ # Keys
+ var_key = {'Gust': 'i10fg', 'Wind': 'wind10',
+            'Temp': 't2m', 'Precipitation': 'tp'}
+
+ variable = var_key[type_var]
+
+ unit_key = {'Gust': 'MPH', 'Wind': 'MPH',
+             'Temp': 'F', 'Precipitation': 'In.'}
+ unit = unit_key[type_var]
+
+ cols_key = {'Gust': ['windgusts_10m'], 'Wind': ['windspeed_10m'], 'Temp': ['temperature_2m'],
+             'Precipitation': ['precipitation']}
+
+ cols_key_agg = {'Gust': ['windgusts_10m_min', 'windgusts_10m_mean',
+                          'windgusts_10m_max'],
+                 'Wind': ['windspeed_10m_min', 'windspeed_10m_mean',
+                          'windspeed_10m_max'],
+                 'Temp': ['temperature_2m_min', 'temperature_2m_mean', 'temperature_2m_max'],
+                 'Precipitation': ['precipitation_sum']}
+
+ if hourly_daily == 'Hourly':
+     cols = cols_key[type_var]
+ else:
+     cols = cols_key_agg[type_var]
+
+
+ if time[-2:] == 'PM' and int(time[:2].strip()) < 12:
+     t = datetime.time(int(time[:2].strip())+12, 00).strftime('%H')+'00'
+ elif time[-2:] == 'AM' and int(time[:2].strip()) == 12:
+     t = '00:00'
+ else:
+     t = datetime.time(int(time[:2].strip()), 00).strftime('%H')+'00'
+
+ year, month, day = d[:4], d[4:6], d[6:8]
+
+ get_era5_data(year, month, day)
+ ds = xr.open_dataset('data.nc')
+ ds = ds.sel(time=f'{date}T{t}').drop('time')
+
+ #Convert Units
+ ds = ds.assign(t2m=(ds.t2m - 273.15) * 9/5 + 32)
+ ds = ds.assign(i10fg=(ds.i10fg*2.237))
+ ds = ds.assign(tp=(ds.tp/24.5))
+ ds = ds.assign(wind10=((ds.v10**2+ds.u10**2)**.5)*2.237)
+
+ lat, lon = geocode(address)
+
+ var_value = ds[variable].sel(
+     longitude=lon, latitude=lat, method="nearest").values.item()
+ var_value = round(var_value, 1)
+
+ img = ds[variable].values
+ boundary = ds.rio.bounds()
+ left, bottom, right, top = boundary
+
+ img[img < 0.0] = np.nan
+
+ clat = (bottom + top)/2
+ clon = (left + right)/2
+
+ vmin = np.floor(np.nanmin(img))
+ vmax = np.ceil(np.nanmax(img))
+
+ colormap = cm.LinearColormap(
+     colors=['blue', 'lightblue', 'red'], vmin=vmin, vmax=vmax)
+
+ m = folium.Map(location=[lat, lon], zoom_start=5, height=500)
+
+ folium.Marker(
+     location=[lat, lon],
+     popup=f"{var_value} {unit}"
+ ).add_to(m)
+
+ folium.raster_layers.ImageOverlay(
+     image=img,
+     name='Wind Speed Map',
+     opacity=.8,
+     bounds=[[bottom, left], [top, right]],
+     colormap=lambda value: mapvalue2color(value, colormap)
+ ).add_to(m)
+
+
+ folium.LayerControl().add_to(m)
+ colormap.caption = 'Wind Speed: MPH'
+ m.add_child(colormap)
+
+ with col1:
+     st.title('ERA5 Model')
+     # st.write(
+     #     f"{type_wind.title()} Speed: {wind_mph[0].round(2)} MPH at {time} UTC")
+     st_folium(m, height=500)
+ df_all, df_all_agg = graph_within_date_range(d, number_days_range)
+
+ if hourly_daily == 'Hourly':
+     fig = px.line(df_all, x="time", y=cols)
+     df_downloald = df_all
+ else:
+     fig = px.line(df_all_agg.reset_index(), x="date", y=cols)
+     df_downloald = df_all_agg.reset_index()
+
+ with col2:
+     st.title('Analysis')
+     st.plotly_chart(fig)
+
+     csv = convert_df(df_downloald)
+
+     st.download_button(
+         label="Download data as CSV",
+         data=csv,
+         file_name=f'{d}.csv',
+         mime='text/csv')
+
+
+ st.markdown(""" <style>
+ #MainMenu {visibility: hidden;}
+ footer {visibility: hidden;}
+ </style> """, unsafe_allow_html=True)
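The unit conversions in the '#Convert Units' block are worth spelling out: 2 m temperature arrives in Kelvin and is converted to Fahrenheit, and the 10 m wind speed is rebuilt from its u/v components in m/s before the mph scaling (this page uses the rounder factor 2.237). A sketch with illustrative values:

```python
import numpy as np

t2m_k = 293.15                        # 2 m temperature in Kelvin, illustrative
print((t2m_k - 273.15) * 9 / 5 + 32)  # 68.0 F

u10, v10 = 3.0, 4.0                   # wind components in m/s, illustrative
wind_mph = np.sqrt(u10**2 + v10**2) * 2.237
print(round(wind_mph, 1))             # 5 m/s magnitude -> ~11.2 mph
```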
pages/hail scale.csv ADDED
@@ -0,0 +1,16 @@
+ c_code,occurence,RGB,R,G,B,Hail Scale,Hail Scale In,R+G
+ #01A0F6,408,"(1, 160, 246)",1,160,246,2,0.08,78546
+ #BE55DC,408,"(190, 85, 220)",190,85,220,100,3.94,3545724
+ #FF9000,408,"(255, 144, 0)",255,144,0,30,1.18,36608
+ #00C800,408,"(0, 200, 0)",0,200,0,8,0.31,199
+ #FF00FF,396,"(255, 0, 255)",255,0,255,75,2.95,-65536
+ #C00000,396,"(192, 0, 0)",192,0,0,50,1.97,-193
+ #FF0000,396,"(255, 0, 0)",255,0,0,40,1.57,-256
+ #E7C000,396,"(231, 192, 0)",231,192,0,20,0.79,44312
+ #FFFF00,396,"(255, 255, 0)",255,255,0,15,0.59,65024
+ #009000,396,"(0, 144, 0)",0,144,0,10,0.39,143
+ #00FF00,396,"(0, 255, 0)",0,255,0,6,0.24,254
+ #0000F6,396,"(0, 0, 246)",0,0,246,4,0.16,-247
+ #00ECEC,336,"(0, 236, 236)",0,236,236,1,0.04,55695
+ #7E32A7,60,"(126, 50, 167)",126,50,167,200,7.87,1045464
+ #7F7F7F,,"(127, 127, 127)",127,127,127,0,0,2064384
pages/hail scale3b.png ADDED
pages/readme.rd ADDED
File without changes
requirements.txt ADDED
@@ -0,0 +1,20 @@
+ folium==0.12.1
+ geopandas==0.10.2
+ geopy==2.2.0
+ joblib==1.1.0
+ numpy==1.21.5
+ pandas==1.4.2
+ Pillow==9.4.0
+ plotly==5.7.0
+ rasterio==1.2.10
+ requests==2.27.1
+ rioxarray==0.12.2
+ scikit_image==0.19.2
+ streamlit==1.4.0
+ streamlit_folium==0.6.15
+ streamlit_plotly_events==0.0.6
+ branca==0.4.2
+ matplotlib==3.6.2
+ scikit_image==0.19.2
+ xarray[complete]
+ cdsapi
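To reproduce the Space locally, the standard Streamlit workflow should apply: pip install -r requirements.txt, then streamlit run app.py (the app_file declared in README.md). Note that the ERA page also expects a Copernicus CDS API key in the key environment variable (os.environ['key'] in get_era5_data); on the Hub that would be supplied as a Space secret.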