import pandas as pd
import numpy as np
import streamlit as st
from geopy.extra.rate_limiter import RateLimiter
from geopy.geocoders import Nominatim
import folium
from streamlit_folium import st_folium
import geopandas as gpd
from vincenty import vincenty
from shapely import wkb
import duckdb

st.set_page_config(layout="wide")
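
# Streamlit app: geocode an address, pull wildfire perimeters discovered
# within the selected date window, map them, and report distances from the
# address to each fire's centroid and to the closest perimeter edge.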


@st.cache_data
def get_perimeters(start_date_str,end_date_str):
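    """Download the current wildfire perimeter shapefile from the ArcGIS
    open-data endpoint, keep and rename the columns of interest, add
    centroid coordinates, and filter to fires discovered between
    start_date_str and end_date_str."""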
  
    gdf_perimeters = gpd.read_file(
        'https://opendata.arcgis.com/api/v3/datasets/5e72b1699bf74eefb3f3aff6f4ba5511_0/downloads/data?format=shp&spatialRefId=4326&where=1%3D1')  
    gdf_perimeters = gdf_perimeters[['OBJECTID', 'poly_Incid', 'attr_Fir_7', 'poly_Creat',
                                     'poly_DateC', 'poly_Polyg', 'poly_Acres', 'attr_Estim', 'geometry']].copy()
    gdf_perimeters.columns = ['OBJECTID', 'Incident', 'DiscoveryDate', 'poly_Creat',
                              'LastUpdate', 'poly_Polyg', 'Size_acres', 'CurrentEstCost', 'geometry']
    gdf_perimeters['Lat_centroid'] = gdf_perimeters.centroid.y
    gdf_perimeters['Lon_centroid'] = gdf_perimeters.centroid.x
    gdf_perimeters['DiscoveryDate'] = pd.to_datetime(
        gdf_perimeters['DiscoveryDate'])

    gdf_cut = gdf_perimeters.query(
        f"'{start_date_str}'<=DiscoveryDate<='{end_date_str}'").copy()
    gdf_cut['DiscoveryDate'] = gdf_cut['DiscoveryDate'].dt.strftime('%Y-%m-%d')
    return gdf_cut

@st.cache_data
def get_perimeters_fast(start_date_str,end_date_str):
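    """Read the pre-built wildfire_perimeters.parquet file through DuckDB,
    filter by DiscoveryDate in SQL, then decode the WKB geometry column
    back into shapely objects and wrap the result as a GeoDataFrame."""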
    code = f"""
        select * from 'wildfire_perimeters.parquet'
            where DiscoveryDate>= '{start_date_str}' and DiscoveryDate<= '{end_date_str}' 
    """
    gdf_cut = duck_sql(code)
    gdf_cut['geometry'] = [wkb.loads(bytes(i)) for i in gdf_cut.geometry]
    gdf_cut = gpd.GeoDataFrame(gdf_cut)
    gdf_cut['DiscoveryDate'] = gdf_cut['DiscoveryDate'].dt.strftime('%Y-%m-%d')
    return gdf_cut


def map_perimeters(_gdf_data, address):
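    """Build a folium map centered on the geocoded address (module-level
    lat/lon), with a marker for the address and a GeoJSON layer of the
    fire perimeters, each with an incident tooltip."""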
    geojson_data = _gdf_data[['OBJECTID', 'Incident', 'DiscoveryDate',
                              'Miles to Fire Centroid', 'geometry']].to_json()

    m = folium.Map(location=[lat, lon],
                   zoom_start=6,
                   height=500)
    folium.Marker(
        location=[lat, lon],
        tooltip=f'Address: {address}',
    ).add_to(m)

    folium.GeoJson(geojson_data,
                   tooltip=folium.GeoJsonTooltip(fields=["Incident",
                                                         "DiscoveryDate",
                                                         'Miles to Fire Centroid']),
                   ).add_to(m)
    return m


def distance(x):
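    """Vincenty distance in miles between two (lat, lon) pairs packed in a
    single 4-element row: (x[0], x[1]) and (x[2], x[3])."""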
    left_coords = (x[0], x[1])
    right_coords = (x[2], x[3])
    return vincenty(left_coords, right_coords, miles=True)


def geocode(address):
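    """Geocode an address: try the Census Bureau one-line-address geocoder
    first, and fall back to Nominatim if that lookup fails."""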
    try:
        address2 = address.replace(' ', '+').replace(',', '%2C')
        df = pd.read_json(
            f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={address2}&benchmark=2020&format=json')
        results = df.iloc[:1, 0][0][0]['coordinates']
        lat, lon = results['y'], results['x']
    except Exception:
        # Fall back to Nominatim, throttled to one request per second
        geolocator = Nominatim(user_agent="GTA Lookup")
        geocode_limited = RateLimiter(geolocator.geocode, min_delay_seconds=1)
        location = geocode_limited(address)
        lat, lon = location.latitude, location.longitude
    return lat, lon


def extract_vertices(_gdf):
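    """Return a DataFrame of Lat/Lon vertex coordinates for every geometry
    in the GeoDataFrame, handling Polygons, LineStrings, and MultiPolygons
    via nested fallbacks."""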
    g = [i for i in _gdf.geometry]
    all_data = []
    for i in range(len(g)):
        try:
            try:
                x, y = g[i].exterior.coords.xy
            except:
                x, y = g[i].coords.xy
            df = pd.DataFrame({'Lat': y, 'Lon': x})
        except:
            all_data2 = []
            try:
                for j in range(len(g[i])):
                    try:
                        x, y = g[i][j].exterior.coords.xy
                    except:
                        x, y = g[i][j].coords.xy
                    all_data2.append(pd.DataFrame({'Lat': y, 'Lon': x}))
                df = pd.concat(all_data2)
            except Exception:
                # Shapely 2.x MultiPolygons are not indexable; use .geoms
                x = np.concatenate([poly.exterior.coords.xy[0]
                                    for poly in g[i].geoms])
                y = np.concatenate([poly.exterior.coords.xy[1]
                                    for poly in g[i].geoms])
                df = pd.DataFrame({'Lat': y, 'Lon': x})
        all_data.append(df)
    return pd.concat(all_data).query('Lat==Lat').reset_index(drop=1)


def duck_sql(sql_code):
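    """Run a SQL string through an in-memory DuckDB connection and return
    the result as a pandas DataFrame."""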
    con = duckdb.connect()
    con.execute("PRAGMA threads=2")
    con.execute("PRAGMA enable_object_cache")
    return con.execute(sql_code).df()


#Side Bar
address = st.sidebar.text_input(
    "Address", "Sacramento, CA")
date = st.sidebar.date_input("Date",  pd.Timestamp(2021, 7, 14), key='date')
number_days_range = st.sidebar.selectbox(
    'Within Day Range:', (5, 10, 30, 90, 180))

refresh = st.sidebar.radio(
    'Refresh Data (as of 6/7/23): Will Take Time ', (False, True))
miles_range = st.sidebar.selectbox(
    'Find Fires within Range (Miles):', (None, 50, 100, 250, 500))

size = st.sidebar.radio(
    'Greater than 100 Acres', ("Yes", "No"))


#Geocode Address
lat, lon = geocode(address)

# Filter Data
start_date = date - pd.Timedelta(days=number_days_range)
end_date = date + pd.Timedelta(days=number_days_range + 1)
start_date_str = start_date.strftime('%Y-%m-%d')
end_date_str = end_date.strftime('%Y-%m-%d')


# Get Data: Modified for Speed
if refresh:
    gdf_cut = get_perimeters(start_date_str, end_date_str)
else:
    gdf_cut = get_perimeters_fast(start_date_str, end_date_str)


#Distance to Fire
gdf_cut["Lat_address"] = lat
gdf_cut["Lon_address"] = lon
gdf_cut['Miles to Fire Centroid'] = [
    distance(i) for i in gdf_cut[gdf_cut.columns[-4:]].values]
gdf_cut['Miles to Fire Centroid'] = gdf_cut['Miles to Fire Centroid'].round(2)
if miles_range is not None:
    gdf_cut = gdf_cut.query(f"`Miles to Fire Centroid`<={miles_range}")

if size == 'Yes':
    gdf_cut = gdf_cut.query("Size_acres>100")

gdf_cut = gdf_cut.sort_values('Miles to Fire Centroid').reset_index(drop=1)
gdf_cut.index = gdf_cut.index+1

#Map Data
m = map_perimeters(gdf_cut, address)

#Incident Edge
incidents = list(gdf_cut['Incident'].values)
incident_edge = st.sidebar.selectbox(
    'Find Distance to Closest Edge:', incidents)

vertices = extract_vertices(gdf_cut.query(f"Incident=='{incident_edge}'"))
vertices["Lat_address"] = lat
vertices["Lon_address"] = lon
vertices['Distance'] = [
    distance(i) for i in vertices.values]
closest_edge = vertices[vertices['Distance']
                        == vertices['Distance'].min()].head(1)
lon_point, lat_point = closest_edge[['Lon', 'Lat']].values[0]
distance_edge = closest_edge['Distance'].round(2).values[0]
folium.PolyLine([[lat, lon],
                 [lat_point, lon_point]],
                color='black',
                tooltip=f'Distance: {distance_edge} Miles'
                ).add_to(m)

#Display
col1, col2 = st.columns((2, 3))
with col1:
    st.header('Fire Perimeters')
    st_folium(m, height=600)
with col2:
    st.header('Fires')
    st.dataframe(
        gdf_cut[['Incident', 'DiscoveryDate', 'Size_acres', 'Miles to Fire Centroid']])