# EmberDeepAI / app.py
import os
import requests
import pandas as pd
import numpy as np
import pickle
import joblib
import google.generativeai as genai
import gradio as gr
from google.colab import drive  # Colab runtime assumed for the Drive-hosted model files
from datetime import datetime, timedelta
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing import image as keras_image
from tensorflow.keras.applications.vgg16 import preprocess_input as vgg_preprocess
from tensorflow.keras.applications.xception import preprocess_input as xce_preprocess
from tensorflow.keras.losses import BinaryFocalCrossentropy
from PIL import Image
# --- CONFIGURATION ---
FOREST_COORDS = {'Pakistan Forest': (34.0, 73.0)}
API_URL = (
"https://archive-api.open-meteo.com/v1/archive"
"?latitude={lat}&longitude={lon}"
"&start_date={start}&end_date={end}"
"&daily=temperature_2m_max,temperature_2m_min,"
"precipitation_sum,windspeed_10m_max,"
"relative_humidity_2m_max,relative_humidity_2m_min"
"&timezone=UTC"
)
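# Open-Meteo historical-weather endpoint; the placeholders are filled per request
# with coordinates and an ISO date range (daily aggregates, UTC).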
# --- GEMINI SETUP ---
GOOGLE_API_KEY = os.environ.get('GOOGLE_API_KEY')
if not GOOGLE_API_KEY:
raise ValueError("Missing GOOGLE_API_KEY environment variable")
genai.configure(api_key=GOOGLE_API_KEY)
flash = genai.GenerativeModel('gemini-1.5-flash')
# --- LOAD MODELS ---
def load_models():
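    """Mount Google Drive (Colab runtime assumed) and load all pipeline models:
    the VGG16 fire detector, the Xception severity model, the RF/XGBoost
    ensemble heads, and a logistic-regression weather-trend classifier."""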
drive.mount('/content/drive', force_remount=False)
vgg_model = load_model(
'/content/drive/MyDrive/vgg16_focal_unfreeze_more.keras',
custom_objects={'BinaryFocalCrossentropy': BinaryFocalCrossentropy}
)
def focal_loss_fixed(gamma=2., alpha=.25):
import tensorflow.keras.backend as K
def loss_fn(y_true, y_pred):
            eps = K.epsilon()
            y_pred = K.clip(y_pred, eps, 1. - eps)
ce = -y_true * K.log(y_pred)
w = alpha * K.pow(1-y_pred, gamma)
return K.mean(w * ce, axis=-1)
return loss_fn
xce_model = load_model(
'/content/drive/My Drive/severity_post_tta.keras',
custom_objects={'focal_loss_fixed': focal_loss_fixed()}
)
with open('/content/drive/My Drive/ensemble_rf_model.pkl', 'rb') as f:
rf_model = pickle.load(f)
with open('/content/drive/My Drive/ensemble_xgb_model.pkl', 'rb') as f:
xgb_model = pickle.load(f)
    # .joblib files should be loaded with joblib rather than raw pickle
    lr_model = joblib.load('/content/drive/MyDrive/wildfire_logistic_model_synthetic.joblib')
return vgg_model, xce_model, rf_model, xgb_model, lr_model
vgg_model, xception_model, rf_model, xgb_model, lr_model = load_models()
# --- LABEL MAPS ---
target_map = {0: 'mild', 1: 'moderate', 2: 'severe'}
trend_map = {1: 'increase', 0: 'same', -1: 'decrease'}
trend_rules = {
'mild': {'decrease':'mild','same':'mild','increase':'moderate'},
'moderate':{'decrease':'mild','same':'moderate','increase':'severe'},
'severe': {'decrease':'moderate','same':'severe','increase':'severe'}
}
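# trend_rules maps (current severity, weather trend) to a projected severity;
# it is defined here but not applied anywhere in the pipeline below.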
# --- PIPELINE FUNCTIONS ---
def detect_fire(img):
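    """Binary fire detection with the VGG16 model; returns (fire_present, probability),
    thresholding the sigmoid output at 0.5."""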
x = keras_image.img_to_array(img.resize((128,128)))[None]
x = vgg_preprocess(x)
prob = float(vgg_model.predict(x)[0][0])
return prob >= 0.5, prob
def classify_severity(img):
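    """Severity classification: the Xception model's output is fed to the RF and
    XGBoost heads, whose class predictions are averaged and rounded to select
    mild / moderate / severe."""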
x = keras_image.img_to_array(img.resize((224,224)))[None]
x = xce_preprocess(x)
preds = xception_model.predict(x)
rf_p = rf_model.predict(preds)[0]
xgb_p = xgb_model.predict(preds)[0]
ensemble = int(round((rf_p + xgb_p)/2))
return target_map.get(ensemble,'moderate')
def fetch_weather_trend(lat, lon):
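    """Fetch the last day of daily weather from the Open-Meteo archive API,
    derive temperature/humidity/wind/precipitation features plus a heuristic
    fire-risk score, and classify the trend with the logistic-regression model.
    Note: the archive endpoint lags real time by a few days, so very recent
    dates may return missing values."""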
end = datetime.utcnow()
start = end - timedelta(days=1)
url = API_URL.format(lat=lat, lon=lon,
start=start.strftime('%Y-%m-%d'),
end=end.strftime('%Y-%m-%d'))
    data = requests.get(url, timeout=30).json().get('daily', {})
df = pd.DataFrame(data)
    weather_cols = ['precipitation_sum', 'temperature_2m_max', 'temperature_2m_min',
                    'relative_humidity_2m_max', 'relative_humidity_2m_min', 'windspeed_10m_max']
    for c in weather_cols:
        # Coerce to numeric; fall back to NaN if the API omitted a column
        df[c] = pd.to_numeric(df[c], errors='coerce') if c in df else np.nan
df['precipitation'] = df['precipitation_sum'].fillna(0)
df['temperature'] = (df['temperature_2m_max']+df['temperature_2m_min'])/2
df['humidity'] = (df['relative_humidity_2m_max']+df['relative_humidity_2m_min'])/2
df['wind_speed'] = df['windspeed_10m_max']
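    # Heuristic fire-risk score: weighted mix of heat, dryness, wind, and lack of
    # rain, each scaled by a rough maximum (55 degC, 100 %, 60 km/h, 50 mm).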
df['fire_risk_score'] = (
0.4*(df['temperature']/55)+
0.2*(1-df['humidity']/100)+
0.3*(df['wind_speed']/60)+
0.1*(1-df['precipitation']/50)
)
feats = df[['temperature','humidity','wind_speed','precipitation','fire_risk_score']]
feat = feats.fillna(feats.mean()).iloc[-1].values.reshape(1,-1)
trend_cl = lr_model.predict(feat)[0]
return trend_map.get(trend_cl)
def generate_recommendations(wildfire_present, severity, weather_trend):
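    """Ask Gemini 1.5 Flash for structured wildfire-management recommendations
    given detection status, severity class, and weather trend."""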
prompt = f"""
You are a wildfire management expert.
- Wildfire Present: {wildfire_present}
- Severity: {severity}
- Weather Trend: {weather_trend}
Provide:
1. Immediate actions
2. Evacuation guidelines
3. Short-term containment
4. Long-term prevention & recovery
5. Community education
"""
return flash.generate_content(prompt).text
# --- GRADIO INTERFACE ---
def pipeline(image):
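    """End-to-end Gradio handler: detect fire, classify severity, fetch the
    weather trend for the configured forest, and generate recommendations."""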
img = Image.fromarray(image).convert('RGB')
fire, prob = detect_fire(img)
if not fire:
return f"No wildfire detected (prob={prob:.2f})", "N/A", "N/A", "**No wildfire detected. Stay alert.**"
severity = classify_severity(img)
trend = fetch_weather_trend(*FOREST_COORDS['Pakistan Forest'])
recs = generate_recommendations(True, severity, trend)
return f"Fire Detected (prob={prob:.2f})", severity.title(), trend, recs
interface = gr.Interface(
fn=pipeline,
inputs=gr.Image(type='numpy', label='Upload Wildfire Image'),
outputs=[
gr.Textbox(label='Fire Status'),
gr.Textbox(label='Severity Level'),
gr.Textbox(label='Weather Trend'),
gr.Markdown(label='Recommendations')
],
title='Wildfire Detection & Management Assistant',
    description='Upload an image from a forest region in Pakistan to detect wildfire presence, estimate severity, assess the weather-driven trend, and receive expert recommendations.'
)
if __name__ == '__main__':
interface.launch()
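    # In Colab or other notebook environments, interface.launch(share=True)
    # can be used instead to expose a temporary public URL.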