from fastapi import FastAPI
from dotenv import load_dotenv
from tasks import image
import gradio as gr
import requests
import os
from huggingface_hub import HfApi
load_dotenv()
HF_TOKEN = os.getenv("HF_TOKEN")
api = HfApi(token=HF_TOKEN)
# FastAPI app
app = FastAPI(
    title="Frugal AI Challenge API",
    description="API for the Frugal AI Challenge evaluation endpoints",
)
app.include_router(image.router)
@app.get("/")
async def root():
    return {
        "message": "Wildfire Smoke Detector",
        "endpoints": {
            "dataset evaluation": "/image",
            "single image detection": "/detect-smoke",
        },
    }
# ---------------------
# Gradio integration
# ---------------------
DEFAULT_PARAMS = {
    "image": {
        "dataset_name": "pyronear/pyro-sdis",  # Replace with your actual HF dataset
        "test_size": 0.2,
        "test_seed": 42,
    }
}
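
# Example (sketch, hypothetical host): the evaluation route receives the
# task's parameters as a JSON body, i.e. roughly
#   requests.post("https://<space-host>/image", json=DEFAULT_PARAMS["image"])
# which is what evaluate_model() below does after resolving the Space host.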
def evaluate_model(task: str, space_url: str):
    """Call the evaluation route of a Space (or local server) and unpack the results."""
    if "localhost" in space_url:
        api_url = f"{space_url}/{task}"
    else:
        try:
            # Resolve the Hugging Face Space to its serving host
            info_space = api.space_info(repo_id=space_url)
            host = info_space.host
            api_url = f"{host}/{task}"
        except Exception:
            return None, None, None, f"Space '{space_url}' not found"
    try:
        params = DEFAULT_PARAMS[task]
        response = requests.post(api_url, json=params)
        if response.status_code != 200:
            return None, None, None, f"API call failed with status {response.status_code}"
        results = response.json()
        accuracy = results.get("classification_accuracy", results.get("accuracy", 0))
        emissions = results.get("emissions_gco2eq", 0)
        energy = results.get("energy_consumed_wh", 0)
        return accuracy, emissions, energy, results
    except Exception as e:
        return None, None, None, str(e)
def evaluate_single_image(image_path, space_url):
    """Send one image to the /detect-smoke route and report whether smoke was found."""
    api_url = f"{space_url}/detect-smoke"
    with open(image_path, "rb") as f:
        files = {"file": f}
        response = requests.post(api_url, files=files)
    if response.status_code != 200:
        return f"Error: {response.status_code}", None
    result = response.json()
    msg = "✅ Smoke detected" if result["smoke_detected"] else "❌ No smoke"
    return msg, result
# Gradio UI
with gr.Blocks(title="Frugal AI Challenge") as demo:
    gr.Markdown("# 🌲 Wildfire Smoke Detector")

    with gr.Tab("Evaluate Dataset Model"):
        text_space_url = gr.Textbox(placeholder="username/your-space", label="API Base URL")
        text_route = gr.Textbox(value="image", label="Route Name")
        text_evaluate_btn = gr.Button("Evaluate Model")
        text_accuracy = gr.Textbox(label="Accuracy")
        text_emissions = gr.Textbox(label="Emissions (gCO2eq)")
        text_energy = gr.Textbox(label="Energy (Wh)")
        text_results_json = gr.JSON(label="Full Results")
        text_evaluate_btn.click(
            lambda url, route: evaluate_model(route.strip("/"), url),
            inputs=[text_space_url, text_route],
            outputs=[text_accuracy, text_emissions, text_energy, text_results_json],
            concurrency_limit=5,
            concurrency_id="eval_queue",
        )

    with gr.Tab("Single Image Detection"):
        detect_url = gr.Textbox(placeholder="username/your-space", label="API Base URL")
        image_input = gr.Image(type="filepath", label="Upload Image")
        detect_button = gr.Button("Detect Smoke")
        detect_result = gr.Textbox(label="Detection Result")
        detect_json = gr.JSON(label="Raw Response")
        detect_button.click(
            evaluate_single_image,
            inputs=[image_input, detect_url],
            outputs=[detect_result, detect_json],
        )
# Mount Gradio onto FastAPI (gr.Blocks is an ASGI app, so use
# gr.mount_gradio_app rather than wrapping it in WSGIMiddleware)
app = gr.mount_gradio_app(app, demo, path="/gradio")
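
# ---------------------
# Local run (sketch)
# ---------------------
# Optional convenience entry point, assuming uvicorn is installed. On
# Hugging Face Spaces the server is normally launched for you, so this
# only matters when running the file directly on a local machine; port
# 7860 below is an assumption matching the usual Spaces default.
if __name__ == "__main__":
    import uvicorn

    # Serve the FastAPI app, with the Gradio UI available under /gradio.
    uvicorn.run(app, host="0.0.0.0", port=7860)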