from fastapi import FastAPI
from dotenv import load_dotenv
from tasks import image
import gradio as gr
import requests
import os
from huggingface_hub import HfApi

load_dotenv()
HF_TOKEN = os.getenv("HF_TOKEN")
api = HfApi(token=HF_TOKEN)

# FastAPI app
app = FastAPI(
    title="Frugal AI Challenge API",
    description="API for the Frugal AI Challenge evaluation endpoints"
)
app.include_router(image.router)


@app.get("/")
async def root():
    return {
        "message": "Wildfire Smoke Detector",
        "endpoints": {
            "dataset evaluation": "/image",
            "single image detection": "/detect-smoke"
        }
    }


# ---------------------
# Gradio integration
# ---------------------
DEFAULT_PARAMS = {
    "image": {
        "dataset_name": "pyronear/pyro-sdis",  # Replace with your actual HF dataset
        "test_size": 0.2,
        "test_seed": 42
    }
}


def evaluate_model(task: str, space_url: str):
    """Call the evaluation route of a Space (or local server) and extract key metrics."""
    if "localhost" in space_url:
        api_url = f"{space_url}/{task}"
    else:
        try:
            # Resolve the Hugging Face Space host from its repo id
            info_space = api.space_info(repo_id=space_url)
            host = info_space.host
            api_url = f"{host}/{task}"
        except Exception:
            return None, None, None, f"Space '{space_url}' not found"

    try:
        params = DEFAULT_PARAMS[task]
        response = requests.post(api_url, json=params)
        if response.status_code != 200:
            return None, None, None, f"API call failed with status {response.status_code}"

        results = response.json()
        accuracy = results.get("classification_accuracy", results.get("accuracy", 0))
        emissions = results.get("emissions_gco2eq", 0)
        energy = results.get("energy_consumed_wh", 0)
        return accuracy, emissions, energy, results
    except Exception as e:
        return None, None, None, str(e)


def evaluate_single_image(image_path, space_url):
    """Send one image to the /detect-smoke route and report the result."""
    api_url = f"{space_url}/detect-smoke"
    with open(image_path, "rb") as f:
        files = {"file": f}
        response = requests.post(api_url, files=files)

    if response.status_code != 200:
        return f"Error: {response.status_code}", None

    result = response.json()
    msg = "✅ Smoke detected" if result["smoke_detected"] else "❌ No smoke"
    return msg, result


# Gradio UI
with gr.Blocks(title="Frugal AI Challenge") as demo:
    gr.Markdown("# 🌲 Wildfire Smoke Detector")

    with gr.Tab("Evaluate Dataset Model"):
        text_space_url = gr.Textbox(placeholder="username/your-space", label="API Base URL")
        text_route = gr.Textbox(value="image", label="Route Name")
        text_evaluate_btn = gr.Button("Evaluate Model")
        text_accuracy = gr.Textbox(label="Accuracy")
        text_emissions = gr.Textbox(label="Emissions (gCO2eq)")
        text_energy = gr.Textbox(label="Energy (Wh)")
        text_results_json = gr.JSON(label="Full Results")

        text_evaluate_btn.click(
            lambda url, route: evaluate_model(route.strip("/"), url),
            inputs=[text_space_url, text_route],
            outputs=[text_accuracy, text_emissions, text_energy, text_results_json],
            concurrency_limit=5,
            concurrency_id="eval_queue"
        )

    with gr.Tab("Single Image Detection"):
        detect_url = gr.Textbox(placeholder="username/your-space", label="API Base URL")
        image_input = gr.Image(type="filepath", label="Upload Image")
        detect_button = gr.Button("Detect Smoke")
        detect_result = gr.Textbox(label="Detection Result")
        detect_json = gr.JSON(label="Raw Response")

        detect_button.click(
            evaluate_single_image,
            inputs=[image_input, detect_url],
            outputs=[detect_result, detect_json]
        )


# Mount Gradio on the FastAPI app. A Gradio Blocks app is an ASGI app, so it is
# mounted with gr.mount_gradio_app rather than wrapped in WSGIMiddleware.
app = gr.mount_gradio_app(app, demo, path="/gradio")
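
# Optional local entry point: a minimal sketch, assuming uvicorn is installed and
# port 7860 is free. Hugging Face Spaces start the server for you, so this block
# only matters when running `python app.py` on your own machine.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)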