# Hugging Face Spaces app — Model Finder Agent
# (web-page chrome "Spaces / Sleeping" removed from the scraped copy)
import gradio as gr

from model_tools import extract_task, scrape_huggingface_models

# Final agent with Gradio UI
def run_agent(user_query: str):
    """
    Resolve a free-form user query to an ML task, look up matching models,
    and render the results as a markdown table (used as the Gradio handler).
    """
    try:
        # Map the query onto a canonical task label (e.g. "text-classification").
        ml_task = extract_task(user_query)

        # Fetch candidate models for that task.
        matches = scrape_huggingface_models(ml_task)
        if not matches:
            return f"β No models found for task `{ml_task}`. Try refining your query."

        # Assemble the markdown response: heading, table header, one row per model.
        lines = [
            f"### π Models for task: `{ml_task}`\n\n",
            "| Model Name | Task | Architecture |\n",
            "|------------|------|---------------|\n",
        ]
        for entry in matches:
            model_id = entry.get("model_name", "unknown")
            entry_task = entry.get("task", "unknown")
            entry_arch = entry.get("architecture", "unknown")
            lines.append(
                f"| [{model_id}](https://huggingface.co/{model_id}) | {entry_task} | {entry_arch} |\n"
            )
        return "".join(lines)
    except Exception as e:
        # UI boundary: surface any failure as a message rather than crashing.
        return f"β Error: {str(e)}"
# Gradio interface for deployment
def gradio_ui():
    """Build and return the Gradio Blocks app for the model-finder agent."""
    with gr.Blocks() as app:
        gr.Markdown("# Hugging Face Model Finder Agent")
        gr.Markdown("Enter a task description, and I'll find suitable ML models for you!")

        # Free-text description of the desired ML task.
        query_box = gr.Textbox(
            label="Describe the ML Task",
            placeholder="e.g., 'I need a text summarization model'",
            lines=2,
        )

        # Markdown area that receives the formatted search results.
        results_md = gr.Markdown()

        # Pressing Enter in the textbox runs the agent and shows its output.
        query_box.submit(run_agent, inputs=query_box, outputs=results_md)
    return app
# Run the Gradio interface (will run locally, and can be deployed to Spaces) | |
if __name__ == "__main__": | |
gradio_ui().launch() | |