Spaces:
Sleeping
Sleeping
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
from model_tools import extract_task, scrape_huggingface_models
|
3 |
+
|
4 |
+
# Final agent with Gradio UI
def run_agent(user_query: str):
    """Find Hugging Face models matching a user's task description.

    Extracts the canonical ML task from the free-form query, fetches
    matching models, and renders them as a markdown table for the UI.
    Any failure is returned as a markdown error string rather than raised,
    so the Gradio frontend never crashes.
    """
    try:
        # 1. Map the free-form query onto a standard ML task identifier
        #    (e.g., "text-classification").
        task = extract_task(user_query)

        # 2. Fetch candidate models for that task.
        models = scrape_huggingface_models(task)

        if not models:
            return f"β No models found for task `{task}`. Try refining your query."

        # 3. Assemble the markdown table: header first, then one row per model.
        parts = [
            f"### π Models for task: `{task}`\n\n",
            "| Model Name | Task | Architecture |\n",
            "|------------|------|---------------|\n",
        ]
        for entry in models:
            model_id = entry.get("model_name", "unknown")
            entry_task = entry.get("task", "unknown")
            architecture = entry.get("architecture", "unknown")
            parts.append(
                f"| [{model_id}](https://huggingface.co/{model_id}) | {entry_task} | {architecture} |\n"
            )
        return "".join(parts)

    except Exception as e:
        # Surface the failure to the UI as text instead of propagating.
        return f"β Error: {str(e)}"
35 |
+
|
36 |
+
# Gradio interface for deployment
def gradio_ui():
    """Build and return the Gradio Blocks app for the model-finder agent."""
    with gr.Blocks() as demo:
        gr.Markdown("# Hugging Face Model Finder Agent")
        gr.Markdown("Enter a task description, and I'll find suitable ML models for you!")

        # User input for task description
        query_box = gr.Textbox(
            label="Describe the ML Task",
            placeholder="e.g., 'I need a text summarization model'",
            lines=2,
        )

        # Output for model search results
        results_view = gr.Markdown()

        # Connect the input/output to the agent: pressing Enter in the
        # textbox runs the agent and renders its markdown result.
        query_box.submit(run_agent, inputs=query_box, outputs=results_view)

    return demo
52 |
+
|
53 |
+
# Run the Gradio interface (will run locally, and can be deployed to Spaces)
if __name__ == "__main__":
    demo = gradio_ui()
    demo.launch()