Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -3,50 +3,77 @@ import requests
|
|
3 |
from bs4 import BeautifulSoup
|
4 |
from transformers import pipeline
|
5 |
|
|
|
6 |
task_extractor = pipeline("text2text-generation", model="google/flan-t5-small")
|
7 |
|
8 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
9 |
def extract_task(user_input):
|
10 |
-
prompt =
|
|
|
|
|
|
|
|
|
|
|
11 |
result = task_extractor(prompt, max_new_tokens=10)
|
12 |
task = result[0]["generated_text"].strip().lower()
|
13 |
-
return task
|
14 |
-
|
15 |
|
16 |
-
# Scrape Hugging Face
|
17 |
def get_models_for_task(task):
|
18 |
url = f"https://huggingface.co/models?pipeline_tag={task}"
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
-
soup = BeautifulSoup(res.text, "html.parser")
|
23 |
|
24 |
-
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
results.append(model_name)
|
29 |
-
if len(results) >= 5:
|
30 |
-
break
|
31 |
|
32 |
-
|
|
|
|
|
|
|
|
|
|
|
33 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
34 |
except Exception as e:
|
35 |
-
return
|
36 |
|
37 |
-
#
|
38 |
-
def predict(user_input):
|
39 |
-
task = extract_task(user_input)
|
40 |
-
models = get_models_for_task(task)
|
41 |
-
return f"π§ Task: {task}\nπ¦ Models:\n" + "\n".join(models)
|
42 |
-
|
43 |
-
# Gradio UI
|
44 |
with gr.Blocks() as demo:
|
45 |
-
gr.Markdown("
|
|
|
46 |
with gr.Row():
|
47 |
-
|
48 |
-
|
49 |
-
|
50 |
-
|
|
|
|
|
|
|
51 |
|
52 |
demo.launch()
|
|
|
3 |
from bs4 import BeautifulSoup
|
4 |
from transformers import pipeline
|
5 |
|
# 🚀 Build the task-extraction pipeline once at import time so every
# query reuses the same loaded model.
task_extractor = pipeline("text2text-generation", model="google/flan-t5-small")
|
# 🔁 Alias table: colloquial / misspelled task names -> canonical Hub pipeline tags.
TASK_ALIASES = {
    "classification": "text-classification",
    "financial classification": "text-classification",
    "news classification": "text-classification",
    "qa": "question-answering",
    "summarisation": "summarization",
    "token": "token-classification",
    "token classification": "token-classification",
    "object detection": "object-detection",
}


def normalize_task(task):
    """Map a colloquial task name onto its canonical pipeline tag.

    The lookup is case-insensitive; unknown names are returned unchanged.
    """
    key = task.lower()
    return TASK_ALIASES.get(key, task)
# 🔍 Extract task from user input
def extract_task(user_input):
    """Ask the text2text model which canonical ML task the query describes.

    The model's raw answer is stripped, lowercased, and passed through
    normalize_task() so common aliases map onto canonical pipeline tags.
    """
    prompt = (
        "Given a user query, extract the most likely machine learning task "
        "from the following list: text-classification, token-classification, "
        "translation, summarization, question-answering, object-detection. "
        f"Query: {user_input}. Only return the task name."
    )
    # max_new_tokens kept tiny — the answer is a single task name.
    generation = task_extractor(prompt, max_new_tokens=10)
    raw_task = generation[0]["generated_text"].strip().lower()
    return normalize_task(raw_task)
# 🔍 Scrape models from Hugging Face
def get_models_for_task(task):
    """Scrape the Hugging Face hub listing page for models tagged *task*.

    Returns a list of up to 10 dicts with keys "Model Name", "Task" and
    "Architecture". Raises requests.RequestException on network failure or
    a non-2xx HTTP status.
    """
    url = f"https://huggingface.co/models?pipeline_tag={task}"
    # Fix: a bounded timeout — without it a stalled connection hangs the UI forever.
    response = requests.get(url, timeout=10)
    # Fix: surface HTTP errors instead of silently parsing an error page as "no models".
    response.raise_for_status()
    soup = BeautifulSoup(response.text, "html.parser")
    # NOTE(review): these selectors depend on huggingface.co's current markup and
    # will quietly yield [] if the site layout changes — verify periodically.
    model_blocks = soup.select("div[data-testid='model-card']")

    models_info = []
    for card in model_blocks[:10]:  # limit to top 10 models
        name = card.select_one("a[data-testid='model-link']")
        arch = card.select_one("div[class*='tag']")  # very rough heuristic
        models_info.append({
            "Model Name": name.text.strip() if name else "unknown",
            "Task": task,
            "Architecture": arch.text.strip() if arch else "unknown",
        })
    return models_info
# 🔎 Glue between the UI and the extraction/scraping helpers.
def model_search_interface(user_input):
    """Resolve the user's query to a task and fetch matching models.

    Returns a (status_message, rows) tuple suitable for a Textbox + Dataframe
    output pair; never raises — failures become an error status with no rows.
    """
    try:
        task = extract_task(user_input)
        models = get_models_for_task(task)
        if not models:
            return f"No models found for task '{task}'.", []
        return f"Task identified: {task}", models
    except Exception as e:
        # Top-level UI boundary: report rather than crash the app.
        # Fix: the error prefix was mojibake in the source; restored to "❌".
        return f"❌ Error: {str(e)}", []
65 |
|
66 |
+
# π¨ Launch UI
|
|
|
|
|
|
|
|
|
|
|
|
|
67 |
with gr.Blocks() as demo:
|
68 |
+
gr.Markdown("### π HuggingFace Model Search by Task")
|
69 |
+
|
70 |
with gr.Row():
|
71 |
+
user_input = gr.Textbox(label="Describe the ML task you're interested in:")
|
72 |
+
output_msg = gr.Textbox(label="Status", interactive=False)
|
73 |
+
|
74 |
+
model_table = gr.Dataframe(headers=["Model Name", "Task", "Architecture"], label="Top Models")
|
75 |
+
|
76 |
+
btn = gr.Button("π Search Models")
|
77 |
+
btn.click(fn=model_search_interface, inputs=user_input, outputs=[output_msg, model_table])
|
78 |
|
79 |
demo.launch()
|