rdave88 committed on
Commit
0c6d118
·
verified ·
1 Parent(s): 9f59898

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +57 -30
app.py CHANGED
@@ -3,50 +3,77 @@ import requests
3
  from bs4 import BeautifulSoup
4
  from transformers import pipeline
5
 
 
6
  task_extractor = pipeline("text2text-generation", model="google/flan-t5-small")
7
 
8
- # Simulated LLM task extraction (replace with real call if local)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
  def extract_task(user_input):
10
- prompt = f"Classify the following ML task: {user_input}. Just reply with the task name."
 
 
 
 
 
11
  result = task_extractor(prompt, max_new_tokens=10)
12
  task = result[0]["generated_text"].strip().lower()
13
- return task
14
-
15
 
16
- # Scrape Hugging Face models by task
17
  def get_models_for_task(task):
18
  url = f"https://huggingface.co/models?pipeline_tag={task}"
19
- headers = {"User-Agent": "Mozilla/5.0"}
20
- try:
21
- res = requests.get(url, headers=headers)
22
- soup = BeautifulSoup(res.text, "html.parser")
23
 
24
- results = []
25
- for a in soup.find_all("a", class_="flex items-center gap-2"):
26
- model_name = a.get("href", "").strip("/").split("/")[-1]
27
- if model_name:
28
- results.append(model_name)
29
- if len(results) >= 5:
30
- break
31
 
32
- return results if results else ["No models found"]
 
 
 
 
 
33
 
 
 
 
 
 
 
 
 
34
  except Exception as e:
35
- return [f"Error: {str(e)}"]
36
 
37
- # Combined predict function
38
- def predict(user_input):
39
- task = extract_task(user_input)
40
- models = get_models_for_task(task)
41
- return f"🧠 Task: {task}\nπŸ“¦ Models:\n" + "\n".join(models)
42
-
43
- # Gradio UI
44
  with gr.Blocks() as demo:
45
- gr.Markdown("## πŸ€– ML Task β†’ Hugging Face Model Finder")
 
46
  with gr.Row():
47
- input_box = gr.Textbox(label="Describe your ML task")
48
- submit_btn = gr.Button("πŸ” Find Models")
49
- output_box = gr.Textbox(label="Suggested Models", lines=10)
50
- submit_btn.click(predict, inputs=input_box, outputs=output_box)
 
 
 
51
 
52
  demo.launch()
 
3
  from bs4 import BeautifulSoup
4
  from transformers import pipeline
5
 
6
+ # πŸ” Load transformer model once
7
  task_extractor = pipeline("text2text-generation", model="google/flan-t5-small")
8
 
# 🔍 Optional alias correction: map informal or variant task phrasings to the
# canonical Hugging Face pipeline-tag names used in model-hub URLs.
TASK_ALIASES = {
    "classification": "text-classification",
    "financial classification": "text-classification",
    "news classification": "text-classification",
    "qa": "question-answering",
    "summarisation": "summarization",
    "token": "token-classification",
    "token classification": "token-classification",
    "object detection": "object-detection",
}


def normalize_task(task):
    """Return the canonical pipeline tag for *task*, or *task* unchanged.

    Lookup is case-insensitive; unknown task names pass through untouched.
    """
    key = task.lower()
    if key in TASK_ALIASES:
        return TASK_ALIASES[key]
    return task
# 🔍 Extract task from user input
def extract_task(user_input):
    """Ask the flan-t5 pipeline which ML task *user_input* describes.

    The generated text is lower-cased, stripped, and passed through
    normalize_task so the result is a canonical pipeline tag.
    """
    prompt = (
        "Given a user query, extract the most likely machine learning task "
        "from the following list: text-classification, token-classification, "
        "translation, summarization, question-answering, object-detection. "
        f"Query: {user_input}. Only return the task name."
    )
    generated = task_extractor(prompt, max_new_tokens=10)[0]["generated_text"]
    return normalize_task(generated.strip().lower())
35
 
36
+ # πŸ” Scrape models from Hugging Face
37
  def get_models_for_task(task):
38
  url = f"https://huggingface.co/models?pipeline_tag={task}"
39
+ response = requests.get(url)
40
+ soup = BeautifulSoup(response.text, "html.parser")
41
+ model_blocks = soup.select("div[data-testid='model-card']")
 
42
 
43
+ models_info = []
44
+ for block in model_blocks[:10]: # limit to top 10 models
45
+ name = block.select_one("a[data-testid='model-link']")
46
+ arch = block.select_one("div[class*='tag']") # very rough heuristic
 
 
 
47
 
48
+ models_info.append({
49
+ "Model Name": name.text.strip() if name else "unknown",
50
+ "Task": task,
51
+ "Architecture": arch.text.strip() if arch else "unknown"
52
+ })
53
+ return models_info
54
 
55
+ # πŸŽ› Gradio UI
56
+ def model_search_interface(user_input):
57
+ try:
58
+ task = extract_task(user_input)
59
+ models = get_models_for_task(task)
60
+ if not models:
61
+ return f"No models found for task '{task}'.", []
62
+ return f"Task identified: {task}", models
63
  except Exception as e:
64
+ return f"❌ Error: {str(e)}", []
65
 
66
+ # 🎨 Launch UI
 
 
 
 
 
 
67
  with gr.Blocks() as demo:
68
+ gr.Markdown("### πŸ” HuggingFace Model Search by Task")
69
+
70
  with gr.Row():
71
+ user_input = gr.Textbox(label="Describe the ML task you're interested in:")
72
+ output_msg = gr.Textbox(label="Status", interactive=False)
73
+
74
+ model_table = gr.Dataframe(headers=["Model Name", "Task", "Architecture"], label="Top Models")
75
+
76
+ btn = gr.Button("πŸ” Search Models")
77
+ btn.click(fn=model_search_interface, inputs=user_input, outputs=[output_msg, model_table])
78
 
79
  demo.launch()