Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -36,20 +36,29 @@ def extract_task(user_input):
|
|
36 |
# Scrape models from Hugging Face
|
37 |
def get_models_for_task(task):
|
38 |
url = f"https://huggingface.co/models?pipeline_tag={task}"
|
39 |
-
|
|
|
|
|
|
|
|
|
|
|
40 |
soup = BeautifulSoup(response.text, "html.parser")
|
41 |
-
|
42 |
|
43 |
models_info = []
|
44 |
-
for
|
45 |
-
|
46 |
-
|
|
|
|
|
|
|
47 |
|
48 |
models_info.append({
|
49 |
-
"Model Name": name
|
50 |
"Task": task,
|
51 |
-
"Architecture": arch
|
52 |
})
|
|
|
53 |
return models_info
|
54 |
|
55 |
# Gradio UI
|
|
|
# Scrape models from Hugging Face
def get_models_for_task(task):
    """Scrape the Hugging Face model hub for models matching a pipeline tag.

    Parameters
    ----------
    task : str
        Pipeline tag (e.g. "text-classification") used to filter the hub
        listing via the ``pipeline_tag`` query parameter.

    Returns
    -------
    list[dict]
        Up to 10 dicts with keys "Model Name", "Task", and "Architecture".
        Fields default to "unknown" when the page markup lacks them.

    Raises
    ------
    Exception
        If the HTTP request does not return status 200.
    """
    url = f"https://huggingface.co/models?pipeline_tag={task}"
    headers = {"User-Agent": "Mozilla/5.0"}  # avoid bot detection
    # FIX: requests has no default timeout — without one a stalled connection
    # hangs this call (and the whole Space) indefinitely.
    response = requests.get(url, headers=headers, timeout=10)

    if response.status_code != 200:
        raise Exception(f"Failed to fetch models: HTTP {response.status_code}")

    soup = BeautifulSoup(response.text, "html.parser")
    # Each model listing on the hub page is rendered as an <article> element.
    model_cards = soup.find_all("article")

    models_info = []
    for card in model_cards[:10]:  # Limit to top 10
        name_tag = card.find("a", href=True)
        # Tag markup varies between page revisions; try <span> then <div>.
        tags = card.find_all("span", class_="tag") or card.find_all("div", class_="tag")

        name = name_tag.text.strip() if name_tag else "unknown"
        arch = tags[0].text.strip() if tags else "unknown"

        models_info.append({
            "Model Name": name,
            "Task": task,
            "Architecture": arch,
        })

    return models_info
|
63 |
|
64 |
# Gradio UI
|