# 🧰 Required libraries
from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, tool
from huggingface_hub import InferenceClient
import requests
from bs4 import BeautifulSoup
import json
import os
import gradio as gr
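
# 🔑 Hugging Face token, read from the HF_API_KEY environment variable (e.g. a Space secret)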
HF_TOKEN = os.getenv("HF_API_KEY")
client = InferenceClient(token=HF_TOKEN)
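# Note: the InferenceClient above is not used by the agent below; it can be removed or used for direct Inference API calls.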

# 🛠️ Custom tool to get the Hugging Face top daily paper
@tool
def get_hugging_face_top_daily_paper() -> str:
    """
    Returns the title of the most upvoted paper on Hugging Face daily papers.
    """
    try:
        url = "https://huggingface.co/papers"
        response = requests.get(url)
        response.raise_for_status()
        soup = BeautifulSoup(response.content, "html.parser")

        # Use a general selector: paper metadata is embedded as JSON in data-props attributes
        containers = soup.find_all('div', attrs={'data-props': True})
        top_paper = ""
        for container in containers:
            data_props = container.get('data-props', '')
            if data_props:
                try:
                    json_data = json.loads(data_props.replace('&quot;', '"'))
                    if 'dailyPapers' in json_data:
                        top_paper = json_data['dailyPapers'][0]['title']
                        break
                except json.JSONDecodeError:
                    continue

        return top_paper or "No top paper found."
    except requests.exceptions.RequestException as e:
        return f"Error occurred while fetching the paper: {e}"
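
# Quick standalone check of the tool (outside the agent), e.g.:
#   print(get_hugging_face_top_daily_paper())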

# 🚀 Entry point: build the agent and launch a simple web UI
if __name__ == "__main__":
    # 🤖 Load a model from the Hugging Face Hub
    model = HfApiModel(model_id="Qwen/Qwen2.5-Coder-32B-Instruct", token=HF_TOKEN)
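    # HfApiModel runs the model via the hosted Hugging Face Inference API, so no local GPU is required.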

    # 🔧 Initialize tools
    search_tool = DuckDuckGoSearchTool()
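    # DuckDuckGoSearchTool gives the agent general web search; the custom tool above handles the daily-papers page.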

    # 🧠 Build the agent
    agent = CodeAgent(
        tools=[search_tool, get_hugging_face_top_daily_paper],
        model=model,
        additional_authorized_imports=["requests", "bs4", "json"]
    )
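    # CodeAgent writes and runs Python code; additional_authorized_imports whitelists the modules that generated code may import.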

    # 🖥️ Web interface
    def run_agent_interface(query):
        return agent.run(query)
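
    # Example queries to try in the UI (illustrative only):
    #   "What is today's top paper on Hugging Face?"
    #   "Search the web for more details about that paper."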
    gr.Interface(fn=run_agent_interface, inputs="text", outputs="text").launch()