# Imports for smolagents: the agent class, the built-in DuckDuckGo search tool,
# and the wrapper that runs a local Hugging Face model (e.g. Flan-T5).
from smolagents import CodeAgent, DuckDuckGoSearchTool
from smolagents import TransformersModel
class GaiaAgent:
    def __init__(self, model_id: str = "google/flan-t5-large"):
        # Wrap the local Hugging Face model with smolagents's TransformersModel
        # so the agent framework manages all interaction with it.
        self.llm_model = TransformersModel(model_id=model_id)

        # Build the smolagents CodeAgent and hand it the DuckDuckGo search tool.
        # Additional tools can be appended to the list as needed.
        self.agent = CodeAgent(
            model=self.llm_model,
            tools=[DuckDuckGoSearchTool()],
            # add_base_tools=True would add common built-in tools (such as a
            # Python interpreter); kept off for now to stay explicit.
            add_base_tools=False,
            # Higher verbosity makes the agent's intermediate steps visible in
            # the Hugging Face Spaces logs, which helps with debugging.
            verbosity_level=2,
        )
    def process_task(self, task_description: str) -> str:
        # agent.run() drives the whole loop: planning, tool use (the agent
        # decides when to call DuckDuckGoSearchTool based on the prompt),
        # and generation of the final answer.
        try:
            response = self.agent.run(task_description)
            return response
        except Exception as e:
            return f"An error occurred during agent processing: {e}"
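

# A minimal, hypothetical usage sketch (not part of the original file): it
# assumes this module is run directly and simply wires GaiaAgent to a sample
# question so the class above can be smoke-tested locally or in a Space.
if __name__ == "__main__":
    gaia_agent = GaiaAgent()  # uses the default "google/flan-t5-large" model
    sample_task = "What is the capital of France? Use web search if unsure."
    print(gaia_agent.process_task(sample_task))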