# The transformers pipeline import and the custom search tool
# (tools.search_tool.search_duckduckgo) are no longer needed:
# smolagents manages the local model and provides its own search tool.

# NEW IMPORTS for smolagents
from smolagents import CodeAgent, DuckDuckGoSearchTool
from smolagents import TransformersModel # To use your local Hugging Face model

class GaiaAgent:
    def __init__(self, model_id: str = "google/flan-t5-large"):
        # Initialize your LLM using smolagents's TransformersModel
        # This allows smolagents to manage the interaction with your local model
        self.llm_model = TransformersModel(model_id=model_id)

        # Initialize the smolagents CodeAgent
        # Pass the DuckDuckGoSearchTool directly to the agent's tools list
        # You can add other tools here if needed
        self.agent = CodeAgent(
            model=self.llm_model,
            tools=[DuckDuckGoSearchTool()],
            # 'add_base_tools=True' can add common basic tools (like a Python interpreter)
            # You might need to experiment with this. For now, let's keep it explicit.
            add_base_tools=False, 
            verbosity_level=2  # detailed logging is helpful when debugging via the Hugging Face Spaces logs
        )

    def process_task(self, task_description: str) -> str:
        # The smolagents agent.run() method handles the entire process
        # of planning, tool use, and generating a final answer.
        try:
            # The agent will decide when to use DuckDuckGoSearchTool based on the prompt
            response = self.agent.run(task_description)
            return str(response)  # agent.run may return a non-string final answer (e.g. a number)
        except Exception as e:
            return f"An error occurred during agent processing: {e}"