Freddolin committed on
Commit cd2c8df · verified · 1 Parent(s): 1d49134

Update agent.py

Files changed (1)
  1. agent.py +13 -14
agent.py CHANGED
@@ -1,7 +1,5 @@
 import os
-from transformers import pipeline
-# Assuming you still want to use your local Flan-T5 model
-# from tools.search_tool import search_duckduckgo # REMOVE THIS LINE
+# from transformers import pipeline # No longer directly using pipeline here
 
 # NEW IMPORTS for smolagents
 from smolagents import CodeAgent, DuckDuckGoSearchTool
@@ -10,28 +8,29 @@ from smolagents import TransformersModel # To use your local Hugging Face model
 class GaiaAgent:
     def __init__(self, model_id: str = "google/flan-t5-large"):
         # Initialize your LLM using smolagents's TransformersModel
-        # This allows smolagents to manage the interaction with your local model
-        self.llm_model = TransformersModel(model_id=model_id)
+        # This is the crucial part. Flan-T5 is a 'text2text-generation' model.
+        # TransformersModel probably builds a pipeline, which needs the correct task.
+        self.llm_model = TransformersModel(
+            model_id=model_id,
+            # Specify the task type for Flan-T5 models
+            task="text2text-generation",
+            # You might need to add device mapping if running into memory issues
+            # e.g., device_map="auto" if on GPU
+        )
 
         # Initialize the smolagents CodeAgent
-        # Pass the DuckDuckGoSearchTool directly to the agent's tools list
-        # You can add other tools here if needed
         self.agent = CodeAgent(
             model=self.llm_model,
             tools=[DuckDuckGoSearchTool()],
-            # 'add_base_tools=True' can add common basic tools (like a Python interpreter)
-            # You might need to experiment with this. For now, let's keep it explicit.
             add_base_tools=False,
-            verbose=True # This is helpful for debugging on Hugging Face Spaces logs
+            verbose=True
         )
 
     def process_task(self, task_description: str) -> str:
-        # The smolagents agent.run() method handles the entire process
-        # of planning, tool use, and generating a final answer.
         try:
-            # The agent will decide when to use DuckDuckGoSearchTool based on the prompt
             response = self.agent.run(task_description)
             return response
         except Exception as e:
             return f"An error occurred during agent processing: {e}"
+
 
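For context, a minimal usage sketch of the class after this commit. It is not part of the commit itself: the example question is illustrative, and it assumes smolagents is installed in the Space and the google/flan-t5-large weights can be loaded locally.

# Minimal usage sketch (illustrative, not part of this commit).
# Assumes smolagents is installed and google/flan-t5-large can be loaded.
from agent import GaiaAgent

gaia_agent = GaiaAgent(model_id="google/flan-t5-large")

# process_task() wraps agent.run(); the agent decides whether to call
# DuckDuckGoSearchTool and returns the final answer as a string.
answer = gaia_agent.process_task("In what year was the Eiffel Tower completed?")
print(answer)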