import os
from smolagents import (
    CodeAgent,
    DuckDuckGoSearchTool,
    VisitWebpageTool,
    PythonInterpreterTool,
    InferenceClientModel,
    OpenAIServerModel,
    # HfApiModel,  # import broke in smolagents after adding duckduckgo-search to requirements
    tool
)
# from smolagents.prompts import CODE_SYSTEM_PROMPT
from typing import List, Dict, Any, Optional

class QAgent:
    def __init__(
        self,
        model_type: str = "InferenceClientModel",
        model_id: Optional[str] = None,
        api_key: Optional[str] = None,
        provider: Optional[str] = None,  # for InferenceClientModel
        timeout: Optional[int] = None,   # for InferenceClientModel
        temperature: float = 0.2,
        verbose: bool = False            # verbose logging or not
    ):
        """
        Agent wrapping a smolagents CodeAgent with web search and Python tools,
        configured to answer GAIA-style questions in the expected format.
        """
        # Extra system prompt enforcing the GAIA answer format
        extra = (
            "You are a general AI assistant. I will ask you a question. Report your thoughts, and finish your answer with the following template: "
            "FINAL ANSWER: [YOUR FINAL ANSWER]. YOUR FINAL ANSWER should be a number OR as few words as possible OR a comma separated list of numbers and/or strings. "
            "If you are asked for a number, don't use commas to write your number nor units such as $ or percent sign unless specified otherwise. "
            "If you are asked for a string, don't use articles nor abbreviations (e.g. for cities), and write the digits in plain text unless specified otherwise. "
            "If you are asked for a comma separated list, apply the above rules depending on whether the element to be put in the list is a number or a string."
        )
        # "If the answer is a number, write it out in words." --> tested, it works!

        print(f"Begin QAgent init with model_type: {model_type}")
        self.verbose = verbose
        # if model_type == "HfApiModel":
        #     if api_key is None:
        #         api_key = os.getenv("Q_NEB_TOK")
        #         if not api_key:
        #             raise ValueError("No API Key found for HuggingFace. Please set Q_NEB_TOK or pass api_key.")
        #
        #     if self.verbose:
        #         print(f"Using Hugging Face token: {api_key[:5]}... (HfApiModel mode)")
        #
        #     self.model = HfApiModel(
        #         model_id=model_id or "Qwen/Qwen2.5-Coder-32B-Instruct",  # previously: or "meta-llama/Llama-3-70B-Instruct"
        #         token=api_key,
        #         temperature=temperature
        #     )
        # el
        if model_type == "InferenceClientModel":
            if api_key is None:
                api_key = os.getenv("Q_NEB_TOK")
                if not api_key:
                    raise ValueError("No API Key found for HuggingFace. Please set Q_NEB_TOK or pass api_key.")

            if self.verbose:
                print(f"Using Hugging Face token: {api_key[:5]}... (InferenceClientModel mode)")

            self.model = InferenceClientModel(
                model_id=model_id or "Qwen/Qwen2.5-Coder-32B-Instruct",  # previously: or "meta-llama/Llama-3-70B-Instruct"
                provider=provider or "nebius",  # or "hf-inference"
                token=api_key,
                timeout=timeout or 120,
                temperature=temperature
            )
        elif model_type == "OpenAIServerModel":
            print("Trying to configure OpenAIServerModel.")
            # Check for an xAI API key first
            xai_api_key = os.getenv("XAI_API_KEY")
            # xai_api_base = os.getenv("XAI_API_BASE")  # not sure what this is for

            # If xAI credentials are available, use them
            if xai_api_key and api_key is None:
                api_key = xai_api_key
                if self.verbose:
                    print(f"Using xAI API key: {api_key[:5]}...")

            # If no API key specified, fall back to Q_OAI_TOK (OpenAI)
            if api_key is None:
                api_key = os.getenv("Q_OAI_TOK")
                if not api_key:
                    raise ValueError("No OpenAI API key provided. Please set Q_OAI_TOK or XAI_API_KEY environment variable or pass api_key parameter.")

            self.model = OpenAIServerModel(
                model_id=model_id or "gpt-4o",
                api_key=api_key,
                # api_base=api_base,
                temperature=temperature
            )
        else:
            raise ValueError(f"Unknown model type: {model_type}")
        if self.verbose:
            # Not every model class exposes a provider attribute, so read it defensively
            print(f"Model initialized: {model_type} - {self.model.model_id} - provider: {getattr(self.model, 'provider', None)}")
        # Initialize tools
        self.tools = [
            DuckDuckGoSearchTool(),
            PythonInterpreterTool(),
            # save_and_read_file,
            # download_file_from_url,
            # analyze_csv_file,
            # analyze_excel_file
        ]

        # Setup imports
        self.imports = ["pandas", "numpy", "datetime", "json", "re", "math", "os", "requests", "csv", "urllib"]
        # Create CodeAgent
        print("Begin creating CodeAgent")
        self.agent = CodeAgent(
            tools=self.tools,
            model=self.model,
            instructions=extra,
            # additional_authorized_imports=self.imports,
            # executor_type=executor_type,
            # executor_kwargs=executor_kwargs,
            verbosity_level=2 if self.verbose else 0
        )

        if self.verbose:
            print("CodeAgent initialized")

    def invoke(self, prompt: str) -> str:
        print(f"Agent invoked with prompt: {prompt[:80]}...")
        result = self.agent.run(prompt)
        print(result)
        return result
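

# Minimal usage sketch: assumes the Q_NEB_TOK environment variable holds a valid
# Hugging Face token so the default InferenceClientModel / nebius setup works.
# The question text is only an illustration.
if __name__ == "__main__":
    agent = QAgent(model_type="InferenceClientModel", verbose=True)
    answer = agent.invoke("How many continents are there? Write the number in letters.")
    print(f"Answer: {answer}")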