Denis Davydov committed on
Commit
913cae8
·
1 Parent(s): 52ee323

use bing as search engine

Browse files
Files changed (3) hide show
  1. agent.py +1 -4
  2. tools.py +4 -6
  3. utils.py +0 -17
agent.py CHANGED
@@ -12,7 +12,7 @@ from langgraph.checkpoint.memory import MemorySaver
12
  from langgraph.prebuilt import tools_condition
13
  from langchain_openai import ChatOpenAI
14
  from tools import agent_tools
15
- from utils import format_gaia_answer, create_execution_plan, log_agent_step
16
 
17
  # Initialize OpenAI LLM with GPT-4o (most capable model)
18
  chat = ChatOpenAI(
@@ -89,9 +89,6 @@ class SmartAgent:
89
  try:
90
  print(f"\n🎯 Processing question: {question[:100]}...")
91
 
92
- plan = create_execution_plan(question, task_id)
93
- print(f"📋 Execution plan: {plan}")
94
-
95
  enhanced_question = question
96
  if task_id:
97
  enhanced_question = f"Task ID: {task_id}\n\nQuestion: {question}"
 
12
  from langgraph.prebuilt import tools_condition
13
  from langchain_openai import ChatOpenAI
14
  from tools import agent_tools
15
+ from utils import format_gaia_answer, log_agent_step
16
 
17
  # Initialize OpenAI LLM with GPT-4o (most capable model)
18
  chat = ChatOpenAI(
 
89
  try:
90
  print(f"\n🎯 Processing question: {question[:100]}...")
91
 
 
 
 
92
  enhanced_question = question
93
  if task_id:
94
  enhanced_question = f"Task ID: {task_id}\n\nQuestion: {question}"
tools.py CHANGED
@@ -182,23 +182,21 @@ def get_search_urls(query: str) -> list:
182
  try:
183
  with DDGS() as ddgs:
184
  # Create Wikipedia-specific search queries
185
- wikipedia_queries = [
186
- f"site:en.wikipedia.org {query}",
187
  f"{query} site:en.wikipedia.org"
188
  ]
189
 
190
  search_results = []
191
  seen_urls = set()
192
 
193
- for wiki_query in wikipedia_queries:
194
  try:
195
- results = list(ddgs.text(wiki_query, max_results=2))
196
 
197
  for result in results:
198
  url = result.get('href', '')
199
 
200
- # Only include Wikipedia URLs and avoid duplicates
201
- if 'en.wikipedia.org' in url and url not in seen_urls:
202
  search_results.append({
203
  'url': url,
204
  'title': result.get('title', 'No title'),
 
182
  try:
183
  with DDGS() as ddgs:
184
  # Create Wikipedia-specific search queries
185
+ queries = [
 
186
  f"{query} site:en.wikipedia.org"
187
  ]
188
 
189
  search_results = []
190
  seen_urls = set()
191
 
192
+ for wiki_query in queries:
193
  try:
194
+ results = list(ddgs.text(wiki_query, max_results=10, region="us-en", backend="bing", safesearch="on"))
195
 
196
  for result in results:
197
  url = result.get('href', '')
198
 
199
+ if url not in seen_urls:
 
200
  search_results.append({
201
  'url': url,
202
  'title': result.get('title', 'No title'),
utils.py CHANGED
@@ -84,23 +84,6 @@ def format_gaia_answer(raw_answer: str) -> str:
84
 
85
  return answer
86
 
87
- def create_execution_plan(question: str, task_id: str = None) -> List[str]:
88
- """Create a simple execution plan - let GPT-4o decide what tools to use."""
89
- plan = []
90
-
91
- # Always start with understanding the question
92
- plan.append("Analyze the question to understand what information is needed")
93
-
94
- # Add file processing if task_id is provided
95
- if task_id:
96
- plan.append(f"Check for and process any files associated with task {task_id}")
97
-
98
- # Let the LLM decide what other tools to use
99
- plan.append("Use appropriate tools (web search, calculations, etc.) as needed")
100
- plan.append("Synthesize all information to provide the final answer")
101
-
102
- return plan
103
-
104
  def log_agent_step(step: str, result: str, step_number: int = None):
105
  """Log agent execution steps for debugging."""
106
  prefix = f"Step {step_number}: " if step_number else ""
 
84
 
85
  return answer
86
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
  def log_agent_step(step: str, result: str, step_number: int = None):
88
  """Log agent execution steps for debugging."""
89
  prefix = f"Step {step_number}: " if step_number else ""