import re
from langgraph.prebuilt import create_react_agent
from agent_util import Agent_Util
from file_util import File_Util  # assumed import: File_Util is used in __call__ but was not imported in the original; module path is hypothetical
from prompts import *
from tools import *
from langgraph_supervisor import create_supervisor
from langchain.chat_models import init_chat_model
import glob


class Agent:
    def __init__(self):
        print("Initializing Agent....")

        # ReAct agent for audio/text-extraction tasks (extracts text from URLs and local files)
        print("--> Audio Agent")
        self.audio_agent = create_react_agent(
            model=init_chat_model("openai:gpt-4o-mini", temperature=0),
            tools=[extract_text_from_url_tool, extract_text_from_file_tool],
            prompt=AUDIO_AGENT_PROMPT,
            name="audio_agent",
        )

        # ReAct agent dedicated to web searches
        print("--> Web Search Agent")
        self.web_search_agent = create_react_agent(
            model=init_chat_model("openai:gpt-4o-mini", temperature=0),
            tools=[search_web_tool],
            prompt=WEB_SEARCH_AGENT_PROMPT,
            name="web_research_agent",
        )

        # Supervisor routes work between the two agents and exposes the remaining tools directly
        print("--> Supervisor")
        self.supervisor = create_supervisor(
            model=init_chat_model("openai:gpt-4o-mini", temperature=0),
            agents=[self.web_search_agent, self.audio_agent],
            tools=[
                bird_video_count_tool,
                chess_image_to_fen_tool,
                chess_fen_get_best_next_move_tool,
                get_excel_columns_tool,
                calculate_excel_sum_by_columns_tool,
                execute_python_code_tool,
                text_inverter_tool,
                check_table_commutativity_tool,
            ],
            prompt=SUPERVISOR_PROMPT,
            add_handoff_back_messages=True,
            output_mode="final_response",
        ).compile()

        print("Agent initialized.")

    def __call__(self, question: str, task_id: str, task_file_name: str) -> str:
        print(f"Agent (new invocation style) received question ({task_id}) (first 50 chars): {question[:50]}...")

        file_prefix = ""
        if task_file_name:
            print(f"Task has an attached file: {task_file_name}")
            # Download the task's attachment so the tools can access it locally
            File_Util.baixa_arquivo_task(task_file_name)
            file_prefix = f"File: {task_file_name} . "
        # Invoke the supervisor graph without streaming
        response = self.supervisor.invoke(
            {
                "messages": [
                    {
                        "role": "user",
                        "content": f"{file_prefix}{question}",
                    }
                ]
            }
        )
print(f"Resposta LLM: {response}") | |
answer_llm = response.get("output", "").strip() | |
print(f"Resposta pré-processada: {answer_llm}") | |
final_answer = re.sub(r"^FINAL ANSWER:\s*", "", answer_llm, flags=re.IGNORECASE) | |
print(f"Agent returning answer for task {task_id}: {final_answer}") | |
return final_answer |
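

# Usage sketch (not part of the original file): shows how the Agent class is meant to be
# called. The question and task_id below are hypothetical, and OPENAI_API_KEY must be set
# in the environment for init_chat_model("openai:gpt-4o-mini") to authenticate.
if __name__ == "__main__":
    agent = Agent()
    answer = agent(
        question="What is 2 + 2?",   # hypothetical question
        task_id="demo-task",          # hypothetical task id
        task_file_name="",            # empty string: no attached file to download
    )
    print(answer)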