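"""Agent wiring for a LangGraph supervisor that routes user questions to an
audio agent and a web-search agent, and exposes a set of task-specific tools
(chess, Excel, Python execution, text inversion, etc.) directly to the
supervisor."""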
import re

from langchain.chat_models import init_chat_model
from langgraph.prebuilt import create_react_agent
from langgraph_supervisor import create_supervisor

from agent_util import Agent_Util
from prompts import *
from tools import *
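# NOTE (assumption): File_Util, the tool functions, and the prompt constants
# used below are expected to come from the wildcard imports above (tools /
# prompts); if File_Util lives in its own module, import it explicitly here.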


class Agent:
    def __init__(self):
        print("Initializing Agent....")

        print("--> Audio Agent")
        self.audio_agent = create_react_agent(
            model=init_chat_model("openai:gpt-4o-mini", temperature=0),
            tools=[extract_text_from_url_tool, extract_text_from_file_tool],
            prompt=AUDIO_AGENT_PROMPT,
            name="audio_agent",
        )
print("--> Web Search Agent") | |
self.web_search_agent = create_react_agent( | |
model=init_chat_model("openai:gpt-4o-mini", temperature=0), | |
tools=[search_web_tool], | |
prompt= WEB_SEARCH_AGENT_PROMPT, | |
name="web_research_agent", | |
) | |
print("--> Supervisor") | |
self.supervisor = create_supervisor( | |
model=init_chat_model("openai:gpt-4o-mini", temperature=0), | |
agents=[self.web_search_agent, self.audio_agent], | |
tools=[bird_video_count_tool,chess_image_to_fen_tool,chess_fen_get_best_next_move_tool, | |
get_excel_columns_tool, calculate_excel_sum_by_columns_tool,execute_python_code_tool, | |
text_inverter_tool, check_table_commutativity_tool], | |
prompt= SUPERVISOR_PROMPT, | |
add_handoff_back_messages=True, | |
output_mode="full_history", | |
).compile() | |
print("Agent initialized.") | |

    def _call_antiga(self, question: str, task_id: str, task_file_name: str) -> str:
        print(f"Agent received question ({task_id}) (first 50 chars): {question[:50]}...")
        file_prefix = ""
        if task_file_name:
            print(f"Task has an attached file: {task_file_name}")
            File_Util.baixa_arquivo_task(task_file_name)
            file_prefix = f"File: {task_file_name} . "
        for chunk in self.supervisor.stream(
            {
                "messages": [
                    {
                        "role": "user",
                        "content": f"{file_prefix}{question}",
                    }
                ]
            },
        ):
            Agent_Util.pretty_print_messages(chunk, last_message=True)
            final_chunk = chunk
print("Extraindo a resposta do agente") | |
agent_answer = final_chunk["supervisor"]["messages"] | |
print(f"resposta: {agent_answer}") | |
final_answer = re.sub(r"^FINAL ANSWER:\s*", "", agent_answer, flags=re.IGNORECASE) | |
print(f"Agent returning answer for task {task_id}: {final_answer}") | |
return final_answer | |

    def __call__(self, question: str, task_id: str, task_file_name: str) -> str:
        print(f"Agent (new invocation style) received question ({task_id}) (first 50 chars): {question[:50]}...")
        file_prefix = ""
        if task_file_name:
            print(f"Task has an attached file: {task_file_name}")
            File_Util.baixa_arquivo_task(task_file_name)
            file_prefix = f"File: {task_file_name} . "
        # Call the graph without streaming.
        response = self.supervisor.invoke(
            {
                "messages": [
                    {
                        "role": "user",
                        "content": f"{file_prefix}{question}",
                    }
                ]
            }
        )
        # invoke() returns the final graph state, so the message history lives
        # under the top-level "messages" key (not nested under "supervisor").
        messages = response.get("messages", [])
        if not messages:
            print(f"No messages returned for task {task_id}.")
            return "Sorry, no answer was produced."
        print("Extracting the last message")
        last_msg = messages[-1].content
        print(f"Last message: {last_msg}")
        final_answer = re.sub(r"^FINAL ANSWER:\s*", "", last_msg.strip(), flags=re.IGNORECASE)
        print(f"Agent returning answer for task {task_id}: {final_answer}")
        return final_answer
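

# --- Usage sketch (not part of the original file; values are hypothetical) ---
# Minimal example of how this class is expected to be driven: instantiate it
# once, then call it with a question, a task id, and an optional file name.
if __name__ == "__main__":
    agent = Agent()
    answer = agent(
        question="Reverse the following text: .tset a si sihT",
        task_id="demo-task-001",
        task_file_name="",  # empty string => no task file to download
    )
    print(answer)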