import re
import glob

from langchain.chat_models import init_chat_model
from langgraph.prebuilt import create_react_agent
from langgraph_supervisor import create_supervisor

from agent_util import Agent_Util
from file_util import File_Util  # assumption: File_Util is used below but was not imported; adjust to the project module that actually defines it
from prompts import *
from tools import *
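
# Architecture overview: a supervisor graph (create_supervisor) delegates each
# question either to its own tool set or to one of two ReAct worker agents built
# with create_react_agent: an audio agent (text extraction from URLs and files)
# and a web-search agent.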
class Agent:
    def __init__(self):
        print("Initializing Agent....")

        print("--> Audio Agent")
        self.audio_agent = create_react_agent(
            model=init_chat_model("openai:gpt-4o-mini", temperature=0),
            tools=[extract_text_from_url_tool, extract_text_from_file_tool],
            prompt=AUDIO_AGENT_PROMPT,
            name="audio_agent",
        )
print("--> Web Search Agent")
self.web_search_agent = create_react_agent(
model=init_chat_model("openai:gpt-4o-mini", temperature=0),
tools=[search_web_tool],
prompt= WEB_SEARCH_AGENT_PROMPT,
name="web_research_agent",
)
print("--> Supervisor")
self.supervisor = create_supervisor(
model=init_chat_model("openai:gpt-4o-mini", temperature=0),
agents=[self.web_search_agent, self.audio_agent],
tools=[bird_video_count_tool,chess_image_to_fen_tool,chess_fen_get_best_next_move_tool,
get_excel_columns_tool, calculate_excel_sum_by_columns_tool,execute_python_code_tool,
text_inverter_tool, check_table_commutativity_tool],
prompt= SUPERVISOR_PROMPT,
add_handoff_back_messages=True,
output_mode="final_response",
).compile()
print("Agent initialized.")
    def __call__(self, question: str, task_id: str, task_file_name: str) -> str:
        """Answer a task question, downloading the attached file first if there is one."""
        print(f"Agent (new invocation method) received question ({task_id}) (first 50 chars): {question[:50]}...")

        file_prefix = ""
        if task_file_name:
            print(f"Task has attached file {task_file_name}")
            File_Util.baixa_arquivo_task(task_file_name)  # downloads the task's attached file
            file_prefix = f"File: {task_file_name} . "

        # Invoke the supervisor without streaming
        response = self.supervisor.invoke(
            {
                "messages": [
                    {
                        "role": "user",
                        "content": f"{file_prefix}{question}",
                    }
                ]
            }
        )
print(f"Resposta LLM: {response}")
answer_llm = response.get("output", "").strip()
print(f"Resposta pré-processada: {answer_llm}")
final_answer = re.sub(r"^FINAL ANSWER:\s*", "", answer_llm, flags=re.IGNORECASE)
print(f"Agent returning answer for task {task_id}: {final_answer}")
return final_answer |
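

# Minimal usage sketch (assumes OPENAI_API_KEY is set and the tool/prompt modules
# are importable); the question, task_id, and file name below are placeholders.
if __name__ == "__main__":
    agent = Agent()
    answer = agent(
        question="What is the capital of France?",
        task_id="demo-task-1",  # hypothetical id for local testing
        task_file_name="",      # no attached file in this example
    )
    print(answer)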