Spaces:
Sleeping
Sleeping
Daniel Amendoeira
committed on
Update agent.py
Browse files
agent.py
CHANGED
@@ -54,18 +54,21 @@ def llm_call(state: MessagesState):
|
|
54 |
|
55 |
# Tool node
|
56 |
def tool_node(state: MessagesState):
|
|
|
|
|
57 |
result = []
|
58 |
-
for tool_call in state["messages"][-1].tool_calls:
|
59 |
-
tool = tools_by_name[tool_call["name"]]
|
60 |
-
observation = tool.invoke(tool_call["args"])
|
61 |
-
result.append(ToolMessage(content=observation, tool_call_id=tool_call["id"]))
|
62 |
-
return {"messages": result}
|
63 |
|
64 |
# Conditional edge function to route to the tool node or end based upon whether the LLM made a tool call
|
65 |
def should_continue(state: MessagesState) -> Literal["Action", END]:
|
66 |
"""Decide if we should continue the loop or stop based upon whether the LLM made a tool call"""
|
67 |
|
68 |
-
last_message = state["messages"][-1]
|
|
|
69 |
# If the LLM makes a tool call, then perform an action
|
70 |
if last_message.tool_calls:
|
71 |
return "Action"
|
@@ -82,9 +85,11 @@ builder.add_node("environment", tool_node)
|
|
82 |
# Add edges to connect nodes
|
83 |
builder.add_edge(START, "llm_call")
|
84 |
builder.add_conditional_edges(
|
85 |
-
"llm_call",
|
86 |
should_continue,
|
87 |
-
{"Action": "environment",
|
|
|
|
|
88 |
builder.add_edge("environment", "llm_call")
|
89 |
|
90 |
gaia_agent = builder.compile()
|
|
|
54 |
|
55 |
# Tool node
def tool_node(state: MessagesState):
    """Execute every tool call requested by the most recent LLM message.

    Reads the ``tool_calls`` from the last message in ``state``, runs each
    named tool with the LLM-supplied arguments, and returns the results as
    ``ToolMessage`` objects so LangGraph's ``add_messages`` reducer appends
    them to the agent's message history.
    """
    result = []
    # The last message holds the list of tools the LLM decided to call.
    for tool_call in state["messages"][-1].tool_calls:
        # Look up the actual tool function by name in the registry dict.
        tool = tools_by_name[tool_call["name"]]
        # Execute the tool with the arguments the LLM provided.
        observation = tool.invoke(tool_call["args"])
        # Record the tool's output, tied back to the originating call id.
        result.append(ToolMessage(content=observation, tool_call_id=tool_call["id"]))
    # Thanks to add_messages, LangGraph automatically appends these to memory.
    return {"messages": result}
65 |
|
66 |
# Conditional edge function to route to the tool node or end based upon whether the LLM made a tool call
|
67 |
def should_continue(state: MessagesState) -> Literal["Action", END]:
|
68 |
"""Decide if we should continue the loop or stop based upon whether the LLM made a tool call"""
|
69 |
|
70 |
+
last_message = state["messages"][-1] # looks at the last message (usually from the LLM)
|
71 |
+
|
72 |
# If the LLM makes a tool call, then perform an action
|
73 |
if last_message.tool_calls:
|
74 |
return "Action"
|
|
|
# Add edges to connect nodes
builder.add_edge(START, "llm_call")
builder.add_conditional_edges(
    "llm_call",
    should_continue,
    # Maps the name returned by should_continue to the next node to run.
    {"Action": "environment",
     END: END},
)
# After the tools run, hand control back to the LLM for the next step.
builder.add_edge("environment", "llm_call")

gaia_agent = builder.compile()