Update agent.py
agent.py CHANGED
@@ -22,7 +22,7 @@ langsmith_tracing = os.getenv("LANGSMITH_TRACING")
 llm = ChatOpenAI(
     base_url="https://openrouter.ai/api/v1",
     api_key=os.getenv("OPENROUTER_API_KEY"),
-    model="
+    model="qwen/qwen3-coder:free", # Model must support function calling in OpenRouter
     temperature=1
 )
 
@@ -37,19 +37,19 @@ community_tools = [search_tool, python_tool]
 custom_tools = calculator_basic + datetime_tools + [transcribe_audio, transcribe_youtube, query_image, webpage_content, read_excel]
 
 tools = community_tools + custom_tools
-llm_with_tools = llm.bind_tools(tools)
 
 # Prepare tools by name
 tools_by_name = {tool.name: tool for tool in tools}
 
 class MessagesState(TypedDict): # creates the state (is like the agent's memory at any moment)
     messages: Annotated[list[AnyMessage], add_messages]
+    llm_with_tools: object
 
 # LLM node
 def llm_call(state: MessagesState):
     return {
         "messages": [
-            llm_with_tools.invoke(
+            state["llm_with_tools"].invoke(
                 [SystemMessage(content=system_prompt)] + state["messages"]
             )
         ]
@@ -103,10 +103,27 @@ gaia_agent = builder.compile() # converts my builder into a runnable agent by u
 # Wrapper class to initialize and call the LangGraph agent with a user question
 class LangGraphAgent:
     def __init__(self):
+        self.api_keys = [
+            os.getenv("OPENROUTER_API_KEY"),
+            os.getenv("OPENROUTER_API_KEY_1"),
+            os.getenv("OPENROUTER_API_KEY_2"),
+        ]
+        self.key_index = 0
         print("LangGraphAgent initialized.")
 
     def __call__(self, question: str) -> str:
-        input_state = {"messages": [HumanMessage(content=question)]} # prepare the initial user message
+        api_key = self.api_keys[self.key_index]
+        self.key_index = (self.key_index + 1) % len(self.api_keys)
+
+        llm = ChatOpenAI(
+            base_url="https://openrouter.ai/api/v1",
+            api_key=api_key,
+            model="qwen/qwen3-coder:free",
+            temperature=1
+        )
+        llm_with_tools = llm.bind_tools(tools)
+
+        input_state = {"messages": [HumanMessage(content=question)], "llm_with_tools": llm_with_tools} # prepare the initial user message
         print(f"Running LangGraphAgent with input: {question[:150]}...")
 
         # tracing configuration for LangSmith
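
Taken together, the second and third hunks change where the tool-bound model lives: instead of being created once at import time, it is rebuilt inside LangGraphAgent.__call__ with the next key in the rotation and handed to the graph through the state, presumably so that requests to the free qwen/qwen3-coder:free endpoint are spread across several OpenRouter keys. The sketch below shows the same state-carried-model pattern in isolation; it assumes langgraph and langchain_core are installed, and the State class, StubModel, node name, and key strings are illustrative placeholders rather than code from this Space.

# Minimal, self-contained sketch of the pattern the diff introduces: the bound model
# travels inside the graph state instead of living as a module-level global, so each
# call can inject a model built with a different API key. StubModel stands in for
# ChatOpenAI(...).bind_tools(tools) so the sketch runs without any key.
from typing import Annotated, TypedDict

from langchain_core.messages import AIMessage, AnyMessage, HumanMessage
from langgraph.graph import END, START, StateGraph
from langgraph.graph.message import add_messages

class State(TypedDict):
    messages: Annotated[list[AnyMessage], add_messages]
    llm_with_tools: object  # carried through the state, as in the updated MessagesState

class StubModel:
    """Stands in for llm.bind_tools(tools); only an .invoke() method is needed."""
    def __init__(self, key: str):
        self.key = key
    def invoke(self, messages: list[AnyMessage]) -> AIMessage:
        return AIMessage(content=f"[{self.key}] echo: {messages[-1].content}")

def llm_call(state: State) -> dict:
    # The node pulls the model out of the state, mirroring state["llm_with_tools"].invoke(...)
    return {"messages": [state["llm_with_tools"].invoke(state["messages"])]}

builder = StateGraph(State)
builder.add_node("llm_call", llm_call)
builder.add_edge(START, "llm_call")
builder.add_edge("llm_call", END)
graph = builder.compile()

# Round-robin over several keys, one per call, as LangGraphAgent.__call__ now does.
keys = ["key-a", "key-b", "key-c"]  # real code: the three OPENROUTER_API_KEY* env vars
for i, question in enumerate(["hello", "how are you", "bye"]):
    model = StubModel(keys[i % len(keys)])  # real code: ChatOpenAI(api_key=..., ...).bind_tools(tools)
    result = graph.invoke({"messages": [HumanMessage(content=question)], "llm_with_tools": model})
    print(result["messages"][-1].content)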