Commit 375e7cf · Parent(s): 58a6c94 · FInal changes2
src/langgraph_agenticAI/Nodes/chat_with_Tool_node.py
ADDED
@@ -0,0 +1,43 @@
+from src.langgraph_agenticAI.States.state import State
+
+class ChatbotWithToolNode:
+    """
+    Chatbot logic enhanced with tool integration.
+    """
+    def __init__(self, model):
+        self.llm = model
+
+    def process(self, state: State) -> dict:
+        """
+        Processes the input state and generates a response with tool integration.
+        """
+        user_input = state["messages"][-1] if state["messages"] else ""
+        llm_response = self.llm.invoke([{"role": "user", "content": user_input}])
+
+        # Simulate tool-specific logic
+        tools_response = f"Tool integration for: '{user_input}'"
+
+        return {"messages": [llm_response, tools_response]}
+
+    def create_chatbot(self, tools):
+        """
+        Returns a chatbot node function.
+        """
+        llm_with_tools = self.llm.bind_tools(tools)
+
+        def chatbot_node(state: State):
+            """
+            Chatbot logic for processing the input state and returning a response.
+            """
+            return {"messages": [llm_with_tools.invoke(state["messages"])]}
+
+        return chatbot_node
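
For context, a minimal wiring sketch (not part of this commit) of how the node returned by create_chatbot could be plugged into a LangGraph StateGraph alongside a ToolNode. The build_graph function and the overall graph layout are assumptions following the standard LangGraph chatbot-with-tools pattern, not code from this repository.

# Hypothetical usage sketch; assumes get_tools() returns a list of tool objects.
from langgraph.graph import StateGraph, START
from langgraph.prebuilt import ToolNode, tools_condition

from src.langgraph_agenticAI.States.state import State
from src.langgraph_agenticAI.Nodes.chat_with_Tool_node import ChatbotWithToolNode
from src.langgraph_agenticAI.Tools.search_tools import get_tools

def build_graph(model):
    tools = get_tools()
    chatbot_node = ChatbotWithToolNode(model).create_chatbot(tools)

    builder = StateGraph(State)
    builder.add_node("chatbot", chatbot_node)
    builder.add_node("tools", ToolNode(tools))
    builder.add_edge(START, "chatbot")
    # Route to the tool node when the LLM emitted a tool call, otherwise end.
    builder.add_conditional_edges("chatbot", tools_condition)
    builder.add_edge("tools", "chatbot")
    return builder.compile()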
src/langgraph_agenticAI/Tools/search_tools.py
CHANGED
@@ -1,4 +1,4 @@
-from langchain_community.tools.tavily_search import TavilySearchResults
+from langchain_community.tools.tavily_search import TavilySearchResults  # could also use the Serper API for the search engine
 from langgraph.prebuilt import ToolNode
 
 def get_tools():
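
The body of get_tools() falls outside this hunk, so the following is only a hedged sketch of what such a helper commonly looks like with TavilySearchResults, not the repository's actual implementation; the max_results value and the create_tool_node wrapper are illustrative assumptions.

# Illustrative sketch only; the real get_tools() body is not shown in this diff.
from langchain_community.tools.tavily_search import TavilySearchResults
from langgraph.prebuilt import ToolNode

def get_tools():
    """Return the search tools exposed to the LLM (requires TAVILY_API_KEY)."""
    return [TavilySearchResults(max_results=2)]

def create_tool_node(tools):
    """Wrap the tools in a ToolNode so the graph can execute tool calls."""
    return ToolNode(tools)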