Update agent.py
agent.py CHANGED
@@ -16,6 +16,9 @@ from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingF
 from langchain.text_splitter import CharacterTextSplitter
 from langchain.tools.retriever import create_retriever_tool
 from typing import TypedDict, Annotated, List
+from langchain_community.tools import DuckDuckGoSearchRun, WikipediaQueryRun, ArxivQueryRun
+from langchain_community.utilities import WikipediaAPIWrapper, ArxivAPIWrapper
+from langchain.tools import Tool
 
 # Load environment variables from .env
 load_dotenv()
@@ -311,47 +314,53 @@ def weather_tool(location: str) -> str:
     """
     return get_weather(location, search_tool)
 
+
+from langchain_community.tools.tavily_search import TavilySearchResults
+from langchain_community.tools.ddg_search import DuckDuckGoSearchRun
+from langchain_community.tools.wikipedia.tool import WikipediaQueryRun
+from langchain_community.utilities.wikipedia import WikipediaAPIWrapper
+from langchain_community.tools.arxiv.tool import ArxivQueryRun
+from langchain_community.utilities.arxiv import ArxivAPIWrapper
+from langchain.tools import tool
+
+# 1. Tavily Web Search Tool (already in correct format)
 @tool
 def web_search(query: str) -> str:
-    """Search the web for a given query and return the summary.
-    Args:
-        query (str): The search query.
-    """
-
+    """Search the web for a given query and return the summary."""
     search_tool = TavilySearchResults()
     result = search_tool.run(query)
     return result[0]['content']
 
+# 2. DuckDuckGo Search Tool
 @tool
-def wiki_search(query: str) -> str:
-    """Search Wikipedia for a given query.
-
-
-    """
-
-    search_docs = WikipediaLoader(query=query, load_max_docs=1).load()
-    formatted_search_docs = "\n\n----\n\n".join(
-        [
-            f'<Document Source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}">\n{doc.page_content}\n</Document>'
-            for doc in search_docs
-        ]
-    )
-    return formatted_search_docs
-
-# @tool
-# def recommendation_tool(weather_condition: str) -> str:
-#     """
-#     Provides recommendations based on weather conditions.
+def duckduckgo_search(query: str) -> str:
+    """Search the web using DuckDuckGo for a given query and return the result."""
+    search_tool = DuckDuckGoSearchRun(verbose=False)
+    return search_tool.run(query)
 
-#
-
+# 3. Wikipedia Search Tool
+@tool
+def wikipedia_search(query: str) -> str:
+    """Search Wikipedia for a given query and return the top 3 results."""
+    wrapper = WikipediaAPIWrapper(top_k_results=3)
+    wikipedia = WikipediaQueryRun(api_wrapper=wrapper, verbose=False)
+    return wikipedia.run(query)
 
-#
-
-
-
+# 4. Arxiv Search Tool
+@tool
+def arxiv_search(query: str) -> str:
+    """Search arXiv for academic papers based on a query and return the top 3 results."""
+    wrapper = ArxivAPIWrapper(
+        top_k_results=3,
+        ARXIV_MAX_QUERY_LENGTH=300,
+        load_max_docs=3,
+        load_all_available_meta=False,
+        doc_content_chars_max=40000
+    )
+    arxiv = ArxivQueryRun(api_wrapper=wrapper, verbose=False)
+    return arxiv.run(query)
 
-tools = [
+tools = [arxiv_search, duckduckgo_search, web_search, wikipedia_search,
     add, subtract, multiply, divide, square, cube, power, factorial, mean, standard_deviation]
 
 # === LLM with Tools ===
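The four search tools added in this hunk are plain @tool-decorated functions, so they can be exercised directly before the LLM ever sees them. A minimal smoke test, assuming it is appended to agent.py after these definitions, that the langchain-community, wikipedia, arxiv, and duckduckgo-search packages are installed, and that TAVILY_API_KEY is set for the Tavily tool (this test block is not part of the commit):

# Hypothetical smoke test -- not part of this commit.
# Each @tool-decorated function is a LangChain Runnable, so .invoke() works
# with a plain string for these single-argument tools.
if __name__ == "__main__":
    print(web_search.invoke("latest LangChain release")[:300])        # Tavily (needs TAVILY_API_KEY)
    print(duckduckgo_search.invoke("LangChain agents")[:300])         # DuckDuckGo, no API key needed
    print(wikipedia_search.invoke("Alan Turing")[:300])               # top-3 Wikipedia summaries
    print(arxiv_search.invoke("tool-calling language models")[:300])  # top-3 arXiv abstracts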
@@ -362,9 +371,9 @@ llm = ChatGroq(
     groq_api_key=os.getenv("GROQ_API_KEY")
 )
 
-tools = [weather_tool, wiki_search, web_search,
-         add, subtract, multiply, divide, square, cube,
-         power, factorial, mean, standard_deviation, arxiv_tool, wikisearch_tool, search_tool]
+# tools = [weather_tool, wiki_search, web_search,
+#          add, subtract, multiply, divide, square, cube,
+#          power, factorial, mean, standard_deviation, arxiv_tool, wikisearch_tool, search_tool]
 
 llm_with_tools = llm.bind_tools(tools)
 
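For context on how the rebuilt tools list is consumed: bind_tools attaches the tool schemas to the Groq model, and invoking the bound model returns an AIMessage whose tool_calls field lists any calls the model chose to make. A minimal usage sketch under that assumption; the actual agent loop lives elsewhere in agent.py and is not shown in this diff:

# Hypothetical usage sketch -- the real agent graph is outside this diff.
from langchain_core.messages import HumanMessage

response = llm_with_tools.invoke(
    [HumanMessage(content="Find recent arXiv papers on tool-calling LLMs.")]
)
# The model answers either with plain text or with structured tool calls.
for call in response.tool_calls:
    print(call["name"], call["args"])  # e.g. arxiv_search {'query': 'tool-calling LLMs'}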