import os
from dotenv import load_dotenv
from langgraph.graph import START, StateGraph, MessagesState
from langgraph.prebuilt import tools_condition
from langgraph.prebuilt import ToolNode
from duckduckgo_search import DDGS
from langchain_community.document_loaders import WikipediaLoader
from langchain_community.document_loaders import ArxivLoader
from langchain_core.messages import SystemMessage, HumanMessage
from langchain_core.tools import tool
from langchain_google_genai import ChatGoogleGenerativeAI
load_dotenv()
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")

# --- Tools ---

@tool
def multiply(a: int, b: int) -> int:
    """Multiply two integers."""
    return a * b


@tool
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


@tool
def subtract(a: int, b: int) -> int:
    """Subtract b from a."""
    return a - b


@tool
def divide(a: int, b: int) -> float:
    """Divide a by b. Raises ValueError on division by zero."""
    if b == 0:
        raise ValueError("Cannot divide by zero.")
    return a / b


@tool
def modulo(a: int, b: int) -> int:
    """Return the remainder of a divided by b."""
    return a % b

@tool
def wiki_search(query: str) -> str:
    """Search Wikipedia and return up to 2 matching documents."""
    search_docs = WikipediaLoader(query=query, load_max_docs=2).load()
    formatted = "\n\n---\n\n".join(
        f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}">\n{doc.page_content}\n</Document>'
        for doc in search_docs
    )
    return formatted

@tool
def arxiv_search(query: str) -> str:
    """Search arXiv and return up to 3 matching documents, truncated to 1000 characters each."""
    search_docs = ArxivLoader(query=query, load_max_docs=3).load()
    formatted = "\n\n---\n\n".join(
        f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}">\n{doc.page_content[:1000]}\n</Document>'
        for doc in search_docs
    )
    return formatted

@tool
def web_search(query: str) -> str:
    """Searches DuckDuckGo for a query."""
    with DDGS() as ddgs:
        results = ddgs.text(query, max_results=5)
    if not results:
        return "No results found."
    return "\n\n".join(f"{r['title']}: {r['href']}" for r in results)


# --- Set up LLM and tools ---
tools = [
    multiply,
    add,
    subtract,
    divide,
    modulo,
    wiki_search,
    arxiv_search,
    web_search,
]

system_prompt = (
    "You are a highly accurate AI assistant. "
    "Use tools when needed. Be very concise and precise. "
    "Do not hallucinate information."
)
sys_msg = SystemMessage(content=system_prompt)


def build_graph():
    """Build the LangGraph agent: an LLM node with tool calling wired to a ToolNode."""
    llm = ChatGoogleGenerativeAI(
        model="gemini-2.0-flash",
        google_api_key=GOOGLE_API_KEY,
        temperature=0,
        max_output_tokens=2048,
    )
    llm_with_tools = llm.bind_tools(tools)

    def assistant(state: MessagesState):
        # Prepend the system message so the model always sees its instructions.
        return {"messages": [llm_with_tools.invoke([sys_msg] + state["messages"])]}

    builder = StateGraph(MessagesState)
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))
    builder.add_edge(START, "assistant")
    builder.add_conditional_edges("assistant", tools_condition)
    builder.add_edge("tools", "assistant")
    return builder.compile()


# Agent executor for app.py
def agent_executor(question: str) -> str:
    """Run the agent graph on a single question and return the final answer text."""
    graph = build_graph()
    messages = [HumanMessage(content=question)]
    result = graph.invoke({"messages": messages})
    return result["messages"][-1].content
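

# Minimal local usage sketch. Assumption: this block is not part of the original
# Space, which calls agent_executor from app.py; it only illustrates how the
# function can be invoked directly once GOOGLE_API_KEY is available via .env or
# the environment. The question below is a hypothetical example.
if __name__ == "__main__":
    demo_question = "What is 17 multiplied by 23?"
    print(agent_executor(demo_question))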