import os
from dotenv import load_dotenv
from langgraph.graph import START, StateGraph, MessagesState
from langgraph.prebuilt import ToolNode, tools_condition
from langchain_core.tools import tool
from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_groq import ChatGroq
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_community.document_loaders import WikipediaLoader, ArxivLoader
from langchain_community.vectorstores import SupabaseVectorStore
from langchain.tools.retriever import create_retriever_tool
from supabase.client import create_client
load_dotenv()
# --- System Prompt Loader ---
def load_system_prompt(path="system_prompt.txt") -> SystemMessage:
    """Load the system prompt from disk, with a generic fallback."""
    try:
        with open(path, encoding="utf-8") as f:
            return SystemMessage(content=f.read())
    except FileNotFoundError:
        return SystemMessage(content="You are a helpful assistant.")

sys_msg = load_system_prompt()
# --- Math Tools Factory ---
def math_tool(fn):
    """Wrap a plain function as a LangChain tool (the docstring becomes its description)."""
    return tool(fn)

@math_tool
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b

@math_tool
def subtract(a: int, b: int) -> int:
    """Subtract b from a."""
    return a - b

@math_tool
def multiply(a: int, b: int) -> int:
    """Multiply two integers."""
    return a * b

@math_tool
def divide(a: int, b: int) -> float:
    """Divide a by b; raises ValueError when b is zero."""
    if b == 0: raise ValueError("Cannot divide by zero.")
    return a / b

@math_tool
def modulus(a: int, b: int) -> int:
    """Return a modulo b."""
    return a % b

# --- Document Formatting Helper ---
def format_docs(docs, key: str, max_chars: int | None = None) -> dict:
    """Join documents into one tagged string under the given result key."""
    parts = []
    for d in docs:
        source = d.metadata.get("source", "")
        page = d.metadata.get("page", "")
        body = d.page_content[:max_chars] if max_chars else d.page_content
        parts.append(f'<Document source="{source}" page="{page}">\n{body}\n</Document>')
    return {key: "\n\n---\n\n".join(parts)}
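# Example (illustrative, hypothetical values):
#   format_docs(docs, "wiki_results")
#   -> {"wiki_results": '<Document source="https://..." page="1">\n...\n</Document>'}
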
# --- Info Tools ---
@tool
def wiki_search(query: str) -> dict:
    """Search Wikipedia and return up to two formatted documents."""
    docs = WikipediaLoader(query=query, load_max_docs=2).load()
    return format_docs(docs, "wiki_results")

@tool
def web_search(query: str) -> dict:
    """Search the web via Tavily and return up to three formatted results."""
    results = TavilySearchResults(max_results=3).invoke(query)
    # Tavily returns plain dicts (url/content), not Document objects.
    content = "\n\n---\n\n".join(
        '<Document source="{}">\n{}\n</Document>'.format(r.get("url", ""), r.get("content", ""))
        for r in results
    )
    return {"web_results": content}

@tool
def arxiv_search(query: str) -> dict:
    """Search arXiv and return up to three documents, truncated to 1,000 chars each."""
    docs = ArxivLoader(query=query, load_max_docs=3).load()
    return format_docs(docs, "arxiv_results", max_chars=1000)

# --- Vector Retriever Setup ---
def build_vector_retriever():
    """Build a retriever over the Supabase 'documents' vector store."""
    embed_model = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")
    supa = create_client(os.getenv("SUPABASE_URL"), os.getenv("SUPABASE_SERVICE_KEY"))
    vs = SupabaseVectorStore(
        client=supa,
        embedding=embed_model,
        table_name="documents",
        query_name="match_documents_langchain",
    )
    return vs.as_retriever()
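# Assumes the Supabase project already has a "documents" table and a
# "match_documents_langchain" similarity-search function (the standard
# SupabaseVectorStore setup), and that SUPABASE_URL / SUPABASE_SERVICE_KEY
# are present in the environment (loaded above via load_dotenv()).
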
# --- LLM Factory ---
def get_llm(provider: str):
    """Return a chat model for 'google', 'groq', or 'huggingface'."""
    if provider == "google":
        return ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
    if provider == "groq":
        return ChatGroq(model="qwen-qwq-32b", temperature=0)
    if provider == "huggingface":
        return ChatHuggingFace(llm=HuggingFaceEndpoint(
            endpoint_url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
            temperature=0,
        ))
    raise ValueError(f"Unsupported provider: {provider}")
# --- Build Graph ---
def build_graph(provider: str = "google"):
    """Assemble the retriever -> assistant -> tools agent graph."""
    # tools list
    retriever = build_vector_retriever()
    question_tool = create_retriever_tool(
        retriever=retriever,
        name="question_search",  # no spaces: several providers reject them in tool names
        description="Retrieve similar Q&A from the vector store",
    )
    tools = [
        add, subtract, multiply, divide, modulus,
        wiki_search, web_search, arxiv_search,
        question_tool,
    ]
    # LLM w/ tools
    llm = get_llm(provider).bind_tools(tools)

    # Nodes
    def assistant(state: MessagesState):
        msgs = [sys_msg] + state["messages"]
        resp = llm.invoke(msgs)  # chat models take a message list, not a dict
        return {"messages": [resp]}

    def retriever_node(state: MessagesState):
        query = state["messages"][-1].content
        docs = retriever.invoke(query)  # retrievers expose invoke(), not similarity_search()
        text = docs[0].page_content if docs else ""
        answer = text.split("Final answer :")[-1].strip() if "Final answer :" in text else text
        return {"messages": [AIMessage(content=answer)]}
    # Graph assembly
    graph = StateGraph(MessagesState)
    graph.add_node("retriever", retriever_node)
    graph.add_node("assistant", assistant)
    graph.add_node("tools", ToolNode(tools))
    graph.add_edge(START, "retriever")  # the edge from START also sets the entry point
    graph.add_edge("retriever", "assistant")
    graph.add_conditional_edges("assistant", tools_condition)  # routes to "tools" or END
    graph.add_edge("tools", "assistant")
    return graph.compile()
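
# --- Demo Entry Point ---
# Minimal usage sketch: assumes GOOGLE_API_KEY, SUPABASE_URL, and
# SUPABASE_SERVICE_KEY are set in the environment; the question is illustrative.
if __name__ == "__main__":
    agent = build_graph(provider="google")
    result = agent.invoke({"messages": [HumanMessage(content="What is 15 divided by 3?")]})
    print(result["messages"][-1].content)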