# NOTE(review): the lines below are Hugging Face Hub page chrome accidentally
# captured with the file; commented out so the module parses as Python.
# LamiaYT's picture
# Fix
# 003104b
# raw
# history blame
# 6 kB
import os
import json
from dotenv import load_dotenv
from langchain_core.messages import HumanMessage
load_dotenv()
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"
hf_token = os.getenv("HUGGINGFACE_INFERENCE_TOKEN")
from langgraph.graph import START, StateGraph, MessagesState
from langgraph.prebuilt import tools_condition, ToolNode
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_community.document_loaders import WikipediaLoader, ArxivLoader
from langchain_community.vectorstores import Chroma
from langchain_core.messages import SystemMessage, HumanMessage
from langchain_core.tools import tool
from langchain.schema import Document
# ---- Tool Definitions (with docstrings) ----
@tool
def multiply(a: int, b: int) -> int:
    """Return the product of the two integers *a* and *b*."""
    product = a * b
    return product
@tool
def add(a: int, b: int) -> int:
    """Return the sum of the two integers *a* and *b*."""
    total = a + b
    return total
@tool
def subtract(a: int, b: int) -> int:
    """Return *a* minus *b* for two integers."""
    difference = a - b
    return difference
@tool
def divide(a: int, b: int) -> float:
    """Return *a* divided by *b* as a float.

    Raises:
        ValueError: If *b* is zero (division is undefined).
    """
    if b != 0:
        return a / b
    raise ValueError("Cannot divide by zero.")
@tool
def modulus(a: int, b: int) -> int:
    """Return the remainder of dividing *a* by *b*."""
    remainder = a % b
    return remainder
@tool
def wiki_search(query: str) -> str:
    """Search Wikipedia for the query and return text of up to 2 documents.

    Args:
        query: Free-text search string.

    Returns:
        A single string with each matched page wrapped in a <Document> tag,
        separated by '---' dividers. (Previously returned a dict, which
        contradicted the declared ``-> str`` return type.)
    """
    search_docs = WikipediaLoader(query=query, load_max_docs=2).load()
    # .get(...) avoids a KeyError if a loader result lacks "source"/"page";
    # the opening tag is no longer self-closed ("/>") since it is paired
    # with an explicit </Document>.
    formatted = "\n\n---\n\n".join(
        f'<Document source="{doc.metadata.get("source", "")}" page="{doc.metadata.get("page", "")}">\n{doc.page_content}\n</Document>'
        for doc in search_docs
    )
    return formatted
@tool
def web_search(query: str) -> str:
    """Search the web for the query using Tavily and return up to 3 results.

    Args:
        query: Free-text search string.

    Returns:
        A single string with each result wrapped in a <Document> tag,
        separated by '---' dividers. (Previously returned a dict, which
        contradicted the declared ``-> str`` return type.)
    """
    # BaseTool.invoke takes the input positionally — the original
    # ``invoke(query=query)`` raised a TypeError. TavilySearchResults yields
    # plain dicts (keys "url", "content"), not Document objects, so the
    # original ``doc.metadata[...]`` access could never have worked.
    results = TavilySearchResults(max_results=3).invoke(query)
    formatted = "\n\n---\n\n".join(
        f'<Document source="{item.get("url", "")}">\n{item.get("content", "")}\n</Document>'
        for item in results
    )
    return formatted
@tool
def arvix_search(query: str) -> str:
    """Search Arxiv for the query and return content from up to 3 papers.

    Args:
        query: Free-text search string.

    Returns:
        A single string with the first 1000 characters of each paper wrapped
        in a <Document> tag, separated by '---' dividers. (Previously
        returned a dict, which contradicted the declared ``-> str`` type.)
    """
    search_docs = ArxivLoader(query=query, load_max_docs=3).load()
    # .get(...) because ArxivLoader metadata may not carry a "source" key;
    # the original indexed access would raise KeyError in that case.
    formatted = "\n\n---\n\n".join(
        f'<Document source="{doc.metadata.get("source", "")}" page="{doc.metadata.get("page", "")}">\n{doc.page_content[:1000]}\n</Document>'
        for doc in search_docs
    )
    return formatted
# ---- Build vector store once at import time ----
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")

# Load reference Q/A pairs (one JSON object per line). The context manager
# guarantees the file handle is closed — the original left it open — and
# blank lines are skipped so a trailing newline cannot crash json.loads.
with open("metadata.jsonl", "r", encoding="utf-8") as fh:
    json_QA = [json.loads(line) for line in fh if line.strip()]

documents = [
    Document(
        page_content=f"Question : {sample['Question']}\n\nFinal answer : {sample['Final answer']}",
        metadata={"source": sample["task_id"]}
    )
    for sample in json_QA
]

vector_store = Chroma.from_documents(
    documents=documents,
    embedding=embeddings,
    persist_directory="./chroma_db",
    collection_name="my_collection"
)
# NOTE(review): ``_collection`` is a private Chroma attribute, kept only for
# this count log — it may break on a Chroma upgrade.
print("Documents inserted:", vector_store._collection.count())
@tool
def similar_question_search(query: str) -> str:
    """Search for questions similar to the input query using the vector store.

    Args:
        query: Free-text question to match against stored Q/A pairs.

    Returns:
        A single string with up to 3 nearest matches wrapped in <Document>
        tags, separated by '---' dividers. (Previously returned a dict,
        which contradicted the declared ``-> str`` return type.)
    """
    matched_docs = vector_store.similarity_search(query, 3)
    formatted = "\n\n---\n\n".join(
        f'<Document source="{doc.metadata.get("source", "")}" page="{doc.metadata.get("page", "")}">\n{doc.page_content[:1000]}\n</Document>'
        for doc in matched_docs
    )
    return formatted
# ---- System Prompt ----
# Instructs the model to end every reply with the "FINAL ANSWER:" template so
# the final answer can be extracted downstream.
system_prompt = """
You are a helpful assistant tasked with answering questions using a set of tools.
Now, I will ask you a question. Report your thoughts, and finish your answer with the following template:
FINAL ANSWER: [YOUR FINAL ANSWER].
YOUR FINAL ANSWER should be a number OR as few words as possible OR a comma separated list of numbers and/or strings...
"""
# System message prepended to the conversation by the retriever node.
sys_msg = SystemMessage(content=system_prompt)
# Every tool exposed to the LLM; bound to the model inside build_graph().
tools = [
    multiply, add, subtract, divide, modulus,
    wiki_search, web_search, arvix_search, similar_question_search
]
# ---- Graph Builder ----
def build_graph(provider: str = "huggingface"):
    """Compile and return the agent graph for the chosen LLM provider.

    The graph runs: START -> retriever (prepends system prompt and, when
    available, a similar stored question) -> assistant (tool-aware LLM) ->
    tools loop until the assistant stops calling tools.

    Args:
        provider: "huggingface" (default) or "google".

    Raises:
        ValueError: If *provider* is neither "huggingface" nor "google".
    """
    if provider == "huggingface":
        chat_model = ChatHuggingFace(
            llm=HuggingFaceEndpoint(
                repo_id="mosaicml/mpt-30b",
                temperature=0,
                huggingfacehub_api_token=hf_token
            )
        )
    elif provider == "google":
        chat_model = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
    else:
        raise ValueError("Invalid provider: choose 'huggingface' or 'google'.")

    model_with_tools = chat_model.bind_tools(tools)

    def assistant(state: MessagesState):
        # Run the tool-aware model over the accumulated conversation.
        reply = model_with_tools.invoke(state["messages"])
        return {"messages": [reply]}

    def retriever(state: MessagesState):
        # Prepend the system prompt; if the vector store holds a similar
        # question, append it as an in-context example.
        outgoing = [sys_msg] + state["messages"]
        hits = vector_store.similarity_search(state["messages"][0].content)
        if hits:
            hint = HumanMessage(content=f"Here is a similar question:\n\n{hits[0].page_content}")
            outgoing = outgoing + [hint]
        return {"messages": outgoing}

    graph = StateGraph(MessagesState)
    graph.add_node("retriever", retriever)
    graph.add_node("assistant", assistant)
    graph.add_node("tools", ToolNode(tools))
    graph.add_edge(START, "retriever")
    graph.add_edge("retriever", "assistant")
    graph.add_conditional_edges("assistant", tools_condition)
    graph.add_edge("tools", "assistant")
    return graph.compile()