# NOTE: "Spaces: Sleeping" — Hugging Face Spaces status banner accidentally
# captured when this source was copied; kept here as a comment so the file parses.
import os

from dotenv import load_dotenv
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.tools import tool
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
from langgraph.graph import START, MessagesState, StateGraph
from langgraph.prebuilt import ToolNode, tools_condition
from tools import (absolute, add, analyze_excel_file, arvix_search,
                   audio_transcription, compound_interest, convert_temperature,
                   divide, exponential, factorial, floor_divide,
                   get_current_time_in_timezone, greatest_common_divisor,
                   is_prime, least_common_multiple, logarithm, modulus,
                   multiply, percentage_calculator, power,
                   roman_calculator_converter, square_root, subtract,
                   web_search, wiki_search)

# Load environment variables (e.g. from a local .env file) so the Hugging Face
# API token is available at import time.
load_dotenv()
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
# Tool functions exposed to the agent.  The same list is handed both to
# `llm.bind_tools(...)` (so the model can emit tool calls) and to the
# `ToolNode` (which executes those calls) in `build_graph`.
tools = [
    multiply, add, subtract, power, divide, modulus,
    square_root, floor_divide, absolute, logarithm,
    exponential, web_search, roman_calculator_converter,
    get_current_time_in_timezone, compound_interest,
    convert_temperature, factorial, greatest_common_divisor,
    is_prime, least_common_multiple, percentage_calculator,
    wiki_search, analyze_excel_file, arvix_search, audio_transcription,
]
def build_graph():
    """Build and compile the agent's LangGraph state graph.

    The graph is a classic ReAct-style loop:
        START -> assistant -> (tools_condition) -> tools -> assistant -> ...
    where `tools_condition` routes to the "tools" node when the model emitted
    tool calls, and to END otherwise.

    Returns:
        A compiled LangGraph graph; invoke it with ``{"messages": [...]}``.
    """
    # First create the raw text-generation endpoint on the Hugging Face Hub.
    llm_endpoint = HuggingFaceEndpoint(
        repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",
        huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
        temperature=0.1,  # Lower temperature for more consistent responses
        max_new_tokens=1024,
        timeout=30,
    )
    # Then wrap it with ChatHuggingFace to get chat-model (message-based)
    # functionality, which is what the graph nodes exchange.
    llm = ChatHuggingFace(llm=llm_endpoint)

    # Bind the tool schemas to the LLM so it can emit structured tool calls.
    llm_with_tools = llm.bind_tools(tools)

    def assistant(state: MessagesState):
        """Assistant node: run the tool-bound LLM on the conversation so far."""
        return {"messages": [llm_with_tools.invoke(state["messages"])]}

    builder = StateGraph(MessagesState)
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))
    # Route to "tools" when the last assistant message contains tool calls,
    # otherwise to END (default behavior of `tools_condition`).
    builder.add_conditional_edges(
        "assistant",
        tools_condition,
    )
    builder.add_edge(START, "assistant")
    builder.add_edge("tools", "assistant")

    # Compile graph into a runnable object.
    return builder.compile()
# Manual smoke test: build the graph and run a single question through it.
if __name__ == "__main__":
    question = "When was a picture of St. Thomas Aquinas first added to the Wikipedia page on the Principle of double effect?"
    # Build the graph
    graph = build_graph()
    # Run the graph; `invoke` returns the final state, whose "messages" key
    # holds the full conversation (human, assistant, and tool messages).
    messages = [HumanMessage(content=question)]
    result = graph.invoke({"messages": messages})
    for m in result["messages"]:
        m.pretty_print()