File size: 2,852 Bytes
1669f2b
 
 
 
 
 
 
 
 
8ca5d55
 
1669f2b
 
 
 
 
8ca5d55
1669f2b
 
 
 
 
 
 
 
 
 
 
 
8ca5d55
 
1669f2b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
import os

from dotenv import load_dotenv
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.tools import tool
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
from langgraph.graph import START, MessagesState, StateGraph
from langgraph.prebuilt import ToolNode, tools_condition

from tools import (absolute, add, analyze_excel_file, arvix_search,
                   audio_transcription, compound_interest, convert_temperature,
                   divide, exponential, factorial, floor_divide,
                   get_current_time_in_timezone, greatest_common_divisor,
                   is_prime, least_common_multiple, logarithm, modulus,
                   multiply, percentage_calculator, power,
                   roman_calculator_converter, square_root, subtract,
                   web_search, wiki_search)

# Load Constants
# Pull environment variables from a local .env file (if present) so the
# HuggingFace token is available before the endpoint is constructed.
load_dotenv()
# Token for the HuggingFace Inference API; will be None if the variable
# is unset — NOTE(review): confirm HuggingFaceEndpoint's fallback
# behavior in that case.
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")


# Registry of every tool exposed to the agent.  The same list is bound
# to the chat model in build_graph() (so the LLM can emit tool calls)
# and passed to the graph's ToolNode (so those calls get executed) —
# keep the two uses in sync by always going through this list.
tools = [
    multiply, add, subtract, power, divide, modulus,
    square_root, floor_divide, absolute, logarithm,
    exponential, web_search, roman_calculator_converter,
    get_current_time_in_timezone, compound_interest,
    convert_temperature, factorial, greatest_common_divisor,
    is_prime, least_common_multiple, percentage_calculator,
    wiki_search, analyze_excel_file, arvix_search, audio_transcription
]

def build_graph():
    """Construct and compile the agent workflow graph.

    The graph loops between an LLM "assistant" node and a tool-execution
    node: after each assistant turn, ``tools_condition`` routes to the
    tool node when the last message contains tool calls, otherwise the
    run ends.

    Returns:
        A compiled LangGraph application ready for ``invoke``.
    """
    # Raw text-generation endpoint backing the chat model.
    endpoint = HuggingFaceEndpoint(
        repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",
        huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
        temperature=0.1,  # low temperature for more consistent responses
        max_new_tokens=1024,
        timeout=30,
    )

    # Wrap the endpoint in a chat interface and attach the tool registry
    # so the model can emit structured tool calls.
    chat_model = ChatHuggingFace(llm=endpoint).bind_tools(tools)

    def assistant(state: MessagesState):
        """Run the tool-aware chat model over the accumulated messages."""
        response = chat_model.invoke(state["messages"])
        return {"messages": [response]}

    workflow = StateGraph(MessagesState)
    workflow.add_node("assistant", assistant)
    workflow.add_node("tools", ToolNode(tools))
    workflow.add_edge(START, "assistant")
    # Route to the tool node when tool calls are pending, else finish.
    workflow.add_conditional_edges(
        "assistant",
        tools_condition,
    )
    workflow.add_edge("tools", "assistant")

    # Compile into an executable graph.
    return workflow.compile()

# Manual smoke test: build the agent and run it on one factual question,
# printing every message (user, assistant, tool) in the final state.
if __name__ == "__main__":
    sample_question = (
        "When was a picture of St. Thomas Aquinas first added to the "
        "Wikipedia page on the Principle of double effect?"
    )
    agent = build_graph()
    result = agent.invoke(
        {"messages": [HumanMessage(content=sample_question)]}
    )
    for message in result["messages"]:
        message.pretty_print()