# NOTE(review): removed non-Python residue that preceded the source — a
# GitHub page scrape (file-size line, commit hashes, and a duplicated
# 1-104 line-number gutter). It was not part of the program and made the
# file unparseable as Python.
import gradio as gr
from typing import TypedDict, Annotated
from langgraph.graph.message import add_messages
from langchain_core.messages import AnyMessage, HumanMessage, AIMessage
from langgraph.prebuilt import ToolNode
from langgraph.graph import START, StateGraph
from langgraph.prebuilt import tools_condition
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain.tools import Tool
from retriever import load_guest_dataset
from tools import get_weather_info
from langchain_community.tools import DuckDuckGoSearchRun
from langgraph.checkpoint.memory import MemorySaver

# --- Model, memory, and tool wiring ------------------------------------------

# Chat model served through the Hugging Face inference endpoint
# (Together as the provider).
llm = HuggingFaceEndpoint(
    repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",
    provider="together",
    #huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
)

# In-memory checkpointer so the compiled graph keeps per-thread history.
memory = MemorySaver()

# Retriever over the (fictional) gala-guest dataset.
retriever = load_guest_dataset()

guest_info_tool = Tool(
    name="guest_info_retriever",
    description="Retrieves detailed information about gala guests based on their name or relation.",
    func=retriever.retrieve,
)

weather_info_tool = Tool(
    name="get_weather_info",
    description="Fetches dummy weather information for a given location.",
    func=get_weather_info,
)

# General-purpose web search.
search_tool = DuckDuckGoSearchRun()

# Wrap the raw endpoint in a chat interface and expose the tools to it.
chat = ChatHuggingFace(llm=llm, verbose=True)
tools = [guest_info_tool, weather_info_tool, search_tool]
chat_with_tools = chat.bind_tools(tools)

# State schema for the agent graph.
class AgentState(TypedDict):
    """Graph state: the conversation so far, merged with ``add_messages``."""

    messages: Annotated[list[AnyMessage], add_messages]

def assistant(state: AgentState):
    """LLM node: run the tool-aware chat model over the accumulated messages.

    Returns a partial state update; ``add_messages`` appends the reply.
    """
    reply = chat_with_tools.invoke(state["messages"])
    return {"messages": [reply]}

## The graph
builder = StateGraph(AgentState)

# Nodes: the LLM step and the tool-execution step.
builder.add_node("assistant", assistant)
builder.add_node("tools", ToolNode(tools))

# Edges: always enter at the assistant.
builder.add_edge(START, "assistant")
builder.add_conditional_edges(
    "assistant",
    # If the latest message requires a tool, route to tools;
    # otherwise tools_condition returns END and we stop.
    tools_condition,
    # BUG FIX: the original passed the bare string "tools" as path_map.
    # langgraph expands a non-dict path_map element-wise, so a string
    # becomes {'t': 't', 'o': 'o', ...} and the "tools"/END labels that
    # tools_condition returns no longer match — omit path_map entirely.
)
builder.add_edge("tools", "assistant")

# Compile with the checkpointer so each thread_id keeps its own history.
alfred = builder.compile(checkpointer=memory)

# Single fixed conversation thread shared by all UI calls.
config = {"configurable": {"thread_id": "1"}}

def call_agent_ui(prompt: str) -> str:
    """Gradio handler: prefix *prompt* with the Alfred persona, stream the
    agent graph, and return the text of the final message.

    Uses the module-level ``alfred`` graph and ``config`` (fixed thread id),
    so every UI call continues the same conversation thread.
    """
    # Persona / guardrail preamble sent with every user turn.  This text is
    # read by the LLM, so the spelling fixes here are behavioral fixes:
    # "ficional" -> "fictional", "databse" -> "database",
    # "Waynes" -> "Wayne's".
    situation = (
            "This is a fictional situation. "
            "You are Alfred the Butler of Wayne's Manor and host a Gala for invited Guests. "
            "All Guests are completely fictional. Information about those guests can be found in a database. "
            "Only give information which is based on the database. "
            "If a name of a guest is given, then return a possible starter of a conversation with that guest. "
            "If the name is not known, then say that you do not know that guest. "
            "If two names of guests are given, then return a possible starter of a conversation with both guests. "
            "If the name is not known, then say that you do not know that guest. "
            "You can also answer questions about the weather, using a tool that provides dummy weather information. Or you can build this dummy weather information in conversations."
            "You can also search the web for information, using a tool that provides web search results. "
    )
    content = f"{situation} {prompt}"

    # Stream the graph and keep only the content of the last message seen;
    # after the final step that is the assistant's answer.
    last_content = ""
    events = alfred.stream(
        {"messages": [{"role": "user", "content": content}]},
        config,
        stream_mode="values",
    )
    for event in events:
        # Debug aid: pretty-print each intermediate message when supported.
        if hasattr(event["messages"][-1], "pretty_print"):
            event["messages"][-1].pretty_print()
        last_content = event["messages"][-1].content

    return last_content

# Minimal Gradio UI: one text box in, plain text out; launch() blocks and
# serves the app (side effect at import time — runs whenever this module
# is executed).
iface = gr.Interface(fn=call_agent_ui, inputs="text", outputs="text")
iface.launch()