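"""Alfred, the gala host agent: a tool-calling LangGraph agent with
conversation memory, served through a minimal Gradio interface."""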
import gradio as gr
from typing import TypedDict, Annotated
from langgraph.graph.message import add_messages
from langchain_core.messages import AnyMessage, HumanMessage, AIMessage
from langgraph.prebuilt import ToolNode
from langgraph.graph import START, StateGraph
from langgraph.prebuilt import tools_condition
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain.tools import Tool
from retriever import load_guest_dataset
from tools import get_weather_info
from langchain_community.tools import DuckDuckGoSearchRun
from langgraph.checkpoint.memory import MemorySaver
# Set up the chat model and the tools it can call
llm = HuggingFaceEndpoint(
    repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",
    provider="together",
    # huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
)
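# In-memory checkpointer: LangGraph saves the message state per thread_id,
# which is what lets the agent remember earlier turns.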
memory = MemorySaver()
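# Build a retriever over the fictional guest dataset (see retriever.py)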
retriever = load_guest_dataset()
guest_info_tool = Tool(
    name="guest_info_retriever",
    func=retriever.retrieve,
    description="Retrieves detailed information about gala guests based on their name or relation.",
)
weather_info_tool = Tool(
    name="get_weather_info",
    func=get_weather_info,
    description="Fetches dummy weather information for a given location.",
)
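# Web search via DuckDuckGo; this community tool needs no API key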
search_tool = DuckDuckGoSearchRun()
chat = ChatHuggingFace(llm=llm, verbose=True)
tools = [guest_info_tool, weather_info_tool, search_tool]
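# bind_tools attaches the tools' schemas to every request, so the model can
# answer with structured tool calls instead of plain text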
chat_with_tools = chat.bind_tools(tools)
# Generate the AgentState and Agent graph
class AgentState(TypedDict):
    messages: Annotated[list[AnyMessage], add_messages]
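# The assistant node calls the tool-enabled model on the accumulated messages;
# the add_messages reducer above appends the returned AIMessage to the state.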
def assistant(state: AgentState):
    return {
        "messages": [chat_with_tools.invoke(state["messages"])],
    }
## The graph
builder = StateGraph(AgentState)
# Define nodes: these do the work
builder.add_node("assistant", assistant)
builder.add_node("tools", ToolNode(tools))
# Define edges: these determine how the control flow moves
builder.add_edge(START, "assistant")
builder.add_conditional_edges(
    "assistant",
    # If the latest message contains tool calls, tools_condition routes to the
    # "tools" node; otherwise it routes to END and the turn finishes
    tools_condition,
)
builder.add_edge("tools", "assistant")
alfred = builder.compile(checkpointer=memory)
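# All UI requests share thread "1", so every visitor sees the same
# conversation history; distinct thread_ids would isolate sessions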
config = {"configurable": {"thread_id": "1"}}
def call_agent_ui(prompt):
    situation = (
        "This is a fictional situation. "
        "You are Alfred, the butler of Wayne Manor, and you are hosting a gala for invited guests. "
        "All guests are completely fictional. Information about those guests can be found in a database. "
        "Only give information that is based on the database. "
        "If the name of a guest is given, then return a possible starter for a conversation with that guest. "
        "If the name is not known, then say that you do not know that guest. "
        "If two guest names are given, then return a possible starter for a conversation with both guests. "
        "If either name is not known, then say that you do not know that guest. "
        "You can also answer questions about the weather, using a tool that provides dummy weather "
        "information, or work that dummy weather information into conversations. "
        "You can also search the web for information, using a tool that provides web search results. "
    )
    content = f"{situation} {prompt}"
    # Collect the last message content from the stream
    last_content = ""
    events = alfred.stream(
        {"messages": [{"role": "user", "content": content}]},
        config,
        stream_mode="values",
    )
    for event in events:
        # Log each intermediate step (tool calls, tool results, model replies)
        if hasattr(event["messages"][-1], "pretty_print"):
            event["messages"][-1].pretty_print()
        last_content = event["messages"][-1].content
    return last_content
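# Minimal Gradio UI: one text box in, the agent's final reply out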
iface = gr.Interface(fn=call_agent_ui, inputs="text", outputs="text")
iface.launch()