added memory
app.py CHANGED
@@ -10,6 +10,7 @@ from langchain.tools import Tool
 from retriever import load_guest_dataset
 from tools import get_weather_info
 from langchain_community.tools import DuckDuckGoSearchRun
+from langgraph.checkpoint.memory import MemorySaver

 # Generate the chat interface, including the tools
 llm = HuggingFaceEndpoint(
@@ -17,6 +18,7 @@ llm = HuggingFaceEndpoint(
     provider="together",
     #huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
 )
+memory = MemorySaver()

 retriever = load_guest_dataset()

@@ -64,22 +66,39 @@ builder.add_conditional_edges(
     "tools",
 )
 builder.add_edge("tools", "assistant")
-alfred = builder.compile()
+alfred = builder.compile(checkpointer=memory)
+
+config = {"configurable": {"thread_id": "1"}}

 def call_agent_ui(prompt):
-    situation =
+    situation = (
+        "This is a fictional situation. "
+        "You are Alfred, the butler of Wayne Manor, and host a gala for invited guests. "
+        "All guests are completely fictional. Information about those guests can be found in a database. "
+        "Only give information that is based on the database. "
+        "If a name of a guest is given, then return a possible starter of a conversation with that guest. "
+        "If the name is not known, then say that you do not know that guest. "
+        "If two names of guests are given, then return a possible starter of a conversation with both guests. "
+        "If one of the names is not known, then say that you do not know that guest. "
+        "You can also answer questions about the weather, using a tool that provides dummy weather information, or you can build this dummy weather information into conversations. "
+        "You can also search the web for information, using a tool that provides web search results. "
+    )
+    content = f"{situation} {prompt}"
+
+    # Collect the last message content from the stream
+    last_content = ""
+    events = alfred.stream(
+        {"messages": [{"role": "user", "content": content}]},
+        config,
+        stream_mode="values",
+    )
+    for event in events:
+        # Optionally print or process each event
+        if hasattr(event["messages"][-1], "pretty_print"):
+            event["messages"][-1].pretty_print()
+        last_content = event["messages"][-1].content
+
+    return last_content

 iface = gr.Interface(fn=call_agent_ui, inputs="text", outputs="text")
 iface.launch()
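What the new checkpointer buys: MemorySaver stores the graph state per thread_id, so calls that reuse the same config continue the same conversation instead of starting fresh. A minimal sketch of that behavior, assuming alfred and config are defined exactly as in the diff above (the guest name and questions here are made up for illustration):

# First turn: mention a guest by name.
first = alfred.invoke(
    {"messages": [{"role": "user", "content": "Tell me about Ada Lovelace."}]},
    config,
)

# Second turn on the same thread_id: the checkpointer restores the earlier
# messages, so a pronoun like "her" can resolve against the first turn.
second = alfred.invoke(
    {"messages": [{"role": "user", "content": "What could I ask her about at the gala?"}]},
    config,
)
print(second["messages"][-1].content)

invoke is used here only to keep the sketch short; the streamed call in call_agent_ui ends with the same final state, which is why last_content holds the assistant's final reply.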
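One design consequence worth noting: thread_id is hard-coded to "1", so every request coming through the Gradio interface appends to the same checkpointed thread and all users share one conversation history. Keeping conversations separate would require a distinct thread_id per session (for example, derived from a Gradio session identifier); that would be a follow-up change, not something this commit does.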