wt002 committed · verified
Commit cd8de6f · 1 Parent(s): da3528f

Update agent.py

Files changed (1)
  1. agent.py +15 -11
agent.py CHANGED
@@ -3,24 +3,19 @@
 import os
 from dotenv import load_dotenv
 from langgraph.graph import START, StateGraph, MessagesState
-from langgraph.prebuilt import tools_condition, ToolNode
+from langgraph.prebuilt import tools_condition
+from langgraph.prebuilt import ToolNode
 from langchain_google_genai import ChatGoogleGenerativeAI
 from langchain_groq import ChatGroq
 from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
 from langchain_community.tools.tavily_search import TavilySearchResults
-from langchain_community.document_loaders import WikipediaLoader, ArxivLoader
+from langchain_community.document_loaders import WikipediaLoader
+from langchain_community.document_loaders import ArxivLoader
 from langchain_community.vectorstores import SupabaseVectorStore
 from langchain_core.messages import SystemMessage, HumanMessage
 from langchain_core.tools import tool
 from langchain.tools.retriever import create_retriever_tool
 from supabase.client import Client, create_client
-from typing import TypedDict, List, Annotated
-from langchain.agents.agent_toolkits import create_retriever_tool
-from langchain_community.document_loaders import TextLoader
-#from langchain_community.vectorstores import FAISS
-from langchain_openai import OpenAIEmbeddings
-from langchain_text_splitters import CharacterTextSplitter
-from supabase.client import Client, create_client
 
 
 load_dotenv()
@@ -128,7 +123,6 @@ with open("system_prompt.txt", "r", encoding="utf-8") as f:
 sys_msg = SystemMessage(content=system_prompt)
 
 embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")  # dim=768
-
 supabase: Client = create_client(
     os.environ.get("SUPABASE_URL"),
     os.environ.get("SUPABASE_SERVICE_KEY"))
@@ -205,4 +199,14 @@ def build_graph(provider: str = "google"):
     builder.add_edge("tools", "assistant")
 
     # Compile graph
-    return builder.compile()
+    return builder.compile()
+
+if __name__ == "__main__":
+    question = "When was a picture of St. Thomas Aquinas first added to the Wikipedia page on the Principle of double effect?"
+    # Build the graph
+    graph = build_graph(provider="groq")
+    # Run the graph
+    messages = [HumanMessage(content=question)]
+    messages = graph.invoke({"messages": messages})
+    for m in messages["messages"]:
+        m.pretty_print()
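
For context on the langgraph.prebuilt imports this commit splits out, the sketch below shows one typical way tools_condition and ToolNode are wired into a StateGraph like the one build_graph compiles. This is a minimal sketch, not the file's actual code: the Wikipedia tool, the Groq model name, and everything other than the "tools"/"assistant" node pair visible in the last hunk are assumptions.

# Minimal sketch of the ToolNode / tools_condition wiring (assumed details noted above).
import os
from dotenv import load_dotenv
from langgraph.graph import START, StateGraph, MessagesState
from langgraph.prebuilt import tools_condition, ToolNode
from langchain_groq import ChatGroq
from langchain_core.tools import tool
from langchain_community.document_loaders import WikipediaLoader

load_dotenv()

@tool
def wiki_search(query: str) -> str:
    """Return the text of the top Wikipedia result for the query."""
    docs = WikipediaLoader(query=query, load_max_docs=1).load()
    return docs[0].page_content if docs else "No results found."

llm = ChatGroq(model="llama-3.3-70b-versatile")  # assumed model; reads GROQ_API_KEY
llm_with_tools = llm.bind_tools([wiki_search])

def assistant(state: MessagesState):
    # Let the model answer directly or emit tool calls.
    return {"messages": [llm_with_tools.invoke(state["messages"])]}

builder = StateGraph(MessagesState)
builder.add_node("assistant", assistant)
builder.add_node("tools", ToolNode([wiki_search]))           # executes requested tool calls
builder.add_edge(START, "assistant")
builder.add_conditional_edges("assistant", tools_condition)   # route to "tools" or end
builder.add_edge("tools", "assistant")                        # feed tool output back to the model
graph = builder.compile()

With this shape, tools_condition routes to the node literally named "tools" whenever the last AI message contains tool calls, which is why that node name also appears in the diff's add_edge("tools", "assistant") line.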
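
Similarly, the SupabaseVectorStore, HuggingFaceEmbeddings, and create_retriever_tool imports that survive the cleanup are usually combined as below. The embeddings model and the environment variable names come from the diff; the table name, RPC name, and the tool name/description are assumptions.

# Sketch of exposing the Supabase vector store as a retriever tool
# ("documents", "match_documents", and the tool name are assumed, not from this commit).
import os
from dotenv import load_dotenv
from supabase.client import Client, create_client
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_community.vectorstores import SupabaseVectorStore
from langchain.tools.retriever import create_retriever_tool

load_dotenv()

embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")  # dim=768
supabase: Client = create_client(
    os.environ.get("SUPABASE_URL"),
    os.environ.get("SUPABASE_SERVICE_KEY"))

vector_store = SupabaseVectorStore(
    client=supabase,
    embedding=embeddings,
    table_name="documents",        # assumed table name
    query_name="match_documents",  # assumed similarity-search RPC
)
retriever_tool = create_retriever_tool(
    vector_store.as_retriever(),
    "question_search",
    "Look up previously answered questions similar to the current one.",
)

A retriever tool built this way can be passed to bind_tools and ToolNode alongside the search tools, matching the "tools" → "assistant" loop above. Note that the new __main__ block makes `python agent.py` run the sample question through the Groq-backed graph, so SUPABASE_URL, SUPABASE_SERVICE_KEY, and a Groq API key (typically GROQ_API_KEY) must be available, e.g. via the .env file that load_dotenv() reads.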