Upload 2 files
agent.py
CHANGED
@@ -7,7 +7,7 @@ from langgraph.prebuilt import ToolNode
 from langchain_google_genai import ChatGoogleGenerativeAI
 from langchain_groq import ChatGroq
 from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
-from langchain_community.tools.tavily_search import
+from langchain_community.tools.tavily_search import TavilySearch
 from langchain_community.document_loaders import WikipediaLoader
 from langchain_community.document_loaders import ArxivLoader
 from langchain_community.vectorstores import SupabaseVectorStore
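The previously truncated import now names TavilySearch. Note that, in the langchain-community releases I'm aware of, langchain_community.tools.tavily_search exports TavilySearchResults, while a TavilySearch class ships in the separate langchain-tavily package, so one of the two spellings below is likely what is intended. A minimal sketch of how the web_search tool listed further down might be built (the max_results value is an assumption, not shown in this commit):

    # Sketch only: assumes TAVILY_API_KEY is set in the Space's secrets.
    from langchain_community.tools.tavily_search import TavilySearchResults
    # or, with the standalone package: from langchain_tavily import TavilySearch

    # web_search is referenced in the tools list below; its construction is not
    # part of this diff, so this is one plausible definition.
    web_search = TavilySearchResults(max_results=3)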
@@ -143,7 +143,7 @@ vector_store = SupabaseVectorStore(
     table_name="documents",
     query_name="match_documents_langchain",
 )
-
+retriever_tool = create_retriever_tool(
     retriever=vector_store.as_retriever(),
     name="Question Search",
     description="A tool to retrieve similar questions from a vector store.",
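This hunk adds the opening retriever_tool = create_retriever_tool( line, so the keyword arguments that follow are no longer dangling. The import of create_retriever_tool is outside the hunks shown; the sketch below assumes the langchain.tools.retriever path:

    # Sketch of the full call as it presumably reads in context.
    from langchain.tools.retriever import create_retriever_tool

    retriever_tool = create_retriever_tool(
        retriever=vector_store.as_retriever(),  # Supabase-backed retriever
        name="Question Search",
        description="A tool to retrieve similar questions from a vector store.",
    )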
@@ -160,7 +160,8 @@ tools = [
     wiki_search,
     web_search,
     arvix_search,
-    wolfram_alpha_query
+    wolfram_alpha_query,
+    retriever_tool
 ]

 # Build graph function
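wolfram_alpha_query gains a trailing comma and the new retriever_tool is appended, exposing the vector-store retriever to the agent alongside the search tools. How the list is consumed is not part of this diff, but given the ToolNode import at the top of the file the usual wiring looks roughly like this (an assumption, not shown in the commit):

    # Sketch: typical LangGraph wiring for a tools list like the one above.
    from langgraph.prebuilt import ToolNode

    llm_with_tools = llm.bind_tools(tools)   # lets the chat model emit tool calls
    tool_node = ToolNode(tools)              # node that executes the requested tool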
@@ -169,10 +170,10 @@ def build_graph(provider: str = "openai"):
     # Load environment variables from .env file
     if provider == "openai":
         from langchain.chat_models import ChatOpenAI
-
-
-
-
+        llm = ChatOpenAI(model_name="gpt-4", temperature=0)
+    elif provider == "anthropic":
+        from langchain.chat_models import ChatAnthropic
+        llm = ChatAnthropic(model="claude-v1", temperature=0)
     if provider == "google":
         # Google Gemini
         llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
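The openai branch now actually constructs a model, and an anthropic branch is added; both use the legacy langchain.chat_models import path rather than the dedicated langchain_google_genai / langchain_groq packages imported at the top of the file. A hedged usage sketch of the finished builder (the messages-based graph state is an assumption about the rest of the file):

    # Sketch: compiling and invoking the graph for one question.
    from langchain_core.messages import HumanMessage

    graph = build_graph(provider="google")
    result = graph.invoke({"messages": [HumanMessage(content="What is 2 + 2?")]})
    print(result["messages"][-1].content)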