Upload 4 files
.env
CHANGED
@@ -3,4 +3,5 @@ SUPABASE_URL=https://qgggbukkivjakulcmprh.supabase.co
 SUPABASE_SERVICE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InFnZ2didWtraXZqYWt1bGNtcHJoIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImlhdCI6MTc1MjIxOTE4NSwiZXhwIjoyMDY3Nzk1MTg1fQ.XxiYK6LzPGWKPiCBbF_moqfmpwCBsFLzw0aBNgxpN9M
 GROQ_API_KEY=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
 TAVILY_API_KEY=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
+OPENAI_API_KEY=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
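The OPENAI_API_KEY added here is presumably picked up through python-dotenv, which is already in requirements.txt. A minimal sketch of how agent.py-style code would load it, assuming the .env file sits in the working directory; the variable name is the only thing taken from this commit:

import os
from dotenv import load_dotenv

load_dotenv()  # reads key/value pairs from .env in the current directory

# Fails loudly if the variable added in this commit is missing.
openai_key = os.environ["OPENAI_API_KEY"]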
agent.py
CHANGED
@@ -113,6 +113,16 @@ def arvix_search(query: str) -> str:
     return {"arvix_results": formatted_search_docs}
 
 
+@tool
+def wolfram_alpha_query(query: str) -> str:
+    import wolframalpha
+    client = wolframalpha.Client(os.environ['WOLFRAM_APP_ID'])
+    res = client.query(query)
+    try:
+        return next(res.results).text
+    except StopIteration:
+        return "No result found."
+
 
 # load the system prompt from the file
 with open("system_prompt.txt", "r", encoding="utf-8") as f:
@@ -150,12 +160,19 @@ tools = [
     wiki_search,
     web_search,
     arvix_search,
+    wolfram_alpha_query
 ]
 
 # Build graph function
-def build_graph(provider: str = "
+def build_graph(provider: str = "openai"):
     """Build the graph"""
     # Load environment variables from .env file
+    if provider == "openai":
+        from langchain.chat_models import ChatOpenAI
+        llm = ChatOpenAI(model_name="gpt-4", temperature=0)
+    elif provider == "anthropic":
+        from langchain.chat_models import ChatAnthropic
+        llm = ChatAnthropic(model="claude-v1", temperature=0)
     if provider == "google":
         # Google Gemini
         llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
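In recent langchain_core releases the @tool decorator uses the function's docstring as the tool description and errors out when neither a docstring nor an explicit description is given, so the new wolfram_alpha_query would likely need one before the graph can build. A sketch of the same tool with a docstring added, assuming @tool comes from langchain_core.tools as in the rest of agent.py, and that the wolframalpha package and a WOLFRAM_APP_ID variable are available (neither appears elsewhere in this commit):

import os
from langchain_core.tools import tool  # assumed import; the file's other tools use this decorator

@tool
def wolfram_alpha_query(query: str) -> str:
    """Query Wolfram Alpha and return the first plain-text result."""
    import wolframalpha  # not listed in requirements.txt; assumed installed separately
    client = wolframalpha.Client(os.environ["WOLFRAM_APP_ID"])
    res = client.query(query)
    try:
        return next(res.results).text
    except StopIteration:
        return "No result found."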
app.py
CHANGED
@@ -24,7 +24,7 @@ class BasicAgent:
         self.graph = build_graph()
 
     def __call__(self, question: str) -> str:
-        print(f"Agent received question : {question}
+        print(f"Agent received question : {question}")
         # Wrap the question in a HumanMessage from langchain_core
         messages = [HumanMessage(content=question)]
         messages = self.graph.invoke({"messages": messages})
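For reference, a hypothetical driver showing how BasicAgent is used once this print statement is fixed; the import path and the sample question are assumptions, not part of app.py:

from app import BasicAgent  # hypothetical import path for this Space's app.py

agent = BasicAgent()  # builds the LangGraph graph once via build_graph()

# __call__ prints the incoming question, wraps it in a HumanMessage,
# runs the graph, and returns the answer string.
answer = agent("What is the capital of France?")
print(answer)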
requirements.txt
CHANGED
@@ -17,3 +17,6 @@ wikipedia
 pgvector
 python-dotenv
 sentence-transformers
+langchain
+openai
+anthropic
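The three new packages back the openai and anthropic branches added to build_graph. A quick sanity check, assuming whatever versions pip resolves for this Space:

# Confirm the newly added dependencies import and report their versions.
import langchain
import openai
import anthropic

print(langchain.__version__, openai.__version__, anthropic.__version__)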