Upload 10 files
- README.md +15 -0
- agent.py +213 -0
- app.py +207 -0
- gitattributes +35 -0
- gitignore +116 -0
- metadata.jsonl +0 -0
- requirements.txt +18 -0
- supabase_docs.csv +0 -0
- system_prompt.txt +28 -0
- test.ipynb +940 -0
README.md
ADDED
@@ -0,0 +1,15 @@
---
title: Template Final Assignment
emoji: 🕵🏻‍♂️
colorFrom: indigo
colorTo: indigo
sdk: gradio
sdk_version: 5.25.2
app_file: app.py
pinned: false
hf_oauth: true
# optional, default duration is 8 hours/480 minutes. Max duration is 30 days/43200 minutes.
hf_oauth_expiration_minutes: 480
---

Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
agent.py
ADDED
@@ -0,0 +1,213 @@
"""LangGraph Agent"""
import os
from dotenv import load_dotenv
from langgraph.graph import START, StateGraph, MessagesState
from langgraph.prebuilt import tools_condition
from langgraph.prebuilt import ToolNode
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_groq import ChatGroq
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_community.document_loaders import WikipediaLoader
from langchain_community.document_loaders import ArxivLoader
from langchain_community.vectorstores import SupabaseVectorStore
from langchain_core.messages import SystemMessage, HumanMessage
from langchain_core.tools import tool
from langchain.tools.retriever import create_retriever_tool
from supabase.client import Client, create_client

load_dotenv()

@tool
def multiply(a: int, b: int) -> int:
    """Multiply two numbers.

    Args:
        a: first int
        b: second int
    """
    return a * b

@tool
def add(a: int, b: int) -> int:
    """Add two numbers.

    Args:
        a: first int
        b: second int
    """
    return a + b

@tool
def subtract(a: int, b: int) -> int:
    """Subtract two numbers.

    Args:
        a: first int
        b: second int
    """
    return a - b

@tool
def divide(a: int, b: int) -> float:
    """Divide two numbers.

    Args:
        a: first int
        b: second int
    """
    if b == 0:
        raise ValueError("Cannot divide by zero.")
    return a / b

@tool
def modulus(a: int, b: int) -> int:
    """Get the modulus of two numbers.

    Args:
        a: first int
        b: second int
    """
    return a % b

@tool
def wiki_search(query: str) -> dict:
    """Search Wikipedia for a query and return a maximum of 2 results.

    Args:
        query: The search query."""
    search_docs = WikipediaLoader(query=query, load_max_docs=2).load()
    formatted_search_docs = "\n\n---\n\n".join(
        [
            f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
            for doc in search_docs
        ])
    return {"wiki_results": formatted_search_docs}

@tool
def web_search(query: str) -> dict:
    """Search Tavily for a query and return a maximum of 3 results.

    Args:
        query: The search query."""
    # TavilySearchResults returns a list of dicts with "url" and "content" keys,
    # not Document objects, so index into each result directly.
    search_docs = TavilySearchResults(max_results=3).invoke(query)
    formatted_search_docs = "\n\n---\n\n".join(
        [
            f'<Document source="{doc["url"]}"/>\n{doc["content"]}\n</Document>'
            for doc in search_docs
        ])
    return {"web_results": formatted_search_docs}

@tool
def arvix_search(query: str) -> dict:
    """Search arXiv for a query and return a maximum of 3 results.

    Args:
        query: The search query."""
    search_docs = ArxivLoader(query=query, load_max_docs=3).load()
    formatted_search_docs = "\n\n---\n\n".join(
        [
            f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content[:1000]}\n</Document>'
            for doc in search_docs
        ])
    return {"arvix_results": formatted_search_docs}


# load the system prompt from the file
with open("system_prompt.txt", "r", encoding="utf-8") as f:
    system_prompt = f.read()

# System message
sys_msg = SystemMessage(content=system_prompt)

# build a retriever
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")  # dim=768
supabase: Client = create_client(
    os.environ.get("SUPABASE_URL"),
    os.environ.get("SUPABASE_SERVICE_KEY"))
vector_store = SupabaseVectorStore(
    client=supabase,
    embedding=embeddings,
    table_name="documents",
    query_name="match_documents_langchain",
)
retriever_tool = create_retriever_tool(  # renamed so it no longer shadows the imported factory
    retriever=vector_store.as_retriever(),
    name="Question Search",
    description="A tool to retrieve similar questions from a vector store.",
)

tools = [
    multiply,
    add,
    subtract,
    divide,
    modulus,
    wiki_search,
    web_search,
    arvix_search,
]

# Build graph function
def build_graph(provider: str = "groq"):
    """Build the graph"""
    if provider == "google":
        # Google Gemini
        llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
    elif provider == "groq":
        # Groq https://console.groq.com/docs/models
        llm = ChatGroq(model="qwen-qwq-32b", temperature=0)  # alternatives: qwen-qwq-32b, gemma2-9b-it
    elif provider == "huggingface":
        llm = ChatHuggingFace(
            llm=HuggingFaceEndpoint(
                endpoint_url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
                temperature=0,
            ),
        )
    else:
        raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
    # Bind tools to LLM
    llm_with_tools = llm.bind_tools(tools)

    # Node
    def assistant(state: MessagesState):
        """Assistant node"""
        return {"messages": [llm_with_tools.invoke(state["messages"])]}

    def retriever(state: MessagesState):
        """Retriever node: prepend the system message and a similar solved question."""
        similar_question = vector_store.similarity_search(state["messages"][0].content)
        example_msg = HumanMessage(
            content=f"Here I provide a similar question and answer for reference: \n\n{similar_question[0].page_content}",
        )
        return {"messages": [sys_msg] + state["messages"] + [example_msg]}

    builder = StateGraph(MessagesState)
    builder.add_node("retriever", retriever)
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))
    builder.add_edge(START, "retriever")
    builder.add_edge("retriever", "assistant")
    builder.add_conditional_edges(
        "assistant",
        tools_condition,
    )
    builder.add_edge("tools", "assistant")

    # Compile graph
    return builder.compile()

# test
if __name__ == "__main__":
    question = "When was a picture of St. Thomas Aquinas first added to the Wikipedia page on the Principle of double effect?"
    # Build the graph
    graph = build_graph(provider="groq")
    # Run the graph
    messages = [HumanMessage(content=question)]
    messages = graph.invoke({"messages": messages})
    for m in messages["messages"]:
        m.pretty_print()
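agent.py reads every credential through load_dotenv(), so the Space (or a local checkout) needs matching secrets. A minimal .env sketch follows: SUPABASE_URL and SUPABASE_SERVICE_KEY appear verbatim in the code above, while the other key names are assumptions based on the defaults the respective client libraries look up.

# .env — sketch only, values elided
SUPABASE_URL=...              # read by create_client() in agent.py
SUPABASE_SERVICE_KEY=...      # read by create_client() in agent.py
GROQ_API_KEY=...              # assumed default key name for ChatGroq
GOOGLE_API_KEY=...            # assumed default key name for ChatGoogleGenerativeAI
TAVILY_API_KEY=...            # assumed default key name for TavilySearchResults
HUGGINGFACEHUB_API_TOKEN=...  # assumed default key name for HuggingFaceEndpoint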
app.py
ADDED
@@ -0,0 +1,207 @@
"""Basic Agent Evaluation Runner"""
import os
import gradio as gr
import requests
import pandas as pd
from langchain_core.messages import HumanMessage
from agent import build_graph


# (Keep Constants as is)
# --- Constants ---
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"

# --- Basic Agent Definition ---
# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------


class BasicAgent:
    """A langgraph agent."""
    def __init__(self):
        print("BasicAgent initialized.")
        self.graph = build_graph()

    def __call__(self, question: str) -> str:
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        # Wrap the question in a HumanMessage from langchain_core
        messages = [HumanMessage(content=question)]
        messages = self.graph.invoke({"messages": messages})
        answer = messages['messages'][-1].content
        # Strip the 14-character "FINAL ANSWER: " prefix that the system prompt's
        # answer format produces, instead of slicing blindly.
        if answer[:14].lower() == "final answer: ":
            answer = answer[14:]
        return answer


def run_and_submit_all(profile: gr.OAuthProfile | None):
    """
    Fetches all questions, runs the BasicAgent on them, submits all answers,
    and displays the results.
    """
    # --- Determine HF Space Runtime URL and Repo URL ---
    space_id = os.getenv("SPACE_ID")  # Get the SPACE_ID for sending a link to the code

    if profile:
        username = f"{profile.username}"
        print(f"User logged in: {username}")
    else:
        print("User not logged in.")
        return "Please login to Hugging Face with the button.", None

    api_url = DEFAULT_API_URL
    questions_url = f"{api_url}/questions"
    submit_url = f"{api_url}/submit"

    # 1. Instantiate Agent (modify this part to create your agent)
    try:
        agent = BasicAgent()
    except Exception as e:
        print(f"Error instantiating agent: {e}")
        return f"Error initializing agent: {e}", None
    # When the app runs as a Hugging Face Space, this link points to your codebase (useful for others, so please keep it public)
    agent_code = f"https://huggingface.co/spaces/{space_id}/tree/main"
    print(agent_code)

    # 2. Fetch Questions
    print(f"Fetching questions from: {questions_url}")
    try:
        response = requests.get(questions_url, timeout=15)
        response.raise_for_status()
        questions_data = response.json()
        if not questions_data:
            print("Fetched questions list is empty.")
            return "Fetched questions list is empty or invalid format.", None
        print(f"Fetched {len(questions_data)} questions.")
    except requests.exceptions.RequestException as e:
        print(f"Error fetching questions: {e}")
        return f"Error fetching questions: {e}", None
    except requests.exceptions.JSONDecodeError as e:
        print(f"Error decoding JSON response from questions endpoint: {e}")
        print(f"Response text: {response.text[:500]}")
        return f"Error decoding server response for questions: {e}", None
    except Exception as e:
        print(f"An unexpected error occurred fetching questions: {e}")
        return f"An unexpected error occurred fetching questions: {e}", None

    # 3. Run your Agent
    results_log = []
    answers_payload = []
    print(f"Running agent on {len(questions_data)} questions...")
    for item in questions_data:
        task_id = item.get("task_id")
        question_text = item.get("question")
        if not task_id or question_text is None:
            print(f"Skipping item with missing task_id or question: {item}")
            continue
        try:
            submitted_answer = agent(question_text)
            answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
            results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
        except Exception as e:
            print(f"Error running agent on task {task_id}: {e}")
            results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": f"AGENT ERROR: {e}"})

    if not answers_payload:
        print("Agent did not produce any answers to submit.")
        return "Agent did not produce any answers to submit.", pd.DataFrame(results_log)

    # 4. Prepare Submission
    submission_data = {"username": username.strip(), "agent_code": agent_code, "answers": answers_payload}
    status_update = f"Agent finished. Submitting {len(answers_payload)} answers for user '{username}'..."
    print(status_update)

    # 5. Submit
    print(f"Submitting {len(answers_payload)} answers to: {submit_url}")
    try:
        response = requests.post(submit_url, json=submission_data, timeout=60)
        response.raise_for_status()
        result_data = response.json()
        final_status = (
            f"Submission Successful!\n"
            f"User: {result_data.get('username')}\n"
            f"Overall Score: {result_data.get('score', 'N/A')}% "
            f"({result_data.get('correct_count', '?')}/{result_data.get('total_attempted', '?')} correct)\n"
            f"Message: {result_data.get('message', 'No message received.')}"
        )
        print("Submission successful.")
        results_df = pd.DataFrame(results_log)
        return final_status, results_df
    except requests.exceptions.HTTPError as e:
        error_detail = f"Server responded with status {e.response.status_code}."
        try:
            error_json = e.response.json()
            error_detail += f" Detail: {error_json.get('detail', e.response.text)}"
        except requests.exceptions.JSONDecodeError:
            error_detail += f" Response: {e.response.text[:500]}"
        status_message = f"Submission Failed: {error_detail}"
        print(status_message)
        results_df = pd.DataFrame(results_log)
        return status_message, results_df
    except requests.exceptions.Timeout:
        status_message = "Submission Failed: The request timed out."
        print(status_message)
        results_df = pd.DataFrame(results_log)
        return status_message, results_df
    except requests.exceptions.RequestException as e:
        status_message = f"Submission Failed: Network error - {e}"
        print(status_message)
        results_df = pd.DataFrame(results_log)
        return status_message, results_df
    except Exception as e:
        status_message = f"An unexpected error occurred during submission: {e}"
        print(status_message)
        results_df = pd.DataFrame(results_log)
        return status_message, results_df


# --- Build Gradio Interface using Blocks ---
with gr.Blocks() as demo:
    gr.Markdown("# Basic Agent Evaluation Runner")
    gr.Markdown(
        """
        **Instructions:**
        1. Please clone this space, then modify the code to define your agent's logic, the tools, the necessary packages, etc.
        2. Log in to your Hugging Face account using the button below. This uses your HF username for submission.
        3. Click 'Run Evaluation & Submit All Answers' to fetch questions, run your agent, submit answers, and see the score.
        ---
        **Disclaimers:**
        Once you click the submit button, it can take quite some time (this is the time the agent needs to go through all the questions).
        This space provides a basic setup and is intentionally sub-optimal to encourage you to develop your own, more robust solution. For instance, to avoid the long-running submit button, you could cache the answers and submit them in a separate action, or even answer the questions asynchronously.
        """
    )

    gr.LoginButton()

    run_button = gr.Button("Run Evaluation & Submit All Answers")

    status_output = gr.Textbox(label="Run Status / Submission Result", lines=5, interactive=False)
    # Removed max_rows=10 from DataFrame constructor
    results_table = gr.DataFrame(label="Questions and Agent Answers", wrap=True)

    run_button.click(
        fn=run_and_submit_all,
        outputs=[status_output, results_table]
    )

if __name__ == "__main__":
    print("\n" + "-"*30 + " App Starting " + "-"*30)
    # Check for SPACE_HOST and SPACE_ID at startup for information
    space_host_startup = os.getenv("SPACE_HOST")
    space_id_startup = os.getenv("SPACE_ID")  # Get SPACE_ID at startup

    if space_host_startup:
        print(f"✅ SPACE_HOST found: {space_host_startup}")
        print(f"   Runtime URL should be: https://{space_host_startup}.hf.space")
    else:
        print("ℹ️ SPACE_HOST environment variable not found (running locally?).")

    if space_id_startup:  # Print repo URLs if SPACE_ID is found
        print(f"✅ SPACE_ID found: {space_id_startup}")
        print(f"   Repo URL: https://huggingface.co/spaces/{space_id_startup}")
        print(f"   Repo Tree URL: https://huggingface.co/spaces/{space_id_startup}/tree/main")
    else:
        print("ℹ️ SPACE_ID environment variable not found (running locally?). Repo URL cannot be determined.")

    print("-"*(60 + len(" App Starting ")) + "\n")

    print("Launching Gradio Interface for Basic Agent Evaluation...")
    demo.launch(debug=True, share=False)
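The disclaimer above points at the long, blocking question loop; one way to act on its suggestion is to answer the questions concurrently and submit once at the end. A minimal sketch, assuming the same BasicAgent interface and questions_data shape used in app.py (a thread pool is used because graph.invoke is synchronous; answer_all is a hypothetical helper, not part of app.py):

# Sketch: answer all questions in parallel threads, then submit once.
from concurrent.futures import ThreadPoolExecutor

def answer_all(agent, questions_data, max_workers=4):
    def answer_one(item):
        try:
            return {"task_id": item["task_id"],
                    "submitted_answer": agent(item["question"])}
        except Exception as e:
            return {"task_id": item["task_id"],
                    "submitted_answer": f"AGENT ERROR: {e}"}
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        # map preserves question order, so the payload lines up with questions_data
        return list(pool.map(answer_one, questions_data))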
gitattributes
ADDED
@@ -0,0 +1,35 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
gitignore
ADDED
@@ -0,0 +1,116 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# Virtual environments
venv/
ENV/
env/
.env
.venv
env.bak/
venv.bak/
.python-version

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
pytest-*.xml

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# Logs
*.log
logs/
log/

# IDE specific files
.idea/
.vscode/
*.swp
*.swo
*~
.DS_Store
.project
.pydevproject
.settings/
.vs/
*.sublime-project
*.sublime-workspace

# Database
*.db
*.rdb
*.sqlite
*.sqlite3

# Environment variables
.env
.env.local
.env.development.local
.env.test.local
.env.production.local

# macOS specific
.DS_Store
.AppleDouble
.LSOverride
Icon
._*
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent

# AI/model files
*.h5
*.pb
*.onnx
*.tflite
*.pt
*.pth
*.weights

# Temporary files
tmp/
temp/
.tmp
*.tmp
metadata.jsonl
ADDED
(The diff for this file is too large to render.)
requirements.txt
ADDED
@@ -0,0 +1,18 @@
gradio
requests
langchain
langchain-community
langchain-core
langchain-google-genai
langchain-huggingface
langchain-groq
langchain-tavily
langchain-chroma
langgraph
huggingface_hub
supabase
arxiv
pymupdf
wikipedia
pgvector
python-dotenv
supabase_docs.csv
ADDED
(The diff for this file is too large to render.)
system_prompt.txt
ADDED
@@ -0,0 +1,28 @@
You are a helpful assistant tasked with answering questions using a set of tools.
If the tool is not available, you can try to find the information online. You can also use your own knowledge to answer the question.
You need to provide a step-by-step explanation of how you arrived at the answer.
==========================
Here are a few examples showing you how to answer the question step by step.

Question 1: As of May 2023, how many stops are between South Station and Windsor Gardens on MBTA’s Franklin-Foxboro line (not included)?
Steps:
1. Search the web for “MBTA Franklin Foxboro line”.
2. Click on top result, on the MBTA website.
3. Scroll down on the list of stops, and count the current stops between South Station and Windsor Gardens.
4. Click the “Schedule & Maps” tab to view a map of the route.
5. Examine the map to confirm that the order of stops is the same as on the listing of stops.
6. Return to web search.
7. Click on Wikipedia article for Franklin line.
8. Read the article to check whether any stops were added or removed since the date given in the question.
9. Search the web for “MBTA Franklin Foxboro Line changes”.
10. Click News tab.
11. Click article about rail schedule changes.
12. Confirm that none of the changes affect the answer to the question.
Tools:
1. Search engine
2. Web browser
Final Answer: 10

==========================
Now, please answer the following question step by step.
test.ipynb
ADDED
@@ -0,0 +1,940 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "markdown",
|
5 |
+
"id": "d0cc4adf",
|
6 |
+
"metadata": {},
|
7 |
+
"source": [
|
8 |
+
"### Question data"
|
9 |
+
]
|
10 |
+
},
|
11 |
+
{
|
12 |
+
"cell_type": "code",
|
13 |
+
"execution_count": 1,
|
14 |
+
"id": "14e3f417",
|
15 |
+
"metadata": {},
|
16 |
+
"outputs": [],
|
17 |
+
"source": [
|
18 |
+
"# Load metadata.jsonl\n",
|
19 |
+
"import json\n",
|
20 |
+
"# Load the metadata.jsonl file\n",
|
21 |
+
"with open('metadata.jsonl', 'r') as jsonl_file:\n",
|
22 |
+
" json_list = list(jsonl_file)\n",
|
23 |
+
"\n",
|
24 |
+
"json_QA = []\n",
|
25 |
+
"for json_str in json_list:\n",
|
26 |
+
" json_data = json.loads(json_str)\n",
|
27 |
+
" json_QA.append(json_data)"
|
28 |
+
]
|
29 |
+
},
|
30 |
+
{
|
31 |
+
"cell_type": "code",
|
32 |
+
"execution_count": 2,
|
33 |
+
"id": "5e2da6fc",
|
34 |
+
"metadata": {},
|
35 |
+
"outputs": [
|
36 |
+
{
|
37 |
+
"name": "stdout",
|
38 |
+
"output_type": "stream",
|
39 |
+
"text": [
|
40 |
+
"==================================================\n",
|
41 |
+
"Task ID: db4fd70a-2d37-40ea-873f-9433dc5e301f\n",
|
42 |
+
"Question: As of May 2023, how many stops are between South Station and Windsor Gardens on MBTA’s Franklin-Foxboro line (not included)?\n",
|
43 |
+
"Level: 2\n",
|
44 |
+
"Final Answer: 10\n",
|
45 |
+
"Annotator Metadata: \n",
|
46 |
+
" ├── Steps: \n",
|
47 |
+
" │ ├── 1. Search the web for “MBTA Franklin Foxboro line”.\n",
|
48 |
+
" │ ├── 2. Click on top result, on the MBTA website.\n",
|
49 |
+
" │ ├── 3. Scroll down on the list of stops, and count the current stops between South Station and Windsor Gardens.\n",
|
50 |
+
" │ ├── 4. Click the “Schedule & Maps” tab to view a map of the route.\n",
|
51 |
+
" │ ├── 5. Examine the map to confirm that the order of stops is the same as on the listing of stops.\n",
|
52 |
+
" │ ├── 6. Return to web search.\n",
|
53 |
+
" │ ├── 7. Click on Wikipedia article for Franklin line.\n",
|
54 |
+
" │ ├── 8. Read the article to check whether any stops were added or removed since the date given in the question.\n",
|
55 |
+
" │ ├── 9. Search the web for “MBTA Franklin Foxboro Line changes”.\n",
|
56 |
+
" │ ├── 10. Click News tab.\n",
|
57 |
+
" │ ├── 11. Click article about rail schedule changes.\n",
|
58 |
+
" │ ├── 12. Confirm that none of the changes affect the answer to the question.\n",
|
59 |
+
" ├── Number of steps: 12\n",
|
60 |
+
" ├── How long did this take?: 5-10 minutes\n",
|
61 |
+
" ├── Tools:\n",
|
62 |
+
" │ ├── 1. Search engine\n",
|
63 |
+
" │ ├── 2. Web browser\n",
|
64 |
+
" └── Number of tools: 2\n",
|
65 |
+
"==================================================\n"
|
66 |
+
]
|
67 |
+
}
|
68 |
+
],
|
69 |
+
"source": [
|
70 |
+
"# randomly select 3 samples\n",
|
71 |
+
"# {\"task_id\": \"c61d22de-5f6c-4958-a7f6-5e9707bd3466\", \"Question\": \"A paper about AI regulation that was originally submitted to arXiv.org in June 2022 shows a figure with three axes, where each axis has a label word at both ends. Which of these words is used to describe a type of society in a Physics and Society article submitted to arXiv.org on August 11, 2016?\", \"Level\": 2, \"Final answer\": \"egalitarian\", \"file_name\": \"\", \"Annotator Metadata\": {\"Steps\": \"1. Go to arxiv.org and navigate to the Advanced Search page.\\n2. Enter \\\"AI regulation\\\" in the search box and select \\\"All fields\\\" from the dropdown.\\n3. Enter 2022-06-01 and 2022-07-01 into the date inputs, select \\\"Submission date (original)\\\", and submit the search.\\n4. Go through the search results to find the article that has a figure with three axes and labels on each end of the axes, titled \\\"Fairness in Agreement With European Values: An Interdisciplinary Perspective on AI Regulation\\\".\\n5. Note the six words used as labels: deontological, egalitarian, localized, standardized, utilitarian, and consequential.\\n6. Go back to arxiv.org\\n7. Find \\\"Physics and Society\\\" and go to the page for the \\\"Physics and Society\\\" category.\\n8. Note that the tag for this category is \\\"physics.soc-ph\\\".\\n9. Go to the Advanced Search page.\\n10. Enter \\\"physics.soc-ph\\\" in the search box and select \\\"All fields\\\" from the dropdown.\\n11. Enter 2016-08-11 and 2016-08-12 into the date inputs, select \\\"Submission date (original)\\\", and submit the search.\\n12. Search for instances of the six words in the results to find the paper titled \\\"Phase transition from egalitarian to hierarchical societies driven by competition between cognitive and social constraints\\\", indicating that \\\"egalitarian\\\" is the correct answer.\", \"Number of steps\": \"12\", \"How long did this take?\": \"8 minutes\", \"Tools\": \"1. Web browser\\n2. Image recognition tools (to identify and parse a figure with three axes)\", \"Number of tools\": \"2\"}}\n",
|
72 |
+
"\n",
|
73 |
+
"import random\n",
|
74 |
+
"# random.seed(42)\n",
|
75 |
+
"random_samples = random.sample(json_QA, 1)\n",
|
76 |
+
"for sample in random_samples:\n",
|
77 |
+
" print(\"=\" * 50)\n",
|
78 |
+
" print(f\"Task ID: {sample['task_id']}\")\n",
|
79 |
+
" print(f\"Question: {sample['Question']}\")\n",
|
80 |
+
" print(f\"Level: {sample['Level']}\")\n",
|
81 |
+
" print(f\"Final Answer: {sample['Final answer']}\")\n",
|
82 |
+
" print(f\"Annotator Metadata: \")\n",
|
83 |
+
" print(f\" ├── Steps: \")\n",
|
84 |
+
" for step in sample['Annotator Metadata']['Steps'].split('\\n'):\n",
|
85 |
+
" print(f\" │ ├── {step}\")\n",
|
86 |
+
" print(f\" ├── Number of steps: {sample['Annotator Metadata']['Number of steps']}\")\n",
|
87 |
+
" print(f\" ├── How long did this take?: {sample['Annotator Metadata']['How long did this take?']}\")\n",
|
88 |
+
" print(f\" ├── Tools:\")\n",
|
89 |
+
" for tool in sample['Annotator Metadata']['Tools'].split('\\n'):\n",
|
90 |
+
" print(f\" │ ├── {tool}\")\n",
|
91 |
+
" print(f\" └── Number of tools: {sample['Annotator Metadata']['Number of tools']}\")\n",
|
92 |
+
"print(\"=\" * 50)"
|
93 |
+
]
|
94 |
+
},
|
95 |
+
{
|
96 |
+
"cell_type": "code",
|
97 |
+
"execution_count": 10,
|
98 |
+
"id": "4bb02420",
|
99 |
+
"metadata": {},
|
100 |
+
"outputs": [
|
101 |
+
{
|
102 |
+
"name": "stdout",
|
103 |
+
"output_type": "stream",
|
104 |
+
"text": [
|
105 |
+
"Requirement already satisfied: langchain-huggingface in /opt/anaconda3/lib/python3.12/site-packages (0.2.0)\n",
|
106 |
+
"Requirement already satisfied: langchain-core<1.0.0,>=0.3.59 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-huggingface) (0.3.60)\n",
|
107 |
+
"Requirement already satisfied: tokenizers>=0.19.1 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-huggingface) (0.21.1)\n",
|
108 |
+
"Requirement already satisfied: transformers>=4.39.0 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-huggingface) (4.51.3)\n",
|
109 |
+
"Requirement already satisfied: sentence-transformers>=2.6.0 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-huggingface) (4.1.0)\n",
|
110 |
+
"Requirement already satisfied: huggingface-hub>=0.30.2 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-huggingface) (0.31.4)\n",
|
111 |
+
"Requirement already satisfied: langsmith<0.4,>=0.1.126 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (0.3.42)\n",
|
112 |
+
"Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (8.2.3)\n",
|
113 |
+
"Requirement already satisfied: jsonpatch<2.0,>=1.33 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (1.33)\n",
|
114 |
+
"Requirement already satisfied: PyYAML>=5.3 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (6.0.1)\n",
|
115 |
+
"Requirement already satisfied: packaging<25,>=23.2 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (24.1)\n",
|
116 |
+
"Requirement already satisfied: typing-extensions>=4.7 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (4.13.2)\n",
|
117 |
+
"Requirement already satisfied: pydantic>=2.7.4 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (2.11.4)\n",
|
118 |
+
"Requirement already satisfied: jsonpointer>=1.9 in /opt/anaconda3/lib/python3.12/site-packages (from jsonpatch<2.0,>=1.33->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (2.1)\n",
|
119 |
+
"Requirement already satisfied: httpx<1,>=0.23.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (0.27.0)\n",
|
120 |
+
"Requirement already satisfied: orjson<4.0.0,>=3.9.14 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (3.10.18)\n",
|
121 |
+
"Requirement already satisfied: requests<3,>=2 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (2.32.3)\n",
|
122 |
+
"Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (1.0.0)\n",
|
123 |
+
"Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (0.23.0)\n",
|
124 |
+
"Requirement already satisfied: anyio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (4.2.0)\n",
|
125 |
+
"Requirement already satisfied: certifi in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (2025.4.26)\n",
|
126 |
+
"Requirement already satisfied: httpcore==1.* in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (1.0.2)\n",
|
127 |
+
"Requirement already satisfied: idna in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (3.7)\n",
|
128 |
+
"Requirement already satisfied: sniffio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (1.3.0)\n",
|
129 |
+
"Requirement already satisfied: h11<0.15,>=0.13 in /opt/anaconda3/lib/python3.12/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (0.14.0)\n",
|
130 |
+
"Requirement already satisfied: annotated-types>=0.6.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic>=2.7.4->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (0.6.0)\n",
|
131 |
+
"Requirement already satisfied: pydantic-core==2.33.2 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic>=2.7.4->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (2.33.2)\n",
|
132 |
+
"Requirement already satisfied: typing-inspection>=0.4.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic>=2.7.4->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (0.4.0)\n",
|
133 |
+
"Requirement already satisfied: charset-normalizer<4,>=2 in /opt/anaconda3/lib/python3.12/site-packages (from requests<3,>=2->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (3.3.2)\n",
|
134 |
+
"Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/anaconda3/lib/python3.12/site-packages (from requests<3,>=2->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (2.2.3)\n",
|
135 |
+
"Requirement already satisfied: filelock in /opt/anaconda3/lib/python3.12/site-packages (from huggingface-hub>=0.30.2->langchain-huggingface) (3.13.1)\n",
|
136 |
+
"Requirement already satisfied: fsspec>=2023.5.0 in /opt/anaconda3/lib/python3.12/site-packages (from huggingface-hub>=0.30.2->langchain-huggingface) (2024.6.1)\n",
|
137 |
+
"Requirement already satisfied: tqdm>=4.42.1 in /opt/anaconda3/lib/python3.12/site-packages (from huggingface-hub>=0.30.2->langchain-huggingface) (4.66.5)\n",
|
138 |
+
"Requirement already satisfied: torch>=1.11.0 in /opt/anaconda3/lib/python3.12/site-packages (from sentence-transformers>=2.6.0->langchain-huggingface) (2.7.0)\n",
|
139 |
+
"Requirement already satisfied: scikit-learn in /opt/anaconda3/lib/python3.12/site-packages (from sentence-transformers>=2.6.0->langchain-huggingface) (1.5.1)\n",
|
140 |
+
"Requirement already satisfied: scipy in /opt/anaconda3/lib/python3.12/site-packages (from sentence-transformers>=2.6.0->langchain-huggingface) (1.13.1)\n",
|
141 |
+
"Requirement already satisfied: Pillow in /opt/anaconda3/lib/python3.12/site-packages (from sentence-transformers>=2.6.0->langchain-huggingface) (10.4.0)\n",
|
142 |
+
"Requirement already satisfied: numpy>=1.17 in /opt/anaconda3/lib/python3.12/site-packages (from transformers>=4.39.0->langchain-huggingface) (1.26.4)\n",
|
143 |
+
"Requirement already satisfied: regex!=2019.12.17 in /opt/anaconda3/lib/python3.12/site-packages (from transformers>=4.39.0->langchain-huggingface) (2024.9.11)\n",
|
144 |
+
"Requirement already satisfied: safetensors>=0.4.3 in /opt/anaconda3/lib/python3.12/site-packages (from transformers>=4.39.0->langchain-huggingface) (0.5.3)\n",
|
145 |
+
"Requirement already satisfied: setuptools in /opt/anaconda3/lib/python3.12/site-packages (from torch>=1.11.0->sentence-transformers>=2.6.0->langchain-huggingface) (75.1.0)\n",
|
146 |
+
"Requirement already satisfied: sympy>=1.13.3 in /opt/anaconda3/lib/python3.12/site-packages (from torch>=1.11.0->sentence-transformers>=2.6.0->langchain-huggingface) (1.14.0)\n",
|
147 |
+
"Requirement already satisfied: networkx in /opt/anaconda3/lib/python3.12/site-packages (from torch>=1.11.0->sentence-transformers>=2.6.0->langchain-huggingface) (3.3)\n",
|
148 |
+
"Requirement already satisfied: jinja2 in /opt/anaconda3/lib/python3.12/site-packages (from torch>=1.11.0->sentence-transformers>=2.6.0->langchain-huggingface) (3.1.4)\n",
|
149 |
+
"Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/anaconda3/lib/python3.12/site-packages (from sympy>=1.13.3->torch>=1.11.0->sentence-transformers>=2.6.0->langchain-huggingface) (1.3.0)\n",
|
150 |
+
"Requirement already satisfied: MarkupSafe>=2.0 in /opt/anaconda3/lib/python3.12/site-packages (from jinja2->torch>=1.11.0->sentence-transformers>=2.6.0->langchain-huggingface) (2.1.3)\n",
|
151 |
+
"Requirement already satisfied: joblib>=1.2.0 in /opt/anaconda3/lib/python3.12/site-packages (from scikit-learn->sentence-transformers>=2.6.0->langchain-huggingface) (1.4.2)\n",
|
152 |
+
"Requirement already satisfied: threadpoolctl>=3.1.0 in /opt/anaconda3/lib/python3.12/site-packages (from scikit-learn->sentence-transformers>=2.6.0->langchain-huggingface) (3.5.0)\n",
|
153 |
+
"Note: you may need to restart the kernel to use updated packages.\n",
|
154 |
+
"Requirement already satisfied: supabase in /opt/anaconda3/lib/python3.12/site-packages (2.15.1)\n",
|
155 |
+
"Requirement already satisfied: gotrue<3.0.0,>=2.11.0 in /opt/anaconda3/lib/python3.12/site-packages (from supabase) (2.12.0)\n",
|
156 |
+
"Requirement already satisfied: httpx<0.29,>=0.26 in /opt/anaconda3/lib/python3.12/site-packages (from supabase) (0.27.0)\n",
|
157 |
+
"Requirement already satisfied: postgrest<1.1,>0.19 in /opt/anaconda3/lib/python3.12/site-packages (from supabase) (1.0.2)\n",
|
158 |
+
"Requirement already satisfied: realtime<2.5.0,>=2.4.0 in /opt/anaconda3/lib/python3.12/site-packages (from supabase) (2.4.3)\n",
|
159 |
+
"Requirement already satisfied: storage3<0.12,>=0.10 in /opt/anaconda3/lib/python3.12/site-packages (from supabase) (0.11.3)\n",
|
160 |
+
"Requirement already satisfied: supafunc<0.10,>=0.9 in /opt/anaconda3/lib/python3.12/site-packages (from supabase) (0.9.4)\n",
|
161 |
+
"Requirement already satisfied: pydantic<3,>=1.10 in /opt/anaconda3/lib/python3.12/site-packages (from gotrue<3.0.0,>=2.11.0->supabase) (2.11.4)\n",
|
162 |
+
"Requirement already satisfied: pyjwt<3.0.0,>=2.10.1 in /opt/anaconda3/lib/python3.12/site-packages (from gotrue<3.0.0,>=2.11.0->supabase) (2.10.1)\n",
|
163 |
+
"Requirement already satisfied: pytest-mock<4.0.0,>=3.14.0 in /opt/anaconda3/lib/python3.12/site-packages (from gotrue<3.0.0,>=2.11.0->supabase) (3.14.0)\n",
|
164 |
+
"Requirement already satisfied: anyio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<0.29,>=0.26->supabase) (4.2.0)\n",
|
165 |
+
"Requirement already satisfied: certifi in /opt/anaconda3/lib/python3.12/site-packages (from httpx<0.29,>=0.26->supabase) (2025.4.26)\n",
|
166 |
+
"Requirement already satisfied: httpcore==1.* in /opt/anaconda3/lib/python3.12/site-packages (from httpx<0.29,>=0.26->supabase) (1.0.2)\n",
|
167 |
+
"Requirement already satisfied: idna in /opt/anaconda3/lib/python3.12/site-packages (from httpx<0.29,>=0.26->supabase) (3.7)\n",
|
168 |
+
"Requirement already satisfied: sniffio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<0.29,>=0.26->supabase) (1.3.0)\n",
|
169 |
+
"Requirement already satisfied: h11<0.15,>=0.13 in /opt/anaconda3/lib/python3.12/site-packages (from httpcore==1.*->httpx<0.29,>=0.26->supabase) (0.14.0)\n",
|
170 |
+
"Requirement already satisfied: h2<5,>=3 in /opt/anaconda3/lib/python3.12/site-packages (from httpx[http2]<0.29,>=0.26->gotrue<3.0.0,>=2.11.0->supabase) (4.2.0)\n",
|
171 |
+
"Requirement already satisfied: hyperframe<7,>=6.1 in /opt/anaconda3/lib/python3.12/site-packages (from h2<5,>=3->httpx[http2]<0.29,>=0.26->gotrue<3.0.0,>=2.11.0->supabase) (6.1.0)\n",
|
172 |
+
"Requirement already satisfied: hpack<5,>=4.1 in /opt/anaconda3/lib/python3.12/site-packages (from h2<5,>=3->httpx[http2]<0.29,>=0.26->gotrue<3.0.0,>=2.11.0->supabase) (4.1.0)\n",
|
173 |
+
"Requirement already satisfied: deprecation<3.0.0,>=2.1.0 in /opt/anaconda3/lib/python3.12/site-packages (from postgrest<1.1,>0.19->supabase) (2.1.0)\n",
|
174 |
+
"Requirement already satisfied: packaging in /opt/anaconda3/lib/python3.12/site-packages (from deprecation<3.0.0,>=2.1.0->postgrest<1.1,>0.19->supabase) (24.1)\n",
|
175 |
+
"Requirement already satisfied: annotated-types>=0.6.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=1.10->gotrue<3.0.0,>=2.11.0->supabase) (0.6.0)\n",
|
176 |
+
"Requirement already satisfied: pydantic-core==2.33.2 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=1.10->gotrue<3.0.0,>=2.11.0->supabase) (2.33.2)\n",
|
177 |
+
"Requirement already satisfied: typing-extensions>=4.12.2 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=1.10->gotrue<3.0.0,>=2.11.0->supabase) (4.13.2)\n",
|
178 |
+
"Requirement already satisfied: typing-inspection>=0.4.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=1.10->gotrue<3.0.0,>=2.11.0->supabase) (0.4.0)\n",
|
179 |
+
"Requirement already satisfied: pytest>=6.2.5 in /opt/anaconda3/lib/python3.12/site-packages (from pytest-mock<4.0.0,>=3.14.0->gotrue<3.0.0,>=2.11.0->supabase) (7.4.4)\n",
|
180 |
+
"Requirement already satisfied: aiohttp<4.0.0,>=3.11.18 in /opt/anaconda3/lib/python3.12/site-packages (from realtime<2.5.0,>=2.4.0->supabase) (3.11.18)\n",
|
181 |
+
"Requirement already satisfied: python-dateutil<3.0.0,>=2.8.1 in /opt/anaconda3/lib/python3.12/site-packages (from realtime<2.5.0,>=2.4.0->supabase) (2.9.0.post0)\n",
|
182 |
+
"Requirement already satisfied: websockets<15,>=11 in /opt/anaconda3/lib/python3.12/site-packages (from realtime<2.5.0,>=2.4.0->supabase) (14.2)\n",
|
183 |
+
"Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (2.4.0)\n",
|
184 |
+
"Requirement already satisfied: aiosignal>=1.1.2 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (1.2.0)\n",
|
185 |
+
"Requirement already satisfied: attrs>=17.3.0 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (23.1.0)\n",
|
186 |
+
"Requirement already satisfied: frozenlist>=1.1.1 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (1.4.0)\n",
|
187 |
+
"Requirement already satisfied: multidict<7.0,>=4.5 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (6.0.4)\n",
|
188 |
+
"Requirement already satisfied: propcache>=0.2.0 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (0.3.1)\n",
|
189 |
+
"Requirement already satisfied: yarl<2.0,>=1.17.0 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (1.20.0)\n",
|
190 |
+
"Requirement already satisfied: six>=1.5 in /opt/anaconda3/lib/python3.12/site-packages (from python-dateutil<3.0.0,>=2.8.1->realtime<2.5.0,>=2.4.0->supabase) (1.16.0)\n",
|
191 |
+
"Requirement already satisfied: strenum<0.5.0,>=0.4.15 in /opt/anaconda3/lib/python3.12/site-packages (from supafunc<0.10,>=0.9->supabase) (0.4.15)\n",
|
192 |
+
"Requirement already satisfied: iniconfig in /opt/anaconda3/lib/python3.12/site-packages (from pytest>=6.2.5->pytest-mock<4.0.0,>=3.14.0->gotrue<3.0.0,>=2.11.0->supabase) (1.1.1)\n",
|
193 |
+
"Requirement already satisfied: pluggy<2.0,>=0.12 in /opt/anaconda3/lib/python3.12/site-packages (from pytest>=6.2.5->pytest-mock<4.0.0,>=3.14.0->gotrue<3.0.0,>=2.11.0->supabase) (1.0.0)\n",
|
194 |
+
"Note: you may need to restart the kernel to use updated packages.\n"
|
195 |
+
]
|
196 |
+
}
|
197 |
+
],
|
198 |
+
"source": [
|
199 |
+
"%pip install langchain-huggingface\n",
|
200 |
+
"%pip install supabase\n",
|
201 |
+
"\n",
|
202 |
+
"### build a vector database based on the metadata.jsonl\n",
|
203 |
+
"# https://python.langchain.com/docs/integrations/vectorstores/supabase/\n",
|
204 |
+
"import os\n",
|
205 |
+
"from dotenv import load_dotenv\n",
|
206 |
+
"from langchain_huggingface import HuggingFaceEmbeddings\n",
|
207 |
+
"from langchain_community.vectorstores import SupabaseVectorStore\n",
|
208 |
+
"from supabase.client import Client, create_client\n",
|
209 |
+
"\n",
|
210 |
+
"\n",
|
211 |
+
"load_dotenv()\n",
|
212 |
+
"embeddings = HuggingFaceEmbeddings(model_name=\"sentence-transformers/all-mpnet-base-v2\") # dim=768\n",
|
213 |
+
"\n",
|
214 |
+
"supabase_url = \"https://vqscqyeakhfsvaqonbmu.supabase.co\"\n",
|
215 |
+
"supabase_key = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InZxc2NxeWVha2hmc3ZhcW9uYm11Iiwicm9sZSI6ImFub24iLCJpYXQiOjE3NDc4NjQwNDMsImV4cCI6MjA2MzQ0MDA0M30.kDTCuOGCuqPyZilqBu4kYEbUrOC42SAVThf3nrH8ypM\"\n",
|
216 |
+
"\n",
|
217 |
+
"if not supabase_url or not supabase_key:\n",
|
218 |
+
"\traise ValueError(\"SUPABASE_URL and SUPABASE_SERVICE_KEY must be set in your environment or .env file.\")\n",
|
219 |
+
"\n",
|
220 |
+
"supabase: Client = create_client(supabase_url, supabase_key)"
|
221 |
+
]
|
222 |
+
},
|
223 |
+
{
|
224 |
+
"cell_type": "code",
|
225 |
+
"execution_count": 42,
|
226 |
+
"id": "a070b955",
|
227 |
+
"metadata": {},
|
228 |
+
"outputs": [],
|
229 |
+
"source": [
|
230 |
+
"# wrap the metadata.jsonl's questions and answers into a list of document\n",
|
231 |
+
"from langchain.schema import Document\n",
|
232 |
+
"docs = []\n",
|
233 |
+
"for sample in json_QA:\n",
|
234 |
+
" content = f\"Question : {sample['Question']}\\n\\nFinal answer : {sample['Final answer']}\"\n",
|
235 |
+
" doc = {\n",
|
236 |
+
" \"content\" : content,\n",
|
237 |
+
" \"metadata\" : { # meatadata\n",
|
238 |
+
" \"source\" : sample['task_id']\n",
|
239 |
+
" },\n",
|
240 |
+
" \"embedding\" : embeddings.embed_query(content),\n",
|
241 |
+
" }\n",
|
242 |
+
" docs.append(doc)\n",
|
243 |
+
"\n",
|
244 |
+
"# upload the documents to the vector database\n",
|
245 |
+
"try:\n",
|
246 |
+
" response = (\n",
|
247 |
+
" supabase.table(\"documents\")\n",
|
248 |
+
" .insert(docs)\n",
|
249 |
+
" .execute()\n",
|
250 |
+
" )\n",
|
251 |
+
"except Exception as exception:\n",
|
252 |
+
" print(\"Error inserting data into Supabase:\", exception)\n",
|
253 |
+
"\n",
|
254 |
+
"# ALTERNATIVE : Save the documents (a list of dict) into a csv file, and manually upload it to Supabase\n",
|
255 |
+
"# import pandas as pd\n",
|
256 |
+
"# df = pd.DataFrame(docs)\n",
|
257 |
+
"# df.to_csv('supabase_docs.csv', index=False)"
|
258 |
+
]
|
259 |
+
},
|
260 |
+
{
|
261 |
+
"cell_type": "code",
|
262 |
+
"execution_count": null,
|
263 |
+
"id": "22c56a2f",
|
264 |
+
"metadata": {},
|
265 |
+
"outputs": [],
|
266 |
+
"source": []
|
267 |
+
},
|
268 |
+
{
|
269 |
+
"cell_type": "code",
|
270 |
+
"execution_count": null,
|
271 |
+
"id": "3c59150d",
|
272 |
+
"metadata": {},
|
273 |
+
"outputs": [],
|
274 |
+
"source": []
|
275 |
+
},
|
276 |
+
{
|
277 |
+
"cell_type": "code",
|
278 |
+
"execution_count": 18,
|
279 |
+
"id": "77fb9dbb",
|
280 |
+
"metadata": {},
|
281 |
+
"outputs": [],
|
282 |
+
"source": [
|
283 |
+
"# add items to vector database\n",
|
284 |
+
"vector_store = SupabaseVectorStore(\n",
|
285 |
+
" client=supabase,\n",
|
286 |
+
" embedding= embeddings,\n",
|
287 |
+
" table_name=\"documents\",\n",
|
288 |
+
" query_name=\"match_documents_langchain\",\n",
|
289 |
+
")\n",
|
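+
"# query_name must match the SQL function created in Supabase (see the SQL in the cell below)\n",
|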
290 |
+
"retriever = vector_store.as_retriever()"
|
291 |
+
]
|
292 |
+
},
|
293 |
+
{
|
294 |
+
"cell_type": "code",
|
295 |
+
"execution_count": 63,
|
296 |
+
"id": "12a05971",
|
297 |
+
"metadata": {},
|
298 |
+
"outputs": [
|
299 |
+
{
|
300 |
+
"ename": "IndexError",
|
301 |
+
"evalue": "list index out of range",
|
302 |
+
"output_type": "error",
|
303 |
+
"traceback": [
|
304 |
+
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
305 |
+
"\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)",
|
306 |
+
"Cell \u001b[0;32mIn[63], line 34\u001b[0m\n\u001b[1;32m 32\u001b[0m query \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mOn June 6, 2023, an article by Carolyn Collins Petersen was published in Universe Today. This article mentions a team that produced a paper about their observations, linked at the bottom of the article. Find this paper. Under what NASA award number was the work performed by R. G. Arendt supported by?\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 33\u001b[0m docs \u001b[38;5;241m=\u001b[39m retriever\u001b[38;5;241m.\u001b[39minvoke(query)\n\u001b[0;32m---> 34\u001b[0m docs[\u001b[38;5;241m0\u001b[39m]\n",
|
307 |
+
"\u001b[0;31mIndexError\u001b[0m: list index out of range"
|
308 |
+
]
|
309 |
+
}
|
310 |
+
],
|
311 |
+
"source": [
|
312 |
+
"# Before running this cell, make sure you have created the required function in your Supabase database.\n",
|
313 |
+
"# Run the following SQL in your Supabase SQL editor (replace 'documents' and 'embedding' if your table/column names differ):\n",
|
314 |
+
"\n",
|
315 |
+
"\"\"\"\n",
|
316 |
+
"create or replace function public.match_documents_langchain(\n",
|
317 |
+
"\tquery_embedding vector(768),\n",
|
318 |
+
"\tmatch_count int default null\n",
|
319 |
+
")\n",
|
320 |
+
"returns table (\n",
|
321 |
+
"\tcontent text,\n",
|
322 |
+
"\tmetadata text,\n",
|
323 |
+
"\tembedding vector(768),\n",
|
324 |
+
"\tsimilarity float\n",
|
325 |
+
")\n",
|
326 |
+
"language plpgsql\n",
|
327 |
+
"as $$\n",
|
328 |
+
"begin\n",
|
329 |
+
"\treturn query\n",
|
330 |
+
"\tselect\n",
|
331 |
+
"\t\tdocuments.content,\n",
|
332 |
+
"\t\tdocuments.metadata::text,\n",
|
333 |
+
"\t\tdocuments.embedding,\n",
|
334 |
+
"\t\t1 - (documents.embedding <=> query_embedding) as similarity\n",
|
335 |
+
"\tfrom documents\n",
|
336 |
+
"\torder by documents.embedding <=> query_embedding\n",
|
337 |
+
"\tlimit coalesce(match_count, 5);\n",
|
338 |
+
"end;\n",
|
339 |
+
"$$;\n",
|
340 |
+
"\"\"\"\n",
|
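+
"# `<=>` is pgvector's cosine-distance operator, so `1 - distance` above is the cosine similarity\n",
|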
341 |
+
"\n",
|
342 |
+
"# After creating the function, you can run your code:\n",
|
343 |
+
"query = \"On June 6, 2023, an article by Carolyn Collins Petersen was published in Universe Today. This article mentions a team that produced a paper about their observations, linked at the bottom of the article. Find this paper. Under what NASA award number was the work performed by R. G. Arendt supported by?\"\n",
|
344 |
+
"docs = retriever.invoke(query)\n",
|
345 |
+
"docs[0]"
|
346 |
+
]
|
347 |
+
},
|
348 |
+
{
|
349 |
+
"cell_type": "code",
|
350 |
+
"execution_count": 64,
|
351 |
+
"id": "1eae5ba4",
|
352 |
+
"metadata": {},
|
353 |
+
"outputs": [
|
354 |
+
{
|
355 |
+
"name": "stdout",
|
356 |
+
"output_type": "stream",
|
357 |
+
"text": [
|
358 |
+
"List of tools used in all samples:\n",
|
359 |
+
"Total number of tools used: 83\n",
|
360 |
+
" ├── web browser: 107\n",
|
361 |
+
" ├── image recognition tools (to identify and parse a figure with three axes): 1\n",
|
362 |
+
" ├── search engine: 101\n",
|
363 |
+
" ├── calculator: 34\n",
|
364 |
+
" ├── unlambda compiler (optional): 1\n",
|
365 |
+
" ├── a web browser.: 2\n",
|
366 |
+
" ├── a search engine.: 2\n",
|
367 |
+
" ├── a calculator.: 1\n",
|
368 |
+
" ├── microsoft excel: 5\n",
|
369 |
+
" ├── google search: 1\n",
|
370 |
+
" ├── ne: 9\n",
|
371 |
+
" ├── pdf access: 7\n",
|
372 |
+
" ├── file handling: 2\n",
|
373 |
+
" ├── python: 3\n",
|
374 |
+
" ├── image recognition tools: 12\n",
|
375 |
+
" ├── jsonld file access: 1\n",
|
376 |
+
" ├── video parsing: 1\n",
|
377 |
+
" ├── python compiler: 1\n",
|
378 |
+
" ├── video recognition tools: 3\n",
|
379 |
+
" ├── pdf viewer: 7\n",
|
380 |
+
" ├── microsoft excel / google sheets: 3\n",
|
381 |
+
" ├── word document access: 1\n",
|
382 |
+
" ├── tool to extract text from images: 1\n",
|
383 |
+
" ├── a word reversal tool / script: 1\n",
|
384 |
+
" ├── counter: 1\n",
|
385 |
+
" ├── excel: 3\n",
|
386 |
+
" ├── image recognition: 5\n",
|
387 |
+
" ├── color recognition: 3\n",
|
388 |
+
" ├── excel file access: 3\n",
|
389 |
+
" ├── xml file access: 1\n",
|
390 |
+
" ├── access to the internet archive, web.archive.org: 1\n",
|
391 |
+
" ├── text processing/diff tool: 1\n",
|
392 |
+
" ├── gif parsing tools: 1\n",
|
393 |
+
" ├── a web browser: 7\n",
|
394 |
+
" ├── a search engine: 7\n",
|
395 |
+
" ├── a speech-to-text tool: 2\n",
|
396 |
+
" ├── code/data analysis tools: 1\n",
|
397 |
+
" ├── audio capability: 2\n",
|
398 |
+
" ├── pdf reader: 1\n",
|
399 |
+
" ├── markdown: 1\n",
|
400 |
+
" ├── a calculator: 5\n",
|
401 |
+
" ├── access to wikipedia: 3\n",
|
402 |
+
" ├── image recognition/ocr: 3\n",
|
403 |
+
" ├── google translate access: 1\n",
|
404 |
+
" ├── ocr: 4\n",
|
405 |
+
" ├── bass note data: 1\n",
|
406 |
+
" ├── text editor: 1\n",
|
407 |
+
" ├── xlsx file access: 1\n",
|
408 |
+
" ├── powerpoint viewer: 1\n",
|
409 |
+
" ├── csv file access: 1\n",
|
410 |
+
" ├── calculator (or use excel): 1\n",
|
411 |
+
" ├── computer algebra system: 1\n",
|
412 |
+
" ├── video processing software: 1\n",
|
413 |
+
" ├── audio processing software: 1\n",
|
414 |
+
" ├── computer vision: 1\n",
|
415 |
+
" ├── google maps: 1\n",
|
416 |
+
" ├── access to excel files: 1\n",
|
417 |
+
" ├── calculator (or ability to count): 1\n",
|
418 |
+
" ├── a file interface: 3\n",
|
419 |
+
" ├── a python ide: 1\n",
|
420 |
+
" ├── spreadsheet editor: 1\n",
|
421 |
+
" ├── tools required: 1\n",
|
422 |
+
" ├── b browser: 1\n",
|
423 |
+
" ├── image recognition and processing tools: 1\n",
|
424 |
+
" ├── computer vision or ocr: 1\n",
|
425 |
+
" ├── c++ compiler: 1\n",
|
426 |
+
" ├── access to google maps: 1\n",
|
427 |
+
" ├── youtube player: 1\n",
|
428 |
+
" ├── natural language processor: 1\n",
|
429 |
+
" ├── graph interaction tools: 1\n",
|
430 |
+
" ├── bablyonian cuniform -> arabic legend: 1\n",
|
431 |
+
" ├── access to youtube: 1\n",
|
432 |
+
" ├── image search tools: 1\n",
|
433 |
+
" ├── calculator or counting function: 1\n",
|
434 |
+
" ├── a speech-to-text audio processing tool: 1\n",
|
435 |
+
" ├── access to academic journal websites: 1\n",
|
436 |
+
" ├── pdf reader/extracter: 1\n",
|
437 |
+
" ├── rubik's cube model: 1\n",
|
438 |
+
" ├── wikipedia: 1\n",
|
439 |
+
" ├── video capability: 1\n",
|
440 |
+
" ├── image processing tools: 1\n",
|
441 |
+
" ├── age recognition software: 1\n",
|
442 |
+
" ├── youtube: 1\n"
|
443 |
+
]
|
444 |
+
}
|
445 |
+
],
|
446 |
+
"source": [
|
447 |
+
"# list of the tools used in all the samples\n",
|
448 |
+
"from collections import Counter, OrderedDict\n",
|
449 |
+
"\n",
|
450 |
+
"tools = []\n",
|
451 |
+
"for sample in json_QA:\n",
|
452 |
+
" for tool in sample['Annotator Metadata']['Tools'].split('\\n'):\n",
|
453 |
+
" tool = tool[2:].strip().lower()\n",
|
454 |
+
" if tool.startswith(\"(\"):\n",
|
455 |
+
" tool = tool[11:].strip()\n",
|
456 |
+
" tools.append(tool)\n",
|
457 |
+
"tools_counter = OrderedDict(Counter(tools))\n",
|
458 |
+
"print(\"List of tools used in all samples:\")\n",
|
459 |
+
"print(\"Total number of tools used:\", len(tools_counter))\n",
|
460 |
+
"for tool, count in tools_counter.items():\n",
|
461 |
+
" print(f\" ├── {tool}: {count}\")"
|
462 |
+
]
|
463 |
+
},
|
464 |
+
{
|
465 |
+
"cell_type": "markdown",
|
466 |
+
"id": "5efee12a",
|
467 |
+
"metadata": {},
|
468 |
+
"source": [
|
469 |
+
"#### Graph"
|
470 |
+
]
|
471 |
+
},
|
472 |
+
{
|
473 |
+
"cell_type": "code",
|
474 |
+
"execution_count": 65,
|
475 |
+
"id": "7fe573cc",
|
476 |
+
"metadata": {},
|
477 |
+
"outputs": [],
|
478 |
+
"source": [
|
479 |
+
"system_prompt = \"\"\"\n",
|
480 |
+
"You are a helpful assistant tasked with answering questions using a set of tools.\n",
|
481 |
+
"If the tool is not available, you can try to find the information online. You can also use your own knowledge to answer the question. \n",
|
482 |
+
"You need to provide a step-by-step explanation of how you arrived at the answer.\n",
|
483 |
+
"==========================\n",
|
484 |
+
"Here is a few examples showing you how to answer the question step by step.\n",
|
485 |
+
"\"\"\"\n",
|
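+
"# random_samples: few-shot examples drawn from json_QA in an earlier cell\n",
|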
486 |
+
"for i, samples in enumerate(random_samples):\n",
|
487 |
+
" system_prompt += f\"\\nQuestion {i+1}: {samples['Question']}\\nSteps:\\n{samples['Annotator Metadata']['Steps']}\\nTools:\\n{samples['Annotator Metadata']['Tools']}\\nFinal Answer: {samples['Final answer']}\\n\"\n",
|
488 |
+
"system_prompt += \"\\n==========================\\n\"\n",
|
489 |
+
"system_prompt += \"Now, please answer the following question step by step.\\n\"\n",
|
490 |
+
"\n",
|
491 |
+
"# save the system_prompt to a file\n",
|
492 |
+
"with open('system_prompt.txt', 'w') as f:\n",
|
493 |
+
" f.write(system_prompt)"
|
494 |
+
]
|
495 |
+
},
|
496 |
+
{
|
497 |
+
"cell_type": "code",
|
498 |
+
"execution_count": 66,
|
499 |
+
"id": "d6beb0da",
|
500 |
+
"metadata": {},
|
501 |
+
"outputs": [
|
502 |
+
{
|
503 |
+
"name": "stdout",
|
504 |
+
"output_type": "stream",
|
505 |
+
"text": [
|
506 |
+
"\n",
|
507 |
+
"You are a helpful assistant tasked with answering questions using a set of tools.\n",
|
508 |
+
"If the tool is not available, you can try to find the information online. You can also use your own knowledge to answer the question. \n",
|
509 |
+
"You need to provide a step-by-step explanation of how you arrived at the answer.\n",
|
510 |
+
"==========================\n",
|
511 |
+
"Here is a few examples showing you how to answer the question step by step.\n",
|
512 |
+
"\n",
|
513 |
+
"Question 1: As of May 2023, how many stops are between South Station and Windsor Gardens on MBTA’s Franklin-Foxboro line (not included)?\n",
|
514 |
+
"Steps:\n",
|
515 |
+
"1. Search the web for “MBTA Franklin Foxboro line”.\n",
|
516 |
+
"2. Click on top result, on the MBTA website.\n",
|
517 |
+
"3. Scroll down on the list of stops, and count the current stops between South Station and Windsor Gardens.\n",
|
518 |
+
"4. Click the “Schedule & Maps” tab to view a map of the route.\n",
|
519 |
+
"5. Examine the map to confirm that the order of stops is the same as on the listing of stops.\n",
|
520 |
+
"6. Return to web search.\n",
|
521 |
+
"7. Click on Wikipedia article for Franklin line.\n",
|
522 |
+
"8. Read the article to check whether any stops were added or removed since the date given in the question.\n",
|
523 |
+
"9. Search the web for “MBTA Franklin Foxboro Line changes”.\n",
|
524 |
+
"10. Click News tab.\n",
|
525 |
+
"11. Click article about rail schedule changes.\n",
|
526 |
+
"12. Confirm that none of the changes affect the answer to the question.\n",
|
527 |
+
"Tools:\n",
|
528 |
+
"1. Search engine\n",
|
529 |
+
"2. Web browser\n",
|
530 |
+
"Final Answer: 10\n",
|
531 |
+
"\n",
|
532 |
+
"==========================\n",
|
533 |
+
"Now, please answer the following question step by step.\n",
|
534 |
+
"\n"
|
535 |
+
]
|
536 |
+
}
|
537 |
+
],
|
538 |
+
"source": [
|
539 |
+
"# load the system prompt from the file\n",
|
540 |
+
"with open('system_prompt.txt', 'r') as f:\n",
|
541 |
+
" system_prompt = f.read()\n",
|
542 |
+
"print(system_prompt)"
|
543 |
+
]
|
544 |
+
},
|
545 |
+
{
|
546 |
+
"cell_type": "code",
|
547 |
+
"execution_count": 67,
|
548 |
+
"id": "42fde0f8",
|
549 |
+
"metadata": {},
|
550 |
+
"outputs": [
|
551 |
+
{
|
552 |
+
"name": "stderr",
|
553 |
+
"output_type": "stream",
|
554 |
+
"text": [
|
555 |
+
"huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n",
|
556 |
+
"To disable this warning, you can either:\n",
|
557 |
+
"\t- Avoid using `tokenizers` before the fork if possible\n",
|
558 |
+
"\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n"
|
559 |
+
]
|
560 |
+
},
|
561 |
+
{
|
562 |
+
"name": "stdout",
|
563 |
+
"output_type": "stream",
|
564 |
+
"text": [
|
565 |
+
"6681.19s - pydevd: Sending message related to process being replaced timed-out after 5 seconds\n",
|
566 |
+
"Requirement already satisfied: langgraph in /opt/anaconda3/lib/python3.12/site-packages (0.4.5)\n",
|
567 |
+
"Requirement already satisfied: langchain-core>=0.1 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph) (0.3.60)\n",
|
568 |
+
"Requirement already satisfied: langgraph-checkpoint<3.0.0,>=2.0.26 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph) (2.0.26)\n",
|
569 |
+
"Requirement already satisfied: langgraph-prebuilt>=0.1.8 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph) (0.1.8)\n",
|
570 |
+
"Requirement already satisfied: langgraph-sdk>=0.1.42 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph) (0.1.70)\n",
|
571 |
+
"Requirement already satisfied: pydantic>=2.7.4 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph) (2.11.4)\n",
|
572 |
+
"Requirement already satisfied: xxhash<4.0.0,>=3.5.0 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph) (3.5.0)\n",
|
573 |
+
"Requirement already satisfied: ormsgpack<2.0.0,>=1.8.0 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph-checkpoint<3.0.0,>=2.0.26->langgraph) (1.9.1)\n",
|
574 |
+
"Requirement already satisfied: langsmith<0.4,>=0.1.126 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core>=0.1->langgraph) (0.3.42)\n",
|
575 |
+
"Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core>=0.1->langgraph) (8.2.3)\n",
|
576 |
+
"Requirement already satisfied: jsonpatch<2.0,>=1.33 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core>=0.1->langgraph) (1.33)\n",
|
577 |
+
"Requirement already satisfied: PyYAML>=5.3 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core>=0.1->langgraph) (6.0.1)\n",
|
578 |
+
"Requirement already satisfied: packaging<25,>=23.2 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core>=0.1->langgraph) (24.1)\n",
|
579 |
+
"Requirement already satisfied: typing-extensions>=4.7 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core>=0.1->langgraph) (4.13.2)\n",
|
580 |
+
"Requirement already satisfied: jsonpointer>=1.9 in /opt/anaconda3/lib/python3.12/site-packages (from jsonpatch<2.0,>=1.33->langchain-core>=0.1->langgraph) (2.1)\n",
|
581 |
+
"Requirement already satisfied: httpx<1,>=0.23.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (0.27.0)\n",
|
582 |
+
"Requirement already satisfied: orjson<4.0.0,>=3.9.14 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (3.10.18)\n",
|
583 |
+
"Requirement already satisfied: requests<3,>=2 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (2.32.3)\n",
|
584 |
+
"Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (1.0.0)\n",
|
585 |
+
"Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (0.23.0)\n",
|
586 |
+
"Requirement already satisfied: anyio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (4.2.0)\n",
|
587 |
+
"Requirement already satisfied: certifi in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (2025.4.26)\n",
|
588 |
+
"Requirement already satisfied: httpcore==1.* in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (1.0.2)\n",
|
589 |
+
"Requirement already satisfied: idna in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (3.7)\n",
|
590 |
+
"Requirement already satisfied: sniffio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (1.3.0)\n",
|
591 |
+
"Requirement already satisfied: h11<0.15,>=0.13 in /opt/anaconda3/lib/python3.12/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (0.14.0)\n",
|
592 |
+
"Requirement already satisfied: annotated-types>=0.6.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic>=2.7.4->langgraph) (0.6.0)\n",
|
593 |
+
"Requirement already satisfied: pydantic-core==2.33.2 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic>=2.7.4->langgraph) (2.33.2)\n",
|
594 |
+
"Requirement already satisfied: typing-inspection>=0.4.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic>=2.7.4->langgraph) (0.4.0)\n",
|
595 |
+
"Requirement already satisfied: charset-normalizer<4,>=2 in /opt/anaconda3/lib/python3.12/site-packages (from requests<3,>=2->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (3.3.2)\n",
|
596 |
+
"Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/anaconda3/lib/python3.12/site-packages (from requests<3,>=2->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (2.2.3)\n",
|
597 |
+
"Note: you may need to restart the kernel to use updated packages.\n"
|
598 |
+
]
|
599 |
+
},
|
600 |
+
{
|
601 |
+
"name": "stderr",
|
602 |
+
"output_type": "stream",
|
603 |
+
"text": [
|
604 |
+
"huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n",
|
605 |
+
"To disable this warning, you can either:\n",
|
606 |
+
"\t- Avoid using `tokenizers` before the fork if possible\n",
|
607 |
+
"\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n"
|
608 |
+
]
|
609 |
+
},
|
610 |
+
{
|
611 |
+
"name": "stdout",
|
612 |
+
"output_type": "stream",
|
613 |
+
"text": [
|
614 |
+
"6687.05s - pydevd: Sending message related to process being replaced timed-out after 5 seconds\n",
|
615 |
+
"Requirement already satisfied: langchain-google-genai in /opt/anaconda3/lib/python3.12/site-packages (2.1.4)\n",
|
616 |
+
"Requirement already satisfied: filetype<2.0.0,>=1.2.0 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-google-genai) (1.2.0)\n",
|
617 |
+
"Requirement already satisfied: google-ai-generativelanguage<0.7.0,>=0.6.18 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-google-genai) (0.6.18)\n",
|
618 |
+
"Requirement already satisfied: langchain-core<0.4.0,>=0.3.52 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-google-genai) (0.3.60)\n",
|
619 |
+
"Requirement already satisfied: pydantic<3,>=2 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-google-genai) (2.11.4)\n",
|
620 |
+
"Requirement already satisfied: google-api-core!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1 in /opt/anaconda3/lib/python3.12/site-packages (from google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (2.24.2)\n",
|
621 |
+
"Requirement already satisfied: google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1 in /opt/anaconda3/lib/python3.12/site-packages (from google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (2.40.2)\n",
|
622 |
+
"Requirement already satisfied: proto-plus<2.0.0,>=1.22.3 in /opt/anaconda3/lib/python3.12/site-packages (from google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (1.26.1)\n",
|
623 |
+
"Requirement already satisfied: protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<7.0.0,>=3.20.2 in /opt/anaconda3/lib/python3.12/site-packages (from google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (6.31.0)\n",
|
624 |
+
"Requirement already satisfied: googleapis-common-protos<2.0.0,>=1.56.2 in /opt/anaconda3/lib/python3.12/site-packages (from google-api-core!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (1.70.0)\n",
|
625 |
+
"Requirement already satisfied: requests<3.0.0,>=2.18.0 in /opt/anaconda3/lib/python3.12/site-packages (from google-api-core!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (2.32.3)\n",
|
626 |
+
"Requirement already satisfied: grpcio<2.0dev,>=1.33.2 in /opt/anaconda3/lib/python3.12/site-packages (from google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (1.72.0rc1)\n",
|
627 |
+
"Requirement already satisfied: grpcio-status<2.0.dev0,>=1.33.2 in /opt/anaconda3/lib/python3.12/site-packages (from google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (1.72.0rc1)\n",
|
628 |
+
"Requirement already satisfied: cachetools<6.0,>=2.0.0 in /opt/anaconda3/lib/python3.12/site-packages (from google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (5.3.3)\n",
|
629 |
+
"Requirement already satisfied: pyasn1-modules>=0.2.1 in /opt/anaconda3/lib/python3.12/site-packages (from google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (0.2.8)\n",
|
630 |
+
"Requirement already satisfied: rsa<5,>=3.1.4 in /opt/anaconda3/lib/python3.12/site-packages (from google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (4.9.1)\n",
|
631 |
+
"Requirement already satisfied: langsmith<0.4,>=0.1.126 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (0.3.42)\n",
|
632 |
+
"Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (8.2.3)\n",
|
633 |
+
"Requirement already satisfied: jsonpatch<2.0,>=1.33 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (1.33)\n",
|
634 |
+
"Requirement already satisfied: PyYAML>=5.3 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (6.0.1)\n",
|
635 |
+
"Requirement already satisfied: packaging<25,>=23.2 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (24.1)\n",
|
636 |
+
"Requirement already satisfied: typing-extensions>=4.7 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (4.13.2)\n",
|
637 |
+
"Requirement already satisfied: jsonpointer>=1.9 in /opt/anaconda3/lib/python3.12/site-packages (from jsonpatch<2.0,>=1.33->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (2.1)\n",
|
638 |
+
"Requirement already satisfied: httpx<1,>=0.23.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (0.27.0)\n",
|
639 |
+
"Requirement already satisfied: orjson<4.0.0,>=3.9.14 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (3.10.18)\n",
|
640 |
+
"Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (1.0.0)\n",
|
641 |
+
"Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (0.23.0)\n",
|
642 |
+
"Requirement already satisfied: anyio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (4.2.0)\n",
|
643 |
+
"Requirement already satisfied: certifi in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (2025.4.26)\n",
|
644 |
+
"Requirement already satisfied: httpcore==1.* in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (1.0.2)\n",
|
645 |
+
"Requirement already satisfied: idna in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (3.7)\n",
|
646 |
+
"Requirement already satisfied: sniffio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (1.3.0)\n",
|
647 |
+
"Requirement already satisfied: h11<0.15,>=0.13 in /opt/anaconda3/lib/python3.12/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (0.14.0)\n",
|
648 |
+
"Requirement already satisfied: annotated-types>=0.6.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=2->langchain-google-genai) (0.6.0)\n",
|
649 |
+
"Requirement already satisfied: pydantic-core==2.33.2 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=2->langchain-google-genai) (2.33.2)\n",
|
650 |
+
"Requirement already satisfied: typing-inspection>=0.4.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=2->langchain-google-genai) (0.4.0)\n",
|
651 |
+
"Requirement already satisfied: charset-normalizer<4,>=2 in /opt/anaconda3/lib/python3.12/site-packages (from requests<3.0.0,>=2.18.0->google-api-core!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (3.3.2)\n",
|
652 |
+
"Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/anaconda3/lib/python3.12/site-packages (from requests<3.0.0,>=2.18.0->google-api-core!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (2.2.3)\n",
|
653 |
+
"Requirement already satisfied: pyasn1>=0.1.3 in /opt/anaconda3/lib/python3.12/site-packages (from rsa<5,>=3.1.4->google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (0.4.8)\n",
|
654 |
+
"Note: you may need to restart the kernel to use updated packages.\n"
|
655 |
+
]
|
656 |
+
},
|
657 |
+
{
|
658 |
+
"ename": "SupabaseException",
|
659 |
+
"evalue": "supabase_url is required",
|
660 |
+
"output_type": "error",
|
661 |
+
"traceback": [
|
662 |
+
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
663 |
+
"\u001b[0;31mSupabaseException\u001b[0m Traceback (most recent call last)",
|
664 |
+
"Cell \u001b[0;32mIn[67], line 25\u001b[0m\n\u001b[1;32m 23\u001b[0m supabase_url \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39menviron\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSUPABASE_URL\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 24\u001b[0m supabase_key \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39menviron\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSUPABASE_SERVICE_KEY\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m---> 25\u001b[0m supabase: Client \u001b[38;5;241m=\u001b[39m create_client(supabase_url, supabase_key)\n\u001b[1;32m 26\u001b[0m vector_store \u001b[38;5;241m=\u001b[39m SupabaseVectorStore(\n\u001b[1;32m 27\u001b[0m client\u001b[38;5;241m=\u001b[39msupabase,\n\u001b[1;32m 28\u001b[0m embedding\u001b[38;5;241m=\u001b[39m embeddings,\n\u001b[1;32m 29\u001b[0m table_name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdocuments\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 30\u001b[0m query_name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmatch_documents_langchain\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 31\u001b[0m )\n\u001b[1;32m 33\u001b[0m question_retrieve_tool \u001b[38;5;241m=\u001b[39m create_retriever_tool(\n\u001b[1;32m 34\u001b[0m vector_store\u001b[38;5;241m.\u001b[39mas_retriever(),\n\u001b[1;32m 35\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mQuestion Retriever\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 36\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFind similar questions in the vector database for the given question.\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 37\u001b[0m )\n",
|
665 |
+
"File \u001b[0;32m/opt/anaconda3/lib/python3.12/site-packages/supabase/_sync/client.py:338\u001b[0m, in \u001b[0;36mcreate_client\u001b[0;34m(supabase_url, supabase_key, options)\u001b[0m\n\u001b[1;32m 307\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcreate_client\u001b[39m(\n\u001b[1;32m 308\u001b[0m supabase_url: \u001b[38;5;28mstr\u001b[39m,\n\u001b[1;32m 309\u001b[0m supabase_key: \u001b[38;5;28mstr\u001b[39m,\n\u001b[1;32m 310\u001b[0m options: Optional[ClientOptions] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 311\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m SyncClient:\n\u001b[1;32m 312\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Create client function to instantiate supabase client like JS runtime.\u001b[39;00m\n\u001b[1;32m 313\u001b[0m \n\u001b[1;32m 314\u001b[0m \u001b[38;5;124;03m Parameters\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 336\u001b[0m \u001b[38;5;124;03m Client\u001b[39;00m\n\u001b[1;32m 337\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m--> 338\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m SyncClient\u001b[38;5;241m.\u001b[39mcreate(\n\u001b[1;32m 339\u001b[0m supabase_url\u001b[38;5;241m=\u001b[39msupabase_url, supabase_key\u001b[38;5;241m=\u001b[39msupabase_key, options\u001b[38;5;241m=\u001b[39moptions\n\u001b[1;32m 340\u001b[0m )\n",
|
666 |
+
"File \u001b[0;32m/opt/anaconda3/lib/python3.12/site-packages/supabase/_sync/client.py:101\u001b[0m, in \u001b[0;36mSyncClient.create\u001b[0;34m(cls, supabase_url, supabase_key, options)\u001b[0m\n\u001b[1;32m 93\u001b[0m \u001b[38;5;129m@classmethod\u001b[39m\n\u001b[1;32m 94\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcreate\u001b[39m(\n\u001b[1;32m 95\u001b[0m \u001b[38;5;28mcls\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 98\u001b[0m options: Optional[ClientOptions] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 99\u001b[0m ):\n\u001b[1;32m 100\u001b[0m auth_header \u001b[38;5;241m=\u001b[39m options\u001b[38;5;241m.\u001b[39mheaders\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mAuthorization\u001b[39m\u001b[38;5;124m\"\u001b[39m) \u001b[38;5;28;01mif\u001b[39;00m options \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m--> 101\u001b[0m client \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mcls\u001b[39m(supabase_url, supabase_key, options)\n\u001b[1;32m 103\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m auth_header \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 104\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n",
|
667 |
+
"File \u001b[0;32m/opt/anaconda3/lib/python3.12/site-packages/supabase/_sync/client.py:51\u001b[0m, in \u001b[0;36mSyncClient.__init__\u001b[0;34m(self, supabase_url, supabase_key, options)\u001b[0m\n\u001b[1;32m 37\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Instantiate the client.\u001b[39;00m\n\u001b[1;32m 38\u001b[0m \n\u001b[1;32m 39\u001b[0m \u001b[38;5;124;03mParameters\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[38;5;124;03m `DEFAULT_OPTIONS` dict.\u001b[39;00m\n\u001b[1;32m 48\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 50\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m supabase_url:\n\u001b[0;32m---> 51\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m SupabaseException(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msupabase_url is required\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 52\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m supabase_key:\n\u001b[1;32m 53\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m SupabaseException(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msupabase_key is required\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
|
668 |
+
"\u001b[0;31mSupabaseException\u001b[0m: supabase_url is required"
|
669 |
+
]
|
670 |
+
}
|
671 |
+
],
|
672 |
+
"source": [
|
673 |
+
"%pip install langgraph\n",
|
674 |
+
"%pip install langchain-google-genai\n",
|
675 |
+
"\n",
|
676 |
+
"import dotenv\n",
|
677 |
+
"from langgraph.graph import MessagesState, START, StateGraph\n",
|
678 |
+
"from langgraph.prebuilt import tools_condition\n",
|
679 |
+
"from langgraph.prebuilt import ToolNode\n",
|
680 |
+
"from langchain_google_genai import ChatGoogleGenerativeAI\n",
|
681 |
+
"from langchain_huggingface import HuggingFaceEmbeddings\n",
|
682 |
+
"from langchain_community.tools.tavily_search import TavilySearchResults\n",
|
683 |
+
"from langchain_community.document_loaders import WikipediaLoader\n",
|
684 |
+
"from langchain_community.document_loaders import ArxivLoader\n",
|
685 |
+
"from langchain_community.vectorstores import SupabaseVectorStore\n",
|
686 |
+
"from langchain.tools.retriever import create_retriever_tool\n",
|
687 |
+
"from langchain_core.messages import HumanMessage, SystemMessage\n",
|
688 |
+
"from langchain_core.tools import tool\n",
|
689 |
+
"from supabase.client import Client, create_client\n",
|
690 |
+
"\n",
|
691 |
+
"# Define the retriever from supabase\n",
|
692 |
+
"load_dotenv()\n",
|
693 |
+
"embeddings = HuggingFaceEmbeddings(model_name=\"sentence-transformers/all-mpnet-base-v2\") # dim=768\n",
|
694 |
+
"\n",
|
695 |
+
"supabase_url = os.environ.get(\"SUPABASE_URL\")\n",
|
696 |
+
"supabase_key = os.environ.get(\"SUPABASE_SERVICE_KEY\")\n",
|
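+
"# create_client raises SupabaseException (\"supabase_url is required\") when these env vars are missing, as in the traceback above\n",
|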
697 |
+
"supabase: Client = create_client(supabase_url, supabase_key)\n",
|
698 |
+
"vector_store = SupabaseVectorStore(\n",
|
699 |
+
" client=supabase,\n",
|
700 |
+
" embedding= embeddings,\n",
|
701 |
+
" table_name=\"documents\",\n",
|
702 |
+
" query_name=\"match_documents_langchain\",\n",
|
703 |
+
")\n",
|
704 |
+
"\n",
|
705 |
+
"question_retrieve_tool = create_retriever_tool(\n",
|
706 |
+
" vector_store.as_retriever(),\n",
|
707 |
+
" \"Question Retriever\",\n",
|
708 |
+
" \"Find similar questions in the vector database for the given question.\",\n",
|
709 |
+
")\n",
|
710 |
+
"\n",
|
711 |
+
"@tool\n",
|
712 |
+
"def multiply(a: int, b: int) -> int:\n",
|
713 |
+
" \"\"\"Multiply two numbers.\n",
|
714 |
+
"\n",
|
715 |
+
" Args:\n",
|
716 |
+
" a: first int\n",
|
717 |
+
" b: second int\n",
|
718 |
+
" \"\"\"\n",
|
719 |
+
" return a * b\n",
|
720 |
+
"\n",
|
721 |
+
"@tool\n",
|
722 |
+
"def add(a: int, b: int) -> int:\n",
|
723 |
+
" \"\"\"Add two numbers.\n",
|
724 |
+
" \n",
|
725 |
+
" Args:\n",
|
726 |
+
" a: first int\n",
|
727 |
+
" b: second int\n",
|
728 |
+
" \"\"\"\n",
|
729 |
+
" return a + b\n",
|
730 |
+
"\n",
|
731 |
+
"@tool\n",
|
732 |
+
"def subtract(a: int, b: int) -> int:\n",
|
733 |
+
" \"\"\"Subtract two numbers.\n",
|
734 |
+
" \n",
|
735 |
+
" Args:\n",
|
736 |
+
" a: first int\n",
|
737 |
+
" b: second int\n",
|
738 |
+
" \"\"\"\n",
|
739 |
+
" return a - b\n",
|
740 |
+
"\n",
|
741 |
+
"@tool\n",
|
742 |
+
"def divide(a: int, b: int) -> int:\n",
|
743 |
+
" \"\"\"Divide two numbers.\n",
|
744 |
+
" \n",
|
745 |
+
" Args:\n",
|
746 |
+
" a: first int\n",
|
747 |
+
" b: second int\n",
|
748 |
+
" \"\"\"\n",
|
749 |
+
" if b == 0:\n",
|
750 |
+
" raise ValueError(\"Cannot divide by zero.\")\n",
|
751 |
+
" return a / b\n",
|
752 |
+
"\n",
|
753 |
+
"@tool\n",
|
754 |
+
"def modulus(a: int, b: int) -> int:\n",
|
755 |
+
" \"\"\"Get the modulus of two numbers.\n",
|
756 |
+
" \n",
|
757 |
+
" Args:\n",
|
758 |
+
" a: first int\n",
|
759 |
+
" b: second int\n",
|
760 |
+
" \"\"\"\n",
|
761 |
+
" return a % b\n",
|
762 |
+
"\n",
|
763 |
+
"@tool\n",
|
764 |
+
"def wiki_search(query: str) -> str:\n",
|
765 |
+
" \"\"\"Search Wikipedia for a query and return maximum 2 results.\n",
|
766 |
+
" \n",
|
767 |
+
" Args:\n",
|
768 |
+
" query: The search query.\"\"\"\n",
|
769 |
+
" search_docs = WikipediaLoader(query=query, load_max_docs=2).load()\n",
|
770 |
+
" formatted_search_docs = \"\\n\\n---\\n\\n\".join(\n",
|
771 |
+
" [\n",
|
772 |
+
" f'<Document source=\"{doc.metadata[\"source\"]}\" page=\"{doc.metadata.get(\"page\", \"\")}\"/>\\n{doc.page_content}\\n</Document>'\n",
|
773 |
+
" for doc in search_docs\n",
|
774 |
+
" ])\n",
|
775 |
+
" return {\"wiki_results\": formatted_search_docs}\n",
|
776 |
+
"\n",
|
777 |
+
"@tool\n",
|
778 |
+
"def web_search(query: str) -> str:\n",
|
779 |
+
" \"\"\"Search Tavily for a query and return maximum 3 results.\n",
|
780 |
+
" \n",
|
781 |
+
" Args:\n",
|
782 |
+
" query: The search query.\"\"\"\n",
|
783 |
+
" search_docs = TavilySearchResults(max_results=3).invoke(query=query)\n",
|
784 |
+
" formatted_search_docs = \"\\n\\n---\\n\\n\".join(\n",
|
785 |
+
" [\n",
|
786 |
+
" f'<Document source=\"{doc.metadata[\"source\"]}\" page=\"{doc.metadata.get(\"page\", \"\")}\"/>\\n{doc.page_content}\\n</Document>'\n",
|
787 |
+
" for doc in search_docs\n",
|
788 |
+
" ])\n",
|
789 |
+
" return {\"web_results\": formatted_search_docs}\n",
|
790 |
+
"\n",
|
791 |
+
"@tool\n",
|
792 |
+
"def arvix_search(query: str) -> str:\n",
|
793 |
+
" \"\"\"Search Arxiv for a query and return maximum 3 result.\n",
|
794 |
+
" \n",
|
795 |
+
" Args:\n",
|
796 |
+
" query: The search query.\"\"\"\n",
|
797 |
+
" search_docs = ArxivLoader(query=query, load_max_docs=3).load()\n",
|
798 |
+
" formatted_search_docs = \"\\n\\n---\\n\\n\".join(\n",
|
799 |
+
" [\n",
|
800 |
+
" f'<Document source=\"{doc.metadata[\"source\"]}\" page=\"{doc.metadata.get(\"page\", \"\")}\"/>\\n{doc.page_content[:1000]}\\n</Document>'\n",
|
801 |
+
" for doc in search_docs\n",
|
802 |
+
" ])\n",
|
803 |
+
" return {\"arvix_results\": formatted_search_docs}\n",
|
804 |
+
"\n",
|
805 |
+
"@tool\n",
|
806 |
+
"def similar_question_search(question: str) -> str:\n",
|
807 |
+
" \"\"\"Search the vector database for similar questions and return the first results.\n",
|
808 |
+
" \n",
|
809 |
+
" Args:\n",
|
810 |
+
" question: the question human provided.\"\"\"\n",
|
811 |
+
" matched_docs = vector_store.similarity_search(query, 3)\n",
|
812 |
+
" formatted_search_docs = \"\\n\\n---\\n\\n\".join(\n",
|
813 |
+
" [\n",
|
814 |
+
" f'<Document source=\"{doc.metadata[\"source\"]}\" page=\"{doc.metadata.get(\"page\", \"\")}\"/>\\n{doc.page_content[:1000]}\\n</Document>'\n",
|
815 |
+
" for doc in matched_docs\n",
|
816 |
+
" ])\n",
|
817 |
+
" return {\"similar_questions\": formatted_search_docs}\n",
|
818 |
+
"\n",
|
819 |
+
"tools = [\n",
|
820 |
+
" multiply,\n",
|
821 |
+
" add,\n",
|
822 |
+
" subtract,\n",
|
823 |
+
" divide,\n",
|
824 |
+
" modulus,\n",
|
825 |
+
" wiki_search,\n",
|
826 |
+
" web_search,\n",
|
827 |
+
" arvix_search,\n",
|
828 |
+
" question_retrieve_tool\n",
|
829 |
+
"]\n",
|
830 |
+
"\n",
|
831 |
+
"llm = ChatGoogleGenerativeAI(model=\"gemini-2.0-flash\")\n",
|
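+
"# bind_tools attaches each tool's JSON schema to the request so the model can emit structured tool calls\n",
|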
832 |
+
"llm_with_tools = llm.bind_tools(tools)"
|
833 |
+
]
|
834 |
+
},
|
835 |
+
{
|
836 |
+
"cell_type": "code",
|
837 |
+
"execution_count": null,
|
838 |
+
"id": "7dd0716c",
|
839 |
+
"metadata": {},
|
840 |
+
"outputs": [],
|
841 |
+
"source": [
|
842 |
+
"# load the system prompt from the file\n",
|
843 |
+
"with open('system_prompt.txt', 'r') as f:\n",
|
844 |
+
" system_prompt = f.read()\n",
|
845 |
+
"\n",
|
846 |
+
"\n",
|
847 |
+
"# System message\n",
|
848 |
+
"sys_msg = SystemMessage(content=system_prompt)\n",
|
849 |
+
"\n",
|
850 |
+
"# Node\n",
|
851 |
+
"def assistant(state: MessagesState):\n",
|
852 |
+
" \"\"\"Assistant node\"\"\"\n",
|
853 |
+
" return {\"messages\": [llm_with_tools.invoke([sys_msg] + state[\"messages\"])]}\n",
|
854 |
+
"\n",
|
855 |
+
"# Build graph\n",
|
856 |
+
"builder = StateGraph(MessagesState)\n",
|
857 |
+
"builder.add_node(\"assistant\", assistant)\n",
|
858 |
+
"builder.add_node(\"tools\", ToolNode(tools))\n",
|
859 |
+
"builder.add_edge(START, \"assistant\")\n",
|
860 |
+
"builder.add_conditional_edges(\n",
|
861 |
+
" \"assistant\",\n",
|
862 |
+
" # If the latest message (result) from assistant is a tool call -> tools_condition routes to tools\n",
|
863 |
+
" # If the latest message (result) from assistant is a not a tool call -> tools_condition routes to END\n",
|
864 |
+
" tools_condition,\n",
|
865 |
+
")\n",
|
866 |
+
"builder.add_edge(\"tools\", \"assistant\")\n",
|
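+
"# routing \"tools\" back to \"assistant\" lets the model chain several tool calls before giving a final answer\n",
|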
867 |
+
"\n",
|
868 |
+
"# Compile graph\n",
|
869 |
+
"graph = builder.compile()\n"
|
870 |
+
]
|
871 |
+
},
|
872 |
+
{
|
873 |
+
"cell_type": "code",
|
874 |
+
"execution_count": null,
|
875 |
+
"id": "f4e77216",
|
876 |
+
"metadata": {},
|
877 |
+
"outputs": [
|
878 |
+
{
|
879 |
+
"data": {
|
880 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAANgAAAD5CAIAAADKsmwpAAAQAElEQVR4nOydB1wUR9vA5zrcwdGOXqRIFRC7gkZsxK7YguU1xhgTJcVXjVETNSYajCbGYCxYYuJnjYliYq+xRo2xIIqAgNI7HFzh+vfo5UVEQEzYuzl2/r/7HXu7e7dX/jwz88zsLFun0yECwdiwEYGAAUREAhYQEQlYQEQkYAERkYAFREQCFpikiAq5pixfKavWyKrVarVOrTSBDBTPnMnmMviWbL6Q5ehuhgjPYkoiSqtU6TekmcmSqjKVpS2Hb8mC31Voy0GmkArValDRQ4WsWsrhMbPvy7yCBd4hcLNAhCcwTCKhrdXoLv9WVpqvsHPhegdbuLY1R6ZMjUyTlSzNTZflZ9aED7Xz7WCJaI8JiHj3ivj3fSXhw+w6RNqg1gWE9suHyhQyTdR/nMwtWIjG4C7i7/uKzfjM7kNEqPVSWqBIXJc38HUnN18+oitYi3hyR5GTl1lIhBWiAQfW5fWKFolceIiW4Cti4vq8tmEWweG0sFDPgXW5IRHW8KkR/WAiLLmQWOIZJKCVhUB0rNuVo2UVRUpEP3AUMfVGNZvDDIu0RvRj4nyPs/uKaTg2D0cRz+0r6diXjhYCDAYDigLIVSGagZ2If52qCI4Q8szpm8vo2Nfm3tWqGqkG0Qm8RIQiKTtVFj60NSdrmsMro+xvnatEdAIvETPvSKFPFtEeD39+8mUxohN4/erQ8QWdsMiwfPTRR7/99ht6efr375+fn48oAHpZrEXcgodyRBvwErGyROUdYmgRU1JS0MtTWFhYWUlh6enX2SInTYZoA0YiQvW8olhJXTMlMTFx3LhxERER/fr1+/DDD4uKimBl586dIaotXbo0MjISHmo0mo0bN44cOTI8PHzQoEErVqyQy/8OSxD/du3a9f777/fo0ePChQtDhw6FlcOHD58zZw6iAIGQXZpLo4QiRiJKq9Tw7SNquHnz5rJly8aPH793795vv/0Wgtn8+fNh/ZEjR+AevDx48CAsgGo//PDDzJkz9+zZs2TJknPnzq1bt07/Cmw2e//+/W3btk1ISOjSpUtcXBys3LFjx2effYYoAL4K+EIQbcBoPKK0SiMQUhUOMzIyeDzesGHDwCc3NzcIdQUFBbDeyupx5w2fz9cvQBSEgAe2wbKHh0dUVNSlS5f0rwAZPjMzM4iI+ocCweMqhFAo1C+0OAIrllRMowwORiLqtDouZU1mKILBpGnTpo0YMaJbt24uLi52dnbP72ZtbX348GGIncXFxWq1WiaTgaO1W0NDQ5GhYLEZXDMaJRAw+qh8IVtcokLU4OnpuW3bNoiFa9euhYrdlClTkpOTn99t1apVW7Zsgark5s2boZiOjo6uu9XCwnDDESSVanAR0QaMRIRyGUpnRBm+vr4Q6k6ePAmVPBaLNWvWLKXymdYAtFSgpvj6668PHjzY1dVVJBJJJBJkJCitqGAIThHRkm3rxNFqKenvh/iXlJQEC6Bgp06dZsyYAe2VsrK/u3T1gwy0Wi24qK8sAlKp9Pz5802PP6BudIJCprF3p9HYRLxqIWZ8FnSuIAq4fPny7NmzT58+nZubm5qaCo1iZ2dnJycn3hNu3LgBK6ES6e/vf+jQIdgnPT0dQibkeqqqqh4+fAj1xXovCM0UuL948WJmZiaigNS/qp09TfvUnJcCLxE92wke3qVExKlTp0KFb82aNWPGjImNjYVIFh8fD+bBJqgvnjp1ClI2kDJcvHgxBEWoIy5YsCAmJgb2BFknT54MbZd6LxgYGAi5xm+++WblypWopdGodXkP5B4BNDpzAK8R2nKJ+sSOohHvuCJ6k3VXkpMmfyXaHtEGvCKiuQXbxpF7m2YDT57n8q9ldBudjt0J9hHDRAnzM9r3bnhgLJSb0EHX4CZoAnO53AY3eXl5Qe4GUcMPT2hwE6R7Gmt3Q8m+YcOGBjfdv17l4G5m69jwZ2mt4Hjy1K1zlQyGrv0rDZ/FXF1d3eB6hUIBIuqrffVgMpkU9X/oj1svDVSLSqXicDgNboLGe91UeV0ObcnvPcbe0rrhJ7ZWMD2LD36Mdt2tDD8kzOjQ9oNj2ok0dJrL+f0lZYUKRCfO7C128jSjoYUI5/Oaoet579c5r4yyd/GhRTrt7E/Fbr7mtJ0HB99udQaTEfOhxx9HylKuVaFWjVajO7Auz9aJS+fZmExgEqbLh0qzU2Thw0StMsH754ny1OvVkWPt6TzxDTKVaelK8hSXfysVCNlQTEMVylxg8qMBinNqslNl109UhEVadx1oy2TSaKBNg5iGiHpy02UQPLKSpfbuPCsRB7yEG1/I0moR/rAYSFyukoo1OqS7/2c1vPO27QWhr1hzuOSsxceYkoi1FGTJS/OU0io13JgMhkzSkoPHZDLZo0ePIOGMWhRLGw581QIrlqUtx83HXGBFZi9/BpMUkVJSUlKWL1++Y8cORDAg5P+SgAVERAIWEBEJWEBEJGABEZGABUREAhYQEQlYQEQkYAERkYAFREQCFhARCVhARCRgARGRgAVERAIWEBEJWEBEJGABEZGABUREAhYQEQlYQEQkYAERkYAFREQCFhARCVhARKwPg8Gwt6fR5NWYQESsj06nKykpQQTDQkQkYAERkYAFREQCFhARCVhARCRgARGRgAVERAIWEBEJWEBEJGABEZGABUREAhYQEQlYQEQkYAERkYAFREQCFpAL/vzN+PHjJRIJg8FQKpVisVgkEsGyQqE4fvw4IlAPuRDc3wwaNKi4uDg/P7+0tFSlUhUUFMCypSV9r1trYIiIfxMTE+Pu7l53DUTE3r17I4JBICL+DZfLHTlyJIv19AK8Hh4eY8aMQQSDQER8yrhx41xdXfXLEA779Onj7OyMCAaBiPgUCIqjR4/WB0UIh2PHjkUEQ0FEfAYIii4uLvpw6OjoiAiGAsc8olyiKStQKBXGySuNGDD9999/79lxdGayFBkcBtIJrNm2jlw2h14xAq88orJGe2pXUV6G3N1foJRrEf3g8hgVxSqtVuvfybLzAFtEGzASUS7V7F+b132YvYObOaI9fx4rMeMzw4fZIXqAUfzfvTK730QXYqGeLgPta+TaP0+UI3qAi4i3z1cGdLUSCEnf91O6vGr/8K5MLlUjGoCLiEWPavhCDiLUg4EqClWIBuAiokqpE9oSEetj52xWXU6LiIhLUVgj0eg0iFAPpUKjpcfwKFInI2ABEZGABUREAhYQEQlYQEQkYAERkYAFREQCFhARCVhARCRgARGRgAVERAIWkHNWUGbmgz79Ot+5cwsRjAcREYnsHWZ9MN/Fxa2JfbKyMmImDEX/jpGj+hcU5iNCQ5CiGQkthSOGv+BE+rS0FPTvKCoqFIsrEaERTFjE+6n3tmz5Lv1BqlKp8Gzj/eabsZ07ddNvOnwk8edfdhUU5PF4Zu1DO74bO9fBwbGx9VA0v/lWTPyaLSEhYaDLxoQ1t27/JZNJnZxcxoyeMGzoqB9+TPhx+2Z4OpTgsTNnw8rGDn3w1
5+3/bAxbvma+O9W5eQ8FFpaTZr05uBBI27euj57zjuww4SJwyf/Z9obU95BhGcx1aJZoVB8NP89Dpf71ar1G9ZtD2oXumjxnJKSYtiUlHTzq6+XjR41fuuWvXFffCuuqlz6+fwm1tdl5aqlpWUlXyxf8/3Wn0ZFx6z5dsWf16/EvPb6qFExoGzi/lPDho5u4tBsNlsqlWzfsWXpkpW/Hfw9KmrIN2viYFNIcNjiRXGwQ8LGHeNjpiDCc5hqRGSxWN98nWBnJ7KysoaHU6fM2L9/T/Ld230iB2Q9zODxeANfHQZauLq4LVm0orCoAPZpbH1dMrMeRI98LTCgHSy7Dh/j5xvg6OhsZmbG4/IYDIb+WGq1urFD67dOiJmiD8CDBo6AUJqRkda9e08+XwBrLC2F8GqI8BymKiLIpFKr4teufJCRJpFU60+KraoSw32HsM4gzfuzpkGZ2KlTN2cnF1tbuybW1yW8xyu79/wAL9itW0RoSIfAwOCXOrQeb29f/QJoB/fVkmpEeBGmWjTn5mbPmfuOUqlcuODzTRt3JmzYUbvJw8Pzu/ht0AretHkt1MlmvjvlXkpyE+vr8t9ZC6ZNjU1KujH3w5nRo/vDnhDhmn9oPRB3n3lMpkJtBqYaEc+cPaHRaD75eLn+V4dGRt2tPj6+nyxcBjtAdnDrtvULP571054jXC63wfV1nwjRbvTo8XArLy87cfLw1u/XW1vbjBs7qfmHJvwzTDUiqlRKaPnWxp6Tp576lJKSfPduEnpSjwwL6zT1jRmQNwGxGltf+0SJRHLy1FF9CIRSO+a1yUFBIdCmbv6hXwiZKLoxTFXEwIBg0OjosV/LykoTD+67n3oXQlfG40qb5Oq1yx8vmn3u/Om8/FzIsEBLwsnR2dHRqbH1ta8JNcj4tV9Cyxq25hfknTp9DNKHoCxssrCwhANBu7uwsKCJQzfxhoVP6otXrlyEV0CE5zDVojk8/JXXxv0nYVP8+g2ru3WNmD9v6c+/7Ny950cmkwnZQbVatXHjGkjECAQWwcHtV8TFg2STJk5tcH3tawoEgi9XfAcJwtlz3oYqIOQRIeEHrWzY1K/vwOMnDs35cMaE8VNgZWOH9vUNaOwN+/kFdu0avmHjN0VFBTPemYUIz4LLJEy/fJsb1kfk0IakNp7h0sGiNgHmgV2FqLVDuvgIWEBEJGABEZGABUREAhYQEQlYQEQkYAERkYAFREQCFhARCVhARCRgARGRgAVERAIWEBEJWICLiFYiro5BBo3Wh8dncXm0mAQBlw/JEzBL82oQ4VlyUqW2zlxEA3AR0TOQLy5WIkIdJGKV0JZj40BENCDu/nwLa9bVoyWI8D/O7i7oFS1C9ACv6zVfOVpeWaxy8jIXuZrR7crZehgMXVW5uqpMeeVwyaQFbaxEdLksHF4iAll3pek3JTUyTXlBoyW1UqlkPQFRgFajUapUBpuPQS6Xc7nc2s9iJmBxuAxnH7NuA+1YLAaiDdiJ+EKys7MPHDjwwQcfIGpYunTp+fPnly9f3r17d0Q9EokkLi4ODofojSmJKBaLCwsLnZycrKysEDXcu3fvk08+AdfDw8Pj4+ORAdm7d29oaGhgYCCiJSZTDystLY2Ojvby8qLOQmD37t1gIXo8IWLapUuXkAEZMmQIxMXKSprOoWgaIkJFCvw4c+YMVKcQZaSkpNy4cUO/DN7v2rULGRALC4sdOx5Po/Pw4cPc3FxEM0xAxDlz5kD9oWPHjohidu7cWVRUVPsQimkDB0XA2tra2dk5NjYWjo7oBO4i7tmzZ9iwYXw+H1EM/PC14VAPVEn1IcrA8Hi8gwcPQiEAy/QpqfEV8eLFi3APFkZGRiLq2b59O4RDrVar+x+w8v79+8hIdOr0eM4dCI3nzp1DNADTVjN8+8ePH//iiy+QwYGaIjQajBILGwT+QyZPnqxWq9ns1jxUCtOIyGQyjWIhhoCFcL969Wr41d9QSQAAD6ZJREFUz0StF7xELC8vnz59Oiz06tULEeowb948KCVqalrtACW8oj38369atQoRGgKKCCig9Q35iIgI1LrAJSIePnwY7pctW0ZpvtrUgWpijx49oA8mOTkZtS6wEHHhwoUCgQARmgHUnqHvEdKNsHzrVuu5fqCRRayoqID78ePHGyZH02pwc3t85cANGzYcPXoUtQqMKeKxY8cSExNhISQkBBFenoSEBOgYhIX8fJO/1qQxRbxw4cIbb7yBCP8CfXph9+7d27ZtQ6aMcUQ8ffo03JNBeC2FvjseFmQyGTJNDC2iSqXq1q1bWFgYIrQoU6dORU/6RXfu3IlMEIOKCJ25ZWVlkAmzs7NDBAqIioqCLxl6KU1u4L3hRIyLi6uqqnJycmrdfaZGZ/bs2e7u7pCOOHjwIDIdDOQEJGB9n4AI1KNvSt++fRvi4siRI5EpQLmIUExwuVwvL6/g4GBEMCCLFy/OzMyEhWvXrnXt2hXhDbVFM3wR0DT28fEhHSdGwdvbG+6vX7/+9ddfI7yhUETooTfWIOd/yfPXaDZpZs6cCZkK9OTUVYQrVIm4b9++v/76q0OHDsjUuHPnzvDhw1HromfPnuhJTwy2p2VRJSI0jaEHD5ka+oEtEyZMQK0R+B/Td+5jCFWnCkDiGlKGkKxBpsP3339fWlo6b9481EqBTycUCik9JfcfY3pTjlBEfHw8i8WKjY1FBGNAYWMFMqtGPAvupYBku5WVVau3cO7cudj+IhSK6OzsbBIjNxctWgSZ9tdffx21dqBohioTwhIKi2b1Eww2v9s/A8J2//79Bw8ejGgAqSNiyttvvw0N5N69eyOCsaG2ZyUyMlKpxHRm7IkTJ06fPp1WFtK0jgj4+flBXzPCj+joaKga6qf1oA80rSNiS1RU1JYtWzw8PBDNoG8dERorWq0Wn08O7wfK4l9//ZWMzMUNaovm7OxsqIohPBCLxREREadPn6athfStI3p7eysUChxmbCkoKIB64dWrVzFPJ1EKqSMamQcPHsyaNevQoUOI3tA6j1hVVcVkMvWD140C9O5AD97evXsRAWMoP3nq0qVLK1asQEYCjr527VpioR761hGB0NDQM2fODB06FJqrBpiQvS4nT54EBbdu3YoIT6BjHRE6LZKSkuqNube1tYXoaBgdExMTr1y5YsRgjCE41xGpioibNm1ycXGptxJarBAgEfXs3Lnzzp07xMJ6iEQiPC1ElBbN7777ro2NTe1DCL3t2rUzwNn1CQkJRUVF0IOHCM9C0zpi3759hwwZwuH8faFXUFB/LhmlrF69msFgzJ49GxGeg9Z5xBkzZly7dg3kgP6M9evX+/j4IMr4/PPPIYWOT18ObtCxjlhLfHy8h4cH9DhbW1tTauH8+fNDQkKIhU2Acx2xWTU2tUorl2jRP4Tx8UfLlixZ0ql9z+oKqk5cX7J4yaDh/QYMGIAIjQN1xGnTpgUEBCD8eEHRnHKtKumCuLxQaW5ByeXiWwT4CFyBtiJf5xUs6NjX2tnLHBHqAPkyqBrBtwT3+jWw7Ofnt2fPHoQNTUXEayfKS/NVvUY5
WdpyEPbAlysuUf3+S1H4ELs2gZRfRNKE8Pf3T01NhY7W2jXQ4/rWW28hnGi0jnj1WLm4RN0r2tEkLATg393agTv0LXd4549STHUGXyqIiYkxN3+mlGjTpk2/fv0QTjQsYkWxsjRP0X2oAzJB+k10vnkW04k1jMKIESNcXV1rH/L5fAzn0G9YRLAQahTINOHyWJUlqqpyTBNmRgGSCbXtZchw9enTB2FGwyJKxBp7dxMeQOruL6goJiI+BYKi/hpBAoFgypQpCD8aFlGl0Kpq/nG+xvhIKlU6DZnT5xkgKEIvF4RDPC/yReZVx5FH96WQc5VVaZRybY1cg1oCAeoe2e496O4/tbsItQQCIVur0cG9QMhy8jKztPlXjVoiIkakXq9Kuyl9dE/q4idUqXQsNovFYSNmi2UtuvYYAvfVLZRRkNYw1EqVNlup0+qq9peaC1htwwTtwoUWVv/kDRMRsSD9ZvWFxDIbFwGLJ2g3wL4282wqOPgiebUiJ0t271q+VxC/50g7Nufleo+JiEZGo9Ed3loorUZu7Z255ib8c5hb8uAm8rIpzxFvWpAVOdY+qJuw+U8nIhqT4pyafWtyfbq5CN15qLVg624Ftzt/lJTkKXqPsm/ms3C5gj0NEZcpj2wrbtcf6vmtx8JaHP3ty0qZUN9o5v5ERONQ+KgmcX2hZxdX1HqxdbcuLkRHfyxszs5ERCOgVmn3r81r07k1W6jHro21TMq8furFPa5ERCNw+Psin+6t30I9dl52j1IVOenSpncjIhqau3+IpVIGT2AaY5paBL5IeO6XF1QWiYiG5tJv5Q7etohOmAt5TDYbcqVN7IORiEs+nTdn7gzUqkm+LLZrY8nmYTrc/Xby6bmLukmllailsfOyvXulqSsBtpiIBxJ/WrHyU0RokvvXJTwBHefF4/E55YXKiqJGJ1RvMRHT0nCcKxsrVAptSU6NhR1NT6kRiPiZdxoNii3TszJr9vTbt2/AwvHjhzYl7PRt63/nzq3NW78DO6HbNDAg+K233gsMaKff+fCRxJ/27cjPzzU353frGj7jnf/a2tafwhX2+fmXXQUFeTyeWfvQju/GznVwcEQmzsMUqcjLElHGzaQT5y7tKirJ4vH4HUKiBvWfweU+jr7b9yyEvmt/3x5nz28XV5c4iNpED53bxj0EPe5gVB888s2NpGM6rTbIv2db786IMizt+YXZjVYTWyYiLvtstZ9vQN8+UYn7T3l7tc3JeTR33kx7kcO6tT98F7/NnM+f++GM4uLHo49OnDj81dfLogYM+X7L3s8+XZWWfn/Bwg/qnUmYlHQT9hk9avzWLXvjvvhWXFW59PP5yPQRl6g1KqpGMyTfO7dz3yK/tl3nxO54LXpR0t0zP/8ap9/EYrGzHt3Ozrk7a+b2Tz86xudb7d2/TL/pzPkfr15PHD5o1n9nbvfyDDt17ntEGRweuyBT3tjWlhHRwsKCxWZzuFwrK2sWi3Xw158h2i2Y/5mPjy/cPl6wTK1WHz/xeMLWfT/vjIjoPXHCG+7ubcLCOr337ofgYnLy7bqvlvUwg8fjDXx1mKuLW1Bg8JJFK2JnzkGmj6RSTV0z5cyF7d6eHQcPmCmycw/0Cx8SFXvj9rFK8d9DD5VKOdjG45pDjOwYOrC49KFS+Xg+6b9uHw0O6t214zB4VnjX0X4+FM4JwzFj10gbHVtJSas5LT0FAmTtfEt8Ph+0y8hIAx0zMtODAkNq9/T3D4L7BxlpdZ/eIawzFOjvz5p26PCBgsJ8KLhBR2T6yCQaikTUarW5+SkQDmvXgJRwX1D4QP8QPNMX0wDf/PGgGJm8Sq1WlZbluLsG1T7Lw60dohKegCWtavgUDkpG38hkUjtbUd01fL4AVspr5FAKw/LT9eaPT0CWy58Zq+nh4QkF+u69P27avLZ69fLAwGCoI7YCF6mbZUilqtFqNSfObD559plZSauqS/ULbPbz4yp0ECbhD6fOJqhcIirRaXSNDbWkRESBwEIqfaZ9BA9BTXMzcyaTCUY+Xf9kGfav9wpQoH+ycJlGo4FGz9Zt6xd+POunPUewnbelmVhYsUpKWmbcfz04HDOoCPbs/lq3TsOfOaKgqcw550mMlCue/lJyeVM5538JxCBljZZv2bByLVk017Y5/P2CUtNSamdAq5ZUZ2c/DAh4PDliWx+/O8lPr517724S+l8BXUtKSvLdJ+uhugn1yKlvzBCLK8vLmzugCFssrNlqJSUiwr+3q3NARWWBg72n/mZr48pksvn8poamcthcG2vngsL02jVpGdcQZagVGjNBozWTFhPR0sLywYPU9AepIM2IEWMVipqVX30GzefMzAfLln8MMe/VqKGw29ixk65cuQjpm8LCgpu3rq9d91X79h0DnhXx6rXLHy+afe786bz8XHjB/fv3ODk6Ozo6IRPH2p7DZlF1bmRkz0l37p2FVnBxyaO8/NRdPy9Zt2V6Tc0LhhpAlgea21euJ0Jt8tylnfkFaYgylHK1s3ejOdQWK5qjo2PiVix+/4M3l366qmuXHqu+XLdpy9pp08dDVAsJDvvm6wRr68ezx/bvNxAcBRE3b/kO7OwZEfn22x/Ue6lJE6dCPXrjxjWlZSWwT3Bw+xVx8SZ3GsfzeLYTHPuxUOQtQhQQ2q7P+NFLz17Yfvz0JjMzC0+P0BlT15uZCZp+1oC+06SyykPH4rU6baBfxJCod7fvXQDLiAKkpVLf0EaHADc8G9i14+XQum8faap982d257fvZQU/PMKMA+vy2UJLSxEd54jKuJwzZparlV3Dw47I6BuDEtDVQiFRIPpRI1GK3HiNWYjIyVMGJrCL8I9DD4WOFlzzhn+S5JTze/YvbXCTwNxKKhc3uKl7p5FDB76HWoisR7e27mi4BwGSREwGEzVUTerRZRRk0VEjlGaW9xxmjRqHiGhoeo20+/N0hUu7hmda8/PpOnvm/zW4CfpCapPS9eDxWrIS4uYS2Nh7UKkULBan7lSLzXkP0ooaDkfnGdTUmyQiGhrfDpbpt6Q11YoGT94D1Wy5LsiocDg8W5uWfA81FdV9xr6giUbqiEZg8BtOmdfytVpaTBNVlFbi38Hc4UWTyxERjcP4eR6ZV3JRa6covczemRkcbvXCPYmIxsHGgTvhI9f0i9katQlP/9c0JRllPkGcvuOaNe8wEdFo8C04r81xAxelFXLUutCqtXnJhZ5+7M79bZr5FCKiMRHact750oejlebeLpBXtZL8YklWRer57J5DrLtEvUSHCGk1G5+oSY45abLzB0p5Fjwmlyu0F2B7ml8TSMrkklJZVbGk/SvWY2e+9CXGiIhY4O7Hn/iRx6N70rRb0sxreTbO5soaLZvLZnHZDCamnexMFlMlV2pUGqTTVhTIoV0c1EkQ1N3zZWdG1ENExIg2QYI2T7K+Rdk1T6YuVtfItAoZJSPH/j3mFjoGky0Q8vhCtrOXE4f7r6p5REQccfQwc/RAtKJhEblmDC0y4WFXAmsOk2Xyw8ZoRcPh1NKGU/LIhHMK2SkSWyfTPq+AbjQsooM7z3THoco
lapErz8Ka1DpMiUYjomtbs/O/NGuuT9w4tSO/y4Dm5lEJmNDU9Zrv/iFOvyVp39vOxpHLYuOe+q6RaapKlZcOFg+c7OjgQceJjkyaF1w4POuu9Na5ysKsGhYb66LaSsSpKld5Bgk6D7CBblxEMDVeIGItCjnWffM6LTITkO5KE6a5IhIIlEKalgQsICISsICISMACIiIBC4iIBCwgIhKw4P8BAAD//2v4e7oAAAAGSURBVAMA1x7mMDWkAPIAAAAASUVORK5CYII=",
|
881 |
+
"text/plain": [
|
882 |
+
"<IPython.core.display.Image object>"
|
883 |
+
]
|
884 |
+
},
|
885 |
+
"metadata": {},
|
886 |
+
"output_type": "display_data"
|
887 |
+
}
|
888 |
+
],
|
889 |
+
"source": [
|
890 |
+
"from IPython.display import Image, display\n",
|
891 |
+
"\n",
|
892 |
+
"display(Image(graph.get_graph(xray=True).draw_mermaid_png()))"
|
893 |
+
]
|
894 |
+
},
|
895 |
+
{
|
896 |
+
"cell_type": "code",
|
897 |
+
"execution_count": null,
|
898 |
+
"id": "5987d58c",
|
899 |
+
"metadata": {},
|
900 |
+
"outputs": [],
|
901 |
+
"source": [
|
902 |
+
"question = \"\"\n",
|
903 |
+
"messages = [HumanMessage(content=question)]\n",
|
904 |
+
"messages = graph.invoke({\"messages\": messages})"
|
905 |
+
]
|
906 |
+
},
|
907 |
+
{
|
908 |
+
"cell_type": "code",
|
909 |
+
"execution_count": null,
|
910 |
+
"id": "330cbf17",
|
911 |
+
"metadata": {},
|
912 |
+
"outputs": [],
|
913 |
+
"source": [
|
914 |
+
"for m in messages['messages']:\n",
|
915 |
+
" m.pretty_print()"
|
916 |
+
]
|
917 |
+
}
|
918 |
+
],
|
919 |
+
"metadata": {
|
920 |
+
"kernelspec": {
|
921 |
+
"display_name": "base",
|
922 |
+
"language": "python",
|
923 |
+
"name": "python3"
|
924 |
+
},
|
925 |
+
"language_info": {
|
926 |
+
"codemirror_mode": {
|
927 |
+
"name": "ipython",
|
928 |
+
"version": 3
|
929 |
+
},
|
930 |
+
"file_extension": ".py",
|
931 |
+
"mimetype": "text/x-python",
|
932 |
+
"name": "python",
|
933 |
+
"nbconvert_exporter": "python",
|
934 |
+
"pygments_lexer": "ipython3",
|
935 |
+
"version": "3.12.7"
|
936 |
+
}
|
937 |
+
},
|
938 |
+
"nbformat": 4,
|
939 |
+
"nbformat_minor": 5
|
940 |
+
}
|