{
"cells": [
{
"cell_type": "markdown",
"id": "d0cc4adf",
"metadata": {},
"source": [
"### Question data"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "14e3f417",
"metadata": {},
"outputs": [],
"source": [
"# Load metadata.jsonl\n",
"import json\n",
"# Load the metadata.jsonl file\n",
"with open('metadata.jsonl', 'r') as jsonl_file:\n",
" json_list = list(jsonl_file)\n",
"\n",
"json_QA = []\n",
"for json_str in json_list:\n",
" json_data = json.loads(json_str)\n",
" json_QA.append(json_data)"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "5e2da6fc",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"==================================================\n",
"Task ID: db4fd70a-2d37-40ea-873f-9433dc5e301f\n",
"Question: As of May 2023, how many stops are between South Station and Windsor Gardens on MBTA’s Franklin-Foxboro line (not included)?\n",
"Level: 2\n",
"Final Answer: 10\n",
"Annotator Metadata: \n",
" ├── Steps: \n",
" │ ├── 1. Search the web for “MBTA Franklin Foxboro line”.\n",
" │ ├── 2. Click on top result, on the MBTA website.\n",
" │ ├── 3. Scroll down on the list of stops, and count the current stops between South Station and Windsor Gardens.\n",
" │ ├── 4. Click the “Schedule & Maps” tab to view a map of the route.\n",
" │ ├── 5. Examine the map to confirm that the order of stops is the same as on the listing of stops.\n",
" │ ├── 6. Return to web search.\n",
" │ ├── 7. Click on Wikipedia article for Franklin line.\n",
" │ ├── 8. Read the article to check whether any stops were added or removed since the date given in the question.\n",
" │ ├── 9. Search the web for “MBTA Franklin Foxboro Line changes”.\n",
" │ ├── 10. Click News tab.\n",
" │ ├── 11. Click article about rail schedule changes.\n",
" │ ├── 12. Confirm that none of the changes affect the answer to the question.\n",
" ├── Number of steps: 12\n",
" ├── How long did this take?: 5-10 minutes\n",
" ├── Tools:\n",
" │ ├── 1. Search engine\n",
" │ ├── 2. Web browser\n",
" └── Number of tools: 2\n",
"==================================================\n"
]
}
],
"source": [
"# randomly select 3 samples\n",
"# {\"task_id\": \"c61d22de-5f6c-4958-a7f6-5e9707bd3466\", \"Question\": \"A paper about AI regulation that was originally submitted to arXiv.org in June 2022 shows a figure with three axes, where each axis has a label word at both ends. Which of these words is used to describe a type of society in a Physics and Society article submitted to arXiv.org on August 11, 2016?\", \"Level\": 2, \"Final answer\": \"egalitarian\", \"file_name\": \"\", \"Annotator Metadata\": {\"Steps\": \"1. Go to arxiv.org and navigate to the Advanced Search page.\\n2. Enter \\\"AI regulation\\\" in the search box and select \\\"All fields\\\" from the dropdown.\\n3. Enter 2022-06-01 and 2022-07-01 into the date inputs, select \\\"Submission date (original)\\\", and submit the search.\\n4. Go through the search results to find the article that has a figure with three axes and labels on each end of the axes, titled \\\"Fairness in Agreement With European Values: An Interdisciplinary Perspective on AI Regulation\\\".\\n5. Note the six words used as labels: deontological, egalitarian, localized, standardized, utilitarian, and consequential.\\n6. Go back to arxiv.org\\n7. Find \\\"Physics and Society\\\" and go to the page for the \\\"Physics and Society\\\" category.\\n8. Note that the tag for this category is \\\"physics.soc-ph\\\".\\n9. Go to the Advanced Search page.\\n10. Enter \\\"physics.soc-ph\\\" in the search box and select \\\"All fields\\\" from the dropdown.\\n11. Enter 2016-08-11 and 2016-08-12 into the date inputs, select \\\"Submission date (original)\\\", and submit the search.\\n12. Search for instances of the six words in the results to find the paper titled \\\"Phase transition from egalitarian to hierarchical societies driven by competition between cognitive and social constraints\\\", indicating that \\\"egalitarian\\\" is the correct answer.\", \"Number of steps\": \"12\", \"How long did this take?\": \"8 minutes\", \"Tools\": \"1. Web browser\\n2. Image recognition tools (to identify and parse a figure with three axes)\", \"Number of tools\": \"2\"}}\n",
"\n",
"import random\n",
"# random.seed(42)\n",
"random_samples = random.sample(json_QA, 1)\n",
"for sample in random_samples:\n",
" print(\"=\" * 50)\n",
" print(f\"Task ID: {sample['task_id']}\")\n",
" print(f\"Question: {sample['Question']}\")\n",
" print(f\"Level: {sample['Level']}\")\n",
" print(f\"Final Answer: {sample['Final answer']}\")\n",
" print(f\"Annotator Metadata: \")\n",
" print(f\" ├── Steps: \")\n",
" for step in sample['Annotator Metadata']['Steps'].split('\\n'):\n",
" print(f\" │ ├── {step}\")\n",
" print(f\" ├── Number of steps: {sample['Annotator Metadata']['Number of steps']}\")\n",
" print(f\" ├── How long did this take?: {sample['Annotator Metadata']['How long did this take?']}\")\n",
" print(f\" ├── Tools:\")\n",
" for tool in sample['Annotator Metadata']['Tools'].split('\\n'):\n",
" print(f\" │ ├── {tool}\")\n",
" print(f\" └── Number of tools: {sample['Annotator Metadata']['Number of tools']}\")\n",
"print(\"=\" * 50)"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "4bb02420",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Requirement already satisfied: langchain-huggingface in /opt/anaconda3/lib/python3.12/site-packages (0.2.0)\n",
"Requirement already satisfied: langchain-core<1.0.0,>=0.3.59 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-huggingface) (0.3.60)\n",
"Requirement already satisfied: tokenizers>=0.19.1 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-huggingface) (0.21.1)\n",
"Requirement already satisfied: transformers>=4.39.0 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-huggingface) (4.51.3)\n",
"Requirement already satisfied: sentence-transformers>=2.6.0 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-huggingface) (4.1.0)\n",
"Requirement already satisfied: huggingface-hub>=0.30.2 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-huggingface) (0.31.4)\n",
"Requirement already satisfied: langsmith<0.4,>=0.1.126 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (0.3.42)\n",
"Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (8.2.3)\n",
"Requirement already satisfied: jsonpatch<2.0,>=1.33 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (1.33)\n",
"Requirement already satisfied: PyYAML>=5.3 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (6.0.1)\n",
"Requirement already satisfied: packaging<25,>=23.2 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (24.1)\n",
"Requirement already satisfied: typing-extensions>=4.7 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (4.13.2)\n",
"Requirement already satisfied: pydantic>=2.7.4 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (2.11.4)\n",
"Requirement already satisfied: jsonpointer>=1.9 in /opt/anaconda3/lib/python3.12/site-packages (from jsonpatch<2.0,>=1.33->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (2.1)\n",
"Requirement already satisfied: httpx<1,>=0.23.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (0.27.0)\n",
"Requirement already satisfied: orjson<4.0.0,>=3.9.14 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (3.10.18)\n",
"Requirement already satisfied: requests<3,>=2 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (2.32.3)\n",
"Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (1.0.0)\n",
"Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (0.23.0)\n",
"Requirement already satisfied: anyio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (4.2.0)\n",
"Requirement already satisfied: certifi in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (2025.4.26)\n",
"Requirement already satisfied: httpcore==1.* in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (1.0.2)\n",
"Requirement already satisfied: idna in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (3.7)\n",
"Requirement already satisfied: sniffio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (1.3.0)\n",
"Requirement already satisfied: h11<0.15,>=0.13 in /opt/anaconda3/lib/python3.12/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (0.14.0)\n",
"Requirement already satisfied: annotated-types>=0.6.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic>=2.7.4->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (0.6.0)\n",
"Requirement already satisfied: pydantic-core==2.33.2 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic>=2.7.4->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (2.33.2)\n",
"Requirement already satisfied: typing-inspection>=0.4.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic>=2.7.4->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (0.4.0)\n",
"Requirement already satisfied: charset-normalizer<4,>=2 in /opt/anaconda3/lib/python3.12/site-packages (from requests<3,>=2->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (3.3.2)\n",
"Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/anaconda3/lib/python3.12/site-packages (from requests<3,>=2->langsmith<0.4,>=0.1.126->langchain-core<1.0.0,>=0.3.59->langchain-huggingface) (2.2.3)\n",
"Requirement already satisfied: filelock in /opt/anaconda3/lib/python3.12/site-packages (from huggingface-hub>=0.30.2->langchain-huggingface) (3.13.1)\n",
"Requirement already satisfied: fsspec>=2023.5.0 in /opt/anaconda3/lib/python3.12/site-packages (from huggingface-hub>=0.30.2->langchain-huggingface) (2024.6.1)\n",
"Requirement already satisfied: tqdm>=4.42.1 in /opt/anaconda3/lib/python3.12/site-packages (from huggingface-hub>=0.30.2->langchain-huggingface) (4.66.5)\n",
"Requirement already satisfied: torch>=1.11.0 in /opt/anaconda3/lib/python3.12/site-packages (from sentence-transformers>=2.6.0->langchain-huggingface) (2.7.0)\n",
"Requirement already satisfied: scikit-learn in /opt/anaconda3/lib/python3.12/site-packages (from sentence-transformers>=2.6.0->langchain-huggingface) (1.5.1)\n",
"Requirement already satisfied: scipy in /opt/anaconda3/lib/python3.12/site-packages (from sentence-transformers>=2.6.0->langchain-huggingface) (1.13.1)\n",
"Requirement already satisfied: Pillow in /opt/anaconda3/lib/python3.12/site-packages (from sentence-transformers>=2.6.0->langchain-huggingface) (10.4.0)\n",
"Requirement already satisfied: numpy>=1.17 in /opt/anaconda3/lib/python3.12/site-packages (from transformers>=4.39.0->langchain-huggingface) (1.26.4)\n",
"Requirement already satisfied: regex!=2019.12.17 in /opt/anaconda3/lib/python3.12/site-packages (from transformers>=4.39.0->langchain-huggingface) (2024.9.11)\n",
"Requirement already satisfied: safetensors>=0.4.3 in /opt/anaconda3/lib/python3.12/site-packages (from transformers>=4.39.0->langchain-huggingface) (0.5.3)\n",
"Requirement already satisfied: setuptools in /opt/anaconda3/lib/python3.12/site-packages (from torch>=1.11.0->sentence-transformers>=2.6.0->langchain-huggingface) (75.1.0)\n",
"Requirement already satisfied: sympy>=1.13.3 in /opt/anaconda3/lib/python3.12/site-packages (from torch>=1.11.0->sentence-transformers>=2.6.0->langchain-huggingface) (1.14.0)\n",
"Requirement already satisfied: networkx in /opt/anaconda3/lib/python3.12/site-packages (from torch>=1.11.0->sentence-transformers>=2.6.0->langchain-huggingface) (3.3)\n",
"Requirement already satisfied: jinja2 in /opt/anaconda3/lib/python3.12/site-packages (from torch>=1.11.0->sentence-transformers>=2.6.0->langchain-huggingface) (3.1.4)\n",
"Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/anaconda3/lib/python3.12/site-packages (from sympy>=1.13.3->torch>=1.11.0->sentence-transformers>=2.6.0->langchain-huggingface) (1.3.0)\n",
"Requirement already satisfied: MarkupSafe>=2.0 in /opt/anaconda3/lib/python3.12/site-packages (from jinja2->torch>=1.11.0->sentence-transformers>=2.6.0->langchain-huggingface) (2.1.3)\n",
"Requirement already satisfied: joblib>=1.2.0 in /opt/anaconda3/lib/python3.12/site-packages (from scikit-learn->sentence-transformers>=2.6.0->langchain-huggingface) (1.4.2)\n",
"Requirement already satisfied: threadpoolctl>=3.1.0 in /opt/anaconda3/lib/python3.12/site-packages (from scikit-learn->sentence-transformers>=2.6.0->langchain-huggingface) (3.5.0)\n",
"Note: you may need to restart the kernel to use updated packages.\n",
"Requirement already satisfied: supabase in /opt/anaconda3/lib/python3.12/site-packages (2.15.1)\n",
"Requirement already satisfied: gotrue<3.0.0,>=2.11.0 in /opt/anaconda3/lib/python3.12/site-packages (from supabase) (2.12.0)\n",
"Requirement already satisfied: httpx<0.29,>=0.26 in /opt/anaconda3/lib/python3.12/site-packages (from supabase) (0.27.0)\n",
"Requirement already satisfied: postgrest<1.1,>0.19 in /opt/anaconda3/lib/python3.12/site-packages (from supabase) (1.0.2)\n",
"Requirement already satisfied: realtime<2.5.0,>=2.4.0 in /opt/anaconda3/lib/python3.12/site-packages (from supabase) (2.4.3)\n",
"Requirement already satisfied: storage3<0.12,>=0.10 in /opt/anaconda3/lib/python3.12/site-packages (from supabase) (0.11.3)\n",
"Requirement already satisfied: supafunc<0.10,>=0.9 in /opt/anaconda3/lib/python3.12/site-packages (from supabase) (0.9.4)\n",
"Requirement already satisfied: pydantic<3,>=1.10 in /opt/anaconda3/lib/python3.12/site-packages (from gotrue<3.0.0,>=2.11.0->supabase) (2.11.4)\n",
"Requirement already satisfied: pyjwt<3.0.0,>=2.10.1 in /opt/anaconda3/lib/python3.12/site-packages (from gotrue<3.0.0,>=2.11.0->supabase) (2.10.1)\n",
"Requirement already satisfied: pytest-mock<4.0.0,>=3.14.0 in /opt/anaconda3/lib/python3.12/site-packages (from gotrue<3.0.0,>=2.11.0->supabase) (3.14.0)\n",
"Requirement already satisfied: anyio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<0.29,>=0.26->supabase) (4.2.0)\n",
"Requirement already satisfied: certifi in /opt/anaconda3/lib/python3.12/site-packages (from httpx<0.29,>=0.26->supabase) (2025.4.26)\n",
"Requirement already satisfied: httpcore==1.* in /opt/anaconda3/lib/python3.12/site-packages (from httpx<0.29,>=0.26->supabase) (1.0.2)\n",
"Requirement already satisfied: idna in /opt/anaconda3/lib/python3.12/site-packages (from httpx<0.29,>=0.26->supabase) (3.7)\n",
"Requirement already satisfied: sniffio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<0.29,>=0.26->supabase) (1.3.0)\n",
"Requirement already satisfied: h11<0.15,>=0.13 in /opt/anaconda3/lib/python3.12/site-packages (from httpcore==1.*->httpx<0.29,>=0.26->supabase) (0.14.0)\n",
"Requirement already satisfied: h2<5,>=3 in /opt/anaconda3/lib/python3.12/site-packages (from httpx[http2]<0.29,>=0.26->gotrue<3.0.0,>=2.11.0->supabase) (4.2.0)\n",
"Requirement already satisfied: hyperframe<7,>=6.1 in /opt/anaconda3/lib/python3.12/site-packages (from h2<5,>=3->httpx[http2]<0.29,>=0.26->gotrue<3.0.0,>=2.11.0->supabase) (6.1.0)\n",
"Requirement already satisfied: hpack<5,>=4.1 in /opt/anaconda3/lib/python3.12/site-packages (from h2<5,>=3->httpx[http2]<0.29,>=0.26->gotrue<3.0.0,>=2.11.0->supabase) (4.1.0)\n",
"Requirement already satisfied: deprecation<3.0.0,>=2.1.0 in /opt/anaconda3/lib/python3.12/site-packages (from postgrest<1.1,>0.19->supabase) (2.1.0)\n",
"Requirement already satisfied: packaging in /opt/anaconda3/lib/python3.12/site-packages (from deprecation<3.0.0,>=2.1.0->postgrest<1.1,>0.19->supabase) (24.1)\n",
"Requirement already satisfied: annotated-types>=0.6.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=1.10->gotrue<3.0.0,>=2.11.0->supabase) (0.6.0)\n",
"Requirement already satisfied: pydantic-core==2.33.2 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=1.10->gotrue<3.0.0,>=2.11.0->supabase) (2.33.2)\n",
"Requirement already satisfied: typing-extensions>=4.12.2 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=1.10->gotrue<3.0.0,>=2.11.0->supabase) (4.13.2)\n",
"Requirement already satisfied: typing-inspection>=0.4.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=1.10->gotrue<3.0.0,>=2.11.0->supabase) (0.4.0)\n",
"Requirement already satisfied: pytest>=6.2.5 in /opt/anaconda3/lib/python3.12/site-packages (from pytest-mock<4.0.0,>=3.14.0->gotrue<3.0.0,>=2.11.0->supabase) (7.4.4)\n",
"Requirement already satisfied: aiohttp<4.0.0,>=3.11.18 in /opt/anaconda3/lib/python3.12/site-packages (from realtime<2.5.0,>=2.4.0->supabase) (3.11.18)\n",
"Requirement already satisfied: python-dateutil<3.0.0,>=2.8.1 in /opt/anaconda3/lib/python3.12/site-packages (from realtime<2.5.0,>=2.4.0->supabase) (2.9.0.post0)\n",
"Requirement already satisfied: websockets<15,>=11 in /opt/anaconda3/lib/python3.12/site-packages (from realtime<2.5.0,>=2.4.0->supabase) (14.2)\n",
"Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (2.4.0)\n",
"Requirement already satisfied: aiosignal>=1.1.2 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (1.2.0)\n",
"Requirement already satisfied: attrs>=17.3.0 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (23.1.0)\n",
"Requirement already satisfied: frozenlist>=1.1.1 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (1.4.0)\n",
"Requirement already satisfied: multidict<7.0,>=4.5 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (6.0.4)\n",
"Requirement already satisfied: propcache>=0.2.0 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (0.3.1)\n",
"Requirement already satisfied: yarl<2.0,>=1.17.0 in /opt/anaconda3/lib/python3.12/site-packages (from aiohttp<4.0.0,>=3.11.18->realtime<2.5.0,>=2.4.0->supabase) (1.20.0)\n",
"Requirement already satisfied: six>=1.5 in /opt/anaconda3/lib/python3.12/site-packages (from python-dateutil<3.0.0,>=2.8.1->realtime<2.5.0,>=2.4.0->supabase) (1.16.0)\n",
"Requirement already satisfied: strenum<0.5.0,>=0.4.15 in /opt/anaconda3/lib/python3.12/site-packages (from supafunc<0.10,>=0.9->supabase) (0.4.15)\n",
"Requirement already satisfied: iniconfig in /opt/anaconda3/lib/python3.12/site-packages (from pytest>=6.2.5->pytest-mock<4.0.0,>=3.14.0->gotrue<3.0.0,>=2.11.0->supabase) (1.1.1)\n",
"Requirement already satisfied: pluggy<2.0,>=0.12 in /opt/anaconda3/lib/python3.12/site-packages (from pytest>=6.2.5->pytest-mock<4.0.0,>=3.14.0->gotrue<3.0.0,>=2.11.0->supabase) (1.0.0)\n",
"Note: you may need to restart the kernel to use updated packages.\n"
]
}
],
"source": [
"%pip install langchain-huggingface\n",
"%pip install supabase\n",
"\n",
"### build a vector database based on the metadata.jsonl\n",
"# https://python.langchain.com/docs/integrations/vectorstores/supabase/\n",
"import os\n",
"from dotenv import load_dotenv\n",
"from langchain_huggingface import HuggingFaceEmbeddings\n",
"from langchain_community.vectorstores import SupabaseVectorStore\n",
"from supabase.client import Client, create_client\n",
"\n",
"\n",
"load_dotenv()\n",
"embeddings = HuggingFaceEmbeddings(model_name=\"sentence-transformers/all-mpnet-base-v2\") # dim=768\n",
"\n",
"supabase_url = \"https://vqscqyeakhfsvaqonbmu.supabase.co\"\n",
"supabase_key = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InZxc2NxeWVha2hmc3ZhcW9uYm11Iiwicm9sZSI6ImFub24iLCJpYXQiOjE3NDc4NjQwNDMsImV4cCI6MjA2MzQ0MDA0M30.kDTCuOGCuqPyZilqBu4kYEbUrOC42SAVThf3nrH8ypM\"\n",
"\n",
"if not supabase_url or not supabase_key:\n",
"\traise ValueError(\"SUPABASE_URL and SUPABASE_SERVICE_KEY must be set in your environment or .env file.\")\n",
"\n",
"supabase: Client = create_client(supabase_url, supabase_key)"
]
},
{
"cell_type": "code",
"execution_count": 42,
"id": "a070b955",
"metadata": {},
"outputs": [],
"source": [
"# wrap the metadata.jsonl's questions and answers into a list of document\n",
"from langchain.schema import Document\n",
"docs = []\n",
"for sample in json_QA:\n",
" content = f\"Question : {sample['Question']}\\n\\nFinal answer : {sample['Final answer']}\"\n",
" doc = {\n",
" \"content\" : content,\n",
" \"metadata\" : { # meatadata\n",
" \"source\" : sample['task_id']\n",
" },\n",
" \"embedding\" : embeddings.embed_query(content),\n",
" }\n",
" docs.append(doc)\n",
"\n",
"# upload the documents to the vector database\n",
"try:\n",
" response = (\n",
" supabase.table(\"documents\")\n",
" .insert(docs)\n",
" .execute()\n",
" )\n",
"except Exception as exception:\n",
" print(\"Error inserting data into Supabase:\", exception)\n",
"\n",
"# ALTERNATIVE : Save the documents (a list of dict) into a csv file, and manually upload it to Supabase\n",
"# import pandas as pd\n",
"# df = pd.DataFrame(docs)\n",
"# df.to_csv('supabase_docs.csv', index=False)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "22c56a2f",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "3c59150d",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 18,
"id": "77fb9dbb",
"metadata": {},
"outputs": [],
"source": [
"# add items to vector database\n",
"vector_store = SupabaseVectorStore(\n",
" client=supabase,\n",
" embedding= embeddings,\n",
" table_name=\"documents\",\n",
" query_name=\"match_documents_langchain\",\n",
")\n",
"retriever = vector_store.as_retriever()"
]
},
{
"cell_type": "code",
"execution_count": 63,
"id": "12a05971",
"metadata": {},
"outputs": [
{
"ename": "IndexError",
"evalue": "list index out of range",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[63], line 34\u001b[0m\n\u001b[1;32m 32\u001b[0m query \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mOn June 6, 2023, an article by Carolyn Collins Petersen was published in Universe Today. This article mentions a team that produced a paper about their observations, linked at the bottom of the article. Find this paper. Under what NASA award number was the work performed by R. G. Arendt supported by?\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 33\u001b[0m docs \u001b[38;5;241m=\u001b[39m retriever\u001b[38;5;241m.\u001b[39minvoke(query)\n\u001b[0;32m---> 34\u001b[0m docs[\u001b[38;5;241m0\u001b[39m]\n",
"\u001b[0;31mIndexError\u001b[0m: list index out of range"
]
}
],
"source": [
"# Before running this cell, make sure you have created the required function in your Supabase database.\n",
"# Run the following SQL in your Supabase SQL editor (replace 'documents' and 'embedding' if your table/column names differ):\n",
"\n",
"\"\"\"\n",
"create or replace function public.match_documents_langchain(\n",
"\tquery_embedding vector(768),\n",
"\tmatch_count int default null\n",
")\n",
"returns table (\n",
"\tcontent text,\n",
"\tmetadata text,\n",
"\tembedding vector(768),\n",
"\tsimilarity float\n",
")\n",
"language plpgsql\n",
"as $$\n",
"begin\n",
"\treturn query\n",
"\tselect\n",
"\t\tdocuments.content,\n",
"\t\tdocuments.metadata::text,\n",
"\t\tdocuments.embedding,\n",
"\t\t1 - (documents.embedding <=> query_embedding) as similarity\n",
"\tfrom documents\n",
"\torder by documents.embedding <=> query_embedding\n",
"\tlimit coalesce(match_count, 5);\n",
"end;\n",
"$$;\n",
"\"\"\"\n",
"\n",
"# After creating the function, you can run your code:\n",
"query = \"On June 6, 2023, an article by Carolyn Collins Petersen was published in Universe Today. This article mentions a team that produced a paper about their observations, linked at the bottom of the article. Find this paper. Under what NASA award number was the work performed by R. G. Arendt supported by?\"\n",
"docs = retriever.invoke(query)\n",
"docs[0]"
]
},
{
"cell_type": "code",
"execution_count": 64,
"id": "1eae5ba4",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"List of tools used in all samples:\n",
"Total number of tools used: 83\n",
" ├── web browser: 107\n",
" ├── image recognition tools (to identify and parse a figure with three axes): 1\n",
" ├── search engine: 101\n",
" ├── calculator: 34\n",
" ├── unlambda compiler (optional): 1\n",
" ├── a web browser.: 2\n",
" ├── a search engine.: 2\n",
" ├── a calculator.: 1\n",
" ├── microsoft excel: 5\n",
" ├── google search: 1\n",
" ├── ne: 9\n",
" ├── pdf access: 7\n",
" ├── file handling: 2\n",
" ├── python: 3\n",
" ├── image recognition tools: 12\n",
" ├── jsonld file access: 1\n",
" ├── video parsing: 1\n",
" ├── python compiler: 1\n",
" ├── video recognition tools: 3\n",
" ├── pdf viewer: 7\n",
" ├── microsoft excel / google sheets: 3\n",
" ├── word document access: 1\n",
" ├── tool to extract text from images: 1\n",
" ├── a word reversal tool / script: 1\n",
" ├── counter: 1\n",
" ├── excel: 3\n",
" ├── image recognition: 5\n",
" ├── color recognition: 3\n",
" ├── excel file access: 3\n",
" ├── xml file access: 1\n",
" ├── access to the internet archive, web.archive.org: 1\n",
" ├── text processing/diff tool: 1\n",
" ├── gif parsing tools: 1\n",
" ├── a web browser: 7\n",
" ├── a search engine: 7\n",
" ├── a speech-to-text tool: 2\n",
" ├── code/data analysis tools: 1\n",
" ├── audio capability: 2\n",
" ├── pdf reader: 1\n",
" ├── markdown: 1\n",
" ├── a calculator: 5\n",
" ├── access to wikipedia: 3\n",
" ├── image recognition/ocr: 3\n",
" ├── google translate access: 1\n",
" ├── ocr: 4\n",
" ├── bass note data: 1\n",
" ├── text editor: 1\n",
" ├── xlsx file access: 1\n",
" ├── powerpoint viewer: 1\n",
" ├── csv file access: 1\n",
" ├── calculator (or use excel): 1\n",
" ├── computer algebra system: 1\n",
" ├── video processing software: 1\n",
" ├── audio processing software: 1\n",
" ├── computer vision: 1\n",
" ├── google maps: 1\n",
" ├── access to excel files: 1\n",
" ├── calculator (or ability to count): 1\n",
" ├── a file interface: 3\n",
" ├── a python ide: 1\n",
" ├── spreadsheet editor: 1\n",
" ├── tools required: 1\n",
" ├── b browser: 1\n",
" ├── image recognition and processing tools: 1\n",
" ├── computer vision or ocr: 1\n",
" ├── c++ compiler: 1\n",
" ├── access to google maps: 1\n",
" ├── youtube player: 1\n",
" ├── natural language processor: 1\n",
" ├── graph interaction tools: 1\n",
" ├── bablyonian cuniform -> arabic legend: 1\n",
" ├── access to youtube: 1\n",
" ├── image search tools: 1\n",
" ├── calculator or counting function: 1\n",
" ├── a speech-to-text audio processing tool: 1\n",
" ├── access to academic journal websites: 1\n",
" ├── pdf reader/extracter: 1\n",
" ├── rubik's cube model: 1\n",
" ├── wikipedia: 1\n",
" ├── video capability: 1\n",
" ├── image processing tools: 1\n",
" ├── age recognition software: 1\n",
" ├── youtube: 1\n"
]
}
],
"source": [
"# list of the tools used in all the samples\n",
"from collections import Counter, OrderedDict\n",
"\n",
"tools = []\n",
"for sample in json_QA:\n",
" for tool in sample['Annotator Metadata']['Tools'].split('\\n'):\n",
" tool = tool[2:].strip().lower()\n",
" if tool.startswith(\"(\"):\n",
" tool = tool[11:].strip()\n",
" tools.append(tool)\n",
"tools_counter = OrderedDict(Counter(tools))\n",
"print(\"List of tools used in all samples:\")\n",
"print(\"Total number of tools used:\", len(tools_counter))\n",
"for tool, count in tools_counter.items():\n",
" print(f\" ├── {tool}: {count}\")"
]
},
{
"cell_type": "markdown",
"id": "5efee12a",
"metadata": {},
"source": [
"#### Graph"
]
},
{
"cell_type": "code",
"execution_count": 65,
"id": "7fe573cc",
"metadata": {},
"outputs": [],
"source": [
"system_prompt = \"\"\"\n",
"You are a helpful assistant tasked with answering questions using a set of tools.\n",
"If the tool is not available, you can try to find the information online. You can also use your own knowledge to answer the question. \n",
"You need to provide a step-by-step explanation of how you arrived at the answer.\n",
"==========================\n",
"Here is a few examples showing you how to answer the question step by step.\n",
"\"\"\"\n",
"for i, samples in enumerate(random_samples):\n",
" system_prompt += f\"\\nQuestion {i+1}: {samples['Question']}\\nSteps:\\n{samples['Annotator Metadata']['Steps']}\\nTools:\\n{samples['Annotator Metadata']['Tools']}\\nFinal Answer: {samples['Final answer']}\\n\"\n",
"system_prompt += \"\\n==========================\\n\"\n",
"system_prompt += \"Now, please answer the following question step by step.\\n\"\n",
"\n",
"# save the system_prompt to a file\n",
"with open('system_prompt.txt', 'w') as f:\n",
" f.write(system_prompt)"
]
},
{
"cell_type": "code",
"execution_count": 66,
"id": "d6beb0da",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"You are a helpful assistant tasked with answering questions using a set of tools.\n",
"If the tool is not available, you can try to find the information online. You can also use your own knowledge to answer the question. \n",
"You need to provide a step-by-step explanation of how you arrived at the answer.\n",
"==========================\n",
"Here is a few examples showing you how to answer the question step by step.\n",
"\n",
"Question 1: As of May 2023, how many stops are between South Station and Windsor Gardens on MBTA’s Franklin-Foxboro line (not included)?\n",
"Steps:\n",
"1. Search the web for “MBTA Franklin Foxboro line”.\n",
"2. Click on top result, on the MBTA website.\n",
"3. Scroll down on the list of stops, and count the current stops between South Station and Windsor Gardens.\n",
"4. Click the “Schedule & Maps” tab to view a map of the route.\n",
"5. Examine the map to confirm that the order of stops is the same as on the listing of stops.\n",
"6. Return to web search.\n",
"7. Click on Wikipedia article for Franklin line.\n",
"8. Read the article to check whether any stops were added or removed since the date given in the question.\n",
"9. Search the web for “MBTA Franklin Foxboro Line changes”.\n",
"10. Click News tab.\n",
"11. Click article about rail schedule changes.\n",
"12. Confirm that none of the changes affect the answer to the question.\n",
"Tools:\n",
"1. Search engine\n",
"2. Web browser\n",
"Final Answer: 10\n",
"\n",
"==========================\n",
"Now, please answer the following question step by step.\n",
"\n"
]
}
],
"source": [
"# load the system prompt from the file\n",
"with open('system_prompt.txt', 'r') as f:\n",
" system_prompt = f.read()\n",
"print(system_prompt)"
]
},
{
"cell_type": "code",
"execution_count": 67,
"id": "42fde0f8",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n",
"To disable this warning, you can either:\n",
"\t- Avoid using `tokenizers` before the fork if possible\n",
"\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6681.19s - pydevd: Sending message related to process being replaced timed-out after 5 seconds\n",
"Requirement already satisfied: langgraph in /opt/anaconda3/lib/python3.12/site-packages (0.4.5)\n",
"Requirement already satisfied: langchain-core>=0.1 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph) (0.3.60)\n",
"Requirement already satisfied: langgraph-checkpoint<3.0.0,>=2.0.26 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph) (2.0.26)\n",
"Requirement already satisfied: langgraph-prebuilt>=0.1.8 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph) (0.1.8)\n",
"Requirement already satisfied: langgraph-sdk>=0.1.42 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph) (0.1.70)\n",
"Requirement already satisfied: pydantic>=2.7.4 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph) (2.11.4)\n",
"Requirement already satisfied: xxhash<4.0.0,>=3.5.0 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph) (3.5.0)\n",
"Requirement already satisfied: ormsgpack<2.0.0,>=1.8.0 in /opt/anaconda3/lib/python3.12/site-packages (from langgraph-checkpoint<3.0.0,>=2.0.26->langgraph) (1.9.1)\n",
"Requirement already satisfied: langsmith<0.4,>=0.1.126 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core>=0.1->langgraph) (0.3.42)\n",
"Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core>=0.1->langgraph) (8.2.3)\n",
"Requirement already satisfied: jsonpatch<2.0,>=1.33 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core>=0.1->langgraph) (1.33)\n",
"Requirement already satisfied: PyYAML>=5.3 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core>=0.1->langgraph) (6.0.1)\n",
"Requirement already satisfied: packaging<25,>=23.2 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core>=0.1->langgraph) (24.1)\n",
"Requirement already satisfied: typing-extensions>=4.7 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core>=0.1->langgraph) (4.13.2)\n",
"Requirement already satisfied: jsonpointer>=1.9 in /opt/anaconda3/lib/python3.12/site-packages (from jsonpatch<2.0,>=1.33->langchain-core>=0.1->langgraph) (2.1)\n",
"Requirement already satisfied: httpx<1,>=0.23.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (0.27.0)\n",
"Requirement already satisfied: orjson<4.0.0,>=3.9.14 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (3.10.18)\n",
"Requirement already satisfied: requests<3,>=2 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (2.32.3)\n",
"Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (1.0.0)\n",
"Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (0.23.0)\n",
"Requirement already satisfied: anyio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (4.2.0)\n",
"Requirement already satisfied: certifi in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (2025.4.26)\n",
"Requirement already satisfied: httpcore==1.* in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (1.0.2)\n",
"Requirement already satisfied: idna in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (3.7)\n",
"Requirement already satisfied: sniffio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (1.3.0)\n",
"Requirement already satisfied: h11<0.15,>=0.13 in /opt/anaconda3/lib/python3.12/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (0.14.0)\n",
"Requirement already satisfied: annotated-types>=0.6.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic>=2.7.4->langgraph) (0.6.0)\n",
"Requirement already satisfied: pydantic-core==2.33.2 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic>=2.7.4->langgraph) (2.33.2)\n",
"Requirement already satisfied: typing-inspection>=0.4.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic>=2.7.4->langgraph) (0.4.0)\n",
"Requirement already satisfied: charset-normalizer<4,>=2 in /opt/anaconda3/lib/python3.12/site-packages (from requests<3,>=2->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (3.3.2)\n",
"Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/anaconda3/lib/python3.12/site-packages (from requests<3,>=2->langsmith<0.4,>=0.1.126->langchain-core>=0.1->langgraph) (2.2.3)\n",
"Note: you may need to restart the kernel to use updated packages.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n",
"To disable this warning, you can either:\n",
"\t- Avoid using `tokenizers` before the fork if possible\n",
"\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6687.05s - pydevd: Sending message related to process being replaced timed-out after 5 seconds\n",
"Requirement already satisfied: langchain-google-genai in /opt/anaconda3/lib/python3.12/site-packages (2.1.4)\n",
"Requirement already satisfied: filetype<2.0.0,>=1.2.0 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-google-genai) (1.2.0)\n",
"Requirement already satisfied: google-ai-generativelanguage<0.7.0,>=0.6.18 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-google-genai) (0.6.18)\n",
"Requirement already satisfied: langchain-core<0.4.0,>=0.3.52 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-google-genai) (0.3.60)\n",
"Requirement already satisfied: pydantic<3,>=2 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-google-genai) (2.11.4)\n",
"Requirement already satisfied: google-api-core!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1 in /opt/anaconda3/lib/python3.12/site-packages (from google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (2.24.2)\n",
"Requirement already satisfied: google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1 in /opt/anaconda3/lib/python3.12/site-packages (from google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (2.40.2)\n",
"Requirement already satisfied: proto-plus<2.0.0,>=1.22.3 in /opt/anaconda3/lib/python3.12/site-packages (from google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (1.26.1)\n",
"Requirement already satisfied: protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<7.0.0,>=3.20.2 in /opt/anaconda3/lib/python3.12/site-packages (from google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (6.31.0)\n",
"Requirement already satisfied: googleapis-common-protos<2.0.0,>=1.56.2 in /opt/anaconda3/lib/python3.12/site-packages (from google-api-core!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (1.70.0)\n",
"Requirement already satisfied: requests<3.0.0,>=2.18.0 in /opt/anaconda3/lib/python3.12/site-packages (from google-api-core!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (2.32.3)\n",
"Requirement already satisfied: grpcio<2.0dev,>=1.33.2 in /opt/anaconda3/lib/python3.12/site-packages (from google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (1.72.0rc1)\n",
"Requirement already satisfied: grpcio-status<2.0.dev0,>=1.33.2 in /opt/anaconda3/lib/python3.12/site-packages (from google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (1.72.0rc1)\n",
"Requirement already satisfied: cachetools<6.0,>=2.0.0 in /opt/anaconda3/lib/python3.12/site-packages (from google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (5.3.3)\n",
"Requirement already satisfied: pyasn1-modules>=0.2.1 in /opt/anaconda3/lib/python3.12/site-packages (from google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (0.2.8)\n",
"Requirement already satisfied: rsa<5,>=3.1.4 in /opt/anaconda3/lib/python3.12/site-packages (from google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (4.9.1)\n",
"Requirement already satisfied: langsmith<0.4,>=0.1.126 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (0.3.42)\n",
"Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (8.2.3)\n",
"Requirement already satisfied: jsonpatch<2.0,>=1.33 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (1.33)\n",
"Requirement already satisfied: PyYAML>=5.3 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (6.0.1)\n",
"Requirement already satisfied: packaging<25,>=23.2 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (24.1)\n",
"Requirement already satisfied: typing-extensions>=4.7 in /opt/anaconda3/lib/python3.12/site-packages (from langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (4.13.2)\n",
"Requirement already satisfied: jsonpointer>=1.9 in /opt/anaconda3/lib/python3.12/site-packages (from jsonpatch<2.0,>=1.33->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (2.1)\n",
"Requirement already satisfied: httpx<1,>=0.23.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (0.27.0)\n",
"Requirement already satisfied: orjson<4.0.0,>=3.9.14 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (3.10.18)\n",
"Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (1.0.0)\n",
"Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in /opt/anaconda3/lib/python3.12/site-packages (from langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (0.23.0)\n",
"Requirement already satisfied: anyio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (4.2.0)\n",
"Requirement already satisfied: certifi in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (2025.4.26)\n",
"Requirement already satisfied: httpcore==1.* in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (1.0.2)\n",
"Requirement already satisfied: idna in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (3.7)\n",
"Requirement already satisfied: sniffio in /opt/anaconda3/lib/python3.12/site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (1.3.0)\n",
"Requirement already satisfied: h11<0.15,>=0.13 in /opt/anaconda3/lib/python3.12/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->langsmith<0.4,>=0.1.126->langchain-core<0.4.0,>=0.3.52->langchain-google-genai) (0.14.0)\n",
"Requirement already satisfied: annotated-types>=0.6.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=2->langchain-google-genai) (0.6.0)\n",
"Requirement already satisfied: pydantic-core==2.33.2 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=2->langchain-google-genai) (2.33.2)\n",
"Requirement already satisfied: typing-inspection>=0.4.0 in /opt/anaconda3/lib/python3.12/site-packages (from pydantic<3,>=2->langchain-google-genai) (0.4.0)\n",
"Requirement already satisfied: charset-normalizer<4,>=2 in /opt/anaconda3/lib/python3.12/site-packages (from requests<3.0.0,>=2.18.0->google-api-core!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (3.3.2)\n",
"Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/anaconda3/lib/python3.12/site-packages (from requests<3.0.0,>=2.18.0->google-api-core!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (2.2.3)\n",
"Requirement already satisfied: pyasn1>=0.1.3 in /opt/anaconda3/lib/python3.12/site-packages (from rsa<5,>=3.1.4->google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-ai-generativelanguage<0.7.0,>=0.6.18->langchain-google-genai) (0.4.8)\n",
"Note: you may need to restart the kernel to use updated packages.\n"
]
},
{
"ename": "SupabaseException",
"evalue": "supabase_url is required",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mSupabaseException\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[67], line 25\u001b[0m\n\u001b[1;32m 23\u001b[0m supabase_url \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39menviron\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSUPABASE_URL\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 24\u001b[0m supabase_key \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39menviron\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSUPABASE_SERVICE_KEY\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m---> 25\u001b[0m supabase: Client \u001b[38;5;241m=\u001b[39m create_client(supabase_url, supabase_key)\n\u001b[1;32m 26\u001b[0m vector_store \u001b[38;5;241m=\u001b[39m SupabaseVectorStore(\n\u001b[1;32m 27\u001b[0m client\u001b[38;5;241m=\u001b[39msupabase,\n\u001b[1;32m 28\u001b[0m embedding\u001b[38;5;241m=\u001b[39m embeddings,\n\u001b[1;32m 29\u001b[0m table_name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdocuments\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 30\u001b[0m query_name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmatch_documents_langchain\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 31\u001b[0m )\n\u001b[1;32m 33\u001b[0m question_retrieve_tool \u001b[38;5;241m=\u001b[39m create_retriever_tool(\n\u001b[1;32m 34\u001b[0m vector_store\u001b[38;5;241m.\u001b[39mas_retriever(),\n\u001b[1;32m 35\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mQuestion Retriever\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 36\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFind similar questions in the vector database for the given question.\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 37\u001b[0m )\n",
"File \u001b[0;32m/opt/anaconda3/lib/python3.12/site-packages/supabase/_sync/client.py:338\u001b[0m, in \u001b[0;36mcreate_client\u001b[0;34m(supabase_url, supabase_key, options)\u001b[0m\n\u001b[1;32m 307\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcreate_client\u001b[39m(\n\u001b[1;32m 308\u001b[0m supabase_url: \u001b[38;5;28mstr\u001b[39m,\n\u001b[1;32m 309\u001b[0m supabase_key: \u001b[38;5;28mstr\u001b[39m,\n\u001b[1;32m 310\u001b[0m options: Optional[ClientOptions] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 311\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m SyncClient:\n\u001b[1;32m 312\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Create client function to instantiate supabase client like JS runtime.\u001b[39;00m\n\u001b[1;32m 313\u001b[0m \n\u001b[1;32m 314\u001b[0m \u001b[38;5;124;03m Parameters\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 336\u001b[0m \u001b[38;5;124;03m Client\u001b[39;00m\n\u001b[1;32m 337\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m--> 338\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m SyncClient\u001b[38;5;241m.\u001b[39mcreate(\n\u001b[1;32m 339\u001b[0m supabase_url\u001b[38;5;241m=\u001b[39msupabase_url, supabase_key\u001b[38;5;241m=\u001b[39msupabase_key, options\u001b[38;5;241m=\u001b[39moptions\n\u001b[1;32m 340\u001b[0m )\n",
"File \u001b[0;32m/opt/anaconda3/lib/python3.12/site-packages/supabase/_sync/client.py:101\u001b[0m, in \u001b[0;36mSyncClient.create\u001b[0;34m(cls, supabase_url, supabase_key, options)\u001b[0m\n\u001b[1;32m 93\u001b[0m \u001b[38;5;129m@classmethod\u001b[39m\n\u001b[1;32m 94\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcreate\u001b[39m(\n\u001b[1;32m 95\u001b[0m \u001b[38;5;28mcls\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 98\u001b[0m options: Optional[ClientOptions] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 99\u001b[0m ):\n\u001b[1;32m 100\u001b[0m auth_header \u001b[38;5;241m=\u001b[39m options\u001b[38;5;241m.\u001b[39mheaders\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mAuthorization\u001b[39m\u001b[38;5;124m\"\u001b[39m) \u001b[38;5;28;01mif\u001b[39;00m options \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m--> 101\u001b[0m client \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mcls\u001b[39m(supabase_url, supabase_key, options)\n\u001b[1;32m 103\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m auth_header \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 104\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n",
"File \u001b[0;32m/opt/anaconda3/lib/python3.12/site-packages/supabase/_sync/client.py:51\u001b[0m, in \u001b[0;36mSyncClient.__init__\u001b[0;34m(self, supabase_url, supabase_key, options)\u001b[0m\n\u001b[1;32m 37\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Instantiate the client.\u001b[39;00m\n\u001b[1;32m 38\u001b[0m \n\u001b[1;32m 39\u001b[0m \u001b[38;5;124;03mParameters\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[38;5;124;03m `DEFAULT_OPTIONS` dict.\u001b[39;00m\n\u001b[1;32m 48\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 50\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m supabase_url:\n\u001b[0;32m---> 51\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m SupabaseException(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msupabase_url is required\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 52\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m supabase_key:\n\u001b[1;32m 53\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m SupabaseException(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msupabase_key is required\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
"\u001b[0;31mSupabaseException\u001b[0m: supabase_url is required"
]
}
],
"source": [
"%pip install langgraph\n",
"%pip install langchain-google-genai\n",
"\n",
"import dotenv\n",
"from langgraph.graph import MessagesState, START, StateGraph\n",
"from langgraph.prebuilt import tools_condition\n",
"from langgraph.prebuilt import ToolNode\n",
"from langchain_google_genai import ChatGoogleGenerativeAI\n",
"from langchain_huggingface import HuggingFaceEmbeddings\n",
"from langchain_community.tools.tavily_search import TavilySearchResults\n",
"from langchain_community.document_loaders import WikipediaLoader\n",
"from langchain_community.document_loaders import ArxivLoader\n",
"from langchain_community.vectorstores import SupabaseVectorStore\n",
"from langchain.tools.retriever import create_retriever_tool\n",
"from langchain_core.messages import HumanMessage, SystemMessage\n",
"from langchain_core.tools import tool\n",
"from supabase.client import Client, create_client\n",
"\n",
"# Define the retriever from supabase\n",
"load_dotenv()\n",
"embeddings = HuggingFaceEmbeddings(model_name=\"sentence-transformers/all-mpnet-base-v2\") # dim=768\n",
"\n",
"supabase_url = os.environ.get(\"SUPABASE_URL\")\n",
"supabase_key = os.environ.get(\"SUPABASE_SERVICE_KEY\")\n",
"supabase: Client = create_client(supabase_url, supabase_key)\n",
"vector_store = SupabaseVectorStore(\n",
" client=supabase,\n",
" embedding= embeddings,\n",
" table_name=\"documents\",\n",
" query_name=\"match_documents_langchain\",\n",
")\n",
"\n",
"question_retrieve_tool = create_retriever_tool(\n",
" vector_store.as_retriever(),\n",
" \"Question Retriever\",\n",
" \"Find similar questions in the vector database for the given question.\",\n",
")\n",
"\n",
"@tool\n",
"def multiply(a: int, b: int) -> int:\n",
" \"\"\"Multiply two numbers.\n",
"\n",
" Args:\n",
" a: first int\n",
" b: second int\n",
" \"\"\"\n",
" return a * b\n",
"\n",
"@tool\n",
"def add(a: int, b: int) -> int:\n",
" \"\"\"Add two numbers.\n",
" \n",
" Args:\n",
" a: first int\n",
" b: second int\n",
" \"\"\"\n",
" return a + b\n",
"\n",
"@tool\n",
"def subtract(a: int, b: int) -> int:\n",
" \"\"\"Subtract two numbers.\n",
" \n",
" Args:\n",
" a: first int\n",
" b: second int\n",
" \"\"\"\n",
" return a - b\n",
"\n",
"@tool\n",
"def divide(a: int, b: int) -> int:\n",
" \"\"\"Divide two numbers.\n",
" \n",
" Args:\n",
" a: first int\n",
" b: second int\n",
" \"\"\"\n",
" if b == 0:\n",
" raise ValueError(\"Cannot divide by zero.\")\n",
" return a / b\n",
"\n",
"@tool\n",
"def modulus(a: int, b: int) -> int:\n",
" \"\"\"Get the modulus of two numbers.\n",
" \n",
" Args:\n",
" a: first int\n",
" b: second int\n",
" \"\"\"\n",
" return a % b\n",
"\n",
"@tool\n",
"def wiki_search(query: str) -> str:\n",
" \"\"\"Search Wikipedia for a query and return maximum 2 results.\n",
" \n",
" Args:\n",
" query: The search query.\"\"\"\n",
" search_docs = WikipediaLoader(query=query, load_max_docs=2).load()\n",
" formatted_search_docs = \"\\n\\n---\\n\\n\".join(\n",
" [\n",
" f'<Document source=\"{doc.metadata[\"source\"]}\" page=\"{doc.metadata.get(\"page\", \"\")}\"/>\\n{doc.page_content}\\n</Document>'\n",
" for doc in search_docs\n",
" ])\n",
" return {\"wiki_results\": formatted_search_docs}\n",
"\n",
"@tool\n",
"def web_search(query: str) -> str:\n",
" \"\"\"Search Tavily for a query and return maximum 3 results.\n",
" \n",
" Args:\n",
" query: The search query.\"\"\"\n",
" search_docs = TavilySearchResults(max_results=3).invoke(query=query)\n",
" formatted_search_docs = \"\\n\\n---\\n\\n\".join(\n",
" [\n",
" f'<Document source=\"{doc.metadata[\"source\"]}\" page=\"{doc.metadata.get(\"page\", \"\")}\"/>\\n{doc.page_content}\\n</Document>'\n",
" for doc in search_docs\n",
" ])\n",
" return {\"web_results\": formatted_search_docs}\n",
"\n",
"@tool\n",
"def arvix_search(query: str) -> str:\n",
" \"\"\"Search Arxiv for a query and return maximum 3 result.\n",
" \n",
" Args:\n",
" query: The search query.\"\"\"\n",
" search_docs = ArxivLoader(query=query, load_max_docs=3).load()\n",
" formatted_search_docs = \"\\n\\n---\\n\\n\".join(\n",
" [\n",
" f'<Document source=\"{doc.metadata[\"source\"]}\" page=\"{doc.metadata.get(\"page\", \"\")}\"/>\\n{doc.page_content[:1000]}\\n</Document>'\n",
" for doc in search_docs\n",
" ])\n",
" return {\"arvix_results\": formatted_search_docs}\n",
"\n",
"@tool\n",
"def similar_question_search(question: str) -> str:\n",
" \"\"\"Search the vector database for similar questions and return the first results.\n",
" \n",
" Args:\n",
" question: the question human provided.\"\"\"\n",
" matched_docs = vector_store.similarity_search(query, 3)\n",
" formatted_search_docs = \"\\n\\n---\\n\\n\".join(\n",
" [\n",
" f'<Document source=\"{doc.metadata[\"source\"]}\" page=\"{doc.metadata.get(\"page\", \"\")}\"/>\\n{doc.page_content[:1000]}\\n</Document>'\n",
" for doc in matched_docs\n",
" ])\n",
" return {\"similar_questions\": formatted_search_docs}\n",
"\n",
"tools = [\n",
" multiply,\n",
" add,\n",
" subtract,\n",
" divide,\n",
" modulus,\n",
" wiki_search,\n",
" web_search,\n",
" arvix_search,\n",
" question_retrieve_tool\n",
"]\n",
"\n",
"llm = ChatGoogleGenerativeAI(model=\"gemini-2.0-flash\")\n",
"llm_with_tools = llm.bind_tools(tools)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "7dd0716c",
"metadata": {},
"outputs": [],
"source": [
"# load the system prompt from the file\n",
"with open('system_prompt.txt', 'r') as f:\n",
" system_prompt = f.read()\n",
"\n",
"\n",
"# System message\n",
"sys_msg = SystemMessage(content=system_prompt)\n",
"\n",
"# Node\n",
"def assistant(state: MessagesState):\n",
" \"\"\"Assistant node\"\"\"\n",
" return {\"messages\": [llm_with_tools.invoke([sys_msg] + state[\"messages\"])]}\n",
"\n",
"# Build graph\n",
"builder = StateGraph(MessagesState)\n",
"builder.add_node(\"assistant\", assistant)\n",
"builder.add_node(\"tools\", ToolNode(tools))\n",
"builder.add_edge(START, \"assistant\")\n",
"builder.add_conditional_edges(\n",
" \"assistant\",\n",
" # If the latest message (result) from assistant is a tool call -> tools_condition routes to tools\n",
" # If the latest message (result) from assistant is a not a tool call -> tools_condition routes to END\n",
" tools_condition,\n",
")\n",
"builder.add_edge(\"tools\", \"assistant\")\n",
"\n",
"# Compile graph\n",
"graph = builder.compile()\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f4e77216",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAANgAAAD5CAIAAADKsmwpAAAQAElEQVR4nOydB1wUR9vA5zrcwdGOXqRIFRC7gkZsxK7YguU1xhgTJcVXjVETNSYajCbGYCxYYuJnjYliYq+xRo2xIIqAgNI7HFzh+vfo5UVEQEzYuzl2/r/7HXu7e7dX/jwz88zsLFun0yECwdiwEYGAAUREAhYQEQlYQEQkYAERkYAFREQCFpikiAq5pixfKavWyKrVarVOrTSBDBTPnMnmMviWbL6Q5ehuhgjPYkoiSqtU6TekmcmSqjKVpS2Hb8mC31Voy0GmkArValDRQ4WsWsrhMbPvy7yCBd4hcLNAhCcwTCKhrdXoLv9WVpqvsHPhegdbuLY1R6ZMjUyTlSzNTZflZ9aED7Xz7WCJaI8JiHj3ivj3fSXhw+w6RNqg1gWE9suHyhQyTdR/nMwtWIjG4C7i7/uKzfjM7kNEqPVSWqBIXJc38HUnN18+oitYi3hyR5GTl1lIhBWiAQfW5fWKFolceIiW4Cti4vq8tmEWweG0sFDPgXW5IRHW8KkR/WAiLLmQWOIZJKCVhUB0rNuVo2UVRUpEP3AUMfVGNZvDDIu0RvRj4nyPs/uKaTg2D0cRz+0r6diXjhYCDAYDigLIVSGagZ2If52qCI4Q8szpm8vo2Nfm3tWqGqkG0Qm8RIQiKTtVFj60NSdrmsMro+xvnatEdAIvETPvSKFPFtEeD39+8mUxohN4/erQ8QWdsMiwfPTRR7/99ht6efr375+fn48oAHpZrEXcgodyRBvwErGyROUdYmgRU1JS0MtTWFhYWUlh6enX2SInTYZoA0YiQvW8olhJXTMlMTFx3LhxERER/fr1+/DDD4uKimBl586dIaotXbo0MjISHmo0mo0bN44cOTI8PHzQoEErVqyQy/8OSxD/du3a9f777/fo0ePChQtDhw6FlcOHD58zZw6iAIGQXZpLo4QiRiJKq9Tw7SNquHnz5rJly8aPH793795vv/0Wgtn8+fNh/ZEjR+AevDx48CAsgGo//PDDzJkz9+zZs2TJknPnzq1bt07/Cmw2e//+/W3btk1ISOjSpUtcXBys3LFjx2effYYoAL4K+EIQbcBoPKK0SiMQUhUOMzIyeDzesGHDwCc3NzcIdQUFBbDeyupx5w2fz9cvQBSEgAe2wbKHh0dUVNSlS5f0rwAZPjMzM4iI+ocCweMqhFAo1C+0OAIrllRMowwORiLqtDouZU1mKILBpGnTpo0YMaJbt24uLi52dnbP72ZtbX348GGIncXFxWq1WiaTgaO1W0NDQ5GhYLEZXDMaJRAw+qh8IVtcokLU4OnpuW3bNoiFa9euhYrdlClTkpOTn99t1apVW7Zsgark5s2boZiOjo6uu9XCwnDDESSVanAR0QaMRIRyGUpnRBm+vr4Q6k6ePAmVPBaLNWvWLKXymdYAtFSgpvj6668PHjzY1dVVJBJJJBJkJCitqGAIThHRkm3rxNFqKenvh/iXlJQEC6Bgp06dZsyYAe2VsrK/u3T1gwy0Wi24qK8sAlKp9Pz5802PP6BudIJCprF3p9HYRLxqIWZ8FnSuIAq4fPny7NmzT58+nZubm5qaCo1iZ2dnJycn3hNu3LgBK6ES6e/vf+jQIdgnPT0dQibkeqqqqh4+fAj1xXovCM0UuL948WJmZiaigNS/qp09TfvUnJcCLxE92wke3qVExKlTp0KFb82aNWPGjImNjYVIFh8fD+bBJqgvnjp1ClI2kDJcvHgxBEWoIy5YsCAmJgb2BFknT54MbZd6LxgYGAi5xm+++WblypWopdGodXkP5B4BNDpzAK8R2nKJ+sSOohHvuCJ6k3VXkpMmfyXaHtEGvCKiuQXbxpF7m2YDT57n8q9ldBudjt0J9hHDRAnzM9r3bnhgLJSb0EHX4CZoAnO53AY3eXl5Qe4GUcMPT2hwE6R7Gmt3Q8m+YcOGBjfdv17l4G5m69jwZ2mt4Hjy1K1zlQyGrv0rDZ/FXF1d3eB6hUIBIuqrffVgMpkU9X/oj1svDVSLSqXicDgNboLGe91UeV0ObcnvPcbe0rrhJ7ZWMD2LD36Mdt2tDD8kzOjQ9oNj2ok0dJrL+f0lZYUKRCfO7C128jSjoYUI5/Oaoet579c5r4yyd/GhRTrt7E/Fbr7mtJ0HB99udQaTEfOhxx9HylKuVaFWjVajO7Auz9aJS+fZmExgEqbLh0qzU2Thw0StMsH754ny1OvVkWPt6TzxDTKVaelK8hSXfysVCNlQTEMVylxg8qMBinNqslNl109UhEVadx1oy2TSaKBNg5iGiHpy02UQPLKSpfbuPCsRB7yEG1/I0moR/rAYSFyukoo1OqS7/2c1vPO27QWhr1hzuOSsxceYkoi1FGTJS/OU0io13JgMhkzSkoPHZDLZo0ePIOGMWhRLGw581QIrlqUtx83HXGBFZi9/BpMUkVJSUlKWL1++Y8cORDAg5P+SgAVERAIWEBEJWEBEJGABEZGABUREAhYQEQlYQEQkYAERkYAFREQCFhARCVhARCRgARGRgAVERAIWEBEJWEBEJGABEZGABUREAhYQEQlYQEQkYAERkYAFREQCFhARCVhARKwPg8Gwt6fR5NWYQESsj06nKykpQQTDQkQkYAERkYAFREQCFhARCVhARCRgARGRgAVERAIWEBEJWEBEJGABEZGABUREAhYQEQlYQEQkYAERkYAFREQCFpAL/vzN+PHjJRIJg8FQKpVisVgkEsGyQqE4fvw4IlAPuRDc3wwaNKi4uDg/P7+0tFSlUhUUFMCypSV9r1trYIiIfxMTE+Pu7l53DUTE3r17I4JBICL+DZfLHTlyJIv19AK8Hh4eY8aMQQSDQER8yrhx41xdXfXLEA779Onj7OyMCAaBiPgUCIqjR4/WB0UIh2PHjkUEQ0FEfAYIii4uLvpw6OjoiAiGAsc8olyiKStQKBXGySuNGDD9999/79lxdGayFBkcBtIJrNm2jlw2h14xAq88orJGe2pXUV6G3N1foJRrEf3g8hgVxSqtVuvfybLzAFtEGzASUS7V7F+b132YvYObOaI9fx4rMeMzw4fZIXqAUfzfvTK730QXYqGeLgPta+TaP0+UI3qAi4i3z1cGdLUSCEnf91O6vGr/8K5MLlUjGoCLiEWPavhCDiLUg4EqClWIBuAiokqpE9oSEetj52xWXU6LiIhLUVgj0eg0iFAPpUKjpcfwKFInI2ABEZGABUREAhYQEQlYQEQkYAERkYAFREQCFhARCVhARCRgARGRgAVERAIWkHNWUGbmgz79Ot+5cwsRjAcREYnsHWZ9MN/Fxa2JfbKyMmImDEX/jpGj+hcU5iNCQ5CiGQkthSOGv+BE+rS0FPTvKCoqFIsrEaERTFjE+6n3tmz5Lv1BqlKp8Gzj/eabsZ07ddNvOnwk8edfdhUU5PF4Zu1DO74bO9fBwbGx9VA0v/lWTPyaLSEhYaDLxoQ1t27/JZNJnZxcxoyeMGzoqB9+TPhx+2Z4OpTgsTNnw8rGDn3w1
5+3/bAxbvma+O9W5eQ8FFpaTZr05uBBI27euj57zjuww4SJwyf/Z9obU95BhGcx1aJZoVB8NP89Dpf71ar1G9ZtD2oXumjxnJKSYtiUlHTzq6+XjR41fuuWvXFffCuuqlz6+fwm1tdl5aqlpWUlXyxf8/3Wn0ZFx6z5dsWf16/EvPb6qFExoGzi/lPDho5u4tBsNlsqlWzfsWXpkpW/Hfw9KmrIN2viYFNIcNjiRXGwQ8LGHeNjpiDCc5hqRGSxWN98nWBnJ7KysoaHU6fM2L9/T/Ld230iB2Q9zODxeANfHQZauLq4LVm0orCoAPZpbH1dMrMeRI98LTCgHSy7Dh/j5xvg6OhsZmbG4/IYDIb+WGq1urFD67dOiJmiD8CDBo6AUJqRkda9e08+XwBrLC2F8GqI8BymKiLIpFKr4teufJCRJpFU60+KraoSw32HsM4gzfuzpkGZ2KlTN2cnF1tbuybW1yW8xyu79/wAL9itW0RoSIfAwOCXOrQeb29f/QJoB/fVkmpEeBGmWjTn5mbPmfuOUqlcuODzTRt3JmzYUbvJw8Pzu/ht0AretHkt1MlmvjvlXkpyE+vr8t9ZC6ZNjU1KujH3w5nRo/vDnhDhmn9oPRB3n3lMpkJtBqYaEc+cPaHRaD75eLn+V4dGRt2tPj6+nyxcBjtAdnDrtvULP571054jXC63wfV1nwjRbvTo8XArLy87cfLw1u/XW1vbjBs7qfmHJvwzTDUiqlRKaPnWxp6Tp576lJKSfPduEnpSjwwL6zT1jRmQNwGxGltf+0SJRHLy1FF9CIRSO+a1yUFBIdCmbv6hXwiZKLoxTFXEwIBg0OjosV/LykoTD+67n3oXQlfG40qb5Oq1yx8vmn3u/Om8/FzIsEBLwsnR2dHRqbH1ta8JNcj4tV9Cyxq25hfknTp9DNKHoCxssrCwhANBu7uwsKCJQzfxhoVP6otXrlyEV0CE5zDVojk8/JXXxv0nYVP8+g2ru3WNmD9v6c+/7Ny950cmkwnZQbVatXHjGkjECAQWwcHtV8TFg2STJk5tcH3tawoEgi9XfAcJwtlz3oYqIOQRIeEHrWzY1K/vwOMnDs35cMaE8VNgZWOH9vUNaOwN+/kFdu0avmHjN0VFBTPemYUIz4LLJEy/fJsb1kfk0IakNp7h0sGiNgHmgV2FqLVDuvgIWEBEJGABEZGABUREAhYQEQlYQEQkYAERkYAFREQCFhARCVhARCRgARGRgAVERAIWEBEJWICLiFYiro5BBo3Wh8dncXm0mAQBlw/JEzBL82oQ4VlyUqW2zlxEA3AR0TOQLy5WIkIdJGKV0JZj40BENCDu/nwLa9bVoyWI8D/O7i7oFS1C9ACv6zVfOVpeWaxy8jIXuZrR7crZehgMXVW5uqpMeeVwyaQFbaxEdLksHF4iAll3pek3JTUyTXlBoyW1UqlkPQFRgFajUapUBpuPQS6Xc7nc2s9iJmBxuAxnH7NuA+1YLAaiDdiJ+EKys7MPHDjwwQcfIGpYunTp+fPnly9f3r17d0Q9EokkLi4ODofojSmJKBaLCwsLnZycrKysEDXcu3fvk08+AdfDw8Pj4+ORAdm7d29oaGhgYCCiJSZTDystLY2Ojvby8qLOQmD37t1gIXo8IWLapUuXkAEZMmQIxMXKSprOoWgaIkJFCvw4c+YMVKcQZaSkpNy4cUO/DN7v2rULGRALC4sdOx5Po/Pw4cPc3FxEM0xAxDlz5kD9oWPHjohidu7cWVRUVPsQimkDB0XA2tra2dk5NjYWjo7oBO4i7tmzZ9iwYXw+H1EM/PC14VAPVEn1IcrA8Hi8gwcPQiEAy/QpqfEV8eLFi3APFkZGRiLq2b59O4RDrVar+x+w8v79+8hIdOr0eM4dCI3nzp1DNADTVjN8+8ePH//iiy+QwYGaIjQajBILGwT+QyZPnqxWq9ns1jxUCtOIyGQyjWIhhoCFcL969Wr41d9QSQAAD6ZJREFUz0StF7xELC8vnz59Oiz06tULEeowb948KCVqalrtACW8oj38369atQoRGgKKCCig9Q35iIgI1LrAJSIePnwY7pctW0ZpvtrUgWpijx49oA8mOTkZtS6wEHHhwoUCgQARmgHUnqHvEdKNsHzrVuu5fqCRRayoqID78ePHGyZH02pwc3t85cANGzYcPXoUtQqMKeKxY8cSExNhISQkBBFenoSEBOgYhIX8fJO/1qQxRbxw4cIbb7yBCP8CfXph9+7d27ZtQ6aMcUQ8ffo03JNBeC2FvjseFmQyGTJNDC2iSqXq1q1bWFgYIrQoU6dORU/6RXfu3IlMEIOKCJ25ZWVlkAmzs7NDBAqIioqCLxl6KU1u4L3hRIyLi6uqqnJycmrdfaZGZ/bs2e7u7pCOOHjwIDIdDOQEJGB9n4AI1KNvSt++fRvi4siRI5EpQLmIUExwuVwvL6/g4GBEMCCLFy/OzMyEhWvXrnXt2hXhDbVFM3wR0DT28fEhHSdGwdvbG+6vX7/+9ddfI7yhUETooTfWIOd/yfPXaDZpZs6cCZkK9OTUVYQrVIm4b9++v/76q0OHDsjUuHPnzvDhw1HromfPnuhJTwy2p2VRJSI0jaEHD5ka+oEtEyZMQK0R+B/Td+5jCFWnCkDiGlKGkKxBpsP3339fWlo6b9481EqBTycUCik9JfcfY3pTjlBEfHw8i8WKjY1FBGNAYWMFMqtGPAvupYBku5WVVau3cO7cudj+IhSK6OzsbBIjNxctWgSZ9tdffx21dqBohioTwhIKi2b1Eww2v9s/A8J2//79Bw8ejGgAqSNiyttvvw0N5N69eyOCsaG2ZyUyMlKpxHRm7IkTJ06fPp1WFtK0jgj4+flBXzPCj+joaKga6qf1oA80rSNiS1RU1JYtWzw8PBDNoG8dERorWq0Wn08O7wfK4l9//ZWMzMUNaovm7OxsqIohPBCLxREREadPn6athfStI3p7eysUChxmbCkoKIB64dWrVzFPJ1EKqSMamQcPHsyaNevQoUOI3tA6j1hVVcVkMvWD140C9O5AD97evXsRAWMoP3nq0qVLK1asQEYCjr527VpioR761hGB0NDQM2fODB06FJqrBpiQvS4nT54EBbdu3YoIT6BjHRE6LZKSkuqNube1tYXoaBgdExMTr1y5YsRgjCE41xGpioibNm1ycXGptxJarBAgEfXs3Lnzzp07xMJ6iEQiPC1ElBbN7777ro2NTe1DCL3t2rUzwNn1CQkJRUVF0IOHCM9C0zpi3759hwwZwuH8faFXUFB/LhmlrF69msFgzJ49GxGeg9Z5xBkzZly7dg3kgP6M9evX+/j4IMr4/PPPIYWOT18ObtCxjlhLfHy8h4cH9DhbW1tTauH8+fNDQkKIhU2Acx2xWTU2tUorl2jRP4Tx8UfLlixZ0ql9z+oKqk5cX7J4yaDh/QYMGIAIjQN1xGnTpgUEBCD8eEHRnHKtKumCuLxQaW5ByeXiWwT4CFyBtiJf5xUs6NjX2tnLHBHqAPkyqBrBtwT3+jWw7Ofnt2fPHoQNTUXEayfKS/NVvUY5
WdpyEPbAlysuUf3+S1H4ELs2gZRfRNKE8Pf3T01NhY7W2jXQ4/rWW28hnGi0jnj1WLm4RN0r2tEkLATg393agTv0LXd4549STHUGXyqIiYkxN3+mlGjTpk2/fv0QTjQsYkWxsjRP0X2oAzJB+k10vnkW04k1jMKIESNcXV1rH/L5fAzn0G9YRLAQahTINOHyWJUlqqpyTBNmRgGSCbXtZchw9enTB2FGwyJKxBp7dxMeQOruL6goJiI+BYKi/hpBAoFgypQpCD8aFlGl0Kpq/nG+xvhIKlU6DZnT5xkgKEIvF4RDPC/yReZVx5FH96WQc5VVaZRybY1cg1oCAeoe2e496O4/tbsItQQCIVur0cG9QMhy8jKztPlXjVoiIkakXq9Kuyl9dE/q4idUqXQsNovFYSNmi2UtuvYYAvfVLZRRkNYw1EqVNlup0+qq9peaC1htwwTtwoUWVv/kDRMRsSD9ZvWFxDIbFwGLJ2g3wL4282wqOPgiebUiJ0t271q+VxC/50g7Nufleo+JiEZGo9Ed3loorUZu7Z255ib8c5hb8uAm8rIpzxFvWpAVOdY+qJuw+U8nIhqT4pyafWtyfbq5CN15qLVg624Ftzt/lJTkKXqPsm/ms3C5gj0NEZcpj2wrbtcf6vmtx8JaHP3ty0qZUN9o5v5ERONQ+KgmcX2hZxdX1HqxdbcuLkRHfyxszs5ERCOgVmn3r81r07k1W6jHro21TMq8furFPa5ERCNw+Psin+6t30I9dl52j1IVOenSpncjIhqau3+IpVIGT2AaY5paBL5IeO6XF1QWiYiG5tJv5Q7etohOmAt5TDYbcqVN7IORiEs+nTdn7gzUqkm+LLZrY8nmYTrc/Xby6bmLukmllailsfOyvXulqSsBtpiIBxJ/WrHyU0RokvvXJTwBHefF4/E55YXKiqJGJ1RvMRHT0nCcKxsrVAptSU6NhR1NT6kRiPiZdxoNii3TszJr9vTbt2/AwvHjhzYl7PRt63/nzq3NW78DO6HbNDAg+K233gsMaKff+fCRxJ/27cjPzzU353frGj7jnf/a2tafwhX2+fmXXQUFeTyeWfvQju/GznVwcEQmzsMUqcjLElHGzaQT5y7tKirJ4vH4HUKiBvWfweU+jr7b9yyEvmt/3x5nz28XV5c4iNpED53bxj0EPe5gVB888s2NpGM6rTbIv2db786IMizt+YXZjVYTWyYiLvtstZ9vQN8+UYn7T3l7tc3JeTR33kx7kcO6tT98F7/NnM+f++GM4uLHo49OnDj81dfLogYM+X7L3s8+XZWWfn/Bwg/qnUmYlHQT9hk9avzWLXvjvvhWXFW59PP5yPQRl6g1KqpGMyTfO7dz3yK/tl3nxO54LXpR0t0zP/8ap9/EYrGzHt3Ozrk7a+b2Tz86xudb7d2/TL/pzPkfr15PHD5o1n9nbvfyDDt17ntEGRweuyBT3tjWlhHRwsKCxWZzuFwrK2sWi3Xw158h2i2Y/5mPjy/cPl6wTK1WHz/xeMLWfT/vjIjoPXHCG+7ubcLCOr337ofgYnLy7bqvlvUwg8fjDXx1mKuLW1Bg8JJFK2JnzkGmj6RSTV0z5cyF7d6eHQcPmCmycw/0Cx8SFXvj9rFK8d9DD5VKOdjG45pDjOwYOrC49KFS+Xg+6b9uHw0O6t214zB4VnjX0X4+FM4JwzFj10gbHVtJSas5LT0FAmTtfEt8Ph+0y8hIAx0zMtODAkNq9/T3D4L7BxlpdZ/eIawzFOjvz5p26PCBgsJ8KLhBR2T6yCQaikTUarW5+SkQDmvXgJRwX1D4QP8QPNMX0wDf/PGgGJm8Sq1WlZbluLsG1T7Lw60dohKegCWtavgUDkpG38hkUjtbUd01fL4AVspr5FAKw/LT9eaPT0CWy58Zq+nh4QkF+u69P27avLZ69fLAwGCoI7YCF6mbZUilqtFqNSfObD559plZSauqS/ULbPbz4yp0ECbhD6fOJqhcIirRaXSNDbWkRESBwEIqfaZ9BA9BTXMzcyaTCUY+Xf9kGfav9wpQoH+ycJlGo4FGz9Zt6xd+POunPUewnbelmVhYsUpKWmbcfz04HDOoCPbs/lq3TsOfOaKgqcw550mMlCue/lJyeVM5538JxCBljZZv2bByLVk017Y5/P2CUtNSamdAq5ZUZ2c/DAh4PDliWx+/O8lPr517724S+l8BXUtKSvLdJ+uhugn1yKlvzBCLK8vLmzugCFssrNlqJSUiwr+3q3NARWWBg72n/mZr48pksvn8poamcthcG2vngsL02jVpGdcQZagVGjNBozWTFhPR0sLywYPU9AepIM2IEWMVipqVX30GzefMzAfLln8MMe/VqKGw29ixk65cuQjpm8LCgpu3rq9d91X79h0DnhXx6rXLHy+afe786bz8XHjB/fv3ODk6Ozo6IRPH2p7DZlF1bmRkz0l37p2FVnBxyaO8/NRdPy9Zt2V6Tc0LhhpAlgea21euJ0Jt8tylnfkFaYgylHK1s3ejOdQWK5qjo2PiVix+/4M3l366qmuXHqu+XLdpy9pp08dDVAsJDvvm6wRr68ezx/bvNxAcBRE3b/kO7OwZEfn22x/Ue6lJE6dCPXrjxjWlZSWwT3Bw+xVx8SZ3GsfzeLYTHPuxUOQtQhQQ2q7P+NFLz17Yfvz0JjMzC0+P0BlT15uZCZp+1oC+06SyykPH4rU6baBfxJCod7fvXQDLiAKkpVLf0EaHADc8G9i14+XQum8faap982d257fvZQU/PMKMA+vy2UJLSxEd54jKuJwzZparlV3Dw47I6BuDEtDVQiFRIPpRI1GK3HiNWYjIyVMGJrCL8I9DD4WOFlzzhn+S5JTze/YvbXCTwNxKKhc3uKl7p5FDB76HWoisR7e27mi4BwGSREwGEzVUTerRZRRk0VEjlGaW9xxmjRqHiGhoeo20+/N0hUu7hmda8/PpOnvm/zW4CfpCapPS9eDxWrIS4uYS2Nh7UKkULBan7lSLzXkP0ooaDkfnGdTUmyQiGhrfDpbpt6Q11YoGT94D1Wy5LsiocDg8W5uWfA81FdV9xr6giUbqiEZg8BtOmdfytVpaTBNVlFbi38Hc4UWTyxERjcP4eR6ZV3JRa6covczemRkcbvXCPYmIxsHGgTvhI9f0i9katQlP/9c0JRllPkGcvuOaNe8wEdFo8C04r81xAxelFXLUutCqtXnJhZ5+7M79bZr5FCKiMRHact750oejlebeLpBXtZL8YklWRer57J5DrLtEvUSHCGk1G5+oSY45abLzB0p5Fjwmlyu0F2B7ml8TSMrkklJZVbGk/SvWY2e+9CXGiIhY4O7Hn/iRx6N70rRb0sxreTbO5soaLZvLZnHZDCamnexMFlMlV2pUGqTTVhTIoV0c1EkQ1N3zZWdG1ENExIg2QYI2T7K+Rdk1T6YuVtfItAoZJSPH/j3mFjoGky0Q8vhCtrOXE4f7r6p5REQccfQwc/RAtKJhEblmDC0y4WFXAmsOk2Xyw8ZoRcPh1NKGU/LIhHMK2SkSWyfTPq+AbjQsooM7z3THoco
lapErz8Ka1DpMiUYjomtbs/O/NGuuT9w4tSO/y4Dm5lEJmNDU9Zrv/iFOvyVp39vOxpHLYuOe+q6RaapKlZcOFg+c7OjgQceJjkyaF1w4POuu9Na5ysKsGhYb66LaSsSpKld5Bgk6D7CBblxEMDVeIGItCjnWffM6LTITkO5KE6a5IhIIlEKalgQsICISsICISMACIiIBC4iIBCwgIhKw4P8BAAD//2v4e7oAAAAGSURBVAMA1x7mMDWkAPIAAAAASUVORK5CYII=",
"text/plain": [
"<IPython.core.display.Image object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"from IPython.display import Image, display\n",
"\n",
"display(Image(graph.get_graph(xray=True).draw_mermaid_png()))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5987d58c",
"metadata": {},
"outputs": [],
"source": [
"question = \"\"\n",
"messages = [HumanMessage(content=question)]\n",
"messages = graph.invoke({\"messages\": messages})"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "330cbf17",
"metadata": {},
"outputs": [],
"source": [
"for m in messages['messages']:\n",
" m.pretty_print()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "base",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.7"
}
},
"nbformat": 4,
"nbformat_minor": 5
}