Jeremy Live committed on
Commit
f44bd60
·
1 Parent(s): 57d9a46
Files changed (2) hide show
  1. app.py +13 -7
  2. graph-agent-sql_qa-etheroi.ipynb +1 -0
app.py CHANGED
@@ -194,12 +194,14 @@ def run_crewai_process(user_query, model, temperature):
194
  execution_output += "\n\nPlot Generation Issue: The script ran, but the plot file was not created. Ensure the generated code includes commands to save the plot to 'plot.png'."
195
 
196
  # Check for the generated plot file
197
- if os.path.exists(plot_file_path):
198
- print(f"Plot file found at: {os.path.abspath(plot_file_path)}") # Log file path
199
- generated_plot_path = plot_file_path # Set the path to be returned
 
 
200
  else:
201
- print(f"Plot file not found at expected path: {os.path.abspath(plot_file_path)}") # Log missing file path
202
- execution_output += f"\nPlot file '{plot_file_path}' not found after execution."
203
 
204
  except Exception as e:
205
  traceback_str = traceback.format_exc()
@@ -216,19 +218,23 @@ def run_crewai_process(user_query, model, temperature):
216
  # Update final answer chat to reflect execution attempt
217
  execution_complete_msg = "Code execution finished. See 'Execution Output'."
218
  if generated_plot_path:
219
- plot_msg = "Plot generated successfully. See 'Generated Plot'."
220
  final_answer_chat = [
221
  {"role": "user", "content": str(user_query)},
222
  {"role": "assistant", "content": execution_complete_msg},
223
  {"role": "assistant", "content": plot_msg}
224
  ]
 
 
225
  else:
226
- no_plot_msg = "No plot was generated. Check the execution output for details."
227
  final_answer_chat = [
228
  {"role": "user", "content": str(user_query)},
229
  {"role": "assistant", "content": execution_complete_msg},
230
  {"role": "assistant", "content": no_plot_msg}
231
  ]
 
 
232
 
233
  yield agent_thoughts, final_answer_chat, generated_code, execution_output, generated_plot_path
234
 
 
194
  execution_output += "\n\nPlot Generation Issue: The script ran, but the plot file was not created. Ensure the generated code includes commands to save the plot to 'plot.png'."
195
 
196
  # Check for the generated plot file
197
+ plot_abs_path = os.path.abspath(plot_file_path)
198
+ if os.path.exists(plot_abs_path):
199
+ print(f"Plot file found at: {plot_abs_path}")
200
+ # Return the absolute path to ensure Gradio can find the file
201
+ generated_plot_path = plot_abs_path
202
  else:
203
+ print(f"Plot file not found at expected path: {plot_abs_path}")
204
+ execution_output += f"\nPlot file '{plot_abs_path}' not found after execution.\n\nMake sure the generated code includes:\n1. `plt.savefig('plot.png')` to save the plot\n2. `plt.close()` to close the figure after saving"
205
 
206
  except Exception as e:
207
  traceback_str = traceback.format_exc()
 
218
  # Update final answer chat to reflect execution attempt
219
  execution_complete_msg = "Code execution finished. See 'Execution Output'."
220
  if generated_plot_path:
221
+ plot_msg = "Plot generated successfully. See the 'Generated Plot' tab below."
222
  final_answer_chat = [
223
  {"role": "user", "content": str(user_query)},
224
  {"role": "assistant", "content": execution_complete_msg},
225
  {"role": "assistant", "content": plot_msg}
226
  ]
227
+ yield final_answer_chat, agent_thoughts, generated_code, execution_output, None, generated_plot_path
228
+ return
229
  else:
230
+ no_plot_msg = "No plot was generated. Make sure your query includes a request for a visualization. Check the 'Execution Output' tab for any errors."
231
  final_answer_chat = [
232
  {"role": "user", "content": str(user_query)},
233
  {"role": "assistant", "content": execution_complete_msg},
234
  {"role": "assistant", "content": no_plot_msg}
235
  ]
236
+ yield final_answer_chat, agent_thoughts, generated_code, execution_output, None, None
237
+ return
238
 
239
  yield agent_thoughts, final_answer_chat, generated_code, execution_output, generated_plot_path
240
 
graph-agent-sql_qa-etheroi.ipynb ADDED
@@ -0,0 +1 @@
 
 
1
+ {"cells":[{"cell_type":"code","execution_count":4,"metadata":{"id":"DLGpJxSM4QGl","executionInfo":{"status":"ok","timestamp":1753726325372,"user_tz":360,"elapsed":12035,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["%%capture --no-stderr\n","%pip install --upgrade --quiet langchain-community langgraph"]},{"cell_type":"code","execution_count":5,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":11584,"status":"ok","timestamp":1753726336963,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"},"user_tz":360},"id":"Zd55aAbXBoUg","outputId":"bf3bd1ff-fdcb-4255-b645-1dc7c3f2f0f6"},"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting mysql-connector-python\n"," Downloading mysql_connector_python-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl.metadata (7.3 kB)\n","Collecting pymysql\n"," Downloading PyMySQL-1.1.1-py3-none-any.whl.metadata (4.4 kB)\n","Downloading mysql_connector_python-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl (33.9 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m33.9/33.9 MB\u001b[0m \u001b[31m43.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hDownloading PyMySQL-1.1.1-py3-none-any.whl (44 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m45.0/45.0 kB\u001b[0m \u001b[31m2.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hInstalling collected packages: pymysql, mysql-connector-python\n","Successfully installed mysql-connector-python-9.4.0 pymysql-1.1.1\n"]}],"source":["!pip install mysql-connector-python pymysql"]},{"cell_type":"code","execution_count":6,"metadata":{"id":"GT1FaHALBvCv","executionInfo":{"status":"ok","timestamp":1753726341044,"user_tz":360,"elapsed":4078,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["from langchain_community.utilities import SQLDatabase\n","import 
mysql.connector\n","\n","# MySQL database connection\n","db = SQLDatabase.from_uri(\n"," \"mysql+pymysql://khayacore_user_gtp:A~aZA#[email protected]/khayacore_gtp\"\n",")\n","\n","#print(db.dialect)\n","#print(db.get_usable_table_names())\n"]},{"cell_type":"code","execution_count":7,"metadata":{"id":"5LSB9TdRCrZM","executionInfo":{"status":"ok","timestamp":1753726341054,"user_tz":360,"elapsed":3,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["#db.run(\"SELECT * FROM crm_clientes LIMIT 10;\")"]},{"cell_type":"code","execution_count":8,"metadata":{"id":"Vx2Bl5Bf4QGn","executionInfo":{"status":"ok","timestamp":1753726341055,"user_tz":360,"elapsed":2,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["from typing_extensions import TypedDict\n","\n","\n","class State(TypedDict):\n"," question: str\n"," query: str\n"," result: str\n"," answer: str"]},{"cell_type":"code","execution_count":9,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":12464,"status":"ok","timestamp":1753726353517,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"},"user_tz":360},"id":"dzuyBSiWDHnn","outputId":"faf00b9c-7050-4499-8cd5-837b2d783c4c"},"outputs":[{"output_type":"stream","name":"stdout","text":["\u001b[?25l \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.0/42.0 kB\u001b[0m \u001b[31m?\u001b[0m eta \u001b[36m-:--:--\u001b[0m\r\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m42.0/42.0 kB\u001b[0m \u001b[31m1.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25h"]}],"source":["%pip install --upgrade --quiet google-generativeai google-ai-generativelanguage langchain-community langgraph 
\"langchain[google-genai]\""]},{"cell_type":"code","execution_count":10,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":9934,"status":"ok","timestamp":1753726363458,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"},"user_tz":360},"id":"e346e677","outputId":"fcfb8edb-a906-457c-951d-38b360b11927"},"outputs":[{"output_type":"stream","name":"stdout","text":["\u001b[?25l \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.0/70.6 kB\u001b[0m \u001b[31m?\u001b[0m eta \u001b[36m-:--:--\u001b[0m\r\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m70.6/70.6 kB\u001b[0m \u001b[31m2.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25h"]}],"source":["%pip install --upgrade --quiet langchain-openai"]},{"cell_type":"code","execution_count":11,"metadata":{"id":"ozr4eAHC4QGo","executionInfo":{"status":"ok","timestamp":1753726367429,"user_tz":360,"elapsed":3969,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["# | output: false\n","# | echo: false\n","\n","from langchain_openai import ChatOpenAI\n","from google.colab import userdata\n","\n","llm = ChatOpenAI(model=\"gpt-4o\", temperature=0, api_key=userdata.get(\"OPENAI_API_KEY\"))"]},{"cell_type":"markdown","metadata":{"id":"GR3j1i3J4QGo"},"source":["Let's provide some instructions for our model:"]},{"cell_type":"code","execution_count":12,"metadata":{"id":"wN2jW_6U4QGo","executionInfo":{"status":"ok","timestamp":1753726367547,"user_tz":360,"elapsed":117,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["from langchain_core.prompts import ChatPromptTemplate\n","\n","system_message = \"\"\"\n","Given an input question, create a syntactically correct {dialect} query to\n","run to help find the answer. 
Unless the user specifies in his question a\n","specific number of examples they wish to obtain, always limit your query to\n","at most {top_k} results. You can order the results by a relevant column to\n","return the most interesting examples in the database.\n","\n","Never query for all the columns from a specific table, only ask for a the\n","few relevant columns given the question.\n","\n","Pay attention to use only the column names that you can see in the schema\n","description. Be careful to not query for columns that do not exist. Also,\n","pay attention to which column is in which table.\n","\n","Only use the following tables:\n","{table_info}\n","\"\"\"\n","\n","user_prompt = \"Question: {input}\"\n","\n","query_prompt_template = ChatPromptTemplate(\n"," [(\"system\", system_message), (\"user\", user_prompt)]\n",")\n","\n","#for message in query_prompt_template.messages:\n","# message.pretty_print()"]},{"cell_type":"code","execution_count":13,"metadata":{"id":"-KAnltz24QGo","executionInfo":{"status":"ok","timestamp":1753726367562,"user_tz":360,"elapsed":12,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["from typing_extensions import Annotated\n","\n","\n","class QueryOutput(TypedDict):\n"," \"\"\"Generated SQL query.\"\"\"\n","\n"," query: Annotated[str, ..., \"Syntactically valid SQL query.\"]\n","\n","\n","def write_query(state: State):\n"," \"\"\"Generate SQL query to fetch information.\"\"\"\n"," prompt = query_prompt_template.invoke(\n"," {\n"," \"dialect\": db.dialect,\n"," \"top_k\": 10,\n"," \"table_info\": db.get_table_info(),\n"," \"input\": state[\"question\"],\n"," }\n"," )\n"," structured_llm = llm.with_structured_output(QueryOutput)\n"," result = structured_llm.invoke(prompt)\n"," return {\"query\": 
result[\"query\"]}"]},{"cell_type":"code","execution_count":14,"metadata":{"id":"c_G5sEgd4QGp","executionInfo":{"status":"ok","timestamp":1753726367572,"user_tz":360,"elapsed":8,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["#write_query({\"question\": \"ΒΏCuΓ‘ntos clientes nuevos hubo en junio?\"})"]},{"cell_type":"code","execution_count":15,"metadata":{"id":"0hu_Yegx4QGp","executionInfo":{"status":"ok","timestamp":1753726367617,"user_tz":360,"elapsed":30,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["from langchain_community.tools.sql_database.tool import QuerySQLDatabaseTool\n","\n","\n","def execute_query(state: State):\n"," \"\"\"Execute SQL query.\"\"\"\n"," execute_query_tool = QuerySQLDatabaseTool(db=db)\n"," return {\"result\": execute_query_tool.invoke(state[\"query\"])}"]},{"cell_type":"code","execution_count":16,"metadata":{"id":"WWxcGP5u4QGp","executionInfo":{"status":"ok","timestamp":1753726367618,"user_tz":360,"elapsed":2,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["#execute_query({\"query\": \"SELECT COUNT(cli_id) AS nuevos_clientes FROM crm_clientes WHERE MONTH(cli_creado) = 6 AND YEAR(cli_creado) = 2023;\"})"]},{"cell_type":"code","execution_count":17,"metadata":{"id":"fM2qaDUe4QGp","executionInfo":{"status":"ok","timestamp":1753726367631,"user_tz":360,"elapsed":14,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["def generate_answer(state: State):\n"," \"\"\"Answer question using retrieved information as context.\"\"\"\n"," prompt = (\n"," \"Given the following user question, corresponding SQL query, \"\n"," \"and SQL result, answer the user question.\\n\\n\"\n"," f\"Question: {state['question']}\\n\"\n"," f\"SQL Query: {state['query']}\\n\"\n"," f\"SQL Result: {state['result']}\"\n"," )\n"," response = llm.invoke(prompt)\n"," return {\"answer\": 
response.content}"]},{"cell_type":"code","execution_count":18,"metadata":{"id":"gJKNtpUl4QGp","executionInfo":{"status":"ok","timestamp":1753726367744,"user_tz":360,"elapsed":77,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["from langgraph.graph import START, StateGraph\n","\n","graph_builder = StateGraph(State).add_sequence(\n"," [write_query, execute_query, generate_answer]\n",")\n","graph_builder.add_edge(START, \"write_query\")\n","graph = graph_builder.compile()"]},{"cell_type":"code","execution_count":19,"metadata":{"id":"qAup3BwqFr3_","executionInfo":{"status":"ok","timestamp":1753726367745,"user_tz":360,"elapsed":3,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["# for step in graph.stream(\n","# {\"question\": \"ΒΏCuΓ‘ntos clientes nuevos hubo en junio?\"}, stream_mode=\"updates\"\n","# ):\n","# print(step)"]},{"cell_type":"code","execution_count":20,"metadata":{"id":"Kjvw2EoD4QGq","executionInfo":{"status":"ok","timestamp":1753726367830,"user_tz":360,"elapsed":86,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["from langchain_community.agent_toolkits import SQLDatabaseToolkit\n","\n","toolkit = SQLDatabaseToolkit(db=db, llm=llm)\n","\n","tools = toolkit.get_tools()\n","\n","#tools"]},{"cell_type":"code","execution_count":21,"metadata":{"id":"YrgMHzcO4QGz","executionInfo":{"status":"ok","timestamp":1753726367845,"user_tz":360,"elapsed":15,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["system_message = \"\"\"\n","You are an agent designed to interact with a SQL database.\n","Given an input question, create a syntactically correct {dialect} query to run,\n","then look at the results of the query and return the answer. 
Unless the user\n","specifies a specific number of examples they wish to obtain, always limit your\n","query to at most {top_k} results.\n","\n","You can order the results by a relevant column to return the most interesting\n","examples in the database. Never query for all the columns from a specific table,\n","only ask for the relevant columns given the question.\n","\n","You MUST double check your query before executing it. If you get an error while\n","executing a query, rewrite the query and try again.\n","\n","DO NOT make any DML statements (INSERT, UPDATE, DELETE, DROP etc.) to the\n","database.\n","\n","To start you should ALWAYS look at the tables in the database to see what you\n","can query. Do NOT skip this step.\n","\n","Then you should query the schema of the most relevant tables.\n","\"\"\".format(\n"," dialect=\"SQLite\",\n"," top_k=5,\n",")"]},{"cell_type":"code","execution_count":22,"metadata":{"id":"563NRiN_4QG0","executionInfo":{"status":"ok","timestamp":1753726367898,"user_tz":360,"elapsed":53,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["from langchain_core.messages import HumanMessage\n","from langgraph.prebuilt import create_react_agent\n","\n","agent_executor = create_react_agent(llm, tools, prompt=system_message)"]},{"cell_type":"code","execution_count":23,"metadata":{"executionInfo":{"elapsed":42,"status":"ok","timestamp":1753726367939,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"},"user_tz":360},"id":"gQ_fMmSX4QG0"},"outputs":[],"source":["#question = \"ΒΏCuΓ‘ntas citas se han agendadoΒ porΒ ticket?\"\n","\n","#for step in agent_executor.stream(\n","# {\"messages\": [{\"role\": \"user\", \"content\": question}]},\n","# stream_mode=\"values\",\n","#):\n","# step[\"messages\"][-1].pretty_print()"]},{"cell_type":"code","execution_count":24,"metadata":{"executionInfo":{"elapsed":1,"status":"ok","timestamp":1753726367940,"user":{"displayName":"Jeremy 
Live","userId":"09707482511916100466"},"user_tz":360},"id":"9R-UTGRvL2m1"},"outputs":[],"source":["#question = \"Dame las unidades sin reservas.\"\n","\n","#for step in agent_executor.stream(\n","# {\"messages\": [{\"role\": \"user\", \"content\": question}]},\n","# stream_mode=\"values\",\n","#):\n","# step[\"messages\"][-1].pretty_print()"]},{"cell_type":"code","execution_count":25,"metadata":{"id":"4c4BqJKC4QG0","executionInfo":{"status":"ok","timestamp":1753726367942,"user_tz":360,"elapsed":1,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["#question = \"ΒΏCuΓ‘ntos tickets estΓ‘n pendientes y en quΓ© unidades?\"\n","\n","#for step in agent_executor.stream(\n","# {\"messages\": [{\"role\": \"user\", \"content\": question}]},\n","# stream_mode=\"values\",\n","#):\n","# step[\"messages\"][-1].pretty_print()"]},{"cell_type":"code","execution_count":26,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":9609,"status":"ok","timestamp":1753726377553,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"},"user_tz":360},"id":"7b3a8efe","outputId":"fc58acd6-60b5-4ce8-a080-419746abe747"},"outputs":[{"output_type":"stream","name":"stdout","text":["Requirement already satisfied: gradio in /usr/local/lib/python3.11/dist-packages (5.38.1)\n","Requirement already satisfied: aiofiles<25.0,>=22.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (24.1.0)\n","Requirement already satisfied: anyio<5.0,>=3.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (4.9.0)\n","Requirement already satisfied: brotli>=1.1.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (1.1.0)\n","Requirement already satisfied: fastapi<1.0,>=0.115.2 in /usr/local/lib/python3.11/dist-packages (from gradio) (0.116.1)\n","Requirement already satisfied: ffmpy in /usr/local/lib/python3.11/dist-packages (from gradio) (0.6.1)\n","Requirement already satisfied: gradio-client==1.11.0 in 
/usr/local/lib/python3.11/dist-packages (from gradio) (1.11.0)\n","Requirement already satisfied: groovy~=0.1 in /usr/local/lib/python3.11/dist-packages (from gradio) (0.1.2)\n","Requirement already satisfied: httpx<1.0,>=0.24.1 in /usr/local/lib/python3.11/dist-packages (from gradio) (0.28.1)\n","Requirement already satisfied: huggingface-hub>=0.28.1 in /usr/local/lib/python3.11/dist-packages (from gradio) (0.33.5)\n","Requirement already satisfied: jinja2<4.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (3.1.6)\n","Requirement already satisfied: markupsafe<4.0,>=2.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (3.0.2)\n","Requirement already satisfied: numpy<3.0,>=1.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (2.0.2)\n","Requirement already satisfied: orjson~=3.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (3.11.0)\n","Requirement already satisfied: packaging in /usr/local/lib/python3.11/dist-packages (from gradio) (25.0)\n","Requirement already satisfied: pandas<3.0,>=1.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (2.2.2)\n","Requirement already satisfied: pillow<12.0,>=8.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (11.3.0)\n","Requirement already satisfied: pydantic<2.12,>=2.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (2.11.7)\n","Requirement already satisfied: pydub in /usr/local/lib/python3.11/dist-packages (from gradio) (0.25.1)\n","Requirement already satisfied: python-multipart>=0.0.18 in /usr/local/lib/python3.11/dist-packages (from gradio) (0.0.20)\n","Requirement already satisfied: pyyaml<7.0,>=5.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (6.0.2)\n","Requirement already satisfied: ruff>=0.9.3 in /usr/local/lib/python3.11/dist-packages (from gradio) (0.12.5)\n","Requirement already satisfied: safehttpx<0.2.0,>=0.1.6 in /usr/local/lib/python3.11/dist-packages (from gradio) (0.1.6)\n","Requirement already satisfied: semantic-version~=2.0 in 
/usr/local/lib/python3.11/dist-packages (from gradio) (2.10.0)\n","Requirement already satisfied: starlette<1.0,>=0.40.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (0.47.2)\n","Requirement already satisfied: tomlkit<0.14.0,>=0.12.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (0.13.3)\n","Requirement already satisfied: typer<1.0,>=0.12 in /usr/local/lib/python3.11/dist-packages (from gradio) (0.16.0)\n","Requirement already satisfied: typing-extensions~=4.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (4.14.1)\n","Requirement already satisfied: uvicorn>=0.14.0 in /usr/local/lib/python3.11/dist-packages (from gradio) (0.35.0)\n","Requirement already satisfied: fsspec in /usr/local/lib/python3.11/dist-packages (from gradio-client==1.11.0->gradio) (2025.3.0)\n","Requirement already satisfied: websockets<16.0,>=10.0 in /usr/local/lib/python3.11/dist-packages (from gradio-client==1.11.0->gradio) (15.0.1)\n","Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.11/dist-packages (from anyio<5.0,>=3.0->gradio) (3.10)\n","Requirement already satisfied: sniffio>=1.1 in /usr/local/lib/python3.11/dist-packages (from anyio<5.0,>=3.0->gradio) (1.3.1)\n","Requirement already satisfied: certifi in /usr/local/lib/python3.11/dist-packages (from httpx<1.0,>=0.24.1->gradio) (2025.7.14)\n","Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.11/dist-packages (from httpx<1.0,>=0.24.1->gradio) (1.0.9)\n","Requirement already satisfied: h11>=0.16 in /usr/local/lib/python3.11/dist-packages (from httpcore==1.*->httpx<1.0,>=0.24.1->gradio) (0.16.0)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.11/dist-packages (from huggingface-hub>=0.28.1->gradio) (3.18.0)\n","Requirement already satisfied: requests in /usr/local/lib/python3.11/dist-packages (from huggingface-hub>=0.28.1->gradio) (2.32.3)\n","Requirement already satisfied: tqdm>=4.42.1 in /usr/local/lib/python3.11/dist-packages (from 
huggingface-hub>=0.28.1->gradio) (4.67.1)\n","Requirement already satisfied: hf-xet<2.0.0,>=1.1.2 in /usr/local/lib/python3.11/dist-packages (from huggingface-hub>=0.28.1->gradio) (1.1.5)\n","Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.11/dist-packages (from pandas<3.0,>=1.0->gradio) (2.9.0.post0)\n","Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.11/dist-packages (from pandas<3.0,>=1.0->gradio) (2025.2)\n","Requirement already satisfied: tzdata>=2022.7 in /usr/local/lib/python3.11/dist-packages (from pandas<3.0,>=1.0->gradio) (2025.2)\n","Requirement already satisfied: annotated-types>=0.6.0 in /usr/local/lib/python3.11/dist-packages (from pydantic<2.12,>=2.0->gradio) (0.7.0)\n","Requirement already satisfied: pydantic-core==2.33.2 in /usr/local/lib/python3.11/dist-packages (from pydantic<2.12,>=2.0->gradio) (2.33.2)\n","Requirement already satisfied: typing-inspection>=0.4.0 in /usr/local/lib/python3.11/dist-packages (from pydantic<2.12,>=2.0->gradio) (0.4.1)\n","Requirement already satisfied: click>=8.0.0 in /usr/local/lib/python3.11/dist-packages (from typer<1.0,>=0.12->gradio) (8.2.1)\n","Requirement already satisfied: shellingham>=1.3.0 in /usr/local/lib/python3.11/dist-packages (from typer<1.0,>=0.12->gradio) (1.5.4)\n","Requirement already satisfied: rich>=10.11.0 in /usr/local/lib/python3.11/dist-packages (from typer<1.0,>=0.12->gradio) (13.9.4)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.11/dist-packages (from python-dateutil>=2.8.2->pandas<3.0,>=1.0->gradio) (1.17.0)\n","Requirement already satisfied: markdown-it-py>=2.2.0 in /usr/local/lib/python3.11/dist-packages (from rich>=10.11.0->typer<1.0,>=0.12->gradio) (3.0.0)\n","Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /usr/local/lib/python3.11/dist-packages (from rich>=10.11.0->typer<1.0,>=0.12->gradio) (2.19.2)\n","Requirement already satisfied: charset-normalizer<4,>=2 in 
/usr/local/lib/python3.11/dist-packages (from requests->huggingface-hub>=0.28.1->gradio) (3.4.2)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.11/dist-packages (from requests->huggingface-hub>=0.28.1->gradio) (2.5.0)\n","Requirement already satisfied: mdurl~=0.1 in /usr/local/lib/python3.11/dist-packages (from markdown-it-py>=2.2.0->rich>=10.11.0->typer<1.0,>=0.12->gradio) (0.1.2)\n"]}],"source":["%pip install gradio"]},{"cell_type":"code","execution_count":27,"metadata":{"id":"d22e936c","executionInfo":{"status":"ok","timestamp":1753726383285,"user_tz":360,"elapsed":5731,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["import gradio as gr\n","import time\n","\n","def stream_agent_response(question):\n"," final_answer = \"\"\n"," for step in agent_executor.stream(\n"," {\"messages\": [{\"role\": \"user\", \"content\": question}]},\n"," stream_mode=\"values\",\n"," ):\n"," if \"messages\" in step and step[\"messages\"]:\n"," latest_message = step[\"messages\"][-1]\n"," if hasattr(latest_message, 'content'):\n"," # Stream messages as they arrive\n"," yield latest_message.content\n"," final_answer = latest_message.content # Keep the latest message as the potential final answer\n","\n"," # After streaming, return the final answer for the separate component\n"," return final_answer"]},{"cell_type":"code","execution_count":28,"metadata":{"id":"51690448","executionInfo":{"status":"ok","timestamp":1753726383345,"user_tz":360,"elapsed":2,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"outputs":[],"source":["import gradio as gr\n","import time\n","from langchain_core.messages import HumanMessage, AIMessage\n","\n","def stream_agent_response(question, chat_history):\n"," messages = []\n"," for human, ai in chat_history:\n"," messages.append(HumanMessage(content=human))\n"," if ai is not None: # Handle the case where the bot response is not yet available\n"," 
messages.append(AIMessage(content=ai))\n"," messages.append(HumanMessage(content=question))\n","\n"," final_answer = \"\"\n"," streamed_content = \"\"\n"," # Yield the current chat history with the new user message and an empty bot response for immediate display\n"," yield chat_history + [[question, None]], gr.update(value=\"\")\n","\n"," for step in agent_executor.stream(\n"," {\"messages\": messages},\n"," stream_mode=\"values\",\n"," ):\n"," if \"messages\" in step and step[\"messages\"]:\n"," latest_message = step[\"messages\"][-1]\n"," if hasattr(latest_message, 'content'):\n"," # Accumulate streamed content for the streaming output display\n"," streamed_content += latest_message.content\n"," # Update the streaming output display\n"," yield chat_history + [[question, None]], gr.update(value=streamed_content)\n","\n"," # Keep track of the latest message content as the potential final answer\n"," final_answer = latest_message.content\n","\n"," # After streaming is complete, update the chatbot history with the final answer\n"," yield chat_history + [[question, final_answer]], gr.update(value=\"\")\n"]},{"cell_type":"code","source":["# with gr.Blocks() as demo:\n","# gr.Markdown(\"# SQL Database Agent\")\n","\n","# # This will be used to display the streaming output and will be always visible\n","# streaming_output_display = gr.Textbox(label=\"Streaming Output\", interactive=False, lines=10) # Increased lines for better display\n","\n","# chatbot = gr.Chatbot()\n","\n","# with gr.Row():\n","# with gr.Column(scale=4):\n","# question_input = gr.Textbox(label=\"Enter your question:\", placeholder=\"Ask me anything about the database...\")\n","# with gr.Column(scale=1):\n","# submit_button = gr.Button(\"Send\")\n","\n","\n","# def user_message(user_input, chat_history):\n","# return \"\", chat_history + [[user_input, None]]\n","\n","# def bot_response(chat_history):\n","# question = chat_history[-1][0]\n","# # This part will handle streaming updates to the streaming 
output\n","# # and then update the chatbot with the final answer\n","# yield from stream_agent_response(question, chat_history[:-1])\n","\n","\n","# # Handle sending message and getting bot response\n","# submit_button.click(user_message, [question_input, chatbot], [question_input, chatbot], queue=False).then(\n","# bot_response, [chatbot], [chatbot, streaming_output_display]\n","# )\n","# question_input.submit(user_message, [question_input, chatbot], [question_input, chatbot], queue=False).then(\n","# bot_response, [chatbot], [chatbot, streaming_output_display]\n","# )\n","\n","# demo.launch(debug=True)"],"metadata":{"id":"cJNaebezM3SO","executionInfo":{"status":"ok","timestamp":1753726383346,"user_tz":360,"elapsed":0,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"execution_count":29,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"e09d6a71"},"source":["# Task\n","Integrate a plotting tool into the existing agent and modify the Gradio interface to display plots within the chatbox."]},{"cell_type":"markdown","metadata":{"id":"d1aff5c7"},"source":["## Define a plotting tool\n","\n","### Subtask:\n","Create a new tool that can generate plots based on data.\n"]},{"cell_type":"markdown","metadata":{"id":"648e3d43"},"source":["**Reasoning**:\n","Define a function to generate plots, save them to a temporary file, and return the file path. 
Then, wrap this function as a LangChain tool.\n","\n"]},{"cell_type":"code","metadata":{"id":"a2859461","executionInfo":{"status":"ok","timestamp":1753726383884,"user_tz":360,"elapsed":536,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"source":["import matplotlib.pyplot as plt\n","import pandas as pd\n","import os\n","from langchain_core.tools import tool\n","import json # Import json to handle potential JSON string input for plotting_spec\n","\n","@tool\n","def plot_data(data: str, plotting_spec: str) -> str:\n"," \"\"\"\n"," Generates a plot based on the provided data and plotting specifications.\n","\n"," Args:\n"," data: A string representation of the data, expected to be a JSON string\n"," that can be loaded into a pandas DataFrame.\n"," plotting_spec: A JSON string specifying the plot type (e.g., 'bar', 'line', 'scatter'),\n"," x and y axis columns, title, x and y labels.\n"," Example: '{\"plot_type\": \"bar\", \"x\": \"category\", \"y\": \"value\", \"title\": \"My Plot\", \"xlabel\": \"Category\", \"ylabel\": \"Value\"}'\n","\n"," Returns:\n"," The path to the saved plot image file.\n"," \"\"\"\n"," try:\n"," # Load data from JSON string into a pandas DataFrame\n"," data_list = json.loads(data)\n"," df = pd.DataFrame(data_list)\n","\n"," # Load plotting specifications from JSON string\n"," spec = json.loads(plotting_spec)\n"," plot_type = spec.get(\"plot_type\", \"bar\")\n"," x_col = spec.get(\"x\")\n"," y_col = spec.get(\"y\")\n"," title = spec.get(\"title\", \"Generated Plot\")\n"," xlabel = spec.get(\"xlabel\", x_col)\n"," ylabel = spec.get(\"ylabel\", y_col)\n","\n"," plt.figure(figsize=(10, 6))\n","\n"," if plot_type == \"bar\":\n"," if x_col and y_col:\n"," plt.bar(df[x_col], df[y_col])\n"," else:\n"," return \"Error: 'x' and 'y' columns must be specified for a bar plot.\"\n"," elif plot_type == \"line\":\n"," if x_col and y_col:\n"," plt.plot(df[x_col], df[y_col])\n"," else:\n"," return \"Error: 'x' and 'y' columns must be 
specified for a line plot.\"\n"," elif plot_type == \"scatter\":\n"," if x_col and y_col:\n"," plt.scatter(df[x_col], df[y_col])\n"," else:\n"," return \"Error: 'x' and 'y' columns must be specified for a scatter plot.\"\n"," else:\n"," return f\"Error: Unsupported plot type '{plot_type}'.\"\n","\n"," plt.title(title)\n"," plt.xlabel(xlabel)\n"," plt.ylabel(ylabel)\n"," plt.tight_layout()\n","\n"," # Save the plot to a temporary file\n"," plot_filename = f\"plot_{int(time.time())}.png\"\n"," plot_path = os.path.join(\"/tmp\", plot_filename)\n"," plt.savefig(plot_path)\n"," plt.close() # Close the plot to free up memory\n","\n"," return plot_path\n","\n"," except Exception as e:\n"," return f\"Error generating plot: {e}\"\n"],"execution_count":30,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"9b7c9a21"},"source":["## Add the plotting tool to the agent\n","\n","### Subtask:\n","Include the new plotting tool in the list of tools available to the agent.\n"]},{"cell_type":"markdown","metadata":{"id":"8fc17cf2"},"source":["**Reasoning**:\n","To include the new plotting tool in the list of tools available to the agent, I will first get the existing list of tools and then append the new `plot_data` tool to it. 
Finally, I will recreate the agent executor with the updated list of tools.\n","\n"]},{"cell_type":"code","metadata":{"id":"5abc74a0","executionInfo":{"status":"ok","timestamp":1753726383904,"user_tz":360,"elapsed":23,"user":{"displayName":"Jeremy Live","userId":"09707482511916100466"}}},"source":["tools.append(plot_data)\n","agent_executor = create_react_agent(llm, tools, prompt=system_message)"],"execution_count":31,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"e0c50ea4"},"source":["## Modify the gradio interface\n","\n","### Subtask:\n","Update the Gradio application to recognize and display plot outputs within the chat interface.\n"]},{"cell_type":"markdown","metadata":{"id":"4a39131a"},"source":["**Reasoning**:\n","Modify the Gradio application to handle both text and image outputs from the agent and update the chatbot component accordingly.\n","\n"]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/","height":680},"id":"ea1f49d2","outputId":"38d4c38b-5b3b-4811-df7f-45473c9eb46d"},"source":["import gradio as gr\n","import time\n","from langchain_core.messages import HumanMessage, AIMessage, ToolMessage\n","import os\n","\n","def stream_agent_response(question, chat_history):\n"," messages = []\n"," for human, ai in chat_history:\n"," messages.append(HumanMessage(content=human))\n"," if ai is not None: # Handle the case where the bot response is not yet available\n"," if isinstance(ai, str):\n"," messages.append(AIMessage(content=ai))\n"," elif isinstance(ai, dict) and ai.get('name') == 'Generated Plot': # Handle image component\n"," messages.append(AIMessage(content=f\"![Generated Plot]({ai.get('value')})\")) # Represent image as markdown\n"," messages.append(HumanMessage(content=question))\n","\n"," streamed_content = \"\"\n"," # Yield the current chat history with the new user message and an empty bot response for immediate display\n"," yield chat_history + [[question, None]], gr.update(value=\"\", visible=True) # Make 
streaming output visible initially\n","\n"," final_response_content = \"\"\n"," plot_output = None # To store the plot path if generated\n","\n"," for step in agent_executor.stream(\n"," {\"messages\": messages},\n"," stream_mode=\"values\",\n"," ):\n"," if \"messages\" in step and step[\"messages\"]:\n"," latest_message = step[\"messages\"][-1]\n","\n"," if isinstance(latest_message, AIMessage):\n"," if latest_message.tool_calls:\n"," for tool_call in latest_message.tool_calls:\n"," if tool_call['name'] == 'plot_data':\n"," # Assuming the tool call has 'data' and 'plotting_spec' in args\n"," try:\n"," streamed_content += f\"\\nCalling tool: plot_data with args: {tool_call['args']}\\n\"\n"," yield chat_history + [[question, None]], gr.update(value=streamed_content, visible=True)\n","\n"," tool_output = plot_data.invoke(tool_call['args'])\n"," plot_output = tool_output # Store the plot path\n"," streamed_content += f\"Tool output: {plot_output}\\n\"\n"," # Update the streaming output display immediately after tool call\n"," yield chat_history + [[question, None]], gr.update(value=streamed_content, visible=True)\n","\n"," except Exception as e:\n"," streamed_content += f\"\\nError calling plot_data tool: {e}\\n\"\n"," yield chat_history + [[question, None]], gr.update(value=streamed_content, visible=True)\n"," plot_output = f\"Error generating plot: {e}\" # Store error message\n"," if latest_message.content:\n"," # Accumulate streamed content for the streaming output display\n"," streamed_content += latest_message.content\n"," # Update the streaming output display\n"," yield chat_history + [[question, None]], gr.update(value=streamed_content, visible=True)\n","\n"," # Keep track of the latest message content as the potential final answer\n"," final_response_content = latest_message.content\n"," elif isinstance(latest_message, ToolMessage):\n"," # Display tool observations\n"," streamed_content += f\"\\nObservation: {latest_message.content}\\n\"\n"," yield 
chat_history + [[question, None]], gr.update(value=streamed_content, visible=True)\n"," elif hasattr(latest_message, 'content'):\n"," # Display other types of messages\n"," streamed_content += latest_message.content\n"," yield chat_history + [[question, None]], gr.update(value=streamed_content, visible=True)\n","\n","\n"," # After streaming is complete, process the final response content or plot\n"," final_chat_history = chat_history + [[question, None]]\n"," if plot_output and os.path.exists(plot_output) and plot_output.endswith(\".png\"):\n"," # If a plot was generated, add it as a Gradio Image component\n"," final_chat_history[-1][1] = gr.Image(value=plot_output, label=\"Generated Plot\")\n"," elif final_response_content:\n"," # Otherwise, add the final text response\n"," final_chat_history[-1][1] = final_response_content\n"," else:\n"," # If no plot and no final text, perhaps there was an error or no response\n"," final_chat_history[-1][1] = \"Could not generate a response.\"\n","\n","\n"," # Update the chatbot history with the final answer (text or image) and hide the streaming output display\n"," yield final_chat_history, gr.update(value=\"\", visible=False)\n","\n","\n","with gr.Blocks() as demo:\n"," gr.Markdown(\"# SQL Database Agent\")\n","\n"," # This will be used to display the streaming output and will be always visible initially\n"," streaming_output_display = gr.Textbox(label=\"Streaming Output\", interactive=False, lines=10, visible=False) # Start hidden\n","\n"," chatbot = gr.Chatbot()\n","\n"," with gr.Row():\n"," with gr.Column(scale=4):\n"," question_input = gr.Textbox(label=\"Enter your question:\", placeholder=\"Ask me anything about the database...\")\n"," with gr.Column(scale=1):\n"," submit_button = gr.Button(\"Send\")\n","\n","\n"," def user_message(user_input, chat_history):\n"," return \"\", chat_history + [[user_input, None]]\n","\n"," def bot_response(chat_history):\n"," question = chat_history[-1][0]\n"," # This part will handle 
streaming updates to the streaming output\n"," # and then update the chatbot with the final answer\n"," yield from stream_agent_response(question, chat_history[:-1])\n","\n","\n"," # Handle sending message and getting bot response\n"," submit_button.click(user_message, [question_input, chatbot], [question_input, chatbot], queue=False).then(\n"," bot_response, [chatbot], [chatbot, streaming_output_display] # Pass streaming_output_display as an output\n"," )\n"," question_input.submit(user_message, [question_input, chatbot], [question_input, chatbot], queue=False).then(\n"," bot_response, [chatbot], [chatbot, streaming_output_display] # Pass streaming_output_display as an output\n"," )\n","\n","demo.launch(debug=True)"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["/tmp/ipython-input-59-774706149.py:91: UserWarning: You have not specified a value for the `type` parameter. Defaulting to the 'tuples' format for chatbot messages, but this is deprecated and will be removed in a future version of Gradio. Please set type='messages' instead, which uses openai-style dictionaries with 'role' and 'content' keys.\n"," chatbot = gr.Chatbot()\n"]},{"output_type":"stream","name":"stdout","text":["It looks like you are running Gradio on a hosted Jupyter notebook, which requires `share=True`. Automatically setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n","\n","Colab notebook detected. This cell will run indefinitely so that you can see errors and logs. To turn off, set debug=False in launch().\n","* Running on public URL: https://56a00990e0061de883.gradio.live\n","\n","This share link expires in 1 week. 
For free permanent hosting and GPU upgrades, run `gradio deploy` from the terminal in the working directory to deploy to Hugging Face Spaces (https://huggingface.co/spaces)\n"]},{"output_type":"display_data","data":{"text/plain":["<IPython.core.display.HTML object>"],"text/html":["<div><iframe src=\"https://56a00990e0061de883.gradio.live\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"]},"metadata":{}}]}],"metadata":{"colab":{"provenance":[{"file_id":"10xizU525_Q7QsEN5yGWyREd9RH4kwgvV","timestamp":1753652216483},{"file_id":"1FIAVVOTJUCkSr14KnT6o649rZf72nTVN","timestamp":1753625131415},{"file_id":"https://github.com/langchain-ai/langchain/blob/master/docs/docs/tutorials/sql_qa.ipynb","timestamp":1753468882142}]},"kernelspec":{"display_name":"Python 3 (ipykernel)","language":"python","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.10.4"}},"nbformat":4,"nbformat_minor":0}