naman1102 committed on
Commit
6c99cc0
·
1 Parent(s): 0de927d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +27 -25
app.py CHANGED
@@ -12,7 +12,7 @@ from typing import Any, Dict
12
  from langchain_openai import ChatOpenAI
13
  from langgraph.graph import StateGraph, START, END
14
  from langgraph.graph.message import add_messages
15
- from langchain.schema import HumanMessage, AIMessage
16
  # Create a ToolNode that knows about your web_search function
17
 
18
  # (Keep Constants as is)
@@ -44,39 +44,41 @@ compiled_graph = graph.compile()
44
 
45
  # ─── 5) Define `respond_to_input` to call `compiled_graph.invoke` ───
46
  def respond_to_input(user_input: str) -> str:
47
- """
48
- In v0.4.7, create_react_agent expects:
49
- state["messages"] == list[BaseMessage], typically starting with a HumanMessage.
 
 
 
 
 
 
 
 
 
 
 
 
 
50
 
51
- We feed it exactly that, then call compiled_graph.invoke().
52
- Finally, we scan final_state["messages"] for the last AIMessage and return its .content.
53
- """
54
- # 5.a) Build the initial state with a single HumanMessage
55
  initial_state = {
56
- "messages": [
57
- {"role":"system",
58
- "content": "You are an assistant that has access to a tool called web_search(query: str). "
59
- "Whenever you need up-to-date facts, respond with JSON like "
60
- '{"tool":"web_search","query":"<your search terms>"}.'},
61
- {"role":"user","content": user_input}
62
- ]
63
  }
64
-
 
65
  final_state = compiled_graph.invoke(initial_state)
66
 
67
- # 5.c) Extract the last AIMessage from final_state["messages"]
68
- # (create_react_agent always appends its assistant replies as AIMessage)
69
- assistant_messages = [
70
  msg.content
71
  for msg in final_state["messages"]
72
  if isinstance(msg, AIMessage)
73
  ]
74
- if not assistant_messages:
75
- return "❗️Agent did not return any AIMessage."
76
-
77
- # Return the final AIMessage's content
78
- return assistant_messages[-1]
79
-
80
  class BasicAgent:
81
  def __init__(self):
82
  print("BasicAgent initialized.")
 
12
  from langchain_openai import ChatOpenAI
13
  from langgraph.graph import StateGraph, START, END
14
  from langgraph.graph.message import add_messages
15
+ from langchain.schema import HumanMessage, AIMessage, SystemMessage
16
  # Create a ToolNode that knows about your web_search function
17
 
18
  # (Keep Constants as is)
 
44
 
45
  # ─── 5) Define `respond_to_input` to call `compiled_graph.invoke` ───
46
  def respond_to_input(user_input: str) -> str:
47
+ # ① Describe your tools in a system prompt
48
+ system_msg = SystemMessage(
49
+ content=(
50
+ "You are an assistant with access to the following tools:\n"
51
+ " 1) web_search(query: str) → Returns the top search results for the query as text.\n"
52
+ " 2) parse_excel(path: str, sheet_name: str) → Reads an Excel file and returns its contents.\n"
53
+ " 3) ocr_image(path: str) → Runs OCR on an image and returns any detected text.\n\n"
54
+ "When you need to look something up on the internet, respond exactly with JSON:\n"
55
+ ' {"tool":"web_search","query":"<search terms>"}\n'
56
+ "If you need to parse an Excel file, respond with:\n"
57
+ ' {"tool":"parse_excel","path":"<file.xlsx>","sheet_name":"<SheetName>"}\n'
58
+ "If you need to OCR an image, respond with:\n"
59
+ ' {"tool":"ocr_image","path":"<image.png>"}\n'
60
+ "If no tool is needed, reply only with your final answer as plain text."
61
+ )
62
+ )
63
 
64
+ # ② Start the conversation with that system prompt and the user's question
 
 
 
65
  initial_state = {
66
+ "messages": [
67
+ system_msg,
68
+ HumanMessage(content=user_input)
69
+ ]
 
 
 
70
  }
71
+
72
+ # ③ Invoke the compiled graph
73
  final_state = compiled_graph.invoke(initial_state)
74
 
75
+ # ④ Collect the last assistant message (AIMessage)
76
+ assistant_texts = [
 
77
  msg.content
78
  for msg in final_state["messages"]
79
  if isinstance(msg, AIMessage)
80
  ]
81
+ return assistant_texts[-1] if assistant_texts else ""
 
 
 
 
 
82
  class BasicAgent:
83
  def __init__(self):
84
  print("BasicAgent initialized.")