	Update app.py
app.py CHANGED
@@ -12,7 +12,7 @@ from typing import Any, Dict
 from langchain_openai import ChatOpenAI
 from langgraph.graph import StateGraph, START, END
 from langgraph.graph.message import add_messages
-from langchain.schema import HumanMessage, AIMessage
+from langchain.schema import HumanMessage, AIMessage, SystemMessage
 # Create a ToolNode that knows about your web_search function
 
 # (Keep Constants as is)
@@ -44,39 +44,41 @@ compiled_graph = graph.compile()
 
 # ─── 5) Define `respond_to_input` to call `compiled_graph.invoke` ───
 def respond_to_input(user_input: str) -> str:
-    …
-    Finally, we scan final_state["messages"] for the last AIMessage and return its .content.
-    """
-    # 5.a) Build the initial state with a single HumanMessage
+    # ① Describe your tools in a system prompt
+    system_msg = SystemMessage(
+        content=(
+            "You are an assistant with access to the following tools:\n"
+            "  1) web_search(query: str) → Returns the top search results for the query as text.\n"
+            "  2) parse_excel(path: str, sheet_name: str) → Reads an Excel file and returns its contents.\n"
+            "  3) ocr_image(path: str) → Runs OCR on an image and returns any detected text.\n\n"
+            "When you need to look something up on the internet, respond exactly with JSON:\n"
+            '  {"tool":"web_search","query":"<search terms>"}\n'
+            "If you need to parse an Excel file, respond with:\n"
+            '  {"tool":"parse_excel","path":"<file.xlsx>","sheet_name":"<SheetName>"}\n'
+            "If you need to OCR an image, respond with:\n"
+            '  {"tool":"ocr_image","path":"<image.png>"}\n'
+            "If no tool is needed, reply only with your final answer as plain text."
+        )
+    )
+
+    # ② Start the conversation with that system prompt and the user's question
     initial_state = {
-        …
-                       '{"tool":"web_search","query":"<your search terms>"}.'},
-        {"role":"user","content": user_input}
-        ]
+        "messages": [
+            system_msg,
+            HumanMessage(content=user_input)
+        ]
     }
-
+
+    # ③ Invoke the compiled graph
     final_state = compiled_graph.invoke(initial_state)
 
-    # …
-    assistant_messages = [
+    # ④ Collect the last assistant message (AIMessage)
+    assistant_texts = [
         msg.content
         for msg in final_state["messages"]
         if isinstance(msg, AIMessage)
     ]
-    if not assistant_messages:
-        return "⚠️ Agent did not return any AIMessage."
-
-    # Return the final AIMessage's content
-    return assistant_messages[-1]
-
+    return assistant_texts[-1] if assistant_texts else ""
 class BasicAgent:
     def __init__(self):
         print("BasicAgent initialized.")
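
Note: the new system prompt defines a plain-JSON convention for tool calls rather than native function calling, so some node downstream of the LLM has to detect and execute those calls. That routing code is not part of this diff; the sketch below is only an illustration of how such a dispatcher could look, assuming `web_search`, `parse_excel`, and `ocr_image` exist elsewhere in app.py with the signatures the prompt describes. The helper name `dispatch_tool_call` is made up for the example.

import json
from typing import Callable, Dict, Optional

def dispatch_tool_call(reply_text: str, tools: Dict[str, Callable[..., str]]) -> Optional[str]:
    """Run the requested tool if reply_text is a JSON tool call; return None for plain text."""
    # Hypothetical sketch only: the real router/tool node in app.py is not shown in this diff.
    try:
        call = json.loads(reply_text)
    except (TypeError, ValueError):
        return None  # the model answered in plain text, no tool requested
    if not isinstance(call, dict):
        return None
    name = call.pop("tool", None)
    fn = tools.get(name)
    if fn is None:
        return None
    # Remaining keys ("query", "path", "sheet_name", ...) become keyword arguments
    return fn(**call)

Wired up against the real tools it might be called as dispatch_tool_call(reply, {"web_search": web_search, "parse_excel": parse_excel, "ocr_image": ocr_image}), with any returned text appended to the message list before invoking the graph again.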