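"""Streamlit chat front end for the Sustainability Report AI Assistant.

Collects user questions, forwards the conversation history to the LangGraph
scraper agent, and renders the agent's replies in a chat interface.
"""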
import streamlit as st
from dotenv import load_dotenv

from langchain_core.messages import HumanMessage, AIMessage, SystemMessage

from application.agents.scraper_agent import app
from main import graph  # alternative entry point (see commented-out invoke in invoke_agent)
from application.utils.logger import get_logger

logger = get_logger()

st.set_page_config(page_title="Sustainability AI Assistant", layout="wide")
st.title("♻️ Sustainability Report AI Assistant")
st.caption(
    "Ask about sustainability reports by company or industry! "
    "(e.g., 'Get sustainability report for Apple', 'Download sustainability report for Microsoft 2023', "
    "'Find sustainability reports for top 3 airline companies', 'Download this pdf <link>')"
)

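# Load environment variables (e.g., API keys) from a local .env file, if present.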
load_dotenv()

def initialize_chat_history():
    """Initialize session chat history."""
    if "messages" not in st.session_state:
        st.session_state.messages = []
        logger.info("Initialized empty chat history in session state.")

def display_chat_history():
    """Render previous chat messages."""
    for message in st.session_state.messages:
        # SystemMessage entries are not rendered in the chat UI.
        if isinstance(message, HumanMessage):
            with st.chat_message("user"):
                st.markdown(message.content)
        elif isinstance(message, AIMessage):
            with st.chat_message("assistant"):
                st.markdown(message.content)

def invoke_agent():
    """Invoke the LangGraph agent and update session state."""
    try:
        graph_input = {"messages": st.session_state.messages}
        logger.info("Invoking LangGraph agent...")

        # The top-level graph from main is kept as an alternative entry point;
        # the scraper agent's compiled app is used here.
        # final_output_state = graph.invoke(graph_input, {"recursion_limit": 15})
        final_output_state = app.invoke(graph_input, {"recursion_limit": 15})

        logger.info("Agent invocation completed successfully.")
        return final_output_state

    except Exception as e:
        logger.error("Agent invocation failed.", exc_info=True)
        st.error(f"An error occurred while processing your request: {e}")
        return None

def display_last_ai_response():
    """Display the latest AI message, if any."""
    last_ai_message = next(
        (msg for msg in reversed(st.session_state.messages) if isinstance(msg, AIMessage)),
        None
    )
    if last_ai_message:
        with st.chat_message("assistant"):
            st.markdown(last_ai_message.content)
        logger.info("Displayed latest AI response.")
    else:
        st.warning("Agent completed without a final AI message.")
        logger.warning("No AI message found in the final output.")

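# Main chat flow: set up session history, render it, and handle new input.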
initialize_chat_history()
display_chat_history()

if user_query := st.chat_input("Your question about sustainability reports..."):
    logger.info(f"User input received: {user_query}")

    st.session_state.messages.append(HumanMessage(content=user_query))

    with st.chat_message("user"):
        st.markdown(user_query)

    with st.spinner("Processing your request... Please wait."):
        final_output_state = invoke_agent()

        if final_output_state:
            st.session_state.messages = final_output_state["messages"]
            display_last_ai_response()

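# Sidebar control for resetting the conversation.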
with st.sidebar:
    st.markdown("---")
    if st.button("Clear Chat History"):
        st.session_state.messages = []
        logger.info("Chat history cleared by user.")
        st.rerun()