File size: 3,194 Bytes
b1b6964
 
bd6665e
b1b6964
 
4afa186
b1b6964
bd6665e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b1b6964
 
 
 
 
 
6e1201a
 
 
bd6665e
 
b1b6964
 
 
6e1201a
 
b1b6964
 
 
 
 
bd6665e
b1b6964
6e1201a
b1b6964
 
bd6665e
 
 
b1b6964
bd6665e
 
 
6e1201a
bd6665e
 
 
 
 
b1b6964
bd6665e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b1b6964
bd6665e
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
import html
import os
import time

import streamlit as st

from utils.ingestion import DocumentProcessor
from utils.llm import LLMProcessor
from utils.qa import QAEngine

# Page configuration: descriptive tab title, wide layout for the chat view.
st.set_page_config(page_title="AI-Powered Document Chat", layout="wide")

# Custom stylesheet injected once at the top of the page. It defines the
# chat column, the user/AI message bubbles, and the fixed bottom input bar.
_CHAT_CSS = """
    <style>
    .chat-container {
        max-width: 800px;
        margin: auto;
    }
    .chat-bubble {
        padding: 12px;
        border-radius: 12px;
        margin-bottom: 10px;
        max-width: 75%;
    }
    .user-bubble {
        background-color: #0078ff;
        color: white;
        text-align: right;
        margin-left: auto;
    }
    .ai-bubble {
        background-color: #f1f1f1;
        color: black;
    }
    .chat-input-container {
        position: fixed;
        bottom: 10px;
        width: 100%;
        max-width: 800px;
        background: white;
        padding: 10px;
        box-shadow: 0 -2px 10px rgba(0,0,0,0.1);
        border-radius: 8px;
    }
    .chat-input {
        width: 100%;
        padding: 10px;
        border-radius: 8px;
        border: 1px solid #ccc;
    }
    .spinner {
        text-align: center;
        padding: 20px;
    }
    </style>
    """

# unsafe_allow_html is required so the <style> tag is rendered, not escaped.
st.markdown(_CHAT_CSS, unsafe_allow_html=True)

# Initialize the pipeline components (project-local classes; their
# construction cost/side effects are defined in utils/ — not visible here).
document_processor = DocumentProcessor()  # PDF ingestion/indexing
llm_processor = LLMProcessor()            # NOTE(review): constructed but never used below — confirm it is needed
qa_engine = QAEngine()                    # answers questions over the processed document

# Uploaded PDFs are staged under ./temp before processing.
os.makedirs("temp", exist_ok=True)

# Sidebar - File Upload
st.sidebar.header("πŸ“‚ Upload a PDF")
uploaded_file = st.sidebar.file_uploader("Choose a PDF file", type=["pdf"])

if uploaded_file:
    pdf_path = os.path.join("temp", uploaded_file.name)

    # Streamlit re-executes this whole script on every widget interaction,
    # and file_uploader keeps returning the same file — so without this
    # guard the PDF would be re-saved and re-processed on every rerun.
    if st.session_state.get("processed_file") != uploaded_file.name:
        with open(pdf_path, "wb") as f:
            f.write(uploaded_file.read())

        st.sidebar.success("βœ… File uploaded successfully!")

        # Give the spinner a real message (it was empty before).
        with st.spinner("Processing document..."):
            document_processor.process_document(pdf_path)

        # Remember which file has been ingested to skip redundant work.
        st.session_state["processed_file"] = uploaded_file.name
        st.sidebar.success("βœ… Document processed successfully!")

# Persist the conversation across Streamlit reruns: the history lives in
# session state as a list of (role, text) tuples.
if "chat_history" not in st.session_state:
    st.session_state["chat_history"] = []

# Chat UI container
st.title("πŸ’¬ AI-Powered Document Chat")
chat_container = st.container()

with chat_container:
    st.markdown('<div class="chat-container">', unsafe_allow_html=True)
    for role, text in st.session_state.chat_history:
        # Escape message text before interpolating it into raw HTML:
        # with unsafe_allow_html=True, unescaped user input (or model
        # output) would be rendered as markup — an HTML/script injection.
        safe_text = html.escape(text)
        css_class = "user-bubble" if role == "user" else "ai-bubble"
        st.markdown(
            f'<div class="chat-bubble {css_class}">{safe_text}</div>',
            unsafe_allow_html=True,
        )
    st.markdown('</div>', unsafe_allow_html=True)

# Question box at the bottom of the page; pressing Enter submits.
# The "user_input" key keeps the widget's value in session state.
question = st.text_input(
    label="Ask a question:",
    placeholder="Type your question and press Enter...",
    key="user_input",
)

if question:
    # Streamlit reruns the script on each interaction and text_input keeps
    # its value, so this branch fires again after st.rerun(). Without the
    # guard below, the same Q/A pair is appended in an endless rerun loop.
    if st.session_state.get("last_question") != question:
        # Append user question to chat history
        st.session_state.chat_history.append(("user", question))

        # st.spinner() takes plain text, not HTML — the old '<div ...>'
        # message was displayed literally. The artificial time.sleep(0.5)
        # "simulated delay" is dropped; the real query provides the wait.
        with st.spinner("Thinking..."):
            answer = qa_engine.query(question)

        # Append AI answer to chat history
        st.session_state.chat_history.append(("ai", answer))

        # Mark this question as answered before triggering a rerun.
        st.session_state["last_question"] = question

        # Rerun so the chat container above redraws with the new messages.
        st.rerun()