# docling_rag / app.py
import streamlit as st
import os
import time
from utils.ingestion import DocumentProcessor
from utils.llm import LLMProcessor
from utils.qa import QAEngine
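
# NOTE: DocumentProcessor, LLMProcessor and QAEngine come from the local utils
# package, which is not shown in this file. From their use below, the assumed
# (not verified) interfaces are roughly:
#   DocumentProcessor.process_document(path) -> ingests/indexes a PDF
#   QAEngine.query(question)                 -> returns an answer string
#   LLMProcessor                             -> instantiated but not called here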
# Set up Streamlit page with modern UI
st.set_page_config(page_title="AI-Powered Document Chat", layout="wide")
# CSS for better UI styling
st.markdown(
    """
    <style>
    .chat-container {
        max-width: 800px;
        margin: auto;
    }
    .chat-bubble {
        padding: 12px;
        border-radius: 12px;
        margin-bottom: 10px;
        max-width: 75%;
    }
    .user-bubble {
        background-color: #0078ff;
        color: white;
        text-align: right;
        margin-left: auto;
    }
    .ai-bubble {
        background-color: #f1f1f1;
        color: black;
    }
    .chat-input-container {
        position: fixed;
        bottom: 10px;
        width: 100%;
        max-width: 800px;
        background: white;
        padding: 10px;
        box-shadow: 0 -2px 10px rgba(0,0,0,0.1);
        border-radius: 8px;
    }
    .chat-input {
        width: 100%;
        padding: 10px;
        border-radius: 8px;
        border: 1px solid #ccc;
    }
    .spinner {
        text-align: center;
        padding: 20px;
    }
    </style>
    """,
    unsafe_allow_html=True
)
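
# Note: the .chat-input-container, .chat-input and .spinner rules above are not
# attached to any element emitted by this script (st.text_input and st.spinner
# render their own markup), so they only take effect if matching divs are added
# elsewhere.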
# Initialize processors
document_processor = DocumentProcessor()
llm_processor = LLMProcessor()
qa_engine = QAEngine()
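
# These objects are rebuilt on every Streamlit rerun. If their construction is
# expensive (e.g. loading models or embeddings), caching them with
# @st.cache_resource would be the usual pattern; that is an optional
# improvement, not done here.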
# Ensure temp directory exists
os.makedirs("temp", exist_ok=True)
# Sidebar - File Upload
st.sidebar.header("📂 Upload a PDF")
uploaded_file = st.sidebar.file_uploader("Choose a PDF file", type=["pdf"])
if uploaded_file:
    pdf_path = os.path.join("temp", uploaded_file.name)
    with open(pdf_path, "wb") as f:
        f.write(uploaded_file.read())
    st.sidebar.success("✅ File uploaded successfully!")

    # Ingest each uploaded file only once; without this guard the PDF would be
    # re-processed on every rerun (i.e. after every question).
    if st.session_state.get("processed_file") != uploaded_file.name:
        with st.spinner("Processing document..."):
            document_processor.process_document(pdf_path)
        st.session_state.processed_file = uploaded_file.name
    st.sidebar.success("✅ Document processed successfully!")
# Initialize chat history in session state
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []
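# Each history entry is a (role, text) tuple, where role is "user" or "ai".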
# Chat UI container
st.title("💬 AI-Powered Document Chat")
chat_container = st.container()
with chat_container:
    st.markdown('<div class="chat-container">', unsafe_allow_html=True)
    for role, text in st.session_state.chat_history:
        if role == "user":
            st.markdown(f'<div class="chat-bubble user-bubble">{text}</div>', unsafe_allow_html=True)
        else:
            st.markdown(f'<div class="chat-bubble ai-bubble">{text}</div>', unsafe_allow_html=True)
    st.markdown('</div>', unsafe_allow_html=True)
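
# Note: Streamlit renders each st.markdown call as its own element, so the
# opening/closing <div> pair above does not reliably wrap the bubbles in
# .chat-container; each bubble is styled by its own chat-bubble classes.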
# User Input at the bottom with fixed position
question = st.text_input(
    "Ask a question:",
    placeholder="Type your question and press Enter...",
    key="user_input"
)
if question and question != st.session_state.get("last_question"):
    # Remember the last handled question: st.text_input keeps its value across
    # reruns, so without this guard the same question would be re-answered on
    # every rerun.
    st.session_state.last_question = question
    # Append user question to chat history
    st.session_state.chat_history.append(("user", question))
    with st.spinner("Thinking..."):
        time.sleep(0.5)  # Simulated delay
        answer = qa_engine.query(question)
    # Append AI answer to chat history
    st.session_state.chat_history.append(("ai", answer))
    # Rerun the app to redraw the chat with the new messages
    st.rerun()
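
# To run locally (assuming Streamlit and the utils package's dependencies are
# installed): streamlit run app.py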