# Streamlit front-end for the AI Document Q&A chatbot.
import streamlit as st
from document_chat import ingest_pdf, process_query_with_memory
from langchain.memory import ConversationBufferMemory
# Configure Streamlit app.
# NOTE: set_page_config must be the first Streamlit call in the script.
st.set_page_config(page_title="AI Document Q&A Chatbot", layout="wide")
st.title("📄 AI-Powered Document Chatbot")
st.write("Upload a document and ask questions!")
# Upload document: returns None until the user picks a PDF, after which it
# holds an UploadedFile object on every rerun.
uploaded_file = st.file_uploader("Upload a PDF", type=["pdf"])
if uploaded_file:
    # Persist the uploaded PDF to a fixed path so the ingestion helper can
    # read it from disk.
    file_path = "uploaded_doc.pdf"
    with open(file_path, "wb") as f:
        f.write(uploaded_file.getbuffer())
    st.success("File uploaded! Processing...")
    # Streamlit reruns this whole script on every widget interaction, so an
    # unconditional ingest_pdf() would re-process the same document on every
    # question. Only ingest when a new file (by name) arrives.
    if st.session_state.get("ingested_file") != uploaded_file.name:
        ingest_pdf(file_path)
        st.session_state["ingested_file"] = uploaded_file.name
# Initialize conversation memory once per session; st.session_state survives
# Streamlit's script reruns, so the buffer accumulates across questions.
if "memory" not in st.session_state:
    st.session_state["memory"] = ConversationBufferMemory(
        memory_key="chat_history", return_messages=True
    )
# Question input: empty string (falsy) until the user submits a question.
query = st.text_input("Ask a question:")
if query:
    # Show a spinner only while the model is working.
    with st.spinner("Thinking..."):
        response = process_query_with_memory(query, st.session_state["memory"])
    # Record the turn in the conversation buffer.
    # NOTE(review): if process_query_with_memory already writes to the memory
    # it is given, this records each turn twice — confirm against its source.
    st.session_state["memory"].save_context({"input": query}, {"output": response})
    st.write(response)
# Show chat history. Messages are assumed to alternate user/assistant, since
# save_context stores one input/output pair per turn; hoist the long
# attribute chain so it is evaluated once instead of five times.
messages = st.session_state["memory"].chat_memory.messages
if messages:
    st.subheader("Chat History")
    for i in range(0, len(messages), 2):
        user_message = messages[i].content
        # Guard the pair lookup: the last user message may not have a
        # stored reply yet.
        bot_response = messages[i + 1].content if i + 1 < len(messages) else "..."
        st.write(f"**User:** {user_message}")
        st.write(f"**Bot:** {bot_response}")