Spaces:
Sleeping
Sleeping
File size: 3,804 Bytes
fab9b4e e142901 37206b3 fab9b4e e142901 37206b3 fab9b4e e142901 fab9b4e e142901 fab9b4e e142901 37206b3 e142901 fab9b4e 37206b3 fab9b4e 37206b3 fab9b4e e142901 b13e7da e142901 fab9b4e e54383e fab9b4e e142901 37206b3 fab9b4e e54383e e142901 fab9b4e e142901 e54383e fab9b4e e54383e 37206b3 e142901 fab9b4e e142901 e54383e e142901 fab9b4e e142901 fab9b4e e142901 fab9b4e e54383e fab9b4e e142901 fab9b4e 37206b3 fab9b4e e54383e e142901 e54383e fab9b4e e142901 fab9b4e e142901 fab9b4e 37206b3 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 |
import streamlit as st
import google.generativeai as genai
from langchain_community.document_loaders import PyPDFDirectoryLoader
from dotenv import load_dotenv
import os
# Load environment variables from a local .env file. GOOGLE_API_KEY is read
# once at import time and used later when the Gemini chat session is created.
load_dotenv()
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")

# Page config must be the first Streamlit call in the script.
# NOTE(review): the original title/icon strings were mojibake (UTF-8 emoji
# decoded as Windows-1253); restored to plausible glyphs — confirm against
# the intended originals.
st.set_page_config(page_title="📚 PDF Chat Assistant", page_icon="🤖", layout="wide")
def initialize_session_state():
    """Seed ``st.session_state`` with this app's keys when they are absent.

    Existing values are never overwritten, so reruns of the script keep the
    conversation and loaded-PDF state intact.
    """
    defaults = (
        ("messages", []),       # chat transcript: {"role", "content"} dicts
        ("loaded_files", False),  # True once uploaded PDFs were processed
        ("pdf_content", None),  # concatenated text of all loaded PDFs
        ("chat", None),         # live Gemini chat session, if any
    )
    for name, initial in defaults:
        if name not in st.session_state:
            st.session_state[name] = initial
def load_pdfs(folder_path):
    """Load every PDF under *folder_path* and return their concatenated text.

    Parameters
    ----------
    folder_path : str
        Directory scanned by ``PyPDFDirectoryLoader``. Created if missing,
        in which case the result is an empty string.

    Returns
    -------
    str
        All page texts joined with blank lines between pages.
    """
    # exist_ok=True replaces the original exists()-then-makedirs() pattern,
    # which was racy and more verbose for the same effect.
    os.makedirs(folder_path, exist_ok=True)
    loader = PyPDFDirectoryLoader(folder_path)
    documents = loader.load()
    return "\n\n".join(doc.page_content for doc in documents)
def initialize_chat(pdf_content):
    """Create a Gemini chat session primed with the extracted PDF text.

    Parameters
    ----------
    pdf_content : str
        Full text extracted from the uploaded PDFs.

    Returns
    -------
    A ``google-generativeai`` chat session whose first turn instructs the
    model to answer from the supplied content only.

    Raises
    ------
    RuntimeError
        If GOOGLE_API_KEY was not found in the environment — fail fast with
        a clear message instead of an opaque API error later.
    """
    if not GOOGLE_API_KEY:
        raise RuntimeError("GOOGLE_API_KEY is not set; add it to your .env file.")
    genai.configure(api_key=GOOGLE_API_KEY)
    model = genai.GenerativeModel(
        model_name="gemini-2.0-flash",
        generation_config={
            "temperature": 0.7,
            "top_p": 0.95,
            "top_k": 40,
            "max_output_tokens": 8192,
        },
    )
    prompt = f"""You are a helpful assistant. Use the following PDF content to answer questions:\n{pdf_content}\nIf the answer isn't present, say so."""
    chat = model.start_chat(history=[])
    # Prime the session with the instructions + document text; the model's
    # acknowledgement reply is intentionally discarded.
    chat.send_message(prompt)
    return chat
def _save_uploaded_files(uploaded_files, folder="pdfs"):
    """Persist Streamlit upload buffers into *folder*, clearing old files first."""
    os.makedirs(folder, exist_ok=True)
    # Remove stale PDFs so a fresh upload fully replaces the previous set.
    for name in os.listdir(folder):
        os.remove(os.path.join(folder, name))
    for file in uploaded_files:
        with open(os.path.join(folder, file.name), "wb") as f_out:
            f_out.write(file.getvalue())


def main():
    """Streamlit entry point: upload PDFs in the sidebar, then chat about them.

    NOTE(review): the emoji in the UI strings below were mojibake in the
    original (UTF-8 emoji decoded as Windows-1253); restored to plausible
    glyphs — confirm against the intended originals.
    """
    initialize_session_state()
    st.markdown("<h1 style='text-align: center;'>💬 PDF Chat Assistant</h1>", unsafe_allow_html=True)

    with st.sidebar:
        st.header("📁 Upload PDFs")
        uploaded_files = st.file_uploader("Choose PDF files", type=["pdf"], accept_multiple_files=True)
        # Only (re)process uploads once; loaded_files guards against reruns.
        if uploaded_files and not st.session_state.loaded_files:
            _save_uploaded_files(uploaded_files)
            with st.spinner("📖 Reading PDFs..."):
                try:
                    content = load_pdfs("pdfs")
                    st.session_state.pdf_content = content
                    st.session_state.loaded_files = True
                    st.session_state.chat = initialize_chat(content)
                    # The original source had this literal split across two
                    # lines (paste-broken string); rejoined into one call.
                    st.success("✅ PDFs processed successfully!")
                except Exception as e:
                    st.error(f"❌ Error loading PDFs: {e}")

    if st.session_state.loaded_files:
        # Replay the transcript so the conversation survives Streamlit reruns.
        for message in st.session_state.messages:
            with st.chat_message(message["role"]):
                st.markdown(message["content"])
        if prompt := st.chat_input("Ask something about the PDFs..."):
            st.session_state.messages.append({"role": "user", "content": prompt})
            with st.chat_message("user"):
                st.markdown(prompt)
            with st.chat_message("assistant"):
                placeholder = st.empty()
                try:
                    # Lazily rebuild the chat session if it was lost
                    # (e.g. session state partially reset).
                    if not st.session_state.chat:
                        st.session_state.chat = initialize_chat(st.session_state.pdf_content)
                    response = st.session_state.chat.send_message(prompt)
                    reply = response.text
                    placeholder.markdown(reply)
                    st.session_state.messages.append({"role": "assistant", "content": reply})
                except Exception as e:
                    placeholder.error(f"❌ Failed to respond: {e}")
    else:
        st.info("Upload some PDF files to begin chatting with them!")


if __name__ == "__main__":
    main()
|