import streamlit as st
import google.generativeai as genai
from langchain_community.document_loaders import PyPDFDirectoryLoader
from dotenv import load_dotenv
import os

# Load environment variables
load_dotenv()
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")

# Page config
st.set_page_config(page_title="📚 PDF Chat Assistant", page_icon="🤖", layout="wide")
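
# Seed st.session_state with the defaults the app relies on: chat history, load flag, PDF text, Gemini chat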
def initialize_session_state():
    default_values = {
        "messages": [],
        "loaded_files": False,
        "pdf_content": None,
        "chat": None
    }
    for key, value in default_values.items():
        if key not in st.session_state:
            st.session_state[key] = value
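
# Read every PDF in the folder with PyPDFDirectoryLoader and return the concatenated page text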
def load_pdfs(folder_path):
    if not os.path.exists(folder_path):
        os.makedirs(folder_path)
    loader = PyPDFDirectoryLoader(folder_path)
    documents = loader.load()
    return "\n\n".join(doc.page_content for doc in documents)
def initialize_chat(pdf_content):
    genai.configure(api_key=GOOGLE_API_KEY)
    model = genai.GenerativeModel(
        model_name="gemini-1.5-pro",
        generation_config={
            "temperature": 0.7,
            "top_p": 0.95,
            "top_k": 40,
            "max_output_tokens": 8192,
        }
    )
    prompt = f"""You are a helpful assistant. Use the following PDF content to answer questions:\n{pdf_content}\nIf the answer isn't present, say so."""
    chat = model.start_chat(history=[])
    chat.send_message(prompt)
    return chat
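
# Streamlit entry point: sidebar upload flow plus the chat interface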
def main():
    initialize_session_state()
    st.markdown("<h1 style='text-align: center;'>💬 PDF Chat Assistant</h1>", unsafe_allow_html=True)
    with st.sidebar:
        st.header("📁 Upload PDFs")
        uploaded_files = st.file_uploader("Choose PDF files", type=["pdf"], accept_multiple_files=True)

        if uploaded_files and not st.session_state.loaded_files:
            if not os.path.exists("pdfs"):
                os.makedirs("pdfs")
            for f in os.listdir("pdfs"):
                os.remove(os.path.join("pdfs", f))
            for file in uploaded_files:
                with open(f"pdfs/{file.name}", "wb") as f_out:
                    f_out.write(file.getvalue())
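
            # Extract text from the saved PDFs and prime the Gemini chat with it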
            with st.spinner("📖 Reading PDFs..."):
                try:
                    content = load_pdfs("pdfs")
                    st.session_state.pdf_content = content
                    st.session_state.loaded_files = True
                    st.session_state.chat = initialize_chat(content)
                    st.success("✅ PDFs processed successfully!")
                except Exception as e:
                    st.error(f"❌ Error loading PDFs: {str(e)}")
    if st.session_state.loaded_files:
        for message in st.session_state.messages:
            with st.chat_message(message["role"]):
                st.markdown(message["content"])
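
        # Accept a new question, send it to Gemini, and record both sides of the exchange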
        if prompt := st.chat_input("Ask something about the PDFs..."):
            st.session_state.messages.append({"role": "user", "content": prompt})
            with st.chat_message("user"):
                st.markdown(prompt)

            with st.chat_message("assistant"):
                placeholder = st.empty()
                try:
                    if not st.session_state.chat:
                        st.session_state.chat = initialize_chat(st.session_state.pdf_content)
                    response = st.session_state.chat.send_message(prompt)
                    reply = response.text
                    placeholder.markdown(reply)
                    st.session_state.messages.append({"role": "assistant", "content": reply})
                except Exception as e:
                    placeholder.error(f"❌ Failed to respond: {str(e)}")
    else:
        st.info("Upload some PDF files to begin chatting with them!")


if __name__ == "__main__":
    main()