Manojkumarpandi committed on
Commit
fab9b4e
·
verified ·
1 Parent(s): 2b82b34

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +140 -0
app.py ADDED
@@ -0,0 +1,140 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st
import google.generativeai as genai
from langchain.document_loaders import PyPDFDirectoryLoader
import os
import shutil  # NOTE(review): not referenced below — possibly left over; confirm before removing

# Configuration
# Gemini API key is read from Streamlit secrets (.streamlit/secrets.toml);
# raises KeyError at startup if the secret is missing.
GOOGLE_API_KEY = st.secrets["GOOGLE_API_KEY"]

# Page configuration
st.set_page_config(page_title="Chat with PDFs", page_icon="📚")
12
+
13
def initialize_session_state():
    """Seed st.session_state with every key this app reads, if absent.

    Keys: "messages" (chat history), "loaded_files" (upload-done flag),
    "pdf_content" (concatenated document text), "chat" (Gemini session).
    Existing values are never overwritten, so Streamlit reruns keep state.
    """
    defaults = {
        "messages": [],
        "loaded_files": False,
        "pdf_content": None,
        "chat": None,
    }

    for key in defaults:
        if key not in st.session_state:
            st.session_state[key] = defaults[key]
25
+
26
def load_pdfs(pdf_folder):
    """Read every PDF under *pdf_folder* and return their text as one string.

    The folder is created if it does not exist (yielding an empty result).
    Pages are joined with blank lines between them.
    """
    os.makedirs(pdf_folder, exist_ok=True)

    documents = PyPDFDirectoryLoader(pdf_folder).load()

    # One document per page; stitch all page texts together.
    return "\n\n".join(doc.page_content for doc in documents)
37
+
38
def initialize_chat(pdf_content):
    """Open a Gemini chat session primed with the PDF text as context.

    Configures the API client, builds a gemini-1.5-pro model, and sends the
    document content as the first message so later turns can reference it.
    Returns the live chat session.
    """
    genai.configure(api_key=GOOGLE_API_KEY)

    model = genai.GenerativeModel(
        model_name="gemini-1.5-pro",
        generation_config={
            "temperature": 0.7,
            "top_p": 0.95,
            "top_k": 40,
            "max_output_tokens": 8192,
        },
    )

    # Prime the session: the whole document text rides in the first turn.
    context_prompt = f"""You are a helpful assistant that answers questions based on the following document content:
{pdf_content}
Please use this content to answer user questions. If the answer cannot be found in the content, say so."""

    session = model.start_chat(history=[])
    session.send_message(context_prompt)
    return session
63
+
64
def _ingest_uploads(uploaded_files):
    """Persist uploaded PDFs to ./pdfs and build the Gemini chat context.

    Returns True on success; on failure shows a Streamlit error and returns
    False, leaving session state in a retryable condition.
    """
    os.makedirs("pdfs", exist_ok=True)

    # Drop stale files from a previous upload so old content can't leak into
    # the new context. Guard with isfile(): os.remove on a subdirectory
    # would raise and abort the whole ingest.
    for name in os.listdir("pdfs"):
        path = os.path.join("pdfs", name)
        if os.path.isfile(path):
            os.remove(path)

    # Save the new uploads.
    for file in uploaded_files:
        with open(os.path.join("pdfs", file.name), "wb") as f:
            f.write(file.getvalue())

    with st.spinner("Processing PDFs..."):
        try:
            pdf_content = load_pdfs("pdfs")
            st.session_state.pdf_content = pdf_content
            st.session_state.chat = initialize_chat(pdf_content)
            # Flip the flag only after the chat is ready: if initialize_chat
            # raises, the app stays in the "please upload" state instead of
            # showing a chat UI with no session behind it.
            st.session_state.loaded_files = True
        except Exception as e:
            st.error(f"Error processing PDFs: {str(e)}")
            return False
    return True


def _render_chat():
    """Replay the stored history and handle one chat turn with Gemini."""
    # Display chat messages accumulated across reruns.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # Chat input
    if prompt := st.chat_input("Ask a question about your PDFs:"):
        # Record and echo the user's turn.
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            response_placeholder = st.empty()
            try:
                # Recreate the session if it was lost (e.g. a rerun after an
                # error) — the PDF text is still held in session state.
                if not st.session_state.chat:
                    st.session_state.chat = initialize_chat(st.session_state.pdf_content)

                response = st.session_state.chat.send_message(prompt)
                response_text = response.text
                response_placeholder.markdown(response_text)

                # Persist the assistant's turn for future reruns.
                st.session_state.messages.append({"role": "assistant", "content": response_text})
            except Exception as e:
                response_placeholder.error(f"Error generating response: {str(e)}")


def main():
    """Streamlit entry point: sidebar upload flow plus the chat interface."""
    initialize_session_state()

    st.title("💬 Chat with PDFs")

    # Sidebar for PDF upload
    with st.sidebar:
        st.header("Upload Documents")
        uploaded_files = st.file_uploader(
            "Upload your PDFs",
            type=["pdf"],
            accept_multiple_files=True
        )

        # Only ingest once per upload; loaded_files gates reprocessing on rerun.
        if uploaded_files and not st.session_state.loaded_files:
            if not _ingest_uploads(uploaded_files):
                return

    # Main chat interface
    if st.session_state.loaded_files:
        _render_chat()
    else:
        st.info("Please upload PDFs to start chatting.")
138
+
139
if __name__ == "__main__":
    # Script entry point (launched via `streamlit run app.py`).
    main()