Manojkumarpandi committed
Commit e142901 · verified · 1 Parent(s): e54383e

Update app.py

Files changed (1)
  1. app.py +34 -94
app.py CHANGED
@@ -1,67 +1,27 @@
  import streamlit as st
  import google.generativeai as genai
- from langchain.document_loaders import PyPDFDirectoryLoader
+ from langchain_community.document_loaders import PyPDFDirectoryLoader
  from dotenv import load_dotenv
  import os

- # Load API keys
+ # Load environment variables
  load_dotenv()
  GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")

- # Page configuration
- st.set_page_config(
-     page_title="📚 Chat with PDFs",
-     page_icon="📄",
-     layout="wide"
- )
+ # Page config
+ st.set_page_config(page_title="📚 PDF Chat Assistant", page_icon="🤖", layout="wide")

- # Custom CSS styling
- st.markdown("""
- <style>
- .main {
-     background-color: #f7f9fc;
-     padding: 20px;
- }
- .stChatMessage {
-     background-color: #ffffff;
-     border-radius: 1rem;
-     padding: 15px;
-     margin: 10px 0;
-     box-shadow: 0 2px 8px rgba(0,0,0,0.05);
- }
- .user-message {
-     background-color: #e0f7fa;
-     border-left: 5px solid #00796b;
- }
- .assistant-message {
-     background-color: #fff8e1;
-     border-left: 5px solid #fbc02d;
- }
- .sidebar .stButton button {
-     width: 100%;
- }
- .uploaded-files {
-     font-size: 0.9rem;
-     color: #666;
- }
- </style>
- """, unsafe_allow_html=True)
-
-
- # Initialize session variables
  def initialize_session_state():
-     defaults = {
+     default_values = {
          "messages": [],
          "loaded_files": False,
          "pdf_content": None,
          "chat": None
      }
-     for key, val in defaults.items():
+     for key, value in default_values.items():
          if key not in st.session_state:
-             st.session_state[key] = val
-
+             st.session_state[key] = value

- # Load PDFs
  def load_pdfs(folder_path):
      if not os.path.exists(folder_path):
          os.makedirs(folder_path)
@@ -69,78 +29,58 @@ def load_pdfs(folder_path):
      documents = loader.load()
      return "\n\n".join(doc.page_content for doc in documents)

-
- # Initialize Gemini chat
  def initialize_chat(pdf_content):
      genai.configure(api_key=GOOGLE_API_KEY)
-     config = {
-         "temperature": 0.7,
-         "top_p": 0.95,
-         "top_k": 40,
-         "max_output_tokens": 8192,
-     }
-
-     model = genai.GenerativeModel("gemini-1.5-pro", generation_config=config)
-
-     prompt = f"""You are a helpful assistant. Use the following PDF content to answer questions:
-     {pdf_content}
-     If the answer is not in the content, kindly let the user know."""
-
+     model = genai.GenerativeModel(
+         model_name="gemini-1.5-pro",
+         generation_config={
+             "temperature": 0.7,
+             "top_p": 0.95,
+             "top_k": 40,
+             "max_output_tokens": 8192,
+         }
+     )
+     prompt = f"""You are a helpful assistant. Use the following PDF content to answer questions:\n{pdf_content}\nIf the answer isn't present, say so."""
      chat = model.start_chat(history=[])
      chat.send_message(prompt)
      return chat

-
- # Main application
  def main():
      initialize_session_state()
-
-     st.title("📘 Chat with Your PDF Files")
-     st.caption("Built with 💡 Gemini AI & LangChain")
+     st.markdown("<h1 style='text-align: center;'>💬 PDF Chat Assistant</h1>", unsafe_allow_html=True)

      with st.sidebar:
          st.header("📁 Upload PDFs")
-         uploaded_files = st.file_uploader(
-             "Drag and drop PDFs here",
-             type=["pdf"],
-             accept_multiple_files=True,
-             help="You can upload multiple PDFs"
-         )
+         uploaded_files = st.file_uploader("Choose PDF files", type=["pdf"], accept_multiple_files=True)

          if uploaded_files and not st.session_state.loaded_files:
-             os.makedirs("pdfs", exist_ok=True)
+             if not os.path.exists("pdfs"):
+                 os.makedirs("pdfs")
              for f in os.listdir("pdfs"):
                  os.remove(os.path.join("pdfs", f))
              for file in uploaded_files:
                  with open(f"pdfs/{file.name}", "wb") as f_out:
                      f_out.write(file.getvalue())

-             with st.spinner("🔍 Processing PDFs..."):
+             with st.spinner("🔄 Reading PDFs..."):
                  try:
-                     pdf_text = load_pdfs("pdfs")
-                     st.session_state.pdf_content = pdf_text
-                     st.session_state.chat = initialize_chat(pdf_text)
+                     content = load_pdfs("pdfs")
+                     st.session_state.pdf_content = content
                      st.session_state.loaded_files = True
-                     st.success("PDFs processed and ready to chat!")
+                     st.session_state.chat = initialize_chat(content)
+                     st.success("✅ PDFs processed successfully!")
                  except Exception as e:
-                     st.error(f"Failed to process PDFs: {str(e)}")
-
-         if st.session_state.loaded_files:
-             st.subheader("📄 Files Uploaded")
-             for file in uploaded_files:
-                 st.markdown(f"- {file.name}", unsafe_allow_html=True)
+                     st.error(f"❌ Error loading PDFs: {str(e)}")

-     # Chat section
      if st.session_state.loaded_files:
-         for msg in st.session_state.messages:
-             css_class = "user-message" if msg["role"] == "user" else "assistant-message"
-             with st.chat_message(msg["role"]):
-                 st.markdown(f"<div class='stChatMessage {css_class}'>{msg['content']}</div>", unsafe_allow_html=True)
+         for message in st.session_state.messages:
+             with st.chat_message(message["role"]):
+                 st.markdown(message["content"])

          if prompt := st.chat_input("Ask something about the PDFs..."):
              st.session_state.messages.append({"role": "user", "content": prompt})
              with st.chat_message("user"):
-                 st.markdown(f"<div class='stChatMessage user-message'>{prompt}</div>", unsafe_allow_html=True)
+                 st.markdown(prompt)

              with st.chat_message("assistant"):
                  placeholder = st.empty()
@@ -149,12 +89,12 @@ def main():
                      st.session_state.chat = initialize_chat(st.session_state.pdf_content)
                  response = st.session_state.chat.send_message(prompt)
                  reply = response.text
-                 placeholder.markdown(f"<div class='stChatMessage assistant-message'>{reply}</div>", unsafe_allow_html=True)
+                 placeholder.markdown(reply)
                  st.session_state.messages.append({"role": "assistant", "content": reply})
              except Exception as e:
-                 placeholder.error(f"⚠️ Error generating response: {str(e)}")
+                 placeholder.error(f"❌ Failed to respond: {str(e)}")
      else:
-         st.info("📤 Upload your PDF documents from the sidebar to begin chatting.")
+         st.info("Upload some PDF files to begin chatting with them!")

  if __name__ == "__main__":
      main()
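
Note on the import change: newer LangChain releases move the document loaders out of the core langchain package into the separate langchain-community distribution, which is why the loader import now comes from langchain_community.document_loaders. Below is a minimal sketch (not part of the commit) showing that updated loader path on its own, assuming the langchain-community and pypdf packages are installed and a local pdfs/ folder with PDFs exists:

# Illustrative sketch, not part of the commit: exercises the updated loader import.
# Assumes langchain-community and pypdf are installed and a pdfs/ folder exists.
from langchain_community.document_loaders import PyPDFDirectoryLoader

def load_pdf_text(folder_path: str = "pdfs") -> str:
    # Load every PDF in the folder and join the extracted page text,
    # mirroring load_pdfs() in app.py.
    loader = PyPDFDirectoryLoader(folder_path)
    documents = loader.load()
    return "\n\n".join(doc.page_content for doc in documents)

if __name__ == "__main__":
    print(load_pdf_text()[:500])  # preview the first 500 characters of extracted text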