Manasa1 committed on
Commit 64a2736 · verified · 1 Parent(s): a451ffc

Update app.py

Files changed (1)
  1. app.py +26 -23
app.py CHANGED
@@ -11,14 +11,16 @@ import os
 import nltk
 nltk.download('punkt')

+# Install Poppler in the runtime environment
+os.system("apt-get update && apt-get install -y poppler-utils")

-import os
 secret = os.getenv('Groq_api')

 working_dir = os.path.dirname(os.path.abspath(__file__))

 def load_documents(file_path):
-    loader = UnstructuredPDFLoader(file_path)
+    # Specify poppler_path to ensure compatibility in Spaces
+    loader = UnstructuredPDFLoader(file_path, poppler_path="/usr/bin")
     documents = loader.load()
     return documents

@@ -26,52 +28,52 @@ def setup_vectorstore(documents):
     embeddings = HuggingFaceEmbeddings()
     text_splitter = CharacterTextSplitter(
         separator="/n",
-        chunk_size = 1000,
-        chunk_overlap = 200
+        chunk_size=1000,
+        chunk_overlap=200
     )
     doc_chunks = text_splitter.split_documents(documents)
-    vectorstores = FAISS.from_documents(doc_chunks,embeddings)
+    vectorstores = FAISS.from_documents(doc_chunks, embeddings)
     return vectorstores

 def create_chain(vectorstores):
     llm = ChatGroq(
-        api_key = secret,
+        api_key=secret,
         model="llama-3.3-70b-versatile",
         temperature=0
     )
     retriever = vectorstores.as_retriever()
     memory = ConversationBufferMemory(
-        llm = llm,
-        output_key= "answer",
-        memory_key = "chat_history",
+        llm=llm,
+        output_key="answer",
+        memory_key="chat_history",
         return_messages=True
-
     )
     chain = ConversationalRetrievalChain.from_llm(
-        llm = llm,
-        retriever = retriever,
-        memory = memory,
-        verbose = True
+        llm=llm,
+        retriever=retriever,
+        memory=memory,
+        verbose=True
     )
     return chain

+# Streamlit page configuration
 st.set_page_config(
-    page_title= "Chat with your documents",
-    page_icon= "📑",
+    page_title="Chat with your documents",
+    page_icon="📑",
     layout="centered"
-
 )

 st.title("📝Chat With your docs 😎")

+# Initialize session states
 if "chat_history" not in st.session_state:
     st.session_state.chat_history = []

 uploaded_file = st.file_uploader(label="Upload your PDF")

 if uploaded_file:
-    file_path = f"{working_dir}{uploaded_file.name}"
-    with open(file_path,"wb") as f:
+    file_path = f"{working_dir}/{uploaded_file.name}"
+    with open(file_path, "wb") as f:
         f.write(uploaded_file.getbuffer())

     if "vectorstores" not in st.session_state:
@@ -80,20 +82,21 @@ if uploaded_file:
     if "conversation_chain" not in st.session_state:
         st.session_state.conversation_chain = create_chain(st.session_state.vectorstores)

-
+# Display chat history
 for message in st.session_state.chat_history:
     with st.chat_message(message["role"]):
         st.markdown(message["content"])

+# User input handling
 user_input = st.chat_input("Ask any questions relevant to uploaded pdf")

 if user_input:
-    st.session_state.chat_history.append({"role":"user","content":user_input})
+    st.session_state.chat_history.append({"role": "user", "content": user_input})
     with st.chat_message("user"):
         st.markdown(user_input)

     with st.chat_message("assistant"):
-        response = st.session_state.conversation_chain({"question":user_input})
+        response = st.session_state.conversation_chain({"question": user_input})
         assistant_response = response["answer"]
         st.markdown(assistant_response)
-        st.session_state.chat_history.append({"role":"assistant","content":assistant_response})
+        st.session_state.chat_history.append({"role": "assistant", "content": assistant_response})
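
The bootstrap added at the top of app.py shells out to apt-get on every start of the Space. Below is a minimal sketch, not part of this commit, of a guarded variant that only installs when poppler is actually missing; it assumes a Debian-based image where the pdftoppm binary is provided by poppler-utils.

# Sketch (illustrative): skip the install when poppler is already present.
import shutil
import subprocess

if shutil.which("pdftoppm") is None:  # pdftoppm ships with the poppler-utils package
    subprocess.run(["apt-get", "update"], check=False)
    subprocess.run(["apt-get", "install", "-y", "poppler-utils"], check=False)

On Hugging Face Spaces the same dependency can also be declared in a packages.txt file at the repository root, which installs it once at build time rather than on every app start.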
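One context line the commit leaves untouched is separator="/n" in setup_vectorstore. CharacterTextSplitter splits on that literal string, so "/n" rarely matches and the splitter falls back to oversized chunks, whereas "\n" splits on newlines. A small self-contained sketch that demonstrates the difference; the langchain_text_splitters import path is an assumption, as app.py's own imports are not shown in this diff.

# Sketch (illustrative): compare chunking with the literal "/n" vs a real newline.
from langchain_text_splitters import CharacterTextSplitter

text = "line one\nline two\nline three\n" * 200
for sep in ("/n", "\n"):
    splitter = CharacterTextSplitter(separator=sep, chunk_size=1000, chunk_overlap=200)
    chunks = splitter.split_text(text)
    # "/n" yields a single oversized chunk; "\n" yields chunks near the 1000-char limit.
    print(repr(sep), len(chunks), max(len(c) for c in chunks))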