Segizu committed on
Commit
6723742
·
1 Parent(s): 4c925ec

chatbot pdf

Files changed (1)
  1. app.py +99 -0
app.py ADDED
@@ -0,0 +1,99 @@
+ import streamlit as st
+ from PyPDF2 import PdfReader
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
+ from langchain_core.prompts import ChatPromptTemplate
+ from langchain_community.embeddings.spacy_embeddings import SpacyEmbeddings
+ from langchain_community.vectorstores import FAISS
+ from langchain.tools.retriever import create_retriever_tool
+ from dotenv import load_dotenv
+ from langchain_anthropic import ChatAnthropic
+ from langchain_openai import ChatOpenAI, OpenAIEmbeddings
+ from langchain.agents import AgentExecutor, create_tool_calling_agent
+
+ import os
+ os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"
+
+ # Load API keys (e.g. OPENAI_API_KEY) from a local .env file.
+ load_dotenv()
+
+ embeddings = SpacyEmbeddings(model_name="en_core_web_sm")
+
+
+ # Extract the raw text from every uploaded PDF.
+ def pdf_read(pdf_doc):
+     text = ""
+     for pdf in pdf_doc:
+         pdf_reader = PdfReader(pdf)
+         for page in pdf_reader.pages:
+             text += page.extract_text() or ""
+     return text
+
+
+ # Split the extracted text into overlapping chunks for embedding.
+ def get_chunks(text):
+     text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
+     chunks = text_splitter.split_text(text)
+     return chunks
+
+
+ # Embed the chunks and persist the FAISS index to disk.
+ def vector_store(text_chunks):
+     vector_store = FAISS.from_texts(text_chunks, embedding=embeddings)
+     vector_store.save_local("faiss_db")
+
+
+ def get_conversational_chain(tools, ques):
+     # os.environ["ANTHROPIC_API_KEY"] = os.getenv("ANTHROPIC_API_KEY")
+     # llm = ChatAnthropic(model="claude-3-sonnet-20240229", temperature=0, api_key=os.getenv("ANTHROPIC_API_KEY"), verbose=True)
+     llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0, api_key=os.getenv("OPENAI_API_KEY"))
+     prompt = ChatPromptTemplate.from_messages(
+         [
+             (
+                 "system",
+                 """You are a helpful assistant. Answer the question in as much detail as possible from the provided context and make sure to provide all the details. If the answer is not in the provided context, just say "answer is not available in the context"; do not provide a wrong answer.""",
+             ),
+             ("placeholder", "{chat_history}"),
+             ("human", "{input}"),
+             ("placeholder", "{agent_scratchpad}"),
+         ]
+     )
+     tool = [tools]
+     agent = create_tool_calling_agent(llm, tool, prompt)
+
+     agent_executor = AgentExecutor(agent=agent, tools=tool, verbose=True)
+     response = agent_executor.invoke({"input": ques})
+     print(response)
+     st.write("Reply: ", response["output"])
+
+
+ def user_input(user_question):
+     # Reload the persisted FAISS index and expose it to the agent as a retriever tool.
+     new_db = FAISS.load_local("faiss_db", embeddings, allow_dangerous_deserialization=True)
+
+     retriever = new_db.as_retriever()
+     retrieval_chain = create_retriever_tool(retriever, "pdf_extractor", "Answers queries from the uploaded PDF files")
+     get_conversational_chain(retrieval_chain, user_question)
+
+
+ def main():
+     st.set_page_config("Chat PDF")
+     st.header("RAG based Chat with PDF")
+
+     user_question = st.text_input("Ask a Question from the PDF Files")
+
+     if user_question:
+         user_input(user_question)
+
+     with st.sidebar:
+         st.title("Menu:")
+         pdf_doc = st.file_uploader("Upload your PDF Files and Click on the Submit & Process Button", accept_multiple_files=True)
+         if st.button("Submit & Process"):
+             with st.spinner("Processing..."):
+                 raw_text = pdf_read(pdf_doc)
+                 text_chunks = get_chunks(raw_text)
+                 vector_store(text_chunks)
+                 st.success("Done")
+
+
+ if __name__ == "__main__":
+     main()
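
To try the app locally, a minimal setup sketch (assuming the usual PyPI package names for the imports above, and an OPENAI_API_KEY stored in a local .env file picked up by load_dotenv):

    pip install streamlit PyPDF2 langchain langchain-community langchain-openai langchain-anthropic faiss-cpu spacy python-dotenv
    python -m spacy download en_core_web_sm
    streamlit run app.py

Upload one or more PDFs in the sidebar, click "Submit & Process" to build the FAISS index, then ask questions in the main input box.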