"""Gradio app that indexes PDFs into Chroma, FAISS, LanceDB, and Pinecone and compares their RAG answers."""

import os

import gradio as gr
from pinecone import Pinecone
from langchain_chroma import Chroma
from langchain_core.prompts import PromptTemplate
from langchain_pinecone import PineconeVectorStore
from langchain_community.vectorstores import FAISS, LanceDB
from langchain_text_splitters import CharacterTextSplitter
from langchain_community.document_loaders import PyPDFLoader
from langchain_google_genai import GoogleGenerativeAIEmbeddings, GoogleGenerativeAI

embeddings = GoogleGenerativeAIEmbeddings(model="models/text-embedding-004")
gemini = GoogleGenerativeAI(model="models/gemini-2.0-flash")

prompt_template = """
Context:\n{context}\n
Question:\n{question}\n
Answer:
"""
prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
chain = prompt | gemini

index_name = "langchain-test-index"


def store_embeddings(pdf_path, chunk_size, chunk_overlap):
    # Load every uploaded PDF and split the pages into overlapping chunks.
    raw_documents = []
    for path in pdf_path:
        raw_documents.extend(PyPDFLoader(path).load())
    text_splitter = CharacterTextSplitter(chunk_size=chunk_size, chunk_overlap=chunk_overlap)
    documents = text_splitter.split_documents(raw_documents)

    # Clear any existing vectors from the Pinecone index before re-indexing.
    pc = Pinecone(api_key=os.environ["PINECONE_API_KEY"])
    index = pc.Index(host="https://langchain-test-index-la2n80y.svc.aped-4627-b74a.pinecone.io")
    if index.describe_index_stats()["total_vector_count"] > 0:
        index.delete(delete_all=True)

    # Store the same chunks in all four vector databases.
    chroma_db = Chroma.from_documents(documents, embeddings, persist_directory="./chroma_db")
    faiss_db = FAISS.from_documents(documents, embeddings)
    faiss_db.save_local("./faiss_db")
    lance_db = LanceDB.from_documents(documents, embeddings, uri="./lance_db")
    pinecone_db = PineconeVectorStore.from_documents(documents, index_name=index_name, embedding=embeddings)
    return "All embeddings are stored in the vector databases"


title = "PDF Chat"
description = "A simple Gradio interface to query PDFs and compare vector databases"
examples = [
    [["data/amazon-10-k-2024.pdf"], 1000, 100],
    [["data/goog-10-k-2023.pdf"], 1000, 100],
]


def inference(query):
    # Retrieve similar chunks from each vector store and answer the question with Gemini.
    chroma_db = Chroma(persist_directory="./chroma_db", embedding_function=embeddings)
    chroma_docs = chroma_db.similarity_search(query)
    chroma_answer = chain.invoke({"context": chroma_docs, "question": query})

    faiss_db = FAISS.load_local("./faiss_db", embeddings, allow_dangerous_deserialization=True)
    faiss_docs = faiss_db.similarity_search(query)
    faiss_answer = chain.invoke({"context": faiss_docs, "question": query})

    lance_db = LanceDB(embedding=embeddings, uri="./lance_db")
    lance_docs = lance_db.similarity_search(query)
    lance_answer = chain.invoke({"context": lance_docs, "question": query})

    pinecone_db = PineconeVectorStore(index_name=index_name, embedding=embeddings)
    pinecone_docs = pinecone_db.similarity_search(query)
    pinecone_answer = chain.invoke({"context": pinecone_docs, "question": query})

    return chroma_answer, faiss_answer, lance_answer, pinecone_answer


with gr.Blocks(theme=gr.themes.Monochrome()) as demo:
    gr.Markdown(f"# {title}\n{description}")

    # Indexing controls: upload PDFs, choose chunking parameters, store embeddings.
    with gr.Row():
        with gr.Column():
            pdf = gr.File(label="Input PDFs", file_count="multiple", file_types=[".pdf"])
            chunk_size = gr.Slider(0, 2000, 1000, step=100, label="Chunk Size")
            chunk_overlap = gr.Slider(0, 1000, 100, step=100, label="Chunk Overlap")
            with gr.Row():
                clear_btn = gr.ClearButton(components=[pdf, chunk_size, chunk_overlap])
                submit_btn = gr.Button("Store Embeddings", variant="primary")
        with gr.Column():
            message = gr.Textbox(label="Status", type="text")
    submit_btn.click(store_embeddings, inputs=[pdf, chunk_size, chunk_overlap], outputs=message)

    # Query controls: ask one question, compare the answer produced from each store.
    with gr.Row():
        with gr.Column():
            text = gr.Textbox(label="Question", type="text")
            with gr.Row():
                chat_clear_btn = gr.ClearButton(components=[text])
                chat_submit_btn = gr.Button("Submit", variant="primary")
        with gr.Column():
            chroma_out = gr.Textbox(label="ChromaDB Response", type="text")
            faiss_out = gr.Textbox(label="FAISS Response", type="text")
            lance_out = gr.Textbox(label="LanceDB Response", type="text")
            pinecone_out = gr.Textbox(label="Pinecone Response", type="text")
    chat_submit_btn.click(inference, inputs=[text], outputs=[chroma_out, faiss_out, lance_out, pinecone_out])

    examples_obj = gr.Examples(examples=examples, inputs=[pdf, chunk_size, chunk_overlap])

demo.launch()