"""Build and persist a FAISS vector index from the PDFs in ``docs/`` (Streamlit app)."""

# Standard library
import logging
import os

# Third-party
import streamlit as st
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.document_loaders import PDFMinerLoader
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS

# Configure root logging once at import time; module-level logger per convention.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
|
def create_faiss_index():
    """Scan ``docs/`` for PDFs, embed their text, and save a FAISS index.

    Walks the ``docs`` directory recursively, loads every PDF with
    ``PDFMinerLoader``, splits the text into 500-character chunks with
    50-character overlap, embeds the chunks with the MiniLM
    sentence-transformer, builds a FAISS index, and saves it to
    ``faiss_index/``. Progress and errors are surfaced through Streamlit
    (``st.info`` / ``st.error`` / ``st.success``).

    Returns:
        The FAISS vector store on success, or ``None`` if any stage failed.
        (Returning the store is backward-compatible: the original code
        always returned ``None``, and callers that ignore the return value
        are unaffected.)
    """
    documents = []
    docs_dir = "docs"

    if not os.path.exists(docs_dir):
        st.error(f"The directory '{docs_dir}' does not exist.")
        return None

    # `dirs` is unused; os.walk still recurses into sub-directories.
    for root, _dirs, files in os.walk(docs_dir):
        for file in files:
            # Case-insensitive match so files like "REPORT.PDF" are not
            # silently skipped (the original check was case-sensitive).
            if file.lower().endswith(".pdf"):
                file_path = os.path.join(root, file)
                st.info(f"Loading document: {file_path}")
                try:
                    loader = PDFMinerLoader(file_path)
                    documents.extend(loader.load())
                except Exception as e:
                    # Best-effort: report this file and keep loading the rest.
                    st.error(f"Error loading {file_path}: {e}")

    if not documents:
        st.error("No documents were loaded. Check the 'docs' directory and file paths.")
        return None

    st.info(f"Loaded {len(documents)} documents.")

    text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50)
    texts = text_splitter.split_documents(documents)

    if not texts:
        st.error("No text chunks were created. Check the text splitting process.")
        return None

    st.info(f"Created {len(texts)} text chunks.")

    try:
        embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
    except Exception as e:
        st.error(f"Failed to initialize embeddings: {e}")
        return None

    try:
        db = FAISS.from_documents(texts, embeddings)
        st.info(f"Created FAISS index with {len(texts)} vectors")
    except Exception as e:
        st.error(f"Failed to create FAISS index: {e}")
        return None

    index_dir = "faiss_index"
    # exist_ok=True avoids the check-then-create race in the original
    # `if not os.path.exists(...): os.makedirs(...)` sequence.
    os.makedirs(index_dir, exist_ok=True)

    try:
        db.save_local(index_dir)
        st.success(f"FAISS index successfully saved to {index_dir}")
        index_path = os.path.join(index_dir, "index.faiss")
        st.info(f"Index file size: {os.path.getsize(index_path)} bytes")
        st.info(f"Index file permissions: {oct(os.stat(index_path).st_mode)[-3:]}")
    except Exception as e:
        st.error(f"Failed to save FAISS index: {e}")
        return None

    return db
|
|
|
|
# Allow running this module directly as a script to (re)build the FAISS index.
if __name__ == "__main__":
    create_faiss_index()
|
|
|