jarif committed on
Commit bb44cbb · verified · 1 Parent(s): 3197834

Upload ingest.py

Files changed (1)
  1. ingest.py +92 -92
ingest.py CHANGED
@@ -1,92 +1,92 @@
import os
import logging

from langchain_community.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def create_faiss_index():
    documents = []
    docs_dir = "docs"

    if not os.path.exists(docs_dir):
        logger.error(f"The directory '{docs_dir}' does not exist.")
        return

    if not os.listdir(docs_dir):
        logger.error(f"The directory '{docs_dir}' is empty.")
        return

    # Try loading each PDF and log results
    for root, dirs, files in os.walk(docs_dir):
        for file in files:
            if file.endswith(".pdf"):
                file_path = os.path.join(root, file)
                logger.info(f"Loading document: {file_path}")
                try:
                    loader = PyPDFLoader(file_path)
                    loaded_docs = loader.load()
                    if loaded_docs:
                        documents.extend(loaded_docs)
                        logger.info(f"Loaded {len(loaded_docs)} pages from {file_path}.")
                    else:
                        logger.warning(f"No content extracted from {file_path}.")
                except Exception as e:
                    logger.error(f"Error loading {file_path}: {e}")

    if not documents:
        logger.error("No documents were loaded. Check the 'docs' directory and file paths.")
        return

    logger.info(f"Total loaded documents: {len(documents)}")

    # Split text into chunks
    text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50)
    texts = text_splitter.split_documents(documents)

    if not texts:
        logger.error("No text chunks were created. Check the text splitting process.")
        return

    logger.info(f"Created {len(texts)} text chunks.")

    # Log a sample of text chunks; each chunk is a Document, so read .page_content
    for i, text in enumerate(texts[:5]):
        logger.info(f"Sample chunk {i}: {text.page_content[:100]}...")

    # Create embeddings
    try:
        embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
    except Exception as e:
        logger.error(f"Failed to initialize embeddings: {e}")
        return

    # Create FAISS index
    try:
        db = FAISS.from_documents(texts, embeddings)
        logger.info(f"Created FAISS index with {len(texts)} vectors")
    except Exception as e:
        logger.error(f"Failed to create FAISS index: {e}")
        return

    # Save FAISS index locally
    index_dir = "faiss_index"
    if not os.path.exists(index_dir):
        os.makedirs(index_dir)

    try:
        db.save_local(index_dir)
        index_path = os.path.join(index_dir, "index.faiss")
        if os.path.getsize(index_path) > 0:
            logger.info(f"FAISS index successfully saved to {index_path}")
        else:
            logger.error(f"FAISS index file '{index_path}' is empty.")
    except Exception as e:
        logger.error(f"Failed to save FAISS index: {e}")

if __name__ == "__main__":
    create_faiss_index()
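
For reference, a minimal sketch of how the saved index might be consumed at query time, assuming the same embedding model and a recent langchain_community release (where FAISS.load_local takes allow_dangerous_deserialization because the docstore is pickled); the query string is purely illustrative:

from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS

# Re-create the same embedding model that was used at ingest time
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")

# Load the index written by create_faiss_index(); the flag opts in to unpickling
# the stored docstore, which newer langchain_community versions require explicitly
db = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)

# Example similarity search over the ingested PDF chunks (hypothetical query)
for doc in db.similarity_search("What does the document say about X?", k=3):
    print(doc.metadata.get("source"), doc.page_content[:100])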