Update app.py
app.py
CHANGED
@@ -7,7 +7,7 @@ from langchain_community.embeddings import HuggingFaceEmbeddings
 from langchain_community.vectorstores import FAISS
 from langchain_community.llms import HuggingFacePipeline
 from langchain.chains import RetrievalQA
-from ingest import create_faiss_index
+from ingest import create_faiss_index  # Make sure this function is updated
 
 # Set up logging
 logging.basicConfig(level=logging.INFO)
@@ -32,9 +32,6 @@ def load_llm():
 
 def validate_index_file(index_path):
     try:
-        if os.path.getsize(index_path) == 0:
-            st.error(f"Index file '{index_path}' is empty.")
-            return False
         with open(index_path, 'rb') as f:
             data = f.read(100)
             logger.info(f"Successfully read {len(data)} bytes from the index file")
@@ -45,13 +42,13 @@ def validate_index_file(index_path):
 
 def load_faiss_index():
     index_path = "faiss_index/index.faiss"
-
-
-
-
-
-
-    create_faiss_index()
+    if not os.path.exists(index_path):
+        st.warning("Index file not found. Creating a new one...")
+        # Pass the required arguments to create_faiss_index
+        documents = []  # Load your documents here or from a specific directory
+        texts = []  # Process your documents to get text chunks
+        embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
+        create_faiss_index(texts, embeddings)  # Ensure this is updated with correct function
 
     if not os.path.exists(index_path):
         st.error("Failed to create the FAISS index. Please check the 'docs' directory and try again.")