Update agent.py
agent.py CHANGED
@@ -429,31 +429,42 @@ embedding_model = HuggingFaceEmbeddings(
 # -----------------------------
 # Create FAISS index and save it
 # -----------------------------
-
-
-
-
-
-
-# Fallback to rebuilding the index if loading fails
-vector_store = FAISS.from_documents(docs, embedding_model)
+class ChatState(TypedDict):
+    messages: Annotated[
+        List[str],
+        gr.State(render=False),
+        "Stores chat history"
+    ]
 
-
-
-
-
-index_path
-try:
-
-
-
-
+def initialize_vector_store():
+    embedding_model = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
+    index_path = "/home/wendy/my_hf_agent_course_projects/faiss_index"
+
+    if os.path.exists(os.path.join(index_path, "index.faiss")):
+        try:
+            return FAISS.load_local(
+                index_path,
+                embedding_model,
+                allow_dangerous_deserialization=True
+            )
+        except Exception as e:
+            print(f"Error loading index: {e}")
+
+    # Fallback: Create new index
+    print("Building new vector store...")
+    docs = [...]  # Your document loading logic here
+    vector_store = FAISS.from_documents(docs, embedding_model)
+    vector_store.save_local(index_path)
+    return vector_store
 
+# Initialize at module level
+loaded_store = initialize_vector_store()
+retriever = loaded_store.as_retriever()
 
 # -----------------------------
 # Create LangChain Retriever Tool
 # -----------------------------
-retriever = loaded_store.as_retriever()
+#retriever = loaded_store.as_retriever()
 
 question_retriever_tool = create_retriever_tool(
     retriever=retriever,
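For reference, a minimal, self-contained sketch of the load-or-rebuild pattern that initialize_vector_store() implements. The corpus and the relative index path below are illustrative stand-ins (the commit leaves docs = [...] as a placeholder), and the imports assume the langchain-community and langchain-huggingface packages; adjust them to whatever agent.py already imports.

import os

from langchain_community.vectorstores import FAISS
from langchain_core.documents import Document
from langchain_huggingface import HuggingFaceEmbeddings

embedding_model = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
index_path = "faiss_index"  # hypothetical relative path; the commit uses an absolute one

if os.path.exists(os.path.join(index_path, "index.faiss")):
    # Reuse the index persisted by an earlier run.
    store = FAISS.load_local(index_path, embedding_model, allow_dangerous_deserialization=True)
else:
    # Build the index once and persist it for later runs.
    docs = [Document(page_content="Example question and answer text.")]  # placeholder corpus
    store = FAISS.from_documents(docs, embedding_model)
    store.save_local(index_path)

print(store.similarity_search("example", k=1))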
|
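Continuing that sketch, a hedged example of how the retriever feeds create_retriever_tool and how the tool can be exercised directly. The tool name and description are hypothetical, since the rest of the create_retriever_tool call falls outside this hunk, and the import path assumes a recent langchain release. The tool accepts a query string and returns the matching documents' page_content joined into one string.

from langchain.tools.retriever import create_retriever_tool

retriever = store.as_retriever()  # mirrors loaded_store.as_retriever() in the commit

question_retriever_tool = create_retriever_tool(
    retriever=retriever,
    name="question_search",  # hypothetical tool name
    description="Search the FAISS index for questions similar to the input.",  # hypothetical
)

# Invoke the tool directly to sanity-check retrieval before wiring it into the agent.
print(question_retriever_tool.invoke({"query": "What is the capital of France?"}))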