Update app.py
app.py CHANGED
@@ -250,14 +250,15 @@ def should_continue(state: AgentState) -> str:
     return "reason"
 
 
+# ====== NEW IMPORTS ======
 from langchain_community.embeddings import HuggingFaceEmbeddings
-from langchain_community.vectorstores import Chroma
+from langchain_community.vectorstores import FAISS  # Use FAISS instead of Chroma
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain_core.documents import Document
 
 # ====== DOCUMENT PROCESSING SETUP ======
 def create_vector_store():
-    """Create vector store with predefined documents"""
+    """Create vector store with predefined documents using FAISS"""
     # Define the documents
     documents = [
         Document(page_content="The capital of France is Paris.", metadata={"source": "geography"}),
@@ -275,13 +276,14 @@ def create_vector_store():
     )
     chunks = text_splitter.split_documents(documents)
 
-    # Create
-    return
+    # Create FAISS vector store
+    return FAISS.from_documents(
         documents=chunks,
         embedding=embeddings
     )
 
 
+
 def reasoning_node(state: AgentState) -> AgentState:
     """
     Node for the agent to analyze the question, determine next steps,
@@ -353,23 +355,6 @@ def reasoning_node(state: AgentState) -> AgentState:
     rag_context = "\n\n[Relevant Knowledge]\n"
     rag_context += "\n---\n".join([doc.page_content for doc in relevant_docs])
 
-    # ====== RAG RETRIEVAL ======
-    # Initialize vector store if not present
-    if "vector_store" not in state["context"]:
-        state["context"]["vector_store"] = create_vector_store()
-
-    vector_store = state["context"]["vector_store"]
-
-    # Perform retrieval
-    relevant_docs = vector_store.similarity_search(
-        state["question"],
-        k=3  # Retrieve top 3 most relevant chunks
-    )
-
-    # Format context for LLM
-    rag_context = "\n\n[Relevant Knowledge]\n"
-    rag_context += "\n---\n".join([doc.page_content for doc in relevant_docs])
-
     # ====== MODIFIED PROMPT ======
     # Add RAG context to system prompt
     system_prompt = (
@@ -563,7 +548,7 @@ class BasicAgent:
         self.workflow = create_agent_workflow(self.tools)
 
     def __call__(self, question: str) -> str:
-        print(f"\n--- Agent received question: {question[:
+        print(f"\n--- Agent received question: {question[:50]}{'...' if len(question) > 50 else ''} ---")
 
         state = {
             "question": question,