# Retrieval tool: semantic search over network documentation stored in a
# local FAISS index (index built by loader.py).
from langchain_community.vectorstores import FAISS
try:
    from langchain_huggingface import HuggingFaceEmbeddings
except ImportError:
    # Fallback to deprecated import if langchain-huggingface is not installed
    from langchain_community.embeddings import HuggingFaceEmbeddings
from agents import function_tool

# Location of the persisted FAISS index on disk.
FAISS_INDEX_PATH = "faiss_index"
# Must match the model used at index-build time (loader.py); otherwise query
# vectors live in a different embedding space than the stored vectors.
EMBEDDING_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"  # Must match loader.py

# Initialize embeddings and vector store once, at import time.
# NOTE: allow_dangerous_deserialization=True lets FAISS.load_local unpickle
# its stored docstore. That is pickle-based deserialization — only acceptable
# because the index is produced locally by our own loader; never point this
# at an index file from an untrusted source.
embeddings = HuggingFaceEmbeddings(model_name=EMBEDDING_MODEL_NAME)
db = FAISS.load_local(
    FAISS_INDEX_PATH,
    embeddings,
    allow_dangerous_deserialization=True,
)
def retrieve_network_information(query: str) -> str:
    """Provide information of our network using semantic search.

    Args:
        query: The query to search for in the network documentation.
        This should be semantically close to your target documents.
        Use the affirmative form rather than a question.

    Returns:
        A formatted string with up to 10 matching chunks, each prefixed by
        its device (or "Global/Fabric Info"), source, and retrieval score,
        or a fixed "no results" message when nothing matches.
    """
    # NOTE(review): `function_tool` is imported at module level but never
    # applied — confirm whether this function was meant to carry the
    # @function_tool decorator for agent registration.
    results_with_scores = db.similarity_search_with_score(query, k=10)
    if not results_with_scores:
        return "No relevant information found in the documentation for your query."

    # Accumulate parts and join once — avoids quadratic str concatenation.
    parts: list[str] = []
    for doc, score in results_with_scores:
        device_name = doc.metadata.get('device_name')
        source = doc.metadata.get('source', 'Unknown source')
        # NOTE(review): `score` is whatever the FAISS store returns
        # (typically a raw distance, lower = closer, not a normalized
        # similarity) — confirm against the index metric used in loader.py.
        if device_name:
            parts.append(f"Device: {device_name} (Source: {source}, Score: {score:.4f})\n")
        else:
            # If not device_name, assume it's global/fabric information
            parts.append(f"Global/Fabric Info (Source: {source}, Score: {score:.4f})\n")
        parts.append(f"Result: {doc.page_content}\n\n")
    print(f"Retrieved {len(results_with_scores)} results for query: '{query}'")
    return "".join(parts)