File size: 1,859 Bytes
890d952
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50

from langchain_community.vectorstores import FAISS
try:
    from langchain_huggingface import HuggingFaceEmbeddings
except ImportError:
    # Fallback to deprecated import if langchain-huggingface is not installed
    from langchain_community.embeddings import HuggingFaceEmbeddings
from agents import function_tool

# Location of the persisted FAISS index (written by the indexing step).
FAISS_INDEX_PATH = "faiss_index"
EMBEDDING_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"  # Must match loader.py

# Initialize embeddings and vector store
# NOTE(review): allow_dangerous_deserialization=True makes FAISS unpickle the
# stored docstore — safe only if the index on disk is produced by our own
# loader.py and never sourced from untrusted input; confirm that assumption.
embeddings = HuggingFaceEmbeddings(model_name=EMBEDDING_MODEL_NAME)
db = FAISS.load_local(
    FAISS_INDEX_PATH, 
    embeddings, 
    allow_dangerous_deserialization=True
)

@function_tool
def retrieve_network_information(query: str) -> str:
    """Provide information of our network using semantic search.

    Args:
        query: The query to search for in the network documentation.
               This should be semantically close to your target documents.
               Use the affirmative form rather than a question.

    Returns:
        A formatted string with one entry per retrieved chunk — a header
        line (device-specific or global/fabric, with source and score)
        followed by the chunk content — or a fallback message when no
        results are found.
    """
    results_with_scores = db.similarity_search_with_score(query, k=10)

    # Guard first: nothing to format if the search came back empty.
    if not results_with_scores:
        return "No relevant information found in the documentation for your query."

    # Accumulate pieces in a list and join once at the end — avoids the
    # quadratic cost of repeated string += while keeping the output format
    # byte-identical to the original implementation.
    parts: list[str] = []
    for doc, score in results_with_scores:
        device_name = doc.metadata.get('device_name')
        source = doc.metadata.get('source', 'Unknown source')

        if device_name:
            parts.append(f"Device: {device_name} (Source: {source}, Score: {score:.4f})\n")
        else:
            # If not device_name, assume it's global/fabric information
            parts.append(f"Global/Fabric Info (Source: {source}, Score: {score:.4f})\n")
        parts.append(f"Result: {doc.page_content}\n\n")

    print(f"Retrieved {len(results_with_scores)} results for query: '{query}'")
    return "".join(parts)