from typing import List

from llms import initialize_llms, answer_query_with_chunks, query_to_retrieve
from chroma_operations.retrieve import search_similar_chunks

# Load LLM once at startup
llms = initialize_llms()
llm = llms["llm"]

def ask_question(
    query_text: str,
    file_names: List[str],
    collection_name: str = "rag_collection",
) -> dict:
    """Answer a question using chunks retrieved from the named documents."""
    try:
        # Rewrite the user question into a retrieval-friendly search query
        query_search = query_to_retrieve(query_text, llm)
        
        # Search ChromaDB for chunks similar to the rewritten query
        retrieved_docs = search_similar_chunks(
            query_search,
            document_names=file_names,
            collection_name=collection_name,
        )

        if not retrieved_docs:
            return {"answer": "No matching documents found.", "chunks": []}

        # Generate the final answer from the retrieved chunks
        answer = answer_query_with_chunks(query_text, retrieved_docs, llm)
        return {"answer": answer, "chunks": retrieved_docs}

    except Exception as e:
        return {"answer": f"Error processing request: {str(e)}", "chunks": []}