Anirudh1993 committed on
Commit
3e1a55c
·
verified ·
1 Parent(s): 16e58b7

Update document_chat.py

Browse files
Files changed (1) hide show
  1. document_chat.py +3 -4
document_chat.py CHANGED
@@ -8,7 +8,7 @@ from langchain.memory import ConversationBufferMemory
8
  from langchain.llms import HuggingFaceHub
9
  from langchain.prompts import PromptTemplate
10
  from langchain.chains import LLMChain
11
- from langchain.chains.combine_documents import DocumentCompressor
12
 
13
  # Constants
14
  CHROMA_DB_PATH = "chroma_db"
@@ -50,8 +50,8 @@ def process_query_with_memory(query, chat_history=[]):
50
  question_generator_template = "Generate a question based on the user's request: {query}"
51
  question_generator = LLMChain(llm=llm, prompt=PromptTemplate(template=question_generator_template, input_variables=["query"]))
52
 
53
- # Document combiner (example: just concatenate documents)
54
- combine_docs_chain = DocumentCompressor(combine_fn=lambda docs: " ".join([doc.page_content for doc in docs]))
55
 
56
  # Create a ConversationalRetrievalChain with the loaded model and retriever
57
  qa_chain = ConversationalRetrievalChain(
@@ -65,4 +65,3 @@ def process_query_with_memory(query, chat_history=[]):
65
  # Run the query with the current chat history and return the response
66
  response = qa_chain.run({"question": query, "chat_history": chat_history})
67
  return response
68
-
 
8
  from langchain.llms import HuggingFaceHub
9
  from langchain.prompts import PromptTemplate
10
  from langchain.chains import LLMChain
11
+ from langchain.chains.combine_documents import StuffDocumentsChain # Corrected import
12
 
13
  # Constants
14
  CHROMA_DB_PATH = "chroma_db"
 
50
  question_generator_template = "Generate a question based on the user's request: {query}"
51
  question_generator = LLMChain(llm=llm, prompt=PromptTemplate(template=question_generator_template, input_variables=["query"]))
52
 
53
+ # Use StuffDocumentsChain to combine the retrieved documents
54
+ combine_docs_chain = StuffDocumentsChain(llm=llm) # Corrected use of StuffDocumentsChain
55
 
56
  # Create a ConversationalRetrievalChain with the loaded model and retriever
57
  qa_chain = ConversationalRetrievalChain(
 
65
  # Run the query with the current chat history and return the response
66
  response = qa_chain.run({"question": query, "chat_history": chat_history})
67
  return response