annas4421 committed on
Commit
004193f
·
verified ·
1 Parent(s): 30a96d2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -2
app.py CHANGED
@@ -28,7 +28,8 @@ ANSWER:
28
  CUSTOM_QUESTION_PROMPT = PromptTemplate.from_template(custom_template)
29
 
30
  prompt_template = """<s>[INST]
31
- You will answer from the provided files stored in knowledge base
 
32
  CONTEXT: {context}
33
  CHAT HISTORY: {chat_history}
34
  QUESTION: {question}
@@ -74,7 +75,7 @@ def get_vectorstore(chunks):
74
 
75
  # Create a conversational chain
76
  def get_conversationchain(vectorstore):
77
- llm = ChatOpenAI(temperature=0.5, model_name='gpt-4o-mini')
78
  memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
79
  conversation_chain = ConversationalRetrievalChain.from_llm(
80
  llm=llm,
 
28
  CUSTOM_QUESTION_PROMPT = PromptTemplate.from_template(custom_template)
29
 
30
  prompt_template = """<s>[INST]
31
+ You will answer from the provided files stored in knowledge base. You should only give response or answers from the attached file. If the user input seems unclear you should say 'Please provide more specifics about question'
32
+ If user asks out of context you will say ' Please ask queries only from the attached files'
33
  CONTEXT: {context}
34
  CHAT HISTORY: {chat_history}
35
  QUESTION: {question}
 
75
 
76
  # Create a conversational chain
77
  def get_conversationchain(vectorstore):
78
+ llm = ChatOpenAI(temperature=0.1, model_name='gpt-4o-mini')
79
  memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
80
  conversation_chain = ConversationalRetrievalChain.from_llm(
81
  llm=llm,