Spaces:
Runtime error
Runtime error
Commit
·
7f14c95
1
Parent(s):
9e86d4e
llama prompt and debug update
Browse files
config/prompt_templates/llama2.txt
CHANGED
|
@@ -1,6 +1,7 @@
|
|
| 1 |
-
<<SYS>>
|
| 2 |
-
|
| 3 |
-
|
|
|
|
| 4 |
<</SYS>>
|
| 5 |
|
| 6 |
-
[INST]
|
|
|
|
| 1 |
+
<<SYS>>Using the information contained in the context,
|
| 2 |
+
give a comprehensive answer to the question.
|
| 3 |
+
Respond only to the question asked, response should be concise and relevant to the question.
|
| 4 |
+
If the answer cannot be deduced from the context, do not give an answer.
|
| 5 |
<</SYS>>
|
| 6 |
|
| 7 |
+
[INST] Context: {context} [/INST] User: {question}
|
qa_engine/qa_engine.py
CHANGED
|
@@ -227,7 +227,7 @@ class QAEngine():
|
|
| 227 |
self.knowledge_index = FAISS.load_local('./indexes/run/', embedding_model)
|
| 228 |
self.reranker = CrossEncoder('cross-encoder/ms-marco-MiniLM-L-12-v2')
|
| 229 |
|
| 230 |
-
|
| 231 |
@staticmethod
|
| 232 |
def _preprocess_question(question: str) -> str:
|
| 233 |
if question[-1] != '?':
|
|
@@ -300,8 +300,8 @@ class QAEngine():
|
|
| 300 |
logger.info('Running LLM chain')
|
| 301 |
question_processed = QAEngine._preprocess_question(question)
|
| 302 |
answer = self.llm_chain.run(question=question_processed, context=context)
|
| 303 |
-
|
| 304 |
-
response.set_answer(
|
| 305 |
logger.info('Received answer')
|
| 306 |
|
| 307 |
if self.debug:
|
|
@@ -310,7 +310,8 @@ class QAEngine():
|
|
| 310 |
logger.info(f'question len: {len(question)} {sep}')
|
| 311 |
logger.info(f'question: {question} {sep}')
|
| 312 |
logger.info(f'answer len: {len(response.get_answer())} {sep}')
|
| 313 |
-
logger.info(f'answer: {answer} {sep}')
|
|
|
|
| 314 |
logger.info(f'{response.get_sources_as_text()} {sep}')
|
| 315 |
logger.info(f'messages_contex: {messages_context} {sep}')
|
| 316 |
logger.info(f'relevant_docs: {relevant_docs} {sep}')
|
|
|
|
| 227 |
self.knowledge_index = FAISS.load_local('./indexes/run/', embedding_model)
|
| 228 |
self.reranker = CrossEncoder('cross-encoder/ms-marco-MiniLM-L-12-v2')
|
| 229 |
|
| 230 |
+
|
| 231 |
@staticmethod
|
| 232 |
def _preprocess_question(question: str) -> str:
|
| 233 |
if question[-1] != '?':
|
|
|
|
| 300 |
logger.info('Running LLM chain')
|
| 301 |
question_processed = QAEngine._preprocess_question(question)
|
| 302 |
answer = self.llm_chain.run(question=question_processed, context=context)
|
| 303 |
+
answer_postprocessed = QAEngine._postprocess_answer(answer)
|
| 304 |
+
response.set_answer(answer_postprocessed)
|
| 305 |
logger.info('Received answer')
|
| 306 |
|
| 307 |
if self.debug:
|
|
|
|
| 310 |
logger.info(f'question len: {len(question)} {sep}')
|
| 311 |
logger.info(f'question: {question} {sep}')
|
| 312 |
logger.info(f'answer len: {len(response.get_answer())} {sep}')
|
| 313 |
+
logger.info(f'answer original: {answer} {sep}')
|
| 314 |
+
logger.info(f'answer postprocessed: {response.get_answer()} {sep}')
|
| 315 |
logger.info(f'{response.get_sources_as_text()} {sep}')
|
| 316 |
logger.info(f'messages_contex: {messages_context} {sep}')
|
| 317 |
logger.info(f'relevant_docs: {relevant_docs} {sep}')
|