jayebaku committed on
Commit
a14acaa
·
verified ·
1 Parent(s): 93413ed

Update qa_summary.py

Browse files
Files changed (1) hide show
  1. qa_summary.py +3 -2
qa_summary.py CHANGED
@@ -1,7 +1,7 @@
1
  import spaces
2
  from transformers import AutoModelForCausalLM, AutoTokenizer
3
 
4
- @spaces.GPU(duration=120)
5
  def generate_answer(llm_name, texts, query, queries, mode='validate'):
6
 
7
  if llm_name == 'solar':
@@ -40,7 +40,8 @@ def generate_answer(llm_name, texts, query, queries, mode='validate'):
40
  elif mode == 'h_summarize':
41
  conversation = [ {'role': 'user', 'content': f'The documents below describe a developing disaster event. Based on these documents, write a brief summary in the form of a paragraph, highlighting the most crucial information. \nDocuments: {template_texts}'} ]
42
  elif mode == "multi_summarize":
43
- conversation = [ {'role': 'user', 'content': f'For the following queries and documents, try to answer the given queries based on the documents.\nQueries: {queries} \nDocuments: {template_texts}.'} ]
 
44
 
45
 
46
  prompt = tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True)
 
1
  import spaces
2
  from transformers import AutoModelForCausalLM, AutoTokenizer
3
 
4
+ @spaces.GPU(duration=60)
5
  def generate_answer(llm_name, texts, query, queries, mode='validate'):
6
 
7
  if llm_name == 'solar':
 
40
  elif mode == 'h_summarize':
41
  conversation = [ {'role': 'user', 'content': f'The documents below describe a developing disaster event. Based on these documents, write a brief summary in the form of a paragraph, highlighting the most crucial information. \nDocuments: {template_texts}'} ]
42
  elif mode == "multi_summarize":
43
+ # conversation = [ {'role': 'user', 'content': f'For the following queries and documents, try to answer the given queries based on the documents. Also, return the top 5 unaltered documents that answer the queries.\nQueries: {queries} \nDocuments: {template_texts}.'} ]
44
+ conversation = [ {'role': 'user', 'content': f'For the following queries and documents, in a brief paragraph try to answer the given queries based on the documents. Then, return the top 5 documents as provided that answer the queries.\nQueries: {queries} \nDocuments: {template_texts}.'} ]
45
 
46
 
47
  prompt = tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True)