kennethduong committed on
Commit c6b62e0
1 Parent(s): 8ed5020

change max token

Files changed (1)
  1. chatbot/chatbot.py +1 -1
chatbot/chatbot.py CHANGED
@@ -70,7 +70,7 @@ def chat(mood, prompt):
         contents=llm_prompt,
         generation_config={
             "temperature": 0.7,
-            "max_output_tokens": 100
+            "max_output_tokens": 500
         }
     )
     return response.text.strip()
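
For context, the edited lines sit inside a call to a text-generation API, and this commit only raises the output cap from 100 to 500 tokens. Below is a minimal sketch of how chat(mood, prompt) might wrap such a call, assuming the google.generativeai client, a guessed model name, and a hypothetical prompt string; none of these surrounding details are confirmed by the diff itself.

# Sketch only: the diff's generation_config dict passed to an assumed
# google.generativeai call; model name and prompt construction are guesses.
import google.generativeai as genai

model = genai.GenerativeModel("gemini-1.5-flash")  # assumed model name

def chat(mood, prompt):
    # Hypothetical prompt construction; the real llm_prompt is built elsewhere in chatbot.py.
    llm_prompt = f"Respond in a {mood} tone: {prompt}"
    response = model.generate_content(
        contents=llm_prompt,
        generation_config={
            "temperature": 0.7,
            "max_output_tokens": 500,  # value raised from 100 in this commit
        },
    )
    return response.text.strip()

A larger max_output_tokens lets the model finish longer replies instead of truncating them mid-sentence, at the cost of potentially longer and more expensive responses.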