CCockrum committed on
Commit
6eb97b8
·
verified ·
1 Parent(s): 7e790cb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -2
app.py CHANGED
@@ -81,7 +81,7 @@ def generate_follow_up(user_text):
81
  cleaned = ["Would you like to explore this topic further?"]
82
  return random.choice(cleaned)
83
 
84
- def get_response(system_message, chat_history, user_text, max_new_tokens=256):
85
  """
86
  Generates HAL's answer with depth and a follow-up question.
87
  The prompt instructs the model to provide a detailed explanation and then generate a follow-up.
@@ -132,7 +132,6 @@ def get_response(system_message, chat_history, user_text, max_new_tokens=256):
132
  chat = prompt | hf.bind(skip_prompt=True) | StrOutputParser(output_key='content')
133
  response = chat.invoke(input=dict(system_message=system_message, user_text=user_text, chat_history=filtered_history))
134
  # Remove any extra markers if present.
135
- response = response.split("HAL:")[-1].strip()
136
 
137
  # Fallback in case the generated answer is empty
138
  if not response:
 
81
  cleaned = ["Would you like to explore this topic further?"]
82
  return random.choice(cleaned)
83
 
84
+ def get_response(system_message, chat_history, user_text, max_new_tokens=1024):
85
  """
86
  Generates HAL's answer with depth and a follow-up question.
87
  The prompt instructs the model to provide a detailed explanation and then generate a follow-up.
 
132
  chat = prompt | hf.bind(skip_prompt=True) | StrOutputParser(output_key='content')
133
  response = chat.invoke(input=dict(system_message=system_message, user_text=user_text, chat_history=filtered_history))
134
  # Remove any extra markers if present.
 
135
 
136
  # Fallback in case the generated answer is empty
137
  if not response: