CCockrum committed on
Commit
c567c97
Β·
verified Β·
1 Parent(s): 2845932

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -3
app.py CHANGED
@@ -86,18 +86,23 @@ def ensure_english(text):
86
 
87
# βœ… Ensure Every Response Has a Follow-Up Question
def generate_follow_up(user_text):
    """Generates a follow-up question to guide the user toward related topics or next steps."""
    # Ask the model for a short, engaging follow-up tied to the user's question.
    query = (
        f"Given the user's question: '{user_text}', generate a SHORT follow-up question "
        "suggesting either a related topic or asking if they need further help. "
        "Example: 'Would you like to explore quantum superposition or ask about another physics concept?' "
        "Keep it concise and engaging."
    )

    llm = get_llm_hf_inference(max_new_tokens=40, temperature=0.8)
    suggestion = llm.invoke(input=query).strip()

    # Fall back to a generic follow-up when the model returns an empty response.
    if not suggestion:
        return "Would you like to explore another related topic or ask about something else?"
    return suggestion
 
 
 
 
101
 
102
  # βœ… Main Response Function
103
  def get_response(system_message, chat_history, user_text, max_new_tokens=512):
@@ -149,6 +154,16 @@ def get_response(system_message, chat_history, user_text, max_new_tokens=512):
149
 
150
  return response, follow_up, chat_history, None
151
 
 
 
 
 
 
 
 
 
 
 
152
  # βœ… Streamlit UI
153
  st.title("πŸš€ HAL - NASA AI Assistant")
154
 
 
86
 
87
# βœ… Ensure Every Response Has a Follow-Up Question
def generate_follow_up(user_text):
    """Generates a clean follow-up question to guide the user toward related topics or next steps."""
    # Ask the model for a short, engaging follow-up tied to the user's question.
    request = (
        f"Given the user's question: '{user_text}', generate a SHORT follow-up question "
        "suggesting either a related topic or asking if they need further help. "
        "Example: 'Would you like to explore quantum superposition or ask about another physics concept?' "
        "Keep it concise and engaging."
    )

    model = get_llm_hf_inference(max_new_tokens=40, temperature=0.8)
    raw_output = model.invoke(input=request).strip()

    # βœ… Strip stray formatting artifacts the model sometimes emits
    # (code fences, doubled single quotes, double quotes).
    tidy = re.sub(r"```|''|\"", "", raw_output).strip()

    # βœ… Fall back to a generic follow-up when nothing usable remains.
    if tidy:
        return tidy
    return "Would you like to explore another related topic or ask about something else?"
106
 
107
  # βœ… Main Response Function
108
  def get_response(system_message, chat_history, user_text, max_new_tokens=512):
 
154
 
155
  return response, follow_up, chat_history, None
156
 
157
+ # βœ… Ensure response is displayed
158
+ if response:
159
+ st.markdown(f"<div class='assistant-msg'><strong>HAL:</strong> {response}</div>", unsafe_allow_html=True)
160
+
161
+ # βœ… Save and display follow-up question separately
162
+ if follow_up: # πŸ” Here is the `if follow_up:` section
163
+ st.session_state.chat_history.append({'role': 'assistant', 'content': follow_up})
164
+ st.markdown(f"<div class='assistant-msg'><strong>HAL:</strong> {follow_up}</div>", unsafe_allow_html=True)
165
+
166
+
167
  # βœ… Streamlit UI
168
  st.title("πŸš€ HAL - NASA AI Assistant")
169