CCockrum committed on
Commit 094ffdc · verified · 1 Parent(s): 9ceabc0

Update app.py

Files changed (1)
  1. app.py +43 -26
app.py CHANGED
@@ -20,9 +20,6 @@ if "response_ready" not in st.session_state:
 if "follow_up" not in st.session_state:
     st.session_state.follow_up = ""  # Stores follow-up question
 
-if "last_topic" not in st.session_state:
-    st.session_state.last_topic = ""  # Stores last user topic
-
 # --- Set Up Model & API Functions ---
 model_id = "mistralai/Mistral-7B-Instruct-v0.3"
 
@@ -53,12 +50,7 @@ def analyze_sentiment(user_text):
 def predict_action(user_text):
     if "NASA" in user_text or "space" in user_text:
         return "nasa_info"
-    elif "quantum" in user_text or "physics" in user_text or "quark" in user_text:
-        return "physics"
-    elif "AI" in user_text or "machine learning" in user_text:
-        return "AI"
-    else:
-        return "general_query"
+    return "general_query"
 
 def generate_follow_up(user_text):
     """
@@ -78,7 +70,6 @@ def generate_follow_up(user_text):
 def get_response(system_message, chat_history, user_text, max_new_tokens=256):
     """
     Generates HAL's response, making it more conversational and engaging.
-    Detects if the user has switched topics and removes previous follow-up questions if needed.
     """
     sentiment = analyze_sentiment(user_text)
     action = predict_action(user_text)
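With the "quantum"/"physics" and "AI" branches removed from predict_action above, any input that does not mention NASA or space now falls through to "general_query". A quick illustrative check of the new routing (not part of the commit, shown only to make the behaviour change concrete):

assert predict_action("Tell me about NASA missions") == "nasa_info"
assert predict_action("Explain quantum entanglement") == "general_query"  # previously "physics"
assert predict_action("What is machine learning?") == "general_query"     # previously "AI"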
@@ -114,19 +105,10 @@ def get_response(system_message, chat_history, user_text, max_new_tokens=256):
     chat_history.append({'role': 'user', 'content': user_text})
     chat_history.append({'role': 'assistant', 'content': response})
 
-    # ✅ Detect if the topic has changed
-    current_topic = action  # Predicts topic based on user input
-
-    if current_topic != st.session_state.last_topic:
-        st.session_state.follow_up = ""  # Clear previous follow-up question if topic changed
-    else:
-        follow_up = generate_follow_up(user_text)
-        chat_history.append({'role': 'assistant', 'content': follow_up})
-        st.session_state.follow_up = follow_up  # Store the new follow-up question
-
-    st.session_state.last_topic = current_topic  # Update last topic for next check
+    follow_up = generate_follow_up(user_text)
+    chat_history.append({'role': 'assistant', 'content': follow_up})
 
-    return response, st.session_state.follow_up, chat_history, None
+    return response, follow_up, chat_history, None
 
 # --- Chat UI ---
 st.title("🚀 HAL - Your NASA AI Assistant")
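With the topic-change bookkeeping removed, get_response now always generates a follow-up, appends it to the history, and returns it directly rather than reading it back from st.session_state. An illustrative call under the new flow, assuming a general query that reaches the return shown in this hunk (values are placeholders, not app output):

response, follow_up, history, image_url = get_response(
    system_message="You are a helpful AI assistant.",
    chat_history=[],
    user_text="How does the chat interface work?",
)
# history now ends with three entries:
#   {'role': 'user', 'content': 'How does the chat interface work?'}
#   {'role': 'assistant', 'content': response}
#   {'role': 'assistant', 'content': follow_up}
# image_url is None on this return path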
@@ -138,9 +120,41 @@ if st.sidebar.button("Reset Chat"):
     st.session_state.response_ready = False
     st.session_state.follow_up = ""
     st.session_state.last_topic = ""
-    st.rerun()  # ✅ Fixed: Correct method to reset the app in newer Streamlit versions
-
-# --- Chat History Display ---
+    st.rerun()  # ✅ Correct method to reset the app in newer Streamlit versions
+
+# Custom Chat Styling
+st.markdown("""
+    <style>
+    .user-msg {
+        background-color: #0078D7;
+        color: white;
+        padding: 10px;
+        border-radius: 10px;
+        margin-bottom: 5px;
+        width: fit-content;
+        max-width: 80%;
+    }
+    .assistant-msg {
+        background-color: #333333;
+        color: white;
+        padding: 10px;
+        border-radius: 10px;
+        margin-bottom: 5px;
+        width: fit-content;
+        max-width: 80%;
+    }
+    .container {
+        display: flex;
+        flex-direction: column;
+        align-items: flex-start;
+    }
+    @media (max-width: 600px) {
+        .user-msg, .assistant-msg { font-size: 16px; max-width: 100%; }
+    }
+    </style>
+""", unsafe_allow_html=True)
+
+# --- Chat History Display (Ensures All Messages Are Visible) ---
 st.markdown("<div class='container'>", unsafe_allow_html=True)
 
 for message in st.session_state.chat_history:
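The added style block defines the user-msg, assistant-msg, and container classes consumed by the chat-history loop that closes this hunk. The loop body itself is outside the hunk; a minimal sketch of markup that would match these classes, modelled on the assistant-msg markup used near the end of the file (the "You" label and css_class helper are illustrative assumptions):

for message in st.session_state.chat_history:
    css_class = "user-msg" if message['role'] == 'user' else "assistant-msg"
    speaker = "You" if message['role'] == 'user' else "HAL"
    st.markdown(
        f"<div class='{css_class}'><strong>{speaker}:</strong> {message['content']}</div>",
        unsafe_allow_html=True,
    )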
@@ -155,8 +169,10 @@ st.markdown("</div>", unsafe_allow_html=True)
 user_input = st.chat_input("Type your message here...")  # Uses Enter to submit
 
 if user_input:
+    # Save user message in chat history
     st.session_state.chat_history.append({'role': 'user', 'content': user_input})
 
+    # Generate HAL's response
     response, follow_up, st.session_state.chat_history, image_url = get_response(
         system_message="You are a helpful AI assistant.",
         user_text=user_input,
@@ -171,8 +187,9 @@ if user_input:
     st.image(image_url, caption="NASA Image of the Day")
 
     st.session_state.follow_up = follow_up
-    st.session_state.response_ready = True
+    st.session_state.response_ready = True  # Enables follow-up response cycle
 
 if st.session_state.response_ready and st.session_state.follow_up:
+    st.session_state.chat_history.append({'role': 'assistant', 'content': st.session_state.follow_up})
     st.markdown(f"<div class='assistant-msg'><strong>HAL:</strong> {st.session_state.follow_up}</div>", unsafe_allow_html=True)
     st.session_state.response_ready = False
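In this final hunk the follow-up is appended to the chat history before it is rendered and response_ready is set again after each response, so the gated block runs once per user message. A sketch of the intended cycle, assuming Streamlit's rerun-per-interaction model (the comments are an interpretation, not text from the commit):

# run triggered by a new user message:
#   get_response(...) returns the follow-up; response_ready is set to True
#   the gated block appends the follow-up to st.session_state.chat_history,
#   renders it, and resets response_ready to False
# later reruns:
#   response_ready is False, so the gated block is skipped;
#   the follow-up is re-rendered by the history loop instead
#   (note: get_response already appended the same follow-up inside the function,
#   so it appears in the stored history twice per turn)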
 