acecalisto3 committed on
Commit 386db61 · verified · 1 Parent(s): 03fc5c6

Update app2.py

Files changed (1)
  1. app2.py +15 -10
app2.py CHANGED
@@ -1024,25 +1024,26 @@ def respond_to_chat(
     # Initialize chat_history if it's None (Gradio might pass None initially)
     if chat_history is None:
         chat_history = []
-
+
     if chatbot_data is None or not chatbot_data:
         chat_history.append((message, "Please process some data first using the other tabs before chatting."))
-        return chat_history, chatbot_data, current_filtered_df_state # Return existing state
-
+        return chat_history, chatbot_data, current_filtered_df_state # Return existing state
+
     # Append user message to history immediately
-    chat_history.append((message, None)) # Use None as a placeholder for the assistant's response
-
+    chat_history.append((message, None)) # Use None as a placeholder for the assistant's response
+
     response = ""
     lower_message = message.lower().strip()
-
+
     # Initialize new_filtered_df_state with the current state to preserve it unless a filter changes it
     new_filtered_df_state = current_filtered_df_state
-
+
     df = None
     try:
         # Attempt to create a DataFrame from the full chatbot_data for analysis
         # This flattens the structure for easier querying with pandas
         flat_data = []
+
         def flatten_item(d, parent_key='', sep='_'):
             items = {}
             if isinstance(d, dict):
@@ -1063,7 +1064,7 @@ def respond_to_chat(
             # If d is a primitive (int, str, bool, None), it won't add anything here, which is fine
             # as primitives are handled in the dict/list branches.
             return items
-
+
         # Process each top-level item in chatbot_data
         for i, item in enumerate(chatbot_data):
             if isinstance(item, dict):
@@ -1072,8 +1073,12 @@ def respond_to_chat(
                 flat_data.append(flat_item)
             # If chatbot_data contains non-dict top-level items, flatten them too
             elif isinstance(item, (list, str, int, float, bool, type(None))):
-                flat_data.append({'item_value': item}) # Wrap primitives in a dict
-
+                flat_data.append({'item_value': item}) # Wrap primitives in a dict
+
+    except Exception as e:
+        # Handle exceptions that may occur during processing
+        response = f"An error occurred: {str(e)}"
+        chat_history.append((message, response)) # Append error message to chat history
 
     if flat_data:
         try:
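
The hunks above only show the edges of the flattening logic: the signature and return of flatten_item, the wrapping of primitive items, and the newly added except block that reports failures back into chat_history. A minimal, self-contained sketch of that pattern follows; the dict/list branches of flatten_item, the build_dataframe wrapper name, and the pd.DataFrame step are assumptions for illustration, not code taken from app2.py.

import pandas as pd

def flatten_item(d, parent_key='', sep='_'):
    # Recursively flatten nested dicts/lists into a single-level dict.
    # Only the signature, the primitive-handling comment, and the return
    # statement appear in the commit; these branches are assumed.
    items = {}
    if isinstance(d, dict):
        for k, v in d.items():
            new_key = f"{parent_key}{sep}{k}" if parent_key else str(k)
            if isinstance(v, (dict, list)):
                items.update(flatten_item(v, new_key, sep=sep))
            else:
                items[new_key] = v
    elif isinstance(d, list):
        for i, v in enumerate(d):
            new_key = f"{parent_key}{sep}{i}" if parent_key else str(i)
            if isinstance(v, (dict, list)):
                items.update(flatten_item(v, new_key, sep=sep))
            else:
                items[new_key] = v
    # Primitives (str, int, bool, None) add nothing here; the caller wraps them.
    return items

def build_dataframe(chatbot_data, chat_history, message):
    # Hypothetical wrapper mirroring the diff's control flow: flatten each item,
    # and on failure append the error to chat_history, as the new except block does.
    flat_data = []
    try:
        for item in chatbot_data:
            if isinstance(item, dict):
                flat_data.append(flatten_item(item))
            elif isinstance(item, (list, str, int, float, bool, type(None))):
                flat_data.append({'item_value': item})  # Wrap primitives in a dict
    except Exception as e:
        response = f"An error occurred: {str(e)}"
        chat_history.append((message, response))
        return None
    return pd.DataFrame(flat_data) if flat_data else None

As a usage example, build_dataframe([{'a': {'b': 1}}, 'plain text'], [], 'show stats') would produce a frame with columns a_b and item_value. For purely dict-shaped data, pandas' built-in pd.json_normalize performs a similar flattening, though it would not wrap bare primitives the way the elif branch does.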