Jeremy Live committed on
Commit
d89427c
·
1 Parent(s): 5a21c8c

fix issues with tuples response

Browse files
Files changed (1) hide show
  1. app.py +45 -29
app.py CHANGED
@@ -820,9 +820,9 @@ def create_ui():
820
  # Create the chat interface
821
  with gr.Row():
822
  chatbot = gr.Chatbot(
823
- [],
824
  elem_id="chatbot",
825
- type="tuples", # keep current list-of-lists format
826
  avatar_images=(
827
  None,
828
  (os.path.join(os.path.dirname(__file__), "logo.svg")),
@@ -932,50 +932,66 @@ def create_application():
932
  # Create the UI components
933
  demo, chatbot, chart_display, question_input, submit_button, streaming_output_display = create_ui()
934
 
935
- def user_message(user_input: str, chat_history: List[List[str]]) -> Tuple[str, List[List[str]]]:
936
- """Add user message to chat history and clear input."""
937
  if not user_input.strip():
938
  return "", chat_history
939
-
940
  logger.info(f"User message: {user_input}")
941
-
942
- # Initialize chat history if needed
943
  if chat_history is None:
944
  chat_history = []
945
-
946
- # Add user message and empty assistant response
947
- chat_history.append([user_input, None])
948
-
949
- # Clear the input
950
  return "", chat_history
951
 
952
- async def bot_response(chat_history: List[List[str]]) -> Tuple[List[List[str]], Optional[go.Figure]]:
953
- """Get bot response and update chat history and return optional chart figure."""
954
  if not chat_history:
955
  return chat_history, None
956
-
957
- # Get the last user message (first element of the last list if it exists)
958
- if not chat_history[-1][0] or chat_history[-1][1] is not None:
 
959
  return chat_history, None
960
-
961
  try:
962
- question = chat_history[-1][0]
963
  logger.info(f"Processing question: {question}")
964
-
965
- # Call the agent and get the response
966
- assistant_message, chart_fig = await stream_agent_response(question, chat_history[:-1])
967
-
968
- # Update the assistant's message in the chat history
969
- chat_history[-1] = [question, assistant_message]
970
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
971
  logger.info("Response generation complete")
972
  return chat_history, chart_fig
973
-
974
  except Exception as e:
975
  error_msg = f"## ❌ Error\n\nError al procesar la solicitud:\n\n```\n{str(e)}\n```"
976
  logger.error(error_msg, exc_info=True)
977
- if chat_history and len(chat_history[-1]) == 2 and chat_history[-1][1] is None:
978
- chat_history[-1] = [chat_history[-1][0], error_msg]
979
  return chat_history, None
980
 
981
  # Event handlers
 
820
  # Create the chat interface
821
  with gr.Row():
822
  chatbot = gr.Chatbot(
823
+ value=[],
824
  elem_id="chatbot",
825
+ type="messages", # migrate to messages format to avoid deprecation
826
  avatar_images=(
827
  None,
828
  (os.path.join(os.path.dirname(__file__), "logo.svg")),
 
932
  # Create the UI components
933
  demo, chatbot, chart_display, question_input, submit_button, streaming_output_display = create_ui()
934
 
935
def user_message(user_input: str, chat_history: List[Dict[str, str]]) -> Tuple[str, List[Dict[str, str]]]:
    """Add the user's message to the messages-format chat history and clear the input box.

    Args:
        user_input: Raw text from the question textbox.
        chat_history: Gradio "messages" history (list of ``{"role", "content"}``
            dicts), or ``None`` on first use.

    Returns:
        ``("", updated_history)`` — the empty string clears the textbox.
    """
    # Normalize BEFORE any early return so every path yields a list,
    # never None (the original returned ("", None) for a blank first input).
    if chat_history is None:
        chat_history = []

    # Ignore blank / whitespace-only submissions.
    if not user_input.strip():
        return "", chat_history

    logger.info(f"User message: {user_input}")

    # Append the user turn in Gradio "messages" format; the assistant
    # reply is appended later by bot_response().
    chat_history.append({"role": "user", "content": user_input})

    return "", chat_history
949
 
950
async def bot_response(chat_history: List[Dict[str, str]]) -> Tuple[List[Dict[str, str]], Optional[go.Figure]]:
    """Generate the assistant reply for the last user turn of a messages-format history.

    Args:
        chat_history: Gradio "messages" history. A reply is generated only when
            the last entry is a non-empty user turn awaiting an answer.

    Returns:
        ``(updated_history, chart_fig)`` where ``chart_fig`` is the optional
        Plotly figure returned by the agent, or ``None``.
    """
    if not chat_history:
        return chat_history, None

    # Only respond when the last message is a non-empty user turn.
    last = chat_history[-1]
    if not isinstance(last, dict) or last.get("role") != "user" or not last.get("content"):
        return chat_history, None

    try:
        question = last["content"]
        logger.info(f"Processing question: {question}")

        # Convert prior messages into [user, assistant] pairs for
        # stream_agent_response(); messages that do not form a pair are skipped.
        pair_history: List[List[str]] = []
        i = 0
        limit = len(chat_history) - 1  # exclude the pending user turn
        while i < limit:
            m1 = chat_history[i]
            # The loop guard ensures i + 1 < len(chat_history), so this
            # index is always valid (the old `else None` branch was dead code).
            m2 = chat_history[i + 1]
            if (
                isinstance(m1, dict)
                and m1.get("role") == "user"
                and isinstance(m2, dict)
                and m2.get("role") == "assistant"
            ):
                pair_history.append([m1.get("content", ""), m2.get("content", "")])
                i += 2
            else:
                i += 1

        # Call the agent for this new user question.
        assistant_message, chart_fig = await stream_agent_response(question, pair_history)

        # Append the assistant reply back into the messages history.
        chat_history.append({"role": "assistant", "content": assistant_message})

        logger.info("Response generation complete")
        return chat_history, chart_fig

    except Exception as e:
        error_msg = f"## ❌ Error\n\nError al procesar la solicitud:\n\n```\n{str(e)}\n```"
        logger.error(error_msg, exc_info=True)
        # Surface the failure to the UI as an assistant error message.
        chat_history.append({"role": "assistant", "content": error_msg})
        return chat_history, None
996
 
997
  # Event handlers