Jeremy Live committed
Commit cf30c20 · 1 Parent(s): bc753a4
Files changed (1)
  1. app.py +10 -10
app.py CHANGED
@@ -423,7 +423,7 @@ def convert_to_messages_format(chat_history):
 
     return messages
 
-async def stream_agent_response(question: str, chat_history: List[Tuple[str, str]]) -> str:
+async def stream_agent_response(question: str, chat_history: List[List[str]]) -> str:
     """Procesa la pregunta del usuario y devuelve la respuesta del agente con memoria de conversación."""
     global agent  # Make sure we can modify the agent's memory
 
@@ -432,11 +432,11 @@ async def stream_agent_response(question: str, chat_history: List[Tuple[str, str
     messages = []
 
     # Add previous chat history in the correct format for the agent
-    for user_msg, assistant_msg in chat_history:
-        if user_msg:
-            messages.append(HumanMessage(content=user_msg))
-        if assistant_msg:
-            messages.append(AIMessage(content=assistant_msg))
+    for msg_pair in chat_history:
+        if len(msg_pair) >= 1 and msg_pair[0]:  # User message
+            messages.append(HumanMessage(content=msg_pair[0]))
+        if len(msg_pair) >= 2 and msg_pair[1]:  # Assistant message
+            messages.append(AIMessage(content=msg_pair[1]))
 
     # Add current user's question
     user_message = HumanMessage(content=question)
@@ -812,12 +812,12 @@ def create_application():
         # Clear the input
         return "", chat_history
 
-    async def bot_response(chat_history: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
+    async def bot_response(chat_history: List[List[str]]) -> List[List[str]]:
         """Get bot response and update chat history."""
         if not chat_history:
             return chat_history
 
-        # Get the last user message (first element of the last tuple if it exists)
+        # Get the last user message (first element of the last list if it exists)
         if not chat_history[-1][0] or chat_history[-1][1] is not None:
             return chat_history
 
@@ -829,7 +829,7 @@ def create_application():
             assistant_message = await stream_agent_response(question, chat_history[:-1])
 
             # Update the assistant's message in the chat history
-            chat_history[-1] = (question, assistant_message)
+            chat_history[-1] = [question, assistant_message]
 
             logger.info("Response generation complete")
             return chat_history
@@ -838,7 +838,7 @@ def create_application():
             error_msg = f"## ❌ Error\n\nError al procesar la solicitud:\n\n```\n{str(e)}\n```"
             logger.error(error_msg, exc_info=True)
             if chat_history and len(chat_history[-1]) == 2 and chat_history[-1][1] is None:
-                chat_history[-1] = (chat_history[-1][0], error_msg)
+                chat_history[-1] = [chat_history[-1][0], error_msg]
             return chat_history
 
     # Event handlers
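The core of the change is the switch from tuple-based history pairs (List[Tuple[str, str]]) to list-based pairs (List[List[str]]), plus a more defensive loop when mapping those pairs onto LangChain messages. The sketch below isolates that mapping as a standalone helper; it is illustrative only (history_to_messages is not a name in app.py), and it assumes the message classes are importable from langchain_core.messages, which may differ from the imports app.py actually uses.

# Illustrative sketch, not code from app.py: mirrors the pair-to-message loop
# added in this commit, under the assumptions stated above.
from typing import List, Optional

from langchain_core.messages import AIMessage, BaseMessage, HumanMessage


def history_to_messages(chat_history: List[List[Optional[str]]]) -> List[BaseMessage]:
    """Convert Gradio-style [user, assistant] pairs into LangChain messages."""
    messages: List[BaseMessage] = []
    for msg_pair in chat_history:
        # A pair may be incomplete: the turn currently being answered has no
        # assistant text yet, so both slots are checked before appending.
        if len(msg_pair) >= 1 and msg_pair[0]:
            messages.append(HumanMessage(content=msg_pair[0]))
        if len(msg_pair) >= 2 and msg_pair[1]:
            messages.append(AIMessage(content=msg_pair[1]))
    return messages


if __name__ == "__main__":
    history = [
        ["Hola, ¿qué puedes hacer?", "Puedo responder preguntas sobre tus datos."],
        ["Dame un resumen", None],  # pending turn: assistant slot still empty
    ]
    for message in history_to_messages(history):
        print(type(message).__name__, "->", message.content)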
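The bot_response changes rely on the usual two-step Gradio chat wiring: a submit handler appends [question, None] and clears the textbox (the return "", chat_history context line above), then a chained handler fills in the assistant slot, which is why bot_response returns early when the last pair already has a non-None reply. Below is a minimal, self-contained sketch of that wiring; component and function names are illustrative, the agent call is replaced by an echo, and it assumes a Gradio version whose Chatbot accepts pair-style history.

# Hypothetical wiring sketch, not code from app.py: shows the submit -> respond
# chain that the early-return logic in bot_response depends on.
import gradio as gr


def add_user_message(question: str, chat_history: list) -> tuple:
    # Append the new turn with an empty assistant slot and clear the textbox;
    # the chained handler below fills the slot in.
    return "", chat_history + [[question, None]]


async def fill_bot_response(chat_history: list) -> list:
    # Mirror bot_response's guards: nothing to do without a pending user turn.
    if not chat_history or not chat_history[-1][0] or chat_history[-1][1] is not None:
        return chat_history
    question = chat_history[-1][0]
    # Placeholder for stream_agent_response(question, chat_history[:-1]).
    chat_history[-1] = [question, f"Echo: {question}"]
    return chat_history


with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    textbox = gr.Textbox(placeholder="Escribe tu pregunta...")
    textbox.submit(
        add_user_message, [textbox, chatbot], [textbox, chatbot]
    ).then(fill_bot_response, chatbot, chatbot)

if __name__ == "__main__":
    demo.launch()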