Jeremy Live committed
Commit 95c55e1 · Parent(s): 86da53d
v1
app.py CHANGED
@@ -583,8 +583,9 @@ async def stream_agent_response(question: str, chat_history: List) -> List[Dict]
 
         # Ensure we return a list of tuples in the format Gradio expects
         # Each message should be a tuple of (user_msg, bot_msg)
-        # For
-
+        # For the current response, we need to include both the user's question and the assistant's response
+        chat_history.append((question, message_content))
+        return chat_history
 
     except Exception as e:
         error_msg = f"## ❌ Error\n\nOcurrió un error al procesar tu solicitud:\n\n```\n{str(e)}\n```"
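The added lines return the accumulated history in the (user_msg, bot_msg) tuple format that Gradio's tuple-style Chatbot expects. Below is a minimal, self-contained sketch of that pattern, not the app's actual code: `answer_question` and the echoed reply are hypothetical stand-ins for `stream_agent_response` and the agent's streamed output.

```python
# Minimal sketch of the tuple-format chat-history pattern shown in the diff.
# Assumption: Gradio's tuple-style Chatbot (list of (user_msg, bot_msg) pairs);
# `answer_question` and the echo reply are placeholders, not app.py's real logic.
from typing import List, Tuple

import gradio as gr


def answer_question(question: str, chat_history: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
    try:
        # Stand-in for the agent call; the real handler streams a response.
        message_content = f"Echo: {question}"
        # Gradio expects a list of (user_msg, bot_msg) tuples, so the new
        # turn is appended as one tuple and the full history is returned.
        chat_history.append((question, message_content))
        return chat_history
    except Exception as e:
        # On failure, surface the error as the bot's side of the turn.
        error_msg = f"## ❌ Error\n\n{e}"
        chat_history.append((question, error_msg))
        return chat_history


with gr.Blocks() as demo:
    chatbot = gr.Chatbot()                  # renders the tuple history
    textbox = gr.Textbox(label="Question")
    textbox.submit(answer_question, inputs=[textbox, chatbot], outputs=chatbot)

if __name__ == "__main__":
    demo.launch()
```

The handler returns the whole list rather than just the new turn because, in the tuple format, the Chatbot component's value is the complete history; that is why the commit adds both the `chat_history.append((question, message_content))` call and the `return chat_history` line.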