Jeremy Live committed
Commit c32de30
1 Parent(s): 27ba279

second graph response

Files changed (1)
  1. app.py +17 -1
app.py CHANGED
@@ -735,7 +735,23 @@ async def stream_agent_response(question: str, chat_history: List[List[str]]) ->
             title="Distribución"
         )
         if chart_fig is not None:
-            logger.info(f"Chart generated from text fallback: type={desired_type}, items={len(data)}")
+            logger.info(
+                f"Chart generated from text fallback: type={desired_type}, items={len(data)}"
+            )
+            # Replace the assistant message so it acknowledges
+            # that it used the previous answer (conversation memory)
+            # to build the visualization. This avoids confusing
+            # replies like "I need more information" when we
+            # already have parsable data from the last turn.
+            summary_lines = [
+                f"- {d['label']}: {d['value']}" for d in data
+            ]
+            assistant_message["content"] = (
+                "He generado un gráfico "
+                f"de {desired_type} usando los datos previos de la "
+                "conversación.\n\n"
+                "### Datos usados\n" + "\n".join(summary_lines)
+            )
 
     # Update the assistant's message with the response
     assistant_message["content"] = response_text
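
For reference, a minimal, self-contained sketch of the reply text the new branch builds. The helper name build_fallback_reply and the sample data below are hypothetical and do not exist in app.py; only the summary format and the Spanish wording mirror the added lines above.

from typing import Dict, List


def build_fallback_reply(desired_type: str, data: List[Dict[str, object]]) -> str:
    # Hypothetical helper: reproduces the string the added lines assign to
    # assistant_message["content"] when a chart is built from data parsed
    # out of the previous turn (conversation memory).
    summary_lines = [f"- {d['label']}: {d['value']}" for d in data]
    return (
        "He generado un gráfico "
        f"de {desired_type} usando los datos previos de la "
        "conversación.\n\n"
        "### Datos usados\n" + "\n".join(summary_lines)
    )


if __name__ == "__main__":
    # Invented example values standing in for data parsed from the last answer.
    sample = [{"label": "Norte", "value": 120}, {"label": "Sur", "value": 85}]
    print(build_fallback_reply("barras", sample))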