"""SQL database assistant: Flask API endpoints mounted alongside a Gradio chat UI.

This module defines two Flask routes (/user_message stores a message and hands
back an id; /ask runs the stored message through an LLM-backed SQL agent) and
the Gradio application wiring (chat input -> agent -> chat history + optional
Plotly chart). Helper factories (initialize_llm, setup_database_connection,
create_agent, create_ui, stream_agent_response) are defined elsewhere in this
file.
"""

import os
import sys
import re
import gradio as gr
import json
import tempfile
import base64
import io
from typing import List, Dict, Any, Optional, Tuple, Union
import logging
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
from plotly.subplots import make_subplots
from flask import Flask, request, jsonify
import uuid

# Logging configuration
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Flask app initialization
flask_app = Flask(__name__)

# In-memory store of pending user messages, keyed by the uuid handed out by
# /user_message. NOTE(review): entries are only deleted when /ask succeeds,
# so messages that are never asked about accumulate forever; the dict is also
# shared across request threads without a lock — confirm whether that matters
# for this deployment.
message_store: Dict[str, str] = {}


@flask_app.route('/user_message', methods=['POST'])
def handle_user_message():
    """Store an incoming user message and return a unique id for it.

    Expects a JSON body ``{"message": "<text>"}``.

    Returns:
        200 with ``{"message_id", "status"}`` on success,
        400 when the ``message`` field is missing,
        500 on any unexpected error (message echoed back as ``error``).
    """
    try:
        data = request.get_json()
        if not data or 'message' not in data:
            return jsonify({'error': 'Se requiere el campo message'}), 400

        user_message = data['message']

        # Generate a unique id for this message
        message_id = str(uuid.uuid4())

        # Store the message for a later /ask call
        message_store[message_id] = user_message

        return jsonify({
            'message_id': message_id,
            'status': 'success'
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@flask_app.route('/ask', methods=['POST'])
def handle_ask():
    """Run a previously stored message through the SQL agent and return its answer.

    Expects a JSON body ``{"message_id": "<uuid>"}`` referencing an id produced
    by ``/user_message``. Initializes the LLM, database connection, and agent
    via helper factories defined elsewhere in this file, each of which returns
    an ``(object, error)`` pair.

    Returns:
        200 with ``{"response", "status"}`` on success,
        400 when ``message_id`` is missing,
        404 when the id is unknown,
        500 when any initialization step fails or an unexpected error occurs.
    """
    try:
        data = request.get_json()
        if not data or 'message_id' not in data:
            return jsonify({'error': 'Se requiere el campo message_id'}), 400

        message_id = data['message_id']

        # Retrieve the stored message
        if message_id not in message_store:
            return jsonify({'error': 'ID de mensaje no encontrado'}), 404
        user_message = message_store[message_id]

        # Initialize required components (each helper returns (value, error))
        llm, llm_error = initialize_llm()
        if llm_error:
            return jsonify({'error': f'Error al inicializar LLM: {llm_error}'}), 500

        db_connection, db_error = setup_database_connection()
        if db_error:
            return jsonify({'error': f'Error de conexión a la base de datos: {db_error}'}), 500

        agent, agent_error = create_agent(llm, db_connection)
        if agent_error:
            return jsonify({'error': f'Error al crear el agente: {agent_error}'}), 500

        # Get the agent's answer
        response = agent.invoke({"input": user_message})

        # Normalize the response shape: object with a truthy .output attribute,
        # plain string, dict-like with an 'output' key, or fall back to str().
        if hasattr(response, 'output') and response.output:
            response_text = response.output
        elif isinstance(response, str):
            response_text = response
        elif hasattr(response, 'get') and callable(response.get) and 'output' in response:
            response_text = response['output']
        else:
            response_text = str(response)

        # Drop the stored message once it has been processed
        del message_store[message_id]

        return jsonify({
            'response': response_text,
            'status': 'success'
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500


# ... (rest of the existing code unchanged) ...

def create_application():
    """Create and configure the Gradio application."""
    # Create the UI components
    demo, chatbot, chart_display, question_input, submit_button, streaming_output_display = create_ui()

    # Mount the Flask API under the Gradio application when running on Spaces.
    # BUGFIX: the original code rebound `demo` to the return value of
    # gr.mount_gradio_app() (the server app), which broke the `with demo:`
    # block below — it needs the Gradio Blocks object. Keep `demo` unchanged.
    # NOTE(review): gr.mount_gradio_app() is documented to take a FastAPI app,
    # not a Flask app — confirm `flask_app` is actually mountable here.
    if os.getenv('SPACE_ID'):
        gr.mount_gradio_app(
            flask_app,
            demo,
            "/api"  # Prefix for the API endpoints
        )

    def user_message(user_input: str, chat_history: List[Dict[str, str]]) -> Tuple[str, List[Dict[str, str]]]:
        """Add user message to chat history (messages format) and clear input."""
        if not user_input.strip():
            return "", chat_history

        logger.info(f"User message: {user_input}")
        if chat_history is None:
            chat_history = []

        # Append user message in messages format
        chat_history.append({"role": "user", "content": user_input})
        return "", chat_history

    async def bot_response(chat_history: List[Dict[str, str]]) -> Tuple[List[Dict[str, str]], Optional[go.Figure]]:
        """Generate bot response for messages-format chat history and return optional chart figure."""
        if not chat_history:
            return chat_history, None

        # Ensure last message is a user turn awaiting assistant reply
        last = chat_history[-1]
        if not isinstance(last, dict) or last.get("role") != "user" or not last.get("content"):
            return chat_history, None

        try:
            question = last["content"]
            logger.info(f"Processing question: {question}")

            # Convert prior messages to pair history for stream_agent_response().
            # Walks the history pairing consecutive user/assistant turns; the
            # final pending user message is excluded by the `- 1` bound.
            pair_history: List[List[str]] = []
            i = 0
            while i < len(chat_history) - 1:
                m1 = chat_history[i]
                m2 = chat_history[i + 1] if i + 1 < len(chat_history) else None
                if (
                    isinstance(m1, dict) and m1.get("role") == "user"
                    and isinstance(m2, dict) and m2.get("role") == "assistant"
                ):
                    pair_history.append([m1.get("content", ""), m2.get("content", "")])
                    i += 2
                else:
                    i += 1

            # Call the agent for this new user question
            assistant_message, chart_fig = await stream_agent_response(question, pair_history)

            # Append assistant message back into messages history
            chat_history.append({"role": "assistant", "content": assistant_message})
            logger.info("Response generation complete")
            return chat_history, chart_fig
        except Exception as e:
            error_msg = f"## ❌ Error\n\nError al procesar la solicitud:\n\n```\n{str(e)}\n```"
            logger.error(error_msg, exc_info=True)
            # Ensure we add an assistant error message for the UI
            chat_history.append({"role": "assistant", "content": error_msg})
            return chat_history, None

    # Event handlers
    with demo:
        # Handle form submission (Enter in the textbox)
        msg_submit = question_input.submit(
            fn=user_message,
            inputs=[question_input, chatbot],
            outputs=[question_input, chatbot],
            queue=True
        ).then(
            fn=bot_response,
            inputs=[chatbot],
            outputs=[chatbot, chart_display],
            api_name="ask"
        )

        # Handle button click
        btn_click = submit_button.click(
            fn=user_message,
            inputs=[question_input, chatbot],
            outputs=[question_input, chatbot],
            queue=True
        ).then(
            fn=bot_response,
            inputs=[chatbot],
            outputs=[chatbot, chart_display]
        )

    return demo


# Create the application at import time so Spaces / local launch share one instance
demo = create_application()


# Hugging Face Spaces configuration
def get_app():
    """Obtiene la instancia de la aplicación Gradio para Hugging Face Spaces."""
    # Check whether we are running inside a Hugging Face Spaces environment
    if os.getenv('SPACE_ID'):
        # Spaces-specific configuration
        demo.title = "🤖 Asistente de Base de Datos SQL (Demo)"
        demo.description = """
        Este es un demo del asistente de base de datos SQL.
        Para usar la versión completa con conexión a base de datos,
        clona este espacio y configura las variables de entorno.
        """
    return demo


# Local development entry point
if __name__ == "__main__":
    # Decide whether to run the bare Flask server or the Gradio UI
    if os.environ.get('RUN_FLASK', 'false').lower() == 'true':
        # Run only the Flask server
        port = int(os.environ.get('PORT', 5000))
        flask_app.run(host='0.0.0.0', port=port)
    else:
        # Local development configuration — simplified for Gradio 5.x
        demo.launch(
            server_name="0.0.0.0",
            server_port=7860,
            debug=True,
            share=False
        )