|
import os |
|
import sys |
|
import re |
|
import gradio as gr |
|
import json |
|
import tempfile |
|
import base64 |
|
import io |
|
from typing import List, Dict, Any, Optional, Tuple, Union |
|
import logging |
|
import pandas as pd |
|
import plotly.express as px |
|
import plotly.graph_objects as go |
|
from plotly.subplots import make_subplots |
|
from api import app as flask_app |
|
|
|
|
|
|
|
def create_application():
    """Create and configure the Gradio application.

    Builds the UI via ``create_ui``, wires the chat submit/click event chains,
    and (when running on Hugging Face Spaces) mounts the Flask API under
    ``/api``.

    Returns:
        The fully wired Gradio Blocks demo.
    """
    # BUG FIX: ``logger`` was referenced by the nested handlers below but was
    # never created anywhere in the file (only ``import logging`` existed),
    # which raised NameError on the first message. Define it here; the nested
    # functions pick it up via closure.
    logger = logging.getLogger(__name__)

    # NOTE(review): ``create_ui`` and ``stream_agent_response`` are not
    # imported in this file — confirm they are provided elsewhere (e.g. a
    # ``from ui import ...`` that went missing).
    demo, chatbot, chart_display, question_input, submit_button, streaming_output_display = create_ui()

    if os.getenv('SPACE_ID'):
        # BUG FIX: the original code called
        #     demo = gr.mount_gradio_app(flask_app, "/api", lambda: True)
        # which (a) passed the path string where the Blocks instance belongs
        # and a lambda where the path belongs, and (b) rebound ``demo`` to the
        # returned web app, breaking the ``with demo:`` wiring below. Use the
        # documented argument order (app, blocks, path) and keep ``demo``
        # bound to the Blocks instance.
        # NOTE(review): gr.mount_gradio_app expects a FastAPI/Starlette app;
        # confirm that ``flask_app`` is ASGI-compatible (e.g. wrapped with
        # WsgiToAsgi) before relying on this in Spaces.
        gr.mount_gradio_app(flask_app, demo, "/api")

    def user_message(user_input: str, chat_history: List[Dict[str, str]]) -> Tuple[str, List[Dict[str, str]]]:
        """Append the user's message to the messages-format history and clear the textbox."""
        if not user_input.strip():
            # Ignore empty/whitespace-only submissions.
            return "", chat_history

        logger.info(f"User message: {user_input}")

        if chat_history is None:
            chat_history = []

        chat_history.append({"role": "user", "content": user_input})
        return "", chat_history

    async def bot_response(chat_history: List[Dict[str, str]]) -> Tuple[List[Dict[str, str]], Optional[go.Figure]]:
        """Generate the assistant reply for the newest user turn.

        Returns the updated history plus an optional Plotly figure for the
        chart panel (``None`` when the answer carries no chart).
        """
        if not chat_history:
            return chat_history, None

        # Only respond when the newest entry is a non-empty user message.
        last = chat_history[-1]
        if not isinstance(last, dict) or last.get("role") != "user" or not last.get("content"):
            return chat_history, None

        try:
            question = last["content"]
            logger.info(f"Processing question: {question}")

            # Convert the messages-format history (excluding the pending
            # question) into [user, assistant] pairs for the agent.
            pair_history: List[List[str]] = []
            i = 0
            while i < len(chat_history) - 1:
                m1 = chat_history[i]
                # Safe: the loop bound guarantees i + 1 is a valid index, so
                # the original ``if i + 1 < len(...) else None`` guard was
                # redundant and has been dropped.
                m2 = chat_history[i + 1]
                if (
                    isinstance(m1, dict)
                    and m1.get("role") == "user"
                    and isinstance(m2, dict)
                    and m2.get("role") == "assistant"
                ):
                    pair_history.append([m1.get("content", ""), m2.get("content", "")])
                    i += 2
                else:
                    # Unpaired or malformed entry: skip one message and resync.
                    i += 1

            assistant_message, chart_fig = await stream_agent_response(question, pair_history)

            chat_history.append({"role": "assistant", "content": assistant_message})

            logger.info("Response generation complete")
            return chat_history, chart_fig

        except Exception as e:
            # Surface the failure in-chat instead of crashing the event handler.
            error_msg = f"## ❌ Error\n\nError al procesar la solicitud:\n\n```\n{str(e)}\n```"
            logger.error(error_msg, exc_info=True)

            chat_history.append({"role": "assistant", "content": error_msg})
            return chat_history, None

    with demo:
        # Pressing Enter in the textbox: record the user turn, then generate
        # the bot reply. Exposed as the public "ask" API endpoint.
        question_input.submit(
            fn=user_message,
            inputs=[question_input, chatbot],
            outputs=[question_input, chatbot],
            queue=True
        ).then(
            fn=bot_response,
            inputs=[chatbot],
            outputs=[chatbot, chart_display],
            api_name="ask"
        )

        # The submit button mirrors the Enter-key flow (no public API name).
        submit_button.click(
            fn=user_message,
            inputs=[question_input, chatbot],
            outputs=[question_input, chatbot],
            queue=True
        ).then(
            fn=bot_response,
            inputs=[chatbot],
            outputs=[chatbot, chart_display]
        )

    return demo
|
|
|
|
|
# Build the app once at import time; Hugging Face Spaces (via get_app) and the
# __main__ launcher below both reuse this module-level instance.
demo = create_application()
|
|
|
|
|
def get_app():
    """Return the Gradio app instance for Hugging Face Spaces.

    When running inside a Space (the ``SPACE_ID`` environment variable is
    set), the demo is rebranded as a read-only showcase before being handed
    back; otherwise the module-level instance is returned unchanged.
    """
    running_in_space = os.getenv('SPACE_ID')

    if running_in_space:
        demo.title = "🤖 Asistente de Base de Datos SQL (Demo)"
        demo.description = """
        Este es un demo del asistente de base de datos SQL.
        Para usar la versión completa con conexión a base de datos, clona este espacio y configura las variables de entorno.
        """

    return demo
|
|
|
|
|
if __name__ == "__main__":
    # Local entry point: serve on all interfaces at the standard Spaces port,
    # with debug enabled and no public share link.
    launch_options = {
        "server_name": "0.0.0.0",
        "server_port": 7860,
        "debug": True,
        "share": False,
    }
    demo.launch(**launch_options)
|
|