Jeremy Live committed
Commit 7b28785
1 Parent(s): a852f7c

Revert "add missing function"


This reverts commit 84dd234fe9ab35cbb4d68844d2550562b9cacf70.
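For reference, a revert commit like this one is typically produced with git's built-in revert command, which creates a new commit applying the inverse of the named commit. A minimal sketch (the exact invocation used for this commit is not recorded on this page):

    # recreate this revert locally, assuming the commit is reachable in your clone
    git revert 84dd234fe9ab35cbb4d68844d2550562b9cacf70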

Files changed (1)
  1. app.py +6 -164
app.py CHANGED
@@ -22,164 +22,6 @@ logger = logging.getLogger(__name__)
  # Flask app initialization
  flask_app = Flask(__name__)
 
- # Additional imports required
- import os
- from dotenv import load_dotenv
- from sqlalchemy import create_engine, text
- from sqlalchemy.exc import SQLAlchemyError
- from langchain_google_genai import ChatGoogleGenerativeAI
- from langchain_community.agent_toolkits import create_sql_agent
- from langchain_community.utilities import SQLDatabase
- from langgraph.prebuilt import create_react_agent
-
- # Load environment variables
- load_dotenv()
-
- def initialize_llm():
-     """Initialize the Google Gemini LLM."""
-     try:
-         api_key = os.getenv('GOOGLE_API_KEY')
-         if not api_key:
-             return None, "GOOGLE_API_KEY not found in environment variables"
-
-         llm = ChatGoogleGenerativeAI(
-             model="gemini-2.0-flash",
-             google_api_key=api_key,
-             temperature=0.1,
-             convert_system_message_to_human=True
-         )
-         return llm, None
-     except Exception as e:
-         return None, str(e)
-
- def setup_database_connection():
-     """Set up the database connection."""
-     try:
-         # Read the database configuration
-         db_user = os.getenv('DB_USER')
-         db_password = os.getenv('DB_PASSWORD')
-         db_host = os.getenv('DB_HOST', 'localhost')
-         db_name = os.getenv('DB_NAME')
-
-         if not all([db_user, db_password, db_name]):
-             return None, "Missing environment variables for the database connection"
-
-         # Build the connection string
-         connection_string = f"mysql+pymysql://{db_user}:{db_password}@{db_host}/{db_name}"
-
-         # Create the SQLAlchemy engine and test the connection
-         engine = create_engine(connection_string)
-         with engine.connect() as conn:
-             conn.execute(text("SELECT 1"))
-
-         return connection_string, None
-     except Exception as e:
-         return None, str(e)
-
- def create_agent(llm, connection_string):
-     """Create the SQL agent."""
-     try:
-         if not llm or not connection_string:
-             return None, "LLM or database connection not provided"
-
-         # Create the SQL database wrapper
-         db = SQLDatabase.from_uri(connection_string)
-
-         # Create the SQL agent
-         agent = create_sql_agent(
-             llm=llm,
-             db=db,
-             agent_type="zero-shot-react-description",
-             verbose=True,
-             return_intermediate_steps=True
-         )
-
-         return agent, None
-     except Exception as e:
-         return None, str(e)
-
- def stream_agent_response(question: str, chat_history: List[List[str]] = None) -> Tuple[str, Optional[go.Figure]]:
-     """Process the agent response and generate a visualization when appropriate."""
-     try:
-         # Initialize components
-         llm, llm_error = initialize_llm()
-         if llm_error:
-             return f"Error initializing LLM: {llm_error}", None
-
-         connection_string, db_error = setup_database_connection()
-         if db_error:
-             return f"Database connection error: {db_error}", None
-
-         agent, agent_error = create_agent(llm, connection_string)
-         if agent_error:
-             return f"Error creating the agent: {agent_error}", None
-
-         # Run the query
-         response = agent.invoke({"input": question})
-
-         # Extract the answer
-         if hasattr(response, 'output'):
-             response_text = response.output
-         elif isinstance(response, dict) and 'output' in response:
-             response_text = response['output']
-         else:
-             response_text = str(response)
-
-         # Check whether there are SQL results that could be visualized
-         chart_fig = None
-         if hasattr(response, 'intermediate_steps'):
-             for step in response.intermediate_steps:
-                 if len(step) > 1 and 'sql_query' in str(step[0]).lower():
-                     # Try to run the query and build a chart
-                     try:
-                         query = str(step[0]).split('sql_query:')[1].split('\n')[0].strip()
-                         if 'SELECT' in query.upper():
-                             df = pd.read_sql_query(query, create_engine(connection_string))
-                             if len(df.columns) >= 2:
-                                 fig = px.bar(df, x=df.columns[0], y=df.columns[1])
-                                 chart_fig = fig
-                     except:
-                         pass
-
-         return response_text, chart_fig
-
-     except Exception as e:
-         return f"Error processing the request: {str(e)}", None
-
- def create_ui():
-     """Create the Gradio user interface."""
-     with gr.Blocks(title="SQL Assistant with Gemini", theme=gr.themes.Soft()) as demo:
-         gr.Markdown("# SQL Database Assistant")
-         gr.Markdown("Ask anything about your database in natural language")
-
-         with gr.Row():
-             with gr.Column(scale=2):
-                 chatbot = gr.Chatbot(
-                     label="Chat",
-                     type="messages",
-                     height=400
-                 )
-
-                 with gr.Row():
-                     question_input = gr.Textbox(
-                         label="Your question",
-                         placeholder="E.g.: How many registered users are there?",
-                         lines=2,
-                         scale=4
-                     )
-                     submit_button = gr.Button("Send", scale=1)
-
-             with gr.Column(scale=1):
-                 chart_display = gr.Plot(
-                     label="Data visualization",
-                     height=400
-                 )
-
-         # Hidden field for the streaming output state
-         streaming_output_display = gr.HTML(visible=False)
-
-         return demo, chatbot, chart_display, question_input, submit_button, streaming_output_display
-
  # In-memory message store
  message_store: Dict[str, str] = {}
 
@@ -226,11 +68,11 @@ def handle_ask():
      if llm_error:
          return jsonify({'error': f'Error initializing LLM: {llm_error}'}), 500
 
-     connection_string, db_error = setup_database_connection()
+     db_connection, db_error = setup_database_connection()
      if db_error:
          return jsonify({'error': f'Database connection error: {db_error}'}), 500
 
-     agent, agent_error = create_agent(llm, connection_string)
+     agent, agent_error = create_agent(llm, db_connection)
      if agent_error:
          return jsonify({'error': f'Error creating the agent: {agent_error}'}), 500
 
@@ -288,7 +130,7 @@ def create_application():
 
          return "", chat_history
 
-     def bot_response(chat_history: List[Dict[str, str]]) -> Tuple[List[Dict[str, str]], Optional[go.Figure]]:
+     async def bot_response(chat_history: List[Dict[str, str]]) -> Tuple[List[Dict[str, str]], Optional[go.Figure]]:
          """Generate bot response for messages-format chat history and return optional chart figure."""
          if not chat_history:
              return chat_history, None
@@ -320,7 +162,7 @@ def create_application():
              i += 1
 
          # Call the agent for this new user question
-         assistant_message, chart_fig = stream_agent_response(question, pair_history)
+         assistant_message, chart_fig = await stream_agent_response(question, pair_history)
 
          # Append assistant message back into messages history
          chat_history.append({"role": "assistant", "content": assistant_message})
@@ -342,7 +184,7 @@ def create_application():
          fn=user_message,
          inputs=[question_input, chatbot],
          outputs=[question_input, chatbot],
-         queue=False
+         queue=True
      ).then(
          fn=bot_response,
          inputs=[chatbot],
@@ -355,7 +197,7 @@ def create_application():
          fn=user_message,
          inputs=[question_input, chatbot],
          outputs=[question_input, chatbot],
-         queue=False
+         queue=True
      ).then(
          fn=bot_response,
          inputs=[chatbot],