Jeremy Live committed
Commit · dadc6d1
Parent(s): 44abbd0

v2
README.md
CHANGED
@@ -1,12 +1,29 @@
 ---
-title:
-emoji:
+title: Chatbot SQL Agent with Gemini
+emoji: 🔍
 colorFrom: blue
-colorTo:
+colorTo: purple
 sdk: gradio
-sdk_version: "
+sdk_version: "3.50.2"
 app_file: app.py
 pinned: false
 ---
 
-
+# Chatbot SQL Agent with Gemini
+
+A powerful chatbot that can answer questions by querying your SQL database using Google's Gemini model.
+
+## Features
+
+- Natural language to SQL query conversion
+- Interactive chat interface
+- Direct database connectivity
+- Powered by Google's Gemini AI
+
+## Setup
+
+1. Set up your environment variables in `.env` file
+2. Install dependencies: `pip install -r requirements.txt`
+3. Run the app: `python app.py`
+
+Check out the [configuration reference](https://huggingface.co/docs/hub/spaces-config-reference) for more options.
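The Setup step references a `.env` file, but the commit does not show its contents. Below is a minimal sketch of how those environment variables might be loaded on startup; the variable names (`GOOGLE_API_KEY`, `DATABASE_URL`) and the use of `python-dotenv` are illustrative assumptions, not taken from this repository.

```python
# Hypothetical startup snippet: loads the Gemini API key and database connection
# string from a local .env file. The variable names are assumptions, not from app.py.
import os

from dotenv import load_dotenv  # provided by the python-dotenv package

load_dotenv()  # read key=value pairs from .env into the process environment

GEMINI_API_KEY = os.getenv("GOOGLE_API_KEY")  # assumed variable name
DATABASE_URL = os.getenv("DATABASE_URL")      # assumed variable name

if not GEMINI_API_KEY or not DATABASE_URL:
    raise RuntimeError("Set GOOGLE_API_KEY and DATABASE_URL in your .env file")
```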
app.py
CHANGED
@@ -208,7 +208,9 @@ async def stream_agent_response(question: str, chat_history: List) -> Tuple[List
             f"3. El modelo de lenguaje esté disponible\n\n"
             f"Error: {agent_error}"
         )
-
+        chat_history = chat_history + [[question, error_msg]]
+        yield chat_history, gr.update(visible=False)
+        return
 
     try:
         # Agregar un mensaje de "pensando"
@@ -234,11 +236,12 @@ async def stream_agent_response(question: str, chat_history: List) -> Tuple[List
 
         # Actualizar el historial con la respuesta completa
         chat_history[-1][1] = response_text
-
+        yield chat_history, gr.update(visible=False)
 
     except Exception as e:
         error_msg = f"## ❌ Error\n\nOcurrió un error al procesar tu solicitud:\n\n```\n{str(e)}\n```"
         chat_history[-1][1] = error_msg
+        yield chat_history, gr.update(visible=False)
     return chat_history, gr.update(visible=False)
 
 # Custom CSS for the app
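The app.py change makes `stream_agent_response` yield its intermediate states: the error path now appends the error to the chat history, yields it, and returns early, while the success and exception paths each yield the updated history before the final return. This is the usual Gradio streaming pattern, sketched below with a hypothetical `run_agent` coroutine and a simplified Blocks layout (both are illustrative assumptions, not the Space's actual code).

```python
# Sketch of the async-generator streaming pattern this commit moves toward.
# `run_agent` is a hypothetical stand-in for the Space's Gemini SQL agent.
import asyncio
from typing import AsyncIterator, List, Tuple

import gradio as gr


async def run_agent(question: str) -> str:
    """Hypothetical agent call; the real app turns the question into SQL via Gemini."""
    await asyncio.sleep(0.5)
    return f"(answer for: {question})"


async def respond(
    question: str, chat_history: List[List[str]]
) -> AsyncIterator[Tuple[List[List[str]], dict]]:
    # Append a placeholder turn and yield immediately so the UI shows progress.
    chat_history = chat_history + [[question, "⏳ Pensando..."]]
    yield chat_history, gr.update(visible=True)

    try:
        chat_history[-1][1] = await run_agent(question)
    except Exception as e:  # mirror the commit: surface errors inside the chat
        chat_history[-1][1] = f"## ❌ Error\n\n{e}"

    # Final yield replaces the placeholder with the answer (or the error text).
    yield chat_history, gr.update(visible=False)


with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    status = gr.Markdown("Procesando...", visible=False)
    question_box = gr.Textbox(label="Pregunta")
    question_box.submit(respond, inputs=[question_box, chatbot], outputs=[chatbot, status])

if __name__ == "__main__":
    demo.launch()
```

Each `yield` pushes a fresh copy of the history to the `Chatbot` component, which is why the commit adds one after both the success update and the error update instead of relying on a single final return.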