Create app.py
app.py
ADDED
import os
import tempfile
import gradio as gr

# Azure AI Agents SDK
from azure.core.credentials import AzureKeyCredential
from azure.ai.agents import AgentsClient
from azure.ai.agents.models import (
    FilePurpose,
    CodeInterpreterTool,
    ListSortOrder,
    MessageRole,
)

def init_agent(endpoint: str, api_key: str, model_deployment: str, data_file) -> dict:
    """
    Initialize an Azure AI Agent with an optional data file for the Code Interpreter.
    Returns a session dict with the client, agent, thread, and bookkeeping.
    """
    if not endpoint or not api_key or not model_deployment:
        raise ValueError("Please provide endpoint, key, and model deployment name.")

    # Create client (API key auth)
    client = AgentsClient(
        endpoint=endpoint.strip(),
        credential=AzureKeyCredential(api_key.strip()),
    )

    # Persist the uploaded data to a temporary path so it can be uploaded to the service
    temp_path = None
    if data_file is not None:
        # gr.File(type="binary") passes raw bytes; older Gradio versions pass a file-like object
        content = data_file if isinstance(data_file, (bytes, bytearray)) else data_file.read()
        with tempfile.NamedTemporaryFile(delete=False) as tmp:
            tmp.write(content)
            temp_path = tmp.name

    # No `with client:` block here -- the context manager would close the client,
    # and the session below keeps using it for later calls.
    code_interpreter = None

    if temp_path:
        # Upload file for agent use
        up = client.files.upload_and_poll(file_path=temp_path, purpose=FilePurpose.AGENTS)
        # Create the tool bound to this file
        code_interpreter = CodeInterpreterTool(file_ids=[up.id])

    # Define the agent (attach tools if we created one)
    agent = client.create_agent(
        model=model_deployment.strip(),
        name="data-agent",
        instructions=(
            "You are an AI agent that analyzes the uploaded data when present. "
            "Use Python via the Code Interpreter to compute statistical metrics or produce "
            "text-based charts when asked. If no file is provided, proceed with normal reasoning."
        ),
        tools=(code_interpreter.definitions if code_interpreter else None),
        tool_resources=(code_interpreter.resources if code_interpreter else None),
    )

    # Create a thread for the conversation
    thread = client.threads.create()

    # Keep the client open for subsequent calls (no context manager here)
    session = {
        "endpoint": endpoint.strip(),
        "api_key": api_key.strip(),
        "model": model_deployment.strip(),
        "client": client,
        "agent_id": agent.id,
        "thread_id": thread.id,
        "has_file": data_file is not None,
        "temp_path": temp_path,  # to clean up later if we want
    }
    return session

def send_message(user_msg: str, session: dict):
    """
    Send a user message to the existing thread and return the agent's latest reply
    as well as a printable conversation history.
    """
    if not session or "client" not in session:
        raise ValueError("Agent is not initialized. Click 'Connect & Prepare' first.")

    client: AgentsClient = session["client"]
    agent_id = session["agent_id"]
    thread_id = session["thread_id"]

    # Create the user message on the thread
    client.messages.create(
        thread_id=thread_id,
        role="user",
        content=user_msg,
    )

    # Run the agent on the thread and wait for completion
    run = client.runs.create_and_process(thread_id=thread_id, agent_id=agent_id)

    if getattr(run, "status", None) == "failed":
        last_error = getattr(run, "last_error", "Unknown error")
        return f"Run failed: {last_error}", ""

    # Get the last agent message text
    last_msg = client.messages.get_last_message_text_by_role(
        thread_id=thread_id,
        role=MessageRole.AGENT,
    )
    agent_reply = last_msg.text.value if last_msg else "(No reply text found.)"

    # Build a readable conversation history
    history_lines = []
    messages = client.messages.list(thread_id=thread_id, order=ListSortOrder.ASCENDING)
    for m in messages:
        if m.text_messages:
            last_text = m.text_messages[-1].text.value
            history_lines.append(f"{m.role}: {last_text}")
    history_str = "\n\n".join(history_lines)

    return agent_reply, history_str

def teardown(session: dict):
    """
    Delete the agent (and optionally the temp file) to avoid unnecessary Azure costs.
    Note: threads are retained by the service; deleting the agent is the main cleanup step.
    """
    if not session:
        return "Nothing to clean up."

    msg = []
    try:
        client: AgentsClient = session.get("client")
        if client:
            with client:
                agent_id = session.get("agent_id")
                if agent_id:
                    client.delete_agent(agent_id)
                    msg.append("Deleted agent.")
    except Exception as e:
        msg.append(f"Cleanup warning: {e}")

    # Remove temp file if created
    try:
        temp_path = session.get("temp_path")
        if temp_path and os.path.exists(temp_path):
            os.remove(temp_path)
            msg.append("Removed temp file.")
    except Exception as e:
        msg.append(f"Temp cleanup warning: {e}")

    return " ".join(msg) if msg else "Cleanup complete."

# ----------------- Gradio UI -----------------

with gr.Blocks(title="Azure AI Agent (Endpoint+Key) — Gradio") as demo:
    gr.Markdown(
        "## Azure AI Agent (Code Interpreter Ready)\n"
        "Enter your **Project Endpoint** and **Key**, select your **Model Deployment** (e.g., `gpt-4o`), "
        "optionally upload a data file (CSV/TXT), then chat.\n"
        "Click **Connect & Prepare** once, then send prompts in the chat."
    )

    with gr.Row():
        endpoint = gr.Textbox(label="Project Endpoint", placeholder="https://<your-project-endpoint>")
        api_key = gr.Textbox(label="Project Key", placeholder="paste your key", type="password")

    with gr.Row():
        model = gr.Textbox(label="Model Deployment Name", value="gpt-4o")
        data_file = gr.File(
            label="Optional data file for Code Interpreter (txt/csv)",
            file_types=[".txt", ".csv"],
            type="binary",
        )

    session_state = gr.State(value=None)

    connect_btn = gr.Button("🔌 Connect & Prepare Agent", variant="primary")
    connect_status = gr.Markdown("")

    with gr.Row():
        # height is a constructor argument; the old .style(height=...) API no longer exists
        chatbot = gr.Chatbot(height=420, label="Conversation")
        user_input = gr.Textbox(label="Your message", placeholder="Ask a question or request a chart…")
    with gr.Row():
        send_btn = gr.Button("Send ▶")
        cleanup_btn = gr.Button("Delete Agent & Cleanup 🧹")

    history = gr.Textbox(label="Conversation Log (chronological)", lines=12)

    # Callbacks
    def on_connect(ep, key, mdl, f):
        try:
            sess = init_agent(ep, key, mdl, f)
            return sess, "✅ Connected. Agent and thread are ready."
        except Exception as e:
            return None, f"❌ Connection error: {e}"

    connect_btn.click(
        fn=on_connect,
        inputs=[endpoint, api_key, model, data_file],
        outputs=[session_state, connect_status],
    )

    def on_send(msg, session, chat_hist):
        if not msg:
            return chat_hist, gr.update(value="Please enter a message.")
        try:
            reply, log = send_message(msg, session)
            chat_hist = (chat_hist or []) + [[msg, reply]]
            return chat_hist, gr.update(value=log)
        except Exception as e:
            return chat_hist, gr.update(value=f"❌ Error: {e}")

    send_btn.click(
        fn=on_send,
        inputs=[user_input, session_state, chatbot],
        outputs=[chatbot, history],
    )

    def on_cleanup(session):
        try:
            msg = teardown(session)
            return None, f"🧹 {msg}"
        except Exception as e:
            return session, f"⚠️ Cleanup error: {e}"

    cleanup_btn.click(
        fn=on_cleanup,
        inputs=[session_state],
        outputs=[session_state, connect_status],
    )


if __name__ == "__main__":
    demo.launch()
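
Usage note: a minimal sketch (not part of app.py) of how the helper functions above could be driven outside the Gradio UI, assuming placeholder endpoint, key, and deployment values; it also assumes the gradio, azure-ai-agents, and azure-core packages are installed.

    # Hypothetical smoke test for the helpers in app.py; all credential values are placeholders.
    from app import init_agent, send_message, teardown

    session = init_agent(
        endpoint="https://<your-project-endpoint>",  # placeholder
        api_key="<your-project-key>",                # placeholder
        model_deployment="gpt-4o",                   # assumes a deployment with this name exists
        data_file=None,                              # no Code Interpreter file in this sketch
    )
    reply, log = send_message("What can you do with the Code Interpreter?", session)
    print(reply)
    print(teardown(session))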