blackopsrepl committed on
Commit
b9b1ca9
·
1 Parent(s): e3a1efe

chore: align gradio ui

Browse files
Files changed (1) hide show
  1. src/app.py +111 -275
src/app.py CHANGED
@@ -13,13 +13,13 @@ if not os.getenv("NEBIUS_API_KEY") or not os.getenv("NEBIUS_MODEL"):
13
  load_secrets("tests/secrets/creds.py")
14
 
15
 
16
- from handlers.web_backend import (
17
- load_data,
18
- show_solved,
19
- start_timer,
20
- auto_poll,
21
- show_mock_project_content,
22
- )
23
 
24
  from handlers.mcp_backend import process_message_and_attached_file
25
 
@@ -57,7 +57,6 @@ def app(debug: bool = False):
57
  )
58
 
59
  _draw_info_page(debug)
60
- # _draw_hackathon_page(debug)
61
 
62
  # Register the MCP tool as an API endpoint
63
  gr.api(process_message_and_attached_file)
@@ -66,298 +65,135 @@ def app(debug: bool = False):
66
 
67
 
68
  def _draw_info_page(debug: bool = False):
69
- with gr.Tab("Information"):
70
-
71
- def get_server_url():
72
- try:
73
- return gr.get_state().server_url + "/gradio_api/mcp/sse"
74
- except:
75
- return "http://localhost:7860/gradio_api/mcp/sse"
76
-
77
  gr.Markdown(
78
- f"""
79
- This is a demo of the Yuga Planner system.
80
-
81
- To use as an MCP server:
82
- 1. Register the MCP server with your client using the URL:
83
- ```
84
- {get_server_url()}
85
- ```
86
- 2. Call the tool from your client. Example:
87
- ```
88
- use yuga planner tool @tests/data/calendar.ics
89
- Task Description: Create a new AWS VPC
90
- ```
91
-
92
  """
93
- )
94
 
 
 
95
 
96
- def _draw_hackathon_page(debug: bool = False):
97
- with gr.Tab("Hackathon Agent Demo"):
98
- gr.Markdown("### SWE Team Task Scheduling Demo")
99
 
100
- gr.Markdown(
101
  """
102
- ## Instructions
103
- 1. Choose a project source - either upload your own project file(s) or select from our mock projects
104
- 2. Click 'Load Data' to parse, decompose, and estimate tasks
105
- 3. Click 'Solve' to generate an optimal schedule based on employee skills and availability
106
- 4. Review the results in the tables below
107
- """
108
- )
109
-
110
- # Project source selector
111
- project_source = gr.Radio(
112
- choices=["Upload Project Files", "Use Mock Projects"],
113
- value="Upload Project Files",
114
- label="Project Source",
115
  )
116
 
117
- # Configuration parameters
118
- with gr.Row():
119
- employee_count = gr.Number(
120
- label="Number of Employees",
121
- value=12,
122
- minimum=1,
123
- maximum=100,
124
- step=1,
125
- precision=0,
126
- )
127
- days_in_schedule = gr.Number(
128
- label="Days in Schedule",
129
- value=365,
130
- minimum=1,
131
- maximum=365,
132
- step=1,
133
- precision=0,
134
- )
135
-
136
- # File upload component (initially visible)
137
- with gr.Group(visible=True) as file_upload_group:
138
- file_upload = gr.File(
139
- label="Upload Project Files (Markdown)",
140
- file_types=[".md"],
141
- file_count="multiple",
142
- )
143
-
144
- # Mock projects dropdown (initially hidden)
145
- with gr.Group(visible=False) as mock_projects_group:
146
- # Get mock project names from ProjectService
147
- available_projects = MockProjectService.get_available_project_names()
148
- mock_project_dropdown = gr.Dropdown(
149
- choices=available_projects,
150
- label="Select Mock Projects (multiple selection allowed)",
151
- value=[available_projects[0]] if available_projects else [],
152
- multiselect=True,
153
- )
154
-
155
- # Accordion for viewing mock project content
156
- with gr.Accordion("πŸ“‹ Project Content Preview", open=False):
157
- mock_project_content_accordion = gr.Textbox(
158
- label="Project Content",
159
- interactive=False,
160
- lines=15,
161
- max_lines=20,
162
- show_copy_button=True,
163
- placeholder="Select projects above and expand this section to view content...",
164
- )
165
-
166
- # Auto-update content when projects change
167
- mock_project_dropdown.change(
168
- show_mock_project_content,
169
- inputs=[mock_project_dropdown],
170
- outputs=[mock_project_content_accordion],
171
- )
172
 
173
- # Log Terminal - Always visible for streaming logs
174
- gr.Markdown("## Live Log Terminal")
 
 
 
 
175
 
176
- # Show debug status
177
- if debug:
178
  gr.Markdown(
179
- "πŸ› **Debug Mode Enabled** - Showing detailed logs including DEBUG messages"
180
- )
181
- else:
182
- gr.Markdown(
183
- "ℹ️ **Normal Mode** - Showing INFO, WARNING, and ERROR messages"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
184
  )
185
 
186
- log_terminal = gr.Textbox(
187
- label="Processing Logs",
188
- interactive=False,
189
- lines=8,
190
- max_lines=15,
191
- show_copy_button=True,
192
- placeholder="Logs will appear here during data loading and solving...",
193
- )
194
 
195
- # Toggle visibility based on project source selection
196
- def toggle_visibility(choice):
197
- if choice == "Upload Project Files":
198
- return gr.update(visible=True), gr.update(visible=False)
199
- else:
200
- return gr.update(visible=False), gr.update(visible=True)
201
-
202
- project_source.change(
203
- toggle_visibility,
204
- inputs=[project_source],
205
- outputs=[file_upload_group, mock_projects_group],
206
- )
207
 
208
- # State for LLM output, persists per session
209
- llm_output_state = gr.State(value=None)
210
- job_id_state = gr.State(value=None)
211
- status_text = gr.Textbox(
212
- label="Solver Status",
213
- interactive=False,
214
- lines=8,
215
- max_lines=20,
216
- show_copy_button=True,
217
- )
218
 
219
- with gr.Row():
220
- load_btn = gr.Button("Load Data")
221
- solve_btn = gr.Button("Solve", interactive=False) # Initially disabled
222
-
223
- gr.Markdown("## Employees")
224
- employees_table = gr.Dataframe(label="Employees", interactive=False)
225
-
226
- gr.Markdown("## Tasks")
227
- schedule_table = gr.Dataframe(label="Tasks Table", interactive=False)
228
-
229
- # Outputs: always keep state as last output
230
- outputs = [
231
- employees_table,
232
- schedule_table,
233
- job_id_state,
234
- status_text,
235
- llm_output_state,
236
- log_terminal,
237
- ]
238
-
239
- # Outputs for load_data that also enables solve button
240
- load_outputs = outputs + [solve_btn]
241
-
242
- # Create wrapper function to pass debug flag to auto_poll
243
- async def auto_poll_with_debug(job_id, llm_output):
244
- result = await auto_poll(job_id, llm_output, debug=debug)
245
- # auto_poll now returns 6 values including logs
246
- return result
247
-
248
- # Timer for polling (not related to state)
249
- timer = gr.Timer(2, active=False)
250
- timer.tick(
251
- auto_poll_with_debug,
252
- inputs=[job_id_state, llm_output_state],
253
- outputs=outputs, # This now includes log_terminal updates
254
- )
255
 
256
- # Create wrapper function to pass debug flag to load_data
257
- async def load_data_with_debug(
258
- project_source,
259
- file_obj,
260
- mock_projects,
261
- employee_count,
262
- days_in_schedule,
263
- llm_output,
264
- progress=gr.Progress(),
265
- ):
266
- async for result in load_data(
267
- project_source,
268
- file_obj,
269
- mock_projects,
270
- employee_count,
271
- days_in_schedule,
272
- llm_output,
273
- debug=debug,
274
- progress=progress,
275
- ):
276
- yield result
277
-
278
- # Use state as both input and output
279
- load_btn.click(
280
- load_data_with_debug,
281
- inputs=[
282
- project_source,
283
- file_upload,
284
- mock_project_dropdown,
285
- employee_count,
286
- days_in_schedule,
287
- llm_output_state,
288
- ],
289
- outputs=load_outputs,
290
- api_name="load_data",
291
- )
292
 
293
- # Create wrapper function to pass debug flag to show_solved
294
- async def show_solved_with_debug(state_data, job_id):
295
- return await show_solved(state_data, job_id, debug=debug)
296
 
297
- solve_btn.click(
298
- show_solved_with_debug,
299
- inputs=[llm_output_state, job_id_state],
300
- outputs=outputs,
301
- ).then(start_timer, inputs=[job_id_state, llm_output_state], outputs=timer)
302
 
303
- if debug:
304
- gr.Markdown("### πŸ› Debug Controls")
305
- gr.Markdown(
306
- "These controls help test the centralized logging system and state management."
307
- )
 
308
 
309
- def debug_set_state(state):
310
- logger.info("DEBUG: Setting state to test_value")
311
- logger.debug("DEBUG: Detailed state operation in progress")
312
- return "Debug: State set!", "test_value"
313
-
314
- def debug_show_state(state):
315
- logger.info("DEBUG: Current state is %s", state)
316
- logger.debug("DEBUG: State retrieval operation completed")
317
- return f"Debug: Current state: {state}", gr.update()
318
-
319
- def debug_test_logging():
320
- """Test all logging levels for UI demonstration"""
321
- logger.debug("πŸ› DEBUG: This is a debug message")
322
- logger.info("ℹ️ INFO: This is an info message")
323
- logger.warning("⚠️ WARNING: This is a warning message")
324
- logger.error("❌ ERROR: This is an error message")
325
- return "Generated test log messages at all levels"
326
-
327
- debug_out = gr.Textbox(label="Debug Output")
328
-
329
- with gr.Row():
330
- debug_set_btn = gr.Button("Debug Set State")
331
- debug_show_btn = gr.Button("Debug Show State")
332
- debug_log_btn = gr.Button("Test Log Levels")
333
-
334
- debug_set_btn.click(
335
- debug_set_state,
336
- inputs=[llm_output_state],
337
- outputs=[debug_out, llm_output_state],
338
- )
339
- debug_show_btn.click(
340
- debug_show_state,
341
- inputs=[llm_output_state],
342
- outputs=[debug_out, gr.State()],
343
- )
344
- debug_log_btn.click(
345
- debug_test_logging,
346
- inputs=[],
347
- outputs=[debug_out],
348
  )
349
 
 
 
 
 
 
350
 
351
- def set_test_state():
352
- logger.debug("Setting state to test_value")
353
- app_state.set("test_key", "test_value")
354
- return "State set to test_value"
355
-
356
 
357
- def get_test_state():
358
- state = app_state.get("test_key", "No state found")
359
- logger.debug("Current state is %s", state)
360
- return f"Current state: {state}"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
361
 
362
 
363
  if __name__ == "__main__":
 
13
  load_secrets("tests/secrets/creds.py")
14
 
15
 
16
+ # from handlers.web_backend import (
17
+ # load_data,
18
+ # show_solved,
19
+ # start_timer,
20
+ # auto_poll,
21
+ # show_mock_project_content,
22
+ # )
23
 
24
  from handlers.mcp_backend import process_message_and_attached_file
25
 
 
57
  )
58
 
59
  _draw_info_page(debug)
 
60
 
61
  # Register the MCP tool as an API endpoint
62
  gr.api(process_message_and_attached_file)
 
65
 
66
 
67
  def _draw_info_page(debug: bool = False):
68
+ with gr.Tab("πŸ“‹ Information"):
 
 
 
 
 
 
 
69
  gr.Markdown(
 
 
 
 
 
 
 
 
 
 
 
 
 
 
70
  """
71
+ # 🐍 Yuga Planner
72
 
73
+ **Yuga Planner** is a neuro-symbolic system that combines AI agents with constraint optimization
74
+ for intelligent scheduling.
75
 
76
+ ## πŸ”Œ **Using as MCP Tool**
 
 
77
 
78
+ You can use Yuga Planner as an MCP server to integrate scheduling into your AI workflows.
79
  """
 
 
 
 
 
 
 
 
 
 
 
 
 
80
  )
81
 
82
+ def get_server_url():
83
+ try:
84
+ return gr.get_state().server_url + "/gradio_api/mcp/sse"
85
+ except:
86
+ return "http://localhost:7860/gradio_api/mcp/sse"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
 
88
+ gr.Textbox(
89
+ value=get_server_url(),
90
+ label="🌐 MCP Server Endpoint",
91
+ interactive=False,
92
+ max_lines=1,
93
+ )
94
 
95
+ with gr.Accordion("πŸ“ MCP Setup Instructions", open=True):
 
96
  gr.Markdown(
97
+ """
98
+ ### 1. **Cursor Setup Instructions (should work from any MCP client!)**
99
+
100
+ **For Cursor AI Editor:**
101
+ 1. Create or edit your MCP configuration file: `~/.cursor/mcp.json`
102
+ 2. Add the yuga-planner server configuration:
103
+ ```json
104
+ {
105
+ "mcpServers": {
106
+ "yuga-planner": {
107
+ "url": -> "Insert the above endpoint URL here"
108
+ }
109
+ }
110
+ }
111
+ ```
112
+ 3. If you already have other MCP servers, add `yuga-planner` to the existing `mcpServers` object
113
+ 4. Restart Cursor to load the new configuration
114
+ 5. The tool will be available in your chat
115
+
116
+ ### 2. **Usage Example**
117
+ """
118
  )
119
 
120
+ gr.Textbox(
121
+ value="""use yuga-planner mcp tool
122
+ Task Description: Create a new EC2 instance on AWS
 
 
 
 
 
123
 
124
+ [Attach your calendar.ics file to provide existing commitments]
 
 
 
 
 
 
 
 
 
 
 
125
 
126
+ Tool Response: Optimized schedule created - EC2 setup task assigned to
127
+ available time slots around your existing meetings
128
+ [Returns JSON response with schedule data]
 
 
 
 
 
 
 
129
 
130
+ User: show all fields as a table, ordered by start date
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
131
 
132
+ [Displays formatted schedule table with all tasks and calendar events]""",
133
+ label="πŸ’¬ Cursor Chat Usage Example",
134
+ interactive=False,
135
+ lines=10,
136
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
137
 
138
+ gr.Markdown(
139
+ """
140
+ ### 3. **What it does**
141
 
142
+ **Personal Task Scheduling with Calendar Integration:**
 
 
 
 
143
 
144
+ 1. πŸ“… **Parses your calendar** (.ics file) for existing commitments
145
+ 2. πŸ€– **AI breaks down your task** into actionable subtasks using LLamaIndex + Nebius AI
146
+ 3. ⚑ **Constraint-based optimization** finds optimal time slots around your existing schedule
147
+ 4. πŸ“‹ **Returns complete solved schedule** integrated with your personal calendar events
148
+ 5. πŸ•˜ **Respects business hours** (9:00-18:00) and excludes weekends automatically
149
+ 6. πŸ“Š **JSON response format** - Ask to "show all fields as a table, ordered by start date" for readable formatting
150
 
151
+ **Designed for**: Personal productivity and task planning around existing appointments in Cursor.
152
+ """
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
153
  )
154
 
155
+ if debug:
156
+ with gr.Tab("πŸ› Debug Info"):
157
+ gr.Markdown(
158
+ """
159
+ # πŸ› Debug Information
160
 
161
+ **Debug Mode Enabled** - Additional system information and controls available.
162
+ """
163
+ )
 
 
164
 
165
+ with gr.Accordion("πŸ”§ **Environment Details**", open=True):
166
+ import os
167
+
168
+ env_info = f"""
169
+ **🐍 Python Environment**
170
+ - Debug Mode: {debug}
171
+ - YUGA_DEBUG: {os.getenv('YUGA_DEBUG', 'Not Set')}
172
+ - Nebius API Key: {'βœ… Set' if os.getenv('NEBIUS_API_KEY') else '❌ Not Set'}
173
+ - Nebius Model: {os.getenv('NEBIUS_MODEL', 'Not Set')}
174
+
175
+ **🌐 Server Information**
176
+ - MCP Endpoint: {get_server_url()}
177
+ - Current Working Directory: {os.getcwd()}
178
+ """
179
+ gr.Markdown(env_info)
180
+
181
+ with gr.Accordion("πŸ“Š **System Status**", open=False):
182
+ gr.Markdown(
183
+ """
184
+ **πŸ”„ Service Status**
185
+ - DataService: βœ… Active
186
+ - ScheduleService: βœ… Active
187
+ - StateService: βœ… Active
188
+ - LoggingService: βœ… Active
189
+ - MockProjectService: βœ… Active
190
+
191
+ **πŸ”Œ Integration Status**
192
+ - MCP Server: βœ… Enabled
193
+ - Gradio API: βœ… Active
194
+ - Real-time Logs: βœ… Streaming
195
+ """
196
+ )
197
 
198
 
199
  if __name__ == "__main__":