blackopsrepl committed on
Commit
ee5de8e
·
1 Parent(s): a0b52a3

feat: improve ui

Browse files

- Improved chat interface layout with the file upload positioned to the right of the input textbox
- Added equal_height=True to Row for better component alignment
- Enhanced textbox with container=True, lines=1, and max_lines=3 properties
- Fixed file upload component with increased height (80px) and container=True for proper content scaling

Files changed (1) hide show
  1. src/ui/pages/chat.py +115 -44
src/ui/pages/chat.py CHANGED
@@ -1,13 +1,11 @@
1
- import os, json, re
2
  import gradio as gr
3
- import asyncio
4
- from typing import List, Dict, Any, Union, Generator
5
- from contextlib import AsyncExitStack
6
  from datetime import datetime, date
7
 
8
  import requests
9
 
10
- from handlers.mcp_backend import process_message_and_attached_file
11
  from handlers.tool_call_handler import create_tool_call_handler
12
  from services.mcp_client import MCPClientService
13
 
@@ -80,13 +78,16 @@ def draw_chat_page(debug: bool = False):
80
  "NEBIUS_MODEL or NEBIUS_API_KEY not found in environment variables"
81
  )
82
 
83
- with gr.Tab("πŸ’¬ Chat"):
84
  gr.Markdown(
85
  """
86
- # πŸ’¬ Chat with Yuga Planner
 
 
 
87
 
88
- This chatbot can help you with general questions and also schedule tasks around your calendar.
89
- To use scheduling features, just describe your task and optionally upload a calendar .ics file.
90
  """
91
  )
92
 
@@ -106,7 +107,7 @@ def draw_chat_page(debug: bool = False):
106
  _tool_assembler, _tool_processor = create_tool_call_handler(_mcp_client)
107
 
108
  # Create chat interface components
109
- chatbot, msg, clear, calendar_file = create_chat_interface()
110
 
111
  # Create parameter controls
112
  (
@@ -116,8 +117,12 @@ def draw_chat_page(debug: bool = False):
116
  top_p_slider,
117
  ) = create_chatbot_parameters()
118
 
119
- msg.submit(
120
- user_message, [msg, chatbot, calendar_file], [msg, chatbot], queue=False
 
 
 
 
121
  ).then(
122
  bot_response,
123
  [
@@ -127,25 +132,59 @@ def draw_chat_page(debug: bool = False):
127
  temperature_slider,
128
  top_p_slider,
129
  ],
130
- chatbot,
131
  show_progress=True,
132
  )
133
 
134
- clear.click(lambda: [], None, chatbot, queue=False)
 
 
 
 
 
 
 
 
 
 
 
 
135
 
136
 
137
- def create_chat_interface() -> tuple[gr.Chatbot, gr.Textbox, gr.Button, gr.File]:
 
 
138
  """Create and return the chat interface components"""
139
  chatbot = gr.Chatbot(type="messages")
140
- msg = gr.Textbox(
141
- label="Your message",
142
- placeholder="Type your message here... For scheduling, describe your task and optionally upload a calendar file.",
143
- )
144
- calendar_file = gr.File(
145
- label="πŸ“… Calendar File (.ics)", file_types=[".ics"], visible=True
146
- )
147
- clear = gr.Button("Clear")
148
- return chatbot, msg, clear, calendar_file
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
149
 
150
 
151
  def create_chatbot_parameters() -> tuple[gr.Textbox, gr.Slider, gr.Slider, gr.Slider]:
@@ -191,12 +230,17 @@ def user_message(message, history, calendar_file_obj):
191
  logger.error(f"Error reading calendar file: {e}")
192
  enhanced_message += f"\n\n[Calendar file upload failed: {str(e)}]"
193
 
194
- return "", history + [{"role": "user", "content": enhanced_message}]
 
 
 
 
 
195
 
196
 
197
  def bot_response(history, system_message, max_tokens, temperature, top_p):
198
  if not history:
199
- return history
200
 
201
  # Convert messages format to tuples for the respond function
202
  history_tuples = []
@@ -229,7 +273,20 @@ def bot_response(history, system_message, max_tokens, temperature, top_p):
229
  for response_chunk in response_gen:
230
  updated_history = history.copy()
231
  updated_history[-1] = {"role": "assistant", "content": response_chunk}
232
- yield updated_history
 
 
 
 
 
 
 
 
 
 
 
 
 
233
 
234
  except Exception as e:
235
  logger.error(f"Error in bot_response: {e}")
@@ -238,7 +295,11 @@ def bot_response(history, system_message, max_tokens, temperature, top_p):
238
  logger.error(f"Full traceback: {traceback.format_exc()}")
239
  error_history = history.copy()
240
  error_history[-1] = {"role": "assistant", "content": f"Error: {str(e)}"}
241
- yield error_history
 
 
 
 
242
 
243
 
244
  def respond(
@@ -352,11 +413,14 @@ def respond(
352
  if "choices" in chunk and len(chunk["choices"]) > 0:
353
  delta = chunk["choices"][0].get("delta", {})
354
  content = delta.get("content", "")
 
355
  if content:
356
  response_text += content
 
357
  # For scheduling requests, include essential logs inline
358
  if is_scheduling_request:
359
  session_logs = get_session_logs()
 
360
  if session_logs:
361
  # Show only new logs since last yield
362
  latest_logs = (
@@ -367,9 +431,12 @@ def respond(
367
  logs_text = "\n".join(
368
  f" {log}" for log in latest_logs
369
  )
 
370
  yield response_text + f"\n\n{logs_text}"
 
371
  else:
372
  yield response_text
 
373
  else:
374
  yield response_text
375
 
@@ -414,16 +481,25 @@ def respond(
414
  try:
415
  # Extract task description from message
416
  task_description = message
417
- calendar_content = "none"
418
 
419
  # Extract calendar data if available
420
  calendar_match = re.search(r"\[CALENDAR_DATA:([^\]]+)\]", message)
 
421
  if calendar_match:
422
  calendar_content = calendar_match.group(1)
 
 
 
 
 
 
 
423
 
424
  # Show essential task processing logs inline
425
  session_logs = get_session_logs()
426
  processing_status = ""
 
427
  if session_logs:
428
  latest_logs = (
429
  session_logs[-2:] if len(session_logs) > 2 else session_logs
@@ -437,9 +513,6 @@ def respond(
437
  logger.info("About to call MCP scheduling tool directly")
438
 
439
  # Add timeout to prevent hanging
440
- import asyncio
441
- import concurrent.futures
442
-
443
  def call_with_timeout():
444
  try:
445
  return loop.run_until_complete(
@@ -658,19 +731,19 @@ def respond(
658
  else:
659
  tool_response = f"""
660
 
661
- πŸ“… **Schedule Generated Successfully!**
662
 
663
- **Task:** {task_description}
664
- **Calendar Events Processed:** {len(calendar_entries)}
665
- **Total Scheduled Items:** {len(schedule)}
666
 
667
- ⚠️ **No schedule items to display** - This may indicate the task was completed or no scheduling was needed.
668
 
669
- **Raw Result:**
670
- ```json
671
- {safe_json_dumps(result, indent=2)[:1000]}
672
- ```
673
- """
674
 
675
  response_text += tool_response
676
  logger.info("Added success message with table to response")
@@ -688,7 +761,6 @@ def respond(
688
 
689
  except Exception as e:
690
  logger.error(f"Direct scheduling call failed: {e}")
691
- import traceback
692
 
693
  logger.error(f"Full traceback: {traceback.format_exc()}")
694
  tool_response = f"\n\n❌ **Scheduling failed:** {str(e)}"
@@ -702,7 +774,6 @@ def respond(
702
 
703
  except Exception as e:
704
  logger.error(f"Error in chat response: {e}")
705
- import traceback
706
 
707
  logger.error(f"Full traceback: {traceback.format_exc()}")
708
  yield f"Error: {str(e)}"
 
1
+ import os, json, re, traceback, asyncio
2
  import gradio as gr
3
+
4
+ from typing import Generator
 
5
  from datetime import datetime, date
6
 
7
  import requests
8
 
 
9
  from handlers.tool_call_handler import create_tool_call_handler
10
  from services.mcp_client import MCPClientService
11
 
 
78
  "NEBIUS_MODEL or NEBIUS_API_KEY not found in environment variables"
79
  )
80
 
81
+ with gr.Tab("πŸ’¬ Chat Agent Demo"):
82
  gr.Markdown(
83
  """
84
+ # πŸ’¬ Chat Agent Demo
85
+
86
+ This is a chat agent demo for Yuga Planner!
87
+ Insert a task description to have the agent schedule it standalone or around your calendar.
88
 
89
+ If you provide a calendar file, the schedule will start from the first available time slot.
90
+ If you don't, the schedule will start from the current time.
91
  """
92
  )
93
 
 
107
  _tool_assembler, _tool_processor = create_tool_call_handler(_mcp_client)
108
 
109
  # Create chat interface components
110
+ chatbot, msg, clear, stop, calendar_file = create_chat_interface()
111
 
112
  # Create parameter controls
113
  (
 
117
  top_p_slider,
118
  ) = create_chatbot_parameters()
119
 
120
+ # Handle message submission
121
+ submit_event = msg.submit(
122
+ user_message,
123
+ [msg, chatbot, calendar_file],
124
+ [msg, chatbot, msg, stop],
125
+ queue=False,
126
  ).then(
127
  bot_response,
128
  [
 
132
  temperature_slider,
133
  top_p_slider,
134
  ],
135
+ [chatbot, msg, stop],
136
  show_progress=True,
137
  )
138
 
139
+ # Handle clear button
140
+ def clear_chat():
141
+ return [], gr.update(interactive=True), gr.update(visible=False)
142
+
143
+ clear.click(clear_chat, None, [chatbot, msg, stop], queue=False)
144
+
145
+ # Handle stop button
146
+ def stop_processing():
147
+ return gr.update(interactive=True), gr.update(visible=False)
148
+
149
+ stop.click(
150
+ stop_processing, None, [msg, stop], queue=False, cancels=[submit_event]
151
+ )
152
 
153
 
154
+ def create_chat_interface() -> tuple[
155
+ gr.Chatbot, gr.Textbox, gr.Button, gr.Button, gr.File
156
+ ]:
157
  """Create and return the chat interface components"""
158
  chatbot = gr.Chatbot(type="messages")
159
+
160
+ # Message input row with calendar upload on the right - improved layout
161
+ with gr.Row(equal_height=True):
162
+ msg = gr.Textbox(
163
+ label="Insert a task description",
164
+ placeholder="Ex.: Create a new EC2 instance on AWS",
165
+ interactive=True,
166
+ scale=5, # Takes up most of the row
167
+ container=True,
168
+ lines=1,
169
+ max_lines=3,
170
+ )
171
+ calendar_file = gr.File(
172
+ label="πŸ“… Calendar",
173
+ file_types=[".ics"],
174
+ visible=True,
175
+ scale=1, # Compact size
176
+ height=80, # Larger height to accommodate content
177
+ file_count="single",
178
+ container=True,
179
+ elem_id="calendar-upload",
180
+ )
181
+
182
+ # Control buttons row
183
+ with gr.Row():
184
+ clear = gr.Button("Clear", variant="secondary")
185
+ stop = gr.Button("Stop", variant="stop", visible=False)
186
+
187
+ return chatbot, msg, clear, stop, calendar_file
188
 
189
 
190
  def create_chatbot_parameters() -> tuple[gr.Textbox, gr.Slider, gr.Slider, gr.Slider]:
 
230
  logger.error(f"Error reading calendar file: {e}")
231
  enhanced_message += f"\n\n[Calendar file upload failed: {str(e)}]"
232
 
233
+ return (
234
+ "", # Clear input
235
+ history + [{"role": "user", "content": enhanced_message}],
236
+ gr.update(interactive=False), # Disable input
237
+ gr.update(visible=True), # Show stop button
238
+ )
239
 
240
 
241
  def bot_response(history, system_message, max_tokens, temperature, top_p):
242
  if not history:
243
+ return history, gr.update(interactive=True), gr.update(visible=False)
244
 
245
  # Convert messages format to tuples for the respond function
246
  history_tuples = []
 
273
  for response_chunk in response_gen:
274
  updated_history = history.copy()
275
  updated_history[-1] = {"role": "assistant", "content": response_chunk}
276
+ yield (
277
+ updated_history,
278
+ gr.update(), # Keep input disabled during processing
279
+ gr.update(), # Keep stop button visible
280
+ )
281
+
282
+ # Final yield to re-enable input and hide stop button
283
+ final_history = history.copy()
284
+ final_history[-1] = {"role": "assistant", "content": response_chunk}
285
+ yield (
286
+ final_history,
287
+ gr.update(interactive=True), # Re-enable input
288
+ gr.update(visible=False), # Hide stop button
289
+ )
290
 
291
  except Exception as e:
292
  logger.error(f"Error in bot_response: {e}")
 
295
  logger.error(f"Full traceback: {traceback.format_exc()}")
296
  error_history = history.copy()
297
  error_history[-1] = {"role": "assistant", "content": f"Error: {str(e)}"}
298
+ yield (
299
+ error_history,
300
+ gr.update(interactive=True), # Re-enable input on error
301
+ gr.update(visible=False), # Hide stop button on error
302
+ )
303
 
304
 
305
  def respond(
 
413
  if "choices" in chunk and len(chunk["choices"]) > 0:
414
  delta = chunk["choices"][0].get("delta", {})
415
  content = delta.get("content", "")
416
+
417
  if content:
418
  response_text += content
419
+
420
  # For scheduling requests, include essential logs inline
421
  if is_scheduling_request:
422
  session_logs = get_session_logs()
423
+
424
  if session_logs:
425
  # Show only new logs since last yield
426
  latest_logs = (
 
431
  logs_text = "\n".join(
432
  f" {log}" for log in latest_logs
433
  )
434
+
435
  yield response_text + f"\n\n{logs_text}"
436
+
437
  else:
438
  yield response_text
439
+
440
  else:
441
  yield response_text
442
 
 
481
  try:
482
  # Extract task description from message
483
  task_description = message
484
+ calendar_content = "" # Always start with empty calendar
485
 
486
  # Extract calendar data if available
487
  calendar_match = re.search(r"\[CALENDAR_DATA:([^\]]+)\]", message)
488
+
489
  if calendar_match:
490
  calendar_content = calendar_match.group(1)
491
+ logger.info("Calendar data found and extracted")
492
+
493
+ else:
494
+ # If no calendar data found, proceed with empty calendar
495
+ logger.info(
496
+ "No calendar data found, proceeding with empty calendar - tool will still be called"
497
+ )
498
 
499
  # Show essential task processing logs inline
500
  session_logs = get_session_logs()
501
  processing_status = ""
502
+
503
  if session_logs:
504
  latest_logs = (
505
  session_logs[-2:] if len(session_logs) > 2 else session_logs
 
513
  logger.info("About to call MCP scheduling tool directly")
514
 
515
  # Add timeout to prevent hanging
 
 
 
516
  def call_with_timeout():
517
  try:
518
  return loop.run_until_complete(
 
731
  else:
732
  tool_response = f"""
733
 
734
+ πŸ“… **Schedule Generated Successfully!**
735
 
736
+ **Task:** {task_description}
737
+ **Calendar Events Processed:** {len(calendar_entries)}
738
+ **Total Scheduled Items:** {len(schedule)}
739
 
740
+ ⚠️ **No schedule items to display** - This may indicate the task was completed or no scheduling was needed.
741
 
742
+ **Raw Result:**
743
+ ```json
744
+ {safe_json_dumps(result, indent=2)[:1000]}
745
+ ```
746
+ """
747
 
748
  response_text += tool_response
749
  logger.info("Added success message with table to response")
 
761
 
762
  except Exception as e:
763
  logger.error(f"Direct scheduling call failed: {e}")
 
764
 
765
  logger.error(f"Full traceback: {traceback.format_exc()}")
766
  tool_response = f"\n\n❌ **Scheduling failed:** {str(e)}"
 
774
 
775
  except Exception as e:
776
  logger.error(f"Error in chat response: {e}")
 
777
 
778
  logger.error(f"Full traceback: {traceback.format_exc()}")
779
  yield f"Error: {str(e)}"