blackopsrepl committed
Commit 9ea1761 · 1 Parent(s): e2685f7

refactor!: remove old web backend and agent demo

Files changed (1)
  1. src/handlers/web_backend.py +0 -258
src/handlers/web_backend.py DELETED
@@ -1,258 +0,0 @@
- from typing import Tuple
- import os
-
- import pandas as pd
- import gradio as gr
-
- from utils.logging_config import setup_logging, get_logger
-
- # Initialize logging
- setup_logging()
- logger = get_logger(__name__)
-
- from services import (
-     LoggingService,
-     ScheduleService,
-     DataService,
-     MockProjectService,
-     StateService,
- )
-
- # Global logging service instance for UI streaming
- logging_service = LoggingService()
-
-
- async def show_solved(
-     state_data, job_id: str, debug: bool = False
- ) -> Tuple[pd.DataFrame, pd.DataFrame, str, str, object, str]:
-     """Handler for solving a schedule from UI state data"""
-     # Ensure log streaming is set up and respects debug mode
-     _ensure_log_streaming_setup(debug)
-
-     logger.info(
-         "show_solved called with state_data type: %s, job_id: %s",
-         type(state_data),
-         job_id,
-     )
-
-     # Check if data has been loaded
-     if not state_data:
-         logger.warning("No data loaded - cannot solve schedule")
-         return (
-             gr.update(),
-             gr.update(),
-             job_id,
-             "❌ No data loaded. Please click 'Load Data' first to load project data before solving.",
-             state_data,
-             logging_service.get_streaming_logs(),
-         )
-
-     logger.info("State data found, proceeding with solve...")
-
-     try:
-         # Use the schedule service to solve the schedule
-         (
-             emp_df,
-             solved_task_df,
-             new_job_id,
-             status,
-             state_data,
-         ) = await ScheduleService.solve_schedule_from_state(
-             state_data, job_id, debug=debug
-         )
-
-         logger.info("Solver completed successfully, returning results")
-
-         return (
-             emp_df,
-             solved_task_df,
-             new_job_id,
-             status,
-             state_data,
-             logging_service.get_streaming_logs(),
-         )
-     except Exception as e:
-         logger.error("Error in show_solved: %s", e)
-         return (
-             gr.update(),
-             gr.update(),
-             job_id,
-             f"❌ Error solving schedule: {str(e)}",
-             state_data,
-             logging_service.get_streaming_logs(),
-         )
-
-
- def show_mock_project_content(project_names) -> str:
-     """Handler for displaying mock project content"""
-     return MockProjectService.show_mock_project_content(project_names)
-
-
- async def load_data(
-     project_source: str,
-     file_obj,
-     mock_projects,
-     employee_count: int,
-     days_in_schedule: int,
-     llm_output,
-     debug: bool = False,
-     progress=gr.Progress(),
- ):
-     """
-     Handler for data loading from either file uploads or mock projects - streaming version
-     Yields intermediate updates for real-time progress
-     """
-     # Ensure log streaming is set up and clear previous logs
-     _ensure_log_streaming_setup(debug)
-     logging_service.clear_streaming_logs()
-
-     # Initial log message
-     logger.info("Starting data loading process...")
-     if debug:
-         logger.debug("Debug mode enabled for data loading")
-
-     # Yield initial state
-     yield (
-         gr.update(),  # employees_table
-         gr.update(),  # schedule_table
-         gr.update(),  # job_id_state
-         "Starting data loading...",  # status_text
-         gr.update(),  # llm_output_state
-         logging_service.get_streaming_logs(),  # log_terminal
-         gr.update(interactive=False),  # solve_btn - keep disabled during loading
-     )
-
-     try:
-         # Use the data service to load data from sources
-         (
-             emp_df,
-             task_df,
-             job_id,
-             status_message,
-             state_data,
-         ) = await DataService.load_data_from_sources(
-             project_source,
-             file_obj,
-             mock_projects,
-             employee_count,
-             days_in_schedule,
-             debug,
-         )
-
-         # Store schedule for later use
-         StateService.store_solved_schedule(
-             job_id, None
-         )  # Will be populated when solved
-
-         # Final yield with complete results
-         yield (
-             emp_df,  # employees_table
-             task_df,  # schedule_table
-             job_id,  # job_id_state
-             status_message,  # status_text
-             state_data,  # llm_output_state
-             logging_service.get_streaming_logs(),  # log_terminal with accumulated logs
-             gr.update(interactive=True),  # solve_btn - enable after successful loading
-         )
-
-     except Exception as e:
-         logger.error("Error loading data: %s", e)
-         yield (
-             gr.update(),
-             gr.update(),
-             gr.update(),
-             f"Error loading data: {str(e)}",
-             gr.update(),
-             logging_service.get_streaming_logs(),  # log_terminal
-             gr.update(interactive=False),  # solve_btn - keep disabled on error
-         )
-
-
- def start_timer(job_id, llm_output) -> gr.Timer:
-     """Handler for starting the polling timer"""
-     return ScheduleService.start_timer(job_id, llm_output)
-
-
- def poll_solution(
-     job_id: str, schedule, debug: bool = False
- ) -> Tuple[pd.DataFrame, pd.DataFrame, str, str, object, str]:
-     """Handler for polling a solution for a given job_id"""
-     try:
-         (
-             emp_df,
-             task_df,
-             job_id,
-             status_message,
-             schedule,
-         ) = ScheduleService.poll_solution(job_id, schedule, debug)
-
-         return (
-             emp_df,
-             task_df,
-             job_id,
-             status_message,
-             schedule,
-             logging_service.get_streaming_logs(),  # Include logs in polling updates
-         )
-
-     except Exception as e:
-         logger.error("Error in poll_solution: %s", e)
-         return (
-             gr.update(),
-             gr.update(),
-             job_id,
-             f"Error polling solution: {str(e)}",
-             schedule,
-             logging_service.get_streaming_logs(),  # Include logs even on error
-         )
-
-
- async def auto_poll(
-     job_id: str, llm_output: dict, debug: bool = False
- ) -> Tuple[pd.DataFrame, pd.DataFrame, str, str, dict, str]:
-     """Handler for auto-polling a solution"""
-     try:
-         (
-             emp_df,
-             task_df,
-             job_id,
-             status_message,
-             llm_output,
-         ) = await ScheduleService.auto_poll(job_id, llm_output, debug)
-
-         return (
-             emp_df,
-             task_df,
-             job_id,
-             status_message,
-             llm_output,
-             logging_service.get_streaming_logs(),  # Include logs in auto-poll updates
-         )
-
-     except Exception as e:
-         logger.error("Error in auto_poll: %s", e)
-         return (
-             gr.update(),
-             gr.update(),
-             job_id,
-             f"Error in auto-polling: {str(e)}",
-             llm_output,
-             logging_service.get_streaming_logs(),  # Include logs even on error
-         )
-
-
- def _ensure_log_streaming_setup(debug: bool = False) -> None:
-     """
-     Ensure log streaming is properly set up with current debug settings.
-     This helps maintain consistency when debug mode changes at runtime.
-     """
-     if debug:
-         # Force debug mode setup if explicitly requested
-         os.environ["YUGA_DEBUG"] = "true"
-         setup_logging("DEBUG")
-
-     # Always setup streaming (it will respect current logging level)
-     logging_service.setup_log_streaming()
-
-     if debug:
-         logger.debug("Log streaming setup completed with debug mode enabled")
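
For context, the removed handlers follow Gradio's generator-handler pattern: a handler written as a generator yields a tuple of component updates at each step, so the UI can show intermediate progress (and, here, stream accumulated logs) before the final result arrives. A minimal sketch of that pattern, with hypothetical component and function names not taken from this repo:

import gradio as gr


def slow_load(source: str):
    # Each yield pushes one intermediate UI state: (status text, solve-button update).
    yield "Loading...", gr.update(interactive=False)  # keep Solve disabled while loading
    data = source.upper()  # stand-in for the real data-loading work
    yield f"Loaded: {data}", gr.update(interactive=True)  # re-enable Solve on success


with gr.Blocks() as demo:
    source = gr.Textbox(label="Source")
    status = gr.Textbox(label="Status")
    solve_btn = gr.Button("Solve", interactive=False)
    load_btn = gr.Button("Load Data")
    # Outputs are positional: the first yielded value updates `status`,
    # the second updates `solve_btn`.
    load_btn.click(slow_load, inputs=source, outputs=[status, solve_btn])

demo.launch()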