File size: 7,448 Bytes
918bdb4
2004c79
3b9a6b5
 
 
 
2004c79
918bdb4
 
 
 
3b9a6b5
 
 
 
 
 
918bdb4
3b9a6b5
 
 
 
 
 
 
 
 
 
918bdb4
 
3b9a6b5
918bdb4
 
 
 
3b9a6b5
 
 
 
918bdb4
3b9a6b5
 
 
 
 
 
 
 
 
918bdb4
3b9a6b5
 
 
 
 
 
 
 
 
 
 
 
 
918bdb4
3b9a6b5
 
 
 
 
 
 
 
 
 
918bdb4
3b9a6b5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
918bdb4
 
3b9a6b5
 
 
918bdb4
 
 
3b9a6b5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
918bdb4
 
 
3b9a6b5
 
 
 
 
 
 
 
 
 
 
 
 
918bdb4
3b9a6b5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
918bdb4
3b9a6b5
 
 
918bdb4
3b9a6b5
 
 
 
 
 
918bdb4
3b9a6b5
 
 
 
 
 
918bdb4
3b9a6b5
 
 
 
 
 
 
 
 
 
918bdb4
 
 
 
 
 
3b9a6b5
 
 
918bdb4
3b9a6b5
 
 
 
918bdb4
3b9a6b5
918bdb4
3b9a6b5
918bdb4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
from typing import Tuple
import os

import pandas as pd
import gradio as gr

from utils.logging_config import setup_logging, get_logger

# Initialize logging
# NOTE(review): logging is configured *before* importing `services` —
# presumably so loggers created at services-import time inherit this
# configuration; confirm before reordering these statements.
setup_logging()
logger = get_logger(__name__)

from services import (
    LoggingService,
    ScheduleService,
    DataService,
    MockProjectService,
    StateService,
)

# Global logging service instance for UI streaming
# (shared by every handler below to accumulate and surface log output).
logging_service = LoggingService()


async def show_solved(
    state_data, job_id: str, debug: bool = False
) -> Tuple[pd.DataFrame, pd.DataFrame, str, str, object, str]:
    """Handler for solving a schedule from UI state data.

    Args:
        state_data: Previously loaded project state; falsy when no data
            has been loaded yet.
        job_id: Identifier of the solve job associated with this request.
        debug: When True, force DEBUG-level logging for this call.

    Returns:
        Tuple of (employees table, solved tasks table, job id, status
        message, state data, accumulated streaming logs) matching the
        Gradio output components. On failure the tables are ``gr.update()``
        placeholders and the status carries the error text.
    """
    # Ensure log streaming is set up and respects debug mode
    _ensure_log_streaming_setup(debug)

    logger.info(
        "show_solved called with state_data type: %s, job_id: %s",
        type(state_data),
        job_id,
    )

    # Guard clause: solving requires loaded data; surface a friendly
    # status instead of raising.
    if not state_data:
        logger.warning("No data loaded - cannot solve schedule")
        return (
            gr.update(),
            gr.update(),
            job_id,
            "❌ No data loaded. Please click 'Load Data' first to load project data before solving.",
            state_data,
            logging_service.get_streaming_logs(),
        )

    logger.info("State data found, proceeding with solve...")

    try:
        # Use the schedule service to solve the schedule
        (
            emp_df,
            solved_task_df,
            new_job_id,
            status,
            state_data,
        ) = await ScheduleService.solve_schedule_from_state(
            state_data, job_id, debug=debug
        )

        logger.info("Solver completed successfully, returning results")

        return (
            emp_df,
            solved_task_df,
            new_job_id,
            status,
            state_data,
            logging_service.get_streaming_logs(),
        )
    except Exception as e:
        # logger.exception records the full traceback, unlike logger.error.
        logger.exception("Error in show_solved: %s", e)
        return (
            gr.update(),
            gr.update(),
            job_id,
            f"❌ Error solving schedule: {str(e)}",
            state_data,
            logging_service.get_streaming_logs(),
        )


def show_mock_project_content(project_names) -> str:
    """Render and return the textual content for the chosen mock projects."""
    content = MockProjectService.show_mock_project_content(project_names)
    return content


async def load_data(
    project_source: str,
    file_obj,
    mock_projects,
    employee_count: int,
    days_in_schedule: int,
    llm_output,
    debug: bool = False,
    progress=gr.Progress(),
):
    """
    Handler for data loading from either file uploads or mock projects - streaming version
    Yields intermediate updates for real-time progress

    Args:
        project_source: Which source to load from (file upload vs. mock).
        file_obj: Uploaded file object, if any.
        mock_projects: Selected mock project names, if any.
        employee_count: Number of employees to generate/load.
        days_in_schedule: Length of the schedule horizon in days.
        llm_output: Current LLM output state (passed through by the UI).
        debug: When True, force DEBUG-level logging for this call.
        progress: Gradio progress tracker (injected by the framework).

    Yields:
        Tuples matching the Gradio output components: (employees table,
        schedule table, job id, status text, llm output state, streamed
        logs, solve-button update).
    """
    # Ensure log streaming is set up and clear previous logs
    _ensure_log_streaming_setup(debug)
    logging_service.clear_streaming_logs()

    # Initial log message
    logger.info("Starting data loading process...")
    if debug:
        logger.debug("Debug mode enabled for data loading")

    # Yield initial state
    yield (
        gr.update(),  # employees_table
        gr.update(),  # schedule_table
        gr.update(),  # job_id_state
        "Starting data loading...",  # status_text
        gr.update(),  # llm_output_state
        logging_service.get_streaming_logs(),  # log_terminal
        gr.update(interactive=False),  # solve_btn - keep disabled during loading
    )

    try:
        # Use the data service to load data from sources
        (
            emp_df,
            task_df,
            job_id,
            status_message,
            state_data,
        ) = await DataService.load_data_from_sources(
            project_source,
            file_obj,
            mock_projects,
            employee_count,
            days_in_schedule,
            debug,
        )

        # Store schedule for later use
        StateService.store_solved_schedule(
            job_id, None
        )  # Will be populated when solved

        # Final yield with complete results
        yield (
            emp_df,  # employees_table
            task_df,  # schedule_table
            job_id,  # job_id_state
            status_message,  # status_text
            state_data,  # llm_output_state
            logging_service.get_streaming_logs(),  # log_terminal with accumulated logs
            gr.update(interactive=True),  # solve_btn - enable after successful loading
        )

    except Exception as e:
        # logger.exception preserves the traceback for diagnosis.
        logger.exception("Error loading data: %s", e)
        yield (
            gr.update(),
            gr.update(),
            gr.update(),
            f"Error loading data: {str(e)}",
            gr.update(),
            logging_service.get_streaming_logs(),  # log_terminal
            gr.update(interactive=False),  # solve_btn - keep disabled on error
        )


def start_timer(job_id, llm_output) -> gr.Timer:
    """Create and return the polling timer for the given solve job."""
    timer = ScheduleService.start_timer(job_id, llm_output)
    return timer


def poll_solution(
    job_id: str, schedule, debug: bool = False
) -> Tuple[pd.DataFrame, pd.DataFrame, str, str, object, str]:
    """Handler for polling a solution for a given job_id.

    Args:
        job_id: Identifier of the solve job being polled.
        schedule: Current schedule state, passed through and updated.
        debug: When True, request debug-level detail from the service.

    Returns:
        Tuple of (employees table, tasks table, job id, status message,
        schedule state, streamed logs). On failure the tables are
        ``gr.update()`` placeholders and the status carries the error.
    """
    try:
        (
            emp_df,
            task_df,
            job_id,
            status_message,
            schedule,
        ) = ScheduleService.poll_solution(job_id, schedule, debug)

        return (
            emp_df,
            task_df,
            job_id,
            status_message,
            schedule,
            logging_service.get_streaming_logs(),  # Include logs in polling updates
        )

    except Exception as e:
        # logger.exception records the full traceback, unlike logger.error.
        logger.exception("Error in poll_solution: %s", e)
        return (
            gr.update(),
            gr.update(),
            job_id,
            f"Error polling solution: {str(e)}",
            schedule,
            logging_service.get_streaming_logs(),  # Include logs even on error
        )


async def auto_poll(
    job_id: str, llm_output: dict, debug: bool = False
) -> Tuple[pd.DataFrame, pd.DataFrame, str, str, dict, str]:
    """Handler for auto-polling a solution.

    Args:
        job_id: Identifier of the solve job being polled.
        llm_output: Current LLM output state, passed through and updated.
        debug: When True, request debug-level detail from the service.

    Returns:
        Tuple of (employees table, tasks table, job id, status message,
        llm output state, streamed logs). On failure the tables are
        ``gr.update()`` placeholders and the status carries the error.
    """
    try:
        (
            emp_df,
            task_df,
            job_id,
            status_message,
            llm_output,
        ) = await ScheduleService.auto_poll(job_id, llm_output, debug)

        return (
            emp_df,
            task_df,
            job_id,
            status_message,
            llm_output,
            logging_service.get_streaming_logs(),  # Include logs in auto-poll updates
        )

    except Exception as e:
        # logger.exception records the full traceback, unlike logger.error.
        logger.exception("Error in auto_poll: %s", e)
        return (
            gr.update(),
            gr.update(),
            job_id,
            f"Error in auto-polling: {str(e)}",
            llm_output,
            logging_service.get_streaming_logs(),  # Include logs even on error
        )


def _ensure_log_streaming_setup(debug: bool = False) -> None:
    """
    Ensure log streaming is properly set up with current debug settings.

    Keeps logging behavior consistent when debug mode is toggled at
    runtime: an explicit debug request reconfigures the logging level
    first, then streaming is (re)attached on top of whatever level is
    currently active.
    """
    if debug:
        # Explicit debug request: record it in the environment and
        # reconfigure logging at DEBUG level before streaming attaches.
        os.environ["YUGA_DEBUG"] = "true"
        setup_logging("DEBUG")

    # Streaming setup runs unconditionally; it honors the active level.
    logging_service.setup_log_streaming()

    if debug:
        logger.debug("Log streaming setup completed with debug mode enabled")