# services/report_data_handler.py
import json  # For handling JSON data
import logging
from typing import List, Dict, Any, Optional, Tuple

import pandas as pd

from apis.Bubble_API_Calls import fetch_linkedin_posts_data_from_bubble, bulk_upload_to_bubble
from config import (
    BUBBLE_REPORT_TABLE_NAME,
    BUBBLE_OKR_TABLE_NAME,
    BUBBLE_KEY_RESULTS_TABLE_NAME,
    BUBBLE_TASKS_TABLE_NAME,
    BUBBLE_KR_UPDATE_TABLE_NAME,
)

# It's good practice to configure the logger at the application entry point,
# but setting a default handler here prevents "No handler found" warnings.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def fetch_latest_agentic_analysis(org_urn: str) -> Tuple[Optional[pd.DataFrame], Optional[str]]:
    """
    Fetches all agentic analysis data for a given org_urn from Bubble.

    Returns a (dataframe, error) tuple: the full dataframe and no error on success,
    (None, error_message) on failure, and (None, None) when no records exist yet.
    """
    logger.info(f"Starting fetch_latest_agentic_analysis for org_urn: {org_urn}")
    if not org_urn:
        logger.warning("fetch_latest_agentic_analysis: org_urn is missing.")
        return None, "org_urn is missing."

    try:
        report_data_df, error = fetch_linkedin_posts_data_from_bubble(
            data_type=BUBBLE_REPORT_TABLE_NAME,
            org_urn=org_urn
        )
        if error:
            logger.error(f"Error fetching data from Bubble for org_urn {org_urn}: {error}")
            return None, str(error)

        if report_data_df is None or report_data_df.empty:
            logger.info(f"No existing agentic analysis found in Bubble for org_urn {org_urn}.")
            return None, None

        logger.info(f"Successfully fetched {len(report_data_df)} records for org_urn {org_urn}")
        return report_data_df, None  # Return the full dataframe and no error.
    except Exception as e:
        logger.exception(f"An unexpected error occurred in fetch_latest_agentic_analysis for org_urn {org_urn}: {e}")
        return None, str(e)
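
# Illustrative sketch of how a caller might consume fetch_latest_agentic_analysis.
# The URN below is made up, and the "Created Date" column is an assumption about the
# Bubble report table schema rather than something this module guarantees.
#
#     report_df, error = fetch_latest_agentic_analysis("urn:li:organization:12345")
#     if error:
#         ...  # surface the error to the caller / UI
#     elif report_df is not None and not report_df.empty:
#         latest_report = report_df.sort_values("Created Date").iloc[-1]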


def save_report_results(
    org_urn: str,
    report_markdown: str,
    quarter: int,
    year: int,
    report_type: str,
) -> Optional[str]:
    """Saves the agentic pipeline results to Bubble. Returns the new record ID or None."""
    logger.info(f"Starting save_report_results for org_urn: {org_urn}")
    if not org_urn:
        logger.error("Cannot save agentic results: org_urn is missing.")
        return None

    try:
        payload = {
            "organization_urn": org_urn,
            "report_text": report_markdown if report_markdown else "N/A",
            "quarter": quarter,
            "year": year,
            "report_type": report_type,
        }
        logger.info(f"Attempting to save agentic analysis to Bubble for org_urn: {org_urn}")
        response = bulk_upload_to_bubble([payload], BUBBLE_REPORT_TABLE_NAME)

        # bulk_upload_to_bubble is expected to return a list of new record IDs on
        # success and None (or a falsy value) on failure, so take the first ID.
        if response:
            record_id = response[0]
            logger.info(f"Successfully saved agentic analysis to Bubble. Record ID: {record_id}")
            return record_id
        else:
            logger.error(f"Failed to save agentic analysis to Bubble. Response: {response}")
            return None
    except Exception as e:
        logger.exception(f"An unexpected error occurred in save_report_results for org_urn {org_urn}: {e}")
        return None
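
# Illustrative sketch of a save_report_results call. All values are made up; the
# returned ID is whatever Bubble assigns to the new row in BUBBLE_REPORT_TABLE_NAME.
#
#     report_id = save_report_results(
#         org_urn="urn:li:organization:12345",
#         report_markdown="# Q3 Performance Report\n...",
#         quarter=3,
#         year=2025,
#         report_type="Quarter",
#     )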


# --- Data Saving Functions ---

def save_objectives(
    org_urn: str,
    report_id: str,
    objectives_data: List[Dict[str, Any]]
) -> Optional[List[str]]:
    """
    Saves Objective records to Bubble.
    Returns a list of the newly created Bubble record IDs for the objectives, or None on failure.
    """
    logger.info(f"Starting save_objectives for report_id: {report_id}")
    if not objectives_data:
        logger.info("No objectives to save.")
        return []

    try:
        payloads = [
            {
                "description": obj.get("objective_description"),
                "timeline": obj.get("objective_timeline"),
                "owner": obj.get("objective_owner"),
                "report": report_id,
                # "organization_urn": org_urn  # Assuming 'report' links to the org
            }
            for obj in objectives_data
        ]
        logger.debug(f"Objectives data: {objectives_data}")
        logger.debug(f"Objective payloads: {payloads}")
        logger.info(f"Attempting to save {len(payloads)} objectives for report_id: {report_id}")
        objective_ids = bulk_upload_to_bubble(payloads, BUBBLE_OKR_TABLE_NAME)
        if objective_ids is None:
            logger.error(f"Failed to save objectives to Bubble for report_id: {report_id}. The upload function returned None.")
            return None

        logger.info(f"Successfully saved {len(objective_ids)} objectives.")
        return objective_ids
    except Exception as e:
        logger.exception(f"An unexpected error occurred in save_objectives for report_id {report_id}: {e}")
        return None
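
# For reference, each entry in objectives_data is expected to look roughly like the
# dictionary below. Only the keys read above (plus "key_results", which is consumed
# by save_key_results) matter; the example values are invented.
#
#     {
#         "objective_description": "Grow LinkedIn engagement",
#         "objective_timeline": "Q3 2025",
#         "objective_owner": "Marketing team",
#         "key_results": [...],
#     }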


def save_key_results(
    org_urn: str,
    objectives_with_ids: List[Tuple[Dict[str, Any], str]]
) -> Optional[List[Tuple[Dict[str, Any], str]]]:
    """
    Saves Key Result records to Bubble, linking them to their parent objectives.
    Returns a list of tuples containing the original key result data and its new Bubble ID, or None on failure.
    """
    logger.info(f"Starting save_key_results for {len(objectives_with_ids)} objectives.")
    key_result_payloads = []
    # This list preserves the original KR data in the correct order to match the returned IDs.
    key_results_to_process = []

    if not objectives_with_ids:
        logger.info("No objectives provided to save_key_results.")
        return []

    try:
        for objective_data, parent_objective_id in objectives_with_ids:
            for kr in objective_data.get("key_results", []):
                key_results_to_process.append(kr)
                key_result_payloads.append({
                    "okr": parent_objective_id,
                    "description": kr.get("key_result_description"),
                    "target_metric": kr.get("target_metric"),
                    "target_value": kr.get("target_value"),
                    "kr_type": kr.get("key_result_type"),
                    "data_subject": kr.get("data_subject"),
                })

        if not key_result_payloads:
            logger.info("No key results to save.")
            return []

        logger.info(f"Attempting to save {len(key_result_payloads)} key results for org_urn: {org_urn}")
        key_result_ids = bulk_upload_to_bubble(key_result_payloads, BUBBLE_KEY_RESULTS_TABLE_NAME)
        if key_result_ids is None:
            logger.error(f"Failed to save key results to Bubble for org_urn: {org_urn}.")
            return None

        logger.info(f"Successfully saved {len(key_result_ids)} key results.")
        return list(zip(key_results_to_process, key_result_ids))
    except Exception as e:
        logger.exception(f"An unexpected error occurred in save_key_results for org_urn {org_urn}: {e}")
        return None
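
# For reference, objectives_with_ids is expected to be a list of
# (objective_dict, bubble_objective_id) tuples, where each objective_dict carries a
# "key_results" list of dictionaries using the keys read above, e.g. (invented values):
#
#     {
#         "key_result_description": "Increase average engagement rate",
#         "target_metric": "engagement_rate",
#         "target_value": "5%",
#         "key_result_type": "outcome",
#         "data_subject": "posts",
#         "tasks": [...],
#     }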


def save_tasks(
    org_urn: str,
    key_results_with_ids: List[Tuple[Dict[str, Any], str]]
) -> Optional[List[str]]:
    """
    Saves Task records to Bubble, linking them to their parent key results.
    Returns a list of the newly created Bubble record IDs for the tasks, or None on failure.
    """
    logger.info(f"Starting save_tasks for {len(key_results_with_ids)} key results.")
    if not key_results_with_ids:
        logger.info("No key results provided to save_tasks.")
        return []

    try:
        task_payloads = []
        for key_result_data, parent_key_result_id in key_results_with_ids:
            for task in key_result_data.get("tasks", []):
                task_payloads.append({
                    "key_result": parent_key_result_id,
                    "description": task.get("task_description"),
                    "objective_deliverable": task.get("objective_deliverable"),
                    "category": task.get("task_category"),
                    "priority": task.get("priority"),
                    "priority_justification": task.get("priority_justification"),
                    "effort": task.get("effort"),
                    "timeline": task.get("timeline"),
                    "responsible_party": task.get("responsible_party"),
                    "success_criteria_metrics": task.get("success_criteria_metrics"),
                    "dependencies": task.get("dependencies_prerequisites"),
                    "why": task.get("why_proposed"),
                })

        if not task_payloads:
            logger.info("No tasks to save.")
            return []

        logger.info(f"Attempting to save {len(task_payloads)} tasks for org_urn: {org_urn}")
        task_ids = bulk_upload_to_bubble(task_payloads, BUBBLE_TASKS_TABLE_NAME)
        if task_ids is None:
            logger.error(f"Failed to save tasks to Bubble for org_urn: {org_urn}.")
            return None

        logger.info(f"Successfully saved {len(task_ids)} tasks.")
        return task_ids
    except Exception as e:
        logger.exception(f"An unexpected error occurred in save_tasks for org_urn {org_urn}: {e}")
        return None


# --- Orchestrator Function ---

def save_actionable_okrs(org_urn: str, actionable_okrs: Dict[str, Any], report_id: str):
    """
    Orchestrates the sequential saving of objectives, key results, and tasks.
    """
    logger.info(f"--- Starting OKR save process for org_urn: {org_urn}, report_id: {report_id} ---")
    try:
        objectives_data = actionable_okrs.get("okrs", [])

        # Defensive check: if the data arrives as a string, try to parse it as JSON.
        if isinstance(objectives_data, str):
            logger.warning("The 'okrs' data is a string. Attempting to parse it as JSON.")
            try:
                objectives_data = json.loads(objectives_data)
                logger.info("Successfully parsed 'okrs' data from JSON string.")
            except json.JSONDecodeError:
                logger.error("Failed to parse 'okrs' data. The string is not valid JSON.", exc_info=True)
                return  # Abort if the data is malformed.

        if not objectives_data:
            logger.warning(f"No OKRs found in the input for org_urn: {org_urn}. Aborting save process.")
            return

        # Step 1: Save the top-level objectives.
        objective_ids = save_objectives(org_urn, report_id, objectives_data)
        if objective_ids is None:
            logger.error("OKR save process aborted due to failure in saving objectives.")
            return

        # Combine the original objective data with their new IDs for the next step.
        objectives_with_ids = list(zip(objectives_data, objective_ids))

        # Step 2: Save the key results, linking them to the objectives.
        key_results_with_ids = save_key_results(org_urn, objectives_with_ids)
        if key_results_with_ids is None:
            logger.error("OKR save process aborted due to failure in saving key results.")
            return

        # Step 3: Save the tasks, linking them to the key results.
        task_ids = save_tasks(org_urn, key_results_with_ids)
        if task_ids is None:
            # Objectives and key results are already saved at this point, so log the
            # failure rather than treating the whole process as failed.
            logger.error("Task saving failed, but objectives and key results were saved.")
            return

        logger.info(f"--- OKR save process completed successfully for org_urn: {org_urn} ---")
    except Exception as e:
        logger.exception(f"An unhandled exception occurred during the save_actionable_okrs orchestration for org_urn {org_urn}: {e}")