# -*- coding: utf-8 -*-
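"""
Gradio app for a LinkedIn organization dashboard.

Resolves a LinkedIn access token via Bubble, syncs organization posts and mentions from the
LinkedIn API into Bubble tables ("LI_posts", "LI_post_stats", "LI_post_comments", "LI_mentions"),
and renders dashboard, analytics, and mentions views from the cached Bubble data.
"""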
import gradio as gr
import json
import os
import logging
import html
import pandas as pd
from datetime import datetime, timedelta  # Used for pd.Timestamp and date checks

# Import functions from your custom modules
from analytics_fetch_and_rendering import fetch_and_render_analytics
from gradio_utils import get_url_user_token
from Bubble_API_Calls import (
    fetch_linkedin_token_from_bubble,
    bulk_upload_to_bubble,
    fetch_linkedin_posts_data_from_bubble
)
from Linkedin_Data_API_Calls import (
    fetch_linkedin_posts_core,
    fetch_comments,
    analyze_sentiment,  # For post comments
    compile_detailed_posts,
    prepare_data_for_bubble,  # For posts, stats, comments
    fetch_linkedin_mentions_core,
    analyze_mentions_sentiment,  # For individual mentions
    compile_detailed_mentions,  # Compiles to user-specified format
    prepare_mentions_for_bubble  # Prepares user-specified format for Bubble
)

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# --- Global Constants ---
DEFAULT_INITIAL_FETCH_COUNT = 10
LINKEDIN_POST_URN_KEY = 'id'
BUBBLE_POST_URN_COLUMN_NAME = 'id'
BUBBLE_POST_DATE_COLUMN_NAME = 'published_at'

# Constants for Mentions - these should match the keys used in the data prepared for Bubble
BUBBLE_MENTIONS_TABLE_NAME = "LI_mentions"  # Your Bubble table name for mentions
BUBBLE_MENTIONS_ID_COLUMN_NAME = "id"  # Column in Bubble storing the mention's source post URN (share_urn)
BUBBLE_MENTIONS_DATE_COLUMN_NAME = "date"  # Column in Bubble storing the mention's publication date
DEFAULT_MENTIONS_INITIAL_FETCH_COUNT = 20
DEFAULT_MENTIONS_UPDATE_FETCH_COUNT = 10
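
# For reference, a mention record prepared for Bubble (prepare_mentions_for_bubble) is expected
# to expose at least the columns used for dedup and display below. Illustrative sketch only;
# the field values are assumptions, not the authoritative schema:
#   {"id": "urn:li:share:123", "date": "2024-01-01T00:00:00Z", "mention_text": "...",
#    "organization_urn": "urn:li:organization:456", "sentiment_label": "Positive"}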

def check_token_status(token_state):
    """Checks the status of the LinkedIn token."""
    return "✅ Token available" if token_state and token_state.get("token") else "❌ Token not available"

def process_and_store_bubble_token(url_user_token, org_urn, token_state):
    """
    Processes user token, fetches LinkedIn token, fetches existing Bubble posts & mentions,
    and determines if an initial fetch or update is needed for LinkedIn posts.
    Updates token state and UI for the sync button.
    """
    logging.info(f"Processing token with URL user token: '{url_user_token}', Org URN: '{org_urn}'")

    new_state = token_state.copy() if token_state else {
        "token": None, "client_id": None, "org_urn": None,
        "bubble_posts_df": pd.DataFrame(), "fetch_count_for_api": 0,
        "bubble_mentions_df": pd.DataFrame(), "fetch_count_for_mentions_api": 0,
        "url_user_token_temp_storage": None
    }
    new_state.update({
        "org_urn": org_urn,
        "bubble_posts_df": new_state.get("bubble_posts_df", pd.DataFrame()),
        "fetch_count_for_api": new_state.get("fetch_count_for_api", 0),
        "bubble_mentions_df": new_state.get("bubble_mentions_df", pd.DataFrame()),
        "fetch_count_for_mentions_api": new_state.get("fetch_count_for_mentions_api", 0),
        "url_user_token_temp_storage": url_user_token  # Store for potential re-use
    })

    button_update = gr.update(visible=False, interactive=False, value="🔄 Sync LinkedIn Data")

    client_id = os.environ.get("Linkedin_client_id")
    new_state["client_id"] = client_id if client_id else "ENV VAR MISSING"
    if not client_id:
        logging.error("CRITICAL ERROR: 'Linkedin_client_id' environment variable not set.")

    if url_user_token and "not found" not in url_user_token and "Could not access" not in url_user_token:
        logging.info(f"Attempting to fetch LinkedIn token from Bubble with user token: {url_user_token}")
        try:
            parsed_linkedin_token = fetch_linkedin_token_from_bubble(url_user_token)
            if isinstance(parsed_linkedin_token, dict) and "access_token" in parsed_linkedin_token:
                new_state["token"] = parsed_linkedin_token
                logging.info("✅ LinkedIn Token successfully fetched from Bubble.")
            else:
                new_state["token"] = None
                logging.warning(f"❌ Failed to fetch a valid LinkedIn token from Bubble. Response: {parsed_linkedin_token}")
        except Exception as e:
            new_state["token"] = None
            logging.error(f"❌ Exception while fetching LinkedIn token from Bubble: {e}")
    else:
        new_state["token"] = None
        logging.info("No valid URL user token provided for LinkedIn token fetch, or an error was indicated.")

    current_org_urn = new_state.get("org_urn")
    if current_org_urn:
        # Fetch Posts from Bubble
        logging.info(f"Attempting to fetch posts from Bubble for org_urn: {current_org_urn}")
        try:
            fetched_posts_df, error_message_posts = fetch_linkedin_posts_data_from_bubble(current_org_urn, "LI_posts")
            new_state["bubble_posts_df"] = pd.DataFrame() if error_message_posts or fetched_posts_df is None else fetched_posts_df
            if error_message_posts:
                logging.warning(f"Error from fetch_linkedin_posts_data_from_bubble: {error_message_posts}.")
        except Exception as e:
            logging.error(f"❌ Error fetching posts from Bubble: {e}.")
            new_state["bubble_posts_df"] = pd.DataFrame()

        # Fetch Mentions from Bubble
        logging.info(f"Attempting to fetch mentions from Bubble for org_urn: {current_org_urn}")
        try:
            fetched_mentions_df, error_message_mentions = fetch_linkedin_posts_data_from_bubble(current_org_urn, BUBBLE_MENTIONS_TABLE_NAME)
            new_state["bubble_mentions_df"] = pd.DataFrame() if error_message_mentions or fetched_mentions_df is None else fetched_mentions_df
            if error_message_mentions:
                logging.warning(f"Error from fetch_linkedin_posts_data_from_bubble: {error_message_mentions}.")
        except Exception as e:
            logging.error(f"❌ Error fetching mentions from Bubble: {e}.")
            new_state["bubble_mentions_df"] = pd.DataFrame()
    else:
        logging.warning("Org URN not available in state. Cannot fetch posts or mentions from Bubble.")
        new_state["bubble_posts_df"] = pd.DataFrame()
        new_state["bubble_mentions_df"] = pd.DataFrame()

    # Determine fetch count for Posts API: fetch an initial batch when Bubble is empty,
    # otherwise fetch roughly 10 posts per full week elapsed since the newest stored post.
    if new_state["bubble_posts_df"].empty:
        logging.info(f"ℹ️ No posts in Bubble. Setting to fetch initial {DEFAULT_INITIAL_FETCH_COUNT} posts.")
        new_state['fetch_count_for_api'] = DEFAULT_INITIAL_FETCH_COUNT
    else:
        try:
            df_posts_check = new_state["bubble_posts_df"].copy()
            if BUBBLE_POST_DATE_COLUMN_NAME not in df_posts_check.columns or df_posts_check[BUBBLE_POST_DATE_COLUMN_NAME].isnull().all():
                logging.warning(f"Date column '{BUBBLE_POST_DATE_COLUMN_NAME}' for posts missing/all null. Initial fetch.")
                new_state['fetch_count_for_api'] = DEFAULT_INITIAL_FETCH_COUNT
            else:
                df_posts_check[BUBBLE_POST_DATE_COLUMN_NAME] = pd.to_datetime(df_posts_check[BUBBLE_POST_DATE_COLUMN_NAME], errors='coerce', utc=True)
                last_post_date_utc = df_posts_check[BUBBLE_POST_DATE_COLUMN_NAME].dropna().max()
                if pd.isna(last_post_date_utc):
                    new_state['fetch_count_for_api'] = DEFAULT_INITIAL_FETCH_COUNT
                else:
                    days_diff = (pd.Timestamp('now', tz='UTC').normalize() - last_post_date_utc.normalize()).days
                    if days_diff >= 7:
                        new_state['fetch_count_for_api'] = max(1, days_diff // 7) * 10
                    else:
                        new_state['fetch_count_for_api'] = 0
        except Exception as e:
            logging.error(f"Error processing post dates: {e}. Defaulting to initial fetch.")
            new_state['fetch_count_for_api'] = DEFAULT_INITIAL_FETCH_COUNT

    # Determine if mentions need fetching (actual count decided in sync_linkedin_mentions)
    mentions_need_sync = False
    if new_state["bubble_mentions_df"].empty:
        mentions_need_sync = True
    else:
        if BUBBLE_MENTIONS_DATE_COLUMN_NAME not in new_state["bubble_mentions_df"].columns or new_state["bubble_mentions_df"][BUBBLE_MENTIONS_DATE_COLUMN_NAME].isnull().all():
            mentions_need_sync = True
        else:
            df_mentions_check = new_state["bubble_mentions_df"].copy()
            df_mentions_check[BUBBLE_MENTIONS_DATE_COLUMN_NAME] = pd.to_datetime(df_mentions_check[BUBBLE_MENTIONS_DATE_COLUMN_NAME], errors='coerce', utc=True)
            last_mention_date_utc = df_mentions_check[BUBBLE_MENTIONS_DATE_COLUMN_NAME].dropna().max()
            if pd.isna(last_mention_date_utc) or (pd.Timestamp('now', tz='UTC').normalize() - last_mention_date_utc.normalize()).days >= 7:
                mentions_need_sync = True

    if new_state['fetch_count_for_api'] > 0 or (new_state["token"] and mentions_need_sync):
        button_label = "🔄 Sync LinkedIn Data"
        if new_state['fetch_count_for_api'] > 0 and mentions_need_sync:
            button_label += " (Posts & Mentions)"
        elif new_state['fetch_count_for_api'] > 0:
            button_label += f" ({new_state['fetch_count_for_api']} Posts)"
        elif mentions_need_sync:
            button_label += " (Mentions)"
        button_update = gr.update(value=button_label, visible=True, interactive=True)
    else:
        button_update = gr.update(visible=False, interactive=False)

    token_status_message = check_token_status(new_state)
    logging.info(f"Token processing complete. Status: {token_status_message}. Button: {button_update}. Post Fetch: {new_state['fetch_count_for_api']}. Mentions sync needed: {mentions_need_sync}")
    return token_status_message, new_state, button_update

def sync_linkedin_mentions(token_state):
    """Fetches and syncs LinkedIn mentions to Bubble based on defined logic."""
    logging.info("Starting LinkedIn mentions sync process.")
    if not token_state or not token_state.get("token"):
        logging.error("Mentions sync: Access denied. No LinkedIn token.")
        return "Mentions: No token. ", token_state

    client_id = token_state.get("client_id")
    token_dict = token_state.get("token")
    org_urn = token_state.get('org_urn')
    bubble_mentions_df = token_state.get("bubble_mentions_df", pd.DataFrame())

    if not org_urn or not client_id or client_id == "ENV VAR MISSING":
        logging.error("Mentions sync: Configuration error (Org URN or Client ID missing).")
        return "Mentions: Config error. ", token_state

    fetch_count_for_mentions_api = 0
    if bubble_mentions_df.empty:
        fetch_count_for_mentions_api = DEFAULT_MENTIONS_INITIAL_FETCH_COUNT
        logging.info(f"No mentions in Bubble. Fetching initial {fetch_count_for_mentions_api} mentions.")
    else:
        if BUBBLE_MENTIONS_DATE_COLUMN_NAME not in bubble_mentions_df.columns or bubble_mentions_df[BUBBLE_MENTIONS_DATE_COLUMN_NAME].isnull().all():
            logging.warning(f"Date column '{BUBBLE_MENTIONS_DATE_COLUMN_NAME}' for mentions missing or all null. Fetching initial.")
            fetch_count_for_mentions_api = DEFAULT_MENTIONS_INITIAL_FETCH_COUNT
        else:
            mentions_df_copy = bubble_mentions_df.copy()
            mentions_df_copy[BUBBLE_MENTIONS_DATE_COLUMN_NAME] = pd.to_datetime(mentions_df_copy[BUBBLE_MENTIONS_DATE_COLUMN_NAME], errors='coerce', utc=True)
            last_mention_date_utc = mentions_df_copy[BUBBLE_MENTIONS_DATE_COLUMN_NAME].dropna().max()
            if pd.isna(last_mention_date_utc):
                logging.warning("No valid dates in mentions data. Fetching initial.")
                fetch_count_for_mentions_api = DEFAULT_MENTIONS_INITIAL_FETCH_COUNT
            else:
                days_since_last_mention = (pd.Timestamp('now', tz='UTC').normalize() - last_mention_date_utc.normalize()).days
                logging.info(f"Days since last mention: {days_since_last_mention}")
                if days_since_last_mention >= 7:
                    fetch_count_for_mentions_api = DEFAULT_MENTIONS_UPDATE_FETCH_COUNT
                    logging.info(f"Last mention older than 7 days. Fetching update of {fetch_count_for_mentions_api} mentions.")
                else:
                    logging.info("Mentions data is fresh. No API fetch needed.")

    token_state["fetch_count_for_mentions_api"] = fetch_count_for_mentions_api
    if fetch_count_for_mentions_api == 0:
        return "Mentions: Up-to-date. ", token_state

    try:
        logging.info(f"Fetching {fetch_count_for_mentions_api} core mentions from LinkedIn for org_urn: {org_urn}")
        processed_raw_mentions = fetch_linkedin_mentions_core(client_id, token_dict, org_urn, count=fetch_count_for_mentions_api)
        if not processed_raw_mentions:
            logging.info("No mentions retrieved from LinkedIn API.")
            return "Mentions: None found via API. ", token_state

        existing_mention_ids = set()
        if not bubble_mentions_df.empty and BUBBLE_MENTIONS_ID_COLUMN_NAME in bubble_mentions_df.columns:
            existing_mention_ids = set(bubble_mentions_df[BUBBLE_MENTIONS_ID_COLUMN_NAME].dropna().astype(str))

        sentiments_map = analyze_mentions_sentiment(processed_raw_mentions)
        all_compiled_mentions = compile_detailed_mentions(processed_raw_mentions, sentiments_map)
        new_compiled_mentions_to_upload = [
            m for m in all_compiled_mentions if str(m.get("id")) not in existing_mention_ids
        ]
        if not new_compiled_mentions_to_upload:
            logging.info("All fetched LinkedIn mentions are already present in Bubble.")
            return "Mentions: All fetched already in Bubble. ", token_state

        logging.info(f"Identified {len(new_compiled_mentions_to_upload)} new mentions to process after filtering.")
        bubble_ready_mentions = prepare_mentions_for_bubble(new_compiled_mentions_to_upload)
        if bubble_ready_mentions:
            logging.info(f"Uploading {len(bubble_ready_mentions)} new mentions to Bubble table: {BUBBLE_MENTIONS_TABLE_NAME}.")
            bulk_upload_to_bubble(bubble_ready_mentions, BUBBLE_MENTIONS_TABLE_NAME)
            return f"Mentions: Synced {len(bubble_ready_mentions)} new. ", token_state
        else:
            logging.info("No new mentions to upload to Bubble after final preparation.")
            return "Mentions: No new ones to upload. ", token_state
    except ValueError as ve:
        logging.error(f"ValueError during mentions sync: {ve}")
        return f"Mentions Error: {html.escape(str(ve))}. ", token_state
    except Exception:
        logging.exception("Unexpected error in sync_linkedin_mentions.")
        return "Mentions: Unexpected error. ", token_state

def guarded_fetch_posts_and_mentions(token_state):
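    """
    Syncs new LinkedIn posts (with stats and comment sentiment) to Bubble, then delegates
    mention syncing to sync_linkedin_mentions, refreshes the cached DataFrames in state,
    and returns an HTML status message plus the updated token state.
    """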
logging.info("Starting guarded_fetch_posts_and_mentions process.") | |
if not token_state or not token_state.get("token"): | |
logging.error("Access denied. No LinkedIn token available.") | |
return "<p style='color:red; text-align:center;'>β Access denied. LinkedIn token not available.</p>", token_state | |
client_id = token_state.get("client_id") | |
token_dict = token_state.get("token") | |
org_urn = token_state.get('org_urn') | |
fetch_count_for_posts_api = token_state.get('fetch_count_for_api', 0) | |
bubble_posts_df = token_state.get("bubble_posts_df", pd.DataFrame()) | |
posts_sync_message = "" | |
if not org_urn: return "<p style='color:red;'>β Config error: Org URN missing.</p>", token_state | |
if not client_id or client_id == "ENV VAR MISSING": return "<p style='color:red;'>β Config error: Client ID missing.</p>", token_state | |
if fetch_count_for_posts_api == 0: | |
posts_sync_message = "Posts: Already up-to-date. " | |
else: | |
try: | |
logging.info(f"Fetching {fetch_count_for_posts_api} core posts for org_urn: {org_urn}.") | |
processed_raw_posts, stats_map, _ = fetch_linkedin_posts_core(client_id, token_dict, org_urn, count=fetch_count_for_posts_api) | |
if not processed_raw_posts: posts_sync_message = "Posts: None found via API. " | |
else: | |
existing_post_urns = set() | |
if not bubble_posts_df.empty and BUBBLE_POST_URN_COLUMN_NAME in bubble_posts_df.columns: | |
existing_post_urns = set(bubble_posts_df[BUBBLE_POST_URN_COLUMN_NAME].dropna().astype(str)) | |
new_raw_posts = [p for p in processed_raw_posts if str(p.get(LINKEDIN_POST_URN_KEY)) not in existing_post_urns] | |
if not new_raw_posts: posts_sync_message = "Posts: All fetched already in Bubble. " | |
else: | |
post_urns_to_process = [p[LINKEDIN_POST_URN_KEY] for p in new_raw_posts if p.get(LINKEDIN_POST_URN_KEY)] | |
all_comments_data = fetch_comments(client_id, token_dict, post_urns_to_process, stats_map) | |
sentiments_per_post = analyze_sentiment(all_comments_data) | |
detailed_new_posts = compile_detailed_posts(new_raw_posts, stats_map, sentiments_per_post) | |
li_posts, li_post_stats, li_post_comments = prepare_data_for_bubble(detailed_new_posts, all_comments_data) | |
if li_posts: | |
bulk_upload_to_bubble(li_posts, "LI_posts") | |
if li_post_stats: bulk_upload_to_bubble(li_post_stats, "LI_post_stats") | |
if li_post_comments: bulk_upload_to_bubble(li_post_comments, "LI_post_comments") | |
posts_sync_message = f"Posts: Synced {len(li_posts)} new. " | |
else: posts_sync_message = "Posts: No new ones to upload. " | |
except ValueError as ve: posts_sync_message = f"Posts Error: {html.escape(str(ve))}. " | |
except Exception: logging.exception("Posts processing error."); posts_sync_message = "Posts: Unexpected error. " | |
mentions_sync_message, updated_token_state = sync_linkedin_mentions(token_state) | |
token_state = updated_token_state # Ensure state is updated after mentions sync | |
# Re-fetch data from Bubble to update DataFrames in state for immediate display refresh | |
if org_urn: | |
try: | |
fetched_posts_df, _ = fetch_linkedin_posts_data_from_bubble(org_urn, "LI_posts") | |
token_state["bubble_posts_df"] = pd.DataFrame() if fetched_posts_df is None else fetched_posts_df | |
fetched_mentions_df, _ = fetch_linkedin_posts_data_from_bubble(org_urn, BUBBLE_MENTIONS_TABLE_NAME) | |
token_state["bubble_mentions_df"] = pd.DataFrame() if fetched_mentions_df is None else fetched_mentions_df | |
logging.info("Refreshed posts and mentions DataFrames in state from Bubble after sync.") | |
except Exception as e: | |
logging.error(f"Error re-fetching data from Bubble post-sync: {e}") | |
final_message = f"<p style='color:green; text-align:center;'>β Sync Attempted. {posts_sync_message} {mentions_sync_message}</p>" | |
return final_message, token_state | |

def display_main_dashboard(token_state):
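    """Renders a simple HTML overview of the posts and mentions currently cached from Bubble."""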
    if not token_state or not token_state.get("token"):
        return "❌ Access denied. No token available for dashboard."

    posts_df = token_state.get("bubble_posts_df", pd.DataFrame())
    posts_html = f"<h4>Recent Posts ({len(posts_df)} in Bubble):</h4>"
    if not posts_df.empty:
        cols_to_show_posts = [col for col in [BUBBLE_POST_DATE_COLUMN_NAME, 'text', 'sentiment'] if col in posts_df.columns]  # Example columns
        posts_html += posts_df[cols_to_show_posts].head().to_html(escape=True, index=False, classes="table table-striped table-sm") if cols_to_show_posts else "<p>No post data to display or columns missing.</p>"
    else:
        posts_html += "<p>No posts loaded from Bubble.</p>"

    mentions_df = token_state.get("bubble_mentions_df", pd.DataFrame())
    mentions_html = f"<h4>Recent Mentions ({len(mentions_df)} in Bubble):</h4>"
    if not mentions_df.empty:
        # Using the exact column names as defined for Bubble upload: date, id, mention_text, organization_urn, sentiment_label
        cols_to_show_mentions = [col for col in ["date", "mention_text", "sentiment_label"] if col in mentions_df.columns]
        mentions_html += mentions_df[cols_to_show_mentions].head().to_html(escape=True, index=False, classes="table table-striped table-sm") if cols_to_show_mentions else "<p>No mention data to display or columns missing.</p>"
    else:
        mentions_html += "<p>No mentions loaded from Bubble.</p>"

    return f"<div style='padding:10px;'><h3>Dashboard Overview</h3>{posts_html}<hr/>{mentions_html}</div>"

def guarded_fetch_analytics(token_state):
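    """Guards the analytics fetch behind token availability, then delegates to fetch_and_render_analytics."""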
    if not token_state or not token_state.get("token"):
        return ("❌ Access denied. No token.", None, None, None, None, None, None, None)
    return fetch_and_render_analytics(token_state.get("client_id"), token_state.get("token"), token_state.get("org_urn"))

def run_mentions_tab_display(token_state):
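    """Builds the Mentions tab output: an HTML table of recent mentions and a sentiment bar chart figure."""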
logging.info("Updating Mentions Tab display.") | |
if not token_state or not token_state.get("token"): | |
return ("β Access denied. No token available for mentions.", None) | |
mentions_df = token_state.get("bubble_mentions_df", pd.DataFrame()) | |
if mentions_df.empty: | |
return ("<p style='text-align:center;'>No mentions data in Bubble. Try syncing.</p>", None) | |
html_parts = ["<h3 style='text-align:center;'>Recent Mentions</h3>"] | |
# Columns expected from Bubble: date, id, mention_text, organization_urn, sentiment_label | |
display_columns = [col for col in ["date", "mention_text", "sentiment_label", "id"] if col in mentions_df.columns] | |
if not display_columns: | |
html_parts.append("<p>Required columns for mentions display are missing from Bubble data.</p>") | |
else: | |
mentions_df_sorted = mentions_df.sort_values(by="date", ascending=False, errors='coerce') if "date" in display_columns else mentions_df | |
html_parts.append(mentions_df_sorted[display_columns].head(10).to_html(escape=True, index=False, classes="table table-sm")) | |
mentions_html_output = "\n".join(html_parts) | |
fig = None | |
if not mentions_df.empty and "sentiment_label" in mentions_df.columns: | |
try: | |
import matplotlib.pyplot as plt | |
import io, base64 | |
plt.switch_backend('Agg') # Ensure non-interactive backend for server use | |
fig_plot, ax = plt.subplots(figsize=(6,4)) | |
sentiment_counts = mentions_df["sentiment_label"].value_counts() | |
sentiment_counts.plot(kind='bar', ax=ax) | |
ax.set_title("Mention Sentiment Distribution") | |
ax.set_ylabel("Count") | |
plt.xticks(rotation=45, ha='right') | |
plt.tight_layout() | |
fig = fig_plot # Return the figure object for Gradio plot component | |
except Exception as e: | |
logging.error(f"Error generating mentions plot: {e}"); fig = None | |
return mentions_html_output, fig | |

# --- Gradio UI Blocks ---
with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue", secondary_hue="sky"),
               title="LinkedIn Organization Post Viewer & Analytics") as app:
    token_state = gr.State(value={
        "token": None, "client_id": None, "org_urn": None,
        "bubble_posts_df": pd.DataFrame(), "fetch_count_for_api": 0,
        "bubble_mentions_df": pd.DataFrame(), "fetch_count_for_mentions_api": 0,
        "url_user_token_temp_storage": None
    })

    gr.Markdown("# 🚀 LinkedIn Organization Post Viewer & Analytics")
    url_user_token_display = gr.Textbox(label="User Token (from URL - Hidden)", interactive=False, visible=False)
    status_box = gr.Textbox(label="Overall LinkedIn Token Status", interactive=False, value="Initializing...")
    org_urn_display = gr.Textbox(label="Organization URN (from URL - Hidden)", interactive=False, visible=False)

    app.load(fn=get_url_user_token, inputs=None, outputs=[url_user_token_display, org_urn_display])

    # Chain initial processing and dashboard display
    def initial_load_sequence(url_token, org_urn_val, current_state):
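        """Processes the URL token, loads existing Bubble data, and renders the initial dashboard content."""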
        status_msg, new_state, btn_update = process_and_store_bubble_token(url_token, org_urn_val, current_state)
        dashboard_content = display_main_dashboard(new_state)
        return status_msg, new_state, btn_update, dashboard_content

    with gr.Tabs():
        with gr.TabItem("1️⃣ Dashboard & Sync"):
            gr.Markdown("System checks for existing data. Button activates if new posts/mentions need fetching.")
            sync_data_btn = gr.Button("🔄 Sync LinkedIn Data", variant="primary", visible=False, interactive=False)
            dashboard_html_output = gr.HTML("<p style='text-align:center;'>Initializing...</p>")

            # Trigger initial load when org_urn (from URL) is available
            org_urn_display.change(
                fn=initial_load_sequence,
                inputs=[url_user_token_display, org_urn_display, token_state],
                outputs=[status_box, token_state, sync_data_btn, dashboard_html_output]
            )
            # Also allow re-processing if user token changes (e.g. manual input if that was a feature)
            # url_user_token_display.change(...)

            sync_data_btn.click(
                fn=guarded_fetch_posts_and_mentions,
                inputs=[token_state],
                outputs=[dashboard_html_output, token_state]
            ).then(
                fn=process_and_store_bubble_token,
                inputs=[url_user_token_display, org_urn_display, token_state],
                outputs=[status_box, token_state, sync_data_btn]
            ).then(
                fn=display_main_dashboard,
                inputs=[token_state],
                outputs=[dashboard_html_output]
            )
with gr.TabItem("2οΈβ£ Analytics"): | |
fetch_analytics_btn = gr.Button("π Fetch Follower Analytics", variant="primary") | |
follower_count = gr.Markdown("Waiting for token...") | |
with gr.Row(): follower_plot, growth_plot = gr.Plot(), gr.Plot() | |
with gr.Row(): eng_rate_plot = gr.Plot() | |
with gr.Row(): interaction_plot = gr.Plot() | |
with gr.Row(): eb_plot = gr.Plot() | |
with gr.Row(): mentions_vol_plot, mentions_sentiment_plot = gr.Plot(), gr.Plot() | |
fetch_analytics_btn.click( | |
fn=guarded_fetch_analytics, inputs=[token_state], | |
outputs=[follower_count, follower_plot, growth_plot, eng_rate_plot, | |
interaction_plot, eb_plot, mentions_vol_plot, mentions_sentiment_plot] | |
) | |
with gr.TabItem("3οΈβ£ Mentions"): | |
refresh_mentions_display_btn = gr.Button("π Refresh Mentions Display", variant="secondary") | |
mentions_html = gr.HTML("Mentions data loads from Bubble after sync.") | |
mentions_plot = gr.Plot() | |
refresh_mentions_display_btn.click( | |
fn=run_mentions_tab_display, inputs=[token_state], | |
outputs=[mentions_html, mentions_plot] | |
) | |
app.load(fn=lambda ts: check_token_status(ts), inputs=[token_state], outputs=status_box) | |
gr.Timer(15.0).tick(fn=lambda ts: check_token_status(ts), inputs=[token_state], outputs=status_box) | |
if __name__ == "__main__": | |
if not os.environ.get("Linkedin_client_id"): | |
logging.warning("WARNING: 'Linkedin_client_id' env var not set.") | |
app.launch(server_name="0.0.0.0", server_port=7860) | |