import re
import time
import logging
from datetime import datetime
from collections import defaultdict
from urllib.parse import quote

import matplotlib.pyplot as plt
from transformers import pipeline

from sessions import create_session

logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')

# Load the transformer-based sentiment model once at import time so repeated
# calls to classify_sentiment() reuse the same pipeline.
sentiment_pipeline = pipeline("text-classification", model="tabularisai/multilingual-sentiment-analysis")


def extract_text_from_commentary(commentary):
    """Strip LinkedIn annotation templates such as {hashtag|\\#|tag} from raw commentary."""
    return re.sub(r"{.*?}", "", commentary).strip()


def classify_sentiment(text):
    """Map the model's five-class output onto three display buckets."""
    try:
        # Truncate to 512 chars as a conservative bound on the model's input length.
        result = sentiment_pipeline(text[:512])
        label = result[0]['label'].upper()
        if label in ['POSITIVE', 'VERY POSITIVE']:
            return 'Positive 👍'
        elif label in ['NEGATIVE', 'VERY NEGATIVE']:
            return 'Negative 👎'
        elif label == 'NEUTRAL':
            return 'Neutral 😐'
        else:
            return 'Unknown'
    except Exception as e:
        logging.error(f"Sentiment classification failed: {e}")
        return 'Error'


def generate_mentions_dashboard(comm_client_id, comm_token_dict):
    org_urn = "urn:li:organization:19010008"
    encoded_urn = quote(org_urn, safe='')

    session = create_session(comm_client_id, token=comm_token_dict)
    session.headers.update({
        "X-Restli-Protocol-Version": "2.0.0"
    })

    base_url = (
        "https://api.linkedin.com/rest/organizationalEntityNotifications"
        "?q=criteria"
        "&actions=List(COMMENT,SHARE_MENTION)"
        f"&organizationalEntity={encoded_urn}"
        "&count=20"
    )

    # Page through all notifications for the organization.
    all_notifications = []
    start = 0
    while True:
        url = f"{base_url}&start={start}"
        resp = session.get(url)
        if resp.status_code != 200:
            logging.error(f"❌ Error fetching notifications: {resp.status_code} - {resp.text}")
            break

        data = resp.json()
        elements = data.get("elements", [])
        all_notifications.extend(elements)

        # Stop on an empty page (guards against looping forever when the API
        # returns no elements) or on a short final page; otherwise advance the
        # cursor and throttle between requests.
        if not elements or len(elements) < data.get("paging", {}).get("count", 0):
            break
        start += len(elements)
        time.sleep(0.5)

    logging.info(f"Fetched {len(all_notifications)} total notifications.")

    mention_shares = [
        e.get("generatedActivity")
        for e in all_notifications
        if e.get("action") == "SHARE_MENTION"
    ]
    mention_data = []

    for share_urn in mention_shares:
        if not share_urn:
            continue

        encoded_share_urn = quote(share_urn, safe='')
        share_url = f"https://api.linkedin.com/rest/posts/{encoded_share_urn}"

        response = session.get(share_url)
        if response.status_code != 200:
            continue

        post = response.json()
        commentary_raw = post.get("commentary", "")
        if not commentary_raw:
            continue

        commentary = extract_text_from_commentary(commentary_raw)
        sentiment_label = classify_sentiment(commentary)
        timestamp = post.get("createdAt", 0)  # LinkedIn timestamps are epoch milliseconds
        dt = datetime.fromtimestamp(timestamp / 1000.0)

        mention_data.append({
            "date": dt,
            "text": commentary,
            "sentiment": sentiment_label
        })

    # --- HTML rendering ---
    # The original markup was lost in this copy; the wrapper and the cards
    # below are a minimal reconstruction around the surviving template fields.
    html_parts = [
        "<div class='mentions-dashboard'>"
    ]
    for mention in mention_data:
        # Truncate long posts for the card view.
        text = mention["text"]
        short_text = text[:200] + "..." if len(text) > 200 else text
        html_parts.append(
            f"""<div class="mention-card">
              <div>📅 Date: {mention["date"].strftime('%Y-%m-%d')}</div>
              <div>{short_text}</div>
              <div>Sentiment: {mention["sentiment"]}</div>
            </div>"""
        )
    html_parts.append("</div>")
    mentions_html = "\n".join(html_parts)
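    # --- Sentiment-over-time chart ---
    # The matplotlib and defaultdict imports at the top suggest the dashboard
    # also plots mention sentiment by day, but that code is not present in
    # this copy. A minimal sketch, assuming daily counts rendered as a stacked
    # bar chart; the (html, figure) return shape is likewise an assumption.
    counts_by_day = defaultdict(lambda: defaultdict(int))
    for mention in mention_data:
        day = mention["date"].strftime('%Y-%m-%d')
        counts_by_day[day][mention["sentiment"]] += 1

    days = sorted(counts_by_day)
    fig, ax = plt.subplots(figsize=(10, 4))
    bottom = [0] * len(days)
    for sentiment in ['Positive 👍', 'Neutral 😐', 'Negative 👎']:
        values = [counts_by_day[d].get(sentiment, 0) for d in days]
        ax.bar(days, values, bottom=bottom, label=sentiment)
        bottom = [b + v for b, v in zip(bottom, values)]
    ax.set_xlabel("Date")
    ax.set_ylabel("Mentions")
    ax.set_title("LinkedIn mentions by sentiment")
    ax.legend()
    fig.autofmt_xdate()

    return mentions_html, fig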
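
# Example invocation (hypothetical placeholders: the real client id and token
# format depend on the sessions module, which is not shown here, and the
# (html, fig) return shape follows the sketch above).
if __name__ == "__main__":
    demo_client_id = "YOUR_CLIENT_ID"            # placeholder
    demo_token = {"access_token": "YOUR_TOKEN"}  # placeholder
    html, fig = generate_mentions_dashboard(demo_client_id, demo_token)
    with open("mentions_dashboard.html", "w", encoding="utf-8") as f:
        f.write(html)
    fig.savefig("mentions_sentiment.png")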