import logging
import re
import time
from collections import defaultdict
from datetime import datetime
from urllib.parse import quote

from matplotlib.figure import Figure
from transformers import pipeline

from sessions import create_session

logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')

# Load the transformer-based sentiment model once at import time so every call reuses it
sentiment_pipeline = pipeline("text-classification", model="tabularisai/multilingual-sentiment-analysis")
def extract_text_from_commentary(commentary):
    """Strip curly-brace annotations from a post's commentary, leaving plain text."""
    return re.sub(r"{.*?}", "", commentary).strip()
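
# Illustrative behaviour only (the exact annotation syntax inside the braces is an
# assumption, not taken from the LinkedIn docs): an input such as
# "Thanks {mention} for the shout-out!" becomes "Thanks  for the shout-out!" --
# each {...} block is removed wholesale and only leading/trailing whitespace is trimmed.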
def classify_sentiment(text):
    try:
        # Truncate long posts as a rough guard against the model's maximum sequence length
        result = sentiment_pipeline(text[:512])
        label = result[0]['label'].upper()
        if label in ['POSITIVE', 'VERY POSITIVE']:
            return 'Positive'
        elif label in ['NEGATIVE', 'VERY NEGATIVE']:
            return 'Negative'
        elif label == 'NEUTRAL':
            return 'Neutral'
        else:
            return 'Unknown'
    except Exception as e:
        logging.error(f"Sentiment classification failed: {e}")
        return 'Error'
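
# Note (assumption based on the usual transformers text-classification contract): the
# pipeline returns a list like [{'label': 'Very Positive', 'score': 0.93}]. The label is
# upper-cased above so the comparisons do not depend on the model card's exact casing.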
def generate_mentions_dashboard(comm_client_id, comm_token_dict):
    """Fetch SHARE_MENTION notifications for the organization, classify the sentiment of
    each mentioning post, and return (html_content, matplotlib Figure, mention_data)."""
    org_urn = "urn:li:organization:19010008"
    encoded_urn = quote(org_urn, safe='')

    session = create_session(comm_client_id, token=comm_token_dict)
    session.headers.update({
        "X-Restli-Protocol-Version": "2.0.0"
    })

    base_url = (
        "https://api.linkedin.com/rest/organizationalEntityNotifications"
        "?q=criteria"
        "&actions=List(COMMENT,SHARE_MENTION)"
        f"&organizationalEntity={encoded_urn}"
        "&count=20"
    )
    all_notifications = []
    start = 0

    while True:
        url = f"{base_url}&start={start}"
        resp = session.get(url)
        if resp.status_code != 200:
            logging.error(f"Error fetching notifications: {resp.status_code} - {resp.text}")
            break

        data = resp.json()
        elements = data.get("elements", [])
        all_notifications.extend(elements)

        # Stop when the API returns an empty or short page; otherwise advance the offset.
        if not elements or len(elements) < data.get("paging", {}).get("count", 20):
            break
        start += len(elements)
        time.sleep(0.5)
    mention_shares = [e.get("generatedActivity") for e in all_notifications if e.get("action") == "SHARE_MENTION"]
    mention_data = []
    logging.info(f"Fetched {len(all_notifications)} total notifications.")

    for share_urn in mention_shares:
        if not share_urn:
            continue

        encoded_share_urn = quote(share_urn, safe='')
        share_url = f"https://api.linkedin.com/rest/posts/{encoded_share_urn}"
        response = session.get(share_url)
        if response.status_code != 200:
            continue

        post = response.json()
        commentary_raw = post.get("commentary", "")
        if not commentary_raw:
            continue

        commentary = extract_text_from_commentary(commentary_raw)
        sentiment_label = classify_sentiment(commentary)
        timestamp = post.get("createdAt", 0)
        dt = datetime.fromtimestamp(timestamp / 1000.0)

        mention_data.append({
            "date": dt,
            "text": commentary,
            "sentiment": sentiment_label
        })
    # --- HTML rendering ---
    html_parts = [
        "<h2 style='text-align:center;'>Mentions Sentiment Dashboard</h2>"
    ]

    for mention in mention_data:
        short_text = (mention["text"][:200] + "…") if len(mention["text"]) > 200 else mention["text"]
        html_parts.append(f"""
        <div style='border:1px solid #ddd; border-radius:12px; padding:15px; margin:15px; box-shadow:2px 2px 8px rgba(0,0,0,0.05); background:#fafafa;'>
            <p><strong>Date:</strong> {mention["date"].strftime('%Y-%m-%d')}</p>
            <p style='color:#333;'>{short_text}</p>
            <p><strong>Sentiment:</strong> {mention["sentiment"]}</p>
        </div>
        """)

    html_content = "\n".join(html_parts)
    # --- Plotting ---
    fig = Figure(figsize=(12, 6))
    ax = fig.subplots()
    fig.subplots_adjust(bottom=0.2)

    if mention_data:
        # Sort by date so the line plot reads chronologically
        mention_data.sort(key=lambda x: x["date"])
        date_labels = [m["date"].strftime('%Y-%m-%d') for m in mention_data]
        sentiment_scores = [1 if m["sentiment"] == "Positive" else
                            -1 if m["sentiment"] == "Negative" else
                            0 for m in mention_data]

        ax.plot(date_labels, sentiment_scores, marker='o', linestyle='-', color='#0073b1')
        ax.set_title("Mention Sentiment Over Time")
        ax.set_xlabel("Date")
        ax.set_ylabel("Sentiment Score (1 = Positive, 0 = Neutral, -1 = Negative)")
        ax.tick_params(axis='x', rotation=45)
        ax.grid(True, linestyle='--', alpha=0.6)
        ax.set_ylim([-1.2, 1.2])
    else:
        ax.text(0.5, 0.5, "No mention sentiment data available.",
                ha='center', va='center', transform=ax.transAxes, fontsize=12, color='grey')
        ax.set_xticks([])
        ax.set_yticks([])
        ax.set_title("Mention Sentiment Over Time")

    return html_content, fig, mention_data
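
# --- Hedged usage sketch (illustrative, not part of the original module) ---
# Assumes the dashboard is rendered from a Gradio Space and that the caller already holds
# a LinkedIn client id plus an OAuth token dict accepted by sessions.create_session;
# COMM_CLIENT_ID and COMM_TOKEN_DICT are placeholder names, not values from this repo.
#
#   import gradio as gr
#
#   html, fig, mentions = generate_mentions_dashboard(COMM_CLIENT_ID, COMM_TOKEN_DICT)
#   with gr.Blocks() as demo:
#       gr.HTML(html)
#       gr.Plot(fig)
#   demo.launch()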