GuglielmoTor committed on
Commit 517193e · verified · 1 Parent(s): a342a6b

Update linkedin_follower_stats.py

Files changed (1)
  1. linkedin_follower_stats.py +83 -60
linkedin_follower_stats.py CHANGED
@@ -24,8 +24,6 @@ def _fetch_linkedin_names(session, url, params, result_key_path, name_key_path,
24
  Generic helper to fetch and map IDs to names from a LinkedIn API endpoint.
25
  result_key_path: list of keys to navigate to the list of items (e.g., ["elements"])
26
  name_key_path: list of keys to navigate to the name within an item (e.g., ["name", "localized", "en_US"])
27
-
28
- Revised: Removed locale_needed parameter; calling functions should provide locale in params if required.
29
  """
30
  mapping = {}
31
  try:
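Reviewer note: a minimal, self-contained sketch of how the result_key_path / name_key_path navigation described in the docstring behaves, using a made-up /functions-style payload (the ids and names are illustrative, not real LinkedIn values):

sample_response = {
    "elements": [
        {"id": 1, "name": {"localized": {"en_US": "Accounting"}}},
        {"id": 4, "name": {"localized": {"en_US": "Business Development"}}},
    ]
}

items = sample_response
for key in ["elements"]:                              # result_key_path
    items = items.get(key, [])

mapping = {}
for item in items:
    name = item
    for key_nav in ["name", "localized", "en_US"]:    # name_key_path
        name = name.get(key_nav) if isinstance(name, dict) else None
        if name is None:
            break
    if item.get("id") is not None and name:
        mapping[str(item["id"])] = name               # IDs stored as strings, as in the helper

print(mapping)  # {'1': 'Accounting', '4': 'Business Development'}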
@@ -37,19 +35,19 @@ def _fetch_linkedin_names(session, url, params, result_key_path, name_key_path,
37
  items = data
38
  for key in result_key_path: # Navigate to the list/dict of items
39
  if isinstance(items, dict):
40
- items = items.get(key, []) # Default to empty list if key not found
41
- else: # If items is already not a dict
42
  logging.warning(f"Expected dict to get key '{key}' but got {type(items)} at path {result_key_path} for URL {url}. Check result_key_path.")
43
- return mapping # Cannot proceed with this path
44
 
45
  if isinstance(items, dict): # For batch responses like geo/industry (where keys are IDs)
46
  for item_id_str, item_data in items.items():
47
  name = item_data
48
- for key_nav in name_key_path: # Navigate to the name string
49
  if isinstance(name, dict):
50
  name = name.get(key_nav)
51
  else:
52
- name = None # Path broken
53
  break
54
  if name:
55
  mapping[item_id_str] = name
@@ -59,14 +57,14 @@ def _fetch_linkedin_names(session, url, params, result_key_path, name_key_path,
59
  for item in items:
60
  item_id_val = item.get(id_key)
61
  name = item
62
- for key_nav in name_key_path: # Navigate to the name string
63
  if isinstance(name, dict):
64
  name = name.get(key_nav)
65
  else:
66
- name = None # Path broken
67
  break
68
  if item_id_val is not None and name:
69
- mapping[str(item_id_val)] = name # Ensure ID is string for consistency
70
  else:
71
  logging.warning(f"No ID ('{id_key}') or name found at path {name_key_path} in item: {item} from URL {url}")
72
  else:
@@ -85,14 +83,16 @@ def _fetch_linkedin_names(session, url, params, result_key_path, name_key_path,
85
  def get_functions_map(session):
86
  """Fetches all LinkedIn functions and returns a map of {id: name}."""
87
  url = f"{API_V2_BASE}/functions"
88
- params = {'locale': 'en_US'}
 
89
  logging.info("Fetching all LinkedIn functions.")
90
  return _fetch_linkedin_names(session, url, params, ["elements"], ["name", "localized", "en_US"], "id")
91
 
92
  def get_seniorities_map(session):
93
  """Fetches all LinkedIn seniorities and returns a map of {id: name}."""
94
  url = f"{API_V2_BASE}/seniorities"
95
- params = {'locale': 'en_US'}
 
96
  logging.info("Fetching all LinkedIn seniorities.")
97
  return _fetch_linkedin_names(session, url, params, ["elements"], ["name", "localized", "en_US"], "id")
98
 
@@ -100,13 +100,11 @@ def get_industries_map(session, industry_urns, version="DEFAULT"):
100
  """Fetches names for a list of industry URNs. Returns a map {id: name}."""
101
  if not industry_urns: return {}
102
  industry_ids = [_parse_urn_to_id(urn) for urn in industry_urns if urn]
103
- unique_ids = list(set(filter(None, industry_ids))) # Filter out None IDs from parsing
104
  if not unique_ids: return {}
105
 
106
  url = f"{API_V2_BASE}/industryTaxonomyVersions/{version}/industries"
107
- # LinkedIn API for batch industries expects ids as repeated query parameters: ids=1&ids=23
108
- # The requests library handles lists in params by creating repeated query parameters.
109
- params = {'ids': unique_ids, 'locale.language': 'en', 'locale.country': 'US'}
110
  logging.info(f"Fetching names for {len(unique_ids)} unique industry IDs.")
111
  return _fetch_linkedin_names(session, url, params, ["results"], ["name", "localized", "en_US"])
112
 
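Reviewer note: the removed comment above leaned on requests' handling of list-valued params; a quick standalone check of that behaviour (the base URL mirrors what API_V2_BASE presumably expands to and is only illustrative):

import requests

req = requests.Request(
    "GET",
    "https://api.linkedin.com/v2/industryTaxonomyVersions/DEFAULT/industries",
    params={"ids": [1, 23]},
)
print(req.prepare().url)
# .../industries?ids=1&ids=23  (list values become repeated query parameters)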
@@ -118,24 +116,21 @@ def get_geo_map(session, geo_urns):
118
  unique_ids = list(set(filter(None, geo_ids)))
119
  if not unique_ids: return {}
120
 
121
- # API expects ids=List(123,456) format in query string.
122
- ids_param_value = "List(" + ",".join(map(str,unique_ids)) + ")" # Ensure IDs are strings
123
- # Parameters are embedded in the URL for this specific format
124
- # Note: locale params are added here directly as part of the URL construction for this specific endpoint style.
125
- url = f"{API_V2_BASE}/geo?ids={quote(ids_param_value)}&locale.language=en&locale.country=US"
126
  logging.info(f"Fetching names for {len(unique_ids)} unique geo IDs using URL: {url}")
127
- # Params dict is empty as all params are in the URL string for this call.
128
  return _fetch_linkedin_names(session, url, {}, ["results"], ["defaultLocalizedName", "value"])
129
 
130
 
131
  def _parse_urn_to_id(urn_string):
132
  """Helper to get the last part (ID) from a URN string."""
133
  if not isinstance(urn_string, str):
134
- logging.warning(f"Invalid URN type: {type(urn_string)}, value: {urn_string}")
135
  return None
136
  try:
137
  return urn_string.split(':')[-1]
138
- except IndexError: # Handle cases where split doesn't yield enough parts
139
  logging.warning(f"Could not parse ID from URN: {urn_string}")
140
  return None
141
  except Exception as e:
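Reviewer note: two tiny checks of the behaviour this hunk relies on, the Restli-style List(...) id parameter that get_geo_map builds and the URN-to-id split in _parse_urn_to_id (the geo ids and URN below are placeholders):

from urllib.parse import quote

unique_ids = ["103644278", "101174742"]
ids_param_value = "List(" + ",".join(map(str, unique_ids)) + ")"
print(quote(ids_param_value))                 # List%28103644278%2C101174742%29 once percent-encoded

print("urn:li:geo:103644278".split(":")[-1])  # '103644278', i.e. what _parse_urn_to_id returns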
@@ -150,7 +145,6 @@ def fetch_monthly_follower_gains(session, org_urn):
150
  """
151
  results = []
152
  now = datetime.now(timezone.utc)
153
- # Go back 13 months to ensure we capture at least 12 full previous months
154
  thirteen_months_ago = now - relativedelta(months=13)
155
  start_of_period = thirteen_months_ago.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
156
  start_ms = int(start_of_period.timestamp() * 1000)
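Reviewer note: a standalone check of the reporting-window arithmetic above (13 months back, snapped to the first of the month, expressed as epoch milliseconds); it only needs dateutil, which the module already uses:

from datetime import datetime, timezone
from dateutil.relativedelta import relativedelta

now = datetime.now(timezone.utc)
start_of_period = (now - relativedelta(months=13)).replace(
    day=1, hour=0, minute=0, second=0, microsecond=0
)
start_ms = int(start_of_period.timestamp() * 1000)
print(start_of_period.isoformat(), start_ms)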
@@ -159,8 +153,8 @@ def fetch_monthly_follower_gains(session, org_urn):
159
  f"{API_REST_BASE}/organizationalEntityFollowerStatistics"
160
  f"?q=organizationalEntity"
161
  f"&organizationalEntity={quote(org_urn)}"
162
- f"&timeIntervals.timeGranularityType=MONTH"
163
- f"&timeIntervals.timeRange.start={start_ms}"
164
  )
165
  logging.info(f"Fetching monthly follower gains from: {url}")
166
 
@@ -177,7 +171,7 @@ def fetch_monthly_follower_gains(session, org_urn):
177
  continue
178
 
179
  date_obj = datetime.fromtimestamp(start_timestamp_ms / 1000, tz=timezone.utc)
180
- date_str = date_obj.strftime('%Y-%m-%d') # First day of the month
181
 
182
  follower_gains = item.get("followerGains", {})
183
  organic_gain = follower_gains.get("organicFollowerGain", 0)
@@ -188,7 +182,7 @@ def fetch_monthly_follower_gains(session, org_urn):
188
  "follower_count_organic": organic_gain,
189
  "follower_count_paid": paid_gain,
190
  "follower_count_type": "follower_gains_monthly",
191
- "organization_urn": org_urn # Add org_urn for consistency
192
  })
193
  logging.info(f"Fetched {len(results)} monthly follower gain entries for org URN {org_urn}.")
194
  except requests.exceptions.RequestException as e:
@@ -204,9 +198,9 @@ def fetch_monthly_follower_gains(session, org_urn):
204
 
205
  def fetch_follower_demographics(session, org_urn, functions_map, seniorities_map):
206
  """
207
- Fetches current follower demographics (seniority, industry, function, geo, association).
208
  """
209
- results = []
210
  url = (
211
  f"{API_REST_BASE}/organizationalEntityFollowerStatistics"
212
  f"?q=organizationalEntity&organizationalEntity={quote(org_urn)}"
@@ -225,44 +219,69 @@ def fetch_follower_demographics(session, org_urn, functions_map, seniorities_map
225
 
226
  stat_element = elements[0] # Data is usually in the first element
227
 
228
- # Collect URNs for batch mapping
229
- industry_urns_to_map = [item.get("industry") for item in stat_element.get("followerCountsByIndustry", []) if item.get("industry")]
230
- geo_urns_to_map = [item.get("geo") for item in stat_element.get("followerCountsByGeoCountry", []) if item.get("geo")]
231
-
232
- industries_map = get_industries_map(session, industry_urns_to_map)
233
- geo_map = get_geo_map(session, geo_urns_to_map)
234
-
235
- # Helper to create demographic entries
236
- def _add_demographic_entry(items_list, type_name, id_map, id_field_name, org_urn_val):
237
- if not items_list:
238
- logging.info(f"No items found for demographic type '{type_name}' for org {org_urn_val}.")
239
- return
240
 
241
- for item in items_list:
242
  category_name_val = "Unknown"
243
- if type_name == "follower_association": # associationType is directly the name
244
- category_name_val = item.get("associationType", f"Unknown AssociationType")
 
245
  else: # For URN-based categories
246
- urn_val = item.get(id_field_name)
247
  entity_id = _parse_urn_to_id(urn_val)
 
248
  category_name_val = id_map.get(str(entity_id), f"Unknown {type_name.split('_')[-1].capitalize()} (ID: {entity_id if entity_id else urn_val})")
249
 
250
  counts = item.get("followerCounts", {})
251
- results.append({
252
  "category_name": category_name_val,
253
- "follower_count_organic": counts.get("organicFollowerCount", 0),
254
- "follower_count_paid": counts.get("paidFollowerCount", 0),
255
  "follower_count_type": type_name,
256
  "organization_urn": org_urn_val
257
  })
 
258
 
259
- _add_demographic_entry(stat_element.get("followerCountsByAssociationType", []), "follower_association", {}, "associationType", org_urn)
260
- _add_demographic_entry(stat_element.get("followerCountsBySeniority", []), "follower_seniority", seniorities_map, "seniority", org_urn)
261
- _add_demographic_entry(stat_element.get("followerCountsByFunction", []), "follower_function", functions_map, "function", org_urn)
262
- _add_demographic_entry(stat_element.get("followerCountsByIndustry", []), "follower_industry", industries_map, "industry", org_urn)
263
- _add_demographic_entry(stat_element.get("followerCountsByGeoCountry", []), "follower_geo", geo_map, "geo", org_urn)
264
 
265
- logging.info(f"Processed follower demographics for {org_urn}. Total entries from this type: {len(results)}")
266
 
267
  except requests.exceptions.RequestException as e:
268
  status_code = getattr(e.response, 'status_code', 'N/A')
@@ -272,7 +291,7 @@ def fetch_follower_demographics(session, org_urn, functions_map, seniorities_map
272
  logging.error(f"Error decoding JSON for follower demographics for {org_urn}: {e}. Response: {response.text if 'response' in locals() else 'N/A'}")
273
  except Exception as e:
274
  logging.error(f"Unexpected error fetching follower demographics for {org_urn}: {e}", exc_info=True)
275
- return results
276
 
277
  # --- Main Orchestration Function ---
278
 
@@ -287,20 +306,23 @@ def get_linkedin_follower_stats(comm_client_id, community_token, org_urn):
287
 
288
  token_dict = community_token if isinstance(community_token, dict) else {'access_token': community_token, 'token_type': 'Bearer'}
289
 
290
- session = None # Initialize session to None
291
  try:
292
  session = create_session(comm_client_id, token=token_dict)
293
  session.headers.update({
294
  "X-Restli-Protocol-Version": "2.0.0",
295
- "LinkedIn-Version": LINKEDIN_API_VERSION
 
296
  })
297
  except Exception as e:
298
  logging.error(f"Failed to create session or update headers for org {org_urn}: {e}", exc_info=True)
299
- return [] # Cannot proceed without a session
300
 
301
  logging.info(f"Starting follower stats retrieval for org: {org_urn}")
302
 
303
  # These maps are fetched once per call to get_linkedin_follower_stats
304
  functions_map = get_functions_map(session)
305
  seniorities_map = get_seniorities_map(session)
306
 
@@ -312,6 +334,7 @@ def get_linkedin_follower_stats(comm_client_id, community_token, org_urn):
312
  monthly_gains = fetch_monthly_follower_gains(session, org_urn)
313
  all_follower_data.extend(monthly_gains)
314
 
 
315
  demographics = fetch_follower_demographics(session, org_urn, functions_map, seniorities_map)
316
  all_follower_data.extend(demographics)
317
 
 
24
  Generic helper to fetch and map IDs to names from a LinkedIn API endpoint.
25
  result_key_path: list of keys to navigate to the list of items (e.g., ["elements"])
26
  name_key_path: list of keys to navigate to the name within an item (e.g., ["name", "localized", "en_US"])
 
 
27
  """
28
  mapping = {}
29
  try:
 
35
  items = data
36
  for key in result_key_path: # Navigate to the list/dict of items
37
  if isinstance(items, dict):
38
+ items = items.get(key, [])
39
+ else:
40
  logging.warning(f"Expected dict to get key '{key}' but got {type(items)} at path {result_key_path} for URL {url}. Check result_key_path.")
41
+ return mapping
42
 
43
  if isinstance(items, dict): # For batch responses like geo/industry (where keys are IDs)
44
  for item_id_str, item_data in items.items():
45
  name = item_data
46
+ for key_nav in name_key_path:
47
  if isinstance(name, dict):
48
  name = name.get(key_nav)
49
  else:
50
+ name = None
51
  break
52
  if name:
53
  mapping[item_id_str] = name
 
57
  for item in items:
58
  item_id_val = item.get(id_key)
59
  name = item
60
+ for key_nav in name_key_path:
61
  if isinstance(name, dict):
62
  name = name.get(key_nav)
63
  else:
64
+ name = None
65
  break
66
  if item_id_val is not None and name:
67
+ mapping[str(item_id_val)] = name
68
  else:
69
  logging.warning(f"No ID ('{id_key}') or name found at path {name_key_path} in item: {item} from URL {url}")
70
  else:
 
83
  def get_functions_map(session):
84
  """Fetches all LinkedIn functions and returns a map of {id: name}."""
85
  url = f"{API_V2_BASE}/functions"
86
+ # Rely on Accept-Language header from session for localization
87
+ params = {}
88
  logging.info("Fetching all LinkedIn functions.")
89
  return _fetch_linkedin_names(session, url, params, ["elements"], ["name", "localized", "en_US"], "id")
90
 
91
  def get_seniorities_map(session):
92
  """Fetches all LinkedIn seniorities and returns a map of {id: name}."""
93
  url = f"{API_V2_BASE}/seniorities"
94
+ # Rely on Accept-Language header from session for localization
95
+ params = {}
96
  logging.info("Fetching all LinkedIn seniorities.")
97
  return _fetch_linkedin_names(session, url, params, ["elements"], ["name", "localized", "en_US"], "id")
98
 
 
100
  """Fetches names for a list of industry URNs. Returns a map {id: name}."""
101
  if not industry_urns: return {}
102
  industry_ids = [_parse_urn_to_id(urn) for urn in industry_urns if urn]
103
+ unique_ids = list(set(filter(None, industry_ids)))
104
  if not unique_ids: return {}
105
 
106
  url = f"{API_V2_BASE}/industryTaxonomyVersions/{version}/industries"
107
+ params = {'ids': unique_ids, 'locale': 'en_US'} # Corrected locale parameter
108
  logging.info(f"Fetching names for {len(unique_ids)} unique industry IDs.")
109
  return _fetch_linkedin_names(session, url, params, ["results"], ["name", "localized", "en_US"])
110
 
 
116
  unique_ids = list(set(filter(None, geo_ids)))
117
  if not unique_ids: return {}
118
 
119
+ ids_param_value = "List(" + ",".join(map(str,unique_ids)) + ")"
120
+ locale_param = "en_US" # Corrected locale parameter
121
+ url = f"{API_V2_BASE}/geo?ids={quote(ids_param_value)}&locale={locale_param}"
122
  logging.info(f"Fetching names for {len(unique_ids)} unique geo IDs using URL: {url}")
 
123
  return _fetch_linkedin_names(session, url, {}, ["results"], ["defaultLocalizedName", "value"])
124
 
125
 
126
  def _parse_urn_to_id(urn_string):
127
  """Helper to get the last part (ID) from a URN string."""
128
  if not isinstance(urn_string, str):
129
+ logging.debug(f"Invalid URN type: {type(urn_string)}, value: {urn_string}. Cannot parse ID.")
130
  return None
131
  try:
132
  return urn_string.split(':')[-1]
133
+ except IndexError:
134
  logging.warning(f"Could not parse ID from URN: {urn_string}")
135
  return None
136
  except Exception as e:
 
145
  """
146
  results = []
147
  now = datetime.now(timezone.utc)
 
148
  thirteen_months_ago = now - relativedelta(months=13)
149
  start_of_period = thirteen_months_ago.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
150
  start_ms = int(start_of_period.timestamp() * 1000)
 
153
  f"{API_REST_BASE}/organizationalEntityFollowerStatistics"
154
  f"?q=organizationalEntity"
155
  f"&organizationalEntity={quote(org_urn)}"
156
+ f"&timeGranularity=MONTH" # Corrected parameter name
157
+ f"&startTime={start_ms}" # Corrected parameter name
158
  )
159
  logging.info(f"Fetching monthly follower gains from: {url}")
160
 
 
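Reviewer note: with the renamed query parameters shown above, the follower-gains request takes roughly this shape; API_REST_BASE's value is assumed here and the organization URN is a placeholder:

from urllib.parse import quote

API_REST_BASE = "https://api.linkedin.com/rest"   # assumed value of the module constant
org_urn = "urn:li:organization:1234567"           # placeholder
start_ms = 1672531200000

url = (
    f"{API_REST_BASE}/organizationalEntityFollowerStatistics"
    f"?q=organizationalEntity"
    f"&organizationalEntity={quote(org_urn)}"
    f"&timeGranularity=MONTH"
    f"&startTime={start_ms}"
)
print(url)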
171
  continue
172
 
173
  date_obj = datetime.fromtimestamp(start_timestamp_ms / 1000, tz=timezone.utc)
174
+ date_str = date_obj.strftime('%Y-%m-%d')
175
 
176
  follower_gains = item.get("followerGains", {})
177
  organic_gain = follower_gains.get("organicFollowerGain", 0)
 
182
  "follower_count_organic": organic_gain,
183
  "follower_count_paid": paid_gain,
184
  "follower_count_type": "follower_gains_monthly",
185
+ "organization_urn": org_urn
186
  })
187
  logging.info(f"Fetched {len(results)} monthly follower gain entries for org URN {org_urn}.")
188
  except requests.exceptions.RequestException as e:
 
198
 
199
  def fetch_follower_demographics(session, org_urn, functions_map, seniorities_map):
200
  """
201
+ Fetches current follower demographics, applying Top-N for specified categories.
202
  """
203
+ final_demographics_results = []
204
  url = (
205
  f"{API_REST_BASE}/organizationalEntityFollowerStatistics"
206
  f"?q=organizationalEntity&organizationalEntity={quote(org_urn)}"
 
219
 
220
  stat_element = elements[0] # Data is usually in the first element
221
 
222
+ # Helper to convert raw API items for a single demographic type into our structured list
223
+ def _get_entries_for_type(raw_items_list, type_name, id_map, id_field_name_in_item, org_urn_val):
224
+ current_type_entries = []
225
+ if not raw_items_list:
226
+ logging.debug(f"No raw items for demographic type '{type_name}' for org {org_urn_val}.")
227
+ return current_type_entries
228
 
229
+ for item in raw_items_list:
230
  category_name_val = "Unknown"
231
+ # For associationType, the id_field_name_in_item is the direct name
232
+ if type_name == "follower_association":
233
+ category_name_val = item.get(id_field_name_in_item, f"Unknown {id_field_name_in_item}")
234
  else: # For URN-based categories
235
+ urn_val = item.get(id_field_name_in_item)
236
  entity_id = _parse_urn_to_id(urn_val)
237
+ # Use str(entity_id) for map lookup as map keys were stored as strings
238
  category_name_val = id_map.get(str(entity_id), f"Unknown {type_name.split('_')[-1].capitalize()} (ID: {entity_id if entity_id else urn_val})")
239
 
240
  counts = item.get("followerCounts", {})
241
+ organic_count = counts.get("organicFollowerCount", 0)
242
+ paid_count = counts.get("paidFollowerCount", 0)
243
+
244
+ current_type_entries.append({
245
  "category_name": category_name_val,
246
+ "follower_count_organic": organic_count,
247
+ "follower_count_paid": paid_count,
248
  "follower_count_type": type_name,
249
  "organization_urn": org_urn_val
250
  })
251
+ return current_type_entries
252
 
253
+ # Fetch live maps for industries and geo as they depend on URNs from the current API response
254
+ industry_urns_to_map = [item.get("industry") for item in stat_element.get("followerCountsByIndustry", []) if item.get("industry")]
255
+ geo_urns_to_map = [item.get("geo") for item in stat_element.get("followerCountsByGeoCountry", []) if item.get("geo")]
256
 
257
+ live_industries_map = get_industries_map(session, industry_urns_to_map)
258
+ live_geo_map = get_geo_map(session, geo_urns_to_map)
259
+
260
+ demographic_configs = [
261
+ {"items_key": "followerCountsBySeniority", "type_name": "follower_seniority", "id_map": seniorities_map, "id_field": "seniority", "top_n": 10},
262
+ {"items_key": "followerCountsByFunction", "type_name": "follower_function", "id_map": functions_map, "id_field": "function", "top_n": 10},
263
+ {"items_key": "followerCountsByIndustry", "type_name": "follower_industry", "id_map": live_industries_map, "id_field": "industry", "top_n": 10},
264
+ {"items_key": "followerCountsByGeoCountry", "type_name": "follower_geo", "id_map": live_geo_map, "id_field": "geo", "top_n": 10},
265
+ {"items_key": "followerCountsByAssociationType", "type_name": "follower_association", "id_map": {}, "id_field": "associationType", "top_n": None} # Keep all associations
266
+ ]
267
+
268
+ for config in demographic_configs:
269
+ raw_items = stat_element.get(config["items_key"], [])
270
+ processed_entries = _get_entries_for_type(raw_items, config["type_name"], config["id_map"], config["id_field"], org_urn)
271
+
272
+ if config["top_n"] is not None and processed_entries:
273
+ # Sort by organic follower count (ensure it's numeric)
274
+ for entry in processed_entries: # Ensure numeric for sorting
275
+ if not isinstance(entry.get("follower_count_organic"), (int, float)):
276
+ entry["follower_count_organic"] = 0
277
+ sorted_entries = sorted(processed_entries, key=lambda x: x.get("follower_count_organic", 0), reverse=True)
278
+ final_demographics_results.extend(sorted_entries[:config["top_n"]])
279
+ logging.debug(f"Added top {config['top_n']} for {config['type_name']}. Count: {len(sorted_entries[:config['top_n']])}")
280
+ else:
281
+ final_demographics_results.extend(processed_entries) # Add all if top_n is None or no entries
282
+ logging.debug(f"Added all for {config['type_name']}. Count: {len(processed_entries)}")
283
+
284
+ logging.info(f"Processed follower demographics for {org_urn}. Total entries from all types: {len(final_demographics_results)}")
285
 
286
  except requests.exceptions.RequestException as e:
287
  status_code = getattr(e.response, 'status_code', 'N/A')
 
291
  logging.error(f"Error decoding JSON for follower demographics for {org_urn}: {e}. Response: {response.text if 'response' in locals() else 'N/A'}")
292
  except Exception as e:
293
  logging.error(f"Unexpected error fetching follower demographics for {org_urn}: {e}", exc_info=True)
294
+ return final_demographics_results
295
 
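Reviewer note: the top_n handling added above boils down to this sort-and-slice pattern (the entries below are illustrative):

processed_entries = [
    {"category_name": "A", "follower_count_organic": 5},
    {"category_name": "B", "follower_count_organic": None},
    {"category_name": "C", "follower_count_organic": 12},
]
top_n = 2

for entry in processed_entries:   # coerce non-numeric counts before sorting, as the new code does
    if not isinstance(entry.get("follower_count_organic"), (int, float)):
        entry["follower_count_organic"] = 0

sorted_entries = sorted(processed_entries, key=lambda x: x.get("follower_count_organic", 0), reverse=True)
print(sorted_entries[:top_n])     # keeps C (12) and A (5); B is dropped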
296
  # --- Main Orchestration Function ---
297
 
 
306
 
307
  token_dict = community_token if isinstance(community_token, dict) else {'access_token': community_token, 'token_type': 'Bearer'}
308
 
309
+ session = None
310
  try:
311
  session = create_session(comm_client_id, token=token_dict)
312
  session.headers.update({
313
  "X-Restli-Protocol-Version": "2.0.0",
314
+ "LinkedIn-Version": LINKEDIN_API_VERSION,
315
+ # "Accept-Language": "en_US" # Consider adding if not set by create_session and locale issues persist for v2 name lookups
316
  })
317
  except Exception as e:
318
  logging.error(f"Failed to create session or update headers for org {org_urn}: {e}", exc_info=True)
319
+ return []
320
 
321
  logging.info(f"Starting follower stats retrieval for org: {org_urn}")
322
 
323
  # These maps are fetched once per call to get_linkedin_follower_stats
324
+ # For industries and geo, the maps will be fetched live within fetch_follower_demographics
325
+ # as they depend on URNs from the API response itself.
326
  functions_map = get_functions_map(session)
327
  seniorities_map = get_seniorities_map(session)
328
 
 
334
  monthly_gains = fetch_monthly_follower_gains(session, org_urn)
335
  all_follower_data.extend(monthly_gains)
336
 
337
+ # Pass pre-fetched function and seniority maps. Industry and Geo maps are fetched inside.
338
  demographics = fetch_follower_demographics(session, org_urn, functions_map, seniorities_map)
339
  all_follower_data.extend(demographics)
340