|
import os |
|
import gzip |
|
import json |
|
from multiprocessing import Pool |
|
from tqdm import tqdm |
|
|
|
|
|
# Directory where the per-language <lang>_domain_count.json outputs are written.
# NOTE(review): placeholder — must be filled in before running.
output_directory_for_domain_list = ""

# Root directory containing one subdirectory per CommonCrawl snapshot
# (e.g. <input_path_for_data>/2024-10/*.json.gz).
# NOTE(review): placeholder — must be filled in before running.
input_path_for_data = ""
|
|
|
def process_file(file_path):
    """Count occurrences of each 'source_domain' in one gzipped JSONL file.

    Args:
        file_path: Path to a ``.json.gz`` file with one JSON object per line.

    Returns:
        dict mapping domain name -> number of records with that
        ``source_domain``. Records without a (truthy) ``source_domain``
        are ignored.
    """
    domain_count = {}
    with gzip.open(file_path, 'rt') as f:
        for line in f:
            line = line.strip()
            if not line:
                # Skip blank lines (e.g. a trailing newline at EOF) instead
                # of crashing in json.loads("").
                continue
            json_obj = json.loads(line)
            source_domain = json_obj.get('source_domain')
            if source_domain:
                domain_count[source_domain] = domain_count.get(source_domain, 0) + 1
    return domain_count
|
|
|
def process_files_in_directory(directory):
    """Run process_file over a collection of shard paths in parallel.

    NOTE(review): despite the name, ``directory`` is actually an iterable of
    file paths (see the call site in main), not a directory path. The name
    is kept for interface compatibility; the pointless ``file_paths =
    directory`` alias from the original has been removed.

    Returns:
        list of per-file domain-count dicts, in input order
        (``Pool.map`` preserves ordering).
    """
    with Pool() as pool:
        results = pool.map(process_file, directory)
    return results
|
|
|
def main():
    """Aggregate per-language source-domain counts across CC snapshots.

    For each snapshot directory under ``input_path_for_data``, groups the
    ``<lang>_*.json.gz`` shards by their language prefix, counts
    ``source_domain`` occurrences in parallel, and finally writes one
    ``<lang>_domain_count.json`` per language (domains sorted by
    descending count) into ``output_directory_for_domain_list``.
    """

    snapshots = ["2018-17", "2018-22", "2018-26", "2018-30", "2018-34", "2018-39", "2018-43", "2018-47", "2018-51", "2019-04", "2019-09", "2019-13", "2019-18", "2019-22", "2019-26", "2019-30", "2019-35", "2019-39", "2019-43", "2019-47", "2019-51", "2020-05", "2020-10", "2020-16", "2020-24", "2020-29", "2020-34", "2020-40", "2020-45", "2020-50", "2021-04", "2021-10", "2021-17", "2021-21", "2021-25", "2021-31", "2021-39", "2021-43", "2021-49", "2022-05", "2022-21", "2022-27", "2022-33", "2022-40", "2022-49", "2023-06", "2023-14", "2023-23", "2023-40", "2023-50", "2024-10"]

    langs = ["as", "bn", "gu", "kn", "hi", "ml", "mr", "ne", "or", "sa", "sd", "ta", "ur", "te", "mai"]

    output_directory = output_directory_for_domain_list

    # Create the output directory once up front; exist_ok=True avoids the
    # check-then-create race the original repeated inside the snapshot loop.
    os.makedirs(output_directory, exist_ok=True)

    # lang -> {domain -> count}, accumulated across all snapshots.
    language_domain_count = {lang: {} for lang in langs}

    for snap in snapshots:
        input_directory = f'{input_path_for_data}/{snap}'

        if not os.path.isdir(input_directory):
            # A missing snapshot should not abort the whole multi-day run.
            print("Skipping missing snapshot directory:", input_directory)
            continue

        # Group shard files by language prefix; filenames are expected to
        # look like "<lang>_<shard>.json.gz".
        language_files = {}
        for file_name in os.listdir(input_directory):
            if not file_name.endswith('.json.gz'):
                continue
            language_id = file_name.split('_')[0]
            # BUG FIX: the original indexed language_domain_count with any
            # prefix found on disk, raising KeyError for files whose
            # language is not in `langs`. Skip such files instead (they
            # would never be written out anyway).
            if language_id not in language_domain_count:
                continue
            language_files.setdefault(language_id, []).append(
                os.path.join(input_directory, file_name))

        for language_id, file_paths in language_files.items():
            counts = language_domain_count[language_id]
            for result in process_files_in_directory(file_paths):
                for domain, count in result.items():
                    counts[domain] = counts.get(domain, 0) + count

        print("Done with", snap)

    # Emit one JSON file per language, domains sorted by descending count.
    for lang in langs:
        sorted_domain_count = dict(
            sorted(language_domain_count[lang].items(),
                   key=lambda item: item[1], reverse=True))
        output_file_path = os.path.join(output_directory, f'{lang}_domain_count.json')
        with open(output_file_path, 'w') as f:
            json.dump(sorted_domain_count, f, indent=4)
|
|
|
# Run the aggregation only when executed as a script (not on import). The
# guard is also required for the multiprocessing Pool used downstream.
if __name__ == "__main__":

    main()
|
|