File size: 4,699 Bytes
d13869d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
import ast
import glob
import json
import os
from collections import OrderedDict

# Directory containing the per-language JSON files, next to this script.
I18N_JSON_DIR   : str         = os.path.join(os.path.dirname(os.path.relpath(__file__)), 'locale')
DEFAULT_LANGUAGE: str         = "zh_CN" # default language (its values equal their keys)
TITLE_LEN       : int         = 60      # display width of section banner lines
KEY_LEN         : int         = 30      # display width of the key-name column
SHOW_KEYS       : bool        = False   # whether to print per-key details

def extract_i18n_strings(node):
    """Recursively collect the literal string arguments of ``i18n(...)`` calls.

    Walks the AST rooted at *node* and, for every call of the form
    ``i18n("some text", ...)`` (a plain-name call, not an attribute call),
    gathers each positional argument that is a string literal.

    :param node: any :mod:`ast` node (typically the result of ``ast.parse``).
    :return: list of extracted strings, in source order (may contain duplicates).
    """
    i18n_strings = []

    if (
        isinstance(node, ast.Call)
        and isinstance(node.func, ast.Name)
        and node.func.id == "i18n"
    ):
        for arg in node.args:
            # ast.Str is deprecated since Python 3.8 and removed in 3.12;
            # ast.Constant with a str value is the modern equivalent.
            if isinstance(arg, ast.Constant) and isinstance(arg.value, str):
                i18n_strings.append(arg.value)

    for child_node in ast.iter_child_nodes(node):
        i18n_strings.extend(extract_i18n_strings(child_node))

    return i18n_strings

def scan_i18n_strings():
    """Recursively scan every ``.py`` file under the current directory and
    return the set of unique i18n string keys found in files that mention
    ``I18nAuto``.  Prints a per-file count plus a final unique total.
    """
    collected = []
    print(" Scanning Files and Extracting i18n Strings ".center(TITLE_LEN, "="))
    for path in glob.iglob("**/*.py", recursive=True):
        with open(path, "r", encoding="utf-8") as fh:
            source = fh.read()
            # Only files that actually instantiate the i18n helper are parsed.
            if "I18nAuto" in source:
                found = extract_i18n_strings(ast.parse(source))
                print(f"{path.ljust(30)}: {len(found)}")
                collected.extend(found)

    unique_keys = set(collected)
    print(f"{'Total Unique'.ljust(30)}: {len(unique_keys)}")
    return unique_keys

def update_i18n_json(json_file, standard_keys):
    """Synchronize one locale JSON file with the keys found in the code.

    Adds keys present in *standard_keys* but missing from the file (for the
    default language the value equals the key; otherwise the value is
    ``"#!" + key`` to mark it untranslated), removes keys no longer used,
    reports untranslated and duplicate values, and rewrites the file in place.

    :param json_file: path to the locale ``.json`` file to update.
    :param standard_keys: iterable of key strings extracted from the code.
    """
    print(f" Process {json_file} ".center(TITLE_LEN, "="))
    # Load the JSON file, preserving its on-disk key order.
    with open(json_file, "r", encoding="utf-8") as f:
        json_data = json.load(f, object_pairs_hook=OrderedDict)
    # Report the entry count before processing.
    len_before = len(json_data)
    print(f"{'Total Keys'.ljust(KEY_LEN)}: {len_before}")
    # Add keys that exist in code but are missing from this file.
    miss_keys = set(standard_keys) - set(json_data.keys())
    if len(miss_keys) > 0:
        print(f"{'Missing Keys (+)'.ljust(KEY_LEN)}: {len(miss_keys)}")
        for key in miss_keys:
            if DEFAULT_LANGUAGE in json_file:
                # The default language maps each key to itself.
                json_data[key] = key
            else:
                # Other languages get "#!" + key to flag untranslated entries.
                json_data[key] = "#!" + key
            if SHOW_KEYS:
                print(f"{'Added Missing Key'.ljust(KEY_LEN)}: {key}")
    # Remove keys that no longer appear anywhere in the code.
    diff_keys = set(json_data.keys()) - set(standard_keys)
    if len(diff_keys) > 0:
        print(f"{'Unused Keys  (-)'.ljust(KEY_LEN)}: {len(diff_keys)}")
        for key in diff_keys:
            del json_data[key]
            if SHOW_KEYS:
                print(f"{'Removed Unused Key'.ljust(KEY_LEN)}: {key}")
    # Reorder entries to match standard_keys.  Precompute an index map so the
    # sort key is O(1) instead of rebuilding and scanning a list per element
    # (the original list(standard_keys).index(...) made this O(n^2)).
    key_order = {key: idx for idx, key in enumerate(standard_keys)}
    json_data = OrderedDict(
        sorted(json_data.items(), key=lambda item: key_order[item[0]]))
    # Report the entry count after processing, if anything changed.
    if len(miss_keys) != 0 or len(diff_keys) != 0:
        print(f"{'Total Keys (After)'.ljust(KEY_LEN)}: {len(json_data)}")
    # Detect untranslated entries and duplicate translation values.
    num_miss_translation = 0
    duplicate_items = {}
    for key, value in json_data.items():
        if value.startswith("#!"):
            num_miss_translation += 1
            if SHOW_KEYS:
                print(f"{'Missing Translation'.ljust(KEY_LEN)}: {key}")
        if value in duplicate_items:
            duplicate_items[value].append(key)
        else:
            duplicate_items[value] = [key]
    # Report any value shared by more than one key.
    for value, keys in duplicate_items.items():
        if len(keys) > 1:
            print("\n".join([f"\033[31m{'[Failed] Duplicate Value'.ljust(KEY_LEN)}: {key} -> {value}\033[0m" for key in keys]))

    if num_miss_translation > 0:
        print(f"\033[31m{'[Failed] Missing Translation'.ljust(KEY_LEN)}: {num_miss_translation}\033[0m")
    else:
        print("\033[32m[Passed] All Keys Translated\033[0m")
    # Write the result back to the JSON file.
    # NOTE(review): sort_keys=True makes the written file alphabetical, which
    # overrides the manual reordering above — confirm which ordering is intended.
    with open(json_file, "w", encoding="utf-8") as f:
        json.dump(json_data, f, ensure_ascii=False, indent=4, sort_keys=True)
        f.write("\n")
    print(f" Updated {json_file} ".center(TITLE_LEN, "=") + '\n')

if __name__ == "__main__":
    # Extract the canonical key set once, then reconcile every locale file.
    standard_keys = scan_i18n_strings()
    for entry in os.listdir(I18N_JSON_DIR):
        if entry.endswith(".json"):
            update_i18n_json(os.path.join(I18N_JSON_DIR, entry), standard_keys)