python_code
stringlengths 0
679k
| repo_name
stringlengths 9
41
| file_path
stringlengths 6
149
|
---|---|---|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import GraphFst, delete_space, generator_main
from nemo_text_processing.text_normalization.zh.verbalizers.postprocessor import PostProcessor
from nemo_text_processing.text_normalization.zh.verbalizers.verbalize import VerbalizeFst
from pynini.lib import pynutil
# import logging
class VerbalizeFinalFst(GraphFst):
    """
    Finite state transducer that verbalizes a whole sentence of classified tokens:
    each "tokens { ... }" wrapper is stripped, the inner content is verbalized by
    VerbalizeFst, and the result is passed through PostProcessor.

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
        cache_dir: path to a directory with .far grammar file; None disables caching
        overwrite_cache: set to True to rebuild and overwrite an existing .far file
    """

    def __init__(self, deterministic: bool = True, cache_dir: str = None, overwrite_cache: bool = False):
        super().__init__(name="verbalize_final", kind="verbalize", deterministic=deterministic)
        far_file = None
        if cache_dir is not None and cache_dir != "None":
            os.makedirs(cache_dir, exist_ok=True)
            far_file = os.path.join(cache_dir, f"zh_tn_{deterministic}_deterministic_verbalizer.far")
        if not overwrite_cache and far_file and os.path.exists(far_file):
            # reuse the previously compiled grammar instead of rebuilding it
            self.fst = pynini.Far(far_file, mode="r")["verbalize"]
        else:
            token_graph = VerbalizeFst(deterministic=deterministic)
            # strip the "tokens { ... }" wrapper and verbalize the token's content
            token_verbalizer = (
                pynutil.delete("tokens {") + delete_space + token_graph.fst + delete_space + pynutil.delete(" }")
            )
            # a sentence is any number of whitespace-separated tokens
            verbalizer = pynini.closure(delete_space + token_verbalizer + delete_space)
            postprocessor = PostProcessor(remove_puncts=False, to_upper=False, to_lower=False, tag_oov=False,)
            self.fst = (verbalizer @ postprocessor.fst).optimize()
            if far_file:
                # cache the compiled grammar for faster subsequent loads
                generator_main(far_file, {"verbalize": self.fst})
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/verbalizers/verbalize_final.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/verbalizers/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
from pynini.lib import pynutil
class DecimalFst(GraphFst):
    """
    Finite state transducer for verbalizing decimal, e.g.
    decimal { integer_part: "零" fractional_part: "五" } -> 零点五
    decimal { integer_part: "零" fractional_part: "五" quantity: "万" } -> 零点五万
    decimal { positive: "正" integer_part: "零" fractional_part: "五" quantity: "万" } -> 正零点五万
    """

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="decimal", kind="verbalize", deterministic=deterministic)

        def unwrap(field_name):
            # delete `field_name:` and the surrounding quotes, keep the quoted content
            return (
                pynutil.delete(field_name + ":")
                + delete_space
                + pynutil.delete("\"")
                + pynini.closure(NEMO_NOT_QUOTE, 1)
                + pynutil.delete("\"")
            )

        integer_part = unwrap("integer_part")
        fractional_part = unwrap("fractional_part")
        quantity_part = unwrap("quantity")
        # negative: "负" -> 负 (the sign character itself is kept)
        sign_part = pynini.closure(
            pynutil.delete("negative:")
            + delete_space
            + pynutil.delete("\"")
            + pynini.accep("负")
            + pynutil.delete("\"")
        )

        # integer_part + 点 + fractional_part, optionally followed by a quantity (e.g. 万)
        unsigned = integer_part + delete_space + pynutil.insert("点") + fractional_part
        unsigned_quantity = unsigned + delete_space + quantity_part
        unsigned_all = unsigned | unsigned_quantity
        signed = sign_part + delete_space + unsigned_all
        final_graph = unsigned_all | signed

        # exposed so other verbalizers (e.g. money) can reuse the bare decimal grammar
        self.decimal_component = final_graph
        delete_tokens = self.delete_tokens(final_graph)
        self.fst = delete_tokens.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/verbalizers/decimal.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
from pynini.lib import pynutil
class MoneyFst(GraphFst):
    """
    Finite state transducer for verbalizing money, e.g.
    tokens { money { integer: "二" currency: "$"} } -> 二美元
    tokens { money { integer: "三" major_unit: "块"} } -> 三块
    tokens { money { currency: "$" integer: "二" } } -> 二美元
    """

    def __init__(self, decimal: GraphFst, deterministic: bool = True, lm: bool = False):
        super().__init__(name="money", kind="verbalize", deterministic=deterministic)

        def keep_quoted(prefix):
            # delete `prefix: "` and the closing quote, keep what is inside
            return pynutil.delete(prefix + ": \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")

        # components to combine to make graphs
        amount = keep_quoted("integer")
        currency = keep_quoted("currency")
        decimal_amount = decimal.decimal_component
        any_unit = keep_quoted("currency") | keep_quoted("currency_major") | keep_quoted("currency_minor")

        # graphs
        regular_money = amount + delete_space + currency
        unit_money = pynini.closure(amount + delete_space + any_unit + pynini.closure(delete_space))
        decimal_money = decimal_amount + delete_space + currency
        quantity_money = (
            amount + delete_space + keep_quoted("quantity") + delete_space + currency
        )

        final_graph = unit_money | regular_money | decimal_money | quantity_money
        delete_tokens = self.delete_tokens(final_graph)
        self.fst = delete_tokens.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/verbalizers/money.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_NOT_QUOTE, GraphFst
from pynini.lib import pynutil
class MathSymbol(GraphFst):
    '''
    Finite state transducer for verbalizing a math-symbol token, e.g.
    score: "加" -> 加
    NOTE(review): the grammar deletes a `score:` field even though the
    class/kind name is "sign" — confirm against the matching tagger.
    '''

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="sign", kind="verbalize", deterministic=deterministic)
        # unwrap the quoted value of the `score` field, keeping its content
        graph = pynutil.delete('score: \"') + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete('\"')
        self.fst = graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/verbalizers/math_symbol.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
from pynini.lib import pynutil
class CardinalFst(GraphFst):
    """
    Finite state transducer for verbalizing cardinal, e.g.
    cardinal { negative: "负" integer: "23" } -> 负二十三
    cardinal { integer: "23" } -> 二十三
    cardinal { positive: "正" integer: "23" } -> 正二十三
    """

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="cardinal", kind="verbalize", deterministic=deterministic)
        # keep the sign character itself, drop its field wrapper
        sign = pynini.cross("negative: \"负\"", "负") | pynini.cross("positive: \"正\"", "正")
        # unwrap the quoted integer value (trailing whitespace removed as well)
        integer = (
            pynutil.delete("integer: ")
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE)
            + pynutil.delete("\"")
            + delete_space
        )
        # quantity suffix such as 万/亿, kept verbatim
        quantity = pynutil.delete("quantity: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")

        with_quantity = (integer + quantity) | (sign + delete_space + integer + quantity)
        signed = sign + delete_space + integer
        final_graph = integer | signed | with_quantity

        delete_tokens = self.delete_tokens(final_graph)
        self.fst = delete_tokens.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/verbalizers/cardinal.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
from pynini.lib import pynutil
class DateFst(GraphFst):
    """
    Finite state transducer classifiying dates, e.g.
    { date { year: "二零零二" } } -> 二零零二年
    { date { year: "二零零二" month: "一" day: "二十八"} } -> 二零零二年一月二十八日
    { date { year: "二零零二" month: "二" } } -> 二零零二年二月
    { date { month: "二" day: "十一" } } -> 二月十一日

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
        lm: not used in this constructor; kept for signature consistency with other verbalizers
    """

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="date", kind="verbalize", deterministic=deterministic)
        # year: "..." -> ...年 (unwrap the quoted value, append the unit character)
        year_component = (
            pynutil.delete("year: ")
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE)
            + pynutil.delete("\"")
            + pynutil.insert("年")
        )
        # month: "..." -> ...月
        month_component = (
            pynutil.delete("month: ")
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE)
            + pynutil.delete("\"")
            + pynutil.insert("月")
        )
        # day: "..." -> ...日
        day_component = (
            pynutil.delete("day: ")
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE)
            + pynutil.delete("\"")
            + pynutil.insert("日")
        )
        # era: "..." -> ... (content kept verbatim, wrapper removed)
        optional_era = (
            pynutil.delete("era: ") + pynutil.delete("\"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        )
        # every component is wrapped in closure(), so any subset of year/month/day matches
        graph_date = (
            pynini.closure(year_component)
            + pynini.closure(delete_space)
            + pynini.closure(month_component)
            + pynini.closure(delete_space)
            + pynini.closure(day_component)
        )
        graph_date_era = optional_era + delete_space + graph_date
        graph_date_all = graph_date | graph_date_era
        # range
        # dash-like separators between two dates are rewritten to 到 ("to");
        # any other quoted range text is kept as-is
        symbol = pynini.accep("-") | pynini.accep("~") | pynini.accep("——") | pynini.accep("—")
        ranges = (
            pynutil.delete("range: \"")
            + delete_space
            + (pynini.closure(NEMO_NOT_QUOTE) | pynini.cross(symbol, "到"))
            + pynutil.delete("\"")
        )
        # date-range form: optional leading range marker, then "date range date"
        graph_range = (
            pynini.closure((ranges + delete_space), 0, 1)
            + graph_date
            + delete_space
            + ranges
            + delete_space
            + graph_date
        )
        final_graph = graph_date_all | graph_range
        delete_tokens = self.delete_tokens(final_graph)
        self.fst = delete_tokens.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/verbalizers/date.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_NOT_QUOTE, GraphFst
from pynini.lib import pynutil
class Char(GraphFst):
    '''
    Finite state transducer for verbalizing a plain character token, e.g.
    tokens { char { name: "你" } } -> 你
    '''

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="char", kind="verbalize", deterministic=deterministic)
        # NOTE(review): NEMO_NOT_QUOTE is used without closure(), so exactly one
        # non-quote character is accepted; multi-character names would not match —
        # confirm this is intended for single-CJK-character tokens.
        graph = pynutil.delete("name: \"") + NEMO_NOT_QUOTE + pynutil.delete("\"")
        self.fst = graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/verbalizers/word.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/data/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/data/char/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/data/date/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/data/denylist/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/data/math/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/data/time/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/data/number/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/data/erhua/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/data/money/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/zh/data/measure/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# Copyright 2015 and onwards Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import string
from pathlib import Path
from typing import Dict
from nemo_text_processing.text_normalization.en.utils import get_abs_path
try:
    import pynini
    from pynini import Far
    from pynini.export import export
    from pynini.examples import plurals
    from pynini.lib import byte, pynutil, utf8

    NEMO_CHAR = utf8.VALID_UTF8_CHAR
    NEMO_DIGIT = byte.DIGIT
    NEMO_LOWER = pynini.union(*string.ascii_lowercase).optimize()
    NEMO_UPPER = pynini.union(*string.ascii_uppercase).optimize()
    NEMO_ALPHA = pynini.union(NEMO_LOWER, NEMO_UPPER).optimize()
    NEMO_ALNUM = pynini.union(NEMO_DIGIT, NEMO_ALPHA).optimize()
    NEMO_HEX = pynini.union(*string.hexdigits).optimize()
    NEMO_NON_BREAKING_SPACE = u"\u00A0"
    NEMO_SPACE = " "
    NEMO_WHITE_SPACE = pynini.union(" ", "\t", "\n", "\r", u"\u00A0").optimize()
    NEMO_NOT_SPACE = pynini.difference(NEMO_CHAR, NEMO_WHITE_SPACE).optimize()
    NEMO_NOT_QUOTE = pynini.difference(NEMO_CHAR, r'"').optimize()
    NEMO_PUNCT = pynini.union(*map(pynini.escape, string.punctuation)).optimize()
    NEMO_GRAPH = pynini.union(NEMO_ALNUM, NEMO_PUNCT).optimize()
    NEMO_SIGMA = pynini.closure(NEMO_CHAR)

    # Two-digit numbers 21-99 whose ones digit is non-zero (21..29, 31..39, ..., 91..99).
    # Generated union is equivalent to the previous explicit 72-entry listing.
    flop_digits = pynini.union(*(str(d) for d in range(21, 100) if d % 10 != 0))

    delete_space = pynutil.delete(pynini.closure(NEMO_WHITE_SPACE))
    delete_zero_or_one_space = pynutil.delete(pynini.closure(NEMO_WHITE_SPACE, 0, 1))
    insert_space = pynutil.insert(" ")
    insert_and = pynutil.insert("و")  # Arabic conjunction (wa-, "and")
    delete_extra_space = pynini.cross(pynini.closure(NEMO_WHITE_SPACE, 1), " ")
    delete_preserve_order = pynini.closure(
        pynutil.delete(" preserve_order: true")
        | (pynutil.delete(" field_order: \"") + NEMO_NOT_QUOTE + pynutil.delete("\""))
    )

    suppletive = pynini.string_file(get_abs_path("data/suppletive.tsv"))
    # _v = pynini.union("a", "e", "i", "o", "u")
    _c = pynini.union(
        "b", "c", "d", "f", "g", "h", "j", "k", "l", "m", "n", "p", "q", "r", "s", "t", "v", "w", "x", "y", "z"
    )
    _ies = NEMO_SIGMA + _c + pynini.cross("y", "ies")
    _es = NEMO_SIGMA + pynini.union("s", "sh", "ch", "x", "z") + pynutil.insert("es")
    _s = NEMO_SIGMA + pynutil.insert("s")
    graph_plural = plurals._priority_union(
        suppletive, plurals._priority_union(_ies, plurals._priority_union(_es, _s, NEMO_SIGMA), NEMO_SIGMA), NEMO_SIGMA
    ).optimize()
    SINGULAR_TO_PLURAL = graph_plural
    PLURAL_TO_SINGULAR = pynini.invert(graph_plural)
    TO_LOWER = pynini.union(*[pynini.cross(x, y) for x, y in zip(string.ascii_uppercase, string.ascii_lowercase)])
    TO_UPPER = pynini.invert(TO_LOWER)
    MIN_NEG_WEIGHT = -0.0001
    MIN_POS_WEIGHT = 0.0001

    PYNINI_AVAILABLE = True
except (ModuleNotFoundError, ImportError):
    # Create placeholders so that `from ... import <name>` still succeeds when
    # pynini is not installed; callers must check PYNINI_AVAILABLE before use.
    NEMO_CHAR = None
    NEMO_DIGIT = None
    NEMO_LOWER = None
    NEMO_UPPER = None
    NEMO_ALPHA = None
    NEMO_ALNUM = None
    NEMO_HEX = None
    NEMO_NON_BREAKING_SPACE = u"\u00A0"
    NEMO_SPACE = " "
    NEMO_WHITE_SPACE = None
    NEMO_NOT_SPACE = None
    NEMO_NOT_QUOTE = None
    NEMO_PUNCT = None
    NEMO_GRAPH = None
    NEMO_SIGMA = None
    flop_digits = None  # fix: was missing from the fallback branch
    delete_space = None
    delete_zero_or_one_space = None  # fix: was missing from the fallback branch
    insert_space = None
    insert_and = None  # fix: was missing from the fallback branch
    delete_extra_space = None
    delete_preserve_order = None
    suppletive = None
    # _v = pynini.union("a", "e", "i", "o", "u")
    _c = None
    _ies = None
    _es = None
    _s = None
    graph_plural = None
    SINGULAR_TO_PLURAL = None
    PLURAL_TO_SINGULAR = None
    TO_LOWER = None
    TO_UPPER = None
    MIN_NEG_WEIGHT = None
    MIN_POS_WEIGHT = None

    PYNINI_AVAILABLE = False
def generator_main(file_name: str, graphs: Dict[str, 'pynini.FstLike']):
    """
    Exports graph as OpenFst finite state archive (FAR) file with given file name and rule name.

    Args:
        file_name: exported file name
        graphs: Mapping of a rule name and Pynini WFST graph to be exported
    """
    exporter = export.Exporter(file_name)
    for rule_name, rule_fst in graphs.items():
        # optimize each graph before writing to keep the archive compact
        exporter[rule_name] = rule_fst.optimize()
    exporter.close()
    logging.info(f'Created {file_name}')
def get_plurals(fst):
    """
    Compose the singular-to-plural mapping with the given fst.

    Args:
        fst: Fst

    Returns plurals to given singular forms
    """
    plural_fst = SINGULAR_TO_PLURAL @ fst
    return plural_fst
def get_singulars(fst):
    """
    Compose the plural-to-singular mapping with the given fst.

    Args:
        fst: Fst

    Returns singulars to given plural forms
    """
    singular_fst = PLURAL_TO_SINGULAR @ fst
    return singular_fst
def convert_space(fst) -> 'pynini.FstLike':
    """
    Converts space to nonbreaking space.
    Used only in tagger grammars for transducing token values within quotes, e.g. name: "hello kitty"
    This is making transducer significantly slower, so only use when there could be potential spaces
    within quotes, otherwise leave it.

    Args:
        fst: input fst

    Returns output fst where breaking spaces are converted to non breaking spaces
    """
    space_to_nbsp = pynini.cdrewrite(pynini.cross(NEMO_SPACE, NEMO_NON_BREAKING_SPACE), "", "", NEMO_SIGMA)
    return fst @ space_to_nbsp
class GraphFst:
    """
    Base class for all grammar fsts.

    Args:
        name: name of grammar class
        kind: either 'classify' or 'verbalize'
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, name: str, kind: str, deterministic: bool = True):
        self.name = name
        # BUG FIX: previously `self.kind = str` stored the builtin `str` type
        # instead of the constructor argument.
        self.kind = kind
        self._fst = None
        self.deterministic = deterministic

        self.far_path = Path(os.path.dirname(__file__) + '/grammars/' + kind + '/' + name + '.far')
        if self.far_exist():
            # load a pre-compiled grammar when a FAR file is present on disk
            self._fst = Far(self.far_path, mode="r", arc_type="standard", far_type="default").get_fst()

    def far_exist(self) -> bool:
        """
        Returns true if FAR can be loaded
        """
        return self.far_path.exists()

    @property
    def fst(self) -> 'pynini.FstLike':
        return self._fst

    @fst.setter
    def fst(self, fst):
        self._fst = fst

    def add_tokens(self, fst) -> 'pynini.FstLike':
        """
        Wraps class name around to given fst

        Args:
            fst: input fst

        Returns:
            Fst: fst
        """
        return pynutil.insert(f"{self.name} {{ ") + fst + pynutil.insert(" }")

    def delete_tokens(self, fst) -> 'pynini.FstLike':
        """
        Deletes class name wrap around output of given fst

        Args:
            fst: input fst

        Returns:
            Fst: fst
        """
        res = (
            pynutil.delete(f"{self.name}")
            + delete_space
            + pynutil.delete("{")
            + delete_space
            + fst
            + delete_space
            + pynutil.delete("}")
        )
        # normalize non-breaking spaces back to regular spaces in the output
        return res @ pynini.cdrewrite(pynini.cross(u"\u00A0", " "), "", "", NEMO_SIGMA)
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/graph_utils.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from nemo_text_processing.text_normalization.en.taggers.tokenize_and_classify import ClassifyFst
from nemo_text_processing.text_normalization.en.verbalizers.verbalize import VerbalizeFst
from nemo_text_processing.text_normalization.en.verbalizers.verbalize_final import VerbalizeFinalFst
try:
    import pynini

    PYNINI_AVAILABLE = True
except (ModuleNotFoundError, ImportError):
    # pynini is an optional compiled dependency: warn instead of failing the
    # package import so the rest of the toolkit remains importable
    logging.warning(
        "`pynini` is not installed ! \n"
        "Please run the `nemo_text_processing/setup.sh` script"
        "prior to usage of this toolkit."
    )
    PYNINI_AVAILABLE = False
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import os
def get_abs_path(rel_path):
    """
    Resolve a path relative to the directory containing this file.

    Args:
        rel_path: relative path to this file

    Returns absolute path
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    return base_dir + '/' + rel_path
def load_labels(abs_path):
    """
    Loads a tab-separated file as a list of rows.

    Args:
        abs_path: absolute path

    Returns list of rows (each a list of column strings)
    """
    # FIX: the file handle was previously opened and never closed; use a
    # context manager so it is released deterministically.
    with open(abs_path, encoding="utf-8") as label_tsv:
        labels = list(csv.reader(label_tsv, delimiter="\t"))
    return labels
def augment_labels_with_punct_at_end(labels):
    """
    augments labels: if key ends on a punctuation that value does not have, add a new label
    where the value maintains the punctuation

    Args:
        labels : input labels (rows of [key, value, ...])

    Returns:
        additional labels
    """
    res = []
    for label in labels:
        # FIX: guard against empty strings before indexing [-1] (previously
        # an empty key or value raised IndexError)
        if len(label) > 1 and label[0] and label[1]:
            if label[0][-1] == "." and label[1][-1] != ".":
                res.append([label[0], label[1] + "."] + label[2:])
    return res
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/utils.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.ar.graph_utils import (
NEMO_ALPHA,
NEMO_DIGIT,
NEMO_NON_BREAKING_SPACE,
GraphFst,
convert_space,
insert_space,
)
from nemo_text_processing.text_normalization.ar.utils import get_abs_path
from pynini.lib import pynutil
unit_singular = pynini.string_file(get_abs_path("data/measure/measurements.tsv"))
class MeasureFst(GraphFst):
    """
    Finite state transducer for classifying measure, e.g.
        "20%" -> measure { cardinal { integer: "20" } units: "%" }

    Args:
        cardinal: CardinalFst
        decimal: DecimalFst
        fraction: FractionFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, decimal: GraphFst, fraction: GraphFst, deterministic: bool = True):
        super().__init__(name="measure", kind="classify", deterministic=deterministic)
        cardinal_graph = cardinal.cardinal_numbers
        # Unit abbreviation -> verbalized unit, loaded from the measurements table.
        graph_unit = pynini.string_file(get_abs_path("data/measure/measurements.tsv"))
        graph_unit_singular = convert_space(graph_unit)
        optional_graph_negative = pynini.closure("-", 0, 1)
        # A "/" between units is rewritten to "في" ("per"-style reading);
        # the unit may appear on either side of the slash.
        graph_unit_denominator = pynini.cross("/", "في") + pynutil.insert(
            NEMO_NON_BREAKING_SPACE
        ) + graph_unit_singular | graph_unit_singular + pynutil.insert(NEMO_NON_BREAKING_SPACE) + pynini.cross(
            "/", "في"
        )
        optional_unit_denominator = pynini.closure(
            pynutil.insert(NEMO_NON_BREAKING_SPACE) + graph_unit_denominator, 0, 1,
        )
        # Wrap the unit (with optional denominator) into the `units: "..."` field.
        unit_plural = (
            pynutil.insert("units: \"")
            + (graph_unit_singular + (optional_unit_denominator) | graph_unit_denominator)
            + pynutil.insert("\"")
        )
        unit_singular_graph = (
            pynutil.insert("units: \"")
            + ((graph_unit_singular + optional_unit_denominator) | graph_unit_denominator)
            + pynutil.insert("\"")
        )
        # Decimal number followed by a unit, or unit followed by a decimal;
        # an optional single space between them is deleted.
        subgraph_decimal = (
            decimal.fst + insert_space + pynini.closure(pynutil.delete(" "), 0, 1) + unit_plural
            | unit_plural + pynini.closure(pynutil.delete(" "), 0, 1) + insert_space + decimal.fst
        )
        # Cardinal other than exactly "1" (that case is handled separately below),
        # with the unit before or after the number.
        subgraph_cardinal = (
            (optional_graph_negative + (pynini.closure(NEMO_DIGIT) - "1")) @ cardinal.fst
            + insert_space
            + pynini.closure(pynutil.delete(" "), 0, 1)
            + unit_plural
            | unit_plural
            + pynini.closure(pynutil.delete(" "), 0, 1)
            + insert_space
            + (optional_graph_negative + (pynini.closure(NEMO_DIGIT) - "1")) @ cardinal.fst
        )
        # Special-case "1": pairs with the singular unit form.
        subgraph_cardinal |= (
            (optional_graph_negative + pynini.accep("1")) @ cardinal.fst
            # @ pynini.cdrewrite(pynini.cross("واحد", ""), "", "", NEMO_SIGMA)
            + insert_space
            + pynini.closure(pynutil.delete(" "), 0, 1)
            + unit_singular_graph
        )
        # Fraction followed by a unit, or unit followed by a fraction.
        subgraph_fraction = fraction.fst + insert_space + pynini.closure(pynutil.delete(" "), 0, 1) + unit_plural
        subgraph_fraction |= unit_plural + pynini.closure(pynutil.delete(" "), 0, 1) + insert_space + fraction.fst
        # Patterns like "20-km": number, dash, alphabetic unit token.
        cardinal_dash_alpha = (
            pynutil.insert("cardinal { integer: \"")
            + cardinal_graph
            + pynutil.delete('-')
            + pynutil.insert("\" } units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.insert("\"")
        )
        # Patterns like "km-20": alphabetic unit token, dash, number.
        alpha_dash_cardinal = (
            pynutil.insert("units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.delete('-')
            + pynutil.insert("\"")
            + pynutil.insert(" cardinal { integer: \"")
            + cardinal_graph
            + pynutil.insert("\" }")
        )
        # Same dash patterns with a decimal number.
        decimal_dash_alpha = (
            pynutil.insert("decimal { ")
            + decimal.final_graph_decimal
            + pynutil.delete('-')
            + pynutil.insert(" } units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.insert("\"")
        )
        # Dimension-style "x"/"X" suffix, e.g. "3.5x".
        decimal_times = (
            pynutil.insert("decimal { ")
            + decimal.final_graph_decimal
            + pynutil.insert(" } units: \"")
            + pynini.union('x', 'X')
            + pynutil.insert("\"")
        )
        cardinal_times = (
            pynutil.insert("cardinal { integer: \"")
            + cardinal_graph
            + pynutil.insert("\" } units: \"")
            + pynini.union('x', 'X')
            + pynutil.insert("\"")
        )
        alpha_dash_decimal = (
            pynutil.insert("units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.delete('-')
            + pynutil.insert("\"")
            + pynutil.insert(" decimal { ")
            + decimal.final_graph_decimal
            + pynutil.insert(" }")
        )
        # Union of all accepted measure shapes.
        final_graph = (
            subgraph_decimal
            | subgraph_cardinal
            | cardinal_dash_alpha
            | alpha_dash_cardinal
            | decimal_dash_alpha
            | decimal_times
            | alpha_dash_decimal
            | subgraph_fraction
            | cardinal_times
        )
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/taggers/measure.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.ar.graph_utils import GraphFst
from nemo_text_processing.text_normalization.ar.utils import get_abs_path
from pynini.lib import pynutil
class FractionFst(GraphFst):
    """
    Finite state transducer for classifying fraction
        "1 1/2" ->
        tokens { fraction { integer_part: "واحد" numerator: "واحد" denominator: "نص" } }

    Args:
        cardinal: cardinal fst (provides `cardinal_numbers`)
    """

    def __init__(self, cardinal: GraphFst):
        super().__init__(name="fraction", kind="classify")
        cardinal_graph = cardinal.cardinal_numbers
        digit_one = pynini.accep("1")
        digit_two = pynini.accep("2")
        digit_three_to_ten = pynini.union("3", "4", "5", "6", "7", "8", "9", "10")
        digit_one_to_ten = pynini.union("1", "2", "3", "4", "5", "6", "7", "8", "9", "10")
        # Denominator word tables split by grammatical number
        # (singular / dual / plural TSV files).
        graph_ones = pynini.string_file(get_abs_path("data/number/fraction_singular.tsv")).optimize()
        graph_dual = pynini.string_file(get_abs_path("data/number/fraction_dual.tsv")).optimize()
        graph_plural = pynini.string_file(get_abs_path("data/number/fraction_plural.tsv")).optimize()
        integer = pynutil.insert("integer_part: \"") + cardinal_graph + pynutil.insert("\"")
        # "/" (with or without surrounding spaces) closes the numerator field.
        dividor = pynini.cross("/", "\" ") | pynini.cross(" / ", "\" ")
        graph_numerator = pynutil.insert("numerator: \"") + cardinal_graph
        denominator_singual = pynutil.insert("denominator: \"") + digit_one_to_ten @ graph_ones + pynutil.insert("\"")
        denominator_dual = pynutil.insert("denominator: \"") + digit_one_to_ten @ graph_dual + pynutil.insert("\"")
        denominator_plural = pynutil.insert("denominator: \"") + digit_one_to_ten @ graph_plural + pynutil.insert("\"")
        # Numerator value selects the denominator's grammatical number:
        # 1 -> singular, 2 -> dual, 3..10 -> plural.
        numerator_one = digit_one @ graph_numerator + dividor + denominator_singual
        numerator_two = digit_two @ graph_numerator + dividor + denominator_dual
        numerator_three_to_ten = digit_three_to_ten @ graph_numerator + dividor + denominator_plural
        # Fallback for numerators above ten: denominator read as a plain cardinal.
        numerator_more_than_ten = (
            graph_numerator + dividor + pynutil.insert("denominator: \"") + cardinal_graph + pynutil.insert("\"")
        )
        # Small weight disfavors the fallback when a specific rule matches.
        fraction_graph = (
            numerator_one | numerator_two | numerator_three_to_ten | pynutil.add_weight(numerator_more_than_ten, 0.001)
        )
        # Optional leading integer part, e.g. "1 1/2".
        graph = pynini.closure(integer + pynini.accep(" "), 0, 1) + (fraction_graph)
        graph |= pynini.closure(integer + (pynini.accep(" ") | pynutil.insert(" ")), 0, 1) + pynini.compose(
            pynini.string_file(get_abs_path("data/number/fraction.tsv")), (fraction_graph)
        )
        self.graph = graph
        final_graph = self.add_tokens(self.graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/taggers/fraction.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import pynini
from nemo_text_processing.text_normalization.ar.graph_utils import (
NEMO_CHAR,
NEMO_DIGIT,
GraphFst,
delete_extra_space,
delete_space,
generator_main,
)
from nemo_text_processing.text_normalization.ar.taggers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.ar.taggers.decimal import DecimalFst
from nemo_text_processing.text_normalization.ar.taggers.fraction import FractionFst
from nemo_text_processing.text_normalization.ar.taggers.measure import MeasureFst
from nemo_text_processing.text_normalization.ar.taggers.money import MoneyFst
from nemo_text_processing.text_normalization.ar.taggers.word import WordFst
from nemo_text_processing.text_normalization.en.taggers.punctuation import PunctuationFst
from pynini.lib import pynutil
class ClassifyFst(GraphFst):
    """
    Final class that composes all other classification grammars. This class can process an entire sentence, that is lower cased.
    For deployment, this grammar will be compiled and exported to OpenFst Finite State Archive (FAR) File.
    More details to deployment at NeMo/tools/text_processing_deployment.

    Args:
        input_case: accepting either "lower_cased" or "cased" input.
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        cache_dir: path to a dir with .far grammar file. Set to None to avoid using cache.
        overwrite_cache: set to True to overwrite .far files
        whitelist: path to a file with whitelist replacements
    """

    def __init__(
        self,
        input_case: str,
        deterministic: bool = False,
        cache_dir: str = None,
        overwrite_cache: bool = False,
        whitelist: str = None,
    ):
        super().__init__(name="tokenize_and_classify", kind="classify")

        far_file = None
        if cache_dir is not None and cache_dir != "None":
            os.makedirs(cache_dir, exist_ok=True)
            # The whitelist file name participates in the cache key so that
            # different whitelists do not reuse each other's cached FAR.
            whitelist_file = os.path.basename(whitelist) if whitelist else ""
            far_file = os.path.join(
                cache_dir, f"_{input_case}_ar_tn_{deterministic}_deterministic{whitelist_file}.far"
            )
        if not overwrite_cache and far_file and os.path.exists(far_file):
            # Fast path: restore the compiled grammar from the FAR cache.
            self.fst = pynini.Far(far_file, mode="r")["tokenize_and_classify"]
            no_digits = pynini.closure(pynini.difference(NEMO_CHAR, NEMO_DIGIT))
            self.fst_no_digits = pynini.compose(self.fst, no_digits).optimize()
            logging.info(f"ClassifyFst.fst was restored from {far_file}.")
        else:
            # Slow path: build every tagger grammar from scratch.
            logging.info(f"Creating ClassifyFst grammars. This might take some time...")
            self.cardinal = CardinalFst()
            cardinal_graph = self.cardinal.fst
            self.decimal = DecimalFst(cardinal=self.cardinal, deterministic=deterministic)
            decimal_graph = self.decimal.fst
            self.fraction = FractionFst(cardinal=self.cardinal)
            fraction_graph = self.fraction.fst
            self.money = MoneyFst(cardinal=self.cardinal)
            money_graph = self.money.fst
            self.measure = MeasureFst(
                cardinal=self.cardinal, decimal=self.decimal, fraction=self.fraction, deterministic=deterministic
            )
            measure_graph = self.measure.fst
            word_graph = WordFst(deterministic=deterministic).fst
            punct_graph = PunctuationFst(deterministic=deterministic).fst
            # Lower weight = preferred; semiotic classes out-rank plain words.
            classify = (
                pynutil.add_weight(cardinal_graph, 1.1)
                | pynutil.add_weight(decimal_graph, 1.1)
                | pynutil.add_weight(fraction_graph, 1.0)
                | pynutil.add_weight(money_graph, 1.0)
                | pynutil.add_weight(measure_graph, 1.0)
            )
            # Word is the catch-all fallback, heavily penalized.
            classify |= pynutil.add_weight(word_graph, 100)
            punct = pynutil.insert("tokens { ") + pynutil.add_weight(punct_graph, weight=1.1) + pynutil.insert(" }")
            token = pynutil.insert("tokens { ") + classify + pynutil.insert(" }")
            # Punctuation may attach before and/or after each token.
            token_plus_punct = (
                pynini.closure(punct + pynutil.insert(" ")) + token + pynini.closure(pynutil.insert(" ") + punct)
            )
            graph = token_plus_punct + pynini.closure(pynutil.add_weight(delete_extra_space, 1.1) + token_plus_punct)
            graph = delete_space + graph + delete_space
            self.fst = graph.optimize()
            # Variant of the grammar that rejects outputs still containing digits.
            no_digits = pynini.closure(pynini.difference(NEMO_CHAR, NEMO_DIGIT))
            self.fst_no_digits = pynini.compose(self.fst, no_digits).optimize()
            if far_file:
                generator_main(far_file, {"tokenize_and_classify": self.fst})
                logging.info(f"ClassifyFst grammars are saved to {far_file}.")
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/taggers/tokenize_and_classify.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import pynini
from nemo_text_processing.inverse_text_normalization.en.taggers.tokenize_and_classify import ClassifyFst
from nemo_text_processing.inverse_text_normalization.en.verbalizers.verbalize import VerbalizeFst
from nemo_text_processing.inverse_text_normalization.en.verbalizers.verbalize_final import VerbalizeFinalFst
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/taggers/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.ar.graph_utils import NEMO_DIGIT, NEMO_SPACE, GraphFst, insert_space
from nemo_text_processing.text_normalization.ar.utils import get_abs_path
from pynini.lib import pynutil
def get_quantity(decimal: "pynini.FstLike", cardinal_up_to_hundred: "pynini.FstLike") -> "pynini.FstLike":
    """
    Returns FST that transforms either a cardinal or decimal followed by a quantity into a numeral,
    e.g. 5 مليون -> integer_part: "خمسة" quantity: "مليون"
    e.g. 5.4 مليون -> integer_part: "خمسة" fractional_part: "اربعة من عشرة" quantity: "مليون"

    Args:
        decimal: decimal FST
        cardinal_up_to_hundred: cardinal FST
    """
    numbers = cardinal_up_to_hundred
    # NOTE(review): `quantities` is a free variable — it is not defined
    # anywhere in this module as shown. Presumably a module-level FST of
    # quantity words (e.g. مليون); confirm it exists before this is called,
    # otherwise this function raises NameError at runtime.
    res = (
        pynutil.insert('integer_part: "')
        + numbers
        + pynutil.insert('"')
        + pynini.accep(" ")
        + pynutil.insert('quantity: "')
        + quantities
        + pynutil.insert('"')
    )
    # Same pattern with a full decimal number before the quantity word.
    res |= decimal + pynini.accep(" ") + pynutil.insert('quantity: "') + quantities + pynutil.insert('"')
    return res
class DecimalFst(GraphFst):
    """
    Finite state transducer for classifying decimal, e.g.
        321.7 --> ثلاث مئة وواحد وعشرون وسبعة من عشرة
        -321.7 -> decimal { negative: "true" integer_part: "321" fractional_part: ".7" }

    Args:
        cardinal: CardinalFst
        deterministic: if True will provide a single transduction option
    """

    def __init__(self, cardinal: GraphFst, deterministic: bool):
        super().__init__(name="decimal", kind="classify", deterministic=deterministic)
        integer_part = cardinal.cardinal_numbers
        cardinal_numbers_with_leading_zeros = cardinal.cardinal_numbers_with_leading_zeros
        self.integer_part = pynini.closure(integer_part, 0, 1)
        # Decimal separator ('.' or ',') is verbalized as "و".
        # NOTE(review): attribute name "seperator" (sic) is part of the
        # public surface of this class — keep the spelling.
        self.seperator = pynini.string_map([(".", "و"), (",", "و")])
        add_preposition = pynutil.insert(" من ")
        # Fractional part read as "<digits> من <power of ten>": the number
        # of digits selects the denominator word (عشرة / مئة / ألف / عشرة آلاف).
        graph_fractional = NEMO_DIGIT @ cardinal_numbers_with_leading_zeros + add_preposition + pynutil.insert("عشرة")
        graph_fractional |= (
            (NEMO_DIGIT + NEMO_DIGIT) @ cardinal_numbers_with_leading_zeros + add_preposition + pynutil.insert("مئة")
        )
        graph_fractional |= (
            (NEMO_DIGIT + NEMO_DIGIT + NEMO_DIGIT) @ cardinal_numbers_with_leading_zeros
            + add_preposition
            + pynutil.insert("ألف")
        )
        graph_fractional |= (
            (NEMO_DIGIT + NEMO_DIGIT + NEMO_DIGIT + NEMO_DIGIT) @ cardinal_numbers_with_leading_zeros
            + add_preposition
            + pynutil.insert("عشرة آلاف")
        )
        graph_integer = pynutil.insert('integer_part: "') + self.integer_part + pynutil.insert('" ')
        # to parse something like ,50 alone as well
        graph_integer_or_none = graph_integer | pynutil.insert('integer_part: "0" ', weight=0.001)
        self.optional_quantity = pynini.string_file(get_abs_path("data/number/quantities.tsv")).optimize()
        self.graph_fractional = graph_fractional
        graph_fractional = (
            pynutil.insert('fractional_part: "') + self.seperator + graph_fractional + pynutil.insert('"')
        )
        # Optional trailing quantity word (e.g. مليون); negative weight on the
        # accepted space prefers keeping an existing space over inserting one.
        optional_quantity = pynini.closure(
            (pynutil.add_weight(pynini.accep(NEMO_SPACE), -0.1) | insert_space)
            + pynutil.insert('quantity: "')
            + self.optional_quantity
            + pynutil.insert('"'),
            0,
            1,
        )
        self.graph_decimal = self.integer_part + insert_space + self.seperator + graph_fractional
        self.final_graph_decimal = (
            cardinal.optional_minus_graph + graph_integer_or_none + insert_space + graph_fractional + optional_quantity
        )
        self.final_graph = self.add_tokens(self.final_graph_decimal)
        self.fst = self.final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/taggers/decimal.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.ar.graph_utils import (
NEMO_ALPHA,
NEMO_DIGIT,
NEMO_SIGMA,
GraphFst,
convert_space,
insert_space,
)
from nemo_text_processing.text_normalization.ar.utils import get_abs_path, load_labels
from pynini.lib import pynutil
# Currency lookup tables: minor-unit (singular/dual-plural) and major-unit
# symbol -> word FSTs, plus locally-written currency abbreviations
# (e.g. "د.ك" as in the class docstring's examples).
min_singular = pynini.string_file(get_abs_path("data/money/currency_minor_singular.tsv"))
min_plural = pynini.string_file(get_abs_path("data/money/currency_minor_plural.tsv"))
maj_singular = pynini.string_file(get_abs_path("data/money/currency_major.tsv"))
ar_cur = pynini.string_file((get_abs_path("data/money/local_currency.tsv")))
class MoneyFst(GraphFst):
    """
    Finite state transducer for classifying money, e.g.
        "$1,99" -> money { integer_part: "سبعة" currency_maj: "دولار" fractional_part: "تسعة وتسعون" currency_min: "سنت" preserve_order: true}
        "$0,10" -> money { fractional_part: "عشرة" currency_min: "بنسات"  preserve_order: true }
        "$9" -> money { integer_part: "تسعة" currency_maj: "دولار" preserve_order: true}

    Args:
        cardinal: CardinalFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, deterministic: bool = True):
        super().__init__(name="money", kind="classify", deterministic=deterministic)
        cardinal_graph = cardinal.cardinal_numbers_with_leading_zeros
        # graph_decimal_final = final_graph_decimal

        # Currency symbol labels drive the per-currency loop below.
        maj_singular_labels = load_labels(get_abs_path("data/money/currency_major.tsv"))
        maj_singular_graph = convert_space(maj_singular)
        maj_plural_graph = maj_singular_graph
        ar_cur_graph = convert_space(ar_cur)

        graph_maj_singular = pynutil.insert("currency_maj: \"") + maj_singular_graph + pynutil.insert("\"")
        graph_maj_plural = pynutil.insert("currency_maj: \"") + maj_plural_graph + pynutil.insert("\"")
        graph_ar_cur = pynutil.insert("currency_maj: \"") + ar_cur_graph + pynutil.insert("\"")

        # Drop a fractional part that is all zeros, e.g. ".00".
        optional_delete_fractional_zeros = pynini.closure(
            pynutil.delete(".") + pynini.closure(pynutil.delete("0"), 1), 0, 1
        )
        graph_integer_one = pynutil.insert("integer_part: \"") + pynini.cross("1", "واحد") + pynutil.insert("\"")
        # only for decimals where third decimal after comma is non-zero or with quantity
        decimal_delete_last_zeros = (
            pynini.closure(NEMO_DIGIT, 1)
            + pynini.accep(".")
            + pynini.closure(NEMO_DIGIT, 2)
            + (NEMO_DIGIT - "0")
            + pynini.closure(pynutil.delete("0"))
        )
        decimal_with_quantity = NEMO_SIGMA + NEMO_ALPHA
        # graph_decimal = (
        #     graph_maj_plural + insert_space + (decimal_delete_last_zeros | decimal_with_quantity) @ graph_decimal_final
        # )
        # Integer amounts other than exactly "1" (handled by graph_integer_one).
        graph_integer = (
            pynutil.insert("integer_part: \"") + ((NEMO_SIGMA - "1") @ cardinal_graph) + pynutil.insert("\"")
        )
        graph_integer_only = graph_maj_singular + insert_space + graph_integer_one
        graph_integer_only |= graph_maj_plural + insert_space + graph_integer
        # For local currency "9د.ك"
        graph_integer_only_ar = graph_integer + insert_space + graph_ar_cur
        # graph_decimal_ar = graph_decimal_final + insert_space + graph_ar_cur
        graph = (graph_integer_only + optional_delete_fractional_zeros) | graph_integer_only_ar
        # remove trailing zeros of non zero number in the first 2 digits and fill up to 2 digits
        # e.g. 2000 -> 20, 0200->02, 01 -> 01, 10 -> 10
        # not accepted: 002, 00, 0,
        two_digits_fractional_part = (
            pynini.closure(NEMO_DIGIT) + (NEMO_DIGIT - "0") + pynini.closure(pynutil.delete("0"))
        ) @ (
            (pynutil.delete("0") + (NEMO_DIGIT - "0"))
            | ((NEMO_DIGIT - "0") + pynutil.insert("0"))
            | ((NEMO_DIGIT - "0") + NEMO_DIGIT)
        )
        graph_min_singular = pynutil.insert(" currency_min: \"") + min_singular + pynutil.insert("\"")
        graph_min_plural = pynutil.insert(" currency_min: \"") + min_plural + pynutil.insert("\"")
        # format ** euro ** cent
        decimal_graph_with_minor = None
        graph_with_no_minor = None
        # Build one branch per currency symbol, then union them all.
        for curr_symbol, _ in maj_singular_labels:
            preserve_order = pynutil.insert(" preserve_order: true")
            integer_plus_maj = graph_integer + insert_space + pynutil.insert(curr_symbol) @ graph_maj_plural
            integer_plus_maj |= graph_integer_one + insert_space + pynutil.insert(curr_symbol) @ graph_maj_singular
            # non zero integer part
            integer_plus_maj = (pynini.closure(NEMO_DIGIT) - "0") @ integer_plus_maj
            # Fractional amount "1" drops its numeral and keeps only the minor unit.
            graph_fractional_one = two_digits_fractional_part @ pynini.cross("1", "")
            graph_fractional_one = pynutil.insert("fractional_part: \"") + graph_fractional_one + pynutil.insert("\"")
            # 2..10 pair with the plural minor-unit word.
            digits_two_to_ten = pynini.union("2", "3", "4", "5", "6", "7", "8", "9", "10")
            graph_fractional_up_to_ten = two_digits_fractional_part @ digits_two_to_ten @ cardinal_graph
            graph_fractional_up_to_ten = (
                pynutil.insert("fractional_part: \"") + graph_fractional_up_to_ten + pynutil.insert("\"")
            )
            # Remaining fractional values read as plain cardinals.
            graph_fractional = (
                two_digits_fractional_part
                @ (pynini.closure(NEMO_DIGIT, 1, 2) - pynini.union("2", "3", "4", "5", "6", "7", "8", "9", "10"))
                @ cardinal_graph
            )
            graph_fractional = pynutil.insert("fractional_part: \"") + graph_fractional + pynutil.insert("\"")
            fractional_plus_min = graph_fractional + insert_space + pynutil.insert(curr_symbol) @ graph_min_singular
            fractional_plus_min |= (
                graph_fractional_one + insert_space + pynutil.insert(curr_symbol) @ graph_min_singular
            )
            fractional_plus_min |= (
                graph_fractional_up_to_ten + insert_space + pynutil.insert(curr_symbol) @ graph_min_plural
            )
            graph_with_no_minor_curr = integer_plus_maj
            graph_with_no_minor_curr |= pynutil.add_weight(integer_plus_maj, weight=0.0001,)
            graph_with_no_minor_curr = pynutil.delete(curr_symbol) + graph_with_no_minor_curr + preserve_order
            graph_with_no_minor = (
                graph_with_no_minor_curr
                if graph_with_no_minor is None
                else pynini.union(graph_with_no_minor, graph_with_no_minor_curr)
            )
            # Integer + "." + fractional with minor-unit words.
            decimal_graph_with_minor_curr = integer_plus_maj + pynini.cross(".", " ") + fractional_plus_min
            # Lower-priority variant: fractional part without a minor-unit word.
            decimal_graph_with_minor_curr |= pynutil.add_weight(
                integer_plus_maj
                + pynini.cross(".", " ")
                + pynutil.insert("fractional_part: \"")
                + two_digits_fractional_part @ cardinal_graph
                + pynutil.insert("\""),
                weight=0.0001,
            )
            # "0.xx" amounts keep only the fractional/minor part.
            decimal_graph_with_minor_curr |= pynutil.delete("0.") + fractional_plus_min
            decimal_graph_with_minor_curr = (
                pynutil.delete(curr_symbol) + decimal_graph_with_minor_curr + preserve_order
            )
            decimal_graph_with_minor = (
                decimal_graph_with_minor_curr
                if decimal_graph_with_minor is None
                else pynini.union(decimal_graph_with_minor, decimal_graph_with_minor_curr)
            )
        final_graph = decimal_graph_with_minor | graph_with_no_minor | graph
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/taggers/money.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.ar.graph_utils import GraphFst, flop_digits, insert_and, insert_space
from nemo_text_processing.text_normalization.ar.utils import get_abs_path
from pynini.lib import pynutil
class CardinalFst(GraphFst):
    """
    Finite state transducer for classifying cardinals, e.g.
        "9837" -> cardinal { integer: "تسعة اَلاف وثمان مئة وسبعة وثلاثون" }

    Handles integers up to four digits (thousands), optional leading zeros,
    and an optional leading minus sign. Exposes:
        cardinal_numbers: bare number -> words FST
        cardinal_numbers_with_leading_zeros: same, leading zeros stripped
        optional_minus_graph: optional "-" -> negative: "true" field
    """

    def __init__(self):
        super().__init__(name="cardinal", kind="classify")
        # zero
        graph_zero = pynini.string_file(get_abs_path("data/number/zero.tsv"))
        # cardinals data files
        graph_digit = pynini.string_file(get_abs_path("data/number/digit.tsv"))
        digit_100 = pynini.string_file(get_abs_path("data/number/digit_100.tsv"))
        digit_1000 = pynini.string_file(get_abs_path("data/number/digit_1000.tsv"))
        teens = pynini.string_file(get_abs_path("data/number/teens.tsv"))
        tens = pynini.string_file(get_abs_path("data/number/tens.tsv"))
        # Grammar for cardinals 10_20_30 etc (round tens: delete the "0")
        tens_zero = tens + pynutil.delete("0")
        # Creating flops for two digit cardinals 34->43: Arabic reads the
        # ones digit before the tens digit, so the digits are swapped first.
        reverse_digits = pynini.string_file(get_abs_path("data/number/flops.tsv"))
        # Grammar for two digit cardinals
        graph_flops = flop_digits @ reverse_digits
        graph_tens_plus1 = graph_digit + insert_space + insert_and + tens
        graph_tens_plus = graph_flops @ graph_tens_plus1
        graph_all = graph_digit | teens | tens_zero | graph_tens_plus
        graph_two_digits = teens | tens_zero | graph_tens_plus
        # Grammar for cardinals hundreds
        one_hundred = pynini.cross("1", "مئة")
        hundreds_zero = digit_100 + insert_space + pynutil.insert("مئة") + pynutil.delete("00", weight=0.001)
        hundreds_plus = (
            digit_100 + insert_space + pynutil.insert("مئة") + insert_space + insert_and + graph_two_digits
            | digit_100
            + insert_space
            + pynutil.insert("مئة")
            + pynutil.delete("0")
            + insert_space
            + insert_and
            + graph_digit
        )
        # "2xx" uses the dual form مئتان.
        two_hundreds = pynini.cross("2", "مئتان")
        graph_one_hundred = one_hundred + pynutil.delete("00", weight=0.001)
        graph_one_hundred_plus = (
            one_hundred + insert_space + insert_and + graph_two_digits
            | one_hundred + pynutil.delete("0") + insert_space + insert_and + graph_digit
        )
        graph_two_hundreds = (
            (two_hundreds + pynutil.delete("00", weight=0.001))
            | two_hundreds + insert_space + insert_and + graph_two_digits
            | two_hundreds + pynutil.delete("0") + insert_space + insert_and + graph_digit
        )
        graph_all_one_hundred = graph_one_hundred | graph_one_hundred_plus
        graph_all_hundreds = graph_all_one_hundred | graph_two_hundreds | hundreds_zero | hundreds_plus
        # Grammar for thousands (singular ألف, dual ألفان, plural اَلاف)
        one_thousand = pynini.cross("1", "ألف")
        thousands_zero = digit_1000 + insert_space + pynutil.insert("اَلاف") + pynutil.delete("000", weight=0.001)
        thousands_plus = (
            digit_1000 + insert_space + pynutil.insert("اَلاف") + insert_space + insert_and + graph_all_hundreds
        )
        thousands_skip_hundreds = (
            digit_1000
            + insert_space
            + pynutil.insert("اَلاف")
            + pynutil.delete("0")
            + insert_space
            + insert_and
            + graph_two_digits
            | digit_1000
            + insert_space
            + pynutil.insert("اَلاف")
            + pynutil.delete("00")
            + insert_space
            + insert_and
            + graph_digit
        )
        two_thousands = pynini.cross("2", "ألفان")
        graph_one_thousand = one_thousand + pynutil.delete("000", weight=0.001)
        graph_one_thousand_plus = (
            one_thousand + insert_space + insert_and + graph_all_hundreds
            | one_thousand + pynutil.delete("0") + insert_space + insert_and + graph_two_digits
            | one_thousand + pynutil.delete("00") + insert_space + insert_and + graph_digit
        )
        graph_two_thousands = two_thousands + pynutil.delete("000", weight=0.001)
        graph_two_thousands_plus = (
            two_thousands + insert_space + insert_and + graph_all_hundreds
            | two_thousands + pynutil.delete("0") + insert_space + insert_and + graph_two_digits
            | two_thousands + pynutil.delete("00") + insert_space + insert_and + graph_digit
        )
        graph_all_one_thousand = graph_one_thousand | graph_one_thousand_plus
        graph_all_two_thousands = graph_two_thousands | graph_two_thousands_plus
        graph_all_thousands = (
            graph_all_one_thousand
            | graph_two_thousands
            | thousands_zero
            | thousands_plus
            | thousands_skip_hundreds
            | graph_all_two_thousands
        )
        graph = graph_all | graph_all_hundreds | graph_all_thousands | graph_zero
        self.cardinal_numbers = (graph).optimize()
        # remove leading zeros
        leading_zeros = pynini.closure(pynini.cross("0", ""))
        self.cardinal_numbers_with_leading_zeros = (leading_zeros + self.cardinal_numbers).optimize()
        self.optional_minus_graph = pynini.closure(pynutil.insert("negative: ") + pynini.cross("-", '"true" '), 0, 1)
        final_graph = (
            self.optional_minus_graph
            + pynutil.insert('integer: "')
            + self.cardinal_numbers_with_leading_zeros
            + pynutil.insert('"')
        )
        final_graph = self.add_tokens(final_graph)  # inserts the cardinal tag
        # Optimize the final FST for consistency with the other taggers
        # (the original assigned the unoptimized graph here).
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/taggers/cardinal.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_SPACE, GraphFst
from pynini.lib import pynutil
class WordFst(GraphFst):
    """
    Finite state transducer for classifying a plain word.
        e.g. sleep -> tokens { name: "sleep" }

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="word", kind="classify")
        # Accept one or more non-space characters and wrap them in a
        # `name: "..."` field.
        token_chars = pynini.closure(NEMO_NOT_SPACE, 1)
        open_field = pynutil.insert("name: \"")
        close_field = pynutil.insert("\"")
        word = open_field + token_chars + close_field
        self.fst = word.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/taggers/word.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.ar.graph_utils import (
NEMO_NOT_QUOTE,
GraphFst,
delete_extra_space,
delete_preserve_order,
)
from pynini.lib import pynutil
class MeasureFst(GraphFst):
    """
    Finite state transducer for verbalizing measure, e.g.
    measure { cardinal { integer: "20" } units: "%" } -> "عشرون في المائة"

    Args:
        decimal: decimal GraphFst
        cardinal: cardinal GraphFst
        fraction: fraction GraphFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, decimal: GraphFst, cardinal: GraphFst, fraction: GraphFst, deterministic: bool):
        super().__init__(name="measure", kind="verbalize", deterministic=deterministic)
        # Content of the units field, quotes stripped.
        unit_part = pynutil.delete("units: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        # Any of the numeric verbalizers may precede the unit.
        numeric_part = cardinal.fst | decimal.fst | fraction.fst
        # Either keep an existing single space before the unit, or collapse
        # surplus whitespace down to exactly one space.
        graph = numeric_part + pynini.accep(" ") + unit_part
        graph |= numeric_part + delete_extra_space + unit_part
        graph += delete_preserve_order
        self.fst = self.delete_tokens(graph).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/verbalizers/measure.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.ar.graph_utils import NEMO_NOT_QUOTE, NEMO_SIGMA, GraphFst, insert_space
from pynini.lib import pynutil
class FractionFst(GraphFst):
    """
    Finite state transducer for verbalizing fraction
    e.g. tokens { 'fraction { integer_part: "مئة وخمسة" numerator: "ثلاثون" denominator: "سبعة وستون" }' } ->
    مئة وخمسة و ثلاثون على سبعة وستون

    Special-cased numerators (one, two, three..ten) select singular, dual and
    plural denominator forms respectively; all remaining numerator/denominator
    pairs fall back to the generic "<numerator> على <denominator>" reading.
    """

    def __init__(self):
        super().__init__(name="fraction", kind="verbalize")
        # Optional leading sign word; the trailing space in the output keeps
        # it separated from the integer/numerator that follows.
        optional_sign = pynini.closure(pynini.cross("negative: \"true\"", "سالب ") + pynutil.delete(" "), 0, 1)
        # create unions for special cases
        denominator_singular = pynini.union("نصف", "ثلث", "ربع", "خمس", "سدس", "سبع", "ثمن", "تسع", "عشر")
        denominator_dual = pynini.union(
            "نصفين", "ثلثين", "ربعين", "خمسين", "سدسين", "سبعين", "ثمنين", "تسعين", "عشرين"
        )
        denominator_plural = pynini.union("أخماس", "أرباع", "أثلاث", "أسداس", "أسباع", "أثمان", "أتساع", "أعشار")
        numerator_three_to_ten = pynini.union("خمسة", "سبعة", "عشرة", "ثلاثة", "أربعة", "ستة", "ثمانية", "تسعة")
        # filter cases when denominator_singular
        # (composition with closure(NEMO_NOT_QUOTE) restricts the field to the listed forms)
        graph_denominator_singular = (
            pynutil.delete("denominator: \"")
            + denominator_singular @ pynini.closure(NEMO_NOT_QUOTE)
            + pynutil.delete("\"")
        )
        # filter cases when denominator_dual
        graph_denominator_dual = (
            pynutil.delete("denominator: \"")
            + denominator_dual @ pynini.closure(NEMO_NOT_QUOTE)
            + pynutil.delete("\"")
        )
        # filter cases when denominator_plural
        graph_denominator_plural = (
            pynutil.delete("denominator: \"")
            + denominator_plural @ pynini.closure(NEMO_NOT_QUOTE)
            + pynutil.delete("\"")
        )
        # integer part
        integer = pynutil.delete("integer_part: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\" ")
        # verbalize from integer and one over half --> integer and half e.g واحد ونصف
        # (the numerator word itself is dropped; only the singular denominator is emitted)
        numerator_one = pynutil.delete("numerator: \"واحد\"") + pynutil.delete(" ") + graph_denominator_singular
        # verbalize from integer and two over half(dual) --> integer and half(dual) e.g. واحد وثلثين
        numerator_two = pynutil.delete("numerator: \"اثنان\"") + pynutil.delete(" ") + graph_denominator_dual
        # verbalize from integer and three over thirds(plural) --> integer and three thirds(plural) e.g. واحد وثلاثة أرباع
        numerator_three_to_ten = (
            pynutil.delete("numerator: \"")
            + numerator_three_to_ten @ pynini.closure(NEMO_NOT_QUOTE)
            + insert_space
            + pynutil.delete("\"")
            + pynutil.delete(" ")
            + graph_denominator_plural
        )
        # rest of numbers
        denominator_rest = pynutil.delete("denominator: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        numerator_rest = pynutil.delete("numerator: \"") + (pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\" "))
        # Generic reading: "<numerator> على <denominator>" ("<num> over <den>").
        numerators = numerator_rest + insert_space + pynutil.insert("على ") + denominator_rest
        fraction = (
            numerator_one | numerator_three_to_ten | numerator_two | pynutil.add_weight(numerators, 0.001)
        )  # apply exceptions first then the rest
        # Optional integer part joined with the conjunction "و" ("and").
        conjunction = pynutil.insert("و ")
        integer = pynini.closure(integer + insert_space + conjunction, 0, 1)
        self.graph = optional_sign + integer + fraction
        delete_tokens = self.delete_tokens(self.graph)
        self.fst = delete_tokens.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/verbalizers/fraction.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nemo_text_processing.text_normalization.ar.graph_utils import GraphFst
from nemo_text_processing.text_normalization.ar.taggers.cardinal import CardinalFst as CardinalTagger
from nemo_text_processing.text_normalization.ar.verbalizers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.ar.verbalizers.decimal import DecimalFst
from nemo_text_processing.text_normalization.ar.verbalizers.fraction import FractionFst
from nemo_text_processing.text_normalization.ar.verbalizers.measure import MeasureFst
from nemo_text_processing.text_normalization.ar.verbalizers.money import MoneyFst
class VerbalizeFst(GraphFst):
    """
    Composes other verbalizer grammars.
    For deployment, this grammar will be compiled and exported to OpenFst Finite State Archive (FAR) File.
    More details to deployment at NeMo/tools/text_processing_deployment.

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="verbalize", kind="verbalize", deterministic=deterministic)
        cardinal = CardinalFst()
        decimal = DecimalFst()
        fraction = FractionFst()
        money = MoneyFst()
        # Measure reuses the numeric verbalizers built above.
        measure = MeasureFst(decimal=decimal, cardinal=cardinal, fraction=fraction, deterministic=deterministic)
        # Fix: a CardinalTagger instance was previously constructed here but
        # never used; building it only slowed grammar compilation, so it was
        # removed.
        graph = cardinal.fst | decimal.fst | fraction.fst | money.fst | measure.fst
        self.fst = graph
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/verbalizers/verbalize.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import pynini
from nemo_text_processing.text_normalization.ar.graph_utils import (
GraphFst,
delete_extra_space,
delete_space,
generator_main,
)
from nemo_text_processing.text_normalization.ar.verbalizers.verbalize import VerbalizeFst
from nemo_text_processing.text_normalization.ar.verbalizers.word import WordFst
from pynini.lib import pynutil
class VerbalizeFinalFst(GraphFst):
    """
    Finite state transducer that verbalizes an entire sentence

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        cache_dir: path to a dir with .far grammar file. Set to None to avoid using cache.
        overwrite_cache: set to True to overwrite .far files
    """

    def __init__(self, deterministic: bool = True, cache_dir: str = None, overwrite_cache: bool = False):
        super().__init__(name="verbalize_final", kind="verbalize", deterministic=deterministic)
        far_file = None
        if cache_dir is not None and cache_dir != "None":
            os.makedirs(cache_dir, exist_ok=True)
            far_file = os.path.join(cache_dir, f"ar_tn_{deterministic}_deterministic_verbalizer.far")
        if not overwrite_cache and far_file and os.path.exists(far_file):
            # Fast path: restore the previously compiled grammar from the FAR cache.
            self.fst = pynini.Far(far_file, mode="r")["verbalize"]
            logging.info(f'VerbalizeFinalFst graph was restored from {far_file}.')
        else:
            verbalize = VerbalizeFst(deterministic=deterministic).fst
            word = WordFst(deterministic=deterministic).fst
            # A token is handled by a semiotic-class verbalizer or, failing
            # that, passed through as a plain word.
            types = verbalize | word
            # Strip the "tokens { ... }" wrapper surrounding each token.
            graph = (
                pynutil.delete("tokens")
                + delete_space
                + pynutil.delete("{")
                + delete_space
                + types
                + delete_space
                + pynutil.delete("}")
            )
            # One or more tokens, rejoined with single spaces; leading/trailing
            # whitespace is dropped.
            graph = delete_space + pynini.closure(graph + delete_extra_space) + graph + delete_space
            self.fst = graph.optimize()
            if far_file:
                # Persist the compiled grammar so later runs can restore it.
                generator_main(far_file, {"verbalize": self.fst})
                logging.info(f"VerbalizeFinalFst grammars are saved to {far_file}.")
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/verbalizers/verbalize_final.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import pynini
from nemo_text_processing.inverse_text_normalization.en.taggers.tokenize_and_classify import ClassifyFst
from nemo_text_processing.inverse_text_normalization.en.verbalizers.verbalize import VerbalizeFst
from nemo_text_processing.inverse_text_normalization.en.verbalizers.verbalize_final import VerbalizeFinalFst
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/verbalizers/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.ar.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
from pynini.lib import pynutil
class DecimalFst(GraphFst):
    """
    Finite state transducer for verbalizing decimal, e.g.
    tokens { decimal { integer_part: "..." fractional_part: "..." } } ->
        "... ..."
    (The previous docstring showed a Russian example copied from the ru
    grammar; this class verbalizes Arabic decimal tokens.)

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="decimal", kind="verbalize", deterministic=deterministic)
        # Fix: emit "سالب " with a trailing space so the sign word does not
        # fuse with the integer part — consistent with the fraction and money
        # verbalizers in this package.
        optional_sign = pynini.closure(pynini.cross("negative: \"true\" ", "سالب "), 0, 1)
        # Content between the quotes of a field (leading ' "' and closing '"').
        quoted_value = pynutil.delete(" \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        integer_part = pynutil.delete("integer_part:") + quoted_value
        fractional_part = pynutil.delete("fractional_part:") + quoted_value
        # Optional quantity (e.g. magnitude word) kept verbatim after the number.
        optional_quantity_part = pynini.closure(
            pynini.accep(" ")
            + pynutil.delete("quantity: \"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\""),
            0,
            1,
        )
        self.graph = (
            optional_sign + integer_part + pynini.accep(" ") + fractional_part + optional_quantity_part + delete_space
        )
        delete_tokens = self.delete_tokens(self.graph)
        self.fst = delete_tokens.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/verbalizers/decimal.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.ar.graph_utils import (
NEMO_NOT_QUOTE,
GraphFst,
delete_preserve_order,
delete_space,
)
from pynini.lib import pynutil
class MoneyFst(GraphFst):
    """
    Finite state transducer for verbalizing money, e.g.
    money { integer_part: "تسعة" currency_maj: "يورو" preserve_order: true} -> "تسعة يورو"
    money { integer_part: "تسعة" currency_maj: "دولار" preserve_order: true} -> "تسعة دولار"
    money { integer_part: "خمسة" currency_maj: "دينار كويتي"} -> "خمسة دينار كويتي"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="money", kind="verbalize", deterministic=deterministic)
        space = pynini.accep(" ")
        # Quoted field contents (quotes stripped).
        major_unit = pynutil.delete("currency_maj: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        minor_unit = pynutil.delete("currency_min: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        fractional_part = (
            pynutil.delete("fractional_part: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        )
        integer_part = pynutil.delete("integer_part: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        conjunction = pynutil.insert(" و")

        # currency_maj then integer (unit-first field order kept as-is).
        graph_unit_first = major_unit + space + integer_part
        # integer currency_maj "و" fractional currency_min.
        graph_with_minor = (
            integer_part
            + space
            + major_unit
            + delete_space
            + conjunction
            + fractional_part
            + space
            + delete_space
            + minor_unit
            + delete_preserve_order
        )
        # Reorders "dollar three" (دولار تسعة) to "three dollar" (تسعة دولار).
        graph_no_minor = integer_part + space + major_unit + delete_space + delete_preserve_order
        # Minor currency only.
        graph_minor_only = fractional_part + space + delete_space + minor_unit + delete_preserve_order

        graph = graph_unit_first | graph_with_minor | graph_minor_only | graph_no_minor
        self.fst = self.delete_tokens(graph).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/verbalizers/money.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst
from pynini.lib import pynutil
class CardinalFst(GraphFst):
    """
    Finite state transducer for verbalizing cardinals
    e.g. cardinal { integer: "ستون" } -> "ستون"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="cardinal", kind="verbalize", deterministic=deterministic)
        # NOTE(review): the sign is verbalized with the English word "minus ",
        # whereas the sibling Arabic fraction/decimal grammars use "سالب " —
        # looks like a copy-over from the English grammar; confirm intended output.
        optional_sign = pynini.closure(pynini.cross("negative: \"true\" ", "minus "), 0, 1)
        # Exposed as attributes so other verbalizers can reuse these pieces.
        self.optional_sign = optional_sign
        integer = pynini.closure(NEMO_NOT_QUOTE, 1)
        # Strips the surrounding quotes of the integer field value.
        self.integer = pynutil.delete(" \"") + integer + pynutil.delete("\"")
        integer = pynutil.delete("integer:") + self.integer
        self.numbers = integer
        graph = optional_sign + self.numbers
        delete_tokens = self.delete_tokens(graph)
        self.fst = delete_tokens.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/verbalizers/cardinal.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.ar.graph_utils import NEMO_CHAR, NEMO_SIGMA, GraphFst, delete_space
from pynini.lib import pynutil
class WordFst(GraphFst):
    """
    Finite state transducer for verbalizing word
    e.g. tokens { name: "sleep" } -> sleep

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="word", kind="verbalize", deterministic=deterministic)
        # One or more non-space characters: the quoted token name.
        non_space = pynini.closure(NEMO_CHAR - " ", 1)
        name_field = pynutil.delete("name:") + delete_space + pynutil.delete("\"") + non_space + pynutil.delete("\"")
        # Rewrite non-breaking spaces (U+00A0) to ordinary spaces in the output.
        nbsp_to_space = pynini.cdrewrite(pynini.cross(u"\u00A0", " "), "", "", NEMO_SIGMA)
        self.fst = (name_field @ nbsp_to_space).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/verbalizers/word.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/data/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/data/number/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/data/money/__init__.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/ar/data/measure/__init__.py |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
# Copyright 2015 and onwards Google, Inc.
# Copyright (c) 2023, Jim O'Regan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import delete_space, insert_space
from pynini.lib import byte
# Hungarian alphabet, upper/lower case in matching positions.
_ALPHA_UPPER = "AÁBCDEÉFGHIÍJKLMNOÓÖŐPQRSTUÚÜŰVWXYZ"
_ALPHA_LOWER = "aábcdeéfghiíjklmnoóöőpqrstuúüűvwxyz"
_VOWELS = "AÁEÉIÍOÓÖŐUÚÜŰaáeéiíoóöőuúüű"

# Case-mapping transducers built from the positionally paired alphabets.
TO_LOWER = pynini.union(*(pynini.cross(upper, lower) for upper, lower in zip(_ALPHA_UPPER, _ALPHA_LOWER)))
TO_UPPER = pynini.invert(TO_LOWER)

# Character-class acceptors for Hungarian text.
HU_LOWER = pynini.union(*_ALPHA_LOWER).optimize()
HU_UPPER = pynini.union(*_ALPHA_UPPER).optimize()
HU_ALPHA = pynini.union(HU_LOWER, HU_UPPER).optimize()
HU_ALNUM = pynini.union(byte.DIGIT, HU_ALPHA).optimize()
HU_VOWELS = pynini.union(*_VOWELS)

# Collapse optional whitespace down to exactly one space.
ensure_space = pynini.closure(delete_space, 0, 1) + insert_space
bos_or_space = pynini.union("[BOS]", " ")
eos_or_space = pynini.union("[EOS]", " ")
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/graph_utils.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# Copyright (c) 2023, Jim O'Regan.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import os
def get_abs_path(rel_path):
    """
    Get absolute path

    Args:
        rel_path: relative path to this file

    Returns absolute path
    """
    module_dir = os.path.dirname(os.path.abspath(__file__))
    return f"{module_dir}/{rel_path}"
def load_labels(abs_path):
    """
    loads relative path file as dictionary

    Args:
        abs_path: absolute path

    Returns dictionary of mappings
    """
    with open(abs_path) as tsv_file:
        reader = csv.reader(tsv_file, delimiter="\t")
        return [row for row in reader]
def load_inflection(abs_path):
    """
    loads inflection information

    Args:
        abs_path: absolute path

    Returns dictionary of mappings of word endings to
    lists of case endings.
    """
    with open(abs_path) as inflection_tsv:
        # Second column holds a space-separated list of case endings.
        return {row[0]: row[1].split(" ") for row in csv.reader(inflection_tsv, delimiter="\t")}
def _modify_ending(outword: str, word: str, form: str) -> str:
"""
Helper for the inflector. Modifies endings where there is a difference
between how they are written for abbreviations, and for full words.
Args:
outword: the form of the word to be output
word: the base form of the word
form: the ending to be appended
"""
if outword == word:
return form
endings = ["ny", "nny", "ly", "lly", "ev", "év", "út", "ut", "a", "á", "e", "é"]
undouble = {
"nny": "ny",
"lly": "ly",
}
for ending in endings:
if form.startswith(ending):
final = ""
if ending in undouble:
final = undouble[ending]
return final + form[len(ending) :]
return form
def inflect_abbreviation(abbr: str, word: str, singular_only=False):
    """
    For currency symbols, the inflection can either be taken from
    the underlying final word, or from the letter itself.
    This (ab)uses naive_inflector to get the letter-based
    inflection.

    Args:
        abbr: the abbreviated base form
        word: the base (nominative singular) form of the expansion
            of abbr
        singular_only: whether or not to add plural forms

    Returns a list of tuples containing the inflected abbreviation and
    its expansion.
    """
    # Choose a vowel stem for the letter-name of the final character;
    # unsupported final characters yield no inflections at all.
    last = abbr.lower()[-1]
    if last in "bcdgjptvz":
        letter_stem = "é"
    elif last in "aáeéiíoóöőuúüű":
        letter_stem = last
    elif last in "flmnrs":
        letter_stem = "e" + last
    elif last in "hk":
        letter_stem = "á"
    else:
        return []
    # Expansions come from inflecting the full word; abbreviation forms come
    # from inflecting the letter stem under the original-case abbreviation.
    expansions = [pair[1] for pair in naive_inflector(".", word, singular_only)]
    abbreviations = [pair[0] for pair in naive_inflector(abbr, letter_stem, singular_only)]
    return zip(abbreviations, expansions)
def naive_inflector(abbr: str, word: str, singular_only=False):
    """
    Performs naïve inflection of a pair of words: the abbreviation,
    and its expansion. Possessive forms are omitted, due to the
    nature of the kinds of words/abbreviations being expanded

    Args:
        abbr: the abbreviated base form
        word: the base (nominative singular) form of the expansion
            of abbr
        singular_only: whether or not to add plural forms

    Returns a list of tuples containing the inflected abbreviation and
    its expansion.
    """
    # Ending -> case-suffix tables loaded from the bundled TSV data files.
    singular = load_inflection(get_abs_path("data/inflection/endings.tsv"))
    plural = load_inflection(get_abs_path("data/inflection/plural_endings.tsv"))
    # Whole-word exceptions; checked before ending-based lookup.
    lexical = load_inflection(get_abs_path("data/inflection/word_endings.tsv"))
    # Longest endings first, so the most specific ending matches.
    keys_sorted = sorted(singular, key=len, reverse=True)

    def get_kv():
        # Returns (matched ending, list of case suffixes) for `word`.
        if word in lexical:
            return (word, lexical[word])
        for key in keys_sorted:
            if word.endswith(key):
                return (key, singular[key])
        # NOTE(review): `key` here is just the last candidate tried, so the
        # message can be misleading — it is `word` that failed to match.
        raise KeyError(f"Key {key} not found ({word})")

    forms = []
    key, ends = get_kv()
    outword = word
    # Strip word endings whose written form changes before a suffix;
    # _modify_ending() (via tweak) restores the joined spelling later.
    for wordend in ["ny", "ly", "év", "út", "a", "e"]:
        if outword.endswith(wordend):
            outword = outword[: -len(wordend)]

    def tweak(form: str) -> str:
        # Adjusts a case suffix for attachment to the abbreviation.
        return _modify_ending(outword, word, form)

    # Only the part before a hyphen is treated as the abbreviation proper.
    if "-" in abbr:
        abbr = abbr.split("-")[0]
    # Singular case forms.
    for form in ends:
        forms.append((f"{abbr}-{tweak(form)}", f"{outword}{form}"))
    if not singular_only:
        # Plural stem plus each singular case suffix on top of it.
        for plural_form in plural[key]:
            plural_key = plural_form
            if plural_form == "k":
                plural_key = key + "k"
            forms.append((f"{abbr}-{tweak(plural_form)}", f"{outword}{plural_form}"))
            for form in singular[plural_key]:
                forms.append((f"{abbr}-{tweak(plural_form)}{form}", f"{outword}{plural_form}{form}"))
    return forms
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/utils.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# Copyright (c) 2023, Jim O'Regan.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_DIGIT,
NEMO_SPACE,
GraphFst,
convert_space,
insert_space,
)
from nemo_text_processing.text_normalization.hu.utils import (
get_abs_path,
inflect_abbreviation,
load_labels,
naive_inflector,
)
from pynini.lib import pynutil
# Minute offsets for the Hungarian quarter-hour words.
QUARTERS = {15: "negyed", 30: "fél", 45: "háromnegyed"}


def get_all_to_or_from_numbers():
    """
    Build the minute mappings for each quarter-hour word.

    Returns a dict keyed by quarter word ("negyed", "fél", "háromnegyed"),
    each value holding {"past": [...], "to": [...]} lists of
    (minute, offset) string pairs relative to that quarter.
    """
    result = {}
    for quarter_minute, quarter_word in QUARTERS.items():
        # Minutes before the quarter map to "to" (offset = quarter - minute),
        # minutes after map to "past" (offset = minute - quarter).
        to_pairs = [(str(m), str(quarter_minute - m)) for m in range(1, quarter_minute)]
        past_pairs = [(str(m), str(m - quarter_minute)) for m in range(quarter_minute + 1, 60)]
        result[quarter_word] = {"past": past_pairs, "to": to_pairs}
    return result
def get_all_to_or_from_fst(cardinal: GraphFst):
    """
    Build per-quarter "past"/"to" minute FSTs composed with the cardinal graph.

    Args:
        cardinal: cardinal GraphFst whose .graph verbalizes the minute offsets

    Returns a dict keyed by quarter word, each value a dict with "past" and
    "to" FST entries.
    """
    numbers = get_all_to_or_from_numbers()
    output = {}
    for key in numbers:
        # Fix: the inner dict was previously re-created inside the "past"/"to"
        # loop, wiping the already-stored "past" entry; initialize it once per key.
        output[key] = {}
        for when in ["past", "to"]:
            pair_map = pynini.string_map(numbers[key][when])
            output[key][when] = pynini.project(pair_map, "input") @ pair_map @ cardinal.graph
    return output
class TimeFst(GraphFst):
    """
    Finite state transducer for classifying time, e.g.
        "negyed 2" -> time { minutes: "tizenöt" hours: "egy" }
        "02:15:10" -> time with hours, minutes and seconds fields filled in

    NOTE(review): the examples in the original docstring (e.g. '"9 óra" ->
    time { hours: "2" }', zone "est") appear copied from another language's
    grammar; the actual field values are the Hungarian number words produced
    by ``cardinal.graph`` — confirm against the test suite.

    Args:
        cardinal: cardinal tagger; its ``.graph`` verbalizes digit strings
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, deterministic: bool = True):
        super().__init__(name="time", kind="classify", deterministic=deterministic)
        # Unit words: "ó"/"óra" (hour), "p"/"perc" (minute), "mp"/"másodperc"
        # (second). *_forms adds naively inflected variants of the full word;
        # *_forms_both additionally accepts the full (inflected) word as-is.
        ora_word = pynini.cross("ó", "óra") | pynini.accep("óra")
        ora_forms = pynini.string_map(naive_inflector("ó", "óra", True) + [("ó", "óra")])
        ora_forms_both = ora_forms | pynini.project(ora_forms, "output")
        perc_word = pynini.cross("p", "perc") | pynini.accep("perc")
        perc_forms = pynini.string_map(naive_inflector("p", "perc", True) + [("p", "perc")])
        perc_forms_both = perc_forms | pynini.project(perc_forms, "output")
        # masodperc_word = pynini.cross("mp", "másodperc") | pynini.accep("másodperc")
        masodperc_forms = pynini.string_map(naive_inflector("mp", "másodperc", True) + [("mp", "másodperc")])
        masodperc_forms_both = masodperc_forms | pynini.project(masodperc_forms, "output")
        final_forms = ora_forms_both | perc_forms_both | masodperc_forms_both
        final_suffix = pynutil.insert("suffix: \"") + final_forms + pynutil.insert("\"")
        final_suffix_optional = pynini.closure(
            pynutil.insert(" suffix: \"") + final_forms + pynutil.insert("\""), 0, 1
        )
        ora_suffix = pynutil.insert("suffix: \"") + ora_forms_both + pynutil.insert("\"")
        perc_suffix = pynutil.insert("suffix: \"") + (ora_forms_both | perc_forms_both) + pynutil.insert("\"")
        # Time zones from TSV, extended with de-spaced abbreviation variants.
        time_zone_graph = pynini.string_file(get_abs_path("data/time/time_zone.tsv"))
        time_zone_entries = load_labels(get_abs_path("data/time/time_zone.tsv"))
        for entry in time_zone_entries:
            # Inflect 'a' because there's nothing else meaningful to use
            inflected = inflect_abbreviation(entry[1], "a", True)
            mapping = [(x[0].replace(" ", ""), x[0]) for x in inflected]
            time_zone_graph |= pynini.string_map(mapping)
        labels_hour = [str(x) for x in range(0, 25)]
        labels_minute_single = [str(x) for x in range(1, 10)]
        labels_minute_double = [str(x) for x in range(10, 60)]
        # Minutes counted towards the next hour: e.g. "50" -> "10" (ten to).
        minutes_to = pynini.string_map([(str(i), str(60 - i)) for i in range(1, 60)])
        minutes_inverse = pynini.invert(pynini.project(minutes_to, "input") @ cardinal.graph)
        # Spoken minute words -> spoken "minutes to" words (exposed for reuse).
        self.minute_words_to_words = minutes_inverse @ minutes_to @ cardinal.graph
        # minute_words_to_words = pynutil.insert("minutes: \"") + self.minute_words_to_words + pynutil.insert("\"")

        def hours_to_pairs():
            # Yield (next_hour, current_hour) on a 12-hour dial; 12 wraps to 1.
            for x in range(1, 13):
                if x == 12:
                    y = 1
                else:
                    y = x + 1
                yield y, x

        hours_next = pynini.string_map([(str(x[0]), str(x[1])) for x in hours_to_pairs()])
        hours_next_inverse = pynini.invert(pynini.project(hours_next, "input") @ cardinal.graph)
        # Written next-hour digit -> spoken current hour ("negyed 2" names hour 1).
        self.hour_numbers_to_words = hours_next @ cardinal.graph
        self.hour_words_to_words = hours_next_inverse @ self.hour_numbers_to_words
        hour_numbers_to_words = pynutil.insert("hours: \"") + self.hour_numbers_to_words + pynutil.insert("\"")
        hour_words_to_words = pynutil.insert("hours: \"") + self.hour_words_to_words + pynutil.insert("\"")
        # Quarter word -> minute value (QUARTERS inverted), then verbalized.
        quarter_map = pynini.string_map([(p[1], str(p[0])) for p in QUARTERS.items()])
        quarter_map_graph = pynutil.insert("minutes: \"") + (quarter_map @ cardinal.graph) + pynutil.insert("\"")
        # quarter_words = pynini.string_map(QUARTERS.values())
        # quarter_words_graph = pynutil.insert("minutes: \"") + quarter_words + pynutil.insert("\"")
        # {quarter} {hour_next}
        # negyed 2 -> minutes: "tizenöt" hours: "egy"
        self.quarter_prefixed_next_to_current = quarter_map_graph + NEMO_SPACE + hour_numbers_to_words
        # For ITN
        self.quarter_prefixed_next_to_current_words = quarter_map_graph + NEMO_SPACE + hour_words_to_words
        # "02" -> "2": strip a leading zero from a two-digit field.
        delete_leading_zero_to_double_digit = (pynutil.delete("0") | (NEMO_DIGIT - "0")) + NEMO_DIGIT
        optional_delete_leading_zero_to_double_digit = (
            pynini.closure(pynutil.delete("0"), 0, 1) | (NEMO_DIGIT - "0")
        ) + NEMO_DIGIT
        graph_hour = pynini.union(*labels_hour) @ cardinal.graph
        graph_minute_single = pynini.union(*labels_minute_single)
        graph_minute_double = pynini.union(*labels_minute_double)
        # final_graph_hour_only = pynutil.insert("hours: \"") + graph_hour + pynutil.insert("\"")
        final_graph_hour = (
            pynutil.insert("hours: \"") + delete_leading_zero_to_double_digit @ graph_hour + pynutil.insert("\"")
        )
        final_graph_hour_maybe_zero = (
            pynutil.insert("hours: \"")
            + optional_delete_leading_zero_to_double_digit @ graph_hour
            + pynutil.insert("\"")
        )
        final_graph_minute = (
            pynutil.insert("minutes: \"")
            + (pynutil.delete("0") + graph_minute_single | graph_minute_double) @ cardinal.graph
            + pynutil.insert("\"")
        )
        final_graph_minute_maybe_zero = (
            pynutil.insert("minutes: \"")
            + (pynini.closure(pynutil.delete("0"), 0, 1) + graph_minute_single | graph_minute_double) @ cardinal.graph
            + pynutil.insert("\"")
        )
        final_graph_second = (
            pynutil.insert("seconds: \"")
            + (pynutil.delete("0") + graph_minute_single | graph_minute_double) @ cardinal.graph
            + pynutil.insert("\"")
        )
        final_graph_second_maybe_zero = (
            pynutil.insert("seconds: \"")
            + (pynini.closure(pynutil.delete("0"), 0, 1) + graph_minute_single | graph_minute_double) @ cardinal.graph
            + pynutil.insert("\"")
        )
        final_time_zone = (
            pynini.accep(" ") + pynutil.insert("zone: \"") + convert_space(time_zone_graph) + pynutil.insert("\"")
        )
        final_time_zone_optional = pynini.closure(final_time_zone, 0, 1,)
        # This might be better as just the inflected forms
        hour_only_delimited = (
            pynutil.insert("hours: \"")
            + optional_delete_leading_zero_to_double_digit @ graph_hour
            + pynutil.insert("\"")
            + NEMO_SPACE
            + ora_suffix
            + pynutil.insert(" preserve_order: true")
        )
        hour_only_delimited |= (
            pynutil.insert("hours: \"")
            + optional_delete_leading_zero_to_double_digit @ graph_hour
            + pynutil.insert("\"")
            + NEMO_SPACE
            + ora_word
            + final_time_zone
            + pynutil.insert(" preserve_order: true")
        )
        # 02:30 óra
        graph_hm = (
            final_graph_hour
            + pynutil.delete(":")
            + (pynutil.delete("00") | (insert_space + final_graph_minute))
            + pynini.closure(ora_forms | perc_forms)
            + final_time_zone_optional
            + pynutil.insert(" preserve_order: true")
        )
        # "2 óra 30 perc" style: the hour word is consumed, the minute suffix kept.
        graph_hm |= (
            final_graph_hour_maybe_zero
            + pynini.closure(NEMO_SPACE, 0, 1)
            + pynutil.delete(ora_word)
            + NEMO_SPACE
            + final_graph_minute_maybe_zero
            + pynini.closure(NEMO_SPACE, 0, 1)
            + perc_suffix
            + pynutil.insert(" preserve_order: true")
        )
        graph_hm |= (
            final_graph_hour_maybe_zero
            + pynini.closure(NEMO_SPACE, 0, 1)
            + pynutil.delete(ora_word)
            + NEMO_SPACE
            + final_graph_minute_maybe_zero
            + pynini.closure(NEMO_SPACE, 0, 1)
            + perc_word
            + final_time_zone
            + pynutil.insert(" preserve_order: true")
        )
        # 10:30:05 Uhr,
        graph_hms = (
            final_graph_hour
            + pynutil.delete(":")
            + (pynutil.delete("00") | (insert_space + final_graph_minute))
            + pynutil.delete(":")
            + (pynutil.delete("00") | (insert_space + final_graph_second))
            + final_suffix_optional
            + final_time_zone_optional
            + pynutil.insert(" preserve_order: true")
        )
        # "2 óra 30 perc 5 mp" style with all three units written out.
        graph_hms |= (
            final_graph_hour_maybe_zero
            + pynini.closure(NEMO_SPACE, 0, 1)
            + pynutil.delete(ora_word + NEMO_SPACE)
            + final_graph_minute_maybe_zero
            + pynini.closure(NEMO_SPACE, 0, 1)
            + pynutil.delete(perc_word + NEMO_SPACE)
            + final_graph_second_maybe_zero
            + pynini.closure(NEMO_SPACE, 0, 1)
            + final_suffix
            + pynutil.insert(" preserve_order: true")
        )
        # 2 Uhr est
        graph_h = hour_only_delimited
        final_graph = (graph_hm | graph_h | graph_hms).optimize()
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/time.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_ALPHA,
NEMO_NON_BREAKING_SPACE,
GraphFst,
convert_space,
delete_space,
insert_space,
)
from nemo_text_processing.text_normalization.hu.taggers.whitelist import load_inflected
from nemo_text_processing.text_normalization.hu.utils import get_abs_path
from pynini.lib import pynutil
# Unit abbreviation -> full unit name mapping loaded from TSV.
unit_singular = pynini.string_file(get_abs_path("data/measures/measurements.tsv"))


class MeasureFst(GraphFst):
    """
    Finite state transducer for classifying measure, e.g.
        "2,4 oz" -> measure { decimal { integer_part: "..." fractional_part: "..." } units: "..." preserve_order: true }

    NOTE(review): the examples in the original docstring were German ("zwei",
    "unzen", "acht") and appear copied from the de grammar; the actual values
    are Hungarian words produced by the cardinal/decimal taggers — confirm
    against the test suite.

    This class also converts words containing numbers and letters,
    e.g. "a-8" and "1,2-a".

    Args:
        cardinal: CardinalFst
        decimal: DecimalFst
        fraction: FractionFst (was missing from the original Args list)
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, decimal: GraphFst, fraction: GraphFst, deterministic: bool = True):
        super().__init__(name="measure", kind="classify", deterministic=deterministic)
        cardinal_graph = cardinal.graph
        graph_unit_singular = convert_space(unit_singular)
        # Also accept naively inflected (lower-cased) unit forms.
        graph_unit_singular = graph_unit_singular | load_inflected(
            get_abs_path("data/measures/measurements.tsv"), "lower_cased", False
        )
        optional_graph_negative = pynini.closure(pynini.cross("-", 'negative: "true" '), 0, 1)
        # "/unit" -> "per unit"; the non-breaking space keeps the compound together.
        graph_unit_denominator = (
            pynini.cross("/", "per") + pynutil.insert(NEMO_NON_BREAKING_SPACE) + graph_unit_singular
        )
        optional_unit_denominator = pynini.closure(
            pynutil.insert(NEMO_NON_BREAKING_SPACE) + graph_unit_denominator, 0, 1,
        )
        unit_singular_graph = (
            pynutil.insert("units: \"")
            + ((graph_unit_singular + optional_unit_denominator) | graph_unit_denominator)
            + pynutil.insert("\"")
        )
        # number + optional space + unit
        subgraph_decimal = decimal.fst + insert_space + pynini.closure(pynutil.delete(" "), 0, 1) + unit_singular_graph
        subgraph_cardinal = (
            pynutil.insert("cardinal { ")
            + optional_graph_negative
            + pynutil.insert("integer: \"")
            + cardinal.graph
            + delete_space
            + pynutil.insert("\"")
            + pynutil.insert(" } ")
            + unit_singular_graph
        )
        subgraph_fraction = (
            fraction.fst + insert_space + pynini.closure(pynutil.delete(" "), 0, 1) + unit_singular_graph
        )
        # Alphanumeric compounds like "8-a" / "a-8": the dash is dropped.
        cardinal_dash_alpha = (
            pynutil.insert("cardinal { integer: \"")
            + cardinal_graph
            + pynutil.delete('-')
            + pynutil.insert("\" } units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.insert("\"")
        )
        alpha_dash_cardinal = (
            pynutil.insert("units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.delete('-')
            + pynutil.insert("\"")
            + pynutil.insert(" cardinal { integer: \"")
            + cardinal_graph
            + pynutil.insert("\" }")
        )
        decimal_dash_alpha = (
            pynutil.insert("decimal { ")
            + decimal.final_graph_wo_negative
            + pynutil.delete('-')
            + pynutil.insert(" } units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.insert("\"")
        )
        # Dimension marker "x"/"X" (e.g. "2x3") treated as a unit.
        decimal_times = (
            pynutil.insert("decimal { ")
            + decimal.final_graph_wo_negative
            + pynutil.insert(" } units: \"")
            + pynini.union('x', 'X')
            + pynutil.insert("\"")
        )
        cardinal_times = (
            pynutil.insert("cardinal { integer: \"")
            + cardinal_graph
            + pynutil.insert("\" } units: \"")
            + pynini.union('x', 'X')
            + pynutil.insert("\"")
        )
        alpha_dash_decimal = (
            pynutil.insert("units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.delete('-')
            + pynutil.insert("\"")
            + pynutil.insert(" decimal { ")
            + decimal.final_graph_wo_negative
            + pynutil.insert(" }")
        )
        final_graph = (
            subgraph_decimal
            | subgraph_cardinal
            | cardinal_dash_alpha
            | alpha_dash_cardinal
            | decimal_dash_alpha
            | decimal_times
            | alpha_dash_decimal
            | subgraph_fraction
            | cardinal_times
        )
        final_graph += pynutil.insert(" preserve_order: true")
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/measure.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst
from nemo_text_processing.text_normalization.hu.utils import get_abs_path
from pynini.lib import pynutil
# Fraction symbols (e.g. "½") -> "numerator/denominator" digit strings.
fraction_symbols = pynini.string_file(get_abs_path("data/fractions/fraction_symbols.tsv"))


class FractionFst(GraphFst):
    """
    Finite state transducer for classifying fraction, e.g.
        "23 4/6" ->
        fraction { integer_part: "huszonhárom" numerator: "négy" denominator: "hatod" preserve_order: true }

    Args:
        cardinal: cardinal GraphFst
        ordinal: ordinal GraphFst (its ``.fractional`` verbalizes denominators)
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal, ordinal, deterministic: bool = True):
        super().__init__(name="fraction", kind="classify", deterministic=deterministic)
        cardinal_graph = cardinal.graph
        self.optional_graph_negative = pynini.closure(
            pynutil.insert("negative: ") + pynini.cross("-", "\"true\" "), 0, 1
        )
        self.integer = pynutil.insert("integer_part: \"") + cardinal_graph + pynutil.insert("\"")
        # "4/6" or "4 / 6": the slash is consumed and closes the numerator field.
        self.numerator = (
            pynutil.insert("numerator: \"") + cardinal_graph + pynini.cross(pynini.union("/", " / "), "\" ")
        )
        self.denominator = pynutil.insert("denominator: \"") + ordinal.fractional + pynutil.insert("\"")
        # Accept either a fraction symbol (rewritten to digits first) or plain digits.
        fraction = (fraction_symbols @ (self.numerator + self.denominator)) | (self.numerator + self.denominator)
        self.graph = self.optional_graph_negative + pynini.closure(self.integer + pynini.accep(" "), 0, 1) + fraction
        graph = self.graph + pynutil.insert(" preserve_order: true")
        final_graph = self.add_tokens(graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/fraction.py |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2023, Jim O'Regan for Språkbanken Tal
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_SPACE,
GraphFst,
delete_extra_space,
insert_space,
)
from nemo_text_processing.text_normalization.hu.taggers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.hu.utils import get_abs_path
from pynini.lib import pynutil
class TelephoneFst(GraphFst):
    """
    Finite state transducer for classifying telephone numbers, e.g.
        "+ (36) 1 441-4000" -> { number_part: "plusz harminchat egy négyszáznegyvenegy négyezer" }

    NOTE(review): the grammar below emits the "+36" portion in the
    ``country_code`` field, so the field grouping in the example above may be
    out of date — confirm the rendered tags against the test suite.

    Hungarian numbers are written in the following formats:
        06 1 XXX XXXX
        06 AA XXX-XXX
        06 AA XXX-XXXX (mobile phones)
    See:
        https://en.wikipedia.org/wiki/Telephone_numbers_in_Hungary

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="telephone", kind="classify", deterministic=deterministic)
        cardinal = CardinalFst(deterministic)
        # Area/country codes (digit strings) verbalized through the cardinal graph.
        area_codes = pynini.string_file(get_abs_path("data/telephone/area_codes.tsv")) @ cardinal.graph
        self.area_codes = area_codes
        country_codes = pynini.string_file(get_abs_path("data/telephone/country_codes.tsv")) @ cardinal.graph
        self.country_codes = country_codes.optimize()
        digit = cardinal.digit
        two_digits = cardinal.two_digits_read
        three_digits = cardinal.three_digits_read
        four_digits = cardinal.four_digits_read
        up_to_three_digits = digit | two_digits | three_digits
        up_to_four_digits = up_to_three_digits | four_digits
        # Group separators: space or dash; "/" additionally after the area code.
        separators = pynini.union(NEMO_SPACE, pynini.cross("-", " "))
        area_separators = pynini.union(separators, pynini.cross("/", " "))
        zero = pynini.cross("0", "nulla")
        digit |= zero
        special_numbers = pynini.string_file(get_abs_path("data/telephone/special_numbers.tsv"))
        special_numbers @= cardinal.three_digits_read
        # Optional ":"/": "/" " after a prompt word is dropped.
        passable = pynini.union(":", ": ", " ")
        prompt_pass = pynini.closure(pynutil.delete(passable) + insert_space, 0, 1)
        telephone_abbr = pynini.string_file(get_abs_path("data/telephone/telephone_abbr.tsv"))
        telephone_abbr = telephone_abbr + prompt_pass
        telephone_prompt = pynini.string_file(get_abs_path("data/telephone/telephone_prompt.tsv"))
        # Prompt words ("tel" etc.) are carried in the country_code field.
        prompt_as_code = pynutil.insert("country_code: \"") + telephone_prompt + pynutil.insert("\"")
        prompt_as_code |= pynutil.insert("country_code: \"") + telephone_abbr + pynutil.insert("\"")
        prompt_as_code |= (
            pynutil.insert("country_code: \"") + telephone_prompt + NEMO_SPACE + telephone_abbr + pynutil.insert("\"")
        )
        prompt_inner = telephone_prompt | telephone_abbr
        prompt_inner |= telephone_prompt + NEMO_SPACE + telephone_abbr
        # International prefix: "+" or "00".
        plus = pynini.cross("+", "plusz ")
        plus |= pynini.cross("00", "nulla nulla ")
        plus = plus + pynini.closure(pynutil.delete(" "), 0, 1)
        country = pynini.closure(pynutil.delete("("), 0, 1) + country_codes + pynini.closure(pynutil.delete(")"), 0, 1)
        country = plus + pynini.closure(pynutil.delete(" "), 0, 1) + country
        country_code = pynutil.insert("country_code: \"") + country + pynutil.insert("\"")
        country_code |= prompt_as_code
        country_code |= pynutil.insert("country_code: \"") + prompt_inner + NEMO_SPACE + country + pynutil.insert("\"")
        # National trunk prefix "06", optionally parenthesized.
        trunk = pynini.cross("06", "nulla hat")
        trunk |= pynutil.delete("(") + trunk + pynutil.delete(")")
        area_part = area_codes + area_separators
        # Accepted digit groupings of the subscriber number.
        base_number_part = pynini.union(
            three_digits + separators + three_digits,
            three_digits + separators + two_digits + separators + two_digits,
            three_digits + separators + four_digits,
            two_digits + separators + four_digits,
            four_digits + separators + three_digits,
            two_digits + separators + two_digits + separators + three_digits,
        )
        number_part = area_part + base_number_part
        self.number_graph = number_part
        number_part = pynutil.insert("number_part: \"") + self.number_graph + pynutil.insert("\"")
        trunk_number_part = (
            pynutil.insert("number_part: \"") + trunk + separators + self.number_graph + pynutil.insert("\"")
        )
        # Extension: trailing "NNNN mellék" ("extension"); the word is deleted.
        mellek = NEMO_SPACE + pynutil.delete("mellék")
        extension = pynutil.insert("extension: \"") + up_to_four_digits + pynutil.insert("\"")
        extension = pynini.closure(area_separators + extension + mellek, 0, 1)
        special_numbers = pynutil.insert("number_part: \"") + special_numbers + pynutil.insert("\"")
        graph = pynini.union(
            country_code + separators + number_part,
            country_code + separators + number_part + extension,
            number_part + extension,
            trunk_number_part,
            trunk_number_part + extension,
            country_code + number_part,
            country_code + trunk_number_part,
            country_code + trunk_number_part + extension,
            country_code + special_numbers,
            country_code + number_part + extension,
        )
        self.tel_graph = graph.optimize()
        # ip
        # IPv4-style addresses: four dot-separated groups read with "pont" (dot).
        ip_prompts = pynini.string_file(get_abs_path("data/telephone/ip_prompt.tsv"))
        ip_graph = up_to_three_digits + (pynini.cross(".", " pont ") + up_to_three_digits) ** 3
        graph |= (
            pynini.closure(
                pynutil.insert("country_code: \"") + ip_prompts + pynutil.insert("\"") + delete_extra_space, 0, 1
            )
            + pynutil.insert("number_part: \"")
            + ip_graph.optimize()
            + pynutil.insert("\"")
        )
        final_graph = self.add_tokens(graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/telephone.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Adapted from https://github.com/google/TextNormalizationCoveringGrammars
# Russian minimally supervised number grammar.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_SIGMA, GraphFst
from nemo_text_processing.text_normalization.hu.taggers.cardinal import filter_punctuation
from nemo_text_processing.text_normalization.hu.utils import get_abs_path
from pynini.lib import pynutil
class OrdinalFst(GraphFst):
    """
    Finite state transducer for classifying ordinals, e.g.
        "2." -> ordinal { integer: "második" }

    (The original docstring said "classifying cardinals"; this grammar tags
    ordinals.)

    Args:
        cardinal: cardinal GraphFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, deterministic=False):
        super().__init__(name="ordinal", kind="classify", deterministic=deterministic)
        # Rewrite data: exceptional (irregular) forms applied first, then the
        # regular ordinal endings; superessive endings and superscript digits
        # are used for the derived graphs below.
        endings = pynini.string_file(get_abs_path("data/ordinals/endings.tsv"))
        exceptions = pynini.string_file(get_abs_path("data/ordinals/exceptional.tsv"))
        superessive_endings = pynini.string_file(get_abs_path("data/ordinals/superessive_endings.tsv"))
        superscript2digit = pynini.string_file(get_abs_path("data/ordinals/superscript_digits.tsv"))
        cardinal_graph = cardinal.graph
        bare_ordinals = (
            cardinal_graph
            @ pynini.cdrewrite(exceptions, "[BOS]", "[EOS]", NEMO_SIGMA)
            @ pynini.cdrewrite(endings, "", "[EOS]", NEMO_SIGMA)
        )
        self.bare_ordinals = bare_ordinals
        self.filtered_ordinals = filter_punctuation(bare_ordinals).optimize()
        self.superessive = self.bare_ordinals @ pynini.cdrewrite(superessive_endings, "", "[EOS]", NEMO_SIGMA)
        self.superscript_to_superessive = pynini.closure(superscript2digit) @ self.superessive
        # Written ordinals end in "."; the spoken form is also accepted as-is.
        self.graph = pynini.union(
            self.filtered_ordinals + pynutil.delete("."), self.filtered_ordinals.project("output")
        ).optimize()
        # For some reason, bare_ordinals does not work when exported, so doing this here
        fractional_exceptions = pynini.string_map([("első", "egyed"), ("második", "fél")])
        self.fractional = (
            bare_ordinals
            @ pynini.cdrewrite(fractional_exceptions, "[BOS]", "[EOS]", NEMO_SIGMA)
            @ pynini.cdrewrite(pynutil.delete("ik"), "", "[EOS]", NEMO_SIGMA)
        ).optimize()
        if not deterministic:
            self.fractional |= pynini.cross("2", "ketted")
        final_graph = pynutil.insert("integer: \"") + self.graph + pynutil.insert("\"")
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/ordinal.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# Copyright (c) 2023, Jim O'Regan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst, convert_space
from nemo_text_processing.text_normalization.hu.utils import get_abs_path, load_labels, naive_inflector
from pynini.lib import pynutil
def load_inflected(filename, input_case, singular_only=False, skip_spaces=True):
    """Load a two-column TSV of replacements and add naively inflected variants.

    Args:
        filename: path to a TSV file (written form <TAB> spoken form).
        input_case: "lower_cased" lower-cases the written side of each pair.
        singular_only: passed through to naive_inflector.
        skip_spaces: if True, multi-word spoken forms are not inflected.

    Returns:
        pynini FST mapping written forms (and their inflections) to spoken forms.
    """
    forms = []
    # Hungarian data is non-ASCII; be explicit about UTF-8 rather than
    # relying on the platform default encoding.
    with open(filename, encoding="utf-8") as tsv:
        # Iterate the file directly instead of materializing readlines().
        for line in tsv:
            parts = line.strip().split("\t")
            if len(parts) < 2:
                # Skip blank or malformed lines instead of raising IndexError.
                continue
            key = parts[0]
            if input_case == "lower_cased":
                key = parts[0].lower()
            forms.append((key, parts[1]))
            if not (skip_spaces and " " in parts[1]):
                forms += naive_inflector(key, parts[1], singular_only)
    graph = pynini.string_map(forms)
    return graph
class WhiteListFst(GraphFst):
    """
    Finite state transducer for classifying whitelist, e.g.
        "stb." -> tokens { name: "s a többi" }
    This class has highest priority among all classifier grammars.
    Whitelisted tokens are defined and loaded from "data/whitelist.tsv".

    Args:
        input_case: accepting either "lower_cased" or "cased" input.
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        input_file: path to a file with whitelist replacements
    """

    def __init__(self, input_case: str, deterministic: bool = True, input_file: str = None):
        super().__init__(name="whitelist", kind="classify", deterministic=deterministic)

        def _get_whitelist_graph(input_case, file):
            # Plain TSV -> FST, optionally lower-casing the written side.
            whitelist = load_labels(file)
            if input_case == "lower_cased":
                whitelist = [[x[0].lower()] + x[1:] for x in whitelist]
            graph = pynini.string_map(whitelist)
            return graph

        graph = _get_whitelist_graph(input_case, get_abs_path("data/whitelist.tsv"))
        if not deterministic and input_case != "lower_cased":
            # Lightly-penalized lower-cased variants as extra alternatives.
            graph |= pynutil.add_weight(
                _get_whitelist_graph("lower_cased", get_abs_path("data/whitelist.tsv")), weight=0.0001
            )
        graph_inflected = load_inflected(get_abs_path("data/whitelist_inflect.tsv"), input_case, False)
        graph_inflected_sg = load_inflected(get_abs_path("data/whitelist_inflect_sg.tsv"), input_case, True)
        # Unit names are only added as whitelist entries in non-deterministic mode (below).
        units_graph = load_inflected(get_abs_path("data/measures/measurements.tsv"), input_case, False)
        graph |= graph_inflected
        graph |= graph_inflected_sg
        # A user-supplied whitelist REPLACES the built-in one in deterministic
        # mode, and is merged in as an alternative otherwise.
        if input_file:
            whitelist_provided = _get_whitelist_graph(input_case, input_file)
            if not deterministic:
                graph |= whitelist_provided
            else:
                graph = whitelist_provided
        if not deterministic:
            graph |= units_graph
        self.graph = graph
        self.final_graph = convert_space(self.graph).optimize()
        self.fst = (pynutil.insert("name: \"") + self.final_graph + pynutil.insert("\"")).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/whitelist.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_WHITE_SPACE,
GraphFst,
delete_extra_space,
delete_space,
generator_main,
)
from nemo_text_processing.text_normalization.en.taggers.punctuation import PunctuationFst
from nemo_text_processing.text_normalization.hu.taggers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.hu.taggers.date import DateFst
from nemo_text_processing.text_normalization.hu.taggers.decimal import DecimalFst
from nemo_text_processing.text_normalization.hu.taggers.electronic import ElectronicFst
from nemo_text_processing.text_normalization.hu.taggers.fraction import FractionFst
from nemo_text_processing.text_normalization.hu.taggers.measure import MeasureFst
from nemo_text_processing.text_normalization.hu.taggers.money import MoneyFst
from nemo_text_processing.text_normalization.hu.taggers.ordinal import OrdinalFst
from nemo_text_processing.text_normalization.hu.taggers.telephone import TelephoneFst
from nemo_text_processing.text_normalization.hu.taggers.time import TimeFst
from nemo_text_processing.text_normalization.hu.taggers.whitelist import WhiteListFst
from nemo_text_processing.text_normalization.hu.taggers.word import WordFst
from pynini.lib import pynutil
class ClassifyFst(GraphFst):
    """
    Final class that composes all other classification grammars. This class can
    process an entire sentence, that is lower cased. For deployment, this
    grammar will be compiled and exported to OpenFst Finite State Archive (FAR)
    File. More details to deployment at NeMo/tools/text_processing_deployment.

    Args:
        input_case: accepting either "lower_cased" or "cased" input.
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        cache_dir: path to a dir with .far grammar file. Set to None to avoid using cache.
        overwrite_cache: set to True to overwrite .far files
        whitelist: path to a file with whitelist replacements
    """

    def __init__(
        self,
        input_case: str,
        deterministic: bool = False,
        cache_dir: str = None,
        overwrite_cache: bool = False,
        whitelist: str = None,
    ):
        super().__init__(name="tokenize_and_classify", kind="classify", deterministic=deterministic)
        far_file = None
        if cache_dir is not None and cache_dir != "None":
            os.makedirs(cache_dir, exist_ok=True)
            # The whitelist file name participates in the cache key so that
            # different whitelists do not share a cached FAR.
            whitelist_file = os.path.basename(whitelist) if whitelist else ""
            far_file = os.path.join(
                cache_dir, f"_{input_case}_hu_tn_{deterministic}_deterministic{whitelist_file}.far"
            )
        if not overwrite_cache and far_file and os.path.exists(far_file):
            # Cache hit: restore the compiled grammar instead of rebuilding it.
            self.fst = pynini.Far(far_file, mode="r")["tokenize_and_classify"]
            logging.info(f"ClassifyFst.fst was restored from {far_file}.")
        else:
            logging.info(f"Creating ClassifyFst grammars. This might take some time...")
            # Sub-grammars share the single cardinal/ordinal/decimal instances.
            self.cardinal = CardinalFst(deterministic=deterministic)
            cardinal_graph = self.cardinal.fst
            self.ordinal = OrdinalFst(cardinal=self.cardinal, deterministic=deterministic)
            ordinal_graph = self.ordinal.fst
            self.decimal = DecimalFst(cardinal=self.cardinal, deterministic=deterministic)
            decimal_graph = self.decimal.fst
            self.fraction = FractionFst(cardinal=self.cardinal, ordinal=self.ordinal, deterministic=deterministic)
            fraction_graph = self.fraction.fst
            self.measure = MeasureFst(
                cardinal=self.cardinal, decimal=self.decimal, fraction=self.fraction, deterministic=deterministic
            )
            measure_graph = self.measure.fst
            self.date = DateFst(cardinal=self.cardinal, deterministic=deterministic)
            date_graph = self.date.fst
            word_graph = WordFst(deterministic=deterministic).fst
            self.time = TimeFst(self.cardinal, deterministic=deterministic)
            time_graph = self.time.fst
            self.telephone = TelephoneFst(deterministic=deterministic)
            telephone_graph = self.telephone.fst
            self.electronic = ElectronicFst(deterministic=deterministic)
            electronic_graph = self.electronic.fst
            self.money = MoneyFst(cardinal=self.cardinal, decimal=self.decimal, deterministic=deterministic)
            money_graph = self.money.fst
            self.whitelist = WhiteListFst(input_case=input_case, deterministic=deterministic, input_file=whitelist)
            whitelist_graph = self.whitelist.fst
            punct_graph = PunctuationFst(deterministic=deterministic).fst
            # Lower weight = higher priority: whitelist wins over everything,
            # plain words (weight 200) are the fallback of last resort.
            classify = (
                pynutil.add_weight(whitelist_graph, 1.01)
                | pynutil.add_weight(time_graph, 1.09)
                | pynutil.add_weight(measure_graph, 1.08)
                | pynutil.add_weight(cardinal_graph, 1.1)
                | pynutil.add_weight(fraction_graph, 1.09)
                | pynutil.add_weight(date_graph, 1.1)
                | pynutil.add_weight(ordinal_graph, 1.1)
                | pynutil.add_weight(decimal_graph, 1.1)
                | pynutil.add_weight(money_graph, 1.1)
                | pynutil.add_weight(telephone_graph, 1.1)
                | pynutil.add_weight(electronic_graph, 1.1)
                | pynutil.add_weight(word_graph, 200)
            )
            punct = pynutil.insert("tokens { ") + pynutil.add_weight(punct_graph, weight=2.1) + pynutil.insert(" }")
            punct = pynini.closure(
                pynini.compose(pynini.closure(NEMO_WHITE_SPACE, 1), delete_extra_space)
                | (pynutil.insert(" ") + punct),
                1,
            )
            token = pynutil.insert("tokens { ") + classify + pynutil.insert(" }")
            token_plus_punct = (
                pynini.closure(punct + pynutil.insert(" ")) + token + pynini.closure(pynutil.insert(" ") + punct)
            )
            # Sentence = tokens separated by (collapsed) whitespace or punctuation.
            graph = token_plus_punct + pynini.closure(
                (
                    pynini.compose(pynini.closure(NEMO_WHITE_SPACE, 1), delete_extra_space)
                    | (pynutil.insert(" ") + punct + pynutil.insert(" "))
                )
                + token_plus_punct
            )
            graph = delete_space + graph + delete_space
            graph |= punct
            self.fst = graph.optimize()
            if far_file:
                generator_main(far_file, {"tokenize_and_classify": self.fst})
                logging.info(f"ClassifyFst grammars are saved to {far_file}.")
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/tokenize_and_classify.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/__init__.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# Copyright (c) 2023, Jim O'Regan.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_DIGIT, NEMO_SIGMA, GraphFst, insert_space
from nemo_text_processing.text_normalization.hu.utils import get_abs_path, load_labels, naive_inflector
from pynini.lib import pynutil
# Rows from quantities.tsv: either (abbreviation, full word) pairs or single
# full quantity words -- consumed by inflect_quantities() below.
quantities = load_labels(get_abs_path("data/number/quantities.tsv"))
def inflect_quantities():
    """Build (written, spoken) pairs for large-number quantity words.

    Two-column rows of quantities.tsv map an abbreviation to its full form;
    single-column rows are already full words and map to themselves. Naive
    case inflections are generated for every entry, and for words containing
    "lli" a single-l spelling variant is also accepted.
    """
    pairs = []
    for entry in quantities:
        if len(entry) == 2:
            abbrev, word = entry[0], entry[1]
            pairs.append((abbrev, word))
            pairs += naive_inflector(abbrev, word, True)
        else:
            word = entry[0]
            pairs.append((word, word))
            inflected = [form for _, form in naive_inflector(".", word, True)]
            pairs.extend((form, form) for form in inflected)
            if "lli" in word:
                pairs.append((word.replace("lli", "li"), word))
                pairs.extend((form.replace("lli", "li"), form) for form in inflected)
    return pairs
def get_quantity(decimal: 'pynini.FstLike', cardinal_up_to_hundred: 'pynini.FstLike') -> 'pynini.FstLike':
    """
    Returns an FST that turns a cardinal or decimal followed by a quantity
    word into classifier fields,
    e.g. 1 millió -> integer_part: "egy" quantity: "millió"
    e.g. 1,4 millió -> integer_part: "egy" fractional_part: "négy" quantity: "millió"

    Args:
        decimal: decimal FST
        cardinal_up_to_hundred: cardinal FST
    """
    quantity_words = pynini.string_map(inflect_quantities())
    quantity_field = pynutil.insert("quantity: \"") + quantity_words + pynutil.insert("\"")
    integer_field = pynutil.insert("integer_part: \"") + cardinal_up_to_hundred + pynutil.insert("\"")
    # cardinal + space + quantity word
    graph = integer_field + pynini.accep(" ") + quantity_field
    # decimal + space + quantity word
    graph |= decimal + pynini.accep(" ") + quantity_field
    return graph
class DecimalFst(GraphFst):
    """
    Finite state transducer for classifying decimal, e.g.
        -11,4006 milliárd -> decimal { negative: "true" integer_part: "tizenegy" fractional_part: "négyezer-hat tízezred" quantity: "milliárd" preserve_order: true }
        1 milliárd -> decimal { integer_part: "egy" quantity: "milliárd" preserve_order: true }

    Args:
        cardinal: CardinalFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, deterministic: bool = True):
        super().__init__(name="decimal", kind="classify", deterministic=deterministic)
        cardinal_graph = cardinal.graph
        # Fractional digits: leading zeros are deleted (they only shift the
        # place value) and trailing zeros are deleted (they carry no
        # information once the place-value word is chosen).
        digit_no_zero = NEMO_DIGIT - "0"
        digit_or_del_zero = pynutil.delete("0") | digit_no_zero
        final_zero = pynini.closure(pynutil.delete("0"))
        # In Hungarian, the fraction is read as a whole number
        # with a word for the decimal place added
        # see: https://helyesiras.mta.hu/helyesiras/default/numerals
        decimal_number = digit_no_zero @ cardinal_graph + final_zero + pynutil.insert(" tized")
        decimal_number |= (digit_or_del_zero + NEMO_DIGIT) @ cardinal_graph + final_zero + pynutil.insert(" század")
        # Three or more fractional digits: each scale word covers three orders
        # of magnitude via the "", "tíz", "száz" modifiers, so `order`
        # advances by one per (scale, modifier) combination.
        order = 2
        for decimal_name in [
            "ezred",
            "milliomod",
            "milliárdod",
            "billiomod",
            "billiárdod",
            "trilliomod",
            "trilliárdod",
        ]:
            for modifier in ["", "tíz", "száz"]:
                decimal_number |= (
                    (NEMO_DIGIT ** order + (NEMO_DIGIT - "0"))
                    @ pynini.cdrewrite(pynini.cross("0", ""), "[BOS]", "", NEMO_SIGMA)
                    @ cardinal_graph
                    + final_zero
                    + pynutil.insert(f" {modifier}{decimal_name}")
                )
                order += 1
        if not deterministic:
            # Alternative readings of the long-scale words.
            alts = pynini.string_map([("billiomod", "ezer milliárdod"), ("billiárdod", "millió milliárdod")])
            decimal_alts = decimal_number @ pynini.cdrewrite(alts, "", "[EOS]", NEMO_SIGMA)
            decimal_number |= decimal_alts
        self.graph = decimal_number
        # Hungarian uses a decimal comma.
        point = pynutil.delete(",")
        optional_graph_negative = pynini.closure(pynutil.insert("negative: ") + pynini.cross("-", "\"true\" "), 0, 1)
        self.graph_fractional = pynutil.insert("fractional_part: \"") + self.graph + pynutil.insert("\"")
        self.graph_integer = pynutil.insert("integer_part: \"") + cardinal.graph + pynutil.insert("\"")
        final_graph_wo_sign = self.graph_integer + point + insert_space + self.graph_fractional
        # Allow a trailing quantity word (millió, milliárd, ...) after either
        # a plain cardinal or a decimal.
        self.final_graph_wo_negative = final_graph_wo_sign | get_quantity(
            final_graph_wo_sign, cardinal.graph_hundreds_component_at_least_one_non_zero_digit
        )
        final_graph = optional_graph_negative + self.final_graph_wo_negative
        final_graph += pynutil.insert(" preserve_order: true")
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/decimal.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_ALPHA,
NEMO_DIGIT,
NEMO_SIGMA,
GraphFst,
convert_space,
insert_space,
)
from nemo_text_processing.text_normalization.hu.graph_utils import ensure_space
from nemo_text_processing.text_normalization.hu.utils import (
get_abs_path,
inflect_abbreviation,
load_labels,
naive_inflector,
)
from pynini.lib import pynutil
# FSTs built from the money TSVs: currency.tsv holds the major-unit names
# (used for the currency_maj field below), currency_minor.tsv the minor-unit
# names (currency_min).
min_singular = pynini.string_file(get_abs_path("data/money/currency_minor.tsv"))
maj_singular = pynini.string_file((get_abs_path("data/money/currency.tsv")))
class MoneyFst(GraphFst):
    """
    Finite state transducer for classifying money, e.g.
        "€1" -> money { currency_maj: "euró" integer_part: "egy"}
        "€1,000" -> money { currency_maj: "euró" integer_part: "egy" }
        "€1,001" -> money { currency_maj: "euró" integer_part: "egy" fractional_part: "egy"}
        "£1,4" -> money { integer_part: "egy" currency_maj: "font" fractional_part: "negyven" preserve_order: true}
               -> money { integer_part: "egy" currency_maj: "font" fractional_part: "negyven" currency_min: "penny" preserve_order: true}
        "£0,01" -> money { fractional_part: "egy" currency_min: "penny" preserve_order: true}
        "£0,01 million" -> money { currency_maj: "font" integer_part: "nulla" fractional_part: "egy század" quantity: "millió"}

    Args:
        cardinal: CardinalFst
        decimal: DecimalFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, decimal: GraphFst, deterministic: bool = True):
        super().__init__(name="money", kind="classify", deterministic=deterministic)
        cardinal_graph = cardinal.graph
        graph_decimal_final = decimal.final_graph_wo_negative
        maj_singular_labels = load_labels(get_abs_path("data/money/currency.tsv"))
        maj_singular_graph = convert_space(maj_singular)
        # Letter names for spelling out ISO currency codes; non-deterministic
        # mode adds alternative readings of "W".
        letters = pynini.string_file((get_abs_path("data/money/alphabet.tsv")))
        if not deterministic:
            letters |= pynini.cross("W", "vé")
            letters |= pynini.cross("W", "kettős vé")
        read_letters = letters + pynini.closure(insert_space + letters)
        self.read_letters = read_letters
        graph_maj_singular = pynutil.insert("currency_maj: \"") + maj_singular_graph + pynutil.insert("\"")
        # Drop an all-zero fractional part entirely ("€1,00" -> "€1").
        optional_delete_fractional_zeros = pynini.closure(
            pynutil.delete(",") + pynini.closure(pynutil.delete("0"), 1), 0, 1
        )
        # only for decimals where third decimal after comma is non-zero or with quantity
        decimal_delete_last_zeros = (
            pynini.closure(NEMO_DIGIT, 1)
            + pynini.accep(",")
            + pynini.closure(NEMO_DIGIT, 2)
            + (NEMO_DIGIT - "0")
            + pynini.closure(pynutil.delete("0"))
        )
        # "with quantity" = something alphabetic follows the number.
        decimal_with_quantity = NEMO_SIGMA + NEMO_ALPHA
        decimal_part = (decimal_delete_last_zeros | decimal_with_quantity) @ graph_decimal_final
        graph_decimal = graph_maj_singular + insert_space + decimal_part
        graph_integer = pynutil.insert("integer_part: \"") + cardinal_graph + pynutil.insert("\"")
        graph_integer_only = graph_maj_singular + insert_space + graph_integer
        graph = (graph_integer_only + optional_delete_fractional_zeros) | graph_decimal
        # remove trailing zeros of non zero number in the first 2 digits and fill up to 2 digits
        # e.g. 2000 -> 20, 0200->02, 01 -> 01, 10 -> 10
        # not accepted: 002, 00, 0,
        two_digits_fractional_part = (
            pynini.closure(NEMO_DIGIT) + (NEMO_DIGIT - "0") + pynini.closure(pynutil.delete("0"))
        ) @ (
            (pynutil.delete("0") + (NEMO_DIGIT - "0"))
            | ((NEMO_DIGIT - "0") + pynutil.insert("0"))
            | ((NEMO_DIGIT - "0") + NEMO_DIGIT)
        )
        graph_min_singular = pynutil.insert(" currency_min: \"") + min_singular + pynutil.insert("\"")
        # format ** euro ** cent
        # Per-currency graphs are unioned over all rows of currency.tsv.
        decimal_graph_with_minor = None
        for curr_symbol, cur_word in maj_singular_labels:
            preserve_order = pynutil.insert(" preserve_order: true")
            integer_plus_maj = graph_integer + insert_space + pynutil.insert(curr_symbol) @ graph_maj_singular
            # non zero integer part
            integer_plus_maj = (pynini.closure(NEMO_DIGIT) - "0") @ integer_plus_maj
            abbr_expansion = pynini.string_map(naive_inflector(curr_symbol, cur_word))
            maj_inflected = pynini.accep(cur_word)
            maj_inflected |= pynini.project(abbr_expansion, "output")
            # where a currency abbreviation (like GBP) appears inflected (GBP-t),
            # we read the number as a pure fraction, because to add a minor currency
            # would involve moving the inflectional piece from major to minor
            if re.match("^[A-Z]{3}$", curr_symbol):
                letter_expansion = pynini.string_map(inflect_abbreviation(curr_symbol, cur_word))
                maj_inflected = letter_expansion | abbr_expansion
                maj_inflected |= pynini.cross(curr_symbol, cur_word)
                if not deterministic:
                    # Also allow the code spelled out letter by letter, with
                    # the same endings attached.
                    expanded = curr_symbol @ read_letters
                    get_endings = pynini.project(letter_expansion, "input")
                    letter_endings = get_endings @ (
                        pynini.cdrewrite(pynini.cross(f"{curr_symbol}-", expanded), "[BOS]", "", NEMO_SIGMA)
                    )
                    maj_inflected |= letter_endings
                    maj_inflected |= pynini.project(letter_endings, "output")
            graph_maj_final = pynutil.insert("currency_maj: \"") + maj_inflected + pynutil.insert("\"")
            # Amount followed by the currency word ("1,5 euró" / "12 euró").
            graph |= graph_decimal_final + ensure_space + graph_maj_final + preserve_order
            graph |= graph_integer + ensure_space + graph_maj_final + preserve_order
            graph_fractional = (
                two_digits_fractional_part @ pynini.closure(NEMO_DIGIT, 1, 2) @ cardinal.two_digit_non_zero
            )
            graph_fractional = pynutil.insert("fractional_part: \"") + graph_fractional + pynutil.insert("\"")
            fractional_plus_min = graph_fractional + pynutil.insert(curr_symbol) @ graph_min_singular
            # "€1,50" -> major + minor units; "€0,50" -> minor units only.
            decimal_graph_with_minor_curr = integer_plus_maj + pynini.cross(",", " ") + fractional_plus_min
            decimal_graph_with_minor_curr |= pynutil.delete("0,") + fractional_plus_min
            decimal_graph_with_minor_curr = (
                pynutil.delete(curr_symbol) + decimal_graph_with_minor_curr + preserve_order
            )
            decimal_graph_with_minor = (
                decimal_graph_with_minor_curr
                if decimal_graph_with_minor is None
                else pynini.union(decimal_graph_with_minor, decimal_graph_with_minor_curr)
            )
        final_graph = graph | decimal_graph_with_minor
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/money.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# Copyright (c) 2022, Jim O'Regan.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_DIGIT,
NEMO_SIGMA,
NEMO_SPACE,
NEMO_WHITE_SPACE,
GraphFst,
delete_space,
insert_space,
)
from nemo_text_processing.text_normalization.hu.graph_utils import HU_ALPHA
from nemo_text_processing.text_normalization.hu.utils import get_abs_path
from pynini.lib import pynutil
def make_million(word: str, hundreds: 'pynini.FstLike', deterministic=False):
    """
    Helper for the "millions and above" scale groups (millió, milliárd, ...).

    Builds an FST consuming one three-digit group: "001" becomes the bare
    scale word (weighted to be preferred), any other non-zero group is
    verbalized by `hundreds` with the scale word appended, and "000" is
    deleted (that scale is absent from the number).

    Args:
        word: the scale word, e.g. "millió"
        hundreds: FST verbalizing a non-zero three-digit group
        deterministic: if True only the canonical spelling is produced;
            otherwise spaced and "egy"-prefixed variants are also accepted

    Returns:
        the FST for this scale group, ending in a (possibly optional-space)
        hyphen separator.
    """
    insert_hyphen = pynutil.insert("-")
    # in the non-deterministic case, add an optional space
    if not deterministic:
        insert_hyphen |= pynini.closure(pynutil.insert(" "), 0, 1)
    graph_million = pynutil.add_weight(pynini.cross("001", word), -0.001)
    graph_million |= hundreds + pynutil.insert(word)
    if not deterministic:
        # FIX: these alternatives previously used plain string literals
        # ("egy{word}" etc.) with no f-prefix, so the literal text "{word}"
        # was emitted instead of the scale word.
        graph_million |= pynutil.add_weight(pynini.cross("001", f"egy{word}"), -0.001)
        graph_million |= pynutil.add_weight(pynini.cross("001", f"egy{word} "), -0.001)
        graph_million |= pynutil.add_weight(pynini.cross("001", f"{word} "), -0.001)
        graph_million |= pynutil.add_weight(pynini.cross("001", f" egy{word}"), -0.001)
        graph_million |= pynutil.add_weight(pynini.cross("001", f" egy{word} "), -0.001)
        graph_million |= pynutil.add_weight(pynini.cross("001", f" egy {word} "), -0.001)
        graph_million |= pynutil.add_weight(pynini.cross("001", f" {word} "), -0.001)
    graph_million += insert_hyphen
    graph_million |= pynutil.delete("000")
    return graph_million
def filter_punctuation(fst: 'pynini.FstLike') -> 'pynini.FstLike':
    """
    Strips Hungarian-style thousand separators (dot or space) from digit
    strings before composing them onto ``fst``:
        "1 000" -> "1000"
        "1.000.000" -> "1000000"

    Args:
        fst: Any pynini.FstLike object. Function composes fst onto string parser fst

    Returns:
        fst: A pynini.FstLike object
    """
    separator = pynini.string_map([".", NEMO_SPACE])
    block_of_three = NEMO_DIGIT ** 3  # for blocks of three
    # Leading group of 1-3 digits; must not be all zeros.
    lead_digits = pynini.closure(NEMO_DIGIT, 1, 3) - "000" - "00" - "0"
    # Plain digit runs (page numbers, thousand series) pass through untouched.
    plain_digits = pynini.closure(NEMO_DIGIT, 1)
    separated = (
        lead_digits
        + pynutil.delete(separator)
        + pynini.closure(block_of_three + pynutil.delete(separator))
        + block_of_three
    )
    return (plain_digits | separated) @ fst
class CardinalFst(GraphFst):
    """
    Finite state transducer for classifying cardinals, e.g.
        "1000" ->  cardinal { integer: "ezer" }
        "9999" -> cardinal { integer: "kilencezer-kilencszázkilencvenkilenc" }
        "2000000" -> cardinal { integer: "kétmillió" }

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="cardinal", kind="classify", deterministic=deterministic)
        # The TSVs are inverted so the graphs map digit strings to words
        # (presumably stored word<TAB>digit -- confirm against the data files).
        zero = pynini.invert(pynini.string_file(get_abs_path("data/number/zero.tsv")))
        digit = pynini.invert(pynini.string_file(get_abs_path("data/number/digit.tsv")))
        # "_inline" variants are the forms used inside compounds.
        digit_inline = pynini.invert(pynini.string_file(get_abs_path("data/number/digit_inline.tsv")))
        tens = pynini.invert(pynini.string_file(get_abs_path("data/number/tens.tsv")))
        tens_inline = pynini.invert(pynini.string_file(get_abs_path("data/number/tens_inline.tsv")))
        # Cleanup helpers for separators left over while gluing groups.
        delete_hyphen = pynutil.delete(pynini.closure("-"))
        delete_extra_hyphens = pynini.cross(pynini.closure("-", 1), "-")
        delete_extra_spaces = pynini.cross(pynini.closure(" ", 1), " ")
        # Any single digit
        graph_digit = digit
        self.digit = graph_digit
        graph_zero = zero
        digits_inline_no_one = (NEMO_DIGIT - "1") @ digit_inline
        digits_no_one = (NEMO_DIGIT - "1") @ digit
        if not deterministic:
            graph_digit |= pynini.cross("2", "két")
            digits_inline_no_one |= pynini.cross("2", "kettő")
        insert_hyphen = pynutil.insert("-")
        # in the non-deterministic case, add an optional space
        if not deterministic:
            insert_hyphen |= pynini.closure(pynutil.insert(" "), 0, 1)
        # Any double digit
        graph_tens = (tens_inline + digit) | tens
        self.tens = graph_tens.optimize()
        self.two_digit_non_zero = pynini.union(graph_digit, graph_tens, (pynutil.delete("0") + digit)).optimize()
        # Hundreds prefix: "1xx" reads as bare "száz", otherwise inline digit + "száz".
        base_hundreds = pynini.union(pynini.cross("1", "száz"), digits_inline_no_one + pynutil.insert("száz"))
        if not deterministic:
            base_hundreds |= pynini.cross("1", "egyszáz")
            base_hundreds |= pynini.cross("1", " egyszáz")
            base_hundreds |= pynini.cross("1", "egy száz")
            base_hundreds |= pynini.cross("1", " egy száz")
            base_hundreds |= pynini.cross("1", " száz")
            # NOTE(review): this lets `digits_inline_no_one` emit " száz" from
            # empty input -- looks suspicious; confirm intent.
            digits_inline_no_one |= pynutil.insert(" száz")
        hundreds = pynini.union(
            pynini.cross("100", "száz"),
            pynini.cross("1", "száz") + graph_tens,
            digits_inline_no_one + pynini.cross("00", "száz"),
            digits_inline_no_one + pynutil.insert("száz") + graph_tens,
        )
        if not deterministic:
            hundreds |= pynini.union(
                pynini.cross("100", "egyszáz"),
                pynini.cross("1", "egyszáz") + graph_tens,
                pynini.cross("100", " egyszáz"),
                pynini.cross("1", " egyszáz ") + graph_tens,
                pynini.cross("100", "egy száz"),
                pynini.cross("1", "egy száz") + graph_tens,
                pynini.cross("100", " egy száz"),
                pynini.cross("1", " egy száz ") + graph_tens,
                pynini.cross("100", " száz"),
                pynini.cross("1", " száz ") + graph_tens,
            )
        # NOTE(review): `hundreds` is not referenced again in this scope
        # (`graph_hundreds` below builds on `base_hundreds`) -- possibly dead
        # code; confirm.
        # Three digit strings
        graph_hundreds = base_hundreds + pynini.union(
            pynutil.delete("00"), graph_tens, (pynutil.delete("0") + graph_digit)
        )
        self.hundreds = graph_hundreds.optimize()
        # For all three digit strings with leading zeroes (graph appends '0's to manage place in string)
        graph_hundreds_component = pynini.union(graph_hundreds, pynutil.delete("0") + graph_tens)
        graph_hundreds_component_at_least_one_non_zero_digit = graph_hundreds_component | (
            pynutil.delete("00") + graph_digit
        )
        self.graph_hundreds_component_at_least_one_non_zero_digit = (
            graph_hundreds_component_at_least_one_non_zero_digit | graph_tens | graph_digit
        ).optimize()
        # Needed?
        # Variant that excludes a bare final "egy": used before scale words
        # ("001" thousands is handled separately via `ezer1` below).
        graph_hundreds_component_at_least_one_non_zero_digit_no_one = graph_hundreds_component | (
            pynutil.delete("00") + digits_no_one
        )
        self.hundreds_non_zero_no_one = graph_hundreds_component_at_least_one_non_zero_digit_no_one
        ezer = pynutil.insert("ezer")
        if not deterministic:
            ezer |= pynutil.insert(" ezer")
        # Readings for a thousands group equal to "001".
        ezer1 = ezer
        if not deterministic:
            ezer1 |= pynutil.insert("egyezer")
            ezer1 |= pynutil.insert(" egyezer")
            ezer1 |= pynutil.insert("egy ezer")
            ezer1 |= pynutil.insert(" egy ezer")
        # Six-digit component: thousands group + hundreds group.
        graph_thousands_component_at_least_one_non_zero_digit = pynini.union(
            pynutil.delete("000") + graph_hundreds_component_at_least_one_non_zero_digit,
            graph_hundreds_component_at_least_one_non_zero_digit_no_one
            + ezer
            + insert_hyphen
            + (graph_hundreds_component_at_least_one_non_zero_digit | pynutil.delete("000")),
            pynutil.delete("001")
            + ezer1
            + (graph_hundreds_component_at_least_one_non_zero_digit | pynutil.delete("000")),
        )
        graph_thousands_component_at_least_one_non_zero_digit_no_one = pynini.union(
            pynutil.delete("000") + graph_hundreds_component_at_least_one_non_zero_digit_no_one,
            graph_hundreds_component_at_least_one_non_zero_digit_no_one
            + ezer
            + insert_hyphen
            + (graph_hundreds_component_at_least_one_non_zero_digit | pynutil.delete("000")),
            pynutil.delete("001")
            + ezer1
            + (graph_hundreds_component_at_least_one_non_zero_digit | pynutil.delete("000")),
        )
        self.graph_thousands_component_at_least_one_non_zero_digit = (
            graph_thousands_component_at_least_one_non_zero_digit
        )
        self.graph_thousands_component_at_least_one_non_zero_digit_no_one = (
            graph_thousands_component_at_least_one_non_zero_digit_no_one
        )
        # One three-digit group per long-scale word, highest first.
        graph_million = make_million("millió", self.hundreds_non_zero_no_one, deterministic)
        graph_milliard = make_million("milliárd", self.hundreds_non_zero_no_one, deterministic)
        graph_billion = make_million("billió", self.hundreds_non_zero_no_one, deterministic)
        graph_billiard = make_million("billiárd", self.hundreds_non_zero_no_one, deterministic)
        graph_trillion = make_million("trillió", self.hundreds_non_zero_no_one, deterministic)
        graph_trilliard = make_million("trilliárd", self.hundreds_non_zero_no_one, deterministic)
        graph = (
            graph_trilliard
            + graph_trillion
            + graph_billiard
            + graph_billion
            + graph_milliard
            + graph_million
            + (graph_thousands_component_at_least_one_non_zero_digit | pynutil.delete("000000"))
        )
        # Strip leading/trailing separators, squeeze repeated ones, and
        # collapse multiple whitespace between letters to a single space.
        clean_output = (
            pynini.cdrewrite(delete_space | delete_hyphen, "[BOS]", "", NEMO_SIGMA)
            @ pynini.cdrewrite(delete_space | delete_hyphen, "", "[EOS]", NEMO_SIGMA)
            @ pynini.cdrewrite(delete_extra_hyphens | delete_extra_spaces, "", "", NEMO_SIGMA)
            @ pynini.cdrewrite(
                pynini.cross(pynini.closure(NEMO_WHITE_SPACE, 2), NEMO_SPACE), HU_ALPHA, HU_ALPHA, NEMO_SIGMA
            )
        ).optimize()
        # Left-pad the input with zeros to exactly 24 digits so each
        # three-digit group lines up with the scale graph above.
        self.graph = (
            ((NEMO_DIGIT - "0") + pynini.closure(NEMO_DIGIT, 0))
            @ pynini.cdrewrite(pynini.closure(pynutil.insert("0")), "[BOS]", "", NEMO_SIGMA)
            @ NEMO_DIGIT ** 24
            @ graph
            @ clean_output
        )
        # Digit-by-digit readings with spoken leading zeros (e.g. "05").
        zero_space = zero + insert_space
        self.zero_space = zero_space
        self.two_digits_read = pynini.union(
            ((NEMO_DIGIT - "0") + NEMO_DIGIT) @ self.graph_hundreds_component_at_least_one_non_zero_digit,
            zero_space + digit,
        ).optimize()
        self.three_digits_read = pynini.union(
            ((NEMO_DIGIT - "0") + (NEMO_DIGIT ** 2)) @ self.graph_hundreds_component_at_least_one_non_zero_digit,
            zero_space + ((NEMO_DIGIT ** 2) @ graph_tens),
            zero_space + zero_space + digit,
        ).optimize()
        self.four_digits_read = pynini.union(
            ((NEMO_DIGIT - "0") + (NEMO_DIGIT ** 3)) @ self.graph, zero_space + self.three_digits_read
        ).optimize()
        self.graph |= graph_zero
        # `graph_unfiltered` accepts plain digit strings only; the filtered
        # graph also strips thousand separators (dot/space).
        self.graph_unfiltered = self.graph
        self.graph = filter_punctuation(self.graph).optimize()
        optional_minus_graph = pynini.closure(pynutil.insert("negative: ") + pynini.cross("-", "\"true\" "), 0, 1)
        final_graph = optional_minus_graph + pynutil.insert("integer: \"") + self.graph + pynutil.insert("\"")
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/cardinal.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_ALPHA, NEMO_DIGIT, GraphFst, insert_space
from nemo_text_processing.text_normalization.es.utils import get_abs_path, load_labels
from pynini.lib import pynutil
# Common domain endings and URL/email symbols from the electronic TSVs.
# NOTE(review): get_abs_path here is imported from the es (Spanish) utils, so
# these paths resolve under the Spanish data tree -- confirm this sharing is
# intentional.
common_domains = [x[0] for x in load_labels(get_abs_path("data/electronic/domain.tsv"))]
symbols = [x[0] for x in load_labels(get_abs_path("data/electronic/symbols.tsv"))]
class ElectronicFst(GraphFst):
    """
    Finite state transducer for classifying electronic addresses: emails and URLs,
    e.g. "[email protected]" -> electronic { username: "abc" domain: "hotmail.com" preserve_order: true }
    e.g. "www.abc.com/123" -> electronic { protocol: "www." domain: "abc.com/123" preserve_order: true }

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="electronic", kind="classify", deterministic=deterministic)
        dot = pynini.accep(".")
        known_domains = pynini.union(*common_domains)
        symbol_chars = pynini.union(*symbols) - dot
        # Characters legal in a domain label (no dot) and in a username (dot allowed).
        chars = pynini.closure(NEMO_ALPHA | NEMO_DIGIT | symbol_chars)
        chars_with_dot = pynini.closure(NEMO_ALPHA | NEMO_DIGIT | symbol_chars | dot)

        # email: username '@' domain
        username = (
            pynutil.insert("username: \"")
            + chars_with_dot
            + pynutil.insert("\"")
            + pynini.cross('@', ' ')
        )
        domain_graph = pynutil.insert("domain: \"") + (chars + dot + chars) + pynutil.insert("\"")
        # Domains recognised by a known ending (e.g. ".com"), optionally
        # followed by a path-like tail.
        domain_common_graph = (
            pynutil.insert("domain: \"")
            + chars
            + known_domains
            + pynini.closure((symbol_chars | dot) + pynini.closure(chars, 1), 0, 1)
            + pynutil.insert("\"")
        )
        graph = (username + domain_graph) | domain_common_graph

        # url
        protocol_start = pynini.accep("https://") | pynini.accep("http://")
        if deterministic:
            protocol_end = pynini.accep("www.")
        else:
            protocol_end = (
                pynini.accep("www.")
                | pynini.cross("www.", "vé vé vé.")
                | pynini.cross("www.", "dupla vé dupla vé dupla vé.")
                | pynini.cross("www.", "kettős vé kettős vé kettős vé.")
            )
        protocol = protocol_start | protocol_end | (protocol_start + protocol_end)
        protocol = pynutil.insert("protocol: \"") + protocol + pynutil.insert("\"")
        graph |= protocol + insert_space + (domain_graph | domain_common_graph)
        self.graph = graph
        final_graph = self.add_tokens(self.graph + pynutil.insert(" preserve_order: true"))
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/electronic.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# Copyright (c) 2023, Jim O'Regan.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_CHAR, NEMO_DIGIT, NEMO_SPACE, GraphFst
from nemo_text_processing.text_normalization.hu.graph_utils import TO_LOWER, TO_UPPER
from nemo_text_processing.text_normalization.hu.utils import get_abs_path, load_labels
from pynini.lib import pynutil
def get_suffixed_days(labels):
    """For each (number, day-name) row, attach the day name's possessive
    ending to the digit form, e.g. ("1", "elseje") -> ("1-je", "elseje").

    Only the first matching ending ("je" before "a" before "e") is used;
    rows whose day name matches none of the endings are dropped.
    """
    result = []
    for label in labels:
        suffix = next((e for e in ("je", "a", "e") if label[1].endswith(e)), None)
        if suffix is not None:
            result.append((f"{label[0]}-{suffix}", label[1]))
    return result
def day_inflector(number, day):
    """
    Generates pairs of inflected day numbers and their full forms,
    according to the options listed here:
    https://helyesiras.mta.hu/helyesiras/default/akh#298

    Args:
        number: the day number
        day: the day name

    Returns:
        a list of expanded forms, two per ending.
    """
    endings = {
        "e": "ét ének ével éért évé éig eként éül ében én énél ébe ére éhez éből éről étől",
        "a": "át ának ával áért ává áig aként ául ában án ánál ába ára ához ából áról ától",
    }
    stem = day[:-1]
    uses_j = day.endswith("eje")  # e.g. "elseje": written with a linking -j-
    forms = []
    for suffix in endings[day[-1]].split(" "):
        spoken = f"{stem}{suffix}"
        if uses_j:
            forms.append((f"{number}-j{suffix}", spoken))
            forms.append((f"{number}-{suffix}", spoken))
        else:
            forms.append((f"{number}-{suffix}", spoken))
            forms.append((f"{number}-{suffix[1:]}", spoken))
    return forms
def day_adj_endings(number, word, basic=True):
    """
    Two adjective forms can be formed from the days (three for 1):
        1-i  -> elseji
        1-ji -> elseji
        1-jei -> elsejei
        2-i  -> másodiki
        2-ai -> másodikai
        4-i  -> negyediki
        4-ei -> negyedikei
    This is based on other -i adjectives, because these forms are rare.
    With basic=True only the bare -i forms are produced; otherwise every
    singular and plural case ending is generated.
    """
    endings_pl = {
        "e": "iek ieket ieknek iekkel iekért iekké iekig iekként iekben ieken ieknél iekbe iekre iekhez iekből iekről iektől",
        "a": "iak iakat iaknak iakkal iakért iakká iakig iakként iakban iakon iaknál iakba iakra iakhoz iakból iakról iaktól",
    }
    endings_sg = {
        "e": "i it inek ivel iért ivé iig iként iben in inél ibe ire ihez iből iről itől",
        "a": "i it inak ival iért ivá iig iként iban in inál iba ira ihoz iból iról itól",
    }
    final = word[-1]
    stem = word[:-1]
    if basic:
        suffixes = ["i"]
    else:
        suffixes = endings_sg[final].split(" ") + endings_pl[final].split(" ")
    pairs = []
    for sfx in suffixes:
        pairs.append((f"{number}-{sfx}", f"{stem}{sfx}"))
        if word == "elseje":
            # extra linking-j variant for "elseje"
            pairs.append((f"{number}-j{sfx}", f"{stem}{sfx}"))
        pairs.append((f"{number}-{final}{sfx}", f"{word}{sfx}"))
    return pairs
class DateFst(GraphFst):
    """
    Finite state transducer for classifying date, e.g.
        "2010. április 1." -> date { year: "kettőezer-tíz" month: "április" day: "elseje" preserve_order: true }
        "2010. ápr. 1." -> date { year: "kettőezer-tíz" month: "április" day: "elseje" preserve_order: true }
        "2010. IV. 1." -> date { year: "kettőezer-tíz" month: "április" day: "elseje" preserve_order: true }
        "2010. 04. 1." -> date { year: "kettőezer-tíz" month: "április" day: "elseje" preserve_order: true }
        "2010. 04. 1-je" -> date { year: "kettőezer-tíz" month: "április" day: "elseje" preserve_order: true }
        "2010. 04. 1-jén" -> date { year: "kettőezer-tíz" month: "április" day: "elsején" preserve_order: true }
        "2010. 04. 1-én" -> date { year: "kettőezer-tíz" month: "április" day: "elsején" preserve_order: true }

    Args:
        cardinal: cardinal GraphFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, deterministic: bool):
        super().__init__(name="date", kind="classify", deterministic=deterministic)

        # Hungarian dates terminate year/month/day numerals with "." ("2010. április 1.").
        delete_dot = pynutil.delete(".")
        optional_dot = pynini.closure(delete_dot, 0, 1)

        # days.tsv maps written day numbers to spoken forms (e.g. "1" -> "elseje").
        day_tsv = load_labels(get_abs_path("data/dates/days.tsv"))
        graph_day = pynini.string_map(day_tsv)
        # Case-suffixed day variants ("1-jén" -> "elsején" etc.) built from the same table.
        days_suffixed = get_suffixed_days(day_tsv)

        use_full_adj_forms = False
        if not deterministic:
            # Non-deterministic mode additionally allows the rarer full adjectival forms.
            use_full_adj_forms = True
        day_adj_forms = []
        for day in day_tsv:
            days_suffixed += day_inflector(day[0], day[1])
            day_adj_forms += day_adj_endings(day[0], day[1], use_full_adj_forms)
        graph_days_suffixed = pynini.string_map(days_suffixed)
        graph_days_adj_suffixed = pynini.string_map(day_adj_forms)
        # Also accept the verbalized (output-side) forms as identity mappings,
        # so already-spelled-out days pass through.
        graph_days_suffixed |= pynini.project(graph_days_suffixed, "output")
        graph_days_adj_suffixed |= pynini.project(graph_days_adj_suffixed, "output")
        self.days_suffixed = graph_days_suffixed
        self.days_suffixed |= graph_days_adj_suffixed
        self.days_only = pynutil.insert("day: \"") + graph_days_suffixed + pynutil.insert("\"")
        # these express from and to, respectively
        # december 25-től január 27-ig -> from December 25 to January 27
        self.days_tol = (pynini.closure(NEMO_CHAR) + pynini.union("től", "tól")) @ graph_days_suffixed
        self.days_ig = (pynini.closure(NEMO_CHAR) + "ig") @ graph_days_suffixed

        # "01" -> "1"; otherwise require a non-zero first digit in a two-digit number.
        delete_leading_zero = (pynutil.delete("0") | (NEMO_DIGIT - "0")) + NEMO_DIGIT
        month_abbr_graph = load_labels(get_abs_path("data/dates/month_abbr.tsv"))
        number_to_month = pynini.string_file(get_abs_path("data/dates/months.tsv")).optimize()
        month_romans = pynini.string_file(get_abs_path("data/dates/months_roman.tsv")).optimize()
        # Accept upper-cased Roman numerals as well (e.g. "IV" and "iv").
        month_romans |= pynini.invert(pynini.invert(month_romans) @ pynini.closure(TO_UPPER))
        month_romans_dot = month_romans + delete_dot
        month_graph = pynini.union(*[x[1] for x in month_abbr_graph]).optimize()
        month_abbr_graph = pynini.string_map(month_abbr_graph)
        self.month_abbr = month_abbr_graph
        # Allow a capitalized first letter on full month names.
        month_graph |= (TO_LOWER + pynini.closure(NEMO_CHAR)) @ month_graph
        # jan.-> januar, Jan-> januar, januar-> januar
        month_abbr_dot = month_abbr_graph + delete_dot

        numbers = cardinal.graph
        optional_leading_zero = delete_leading_zero | NEMO_DIGIT
        # 01, 31, 1
        digit_day = optional_leading_zero @ pynini.union(*[str(x) for x in range(1, 32)]) @ graph_day
        day = (pynutil.insert("day: \"") + digit_day + pynutil.insert("\"")).optimize()
        day_dot = (pynutil.insert("day: \"") + digit_day + pynutil.delete(".") + pynutil.insert("\"")).optimize()
        self.day_dot = day_dot
        # Accept already-verbalized day words too.
        day_words = pynini.project(digit_day, "output")
        day_pieces = (digit_day + optional_dot) | day_words | graph_days_suffixed
        day_part = (pynutil.insert("day: \"") + day_pieces + pynutil.insert("\"")).optimize()

        # Months as numbers 1-12, with or without a leading zero.
        digit_month = optional_leading_zero @ pynini.union(*[str(x) for x in range(1, 13)])
        number_to_month = digit_month @ number_to_month
        number_to_month_dot = number_to_month + delete_dot

        month_part = month_abbr_dot | month_graph | month_romans_dot | number_to_month_dot
        self.month = month_part
        month_component = (pynutil.insert("month: \"") + month_part + pynutil.insert("\"")).optimize()
        month_number_only = (pynutil.insert("month: \"") + number_to_month + pynutil.insert("\"")).optimize()
        self.month_component = month_component
        self.month_number_only = month_number_only
        self.month_number = number_to_month_dot
        month_component = self.month_component.optimize()

        # prefer cardinal over year
        year = (NEMO_DIGIT - "0") + pynini.closure(NEMO_DIGIT, 1, 3)  # 90, 990, 1990
        year @= numbers
        self.year = year
        year_only = pynutil.insert("year: \"") + year + pynutil.insert("\"")
        year_dot = pynutil.insert("year: \"") + year + pynutil.delete(".") + pynutil.insert("\"")
        optional_year_dot_space = pynini.closure(year_dot + NEMO_SPACE, 0, 1)

        # year-month-day orders: "2010. április 1." and hyphenated "2010-04-01".
        graph_ymd = optional_year_dot_space + month_component + NEMO_SPACE + day_part
        graph_ymd |= (
            pynini.closure(year_only + pynini.cross("-", " "), 0, 1) + month_number_only + pynini.cross("-", " ") + day
        )
        self.ymd = graph_ymd
        graph_ym = year_dot + NEMO_SPACE + month_component
        # day-month(-year) order with hyphens.
        graph_dmy = (
            day + pynini.cross("-", " ") + month_number_only + pynini.closure(pynini.cross("-", " ") + year_only, 0, 1)
        )
        separators = [".", "/"]
        for sep in separators:
            year_optional = pynini.closure(pynini.cross(sep, " ") + year_only, 0, 1)
            if not deterministic:
                # Year may be omitted in non-deterministic mode.
                new_graph = day + pynini.cross(sep, " ") + month_number_only + year_optional
            else:
                new_graph = day + pynini.cross(sep, " ") + month_number_only + year_only
            graph_dmy |= new_graph

        # y-m-d and y-m inputs keep their written order on output.
        final_graph = graph_ymd + pynutil.insert(" preserve_order: true")
        final_graph |= graph_ym + pynutil.insert(" preserve_order: true")
        final_graph |= graph_dmy

        self.final_graph = final_graph.optimize()
        self.fst = self.add_tokens(self.final_graph).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/date.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_SPACE, GraphFst
from pynini.lib import pynutil
class WordFst(GraphFst):
    """
    Finite state transducer for classifying word.
        e.g. nyomok -> tokens { name: "nyomok" }

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        # Forward ``deterministic`` to the base class; previously it was accepted
        # but silently dropped, inconsistent with every other GraphFst subclass
        # in this package.
        super().__init__(name="word", kind="classify", deterministic=deterministic)

        # A word is any non-empty run of non-space characters, emitted as name: "<word>".
        word = pynutil.insert("name: \"") + pynini.closure(NEMO_NOT_SPACE, 1) + pynutil.insert("\"")
        self.fst = word.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/taggers/word.py |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
NEMO_SIGMA,
NEMO_SPACE,
GraphFst,
delete_extra_space,
delete_preserve_order,
delete_space,
insert_space,
)
from pynini.lib import pynutil
class TimeFst(GraphFst):
    """
    Finite state transducer for verbalizing time, e.g.
        time { hours: "tizenkét" minutes: "harminc" } -> tizenkét óra harminc perc
        time { hours: "tizenkét" } -> tizenkét óra
    Optional fields: seconds, suffix (day-part word), zone (time zone).

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="time", kind="verbalize", deterministic=deterministic)

        # Any non-empty quoted field value.
        ANY_NOT_QUOTE = pynini.closure(NEMO_NOT_QUOTE, 1)
        # Any value except "nulla" (zero): zero minutes/seconds are dropped entirely.
        NOT_NULLA = pynini.difference(ANY_NOT_QUOTE, "nulla")
        hour = pynutil.delete("hours: \"") + ANY_NOT_QUOTE + pynutil.delete("\"")
        # Hours are always followed by the word "óra".
        hour_ora = hour + pynutil.insert(" óra")
        minute = pynutil.delete("minutes: \"") + NOT_NULLA + pynutil.delete("\"")
        # "nulla" minutes verbalize to nothing.
        minute |= pynutil.delete("minutes: \"nulla\"")
        minute_perc = minute + pynutil.insert(" perc")
        suffix = pynutil.delete("suffix: \"") + ANY_NOT_QUOTE + pynutil.delete("\"")
        optional_suffix = pynini.closure(delete_space + insert_space + suffix, 0, 1)
        zone = (
            pynutil.delete("zone:")
            + delete_space
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\"")
        )
        optional_zone = pynini.closure(delete_space + insert_space + zone, 0, 1)
        second = pynutil.delete("seconds: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        second |= pynutil.delete("seconds: \"nulla\"")
        # NOTE(review): " masodperc" lacks diacritics — Hungarian "second" is
        # "másodperc"; confirm against the tagger/data files before changing,
        # as this is runtime output text.
        second_masodperc = second + pynutil.insert(" masodperc")

        # Enumerate the accepted field combinations; delete_preserve_order removes
        # the tagger's " preserve_order: true" marker where present.
        graph = hour_ora + NEMO_SPACE + minute + NEMO_SPACE + suffix + optional_zone
        graph |= hour_ora + NEMO_SPACE + minute + NEMO_SPACE + suffix + delete_preserve_order
        graph |= (
            hour_ora + NEMO_SPACE + minute_perc + NEMO_SPACE + second + NEMO_SPACE + suffix + delete_preserve_order
        )
        graph |= hour_ora + NEMO_SPACE + second + NEMO_SPACE + suffix + delete_preserve_order
        graph |= (
            hour_ora + NEMO_SPACE + minute_perc + NEMO_SPACE + second_masodperc + optional_zone + delete_preserve_order
        )
        graph |= hour_ora + NEMO_SPACE + second_masodperc + optional_zone + delete_preserve_order
        graph |= hour + NEMO_SPACE + suffix + delete_preserve_order
        graph |= hour_ora + optional_zone + delete_preserve_order
        graph |= hour_ora + NEMO_SPACE + minute_perc + optional_zone + delete_preserve_order
        graph |= hour + NEMO_SPACE + minute + NEMO_SPACE + second + optional_suffix + optional_zone
        graph |= hour + NEMO_SPACE + suffix + optional_zone
        graph |= hour + optional_zone
        # Normalize whitespace: collapse doubled spaces, trim trailing space.
        graph = (
            graph
            @ pynini.cdrewrite(delete_extra_space, "", "", NEMO_SIGMA)
            @ pynini.cdrewrite(delete_space, "", "[EOS]", NEMO_SIGMA)
        )
        # graph |= graph_hms
        delete_tokens = self.delete_tokens(graph)
        self.fst = delete_tokens.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/time.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
GraphFst,
delete_extra_space,
delete_preserve_order,
)
from pynini.lib import pynutil
class MeasureFst(GraphFst):
    """
    Finite state transducer for verbalizing measure tokens: a verbalized
    number (cardinal, decimal, or fraction) combined with a unit string,
    in either number-unit or unit-number order.

    Args:
        decimal: decimal GraphFst
        cardinal: cardinal GraphFst
        fraction: fraction GraphFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, decimal: GraphFst, cardinal: GraphFst, fraction: GraphFst, deterministic: bool):
        super().__init__(name="measure", kind="verbalize", deterministic=deterministic)

        # units: "<text>" -> <text>
        unit_text = pynutil.delete("units: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")

        # Number first, then the unit (the common written order).
        any_number = cardinal.fst | decimal.fst | fraction.fst
        measure = any_number + pynini.accep(" ") + unit_text
        # Unit first, then the number (cardinal or decimal only).
        measure |= unit_text + delete_extra_space + (cardinal.fst | decimal.fst)
        # Drop the tagger's preserve_order marker.
        measure += delete_preserve_order

        self.fst = self.delete_tokens(measure).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/measure.py |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
GraphFst,
delete_preserve_order,
insert_space,
)
from pynini.lib import pynutil
class FractionFst(GraphFst):
    """
    Finite state transducer for verbalizing fraction,
    e.g. tokens { fraction { integer_part: "huszonhárom" numerator: "négy" denominator: "hatod" } } ->
        huszonhárom és négy hatod
    The integer part, when present, is joined with "és"; in non-deterministic
    mode the conjunction may also be omitted.

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="fraction", kind="verbalize", deterministic=deterministic)

        # negative: "true" -> "mínusz " (optional).
        minus = pynini.closure(pynini.cross("negative: \"true\"", "mínusz "), 0, 1)

        # Field extractors: strip the label and quotes, keep the value.
        whole = pynutil.delete("integer_part: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\" ")
        num = pynutil.delete("numerator: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\" ")
        denom = pynutil.delete("denominator: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")

        # "<numerator> <denominator>"; non-deterministic mode also allows no space.
        fraction_body = num + insert_space + denom
        if not deterministic:
            fraction_body |= num + denom

        # Conjunction between the whole number and the fraction.
        joiner = pynutil.insert("és ")
        if not deterministic and not lm:
            joiner = pynini.closure(joiner, 0, 1)

        # Optional signed whole-number prefix.
        optional_whole = pynini.closure(minus + whole + insert_space + joiner, 0, 1)

        graph = optional_whole + fraction_body + delete_preserve_order
        self.graph = graph
        self.fst = self.delete_tokens(self.graph).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/fraction.py |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space, insert_space
from pynini.lib import pynutil
class TelephoneFst(GraphFst):
    """
    Finite state transducer for verbalizing telephone numbers, e.g.
        telephone { country_code: "one" number_part: "one two three, one two three, five six seven eight" extension: "one" }
        -> one, one two three, one two three, five six seven eight, one

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="telephone", kind="verbalize", deterministic=deterministic)

        # Non-empty quoted field value.
        quoted_value = pynini.closure(NEMO_NOT_QUOTE, 1)

        # Optional country code, followed by a single space when present.
        code_field = pynutil.delete("country_code: \"") + quoted_value + pynutil.delete("\"")
        maybe_code = pynini.closure(code_field + delete_space + insert_space, 0, 1)

        # Main number; a trailing space inside the quotes is preferentially
        # dropped (negative weight makes deletion the cheaper path).
        drop_trailing_space = pynini.closure(pynutil.add_weight(pynutil.delete(" "), -0.0001), 0, 1)
        number_field = (
            pynutil.delete("number_part: \"") + quoted_value + drop_trailing_space + pynutil.delete("\"")
        )

        # Optional extension, preceded by a single space when present.
        ext_field = pynutil.delete("extension: \"") + quoted_value + pynutil.delete("\"")
        maybe_ext = pynini.closure(delete_space + insert_space + ext_field, 0, 1)

        graph = maybe_code + number_field + maybe_ext
        self.fst = self.delete_tokens(graph).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/telephone.py |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
from pynini.lib import pynutil
class OrdinalFst(GraphFst):
    """
    Finite state transducer for verbalizing ordinal, e.g.
        ordinal { integer: "második" } -> második

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="ordinal", kind="verbalize", deterministic=deterministic)

        # Strip the surrounding quotes, keeping the spelled-out ordinal.
        unquoted = pynutil.delete("\"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        # integer: "<value>" -> <value>
        self.graph = pynutil.delete("integer:") + delete_space + unquoted
        self.fst = self.delete_tokens(self.graph).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/ordinal.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst
from nemo_text_processing.text_normalization.en.verbalizers.whitelist import WhiteListFst
from nemo_text_processing.text_normalization.hu.verbalizers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.hu.verbalizers.date import DateFst
from nemo_text_processing.text_normalization.hu.verbalizers.decimal import DecimalFst
from nemo_text_processing.text_normalization.hu.verbalizers.electronic import ElectronicFst
from nemo_text_processing.text_normalization.hu.verbalizers.fraction import FractionFst
from nemo_text_processing.text_normalization.hu.verbalizers.measure import MeasureFst
from nemo_text_processing.text_normalization.hu.verbalizers.money import MoneyFst
from nemo_text_processing.text_normalization.hu.verbalizers.ordinal import OrdinalFst
from nemo_text_processing.text_normalization.hu.verbalizers.telephone import TelephoneFst
from nemo_text_processing.text_normalization.hu.verbalizers.time import TimeFst
class VerbalizeFst(GraphFst):
    """
    Composes other verbalizer grammars.
    For deployment, this grammar will be compiled and exported to OpenFst Finite State Archive (FAR) File.
    More details to deployment at NeMo/tools/text_processing_deployment.

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="verbalize", kind="verbalize", deterministic=deterministic)

        # Grammars reused as building blocks by other verbalizers.
        cardinal = CardinalFst(deterministic=deterministic)
        decimal = DecimalFst(deterministic=deterministic)
        fraction = FractionFst(deterministic=deterministic)

        # All semiotic-class verbalizers; the final grammar is their union.
        components = [
            cardinal.fst,
            MeasureFst(cardinal=cardinal, decimal=decimal, fraction=fraction, deterministic=deterministic).fst,
            decimal.fst,
            OrdinalFst(deterministic=deterministic).fst,
            DateFst(deterministic=deterministic).fst,
            ElectronicFst(deterministic=deterministic).fst,
            MoneyFst(decimal=decimal, deterministic=deterministic).fst,
            fraction.fst,
            WhiteListFst(deterministic=deterministic).fst,
            TelephoneFst(deterministic=deterministic).fst,
            TimeFst(deterministic=deterministic).fst,
        ]

        combined = components[0]
        for component in components[1:]:
            combined |= component
        self.fst = combined
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/verbalize.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
GraphFst,
delete_extra_space,
delete_space,
generator_main,
)
from nemo_text_processing.text_normalization.en.verbalizers.word import WordFst
from nemo_text_processing.text_normalization.hu.verbalizers.verbalize import VerbalizeFst
from pynini.lib import pynutil
class VerbalizeFinalFst(GraphFst):
    """
    Finite state transducer that verbalizes an entire sentence

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        cache_dir: path to a dir with .far grammar file. Set to None to avoid using cache.
        overwrite_cache: set to True to overwrite .far files
    """

    def __init__(self, deterministic: bool = True, cache_dir: str = None, overwrite_cache: bool = False):
        super().__init__(name="verbalize_final", kind="verbalize", deterministic=deterministic)
        far_file = None
        # The string "None" is treated like no cache dir (it may arrive via CLI args).
        if cache_dir is not None and cache_dir != "None":
            os.makedirs(cache_dir, exist_ok=True)
            far_file = os.path.join(cache_dir, f"hu_tn_{deterministic}_deterministic_verbalizer.far")
        if not overwrite_cache and far_file and os.path.exists(far_file):
            # Reuse the previously compiled grammar instead of rebuilding it.
            self.fst = pynini.Far(far_file, mode="r")["verbalize"]
            logging.info(f'VerbalizeFinalFst graph was restored from {far_file}.')
        else:
            verbalize = VerbalizeFst(deterministic=deterministic).fst
            word = WordFst(deterministic=deterministic).fst
            # Fall back to verbatim words for tokens no semiotic grammar accepts.
            types = verbalize | word
            # Strip the "tokens { ... }" wrapper around each token.
            graph = (
                pynutil.delete("tokens")
                + delete_space
                + pynutil.delete("{")
                + delete_space
                + types
                + delete_space
                + pynutil.delete("}")
            )
            # One or more tokens, single-space separated, with outer whitespace removed.
            graph = delete_space + pynini.closure(graph + delete_extra_space) + graph + delete_space
            self.fst = graph.optimize()
            if far_file:
                # Persist the compiled grammar for future runs.
                generator_main(far_file, {"verbalize": self.fst})
                logging.info(f"VerbalizeFinalFst grammars are saved to {far_file}.")
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/verbalize_final.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/__init__.py |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
GraphFst,
delete_preserve_order,
delete_space,
insert_space,
)
from pynini.lib import pynutil
class DecimalFst(GraphFst):
    """
    Finite state transducer for verbalizing decimal, e.g.
        decimal { negative: "true" integer_part: "tizenkét" fractional_part: "ötvenhatszázad" quantity: "milliárd" } -> mínusz tizenkét egész ötvenhatszázad milliárd

    The pieces are exposed as public attributes (``optional_sign``, ``integer``,
    ``fractional``, ``quantity``, ``numbers``) for reuse by other verbalizers
    such as money.

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="decimal", kind="verbalize", deterministic=deterministic)
        # negative: "true" -> "mínusz "; optional overall.
        self.optional_sign = pynini.cross("negative: \"true\"", "mínusz ")
        self.optional_sign = pynini.closure(self.optional_sign + delete_space, 0, 1)
        # integer_part: "<words>" -> <words>
        self.integer = (
            pynutil.delete("integer_part:")
            + delete_space
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\"")
        )
        self.optional_integer = pynini.closure(self.integer + delete_space + insert_space, 0, 1)
        # fractional_part: "<words>" -> <words>
        self.fractional_default = (
            pynutil.delete("fractional_part:")
            + delete_space
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\"")
        )
        # Hungarian reads the decimal point as "egész" before the fractional part.
        self.fractional = pynutil.insert("egész ") + self.fractional_default
        # quantity: "<words>" -> <words> (e.g. "millió", "milliárd")
        self.quantity = (
            pynutil.delete("quantity:")
            + delete_space
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\"")
        )
        self.optional_quantity = pynini.closure(delete_space + insert_space + self.quantity, 0, 1)
        # sign + (integer | integer quantity | [integer] "egész" fraction [quantity])
        graph = (
            self.optional_sign
            + (
                self.integer
                | (self.integer + delete_space + insert_space + self.quantity)
                | (self.optional_integer + self.fractional + self.optional_quantity)
            )
            + delete_preserve_order
        )
        self.numbers = graph
        delete_tokens = self.delete_tokens(graph)
        self.fst = delete_tokens.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/decimal.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_preserve_order
from pynini.lib import pynutil
class MoneyFst(GraphFst):
    """
    Finite state transducer for verbalizing money, e.g.
        money { currency_maj: "euró" integer_part: "egy" } -> "egy euró"
        money { integer_part: "egy" currency_maj: "font" fractional_part: "negyven" currency_min: "penny" preserve_order: true } -> "egy font negyven penny"
        money { fractional_part: "egy" currency_min: "penny" preserve_order: true } -> "egy penny"

    Args:
        decimal: GraphFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, decimal: GraphFst, deterministic: bool = True):
        super().__init__(name="money", kind="verbalize", deterministic=deterministic)

        space = pynini.accep(" ")

        def unwrap(label):
            # <label>: "<value>" -> <value>
            return pynutil.delete(label + ": \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")

        major_unit = unwrap("currency_maj")
        minor_unit = unwrap("currency_min")
        whole = unwrap("integer_part")
        cents = unwrap("fractional_part")

        maybe_and = pynini.closure(pynutil.insert("és "), 0, 1)

        # "<integer> <major currency>"
        whole_major = whole + space + major_unit

        # Minor amount: bare, or followed by the minor currency; non-deterministic
        # mode also allows an inserted "és" before it.
        minor_amount = cents | (cents + space + minor_unit)
        if not deterministic:
            minor_amount |= maybe_and + cents + space + minor_unit

        # "<integer> <maj> <minor...>" (tagger fixed the order via preserve_order)
        whole_major_minor = whole + space + major_unit + space + minor_amount + delete_preserve_order

        # Decimal reading followed by the major currency, with or without the
        # preserve_order marker.
        decimal_major = decimal.numbers + space + major_unit
        decimal_major |= decimal.numbers + space + major_unit + delete_preserve_order

        # Minor amount only: "<frac> <minor currency>"
        minor_only = cents + space + minor_unit + delete_preserve_order

        graph = whole_major | whole_major_minor | decimal_major | minor_only
        self.fst = self.delete_tokens(graph).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/money.py |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
from pynini.lib import pynutil
class CardinalFst(GraphFst):
    """
    Finite state transducer for verbalizing cardinal, e.g.
        cardinal { negative: "true" integer: "huszonhárom" } -> mínusz huszonhárom

    ``optional_sign``, ``integer`` and ``numbers`` are exposed as public
    attributes for reuse by other grammars.

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="cardinal", kind="verbalize", deterministic=deterministic)

        # negative: "true" -> "mínusz "; optional overall.
        minus = pynini.cross("negative: \"true\"", "mínusz ")
        self.optional_sign = pynini.closure(minus + delete_space, 0, 1)

        # "\"<spelled-out number>\"" -> <spelled-out number>
        unquoted_value = pynini.closure(NEMO_NOT_QUOTE)
        self.integer = delete_space + pynutil.delete("\"") + unquoted_value + pynutil.delete("\"")

        self.numbers = self.optional_sign + pynutil.delete("integer:") + self.integer
        self.fst = self.delete_tokens(self.numbers).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/cardinal.py |
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
NEMO_SIGMA,
GraphFst,
delete_preserve_order,
insert_space,
)
from nemo_text_processing.text_normalization.hu.utils import get_abs_path
from pynini.lib import pynutil
# Spoken-form FSTs loaded from data files. The digit maps are inverted, so the
# TSVs are presumably stored with the spoken form in the first column — confirm
# against the data files.
digit_no_zero = pynini.invert(pynini.string_file(get_abs_path("data/number/digit.tsv")))
zero = pynini.invert(pynini.string_file(get_abs_path("data/number/zero.tsv")))
graph_symbols = pynini.string_file(get_abs_path("data/electronic/symbols.tsv"))
server_common = pynini.string_file(get_abs_path("data/electronic/server_name.tsv"))
domain_common = pynini.string_file(get_abs_path("data/electronic/domain.tsv"))
class ElectronicFst(GraphFst):
    """
    Finite state transducer for verbalizing electronic
        e.g. electronic { username: "abc" domain: "hotmail.com" } -> "a b c kukac hotmail pont com"
                                                                  -> "a b c kukac h o t m a i l pont c o m"
                                                                  -> "a b c kukac hotmail pont c o m"
                                                                  -> "a b c at h o t m a i l pont com"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="electronic", kind="verbalize", deterministic=deterministic)
        # Digits inside usernames/domains are read out with their Hungarian names.
        graph_digit = digit_no_zero | zero

        def add_space_after_char():
            # Insert a space between every pair of adjacent non-space characters so
            # that each character is verbalized individually.
            return pynini.closure(NEMO_NOT_QUOTE - pynini.accep(" ") + insert_space) + (
                NEMO_NOT_QUOTE - pynini.accep(" ")
            )

        # "@" is verbalized as "kukac"; non-deterministic mode adds colloquial variants.
        at_sign = pynutil.insert("kukac ")
        if not deterministic:
            at_sign |= pynutil.insert("kukacjel ")
            at_sign |= pynutil.insert("csiga ")
            at_sign |= pynutil.insert("ormány ")
            at_sign |= pynutil.insert("farkas á ")
            at_sign |= pynutil.insert("bejgli ")
            at_sign |= pynutil.insert("at-jel ")
        # Rewrite symbols and digits anywhere in the string into their spoken forms.
        verbalize_characters = pynini.cdrewrite(graph_symbols | graph_digit, "", "", NEMO_SIGMA)
        user_name = pynutil.delete("username: \"") + add_space_after_char() + pynutil.delete("\"")
        user_name @= verbalize_characters
        # The small weight penalizes the character-by-character fallback, so known
        # domain/server names are preferred when they match.
        convert_defaults = pynutil.add_weight(NEMO_NOT_QUOTE, weight=0.0001) | domain_common | server_common
        domain = convert_defaults + pynini.closure(insert_space + convert_defaults)
        domain @= verbalize_characters
        domain = pynutil.delete("domain: \"") + domain + pynutil.delete("\"")
        protocol = (
            pynutil.delete("protocol: \"")
            + add_space_after_char() @ pynini.cdrewrite(graph_symbols, "", "", NEMO_SIGMA)
            + pynutil.delete("\"")
        )
        # Either a URL (optional protocol + domain) or an e-mail address (username @ domain).
        self.graph = (pynini.closure(protocol + pynini.accep(" "), 0, 1) + domain) | (
            user_name + pynini.accep(" ") + at_sign + domain
        )
        delete_tokens = self.delete_tokens(self.graph + delete_preserve_order)
        self.fst = delete_tokens.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/electronic.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_preserve_order
from pynini.lib import pynutil
class DateFst(GraphFst):
    """
    Finite state transducer for verbalizing date, e.g.
        date { year: "kettőezer-tíz" month: "szeptember" day: "harmincegyedike" preserve_order: true }
            -> "kettőezer-tíz szeptember harmincegyedike"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="date", kind="verbalize", deterministic=deterministic)

        def extract(field):
            # Keep the quoted value of `field` and drop the field name and quotes.
            return pynutil.delete(field + ": \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")

        year = extract("year")
        month = extract("month")
        day = extract("day")
        # Optional year, then month and day, separated by single spaces.
        self.graph = pynini.closure(year + pynini.accep(" "), 0, 1) + month + pynini.accep(" ") + day
        self.fst = self.delete_tokens(self.graph + delete_preserve_order).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/verbalizers/date.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/data/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/data/dates/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/data/ordinals/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/data/inflection/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/data/electronic/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/data/telephone/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/data/time/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/data/number/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/data/money/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/data/measures/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/hu/data/fractions/__init__.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/__init__.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import logging
import os
def get_abs_path(rel_path):
    """
    Resolve a path relative to this module's directory.

    Args:
        rel_path: path relative to the directory containing this file

    Returns the absolute path (a warning is logged if it does not exist).
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    abs_path = base_dir + os.sep + rel_path
    if not os.path.exists(abs_path):
        logging.warning(f'{abs_path} does not exist')
    return abs_path
def load_labels(abs_path):
    """
    Load a TSV file as a list of rows.

    Args:
        abs_path: absolute path to the TSV file

    Returns a list of rows, each a list of the row's tab-separated column values.
    """
    # Use a context manager so the file handle is closed deterministically;
    # the original opened the file and never closed it.
    with open(abs_path, encoding="utf-8") as label_tsv:
        labels = list(csv.reader(label_tsv, delimiter="\t"))
    return labels
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/utils.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.de.utils import get_abs_path
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_DIGIT, GraphFst, convert_space, insert_space
from pynini.lib import pynutil
class TimeFst(GraphFst):
    """
    Finite state transducer for classifying time, e.g.
        "02:15 Uhr est" -> time { hours: "2" minutes: "15" zone: "e s t"}
        "2 Uhr" -> time { hours: "2" }
        "09:00 Uhr" -> time { hours: "9" }
        "02:15:10 Uhr" -> time { hours: "2" minutes: "15" seconds: "10"}

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="time", kind="classify", deterministic=deterministic)
        # Drop the trailing " Uhr"/" uhr" marker. NOTE: the parentheses are the
        # fix — `+` binds tighter than `|`, so the original expression was
        # (delete(" ") + delete("Uhr")) | delete("uhr"), which accepted a bare
        # "uhr" glued to the number ("2uhr") but rejected the intended "2 uhr".
        final_suffix = pynutil.delete(" ") + (pynutil.delete("Uhr") | pynutil.delete("uhr"))
        time_zone_graph = pynini.string_file(get_abs_path("data/time/time_zone.tsv"))
        # Hours 0-24; minutes/seconds split into 1-9 (leading zero dropped) and 10-59.
        labels_hour = [str(x) for x in range(0, 25)]
        labels_minute_single = [str(x) for x in range(1, 10)]
        labels_minute_double = [str(x) for x in range(10, 60)]
        delete_leading_zero_to_double_digit = (pynutil.delete("0") | (NEMO_DIGIT - "0")) + NEMO_DIGIT
        graph_hour = pynini.union(*labels_hour)
        graph_minute_single = pynini.union(*labels_minute_single)
        graph_minute_double = pynini.union(*labels_minute_double)
        final_graph_hour_only = pynutil.insert("hours: \"") + graph_hour + pynutil.insert("\"")
        final_graph_hour = (
            pynutil.insert("hours: \"") + delete_leading_zero_to_double_digit @ graph_hour + pynutil.insert("\"")
        )
        final_graph_minute = (
            pynutil.insert("minutes: \"")
            + (pynutil.delete("0") + graph_minute_single | graph_minute_double)
            + pynutil.insert("\"")
        )
        final_graph_second = (
            pynutil.insert("seconds: \"")
            + (pynutil.delete("0") + graph_minute_single | graph_minute_double)
            + pynutil.insert("\"")
        )
        # Optional trailing time zone, e.g. "est" -> zone: "e s t".
        final_time_zone_optional = pynini.closure(
            pynini.accep(" ") + pynutil.insert("zone: \"") + convert_space(time_zone_graph) + pynutil.insert("\""),
            0,
            1,
        )
        # 02:30 Uhr -- ":00" minutes are dropped entirely
        graph_hm = (
            final_graph_hour
            + pynutil.delete(":")
            + (pynutil.delete("00") | (insert_space + final_graph_minute))
            + final_suffix
            + final_time_zone_optional
        )
        # 10:30:05 Uhr
        graph_hms = (
            final_graph_hour
            + pynutil.delete(":")
            + (pynini.cross("00", " minutes: \"0\"") | (insert_space + final_graph_minute))
            + pynutil.delete(":")
            + (pynini.cross("00", " seconds: \"0\"") | (insert_space + final_graph_second))
            + final_suffix
            + final_time_zone_optional
            + pynutil.insert(" preserve_order: true")
        )
        # 2 Uhr est
        graph_h = final_graph_hour_only + final_suffix + final_time_zone_optional
        final_graph = (graph_hm | graph_h | graph_hms).optimize()
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/time.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.de.utils import get_abs_path
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_ALPHA,
NEMO_DIGIT,
NEMO_NON_BREAKING_SPACE,
NEMO_SIGMA,
GraphFst,
convert_space,
insert_space,
)
from pynini.examples import plurals
from pynini.lib import pynutil
# Unit data loaded from TSVs: abbreviation -> singular unit name, and
# singular -> suppletive plural forms (presumed column order; confirm against
# the data files).
unit_singular = pynini.string_file(get_abs_path("data/measure/measurements.tsv"))
suppletive = pynini.string_file(get_abs_path("data/measure/suppletive.tsv"))
def singular_to_plural():
    """Return an FST mapping German singular noun/unit forms to their plurals."""
    # plural ending n/en: masculine nouns with the endings e, ent, and, ant, ist, or
    _n = NEMO_SIGMA + pynini.union("e") + pynutil.insert("n")
    _en = (
        NEMO_SIGMA
        + pynini.union("ent", "and", "ant", "ist", "or", "ion", "ik", "heit", "keit", "schaft", "tät", "ung")
        + pynutil.insert("en")
    )
    _nen = NEMO_SIGMA + pynini.union("in") + (pynutil.insert("e") | pynutil.insert("nen"))
    # foreign-derived endings ma/um/us take -en
    _fremd = NEMO_SIGMA + pynini.union("ma", "um", "us") + pynutil.insert("en")
    # masculine nouns with the endings eur, ich, ier, ig, ling, ör take -e
    _e = NEMO_SIGMA + pynini.union("eur", "ich", "ier", "ig", "ling", "ör") + pynutil.insert("e")
    # vowel-final words take -s
    _s = NEMO_SIGMA + pynini.union("a", "i", "o", "u", "y") + pynutil.insert("s")
    # Exceptions from the suppletive table take priority over the regular rules.
    graph_plural = plurals._priority_union(
        suppletive, pynini.union(_n, _en, _nen, _fremd, _e, _s), NEMO_SIGMA
    ).optimize()
    return graph_plural
class MeasureFst(GraphFst):
    """
    Finite state transducer for classifying measure, e.g.
        "2,4 oz" -> measure { cardinal { integer_part: "zwei" fractional_part: "vier" units: "unzen" preserve_order: true } }
        "1 oz" -> measure { cardinal { integer: "zwei" units: "unze" preserve_order: true } }
        "1 million oz" -> measure { cardinal { integer: "eins" quantity: "million" units: "unze" preserve_order: true } }
    This class also converts words containing numbers and letters
    e.g. "a-8" —> "a acht"
    e.g. "1,2-a" —> "ein komma zwei a"

    Args:
        cardinal: CardinalFst
        decimal: DecimalFst
        fraction: FractionFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, decimal: GraphFst, fraction: GraphFst, deterministic: bool = True):
        super().__init__(name="measure", kind="classify", deterministic=deterministic)
        cardinal_graph = cardinal.graph
        graph_unit_singular = convert_space(unit_singular)
        # Plural unit names: apply the suppletive singular->plural map at the end
        # of the unit string.
        graph_unit_plural = graph_unit_singular @ pynini.cdrewrite(convert_space(suppletive), "", "[EOS]", NEMO_SIGMA)
        optional_graph_negative = pynini.closure("-", 0, 1)
        # "/<unit>" is verbalized as "pro <unit>", e.g. "km/h" -> "... pro stunde".
        graph_unit_denominator = (
            pynini.cross("/", "pro") + pynutil.insert(NEMO_NON_BREAKING_SPACE) + graph_unit_singular
        )
        optional_unit_denominator = pynini.closure(
            pynutil.insert(NEMO_NON_BREAKING_SPACE) + graph_unit_denominator, 0, 1,
        )
        unit_plural = (
            pynutil.insert("units: \"")
            + (graph_unit_plural + (optional_unit_denominator) | graph_unit_denominator)
            + pynutil.insert("\"")
        )
        unit_singular_graph = (
            pynutil.insert("units: \"")
            + ((graph_unit_singular + optional_unit_denominator) | graph_unit_denominator)
            + pynutil.insert("\"")
        )
        # Decimals and cardinals other than exactly "1" take the plural unit form.
        subgraph_decimal = decimal.fst + insert_space + pynini.closure(pynutil.delete(" "), 0, 1) + unit_plural
        subgraph_cardinal = (
            (optional_graph_negative + (pynini.closure(NEMO_DIGIT) - "1")) @ cardinal.fst
            + insert_space
            + pynini.closure(pynutil.delete(" "), 0, 1)
            + unit_plural
        )
        # Exactly "1" takes the singular unit, and "eins" is rewritten to "ein"
        # (German uses "ein" before a noun).
        subgraph_cardinal |= (
            (optional_graph_negative + pynini.accep("1"))
            @ cardinal.fst
            @ pynini.cdrewrite(pynini.cross("eins", "ein"), "", "", NEMO_SIGMA)
            + insert_space
            + pynini.closure(pynutil.delete(" "), 0, 1)
            + unit_singular_graph
        )
        subgraph_fraction = fraction.fst + insert_space + pynini.closure(pynutil.delete(" "), 0, 1) + unit_plural
        # Mixed alphanumeric tokens such as "a-8" and "1,2-a".
        cardinal_dash_alpha = (
            pynutil.insert("cardinal { integer: \"")
            + cardinal_graph
            + pynutil.delete('-')
            + pynutil.insert("\" } units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.insert("\"")
        )
        alpha_dash_cardinal = (
            pynutil.insert("units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.delete('-')
            + pynutil.insert("\"")
            + pynutil.insert(" cardinal { integer: \"")
            + cardinal_graph
            + pynutil.insert("\" }")
        )
        decimal_dash_alpha = (
            pynutil.insert("decimal { ")
            + decimal.final_graph_wo_negative
            + pynutil.delete('-')
            + pynutil.insert(" } units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.insert("\"")
        )
        # Dimension-style expressions ("2x3") keep the "x" as the unit.
        decimal_times = (
            pynutil.insert("decimal { ")
            + decimal.final_graph_wo_negative
            + pynutil.insert(" } units: \"")
            + pynini.union('x', 'X')
            + pynutil.insert("\"")
        )
        cardinal_times = (
            pynutil.insert("cardinal { integer: \"")
            + cardinal_graph
            + pynutil.insert("\" } units: \"")
            + pynini.union('x', 'X')
            + pynutil.insert("\"")
        )
        alpha_dash_decimal = (
            pynutil.insert("units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.delete('-')
            + pynutil.insert("\"")
            + pynutil.insert(" decimal { ")
            + decimal.final_graph_wo_negative
            + pynutil.insert(" }")
        )
        final_graph = (
            subgraph_decimal
            | subgraph_cardinal
            | cardinal_dash_alpha
            | alpha_dash_cardinal
            | decimal_dash_alpha
            | decimal_times
            | alpha_dash_decimal
            | subgraph_fraction
            | cardinal_times
        )
        final_graph += pynutil.insert(" preserve_order: true")
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/measure.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst
from pynini.lib import pynutil
class FractionFst(GraphFst):
    """
    Finite state transducer for classifying fraction
        "23 4/6" ->
        fraction { integer_part: "drei und zwanzig" numerator: "vier" denominator: "sechs" preserve_order: true }

    Args:
        cardinal: cardinal GraphFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal, deterministic: bool = True):
        super().__init__(name="fraction", kind="classify", deterministic=deterministic)
        cardinal_graph = cardinal.graph
        # Optional leading "-" is classified as negative: "true".
        self.optional_graph_negative = pynini.closure(
            pynutil.insert("negative: ") + pynini.cross("-", "\"true\" "), 0, 1
        )
        # Optional whole-number part before the fraction ("23 4/6").
        self.integer = pynutil.insert("integer_part: \"") + cardinal_graph + pynutil.insert("\"")
        # The separator may be "/" or " / ".
        self.numerator = (
            pynutil.insert("numerator: \"") + cardinal_graph + pynini.cross(pynini.union("/", " / "), "\" ")
        )
        self.denominator = pynutil.insert("denominator: \"") + cardinal_graph + pynutil.insert("\"")
        self.graph = (
            self.optional_graph_negative
            + pynini.closure(self.integer + pynini.accep(" "), 0, 1)
            + self.numerator
            + self.denominator
        )
        graph = self.graph + pynutil.insert(" preserve_order: true")
        final_graph = self.add_tokens(graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/fraction.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.de.utils import get_abs_path
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_DIGIT, GraphFst, insert_space
from pynini.lib import pynutil
class TelephoneFst(GraphFst):
    """
    Finite state transducer for classifying telephone, which includes country code, number part and extension
    E.g
    "+49 1234-1233" -> telephone { country_code: "plus neun und vierzig" number_part: "eins zwei drei vier eins zwei drei drei" preserve_order: true }
    "(012) 1234-1233" -> telephone { country_code: "null eins zwei" number_part: "eins zwei drei vier eins zwei drei drei" preserve_order: true }
    (0**)

    Args:
        cardinal: cardinal GraphFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, deterministic: bool = True):
        super().__init__(name="telephone", kind="classify", deterministic=deterministic)
        graph_zero = pynini.invert(pynini.string_file(get_abs_path("data/numbers/zero.tsv"))).optimize()
        # "1" may also be read as "eins" in telephone numbers.
        graph_digit_no_zero = pynini.invert(
            pynini.string_file(get_abs_path("data/numbers/digit.tsv"))
        ).optimize() | pynini.cross("1", "eins")
        graph_digit = graph_digit_no_zero | graph_zero
        # Read every digit individually, separated by spaces.
        numbers_with_single_digits = pynini.closure(graph_digit + insert_space) + graph_digit
        two_digit_and_zero = (NEMO_DIGIT ** 2 @ cardinal.two_digit_non_zero) | graph_zero
        # def add_space_after_two_digit():
        #     return pynini.closure(two_digit_and_zero + insert_space) + (
        #         two_digit_and_zero
        #     )
        # Country code forms: "+NN", "(0NN...)", or a leading-zero area code.
        country_code = pynini.closure(pynini.cross("+", "plus "), 0, 1) + two_digit_and_zero
        country_code |= (
            pynutil.delete("(") + graph_zero + insert_space + numbers_with_single_digits + pynutil.delete(")")
        )
        country_code |= graph_zero + insert_space + numbers_with_single_digits
        country_code = pynutil.insert("country_code: \"") + country_code + pynutil.insert("\"")
        # "-" or " " between digit groups becomes a plain space.
        del_separator = pynini.cross(pynini.union("-", " "), " ")
        # numbers_with_two_digits = pynini.closure(graph_digit + insert_space) + add_space_after_two_digit() + pynini.closure(insert_space + graph_digit)
        # numbers = numbers_with_two_digits + pynini.closure(del_separator + numbers_with_two_digits, 0, 1)
        numbers = numbers_with_single_digits + pynini.closure(del_separator + numbers_with_single_digits, 0, 1)
        # Require at least 7 symbols (digits or separators), presumably so short
        # numbers are not misclassified as phone numbers — confirm intent.
        number_length = pynini.closure((NEMO_DIGIT | pynini.union("-", " ", ")", "(")), 7)
        number_part = pynini.compose(number_length, numbers)
        number = pynutil.insert("number_part: \"") + number_part + pynutil.insert("\"")
        graph = country_code + pynini.accep(" ") + number
        self.graph = graph
        final_graph = self.add_tokens(self.graph + pynutil.insert(" preserve_order: true"))
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/telephone.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Adapted from https://github.com/google/TextNormalizationCoveringGrammars
# Russian minimally supervised number grammar.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_DIGIT, GraphFst
from pynini.lib import pynutil
class OrdinalFst(GraphFst):
    """
    Finite state transducer for classifying ordinals, e.g.
        "2." -> ordinal { integer: "zwei" } }
        "2tes" -> ordinal { integer: "zwei" } }

    Args:
        cardinal: cardinal GraphFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, deterministic=False):
        super().__init__(name="ordinal", kind="classify", deterministic=deterministic)
        # Ordinal suffixes that may follow the digits ("2tes", "2ter", ...).
        suffixes = pynini.union("ter", "tes", "tem", "te", "ten")
        # Drop either a (slightly penalized) spelled suffix or a trailing period.
        marker = pynutil.add_weight(suffixes, weight=0.0001) | pynini.accep(".")
        digits = pynini.closure(NEMO_DIGIT | pynini.accep("."))
        self.graph = ((digits + pynutil.delete(marker)) @ cardinal.graph).optimize()
        quoted = pynutil.insert("integer: \"") + self.graph + pynutil.insert("\"")
        self.fst = self.add_tokens(quoted).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/ordinal.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.de.utils import get_abs_path, load_labels
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst, convert_space
from pynini.lib import pynutil
class WhiteListFst(GraphFst):
    """
    Finite state transducer for classifying whitelist, e.g.
        "Mr." -> tokens { name: "mister" }
    This class has highest priority among all classifier grammars. Whitelisted tokens are defined and loaded from "data/whitelist.tsv".
    Args:
        input_case: accepting either "lower_cased" or "cased" input.
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        input_file: path to a file with whitelist replacements
    """

    def __init__(self, input_case: str, deterministic: bool = True, input_file: str = None):
        super().__init__(name="whitelist", kind="classify", deterministic=deterministic)

        def _load_graph(case, path):
            # Build a string-map FST from a TSV file; lower-case the keys when requested.
            entries = load_labels(path)
            if case == "lower_cased":
                entries = [[entry[0].lower()] + entry[1:] for entry in entries]
            return pynini.string_map(entries)

        graph = _load_graph(input_case, get_abs_path("data/whitelist.tsv"))
        if not deterministic and input_case != "lower_cased":
            # Additionally accept lower-cased keys at a small penalty.
            graph |= pynutil.add_weight(
                _load_graph("lower_cased", get_abs_path("data/whitelist.tsv")), weight=0.0001
            )
        if input_file:
            # A user-supplied file replaces the default whitelist in deterministic
            # mode, and extends it otherwise.
            user_graph = _load_graph(input_case, input_file)
            graph = (graph | user_graph) if not deterministic else user_graph
        if not deterministic:
            graph |= _load_graph(input_case, get_abs_path("data/measure/measurements.tsv"))
        self.graph = graph
        self.final_graph = convert_space(self.graph).optimize()
        self.fst = (pynutil.insert("name: \"") + self.final_graph + pynutil.insert("\"")).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/whitelist.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import pynini
from nemo_text_processing.text_normalization.de.taggers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.de.taggers.date import DateFst
from nemo_text_processing.text_normalization.de.taggers.decimal import DecimalFst
from nemo_text_processing.text_normalization.de.taggers.electronic import ElectronicFst
from nemo_text_processing.text_normalization.de.taggers.fraction import FractionFst
from nemo_text_processing.text_normalization.de.taggers.measure import MeasureFst
from nemo_text_processing.text_normalization.de.taggers.money import MoneyFst
from nemo_text_processing.text_normalization.de.taggers.ordinal import OrdinalFst
from nemo_text_processing.text_normalization.de.taggers.telephone import TelephoneFst
from nemo_text_processing.text_normalization.de.taggers.time import TimeFst
from nemo_text_processing.text_normalization.de.taggers.whitelist import WhiteListFst
from nemo_text_processing.text_normalization.de.taggers.word import WordFst
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_CHAR,
NEMO_DIGIT,
GraphFst,
delete_extra_space,
delete_space,
generator_main,
)
from nemo_text_processing.text_normalization.en.taggers.punctuation import PunctuationFst
from pynini.lib import pynutil
class ClassifyFst(GraphFst):
    """
    Final class that composes all other classification grammars. This class can process an entire sentence, that is lower cased.
    For deployment, this grammar will be compiled and exported to OpenFst Finite State Archive (FAR) File.
    More details to deployment at NeMo/tools/text_processing_deployment.
    Args:
        input_case: accepting either "lower_cased" or "cased" input.
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        cache_dir: path to a dir with .far grammar file. Set to None to avoid using cache.
        overwrite_cache: set to True to overwrite .far files
        whitelist: path to a file with whitelist replacements
    """
    def __init__(
        self,
        input_case: str,
        deterministic: bool = False,
        cache_dir: str = None,
        overwrite_cache: bool = False,
        whitelist: str = None,
    ):
        super().__init__(name="tokenize_and_classify", kind="classify", deterministic=deterministic)
        far_file = None
        if cache_dir is not None and cache_dir != "None":
            os.makedirs(cache_dir, exist_ok=True)
            # Cache file name encodes casing mode, determinism and whitelist file so
            # differently-configured grammars don't collide in the same cache dir.
            whitelist_file = os.path.basename(whitelist) if whitelist else ""
            far_file = os.path.join(
                cache_dir, f"_{input_case}_de_tn_{deterministic}_deterministic{whitelist_file}.far"
            )
        if not overwrite_cache and far_file and os.path.exists(far_file):
            # Fast path: restore the previously compiled grammar from the FAR cache.
            self.fst = pynini.Far(far_file, mode="r")["tokenize_and_classify"]
            # Variant of the grammar whose output may not contain any digit.
            no_digits = pynini.closure(pynini.difference(NEMO_CHAR, NEMO_DIGIT))
            self.fst_no_digits = pynini.compose(self.fst, no_digits).optimize()
            logging.info(f"ClassifyFst.fst was restored from {far_file}.")
        else:
            logging.info(f"Creating ClassifyFst grammars. This might take some time...")
            # Build all component taggers; several of them reuse cardinal/decimal/fraction.
            self.cardinal = CardinalFst(deterministic=deterministic)
            cardinal_graph = self.cardinal.fst
            self.ordinal = OrdinalFst(cardinal=self.cardinal, deterministic=deterministic)
            ordinal_graph = self.ordinal.fst
            self.decimal = DecimalFst(cardinal=self.cardinal, deterministic=deterministic)
            decimal_graph = self.decimal.fst
            self.fraction = FractionFst(cardinal=self.cardinal, deterministic=deterministic)
            fraction_graph = self.fraction.fst
            self.measure = MeasureFst(
                cardinal=self.cardinal, decimal=self.decimal, fraction=self.fraction, deterministic=deterministic
            )
            measure_graph = self.measure.fst
            self.date = DateFst(cardinal=self.cardinal, deterministic=deterministic)
            date_graph = self.date.fst
            word_graph = WordFst(deterministic=deterministic).fst
            self.time = TimeFst(deterministic=deterministic)
            time_graph = self.time.fst
            self.telephone = TelephoneFst(cardinal=self.cardinal, deterministic=deterministic)
            telephone_graph = self.telephone.fst
            self.electronic = ElectronicFst(deterministic=deterministic)
            electronic_graph = self.electronic.fst
            self.money = MoneyFst(cardinal=self.cardinal, decimal=self.decimal, deterministic=deterministic)
            money_graph = self.money.fst
            self.whitelist = WhiteListFst(input_case=input_case, deterministic=deterministic, input_file=whitelist)
            whitelist_graph = self.whitelist.fst
            punct_graph = PunctuationFst(deterministic=deterministic).fst
            # Union of all semiotic classes; the whitelist has the smallest weight
            # (1.01 < 1.1) and therefore the highest priority.
            classify = (
                pynutil.add_weight(whitelist_graph, 1.01)
                | pynutil.add_weight(time_graph, 1.1)
                | pynutil.add_weight(measure_graph, 1.1)
                | pynutil.add_weight(cardinal_graph, 1.1)
                | pynutil.add_weight(fraction_graph, 1.1)
                | pynutil.add_weight(date_graph, 1.1)
                | pynutil.add_weight(ordinal_graph, 1.1)
                | pynutil.add_weight(decimal_graph, 1.1)
                | pynutil.add_weight(money_graph, 1.1)
                | pynutil.add_weight(telephone_graph, 1.1)
                | pynutil.add_weight(electronic_graph, 1.1)
            )
            # Plain-word fallback: very heavy weight so it only fires when no
            # semiotic class matches.
            classify |= pynutil.add_weight(word_graph, 100)
            punct = pynutil.insert("tokens { ") + pynutil.add_weight(punct_graph, weight=1.1) + pynutil.insert(" }")
            token = pynutil.insert("tokens { ") + classify + pynutil.insert(" }")
            # Punctuation tokens may attach before and/or after each classified token.
            token_plus_punct = (
                pynini.closure(punct + pynutil.insert(" ")) + token + pynini.closure(pynutil.insert(" ") + punct)
            )
            graph = token_plus_punct + pynini.closure(pynutil.add_weight(delete_extra_space, 1.1) + token_plus_punct)
            graph = delete_space + graph + delete_space
            self.fst = graph.optimize()
            no_digits = pynini.closure(pynini.difference(NEMO_CHAR, NEMO_DIGIT))
            self.fst_no_digits = pynini.compose(self.fst, no_digits).optimize()
            if far_file:
                # Persist the compiled grammar so the next run can take the fast path.
                generator_main(far_file, {"tokenize_and_classify": self.fst})
                logging.info(f"ClassifyFst grammars are saved to {far_file}.")
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/tokenize_and_classify.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/__init__.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.de.utils import get_abs_path
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst, insert_space
from pynini.lib import pynutil
# Quantity words (e.g. "million", "milliarde") loaded from TSV, shared by get_quantity below.
quantities = pynini.string_file(get_abs_path("data/numbers/quantities.tsv"))
def get_quantity(decimal: 'pynini.FstLike', cardinal_up_to_hundred: 'pynini.FstLike') -> 'pynini.FstLike':
    """
    Returns an FST that transforms either a cardinal or a decimal followed by a
    quantity word into a numeral,
    e.g. 1 million -> integer_part: "eine" quantity: "million"
    e.g. 1.4 million -> integer_part: "eins" fractional_part: "vier" quantity: "million"
    Args:
        decimal: decimal FST
        cardinal_up_to_hundred: cardinal FST
    """
    # Shared "quantity: ..." tag used by both branches.
    quantity_tag = pynutil.insert("quantity: \"") + quantities + pynutil.insert("\"")
    integer_tag = pynutil.insert("integer_part: \"") + cardinal_up_to_hundred + pynutil.insert("\"")
    # Branch 1: plain cardinal followed by a quantity word.
    cardinal_branch = integer_tag + pynini.accep(" ") + quantity_tag
    # Branch 2: full decimal followed by a quantity word.
    decimal_branch = decimal + pynini.accep(" ") + quantity_tag
    return cardinal_branch | decimal_branch
class DecimalFst(GraphFst):
    """
    Finite state transducer for classifying decimal, e.g.
        -11,4006 billion -> decimal { negative: "true" integer_part: "elf" fractional_part: "vier null null sechs" quantity: "billion" preserve_order: true }
        1 billion -> decimal { integer_part: "eins" quantity: "billion" preserve_order: true }
    Args:
        cardinal: CardinalFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, deterministic: bool = True):
        super().__init__(name="decimal", kind="classify", deterministic=deterministic)
        # Digits after the comma are read out one by one; "1" is spoken "eins" here.
        digit = pynini.string_file(get_abs_path("data/numbers/digit.tsv")).invert()
        digit |= pynini.string_file(get_abs_path("data/numbers/zero.tsv")).invert()
        digit |= pynini.cross("1", "eins")
        self.graph = digit + pynini.closure(insert_space + digit).optimize()
        delete_comma = pynutil.delete(",")
        optional_negative = pynini.closure(pynutil.insert("negative: ") + pynini.cross("-", "\"true\" "), 0, 1)
        self.graph_fractional = pynutil.insert("fractional_part: \"") + self.graph + pynutil.insert("\"")
        self.graph_integer = pynutil.insert("integer_part: \"") + cardinal.graph + pynutil.insert("\"")
        # integer,fraction — the German decimal separator is the comma.
        unsigned = self.graph_integer + delete_comma + insert_space + self.graph_fractional
        self.final_graph_wo_negative = unsigned | get_quantity(
            unsigned, cardinal.graph_hundred_component_at_least_one_none_zero_digit
        )
        final_graph = optional_negative + self.final_graph_wo_negative + pynutil.insert(" preserve_order: true")
        self.fst = self.add_tokens(final_graph).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/decimal.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.de.utils import get_abs_path, load_labels
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_ALPHA,
NEMO_DIGIT,
NEMO_SIGMA,
GraphFst,
convert_space,
insert_space,
)
from pynini.lib import pynutil
# Minor-unit currency names (singular/plural, e.g. penny/pence) and major currency symbols.
min_singular = pynini.string_file(get_abs_path("data/money/currency_minor_singular.tsv"))
min_plural = pynini.string_file(get_abs_path("data/money/currency_minor_plural.tsv"))
maj_singular = pynini.string_file((get_abs_path("data/money/currency.tsv")))
class MoneyFst(GraphFst):
    """
    Finite state transducer for classifying money, e.g.
        "€1" -> money { currency_maj: "euro" integer_part: "ein"}
        "€1,000" -> money { currency_maj: "euro" integer_part: "ein" }
        "€1,001" -> money { currency_maj: "euro" integer_part: "eins" fractional_part: "null null eins"}
        "£1,4" -> money { integer_part: "ein" currency_maj: "pfund" fractional_part: "vierzig" preserve_order: true}
               -> money { integer_part: "ein" currency_maj: "pfund" fractional_part: "vierzig" currency_min: "pence" preserve_order: true}
        "£0,01" -> money { fractional_part: "ein" currency_min: "penny" preserve_order: true}
        "£0,01 million" -> money { currency_maj: "pfund" integer_part: "null" fractional_part: "null eins" quantity: "million"}
    Args:
        cardinal: CardinalFst
        decimal: DecimalFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """
    def __init__(self, cardinal: GraphFst, decimal: GraphFst, deterministic: bool = True):
        super().__init__(name="money", kind="classify", deterministic=deterministic)
        cardinal_graph = cardinal.graph
        graph_decimal_final = decimal.fst
        maj_singular_labels = load_labels(get_abs_path("data/money/currency.tsv"))
        maj_singular_graph = convert_space(maj_singular)
        # Same graph for plural: the currency names in the TSV do not inflect here.
        maj_plural_graph = maj_singular_graph
        graph_maj_singular = pynutil.insert("currency_maj: \"") + maj_singular_graph + pynutil.insert("\"")
        graph_maj_plural = pynutil.insert("currency_maj: \"") + maj_plural_graph + pynutil.insert("\"")
        # Drop an all-zero fractional part, so "€1,00" reads like "€1".
        optional_delete_fractional_zeros = pynini.closure(
            pynutil.delete(",") + pynini.closure(pynutil.delete("0"), 1), 0, 1
        )
        # Integer "1" before a singular currency is read "ein" (not "eins").
        graph_integer_one = pynutil.insert("integer_part: \"") + pynini.cross("1", "ein") + pynutil.insert("\"")
        # only for decimals where third decimal after comma is non-zero or with quantity
        decimal_delete_last_zeros = (
            pynini.closure(NEMO_DIGIT, 1)
            + pynini.accep(",")
            + pynini.closure(NEMO_DIGIT, 2)
            + (NEMO_DIGIT - "0")
            + pynini.closure(pynutil.delete("0"))
        )
        decimal_with_quantity = NEMO_SIGMA + NEMO_ALPHA
        graph_decimal = (
            graph_maj_plural + insert_space + (decimal_delete_last_zeros | decimal_with_quantity) @ graph_decimal_final
        )
        # Any integer other than exactly "1" goes through the normal cardinal graph.
        graph_integer = (
            pynutil.insert("integer_part: \"") + ((NEMO_SIGMA - "1") @ cardinal_graph) + pynutil.insert("\"")
        )
        graph_integer_only = graph_maj_singular + insert_space + graph_integer_one
        graph_integer_only |= graph_maj_plural + insert_space + graph_integer
        graph = (graph_integer_only + optional_delete_fractional_zeros) | graph_decimal
        # remove trailing zeros of non zero number in the first 2 digits and fill up to 2 digits
        # e.g. 2000 -> 20, 0200->02, 01 -> 01, 10 -> 10
        # not accepted: 002, 00, 0,
        two_digits_fractional_part = (
            pynini.closure(NEMO_DIGIT) + (NEMO_DIGIT - "0") + pynini.closure(pynutil.delete("0"))
        ) @ (
            (pynutil.delete("0") + (NEMO_DIGIT - "0"))
            | ((NEMO_DIGIT - "0") + pynutil.insert("0"))
            | ((NEMO_DIGIT - "0") + NEMO_DIGIT)
        )
        graph_min_singular = pynutil.insert(" currency_min: \"") + min_singular + pynutil.insert("\"")
        graph_min_plural = pynutil.insert(" currency_min: \"") + min_plural + pynutil.insert("\"")
        # format ** euro ** cent
        # Built per currency symbol because the minor-unit name depends on the symbol.
        decimal_graph_with_minor = None
        for curr_symbol, _ in maj_singular_labels:
            preserve_order = pynutil.insert(" preserve_order: true")
            integer_plus_maj = graph_integer + insert_space + pynutil.insert(curr_symbol) @ graph_maj_plural
            integer_plus_maj |= graph_integer_one + insert_space + pynutil.insert(curr_symbol) @ graph_maj_singular
            # non zero integer part
            integer_plus_maj = (pynini.closure(NEMO_DIGIT) - "0") @ integer_plus_maj
            # Fractional part "1" with a singular minor unit is read "ein".
            graph_fractional_one = two_digits_fractional_part @ pynini.cross("1", "ein")
            graph_fractional_one = pynutil.insert("fractional_part: \"") + graph_fractional_one + pynutil.insert("\"")
            graph_fractional = (
                two_digits_fractional_part @ (pynini.closure(NEMO_DIGIT, 1, 2) - "1") @ cardinal.two_digit_non_zero
            )
            graph_fractional = pynutil.insert("fractional_part: \"") + graph_fractional + pynutil.insert("\"")
            fractional_plus_min = graph_fractional + insert_space + pynutil.insert(curr_symbol) @ graph_min_plural
            fractional_plus_min |= (
                graph_fractional_one + insert_space + pynutil.insert(curr_symbol) @ graph_min_singular
            )
            # "€2,50" -> "zwei euro fünfzig cent"
            decimal_graph_with_minor_curr = integer_plus_maj + pynini.cross(",", " ") + fractional_plus_min
            # Slightly penalized alternative without the minor-unit word.
            decimal_graph_with_minor_curr |= pynutil.add_weight(
                integer_plus_maj
                + pynini.cross(",", " ")
                + pynutil.insert("fractional_part: \"")
                + two_digits_fractional_part @ cardinal.two_digit_non_zero
                + pynutil.insert("\""),
                weight=0.0001,
            )
            # "€0,xx" -> only the minor unit is spoken.
            decimal_graph_with_minor_curr |= pynutil.delete("0,") + fractional_plus_min
            decimal_graph_with_minor_curr = (
                pynutil.delete(curr_symbol) + decimal_graph_with_minor_curr + preserve_order
            )
            decimal_graph_with_minor = (
                decimal_graph_with_minor_curr
                if decimal_graph_with_minor is None
                else pynini.union(decimal_graph_with_minor, decimal_graph_with_minor_curr)
            )
        final_graph = graph | decimal_graph_with_minor
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/money.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import defaultdict
import pynini
from nemo_text_processing.text_normalization.de.utils import get_abs_path, load_labels
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_DIGIT,
NEMO_SIGMA,
GraphFst,
delete_space,
insert_space,
)
from pynini.lib import pynutil
# Conjunction inserted between units and tens, e.g. "ein und zwanzig" (21).
AND = "und"
def get_ties_digit(digit_path: str, tie_path: str) -> 'pynini.FstLike':
    """
    getting all inverse normalizations for numbers between 21 - 100
    Args:
        digit_path: file to digit tsv
        tie_path: file to tie tsv, e.g. 20, 30, etc.
    Returns:
        res: fst that converts numbers to their verbalization
    """
    # Group spellings by their digit value, e.g. digit_names["2"] == ["zwei"].
    digit_names = defaultdict(list)
    tie_names = defaultdict(list)
    for spelling, value in load_labels(digit_path):
        digit_names[value].append(spelling)
    # In compounds the unit "1" is spoken "ein" ("ein und zwanzig"), not "eins".
    digit_names["1"] = ["ein"]
    for spelling, value in load_labels(tie_path):
        tie_names[value].append(spelling)
    mappings = []
    for number in range(21, 100):
        tens_digit, ones_digit = str(number)
        # Round tens (30, 40, ...) are covered by the ties graph alone.
        if ones_digit == "0":
            continue
        mappings.extend(
            (f"{unit} {AND} {tie}", str(number))
            for unit in digit_names[ones_digit]
            for tie in tie_names[tens_digit]
        )
    return pynini.string_map(mappings)
class CardinalFst(GraphFst):
    """
    Finite state transducer for classifying cardinals, e.g.
        "101" -> cardinal { integer: "ein hundert und zehn" }
    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """
    def __init__(self, deterministic: bool = False):
        super().__init__(name="cardinal", kind="classify", deterministic=deterministic)
        graph_zero = pynini.string_file(get_abs_path("data/numbers/zero.tsv")).invert()
        graph_digit_no_one = pynini.string_file(get_abs_path("data/numbers/digit.tsv")).invert()
        graph_one = pynini.string_file(get_abs_path("data/numbers/ones.tsv")).invert()
        graph_digit = graph_digit_no_one | graph_one
        self.digit = (graph_digit | graph_zero).optimize()
        graph_teen = pynini.string_file(get_abs_path("data/numbers/teen.tsv")).invert()
        graph_ties = pynini.string_file(get_abs_path("data/numbers/ties.tsv")).invert()
        # separator = "."
        def tens_no_zero():
            # Two-digit component "01"-"99": leading-zero digit, compound 21-99
            # ("x und y"), teens, or round tens.
            return (
                pynutil.delete("0") + graph_digit
                | get_ties_digit(
                    get_abs_path("data/numbers/digit.tsv"), get_abs_path("data/numbers/ties.tsv")
                ).invert()
                | graph_teen
                | (graph_ties + pynutil.delete("0"))
            )
        def hundred_non_zero():
            # Three-digit component "001"-"999"; the "und" between hundreds and
            # the rest is optional (small penalty when inserted).
            return (graph_digit_no_one + insert_space | pynini.cross("1", "ein ")) + pynutil.insert("hundert") + (
                pynini.closure(insert_space + pynutil.insert(AND, weight=0.0001), 0, 1) + insert_space + tens_no_zero()
                | pynutil.delete("00")
            ) | pynutil.delete("0") + tens_no_zero()
        def thousand():
            # Six-digit component: thousands block plus hundreds block, either may be zero.
            return (hundred_non_zero() + insert_space + pynutil.insert("tausend") | pynutil.delete("000")) + (
                insert_space + hundred_non_zero() | pynutil.delete("000")
            )
        # Optional plural endings for scale words (negative weight prefers the plural form).
        optional_plural_quantity_en = pynini.closure(pynutil.insert("en", weight=-0.0001), 0, 1)
        optional_plural_quantity_n = pynini.closure(pynutil.insert("n", weight=-0.0001), 0, 1)
        graph_million = pynini.union(
            hundred_non_zero() + insert_space + pynutil.insert("million") + optional_plural_quantity_en,
            pynutil.delete("000"),
        )
        graph_billion = pynini.union(
            hundred_non_zero() + insert_space + pynutil.insert("milliarde") + optional_plural_quantity_n,
            pynutil.delete("000"),
        )
        graph_trillion = pynini.union(
            hundred_non_zero() + insert_space + pynutil.insert("billion") + optional_plural_quantity_en,
            pynutil.delete("000"),
        )
        graph_quadrillion = pynini.union(
            hundred_non_zero() + insert_space + pynutil.insert("billiarde") + optional_plural_quantity_n,
            pynutil.delete("000"),
        )
        graph_quintillion = pynini.union(
            hundred_non_zero() + insert_space + pynutil.insert("trillion") + optional_plural_quantity_en,
            pynutil.delete("000"),
        )
        graph_sextillion = pynini.union(
            hundred_non_zero() + insert_space + pynutil.insert("trilliarde") + optional_plural_quantity_n,
            pynutil.delete("000"),
        )
        # Full 24-digit pipeline; each scale block consumes three digits.
        graph = pynini.union(
            graph_sextillion
            + insert_space
            + graph_quintillion
            + insert_space
            + graph_quadrillion
            + insert_space
            + graph_trillion
            + insert_space
            + graph_billion
            + insert_space
            + graph_million
            + insert_space
            + thousand()
        )
        # Post-edits for "eins" preceding a scale word, e.g. "eins millionen" -> "eine million".
        fix_syntax = [
            ("eins tausend", "ein tausend"),
            ("eins millionen", "eine million"),
            ("eins milliarden", "eine milliarde"),
            ("eins billionen", "eine billion"),
            ("eins billiarden", "eine billiarde"),
        ]
        fix_syntax = pynini.union(*[pynini.cross(*x) for x in fix_syntax])
        # Pad the input with leading zeros to exactly 24 digits, apply the scale
        # pipeline, then clean up spaces and apply the "eins" fixes.
        self.graph = (
            ((NEMO_DIGIT - "0" + pynini.closure(NEMO_DIGIT, 0)) - "0" - "1")
            @ pynini.cdrewrite(pynini.closure(pynutil.insert("0")), "[BOS]", "", NEMO_SIGMA)
            @ NEMO_DIGIT ** 24
            @ graph
            @ pynini.cdrewrite(delete_space, "[BOS]", "", NEMO_SIGMA)
            @ pynini.cdrewrite(delete_space, "", "[EOS]", NEMO_SIGMA)
            @ pynini.cdrewrite(pynini.cross(" ", " "), "", "", NEMO_SIGMA)
            @ pynini.cdrewrite(fix_syntax, "[BOS]", "", NEMO_SIGMA)
        )
        # "0" and standalone "1" are handled separately ("null", "eins").
        self.graph |= graph_zero | pynini.cross("1", "eins")
        # self.graph = pynini.cdrewrite(pynutil.delete(separator), "", "", NEMO_SIGMA) @ self.graph
        self.graph = self.graph.optimize()
        # 1-3 digit cardinal with at least one non-zero digit; reused by other taggers.
        self.graph_hundred_component_at_least_one_none_zero_digit = (
            ((NEMO_DIGIT - "0" + pynini.closure(NEMO_DIGIT, 0)) - "0" - "1")
            @ pynini.cdrewrite(pynini.closure(pynutil.insert("0")), "[BOS]", "", NEMO_SIGMA)
            @ NEMO_DIGIT ** 3
            @ hundred_non_zero()
        ) | pynini.cross("1", "eins")
        self.graph_hundred_component_at_least_one_none_zero_digit = (
            self.graph_hundred_component_at_least_one_none_zero_digit.optimize()
        )
        self.two_digit_non_zero = (
            pynini.closure(NEMO_DIGIT, 1, 2) @ self.graph_hundred_component_at_least_one_none_zero_digit
        )
        optional_minus_graph = pynini.closure(pynutil.insert("negative: ") + pynini.cross("-", "\"true\" "), 0, 1)
        final_graph = optional_minus_graph + pynutil.insert("integer: \"") + self.graph + pynutil.insert("\"")
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/cardinal.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.de.utils import get_abs_path, load_labels
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_ALPHA, NEMO_DIGIT, GraphFst, insert_space
from pynini.lib import pynutil
class ElectronicFst(GraphFst):
    """
    Finite state transducer for classifying electronic: email addresses
        e.g. "[email protected]" -> electronic { username: "abc" domain: "hotmail.com" preserve_order: true }
        e.g. "www.abc.com/123" -> electronic { protocol: "www." domain: "abc.com/123" preserve_order: true }
    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="electronic", kind="classify", deterministic=deterministic)
        dot = pynini.accep(".")
        common_domains = pynini.union(*[row[0] for row in load_labels(get_abs_path("data/electronic/domain.tsv"))])
        symbols = pynini.union(*[row[0] for row in load_labels(get_abs_path("data/electronic/symbols.tsv"))]) - dot
        chars = pynini.closure(NEMO_ALPHA | NEMO_DIGIT | symbols)
        # email: username@domain
        username = pynutil.insert("username: \"") + chars + pynutil.insert("\"") + pynini.cross('@', ' ')
        domain = pynutil.insert("domain: \"") + (chars + dot + chars) + pynutil.insert("\"")
        # Domain ending in a well-known TLD, with an optional path/suffix after it.
        common_domain = (
            pynutil.insert("domain: \"")
            + chars
            + common_domains
            + pynini.closure((symbols | dot) + pynini.closure(chars, 1), 0, 1)
            + pynutil.insert("\"")
        )
        graph = (username + domain) | common_domain
        # url: scheme, "www.", or both, followed by a domain
        scheme = pynini.accep("https://") | pynini.accep("http://")
        www = pynini.accep("www.")
        protocol = pynutil.insert("protocol: \"") + (scheme | www | (scheme + www)) + pynutil.insert("\"")
        graph |= protocol + insert_space + (domain | common_domain)
        self.graph = graph
        self.fst = self.add_tokens(self.graph + pynutil.insert(" preserve_order: true")).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/electronic.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.de.utils import get_abs_path, load_labels
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_CHAR,
NEMO_DIGIT,
TO_LOWER,
GraphFst,
insert_space,
)
from pynini.lib import pynutil
# Number sub-graphs shared by the date grammar (digit string -> German words).
graph_teen = pynini.invert(pynini.string_file(get_abs_path("data/numbers/teen.tsv"))).optimize()
graph_digit = pynini.invert(pynini.string_file(get_abs_path("data/numbers/digit.tsv"))).optimize()
ties_graph = pynini.invert(pynini.string_file(get_abs_path("data/numbers/ties.tsv"))).optimize()
# Accepts a two-digit string, removing a leading zero if present ("01" -> "1").
delete_leading_zero = (pynutil.delete("0") | (NEMO_DIGIT - "0")) + NEMO_DIGIT
def get_year_graph(cardinal: GraphFst) -> 'pynini.FstLike':
    """
    Returns year verbalizations as fst
        < 2000 neunzehn (hundert) (vier und zwanzig), >= 2000 regular cardinal
        **00 ** hundert
    Args:
        cardinal: cardinal GraphFst
    """
    # Years 20** / 21** read as plain cardinals.
    cardinal_years = (pynini.union("21", "20") + NEMO_DIGIT ** 2) @ cardinal.graph
    two_digit = delete_leading_zero @ cardinal.two_digit_non_zero
    century_word = pynutil.insert("hundert")
    # 19xx -> "neunzehn (hundert) ..." — pairwise reading of a 1*xx year.
    pairwise = (
        (pynini.accep("1") + NEMO_DIGIT) @ two_digit
        + insert_space
        + pynini.closure(century_word + insert_space, 0, 1)
        + two_digit
    )
    # 20xx can also be read pairwise.
    pairwise |= pynini.accep("20") @ two_digit + insert_space + two_digit
    # 1*00 -> "** hundert".
    full_century = (pynini.accep("1") + NEMO_DIGIT) @ two_digit + insert_space + pynutil.delete("00") + century_word
    return pairwise | full_century | cardinal_years
class DateFst(GraphFst):
    """
    Finite state transducer for classifying date, e.g.
    "01.04.2010" -> date { day: "erster" month: "april" year: "zwei tausend zehn" preserve_order: true }
    "1994" -> date { year: "neunzehn vier und neuzig" }
    "1900" -> date { year: "neunzehn hundert" }
    Args:
        cardinal: cardinal GraphFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, deterministic: bool):
        super().__init__(name="date", kind="classify", deterministic=deterministic)

        # abbr_to_name.tsv presumably maps abbreviation -> full month name —
        # TODO confirm column order against the data file.
        month_abbr_graph = load_labels(get_abs_path("data/months/abbr_to_name.tsv"))
        number_to_month = pynini.string_file(get_abs_path("data/months/numbers.tsv")).optimize()
        # Accept the full month names (second column of the abbreviation table).
        month_graph = pynini.union(*[x[1] for x in month_abbr_graph]).optimize()

        month_abbr_graph = pynini.string_map(month_abbr_graph)
        # Slightly prefer the exact abbreviation; also accept a capitalized
        # first letter (lowered via TO_LOWER), and an optional trailing ".".
        month_abbr_graph = (
            pynutil.add_weight(month_abbr_graph, weight=0.0001)
            | ((TO_LOWER + pynini.closure(NEMO_CHAR)) @ month_abbr_graph)
        ) + pynini.closure(pynutil.delete(".", weight=-0.0001), 0, 1)
        self.month_abbr = month_abbr_graph

        # Allow capitalized full month names too.
        month_graph |= (TO_LOWER + pynini.closure(NEMO_CHAR)) @ month_graph
        # jan.-> januar, Jan-> januar, januar-> januar
        month_graph |= month_abbr_graph

        numbers = cardinal.graph_hundred_component_at_least_one_none_zero_digit
        # "01" / "1" / "31" all accepted as a day component.
        optional_leading_zero = delete_leading_zero | NEMO_DIGIT
        # 01, 31, 1
        digit_day = optional_leading_zero @ pynini.union(*[str(x) for x in range(1, 32)]) @ numbers
        day = (pynutil.insert("day: \"") + digit_day + pynutil.insert("\"")).optimize()

        digit_month = optional_leading_zero @ pynini.union(*[str(x) for x in range(1, 13)])
        number_to_month = digit_month @ number_to_month
        digit_month @= numbers

        month_name = (pynutil.insert("month: \"") + month_graph + pynutil.insert("\"")).optimize()
        # Numeric month: keep the cardinal reading slightly preferred over the
        # month-name substitution.
        month_number = (
            pynutil.insert("month: \"")
            + (pynutil.add_weight(digit_month, weight=0.0001) | number_to_month)
            + pynutil.insert("\"")
        ).optimize()

        # prefer cardinal over year
        year = pynutil.add_weight(get_year_graph(cardinal=cardinal), weight=0.001)
        self.year = year
        year_only = pynutil.insert("year: \"") + year + pynutil.insert("\"")

        # day.month-name [year], e.g. "1. april 2010"
        graph_dmy = (
            day
            + pynutil.delete(".")
            + pynini.closure(pynutil.delete(" "), 0, 1)
            + insert_space
            + month_name
            + pynini.closure(pynini.accep(" ") + year_only, 0, 1)
        )

        # Fully numeric day.month[.year] dates.
        separators = ["."]
        for sep in separators:
            year_optional = pynini.closure(pynini.cross(sep, " ") + year_only, 0, 1)
            new_graph = day + pynini.cross(sep, " ") + month_number + year_optional
            graph_dmy |= new_graph

        # ISO-style year-month[-day] with dashes.
        dash = "-"
        day_optional = pynini.closure(pynini.cross(dash, " ") + day, 0, 1)
        graph_ymd = year_only + pynini.cross(dash, " ") + month_number + day_optional

        # preserve_order keeps day-month-year ordering through verbalization.
        final_graph = graph_dmy + pynutil.insert(" preserve_order: true")
        final_graph |= year_only
        final_graph |= graph_ymd

        self.final_graph = final_graph.optimize()
        self.fst = self.add_tokens(self.final_graph).optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/date.py |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_SPACE, GraphFst
from pynini.lib import pynutil
class WordFst(GraphFst):
    """
    Finite state transducer that tags a plain word.
    e.g. sleep -> tokens { name: "sleep" }
    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="word", kind="classify")
        # One or more non-space characters form a word token.
        char_run = pynini.closure(NEMO_NOT_SPACE, 1)
        # Wrap the accepted characters in a `name: "..."` field.
        tagged = pynutil.insert("name: \"") + char_run + pynutil.insert("\"")
        self.fst = tagged.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/taggers/word.py |
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.de.utils import get_abs_path, load_labels
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_DIGIT,
NEMO_SIGMA,
GraphFst,
convert_space,
delete_preserve_order,
)
from pynini.lib import pynutil
class TimeFst(GraphFst):
    """
    Finite state transducer for verbalizing time, e.g.
    time { hours: "2" minutes: "15"} -> "zwei uhr fünfzehn"
    time { minutes: "15" hours: "2" } -> "viertel nach zwei"
    time { minutes: "15" hours: "2" } -> "fünfzehn nach zwei"
    time { hours: "14" minutes: "15"} -> "vierzehn uhr fünfzehn"
    time { minutes: "15" hours: "14" } -> "viertel nach zwei"
    time { minutes: "15" hours: "14" } -> "fünfzehn nach drei"
    time { minutes: "45" hours: "14" } -> "viertel vor drei"
    Args:
        cardinal_tagger: cardinal_tagger tagger GraphFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal_tagger: GraphFst, deterministic: bool = True):
        super().__init__(name="time", kind="verbalize", deterministic=deterministic)
        # add weight so when using inverse text normalization this conversion is deprioritized
        night_to_early = pynutil.add_weight(
            pynini.invert(pynini.string_file(get_abs_path("data/time/hour_to_night.tsv"))).optimize(), weight=0.0001
        )
        # Mappings used for "vor"/"nach" readings — presumably hour_to maps an
        # hour to the next hour and minute_to maps minutes to minutes-before-
        # the-hour; TODO confirm against the data files.
        hour_to = pynini.invert(pynini.string_file(get_abs_path("data/time/hour_to.tsv"))).optimize()
        minute_to = pynini.invert(pynini.string_file(get_abs_path("data/time/minute_to.tsv"))).optimize()
        # Accept the verbalized time-zone names (second column of time_zone.tsv).
        time_zone_graph = pynini.invert(
            convert_space(pynini.union(*[x[1] for x in load_labels(get_abs_path("data/time/time_zone.tsv"))]))
        )
        graph_zero = pynini.invert(pynini.string_file(get_abs_path("data/numbers/zero.tsv"))).optimize()
        number_verbalization = graph_zero | cardinal_tagger.two_digit_non_zero
        hour = pynutil.delete("hours: \"") + pynini.closure(NEMO_DIGIT, 1) + pynutil.delete("\"")
        # "eins uhr" -> "ein uhr": rewrite a standalone "eins" before appending "uhr".
        hour_verbalized = hour @ number_verbalization @ pynini.cdrewrite(
            pynini.cross("eins", "ein"), "[BOS]", "[EOS]", NEMO_SIGMA
        ) + pynutil.insert(" uhr")
        minute = pynutil.delete("minutes: \"") + pynini.closure(NEMO_DIGIT, 1) + pynutil.delete("\"")
        zone = pynutil.delete("zone: \"") + time_zone_graph + pynutil.delete("\"")
        optional_zone = pynini.closure(pynini.accep(" ") + zone, 0, 1)
        second = pynutil.delete("seconds: \"") + pynini.closure(NEMO_DIGIT, 1) + pynutil.delete("\"")
        # Full hours-minutes-seconds reading: "... uhr ... minuten ... sekunden".
        graph_hms = (
            hour_verbalized
            + pynini.accep(" ")
            + minute @ number_verbalization
            + pynutil.insert(" minuten")
            + pynini.accep(" ")
            + second @ number_verbalization
            + pynutil.insert(" sekunden")
            + optional_zone
        )
        # Singular agreement: "eins minuten" -> "eine minute", likewise seconds.
        graph_hms @= pynini.cdrewrite(
            pynini.cross("eins minuten", "eine minute") | pynini.cross("eins sekunden", "eine sekunde"),
            pynini.union(" ", "[BOS]"),
            "",
            NEMO_SIGMA,
        )
        # Minute ranges for relative readings: 1-30 for "nach", 1-29 for "vor".
        min_30 = [str(x) for x in range(1, 31)]
        min_30 = pynini.union(*min_30)
        min_29 = [str(x) for x in range(1, 30)]
        min_29 = pynini.union(*min_29)
        graph_h = hour_verbalized
        graph_hm = hour_verbalized + pynini.accep(" ") + minute @ number_verbalization
        # "<minutes> nach <hour>", with "15" optionally read as "viertel";
        # night hours are first mapped to their 12-hour counterpart.
        graph_m_past_h = (
            minute @ min_30 @ (number_verbalization | pynini.cross("15", "viertel"))
            + pynini.accep(" ")
            + pynutil.insert("nach ")
            # + hour @ number_verbalization
            + hour @ pynini.cdrewrite(night_to_early, "[BOS]", "[EOS]", NEMO_SIGMA) @ number_verbalization
        )
        # "halb <next hour>" for half past, e.g. minutes "30".
        graph_m30_h = (
            minute @ pynini.cross("30", "halb")
            + pynini.accep(" ")
            + hour @ pynini.cdrewrite(night_to_early, "[BOS]", "[EOS]", NEMO_SIGMA) @ hour_to @ number_verbalization
        )
        # "<minutes-to> vor <next hour>", with "viertel" for quarter to.
        graph_m_to_h = (
            minute @ minute_to @ min_29 @ (number_verbalization | pynini.cross("15", "viertel"))
            + pynini.accep(" ")
            + pynutil.insert("vor ")
            + hour @ pynini.cdrewrite(night_to_early, "[BOS]", "[EOS]", NEMO_SIGMA) @ hour_to @ number_verbalization
        )
        # Relative readings are deprioritized relative to the plain "uhr" forms.
        self.graph = (
            graph_hms
            | graph_h
            | graph_hm
            | pynutil.add_weight(graph_m_past_h, weight=0.0001)
            | pynutil.add_weight(graph_m30_h, weight=0.0001)
            | pynutil.add_weight(graph_m_to_h, weight=0.0001)
        ) + optional_zone
        delete_tokens = self.delete_tokens(self.graph + delete_preserve_order)
        self.fst = delete_tokens.optimize()
| NeMo-text-processing-main | nemo_text_processing/text_normalization/de/verbalizers/time.py |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.