diff --git a/cc-multilingual-main/README.md b/cc-multilingual-main/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..2618219a0120642cb18379ed762441635041e5d8
--- /dev/null
+++ b/cc-multilingual-main/README.md
@@ -0,0 +1,56 @@
+# cc-multilingual
+Downloading and deduplicating Indic multilingual data from CommonCrawl.
+### Installation for cc_net
+```sh
+cd cc_net/
+make install
+```
+### Choose a snapshot (referred to below as `snapshot-id`)
+
+#### Step 1: Edit the config/myconfig.json file
+```json
+"dump": "snapshot-id",
+"num_shards": 1600,
+"lang_whitelist": ["as","bn","gu","kn","hi","ml","mr","ne","or","pb","sa","sd","ta","ur","te","ks","sat","mai","mni","kok","doi","brx"],
+"mine_num_processes": 16,
+"pipeline": [
+    "lid",
+    "keep_lang",
+    "pp_bucket",
+    "split_by_lang"
+],
+"target_size": "100M",
+"output_dir": "data",
+"mined_dir": "mined",
+"cache_dir": "wet_cache"
+```
+
+#### Step 2: (Optional) Download the WET data into the cache
+```sh
+wget wet_file_path  # URL of the snapshot's wet.paths.gz listing
+python3 script.py wet.paths.gz 90 wet_cache/2023-40/
+```
+#### Step 3: Run the pipeline
+```sh
+python3 -m cc_net --config config/myconfig.json
+```
+
+## Deduplication
+```sh
+pip install -r app/requirements.txt
+```
+#### Step 1: Add the list of files downloaded from cc_net to listings/file.txt, one per line, in the format lang_shard.json.gz
+
+#### Step 2: Compute MinHash signatures
+```sh
+python3 app/src/pipeline.py --input_base_uri "file://path/to/ccnet/data" --output_base_uri "/path/to/output" --artifacts_dir "file:///path/to/empty/artifacts" --input /path/to/listings/file.txt --cc_snapshot_id 2023-50 --langs "hi" --inputs_per_process 5 --minhash_num_permutations 128 --minhash_ngram_size 13
+```
+
+#### Step 3: Apply the Bloom filter
+```sh
+python3 app/src/bloomfilter.py --listings /path/to/listings/file.txt --input_base_uri "file://path/to/ccnet/data" --output_dir "/path/to/output" --parallel_readers 32 --batch_size 10
+```
+#### Step 4: Run LSH
+```sh
+python3 app/src/run_lsh.py --listings "/path/to/minhash-signature/listings/file.txt" --input_base_uri "file:///path/to/minhash-signature/files" --output_dir "/path/to/output" --similarity "0.8" --num_perm "128"
+```
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/__init__.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
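Steps 2–4 above are a standard MinHash/LSH near-deduplication pipeline: each document gets a fixed-size signature, and an LSH index surfaces pairs whose estimated Jaccard similarity exceeds the threshold. A minimal sketch of that core idea using the `datasketch` library — illustrative only, not a drop-in for `pipeline.py`/`run_lsh.py`, and character shingles stand in here for the pipeline's 13-gram shingling:

```python
from datasketch import MinHash, MinHashLSH

def signature(text: str, num_perm: int = 128, shingle: int = 13) -> MinHash:
    """Hash overlapping character shingles into a MinHash signature."""
    m = MinHash(num_perm=num_perm)
    for i in range(max(len(text) - shingle + 1, 1)):
        m.update(text[i:i + shingle].encode("utf-8"))
    return m

base = "the quick brown fox jumps over the lazy dog " * 5
docs = {
    "doc-0": base,
    "doc-1": base + "extra!",                    # near-duplicate of doc-0
    "doc-2": "entirely different content " * 8,  # unique
}

# same similarity threshold and permutation count as Steps 2 and 4
lsh = MinHashLSH(threshold=0.8, num_perm=128)
duplicates = set()
for doc_id, text in docs.items():
    sig = signature(text)
    if lsh.query(sig):   # an earlier doc is already above the threshold
        duplicates.add(doc_id)
    else:
        lsh.insert(doc_id, sig)

print(duplicates)  # expected: {'doc-1'}
```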
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/constants.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..6539fbc9c46d1fec3f9817fe40358e21eb207b51
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/constants.py
@@ -0,0 +1,2 @@
+PRECISION = 8
+CCNET_LABEL = "__label__cc"
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/data_types.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/data_types.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a6599fa798700e748e018e4dbcd5a5bfe36243e
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/data_types.py
@@ -0,0 +1,45 @@
+from dataclasses import dataclass
+from msgspec import Struct
+
+from typing import List, Tuple, Optional, Dict
+from typing_extensions import TypeAlias
+
+ScoreType: TypeAlias = Tuple[int, int, Optional[float]]
+SignalType: TypeAlias = List[ScoreType]
+
+
+@dataclass
+class TextSlice:
+    text: str
+    start: int
+    end: int
+
+    def __len__(self):
+        return len(self.text)
+
+
+class InputSpec(Struct):
+    raw_content: str
+    url: str
+    nlines: int
+    # original_nlines: int
+    source_domain: str
+    length: int
+    # original_length: int
+    language: str
+    language_score: float
+    # perplexity: float
+    bucket: str
+    digest: str
+    cc_segment: str
+    date_download: str
+
+
+class OutputSpec(Struct):
+    id: str
+    id_int: int
+    metadata: Dict[str, str]
+    quality_signals: Dict[str, List[Tuple[int, int, Optional[float]]]]
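`InputSpec` mirrors one JSON line of a cc_net output shard, and declaring it as a `msgspec.Struct` gives a fast typed decoder for those lines. A small sketch of the intended decode path — the record below is a made-up example, with all non-commented fields present:

```python
import msgspec

from core.data_types import InputSpec

decoder = msgspec.json.Decoder(InputSpec)

line = (b'{"raw_content": "...", "url": "https://example.in/page",'
        b' "nlines": 3, "source_domain": "example.in", "length": 120,'
        b' "language": "hi", "language_score": 0.98, "bucket": "head",'
        b' "digest": "sha1:...", "cc_segment": "crawl-data/...",'
        b' "date_download": "2023-10-01T00:00:00Z"}')

record = decoder.decode(line)
print(record.language, record.bucket)  # hi head
```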
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/document.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/document.py
new file mode 100644
index 0000000000000000000000000000000000000000..d9f871f35084d5389da2e753459f1415fb8a81f3
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/document.py
@@ -0,0 +1,178 @@
+from nltk.tokenize import WordPunctTokenizer
+import re
+from typing import Optional, Tuple, Callable
+
+from utilities.text import normalize, form_ngrams
+from core.data_types import TextSlice
+from core.quality_signals.utils.dsir import hash_feature
+
+_word_tokenizer = WordPunctTokenizer()
+
+
+def _compute_ngrams(text_seq, n):
+    return tuple(form_ngrams(iter(text_seq), n))
+
+
+def split_paragraphs(
+        text: str, normalizer: Callable[[str], str], remove_empty: bool = True
+) -> Tuple[TextSlice]:
+    """
+    This function is adapted from dolma: https://github.com/allenai/dolma
+
+    Split a string into paragraphs. A paragraph is defined as a sequence of
+    zero or more characters, followed by a newline character, or a sequence
+    of one or more characters, followed by the end of the string.
+    """
+    text_slices = tuple(
+        TextSlice(normalizer(text[match.start():match.end()]), match.start(),
+                  match.end())
+        for match in re.finditer(r"([^\n]*\n|[^\n]+$)", text)
+    )
+
+    if remove_empty is True:
+        text_slices = tuple(
+            text_slice for text_slice in text_slices
+            if text_slice.text.strip()
+        )
+
+    return text_slices
+
+
+class Document:
+    __slots__ = (
+        "_raw_content", "_normalized_content", "_raw_lines",
+        "_normalized_lines", "_raw_words", "_normalized_words",
+        "_num_raw_words", "_num_normalized_words", "_domain", "_raw_2grams",
+        "_raw_3grams", "_norm_2grams", "_norm_3grams", "_norm_4grams",
+        "_hash_features"
+    )
+
+    def __init__(
+            self, content: str, domain: Optional[str],
+            precompute_ngrams: bool = False,
+            precompute_hash_features: bool = False,
+            dsir_buckets: Optional[int] = None
+    ):
+        self._raw_content = content
+        self._domain = domain
+
+        # the normalized content: lowercased and punctuation removed
+        self._normalized_content = normalize(content)
+
+        # the lines of the document (split by newline)
+        self._raw_lines: Tuple[TextSlice] = split_paragraphs(
+            text=content, normalizer=lambda x: x, remove_empty=False
+        )
+
+        # the lines of the document (split by newline), normalized
+        self._normalized_lines: Tuple[TextSlice] = split_paragraphs(
+            text=content, normalizer=normalize, remove_empty=False
+        )
+
+        # the words of the raw document
+        self._raw_words = tuple(_word_tokenizer.tokenize(self._raw_content))
+
+        # the normalized words of the document (split by whitespace)
+        self._normalized_words = tuple(self._normalized_content.split())
+
+        # get number of words before and after normalization
+        self._num_raw_words = len(self._raw_words)
+        self._num_normalized_words = len(self._normalized_words)
+
+        # precompute ngrams
+        if precompute_ngrams:
+            # raw grams
+            self._raw_2grams = _compute_ngrams(self._raw_words, 2)
+            self._raw_3grams = _compute_ngrams(self._raw_words, 3)
+
+            # normalized grams
+            self._norm_2grams = _compute_ngrams(self._normalized_words, 2)
+            self._norm_3grams = _compute_ngrams(self._normalized_words, 3)
+            self._norm_4grams = _compute_ngrams(self._normalized_words, 4)
+        else:
+            self._raw_2grams = None
+            self._raw_3grams = None
+            self._norm_2grams = None
+            self._norm_3grams = None
+            self._norm_4grams = None
+
+        # precompute hash features
+        if precompute_hash_features:
+            bigrams = (self._raw_2grams or
+                       _compute_ngrams(self._raw_words, 2))
+            self._hash_features = hash_feature(
+                unigrams=self._raw_words,
+                bigrams=bigrams,
+                buckets=dsir_buckets
+            )
+        else:
+            self._hash_features = None
+
+    def __len__(self):
+        return len(self._raw_content)
+
+    @property
+    def raw_content(self):
+        return self._raw_content
+
+    @property
+    def normalized_content(self):
+        return self._normalized_content
+
+    @property
+    def raw_lines(self):
+        return self._raw_lines
+
+    @property
+    def normalized_lines(self):
+        return self._normalized_lines
+
+    @property
+    def raw_words(self):
+        return self._raw_words
+
+    @property
+    def normalized_words(self):
+        return self._normalized_words
+
+    @property
+    def num_raw_words(self):
+        return self._num_raw_words
+
+    @property
+    def num_normalized_words(self):
+        return self._num_normalized_words
+
+    @property
+    def domain(self):
+        return self._domain
+
+    @property
+    def raw_1grams(self):
+        return self._raw_words
+
+    @property
+    def raw_2grams(self):
+        return self._raw_2grams
+
+    @property
+    def raw_3grams(self):
+        return self._raw_3grams
+
+    @property
+    def norm_1grams(self):
+        return self._normalized_words
+
+    @property
+    def norm_2grams(self):
+        return self._norm_2grams
+
+    @property
+    def norm_3grams(self):
+        return self._norm_3grams
+
+    @property
+    def norm_4grams(self):
+        return self._norm_4grams
+
+    @property
+    def hash_features(self):
+        return self._hash_features
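For intuition: `split_paragraphs` keeps the character offsets of each (optionally normalized) line, which is what lets the line-level signals further down report `(start, end, score)` spans. A quick sketch of the expected behaviour, assuming the package is on `PYTHONPATH` (it only exercises the function above):

```python
from core.document import split_paragraphs

text = "First line.\nSecond line.\n\nFourth line."
for ts in split_paragraphs(text, normalizer=str.lower, remove_empty=True):
    print(repr(ts.text), ts.start, ts.end)
# 'first line.\n'  0 12
# 'second line.\n' 12 25
# 'fourth line.'   26 38
```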
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/exceptions.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..b0f08535cc6bdddfef6fe9f0591da330f4e0cb17
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/exceptions.py
@@ -0,0 +1,18 @@
+class S3ReadError(Exception):
+    def __init__(self, message):
+        super().__init__(message)
+
+
+class S3WriteError(Exception):
+    def __init__(self, message):
+        super().__init__(message)
+
+
+class LocalReadError(Exception):
+    def __init__(self, message):
+        super().__init__(message)
+
+
+class UnknownReadError(Exception):
+    def __init__(self, message):
+        super().__init__(message)
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/__init__.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/base.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..9eaf40314c75885df627e97fba602a2d04409b67
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/base.py
@@ -0,0 +1,30 @@
+from core.document import Document
+from core.data_types import SignalType
+
+
+class RPSBase:
+    r""" Base class for RP signal functions. Each child class must implement
+    the __call__ method. The __call__ method takes a document as input and
+    returns a score. """
+    DATA_TYPE = SignalType
+
+    RPS_PREFIX: str = "RPS_"
+
+    __slots__ = ["__field_name"]
+
+    def __init__(self, *args, **kwargs):  # noqa
+        # make sure all classes start with RPS_; this is to ensure that
+        # the get_rule_based_signals function works correctly when new signal
+        # functions are added
+        assert self.__class__.__name__.startswith(self.RPS_PREFIX), \
+            f"Name of signal function must" \
+            f" start with {self.RPS_PREFIX}; got {self.__class__.__name__}"
+
+        self.__field_name = self.__class__.__name__.lower()
+
+    def __call__(self, document: Document):
+        raise NotImplementedError
+
+    @property
+    def field_name(self):
+        return self.__field_name
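Every signal class below follows the same contract: its name must start with `RPS_` (asserted in the constructor, so registry discovery and field naming work), and `__call__` returns a list of `(start, end, score)` triples. A hedged sketch of what a new document-level signal would look like under that contract — this exact class is not part of the repo:

```python
from core.constants import PRECISION
from core.data_types import SignalType
from core.document import Document
from core.quality_signals.base import RPSBase


class RPS_Doc_Frac_Digit_Chars(RPSBase):  # hypothetical example signal
    r""" The fraction of characters in the raw content that are digits. """
    __slots__ = ()

    def __call__(self, document: Document) -> SignalType:
        if len(document) == 0:
            return [(0, 0, None)]

        num_digits = sum(c.isdigit() for c in document.raw_content)
        score = round(num_digits / len(document), PRECISION)
        return [(0, len(document), score)]
```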
+ """ + return list(map( + lambda cls: cls( + wikiref_model=wikiref_model, + palm_model=palm_model, + wikipedia_model=wikipedia_model, + ), + get_callables_from_module(module=sys.modules[__name__]) + )) + + +class BaseMLSignal(RPSBase): + __slots__ = "_ft_model" + + def __init__(self, ft_model_file: str): + super(BaseMLSignal, self).__init__() + if ft_model_file is None: + self._ft_model = None + else: + self._ft_model = fasttext.load_model(str(ft_model_file)) + + def __call__(self, document: Document) -> SignalType: + if self._ft_model is None: + return [(0, len(document), None)] + + if len(document.raw_content) == 0: + return [(0, len(document), None)] + + text = preprocess_quality_classifier(document=document) + pred = self._ft_model.predict(text=text) + + (pred_label, pred_prob) = pred + pred_label = pred_label[0] + pred_prob = pred_prob[0] + + if pred_label == CCNET_LABEL: + high_quality_score = 1 - pred_prob + else: + high_quality_score = pred_prob + + score = round(float(high_quality_score), PRECISION) + return [(0, len(document), score)] + + +class RPS_Doc_ML_Wikiref_Score(BaseMLSignal): # noqa + r""" Fasttext classifier prediction for the document being a Wikipedia + reference. This is the same fasttext model as in the RedPajama-1T + dataset.""" + __slots__ = () + + def __init__(self, wikiref_model: str, *args, **kwargs): # noqa + super(RPS_Doc_ML_Wikiref_Score, self).__init__( + ft_model_file=wikiref_model + ) + + +class RPS_Doc_ML_Palm_Score(BaseMLSignal): # noqa + r""" Fasttext classifier prediction for the document being a Wikipedia + article, OpenWebText sample or a RedPajama-V1 book.""" + __slots__ = () + + def __init__(self, palm_model: str, *args, **kwargs): # noqa + super(RPS_Doc_ML_Palm_Score, self).__init__( + ft_model_file=palm_model + ) + + +class RPS_Doc_ML_Wikipedia_Score(BaseMLSignal): # noqa + r""" Fasttext classifier prediction for the document being a Wikipedia + article.""" + __slots__ = () + + def __init__(self, wikipedia_model: str, *args, **kwargs): # noqa + super(RPS_Doc_ML_Wikipedia_Score, self).__init__( + ft_model_file=wikipedia_model + ) diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/content.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/content.py new file mode 100644 index 0000000000000000000000000000000000000000..c85b58a9adc70d6a53c821445bdd480b98127501 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/content.py @@ -0,0 +1,189 @@ +import re +import sys +import operator +from pathlib import Path +from typing import List, Tuple, Type + +from core.constants import PRECISION +from core.quality_signals.base import RPSBase +from core.quality_signals.utils.stop_words import get_stop_words +from core.document import Document +from core.data_types import SignalType +from core.quality_signals.utils.content import \ + load_bad_words, load_bad_urls_index +from utilities.register.registry_utils import * +from utilities.text import form_ngrams + +__all__ = ["register_content_callables", "content_schema"] + + +def content_schema() -> List[Tuple[str, Type]]: + r""" Returns a list of signal names and their data types """ + return signal_schema(module=sys.modules[__name__]) + + +def register_content_callables( + language: str, bad_urls_dir: str, bad_words_dir: str +) -> List[RPSBase]: + r""" Returns a list of signal functions (i.e., RPSBase instances) that + are used to extract content signals from a document. + + Args: + language: The language of the document. 
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/content.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/content.py
new file mode 100644
index 0000000000000000000000000000000000000000..c85b58a9adc70d6a53c821445bdd480b98127501
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/content.py
@@ -0,0 +1,189 @@
+import re
+import sys
+import operator
+from pathlib import Path
+from typing import List, Tuple, Type
+
+from core.constants import PRECISION
+from core.quality_signals.base import RPSBase
+from core.quality_signals.utils.stop_words import get_stop_words
+from core.document import Document
+from core.data_types import SignalType
+from core.quality_signals.utils.content import \
+    load_bad_words, load_bad_urls_index
+from utilities.register.registry_utils import *
+from utilities.text import form_ngrams
+
+__all__ = ["register_content_callables", "content_schema"]
+
+
+def content_schema() -> List[Tuple[str, Type]]:
+    r""" Returns a list of signal names and their data types """
+    return signal_schema(module=sys.modules[__name__])
+
+
+def register_content_callables(
+        language: str, bad_urls_dir: str, bad_words_dir: str
+) -> List[RPSBase]:
+    r""" Returns a list of signal functions (i.e., RPSBase instances) that
+    are used to extract content signals from a document.
+
+    Args:
+        language: The language of the document.
+        bad_urls_dir: directory containing the UT1 blacklist.
+        bad_words_dir: directory containing the LDNOOBW blocklist.
+
+    Returns:
+        A list of signal function class instances.
+    """
+    return list(map(
+        lambda cls: cls(
+            language=language,
+            bad_urls_dir=bad_urls_dir,
+            bad_words_dir=bad_words_dir
+        ),
+        get_callables_from_module(module=sys.modules[__name__])
+    ))
+
+
+class RPS_Doc_LDNOOBW_Words(RPSBase):  # noqa
+    r""" The number of sequences of words that are contained in the
+    List-of-Dirty-Naughty-Obscene-and-Otherwise-Bad-Words blocklist. The
+    blocklist is obtained from
+    https://github.com/LDNOOBW/List-of-Dirty-Naughty-Obscene-and-Otherwise-Bad-Words
+    """
+    __slots__ = ["_block_words", "_gram_vals"]
+
+    def __init__(
+            self, bad_words_dir: str, language: str, *args, **kwargs  # noqa
+    ):
+        super(RPS_Doc_LDNOOBW_Words, self).__init__()
+        self._block_words = load_bad_words(
+            bad_words_dir=Path(bad_words_dir), lang=language
+        )
+
+        # cache the number of words in each block list entry
+        self._gram_vals = set(map(
+            lambda w: 1 + operator.countOf(w, " "), self._block_words
+        ))
+
+    def __call__(self, document: Document) -> SignalType:
+        if len(document.normalized_content) == 0:
+            return [(0, len(document), 0.0)]
+
+        num_dirty = 0
+
+        # for each ngram value, count the number of ngrams in the document
+        # which are also in the block words list
+        for n in self._gram_vals:
+            if n == 1:
+                num_dirty += sum(
+                    1 for _ in filter(
+                        lambda w: w in self._block_words,
+                        document.normalized_words
+                    )
+                )
+                continue
+
+            num_dirty += sum(
+                1 for _ in filter(
+                    lambda t: " ".join(t) in self._block_words,
+                    # try to fetch the cached ngrams, otherwise compute them
+                    # on the fly
+                    getattr(document, f"norm_{n}grams", None)
+                    or
+                    form_ngrams(iter(document.normalized_words), n)
+                )
+            )
+
+        score = float(num_dirty)
+        return [(0, len(document), score)]
+
+
+class RPS_Doc_Lorem_Ipsum(RPSBase):  # noqa
+    r""" The ratio between the number of occurrences of 'lorem ipsum'
+    and the number of characters in the text after normalization. Text is
+    normalized by lowercasing and removing punctuation. """
+    SEARCH_TEXT = "lorem ipsum"
+    SEARCH_REGEX = re.compile(r"lorem ipsum", re.IGNORECASE)
+
+    __slots__ = ()
+
+    def __call__(self, document: Document) -> SignalType:
+        if len(document.normalized_content) == 0:
+            return [(0, len(document), 0.0)]
+
+        if self.SEARCH_TEXT not in document.normalized_content:
+            return [(0, len(document), 0.0)]
+
+        num_occurrences = len(self.SEARCH_REGEX.findall(
+            document.normalized_content
+        ))
+
+        score = float(num_occurrences) / len(document.normalized_content)
+        score = round(score, PRECISION)
+
+        return [(0, len(document), score)]
+
+
+class RPS_Doc_Curly_Bracket(RPSBase):  # noqa
+    r""" The ratio between the number of occurrences of '{' or '}' and the
+    number of characters in the raw text. """
+    SEARCH_TEXT = ("{", "}")
+    __slots__ = ()
+
+    def __call__(self, document: Document) -> SignalType:
+        if len(document.raw_content) == 0:
+            return [(0, len(document), 0.0)]
+
+        if all(map(lambda x: x not in document.raw_content, self.SEARCH_TEXT)):
+            return [(0, len(document), 0.0)]
+
+        num_occurrences = sum(
+            map(lambda x: operator.countOf(document.raw_content, x),
+                self.SEARCH_TEXT)
+        )
+
+        score = float(num_occurrences) / len(document.raw_content)
+        score = round(score, PRECISION)
+
+        return [(0, len(document), score)]
+
+
+class RPS_Doc_UT1_Blacklist(RPSBase):  # noqa
+    r""" A categorical id corresponding to the UT1 blacklist categories of
+    the document's domain.
+    """
+    __slots__ = ["_ut1_mapping"]
+
+    def __init__(self, bad_urls_dir: str, *args, **kwargs):  # noqa
+        super(RPS_Doc_UT1_Blacklist, self).__init__()
+        # self._ut1_mapping = load_bad_urls_index(Path(bad_urls_dir))
+        self._ut1_mapping = {}
+
+    def __call__(self, document: Document) -> SignalType:
+        score = self._ut1_mapping.get(document.domain, None)
+        return [(0, len(document), score)]
+
+
+class RPS_Doc_Stop_Word_Fraction(RPSBase):  # noqa
+    r""" The ratio between the number of stop words and the number of words
+    in the document. """
+    __slots__ = ["_stop_words"]
+
+    def __init__(self, language: str, *args, **kwargs):  # noqa
+        super(RPS_Doc_Stop_Word_Fraction, self).__init__()
+        self._stop_words = get_stop_words(language)
+
+    def __call__(self, document: Document) -> SignalType:
+        if len(document.normalized_words) == 0:
+            return [(0, len(document), 0.0)]
+
+        num_stop_words = sum(
+            map(lambda w: w in self._stop_words, document.raw_words)
+        )
+
+        score = float(num_stop_words) / document.num_raw_words
+        score = round(score, PRECISION)
+
+        return [(0, len(document), score)]
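Because LDNOOBW entries can be multi-word phrases, the signal above first collects the set of n-gram orders that actually occur in the blocklist and then counts matches per order. The core matching logic, re-implemented standalone for illustration (the two-entry blocklist below is made up):

```python
from typing import List, Set


def count_blocked(words: List[str], block_words: Set[str]) -> int:
    """Count unigrams and bigrams of `words` found in `block_words`."""
    hits = sum(w in block_words for w in words)
    hits += sum(" ".join(p) in block_words for p in zip(words, words[1:]))
    return hits


print(count_blocked("a bad word list".split(), {"bad", "bad word"}))  # 2
```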
+ """ + __slots__ = ["_ut1_mapping"] + + def __init__(self, bad_urls_dir: str, *args, **kwargs): # noqa + super(RPS_Doc_UT1_Blacklist, self).__init__() + # self._ut1_mapping = load_bad_urls_index(Path(bad_urls_dir)) + self._ut1_mapping = {} + + def __call__(self, document: Document) -> SignalType: + score: int = self._ut1_mapping.get(document.domain, None) + return [(0, len(document), score)] + + +class RPS_Doc_Stop_Word_Fraction(RPSBase): # noqa + r""" The ratio between the number of stop words and the number of words in + the document. """ + __slots__ = ["_stop_words"] + + def __init__(self, language: str, *args, **kwargs): # noqa + super(RPS_Doc_Stop_Word_Fraction, self).__init__() + self._stop_words = get_stop_words(language) + + def __call__(self, document: Document) -> SignalType: + if len(document.normalized_words) == 0: + return [(0, len(document), .0)] + + num_stop_words = sum( + map(lambda w: w in self._stop_words, document.raw_words) + ) + + score = float(num_stop_words) / document.num_raw_words + score = round(score, PRECISION) + + return [(0, len(document), score)] diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/importance_weights.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/importance_weights.py new file mode 100644 index 0000000000000000000000000000000000000000..66ce2003d94fca6db83120fd9d37acd22d52633f --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/importance_weights.py @@ -0,0 +1,303 @@ +import numpy as np +import scipy.stats as stats +import sys +from typing import List, Tuple, Type, Optional +from pathlib import Path + +from core.constants import PRECISION +from core.quality_signals.base import RPSBase +from core.quality_signals.utils.dsir import hash_feature +from core.document import Document +from core.data_types import SignalType + +from utilities.register.registry_utils import * +from utilities.text import form_ngrams + +__all__ = [ + "register_importance_weights_callables", + "importance_weights_schema" +] + + +def importance_weights_schema() -> List[Tuple[str, Type]]: + r""" Returns a list of signal names and their data types """ + return signal_schema(module=sys.modules[__name__]) + + +def register_importance_weights_callables( + source_fps: Optional[Tuple[str]], + wiki_fps: Optional[Tuple[str]], + openwebtext_fps: Optional[Tuple[str]], + books_fps: Optional[Tuple[str]], + language: str +) -> List[RPSBase]: + r""" Returns a list of signal functions (i.e., RPSBase instances) that + are used to extract content signals from a document. + + Returns: + A list of signal function class instances. + """ + return list(map( + lambda cls: cls( + language=language, + source_fps=source_fps, + wiki_fps=wiki_fps, + openwebtext_fps=openwebtext_fps, + books_fps=books_fps + ), + get_callables_from_module(module=sys.modules[__name__]) + )) + + +class Base_Importance(RPSBase): # noqa + r""" Base class for functions which return the log ratio of the likelihood + of the document's features with respect to the target domain + versus the source domain. 
""" + + __slots__ = ( + "_log_diff_dist", "_feature_dim", "_target_lambda", + "_source_lambda", "_length_correction" + ) + + def __init__( + self, + target_fps: Tuple[str, str], + source_fps: Tuple[str, str], + language: str, + length_correction: bool = False + ): + super(Base_Importance, self).__init__() + self._length_correction = length_correction + + if target_fps is None or source_fps is None: + self._log_diff_dist = None + self._feature_dim = None + return + + target_count_fp, target_lambbda_fp = target_fps + source_count_fp, source_lambda_fp = source_fps + + assert language == Path(target_count_fp).stem.split(".")[1], \ + f"Language mismatch between {target_count_fp} and {language}" + + assert language == Path(source_count_fp).stem.split(".")[1], \ + f"Language mismatch between {target_count_fp} and {language}" + + # load hash counts + target_counts = np.load(target_count_fp) + target_dist = target_counts / target_counts.sum() + source_counts = np.load(source_count_fp) + source_dist = source_counts / source_counts.sum() + + if length_correction: + self._target_lambda = np.load(target_lambbda_fp) + self._source_lambda = np.load(source_lambda_fp) + else: + self._target_lambda = None + self._source_lambda = None + + # compute log diff dist + self._feature_dim = target_counts.shape[0] + self._log_diff_dist = np.array( + np.log(target_dist + 1e-8) - np.log(source_dist + 1e-8) + ) + + def __call__(self, document: Document) -> SignalType: + if self._log_diff_dist is None: + return [(0, len(document), None)] + + doc_len = len(document) + + if doc_len == 0: + return [(0, doc_len, None)] + + # try to fetch cached features, if not compute them + features = ( + document.hash_features + if document.hash_features is not None + else + hash_feature( + unigrams=document.raw_words, + # fetch cached bigrams, otherwise comptue them + bigrams=( + document.raw_2grams + or + tuple(form_ngrams(iter(document.raw_words), 2)) + ), + buckets=self._feature_dim + ) + ) + + logratio = np.inner(features, self._log_diff_dist) + score = float(logratio) + + if not self._length_correction: + score = round(score, PRECISION) + return [(0, doc_len, score)] + + # correct for the length assuming a Poisson distribution + return self.__add_length_penalty(score, doc_len) + + def __add_length_penalty(self, score, doc_len): + # correct for the length assuming a Poisson distribution + len_prob_source = stats.poisson.pmf(doc_len, self._source_lambda) + len_prob_target = stats.poisson.pmf(doc_len, self._target_lambda) + + len_correction = np.log(len_prob_target + 1e-8) - \ + np.log(len_prob_source + 1e-8) + + score += float(len_correction) + score = round(score, PRECISION) + return [(0, doc_len, score)] + + +class RPS_Doc_Wikipedia_Importance(Base_Importance): # noqa + r""" Given a bag of {1,2}-wordgram model trained on Wikipedia articles p, + and a model trained on the source domain q. This is the logarithm of the + ratio p(doc)/q(doc). If length_correction is enabled, then the length of + score is adjusted by adding the term log(p_poisson(len) / q_poisson(len)) + to the final score. 
+ """ + __slots__ = () + + def __init__( + self, + wiki_fps: Tuple[str, str], + source_fps: Tuple[str, str], + language: str, + *args, **kwargs # noqa + ): + super(RPS_Doc_Wikipedia_Importance, self).__init__( + target_fps=wiki_fps, + source_fps=source_fps, + language=language, + length_correction=False + ) + + +class RPS_Doc_Wikipedia_Importance_Length_Correction( # noqa + Base_Importance +): + r""" Given a bag of {1,2}-wordgram model trained on Wikipedia articles p, + and a model trained on the source domain q. This is the logarithm of the + ratio p(doc)/q(doc). If length_correction is enabled, then the length of + score is adjusted by adding the term log(p_poisson(len) / q_poisson(len)) + to the final score. Corrects for length by adding a length penalty term. + """ + __slots__ = () + + def __init__( + self, + wiki_fps: Tuple[str, str], + source_fps: Tuple[str, str], + language: str, + *args, **kwargs # noqa + ): + super(RPS_Doc_Wikipedia_Importance_Length_Correction, + self).__init__( + target_fps=wiki_fps, + source_fps=source_fps, + language=language, + length_correction=True + ) + + +class RPS_Doc_Books_Importance(Base_Importance): # noqa + r""" Given a bag of {1,2}-wordgram model trained on Books p, + and a model trained on the source domain q. This is the logarithm of the + ratio p(doc)/q(doc). If length_correction is enabled, then the length of + score is adjusted by adding the term log(p_poisson(len) / q_poisson(len)) + to the final score. + """ + __slots__ = () + + def __init__( + self, + books_fps: Tuple[str, str], + source_fps: Tuple[str, str], + language: str, + *args, **kwargs # noqa + ): + super(RPS_Doc_Books_Importance, self).__init__( + target_fps=books_fps, + source_fps=source_fps, + language=language, + length_correction=False + ) + + +class RPS_Doc_Books_Importance_Length_Correction( # noqa + Base_Importance +): # noqa + r""" Given a bag of {1,2}-wordgram model trained on Books p, + and a model trained on the source domain q. This is the logarithm of the + ratio p(doc)/q(doc). If length_correction is enabled, then the length of + score is adjusted by adding the term log(p_poisson(len) / q_poisson(len)) + to the final score. Corrects for length by adding a length penalty term. + """ + __slots__ = () + + def __init__( + self, + books_fps: Tuple[str, str], + source_fps: Tuple[str, str], + language: str, + *args, **kwargs # noqa + ): + super(RPS_Doc_Books_Importance_Length_Correction, self).__init__( + target_fps=books_fps, + source_fps=source_fps, + language=language, + length_correction=True + ) + + +class RPS_Doc_OpenWebText_Importance(Base_Importance): # noqa + r""" Given a bag of {1,2}-wordgram model trained on OpenWebText p, + and a model trained on the source domain q. This is the logarithm of the + ratio p(doc)/q(doc). If length_correction is enabled, then the length of + score is adjusted by adding the term log(p_poisson(len) / q_poisson(len)) + to the final score. + """ + __slots__ = () + + def __init__( + self, + openwebtext_fps: Tuple[str, str], + source_fps: Tuple[str, str], + language: str, + *args, **kwargs # noqa + ): + super(RPS_Doc_OpenWebText_Importance, self).__init__( + target_fps=openwebtext_fps, + source_fps=source_fps, + language=language, + length_correction=False + ) + + +class RPS_Doc_OpenWebText_Importance_Length_Correction( # noqa + Base_Importance): # noqa + r""" Given a bag of {1,2}-wordgram model trained on OpenWebText p, + and a model trained on the source domain q. This is the logarithm of the + ratio p(doc)/q(doc). 
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/lines.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/lines.py
new file mode 100644
index 0000000000000000000000000000000000000000..35f916c34c1bae3f16d7e3c77be4b21317bb2185
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/lines.py
@@ -0,0 +1,153 @@
+import sys
+from typing import List, Tuple, Type
+
+from core.constants import PRECISION
+from core.quality_signals.base import RPSBase
+from core.data_types import SignalType, ScoreType, TextSlice
+from core.document import Document
+from utilities.register.registry_utils import *
+
+__all__ = [
+    "register_lines_callables", "lines_schema"
+]
+
+
+def lines_schema() -> List[Tuple[str, Type]]:
+    r""" Returns a list of signal names and their data types """
+    return signal_schema(module=sys.modules[__name__])
+
+
+def register_lines_callables() -> List[RPSBase]:
+    r""" Returns a list of signal functions (i.e., RPSBase instances) that
+    are used to extract line signals from a document.
+
+    Returns:
+        A list of signal function class instances.
+    """
+    return list(map(
+        lambda cls: cls(),
+        get_callables_from_module(module=sys.modules[__name__])
+    ))
+
+
+class RPS_Lines_Javascript_Counts(RPSBase):  # noqa
+    r""" The number of occurrences of the word "javascript" in each line. """
+    SEARCH_TEXT = "javascript"
+    __slots__ = ()
+
+    def _process_line(self, text_slice: TextSlice) -> ScoreType:
+        if len(text_slice.text) == 0:
+            return tuple((text_slice.start, text_slice.end, 0.0))
+
+        score = float(sum(
+            1 for w in text_slice.text.split() if w == self.SEARCH_TEXT
+        ))
+
+        return tuple((text_slice.start, text_slice.end, score))
+
+    def __call__(self, document: Document) -> SignalType:
+        return list(map(self._process_line, document.normalized_lines))
+
+
+class RPS_Lines_Ending_With_Terminal_Punctution_Mark(RPSBase):  # noqa
+    r""" A list of integers indicating whether (1) or not (0) a line ends
+    with a terminal punctuation mark. A terminal punctuation mark is defined
+    as one of the following: ".", "!", "?", "”" """
+    TERMINAL_PUNCTUATION_MARKS = (".", "!", "?", "”")
+    __slots__ = ()
+
+    def _process_line(self, text_slice: TextSlice) -> ScoreType:
+        score = text_slice.text.rstrip().endswith(
+            self.TERMINAL_PUNCTUATION_MARKS
+        )
+        score = float(score)
+        return tuple((text_slice.start, text_slice.end, score))
+
+    def __call__(self, document: Document) -> SignalType:
+        return list(map(self._process_line, document.raw_lines))
+
+
+class RPS_Lines_Num_Words(RPSBase):  # noqa
+    r""" The number of words in each line. This is computed based on the
+    normalized text. Normalization is done by lowercasing the text and
+    removing punctuation. """
+    __slots__ = ()
+
+    def _process_line(self, text_slice: TextSlice) -> ScoreType:  # noqa
+        score = len(text_slice.text.split())
+        return tuple((text_slice.start, text_slice.end, score))
+
+    def __call__(self, document: Document) -> SignalType:
+        return list(map(self._process_line, document.normalized_lines))
+
+
+class RPS_Lines_Uppercase_Letter_Fraction(RPSBase):  # noqa
+    r""" The ratio between the number of uppercase letters and the total
+    number of characters in each line. This is based on the raw text. """
+    __slots__ = ()
+
+    def _process_line(self, text_slice: TextSlice) -> ScoreType:  # noqa
+        if len(text_slice) == 0:
+            return tuple((text_slice.start, text_slice.end, 0.0))
+
+        score = sum(map(str.isupper, text_slice.text)) / len(text_slice)
+        score = round(score, PRECISION)
+        return tuple((text_slice.start, text_slice.end, score))
+
+    def __call__(self, document: Document) -> SignalType:
+        return list(map(self._process_line, document.raw_lines))
+
+
+class RPS_Lines_Numerical_Chars_Fraction(RPSBase):  # noqa
+    r""" The ratio between the number of numerical characters and the total
+    number of characters in each line. This is based on text after
+    lowercasing and removing punctuation. """
+    __slots__ = ()
+
+    def _process_line(self, text_slice: TextSlice) -> ScoreType:  # noqa
+        if len(text_slice) == 0:
+            return tuple((text_slice.start, text_slice.end, 0.0))
+
+        score = sum(map(str.isnumeric, text_slice.text)) / len(text_slice)
+        score = round(score, PRECISION)
+        return tuple((text_slice.start, text_slice.end, score))
+
+    def __call__(self, document: Document) -> SignalType:
+        return list(map(self._process_line, document.normalized_lines))
+
+
+class RPS_Lines_Start_With_Bulletpoint(RPSBase):  # noqa
+    r""" Whether a line starts with a bullet point symbol. The following
+    set of unicode characters are considered bullet points:
+    \u2022 (bullet point), \u2023 (triangular bullet point), \u25B6 (black
+    right pointing triangle), \u25C0 (black left pointing triangle),
+    \u25E6 (white bullet point), \u25A0 (black square), \u25A1 (white
+    square), \u25AA (black small square), \u25AB (white small square),
+    \u2013 (en dash). """
+    BULLET_POINT_SYMBOLS = (
+        "\u2022",  # bullet point
+        "\u2023",  # triangular bullet point
+        "\u25B6",  # black right pointing triangle
+        "\u25C0",  # black left pointing triangle
+        "\u25E6",  # white bullet point
+        "\u25A0",  # black square
+        "\u25A1",  # white square
+        "\u25AA",  # black small square
+        "\u25AB",  # white small square
+        "\u2013",  # en dash
+    )
+
+    __slots__ = ()
+
+    def _process_line(self, text_slice: TextSlice) -> ScoreType:  # noqa
+        score = text_slice.text.lstrip().startswith(self.BULLET_POINT_SYMBOLS)
+        score = float(score)
+        return tuple((text_slice.start, text_slice.end, score))
+
+    def __call__(self, document: Document) -> SignalType:
+        num_lines = len(document.raw_lines)
+
+        if num_lines == 0:
+            return [(0, len(document), None)]
+
+        return list(map(self._process_line, document.raw_lines))
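Unlike the document-level signals, these return one `(start, end, score)` triple per line, so downstream filters can act on individual spans. A sketch of what the terminal-punctuation signal yields on a two-line document, assuming the repo modules and their dependencies (nltk, etc.) are importable — the class name keeps the repo's spelling:

```python
from core.document import Document
from core.quality_signals.lines import \
    RPS_Lines_Ending_With_Terminal_Punctution_Mark

doc = Document("A full sentence.\nno punctuation here\n", domain=None)
signal = RPS_Lines_Ending_With_Terminal_Punctution_Mark()
print(signal(doc))
# [(0, 17, 1.0), (17, 37, 0.0)]
```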
The + following set of unicodes are considered a bullet point: + \u2022 (bullet point), \u2023 (triangular bullet point), \u25B6 (black + right pointing triangle), \u25C0 (black left pointing triangle), + \u25E6 (white bullet point), \u25A0 (black square), \u25A1 (white + square), \u25AA (black small square), \u25AB (white small square), + \u2013 (en dash).""" + BULLET_POINT_SYMBOLS = ( + "\u2022", # bullet point + "\u2023", # triangular bullet point + "\u25B6", # black right pointing triangle + "\u25C0", # black left pointing triangle + "\u25E6", # white bullet point + "\u25A0", # black square + "\u25A1", # white square + "\u25AA", # black small square + "\u25AB", # white small square + "\u2013", # en dash + ) + + __slots__ = () + + def _process_line(self, text_slice: TextSlice) -> ScoreType: # noqa + score = text_slice.text.lstrip().startswith(self.BULLET_POINT_SYMBOLS) + score = float(score) + return tuple((text_slice.start, text_slice.end, score)) + + def __call__(self, document: Document) -> SignalType: + num_lines = len(document.raw_lines) + + if num_lines == 0: + return [(0, len(document), None)] + + return list(map(self._process_line, document.raw_lines)) diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/natural_language.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/natural_language.py new file mode 100644 index 0000000000000000000000000000000000000000..10758fdbad7322c55fb18a29ebb6bd0c54284d94 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/natural_language.py @@ -0,0 +1,197 @@ +from collections import Counter +import math +import re +import sys +from typing import List, Tuple, Type + +from core.constants import PRECISION +from core.data_types import SignalType +from core.quality_signals.base import RPSBase +from core.document import Document +from utilities.register.registry_utils import * + +__all__ = [ + "register_natural_language_callables", + "natural_language_schema" +] + + +def natural_language_schema() -> List[Tuple[str, Type]]: + r""" Returns a list of signal names and their data types """ + return signal_schema(module=sys.modules[__name__]) + + +def register_natural_language_callables() -> List[RPSBase]: + r""" Returns a list of signal functions (i.e., RPSBase instances) that + are used to extract natural language signals from a document. + + Returns: + A list of signal function class instances. + """ + return list(map( + lambda cls: cls(), + get_callables_from_module(module=sys.modules[__name__]) + )) + + +class RPS_Doc_Num_Sentences(RPSBase): # noqa + r""" The number of sentences in the content. This is calculated using + the regex r'\b[^.!?]+[.!?]*' """ + SENT_PATTERN = re.compile(r'\b[^.!?]+[.!?]*', flags=re.UNICODE) + + __slots__ = () + + def __call__(self, document: Document) -> SignalType: + r""" count the number of sentences in the content using regex""" + score = float(len(self.SENT_PATTERN.findall(document.raw_content))) + return [(0, len(document), score)] + + +class RPS_Doc_Word_Count(RPSBase): # noqa + r""" The number of words in the content after normalization. """ + __slots__ = () + + def __call__(self, document: Document) -> SignalType: + return [(0, len(document), document.num_normalized_words)] + + +class RPS_Doc_Mean_Word_Length(RPSBase): # noqa + r""" The mean length of words in the content normalization. 
""" + __slots__ = () + + def __call__(self, document: Document) -> SignalType: + if document.num_normalized_words == 0: + return [(0, len(document), None)] + + num_chars = float(sum(map(len, document.normalized_words))) + score = num_chars / document.num_normalized_words + score = round(score, PRECISION) + return [(0, len(document), score)] + + +class RPS_Doc_Symbol_To_Word_Ratio(RPSBase): # noqa + r""" The ratio of symbols to words in the content. This is analogous to + the signal used in Gopher. Symbols are defined "#", "...", and "…". """ + SYMBOLS = ("#", "...", "…") + + __slots__ = () + + def __call__(self, document: Document) -> SignalType: + num_words = document.num_raw_words + + if num_words == 0: + return [(0, len(document), None)] + + # count the number of symbols in the content + num_symbols = float(sum( + document.raw_content.count(x) for x in self.SYMBOLS + )) + + score = num_symbols / num_words + score = round(score, PRECISION) + return [(0, len(document), score)] + + +class RPS_Doc_Frac_Lines_End_With_Ellipsis(RPSBase): # noqa + r""" The fraction of lines that end with an ellipsis, where an ellipsis + is defined as either "..." or "…". """ + ELLIPSIS_SYMBOLS = ("...", "…") + + __slots__ = () + + def __call__(self, document: Document) -> SignalType: + num_lines = len(document.raw_lines) + + if num_lines == 0: + return [(0, len(document), None)] + + total_ellipsis_lines = float(sum( + text_slice.text.rstrip().endswith(self.ELLIPSIS_SYMBOLS) + for text_slice in document.raw_lines + )) + + score = total_ellipsis_lines / num_lines + score = round(score, PRECISION) + return [(0, len(document), score)] + + +class RPS_Doc_Frac_No_Alph_Words(RPSBase): # noqa + r""" The fraction of words that contain no alphabetical character. + This is based on the raw content. """ + ALPH_REGEX = re.compile(r"[a-zA-Z]") + + __slots__ = () + + def __call__(self, document: Document) -> SignalType: + num_words = document.num_raw_words + + if num_words == 0: + return [(0, len(document), None)] + + num_words_with_alpha = float(sum( + int(self.ALPH_REGEX.search(word) is not None) + for word in document.raw_words + )) + + score = 1.0 - num_words_with_alpha / num_words + score = round(score, PRECISION) + return [(0, len(document), score)] + + +class RPS_Doc_Frac_Unique_Words(RPSBase): # noqa + r""" The fraction of unique words in the content. This is also known as + the degeneracy of a text sample. Calculated based on the normalized + content. """ + __slots__ = () + + def __call__(self, document: Document) -> SignalType: + num_words = document.num_normalized_words + + if num_words == 0: + return [(0, len(document), None)] + + score = float(len(set(document.normalized_words))) / num_words + score = round(score, PRECISION) + return [(0, len(document), score)] + + +class RPS_Doc_Unigram_Entropy(RPSBase): # noqa + r""" The entropy of the unigram distribution of the + content. 
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/repetitions.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/repetitions.py
new file mode 100644
index 0000000000000000000000000000000000000000..ac27f789780765b066542033fd726ccc11808182
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/repetitions.py
@@ -0,0 +1,205 @@
+from collections import Counter
+import numpy as np
+import sys
+from typing import List, Tuple, Type
+
+from core.constants import PRECISION
+from core.quality_signals.base import RPSBase
+from core.document import Document
+from core.data_types import SignalType
+from utilities.register.registry_utils import *
+from utilities.text import form_ngrams
+
+__all__ = [
+    "register_repetitions_callables",
+    "repetitions_schema"
+]
+
+
+def repetitions_schema() -> List[Tuple[str, Type]]:
+    r""" Returns a list of signal names and their data types """
+    return signal_schema(module=sys.modules[__name__])
+
+
+def register_repetitions_callables() -> List[RPSBase]:
+    r""" Returns a list of signal functions (i.e., RPSBase instances) that
+    are used to extract repetition related signals from a document.
+
+    Returns:
+        A list of signal function class instances.
+    """
+    return list(map(
+        lambda cls: cls(),
+        get_callables_from_module(module=sys.modules[__name__])
+    ))
+
+
+class Base_RPS_Frac_Chars_In_Top_NGram(RPSBase):  # noqa
+    r""" Base class for calculating the fraction of characters in the
+    top N-gram. This operates on the lower-cased, punctuation-removed
+    content. """
+    NGRAM_SIZE: int = None
+
+    __slots__ = []
+
+    def __call__(self, document: Document) -> SignalType:
+        if self.NGRAM_SIZE is None:
+            raise NotImplementedError(
+                "NGRAM_SIZE must be set in the subclass"
+            )
+
+        # get the most common ngram
+        most_common_ngram = Counter(
+            # fetch the ngrams from the document if they exist, otherwise
+            # compute them
+            getattr(document, f"norm_{self.NGRAM_SIZE}grams", None)
+            or
+            form_ngrams(iter(document.normalized_words), self.NGRAM_SIZE)
+        ).most_common(1)
+
+        if len(most_common_ngram) == 0:
+            return [(0, len(document), 0.0)]
+
+        ngram, count = most_common_ngram[0]
+
+        if count <= 1:
+            return [(0, len(document), 0.0)]
+
+        total_chars = sum(len(w) for w in document.normalized_words)
+        score = sum(len(w) for w in ngram) * count / total_chars
+        score = round(score, PRECISION)
+        return [(0, len(document), score)]
+
+
+class RPS_Doc_Frac_Chars_Top_2gram(Base_RPS_Frac_Chars_In_Top_NGram):  # noqa
+    r""" The fraction of characters in the top word bigram. Operates on the
+    lower-cased, punctuation-removed content. """
+    NGRAM_SIZE = 2
+    __slots__ = []
+
+
+class RPS_Doc_Frac_Chars_Top_3gram(Base_RPS_Frac_Chars_In_Top_NGram):  # noqa
+    r""" The fraction of characters in the top word trigram. Operates on the
+    lower-cased, punctuation-removed content. """
+    NGRAM_SIZE = 3
+    __slots__ = []
+
+
+class RPS_Doc_Frac_Chars_Top_4gram(Base_RPS_Frac_Chars_In_Top_NGram):  # noqa
+    r""" The fraction of characters in the top word 4gram. Operates on the
+    lower-cased, punctuation-removed content. """
+    NGRAM_SIZE = 4
+    __slots__ = []
+
+
+class Base_RPS_Frac_Chars_In_Dupe_NGrams(RPSBase):  # noqa
+    r""" Base class for calculating the fraction of characters in
+    duplicate word N-grams. This operates on the lower-cased,
+    punctuation-removed content. The function also ensures that characters
+    in overlapping ngrams are only counted once. """
+    NGRAM_SIZE: int = None
+    __slots__ = []
+
+    def __call__(self, document: Document) -> SignalType:
+        if self.NGRAM_SIZE is None:
+            raise NotImplementedError(
+                "NGRAM_SIZE must be set in the subclass"
+            )
+
+        if len(document.normalized_words) < self.NGRAM_SIZE:
+            return [(0, len(document), 0.0)]
+
+        # fetch the ngrams from the document if they exist, otherwise
+        # compute them
+        doc_n_grams = (
+            getattr(document, f"norm_{self.NGRAM_SIZE}grams", None)
+            or
+            tuple(form_ngrams(
+                iter(document.normalized_words), self.NGRAM_SIZE
+            ))
+        )
+
+        # keep only ngrams which occur at least twice
+        ngram_dupes = {
+            ngram for ngram, count in Counter(doc_n_grams).items() if count > 1
+        }
+
+        duplicated_grams = np.zeros(len(document.normalized_words), dtype=int)
+
+        i = 0
+        for ngram in doc_n_grams:
+            if ngram in ngram_dupes:
+                duplicated_grams[i: i + self.NGRAM_SIZE] = 1
+
+            i += 1
+
+        word_lengths = np.array(list(map(len, document.normalized_words)))
+        chars_duped = np.sum(word_lengths * duplicated_grams)
+        total_chars = np.sum(word_lengths)
+
+        if total_chars == 0:
+            return [(0, len(document), 0.0)]
+
+        score = float(chars_duped / total_chars)
+        score = round(score, PRECISION)
+        return [(0, len(document), score)]
+
+
+class RPS_Doc_Frac_Chars_Dupe_5Grams(  # noqa
+    Base_RPS_Frac_Chars_In_Dupe_NGrams
+):
+    r""" The fraction of characters in duplicate word 5grams. This operates
+    on the lower-cased, punctuation-removed content. It is also ensured that
+    characters in overlapping ngrams are only counted once. """
+    NGRAM_SIZE = 5
+    __slots__ = []
+
+
+class RPS_Doc_Frac_Chars_Dupe_6Grams(  # noqa
+    Base_RPS_Frac_Chars_In_Dupe_NGrams
+):
+    r""" The fraction of characters in duplicate word 6grams. This operates
+    on the lower-cased, punctuation-removed content. It is also ensured that
+    characters in overlapping ngrams are only counted once. """
+    NGRAM_SIZE = 6
+    __slots__ = []
+
+
+class RPS_Doc_Frac_Chars_Dupe_7Grams(  # noqa
+    Base_RPS_Frac_Chars_In_Dupe_NGrams
+):
+    r""" The fraction of characters in duplicate word 7grams. This operates
+    on the lower-cased, punctuation-removed content. It is also ensured that
+    characters in overlapping ngrams are only counted once. """
+    NGRAM_SIZE = 7
+    __slots__ = []
+
+
+class RPS_Doc_Frac_Chars_Dupe_8Grams(  # noqa
+    Base_RPS_Frac_Chars_In_Dupe_NGrams
+):
+    r""" The fraction of characters in duplicate word 8grams. This operates
+    on the lower-cased, punctuation-removed content. It is also ensured that
+    characters in overlapping ngrams are only counted once. """
+    NGRAM_SIZE = 8
+    __slots__ = []
+
+
+class RPS_Doc_Frac_Chars_Dupe_9Grams(  # noqa
+    Base_RPS_Frac_Chars_In_Dupe_NGrams
+):
+    r""" The fraction of characters in duplicate word 9grams. This operates
+    on the lower-cased, punctuation-removed content. It is also ensured that
+    characters in overlapping ngrams are only counted once. """
+    NGRAM_SIZE = 9
+    __slots__ = []
+
+
+class RPS_Doc_Frac_Chars_Dupe_10Grams(  # noqa
+    Base_RPS_Frac_Chars_In_Dupe_NGrams
+):
+    r""" The fraction of characters in duplicate word 10grams. This operates
+    on the lower-cased, punctuation-removed content. It is also ensured that
+    characters in overlapping ngrams are only counted once. """
+    NGRAM_SIZE = 10
+    __slots__ = []
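To make the duplicated-n-gram fraction concrete: in "the cat sat the cat sat again", the 3-gram "the cat sat" occurs twice, so six of the seven word slots are flagged and 18 of the 23 characters count as duplicated. A standalone sketch of the same marking scheme used above:

```python
from collections import Counter

import numpy as np

words = "the cat sat the cat sat again".split()
n = 3

ngrams = list(zip(*(words[i:] for i in range(n))))
dupes = {g for g, c in Counter(ngrams).items() if c > 1}

flags = np.zeros(len(words), dtype=int)
for i, gram in enumerate(ngrams):
    if gram in dupes:
        flags[i:i + n] = 1  # overlapping slots are only flagged once

lengths = np.array([len(w) for w in words])
print((lengths * flags).sum() / lengths.sum())  # 18/23 ≈ 0.7826
```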
""" + NGRAM_SIZE = 5 + __slots__ = [] + + +class RPS_Doc_Frac_Chars_Dupe_6Grams( # noqa + Base_RPS_Frac_Chars_In_Dupe_NGrams +): + r""" The fraction of characters in duplicate word 6grams. This operates on + the lower-cased, punctation removed content. It is also ensured that + characters in overlapping ngrams are only counted once. """ + NGRAM_SIZE = 6 + __slots__ = [] + + +class RPS_Doc_Frac_Chars_Dupe_7Grams( # noqa + Base_RPS_Frac_Chars_In_Dupe_NGrams +): + r""" The fraction of characters in duplicate word 7grams. This operates on + the lower-cased, punctation removed content. It is also ensured that + characters in overlapping ngrams are only counted once. """ + NGRAM_SIZE = 7 + __slots__ = [] + + +class RPS_Doc_Frac_Chars_Dupe_8Grams( # noqa + Base_RPS_Frac_Chars_In_Dupe_NGrams +): + r""" The fraction of characters in duplicate word 8grams. This operates on + the lower-cased, punctation removed content. It is also ensured that + characters in overlapping ngrams are only counted once. """ + NGRAM_SIZE = 8 + __slots__ = [] + + +class RPS_Doc_Frac_Chars_Dupe_9Grams( # noqa + Base_RPS_Frac_Chars_In_Dupe_NGrams +): + r""" The fraction of characters in duplicate word 9grams. This operates on + the lower-cased, punctation removed content. It is also ensured that + characters in overlapping ngrams are only counted once. """ + NGRAM_SIZE = 9 + __slots__ = [] + + +class RPS_Doc_Frac_Chars_Dupe_10Grams( # noqa + Base_RPS_Frac_Chars_In_Dupe_NGrams +): + r""" The fraction of characters in duplicate word 10grams. This operates on + the lower-cased, punctation removed content. It is also ensured that + characters in overlapping ngrams are only counted once. """ + NGRAM_SIZE = 10 + __slots__ = [] diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__init__.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/__init__.cpython-310.pyc b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ac33cb349bfeca478c39b7ec6977b51fe15adde2 Binary files /dev/null and b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/__init__.cpython-38.pyc b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..932fda343cb632d2eaa6ac96c60972ca6bb8ec5e Binary files /dev/null and b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/__init__.cpython-38.pyc differ diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/classifiers.cpython-310.pyc b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/classifiers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3074cba368087d69beeb3596c1b0caaed7c13ac5 Binary files /dev/null and b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/classifiers.cpython-310.pyc differ diff --git 
a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/classifiers.cpython-38.pyc b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/classifiers.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6cb48f193cd7a0ff374213138f85e10a7ed02580 Binary files /dev/null and b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/classifiers.cpython-38.pyc differ diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/content.cpython-310.pyc b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/content.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ab2b23c8592d9b3be5a96c54773c0ccc0811b40c Binary files /dev/null and b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/content.cpython-310.pyc differ diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/content.cpython-38.pyc b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/content.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6d6a23ca66c5ad7e038a674ceffeda8beb5ec69f Binary files /dev/null and b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/content.cpython-38.pyc differ diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/dsir.cpython-310.pyc b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/dsir.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c664059e01a1714e33ddfc53d4087ceb9b7c3e33 Binary files /dev/null and b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/dsir.cpython-310.pyc differ diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/dsir.cpython-38.pyc b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/dsir.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1410a2742f9004a35cf29ffeb1796f04c1199767 Binary files /dev/null and b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/dsir.cpython-38.pyc differ diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/stop_words.cpython-310.pyc b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/stop_words.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4061fb15b1122add990d9d4073d90193dfde4964 Binary files /dev/null and b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/stop_words.cpython-310.pyc differ diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/stop_words.cpython-38.pyc b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/stop_words.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..55d879e0ad60f8119f0067c499f622e3d527c424 Binary files /dev/null and b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/__pycache__/stop_words.cpython-38.pyc differ diff --git 
a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/classifiers.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/classifiers.py
new file mode 100644
index 0000000000000000000000000000000000000000..c90a43d41652bb505af3d32d0b4480f03ac49605
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/classifiers.py
@@ -0,0 +1,15 @@
+from core.document import Document
+
+
+def preprocess_quality_classifier(document: Document):
+    r""" Preprocesses a document for quality classification. This function
+    joins all lines of the document with spaces and strips leading and
+    trailing whitespace.
+
+    Args:
+        document: A document.
+
+    Returns:
+        A string.
+    """
+    # replace newlines with spaces, then strip leading/trailing whitespace
+    return " ".join(document.raw_content.splitlines()).strip()
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/content.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/content.py
new file mode 100644
index 0000000000000000000000000000000000000000..1088d1a0566ac2b6d038d69a907d4e89920c485f
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/content.py
@@ -0,0 +1,39 @@
+import json
+from pathlib import Path
+from typing import Dict, Set
+
+_DEFAULT_LANGS = ("en", "fr", "it", "es", "de")
+
+
+def load_bad_urls_index(bad_urls_dir: Path) -> Dict[str, int]:
+    with open(bad_urls_dir / "domain_to_category_id.json", "r") as f:
+        domain_to_category_id = json.load(f)
+    return domain_to_category_id
+
+
+def load_bad_words(bad_words_dir: Path, lang: str) -> Set[str]:
+    r""" load the LDNOOBW word list for a given language
+
+    Source:
+        https://github.com/LDNOOBW/List-of-Dirty-Naughty-Obscene-and-Otherwise-Bad-Words
+
+    Args:
+        bad_words_dir (Path): The path to the resources directory where the
+            list is stored
+        lang (str): The language for which to fetch the word list
+
+    Returns:
+        A set of words
+    """
+    if lang not in _DEFAULT_LANGS:
+        return set()
+
+    ldnoobw_fp = bad_words_dir / f"{lang}.txt"
+
+    if not ldnoobw_fp.exists():
+        raise FileNotFoundError(f"LDNOOBW word list {ldnoobw_fp} not found!")
+
+    with open(ldnoobw_fp, 'r') as f:
+        data = set(ln.strip() for ln in f.readlines())
+
+    return data
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/dsir.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/dsir.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ecf2ff744abd987c83f73186e4750dfc4609a5a
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/dsir.py
@@ -0,0 +1,20 @@
+import numpy as np
+from typing import Tuple
+
+
+def compute_hash(ngram: str, buckets: int):
+    return int(abs(hash(ngram)) % buckets)
+
+
+def hash_feature(
+        unigrams: Tuple[str], bigrams: Tuple[str], buckets: int
+) -> np.ndarray:
+    counts = np.zeros(buckets, dtype=np.int64)
+
+    for unigram in unigrams:
+        counts[compute_hash(unigram, buckets=buckets)] += 1
+
+    for bigram in bigrams:
+        counts[compute_hash(bigram, buckets=buckets)] += 1
+
+    return counts
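A usage sketch of the hashed n-gram featurizer above, assuming the module is importable as `core.quality_signals.utils.dsir` and using an arbitrary bucket count. One caveat worth noting: Python's built-in `hash()` for strings is salted per process unless `PYTHONHASHSEED` is pinned, so the resulting bucket assignments are only comparable within a single run (or across runs started with the same seed):

```python
from core.quality_signals.utils.dsir import hash_feature

unigrams = ("the", "cat", "sat")
bigrams = ("the cat", "cat sat")

counts = hash_feature(unigrams, bigrams, buckets=16)

assert counts.shape == (16,)
assert counts.sum() == len(unigrams) + len(bigrams)  # one increment per n-gram
```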
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/stop_words.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/stop_words.py
new file mode 100644
index 0000000000000000000000000000000000000000..c554a0652a603b83c09fc28b10c3d1006213134d
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/quality_signals/utils/stop_words.py
@@ -0,0 +1,610 @@
+"""
+The stop words in this file are taken from https://github.com/6/stopwords-json
+"""
+
+from typing import Set
+
+__all__ = ["get_stop_words"]
+
+
+def get_stop_words(lang: str) -> Set[str]:
+    # look up the table below; unknown languages (and the empty "hi"
+    # placeholder, which is a dict literal) fall back to an empty set
+    return set(stop_words.get(lang, ()))
+
+
+stop_words = {
+    "bg": {"а", "автентичен", "аз", "ако", "ала", "бе", "без", "беше",
+           "би", "бивш", "бивша", "бившо", "бил", "била", "били", "било",
+           "благодаря", "близо", "бъдат", "бъде", "бяха", "в", "вас",
+           "ваш", "ваша", "вероятно", "вече", "взема", "ви", "вие",
+           "винаги", "внимава", "време", "все", "всеки", "всички",
+           "всичко", "всяка", "във", "въпреки", "върху", "г", "ги",
+           "главен", "главна", "главно", "глас", "го", "година",
+           "години", "годишен", "д", "да", "дали", "два", "двама",
+           "двамата", "две", "двете", "ден", "днес", "дни", "до",
+           "добра", "добре", "добро", "добър", "докато", "докога",
+           "дори", "досега", "доста", "друг", "друга", "други", "е",
+           "евтин", "едва", "един", "една", "еднаква", "еднакви",
+           "еднакъв", "едно", "екип", "ето", "живот", "за", "забавям",
+           "зад", "заедно", "заради", "засега", "заспал", "затова",
+           "защо", "защото", "и", "из", "или", "им", "има", "имат",
+           "иска", "й", "каза", "как", "каква", "какво", "както",
+           "какъв", "като", "кога", "когато", "което", "които", "кой",
+           "който", "колко", "която", "къде", "където", "към", "лесен",
+           "лесно", "ли", "лош", "м", "май", "малко", "ме", "между",
+           "мек", "мен", "месец", "ми", "много", "мнозина", "мога",
+           "могат", "може", "мокър", "моля", "момента", "му", "н", "на",
+           "над", "назад", "най", "направи", "напред", "например", "нас",
+           "не", "него", "нещо", "нея", "ни", "ние", "никой", "нито",
+           "нищо", "но", "нов", "нова", "нови", "новина", "някои",
+           "някой", "няколко", "няма", "обаче", "около", "освен",
+           "особено", "от", "отгоре", "отново", "още", "пак", "по",
+           "повече", "повечето", "под", "поне", "поради", "после",
+           "почти", "прави", "пред", "преди", "през", "при", "пък",
+           "първата", "първи", "първо", "пъти", "равен", "равна", "с",
+           "са", "сам", "само", "се", "сега", "си", "син", "скоро",
+           "след", "следващ", "сме", "смях", "според", "сред", "срещу",
+           "сте", "съм", "със", "също", "т", "т.н.", "тази", "така",
+           "такива", "такъв", "там", "твой", "те", "тези", "ти", "то",
+           "това", "тогава", "този", "той", "толкова", "точно", "три",
+           "трябва", "тук", "тъй", "тя", "тях", "у", "утре", "харесва",
+           "хиляди", "ч", "часа", "че", "често", "чрез", "ще", "щом",
+           "юмрук", "я", "як"},
+    "de": {"Ernst", "Ordnung", "Schluss", "a", "ab", "aber", "ach", "acht",
+           "achte", "achten", "achter", "achtes", "ag", "alle", "allein",
+           "allem", "allen", "aller", "allerdings", "alles", "allgemeinen",
+           "als", "also", "am", "an", "andere", "anderen", "andern", "anders",
+           "au", "auch", "auf", "aus", "ausser", "ausserdem", "außer",
+           "außerdem", "b", "bald", "bei", "beide", "beiden", "beim",
+           "beispiel", "bekannt", "bereits", "besonders", "besser", "besten",
+           "bin", "bis", "bisher", "bist", "c", "d", "d.h", "da", "dabei",
+           "dadurch", "dafür", "dagegen", "daher", "dahin", "dahinter",
+           "damals", "damit", "danach", "daneben", "dank", "dann", "daran",
+           "darauf", "daraus", "darf", "darfst", "darin", "darum", "darunter",
+           "darüber", "das", "dasein", "daselbst", "dass", "dasselbe", "davon",
+           "davor", "dazu", "dazwischen", "daß", "dein", "deine", "deinem",
+           "deiner", "dem", "dementsprechend", "demgegenüber", "demgemäss",
+           "demgemäß", "demselben", "demzufolge", "den", "denen", "denn",
+           "denselben", "der",
"deren", "derjenige", "derjenigen", "dermassen", + "dermaßen", "derselbe", "derselben", "des", "deshalb", "desselben", + "dessen", "deswegen", "dich", "die", "diejenige", "diejenigen", + "dies", "diese", "dieselbe", "dieselben", "diesem", "diesen", + "dieser", "dieses", "dir", "doch", "dort", "drei", "drin", "dritte", + "dritten", "dritter", "drittes", "du", "durch", "durchaus", + "durfte", "durften", "dürfen", "dürft", "e", "eben", "ebenso", + "ehrlich", "ei", "ei,", "eigen", "eigene", "eigenen", "eigener", + "eigenes", "ein", "einander", "eine", "einem", "einen", "einer", + "eines", "einige", "einigen", "einiger", "einiges", "einmal", + "eins", "elf", "en", "ende", "endlich", "entweder", "er", "erst", + "erste", "ersten", "erster", "erstes", "es", "etwa", "etwas", + "euch", "euer", "eure", "f", "folgende", "früher", "fünf", "fünfte", + "fünften", "fünfter", "fünftes", "für", "g", "gab", "ganz", "ganze", + "ganzen", "ganzer", "ganzes", "gar", "gedurft", "gegen", + "gegenüber", "gehabt", "gehen", "geht", "gekannt", "gekonnt", + "gemacht", "gemocht", "gemusst", "genug", "gerade", "gern", + "gesagt", "geschweige", "gewesen", "gewollt", "geworden", "gibt", + "ging", "gleich", "gott", "gross", "grosse", "grossen", "grosser", + "grosses", "groß", "große", "großen", "großer", "großes", "gut", + "gute", "guter", "gutes", "h", "habe", "haben", "habt", "hast", + "hat", "hatte", "hatten", "hattest", "hattet", "heisst", "her", + "heute", "hier", "hin", "hinter", "hoch", "hätte", "hätten", "i", + "ich", "ihm", "ihn", "ihnen", "ihr", "ihre", "ihrem", "ihren", + "ihrer", "ihres", "im", "immer", "in", "indem", "infolgedessen", + "ins", "irgend", "ist", "j", "ja", "jahr", "jahre", "jahren", "je", + "jede", "jedem", "jeden", "jeder", "jedermann", "jedermanns", + "jedes", "jedoch", "jemand", "jemandem", "jemanden", "jene", + "jenem", "jenen", "jener", "jenes", "jetzt", "k", "kam", "kann", + "kannst", "kaum", "kein", "keine", "keinem", "keinen", "keiner", + "kleine", "kleinen", "kleiner", "kleines", "kommen", "kommt", + "konnte", "konnten", "kurz", "können", "könnt", "könnte", "l", + "lang", "lange", "leicht", "leide", "lieber", "los", "m", "machen", + "macht", "machte", "mag", "magst", "mahn", "mal", "man", "manche", + "manchem", "manchen", "mancher", "manches", "mann", "mehr", "mein", + "meine", "meinem", "meinen", "meiner", "meines", "mensch", + "menschen", "mich", "mir", "mit", "mittel", "mochte", "mochten", + "morgen", "muss", "musst", "musste", "mussten", "muß", "mußt", + "möchte", "mögen", "möglich", "mögt", "müssen", "müsst", "müßt", + "n", "na", "nach", "nachdem", "nahm", "natürlich", "neben", "nein", + "neue", "neuen", "neun", "neunte", "neunten", "neunter", "neuntes", + "nicht", "nichts", "nie", "niemand", "niemandem", "niemanden", + "noch", "nun", "nur", "o", "ob", "oben", "oder", "offen", "oft", + "ohne", "p", "q", "r", "recht", "rechte", "rechten", "rechter", + "rechtes", "richtig", "rund", "s", "sa", "sache", "sagt", "sagte", + "sah", "satt", "schlecht", "schon", "sechs", "sechste", "sechsten", + "sechster", "sechstes", "sehr", "sei", "seid", "seien", "sein", + "seine", "seinem", "seinen", "seiner", "seines", "seit", "seitdem", + "selbst", "sich", "sie", "sieben", "siebente", "siebenten", + "siebenter", "siebentes", "sind", "so", "solang", "solche", + "solchem", "solchen", "solcher", "solches", "soll", "sollen", + "sollst", "sollt", "sollte", "sollten", "sondern", "sonst", + "soweit", "sowie", "später", "startseite", "statt", "steht", + "suche", "t", "tag", "tage", "tagen", "tat", "teil", 
"tel", "tritt", + "trotzdem", "tun", "u", "uhr", "um", "und", "und?", "uns", "unser", + "unsere", "unserer", "unter", "v", "vergangenen", "viel", "viele", + "vielem", "vielen", "vielleicht", "vier", "vierte", "vierten", + "vierter", "viertes", "vom", "von", "vor", "w", "wahr?", "wann", + "war", "waren", "wart", "warum", "was", "wegen", "weil", "weit", + "weiter", "weitere", "weiteren", "weiteres", "welche", "welchem", + "welchen", "welcher", "welches", "wem", "wen", "wenig", "wenige", + "weniger", "weniges", "wenigstens", "wenn", "wer", "werde", + "werden", "werdet", "weshalb", "wessen", "wie", "wieder", "wieso", + "will", "willst", "wir", "wird", "wirklich", "wirst", "wissen", + "wo", "wohl", "wollen", "wollt", "wollte", "wollten", "worden", + "wurde", "wurden", "während", "währenddem", "währenddessen", "wäre", + "würde", "würden", "x", "y", "z", "z.b", "zehn", "zehnte", + "zehnten", "zehnter", "zehntes", "zeit", "zu", "zuerst", "zugleich", + "zum", "zunächst", "zur", "zurück", "zusammen", "zwanzig", "zwar", + "zwei", "zweite", "zweiten", "zweiter", "zweites", "zwischen", + "zwölf", "über", "überhaupt", "übrigens"}, + "en": {"a", "a's", "able", "about", "above", "according", "accordingly", + "across", "actually", "after", "afterwards", "again", "against", + "ain't", "all", "allow", "allows", "almost", "alone", "along", + "already", "also", "although", "always", "am", "among", "amongst", + "an", "and", "another", "any", "anybody", "anyhow", "anyone", + "anything", "anyway", "anyways", "anywhere", "apart", "appear", + "appreciate", "appropriate", "are", "aren't", "around", "as", + "aside", "ask", "asking", "associated", "at", "available", "away", + "awfully", "b", "be", "became", "because", "become", "becomes", + "becoming", "been", "before", "beforehand", "behind", "being", + "believe", "below", "beside", "besides", "best", "better", + "between", "beyond", "both", "brief", "but", "by", "c", "c'mon", + "c's", "came", "can", "can't", "cannot", "cant", "cause", "causes", + "certain", "certainly", "changes", "clearly", "co", "com", "come", + "comes", "concerning", "consequently", "consider", "considering", + "contain", "containing", "contains", "corresponding", "could", + "couldn't", "course", "currently", "d", "definitely", "described", + "despite", "did", "didn't", "different", "do", "does", "doesn't", + "doing", "don't", "done", "down", "downwards", "during", "e", + "each", "edu", "eg", "eight", "either", "else", "elsewhere", + "enough", "entirely", "especially", "et", "etc", "even", "ever", + "every", "everybody", "everyone", "everything", "everywhere", "ex", + "exactly", "example", "except", "f", "far", "few", "fifth", "first", + "five", "followed", "following", "follows", "for", "former", + "formerly", "forth", "four", "from", "further", "furthermore", "g", + "get", "gets", "getting", "given", "gives", "go", "goes", "going", + "gone", "got", "gotten", "greetings", "h", "had", "hadn't", + "happens", "hardly", "has", "hasn't", "have", "haven't", "having", + "he", "he's", "hello", "help", "hence", "her", "here", "here's", + "hereafter", "hereby", "herein", "hereupon", "hers", "herself", + "hi", "him", "himself", "his", "hither", "hopefully", "how", + "howbeit", "however", "i", "i'd", "i'll", "i'm", "i've", "ie", "if", + "ignored", "immediate", "in", "inasmuch", "inc", "indeed", + "indicate", "indicated", "indicates", "inner", "insofar", "instead", + "into", "inward", "is", "isn't", "it", "it'd", "it'll", "it's", + "its", "itself", "j", "just", "k", "keep", "keeps", "kept", "know", + 
"known", "knows", "l", "last", "lately", "later", "latter", + "latterly", "least", "less", "lest", "let", "let's", "like", + "liked", "likely", "little", "look", "looking", "looks", "ltd", "m", + "mainly", "many", "may", "maybe", "me", "mean", "meanwhile", + "merely", "might", "more", "moreover", "most", "mostly", "much", + "must", "my", "myself", "n", "name", "namely", "nd", "near", + "nearly", "necessary", "need", "needs", "neither", "never", + "nevertheless", "new", "next", "nine", "no", "nobody", "non", + "none", "noone", "nor", "normally", "not", "nothing", "novel", + "now", "nowhere", "o", "obviously", "of", "off", "often", "oh", + "ok", "okay", "old", "on", "once", "one", "ones", "only", "onto", + "or", "other", "others", "otherwise", "ought", "our", "ours", + "ourselves", "out", "outside", "over", "overall", "own", "p", + "particular", "particularly", "per", "perhaps", "placed", "please", + "plus", "possible", "presumably", "probably", "provides", "q", + "que", "quite", "qv", "r", "rather", "rd", "re", "really", + "reasonably", "regarding", "regardless", "regards", "relatively", + "respectively", "right", "s", "said", "same", "saw", "say", + "saying", "says", "second", "secondly", "see", "seeing", "seem", + "seemed", "seeming", "seems", "seen", "self", "selves", "sensible", + "sent", "serious", "seriously", "seven", "several", "shall", "she", + "should", "shouldn't", "since", "six", "so", "some", "somebody", + "somehow", "someone", "something", "sometime", "sometimes", + "somewhat", "somewhere", "soon", "sorry", "specified", "specify", + "specifying", "still", "sub", "such", "sup", "sure", "t", "t's", + "take", "taken", "tell", "tends", "th", "than", "thank", "thanks", + "thanx", "that", "that's", "thats", "the", "their", "theirs", + "them", "themselves", "then", "thence", "there", "there's", + "thereafter", "thereby", "therefore", "therein", "theres", + "thereupon", "these", "they", "they'd", "they'll", "they're", + "they've", "think", "third", "this", "thorough", "thoroughly", + "those", "though", "three", "through", "throughout", "thru", "thus", + "to", "together", "too", "took", "toward", "towards", "tried", + "tries", "truly", "try", "trying", "twice", "two", "u", "un", + "under", "unfortunately", "unless", "unlikely", "until", "unto", + "up", "upon", "us", "use", "used", "useful", "uses", "using", + "usually", "uucp", "v", "value", "various", "very", "via", "viz", + "vs", "w", "want", "wants", "was", "wasn't", "way", "we", "we'd", + "we'll", "we're", "we've", "welcome", "well", "went", "were", + "weren't", "what", "what's", "whatever", "when", "whence", + "whenever", "where", "where's", "whereafter", "whereas", "whereby", + "wherein", "whereupon", "wherever", "whether", "which", "while", + "whither", "who", "who's", "whoever", "whole", "whom", "whose", + "why", "will", "willing", "wish", "with", "within", "without", + "won't", "wonder", "would", "wouldn't", "x", "y", "yes", "yet", + "you", "you'd", "you'll", "you're", "you've", "your", "yours", + "yourself", "yourselves", "z", "zero"}, + "es": {"a", "actualmente", "acuerdo", "adelante", "ademas", "además", + "adrede", "afirmó", "agregó", "ahi", "ahora", "ahí", "al", "algo", + "alguna", "algunas", "alguno", "algunos", "algún", "alli", "allí", + "alrededor", "ambos", "ampleamos", "antano", "antaño", "ante", + "anterior", "antes", "apenas", "aproximadamente", "aquel", + "aquella", "aquellas", "aquello", "aquellos", "aqui", "aquél", + "aquélla", "aquéllas", "aquéllos", "aquí", "arriba", "arribaabajo", + "aseguró", "asi", "así", 
"atras", "aun", "aunque", "ayer", "añadió", + "aún", "b", "bajo", "bastante", "bien", "breve", "buen", "buena", + "buenas", "bueno", "buenos", "c", "cada", "casi", "cerca", "cierta", + "ciertas", "cierto", "ciertos", "cinco", "claro", "comentó", "como", + "con", "conmigo", "conocer", "conseguimos", "conseguir", + "considera", "consideró", "consigo", "consigue", "consiguen", + "consigues", "contigo", "contra", "cosas", "creo", "cual", "cuales", + "cualquier", "cuando", "cuanta", "cuantas", "cuanto", "cuantos", + "cuatro", "cuenta", "cuál", "cuáles", "cuándo", "cuánta", "cuántas", + "cuánto", "cuántos", "cómo", "d", "da", "dado", "dan", "dar", "de", + "debajo", "debe", "deben", "debido", "decir", "dejó", "del", + "delante", "demasiado", "demás", "dentro", "deprisa", "desde", + "despacio", "despues", "después", "detras", "detrás", "dia", "dias", + "dice", "dicen", "dicho", "dieron", "diferente", "diferentes", + "dijeron", "dijo", "dio", "donde", "dos", "durante", "día", "días", + "dónde", "e", "ejemplo", "el", "ella", "ellas", "ello", "ellos", + "embargo", "empleais", "emplean", "emplear", "empleas", "empleo", + "en", "encima", "encuentra", "enfrente", "enseguida", "entonces", + "entre", "era", "eramos", "eran", "eras", "eres", "es", "esa", + "esas", "ese", "eso", "esos", "esta", "estaba", "estaban", "estado", + "estados", "estais", "estamos", "estan", "estar", "estará", "estas", + "este", "esto", "estos", "estoy", "estuvo", "está", "están", "ex", + "excepto", "existe", "existen", "explicó", "expresó", "f", "fin", + "final", "fue", "fuera", "fueron", "fui", "fuimos", "g", "general", + "gran", "grandes", "gueno", "h", "ha", "haber", "habia", "habla", + "hablan", "habrá", "había", "habían", "hace", "haceis", "hacemos", + "hacen", "hacer", "hacerlo", "haces", "hacia", "haciendo", "hago", + "han", "hasta", "hay", "haya", "he", "hecho", "hemos", "hicieron", + "hizo", "horas", "hoy", "hubo", "i", "igual", "incluso", "indicó", + "informo", "informó", "intenta", "intentais", "intentamos", + "intentan", "intentar", "intentas", "intento", "ir", "j", "junto", + "k", "l", "la", "lado", "largo", "las", "le", "lejos", "les", + "llegó", "lleva", "llevar", "lo", "los", "luego", "lugar", "m", + "mal", "manera", "manifestó", "mas", "mayor", "me", "mediante", + "medio", "mejor", "mencionó", "menos", "menudo", "mi", "mia", + "mias", "mientras", "mio", "mios", "mis", "misma", "mismas", + "mismo", "mismos", "modo", "momento", "mucha", "muchas", "mucho", + "muchos", "muy", "más", "mí", "mía", "mías", "mío", "míos", "n", + "nada", "nadie", "ni", "ninguna", "ningunas", "ninguno", "ningunos", + "ningún", "no", "nos", "nosotras", "nosotros", "nuestra", + "nuestras", "nuestro", "nuestros", "nueva", "nuevas", "nuevo", + "nuevos", "nunca", "o", "ocho", "os", "otra", "otras", "otro", + "otros", "p", "pais", "para", "parece", "parte", "partir", "pasada", + "pasado", "paìs", "peor", "pero", "pesar", "poca", "pocas", "poco", + "pocos", "podeis", "podemos", "poder", "podria", "podriais", + "podriamos", "podrian", "podrias", "podrá", "podrán", "podría", + "podrían", "poner", "por", "porque", "posible", "primer", "primera", + "primero", "primeros", "principalmente", "pronto", "propia", + "propias", "propio", "propios", "proximo", "próximo", "próximos", + "pudo", "pueda", "puede", "pueden", "puedo", "pues", "q", "qeu", + "que", "quedó", "queremos", "quien", "quienes", "quiere", "quiza", + "quizas", "quizá", "quizás", "quién", "quiénes", "qué", "r", + "raras", "realizado", "realizar", "realizó", "repente", "respecto", + "s", 
"sabe", "sabeis", "sabemos", "saben", "saber", "sabes", + "salvo", "se", "sea", "sean", "segun", "segunda", "segundo", + "según", "seis", "ser", "sera", "será", "serán", "sería", "señaló", + "si", "sido", "siempre", "siendo", "siete", "sigue", "siguiente", + "sin", "sino", "sobre", "sois", "sola", "solamente", "solas", + "solo", "solos", "somos", "son", "soy", "soyos", "su", "supuesto", + "sus", "suya", "suyas", "suyo", "sé", "sí", "sólo", "t", "tal", + "tambien", "también", "tampoco", "tan", "tanto", "tarde", "te", + "temprano", "tendrá", "tendrán", "teneis", "tenemos", "tener", + "tenga", "tengo", "tenido", "tenía", "tercera", "ti", "tiempo", + "tiene", "tienen", "toda", "todas", "todavia", "todavía", "todo", + "todos", "total", "trabaja", "trabajais", "trabajamos", "trabajan", + "trabajar", "trabajas", "trabajo", "tras", "trata", "través", + "tres", "tu", "tus", "tuvo", "tuya", "tuyas", "tuyo", "tuyos", "tú", + "u", "ultimo", "un", "una", "unas", "uno", "unos", "usa", "usais", + "usamos", "usan", "usar", "usas", "uso", "usted", "ustedes", "v", + "va", "vais", "valor", "vamos", "van", "varias", "varios", "vaya", + "veces", "ver", "verdad", "verdadera", "verdadero", "vez", + "vosotras", "vosotros", "voy", "vuestra", "vuestras", "vuestro", + "vuestros", "w", "x", "y", "ya", "yo", "z", "él", "ésa", "ésas", + "ése", "ésos", "ésta", "éstas", "éste", "éstos", "última", + "últimas", "último", "últimos"}, + "fi": {"aiemmin", "aika", "aikaa", "aikaan", "aikaisemmin", "aikaisin", + "aikajen", "aikana", "aikoina", "aikoo", "aikovat", "aina", + "ainakaan", "ainakin", "ainoa", "ainoat", "aiomme", "aion", + "aiotte", "aist", "aivan", "ajan", "alas", "alemmas", "alkuisin", + "alkuun", "alla", "alle", "aloitamme", "aloitan", "aloitat", + "aloitatte", "aloitattivat", "aloitettava", "aloitettevaksi", + "aloitettu", "aloitimme", "aloitin", "aloitit", "aloititte", + "aloittaa", "aloittamatta", "aloitti", "aloittivat", "alta", + "aluksi", "alussa", "alusta", "annettavaksi", "annetteva", + "annettu", "ansiosta", "antaa", "antamatta", "antoi", "aoua", "apu", + "asia", "asiaa", "asian", "asiasta", "asiat", "asioiden", + "asioihin", "asioita", "asti", "avuksi", "avulla", "avun", "avutta", + "edelle", "edelleen", "edellä", "edeltä", "edemmäs", "edes", + "edessä", "edestä", "ehkä", "ei", "eikä", "eilen", "eivät", "eli", + "ellei", "elleivät", "ellemme", "ellen", "ellet", "ellette", "emme", + "en", "enemmän", "eniten", "ennen", "ensi", "ensimmäinen", + "ensimmäiseksi", "ensimmäisen", "ensimmäisenä", "ensimmäiset", + "ensimmäisiksi", "ensimmäisinä", "ensimmäisiä", "ensimmäistä", + "ensin", "entinen", "entisen", "entisiä", "entisten", "entistä", + "enää", "eri", "erittäin", "erityisesti", "eräiden", "eräs", + "eräät", "esi", "esiin", "esillä", "esimerkiksi", "et", "eteen", + "etenkin", "etessa", "ette", "ettei", "että", "haikki", "halua", + "haluaa", "haluamatta", "haluamme", "haluan", "haluat", "haluatte", + "haluavat", "halunnut", "halusi", "halusimme", "halusin", "halusit", + "halusitte", "halusivat", "halutessa", "haluton", "he", "hei", + "heidän", "heihin", "heille", "heiltä", "heissä", "heistä", "heitä", + "helposti", "heti", "hetkellä", "hieman", "hitaasti", "hoikein", + "huolimatta", "huomenna", "hyvien", "hyviin", "hyviksi", "hyville", + "hyviltä", "hyvin", "hyvinä", "hyvissä", "hyvistä", "hyviä", "hyvä", + "hyvät", "hyvää", "hän", "häneen", "hänelle", "hänellä", "häneltä", + "hänen", "hänessä", "hänestä", "hänet", "ihan", "ilman", + "ilmeisesti", "itse", "itsensä", "itseään", "ja", "jo", "johon", + 
"joiden", "joihin", "joiksi", "joilla", "joille", "joilta", + "joissa", "joista", "joita", "joka", "jokainen", "jokin", "joko", + "joku", "jolla", "jolle", "jolloin", "jolta", "jompikumpi", "jonka", + "jonkin", "jonne", "joo", "jopa", "jos", "joskus", "jossa", "josta", + "jota", "jotain", "joten", "jotenkin", "jotenkuten", "jotka", + "jotta", "jouduimme", "jouduin", "jouduit", "jouduitte", "joudumme", + "joudun", "joudutte", "joukkoon", "joukossa", "joukosta", "joutua", + "joutui", "joutuivat", "joutumaan", "joutuu", "joutuvat", "juuri", + "jälkeen", "jälleen", "jää", "kahdeksan", "kahdeksannen", + "kahdella", "kahdelle", "kahdelta", "kahden", "kahdessa", + "kahdesta", "kahta", "kahteen", "kai", "kaiken", "kaikille", + "kaikilta", "kaikkea", "kaikki", "kaikkia", "kaikkiaan", + "kaikkialla", "kaikkialle", "kaikkialta", "kaikkien", "kaikkin", + "kaksi", "kannalta", "kannattaa", "kanssa", "kanssaan", "kanssamme", + "kanssani", "kanssanne", "kanssasi", "kauan", "kauemmas", "kaukana", + "kautta", "kehen", "keiden", "keihin", "keiksi", "keille", "keillä", + "keiltä", "keinä", "keissä", "keistä", "keitten", "keittä", "keitä", + "keneen", "keneksi", "kenelle", "kenellä", "keneltä", "kenen", + "kenenä", "kenessä", "kenestä", "kenet", "kenettä", "kennessästä", + "kenties", "kerran", "kerta", "kertaa", "keskellä", "kesken", + "keskimäärin", "ketkä", "ketä", "kiitos", "kohti", "koko", + "kokonaan", "kolmas", "kolme", "kolmen", "kolmesti", "koska", + "koskaan", "kovin", "kuin", "kuinka", "kuinkan", "kuitenkaan", + "kuitenkin", "kuka", "kukaan", "kukin", "kukka", "kumpainen", + "kumpainenkaan", "kumpi", "kumpikaan", "kumpikin", "kun", "kuten", + "kuuden", "kuusi", "kuutta", "kylliksi", "kyllä", "kymmenen", + "kyse", "liian", "liki", "lisäksi", "lisää", "lla", "luo", "luona", + "lähekkäin", "lähelle", "lähellä", "läheltä", "lähemmäs", "lähes", + "lähinnä", "lähtien", "läpi", "mahdollisimman", "mahdollista", "me", + "meidän", "meille", "meillä", "melkein", "melko", "menee", "meneet", + "menemme", "menen", "menet", "menette", "menevät", "meni", + "menimme", "menin", "menit", "menivät", "mennessä", "mennyt", + "menossa", "mihin", "mikin", "miksi", "mikä", "mikäli", "mikään", + "milloin", "milloinkan", "minne", "minun", "minut", "minä", "missä", + "mistä", "miten", "mitä", "mitään", "moi", "molemmat", "mones", + "monesti", "monet", "moni", "moniaalla", "moniaalle", "moniaalta", + "monta", "muassa", "muiden", "muita", "muka", "mukaan", "mukaansa", + "mukana", "mutta", "muu", "muualla", "muualle", "muualta", + "muuanne", "muulloin", "muun", "muut", "muuta", "muutama", + "muutaman", "muuten", "myöhemmin", "myös", "myöskin", "myöskään", + "myötä", "ne", "neljä", "neljän", "neljää", "niiden", "niin", + "niistä", "niitä", "noin", "nopeammin", "nopeasti", "nopeiten", + "nro", "nuo", "nyt", "näiden", "näin", "näissä", "näissähin", + "näissälle", "näissältä", "näissästä", "näitä", "nämä", "ohi", + "oikea", "oikealla", "oikein", "ole", "olemme", "olen", "olet", + "olette", "oleva", "olevan", "olevat", "oli", "olimme", "olin", + "olisi", "olisimme", "olisin", "olisit", "olisitte", "olisivat", + "olit", "olitte", "olivat", "olla", "olleet", "olli", "ollut", + "oma", "omaa", "omaan", "omaksi", "omalle", "omalta", "oman", + "omassa", "omat", "omia", "omien", "omiin", "omiksi", "omille", + "omilta", "omissa", "omista", "on", "onkin", "onko", "ovat", + "paikoittain", "paitsi", "pakosti", "paljon", "paremmin", "parempi", + "parhaillaan", "parhaiten", "perusteella", "peräti", "pian", + "pieneen", "pieneksi", "pienelle", 
"pienellä", "pieneltä", + "pienempi", "pienestä", "pieni", "pienin", "puolesta", "puolestaan", + "päälle", "runsaasti", "saakka", "sadam", "sama", "samaa", "samaan", + "samalla", "samallalta", "samallassa", "samallasta", "saman", + "samat", "samoin", "sata", "sataa", "satojen", "se", "seitsemän", + "sekä", "sen", "seuraavat", "siellä", "sieltä", "siihen", "siinä", + "siis", "siitä", "sijaan", "siksi", "silloin", "sillä", "silti", + "sinne", "sinua", "sinulle", "sinulta", "sinun", "sinussa", + "sinusta", "sinut", "sinä", "sisäkkäin", "sisällä", "siten", + "sitten", "sitä", "ssa", "sta", "suoraan", "suuntaan", "suuren", + "suuret", "suuri", "suuria", "suurin", "suurten", "taa", "taas", + "taemmas", "tahansa", "tai", "takaa", "takaisin", "takana", "takia", + "tapauksessa", "tarpeeksi", "tavalla", "tavoitteena", "te", + "tietysti", "todella", "toinen", "toisaalla", "toisaalle", + "toisaalta", "toiseen", "toiseksi", "toisella", "toiselle", + "toiselta", "toisemme", "toisen", "toisensa", "toisessa", + "toisesta", "toista", "toistaiseksi", "toki", "tosin", "tuhannen", + "tuhat", "tule", "tulee", "tulemme", "tulen", "tulet", "tulette", + "tulevat", "tulimme", "tulin", "tulisi", "tulisimme", "tulisin", + "tulisit", "tulisitte", "tulisivat", "tulit", "tulitte", "tulivat", + "tulla", "tulleet", "tullut", "tuntuu", "tuo", "tuolla", "tuolloin", + "tuolta", "tuonne", "tuskin", "tykö", "tähän", "tällä", "tällöin", + "tämä", "tämän", "tänne", "tänä", "tänään", "tässä", "tästä", + "täten", "tätä", "täysin", "täytyvät", "täytyy", "täällä", "täältä", + "ulkopuolella", "usea", "useasti", "useimmiten", "usein", "useita", + "uudeksi", "uudelleen", "uuden", "uudet", "uusi", "uusia", "uusien", + "uusinta", "uuteen", "uutta", "vaan", "vahemmän", "vai", + "vaiheessa", "vaikea", "vaikean", "vaikeat", "vaikeilla", + "vaikeille", "vaikeilta", "vaikeissa", "vaikeista", "vaikka", + "vain", "varmasti", "varsin", "varsinkin", "varten", "vasen", + "vasenmalla", "vasta", "vastaan", "vastakkain", "vastan", "verran", + "vielä", "vierekkäin", "vieressä", "vieri", "viiden", "viime", + "viimeinen", "viimeisen", "viimeksi", "viisi", "voi", "voidaan", + "voimme", "voin", "voisi", "voit", "voitte", "voivat", "vuoden", + "vuoksi", "vuosi", "vuosien", "vuosina", "vuotta", "vähemmän", + "vähintään", "vähiten", "vähän", "välillä", "yhdeksän", "yhden", + "yhdessä", "yhteen", "yhteensä", "yhteydessä", "yhteyteen", "yhtä", + "yhtäälle", "yhtäällä", "yhtäältä", "yhtään", "yhä", "yksi", + "yksin", "yksittäin", "yleensä", "ylemmäs", "yli", "ylös", "ympäri", + "älköön", "älä"}, + "fr": {"a", "abord", "absolument", "afin", "ah", "ai", "aie", "ailleurs", + "ainsi", "ait", "allaient", "allo", "allons", "allô", "alors", + "anterieur", "anterieure", "anterieures", "apres", "après", "as", + "assez", "attendu", "au", "aucun", "aucune", "aujourd", + "aujourd'hui", "aupres", "auquel", "aura", "auraient", "aurait", + "auront", "aussi", "autre", "autrefois", "autrement", "autres", + "autrui", "aux", "auxquelles", "auxquels", "avaient", "avais", + "avait", "avant", "avec", "avoir", "avons", "ayant", "b", "bah", + "bas", "basee", "bat", "beau", "beaucoup", "bien", "bigre", "boum", + "bravo", "brrr", "c", "car", "ce", "ceci", "cela", "celle", + "celle-ci", "celle-là", "celles", "celles-ci", "celles-là", "celui", + "celui-ci", "celui-là", "cent", "cependant", "certain", "certaine", + "certaines", "certains", "certes", "ces", "cet", "cette", "ceux", + "ceux-ci", "ceux-là", "chacun", "chacune", "chaque", "cher", + "chers", "chez", "chiche", "chut", "chère", 
"chères", "ci", "cinq", + "cinquantaine", "cinquante", "cinquantième", "cinquième", "clac", + "clic", "combien", "comme", "comment", "comparable", "comparables", + "compris", "concernant", "contre", "couic", "crac", "d", "da", + "dans", "de", "debout", "dedans", "dehors", "deja", "delà", + "depuis", "dernier", "derniere", "derriere", "derrière", "des", + "desormais", "desquelles", "desquels", "dessous", "dessus", "deux", + "deuxième", "deuxièmement", "devant", "devers", "devra", + "different", "differentes", "differents", "différent", "différente", + "différentes", "différents", "dire", "directe", "directement", + "dit", "dite", "dits", "divers", "diverse", "diverses", "dix", + "dix-huit", "dix-neuf", "dix-sept", "dixième", "doit", "doivent", + "donc", "dont", "douze", "douzième", "dring", "du", "duquel", + "durant", "dès", "désormais", "e", "effet", "egale", "egalement", + "egales", "eh", "elle", "elle-même", "elles", "elles-mêmes", "en", + "encore", "enfin", "entre", "envers", "environ", "es", "est", "et", + "etant", "etc", "etre", "eu", "euh", "eux", "eux-mêmes", + "exactement", "excepté", "extenso", "exterieur", "f", "fais", + "faisaient", "faisant", "fait", "façon", "feront", "fi", "flac", + "floc", "font", "g", "gens", "h", "ha", "hein", "hem", "hep", "hi", + "ho", "holà", "hop", "hormis", "hors", "hou", "houp", "hue", "hui", + "huit", "huitième", "hum", "hurrah", "hé", "hélas", "i", "il", + "ils", "importe", "j", "je", "jusqu", "jusque", "juste", "k", "l", + "la", "laisser", "laquelle", "las", "le", "lequel", "les", + "lesquelles", "lesquels", "leur", "leurs", "longtemps", "lors", + "lorsque", "lui", "lui-meme", "lui-même", "là", "lès", "m", "ma", + "maint", "maintenant", "mais", "malgre", "malgré", "maximale", "me", + "meme", "memes", "merci", "mes", "mien", "mienne", "miennes", + "miens", "mille", "mince", "minimale", "moi", "moi-meme", + "moi-même", "moindres", "moins", "mon", "moyennant", "multiple", + "multiples", "même", "mêmes", "n", "na", "naturel", "naturelle", + "naturelles", "ne", "neanmoins", "necessaire", "necessairement", + "neuf", "neuvième", "ni", "nombreuses", "nombreux", "non", "nos", + "notamment", "notre", "nous", "nous-mêmes", "nouveau", "nul", + "néanmoins", "nôtre", "nôtres", "o", "oh", "ohé", "ollé", "olé", + "on", "ont", "onze", "onzième", "ore", "ou", "ouf", "ouias", "oust", + "ouste", "outre", "ouvert", "ouverte", "ouverts", "o|", "où", "p", + "paf", "pan", "par", "parce", "parfois", "parle", "parlent", + "parler", "parmi", "parseme", "partant", "particulier", + "particulière", "particulièrement", "pas", "passé", "pendant", + "pense", "permet", "personne", "peu", "peut", "peuvent", "peux", + "pff", "pfft", "pfut", "pif", "pire", "plein", "plouf", "plus", + "plusieurs", "plutôt", "possessif", "possessifs", "possible", + "possibles", "pouah", "pour", "pourquoi", "pourrais", "pourrait", + "pouvait", "prealable", "precisement", "premier", "première", + "premièrement", "pres", "probable", "probante", "procedant", + "proche", "près", "psitt", "pu", "puis", "puisque", "pur", "pure", + "q", "qu", "quand", "quant", "quant-à-soi", "quanta", "quarante", + "quatorze", "quatre", "quatre-vingt", "quatrième", "quatrièmement", + "que", "quel", "quelconque", "quelle", "quelles", "quelqu'un", + "quelque", "quelques", "quels", "qui", "quiconque", "quinze", + "quoi", "quoique", "r", "rare", "rarement", "rares", "relative", + "relativement", "remarquable", "rend", "rendre", "restant", "reste", + "restent", "restrictif", "retour", "revoici", "revoilà", "rien", + "s", "sa", 
"sacrebleu", "sait", "sans", "sapristi", "sauf", "se", + "sein", "seize", "selon", "semblable", "semblaient", "semble", + "semblent", "sent", "sept", "septième", "sera", "seraient", + "serait", "seront", "ses", "seul", "seule", "seulement", "si", + "sien", "sienne", "siennes", "siens", "sinon", "six", "sixième", + "soi", "soi-même", "soit", "soixante", "son", "sont", "sous", + "souvent", "specifique", "specifiques", "speculatif", "stop", + "strictement", "subtiles", "suffisant", "suffisante", "suffit", + "suis", "suit", "suivant", "suivante", "suivantes", "suivants", + "suivre", "superpose", "sur", "surtout", "t", "ta", "tac", "tant", + "tardive", "te", "tel", "telle", "tellement", "telles", "tels", + "tenant", "tend", "tenir", "tente", "tes", "tic", "tien", "tienne", + "tiennes", "tiens", "toc", "toi", "toi-même", "ton", "touchant", + "toujours", "tous", "tout", "toute", "toutefois", "toutes", + "treize", "trente", "tres", "trois", "troisième", "troisièmement", + "trop", "très", "tsoin", "tsouin", "tu", "té", "u", "un", "une", + "unes", "uniformement", "unique", "uniques", "uns", "v", "va", + "vais", "vas", "vers", "via", "vif", "vifs", "vingt", "vivat", + "vive", "vives", "vlan", "voici", "voilà", "vont", "vos", "votre", + "vous", "vous-mêmes", "vu", "vé", "vôtre", "vôtres", "w", "x", "y", + "z", "zut", "à", "â", "ça", "ès", "étaient", "étais", "était", + "étant", "été", "être", "ô"}, + "it": {"IE", "a", "abbastanza", "abbia", "abbiamo", "abbiano", "abbiate", + "accidenti", "ad", "adesso", "affinche", "agl", "agli", "ahime", + "ahimè", "ai", "al", "alcuna", "alcuni", "alcuno", "all", "alla", + "alle", "allo", "allora", "altri", "altrimenti", "altro", "altrove", + "altrui", "anche", "ancora", "anni", "anno", "ansa", "anticipo", + "assai", "attesa", "attraverso", "avanti", "avemmo", "avendo", + "avente", "aver", "avere", "averlo", "avesse", "avessero", "avessi", + "avessimo", "aveste", "avesti", "avete", "aveva", "avevamo", + "avevano", "avevate", "avevi", "avevo", "avrai", "avranno", + "avrebbe", "avrebbero", "avrei", "avremmo", "avremo", "avreste", + "avresti", "avrete", "avrà", "avrò", "avuta", "avute", "avuti", + "avuto", "basta", "bene", "benissimo", "berlusconi", "brava", + "bravo", "c", "casa", "caso", "cento", "certa", "certe", "certi", + "certo", "che", "chi", "chicchessia", "chiunque", "ci", "ciascuna", + "ciascuno", "cima", "cio", "cioe", "cioè", "circa", "citta", + "città", "ciò", "co", "codesta", "codesti", "codesto", "cogli", + "coi", "col", "colei", "coll", "coloro", "colui", "come", "cominci", + "comunque", "con", "concernente", "conciliarsi", "conclusione", + "consiglio", "contro", "cortesia", "cos", "cosa", "cosi", "così", + "cui", "d", "da", "dagl", "dagli", "dai", "dal", "dall", "dalla", + "dalle", "dallo", "dappertutto", "davanti", "degl", "degli", "dei", + "del", "dell", "della", "delle", "dello", "dentro", "detto", "deve", + "di", "dice", "dietro", "dire", "dirimpetto", "diventa", + "diventare", "diventato", "dopo", "dov", "dove", "dovra", "dovrà", + "dovunque", "due", "dunque", "durante", "e", "ebbe", "ebbero", + "ebbi", "ecc", "ecco", "ed", "effettivamente", "egli", "ella", + "entrambi", "eppure", "era", "erano", "eravamo", "eravate", "eri", + "ero", "esempio", "esse", "essendo", "esser", "essere", "essi", + "ex", "fa", "faccia", "facciamo", "facciano", "facciate", "faccio", + "facemmo", "facendo", "facesse", "facessero", "facessi", + "facessimo", "faceste", "facesti", "faceva", "facevamo", "facevano", + "facevate", "facevi", "facevo", "fai", "fanno", "farai", 
"faranno", + "fare", "farebbe", "farebbero", "farei", "faremmo", "faremo", + "fareste", "faresti", "farete", "farà", "farò", "fatto", "favore", + "fece", "fecero", "feci", "fin", "finalmente", "finche", "fine", + "fino", "forse", "forza", "fosse", "fossero", "fossi", "fossimo", + "foste", "fosti", "fra", "frattempo", "fu", "fui", "fummo", "fuori", + "furono", "futuro", "generale", "gia", "giacche", "giorni", + "giorno", "già", "gli", "gliela", "gliele", "glieli", "glielo", + "gliene", "governo", "grande", "grazie", "gruppo", "ha", "haha", + "hai", "hanno", "ho", "i", "ieri", "il", "improvviso", "in", "inc", + "infatti", "inoltre", "insieme", "intanto", "intorno", "invece", + "io", "l", "la", "lasciato", "lato", "lavoro", "le", "lei", "li", + "lo", "lontano", "loro", "lui", "lungo", "luogo", "là", "ma", + "macche", "magari", "maggior", "mai", "male", "malgrado", + "malissimo", "mancanza", "marche", "me", "medesimo", "mediante", + "meglio", "meno", "mentre", "mesi", "mezzo", "mi", "mia", "mie", + "miei", "mila", "miliardi", "milioni", "minimi", "ministro", "mio", + "modo", "molti", "moltissimo", "molto", "momento", "mondo", "mosto", + "nazionale", "ne", "negl", "negli", "nei", "nel", "nell", "nella", + "nelle", "nello", "nemmeno", "neppure", "nessun", "nessuna", + "nessuno", "niente", "no", "noi", "non", "nondimeno", "nonostante", + "nonsia", "nostra", "nostre", "nostri", "nostro", "novanta", "nove", + "nulla", "nuovo", "o", "od", "oggi", "ogni", "ognuna", "ognuno", + "oltre", "oppure", "ora", "ore", "osi", "ossia", "ottanta", "otto", + "paese", "parecchi", "parecchie", "parecchio", "parte", "partendo", + "peccato", "peggio", "per", "perche", "perchè", "perché", "percio", + "perciò", "perfino", "pero", "persino", "persone", "però", "piedi", + "pieno", "piglia", "piu", "piuttosto", "più", "po", "pochissimo", + "poco", "poi", "poiche", "possa", "possedere", "posteriore", + "posto", "potrebbe", "preferibilmente", "presa", "press", "prima", + "primo", "principalmente", "probabilmente", "proprio", "puo", + "pure", "purtroppo", "può", "qualche", "qualcosa", "qualcuna", + "qualcuno", "quale", "quali", "qualunque", "quando", "quanta", + "quante", "quanti", "quanto", "quantunque", "quasi", "quattro", + "quel", "quella", "quelle", "quelli", "quello", "quest", "questa", + "queste", "questi", "questo", "qui", "quindi", "realmente", + "recente", "recentemente", "registrazione", "relativo", "riecco", + "salvo", "sara", "sarai", "saranno", "sarebbe", "sarebbero", + "sarei", "saremmo", "saremo", "sareste", "saresti", "sarete", + "sarà", "sarò", "scola", "scopo", "scorso", "se", "secondo", + "seguente", "seguito", "sei", "sembra", "sembrare", "sembrato", + "sembri", "sempre", "senza", "sette", "si", "sia", "siamo", "siano", + "siate", "siete", "sig", "solito", "solo", "soltanto", "sono", + "sopra", "sotto", "spesso", "srl", "sta", "stai", "stando", + "stanno", "starai", "staranno", "starebbe", "starebbero", "starei", + "staremmo", "staremo", "stareste", "staresti", "starete", "starà", + "starò", "stata", "state", "stati", "stato", "stava", "stavamo", + "stavano", "stavate", "stavi", "stavo", "stemmo", "stessa", + "stesse", "stessero", "stessi", "stessimo", "stesso", "steste", + "stesti", "stette", "stettero", "stetti", "stia", "stiamo", + "stiano", "stiate", "sto", "su", "sua", "subito", "successivamente", + "successivo", "sue", "sugl", "sugli", "sui", "sul", "sull", "sulla", + "sulle", "sullo", "suo", "suoi", "tale", "tali", "talvolta", + "tanto", "te", "tempo", "ti", "titolo", "torino", "tra", "tranne", + 
"tre", "trenta", "troppo", "trovato", "tu", "tua", "tue", "tuo", + "tuoi", "tutta", "tuttavia", "tutte", "tutti", "tutto", "uguali", + "ulteriore", "ultimo", "un", "una", "uno", "uomo", "va", "vale", + "vari", "varia", "varie", "vario", "verso", "vi", "via", "vicino", + "visto", "vita", "voi", "volta", "volte", "vostra", "vostre", + "vostri", "vostro", "è"}, + "hi": {} +} diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/worker.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/worker.py new file mode 100644 index 0000000000000000000000000000000000000000..acaec733551ee645617dcd226f99c2b2268d6243 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/core/worker.py @@ -0,0 +1,403 @@ +import sys +import fasttext +import gc +import hashlib +import logging +import logging.handlers +import multiprocessing as mp +import os +from pathlib import Path +import re +from typing import List, Dict, Callable, Optional +from urllib.parse import urlparse +import urllib3 +import pyarrow as pa +import uuid + +from core.document import Document +from core.quality_signals.content import register_content_callables +from core.quality_signals.lines import register_lines_callables +from core.quality_signals.natural_language import \ + register_natural_language_callables +from core.quality_signals.repetitions import register_repetitions_callables +from core.quality_signals.classifiers import register_classifier_callables +from core.quality_signals.importance_weights import \ + register_importance_weights_callables +from core.data_types import InputSpec +from core.schema.rp import RP_SIGNAL_SCHEMA +from dedupe.minhash import MinHash +from utilities.io import Reader, Writer, ParquetBatchWriter +from utilities.io.s3 import init_client +from utilities.logging.mp import configure_worker_logger + +# disable warnings +fasttext.FastText.eprint = lambda x: None +urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) # noqa + +_BYTE_ORDER = sys.byteorder + + +def _ccnet_bucket_to_int(bucket: str) -> Optional[float]: + r""" ccnet bucket name to float mapping """ + if bucket == "head": + return 0.0 + elif bucket == "middle": + return 1.0 + elif bucket == "tail": + return 2.0 + else: + return None + + +class Worker: + # output file pattern + shard_pattern_signals = "{shard_id}.signals.json.gz" + shard_pattern_minhash = "{shard_id}.minhash.parquet" + + # regex to extract snapshot id from uri + snapsh_re = re.compile(r'\b\d{4}-\d{2}\b') + uri_id_re = re.compile(r'\b\d{4}-\d{2}\b/.*') + + def __init__( + self, language: str, + snapshot_id: str, + input_listings: List[str], + input_base_uri: str, + output_base_uri: str, + log_dir: str, + classifier_files: Dict[str, str], + dsir_files: Dict[str, str], + dsir_bucket: int, + ldnoobw_dir: Path, + ut1_dir: Path, + minhash_similarities: List[float], + minhash_ngram_size: int, + minhash_num_permutations: int, + monitor_queue: mp.Queue, + logging_queue: mp.Queue, + seed: int, + endpoint_url: str = None, + max_docs: int = -1, + flush_interval=1000 + ): + self._lang = language + self._snapshot_id = snapshot_id + self._input_base_uri = input_base_uri + self._output_base_uri = output_base_uri + self._dsir_files = dsir_files + self._dsir_buckets = dsir_bucket + self._flush_interval = flush_interval + + # init logger + configure_worker_logger(logging_queue, level=logging.INFO) + self._logger = logging.getLogger() + + # minhash setup + self._minhash = MinHash( + similarity_thresholds=minhash_similarities, + ngram_size=minhash_ngram_size, + 
num_permutations=minhash_num_permutations, + seed=seed + ) + + self._logger.info(f"__MINHASH_PERM_CHECKSUM__ " + f"{self._minhash.checksum}") + + self._max_docs = max_docs + self._monitor_queue = monitor_queue + self._endpoint_url = endpoint_url + + self._job_id = str(uuid.uuid4()) + + # build input paths + self._input_uri_list = list(map( + lambda x: os.path.join(self._input_base_uri, x), + input_listings + )) + + # init file to keep track of failed input files + self._failed_input_file = os.path.join( + log_dir, f"{language}-inputs.{self._job_id}.FAIL" + ) + + # init file to keep track of successful input files + self._success_input_file = os.path.join( + log_dir, f"{language}-inputs.{self._job_id}.SUCCESS" + ) + + # setup input file reader + read_scheme = urlparse(self._input_base_uri).scheme + if read_scheme == "s3": + client = init_client( + endpoint_url=self._endpoint_url, + aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID"), + aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY"), + signature_version="s3v4" + ) + else: + client = None + + self._reader = Reader( + input_spec=InputSpec, threads=1, s3_client=client, + logger=self._logger + ) + + # classifier model filepaths + self._palm_model_file = classifier_files.get("palm") + self._wikiref_model_file = classifier_files.get("wikiref") + self._wikipedia_model_file = classifier_files.get("wikipedia") + + # initialize signal functions + self._quality_signals = self.__init_quality_signals( + ldnoobw_dir=ldnoobw_dir, ut1_dir=ut1_dir + ) + + # minhash_schema + self._minhash_schema = pa.schema([ + ("shard_id", pa.string()), + ("id", pa.string()), + ("id_int", pa.uint64()), + *[ + ( + "signature_sim{s}".format(s=s), pa.list_(pa.binary()) + ) + for s in minhash_similarities + ] + ]) + + @property + def job_id(self): + return self._job_id + + def __init_quality_signals(self, ldnoobw_dir, ut1_dir) -> List[Callable]: + callables = [] + + # initialize content signal functions + self._logger.info(f"Registering content signals for {self._lang}..") + callables += register_content_callables( + language=self._lang, + bad_urls_dir=ut1_dir, + bad_words_dir=ldnoobw_dir + ) + + # initialize repetition removal signal functions + self._logger.info(f"Registering repetition signals for {self._lang}..") + callables += register_repetitions_callables() + + # initialize natural language signal functions + self._logger.info(f"Registering natlang signals for {self._lang}..") + callables += register_natural_language_callables() + + # initialize line signal functions + self._logger.info(f"Registering line level signals for {self._lang}..") + callables += register_lines_callables() + + # initialize ml heuristics signal functions + self._logger.info(f"Registering classifier signals for {self._lang}..") + callables += register_classifier_callables( + wikiref_model=self._wikiref_model_file, + palm_model=self._palm_model_file, + wikipedia_model=self._wikipedia_model_file + ) + + # initialize importance weights signal functions + # hacky -- first index is the counts file, second is the lambda file + # this is set in pipeline.py + self._logger.info(f"Registering dsir signals for {self._lang}..") + callables += register_importance_weights_callables( + source_fps=self._dsir_files.get("ccnet"), + wiki_fps=self._dsir_files.get("wikipedia"), + openwebtext_fps=self._dsir_files.get("openwebtext"), + books_fps=self._dsir_files.get("books"), + language=self._lang + ) + + return callables + + def __process_record( + self, idx: int, record, uri_id: str, snapshot_id: str + 
):
+        # Setup document; this precomputes ngrams and hash features
+        document = Document(
+            record.raw_content,
+            domain=record.source_domain,
+            precompute_ngrams=True,
+            precompute_hash_features=True,
+            dsir_buckets=self._dsir_buckets
+        )
+
+        # compute signals
+        rp_v2_signals = {}
+        for func in self._quality_signals:
+            rp_v2_signals[func.field_name] = func(document)  # noqa
+
+        # compute minhash signatures
+        minhash_signatures = self._minhash.compute_banded_signatures(
+            tokens=document.normalized_words
+        )
+
+        # compute document ids
+        doc_id = f"{uri_id}/{idx}"
+        doc_id_int = int.from_bytes(
+            hashlib.sha1(doc_id.encode("utf-8")).digest()[:8],  # take 8 bytes
+            byteorder=_BYTE_ORDER, signed=False
+        )
+
+        record_data = {
+            "id": doc_id,
+            "id_int": doc_id_int,
+        }
+
+        metadata = {
+            "cc_segment": record.cc_segment,
+            "cc_net_source": uri_id,
+            "url": record.url,
+            "source_domain": record.source_domain,
+            "language": record.language,
+            "snapshot_id": snapshot_id
+        }
+
+        ccnet_quality_signals = {
+            "ccnet_length": (
+                (0, len(document), float(record.length)),
+            ),
+            # "ccnet_original_length": (
+            #     (0, len(document), float(record.original_length)),
+            # ),
+            "ccnet_nlines": (
+                (0, len(document), float(record.nlines)),
+            ),
+            # "ccnet_original_nlines": (
+            #     (0, len(document), float(record.original_nlines)),
+            # ),
+            "ccnet_language_score": (
+                (0, len(document), float(record.language_score)),
+            ),
+            # "ccnet_perplexity": (
+            #     (0, len(document), float(record.perplexity)),
+            # ),
+            "ccnet_bucket": (
+                (0, len(document), _ccnet_bucket_to_int(record.bucket)),
+            ),
+        }
+
+        record_data["metadata"] = metadata
+        record_data["quality_signals"] = {
+            **ccnet_quality_signals, **rp_v2_signals
+        }
+
+        return record_data, minhash_signatures, doc_id, doc_id_int
+
+    def __process_uri(self, docs_to_fetch: int, uri: str):
+        num_docs = 0
+        docs_added = 0
+        # snapshot_id = self.snapsh_re.search(uri).group(0)
+        # uri_id = self.uri_id_re.search(uri).group(0)
+        snapshot_id = self._snapshot_id
+        uri_id = uri[-12:-8]
+
+        # signal writer
+        signal_uri = os.path.join(
+            self._output_base_uri,
+            self.shard_pattern_signals.format(shard_id=uri_id.split(".")[0]),
+        )
+        signal_writer = Writer(uri=signal_uri, schema=RP_SIGNAL_SCHEMA)
+        self._logger.info(f"Initialized jsonl writer to {signal_uri}")
+
+        # init minhash writer
+        minhash_uri = os.path.join(
+            self._output_base_uri,
+            self.shard_pattern_minhash.format(shard_id=uri_id.split(".")[0]),
+        )
+        minhash_writer = ParquetBatchWriter(
+            output_fp=minhash_uri, schema=self._minhash_schema
+        )
+        self._logger.info(f"Initialized parquet writer to {minhash_uri}")
+
+        uri = "file://" + uri
+        for idx, record in self._reader.read(
+                uri=uri, max_samples=docs_to_fetch, return_idx=True
+        ):
+            # compute signals
+            (
+                record_data, minhash_signatures, doc_id, doc_id_int
+            ) = self.__process_record(
+                idx=idx, record=record, uri_id=uri_id, snapshot_id=snapshot_id
+            )
+            num_docs += 1
+            docs_added += 1
+
+            # write quality signals
+            signal_writer.write(record_data)
+
+            # record minhash signatures
+            minhash_writer.update_batch(
+                obj={"shard_id": uri_id, "id_int": doc_id_int, "id": doc_id,
+                     **minhash_signatures}
+            )
+
+            # send to monitor
+            if num_docs % self._flush_interval == 0:
+                minhash_writer.write_batch()
+                signal_writer.flush()
+                self._monitor_queue.put({
+                    "lang": self._lang, "num_docs": docs_added
+                })
+                docs_added = 0
+
+        if docs_added > 0:
+            self._monitor_queue.put({
+                "lang": self._lang, "num_docs": docs_added
+            })
+
+        # close writers
+        signal_writer.close()
+        minhash_writer.close()
+
+        gc.collect()
+
+        return num_docs
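The document ids built in `__process_record` above are easy to reproduce outside the worker: the integer id is the first 8 bytes of the SHA-1 digest of the string id, read in native byte order. A minimal sketch with a made-up `uri_id` and index (note that using `sys.byteorder` makes the integer id endianness-dependent, so it is only stable across machines with the same byte order):

```python
import hashlib
import sys

doc_id = "hi_head_0000/42"  # hypothetical "{uri_id}/{idx}"

doc_id_int = int.from_bytes(
    hashlib.sha1(doc_id.encode("utf-8")).digest()[:8],  # first 8 bytes
    byteorder=sys.byteorder, signed=False,
)

print(doc_id, doc_id_int)
```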
minhash_writer.close() + + gc.collect() + + return num_docs + + def run(self): + total_docs = 0 + + for i, uri in enumerate(self._input_uri_list, start=1): + docs_to_fetch = self._max_docs - total_docs + if docs_to_fetch <= 0 < self._max_docs: + self._logger.info( + f"Reached max docs {self._max_docs} at {uri}") + break + + # process file + self._logger.info( + f"Start processing {uri} ({i}/{len(self._input_uri_list)})" + ) + try: + docs_in_uri = self.__process_uri(docs_to_fetch, uri) + except Exception as e: + with open(self._failed_input_file, "a+") as f: + f.write(f"{uri}\n") + self._logger.error(f"__URI_FAIL__ {uri} with exception: " + f"{e.__class__.__name__}: {e} in " + f"{self.__class__.__name__}.__process_uri") + continue + + total_docs += docs_in_uri + self._logger.info( + f"__URI_SUCCESS__ {uri} ({i}/{len(self._input_uri_list)})" + ) + + # send signal that a uri has been completed + self._monitor_queue.put({ + "lang": self._lang, "num_docs": None, "uri_complete": True + }) + + # keep track of completed uris + with open(self._success_input_file, "a+") as f: + f.write(f"{uri}\n") + + self._logger.info(f"Worker {self._job_id} Completed. " + f"Processed {total_docs} documents.") + + gc.collect() + + return total_docs, self._lang diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/pipeline.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/pipeline.py new file mode 100644 index 0000000000000000000000000000000000000000..a469e39b6135a7735cec7dca7888ede80f052225 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/pipeline.py @@ -0,0 +1,472 @@ +import argparse +import os +from concurrent.futures import ProcessPoolExecutor, as_completed +from collections import defaultdict +from datetime import datetime as dt +import gc +import json +import logging +import multiprocessing as mp +import numpy as np +from pathlib import Path +import random +import re +import time +from typing import Dict, List +import uuid +from urllib.parse import urlparse + +from core.worker import Worker +from utilities.logging.trackers import RateTracker +from utilities.logging.mp import * + + +def get_timestamp(): + return dt.now().isoformat() + + +def monitor_progress( + logging_queue: mp.Queue, monitor_queue: mp.Queue, languages: List[str], + total_uri_counts: Dict[str, int] +): + start_time = time.time() + log_str = " | ".join([f"{lang}: {{{lang}:,}}" for lang in languages]) + \ + " | total: {total:,} | {rate:.2f} docs/s" + \ + " | {lang}: processed {uc}/{tuc} uris" + + # setup logging + configure_worker_logger(logging_queue, level=logging.INFO) + logger = logging.getLogger() + + total_docs = 0 + + uri_counts_per_lang = {lang: 0 for lang in languages} + doc_counts_per_lang = {k: 0 for k in languages} + + logger.info(f"Start monitoring...") + + rate_tracker = RateTracker(n=200) + current_lang = None + + try: + while True: + + batch_time = time.time() + + if (data := monitor_queue.get()) is None: + break + + lang = data["lang"] + num_docs = data["num_docs"] + uri_complete = data.get("uri_complete", False) + + if uri_complete: + # we received a uri complete signal -- record that one more + # uri has been processed + uri_counts_per_lang[lang] += 1 + continue + + if lang != current_lang: + current_lang = lang + rate_tracker.reset() + logger.info(f"reset tracker for {lang}") + + doc_counts_per_lang[lang] += num_docs + total_docs += num_docs + + rate_tracker.update(num_docs, batch_time) + rate = rate_tracker.get_rate(time.time()) + + # log stats + logger.info(log_str.format( + 
**doc_counts_per_lang, total=total_docs, rate=rate, + lang=lang, uc=uri_counts_per_lang[lang], + tuc=total_uri_counts[lang], + )) + + except KeyboardInterrupt: + logger.error(f"KeyboardInterrupt. Shutting down progress monitor.") + return + + logger.info("=" * 80) + logger.info(f"Done. Total time {time.time() - start_time:.2f}s") + logger.info("=" * 80) + logger.info("Document counts:") + + for lang, num_docs in doc_counts_per_lang.items(): + logger.info(f"{lang}: {num_docs:,}") + + logger.info(f"Total: {total_docs:,}") + logger.info("=" * 80) + logger.info(f"Progress monitor done.") + + +def main_logger_process(logging_queue: mp.Queue, logfile: Path): + configure_listener_logger(logfile=logfile, level=logging.INFO) + + while True: + message = logging_queue.get() + if message is None: + break + logger = logging.getLogger(message.name) + logger.handle(message) + + +class RPSignalJob: + r""" Class for running the rp_signals pipeline """ + + # descriptions for input and output arguments. This will be shown using + # the --help flag + input_descr = "The input must be provided as a listings file containing " \ + "the paths to the data files, one per line, given " \ + "relative to input_base_uri." + + def __init__(self): + self._args = self.parse_arguments() + self._job_id = str(uuid.uuid4())[:16] + + random.seed(self._args.seed) + + # convenience access to args + self._languages = self._args.langs + self._inputs_per_process = self._args.inputs_per_process + + # minhash + self._minhash_ngram_size = self._args.minhash_ngram_size + self._minhash_num_permutations = self._args.minhash_num_permutations + self._minhash_similarities = self._args.minhash_similarities + + # artifacts + self._artifacts_dir = Path(self._args.artifacts_dir) + self._classifiers_dir = self._artifacts_dir / "classifiers" + self._dsir_dir = self._artifacts_dir / "dsir" + self._bad_words_dir = self._artifacts_dir / "bad_words" + self._bad_urls_dir = self._artifacts_dir / "bad_urls" + + # i/o args + self._snapshot_id = self._args.cc_snapshot_id + self._input_listings = self.__parse_input_listings() + self._output_base_uri = self._args.output_base_uri + self._output_base_uri_parsed = urlparse(self._output_base_uri) + self._log_dir = Path( + self._output_base_uri_parsed.path + ) / "logs" / self._snapshot_id + + # get classifier filepaths + self._classifiers_files = self.__parse_classifiers_dir() + + # get filepaths for importance weights + self._dsir_files = self.__parse_dsir_dir() + + def parse_arguments(self): + if self.__doc__ is not None: + description = " - " + self.__doc__ + else: + description = self.__class__.__name__ + + parser = argparse.ArgumentParser( + prog=self.__class__.__name__, description=description + ) + + # input and outputs + parser.add_argument( + "--input", type=str, default=None, help=self.input_descr + ) + parser.add_argument( + "--input_base_uri", type=str, + default=None, + help="Base URL (prefix) used for files list in input. Used to " + "select the access method: s3:/// or file:///" + ) + parser.add_argument( + "--output_base_uri", type=str, + default=None, + help="Base URL (prefix) used for files list in output. Used to " + "select the access method: s3:/// or file:///" + ) + parser.add_argument( + "--filename_keep_patterns", type=str, nargs="+", default=None, + help="list of regex patterns to match against filenames to keep" + ) + parser.add_argument( + "--cc_snapshot_id", type=str, default=None, + help="id of the common crawl snapshot to process." + )
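+ # the artifacts directory passed below is expected to contain the + # classifiers/, dsir/, bad_words/ and bad_urls/ subdirectories that + # __init__ resolves against it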
+ parser.add_argument( + "--artifacts_dir", type=str, default=None, + help="Path on the local filesystem to the directory containing " + "artifacts" + ) + parser.add_argument( + "--ext", type=str, default=".json.gz", + help="File extension of input files; defaults to .json.gz" + ) + parser.add_argument( + "--max_docs", type=int, default=-1, + help="maximum number of documents to process per language, split " + "across worker chunks; -1 (default) disables the limit. " + "Intended for development purposes" + ) + parser.add_argument( + "--max_proc", type=int, default=None, + help="maximum number of processes to use; default is the number " + "of available CPUs" + ) + parser.add_argument( + "--seed", type=int, default=42, help="random seed" + ) + parser.add_argument( + "--endpoint_url", type=str, default=None, + help="endpoint url where the s3 bucket is exposed." + ) + parser.add_argument( + "--inputs_per_process", type=int, default=20, + help="number of inputs to process per worker" + ) + parser.add_argument( + "--langs", type=str, nargs="+", + default=["en"], + help="subset of languages for which data files are processed." + ) + + # dsir + parser.add_argument( + "--dsir_buckets", type=int, default=10_000, + help="dimension of feature vector for dsir" + ) + + # minhash + parser.add_argument( + "--minhash_ngram_size", type=int, default=None, + help="ngram size for minhash" + ) + parser.add_argument( + "--minhash_num_permutations", type=int, default=None, + help="number of permutations for minhash" + ) + parser.add_argument( + "--minhash_similarities", nargs="+", default=[1.0, 0.9, 0.8, 0.7], type=float, + help="list of jaccard similarity levels for which banded minhash " + "signatures are computed" + ) + + return parser.parse_args() + + def __parse_input_listings(self) -> Dict[str, List[str]]: + r""" Parse the input listing """ + if self._args.input is None: + raise ValueError("Input argument must be provided") + + if not Path(self._args.input).exists(): + raise ValueError(f"Listings {self._args.input} does not exist") + + inputs_per_language = defaultdict(list) + fn_regexes = list(map( + lambda p: re.compile(p), self._args.filename_keep_patterns or [] + )) + + with open(self._args.input) as f: + for line in f: + listing = line.strip() + + if not listing: + continue + + # listings are named <lang>_<shard>.json.gz, so the language + # is the filename prefix up to the first underscore + lang = Path(listing).name.split("_")[0] + + if lang not in self._languages: + continue + + if len(fn_regexes) > 0: + if not any(p.match(listing) for p in fn_regexes): + continue + + inputs_per_language[lang].append(listing) + + return inputs_per_language + + def __parse_classifiers_dir(self) -> Dict[str, Dict[str, str]]: + model_files = defaultdict(dict) + + for lang in self._languages: + model_dir = self._classifiers_dir / lang + if not model_dir.exists(): + continue + for model_file in model_dir.glob("*.bin"): + domain = model_file.stem.split(".")[0] + model_files[lang][domain] = str(model_file) + + return model_files + + def __parse_dsir_dir(self) -> Dict[str, Dict[str, List[str]]]: + dsir_filepaths = defaultdict(dict) + + for lang in self._languages: + dsir_dir = self._dsir_dir / lang + + if not dsir_dir.exists(): + continue + + for counts_file in dsir_dir.glob("*.counts.npy"): + domain = counts_file.stem.split(".")[0] + dsir_filepaths[lang][domain] = [str(counts_file)] + + # assumes a matching .counts.npy was collected above, so the + # counts file always precedes the lambda file in the list + for lambda_file in dsir_dir.glob("*.lambda.npy"): + domain = lambda_file.stem.split(".")[0] + dsir_filepaths[lang][domain].append(str(lambda_file)) + + return dsir_filepaths +
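+ # note on __parse_dsir_dir: for a language "hi" with domain "ccnet", + # the files dsir/hi/ccnet.counts.npy and dsir/hi/ccnet.lambda.npy are + # collected as {"ccnet": [counts_path, lambda_path]}; the worker relies + # on this (counts, lambda) ordering when registering the importance + # weight signal functions +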
+ def __log_run_setup(self, logger): + logger.info(f"logging outputs to {self._log_dir}") + logger.info(f"job_id: {self._job_id}") + logger.info(f"PYTHONHASHSEED: {os.environ.get('PYTHONHASHSEED')}") + + # log args + for arg, val in vars(self._args).items(): + logger.info(f"{arg}: {val}") + + # logs job fields + logger.info(f"classifier_files: \n" + f"{json.dumps(self._classifiers_files, indent=4)}") + logger.info(f"dsir_files: \n" + f"{json.dumps(self._dsir_files, indent=4)}") + + def run(self): + max_proc = min(mp.cpu_count(), self._args.max_proc or np.inf) + + # get total number of uris per language + total_uri_counts = { + lang: len(self._input_listings[lang]) for lang in self._languages + } + + # setup logging + log_file = Path(self._log_dir) / f"{self._job_id}.log" + manager = mp.Manager() + + queue_buffer_size = 128 * (self._args.max_proc or mp.cpu_count()) + + # kick off logger process + logging_queue = manager.Queue(maxsize=queue_buffer_size) + logger_proc = mp.Process( + target=main_logger_process, args=(logging_queue, log_file) + ) + logger_proc.start() + + # start progress monitor + monitor_queue = manager.Queue(maxsize=queue_buffer_size) + monitor_proc = mp.Process( + target=monitor_progress, + args=(logging_queue, monitor_queue, self._languages, + total_uri_counts) + ) + monitor_proc.start() + + configure_worker_logger(queue=logging_queue, level=logging.INFO) + logger = logging.getLogger() + + # log run setup + self.__log_run_setup(logger) + for lang in self._languages: + logger.info(f"{lang}: {len(self._input_listings[lang]):,} inputs") + + for lang in self._languages: + lang_inputs = self._input_listings[lang] + random.shuffle(lang_inputs) + + logger.info("*" * 80) + logger.info(f"Start processing {lang}") + + # chunk the inputs; stepping by chunk_size ensures a trailing + # partial chunk is processed as well instead of being dropped + chunk_size = self._args.inputs_per_process + input_chunks = [ + lang_inputs[i:i + chunk_size] + for i in range(0, len(lang_inputs), chunk_size) + ] + + if not input_chunks: + logger.info(f"No inputs for {lang}, skipping") + continue + + max_docs_per_chunk = self._args.max_docs // len(input_chunks) + + with ProcessPoolExecutor(max_workers=max_proc - 2) as executor: + futures = [ + executor.submit( + self._run_chunk, + input_listings=chunk, + lang=lang, + max_docs=max_docs_per_chunk, + monitor_queue=monitor_queue, + logging_queue=logging_queue, + ) + for chunk in input_chunks + ] + + try: + for future in as_completed(futures): + result = future.result() + futures.remove(future) + wid = result["job_id"] + exc = result["exception"] + if exc is not None: + logger.error(f"__WORKER_FAIL__ ({wid}) exc={exc}") + continue + + logger.info(f"__WORKER_COMPLETED__ {wid} completed.") + except KeyboardInterrupt: + logger.error(f"KeyboardInterrupt. 
Shutting down.") + executor.shutdown(wait=False, cancel_futures=True) + break + + gc.collect() + + # signal monitor to stop + monitor_queue.put(None) + monitor_proc.join() + + # signal logger to stop + logging_queue.put_nowait(None) + logger_proc.join() + + manager.shutdown() + + def _run_chunk( + self, input_listings, lang, max_docs, monitor_queue, logging_queue + ): + + if len(input_listings) == 0: + return {"exception": None, "lang": lang, "job_id": None} + + proc = Worker( + language=lang, + snapshot_id=self._snapshot_id, + input_listings=input_listings, + input_base_uri=self._args.input_base_uri, + output_base_uri=self._output_base_uri, + log_dir=self._log_dir, + classifier_files=self._classifiers_files.get(lang, {}), + dsir_files=self._dsir_files.get(lang, {}), + dsir_bucket=self._args.dsir_buckets, + ldnoobw_dir=self._bad_words_dir, + ut1_dir=self._bad_urls_dir, + minhash_similarities=self._minhash_similarities, + minhash_ngram_size=self._minhash_ngram_size, + minhash_num_permutations=self._minhash_num_permutations, + logging_queue=logging_queue, + monitor_queue=monitor_queue, + endpoint_url=self._args.endpoint_url, + max_docs=max_docs, + seed=self._args.seed, + flush_interval=5000 + ) + + try: + proc.run() + exc = None + except Exception as e: + exc = f"{e.__class__.__name__}: {e}" + + gc.collect() + + return {"exception": exc, "lang": lang, "job_id": proc.job_id} + + +if __name__ == '__main__': + mp.set_start_method('fork') + mp.set_executable("python") + job = RPSignalJob() + job.run() diff --git a/cc-multilingual-main/dedup/RedPajama-Data/app/src/prep_artifacts.py b/cc-multilingual-main/dedup/RedPajama-Data/app/src/prep_artifacts.py new file mode 100644 index 0000000000000000000000000000000000000000..cfd52ccacbf025d9e657ffd73a5bfb53388685d2 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/app/src/prep_artifacts.py @@ -0,0 +1,202 @@ +import argparse +import logging +from datetime import datetime as dt +import os +from pathlib import Path + +from artifacts.downloaders import ( + WikipediaDownloader, + OpenWebTextDownloader, + BooksDownloader, + CCNetDownloader +) +from artifacts.hash_dist import HashDist +from artifacts.ft_trainer import FastTextTrainer +from utilities.logging.configure import configure_logger + + +def parse_arguments(): + def nullable_string(val): + # converts empty string to None + return None if not val else val + + parser = argparse.ArgumentParser() + # input and outputs + parser.add_argument( + "--artifacts_dir", type=str, default=None, + help="Directory where artifacts of the pipeline are stored" + ) + parser.add_argument( + "--cc_input", type=str, default=None, + help="cc_net output listings" + ) + parser.add_argument( + "--cc_input_base_uri", type=str, default=None, + help="Base URL (prefix) used for files list in input. 
Used to " + "select the access method: s3:/// or file:///" + ) + parser.add_argument( + "--cache_dir", type=str, default=None, + help="huggingface cache directory" + ) + parser.add_argument( + "--overwrite", action="store_true", + help="Overwrite existing files" + ) + parser.add_argument( + "--lang", type=str, default=None + ) + parser.add_argument( + "--max_workers", type=int, default=None, + help="Maximum number of workers to use" + ) + parser.add_argument( + "--dsir_num_samples", type=int, default=None, + help="Number of samples to use for dsir" + ) + parser.add_argument( + "--dsir_feature_dim", type=int, default=None, + help="Number of buckets to use for dsir" + ) + parser.add_argument( + "--classifiers_num_samples", type=int, default=None, + help="Number of samples to use for classifiers" + ) + parser.add_argument( + "--endpoint_url", type=nullable_string, default=None, + help="endpoint url where the s3 bucket is exposed." + ) + + # sampling + parser.add_argument( + "--max_samples_per_book", type=int, default=None, + help="Maximum number of samples to use per book" + ) + parser.add_argument( + "--max_paragraphs_per_book_sample", type=int, default=None, + help="Maximum number of paragraphs to use per book sample" + ) + + return parser.parse_args() + + +def main(artifacts_dir: str, cc_input: str, cc_input_base_uri: str, + cache_dir: str, overwrite: bool, lang: str, + max_workers: int, endpoint_url: str, + dsir_num_samples: int, dsir_feature_dim: int, + classifiers_num_samples: int, max_samples_per_book: int, + max_paragraphs_per_book_sample: int + ): + if max_workers is None: + max_workers = os.cpu_count() - 2 + else: + max_workers = min(max_workers, os.cpu_count() - 2) + + # parse config + num_samples = max(dsir_num_samples, classifiers_num_samples) + + # build output directory + datasets_dir = Path(artifacts_dir) / "datasets" / f"{lang}" + datasets_dir.mkdir(exist_ok=True, parents=True) + timestamp = dt.now().strftime("%Y%m%d-%H%M%S") + logfile = Path(artifacts_dir) / f"logs/{lang}_artifacts@{timestamp}.log" + logfile.parent.mkdir(exist_ok=True, parents=True) + configure_logger(logfile=logfile, level=logging.INFO) + logger = logging.getLogger() + + logger.info(f"Start preparing artifacts for {lang}") + logger.info(f"num_samples: {num_samples}") + logger.info(f"PYTHONHASHSEED: {os.environ.get('PYTHONHASHSEED')}") + + # download ccnet dataset + ccnet = CCNetDownloader( + lang=lang, artifacts_dir=artifacts_dir, cc_input=cc_input, + cc_input_base_uri=cc_input_base_uri, num_samples=num_samples, + max_workers=max_workers, endpoint_url=endpoint_url + ) + ccnet.run(logger=logger) + + # download wikipedia dataset + wikipedia = WikipediaDownloader( + lang=lang, out_dir=datasets_dir, + overwrite=overwrite, cache_dir=cache_dir, + max_samples=num_samples + ) + wikipedia.run(logger=logger) + + # download openwebtext dataset + openwebtext = OpenWebTextDownloader( + lang=lang, out_dir=datasets_dir, + overwrite=overwrite, cache_dir=cache_dir, + max_samples=num_samples + ) + openwebtext.run(logger=logger) + + # download books dataset + books = BooksDownloader( + lang=lang, out_dir=datasets_dir, + overwrite=overwrite, cache_dir=cache_dir, + max_samples=num_samples, + max_paragraphs_per_sample=max_paragraphs_per_book_sample, + max_samples_per_book=max_samples_per_book, + ) + books.run(logger=logger) + + # compute hash distributions + hash_dist = HashDist( + artifacts_dir=artifacts_dir, + num_samples=num_samples, + buckets=dsir_feature_dim, + max_workers=max_workers, + logger=logger + ) + + # compute hash 
distribution for each dataset + for obj in [wikipedia, openwebtext, books, ccnet]: + fp = obj.filepath + + if fp is None: + continue + + hash_dist.run(lang=lang, datafile=fp, dataset=obj.dataset_name) + + if lang == "en": + # compute fasttext palm classifier + target_name = "palm" + target_data = [ + wikipedia.filepath, books.filepath, openwebtext.filepath + ] + else: + # for non-English languages, we use wikipedia as the target + target_name = "wikipedia" + target_data = [wikipedia.filepath] + + trainer = FastTextTrainer( + artifacts_dir=artifacts_dir, + ccnet_data=ccnet.filepath, + target_data=target_data, + target_name=target_name, + samples_per_class=classifiers_num_samples, + lang=lang + ) + trainer.run(logger=logger) + + logger.info(f"Finished preparing artifacts for {lang}") + + +if __name__ == '__main__': + args = parse_arguments() + main(artifacts_dir=args.artifacts_dir, + cc_input=args.cc_input, + cc_input_base_uri=args.cc_input_base_uri, + cache_dir=args.cache_dir, + overwrite=args.overwrite, + lang=args.lang, + max_workers=args.max_workers, + endpoint_url=args.endpoint_url, + dsir_num_samples=args.dsir_num_samples, + dsir_feature_dim=args.dsir_feature_dim, + classifiers_num_samples=args.classifiers_num_samples, + max_samples_per_book=args.max_samples_per_book, + max_paragraphs_per_book_sample=args.max_paragraphs_per_book_sample + ) diff --git a/cc-multilingual-main/dedup/RedPajama-Data/configs/rp_v2.0.conf b/cc-multilingual-main/dedup/RedPajama-Data/configs/rp_v2.0.conf new file mode 100644 index 0000000000000000000000000000000000000000..a504dfb48429ddb1b3886b7ac49ea98909d04467 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/configs/rp_v2.0.conf @@ -0,0 +1,149 @@ +# run parameters +DATA_ROOT="" +ARTIFACTS_ID="rpv2" +INPUT_BASE_URI="file:///home1/BharatGPT_hi/cc_net/2023-50-processed/mined_new/2023-50/" +OUTPUT_BASE_URI="file:///home1/BharatGPT_hi/dedup/RedPajama-Data/out/" +MAX_DOCS=-1 +# bash array elements are whitespace-separated; comma-separating them would +# yield a single comma-joined element +LANGUAGES=("as" "bn" "gu" "kn" "hi" "ml" "mr" "ne" "or" "pb" "sa" "sd" "ta" "ur" "te" "ks" "sat" "mai" "mni" "kok" "doi" "brx") + +# filename keep filters +FILENAME_KEEP_PATTERNS=( +".*/[a-z]{2}_middle\.json\.gz" +".*/[a-z]{2}_head\.json\.gz" +) + +# General parameters used across steps +S3_ENDPOINT_URL="" +S3_BUCKET="" +S3_CCNET_PREFIX="/rs_cc_net" +S3_PROFILE="" + +# Docker +DOCKER_S3_ENDPOINT_URL="" +DOCKER_MNT_DIR="/mnt/data" +DOCKER_REPO="" + +# Dedupe +MINHASH_NGRAM_SIZE="13" +MINHASH_NUM_PERMUTATIONS="128" +MINHASH_SIMILARITIES=(1.0 0.9 0.8 0.7) + +# DSIR +DSIR_NUM_SAMPLES=500000 +DSIR_FEATURE_DIM=10000 + +# Classifiers +CLASSIFIERS_NUM_SAMPLES=75000 + +# sampling for books artifacts +MAX_SAMPLES_PER_BOOK=1000 +MAX_PARAGRAPHS_PER_BOOK_SAMPLE=250 + +# Others +INPUTS_PER_PROCESS=20 # the number of files processed by one process at a time + +# domain blacklist categories +DOMAIN_BLACKLIST_CATEGORIES=( +"adult" +"agressive" +"agressif" +"arjel" +"chat" +"dating" +"ddos" +"filehosting" +"gambling" +"porn" +"mixed_adult" +"phishing" +"violence" +) + +# CC snapshot ids to process +CC_SNAPSHOT_IDS=( +"2014-15" +"2014-23" +"2014-35" +"2014-41" +"2014-42" +"2014-49" +"2014-52" +"2015-14" +"2015-22" +"2015-27" +"2015-32" +"2015-35" +"2015-40" +"2015-48" +"2016-07" +"2016-18" +"2016-22" +"2016-26" +"2016-30" +"2016-36" +"2016-40" +"2016-44" +"2016-50" +"2017-04" +"2017-09" +"2017-17" +"2017-22" +"2017-26" +"2017-30" +"2017-34" +"2017-39" +"2017-43" +"2017-47" +"2017-51" +"2018-05" +"2018-09" +"2018-13" +"2018-17" +"2018-22" +"2018-26" +"2018-30" +"2018-34" +"2018-39" +"2018-43" +"2018-47"
+"2018-51" +"2019-04" +"2019-09" +"2019-13" +"2019-18" +"2019-22" +"2019-26" +"2019-30" +"2019-35" +"2019-39" +"2019-43" +"2019-47" +"2019-51" +"2020-05" +"2020-10" +"2020-16" +"2020-24" +"2020-29" +"2020-34" +"2020-40" +"2020-45" +"2020-50" +"2021-04" +"2021-10" +"2021-17" +"2021-21" +"2021-25" +"2021-31" +"2021-39" +"2021-43" +"2021-49" +"2022-05" +"2022-21" +"2022-27" +"2022-33" +"2022-40" +"2022-49" +"2023-06" +"2023-14" +"2023-50" +) \ No newline at end of file diff --git a/cc-multilingual-main/dedup/RedPajama-Data/exact-bloom.sh b/cc-multilingual-main/dedup/RedPajama-Data/exact-bloom.sh new file mode 100644 index 0000000000000000000000000000000000000000..73e864857621a8c0265e6c00cc83913e79b8cc46 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/exact-bloom.sh @@ -0,0 +1,16 @@ +outputfolder="/mnt/weka/peacock/wet-data/output" +redpajama="/home/sdp/Common_crawl/cc-multilingual-main/cc-multilingual-main/dedup/RedPajama-Data" +cd $outputfolder/mined/$1 +lis=("as" "bn" "gu" "kn" "hi" "ml" "mr" "ne" "or" "sa" "sd" "ta" "ur" "te" "mai") +mkdir -p $redpajama/listings/$1 +for l in "${lis[@]}"; do +ls "$l"*.json.gz > $redpajama/listings/$1/"$l".txt; +done +mkdir -p $outputfolder/bloomfilter/$1 +cd $redpajama +for l in "${lis[@]}"; do + mkdir -p $outputfolder/bloomfilter/$1/"$l" + doc_count=$(tail -n 4 $outputfolder/minhash-signatures/$1/"$l"/logs/$1/*.log | head -n 1 | awk -F ': ' '{print $2}' | tr -d ',') + cap=$((doc_count * 10)) + python3 app/src/bloomfilter.py --listings listings/$1/"$l".txt --input_base_uri "file://$outputfolder/mined/$1" --output_dir file://$outputfolder/bloomfilter/$1/"$l" --parallel_readers 128 --batch_size 64 --capacity $cap --error_rate 0.01; +done \ No newline at end of file diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/as.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/as.txt new file mode 100644 index 0000000000000000000000000000000000000000..a8ded8254143b855dcc2863e9ecd74c87a7be95d --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/as.txt @@ -0,0 +1,23 @@ +as_all_0000.json.gz +as_all_0001.json.gz +as_all_0002.json.gz +as_all_0003.json.gz +as_all_0004.json.gz +as_all_0005.json.gz +as_all_0006.json.gz +as_all_0007.json.gz +as_all_0008.json.gz +as_all_0009.json.gz +as_all_0010.json.gz +as_all_0011.json.gz +as_all_0012.json.gz +as_all_0013.json.gz +as_all_0014.json.gz +as_all_0015.json.gz +as_all_0016.json.gz +as_all_0017.json.gz +as_all_0018.json.gz +as_all_0019.json.gz +as_all_0020.json.gz +as_all_0021.json.gz +as_all_0022.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/bn.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/bn.txt new file mode 100644 index 0000000000000000000000000000000000000000..02b460dee0b848051c8ee18525f694e63bb63179 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/bn.txt @@ -0,0 +1,1600 @@ +bn_all_0000.json.gz +bn_all_0001.json.gz +bn_all_0002.json.gz +bn_all_0003.json.gz +bn_all_0004.json.gz +bn_all_0005.json.gz +bn_all_0006.json.gz +bn_all_0007.json.gz +bn_all_0008.json.gz +bn_all_0009.json.gz +bn_all_0010.json.gz +bn_all_0011.json.gz +bn_all_0012.json.gz +bn_all_0013.json.gz +bn_all_0014.json.gz +bn_all_0015.json.gz +bn_all_0016.json.gz +bn_all_0017.json.gz +bn_all_0018.json.gz +bn_all_0019.json.gz +bn_all_0020.json.gz +bn_all_0021.json.gz +bn_all_0022.json.gz +bn_all_0023.json.gz +bn_all_0024.json.gz +bn_all_0025.json.gz +bn_all_0026.json.gz +bn_all_0027.json.gz +bn_all_0028.json.gz 
+bn_all_0029.json.gz +bn_all_0030.json.gz +bn_all_0031.json.gz +bn_all_0032.json.gz +bn_all_0033.json.gz +bn_all_0034.json.gz +bn_all_0035.json.gz +bn_all_0036.json.gz +bn_all_0037.json.gz +bn_all_0038.json.gz +bn_all_0039.json.gz +bn_all_0040.json.gz +bn_all_0041.json.gz +bn_all_0042.json.gz +bn_all_0043.json.gz +bn_all_0044.json.gz +bn_all_0045.json.gz +bn_all_0046.json.gz +bn_all_0047.json.gz +bn_all_0048.json.gz +bn_all_0049.json.gz +bn_all_0050.json.gz +bn_all_0051.json.gz +bn_all_0052.json.gz +bn_all_0053.json.gz +bn_all_0054.json.gz +bn_all_0055.json.gz +bn_all_0056.json.gz +bn_all_0057.json.gz +bn_all_0058.json.gz +bn_all_0059.json.gz +bn_all_0060.json.gz +bn_all_0061.json.gz +bn_all_0062.json.gz +bn_all_0063.json.gz +bn_all_0064.json.gz +bn_all_0065.json.gz +bn_all_0066.json.gz +bn_all_0067.json.gz +bn_all_0068.json.gz +bn_all_0069.json.gz +bn_all_0070.json.gz +bn_all_0071.json.gz +bn_all_0072.json.gz +bn_all_0073.json.gz +bn_all_0074.json.gz +bn_all_0075.json.gz +bn_all_0076.json.gz +bn_all_0077.json.gz +bn_all_0078.json.gz +bn_all_0079.json.gz +bn_all_0080.json.gz +bn_all_0081.json.gz +bn_all_0082.json.gz +bn_all_0083.json.gz +bn_all_0084.json.gz +bn_all_0085.json.gz +bn_all_0086.json.gz +bn_all_0087.json.gz +bn_all_0088.json.gz +bn_all_0089.json.gz +bn_all_0090.json.gz +bn_all_0091.json.gz +bn_all_0092.json.gz +bn_all_0093.json.gz +bn_all_0094.json.gz +bn_all_0095.json.gz +bn_all_0096.json.gz +bn_all_0097.json.gz +bn_all_0098.json.gz +bn_all_0099.json.gz +bn_all_0100.json.gz +bn_all_0101.json.gz +bn_all_0102.json.gz +bn_all_0103.json.gz +bn_all_0104.json.gz +bn_all_0105.json.gz +bn_all_0106.json.gz +bn_all_0107.json.gz +bn_all_0108.json.gz +bn_all_0109.json.gz +bn_all_0110.json.gz +bn_all_0111.json.gz +bn_all_0112.json.gz +bn_all_0113.json.gz +bn_all_0114.json.gz +bn_all_0115.json.gz +bn_all_0116.json.gz +bn_all_0117.json.gz +bn_all_0118.json.gz +bn_all_0119.json.gz +bn_all_0120.json.gz +bn_all_0121.json.gz +bn_all_0122.json.gz +bn_all_0123.json.gz +bn_all_0124.json.gz +bn_all_0125.json.gz +bn_all_0126.json.gz +bn_all_0127.json.gz +bn_all_0128.json.gz +bn_all_0129.json.gz +bn_all_0130.json.gz +bn_all_0131.json.gz +bn_all_0132.json.gz +bn_all_0133.json.gz +bn_all_0134.json.gz +bn_all_0135.json.gz +bn_all_0136.json.gz +bn_all_0137.json.gz +bn_all_0138.json.gz +bn_all_0139.json.gz +bn_all_0140.json.gz +bn_all_0141.json.gz +bn_all_0142.json.gz +bn_all_0143.json.gz +bn_all_0144.json.gz +bn_all_0145.json.gz +bn_all_0146.json.gz +bn_all_0147.json.gz +bn_all_0148.json.gz +bn_all_0149.json.gz +bn_all_0150.json.gz +bn_all_0151.json.gz +bn_all_0152.json.gz +bn_all_0153.json.gz +bn_all_0154.json.gz +bn_all_0155.json.gz +bn_all_0156.json.gz +bn_all_0157.json.gz +bn_all_0158.json.gz +bn_all_0159.json.gz +bn_all_0160.json.gz +bn_all_0161.json.gz +bn_all_0162.json.gz +bn_all_0163.json.gz +bn_all_0164.json.gz +bn_all_0165.json.gz +bn_all_0166.json.gz +bn_all_0167.json.gz +bn_all_0168.json.gz +bn_all_0169.json.gz +bn_all_0170.json.gz +bn_all_0171.json.gz +bn_all_0172.json.gz +bn_all_0173.json.gz +bn_all_0174.json.gz +bn_all_0175.json.gz +bn_all_0176.json.gz +bn_all_0177.json.gz +bn_all_0178.json.gz +bn_all_0179.json.gz +bn_all_0180.json.gz +bn_all_0181.json.gz +bn_all_0182.json.gz +bn_all_0183.json.gz +bn_all_0184.json.gz +bn_all_0185.json.gz +bn_all_0186.json.gz +bn_all_0187.json.gz +bn_all_0188.json.gz +bn_all_0189.json.gz +bn_all_0190.json.gz +bn_all_0191.json.gz +bn_all_0192.json.gz +bn_all_0193.json.gz +bn_all_0194.json.gz +bn_all_0195.json.gz +bn_all_0196.json.gz +bn_all_0197.json.gz 
+bn_all_0198.json.gz +bn_all_0199.json.gz +bn_all_0200.json.gz +bn_all_0201.json.gz +bn_all_0202.json.gz +bn_all_0203.json.gz +bn_all_0204.json.gz +bn_all_0205.json.gz +bn_all_0206.json.gz +bn_all_0207.json.gz +bn_all_0208.json.gz +bn_all_0209.json.gz +bn_all_0210.json.gz +bn_all_0211.json.gz +bn_all_0212.json.gz +bn_all_0213.json.gz +bn_all_0214.json.gz +bn_all_0215.json.gz +bn_all_0216.json.gz +bn_all_0217.json.gz +bn_all_0218.json.gz +bn_all_0219.json.gz +bn_all_0220.json.gz +bn_all_0221.json.gz +bn_all_0222.json.gz +bn_all_0223.json.gz +bn_all_0224.json.gz +bn_all_0225.json.gz +bn_all_0226.json.gz +bn_all_0227.json.gz +bn_all_0228.json.gz +bn_all_0229.json.gz +bn_all_0230.json.gz +bn_all_0231.json.gz +bn_all_0232.json.gz +bn_all_0233.json.gz +bn_all_0234.json.gz +bn_all_0235.json.gz +bn_all_0236.json.gz +bn_all_0237.json.gz +bn_all_0238.json.gz +bn_all_0239.json.gz +bn_all_0240.json.gz +bn_all_0241.json.gz +bn_all_0242.json.gz +bn_all_0243.json.gz +bn_all_0244.json.gz +bn_all_0245.json.gz +bn_all_0246.json.gz +bn_all_0247.json.gz +bn_all_0248.json.gz +bn_all_0249.json.gz +bn_all_0250.json.gz +bn_all_0251.json.gz +bn_all_0252.json.gz +bn_all_0253.json.gz +bn_all_0254.json.gz +bn_all_0255.json.gz +bn_all_0256.json.gz +bn_all_0257.json.gz +bn_all_0258.json.gz +bn_all_0259.json.gz +bn_all_0260.json.gz +bn_all_0261.json.gz +bn_all_0262.json.gz +bn_all_0263.json.gz +bn_all_0264.json.gz +bn_all_0265.json.gz +bn_all_0266.json.gz +bn_all_0267.json.gz +bn_all_0268.json.gz +bn_all_0269.json.gz +bn_all_0270.json.gz +bn_all_0271.json.gz +bn_all_0272.json.gz +bn_all_0273.json.gz +bn_all_0274.json.gz +bn_all_0275.json.gz +bn_all_0276.json.gz +bn_all_0277.json.gz +bn_all_0278.json.gz +bn_all_0279.json.gz +bn_all_0280.json.gz +bn_all_0281.json.gz +bn_all_0282.json.gz +bn_all_0283.json.gz +bn_all_0284.json.gz +bn_all_0285.json.gz +bn_all_0286.json.gz +bn_all_0287.json.gz +bn_all_0288.json.gz +bn_all_0289.json.gz +bn_all_0290.json.gz +bn_all_0291.json.gz +bn_all_0292.json.gz +bn_all_0293.json.gz +bn_all_0294.json.gz +bn_all_0295.json.gz +bn_all_0296.json.gz +bn_all_0297.json.gz +bn_all_0298.json.gz +bn_all_0299.json.gz +bn_all_0300.json.gz +bn_all_0301.json.gz +bn_all_0302.json.gz +bn_all_0303.json.gz +bn_all_0304.json.gz +bn_all_0305.json.gz +bn_all_0306.json.gz +bn_all_0307.json.gz +bn_all_0308.json.gz +bn_all_0309.json.gz +bn_all_0310.json.gz +bn_all_0311.json.gz +bn_all_0312.json.gz +bn_all_0313.json.gz +bn_all_0314.json.gz +bn_all_0315.json.gz +bn_all_0316.json.gz +bn_all_0317.json.gz +bn_all_0318.json.gz +bn_all_0319.json.gz +bn_all_0320.json.gz +bn_all_0321.json.gz +bn_all_0322.json.gz +bn_all_0323.json.gz +bn_all_0324.json.gz +bn_all_0325.json.gz +bn_all_0326.json.gz +bn_all_0327.json.gz +bn_all_0328.json.gz +bn_all_0329.json.gz +bn_all_0330.json.gz +bn_all_0331.json.gz +bn_all_0332.json.gz +bn_all_0333.json.gz +bn_all_0334.json.gz +bn_all_0335.json.gz +bn_all_0336.json.gz +bn_all_0337.json.gz +bn_all_0338.json.gz +bn_all_0339.json.gz +bn_all_0340.json.gz +bn_all_0341.json.gz +bn_all_0342.json.gz +bn_all_0343.json.gz +bn_all_0344.json.gz +bn_all_0345.json.gz +bn_all_0346.json.gz +bn_all_0347.json.gz +bn_all_0348.json.gz +bn_all_0349.json.gz +bn_all_0350.json.gz +bn_all_0351.json.gz +bn_all_0352.json.gz +bn_all_0353.json.gz +bn_all_0354.json.gz +bn_all_0355.json.gz +bn_all_0356.json.gz +bn_all_0357.json.gz +bn_all_0358.json.gz +bn_all_0359.json.gz +bn_all_0360.json.gz +bn_all_0361.json.gz +bn_all_0362.json.gz +bn_all_0363.json.gz +bn_all_0364.json.gz +bn_all_0365.json.gz +bn_all_0366.json.gz 
+bn_all_0367.json.gz +bn_all_0368.json.gz +bn_all_0369.json.gz +bn_all_0370.json.gz +bn_all_0371.json.gz +bn_all_0372.json.gz +bn_all_0373.json.gz +bn_all_0374.json.gz +bn_all_0375.json.gz +bn_all_0376.json.gz +bn_all_0377.json.gz +bn_all_0378.json.gz +bn_all_0379.json.gz +bn_all_0380.json.gz +bn_all_0381.json.gz +bn_all_0382.json.gz +bn_all_0383.json.gz +bn_all_0384.json.gz +bn_all_0385.json.gz +bn_all_0386.json.gz +bn_all_0387.json.gz +bn_all_0388.json.gz +bn_all_0389.json.gz +bn_all_0390.json.gz +bn_all_0391.json.gz +bn_all_0392.json.gz +bn_all_0393.json.gz +bn_all_0394.json.gz +bn_all_0395.json.gz +bn_all_0396.json.gz +bn_all_0397.json.gz +bn_all_0398.json.gz +bn_all_0399.json.gz +bn_all_0400.json.gz +bn_all_0401.json.gz +bn_all_0402.json.gz +bn_all_0403.json.gz +bn_all_0404.json.gz +bn_all_0405.json.gz +bn_all_0406.json.gz +bn_all_0407.json.gz +bn_all_0408.json.gz +bn_all_0409.json.gz +bn_all_0410.json.gz +bn_all_0411.json.gz +bn_all_0412.json.gz +bn_all_0413.json.gz +bn_all_0414.json.gz +bn_all_0415.json.gz +bn_all_0416.json.gz +bn_all_0417.json.gz +bn_all_0418.json.gz +bn_all_0419.json.gz +bn_all_0420.json.gz +bn_all_0421.json.gz +bn_all_0422.json.gz +bn_all_0423.json.gz +bn_all_0424.json.gz +bn_all_0425.json.gz +bn_all_0426.json.gz +bn_all_0427.json.gz +bn_all_0428.json.gz +bn_all_0429.json.gz +bn_all_0430.json.gz +bn_all_0431.json.gz +bn_all_0432.json.gz +bn_all_0433.json.gz +bn_all_0434.json.gz +bn_all_0435.json.gz +bn_all_0436.json.gz +bn_all_0437.json.gz +bn_all_0438.json.gz +bn_all_0439.json.gz +bn_all_0440.json.gz +bn_all_0441.json.gz +bn_all_0442.json.gz +bn_all_0443.json.gz +bn_all_0444.json.gz +bn_all_0445.json.gz +bn_all_0446.json.gz +bn_all_0447.json.gz +bn_all_0448.json.gz +bn_all_0449.json.gz +bn_all_0450.json.gz +bn_all_0451.json.gz +bn_all_0452.json.gz +bn_all_0453.json.gz +bn_all_0454.json.gz +bn_all_0455.json.gz +bn_all_0456.json.gz +bn_all_0457.json.gz +bn_all_0458.json.gz +bn_all_0459.json.gz +bn_all_0460.json.gz +bn_all_0461.json.gz +bn_all_0462.json.gz +bn_all_0463.json.gz +bn_all_0464.json.gz +bn_all_0465.json.gz +bn_all_0466.json.gz +bn_all_0467.json.gz +bn_all_0468.json.gz +bn_all_0469.json.gz +bn_all_0470.json.gz +bn_all_0471.json.gz +bn_all_0472.json.gz +bn_all_0473.json.gz +bn_all_0474.json.gz +bn_all_0475.json.gz +bn_all_0476.json.gz +bn_all_0477.json.gz +bn_all_0478.json.gz +bn_all_0479.json.gz +bn_all_0480.json.gz +bn_all_0481.json.gz +bn_all_0482.json.gz +bn_all_0483.json.gz +bn_all_0484.json.gz +bn_all_0485.json.gz +bn_all_0486.json.gz +bn_all_0487.json.gz +bn_all_0488.json.gz +bn_all_0489.json.gz +bn_all_0490.json.gz +bn_all_0491.json.gz +bn_all_0492.json.gz +bn_all_0493.json.gz +bn_all_0494.json.gz +bn_all_0495.json.gz +bn_all_0496.json.gz +bn_all_0497.json.gz +bn_all_0498.json.gz +bn_all_0499.json.gz +bn_all_0500.json.gz +bn_all_0501.json.gz +bn_all_0502.json.gz +bn_all_0503.json.gz +bn_all_0504.json.gz +bn_all_0505.json.gz +bn_all_0506.json.gz +bn_all_0507.json.gz +bn_all_0508.json.gz +bn_all_0509.json.gz +bn_all_0510.json.gz +bn_all_0511.json.gz +bn_all_0512.json.gz +bn_all_0513.json.gz +bn_all_0514.json.gz +bn_all_0515.json.gz +bn_all_0516.json.gz +bn_all_0517.json.gz +bn_all_0518.json.gz +bn_all_0519.json.gz +bn_all_0520.json.gz +bn_all_0521.json.gz +bn_all_0522.json.gz +bn_all_0523.json.gz +bn_all_0524.json.gz +bn_all_0525.json.gz +bn_all_0526.json.gz +bn_all_0527.json.gz +bn_all_0528.json.gz +bn_all_0529.json.gz +bn_all_0530.json.gz +bn_all_0531.json.gz +bn_all_0532.json.gz +bn_all_0533.json.gz +bn_all_0534.json.gz +bn_all_0535.json.gz 
+bn_all_0536.json.gz +bn_all_0537.json.gz +bn_all_0538.json.gz +bn_all_0539.json.gz +bn_all_0540.json.gz +bn_all_0541.json.gz +bn_all_0542.json.gz +bn_all_0543.json.gz +bn_all_0544.json.gz +bn_all_0545.json.gz +bn_all_0546.json.gz +bn_all_0547.json.gz +bn_all_0548.json.gz +bn_all_0549.json.gz +bn_all_0550.json.gz +bn_all_0551.json.gz +bn_all_0552.json.gz +bn_all_0553.json.gz +bn_all_0554.json.gz +bn_all_0555.json.gz +bn_all_0556.json.gz +bn_all_0557.json.gz +bn_all_0558.json.gz +bn_all_0559.json.gz +bn_all_0560.json.gz +bn_all_0561.json.gz +bn_all_0562.json.gz +bn_all_0563.json.gz +bn_all_0564.json.gz +bn_all_0565.json.gz +bn_all_0566.json.gz +bn_all_0567.json.gz +bn_all_0568.json.gz +bn_all_0569.json.gz +bn_all_0570.json.gz +bn_all_0571.json.gz +bn_all_0572.json.gz +bn_all_0573.json.gz +bn_all_0574.json.gz +bn_all_0575.json.gz +bn_all_0576.json.gz +bn_all_0577.json.gz +bn_all_0578.json.gz +bn_all_0579.json.gz +bn_all_0580.json.gz +bn_all_0581.json.gz +bn_all_0582.json.gz +bn_all_0583.json.gz +bn_all_0584.json.gz +bn_all_0585.json.gz +bn_all_0586.json.gz +bn_all_0587.json.gz +bn_all_0588.json.gz +bn_all_0589.json.gz +bn_all_0590.json.gz +bn_all_0591.json.gz +bn_all_0592.json.gz +bn_all_0593.json.gz +bn_all_0594.json.gz +bn_all_0595.json.gz +bn_all_0596.json.gz +bn_all_0597.json.gz +bn_all_0598.json.gz +bn_all_0599.json.gz +bn_all_0600.json.gz +bn_all_0601.json.gz +bn_all_0602.json.gz +bn_all_0603.json.gz +bn_all_0604.json.gz +bn_all_0605.json.gz +bn_all_0606.json.gz +bn_all_0607.json.gz +bn_all_0608.json.gz +bn_all_0609.json.gz +bn_all_0610.json.gz +bn_all_0611.json.gz +bn_all_0612.json.gz +bn_all_0613.json.gz +bn_all_0614.json.gz +bn_all_0615.json.gz +bn_all_0616.json.gz +bn_all_0617.json.gz +bn_all_0618.json.gz +bn_all_0619.json.gz +bn_all_0620.json.gz +bn_all_0621.json.gz +bn_all_0622.json.gz +bn_all_0623.json.gz +bn_all_0624.json.gz +bn_all_0625.json.gz +bn_all_0626.json.gz +bn_all_0627.json.gz +bn_all_0628.json.gz +bn_all_0629.json.gz +bn_all_0630.json.gz +bn_all_0631.json.gz +bn_all_0632.json.gz +bn_all_0633.json.gz +bn_all_0634.json.gz +bn_all_0635.json.gz +bn_all_0636.json.gz +bn_all_0637.json.gz +bn_all_0638.json.gz +bn_all_0639.json.gz +bn_all_0640.json.gz +bn_all_0641.json.gz +bn_all_0642.json.gz +bn_all_0643.json.gz +bn_all_0644.json.gz +bn_all_0645.json.gz +bn_all_0646.json.gz +bn_all_0647.json.gz +bn_all_0648.json.gz +bn_all_0649.json.gz +bn_all_0650.json.gz +bn_all_0651.json.gz +bn_all_0652.json.gz +bn_all_0653.json.gz +bn_all_0654.json.gz +bn_all_0655.json.gz +bn_all_0656.json.gz +bn_all_0657.json.gz +bn_all_0658.json.gz +bn_all_0659.json.gz +bn_all_0660.json.gz +bn_all_0661.json.gz +bn_all_0662.json.gz +bn_all_0663.json.gz +bn_all_0664.json.gz +bn_all_0665.json.gz +bn_all_0666.json.gz +bn_all_0667.json.gz +bn_all_0668.json.gz +bn_all_0669.json.gz +bn_all_0670.json.gz +bn_all_0671.json.gz +bn_all_0672.json.gz +bn_all_0673.json.gz +bn_all_0674.json.gz +bn_all_0675.json.gz +bn_all_0676.json.gz +bn_all_0677.json.gz +bn_all_0678.json.gz +bn_all_0679.json.gz +bn_all_0680.json.gz +bn_all_0681.json.gz +bn_all_0682.json.gz +bn_all_0683.json.gz +bn_all_0684.json.gz +bn_all_0685.json.gz +bn_all_0686.json.gz +bn_all_0687.json.gz +bn_all_0688.json.gz +bn_all_0689.json.gz +bn_all_0690.json.gz +bn_all_0691.json.gz +bn_all_0692.json.gz +bn_all_0693.json.gz +bn_all_0694.json.gz +bn_all_0695.json.gz +bn_all_0696.json.gz +bn_all_0697.json.gz +bn_all_0698.json.gz +bn_all_0699.json.gz +bn_all_0700.json.gz +bn_all_0701.json.gz +bn_all_0702.json.gz +bn_all_0703.json.gz +bn_all_0704.json.gz 
+bn_all_0705.json.gz +bn_all_0706.json.gz +bn_all_0707.json.gz +bn_all_0708.json.gz +bn_all_0709.json.gz +bn_all_0710.json.gz +bn_all_0711.json.gz +bn_all_0712.json.gz +bn_all_0713.json.gz +bn_all_0714.json.gz +bn_all_0715.json.gz +bn_all_0716.json.gz +bn_all_0717.json.gz +bn_all_0718.json.gz +bn_all_0719.json.gz +bn_all_0720.json.gz +bn_all_0721.json.gz +bn_all_0722.json.gz +bn_all_0723.json.gz +bn_all_0724.json.gz +bn_all_0725.json.gz +bn_all_0726.json.gz +bn_all_0727.json.gz +bn_all_0728.json.gz +bn_all_0729.json.gz +bn_all_0730.json.gz +bn_all_0731.json.gz +bn_all_0732.json.gz +bn_all_0733.json.gz +bn_all_0734.json.gz +bn_all_0735.json.gz +bn_all_0736.json.gz +bn_all_0737.json.gz +bn_all_0738.json.gz +bn_all_0739.json.gz +bn_all_0740.json.gz +bn_all_0741.json.gz +bn_all_0742.json.gz +bn_all_0743.json.gz +bn_all_0744.json.gz +bn_all_0745.json.gz +bn_all_0746.json.gz +bn_all_0747.json.gz +bn_all_0748.json.gz +bn_all_0749.json.gz +bn_all_0750.json.gz +bn_all_0751.json.gz +bn_all_0752.json.gz +bn_all_0753.json.gz +bn_all_0754.json.gz +bn_all_0755.json.gz +bn_all_0756.json.gz +bn_all_0757.json.gz +bn_all_0758.json.gz +bn_all_0759.json.gz +bn_all_0760.json.gz +bn_all_0761.json.gz +bn_all_0762.json.gz +bn_all_0763.json.gz +bn_all_0764.json.gz +bn_all_0765.json.gz +bn_all_0766.json.gz +bn_all_0767.json.gz +bn_all_0768.json.gz +bn_all_0769.json.gz +bn_all_0770.json.gz +bn_all_0771.json.gz +bn_all_0772.json.gz +bn_all_0773.json.gz +bn_all_0774.json.gz +bn_all_0775.json.gz +bn_all_0776.json.gz +bn_all_0777.json.gz +bn_all_0778.json.gz +bn_all_0779.json.gz +bn_all_0780.json.gz +bn_all_0781.json.gz +bn_all_0782.json.gz +bn_all_0783.json.gz +bn_all_0784.json.gz +bn_all_0785.json.gz +bn_all_0786.json.gz +bn_all_0787.json.gz +bn_all_0788.json.gz +bn_all_0789.json.gz +bn_all_0790.json.gz +bn_all_0791.json.gz +bn_all_0792.json.gz +bn_all_0793.json.gz +bn_all_0794.json.gz +bn_all_0795.json.gz +bn_all_0796.json.gz +bn_all_0797.json.gz +bn_all_0798.json.gz +bn_all_0799.json.gz +bn_all_0800.json.gz +bn_all_0801.json.gz +bn_all_0802.json.gz +bn_all_0803.json.gz +bn_all_0804.json.gz +bn_all_0805.json.gz +bn_all_0806.json.gz +bn_all_0807.json.gz +bn_all_0808.json.gz +bn_all_0809.json.gz +bn_all_0810.json.gz +bn_all_0811.json.gz +bn_all_0812.json.gz +bn_all_0813.json.gz +bn_all_0814.json.gz +bn_all_0815.json.gz +bn_all_0816.json.gz +bn_all_0817.json.gz +bn_all_0818.json.gz +bn_all_0819.json.gz +bn_all_0820.json.gz +bn_all_0821.json.gz +bn_all_0822.json.gz +bn_all_0823.json.gz +bn_all_0824.json.gz +bn_all_0825.json.gz +bn_all_0826.json.gz +bn_all_0827.json.gz +bn_all_0828.json.gz +bn_all_0829.json.gz +bn_all_0830.json.gz +bn_all_0831.json.gz +bn_all_0832.json.gz +bn_all_0833.json.gz +bn_all_0834.json.gz +bn_all_0835.json.gz +bn_all_0836.json.gz +bn_all_0837.json.gz +bn_all_0838.json.gz +bn_all_0839.json.gz +bn_all_0840.json.gz +bn_all_0841.json.gz +bn_all_0842.json.gz +bn_all_0843.json.gz +bn_all_0844.json.gz +bn_all_0845.json.gz +bn_all_0846.json.gz +bn_all_0847.json.gz +bn_all_0848.json.gz +bn_all_0849.json.gz +bn_all_0850.json.gz +bn_all_0851.json.gz +bn_all_0852.json.gz +bn_all_0853.json.gz +bn_all_0854.json.gz +bn_all_0855.json.gz +bn_all_0856.json.gz +bn_all_0857.json.gz +bn_all_0858.json.gz +bn_all_0859.json.gz +bn_all_0860.json.gz +bn_all_0861.json.gz +bn_all_0862.json.gz +bn_all_0863.json.gz +bn_all_0864.json.gz +bn_all_0865.json.gz +bn_all_0866.json.gz +bn_all_0867.json.gz +bn_all_0868.json.gz +bn_all_0869.json.gz +bn_all_0870.json.gz +bn_all_0871.json.gz +bn_all_0872.json.gz +bn_all_0873.json.gz 
+bn_all_0874.json.gz +bn_all_0875.json.gz +bn_all_0876.json.gz +bn_all_0877.json.gz +bn_all_0878.json.gz +bn_all_0879.json.gz +bn_all_0880.json.gz +bn_all_0881.json.gz +bn_all_0882.json.gz +bn_all_0883.json.gz +bn_all_0884.json.gz +bn_all_0885.json.gz +bn_all_0886.json.gz +bn_all_0887.json.gz +bn_all_0888.json.gz +bn_all_0889.json.gz +bn_all_0890.json.gz +bn_all_0891.json.gz +bn_all_0892.json.gz +bn_all_0893.json.gz +bn_all_0894.json.gz +bn_all_0895.json.gz +bn_all_0896.json.gz +bn_all_0897.json.gz +bn_all_0898.json.gz +bn_all_0899.json.gz +bn_all_0900.json.gz +bn_all_0901.json.gz +bn_all_0902.json.gz +bn_all_0903.json.gz +bn_all_0904.json.gz +bn_all_0905.json.gz +bn_all_0906.json.gz +bn_all_0907.json.gz +bn_all_0908.json.gz +bn_all_0909.json.gz +bn_all_0910.json.gz +bn_all_0911.json.gz +bn_all_0912.json.gz +bn_all_0913.json.gz +bn_all_0914.json.gz +bn_all_0915.json.gz +bn_all_0916.json.gz +bn_all_0917.json.gz +bn_all_0918.json.gz +bn_all_0919.json.gz +bn_all_0920.json.gz +bn_all_0921.json.gz +bn_all_0922.json.gz +bn_all_0923.json.gz +bn_all_0924.json.gz +bn_all_0925.json.gz +bn_all_0926.json.gz +bn_all_0927.json.gz +bn_all_0928.json.gz +bn_all_0929.json.gz +bn_all_0930.json.gz +bn_all_0931.json.gz +bn_all_0932.json.gz +bn_all_0933.json.gz +bn_all_0934.json.gz +bn_all_0935.json.gz +bn_all_0936.json.gz +bn_all_0937.json.gz +bn_all_0938.json.gz +bn_all_0939.json.gz +bn_all_0940.json.gz +bn_all_0941.json.gz +bn_all_0942.json.gz +bn_all_0943.json.gz +bn_all_0944.json.gz +bn_all_0945.json.gz +bn_all_0946.json.gz +bn_all_0947.json.gz +bn_all_0948.json.gz +bn_all_0949.json.gz +bn_all_0950.json.gz +bn_all_0951.json.gz +bn_all_0952.json.gz +bn_all_0953.json.gz +bn_all_0954.json.gz +bn_all_0955.json.gz +bn_all_0956.json.gz +bn_all_0957.json.gz +bn_all_0958.json.gz +bn_all_0959.json.gz +bn_all_0960.json.gz +bn_all_0961.json.gz +bn_all_0962.json.gz +bn_all_0963.json.gz +bn_all_0964.json.gz +bn_all_0965.json.gz +bn_all_0966.json.gz +bn_all_0967.json.gz +bn_all_0968.json.gz +bn_all_0969.json.gz +bn_all_0970.json.gz +bn_all_0971.json.gz +bn_all_0972.json.gz +bn_all_0973.json.gz +bn_all_0974.json.gz +bn_all_0975.json.gz +bn_all_0976.json.gz +bn_all_0977.json.gz +bn_all_0978.json.gz +bn_all_0979.json.gz +bn_all_0980.json.gz +bn_all_0981.json.gz +bn_all_0982.json.gz +bn_all_0983.json.gz +bn_all_0984.json.gz +bn_all_0985.json.gz +bn_all_0986.json.gz +bn_all_0987.json.gz +bn_all_0988.json.gz +bn_all_0989.json.gz +bn_all_0990.json.gz +bn_all_0991.json.gz +bn_all_0992.json.gz +bn_all_0993.json.gz +bn_all_0994.json.gz +bn_all_0995.json.gz +bn_all_0996.json.gz +bn_all_0997.json.gz +bn_all_0998.json.gz +bn_all_0999.json.gz +bn_all_1000.json.gz +bn_all_1001.json.gz +bn_all_1002.json.gz +bn_all_1003.json.gz +bn_all_1004.json.gz +bn_all_1005.json.gz +bn_all_1006.json.gz +bn_all_1007.json.gz +bn_all_1008.json.gz +bn_all_1009.json.gz +bn_all_1010.json.gz +bn_all_1011.json.gz +bn_all_1012.json.gz +bn_all_1013.json.gz +bn_all_1014.json.gz +bn_all_1015.json.gz +bn_all_1016.json.gz +bn_all_1017.json.gz +bn_all_1018.json.gz +bn_all_1019.json.gz +bn_all_1020.json.gz +bn_all_1021.json.gz +bn_all_1022.json.gz +bn_all_1023.json.gz +bn_all_1024.json.gz +bn_all_1025.json.gz +bn_all_1026.json.gz +bn_all_1027.json.gz +bn_all_1028.json.gz +bn_all_1029.json.gz +bn_all_1030.json.gz +bn_all_1031.json.gz +bn_all_1032.json.gz +bn_all_1033.json.gz +bn_all_1034.json.gz +bn_all_1035.json.gz +bn_all_1036.json.gz +bn_all_1037.json.gz +bn_all_1038.json.gz +bn_all_1039.json.gz +bn_all_1040.json.gz +bn_all_1041.json.gz +bn_all_1042.json.gz 
+bn_all_1043.json.gz +bn_all_1044.json.gz +bn_all_1045.json.gz +bn_all_1046.json.gz +bn_all_1047.json.gz +bn_all_1048.json.gz +bn_all_1049.json.gz +bn_all_1050.json.gz +bn_all_1051.json.gz +bn_all_1052.json.gz +bn_all_1053.json.gz +bn_all_1054.json.gz +bn_all_1055.json.gz +bn_all_1056.json.gz +bn_all_1057.json.gz +bn_all_1058.json.gz +bn_all_1059.json.gz +bn_all_1060.json.gz +bn_all_1061.json.gz +bn_all_1062.json.gz +bn_all_1063.json.gz +bn_all_1064.json.gz +bn_all_1065.json.gz +bn_all_1066.json.gz +bn_all_1067.json.gz +bn_all_1068.json.gz +bn_all_1069.json.gz +bn_all_1070.json.gz +bn_all_1071.json.gz +bn_all_1072.json.gz +bn_all_1073.json.gz +bn_all_1074.json.gz +bn_all_1075.json.gz +bn_all_1076.json.gz +bn_all_1077.json.gz +bn_all_1078.json.gz +bn_all_1079.json.gz +bn_all_1080.json.gz +bn_all_1081.json.gz +bn_all_1082.json.gz +bn_all_1083.json.gz +bn_all_1084.json.gz +bn_all_1085.json.gz +bn_all_1086.json.gz +bn_all_1087.json.gz +bn_all_1088.json.gz +bn_all_1089.json.gz +bn_all_1090.json.gz +bn_all_1091.json.gz +bn_all_1092.json.gz +bn_all_1093.json.gz +bn_all_1094.json.gz +bn_all_1095.json.gz +bn_all_1096.json.gz +bn_all_1097.json.gz +bn_all_1098.json.gz +bn_all_1099.json.gz +bn_all_1100.json.gz +bn_all_1101.json.gz +bn_all_1102.json.gz +bn_all_1103.json.gz +bn_all_1104.json.gz +bn_all_1105.json.gz +bn_all_1106.json.gz +bn_all_1107.json.gz +bn_all_1108.json.gz +bn_all_1109.json.gz +bn_all_1110.json.gz +bn_all_1111.json.gz +bn_all_1112.json.gz +bn_all_1113.json.gz +bn_all_1114.json.gz +bn_all_1115.json.gz +bn_all_1116.json.gz +bn_all_1117.json.gz +bn_all_1118.json.gz +bn_all_1119.json.gz +bn_all_1120.json.gz +bn_all_1121.json.gz +bn_all_1122.json.gz +bn_all_1123.json.gz +bn_all_1124.json.gz +bn_all_1125.json.gz +bn_all_1126.json.gz +bn_all_1127.json.gz +bn_all_1128.json.gz +bn_all_1129.json.gz +bn_all_1130.json.gz +bn_all_1131.json.gz +bn_all_1132.json.gz +bn_all_1133.json.gz +bn_all_1134.json.gz +bn_all_1135.json.gz +bn_all_1136.json.gz +bn_all_1137.json.gz +bn_all_1138.json.gz +bn_all_1139.json.gz +bn_all_1140.json.gz +bn_all_1141.json.gz +bn_all_1142.json.gz +bn_all_1143.json.gz +bn_all_1144.json.gz +bn_all_1145.json.gz +bn_all_1146.json.gz +bn_all_1147.json.gz +bn_all_1148.json.gz +bn_all_1149.json.gz +bn_all_1150.json.gz +bn_all_1151.json.gz +bn_all_1152.json.gz +bn_all_1153.json.gz +bn_all_1154.json.gz +bn_all_1155.json.gz +bn_all_1156.json.gz +bn_all_1157.json.gz +bn_all_1158.json.gz +bn_all_1159.json.gz +bn_all_1160.json.gz +bn_all_1161.json.gz +bn_all_1162.json.gz +bn_all_1163.json.gz +bn_all_1164.json.gz +bn_all_1165.json.gz +bn_all_1166.json.gz +bn_all_1167.json.gz +bn_all_1168.json.gz +bn_all_1169.json.gz +bn_all_1170.json.gz +bn_all_1171.json.gz +bn_all_1172.json.gz +bn_all_1173.json.gz +bn_all_1174.json.gz +bn_all_1175.json.gz +bn_all_1176.json.gz +bn_all_1177.json.gz +bn_all_1178.json.gz +bn_all_1179.json.gz +bn_all_1180.json.gz +bn_all_1181.json.gz +bn_all_1182.json.gz +bn_all_1183.json.gz +bn_all_1184.json.gz +bn_all_1185.json.gz +bn_all_1186.json.gz +bn_all_1187.json.gz +bn_all_1188.json.gz +bn_all_1189.json.gz +bn_all_1190.json.gz +bn_all_1191.json.gz +bn_all_1192.json.gz +bn_all_1193.json.gz +bn_all_1194.json.gz +bn_all_1195.json.gz +bn_all_1196.json.gz +bn_all_1197.json.gz +bn_all_1198.json.gz +bn_all_1199.json.gz +bn_all_1200.json.gz +bn_all_1201.json.gz +bn_all_1202.json.gz +bn_all_1203.json.gz +bn_all_1204.json.gz +bn_all_1205.json.gz +bn_all_1206.json.gz +bn_all_1207.json.gz +bn_all_1208.json.gz +bn_all_1209.json.gz +bn_all_1210.json.gz +bn_all_1211.json.gz 
+bn_all_1212.json.gz +bn_all_1213.json.gz +bn_all_1214.json.gz +bn_all_1215.json.gz +bn_all_1216.json.gz +bn_all_1217.json.gz +bn_all_1218.json.gz +bn_all_1219.json.gz +bn_all_1220.json.gz +bn_all_1221.json.gz +bn_all_1222.json.gz +bn_all_1223.json.gz +bn_all_1224.json.gz +bn_all_1225.json.gz +bn_all_1226.json.gz +bn_all_1227.json.gz +bn_all_1228.json.gz +bn_all_1229.json.gz +bn_all_1230.json.gz +bn_all_1231.json.gz +bn_all_1232.json.gz +bn_all_1233.json.gz +bn_all_1234.json.gz +bn_all_1235.json.gz +bn_all_1236.json.gz +bn_all_1237.json.gz +bn_all_1238.json.gz +bn_all_1239.json.gz +bn_all_1240.json.gz +bn_all_1241.json.gz +bn_all_1242.json.gz +bn_all_1243.json.gz +bn_all_1244.json.gz +bn_all_1245.json.gz +bn_all_1246.json.gz +bn_all_1247.json.gz +bn_all_1248.json.gz +bn_all_1249.json.gz +bn_all_1250.json.gz +bn_all_1251.json.gz +bn_all_1252.json.gz +bn_all_1253.json.gz +bn_all_1254.json.gz +bn_all_1255.json.gz +bn_all_1256.json.gz +bn_all_1257.json.gz +bn_all_1258.json.gz +bn_all_1259.json.gz +bn_all_1260.json.gz +bn_all_1261.json.gz +bn_all_1262.json.gz +bn_all_1263.json.gz +bn_all_1264.json.gz +bn_all_1265.json.gz +bn_all_1266.json.gz +bn_all_1267.json.gz +bn_all_1268.json.gz +bn_all_1269.json.gz +bn_all_1270.json.gz +bn_all_1271.json.gz +bn_all_1272.json.gz +bn_all_1273.json.gz +bn_all_1274.json.gz +bn_all_1275.json.gz +bn_all_1276.json.gz +bn_all_1277.json.gz +bn_all_1278.json.gz +bn_all_1279.json.gz +bn_all_1280.json.gz +bn_all_1281.json.gz +bn_all_1282.json.gz +bn_all_1283.json.gz +bn_all_1284.json.gz +bn_all_1285.json.gz +bn_all_1286.json.gz +bn_all_1287.json.gz +bn_all_1288.json.gz +bn_all_1289.json.gz +bn_all_1290.json.gz +bn_all_1291.json.gz +bn_all_1292.json.gz +bn_all_1293.json.gz +bn_all_1294.json.gz +bn_all_1295.json.gz +bn_all_1296.json.gz +bn_all_1297.json.gz +bn_all_1298.json.gz +bn_all_1299.json.gz +bn_all_1300.json.gz +bn_all_1301.json.gz +bn_all_1302.json.gz +bn_all_1303.json.gz +bn_all_1304.json.gz +bn_all_1305.json.gz +bn_all_1306.json.gz +bn_all_1307.json.gz +bn_all_1308.json.gz +bn_all_1309.json.gz +bn_all_1310.json.gz +bn_all_1311.json.gz +bn_all_1312.json.gz +bn_all_1313.json.gz +bn_all_1314.json.gz +bn_all_1315.json.gz +bn_all_1316.json.gz +bn_all_1317.json.gz +bn_all_1318.json.gz +bn_all_1319.json.gz +bn_all_1320.json.gz +bn_all_1321.json.gz +bn_all_1322.json.gz +bn_all_1323.json.gz +bn_all_1324.json.gz +bn_all_1325.json.gz +bn_all_1326.json.gz +bn_all_1327.json.gz +bn_all_1328.json.gz +bn_all_1329.json.gz +bn_all_1330.json.gz +bn_all_1331.json.gz +bn_all_1332.json.gz +bn_all_1333.json.gz +bn_all_1334.json.gz +bn_all_1335.json.gz +bn_all_1336.json.gz +bn_all_1337.json.gz +bn_all_1338.json.gz +bn_all_1339.json.gz +bn_all_1340.json.gz +bn_all_1341.json.gz +bn_all_1342.json.gz +bn_all_1343.json.gz +bn_all_1344.json.gz +bn_all_1345.json.gz +bn_all_1346.json.gz +bn_all_1347.json.gz +bn_all_1348.json.gz +bn_all_1349.json.gz +bn_all_1350.json.gz +bn_all_1351.json.gz +bn_all_1352.json.gz +bn_all_1353.json.gz +bn_all_1354.json.gz +bn_all_1355.json.gz +bn_all_1356.json.gz +bn_all_1357.json.gz +bn_all_1358.json.gz +bn_all_1359.json.gz +bn_all_1360.json.gz +bn_all_1361.json.gz +bn_all_1362.json.gz +bn_all_1363.json.gz +bn_all_1364.json.gz +bn_all_1365.json.gz +bn_all_1366.json.gz +bn_all_1367.json.gz +bn_all_1368.json.gz +bn_all_1369.json.gz +bn_all_1370.json.gz +bn_all_1371.json.gz +bn_all_1372.json.gz +bn_all_1373.json.gz +bn_all_1374.json.gz +bn_all_1375.json.gz +bn_all_1376.json.gz +bn_all_1377.json.gz +bn_all_1378.json.gz +bn_all_1379.json.gz +bn_all_1380.json.gz 
+bn_all_1381.json.gz +bn_all_1382.json.gz +bn_all_1383.json.gz +bn_all_1384.json.gz +bn_all_1385.json.gz +bn_all_1386.json.gz +bn_all_1387.json.gz +bn_all_1388.json.gz +bn_all_1389.json.gz +bn_all_1390.json.gz +bn_all_1391.json.gz +bn_all_1392.json.gz +bn_all_1393.json.gz +bn_all_1394.json.gz +bn_all_1395.json.gz +bn_all_1396.json.gz +bn_all_1397.json.gz +bn_all_1398.json.gz +bn_all_1399.json.gz +bn_all_1400.json.gz +bn_all_1401.json.gz +bn_all_1402.json.gz +bn_all_1403.json.gz +bn_all_1404.json.gz +bn_all_1405.json.gz +bn_all_1406.json.gz +bn_all_1407.json.gz +bn_all_1408.json.gz +bn_all_1409.json.gz +bn_all_1410.json.gz +bn_all_1411.json.gz +bn_all_1412.json.gz +bn_all_1413.json.gz +bn_all_1414.json.gz +bn_all_1415.json.gz +bn_all_1416.json.gz +bn_all_1417.json.gz +bn_all_1418.json.gz +bn_all_1419.json.gz +bn_all_1420.json.gz +bn_all_1421.json.gz +bn_all_1422.json.gz +bn_all_1423.json.gz +bn_all_1424.json.gz +bn_all_1425.json.gz +bn_all_1426.json.gz +bn_all_1427.json.gz +bn_all_1428.json.gz +bn_all_1429.json.gz +bn_all_1430.json.gz +bn_all_1431.json.gz +bn_all_1432.json.gz +bn_all_1433.json.gz +bn_all_1434.json.gz +bn_all_1435.json.gz +bn_all_1436.json.gz +bn_all_1437.json.gz +bn_all_1438.json.gz +bn_all_1439.json.gz +bn_all_1440.json.gz +bn_all_1441.json.gz +bn_all_1442.json.gz +bn_all_1443.json.gz +bn_all_1444.json.gz +bn_all_1445.json.gz +bn_all_1446.json.gz +bn_all_1447.json.gz +bn_all_1448.json.gz +bn_all_1449.json.gz +bn_all_1450.json.gz +bn_all_1451.json.gz +bn_all_1452.json.gz +bn_all_1453.json.gz +bn_all_1454.json.gz +bn_all_1455.json.gz +bn_all_1456.json.gz +bn_all_1457.json.gz +bn_all_1458.json.gz +bn_all_1459.json.gz +bn_all_1460.json.gz +bn_all_1461.json.gz +bn_all_1462.json.gz +bn_all_1463.json.gz +bn_all_1464.json.gz +bn_all_1465.json.gz +bn_all_1466.json.gz +bn_all_1467.json.gz +bn_all_1468.json.gz +bn_all_1469.json.gz +bn_all_1470.json.gz +bn_all_1471.json.gz +bn_all_1472.json.gz +bn_all_1473.json.gz +bn_all_1474.json.gz +bn_all_1475.json.gz +bn_all_1476.json.gz +bn_all_1477.json.gz +bn_all_1478.json.gz +bn_all_1479.json.gz +bn_all_1480.json.gz +bn_all_1481.json.gz +bn_all_1482.json.gz +bn_all_1483.json.gz +bn_all_1484.json.gz +bn_all_1485.json.gz +bn_all_1486.json.gz +bn_all_1487.json.gz +bn_all_1488.json.gz +bn_all_1489.json.gz +bn_all_1490.json.gz +bn_all_1491.json.gz +bn_all_1492.json.gz +bn_all_1493.json.gz +bn_all_1494.json.gz +bn_all_1495.json.gz +bn_all_1496.json.gz +bn_all_1497.json.gz +bn_all_1498.json.gz +bn_all_1499.json.gz +bn_all_1500.json.gz +bn_all_1501.json.gz +bn_all_1502.json.gz +bn_all_1503.json.gz +bn_all_1504.json.gz +bn_all_1505.json.gz +bn_all_1506.json.gz +bn_all_1507.json.gz +bn_all_1508.json.gz +bn_all_1509.json.gz +bn_all_1510.json.gz +bn_all_1511.json.gz +bn_all_1512.json.gz +bn_all_1513.json.gz +bn_all_1514.json.gz +bn_all_1515.json.gz +bn_all_1516.json.gz +bn_all_1517.json.gz +bn_all_1518.json.gz +bn_all_1519.json.gz +bn_all_1520.json.gz +bn_all_1521.json.gz +bn_all_1522.json.gz +bn_all_1523.json.gz +bn_all_1524.json.gz +bn_all_1525.json.gz +bn_all_1526.json.gz +bn_all_1527.json.gz +bn_all_1528.json.gz +bn_all_1529.json.gz +bn_all_1530.json.gz +bn_all_1531.json.gz +bn_all_1532.json.gz +bn_all_1533.json.gz +bn_all_1534.json.gz +bn_all_1535.json.gz +bn_all_1536.json.gz +bn_all_1537.json.gz +bn_all_1538.json.gz +bn_all_1539.json.gz +bn_all_1540.json.gz +bn_all_1541.json.gz +bn_all_1542.json.gz +bn_all_1543.json.gz +bn_all_1544.json.gz +bn_all_1545.json.gz +bn_all_1546.json.gz +bn_all_1547.json.gz +bn_all_1548.json.gz +bn_all_1549.json.gz 
+bn_all_1550.json.gz +bn_all_1551.json.gz +bn_all_1552.json.gz +bn_all_1553.json.gz +bn_all_1554.json.gz +bn_all_1555.json.gz +bn_all_1556.json.gz +bn_all_1557.json.gz +bn_all_1558.json.gz +bn_all_1559.json.gz +bn_all_1560.json.gz +bn_all_1561.json.gz +bn_all_1562.json.gz +bn_all_1563.json.gz +bn_all_1564.json.gz +bn_all_1565.json.gz +bn_all_1566.json.gz +bn_all_1567.json.gz +bn_all_1568.json.gz +bn_all_1569.json.gz +bn_all_1570.json.gz +bn_all_1571.json.gz +bn_all_1572.json.gz +bn_all_1573.json.gz +bn_all_1574.json.gz +bn_all_1575.json.gz +bn_all_1576.json.gz +bn_all_1577.json.gz +bn_all_1578.json.gz +bn_all_1579.json.gz +bn_all_1580.json.gz +bn_all_1581.json.gz +bn_all_1582.json.gz +bn_all_1583.json.gz +bn_all_1584.json.gz +bn_all_1585.json.gz +bn_all_1586.json.gz +bn_all_1587.json.gz +bn_all_1588.json.gz +bn_all_1589.json.gz +bn_all_1590.json.gz +bn_all_1591.json.gz +bn_all_1592.json.gz +bn_all_1593.json.gz +bn_all_1594.json.gz +bn_all_1595.json.gz +bn_all_1596.json.gz +bn_all_1597.json.gz +bn_all_1598.json.gz +bn_all_1599.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/mai.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/mai.txt new file mode 100644 index 0000000000000000000000000000000000000000..42df956af2054e0e16e966e4fe0ca5424c3c0287 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/mai.txt @@ -0,0 +1 @@ +mai_all_0000.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/mr.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/mr.txt new file mode 100644 index 0000000000000000000000000000000000000000..28f1535e56a44a8412e26433e30dc1f8940fef4f --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/mr.txt @@ -0,0 +1,400 @@ +mr_all_0000.json.gz +mr_all_0001.json.gz +mr_all_0002.json.gz +mr_all_0003.json.gz +mr_all_0004.json.gz +mr_all_0005.json.gz +mr_all_0006.json.gz +mr_all_0007.json.gz +mr_all_0008.json.gz +mr_all_0009.json.gz +mr_all_0010.json.gz +mr_all_0011.json.gz +mr_all_0012.json.gz +mr_all_0013.json.gz +mr_all_0014.json.gz +mr_all_0015.json.gz +mr_all_0016.json.gz +mr_all_0017.json.gz +mr_all_0018.json.gz +mr_all_0019.json.gz +mr_all_0020.json.gz +mr_all_0021.json.gz +mr_all_0022.json.gz +mr_all_0023.json.gz +mr_all_0024.json.gz +mr_all_0025.json.gz +mr_all_0026.json.gz +mr_all_0027.json.gz +mr_all_0028.json.gz +mr_all_0029.json.gz +mr_all_0030.json.gz +mr_all_0031.json.gz +mr_all_0032.json.gz +mr_all_0033.json.gz +mr_all_0034.json.gz +mr_all_0035.json.gz +mr_all_0036.json.gz +mr_all_0037.json.gz +mr_all_0038.json.gz +mr_all_0039.json.gz +mr_all_0040.json.gz +mr_all_0041.json.gz +mr_all_0042.json.gz +mr_all_0043.json.gz +mr_all_0044.json.gz +mr_all_0045.json.gz +mr_all_0046.json.gz +mr_all_0047.json.gz +mr_all_0048.json.gz +mr_all_0049.json.gz +mr_all_0050.json.gz +mr_all_0051.json.gz +mr_all_0052.json.gz +mr_all_0053.json.gz +mr_all_0054.json.gz +mr_all_0055.json.gz +mr_all_0056.json.gz +mr_all_0057.json.gz +mr_all_0058.json.gz +mr_all_0059.json.gz +mr_all_0060.json.gz +mr_all_0061.json.gz +mr_all_0062.json.gz +mr_all_0063.json.gz +mr_all_0064.json.gz +mr_all_0065.json.gz +mr_all_0066.json.gz +mr_all_0067.json.gz +mr_all_0068.json.gz +mr_all_0069.json.gz +mr_all_0070.json.gz +mr_all_0071.json.gz +mr_all_0072.json.gz +mr_all_0073.json.gz +mr_all_0074.json.gz +mr_all_0075.json.gz +mr_all_0076.json.gz +mr_all_0077.json.gz +mr_all_0078.json.gz +mr_all_0079.json.gz +mr_all_0080.json.gz +mr_all_0081.json.gz +mr_all_0082.json.gz 
+mr_all_0083.json.gz +mr_all_0084.json.gz +mr_all_0085.json.gz +mr_all_0086.json.gz +mr_all_0087.json.gz +mr_all_0088.json.gz +mr_all_0089.json.gz +mr_all_0090.json.gz +mr_all_0091.json.gz +mr_all_0092.json.gz +mr_all_0093.json.gz +mr_all_0094.json.gz +mr_all_0095.json.gz +mr_all_0096.json.gz +mr_all_0097.json.gz +mr_all_0098.json.gz +mr_all_0099.json.gz +mr_all_0100.json.gz +mr_all_0101.json.gz +mr_all_0102.json.gz +mr_all_0103.json.gz +mr_all_0104.json.gz +mr_all_0105.json.gz +mr_all_0106.json.gz +mr_all_0107.json.gz +mr_all_0108.json.gz +mr_all_0109.json.gz +mr_all_0110.json.gz +mr_all_0111.json.gz +mr_all_0112.json.gz +mr_all_0113.json.gz +mr_all_0114.json.gz +mr_all_0115.json.gz +mr_all_0116.json.gz +mr_all_0117.json.gz +mr_all_0118.json.gz +mr_all_0119.json.gz +mr_all_0120.json.gz +mr_all_0121.json.gz +mr_all_0122.json.gz +mr_all_0123.json.gz +mr_all_0124.json.gz +mr_all_0125.json.gz +mr_all_0126.json.gz +mr_all_0127.json.gz +mr_all_0128.json.gz +mr_all_0129.json.gz +mr_all_0130.json.gz +mr_all_0131.json.gz +mr_all_0132.json.gz +mr_all_0133.json.gz +mr_all_0134.json.gz +mr_all_0135.json.gz +mr_all_0136.json.gz +mr_all_0137.json.gz +mr_all_0138.json.gz +mr_all_0139.json.gz +mr_all_0140.json.gz +mr_all_0141.json.gz +mr_all_0142.json.gz +mr_all_0143.json.gz +mr_all_0144.json.gz +mr_all_0145.json.gz +mr_all_0146.json.gz +mr_all_0147.json.gz +mr_all_0148.json.gz +mr_all_0149.json.gz +mr_all_0150.json.gz +mr_all_0151.json.gz +mr_all_0152.json.gz +mr_all_0153.json.gz +mr_all_0154.json.gz +mr_all_0155.json.gz +mr_all_0156.json.gz +mr_all_0157.json.gz +mr_all_0158.json.gz +mr_all_0159.json.gz +mr_all_0160.json.gz +mr_all_0161.json.gz +mr_all_0162.json.gz +mr_all_0163.json.gz +mr_all_0164.json.gz +mr_all_0165.json.gz +mr_all_0166.json.gz +mr_all_0167.json.gz +mr_all_0168.json.gz +mr_all_0169.json.gz +mr_all_0170.json.gz +mr_all_0171.json.gz +mr_all_0172.json.gz +mr_all_0173.json.gz +mr_all_0174.json.gz +mr_all_0175.json.gz +mr_all_0176.json.gz +mr_all_0177.json.gz +mr_all_0178.json.gz +mr_all_0179.json.gz +mr_all_0180.json.gz +mr_all_0181.json.gz +mr_all_0182.json.gz +mr_all_0183.json.gz +mr_all_0184.json.gz +mr_all_0185.json.gz +mr_all_0186.json.gz +mr_all_0187.json.gz +mr_all_0188.json.gz +mr_all_0189.json.gz +mr_all_0190.json.gz +mr_all_0191.json.gz +mr_all_0192.json.gz +mr_all_0193.json.gz +mr_all_0194.json.gz +mr_all_0195.json.gz +mr_all_0196.json.gz +mr_all_0197.json.gz +mr_all_0198.json.gz +mr_all_0199.json.gz +mr_all_0200.json.gz +mr_all_0201.json.gz +mr_all_0202.json.gz +mr_all_0203.json.gz +mr_all_0204.json.gz +mr_all_0205.json.gz +mr_all_0206.json.gz +mr_all_0207.json.gz +mr_all_0208.json.gz +mr_all_0209.json.gz +mr_all_0210.json.gz +mr_all_0211.json.gz +mr_all_0212.json.gz +mr_all_0213.json.gz +mr_all_0214.json.gz +mr_all_0215.json.gz +mr_all_0216.json.gz +mr_all_0217.json.gz +mr_all_0218.json.gz +mr_all_0219.json.gz +mr_all_0220.json.gz +mr_all_0221.json.gz +mr_all_0222.json.gz +mr_all_0223.json.gz +mr_all_0224.json.gz +mr_all_0225.json.gz +mr_all_0226.json.gz +mr_all_0227.json.gz +mr_all_0228.json.gz +mr_all_0229.json.gz +mr_all_0230.json.gz +mr_all_0231.json.gz +mr_all_0232.json.gz +mr_all_0233.json.gz +mr_all_0234.json.gz +mr_all_0235.json.gz +mr_all_0236.json.gz +mr_all_0237.json.gz +mr_all_0238.json.gz +mr_all_0239.json.gz +mr_all_0240.json.gz +mr_all_0241.json.gz +mr_all_0242.json.gz +mr_all_0243.json.gz +mr_all_0244.json.gz +mr_all_0245.json.gz +mr_all_0246.json.gz +mr_all_0247.json.gz +mr_all_0248.json.gz +mr_all_0249.json.gz +mr_all_0250.json.gz +mr_all_0251.json.gz 
+mr_all_0252.json.gz +mr_all_0253.json.gz +mr_all_0254.json.gz +mr_all_0255.json.gz +mr_all_0256.json.gz +mr_all_0257.json.gz +mr_all_0258.json.gz +mr_all_0259.json.gz +mr_all_0260.json.gz +mr_all_0261.json.gz +mr_all_0262.json.gz +mr_all_0263.json.gz +mr_all_0264.json.gz +mr_all_0265.json.gz +mr_all_0266.json.gz +mr_all_0267.json.gz +mr_all_0268.json.gz +mr_all_0269.json.gz +mr_all_0270.json.gz +mr_all_0271.json.gz +mr_all_0272.json.gz +mr_all_0273.json.gz +mr_all_0274.json.gz +mr_all_0275.json.gz +mr_all_0276.json.gz +mr_all_0277.json.gz +mr_all_0278.json.gz +mr_all_0279.json.gz +mr_all_0280.json.gz +mr_all_0281.json.gz +mr_all_0282.json.gz +mr_all_0283.json.gz +mr_all_0284.json.gz +mr_all_0285.json.gz +mr_all_0286.json.gz +mr_all_0287.json.gz +mr_all_0288.json.gz +mr_all_0289.json.gz +mr_all_0290.json.gz +mr_all_0291.json.gz +mr_all_0292.json.gz +mr_all_0293.json.gz +mr_all_0294.json.gz +mr_all_0295.json.gz +mr_all_0296.json.gz +mr_all_0297.json.gz +mr_all_0298.json.gz +mr_all_0299.json.gz +mr_all_0300.json.gz +mr_all_0301.json.gz +mr_all_0302.json.gz +mr_all_0303.json.gz +mr_all_0304.json.gz +mr_all_0305.json.gz +mr_all_0306.json.gz +mr_all_0307.json.gz +mr_all_0308.json.gz +mr_all_0309.json.gz +mr_all_0310.json.gz +mr_all_0311.json.gz +mr_all_0312.json.gz +mr_all_0313.json.gz +mr_all_0314.json.gz +mr_all_0315.json.gz +mr_all_0316.json.gz +mr_all_0317.json.gz +mr_all_0318.json.gz +mr_all_0319.json.gz +mr_all_0320.json.gz +mr_all_0321.json.gz +mr_all_0322.json.gz +mr_all_0323.json.gz +mr_all_0324.json.gz +mr_all_0325.json.gz +mr_all_0326.json.gz +mr_all_0327.json.gz +mr_all_0328.json.gz +mr_all_0329.json.gz +mr_all_0330.json.gz +mr_all_0331.json.gz +mr_all_0332.json.gz +mr_all_0333.json.gz +mr_all_0334.json.gz +mr_all_0335.json.gz +mr_all_0336.json.gz +mr_all_0337.json.gz +mr_all_0338.json.gz +mr_all_0339.json.gz +mr_all_0340.json.gz +mr_all_0341.json.gz +mr_all_0342.json.gz +mr_all_0343.json.gz +mr_all_0344.json.gz +mr_all_0345.json.gz +mr_all_0346.json.gz +mr_all_0347.json.gz +mr_all_0348.json.gz +mr_all_0349.json.gz +mr_all_0350.json.gz +mr_all_0351.json.gz +mr_all_0352.json.gz +mr_all_0353.json.gz +mr_all_0354.json.gz +mr_all_0355.json.gz +mr_all_0356.json.gz +mr_all_0357.json.gz +mr_all_0358.json.gz +mr_all_0359.json.gz +mr_all_0360.json.gz +mr_all_0361.json.gz +mr_all_0362.json.gz +mr_all_0363.json.gz +mr_all_0364.json.gz +mr_all_0365.json.gz +mr_all_0366.json.gz +mr_all_0367.json.gz +mr_all_0368.json.gz +mr_all_0369.json.gz +mr_all_0370.json.gz +mr_all_0371.json.gz +mr_all_0372.json.gz +mr_all_0373.json.gz +mr_all_0374.json.gz +mr_all_0375.json.gz +mr_all_0376.json.gz +mr_all_0377.json.gz +mr_all_0378.json.gz +mr_all_0379.json.gz +mr_all_0380.json.gz +mr_all_0381.json.gz +mr_all_0382.json.gz +mr_all_0383.json.gz +mr_all_0384.json.gz +mr_all_0385.json.gz +mr_all_0386.json.gz +mr_all_0387.json.gz +mr_all_0388.json.gz +mr_all_0389.json.gz +mr_all_0390.json.gz +mr_all_0391.json.gz +mr_all_0392.json.gz +mr_all_0393.json.gz +mr_all_0394.json.gz +mr_all_0395.json.gz +mr_all_0396.json.gz +mr_all_0397.json.gz +mr_all_0398.json.gz +mr_all_0399.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/or.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/or.txt new file mode 100644 index 0000000000000000000000000000000000000000..f0006e9e0d1fe496d8f052b0e8f66394ba372c5a --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/or.txt @@ -0,0 +1,58 @@ +or_all_0000.json.gz +or_all_0001.json.gz +or_all_0002.json.gz +or_all_0003.json.gz 
+or_all_0004.json.gz +or_all_0005.json.gz +or_all_0006.json.gz +or_all_0007.json.gz +or_all_0008.json.gz +or_all_0009.json.gz +or_all_0010.json.gz +or_all_0011.json.gz +or_all_0012.json.gz +or_all_0013.json.gz +or_all_0014.json.gz +or_all_0015.json.gz +or_all_0016.json.gz +or_all_0017.json.gz +or_all_0018.json.gz +or_all_0019.json.gz +or_all_0020.json.gz +or_all_0021.json.gz +or_all_0022.json.gz +or_all_0023.json.gz +or_all_0024.json.gz +or_all_0025.json.gz +or_all_0026.json.gz +or_all_0027.json.gz +or_all_0028.json.gz +or_all_0029.json.gz +or_all_0030.json.gz +or_all_0031.json.gz +or_all_0032.json.gz +or_all_0033.json.gz +or_all_0034.json.gz +or_all_0035.json.gz +or_all_0036.json.gz +or_all_0037.json.gz +or_all_0038.json.gz +or_all_0039.json.gz +or_all_0040.json.gz +or_all_0041.json.gz +or_all_0042.json.gz +or_all_0043.json.gz +or_all_0044.json.gz +or_all_0045.json.gz +or_all_0046.json.gz +or_all_0047.json.gz +or_all_0048.json.gz +or_all_0049.json.gz +or_all_0050.json.gz +or_all_0051.json.gz +or_all_0052.json.gz +or_all_0053.json.gz +or_all_0054.json.gz +or_all_0055.json.gz +or_all_0056.json.gz +or_all_0057.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/sa.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/sa.txt new file mode 100644 index 0000000000000000000000000000000000000000..2eea6c70e06e5f65ec0f37f3a318e0d8db8b01b1 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/sa.txt @@ -0,0 +1,24 @@ +sa_all_0000.json.gz +sa_all_0001.json.gz +sa_all_0002.json.gz +sa_all_0003.json.gz +sa_all_0004.json.gz +sa_all_0005.json.gz +sa_all_0006.json.gz +sa_all_0007.json.gz +sa_all_0008.json.gz +sa_all_0009.json.gz +sa_all_0010.json.gz +sa_all_0011.json.gz +sa_all_0012.json.gz +sa_all_0013.json.gz +sa_all_0014.json.gz +sa_all_0015.json.gz +sa_all_0016.json.gz +sa_all_0017.json.gz +sa_all_0018.json.gz +sa_all_0019.json.gz +sa_all_0020.json.gz +sa_all_0021.json.gz +sa_all_0022.json.gz +sa_all_0023.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/ta.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/ta.txt new file mode 100644 index 0000000000000000000000000000000000000000..16245a2fcdc02e0cf9f21edc005e9be38e0e1824 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/ta.txt @@ -0,0 +1,1600 @@ +ta_all_0000.json.gz +ta_all_0001.json.gz +ta_all_0002.json.gz +ta_all_0003.json.gz +ta_all_0004.json.gz +ta_all_0005.json.gz +ta_all_0006.json.gz +ta_all_0007.json.gz +ta_all_0008.json.gz +ta_all_0009.json.gz +ta_all_0010.json.gz +ta_all_0011.json.gz +ta_all_0012.json.gz +ta_all_0013.json.gz +ta_all_0014.json.gz +ta_all_0015.json.gz +ta_all_0016.json.gz +ta_all_0017.json.gz +ta_all_0018.json.gz +ta_all_0019.json.gz +ta_all_0020.json.gz +ta_all_0021.json.gz +ta_all_0022.json.gz +ta_all_0023.json.gz +ta_all_0024.json.gz +ta_all_0025.json.gz +ta_all_0026.json.gz +ta_all_0027.json.gz +ta_all_0028.json.gz +ta_all_0029.json.gz +ta_all_0030.json.gz +ta_all_0031.json.gz +ta_all_0032.json.gz +ta_all_0033.json.gz +ta_all_0034.json.gz +ta_all_0035.json.gz +ta_all_0036.json.gz +ta_all_0037.json.gz +ta_all_0038.json.gz +ta_all_0039.json.gz +ta_all_0040.json.gz +ta_all_0041.json.gz +ta_all_0042.json.gz +ta_all_0043.json.gz +ta_all_0044.json.gz +ta_all_0045.json.gz +ta_all_0046.json.gz +ta_all_0047.json.gz +ta_all_0048.json.gz +ta_all_0049.json.gz +ta_all_0050.json.gz +ta_all_0051.json.gz +ta_all_0052.json.gz +ta_all_0053.json.gz +ta_all_0054.json.gz +ta_all_0055.json.gz 
+ta_all_0056.json.gz +ta_all_0057.json.gz +ta_all_0058.json.gz +ta_all_0059.json.gz +ta_all_0060.json.gz +ta_all_0061.json.gz +ta_all_0062.json.gz +ta_all_0063.json.gz +ta_all_0064.json.gz +ta_all_0065.json.gz +ta_all_0066.json.gz +ta_all_0067.json.gz +ta_all_0068.json.gz +ta_all_0069.json.gz +ta_all_0070.json.gz +ta_all_0071.json.gz +ta_all_0072.json.gz +ta_all_0073.json.gz +ta_all_0074.json.gz +ta_all_0075.json.gz +ta_all_0076.json.gz +ta_all_0077.json.gz +ta_all_0078.json.gz +ta_all_0079.json.gz +ta_all_0080.json.gz +ta_all_0081.json.gz +ta_all_0082.json.gz +ta_all_0083.json.gz +ta_all_0084.json.gz +ta_all_0085.json.gz +ta_all_0086.json.gz +ta_all_0087.json.gz +ta_all_0088.json.gz +ta_all_0089.json.gz +ta_all_0090.json.gz +ta_all_0091.json.gz +ta_all_0092.json.gz +ta_all_0093.json.gz +ta_all_0094.json.gz +ta_all_0095.json.gz +ta_all_0096.json.gz +ta_all_0097.json.gz +ta_all_0098.json.gz +ta_all_0099.json.gz +ta_all_0100.json.gz +ta_all_0101.json.gz +ta_all_0102.json.gz +ta_all_0103.json.gz +ta_all_0104.json.gz +ta_all_0105.json.gz +ta_all_0106.json.gz +ta_all_0107.json.gz +ta_all_0108.json.gz +ta_all_0109.json.gz +ta_all_0110.json.gz +ta_all_0111.json.gz +ta_all_0112.json.gz +ta_all_0113.json.gz +ta_all_0114.json.gz +ta_all_0115.json.gz +ta_all_0116.json.gz +ta_all_0117.json.gz +ta_all_0118.json.gz +ta_all_0119.json.gz +ta_all_0120.json.gz +ta_all_0121.json.gz +ta_all_0122.json.gz +ta_all_0123.json.gz +ta_all_0124.json.gz +ta_all_0125.json.gz +ta_all_0126.json.gz +ta_all_0127.json.gz +ta_all_0128.json.gz +ta_all_0129.json.gz +ta_all_0130.json.gz +ta_all_0131.json.gz +ta_all_0132.json.gz +ta_all_0133.json.gz +ta_all_0134.json.gz +ta_all_0135.json.gz +ta_all_0136.json.gz +ta_all_0137.json.gz +ta_all_0138.json.gz +ta_all_0139.json.gz +ta_all_0140.json.gz +ta_all_0141.json.gz +ta_all_0142.json.gz +ta_all_0143.json.gz +ta_all_0144.json.gz +ta_all_0145.json.gz +ta_all_0146.json.gz +ta_all_0147.json.gz +ta_all_0148.json.gz +ta_all_0149.json.gz +ta_all_0150.json.gz +ta_all_0151.json.gz +ta_all_0152.json.gz +ta_all_0153.json.gz +ta_all_0154.json.gz +ta_all_0155.json.gz +ta_all_0156.json.gz +ta_all_0157.json.gz +ta_all_0158.json.gz +ta_all_0159.json.gz +ta_all_0160.json.gz +ta_all_0161.json.gz +ta_all_0162.json.gz +ta_all_0163.json.gz +ta_all_0164.json.gz +ta_all_0165.json.gz +ta_all_0166.json.gz +ta_all_0167.json.gz +ta_all_0168.json.gz +ta_all_0169.json.gz +ta_all_0170.json.gz +ta_all_0171.json.gz +ta_all_0172.json.gz +ta_all_0173.json.gz +ta_all_0174.json.gz +ta_all_0175.json.gz +ta_all_0176.json.gz +ta_all_0177.json.gz +ta_all_0178.json.gz +ta_all_0179.json.gz +ta_all_0180.json.gz +ta_all_0181.json.gz +ta_all_0182.json.gz +ta_all_0183.json.gz +ta_all_0184.json.gz +ta_all_0185.json.gz +ta_all_0186.json.gz +ta_all_0187.json.gz +ta_all_0188.json.gz +ta_all_0189.json.gz +ta_all_0190.json.gz +ta_all_0191.json.gz +ta_all_0192.json.gz +ta_all_0193.json.gz +ta_all_0194.json.gz +ta_all_0195.json.gz +ta_all_0196.json.gz +ta_all_0197.json.gz +ta_all_0198.json.gz +ta_all_0199.json.gz +ta_all_0200.json.gz +ta_all_0201.json.gz +ta_all_0202.json.gz +ta_all_0203.json.gz +ta_all_0204.json.gz +ta_all_0205.json.gz +ta_all_0206.json.gz +ta_all_0207.json.gz +ta_all_0208.json.gz +ta_all_0209.json.gz +ta_all_0210.json.gz +ta_all_0211.json.gz +ta_all_0212.json.gz +ta_all_0213.json.gz +ta_all_0214.json.gz +ta_all_0215.json.gz +ta_all_0216.json.gz +ta_all_0217.json.gz +ta_all_0218.json.gz +ta_all_0219.json.gz +ta_all_0220.json.gz +ta_all_0221.json.gz +ta_all_0222.json.gz +ta_all_0223.json.gz +ta_all_0224.json.gz 
+ta_all_0225.json.gz +ta_all_0226.json.gz +ta_all_0227.json.gz +ta_all_0228.json.gz +ta_all_0229.json.gz +ta_all_0230.json.gz +ta_all_0231.json.gz +ta_all_0232.json.gz +ta_all_0233.json.gz +ta_all_0234.json.gz +ta_all_0235.json.gz +ta_all_0236.json.gz +ta_all_0237.json.gz +ta_all_0238.json.gz +ta_all_0239.json.gz +ta_all_0240.json.gz +ta_all_0241.json.gz +ta_all_0242.json.gz +ta_all_0243.json.gz +ta_all_0244.json.gz +ta_all_0245.json.gz +ta_all_0246.json.gz +ta_all_0247.json.gz +ta_all_0248.json.gz +ta_all_0249.json.gz +ta_all_0250.json.gz +ta_all_0251.json.gz +ta_all_0252.json.gz +ta_all_0253.json.gz +ta_all_0254.json.gz +ta_all_0255.json.gz +ta_all_0256.json.gz +ta_all_0257.json.gz +ta_all_0258.json.gz +ta_all_0259.json.gz +ta_all_0260.json.gz +ta_all_0261.json.gz +ta_all_0262.json.gz +ta_all_0263.json.gz +ta_all_0264.json.gz +ta_all_0265.json.gz +ta_all_0266.json.gz +ta_all_0267.json.gz +ta_all_0268.json.gz +ta_all_0269.json.gz +ta_all_0270.json.gz +ta_all_0271.json.gz +ta_all_0272.json.gz +ta_all_0273.json.gz +ta_all_0274.json.gz +ta_all_0275.json.gz +ta_all_0276.json.gz +ta_all_0277.json.gz +ta_all_0278.json.gz +ta_all_0279.json.gz +ta_all_0280.json.gz +ta_all_0281.json.gz +ta_all_0282.json.gz +ta_all_0283.json.gz +ta_all_0284.json.gz +ta_all_0285.json.gz +ta_all_0286.json.gz +ta_all_0287.json.gz +ta_all_0288.json.gz +ta_all_0289.json.gz +ta_all_0290.json.gz +ta_all_0291.json.gz +ta_all_0292.json.gz +ta_all_0293.json.gz +ta_all_0294.json.gz +ta_all_0295.json.gz +ta_all_0296.json.gz +ta_all_0297.json.gz +ta_all_0298.json.gz +ta_all_0299.json.gz +ta_all_0300.json.gz +ta_all_0301.json.gz +ta_all_0302.json.gz +ta_all_0303.json.gz +ta_all_0304.json.gz +ta_all_0305.json.gz +ta_all_0306.json.gz +ta_all_0307.json.gz +ta_all_0308.json.gz +ta_all_0309.json.gz +ta_all_0310.json.gz +ta_all_0311.json.gz +ta_all_0312.json.gz +ta_all_0313.json.gz +ta_all_0314.json.gz +ta_all_0315.json.gz +ta_all_0316.json.gz +ta_all_0317.json.gz +ta_all_0318.json.gz +ta_all_0319.json.gz +ta_all_0320.json.gz +ta_all_0321.json.gz +ta_all_0322.json.gz +ta_all_0323.json.gz +ta_all_0324.json.gz +ta_all_0325.json.gz +ta_all_0326.json.gz +ta_all_0327.json.gz +ta_all_0328.json.gz +ta_all_0329.json.gz +ta_all_0330.json.gz +ta_all_0331.json.gz +ta_all_0332.json.gz +ta_all_0333.json.gz +ta_all_0334.json.gz +ta_all_0335.json.gz +ta_all_0336.json.gz +ta_all_0337.json.gz +ta_all_0338.json.gz +ta_all_0339.json.gz +ta_all_0340.json.gz +ta_all_0341.json.gz +ta_all_0342.json.gz +ta_all_0343.json.gz +ta_all_0344.json.gz +ta_all_0345.json.gz +ta_all_0346.json.gz +ta_all_0347.json.gz +ta_all_0348.json.gz +ta_all_0349.json.gz +ta_all_0350.json.gz +ta_all_0351.json.gz +ta_all_0352.json.gz +ta_all_0353.json.gz +ta_all_0354.json.gz +ta_all_0355.json.gz +ta_all_0356.json.gz +ta_all_0357.json.gz +ta_all_0358.json.gz +ta_all_0359.json.gz +ta_all_0360.json.gz +ta_all_0361.json.gz +ta_all_0362.json.gz +ta_all_0363.json.gz +ta_all_0364.json.gz +ta_all_0365.json.gz +ta_all_0366.json.gz +ta_all_0367.json.gz +ta_all_0368.json.gz +ta_all_0369.json.gz +ta_all_0370.json.gz +ta_all_0371.json.gz +ta_all_0372.json.gz +ta_all_0373.json.gz +ta_all_0374.json.gz +ta_all_0375.json.gz +ta_all_0376.json.gz +ta_all_0377.json.gz +ta_all_0378.json.gz +ta_all_0379.json.gz +ta_all_0380.json.gz +ta_all_0381.json.gz +ta_all_0382.json.gz +ta_all_0383.json.gz +ta_all_0384.json.gz +ta_all_0385.json.gz +ta_all_0386.json.gz +ta_all_0387.json.gz +ta_all_0388.json.gz +ta_all_0389.json.gz +ta_all_0390.json.gz +ta_all_0391.json.gz +ta_all_0392.json.gz +ta_all_0393.json.gz 
+ta_all_0394.json.gz +ta_all_0395.json.gz +ta_all_0396.json.gz +ta_all_0397.json.gz +ta_all_0398.json.gz +ta_all_0399.json.gz +ta_all_0400.json.gz +ta_all_0401.json.gz +ta_all_0402.json.gz +ta_all_0403.json.gz +ta_all_0404.json.gz +ta_all_0405.json.gz +ta_all_0406.json.gz +ta_all_0407.json.gz +ta_all_0408.json.gz +ta_all_0409.json.gz +ta_all_0410.json.gz +ta_all_0411.json.gz +ta_all_0412.json.gz +ta_all_0413.json.gz +ta_all_0414.json.gz +ta_all_0415.json.gz +ta_all_0416.json.gz +ta_all_0417.json.gz +ta_all_0418.json.gz +ta_all_0419.json.gz +ta_all_0420.json.gz +ta_all_0421.json.gz +ta_all_0422.json.gz +ta_all_0423.json.gz +ta_all_0424.json.gz +ta_all_0425.json.gz +ta_all_0426.json.gz +ta_all_0427.json.gz +ta_all_0428.json.gz +ta_all_0429.json.gz +ta_all_0430.json.gz +ta_all_0431.json.gz +ta_all_0432.json.gz +ta_all_0433.json.gz +ta_all_0434.json.gz +ta_all_0435.json.gz +ta_all_0436.json.gz +ta_all_0437.json.gz +ta_all_0438.json.gz +ta_all_0439.json.gz +ta_all_0440.json.gz +ta_all_0441.json.gz +ta_all_0442.json.gz +ta_all_0443.json.gz +ta_all_0444.json.gz +ta_all_0445.json.gz +ta_all_0446.json.gz +ta_all_0447.json.gz +ta_all_0448.json.gz +ta_all_0449.json.gz +ta_all_0450.json.gz +ta_all_0451.json.gz +ta_all_0452.json.gz +ta_all_0453.json.gz +ta_all_0454.json.gz +ta_all_0455.json.gz +ta_all_0456.json.gz +ta_all_0457.json.gz +ta_all_0458.json.gz +ta_all_0459.json.gz +ta_all_0460.json.gz +ta_all_0461.json.gz +ta_all_0462.json.gz +ta_all_0463.json.gz +ta_all_0464.json.gz +ta_all_0465.json.gz +ta_all_0466.json.gz +ta_all_0467.json.gz +ta_all_0468.json.gz +ta_all_0469.json.gz +ta_all_0470.json.gz +ta_all_0471.json.gz +ta_all_0472.json.gz +ta_all_0473.json.gz +ta_all_0474.json.gz +ta_all_0475.json.gz +ta_all_0476.json.gz +ta_all_0477.json.gz +ta_all_0478.json.gz +ta_all_0479.json.gz +ta_all_0480.json.gz +ta_all_0481.json.gz +ta_all_0482.json.gz +ta_all_0483.json.gz +ta_all_0484.json.gz +ta_all_0485.json.gz +ta_all_0486.json.gz +ta_all_0487.json.gz +ta_all_0488.json.gz +ta_all_0489.json.gz +ta_all_0490.json.gz +ta_all_0491.json.gz +ta_all_0492.json.gz +ta_all_0493.json.gz +ta_all_0494.json.gz +ta_all_0495.json.gz +ta_all_0496.json.gz +ta_all_0497.json.gz +ta_all_0498.json.gz +ta_all_0499.json.gz +ta_all_0500.json.gz +ta_all_0501.json.gz +ta_all_0502.json.gz +ta_all_0503.json.gz +ta_all_0504.json.gz +ta_all_0505.json.gz +ta_all_0506.json.gz +ta_all_0507.json.gz +ta_all_0508.json.gz +ta_all_0509.json.gz +ta_all_0510.json.gz +ta_all_0511.json.gz +ta_all_0512.json.gz +ta_all_0513.json.gz +ta_all_0514.json.gz +ta_all_0515.json.gz +ta_all_0516.json.gz +ta_all_0517.json.gz +ta_all_0518.json.gz +ta_all_0519.json.gz +ta_all_0520.json.gz +ta_all_0521.json.gz +ta_all_0522.json.gz +ta_all_0523.json.gz +ta_all_0524.json.gz +ta_all_0525.json.gz +ta_all_0526.json.gz +ta_all_0527.json.gz +ta_all_0528.json.gz +ta_all_0529.json.gz +ta_all_0530.json.gz +ta_all_0531.json.gz +ta_all_0532.json.gz +ta_all_0533.json.gz +ta_all_0534.json.gz +ta_all_0535.json.gz +ta_all_0536.json.gz +ta_all_0537.json.gz +ta_all_0538.json.gz +ta_all_0539.json.gz +ta_all_0540.json.gz +ta_all_0541.json.gz +ta_all_0542.json.gz +ta_all_0543.json.gz +ta_all_0544.json.gz +ta_all_0545.json.gz +ta_all_0546.json.gz +ta_all_0547.json.gz +ta_all_0548.json.gz +ta_all_0549.json.gz +ta_all_0550.json.gz +ta_all_0551.json.gz +ta_all_0552.json.gz +ta_all_0553.json.gz +ta_all_0554.json.gz +ta_all_0555.json.gz +ta_all_0556.json.gz +ta_all_0557.json.gz +ta_all_0558.json.gz +ta_all_0559.json.gz +ta_all_0560.json.gz +ta_all_0561.json.gz +ta_all_0562.json.gz 
+ta_all_0563.json.gz +ta_all_0564.json.gz +ta_all_0565.json.gz +ta_all_0566.json.gz +ta_all_0567.json.gz +ta_all_0568.json.gz +ta_all_0569.json.gz +ta_all_0570.json.gz +ta_all_0571.json.gz +ta_all_0572.json.gz +ta_all_0573.json.gz +ta_all_0574.json.gz +ta_all_0575.json.gz +ta_all_0576.json.gz +ta_all_0577.json.gz +ta_all_0578.json.gz +ta_all_0579.json.gz +ta_all_0580.json.gz +ta_all_0581.json.gz +ta_all_0582.json.gz +ta_all_0583.json.gz +ta_all_0584.json.gz +ta_all_0585.json.gz +ta_all_0586.json.gz +ta_all_0587.json.gz +ta_all_0588.json.gz +ta_all_0589.json.gz +ta_all_0590.json.gz +ta_all_0591.json.gz +ta_all_0592.json.gz +ta_all_0593.json.gz +ta_all_0594.json.gz +ta_all_0595.json.gz +ta_all_0596.json.gz +ta_all_0597.json.gz +ta_all_0598.json.gz +ta_all_0599.json.gz +ta_all_0600.json.gz +ta_all_0601.json.gz +ta_all_0602.json.gz +ta_all_0603.json.gz +ta_all_0604.json.gz +ta_all_0605.json.gz +ta_all_0606.json.gz +ta_all_0607.json.gz +ta_all_0608.json.gz +ta_all_0609.json.gz +ta_all_0610.json.gz +ta_all_0611.json.gz +ta_all_0612.json.gz +ta_all_0613.json.gz +ta_all_0614.json.gz +ta_all_0615.json.gz +ta_all_0616.json.gz +ta_all_0617.json.gz +ta_all_0618.json.gz +ta_all_0619.json.gz +ta_all_0620.json.gz +ta_all_0621.json.gz +ta_all_0622.json.gz +ta_all_0623.json.gz +ta_all_0624.json.gz +ta_all_0625.json.gz +ta_all_0626.json.gz +ta_all_0627.json.gz +ta_all_0628.json.gz +ta_all_0629.json.gz +ta_all_0630.json.gz +ta_all_0631.json.gz +ta_all_0632.json.gz +ta_all_0633.json.gz +ta_all_0634.json.gz +ta_all_0635.json.gz +ta_all_0636.json.gz +ta_all_0637.json.gz +ta_all_0638.json.gz +ta_all_0639.json.gz +ta_all_0640.json.gz +ta_all_0641.json.gz +ta_all_0642.json.gz +ta_all_0643.json.gz +ta_all_0644.json.gz +ta_all_0645.json.gz +ta_all_0646.json.gz +ta_all_0647.json.gz +ta_all_0648.json.gz +ta_all_0649.json.gz +ta_all_0650.json.gz +ta_all_0651.json.gz +ta_all_0652.json.gz +ta_all_0653.json.gz +ta_all_0654.json.gz +ta_all_0655.json.gz +ta_all_0656.json.gz +ta_all_0657.json.gz +ta_all_0658.json.gz +ta_all_0659.json.gz +ta_all_0660.json.gz +ta_all_0661.json.gz +ta_all_0662.json.gz +ta_all_0663.json.gz +ta_all_0664.json.gz +ta_all_0665.json.gz +ta_all_0666.json.gz +ta_all_0667.json.gz +ta_all_0668.json.gz +ta_all_0669.json.gz +ta_all_0670.json.gz +ta_all_0671.json.gz +ta_all_0672.json.gz +ta_all_0673.json.gz +ta_all_0674.json.gz +ta_all_0675.json.gz +ta_all_0676.json.gz +ta_all_0677.json.gz +ta_all_0678.json.gz +ta_all_0679.json.gz +ta_all_0680.json.gz +ta_all_0681.json.gz +ta_all_0682.json.gz +ta_all_0683.json.gz +ta_all_0684.json.gz +ta_all_0685.json.gz +ta_all_0686.json.gz +ta_all_0687.json.gz +ta_all_0688.json.gz +ta_all_0689.json.gz +ta_all_0690.json.gz +ta_all_0691.json.gz +ta_all_0692.json.gz +ta_all_0693.json.gz +ta_all_0694.json.gz +ta_all_0695.json.gz +ta_all_0696.json.gz +ta_all_0697.json.gz +ta_all_0698.json.gz +ta_all_0699.json.gz +ta_all_0700.json.gz +ta_all_0701.json.gz +ta_all_0702.json.gz +ta_all_0703.json.gz +ta_all_0704.json.gz +ta_all_0705.json.gz +ta_all_0706.json.gz +ta_all_0707.json.gz +ta_all_0708.json.gz +ta_all_0709.json.gz +ta_all_0710.json.gz +ta_all_0711.json.gz +ta_all_0712.json.gz +ta_all_0713.json.gz +ta_all_0714.json.gz +ta_all_0715.json.gz +ta_all_0716.json.gz +ta_all_0717.json.gz +ta_all_0718.json.gz +ta_all_0719.json.gz +ta_all_0720.json.gz +ta_all_0721.json.gz +ta_all_0722.json.gz +ta_all_0723.json.gz +ta_all_0724.json.gz +ta_all_0725.json.gz +ta_all_0726.json.gz +ta_all_0727.json.gz +ta_all_0728.json.gz +ta_all_0729.json.gz +ta_all_0730.json.gz +ta_all_0731.json.gz 
+ta_all_0732.json.gz +ta_all_0733.json.gz +ta_all_0734.json.gz +ta_all_0735.json.gz +ta_all_0736.json.gz +ta_all_0737.json.gz +ta_all_0738.json.gz +ta_all_0739.json.gz +ta_all_0740.json.gz +ta_all_0741.json.gz +ta_all_0742.json.gz +ta_all_0743.json.gz +ta_all_0744.json.gz +ta_all_0745.json.gz +ta_all_0746.json.gz +ta_all_0747.json.gz +ta_all_0748.json.gz +ta_all_0749.json.gz +ta_all_0750.json.gz +ta_all_0751.json.gz +ta_all_0752.json.gz +ta_all_0753.json.gz +ta_all_0754.json.gz +ta_all_0755.json.gz +ta_all_0756.json.gz +ta_all_0757.json.gz +ta_all_0758.json.gz +ta_all_0759.json.gz +ta_all_0760.json.gz +ta_all_0761.json.gz +ta_all_0762.json.gz +ta_all_0763.json.gz +ta_all_0764.json.gz +ta_all_0765.json.gz +ta_all_0766.json.gz +ta_all_0767.json.gz +ta_all_0768.json.gz +ta_all_0769.json.gz +ta_all_0770.json.gz +ta_all_0771.json.gz +ta_all_0772.json.gz +ta_all_0773.json.gz +ta_all_0774.json.gz +ta_all_0775.json.gz +ta_all_0776.json.gz +ta_all_0777.json.gz +ta_all_0778.json.gz +ta_all_0779.json.gz +ta_all_0780.json.gz +ta_all_0781.json.gz +ta_all_0782.json.gz +ta_all_0783.json.gz +ta_all_0784.json.gz +ta_all_0785.json.gz +ta_all_0786.json.gz +ta_all_0787.json.gz +ta_all_0788.json.gz +ta_all_0789.json.gz +ta_all_0790.json.gz +ta_all_0791.json.gz +ta_all_0792.json.gz +ta_all_0793.json.gz +ta_all_0794.json.gz +ta_all_0795.json.gz +ta_all_0796.json.gz +ta_all_0797.json.gz +ta_all_0798.json.gz +ta_all_0799.json.gz +ta_all_0800.json.gz +ta_all_0801.json.gz +ta_all_0802.json.gz +ta_all_0803.json.gz +ta_all_0804.json.gz +ta_all_0805.json.gz +ta_all_0806.json.gz +ta_all_0807.json.gz +ta_all_0808.json.gz +ta_all_0809.json.gz +ta_all_0810.json.gz +ta_all_0811.json.gz +ta_all_0812.json.gz +ta_all_0813.json.gz +ta_all_0814.json.gz +ta_all_0815.json.gz +ta_all_0816.json.gz +ta_all_0817.json.gz +ta_all_0818.json.gz +ta_all_0819.json.gz +ta_all_0820.json.gz +ta_all_0821.json.gz +ta_all_0822.json.gz +ta_all_0823.json.gz +ta_all_0824.json.gz +ta_all_0825.json.gz +ta_all_0826.json.gz +ta_all_0827.json.gz +ta_all_0828.json.gz +ta_all_0829.json.gz +ta_all_0830.json.gz +ta_all_0831.json.gz +ta_all_0832.json.gz +ta_all_0833.json.gz +ta_all_0834.json.gz +ta_all_0835.json.gz +ta_all_0836.json.gz +ta_all_0837.json.gz +ta_all_0838.json.gz +ta_all_0839.json.gz +ta_all_0840.json.gz +ta_all_0841.json.gz +ta_all_0842.json.gz +ta_all_0843.json.gz +ta_all_0844.json.gz +ta_all_0845.json.gz +ta_all_0846.json.gz +ta_all_0847.json.gz +ta_all_0848.json.gz +ta_all_0849.json.gz +ta_all_0850.json.gz +ta_all_0851.json.gz +ta_all_0852.json.gz +ta_all_0853.json.gz +ta_all_0854.json.gz +ta_all_0855.json.gz +ta_all_0856.json.gz +ta_all_0857.json.gz +ta_all_0858.json.gz +ta_all_0859.json.gz +ta_all_0860.json.gz +ta_all_0861.json.gz +ta_all_0862.json.gz +ta_all_0863.json.gz +ta_all_0864.json.gz +ta_all_0865.json.gz +ta_all_0866.json.gz +ta_all_0867.json.gz +ta_all_0868.json.gz +ta_all_0869.json.gz +ta_all_0870.json.gz +ta_all_0871.json.gz +ta_all_0872.json.gz +ta_all_0873.json.gz +ta_all_0874.json.gz +ta_all_0875.json.gz +ta_all_0876.json.gz +ta_all_0877.json.gz +ta_all_0878.json.gz +ta_all_0879.json.gz +ta_all_0880.json.gz +ta_all_0881.json.gz +ta_all_0882.json.gz +ta_all_0883.json.gz +ta_all_0884.json.gz +ta_all_0885.json.gz +ta_all_0886.json.gz +ta_all_0887.json.gz +ta_all_0888.json.gz +ta_all_0889.json.gz +ta_all_0890.json.gz +ta_all_0891.json.gz +ta_all_0892.json.gz +ta_all_0893.json.gz +ta_all_0894.json.gz +ta_all_0895.json.gz +ta_all_0896.json.gz +ta_all_0897.json.gz +ta_all_0898.json.gz +ta_all_0899.json.gz +ta_all_0900.json.gz 
+ta_all_0901.json.gz +ta_all_0902.json.gz +ta_all_0903.json.gz +ta_all_0904.json.gz +ta_all_0905.json.gz +ta_all_0906.json.gz +ta_all_0907.json.gz +ta_all_0908.json.gz +ta_all_0909.json.gz +ta_all_0910.json.gz +ta_all_0911.json.gz +ta_all_0912.json.gz +ta_all_0913.json.gz +ta_all_0914.json.gz +ta_all_0915.json.gz +ta_all_0916.json.gz +ta_all_0917.json.gz +ta_all_0918.json.gz +ta_all_0919.json.gz +ta_all_0920.json.gz +ta_all_0921.json.gz +ta_all_0922.json.gz +ta_all_0923.json.gz +ta_all_0924.json.gz +ta_all_0925.json.gz +ta_all_0926.json.gz +ta_all_0927.json.gz +ta_all_0928.json.gz +ta_all_0929.json.gz +ta_all_0930.json.gz +ta_all_0931.json.gz +ta_all_0932.json.gz +ta_all_0933.json.gz +ta_all_0934.json.gz +ta_all_0935.json.gz +ta_all_0936.json.gz +ta_all_0937.json.gz +ta_all_0938.json.gz +ta_all_0939.json.gz +ta_all_0940.json.gz +ta_all_0941.json.gz +ta_all_0942.json.gz +ta_all_0943.json.gz +ta_all_0944.json.gz +ta_all_0945.json.gz +ta_all_0946.json.gz +ta_all_0947.json.gz +ta_all_0948.json.gz +ta_all_0949.json.gz +ta_all_0950.json.gz +ta_all_0951.json.gz +ta_all_0952.json.gz +ta_all_0953.json.gz +ta_all_0954.json.gz +ta_all_0955.json.gz +ta_all_0956.json.gz +ta_all_0957.json.gz +ta_all_0958.json.gz +ta_all_0959.json.gz +ta_all_0960.json.gz +ta_all_0961.json.gz +ta_all_0962.json.gz +ta_all_0963.json.gz +ta_all_0964.json.gz +ta_all_0965.json.gz +ta_all_0966.json.gz +ta_all_0967.json.gz +ta_all_0968.json.gz +ta_all_0969.json.gz +ta_all_0970.json.gz +ta_all_0971.json.gz +ta_all_0972.json.gz +ta_all_0973.json.gz +ta_all_0974.json.gz +ta_all_0975.json.gz +ta_all_0976.json.gz +ta_all_0977.json.gz +ta_all_0978.json.gz +ta_all_0979.json.gz +ta_all_0980.json.gz +ta_all_0981.json.gz +ta_all_0982.json.gz +ta_all_0983.json.gz +ta_all_0984.json.gz +ta_all_0985.json.gz +ta_all_0986.json.gz +ta_all_0987.json.gz +ta_all_0988.json.gz +ta_all_0989.json.gz +ta_all_0990.json.gz +ta_all_0991.json.gz +ta_all_0992.json.gz +ta_all_0993.json.gz +ta_all_0994.json.gz +ta_all_0995.json.gz +ta_all_0996.json.gz +ta_all_0997.json.gz +ta_all_0998.json.gz +ta_all_0999.json.gz +ta_all_1000.json.gz +ta_all_1001.json.gz +ta_all_1002.json.gz +ta_all_1003.json.gz +ta_all_1004.json.gz +ta_all_1005.json.gz +ta_all_1006.json.gz +ta_all_1007.json.gz +ta_all_1008.json.gz +ta_all_1009.json.gz +ta_all_1010.json.gz +ta_all_1011.json.gz +ta_all_1012.json.gz +ta_all_1013.json.gz +ta_all_1014.json.gz +ta_all_1015.json.gz +ta_all_1016.json.gz +ta_all_1017.json.gz +ta_all_1018.json.gz +ta_all_1019.json.gz +ta_all_1020.json.gz +ta_all_1021.json.gz +ta_all_1022.json.gz +ta_all_1023.json.gz +ta_all_1024.json.gz +ta_all_1025.json.gz +ta_all_1026.json.gz +ta_all_1027.json.gz +ta_all_1028.json.gz +ta_all_1029.json.gz +ta_all_1030.json.gz +ta_all_1031.json.gz +ta_all_1032.json.gz +ta_all_1033.json.gz +ta_all_1034.json.gz +ta_all_1035.json.gz +ta_all_1036.json.gz +ta_all_1037.json.gz +ta_all_1038.json.gz +ta_all_1039.json.gz +ta_all_1040.json.gz +ta_all_1041.json.gz +ta_all_1042.json.gz +ta_all_1043.json.gz +ta_all_1044.json.gz +ta_all_1045.json.gz +ta_all_1046.json.gz +ta_all_1047.json.gz +ta_all_1048.json.gz +ta_all_1049.json.gz +ta_all_1050.json.gz +ta_all_1051.json.gz +ta_all_1052.json.gz +ta_all_1053.json.gz +ta_all_1054.json.gz +ta_all_1055.json.gz +ta_all_1056.json.gz +ta_all_1057.json.gz +ta_all_1058.json.gz +ta_all_1059.json.gz +ta_all_1060.json.gz +ta_all_1061.json.gz +ta_all_1062.json.gz +ta_all_1063.json.gz +ta_all_1064.json.gz +ta_all_1065.json.gz +ta_all_1066.json.gz +ta_all_1067.json.gz +ta_all_1068.json.gz +ta_all_1069.json.gz 
+ta_all_1070.json.gz +ta_all_1071.json.gz +ta_all_1072.json.gz +ta_all_1073.json.gz +ta_all_1074.json.gz +ta_all_1075.json.gz +ta_all_1076.json.gz +ta_all_1077.json.gz +ta_all_1078.json.gz +ta_all_1079.json.gz +ta_all_1080.json.gz +ta_all_1081.json.gz +ta_all_1082.json.gz +ta_all_1083.json.gz +ta_all_1084.json.gz +ta_all_1085.json.gz +ta_all_1086.json.gz +ta_all_1087.json.gz +ta_all_1088.json.gz +ta_all_1089.json.gz +ta_all_1090.json.gz +ta_all_1091.json.gz +ta_all_1092.json.gz +ta_all_1093.json.gz +ta_all_1094.json.gz +ta_all_1095.json.gz +ta_all_1096.json.gz +ta_all_1097.json.gz +ta_all_1098.json.gz +ta_all_1099.json.gz +ta_all_1100.json.gz +ta_all_1101.json.gz +ta_all_1102.json.gz +ta_all_1103.json.gz +ta_all_1104.json.gz +ta_all_1105.json.gz +ta_all_1106.json.gz +ta_all_1107.json.gz +ta_all_1108.json.gz +ta_all_1109.json.gz +ta_all_1110.json.gz +ta_all_1111.json.gz +ta_all_1112.json.gz +ta_all_1113.json.gz +ta_all_1114.json.gz +ta_all_1115.json.gz +ta_all_1116.json.gz +ta_all_1117.json.gz +ta_all_1118.json.gz +ta_all_1119.json.gz +ta_all_1120.json.gz +ta_all_1121.json.gz +ta_all_1122.json.gz +ta_all_1123.json.gz +ta_all_1124.json.gz +ta_all_1125.json.gz +ta_all_1126.json.gz +ta_all_1127.json.gz +ta_all_1128.json.gz +ta_all_1129.json.gz +ta_all_1130.json.gz +ta_all_1131.json.gz +ta_all_1132.json.gz +ta_all_1133.json.gz +ta_all_1134.json.gz +ta_all_1135.json.gz +ta_all_1136.json.gz +ta_all_1137.json.gz +ta_all_1138.json.gz +ta_all_1139.json.gz +ta_all_1140.json.gz +ta_all_1141.json.gz +ta_all_1142.json.gz +ta_all_1143.json.gz +ta_all_1144.json.gz +ta_all_1145.json.gz +ta_all_1146.json.gz +ta_all_1147.json.gz +ta_all_1148.json.gz +ta_all_1149.json.gz +ta_all_1150.json.gz +ta_all_1151.json.gz +ta_all_1152.json.gz +ta_all_1153.json.gz +ta_all_1154.json.gz +ta_all_1155.json.gz +ta_all_1156.json.gz +ta_all_1157.json.gz +ta_all_1158.json.gz +ta_all_1159.json.gz +ta_all_1160.json.gz +ta_all_1161.json.gz +ta_all_1162.json.gz +ta_all_1163.json.gz +ta_all_1164.json.gz +ta_all_1165.json.gz +ta_all_1166.json.gz +ta_all_1167.json.gz +ta_all_1168.json.gz +ta_all_1169.json.gz +ta_all_1170.json.gz +ta_all_1171.json.gz +ta_all_1172.json.gz +ta_all_1173.json.gz +ta_all_1174.json.gz +ta_all_1175.json.gz +ta_all_1176.json.gz +ta_all_1177.json.gz +ta_all_1178.json.gz +ta_all_1179.json.gz +ta_all_1180.json.gz +ta_all_1181.json.gz +ta_all_1182.json.gz +ta_all_1183.json.gz +ta_all_1184.json.gz +ta_all_1185.json.gz +ta_all_1186.json.gz +ta_all_1187.json.gz +ta_all_1188.json.gz +ta_all_1189.json.gz +ta_all_1190.json.gz +ta_all_1191.json.gz +ta_all_1192.json.gz +ta_all_1193.json.gz +ta_all_1194.json.gz +ta_all_1195.json.gz +ta_all_1196.json.gz +ta_all_1197.json.gz +ta_all_1198.json.gz +ta_all_1199.json.gz +ta_all_1200.json.gz +ta_all_1201.json.gz +ta_all_1202.json.gz +ta_all_1203.json.gz +ta_all_1204.json.gz +ta_all_1205.json.gz +ta_all_1206.json.gz +ta_all_1207.json.gz +ta_all_1208.json.gz +ta_all_1209.json.gz +ta_all_1210.json.gz +ta_all_1211.json.gz +ta_all_1212.json.gz +ta_all_1213.json.gz +ta_all_1214.json.gz +ta_all_1215.json.gz +ta_all_1216.json.gz +ta_all_1217.json.gz +ta_all_1218.json.gz +ta_all_1219.json.gz +ta_all_1220.json.gz +ta_all_1221.json.gz +ta_all_1222.json.gz +ta_all_1223.json.gz +ta_all_1224.json.gz +ta_all_1225.json.gz +ta_all_1226.json.gz +ta_all_1227.json.gz +ta_all_1228.json.gz +ta_all_1229.json.gz +ta_all_1230.json.gz +ta_all_1231.json.gz +ta_all_1232.json.gz +ta_all_1233.json.gz +ta_all_1234.json.gz +ta_all_1235.json.gz +ta_all_1236.json.gz +ta_all_1237.json.gz +ta_all_1238.json.gz 
+ta_all_1239.json.gz +ta_all_1240.json.gz +ta_all_1241.json.gz +ta_all_1242.json.gz +ta_all_1243.json.gz +ta_all_1244.json.gz +ta_all_1245.json.gz +ta_all_1246.json.gz +ta_all_1247.json.gz +ta_all_1248.json.gz +ta_all_1249.json.gz +ta_all_1250.json.gz +ta_all_1251.json.gz +ta_all_1252.json.gz +ta_all_1253.json.gz +ta_all_1254.json.gz +ta_all_1255.json.gz +ta_all_1256.json.gz +ta_all_1257.json.gz +ta_all_1258.json.gz +ta_all_1259.json.gz +ta_all_1260.json.gz +ta_all_1261.json.gz +ta_all_1262.json.gz +ta_all_1263.json.gz +ta_all_1264.json.gz +ta_all_1265.json.gz +ta_all_1266.json.gz +ta_all_1267.json.gz +ta_all_1268.json.gz +ta_all_1269.json.gz +ta_all_1270.json.gz +ta_all_1271.json.gz +ta_all_1272.json.gz +ta_all_1273.json.gz +ta_all_1274.json.gz +ta_all_1275.json.gz +ta_all_1276.json.gz +ta_all_1277.json.gz +ta_all_1278.json.gz +ta_all_1279.json.gz +ta_all_1280.json.gz +ta_all_1281.json.gz +ta_all_1282.json.gz +ta_all_1283.json.gz +ta_all_1284.json.gz +ta_all_1285.json.gz +ta_all_1286.json.gz +ta_all_1287.json.gz +ta_all_1288.json.gz +ta_all_1289.json.gz +ta_all_1290.json.gz +ta_all_1291.json.gz +ta_all_1292.json.gz +ta_all_1293.json.gz +ta_all_1294.json.gz +ta_all_1295.json.gz +ta_all_1296.json.gz +ta_all_1297.json.gz +ta_all_1298.json.gz +ta_all_1299.json.gz +ta_all_1300.json.gz +ta_all_1301.json.gz +ta_all_1302.json.gz +ta_all_1303.json.gz +ta_all_1304.json.gz +ta_all_1305.json.gz +ta_all_1306.json.gz +ta_all_1307.json.gz +ta_all_1308.json.gz +ta_all_1309.json.gz +ta_all_1310.json.gz +ta_all_1311.json.gz +ta_all_1312.json.gz +ta_all_1313.json.gz +ta_all_1314.json.gz +ta_all_1315.json.gz +ta_all_1316.json.gz +ta_all_1317.json.gz +ta_all_1318.json.gz +ta_all_1319.json.gz +ta_all_1320.json.gz +ta_all_1321.json.gz +ta_all_1322.json.gz +ta_all_1323.json.gz +ta_all_1324.json.gz +ta_all_1325.json.gz +ta_all_1326.json.gz +ta_all_1327.json.gz +ta_all_1328.json.gz +ta_all_1329.json.gz +ta_all_1330.json.gz +ta_all_1331.json.gz +ta_all_1332.json.gz +ta_all_1333.json.gz +ta_all_1334.json.gz +ta_all_1335.json.gz +ta_all_1336.json.gz +ta_all_1337.json.gz +ta_all_1338.json.gz +ta_all_1339.json.gz +ta_all_1340.json.gz +ta_all_1341.json.gz +ta_all_1342.json.gz +ta_all_1343.json.gz +ta_all_1344.json.gz +ta_all_1345.json.gz +ta_all_1346.json.gz +ta_all_1347.json.gz +ta_all_1348.json.gz +ta_all_1349.json.gz +ta_all_1350.json.gz +ta_all_1351.json.gz +ta_all_1352.json.gz +ta_all_1353.json.gz +ta_all_1354.json.gz +ta_all_1355.json.gz +ta_all_1356.json.gz +ta_all_1357.json.gz +ta_all_1358.json.gz +ta_all_1359.json.gz +ta_all_1360.json.gz +ta_all_1361.json.gz +ta_all_1362.json.gz +ta_all_1363.json.gz +ta_all_1364.json.gz +ta_all_1365.json.gz +ta_all_1366.json.gz +ta_all_1367.json.gz +ta_all_1368.json.gz +ta_all_1369.json.gz +ta_all_1370.json.gz +ta_all_1371.json.gz +ta_all_1372.json.gz +ta_all_1373.json.gz +ta_all_1374.json.gz +ta_all_1375.json.gz +ta_all_1376.json.gz +ta_all_1377.json.gz +ta_all_1378.json.gz +ta_all_1379.json.gz +ta_all_1380.json.gz +ta_all_1381.json.gz +ta_all_1382.json.gz +ta_all_1383.json.gz +ta_all_1384.json.gz +ta_all_1385.json.gz +ta_all_1386.json.gz +ta_all_1387.json.gz +ta_all_1388.json.gz +ta_all_1389.json.gz +ta_all_1390.json.gz +ta_all_1391.json.gz +ta_all_1392.json.gz +ta_all_1393.json.gz +ta_all_1394.json.gz +ta_all_1395.json.gz +ta_all_1396.json.gz +ta_all_1397.json.gz +ta_all_1398.json.gz +ta_all_1399.json.gz +ta_all_1400.json.gz +ta_all_1401.json.gz +ta_all_1402.json.gz +ta_all_1403.json.gz +ta_all_1404.json.gz +ta_all_1405.json.gz +ta_all_1406.json.gz +ta_all_1407.json.gz 
+ta_all_1408.json.gz +ta_all_1409.json.gz +ta_all_1410.json.gz +ta_all_1411.json.gz +ta_all_1412.json.gz +ta_all_1413.json.gz +ta_all_1414.json.gz +ta_all_1415.json.gz +ta_all_1416.json.gz +ta_all_1417.json.gz +ta_all_1418.json.gz +ta_all_1419.json.gz +ta_all_1420.json.gz +ta_all_1421.json.gz +ta_all_1422.json.gz +ta_all_1423.json.gz +ta_all_1424.json.gz +ta_all_1425.json.gz +ta_all_1426.json.gz +ta_all_1427.json.gz +ta_all_1428.json.gz +ta_all_1429.json.gz +ta_all_1430.json.gz +ta_all_1431.json.gz +ta_all_1432.json.gz +ta_all_1433.json.gz +ta_all_1434.json.gz +ta_all_1435.json.gz +ta_all_1436.json.gz +ta_all_1437.json.gz +ta_all_1438.json.gz +ta_all_1439.json.gz +ta_all_1440.json.gz +ta_all_1441.json.gz +ta_all_1442.json.gz +ta_all_1443.json.gz +ta_all_1444.json.gz +ta_all_1445.json.gz +ta_all_1446.json.gz +ta_all_1447.json.gz +ta_all_1448.json.gz +ta_all_1449.json.gz +ta_all_1450.json.gz +ta_all_1451.json.gz +ta_all_1452.json.gz +ta_all_1453.json.gz +ta_all_1454.json.gz +ta_all_1455.json.gz +ta_all_1456.json.gz +ta_all_1457.json.gz +ta_all_1458.json.gz +ta_all_1459.json.gz +ta_all_1460.json.gz +ta_all_1461.json.gz +ta_all_1462.json.gz +ta_all_1463.json.gz +ta_all_1464.json.gz +ta_all_1465.json.gz +ta_all_1466.json.gz +ta_all_1467.json.gz +ta_all_1468.json.gz +ta_all_1469.json.gz +ta_all_1470.json.gz +ta_all_1471.json.gz +ta_all_1472.json.gz +ta_all_1473.json.gz +ta_all_1474.json.gz +ta_all_1475.json.gz +ta_all_1476.json.gz +ta_all_1477.json.gz +ta_all_1478.json.gz +ta_all_1479.json.gz +ta_all_1480.json.gz +ta_all_1481.json.gz +ta_all_1482.json.gz +ta_all_1483.json.gz +ta_all_1484.json.gz +ta_all_1485.json.gz +ta_all_1486.json.gz +ta_all_1487.json.gz +ta_all_1488.json.gz +ta_all_1489.json.gz +ta_all_1490.json.gz +ta_all_1491.json.gz +ta_all_1492.json.gz +ta_all_1493.json.gz +ta_all_1494.json.gz +ta_all_1495.json.gz +ta_all_1496.json.gz +ta_all_1497.json.gz +ta_all_1498.json.gz +ta_all_1499.json.gz +ta_all_1500.json.gz +ta_all_1501.json.gz +ta_all_1502.json.gz +ta_all_1503.json.gz +ta_all_1504.json.gz +ta_all_1505.json.gz +ta_all_1506.json.gz +ta_all_1507.json.gz +ta_all_1508.json.gz +ta_all_1509.json.gz +ta_all_1510.json.gz +ta_all_1511.json.gz +ta_all_1512.json.gz +ta_all_1513.json.gz +ta_all_1514.json.gz +ta_all_1515.json.gz +ta_all_1516.json.gz +ta_all_1517.json.gz +ta_all_1518.json.gz +ta_all_1519.json.gz +ta_all_1520.json.gz +ta_all_1521.json.gz +ta_all_1522.json.gz +ta_all_1523.json.gz +ta_all_1524.json.gz +ta_all_1525.json.gz +ta_all_1526.json.gz +ta_all_1527.json.gz +ta_all_1528.json.gz +ta_all_1529.json.gz +ta_all_1530.json.gz +ta_all_1531.json.gz +ta_all_1532.json.gz +ta_all_1533.json.gz +ta_all_1534.json.gz +ta_all_1535.json.gz +ta_all_1536.json.gz +ta_all_1537.json.gz +ta_all_1538.json.gz +ta_all_1539.json.gz +ta_all_1540.json.gz +ta_all_1541.json.gz +ta_all_1542.json.gz +ta_all_1543.json.gz +ta_all_1544.json.gz +ta_all_1545.json.gz +ta_all_1546.json.gz +ta_all_1547.json.gz +ta_all_1548.json.gz +ta_all_1549.json.gz +ta_all_1550.json.gz +ta_all_1551.json.gz +ta_all_1552.json.gz +ta_all_1553.json.gz +ta_all_1554.json.gz +ta_all_1555.json.gz +ta_all_1556.json.gz +ta_all_1557.json.gz +ta_all_1558.json.gz +ta_all_1559.json.gz +ta_all_1560.json.gz +ta_all_1561.json.gz +ta_all_1562.json.gz +ta_all_1563.json.gz +ta_all_1564.json.gz +ta_all_1565.json.gz +ta_all_1566.json.gz +ta_all_1567.json.gz +ta_all_1568.json.gz +ta_all_1569.json.gz +ta_all_1570.json.gz +ta_all_1571.json.gz +ta_all_1572.json.gz +ta_all_1573.json.gz +ta_all_1574.json.gz +ta_all_1575.json.gz +ta_all_1576.json.gz 
+ta_all_1577.json.gz +ta_all_1578.json.gz +ta_all_1579.json.gz +ta_all_1580.json.gz +ta_all_1581.json.gz +ta_all_1582.json.gz +ta_all_1583.json.gz +ta_all_1584.json.gz +ta_all_1585.json.gz +ta_all_1586.json.gz +ta_all_1587.json.gz +ta_all_1588.json.gz +ta_all_1589.json.gz +ta_all_1590.json.gz +ta_all_1591.json.gz +ta_all_1592.json.gz +ta_all_1593.json.gz +ta_all_1594.json.gz +ta_all_1595.json.gz +ta_all_1596.json.gz +ta_all_1597.json.gz +ta_all_1598.json.gz +ta_all_1599.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/te.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/te.txt new file mode 100644 index 0000000000000000000000000000000000000000..4e6f1295a7b47444aeb55330cf72693fa89fa048 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/te.txt @@ -0,0 +1,400 @@ +te_all_0000.json.gz +te_all_0001.json.gz +te_all_0002.json.gz +te_all_0003.json.gz +te_all_0004.json.gz +te_all_0005.json.gz +te_all_0006.json.gz +te_all_0007.json.gz +te_all_0008.json.gz +te_all_0009.json.gz +te_all_0010.json.gz +te_all_0011.json.gz +te_all_0012.json.gz +te_all_0013.json.gz +te_all_0014.json.gz +te_all_0015.json.gz +te_all_0016.json.gz +te_all_0017.json.gz +te_all_0018.json.gz +te_all_0019.json.gz +te_all_0020.json.gz +te_all_0021.json.gz +te_all_0022.json.gz +te_all_0023.json.gz +te_all_0024.json.gz +te_all_0025.json.gz +te_all_0026.json.gz +te_all_0027.json.gz +te_all_0028.json.gz +te_all_0029.json.gz +te_all_0030.json.gz +te_all_0031.json.gz +te_all_0032.json.gz +te_all_0033.json.gz +te_all_0034.json.gz +te_all_0035.json.gz +te_all_0036.json.gz +te_all_0037.json.gz +te_all_0038.json.gz +te_all_0039.json.gz +te_all_0040.json.gz +te_all_0041.json.gz +te_all_0042.json.gz +te_all_0043.json.gz +te_all_0044.json.gz +te_all_0045.json.gz +te_all_0046.json.gz +te_all_0047.json.gz +te_all_0048.json.gz +te_all_0049.json.gz +te_all_0050.json.gz +te_all_0051.json.gz +te_all_0052.json.gz +te_all_0053.json.gz +te_all_0054.json.gz +te_all_0055.json.gz +te_all_0056.json.gz +te_all_0057.json.gz +te_all_0058.json.gz +te_all_0059.json.gz +te_all_0060.json.gz +te_all_0061.json.gz +te_all_0062.json.gz +te_all_0063.json.gz +te_all_0064.json.gz +te_all_0065.json.gz +te_all_0066.json.gz +te_all_0067.json.gz +te_all_0068.json.gz +te_all_0069.json.gz +te_all_0070.json.gz +te_all_0071.json.gz +te_all_0072.json.gz +te_all_0073.json.gz +te_all_0074.json.gz +te_all_0075.json.gz +te_all_0076.json.gz +te_all_0077.json.gz +te_all_0078.json.gz +te_all_0079.json.gz +te_all_0080.json.gz +te_all_0081.json.gz +te_all_0082.json.gz +te_all_0083.json.gz +te_all_0084.json.gz +te_all_0085.json.gz +te_all_0086.json.gz +te_all_0087.json.gz +te_all_0088.json.gz +te_all_0089.json.gz +te_all_0090.json.gz +te_all_0091.json.gz +te_all_0092.json.gz +te_all_0093.json.gz +te_all_0094.json.gz +te_all_0095.json.gz +te_all_0096.json.gz +te_all_0097.json.gz +te_all_0098.json.gz +te_all_0099.json.gz +te_all_0100.json.gz +te_all_0101.json.gz +te_all_0102.json.gz +te_all_0103.json.gz +te_all_0104.json.gz +te_all_0105.json.gz +te_all_0106.json.gz +te_all_0107.json.gz +te_all_0108.json.gz +te_all_0109.json.gz +te_all_0110.json.gz +te_all_0111.json.gz +te_all_0112.json.gz +te_all_0113.json.gz +te_all_0114.json.gz +te_all_0115.json.gz +te_all_0116.json.gz +te_all_0117.json.gz +te_all_0118.json.gz +te_all_0119.json.gz +te_all_0120.json.gz +te_all_0121.json.gz +te_all_0122.json.gz +te_all_0123.json.gz +te_all_0124.json.gz +te_all_0125.json.gz +te_all_0126.json.gz +te_all_0127.json.gz +te_all_0128.json.gz 
+te_all_0129.json.gz +te_all_0130.json.gz +te_all_0131.json.gz +te_all_0132.json.gz +te_all_0133.json.gz +te_all_0134.json.gz +te_all_0135.json.gz +te_all_0136.json.gz +te_all_0137.json.gz +te_all_0138.json.gz +te_all_0139.json.gz +te_all_0140.json.gz +te_all_0141.json.gz +te_all_0142.json.gz +te_all_0143.json.gz +te_all_0144.json.gz +te_all_0145.json.gz +te_all_0146.json.gz +te_all_0147.json.gz +te_all_0148.json.gz +te_all_0149.json.gz +te_all_0150.json.gz +te_all_0151.json.gz +te_all_0152.json.gz +te_all_0153.json.gz +te_all_0154.json.gz +te_all_0155.json.gz +te_all_0156.json.gz +te_all_0157.json.gz +te_all_0158.json.gz +te_all_0159.json.gz +te_all_0160.json.gz +te_all_0161.json.gz +te_all_0162.json.gz +te_all_0163.json.gz +te_all_0164.json.gz +te_all_0165.json.gz +te_all_0166.json.gz +te_all_0167.json.gz +te_all_0168.json.gz +te_all_0169.json.gz +te_all_0170.json.gz +te_all_0171.json.gz +te_all_0172.json.gz +te_all_0173.json.gz +te_all_0174.json.gz +te_all_0175.json.gz +te_all_0176.json.gz +te_all_0177.json.gz +te_all_0178.json.gz +te_all_0179.json.gz +te_all_0180.json.gz +te_all_0181.json.gz +te_all_0182.json.gz +te_all_0183.json.gz +te_all_0184.json.gz +te_all_0185.json.gz +te_all_0186.json.gz +te_all_0187.json.gz +te_all_0188.json.gz +te_all_0189.json.gz +te_all_0190.json.gz +te_all_0191.json.gz +te_all_0192.json.gz +te_all_0193.json.gz +te_all_0194.json.gz +te_all_0195.json.gz +te_all_0196.json.gz +te_all_0197.json.gz +te_all_0198.json.gz +te_all_0199.json.gz +te_all_0200.json.gz +te_all_0201.json.gz +te_all_0202.json.gz +te_all_0203.json.gz +te_all_0204.json.gz +te_all_0205.json.gz +te_all_0206.json.gz +te_all_0207.json.gz +te_all_0208.json.gz +te_all_0209.json.gz +te_all_0210.json.gz +te_all_0211.json.gz +te_all_0212.json.gz +te_all_0213.json.gz +te_all_0214.json.gz +te_all_0215.json.gz +te_all_0216.json.gz +te_all_0217.json.gz +te_all_0218.json.gz +te_all_0219.json.gz +te_all_0220.json.gz +te_all_0221.json.gz +te_all_0222.json.gz +te_all_0223.json.gz +te_all_0224.json.gz +te_all_0225.json.gz +te_all_0226.json.gz +te_all_0227.json.gz +te_all_0228.json.gz +te_all_0229.json.gz +te_all_0230.json.gz +te_all_0231.json.gz +te_all_0232.json.gz +te_all_0233.json.gz +te_all_0234.json.gz +te_all_0235.json.gz +te_all_0236.json.gz +te_all_0237.json.gz +te_all_0238.json.gz +te_all_0239.json.gz +te_all_0240.json.gz +te_all_0241.json.gz +te_all_0242.json.gz +te_all_0243.json.gz +te_all_0244.json.gz +te_all_0245.json.gz +te_all_0246.json.gz +te_all_0247.json.gz +te_all_0248.json.gz +te_all_0249.json.gz +te_all_0250.json.gz +te_all_0251.json.gz +te_all_0252.json.gz +te_all_0253.json.gz +te_all_0254.json.gz +te_all_0255.json.gz +te_all_0256.json.gz +te_all_0257.json.gz +te_all_0258.json.gz +te_all_0259.json.gz +te_all_0260.json.gz +te_all_0261.json.gz +te_all_0262.json.gz +te_all_0263.json.gz +te_all_0264.json.gz +te_all_0265.json.gz +te_all_0266.json.gz +te_all_0267.json.gz +te_all_0268.json.gz +te_all_0269.json.gz +te_all_0270.json.gz +te_all_0271.json.gz +te_all_0272.json.gz +te_all_0273.json.gz +te_all_0274.json.gz +te_all_0275.json.gz +te_all_0276.json.gz +te_all_0277.json.gz +te_all_0278.json.gz +te_all_0279.json.gz +te_all_0280.json.gz +te_all_0281.json.gz +te_all_0282.json.gz +te_all_0283.json.gz +te_all_0284.json.gz +te_all_0285.json.gz +te_all_0286.json.gz +te_all_0287.json.gz +te_all_0288.json.gz +te_all_0289.json.gz +te_all_0290.json.gz +te_all_0291.json.gz +te_all_0292.json.gz +te_all_0293.json.gz +te_all_0294.json.gz +te_all_0295.json.gz +te_all_0296.json.gz +te_all_0297.json.gz 
+te_all_0298.json.gz +te_all_0299.json.gz +te_all_0300.json.gz +te_all_0301.json.gz +te_all_0302.json.gz +te_all_0303.json.gz +te_all_0304.json.gz +te_all_0305.json.gz +te_all_0306.json.gz +te_all_0307.json.gz +te_all_0308.json.gz +te_all_0309.json.gz +te_all_0310.json.gz +te_all_0311.json.gz +te_all_0312.json.gz +te_all_0313.json.gz +te_all_0314.json.gz +te_all_0315.json.gz +te_all_0316.json.gz +te_all_0317.json.gz +te_all_0318.json.gz +te_all_0319.json.gz +te_all_0320.json.gz +te_all_0321.json.gz +te_all_0322.json.gz +te_all_0323.json.gz +te_all_0324.json.gz +te_all_0325.json.gz +te_all_0326.json.gz +te_all_0327.json.gz +te_all_0328.json.gz +te_all_0329.json.gz +te_all_0330.json.gz +te_all_0331.json.gz +te_all_0332.json.gz +te_all_0333.json.gz +te_all_0334.json.gz +te_all_0335.json.gz +te_all_0336.json.gz +te_all_0337.json.gz +te_all_0338.json.gz +te_all_0339.json.gz +te_all_0340.json.gz +te_all_0341.json.gz +te_all_0342.json.gz +te_all_0343.json.gz +te_all_0344.json.gz +te_all_0345.json.gz +te_all_0346.json.gz +te_all_0347.json.gz +te_all_0348.json.gz +te_all_0349.json.gz +te_all_0350.json.gz +te_all_0351.json.gz +te_all_0352.json.gz +te_all_0353.json.gz +te_all_0354.json.gz +te_all_0355.json.gz +te_all_0356.json.gz +te_all_0357.json.gz +te_all_0358.json.gz +te_all_0359.json.gz +te_all_0360.json.gz +te_all_0361.json.gz +te_all_0362.json.gz +te_all_0363.json.gz +te_all_0364.json.gz +te_all_0365.json.gz +te_all_0366.json.gz +te_all_0367.json.gz +te_all_0368.json.gz +te_all_0369.json.gz +te_all_0370.json.gz +te_all_0371.json.gz +te_all_0372.json.gz +te_all_0373.json.gz +te_all_0374.json.gz +te_all_0375.json.gz +te_all_0376.json.gz +te_all_0377.json.gz +te_all_0378.json.gz +te_all_0379.json.gz +te_all_0380.json.gz +te_all_0381.json.gz +te_all_0382.json.gz +te_all_0383.json.gz +te_all_0384.json.gz +te_all_0385.json.gz +te_all_0386.json.gz +te_all_0387.json.gz +te_all_0388.json.gz +te_all_0389.json.gz +te_all_0390.json.gz +te_all_0391.json.gz +te_all_0392.json.gz +te_all_0393.json.gz +te_all_0394.json.gz +te_all_0395.json.gz +te_all_0396.json.gz +te_all_0397.json.gz +te_all_0398.json.gz +te_all_0399.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/ur.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/ur.txt new file mode 100644 index 0000000000000000000000000000000000000000..44185d5cadb56a1568630da482644ce2a8a09128 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2022-27/ur.txt @@ -0,0 +1,534 @@ +ur_all_0000.json.gz +ur_all_0001.json.gz +ur_all_0002.json.gz +ur_all_0003.json.gz +ur_all_0004.json.gz +ur_all_0005.json.gz +ur_all_0006.json.gz +ur_all_0007.json.gz +ur_all_0008.json.gz +ur_all_0009.json.gz +ur_all_0010.json.gz +ur_all_0011.json.gz +ur_all_0012.json.gz +ur_all_0013.json.gz +ur_all_0014.json.gz +ur_all_0015.json.gz +ur_all_0016.json.gz +ur_all_0017.json.gz +ur_all_0018.json.gz +ur_all_0019.json.gz +ur_all_0020.json.gz +ur_all_0021.json.gz +ur_all_0022.json.gz +ur_all_0023.json.gz +ur_all_0024.json.gz +ur_all_0025.json.gz +ur_all_0026.json.gz +ur_all_0027.json.gz +ur_all_0028.json.gz +ur_all_0029.json.gz +ur_all_0030.json.gz +ur_all_0031.json.gz +ur_all_0032.json.gz +ur_all_0033.json.gz +ur_all_0034.json.gz +ur_all_0035.json.gz +ur_all_0036.json.gz +ur_all_0037.json.gz +ur_all_0038.json.gz +ur_all_0039.json.gz +ur_all_0040.json.gz +ur_all_0041.json.gz +ur_all_0042.json.gz +ur_all_0043.json.gz +ur_all_0044.json.gz +ur_all_0045.json.gz +ur_all_0046.json.gz +ur_all_0047.json.gz +ur_all_0048.json.gz +ur_all_0049.json.gz 
+ur_all_0050.json.gz +ur_all_0051.json.gz +ur_all_0052.json.gz +ur_all_0053.json.gz +ur_all_0054.json.gz +ur_all_0055.json.gz +ur_all_0056.json.gz +ur_all_0057.json.gz +ur_all_0058.json.gz +ur_all_0059.json.gz +ur_all_0060.json.gz +ur_all_0061.json.gz +ur_all_0062.json.gz +ur_all_0063.json.gz +ur_all_0064.json.gz +ur_all_0065.json.gz +ur_all_0066.json.gz +ur_all_0067.json.gz +ur_all_0068.json.gz +ur_all_0069.json.gz +ur_all_0070.json.gz +ur_all_0071.json.gz +ur_all_0072.json.gz +ur_all_0073.json.gz +ur_all_0074.json.gz +ur_all_0075.json.gz +ur_all_0076.json.gz +ur_all_0077.json.gz +ur_all_0078.json.gz +ur_all_0079.json.gz +ur_all_0080.json.gz +ur_all_0081.json.gz +ur_all_0082.json.gz +ur_all_0083.json.gz +ur_all_0084.json.gz +ur_all_0085.json.gz +ur_all_0086.json.gz +ur_all_0087.json.gz +ur_all_0088.json.gz +ur_all_0089.json.gz +ur_all_0090.json.gz +ur_all_0091.json.gz +ur_all_0092.json.gz +ur_all_0093.json.gz +ur_all_0094.json.gz +ur_all_0095.json.gz +ur_all_0096.json.gz +ur_all_0097.json.gz +ur_all_0098.json.gz +ur_all_0099.json.gz +ur_all_0100.json.gz +ur_all_0101.json.gz +ur_all_0102.json.gz +ur_all_0103.json.gz +ur_all_0104.json.gz +ur_all_0105.json.gz +ur_all_0106.json.gz +ur_all_0107.json.gz +ur_all_0108.json.gz +ur_all_0109.json.gz +ur_all_0110.json.gz +ur_all_0111.json.gz +ur_all_0112.json.gz +ur_all_0113.json.gz +ur_all_0114.json.gz +ur_all_0115.json.gz +ur_all_0116.json.gz +ur_all_0117.json.gz +ur_all_0118.json.gz +ur_all_0119.json.gz +ur_all_0120.json.gz +ur_all_0121.json.gz +ur_all_0122.json.gz +ur_all_0123.json.gz +ur_all_0124.json.gz +ur_all_0125.json.gz +ur_all_0126.json.gz +ur_all_0127.json.gz +ur_all_0128.json.gz +ur_all_0129.json.gz +ur_all_0130.json.gz +ur_all_0131.json.gz +ur_all_0132.json.gz +ur_all_0133.json.gz +ur_all_0134.json.gz +ur_all_0135.json.gz +ur_all_0136.json.gz +ur_all_0137.json.gz +ur_all_0138.json.gz +ur_all_0139.json.gz +ur_all_0140.json.gz +ur_all_0141.json.gz +ur_all_0142.json.gz +ur_all_0143.json.gz +ur_all_0144.json.gz +ur_all_0145.json.gz +ur_all_0146.json.gz +ur_all_0147.json.gz +ur_all_0148.json.gz +ur_all_0149.json.gz +ur_all_0150.json.gz +ur_all_0151.json.gz +ur_all_0152.json.gz +ur_all_0153.json.gz +ur_all_0154.json.gz +ur_all_0155.json.gz +ur_all_0156.json.gz +ur_all_0157.json.gz +ur_all_0158.json.gz +ur_all_0159.json.gz +ur_all_0160.json.gz +ur_all_0161.json.gz +ur_all_0162.json.gz +ur_all_0163.json.gz +ur_all_0164.json.gz +ur_all_0165.json.gz +ur_all_0166.json.gz +ur_all_0167.json.gz +ur_all_0168.json.gz +ur_all_0169.json.gz +ur_all_0170.json.gz +ur_all_0171.json.gz +ur_all_0172.json.gz +ur_all_0173.json.gz +ur_all_0174.json.gz +ur_all_0175.json.gz +ur_all_0176.json.gz +ur_all_0177.json.gz +ur_all_0178.json.gz +ur_all_0179.json.gz +ur_all_0180.json.gz +ur_all_0181.json.gz +ur_all_0182.json.gz +ur_all_0183.json.gz +ur_all_0184.json.gz +ur_all_0185.json.gz +ur_all_0186.json.gz +ur_all_0187.json.gz +ur_all_0188.json.gz +ur_all_0189.json.gz +ur_all_0190.json.gz +ur_all_0191.json.gz +ur_all_0192.json.gz +ur_all_0193.json.gz +ur_all_0194.json.gz +ur_all_0195.json.gz +ur_all_0196.json.gz +ur_all_0197.json.gz +ur_all_0198.json.gz +ur_all_0199.json.gz +ur_all_0200.json.gz +ur_all_0201.json.gz +ur_all_0202.json.gz +ur_all_0203.json.gz +ur_all_0204.json.gz +ur_all_0205.json.gz +ur_all_0206.json.gz +ur_all_0207.json.gz +ur_all_0208.json.gz +ur_all_0209.json.gz +ur_all_0210.json.gz +ur_all_0211.json.gz +ur_all_0212.json.gz +ur_all_0213.json.gz +ur_all_0214.json.gz +ur_all_0215.json.gz +ur_all_0216.json.gz +ur_all_0217.json.gz +ur_all_0218.json.gz 
+ur_all_0219.json.gz +ur_all_0220.json.gz +ur_all_0221.json.gz +ur_all_0222.json.gz +ur_all_0223.json.gz +ur_all_0224.json.gz +ur_all_0225.json.gz +ur_all_0226.json.gz +ur_all_0227.json.gz +ur_all_0228.json.gz +ur_all_0229.json.gz +ur_all_0230.json.gz +ur_all_0231.json.gz +ur_all_0232.json.gz +ur_all_0233.json.gz +ur_all_0234.json.gz +ur_all_0235.json.gz +ur_all_0236.json.gz +ur_all_0237.json.gz +ur_all_0238.json.gz +ur_all_0239.json.gz +ur_all_0240.json.gz +ur_all_0241.json.gz +ur_all_0242.json.gz +ur_all_0243.json.gz +ur_all_0244.json.gz +ur_all_0245.json.gz +ur_all_0246.json.gz +ur_all_0247.json.gz +ur_all_0248.json.gz +ur_all_0249.json.gz +ur_all_0250.json.gz +ur_all_0251.json.gz +ur_all_0252.json.gz +ur_all_0253.json.gz +ur_all_0254.json.gz +ur_all_0255.json.gz +ur_all_0256.json.gz +ur_all_0257.json.gz +ur_all_0258.json.gz +ur_all_0259.json.gz +ur_all_0260.json.gz +ur_all_0261.json.gz +ur_all_0262.json.gz +ur_all_0263.json.gz +ur_all_0264.json.gz +ur_all_0265.json.gz +ur_all_0266.json.gz +ur_all_0267.json.gz +ur_all_0268.json.gz +ur_all_0269.json.gz +ur_all_0270.json.gz +ur_all_0271.json.gz +ur_all_0272.json.gz +ur_all_0273.json.gz +ur_all_0274.json.gz +ur_all_0275.json.gz +ur_all_0276.json.gz +ur_all_0277.json.gz +ur_all_0278.json.gz +ur_all_0279.json.gz +ur_all_0280.json.gz +ur_all_0281.json.gz +ur_all_0282.json.gz +ur_all_0283.json.gz +ur_all_0284.json.gz +ur_all_0285.json.gz +ur_all_0286.json.gz +ur_all_0287.json.gz +ur_all_0288.json.gz +ur_all_0289.json.gz +ur_all_0290.json.gz +ur_all_0291.json.gz +ur_all_0292.json.gz +ur_all_0293.json.gz +ur_all_0294.json.gz +ur_all_0295.json.gz +ur_all_0296.json.gz +ur_all_0297.json.gz +ur_all_0298.json.gz +ur_all_0299.json.gz +ur_all_0300.json.gz +ur_all_0301.json.gz +ur_all_0302.json.gz +ur_all_0303.json.gz +ur_all_0304.json.gz +ur_all_0305.json.gz +ur_all_0306.json.gz +ur_all_0307.json.gz +ur_all_0308.json.gz +ur_all_0309.json.gz +ur_all_0310.json.gz +ur_all_0311.json.gz +ur_all_0312.json.gz +ur_all_0313.json.gz +ur_all_0314.json.gz +ur_all_0315.json.gz +ur_all_0316.json.gz +ur_all_0317.json.gz +ur_all_0318.json.gz +ur_all_0319.json.gz +ur_all_0320.json.gz +ur_all_0321.json.gz +ur_all_0322.json.gz +ur_all_0323.json.gz +ur_all_0324.json.gz +ur_all_0325.json.gz +ur_all_0326.json.gz +ur_all_0327.json.gz +ur_all_0328.json.gz +ur_all_0329.json.gz +ur_all_0330.json.gz +ur_all_0331.json.gz +ur_all_0332.json.gz +ur_all_0333.json.gz +ur_all_0334.json.gz +ur_all_0335.json.gz +ur_all_0336.json.gz +ur_all_0337.json.gz +ur_all_0338.json.gz +ur_all_0339.json.gz +ur_all_0340.json.gz +ur_all_0341.json.gz +ur_all_0342.json.gz +ur_all_0343.json.gz +ur_all_0344.json.gz +ur_all_0345.json.gz +ur_all_0346.json.gz +ur_all_0347.json.gz +ur_all_0348.json.gz +ur_all_0349.json.gz +ur_all_0350.json.gz +ur_all_0351.json.gz +ur_all_0352.json.gz +ur_all_0353.json.gz +ur_all_0354.json.gz +ur_all_0355.json.gz +ur_all_0356.json.gz +ur_all_0357.json.gz +ur_all_0358.json.gz +ur_all_0359.json.gz +ur_all_0360.json.gz +ur_all_0361.json.gz +ur_all_0362.json.gz +ur_all_0363.json.gz +ur_all_0364.json.gz +ur_all_0365.json.gz +ur_all_0366.json.gz +ur_all_0367.json.gz +ur_all_0368.json.gz +ur_all_0369.json.gz +ur_all_0370.json.gz +ur_all_0371.json.gz +ur_all_0372.json.gz +ur_all_0373.json.gz +ur_all_0374.json.gz +ur_all_0375.json.gz +ur_all_0376.json.gz +ur_all_0377.json.gz +ur_all_0378.json.gz +ur_all_0379.json.gz +ur_all_0380.json.gz +ur_all_0381.json.gz +ur_all_0382.json.gz +ur_all_0383.json.gz +ur_all_0384.json.gz +ur_all_0385.json.gz +ur_all_0386.json.gz +ur_all_0387.json.gz 
+ur_all_0388.json.gz +ur_all_0389.json.gz +ur_all_0390.json.gz +ur_all_0391.json.gz +ur_all_0392.json.gz +ur_all_0393.json.gz +ur_all_0394.json.gz +ur_all_0395.json.gz +ur_all_0396.json.gz +ur_all_0397.json.gz +ur_all_0398.json.gz +ur_all_0399.json.gz +ur_all_0400.json.gz +ur_all_0401.json.gz +ur_all_0402.json.gz +ur_all_0403.json.gz +ur_all_0404.json.gz +ur_all_0405.json.gz +ur_all_0406.json.gz +ur_all_0407.json.gz +ur_all_0408.json.gz +ur_all_0409.json.gz +ur_all_0410.json.gz +ur_all_0411.json.gz +ur_all_0412.json.gz +ur_all_0413.json.gz +ur_all_0414.json.gz +ur_all_0415.json.gz +ur_all_0416.json.gz +ur_all_0417.json.gz +ur_all_0418.json.gz +ur_all_0419.json.gz +ur_all_0420.json.gz +ur_all_0421.json.gz +ur_all_0422.json.gz +ur_all_0423.json.gz +ur_all_0424.json.gz +ur_all_0425.json.gz +ur_all_0426.json.gz +ur_all_0427.json.gz +ur_all_0428.json.gz +ur_all_0429.json.gz +ur_all_0430.json.gz +ur_all_0431.json.gz +ur_all_0432.json.gz +ur_all_0433.json.gz +ur_all_0434.json.gz +ur_all_0435.json.gz +ur_all_0436.json.gz +ur_all_0437.json.gz +ur_all_0438.json.gz +ur_all_0439.json.gz +ur_all_0440.json.gz +ur_all_0441.json.gz +ur_all_0442.json.gz +ur_all_0443.json.gz +ur_all_0444.json.gz +ur_all_0445.json.gz +ur_all_0446.json.gz +ur_all_0447.json.gz +ur_all_0448.json.gz +ur_all_0449.json.gz +ur_all_0450.json.gz +ur_all_0451.json.gz +ur_all_0452.json.gz +ur_all_0453.json.gz +ur_all_0454.json.gz +ur_all_0455.json.gz +ur_all_0456.json.gz +ur_all_0457.json.gz +ur_all_0458.json.gz +ur_all_0459.json.gz +ur_all_0460.json.gz +ur_all_0461.json.gz +ur_all_0462.json.gz +ur_all_0463.json.gz +ur_all_0464.json.gz +ur_all_0465.json.gz +ur_all_0466.json.gz +ur_all_0467.json.gz +ur_all_0468.json.gz +ur_all_0469.json.gz +ur_all_0470.json.gz +ur_all_0471.json.gz +ur_all_0472.json.gz +ur_all_0473.json.gz +ur_all_0474.json.gz +ur_all_0475.json.gz +ur_all_0476.json.gz +ur_all_0477.json.gz +ur_all_0478.json.gz +ur_all_0479.json.gz +ur_all_0480.json.gz +ur_all_0481.json.gz +ur_all_0482.json.gz +ur_all_0483.json.gz +ur_all_0484.json.gz +ur_all_0485.json.gz +ur_all_0486.json.gz +ur_all_0487.json.gz +ur_all_0488.json.gz +ur_all_0489.json.gz +ur_all_0490.json.gz +ur_all_0491.json.gz +ur_all_0492.json.gz +ur_all_0493.json.gz +ur_all_0494.json.gz +ur_all_0495.json.gz +ur_all_0496.json.gz +ur_all_0497.json.gz +ur_all_0498.json.gz +ur_all_0499.json.gz +ur_all_0500.json.gz +ur_all_0501.json.gz +ur_all_0502.json.gz +ur_all_0503.json.gz +ur_all_0504.json.gz +ur_all_0505.json.gz +ur_all_0506.json.gz +ur_all_0507.json.gz +ur_all_0508.json.gz +ur_all_0509.json.gz +ur_all_0510.json.gz +ur_all_0511.json.gz +ur_all_0512.json.gz +ur_all_0513.json.gz +ur_all_0514.json.gz +ur_all_0515.json.gz +ur_all_0516.json.gz +ur_all_0517.json.gz +ur_all_0518.json.gz +ur_all_0519.json.gz +ur_all_0520.json.gz +ur_all_0521.json.gz +ur_all_0522.json.gz +ur_all_0523.json.gz +ur_all_0524.json.gz +ur_all_0525.json.gz +ur_all_0526.json.gz +ur_all_0527.json.gz +ur_all_0528.json.gz +ur_all_0529.json.gz +ur_all_0530.json.gz +ur_all_0531.json.gz +ur_all_0532.json.gz +ur_all_0533.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-06/as.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-06/as.txt new file mode 100644 index 0000000000000000000000000000000000000000..c405bec37518c24f440341cf1b3121ea9c58b9d8 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-06/as.txt @@ -0,0 +1,73 @@ +as_all_0000.json.gz +as_all_0001.json.gz +as_all_0002.json.gz +as_all_0003.json.gz +as_all_0004.json.gz +as_all_0005.json.gz 
+as_all_0006.json.gz +as_all_0007.json.gz +as_all_0008.json.gz +as_all_0009.json.gz +as_all_0010.json.gz +as_all_0011.json.gz +as_all_0012.json.gz +as_all_0013.json.gz +as_all_0014.json.gz +as_all_0015.json.gz +as_all_0016.json.gz +as_all_0017.json.gz +as_all_0018.json.gz +as_all_0019.json.gz +as_all_0020.json.gz +as_all_0021.json.gz +as_all_0022.json.gz +as_all_0023.json.gz +as_all_0024.json.gz +as_all_0025.json.gz +as_all_0026.json.gz +as_all_0027.json.gz +as_all_0028.json.gz +as_all_0029.json.gz +as_all_0030.json.gz +as_all_0031.json.gz +as_all_0032.json.gz +as_all_0033.json.gz +as_all_0034.json.gz +as_all_0035.json.gz +as_all_0036.json.gz +as_all_0037.json.gz +as_all_0038.json.gz +as_all_0039.json.gz +as_all_0040.json.gz +as_all_0041.json.gz +as_all_0042.json.gz +as_all_0043.json.gz +as_all_0044.json.gz +as_all_0045.json.gz +as_all_0046.json.gz +as_all_0047.json.gz +as_all_0048.json.gz +as_all_0049.json.gz +as_all_0050.json.gz +as_all_0051.json.gz +as_all_0052.json.gz +as_all_0053.json.gz +as_all_0054.json.gz +as_all_0055.json.gz +as_all_0056.json.gz +as_all_0057.json.gz +as_all_0058.json.gz +as_all_0059.json.gz +as_all_0060.json.gz +as_all_0061.json.gz +as_all_0062.json.gz +as_all_0063.json.gz +as_all_0064.json.gz +as_all_0065.json.gz +as_all_0066.json.gz +as_all_0067.json.gz +as_all_0068.json.gz +as_all_0069.json.gz +as_all_0070.json.gz +as_all_0071.json.gz +as_all_0072.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-06/kn.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-06/kn.txt new file mode 100644 index 0000000000000000000000000000000000000000..8faab05e1a607f25fe05b1ee82afdbb0ce15313c --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-06/kn.txt @@ -0,0 +1,320 @@ +kn_all_0000.json.gz +kn_all_0001.json.gz +kn_all_0002.json.gz +kn_all_0003.json.gz +kn_all_0004.json.gz +kn_all_0005.json.gz +kn_all_0006.json.gz +kn_all_0007.json.gz +kn_all_0008.json.gz +kn_all_0009.json.gz +kn_all_0010.json.gz +kn_all_0011.json.gz +kn_all_0012.json.gz +kn_all_0013.json.gz +kn_all_0014.json.gz +kn_all_0015.json.gz +kn_all_0016.json.gz +kn_all_0017.json.gz +kn_all_0018.json.gz +kn_all_0019.json.gz +kn_all_0020.json.gz +kn_all_0021.json.gz +kn_all_0022.json.gz +kn_all_0023.json.gz +kn_all_0024.json.gz +kn_all_0025.json.gz +kn_all_0026.json.gz +kn_all_0027.json.gz +kn_all_0028.json.gz +kn_all_0029.json.gz +kn_all_0030.json.gz +kn_all_0031.json.gz +kn_all_0032.json.gz +kn_all_0033.json.gz +kn_all_0034.json.gz +kn_all_0035.json.gz +kn_all_0036.json.gz +kn_all_0037.json.gz +kn_all_0038.json.gz +kn_all_0039.json.gz +kn_all_0040.json.gz +kn_all_0041.json.gz +kn_all_0042.json.gz +kn_all_0043.json.gz +kn_all_0044.json.gz +kn_all_0045.json.gz +kn_all_0046.json.gz +kn_all_0047.json.gz +kn_all_0048.json.gz +kn_all_0049.json.gz +kn_all_0050.json.gz +kn_all_0051.json.gz +kn_all_0052.json.gz +kn_all_0053.json.gz +kn_all_0054.json.gz +kn_all_0055.json.gz +kn_all_0056.json.gz +kn_all_0057.json.gz +kn_all_0058.json.gz +kn_all_0059.json.gz +kn_all_0060.json.gz +kn_all_0061.json.gz +kn_all_0062.json.gz +kn_all_0063.json.gz +kn_all_0064.json.gz +kn_all_0065.json.gz +kn_all_0066.json.gz +kn_all_0067.json.gz +kn_all_0068.json.gz +kn_all_0069.json.gz +kn_all_0070.json.gz +kn_all_0071.json.gz +kn_all_0072.json.gz +kn_all_0073.json.gz +kn_all_0074.json.gz +kn_all_0075.json.gz +kn_all_0076.json.gz +kn_all_0077.json.gz +kn_all_0078.json.gz +kn_all_0079.json.gz +kn_all_0080.json.gz +kn_all_0081.json.gz +kn_all_0082.json.gz +kn_all_0083.json.gz +kn_all_0084.json.gz 
+kn_all_0085.json.gz +kn_all_0086.json.gz +kn_all_0087.json.gz +kn_all_0088.json.gz +kn_all_0089.json.gz +kn_all_0090.json.gz +kn_all_0091.json.gz +kn_all_0092.json.gz +kn_all_0093.json.gz +kn_all_0094.json.gz +kn_all_0095.json.gz +kn_all_0096.json.gz +kn_all_0097.json.gz +kn_all_0098.json.gz +kn_all_0099.json.gz +kn_all_0100.json.gz +kn_all_0101.json.gz +kn_all_0102.json.gz +kn_all_0103.json.gz +kn_all_0104.json.gz +kn_all_0105.json.gz +kn_all_0106.json.gz +kn_all_0107.json.gz +kn_all_0108.json.gz +kn_all_0109.json.gz +kn_all_0110.json.gz +kn_all_0111.json.gz +kn_all_0112.json.gz +kn_all_0113.json.gz +kn_all_0114.json.gz +kn_all_0115.json.gz +kn_all_0116.json.gz +kn_all_0117.json.gz +kn_all_0118.json.gz +kn_all_0119.json.gz +kn_all_0120.json.gz +kn_all_0121.json.gz +kn_all_0122.json.gz +kn_all_0123.json.gz +kn_all_0124.json.gz +kn_all_0125.json.gz +kn_all_0126.json.gz +kn_all_0127.json.gz +kn_all_0128.json.gz +kn_all_0129.json.gz +kn_all_0130.json.gz +kn_all_0131.json.gz +kn_all_0132.json.gz +kn_all_0133.json.gz +kn_all_0134.json.gz +kn_all_0135.json.gz +kn_all_0136.json.gz +kn_all_0137.json.gz +kn_all_0138.json.gz +kn_all_0139.json.gz +kn_all_0140.json.gz +kn_all_0141.json.gz +kn_all_0142.json.gz +kn_all_0143.json.gz +kn_all_0144.json.gz +kn_all_0145.json.gz +kn_all_0146.json.gz +kn_all_0147.json.gz +kn_all_0148.json.gz +kn_all_0149.json.gz +kn_all_0150.json.gz +kn_all_0151.json.gz +kn_all_0152.json.gz +kn_all_0153.json.gz +kn_all_0154.json.gz +kn_all_0155.json.gz +kn_all_0156.json.gz +kn_all_0157.json.gz +kn_all_0158.json.gz +kn_all_0159.json.gz +kn_all_0160.json.gz +kn_all_0161.json.gz +kn_all_0162.json.gz +kn_all_0163.json.gz +kn_all_0164.json.gz +kn_all_0165.json.gz +kn_all_0166.json.gz +kn_all_0167.json.gz +kn_all_0168.json.gz +kn_all_0169.json.gz +kn_all_0170.json.gz +kn_all_0171.json.gz +kn_all_0172.json.gz +kn_all_0173.json.gz +kn_all_0174.json.gz +kn_all_0175.json.gz +kn_all_0176.json.gz +kn_all_0177.json.gz +kn_all_0178.json.gz +kn_all_0179.json.gz +kn_all_0180.json.gz +kn_all_0181.json.gz +kn_all_0182.json.gz +kn_all_0183.json.gz +kn_all_0184.json.gz +kn_all_0185.json.gz +kn_all_0186.json.gz +kn_all_0187.json.gz +kn_all_0188.json.gz +kn_all_0189.json.gz +kn_all_0190.json.gz +kn_all_0191.json.gz +kn_all_0192.json.gz +kn_all_0193.json.gz +kn_all_0194.json.gz +kn_all_0195.json.gz +kn_all_0196.json.gz +kn_all_0197.json.gz +kn_all_0198.json.gz +kn_all_0199.json.gz +kn_all_0200.json.gz +kn_all_0201.json.gz +kn_all_0202.json.gz +kn_all_0203.json.gz +kn_all_0204.json.gz +kn_all_0205.json.gz +kn_all_0206.json.gz +kn_all_0207.json.gz +kn_all_0208.json.gz +kn_all_0209.json.gz +kn_all_0210.json.gz +kn_all_0211.json.gz +kn_all_0212.json.gz +kn_all_0213.json.gz +kn_all_0214.json.gz +kn_all_0215.json.gz +kn_all_0216.json.gz +kn_all_0217.json.gz +kn_all_0218.json.gz +kn_all_0219.json.gz +kn_all_0220.json.gz +kn_all_0221.json.gz +kn_all_0222.json.gz +kn_all_0223.json.gz +kn_all_0224.json.gz +kn_all_0225.json.gz +kn_all_0226.json.gz +kn_all_0227.json.gz +kn_all_0228.json.gz +kn_all_0229.json.gz +kn_all_0230.json.gz +kn_all_0231.json.gz +kn_all_0232.json.gz +kn_all_0233.json.gz +kn_all_0234.json.gz +kn_all_0235.json.gz +kn_all_0236.json.gz +kn_all_0237.json.gz +kn_all_0238.json.gz +kn_all_0239.json.gz +kn_all_0240.json.gz +kn_all_0241.json.gz +kn_all_0242.json.gz +kn_all_0243.json.gz +kn_all_0244.json.gz +kn_all_0245.json.gz +kn_all_0246.json.gz +kn_all_0247.json.gz +kn_all_0248.json.gz +kn_all_0249.json.gz +kn_all_0250.json.gz +kn_all_0251.json.gz +kn_all_0252.json.gz +kn_all_0253.json.gz 
+kn_all_0254.json.gz +kn_all_0255.json.gz +kn_all_0256.json.gz +kn_all_0257.json.gz +kn_all_0258.json.gz +kn_all_0259.json.gz +kn_all_0260.json.gz +kn_all_0261.json.gz +kn_all_0262.json.gz +kn_all_0263.json.gz +kn_all_0264.json.gz +kn_all_0265.json.gz +kn_all_0266.json.gz +kn_all_0267.json.gz +kn_all_0268.json.gz +kn_all_0269.json.gz +kn_all_0270.json.gz +kn_all_0271.json.gz +kn_all_0272.json.gz +kn_all_0273.json.gz +kn_all_0274.json.gz +kn_all_0275.json.gz +kn_all_0276.json.gz +kn_all_0277.json.gz +kn_all_0278.json.gz +kn_all_0279.json.gz +kn_all_0280.json.gz +kn_all_0281.json.gz +kn_all_0282.json.gz +kn_all_0283.json.gz +kn_all_0284.json.gz +kn_all_0285.json.gz +kn_all_0286.json.gz +kn_all_0287.json.gz +kn_all_0288.json.gz +kn_all_0289.json.gz +kn_all_0290.json.gz +kn_all_0291.json.gz +kn_all_0292.json.gz +kn_all_0293.json.gz +kn_all_0294.json.gz +kn_all_0295.json.gz +kn_all_0296.json.gz +kn_all_0297.json.gz +kn_all_0298.json.gz +kn_all_0299.json.gz +kn_all_0300.json.gz +kn_all_0301.json.gz +kn_all_0302.json.gz +kn_all_0303.json.gz +kn_all_0304.json.gz +kn_all_0305.json.gz +kn_all_0306.json.gz +kn_all_0307.json.gz +kn_all_0308.json.gz +kn_all_0309.json.gz +kn_all_0310.json.gz +kn_all_0311.json.gz +kn_all_0312.json.gz +kn_all_0313.json.gz +kn_all_0314.json.gz +kn_all_0315.json.gz +kn_all_0316.json.gz +kn_all_0317.json.gz +kn_all_0318.json.gz +kn_all_0319.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-06/ml.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-06/ml.txt new file mode 100644 index 0000000000000000000000000000000000000000..0689798ece65eb7a926ad23cb73ec53ddf5a6fe8 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-06/ml.txt @@ -0,0 +1,534 @@ +ml_all_0000.json.gz +ml_all_0001.json.gz +ml_all_0002.json.gz +ml_all_0003.json.gz +ml_all_0004.json.gz +ml_all_0005.json.gz +ml_all_0006.json.gz +ml_all_0007.json.gz +ml_all_0008.json.gz +ml_all_0009.json.gz +ml_all_0010.json.gz +ml_all_0011.json.gz +ml_all_0012.json.gz +ml_all_0013.json.gz +ml_all_0014.json.gz +ml_all_0015.json.gz +ml_all_0016.json.gz +ml_all_0017.json.gz +ml_all_0018.json.gz +ml_all_0019.json.gz +ml_all_0020.json.gz +ml_all_0021.json.gz +ml_all_0022.json.gz +ml_all_0023.json.gz +ml_all_0024.json.gz +ml_all_0025.json.gz +ml_all_0026.json.gz +ml_all_0027.json.gz +ml_all_0028.json.gz +ml_all_0029.json.gz +ml_all_0030.json.gz +ml_all_0031.json.gz +ml_all_0032.json.gz +ml_all_0033.json.gz +ml_all_0034.json.gz +ml_all_0035.json.gz +ml_all_0036.json.gz +ml_all_0037.json.gz +ml_all_0038.json.gz +ml_all_0039.json.gz +ml_all_0040.json.gz +ml_all_0041.json.gz +ml_all_0042.json.gz +ml_all_0043.json.gz +ml_all_0044.json.gz +ml_all_0045.json.gz +ml_all_0046.json.gz +ml_all_0047.json.gz +ml_all_0048.json.gz +ml_all_0049.json.gz +ml_all_0050.json.gz +ml_all_0051.json.gz +ml_all_0052.json.gz +ml_all_0053.json.gz +ml_all_0054.json.gz +ml_all_0055.json.gz +ml_all_0056.json.gz +ml_all_0057.json.gz +ml_all_0058.json.gz +ml_all_0059.json.gz +ml_all_0060.json.gz +ml_all_0061.json.gz +ml_all_0062.json.gz +ml_all_0063.json.gz +ml_all_0064.json.gz +ml_all_0065.json.gz +ml_all_0066.json.gz +ml_all_0067.json.gz +ml_all_0068.json.gz +ml_all_0069.json.gz +ml_all_0070.json.gz +ml_all_0071.json.gz +ml_all_0072.json.gz +ml_all_0073.json.gz +ml_all_0074.json.gz +ml_all_0075.json.gz +ml_all_0076.json.gz +ml_all_0077.json.gz +ml_all_0078.json.gz +ml_all_0079.json.gz +ml_all_0080.json.gz +ml_all_0081.json.gz +ml_all_0082.json.gz +ml_all_0083.json.gz +ml_all_0084.json.gz +ml_all_0085.json.gz 
+ml_all_0086.json.gz +ml_all_0087.json.gz +ml_all_0088.json.gz +ml_all_0089.json.gz +ml_all_0090.json.gz +ml_all_0091.json.gz +ml_all_0092.json.gz +ml_all_0093.json.gz +ml_all_0094.json.gz +ml_all_0095.json.gz +ml_all_0096.json.gz +ml_all_0097.json.gz +ml_all_0098.json.gz +ml_all_0099.json.gz +ml_all_0100.json.gz +ml_all_0101.json.gz +ml_all_0102.json.gz +ml_all_0103.json.gz +ml_all_0104.json.gz +ml_all_0105.json.gz +ml_all_0106.json.gz +ml_all_0107.json.gz +ml_all_0108.json.gz +ml_all_0109.json.gz +ml_all_0110.json.gz +ml_all_0111.json.gz +ml_all_0112.json.gz +ml_all_0113.json.gz +ml_all_0114.json.gz +ml_all_0115.json.gz +ml_all_0116.json.gz +ml_all_0117.json.gz +ml_all_0118.json.gz +ml_all_0119.json.gz +ml_all_0120.json.gz +ml_all_0121.json.gz +ml_all_0122.json.gz +ml_all_0123.json.gz +ml_all_0124.json.gz +ml_all_0125.json.gz +ml_all_0126.json.gz +ml_all_0127.json.gz +ml_all_0128.json.gz +ml_all_0129.json.gz +ml_all_0130.json.gz +ml_all_0131.json.gz +ml_all_0132.json.gz +ml_all_0133.json.gz +ml_all_0134.json.gz +ml_all_0135.json.gz +ml_all_0136.json.gz +ml_all_0137.json.gz +ml_all_0138.json.gz +ml_all_0139.json.gz +ml_all_0140.json.gz +ml_all_0141.json.gz +ml_all_0142.json.gz +ml_all_0143.json.gz +ml_all_0144.json.gz +ml_all_0145.json.gz +ml_all_0146.json.gz +ml_all_0147.json.gz +ml_all_0148.json.gz +ml_all_0149.json.gz +ml_all_0150.json.gz +ml_all_0151.json.gz +ml_all_0152.json.gz +ml_all_0153.json.gz +ml_all_0154.json.gz +ml_all_0155.json.gz +ml_all_0156.json.gz +ml_all_0157.json.gz +ml_all_0158.json.gz +ml_all_0159.json.gz +ml_all_0160.json.gz +ml_all_0161.json.gz +ml_all_0162.json.gz +ml_all_0163.json.gz +ml_all_0164.json.gz +ml_all_0165.json.gz +ml_all_0166.json.gz +ml_all_0167.json.gz +ml_all_0168.json.gz +ml_all_0169.json.gz +ml_all_0170.json.gz +ml_all_0171.json.gz +ml_all_0172.json.gz +ml_all_0173.json.gz +ml_all_0174.json.gz +ml_all_0175.json.gz +ml_all_0176.json.gz +ml_all_0177.json.gz +ml_all_0178.json.gz +ml_all_0179.json.gz +ml_all_0180.json.gz +ml_all_0181.json.gz +ml_all_0182.json.gz +ml_all_0183.json.gz +ml_all_0184.json.gz +ml_all_0185.json.gz +ml_all_0186.json.gz +ml_all_0187.json.gz +ml_all_0188.json.gz +ml_all_0189.json.gz +ml_all_0190.json.gz +ml_all_0191.json.gz +ml_all_0192.json.gz +ml_all_0193.json.gz +ml_all_0194.json.gz +ml_all_0195.json.gz +ml_all_0196.json.gz +ml_all_0197.json.gz +ml_all_0198.json.gz +ml_all_0199.json.gz +ml_all_0200.json.gz +ml_all_0201.json.gz +ml_all_0202.json.gz +ml_all_0203.json.gz +ml_all_0204.json.gz +ml_all_0205.json.gz +ml_all_0206.json.gz +ml_all_0207.json.gz +ml_all_0208.json.gz +ml_all_0209.json.gz +ml_all_0210.json.gz +ml_all_0211.json.gz +ml_all_0212.json.gz +ml_all_0213.json.gz +ml_all_0214.json.gz +ml_all_0215.json.gz +ml_all_0216.json.gz +ml_all_0217.json.gz +ml_all_0218.json.gz +ml_all_0219.json.gz +ml_all_0220.json.gz +ml_all_0221.json.gz +ml_all_0222.json.gz +ml_all_0223.json.gz +ml_all_0224.json.gz +ml_all_0225.json.gz +ml_all_0226.json.gz +ml_all_0227.json.gz +ml_all_0228.json.gz +ml_all_0229.json.gz +ml_all_0230.json.gz +ml_all_0231.json.gz +ml_all_0232.json.gz +ml_all_0233.json.gz +ml_all_0234.json.gz +ml_all_0235.json.gz +ml_all_0236.json.gz +ml_all_0237.json.gz +ml_all_0238.json.gz +ml_all_0239.json.gz +ml_all_0240.json.gz +ml_all_0241.json.gz +ml_all_0242.json.gz +ml_all_0243.json.gz +ml_all_0244.json.gz +ml_all_0245.json.gz +ml_all_0246.json.gz +ml_all_0247.json.gz +ml_all_0248.json.gz +ml_all_0249.json.gz +ml_all_0250.json.gz +ml_all_0251.json.gz +ml_all_0252.json.gz +ml_all_0253.json.gz +ml_all_0254.json.gz 
+ml_all_0255.json.gz +ml_all_0256.json.gz +ml_all_0257.json.gz +ml_all_0258.json.gz +ml_all_0259.json.gz +ml_all_0260.json.gz +ml_all_0261.json.gz +ml_all_0262.json.gz +ml_all_0263.json.gz +ml_all_0264.json.gz +ml_all_0265.json.gz +ml_all_0266.json.gz +ml_all_0267.json.gz +ml_all_0268.json.gz +ml_all_0269.json.gz +ml_all_0270.json.gz +ml_all_0271.json.gz +ml_all_0272.json.gz +ml_all_0273.json.gz +ml_all_0274.json.gz +ml_all_0275.json.gz +ml_all_0276.json.gz +ml_all_0277.json.gz +ml_all_0278.json.gz +ml_all_0279.json.gz +ml_all_0280.json.gz +ml_all_0281.json.gz +ml_all_0282.json.gz +ml_all_0283.json.gz +ml_all_0284.json.gz +ml_all_0285.json.gz +ml_all_0286.json.gz +ml_all_0287.json.gz +ml_all_0288.json.gz +ml_all_0289.json.gz +ml_all_0290.json.gz +ml_all_0291.json.gz +ml_all_0292.json.gz +ml_all_0293.json.gz +ml_all_0294.json.gz +ml_all_0295.json.gz +ml_all_0296.json.gz +ml_all_0297.json.gz +ml_all_0298.json.gz +ml_all_0299.json.gz +ml_all_0300.json.gz +ml_all_0301.json.gz +ml_all_0302.json.gz +ml_all_0303.json.gz +ml_all_0304.json.gz +ml_all_0305.json.gz +ml_all_0306.json.gz +ml_all_0307.json.gz +ml_all_0308.json.gz +ml_all_0309.json.gz +ml_all_0310.json.gz +ml_all_0311.json.gz +ml_all_0312.json.gz +ml_all_0313.json.gz +ml_all_0314.json.gz +ml_all_0315.json.gz +ml_all_0316.json.gz +ml_all_0317.json.gz +ml_all_0318.json.gz +ml_all_0319.json.gz +ml_all_0320.json.gz +ml_all_0321.json.gz +ml_all_0322.json.gz +ml_all_0323.json.gz +ml_all_0324.json.gz +ml_all_0325.json.gz +ml_all_0326.json.gz +ml_all_0327.json.gz +ml_all_0328.json.gz +ml_all_0329.json.gz +ml_all_0330.json.gz +ml_all_0331.json.gz +ml_all_0332.json.gz +ml_all_0333.json.gz +ml_all_0334.json.gz +ml_all_0335.json.gz +ml_all_0336.json.gz +ml_all_0337.json.gz +ml_all_0338.json.gz +ml_all_0339.json.gz +ml_all_0340.json.gz +ml_all_0341.json.gz +ml_all_0342.json.gz +ml_all_0343.json.gz +ml_all_0344.json.gz +ml_all_0345.json.gz +ml_all_0346.json.gz +ml_all_0347.json.gz +ml_all_0348.json.gz +ml_all_0349.json.gz +ml_all_0350.json.gz +ml_all_0351.json.gz +ml_all_0352.json.gz +ml_all_0353.json.gz +ml_all_0354.json.gz +ml_all_0355.json.gz +ml_all_0356.json.gz +ml_all_0357.json.gz +ml_all_0358.json.gz +ml_all_0359.json.gz +ml_all_0360.json.gz +ml_all_0361.json.gz +ml_all_0362.json.gz +ml_all_0363.json.gz +ml_all_0364.json.gz +ml_all_0365.json.gz +ml_all_0366.json.gz +ml_all_0367.json.gz +ml_all_0368.json.gz +ml_all_0369.json.gz +ml_all_0370.json.gz +ml_all_0371.json.gz +ml_all_0372.json.gz +ml_all_0373.json.gz +ml_all_0374.json.gz +ml_all_0375.json.gz +ml_all_0376.json.gz +ml_all_0377.json.gz +ml_all_0378.json.gz +ml_all_0379.json.gz +ml_all_0380.json.gz +ml_all_0381.json.gz +ml_all_0382.json.gz +ml_all_0383.json.gz +ml_all_0384.json.gz +ml_all_0385.json.gz +ml_all_0386.json.gz +ml_all_0387.json.gz +ml_all_0388.json.gz +ml_all_0389.json.gz +ml_all_0390.json.gz +ml_all_0391.json.gz +ml_all_0392.json.gz +ml_all_0393.json.gz +ml_all_0394.json.gz +ml_all_0395.json.gz +ml_all_0396.json.gz +ml_all_0397.json.gz +ml_all_0398.json.gz +ml_all_0399.json.gz +ml_all_0400.json.gz +ml_all_0401.json.gz +ml_all_0402.json.gz +ml_all_0403.json.gz +ml_all_0404.json.gz +ml_all_0405.json.gz +ml_all_0406.json.gz +ml_all_0407.json.gz +ml_all_0408.json.gz +ml_all_0409.json.gz +ml_all_0410.json.gz +ml_all_0411.json.gz +ml_all_0412.json.gz +ml_all_0413.json.gz +ml_all_0414.json.gz +ml_all_0415.json.gz +ml_all_0416.json.gz +ml_all_0417.json.gz +ml_all_0418.json.gz +ml_all_0419.json.gz +ml_all_0420.json.gz +ml_all_0421.json.gz +ml_all_0422.json.gz +ml_all_0423.json.gz 
+ml_all_0424.json.gz +ml_all_0425.json.gz +ml_all_0426.json.gz +ml_all_0427.json.gz +ml_all_0428.json.gz +ml_all_0429.json.gz +ml_all_0430.json.gz +ml_all_0431.json.gz +ml_all_0432.json.gz +ml_all_0433.json.gz +ml_all_0434.json.gz +ml_all_0435.json.gz +ml_all_0436.json.gz +ml_all_0437.json.gz +ml_all_0438.json.gz +ml_all_0439.json.gz +ml_all_0440.json.gz +ml_all_0441.json.gz +ml_all_0442.json.gz +ml_all_0443.json.gz +ml_all_0444.json.gz +ml_all_0445.json.gz +ml_all_0446.json.gz +ml_all_0447.json.gz +ml_all_0448.json.gz +ml_all_0449.json.gz +ml_all_0450.json.gz +ml_all_0451.json.gz +ml_all_0452.json.gz +ml_all_0453.json.gz +ml_all_0454.json.gz +ml_all_0455.json.gz +ml_all_0456.json.gz +ml_all_0457.json.gz +ml_all_0458.json.gz +ml_all_0459.json.gz +ml_all_0460.json.gz +ml_all_0461.json.gz +ml_all_0462.json.gz +ml_all_0463.json.gz +ml_all_0464.json.gz +ml_all_0465.json.gz +ml_all_0466.json.gz +ml_all_0467.json.gz +ml_all_0468.json.gz +ml_all_0469.json.gz +ml_all_0470.json.gz +ml_all_0471.json.gz +ml_all_0472.json.gz +ml_all_0473.json.gz +ml_all_0474.json.gz +ml_all_0475.json.gz +ml_all_0476.json.gz +ml_all_0477.json.gz +ml_all_0478.json.gz +ml_all_0479.json.gz +ml_all_0480.json.gz +ml_all_0481.json.gz +ml_all_0482.json.gz +ml_all_0483.json.gz +ml_all_0484.json.gz +ml_all_0485.json.gz +ml_all_0486.json.gz +ml_all_0487.json.gz +ml_all_0488.json.gz +ml_all_0489.json.gz +ml_all_0490.json.gz +ml_all_0491.json.gz +ml_all_0492.json.gz +ml_all_0493.json.gz +ml_all_0494.json.gz +ml_all_0495.json.gz +ml_all_0496.json.gz +ml_all_0497.json.gz +ml_all_0498.json.gz +ml_all_0499.json.gz +ml_all_0500.json.gz +ml_all_0501.json.gz +ml_all_0502.json.gz +ml_all_0503.json.gz +ml_all_0504.json.gz +ml_all_0505.json.gz +ml_all_0506.json.gz +ml_all_0507.json.gz +ml_all_0508.json.gz +ml_all_0509.json.gz +ml_all_0510.json.gz +ml_all_0511.json.gz +ml_all_0512.json.gz +ml_all_0513.json.gz +ml_all_0514.json.gz +ml_all_0515.json.gz +ml_all_0516.json.gz +ml_all_0517.json.gz +ml_all_0518.json.gz +ml_all_0519.json.gz +ml_all_0520.json.gz +ml_all_0521.json.gz +ml_all_0522.json.gz +ml_all_0523.json.gz +ml_all_0524.json.gz +ml_all_0525.json.gz +ml_all_0526.json.gz +ml_all_0527.json.gz +ml_all_0528.json.gz +ml_all_0529.json.gz +ml_all_0530.json.gz +ml_all_0531.json.gz +ml_all_0532.json.gz +ml_all_0533.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-06/sd.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-06/sd.txt new file mode 100644 index 0000000000000000000000000000000000000000..7660b5e2a8f69776ef864ea8a5b394d20b2b5943 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-06/sd.txt @@ -0,0 +1,12 @@ +sd_all_0000.json.gz +sd_all_0001.json.gz +sd_all_0002.json.gz +sd_all_0003.json.gz +sd_all_0004.json.gz +sd_all_0005.json.gz +sd_all_0006.json.gz +sd_all_0007.json.gz +sd_all_0008.json.gz +sd_all_0009.json.gz +sd_all_0010.json.gz +sd_all_0011.json.gz diff --git a/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-14/as.txt b/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-14/as.txt new file mode 100644 index 0000000000000000000000000000000000000000..ba00176a4bcf7dc53fc4e7dee980dcf67371fa37 --- /dev/null +++ b/cc-multilingual-main/dedup/RedPajama-Data/listings/2023-14/as.txt @@ -0,0 +1,27 @@ +as_all_0000.json.gz +as_all_0001.json.gz +as_all_0002.json.gz +as_all_0003.json.gz +as_all_0004.json.gz +as_all_0005.json.gz +as_all_0006.json.gz +as_all_0007.json.gz +as_all_0008.json.gz +as_all_0009.json.gz +as_all_0010.json.gz +as_all_0011.json.gz +as_all_0012.json.gz 
+as_all_0013.json.gz +as_all_0014.json.gz +as_all_0015.json.gz +as_all_0016.json.gz +as_all_0017.json.gz +as_all_0018.json.gz +as_all_0019.json.gz +as_all_0020.json.gz +as_all_0021.json.gz +as_all_0022.json.gz +as_all_0023.json.gz +as_all_0024.json.gz +as_all_0025.json.gz +as_all_0026.json.gz
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/lsh.sh b/cc-multilingual-main/dedup/RedPajama-Data/lsh.sh
new file mode 100644
index 0000000000000000000000000000000000000000..590987382184ed9750b98feed125c57846ae6dd8
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/lsh.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+# Build per-language minhash listings for one snapshot, then run LSH per language. Usage: bash lsh.sh <snapshot-id>, e.g. bash lsh.sh 2023-06
+outputfolder="/mnt/weka/peacock/wet-data/output"
+redpajama="/home/sdp/Common_crawl/cc-multilingual-main/cc-multilingual-main/dedup/RedPajama-Data"
+cd "$outputfolder/minhash-signatures/$1"
+lis=("as" "bn" "gu" "kn" "hi" "ml" "mr" "ne" "or" "sa" "sd" "ta" "ur" "te" "mai")
+mkdir -p "$redpajama/minhash-listings/$1"
+for l in "${lis[@]}"; do
+  ls "$l"/*.minhash.parquet > "$redpajama/minhash-listings/$1/$l.txt"
+done
+mkdir -p "$outputfolder/fuzzy-clusters/$1"
+cd "$redpajama"
+for l in "${lis[@]}"; do
+  mkdir -p "$outputfolder/fuzzy-clusters/$1/$l"
+  python3 app/src/run_lsh.py --listings "minhash-listings/$1/$l.txt" --input_base_uri "file://$outputfolder/minhash-signatures/$1/" --output_dir "$outputfolder/fuzzy-clusters/$1/$l" --similarity "0.8" --num_perm "128"
+done
\ No newline at end of file
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/minhash.sh b/cc-multilingual-main/dedup/RedPajama-Data/minhash.sh
new file mode 100644
index 0000000000000000000000000000000000000000..10c1dd48698123ada3c393664996fe86d0f8dfee
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/minhash.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+# Build per-language listings of mined files for one snapshot, then compute minhash signatures. Usage: bash minhash.sh <snapshot-id>, e.g. bash minhash.sh 2023-06
+outputfolder="/mnt/weka/peacock/wet-data/output"
+redpajama="/home/sdp/Common_crawl/cc-multilingual-main/cc-multilingual-main/dedup/RedPajama-Data"
+cd "$outputfolder/mined/$1"
+lis=("as" "bn" "gu" "kn" "hi" "ml" "mr" "ne" "or" "sa" "sd" "ta" "ur" "te" "mai")
+mkdir -p "$redpajama/listings/$1"
+for l in "${lis[@]}"; do
+  ls "$l"*.json.gz > "$redpajama/listings/$1/$l.txt"
+done
+mkdir -p "$outputfolder/minhash-signatures/$1"
+cd "$redpajama"
+for l in "${lis[@]}"; do
+  line_count=$(wc -l < "listings/$1/$l.txt")
+  arg=$((line_count > 128 ? line_count / 128 : 1))  # spread the input files across roughly 128 worker invocations
+  python3 app/src/pipeline.py --input_base_uri "file://$outputfolder/mined/$1" --output_base_uri "file://$outputfolder/minhash-signatures/$1/$l" --artifacts_dir "file://$redpajama/artifacts" --input "listings/$1/$l.txt" --cc_snapshot_id "$1" --langs "$l" --inputs_per_process "$arg" --minhash_num_permutations 128 --minhash_ngram_size 13
+done
\ No newline at end of file
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/scripts/apptainer_run_lsh.sh b/cc-multilingual-main/dedup/RedPajama-Data/scripts/apptainer_run_lsh.sh
new file mode 100644
index 0000000000000000000000000000000000000000..fd9af9a2647a014fc791cd0496f90a676a15386c
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/scripts/apptainer_run_lsh.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+
+set -e
+trap cleanup_on_error ERR SIGINT SIGTERM
+
+cleanup_on_error() {
+  echo "Error: $0:$LINENO: command \`$BASH_COMMAND\` failed with exit code $?"
+  exit 1
+}
+
+while [[ $# -gt 0 ]]; do
+  key="$1"
+  case $key in
+    --config)
+      CONFIG_FILE="$2"
+      shift
+      shift
+      ;;
+    --input_base_uri)
+      INPUT_BASE_URI="$2"
+      shift
+      shift
+      ;;
+    --output_dir)
+      OUTPUT_DIR="$2"
+      shift
+      shift
+      ;;
+    --similarity)
+      SIMILARITY="$2"
+      shift
+      shift
+      ;;
+    --listings)
+      LISTINGS="$2"
+      shift
+      shift
+      ;;
+    --max_docs)
+      MAX_DOCS="$2"
+      shift
+      shift
+      ;;
+    *)
+      echo "Invalid option: $key" >&2; exit 1
+      ;;
+  esac
+done
+
+# make environment variables available to downstream scripts
+set -a
+# shellcheck source=configs/base.conf
+. "$CONFIG_FILE"
+set +a
+
+# run pipeline
+apptainer run --memory 480g "${DOCKER_REPO}" \
+  python3 src/run_lsh.py \
+  --listings "${LISTINGS}" \
+  --input_base_uri "${INPUT_BASE_URI}" \
+  --output_dir "${OUTPUT_DIR}" \
+  --similarity "${SIMILARITY}" \
+  --num_perm "${MINHASH_NUM_PERMUTATIONS}" \
+  --max_docs "${MAX_DOCS:--1}"
diff --git a/cc-multilingual-main/dedup/RedPajama-Data/scripts/apptainer_run_quality_signals.sh b/cc-multilingual-main/dedup/RedPajama-Data/scripts/apptainer_run_quality_signals.sh
new file mode 100644
index 0000000000000000000000000000000000000000..46c93123541d105e6a13c24d814cf1537f9df277
--- /dev/null
+++ b/cc-multilingual-main/dedup/RedPajama-Data/scripts/apptainer_run_quality_signals.sh
@@ -0,0 +1,98 @@
+#!/bin/bash
+
+set -e
+trap cleanup_on_error ERR SIGINT SIGTERM
+
+cleanup_on_error() {
+  echo "Error: $0:$LINENO: command \`$BASH_COMMAND\` failed with exit code $?"
+  exit 1
+}
+
+help() {
+  echo "Usage: apptainer_run_quality_signals.sh [ -c | --config ] [ -d | --dump_id ] [ -l | --listings ]"
+  exit 2
+}
+
+while [[ $# -gt 0 ]]; do
+  key="$1"
+  case $key in
+    -c | --config)
+      CONFIG_FILE="$2"
+      shift 2
+      ;;
+    -d | --dump_id)
+      DUMP_ID="$2"
+      shift 2
+      ;;
+    -l | --listings)
+      LISTINGS="$2"
+      shift 2
+      ;;
+    -h | --help)
+      help
+      ;;
+    --)
+      shift
+      break
+      ;;
+    *)
+      echo "Invalid option: $1" >&2
+      help
+      ;;
+  esac
+done
+
+# make environment variables available to downstream scripts
+set -a
+# shellcheck source=configs/base.conf
+. "$CONFIG_FILE"
+set +a
+
+if [ -z "${MAX_DOCS}" ]; then
+  MAX_DOCS=-1
+fi
+
+ARTIFACTS_ARCHIVE="${DATA_ROOT%/}/artifacts-${ARTIFACTS_ID}.tar.gz"
+
+if [ ! -d "${DATA_ROOT%/}/artifacts-${ARTIFACTS_ID}" ]; then
+  # download artifacts from bucket
+  echo "Downloading artifacts from ${S3_BUCKET%/}/artifacts/artifacts-${ARTIFACTS_ID}.tar.gz"
+  s5cmd --profile "$S3_PROFILE" --endpoint-url "$S3_ENDPOINT_URL" \
+    cp "${S3_BUCKET%/}/artifacts/artifacts-${ARTIFACTS_ID}.tar.gz" "${ARTIFACTS_ARCHIVE}"
+
+  # extract artifacts
+  mkdir -p "${DATA_ROOT%/}/artifacts-${ARTIFACTS_ID}"
+  echo "Extracting artifacts to ${DATA_ROOT%/}/artifacts-${ARTIFACTS_ID}"
+  tar -xzf "${ARTIFACTS_ARCHIVE}" -C "${DATA_ROOT%/}/artifacts-${ARTIFACTS_ID}"
+  rm "${ARTIFACTS_ARCHIVE}"
+else
+  echo "Artifacts already exist at ${DATA_ROOT%/}/artifacts-${ARTIFACTS_ID}; skipping download."
+fi
+
+
+# run pipeline
+ARTIFACTS_DIR="${DATA_ROOT%/}/artifacts-${ARTIFACTS_ID}"
+
+if [ -z "${LISTINGS}" ]; then
+  LISTINGS="${ARTIFACTS_DIR%/}/listings/listings-${DUMP_ID}.txt"
+fi
+
+apptainer cache clean -f
+apptainer run \
+  --env AWS_ACCESS_KEY_ID="$AWS_ACCESS_KEY_ID" --env AWS_SECRET_ACCESS_KEY="$AWS_SECRET_ACCESS_KEY" \
+  "docker://docker.io/${DOCKER_REPO}:amd64" \
+  python3 /usr/app/src/pipeline.py \
+  --input "${LISTINGS}" \
+  --input_base_uri "${INPUT_BASE_URI}" \
+  --output_base_uri "${OUTPUT_BASE_URI}" \
+  --cc_snapshot_id "${DUMP_ID}" \
+  --artifacts_dir "${ARTIFACTS_DIR}" \
+  --dsir_buckets "${DSIR_FEATURE_DIM}" \
+  --max_docs "${MAX_DOCS}" \
+  --inputs_per_process "${INPUTS_PER_PROCESS}" \
+  --langs "${LANGUAGES[@]}" \
+  --endpoint_url "${S3_ENDPOINT_URL}" \
+  --minhash_ngram_size "${MINHASH_NGRAM_SIZE}" \
+  --minhash_num_permutations "${MINHASH_NUM_PERMUTATIONS}" \
+  --minhash_similarities "${MINHASH_SIMILARITIES[@]}" \
+  --filename_keep_patterns "${FILENAME_KEEP_PATTERNS[@]}"