itqop committed
Commit c8748be · verified · 1 Parent(s): d195999

Upload tokenizer

Files changed (2):
  1. special_tokens_map.json +2 -14
  2. tokenizer_config.json +1 -1
special_tokens_map.json CHANGED
@@ -1,19 +1,7 @@
 {
   "additional_special_tokens": [
-    {
-      "content": "<start_of_sum>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<end_of_sum>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    }
+    "<start_of_sum>",
+    "<end_of_sum>"
   ],
   "boi_token": "<start_of_image>",
   "bos_token": {
tokenizer_config.json CHANGED
@@ -51358,7 +51358,7 @@
   "pad_token": "<pad>",
   "sp_model_kwargs": null,
   "spaces_between_special_tokens": false,
-  "tokenizer_class": "GemmaTokenizer",
+  "tokenizer_class": "GemmaTokenizerFast",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
 }
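Pointing `tokenizer_class` at the fast implementation means `AutoTokenizer` should now resolve to the Rust-backed `GemmaTokenizerFast` by default. A quick sanity check, under the same assumed local path as above:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./tokenizer")  # assumed local checkout
print(type(tok).__name__)  # expected: GemmaTokenizerFast
assert tok.is_fast
```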