sgugger committed on
Commit
5330aab
1 Parent(s): 18d836e

Upload tiny models for BigBirdPegasusForQuestionAnswering

Browse files
Files changed (5) hide show
  1. config.json +2 -2
  2. pytorch_model.bin +2 -2
  3. spiece.model +3 -0
  4. tokenizer.json +0 -0
  5. tokenizer_config.json +2 -9
config.json CHANGED
@@ -30,8 +30,8 @@
30
  "pad_token_id": 0,
31
  "scale_embedding": true,
32
  "torch_dtype": "float32",
33
- "transformers_version": "4.25.0.dev0",
34
  "use_bias": false,
35
  "use_cache": true,
36
- "vocab_size": 1024
37
  }
 
30
  "pad_token_id": 0,
31
  "scale_embedding": true,
32
  "torch_dtype": "float32",
33
+ "transformers_version": "4.28.0.dev0",
34
  "use_bias": false,
35
  "use_cache": true,
36
+ "vocab_size": 96103
37
  }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:1d375e59eda05e1a9ae7fb325c134811def6cccf8d006434c762351698b0b1bc
3
- size 354112
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7ee1434b55c44a8fdf0e1dd45a21e7a18ee0fb0a11e0a6948f459b0c09838195
3
+ size 12524288
spiece.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fe1b40df7e8825709c0172639c47338a68d5622e9e3b6cc0fae516537cae738b
3
+ size 1915455
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -24,17 +24,9 @@
24
  "rstrip": false,
25
  "single_word": false
26
  },
27
- "mask_token": {
28
- "__type": "AddedToken",
29
- "content": "[MASK]",
30
- "lstrip": true,
31
- "normalized": true,
32
- "rstrip": false,
33
- "single_word": false
34
- },
35
  "mask_token_sent": null,
36
  "model_max_length": 260,
37
- "name_or_path": "google/bigbird-pegasus-large-arxiv",
38
  "offset": 0,
39
  "pad_token": {
40
  "__type": "AddedToken",
@@ -52,6 +44,7 @@
52
  "rstrip": false,
53
  "single_word": false
54
  },
 
55
  "special_tokens_map_file": "/home/patrick/.cache/huggingface/transformers/b548e984b09823ed5cea0e622c0ec194a7e07b3c5d2e6b48a7cde84bd179cc7c.4eda581f816a0a941629106f0338c957910ce4839ecf7e3e743bb79523bf7249",
56
  "tokenizer_class": "PegasusTokenizer",
57
  "unk_token": {
 
24
  "rstrip": false,
25
  "single_word": false
26
  },
27
+ "mask_token": "[MASK]",
 
 
 
 
 
 
 
28
  "mask_token_sent": null,
29
  "model_max_length": 260,
 
30
  "offset": 0,
31
  "pad_token": {
32
  "__type": "AddedToken",
 
44
  "rstrip": false,
45
  "single_word": false
46
  },
47
+ "sp_model_kwargs": {},
48
  "special_tokens_map_file": "/home/patrick/.cache/huggingface/transformers/b548e984b09823ed5cea0e622c0ec194a7e07b3c5d2e6b48a7cde84bd179cc7c.4eda581f816a0a941629106f0338c957910ce4839ecf7e3e743bb79523bf7249",
49
  "tokenizer_class": "PegasusTokenizer",
50
  "unk_token": {