sgugger committed
Commit cd2e888 · 1 Parent(s): cacda18

Upload tiny models for PegasusForConditionalGeneration

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "temp/dummy/pegasus/PegasusForConditionalGeneration",
+  "_name_or_path": "tiny_models/pegasus/PegasusForConditionalGeneration",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "architectures": [
@@ -7,7 +7,6 @@
   ],
   "attention_dropout": 0.1,
   "bos_token_id": 0,
-  "classifier_dropout": 0.0,
   "d_model": 16,
   "decoder_attention_heads": 4,
   "decoder_ffn_dim": 4,
@@ -29,7 +28,7 @@
   "pad_token_id": 0,
   "scale_embedding": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.25.0.dev0",
+  "transformers_version": "4.28.0.dev0",
   "use_cache": true,
-  "vocab_size": 1305
+  "vocab_size": 96103
 }
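For quick inspection, the updated config can be loaded from a local clone of this repo; a minimal sketch (the local path "." is an assumption, not part of the commit):

# Sketch: load the updated config from a local clone of this repo and check
# the values changed by this commit. The path "." is an assumption.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")
assert config.d_model == 16          # tiny hidden size
assert config.vocab_size == 96103    # was 1305 before this commit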
generation_config.json ADDED
@@ -0,0 +1,9 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 0,
+  "decoder_start_token_id": 0,
+  "eos_token_id": 1,
+  "forced_eos_token_id": 1,
+  "pad_token_id": 0,
+  "transformers_version": "4.28.0.dev0"
+}
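The new generation_config.json mirrors the token ids already present in config.json. A hedged sketch of producing an equivalent file with the GenerationConfig API (the save path "." is an assumption):

# Sketch: build an equivalent generation config in code and write it out.
from transformers import GenerationConfig

gen_config = GenerationConfig(
    bos_token_id=0,
    decoder_start_token_id=0,
    eos_token_id=1,
    forced_eos_token_id=1,
    pad_token_id=0,
)
gen_config.save_pretrained(".")  # writes generation_config.json next to config.json

The "_from_model_config": true flag is set automatically when the file is derived from a model's config rather than built by hand, as was the case here.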
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9851224dd60b3076fcf78847ac9575a2a2ee39b94657d1c102b4a84e2152d1de
-size 173829
+oid sha256:78c04ce7de78bd2b1f2d53fcf3df3e46890994b1fc86caa98df2fdbacbeed225
+size 6620101
spiece.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0015189ef36359283fec8b93cf6d9ce51bca37eb1101defc68a53b394913b96c
+size 1912529
tf_model.h5 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c7693a115e4416ce8f512e4bdff6441d4da03bce829e1ec644fb1013fb2ef1f6
-size 252540
+oid sha256:733a1c181a99fa3e24b0f09b01b6190e3a1acb67956a38d55d94b727ff8f87f1
+size 6698804
tokenizer.json CHANGED
(diff too large to render)
tokenizer_config.json CHANGED
@@ -108,7 +108,6 @@
   "mask_token": "<mask_2>",
   "mask_token_sent": "<mask_1>",
   "model_max_length": 200,
-  "name_or_path": "google/pegasus-large",
   "offset": 103,
   "pad_token": "<pad>",
   "sp_model_kwargs": {},