minpeter committed
Commit fa17e76 · verified · 1 Parent(s): 9535a09
Files changed (5)
  1. config.json +16 -14
  2. model.safetensors +2 -2
  3. tokenizer.json +2 -2
  4. tokenizer_config.json +37 -37
  5. training_args.bin +1 -1
config.json CHANGED
@@ -4,27 +4,29 @@
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
- "bos_token_id": 1,
- "eos_token_id": 32001,
- "head_dim": 32,
+ "bos_token_id": 0,
+ "eos_token_id": 3,
+ "head_dim": 64,
  "hidden_act": "silu",
- "hidden_size": 256,
- "initializer_range": 0.02,
- "intermediate_size": 1024,
- "max_position_embeddings": 8192,
+ "hidden_size": 576,
+ "initializer_range": 0.041666666666666664,
+ "intermediate_size": 1536,
+ "is_llama_config": true,
+ "max_position_embeddings": 4096,
  "mlp_bias": false,
  "model_type": "llama",
- "num_attention_heads": 8,
- "num_hidden_layers": 12,
- "num_key_value_heads": 2,
- "pad_token_id": 32003,
+ "num_attention_heads": 9,
+ "num_hidden_layers": 30,
+ "num_key_value_heads": 3,
+ "pad_token_id": 1,
  "pretraining_tp": 1,
- "rms_norm_eps": 1e-06,
+ "rms_norm_eps": 1e-05,
+ "rope_interleaved": false,
  "rope_scaling": null,
- "rope_theta": 10000.0,
+ "rope_theta": 100000,
  "tie_word_embeddings": true,
  "torch_dtype": "float32",
  "transformers_version": "4.52.4",
  "use_cache": true,
- "vocab_size": 32018
+ "vocab_size": 32000
  }
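The updated config describes a much larger Llama-style model, and its derived quantities are internally consistent (head_dim = 576 / 9 = 64; 9 query heads grouped over 3 key/value heads). A minimal sanity-check sketch, assuming transformers is available; the values below are simply copied from the new config.json above:

from transformers import LlamaConfig

# Values copied from the updated config.json above.
cfg = LlamaConfig(
    hidden_size=576,
    intermediate_size=1536,
    num_hidden_layers=30,
    num_attention_heads=9,
    num_key_value_heads=3,
    head_dim=64,
    max_position_embeddings=4096,
    rms_norm_eps=1e-05,
    rope_theta=100000,
    vocab_size=32000,
    bos_token_id=0,
    eos_token_id=3,
    pad_token_id=1,
    tie_word_embeddings=True,
)

# head_dim agrees with hidden_size / num_attention_heads: 576 / 9 = 64
assert cfg.hidden_size // cfg.num_attention_heads == cfg.head_dim
# grouped-query attention: 9 query heads share 3 key/value heads (3:1 grouping)
assert cfg.num_attention_heads % cfg.num_key_value_heads == 0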
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:d9e511a89a68110f896a7373f812fd6041629cc471887aabcda85447c7031f28
- size 78437072
+ oid sha256:aaaf5bd62a28c1e2ea35ce725cd2e608c582aac986ca5bba4639e77aa9701515
+ size 498572168
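What is shown here is only the Git LFS pointer (version, oid, size), not the weights themselves. The size jump is consistent with the larger architecture above: at float32 (4 bytes per parameter, ignoring the small safetensors header), the pointer sizes imply roughly 19.6M parameters before and 124.6M after. A rough back-of-the-envelope check:

# Rough parameter-count estimate from the LFS pointer sizes above.
# float32 = 4 bytes per parameter; the safetensors header adds a small overhead.
old_bytes, new_bytes = 78_437_072, 498_572_168
print(f"{old_bytes / 4 / 1e6:.1f}M -> {new_bytes / 4 / 1e6:.1f}M parameters (approx.)")
# ~19.6M -> ~124.6M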
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0f24e35414c22e483b892df074f22186bcbfd1b1c1f183ef4c8f4990f826ff30
- size 2869924
+ oid sha256:a36c3ff5b839a44b064837a12b832e61cab245c31d1618d03c85d1314cc07911
+ size 2846439
tokenizer_config.json CHANGED
@@ -1,143 +1,143 @@
  {
  "added_tokens_decoder": {
- "32000": {
- "content": "<|im_start|>",
+ "0": {
+ "content": "<|unk_token|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32001": {
- "content": "<|im_end|>",
+ "1": {
+ "content": "<|pad_token|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32002": {
- "content": "<|unk_token|>",
+ "2": {
+ "content": "<|im_start|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32003": {
- "content": "<|pad_token|>",
+ "3": {
+ "content": "<|im_end|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32004": {
+ "4": {
  "content": "<tool_call>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
- "special": false
+ "special": true
  },
- "32005": {
+ "5": {
  "content": "</tool_call>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
- "special": false
+ "special": true
  },
- "32006": {
+ "6": {
  "content": "<think>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
- "special": false
+ "special": true
  },
- "32007": {
+ "7": {
  "content": "</think>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
- "special": false
+ "special": true
  },
- "32008": {
- "content": "<|unused_special_token_0|>",
+ "8": {
+ "content": "<|unused_token_0|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32009": {
- "content": "<|unused_special_token_1|>",
+ "9": {
+ "content": "<|unused_token_1|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32010": {
- "content": "<|unused_special_token_2|>",
+ "10": {
+ "content": "<|unused_token_2|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32011": {
- "content": "<|unused_special_token_3|>",
+ "11": {
+ "content": "<|unused_token_3|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32012": {
- "content": "<|unused_special_token_4|>",
+ "12": {
+ "content": "<|unused_token_4|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32013": {
- "content": "<|unused_special_token_5|>",
+ "13": {
+ "content": "<|unused_token_5|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32014": {
- "content": "<|unused_special_token_6|>",
+ "14": {
+ "content": "<|unused_token_6|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32015": {
- "content": "<|unused_special_token_7|>",
+ "15": {
+ "content": "<|unused_token_7|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32016": {
- "content": "<|unused_special_token_8|>",
+ "16": {
+ "content": "<|unused_token_8|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "32017": {
- "content": "<|unused_special_token_9|>",
+ "17": {
+ "content": "<|unused_token_9|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
@@ -148,7 +148,7 @@
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "extra_special_tokens": {},
- "model_max_length": 4096,
+ "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<|pad_token|>",
  "tokenizer_class": "PreTrainedTokenizer",
  "unk_token": "<|unk_token|>"
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5b9edc82518a3363d37477942c413c134ab6b8f4032a4f60942c01d82a0577ef
+ oid sha256:02a35e277ab5aa892680510168973e0c935efdcd186a619b19b5aaf90d144b98
  size 5713
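Only the hash of training_args.bin changes; the size stays at 5713 bytes. This file is conventionally the pickled transformers.TrainingArguments that Trainer saves alongside checkpoints, so it can be inspected locally if the repository is trusted (loading it executes pickle); a hedged sketch:

import torch

# training_args.bin is typically a pickled transformers.TrainingArguments;
# weights_only=False is required because it is not a plain tensor file.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)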