{
    "MODEL": {
        "num_hidden_layers": 24,
        "hidden_size": 2048,
        "num_attention_heads": 32,
        "intermediate_size": 4096
    },
    "LAYER_MAPPINGS": {
        "word_embeddings": 1,
"transformer": [3, 24+2],
"final_layernorm": 28,
"final_word_embeddings": 29
},
"FULL_NAME_MAPPINGS": {
},
"PARTIAL_NAME_MAPPINGS": {
"final_word_embeddings": {
"vocab_parallel_projection": "lm_head"
},
"final_layernorm": {
"final_rmsnorm": "model.norm"
},
"word_embeddings": {
"word_embeddings": "model.embed_tokens"
},
"transformer": {
"dense_h_to_4h": "mlp.gate_proj",
"dense_4h_to_h": "mlp.down_proj",
"dense_h_to_4h_swiglu": "mlp.up_proj",
"post_attention_layernorm": "post_attention_layernorm",
"input_layernorm": "input_layernorm",
"dense": "self_attn.o_proj",
"query_key_value": {"query": "self_attn.q_proj", "key": "self_attn.k_proj", "value": "self_attn.v_proj"}
}
},
"SPECIAL": {
"query_key_value": "attention_qkv"
}
}
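
This config maps a Megatron-DeepSpeed checkpoint layout and its parameter names onto Hugging Face Llama-style names: MODEL records the architecture, LAYER_MAPPINGS places the model pieces at pipeline layer indices (embedding at 1, the 24 transformer blocks at 3 through 26, final norm at 28, output projection at 29), and PARTIAL_NAME_MAPPINGS renames individual parameters. Below is a minimal sketch of how a converter might consume the mapping, assuming the transformer range [3, 26] is inclusive; the file name mds_to_hf.json and both helper functions are hypothetical, not taken from any published tool.

import json

def load_mapping(path: str) -> dict:
    # Hypothetical loader for a conversion config like the one above.
    with open(path) as f:
        return json.load(f)

def rename_param(name: str, rules: dict) -> str:
    # Rewrite each dotted component of a parameter name that has a
    # string-valued rule. Matching whole components (not substrings)
    # avoids "dense" accidentally matching inside "dense_h_to_4h".
    # Dict-valued rules (the fused query_key_value projection, which
    # SPECIAL presumably flags for splitting) require slicing the
    # weight tensor into q/k/v and are out of scope for this sketch.
    out = []
    for part in name.split("."):
        target = rules.get(part)
        out.append(target if isinstance(target, str) else part)
    return ".".join(out)

cfg = load_mapping("mds_to_hf.json")  # hypothetical file name
rules = cfg["PARTIAL_NAME_MAPPINGS"]["transformer"]

print(rename_param("dense_4h_to_h.weight", rules))        # mlp.down_proj.weight
print(rename_param("dense_h_to_4h_swiglu.weight", rules)) # mlp.up_proj.weight

# Assuming the range is inclusive, it spans exactly num_hidden_layers
# blocks, so a Hugging Face layer index would be the Megatron-DeepSpeed
# pipeline layer index minus the range start (3).
start, end = cfg["LAYER_MAPPINGS"]["transformer"]
assert end - start + 1 == cfg["MODEL"]["num_hidden_layers"]

Note that the original file wrote the range end as the expression 24+2, which strict JSON parsers reject; it is spelled out here as the literal 26.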