{
"MODEL": {
"num_hidden_layers": NLAYERS,
"hidden_size": NHIDDEN,
"num_attention_heads": NHEADS,
"intermediate_size": FFN_HIDDEN_SIZE
},
"LAYER_MAPPINGS" : {
"word_embeddings": 1,
"transformer": [3, 18],
"final_layernorm": 20,
"final_word_embeddings": 21
},
"FULL_NAME_MAPPINGS": {
},
"PARTIAL_NAME_MAPPINGS": {
"final_word_embeddings": {
"vocab_parallel_projection": "lm_head"
},
"final_layernorm": {
"final_rmsnorm": "model.norm"
},
"word_embeddings": {
"word_embeddings": "model.embed_tokens"
},
"transformer": {
"dense_h_to_4h": "mlp.gate_proj",
"dense_4h_to_h": "mlp.down_proj",
"dense_h_to_4h_swiglu": "mlp.up_proj",
"post_attention_layernorm": "post_attention_layernorm",
"input_layernorm": "input_layernorm",
"dense": "self_attn.o_proj",
"query_key_value": {"query": "self_attn.q_proj", "key": "self_attn.k_proj", "value": "self_attn.v_proj"}
}
},
"SPECIAL": {
"query_key_value": "attention_qkv"
}
}
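
A minimal sketch (not part of the config file) of how a conversion script might consume the "transformer" entry of PARTIAL_NAME_MAPPINGS to rename per-layer Megatron parameters to Hugging Face LLaMA names. The helper function and the "model.layers.{i}." prefix are assumptions for illustration, not the tool's actual code:

import json

# The "transformer" block of PARTIAL_NAME_MAPPINGS, inlined here because the
# template above is not valid JSON until NLAYERS etc. are substituted.
TRANSFORMER_MAP = {
    "dense_h_to_4h": "mlp.gate_proj",
    "dense_4h_to_h": "mlp.down_proj",
    "dense_h_to_4h_swiglu": "mlp.up_proj",
    "post_attention_layernorm": "post_attention_layernorm",
    "input_layernorm": "input_layernorm",
    "dense": "self_attn.o_proj",
}

def rename_transformer_param(megatron_name: str, layer_idx: int) -> str:
    """Map one transformer-layer parameter name to its assumed HF equivalent."""
    # Try longer keys first so the partial name "dense" cannot shadow
    # "dense_h_to_4h" or "dense_h_to_4h_swiglu".
    for partial in sorted(TRANSFORMER_MAP, key=len, reverse=True):
        if partial in megatron_name:
            suffix = megatron_name.rsplit(".", 1)[-1]  # keep "weight" / "bias"
            return f"model.layers.{layer_idx}.{TRANSFORMER_MAP[partial]}.{suffix}"
    # Fused "query_key_value" is deliberately absent: per the SPECIAL entry it
    # is treated as attention qkv and split into q_proj/k_proj/v_proj instead
    # of being renamed one-to-one.
    raise KeyError(f"no partial mapping for {megatron_name!r}")

print(rename_transformer_param("mlp.dense_h_to_4h.weight", 5))
# -> model.layers.5.mlp.gate_proj.weight

Matching longest partial names first is the simplest way to keep substring lookups unambiguous, given that several keys in the mapping are prefixes of others.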