{
    "MODEL": {
        "num_hidden_layers": 24,
        "hidden_size": 2048,
        "num_attention_heads": 32,
        "intermediate_size": 4096
    },
    "LAYER_MAPPINGS": {
        "word_embeddings": 1,
| "transformer": [3, 24+2], | |
| "final_layernorm": 28, | |
| "final_word_embeddings": 29 | |
| }, | |
| "FULL_NAME_MAPPINGS": { | |
| }, | |
| "PARTIAL_NAME_MAPPINGS": { | |
| "final_word_embeddings": { | |
| "vocab_parallel_projection": "lm_head" | |
| }, | |
| "final_layernorm": { | |
| "final_rmsnorm": "model.norm" | |
| }, | |
| "word_embeddings": { | |
| "word_embeddings": "model.embed_tokens" | |
| }, | |
| "transformer": { | |
| "dense_h_to_4h": "mlp.gate_proj", | |
| "dense_4h_to_h": "mlp.down_proj", | |
| "dense_h_to_4h_swiglu": "mlp.up_proj", | |
| "post_attention_layernorm": "post_attention_layernorm", | |
| "input_layernorm": "input_layernorm", | |
| "dense": "self_attn.o_proj", | |
| "query_key_value": {"query": "self_attn.q_proj", "key": "self_attn.k_proj", "value": "self_attn.v_proj"} | |
| } | |
| }, | |
| "SPECIAL": { | |
| "query_key_value": "attention_qkv" | |
| } | |
| } | |
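
For reference, below is a minimal sketch of how a conversion script might consume `LAYER_MAPPINGS` and `PARTIAL_NAME_MAPPINGS` when renaming transformer-block parameters from Megatron-DeepSpeed to Hugging Face names. The flat key format (`layers.<idx>.<module...>.<param>.<weight|bias>`), the config file name, and the function name are illustrative assumptions, not the converter's actual API; the `[3, 26]` range is read here as inclusive pipeline indices covering the 24 transformer layers. A real converter would also use the `SPECIAL` entry to split the fused QKV tensor; that tensor surgery is elided.

```python
import json

# Config path is an assumption for illustration; use wherever this file lives.
with open("mds_to_hf.json") as f:
    cfg = json.load(f)

# First Megatron-DeepSpeed pipeline index holding a transformer block (3);
# HF layer numbering starts at 0, so the offset is just this value.
TF_FIRST = cfg["LAYER_MAPPINGS"]["transformer"][0]
TF_MAP = cfg["PARTIAL_NAME_MAPPINGS"]["transformer"]

def map_transformer_key(mds_key: str) -> list[str]:
    """Rename one transformer-block key to its Hugging Face name(s).

    Assumes flat keys of the form 'layers.<idx>.<module...>.<param>.<weight|bias>'.
    """
    _, idx, *_, name, suffix = mds_key.split(".")
    hf_layer = int(idx) - TF_FIRST  # Megatron layer 3 becomes HF layer 0
    hf_name = TF_MAP[name]
    if isinstance(hf_name, dict):
        # Fused QKV fans out to three HF names. SPECIAL["query_key_value"]
        # == "attention_qkv" is what flags that the tensor itself must also
        # be split along its fused dimension; that step is omitted here.
        return [f"model.layers.{hf_layer}.{v}.{suffix}" for v in hf_name.values()]
    return [f"model.layers.{hf_layer}.{hf_name}.{suffix}"]

print(map_transformer_key("layers.3.self_attention.query_key_value.weight"))
# ['model.layers.0.self_attn.q_proj.weight',
#  'model.layers.0.self_attn.k_proj.weight',
#  'model.layers.0.self_attn.v_proj.weight']
print(map_transformer_key("layers.4.mlp.dense_h_to_4h_swiglu.weight"))
# ['model.layers.1.mlp.up_proj.weight']
```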