| 0 (string, 12 classes) | values (float64, 0 → 55.9k) |
|---|---|
| megatron.core.transformer.attention.forward.qkv | 2.302208 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.089024 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.102144 |
| megatron.core.transformer.attention.forward.core_attention | 1,854.470459 |
| megatron.core.transformer.attention.forward.linear_proj | 0.178016 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 1,857.47644 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 0.064544 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 0.763168 |
| megatron.core.transformer.mlp.forward.activation | 0.087584 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 0.69872 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 1.562336 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.064576 |
| megatron.core.transformer.attention.forward.qkv | 175.002304 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.128256 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.086784 |
| megatron.core.transformer.attention.forward.core_attention | 3,866.452637 |
| megatron.core.transformer.attention.forward.linear_proj | 3.989568 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 4,051.022949 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 1,145.490234 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 7.819968 |
| megatron.core.transformer.mlp.forward.activation | 505.874634 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 3.344256 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 517.830566 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.237536 |
| megatron.core.transformer.attention.forward.qkv | 1.345376 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.006656 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.00656 |
| megatron.core.transformer.attention.forward.core_attention | 2,202.561768 |
| megatron.core.transformer.attention.forward.linear_proj | 0.712608 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 2,204.666504 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 0.229664 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 3.031648 |
| megatron.core.transformer.mlp.forward.activation | 0.332384 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 2.935168 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 6.31104 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.228608 |
| megatron.core.transformer.attention.forward.qkv | 260.089905 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.102688 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.0944 |
| megatron.core.transformer.attention.forward.core_attention | 8,111.364258 |
| megatron.core.transformer.attention.forward.linear_proj | 3.262528 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 8,376.419922 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 721.8573 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 3.923456 |
| megatron.core.transformer.mlp.forward.activation | 306.531158 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 10.908768 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 322.216461 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.508576 |
| megatron.core.transformer.attention.forward.qkv | 0.624192 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.099392 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.094112 |
| megatron.core.transformer.attention.forward.core_attention | 2,057.075195 |
| megatron.core.transformer.attention.forward.linear_proj | 0.021184 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 2,058.21875 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 0.0104 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 0.051808 |
| megatron.core.transformer.mlp.forward.activation | 0.0088 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 0.048032 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 0.120192 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.01088 |
| megatron.core.transformer.attention.forward.qkv | 323.215973 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.003136 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.003008 |
| megatron.core.transformer.attention.forward.core_attention | 8,908.595703 |
| megatron.core.transformer.attention.forward.linear_proj | 4.427296 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 9,237.367188 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 1,775.715332 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 9.47216 |
| megatron.core.transformer.mlp.forward.activation | 732.778015 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 6.098592 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 748.931519 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.453824 |
| megatron.core.transformer.attention.forward.qkv | 2.605664 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.003008 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.003072 |
| megatron.core.transformer.attention.forward.core_attention | 2,448.930664 |
| megatron.core.transformer.attention.forward.linear_proj | 1.4888 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 2,453.050049 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 0.45008 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 5.8976 |
| megatron.core.transformer.mlp.forward.activation | 0.660832 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 5.907232 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 12.477568 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.450112 |
| megatron.core.transformer.attention.forward.qkv | 258.671753 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.115552 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.096256 |
| megatron.core.transformer.attention.forward.core_attention | 7,966.716797 |
| megatron.core.transformer.attention.forward.linear_proj | 4.383104 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 8,231.30957 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 1,434.865723 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 4.182752 |
| megatron.core.transformer.mlp.forward.activation | 536.986084 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 1.112992 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 543.560303 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.635744 |
| megatron.core.transformer.attention.forward.qkv | 0.744896 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.099488 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.105728 |
| megatron.core.transformer.attention.forward.core_attention | 3,007.682373 |
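The 12 module paths repeat across rows (one record per profiled layer/iteration), so the split is easiest to read after aggregation. The sketch below is illustrative only: the file name `timings.csv`, the `module`/`value` column labels, and the use of pandas are assumptions, not part of the dataset.

```python
# A minimal sketch of summarising the repeated per-module timing records shown
# above. Assumptions (not part of the dataset): the split has been exported to
# a plain CSV named "timings.csv" with header "module,value" and unformatted
# float values.
import pandas as pd

df = pd.read_csv("timings.csv")

# Each of the 12 module paths appears once per profiled record, so group the
# repeats and report count, mean, and max per module.
summary = (
    df.groupby("module")["value"]
      .agg(["count", "mean", "max"])
      .sort_values("mean", ascending=False)
)
print(summary)
```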