{
  "adaln_group": true,
  "attn_cfg": {},
  "attn_layer_idx": [],
  "d_intermediate": 0,
  "d_model": 1536,
  "fused_add_norm": true,
  "mixer_drop": 0.0,
  "mlp_drop": 0.0,
  "n_layer": 48,
  "num_classes": 1000,
  "num_groups": 4,
  "num_tokens": 256,
  "pad_vocab_size_multiple": 8,
  "residual_in_fp32": true,
  "rms_norm": true,
  "ssm_cfg": {
    "layer": "Mamba2"
  },
  "tie_embeddings": true,
  "token_drop": 0.0,
  "vocab_size": 16384
}
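
Below is a minimal sketch of how this config might be consumed, using only the Python standard library. It loads the JSON, applies the rounding rule implied by pad_vocab_size_multiple (round vocab_size up to the nearest multiple of 8; 16384 is already aligned, so no padding is added here), and prints the core dimensions. The load_config and padded_vocab_size helpers are illustrative assumptions, not part of any particular library's API.

import json
import math


def load_config(path: str) -> dict:
    """Read the raw JSON config from disk (hypothetical helper)."""
    with open(path, "r") as f:
        return json.load(f)


def padded_vocab_size(vocab_size: int, multiple: int) -> int:
    """Round vocab_size up to the nearest multiple, as pad_vocab_size_multiple implies."""
    return math.ceil(vocab_size / multiple) * multiple


cfg = load_config("config.json")

vocab = padded_vocab_size(cfg["vocab_size"], cfg["pad_vocab_size_multiple"])
assert vocab == 16384  # 16384 is already a multiple of 8, so no padding is added

print(f"layers:          {cfg['n_layer']}")      # 48 blocks, each a Mamba2 mixer (ssm_cfg.layer)
print(f"model dim:       {cfg['d_model']}")      # 1536
print(f"sequence length: {cfg['num_tokens']}")   # 256 tokens per sequence
print(f"condition classes: {cfg['num_classes']}")  # 1000 class labels
print(f"padded vocab:    {vocab}")

Note that attn_layer_idx is empty and attn_cfg is {}, so under this config every layer is an SSM block with no attention layers interleaved, and d_intermediate of 0 indicates no separate MLP expansion between mixers.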