cpm-bee-10b / config.json
{
  "_from_model_config": true,
  "_name_or_path": "openbmb/cpm-bee-10b",
  "architectures": [
    "CpmBeeForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_cpmbee.CpmBeeConfig",
    "AutoModel": "modeling_cpmbee.CpmBeeForCausalLM",
    "AutoModelForCausalLM": "modeling_cpmbee.CpmBeeForCausalLM"
  },
  "vocab_size": 86583,
  "hidden_size": 4096,
  "dim_ff": 10240,
  "num_hidden_layers": 48,
  "num_attention_heads": 32,
  "dim_head": 128,
  "dropout_p": 0.0,
  "position_bias_num_buckets": 256,
  "position_bias_num_segment_buckets": 256,
  "position_bias_max_distance": 2048,
  "eps": 1e-6,
  "half": true,
  "model_type": "cpmbee"
}
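
The "auto_map" entries point the Transformers AutoClass machinery at the custom CpmBeeConfig and CpmBeeForCausalLM classes shipped alongside this config in the repository, so loading the model requires trust_remote_code=True. Below is a minimal loading sketch assuming the standard Hugging Face Transformers API; the torch_dtype choice mirrors the "half": true flag in this config.

    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer

    model_id = "openbmb/cpm-bee-10b"

    # trust_remote_code=True lets Transformers download and run the
    # configuration_cpmbee / modeling_cpmbee modules named in "auto_map".
    tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        trust_remote_code=True,
        torch_dtype=torch.half,  # matches "half": true above
    )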