{
  "_name_or_path": "Jiqing/protst-esm1b-for-sequential-classification",
  "architectures": [
    "ProtSTForProteinPropertyPrediction"
  ],
  "auto_map": {
    "AutoConfig": "Jiqing/ProtST-esm1b--configuration_protst.ProtSTConfig",
    "AutoModel": "Jiqing/protst-esm1b-for-sequential-classification--modeling_protst.ProtSTForProteinPropertyPrediction"
  },
  "model_type": "protest",
  "num_labels": 2,
  "protein_config": {
    "_name_or_path": "/tmp/facebook/esm1b_t33_650M_UR50S",
    "architectures": [
      "EsmForMaskedLM"
    ],
    "attention_probs_dropout_prob": 0.0,
    "classifier_dropout": null,
    "cls_token_id": 0,
    "emb_layer_norm_before": true,
    "eos_token_id": 2,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.0,
    "hidden_size": 1280,
    "intermediate_size": 5120,
    "layer_norm_eps": 1e-05,
    "mask_token_id": 32,
    "model_type": "esm",
    "num_attention_heads": 20,
    "num_hidden_layers": 33,
    "pad_token_id": 1,
    "token_dropout": true,
    "torch_dtype": "float32",
    "vocab_size": 33
  },
  "text_config": {
    "architectures": [
      "BertForMaskedLM"
    ],
    "cls_token_id": 2,
    "model_type": "bert",
    "sep_token_id": 3
  },
  "torch_dtype": "float32",
  "transformers_version": "4.36.2"
}
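
Because the auto_map entries above point at custom ProtST classes hosted in the Jiqing repositories rather than classes built into transformers, loading this checkpoint requires trust_remote_code=True. A minimal sketch, assuming the repository id in _name_or_path above is the one to load from:

    # Sketch: load the config and model described by this config.json.
    # AutoConfig follows the "AutoConfig" auto_map entry to ProtSTConfig;
    # AutoModel follows the "AutoModel" entry to ProtSTForProteinPropertyPrediction.
    from transformers import AutoConfig, AutoModel

    repo_id = "Jiqing/protst-esm1b-for-sequential-classification"

    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
    model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)

    # num_labels is 2 in this config, i.e. a binary sequence-level classification head.
    print(type(model).__name__, config.num_labels)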