{
  "activation_dropout": 0.1,
  "activation_function": "gelu",
  "adaptor_activation_function": "gelu",
  "adaptor_dropout": 0.1,
  "adaptor_hidden_size": 512,
  "adaptor_init_std": 0.02,
  "adaptor_scaling_factor": 1.0,
  "adaptor_tuning": false,
  "additional_source_wait_k": -1,
  "alibi_encoding": false,
  "architectures": [
    "MBartForConditionalGeneration"
  ],
  "asymmetric_alibi_encoding": false,
  "attention_dropout": 0.1,
  "bos_token_id": 64000,
  "bottleneck_mid_fusion_tokens": 4,
  "classifier_dropout": 0.0,
  "d_model": 512,
  "decoder_adaptor_tying_config": null,
  "decoder_attention_heads": 8,
  "decoder_ffn_dim": 2048,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 6,
  "decoder_tying_config": null,
  "deep_adaptor_tuning": false,
  "deep_adaptor_tuning_ffn_only": false,
  "dropout": 0.1,
  "embed_low_rank_dim": 0,
  "encoder_adaptor_tying_config": null,
  "encoder_attention_heads": 8,
  "encoder_ffn_dim": 2048,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 6,
  "encoder_tying_config": null,
  "eos_token_id": 64001,
  "expert_ffn_size": 128,
  "features_embed_dims": null,
  "features_vocab_sizes": null,
  "gradient_checkpointing": false,
  "gradient_reversal_for_domain_classifier": false,
  "hypercomplex": false,
  "hypercomplex_n": 2,
  "ia3_adaptors": false,
  "init_std": 0.02,
  "initialization_scheme": "static",
  "is_encoder_decoder": true,
  "layernorm_adaptor_input": false,
  "layernorm_prompt_projection": false,
  "lora_adaptor_rank": 2,
  "lora_adaptors": false,
  "max_position_embeddings": 1024,
  "mid_fusion_layers": 3,
  "model_type": "mbart",
  "moe_adaptors": false,
  "multi_source": false,
  "multi_source_method": null,
  "multilayer_softmaxing": null,
  "no_embed_norm": false,
  "no_positional_encoding_decoder": false,
  "no_positional_encoding_encoder": false,
  "no_projection_prompt": false,
  "no_scale_attention_embedding": false,
  "num_domains_for_domain_classifier": 1,
  "num_experts": 8,
  "num_hidden_layers": 6,
  "num_moe_adaptor_experts": 4,
  "num_prompts": 100,
  "num_sparsify_blocks": 8,
  "pad_token_id": 0,
  "parallel_adaptors": false,
  "positional_encodings": false,
  "postnorm_decoder": false,
  "postnorm_encoder": false,
  "prompt_dropout": 0.1,
  "prompt_init_std": 0.02,
  "prompt_projection_hidden_size": 4096,
  "prompt_tuning": false,
  "recurrent_projections": 1,
  "residual_connection_adaptor": false,
  "residual_connection_prompt": false,
  "rope_encoding": false,
  "scale_embedding": false,
  "softmax_bias_tuning": false,
  "softmax_temperature": 1.0,
  "sparsification_temperature": 3.0,
  "sparsify_attention": false,
  "sparsify_ffn": false,
  "target_vocab_size": 0,
  "temperature_calibration": false,
  "tokenizer_class": "AlbertTokenizer",
  "transformers_version": "4.3.2",
  "unidirectional_encoder": false,
  "use_cache": true,
  "use_moe": false,
  "use_tanh_activation_prompt": false,
  "vocab_size": 64053,
  "wait_k": -1
}