{
    "task_name": "mrpc",
    "train_file": null,
    "validation_file": null,
    "max_length": 256,
    "pad_to_max_length": false,
    "model_name_or_path": "google-bert/bert-base-uncased",
    "use_slow_tokenizer": true,
    "per_device_train_batch_size": 8,
    "per_device_eval_batch_size": 8,
    "learning_rate": 5e-05,
    "weight_decay": 0.0,
    "num_train_epochs": 5,
    "max_train_steps": null,
    "gradient_accumulation_steps": 1,
    "lr_scheduler_type": "linear",
    "num_warmup_steps": 0,
    "output_dir": "./outputs",
    "peft_method": null,
    "seed": 42,
    "push_to_hub": false,
    "hub_model_id": null,
    "hub_token": null,
    "checkpointing_steps": "1000",
    "resume_from_checkpoint": null,
    "with_tracking": false,
    "report_to": "all",
    "ignore_mismatched_sizes": true,
    "save": false,
    "load_step": 999,
    "laplace_hessian": "kron",
    "laplace_sub": "all",
    "laplace_prior": "homo",
    "laplace_optim_step": 1000,
    "testing_set": "val",
    "laplace_predict": "mc_corr",
    "lm_head": false,
    "cache_dir": "/content/cache/huggingface/metrics/glue",
    "step_list": [
        0,
        458,
        917,
        1376,
        1835,
        2294
    ]
}
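
For reference, below is a minimal sketch of how a saved config like this might be loaded back into Python, assuming the training script uses an argparse-style interface; the load_args helper and the local "args.json" path are illustrative assumptions, not code from this repository.

import argparse
import json

def load_args(config_path: str) -> argparse.Namespace:
    # Read the saved JSON config and expose each key as an attribute,
    # mirroring the argparse.Namespace the original training run would have used.
    with open(config_path, "r") as f:
        config = json.load(f)
    return argparse.Namespace(**config)

# Hypothetical usage: point at wherever this file is stored locally.
args = load_args("args.json")
print(args.task_name)      # "mrpc"
print(args.learning_rate)  # 5e-05
print(args.step_list)      # [0, 458, 917, 1376, 1835, 2294]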