roberta-base-mrpc / trainer_state.json
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 1150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 2.17,
      "learning_rate": 1.1304347826086957e-05,
      "loss": 0.4267,
      "step": 500
    },
    {
      "epoch": 4.35,
      "learning_rate": 2.6086956521739132e-06,
      "loss": 0.1886,
      "step": 1000
    },
    {
      "epoch": 5.0,
      "step": 1150,
      "total_flos": 2412728377651200.0,
      "train_loss": 0.2803073907935101,
      "train_runtime": 6011.5008,
      "train_samples_per_second": 3.051,
      "train_steps_per_second": 0.191
    }
  ],
  "max_steps": 1150,
  "num_train_epochs": 5,
  "total_flos": 2412728377651200.0,
  "trial_name": null,
  "trial_params": null
}
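
The file above is the trainer state that the Hugging Face Trainer saves alongside a checkpoint. A minimal sketch of reading it back for inspection, assuming the transformers library is installed and the file has been downloaded locally as trainer_state.json (the path is an assumption, not part of this repository's code):

    # Load and inspect the saved trainer state.
    from transformers import TrainerState

    state = TrainerState.load_from_json("trainer_state.json")

    print(state.epoch)        # 5.0
    print(state.global_step)  # 1150

    # log_history holds the periodic loss/learning-rate logs and the final train metrics.
    for entry in state.log_history:
        print(entry)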