{
  "best_metric": 0.9825925925925926,
  "best_model_checkpoint": "swinv2-tiny-patch4-window8-256-finetuned-eurosat/checkpoint-475",
  "epoch": 5.0,
  "global_step": 475,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.11,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 2.3113,
      "step": 10
    },
    {
      "epoch": 0.21,
      "learning_rate": 2.105263157894737e-05,
      "loss": 2.1411,
      "step": 20
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.157894736842105e-05,
      "loss": 1.715,
      "step": 30
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.210526315789474e-05,
      "loss": 1.0832,
      "step": 40
    },
    {
      "epoch": 0.53,
      "learning_rate": 5.2631578947368424e-05,
      "loss": 0.7166,
      "step": 50
    },
    {
      "epoch": 0.63,
      "learning_rate": 6.31578947368421e-05,
      "loss": 0.5886,
      "step": 60
    },
    {
      "epoch": 0.74,
      "learning_rate": 7.368421052631579e-05,
      "loss": 0.4958,
      "step": 70
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.421052631578948e-05,
      "loss": 0.4257,
      "step": 80
    },
    {
      "epoch": 0.95,
      "learning_rate": 9.473684210526316e-05,
      "loss": 0.4479,
      "step": 90
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9477777777777778,
      "eval_f1": 0.9478323359637986,
      "eval_loss": 0.15920470654964447,
      "eval_precision": 0.9500303173429833,
      "eval_recall": 0.9477777777777778,
      "eval_runtime": 23.3166,
      "eval_samples_per_second": 115.798,
      "eval_steps_per_second": 1.844,
      "step": 95
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.868421052631579e-05,
      "loss": 0.4166,
      "step": 100
    },
    {
      "epoch": 1.16,
      "learning_rate": 9.605263157894737e-05,
      "loss": 0.3648,
      "step": 110
    },
    {
      "epoch": 1.26,
      "learning_rate": 9.342105263157896e-05,
      "loss": 0.3384,
      "step": 120
    },
    {
      "epoch": 1.37,
      "learning_rate": 9.078947368421054e-05,
      "loss": 0.3095,
      "step": 130
    },
    {
      "epoch": 1.47,
      "learning_rate": 8.81578947368421e-05,
      "loss": 0.3676,
      "step": 140
    },
    {
      "epoch": 1.58,
      "learning_rate": 8.552631578947369e-05,
      "loss": 0.3062,
      "step": 150
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.289473684210527e-05,
      "loss": 0.3251,
      "step": 160
    },
    {
      "epoch": 1.79,
      "learning_rate": 8.026315789473685e-05,
      "loss": 0.2973,
      "step": 170
    },
    {
      "epoch": 1.89,
      "learning_rate": 7.763157894736843e-05,
      "loss": 0.2848,
      "step": 180
    },
    {
      "epoch": 2.0,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.3078,
      "step": 190
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9685185185185186,
      "eval_f1": 0.9685839286713788,
      "eval_loss": 0.09139837324619293,
      "eval_precision": 0.9694937388736072,
      "eval_recall": 0.9685185185185186,
      "eval_runtime": 23.2994,
      "eval_samples_per_second": 115.883,
      "eval_steps_per_second": 1.846,
      "step": 190
    },
    {
      "epoch": 2.11,
      "learning_rate": 7.236842105263159e-05,
      "loss": 0.2692,
      "step": 200
    },
    {
      "epoch": 2.21,
      "learning_rate": 6.973684210526315e-05,
      "loss": 0.2636,
      "step": 210
    },
    {
      "epoch": 2.32,
      "learning_rate": 6.710526315789474e-05,
      "loss": 0.2537,
      "step": 220
    },
    {
      "epoch": 2.42,
      "learning_rate": 6.447368421052632e-05,
      "loss": 0.2531,
      "step": 230
    },
    {
      "epoch": 2.53,
      "learning_rate": 6.18421052631579e-05,
      "loss": 0.265,
      "step": 240
    },
    {
      "epoch": 2.63,
      "learning_rate": 5.921052631578947e-05,
      "loss": 0.2379,
      "step": 250
    },
    {
      "epoch": 2.74,
      "learning_rate": 5.6578947368421056e-05,
      "loss": 0.2156,
      "step": 260
    },
    {
      "epoch": 2.84,
      "learning_rate": 5.3947368421052635e-05,
      "loss": 0.2299,
      "step": 270
    },
    {
      "epoch": 2.95,
      "learning_rate": 5.131578947368422e-05,
      "loss": 0.2307,
      "step": 280
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9785185185185186,
      "eval_f1": 0.9784834463836688,
      "eval_loss": 0.060329582542181015,
      "eval_precision": 0.978983995320319,
      "eval_recall": 0.9785185185185186,
      "eval_runtime": 23.3466,
      "eval_samples_per_second": 115.648,
      "eval_steps_per_second": 1.842,
      "step": 285
    },
    {
      "epoch": 3.05,
      "learning_rate": 4.868421052631579e-05,
      "loss": 0.2046,
      "step": 290
    },
    {
      "epoch": 3.16,
      "learning_rate": 4.605263157894737e-05,
      "loss": 0.2185,
      "step": 300
    },
    {
      "epoch": 3.26,
      "learning_rate": 4.342105263157895e-05,
      "loss": 0.2206,
      "step": 310
    },
    {
      "epoch": 3.37,
      "learning_rate": 4.078947368421053e-05,
      "loss": 0.1843,
      "step": 320
    },
    {
      "epoch": 3.47,
      "learning_rate": 3.815789473684211e-05,
      "loss": 0.2162,
      "step": 330
    },
    {
      "epoch": 3.58,
      "learning_rate": 3.5526315789473684e-05,
      "loss": 0.1891,
      "step": 340
    },
    {
      "epoch": 3.68,
      "learning_rate": 3.289473684210527e-05,
      "loss": 0.2399,
      "step": 350
    },
    {
      "epoch": 3.79,
      "learning_rate": 3.0263157894736844e-05,
      "loss": 0.2171,
      "step": 360
    },
    {
      "epoch": 3.89,
      "learning_rate": 2.7631578947368426e-05,
      "loss": 0.1889,
      "step": 370
    },
    {
      "epoch": 4.0,
      "learning_rate": 2.5e-05,
      "loss": 0.227,
      "step": 380
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9811111111111112,
      "eval_f1": 0.9811027103905656,
      "eval_loss": 0.053087469190359116,
      "eval_precision": 0.9814045642951811,
      "eval_recall": 0.9811111111111112,
      "eval_runtime": 23.3477,
      "eval_samples_per_second": 115.643,
      "eval_steps_per_second": 1.842,
      "step": 380
    },
    {
      "epoch": 4.11,
      "learning_rate": 2.236842105263158e-05,
      "loss": 0.1926,
      "step": 390
    },
    {
      "epoch": 4.21,
      "learning_rate": 1.9736842105263158e-05,
      "loss": 0.1774,
      "step": 400
    },
    {
      "epoch": 4.32,
      "learning_rate": 1.7105263157894737e-05,
      "loss": 0.1884,
      "step": 410
    },
    {
      "epoch": 4.42,
      "learning_rate": 1.4473684210526317e-05,
      "loss": 0.1797,
      "step": 420
    },
    {
      "epoch": 4.53,
      "learning_rate": 1.1842105263157895e-05,
      "loss": 0.1612,
      "step": 430
    },
    {
      "epoch": 4.63,
      "learning_rate": 9.210526315789474e-06,
      "loss": 0.1793,
      "step": 440
    },
    {
      "epoch": 4.74,
      "learning_rate": 6.578947368421053e-06,
      "loss": 0.2003,
      "step": 450
    },
    {
      "epoch": 4.84,
      "learning_rate": 3.9473684210526315e-06,
      "loss": 0.1785,
      "step": 460
    },
    {
      "epoch": 4.95,
      "learning_rate": 1.3157894736842106e-06,
      "loss": 0.1674,
      "step": 470
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.9825925925925926,
      "eval_f1": 0.9825868474705166,
      "eval_loss": 0.0510183610022068,
      "eval_precision": 0.9828193476192771,
      "eval_recall": 0.9825925925925926,
      "eval_runtime": 23.3012,
      "eval_samples_per_second": 115.874,
      "eval_steps_per_second": 1.845,
      "step": 475
    },
    {
      "epoch": 5.0,
      "step": 475,
      "total_flos": 3.953802662903808e+18,
      "train_loss": 0.4062625066857589,
      "train_runtime": 2798.1175,
      "train_samples_per_second": 43.422,
      "train_steps_per_second": 0.17
    }
  ],
  "max_steps": 475,
  "num_train_epochs": 5,
  "total_flos": 3.953802662903808e+18,
  "trial_name": null,
  "trial_params": null
}