{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 30790,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.16,
      "learning_rate": 4.91880480675544e-05,
      "loss": 0.9625,
      "step": 500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.8376096135108804e-05,
      "loss": 0.7409,
      "step": 1000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.7564144202663205e-05,
      "loss": 0.6805,
      "step": 1500
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.6752192270217606e-05,
      "loss": 0.6566,
      "step": 2000
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.5940240337772006e-05,
      "loss": 0.6385,
      "step": 2500
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.512828840532641e-05,
      "loss": 0.6292,
      "step": 3000
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.431633647288081e-05,
      "loss": 0.5934,
      "step": 3500
    },
    {
      "epoch": 1.3,
      "learning_rate": 4.350438454043521e-05,
      "loss": 0.5859,
      "step": 4000
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.269243260798961e-05,
      "loss": 0.5734,
      "step": 4500
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.188048067554401e-05,
      "loss": 0.5754,
      "step": 5000
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.106852874309841e-05,
      "loss": 0.5728,
      "step": 5500
    },
    {
      "epoch": 1.95,
      "learning_rate": 4.025657681065281e-05,
      "loss": 0.563,
      "step": 6000
    },
    {
      "epoch": 2.11,
      "learning_rate": 3.944462487820721e-05,
      "loss": 0.5386,
      "step": 6500
    },
    {
      "epoch": 2.27,
      "learning_rate": 3.8632672945761614e-05,
      "loss": 0.5346,
      "step": 7000
    },
    {
      "epoch": 2.44,
      "learning_rate": 3.7820721013316015e-05,
      "loss": 0.5307,
      "step": 7500
    },
    {
      "epoch": 2.6,
      "learning_rate": 3.7008769080870415e-05,
      "loss": 0.5283,
      "step": 8000
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.6196817148424816e-05,
      "loss": 0.5274,
      "step": 8500
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.538486521597922e-05,
      "loss": 0.5221,
      "step": 9000
    },
    {
      "epoch": 3.09,
      "learning_rate": 3.457291328353362e-05,
      "loss": 0.5092,
      "step": 9500
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.376096135108802e-05,
      "loss": 0.4963,
      "step": 10000
    },
    {
      "epoch": 3.41,
      "learning_rate": 3.294900941864242e-05,
      "loss": 0.4996,
      "step": 10500
    },
    {
      "epoch": 3.57,
      "learning_rate": 3.213705748619682e-05,
      "loss": 0.4891,
      "step": 11000
    },
    {
      "epoch": 3.73,
      "learning_rate": 3.132510555375122e-05,
      "loss": 0.4942,
      "step": 11500
    },
    {
      "epoch": 3.9,
      "learning_rate": 3.051315362130562e-05,
      "loss": 0.4907,
      "step": 12000
    },
    {
      "epoch": 4.06,
      "learning_rate": 2.970120168886002e-05,
      "loss": 0.4851,
      "step": 12500
    },
    {
      "epoch": 4.22,
      "learning_rate": 2.888924975641442e-05,
      "loss": 0.4711,
      "step": 13000
    },
    {
      "epoch": 4.38,
      "learning_rate": 2.8077297823968824e-05,
      "loss": 0.4683,
      "step": 13500
    },
    {
      "epoch": 4.55,
      "learning_rate": 2.7265345891523225e-05,
      "loss": 0.471,
      "step": 14000
    },
    {
      "epoch": 4.71,
      "learning_rate": 2.6453393959077623e-05,
      "loss": 0.4728,
      "step": 14500
    },
    {
      "epoch": 4.87,
      "learning_rate": 2.5641442026632023e-05,
      "loss": 0.4728,
      "step": 15000
    },
    {
      "epoch": 5.03,
      "learning_rate": 2.4829490094186424e-05,
      "loss": 0.4583,
      "step": 15500
    },
    {
      "epoch": 5.2,
      "learning_rate": 2.401753816174083e-05,
      "loss": 0.4559,
      "step": 16000
    },
    {
      "epoch": 5.36,
      "learning_rate": 2.3205586229295226e-05,
      "loss": 0.4476,
      "step": 16500
    },
    {
      "epoch": 5.52,
      "learning_rate": 2.2393634296849627e-05,
      "loss": 0.4495,
      "step": 17000
    },
    {
      "epoch": 5.68,
      "learning_rate": 2.158168236440403e-05,
      "loss": 0.4433,
      "step": 17500
    },
    {
      "epoch": 5.85,
      "learning_rate": 2.0769730431958428e-05,
      "loss": 0.4491,
      "step": 18000
    },
    {
      "epoch": 6.01,
      "learning_rate": 1.995777849951283e-05,
      "loss": 0.4518,
      "step": 18500
    },
    {
      "epoch": 6.17,
      "learning_rate": 1.914582656706723e-05,
      "loss": 0.4294,
      "step": 19000
    },
    {
      "epoch": 6.33,
      "learning_rate": 1.833387463462163e-05,
      "loss": 0.4307,
      "step": 19500
    },
    {
      "epoch": 6.5,
      "learning_rate": 1.752192270217603e-05,
      "loss": 0.4326,
      "step": 20000
    },
    {
      "epoch": 6.66,
      "learning_rate": 1.6709970769730432e-05,
      "loss": 0.4369,
      "step": 20500
    },
    {
      "epoch": 6.82,
      "learning_rate": 1.5898018837284833e-05,
      "loss": 0.4312,
      "step": 21000
    },
    {
      "epoch": 6.98,
      "learning_rate": 1.5086066904839236e-05,
      "loss": 0.435,
      "step": 21500
    },
    {
      "epoch": 7.15,
      "learning_rate": 1.4274114972393635e-05,
      "loss": 0.422,
      "step": 22000
    },
    {
      "epoch": 7.31,
      "learning_rate": 1.3462163039948036e-05,
      "loss": 0.4163,
      "step": 22500
    },
    {
      "epoch": 7.47,
      "learning_rate": 1.2650211107502435e-05,
      "loss": 0.4213,
      "step": 23000
    },
    {
      "epoch": 7.63,
      "learning_rate": 1.1838259175056837e-05,
      "loss": 0.423,
      "step": 23500
    },
    {
      "epoch": 7.79,
      "learning_rate": 1.1026307242611238e-05,
      "loss": 0.4185,
      "step": 24000
    },
    {
      "epoch": 7.96,
      "learning_rate": 1.0214355310165639e-05,
      "loss": 0.418,
      "step": 24500
    },
    {
      "epoch": 8.12,
      "learning_rate": 9.40240337772004e-06,
      "loss": 0.4146,
      "step": 25000
    },
    {
      "epoch": 8.28,
      "learning_rate": 8.59045144527444e-06,
      "loss": 0.4127,
      "step": 25500
    },
    {
      "epoch": 8.44,
      "learning_rate": 7.778499512828841e-06,
      "loss": 0.4124,
      "step": 26000
    },
    {
      "epoch": 8.61,
      "learning_rate": 6.966547580383241e-06,
      "loss": 0.4122,
      "step": 26500
    },
    {
      "epoch": 8.77,
      "learning_rate": 6.154595647937642e-06,
      "loss": 0.4106,
      "step": 27000
    },
    {
      "epoch": 8.93,
      "learning_rate": 5.342643715492044e-06,
      "loss": 0.4087,
      "step": 27500
    },
    {
      "epoch": 9.09,
      "learning_rate": 4.530691783046444e-06,
      "loss": 0.4035,
      "step": 28000
    },
    {
      "epoch": 9.26,
      "learning_rate": 3.718739850600845e-06,
      "loss": 0.4031,
      "step": 28500
    },
    {
      "epoch": 9.42,
      "learning_rate": 2.9067879181552453e-06,
      "loss": 0.4094,
      "step": 29000
    },
    {
      "epoch": 9.58,
      "learning_rate": 2.094835985709646e-06,
      "loss": 0.404,
      "step": 29500
    },
    {
      "epoch": 9.74,
      "learning_rate": 1.2828840532640467e-06,
      "loss": 0.402,
      "step": 30000
    },
    {
      "epoch": 9.91,
      "learning_rate": 4.7093212081844755e-07,
      "loss": 0.4028,
      "step": 30500
    },
    {
      "epoch": 10.0,
      "step": 30790,
      "total_flos": 4.1886686457195725e+17,
      "train_loss": 0.48998048069338473,
      "train_runtime": 23746.1377,
      "train_samples_per_second": 12.963,
      "train_steps_per_second": 1.297
    }
  ],
  "max_steps": 30790,
  "num_train_epochs": 10,
  "total_flos": 4.1886686457195725e+17,
  "trial_name": null,
  "trial_params": null
}