{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.6530392580846667,
  "eval_steps": 500,
  "global_step": 21500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "ep_loss": 4.4238,
      "epoch": 0.04,
      "learning_rate": 2.5e-05,
      "loss": 11.1467,
      "mlm_loss": 6.7229,
      "step": 500
    },
    {
      "ep_loss": 0.4305,
      "epoch": 0.08,
      "learning_rate": 5e-05,
      "loss": 2.9841,
      "mlm_loss": 2.5537,
      "step": 1000
    },
    {
      "ep_loss": 0.3724,
      "epoch": 0.12,
      "learning_rate": 7.5e-05,
      "loss": 2.2371,
      "mlm_loss": 1.8647,
      "step": 1500
    },
    {
      "ep_loss": 0.364,
      "epoch": 0.15,
      "learning_rate": 0.0001,
      "loss": 1.8477,
      "mlm_loss": 1.4837,
      "step": 2000
    },
    {
      "ep_loss": 0.3678,
      "epoch": 0.19,
      "learning_rate": 0.00012495,
      "loss": 1.5215,
      "mlm_loss": 1.1538,
      "step": 2500
    },
    {
      "ep_loss": 0.3617,
      "epoch": 0.23,
      "learning_rate": 0.00014995,
      "loss": 1.4119,
      "mlm_loss": 1.0501,
      "step": 3000
    },
    {
      "ep_loss": 0.3336,
      "epoch": 0.27,
      "learning_rate": 0.0001749,
      "loss": 1.3027,
      "mlm_loss": 0.9691,
      "step": 3500
    },
    {
      "ep_loss": 0.3348,
      "epoch": 0.31,
      "learning_rate": 0.0001999,
      "loss": 1.2441,
      "mlm_loss": 0.9093,
      "step": 4000
    },
    {
      "ep_loss": 0.3348,
      "epoch": 0.35,
      "learning_rate": 0.0002249,
      "loss": 1.1942,
      "mlm_loss": 0.8594,
      "step": 4500
    },
    {
      "ep_loss": 0.3331,
      "epoch": 0.38,
      "learning_rate": 0.0002499,
      "loss": 1.1466,
      "mlm_loss": 0.8135,
      "step": 5000
    },
    {
      "ep_loss": 0.3268,
      "epoch": 0.42,
      "learning_rate": 0.00027489999999999996,
      "loss": 1.1067,
      "mlm_loss": 0.7799,
      "step": 5500
    },
    {
      "ep_loss": 0.3378,
      "epoch": 0.46,
      "learning_rate": 0.00029985,
      "loss": 1.1007,
      "mlm_loss": 0.7629,
      "step": 6000
    },
    {
      "ep_loss": 0.3033,
      "epoch": 0.5,
      "learning_rate": 0.00032485,
      "loss": 1.028,
      "mlm_loss": 0.7246,
      "step": 6500
    },
    {
      "ep_loss": 0.2848,
      "epoch": 0.54,
      "learning_rate": 0.00034985,
      "loss": 0.9833,
      "mlm_loss": 0.6985,
      "step": 7000
    },
    {
      "ep_loss": 0.316,
      "epoch": 0.58,
      "learning_rate": 0.00037485000000000003,
      "loss": 1.0235,
      "mlm_loss": 0.7076,
      "step": 7500
    },
    {
      "ep_loss": 0.2885,
      "epoch": 0.62,
      "learning_rate": 0.00039975,
      "loss": 0.9673,
      "mlm_loss": 0.6789,
      "step": 8000
    },
    {
      "ep_loss": 0.274,
      "epoch": 0.65,
      "learning_rate": 0.00042475000000000005,
      "loss": 0.9266,
      "mlm_loss": 0.6525,
      "step": 8500
    },
    {
      "ep_loss": 0.3008,
      "epoch": 0.69,
      "learning_rate": 0.0004497,
      "loss": 0.9646,
      "mlm_loss": 0.6638,
      "step": 9000
    },
    {
      "ep_loss": 0.2843,
      "epoch": 0.73,
      "learning_rate": 0.00047470000000000005,
      "loss": 0.9349,
      "mlm_loss": 0.6506,
      "step": 9500
    },
    {
      "ep_loss": 0.2693,
      "epoch": 0.77,
      "learning_rate": 0.0004997,
      "loss": 0.8901,
      "mlm_loss": 0.6208,
      "step": 10000
    },
    {
      "ep_loss": 0.2655,
      "epoch": 0.81,
      "learning_rate": 0.0004995159140796489,
      "loss": 0.8783,
      "mlm_loss": 0.6128,
      "step": 10500
    },
    {
      "ep_loss": 0.2711,
      "epoch": 0.85,
      "learning_rate": 0.0004990269285042333,
      "loss": 0.8794,
      "mlm_loss": 0.6083,
      "step": 11000
    },
    {
      "ep_loss": 0.2579,
      "epoch": 0.88,
      "learning_rate": 0.000498536962997805,
      "loss": 0.8489,
      "mlm_loss": 0.591,
      "step": 11500
    },
    {
      "ep_loss": 0.2607,
      "epoch": 0.92,
      "learning_rate": 0.0004980469974913766,
      "loss": 0.8504,
      "mlm_loss": 0.5898,
      "step": 12000
    },
    {
      "ep_loss": 0.2585,
      "epoch": 0.96,
      "learning_rate": 0.0004975570319849483,
      "loss": 0.8338,
      "mlm_loss": 0.5753,
      "step": 12500
    },
    {
      "ep_loss": 0.2584,
      "epoch": 1.0,
      "learning_rate": 0.0004970670664785199,
      "loss": 0.8287,
      "mlm_loss": 0.5703,
      "step": 13000
    },
    {
      "ep_loss": 0.2511,
      "epoch": 1.04,
      "learning_rate": 0.0004965771009720916,
      "loss": 0.8047,
      "mlm_loss": 0.5536,
      "step": 13500
    },
    {
      "ep_loss": 0.2476,
      "epoch": 1.08,
      "learning_rate": 0.0004960871354656633,
      "loss": 0.7952,
      "mlm_loss": 0.5476,
      "step": 14000
    },
    {
      "ep_loss": 0.2482,
      "epoch": 1.11,
      "learning_rate": 0.0004955971699592349,
      "loss": 0.7894,
      "mlm_loss": 0.5412,
      "step": 14500
    },
    {
      "ep_loss": 0.2424,
      "epoch": 1.15,
      "learning_rate": 0.0004951072044528065,
      "loss": 0.7744,
      "mlm_loss": 0.5321,
      "step": 15000
    },
    {
      "ep_loss": 0.2462,
      "epoch": 1.19,
      "learning_rate": 0.0004946182188773911,
      "loss": 0.7849,
      "mlm_loss": 0.5386,
      "step": 15500
    },
    {
      "ep_loss": 0.235,
      "epoch": 1.23,
      "learning_rate": 0.0004941282533709627,
      "loss": 0.7563,
      "mlm_loss": 0.5213,
      "step": 16000
    },
    {
      "ep_loss": 0.2354,
      "epoch": 1.27,
      "learning_rate": 0.0004936382878645343,
      "loss": 0.7513,
      "mlm_loss": 0.5159,
      "step": 16500
    },
    {
      "ep_loss": 0.2336,
      "epoch": 1.31,
      "learning_rate": 0.0004931493022891189,
      "loss": 0.7454,
      "mlm_loss": 0.5118,
      "step": 17000
    },
    {
      "ep_loss": 0.233,
      "epoch": 1.35,
      "learning_rate": 0.0004926603167137033,
      "loss": 0.7403,
      "mlm_loss": 0.5073,
      "step": 17500
    },
    {
      "ep_loss": 0.2345,
      "epoch": 1.38,
      "learning_rate": 0.0004921713311382879,
      "loss": 0.7399,
      "mlm_loss": 0.5054,
      "step": 18000
    },
    {
      "ep_loss": 0.2258,
      "epoch": 1.42,
      "learning_rate": 0.0004916823455628724,
      "loss": 0.7183,
      "mlm_loss": 0.4925,
      "step": 18500
    },
    {
      "ep_loss": 0.235,
      "epoch": 1.46,
      "learning_rate": 0.0004911962997804954,
      "loss": 0.7247,
      "mlm_loss": 0.4897,
      "step": 19000
    },
    {
      "ep_loss": 0.2446,
      "epoch": 1.5,
      "learning_rate": 0.0004907082941360928,
      "loss": 0.7579,
      "mlm_loss": 0.5133,
      "step": 19500
    },
    {
      "ep_loss": 0.2295,
      "epoch": 1.54,
      "learning_rate": 0.0004902183286296644,
      "loss": 0.7203,
      "mlm_loss": 0.4908,
      "step": 20000
    },
    {
      "ep_loss": 0.2332,
      "epoch": 1.58,
      "learning_rate": 0.0004897283631232362,
      "loss": 0.739,
      "mlm_loss": 0.5058,
      "step": 20500
    },
    {
      "ep_loss": 0.2257,
      "epoch": 1.61,
      "learning_rate": 0.0004892383976168078,
      "loss": 0.7104,
      "mlm_loss": 0.4847,
      "step": 21000
    },
    {
      "ep_loss": 0.2296,
      "epoch": 1.65,
      "learning_rate": 0.0004887484321103795,
      "loss": 0.7109,
      "mlm_loss": 0.4813,
      "step": 21500
    }
  ],
  "logging_steps": 500,
  "max_steps": 520240,
  "num_train_epochs": 40,
  "save_steps": 500,
  "total_flos": 2.0528571346929582e+19,
  "trial_name": null,
  "trial_params": null
}