BPE-HF-Wikipedia-FR-Morphemes / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 21.0,
"global_step": 80997,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"learning_rate": 5e-09,
"loss": 10.5291,
"step": 1
},
{
"epoch": 0.13,
"learning_rate": 2.5e-06,
"loss": 9.5195,
"step": 500
},
{
"epoch": 0.26,
"learning_rate": 5e-06,
"loss": 7.8918,
"step": 1000
},
{
"epoch": 0.39,
"learning_rate": 7.5e-06,
"loss": 6.8556,
"step": 1500
},
{
"epoch": 0.52,
"learning_rate": 1e-05,
"loss": 6.5569,
"step": 2000
},
{
"epoch": 0.65,
"learning_rate": 1.25e-05,
"loss": 6.3942,
"step": 2500
},
{
"epoch": 0.78,
"learning_rate": 1.5e-05,
"loss": 6.286,
"step": 3000
},
{
"epoch": 0.91,
"learning_rate": 1.75e-05,
"loss": 6.1963,
"step": 3500
},
{
"epoch": 1.04,
"learning_rate": 2e-05,
"loss": 6.1271,
"step": 4000
},
{
"epoch": 1.17,
"learning_rate": 2.25e-05,
"loss": 6.0662,
"step": 4500
},
{
"epoch": 1.3,
"learning_rate": 2.5e-05,
"loss": 6.0132,
"step": 5000
},
{
"epoch": 1.43,
"learning_rate": 2.7500000000000004e-05,
"loss": 5.9636,
"step": 5500
},
{
"epoch": 1.56,
"learning_rate": 3e-05,
"loss": 5.9214,
"step": 6000
},
{
"epoch": 1.69,
"learning_rate": 3.2500000000000004e-05,
"loss": 5.893,
"step": 6500
},
{
"epoch": 1.81,
"learning_rate": 3.5e-05,
"loss": 5.8604,
"step": 7000
},
{
"epoch": 1.94,
"learning_rate": 3.7500000000000003e-05,
"loss": 5.8285,
"step": 7500
},
{
"epoch": 2.07,
"learning_rate": 4e-05,
"loss": 5.8056,
"step": 8000
},
{
"epoch": 2.2,
"learning_rate": 4.2495e-05,
"loss": 5.7834,
"step": 8500
},
{
"epoch": 2.33,
"learning_rate": 4.4995000000000005e-05,
"loss": 5.7641,
"step": 9000
},
{
"epoch": 2.46,
"learning_rate": 4.7495e-05,
"loss": 5.7448,
"step": 9500
},
{
"epoch": 2.59,
"learning_rate": 4.9995000000000005e-05,
"loss": 5.7285,
"step": 10000
},
{
"epoch": 2.72,
"learning_rate": 4.998375521920668e-05,
"loss": 5.7139,
"step": 10500
},
{
"epoch": 2.85,
"learning_rate": 4.996744519832985e-05,
"loss": 5.6956,
"step": 11000
},
{
"epoch": 2.98,
"learning_rate": 4.995113517745303e-05,
"loss": 5.6856,
"step": 11500
},
{
"epoch": 3.11,
"learning_rate": 4.9934825156576205e-05,
"loss": 5.6724,
"step": 12000
},
{
"epoch": 3.24,
"learning_rate": 4.991854775574113e-05,
"loss": 5.6621,
"step": 12500
},
{
"epoch": 3.37,
"learning_rate": 4.9902237734864304e-05,
"loss": 5.6517,
"step": 13000
},
{
"epoch": 3.5,
"learning_rate": 4.988592771398747e-05,
"loss": 5.643,
"step": 13500
},
{
"epoch": 3.63,
"learning_rate": 4.986961769311065e-05,
"loss": 5.6321,
"step": 14000
},
{
"epoch": 3.76,
"learning_rate": 4.9853307672233826e-05,
"loss": 5.6268,
"step": 14500
},
{
"epoch": 3.89,
"learning_rate": 4.983703027139875e-05,
"loss": 5.6193,
"step": 15000
},
{
"epoch": 4.02,
"learning_rate": 4.9820752870563676e-05,
"loss": 5.6084,
"step": 15500
},
{
"epoch": 4.15,
"learning_rate": 4.980444284968685e-05,
"loss": 5.6034,
"step": 16000
},
{
"epoch": 4.28,
"learning_rate": 4.978813282881002e-05,
"loss": 5.597,
"step": 16500
},
{
"epoch": 4.41,
"learning_rate": 4.977182280793319e-05,
"loss": 5.5927,
"step": 17000
},
{
"epoch": 4.54,
"learning_rate": 4.975551278705637e-05,
"loss": 5.5841,
"step": 17500
},
{
"epoch": 4.67,
"learning_rate": 4.9739202766179545e-05,
"loss": 5.5778,
"step": 18000
},
{
"epoch": 4.8,
"learning_rate": 4.972289274530272e-05,
"loss": 5.5748,
"step": 18500
},
{
"epoch": 4.93,
"learning_rate": 4.970658272442589e-05,
"loss": 5.5692,
"step": 19000
},
{
"epoch": 5.06,
"learning_rate": 4.969030532359082e-05,
"loss": 5.5608,
"step": 19500
},
{
"epoch": 5.19,
"learning_rate": 4.967399530271399e-05,
"loss": 5.5594,
"step": 20000
},
{
"epoch": 5.32,
"learning_rate": 4.965771790187892e-05,
"loss": 5.5563,
"step": 20500
},
{
"epoch": 5.44,
"learning_rate": 4.964140788100209e-05,
"loss": 5.5528,
"step": 21000
},
{
"epoch": 5.57,
"learning_rate": 4.962509786012526e-05,
"loss": 5.5461,
"step": 21500
},
{
"epoch": 5.7,
"learning_rate": 4.960878783924844e-05,
"loss": 5.5413,
"step": 22000
},
{
"epoch": 5.83,
"learning_rate": 4.959251043841336e-05,
"loss": 5.5347,
"step": 22500
},
{
"epoch": 5.96,
"learning_rate": 4.957623303757829e-05,
"loss": 5.535,
"step": 23000
},
{
"epoch": 6.09,
"learning_rate": 4.9559923016701466e-05,
"loss": 5.5296,
"step": 23500
},
{
"epoch": 6.22,
"learning_rate": 4.954361299582464e-05,
"loss": 5.5252,
"step": 24000
},
{
"epoch": 6.35,
"learning_rate": 4.952730297494781e-05,
"loss": 5.5228,
"step": 24500
},
{
"epoch": 6.48,
"learning_rate": 4.951099295407098e-05,
"loss": 5.5196,
"step": 25000
},
{
"epoch": 6.61,
"learning_rate": 4.949468293319416e-05,
"loss": 5.5208,
"step": 25500
},
{
"epoch": 6.74,
"learning_rate": 4.947837291231733e-05,
"loss": 5.5133,
"step": 26000
},
{
"epoch": 6.87,
"learning_rate": 4.9462062891440504e-05,
"loss": 5.5112,
"step": 26500
},
{
"epoch": 7.0,
"learning_rate": 4.9445785490605426e-05,
"loss": 5.5082,
"step": 27000
},
{
"epoch": 7.13,
"learning_rate": 4.94294754697286e-05,
"loss": 5.4993,
"step": 27500
},
{
"epoch": 7.26,
"learning_rate": 4.941316544885178e-05,
"loss": 5.5017,
"step": 28000
},
{
"epoch": 7.39,
"learning_rate": 4.9396855427974955e-05,
"loss": 5.4975,
"step": 28500
},
{
"epoch": 7.52,
"learning_rate": 4.9380545407098125e-05,
"loss": 5.4939,
"step": 29000
},
{
"epoch": 7.65,
"learning_rate": 4.9364268006263046e-05,
"loss": 5.4887,
"step": 29500
},
{
"epoch": 7.78,
"learning_rate": 4.934795798538622e-05,
"loss": 5.4916,
"step": 30000
},
{
"epoch": 7.91,
"learning_rate": 4.93316479645094e-05,
"loss": 5.4877,
"step": 30500
},
{
"epoch": 8.04,
"learning_rate": 4.9315337943632576e-05,
"loss": 5.4827,
"step": 31000
},
{
"epoch": 8.17,
"learning_rate": 4.929902792275574e-05,
"loss": 5.4813,
"step": 31500
},
{
"epoch": 8.3,
"learning_rate": 4.9282750521920674e-05,
"loss": 5.4788,
"step": 32000
},
{
"epoch": 8.43,
"learning_rate": 4.926644050104384e-05,
"loss": 5.4751,
"step": 32500
},
{
"epoch": 8.56,
"learning_rate": 4.925013048016702e-05,
"loss": 5.4706,
"step": 33000
},
{
"epoch": 8.69,
"learning_rate": 4.923382045929019e-05,
"loss": 5.4727,
"step": 33500
},
{
"epoch": 8.82,
"learning_rate": 4.921754305845512e-05,
"loss": 5.4667,
"step": 34000
},
{
"epoch": 8.94,
"learning_rate": 4.9201233037578294e-05,
"loss": 5.3861,
"step": 34500
},
{
"epoch": 9.07,
"learning_rate": 4.9184923016701464e-05,
"loss": 5.1493,
"step": 35000
},
{
"epoch": 9.2,
"learning_rate": 4.9168612995824633e-05,
"loss": 4.972,
"step": 35500
},
{
"epoch": 9.33,
"learning_rate": 4.915230297494781e-05,
"loss": 4.7871,
"step": 36000
},
{
"epoch": 9.46,
"learning_rate": 4.9135992954070986e-05,
"loss": 4.5924,
"step": 36500
},
{
"epoch": 9.59,
"learning_rate": 4.9119715553235915e-05,
"loss": 4.4234,
"step": 37000
},
{
"epoch": 9.72,
"learning_rate": 4.9103405532359084e-05,
"loss": 4.2503,
"step": 37500
},
{
"epoch": 9.85,
"learning_rate": 4.9087095511482254e-05,
"loss": 4.0878,
"step": 38000
},
{
"epoch": 9.98,
"learning_rate": 4.907078549060543e-05,
"loss": 3.8941,
"step": 38500
},
{
"epoch": 10.11,
"learning_rate": 4.905447546972861e-05,
"loss": 3.4534,
"step": 39000
},
{
"epoch": 10.24,
"learning_rate": 4.9038165448851776e-05,
"loss": 2.8029,
"step": 39500
},
{
"epoch": 10.37,
"learning_rate": 4.902185542797495e-05,
"loss": 2.3458,
"step": 40000
},
{
"epoch": 10.5,
"learning_rate": 4.900554540709812e-05,
"loss": 2.1352,
"step": 40500
},
{
"epoch": 10.63,
"learning_rate": 4.89893006263048e-05,
"loss": 2.0089,
"step": 41000
},
{
"epoch": 10.76,
"learning_rate": 4.897299060542798e-05,
"loss": 1.9194,
"step": 41500
},
{
"epoch": 10.89,
"learning_rate": 4.895668058455115e-05,
"loss": 1.8505,
"step": 42000
},
{
"epoch": 11.02,
"learning_rate": 4.8940370563674325e-05,
"loss": 1.7901,
"step": 42500
},
{
"epoch": 11.15,
"learning_rate": 4.8924060542797495e-05,
"loss": 1.7405,
"step": 43000
},
{
"epoch": 11.28,
"learning_rate": 4.890775052192067e-05,
"loss": 1.6962,
"step": 43500
},
{
"epoch": 11.41,
"learning_rate": 4.889144050104385e-05,
"loss": 1.6585,
"step": 44000
},
{
"epoch": 11.54,
"learning_rate": 4.887516310020877e-05,
"loss": 1.6184,
"step": 44500
},
{
"epoch": 11.67,
"learning_rate": 4.8858853079331946e-05,
"loss": 1.5853,
"step": 45000
},
{
"epoch": 11.8,
"learning_rate": 4.8842543058455116e-05,
"loss": 1.5588,
"step": 45500
},
{
"epoch": 11.93,
"learning_rate": 4.882623303757829e-05,
"loss": 1.5296,
"step": 46000
},
{
"epoch": 12.06,
"learning_rate": 4.880992301670146e-05,
"loss": 1.504,
"step": 46500
},
{
"epoch": 12.19,
"learning_rate": 4.879364561586639e-05,
"loss": 1.4818,
"step": 47000
},
{
"epoch": 12.32,
"learning_rate": 4.8777335594989566e-05,
"loss": 1.4608,
"step": 47500
},
{
"epoch": 12.44,
"learning_rate": 4.876102557411274e-05,
"loss": 1.4441,
"step": 48000
},
{
"epoch": 12.57,
"learning_rate": 4.8744715553235906e-05,
"loss": 1.4253,
"step": 48500
},
{
"epoch": 12.7,
"learning_rate": 4.872840553235908e-05,
"loss": 1.4099,
"step": 49000
},
{
"epoch": 12.83,
"learning_rate": 4.871209551148226e-05,
"loss": 1.3957,
"step": 49500
},
{
"epoch": 12.96,
"learning_rate": 4.869578549060543e-05,
"loss": 1.3802,
"step": 50000
},
{
"epoch": 13.09,
"learning_rate": 4.8679475469728604e-05,
"loss": 1.3644,
"step": 50500
},
{
"epoch": 13.22,
"learning_rate": 4.8663198068893526e-05,
"loss": 1.3536,
"step": 51000
},
{
"epoch": 13.35,
"learning_rate": 4.86468880480167e-05,
"loss": 1.3423,
"step": 51500
},
{
"epoch": 13.48,
"learning_rate": 4.863061064718163e-05,
"loss": 1.328,
"step": 52000
},
{
"epoch": 13.61,
"learning_rate": 4.86143006263048e-05,
"loss": 1.3191,
"step": 52500
},
{
"epoch": 13.74,
"learning_rate": 4.859799060542798e-05,
"loss": 1.3084,
"step": 53000
},
{
"epoch": 13.87,
"learning_rate": 4.858168058455115e-05,
"loss": 1.2963,
"step": 53500
},
{
"epoch": 14.0,
"learning_rate": 4.856540318371608e-05,
"loss": 1.2866,
"step": 54000
},
{
"epoch": 14.13,
"learning_rate": 4.854909316283925e-05,
"loss": 1.2748,
"step": 54500
},
{
"epoch": 14.26,
"learning_rate": 4.853278314196242e-05,
"loss": 1.2662,
"step": 55000
},
{
"epoch": 14.39,
"learning_rate": 4.85164731210856e-05,
"loss": 1.2603,
"step": 55500
},
{
"epoch": 14.52,
"learning_rate": 4.8500163100208774e-05,
"loss": 1.2479,
"step": 56000
},
{
"epoch": 14.65,
"learning_rate": 4.8483885699373696e-05,
"loss": 1.2429,
"step": 56500
},
{
"epoch": 14.78,
"learning_rate": 4.8467575678496865e-05,
"loss": 1.2346,
"step": 57000
},
{
"epoch": 14.91,
"learning_rate": 4.845126565762004e-05,
"loss": 1.227,
"step": 57500
},
{
"epoch": 15.04,
"learning_rate": 4.843495563674322e-05,
"loss": 1.2201,
"step": 58000
},
{
"epoch": 15.17,
"learning_rate": 4.8418645615866394e-05,
"loss": 1.2107,
"step": 58500
},
{
"epoch": 15.3,
"learning_rate": 4.8402368215031316e-05,
"loss": 1.2042,
"step": 59000
},
{
"epoch": 15.43,
"learning_rate": 4.838605819415449e-05,
"loss": 1.1966,
"step": 59500
},
{
"epoch": 15.56,
"learning_rate": 4.836974817327766e-05,
"loss": 1.1893,
"step": 60000
},
{
"epoch": 15.69,
"learning_rate": 4.835343815240084e-05,
"loss": 1.1849,
"step": 60500
},
{
"epoch": 15.82,
"learning_rate": 4.833716075156576e-05,
"loss": 1.1777,
"step": 61000
},
{
"epoch": 15.95,
"learning_rate": 4.832085073068894e-05,
"loss": 1.1731,
"step": 61500
},
{
"epoch": 16.07,
"learning_rate": 4.830454070981211e-05,
"loss": 1.1659,
"step": 62000
},
{
"epoch": 16.2,
"learning_rate": 4.828823068893528e-05,
"loss": 1.159,
"step": 62500
},
{
"epoch": 16.33,
"learning_rate": 4.827192066805846e-05,
"loss": 1.1537,
"step": 63000
},
{
"epoch": 16.46,
"learning_rate": 4.825564326722338e-05,
"loss": 1.1486,
"step": 63500
},
{
"epoch": 16.59,
"learning_rate": 4.823936586638831e-05,
"loss": 1.1415,
"step": 64000
},
{
"epoch": 16.72,
"learning_rate": 4.8223055845511486e-05,
"loss": 1.138,
"step": 64500
},
{
"epoch": 16.85,
"learning_rate": 4.8206745824634655e-05,
"loss": 1.1307,
"step": 65000
},
{
"epoch": 16.98,
"learning_rate": 4.819043580375783e-05,
"loss": 1.127,
"step": 65500
},
{
"epoch": 17.11,
"learning_rate": 4.817415840292276e-05,
"loss": 1.1194,
"step": 66000
},
{
"epoch": 17.24,
"learning_rate": 4.815784838204594e-05,
"loss": 1.1152,
"step": 66500
},
{
"epoch": 17.37,
"learning_rate": 4.8141538361169106e-05,
"loss": 1.1109,
"step": 67000
},
{
"epoch": 17.5,
"learning_rate": 4.8125228340292276e-05,
"loss": 1.1096,
"step": 67500
},
{
"epoch": 17.63,
"learning_rate": 4.8108950939457204e-05,
"loss": 1.1036,
"step": 68000
},
{
"epoch": 17.76,
"learning_rate": 4.809264091858038e-05,
"loss": 1.0995,
"step": 68500
},
{
"epoch": 17.89,
"learning_rate": 4.807633089770355e-05,
"loss": 1.0941,
"step": 69000
},
{
"epoch": 18.02,
"learning_rate": 4.806002087682672e-05,
"loss": 1.09,
"step": 69500
},
{
"epoch": 18.15,
"learning_rate": 4.8043710855949896e-05,
"loss": 1.0841,
"step": 70000
},
{
"epoch": 18.28,
"learning_rate": 4.802740083507307e-05,
"loss": 1.0802,
"step": 70500
},
{
"epoch": 18.41,
"learning_rate": 4.801109081419625e-05,
"loss": 1.0769,
"step": 71000
},
{
"epoch": 18.54,
"learning_rate": 4.799481341336117e-05,
"loss": 1.0729,
"step": 71500
},
{
"epoch": 18.67,
"learning_rate": 4.79785360125261e-05,
"loss": 1.0696,
"step": 72000
},
{
"epoch": 18.8,
"learning_rate": 4.7962225991649276e-05,
"loss": 1.0645,
"step": 72500
},
{
"epoch": 18.93,
"learning_rate": 4.7945915970772445e-05,
"loss": 1.06,
"step": 73000
},
{
"epoch": 19.06,
"learning_rate": 4.7929605949895615e-05,
"loss": 1.0546,
"step": 73500
},
{
"epoch": 19.19,
"learning_rate": 4.791329592901879e-05,
"loss": 1.0523,
"step": 74000
},
{
"epoch": 19.32,
"learning_rate": 4.789698590814197e-05,
"loss": 1.0483,
"step": 74500
},
{
"epoch": 19.45,
"learning_rate": 4.788067588726514e-05,
"loss": 1.0445,
"step": 75000
},
{
"epoch": 19.57,
"learning_rate": 4.7864365866388314e-05,
"loss": 1.0402,
"step": 75500
},
{
"epoch": 19.7,
"learning_rate": 4.7848088465553235e-05,
"loss": 1.0354,
"step": 76000
},
{
"epoch": 19.83,
"learning_rate": 4.783177844467641e-05,
"loss": 1.0329,
"step": 76500
},
{
"epoch": 19.96,
"learning_rate": 4.781546842379959e-05,
"loss": 1.0308,
"step": 77000
},
{
"epoch": 20.09,
"learning_rate": 4.779919102296451e-05,
"loss": 1.0272,
"step": 77500
},
{
"epoch": 20.22,
"learning_rate": 4.7782881002087686e-05,
"loss": 1.0218,
"step": 78000
},
{
"epoch": 20.35,
"learning_rate": 4.7766570981210856e-05,
"loss": 1.0197,
"step": 78500
},
{
"epoch": 20.48,
"learning_rate": 4.775026096033403e-05,
"loss": 1.0147,
"step": 79000
},
{
"epoch": 20.61,
"learning_rate": 4.773395093945721e-05,
"loss": 1.0115,
"step": 79500
},
{
"epoch": 20.74,
"learning_rate": 4.771764091858038e-05,
"loss": 1.0095,
"step": 80000
},
{
"epoch": 20.87,
"learning_rate": 4.770136351774531e-05,
"loss": 1.0087,
"step": 80500
}
],
"max_steps": 1542800,
"num_train_epochs": 400,
"total_flos": 2.1827467373904396e+19,
"trial_name": null,
"trial_params": null
}
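The JSON above is the standard `trainer_state.json` written by the Hugging Face `Trainer`: each `log_history` entry records the epoch, learning rate, and training loss at a given optimizer step, and the trailing fields give the global step reached, the configured `max_steps`, and the total FLOs. A minimal sketch of how one might read the log back for inspection, assuming the file has been downloaded locally under the same name (the path and variable names are illustrative, not part of the upload):

```python
import json

# Load the Trainer state shown above; adjust the path to wherever
# the checkpoint file was downloaded.
with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# Keep only entries that carry a training loss (every entry here does,
# but evaluation-only entries in other runs may not).
entries = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
lrs = [e["learning_rate"] for e in entries]

print(f"logged points : {len(entries)}")
print(f"global step   : {state['global_step']} of max_steps {state['max_steps']}")
print(f"epochs logged : {state['epoch']} of {state['num_train_epochs']}")
print(f"loss           : {losses[0]:.4f} -> {losses[-1]:.4f}")
print(f"learning rate : {lrs[0]:.2e} -> {lrs[-1]:.2e}")
```

Plotting `losses` against `steps` with any plotting library makes the warm-up phase (rising learning rate up to step ~10000) and the sharp loss drop around epochs 9-10 easy to see.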