{
  "best_metric": 0.23823951184749603,
  "best_model_checkpoint": "./distilbert-marian-training1/checkpoint-48000",
  "epoch": 10.0,
  "global_step": 160000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 6.212500000000001e-07,
      "loss": 0.1004,
      "step": 500
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2462500000000001e-06,
      "loss": 0.1263,
      "step": 1000
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.8712500000000003e-06,
      "loss": 0.1256,
      "step": 1500
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.4962500000000005e-06,
      "loss": 0.1014,
      "step": 2000
    },
    {
      "epoch": 0.16,
      "learning_rate": 3.12125e-06,
      "loss": 0.1124,
      "step": 2500
    },
    {
      "epoch": 0.19,
      "learning_rate": 3.7462500000000003e-06,
      "loss": 0.1479,
      "step": 3000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.37125e-06,
      "loss": 0.1206,
      "step": 3500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.9950000000000005e-06,
      "loss": 0.1157,
      "step": 4000
    },
    {
      "epoch": 0.28,
      "learning_rate": 5.620000000000001e-06,
      "loss": 0.1104,
      "step": 4500
    },
    {
      "epoch": 0.31,
      "learning_rate": 6.245000000000001e-06,
      "loss": 0.1329,
      "step": 5000
    },
    {
      "epoch": 0.34,
      "learning_rate": 6.870000000000001e-06,
      "loss": 0.1493,
      "step": 5500
    },
    {
      "epoch": 0.38,
      "learning_rate": 7.493750000000001e-06,
      "loss": 0.1359,
      "step": 6000
    },
    {
      "epoch": 0.41,
      "learning_rate": 8.11875e-06,
      "loss": 0.1326,
      "step": 6500
    },
    {
      "epoch": 0.44,
      "learning_rate": 8.74375e-06,
      "loss": 0.1488,
      "step": 7000
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.3675e-06,
      "loss": 0.1719,
      "step": 7500
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.992500000000001e-06,
      "loss": 0.1606,
      "step": 8000
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.9675e-06,
      "loss": 0.1633,
      "step": 8500
    },
    {
      "epoch": 0.56,
      "learning_rate": 9.934605263157895e-06,
      "loss": 0.1358,
      "step": 9000
    },
    {
      "epoch": 0.59,
      "learning_rate": 9.90171052631579e-06,
      "loss": 0.1801,
      "step": 9500
    },
    {
      "epoch": 0.62,
      "learning_rate": 9.868815789473684e-06,
      "loss": 0.1531,
      "step": 10000
    },
    {
      "epoch": 0.66,
      "learning_rate": 9.835921052631579e-06,
      "loss": 0.185,
      "step": 10500
    },
    {
      "epoch": 0.69,
      "learning_rate": 9.803026315789475e-06,
      "loss": 0.1619,
      "step": 11000
    },
    {
      "epoch": 0.72,
      "learning_rate": 9.77013157894737e-06,
      "loss": 0.136,
      "step": 11500
    },
    {
      "epoch": 0.75,
      "learning_rate": 9.737302631578948e-06,
      "loss": 0.1712,
      "step": 12000
    },
    {
      "epoch": 0.78,
      "learning_rate": 9.704473684210527e-06,
      "loss": 0.1482,
      "step": 12500
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.671578947368422e-06,
      "loss": 0.1849,
      "step": 13000
    },
    {
      "epoch": 0.84,
      "learning_rate": 9.638684210526316e-06,
      "loss": 0.1395,
      "step": 13500
    },
    {
      "epoch": 0.88,
      "learning_rate": 9.605789473684211e-06,
      "loss": 0.152,
      "step": 14000
    },
    {
      "epoch": 0.91,
      "learning_rate": 9.572894736842107e-06,
      "loss": 0.158,
      "step": 14500
    },
    {
      "epoch": 0.94,
      "learning_rate": 9.54e-06,
      "loss": 0.1458,
      "step": 15000
    },
    {
      "epoch": 0.97,
      "learning_rate": 9.507105263157895e-06,
      "loss": 0.1783,
      "step": 15500
    },
    {
      "epoch": 1.0,
      "learning_rate": 9.474210526315791e-06,
      "loss": 0.1681,
      "step": 16000
    },
    {
      "epoch": 1.0,
      "eval_BLEU": 72.24934811777383,
      "eval_BLEU-Bigram-Precision": 74.84708063021316,
      "eval_BLEU-Trigram-Precision": 68.90927624872579,
      "eval_BLEU-Unigram-Precision": 81.34860050890586,
      "eval_ROUGE-2": 75.2339725526462,
      "eval_ROUGE-L": 89.68424579231484,
      "eval_Sacre-Bigram-Precision": 73.79434037465126,
      "eval_Sacre-Trigram-Precision": 68.54019434628975,
      "eval_Sacre-Unigram-Precision": 80.34613990576295,
      "eval_SacreBLEU": 71.54791264346096,
      "eval_loss": 0.24808737635612488,
      "eval_runtime": 1428.7921,
      "eval_samples_per_second": 0.7,
      "eval_steps_per_second": 0.7,
      "step": 16000
    },
    {
      "epoch": 1.03,
      "learning_rate": 9.441315789473686e-06,
      "loss": 0.1191,
      "step": 16500
    },
    {
      "epoch": 1.06,
      "learning_rate": 9.408486842105265e-06,
      "loss": 0.1805,
      "step": 17000
    },
    {
      "epoch": 1.09,
      "learning_rate": 9.375592105263158e-06,
      "loss": 0.1555,
      "step": 17500
    },
    {
      "epoch": 1.12,
      "learning_rate": 9.342697368421053e-06,
      "loss": 0.1624,
      "step": 18000
    },
    {
      "epoch": 1.16,
      "learning_rate": 9.309802631578949e-06,
      "loss": 0.1512,
      "step": 18500
    },
    {
      "epoch": 1.19,
      "learning_rate": 9.276907894736843e-06,
      "loss": 0.159,
      "step": 19000
    },
    {
      "epoch": 1.22,
      "learning_rate": 9.244013157894738e-06,
      "loss": 0.133,
      "step": 19500
    },
    {
      "epoch": 1.25,
      "learning_rate": 9.211118421052633e-06,
      "loss": 0.1277,
      "step": 20000
    },
    {
      "epoch": 1.28,
      "learning_rate": 9.178223684210527e-06,
      "loss": 0.1622,
      "step": 20500
    },
    {
      "epoch": 1.31,
      "learning_rate": 9.145394736842107e-06,
      "loss": 0.1263,
      "step": 21000
    },
    {
      "epoch": 1.34,
      "learning_rate": 9.112565789473685e-06,
      "loss": 0.1463,
      "step": 21500
    },
    {
      "epoch": 1.38,
      "learning_rate": 9.079671052631581e-06,
      "loss": 0.1262,
      "step": 22000
    },
    {
      "epoch": 1.41,
      "learning_rate": 9.046776315789474e-06,
      "loss": 0.1825,
      "step": 22500
    },
    {
      "epoch": 1.44,
      "learning_rate": 9.013881578947368e-06,
      "loss": 0.1582,
      "step": 23000
    },
    {
      "epoch": 1.47,
      "learning_rate": 8.981052631578948e-06,
      "loss": 0.1307,
      "step": 23500
    },
    {
      "epoch": 1.5,
      "learning_rate": 8.948157894736843e-06,
      "loss": 0.1333,
      "step": 24000
    },
    {
      "epoch": 1.53,
      "learning_rate": 8.915263157894739e-06,
      "loss": 0.1462,
      "step": 24500
    },
    {
      "epoch": 1.56,
      "learning_rate": 8.882368421052632e-06,
      "loss": 0.1527,
      "step": 25000
    },
    {
      "epoch": 1.59,
      "learning_rate": 8.849473684210526e-06,
      "loss": 0.1388,
      "step": 25500
    },
    {
      "epoch": 1.62,
      "learning_rate": 8.81657894736842e-06,
      "loss": 0.1202,
      "step": 26000
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.783684210526317e-06,
      "loss": 0.1144,
      "step": 26500
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.750789473684212e-06,
      "loss": 0.119,
      "step": 27000
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.717960526315791e-06,
      "loss": 0.1077,
      "step": 27500
    },
    {
      "epoch": 1.75,
      "learning_rate": 8.685065789473684e-06,
      "loss": 0.1226,
      "step": 28000
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.652171052631579e-06,
      "loss": 0.1432,
      "step": 28500
    },
    {
      "epoch": 1.81,
      "learning_rate": 8.619276315789475e-06,
      "loss": 0.1415,
      "step": 29000
    },
    {
      "epoch": 1.84,
      "learning_rate": 8.58638157894737e-06,
      "loss": 0.1462,
      "step": 29500
    },
    {
      "epoch": 1.88,
      "learning_rate": 8.553552631578949e-06,
      "loss": 0.1162,
      "step": 30000
    },
    {
      "epoch": 1.91,
      "learning_rate": 8.520657894736842e-06,
      "loss": 0.1478,
      "step": 30500
    },
    {
      "epoch": 1.94,
      "learning_rate": 8.487763157894737e-06,
      "loss": 0.1861,
      "step": 31000
    },
    {
      "epoch": 1.97,
      "learning_rate": 8.454868421052633e-06,
      "loss": 0.1166,
      "step": 31500
    },
    {
      "epoch": 2.0,
      "learning_rate": 8.421973684210527e-06,
      "loss": 0.1209,
      "step": 32000
    },
    {
      "epoch": 2.0,
      "eval_BLEU": 77.2930486436819,
      "eval_BLEU-Bigram-Precision": 79.7232309353224,
      "eval_BLEU-Trigram-Precision": 74.12314040612445,
      "eval_BLEU-Unigram-Precision": 85.70024130842792,
      "eval_ROUGE-2": 76.44781505694336,
      "eval_ROUGE-L": 90.63044702330345,
      "eval_Sacre-Bigram-Precision": 78.87249177195031,
      "eval_Sacre-Trigram-Precision": 74.09645692617609,
      "eval_Sacre-Unigram-Precision": 84.96016892216143,
      "eval_SacreBLEU": 76.88465263390154,
      "eval_loss": 0.24807412922382355,
      "eval_runtime": 1016.4629,
      "eval_samples_per_second": 0.984,
      "eval_steps_per_second": 0.984,
      "step": 32000
    },
    {
      "epoch": 2.03,
      "learning_rate": 8.389078947368422e-06,
      "loss": 0.1208,
      "step": 32500
    },
    {
      "epoch": 2.06,
      "learning_rate": 8.356184210526316e-06,
      "loss": 0.1029,
      "step": 33000
    },
    {
      "epoch": 2.09,
      "learning_rate": 8.323355263157894e-06,
      "loss": 0.1046,
      "step": 33500
    },
    {
      "epoch": 2.12,
      "learning_rate": 8.290526315789474e-06,
      "loss": 0.1182,
      "step": 34000
    },
    {
      "epoch": 2.16,
      "learning_rate": 8.257631578947369e-06,
      "loss": 0.1045,
      "step": 34500
    },
    {
      "epoch": 2.19,
      "learning_rate": 8.224736842105265e-06,
      "loss": 0.0835,
      "step": 35000
    },
    {
      "epoch": 2.22,
      "learning_rate": 8.191842105263158e-06,
      "loss": 0.1486,
      "step": 35500
    },
    {
      "epoch": 2.25,
      "learning_rate": 8.158947368421052e-06,
      "loss": 0.1506,
      "step": 36000
    },
    {
      "epoch": 2.28,
      "learning_rate": 8.126052631578949e-06,
      "loss": 0.1248,
      "step": 36500
    },
    {
      "epoch": 2.31,
      "learning_rate": 8.093157894736843e-06,
      "loss": 0.0867,
      "step": 37000
    },
    {
      "epoch": 2.34,
      "learning_rate": 8.060263157894738e-06,
      "loss": 0.0927,
      "step": 37500
    },
    {
      "epoch": 2.38,
      "learning_rate": 8.027434210526317e-06,
      "loss": 0.1175,
      "step": 38000
    },
    {
      "epoch": 2.41,
      "learning_rate": 7.99453947368421e-06,
      "loss": 0.1003,
      "step": 38500
    },
    {
      "epoch": 2.44,
      "learning_rate": 7.961644736842106e-06,
      "loss": 0.1343,
      "step": 39000
    },
    {
      "epoch": 2.47,
      "learning_rate": 7.928750000000001e-06,
      "loss": 0.1368,
      "step": 39500
    },
    {
      "epoch": 2.5,
      "learning_rate": 7.89592105263158e-06,
      "loss": 0.1264,
      "step": 40000
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.863026315789475e-06,
      "loss": 0.1269,
      "step": 40500
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.830131578947368e-06,
      "loss": 0.1214,
      "step": 41000
    },
    {
      "epoch": 2.59,
      "learning_rate": 7.797236842105264e-06,
      "loss": 0.1248,
      "step": 41500
    },
    {
      "epoch": 2.62,
      "learning_rate": 7.764342105263159e-06,
      "loss": 0.1191,
      "step": 42000
    },
    {
      "epoch": 2.66,
      "learning_rate": 7.731513157894738e-06,
      "loss": 0.1247,
      "step": 42500
    },
    {
      "epoch": 2.69,
      "learning_rate": 7.698618421052633e-06,
      "loss": 0.0996,
      "step": 43000
    },
    {
      "epoch": 2.72,
      "learning_rate": 7.665723684210526e-06,
      "loss": 0.135,
      "step": 43500
    },
    {
      "epoch": 2.75,
      "learning_rate": 7.632828947368422e-06,
      "loss": 0.1153,
      "step": 44000
    },
    {
      "epoch": 2.78,
      "learning_rate": 7.599934210526317e-06,
      "loss": 0.1054,
      "step": 44500
    },
    {
      "epoch": 2.81,
      "learning_rate": 7.567039473684211e-06,
      "loss": 0.1013,
      "step": 45000
    },
    {
      "epoch": 2.84,
      "learning_rate": 7.534210526315791e-06,
      "loss": 0.1396,
      "step": 45500
    },
    {
      "epoch": 2.88,
      "learning_rate": 7.501315789473685e-06,
      "loss": 0.1169,
      "step": 46000
    },
    {
      "epoch": 2.91,
      "learning_rate": 7.468421052631579e-06,
      "loss": 0.1182,
      "step": 46500
    },
    {
      "epoch": 2.94,
      "learning_rate": 7.435526315789474e-06,
      "loss": 0.108,
      "step": 47000
    },
    {
      "epoch": 2.97,
      "learning_rate": 7.402631578947369e-06,
      "loss": 0.1106,
      "step": 47500
    },
    {
      "epoch": 3.0,
      "learning_rate": 7.369802631578949e-06,
      "loss": 0.0983,
      "step": 48000
    },
    {
      "epoch": 3.0,
      "eval_BLEU": 76.73720363059847,
      "eval_BLEU-Bigram-Precision": 79.20062087698875,
      "eval_BLEU-Trigram-Precision": 73.47770154373929,
      "eval_BLEU-Unigram-Precision": 85.37318712415988,
      "eval_ROUGE-2": 76.72216094294652,
      "eval_ROUGE-L": 90.70575387918443,
      "eval_Sacre-Bigram-Precision": 78.34241408214585,
      "eval_Sacre-Trigram-Precision": 73.58710882765062,
      "eval_Sacre-Unigram-Precision": 84.60735963581183,
      "eval_SacreBLEU": 76.40271655688608,
      "eval_loss": 0.23823951184749603,
      "eval_runtime": 1594.3998,
      "eval_samples_per_second": 0.627,
      "eval_steps_per_second": 0.627,
      "step": 48000
    },
    {
      "epoch": 3.03,
      "learning_rate": 7.3369078947368425e-06,
      "loss": 0.1156,
      "step": 48500
    },
    {
      "epoch": 3.06,
      "learning_rate": 7.304078947368421e-06,
      "loss": 0.0855,
      "step": 49000
    },
    {
      "epoch": 3.09,
      "learning_rate": 7.271184210526317e-06,
      "loss": 0.0923,
      "step": 49500
    },
    {
      "epoch": 3.12,
      "learning_rate": 7.238289473684211e-06,
      "loss": 0.1026,
      "step": 50000
    },
    {
      "epoch": 3.16,
      "learning_rate": 7.205394736842106e-06,
      "loss": 0.1265,
      "step": 50500
    },
    {
      "epoch": 3.19,
      "learning_rate": 7.172500000000001e-06,
      "loss": 0.0995,
      "step": 51000
    },
    {
      "epoch": 3.22,
      "learning_rate": 7.139605263157895e-06,
      "loss": 0.0903,
      "step": 51500
    },
    {
      "epoch": 3.25,
      "learning_rate": 7.1067105263157895e-06,
      "loss": 0.0929,
      "step": 52000
    },
    {
      "epoch": 3.28,
      "learning_rate": 7.073815789473685e-06,
      "loss": 0.0773,
      "step": 52500
    },
    {
      "epoch": 3.31,
      "learning_rate": 7.0409210526315795e-06,
      "loss": 0.0851,
      "step": 53000
    },
    {
      "epoch": 3.34,
      "learning_rate": 7.008026315789475e-06,
      "loss": 0.1169,
      "step": 53500
    },
    {
      "epoch": 3.38,
      "learning_rate": 6.975131578947369e-06,
      "loss": 0.1006,
      "step": 54000
    },
    {
      "epoch": 3.41,
      "learning_rate": 6.942236842105263e-06,
      "loss": 0.0852,
      "step": 54500
    },
    {
      "epoch": 3.44,
      "learning_rate": 6.909407894736843e-06,
      "loss": 0.0989,
      "step": 55000
    },
    {
      "epoch": 3.47,
      "learning_rate": 6.876513157894737e-06,
      "loss": 0.0805,
      "step": 55500
    },
    {
      "epoch": 3.5,
      "learning_rate": 6.843684210526317e-06,
      "loss": 0.1044,
      "step": 56000
    },
    {
      "epoch": 3.53,
      "learning_rate": 6.810789473684211e-06,
      "loss": 0.0902,
      "step": 56500
    },
    {
      "epoch": 3.56,
      "learning_rate": 6.777894736842105e-06,
      "loss": 0.0765,
      "step": 57000
    },
    {
      "epoch": 3.59,
      "learning_rate": 6.745000000000001e-06,
      "loss": 0.0951,
      "step": 57500
    },
    {
      "epoch": 3.62,
      "learning_rate": 6.7121710526315794e-06,
      "loss": 0.1134,
      "step": 58000
    },
    {
      "epoch": 3.66,
      "learning_rate": 6.679342105263158e-06,
      "loss": 0.1051,
      "step": 58500
    },
    {
      "epoch": 3.69,
      "learning_rate": 6.646447368421053e-06,
      "loss": 0.0955,
      "step": 59000
    },
    {
      "epoch": 3.72,
      "learning_rate": 6.613552631578948e-06,
      "loss": 0.0964,
      "step": 59500
    },
    {
      "epoch": 3.75,
      "learning_rate": 6.580657894736843e-06,
      "loss": 0.1046,
      "step": 60000
    },
    {
      "epoch": 3.78,
      "learning_rate": 6.547763157894737e-06,
      "loss": 0.0741,
      "step": 60500
    },
    {
      "epoch": 3.81,
      "learning_rate": 6.514868421052633e-06,
      "loss": 0.0695,
      "step": 61000
    },
    {
      "epoch": 3.84,
      "learning_rate": 6.481973684210526e-06,
      "loss": 0.0885,
      "step": 61500
    },
    {
      "epoch": 3.88,
      "learning_rate": 6.449078947368421e-06,
      "loss": 0.0889,
      "step": 62000
    },
    {
      "epoch": 3.91,
      "learning_rate": 6.416184210526316e-06,
      "loss": 0.0942,
      "step": 62500
    },
    {
      "epoch": 3.94,
      "learning_rate": 6.383289473684211e-06,
      "loss": 0.103,
      "step": 63000
    },
    {
      "epoch": 3.97,
      "learning_rate": 6.350460526315791e-06,
      "loss": 0.0768,
      "step": 63500
    },
    {
      "epoch": 4.0,
      "learning_rate": 6.317565789473685e-06,
      "loss": 0.0773,
      "step": 64000
    },
    {
      "epoch": 4.0,
      "eval_BLEU": 75.32461616904307,
      "eval_BLEU-Bigram-Precision": 77.5221070647523,
      "eval_BLEU-Trigram-Precision": 72.33640939597315,
      "eval_BLEU-Unigram-Precision": 83.58947642615264,
      "eval_ROUGE-2": 75.57825134251199,
      "eval_ROUGE-L": 88.70065022361639,
      "eval_Sacre-Bigram-Precision": 76.6359918200409,
      "eval_Sacre-Trigram-Precision": 72.25821116035914,
      "eval_Sacre-Unigram-Precision": 82.69016697588125,
      "eval_SacreBLEU": 74.94210102448932,
      "eval_loss": 0.24377790093421936,
      "eval_runtime": 1720.9311,
      "eval_samples_per_second": 0.581,
      "eval_steps_per_second": 0.581,
      "step": 64000
    },
    {
      "epoch": 4.03,
      "learning_rate": 6.284671052631579e-06,
      "loss": 0.0805,
      "step": 64500
    },
    {
      "epoch": 4.06,
      "learning_rate": 6.251776315789474e-06,
      "loss": 0.0701,
      "step": 65000
    },
    {
      "epoch": 4.09,
      "learning_rate": 6.218881578947369e-06,
      "loss": 0.07,
      "step": 65500
    },
    {
      "epoch": 4.12,
      "learning_rate": 6.185986842105263e-06,
      "loss": 0.0785,
      "step": 66000
    },
    {
      "epoch": 4.16,
      "learning_rate": 6.153092105263159e-06,
      "loss": 0.0832,
      "step": 66500
    },
    {
      "epoch": 4.19,
      "learning_rate": 6.120197368421053e-06,
      "loss": 0.094,
      "step": 67000
    },
    {
      "epoch": 4.22,
      "learning_rate": 6.087368421052632e-06,
      "loss": 0.074,
      "step": 67500
    },
    {
      "epoch": 4.25,
      "learning_rate": 6.054539473684211e-06,
      "loss": 0.0915,
      "step": 68000
    },
    {
      "epoch": 4.28,
      "learning_rate": 6.021644736842106e-06,
      "loss": 0.0904,
      "step": 68500
    },
    {
      "epoch": 4.31,
      "learning_rate": 5.988750000000001e-06,
      "loss": 0.0586,
      "step": 69000
    },
    {
      "epoch": 4.34,
      "learning_rate": 5.955855263157895e-06,
      "loss": 0.0754,
      "step": 69500
    },
    {
      "epoch": 4.38,
      "learning_rate": 5.92296052631579e-06,
      "loss": 0.074,
      "step": 70000
    },
    {
      "epoch": 4.41,
      "learning_rate": 5.890131578947369e-06,
      "loss": 0.0723,
      "step": 70500
    },
    {
      "epoch": 4.44,
      "learning_rate": 5.857236842105264e-06,
      "loss": 0.0814,
      "step": 71000
    },
    {
      "epoch": 4.47,
      "learning_rate": 5.824342105263159e-06,
      "loss": 0.0623,
      "step": 71500
    },
    {
      "epoch": 4.5,
      "learning_rate": 5.7914473684210525e-06,
      "loss": 0.0859,
      "step": 72000
    },
    {
      "epoch": 4.53,
      "learning_rate": 5.758552631578948e-06,
      "loss": 0.0713,
      "step": 72500
    },
    {
      "epoch": 4.56,
      "learning_rate": 5.7256578947368425e-06,
      "loss": 0.0891,
      "step": 73000
    },
    {
      "epoch": 4.59,
      "learning_rate": 5.692763157894737e-06,
      "loss": 0.0661,
      "step": 73500
    },
    {
      "epoch": 4.62,
      "learning_rate": 5.6598684210526324e-06,
      "loss": 0.0893,
      "step": 74000
    },
    {
      "epoch": 4.66,
      "learning_rate": 5.62703947368421e-06,
      "loss": 0.0694,
      "step": 74500
    },
    {
      "epoch": 4.69,
      "learning_rate": 5.594144736842106e-06,
      "loss": 0.0836,
      "step": 75000
    },
    {
      "epoch": 4.72,
      "learning_rate": 5.56125e-06,
      "loss": 0.074,
      "step": 75500
    },
    {
      "epoch": 4.75,
      "learning_rate": 5.528355263157895e-06,
      "loss": 0.0798,
      "step": 76000
    },
    {
      "epoch": 4.78,
      "learning_rate": 5.4955263157894745e-06,
      "loss": 0.0899,
      "step": 76500
    },
    {
      "epoch": 4.81,
      "learning_rate": 5.4626973684210525e-06,
      "loss": 0.0717,
      "step": 77000
    },
    {
      "epoch": 4.84,
      "learning_rate": 5.429802631578948e-06,
      "loss": 0.0754,
      "step": 77500
    },
    {
      "epoch": 4.88,
      "learning_rate": 5.3969078947368424e-06,
      "loss": 0.0923,
      "step": 78000
    },
    {
      "epoch": 4.91,
      "learning_rate": 5.364013157894738e-06,
      "loss": 0.0679,
      "step": 78500
    },
    {
      "epoch": 4.94,
      "learning_rate": 5.331118421052632e-06,
      "loss": 0.0722,
      "step": 79000
    },
    {
      "epoch": 4.97,
      "learning_rate": 5.298223684210527e-06,
      "loss": 0.071,
      "step": 79500
    },
    {
      "epoch": 5.0,
      "learning_rate": 5.265328947368421e-06,
      "loss": 0.0836,
      "step": 80000
    },
    {
      "epoch": 5.0,
      "eval_BLEU": 80.51272222480867,
      "eval_BLEU-Bigram-Precision": 82.70449743434953,
      "eval_BLEU-Trigram-Precision": 77.53097443911152,
      "eval_BLEU-Unigram-Precision": 88.20733156595666,
      "eval_ROUGE-2": 77.48308477253742,
      "eval_ROUGE-L": 90.67140065243555,
      "eval_Sacre-Bigram-Precision": 82.0950723376482,
      "eval_Sacre-Trigram-Precision": 77.98611956654085,
      "eval_Sacre-Unigram-Precision": 87.60914352987345,
      "eval_SacreBLEU": 80.51595106600885,
      "eval_loss": 0.24554118514060974,
      "eval_runtime": 1564.2824,
      "eval_samples_per_second": 0.639,
      "eval_steps_per_second": 0.639,
      "step": 80000
    },
    {
      "epoch": 5.03,
      "learning_rate": 5.232434210526316e-06,
      "loss": 0.0516,
      "step": 80500
    },
    {
      "epoch": 5.06,
      "learning_rate": 5.199539473684211e-06,
      "loss": 0.0681,
      "step": 81000
    },
    {
      "epoch": 5.09,
      "learning_rate": 5.1667763157894745e-06,
      "loss": 0.066,
      "step": 81500
    },
    {
      "epoch": 5.12,
      "learning_rate": 5.133881578947368e-06,
      "loss": 0.0517,
      "step": 82000
    },
    {
      "epoch": 5.16,
      "learning_rate": 5.100986842105264e-06,
      "loss": 0.0446,
      "step": 82500
    },
    {
      "epoch": 5.19,
      "learning_rate": 5.068092105263158e-06,
      "loss": 0.0642,
      "step": 83000
    },
    {
      "epoch": 5.22,
      "learning_rate": 5.035263157894738e-06,
      "loss": 0.0708,
      "step": 83500
    },
    {
      "epoch": 5.25,
      "learning_rate": 5.002368421052632e-06,
      "loss": 0.0607,
      "step": 84000
    },
    {
      "epoch": 5.28,
      "learning_rate": 4.969473684210527e-06,
      "loss": 0.062,
      "step": 84500
    },
    {
      "epoch": 5.31,
      "learning_rate": 4.9365789473684215e-06,
      "loss": 0.0844,
      "step": 85000
    },
    {
      "epoch": 5.34,
      "learning_rate": 4.90375e-06,
      "loss": 0.0611,
      "step": 85500
    },
    {
      "epoch": 5.38,
      "learning_rate": 4.870855263157896e-06,
      "loss": 0.0702,
      "step": 86000
    },
    {
      "epoch": 5.41,
      "learning_rate": 4.837960526315789e-06,
      "loss": 0.048,
      "step": 86500
    },
    {
      "epoch": 5.44,
      "learning_rate": 4.805065789473685e-06,
      "loss": 0.0725,
      "step": 87000
    },
    {
      "epoch": 5.47,
      "learning_rate": 4.772171052631579e-06,
      "loss": 0.0492,
      "step": 87500
    },
    {
      "epoch": 5.5,
      "learning_rate": 4.739276315789474e-06,
      "loss": 0.0443,
      "step": 88000
    },
    {
      "epoch": 5.53,
      "learning_rate": 4.7063815789473685e-06,
      "loss": 0.074,
      "step": 88500
    },
    {
      "epoch": 5.56,
      "learning_rate": 4.673486842105264e-06,
      "loss": 0.0742,
      "step": 89000
    },
    {
      "epoch": 5.59,
      "learning_rate": 4.6405921052631585e-06,
      "loss": 0.0579,
      "step": 89500
    },
    {
      "epoch": 5.62,
      "learning_rate": 4.6078289473684215e-06,
      "loss": 0.0673,
      "step": 90000
    },
    {
      "epoch": 5.66,
      "learning_rate": 4.574934210526316e-06,
      "loss": 0.0685,
      "step": 90500
    },
    {
      "epoch": 5.69,
      "learning_rate": 4.5420394736842114e-06,
      "loss": 0.0708,
      "step": 91000
    },
    {
      "epoch": 5.72,
      "learning_rate": 4.509144736842105e-06,
      "loss": 0.076,
      "step": 91500
    },
    {
      "epoch": 5.75,
      "learning_rate": 4.476315789473685e-06,
      "loss": 0.0717,
      "step": 92000
    },
    {
      "epoch": 5.78,
      "learning_rate": 4.443421052631579e-06,
      "loss": 0.0781,
      "step": 92500
    },
    {
      "epoch": 5.81,
      "learning_rate": 4.410526315789474e-06,
      "loss": 0.0678,
      "step": 93000
    },
    {
      "epoch": 5.84,
      "learning_rate": 4.377631578947369e-06,
      "loss": 0.0584,
      "step": 93500
    },
    {
      "epoch": 5.88,
      "learning_rate": 4.344736842105263e-06,
      "loss": 0.0736,
      "step": 94000
    },
    {
      "epoch": 5.91,
      "learning_rate": 4.3118421052631584e-06,
      "loss": 0.0472,
      "step": 94500
    },
    {
      "epoch": 5.94,
      "learning_rate": 4.278947368421053e-06,
      "loss": 0.0676,
      "step": 95000
    },
    {
      "epoch": 5.97,
      "learning_rate": 4.246052631578948e-06,
      "loss": 0.0595,
      "step": 95500
    },
    {
      "epoch": 6.0,
      "learning_rate": 4.213223684210526e-06,
      "loss": 0.0567,
      "step": 96000
    },
    {
      "epoch": 6.0,
      "eval_BLEU": 81.21370204393791,
      "eval_BLEU-Bigram-Precision": 83.26283987915409,
      "eval_BLEU-Trigram-Precision": 78.32402234636872,
      "eval_BLEU-Unigram-Precision": 88.63677950594693,
      "eval_ROUGE-2": 77.3545431272267,
      "eval_ROUGE-L": 90.77000002140947,
      "eval_Sacre-Bigram-Precision": 82.65461672473867,
      "eval_Sacre-Trigram-Precision": 78.53486104339346,
      "eval_Sacre-Unigram-Precision": 88.0695208169678,
      "eval_SacreBLEU": 81.05092176649303,
      "eval_loss": 0.2411227524280548,
      "eval_runtime": 1329.304,
      "eval_samples_per_second": 0.752,
      "eval_steps_per_second": 0.752,
      "step": 96000
    },
    {
      "epoch": 6.03,
      "learning_rate": 4.180394736842105e-06,
      "loss": 0.0561,
      "step": 96500
    },
    {
      "epoch": 6.06,
      "learning_rate": 4.1475000000000005e-06,
      "loss": 0.0484,
      "step": 97000
    },
    {
      "epoch": 6.09,
      "learning_rate": 4.114605263157895e-06,
      "loss": 0.0649,
      "step": 97500
    },
    {
      "epoch": 6.12,
      "learning_rate": 4.08171052631579e-06,
      "loss": 0.0489,
      "step": 98000
    },
    {
      "epoch": 6.16,
      "learning_rate": 4.048815789473684e-06,
      "loss": 0.0418,
      "step": 98500
    },
    {
      "epoch": 6.19,
      "learning_rate": 4.01592105263158e-06,
      "loss": 0.0671,
      "step": 99000
    },
    {
      "epoch": 6.22,
      "learning_rate": 3.983026315789474e-06,
      "loss": 0.0496,
      "step": 99500
    },
    {
      "epoch": 6.25,
      "learning_rate": 3.950131578947369e-06,
      "loss": 0.064,
      "step": 100000
    },
    {
      "epoch": 6.28,
      "learning_rate": 3.9173026315789475e-06,
      "loss": 0.0633,
      "step": 100500
    },
    {
      "epoch": 6.31,
      "learning_rate": 3.884407894736842e-06,
      "loss": 0.0578,
      "step": 101000
    },
    {
      "epoch": 6.34,
      "learning_rate": 3.8515131578947375e-06,
      "loss": 0.0501,
      "step": 101500
    },
    {
      "epoch": 6.38,
      "learning_rate": 3.818618421052632e-06,
      "loss": 0.0517,
      "step": 102000
    },
    {
      "epoch": 6.41,
      "learning_rate": 3.7857236842105266e-06,
      "loss": 0.0502,
      "step": 102500
    },
    {
      "epoch": 6.44,
      "learning_rate": 3.7528947368421054e-06,
      "loss": 0.0603,
      "step": 103000
    },
    {
      "epoch": 6.47,
      "learning_rate": 3.7200000000000004e-06,
      "loss": 0.0485,
      "step": 103500
    },
    {
      "epoch": 6.5,
      "learning_rate": 3.6871052631578954e-06,
      "loss": 0.0532,
      "step": 104000
    },
    {
      "epoch": 6.53,
      "learning_rate": 3.654276315789474e-06,
      "loss": 0.0551,
      "step": 104500
    },
    {
      "epoch": 6.56,
      "learning_rate": 3.6213815789473687e-06,
      "loss": 0.0429,
      "step": 105000
    },
    {
      "epoch": 6.59,
      "learning_rate": 3.5884868421052633e-06,
      "loss": 0.0485,
      "step": 105500
    },
    {
      "epoch": 6.62,
      "learning_rate": 3.5555921052631583e-06,
      "loss": 0.0606,
      "step": 106000
    },
    {
      "epoch": 6.66,
      "learning_rate": 3.5226973684210532e-06,
      "loss": 0.046,
      "step": 106500
    },
    {
      "epoch": 6.69,
      "learning_rate": 3.4898026315789474e-06,
      "loss": 0.0398,
      "step": 107000
    },
    {
      "epoch": 6.72,
      "learning_rate": 3.4569078947368424e-06,
      "loss": 0.0516,
      "step": 107500
    },
    {
      "epoch": 6.75,
      "learning_rate": 3.4240131578947373e-06,
      "loss": 0.06,
      "step": 108000
    },
    {
      "epoch": 6.78,
      "learning_rate": 3.391118421052632e-06,
      "loss": 0.0507,
      "step": 108500
    },
    {
      "epoch": 6.81,
      "learning_rate": 3.3582236842105265e-06,
      "loss": 0.0651,
      "step": 109000
    },
    {
      "epoch": 6.84,
      "learning_rate": 3.3253947368421057e-06,
      "loss": 0.0502,
      "step": 109500
    },
    {
      "epoch": 6.88,
      "learning_rate": 3.2925000000000002e-06,
      "loss": 0.0455,
      "step": 110000
    },
    {
      "epoch": 6.91,
      "learning_rate": 3.2596052631578952e-06,
      "loss": 0.0467,
      "step": 110500
    },
    {
      "epoch": 6.94,
      "learning_rate": 3.2267105263157898e-06,
      "loss": 0.0571,
      "step": 111000
    },
    {
      "epoch": 6.97,
      "learning_rate": 3.1938157894736843e-06,
      "loss": 0.0546,
      "step": 111500
    },
    {
      "epoch": 7.0,
      "learning_rate": 3.1609210526315793e-06,
      "loss": 0.043,
      "step": 112000
    },
    {
      "epoch": 7.0,
      "eval_BLEU": 80.56639568650905,
      "eval_BLEU-Bigram-Precision": 82.55929838548933,
      "eval_BLEU-Trigram-Precision": 77.6121051468964,
      "eval_BLEU-Unigram-Precision": 88.20010875475802,
      "eval_ROUGE-2": 77.17811658990045,
      "eval_ROUGE-L": 90.74508068067605,
      "eval_Sacre-Bigram-Precision": 81.82111373898087,
      "eval_Sacre-Trigram-Precision": 77.62557077625571,
      "eval_Sacre-Unigram-Precision": 87.54610755193167,
      "eval_SacreBLEU": 80.26422111075566,
      "eval_loss": 0.2396911382675171,
      "eval_runtime": 1545.8252,
      "eval_samples_per_second": 0.647,
      "eval_steps_per_second": 0.647,
      "step": 112000
    },
    {
      "epoch": 7.03,
      "learning_rate": 3.1280263157894743e-06,
      "loss": 0.0353,
      "step": 112500
    },
    {
      "epoch": 7.06,
      "learning_rate": 3.0951315789473685e-06,
      "loss": 0.05,
      "step": 113000
    },
    {
      "epoch": 7.09,
      "learning_rate": 3.0623026315789477e-06,
      "loss": 0.0403,
      "step": 113500
    },
    {
      "epoch": 7.12,
      "learning_rate": 3.0294078947368422e-06,
      "loss": 0.0384,
      "step": 114000
    },
    {
      "epoch": 7.16,
      "learning_rate": 2.996513157894737e-06,
      "loss": 0.0492,
      "step": 114500
    },
    {
      "epoch": 7.19,
      "learning_rate": 2.9636184210526318e-06,
      "loss": 0.0382,
      "step": 115000
    },
    {
      "epoch": 7.22,
      "learning_rate": 2.9307236842105263e-06,
      "loss": 0.0582,
      "step": 115500
    },
    {
      "epoch": 7.25,
      "learning_rate": 2.8978289473684213e-06,
      "loss": 0.037,
      "step": 116000
    },
    {
      "epoch": 7.28,
      "learning_rate": 2.865e-06,
      "loss": 0.0326,
      "step": 116500
    },
    {
      "epoch": 7.31,
      "learning_rate": 2.832105263157895e-06,
      "loss": 0.0447,
      "step": 117000
    },
    {
      "epoch": 7.34,
      "learning_rate": 2.7992105263157896e-06,
      "loss": 0.0531,
      "step": 117500
    },
    {
      "epoch": 7.38,
      "learning_rate": 2.7663815789473684e-06,
      "loss": 0.0426,
      "step": 118000
    },
    {
      "epoch": 7.41,
      "learning_rate": 2.7334868421052634e-06,
      "loss": 0.0464,
      "step": 118500
    },
    {
      "epoch": 7.44,
      "learning_rate": 2.700592105263158e-06,
      "loss": 0.0491,
      "step": 119000
    },
    {
      "epoch": 7.47,
      "learning_rate": 2.667697368421053e-06,
      "loss": 0.0324,
      "step": 119500
    },
    {
      "epoch": 7.5,
      "learning_rate": 2.6348026315789475e-06,
      "loss": 0.0405,
      "step": 120000
    },
    {
      "epoch": 7.53,
      "learning_rate": 2.6019078947368425e-06,
      "loss": 0.0328,
      "step": 120500
    },
    {
      "epoch": 7.56,
      "learning_rate": 2.569013157894737e-06,
      "loss": 0.0456,
      "step": 121000
    },
    {
      "epoch": 7.59,
      "learning_rate": 2.536118421052632e-06,
      "loss": 0.0321,
      "step": 121500
    },
    {
      "epoch": 7.62,
      "learning_rate": 2.5032236842105266e-06,
      "loss": 0.0385,
      "step": 122000
    },
    {
      "epoch": 7.66,
      "learning_rate": 2.470328947368421e-06,
      "loss": 0.0385,
      "step": 122500
    },
    {
      "epoch": 7.69,
      "learning_rate": 2.437434210526316e-06,
      "loss": 0.0403,
      "step": 123000
    },
    {
      "epoch": 7.72,
      "learning_rate": 2.404605263157895e-06,
      "loss": 0.0563,
      "step": 123500
    },
    {
      "epoch": 7.75,
      "learning_rate": 2.3717105263157895e-06,
      "loss": 0.057,
      "step": 124000
    },
    {
      "epoch": 7.78,
      "learning_rate": 2.3388815789473687e-06,
      "loss": 0.0463,
      "step": 124500
    },
    {
      "epoch": 7.81,
      "learning_rate": 2.3059868421052633e-06,
      "loss": 0.0449,
      "step": 125000
    },
    {
      "epoch": 7.84,
      "learning_rate": 2.273092105263158e-06,
      "loss": 0.04,
      "step": 125500
    },
    {
      "epoch": 7.88,
      "learning_rate": 2.240197368421053e-06,
      "loss": 0.0447,
      "step": 126000
    },
    {
      "epoch": 7.91,
      "learning_rate": 2.2073026315789474e-06,
      "loss": 0.0557,
      "step": 126500
    },
    {
      "epoch": 7.94,
      "learning_rate": 2.1744078947368424e-06,
      "loss": 0.0602,
      "step": 127000
    },
    {
      "epoch": 7.97,
      "learning_rate": 2.141513157894737e-06,
      "loss": 0.0383,
      "step": 127500
    },
    {
      "epoch": 8.0,
      "learning_rate": 2.108618421052632e-06,
      "loss": 0.0491,
      "step": 128000
    },
    {
      "epoch": 8.0,
      "eval_BLEU": 78.11044908486078,
      "eval_BLEU-Bigram-Precision": 80.32311115410661,
      "eval_BLEU-Trigram-Precision": 75.1309180292829,
      "eval_BLEU-Unigram-Precision": 85.89573961365441,
      "eval_ROUGE-2": 76.73896747691722,
      "eval_ROUGE-L": 90.4316320530482,
      "eval_Sacre-Bigram-Precision": 79.53764448609809,
      "eval_Sacre-Trigram-Precision": 75.15945726545286,
      "eval_Sacre-Unigram-Precision": 85.18343864943884,
      "eval_SacreBLEU": 77.77903164926069,
      "eval_loss": 0.24439701437950134,
      "eval_runtime": 1491.254,
      "eval_samples_per_second": 0.671,
      "eval_steps_per_second": 0.671,
      "step": 128000
    },
    {
      "epoch": 8.03,
      "learning_rate": 2.0757236842105265e-06,
      "loss": 0.0393,
      "step": 128500
    },
    {
      "epoch": 8.06,
      "learning_rate": 2.0428289473684215e-06,
      "loss": 0.0466,
      "step": 129000
    },
    {
      "epoch": 8.09,
      "learning_rate": 2.009934210526316e-06,
      "loss": 0.0377,
      "step": 129500
    },
    {
      "epoch": 8.12,
      "learning_rate": 1.977105263157895e-06,
      "loss": 0.0413,
      "step": 130000
    },
    {
      "epoch": 8.16,
      "learning_rate": 1.9442105263157894e-06,
      "loss": 0.0394,
      "step": 130500
    },
    {
      "epoch": 8.19,
      "learning_rate": 1.9113157894736843e-06,
      "loss": 0.03,
      "step": 131000
    },
    {
      "epoch": 8.22,
      "learning_rate": 1.8784210526315791e-06,
      "loss": 0.0286,
      "step": 131500
    },
    {
      "epoch": 8.25,
      "learning_rate": 1.8455263157894737e-06,
      "loss": 0.0403,
      "step": 132000
    },
    {
      "epoch": 8.28,
      "learning_rate": 1.8126973684210527e-06,
      "loss": 0.0543,
      "step": 132500
    },
    {
      "epoch": 8.31,
      "learning_rate": 1.7798026315789474e-06,
      "loss": 0.0362,
      "step": 133000
    },
    {
      "epoch": 8.34,
      "learning_rate": 1.7469078947368422e-06,
      "loss": 0.0321,
      "step": 133500
    },
    {
      "epoch": 8.38,
      "learning_rate": 1.714013157894737e-06,
      "loss": 0.0306,
      "step": 134000
    },
    {
      "epoch": 8.41,
      "learning_rate": 1.6811184210526318e-06,
      "loss": 0.0385,
      "step": 134500
    },
    {
      "epoch": 8.44,
      "learning_rate": 1.6482236842105265e-06,
      "loss": 0.0347,
      "step": 135000
    },
    {
      "epoch": 8.47,
      "learning_rate": 1.615328947368421e-06,
      "loss": 0.0352,
      "step": 135500
    },
    {
      "epoch": 8.5,
      "learning_rate": 1.5825e-06,
      "loss": 0.0437,
      "step": 136000
    },
    {
      "epoch": 8.53,
      "learning_rate": 1.5496052631578949e-06,
      "loss": 0.0324,
      "step": 136500
    },
    {
      "epoch": 8.56,
      "learning_rate": 1.5167105263157896e-06,
      "loss": 0.0354,
      "step": 137000
    },
    {
      "epoch": 8.59,
      "learning_rate": 1.4838157894736844e-06,
      "loss": 0.0367,
      "step": 137500
    },
    {
      "epoch": 8.62,
      "learning_rate": 1.450921052631579e-06,
      "loss": 0.0408,
      "step": 138000
    },
    {
      "epoch": 8.66,
      "learning_rate": 1.418026315789474e-06,
      "loss": 0.0543,
      "step": 138500
    },
    {
      "epoch": 8.69,
      "learning_rate": 1.3851315789473685e-06,
      "loss": 0.0245,
      "step": 139000
    },
    {
      "epoch": 8.72,
      "learning_rate": 1.3523026315789475e-06,
      "loss": 0.0283,
      "step": 139500
    },
    {
      "epoch": 8.75,
      "learning_rate": 1.319407894736842e-06,
      "loss": 0.0353,
      "step": 140000
    },
    {
      "epoch": 8.78,
      "learning_rate": 1.286513157894737e-06,
      "loss": 0.0442,
      "step": 140500
    },
    {
      "epoch": 8.81,
      "learning_rate": 1.2536184210526316e-06,
      "loss": 0.0396,
      "step": 141000
    },
    {
      "epoch": 8.84,
      "learning_rate": 1.2207236842105264e-06,
      "loss": 0.0382,
      "step": 141500
    },
    {
      "epoch": 8.88,
      "learning_rate": 1.1878289473684212e-06,
      "loss": 0.0393,
      "step": 142000
    },
    {
      "epoch": 8.91,
      "learning_rate": 1.154934210526316e-06,
      "loss": 0.0279,
      "step": 142500
    },
    {
      "epoch": 8.94,
      "learning_rate": 1.1220394736842107e-06,
      "loss": 0.0368,
      "step": 143000
    },
    {
      "epoch": 8.97,
      "learning_rate": 1.0891447368421055e-06,
      "loss": 0.0339,
      "step": 143500
    },
    {
      "epoch": 9.0,
      "learning_rate": 1.0563157894736843e-06,
      "loss": 0.0367,
      "step": 144000
    },
    {
      "epoch": 9.0,
      "eval_BLEU": 78.72613610038634,
      "eval_BLEU-Bigram-Precision": 80.66982867099023,
      "eval_BLEU-Trigram-Precision": 75.83146187573522,
      "eval_BLEU-Unigram-Precision": 86.24128497043509,
      "eval_ROUGE-2": 77.10014496108923,
      "eval_ROUGE-L": 90.75201858392994,
      "eval_Sacre-Bigram-Precision": 79.9499791579825,
      "eval_Sacre-Trigram-Precision": 75.83565459610028,
      "eval_Sacre-Unigram-Precision": 85.47565118912797,
      "eval_SacreBLEU": 78.39445859677504,
      "eval_loss": 0.2408515363931656,
      "eval_runtime": 1473.0938,
      "eval_samples_per_second": 0.679,
      "eval_steps_per_second": 0.679,
      "step": 144000
    },
    {
      "epoch": 9.03,
      "learning_rate": 1.0234868421052633e-06,
      "loss": 0.0404,
      "step": 144500
    },
    {
      "epoch": 9.06,
      "learning_rate": 9.90592105263158e-07,
      "loss": 0.0337,
      "step": 145000
    },
    {
      "epoch": 9.09,
      "learning_rate": 9.576973684210528e-07,
      "loss": 0.0426,
      "step": 145500
    },
    {
      "epoch": 9.12,
      "learning_rate": 9.248026315789474e-07,
      "loss": 0.0408,
      "step": 146000
    },
    {
      "epoch": 9.16,
      "learning_rate": 8.919078947368421e-07,
      "loss": 0.0347,
      "step": 146500
    },
    {
      "epoch": 9.19,
      "learning_rate": 8.59078947368421e-07,
      "loss": 0.0392,
      "step": 147000
    },
    {
      "epoch": 9.22,
      "learning_rate": 8.261842105263158e-07,
      "loss": 0.0339,
      "step": 147500
    },
    {
      "epoch": 9.25,
      "learning_rate": 7.932894736842106e-07,
      "loss": 0.0371,
      "step": 148000
    },
    {
      "epoch": 9.28,
      "learning_rate": 7.603947368421054e-07,
      "loss": 0.0375,
      "step": 148500
    },
    {
      "epoch": 9.31,
      "learning_rate": 7.275e-07,
      "loss": 0.0359,
      "step": 149000
    },
    {
      "epoch": 9.34,
      "learning_rate": 6.946052631578948e-07,
      "loss": 0.0241,
      "step": 149500
    },
    {
      "epoch": 9.38,
      "learning_rate": 6.617105263157896e-07,
      "loss": 0.031,
      "step": 150000
    },
    {
      "epoch": 9.41,
      "learning_rate": 6.288815789473685e-07,
      "loss": 0.0294,
      "step": 150500
    },
    {
      "epoch": 9.44,
      "learning_rate": 5.959868421052632e-07,
      "loss": 0.0371,
      "step": 151000
    },
    {
      "epoch": 9.47,
      "learning_rate": 5.630921052631579e-07,
      "loss": 0.0263,
      "step": 151500
    },
    {
      "epoch": 9.5,
      "learning_rate": 5.301973684210527e-07,
      "loss": 0.049,
      "step": 152000
    },
    {
      "epoch": 9.53,
      "learning_rate": 4.973026315789474e-07,
      "loss": 0.033,
      "step": 152500
    },
    {
      "epoch": 9.56,
      "learning_rate": 4.6440789473684216e-07,
      "loss": 0.045,
      "step": 153000
    },
    {
      "epoch": 9.59,
      "learning_rate": 4.315131578947369e-07,
      "loss": 0.0464,
      "step": 153500
    },
    {
      "epoch": 9.62,
      "learning_rate": 3.986842105263158e-07,
      "loss": 0.0353,
      "step": 154000
    },
    {
      "epoch": 9.66,
      "learning_rate": 3.6578947368421055e-07,
      "loss": 0.0365,
      "step": 154500
    },
    {
      "epoch": 9.69,
      "learning_rate": 3.328947368421053e-07,
      "loss": 0.0335,
      "step": 155000
    },
    {
      "epoch": 9.72,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 0.0404,
      "step": 155500
    },
    {
      "epoch": 9.75,
      "learning_rate": 2.67171052631579e-07,
      "loss": 0.0374,
      "step": 156000
    },
    {
      "epoch": 9.78,
      "learning_rate": 2.3427631578947373e-07,
      "loss": 0.0366,
      "step": 156500
    },
    {
      "epoch": 9.81,
      "learning_rate": 2.0138157894736842e-07,
      "loss": 0.0245,
      "step": 157000
    },
    {
      "epoch": 9.84,
      "learning_rate": 1.6848684210526317e-07,
      "loss": 0.039,
      "step": 157500
    },
    {
      "epoch": 9.88,
      "learning_rate": 1.3559210526315791e-07,
      "loss": 0.0292,
      "step": 158000
    },
    {
      "epoch": 9.91,
      "learning_rate": 1.0269736842105265e-07,
      "loss": 0.0361,
      "step": 158500
    },
    {
      "epoch": 9.94,
      "learning_rate": 6.980263157894737e-08,
      "loss": 0.0372,
      "step": 159000
    },
    {
      "epoch": 9.97,
      "learning_rate": 3.690789473684211e-08,
      "loss": 0.0211,
      "step": 159500
    },
    {
      "epoch": 10.0,
      "learning_rate": 4.0131578947368425e-09,
      "loss": 0.0354,
      "step": 160000
    },
    {
      "epoch": 10.0,
      "eval_BLEU": 78.74942323410538,
      "eval_BLEU-Bigram-Precision": 80.77783179387458,
      "eval_BLEU-Trigram-Precision": 75.84094572810316,
      "eval_BLEU-Unigram-Precision": 86.30926007975188,
      "eval_ROUGE-2": 77.17672035094623,
      "eval_ROUGE-L": 90.75273654124103,
      "eval_Sacre-Bigram-Precision": 80.08164119740422,
      "eval_Sacre-Trigram-Precision": 75.89223233030091,
      "eval_Sacre-Unigram-Precision": 85.5694523403449,
      "eval_SacreBLEU": 78.4509112113324,
      "eval_loss": 0.2423561066389084,
      "eval_runtime": 1552.9635,
      "eval_samples_per_second": 0.644,
      "eval_steps_per_second": 0.644,
      "step": 160000
    },
    {
      "epoch": 10.0,
      "step": 160000,
      "total_flos": 2093466255360000.0,
      "train_loss": 0.08072224444150924,
      "train_runtime": 26837.0061,
      "train_samples_per_second": 5.962,
      "train_steps_per_second": 5.962
    }
  ],
  "max_steps": 160000,
  "num_train_epochs": 10,
  "total_flos": 2093466255360000.0,
  "trial_name": null,
  "trial_params": null
}