TinyLlama-1.1B-Chat-rust-cpp-encodings/LORAs/tinyllama-encoder_4e-5/checkpoint-4452/trainer_state.json
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.0,
  "eval_steps": 500,
  "global_step": 4452,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.01, "learning_rate": 3.999950204782701e-05, "loss": 1.7807, "step": 10},
    {"epoch": 0.03, "learning_rate": 3.999800821610369e-05, "loss": 1.781, "step": 20},
    {"epoch": 0.04, "learning_rate": 3.999551857921571e-05, "loss": 1.8258, "step": 30},
    {"epoch": 0.05, "learning_rate": 3.999203326113507e-05, "loss": 1.5688, "step": 40},
    {"epoch": 0.07, "learning_rate": 3.9987552435413944e-05, "loss": 1.5824, "step": 50},
    {"epoch": 0.08, "learning_rate": 3.9982076325176035e-05, "loss": 1.7159, "step": 60},
    {"epoch": 0.09, "learning_rate": 3.9975605203105434e-05, "loss": 1.7095, "step": 70},
    {"epoch": 0.11, "learning_rate": 3.996813939143307e-05, "loss": 1.6194, "step": 80},
    {"epoch": 0.12, "learning_rate": 3.9959679261920665e-05, "loss": 1.6602, "step": 90},
    {"epoch": 0.13, "learning_rate": 3.995022523584219e-05, "loss": 1.6734, "step": 100},
    {"epoch": 0.15, "learning_rate": 3.9939777783962946e-05, "loss": 1.715, "step": 110},
    {"epoch": 0.16, "learning_rate": 3.992833742651606e-05, "loss": 1.7322, "step": 120},
    {"epoch": 0.18, "learning_rate": 3.9915904733176614e-05, "loss": 1.6645, "step": 130},
    {"epoch": 0.19, "learning_rate": 3.9902480323033285e-05, "loss": 1.6249, "step": 140},
    {"epoch": 0.2, "learning_rate": 3.9888064864557486e-05, "loss": 1.6279, "step": 150},
    {"epoch": 0.22, "learning_rate": 3.987265907557011e-05, "loss": 1.5166, "step": 160},
    {"epoch": 0.23, "learning_rate": 3.9856263723205755e-05, "loss": 1.4946, "step": 170},
    {"epoch": 0.24, "learning_rate": 3.983887962387457e-05, "loss": 1.5583, "step": 180},
    {"epoch": 0.26, "learning_rate": 3.982050764322154e-05, "loss": 1.526, "step": 190},
    {"epoch": 0.27, "learning_rate": 3.9801148696083455e-05, "loss": 1.6003, "step": 200},
    {"epoch": 0.28, "learning_rate": 3.9780803746443284e-05, "loss": 1.6403, "step": 210},
    {"epoch": 0.3, "learning_rate": 3.9759473807382214e-05, "loss": 1.5128, "step": 220},
    {"epoch": 0.31, "learning_rate": 3.97371599410292e-05, "loss": 1.4739, "step": 230},
    {"epoch": 0.32, "learning_rate": 3.9713863258508064e-05, "loss": 1.5466, "step": 240},
    {"epoch": 0.34, "learning_rate": 3.968958491988216e-05, "loss": 1.5307, "step": 250},
    {"epoch": 0.35, "learning_rate": 3.966432613409667e-05, "loss": 1.4508, "step": 260},
    {"epoch": 0.36, "learning_rate": 3.9638088158918285e-05, "loss": 1.5163, "step": 270},
    {"epoch": 0.38, "learning_rate": 3.9610872300872704e-05, "loss": 1.5583, "step": 280},
    {"epoch": 0.39, "learning_rate": 3.958267991517948e-05, "loss": 1.3893, "step": 290},
    {"epoch": 0.4, "learning_rate": 3.955351240568459e-05, "loss": 1.4718, "step": 300},
    {"epoch": 0.42, "learning_rate": 3.9523371224790505e-05, "loss": 1.479, "step": 310},
    {"epoch": 0.43, "learning_rate": 3.949225787338388e-05, "loss": 1.4874, "step": 320},
    {"epoch": 0.44, "learning_rate": 3.946017390076081e-05, "loss": 1.2723, "step": 330},
    {"epoch": 0.46, "learning_rate": 3.942712090454968e-05, "loss": 1.3925, "step": 340},
    {"epoch": 0.47, "learning_rate": 3.939310053063161e-05, "loss": 1.3833, "step": 350},
    {"epoch": 0.49, "learning_rate": 3.935811447305853e-05, "loss": 1.4989, "step": 360},
    {"epoch": 0.5, "learning_rate": 3.9322164473968774e-05, "loss": 1.4961, "step": 370},
    {"epoch": 0.51, "learning_rate": 3.928525232350035e-05, "loss": 1.5295, "step": 380},
    {"epoch": 0.53, "learning_rate": 3.924737985970182e-05, "loss": 1.4051, "step": 390},
    {"epoch": 0.54, "learning_rate": 3.920854896844074e-05, "loss": 1.3306, "step": 400},
    {"epoch": 0.55, "learning_rate": 3.916876158330979e-05, "loss": 1.3522, "step": 410},
    {"epoch": 0.57, "learning_rate": 3.912801968553045e-05, "loss": 1.4479, "step": 420},
    {"epoch": 0.58, "learning_rate": 3.908632530385438e-05, "loss": 1.5481, "step": 430},
    {"epoch": 0.59, "learning_rate": 3.9043680514462366e-05, "loss": 1.5375, "step": 440},
    {"epoch": 0.61, "learning_rate": 3.900008744086097e-05, "loss": 1.321, "step": 450},
    {"epoch": 0.62, "learning_rate": 3.895554825377676e-05, "loss": 1.4584, "step": 460},
    {"epoch": 0.63, "learning_rate": 3.891006517104823e-05, "loss": 1.4188, "step": 470},
    {"epoch": 0.65, "learning_rate": 3.886364045751538e-05, "loss": 1.2557, "step": 480},
    {"epoch": 0.66, "learning_rate": 3.881627642490691e-05, "loss": 1.412, "step": 490},
    {"epoch": 0.67, "learning_rate": 3.876797543172511e-05, "loss": 1.3267, "step": 500},
    {"epoch": 0.69, "learning_rate": 3.871873988312842e-05, "loss": 1.3768, "step": 510},
    {"epoch": 0.7, "learning_rate": 3.86685722308117e-05, "loss": 1.5065, "step": 520},
    {"epoch": 0.71, "learning_rate": 3.861747497288409e-05, "loss": 1.382, "step": 530},
    {"epoch": 0.73, "learning_rate": 3.856545065374465e-05, "loss": 1.3336, "step": 540},
    {"epoch": 0.74, "learning_rate": 3.851250186395565e-05, "loss": 1.2626, "step": 550},
    {"epoch": 0.75, "learning_rate": 3.845863124011361e-05, "loss": 1.4174, "step": 560},
    {"epoch": 0.77, "learning_rate": 3.840384146471792e-05, "loss": 1.3371, "step": 570},
    {"epoch": 0.78, "learning_rate": 3.8348135266037364e-05, "loss": 1.3496, "step": 580},
    {"epoch": 0.8, "learning_rate": 3.829151541797421e-05, "loss": 1.2245, "step": 590},
    {"epoch": 0.81, "learning_rate": 3.82339847399261e-05, "loss": 1.2381, "step": 600},
    {"epoch": 0.82, "learning_rate": 3.817554609664564e-05, "loss": 1.2805, "step": 610},
    {"epoch": 0.84, "learning_rate": 3.811620239809778e-05, "loss": 1.2055, "step": 620},
    {"epoch": 0.85, "learning_rate": 3.805595659931487e-05, "loss": 1.3493, "step": 630},
    {"epoch": 0.86, "learning_rate": 3.799481170024957e-05, "loss": 1.4545, "step": 640},
    {"epoch": 0.88, "learning_rate": 3.7932770745625406e-05, "loss": 1.3633, "step": 650},
    {"epoch": 0.89, "learning_rate": 3.786983682478519e-05, "loss": 1.2677, "step": 660},
    {"epoch": 0.9, "learning_rate": 3.78060130715372e-05, "loss": 1.2218, "step": 670},
    {"epoch": 0.92, "learning_rate": 3.7741302663999085e-05, "loss": 1.4738, "step": 680},
    {"epoch": 0.93, "learning_rate": 3.7675708824439656e-05, "loss": 1.191, "step": 690},
    {"epoch": 0.94, "learning_rate": 3.76092348191184e-05, "loss": 1.2747, "step": 700},
    {"epoch": 0.96, "learning_rate": 3.7541883958122864e-05, "loss": 1.2833, "step": 710},
    {"epoch": 0.97, "learning_rate": 3.7473659595203806e-05, "loss": 1.3725, "step": 720},
    {"epoch": 0.98, "learning_rate": 3.74045651276082e-05, "loss": 1.31, "step": 730},
    {"epoch": 1.0, "learning_rate": 3.7334603995910075e-05, "loss": 1.2406, "step": 740},
    {"epoch": 1.01, "learning_rate": 3.726377968383919e-05, "loss": 1.1966, "step": 750},
    {"epoch": 1.02, "learning_rate": 3.719209571810755e-05, "loss": 1.2898, "step": 760},
    {"epoch": 1.04, "learning_rate": 3.711955566823381e-05, "loss": 1.3516, "step": 770},
    {"epoch": 1.05, "learning_rate": 3.704616314636551e-05, "loss": 1.4107, "step": 780},
    {"epoch": 1.06, "learning_rate": 3.697192180709923e-05, "loss": 1.3388, "step": 790},
    {"epoch": 1.08, "learning_rate": 3.6896835347298583e-05, "loss": 1.1733, "step": 800},
    {"epoch": 1.09, "learning_rate": 3.682090750591016e-05, "loss": 1.3502, "step": 810},
    {"epoch": 1.11, "learning_rate": 3.674414206377732e-05, "loss": 1.3909, "step": 820},
    {"epoch": 1.12, "learning_rate": 3.6666542843451936e-05, "loss": 1.2328, "step": 830},
    {"epoch": 1.13, "learning_rate": 3.658811370900404e-05, "loss": 1.1578, "step": 840},
    {"epoch": 1.15, "learning_rate": 3.650885856582943e-05, "loss": 1.3599, "step": 850},
    {"epoch": 1.16, "learning_rate": 3.6428781360455176e-05, "loss": 1.1904, "step": 860},
    {"epoch": 1.17, "learning_rate": 3.6347886080343135e-05, "loss": 1.2668, "step": 870},
    {"epoch": 1.19, "learning_rate": 3.626617675369135e-05, "loss": 1.215, "step": 880},
    {"epoch": 1.2, "learning_rate": 3.6183657449233484e-05, "loss": 1.2978, "step": 890},
    {"epoch": 1.21, "learning_rate": 3.610033227603626e-05, "loss": 1.3659, "step": 900},
    {"epoch": 1.23, "learning_rate": 3.601620538329476e-05, "loss": 1.2659, "step": 910},
    {"epoch": 1.24, "learning_rate": 3.593128096012589e-05, "loss": 1.2425, "step": 920},
    {"epoch": 1.25, "learning_rate": 3.584556323535978e-05, "loss": 1.362, "step": 930},
    {"epoch": 1.27, "learning_rate": 3.5759056477329135e-05, "loss": 1.1444, "step": 940},
    {"epoch": 1.28, "learning_rate": 3.5671764993656784e-05, "loss": 1.3044, "step": 950},
    {"epoch": 1.29, "learning_rate": 3.558369313104112e-05, "loss": 1.288, "step": 960},
    {"epoch": 1.31, "learning_rate": 3.5494845275039676e-05, "loss": 1.338, "step": 970},
    {"epoch": 1.32, "learning_rate": 3.5405225849850754e-05, "loss": 1.4229, "step": 980},
    {"epoch": 1.33, "learning_rate": 3.531483931809311e-05, "loss": 1.2542, "step": 990},
    {"epoch": 1.35, "learning_rate": 3.5223690180583717e-05, "loss": 1.3793, "step": 1000},
    {"epoch": 1.36, "learning_rate": 3.513178297611369e-05, "loss": 1.2778, "step": 1010},
    {"epoch": 1.37, "learning_rate": 3.503912228122226e-05, "loss": 1.384, "step": 1020},
    {"epoch": 1.39, "learning_rate": 3.494571270996885e-05, "loss": 1.1319, "step": 1030},
    {"epoch": 1.4, "learning_rate": 3.4851558913703367e-05, "loss": 1.312, "step": 1040},
    {"epoch": 1.42, "learning_rate": 3.475666558083455e-05, "loss": 1.1727, "step": 1050},
    {"epoch": 1.43, "learning_rate": 3.4661037436596526e-05, "loss": 1.1568, "step": 1060},
    {"epoch": 1.44, "learning_rate": 3.456467924281353e-05, "loss": 1.2801, "step": 1070},
    {"epoch": 1.46, "learning_rate": 3.446759579766275e-05, "loss": 1.2211, "step": 1080},
    {"epoch": 1.47, "learning_rate": 3.436979193543543e-05, "loss": 1.2206, "step": 1090},
    {"epoch": 1.48, "learning_rate": 3.427127252629616e-05, "loss": 1.2377, "step": 1100},
    {"epoch": 1.5, "learning_rate": 3.417204247604031e-05, "loss": 1.2548, "step": 1110},
    {"epoch": 1.51, "learning_rate": 3.407210672584979e-05, "loss": 1.1323, "step": 1120},
    {"epoch": 1.52, "learning_rate": 3.397147025204701e-05, "loss": 1.1272, "step": 1130},
    {"epoch": 1.54, "learning_rate": 3.387013806584705e-05, "loss": 1.1798, "step": 1140},
    {"epoch": 1.55, "learning_rate": 3.376811521310814e-05, "loss": 1.4287, "step": 1150},
    {"epoch": 1.56, "learning_rate": 3.36654067740804e-05, "loss": 1.1023, "step": 1160},
    {"epoch": 1.58, "learning_rate": 3.3562017863152867e-05, "loss": 1.3098, "step": 1170},
    {"epoch": 1.59, "learning_rate": 3.3457953628598826e-05, "loss": 1.3028, "step": 1180},
    {"epoch": 1.6, "learning_rate": 3.335321925231946e-05, "loss": 1.1454, "step": 1190},
    {"epoch": 1.62, "learning_rate": 3.3247819949585776e-05, "loss": 1.1599, "step": 1200},
    {"epoch": 1.63, "learning_rate": 3.314176096877898e-05, "loss": 1.2164, "step": 1210},
    {"epoch": 1.64, "learning_rate": 3.3035047591129054e-05, "loss": 1.266, "step": 1220},
    {"epoch": 1.66, "learning_rate": 3.292768513045183e-05, "loss": 1.2517, "step": 1230},
    {"epoch": 1.67, "learning_rate": 3.281967893288436e-05, "loss": 1.1866, "step": 1240},
    {"epoch": 1.68, "learning_rate": 3.271103437661873e-05, "loss": 1.1079, "step": 1250},
    {"epoch": 1.7, "learning_rate": 3.260175687163423e-05, "loss": 1.2, "step": 1260},
    {"epoch": 1.71, "learning_rate": 3.249185185942795e-05, "loss": 1.1048, "step": 1270},
    {"epoch": 1.73, "learning_rate": 3.2381324812743875e-05, "loss": 1.3801, "step": 1280},
    {"epoch": 1.74, "learning_rate": 3.22701812353003e-05, "loss": 1.1595, "step": 1290},
    {"epoch": 1.75, "learning_rate": 3.215842666151582e-05, "loss": 0.9625, "step": 1300},
    {"epoch": 1.77, "learning_rate": 3.2046066656233726e-05, "loss": 1.0588, "step": 1310},
    {"epoch": 1.78, "learning_rate": 3.193310681444488e-05, "loss": 1.1553, "step": 1320},
    {"epoch": 1.79, "learning_rate": 3.181955276100917e-05, "loss": 1.0569, "step": 1330},
    {"epoch": 1.81, "learning_rate": 3.170541015037535e-05, "loss": 1.1522, "step": 1340},
    {"epoch": 1.82, "learning_rate": 3.159068466629951e-05, "loss": 1.0199, "step": 1350},
    {"epoch": 1.83, "learning_rate": 3.147538202156208e-05, "loss": 1.2719, "step": 1360},
    {"epoch": 1.85, "learning_rate": 3.135950795768331e-05, "loss": 1.1117, "step": 1370},
    {"epoch": 1.86, "learning_rate": 3.1243068244637364e-05, "loss": 1.0545, "step": 1380},
    {"epoch": 1.87, "learning_rate": 3.112606868056508e-05, "loss": 1.1852, "step": 1390},
    {"epoch": 1.89, "learning_rate": 3.100851509148517e-05, "loss": 1.0035, "step": 1400},
    {"epoch": 1.9, "learning_rate": 3.089041333100414e-05, "loss": 1.2372, "step": 1410},
    {"epoch": 1.91, "learning_rate": 3.077176928002482e-05, "loss": 1.2852, "step": 1420},
    {"epoch": 1.93, "learning_rate": 3.065258884645351e-05, "loss": 1.1826, "step": 1430},
    {"epoch": 1.94, "learning_rate": 3.05328779649058e-05, "loss": 1.2395, "step": 1440},
    {"epoch": 1.95, "learning_rate": 3.041264259641104e-05, "loss": 1.3701, "step": 1450},
    {"epoch": 1.97, "learning_rate": 3.029188872811554e-05, "loss": 1.0938, "step": 1460},
    {"epoch": 1.98, "learning_rate": 3.017062237298441e-05, "loss": 1.2473, "step": 1470},
    {"epoch": 1.99, "learning_rate": 3.0048849569502158e-05, "loss": 1.2034, "step": 1480},
    {"epoch": 2.01, "learning_rate": 2.9926576381371987e-05, "loss": 1.2143, "step": 1490},
    {"epoch": 2.02, "learning_rate": 2.980380889721388e-05, "loss": 1.152, "step": 1500},
    {"epoch": 2.04, "learning_rate": 2.9680553230261373e-05, "loss": 1.0943, "step": 1510},
    {"epoch": 2.05, "learning_rate": 2.9556815518057198e-05, "loss": 1.4116, "step": 1520},
    {"epoch": 2.06, "learning_rate": 2.9432601922147615e-05, "loss": 1.1894, "step": 1530},
    {"epoch": 2.08, "learning_rate": 2.9307918627775627e-05, "loss": 1.0567, "step": 1540},
    {"epoch": 2.09, "learning_rate": 2.918277184357297e-05, "loss": 1.0172, "step": 1550},
    {"epoch": 2.1, "learning_rate": 2.9057167801250962e-05, "loss": 1.1905, "step": 1560},
    {"epoch": 2.12, "learning_rate": 2.8931112755290173e-05, "loss": 1.3694, "step": 1570},
    {"epoch": 2.13, "learning_rate": 2.8804612982629015e-05, "loss": 1.1596, "step": 1580},
    {"epoch": 2.14, "learning_rate": 2.8677674782351164e-05, "loss": 1.3288, "step": 1590},
    {"epoch": 2.16, "learning_rate": 2.8550304475371876e-05, "loss": 1.0669, "step": 1600},
    {"epoch": 2.17, "learning_rate": 2.8422508404123264e-05, "loss": 1.1202, "step": 1610},
    {"epoch": 2.18, "learning_rate": 2.829429293223847e-05, "loss": 1.1461, "step": 1620},
    {"epoch": 2.2, "learning_rate": 2.8165664444234776e-05, "loss": 1.1503, "step": 1630},
    {"epoch": 2.21, "learning_rate": 2.8036629345195695e-05, "loss": 1.1463, "step": 1640},
    {"epoch": 2.22, "learning_rate": 2.7907194060452013e-05, "loss": 1.1446, "step": 1650},
    {"epoch": 2.24, "learning_rate": 2.777736503526188e-05, "loss": 1.1918, "step": 1660},
    {"epoch": 2.25, "learning_rate": 2.7647148734489797e-05, "loss": 1.2787, "step": 1670},
    {"epoch": 2.26, "learning_rate": 2.7516551642284765e-05, "loss": 1.1412, "step": 1680},
    {"epoch": 2.28, "learning_rate": 2.7385580261757368e-05, "loss": 1.0178, "step": 1690},
    {"epoch": 2.29, "learning_rate": 2.7254241114655965e-05, "loss": 1.0412, "step": 1700},
    {"epoch": 2.3, "learning_rate": 2.7122540741041915e-05, "loss": 1.0242, "step": 1710},
    {"epoch": 2.32, "learning_rate": 2.6990485698963945e-05, "loss": 1.1745, "step": 1720},
    {"epoch": 2.33, "learning_rate": 2.6858082564131577e-05, "loss": 0.9009, "step": 1730},
    {"epoch": 2.35, "learning_rate": 2.672533792958768e-05, "loss": 1.0899, "step": 1740},
    {"epoch": 2.36, "learning_rate": 2.659225840538016e-05, "loss": 0.9803, "step": 1750},
    {"epoch": 2.37, "learning_rate": 2.645885061823286e-05, "loss": 1.0568, "step": 1760},
    {"epoch": 2.39, "learning_rate": 2.6325121211215526e-05, "loss": 1.1428, "step": 1770},
    {"epoch": 2.4, "learning_rate": 2.619107684341304e-05, "loss": 1.2611, "step": 1780},
    {"epoch": 2.41, "learning_rate": 2.6056724189593817e-05, "loss": 1.0649, "step": 1790},
    {"epoch": 2.43, "learning_rate": 2.5922069939877464e-05, "loss": 1.0907, "step": 1800},
    {"epoch": 2.44, "learning_rate": 2.5787120799401595e-05, "loss": 1.1944, "step": 1810},
    {"epoch": 2.45, "learning_rate": 2.565188348798798e-05, "loss": 1.0326, "step": 1820},
    {"epoch": 2.47, "learning_rate": 2.5516364739807938e-05, "loss": 1.1004, "step": 1830},
    {"epoch": 2.48, "learning_rate": 2.538057130304698e-05, "loss": 1.253, "step": 1840},
    {"epoch": 2.49, "learning_rate": 2.5244509939568786e-05, "loss": 1.0609, "step": 1850},
    {"epoch": 2.51, "learning_rate": 2.5108187424578533e-05, "loss": 0.9903, "step": 1860},
    {"epoch": 2.52, "learning_rate": 2.4971610546285474e-05, "loss": 1.0218, "step": 1870},
    {"epoch": 2.53, "learning_rate": 2.483478610556494e-05, "loss": 1.2747, "step": 1880},
    {"epoch": 2.55, "learning_rate": 2.469772091561968e-05, "loss": 1.0557, "step": 1890},
    {"epoch": 2.56, "learning_rate": 2.4560421801640618e-05, "loss": 1.1359, "step": 1900},
    {"epoch": 2.57, "learning_rate": 2.4422895600466968e-05, "loss": 1.1069, "step": 1910},
    {"epoch": 2.59, "learning_rate": 2.4285149160245806e-05, "loss": 1.2929, "step": 1920},
    {"epoch": 2.6, "learning_rate": 2.414718934009105e-05, "loss": 0.932, "step": 1930},
    {"epoch": 2.61, "learning_rate": 2.4009023009741932e-05, "loss": 1.1145, "step": 1940},
    {"epoch": 2.63, "learning_rate": 2.3870657049220885e-05, "loss": 0.9006, "step": 1950},
    {"epoch": 2.64, "learning_rate": 2.373209834849098e-05, "loss": 1.0325, "step": 1960},
    {"epoch": 2.65, "learning_rate": 2.3593353807112838e-05, "loss": 1.023, "step": 1970},
    {"epoch": 2.67, "learning_rate": 2.3454430333901037e-05, "loss": 0.9664, "step": 1980},
    {"epoch": 2.68, "learning_rate": 2.331533484658011e-05, "loss": 1.05, "step": 1990},
    {"epoch": 2.7, "learning_rate": 2.3176074271440086e-05, "loss": 0.8807, "step": 2000},
    {"epoch": 2.71, "learning_rate": 2.303665554299156e-05, "loss": 0.9423, "step": 2010},
    {"epoch": 2.72, "learning_rate": 2.2897085603620405e-05, "loss": 1.0951, "step": 2020},
    {"epoch": 2.74, "learning_rate": 2.2757371403242094e-05, "loss": 1.1253, "step": 2030},
    {"epoch": 2.75, "learning_rate": 2.2617519898955572e-05, "loss": 1.099, "step": 2040},
    {"epoch": 2.76, "learning_rate": 2.2477538054696905e-05, "loss": 0.9475, "step": 2050},
    {"epoch": 2.78, "learning_rate": 2.2337432840892438e-05, "loss": 1.0271, "step": 2060},
    {"epoch": 2.79, "learning_rate": 2.2197211234111743e-05, "loss": 0.9412, "step": 2070},
    {"epoch": 2.8, "learning_rate": 2.2056880216720184e-05, "loss": 1.1529, "step": 2080},
    {"epoch": 2.82, "learning_rate": 2.1916446776531276e-05, "loss": 1.0621, "step": 2090},
    {"epoch": 2.83, "learning_rate": 2.1775917906458698e-05, "loss": 1.1638, "step": 2100},
    {"epoch": 2.84, "learning_rate": 2.163530060416806e-05, "loss": 1.1005, "step": 2110},
    {"epoch": 2.86, "learning_rate": 2.149460187172849e-05, "loss": 1.2081, "step": 2120},
    {"epoch": 2.87, "learning_rate": 2.1353828715263937e-05, "loss": 1.0704, "step": 2130},
    {"epoch": 2.88, "learning_rate": 2.121298814460433e-05, "loss": 1.0406, "step": 2140},
    {"epoch": 2.9, "learning_rate": 2.1072087172936473e-05, "loss": 0.8944, "step": 2150},
    {"epoch": 2.91, "learning_rate": 2.093113281645488e-05, "loss": 0.87, "step": 2160},
    {"epoch": 2.92, "learning_rate": 2.079013209401236e-05, "loss": 1.1228, "step": 2170},
    {"epoch": 2.94, "learning_rate": 2.0649092026770517e-05, "loss": 1.0616, "step": 2180},
    {"epoch": 2.95, "learning_rate": 2.0508019637850164e-05, "loss": 1.1175, "step": 2190},
    {"epoch": 2.96, "learning_rate": 2.0366921951981546e-05, "loss": 1.1289, "step": 2200},
    {"epoch": 2.98, "learning_rate": 2.02258059951546e-05, "loss": 1.1765, "step": 2210},
    {"epoch": 2.99, "learning_rate": 2.0084678794269057e-05, "loss": 1.042, "step": 2220},
    {"epoch": 3.01, "learning_rate": 1.994354737678455e-05, "loss": 1.0727, "step": 2230},
    {"epoch": 3.02, "learning_rate": 1.980241877037069e-05, "loss": 1.0584, "step": 2240},
    {"epoch": 3.03, "learning_rate": 1.9661300002557085e-05, "loss": 1.0318, "step": 2250},
    {"epoch": 3.05, "learning_rate": 1.952019810038346e-05, "loss": 1.0157, "step": 2260},
    {"epoch": 3.06, "learning_rate": 1.9379120090049686e-05, "loss": 1.1007, "step": 2270},
    {"epoch": 3.07, "learning_rate": 1.9238072996565936e-05, "loss": 0.9557, "step": 2280},
    {"epoch": 3.09, "learning_rate": 1.9097063843402895e-05, "loss": 1.0187, "step": 2290},
    {"epoch": 3.1, "learning_rate": 1.8956099652141976e-05, "loss": 1.1501, "step": 2300},
    {"epoch": 3.11, "learning_rate": 1.8815187442125716e-05, "loss": 1.0928, "step": 2310},
    {"epoch": 3.13, "learning_rate": 1.867433423010824e-05, "loss": 0.9985, "step": 2320},
    {"epoch": 3.14, "learning_rate": 1.8533547029905833e-05, "loss": 1.0785, "step": 2330},
    {"epoch": 3.15, "learning_rate": 1.8392832852047737e-05, "loss": 1.0574, "step": 2340},
    {"epoch": 3.17, "learning_rate": 1.8252198703426997e-05, "loss": 1.0207, "step": 2350},
    {"epoch": 3.18, "learning_rate": 1.8111651586951627e-05, "loss": 0.833, "step": 2360},
    {"epoch": 3.19, "learning_rate": 1.797119850119581e-05, "loss": 1.0397, "step": 2370},
    {"epoch": 3.21, "learning_rate": 1.7830846440051493e-05, "loss": 1.1027, "step": 2380},
    {"epoch": 3.22, "learning_rate": 1.7690602392380046e-05, "loss": 0.9658, "step": 2390},
    {"epoch": 3.23, "learning_rate": 1.7550473341664292e-05, "loss": 1.1669, "step": 2400},
    {"epoch": 3.25, "learning_rate": 1.7410466265660767e-05, "loss": 1.0738, "step": 2410},
    {"epoch": 3.26, "learning_rate": 1.727058813605226e-05, "loss": 0.9897, "step": 2420},
    {"epoch": 3.27, "learning_rate": 1.7130845918100606e-05, "loss": 0.8493, "step": 2430},
    {"epoch": 3.29, "learning_rate": 1.6991246570299924e-05, "loss": 0.8922, "step": 2440},
    {"epoch": 3.3, "learning_rate": 1.6851797044030076e-05, "loss": 1.0693, "step": 2450},
    {"epoch": 3.32, "learning_rate": 1.671250428321052e-05, "loss": 1.2113, "step": 2460},
    {"epoch": 3.33, "learning_rate": 1.657337522395455e-05, "loss": 0.9658, "step": 2470},
    {"epoch": 3.34, "learning_rate": 1.6434416794223896e-05, "loss": 0.9116, "step": 2480},
    {"epoch": 3.36, "learning_rate": 1.629563591348378e-05, "loss": 1.0346, "step": 2490},
    {"epoch": 3.37, "learning_rate": 1.6157039492358305e-05, "loss": 1.0265, "step": 2500},
    {"epoch": 3.38, "learning_rate": 1.6018634432286374e-05, "loss": 0.8564, "step": 2510},
    {"epoch": 3.4, "learning_rate": 1.5880427625178035e-05, "loss": 1.0525, "step": 2520},
    {"epoch": 3.41, "learning_rate": 1.574242595307128e-05, "loss": 1.1558, "step": 2530},
    {"epoch": 3.42, "learning_rate": 1.5604636287789356e-05, "loss": 1.04, "step": 2540},
    {"epoch": 3.44, "learning_rate": 1.546706549059858e-05, "loss": 1.0166, "step": 2550},
    {"epoch": 3.45, "learning_rate": 1.5329720411866712e-05, "loss": 0.9473, "step": 2560},
    {"epoch": 3.46, "learning_rate": 1.5192607890721775e-05, "loss": 1.0322, "step": 2570},
    {"epoch": 3.48, "learning_rate": 1.505573475471156e-05, "loss": 1.0675, "step": 2580},
    {"epoch": 3.49, "learning_rate": 1.491910781946362e-05, "loss": 0.9918, "step": 2590},
    {"epoch": 3.5, "learning_rate": 1.4782733888345882e-05, "loss": 1.0469, "step": 2600},
    {"epoch": 3.52, "learning_rate": 1.4646619752127872e-05, "loss": 1.0355, "step": 2610},
    {"epoch": 3.53, "learning_rate": 1.4510772188642586e-05, "loss": 0.8829, "step": 2620},
    {"epoch": 3.54, "learning_rate": 1.4375197962448977e-05, "loss": 0.9707, "step": 2630},
    {"epoch": 3.56, "learning_rate": 1.423990382449509e-05, "loss": 1.0144, "step": 2640},
    {"epoch": 3.57, "learning_rate": 1.4104896511781916e-05, "loss": 0.8244, "step": 2650},
    {"epoch": 3.58, "learning_rate": 1.3970182747027944e-05, "loss": 0.9784, "step": 2660},
    {"epoch": 3.6, "learning_rate": 1.3835769238334354e-05, "loss": 0.9066, "step": 2670},
    {"epoch": 3.61, "learning_rate": 1.3701662678851016e-05, "loss": 1.0048, "step": 2680},
    {"epoch": 3.63, "learning_rate": 1.35678697464432e-05, "loss": 0.8127, "step": 2690},
    {"epoch": 3.64, "learning_rate": 1.3434397103359063e-05, "loss": 0.9977, "step": 2700},
    {"epoch": 3.65, "learning_rate": 1.3301251395897863e-05, "loss": 1.0585, "step": 2710},
    {"epoch": 3.67, "learning_rate": 1.3168439254079026e-05, "loss": 0.8895, "step": 2720},
    {"epoch": 3.68, "learning_rate": 1.3035967291312029e-05, "loss": 1.0318, "step": 2730},
    {"epoch": 3.69, "learning_rate": 1.2903842104067032e-05, "loss": 0.9691, "step": 2740},
    {"epoch": 3.71, "learning_rate": 1.2772070271546444e-05, "loss": 1.1253, "step": 2750},
    {"epoch": 3.72, "learning_rate": 1.2640658355357312e-05, "loss": 1.1046, "step": 2760},
    {"epoch": 3.73, "learning_rate": 1.2509612899184545e-05, "loss": 1.1334, "step": 2770},
    {"epoch": 3.75, "learning_rate": 1.237894042846512e-05, "loss": 0.9221, "step": 2780},
    {"epoch": 3.76, "learning_rate": 1.22486474500631e-05, "loss": 1.0444, "step": 2790},
    {"epoch": 3.77, "learning_rate": 1.2118740451945668e-05, "loss": 1.0768, "step": 2800},
    {"epoch": 3.79, "learning_rate": 1.198922590286002e-05, "loss": 1.0885, "step": 2810},
    {"epoch": 3.8, "learning_rate": 1.186011025201126e-05, "loss": 1.0116, "step": 2820},
    {"epoch": 3.81, "learning_rate": 1.1731399928741296e-05, "loss": 1.0825, "step": 2830},
    {"epoch": 3.83, "learning_rate": 1.1603101342208638e-05, "loss": 1.1292, "step": 2840},
    {"epoch": 3.84, "learning_rate": 1.1475220881069275e-05, "loss": 0.8873, "step": 2850},
    {"epoch": 3.85, "learning_rate": 1.1347764913158565e-05, "loss": 0.7846, "step": 2860},
    {"epoch": 3.87, "learning_rate": 1.1220739785174129e-05, "loss": 0.8996, "step": 2870},
    {"epoch": 3.88, "learning_rate": 1.1094151822359805e-05, "loss": 1.0204, "step": 2880},
    {"epoch": 3.89, "learning_rate": 1.0968007328190708e-05, "loss": 0.9055, "step": 2890},
    {"epoch": 3.91, "learning_rate": 1.084231258405936e-05, "loss": 1.0888, "step": 2900},
    {"epoch": 3.92, "learning_rate": 1.0717073848962833e-05, "loss": 1.2233, "step": 2910},
    {"epoch": 3.94, "learning_rate": 1.0592297359191166e-05, "loss": 0.8835, "step": 2920},
    {"epoch": 3.95, "learning_rate": 1.0467989328016787e-05, "loss": 0.6883, "step": 2930},
    {"epoch": 3.96, "learning_rate": 1.0344155945385106e-05, "loss": 0.8986, "step": 2940},
    {"epoch": 3.98, "learning_rate": 1.0220803377606328e-05, "loss": 1.1333, "step": 2950},
    {"epoch": 3.99, "learning_rate": 1.0097937767048365e-05, "loss": 1.1204, "step": 2960},
    {"epoch": 4.0, "learning_rate": 9.975565231830999e-06, "loss": 1.013, "step": 2970},
    {"epoch": 4.02, "learning_rate": 9.853691865521217e-06, "loss": 1.007, "step": 2980},
    {"epoch": 4.03, "learning_rate": 9.732323736829766e-06, "loss": 0.8642, "step": 2990},
    {"epoch": 4.04, "learning_rate": 9.611466889308992e-06, "loss": 1.0881, "step": 3000},
    {"epoch": 4.06, "learning_rate": 9.491127341051884e-06, "loss": 1.0182, "step": 3010},
    {"epoch": 4.07, "learning_rate": 9.371311084392388e-06, "loss": 0.8715, "step": 3020},
    {"epoch": 4.08, "learning_rate": 9.252024085607043e-06, "loss": 1.1717, "step": 3030},
    {"epoch": 4.1, "learning_rate": 9.133272284617878e-06, "loss": 1.0757, "step": 3040},
    {"epoch": 4.11, "learning_rate": 9.015061594696626e-06, "loss": 1.0925, "step": 3050},
    {"epoch": 4.12, "learning_rate": 8.897397902170287e-06, "loss": 1.0577, "step": 3060},
    {"epoch": 4.14, "learning_rate": 8.780287066127982e-06, "loss": 1.1386, "step": 3070},
    {"epoch": 4.15, "learning_rate": 8.663734918129247e-06, "loss": 1.0195, "step": 3080},
    {"epoch": 4.16, "learning_rate": 8.547747261913622e-06, "loss": 1.1513, "step": 3090},
    {"epoch": 4.18, "learning_rate": 8.432329873111648e-06, "loss": 0.7835, "step": 3100},
    {"epoch": 4.19, "learning_rate": 8.317488498957282e-06, "loss": 0.9272, "step": 3110},
    {"epoch": 4.2, "learning_rate": 8.203228858001713e-06, "loss": 0.9591, "step": 3120},
    {"epoch": 4.22, "learning_rate": 8.089556639828576e-06, "loss": 1.1586, "step": 3130},
    {"epoch": 4.23, "learning_rate": 7.976477504770682e-06, "loss": 0.9203, "step": 3140},
    {"epoch": 4.25, "learning_rate": 7.863997083628139e-06, "loss": 0.787, "step": 3150},
    {"epoch": 4.26, "learning_rate": 7.752120977387943e-06, "loss": 0.9091, "step": 3160},
    {"epoch": 4.27, "learning_rate": 7.640854756945138e-06, "loss": 0.9766, "step": 3170},
    {"epoch": 4.29, "learning_rate": 7.530203962825331e-06, "loss": 1.0665, "step": 3180},
    {"epoch": 4.3, "learning_rate": 7.420174104908866e-06, "loss": 0.997, "step": 3190},
    {"epoch": 4.31, "learning_rate": 7.310770662156434e-06, "loss": 1.1448, "step": 3200},
    {"epoch": 4.33, "learning_rate": 7.201999082336226e-06, "loss": 0.9019, "step": 3210},
    {"epoch": 4.34, "learning_rate": 7.0938647817527014e-06, "loss": 0.9662, "step": 3220},
    {"epoch": 4.35, "learning_rate": 6.986373144976855e-06, "loss": 0.9343, "step": 3230},
    {"epoch": 4.37, "learning_rate": 6.879529524578102e-06, "loss": 0.8667, "step": 3240},
    {"epoch": 4.38, "learning_rate": 6.77333924085773e-06, "loss": 0.8929, "step": 3250},
    {"epoch": 4.39, "learning_rate": 6.667807581584002e-06, "loss": 0.8216, "step": 3260},
    {"epoch": 4.41, "learning_rate": 6.5629398017288136e-06, "loss": 0.9057, "step": 3270},
    {"epoch": 4.42, "learning_rate": 6.458741123206053e-06, "loss": 0.9432, "step": 3280},
    {"epoch": 4.43, "learning_rate": 6.355216734611567e-06, "loss": 0.8579, "step": 3290},
    {"epoch": 4.45, "learning_rate": 6.252371790964762e-06, "loss": 0.8602, "step": 3300},
    {"epoch": 4.46, "learning_rate": 6.150211413451978e-06, "loss": 0.8188, "step": 3310},
    {"epoch": 4.47, "learning_rate": 6.048740689171397e-06, "loss": 0.8784, "step": 3320},
    {"epoch": 4.49, "learning_rate": 5.9479646708797915e-06, "loss": 1.0767, "step": 3330},
    {"epoch": 4.5, "learning_rate": 5.847888376740891e-06, "loss": 0.9005, "step": 3340},
    {"epoch": 4.51, "learning_rate": 5.748516790075502e-06, "loss": 0.9828, "step": 3350},
    {"epoch": 4.53, "learning_rate": 5.649854859113375e-06, "loss": 1.1729, "step": 3360},
    {"epoch": 4.54, "learning_rate": 5.551907496746816e-06, "loss": 1.0378, "step": 3370},
    {"epoch": 4.56, "learning_rate": 5.454679580286005e-06, "loss": 0.9476, "step": 3380},
    {"epoch": 4.57, "learning_rate": 5.358175951216169e-06, "loss": 0.8411, "step": 3390},
    {"epoch": 4.58, "learning_rate": 5.262401414956499e-06, "loss": 0.9726, "step": 3400},
    {"epoch": 4.6, "learning_rate": 5.167360740620828e-06, "loss": 0.8943, "step": 3410},
    {"epoch": 4.61, "learning_rate": 5.07305866078019e-06, "loss": 0.9221, "step": 3420},
    {"epoch": 4.62, "learning_rate": 4.9794998712271425e-06, "loss": 1.0752, "step": 3430},
    {"epoch": 4.64, "learning_rate": 4.8866890307419445e-06, "loss": 1.3655, "step": 3440},
    {"epoch": 4.65, "learning_rate": 4.794630760860568e-06, "loss": 1.0098, "step": 3450},
    {"epoch": 4.66, "learning_rate": 4.703329645644557e-06, "loss": 0.9754, "step": 3460},
    {"epoch": 4.68, "learning_rate": 4.612790231452784e-06, "loss": 0.822, "step": 3470},
    {"epoch": 4.69, "learning_rate": 4.523017026715066e-06, "loss": 0.9473, "step": 3480},
    {"epoch": 4.7, "learning_rate": 4.434014501707624e-06, "loss": 0.8881, "step": 3490},
    {"epoch": 4.72, "learning_rate": 4.345787088330537e-06, "loss": 0.8736, "step": 3500},
    {"epoch": 4.73, "learning_rate": 4.258339179887028e-06, "loss": 0.8703, "step": 3510},
    {"epoch": 4.74, "learning_rate": 4.1716751308646966e-06, "loss": 0.7873, "step": 3520},
    {"epoch": 4.76, "learning_rate": 4.085799256718701e-06, "loss": 0.9637, "step": 3530},
    {"epoch": 4.77, "learning_rate": 4.000715833656856e-06, "loss": 1.0796, "step": 3540},
    {"epoch": 4.78, "learning_rate": 3.916429098426693e-06, "loss": 1.138, "step": 3550},
    {"epoch": 4.8, "learning_rate": 3.832943248104512e-06, "loss": 0.8794, "step": 3560},
    {"epoch": 4.81, "learning_rate": 3.750262439886374e-06, "loss": 1.0175, "step": 3570},
    {"epoch": 4.82, "learning_rate": 3.668390790881087e-06, "loss": 0.7578, "step": 3580},
    {"epoch": 4.84, "learning_rate": 3.5873323779052126e-06, "loss": 0.8712, "step": 3590},
    {"epoch": 4.85, "learning_rate": 3.5070912372800246e-06, "loss": 0.8932, "step": 3600},
    {"epoch": 4.87, "learning_rate": 3.427671364630565e-06, "loss": 0.9601, "step": 3610},
    {"epoch": 4.88, "learning_rate": 3.349076714686652e-06, "loss": 0.9283, "step": 3620},
    {"epoch": 4.89, "learning_rate": 3.2713112010859516e-06, "loss": 0.8924, "step": 3630},
    {"epoch": 4.91, "learning_rate": 3.1943786961791166e-06, "loss": 1.1402, "step": 3640},
    {"epoch": 4.92, "learning_rate": 3.118283030836944e-06, "loss": 1.1463, "step": 3650},
    {"epoch": 4.93, "learning_rate": 3.0430279942596262e-06, "loss": 1.002, "step": 3660},
    {"epoch": 4.95, "learning_rate": 2.9686173337880662e-06, "loss": 0.9601, "step": 3670},
    {"epoch": 4.96, "learning_rate": 2.8950547547172637e-06, "loss": 0.6846, "step": 3680},
    {"epoch": 4.97, "learning_rate": 2.8223439201118318e-06, "loss": 1.018, "step": 3690},
    {"epoch": 4.99, "learning_rate": 2.7504884506235806e-06, "loss": 0.9281, "step": 3700},
    {"epoch": 5.0, "learning_rate": 2.679491924311226e-06, "loss": 0.866, "step": 3710},
    {"epoch": 5.01, "learning_rate": 2.6093578764622263e-06, "loss": 1.0279, "step": 3720},
    {"epoch": 5.03, "learning_rate": 2.540089799416736e-06, "loss": 1.0756, "step": 3730},
    {"epoch": 5.04, "learning_rate": 2.4716911423936927e-06, "loss": 0.8898, "step": 3740},
    {"epoch": 5.05, "learning_rate": 2.4041653113190954e-06, "loss": 1.026, "step": 3750},
    {"epoch": 5.07, "learning_rate": 2.337515668656376e-06, "loss": 1.0635, "step": 3760},
    {"epoch": 5.08, "learning_rate": 2.2717455332389625e-06, "loss": 1.1642, "step": 3770},
    {"epoch": 5.09, "learning_rate": 2.2068581801050557e-06, "loss": 1.0406, "step": 3780},
    {"epoch": 5.11, "learning_rate": 2.142856840334495e-06, "loss": 0.8154, "step": 3790},
    {"epoch": 5.12, "learning_rate": 2.0797447008879024e-06, "loss": 1.0129, "step": 3800},
    {"epoch": 5.13, "learning_rate": 2.0175249044479804e-06, "loss": 0.8056, "step": 3810},
    {"epoch": 5.15, "learning_rate": 1.9562005492630077e-06, "loss": 0.9076, "step": 3820},
    {"epoch": 5.16, "learning_rate": 1.8957746889925776e-06, "loss": 1.0562, "step": 3830},
    {"epoch": 5.18, "learning_rate": 1.8362503325555337e-06, "loss": 1.0019, "step": 3840},
    {"epoch": 5.19, "learning_rate": 1.7776304439801384e-06, "loss": 0.8736, "step": 3850},
    {"epoch": 5.2, "learning_rate": 1.7199179422564838e-06, "loss": 0.9338, "step": 3860},
    {"epoch": 5.22, "learning_rate": 1.6631157011911358e-06, "loss": 0.9483, "step": 3870},
    {"epoch": 5.23, "learning_rate": 1.607226549264025e-06, "loss": 1.1014, "step": 3880},
    {"epoch": 5.24, "learning_rate": 1.552253269487618e-06, "loss": 0.8895, "step": 3890},
    {"epoch": 5.26, "learning_rate": 1.4981985992683324e-06, "loss": 0.9174, "step": 3900},
    {"epoch": 5.27, "learning_rate": 1.4450652302702084e-06, "loss": 1.0354, "step": 3910},
    {"epoch": 5.28, "learning_rate": 1.3928558082809107e-06, "loss": 1.2321, "step": 3920},
    {"epoch": 5.3, "learning_rate": 1.3415729330799465e-06, "loss": 0.9933, "step": 3930},
    {"epoch": 5.31, "learning_rate": 1.2912191583092293e-06, "loss": 0.7278, "step": 3940},
    {"epoch": 5.32, "learning_rate": 1.2417969913459227e-06, "loss": 0.8875, "step": 3950},
    {"epoch": 5.34, "learning_rate": 1.1933088931775606e-06, "loss": 0.8956, "step": 3960},
    {"epoch": 5.35, "learning_rate": 1.1457572782795335e-06, "loss": 0.9413, "step": 3970},
    {"epoch": 5.36, "learning_rate": 1.0991445144948365e-06, "loss": 0.9904, "step": 3980},
    {"epoch": 5.38, "learning_rate": 1.0534729229161722e-06, "loss": 1.0132, "step": 3990},
    {"epoch": 5.39, "learning_rate": 1.0087447777703674e-06, "loss": 0.7655, "step": 4000},
    {"epoch": 5.4, "learning_rate": 9.649623063051283e-07, "loss": 0.9661, "step": 4010},
    {"epoch": 5.42, "learning_rate": 9.22127688678136e-07, "loss": 0.9205, "step": 4020},
    {"epoch": 5.43, "learning_rate": 8.802430578484822e-07, "loss": 1.0175, "step": 4030},
    {"epoch": 5.44, "learning_rate": 8.393104994704604e-07, "loss": 0.9424, "step": 4040},
    {"epoch": 5.46, "learning_rate": 7.993320517897096e-07, "loss": 0.9292, "step": 4050},
    {"epoch": 5.47, "learning_rate": 7.603097055417242e-07, "loss": 0.9804, "step": 4060},
    {"epoch": 5.49, "learning_rate": 7.222454038527083e-07, "loss": 1.1119, "step": 4070},
    {"epoch": 5.5, "learning_rate": 6.851410421428384e-07, "loss": 0.9871, "step": 4080},
    {"epoch": 5.51, "learning_rate": 6.489984680318717e-07, "loss": 1.0466, "step": 4090},
    {"epoch": 5.53, "learning_rate": 6.138194812471332e-07, "loss": 1.0137, "step": 4100},
    {"epoch": 5.54, "learning_rate": 5.796058335339183e-07, "loss": 0.7032, "step": 4110},
    {"epoch": 5.55, "learning_rate": 5.463592285682495e-07, "loss": 0.8068, "step": 4120},
    {"epoch": 5.57, "learning_rate": 5.140813218720442e-07, "loss": 0.9045, "step": 4130},
    {"epoch": 5.58, "learning_rate": 4.827737207306826e-07, "loss": 1.133, "step": 4140},
    {"epoch": 5.59, "learning_rate": 4.5243798411296337e-07, "loss": 1.1057, "step": 4150},
    {"epoch": 5.61, "learning_rate": 4.2307562259348735e-07, "loss": 0.9352, "step": 4160},
    {"epoch": 5.62, "learning_rate": 3.946880982774226e-07, "loss": 0.6152, "step": 4170},
    {"epoch": 5.63, "learning_rate": 3.6727682472771143e-07, "loss": 0.9055, "step": 4180},
    {"epoch": 5.65, "learning_rate": 3.408431668946799e-07, "loss": 0.9063, "step": 4190},
    {"epoch": 5.66, "learning_rate": 3.1538844104806343e-07, "loss": 0.7718, "step": 4200},
    {"epoch": 5.67, "learning_rate": 2.909139147114659e-07, "loss": 1.0093, "step": 4210},
    {"epoch": 5.69, "learning_rate": 2.674208065992412e-07, "loss": 0.8273, "step": 4220},
    {"epoch": 5.7, "learning_rate": 2.4491028655581727e-07, "loss": 1.0351, "step": 4230},
    {"epoch": 5.71, "learning_rate": 2.2338347549742956e-07, "loss": 0.9928, "step": 4240},
    {"epoch": 5.73, "learning_rate": 2.0284144535631212e-07, "loss": 0.9106, "step": 4250},
    {"epoch": 5.74, "learning_rate": 1.8328521902732043e-07, "loss": 1.059, "step": 4260},
    {"epoch": 5.75, "learning_rate": 1.6471577031699214e-07, "loss": 0.8302, "step": 4270},
    {"epoch": 5.77, "learning_rate": 1.471340238950658e-07, "loss": 1.18, "step": 4280},
    {"epoch": 5.78, "learning_rate": 1.3054085524841775e-07, "loss": 0.9281, "step": 4290},
    {"epoch": 5.8, "learning_rate": 1.1493709063749248e-07, "loss": 0.9893, "step": 4300},
    {"epoch": 5.81, "learning_rate": 1.003235070551356e-07, "loss": 0.9611, "step": 4310},
    {"epoch": 5.82, "learning_rate": 8.670083218792036e-08, "loss": 0.8989, "step": 4320},
    {"epoch": 5.84, "learning_rate": 7.406974437989878e-08, "loss": 1.0763, "step": 4330},
    {"epoch": 5.85, "learning_rate": 6.24308725988354e-08, "loss": 0.7364, "step": 4340},
    {"epoch": 5.86, "learning_rate": 5.178479640487899e-08, "loss": 0.8311, "step": 4350},
    {"epoch": 5.88, "learning_rate": 4.213204592170561e-08, "loss": 1.0996, "step": 4360},
    {"epoch": 5.89, "learning_rate": 3.347310181012642e-08, "loss": 1.0, "step": 4370},
    {"epoch": 5.9, "learning_rate": 2.580839524414014e-08, "loss": 0.9017, "step": 4380},
    {"epoch": 5.92, "learning_rate": 1.9138307889481346e-08, "loss": 1.0839, "step": 4390},
    {"epoch": 5.93, "learning_rate": 1.3463171884595672e-08, "loss": 0.883, "step": 4400},
    {"epoch": 5.94, "learning_rate": 8.78326982411304e-09, "loss": 0.7533, "step": 4410},
    {"epoch": 5.96, "learning_rate": 5.098834744776682e-09, "loss": 0.6881, "step": 4420},
    {"epoch": 5.97, "learning_rate": 2.410050113828e-09, "loss": 0.8809, "step": 4430},
    {"epoch": 5.98, "learning_rate": 7.170498198871834e-10, "loss": 0.999, "step": 4440},
    {"epoch": 6.0, "learning_rate": 1.9918166265231465e-11, "loss": 0.8996, "step": 4450}
  ],
  "logging_steps": 10,
  "max_steps": 4452,
  "num_train_epochs": 6,
  "save_steps": 500,
  "total_flos": 2.326608096362496e+16,
  "trial_name": null,
  "trial_params": null
}
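
The learning_rate column decays from about 4.0e-5 at step 10 to effectively zero at step 4450, consistent with a cosine-style schedule over max_steps = 4452, while the logged loss falls from roughly 1.78 to roughly 0.9 across the six epochs. Below is a minimal sketch for inspecting these curves from the file above, assuming a Python environment and the relative path shown in this repo; matplotlib is an optional assumption, not part of this checkpoint.

import json

# Load this checkpoint's trainer state (path mirrors the repo layout above).
with open("LORAs/tinyllama-encoder_4e-5/checkpoint-4452/trainer_state.json") as f:
    state = json.load(f)

# Every log_history entry carries epoch, learning_rate, loss, and step.
steps = [entry["step"] for entry in state["log_history"]]
losses = [entry["loss"] for entry in state["log_history"]]
lrs = [entry["learning_rate"] for entry in state["log_history"]]

print(f"logged points: {len(steps)}, final loss: {losses[-1]}")

# Plot loss and learning-rate curves if matplotlib is available
# (an assumption; the stdlib part above needs nothing extra).
try:
    import matplotlib.pyplot as plt

    fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
    ax_loss.plot(steps, losses)
    ax_loss.set_ylabel("training loss")
    ax_lr.plot(steps, lrs)
    ax_lr.set_ylabel("learning rate")
    ax_lr.set_xlabel("step")
    fig.savefig("training_curves.png")
except ImportError:
    pass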