TinyLlama-1.1B-Chat-rust-cpp-encodings/LORAs/tinyllama-encoder_2e-4/checkpoint-4452/trainer_state.json
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.0,
  "eval_steps": 500,
  "global_step": 4452,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.01, "learning_rate": 0.00019999751023913506, "loss": 1.726, "step": 10},
    {"epoch": 0.03, "learning_rate": 0.00019999004108051845, "loss": 1.6889, "step": 20},
    {"epoch": 0.04, "learning_rate": 0.00019997759289607853, "loss": 1.7184, "step": 30},
    {"epoch": 0.05, "learning_rate": 0.00019996016630567534, "loss": 1.4497, "step": 40},
    {"epoch": 0.07, "learning_rate": 0.00019993776217706971, "loss": 1.4543, "step": 50},
    {"epoch": 0.08, "learning_rate": 0.00019991038162588018, "loss": 1.5685, "step": 60},
    {"epoch": 0.09, "learning_rate": 0.00019987802601552718, "loss": 1.5524, "step": 70},
    {"epoch": 0.11, "learning_rate": 0.00019984069695716533, "loss": 1.477, "step": 80},
    {"epoch": 0.12, "learning_rate": 0.0001997983963096033, "loss": 1.4757, "step": 90},
    {"epoch": 0.13, "learning_rate": 0.00019975112617921098, "loss": 1.4486, "step": 100},
    {"epoch": 0.15, "learning_rate": 0.0001996988889198147, "loss": 1.461, "step": 110},
    {"epoch": 0.16, "learning_rate": 0.0001996416871325803, "loss": 1.4589, "step": 120},
    {"epoch": 0.18, "learning_rate": 0.00019957952366588306, "loss": 1.4624, "step": 130},
    {"epoch": 0.19, "learning_rate": 0.0001995124016151664, "loss": 1.399, "step": 140},
    {"epoch": 0.2, "learning_rate": 0.0001994403243227874, "loss": 1.424, "step": 150},
    {"epoch": 0.22, "learning_rate": 0.00019936329537785052, "loss": 1.3211, "step": 160},
    {"epoch": 0.23, "learning_rate": 0.00019928131861602876, "loss": 1.279, "step": 170},
    {"epoch": 0.24, "learning_rate": 0.0001991943981193728, "loss": 1.3186, "step": 180},
    {"epoch": 0.26, "learning_rate": 0.00019910253821610771, "loss": 1.2727, "step": 190},
    {"epoch": 0.27, "learning_rate": 0.00019900574348041727, "loss": 1.3562, "step": 200},
    {"epoch": 0.28, "learning_rate": 0.0001989040187322164, "loss": 1.4529, "step": 210},
    {"epoch": 0.3, "learning_rate": 0.00019879736903691107, "loss": 1.2631, "step": 220},
    {"epoch": 0.31, "learning_rate": 0.00019868579970514598, "loss": 1.285, "step": 230},
    {"epoch": 0.32, "learning_rate": 0.00019856931629254028, "loss": 1.3466, "step": 240},
    {"epoch": 0.34, "learning_rate": 0.00019844792459941082, "loss": 1.3003, "step": 250},
    {"epoch": 0.35, "learning_rate": 0.00019832163067048334, "loss": 1.2173, "step": 260},
    {"epoch": 0.36, "learning_rate": 0.00019819044079459142, "loss": 1.2895, "step": 270},
    {"epoch": 0.38, "learning_rate": 0.00019805436150436352, "loss": 1.3731, "step": 280},
    {"epoch": 0.39, "learning_rate": 0.00019791339957589738, "loss": 1.1251, "step": 290},
    {"epoch": 0.4, "learning_rate": 0.00019776756202842294, "loss": 1.2362, "step": 300},
    {"epoch": 0.42, "learning_rate": 0.0001976168561239525, "loss": 1.3371, "step": 310},
    {"epoch": 0.43, "learning_rate": 0.00019746128936691938, "loss": 1.2906, "step": 320},
    {"epoch": 0.44, "learning_rate": 0.00019730086950380403, "loss": 1.1032, "step": 330},
    {"epoch": 0.46, "learning_rate": 0.00019713560452274838, "loss": 1.1211, "step": 340},
    {"epoch": 0.47, "learning_rate": 0.00019696550265315806, "loss": 1.1438, "step": 350},
    {"epoch": 0.49, "learning_rate": 0.00019679057236529266, "loss": 1.2266, "step": 360},
    {"epoch": 0.5, "learning_rate": 0.00019661082236984386, "loss": 1.3422, "step": 370},
    {"epoch": 0.51, "learning_rate": 0.00019642626161750176, "loss": 1.3473, "step": 380},
    {"epoch": 0.53, "learning_rate": 0.00019623689929850908, "loss": 1.2263, "step": 390},
    {"epoch": 0.54, "learning_rate": 0.0001960427448422037, "loss": 1.0773, "step": 400},
    {"epoch": 0.55, "learning_rate": 0.00019584380791654894, "loss": 1.099, "step": 410},
    {"epoch": 0.57, "learning_rate": 0.00019564009842765225, "loss": 1.1754, "step": 420},
    {"epoch": 0.58, "learning_rate": 0.00019543162651927188, "loss": 1.2885, "step": 430},
    {"epoch": 0.59, "learning_rate": 0.00019521840257231182, "loss": 1.2681, "step": 440},
    {"epoch": 0.61, "learning_rate": 0.00019500043720430482, "loss": 1.1195, "step": 450},
    {"epoch": 0.62, "learning_rate": 0.00019477774126888377, "loss": 1.1689, "step": 460},
    {"epoch": 0.63, "learning_rate": 0.00019455032585524116, "loss": 1.0643, "step": 470},
    {"epoch": 0.65, "learning_rate": 0.0001943182022875769, "loss": 1.0415, "step": 480},
    {"epoch": 0.66, "learning_rate": 0.00019408138212453454, "loss": 1.1249, "step": 490},
    {"epoch": 0.67, "learning_rate": 0.00019383987715862553, "loss": 0.9767, "step": 500},
    {"epoch": 0.69, "learning_rate": 0.00019359369941564212, "loss": 1.0936, "step": 510},
    {"epoch": 0.7, "learning_rate": 0.0001933428611540585, "loss": 1.1746, "step": 520},
    {"epoch": 0.71, "learning_rate": 0.00019308737486442045, "loss": 1.0178, "step": 530},
    {"epoch": 0.73, "learning_rate": 0.00019282725326872323, "loss": 0.91, "step": 540},
    {"epoch": 0.74, "learning_rate": 0.00019256250931977826, "loss": 0.9748, "step": 550},
    {"epoch": 0.75, "learning_rate": 0.00019229315620056806, "loss": 1.1454, "step": 560},
    {"epoch": 0.77, "learning_rate": 0.0001920192073235896, "loss": 0.9785, "step": 570},
    {"epoch": 0.78, "learning_rate": 0.00019174067633018682, "loss": 1.0508, "step": 580},
    {"epoch": 0.8, "learning_rate": 0.00019145757708987105, "loss": 1.0429, "step": 590},
    {"epoch": 0.81, "learning_rate": 0.0001911699236996305, "loss": 0.8784, "step": 600},
    {"epoch": 0.82, "learning_rate": 0.00019087773048322818, "loss": 1.0023, "step": 610},
    {"epoch": 0.84, "learning_rate": 0.00019058101199048887, "loss": 0.8826, "step": 620},
    {"epoch": 0.85, "learning_rate": 0.00019027978299657436, "loss": 1.1243, "step": 630},
    {"epoch": 0.86, "learning_rate": 0.00018997405850124785, "loss": 1.1292, "step": 640},
    {"epoch": 0.88, "learning_rate": 0.000189663853728127, "loss": 1.1113, "step": 650},
    {"epoch": 0.89, "learning_rate": 0.00018934918412392596, "loss": 0.9625, "step": 660},
    {"epoch": 0.9, "learning_rate": 0.00018903006535768598, "loss": 0.9192, "step": 670},
    {"epoch": 0.92, "learning_rate": 0.00018870651331999542, "loss": 1.1748, "step": 680},
    {"epoch": 0.93, "learning_rate": 0.00018837854412219826, "loss": 0.7678, "step": 690},
    {"epoch": 0.94, "learning_rate": 0.00018804617409559198, "loss": 0.9359, "step": 700},
    {"epoch": 0.96, "learning_rate": 0.0001877094197906143, "loss": 0.7756, "step": 710},
    {"epoch": 0.97, "learning_rate": 0.00018736829797601903, "loss": 1.0965, "step": 720},
    {"epoch": 0.98, "learning_rate": 0.000187022825638041, "loss": 0.7658, "step": 730},
    {"epoch": 1.0, "learning_rate": 0.00018667301997955037, "loss": 0.8303, "step": 740},
    {"epoch": 1.01, "learning_rate": 0.00018631889841919596, "loss": 0.6526, "step": 750},
    {"epoch": 1.02, "learning_rate": 0.00018596047859053777, "loss": 0.8849, "step": 760},
    {"epoch": 1.04, "learning_rate": 0.00018559777834116904, "loss": 0.8624, "step": 770},
    {"epoch": 1.05, "learning_rate": 0.00018523081573182753, "loss": 1.0776, "step": 780},
    {"epoch": 1.06, "learning_rate": 0.00018485960903549613, "loss": 1.0613, "step": 790},
    {"epoch": 1.08, "learning_rate": 0.00018448417673649293, "loss": 0.7977, "step": 800},
    {"epoch": 1.09, "learning_rate": 0.0001841045375295508, "loss": 1.0102, "step": 810},
    {"epoch": 1.11, "learning_rate": 0.0001837207103188866, "loss": 0.9193, "step": 820},
    {"epoch": 1.12, "learning_rate": 0.00018333271421725966, "loss": 0.791, "step": 830},
    {"epoch": 1.13, "learning_rate": 0.0001829405685450202, "loss": 0.7282, "step": 840},
    {"epoch": 1.15, "learning_rate": 0.00018254429282914717, "loss": 0.909, "step": 850},
    {"epoch": 1.16, "learning_rate": 0.00018214390680227588, "loss": 0.7082, "step": 860},
    {"epoch": 1.17, "learning_rate": 0.00018173943040171567, "loss": 0.7878, "step": 870},
    {"epoch": 1.19, "learning_rate": 0.00018133088376845672, "loss": 0.7337, "step": 880},
    {"epoch": 1.2, "learning_rate": 0.0001809182872461674, "loss": 0.8767, "step": 890},
    {"epoch": 1.21, "learning_rate": 0.00018050166138018127, "loss": 0.9053, "step": 900},
    {"epoch": 1.23, "learning_rate": 0.00018008102691647378, "loss": 0.9425, "step": 910},
    {"epoch": 1.24, "learning_rate": 0.00017965640480062947, "loss": 0.8695, "step": 920},
    {"epoch": 1.25, "learning_rate": 0.0001792278161767989, "loss": 0.8363, "step": 930},
    {"epoch": 1.27, "learning_rate": 0.00017879528238664568, "loss": 0.8174, "step": 940},
    {"epoch": 1.28, "learning_rate": 0.00017835882496828391, "loss": 0.8841, "step": 950},
    {"epoch": 1.29, "learning_rate": 0.00017791846565520558, "loss": 1.0871, "step": 960},
    {"epoch": 1.31, "learning_rate": 0.00017747422637519837, "loss": 0.8819, "step": 970},
    {"epoch": 1.32, "learning_rate": 0.00017702612924925376, "loss": 0.9717, "step": 980},
    {"epoch": 1.33, "learning_rate": 0.0001765741965904655, "loss": 0.9332, "step": 990},
    {"epoch": 1.35, "learning_rate": 0.00017611845090291856, "loss": 0.9255, "step": 1000},
    {"epoch": 1.36, "learning_rate": 0.00017565891488056847, "loss": 0.8273, "step": 1010},
    {"epoch": 1.37, "learning_rate": 0.0001751956114061113, "loss": 1.0004, "step": 1020},
    {"epoch": 1.39, "learning_rate": 0.00017472856354984427, "loss": 0.6283, "step": 1030},
    {"epoch": 1.4, "learning_rate": 0.00017425779456851684, "loss": 0.7763, "step": 1040},
    {"epoch": 1.42, "learning_rate": 0.00017378332790417273, "loss": 0.7186, "step": 1050},
    {"epoch": 1.43, "learning_rate": 0.00017330518718298264, "loss": 0.6694, "step": 1060},
    {"epoch": 1.44, "learning_rate": 0.00017282339621406764, "loss": 0.7898, "step": 1070},
    {"epoch": 1.46, "learning_rate": 0.00017233797898831373, "loss": 0.8441, "step": 1080},
    {"epoch": 1.47, "learning_rate": 0.00017184895967717716, "loss": 0.8105, "step": 1090},
    {"epoch": 1.48, "learning_rate": 0.00017135636263148078, "loss": 0.6729, "step": 1100},
    {"epoch": 1.5, "learning_rate": 0.00017086021238020153, "loss": 0.81, "step": 1110},
    {"epoch": 1.51, "learning_rate": 0.00017036053362924896, "loss": 0.5832, "step": 1120},
    {"epoch": 1.52, "learning_rate": 0.00016985735126023505, "loss": 0.597, "step": 1130},
    {"epoch": 1.54, "learning_rate": 0.00016935069032923526, "loss": 0.714, "step": 1140},
    {"epoch": 1.55, "learning_rate": 0.0001688405760655407, "loss": 1.0678, "step": 1150},
    {"epoch": 1.56, "learning_rate": 0.000168327033870402, "loss": 0.5946, "step": 1160},
    {"epoch": 1.58, "learning_rate": 0.00016781008931576432, "loss": 0.8521, "step": 1170},
    {"epoch": 1.59, "learning_rate": 0.00016728976814299413, "loss": 0.8702, "step": 1180},
    {"epoch": 1.6, "learning_rate": 0.0001667660962615973, "loss": 0.6877, "step": 1190},
    {"epoch": 1.62, "learning_rate": 0.00016623909974792888, "loss": 0.787, "step": 1200},
    {"epoch": 1.63, "learning_rate": 0.00016570880484389488, "loss": 0.6594, "step": 1210},
    {"epoch": 1.64, "learning_rate": 0.00016517523795564528, "loss": 0.7966, "step": 1220},
    {"epoch": 1.66, "learning_rate": 0.00016463842565225914, "loss": 0.8111, "step": 1230},
    {"epoch": 1.67, "learning_rate": 0.00016409839466442178, "loss": 0.7495, "step": 1240},
    {"epoch": 1.68, "learning_rate": 0.00016355517188309363, "loss": 0.5614, "step": 1250},
    {"epoch": 1.7, "learning_rate": 0.00016300878435817113, "loss": 0.8317, "step": 1260},
    {"epoch": 1.71, "learning_rate": 0.00016245925929713977, "loss": 0.7025, "step": 1270},
    {"epoch": 1.73, "learning_rate": 0.00016190662406371937, "loss": 0.8666, "step": 1280},
    {"epoch": 1.74, "learning_rate": 0.0001613509061765015, "loss": 0.6441, "step": 1290},
    {"epoch": 1.75, "learning_rate": 0.00016079213330757913, "loss": 0.491, "step": 1300},
    {"epoch": 1.77, "learning_rate": 0.00016023033328116862, "loss": 0.5865, "step": 1310},
    {"epoch": 1.78, "learning_rate": 0.00015966553407222443, "loss": 0.5936, "step": 1320},
    {"epoch": 1.79, "learning_rate": 0.00015909776380504585, "loss": 0.72, "step": 1330},
    {"epoch": 1.81, "learning_rate": 0.00015852705075187674, "loss": 0.6545, "step": 1340},
    {"epoch": 1.82, "learning_rate": 0.00015795342333149755, "loss": 0.4781, "step": 1350},
    {"epoch": 1.83, "learning_rate": 0.0001573769101078104, "loss": 0.858, "step": 1360},
    {"epoch": 1.85, "learning_rate": 0.00015679753978841654, "loss": 0.712, "step": 1370},
    {"epoch": 1.86, "learning_rate": 0.00015621534122318683, "loss": 0.5954, "step": 1380},
    {"epoch": 1.87, "learning_rate": 0.0001556303434028254, "loss": 0.7352, "step": 1390},
    {"epoch": 1.89, "learning_rate": 0.00015504257545742584, "loss": 0.5647, "step": 1400},
    {"epoch": 1.9, "learning_rate": 0.0001544520666550207, "loss": 0.7925, "step": 1410},
    {"epoch": 1.91, "learning_rate": 0.00015385884640012408, "loss": 0.8434, "step": 1420},
    {"epoch": 1.93, "learning_rate": 0.00015326294423226755, "loss": 0.7084, "step": 1430},
    {"epoch": 1.94, "learning_rate": 0.00015266438982452897, "loss": 0.75, "step": 1440},
    {"epoch": 1.95, "learning_rate": 0.0001520632129820552, "loss": 0.896, "step": 1450},
    {"epoch": 1.97, "learning_rate": 0.0001514594436405777, "loss": 0.664, "step": 1460},
    {"epoch": 1.98, "learning_rate": 0.00015085311186492206, "loss": 0.7149, "step": 1470},
    {"epoch": 1.99, "learning_rate": 0.00015024424784751078, "loss": 0.7642, "step": 1480},
    {"epoch": 2.01, "learning_rate": 0.00014963288190685992, "loss": 0.7575, "step": 1490},
    {"epoch": 2.02, "learning_rate": 0.0001490190444860694, "loss": 0.6248, "step": 1500},
    {"epoch": 2.04, "learning_rate": 0.00014840276615130687, "loss": 0.5879, "step": 1510},
    {"epoch": 2.05, "learning_rate": 0.00014778407759028598, "loss": 0.8715, "step": 1520},
    {"epoch": 2.06, "learning_rate": 0.00014716300961073808, "loss": 0.689, "step": 1530},
    {"epoch": 2.08, "learning_rate": 0.00014653959313887813, "loss": 0.5215, "step": 1540},
    {"epoch": 2.09, "learning_rate": 0.00014591385921786484, "loss": 0.4937, "step": 1550},
    {"epoch": 2.1, "learning_rate": 0.00014528583900625481, "loss": 0.6311, "step": 1560},
    {"epoch": 2.12, "learning_rate": 0.00014465556377645086, "loss": 0.8815, "step": 1570},
    {"epoch": 2.13, "learning_rate": 0.00014402306491314508, "loss": 0.6525, "step": 1580},
    {"epoch": 2.14, "learning_rate": 0.00014338837391175582, "loss": 0.7482, "step": 1590},
    {"epoch": 2.16, "learning_rate": 0.00014275152237685937, "loss": 0.6613, "step": 1600},
    {"epoch": 2.17, "learning_rate": 0.00014211254202061633, "loss": 0.5247, "step": 1610},
    {"epoch": 2.18, "learning_rate": 0.00014147146466119234, "loss": 0.6503, "step": 1620},
    {"epoch": 2.2, "learning_rate": 0.00014082832222117388, "loss": 0.5727, "step": 1630},
    {"epoch": 2.21, "learning_rate": 0.00014018314672597848, "loss": 0.67, "step": 1640},
    {"epoch": 2.22, "learning_rate": 0.00013953597030226007, "loss": 0.6613, "step": 1650},
    {"epoch": 2.24, "learning_rate": 0.0001388868251763094, "loss": 0.7015, "step": 1660},
    {"epoch": 2.25, "learning_rate": 0.000138235743672449, "loss": 0.7483, "step": 1670},
    {"epoch": 2.26, "learning_rate": 0.00013758275821142382, "loss": 0.6186, "step": 1680},
    {"epoch": 2.28, "learning_rate": 0.00013692790130878684, "loss": 0.582, "step": 1690},
    {"epoch": 2.29, "learning_rate": 0.00013627120557327982, "loss": 0.5636, "step": 1700},
    {"epoch": 2.3, "learning_rate": 0.00013561270370520957, "loss": 0.5026, "step": 1710},
    {"epoch": 2.32, "learning_rate": 0.00013495242849481974, "loss": 0.6765, "step": 1720},
    {"epoch": 2.33, "learning_rate": 0.00013429041282065788, "loss": 0.4437, "step": 1730},
    {"epoch": 2.35, "learning_rate": 0.0001336266896479384, "loss": 0.5845, "step": 1740},
    {"epoch": 2.36, "learning_rate": 0.0001329612920269008, "loss": 0.4177, "step": 1750},
    {"epoch": 2.37, "learning_rate": 0.0001322942530911643, "loss": 0.6281, "step": 1760},
    {"epoch": 2.39, "learning_rate": 0.00013162560605607763, "loss": 0.6297, "step": 1770},
    {"epoch": 2.4, "learning_rate": 0.00013095538421706518, "loss": 0.7309, "step": 1780},
    {"epoch": 2.41, "learning_rate": 0.0001302836209479691, "loss": 0.5898, "step": 1790},
    {"epoch": 2.43, "learning_rate": 0.00012961034969938731, "loss": 0.5945, "step": 1800},
    {"epoch": 2.44, "learning_rate": 0.00012893560399700798, "loss": 0.6987, "step": 1810},
    {"epoch": 2.45, "learning_rate": 0.0001282594174399399, "loss": 0.4476, "step": 1820},
    {"epoch": 2.47, "learning_rate": 0.00012758182369903968, "loss": 0.6016, "step": 1830},
    {"epoch": 2.48, "learning_rate": 0.00012690285651523488, "loss": 0.667, "step": 1840},
    {"epoch": 2.49, "learning_rate": 0.00012622254969784394, "loss": 0.4903, "step": 1850},
    {"epoch": 2.51, "learning_rate": 0.00012554093712289265, "loss": 0.4684, "step": 1860},
    {"epoch": 2.52, "learning_rate": 0.00012485805273142737, "loss": 0.5173, "step": 1870},
    {"epoch": 2.53, "learning_rate": 0.00012417393052782468, "loss": 0.7519, "step": 1880},
    {"epoch": 2.55, "learning_rate": 0.00012348860457809838, "loss": 0.5067, "step": 1890},
    {"epoch": 2.56, "learning_rate": 0.00012280210900820308, "loss": 0.6649, "step": 1900},
    {"epoch": 2.57, "learning_rate": 0.00012211447800233483, "loss": 0.5927, "step": 1910},
    {"epoch": 2.59, "learning_rate": 0.00012142574580122903, "loss": 0.8222, "step": 1920},
    {"epoch": 2.6, "learning_rate": 0.00012073594670045525, "loss": 0.4151, "step": 1930},
    {"epoch": 2.61, "learning_rate": 0.00012004511504870966, "loss": 0.5066, "step": 1940},
    {"epoch": 2.63, "learning_rate": 0.00011935328524610443, "loss": 0.4361, "step": 1950},
    {"epoch": 2.64, "learning_rate": 0.00011866049174245491, "loss": 0.5214, "step": 1960},
    {"epoch": 2.65, "learning_rate": 0.00011796676903556418, "loss": 0.5282, "step": 1970},
    {"epoch": 2.67, "learning_rate": 0.00011727215166950519, "loss": 0.5023, "step": 1980},
    {"epoch": 2.68, "learning_rate": 0.00011657667423290055, "loss": 0.5332, "step": 1990},
    {"epoch": 2.7, "learning_rate": 0.00011588037135720042, "loss": 0.3463, "step": 2000},
    {"epoch": 2.71, "learning_rate": 0.0001151832777149578, "loss": 0.448, "step": 2010},
    {"epoch": 2.72, "learning_rate": 0.00011448542801810203, "loss": 0.6003, "step": 2020},
    {"epoch": 2.74, "learning_rate": 0.00011378685701621045, "loss": 0.6724, "step": 2030},
    {"epoch": 2.75, "learning_rate": 0.00011308759949477785, "loss": 0.5553, "step": 2040},
    {"epoch": 2.76, "learning_rate": 0.00011238769027348452, "loss": 0.4584, "step": 2050},
    {"epoch": 2.78, "learning_rate": 0.00011168716420446219, "loss": 0.4843, "step": 2060},
    {"epoch": 2.79, "learning_rate": 0.00011098605617055871, "loss": 0.4809, "step": 2070},
    {"epoch": 2.8, "learning_rate": 0.00011028440108360092, "loss": 0.6288, "step": 2080},
    {"epoch": 2.82, "learning_rate": 0.00010958223388265639, "loss": 0.5355, "step": 2090},
    {"epoch": 2.83, "learning_rate": 0.00010887958953229349, "loss": 0.5807, "step": 2100},
    {"epoch": 2.84, "learning_rate": 0.00010817650302084027, "loss": 0.5534, "step": 2110},
    {"epoch": 2.86, "learning_rate": 0.00010747300935864243, "loss": 0.72, "step": 2120},
    {"epoch": 2.87, "learning_rate": 0.00010676914357631969, "loss": 0.6605, "step": 2130},
    {"epoch": 2.88, "learning_rate": 0.00010606494072302165, "loss": 0.4739, "step": 2140},
    {"epoch": 2.9, "learning_rate": 0.00010536043586468237, "loss": 0.3668, "step": 2150},
    {"epoch": 2.91, "learning_rate": 0.00010465566408227438, "loss": 0.3007, "step": 2160},
    {"epoch": 2.92, "learning_rate": 0.00010395066047006179, "loss": 0.6202, "step": 2170},
    {"epoch": 2.94, "learning_rate": 0.00010324546013385259, "loss": 0.592, "step": 2180},
    {"epoch": 2.95, "learning_rate": 0.00010254009818925082, "loss": 0.5273, "step": 2190},
    {"epoch": 2.96, "learning_rate": 0.00010183460975990773, "loss": 0.616, "step": 2200},
    {"epoch": 2.98, "learning_rate": 0.000101129029975773, "loss": 0.6444, "step": 2210},
    {"epoch": 2.99, "learning_rate": 0.00010042339397134528, "loss": 0.4996, "step": 2220},
    {"epoch": 3.01, "learning_rate": 9.971773688392276e-05, "loss": 0.517, "step": 2230},
    {"epoch": 3.02, "learning_rate": 9.901209385185345e-05, "loss": 0.5568, "step": 2240},
    {"epoch": 3.03, "learning_rate": 9.830650001278543e-05, "loss": 0.405, "step": 2250},
    {"epoch": 3.05, "learning_rate": 9.76009905019173e-05, "loss": 0.4922, "step": 2260},
    {"epoch": 3.06, "learning_rate": 9.689560045024843e-05, "loss": 0.4797, "step": 2270},
    {"epoch": 3.07, "learning_rate": 9.619036498282967e-05, "loss": 0.3972, "step": 2280},
    {"epoch": 3.09, "learning_rate": 9.548531921701447e-05, "loss": 0.4663, "step": 2290},
    {"epoch": 3.1, "learning_rate": 9.478049826070987e-05, "loss": 0.5251, "step": 2300},
    {"epoch": 3.11, "learning_rate": 9.407593721062859e-05, "loss": 0.5958, "step": 2310},
    {"epoch": 3.13, "learning_rate": 9.33716711505412e-05, "loss": 0.4223, "step": 2320},
    {"epoch": 3.14, "learning_rate": 9.266773514952917e-05, "loss": 0.6653, "step": 2330},
    {"epoch": 3.15, "learning_rate": 9.196416426023869e-05, "loss": 0.4452, "step": 2340},
    {"epoch": 3.17, "learning_rate": 9.126099351713498e-05, "loss": 0.4905, "step": 2350},
    {"epoch": 3.18, "learning_rate": 9.055825793475813e-05, "loss": 0.3252, "step": 2360},
    {"epoch": 3.19, "learning_rate": 8.985599250597905e-05, "loss": 0.6868, "step": 2370},
    {"epoch": 3.21, "learning_rate": 8.915423220025747e-05, "loss": 0.5649, "step": 2380},
    {"epoch": 3.22, "learning_rate": 8.845301196190022e-05, "loss": 0.3913, "step": 2390},
    {"epoch": 3.23, "learning_rate": 8.775236670832146e-05, "loss": 0.5299, "step": 2400},
    {"epoch": 3.25, "learning_rate": 8.705233132830384e-05, "loss": 0.5685, "step": 2410},
    {"epoch": 3.26, "learning_rate": 8.635294068026128e-05, "loss": 0.4531, "step": 2420},
    {"epoch": 3.27, "learning_rate": 8.565422959050304e-05, "loss": 0.3068, "step": 2430},
    {"epoch": 3.29, "learning_rate": 8.495623285149962e-05, "loss": 0.4172, "step": 2440},
    {"epoch": 3.3, "learning_rate": 8.425898522015038e-05, "loss": 0.4935, "step": 2450},
    {"epoch": 3.32, "learning_rate": 8.356252141605261e-05, "loss": 0.6658, "step": 2460},
    {"epoch": 3.33, "learning_rate": 8.286687611977273e-05, "loss": 0.4393, "step": 2470},
    {"epoch": 3.34, "learning_rate": 8.217208397111947e-05, "loss": 0.3912, "step": 2480},
    {"epoch": 3.36, "learning_rate": 8.147817956741889e-05, "loss": 0.5443, "step": 2490},
    {"epoch": 3.37, "learning_rate": 8.078519746179151e-05, "loss": 0.4553, "step": 2500},
    {"epoch": 3.38, "learning_rate": 8.009317216143186e-05, "loss": 0.4, "step": 2510},
    {"epoch": 3.4, "learning_rate": 7.940213812589018e-05, "loss": 0.5195, "step": 2520},
    {"epoch": 3.41, "learning_rate": 7.871212976535641e-05, "loss": 0.573, "step": 2530},
    {"epoch": 3.42, "learning_rate": 7.802318143894677e-05, "loss": 0.4429, "step": 2540},
    {"epoch": 3.44, "learning_rate": 7.733532745299291e-05, "loss": 0.5758, "step": 2550},
    {"epoch": 3.45, "learning_rate": 7.664860205933356e-05, "loss": 0.4888, "step": 2560},
    {"epoch": 3.46, "learning_rate": 7.596303945360887e-05, "loss": 0.4142, "step": 2570},
    {"epoch": 3.48, "learning_rate": 7.52786737735578e-05, "loss": 0.5152, "step": 2580},
    {"epoch": 3.49, "learning_rate": 7.45955390973181e-05, "loss": 0.4343, "step": 2590},
    {"epoch": 3.5, "learning_rate": 7.391366944172941e-05, "loss": 0.5203, "step": 2600},
    {"epoch": 3.52, "learning_rate": 7.323309876063936e-05, "loss": 0.4741, "step": 2610},
    {"epoch": 3.53, "learning_rate": 7.255386094321293e-05, "loss": 0.3718, "step": 2620},
    {"epoch": 3.54, "learning_rate": 7.187598981224489e-05, "loss": 0.4686, "step": 2630},
    {"epoch": 3.56, "learning_rate": 7.119951912247544e-05, "loss": 0.4407, "step": 2640},
    {"epoch": 3.57, "learning_rate": 7.052448255890957e-05, "loss": 0.4055, "step": 2650},
    {"epoch": 3.58, "learning_rate": 6.985091373513972e-05, "loss": 0.4273, "step": 2660},
    {"epoch": 3.6, "learning_rate": 6.917884619167177e-05, "loss": 0.4123, "step": 2670},
    {"epoch": 3.61, "learning_rate": 6.850831339425507e-05, "loss": 0.4393, "step": 2680},
    {"epoch": 3.63, "learning_rate": 6.7839348732216e-05, "loss": 0.2929, "step": 2690},
    {"epoch": 3.64, "learning_rate": 6.717198551679531e-05, "loss": 0.3345, "step": 2700},
    {"epoch": 3.65, "learning_rate": 6.650625697948931e-05, "loss": 0.5929, "step": 2710},
    {"epoch": 3.67, "learning_rate": 6.584219627039513e-05, "loss": 0.4089, "step": 2720},
    {"epoch": 3.68, "learning_rate": 6.517983645656014e-05, "loss": 0.493, "step": 2730},
    {"epoch": 3.69, "learning_rate": 6.451921052033516e-05, "loss": 0.3468, "step": 2740},
    {"epoch": 3.71, "learning_rate": 6.386035135773222e-05, "loss": 0.4615, "step": 2750},
    {"epoch": 3.72, "learning_rate": 6.320329177678655e-05, "loss": 0.5787, "step": 2760},
    {"epoch": 3.73, "learning_rate": 6.254806449592272e-05, "loss": 0.5033, "step": 2770},
    {"epoch": 3.75, "learning_rate": 6.18947021423256e-05, "loss": 0.3752, "step": 2780},
    {"epoch": 3.76, "learning_rate": 6.12432372503155e-05, "loss": 0.5675, "step": 2790},
    {"epoch": 3.77, "learning_rate": 6.0593702259728336e-05, "loss": 0.4978, "step": 2800},
    {"epoch": 3.79, "learning_rate": 5.99461295143001e-05, "loss": 0.523, "step": 2810},
    {"epoch": 3.8, "learning_rate": 5.930055126005629e-05, "loss": 0.4715, "step": 2820},
    {"epoch": 3.81, "learning_rate": 5.865699964370648e-05, "loss": 0.5398, "step": 2830},
    {"epoch": 3.83, "learning_rate": 5.801550671104319e-05, "loss": 0.5612, "step": 2840},
    {"epoch": 3.84, "learning_rate": 5.7376104405346376e-05, "loss": 0.2808, "step": 2850},
    {"epoch": 3.85, "learning_rate": 5.673882456579283e-05, "loss": 0.3372, "step": 2860},
    {"epoch": 3.87, "learning_rate": 5.610369892587064e-05, "loss": 0.4146, "step": 2870},
    {"epoch": 3.88, "learning_rate": 5.547075911179902e-05, "loss": 0.4013, "step": 2880},
    {"epoch": 3.89, "learning_rate": 5.484003664095354e-05, "loss": 0.3936, "step": 2890},
    {"epoch": 3.91, "learning_rate": 5.42115629202968e-05, "loss": 0.5478, "step": 2900},
    {"epoch": 3.92, "learning_rate": 5.3585369244814164e-05, "loss": 0.5952, "step": 2910},
    {"epoch": 3.94, "learning_rate": 5.296148679595583e-05, "loss": 0.3574, "step": 2920},
    {"epoch": 3.95, "learning_rate": 5.2339946640083936e-05, "loss": 0.2372, "step": 2930},
    {"epoch": 3.96, "learning_rate": 5.172077972692553e-05, "loss": 0.4633, "step": 2940},
    {"epoch": 3.98, "learning_rate": 5.110401688803163e-05, "loss": 0.5295, "step": 2950},
    {"epoch": 3.99, "learning_rate": 5.048968883524182e-05, "loss": 0.5673, "step": 2960},
    {"epoch": 4.0, "learning_rate": 4.9877826159154994e-05, "loss": 0.5023, "step": 2970},
    {"epoch": 4.02, "learning_rate": 4.9268459327606085e-05, "loss": 0.4506, "step": 2980},
    {"epoch": 4.03, "learning_rate": 4.866161868414882e-05, "loss": 0.3591, "step": 2990},
    {"epoch": 4.04, "learning_rate": 4.805733444654495e-05, "loss": 0.532, "step": 3000},
    {"epoch": 4.06, "learning_rate": 4.7455636705259424e-05, "loss": 0.5063, "step": 3010},
    {"epoch": 4.07, "learning_rate": 4.6856555421961936e-05, "loss": 0.3964, "step": 3020},
    {"epoch": 4.08, "learning_rate": 4.6260120428035214e-05, "loss": 0.5457, "step": 3030},
    {"epoch": 4.1, "learning_rate": 4.5666361423089386e-05, "loss": 0.5478, "step": 3040},
    {"epoch": 4.11, "learning_rate": 4.5075307973483126e-05, "loss": 0.4954, "step": 3050},
    {"epoch": 4.12, "learning_rate": 4.448698951085143e-05, "loss": 0.4224, "step": 3060},
    {"epoch": 4.14, "learning_rate": 4.390143533063991e-05, "loss": 0.584, "step": 3070},
    {"epoch": 4.15, "learning_rate": 4.3318674590646237e-05, "loss": 0.3655, "step": 3080},
    {"epoch": 4.16, "learning_rate": 4.273873630956811e-05, "loss": 0.4947, "step": 3090},
    {"epoch": 4.18, "learning_rate": 4.2161649365558245e-05, "loss": 0.3446, "step": 3100},
    {"epoch": 4.19, "learning_rate": 4.158744249478641e-05, "loss": 0.2411, "step": 3110},
    {"epoch": 4.2, "learning_rate": 4.1016144290008566e-05, "loss": 0.457, "step": 3120},
    {"epoch": 4.22, "learning_rate": 4.0447783199142876e-05, "loss": 0.5, "step": 3130},
    {"epoch": 4.23, "learning_rate": 3.98823875238534e-05, "loss": 0.2912, "step": 3140},
    {"epoch": 4.25, "learning_rate": 3.931998541814069e-05, "loss": 0.3226, "step": 3150},
    {"epoch": 4.26, "learning_rate": 3.876060488693971e-05, "loss": 0.3796, "step": 3160},
    {"epoch": 4.27, "learning_rate": 3.820427378472569e-05, "loss": 0.3609, "step": 3170},
    {"epoch": 4.29, "learning_rate": 3.7651019814126654e-05, "loss": 0.466, "step": 3180},
    {"epoch": 4.3, "learning_rate": 3.710087052454433e-05, "loss": 0.4199, "step": 3190},
    {"epoch": 4.31, "learning_rate": 3.655385331078217e-05, "loss": 0.4164, "step": 3200},
    {"epoch": 4.33, "learning_rate": 3.600999541168113e-05, "loss": 0.4004, "step": 3210},
    {"epoch": 4.34, "learning_rate": 3.546932390876351e-05, "loss": 0.3916, "step": 3220},
    {"epoch": 4.35, "learning_rate": 3.493186572488427e-05, "loss": 0.3104, "step": 3230},
    {"epoch": 4.37, "learning_rate": 3.4397647622890505e-05, "loss": 0.3467, "step": 3240},
    {"epoch": 4.38, "learning_rate": 3.386669620428865e-05, "loss": 0.3333, "step": 3250},
    {"epoch": 4.39, "learning_rate": 3.3339037907920015e-05, "loss": 0.3964, "step": 3260},
    {"epoch": 4.41, "learning_rate": 3.2814699008644066e-05, "loss": 0.3894, "step": 3270},
    {"epoch": 4.42, "learning_rate": 3.229370561603027e-05, "loss": 0.3275, "step": 3280},
    {"epoch": 4.43, "learning_rate": 3.177608367305783e-05, "loss": 0.4069, "step": 3290},
    {"epoch": 4.45, "learning_rate": 3.126185895482381e-05, "loss": 0.3087, "step": 3300},
    {"epoch": 4.46, "learning_rate": 3.075105706725989e-05, "loss": 0.3359, "step": 3310},
    {"epoch": 4.47, "learning_rate": 3.0243703445856986e-05, "loss": 0.2674, "step": 3320},
    {"epoch": 4.49, "learning_rate": 2.9739823354398953e-05, "loss": 0.4484, "step": 3330},
    {"epoch": 4.5, "learning_rate": 2.9239441883704455e-05, "loss": 0.3065, "step": 3340},
    {"epoch": 4.51, "learning_rate": 2.874258395037751e-05, "loss": 0.3809, "step": 3350},
    {"epoch": 4.53, "learning_rate": 2.8249274295566875e-05, "loss": 0.5126, "step": 3360},
    {"epoch": 4.54, "learning_rate": 2.7759537483734077e-05, "loss": 0.5343, "step": 3370},
    {"epoch": 4.56, "learning_rate": 2.727339790143002e-05, "loss": 0.4366, "step": 3380},
    {"epoch": 4.57, "learning_rate": 2.6790879756080844e-05, "loss": 0.3404, "step": 3390},
    {"epoch": 4.58, "learning_rate": 2.6312007074782497e-05, "loss": 0.3992, "step": 3400},
    {"epoch": 4.6, "learning_rate": 2.583680370310414e-05, "loss": 0.3978, "step": 3410},
    {"epoch": 4.61, "learning_rate": 2.536529330390095e-05, "loss": 0.4491, "step": 3420},
    {"epoch": 4.62, "learning_rate": 2.489749935613571e-05, "loss": 0.4361, "step": 3430},
    {"epoch": 4.64, "learning_rate": 2.4433445153709722e-05, "loss": 0.6447, "step": 3440},
    {"epoch": 4.65, "learning_rate": 2.397315380430284e-05, "loss": 0.3623, "step": 3450},
    {"epoch": 4.66, "learning_rate": 2.351664822822278e-05, "loss": 0.4129, "step": 3460},
    {"epoch": 4.68, "learning_rate": 2.306395115726392e-05, "loss": 0.3398, "step": 3470},
    {"epoch": 4.69, "learning_rate": 2.2615085133575333e-05, "loss": 0.4225, "step": 3480},
    {"epoch": 4.7, "learning_rate": 2.217007250853812e-05, "loss": 0.3665, "step": 3490},
    {"epoch": 4.72, "learning_rate": 2.1728935441652686e-05, "loss": 0.3539, "step": 3500},
    {"epoch": 4.73, "learning_rate": 2.1291695899435136e-05, "loss": 0.3286, "step": 3510},
    {"epoch": 4.74, "learning_rate": 2.085837565432348e-05, "loss": 0.2785, "step": 3520},
    {"epoch": 4.76, "learning_rate": 2.0428996283593505e-05, "loss": 0.4158, "step": 3530},
    {"epoch": 4.77, "learning_rate": 2.000357916828428e-05, "loss": 0.4497, "step": 3540},
    {"epoch": 4.78, "learning_rate": 1.9582145492133464e-05, "loss": 0.5906, "step": 3550},
    {"epoch": 4.8, "learning_rate": 1.916471624052256e-05, "loss": 0.3557, "step": 3560},
    {"epoch": 4.81, "learning_rate": 1.875131219943187e-05, "loss": 0.4502, "step": 3570},
    {"epoch": 4.82, "learning_rate": 1.8341953954405432e-05, "loss": 0.3378, "step": 3580},
    {"epoch": 4.84, "learning_rate": 1.7936661889526063e-05, "loss": 0.3981, "step": 3590},
    {"epoch": 4.85, "learning_rate": 1.753545618640012e-05, "loss": 0.3576, "step": 3600},
    {"epoch": 4.87, "learning_rate": 1.7138356823152823e-05, "loss": 0.3878, "step": 3610},
    {"epoch": 4.88, "learning_rate": 1.674538357343326e-05, "loss": 0.3435, "step": 3620},
    {"epoch": 4.89, "learning_rate": 1.6356556005429757e-05, "loss": 0.2566, "step": 3630},
    {"epoch": 4.91, "learning_rate": 1.5971893480895583e-05, "loss": 0.537, "step": 3640},
    {"epoch": 4.92, "learning_rate": 1.559141515418472e-05, "loss": 0.4963, "step": 3650},
    {"epoch": 4.93, "learning_rate": 1.5215139971298132e-05, "loss": 0.4599, "step": 3660},
    {"epoch": 4.95, "learning_rate": 1.4843086668940332e-05, "loss": 0.3847, "step": 3670},
    {"epoch": 4.96, "learning_rate": 1.4475273773586317e-05, "loss": 0.1902, "step": 3680},
    {"epoch": 4.97, "learning_rate": 1.411171960055916e-05, "loss": 0.3575, "step": 3690},
    {"epoch": 4.99, "learning_rate": 1.3752442253117902e-05, "loss": 0.3715, "step": 3700},
    {"epoch": 5.0, "learning_rate": 1.339745962155613e-05, "loss": 0.3478, "step": 3710},
    {"epoch": 5.01, "learning_rate": 1.3046789382311131e-05, "loss": 0.456, "step": 3720},
    {"epoch": 5.03, "learning_rate": 1.2700448997083681e-05, "loss": 0.4674, "step": 3730},
    {"epoch": 5.04, "learning_rate": 1.2358455711968464e-05, "loss": 0.2651, "step": 3740},
    {"epoch": 5.05, "learning_rate": 1.2020826556595476e-05, "loss": 0.4427, "step": 3750},
    {"epoch": 5.07, "learning_rate": 1.168757834328188e-05, "loss": 0.4976, "step": 3760},
    {"epoch": 5.08, "learning_rate": 1.1358727666194812e-05, "loss": 0.4809, "step": 3770},
    {"epoch": 5.09, "learning_rate": 1.103429090052528e-05, "loss": 0.4277, "step": 3780},
    {"epoch": 5.11, "learning_rate": 1.0714284201672476e-05, "loss": 0.267, "step": 3790},
    {"epoch": 5.12, "learning_rate": 1.0398723504439512e-05, "loss": 0.4261, "step": 3800},
    {"epoch": 5.13, "learning_rate": 1.0087624522239902e-05, "loss": 0.2857, "step": 3810},
    {"epoch": 5.15, "learning_rate": 9.781002746315038e-06, "loss": 0.3459, "step": 3820},
    {"epoch": 5.16, "learning_rate": 9.478873444962888e-06, "loss": 0.4636, "step": 3830},
    {"epoch": 5.18, "learning_rate": 9.181251662777668e-06, "loss": 0.4077, "step": 3840},
    {"epoch": 5.19, "learning_rate": 8.888152219900691e-06, "loss": 0.3093, "step": 3850},
    {"epoch": 5.2, "learning_rate": 8.599589711282419e-06, "loss": 0.3924, "step": 3860},
    {"epoch": 5.22, "learning_rate": 8.315578505955678e-06, "loss": 0.3845, "step": 3870},
    {"epoch": 5.23, "learning_rate": 8.036132746320123e-06, "loss": 0.4115, "step": 3880},
    {"epoch": 5.24, "learning_rate": 7.76126634743809e-06, "loss": 0.368, "step": 3890},
    {"epoch": 5.26, "learning_rate": 7.490992996341661e-06, "loss": 0.3052, "step": 3900},
    {"epoch": 5.27, "learning_rate": 7.2253261513510416e-06, "loss": 0.4267, "step": 3910},
    {"epoch": 5.28, "learning_rate": 6.964279041404553e-06, "loss": 0.5, "step": 3920},
    {"epoch": 5.3, "learning_rate": 6.7078646653997324e-06, "loss": 0.3822, "step": 3930},
    {"epoch": 5.31, "learning_rate": 6.456095791546146e-06, "loss": 0.2661, "step": 3940},
    {"epoch": 5.32, "learning_rate": 6.208984956729613e-06, "loss": 0.3472, "step": 3950},
    {"epoch": 5.34, "learning_rate": 5.9665444658878026e-06, "loss": 0.2669, "step": 3960},
    {"epoch": 5.35, "learning_rate": 5.728786391397667e-06, "loss": 0.3258, "step": 3970},
    {"epoch": 5.36, "learning_rate": 5.4957225724741825e-06, "loss": 0.3431, "step": 3980},
    {"epoch": 5.38, "learning_rate": 5.267364614580861e-06, "loss": 0.4076, "step": 3990},
    {"epoch": 5.39, "learning_rate": 5.043723888851836e-06, "loss": 0.2181, "step": 4000},
    {"epoch": 5.4, "learning_rate": 4.824811531525641e-06, "loss": 0.4222, "step": 4010},
    {"epoch": 5.42, "learning_rate": 4.6106384433906806e-06, "loss": 0.3658, "step": 4020},
    {"epoch": 5.43, "learning_rate": 4.401215289242411e-06, "loss": 0.4654, "step": 4030},
    {"epoch": 5.44, "learning_rate": 4.196552497352302e-06, "loss": 0.3404, "step": 4040},
    {"epoch": 5.46, "learning_rate": 3.996660258948548e-06, "loss": 0.3914, "step": 4050},
    {"epoch": 5.47, "learning_rate": 3.8015485277086205e-06, "loss": 0.379, "step": 4060},
    {"epoch": 5.49, "learning_rate": 3.6112270192635413e-06, "loss": 0.4581, "step": 4070},
    {"epoch": 5.5, "learning_rate": 3.4257052107141916e-06, "loss": 0.3449, "step": 4080},
    {"epoch": 5.51, "learning_rate": 3.2449923401593587e-06, "loss": 0.3944, "step": 4090},
    {"epoch": 5.53, "learning_rate": 3.069097406235666e-06, "loss": 0.4832, "step": 4100},
    {"epoch": 5.54, "learning_rate": 2.8980291676695915e-06, "loss": 0.2436, "step": 4110},
    {"epoch": 5.55, "learning_rate": 2.7317961428412473e-06, "loss": 0.2595, "step": 4120},
    {"epoch": 5.57, "learning_rate": 2.570406609360221e-06, "loss": 0.4066, "step": 4130},
    {"epoch": 5.58, "learning_rate": 2.413868603653413e-06, "loss": 0.4674, "step": 4140},
    {"epoch": 5.59, "learning_rate": 2.262189920564817e-06, "loss": 0.4194, "step": 4150},
    {"epoch": 5.61, "learning_rate": 2.1153781129674365e-06, "loss": 0.3481, "step": 4160},
    {"epoch": 5.62, "learning_rate": 1.9734404913871127e-06, "loss": 0.2861, "step": 4170},
    {"epoch": 5.63, "learning_rate": 1.836384123638557e-06, "loss": 0.3346, "step": 4180},
    {"epoch": 5.65, "learning_rate": 1.7042158344733995e-06, "loss": 0.3528, "step": 4190},
    {"epoch": 5.66, "learning_rate": 1.576942205240317e-06, "loss": 0.2301, "step": 4200},
    {"epoch": 5.67, "learning_rate": 1.4545695735573295e-06, "loss": 0.4004, "step": 4210},
    {"epoch": 5.69, "learning_rate": 1.337104032996206e-06, "loss": 0.2058, "step": 4220},
    {"epoch": 5.7, "learning_rate": 1.2245514327790864e-06, "loss": 0.5147, "step": 4230},
    {"epoch": 5.71, "learning_rate": 1.1169173774871478e-06, "loss": 0.5135, "step": 4240},
    {"epoch": 5.73, "learning_rate": 1.0142072267815606e-06, "loss": 0.2979, "step": 4250},
    {"epoch": 5.74, "learning_rate": 9.164260951366022e-07, "loss": 0.5042, "step": 4260},
    {"epoch": 5.75, "learning_rate": 8.235788515849607e-07, "loss": 0.2839, "step": 4270},
    {"epoch": 5.77, "learning_rate": 7.356701194753291e-07, "loss": 0.4848, "step": 4280},
    {"epoch": 5.78, "learning_rate": 6.527042762420888e-07, "loss": 0.4224, "step": 4290},
    {"epoch": 5.8, "learning_rate": 5.746854531874624e-07, "loss": 0.3816, "step": 4300},
    {"epoch": 5.81, "learning_rate": 5.01617535275678e-07, "loss": 0.3958, "step": 4310},
    {"epoch": 5.82, "learning_rate": 4.335041609396018e-07, "loss": 0.438, "step": 4320},
    {"epoch": 5.84, "learning_rate": 3.703487218994939e-07, "loss": 0.4769, "step": 4330},
    {"epoch": 5.85, "learning_rate": 3.12154362994177e-07, "loss": 0.28, "step": 4340},
    {"epoch": 5.86, "learning_rate": 2.5892398202439496e-07, "loss": 0.3212, "step": 4350},
    {"epoch": 5.88, "learning_rate": 2.1066022960852805e-07, "loss": 0.4231, "step": 4360},
    {"epoch": 5.89, "learning_rate": 1.673655090506321e-07, "loss": 0.3384, "step": 4370},
    {"epoch": 5.9, "learning_rate": 1.290419762207007e-07, "loss": 0.3409, "step": 4380},
    {"epoch": 5.92, "learning_rate": 9.569153944740672e-08, "loss": 0.4758, "step": 4390},
    {"epoch": 5.93, "learning_rate": 6.731585942297835e-08, "loss": 0.2881, "step": 4400},
    {"epoch": 5.94, "learning_rate": 4.391634912056519e-08, "loss": 0.2555, "step": 4410},
    {"epoch": 5.96, "learning_rate": 2.5494173723883408e-08, "loss": 0.203, "step": 4420},
    {"epoch": 5.97, "learning_rate": 1.2050250569139998e-08, "loss": 0.3696, "step": 4430},
    {"epoch": 5.98, "learning_rate": 3.5852490994359165e-09, "loss": 0.4845, "step": 4440},
    {"epoch": 6.0, "learning_rate": 9.959083132615731e-11, "loss": 0.351, "step": 4450}
  ],
  "logging_steps": 10,
  "max_steps": 4452,
  "num_train_epochs": 6,
  "save_steps": 500,
  "total_flos": 2.326608096362496e+16,
  "trial_name": null,
  "trial_params": null
}