{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 2226,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.9999377559783764e-05, "loss": 1.7762, "step": 10 },
    { "epoch": 0.03, "learning_rate": 4.999751027012961e-05, "loss": 1.7656, "step": 20 },
    { "epoch": 0.04, "learning_rate": 4.999439822401963e-05, "loss": 1.8093, "step": 30 },
    { "epoch": 0.05, "learning_rate": 4.9990041576418836e-05, "loss": 1.5602, "step": 40 },
    { "epoch": 0.07, "learning_rate": 4.998444054426743e-05, "loss": 1.5704, "step": 50 },
    { "epoch": 0.08, "learning_rate": 4.9977595406470044e-05, "loss": 1.7002, "step": 60 },
    { "epoch": 0.09, "learning_rate": 4.9969506503881794e-05, "loss": 1.694, "step": 70 },
    { "epoch": 0.11, "learning_rate": 4.996017423929133e-05, "loss": 1.6043, "step": 80 },
    { "epoch": 0.12, "learning_rate": 4.9949599077400825e-05, "loss": 1.6391, "step": 90 },
    { "epoch": 0.13, "learning_rate": 4.9937781544802745e-05, "loss": 1.6518, "step": 100 },
    { "epoch": 0.15, "learning_rate": 4.992472222995368e-05, "loss": 1.6928, "step": 110 },
    { "epoch": 0.16, "learning_rate": 4.9910421783145074e-05, "loss": 1.7065, "step": 120 },
    { "epoch": 0.18, "learning_rate": 4.9894880916470764e-05, "loss": 1.6429, "step": 130 },
    { "epoch": 0.19, "learning_rate": 4.98781004037916e-05, "loss": 1.5974, "step": 140 },
    { "epoch": 0.2, "learning_rate": 4.986008108069685e-05, "loss": 1.6029, "step": 150 },
    { "epoch": 0.22, "learning_rate": 4.984082384446263e-05, "loss": 1.4892, "step": 160 },
    { "epoch": 0.23, "learning_rate": 4.982032965400719e-05, "loss": 1.4642, "step": 170 },
    { "epoch": 0.24, "learning_rate": 4.97985995298432e-05, "loss": 1.5265, "step": 180 },
    { "epoch": 0.26, "learning_rate": 4.977563455402693e-05, "loss": 1.4878, "step": 190 },
    { "epoch": 0.27, "learning_rate": 4.975143587010432e-05, "loss": 1.5623, "step": 200 },
    { "epoch": 0.28, "learning_rate": 4.97260046830541e-05, "loss": 1.6067, "step": 210 },
    { "epoch": 0.3, "learning_rate": 4.969934225922777e-05, "loss": 1.4726, "step": 220 },
    { "epoch": 0.31, "learning_rate": 4.9671449926286494e-05, "loss": 1.4386, "step": 230 },
    { "epoch": 0.32, "learning_rate": 4.964232907313507e-05, "loss": 1.5058, "step": 240 },
    { "epoch": 0.34, "learning_rate": 4.9611981149852705e-05, "loss": 1.4842, "step": 250 },
    { "epoch": 0.35, "learning_rate": 4.9580407667620835e-05, "loss": 1.4038, "step": 260 },
    { "epoch": 0.36, "learning_rate": 4.9547610198647855e-05, "loss": 1.4696, "step": 270 },
    { "epoch": 0.38, "learning_rate": 4.951359037609088e-05, "loss": 1.5231, "step": 280 },
    { "epoch": 0.39, "learning_rate": 4.9478349893974345e-05, "loss": 1.341, "step": 290 },
    { "epoch": 0.4, "learning_rate": 4.9441890507105735e-05, "loss": 1.4268, "step": 300 },
    { "epoch": 0.42, "learning_rate": 4.9404214030988126e-05, "loss": 1.4481, "step": 310 },
    { "epoch": 0.43, "learning_rate": 4.9365322341729845e-05, "loss": 1.4532, "step": 320 },
    { "epoch": 0.44, "learning_rate": 4.9325217375951006e-05, "loss": 1.2414, "step": 330 },
    { "epoch": 0.46, "learning_rate": 4.9283901130687094e-05, "loss": 1.3546, "step": 340 },
    { "epoch": 0.47, "learning_rate": 4.9241375663289516e-05, "loss": 1.3501, "step": 350 },
    { "epoch": 0.49, "learning_rate": 4.9197643091323165e-05, "loss": 1.4564, "step": 360 },
    { "epoch": 0.5, "learning_rate": 4.9152705592460964e-05, "loss": 1.4721, "step": 370 },
    { "epoch": 0.51, "learning_rate": 4.910656540437544e-05, "loss": 1.5005, "step": 380 },
    { "epoch": 0.53, "learning_rate": 4.905922482462727e-05, "loss": 1.3807, "step": 390 },
    { "epoch": 0.54, "learning_rate": 4.901068621055093e-05, "loss": 1.3058, "step": 400 },
    { "epoch": 0.55, "learning_rate": 4.8960951979137236e-05, "loss": 1.321, "step": 410 },
    { "epoch": 0.57, "learning_rate": 4.891002460691306e-05, "loss": 1.4186, "step": 420 },
    { "epoch": 0.58, "learning_rate": 4.885790662981797e-05, "loss": 1.5187, "step": 430 },
    { "epoch": 0.59, "learning_rate": 4.8804600643077954e-05, "loss": 1.505, "step": 440 },
    { "epoch": 0.61, "learning_rate": 4.8750109301076205e-05, "loss": 1.2945, "step": 450 },
    { "epoch": 0.62, "learning_rate": 4.869443531722094e-05, "loss": 1.4216, "step": 460 },
    { "epoch": 0.63, "learning_rate": 4.863758146381029e-05, "loss": 1.3838, "step": 470 },
    { "epoch": 0.65, "learning_rate": 4.8579550571894225e-05, "loss": 1.2366, "step": 480 },
    { "epoch": 0.66, "learning_rate": 4.8520345531133636e-05, "loss": 1.3815, "step": 490 },
    { "epoch": 0.67, "learning_rate": 4.845996928965638e-05, "loss": 1.2919, "step": 500 },
    { "epoch": 0.69, "learning_rate": 4.839842485391053e-05, "loss": 1.3449, "step": 510 },
    { "epoch": 0.7, "learning_rate": 4.833571528851463e-05, "loss": 1.4683, "step": 520 },
    { "epoch": 0.71, "learning_rate": 4.827184371610511e-05, "loss": 1.3436, "step": 530 },
    { "epoch": 0.73, "learning_rate": 4.820681331718081e-05, "loss": 1.2934, "step": 540 },
    { "epoch": 0.74, "learning_rate": 4.8140627329944566e-05, "loss": 1.2362, "step": 550 },
    { "epoch": 0.75, "learning_rate": 4.8073289050142015e-05, "loss": 1.3885, "step": 560 },
    { "epoch": 0.77, "learning_rate": 4.80048018308974e-05, "loss": 1.3071, "step": 570 },
    { "epoch": 0.78, "learning_rate": 4.7935169082546705e-05, "loss": 1.3223, "step": 580 },
    { "epoch": 0.8, "learning_rate": 4.7864394272467764e-05, "loss": 1.2082, "step": 590 },
    { "epoch": 0.81, "learning_rate": 4.7792480924907624e-05, "loss": 1.202, "step": 600 },
    { "epoch": 0.82, "learning_rate": 4.7719432620807046e-05, "loss": 1.2511, "step": 610 },
    { "epoch": 0.84, "learning_rate": 4.764525299762222e-05, "loss": 1.1798, "step": 620 },
    { "epoch": 0.85, "learning_rate": 4.756994574914359e-05, "loss": 1.3269, "step": 630 },
    { "epoch": 0.86, "learning_rate": 4.749351462531196e-05, "loss": 1.4226, "step": 640 },
    { "epoch": 0.88, "learning_rate": 4.741596343203175e-05, "loss": 1.3431, "step": 650 },
    { "epoch": 0.89, "learning_rate": 4.733729603098149e-05, "loss": 1.2419, "step": 660 },
    { "epoch": 0.9, "learning_rate": 4.7257516339421494e-05, "loss": 1.1961, "step": 670 },
    { "epoch": 0.92, "learning_rate": 4.7176628329998854e-05, "loss": 1.4433, "step": 680 },
    { "epoch": 0.93, "learning_rate": 4.7094636030549564e-05, "loss": 1.1532, "step": 690 },
    { "epoch": 0.94, "learning_rate": 4.7011543523897996e-05, "loss": 1.2486, "step": 700 },
    { "epoch": 0.96, "learning_rate": 4.692735494765358e-05, "loss": 1.2433, "step": 710 },
    { "epoch": 0.97, "learning_rate": 4.684207449400476e-05, "loss": 1.3521, "step": 720 },
    { "epoch": 0.98, "learning_rate": 4.675570640951025e-05, "loss": 1.264, "step": 730 },
    { "epoch": 1.0, "learning_rate": 4.666825499488759e-05, "loss": 1.2021, "step": 740 },
    { "epoch": 1.01, "learning_rate": 4.657972460479899e-05, "loss": 1.1499, "step": 750 },
    { "epoch": 1.02, "learning_rate": 4.649011964763444e-05, "loss": 1.2565, "step": 760 },
    { "epoch": 1.04, "learning_rate": 4.639944458529226e-05, "loss": 1.2917, "step": 770 },
    { "epoch": 1.05, "learning_rate": 4.630770393295688e-05, "loss": 1.3944, "step": 780 },
    { "epoch": 1.06, "learning_rate": 4.6214902258874034e-05, "loss": 1.3155, "step": 790 },
    { "epoch": 1.08, "learning_rate": 4.612104418412323e-05, "loss": 1.1417, "step": 800 },
    { "epoch": 1.09, "learning_rate": 4.60261343823877e-05, "loss": 1.3151, "step": 810 },
    { "epoch": 1.11, "learning_rate": 4.593017757972165e-05, "loss": 1.3504, "step": 820 },
    { "epoch": 1.12, "learning_rate": 4.5833178554314915e-05, "loss": 1.1928, "step": 830 },
    { "epoch": 1.13, "learning_rate": 4.573514213625505e-05, "loss": 1.1215, "step": 840 },
    { "epoch": 1.15, "learning_rate": 4.563607320728679e-05, "loss": 1.3223, "step": 850 },
    { "epoch": 1.16, "learning_rate": 4.553597670056897e-05, "loss": 1.1495, "step": 860 },
    { "epoch": 1.17, "learning_rate": 4.543485760042892e-05, "loss": 1.2296, "step": 870 },
    { "epoch": 1.19, "learning_rate": 4.533272094211418e-05, "loss": 1.1743, "step": 880 },
    { "epoch": 1.2, "learning_rate": 4.522957181154185e-05, "loss": 1.2596, "step": 890 },
    { "epoch": 1.21, "learning_rate": 4.5125415345045316e-05, "loss": 1.3216, "step": 900 },
    { "epoch": 1.23, "learning_rate": 4.5020256729118446e-05, "loss": 1.2431, "step": 910 },
    { "epoch": 1.24, "learning_rate": 4.491410120015737e-05, "loss": 1.2134, "step": 920 },
    { "epoch": 1.25, "learning_rate": 4.4806954044199725e-05, "loss": 1.3169, "step": 930 },
    { "epoch": 1.27, "learning_rate": 4.469882059666142e-05, "loss": 1.1074, "step": 940 },
    { "epoch": 1.28, "learning_rate": 4.458970624207098e-05, "loss": 1.2713, "step": 950 },
    { "epoch": 1.29, "learning_rate": 4.4479616413801396e-05, "loss": 1.2656, "step": 960 },
    { "epoch": 1.31, "learning_rate": 4.4368556593799593e-05, "loss": 1.2994, "step": 970 },
    { "epoch": 1.32, "learning_rate": 4.425653231231344e-05, "loss": 1.3854, "step": 980 },
    { "epoch": 1.33, "learning_rate": 4.414354914761638e-05, "loss": 1.2299, "step": 990 },
    { "epoch": 1.35, "learning_rate": 4.402961272572964e-05, "loss": 1.3386, "step": 1000 },
    { "epoch": 1.36, "learning_rate": 4.391472872014212e-05, "loss": 1.2417, "step": 1010 },
    { "epoch": 1.37, "learning_rate": 4.379890285152782e-05, "loss": 1.3562, "step": 1020 },
    { "epoch": 1.39, "learning_rate": 4.368214088746107e-05, "loss": 1.0887, "step": 1030 },
    { "epoch": 1.4, "learning_rate": 4.356444864212921e-05, "loss": 1.2619, "step": 1040 },
    { "epoch": 1.42, "learning_rate": 4.344583197604318e-05, "loss": 1.1281, "step": 1050 },
    { "epoch": 1.43, "learning_rate": 4.332629679574566e-05, "loss": 1.123, "step": 1060 },
    { "epoch": 1.44, "learning_rate": 4.320584905351691e-05, "loss": 1.2429, "step": 1070 },
    { "epoch": 1.46, "learning_rate": 4.308449474707843e-05, "loss": 1.1914, "step": 1080 },
    { "epoch": 1.47, "learning_rate": 4.296223991929429e-05, "loss": 1.1858, "step": 1090 },
    { "epoch": 1.48, "learning_rate": 4.2839090657870196e-05, "loss": 1.1781, "step": 1100 },
    { "epoch": 1.5, "learning_rate": 4.271505309505038e-05, "loss": 1.2164, "step": 1110 },
    { "epoch": 1.51, "learning_rate": 4.259013340731224e-05, "loss": 1.0746, "step": 1120 },
    { "epoch": 1.52, "learning_rate": 4.246433781505876e-05, "loss": 1.0874, "step": 1130 },
    { "epoch": 1.54, "learning_rate": 4.2337672582308815e-05, "loss": 1.1537, "step": 1140 },
    { "epoch": 1.55, "learning_rate": 4.221014401638517e-05, "loss": 1.3942, "step": 1150 },
    { "epoch": 1.56, "learning_rate": 4.20817584676005e-05, "loss": 1.0343, "step": 1160 },
    { "epoch": 1.58, "learning_rate": 4.195252232894108e-05, "loss": 1.2696, "step": 1170 },
    { "epoch": 1.59, "learning_rate": 4.1822442035748534e-05, "loss": 1.2511, "step": 1180 },
    { "epoch": 1.6, "learning_rate": 4.1691524065399324e-05, "loss": 1.0978, "step": 1190 },
    { "epoch": 1.62, "learning_rate": 4.155977493698222e-05, "loss": 1.1222, "step": 1200 },
    { "epoch": 1.63, "learning_rate": 4.142720121097372e-05, "loss": 1.1663, "step": 1210 },
    { "epoch": 1.64, "learning_rate": 4.129380948891132e-05, "loss": 1.209, "step": 1220 },
    { "epoch": 1.66, "learning_rate": 4.1159606413064786e-05, "loss": 1.2177, "step": 1230 },
    { "epoch": 1.67, "learning_rate": 4.1024598666105444e-05, "loss": 1.1437, "step": 1240 },
    { "epoch": 1.68, "learning_rate": 4.088879297077341e-05, "loss": 1.0417, "step": 1250 },
    { "epoch": 1.7, "learning_rate": 4.075219608954278e-05, "loss": 1.1635, "step": 1260 },
    { "epoch": 1.71, "learning_rate": 4.0614814824284943e-05, "loss": 1.0666, "step": 1270 },
    { "epoch": 1.73, "learning_rate": 4.047665601592984e-05, "loss": 1.3199, "step": 1280 },
    { "epoch": 1.74, "learning_rate": 4.0337726544125376e-05, "loss": 1.1025, "step": 1290 },
    { "epoch": 1.75, "learning_rate": 4.019803332689478e-05, "loss": 0.9055, "step": 1300 },
    { "epoch": 1.77, "learning_rate": 4.0057583320292154e-05, "loss": 1.0077, "step": 1310 },
    { "epoch": 1.78, "learning_rate": 3.991638351805611e-05, "loss": 1.1106, "step": 1320 },
    { "epoch": 1.79, "learning_rate": 3.977444095126146e-05, "loss": 1.0175, "step": 1330 },
    { "epoch": 1.81, "learning_rate": 3.9631762687969184e-05, "loss": 1.1105, "step": 1340 },
    { "epoch": 1.82, "learning_rate": 3.948835583287439e-05, "loss": 0.96, "step": 1350 },
    { "epoch": 1.83, "learning_rate": 3.93442275269526e-05, "loss": 1.2329, "step": 1360 },
    { "epoch": 1.85, "learning_rate": 3.9199384947104136e-05, "loss": 1.0752, "step": 1370 },
    { "epoch": 1.86, "learning_rate": 3.9053835305796706e-05, "loss": 0.9997, "step": 1380 },
    { "epoch": 1.87, "learning_rate": 3.890758585070635e-05, "loss": 1.1088, "step": 1390 },
    { "epoch": 1.89, "learning_rate": 3.876064386435646e-05, "loss": 0.94, "step": 1400 },
    { "epoch": 1.9, "learning_rate": 3.861301666375517e-05, "loss": 1.1949, "step": 1410 },
    { "epoch": 1.91, "learning_rate": 3.846471160003102e-05, "loss": 1.2521, "step": 1420 },
    { "epoch": 1.93, "learning_rate": 3.831573605806689e-05, "loss": 1.1376, "step": 1430 },
    { "epoch": 1.94, "learning_rate": 3.816609745613224e-05, "loss": 1.1883, "step": 1440 },
    { "epoch": 1.95, "learning_rate": 3.80158032455138e-05, "loss": 1.3168, "step": 1450 },
    { "epoch": 1.97, "learning_rate": 3.7864860910144425e-05, "loss": 1.0501, "step": 1460 },
    { "epoch": 1.98, "learning_rate": 3.7713277966230514e-05, "loss": 1.1886, "step": 1470 },
    { "epoch": 1.99, "learning_rate": 3.7561061961877694e-05, "loss": 1.1367, "step": 1480 },
    { "epoch": 2.01, "learning_rate": 3.740822047671498e-05, "loss": 1.1561, "step": 1490 },
    { "epoch": 2.02, "learning_rate": 3.725476112151735e-05, "loss": 1.0747, "step": 1500 },
    { "epoch": 2.04, "learning_rate": 3.710069153782672e-05, "loss": 1.0443, "step": 1510 },
    { "epoch": 2.05, "learning_rate": 3.6946019397571496e-05, "loss": 1.3622, "step": 1520 },
    { "epoch": 2.06, "learning_rate": 3.679075240268452e-05, "loss": 1.1274, "step": 1530 },
    { "epoch": 2.08, "learning_rate": 3.663489828471953e-05, "loss": 0.9682, "step": 1540 },
    { "epoch": 2.09, "learning_rate": 3.647846480446621e-05, "loss": 0.9268, "step": 1550 },
    { "epoch": 2.1, "learning_rate": 3.6321459751563703e-05, "loss": 1.1239, "step": 1560 },
    { "epoch": 2.12, "learning_rate": 3.6163890944112715e-05, "loss": 1.3213, "step": 1570 },
    { "epoch": 2.13, "learning_rate": 3.600576622828627e-05, "loss": 1.1064, "step": 1580 },
    { "epoch": 2.14, "learning_rate": 3.5847093477938956e-05, "loss": 1.2662, "step": 1590 },
    { "epoch": 2.16, "learning_rate": 3.568788059421484e-05, "loss": 1.0053, "step": 1600 },
    { "epoch": 2.17, "learning_rate": 3.5528135505154084e-05, "loss": 1.0666, "step": 1610 },
    { "epoch": 2.18, "learning_rate": 3.5367866165298084e-05, "loss": 1.1048, "step": 1620 },
    { "epoch": 2.2, "learning_rate": 3.520708055529347e-05, "loss": 1.0777, "step": 1630 },
    { "epoch": 2.21, "learning_rate": 3.504578668149462e-05, "loss": 1.0932, "step": 1640 },
    { "epoch": 2.22, "learning_rate": 3.488399257556502e-05, "loss": 1.0945, "step": 1650 },
    { "epoch": 2.24, "learning_rate": 3.472170629407735e-05, "loss": 1.1311, "step": 1660 },
    { "epoch": 2.25, "learning_rate": 3.455893591811225e-05, "loss": 1.2245, "step": 1670 },
    { "epoch": 2.26, "learning_rate": 3.4395689552855955e-05, "loss": 1.088, "step": 1680 },
    { "epoch": 2.28, "learning_rate": 3.423197532719671e-05, "loss": 0.9423, "step": 1690 },
    { "epoch": 2.29, "learning_rate": 3.4067801393319954e-05, "loss": 0.9905, "step": 1700 },
    { "epoch": 2.3, "learning_rate": 3.390317592630239e-05, "loss": 0.9556, "step": 1710 },
    { "epoch": 2.32, "learning_rate": 3.3738107123704934e-05, "loss": 1.1236, "step": 1720 },
    { "epoch": 2.33, "learning_rate": 3.357260320516447e-05, "loss": 0.8063, "step": 1730 },
    { "epoch": 2.35, "learning_rate": 3.34066724119846e-05, "loss": 1.0238, "step": 1740 },
    { "epoch": 2.36, "learning_rate": 3.32403230067252e-05, "loss": 0.8993, "step": 1750 },
    { "epoch": 2.37, "learning_rate": 3.3073563272791074e-05, "loss": 1.0067, "step": 1760 },
    { "epoch": 2.39, "learning_rate": 3.2906401514019406e-05, "loss": 1.094, "step": 1770 },
    { "epoch": 2.4, "learning_rate": 3.2738846054266296e-05, "loss": 1.1968, "step": 1780 },
    { "epoch": 2.41, "learning_rate": 3.2570905236992274e-05, "loss": 1.0276, "step": 1790 },
    { "epoch": 2.43, "learning_rate": 3.240258742484683e-05, "loss": 1.0161, "step": 1800 },
    { "epoch": 2.44, "learning_rate": 3.2233900999251995e-05, "loss": 1.1456, "step": 1810 },
    { "epoch": 2.45, "learning_rate": 3.206485435998498e-05, "loss": 0.9577, "step": 1820 },
    { "epoch": 2.47, "learning_rate": 3.189545592475992e-05, "loss": 1.0474, "step": 1830 },
    { "epoch": 2.48, "learning_rate": 3.172571412880872e-05, "loss": 1.1945, "step": 1840 },
    { "epoch": 2.49, "learning_rate": 3.1555637424460984e-05, "loss": 0.994, "step": 1850 },
    { "epoch": 2.51, "learning_rate": 3.138523428072316e-05, "loss": 0.9061, "step": 1860 },
    { "epoch": 2.52, "learning_rate": 3.121451318285684e-05, "loss": 0.9634, "step": 1870 },
    { "epoch": 2.53, "learning_rate": 3.104348263195617e-05, "loss": 1.2188, "step": 1880 },
    { "epoch": 2.55, "learning_rate": 3.0872151144524595e-05, "loss": 0.9831, "step": 1890 },
    { "epoch": 2.56, "learning_rate": 3.070052725205077e-05, "loss": 1.0728, "step": 1900 },
    { "epoch": 2.57, "learning_rate": 3.052861950058371e-05, "loss": 1.0418, "step": 1910 },
    { "epoch": 2.59, "learning_rate": 3.0356436450307256e-05, "loss": 1.2411, "step": 1920 },
    { "epoch": 2.6, "learning_rate": 3.0183986675113814e-05, "loss": 0.8613, "step": 1930 },
    { "epoch": 2.61, "learning_rate": 3.0011278762177415e-05, "loss": 1.0478, "step": 1940 },
    { "epoch": 2.63, "learning_rate": 2.9838321311526107e-05, "loss": 0.82, "step": 1950 },
    { "epoch": 2.64, "learning_rate": 2.9665122935613727e-05, "loss": 0.9665, "step": 1960 },
    { "epoch": 2.65, "learning_rate": 2.9491692258891045e-05, "loss": 0.9478, "step": 1970 },
    { "epoch": 2.67, "learning_rate": 2.9318037917376297e-05, "loss": 0.8923, "step": 1980 },
    { "epoch": 2.68, "learning_rate": 2.9144168558225138e-05, "loss": 0.9888, "step": 1990 },
    { "epoch": 2.7, "learning_rate": 2.8970092839300106e-05, "loss": 0.7904, "step": 2000 },
    { "epoch": 2.71, "learning_rate": 2.879581942873945e-05, "loss": 0.8784, "step": 2010 },
    { "epoch": 2.72, "learning_rate": 2.8621357004525506e-05, "loss": 1.0034, "step": 2020 },
    { "epoch": 2.74, "learning_rate": 2.8446714254052613e-05, "loss": 1.0585, "step": 2030 },
    { "epoch": 2.75, "learning_rate": 2.827189987369446e-05, "loss": 1.0363, "step": 2040 },
    { "epoch": 2.76, "learning_rate": 2.809692256837113e-05, "loss": 0.8699, "step": 2050 },
    { "epoch": 2.78, "learning_rate": 2.7921791051115548e-05, "loss": 0.9598, "step": 2060 },
    { "epoch": 2.79, "learning_rate": 2.7746514042639677e-05, "loss": 0.8898, "step": 2070 },
    { "epoch": 2.8, "learning_rate": 2.757110027090023e-05, "loss": 1.0903, "step": 2080 },
    { "epoch": 2.82, "learning_rate": 2.7395558470664097e-05, "loss": 0.9971, "step": 2090 },
    { "epoch": 2.83, "learning_rate": 2.7219897383073373e-05, "loss": 1.103, "step": 2100 },
    { "epoch": 2.84, "learning_rate": 2.704412575521007e-05, "loss": 1.0361, "step": 2110 },
    { "epoch": 2.86, "learning_rate": 2.686825233966061e-05, "loss": 1.1389, "step": 2120 },
    { "epoch": 2.87, "learning_rate": 2.6692285894079923e-05, "loss": 1.0003, "step": 2130 },
    { "epoch": 2.88, "learning_rate": 2.6516235180755412e-05, "loss": 0.9947, "step": 2140 },
    { "epoch": 2.9, "learning_rate": 2.6340108966170592e-05, "loss": 0.8088, "step": 2150 },
    { "epoch": 2.91, "learning_rate": 2.6163916020568595e-05, "loss": 0.7742, "step": 2160 },
    { "epoch": 2.92, "learning_rate": 2.5987665117515446e-05, "loss": 1.0853, "step": 2170 },
    { "epoch": 2.94, "learning_rate": 2.5811365033463147e-05, "loss": 1.0034, "step": 2180 },
    { "epoch": 2.95, "learning_rate": 2.5635024547312704e-05, "loss": 1.0611, "step": 2190 },
    { "epoch": 2.96, "learning_rate": 2.5458652439976932e-05, "loss": 1.0789, "step": 2200 },
    { "epoch": 2.98, "learning_rate": 2.528225749394325e-05, "loss": 1.1031, "step": 2210 },
    { "epoch": 2.99, "learning_rate": 2.510584849283632e-05, "loss": 0.9604, "step": 2220 }
  ],
  "logging_steps": 10,
  "max_steps": 4452,
  "num_train_epochs": 6,
  "save_steps": 500,
  "total_flos": 1.163304048181248e+16,
  "trial_name": null,
  "trial_params": null
}