|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 16.483516483516482,
  "eval_steps": 500,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.11,
      "learning_rate": 4.996861268047709e-05,
      "loss": 2.5145,
      "step": 10
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.9937225360954175e-05,
      "loss": 2.8458,
      "step": 20
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.9905838041431265e-05,
      "loss": 2.7597,
      "step": 30
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.987445072190835e-05,
      "loss": 2.8058,
      "step": 40
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.984306340238544e-05,
      "loss": 2.6902,
      "step": 50
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.981167608286253e-05,
      "loss": 2.7747,
      "step": 60
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.978028876333961e-05,
      "loss": 2.9923,
      "step": 70
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.97489014438167e-05,
      "loss": 2.7536,
      "step": 80
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.971751412429379e-05,
      "loss": 2.8079,
      "step": 90
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.968612680477088e-05,
      "loss": 2.8927,
      "step": 100
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.965473948524796e-05,
      "loss": 2.815,
      "step": 110
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.962335216572505e-05,
      "loss": 2.9312,
      "step": 120
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.959196484620214e-05,
      "loss": 2.598,
      "step": 130
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.956057752667922e-05,
      "loss": 2.6474,
      "step": 140
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.952919020715631e-05,
      "loss": 2.7857,
      "step": 150
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.9497802887633396e-05,
      "loss": 2.7879,
      "step": 160
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.9466415568110485e-05,
      "loss": 2.6259,
      "step": 170
    },
    {
      "epoch": 1.98,
      "learning_rate": 4.9435028248587575e-05,
      "loss": 2.6434,
      "step": 180
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.940364092906466e-05,
      "loss": 2.6546,
      "step": 190
    },
    {
      "epoch": 2.2,
      "learning_rate": 4.937225360954175e-05,
      "loss": 2.7263,
      "step": 200
    },
    {
      "epoch": 2.31,
      "learning_rate": 4.934086629001883e-05,
      "loss": 2.843,
      "step": 210
    },
    {
      "epoch": 2.42,
      "learning_rate": 4.930947897049592e-05,
      "loss": 2.6683,
      "step": 220
    },
    {
      "epoch": 2.53,
      "learning_rate": 4.927809165097301e-05,
      "loss": 2.441,
      "step": 230
    },
    {
      "epoch": 2.64,
      "learning_rate": 4.924670433145009e-05,
      "loss": 2.8475,
      "step": 240
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.921531701192718e-05,
      "loss": 2.7734,
      "step": 250
    },
    {
      "epoch": 2.86,
      "learning_rate": 4.918392969240427e-05,
      "loss": 2.839,
      "step": 260
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.915254237288136e-05,
      "loss": 2.5971,
      "step": 270
    },
    {
      "epoch": 3.08,
      "learning_rate": 4.9121155053358444e-05,
      "loss": 2.5059,
      "step": 280
    },
    {
      "epoch": 3.19,
      "learning_rate": 4.9089767733835534e-05,
      "loss": 2.6131,
      "step": 290
    },
    {
      "epoch": 3.3,
      "learning_rate": 4.9058380414312623e-05,
      "loss": 2.8308,
      "step": 300
    },
    {
      "epoch": 3.41,
      "learning_rate": 4.9026993094789706e-05,
      "loss": 2.7235,
      "step": 310
    },
    {
      "epoch": 3.52,
      "learning_rate": 4.8995605775266796e-05,
      "loss": 2.7609,
      "step": 320
    },
    {
      "epoch": 3.63,
      "learning_rate": 4.896421845574388e-05,
      "loss": 2.7446,
      "step": 330
    },
    {
      "epoch": 3.74,
      "learning_rate": 4.893283113622097e-05,
      "loss": 2.6262,
      "step": 340
    },
    {
      "epoch": 3.85,
      "learning_rate": 4.890144381669806e-05,
      "loss": 2.4824,
      "step": 350
    },
    {
      "epoch": 3.96,
      "learning_rate": 4.887005649717514e-05,
      "loss": 2.8134,
      "step": 360
    },
    {
      "epoch": 4.07,
      "learning_rate": 4.883866917765223e-05,
      "loss": 2.6734,
      "step": 370
    },
    {
      "epoch": 4.18,
      "learning_rate": 4.8807281858129313e-05,
      "loss": 2.6152,
      "step": 380
    },
    {
      "epoch": 4.29,
      "learning_rate": 4.87758945386064e-05,
      "loss": 2.8046,
      "step": 390
    },
    {
      "epoch": 4.4,
      "learning_rate": 4.874450721908349e-05,
      "loss": 2.6603,
      "step": 400
    },
    {
      "epoch": 4.51,
      "learning_rate": 4.8713119899560576e-05,
      "loss": 2.789,
      "step": 410
    },
    {
      "epoch": 4.62,
      "learning_rate": 4.8681732580037665e-05,
      "loss": 2.5235,
      "step": 420
    },
    {
      "epoch": 4.73,
      "learning_rate": 4.8650345260514755e-05,
      "loss": 2.6632,
      "step": 430
    },
    {
      "epoch": 4.84,
      "learning_rate": 4.8618957940991844e-05,
      "loss": 2.6261,
      "step": 440
    },
    {
      "epoch": 4.95,
      "learning_rate": 4.8587570621468934e-05,
      "loss": 2.6481,
      "step": 450
    },
    {
      "epoch": 5.05,
      "learning_rate": 4.855618330194602e-05,
      "loss": 2.6359,
      "step": 460
    },
    {
      "epoch": 5.16,
      "learning_rate": 4.8524795982423107e-05,
      "loss": 2.6283,
      "step": 470
    },
    {
      "epoch": 5.27,
      "learning_rate": 4.849340866290019e-05,
      "loss": 2.6531,
      "step": 480
    },
    {
      "epoch": 5.38,
      "learning_rate": 4.846202134337728e-05,
      "loss": 2.6473,
      "step": 490
    },
    {
      "epoch": 5.49,
      "learning_rate": 4.843063402385437e-05,
      "loss": 2.7426,
      "step": 500
    },
    {
      "epoch": 5.6,
      "learning_rate": 4.839924670433145e-05,
      "loss": 2.7108,
      "step": 510
    },
    {
      "epoch": 5.71,
      "learning_rate": 4.836785938480854e-05,
      "loss": 2.6723,
      "step": 520
    },
    {
      "epoch": 5.82,
      "learning_rate": 4.8336472065285624e-05,
      "loss": 2.6087,
      "step": 530
    },
    {
      "epoch": 5.93,
      "learning_rate": 4.8305084745762714e-05,
      "loss": 2.5313,
      "step": 540
    },
    {
      "epoch": 6.04,
      "learning_rate": 4.82736974262398e-05,
      "loss": 2.6208,
      "step": 550
    },
    {
      "epoch": 6.15,
      "learning_rate": 4.8242310106716886e-05,
      "loss": 2.613,
      "step": 560
    },
    {
      "epoch": 6.26,
      "learning_rate": 4.8210922787193976e-05,
      "loss": 2.645,
      "step": 570
    },
    {
      "epoch": 6.37,
      "learning_rate": 4.817953546767106e-05,
      "loss": 2.521,
      "step": 580
    },
    {
      "epoch": 6.48,
      "learning_rate": 4.814814814814815e-05,
      "loss": 2.7001,
      "step": 590
    },
    {
      "epoch": 6.59,
      "learning_rate": 4.811676082862524e-05,
      "loss": 2.6073,
      "step": 600
    },
    {
      "epoch": 6.7,
      "learning_rate": 4.808537350910233e-05,
      "loss": 2.6636,
      "step": 610
    },
    {
      "epoch": 6.81,
      "learning_rate": 4.805398618957942e-05,
      "loss": 2.5819,
      "step": 620
    },
    {
      "epoch": 6.92,
      "learning_rate": 4.80225988700565e-05,
      "loss": 2.6064,
      "step": 630
    },
    {
      "epoch": 7.03,
      "learning_rate": 4.799121155053359e-05,
      "loss": 2.7406,
      "step": 640
    },
    {
      "epoch": 7.14,
      "learning_rate": 4.795982423101067e-05,
      "loss": 2.5997,
      "step": 650
    },
    {
      "epoch": 7.25,
      "learning_rate": 4.792843691148776e-05,
      "loss": 2.5727,
      "step": 660
    },
    {
      "epoch": 7.36,
      "learning_rate": 4.789704959196485e-05,
      "loss": 2.4625,
      "step": 670
    },
    {
      "epoch": 7.47,
      "learning_rate": 4.7865662272441935e-05,
      "loss": 2.5773,
      "step": 680
    },
    {
      "epoch": 7.58,
      "learning_rate": 4.7834274952919024e-05,
      "loss": 2.5806,
      "step": 690
    },
    {
      "epoch": 7.69,
      "learning_rate": 4.780288763339611e-05,
      "loss": 2.5901,
      "step": 700
    },
    {
      "epoch": 7.8,
      "learning_rate": 4.77715003138732e-05,
      "loss": 2.4906,
      "step": 710
    },
    {
      "epoch": 7.91,
      "learning_rate": 4.7740112994350286e-05,
      "loss": 2.6069,
      "step": 720
    },
    {
      "epoch": 8.02,
      "learning_rate": 4.770872567482737e-05,
      "loss": 2.5363,
      "step": 730
    },
    {
      "epoch": 8.13,
      "learning_rate": 4.767733835530446e-05,
      "loss": 2.5511,
      "step": 740
    },
    {
      "epoch": 8.24,
      "learning_rate": 4.764595103578154e-05,
      "loss": 2.5839,
      "step": 750
    },
    {
      "epoch": 8.35,
      "learning_rate": 4.761456371625863e-05,
      "loss": 2.462,
      "step": 760
    },
    {
      "epoch": 8.46,
      "learning_rate": 4.758317639673572e-05,
      "loss": 2.8533,
      "step": 770
    },
    {
      "epoch": 8.57,
      "learning_rate": 4.755178907721281e-05,
      "loss": 2.3477,
      "step": 780
    },
    {
      "epoch": 8.68,
      "learning_rate": 4.75204017576899e-05,
      "loss": 2.553,
      "step": 790
    },
    {
      "epoch": 8.79,
      "learning_rate": 4.748901443816698e-05,
      "loss": 2.614,
      "step": 800
    },
    {
      "epoch": 8.9,
      "learning_rate": 4.745762711864407e-05,
      "loss": 2.6441,
      "step": 810
    },
    {
      "epoch": 9.01,
      "learning_rate": 4.7426239799121155e-05,
      "loss": 2.6618,
      "step": 820
    },
    {
      "epoch": 9.12,
      "learning_rate": 4.7394852479598245e-05,
      "loss": 2.4887,
      "step": 830
    },
    {
      "epoch": 9.23,
      "learning_rate": 4.7363465160075335e-05,
      "loss": 2.3639,
      "step": 840
    },
    {
      "epoch": 9.34,
      "learning_rate": 4.733207784055242e-05,
      "loss": 2.7387,
      "step": 850
    },
    {
      "epoch": 9.45,
      "learning_rate": 4.730069052102951e-05,
      "loss": 2.4262,
      "step": 860
    },
    {
      "epoch": 9.56,
      "learning_rate": 4.726930320150659e-05,
      "loss": 2.6172,
      "step": 870
    },
    {
      "epoch": 9.67,
      "learning_rate": 4.723791588198368e-05,
      "loss": 2.7115,
      "step": 880
    },
    {
      "epoch": 9.78,
      "learning_rate": 4.720652856246077e-05,
      "loss": 2.5397,
      "step": 890
    },
    {
      "epoch": 9.89,
      "learning_rate": 4.717514124293785e-05,
      "loss": 2.4934,
      "step": 900
    },
    {
      "epoch": 10.0,
      "learning_rate": 4.714375392341494e-05,
      "loss": 2.5521,
      "step": 910
    },
    {
      "epoch": 10.11,
      "learning_rate": 4.7112366603892025e-05,
      "loss": 2.3767,
      "step": 920
    },
    {
      "epoch": 10.22,
      "learning_rate": 4.7080979284369114e-05,
      "loss": 2.5422,
      "step": 930
    },
    {
      "epoch": 10.33,
      "learning_rate": 4.7049591964846204e-05,
      "loss": 2.7068,
      "step": 940
    },
    {
      "epoch": 10.44,
      "learning_rate": 4.7018204645323294e-05,
      "loss": 2.5518,
      "step": 950
    },
    {
      "epoch": 10.55,
      "learning_rate": 4.698681732580038e-05,
      "loss": 2.4129,
      "step": 960
    },
    {
      "epoch": 10.66,
      "learning_rate": 4.6955430006277466e-05,
      "loss": 2.6916,
      "step": 970
    },
    {
      "epoch": 10.77,
      "learning_rate": 4.6924042686754556e-05,
      "loss": 2.5753,
      "step": 980
    },
    {
      "epoch": 10.88,
      "learning_rate": 4.689265536723164e-05,
      "loss": 2.3783,
      "step": 990
    },
    {
      "epoch": 10.99,
      "learning_rate": 4.686126804770873e-05,
      "loss": 2.5766,
      "step": 1000
    },
    {
      "epoch": 11.1,
      "learning_rate": 4.682988072818582e-05,
      "loss": 2.56,
      "step": 1010
    },
    {
      "epoch": 11.21,
      "learning_rate": 4.67984934086629e-05,
      "loss": 2.59,
      "step": 1020
    },
    {
      "epoch": 11.32,
      "learning_rate": 4.676710608913999e-05,
      "loss": 2.5592,
      "step": 1030
    },
    {
      "epoch": 11.43,
      "learning_rate": 4.673571876961707e-05,
      "loss": 2.553,
      "step": 1040
    },
    {
      "epoch": 11.54,
      "learning_rate": 4.670433145009416e-05,
      "loss": 2.564,
      "step": 1050
    },
    {
      "epoch": 11.65,
      "learning_rate": 4.667294413057125e-05,
      "loss": 2.471,
      "step": 1060
    },
    {
      "epoch": 11.76,
      "learning_rate": 4.6641556811048335e-05,
      "loss": 2.4783,
      "step": 1070
    },
    {
      "epoch": 11.87,
      "learning_rate": 4.6610169491525425e-05,
      "loss": 2.5474,
      "step": 1080
    },
    {
      "epoch": 11.98,
      "learning_rate": 4.657878217200251e-05,
      "loss": 2.5478,
      "step": 1090
    },
    {
      "epoch": 12.09,
      "learning_rate": 4.65473948524796e-05,
      "loss": 2.4591,
      "step": 1100
    },
    {
      "epoch": 12.2,
      "learning_rate": 4.651600753295669e-05,
      "loss": 2.4682,
      "step": 1110
    },
    {
      "epoch": 12.31,
      "learning_rate": 4.6484620213433777e-05,
      "loss": 2.5391,
      "step": 1120
    },
    {
      "epoch": 12.42,
      "learning_rate": 4.6453232893910866e-05,
      "loss": 2.6003,
      "step": 1130
    },
    {
      "epoch": 12.53,
      "learning_rate": 4.642184557438795e-05,
      "loss": 2.4073,
      "step": 1140
    },
    {
      "epoch": 12.64,
      "learning_rate": 4.639045825486504e-05,
      "loss": 2.5084,
      "step": 1150
    },
    {
      "epoch": 12.75,
      "learning_rate": 4.635907093534212e-05,
      "loss": 2.4579,
      "step": 1160
    },
    {
      "epoch": 12.86,
      "learning_rate": 4.632768361581921e-05,
      "loss": 2.512,
      "step": 1170
    },
    {
      "epoch": 12.97,
      "learning_rate": 4.62962962962963e-05,
      "loss": 2.6637,
      "step": 1180
    },
    {
      "epoch": 13.08,
      "learning_rate": 4.6264908976773384e-05,
      "loss": 2.3842,
      "step": 1190
    },
    {
      "epoch": 13.19,
      "learning_rate": 4.623352165725047e-05,
      "loss": 2.3314,
      "step": 1200
    },
    {
      "epoch": 13.3,
      "learning_rate": 4.620213433772756e-05,
      "loss": 2.4636,
      "step": 1210
    },
    {
      "epoch": 13.41,
      "learning_rate": 4.6170747018204646e-05,
      "loss": 2.4403,
      "step": 1220
    },
    {
      "epoch": 13.52,
      "learning_rate": 4.6139359698681735e-05,
      "loss": 2.7347,
      "step": 1230
    },
    {
      "epoch": 13.63,
      "learning_rate": 4.610797237915882e-05,
      "loss": 2.4503,
      "step": 1240
    },
    {
      "epoch": 13.74,
      "learning_rate": 4.607658505963591e-05,
      "loss": 2.566,
      "step": 1250
    },
    {
      "epoch": 13.85,
      "learning_rate": 4.6045197740113e-05,
      "loss": 2.3861,
      "step": 1260
    },
    {
      "epoch": 13.96,
      "learning_rate": 4.601381042059008e-05,
      "loss": 2.4167,
      "step": 1270
    },
    {
      "epoch": 14.07,
      "learning_rate": 4.598242310106718e-05,
      "loss": 2.751,
      "step": 1280
    },
    {
      "epoch": 14.18,
      "learning_rate": 4.595103578154426e-05,
      "loss": 2.4427,
      "step": 1290
    },
    {
      "epoch": 14.29,
      "learning_rate": 4.591964846202135e-05,
      "loss": 2.5363,
      "step": 1300
    },
    {
      "epoch": 14.4,
      "learning_rate": 4.588826114249843e-05,
      "loss": 2.3152,
      "step": 1310
    },
    {
      "epoch": 14.51,
      "learning_rate": 4.585687382297552e-05,
      "loss": 2.6868,
      "step": 1320
    },
    {
      "epoch": 14.62,
      "learning_rate": 4.582548650345261e-05,
      "loss": 2.4857,
      "step": 1330
    },
    {
      "epoch": 14.73,
      "learning_rate": 4.5794099183929694e-05,
      "loss": 2.5417,
      "step": 1340
    },
    {
      "epoch": 14.84,
      "learning_rate": 4.5762711864406784e-05,
      "loss": 2.4462,
      "step": 1350
    },
    {
      "epoch": 14.95,
      "learning_rate": 4.573132454488387e-05,
      "loss": 2.3253,
      "step": 1360
    },
    {
      "epoch": 15.05,
      "learning_rate": 4.5699937225360956e-05,
      "loss": 2.674,
      "step": 1370
    },
    {
      "epoch": 15.16,
      "learning_rate": 4.5668549905838046e-05,
      "loss": 2.607,
      "step": 1380
    },
    {
      "epoch": 15.27,
      "learning_rate": 4.563716258631513e-05,
      "loss": 2.3933,
      "step": 1390
    },
    {
      "epoch": 15.38,
      "learning_rate": 4.560577526679222e-05,
      "loss": 2.4335,
      "step": 1400
    },
    {
      "epoch": 15.49,
      "learning_rate": 4.55743879472693e-05,
      "loss": 2.2217,
      "step": 1410
    },
    {
      "epoch": 15.6,
      "learning_rate": 4.554300062774639e-05,
      "loss": 2.4138,
      "step": 1420
    },
    {
      "epoch": 15.71,
      "learning_rate": 4.551161330822348e-05,
      "loss": 2.5214,
      "step": 1430
    },
    {
      "epoch": 15.82,
      "learning_rate": 4.548022598870056e-05,
      "loss": 2.6247,
      "step": 1440
    },
    {
      "epoch": 15.93,
      "learning_rate": 4.544883866917766e-05,
      "loss": 2.495,
      "step": 1450
    },
    {
      "epoch": 16.04,
      "learning_rate": 4.541745134965474e-05,
      "loss": 2.4676,
      "step": 1460
    },
    {
      "epoch": 16.15,
      "learning_rate": 4.538606403013183e-05,
      "loss": 2.5501,
      "step": 1470
    },
    {
      "epoch": 16.26,
      "learning_rate": 4.5354676710608915e-05,
      "loss": 2.612,
      "step": 1480
    },
    {
      "epoch": 16.37,
      "learning_rate": 4.5323289391086005e-05,
      "loss": 2.6939,
      "step": 1490
    },
    {
      "epoch": 16.48,
      "learning_rate": 4.5291902071563094e-05,
      "loss": 2.3641,
      "step": 1500
    }
  ],
  "logging_steps": 10,
  "max_steps": 15930,
  "num_train_epochs": 176,
  "save_steps": 500,
  "total_flos": 7.73986862813184e+16,
  "trial_name": null,
  "trial_params": null
}