|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 233, |
|
"global_step": 930, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.001075268817204301, |
|
"grad_norm": 0.47046032547950745, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.2903, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.001075268817204301, |
|
"eval_loss": 1.1097546815872192, |
|
"eval_runtime": 239.745, |
|
"eval_samples_per_second": 11.612, |
|
"eval_steps_per_second": 0.484, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002150537634408602, |
|
"grad_norm": 0.4801943898200989, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.3048, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0032258064516129032, |
|
"grad_norm": 0.4532926380634308, |
|
"learning_rate": 6e-06, |
|
"loss": 1.3283, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.004301075268817204, |
|
"grad_norm": 0.4831653833389282, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 1.2813, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.005376344086021506, |
|
"grad_norm": 0.4505281150341034, |
|
"learning_rate": 1e-05, |
|
"loss": 1.3272, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0064516129032258064, |
|
"grad_norm": 0.4494975507259369, |
|
"learning_rate": 1.2e-05, |
|
"loss": 1.3336, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.007526881720430108, |
|
"grad_norm": 0.48131081461906433, |
|
"learning_rate": 1.4e-05, |
|
"loss": 1.3175, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.008602150537634409, |
|
"grad_norm": 0.46796271204948425, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 1.3211, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.00967741935483871, |
|
"grad_norm": 0.42700183391571045, |
|
"learning_rate": 1.8e-05, |
|
"loss": 1.29, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.010752688172043012, |
|
"grad_norm": 0.4512764811515808, |
|
"learning_rate": 2e-05, |
|
"loss": 1.289, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.011827956989247311, |
|
"grad_norm": 0.38503292202949524, |
|
"learning_rate": 1.999994169662801e-05, |
|
"loss": 1.2838, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.012903225806451613, |
|
"grad_norm": 0.36103355884552, |
|
"learning_rate": 1.9999766787191897e-05, |
|
"loss": 1.2636, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.013978494623655914, |
|
"grad_norm": 0.3443821668624878, |
|
"learning_rate": 1.9999475273731218e-05, |
|
"loss": 1.2708, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.015053763440860216, |
|
"grad_norm": 0.33744773268699646, |
|
"learning_rate": 1.9999067159645222e-05, |
|
"loss": 1.2806, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.016129032258064516, |
|
"grad_norm": 0.3615255355834961, |
|
"learning_rate": 1.9998542449692794e-05, |
|
"loss": 1.1926, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.017204301075268817, |
|
"grad_norm": 0.3394218981266022, |
|
"learning_rate": 1.99979011499924e-05, |
|
"loss": 1.2624, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01827956989247312, |
|
"grad_norm": 0.35456153750419617, |
|
"learning_rate": 1.999714326802203e-05, |
|
"loss": 1.2372, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01935483870967742, |
|
"grad_norm": 0.3715231418609619, |
|
"learning_rate": 1.999626881261911e-05, |
|
"loss": 1.2655, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02043010752688172, |
|
"grad_norm": 0.375711590051651, |
|
"learning_rate": 1.9995277793980362e-05, |
|
"loss": 1.2065, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.021505376344086023, |
|
"grad_norm": 0.3626311421394348, |
|
"learning_rate": 1.999417022366174e-05, |
|
"loss": 1.2691, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02258064516129032, |
|
"grad_norm": 0.3745582699775696, |
|
"learning_rate": 1.9992946114578264e-05, |
|
"loss": 1.2962, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.023655913978494623, |
|
"grad_norm": 0.3464348018169403, |
|
"learning_rate": 1.9991605481003865e-05, |
|
"loss": 1.2189, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.024731182795698924, |
|
"grad_norm": 0.3342266082763672, |
|
"learning_rate": 1.999014833857124e-05, |
|
"loss": 1.2144, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.025806451612903226, |
|
"grad_norm": 0.3211102783679962, |
|
"learning_rate": 1.9988574704271652e-05, |
|
"loss": 1.2098, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.026881720430107527, |
|
"grad_norm": 0.3194901943206787, |
|
"learning_rate": 1.998688459645473e-05, |
|
"loss": 1.2266, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02795698924731183, |
|
"grad_norm": 0.32383179664611816, |
|
"learning_rate": 1.998507803482828e-05, |
|
"loss": 1.2227, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02903225806451613, |
|
"grad_norm": 0.3091806173324585, |
|
"learning_rate": 1.998315504045803e-05, |
|
"loss": 1.2372, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.030107526881720432, |
|
"grad_norm": 0.306734561920166, |
|
"learning_rate": 1.998111563576738e-05, |
|
"loss": 1.1947, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.03118279569892473, |
|
"grad_norm": 0.3311488628387451, |
|
"learning_rate": 1.9978959844537176e-05, |
|
"loss": 1.1853, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.03225806451612903, |
|
"grad_norm": 0.34573501348495483, |
|
"learning_rate": 1.9976687691905394e-05, |
|
"loss": 1.1903, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03333333333333333, |
|
"grad_norm": 0.3311573266983032, |
|
"learning_rate": 1.997429920436686e-05, |
|
"loss": 1.1754, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.034408602150537634, |
|
"grad_norm": 0.3365638852119446, |
|
"learning_rate": 1.9971794409772962e-05, |
|
"loss": 1.1259, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.035483870967741936, |
|
"grad_norm": 0.3338710069656372, |
|
"learning_rate": 1.9969173337331283e-05, |
|
"loss": 1.161, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.03655913978494624, |
|
"grad_norm": 0.321247398853302, |
|
"learning_rate": 1.9966436017605296e-05, |
|
"loss": 1.0962, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.03763440860215054, |
|
"grad_norm": 0.32759588956832886, |
|
"learning_rate": 1.9963582482514003e-05, |
|
"loss": 1.1921, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03870967741935484, |
|
"grad_norm": 0.2961257994174957, |
|
"learning_rate": 1.996061276533154e-05, |
|
"loss": 1.1777, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03978494623655914, |
|
"grad_norm": 0.3336629271507263, |
|
"learning_rate": 1.9957526900686814e-05, |
|
"loss": 1.1454, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.04086021505376344, |
|
"grad_norm": 0.3197214901447296, |
|
"learning_rate": 1.9954324924563088e-05, |
|
"loss": 1.136, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.041935483870967745, |
|
"grad_norm": 0.3015052080154419, |
|
"learning_rate": 1.9951006874297568e-05, |
|
"loss": 1.1581, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.043010752688172046, |
|
"grad_norm": 0.32588157057762146, |
|
"learning_rate": 1.994757278858095e-05, |
|
"loss": 1.1603, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04408602150537634, |
|
"grad_norm": 0.3433900475502014, |
|
"learning_rate": 1.9944022707456992e-05, |
|
"loss": 1.1162, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.04516129032258064, |
|
"grad_norm": 0.31243011355400085, |
|
"learning_rate": 1.9940356672322037e-05, |
|
"loss": 1.1585, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.046236559139784944, |
|
"grad_norm": 0.35727739334106445, |
|
"learning_rate": 1.9936574725924526e-05, |
|
"loss": 1.1663, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.047311827956989246, |
|
"grad_norm": 0.3205494284629822, |
|
"learning_rate": 1.99326769123645e-05, |
|
"loss": 1.1508, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.04838709677419355, |
|
"grad_norm": 0.31664592027664185, |
|
"learning_rate": 1.99286632770931e-05, |
|
"loss": 1.1644, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04946236559139785, |
|
"grad_norm": 0.3515632748603821, |
|
"learning_rate": 1.9924533866912017e-05, |
|
"loss": 1.1326, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.05053763440860215, |
|
"grad_norm": 0.3686864674091339, |
|
"learning_rate": 1.992028872997296e-05, |
|
"loss": 1.0378, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.05161290322580645, |
|
"grad_norm": 0.3334296941757202, |
|
"learning_rate": 1.9915927915777085e-05, |
|
"loss": 1.154, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.05268817204301075, |
|
"grad_norm": 0.33582431077957153, |
|
"learning_rate": 1.9911451475174428e-05, |
|
"loss": 1.1164, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.053763440860215055, |
|
"grad_norm": 0.3328990936279297, |
|
"learning_rate": 1.9906859460363307e-05, |
|
"loss": 1.1293, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.054838709677419356, |
|
"grad_norm": 0.3484123945236206, |
|
"learning_rate": 1.9902151924889715e-05, |
|
"loss": 1.1883, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.05591397849462366, |
|
"grad_norm": 0.3534570038318634, |
|
"learning_rate": 1.989732892364668e-05, |
|
"loss": 1.1912, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.05698924731182796, |
|
"grad_norm": 0.3628205955028534, |
|
"learning_rate": 1.989239051287366e-05, |
|
"loss": 1.0801, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.05806451612903226, |
|
"grad_norm": 0.35341545939445496, |
|
"learning_rate": 1.988733675015585e-05, |
|
"loss": 1.1372, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.05913978494623656, |
|
"grad_norm": 0.37951940298080444, |
|
"learning_rate": 1.988216769442353e-05, |
|
"loss": 1.0838, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.060215053763440864, |
|
"grad_norm": 0.37033918499946594, |
|
"learning_rate": 1.9876883405951378e-05, |
|
"loss": 1.1248, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.06129032258064516, |
|
"grad_norm": 0.360681414604187, |
|
"learning_rate": 1.987148394635776e-05, |
|
"loss": 1.1153, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.06236559139784946, |
|
"grad_norm": 0.4272811710834503, |
|
"learning_rate": 1.9865969378604023e-05, |
|
"loss": 1.0496, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.06344086021505377, |
|
"grad_norm": 0.40148288011550903, |
|
"learning_rate": 1.9860339766993735e-05, |
|
"loss": 1.1039, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.06451612903225806, |
|
"grad_norm": 0.3811061680316925, |
|
"learning_rate": 1.9854595177171968e-05, |
|
"loss": 1.0615, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.06559139784946237, |
|
"grad_norm": 0.4223267734050751, |
|
"learning_rate": 1.984873567612452e-05, |
|
"loss": 1.1248, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.06666666666666667, |
|
"grad_norm": 0.3878687918186188, |
|
"learning_rate": 1.9842761332177115e-05, |
|
"loss": 1.0849, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.06774193548387097, |
|
"grad_norm": 0.3889671862125397, |
|
"learning_rate": 1.983667221499464e-05, |
|
"loss": 1.107, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.06881720430107527, |
|
"grad_norm": 0.3888603746891022, |
|
"learning_rate": 1.9830468395580306e-05, |
|
"loss": 1.0947, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.06989247311827956, |
|
"grad_norm": 0.3902936577796936, |
|
"learning_rate": 1.9824149946274827e-05, |
|
"loss": 1.0747, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.07096774193548387, |
|
"grad_norm": 0.4241058826446533, |
|
"learning_rate": 1.9817716940755586e-05, |
|
"loss": 1.0701, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.07204301075268817, |
|
"grad_norm": 0.41224780678749084, |
|
"learning_rate": 1.981116945403577e-05, |
|
"loss": 1.0375, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.07311827956989247, |
|
"grad_norm": 0.4317214787006378, |
|
"learning_rate": 1.9804507562463483e-05, |
|
"loss": 1.0293, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.07419354838709677, |
|
"grad_norm": 0.4070356786251068, |
|
"learning_rate": 1.9797731343720878e-05, |
|
"loss": 1.0768, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.07526881720430108, |
|
"grad_norm": 0.42713648080825806, |
|
"learning_rate": 1.979084087682323e-05, |
|
"loss": 1.1332, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.07634408602150537, |
|
"grad_norm": 0.45279622077941895, |
|
"learning_rate": 1.9783836242118036e-05, |
|
"loss": 1.0571, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.07741935483870968, |
|
"grad_norm": 0.43857499957084656, |
|
"learning_rate": 1.977671752128406e-05, |
|
"loss": 1.096, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.07849462365591398, |
|
"grad_norm": 0.4599035978317261, |
|
"learning_rate": 1.9769484797330383e-05, |
|
"loss": 1.0352, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.07956989247311828, |
|
"grad_norm": 0.4975890815258026, |
|
"learning_rate": 1.9762138154595448e-05, |
|
"loss": 0.9886, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.08064516129032258, |
|
"grad_norm": 0.39150896668434143, |
|
"learning_rate": 1.9754677678746064e-05, |
|
"loss": 1.0713, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.08172043010752689, |
|
"grad_norm": 0.4559513032436371, |
|
"learning_rate": 1.9747103456776406e-05, |
|
"loss": 1.049, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.08279569892473118, |
|
"grad_norm": 0.45070788264274597, |
|
"learning_rate": 1.9739415577007016e-05, |
|
"loss": 1.038, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.08387096774193549, |
|
"grad_norm": 0.4617964029312134, |
|
"learning_rate": 1.9731614129083756e-05, |
|
"loss": 1.0626, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.08494623655913978, |
|
"grad_norm": 0.47204914689064026, |
|
"learning_rate": 1.9723699203976768e-05, |
|
"loss": 1.0785, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.08602150537634409, |
|
"grad_norm": 0.5538530945777893, |
|
"learning_rate": 1.9715670893979416e-05, |
|
"loss": 1.0384, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.08709677419354839, |
|
"grad_norm": 0.500848114490509, |
|
"learning_rate": 1.970752929270721e-05, |
|
"loss": 1.0732, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.08817204301075268, |
|
"grad_norm": 0.5184169411659241, |
|
"learning_rate": 1.9699274495096712e-05, |
|
"loss": 1.064, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.08924731182795699, |
|
"grad_norm": 0.5162873864173889, |
|
"learning_rate": 1.969090659740443e-05, |
|
"loss": 0.9992, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.09032258064516129, |
|
"grad_norm": 0.4499477744102478, |
|
"learning_rate": 1.9682425697205695e-05, |
|
"loss": 1.0495, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.0913978494623656, |
|
"grad_norm": 0.48833805322647095, |
|
"learning_rate": 1.967383189339352e-05, |
|
"loss": 1.064, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.09247311827956989, |
|
"grad_norm": 0.4258129596710205, |
|
"learning_rate": 1.9665125286177448e-05, |
|
"loss": 1.0463, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.0935483870967742, |
|
"grad_norm": 0.47551074624061584, |
|
"learning_rate": 1.9656305977082405e-05, |
|
"loss": 1.0386, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.09462365591397849, |
|
"grad_norm": 0.49520763754844666, |
|
"learning_rate": 1.964737406894747e-05, |
|
"loss": 1.0107, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.0956989247311828, |
|
"grad_norm": 0.47921285033226013, |
|
"learning_rate": 1.963832966592472e-05, |
|
"loss": 1.0101, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.0967741935483871, |
|
"grad_norm": 0.49400344491004944, |
|
"learning_rate": 1.9629172873477995e-05, |
|
"loss": 1.0635, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.0978494623655914, |
|
"grad_norm": 0.4567995071411133, |
|
"learning_rate": 1.961990379838167e-05, |
|
"loss": 1.0244, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.0989247311827957, |
|
"grad_norm": 0.533193051815033, |
|
"learning_rate": 1.961052254871941e-05, |
|
"loss": 0.9857, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.45922616124153137, |
|
"learning_rate": 1.9601029233882914e-05, |
|
"loss": 1.0685, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.1010752688172043, |
|
"grad_norm": 0.501941978931427, |
|
"learning_rate": 1.9591423964570634e-05, |
|
"loss": 1.0426, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.10215053763440861, |
|
"grad_norm": 0.5177110433578491, |
|
"learning_rate": 1.9581706852786492e-05, |
|
"loss": 1.0302, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.1032258064516129, |
|
"grad_norm": 0.47546830773353577, |
|
"learning_rate": 1.9571878011838557e-05, |
|
"loss": 1.0515, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.1043010752688172, |
|
"grad_norm": 0.5165790319442749, |
|
"learning_rate": 1.956193755633775e-05, |
|
"loss": 1.0473, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.1053763440860215, |
|
"grad_norm": 0.493230938911438, |
|
"learning_rate": 1.9551885602196482e-05, |
|
"loss": 0.9971, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.1064516129032258, |
|
"grad_norm": 0.524946928024292, |
|
"learning_rate": 1.954172226662732e-05, |
|
"loss": 0.988, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.10752688172043011, |
|
"grad_norm": 0.49421951174736023, |
|
"learning_rate": 1.953144766814161e-05, |
|
"loss": 1.086, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.1086021505376344, |
|
"grad_norm": 0.4998270869255066, |
|
"learning_rate": 1.9521061926548096e-05, |
|
"loss": 1.0394, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.10967741935483871, |
|
"grad_norm": 0.5035815834999084, |
|
"learning_rate": 1.9510565162951538e-05, |
|
"loss": 0.9236, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.110752688172043, |
|
"grad_norm": 0.5167160034179688, |
|
"learning_rate": 1.949995749975127e-05, |
|
"loss": 1.0328, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.11182795698924732, |
|
"grad_norm": 0.48468247056007385, |
|
"learning_rate": 1.94892390606398e-05, |
|
"loss": 1.0292, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.11290322580645161, |
|
"grad_norm": 0.5353156328201294, |
|
"learning_rate": 1.947840997060136e-05, |
|
"loss": 1.0258, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.11397849462365592, |
|
"grad_norm": 0.4897732138633728, |
|
"learning_rate": 1.9467470355910438e-05, |
|
"loss": 1.0167, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.11505376344086021, |
|
"grad_norm": 0.5131762623786926, |
|
"learning_rate": 1.9456420344130324e-05, |
|
"loss": 1.0104, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.11612903225806452, |
|
"grad_norm": 0.5203889012336731, |
|
"learning_rate": 1.9445260064111608e-05, |
|
"loss": 1.0029, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.11720430107526882, |
|
"grad_norm": 0.5201652646064758, |
|
"learning_rate": 1.9433989645990677e-05, |
|
"loss": 0.9724, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.11827956989247312, |
|
"grad_norm": 0.48938658833503723, |
|
"learning_rate": 1.9422609221188208e-05, |
|
"loss": 1.003, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.11935483870967742, |
|
"grad_norm": 0.5455314517021179, |
|
"learning_rate": 1.941111892240763e-05, |
|
"loss": 0.9697, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.12043010752688173, |
|
"grad_norm": 0.5419149398803711, |
|
"learning_rate": 1.9399518883633575e-05, |
|
"loss": 1.042, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.12150537634408602, |
|
"grad_norm": 0.5646453499794006, |
|
"learning_rate": 1.938780924013032e-05, |
|
"loss": 1.0211, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.12258064516129032, |
|
"grad_norm": 0.5471717119216919, |
|
"learning_rate": 1.9375990128440205e-05, |
|
"loss": 1.0343, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.12365591397849462, |
|
"grad_norm": 0.54964679479599, |
|
"learning_rate": 1.9364061686382042e-05, |
|
"loss": 0.989, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.12473118279569892, |
|
"grad_norm": 0.5681911110877991, |
|
"learning_rate": 1.935202405304951e-05, |
|
"loss": 0.9719, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.12580645161290321, |
|
"grad_norm": 0.5024511218070984, |
|
"learning_rate": 1.933987736880953e-05, |
|
"loss": 1.04, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.12688172043010754, |
|
"grad_norm": 0.5859746932983398, |
|
"learning_rate": 1.932762177530064e-05, |
|
"loss": 0.9692, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.12795698924731183, |
|
"grad_norm": 0.5424095392227173, |
|
"learning_rate": 1.931525741543131e-05, |
|
"loss": 1.0015, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.12903225806451613, |
|
"grad_norm": 0.5528049468994141, |
|
"learning_rate": 1.9302784433378333e-05, |
|
"loss": 1.0054, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.13010752688172042, |
|
"grad_norm": 0.5513008832931519, |
|
"learning_rate": 1.929020297458508e-05, |
|
"loss": 1.0207, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.13118279569892474, |
|
"grad_norm": 0.5633342266082764, |
|
"learning_rate": 1.9277513185759847e-05, |
|
"loss": 1.0283, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.13225806451612904, |
|
"grad_norm": 0.5417779088020325, |
|
"learning_rate": 1.926471521487413e-05, |
|
"loss": 0.9858, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.13333333333333333, |
|
"grad_norm": 0.5973786115646362, |
|
"learning_rate": 1.9251809211160905e-05, |
|
"loss": 1.0141, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.13440860215053763, |
|
"grad_norm": 0.5340192914009094, |
|
"learning_rate": 1.9238795325112867e-05, |
|
"loss": 0.983, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.13548387096774195, |
|
"grad_norm": 0.5954460501670837, |
|
"learning_rate": 1.922567370848072e-05, |
|
"loss": 0.9743, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.13655913978494624, |
|
"grad_norm": 0.5733973383903503, |
|
"learning_rate": 1.921244451427135e-05, |
|
"loss": 1.0521, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.13763440860215054, |
|
"grad_norm": 0.5830503702163696, |
|
"learning_rate": 1.9199107896746093e-05, |
|
"loss": 0.9766, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.13870967741935483, |
|
"grad_norm": 0.6205757260322571, |
|
"learning_rate": 1.9185664011418895e-05, |
|
"loss": 0.973, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.13978494623655913, |
|
"grad_norm": 0.6140737533569336, |
|
"learning_rate": 1.917211301505453e-05, |
|
"loss": 0.9315, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.14086021505376345, |
|
"grad_norm": 0.6285789608955383, |
|
"learning_rate": 1.9158455065666758e-05, |
|
"loss": 0.9331, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.14193548387096774, |
|
"grad_norm": 0.5214990973472595, |
|
"learning_rate": 1.914469032251647e-05, |
|
"loss": 0.8851, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.14301075268817204, |
|
"grad_norm": 0.6822535395622253, |
|
"learning_rate": 1.9130818946109865e-05, |
|
"loss": 1.0275, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.14408602150537633, |
|
"grad_norm": 0.5924133062362671, |
|
"learning_rate": 1.9116841098196538e-05, |
|
"loss": 0.9584, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.14516129032258066, |
|
"grad_norm": 0.5674971342086792, |
|
"learning_rate": 1.9102756941767625e-05, |
|
"loss": 0.9744, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.14623655913978495, |
|
"grad_norm": 0.5704190135002136, |
|
"learning_rate": 1.9088566641053887e-05, |
|
"loss": 1.0019, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.14731182795698924, |
|
"grad_norm": 0.5644530653953552, |
|
"learning_rate": 1.9074270361523805e-05, |
|
"loss": 0.9846, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.14838709677419354, |
|
"grad_norm": 0.5966128706932068, |
|
"learning_rate": 1.9059868269881637e-05, |
|
"loss": 0.9585, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.14946236559139786, |
|
"grad_norm": 0.5670474171638489, |
|
"learning_rate": 1.9045360534065484e-05, |
|
"loss": 0.9591, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.15053763440860216, |
|
"grad_norm": 0.5436234474182129, |
|
"learning_rate": 1.903074732324533e-05, |
|
"loss": 1.0095, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.15161290322580645, |
|
"grad_norm": 0.6641930341720581, |
|
"learning_rate": 1.9016028807821067e-05, |
|
"loss": 0.962, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.15268817204301074, |
|
"grad_norm": 0.687738835811615, |
|
"learning_rate": 1.9001205159420512e-05, |
|
"loss": 0.9705, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.15376344086021507, |
|
"grad_norm": 0.5606933832168579, |
|
"learning_rate": 1.8986276550897404e-05, |
|
"loss": 1.0314, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.15483870967741936, |
|
"grad_norm": 0.5617864727973938, |
|
"learning_rate": 1.897124315632938e-05, |
|
"loss": 0.9743, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.15591397849462366, |
|
"grad_norm": 0.603577733039856, |
|
"learning_rate": 1.8956105151015966e-05, |
|
"loss": 0.9762, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.15698924731182795, |
|
"grad_norm": 0.5890366435050964, |
|
"learning_rate": 1.8940862711476515e-05, |
|
"loss": 0.9394, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.15806451612903225, |
|
"grad_norm": 0.609809935092926, |
|
"learning_rate": 1.8925516015448144e-05, |
|
"loss": 0.9551, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.15913978494623657, |
|
"grad_norm": 0.621967613697052, |
|
"learning_rate": 1.891006524188368e-05, |
|
"loss": 0.9278, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.16021505376344086, |
|
"grad_norm": 0.6274746656417847, |
|
"learning_rate": 1.8894510570949566e-05, |
|
"loss": 0.9409, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.16129032258064516, |
|
"grad_norm": 0.6348162293434143, |
|
"learning_rate": 1.8878852184023754e-05, |
|
"loss": 1.0121, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.16236559139784945, |
|
"grad_norm": 0.6199047565460205, |
|
"learning_rate": 1.8863090263693596e-05, |
|
"loss": 0.9365, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.16344086021505377, |
|
"grad_norm": 0.5536498427391052, |
|
"learning_rate": 1.884722499375371e-05, |
|
"loss": 0.8554, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.16451612903225807, |
|
"grad_norm": 0.5735920071601868, |
|
"learning_rate": 1.8831256559203847e-05, |
|
"loss": 0.9685, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.16559139784946236, |
|
"grad_norm": 0.6529683470726013, |
|
"learning_rate": 1.8815185146246718e-05, |
|
"loss": 0.9688, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.16666666666666666, |
|
"grad_norm": 0.6526486873626709, |
|
"learning_rate": 1.879901094228584e-05, |
|
"loss": 0.9869, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.16774193548387098, |
|
"grad_norm": 0.6016148328781128, |
|
"learning_rate": 1.878273413592334e-05, |
|
"loss": 0.9608, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.16881720430107527, |
|
"grad_norm": 0.582095742225647, |
|
"learning_rate": 1.8766354916957758e-05, |
|
"loss": 1.0218, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.16989247311827957, |
|
"grad_norm": 0.5814548134803772, |
|
"learning_rate": 1.8749873476381827e-05, |
|
"loss": 0.9845, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.17096774193548386, |
|
"grad_norm": 0.5818253755569458, |
|
"learning_rate": 1.8733290006380264e-05, |
|
"loss": 1.0094, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.17204301075268819, |
|
"grad_norm": 0.6561658382415771, |
|
"learning_rate": 1.8716604700327516e-05, |
|
"loss": 0.9721, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.17311827956989248, |
|
"grad_norm": 0.6266697645187378, |
|
"learning_rate": 1.8699817752785495e-05, |
|
"loss": 0.969, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.17419354838709677, |
|
"grad_norm": 0.6561981439590454, |
|
"learning_rate": 1.8682929359501338e-05, |
|
"loss": 0.9676, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.17526881720430107, |
|
"grad_norm": 0.6283778548240662, |
|
"learning_rate": 1.86659397174051e-05, |
|
"loss": 0.9781, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.17634408602150536, |
|
"grad_norm": 0.6202700138092041, |
|
"learning_rate": 1.864884902460746e-05, |
|
"loss": 0.9959, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.1774193548387097, |
|
"grad_norm": 0.6140043139457703, |
|
"learning_rate": 1.863165748039743e-05, |
|
"loss": 0.9877, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.17849462365591398, |
|
"grad_norm": 0.6894904971122742, |
|
"learning_rate": 1.8614365285240002e-05, |
|
"loss": 0.9207, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.17956989247311828, |
|
"grad_norm": 0.6222094893455505, |
|
"learning_rate": 1.859697264077384e-05, |
|
"loss": 0.9966, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.18064516129032257, |
|
"grad_norm": 0.6153229475021362, |
|
"learning_rate": 1.8579479749808896e-05, |
|
"loss": 0.9319, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.1817204301075269, |
|
"grad_norm": 0.5651281476020813, |
|
"learning_rate": 1.8561886816324094e-05, |
|
"loss": 1.0048, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.1827956989247312, |
|
"grad_norm": 0.6294932961463928, |
|
"learning_rate": 1.8544194045464888e-05, |
|
"loss": 0.9759, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.18387096774193548, |
|
"grad_norm": 0.607211172580719, |
|
"learning_rate": 1.8526401643540924e-05, |
|
"loss": 0.9158, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.18494623655913978, |
|
"grad_norm": 0.6540266275405884, |
|
"learning_rate": 1.850850981802361e-05, |
|
"loss": 0.9033, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.1860215053763441, |
|
"grad_norm": 0.587095320224762, |
|
"learning_rate": 1.849051877754369e-05, |
|
"loss": 0.9804, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.1870967741935484, |
|
"grad_norm": 0.6455761194229126, |
|
"learning_rate": 1.8472428731888836e-05, |
|
"loss": 0.9612, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.1881720430107527, |
|
"grad_norm": 0.6742892861366272, |
|
"learning_rate": 1.845423989200118e-05, |
|
"loss": 0.9744, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.18924731182795698, |
|
"grad_norm": 0.616705596446991, |
|
"learning_rate": 1.8435952469974858e-05, |
|
"loss": 1.0034, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.19032258064516128, |
|
"grad_norm": 0.6112809777259827, |
|
"learning_rate": 1.8417566679053546e-05, |
|
"loss": 0.9842, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.1913978494623656, |
|
"grad_norm": 0.6453455090522766, |
|
"learning_rate": 1.8399082733627967e-05, |
|
"loss": 0.9599, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.1924731182795699, |
|
"grad_norm": 0.6839727163314819, |
|
"learning_rate": 1.8380500849233388e-05, |
|
"loss": 0.9645, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.1935483870967742, |
|
"grad_norm": 0.6297851204872131, |
|
"learning_rate": 1.836182124254711e-05, |
|
"loss": 0.9668, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.19462365591397848, |
|
"grad_norm": 0.6919458508491516, |
|
"learning_rate": 1.834304413138595e-05, |
|
"loss": 0.9229, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.1956989247311828, |
|
"grad_norm": 0.6567605137825012, |
|
"learning_rate": 1.832416973470368e-05, |
|
"loss": 0.9154, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.1967741935483871, |
|
"grad_norm": 0.6956243515014648, |
|
"learning_rate": 1.8305198272588508e-05, |
|
"loss": 0.9811, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.1978494623655914, |
|
"grad_norm": 0.6485621929168701, |
|
"learning_rate": 1.828612996626046e-05, |
|
"loss": 0.9676, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.1989247311827957, |
|
"grad_norm": 0.7490953207015991, |
|
"learning_rate": 1.8266965038068856e-05, |
|
"loss": 0.9766, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.6626827120780945, |
|
"learning_rate": 1.8247703711489684e-05, |
|
"loss": 0.9575, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.2010752688172043, |
|
"grad_norm": 0.6175429821014404, |
|
"learning_rate": 1.8228346211123e-05, |
|
"loss": 0.9801, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.2021505376344086, |
|
"grad_norm": 0.6406196355819702, |
|
"learning_rate": 1.820889276269032e-05, |
|
"loss": 0.9453, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.2032258064516129, |
|
"grad_norm": 0.6591558456420898, |
|
"learning_rate": 1.8189343593031957e-05, |
|
"loss": 0.9892, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.20430107526881722, |
|
"grad_norm": 0.6754528284072876, |
|
"learning_rate": 1.816969893010442e-05, |
|
"loss": 0.9521, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.2053763440860215, |
|
"grad_norm": 0.6206115484237671, |
|
"learning_rate": 1.8149959002977736e-05, |
|
"loss": 0.9677, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.2064516129032258, |
|
"grad_norm": 0.6599158644676208, |
|
"learning_rate": 1.813012404183275e-05, |
|
"loss": 0.9396, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.2075268817204301, |
|
"grad_norm": 0.6430920958518982, |
|
"learning_rate": 1.8110194277958505e-05, |
|
"loss": 0.9879, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.2086021505376344, |
|
"grad_norm": 0.7177631855010986, |
|
"learning_rate": 1.8090169943749477e-05, |
|
"loss": 0.9088, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.20967741935483872, |
|
"grad_norm": 0.6201785206794739, |
|
"learning_rate": 1.8070051272702905e-05, |
|
"loss": 0.968, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.210752688172043, |
|
"grad_norm": 0.669746458530426, |
|
"learning_rate": 1.804983849941607e-05, |
|
"loss": 0.9375, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.2118279569892473, |
|
"grad_norm": 0.6609156727790833, |
|
"learning_rate": 1.802953185958354e-05, |
|
"loss": 0.9481, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.2129032258064516, |
|
"grad_norm": 0.6623542308807373, |
|
"learning_rate": 1.8009131589994418e-05, |
|
"loss": 0.9446, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.21397849462365592, |
|
"grad_norm": 0.6999096870422363, |
|
"learning_rate": 1.7988637928529616e-05, |
|
"loss": 1.0086, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.21505376344086022, |
|
"grad_norm": 0.7636443972587585, |
|
"learning_rate": 1.7968051114159046e-05, |
|
"loss": 0.9363, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.2161290322580645, |
|
"grad_norm": 0.6308568120002747, |
|
"learning_rate": 1.7947371386938848e-05, |
|
"loss": 0.9971, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.2172043010752688, |
|
"grad_norm": 0.6510555148124695, |
|
"learning_rate": 1.7926598988008584e-05, |
|
"loss": 1.0124, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.21827956989247313, |
|
"grad_norm": 0.6936912536621094, |
|
"learning_rate": 1.7905734159588437e-05, |
|
"loss": 0.9521, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.21935483870967742, |
|
"grad_norm": 0.7332192063331604, |
|
"learning_rate": 1.7884777144976376e-05, |
|
"loss": 1.0081, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.22043010752688172, |
|
"grad_norm": 0.7468619346618652, |
|
"learning_rate": 1.7863728188545326e-05, |
|
"loss": 0.922, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.221505376344086, |
|
"grad_norm": 0.6665534377098083, |
|
"learning_rate": 1.7842587535740315e-05, |
|
"loss": 0.9683, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.22258064516129034, |
|
"grad_norm": 0.6921372413635254, |
|
"learning_rate": 1.7821355433075612e-05, |
|
"loss": 0.9393, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.22365591397849463, |
|
"grad_norm": 0.7466671466827393, |
|
"learning_rate": 1.7800032128131846e-05, |
|
"loss": 0.9419, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.22473118279569892, |
|
"grad_norm": 0.6907378435134888, |
|
"learning_rate": 1.7778617869553144e-05, |
|
"loss": 0.9374, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.22580645161290322, |
|
"grad_norm": 0.704535722732544, |
|
"learning_rate": 1.77571129070442e-05, |
|
"loss": 0.9838, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.22688172043010751, |
|
"grad_norm": 0.7151262164115906, |
|
"learning_rate": 1.7735517491367377e-05, |
|
"loss": 0.934, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.22795698924731184, |
|
"grad_norm": 0.7060497403144836, |
|
"learning_rate": 1.771383187433978e-05, |
|
"loss": 0.9295, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.22903225806451613, |
|
"grad_norm": 0.6923340559005737, |
|
"learning_rate": 1.769205630883034e-05, |
|
"loss": 0.9267, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.23010752688172043, |
|
"grad_norm": 0.6596957445144653, |
|
"learning_rate": 1.7670191048756827e-05, |
|
"loss": 0.9271, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.23118279569892472, |
|
"grad_norm": 0.6493801474571228, |
|
"learning_rate": 1.7648236349082928e-05, |
|
"loss": 0.8111, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.23225806451612904, |
|
"grad_norm": 0.7255016565322876, |
|
"learning_rate": 1.762619246581524e-05, |
|
"loss": 0.9433, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.23333333333333334, |
|
"grad_norm": 0.6772259473800659, |
|
"learning_rate": 1.7604059656000313e-05, |
|
"loss": 0.9621, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.23440860215053763, |
|
"grad_norm": 0.6661478281021118, |
|
"learning_rate": 1.758183817772163e-05, |
|
"loss": 0.9105, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.23548387096774193, |
|
"grad_norm": 0.6999973058700562, |
|
"learning_rate": 1.7559528290096618e-05, |
|
"loss": 0.8903, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.23655913978494625, |
|
"grad_norm": 0.707212507724762, |
|
"learning_rate": 1.7537130253273613e-05, |
|
"loss": 0.9644, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.23763440860215054, |
|
"grad_norm": 0.6678735613822937, |
|
"learning_rate": 1.7514644328428826e-05, |
|
"loss": 0.9811, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.23870967741935484, |
|
"grad_norm": 0.6692453026771545, |
|
"learning_rate": 1.749207077776331e-05, |
|
"loss": 0.8874, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.23978494623655913, |
|
"grad_norm": 0.7460502982139587, |
|
"learning_rate": 1.746940986449989e-05, |
|
"loss": 0.9298, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.24086021505376345, |
|
"grad_norm": 0.7537499666213989, |
|
"learning_rate": 1.744666185288009e-05, |
|
"loss": 0.9439, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.24193548387096775, |
|
"grad_norm": 0.7694090604782104, |
|
"learning_rate": 1.742382700816107e-05, |
|
"loss": 0.9174, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.24301075268817204, |
|
"grad_norm": 0.7226264476776123, |
|
"learning_rate": 1.740090559661252e-05, |
|
"loss": 0.937, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.24408602150537634, |
|
"grad_norm": 0.7109532356262207, |
|
"learning_rate": 1.7377897885513556e-05, |
|
"loss": 1.0118, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.24516129032258063, |
|
"grad_norm": 0.6979506015777588, |
|
"learning_rate": 1.735480414314961e-05, |
|
"loss": 0.8199, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.24623655913978496, |
|
"grad_norm": 0.6781380772590637, |
|
"learning_rate": 1.733162463880929e-05, |
|
"loss": 0.992, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.24731182795698925, |
|
"grad_norm": 0.6880660653114319, |
|
"learning_rate": 1.730835964278124e-05, |
|
"loss": 0.9816, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.24838709677419354, |
|
"grad_norm": 0.7391674518585205, |
|
"learning_rate": 1.7285009426351018e-05, |
|
"loss": 0.9519, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.24946236559139784, |
|
"grad_norm": 0.5953665375709534, |
|
"learning_rate": 1.7261574261797887e-05, |
|
"loss": 0.9857, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.25053763440860216, |
|
"grad_norm": 0.7037912011146545, |
|
"learning_rate": 1.7238054422391672e-05, |
|
"loss": 0.9259, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.25053763440860216, |
|
"eval_loss": 0.8279001712799072, |
|
"eval_runtime": 241.007, |
|
"eval_samples_per_second": 11.552, |
|
"eval_steps_per_second": 0.481, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.25161290322580643, |
|
"grad_norm": 0.718204140663147, |
|
"learning_rate": 1.721445018238956e-05, |
|
"loss": 0.9104, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.25268817204301075, |
|
"grad_norm": 0.7386565804481506, |
|
"learning_rate": 1.719076181703291e-05, |
|
"loss": 0.9562, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.2537634408602151, |
|
"grad_norm": 0.7336955070495605, |
|
"learning_rate": 1.7166989602544036e-05, |
|
"loss": 0.9347, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.25483870967741934, |
|
"grad_norm": 0.732012927532196, |
|
"learning_rate": 1.7143133816122993e-05, |
|
"loss": 0.7988, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.25591397849462366, |
|
"grad_norm": 0.7496695518493652, |
|
"learning_rate": 1.7119194735944336e-05, |
|
"loss": 0.9349, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.256989247311828, |
|
"grad_norm": 0.7384220957756042, |
|
"learning_rate": 1.7095172641153894e-05, |
|
"loss": 0.9476, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.25806451612903225, |
|
"grad_norm": 0.7248124480247498, |
|
"learning_rate": 1.7071067811865477e-05, |
|
"loss": 0.9411, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.2591397849462366, |
|
"grad_norm": 0.7681155800819397, |
|
"learning_rate": 1.704688052915766e-05, |
|
"loss": 0.9623, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.26021505376344084, |
|
"grad_norm": 0.7818267345428467, |
|
"learning_rate": 1.7022611075070476e-05, |
|
"loss": 0.9081, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.26129032258064516, |
|
"grad_norm": 0.7292929887771606, |
|
"learning_rate": 1.6998259732602114e-05, |
|
"loss": 0.9384, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.2623655913978495, |
|
"grad_norm": 0.7873015403747559, |
|
"learning_rate": 1.697382678570566e-05, |
|
"loss": 0.9372, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.26344086021505375, |
|
"grad_norm": 0.8167495131492615, |
|
"learning_rate": 1.694931251928575e-05, |
|
"loss": 0.9433, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.2645161290322581, |
|
"grad_norm": 0.7841079831123352, |
|
"learning_rate": 1.6924717219195258e-05, |
|
"loss": 0.9284, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.26559139784946234, |
|
"grad_norm": 0.7518281936645508, |
|
"learning_rate": 1.6900041172231976e-05, |
|
"loss": 0.9286, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.26666666666666666, |
|
"grad_norm": 0.8195715546607971, |
|
"learning_rate": 1.6875284666135252e-05, |
|
"loss": 0.8856, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.267741935483871, |
|
"grad_norm": 0.8226678967475891, |
|
"learning_rate": 1.6850447989582642e-05, |
|
"loss": 0.9344, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.26881720430107525, |
|
"grad_norm": 0.7635514736175537, |
|
"learning_rate": 1.6825531432186545e-05, |
|
"loss": 0.9615, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.2698924731182796, |
|
"grad_norm": 0.7255238890647888, |
|
"learning_rate": 1.680053528449082e-05, |
|
"loss": 0.9485, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.2709677419354839, |
|
"grad_norm": 0.7115532159805298, |
|
"learning_rate": 1.677545983796741e-05, |
|
"loss": 0.8788, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.27204301075268816, |
|
"grad_norm": 0.7228000164031982, |
|
"learning_rate": 1.6750305385012936e-05, |
|
"loss": 0.9634, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.2731182795698925, |
|
"grad_norm": 0.7726433873176575, |
|
"learning_rate": 1.6725072218945274e-05, |
|
"loss": 0.9424, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.27419354838709675, |
|
"grad_norm": 0.7616019248962402, |
|
"learning_rate": 1.6699760634000166e-05, |
|
"loss": 0.9736, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.2752688172043011, |
|
"grad_norm": 0.6681120991706848, |
|
"learning_rate": 1.667437092532776e-05, |
|
"loss": 0.9507, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.2763440860215054, |
|
"grad_norm": 0.7210878133773804, |
|
"learning_rate": 1.6648903388989182e-05, |
|
"loss": 0.9055, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.27741935483870966, |
|
"grad_norm": 0.7816373109817505, |
|
"learning_rate": 1.662335832195308e-05, |
|
"loss": 0.9537, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.278494623655914, |
|
"grad_norm": 0.80950528383255, |
|
"learning_rate": 1.659773602209216e-05, |
|
"loss": 0.9246, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.27956989247311825, |
|
"grad_norm": 0.81439608335495, |
|
"learning_rate": 1.6572036788179728e-05, |
|
"loss": 0.864, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.2806451612903226, |
|
"grad_norm": 0.7569161057472229, |
|
"learning_rate": 1.654626091988617e-05, |
|
"loss": 0.9431, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.2817204301075269, |
|
"grad_norm": 0.7427302002906799, |
|
"learning_rate": 1.6520408717775507e-05, |
|
"loss": 0.9782, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.28279569892473116, |
|
"grad_norm": 0.7272223234176636, |
|
"learning_rate": 1.6494480483301836e-05, |
|
"loss": 0.966, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.2838709677419355, |
|
"grad_norm": 0.8124448657035828, |
|
"learning_rate": 1.6468476518805872e-05, |
|
"loss": 0.8872, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.2849462365591398, |
|
"grad_norm": 0.7480745911598206, |
|
"learning_rate": 1.6442397127511366e-05, |
|
"loss": 0.9192, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.2860215053763441, |
|
"grad_norm": 0.7396025061607361, |
|
"learning_rate": 1.6416242613521612e-05, |
|
"loss": 0.8917, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.2870967741935484, |
|
"grad_norm": 0.7699283957481384, |
|
"learning_rate": 1.6390013281815884e-05, |
|
"loss": 0.933, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.28817204301075267, |
|
"grad_norm": 0.781078577041626, |
|
"learning_rate": 1.6363709438245877e-05, |
|
"loss": 0.931, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.289247311827957, |
|
"grad_norm": 0.8071228265762329, |
|
"learning_rate": 1.6337331389532148e-05, |
|
"loss": 0.913, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.2903225806451613, |
|
"grad_norm": 0.8504091501235962, |
|
"learning_rate": 1.631087944326053e-05, |
|
"loss": 0.9049, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.2913978494623656, |
|
"grad_norm": 0.7057498097419739, |
|
"learning_rate": 1.6284353907878557e-05, |
|
"loss": 0.9174, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.2924731182795699, |
|
"grad_norm": 0.8215770721435547, |
|
"learning_rate": 1.6257755092691865e-05, |
|
"loss": 0.9154, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.29354838709677417, |
|
"grad_norm": 0.7845609188079834, |
|
"learning_rate": 1.6231083307860574e-05, |
|
"loss": 0.9382, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.2946236559139785, |
|
"grad_norm": 0.7890587449073792, |
|
"learning_rate": 1.6204338864395683e-05, |
|
"loss": 0.9309, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.2956989247311828, |
|
"grad_norm": 0.7580718994140625, |
|
"learning_rate": 1.6177522074155436e-05, |
|
"loss": 0.9568, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.2967741935483871, |
|
"grad_norm": 0.7641699314117432, |
|
"learning_rate": 1.6150633249841696e-05, |
|
"loss": 0.9461, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.2978494623655914, |
|
"grad_norm": 0.7395573854446411, |
|
"learning_rate": 1.612367270499629e-05, |
|
"loss": 0.9212, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.2989247311827957, |
|
"grad_norm": 0.7308792471885681, |
|
"learning_rate": 1.609664075399735e-05, |
|
"loss": 0.9333, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.8151824474334717, |
|
"learning_rate": 1.6069537712055652e-05, |
|
"loss": 0.9271, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.3010752688172043, |
|
"grad_norm": 0.7019997239112854, |
|
"learning_rate": 1.6042363895210948e-05, |
|
"loss": 0.8021, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.3021505376344086, |
|
"grad_norm": 0.7757791876792908, |
|
"learning_rate": 1.6015119620328266e-05, |
|
"loss": 0.9181, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.3032258064516129, |
|
"grad_norm": 0.7289377450942993, |
|
"learning_rate": 1.5987805205094225e-05, |
|
"loss": 0.9425, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.3043010752688172, |
|
"grad_norm": 0.8123842477798462, |
|
"learning_rate": 1.5960420968013334e-05, |
|
"loss": 0.9172, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.3053763440860215, |
|
"grad_norm": 0.9251433610916138, |
|
"learning_rate": 1.5932967228404255e-05, |
|
"loss": 0.9684, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.3064516129032258, |
|
"grad_norm": 0.8081958293914795, |
|
"learning_rate": 1.590544430639611e-05, |
|
"loss": 0.9333, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.30752688172043013, |
|
"grad_norm": 0.805475115776062, |
|
"learning_rate": 1.5877852522924733e-05, |
|
"loss": 0.9307, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.3086021505376344, |
|
"grad_norm": 0.7162765264511108, |
|
"learning_rate": 1.5850192199728927e-05, |
|
"loss": 0.9564, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.3096774193548387, |
|
"grad_norm": 0.7985969185829163, |
|
"learning_rate": 1.582246365934671e-05, |
|
"loss": 0.9382, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.310752688172043, |
|
"grad_norm": 0.8044575452804565, |
|
"learning_rate": 1.5794667225111572e-05, |
|
"loss": 0.9351, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.3118279569892473, |
|
"grad_norm": 0.804595410823822, |
|
"learning_rate": 1.5766803221148676e-05, |
|
"loss": 0.8794, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.31290322580645163, |
|
"grad_norm": 0.7444095611572266, |
|
"learning_rate": 1.5738871972371096e-05, |
|
"loss": 0.9392, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.3139784946236559, |
|
"grad_norm": 0.8467448949813843, |
|
"learning_rate": 1.5710873804476035e-05, |
|
"loss": 0.8868, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.3150537634408602, |
|
"grad_norm": 0.7347026467323303, |
|
"learning_rate": 1.5682809043941013e-05, |
|
"loss": 0.9709, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.3161290322580645, |
|
"grad_norm": 0.8257015943527222, |
|
"learning_rate": 1.565467801802006e-05, |
|
"loss": 0.9504, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.3172043010752688, |
|
"grad_norm": 0.7517876625061035, |
|
"learning_rate": 1.5626481054739916e-05, |
|
"loss": 0.9575, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.31827956989247314, |
|
"grad_norm": 0.806496798992157, |
|
"learning_rate": 1.5598218482896182e-05, |
|
"loss": 0.9305, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.3193548387096774, |
|
"grad_norm": 0.7192513942718506, |
|
"learning_rate": 1.5569890632049515e-05, |
|
"loss": 0.8197, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.3204301075268817, |
|
"grad_norm": 0.7461637258529663, |
|
"learning_rate": 1.554149783252175e-05, |
|
"loss": 0.9123, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.32150537634408605, |
|
"grad_norm": 0.8110654950141907, |
|
"learning_rate": 1.551304041539208e-05, |
|
"loss": 0.8738, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.3225806451612903, |
|
"grad_norm": 0.804533064365387, |
|
"learning_rate": 1.5484518712493188e-05, |
|
"loss": 0.946, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.32365591397849464, |
|
"grad_norm": 0.7754772305488586, |
|
"learning_rate": 1.545593305640736e-05, |
|
"loss": 0.9249, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.3247311827956989, |
|
"grad_norm": 0.7326719760894775, |
|
"learning_rate": 1.542728378046262e-05, |
|
"loss": 0.9068, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.3258064516129032, |
|
"grad_norm": 0.835762083530426, |
|
"learning_rate": 1.539857121872885e-05, |
|
"loss": 0.9104, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.32688172043010755, |
|
"grad_norm": 0.7900486588478088, |
|
"learning_rate": 1.536979570601388e-05, |
|
"loss": 0.9449, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.3279569892473118, |
|
"grad_norm": 0.8278104662895203, |
|
"learning_rate": 1.5340957577859605e-05, |
|
"loss": 0.9153, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.32903225806451614, |
|
"grad_norm": 0.7644244432449341, |
|
"learning_rate": 1.5312057170538033e-05, |
|
"loss": 0.9347, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.3301075268817204, |
|
"grad_norm": 0.7568009495735168, |
|
"learning_rate": 1.5283094821047416e-05, |
|
"loss": 0.9186, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.3311827956989247, |
|
"grad_norm": 0.9083740711212158, |
|
"learning_rate": 1.5254070867108277e-05, |
|
"loss": 0.8872, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.33225806451612905, |
|
"grad_norm": 0.7168444395065308, |
|
"learning_rate": 1.5224985647159489e-05, |
|
"loss": 0.9319, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.3333333333333333, |
|
"grad_norm": 0.8705156445503235, |
|
"learning_rate": 1.5195839500354337e-05, |
|
"loss": 0.8789, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.33440860215053764, |
|
"grad_norm": 0.800618588924408, |
|
"learning_rate": 1.5166632766556546e-05, |
|
"loss": 0.9108, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.33548387096774196, |
|
"grad_norm": 0.7793113589286804, |
|
"learning_rate": 1.5137365786336329e-05, |
|
"loss": 0.9201, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.3365591397849462, |
|
"grad_norm": 0.8202828764915466, |
|
"learning_rate": 1.5108038900966416e-05, |
|
"loss": 0.9229, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.33763440860215055, |
|
"grad_norm": 0.8053287267684937, |
|
"learning_rate": 1.5078652452418063e-05, |
|
"loss": 0.927, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.3387096774193548, |
|
"grad_norm": 0.8459357619285583, |
|
"learning_rate": 1.5049206783357082e-05, |
|
"loss": 0.905, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.33978494623655914, |
|
"grad_norm": 0.8645407557487488, |
|
"learning_rate": 1.501970223713983e-05, |
|
"loss": 0.8827, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.34086021505376346, |
|
"grad_norm": 0.7910537719726562, |
|
"learning_rate": 1.4990139157809217e-05, |
|
"loss": 0.9287, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.3419354838709677, |
|
"grad_norm": 0.7768109440803528, |
|
"learning_rate": 1.4960517890090683e-05, |
|
"loss": 0.9122, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.34301075268817205, |
|
"grad_norm": 0.827042281627655, |
|
"learning_rate": 1.4930838779388186e-05, |
|
"loss": 0.9957, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.34408602150537637, |
|
"grad_norm": 0.8755815625190735, |
|
"learning_rate": 1.4901102171780175e-05, |
|
"loss": 0.901, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.34516129032258064, |
|
"grad_norm": 0.8080335259437561, |
|
"learning_rate": 1.4871308414015547e-05, |
|
"loss": 0.9454, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.34623655913978496, |
|
"grad_norm": 0.737436830997467, |
|
"learning_rate": 1.4841457853509606e-05, |
|
"loss": 0.9691, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.3473118279569892, |
|
"grad_norm": 0.8127477765083313, |
|
"learning_rate": 1.4811550838340028e-05, |
|
"loss": 0.9147, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.34838709677419355, |
|
"grad_norm": 0.8597912192344666, |
|
"learning_rate": 1.4781587717242772e-05, |
|
"loss": 0.8878, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.34946236559139787, |
|
"grad_norm": 0.8688490390777588, |
|
"learning_rate": 1.4751568839608036e-05, |
|
"loss": 0.8875, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.35053763440860214, |
|
"grad_norm": 0.8337180614471436, |
|
"learning_rate": 1.4721494555476189e-05, |
|
"loss": 0.9472, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.35161290322580646, |
|
"grad_norm": 0.716763973236084, |
|
"learning_rate": 1.4691365215533653e-05, |
|
"loss": 0.7999, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.35268817204301073, |
|
"grad_norm": 0.8275082111358643, |
|
"learning_rate": 1.4661181171108855e-05, |
|
"loss": 0.8463, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.35376344086021505, |
|
"grad_norm": 0.8415479063987732, |
|
"learning_rate": 1.4630942774168113e-05, |
|
"loss": 0.9254, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.3548387096774194, |
|
"grad_norm": 0.8180720806121826, |
|
"learning_rate": 1.4600650377311523e-05, |
|
"loss": 0.9728, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.35591397849462364, |
|
"grad_norm": 0.7769216895103455, |
|
"learning_rate": 1.4570304333768865e-05, |
|
"loss": 0.909, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.35698924731182796, |
|
"grad_norm": 0.8296689987182617, |
|
"learning_rate": 1.4539904997395468e-05, |
|
"loss": 0.8499, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.3580645161290323, |
|
"grad_norm": 0.8449047803878784, |
|
"learning_rate": 1.45094527226681e-05, |
|
"loss": 0.9287, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.35913978494623655, |
|
"grad_norm": 0.8248459696769714, |
|
"learning_rate": 1.4478947864680821e-05, |
|
"loss": 0.912, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.3602150537634409, |
|
"grad_norm": 0.8891895413398743, |
|
"learning_rate": 1.4448390779140844e-05, |
|
"loss": 0.9246, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.36129032258064514, |
|
"grad_norm": 0.8913347721099854, |
|
"learning_rate": 1.4417781822364396e-05, |
|
"loss": 0.8844, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.36236559139784946, |
|
"grad_norm": 0.7879188656806946, |
|
"learning_rate": 1.4387121351272555e-05, |
|
"loss": 0.9675, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.3634408602150538, |
|
"grad_norm": 0.8322626352310181, |
|
"learning_rate": 1.4356409723387092e-05, |
|
"loss": 0.9204, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.36451612903225805, |
|
"grad_norm": 0.8096638321876526, |
|
"learning_rate": 1.4325647296826302e-05, |
|
"loss": 0.9202, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.3655913978494624, |
|
"grad_norm": 0.8181870579719543, |
|
"learning_rate": 1.4294834430300822e-05, |
|
"loss": 0.9297, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.36666666666666664, |
|
"grad_norm": 0.8794824481010437, |
|
"learning_rate": 1.4263971483109456e-05, |
|
"loss": 0.93, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.36774193548387096, |
|
"grad_norm": 0.7784544825553894, |
|
"learning_rate": 1.4233058815134978e-05, |
|
"loss": 0.918, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.3688172043010753, |
|
"grad_norm": 0.8606961369514465, |
|
"learning_rate": 1.4202096786839953e-05, |
|
"loss": 0.9038, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.36989247311827955, |
|
"grad_norm": 0.8213427662849426, |
|
"learning_rate": 1.4171085759262507e-05, |
|
"loss": 0.9241, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.3709677419354839, |
|
"grad_norm": 0.8662806153297424, |
|
"learning_rate": 1.4140026094012136e-05, |
|
"loss": 0.8955, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.3720430107526882, |
|
"grad_norm": 0.8682425022125244, |
|
"learning_rate": 1.4108918153265485e-05, |
|
"loss": 0.9472, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.37311827956989246, |
|
"grad_norm": 0.8322152495384216, |
|
"learning_rate": 1.4077762299762116e-05, |
|
"loss": 0.9551, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.3741935483870968, |
|
"grad_norm": 0.8238283395767212, |
|
"learning_rate": 1.4046558896800299e-05, |
|
"loss": 0.9479, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.37526881720430105, |
|
"grad_norm": 0.8035296201705933, |
|
"learning_rate": 1.4015308308232753e-05, |
|
"loss": 0.9319, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.3763440860215054, |
|
"grad_norm": 0.7743344902992249, |
|
"learning_rate": 1.3984010898462417e-05, |
|
"loss": 0.9102, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.3774193548387097, |
|
"grad_norm": 0.8227608799934387, |
|
"learning_rate": 1.3952667032438191e-05, |
|
"loss": 0.8969, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.37849462365591396, |
|
"grad_norm": 0.8089775443077087, |
|
"learning_rate": 1.3921277075650703e-05, |
|
"loss": 0.9621, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.3795698924731183, |
|
"grad_norm": 0.8506449460983276, |
|
"learning_rate": 1.3889841394128007e-05, |
|
"loss": 0.8901, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.38064516129032255, |
|
"grad_norm": 0.9277204871177673, |
|
"learning_rate": 1.3858360354431355e-05, |
|
"loss": 0.8627, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.3817204301075269, |
|
"grad_norm": 0.832259476184845, |
|
"learning_rate": 1.3826834323650899e-05, |
|
"loss": 0.9437, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.3827956989247312, |
|
"grad_norm": 0.7443482279777527, |
|
"learning_rate": 1.379526366940142e-05, |
|
"loss": 0.9532, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.38387096774193546, |
|
"grad_norm": 0.887854814529419, |
|
"learning_rate": 1.3763648759818035e-05, |
|
"loss": 0.8876, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.3849462365591398, |
|
"grad_norm": 0.8205659985542297, |
|
"learning_rate": 1.3731989963551916e-05, |
|
"loss": 0.8975, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.3860215053763441, |
|
"grad_norm": 0.8111019134521484, |
|
"learning_rate": 1.3700287649765973e-05, |
|
"loss": 0.9087, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.3870967741935484, |
|
"grad_norm": 0.8579467535018921, |
|
"learning_rate": 1.3668542188130567e-05, |
|
"loss": 0.9444, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.3881720430107527, |
|
"grad_norm": 0.8444235920906067, |
|
"learning_rate": 1.3636753948819188e-05, |
|
"loss": 0.8645, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.38924731182795697, |
|
"grad_norm": 0.8532136082649231, |
|
"learning_rate": 1.3604923302504146e-05, |
|
"loss": 0.9295, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.3903225806451613, |
|
"grad_norm": 0.9755718111991882, |
|
"learning_rate": 1.3573050620352247e-05, |
|
"loss": 0.8978, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.3913978494623656, |
|
"grad_norm": 0.8287932872772217, |
|
"learning_rate": 1.3541136274020452e-05, |
|
"loss": 0.9195, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.3924731182795699, |
|
"grad_norm": 0.8778457641601562, |
|
"learning_rate": 1.350918063565157e-05, |
|
"loss": 0.8834, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.3935483870967742, |
|
"grad_norm": 1.0230962038040161, |
|
"learning_rate": 1.3477184077869892e-05, |
|
"loss": 0.8352, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.3946236559139785, |
|
"grad_norm": 0.8578838109970093, |
|
"learning_rate": 1.3445146973776854e-05, |
|
"loss": 0.9203, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.3956989247311828, |
|
"grad_norm": 0.8627865314483643, |
|
"learning_rate": 1.3413069696946706e-05, |
|
"loss": 0.9246, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.3967741935483871, |
|
"grad_norm": 0.8898962140083313, |
|
"learning_rate": 1.338095262142212e-05, |
|
"loss": 0.9146, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.3978494623655914, |
|
"grad_norm": 0.9508132338523865, |
|
"learning_rate": 1.3348796121709862e-05, |
|
"loss": 0.8892, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.3989247311827957, |
|
"grad_norm": 0.9117575287818909, |
|
"learning_rate": 1.3316600572776405e-05, |
|
"loss": 0.9301, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.8592390418052673, |
|
"learning_rate": 1.3284366350043558e-05, |
|
"loss": 0.8979, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.4010752688172043, |
|
"grad_norm": 0.8330084681510925, |
|
"learning_rate": 1.32520938293841e-05, |
|
"loss": 0.9418, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.4021505376344086, |
|
"grad_norm": 0.7911673784255981, |
|
"learning_rate": 1.3219783387117384e-05, |
|
"loss": 0.9552, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.4032258064516129, |
|
"grad_norm": 0.820270299911499, |
|
"learning_rate": 1.318743540000496e-05, |
|
"loss": 0.9313, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.4043010752688172, |
|
"grad_norm": 0.7301176190376282, |
|
"learning_rate": 1.3155050245246171e-05, |
|
"loss": 0.9221, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.4053763440860215, |
|
"grad_norm": 0.901095986366272, |
|
"learning_rate": 1.3122628300473763e-05, |
|
"loss": 0.8694, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.4064516129032258, |
|
"grad_norm": 0.8115401268005371, |
|
"learning_rate": 1.3090169943749475e-05, |
|
"loss": 0.8838, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.4075268817204301, |
|
"grad_norm": 0.8125736713409424, |
|
"learning_rate": 1.3057675553559638e-05, |
|
"loss": 0.9649, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.40860215053763443, |
|
"grad_norm": 0.8270546793937683, |
|
"learning_rate": 1.302514550881076e-05, |
|
"loss": 0.9482, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.4096774193548387, |
|
"grad_norm": 1.0296354293823242, |
|
"learning_rate": 1.2992580188825093e-05, |
|
"loss": 0.8666, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.410752688172043, |
|
"grad_norm": 0.8970001935958862, |
|
"learning_rate": 1.2959979973336236e-05, |
|
"loss": 0.9621, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.4118279569892473, |
|
"grad_norm": 0.9341355562210083, |
|
"learning_rate": 1.2927345242484683e-05, |
|
"loss": 0.9454, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.4129032258064516, |
|
"grad_norm": 0.8696271181106567, |
|
"learning_rate": 1.289467637681341e-05, |
|
"loss": 0.9376, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.41397849462365593, |
|
"grad_norm": 0.7984682321548462, |
|
"learning_rate": 1.2861973757263416e-05, |
|
"loss": 0.8875, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.4150537634408602, |
|
"grad_norm": 0.9292618632316589, |
|
"learning_rate": 1.28292377651693e-05, |
|
"loss": 0.8475, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.4161290322580645, |
|
"grad_norm": 0.8904953002929688, |
|
"learning_rate": 1.2796468782254814e-05, |
|
"loss": 0.9205, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.4172043010752688, |
|
"grad_norm": 0.8816090822219849, |
|
"learning_rate": 1.2763667190628391e-05, |
|
"loss": 0.9353, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.4182795698924731, |
|
"grad_norm": 0.8573107123374939, |
|
"learning_rate": 1.2730833372778714e-05, |
|
"loss": 0.9269, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.41935483870967744, |
|
"grad_norm": 0.9212923049926758, |
|
"learning_rate": 1.2697967711570243e-05, |
|
"loss": 0.8834, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.4204301075268817, |
|
"grad_norm": 0.9144071936607361, |
|
"learning_rate": 1.2665070590238753e-05, |
|
"loss": 0.9382, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.421505376344086, |
|
"grad_norm": 0.8885826468467712, |
|
"learning_rate": 1.263214239238686e-05, |
|
"loss": 0.8705, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.42258064516129035, |
|
"grad_norm": 0.9030449986457825, |
|
"learning_rate": 1.2599183501979558e-05, |
|
"loss": 0.9058, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.4236559139784946, |
|
"grad_norm": 0.8024026155471802, |
|
"learning_rate": 1.2566194303339738e-05, |
|
"loss": 0.9498, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.42473118279569894, |
|
"grad_norm": 0.8615883588790894, |
|
"learning_rate": 1.2533175181143704e-05, |
|
"loss": 0.9147, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.4258064516129032, |
|
"grad_norm": 0.8240619897842407, |
|
"learning_rate": 1.250012652041669e-05, |
|
"loss": 0.9038, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.4268817204301075, |
|
"grad_norm": 0.8786563277244568, |
|
"learning_rate": 1.2467048706528373e-05, |
|
"loss": 0.9278, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.42795698924731185, |
|
"grad_norm": 0.9570086002349854, |
|
"learning_rate": 1.2433942125188359e-05, |
|
"loss": 0.9454, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.4290322580645161, |
|
"grad_norm": 0.8550770282745361, |
|
"learning_rate": 1.2400807162441721e-05, |
|
"loss": 0.9276, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.43010752688172044, |
|
"grad_norm": 0.9284153580665588, |
|
"learning_rate": 1.2367644204664468e-05, |
|
"loss": 0.8967, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.4311827956989247, |
|
"grad_norm": 0.8993642330169678, |
|
"learning_rate": 1.2334453638559057e-05, |
|
"loss": 0.9173, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.432258064516129, |
|
"grad_norm": 0.8449714183807373, |
|
"learning_rate": 1.2301235851149867e-05, |
|
"loss": 0.9276, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.43333333333333335, |
|
"grad_norm": 0.8758628964424133, |
|
"learning_rate": 1.2267991229778702e-05, |
|
"loss": 0.902, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.4344086021505376, |
|
"grad_norm": 0.9460270404815674, |
|
"learning_rate": 1.2234720162100271e-05, |
|
"loss": 0.9205, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.43548387096774194, |
|
"grad_norm": 0.9168769121170044, |
|
"learning_rate": 1.2201423036077657e-05, |
|
"loss": 0.9079, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.43655913978494626, |
|
"grad_norm": 0.83861243724823, |
|
"learning_rate": 1.216810023997781e-05, |
|
"loss": 0.9318, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.4376344086021505, |
|
"grad_norm": 0.8358942270278931, |
|
"learning_rate": 1.2134752162366998e-05, |
|
"loss": 0.9074, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.43870967741935485, |
|
"grad_norm": 0.8796260356903076, |
|
"learning_rate": 1.2101379192106302e-05, |
|
"loss": 0.9288, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.4397849462365591, |
|
"grad_norm": 0.8561341762542725, |
|
"learning_rate": 1.206798171834706e-05, |
|
"loss": 0.8724, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.44086021505376344, |
|
"grad_norm": 0.9396000504493713, |
|
"learning_rate": 1.2034560130526341e-05, |
|
"loss": 0.9081, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.44193548387096776, |
|
"grad_norm": 0.9365426898002625, |
|
"learning_rate": 1.2001114818362394e-05, |
|
"loss": 0.8575, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.443010752688172, |
|
"grad_norm": 0.9004622101783752, |
|
"learning_rate": 1.1967646171850118e-05, |
|
"loss": 0.9361, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.44408602150537635, |
|
"grad_norm": 0.8997867107391357, |
|
"learning_rate": 1.1934154581256498e-05, |
|
"loss": 0.8709, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.44516129032258067, |
|
"grad_norm": 0.890632688999176, |
|
"learning_rate": 1.1900640437116074e-05, |
|
"loss": 0.9333, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.44623655913978494, |
|
"grad_norm": 0.869012176990509, |
|
"learning_rate": 1.1867104130226363e-05, |
|
"loss": 0.8991, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.44731182795698926, |
|
"grad_norm": 0.8767854571342468, |
|
"learning_rate": 1.1833546051643325e-05, |
|
"loss": 0.9269, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.4483870967741935, |
|
"grad_norm": 0.905999481678009, |
|
"learning_rate": 1.1799966592676784e-05, |
|
"loss": 0.9236, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.44946236559139785, |
|
"grad_norm": 0.8079774379730225, |
|
"learning_rate": 1.1766366144885877e-05, |
|
"loss": 0.9014, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.4505376344086022, |
|
"grad_norm": 0.9038012027740479, |
|
"learning_rate": 1.1732745100074485e-05, |
|
"loss": 0.8835, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.45161290322580644, |
|
"grad_norm": 0.9231516718864441, |
|
"learning_rate": 1.1699103850286668e-05, |
|
"loss": 0.8985, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.45268817204301076, |
|
"grad_norm": 0.9052597284317017, |
|
"learning_rate": 1.1665442787802083e-05, |
|
"loss": 0.9492, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.45376344086021503, |
|
"grad_norm": 0.9429377913475037, |
|
"learning_rate": 1.1631762305131424e-05, |
|
"loss": 0.8591, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.45483870967741935, |
|
"grad_norm": 0.9614149332046509, |
|
"learning_rate": 1.1598062795011827e-05, |
|
"loss": 0.9166, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.4559139784946237, |
|
"grad_norm": 0.8375269174575806, |
|
"learning_rate": 1.156434465040231e-05, |
|
"loss": 0.9374, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.45698924731182794, |
|
"grad_norm": 0.7212313413619995, |
|
"learning_rate": 1.153060826447918e-05, |
|
"loss": 0.7176, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.45806451612903226, |
|
"grad_norm": 0.9267921447753906, |
|
"learning_rate": 1.1496854030631443e-05, |
|
"loss": 0.8844, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.4591397849462366, |
|
"grad_norm": 0.9112178087234497, |
|
"learning_rate": 1.1463082342456238e-05, |
|
"loss": 0.9367, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.46021505376344085, |
|
"grad_norm": 0.8534091114997864, |
|
"learning_rate": 1.1429293593754216e-05, |
|
"loss": 0.917, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.4612903225806452, |
|
"grad_norm": 0.946246325969696, |
|
"learning_rate": 1.1395488178524982e-05, |
|
"loss": 0.9292, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.46236559139784944, |
|
"grad_norm": 0.8600665330886841, |
|
"learning_rate": 1.1361666490962468e-05, |
|
"loss": 0.8945, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.46344086021505376, |
|
"grad_norm": 0.8451212644577026, |
|
"learning_rate": 1.1327828925450363e-05, |
|
"loss": 0.9061, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.4645161290322581, |
|
"grad_norm": 0.8945679068565369, |
|
"learning_rate": 1.1293975876557506e-05, |
|
"loss": 0.9045, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.46559139784946235, |
|
"grad_norm": 0.8499189019203186, |
|
"learning_rate": 1.126010773903327e-05, |
|
"loss": 0.9097, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.4666666666666667, |
|
"grad_norm": 0.9028451442718506, |
|
"learning_rate": 1.1226224907802986e-05, |
|
"loss": 0.8855, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.46774193548387094, |
|
"grad_norm": 0.8975670337677002, |
|
"learning_rate": 1.1192327777963313e-05, |
|
"loss": 0.8604, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.46881720430107526, |
|
"grad_norm": 0.9408153295516968, |
|
"learning_rate": 1.1158416744777644e-05, |
|
"loss": 0.8763, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.4698924731182796, |
|
"grad_norm": 0.8648738265037537, |
|
"learning_rate": 1.1124492203671498e-05, |
|
"loss": 0.9029, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.47096774193548385, |
|
"grad_norm": 0.8570336699485779, |
|
"learning_rate": 1.1090554550227899e-05, |
|
"loss": 0.9141, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.4720430107526882, |
|
"grad_norm": 0.9008473753929138, |
|
"learning_rate": 1.1056604180182777e-05, |
|
"loss": 0.9176, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.4731182795698925, |
|
"grad_norm": 0.9899921417236328, |
|
"learning_rate": 1.1022641489420342e-05, |
|
"loss": 0.887, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.47419354838709676, |
|
"grad_norm": 0.843747079372406, |
|
"learning_rate": 1.0988666873968477e-05, |
|
"loss": 0.9383, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.4752688172043011, |
|
"grad_norm": 0.8630385398864746, |
|
"learning_rate": 1.0954680729994103e-05, |
|
"loss": 0.9149, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.47634408602150535, |
|
"grad_norm": 0.9598048329353333, |
|
"learning_rate": 1.0920683453798581e-05, |
|
"loss": 0.908, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.4774193548387097, |
|
"grad_norm": 0.9103092551231384, |
|
"learning_rate": 1.0886675441813083e-05, |
|
"loss": 0.976, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.478494623655914, |
|
"grad_norm": 0.8863061666488647, |
|
"learning_rate": 1.0852657090593961e-05, |
|
"loss": 0.891, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.47956989247311826, |
|
"grad_norm": 0.9999749064445496, |
|
"learning_rate": 1.0818628796818134e-05, |
|
"loss": 0.8681, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.4806451612903226, |
|
"grad_norm": 0.858534038066864, |
|
"learning_rate": 1.0784590957278452e-05, |
|
"loss": 0.917, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.4817204301075269, |
|
"grad_norm": 0.9822399020195007, |
|
"learning_rate": 1.0750543968879081e-05, |
|
"loss": 0.8228, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.4827956989247312, |
|
"grad_norm": 0.9111456871032715, |
|
"learning_rate": 1.0716488228630867e-05, |
|
"loss": 0.9088, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.4838709677419355, |
|
"grad_norm": 1.0209755897521973, |
|
"learning_rate": 1.0682424133646712e-05, |
|
"loss": 0.8397, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.48494623655913977, |
|
"grad_norm": 0.9134584665298462, |
|
"learning_rate": 1.064835208113693e-05, |
|
"loss": 0.8753, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.4860215053763441, |
|
"grad_norm": 0.9252523183822632, |
|
"learning_rate": 1.0614272468404637e-05, |
|
"loss": 0.8912, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.4870967741935484, |
|
"grad_norm": 1.0427286624908447, |
|
"learning_rate": 1.0580185692841095e-05, |
|
"loss": 0.9254, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.4881720430107527, |
|
"grad_norm": 0.968657910823822, |
|
"learning_rate": 1.05460921519211e-05, |
|
"loss": 0.8925, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.489247311827957, |
|
"grad_norm": 0.9973106980323792, |
|
"learning_rate": 1.0511992243198335e-05, |
|
"loss": 0.8594, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.49032258064516127, |
|
"grad_norm": 0.8644976019859314, |
|
"learning_rate": 1.0477886364300722e-05, |
|
"loss": 0.9359, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.4913978494623656, |
|
"grad_norm": 0.9711951017379761, |
|
"learning_rate": 1.0443774912925814e-05, |
|
"loss": 0.8896, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.4924731182795699, |
|
"grad_norm": 0.8667163848876953, |
|
"learning_rate": 1.0409658286836144e-05, |
|
"loss": 0.9817, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.4935483870967742, |
|
"grad_norm": 0.8854442238807678, |
|
"learning_rate": 1.0375536883854575e-05, |
|
"loss": 0.8869, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.4946236559139785, |
|
"grad_norm": 0.8985066413879395, |
|
"learning_rate": 1.034141110185968e-05, |
|
"loss": 0.8919, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.4956989247311828, |
|
"grad_norm": 0.8306286931037903, |
|
"learning_rate": 1.0307281338781092e-05, |
|
"loss": 0.9197, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.4967741935483871, |
|
"grad_norm": 0.8850293755531311, |
|
"learning_rate": 1.0273147992594861e-05, |
|
"loss": 0.8781, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.4978494623655914, |
|
"grad_norm": 0.8463788032531738, |
|
"learning_rate": 1.0239011461318827e-05, |
|
"loss": 0.9323, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.4989247311827957, |
|
"grad_norm": 0.9025292992591858, |
|
"learning_rate": 1.0204872143007965e-05, |
|
"loss": 0.9365, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8844058513641357, |
|
"learning_rate": 1.017073043574975e-05, |
|
"loss": 0.8897, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.5010752688172043, |
|
"grad_norm": 0.9002809524536133, |
|
"learning_rate": 1.013658673765951e-05, |
|
"loss": 0.8773, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.5010752688172043, |
|
"eval_loss": 0.8080702424049377, |
|
"eval_runtime": 241.1163, |
|
"eval_samples_per_second": 11.546, |
|
"eval_steps_per_second": 0.481, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.5021505376344086, |
|
"grad_norm": 0.9127078056335449, |
|
"learning_rate": 1.0102441446875801e-05, |
|
"loss": 0.9209, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.5032258064516129, |
|
"grad_norm": 0.9291108846664429, |
|
"learning_rate": 1.0068294961555734e-05, |
|
"loss": 0.9451, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.5043010752688172, |
|
"grad_norm": 0.9335411190986633, |
|
"learning_rate": 1.0034147679870356e-05, |
|
"loss": 0.8707, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.5053763440860215, |
|
"grad_norm": 1.0403931140899658, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8727, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.5064516129032258, |
|
"grad_norm": 0.9628621935844421, |
|
"learning_rate": 9.965852320129646e-06, |
|
"loss": 0.9089, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.5075268817204301, |
|
"grad_norm": 0.8969815373420715, |
|
"learning_rate": 9.93170503844427e-06, |
|
"loss": 0.891, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.5086021505376344, |
|
"grad_norm": 0.9003312587738037, |
|
"learning_rate": 9.897558553124202e-06, |
|
"loss": 0.9265, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.5096774193548387, |
|
"grad_norm": 0.8279120922088623, |
|
"learning_rate": 9.863413262340491e-06, |
|
"loss": 0.9122, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.510752688172043, |
|
"grad_norm": 0.9585661888122559, |
|
"learning_rate": 9.829269564250254e-06, |
|
"loss": 0.9025, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.5118279569892473, |
|
"grad_norm": 0.906315267086029, |
|
"learning_rate": 9.79512785699204e-06, |
|
"loss": 0.9445, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.5129032258064516, |
|
"grad_norm": 0.86076420545578, |
|
"learning_rate": 9.760988538681176e-06, |
|
"loss": 0.9392, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.513978494623656, |
|
"grad_norm": 0.8743923902511597, |
|
"learning_rate": 9.726852007405144e-06, |
|
"loss": 0.9016, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.5150537634408602, |
|
"grad_norm": 1.0092248916625977, |
|
"learning_rate": 9.692718661218912e-06, |
|
"loss": 0.8621, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.5161290322580645, |
|
"grad_norm": 0.8339441418647766, |
|
"learning_rate": 9.658588898140322e-06, |
|
"loss": 0.9196, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.5172043010752688, |
|
"grad_norm": 0.9604438543319702, |
|
"learning_rate": 9.624463116145428e-06, |
|
"loss": 0.9051, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.5182795698924731, |
|
"grad_norm": 0.9511142373085022, |
|
"learning_rate": 9.590341713163858e-06, |
|
"loss": 0.9259, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.5193548387096775, |
|
"grad_norm": 0.9032608866691589, |
|
"learning_rate": 9.55622508707419e-06, |
|
"loss": 0.8866, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.5204301075268817, |
|
"grad_norm": 0.84687340259552, |
|
"learning_rate": 9.522113635699281e-06, |
|
"loss": 0.9221, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.521505376344086, |
|
"grad_norm": 0.9060841202735901, |
|
"learning_rate": 9.488007756801672e-06, |
|
"loss": 0.8699, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.5225806451612903, |
|
"grad_norm": 1.0113224983215332, |
|
"learning_rate": 9.453907848078901e-06, |
|
"loss": 0.8556, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.5236559139784946, |
|
"grad_norm": 0.9846504926681519, |
|
"learning_rate": 9.419814307158908e-06, |
|
"loss": 0.8768, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.524731182795699, |
|
"grad_norm": 1.0450615882873535, |
|
"learning_rate": 9.385727531595367e-06, |
|
"loss": 0.841, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.5258064516129032, |
|
"grad_norm": 0.9559835195541382, |
|
"learning_rate": 9.35164791886307e-06, |
|
"loss": 0.8999, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.5268817204301075, |
|
"grad_norm": 1.0124439001083374, |
|
"learning_rate": 9.317575866353293e-06, |
|
"loss": 0.8727, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.5279569892473118, |
|
"grad_norm": 0.8743895292282104, |
|
"learning_rate": 9.283511771369132e-06, |
|
"loss": 0.884, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.5290322580645161, |
|
"grad_norm": 1.0142030715942383, |
|
"learning_rate": 9.249456031120922e-06, |
|
"loss": 0.9232, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.5301075268817205, |
|
"grad_norm": 0.8965379595756531, |
|
"learning_rate": 9.215409042721553e-06, |
|
"loss": 0.9123, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.5311827956989247, |
|
"grad_norm": 0.8908298015594482, |
|
"learning_rate": 9.181371203181873e-06, |
|
"loss": 0.9218, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.532258064516129, |
|
"grad_norm": 0.9260167479515076, |
|
"learning_rate": 9.14734290940604e-06, |
|
"loss": 0.9393, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.5333333333333333, |
|
"grad_norm": 0.86910080909729, |
|
"learning_rate": 9.113324558186922e-06, |
|
"loss": 0.9093, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.5344086021505376, |
|
"grad_norm": 0.9058336615562439, |
|
"learning_rate": 9.07931654620142e-06, |
|
"loss": 0.8892, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.535483870967742, |
|
"grad_norm": 0.9350973963737488, |
|
"learning_rate": 9.0453192700059e-06, |
|
"loss": 0.9123, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.5365591397849462, |
|
"grad_norm": 0.889754056930542, |
|
"learning_rate": 9.011333126031526e-06, |
|
"loss": 0.8924, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.5376344086021505, |
|
"grad_norm": 0.949856162071228, |
|
"learning_rate": 8.977358510579658e-06, |
|
"loss": 0.8427, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.5387096774193548, |
|
"grad_norm": 0.8923821449279785, |
|
"learning_rate": 8.943395819817226e-06, |
|
"loss": 0.9231, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.5397849462365591, |
|
"grad_norm": 1.030486822128296, |
|
"learning_rate": 8.909445449772103e-06, |
|
"loss": 0.8475, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.5408602150537635, |
|
"grad_norm": 1.0622977018356323, |
|
"learning_rate": 8.875507796328509e-06, |
|
"loss": 0.8469, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.5419354838709678, |
|
"grad_norm": 0.9633312225341797, |
|
"learning_rate": 8.841583255222359e-06, |
|
"loss": 0.8952, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.543010752688172, |
|
"grad_norm": 0.9589855670928955, |
|
"learning_rate": 8.807672222036692e-06, |
|
"loss": 0.8869, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.5440860215053763, |
|
"grad_norm": 0.9809610843658447, |
|
"learning_rate": 8.773775092197018e-06, |
|
"loss": 0.8571, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.5451612903225806, |
|
"grad_norm": 1.1358520984649658, |
|
"learning_rate": 8.73989226096673e-06, |
|
"loss": 0.8433, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.546236559139785, |
|
"grad_norm": 0.9888678193092346, |
|
"learning_rate": 8.706024123442497e-06, |
|
"loss": 0.8635, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.5473118279569893, |
|
"grad_norm": 1.0114792585372925, |
|
"learning_rate": 8.672171074549637e-06, |
|
"loss": 0.8879, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.5483870967741935, |
|
"grad_norm": 0.9216302037239075, |
|
"learning_rate": 8.638333509037537e-06, |
|
"loss": 0.8967, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.5494623655913978, |
|
"grad_norm": 0.9108251929283142, |
|
"learning_rate": 8.604511821475021e-06, |
|
"loss": 0.9245, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.5505376344086022, |
|
"grad_norm": 0.9983306527137756, |
|
"learning_rate": 8.570706406245787e-06, |
|
"loss": 0.8908, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.5516129032258065, |
|
"grad_norm": 0.996141791343689, |
|
"learning_rate": 8.536917657543767e-06, |
|
"loss": 0.9281, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.5526881720430108, |
|
"grad_norm": 0.9432565569877625, |
|
"learning_rate": 8.503145969368562e-06, |
|
"loss": 0.9435, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.553763440860215, |
|
"grad_norm": 0.9987965226173401, |
|
"learning_rate": 8.469391735520824e-06, |
|
"loss": 0.8568, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.5548387096774193, |
|
"grad_norm": 0.8919755220413208, |
|
"learning_rate": 8.43565534959769e-06, |
|
"loss": 0.9074, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.5559139784946237, |
|
"grad_norm": 1.0158838033676147, |
|
"learning_rate": 8.401937204988175e-06, |
|
"loss": 0.8527, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.556989247311828, |
|
"grad_norm": 1.0149086713790894, |
|
"learning_rate": 8.36823769486858e-06, |
|
"loss": 0.838, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.5580645161290323, |
|
"grad_norm": 0.9243462681770325, |
|
"learning_rate": 8.33455721219792e-06, |
|
"loss": 0.9201, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.5591397849462365, |
|
"grad_norm": 0.9948527812957764, |
|
"learning_rate": 8.300896149713334e-06, |
|
"loss": 0.9512, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.5602150537634408, |
|
"grad_norm": 0.9152011275291443, |
|
"learning_rate": 8.26725489992552e-06, |
|
"loss": 0.9144, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.5612903225806452, |
|
"grad_norm": 1.0240675210952759, |
|
"learning_rate": 8.233633855114127e-06, |
|
"loss": 0.8597, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.5623655913978495, |
|
"grad_norm": 0.9682225584983826, |
|
"learning_rate": 8.200033407323223e-06, |
|
"loss": 0.88, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.5634408602150538, |
|
"grad_norm": 0.9406993389129639, |
|
"learning_rate": 8.166453948356679e-06, |
|
"loss": 0.9235, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.5645161290322581, |
|
"grad_norm": 1.0435328483581543, |
|
"learning_rate": 8.132895869773638e-06, |
|
"loss": 0.9093, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.5655913978494623, |
|
"grad_norm": 0.9654241800308228, |
|
"learning_rate": 8.099359562883931e-06, |
|
"loss": 0.9118, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.5666666666666667, |
|
"grad_norm": 0.9184148907661438, |
|
"learning_rate": 8.065845418743504e-06, |
|
"loss": 0.9209, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.567741935483871, |
|
"grad_norm": 0.9631726741790771, |
|
"learning_rate": 8.032353828149889e-06, |
|
"loss": 0.8759, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.5688172043010753, |
|
"grad_norm": 0.9072070717811584, |
|
"learning_rate": 7.99888518163761e-06, |
|
"loss": 0.8856, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.5698924731182796, |
|
"grad_norm": 0.9847999215126038, |
|
"learning_rate": 7.965439869473664e-06, |
|
"loss": 0.9003, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.5709677419354838, |
|
"grad_norm": 0.9502539038658142, |
|
"learning_rate": 7.932018281652943e-06, |
|
"loss": 0.9067, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.5720430107526882, |
|
"grad_norm": 0.9413378834724426, |
|
"learning_rate": 7.898620807893698e-06, |
|
"loss": 0.8469, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.5731182795698925, |
|
"grad_norm": 0.9912509322166443, |
|
"learning_rate": 7.865247837633005e-06, |
|
"loss": 0.8938, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.5741935483870968, |
|
"grad_norm": 1.0345542430877686, |
|
"learning_rate": 7.831899760022192e-06, |
|
"loss": 0.8418, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.5752688172043011, |
|
"grad_norm": 0.8752864003181458, |
|
"learning_rate": 7.798576963922347e-06, |
|
"loss": 0.9208, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.5763440860215053, |
|
"grad_norm": 0.9315550923347473, |
|
"learning_rate": 7.76527983789973e-06, |
|
"loss": 0.9092, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.5774193548387097, |
|
"grad_norm": 0.9463087320327759, |
|
"learning_rate": 7.732008770221301e-06, |
|
"loss": 0.8932, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.578494623655914, |
|
"grad_norm": 0.9277362823486328, |
|
"learning_rate": 7.698764148850138e-06, |
|
"loss": 0.8703, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.5795698924731183, |
|
"grad_norm": 0.8701269626617432, |
|
"learning_rate": 7.66554636144095e-06, |
|
"loss": 0.9303, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.5806451612903226, |
|
"grad_norm": 0.9152162075042725, |
|
"learning_rate": 7.632355795335533e-06, |
|
"loss": 0.9022, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.5817204301075268, |
|
"grad_norm": 0.9596089720726013, |
|
"learning_rate": 7.5991928375582805e-06, |
|
"loss": 0.8888, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.5827956989247312, |
|
"grad_norm": 0.9606913924217224, |
|
"learning_rate": 7.566057874811643e-06, |
|
"loss": 0.882, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.5838709677419355, |
|
"grad_norm": 0.90365070104599, |
|
"learning_rate": 7.532951293471631e-06, |
|
"loss": 0.9083, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.5849462365591398, |
|
"grad_norm": 1.0671300888061523, |
|
"learning_rate": 7.499873479583312e-06, |
|
"loss": 0.8924, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.5860215053763441, |
|
"grad_norm": 0.9826951622962952, |
|
"learning_rate": 7.466824818856296e-06, |
|
"loss": 0.9171, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.5870967741935483, |
|
"grad_norm": 0.9401928186416626, |
|
"learning_rate": 7.433805696660267e-06, |
|
"loss": 0.9013, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.5881720430107527, |
|
"grad_norm": 0.9318894147872925, |
|
"learning_rate": 7.4008164980204455e-06, |
|
"loss": 0.8928, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.589247311827957, |
|
"grad_norm": 0.9555893540382385, |
|
"learning_rate": 7.367857607613147e-06, |
|
"loss": 0.8951, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.5903225806451613, |
|
"grad_norm": 0.9515864849090576, |
|
"learning_rate": 7.3349294097612515e-06, |
|
"loss": 0.8946, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.5913978494623656, |
|
"grad_norm": 0.9463446140289307, |
|
"learning_rate": 7.3020322884297565e-06, |
|
"loss": 0.8968, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.5924731182795699, |
|
"grad_norm": 0.9150295257568359, |
|
"learning_rate": 7.269166627221288e-06, |
|
"loss": 0.8985, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.5935483870967742, |
|
"grad_norm": 0.9752622842788696, |
|
"learning_rate": 7.236332809371609e-06, |
|
"loss": 0.8772, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.5946236559139785, |
|
"grad_norm": 0.978859543800354, |
|
"learning_rate": 7.20353121774519e-06, |
|
"loss": 0.8428, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.5956989247311828, |
|
"grad_norm": 1.0362200736999512, |
|
"learning_rate": 7.1707622348307e-06, |
|
"loss": 0.9033, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.5967741935483871, |
|
"grad_norm": 0.8912584781646729, |
|
"learning_rate": 7.1380262427365885e-06, |
|
"loss": 0.9208, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.5978494623655914, |
|
"grad_norm": 0.9754722118377686, |
|
"learning_rate": 7.105323623186595e-06, |
|
"loss": 0.8981, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.5989247311827957, |
|
"grad_norm": 1.003328561782837, |
|
"learning_rate": 7.072654757515321e-06, |
|
"loss": 0.8874, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9966676235198975, |
|
"learning_rate": 7.040020026663767e-06, |
|
"loss": 0.9093, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.6010752688172043, |
|
"grad_norm": 0.9583764672279358, |
|
"learning_rate": 7.007419811174907e-06, |
|
"loss": 0.9144, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.6021505376344086, |
|
"grad_norm": 0.9645876884460449, |
|
"learning_rate": 6.974854491189243e-06, |
|
"loss": 0.8827, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.603225806451613, |
|
"grad_norm": 0.9542210698127747, |
|
"learning_rate": 6.94232444644036e-06, |
|
"loss": 0.8877, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.6043010752688172, |
|
"grad_norm": 1.0281572341918945, |
|
"learning_rate": 6.909830056250527e-06, |
|
"loss": 0.8761, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.6053763440860215, |
|
"grad_norm": 0.9534195065498352, |
|
"learning_rate": 6.877371699526241e-06, |
|
"loss": 0.8991, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.6064516129032258, |
|
"grad_norm": 0.9893036484718323, |
|
"learning_rate": 6.844949754753833e-06, |
|
"loss": 0.8753, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.6075268817204301, |
|
"grad_norm": 0.9432750344276428, |
|
"learning_rate": 6.812564599995042e-06, |
|
"loss": 0.925, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.6086021505376344, |
|
"grad_norm": 0.954412579536438, |
|
"learning_rate": 6.78021661288262e-06, |
|
"loss": 0.8825, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.6096774193548387, |
|
"grad_norm": 0.8994936943054199, |
|
"learning_rate": 6.7479061706159034e-06, |
|
"loss": 0.9371, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.610752688172043, |
|
"grad_norm": 0.9021198153495789, |
|
"learning_rate": 6.715633649956444e-06, |
|
"loss": 0.8811, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.6118279569892473, |
|
"grad_norm": 0.9786022901535034, |
|
"learning_rate": 6.683399427223598e-06, |
|
"loss": 0.8681, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.6129032258064516, |
|
"grad_norm": 1.0326224565505981, |
|
"learning_rate": 6.651203878290139e-06, |
|
"loss": 0.8406, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.613978494623656, |
|
"grad_norm": 0.9551198482513428, |
|
"learning_rate": 6.619047378577883e-06, |
|
"loss": 0.8975, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.6150537634408603, |
|
"grad_norm": 0.8866798281669617, |
|
"learning_rate": 6.586930303053297e-06, |
|
"loss": 0.8914, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.6161290322580645, |
|
"grad_norm": 0.9743491411209106, |
|
"learning_rate": 6.554853026223149e-06, |
|
"loss": 0.8871, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.6172043010752688, |
|
"grad_norm": 0.9986077547073364, |
|
"learning_rate": 6.522815922130112e-06, |
|
"loss": 0.9049, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.6182795698924731, |
|
"grad_norm": 1.0726540088653564, |
|
"learning_rate": 6.490819364348434e-06, |
|
"loss": 0.8759, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.6193548387096774, |
|
"grad_norm": 1.0001327991485596, |
|
"learning_rate": 6.458863725979549e-06, |
|
"loss": 0.8812, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.6204301075268818, |
|
"grad_norm": 0.860525369644165, |
|
"learning_rate": 6.426949379647755e-06, |
|
"loss": 0.8316, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.621505376344086, |
|
"grad_norm": 0.9693924784660339, |
|
"learning_rate": 6.395076697495854e-06, |
|
"loss": 0.8581, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.6225806451612903, |
|
"grad_norm": 1.0345405340194702, |
|
"learning_rate": 6.363246051180812e-06, |
|
"loss": 0.8673, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.6236559139784946, |
|
"grad_norm": 0.894782543182373, |
|
"learning_rate": 6.331457811869437e-06, |
|
"loss": 0.9318, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.624731182795699, |
|
"grad_norm": 0.9780985116958618, |
|
"learning_rate": 6.2997123502340286e-06, |
|
"loss": 0.7838, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.6258064516129033, |
|
"grad_norm": 0.9211633801460266, |
|
"learning_rate": 6.2680100364480876e-06, |
|
"loss": 0.9141, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.6268817204301075, |
|
"grad_norm": 1.0366703271865845, |
|
"learning_rate": 6.236351240181967e-06, |
|
"loss": 0.8726, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.6279569892473118, |
|
"grad_norm": 1.0116140842437744, |
|
"learning_rate": 6.204736330598585e-06, |
|
"loss": 0.8964, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.6290322580645161, |
|
"grad_norm": 0.9604509472846985, |
|
"learning_rate": 6.173165676349103e-06, |
|
"loss": 0.9338, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.6301075268817204, |
|
"grad_norm": 1.0354490280151367, |
|
"learning_rate": 6.141639645568646e-06, |
|
"loss": 0.895, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.6311827956989248, |
|
"grad_norm": 1.0494648218154907, |
|
"learning_rate": 6.110158605871994e-06, |
|
"loss": 0.8726, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.632258064516129, |
|
"grad_norm": 1.0010687112808228, |
|
"learning_rate": 6.0787229243493e-06, |
|
"loss": 0.8815, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.6333333333333333, |
|
"grad_norm": 1.0460788011550903, |
|
"learning_rate": 6.047332967561809e-06, |
|
"loss": 0.8739, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.6344086021505376, |
|
"grad_norm": 0.9111729860305786, |
|
"learning_rate": 6.015989101537586e-06, |
|
"loss": 0.8874, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.635483870967742, |
|
"grad_norm": 0.9480916261672974, |
|
"learning_rate": 5.984691691767252e-06, |
|
"loss": 0.8739, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.6365591397849463, |
|
"grad_norm": 0.9860565066337585, |
|
"learning_rate": 5.953441103199704e-06, |
|
"loss": 0.8724, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.6376344086021506, |
|
"grad_norm": 0.9200236201286316, |
|
"learning_rate": 5.922237700237887e-06, |
|
"loss": 0.8974, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.6387096774193548, |
|
"grad_norm": 1.0478520393371582, |
|
"learning_rate": 5.891081846734519e-06, |
|
"loss": 0.8654, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.6397849462365591, |
|
"grad_norm": 0.9680602550506592, |
|
"learning_rate": 5.859973905987866e-06, |
|
"loss": 0.9025, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.6408602150537634, |
|
"grad_norm": 1.0783361196517944, |
|
"learning_rate": 5.828914240737496e-06, |
|
"loss": 0.8894, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.6419354838709678, |
|
"grad_norm": 1.1557034254074097, |
|
"learning_rate": 5.797903213160047e-06, |
|
"loss": 0.9288, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.6430107526881721, |
|
"grad_norm": 0.9906182289123535, |
|
"learning_rate": 5.766941184865024e-06, |
|
"loss": 0.8832, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.6440860215053763, |
|
"grad_norm": 1.0977959632873535, |
|
"learning_rate": 5.736028516890548e-06, |
|
"loss": 0.8604, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.6451612903225806, |
|
"grad_norm": 0.9982092380523682, |
|
"learning_rate": 5.7051655696991825e-06, |
|
"loss": 0.9022, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.646236559139785, |
|
"grad_norm": 0.9859678149223328, |
|
"learning_rate": 5.6743527031737e-06, |
|
"loss": 0.8692, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.6473118279569893, |
|
"grad_norm": 1.0263206958770752, |
|
"learning_rate": 5.643590276612909e-06, |
|
"loss": 0.9302, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.6483870967741936, |
|
"grad_norm": 0.8961809873580933, |
|
"learning_rate": 5.612878648727444e-06, |
|
"loss": 0.9261, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.6494623655913978, |
|
"grad_norm": 1.0333125591278076, |
|
"learning_rate": 5.582218177635607e-06, |
|
"loss": 0.8615, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.6505376344086021, |
|
"grad_norm": 0.9634983539581299, |
|
"learning_rate": 5.55160922085916e-06, |
|
"loss": 0.8879, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.6516129032258065, |
|
"grad_norm": 1.1239064931869507, |
|
"learning_rate": 5.521052135319182e-06, |
|
"loss": 0.8164, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.6526881720430108, |
|
"grad_norm": 0.9737497568130493, |
|
"learning_rate": 5.490547277331904e-06, |
|
"loss": 0.9079, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.6537634408602151, |
|
"grad_norm": 1.053808569908142, |
|
"learning_rate": 5.460095002604533e-06, |
|
"loss": 0.8461, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.6548387096774193, |
|
"grad_norm": 0.9344451427459717, |
|
"learning_rate": 5.429695666231141e-06, |
|
"loss": 0.9008, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.6559139784946236, |
|
"grad_norm": 1.06412672996521, |
|
"learning_rate": 5.399349622688479e-06, |
|
"loss": 0.8795, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.656989247311828, |
|
"grad_norm": 0.9756316542625427, |
|
"learning_rate": 5.369057225831893e-06, |
|
"loss": 0.9081, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.6580645161290323, |
|
"grad_norm": 1.004437804222107, |
|
"learning_rate": 5.338818828891148e-06, |
|
"loss": 0.9095, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.6591397849462366, |
|
"grad_norm": 1.0311338901519775, |
|
"learning_rate": 5.308634784466349e-06, |
|
"loss": 0.8887, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.6602150537634408, |
|
"grad_norm": 0.9663563370704651, |
|
"learning_rate": 5.2785054445238156e-06, |
|
"loss": 0.8774, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.6612903225806451, |
|
"grad_norm": 1.0709410905838013, |
|
"learning_rate": 5.248431160391963e-06, |
|
"loss": 0.8699, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.6623655913978495, |
|
"grad_norm": 1.000380277633667, |
|
"learning_rate": 5.2184122827572315e-06, |
|
"loss": 0.9126, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.6634408602150538, |
|
"grad_norm": 1.0107392072677612, |
|
"learning_rate": 5.188449161659978e-06, |
|
"loss": 0.9067, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.6645161290322581, |
|
"grad_norm": 1.0179517269134521, |
|
"learning_rate": 5.1585421464904e-06, |
|
"loss": 0.8486, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.6655913978494624, |
|
"grad_norm": 0.9806568622589111, |
|
"learning_rate": 5.12869158598446e-06, |
|
"loss": 0.8918, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 1.0624562501907349, |
|
"learning_rate": 5.098897828219831e-06, |
|
"loss": 0.875, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.667741935483871, |
|
"grad_norm": 1.0656713247299194, |
|
"learning_rate": 5.069161220611818e-06, |
|
"loss": 0.8999, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.6688172043010753, |
|
"grad_norm": 1.0987064838409424, |
|
"learning_rate": 5.0394821099093195e-06, |
|
"loss": 0.8594, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.6698924731182796, |
|
"grad_norm": 1.0532398223876953, |
|
"learning_rate": 5.009860842190787e-06, |
|
"loss": 0.8575, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.6709677419354839, |
|
"grad_norm": 0.9223129153251648, |
|
"learning_rate": 4.980297762860171e-06, |
|
"loss": 0.9127, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.6720430107526881, |
|
"grad_norm": 1.0524375438690186, |
|
"learning_rate": 4.950793216642923e-06, |
|
"loss": 0.8778, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.6731182795698925, |
|
"grad_norm": 0.9768378138542175, |
|
"learning_rate": 4.921347547581939e-06, |
|
"loss": 0.8781, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.6741935483870968, |
|
"grad_norm": 0.9231652021408081, |
|
"learning_rate": 4.891961099033589e-06, |
|
"loss": 0.9115, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.6752688172043011, |
|
"grad_norm": 0.9717528223991394, |
|
"learning_rate": 4.862634213663672e-06, |
|
"loss": 0.9068, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.6763440860215054, |
|
"grad_norm": 1.0528284311294556, |
|
"learning_rate": 4.833367233443457e-06, |
|
"loss": 0.8647, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.6774193548387096, |
|
"grad_norm": 0.9975810647010803, |
|
"learning_rate": 4.804160499645667e-06, |
|
"loss": 0.8773, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.678494623655914, |
|
"grad_norm": 0.9192178845405579, |
|
"learning_rate": 4.775014352840512e-06, |
|
"loss": 0.8895, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.6795698924731183, |
|
"grad_norm": 1.0221238136291504, |
|
"learning_rate": 4.7459291328917275e-06, |
|
"loss": 0.8937, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.6806451612903226, |
|
"grad_norm": 1.0171881914138794, |
|
"learning_rate": 4.7169051789525856e-06, |
|
"loss": 0.8711, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.6817204301075269, |
|
"grad_norm": 0.9004276394844055, |
|
"learning_rate": 4.687942829461969e-06, |
|
"loss": 0.7798, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.6827956989247311, |
|
"grad_norm": 1.0783709287643433, |
|
"learning_rate": 4.659042422140399e-06, |
|
"loss": 0.8841, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.6838709677419355, |
|
"grad_norm": 0.9793049693107605, |
|
"learning_rate": 4.630204293986122e-06, |
|
"loss": 0.9068, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.6849462365591398, |
|
"grad_norm": 1.0510399341583252, |
|
"learning_rate": 4.601428781271154e-06, |
|
"loss": 0.921, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.6860215053763441, |
|
"grad_norm": 0.966804563999176, |
|
"learning_rate": 4.572716219537386e-06, |
|
"loss": 0.9267, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.6870967741935484, |
|
"grad_norm": 0.9800238013267517, |
|
"learning_rate": 4.544066943592645e-06, |
|
"loss": 0.8814, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.6881720430107527, |
|
"grad_norm": 0.9430668950080872, |
|
"learning_rate": 4.515481287506811e-06, |
|
"loss": 0.9247, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.689247311827957, |
|
"grad_norm": 0.9821351766586304, |
|
"learning_rate": 4.486959584607919e-06, |
|
"loss": 0.8631, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.6903225806451613, |
|
"grad_norm": 0.9794047474861145, |
|
"learning_rate": 4.458502167478254e-06, |
|
"loss": 0.899, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.6913978494623656, |
|
"grad_norm": 0.955868661403656, |
|
"learning_rate": 4.430109367950488e-06, |
|
"loss": 0.8784, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.6924731182795699, |
|
"grad_norm": 0.873094379901886, |
|
"learning_rate": 4.401781517103819e-06, |
|
"loss": 0.7778, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.6935483870967742, |
|
"grad_norm": 0.9053919315338135, |
|
"learning_rate": 4.37351894526009e-06, |
|
"loss": 0.8362, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.6946236559139785, |
|
"grad_norm": 1.0417245626449585, |
|
"learning_rate": 4.345321981979942e-06, |
|
"loss": 0.8229, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.6956989247311828, |
|
"grad_norm": 0.93317711353302, |
|
"learning_rate": 4.31719095605899e-06, |
|
"loss": 0.897, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.6967741935483871, |
|
"grad_norm": 0.9389287233352661, |
|
"learning_rate": 4.289126195523968e-06, |
|
"loss": 0.892, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.6978494623655914, |
|
"grad_norm": 0.936539351940155, |
|
"learning_rate": 4.261128027628906e-06, |
|
"loss": 0.8989, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.6989247311827957, |
|
"grad_norm": 1.0945050716400146, |
|
"learning_rate": 4.2331967788513295e-06, |
|
"loss": 0.8451, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.003908395767212, |
|
"learning_rate": 4.20533277488843e-06, |
|
"loss": 0.8652, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.7010752688172043, |
|
"grad_norm": 0.9572248458862305, |
|
"learning_rate": 4.177536340653291e-06, |
|
"loss": 0.8847, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.7021505376344086, |
|
"grad_norm": 0.9244607090950012, |
|
"learning_rate": 4.149807800271075e-06, |
|
"loss": 0.7855, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.7032258064516129, |
|
"grad_norm": 1.0144091844558716, |
|
"learning_rate": 4.12214747707527e-06, |
|
"loss": 0.9042, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.7043010752688172, |
|
"grad_norm": 1.0786426067352295, |
|
"learning_rate": 4.094555693603891e-06, |
|
"loss": 0.8647, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.7053763440860215, |
|
"grad_norm": 1.0761374235153198, |
|
"learning_rate": 4.067032771595749e-06, |
|
"loss": 0.8165, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.7064516129032258, |
|
"grad_norm": 0.9577154517173767, |
|
"learning_rate": 4.039579031986672e-06, |
|
"loss": 0.8688, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.7075268817204301, |
|
"grad_norm": 0.9912071228027344, |
|
"learning_rate": 4.0121947949057745e-06, |
|
"loss": 0.8915, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.7086021505376344, |
|
"grad_norm": 0.9261897206306458, |
|
"learning_rate": 3.9848803796717385e-06, |
|
"loss": 0.9261, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.7096774193548387, |
|
"grad_norm": 0.9974510073661804, |
|
"learning_rate": 3.957636104789056e-06, |
|
"loss": 0.9204, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.710752688172043, |
|
"grad_norm": 1.0481423139572144, |
|
"learning_rate": 3.930462287944354e-06, |
|
"loss": 0.8695, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.7118279569892473, |
|
"grad_norm": 0.901894211769104, |
|
"learning_rate": 3.903359246002655e-06, |
|
"loss": 0.916, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.7129032258064516, |
|
"grad_norm": 1.0383106470108032, |
|
"learning_rate": 3.876327295003716e-06, |
|
"loss": 0.8709, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.7139784946236559, |
|
"grad_norm": 1.1252386569976807, |
|
"learning_rate": 3.849366750158305e-06, |
|
"loss": 0.8917, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.7150537634408602, |
|
"grad_norm": 0.9972463846206665, |
|
"learning_rate": 3.822477925844564e-06, |
|
"loss": 0.8683, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.7161290322580646, |
|
"grad_norm": 1.0251529216766357, |
|
"learning_rate": 3.7956611356043196e-06, |
|
"loss": 0.889, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.7172043010752688, |
|
"grad_norm": 0.9553551077842712, |
|
"learning_rate": 3.768916692139426e-06, |
|
"loss": 0.8807, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.7182795698924731, |
|
"grad_norm": 0.9973981976509094, |
|
"learning_rate": 3.7422449073081356e-06, |
|
"loss": 0.906, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.7193548387096774, |
|
"grad_norm": 1.014689564704895, |
|
"learning_rate": 3.715646092121444e-06, |
|
"loss": 0.8588, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.7204301075268817, |
|
"grad_norm": 1.0839016437530518, |
|
"learning_rate": 3.689120556739475e-06, |
|
"loss": 0.8402, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.7215053763440861, |
|
"grad_norm": 1.1042214632034302, |
|
"learning_rate": 3.6626686104678565e-06, |
|
"loss": 0.8552, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.7225806451612903, |
|
"grad_norm": 0.956203043460846, |
|
"learning_rate": 3.6362905617541276e-06, |
|
"loss": 0.895, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.7236559139784946, |
|
"grad_norm": 1.0225743055343628, |
|
"learning_rate": 3.6099867181841188e-06, |
|
"loss": 0.8946, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.7247311827956989, |
|
"grad_norm": 1.0949145555496216, |
|
"learning_rate": 3.583757386478389e-06, |
|
"loss": 0.8293, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.7258064516129032, |
|
"grad_norm": 0.9770424962043762, |
|
"learning_rate": 3.557602872488638e-06, |
|
"loss": 0.8798, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.7268817204301076, |
|
"grad_norm": 1.0102465152740479, |
|
"learning_rate": 3.53152348119413e-06, |
|
"loss": 0.8777, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.7279569892473118, |
|
"grad_norm": 0.9756696224212646, |
|
"learning_rate": 3.505519516698165e-06, |
|
"loss": 0.9019, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.7290322580645161, |
|
"grad_norm": 1.1217525005340576, |
|
"learning_rate": 3.479591282224496e-06, |
|
"loss": 0.8504, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.7301075268817204, |
|
"grad_norm": 0.9998635053634644, |
|
"learning_rate": 3.4537390801138306e-06, |
|
"loss": 0.7564, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.7311827956989247, |
|
"grad_norm": 1.1241053342819214, |
|
"learning_rate": 3.4279632118202744e-06, |
|
"loss": 0.8806, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.7322580645161291, |
|
"grad_norm": 0.9840561151504517, |
|
"learning_rate": 3.4022639779078403e-06, |
|
"loss": 0.8893, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.7333333333333333, |
|
"grad_norm": 0.9647068977355957, |
|
"learning_rate": 3.376641678046926e-06, |
|
"loss": 0.8661, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.7344086021505376, |
|
"grad_norm": 1.043853998184204, |
|
"learning_rate": 3.3510966110108213e-06, |
|
"loss": 0.8963, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.7354838709677419, |
|
"grad_norm": 1.0099283456802368, |
|
"learning_rate": 3.325629074672244e-06, |
|
"loss": 0.8752, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.7365591397849462, |
|
"grad_norm": 1.0153430700302124, |
|
"learning_rate": 3.3002393659998357e-06, |
|
"loss": 0.9404, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.7376344086021506, |
|
"grad_norm": 0.987329363822937, |
|
"learning_rate": 3.2749277810547286e-06, |
|
"loss": 0.8663, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.7387096774193549, |
|
"grad_norm": 1.0522453784942627, |
|
"learning_rate": 3.249694614987068e-06, |
|
"loss": 0.875, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.7397849462365591, |
|
"grad_norm": 1.0339679718017578, |
|
"learning_rate": 3.2245401620325934e-06, |
|
"loss": 0.8648, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.7408602150537634, |
|
"grad_norm": 1.144313931465149, |
|
"learning_rate": 3.199464715509183e-06, |
|
"loss": 0.7979, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.7419354838709677, |
|
"grad_norm": 1.0547746419906616, |
|
"learning_rate": 3.174468567813461e-06, |
|
"loss": 0.8985, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.7430107526881721, |
|
"grad_norm": 0.990385890007019, |
|
"learning_rate": 3.1495520104173603e-06, |
|
"loss": 0.9123, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.7440860215053764, |
|
"grad_norm": 1.0328789949417114, |
|
"learning_rate": 3.1247153338647486e-06, |
|
"loss": 0.8739, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.7451612903225806, |
|
"grad_norm": 1.099030613899231, |
|
"learning_rate": 3.099958827768025e-06, |
|
"loss": 0.8441, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.7462365591397849, |
|
"grad_norm": 0.9660579562187195, |
|
"learning_rate": 3.0752827808047446e-06, |
|
"loss": 0.9012, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.7473118279569892, |
|
"grad_norm": 0.9739366173744202, |
|
"learning_rate": 3.050687480714256e-06, |
|
"loss": 0.8412, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.7483870967741936, |
|
"grad_norm": 1.058748483657837, |
|
"learning_rate": 3.0261732142943435e-06, |
|
"loss": 0.9002, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.7494623655913979, |
|
"grad_norm": 1.0254429578781128, |
|
"learning_rate": 3.00174026739789e-06, |
|
"loss": 0.8546, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.7505376344086021, |
|
"grad_norm": 1.0190998315811157, |
|
"learning_rate": 2.9773889249295294e-06, |
|
"loss": 0.8726, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.7516129032258064, |
|
"grad_norm": 1.057529330253601, |
|
"learning_rate": 2.9531194708423428e-06, |
|
"loss": 0.8962, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.7516129032258064, |
|
"eval_loss": 0.8028071522712708, |
|
"eval_runtime": 240.7419, |
|
"eval_samples_per_second": 11.564, |
|
"eval_steps_per_second": 0.482, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.7526881720430108, |
|
"grad_norm": 1.0174949169158936, |
|
"learning_rate": 2.9289321881345257e-06, |
|
"loss": 0.9032, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.7537634408602151, |
|
"grad_norm": 1.0628769397735596, |
|
"learning_rate": 2.9048273588461097e-06, |
|
"loss": 0.8494, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.7548387096774194, |
|
"grad_norm": 0.9887354373931885, |
|
"learning_rate": 2.8808052640556637e-06, |
|
"loss": 0.7472, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.7559139784946236, |
|
"grad_norm": 0.9702305197715759, |
|
"learning_rate": 2.8568661838770073e-06, |
|
"loss": 0.8806, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.7569892473118279, |
|
"grad_norm": 0.9678685069084167, |
|
"learning_rate": 2.8330103974559665e-06, |
|
"loss": 0.8736, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.7580645161290323, |
|
"grad_norm": 1.0689879655838013, |
|
"learning_rate": 2.809238182967092e-06, |
|
"loss": 0.8909, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.7591397849462366, |
|
"grad_norm": 1.041756510734558, |
|
"learning_rate": 2.7855498176104435e-06, |
|
"loss": 0.8741, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.7602150537634409, |
|
"grad_norm": 1.1385363340377808, |
|
"learning_rate": 2.761945577608334e-06, |
|
"loss": 0.8897, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.7612903225806451, |
|
"grad_norm": 0.9872462749481201, |
|
"learning_rate": 2.7384257382021185e-06, |
|
"loss": 0.8893, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.7623655913978494, |
|
"grad_norm": 0.9995443224906921, |
|
"learning_rate": 2.7149905736489846e-06, |
|
"loss": 0.8745, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.7634408602150538, |
|
"grad_norm": 1.1389273405075073, |
|
"learning_rate": 2.691640357218759e-06, |
|
"loss": 0.8441, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.7645161290322581, |
|
"grad_norm": 0.9834983944892883, |
|
"learning_rate": 2.668375361190716e-06, |
|
"loss": 0.876, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.7655913978494624, |
|
"grad_norm": 1.0866228342056274, |
|
"learning_rate": 2.645195856850391e-06, |
|
"loss": 0.8391, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.7666666666666667, |
|
"grad_norm": 1.0782625675201416, |
|
"learning_rate": 2.6221021144864444e-06, |
|
"loss": 0.8789, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.7677419354838709, |
|
"grad_norm": 1.1091196537017822, |
|
"learning_rate": 2.599094403387481e-06, |
|
"loss": 0.8514, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.7688172043010753, |
|
"grad_norm": 1.0931015014648438, |
|
"learning_rate": 2.576172991838933e-06, |
|
"loss": 0.8657, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.7698924731182796, |
|
"grad_norm": 1.0916446447372437, |
|
"learning_rate": 2.5533381471199138e-06, |
|
"loss": 0.8847, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.7709677419354839, |
|
"grad_norm": 1.0096673965454102, |
|
"learning_rate": 2.5305901355001152e-06, |
|
"loss": 0.896, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.7720430107526882, |
|
"grad_norm": 0.9677090048789978, |
|
"learning_rate": 2.5079292222366903e-06, |
|
"loss": 0.9044, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.7731182795698924, |
|
"grad_norm": 0.9203047752380371, |
|
"learning_rate": 2.485355671571175e-06, |
|
"loss": 0.9017, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.7741935483870968, |
|
"grad_norm": 0.9744219779968262, |
|
"learning_rate": 2.4628697467263916e-06, |
|
"loss": 0.858, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.7752688172043011, |
|
"grad_norm": 1.0295017957687378, |
|
"learning_rate": 2.4404717099033847e-06, |
|
"loss": 0.885, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.7763440860215054, |
|
"grad_norm": 1.0058088302612305, |
|
"learning_rate": 2.418161822278374e-06, |
|
"loss": 0.896, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.7774193548387097, |
|
"grad_norm": 1.0643796920776367, |
|
"learning_rate": 2.395940343999691e-06, |
|
"loss": 0.9154, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.7784946236559139, |
|
"grad_norm": 1.031701922416687, |
|
"learning_rate": 2.3738075341847634e-06, |
|
"loss": 0.8866, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.7795698924731183, |
|
"grad_norm": 1.0520869493484497, |
|
"learning_rate": 2.351763650917074e-06, |
|
"loss": 0.8557, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.7806451612903226, |
|
"grad_norm": 1.026106595993042, |
|
"learning_rate": 2.3298089512431744e-06, |
|
"loss": 0.8788, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.7817204301075269, |
|
"grad_norm": 1.016325831413269, |
|
"learning_rate": 2.3079436911696617e-06, |
|
"loss": 0.8884, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.7827956989247312, |
|
"grad_norm": 1.1190268993377686, |
|
"learning_rate": 2.2861681256602187e-06, |
|
"loss": 0.856, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.7838709677419354, |
|
"grad_norm": 0.9312067031860352, |
|
"learning_rate": 2.264482508632627e-06, |
|
"loss": 0.8972, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.7849462365591398, |
|
"grad_norm": 1.1495349407196045, |
|
"learning_rate": 2.2428870929558012e-06, |
|
"loss": 0.8709, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.7860215053763441, |
|
"grad_norm": 1.1420360803604126, |
|
"learning_rate": 2.2213821304468554e-06, |
|
"loss": 0.8496, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.7870967741935484, |
|
"grad_norm": 1.0807099342346191, |
|
"learning_rate": 2.199967871868154e-06, |
|
"loss": 0.8572, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.7881720430107527, |
|
"grad_norm": 1.1299638748168945, |
|
"learning_rate": 2.178644566924394e-06, |
|
"loss": 0.8662, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.789247311827957, |
|
"grad_norm": 1.0073062181472778, |
|
"learning_rate": 2.1574124642596882e-06, |
|
"loss": 0.8924, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.7903225806451613, |
|
"grad_norm": 1.001193881034851, |
|
"learning_rate": 2.1362718114546777e-06, |
|
"loss": 0.8564, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.7913978494623656, |
|
"grad_norm": 0.9366834759712219, |
|
"learning_rate": 2.1152228550236264e-06, |
|
"loss": 0.8991, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.7924731182795699, |
|
"grad_norm": 1.0331993103027344, |
|
"learning_rate": 2.0942658404115646e-06, |
|
"loss": 0.9229, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.7935483870967742, |
|
"grad_norm": 1.0128207206726074, |
|
"learning_rate": 2.0734010119914193e-06, |
|
"loss": 0.8739, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.7946236559139785, |
|
"grad_norm": 0.9919546246528625, |
|
"learning_rate": 2.052628613061154e-06, |
|
"loss": 0.8879, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.7956989247311828, |
|
"grad_norm": 0.9642401933670044, |
|
"learning_rate": 2.0319488858409552e-06, |
|
"loss": 0.8813, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.7967741935483871, |
|
"grad_norm": 1.09434175491333, |
|
"learning_rate": 2.011362071470384e-06, |
|
"loss": 0.8467, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.7978494623655914, |
|
"grad_norm": 1.0729540586471558, |
|
"learning_rate": 1.9908684100055843e-06, |
|
"loss": 0.8745, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.7989247311827957, |
|
"grad_norm": 1.0908386707305908, |
|
"learning_rate": 1.9704681404164637e-06, |
|
"loss": 0.8573, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.9976701736450195, |
|
"learning_rate": 1.95016150058393e-06, |
|
"loss": 0.8977, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.8010752688172043, |
|
"grad_norm": 1.1222140789031982, |
|
"learning_rate": 1.929948727297096e-06, |
|
"loss": 0.8868, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.8021505376344086, |
|
"grad_norm": 0.9284682869911194, |
|
"learning_rate": 1.9098300562505266e-06, |
|
"loss": 0.8813, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.8032258064516129, |
|
"grad_norm": 1.050628662109375, |
|
"learning_rate": 1.889805722041499e-06, |
|
"loss": 0.7365, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.8043010752688172, |
|
"grad_norm": 1.0807946920394897, |
|
"learning_rate": 1.8698759581672487e-06, |
|
"loss": 0.8901, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.8053763440860215, |
|
"grad_norm": 1.083493947982788, |
|
"learning_rate": 1.85004099702227e-06, |
|
"loss": 0.8758, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.8064516129032258, |
|
"grad_norm": 1.1448321342468262, |
|
"learning_rate": 1.8303010698955803e-06, |
|
"loss": 0.8712, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.8075268817204301, |
|
"grad_norm": 1.0084006786346436, |
|
"learning_rate": 1.8106564069680476e-06, |
|
"loss": 0.866, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.8086021505376344, |
|
"grad_norm": 1.027698278427124, |
|
"learning_rate": 1.791107237309685e-06, |
|
"loss": 0.9034, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.8096774193548387, |
|
"grad_norm": 0.9917596578598022, |
|
"learning_rate": 1.771653788876999e-06, |
|
"loss": 0.8887, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.810752688172043, |
|
"grad_norm": 1.087314248085022, |
|
"learning_rate": 1.7522962885103145e-06, |
|
"loss": 0.8235, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.8118279569892473, |
|
"grad_norm": 1.2066447734832764, |
|
"learning_rate": 1.7330349619311415e-06, |
|
"loss": 0.8196, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.8129032258064516, |
|
"grad_norm": 1.0600000619888306, |
|
"learning_rate": 1.713870033739541e-06, |
|
"loss": 0.8673, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.8139784946236559, |
|
"grad_norm": 1.1086914539337158, |
|
"learning_rate": 1.6948017274114959e-06, |
|
"loss": 0.8774, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.8150537634408602, |
|
"grad_norm": 1.1460777521133423, |
|
"learning_rate": 1.6758302652963176e-06, |
|
"loss": 0.8987, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.8161290322580645, |
|
"grad_norm": 1.002736210823059, |
|
"learning_rate": 1.656955868614053e-06, |
|
"loss": 0.8779, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.8172043010752689, |
|
"grad_norm": 1.012317419052124, |
|
"learning_rate": 1.638178757452894e-06, |
|
"loss": 0.864, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.8182795698924731, |
|
"grad_norm": 1.0144654512405396, |
|
"learning_rate": 1.6194991507666159e-06, |
|
"loss": 0.9059, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.8193548387096774, |
|
"grad_norm": 0.9600997567176819, |
|
"learning_rate": 1.6009172663720352e-06, |
|
"loss": 0.889, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.8204301075268817, |
|
"grad_norm": 0.9644306898117065, |
|
"learning_rate": 1.582433320946456e-06, |
|
"loss": 0.8738, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.821505376344086, |
|
"grad_norm": 1.0256295204162598, |
|
"learning_rate": 1.5640475300251423e-06, |
|
"loss": 0.877, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.8225806451612904, |
|
"grad_norm": 1.060616135597229, |
|
"learning_rate": 1.5457601079988226e-06, |
|
"loss": 0.9018, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.8236559139784946, |
|
"grad_norm": 1.079298734664917, |
|
"learning_rate": 1.5275712681111643e-06, |
|
"loss": 0.8666, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.8247311827956989, |
|
"grad_norm": 1.1446715593338013, |
|
"learning_rate": 1.5094812224563117e-06, |
|
"loss": 0.8528, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.8258064516129032, |
|
"grad_norm": 1.057084560394287, |
|
"learning_rate": 1.4914901819763938e-06, |
|
"loss": 0.8398, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.8268817204301075, |
|
"grad_norm": 1.0480070114135742, |
|
"learning_rate": 1.4735983564590784e-06, |
|
"loss": 0.8905, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.8279569892473119, |
|
"grad_norm": 0.9545390605926514, |
|
"learning_rate": 1.4558059545351144e-06, |
|
"loss": 0.8528, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.8290322580645161, |
|
"grad_norm": 1.0012227296829224, |
|
"learning_rate": 1.43811318367591e-06, |
|
"loss": 0.8955, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.8301075268817204, |
|
"grad_norm": 1.0132238864898682, |
|
"learning_rate": 1.4205202501911052e-06, |
|
"loss": 0.8934, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.8311827956989247, |
|
"grad_norm": 1.0243781805038452, |
|
"learning_rate": 1.4030273592261656e-06, |
|
"loss": 0.891, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.832258064516129, |
|
"grad_norm": 1.0289664268493652, |
|
"learning_rate": 1.3856347147600014e-06, |
|
"loss": 0.8597, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.8333333333333334, |
|
"grad_norm": 1.0462311506271362, |
|
"learning_rate": 1.3683425196025734e-06, |
|
"loss": 0.8856, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.8344086021505376, |
|
"grad_norm": 1.0177634954452515, |
|
"learning_rate": 1.3511509753925422e-06, |
|
"loss": 0.853, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.8354838709677419, |
|
"grad_norm": 1.084958791732788, |
|
"learning_rate": 1.3340602825949024e-06, |
|
"loss": 0.8668, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.8365591397849462, |
|
"grad_norm": 0.9712878465652466, |
|
"learning_rate": 1.3170706404986645e-06, |
|
"loss": 0.916, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.8376344086021505, |
|
"grad_norm": 1.0489739179611206, |
|
"learning_rate": 1.3001822472145066e-06, |
|
"loss": 0.8668, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.8387096774193549, |
|
"grad_norm": 1.0077139139175415, |
|
"learning_rate": 1.2833952996724864e-06, |
|
"loss": 0.8885, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.8397849462365592, |
|
"grad_norm": 1.1088800430297852, |
|
"learning_rate": 1.266709993619737e-06, |
|
"loss": 0.9051, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.8408602150537634, |
|
"grad_norm": 0.9531431198120117, |
|
"learning_rate": 1.2501265236181736e-06, |
|
"loss": 0.8967, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.8419354838709677, |
|
"grad_norm": 1.0077635049819946, |
|
"learning_rate": 1.2336450830422452e-06, |
|
"loss": 0.8741, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.843010752688172, |
|
"grad_norm": 1.001855731010437, |
|
"learning_rate": 1.2172658640766622e-06, |
|
"loss": 0.9065, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.8440860215053764, |
|
"grad_norm": 1.1800569295883179, |
|
"learning_rate": 1.2009890577141625e-06, |
|
"loss": 0.8352, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.8451612903225807, |
|
"grad_norm": 0.9963459968566895, |
|
"learning_rate": 1.1848148537532845e-06, |
|
"loss": 0.8659, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.8462365591397849, |
|
"grad_norm": 0.9820273518562317, |
|
"learning_rate": 1.1687434407961584e-06, |
|
"loss": 0.8985, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.8473118279569892, |
|
"grad_norm": 1.0634284019470215, |
|
"learning_rate": 1.1527750062462928e-06, |
|
"loss": 0.8535, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.8483870967741935, |
|
"grad_norm": 1.0087202787399292, |
|
"learning_rate": 1.1369097363064064e-06, |
|
"loss": 0.8976, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.8494623655913979, |
|
"grad_norm": 0.9989567399024963, |
|
"learning_rate": 1.121147815976248e-06, |
|
"loss": 0.8748, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.8505376344086022, |
|
"grad_norm": 1.0062466859817505, |
|
"learning_rate": 1.1054894290504348e-06, |
|
"loss": 0.8621, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.8516129032258064, |
|
"grad_norm": 1.0595635175704956, |
|
"learning_rate": 1.0899347581163222e-06, |
|
"loss": 0.8782, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.8526881720430107, |
|
"grad_norm": 1.0409711599349976, |
|
"learning_rate": 1.0744839845518585e-06, |
|
"loss": 0.8769, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.853763440860215, |
|
"grad_norm": 1.0703274011611938, |
|
"learning_rate": 1.0591372885234885e-06, |
|
"loss": 0.8763, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.8548387096774194, |
|
"grad_norm": 1.095167875289917, |
|
"learning_rate": 1.0438948489840327e-06, |
|
"loss": 0.8772, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.8559139784946237, |
|
"grad_norm": 1.166521430015564, |
|
"learning_rate": 1.0287568436706208e-06, |
|
"loss": 0.8678, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.8569892473118279, |
|
"grad_norm": 1.0134778022766113, |
|
"learning_rate": 1.0137234491026015e-06, |
|
"loss": 0.887, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.8580645161290322, |
|
"grad_norm": 1.0230733156204224, |
|
"learning_rate": 9.987948405794912e-07, |
|
"loss": 0.8634, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.8591397849462366, |
|
"grad_norm": 1.062451958656311, |
|
"learning_rate": 9.839711921789363e-07, |
|
"loss": 0.8754, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.8602150537634409, |
|
"grad_norm": 0.9886546730995178, |
|
"learning_rate": 9.692526767546727e-07, |
|
"loss": 0.8967, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.8612903225806452, |
|
"grad_norm": 1.0505887269973755, |
|
"learning_rate": 9.546394659345192e-07, |
|
"loss": 0.8783, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.8623655913978494, |
|
"grad_norm": 1.0658241510391235, |
|
"learning_rate": 9.401317301183655e-07, |
|
"loss": 0.748, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.8634408602150537, |
|
"grad_norm": 0.9909445643424988, |
|
"learning_rate": 9.257296384761971e-07, |
|
"loss": 0.9126, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.864516129032258, |
|
"grad_norm": 1.0579025745391846, |
|
"learning_rate": 9.114333589461144e-07, |
|
"loss": 0.9195, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.8655913978494624, |
|
"grad_norm": 1.0848064422607422, |
|
"learning_rate": 8.972430582323788e-07, |
|
"loss": 0.8731, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.8666666666666667, |
|
"grad_norm": 0.9736969470977783, |
|
"learning_rate": 8.831589018034659e-07, |
|
"loss": 0.8881, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.867741935483871, |
|
"grad_norm": 1.1477285623550415, |
|
"learning_rate": 8.69181053890138e-07, |
|
"loss": 0.9058, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.8688172043010752, |
|
"grad_norm": 1.0351393222808838, |
|
"learning_rate": 8.553096774835312e-07, |
|
"loss": 0.8766, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.8698924731182796, |
|
"grad_norm": 1.1132804155349731, |
|
"learning_rate": 8.41544934333246e-07, |
|
"loss": 0.8549, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.8709677419354839, |
|
"grad_norm": 0.9678705930709839, |
|
"learning_rate": 8.278869849454718e-07, |
|
"loss": 0.9077, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.8720430107526882, |
|
"grad_norm": 1.0790833234786987, |
|
"learning_rate": 8.143359885811064e-07, |
|
"loss": 0.8587, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.8731182795698925, |
|
"grad_norm": 1.0063000917434692, |
|
"learning_rate": 8.008921032539108e-07, |
|
"loss": 0.8842, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.8741935483870967, |
|
"grad_norm": 1.024970531463623, |
|
"learning_rate": 7.875554857286504e-07, |
|
"loss": 0.8685, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.875268817204301, |
|
"grad_norm": 1.1168882846832275, |
|
"learning_rate": 7.743262915192839e-07, |
|
"loss": 0.8398, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.8763440860215054, |
|
"grad_norm": 1.0974441766738892, |
|
"learning_rate": 7.612046748871327e-07, |
|
"loss": 0.8702, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.8774193548387097, |
|
"grad_norm": 1.0631004571914673, |
|
"learning_rate": 7.481907888390994e-07, |
|
"loss": 0.8968, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.878494623655914, |
|
"grad_norm": 1.018012523651123, |
|
"learning_rate": 7.352847851258716e-07, |
|
"loss": 0.8952, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.8795698924731182, |
|
"grad_norm": 1.067273736000061, |
|
"learning_rate": 7.224868142401542e-07, |
|
"loss": 0.8675, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.8806451612903226, |
|
"grad_norm": 1.0818495750427246, |
|
"learning_rate": 7.097970254149222e-07, |
|
"loss": 0.8667, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.8817204301075269, |
|
"grad_norm": 0.9330995082855225, |
|
"learning_rate": 6.972155666216684e-07, |
|
"loss": 0.7546, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.8827956989247312, |
|
"grad_norm": 1.007285475730896, |
|
"learning_rate": 6.847425845686884e-07, |
|
"loss": 0.8765, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.8838709677419355, |
|
"grad_norm": 1.0542314052581787, |
|
"learning_rate": 6.723782246993648e-07, |
|
"loss": 0.8631, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.8849462365591397, |
|
"grad_norm": 1.1038877964019775, |
|
"learning_rate": 6.601226311904718e-07, |
|
"loss": 0.8584, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.886021505376344, |
|
"grad_norm": 0.9646521210670471, |
|
"learning_rate": 6.479759469504931e-07, |
|
"loss": 0.8958, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.8870967741935484, |
|
"grad_norm": 1.0366793870925903, |
|
"learning_rate": 6.359383136179598e-07, |
|
"loss": 0.8811, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.8881720430107527, |
|
"grad_norm": 1.0770856142044067, |
|
"learning_rate": 6.240098715597975e-07, |
|
"loss": 0.8734, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.889247311827957, |
|
"grad_norm": 1.1148271560668945, |
|
"learning_rate": 6.121907598696808e-07, |
|
"loss": 0.8623, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.8903225806451613, |
|
"grad_norm": 0.9609014987945557, |
|
"learning_rate": 6.00481116366427e-07, |
|
"loss": 0.901, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.8913978494623656, |
|
"grad_norm": 0.9495878219604492, |
|
"learning_rate": 5.888810775923726e-07, |
|
"loss": 0.8664, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.8924731182795699, |
|
"grad_norm": 1.089764952659607, |
|
"learning_rate": 5.77390778811796e-07, |
|
"loss": 0.8839, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.8935483870967742, |
|
"grad_norm": 0.9896240234375, |
|
"learning_rate": 5.660103540093265e-07, |
|
"loss": 0.8833, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.8946236559139785, |
|
"grad_norm": 0.9893712997436523, |
|
"learning_rate": 5.547399358883953e-07, |
|
"loss": 0.8811, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.8956989247311828, |
|
"grad_norm": 1.0602530241012573, |
|
"learning_rate": 5.43579655869676e-07, |
|
"loss": 0.8908, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.896774193548387, |
|
"grad_norm": 1.0764974355697632, |
|
"learning_rate": 5.325296440895622e-07, |
|
"loss": 0.8822, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.8978494623655914, |
|
"grad_norm": 1.0722241401672363, |
|
"learning_rate": 5.215900293986431e-07, |
|
"loss": 0.8555, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.8989247311827957, |
|
"grad_norm": 0.9710987210273743, |
|
"learning_rate": 5.107609393602019e-07, |
|
"loss": 0.9039, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.0058637857437134, |
|
"learning_rate": 5.000425002487342e-07, |
|
"loss": 0.9027, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.9010752688172043, |
|
"grad_norm": 1.0156899690628052, |
|
"learning_rate": 4.894348370484648e-07, |
|
"loss": 0.862, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.9021505376344086, |
|
"grad_norm": 1.1212072372436523, |
|
"learning_rate": 4.789380734519045e-07, |
|
"loss": 0.8812, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.9032258064516129, |
|
"grad_norm": 1.0828582048416138, |
|
"learning_rate": 4.6855233185839175e-07, |
|
"loss": 0.9243, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.9043010752688172, |
|
"grad_norm": 0.9520331025123596, |
|
"learning_rate": 4.5827773337268177e-07, |
|
"loss": 0.8822, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.9053763440860215, |
|
"grad_norm": 1.057708740234375, |
|
"learning_rate": 4.481143978035196e-07, |
|
"loss": 0.8768, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.9064516129032258, |
|
"grad_norm": 1.1681333780288696, |
|
"learning_rate": 4.380624436622516e-07, |
|
"loss": 0.8689, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.9075268817204301, |
|
"grad_norm": 1.0377984046936035, |
|
"learning_rate": 4.281219881614451e-07, |
|
"loss": 0.8299, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.9086021505376344, |
|
"grad_norm": 1.1868066787719727, |
|
"learning_rate": 4.1829314721351213e-07, |
|
"loss": 0.8652, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.9096774193548387, |
|
"grad_norm": 1.0405972003936768, |
|
"learning_rate": 4.0857603542936776e-07, |
|
"loss": 0.9007, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.910752688172043, |
|
"grad_norm": 1.080142855644226, |
|
"learning_rate": 3.989707661170894e-07, |
|
"loss": 0.7416, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.9118279569892473, |
|
"grad_norm": 1.0355066061019897, |
|
"learning_rate": 3.894774512805932e-07, |
|
"loss": 0.9158, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.9129032258064517, |
|
"grad_norm": 0.9819759130477905, |
|
"learning_rate": 3.8009620161833295e-07, |
|
"loss": 0.8866, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.9139784946236559, |
|
"grad_norm": 1.0069386959075928, |
|
"learning_rate": 3.708271265220087e-07, |
|
"loss": 0.9083, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.9150537634408602, |
|
"grad_norm": 1.0547349452972412, |
|
"learning_rate": 3.616703340752825e-07, |
|
"loss": 0.8957, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.9161290322580645, |
|
"grad_norm": 0.9976608157157898, |
|
"learning_rate": 3.5262593105253374e-07, |
|
"loss": 0.7867, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.9172043010752688, |
|
"grad_norm": 0.9935895204544067, |
|
"learning_rate": 3.436940229176e-07, |
|
"loss": 0.902, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.9182795698924732, |
|
"grad_norm": 1.0534745454788208, |
|
"learning_rate": 3.3487471382255277e-07, |
|
"loss": 0.9242, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.9193548387096774, |
|
"grad_norm": 1.0113539695739746, |
|
"learning_rate": 3.261681066064859e-07, |
|
"loss": 0.8852, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.9204301075268817, |
|
"grad_norm": 1.0486923456192017, |
|
"learning_rate": 3.175743027943079e-07, |
|
"loss": 0.8942, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.921505376344086, |
|
"grad_norm": 1.1777671575546265, |
|
"learning_rate": 3.0909340259557055e-07, |
|
"loss": 0.8087, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.9225806451612903, |
|
"grad_norm": 1.0966805219650269, |
|
"learning_rate": 3.0072550490328754e-07, |
|
"loss": 0.8399, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.9236559139784947, |
|
"grad_norm": 1.0180057287216187, |
|
"learning_rate": 2.9247070729279017e-07, |
|
"loss": 0.8571, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.9247311827956989, |
|
"grad_norm": 1.12461256980896, |
|
"learning_rate": 2.843291060205855e-07, |
|
"loss": 0.817, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.9258064516129032, |
|
"grad_norm": 0.962689220905304, |
|
"learning_rate": 2.7630079602323447e-07, |
|
"loss": 0.892, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.9268817204301075, |
|
"grad_norm": 1.0531262159347534, |
|
"learning_rate": 2.683858709162468e-07, |
|
"loss": 0.9067, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.9279569892473118, |
|
"grad_norm": 1.0303092002868652, |
|
"learning_rate": 2.6058442299298437e-07, |
|
"loss": 0.8756, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.9290322580645162, |
|
"grad_norm": 1.0068696737289429, |
|
"learning_rate": 2.5289654322359526e-07, |
|
"loss": 0.8851, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.9301075268817204, |
|
"grad_norm": 0.9323456287384033, |
|
"learning_rate": 2.453223212539391e-07, |
|
"loss": 0.9156, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.9311827956989247, |
|
"grad_norm": 1.013602614402771, |
|
"learning_rate": 2.3786184540455449e-07, |
|
"loss": 0.8941, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.932258064516129, |
|
"grad_norm": 1.0885473489761353, |
|
"learning_rate": 2.3051520266961892e-07, |
|
"loss": 0.8786, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.9333333333333333, |
|
"grad_norm": 0.957119345664978, |
|
"learning_rate": 2.2328247871594379e-07, |
|
"loss": 0.8985, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.9344086021505377, |
|
"grad_norm": 1.0595957040786743, |
|
"learning_rate": 2.161637578819653e-07, |
|
"loss": 0.8598, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.9354838709677419, |
|
"grad_norm": 1.0160598754882812, |
|
"learning_rate": 2.091591231767709e-07, |
|
"loss": 0.8458, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.9365591397849462, |
|
"grad_norm": 1.0207420587539673, |
|
"learning_rate": 2.022686562791254e-07, |
|
"loss": 0.8503, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.9376344086021505, |
|
"grad_norm": 0.9531442523002625, |
|
"learning_rate": 1.954924375365197e-07, |
|
"loss": 0.885, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.9387096774193548, |
|
"grad_norm": 1.05819571018219, |
|
"learning_rate": 1.8883054596423255e-07, |
|
"loss": 0.8559, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.9397849462365592, |
|
"grad_norm": 1.021631121635437, |
|
"learning_rate": 1.8228305924441469e-07, |
|
"loss": 0.8785, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.9408602150537635, |
|
"grad_norm": 1.0116877555847168, |
|
"learning_rate": 1.7585005372517504e-07, |
|
"loss": 0.8627, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.9419354838709677, |
|
"grad_norm": 0.9780187606811523, |
|
"learning_rate": 1.6953160441969707e-07, |
|
"loss": 0.9232, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.943010752688172, |
|
"grad_norm": 1.017525553703308, |
|
"learning_rate": 1.633277850053605e-07, |
|
"loss": 0.8649, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.9440860215053763, |
|
"grad_norm": 1.1021080017089844, |
|
"learning_rate": 1.5723866782288545e-07, |
|
"loss": 0.8513, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.9451612903225807, |
|
"grad_norm": 1.0974959135055542, |
|
"learning_rate": 1.5126432387548185e-07, |
|
"loss": 0.8503, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.946236559139785, |
|
"grad_norm": 1.0184605121612549, |
|
"learning_rate": 1.4540482282803136e-07, |
|
"loss": 0.8458, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.9473118279569892, |
|
"grad_norm": 0.9723237156867981, |
|
"learning_rate": 1.3966023300626685e-07, |
|
"loss": 0.8941, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.9483870967741935, |
|
"grad_norm": 1.0808511972427368, |
|
"learning_rate": 1.3403062139598078e-07, |
|
"loss": 0.8676, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.9494623655913978, |
|
"grad_norm": 1.1188552379608154, |
|
"learning_rate": 1.285160536422392e-07, |
|
"loss": 0.8637, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.9505376344086022, |
|
"grad_norm": 1.1268365383148193, |
|
"learning_rate": 1.231165940486234e-07, |
|
"loss": 0.8953, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.9516129032258065, |
|
"grad_norm": 1.0947691202163696, |
|
"learning_rate": 1.1783230557647075e-07, |
|
"loss": 0.8834, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.9526881720430107, |
|
"grad_norm": 0.9339337348937988, |
|
"learning_rate": 1.1266324984415266e-07, |
|
"loss": 0.868, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.953763440860215, |
|
"grad_norm": 1.1089427471160889, |
|
"learning_rate": 1.0760948712634112e-07, |
|
"loss": 0.8783, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.9548387096774194, |
|
"grad_norm": 1.0046530961990356, |
|
"learning_rate": 1.0267107635331897e-07, |
|
"loss": 0.8655, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.9559139784946237, |
|
"grad_norm": 1.1793391704559326, |
|
"learning_rate": 9.784807511028837e-08, |
|
"loss": 0.8016, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.956989247311828, |
|
"grad_norm": 1.149901270866394, |
|
"learning_rate": 9.314053963669245e-08, |
|
"loss": 0.8069, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.9580645161290322, |
|
"grad_norm": 0.9446433186531067, |
|
"learning_rate": 8.854852482557242e-08, |
|
"loss": 0.9094, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.9591397849462365, |
|
"grad_norm": 1.1705981492996216, |
|
"learning_rate": 8.407208422291702e-08, |
|
"loss": 0.8226, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.9602150537634409, |
|
"grad_norm": 1.121645450592041, |
|
"learning_rate": 7.971127002704304e-08, |
|
"loss": 0.8453, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.9612903225806452, |
|
"grad_norm": 0.929806649684906, |
|
"learning_rate": 7.546613308798468e-08, |
|
"loss": 0.9233, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.9623655913978495, |
|
"grad_norm": 1.1136562824249268, |
|
"learning_rate": 7.133672290690064e-08, |
|
"loss": 0.8564, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.9634408602150538, |
|
"grad_norm": 1.028089165687561, |
|
"learning_rate": 6.732308763550022e-08, |
|
"loss": 0.8639, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.964516129032258, |
|
"grad_norm": 1.0633161067962646, |
|
"learning_rate": 6.342527407547594e-08, |
|
"loss": 0.8674, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.9655913978494624, |
|
"grad_norm": 1.0046178102493286, |
|
"learning_rate": 5.964332767796399e-08, |
|
"loss": 0.8437, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.9666666666666667, |
|
"grad_norm": 1.0161393880844116, |
|
"learning_rate": 5.5977292543007987e-08, |
|
"loss": 0.8521, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.967741935483871, |
|
"grad_norm": 1.0653326511383057, |
|
"learning_rate": 5.2427211419051605e-08, |
|
"loss": 0.8699, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.9688172043010753, |
|
"grad_norm": 1.103620171546936, |
|
"learning_rate": 4.899312570243453e-08, |
|
"loss": 0.8566, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.9698924731182795, |
|
"grad_norm": 1.0353485345840454, |
|
"learning_rate": 4.567507543691174e-08, |
|
"loss": 0.8792, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.9709677419354839, |
|
"grad_norm": 0.9971827268600464, |
|
"learning_rate": 4.247309931318722e-08, |
|
"loss": 0.8856, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.9720430107526882, |
|
"grad_norm": 1.0072693824768066, |
|
"learning_rate": 3.938723466846206e-08, |
|
"loss": 0.9181, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.9731182795698925, |
|
"grad_norm": 1.0200785398483276, |
|
"learning_rate": 3.641751748600042e-08, |
|
"loss": 0.853, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.9741935483870968, |
|
"grad_norm": 1.2243609428405762, |
|
"learning_rate": 3.356398239470427e-08, |
|
"loss": 0.7918, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.975268817204301, |
|
"grad_norm": 1.082497000694275, |
|
"learning_rate": 3.082666266872036e-08, |
|
"loss": 0.8853, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.9763440860215054, |
|
"grad_norm": 1.0451264381408691, |
|
"learning_rate": 2.8205590227040613e-08, |
|
"loss": 0.8645, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.9774193548387097, |
|
"grad_norm": 1.0385005474090576, |
|
"learning_rate": 2.5700795633138987e-08, |
|
"loss": 0.905, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.978494623655914, |
|
"grad_norm": 1.056601643562317, |
|
"learning_rate": 2.3312308094607382e-08, |
|
"loss": 0.7373, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.9795698924731183, |
|
"grad_norm": 1.2547791004180908, |
|
"learning_rate": 2.1040155462824786e-08, |
|
"loss": 0.8263, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.9806451612903225, |
|
"grad_norm": 1.0116780996322632, |
|
"learning_rate": 1.8884364232619744e-08, |
|
"loss": 0.9072, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.9817204301075269, |
|
"grad_norm": 0.9879081845283508, |
|
"learning_rate": 1.6844959541973958e-08, |
|
"loss": 0.8897, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.9827956989247312, |
|
"grad_norm": 1.1083647012710571, |
|
"learning_rate": 1.4921965171720288e-08, |
|
"loss": 0.8483, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.9838709677419355, |
|
"grad_norm": 1.0797687768936157, |
|
"learning_rate": 1.3115403545270744e-08, |
|
"loss": 0.8615, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.9849462365591398, |
|
"grad_norm": 1.0498789548873901, |
|
"learning_rate": 1.142529572835227e-08, |
|
"loss": 0.877, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.986021505376344, |
|
"grad_norm": 1.0166867971420288, |
|
"learning_rate": 9.851661428761372e-09, |
|
"loss": 0.8823, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.9870967741935484, |
|
"grad_norm": 1.130036473274231, |
|
"learning_rate": 8.394518996135414e-09, |
|
"loss": 0.8339, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.9881720430107527, |
|
"grad_norm": 1.0444576740264893, |
|
"learning_rate": 7.053885421737239e-09, |
|
"loss": 0.8898, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.989247311827957, |
|
"grad_norm": 0.9490436911582947, |
|
"learning_rate": 5.8297763382597625e-09, |
|
"loss": 0.8947, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.9903225806451613, |
|
"grad_norm": 1.0969460010528564, |
|
"learning_rate": 4.7222060196394634e-09, |
|
"loss": 0.914, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.9913978494623656, |
|
"grad_norm": 1.2216132879257202, |
|
"learning_rate": 3.731187380893176e-09, |
|
"loss": 0.8302, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.9924731182795699, |
|
"grad_norm": 1.0914058685302734, |
|
"learning_rate": 2.856731977968208e-09, |
|
"loss": 0.8287, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.9935483870967742, |
|
"grad_norm": 1.0946998596191406, |
|
"learning_rate": 2.0988500076013494e-09, |
|
"loss": 0.8098, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.9946236559139785, |
|
"grad_norm": 0.9429404735565186, |
|
"learning_rate": 1.4575503072100649e-09, |
|
"loss": 0.9227, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.9956989247311828, |
|
"grad_norm": 1.110209345817566, |
|
"learning_rate": 9.328403547792518e-10, |
|
"loss": 0.8376, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.9967741935483871, |
|
"grad_norm": 1.0573797225952148, |
|
"learning_rate": 5.247262687835264e-10, |
|
"loss": 0.826, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.9978494623655914, |
|
"grad_norm": 1.0567365884780884, |
|
"learning_rate": 2.3321280810617575e-10, |
|
"loss": 0.8646, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.9989247311827957, |
|
"grad_norm": 1.0667850971221924, |
|
"learning_rate": 5.830337199030922e-11, |
|
"loss": 0.8539, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.0270296335220337, |
|
"learning_rate": 0.0, |
|
"loss": 0.8958, |
|
"step": 930 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 930, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4.644255795888783e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|