BPE-HF-Wikipedia-FR / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 26.0,
"global_step": 98228,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"learning_rate": 5e-09,
"loss": 10.4629,
"step": 1
},
{
"epoch": 0.13,
"learning_rate": 2.5e-06,
"loss": 9.562,
"step": 500
},
{
"epoch": 0.26,
"learning_rate": 5e-06,
"loss": 7.9982,
"step": 1000
},
{
"epoch": 0.4,
"learning_rate": 7.5e-06,
"loss": 6.9798,
"step": 1500
},
{
"epoch": 0.53,
"learning_rate": 1e-05,
"loss": 6.6752,
"step": 2000
},
{
"epoch": 0.66,
"learning_rate": 1.25e-05,
"loss": 6.5045,
"step": 2500
},
{
"epoch": 0.79,
"learning_rate": 1.5e-05,
"loss": 6.3892,
"step": 3000
},
{
"epoch": 0.93,
"learning_rate": 1.75e-05,
"loss": 6.2943,
"step": 3500
},
{
"epoch": 1.06,
"learning_rate": 2e-05,
"loss": 6.2221,
"step": 4000
},
{
"epoch": 1.19,
"learning_rate": 2.25e-05,
"loss": 6.154,
"step": 4500
},
{
"epoch": 1.32,
"learning_rate": 2.5e-05,
"loss": 6.0982,
"step": 5000
},
{
"epoch": 1.46,
"learning_rate": 2.7500000000000004e-05,
"loss": 6.0464,
"step": 5500
},
{
"epoch": 1.59,
"learning_rate": 3e-05,
"loss": 6.0017,
"step": 6000
},
{
"epoch": 1.72,
"learning_rate": 3.2500000000000004e-05,
"loss": 5.9683,
"step": 6500
},
{
"epoch": 1.85,
"learning_rate": 3.5e-05,
"loss": 5.936,
"step": 7000
},
{
"epoch": 1.99,
"learning_rate": 3.7500000000000003e-05,
"loss": 5.9005,
"step": 7500
},
{
"epoch": 2.12,
"learning_rate": 4e-05,
"loss": 5.8763,
"step": 8000
},
{
"epoch": 2.25,
"learning_rate": 4.2495e-05,
"loss": 5.8522,
"step": 8500
},
{
"epoch": 2.38,
"learning_rate": 4.4995000000000005e-05,
"loss": 5.8356,
"step": 9000
},
{
"epoch": 2.51,
"learning_rate": 4.7495e-05,
"loss": 5.8125,
"step": 9500
},
{
"epoch": 2.65,
"learning_rate": 4.9995000000000005e-05,
"loss": 5.7984,
"step": 10000
},
{
"epoch": 2.78,
"learning_rate": 4.998341326938449e-05,
"loss": 5.7788,
"step": 10500
},
{
"epoch": 2.91,
"learning_rate": 4.996675992539302e-05,
"loss": 5.7609,
"step": 11000
},
{
"epoch": 3.04,
"learning_rate": 4.995010658140154e-05,
"loss": 5.7509,
"step": 11500
},
{
"epoch": 3.18,
"learning_rate": 4.993345323741007e-05,
"loss": 5.7398,
"step": 12000
},
{
"epoch": 3.31,
"learning_rate": 4.991683320010658e-05,
"loss": 5.7264,
"step": 12500
},
{
"epoch": 3.44,
"learning_rate": 4.990017985611511e-05,
"loss": 5.7196,
"step": 13000
},
{
"epoch": 3.57,
"learning_rate": 4.988352651212364e-05,
"loss": 5.7094,
"step": 13500
},
{
"epoch": 3.71,
"learning_rate": 4.986687316813216e-05,
"loss": 5.6992,
"step": 14000
},
{
"epoch": 3.84,
"learning_rate": 4.9850253130828675e-05,
"loss": 5.6917,
"step": 14500
},
{
"epoch": 3.97,
"learning_rate": 4.9833599786837196e-05,
"loss": 5.6858,
"step": 15000
},
{
"epoch": 4.1,
"learning_rate": 4.9816946442845725e-05,
"loss": 5.6734,
"step": 15500
},
{
"epoch": 4.24,
"learning_rate": 4.980029309885425e-05,
"loss": 5.6687,
"step": 16000
},
{
"epoch": 4.37,
"learning_rate": 4.978367306155076e-05,
"loss": 5.6662,
"step": 16500
},
{
"epoch": 4.5,
"learning_rate": 4.976701971755929e-05,
"loss": 5.6598,
"step": 17000
},
{
"epoch": 4.63,
"learning_rate": 4.975036637356781e-05,
"loss": 5.655,
"step": 17500
},
{
"epoch": 4.76,
"learning_rate": 4.973374633626433e-05,
"loss": 5.6471,
"step": 18000
},
{
"epoch": 4.9,
"learning_rate": 4.971709299227285e-05,
"loss": 5.6424,
"step": 18500
},
{
"epoch": 5.03,
"learning_rate": 4.970043964828138e-05,
"loss": 5.6371,
"step": 19000
},
{
"epoch": 5.16,
"learning_rate": 4.96837863042899e-05,
"loss": 5.6289,
"step": 19500
},
{
"epoch": 5.29,
"learning_rate": 4.966713296029843e-05,
"loss": 5.6248,
"step": 20000
},
{
"epoch": 5.43,
"learning_rate": 4.965047961630696e-05,
"loss": 5.622,
"step": 20500
},
{
"epoch": 5.56,
"learning_rate": 4.963382627231548e-05,
"loss": 5.6202,
"step": 21000
},
{
"epoch": 5.69,
"learning_rate": 4.9617206235011995e-05,
"loss": 5.6134,
"step": 21500
},
{
"epoch": 5.82,
"learning_rate": 4.9600552891020516e-05,
"loss": 5.6098,
"step": 22000
},
{
"epoch": 5.96,
"learning_rate": 4.9583899547029045e-05,
"loss": 5.6084,
"step": 22500
},
{
"epoch": 6.09,
"learning_rate": 4.956724620303757e-05,
"loss": 5.6018,
"step": 23000
},
{
"epoch": 6.22,
"learning_rate": 4.9550592859046095e-05,
"loss": 5.5955,
"step": 23500
},
{
"epoch": 6.35,
"learning_rate": 4.953397282174261e-05,
"loss": 5.5957,
"step": 24000
},
{
"epoch": 6.48,
"learning_rate": 4.951731947775113e-05,
"loss": 5.5875,
"step": 24500
},
{
"epoch": 6.62,
"learning_rate": 4.950066613375966e-05,
"loss": 5.5857,
"step": 25000
},
{
"epoch": 6.75,
"learning_rate": 4.948401278976819e-05,
"loss": 5.5856,
"step": 25500
},
{
"epoch": 6.88,
"learning_rate": 4.94673927524647e-05,
"loss": 5.5855,
"step": 26000
},
{
"epoch": 7.01,
"learning_rate": 4.945073940847322e-05,
"loss": 5.579,
"step": 26500
},
{
"epoch": 7.15,
"learning_rate": 4.943408606448175e-05,
"loss": 5.5772,
"step": 27000
},
{
"epoch": 7.28,
"learning_rate": 4.941743272049028e-05,
"loss": 5.5725,
"step": 27500
},
{
"epoch": 7.41,
"learning_rate": 4.94007793764988e-05,
"loss": 5.57,
"step": 28000
},
{
"epoch": 7.54,
"learning_rate": 4.9384159339195315e-05,
"loss": 5.5676,
"step": 28500
},
{
"epoch": 7.68,
"learning_rate": 4.9367505995203836e-05,
"loss": 5.5665,
"step": 29000
},
{
"epoch": 7.81,
"learning_rate": 4.9350852651212365e-05,
"loss": 5.5615,
"step": 29500
},
{
"epoch": 7.94,
"learning_rate": 4.933419930722089e-05,
"loss": 5.5169,
"step": 30000
},
{
"epoch": 8.07,
"learning_rate": 4.9317545963229415e-05,
"loss": 5.2484,
"step": 30500
},
{
"epoch": 8.21,
"learning_rate": 4.930089261923794e-05,
"loss": 5.043,
"step": 31000
},
{
"epoch": 8.34,
"learning_rate": 4.928423927524647e-05,
"loss": 4.8676,
"step": 31500
},
{
"epoch": 8.47,
"learning_rate": 4.9267585931255e-05,
"loss": 4.6987,
"step": 32000
},
{
"epoch": 8.6,
"learning_rate": 4.925096589395151e-05,
"loss": 4.5359,
"step": 32500
},
{
"epoch": 8.73,
"learning_rate": 4.923434585664802e-05,
"loss": 4.3603,
"step": 33000
},
{
"epoch": 8.87,
"learning_rate": 4.921769251265655e-05,
"loss": 4.1923,
"step": 33500
},
{
"epoch": 9.0,
"learning_rate": 4.920103916866507e-05,
"loss": 4.0239,
"step": 34000
},
{
"epoch": 9.13,
"learning_rate": 4.91843858246736e-05,
"loss": 3.7255,
"step": 34500
},
{
"epoch": 9.26,
"learning_rate": 4.916773248068212e-05,
"loss": 3.2457,
"step": 35000
},
{
"epoch": 9.4,
"learning_rate": 4.9151112443378634e-05,
"loss": 2.5753,
"step": 35500
},
{
"epoch": 9.53,
"learning_rate": 4.9134459099387156e-05,
"loss": 2.2821,
"step": 36000
},
{
"epoch": 9.66,
"learning_rate": 4.9117805755395685e-05,
"loss": 2.1267,
"step": 36500
},
{
"epoch": 9.79,
"learning_rate": 4.910115241140421e-05,
"loss": 2.0179,
"step": 37000
},
{
"epoch": 9.93,
"learning_rate": 4.908453237410072e-05,
"loss": 1.9368,
"step": 37500
},
{
"epoch": 10.06,
"learning_rate": 4.906787903010925e-05,
"loss": 1.8691,
"step": 38000
},
{
"epoch": 10.19,
"learning_rate": 4.905125899280576e-05,
"loss": 1.816,
"step": 38500
},
{
"epoch": 10.32,
"learning_rate": 4.903460564881429e-05,
"loss": 1.7673,
"step": 39000
},
{
"epoch": 10.46,
"learning_rate": 4.901795230482281e-05,
"loss": 1.7288,
"step": 39500
},
{
"epoch": 10.59,
"learning_rate": 4.900129896083134e-05,
"loss": 1.6907,
"step": 40000
},
{
"epoch": 10.72,
"learning_rate": 4.898464561683987e-05,
"loss": 1.6572,
"step": 40500
},
{
"epoch": 10.85,
"learning_rate": 4.896799227284839e-05,
"loss": 1.6261,
"step": 41000
},
{
"epoch": 10.98,
"learning_rate": 4.895133892885692e-05,
"loss": 1.6001,
"step": 41500
},
{
"epoch": 11.12,
"learning_rate": 4.893468558486544e-05,
"loss": 1.5719,
"step": 42000
},
{
"epoch": 11.25,
"learning_rate": 4.891803224087397e-05,
"loss": 1.5495,
"step": 42500
},
{
"epoch": 11.38,
"learning_rate": 4.8901412203570476e-05,
"loss": 1.5277,
"step": 43000
},
{
"epoch": 11.51,
"learning_rate": 4.8884758859579004e-05,
"loss": 1.5069,
"step": 43500
},
{
"epoch": 11.65,
"learning_rate": 4.886810551558753e-05,
"loss": 1.4871,
"step": 44000
},
{
"epoch": 11.78,
"learning_rate": 4.8851452171596055e-05,
"loss": 1.4707,
"step": 44500
},
{
"epoch": 11.91,
"learning_rate": 4.883479882760458e-05,
"loss": 1.4522,
"step": 45000
},
{
"epoch": 12.04,
"learning_rate": 4.881817879030109e-05,
"loss": 1.4362,
"step": 45500
},
{
"epoch": 12.18,
"learning_rate": 4.880152544630962e-05,
"loss": 1.4219,
"step": 46000
},
{
"epoch": 12.31,
"learning_rate": 4.878487210231815e-05,
"loss": 1.4066,
"step": 46500
},
{
"epoch": 12.44,
"learning_rate": 4.876821875832667e-05,
"loss": 1.3935,
"step": 47000
},
{
"epoch": 12.57,
"learning_rate": 4.875159872102319e-05,
"loss": 1.3809,
"step": 47500
},
{
"epoch": 12.71,
"learning_rate": 4.873494537703171e-05,
"loss": 1.3663,
"step": 48000
},
{
"epoch": 12.84,
"learning_rate": 4.871829203304024e-05,
"loss": 1.3571,
"step": 48500
},
{
"epoch": 12.97,
"learning_rate": 4.8701671995736746e-05,
"loss": 1.3454,
"step": 49000
},
{
"epoch": 13.1,
"learning_rate": 4.8685018651745274e-05,
"loss": 1.3336,
"step": 49500
},
{
"epoch": 13.23,
"learning_rate": 4.8668365307753796e-05,
"loss": 1.3236,
"step": 50000
},
{
"epoch": 13.37,
"learning_rate": 4.8651711963762324e-05,
"loss": 1.3131,
"step": 50500
},
{
"epoch": 13.5,
"learning_rate": 4.863505861977085e-05,
"loss": 1.3029,
"step": 51000
},
{
"epoch": 13.63,
"learning_rate": 4.8618405275779374e-05,
"loss": 1.2961,
"step": 51500
},
{
"epoch": 13.76,
"learning_rate": 4.86017519317879e-05,
"loss": 1.2889,
"step": 52000
},
{
"epoch": 13.9,
"learning_rate": 4.858513189448441e-05,
"loss": 1.2814,
"step": 52500
},
{
"epoch": 14.03,
"learning_rate": 4.856847855049294e-05,
"loss": 1.2704,
"step": 53000
},
{
"epoch": 14.16,
"learning_rate": 4.855182520650147e-05,
"loss": 1.2605,
"step": 53500
},
{
"epoch": 14.29,
"learning_rate": 4.853517186250999e-05,
"loss": 1.2545,
"step": 54000
},
{
"epoch": 14.43,
"learning_rate": 4.851851851851852e-05,
"loss": 1.2487,
"step": 54500
},
{
"epoch": 14.56,
"learning_rate": 4.850186517452705e-05,
"loss": 1.2381,
"step": 55000
},
{
"epoch": 14.69,
"learning_rate": 4.8485211830535576e-05,
"loss": 1.2326,
"step": 55500
},
{
"epoch": 14.82,
"learning_rate": 4.8468558486544104e-05,
"loss": 1.224,
"step": 56000
},
{
"epoch": 14.96,
"learning_rate": 4.845193844924061e-05,
"loss": 1.2213,
"step": 56500
},
{
"epoch": 15.09,
"learning_rate": 4.8435285105249135e-05,
"loss": 1.2126,
"step": 57000
},
{
"epoch": 15.22,
"learning_rate": 4.841863176125766e-05,
"loss": 1.2047,
"step": 57500
},
{
"epoch": 15.35,
"learning_rate": 4.840197841726619e-05,
"loss": 1.1999,
"step": 58000
},
{
"epoch": 15.48,
"learning_rate": 4.8385358379962694e-05,
"loss": 1.1967,
"step": 58500
},
{
"epoch": 15.62,
"learning_rate": 4.836870503597122e-05,
"loss": 1.1901,
"step": 59000
},
{
"epoch": 15.75,
"learning_rate": 4.835205169197975e-05,
"loss": 1.1843,
"step": 59500
},
{
"epoch": 15.88,
"learning_rate": 4.833539834798828e-05,
"loss": 1.1792,
"step": 60000
},
{
"epoch": 16.01,
"learning_rate": 4.831877831068479e-05,
"loss": 1.1752,
"step": 60500
},
{
"epoch": 16.15,
"learning_rate": 4.83021582733813e-05,
"loss": 1.1657,
"step": 61000
},
{
"epoch": 16.28,
"learning_rate": 4.8285504929389827e-05,
"loss": 1.1628,
"step": 61500
},
{
"epoch": 16.41,
"learning_rate": 4.826885158539835e-05,
"loss": 1.1586,
"step": 62000
},
{
"epoch": 16.54,
"learning_rate": 4.825219824140688e-05,
"loss": 1.1547,
"step": 62500
},
{
"epoch": 16.68,
"learning_rate": 4.8235578204103386e-05,
"loss": 1.1486,
"step": 63000
},
{
"epoch": 16.81,
"learning_rate": 4.8218924860111914e-05,
"loss": 1.1439,
"step": 63500
},
{
"epoch": 16.94,
"learning_rate": 4.820227151612044e-05,
"loss": 1.1409,
"step": 64000
},
{
"epoch": 17.07,
"learning_rate": 4.8185618172128964e-05,
"loss": 1.1349,
"step": 64500
},
{
"epoch": 17.2,
"learning_rate": 4.816896482813749e-05,
"loss": 1.1306,
"step": 65000
},
{
"epoch": 17.34,
"learning_rate": 4.8152311484146014e-05,
"loss": 1.1287,
"step": 65500
},
{
"epoch": 17.47,
"learning_rate": 4.813569144684253e-05,
"loss": 1.1224,
"step": 66000
},
{
"epoch": 17.6,
"learning_rate": 4.811903810285105e-05,
"loss": 1.1196,
"step": 66500
},
{
"epoch": 17.73,
"learning_rate": 4.810238475885958e-05,
"loss": 1.1162,
"step": 67000
},
{
"epoch": 17.87,
"learning_rate": 4.808576472155609e-05,
"loss": 1.1141,
"step": 67500
},
{
"epoch": 18.0,
"learning_rate": 4.806911137756462e-05,
"loss": 1.1098,
"step": 68000
},
{
"epoch": 18.13,
"learning_rate": 4.8052458033573147e-05,
"loss": 1.1043,
"step": 68500
},
{
"epoch": 18.26,
"learning_rate": 4.803580468958167e-05,
"loss": 1.1008,
"step": 69000
},
{
"epoch": 18.4,
"learning_rate": 4.8019151345590197e-05,
"loss": 1.0991,
"step": 69500
},
{
"epoch": 18.53,
"learning_rate": 4.8002531308286706e-05,
"loss": 1.0941,
"step": 70000
},
{
"epoch": 18.66,
"learning_rate": 4.7985877964295234e-05,
"loss": 1.0892,
"step": 70500
},
{
"epoch": 18.79,
"learning_rate": 4.796922462030376e-05,
"loss": 1.0847,
"step": 71000
},
{
"epoch": 18.93,
"learning_rate": 4.7952571276312284e-05,
"loss": 1.083,
"step": 71500
},
{
"epoch": 19.06,
"learning_rate": 4.793591793232081e-05,
"loss": 1.0802,
"step": 72000
},
{
"epoch": 19.19,
"learning_rate": 4.791926458832934e-05,
"loss": 1.0761,
"step": 72500
},
{
"epoch": 19.32,
"learning_rate": 4.790261124433786e-05,
"loss": 1.0726,
"step": 73000
},
{
"epoch": 19.45,
"learning_rate": 4.788595790034639e-05,
"loss": 1.0683,
"step": 73500
},
{
"epoch": 19.59,
"learning_rate": 4.78693378630429e-05,
"loss": 1.0667,
"step": 74000
},
{
"epoch": 19.72,
"learning_rate": 4.785268451905143e-05,
"loss": 1.0655,
"step": 74500
},
{
"epoch": 19.85,
"learning_rate": 4.783603117505995e-05,
"loss": 1.0617,
"step": 75000
},
{
"epoch": 19.98,
"learning_rate": 4.781937783106848e-05,
"loss": 1.0599,
"step": 75500
},
{
"epoch": 20.12,
"learning_rate": 4.780272448707701e-05,
"loss": 1.0554,
"step": 76000
},
{
"epoch": 20.25,
"learning_rate": 4.778607114308553e-05,
"loss": 1.0541,
"step": 76500
},
{
"epoch": 20.38,
"learning_rate": 4.7769417799094064e-05,
"loss": 1.0482,
"step": 77000
},
{
"epoch": 20.51,
"learning_rate": 4.7752764455102586e-05,
"loss": 1.0486,
"step": 77500
},
{
"epoch": 20.65,
"learning_rate": 4.7736144417799095e-05,
"loss": 1.0453,
"step": 78000
},
{
"epoch": 20.78,
"learning_rate": 4.7719491073807623e-05,
"loss": 1.0427,
"step": 78500
},
{
"epoch": 20.91,
"learning_rate": 4.770287103650413e-05,
"loss": 1.0399,
"step": 79000
},
{
"epoch": 21.04,
"learning_rate": 4.768621769251266e-05,
"loss": 1.0363,
"step": 79500
},
{
"epoch": 21.18,
"learning_rate": 4.766956434852118e-05,
"loss": 1.0321,
"step": 80000
},
{
"epoch": 21.31,
"learning_rate": 4.765291100452971e-05,
"loss": 1.0311,
"step": 80500
},
{
"epoch": 21.44,
"learning_rate": 4.763629096722622e-05,
"loss": 1.0287,
"step": 81000
},
{
"epoch": 21.57,
"learning_rate": 4.761963762323475e-05,
"loss": 1.0263,
"step": 81500
},
{
"epoch": 21.7,
"learning_rate": 4.760298427924327e-05,
"loss": 1.0241,
"step": 82000
},
{
"epoch": 21.84,
"learning_rate": 4.75863309352518e-05,
"loss": 1.0231,
"step": 82500
},
{
"epoch": 21.97,
"learning_rate": 4.756967759126033e-05,
"loss": 1.0223,
"step": 83000
},
{
"epoch": 22.1,
"learning_rate": 4.7553057553956836e-05,
"loss": 1.019,
"step": 83500
},
{
"epoch": 22.23,
"learning_rate": 4.7536404209965365e-05,
"loss": 1.0148,
"step": 84000
},
{
"epoch": 22.37,
"learning_rate": 4.7519750865973887e-05,
"loss": 1.012,
"step": 84500
},
{
"epoch": 22.5,
"learning_rate": 4.7503097521982415e-05,
"loss": 1.0117,
"step": 85000
},
{
"epoch": 22.63,
"learning_rate": 4.7486477484678924e-05,
"loss": 1.0089,
"step": 85500
},
{
"epoch": 22.76,
"learning_rate": 4.746982414068745e-05,
"loss": 1.0068,
"step": 86000
},
{
"epoch": 22.9,
"learning_rate": 4.745317079669598e-05,
"loss": 1.0042,
"step": 86500
},
{
"epoch": 23.03,
"learning_rate": 4.74365174527045e-05,
"loss": 1.002,
"step": 87000
},
{
"epoch": 23.16,
"learning_rate": 4.741989741540102e-05,
"loss": 1.0018,
"step": 87500
},
{
"epoch": 23.29,
"learning_rate": 4.740324407140954e-05,
"loss": 0.996,
"step": 88000
},
{
"epoch": 23.43,
"learning_rate": 4.738659072741807e-05,
"loss": 0.9964,
"step": 88500
},
{
"epoch": 23.56,
"learning_rate": 4.736993738342659e-05,
"loss": 0.9921,
"step": 89000
},
{
"epoch": 23.69,
"learning_rate": 4.735328403943512e-05,
"loss": 0.9919,
"step": 89500
},
{
"epoch": 23.82,
"learning_rate": 4.733666400213163e-05,
"loss": 0.9894,
"step": 90000
},
{
"epoch": 23.95,
"learning_rate": 4.7320010658140156e-05,
"loss": 0.9867,
"step": 90500
},
{
"epoch": 24.09,
"learning_rate": 4.7303357314148685e-05,
"loss": 0.9856,
"step": 91000
},
{
"epoch": 24.22,
"learning_rate": 4.7286737276845194e-05,
"loss": 0.9821,
"step": 91500
},
{
"epoch": 24.35,
"learning_rate": 4.727008393285372e-05,
"loss": 0.983,
"step": 92000
},
{
"epoch": 24.48,
"learning_rate": 4.7253430588862244e-05,
"loss": 0.9782,
"step": 92500
},
{
"epoch": 24.62,
"learning_rate": 4.723677724487077e-05,
"loss": 0.9789,
"step": 93000
},
{
"epoch": 24.75,
"learning_rate": 4.72201239008793e-05,
"loss": 0.9767,
"step": 93500
},
{
"epoch": 24.88,
"learning_rate": 4.720347055688782e-05,
"loss": 0.9741,
"step": 94000
},
{
"epoch": 25.01,
"learning_rate": 4.718681721289635e-05,
"loss": 0.9723,
"step": 94500
},
{
"epoch": 25.15,
"learning_rate": 4.717016386890488e-05,
"loss": 0.9688,
"step": 95000
},
{
"epoch": 25.28,
"learning_rate": 4.71535105249134e-05,
"loss": 0.9683,
"step": 95500
},
{
"epoch": 25.41,
"learning_rate": 4.713689048760992e-05,
"loss": 0.9683,
"step": 96000
},
{
"epoch": 25.54,
"learning_rate": 4.712023714361844e-05,
"loss": 0.9663,
"step": 96500
},
{
"epoch": 25.67,
"learning_rate": 4.710358379962697e-05,
"loss": 0.9623,
"step": 97000
},
{
"epoch": 25.81,
"learning_rate": 4.708693045563549e-05,
"loss": 0.9614,
"step": 97500
},
{
"epoch": 25.94,
"learning_rate": 4.7070310418332005e-05,
"loss": 0.9612,
"step": 98000
}
],
"max_steps": 1511200,
"num_train_epochs": 400,
"total_flos": 2.647131420139664e+19,
"trial_name": null,
"trial_params": null
}
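The log above is the standard Hugging Face Trainer state: each entry in log_history records the epoch, learning rate, loss, and global step at a logging interval. A minimal sketch of how such a file could be consumed, assuming a local copy saved as trainer_state.json and matplotlib available (both assumptions, not part of the original upload):

import json

import matplotlib.pyplot as plt

# Load the trainer state (assumed to be available locally as trainer_state.json).
with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# Each log_history entry carries epoch, learning_rate, loss, and step.
steps = [entry["step"] for entry in state["log_history"]]
losses = [entry["loss"] for entry in state["log_history"]]

# Plot the training loss against the global step.
plt.plot(steps, losses)
plt.xlabel("global step")
plt.ylabel("training loss")
plt.title("BPE-HF-Wikipedia-FR training loss")
plt.savefig("loss_curve.png")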