{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 8.0,
  "global_step": 65880,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.06,
      "learning_rate": 2.9772313296903464e-05,
      "loss": 5.8692,
      "step": 500
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.954462659380692e-05,
      "loss": 5.9529,
      "step": 1000
    },
    {
      "epoch": 0.18,
      "learning_rate": 2.9316939890710385e-05,
      "loss": 5.9524,
      "step": 1500
    },
    {
      "epoch": 0.24,
      "learning_rate": 2.9089253187613845e-05,
      "loss": 5.9525,
      "step": 2000
    },
    {
      "epoch": 0.3,
      "learning_rate": 2.8861566484517305e-05,
      "loss": 5.9517,
      "step": 2500
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.8633879781420765e-05,
      "loss": 5.9514,
      "step": 3000
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.8406193078324225e-05,
      "loss": 5.9514,
      "step": 3500
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.817850637522769e-05,
      "loss": 5.9515,
      "step": 4000
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.795081967213115e-05,
      "loss": 5.9517,
      "step": 4500
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.772313296903461e-05,
      "loss": 5.9513,
      "step": 5000
    },
    {
      "epoch": 0.67,
      "learning_rate": 2.749544626593807e-05,
      "loss": 5.9504,
      "step": 5500
    },
    {
      "epoch": 0.73,
      "learning_rate": 2.7267759562841533e-05,
      "loss": 5.9516,
      "step": 6000
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.704007285974499e-05,
      "loss": 5.951,
      "step": 6500
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.6812386156648453e-05,
      "loss": 5.952,
      "step": 7000
    },
    {
      "epoch": 0.91,
      "learning_rate": 2.6584699453551913e-05,
      "loss": 5.9522,
      "step": 7500
    },
    {
      "epoch": 0.97,
      "learning_rate": 2.6357012750455373e-05,
      "loss": 5.9519,
      "step": 8000
    },
    {
      "epoch": 1.03,
      "learning_rate": 2.6129326047358837e-05,
      "loss": 5.9517,
      "step": 8500
    },
    {
      "epoch": 1.09,
      "learning_rate": 2.5901639344262294e-05,
      "loss": 5.9518,
      "step": 9000
    },
    {
      "epoch": 1.15,
      "learning_rate": 2.5673952641165757e-05,
      "loss": 5.9518,
      "step": 9500
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.5446265938069218e-05,
      "loss": 5.9513,
      "step": 10000
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.5218579234972678e-05,
      "loss": 5.9512,
      "step": 10500
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.4990892531876138e-05,
      "loss": 5.9514,
      "step": 11000
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.47632058287796e-05,
      "loss": 5.952,
      "step": 11500
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.453551912568306e-05,
      "loss": 5.9509,
      "step": 12000
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4307832422586522e-05,
      "loss": 5.9516,
      "step": 12500
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.4080145719489982e-05,
      "loss": 5.9517,
      "step": 13000
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.3852459016393442e-05,
      "loss": 5.9501,
      "step": 13500
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.3624772313296906e-05,
      "loss": 5.9515,
      "step": 14000
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.3397085610200362e-05,
      "loss": 5.9505,
      "step": 14500
    },
    {
      "epoch": 1.82,
      "learning_rate": 2.3169398907103826e-05,
      "loss": 5.9507,
      "step": 15000
    },
    {
      "epoch": 1.88,
      "learning_rate": 2.294171220400729e-05,
      "loss": 5.9512,
      "step": 15500
    },
    {
      "epoch": 1.94,
      "learning_rate": 2.2714025500910746e-05,
      "loss": 5.9511,
      "step": 16000
    },
    {
      "epoch": 2.0,
      "learning_rate": 2.248633879781421e-05,
      "loss": 5.9514,
      "step": 16500
    },
    {
      "epoch": 2.06,
      "learning_rate": 2.2258652094717667e-05,
      "loss": 5.9503,
      "step": 17000
    },
    {
      "epoch": 2.13,
      "learning_rate": 2.203096539162113e-05,
      "loss": 5.9518,
      "step": 17500
    },
    {
      "epoch": 2.19,
      "learning_rate": 2.180327868852459e-05,
      "loss": 5.9514,
      "step": 18000
    },
    {
      "epoch": 2.25,
      "learning_rate": 2.157559198542805e-05,
      "loss": 5.9518,
      "step": 18500
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.1347905282331514e-05,
      "loss": 5.9517,
      "step": 19000
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.1120218579234974e-05,
      "loss": 5.9513,
      "step": 19500
    },
    {
      "epoch": 2.43,
      "learning_rate": 2.0892531876138434e-05,
      "loss": 5.9503,
      "step": 20000
    },
    {
      "epoch": 2.49,
      "learning_rate": 2.0664845173041894e-05,
      "loss": 5.9513,
      "step": 20500
    },
    {
      "epoch": 2.55,
      "learning_rate": 2.0437158469945358e-05,
      "loss": 5.9515,
      "step": 21000
    },
    {
      "epoch": 2.61,
      "learning_rate": 2.0209471766848815e-05,
      "loss": 5.9511,
      "step": 21500
    },
    {
      "epoch": 2.67,
      "learning_rate": 1.998178506375228e-05,
      "loss": 5.9509,
      "step": 22000
    },
    {
      "epoch": 2.73,
      "learning_rate": 1.975409836065574e-05,
      "loss": 5.9515,
      "step": 22500
    },
    {
      "epoch": 2.79,
      "learning_rate": 1.95264116575592e-05,
      "loss": 5.951,
      "step": 23000
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.9298724954462662e-05,
      "loss": 5.9511,
      "step": 23500
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.907103825136612e-05,
      "loss": 5.9511,
      "step": 24000
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.8843351548269583e-05,
      "loss": 5.9504,
      "step": 24500
    },
    {
      "epoch": 3.04,
      "learning_rate": 1.8615664845173043e-05,
      "loss": 5.9507,
      "step": 25000
    },
    {
      "epoch": 3.1,
      "learning_rate": 1.8387978142076503e-05,
      "loss": 5.9506,
      "step": 25500
    },
    {
      "epoch": 3.16,
      "learning_rate": 1.8160291438979963e-05,
      "loss": 5.9515,
      "step": 26000
    },
    {
      "epoch": 3.22,
      "learning_rate": 1.7932604735883423e-05,
      "loss": 5.9509,
      "step": 26500
    },
    {
      "epoch": 3.28,
      "learning_rate": 1.7704918032786887e-05,
      "loss": 5.9505,
      "step": 27000
    },
    {
      "epoch": 3.34,
      "learning_rate": 1.7477231329690347e-05,
      "loss": 5.951,
      "step": 27500
    },
    {
      "epoch": 3.4,
      "learning_rate": 1.7249544626593807e-05,
      "loss": 5.9512,
      "step": 28000
    },
    {
      "epoch": 3.46,
      "learning_rate": 1.7021857923497267e-05,
      "loss": 5.9511,
      "step": 28500
    },
    {
      "epoch": 3.52,
      "learning_rate": 1.679417122040073e-05,
      "loss": 5.9507,
      "step": 29000
    },
    {
      "epoch": 3.58,
      "learning_rate": 1.6566484517304187e-05,
      "loss": 5.9511,
      "step": 29500
    },
    {
      "epoch": 3.64,
      "learning_rate": 1.633879781420765e-05,
      "loss": 5.9507,
      "step": 30000
    },
    {
      "epoch": 3.7,
      "learning_rate": 1.6111111111111115e-05,
      "loss": 5.9514,
      "step": 30500
    },
    {
      "epoch": 3.76,
      "learning_rate": 1.588342440801457e-05,
      "loss": 5.9511,
      "step": 31000
    },
    {
      "epoch": 3.83,
      "learning_rate": 1.5655737704918035e-05,
      "loss": 5.9509,
      "step": 31500
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.542805100182149e-05,
      "loss": 5.9508,
      "step": 32000
    },
    {
      "epoch": 3.95,
      "learning_rate": 1.5200364298724955e-05,
      "loss": 5.9511,
      "step": 32500
    },
    {
      "epoch": 4.01,
      "learning_rate": 1.4972677595628415e-05,
      "loss": 5.9518,
      "step": 33000
    },
    {
      "epoch": 4.07,
      "learning_rate": 1.4744990892531877e-05,
      "loss": 5.9518,
      "step": 33500
    },
    {
      "epoch": 4.13,
      "learning_rate": 1.4517304189435337e-05,
      "loss": 5.9512,
      "step": 34000
    },
    {
      "epoch": 4.19,
      "learning_rate": 1.4289617486338798e-05,
      "loss": 5.9512,
      "step": 34500
    },
    {
      "epoch": 4.25,
      "learning_rate": 1.4061930783242258e-05,
      "loss": 5.9508,
      "step": 35000
    },
    {
      "epoch": 4.31,
      "learning_rate": 1.383424408014572e-05,
      "loss": 5.951,
      "step": 35500
    },
    {
      "epoch": 4.37,
      "learning_rate": 1.3606557377049181e-05,
      "loss": 5.9514,
      "step": 36000
    },
    {
      "epoch": 4.43,
      "learning_rate": 1.3378870673952642e-05,
      "loss": 5.9505,
      "step": 36500
    },
    {
      "epoch": 4.49,
      "learning_rate": 1.3151183970856102e-05,
      "loss": 5.9508,
      "step": 37000
    },
    {
      "epoch": 4.55,
      "learning_rate": 1.2923497267759564e-05,
      "loss": 5.9513,
      "step": 37500
    },
    {
      "epoch": 4.61,
      "learning_rate": 1.2695810564663024e-05,
      "loss": 5.9513,
      "step": 38000
    },
    {
      "epoch": 4.68,
      "learning_rate": 1.2468123861566484e-05,
      "loss": 5.9503,
      "step": 38500
    },
    {
      "epoch": 4.74,
      "learning_rate": 1.2240437158469946e-05,
      "loss": 5.9508,
      "step": 39000
    },
    {
      "epoch": 4.8,
      "learning_rate": 1.2012750455373408e-05,
      "loss": 5.9504,
      "step": 39500
    },
    {
      "epoch": 4.86,
      "learning_rate": 1.1785063752276868e-05,
      "loss": 5.9503,
      "step": 40000
    },
    {
      "epoch": 4.92,
      "learning_rate": 1.1557377049180328e-05,
      "loss": 5.951,
      "step": 40500
    },
    {
      "epoch": 4.98,
      "learning_rate": 1.132969034608379e-05,
      "loss": 5.9514,
      "step": 41000
    },
    {
      "epoch": 5.04,
      "learning_rate": 1.110200364298725e-05,
      "loss": 5.9507,
      "step": 41500
    },
    {
      "epoch": 5.1,
      "learning_rate": 1.087431693989071e-05,
      "loss": 5.9505,
      "step": 42000
    },
    {
      "epoch": 5.16,
      "learning_rate": 1.064663023679417e-05,
      "loss": 5.9509,
      "step": 42500
    },
    {
      "epoch": 5.22,
      "learning_rate": 1.0418943533697632e-05,
      "loss": 5.9506,
      "step": 43000
    },
    {
      "epoch": 5.28,
      "learning_rate": 1.0191256830601094e-05,
      "loss": 5.9511,
      "step": 43500
    },
    {
      "epoch": 5.34,
      "learning_rate": 9.963570127504554e-06,
      "loss": 5.9505,
      "step": 44000
    },
    {
      "epoch": 5.4,
      "learning_rate": 9.735883424408014e-06,
      "loss": 5.951,
      "step": 44500
    },
    {
      "epoch": 5.46,
      "learning_rate": 9.508196721311476e-06,
      "loss": 5.9507,
      "step": 45000
    },
    {
      "epoch": 5.53,
      "learning_rate": 9.280510018214936e-06,
      "loss": 5.9509,
      "step": 45500
    },
    {
      "epoch": 5.59,
      "learning_rate": 9.052823315118397e-06,
      "loss": 5.9509,
      "step": 46000
    },
    {
      "epoch": 5.65,
      "learning_rate": 8.825136612021857e-06,
      "loss": 5.9514,
      "step": 46500
    },
    {
      "epoch": 5.71,
      "learning_rate": 8.59744990892532e-06,
      "loss": 5.9506,
      "step": 47000
    },
    {
      "epoch": 5.77,
      "learning_rate": 8.36976320582878e-06,
      "loss": 5.9511,
      "step": 47500
    },
    {
      "epoch": 5.83,
      "learning_rate": 8.14207650273224e-06,
      "loss": 5.9514,
      "step": 48000
    },
    {
      "epoch": 5.89,
      "learning_rate": 7.914389799635702e-06,
      "loss": 5.9506,
      "step": 48500
    },
    {
      "epoch": 5.95,
      "learning_rate": 7.686703096539163e-06,
      "loss": 5.9503,
      "step": 49000
    },
    {
      "epoch": 6.01,
      "learning_rate": 7.459016393442623e-06,
      "loss": 5.9505,
      "step": 49500
    },
    {
      "epoch": 6.07,
      "learning_rate": 7.2313296903460846e-06,
      "loss": 5.9509,
      "step": 50000
    },
    {
      "epoch": 6.13,
      "learning_rate": 7.003642987249545e-06,
      "loss": 5.9507,
      "step": 50500
    },
    {
      "epoch": 6.19,
      "learning_rate": 6.775956284153006e-06,
      "loss": 5.9506,
      "step": 51000
    },
    {
      "epoch": 6.25,
      "learning_rate": 6.548269581056467e-06,
      "loss": 5.9508,
      "step": 51500
    },
    {
      "epoch": 6.31,
      "learning_rate": 6.320582877959928e-06,
      "loss": 5.9508,
      "step": 52000
    },
    {
      "epoch": 6.38,
      "learning_rate": 6.092896174863388e-06,
      "loss": 5.9508,
      "step": 52500
    },
    {
      "epoch": 6.44,
      "learning_rate": 5.865209471766849e-06,
      "loss": 5.9509,
      "step": 53000
    },
    {
      "epoch": 6.5,
      "learning_rate": 5.63752276867031e-06,
      "loss": 5.951,
      "step": 53500
    },
    {
      "epoch": 6.56,
      "learning_rate": 5.409836065573771e-06,
      "loss": 5.9509,
      "step": 54000
    },
    {
      "epoch": 6.62,
      "learning_rate": 5.182149362477231e-06,
      "loss": 5.9505,
      "step": 54500
    },
    {
      "epoch": 6.68,
      "learning_rate": 4.954462659380692e-06,
      "loss": 5.9511,
      "step": 55000
    },
    {
      "epoch": 6.74,
      "learning_rate": 4.726775956284153e-06,
      "loss": 5.951,
      "step": 55500
    },
    {
      "epoch": 6.8,
      "learning_rate": 4.499089253187614e-06,
      "loss": 5.9504,
      "step": 56000
    },
    {
      "epoch": 6.86,
      "learning_rate": 4.271402550091074e-06,
      "loss": 5.9507,
      "step": 56500
    },
    {
      "epoch": 6.92,
      "learning_rate": 4.043715846994535e-06,
      "loss": 5.9507,
      "step": 57000
    },
    {
      "epoch": 6.98,
      "learning_rate": 3.816029143897996e-06,
      "loss": 5.9506,
      "step": 57500
    },
    {
      "epoch": 7.04,
      "learning_rate": 3.5883424408014573e-06,
      "loss": 5.9504,
      "step": 58000
    },
    {
      "epoch": 7.1,
      "learning_rate": 3.3606557377049183e-06,
      "loss": 5.9508,
      "step": 58500
    },
    {
      "epoch": 7.16,
      "learning_rate": 3.132969034608379e-06,
      "loss": 5.9505,
      "step": 59000
    },
    {
      "epoch": 7.23,
      "learning_rate": 2.90528233151184e-06,
      "loss": 5.9507,
      "step": 59500
    },
    {
      "epoch": 7.29,
      "learning_rate": 2.6775956284153005e-06,
      "loss": 5.9513,
      "step": 60000
    },
    {
      "epoch": 7.35,
      "learning_rate": 2.4499089253187615e-06,
      "loss": 5.9508,
      "step": 60500
    },
    {
      "epoch": 7.41,
      "learning_rate": 2.222222222222222e-06,
      "loss": 5.9508,
      "step": 61000
    },
    {
      "epoch": 7.47,
      "learning_rate": 1.994535519125683e-06,
      "loss": 5.9507,
      "step": 61500
    },
    {
      "epoch": 7.53,
      "learning_rate": 1.766848816029144e-06,
      "loss": 5.9506,
      "step": 62000
    },
    {
      "epoch": 7.59,
      "learning_rate": 1.5391621129326049e-06,
      "loss": 5.951,
      "step": 62500
    },
    {
      "epoch": 7.65,
      "learning_rate": 1.3114754098360657e-06,
      "loss": 5.9504,
      "step": 63000
    },
    {
      "epoch": 7.71,
      "learning_rate": 1.0837887067395265e-06,
      "loss": 5.9508,
      "step": 63500
    },
    {
      "epoch": 7.77,
      "learning_rate": 8.561020036429873e-07,
      "loss": 5.9508,
      "step": 64000
    },
    {
      "epoch": 7.83,
      "learning_rate": 6.284153005464482e-07,
      "loss": 5.9509,
      "step": 64500
    },
    {
      "epoch": 7.89,
      "learning_rate": 4.0072859744990897e-07,
      "loss": 5.9506,
      "step": 65000
    },
    {
      "epoch": 7.95,
      "learning_rate": 1.7304189435336977e-07,
      "loss": 5.9511,
      "step": 65500
    },
    {
      "epoch": 8.0,
      "step": 65880,
      "total_flos": 8.113569971306496e+17,
      "train_runtime": 71825.285,
      "train_samples_per_second": 14.675,
      "train_steps_per_second": 0.917
    }
  ],
  "max_steps": 65880,
  "num_train_epochs": 8,
  "total_flos": 8.113569971306496e+17,
  "trial_name": null,
  "trial_params": null
}