{
  "best_metric": 0.90963293,
  "best_model_checkpoint": "/volume/output/run2/checkpoint-252560",
  "epoch": 20.0,
  "eval_steps": 500,
  "global_step": 252560,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05004751346214761,
      "grad_norm": 1.831141710281372,
      "learning_rate": 9.974976243268926e-05,
      "loss": 1.5344,
      "step": 632
    },
    {
      "epoch": 0.10009502692429521,
      "grad_norm": 3.449512004852295,
      "learning_rate": 9.949952486537853e-05,
      "loss": 0.3225,
      "step": 1264
    },
    {
      "epoch": 0.1501425403864428,
      "grad_norm": 1.8513766527175903,
      "learning_rate": 9.92492872980678e-05,
      "loss": 0.2466,
      "step": 1896
    },
    {
      "epoch": 0.20019005384859043,
      "grad_norm": 1.0473196506500244,
      "learning_rate": 9.899904973075705e-05,
      "loss": 0.2176,
      "step": 2528
    },
    {
      "epoch": 0.25023756731073804,
      "grad_norm": 1.3224372863769531,
      "learning_rate": 9.874881216344632e-05,
      "loss": 0.1852,
      "step": 3160
    },
    {
      "epoch": 0.3002850807728856,
      "grad_norm": 1.1163212060928345,
      "learning_rate": 9.849857459613557e-05,
      "loss": 0.1725,
      "step": 3792
    },
    {
      "epoch": 0.35033259423503327,
      "grad_norm": 0.2766059339046478,
      "learning_rate": 9.824833702882485e-05,
      "loss": 0.1613,
      "step": 4424
    },
    {
      "epoch": 0.40038010769718085,
      "grad_norm": 1.0185670852661133,
      "learning_rate": 9.79980994615141e-05,
      "loss": 0.1345,
      "step": 5056
    },
    {
      "epoch": 0.4504276211593285,
      "grad_norm": 1.164562702178955,
      "learning_rate": 9.774786189420336e-05,
      "loss": 0.1364,
      "step": 5688
    },
    {
      "epoch": 0.5004751346214761,
      "grad_norm": 1.1677042245864868,
      "learning_rate": 9.749762432689263e-05,
      "loss": 0.1277,
      "step": 6320
    },
    {
      "epoch": 0.5505226480836237,
      "grad_norm": 3.836665153503418,
      "learning_rate": 9.724738675958188e-05,
      "loss": 0.1254,
      "step": 6952
    },
    {
      "epoch": 0.6005701615457713,
      "grad_norm": 2.3849477767944336,
      "learning_rate": 9.699714919227115e-05,
      "loss": 0.1188,
      "step": 7584
    },
    {
      "epoch": 0.6506176750079189,
      "grad_norm": 3.4638051986694336,
      "learning_rate": 9.674691162496041e-05,
      "loss": 0.1159,
      "step": 8216
    },
    {
      "epoch": 0.7006651884700665,
      "grad_norm": 0.2078377604484558,
      "learning_rate": 9.649667405764967e-05,
      "loss": 0.1097,
      "step": 8848
    },
    {
      "epoch": 0.7507127019322142,
      "grad_norm": 2.513908624649048,
      "learning_rate": 9.624643649033894e-05,
      "loss": 0.1073,
      "step": 9480
    },
    {
      "epoch": 0.8007602153943617,
      "grad_norm": 1.9936473369598389,
      "learning_rate": 9.599619892302819e-05,
      "loss": 0.1006,
      "step": 10112
    },
    {
      "epoch": 0.8508077288565093,
      "grad_norm": 1.9626480340957642,
      "learning_rate": 9.574596135571746e-05,
      "loss": 0.1019,
      "step": 10744
    },
    {
      "epoch": 0.900855242318657,
      "grad_norm": 1.7610087394714355,
      "learning_rate": 9.549572378840672e-05,
      "loss": 0.0898,
      "step": 11376
    },
    {
      "epoch": 0.9509027557808045,
      "grad_norm": 1.6743286848068237,
      "learning_rate": 9.524548622109598e-05,
      "loss": 0.0912,
      "step": 12008
    },
    {
      "epoch": 1.0,
      "eval_gen_len": 12.34557138,
      "eval_loss": 0.06976257264614105,
      "eval_runtime": 538.4523,
      "eval_samples_per_second": 98.843,
      "eval_steps_per_second": 1.545,
      "eval_wordacc": 0.8983691,
      "eval_wordacc_oov": 0.84212982,
      "step": 12628
    },
    {
      "epoch": 1.0009502692429522,
      "grad_norm": 0.813258707523346,
      "learning_rate": 9.499524865378525e-05,
      "loss": 0.0908,
      "step": 12640
    },
    {
      "epoch": 1.0509977827050998,
      "grad_norm": 1.4410091638565063,
      "learning_rate": 9.474501108647451e-05,
      "loss": 0.0864,
      "step": 13272
    },
    {
      "epoch": 1.1010452961672474,
      "grad_norm": 0.1239413172006607,
      "learning_rate": 9.449477351916377e-05,
      "loss": 0.0819,
      "step": 13904
    },
    {
      "epoch": 1.151092809629395,
      "grad_norm": 0.41668111085891724,
      "learning_rate": 9.424453595185304e-05,
      "loss": 0.0872,
      "step": 14536
    },
    {
      "epoch": 1.2011403230915425,
      "grad_norm": 0.2707739472389221,
      "learning_rate": 9.399429838454229e-05,
      "loss": 0.082,
      "step": 15168
    },
    {
      "epoch": 1.2511878365536901,
      "grad_norm": 0.5811319351196289,
      "learning_rate": 9.374406081723154e-05,
      "loss": 0.0847,
      "step": 15800
    },
    {
      "epoch": 1.3012353500158378,
      "grad_norm": 1.7169886827468872,
      "learning_rate": 9.349382324992082e-05,
      "loss": 0.0817,
      "step": 16432
    },
    {
      "epoch": 1.3512828634779854,
      "grad_norm": 0.11871356517076492,
      "learning_rate": 9.324358568261008e-05,
      "loss": 0.0884,
      "step": 17064
    },
    {
      "epoch": 1.401330376940133,
      "grad_norm": 1.614180326461792,
      "learning_rate": 9.299334811529935e-05,
      "loss": 0.0781,
      "step": 17696
    },
    {
      "epoch": 1.4513778904022807,
      "grad_norm": 0.5827309489250183,
      "learning_rate": 9.27431105479886e-05,
      "loss": 0.0722,
      "step": 18328
    },
    {
      "epoch": 1.5014254038644284,
      "grad_norm": 0.09628592431545258,
      "learning_rate": 9.249287298067785e-05,
      "loss": 0.0815,
      "step": 18960
    },
    {
      "epoch": 1.551472917326576,
      "grad_norm": 1.4028997421264648,
      "learning_rate": 9.224263541336713e-05,
      "loss": 0.0758,
      "step": 19592
    },
    {
      "epoch": 1.6015204307887236,
      "grad_norm": 0.37902089953422546,
      "learning_rate": 9.199239784605639e-05,
      "loss": 0.0772,
      "step": 20224
    },
    {
      "epoch": 1.651567944250871,
      "grad_norm": 0.5193475484848022,
      "learning_rate": 9.174216027874564e-05,
      "loss": 0.0731,
      "step": 20856
    },
    {
      "epoch": 1.7016154577130187,
      "grad_norm": 1.5854244232177734,
      "learning_rate": 9.149192271143491e-05,
      "loss": 0.0704,
      "step": 21488
    },
    {
      "epoch": 1.7516629711751663,
      "grad_norm": 2.4236505031585693,
      "learning_rate": 9.124168514412418e-05,
      "loss": 0.0733,
      "step": 22120
    },
    {
      "epoch": 1.8017104846373138,
      "grad_norm": 0.610543429851532,
      "learning_rate": 9.099144757681343e-05,
      "loss": 0.0712,
      "step": 22752
    },
    {
      "epoch": 1.8517579980994614,
      "grad_norm": 0.8024447560310364,
      "learning_rate": 9.07412100095027e-05,
      "loss": 0.0758,
      "step": 23384
    },
    {
      "epoch": 1.901805511561609,
      "grad_norm": 0.1649412214756012,
      "learning_rate": 9.049097244219195e-05,
      "loss": 0.0736,
      "step": 24016
    },
    {
      "epoch": 1.9518530250237567,
      "grad_norm": 0.15900301933288574,
      "learning_rate": 9.024073487488122e-05,
      "loss": 0.0746,
      "step": 24648
    },
    {
      "epoch": 2.0,
      "eval_gen_len": 12.34418098,
      "eval_loss": 0.057007092982530594,
      "eval_runtime": 547.57,
      "eval_samples_per_second": 97.197,
      "eval_steps_per_second": 1.519,
      "eval_wordacc": 0.91236707,
      "eval_wordacc_oov": 0.85835753,
      "step": 25256
    },
    {
      "epoch": 2.0019005384859043,
      "grad_norm": 1.2868945598602295,
      "learning_rate": 8.999049730757049e-05,
      "loss": 0.0676,
      "step": 25280
    },
    {
      "epoch": 2.051948051948052,
      "grad_norm": 0.5984334945678711,
      "learning_rate": 8.974025974025974e-05,
      "loss": 0.0627,
      "step": 25912
    },
    {
      "epoch": 2.1019955654101996,
      "grad_norm": 0.043390534818172455,
      "learning_rate": 8.949002217294901e-05,
      "loss": 0.0583,
      "step": 26544
    },
    {
      "epoch": 2.1520430788723472,
      "grad_norm": 0.544601321220398,
      "learning_rate": 8.923978460563826e-05,
      "loss": 0.0614,
      "step": 27176
    },
    {
      "epoch": 2.202090592334495,
      "grad_norm": 0.22555088996887207,
      "learning_rate": 8.898954703832753e-05,
      "loss": 0.0713,
      "step": 27808
    },
    {
      "epoch": 2.2521381057966425,
      "grad_norm": 2.1508424282073975,
      "learning_rate": 8.87393094710168e-05,
      "loss": 0.071,
      "step": 28440
    },
    {
      "epoch": 2.30218561925879,
      "grad_norm": 0.2992984354496002,
      "learning_rate": 8.848907190370605e-05,
      "loss": 0.0655,
      "step": 29072
    },
    {
      "epoch": 2.3522331327209374,
      "grad_norm": 1.4744491577148438,
      "learning_rate": 8.823883433639532e-05,
      "loss": 0.0697,
      "step": 29704
    },
    {
      "epoch": 2.402280646183085,
      "grad_norm": 0.6834865808486938,
      "learning_rate": 8.798859676908457e-05,
      "loss": 0.0661,
      "step": 30336
    },
    {
      "epoch": 2.4523281596452327,
      "grad_norm": 0.06805714964866638,
      "learning_rate": 8.773835920177384e-05,
      "loss": 0.0672,
      "step": 30968
    },
    {
      "epoch": 2.5023756731073803,
      "grad_norm": 1.880346655845642,
      "learning_rate": 8.748812163446311e-05,
      "loss": 0.063,
      "step": 31600
    },
    {
      "epoch": 2.552423186569528,
      "grad_norm": 0.5247331857681274,
      "learning_rate": 8.723788406715236e-05,
      "loss": 0.0621,
      "step": 32232
    },
    {
      "epoch": 2.6024707000316756,
      "grad_norm": 0.15831807255744934,
      "learning_rate": 8.698764649984163e-05,
      "loss": 0.0653,
      "step": 32864
    },
    {
      "epoch": 2.652518213493823,
      "grad_norm": 0.04121825844049454,
      "learning_rate": 8.673740893253088e-05,
      "loss": 0.0642,
      "step": 33496
    },
    {
      "epoch": 2.702565726955971,
      "grad_norm": 0.3549499809741974,
      "learning_rate": 8.648717136522015e-05,
      "loss": 0.0679,
      "step": 34128
    },
    {
      "epoch": 2.7526132404181185,
      "grad_norm": 0.16000640392303467,
      "learning_rate": 8.623693379790942e-05,
      "loss": 0.0597,
      "step": 34760
    },
    {
      "epoch": 2.802660753880266,
      "grad_norm": 0.6360165476799011,
      "learning_rate": 8.598669623059867e-05,
      "loss": 0.0666,
      "step": 35392
    },
    {
      "epoch": 2.852708267342414,
      "grad_norm": 0.4808698296546936,
      "learning_rate": 8.573645866328793e-05,
      "loss": 0.0654,
      "step": 36024
    },
    {
      "epoch": 2.9027557808045614,
      "grad_norm": 1.7070688009262085,
      "learning_rate": 8.54862210959772e-05,
      "loss": 0.0645,
      "step": 36656
    },
    {
      "epoch": 2.952803294266709,
      "grad_norm": 0.3017909824848175,
      "learning_rate": 8.523598352866646e-05,
      "loss": 0.0622,
      "step": 37288
    },
    {
      "epoch": 3.0,
      "eval_gen_len": 12.35115178,
      "eval_loss": 0.04925922676920891,
      "eval_runtime": 519.5392,
      "eval_samples_per_second": 102.441,
      "eval_steps_per_second": 1.601,
      "eval_wordacc": 0.9194506,
      "eval_wordacc_oov": 0.87168187,
      "step": 37884
    },
    {
      "epoch": 3.0028508077288567,
      "grad_norm": 0.5821070671081543,
      "learning_rate": 8.498574596135573e-05,
      "loss": 0.0661,
      "step": 37920
    },
    {
      "epoch": 3.052898321191004,
      "grad_norm": 0.4475654363632202,
      "learning_rate": 8.473550839404498e-05,
      "loss": 0.0574,
      "step": 38552
    },
    {
      "epoch": 3.1029458346531515,
      "grad_norm": 1.4051363468170166,
      "learning_rate": 8.448527082673424e-05,
      "loss": 0.0614,
      "step": 39184
    },
    {
      "epoch": 3.152993348115299,
      "grad_norm": 0.5270406603813171,
      "learning_rate": 8.423503325942352e-05,
      "loss": 0.059,
      "step": 39816
    },
    {
      "epoch": 3.203040861577447,
      "grad_norm": 0.2516399621963501,
      "learning_rate": 8.398479569211277e-05,
      "loss": 0.0568,
      "step": 40448
    },
    {
      "epoch": 3.2530883750395945,
      "grad_norm": 2.4771833419799805,
      "learning_rate": 8.373455812480203e-05,
      "loss": 0.055,
      "step": 41080
    },
    {
      "epoch": 3.303135888501742,
      "grad_norm": 0.39709779620170593,
      "learning_rate": 8.348432055749129e-05,
      "loss": 0.0591,
      "step": 41712
    },
    {
      "epoch": 3.3531834019638898,
      "grad_norm": 0.2029147893190384,
      "learning_rate": 8.323408299018055e-05,
      "loss": 0.0572,
      "step": 42344
    },
    {
      "epoch": 3.4032309154260374,
      "grad_norm": 0.1706971675157547,
      "learning_rate": 8.298384542286983e-05,
      "loss": 0.0618,
      "step": 42976
    },
    {
      "epoch": 3.453278428888185,
      "grad_norm": 0.14079004526138306,
      "learning_rate": 8.273360785555908e-05,
      "loss": 0.0583,
      "step": 43608
    },
    {
      "epoch": 3.5033259423503327,
      "grad_norm": 0.8446473479270935,
      "learning_rate": 8.248337028824834e-05,
      "loss": 0.0582,
      "step": 44240
    },
    {
      "epoch": 3.5533734558124803,
      "grad_norm": 1.3202866315841675,
      "learning_rate": 8.22331327209376e-05,
      "loss": 0.0594,
      "step": 44872
    },
    {
      "epoch": 3.603420969274628,
      "grad_norm": 0.1289588212966919,
      "learning_rate": 8.198289515362686e-05,
      "loss": 0.0668,
      "step": 45504
    },
    {
      "epoch": 3.653468482736775,
      "grad_norm": 0.7715129852294922,
      "learning_rate": 8.173265758631612e-05,
      "loss": 0.056,
      "step": 46136
    },
    {
      "epoch": 3.703515996198923,
      "grad_norm": 0.8882943391799927,
      "learning_rate": 8.148242001900539e-05,
      "loss": 0.0585,
      "step": 46768
    },
    {
      "epoch": 3.7535635096610704,
      "grad_norm": 0.5238478183746338,
      "learning_rate": 8.123218245169465e-05,
      "loss": 0.0523,
      "step": 47400
    },
    {
      "epoch": 3.803611023123218,
      "grad_norm": 0.7119426727294922,
      "learning_rate": 8.098194488438391e-05,
      "loss": 0.0566,
      "step": 48032
    },
    {
      "epoch": 3.8536585365853657,
      "grad_norm": 1.7843942642211914,
      "learning_rate": 8.073170731707318e-05,
      "loss": 0.0577,
      "step": 48664
    },
    {
      "epoch": 3.9037060500475134,
      "grad_norm": 0.09263037890195847,
      "learning_rate": 8.048146974976244e-05,
      "loss": 0.0602,
      "step": 49296
    },
    {
      "epoch": 3.953753563509661,
      "grad_norm": 0.8220856785774231,
      "learning_rate": 8.02312321824517e-05,
      "loss": 0.0584,
      "step": 49928
    },
    {
      "epoch": 4.0,
      "eval_gen_len": 12.34404945,
      "eval_loss": 0.04651999473571777,
      "eval_runtime": 525.6595,
      "eval_samples_per_second": 101.248,
      "eval_steps_per_second": 1.583,
      "eval_wordacc": 0.92211867,
      "eval_wordacc_oov": 0.87494815,
      "step": 50512
    },
    {
      "epoch": 4.003801076971809,
      "grad_norm": 0.09420084208250046,
      "learning_rate": 7.998099461514096e-05,
      "loss": 0.0559,
      "step": 50560
    },
    {
      "epoch": 4.053848590433956,
      "grad_norm": 0.6216241121292114,
      "learning_rate": 7.973075704783022e-05,
      "loss": 0.0507,
      "step": 51192
    },
    {
      "epoch": 4.103896103896104,
      "grad_norm": 0.6072413921356201,
      "learning_rate": 7.948051948051949e-05,
      "loss": 0.0508,
      "step": 51824
    },
    {
      "epoch": 4.153943617358252,
      "grad_norm": 0.3261624872684479,
      "learning_rate": 7.923028191320875e-05,
      "loss": 0.0584,
      "step": 52456
    },
    {
      "epoch": 4.203991130820399,
      "grad_norm": 3.996793508529663,
      "learning_rate": 7.898004434589801e-05,
      "loss": 0.0542,
      "step": 53088
    },
    {
      "epoch": 4.254038644282547,
      "grad_norm": 0.09758679568767548,
      "learning_rate": 7.872980677858727e-05,
      "loss": 0.0543,
      "step": 53720
    },
    {
      "epoch": 4.3040861577446945,
      "grad_norm": 0.8341479897499084,
      "learning_rate": 7.847956921127652e-05,
      "loss": 0.06,
      "step": 54352
    },
    {
      "epoch": 4.354133671206842,
      "grad_norm": 0.3256041407585144,
      "learning_rate": 7.82293316439658e-05,
      "loss": 0.0547,
      "step": 54984
    },
    {
      "epoch": 4.40418118466899,
      "grad_norm": 0.8251773715019226,
      "learning_rate": 7.797909407665506e-05,
      "loss": 0.0563,
      "step": 55616
    },
    {
      "epoch": 4.454228698131137,
      "grad_norm": 0.23588858544826508,
      "learning_rate": 7.772885650934432e-05,
      "loss": 0.0566,
      "step": 56248
    },
    {
      "epoch": 4.504276211593285,
      "grad_norm": 0.8622148633003235,
      "learning_rate": 7.747861894203358e-05,
      "loss": 0.0511,
      "step": 56880
    },
    {
      "epoch": 4.554323725055433,
      "grad_norm": 0.29321447014808655,
      "learning_rate": 7.722838137472284e-05,
      "loss": 0.0502,
      "step": 57512
    },
    {
      "epoch": 4.60437123851758,
      "grad_norm": 1.1889938116073608,
      "learning_rate": 7.697814380741211e-05,
      "loss": 0.0525,
      "step": 58144
    },
    {
      "epoch": 4.654418751979728,
      "grad_norm": 0.5421351790428162,
      "learning_rate": 7.672790624010137e-05,
      "loss": 0.0509,
      "step": 58776
    },
    {
      "epoch": 4.704466265441875,
      "grad_norm": 0.4371638000011444,
      "learning_rate": 7.647766867279062e-05,
      "loss": 0.0548,
      "step": 59408
    },
    {
      "epoch": 4.754513778904023,
      "grad_norm": 0.06439998745918274,
      "learning_rate": 7.622743110547989e-05,
      "loss": 0.0553,
      "step": 60040
    },
    {
      "epoch": 4.80456129236617,
      "grad_norm": 0.8337986469268799,
      "learning_rate": 7.597719353816916e-05,
      "loss": 0.0539,
      "step": 60672
    },
    {
      "epoch": 4.854608805828318,
      "grad_norm": 0.5551128387451172,
      "learning_rate": 7.572695597085841e-05,
      "loss": 0.0531,
      "step": 61304
    },
    {
      "epoch": 4.904656319290465,
      "grad_norm": 0.842311680316925,
      "learning_rate": 7.547671840354768e-05,
      "loss": 0.053,
      "step": 61936
    },
    {
      "epoch": 4.954703832752613,
      "grad_norm": 0.6434153318405151,
      "learning_rate": 7.522648083623693e-05,
      "loss": 0.0497,
      "step": 62568
    },
    {
      "epoch": 5.0,
      "eval_gen_len": 12.35519146,
      "eval_loss": 0.04361514747142792,
      "eval_runtime": 525.1326,
      "eval_samples_per_second": 101.35,
      "eval_steps_per_second": 1.584,
      "eval_wordacc": 0.92741723,
      "eval_wordacc_oov": 0.88205102,
      "step": 63140
    },
    {
      "epoch": 5.004751346214761,
      "grad_norm": 0.11094748228788376,
      "learning_rate": 7.49762432689262e-05,
      "loss": 0.0509,
      "step": 63200
    },
    {
      "epoch": 5.054798859676908,
      "grad_norm": 0.46878868341445923,
      "learning_rate": 7.472600570161547e-05,
      "loss": 0.0445,
      "step": 63832
    },
    {
      "epoch": 5.104846373139056,
      "grad_norm": 1.7899694442749023,
      "learning_rate": 7.447576813430472e-05,
      "loss": 0.045,
      "step": 64464
    },
    {
      "epoch": 5.1548938866012035,
      "grad_norm": 0.03743477538228035,
      "learning_rate": 7.422553056699399e-05,
      "loss": 0.0538,
      "step": 65096
    },
    {
      "epoch": 5.204941400063351,
      "grad_norm": 0.9636221528053284,
      "learning_rate": 7.397529299968324e-05,
      "loss": 0.0444,
      "step": 65728
    },
    {
      "epoch": 5.254988913525499,
      "grad_norm": 0.1024821326136589,
      "learning_rate": 7.372505543237251e-05,
      "loss": 0.0543,
      "step": 66360
    },
    {
      "epoch": 5.305036426987646,
      "grad_norm": 0.6220707297325134,
      "learning_rate": 7.347481786506178e-05,
      "loss": 0.0537,
      "step": 66992
    },
    {
      "epoch": 5.355083940449794,
      "grad_norm": 0.13747639954090118,
      "learning_rate": 7.322458029775103e-05,
      "loss": 0.0443,
      "step": 67624
    },
    {
      "epoch": 5.405131453911942,
      "grad_norm": 0.25481894612312317,
      "learning_rate": 7.29743427304403e-05,
      "loss": 0.0544,
      "step": 68256
    },
    {
      "epoch": 5.455178967374089,
      "grad_norm": 0.40640395879745483,
      "learning_rate": 7.272410516312955e-05,
      "loss": 0.0501,
      "step": 68888
    },
    {
      "epoch": 5.505226480836237,
      "grad_norm": 0.017994888126850128,
      "learning_rate": 7.247386759581882e-05,
      "loss": 0.0497,
      "step": 69520
    },
    {
      "epoch": 5.555273994298385,
      "grad_norm": 0.7399475574493408,
      "learning_rate": 7.222363002850809e-05,
      "loss": 0.0503,
      "step": 70152
    },
    {
      "epoch": 5.605321507760532,
      "grad_norm": 0.1965421885251999,
      "learning_rate": 7.197339246119734e-05,
      "loss": 0.0494,
      "step": 70784
    },
    {
      "epoch": 5.65536902122268,
      "grad_norm": 0.603735625743866,
      "learning_rate": 7.172315489388661e-05,
      "loss": 0.0492,
      "step": 71416
    },
    {
      "epoch": 5.705416534684828,
      "grad_norm": 0.4098168909549713,
      "learning_rate": 7.147291732657586e-05,
      "loss": 0.0496,
      "step": 72048
    },
    {
      "epoch": 5.755464048146975,
      "grad_norm": 0.026117555797100067,
      "learning_rate": 7.122267975926513e-05,
      "loss": 0.05,
      "step": 72680
    },
    {
      "epoch": 5.805511561609123,
      "grad_norm": 0.03871222585439682,
      "learning_rate": 7.09724421919544e-05,
      "loss": 0.0508,
      "step": 73312
    },
    {
      "epoch": 5.8555590750712705,
      "grad_norm": 1.100329041481018,
      "learning_rate": 7.072220462464365e-05,
      "loss": 0.0503,
      "step": 73944
    },
    {
      "epoch": 5.905606588533418,
      "grad_norm": 0.8121901154518127,
      "learning_rate": 7.04719670573329e-05,
      "loss": 0.0468,
      "step": 74576
    },
    {
      "epoch": 5.955654101995566,
      "grad_norm": 0.6457042694091797,
      "learning_rate": 7.022172949002219e-05,
      "loss": 0.0502,
      "step": 75208
    },
    {
      "epoch": 6.0,
      "eval_gen_len": 12.35192214,
      "eval_loss": 0.041068777441978455,
      "eval_runtime": 522.1947,
      "eval_samples_per_second": 101.92,
      "eval_steps_per_second": 1.593,
      "eval_wordacc": 0.93108113,
      "eval_wordacc_oov": 0.88583575,
      "step": 75768
    },
    {
      "epoch": 6.005701615457713,
      "grad_norm": 0.20237529277801514,
      "learning_rate": 6.997149192271144e-05,
      "loss": 0.0496,
      "step": 75840
    },
    {
      "epoch": 6.055749128919861,
      "grad_norm": 0.10558341443538666,
      "learning_rate": 6.97212543554007e-05,
      "loss": 0.0422,
      "step": 76472
    },
    {
      "epoch": 6.105796642382008,
      "grad_norm": 0.38815170526504517,
      "learning_rate": 6.947101678808996e-05,
      "loss": 0.0468,
      "step": 77104
    },
    {
      "epoch": 6.1558441558441555,
      "grad_norm": 0.042554233223199844,
      "learning_rate": 6.922077922077921e-05,
      "loss": 0.0446,
      "step": 77736
    },
    {
      "epoch": 6.205891669306303,
      "grad_norm": 1.2894216775894165,
      "learning_rate": 6.89705416534685e-05,
      "loss": 0.0462,
      "step": 78368
    },
    {
      "epoch": 6.255939182768451,
      "grad_norm": 0.7259889841079712,
      "learning_rate": 6.872030408615775e-05,
      "loss": 0.045,
      "step": 79000
    },
    {
      "epoch": 6.305986696230598,
      "grad_norm": 0.5286532044410706,
      "learning_rate": 6.8470066518847e-05,
      "loss": 0.0446,
      "step": 79632
    },
    {
      "epoch": 6.356034209692746,
      "grad_norm": 0.20671215653419495,
      "learning_rate": 6.821982895153627e-05,
      "loss": 0.0442,
      "step": 80264
    },
    {
      "epoch": 6.406081723154894,
      "grad_norm": 0.2522045969963074,
      "learning_rate": 6.796959138422552e-05,
      "loss": 0.0455,
      "step": 80896
    },
    {
      "epoch": 6.456129236617041,
      "grad_norm": 0.09235669672489166,
      "learning_rate": 6.771935381691479e-05,
      "loss": 0.0463,
      "step": 81528
    },
    {
      "epoch": 6.506176750079189,
      "grad_norm": 1.3369249105453491,
      "learning_rate": 6.746911624960406e-05,
      "loss": 0.0467,
      "step": 82160
    },
    {
      "epoch": 6.556224263541337,
      "grad_norm": 0.1361207365989685,
      "learning_rate": 6.721887868229331e-05,
      "loss": 0.0464,
      "step": 82792
    },
    {
      "epoch": 6.606271777003484,
      "grad_norm": 0.057785116136074066,
      "learning_rate": 6.696864111498258e-05,
      "loss": 0.0471,
      "step": 83424
    },
    {
      "epoch": 6.656319290465632,
      "grad_norm": 0.5493625998497009,
      "learning_rate": 6.671840354767185e-05,
      "loss": 0.0493,
      "step": 84056
    },
    {
      "epoch": 6.7063668039277795,
      "grad_norm": 0.08289259672164917,
      "learning_rate": 6.64681659803611e-05,
      "loss": 0.0491,
      "step": 84688
    },
    {
      "epoch": 6.756414317389927,
      "grad_norm": 0.08197712898254395,
      "learning_rate": 6.621792841305037e-05,
      "loss": 0.0487,
      "step": 85320
    },
    {
      "epoch": 6.806461830852075,
      "grad_norm": 0.1574297547340393,
      "learning_rate": 6.596769084573962e-05,
      "loss": 0.042,
      "step": 85952
    },
    {
      "epoch": 6.856509344314222,
      "grad_norm": 0.07021531462669373,
      "learning_rate": 6.571745327842889e-05,
      "loss": 0.0469,
      "step": 86584
    },
    {
      "epoch": 6.90655685777637,
      "grad_norm": 0.5342025756835938,
      "learning_rate": 6.546721571111816e-05,
      "loss": 0.0458,
      "step": 87216
    },
    {
      "epoch": 6.956604371238518,
      "grad_norm": 0.754435658454895,
      "learning_rate": 6.521697814380741e-05,
      "loss": 0.0428,
      "step": 87848
    },
    {
      "epoch": 7.0,
      "eval_gen_len": 12.34435008,
      "eval_loss": 0.039582036435604095,
      "eval_runtime": 513.2393,
      "eval_samples_per_second": 103.698,
      "eval_steps_per_second": 1.621,
      "eval_wordacc": 0.93356131,
      "eval_wordacc_oov": 0.88780589,
      "step": 88396
    },
    {
      "epoch": 7.006651884700665,
      "grad_norm": 1.7553069591522217,
      "learning_rate": 6.496674057649668e-05,
      "loss": 0.0429,
      "step": 88480
    },
    {
      "epoch": 7.056699398162813,
      "grad_norm": 0.15994039177894592,
      "learning_rate": 6.471650300918593e-05,
      "loss": 0.0451,
      "step": 89112
    },
    {
      "epoch": 7.106746911624961,
      "grad_norm": 0.21786805987358093,
      "learning_rate": 6.44662654418752e-05,
      "loss": 0.0415,
      "step": 89744
    },
    {
      "epoch": 7.156794425087108,
      "grad_norm": 0.07405902445316315,
      "learning_rate": 6.421602787456447e-05,
      "loss": 0.0408,
      "step": 90376
    },
    {
      "epoch": 7.206841938549256,
      "grad_norm": 0.1853848397731781,
      "learning_rate": 6.396579030725372e-05,
      "loss": 0.0415,
      "step": 91008
    },
    {
      "epoch": 7.256889452011404,
      "grad_norm": 0.41366642713546753,
      "learning_rate": 6.371555273994299e-05,
      "loss": 0.046,
      "step": 91640
    },
    {
      "epoch": 7.306936965473551,
      "grad_norm": 0.2615118622779846,
      "learning_rate": 6.346531517263224e-05,
      "loss": 0.0413,
      "step": 92272
    },
    {
      "epoch": 7.356984478935699,
      "grad_norm": 0.06805741786956787,
      "learning_rate": 6.321507760532151e-05,
      "loss": 0.0436,
      "step": 92904
    },
    {
      "epoch": 7.407031992397846,
      "grad_norm": 1.3070762157440186,
      "learning_rate": 6.296484003801078e-05,
      "loss": 0.0464,
      "step": 93536
    },
    {
      "epoch": 7.457079505859994,
      "grad_norm": 0.11481507122516632,
      "learning_rate": 6.271460247070003e-05,
      "loss": 0.0442,
      "step": 94168
    },
    {
      "epoch": 7.507127019322141,
      "grad_norm": 0.9575181603431702,
      "learning_rate": 6.246436490338929e-05,
      "loss": 0.043,
      "step": 94800
    },
    {
      "epoch": 7.5571745327842885,
      "grad_norm": 0.2559140920639038,
      "learning_rate": 6.221412733607856e-05,
      "loss": 0.0375,
      "step": 95432
    },
    {
      "epoch": 7.607222046246436,
      "grad_norm": 0.043967317789793015,
      "learning_rate": 6.196388976876782e-05,
      "loss": 0.0412,
      "step": 96064
    },
    {
      "epoch": 7.657269559708584,
      "grad_norm": 0.5306654572486877,
      "learning_rate": 6.171365220145709e-05,
      "loss": 0.0449,
      "step": 96696
    },
    {
      "epoch": 7.7073170731707314,
      "grad_norm": 0.03713352233171463,
      "learning_rate": 6.146341463414634e-05,
      "loss": 0.0443,
      "step": 97328
    },
    {
      "epoch": 7.757364586632879,
      "grad_norm": 0.6375567317008972,
      "learning_rate": 6.12131770668356e-05,
      "loss": 0.0441,
      "step": 97960
    },
    {
      "epoch": 7.807412100095027,
      "grad_norm": 0.9398515224456787,
      "learning_rate": 6.096293949952487e-05,
      "loss": 0.0421,
      "step": 98592
    },
    {
      "epoch": 7.857459613557174,
      "grad_norm": 0.6615290641784668,
      "learning_rate": 6.071270193221413e-05,
      "loss": 0.0468,
      "step": 99224
    },
    {
      "epoch": 7.907507127019322,
      "grad_norm": 0.48468518257141113,
      "learning_rate": 6.0462464364903394e-05,
      "loss": 0.0427,
      "step": 99856
    },
    {
      "epoch": 7.95755464048147,
      "grad_norm": 0.967910647392273,
      "learning_rate": 6.0212226797592654e-05,
      "loss": 0.0416,
      "step": 100488
    },
    {
      "epoch": 8.0,
      "eval_gen_len": 12.34714967,
      "eval_loss": 0.0372321754693985,
      "eval_runtime": 517.1292,
      "eval_samples_per_second": 102.918,
      "eval_steps_per_second": 1.609,
      "eval_wordacc": 0.93393709,
      "eval_wordacc_oov": 0.88868727,
      "step": 101024
    },
    {
      "epoch": 8.007602153943617,
      "grad_norm": 0.5545419454574585,
      "learning_rate": 5.9961989230281915e-05,
      "loss": 0.0447,
      "step": 101120
    },
    {
      "epoch": 8.057649667405766,
      "grad_norm": 1.7096141576766968,
      "learning_rate": 5.971175166297118e-05,
      "loss": 0.038,
      "step": 101752
    },
    {
      "epoch": 8.107697180867913,
      "grad_norm": 0.06642602384090424,
      "learning_rate": 5.946151409566044e-05,
      "loss": 0.0395,
      "step": 102384
    },
    {
      "epoch": 8.15774469433006,
      "grad_norm": 0.15396763384342194,
      "learning_rate": 5.9211276528349704e-05,
      "loss": 0.0411,
      "step": 103016
    },
    {
      "epoch": 8.207792207792208,
      "grad_norm": 1.0655204057693481,
      "learning_rate": 5.8961038961038965e-05,
      "loss": 0.0416,
      "step": 103648
    },
    {
      "epoch": 8.257839721254355,
      "grad_norm": 0.42243492603302,
      "learning_rate": 5.871080139372822e-05,
      "loss": 0.0445,
      "step": 104280
    },
    {
      "epoch": 8.307887234716503,
      "grad_norm": 0.05212310701608658,
      "learning_rate": 5.846056382641749e-05,
      "loss": 0.0392,
      "step": 104912
    },
    {
      "epoch": 8.35793474817865,
      "grad_norm": 0.6314841508865356,
      "learning_rate": 5.8210326259106754e-05,
      "loss": 0.0392,
      "step": 105544
    },
    {
      "epoch": 8.407982261640798,
      "grad_norm": 0.4014628827571869,
      "learning_rate": 5.796008869179601e-05,
      "loss": 0.0387,
      "step": 106176
    },
    {
      "epoch": 8.458029775102945,
      "grad_norm": 0.13107645511627197,
      "learning_rate": 5.770985112448527e-05,
      "loss": 0.0425,
      "step": 106808
    },
    {
      "epoch": 8.508077288565094,
      "grad_norm": 0.1390117108821869,
      "learning_rate": 5.745961355717454e-05,
      "loss": 0.038,
      "step": 107440
    },
    {
      "epoch": 8.55812480202724,
      "grad_norm": 0.1780831664800644,
      "learning_rate": 5.72093759898638e-05,
      "loss": 0.0397,
      "step": 108072
    },
    {
      "epoch": 8.608172315489389,
      "grad_norm": 0.8216772675514221,
      "learning_rate": 5.695913842255306e-05,
      "loss": 0.0377,
      "step": 108704
    },
    {
      "epoch": 8.658219828951536,
      "grad_norm": 0.040023334324359894,
      "learning_rate": 5.670890085524232e-05,
      "loss": 0.0395,
      "step": 109336
    },
    {
      "epoch": 8.708267342413684,
      "grad_norm": 0.7334257364273071,
      "learning_rate": 5.645866328793158e-05,
      "loss": 0.0401,
      "step": 109968
    },
    {
      "epoch": 8.758314855875831,
      "grad_norm": 0.09213205426931381,
      "learning_rate": 5.620842572062085e-05,
      "loss": 0.038,
      "step": 110600
    },
    {
      "epoch": 8.80836236933798,
      "grad_norm": 0.8264344930648804,
      "learning_rate": 5.595818815331011e-05,
      "loss": 0.0404,
      "step": 111232
    },
    {
      "epoch": 8.858409882800126,
      "grad_norm": 0.4497428238391876,
      "learning_rate": 5.570795058599937e-05,
      "loss": 0.0417,
      "step": 111864
    },
    {
      "epoch": 8.908457396262275,
      "grad_norm": 0.02390374056994915,
      "learning_rate": 5.545771301868863e-05,
      "loss": 0.0351,
      "step": 112496
    },
    {
      "epoch": 8.958504909724422,
      "grad_norm": 0.1745648831129074,
      "learning_rate": 5.520747545137789e-05,
      "loss": 0.042,
      "step": 113128
    },
    {
      "epoch": 9.0,
      "eval_gen_len": 12.3485025,
      "eval_loss": 0.03648155927658081,
      "eval_runtime": 514.9505,
      "eval_samples_per_second": 103.354,
      "eval_steps_per_second": 1.616,
      "eval_wordacc": 0.93964902,
      "eval_wordacc_oov": 0.89444214,
      "step": 113652
    },
    {
      "epoch": 9.00855242318657,
      "grad_norm": 0.7463224530220032,
      "learning_rate": 5.4957237884067156e-05,
      "loss": 0.0393,
      "step": 113760
    },
    {
      "epoch": 9.058599936648717,
      "grad_norm": 0.2374447137117386,
      "learning_rate": 5.470700031675642e-05,
      "loss": 0.0349,
      "step": 114392
    },
    {
      "epoch": 9.108647450110865,
      "grad_norm": 1.1859426498413086,
      "learning_rate": 5.445676274944568e-05,
      "loss": 0.039,
      "step": 115024
    },
    {
      "epoch": 9.158694963573012,
      "grad_norm": 0.03181586042046547,
      "learning_rate": 5.420652518213494e-05,
      "loss": 0.0354,
      "step": 115656
    },
    {
      "epoch": 9.20874247703516,
      "grad_norm": 0.0614316463470459,
      "learning_rate": 5.3956287614824206e-05,
      "loss": 0.0337,
      "step": 116288
    },
    {
      "epoch": 9.258789990497307,
      "grad_norm": 0.031755685806274414,
      "learning_rate": 5.370605004751347e-05,
      "loss": 0.0346,
      "step": 116920
    },
    {
      "epoch": 9.308837503959456,
      "grad_norm": 0.4263891875743866,
      "learning_rate": 5.345581248020273e-05,
      "loss": 0.0365,
      "step": 117552
    },
    {
      "epoch": 9.358885017421603,
      "grad_norm": 0.7516904473304749,
      "learning_rate": 5.320557491289199e-05,
      "loss": 0.0394,
      "step": 118184
    },
    {
      "epoch": 9.408932530883751,
      "grad_norm": 0.547282338142395,
      "learning_rate": 5.295533734558125e-05,
      "loss": 0.037,
      "step": 118816
    },
    {
      "epoch": 9.458980044345898,
      "grad_norm": 0.048598043620586395,
      "learning_rate": 5.2705099778270516e-05,
      "loss": 0.0396,
      "step": 119448
    },
    {
      "epoch": 9.509027557808047,
      "grad_norm": 0.019015343859791756,
      "learning_rate": 5.245486221095978e-05,
      "loss": 0.0377,
      "step": 120080
    },
    {
      "epoch": 9.559075071270193,
      "grad_norm": 0.42539820075035095,
      "learning_rate": 5.220462464364904e-05,
      "loss": 0.0361,
      "step": 120712
    },
    {
      "epoch": 9.60912258473234,
      "grad_norm": 0.5751402974128723,
      "learning_rate": 5.19543870763383e-05,
      "loss": 0.0407,
      "step": 121344
    },
    {
      "epoch": 9.659170098194489,
      "grad_norm": 1.380823016166687,
      "learning_rate": 5.170414950902755e-05,
      "loss": 0.0388,
      "step": 121976
    },
    {
      "epoch": 9.709217611656635,
      "grad_norm": 0.6849233508110046,
      "learning_rate": 5.145391194171683e-05,
      "loss": 0.0387,
      "step": 122608
    },
    {
      "epoch": 9.759265125118784,
      "grad_norm": 1.4156357049942017,
      "learning_rate": 5.120367437440609e-05,
      "loss": 0.0373,
      "step": 123240
    },
    {
      "epoch": 9.80931263858093,
      "grad_norm": 0.18062171339988708,
      "learning_rate": 5.095343680709535e-05,
      "loss": 0.0333,
      "step": 123872
    },
    {
      "epoch": 9.85936015204308,
      "grad_norm": 0.6566870212554932,
      "learning_rate": 5.07031992397846e-05,
      "loss": 0.0367,
      "step": 124504
    },
    {
      "epoch": 9.909407665505226,
      "grad_norm": 0.2741030156612396,
      "learning_rate": 5.0452961672473876e-05,
      "loss": 0.0426,
      "step": 125136
    },
    {
      "epoch": 9.959455178967374,
      "grad_norm": 0.7864658236503601,
      "learning_rate": 5.020272410516314e-05,
      "loss": 0.0376,
      "step": 125768
    },
    {
      "epoch": 10.0,
      "eval_gen_len": 12.34852129,
      "eval_loss": 0.03532838076353073,
      "eval_runtime": 515.5211,
      "eval_samples_per_second": 103.239,
      "eval_steps_per_second": 1.614,
      "eval_wordacc": 0.94117094,
      "eval_wordacc_oov": 0.89620489,
      "step": 126280
    },
    {
      "epoch": 10.009502692429521,
      "grad_norm": 0.08346331119537354,
      "learning_rate": 4.995248653785239e-05,
      "loss": 0.0358,
      "step": 126400
    },
    {
      "epoch": 10.05955020589167,
      "grad_norm": 0.1552925556898117,
      "learning_rate": 4.970224897054165e-05,
      "loss": 0.0286,
      "step": 127032
    },
    {
      "epoch": 10.109597719353816,
      "grad_norm": 0.017832357436418533,
      "learning_rate": 4.945201140323092e-05,
      "loss": 0.0342,
      "step": 127664
    },
    {
      "epoch": 10.159645232815965,
      "grad_norm": 0.7822960019111633,
      "learning_rate": 4.920177383592018e-05,
      "loss": 0.0342,
      "step": 128296
    },
    {
      "epoch": 10.209692746278112,
      "grad_norm": 0.8483818173408508,
      "learning_rate": 4.895153626860944e-05,
      "loss": 0.036,
      "step": 128928
    },
    {
      "epoch": 10.25974025974026,
      "grad_norm": 0.12484422326087952,
      "learning_rate": 4.87012987012987e-05,
      "loss": 0.0353,
      "step": 129560
    },
    {
      "epoch": 10.309787773202407,
      "grad_norm": 1.0866436958312988,
      "learning_rate": 4.845106113398797e-05,
      "loss": 0.0336,
      "step": 130192
    },
    {
      "epoch": 10.359835286664556,
      "grad_norm": 2.065387487411499,
      "learning_rate": 4.820082356667723e-05,
      "loss": 0.036,
      "step": 130824
    },
    {
      "epoch": 10.409882800126702,
      "grad_norm": 0.13544411957263947,
      "learning_rate": 4.795058599936649e-05,
      "loss": 0.0353,
      "step": 131456
    },
    {
      "epoch": 10.45993031358885,
      "grad_norm": 0.8644410967826843,
      "learning_rate": 4.770034843205575e-05,
      "loss": 0.0366,
      "step": 132088
    },
    {
      "epoch": 10.509977827050998,
      "grad_norm": 0.044758204370737076,
      "learning_rate": 4.745011086474501e-05,
      "loss": 0.0337,
      "step": 132720
    },
    {
      "epoch": 10.560025340513146,
      "grad_norm": 0.07954395562410355,
      "learning_rate": 4.719987329743428e-05,
      "loss": 0.038,
      "step": 133352
    },
    {
      "epoch": 10.610072853975293,
      "grad_norm": 0.15263523161411285,
      "learning_rate": 4.694963573012354e-05,
      "loss": 0.037,
      "step": 133984
    },
    {
      "epoch": 10.660120367437441,
      "grad_norm": 0.4090266227722168,
      "learning_rate": 4.66993981628128e-05,
      "loss": 0.0341,
      "step": 134616
    },
    {
      "epoch": 10.710167880899588,
      "grad_norm": 0.3766542077064514,
      "learning_rate": 4.644916059550206e-05,
      "loss": 0.0395,
      "step": 135248
    },
    {
      "epoch": 10.760215394361737,
      "grad_norm": 0.044228482991456985,
      "learning_rate": 4.619892302819132e-05,
      "loss": 0.0354,
      "step": 135880
    },
    {
      "epoch": 10.810262907823883,
      "grad_norm": 3.4171650409698486,
      "learning_rate": 4.594868546088059e-05,
      "loss": 0.0344,
      "step": 136512
    },
    {
      "epoch": 10.86031042128603,
      "grad_norm": 0.1112111434340477,
      "learning_rate": 4.569844789356984e-05,
      "loss": 0.0361,
      "step": 137144
    },
    {
      "epoch": 10.910357934748179,
      "grad_norm": 0.09063247591257095,
      "learning_rate": 4.544821032625911e-05,
      "loss": 0.0325,
      "step": 137776
    },
    {
      "epoch": 10.960405448210325,
      "grad_norm": 0.2144654095172882,
      "learning_rate": 4.519797275894837e-05,
      "loss": 0.031,
      "step": 138408
    },
    {
      "epoch": 11.0,
      "eval_gen_len": 12.35186577,
      "eval_loss": 0.03388630226254463,
      "eval_runtime": 519.1861,
      "eval_samples_per_second": 102.51,
      "eval_steps_per_second": 1.603,
      "eval_wordacc": 0.94387659,
      "eval_wordacc_oov": 0.90081916,
      "step": 138908
    },
    {
      "epoch": 11.010452961672474,
      "grad_norm": 0.9230628609657288,
      "learning_rate": 4.494773519163763e-05,
      "loss": 0.0336,
      "step": 139040
    },
    {
      "epoch": 11.06050047513462,
      "grad_norm": 0.04931863397359848,
      "learning_rate": 4.469749762432689e-05,
      "loss": 0.0332,
      "step": 139672
    },
    {
      "epoch": 11.11054798859677,
      "grad_norm": 0.43372172117233276,
      "learning_rate": 4.4447260057016154e-05,
      "loss": 0.0303,
      "step": 140304
    },
    {
      "epoch": 11.160595502058916,
      "grad_norm": 1.8216339349746704,
      "learning_rate": 4.419702248970542e-05,
      "loss": 0.0327,
      "step": 140936
    },
    {
      "epoch": 11.210643015521065,
      "grad_norm": 0.4168206453323364,
      "learning_rate": 4.394678492239468e-05,
      "loss": 0.0299,
      "step": 141568
    },
    {
      "epoch": 11.260690528983211,
      "grad_norm": 0.2748374938964844,
      "learning_rate": 4.369654735508394e-05,
      "loss": 0.031,
      "step": 142200
    },
    {
      "epoch": 11.31073804244536,
      "grad_norm": 1.0721484422683716,
      "learning_rate": 4.34463097877732e-05,
      "loss": 0.0338,
      "step": 142832
    },
    {
      "epoch": 11.360785555907507,
      "grad_norm": 0.6880238056182861,
      "learning_rate": 4.319607222046247e-05,
      "loss": 0.0306,
      "step": 143464
    },
    {
      "epoch": 11.410833069369655,
      "grad_norm": 0.6363319158554077,
      "learning_rate": 4.294583465315173e-05,
      "loss": 0.0324,
      "step": 144096
    },
    {
      "epoch": 11.460880582831802,
      "grad_norm": 0.9793805480003357,
      "learning_rate": 4.2695597085840985e-05,
      "loss": 0.0352,
      "step": 144728
    },
    {
      "epoch": 11.51092809629395,
      "grad_norm": 0.05823446065187454,
      "learning_rate": 4.244535951853025e-05,
      "loss": 0.0302,
      "step": 145360
    },
    {
      "epoch": 11.560975609756097,
      "grad_norm": 0.44829338788986206,
      "learning_rate": 4.2195121951219514e-05,
      "loss": 0.0317,
      "step": 145992
    },
    {
      "epoch": 11.611023123218246,
      "grad_norm": 0.21787405014038086,
      "learning_rate": 4.194488438390878e-05,
      "loss": 0.0327,
      "step": 146624
    },
    {
      "epoch": 11.661070636680392,
      "grad_norm": 0.09327682852745056,
      "learning_rate": 4.1694646816598035e-05,
      "loss": 0.0343,
      "step": 147256
    },
    {
      "epoch": 11.711118150142541,
      "grad_norm": 0.4916613698005676,
      "learning_rate": 4.14444092492873e-05,
      "loss": 0.0358,
      "step": 147888
    },
    {
      "epoch": 11.761165663604688,
      "grad_norm": 0.04805804416537285,
      "learning_rate": 4.119417168197656e-05,
      "loss": 0.0343,
      "step": 148520
    },
    {
      "epoch": 11.811213177066836,
      "grad_norm": 0.019468722864985466,
      "learning_rate": 4.0943934114665824e-05,
      "loss": 0.0298,
      "step": 149152
    },
    {
      "epoch": 11.861260690528983,
      "grad_norm": 0.09931553900241852,
      "learning_rate": 4.0693696547355085e-05,
      "loss": 0.0304,
      "step": 149784
    },
    {
      "epoch": 11.911308203991132,
      "grad_norm": 0.03299326449632645,
      "learning_rate": 4.0443458980044345e-05,
      "loss": 0.0355,
      "step": 150416
    },
    {
      "epoch": 11.961355717453278,
      "grad_norm": 0.11701735109090805,
      "learning_rate": 4.019322141273361e-05,
      "loss": 0.0298,
      "step": 151048
    },
    {
      "epoch": 12.0,
      "eval_gen_len": 12.34792003,
      "eval_loss": 0.03365420550107956,
      "eval_runtime": 512.0738,
      "eval_samples_per_second": 103.934,
      "eval_steps_per_second": 1.625,
      "eval_wordacc": 0.94539852,
      "eval_wordacc_oov": 0.90128577,
      "step": 151536
    },
    {
      "epoch": 12.011403230915427,
      "grad_norm": 0.14153118431568146,
      "learning_rate": 3.9942983845422874e-05,
      "loss": 0.0294,
      "step": 151680
    },
    {
      "epoch": 12.061450744377574,
      "grad_norm": 0.8496013283729553,
      "learning_rate": 3.9692746278112134e-05,
      "loss": 0.0302,
      "step": 152312
    },
    {
      "epoch": 12.111498257839722,
      "grad_norm": 0.3620108664035797,
      "learning_rate": 3.9442508710801395e-05,
      "loss": 0.0293,
      "step": 152944
    },
    {
      "epoch": 12.161545771301869,
      "grad_norm": 0.8358076810836792,
      "learning_rate": 3.9192271143490656e-05,
      "loss": 0.0279,
      "step": 153576
    },
    {
      "epoch": 12.211593284764016,
      "grad_norm": 2.183274745941162,
      "learning_rate": 3.894203357617992e-05,
      "loss": 0.0298,
      "step": 154208
    },
    {
      "epoch": 12.261640798226164,
      "grad_norm": 0.11177387088537216,
      "learning_rate": 3.869179600886918e-05,
      "loss": 0.0279,
      "step": 154840
    },
    {
      "epoch": 12.311688311688311,
      "grad_norm": 0.16532179713249207,
      "learning_rate": 3.8441558441558445e-05,
      "loss": 0.0283,
      "step": 155472
    },
    {
      "epoch": 12.36173582515046,
      "grad_norm": 0.2667466104030609,
      "learning_rate": 3.8191320874247705e-05,
      "loss": 0.0324,
      "step": 156104
    },
    {
      "epoch": 12.411783338612606,
      "grad_norm": 1.4449559450149536,
      "learning_rate": 3.794108330693697e-05,
      "loss": 0.0327,
      "step": 156736
    },
    {
      "epoch": 12.461830852074755,
      "grad_norm": 1.9251255989074707,
      "learning_rate": 3.769084573962623e-05,
      "loss": 0.0293,
      "step": 157368
    },
    {
      "epoch": 12.511878365536901,
      "grad_norm": 1.981763243675232,
      "learning_rate": 3.744060817231549e-05,
      "loss": 0.0296,
      "step": 158000
    },
    {
      "epoch": 12.56192587899905,
      "grad_norm": 0.5154635906219482,
      "learning_rate": 3.7190370605004755e-05,
      "loss": 0.0326,
      "step": 158632
    },
    {
      "epoch": 12.611973392461197,
      "grad_norm": 0.7047850489616394,
      "learning_rate": 3.6940133037694016e-05,
      "loss": 0.0313,
      "step": 159264
    },
    {
      "epoch": 12.662020905923345,
      "grad_norm": 0.05169570446014404,
      "learning_rate": 3.6689895470383276e-05,
      "loss": 0.0311,
      "step": 159896
    },
    {
      "epoch": 12.712068419385492,
      "grad_norm": 0.3161393105983734,
      "learning_rate": 3.643965790307254e-05,
      "loss": 0.0295,
      "step": 160528
    },
    {
      "epoch": 12.76211593284764,
      "grad_norm": 0.4407559633255005,
      "learning_rate": 3.6189420335761805e-05,
      "loss": 0.0307,
      "step": 161160
    },
    {
      "epoch": 12.812163446309787,
      "grad_norm": 2.090928792953491,
      "learning_rate": 3.5939182768451065e-05,
      "loss": 0.0288,
      "step": 161792
    },
    {
      "epoch": 12.862210959771936,
      "grad_norm": 0.039744604378938675,
      "learning_rate": 3.568894520114032e-05,
      "loss": 0.0285,
      "step": 162424
    },
    {
      "epoch": 12.912258473234083,
      "grad_norm": 0.48494115471839905,
      "learning_rate": 3.543870763382959e-05,
      "loss": 0.0302,
      "step": 163056
    },
    {
      "epoch": 12.962305986696231,
      "grad_norm": 0.38117051124572754,
      "learning_rate": 3.518847006651885e-05,
      "loss": 0.0302,
      "step": 163688
    },
    {
      "epoch": 13.0,
      "eval_gen_len": 12.34829582,
      "eval_loss": 0.032208241522312164,
      "eval_runtime": 516.3051,
      "eval_samples_per_second": 103.082,
      "eval_steps_per_second": 1.611,
      "eval_wordacc": 0.94701439,
      "eval_wordacc_oov": 0.90429282,
      "step": 164164
    },
    {
      "epoch": 13.012353500158378,
      "grad_norm": 0.5338648557662964,
      "learning_rate": 3.4938232499208115e-05,
      "loss": 0.0273,
      "step": 164320
    },
    {
      "epoch": 13.062401013620526,
      "grad_norm": 0.5787109136581421,
      "learning_rate": 3.468799493189737e-05,
      "loss": 0.0278,
      "step": 164952
    },
    {
      "epoch": 13.112448527082673,
      "grad_norm": 3.402815818786621,
      "learning_rate": 3.4437757364586636e-05,
      "loss": 0.0309,
      "step": 165584
    },
    {
      "epoch": 13.162496040544822,
      "grad_norm": 0.3387065827846527,
      "learning_rate": 3.41875197972759e-05,
      "loss": 0.0266,
      "step": 166216
    },
    {
      "epoch": 13.212543554006968,
      "grad_norm": 0.6726759076118469,
      "learning_rate": 3.393728222996516e-05,
      "loss": 0.0257,
      "step": 166848
    },
    {
      "epoch": 13.262591067469117,
      "grad_norm": 0.039511628448963165,
      "learning_rate": 3.368704466265442e-05,
      "loss": 0.0285,
      "step": 167480
    },
    {
      "epoch": 13.312638580931264,
      "grad_norm": 0.16470862925052643,
      "learning_rate": 3.343680709534368e-05,
      "loss": 0.0265,
      "step": 168112
    },
    {
      "epoch": 13.362686094393412,
      "grad_norm": 1.1072640419006348,
      "learning_rate": 3.318656952803295e-05,
      "loss": 0.0289,
      "step": 168744
    },
    {
      "epoch": 13.412733607855559,
      "grad_norm": 0.4822203516960144,
      "learning_rate": 3.293633196072221e-05,
      "loss": 0.0282,
      "step": 169376
    },
    {
      "epoch": 13.462781121317708,
      "grad_norm": 0.29173898696899414,
      "learning_rate": 3.268609439341147e-05,
      "loss": 0.025,
      "step": 170008
    },
    {
      "epoch": 13.512828634779854,
      "grad_norm": 0.12304585427045822,
      "learning_rate": 3.243585682610073e-05,
      "loss": 0.0286,
      "step": 170640
    },
    {
      "epoch": 13.562876148242001,
      "grad_norm": 0.02250618301331997,
      "learning_rate": 3.218561925878999e-05,
      "loss": 0.0261,
      "step": 171272
    },
    {
      "epoch": 13.61292366170415,
      "grad_norm": 1.214078664779663,
      "learning_rate": 3.193538169147926e-05,
      "loss": 0.0265,
      "step": 171904
    },
    {
      "epoch": 13.662971175166296,
      "grad_norm": 1.080644965171814,
      "learning_rate": 3.168514412416851e-05,
      "loss": 0.0264,
      "step": 172536
    },
    {
      "epoch": 13.713018688628445,
      "grad_norm": 0.47105857729911804,
      "learning_rate": 3.143490655685778e-05,
      "loss": 0.03,
      "step": 173168
    },
    {
      "epoch": 13.763066202090592,
      "grad_norm": 0.8579492568969727,
      "learning_rate": 3.118466898954704e-05,
      "loss": 0.03,
      "step": 173800
    },
    {
      "epoch": 13.81311371555274,
      "grad_norm": 1.1287840604782104,
      "learning_rate": 3.0934431422236307e-05,
      "loss": 0.0274,
      "step": 174432
    },
    {
      "epoch": 13.863161229014887,
      "grad_norm": 1.2223260402679443,
      "learning_rate": 3.068419385492556e-05,
      "loss": 0.0275,
      "step": 175064
    },
    {
      "epoch": 13.913208742477035,
      "grad_norm": 0.6263837814331055,
      "learning_rate": 3.0433956287614825e-05,
      "loss": 0.03,
      "step": 175696
    },
    {
      "epoch": 13.963256255939182,
      "grad_norm": 2.3187966346740723,
      "learning_rate": 3.018371872030409e-05,
      "loss": 0.0277,
      "step": 176328
    },
    {
      "epoch": 14.0,
      "eval_gen_len": 12.35064447,
      "eval_loss": 0.031636711210012436,
      "eval_runtime": 514.9724,
      "eval_samples_per_second": 103.349,
      "eval_steps_per_second": 1.616,
      "eval_wordacc": 0.94785991,
      "eval_wordacc_oov": 0.9040336,
      "step": 176792
    },
    {
      "epoch": 14.01330376940133,
      "grad_norm": 0.09013538807630539,
      "learning_rate": 2.993348115299335e-05,
      "loss": 0.0291,
      "step": 176960
    },
    {
      "epoch": 14.063351282863477,
      "grad_norm": 0.7201142907142639,
      "learning_rate": 2.9683243585682614e-05,
      "loss": 0.0216,
      "step": 177592
    },
    {
      "epoch": 14.113398796325626,
      "grad_norm": 0.2142077535390854,
      "learning_rate": 2.943300601837187e-05,
      "loss": 0.0264,
      "step": 178224
    },
    {
      "epoch": 14.163446309787773,
      "grad_norm": 0.618773341178894,
      "learning_rate": 2.918276845106114e-05,
      "loss": 0.0248,
      "step": 178856
    },
    {
      "epoch": 14.213493823249921,
      "grad_norm": 1.0433090925216675,
      "learning_rate": 2.8932530883750396e-05,
      "loss": 0.0286,
      "step": 179488
    },
    {
      "epoch": 14.263541336712068,
      "grad_norm": 0.11434757709503174,
      "learning_rate": 2.8682293316439656e-05,
      "loss": 0.0253,
      "step": 180120
    },
    {
      "epoch": 14.313588850174217,
      "grad_norm": 1.1828645467758179,
      "learning_rate": 2.843205574912892e-05,
      "loss": 0.0245,
      "step": 180752
    },
    {
      "epoch": 14.363636363636363,
      "grad_norm": 0.3703102171421051,
      "learning_rate": 2.818181818181818e-05,
      "loss": 0.0261,
      "step": 181384
    },
    {
      "epoch": 14.413683877098512,
      "grad_norm": 0.20109039545059204,
      "learning_rate": 2.7931580614507445e-05,
      "loss": 0.0255,
      "step": 182016
    },
    {
      "epoch": 14.463731390560659,
      "grad_norm": 0.627756655216217,
      "learning_rate": 2.7681343047196706e-05,
      "loss": 0.0228,
      "step": 182648
    },
    {
      "epoch": 14.513778904022807,
      "grad_norm": 0.04185875132679939,
      "learning_rate": 2.743110547988597e-05,
      "loss": 0.0237,
      "step": 183280
    },
    {
      "epoch": 14.563826417484954,
      "grad_norm": 0.10042019188404083,
      "learning_rate": 2.718086791257523e-05,
      "loss": 0.0272,
      "step": 183912
    },
    {
      "epoch": 14.613873930947102,
      "grad_norm": 1.0942548513412476,
      "learning_rate": 2.693063034526449e-05,
      "loss": 0.0242,
      "step": 184544
    },
    {
      "epoch": 14.66392144440925,
      "grad_norm": 0.7228376269340515,
      "learning_rate": 2.6680392777953756e-05,
      "loss": 0.0252,
      "step": 185176
    },
    {
      "epoch": 14.713968957871398,
      "grad_norm": 0.6470568776130676,
      "learning_rate": 2.6430155210643016e-05,
      "loss": 0.0269,
      "step": 185808
    },
    {
      "epoch": 14.764016471333544,
      "grad_norm": 0.4635055363178253,
      "learning_rate": 2.617991764333228e-05,
      "loss": 0.0243,
      "step": 186440
    },
    {
      "epoch": 14.814063984795691,
      "grad_norm": 0.43463265895843506,
      "learning_rate": 2.592968007602154e-05,
      "loss": 0.0265,
      "step": 187072
    },
    {
      "epoch": 14.86411149825784,
      "grad_norm": 0.7284203767776489,
      "learning_rate": 2.5679442508710805e-05,
      "loss": 0.0269,
      "step": 187704
    },
    {
      "epoch": 14.914159011719988,
      "grad_norm": 1.462714433670044,
      "learning_rate": 2.5429204941400066e-05,
      "loss": 0.0236,
      "step": 188336
    },
    {
      "epoch": 14.964206525182135,
      "grad_norm": 0.029578888788819313,
      "learning_rate": 2.5178967374089323e-05,
      "loss": 0.0277,
      "step": 188968
    },
    {
      "epoch": 15.0,
      "eval_gen_len": 12.35139604,
      "eval_loss": 0.032311730086803436,
      "eval_runtime": 516.3173,
      "eval_samples_per_second": 103.08,
      "eval_steps_per_second": 1.611,
      "eval_wordacc": 0.94876179,
      "eval_wordacc_oov": 0.90299668,
      "step": 189420
    },
    {
      "epoch": 15.014254038644282,
      "grad_norm": 0.3032616376876831,
      "learning_rate": 2.4928729806778587e-05,
      "loss": 0.0275,
      "step": 189600
    },
    {
      "epoch": 15.06430155210643,
      "grad_norm": 0.08022745698690414,
      "learning_rate": 2.467849223946785e-05,
      "loss": 0.0212,
      "step": 190232
    },
    {
      "epoch": 15.114349065568577,
      "grad_norm": 0.06174493208527565,
      "learning_rate": 2.4428254672157112e-05,
      "loss": 0.0233,
      "step": 190864
    },
    {
      "epoch": 15.164396579030726,
      "grad_norm": 0.7364438772201538,
      "learning_rate": 2.4178017104846373e-05,
      "loss": 0.0214,
      "step": 191496
    },
    {
      "epoch": 15.214444092492872,
      "grad_norm": 0.022581493481993675,
      "learning_rate": 2.3927779537535637e-05,
      "loss": 0.0236,
      "step": 192128
    },
    {
      "epoch": 15.26449160595502,
      "grad_norm": 0.03313857316970825,
      "learning_rate": 2.3677541970224898e-05,
      "loss": 0.0246,
      "step": 192760
    },
    {
      "epoch": 15.314539119417168,
      "grad_norm": 0.7156698703765869,
      "learning_rate": 2.3427304402914162e-05,
      "loss": 0.0263,
      "step": 193392
    },
    {
      "epoch": 15.364586632879316,
      "grad_norm": 0.626669704914093,
      "learning_rate": 2.3177066835603423e-05,
      "loss": 0.0219,
      "step": 194024
    },
    {
      "epoch": 15.414634146341463,
      "grad_norm": 1.3695429563522339,
      "learning_rate": 2.2926829268292687e-05,
      "loss": 0.0244,
      "step": 194656
    },
    {
      "epoch": 15.464681659803611,
      "grad_norm": 0.03239896893501282,
      "learning_rate": 2.2676591700981947e-05,
      "loss": 0.0217,
      "step": 195288
    },
    {
      "epoch": 15.514729173265758,
| "grad_norm": 0.4434269666671753, | |
| "learning_rate": 2.2426354133671208e-05, | |
| "loss": 0.026, | |
| "step": 195920 | |
| }, | |
| { | |
| "epoch": 15.564776686727907, | |
| "grad_norm": 0.11541499197483063, | |
| "learning_rate": 2.217611656636047e-05, | |
| "loss": 0.0251, | |
| "step": 196552 | |
| }, | |
| { | |
| "epoch": 15.614824200190053, | |
| "grad_norm": 0.11521150171756744, | |
| "learning_rate": 2.1925878999049733e-05, | |
| "loss": 0.0234, | |
| "step": 197184 | |
| }, | |
| { | |
| "epoch": 15.664871713652202, | |
| "grad_norm": 0.10318135470151901, | |
| "learning_rate": 2.1675641431738994e-05, | |
| "loss": 0.0239, | |
| "step": 197816 | |
| }, | |
| { | |
| "epoch": 15.714919227114349, | |
| "grad_norm": 0.181732639670372, | |
| "learning_rate": 2.1425403864428258e-05, | |
| "loss": 0.0231, | |
| "step": 198448 | |
| }, | |
| { | |
| "epoch": 15.764966740576497, | |
| "grad_norm": 0.43891534209251404, | |
| "learning_rate": 2.117516629711752e-05, | |
| "loss": 0.0258, | |
| "step": 199080 | |
| }, | |
| { | |
| "epoch": 15.815014254038644, | |
| "grad_norm": 0.2669106125831604, | |
| "learning_rate": 2.0924928729806782e-05, | |
| "loss": 0.0258, | |
| "step": 199712 | |
| }, | |
| { | |
| "epoch": 15.865061767500793, | |
| "grad_norm": 0.787148118019104, | |
| "learning_rate": 2.067469116249604e-05, | |
| "loss": 0.023, | |
| "step": 200344 | |
| }, | |
| { | |
| "epoch": 15.91510928096294, | |
| "grad_norm": 0.09489845484495163, | |
| "learning_rate": 2.0424453595185304e-05, | |
| "loss": 0.0233, | |
| "step": 200976 | |
| }, | |
| { | |
| "epoch": 15.965156794425088, | |
| "grad_norm": 0.07817026227712631, | |
| "learning_rate": 2.0174216027874565e-05, | |
| "loss": 0.0245, | |
| "step": 201608 | |
| }, | |
| { | |
| "epoch": 16.0, | |
| "eval_gen_len": 12.35013716, | |
| "eval_loss": 0.03139541670680046, | |
| "eval_runtime": 516.248, | |
| "eval_samples_per_second": 103.094, | |
| "eval_steps_per_second": 1.612, | |
| "eval_wordacc": 0.95127955, | |
| "eval_wordacc_oov": 0.90724803, | |
| "step": 202048 | |
| }, | |
| { | |
| "epoch": 16.015204307887235, | |
| "grad_norm": 0.026963738724589348, | |
| "learning_rate": 1.992397846056383e-05, | |
| "loss": 0.0209, | |
| "step": 202240 | |
| }, | |
| { | |
| "epoch": 16.06525182134938, | |
| "grad_norm": 0.031288400292396545, | |
| "learning_rate": 1.967374089325309e-05, | |
| "loss": 0.0211, | |
| "step": 202872 | |
| }, | |
| { | |
| "epoch": 16.11529933481153, | |
| "grad_norm": 0.04831352084875107, | |
| "learning_rate": 1.9423503325942354e-05, | |
| "loss": 0.0216, | |
| "step": 203504 | |
| }, | |
| { | |
| "epoch": 16.16534684827368, | |
| "grad_norm": 0.3481796383857727, | |
| "learning_rate": 1.9173265758631614e-05, | |
| "loss": 0.0215, | |
| "step": 204136 | |
| }, | |
| { | |
| "epoch": 16.215394361735825, | |
| "grad_norm": 0.21324285864830017, | |
| "learning_rate": 1.8923028191320875e-05, | |
| "loss": 0.0218, | |
| "step": 204768 | |
| }, | |
| { | |
| "epoch": 16.265441875197972, | |
| "grad_norm": 1.6428892612457275, | |
| "learning_rate": 1.8672790624010136e-05, | |
| "loss": 0.021, | |
| "step": 205400 | |
| }, | |
| { | |
| "epoch": 16.31548938866012, | |
| "grad_norm": 0.2430795133113861, | |
| "learning_rate": 1.84225530566994e-05, | |
| "loss": 0.02, | |
| "step": 206032 | |
| }, | |
| { | |
| "epoch": 16.36553690212227, | |
| "grad_norm": 0.5367847681045532, | |
| "learning_rate": 1.817231548938866e-05, | |
| "loss": 0.0209, | |
| "step": 206664 | |
| }, | |
| { | |
| "epoch": 16.415584415584416, | |
| "grad_norm": 0.08580495417118073, | |
| "learning_rate": 1.7922077922077925e-05, | |
| "loss": 0.0204, | |
| "step": 207296 | |
| }, | |
| { | |
| "epoch": 16.465631929046562, | |
| "grad_norm": 0.6704065203666687, | |
| "learning_rate": 1.7671840354767185e-05, | |
| "loss": 0.0218, | |
| "step": 207928 | |
| }, | |
| { | |
| "epoch": 16.51567944250871, | |
| "grad_norm": 1.014145016670227, | |
| "learning_rate": 1.742160278745645e-05, | |
| "loss": 0.0216, | |
| "step": 208560 | |
| }, | |
| { | |
| "epoch": 16.56572695597086, | |
| "grad_norm": 0.040132321417331696, | |
| "learning_rate": 1.7171365220145707e-05, | |
| "loss": 0.0249, | |
| "step": 209192 | |
| }, | |
| { | |
| "epoch": 16.615774469433006, | |
| "grad_norm": 1.3288291692733765, | |
| "learning_rate": 1.692112765283497e-05, | |
| "loss": 0.0216, | |
| "step": 209824 | |
| }, | |
| { | |
| "epoch": 16.665821982895153, | |
| "grad_norm": 0.6535305976867676, | |
| "learning_rate": 1.667089008552423e-05, | |
| "loss": 0.0221, | |
| "step": 210456 | |
| }, | |
| { | |
| "epoch": 16.7158694963573, | |
| "grad_norm": 0.516942024230957, | |
| "learning_rate": 1.6420652518213496e-05, | |
| "loss": 0.0209, | |
| "step": 211088 | |
| }, | |
| { | |
| "epoch": 16.76591700981945, | |
| "grad_norm": 0.6040933728218079, | |
| "learning_rate": 1.6170414950902756e-05, | |
| "loss": 0.0231, | |
| "step": 211720 | |
| }, | |
| { | |
| "epoch": 16.815964523281597, | |
| "grad_norm": 0.07999061793088913, | |
| "learning_rate": 1.592017738359202e-05, | |
| "loss": 0.023, | |
| "step": 212352 | |
| }, | |
| { | |
| "epoch": 16.866012036743744, | |
| "grad_norm": 2.897904872894287, | |
| "learning_rate": 1.566993981628128e-05, | |
| "loss": 0.0227, | |
| "step": 212984 | |
| }, | |
| { | |
| "epoch": 16.91605955020589, | |
| "grad_norm": 0.2583966553211212, | |
| "learning_rate": 1.5419702248970542e-05, | |
| "loss": 0.0218, | |
| "step": 213616 | |
| }, | |
| { | |
| "epoch": 16.96610706366804, | |
| "grad_norm": 1.3506840467453003, | |
| "learning_rate": 1.5169464681659804e-05, | |
| "loss": 0.0235, | |
| "step": 214248 | |
| }, | |
| { | |
| "epoch": 17.0, | |
| "eval_gen_len": 12.35109541, | |
| "eval_loss": 0.03128550201654434, | |
| "eval_runtime": 515.496, | |
| "eval_samples_per_second": 103.244, | |
| "eval_steps_per_second": 1.614, | |
| "eval_wordacc": 0.95197475, | |
| "eval_wordacc_oov": 0.90714434, | |
| "step": 214676 | |
| }, | |
| { | |
| "epoch": 17.016154577130187, | |
| "grad_norm": 0.28379184007644653, | |
| "learning_rate": 1.4919227114349067e-05, | |
| "loss": 0.0196, | |
| "step": 214880 | |
| }, | |
| { | |
| "epoch": 17.066202090592334, | |
| "grad_norm": 0.2609073221683502, | |
| "learning_rate": 1.4668989547038327e-05, | |
| "loss": 0.0199, | |
| "step": 215512 | |
| }, | |
| { | |
| "epoch": 17.11624960405448, | |
| "grad_norm": 0.22402538359165192, | |
| "learning_rate": 1.441875197972759e-05, | |
| "loss": 0.0203, | |
| "step": 216144 | |
| }, | |
| { | |
| "epoch": 17.16629711751663, | |
| "grad_norm": 0.020612630993127823, | |
| "learning_rate": 1.4168514412416852e-05, | |
| "loss": 0.0196, | |
| "step": 216776 | |
| }, | |
| { | |
| "epoch": 17.216344630978778, | |
| "grad_norm": 0.036708053201436996, | |
| "learning_rate": 1.3918276845106115e-05, | |
| "loss": 0.0164, | |
| "step": 217408 | |
| }, | |
| { | |
| "epoch": 17.266392144440925, | |
| "grad_norm": 1.0852469205856323, | |
| "learning_rate": 1.3668039277795375e-05, | |
| "loss": 0.0197, | |
| "step": 218040 | |
| }, | |
| { | |
| "epoch": 17.31643965790307, | |
| "grad_norm": 0.08419329673051834, | |
| "learning_rate": 1.3417801710484638e-05, | |
| "loss": 0.0223, | |
| "step": 218672 | |
| }, | |
| { | |
| "epoch": 17.366487171365222, | |
| "grad_norm": 0.8586103320121765, | |
| "learning_rate": 1.31675641431739e-05, | |
| "loss": 0.0217, | |
| "step": 219304 | |
| }, | |
| { | |
| "epoch": 17.41653468482737, | |
| "grad_norm": 0.41822636127471924, | |
| "learning_rate": 1.2917326575863162e-05, | |
| "loss": 0.0203, | |
| "step": 219936 | |
| }, | |
| { | |
| "epoch": 17.466582198289515, | |
| "grad_norm": 0.16271276772022247, | |
| "learning_rate": 1.2667089008552425e-05, | |
| "loss": 0.0211, | |
| "step": 220568 | |
| }, | |
| { | |
| "epoch": 17.516629711751662, | |
| "grad_norm": 0.7023501992225647, | |
| "learning_rate": 1.2416851441241686e-05, | |
| "loss": 0.0197, | |
| "step": 221200 | |
| }, | |
| { | |
| "epoch": 17.56667722521381, | |
| "grad_norm": 0.038701217621564865, | |
| "learning_rate": 1.2166613873930948e-05, | |
| "loss": 0.022, | |
| "step": 221832 | |
| }, | |
| { | |
| "epoch": 17.61672473867596, | |
| "grad_norm": 0.03481602668762207, | |
| "learning_rate": 1.191637630662021e-05, | |
| "loss": 0.0178, | |
| "step": 222464 | |
| }, | |
| { | |
| "epoch": 17.666772252138106, | |
| "grad_norm": 0.3818272352218628, | |
| "learning_rate": 1.1666138739309473e-05, | |
| "loss": 0.0204, | |
| "step": 223096 | |
| }, | |
| { | |
| "epoch": 17.716819765600253, | |
| "grad_norm": 1.1725687980651855, | |
| "learning_rate": 1.1415901171998734e-05, | |
| "loss": 0.0214, | |
| "step": 223728 | |
| }, | |
| { | |
| "epoch": 17.7668672790624, | |
| "grad_norm": 0.5618239641189575, | |
| "learning_rate": 1.1165663604687996e-05, | |
| "loss": 0.0198, | |
| "step": 224360 | |
| }, | |
| { | |
| "epoch": 17.81691479252455, | |
| "grad_norm": 0.11917304992675781, | |
| "learning_rate": 1.0915426037377258e-05, | |
| "loss": 0.0203, | |
| "step": 224992 | |
| }, | |
| { | |
| "epoch": 17.866962305986696, | |
| "grad_norm": 0.1154344379901886, | |
| "learning_rate": 1.066518847006652e-05, | |
| "loss": 0.0217, | |
| "step": 225624 | |
| }, | |
| { | |
| "epoch": 17.917009819448843, | |
| "grad_norm": 0.12502028048038483, | |
| "learning_rate": 1.0414950902755781e-05, | |
| "loss": 0.0217, | |
| "step": 226256 | |
| }, | |
| { | |
| "epoch": 17.96705733291099, | |
| "grad_norm": 0.27321675419807434, | |
| "learning_rate": 1.0164713335445044e-05, | |
| "loss": 0.0206, | |
| "step": 226888 | |
| }, | |
| { | |
| "epoch": 18.0, | |
| "eval_gen_len": 12.35015595, | |
| "eval_loss": 0.030980365350842476, | |
| "eval_runtime": 547.6731, | |
| "eval_samples_per_second": 97.178, | |
| "eval_steps_per_second": 1.519, | |
| "eval_wordacc": 0.95308331, | |
| "eval_wordacc_oov": 0.90844048, | |
| "step": 227304 | |
| }, | |
| { | |
| "epoch": 18.01710484637314, | |
| "grad_norm": 0.15305259823799133, | |
| "learning_rate": 9.914475768134306e-06, | |
| "loss": 0.018, | |
| "step": 227520 | |
| }, | |
| { | |
| "epoch": 18.067152359835287, | |
| "grad_norm": 2.5299997329711914, | |
| "learning_rate": 9.664238200823567e-06, | |
| "loss": 0.0183, | |
| "step": 228152 | |
| }, | |
| { | |
| "epoch": 18.117199873297434, | |
| "grad_norm": 0.2780356705188751, | |
| "learning_rate": 9.41400063351283e-06, | |
| "loss": 0.0179, | |
| "step": 228784 | |
| }, | |
| { | |
| "epoch": 18.16724738675958, | |
| "grad_norm": 0.06621097773313522, | |
| "learning_rate": 9.163763066202092e-06, | |
| "loss": 0.0196, | |
| "step": 229416 | |
| }, | |
| { | |
| "epoch": 18.21729490022173, | |
| "grad_norm": 0.05437783896923065, | |
| "learning_rate": 8.913525498891354e-06, | |
| "loss": 0.0186, | |
| "step": 230048 | |
| }, | |
| { | |
| "epoch": 18.267342413683878, | |
| "grad_norm": 0.62905353307724, | |
| "learning_rate": 8.663287931580615e-06, | |
| "loss": 0.0202, | |
| "step": 230680 | |
| }, | |
| { | |
| "epoch": 18.317389927146024, | |
| "grad_norm": 0.03628231957554817, | |
| "learning_rate": 8.413050364269877e-06, | |
| "loss": 0.0186, | |
| "step": 231312 | |
| }, | |
| { | |
| "epoch": 18.36743744060817, | |
| "grad_norm": 0.1532977819442749, | |
| "learning_rate": 8.16281279695914e-06, | |
| "loss": 0.0197, | |
| "step": 231944 | |
| }, | |
| { | |
| "epoch": 18.41748495407032, | |
| "grad_norm": 0.15254627168178558, | |
| "learning_rate": 7.9125752296484e-06, | |
| "loss": 0.0177, | |
| "step": 232576 | |
| }, | |
| { | |
| "epoch": 18.467532467532468, | |
| "grad_norm": 1.0562399625778198, | |
| "learning_rate": 7.662337662337663e-06, | |
| "loss": 0.0172, | |
| "step": 233208 | |
| }, | |
| { | |
| "epoch": 18.517579980994615, | |
| "grad_norm": 0.4453679323196411, | |
| "learning_rate": 7.412100095026925e-06, | |
| "loss": 0.0197, | |
| "step": 233840 | |
| }, | |
| { | |
| "epoch": 18.56762749445676, | |
| "grad_norm": 0.26340222358703613, | |
| "learning_rate": 7.161862527716187e-06, | |
| "loss": 0.018, | |
| "step": 234472 | |
| }, | |
| { | |
| "epoch": 18.617675007918912, | |
| "grad_norm": 0.052670177072286606, | |
| "learning_rate": 6.911624960405448e-06, | |
| "loss": 0.0184, | |
| "step": 235104 | |
| }, | |
| { | |
| "epoch": 18.66772252138106, | |
| "grad_norm": 0.026885686442255974, | |
| "learning_rate": 6.661387393094711e-06, | |
| "loss": 0.0183, | |
| "step": 235736 | |
| }, | |
| { | |
| "epoch": 18.717770034843205, | |
| "grad_norm": 0.34494689106941223, | |
| "learning_rate": 6.411149825783973e-06, | |
| "loss": 0.0192, | |
| "step": 236368 | |
| }, | |
| { | |
| "epoch": 18.767817548305352, | |
| "grad_norm": 0.0610111765563488, | |
| "learning_rate": 6.160912258473235e-06, | |
| "loss": 0.0179, | |
| "step": 237000 | |
| }, | |
| { | |
| "epoch": 18.817865061767503, | |
| "grad_norm": 0.02205130271613598, | |
| "learning_rate": 5.910674691162496e-06, | |
| "loss": 0.0218, | |
| "step": 237632 | |
| }, | |
| { | |
| "epoch": 18.86791257522965, | |
| "grad_norm": 0.046364884823560715, | |
| "learning_rate": 5.660437123851759e-06, | |
| "loss": 0.0167, | |
| "step": 238264 | |
| }, | |
| { | |
| "epoch": 18.917960088691796, | |
| "grad_norm": 0.8253034949302673, | |
| "learning_rate": 5.41019955654102e-06, | |
| "loss": 0.0206, | |
| "step": 238896 | |
| }, | |
| { | |
| "epoch": 18.968007602153943, | |
| "grad_norm": 0.07394740730524063, | |
| "learning_rate": 5.159961989230283e-06, | |
| "loss": 0.0178, | |
| "step": 239528 | |
| }, | |
| { | |
| "epoch": 19.0, | |
| "eval_gen_len": 12.35068205, | |
| "eval_loss": 0.03073255531489849, | |
| "eval_runtime": 534.9855, | |
| "eval_samples_per_second": 99.483, | |
| "eval_steps_per_second": 1.555, | |
| "eval_wordacc": 0.95447371, | |
| "eval_wordacc_oov": 0.9093737, | |
| "step": 239932 | |
| }, | |
| { | |
| "epoch": 19.01805511561609, | |
| "grad_norm": 0.05381210148334503, | |
| "learning_rate": 4.909724421919544e-06, | |
| "loss": 0.017, | |
| "step": 240160 | |
| }, | |
| { | |
| "epoch": 19.06810262907824, | |
| "grad_norm": 0.05161185935139656, | |
| "learning_rate": 4.659486854608807e-06, | |
| "loss": 0.0184, | |
| "step": 240792 | |
| }, | |
| { | |
| "epoch": 19.118150142540387, | |
| "grad_norm": 0.054974284023046494, | |
| "learning_rate": 4.409249287298068e-06, | |
| "loss": 0.0165, | |
| "step": 241424 | |
| }, | |
| { | |
| "epoch": 19.168197656002533, | |
| "grad_norm": 0.4919557571411133, | |
| "learning_rate": 4.15901171998733e-06, | |
| "loss": 0.0171, | |
| "step": 242056 | |
| }, | |
| { | |
| "epoch": 19.21824516946468, | |
| "grad_norm": 0.8104053735733032, | |
| "learning_rate": 3.908774152676592e-06, | |
| "loss": 0.0161, | |
| "step": 242688 | |
| }, | |
| { | |
| "epoch": 19.26829268292683, | |
| "grad_norm": 0.04452453926205635, | |
| "learning_rate": 3.6585365853658537e-06, | |
| "loss": 0.017, | |
| "step": 243320 | |
| }, | |
| { | |
| "epoch": 19.318340196388977, | |
| "grad_norm": 2.039726972579956, | |
| "learning_rate": 3.408299018055116e-06, | |
| "loss": 0.0188, | |
| "step": 243952 | |
| }, | |
| { | |
| "epoch": 19.368387709851124, | |
| "grad_norm": 0.05839849263429642, | |
| "learning_rate": 3.1580614507443777e-06, | |
| "loss": 0.0174, | |
| "step": 244584 | |
| }, | |
| { | |
| "epoch": 19.41843522331327, | |
| "grad_norm": 0.06209117919206619, | |
| "learning_rate": 2.9078238834336396e-06, | |
| "loss": 0.0177, | |
| "step": 245216 | |
| }, | |
| { | |
| "epoch": 19.46848273677542, | |
| "grad_norm": 0.023634545505046844, | |
| "learning_rate": 2.6575863161229016e-06, | |
| "loss": 0.0187, | |
| "step": 245848 | |
| }, | |
| { | |
| "epoch": 19.518530250237568, | |
| "grad_norm": 0.4597359001636505, | |
| "learning_rate": 2.4073487488121636e-06, | |
| "loss": 0.0157, | |
| "step": 246480 | |
| }, | |
| { | |
| "epoch": 19.568577763699714, | |
| "grad_norm": 0.03822094574570656, | |
| "learning_rate": 2.1571111815014256e-06, | |
| "loss": 0.0191, | |
| "step": 247112 | |
| }, | |
| { | |
| "epoch": 19.61862527716186, | |
| "grad_norm": 0.028475474566221237, | |
| "learning_rate": 1.9068736141906876e-06, | |
| "loss": 0.0184, | |
| "step": 247744 | |
| }, | |
| { | |
| "epoch": 19.66867279062401, | |
| "grad_norm": 2.0368409156799316, | |
| "learning_rate": 1.6566360468799495e-06, | |
| "loss": 0.016, | |
| "step": 248376 | |
| }, | |
| { | |
| "epoch": 19.71872030408616, | |
| "grad_norm": 0.6395711898803711, | |
| "learning_rate": 1.4063984795692113e-06, | |
| "loss": 0.0182, | |
| "step": 249008 | |
| }, | |
| { | |
| "epoch": 19.768767817548305, | |
| "grad_norm": 0.11478591710329056, | |
| "learning_rate": 1.1561609122584733e-06, | |
| "loss": 0.0172, | |
| "step": 249640 | |
| }, | |
| { | |
| "epoch": 19.818815331010452, | |
| "grad_norm": 2.0897409915924072, | |
| "learning_rate": 9.059233449477353e-07, | |
| "loss": 0.0178, | |
| "step": 250272 | |
| }, | |
| { | |
| "epoch": 19.868862844472602, | |
| "grad_norm": 1.6272854804992676, | |
| "learning_rate": 6.556857776369973e-07, | |
| "loss": 0.0165, | |
| "step": 250904 | |
| }, | |
| { | |
| "epoch": 19.91891035793475, | |
| "grad_norm": 0.018388045951724052, | |
| "learning_rate": 4.0544821032625913e-07, | |
| "loss": 0.0153, | |
| "step": 251536 | |
| }, | |
| { | |
| "epoch": 19.968957871396896, | |
| "grad_norm": 0.097270667552948, | |
| "learning_rate": 1.5521064301552106e-07, | |
| "loss": 0.016, | |
| "step": 252168 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "eval_gen_len": 12.35156514, | |
| "eval_loss": 0.030767865478992462, | |
| "eval_runtime": 535.8673, | |
| "eval_samples_per_second": 99.319, | |
| "eval_steps_per_second": 1.553, | |
| "eval_wordacc": 0.95458645, | |
| "eval_wordacc_oov": 0.90963293, | |
| "step": 252560 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "step": 252560, | |
| "total_flos": 1486519029399552.0, | |
| "train_loss": 0.04684994914012881, | |
| "train_runtime": 19459.2196, | |
| "train_samples_per_second": 103.832, | |
| "train_steps_per_second": 12.979 | |
| } | |
| ], | |
| "logging_steps": 632, | |
| "max_steps": 252560, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 20, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1486519029399552.0, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
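
The block above is the complete `trainer_state.json` that `transformers.Trainer` writes at the end of a run. As a minimal sketch of how such a state file can be consumed (not part of the original log), the Python below splits `log_history` into its two record shapes, the per-step training entries keyed by `"loss"` and the per-epoch evaluation entries keyed by `"eval_loss"`, and prints the word-accuracy trajectory. The local filename `trainer_state.json` is an assumption, and the table pipes in the rendering above would need to be stripped before the content parses as JSON.

```python
import json

# Load the trainer state. The filename is an assumption; strip the
# surrounding table pipes first so the content is plain JSON.
with open("trainer_state.json") as f:
    state = json.load(f)

# "log_history" mixes two record shapes: per-step training logs
# (keyed by "loss") and per-epoch evaluation logs (keyed by "eval_loss").
# The final summary record uses "train_loss" and matches neither filter.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

# The last logged training loss is 0.016 at step 252168; the run-level
# average (0.04684...) is stored separately in the final summary record.
print("last train loss:", train_logs[-1]["loss"],
      "at step", train_logs[-1]["step"])

# Word accuracy per epoch; the final evaluation reaches
# eval_wordacc 0.95458645 and eval_wordacc_oov 0.90963293 at step 252560.
for e in eval_logs:
    print(f"epoch {e['epoch']:>4}: wordacc={e['eval_wordacc']:.5f} "
          f"oov={e['eval_wordacc_oov']:.5f}")
```

Run against this state, the loop would show word accuracy climbing monotonically over the last epochs while `eval_loss` plateaus near 0.031, which is the usual signal that the linear learning-rate schedule has decayed to near zero and training has converged.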