{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.955414012738854,
  "eval_steps": 500,
  "global_step": 364,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01910828025477707,
      "grad_norm": 16.385162993426572,
      "learning_rate": 5.405405405405406e-07,
      "loss": 1.5698,
      "step": 1
    },
    {
      "epoch": 0.03821656050955414,
      "grad_norm": 16.504800604597186,
      "learning_rate": 1.0810810810810812e-06,
      "loss": 1.5972,
      "step": 2
    },
    {
      "epoch": 0.05732484076433121,
      "grad_norm": 15.46635677824654,
      "learning_rate": 1.6216216216216219e-06,
      "loss": 1.5254,
      "step": 3
    },
    {
      "epoch": 0.07643312101910828,
      "grad_norm": 16.32262911321192,
      "learning_rate": 2.1621621621621623e-06,
      "loss": 1.5602,
      "step": 4
    },
    {
      "epoch": 0.09554140127388536,
      "grad_norm": 14.532866256919611,
      "learning_rate": 2.702702702702703e-06,
      "loss": 1.4957,
      "step": 5
    },
    {
      "epoch": 0.11464968152866242,
      "grad_norm": 12.429084270136249,
      "learning_rate": 3.2432432432432437e-06,
      "loss": 1.4258,
      "step": 6
    },
    {
      "epoch": 0.1337579617834395,
      "grad_norm": 9.200276130835531,
      "learning_rate": 3.7837837837837844e-06,
      "loss": 1.2829,
      "step": 7
    },
    {
      "epoch": 0.15286624203821655,
      "grad_norm": 7.518795812144619,
      "learning_rate": 4.324324324324325e-06,
      "loss": 1.265,
      "step": 8
    },
    {
      "epoch": 0.17197452229299362,
      "grad_norm": 6.557943979035445,
      "learning_rate": 4.864864864864866e-06,
      "loss": 1.2011,
      "step": 9
    },
    {
      "epoch": 0.1910828025477707,
      "grad_norm": 7.761386310023502,
      "learning_rate": 5.405405405405406e-06,
      "loss": 1.1285,
      "step": 10
    },
    {
      "epoch": 0.21019108280254778,
      "grad_norm": 5.800801599651337,
      "learning_rate": 5.945945945945947e-06,
      "loss": 1.0849,
      "step": 11
    },
    {
      "epoch": 0.22929936305732485,
      "grad_norm": 4.458529924701665,
      "learning_rate": 6.486486486486487e-06,
      "loss": 1.0295,
      "step": 12
    },
    {
      "epoch": 0.2484076433121019,
      "grad_norm": 4.449464961059666,
      "learning_rate": 7.027027027027028e-06,
      "loss": 1.0054,
      "step": 13
    },
    {
      "epoch": 0.267515923566879,
      "grad_norm": 5.151158650335397,
      "learning_rate": 7.567567567567569e-06,
      "loss": 0.9589,
      "step": 14
    },
    {
      "epoch": 0.28662420382165604,
      "grad_norm": 4.757079257630854,
      "learning_rate": 8.108108108108109e-06,
      "loss": 0.9881,
      "step": 15
    },
    {
      "epoch": 0.3057324840764331,
      "grad_norm": 4.440520703195754,
      "learning_rate": 8.64864864864865e-06,
      "loss": 0.955,
      "step": 16
    },
    {
      "epoch": 0.3248407643312102,
      "grad_norm": 2.8333651824679036,
      "learning_rate": 9.189189189189191e-06,
      "loss": 0.9141,
      "step": 17
    },
    {
      "epoch": 0.34394904458598724,
      "grad_norm": 2.759048068164436,
      "learning_rate": 9.729729729729732e-06,
      "loss": 0.9251,
      "step": 18
    },
    {
      "epoch": 0.3630573248407643,
      "grad_norm": 3.20055476511223,
      "learning_rate": 1.027027027027027e-05,
      "loss": 0.9106,
      "step": 19
    },
    {
      "epoch": 0.3821656050955414,
      "grad_norm": 3.541249127738148,
      "learning_rate": 1.0810810810810812e-05,
      "loss": 0.8812,
      "step": 20
    },
    {
      "epoch": 0.4012738853503185,
      "grad_norm": 2.807031455712289,
      "learning_rate": 1.1351351351351352e-05,
      "loss": 0.8684,
      "step": 21
    },
    {
      "epoch": 0.42038216560509556,
      "grad_norm": 2.042523718413902,
      "learning_rate": 1.1891891891891894e-05,
      "loss": 0.8832,
      "step": 22
    },
    {
      "epoch": 0.4394904458598726,
      "grad_norm": 2.1481689144751694,
      "learning_rate": 1.2432432432432433e-05,
      "loss": 0.878,
      "step": 23
    },
    {
      "epoch": 0.4585987261146497,
      "grad_norm": 2.4022110137127726,
      "learning_rate": 1.2972972972972975e-05,
      "loss": 0.864,
      "step": 24
    },
    {
      "epoch": 0.47770700636942676,
      "grad_norm": 2.1446981991533782,
      "learning_rate": 1.3513513513513515e-05,
      "loss": 0.8365,
      "step": 25
    },
    {
      "epoch": 0.4968152866242038,
      "grad_norm": 2.0893745183886896,
      "learning_rate": 1.4054054054054055e-05,
      "loss": 0.8362,
      "step": 26
    },
    {
      "epoch": 0.5159235668789809,
      "grad_norm": 1.6488707388541979,
      "learning_rate": 1.4594594594594596e-05,
      "loss": 0.8495,
      "step": 27
    },
    {
      "epoch": 0.535031847133758,
      "grad_norm": 1.6921694203114936,
      "learning_rate": 1.5135135135135138e-05,
      "loss": 0.8239,
      "step": 28
    },
    {
      "epoch": 0.554140127388535,
      "grad_norm": 2.0573030839069117,
      "learning_rate": 1.5675675675675676e-05,
      "loss": 0.8448,
      "step": 29
    },
    {
      "epoch": 0.5732484076433121,
      "grad_norm": 1.7248249635999773,
      "learning_rate": 1.6216216216216218e-05,
      "loss": 0.8311,
      "step": 30
    },
    {
      "epoch": 0.5923566878980892,
      "grad_norm": 1.6940388762105907,
      "learning_rate": 1.6756756756756757e-05,
      "loss": 0.8207,
      "step": 31
    },
    {
      "epoch": 0.6114649681528662,
      "grad_norm": 1.6926871774648706,
      "learning_rate": 1.72972972972973e-05,
      "loss": 0.8042,
      "step": 32
    },
    {
      "epoch": 0.6305732484076433,
      "grad_norm": 1.6302594155794337,
      "learning_rate": 1.783783783783784e-05,
      "loss": 0.809,
      "step": 33
    },
    {
      "epoch": 0.6496815286624203,
      "grad_norm": 1.8069821223276006,
      "learning_rate": 1.8378378378378383e-05,
      "loss": 0.8475,
      "step": 34
    },
    {
      "epoch": 0.6687898089171974,
      "grad_norm": 1.6810398894595866,
      "learning_rate": 1.891891891891892e-05,
      "loss": 0.795,
      "step": 35
    },
    {
      "epoch": 0.6878980891719745,
      "grad_norm": 1.6752610079484405,
      "learning_rate": 1.9459459459459463e-05,
      "loss": 0.8243,
      "step": 36
    },
    {
      "epoch": 0.7070063694267515,
      "grad_norm": 1.748395181450843,
      "learning_rate": 2e-05,
      "loss": 0.8132,
      "step": 37
    },
    {
      "epoch": 0.7261146496815286,
      "grad_norm": 1.648537098710576,
      "learning_rate": 1.9999538500851633e-05,
      "loss": 0.8151,
      "step": 38
    },
    {
      "epoch": 0.7452229299363057,
      "grad_norm": 1.5757172038673941,
      "learning_rate": 1.9998154046002822e-05,
      "loss": 0.8062,
      "step": 39
    },
    {
      "epoch": 0.7643312101910829,
      "grad_norm": 1.8845590574584812,
      "learning_rate": 1.9995846763238514e-05,
      "loss": 0.8148,
      "step": 40
    },
    {
      "epoch": 0.7834394904458599,
      "grad_norm": 2.094114813878872,
      "learning_rate": 1.9992616865520515e-05,
      "loss": 0.8219,
      "step": 41
    },
    {
      "epoch": 0.802547770700637,
      "grad_norm": 1.6890917880505276,
      "learning_rate": 1.9988464650967834e-05,
      "loss": 0.8053,
      "step": 42
    },
    {
      "epoch": 0.821656050955414,
      "grad_norm": 1.606154927757665,
      "learning_rate": 1.9983390502829168e-05,
      "loss": 0.8001,
      "step": 43
    },
    {
      "epoch": 0.8407643312101911,
      "grad_norm": 1.4922460443430643,
      "learning_rate": 1.9977394889447526e-05,
      "loss": 0.8023,
      "step": 44
    },
    {
      "epoch": 0.8598726114649682,
      "grad_norm": 1.721473976909461,
      "learning_rate": 1.9970478364217e-05,
      "loss": 0.7922,
      "step": 45
    },
    {
      "epoch": 0.8789808917197452,
      "grad_norm": 1.492822163666417,
      "learning_rate": 1.9962641565531694e-05,
      "loss": 0.8043,
      "step": 46
    },
    {
      "epoch": 0.8980891719745223,
      "grad_norm": 1.4028695347925793,
      "learning_rate": 1.9953885216726788e-05,
      "loss": 0.8107,
      "step": 47
    },
    {
      "epoch": 0.9171974522292994,
      "grad_norm": 1.4948166631903215,
      "learning_rate": 1.994421012601179e-05,
      "loss": 0.7702,
      "step": 48
    },
    {
      "epoch": 0.9363057324840764,
      "grad_norm": 1.5163532996354703,
      "learning_rate": 1.9933617186395917e-05,
      "loss": 0.8117,
      "step": 49
    },
    {
      "epoch": 0.9554140127388535,
      "grad_norm": 1.550783453013226,
      "learning_rate": 1.99221073756057e-05,
      "loss": 0.7898,
      "step": 50
    },
    {
      "epoch": 0.9745222929936306,
      "grad_norm": 1.6260454029830684,
      "learning_rate": 1.990968175599471e-05,
      "loss": 0.8044,
      "step": 51
    },
    {
      "epoch": 0.9936305732484076,
      "grad_norm": 1.6316779083657356,
      "learning_rate": 1.9896341474445526e-05,
      "loss": 0.7751,
      "step": 52
    },
    {
      "epoch": 1.0127388535031847,
      "grad_norm": 1.6273455814047662,
      "learning_rate": 1.9882087762263857e-05,
      "loss": 0.7185,
      "step": 53
    },
    {
      "epoch": 1.0318471337579618,
      "grad_norm": 1.3702016927688272,
      "learning_rate": 1.9866921935064907e-05,
      "loss": 0.6317,
      "step": 54
    },
    {
      "epoch": 1.0509554140127388,
      "grad_norm": 1.3702776063715345,
      "learning_rate": 1.985084539265195e-05,
      "loss": 0.5979,
      "step": 55
    },
    {
      "epoch": 1.070063694267516,
      "grad_norm": 1.357835755609063,
      "learning_rate": 1.983385961888711e-05,
      "loss": 0.599,
      "step": 56
    },
    {
      "epoch": 1.089171974522293,
      "grad_norm": 1.5607197079673851,
      "learning_rate": 1.9815966181554412e-05,
      "loss": 0.602,
      "step": 57
    },
    {
      "epoch": 1.10828025477707,
      "grad_norm": 1.5351352622145285,
      "learning_rate": 1.9797166732215078e-05,
      "loss": 0.6138,
      "step": 58
    },
    {
      "epoch": 1.127388535031847,
      "grad_norm": 1.7266095022061323,
      "learning_rate": 1.977746300605507e-05,
      "loss": 0.5711,
      "step": 59
    },
    {
      "epoch": 1.1464968152866242,
      "grad_norm": 1.587015285885074,
      "learning_rate": 1.975685682172497e-05,
      "loss": 0.5927,
      "step": 60
    },
    {
      "epoch": 1.1656050955414012,
      "grad_norm": 1.3602798340924052,
      "learning_rate": 1.973535008117207e-05,
      "loss": 0.5961,
      "step": 61
    },
    {
      "epoch": 1.1847133757961783,
      "grad_norm": 1.5396673283104687,
      "learning_rate": 1.9712944769464864e-05,
      "loss": 0.5776,
      "step": 62
    },
    {
      "epoch": 1.2038216560509554,
      "grad_norm": 1.4151140824773938,
      "learning_rate": 1.9689642954609808e-05,
      "loss": 0.5703,
      "step": 63
    },
    {
      "epoch": 1.2229299363057324,
      "grad_norm": 1.6305530726818593,
      "learning_rate": 1.9665446787360444e-05,
      "loss": 0.6179,
      "step": 64
    },
    {
      "epoch": 1.2420382165605095,
      "grad_norm": 1.5052390637526016,
      "learning_rate": 1.9640358501018885e-05,
      "loss": 0.5533,
      "step": 65
    },
    {
      "epoch": 1.2611464968152866,
      "grad_norm": 1.4443125571283753,
      "learning_rate": 1.9614380411229693e-05,
      "loss": 0.5839,
      "step": 66
    },
    {
      "epoch": 1.2802547770700636,
      "grad_norm": 1.3308482373115775,
      "learning_rate": 1.9587514915766124e-05,
      "loss": 0.5883,
      "step": 67
    },
    {
      "epoch": 1.2993630573248407,
      "grad_norm": 1.42729767228037,
      "learning_rate": 1.9559764494308838e-05,
      "loss": 0.5448,
      "step": 68
    },
    {
      "epoch": 1.3184713375796178,
      "grad_norm": 1.3781970251357278,
      "learning_rate": 1.9531131708217005e-05,
      "loss": 0.5767,
      "step": 69
    },
    {
      "epoch": 1.3375796178343948,
      "grad_norm": 1.3677879447269978,
      "learning_rate": 1.950161920029191e-05,
      "loss": 0.5755,
      "step": 70
    },
    {
      "epoch": 1.356687898089172,
      "grad_norm": 1.453105482367857,
      "learning_rate": 1.9471229694533003e-05,
      "loss": 0.5469,
      "step": 71
    },
    {
      "epoch": 1.3757961783439492,
      "grad_norm": 1.4482246018744436,
      "learning_rate": 1.943996599588649e-05,
      "loss": 0.5784,
      "step": 72
    },
    {
      "epoch": 1.394904458598726,
      "grad_norm": 1.401709151246391,
      "learning_rate": 1.940783098998643e-05,
      "loss": 0.592,
      "step": 73
    },
    {
      "epoch": 1.4140127388535033,
      "grad_norm": 1.445141295836924,
      "learning_rate": 1.93748276428884e-05,
      "loss": 0.5365,
      "step": 74
    },
    {
      "epoch": 1.4331210191082802,
      "grad_norm": 1.3732889276145823,
      "learning_rate": 1.9340959000795707e-05,
      "loss": 0.5772,
      "step": 75
    },
    {
      "epoch": 1.4522292993630574,
      "grad_norm": 1.2182338563233495,
      "learning_rate": 1.9306228189778255e-05,
      "loss": 0.5819,
      "step": 76
    },
    {
      "epoch": 1.4713375796178343,
      "grad_norm": 1.470612047637857,
      "learning_rate": 1.927063841548398e-05,
      "loss": 0.5574,
      "step": 77
    },
    {
      "epoch": 1.4904458598726116,
      "grad_norm": 1.586990211389091,
      "learning_rate": 1.9234192962842996e-05,
      "loss": 0.5665,
      "step": 78
    },
    {
      "epoch": 1.5095541401273884,
      "grad_norm": 1.3622127174538021,
      "learning_rate": 1.9196895195764363e-05,
      "loss": 0.6213,
      "step": 79
    },
    {
      "epoch": 1.5286624203821657,
      "grad_norm": 1.5279213108356509,
      "learning_rate": 1.9158748556825637e-05,
      "loss": 0.5928,
      "step": 80
    },
    {
      "epoch": 1.5477707006369426,
      "grad_norm": 1.381548176474239,
      "learning_rate": 1.9119756566955092e-05,
      "loss": 0.6015,
      "step": 81
    },
    {
      "epoch": 1.5668789808917198,
      "grad_norm": 1.4791457207376197,
      "learning_rate": 1.907992282510675e-05,
      "loss": 0.5937,
      "step": 82
    },
    {
      "epoch": 1.5859872611464967,
      "grad_norm": 1.3848517552915744,
      "learning_rate": 1.90392510079282e-05,
      "loss": 0.5835,
      "step": 83
    },
    {
      "epoch": 1.605095541401274,
      "grad_norm": 1.4796492304425215,
      "learning_rate": 1.8997744869421248e-05,
      "loss": 0.5785,
      "step": 84
    },
    {
      "epoch": 1.6242038216560508,
      "grad_norm": 1.4205681356051292,
      "learning_rate": 1.8955408240595396e-05,
      "loss": 0.5773,
      "step": 85
    },
    {
      "epoch": 1.643312101910828,
      "grad_norm": 1.5299136367520119,
      "learning_rate": 1.891224502911428e-05,
      "loss": 0.6068,
      "step": 86
    },
    {
      "epoch": 1.662420382165605,
      "grad_norm": 1.4517525362811488,
      "learning_rate": 1.886825921893497e-05,
      "loss": 0.5602,
      "step": 87
    },
    {
      "epoch": 1.6815286624203822,
      "grad_norm": 1.3605482820727088,
      "learning_rate": 1.8823454869940243e-05,
      "loss": 0.582,
      "step": 88
    },
    {
      "epoch": 1.700636942675159,
      "grad_norm": 1.462809151995504,
      "learning_rate": 1.8777836117563894e-05,
      "loss": 0.578,
      "step": 89
    },
    {
      "epoch": 1.7197452229299364,
      "grad_norm": 1.309078307464712,
      "learning_rate": 1.873140717240899e-05,
      "loss": 0.6215,
      "step": 90
    },
    {
      "epoch": 1.7388535031847132,
      "grad_norm": 1.3851134513499674,
      "learning_rate": 1.8684172319859258e-05,
      "loss": 0.6236,
      "step": 91
    },
    {
      "epoch": 1.7579617834394905,
      "grad_norm": 1.3737393579288095,
      "learning_rate": 1.863613591968355e-05,
      "loss": 0.5538,
      "step": 92
    },
    {
      "epoch": 1.7770700636942676,
      "grad_norm": 1.2714423428796768,
      "learning_rate": 1.858730240563342e-05,
      "loss": 0.5492,
      "step": 93
    },
    {
      "epoch": 1.7961783439490446,
      "grad_norm": 1.3886107352803891,
      "learning_rate": 1.8537676285033886e-05,
      "loss": 0.6016,
      "step": 94
    },
    {
      "epoch": 1.8152866242038217,
      "grad_norm": 1.3906573234567905,
      "learning_rate": 1.848726213836744e-05,
      "loss": 0.5578,
      "step": 95
    },
    {
      "epoch": 1.8343949044585988,
      "grad_norm": 1.3072957741832658,
      "learning_rate": 1.8436064618851225e-05,
      "loss": 0.5573,
      "step": 96
    },
    {
      "epoch": 1.8535031847133758,
      "grad_norm": 1.3086263321701983,
      "learning_rate": 1.838408845200758e-05,
      "loss": 0.5524,
      "step": 97
    },
    {
      "epoch": 1.872611464968153,
      "grad_norm": 1.6817742508337377,
      "learning_rate": 1.8331338435227838e-05,
      "loss": 0.5807,
      "step": 98
    },
    {
      "epoch": 1.89171974522293,
      "grad_norm": 1.4420032351612682,
      "learning_rate": 1.8277819437329577e-05,
      "loss": 0.5997,
      "step": 99
    },
    {
      "epoch": 1.910828025477707,
      "grad_norm": 1.2716603040915755,
      "learning_rate": 1.8223536398107177e-05,
      "loss": 0.5822,
      "step": 100
    },
    {
      "epoch": 1.929936305732484,
      "grad_norm": 1.255547700459211,
      "learning_rate": 1.8168494327875918e-05,
      "loss": 0.6029,
      "step": 101
    },
    {
      "epoch": 1.9490445859872612,
      "grad_norm": 1.5220214086917268,
      "learning_rate": 1.8112698307009506e-05,
      "loss": 0.5896,
      "step": 102
    },
    {
      "epoch": 1.9681528662420382,
      "grad_norm": 1.3560096197263136,
      "learning_rate": 1.8056153485471167e-05,
      "loss": 0.5807,
      "step": 103
    },
    {
      "epoch": 1.9872611464968153,
      "grad_norm": 1.4780227859635648,
      "learning_rate": 1.799886508233829e-05,
      "loss": 0.5835,
      "step": 104
    },
    {
      "epoch": 2.0063694267515926,
      "grad_norm": 1.2866181457082495,
      "learning_rate": 1.7940838385320732e-05,
      "loss": 0.4879,
      "step": 105
    },
    {
      "epoch": 2.0254777070063694,
      "grad_norm": 1.2198554612027852,
      "learning_rate": 1.788207875027274e-05,
      "loss": 0.3399,
      "step": 106
    },
    {
      "epoch": 2.0445859872611467,
      "grad_norm": 1.1638004148938548,
      "learning_rate": 1.7822591600698632e-05,
      "loss": 0.3263,
      "step": 107
    },
    {
      "epoch": 2.0636942675159236,
      "grad_norm": 1.6947898795763063,
      "learning_rate": 1.776238242725217e-05,
      "loss": 0.3745,
      "step": 108
    },
    {
      "epoch": 2.082802547770701,
      "grad_norm": 1.4731026648636676,
      "learning_rate": 1.7701456787229805e-05,
      "loss": 0.3178,
      "step": 109
    },
    {
      "epoch": 2.1019108280254777,
      "grad_norm": 1.2004912660652227,
      "learning_rate": 1.7639820304057745e-05,
      "loss": 0.3473,
      "step": 110
    },
    {
      "epoch": 2.121019108280255,
      "grad_norm": 1.2476423160721966,
      "learning_rate": 1.7577478666772886e-05,
      "loss": 0.3422,
      "step": 111
    },
    {
      "epoch": 2.140127388535032,
      "grad_norm": 1.193306106849365,
      "learning_rate": 1.751443762949772e-05,
      "loss": 0.3631,
      "step": 112
    },
    {
      "epoch": 2.159235668789809,
      "grad_norm": 1.0898051156699036,
      "learning_rate": 1.7450703010909263e-05,
      "loss": 0.3147,
      "step": 113
    },
    {
      "epoch": 2.178343949044586,
      "grad_norm": 1.3273161284848005,
      "learning_rate": 1.738628069370195e-05,
      "loss": 0.3676,
      "step": 114
    },
    {
      "epoch": 2.1974522292993632,
      "grad_norm": 1.2284704646661384,
      "learning_rate": 1.732117662404469e-05,
      "loss": 0.3665,
      "step": 115
    },
    {
      "epoch": 2.21656050955414,
      "grad_norm": 1.2096350181391013,
      "learning_rate": 1.7255396811032014e-05,
      "loss": 0.3573,
      "step": 116
    },
    {
      "epoch": 2.2356687898089174,
      "grad_norm": 1.2389791773164829,
      "learning_rate": 1.718894732612947e-05,
      "loss": 0.322,
      "step": 117
    },
    {
      "epoch": 2.254777070063694,
      "grad_norm": 1.111622582088061,
      "learning_rate": 1.712183430261319e-05,
      "loss": 0.3427,
      "step": 118
    },
    {
      "epoch": 2.2738853503184715,
      "grad_norm": 1.180404315815012,
      "learning_rate": 1.7054063935003813e-05,
      "loss": 0.3678,
      "step": 119
    },
    {
      "epoch": 2.2929936305732483,
      "grad_norm": 1.1375955561384368,
      "learning_rate": 1.698564247849473e-05,
      "loss": 0.3314,
      "step": 120
    },
    {
      "epoch": 2.3121019108280256,
      "grad_norm": 1.1098720646050049,
      "learning_rate": 1.691657624837472e-05,
      "loss": 0.3298,
      "step": 121
    },
    {
      "epoch": 2.3312101910828025,
      "grad_norm": 1.210067269931286,
      "learning_rate": 1.684687161944506e-05,
      "loss": 0.3599,
      "step": 122
    },
    {
      "epoch": 2.3503184713375798,
      "grad_norm": 1.1374262295996864,
      "learning_rate": 1.677653502543113e-05,
      "loss": 0.359,
      "step": 123
    },
    {
      "epoch": 2.3694267515923566,
      "grad_norm": 1.111840116040498,
      "learning_rate": 1.6705572958388576e-05,
      "loss": 0.3477,
      "step": 124
    },
    {
      "epoch": 2.388535031847134,
      "grad_norm": 1.1183070469663134,
      "learning_rate": 1.6633991968104095e-05,
      "loss": 0.3131,
      "step": 125
    },
    {
      "epoch": 2.4076433121019107,
      "grad_norm": 1.0815893984722618,
      "learning_rate": 1.6561798661490904e-05,
      "loss": 0.3548,
      "step": 126
    },
    {
      "epoch": 2.426751592356688,
      "grad_norm": 1.1159336391630241,
      "learning_rate": 1.6488999701978905e-05,
      "loss": 0.3491,
      "step": 127
    },
    {
      "epoch": 2.445859872611465,
      "grad_norm": 1.0974239822986869,
      "learning_rate": 1.6415601808899658e-05,
      "loss": 0.3177,
      "step": 128
    },
    {
      "epoch": 2.464968152866242,
      "grad_norm": 1.181955191907005,
      "learning_rate": 1.63416117568662e-05,
      "loss": 0.3594,
      "step": 129
    },
    {
      "epoch": 2.484076433121019,
      "grad_norm": 1.1059188589471078,
      "learning_rate": 1.6267036375147728e-05,
      "loss": 0.3424,
      "step": 130
    },
    {
      "epoch": 2.5031847133757963,
      "grad_norm": 1.1747469983595025,
      "learning_rate": 1.619188254703927e-05,
      "loss": 0.3567,
      "step": 131
    },
    {
      "epoch": 2.522292993630573,
      "grad_norm": 1.1490766705292863,
      "learning_rate": 1.6116157209226356e-05,
      "loss": 0.3231,
      "step": 132
    },
    {
      "epoch": 2.5414012738853504,
      "grad_norm": 1.147679134732286,
      "learning_rate": 1.6039867351144778e-05,
      "loss": 0.362,
      "step": 133
    },
    {
      "epoch": 2.5605095541401273,
      "grad_norm": 1.1049665212638309,
      "learning_rate": 1.5963020014335437e-05,
      "loss": 0.3236,
      "step": 134
    },
    {
      "epoch": 2.5796178343949046,
      "grad_norm": 1.2888513595868938,
      "learning_rate": 1.588562229179443e-05,
      "loss": 0.3675,
      "step": 135
    },
    {
      "epoch": 2.5987261146496814,
      "grad_norm": 1.153461765608754,
      "learning_rate": 1.5807681327318372e-05,
      "loss": 0.3488,
      "step": 136
    },
    {
      "epoch": 2.6178343949044587,
      "grad_norm": 1.1111216445665915,
      "learning_rate": 1.5729204314845002e-05,
      "loss": 0.3616,
      "step": 137
    },
    {
      "epoch": 2.6369426751592355,
      "grad_norm": 1.1610643251674635,
      "learning_rate": 1.56501984977892e-05,
      "loss": 0.3413,
      "step": 138
    },
    {
      "epoch": 2.656050955414013,
      "grad_norm": 1.0992867252068856,
      "learning_rate": 1.557067116837444e-05,
      "loss": 0.3049,
      "step": 139
    },
    {
      "epoch": 2.6751592356687897,
      "grad_norm": 1.1231428941054005,
      "learning_rate": 1.5490629666959668e-05,
      "loss": 0.3631,
      "step": 140
    },
    {
      "epoch": 2.694267515923567,
      "grad_norm": 1.1373285108620568,
      "learning_rate": 1.541008138136183e-05,
      "loss": 0.3233,
      "step": 141
    },
    {
      "epoch": 2.713375796178344,
      "grad_norm": 1.0712877988374556,
      "learning_rate": 1.5329033746173975e-05,
      "loss": 0.358,
      "step": 142
    },
    {
      "epoch": 2.732484076433121,
      "grad_norm": 1.0291514447015422,
      "learning_rate": 1.5247494242079024e-05,
      "loss": 0.37,
      "step": 143
    },
    {
      "epoch": 2.7515923566878984,
      "grad_norm": 1.1128200584801455,
      "learning_rate": 1.5165470395159314e-05,
      "loss": 0.3301,
      "step": 144
    },
    {
      "epoch": 2.770700636942675,
      "grad_norm": 1.1323881148146315,
      "learning_rate": 1.5082969776201948e-05,
      "loss": 0.3067,
      "step": 145
    },
    {
      "epoch": 2.789808917197452,
      "grad_norm": 1.0308429646272341,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.3289,
      "step": 146
    },
    {
      "epoch": 2.8089171974522293,
      "grad_norm": 1.0552781992097453,
      "learning_rate": 1.4916568724649688e-05,
      "loss": 0.3226,
      "step": 147
    },
    {
      "epoch": 2.8280254777070066,
      "grad_norm": 1.1483190940288472,
      "learning_rate": 1.483268365084351e-05,
      "loss": 0.341,
      "step": 148
    },
    {
      "epoch": 2.8471337579617835,
      "grad_norm": 1.1983775278717306,
      "learning_rate": 1.4748352521159492e-05,
      "loss": 0.3085,
      "step": 149
    },
    {
      "epoch": 2.8662420382165603,
      "grad_norm": 1.0020507576002027,
      "learning_rate": 1.466358311934654e-05,
      "loss": 0.3429,
      "step": 150
    },
    {
      "epoch": 2.8853503184713376,
      "grad_norm": 1.1972458385216016,
      "learning_rate": 1.4578383269606004e-05,
      "loss": 0.2933,
      "step": 151
    },
    {
      "epoch": 2.904458598726115,
      "grad_norm": 1.1589718554108799,
      "learning_rate": 1.4492760835869504e-05,
      "loss": 0.345,
      "step": 152
    },
    {
      "epoch": 2.9235668789808917,
      "grad_norm": 1.1444814297688288,
      "learning_rate": 1.4406723721073088e-05,
      "loss": 0.3898,
      "step": 153
    },
    {
      "epoch": 2.9426751592356686,
      "grad_norm": 1.1048277109165345,
      "learning_rate": 1.4320279866427798e-05,
      "loss": 0.3674,
      "step": 154
    },
    {
      "epoch": 2.961783439490446,
      "grad_norm": 1.14131770709984,
      "learning_rate": 1.4233437250686695e-05,
      "loss": 0.3271,
      "step": 155
    },
    {
      "epoch": 2.980891719745223,
      "grad_norm": 1.0730828437212399,
      "learning_rate": 1.4146203889408418e-05,
      "loss": 0.3342,
      "step": 156
    },
    {
      "epoch": 3.0,
      "grad_norm": 1.0986481112547453,
      "learning_rate": 1.4058587834217356e-05,
      "loss": 0.3276,
      "step": 157
    },
    {
      "epoch": 3.0191082802547773,
      "grad_norm": 0.9727813760048125,
      "learning_rate": 1.3970597172060482e-05,
      "loss": 0.2086,
      "step": 158
    },
    {
      "epoch": 3.038216560509554,
      "grad_norm": 0.9351469513287544,
      "learning_rate": 1.3882240024460928e-05,
      "loss": 0.2144,
      "step": 159
    },
    {
      "epoch": 3.0573248407643314,
      "grad_norm": 0.9596216840602939,
      "learning_rate": 1.3793524546768358e-05,
      "loss": 0.1854,
      "step": 160
    },
    {
      "epoch": 3.0764331210191083,
      "grad_norm": 1.0070708882490527,
      "learning_rate": 1.3704458927406261e-05,
      "loss": 0.1979,
      "step": 161
    },
    {
      "epoch": 3.0955414012738856,
      "grad_norm": 1.1947399346702603,
      "learning_rate": 1.3615051387116131e-05,
      "loss": 0.2177,
      "step": 162
    },
    {
      "epoch": 3.1146496815286624,
      "grad_norm": 0.9221956509184932,
      "learning_rate": 1.3525310178198707e-05,
      "loss": 0.2099,
      "step": 163
    },
    {
      "epoch": 3.1337579617834397,
      "grad_norm": 0.9410399106958189,
      "learning_rate": 1.3435243583752294e-05,
      "loss": 0.2038,
      "step": 164
    },
    {
      "epoch": 3.1528662420382165,
      "grad_norm": 0.9814965993205671,
      "learning_rate": 1.3344859916908206e-05,
      "loss": 0.2378,
      "step": 165
    },
    {
      "epoch": 3.171974522292994,
      "grad_norm": 0.9231538478983656,
      "learning_rate": 1.325416752006351e-05,
      "loss": 0.1767,
      "step": 166
    },
    {
      "epoch": 3.1910828025477707,
      "grad_norm": 0.8950308599796192,
      "learning_rate": 1.3163174764110985e-05,
      "loss": 0.1753,
      "step": 167
    },
    {
      "epoch": 3.210191082802548,
      "grad_norm": 0.9109948031677837,
      "learning_rate": 1.3071890047666498e-05,
      "loss": 0.2195,
      "step": 168
    },
    {
      "epoch": 3.229299363057325,
      "grad_norm": 0.9602237468554572,
      "learning_rate": 1.2980321796293838e-05,
      "loss": 0.2231,
      "step": 169
    },
    {
      "epoch": 3.248407643312102,
      "grad_norm": 0.9472126025942549,
      "learning_rate": 1.288847846172701e-05,
      "loss": 0.233,
      "step": 170
    },
    {
      "epoch": 3.267515923566879,
      "grad_norm": 0.9448974877228525,
      "learning_rate": 1.2796368521090143e-05,
      "loss": 0.2293,
      "step": 171
    },
    {
      "epoch": 3.286624203821656,
      "grad_norm": 0.8735754301637381,
      "learning_rate": 1.2704000476115079e-05,
      "loss": 0.1923,
      "step": 172
    },
    {
      "epoch": 3.305732484076433,
      "grad_norm": 0.9320466823822048,
      "learning_rate": 1.2611382852356632e-05,
      "loss": 0.241,
      "step": 173
    },
    {
      "epoch": 3.3248407643312103,
      "grad_norm": 0.8692677947193066,
      "learning_rate": 1.2518524198405699e-05,
      "loss": 0.227,
      "step": 174
    },
    {
      "epoch": 3.343949044585987,
      "grad_norm": 0.8801146416841564,
      "learning_rate": 1.2425433085100224e-05,
      "loss": 0.1892,
      "step": 175
    },
    {
      "epoch": 3.3630573248407645,
      "grad_norm": 1.024131338299322,
      "learning_rate": 1.233211810473411e-05,
      "loss": 0.2217,
      "step": 176
    },
    {
      "epoch": 3.3821656050955413,
      "grad_norm": 0.8806375469042244,
      "learning_rate": 1.2238587870264152e-05,
      "loss": 0.2289,
      "step": 177
    },
    {
      "epoch": 3.4012738853503186,
      "grad_norm": 0.8949628237735877,
      "learning_rate": 1.2144851014515055e-05,
      "loss": 0.1952,
      "step": 178
    },
    {
      "epoch": 3.4203821656050954,
      "grad_norm": 0.9317408522256906,
      "learning_rate": 1.2050916189382646e-05,
      "loss": 0.2116,
      "step": 179
    },
    {
      "epoch": 3.4394904458598727,
      "grad_norm": 0.8998850494532862,
      "learning_rate": 1.1956792065035281e-05,
      "loss": 0.1798,
      "step": 180
    },
    {
      "epoch": 3.4585987261146496,
      "grad_norm": 0.854442662864881,
      "learning_rate": 1.1862487329113606e-05,
      "loss": 0.213,
      "step": 181
    },
    {
      "epoch": 3.477707006369427,
      "grad_norm": 0.904951729039575,
      "learning_rate": 1.1768010685928686e-05,
      "loss": 0.1573,
      "step": 182
    },
    {
      "epoch": 3.4968152866242037,
      "grad_norm": 0.8905053917581376,
      "learning_rate": 1.1673370855658592e-05,
      "loss": 0.1893,
      "step": 183
    },
    {
      "epoch": 3.515923566878981,
      "grad_norm": 0.8324598623669239,
      "learning_rate": 1.1578576573543541e-05,
      "loss": 0.2159,
      "step": 184
    },
    {
      "epoch": 3.535031847133758,
      "grad_norm": 1.0077484019543617,
      "learning_rate": 1.1483636589079627e-05,
      "loss": 0.1904,
      "step": 185
    },
    {
      "epoch": 3.554140127388535,
      "grad_norm": 0.8809032468402521,
      "learning_rate": 1.1388559665211241e-05,
      "loss": 0.2147,
      "step": 186
    },
    {
      "epoch": 3.573248407643312,
      "grad_norm": 0.8655097326224989,
      "learning_rate": 1.1293354577522264e-05,
      "loss": 0.2162,
      "step": 187
    },
    {
      "epoch": 3.5923566878980893,
      "grad_norm": 0.8789799920919437,
      "learning_rate": 1.1198030113426074e-05,
      "loss": 0.19,
      "step": 188
    },
    {
      "epoch": 3.611464968152866,
      "grad_norm": 0.903268539210289,
      "learning_rate": 1.1102595071354471e-05,
      "loss": 0.1852,
      "step": 189
    },
    {
      "epoch": 3.6305732484076434,
      "grad_norm": 0.9455765723436995,
      "learning_rate": 1.1007058259945584e-05,
      "loss": 0.2172,
      "step": 190
    },
    {
      "epoch": 3.6496815286624202,
      "grad_norm": 0.854386751908126,
      "learning_rate": 1.0911428497230834e-05,
      "loss": 0.2218,
      "step": 191
    },
    {
      "epoch": 3.6687898089171975,
      "grad_norm": 0.8554909616748702,
      "learning_rate": 1.0815714609821027e-05,
      "loss": 0.2033,
      "step": 192
    },
    {
      "epoch": 3.6878980891719744,
      "grad_norm": 0.9177260635164998,
      "learning_rate": 1.0719925432091671e-05,
      "loss": 0.2166,
      "step": 193
    },
    {
      "epoch": 3.7070063694267517,
      "grad_norm": 0.8574723299072876,
      "learning_rate": 1.0624069805367558e-05,
      "loss": 0.208,
      "step": 194
    },
    {
      "epoch": 3.7261146496815285,
      "grad_norm": 0.8427079553036521,
      "learning_rate": 1.0528156577106703e-05,
      "loss": 0.2083,
      "step": 195
    },
    {
      "epoch": 3.745222929936306,
      "grad_norm": 0.8880834494398775,
      "learning_rate": 1.043219460008374e-05,
      "loss": 0.2112,
      "step": 196
    },
    {
      "epoch": 3.7643312101910826,
      "grad_norm": 0.895077225646693,
      "learning_rate": 1.0336192731572805e-05,
      "loss": 0.2181,
      "step": 197
    },
    {
      "epoch": 3.78343949044586,
      "grad_norm": 0.8535144125727894,
      "learning_rate": 1.0240159832530007e-05,
      "loss": 0.1875,
      "step": 198
    },
    {
      "epoch": 3.802547770700637,
      "grad_norm": 0.8852101248249045,
      "learning_rate": 1.0144104766775574e-05,
      "loss": 0.2243,
      "step": 199
    },
    {
      "epoch": 3.821656050955414,
      "grad_norm": 0.8844111940812327,
      "learning_rate": 1.004803640017571e-05,
      "loss": 0.2107,
      "step": 200
    },
    {
      "epoch": 3.840764331210191,
      "grad_norm": 0.8703098886346262,
      "learning_rate": 9.951963599824294e-06,
      "loss": 0.2522,
      "step": 201
    },
    {
      "epoch": 3.859872611464968,
      "grad_norm": 0.8379951683247892,
      "learning_rate": 9.855895233224431e-06,
      "loss": 0.2271,
      "step": 202
    },
    {
      "epoch": 3.8789808917197455,
      "grad_norm": 0.8485804093654242,
      "learning_rate": 9.759840167469995e-06,
      "loss": 0.2111,
      "step": 203
    },
    {
      "epoch": 3.8980891719745223,
      "grad_norm": 0.9209244983709053,
      "learning_rate": 9.663807268427197e-06,
      "loss": 0.2085,
      "step": 204
    },
    {
      "epoch": 3.917197452229299,
      "grad_norm": 0.8341014031710875,
      "learning_rate": 9.56780539991626e-06,
      "loss": 0.2054,
      "step": 205
    },
    {
      "epoch": 3.9363057324840764,
      "grad_norm": 0.830481999318346,
      "learning_rate": 9.471843422893299e-06,
      "loss": 0.1696,
      "step": 206
    },
    {
      "epoch": 3.9554140127388537,
      "grad_norm": 0.870075031630067,
      "learning_rate": 9.375930194632447e-06,
      "loss": 0.2088,
      "step": 207
    },
    {
      "epoch": 3.9745222929936306,
      "grad_norm": 0.8949036645858451,
      "learning_rate": 9.28007456790833e-06,
      "loss": 0.1856,
      "step": 208
    },
    {
      "epoch": 3.9936305732484074,
      "grad_norm": 0.8409816207558508,
      "learning_rate": 9.184285390178978e-06,
      "loss": 0.1888,
      "step": 209
    },
    {
      "epoch": 4.012738853503185,
      "grad_norm": 0.7770572252155418,
      "learning_rate": 9.08857150276917e-06,
      "loss": 0.1449,
      "step": 210
    },
    {
      "epoch": 4.031847133757962,
      "grad_norm": 0.7207065511670053,
      "learning_rate": 8.992941740054418e-06,
      "loss": 0.1393,
      "step": 211
    },
    {
      "epoch": 4.050955414012739,
      "grad_norm": 0.6573904924432173,
      "learning_rate": 8.897404928645529e-06,
      "loss": 0.1382,
      "step": 212
    },
    {
      "epoch": 4.070063694267516,
      "grad_norm": 0.5998961182192761,
      "learning_rate": 8.80196988657393e-06,
      "loss": 0.1179,
      "step": 213
    },
    {
      "epoch": 4.089171974522293,
      "grad_norm": 0.7353709913341053,
      "learning_rate": 8.706645422477739e-06,
      "loss": 0.1296,
      "step": 214
    },
    {
      "epoch": 4.10828025477707,
      "grad_norm": 0.8195740194511104,
      "learning_rate": 8.611440334788762e-06,
      "loss": 0.1485,
      "step": 215
    },
    {
      "epoch": 4.127388535031847,
      "grad_norm": 0.7112832621893119,
      "learning_rate": 8.516363410920376e-06,
      "loss": 0.1182,
      "step": 216
    },
    {
      "epoch": 4.146496815286624,
      "grad_norm": 0.7215183552128456,
      "learning_rate": 8.42142342645646e-06,
      "loss": 0.1575,
      "step": 217
    },
    {
      "epoch": 4.165605095541402,
      "grad_norm": 0.6361659814580954,
      "learning_rate": 8.326629144341408e-06,
      "loss": 0.1021,
      "step": 218
    },
    {
      "epoch": 4.1847133757961785,
      "grad_norm": 0.6779558705834515,
      "learning_rate": 8.231989314071318e-06,
      "loss": 0.1243,
      "step": 219
    },
    {
      "epoch": 4.203821656050955,
      "grad_norm": 0.6707598534659711,
      "learning_rate": 8.137512670886397e-06,
      "loss": 0.0998,
      "step": 220
    },
    {
      "epoch": 4.222929936305732,
      "grad_norm": 0.6470471778032241,
      "learning_rate": 8.043207934964722e-06,
      "loss": 0.148,
      "step": 221
    },
    {
      "epoch": 4.24203821656051,
      "grad_norm": 0.661244303933043,
      "learning_rate": 7.949083810617358e-06,
      "loss": 0.1246,
      "step": 222
    },
    {
      "epoch": 4.261146496815287,
      "grad_norm": 0.6142982587848405,
      "learning_rate": 7.855148985484946e-06,
      "loss": 0.1198,
      "step": 223
    },
    {
      "epoch": 4.280254777070064,
      "grad_norm": 0.6629685162864739,
      "learning_rate": 7.761412129735853e-06,
      "loss": 0.1212,
      "step": 224
    },
    {
      "epoch": 4.2993630573248405,
      "grad_norm": 0.6952845083203738,
      "learning_rate": 7.667881895265895e-06,
      "loss": 0.1298,
      "step": 225
    },
    {
      "epoch": 4.318471337579618,
      "grad_norm": 0.6333882044772869,
      "learning_rate": 7.574566914899779e-06,
      "loss": 0.1225,
      "step": 226
    },
    {
      "epoch": 4.337579617834395,
      "grad_norm": 0.6333292253877881,
      "learning_rate": 7.481475801594302e-06,
      "loss": 0.0954,
      "step": 227
    },
    {
      "epoch": 4.356687898089172,
      "grad_norm": 0.6771048371022191,
      "learning_rate": 7.388617147643371e-06,
      "loss": 0.1156,
      "step": 228
    },
    {
      "epoch": 4.375796178343949,
      "grad_norm": 0.6522245094480688,
      "learning_rate": 7.295999523884921e-06,
      "loss": 0.1123,
      "step": 229
    },
    {
      "epoch": 4.3949044585987265,
      "grad_norm": 0.6404010369130144,
      "learning_rate": 7.203631478909857e-06,
      "loss": 0.113,
      "step": 230
    },
    {
      "epoch": 4.414012738853503,
      "grad_norm": 0.6917698578909479,
      "learning_rate": 7.111521538272997e-06,
      "loss": 0.1179,
      "step": 231
    },
    {
      "epoch": 4.43312101910828,
      "grad_norm": 0.6432868110985619,
      "learning_rate": 7.019678203706164e-06,
      "loss": 0.1284,
      "step": 232
    },
    {
      "epoch": 4.452229299363057,
      "grad_norm": 0.5862011184671926,
      "learning_rate": 6.928109952333506e-06,
      "loss": 0.1318,
      "step": 233
    },
    {
      "epoch": 4.471337579617835,
      "grad_norm": 0.5983391729216361,
      "learning_rate": 6.83682523588902e-06,
      "loss": 0.1361,
      "step": 234
    },
    {
      "epoch": 4.490445859872612,
      "grad_norm": 0.7384156441048828,
      "learning_rate": 6.745832479936492e-06,
      "loss": 0.1202,
      "step": 235
    },
    {
      "epoch": 4.509554140127388,
      "grad_norm": 0.5801257613679321,
      "learning_rate": 6.655140083091794e-06,
      "loss": 0.1142,
      "step": 236
    },
    {
      "epoch": 4.528662420382165,
      "grad_norm": 0.6644460629119762,
      "learning_rate": 6.564756416247712e-06,
      "loss": 0.1132,
      "step": 237
    },
    {
      "epoch": 4.547770700636943,
      "grad_norm": 0.6338897010453224,
      "learning_rate": 6.474689821801295e-06,
      "loss": 0.1224,
      "step": 238
    },
    {
      "epoch": 4.56687898089172,
      "grad_norm": 0.6265192886702466,
      "learning_rate": 6.384948612883872e-06,
      "loss": 0.1081,
      "step": 239
    },
    {
      "epoch": 4.585987261146497,
      "grad_norm": 0.6375524943746127,
      "learning_rate": 6.2955410725937405e-06,
      "loss": 0.142,
      "step": 240
    },
    {
      "epoch": 4.6050955414012735,
      "grad_norm": 0.6024003247740342,
      "learning_rate": 6.206475453231644e-06,
      "loss": 0.1098,
      "step": 241
    },
    {
      "epoch": 4.624203821656051,
      "grad_norm": 0.5422627616710795,
      "learning_rate": 6.117759975539075e-06,
      "loss": 0.1084,
      "step": 242
    },
    {
      "epoch": 4.643312101910828,
      "grad_norm": 0.6082249236985866,
      "learning_rate": 6.029402827939519e-06,
      "loss": 0.1356,
      "step": 243
    },
    {
      "epoch": 4.662420382165605,
      "grad_norm": 0.6409844864728751,
      "learning_rate": 5.941412165782645e-06,
      "loss": 0.1072,
      "step": 244
    },
    {
      "epoch": 4.681528662420382,
      "grad_norm": 0.6047083213755711,
      "learning_rate": 5.853796110591583e-06,
      "loss": 0.1199,
      "step": 245
    },
    {
      "epoch": 4.7006369426751595,
      "grad_norm": 0.578772450983975,
      "learning_rate": 5.766562749313309e-06,
      "loss": 0.1397,
      "step": 246
    },
    {
      "epoch": 4.719745222929936,
      "grad_norm": 0.651473421215594,
      "learning_rate": 5.6797201335722064e-06,
      "loss": 0.1534,
      "step": 247
    },
    {
      "epoch": 4.738853503184713,
      "grad_norm": 0.5959248362521793,
      "learning_rate": 5.593276278926912e-06,
      "loss": 0.1258,
      "step": 248
    },
    {
      "epoch": 4.757961783439491,
      "grad_norm": 0.5896005713691379,
      "learning_rate": 5.507239164130501e-06,
      "loss": 0.1215,
      "step": 249
    },
    {
      "epoch": 4.777070063694268,
      "grad_norm": 0.6246354502017855,
      "learning_rate": 5.421616730394e-06,
      "loss": 0.1421,
      "step": 250
    },
    {
      "epoch": 4.796178343949045,
      "grad_norm": 0.5802170532932973,
      "learning_rate": 5.336416880653461e-06,
      "loss": 0.1217,
      "step": 251
    },
    {
      "epoch": 4.8152866242038215,
      "grad_norm": 0.6301892377174323,
      "learning_rate": 5.251647478840511e-06,
      "loss": 0.1253,
      "step": 252
    },
    {
      "epoch": 4.834394904458598,
      "grad_norm": 0.6324248660174402,
      "learning_rate": 5.167316349156495e-06,
      "loss": 0.1049,
      "step": 253
    },
    {
      "epoch": 4.853503184713376,
      "grad_norm": 0.6270125086141117,
      "learning_rate": 5.083431275350312e-06,
      "loss": 0.0929,
      "step": 254
    },
    {
      "epoch": 4.872611464968153,
      "grad_norm": 0.589626433719279,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.1183,
      "step": 255
    },
    {
      "epoch": 4.89171974522293,
      "grad_norm": 0.6333892873260404,
      "learning_rate": 4.917030223798057e-06,
      "loss": 0.1385,
      "step": 256
    },
    {
      "epoch": 4.9108280254777075,
      "grad_norm": 0.6049252674697725,
      "learning_rate": 4.834529604840686e-06,
      "loss": 0.106,
      "step": 257
    },
    {
      "epoch": 4.929936305732484,
      "grad_norm": 0.5901985327547369,
      "learning_rate": 4.7525057579209775e-06,
      "loss": 0.096,
      "step": 258
    },
    {
      "epoch": 4.949044585987261,
      "grad_norm": 0.5559455099052016,
      "learning_rate": 4.670966253826027e-06,
      "loss": 0.1009,
      "step": 259
    },
    {
      "epoch": 4.968152866242038,
      "grad_norm": 0.6211677396356956,
      "learning_rate": 4.589918618638173e-06,
      "loss": 0.1286,
      "step": 260
    },
    {
      "epoch": 4.987261146496815,
      "grad_norm": 0.5873071270705379,
      "learning_rate": 4.5093703330403385e-06,
      "loss": 0.1145,
      "step": 261
    },
    {
      "epoch": 5.006369426751593,
      "grad_norm": 0.469843710090882,
      "learning_rate": 4.429328831625565e-06,
      "loss": 0.1196,
      "step": 262
    },
    {
      "epoch": 5.025477707006369,
      "grad_norm": 0.43945316537836715,
      "learning_rate": 4.349801502210801e-06,
      "loss": 0.0876,
      "step": 263
    },
    {
      "epoch": 5.044585987261146,
      "grad_norm": 0.37285253307993454,
      "learning_rate": 4.270795685155001e-06,
      "loss": 0.0992,
      "step": 264
    },
    {
      "epoch": 5.063694267515924,
      "grad_norm": 0.36075563930480936,
      "learning_rate": 4.192318672681631e-06,
      "loss": 0.0811,
      "step": 265
    },
    {
      "epoch": 5.082802547770701,
      "grad_norm": 0.36018265438822283,
      "learning_rate": 4.1143777082055715e-06,
      "loss": 0.0765,
      "step": 266
    },
    {
      "epoch": 5.101910828025478,
      "grad_norm": 0.40524551115007706,
      "learning_rate": 4.036979985664566e-06,
      "loss": 0.0993,
      "step": 267
    },
    {
      "epoch": 5.1210191082802545,
      "grad_norm": 0.3902710741865906,
      "learning_rate": 3.960132648855226e-06,
      "loss": 0.0704,
      "step": 268
    },
    {
      "epoch": 5.140127388535032,
      "grad_norm": 0.4257168548853022,
      "learning_rate": 3.883842790773647e-06,
      "loss": 0.078,
      "step": 269
    },
    {
      "epoch": 5.159235668789809,
      "grad_norm": 0.42709657802643913,
      "learning_rate": 3.8081174529607346e-06,
      "loss": 0.0901,
      "step": 270
    },
    {
      "epoch": 5.178343949044586,
      "grad_norm": 0.4464466835988097,
      "learning_rate": 3.732963624852275e-06,
      "loss": 0.1005,
      "step": 271
    },
    {
      "epoch": 5.197452229299363,
      "grad_norm": 0.43813407531215287,
      "learning_rate": 3.6583882431338047e-06,
      "loss": 0.0794,
      "step": 272
    },
    {
      "epoch": 5.2165605095541405,
      "grad_norm": 0.42534884458138056,
      "learning_rate": 3.584398191100341e-06,
      "loss": 0.0804,
      "step": 273
    },
    {
      "epoch": 5.235668789808917,
      "grad_norm": 0.3904545598805854,
      "learning_rate": 3.511000298021098e-06,
      "loss": 0.0746,
      "step": 274
    },
    {
      "epoch": 5.254777070063694,
      "grad_norm": 0.38122125804588974,
      "learning_rate": 3.4382013385090985e-06,
      "loss": 0.0771,
      "step": 275
    },
    {
      "epoch": 5.273885350318471,
      "grad_norm": 0.3910636832792095,
      "learning_rate": 3.3660080318959043e-06,
      "loss": 0.0763,
      "step": 276
    },
    {
      "epoch": 5.292993630573249,
      "grad_norm": 0.3943252213563477,
      "learning_rate": 3.2944270416114256e-06,
      "loss": 0.0824,
      "step": 277
    },
    {
      "epoch": 5.312101910828026,
      "grad_norm": 0.4153720046814973,
      "learning_rate": 3.223464974568874e-06,
      "loss": 0.0808,
      "step": 278
    },
    {
      "epoch": 5.3312101910828025,
      "grad_norm": 0.37449475995730286,
      "learning_rate": 3.153128380554941e-06,
      "loss": 0.0669,
      "step": 279
    },
    {
      "epoch": 5.350318471337579,
      "grad_norm": 0.3678087641481315,
      "learning_rate": 3.0834237516252817e-06,
      "loss": 0.078,
      "step": 280
    },
    {
      "epoch": 5.369426751592357,
      "grad_norm": 0.39958648443736494,
      "learning_rate": 3.0143575215052732e-06,
      "loss": 0.1098,
      "step": 281
    },
    {
      "epoch": 5.388535031847134,
      "grad_norm": 0.44061456973739005,
      "learning_rate": 2.94593606499619e-06,
      "loss": 0.0736,
      "step": 282
    },
    {
      "epoch": 5.407643312101911,
      "grad_norm": 0.40798693413680315,
      "learning_rate": 2.878165697386812e-06,
      "loss": 0.0743,
      "step": 283
    },
    {
      "epoch": 5.426751592356688,
      "grad_norm": 0.40002583724881186,
      "learning_rate": 2.8110526738705345e-06,
      "loss": 0.0973,
      "step": 284
    },
    {
      "epoch": 5.445859872611465,
      "grad_norm": 0.4027836230760282,
      "learning_rate": 2.7446031889679893e-06,
      "loss": 0.0863,
      "step": 285
    },
    {
      "epoch": 5.464968152866242,
      "grad_norm": 0.4223422247231742,
      "learning_rate": 2.678823375955314e-06,
      "loss": 0.0827,
      "step": 286
    },
    {
      "epoch": 5.484076433121019,
      "grad_norm": 0.3753472338541051,
      "learning_rate": 2.6137193062980506e-06,
      "loss": 0.0927,
      "step": 287
    },
    {
      "epoch": 5.503184713375796,
      "grad_norm": 0.4371880580691285,
      "learning_rate": 2.5492969890907383e-06,
      "loss": 0.0862,
      "step": 288
    },
    {
      "epoch": 5.522292993630574,
      "grad_norm": 0.43678290237390494,
      "learning_rate": 2.485562370502279e-06,
      "loss": 0.1076,
      "step": 289
    },
    {
      "epoch": 5.54140127388535,
      "grad_norm": 0.4686470682614719,
      "learning_rate": 2.4225213332271203e-06,
      "loss": 0.0658,
      "step": 290
    },
    {
      "epoch": 5.560509554140127,
      "grad_norm": 0.399090511069466,
      "learning_rate": 2.3601796959422585e-06,
      "loss": 0.0867,
      "step": 291
    },
    {
      "epoch": 5.579617834394904,
      "grad_norm": 0.38033742520384534,
      "learning_rate": 2.2985432127701945e-06,
      "loss": 0.0939,
      "step": 292
    },
    {
      "epoch": 5.598726114649682,
      "grad_norm": 0.4195573707985727,
      "learning_rate": 2.2376175727478346e-06,
      "loss": 0.0792,
      "step": 293
    },
    {
      "epoch": 5.617834394904459,
      "grad_norm": 0.39410042613894186,
      "learning_rate": 2.1774083993013715e-06,
      "loss": 0.0939,
      "step": 294
    },
    {
      "epoch": 5.6369426751592355,
      "grad_norm": 0.42028801078240485,
      "learning_rate": 2.1179212497272582e-06,
      "loss": 0.0914,
      "step": 295
    },
    {
      "epoch": 5.656050955414012,
      "grad_norm": 0.4373301107215838,
      "learning_rate": 2.0591616146792705e-06,
      "loss": 0.0612,
      "step": 296
    },
    {
      "epoch": 5.67515923566879,
      "grad_norm": 0.42230760844766996,
      "learning_rate": 2.0011349176617133e-06,
      "loss": 0.0745,
      "step": 297
    },
    {
      "epoch": 5.694267515923567,
      "grad_norm": 0.43204660972362535,
      "learning_rate": 1.9438465145288377e-06,
      "loss": 0.0862,
      "step": 298
    },
    {
      "epoch": 5.713375796178344,
      "grad_norm": 0.42621175764084096,
      "learning_rate": 1.8873016929904942e-06,
      "loss": 0.0902,
      "step": 299
    },
    {
      "epoch": 5.732484076433121,
      "grad_norm": 0.3834518525429406,
      "learning_rate": 1.8315056721240831e-06,
      "loss": 0.0651,
      "step": 300
    },
    {
      "epoch": 5.751592356687898,
      "grad_norm": 0.37147796573780106,
      "learning_rate": 1.7764636018928249e-06,
      "loss": 0.0725,
      "step": 301
    },
    {
      "epoch": 5.770700636942675,
      "grad_norm": 0.3537695521438638,
      "learning_rate": 1.722180562670428e-06,
      "loss": 0.0805,
      "step": 302
    },
    {
      "epoch": 5.789808917197452,
      "grad_norm": 0.3894501723809,
      "learning_rate": 1.6686615647721638e-06,
      "loss": 0.0774,
      "step": 303
    },
    {
      "epoch": 5.80891719745223,
      "grad_norm": 0.37961916616617003,
      "learning_rate": 1.6159115479924259e-06,
      "loss": 0.0927,
      "step": 304
    },
    {
      "epoch": 5.828025477707007,
      "grad_norm": 0.3872561941363768,
      "learning_rate": 1.5639353811487744e-06,
      "loss": 0.0741,
      "step": 305
    },
    {
      "epoch": 5.8471337579617835,
      "grad_norm": 0.34833321067453243,
      "learning_rate": 1.5127378616325606e-06,
      "loss": 0.0744,
      "step": 306
    },
    {
      "epoch": 5.86624203821656,
      "grad_norm": 0.3855037708428381,
      "learning_rate": 1.462323714966114e-06,
      "loss": 0.0908,
      "step": 307
    },
    {
      "epoch": 5.885350318471337,
      "grad_norm": 0.40297529412539035,
      "learning_rate": 1.4126975943665844e-06,
      "loss": 0.0998,
      "step": 308
    },
    {
      "epoch": 5.904458598726115,
      "grad_norm": 0.4029126709486481,
      "learning_rate": 1.3638640803164516e-06,
      "loss": 0.0911,
      "step": 309
    },
    {
      "epoch": 5.923566878980892,
      "grad_norm": 0.4258982687557646,
      "learning_rate": 1.3158276801407432e-06,
      "loss": 0.0849,
      "step": 310
    },
    {
      "epoch": 5.942675159235669,
      "grad_norm": 0.3553371859042462,
      "learning_rate": 1.2685928275910142e-06,
      "loss": 0.0676,
      "step": 311
    },
    {
      "epoch": 5.961783439490446,
      "grad_norm": 0.3979066431111897,
      "learning_rate": 1.222163882436107e-06,
      "loss": 0.0509,
      "step": 312
    },
    {
      "epoch": 5.980891719745223,
      "grad_norm": 0.4141884197499124,
      "learning_rate": 1.1765451300597574e-06,
      "loss": 0.089,
      "step": 313
    },
    {
      "epoch": 6.0,
      "grad_norm": 0.3770050896807073,
      "learning_rate": 1.1317407810650372e-06,
      "loss": 0.0544,
      "step": 314
    },
    {
      "epoch": 6.019108280254777,
      "grad_norm": 0.32674830108323855,
      "learning_rate": 1.0877549708857228e-06,
      "loss": 0.0603,
      "step": 315
    },
    {
      "epoch": 6.038216560509555,
      "grad_norm": 0.3123835583619939,
      "learning_rate": 1.0445917594046073e-06,
      "loss": 0.0645,
      "step": 316
    },
    {
      "epoch": 6.057324840764331,
      "grad_norm": 0.2533884278161009,
      "learning_rate": 1.0022551305787564e-06,
      "loss": 0.0571,
      "step": 317
    },
    {
      "epoch": 6.076433121019108,
      "grad_norm": 0.25372043981270836,
      "learning_rate": 9.607489920717983e-07,
      "loss": 0.0677,
      "step": 318
    },
    {
      "epoch": 6.095541401273885,
      "grad_norm": 0.28932834883810127,
      "learning_rate": 9.200771748932513e-07,
      "loss": 0.0665,
      "step": 319
    },
    {
      "epoch": 6.114649681528663,
      "grad_norm": 0.29574813210798573,
      "learning_rate": 8.802434330449128e-07,
      "loss": 0.0605,
      "step": 320
    },
    {
      "epoch": 6.13375796178344,
      "grad_norm": 0.32412205688268597,
      "learning_rate": 8.412514431743657e-07,
      "loss": 0.0759,
      "step": 321
    },
    {
      "epoch": 6.1528662420382165,
      "grad_norm": 0.298971869628302,
      "learning_rate": 8.031048042356393e-07,
      "loss": 0.0944,
      "step": 322
    },
    {
      "epoch": 6.171974522292993,
      "grad_norm": 0.28058094255268096,
      "learning_rate": 7.65807037157007e-07,
      "loss": 0.0737,
      "step": 323
    },
    {
      "epoch": 6.191082802547771,
      "grad_norm": 0.26273990851994955,
      "learning_rate": 7.293615845160196e-07,
      "loss": 0.0801,
      "step": 324
    },
    {
      "epoch": 6.210191082802548,
      "grad_norm": 0.26749718281850127,
      "learning_rate": 6.937718102217461e-07,
      "loss": 0.0843,
      "step": 325
    },
    {
      "epoch": 6.229299363057325,
      "grad_norm": 0.27788643017096765,
      "learning_rate": 6.590409992042957e-07,
      "loss": 0.0479,
      "step": 326
    },
    {
      "epoch": 6.248407643312102,
      "grad_norm": 0.32662056891673275,
      "learning_rate": 6.251723571116031e-07,
      "loss": 0.0996,
      "step": 327
    },
    {
      "epoch": 6.267515923566879,
      "grad_norm": 0.34232243605883206,
      "learning_rate": 5.921690100135713e-07,
      "loss": 0.1195,
      "step": 328
    },
    {
      "epoch": 6.286624203821656,
      "grad_norm": 0.26341475326491803,
      "learning_rate": 5.600340041135133e-07,
      "loss": 0.041,
      "step": 329
    },
    {
      "epoch": 6.305732484076433,
      "grad_norm": 0.32091863933742903,
      "learning_rate": 5.287703054670012e-07,
      "loss": 0.0896,
      "step": 330
    },
    {
      "epoch": 6.32484076433121,
      "grad_norm": 0.26324953620812674,
      "learning_rate": 4.983807997080925e-07,
      "loss": 0.0507,
      "step": 331
    },
    {
      "epoch": 6.343949044585988,
      "grad_norm": 0.31884114580503486,
      "learning_rate": 4.6886829178299676e-07,
      "loss": 0.0916,
      "step": 332
    },
    {
      "epoch": 6.3630573248407645,
      "grad_norm": 0.2987800921220235,
      "learning_rate": 4.402355056911656e-07,
      "loss": 0.0787,
      "step": 333
    },
    {
      "epoch": 6.382165605095541,
      "grad_norm": 0.27285623552894217,
      "learning_rate": 4.124850842338779e-07,
      "loss": 0.0898,
      "step": 334
    },
    {
      "epoch": 6.401273885350318,
      "grad_norm": 0.32925345757232627,
      "learning_rate": 3.8561958877030957e-07,
      "loss": 0.082,
      "step": 335
    },
    {
      "epoch": 6.420382165605096,
      "grad_norm": 0.25722868705327834,
      "learning_rate": 3.5964149898111587e-07,
      "loss": 0.0537,
      "step": 336
    },
    {
      "epoch": 6.439490445859873,
      "grad_norm": 0.309845365274313,
      "learning_rate": 3.345532126395579e-07,
      "loss": 0.0579,
      "step": 337
    },
    {
      "epoch": 6.45859872611465,
      "grad_norm": 0.2977940307136604,
      "learning_rate": 3.1035704539019384e-07,
      "loss": 0.0726,
      "step": 338
    },
    {
      "epoch": 6.477707006369426,
      "grad_norm": 0.31025376844846553,
      "learning_rate": 2.870552305351382e-07,
      "loss": 0.0706,
      "step": 339
    },
    {
      "epoch": 6.496815286624204,
      "grad_norm": 0.2554835376087535,
      "learning_rate": 2.646499188279328e-07,
      "loss": 0.0424,
      "step": 340
    },
    {
      "epoch": 6.515923566878981,
      "grad_norm": 0.3180912293312616,
      "learning_rate": 2.4314317827503375e-07,
      "loss": 0.1085,
      "step": 341
    },
    {
      "epoch": 6.535031847133758,
| "grad_norm": 0.2982157435124411, | |
| "learning_rate": 2.2253699394493066e-07, | |
| "loss": 0.0576, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 6.554140127388535, | |
| "grad_norm": 0.3315729789977526, | |
| "learning_rate": 2.028332677849254e-07, | |
| "loss": 0.0573, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 6.573248407643312, | |
| "grad_norm": 0.25427048529330454, | |
| "learning_rate": 1.840338184455881e-07, | |
| "loss": 0.059, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 6.592356687898089, | |
| "grad_norm": 0.2785181993003191, | |
| "learning_rate": 1.6614038111289034e-07, | |
| "loss": 0.0611, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 6.611464968152866, | |
| "grad_norm": 0.29248938451924733, | |
| "learning_rate": 1.49154607348051e-07, | |
| "loss": 0.0793, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 6.630573248407643, | |
| "grad_norm": 0.24475616642622175, | |
| "learning_rate": 1.330780649350938e-07, | |
| "loss": 0.0613, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 6.649681528662421, | |
| "grad_norm": 0.28091544427398124, | |
| "learning_rate": 1.1791223773614635e-07, | |
| "loss": 0.0487, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 6.6687898089171975, | |
| "grad_norm": 0.30442981684389875, | |
| "learning_rate": 1.0365852555447642e-07, | |
| "loss": 0.0642, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 6.687898089171974, | |
| "grad_norm": 0.2746202449333878, | |
| "learning_rate": 9.031824400528854e-08, | |
| "loss": 0.0756, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 6.707006369426751, | |
| "grad_norm": 0.3087108598983567, | |
| "learning_rate": 7.789262439430012e-08, | |
| "loss": 0.0848, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 6.726114649681529, | |
| "grad_norm": 0.24776098383715156, | |
| "learning_rate": 6.638281360408339e-08, | |
| "loss": 0.0532, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 6.745222929936306, | |
| "grad_norm": 0.26456060953324073, | |
| "learning_rate": 5.578987398821345e-08, | |
| "loss": 0.0662, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 6.764331210191083, | |
| "grad_norm": 0.2955614791032562, | |
| "learning_rate": 4.6114783273213395e-08, | |
| "loss": 0.0583, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 6.7834394904458595, | |
| "grad_norm": 0.30673675317506477, | |
| "learning_rate": 3.735843446830867e-08, | |
| "loss": 0.0782, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 6.802547770700637, | |
| "grad_norm": 0.28925621808565305, | |
| "learning_rate": 2.9521635783001932e-08, | |
| "loss": 0.0671, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 6.821656050955414, | |
| "grad_norm": 0.30205551410687065, | |
| "learning_rate": 2.2605110552477162e-08, | |
| "loss": 0.0832, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 6.840764331210191, | |
| "grad_norm": 0.2919584904581562, | |
| "learning_rate": 1.6609497170834154e-08, | |
| "loss": 0.0729, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 6.859872611464969, | |
| "grad_norm": 0.24648188888791672, | |
| "learning_rate": 1.1535349032167908e-08, | |
| "loss": 0.0611, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 6.8789808917197455, | |
| "grad_norm": 0.3133304348225492, | |
| "learning_rate": 7.3831344794872415e-09, | |
| "loss": 0.0791, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 6.898089171974522, | |
| "grad_norm": 0.3021167233528452, | |
| "learning_rate": 4.153236761488266e-09, | |
| "loss": 0.0523, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 6.917197452229299, | |
| "grad_norm": 0.2634021956191592, | |
| "learning_rate": 1.8459539971804608e-09, | |
| "loss": 0.0352, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 6.936305732484076, | |
| "grad_norm": 0.30858275497051213, | |
| "learning_rate": 4.614991483686826e-10, | |
| "loss": 0.0874, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 6.955414012738854, | |
| "grad_norm": 0.2820445593615768, | |
| "learning_rate": 0.0, | |
| "loss": 0.0458, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 6.955414012738854, | |
| "step": 364, | |
| "total_flos": 8.44150893179945e+16, | |
| "train_loss": 0.3398333612092576, | |
| "train_runtime": 2635.8839, | |
| "train_samples_per_second": 13.278, | |
| "train_steps_per_second": 0.138 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 364, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 7, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 8.44150893179945e+16, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
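
The state above is the standard output of `transformers.Trainer`; `log_history` holds one record per logged step, and the final array entry carries run-level aggregates (`train_loss`, `train_runtime`, throughput) instead of a per-step loss. Below is a minimal sketch of how one might inspect it, assuming the file is saved locally as `trainer_state.json` (the path is an assumption) and that `matplotlib` is available. It plots the loss and learning-rate curves and cross-checks the reported throughput.

```python
import json

import matplotlib.pyplot as plt

# Load the state file written by transformers.Trainer.
# "trainer_state.json" is an assumed local path; adjust as needed.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries in log_history carry "loss" and "learning_rate";
# the final aggregate entry (train_loss, train_runtime, ...) does not,
# so filtering on "loss" keeps exactly the per-step records.
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]

fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax1.plot(steps, [e["loss"] for e in entries])
ax1.set_ylabel("training loss")
ax2.plot(steps, [e["learning_rate"] for e in entries])
ax2.set_ylabel("learning rate")
ax2.set_xlabel("global step")
fig.tight_layout()
plt.show()

# Sanity-check the reported throughput: global_step / train_runtime
# should reproduce train_steps_per_second (364 / 2635.8839 ~= 0.138).
final = state["log_history"][-1]
print(round(final["step"] / final["train_runtime"], 3))
```

Note that `train_batch_size` is the per-device batch size; the ratio `train_samples_per_second / train_steps_per_second` (13.278 / 0.138, roughly 96 samples per optimizer step) suggests the effective batch size also reflects gradient accumulation and/or multiple devices, though neither is recorded in this file.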