{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.991111111111111,
  "eval_steps": 500,
  "global_step": 112,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.017777777777777778,
      "grad_norm": 3.0161187648773193,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 0.9094,
      "step": 1
    },
    {
      "epoch": 0.035555555555555556,
      "grad_norm": 3.0456559658050537,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.9199,
      "step": 2
    },
    {
      "epoch": 0.05333333333333334,
      "grad_norm": 2.8616299629211426,
      "learning_rate": 5e-06,
      "loss": 0.8971,
      "step": 3
    },
    {
      "epoch": 0.07111111111111111,
      "grad_norm": 2.2876524925231934,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.8814,
      "step": 4
    },
    {
      "epoch": 0.08888888888888889,
      "grad_norm": 3.967463493347168,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.8735,
      "step": 5
    },
    {
      "epoch": 0.10666666666666667,
      "grad_norm": 1.9434279203414917,
      "learning_rate": 1e-05,
      "loss": 0.8465,
      "step": 6
    },
    {
      "epoch": 0.12444444444444444,
      "grad_norm": 2.0345959663391113,
      "learning_rate": 1.1666666666666668e-05,
      "loss": 0.8162,
      "step": 7
    },
    {
      "epoch": 0.14222222222222222,
      "grad_norm": 1.519321084022522,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.7766,
      "step": 8
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.5613726377487183,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.7484,
      "step": 9
    },
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 1.1348494291305542,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.7333,
      "step": 10
    },
    {
      "epoch": 0.19555555555555557,
      "grad_norm": 1.3956992626190186,
      "learning_rate": 1.8333333333333333e-05,
      "loss": 0.7199,
      "step": 11
    },
    {
      "epoch": 0.21333333333333335,
      "grad_norm": 1.2517744302749634,
      "learning_rate": 2e-05,
      "loss": 0.7146,
      "step": 12
    },
    {
      "epoch": 0.2311111111111111,
      "grad_norm": 1.2728958129882812,
      "learning_rate": 1.9995065603657317e-05,
      "loss": 0.7104,
      "step": 13
    },
    {
      "epoch": 0.24888888888888888,
      "grad_norm": 1.0933605432510376,
      "learning_rate": 1.9980267284282718e-05,
      "loss": 0.6716,
      "step": 14
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 0.8169775605201721,
      "learning_rate": 1.99556196460308e-05,
      "loss": 0.6816,
      "step": 15
    },
    {
      "epoch": 0.28444444444444444,
      "grad_norm": 1.0378296375274658,
      "learning_rate": 1.9921147013144782e-05,
      "loss": 0.6847,
      "step": 16
    },
    {
      "epoch": 0.3022222222222222,
      "grad_norm": 0.7941539883613586,
      "learning_rate": 1.9876883405951378e-05,
      "loss": 0.6453,
      "step": 17
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.6657661199569702,
      "learning_rate": 1.982287250728689e-05,
      "loss": 0.6504,
      "step": 18
    },
    {
      "epoch": 0.3377777777777778,
      "grad_norm": 0.7748784422874451,
      "learning_rate": 1.9759167619387474e-05,
      "loss": 0.6482,
      "step": 19
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 0.7648543119430542,
      "learning_rate": 1.9685831611286312e-05,
      "loss": 0.6399,
      "step": 20
    },
    {
      "epoch": 0.37333333333333335,
      "grad_norm": 0.6626160740852356,
      "learning_rate": 1.9602936856769432e-05,
      "loss": 0.6304,
      "step": 21
    },
    {
      "epoch": 0.39111111111111113,
      "grad_norm": 0.6575434803962708,
      "learning_rate": 1.9510565162951538e-05,
      "loss": 0.6415,
      "step": 22
    },
    {
      "epoch": 0.4088888888888889,
      "grad_norm": 0.6369120478630066,
      "learning_rate": 1.9408807689542257e-05,
      "loss": 0.6439,
      "step": 23
    },
    {
      "epoch": 0.4266666666666667,
      "grad_norm": 0.6337743401527405,
      "learning_rate": 1.9297764858882516e-05,
      "loss": 0.6368,
      "step": 24
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 0.6001498699188232,
      "learning_rate": 1.9177546256839814e-05,
      "loss": 0.623,
      "step": 25
    },
    {
      "epoch": 0.4622222222222222,
      "grad_norm": 0.48166558146476746,
      "learning_rate": 1.9048270524660197e-05,
      "loss": 0.6155,
      "step": 26
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.5851263403892517,
      "learning_rate": 1.891006524188368e-05,
      "loss": 0.617,
      "step": 27
    },
    {
      "epoch": 0.49777777777777776,
      "grad_norm": 0.603469729423523,
      "learning_rate": 1.8763066800438638e-05,
      "loss": 0.6253,
      "step": 28
    },
    {
      "epoch": 0.5155555555555555,
      "grad_norm": 0.45908477902412415,
      "learning_rate": 1.860742027003944e-05,
      "loss": 0.6267,
      "step": 29
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 0.5786503553390503,
      "learning_rate": 1.8443279255020153e-05,
      "loss": 0.6087,
      "step": 30
    },
    {
      "epoch": 0.5511111111111111,
      "grad_norm": 0.5608594417572021,
      "learning_rate": 1.827080574274562e-05,
      "loss": 0.6103,
      "step": 31
    },
    {
      "epoch": 0.5688888888888889,
      "grad_norm": 0.43821826577186584,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 0.5908,
      "step": 32
    },
    {
      "epoch": 0.5866666666666667,
      "grad_norm": 0.5472081899642944,
      "learning_rate": 1.7901550123756906e-05,
      "loss": 0.6251,
      "step": 33
    },
    {
      "epoch": 0.6044444444444445,
      "grad_norm": 0.4198133945465088,
      "learning_rate": 1.7705132427757895e-05,
      "loss": 0.5963,
      "step": 34
    },
    {
      "epoch": 0.6222222222222222,
      "grad_norm": 0.524138867855072,
      "learning_rate": 1.7501110696304598e-05,
      "loss": 0.5929,
      "step": 35
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.399955153465271,
      "learning_rate": 1.7289686274214116e-05,
      "loss": 0.6027,
      "step": 36
    },
    {
      "epoch": 0.6577777777777778,
      "grad_norm": 0.43022847175598145,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.6085,
      "step": 37
    },
    {
      "epoch": 0.6755555555555556,
      "grad_norm": 0.4289129972457886,
      "learning_rate": 1.684547105928689e-05,
      "loss": 0.6019,
      "step": 38
    },
    {
      "epoch": 0.6933333333333334,
      "grad_norm": 0.4198264181613922,
      "learning_rate": 1.661311865323652e-05,
      "loss": 0.6007,
      "step": 39
    },
    {
      "epoch": 0.7111111111111111,
      "grad_norm": 0.3680365979671478,
      "learning_rate": 1.63742398974869e-05,
      "loss": 0.5827,
      "step": 40
    },
    {
      "epoch": 0.7288888888888889,
      "grad_norm": 0.4037115275859833,
      "learning_rate": 1.6129070536529767e-05,
      "loss": 0.6047,
      "step": 41
    },
    {
      "epoch": 0.7466666666666667,
      "grad_norm": 0.473398894071579,
      "learning_rate": 1.5877852522924733e-05,
      "loss": 0.6041,
      "step": 42
    },
    {
      "epoch": 0.7644444444444445,
      "grad_norm": 0.33001384139060974,
      "learning_rate": 1.5620833778521306e-05,
      "loss": 0.5888,
      "step": 43
    },
    {
      "epoch": 0.7822222222222223,
      "grad_norm": 0.34208983182907104,
      "learning_rate": 1.5358267949789968e-05,
      "loss": 0.5927,
      "step": 44
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.3997894823551178,
      "learning_rate": 1.5090414157503715e-05,
      "loss": 0.6037,
      "step": 45
    },
    {
      "epoch": 0.8177777777777778,
      "grad_norm": 0.3622935712337494,
      "learning_rate": 1.4817536741017153e-05,
      "loss": 0.59,
      "step": 46
    },
    {
      "epoch": 0.8355555555555556,
      "grad_norm": 0.35205668210983276,
      "learning_rate": 1.4539904997395468e-05,
      "loss": 0.6038,
      "step": 47
    },
    {
      "epoch": 0.8533333333333334,
      "grad_norm": 0.38642618060112,
      "learning_rate": 1.4257792915650728e-05,
      "loss": 0.5796,
      "step": 48
    },
    {
      "epoch": 0.8711111111111111,
      "grad_norm": 0.34358909726142883,
      "learning_rate": 1.3971478906347806e-05,
      "loss": 0.5842,
      "step": 49
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 0.3424360752105713,
      "learning_rate": 1.3681245526846782e-05,
      "loss": 0.5824,
      "step": 50
    },
    {
      "epoch": 0.9066666666666666,
      "grad_norm": 0.3290707468986511,
      "learning_rate": 1.3387379202452917e-05,
      "loss": 0.5766,
      "step": 51
    },
    {
      "epoch": 0.9244444444444444,
      "grad_norm": 0.3417136073112488,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 0.5917,
      "step": 52
    },
    {
      "epoch": 0.9422222222222222,
      "grad_norm": 0.3143644332885742,
      "learning_rate": 1.2789911060392295e-05,
      "loss": 0.5862,
      "step": 53
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.3264472782611847,
      "learning_rate": 1.2486898871648552e-05,
      "loss": 0.5941,
      "step": 54
    },
    {
      "epoch": 0.9777777777777777,
      "grad_norm": 0.3463677167892456,
      "learning_rate": 1.2181432413965428e-05,
      "loss": 0.5792,
      "step": 55
    },
    {
      "epoch": 0.9955555555555555,
      "grad_norm": 0.3562481105327606,
      "learning_rate": 1.187381314585725e-05,
      "loss": 0.5949,
      "step": 56
    },
    {
      "epoch": 1.0133333333333334,
      "grad_norm": 0.6014902591705322,
      "learning_rate": 1.156434465040231e-05,
      "loss": 0.9832,
      "step": 57
    },
    {
      "epoch": 1.031111111111111,
      "grad_norm": 0.4315400719642639,
      "learning_rate": 1.1253332335643043e-05,
      "loss": 0.579,
      "step": 58
    },
    {
      "epoch": 1.048888888888889,
      "grad_norm": 0.4110487401485443,
      "learning_rate": 1.0941083133185146e-05,
      "loss": 0.5473,
      "step": 59
    },
    {
      "epoch": 1.0666666666666667,
      "grad_norm": 0.3785395920276642,
      "learning_rate": 1.0627905195293135e-05,
      "loss": 0.5123,
      "step": 60
    },
    {
      "epoch": 1.0844444444444445,
      "grad_norm": 0.41390877962112427,
      "learning_rate": 1.0314107590781284e-05,
      "loss": 0.5945,
      "step": 61
    },
    {
      "epoch": 1.1022222222222222,
      "grad_norm": 0.35746318101882935,
      "learning_rate": 1e-05,
      "loss": 0.525,
      "step": 62
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.4079684019088745,
      "learning_rate": 9.685892409218718e-06,
      "loss": 0.5318,
      "step": 63
    },
    {
      "epoch": 1.1377777777777778,
      "grad_norm": 0.36759430170059204,
      "learning_rate": 9.372094804706867e-06,
      "loss": 0.5176,
      "step": 64
    },
    {
      "epoch": 1.1555555555555554,
      "grad_norm": 0.41054409742355347,
      "learning_rate": 9.058916866814857e-06,
      "loss": 0.5637,
      "step": 65
    },
    {
      "epoch": 1.1733333333333333,
      "grad_norm": 0.3619822859764099,
      "learning_rate": 8.746667664356957e-06,
      "loss": 0.5567,
      "step": 66
    },
    {
      "epoch": 1.1911111111111112,
      "grad_norm": 0.3154706656932831,
      "learning_rate": 8.43565534959769e-06,
      "loss": 0.5005,
      "step": 67
    },
    {
      "epoch": 1.208888888888889,
      "grad_norm": 0.38217437267303467,
      "learning_rate": 8.126186854142752e-06,
      "loss": 0.5691,
      "step": 68
    },
    {
      "epoch": 1.2266666666666666,
      "grad_norm": 0.30643534660339355,
      "learning_rate": 7.818567586034578e-06,
      "loss": 0.5169,
      "step": 69
    },
    {
      "epoch": 1.2444444444444445,
      "grad_norm": 0.335054486989975,
      "learning_rate": 7.513101128351454e-06,
      "loss": 0.5474,
      "step": 70
    },
    {
      "epoch": 1.2622222222222224,
      "grad_norm": 0.32074597477912903,
      "learning_rate": 7.210088939607709e-06,
      "loss": 0.499,
      "step": 71
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.3078364431858063,
      "learning_rate": 6.909830056250527e-06,
      "loss": 0.5772,
      "step": 72
    },
    {
      "epoch": 1.2977777777777777,
      "grad_norm": 0.26938191056251526,
      "learning_rate": 6.612620797547087e-06,
      "loss": 0.4868,
      "step": 73
    },
    {
      "epoch": 1.3155555555555556,
      "grad_norm": 0.34066033363342285,
      "learning_rate": 6.318754473153221e-06,
      "loss": 0.5599,
      "step": 74
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.28962934017181396,
      "learning_rate": 6.028521093652195e-06,
      "loss": 0.5101,
      "step": 75
    },
    {
      "epoch": 1.3511111111111112,
      "grad_norm": 0.2816958427429199,
      "learning_rate": 5.742207084349274e-06,
      "loss": 0.5604,
      "step": 76
    },
    {
      "epoch": 1.3688888888888888,
      "grad_norm": 0.31672269105911255,
      "learning_rate": 5.460095002604533e-06,
      "loss": 0.5351,
      "step": 77
    },
    {
      "epoch": 1.3866666666666667,
      "grad_norm": 0.307559609413147,
      "learning_rate": 5.1824632589828465e-06,
      "loss": 0.5609,
      "step": 78
    },
    {
      "epoch": 1.4044444444444444,
      "grad_norm": 0.3002449572086334,
      "learning_rate": 4.909585842496287e-06,
      "loss": 0.4945,
      "step": 79
    },
    {
      "epoch": 1.4222222222222223,
      "grad_norm": 0.4366104304790497,
      "learning_rate": 4.641732050210032e-06,
      "loss": 0.529,
      "step": 80
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.2942907214164734,
      "learning_rate": 4.379166221478697e-06,
      "loss": 0.5456,
      "step": 81
    },
    {
      "epoch": 1.4577777777777778,
      "grad_norm": 0.29603904485702515,
      "learning_rate": 4.12214747707527e-06,
      "loss": 0.5672,
      "step": 82
    },
    {
      "epoch": 1.4755555555555555,
      "grad_norm": 0.26003167033195496,
      "learning_rate": 3.8709294634702374e-06,
      "loss": 0.4796,
      "step": 83
    },
    {
      "epoch": 1.4933333333333334,
      "grad_norm": 0.2945937514305115,
      "learning_rate": 3.625760102513103e-06,
      "loss": 0.5144,
      "step": 84
    },
    {
      "epoch": 1.511111111111111,
      "grad_norm": 0.32350650429725647,
      "learning_rate": 3.3868813467634833e-06,
      "loss": 0.5563,
      "step": 85
    },
    {
      "epoch": 1.528888888888889,
      "grad_norm": 0.28247952461242676,
      "learning_rate": 3.1545289407131128e-06,
      "loss": 0.5267,
      "step": 86
    },
    {
      "epoch": 1.5466666666666666,
      "grad_norm": 0.2574496269226074,
      "learning_rate": 2.9289321881345257e-06,
      "loss": 0.5464,
      "step": 87
    },
    {
      "epoch": 1.5644444444444443,
      "grad_norm": 0.2627605199813843,
      "learning_rate": 2.7103137257858867e-06,
      "loss": 0.5406,
      "step": 88
    },
    {
      "epoch": 1.5822222222222222,
      "grad_norm": 0.2714482545852661,
      "learning_rate": 2.4988893036954045e-06,
      "loss": 0.5094,
      "step": 89
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.28099218010902405,
      "learning_rate": 2.2948675722421086e-06,
      "loss": 0.5433,
      "step": 90
    },
    {
      "epoch": 1.6177777777777778,
      "grad_norm": 0.24211485683918,
      "learning_rate": 2.098449876243096e-06,
      "loss": 0.5239,
      "step": 91
    },
    {
      "epoch": 1.6355555555555554,
      "grad_norm": 0.2546832859516144,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 0.5626,
      "step": 92
    },
    {
      "epoch": 1.6533333333333333,
      "grad_norm": 0.2391272783279419,
      "learning_rate": 1.7291942572543806e-06,
      "loss": 0.5097,
      "step": 93
    },
    {
      "epoch": 1.6711111111111112,
      "grad_norm": 0.2698579430580139,
      "learning_rate": 1.5567207449798517e-06,
      "loss": 0.5432,
      "step": 94
    },
    {
      "epoch": 1.6888888888888889,
      "grad_norm": 0.2520196735858917,
      "learning_rate": 1.3925797299605649e-06,
      "loss": 0.5245,
      "step": 95
    },
    {
      "epoch": 1.7066666666666666,
      "grad_norm": 0.25460532307624817,
      "learning_rate": 1.2369331995613664e-06,
      "loss": 0.5438,
      "step": 96
    },
    {
      "epoch": 1.7244444444444444,
      "grad_norm": 0.26885905861854553,
      "learning_rate": 1.0899347581163222e-06,
      "loss": 0.5418,
      "step": 97
    },
    {
      "epoch": 1.7422222222222223,
      "grad_norm": 0.22826924920082092,
      "learning_rate": 9.517294753398066e-07,
      "loss": 0.5258,
      "step": 98
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.2222457379102707,
      "learning_rate": 8.224537431601886e-07,
      "loss": 0.4905,
      "step": 99
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 0.24972008168697357,
      "learning_rate": 7.022351411174866e-07,
      "loss": 0.5436,
      "step": 100
    },
    {
      "epoch": 1.7955555555555556,
      "grad_norm": 0.24206911027431488,
      "learning_rate": 5.911923104577455e-07,
      "loss": 0.5322,
      "step": 101
    },
    {
      "epoch": 1.8133333333333335,
      "grad_norm": 0.2620464861392975,
      "learning_rate": 4.894348370484648e-07,
      "loss": 0.5142,
      "step": 102
    },
    {
      "epoch": 1.8311111111111111,
      "grad_norm": 0.25102680921554565,
      "learning_rate": 3.9706314323056936e-07,
      "loss": 0.5264,
      "step": 103
    },
    {
      "epoch": 1.8488888888888888,
      "grad_norm": 0.24889089167118073,
      "learning_rate": 3.1416838871368925e-07,
      "loss": 0.5069,
      "step": 104
    },
    {
      "epoch": 1.8666666666666667,
      "grad_norm": 0.24384674429893494,
      "learning_rate": 2.4083238061252565e-07,
      "loss": 0.5694,
      "step": 105
    },
    {
      "epoch": 1.8844444444444446,
      "grad_norm": 0.2293427288532257,
      "learning_rate": 1.7712749271311392e-07,
      "loss": 0.4963,
      "step": 106
    },
    {
      "epoch": 1.9022222222222223,
      "grad_norm": 0.2467752993106842,
      "learning_rate": 1.231165940486234e-07,
      "loss": 0.512,
      "step": 107
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.2353520542383194,
      "learning_rate": 7.885298685522235e-08,
      "loss": 0.5168,
      "step": 108
    },
    {
      "epoch": 1.9377777777777778,
      "grad_norm": 0.2465604692697525,
      "learning_rate": 4.438035396920004e-08,
      "loss": 0.5412,
      "step": 109
    },
    {
      "epoch": 1.9555555555555557,
      "grad_norm": 0.23654182255268097,
      "learning_rate": 1.973271571728441e-08,
      "loss": 0.5511,
      "step": 110
    },
    {
      "epoch": 1.9733333333333334,
      "grad_norm": 0.22757108509540558,
      "learning_rate": 4.9343963426840006e-09,
      "loss": 0.5274,
      "step": 111
    },
    {
      "epoch": 1.991111111111111,
      "grad_norm": 0.24920369684696198,
      "learning_rate": 0.0,
      "loss": 0.544,
      "step": 112
    },
    {
      "epoch": 1.991111111111111,
      "step": 112,
      "total_flos": 467112937127936.0,
      "train_loss": 0.6003679327134576,
      "train_runtime": 29362.7142,
      "train_samples_per_second": 0.367,
      "train_steps_per_second": 0.004
    }
  ],
  "logging_steps": 1,
  "max_steps": 112,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 467112937127936.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}