{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9822485207100593,
  "eval_steps": 500,
  "global_step": 168,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01775147928994083,
      "grad_norm": 2.533238649368286,
      "learning_rate": 5.882352941176471e-07,
      "loss": 0.924,
      "step": 1
    },
    {
      "epoch": 0.03550295857988166,
      "grad_norm": 2.927447557449341,
      "learning_rate": 1.1764705882352942e-06,
      "loss": 1.0731,
      "step": 2
    },
    {
      "epoch": 0.05325443786982249,
      "grad_norm": 2.820338726043701,
      "learning_rate": 1.7647058823529414e-06,
      "loss": 0.9836,
      "step": 3
    },
    {
      "epoch": 0.07100591715976332,
      "grad_norm": 2.642514228820801,
      "learning_rate": 2.3529411764705885e-06,
      "loss": 0.9904,
      "step": 4
    },
    {
      "epoch": 0.08875739644970414,
      "grad_norm": 2.659113883972168,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 0.9903,
      "step": 5
    },
    {
      "epoch": 0.10650887573964497,
      "grad_norm": 2.257899522781372,
      "learning_rate": 3.529411764705883e-06,
      "loss": 0.9045,
      "step": 6
    },
    {
      "epoch": 0.1242603550295858,
      "grad_norm": 1.839630365371704,
      "learning_rate": 4.11764705882353e-06,
      "loss": 0.9406,
      "step": 7
    },
    {
      "epoch": 0.14201183431952663,
      "grad_norm": 1.631960391998291,
      "learning_rate": 4.705882352941177e-06,
      "loss": 0.8611,
      "step": 8
    },
    {
      "epoch": 0.15976331360946747,
      "grad_norm": 1.4379936456680298,
      "learning_rate": 5.294117647058824e-06,
      "loss": 0.8991,
      "step": 9
    },
    {
      "epoch": 0.17751479289940827,
      "grad_norm": 1.5505577325820923,
      "learning_rate": 5.882352941176471e-06,
      "loss": 0.9153,
      "step": 10
    },
    {
      "epoch": 0.1952662721893491,
      "grad_norm": 1.5041471719741821,
      "learning_rate": 6.470588235294119e-06,
      "loss": 0.9025,
      "step": 11
    },
    {
      "epoch": 0.21301775147928995,
      "grad_norm": 1.1657485961914062,
      "learning_rate": 7.058823529411766e-06,
      "loss": 0.8255,
      "step": 12
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 1.60923433303833,
      "learning_rate": 7.647058823529411e-06,
      "loss": 0.7197,
      "step": 13
    },
    {
      "epoch": 0.2485207100591716,
      "grad_norm": 1.6407514810562134,
      "learning_rate": 8.23529411764706e-06,
      "loss": 0.7522,
      "step": 14
    },
    {
      "epoch": 0.26627218934911245,
      "grad_norm": 1.4880717992782593,
      "learning_rate": 8.823529411764707e-06,
      "loss": 0.7343,
      "step": 15
    },
    {
      "epoch": 0.28402366863905326,
      "grad_norm": 1.2115110158920288,
      "learning_rate": 9.411764705882354e-06,
      "loss": 0.7614,
      "step": 16
    },
    {
      "epoch": 0.30177514792899407,
      "grad_norm": 0.9123843312263489,
      "learning_rate": 1e-05,
      "loss": 0.6835,
      "step": 17
    },
    {
      "epoch": 0.31952662721893493,
      "grad_norm": 1.1175857782363892,
      "learning_rate": 9.998917893031615e-06,
      "loss": 0.7777,
      "step": 18
    },
    {
      "epoch": 0.33727810650887574,
      "grad_norm": 1.0061542987823486,
      "learning_rate": 9.995672040508656e-06,
      "loss": 0.7068,
      "step": 19
    },
    {
      "epoch": 0.35502958579881655,
      "grad_norm": 0.8296826481819153,
      "learning_rate": 9.990263847374976e-06,
      "loss": 0.6594,
      "step": 20
    },
    {
      "epoch": 0.3727810650887574,
      "grad_norm": 0.7384388446807861,
      "learning_rate": 9.982695654527966e-06,
      "loss": 0.6521,
      "step": 21
    },
    {
      "epoch": 0.3905325443786982,
      "grad_norm": 0.9570622444152832,
      "learning_rate": 9.972970737805312e-06,
      "loss": 0.729,
      "step": 22
    },
    {
      "epoch": 0.40828402366863903,
      "grad_norm": 0.6549481153488159,
      "learning_rate": 9.961093306567076e-06,
      "loss": 0.6251,
      "step": 23
    },
    {
      "epoch": 0.4260355029585799,
      "grad_norm": 0.7221797108650208,
      "learning_rate": 9.947068501873702e-06,
      "loss": 0.6779,
      "step": 24
    },
    {
      "epoch": 0.4437869822485207,
      "grad_norm": 0.6575434803962708,
      "learning_rate": 9.930902394260746e-06,
      "loss": 0.6849,
      "step": 25
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.5789452791213989,
      "learning_rate": 9.912601981111287e-06,
      "loss": 0.5844,
      "step": 26
    },
    {
      "epoch": 0.47928994082840237,
      "grad_norm": 0.6969165205955505,
      "learning_rate": 9.892175183627161e-06,
      "loss": 0.6698,
      "step": 27
    },
    {
      "epoch": 0.4970414201183432,
      "grad_norm": 0.7126699686050415,
      "learning_rate": 9.869630843400331e-06,
      "loss": 0.6847,
      "step": 28
    },
    {
      "epoch": 0.514792899408284,
      "grad_norm": 0.6048182845115662,
      "learning_rate": 9.844978718585855e-06,
      "loss": 0.6191,
      "step": 29
    },
    {
      "epoch": 0.5325443786982249,
      "grad_norm": 0.6467904448509216,
      "learning_rate": 9.81822947967816e-06,
      "loss": 0.6409,
      "step": 30
    },
    {
      "epoch": 0.5502958579881657,
      "grad_norm": 0.6857551336288452,
      "learning_rate": 9.789394704892364e-06,
      "loss": 0.6316,
      "step": 31
    },
    {
      "epoch": 0.5680473372781065,
      "grad_norm": 0.7137037515640259,
      "learning_rate": 9.758486875152766e-06,
      "loss": 0.6325,
      "step": 32
    },
    {
      "epoch": 0.5857988165680473,
      "grad_norm": 0.6259506940841675,
      "learning_rate": 9.725519368690539e-06,
      "loss": 0.6478,
      "step": 33
    },
    {
      "epoch": 0.6035502958579881,
      "grad_norm": 0.5821611881256104,
      "learning_rate": 9.690506455253073e-06,
      "loss": 0.6117,
      "step": 34
    },
    {
      "epoch": 0.621301775147929,
      "grad_norm": 0.6482532024383545,
      "learning_rate": 9.65346328992741e-06,
      "loss": 0.6164,
      "step": 35
    },
    {
      "epoch": 0.6390532544378699,
      "grad_norm": 0.6187491416931152,
      "learning_rate": 9.614405906580486e-06,
      "loss": 0.567,
      "step": 36
    },
    {
      "epoch": 0.6568047337278107,
      "grad_norm": 0.6659966111183167,
      "learning_rate": 9.573351210918976e-06,
      "loss": 0.6044,
      "step": 37
    },
    {
      "epoch": 0.6745562130177515,
      "grad_norm": 0.6486673951148987,
      "learning_rate": 9.53031697317178e-06,
      "loss": 0.5811,
      "step": 38
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 0.6228745579719543,
      "learning_rate": 9.485321820398321e-06,
      "loss": 0.6137,
      "step": 39
    },
    {
      "epoch": 0.7100591715976331,
      "grad_norm": 0.5413960814476013,
      "learning_rate": 9.43838522842594e-06,
      "loss": 0.5736,
      "step": 40
    },
    {
      "epoch": 0.727810650887574,
      "grad_norm": 0.5927128195762634,
      "learning_rate": 9.389527513419935e-06,
      "loss": 0.6567,
      "step": 41
    },
    {
      "epoch": 0.7455621301775148,
      "grad_norm": 0.5900639295578003,
      "learning_rate": 9.338769823089853e-06,
      "loss": 0.6382,
      "step": 42
    },
    {
      "epoch": 0.7633136094674556,
      "grad_norm": 0.6012625098228455,
      "learning_rate": 9.286134127535859e-06,
      "loss": 0.6525,
      "step": 43
    },
    {
      "epoch": 0.7810650887573964,
      "grad_norm": 0.5634270906448364,
      "learning_rate": 9.231643209739128e-06,
      "loss": 0.5819,
      "step": 44
    },
    {
      "epoch": 0.7988165680473372,
      "grad_norm": 0.5087092518806458,
      "learning_rate": 9.175320655700407e-06,
      "loss": 0.5681,
      "step": 45
    },
    {
      "epoch": 0.8165680473372781,
      "grad_norm": 0.5246061682701111,
      "learning_rate": 9.117190844230971e-06,
      "loss": 0.5408,
      "step": 46
    },
    {
      "epoch": 0.834319526627219,
      "grad_norm": 0.5206530094146729,
      "learning_rate": 9.057278936400453e-06,
      "loss": 0.5804,
      "step": 47
    },
    {
      "epoch": 0.8520710059171598,
      "grad_norm": 0.5596741437911987,
      "learning_rate": 8.99561086464603e-06,
      "loss": 0.5521,
      "step": 48
    },
    {
      "epoch": 0.8698224852071006,
      "grad_norm": 0.5072557330131531,
      "learning_rate": 8.932213321547769e-06,
      "loss": 0.5277,
      "step": 49
    },
    {
      "epoch": 0.8875739644970414,
      "grad_norm": 0.5525227785110474,
      "learning_rate": 8.86711374827494e-06,
      "loss": 0.6031,
      "step": 50
    },
    {
      "epoch": 0.9053254437869822,
      "grad_norm": 0.6006079316139221,
      "learning_rate": 8.800340322708291e-06,
      "loss": 0.5698,
      "step": 51
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.5963747501373291,
      "learning_rate": 8.73192194724347e-06,
      "loss": 0.6289,
      "step": 52
    },
    {
      "epoch": 0.9408284023668639,
      "grad_norm": 0.5696894526481628,
      "learning_rate": 8.661888236280813e-06,
      "loss": 0.5918,
      "step": 53
    },
    {
      "epoch": 0.9585798816568047,
      "grad_norm": 0.6133765578269958,
      "learning_rate": 8.590269503406986e-06,
      "loss": 0.6514,
      "step": 54
    },
    {
      "epoch": 0.9763313609467456,
      "grad_norm": 0.6414260864257812,
      "learning_rate": 8.517096748273951e-06,
      "loss": 0.5686,
      "step": 55
    },
    {
      "epoch": 0.9940828402366864,
      "grad_norm": 0.6260319948196411,
      "learning_rate": 8.442401643181e-06,
      "loss": 0.5175,
      "step": 56
    },
    {
      "epoch": 1.0118343195266273,
      "grad_norm": 1.199350118637085,
      "learning_rate": 8.366216519365623e-06,
      "loss": 0.9417,
      "step": 57
    },
    {
      "epoch": 1.029585798816568,
      "grad_norm": 0.6130478978157043,
      "learning_rate": 8.288574353009164e-06,
      "loss": 0.4821,
      "step": 58
    },
    {
      "epoch": 1.047337278106509,
      "grad_norm": 0.6134289503097534,
      "learning_rate": 8.20950875096333e-06,
      "loss": 0.4782,
      "step": 59
    },
    {
      "epoch": 1.0650887573964498,
      "grad_norm": 0.7462955117225647,
      "learning_rate": 8.129053936203688e-06,
      "loss": 0.5882,
      "step": 60
    },
    {
      "epoch": 1.0828402366863905,
      "grad_norm": 0.6716148257255554,
      "learning_rate": 8.04724473301652e-06,
      "loss": 0.5487,
      "step": 61
    },
    {
      "epoch": 1.1005917159763314,
      "grad_norm": 0.5907769799232483,
      "learning_rate": 7.964116551925365e-06,
      "loss": 0.4711,
      "step": 62
    },
    {
      "epoch": 1.1183431952662721,
      "grad_norm": 0.6688742637634277,
      "learning_rate": 7.879705374363831e-06,
      "loss": 0.4645,
      "step": 63
    },
    {
      "epoch": 1.136094674556213,
      "grad_norm": 0.6526861786842346,
      "learning_rate": 7.794047737101298e-06,
      "loss": 0.5206,
      "step": 64
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 0.5595381855964661,
      "learning_rate": 7.707180716428237e-06,
      "loss": 0.4313,
      "step": 65
    },
    {
      "epoch": 1.1715976331360947,
      "grad_norm": 0.6181778311729431,
      "learning_rate": 7.619141912108008e-06,
      "loss": 0.564,
      "step": 66
    },
    {
      "epoch": 1.1893491124260356,
      "grad_norm": 0.614881157875061,
      "learning_rate": 7.529969431102063e-06,
      "loss": 0.5041,
      "step": 67
    },
    {
      "epoch": 1.2071005917159763,
      "grad_norm": 0.6447051763534546,
      "learning_rate": 7.4397018710756415e-06,
      "loss": 0.5046,
      "step": 68
    },
    {
      "epoch": 1.2248520710059172,
      "grad_norm": 0.5753356218338013,
      "learning_rate": 7.34837830369103e-06,
      "loss": 0.4534,
      "step": 69
    },
    {
      "epoch": 1.242603550295858,
      "grad_norm": 0.5938759446144104,
      "learning_rate": 7.2560382576956875e-06,
      "loss": 0.4369,
      "step": 70
    },
    {
      "epoch": 1.2603550295857988,
      "grad_norm": 0.6178358793258667,
      "learning_rate": 7.162721701812506e-06,
      "loss": 0.4949,
      "step": 71
    },
    {
      "epoch": 1.2781065088757395,
      "grad_norm": 0.6533629894256592,
      "learning_rate": 7.068469027439642e-06,
      "loss": 0.4816,
      "step": 72
    },
    {
      "epoch": 1.2958579881656804,
      "grad_norm": 0.6626835465431213,
      "learning_rate": 6.9733210311673826e-06,
      "loss": 0.5976,
      "step": 73
    },
    {
      "epoch": 1.3136094674556213,
      "grad_norm": 0.5293806195259094,
      "learning_rate": 6.8773188971196515e-06,
      "loss": 0.4124,
      "step": 74
    },
    {
      "epoch": 1.331360946745562,
      "grad_norm": 0.6411221623420715,
      "learning_rate": 6.780504179127735e-06,
      "loss": 0.5081,
      "step": 75
    },
    {
      "epoch": 1.349112426035503,
      "grad_norm": 0.6326239109039307,
      "learning_rate": 6.682918782744033e-06,
      "loss": 0.5123,
      "step": 76
    },
    {
      "epoch": 1.3668639053254439,
      "grad_norm": 0.6738696098327637,
      "learning_rate": 6.584604947103515e-06,
      "loss": 0.499,
      "step": 77
    },
    {
      "epoch": 1.3846153846153846,
      "grad_norm": 0.5083363652229309,
      "learning_rate": 6.4856052266408375e-06,
      "loss": 0.4522,
      "step": 78
    },
    {
      "epoch": 1.4023668639053255,
      "grad_norm": 0.6393911838531494,
      "learning_rate": 6.385962472670953e-06,
      "loss": 0.4685,
      "step": 79
    },
    {
      "epoch": 1.4201183431952662,
      "grad_norm": 0.6106072664260864,
      "learning_rate": 6.28571981484123e-06,
      "loss": 0.4942,
      "step": 80
    },
    {
      "epoch": 1.4378698224852071,
      "grad_norm": 0.5104251503944397,
      "learning_rate": 6.184920642463095e-06,
      "loss": 0.4625,
      "step": 81
    },
    {
      "epoch": 1.4556213017751478,
      "grad_norm": 0.587416410446167,
      "learning_rate": 6.083608585731283e-06,
      "loss": 0.5671,
      "step": 82
    },
    {
      "epoch": 1.4733727810650887,
      "grad_norm": 0.5419860482215881,
      "learning_rate": 5.9818274968388225e-06,
      "loss": 0.516,
      "step": 83
    },
    {
      "epoch": 1.4911242603550297,
      "grad_norm": 0.6191542744636536,
      "learning_rate": 5.879621430995927e-06,
      "loss": 0.4823,
      "step": 84
    },
    {
      "epoch": 1.5088757396449703,
      "grad_norm": 0.5240432620048523,
      "learning_rate": 5.777034627361025e-06,
      "loss": 0.3751,
      "step": 85
    },
    {
      "epoch": 1.5266272189349113,
      "grad_norm": 0.6388912200927734,
      "learning_rate": 5.674111489892144e-06,
      "loss": 0.5432,
      "step": 86
    },
    {
      "epoch": 1.5443786982248522,
      "grad_norm": 0.6531295776367188,
      "learning_rate": 5.570896568126994e-06,
      "loss": 0.5527,
      "step": 87
    },
    {
      "epoch": 1.5621301775147929,
      "grad_norm": 0.5705211758613586,
      "learning_rate": 5.4674345379e-06,
      "loss": 0.4588,
      "step": 88
    },
    {
      "epoch": 1.5798816568047336,
      "grad_norm": 0.5820942521095276,
      "learning_rate": 5.36377018200472e-06,
      "loss": 0.4563,
      "step": 89
    },
    {
      "epoch": 1.5976331360946747,
      "grad_norm": 0.6194245219230652,
      "learning_rate": 5.259948370809902e-06,
      "loss": 0.5123,
      "step": 90
    },
    {
      "epoch": 1.6153846153846154,
      "grad_norm": 0.5981638431549072,
      "learning_rate": 5.156014042837696e-06,
      "loss": 0.4979,
      "step": 91
    },
    {
      "epoch": 1.6331360946745561,
      "grad_norm": 0.7485818862915039,
      "learning_rate": 5.052012185312322e-06,
      "loss": 0.5863,
      "step": 92
    },
    {
      "epoch": 1.650887573964497,
      "grad_norm": 0.5108672380447388,
      "learning_rate": 4.94798781468768e-06,
      "loss": 0.398,
      "step": 93
    },
    {
      "epoch": 1.668639053254438,
      "grad_norm": 0.6520046591758728,
      "learning_rate": 4.843985957162304e-06,
      "loss": 0.6246,
      "step": 94
    },
    {
      "epoch": 1.6863905325443787,
      "grad_norm": 0.4945230484008789,
      "learning_rate": 4.740051629190099e-06,
      "loss": 0.3286,
      "step": 95
    },
    {
      "epoch": 1.7041420118343196,
      "grad_norm": 0.613183319568634,
      "learning_rate": 4.636229817995281e-06,
      "loss": 0.4692,
      "step": 96
    },
    {
      "epoch": 1.7218934911242605,
      "grad_norm": 0.6236385107040405,
      "learning_rate": 4.532565462099999e-06,
      "loss": 0.5258,
      "step": 97
    },
    {
      "epoch": 1.7396449704142012,
      "grad_norm": 0.5453357100486755,
      "learning_rate": 4.429103431873009e-06,
      "loss": 0.4714,
      "step": 98
    },
    {
      "epoch": 1.7573964497041419,
      "grad_norm": 0.5440649390220642,
      "learning_rate": 4.3258885101078565e-06,
      "loss": 0.4571,
      "step": 99
    },
    {
      "epoch": 1.7751479289940828,
      "grad_norm": 0.6320599913597107,
      "learning_rate": 4.2229653726389765e-06,
      "loss": 0.4783,
      "step": 100
    },
    {
      "epoch": 1.7928994082840237,
      "grad_norm": 0.5469642281532288,
      "learning_rate": 4.120378569004074e-06,
      "loss": 0.4467,
      "step": 101
    },
    {
      "epoch": 1.8106508875739644,
      "grad_norm": 0.5750222206115723,
      "learning_rate": 4.018172503161179e-06,
      "loss": 0.5032,
      "step": 102
    },
    {
      "epoch": 1.8284023668639053,
      "grad_norm": 0.5565685629844666,
      "learning_rate": 3.9163914142687185e-06,
      "loss": 0.482,
      "step": 103
    },
    {
      "epoch": 1.8461538461538463,
      "grad_norm": 0.6028342247009277,
      "learning_rate": 3.815079357536907e-06,
      "loss": 0.4609,
      "step": 104
    },
    {
      "epoch": 1.863905325443787,
      "grad_norm": 0.6078227758407593,
      "learning_rate": 3.714280185158771e-06,
      "loss": 0.5237,
      "step": 105
    },
    {
      "epoch": 1.8816568047337277,
      "grad_norm": 0.5719071626663208,
      "learning_rate": 3.614037527329048e-06,
      "loss": 0.4206,
      "step": 106
    },
    {
      "epoch": 1.8994082840236688,
      "grad_norm": 0.5875373482704163,
      "learning_rate": 3.5143947733591633e-06,
      "loss": 0.4779,
      "step": 107
    },
    {
      "epoch": 1.9171597633136095,
      "grad_norm": 0.5589243769645691,
      "learning_rate": 3.4153950528964867e-06,
      "loss": 0.5056,
      "step": 108
    },
    {
      "epoch": 1.9349112426035502,
      "grad_norm": 0.5471234917640686,
      "learning_rate": 3.3170812172559695e-06,
      "loss": 0.4929,
      "step": 109
    },
    {
      "epoch": 1.952662721893491,
      "grad_norm": 0.5713181495666504,
      "learning_rate": 3.2194958208722656e-06,
      "loss": 0.4785,
      "step": 110
    },
    {
      "epoch": 1.970414201183432,
      "grad_norm": 0.553081750869751,
      "learning_rate": 3.1226811028803514e-06,
      "loss": 0.4389,
      "step": 111
    },
    {
      "epoch": 1.9881656804733727,
      "grad_norm": 0.5729470252990723,
      "learning_rate": 3.0266789688326187e-06,
      "loss": 0.4665,
      "step": 112
    },
    {
      "epoch": 2.0059171597633134,
      "grad_norm": 1.2184429168701172,
      "learning_rate": 2.9315309725603596e-06,
      "loss": 0.7849,
      "step": 113
    },
    {
      "epoch": 2.0236686390532546,
      "grad_norm": 0.522588849067688,
      "learning_rate": 2.8372782981874964e-06,
      "loss": 0.3366,
      "step": 114
    },
    {
      "epoch": 2.0414201183431953,
      "grad_norm": 0.6389386057853699,
      "learning_rate": 2.7439617423043146e-06,
      "loss": 0.4961,
      "step": 115
    },
    {
      "epoch": 2.059171597633136,
      "grad_norm": 0.6419571042060852,
      "learning_rate": 2.6516216963089698e-06,
      "loss": 0.4662,
      "step": 116
    },
    {
      "epoch": 2.076923076923077,
      "grad_norm": 0.6015217900276184,
      "learning_rate": 2.560298128924358e-06,
      "loss": 0.4298,
      "step": 117
    },
    {
      "epoch": 2.094674556213018,
      "grad_norm": 0.5608621835708618,
      "learning_rate": 2.470030568897938e-06,
      "loss": 0.3721,
      "step": 118
    },
    {
      "epoch": 2.1124260355029585,
      "grad_norm": 0.6403945684432983,
      "learning_rate": 2.3808580878919948e-06,
      "loss": 0.4947,
      "step": 119
    },
    {
      "epoch": 2.1301775147928996,
      "grad_norm": 0.4906422793865204,
      "learning_rate": 2.2928192835717642e-06,
      "loss": 0.3607,
      "step": 120
    },
    {
      "epoch": 2.1479289940828403,
      "grad_norm": 0.5612274408340454,
      "learning_rate": 2.205952262898704e-06,
      "loss": 0.4134,
      "step": 121
    },
    {
      "epoch": 2.165680473372781,
      "grad_norm": 0.587371289730072,
      "learning_rate": 2.120294625636171e-06,
      "loss": 0.4862,
      "step": 122
    },
    {
      "epoch": 2.1834319526627217,
      "grad_norm": 0.5207931399345398,
      "learning_rate": 2.0358834480746363e-06,
      "loss": 0.3613,
      "step": 123
    },
    {
      "epoch": 2.201183431952663,
      "grad_norm": 0.6090331673622131,
      "learning_rate": 1.9527552669834797e-06,
      "loss": 0.4493,
      "step": 124
    },
    {
      "epoch": 2.2189349112426036,
      "grad_norm": 0.5700682401657104,
      "learning_rate": 1.8709460637963123e-06,
      "loss": 0.4133,
      "step": 125
    },
    {
      "epoch": 2.2366863905325443,
      "grad_norm": 0.4857932925224304,
      "learning_rate": 1.7904912490366723e-06,
      "loss": 0.3573,
      "step": 126
    },
    {
      "epoch": 2.2544378698224854,
      "grad_norm": 0.6004585027694702,
      "learning_rate": 1.711425646990838e-06,
      "loss": 0.4517,
      "step": 127
    },
    {
      "epoch": 2.272189349112426,
      "grad_norm": 0.5605913996696472,
      "learning_rate": 1.6337834806343783e-06,
      "loss": 0.4134,
      "step": 128
    },
    {
      "epoch": 2.289940828402367,
      "grad_norm": 0.6110245585441589,
      "learning_rate": 1.557598356819e-06,
      "loss": 0.4579,
      "step": 129
    },
    {
      "epoch": 2.3076923076923075,
      "grad_norm": 0.5302851796150208,
      "learning_rate": 1.482903251726049e-06,
      "loss": 0.3641,
      "step": 130
    },
    {
      "epoch": 2.3254437869822486,
      "grad_norm": 0.6419249176979065,
      "learning_rate": 1.409730496593016e-06,
      "loss": 0.5007,
      "step": 131
    },
    {
      "epoch": 2.3431952662721893,
      "grad_norm": 0.5336579084396362,
      "learning_rate": 1.3381117637191887e-06,
      "loss": 0.3433,
      "step": 132
    },
    {
      "epoch": 2.36094674556213,
      "grad_norm": 0.5869147777557373,
      "learning_rate": 1.2680780527565313e-06,
      "loss": 0.4898,
      "step": 133
    },
    {
      "epoch": 2.378698224852071,
      "grad_norm": 0.4657803475856781,
      "learning_rate": 1.1996596772917091e-06,
      "loss": 0.3326,
      "step": 134
    },
    {
      "epoch": 2.396449704142012,
      "grad_norm": 0.6105607748031616,
      "learning_rate": 1.132886251725061e-06,
      "loss": 0.4423,
      "step": 135
    },
    {
      "epoch": 2.4142011834319526,
      "grad_norm": 0.5837461948394775,
      "learning_rate": 1.0677866784522317e-06,
      "loss": 0.4593,
      "step": 136
    },
    {
      "epoch": 2.4319526627218933,
      "grad_norm": 0.6025368571281433,
      "learning_rate": 1.004389135353972e-06,
      "loss": 0.4215,
      "step": 137
    },
    {
      "epoch": 2.4497041420118344,
      "grad_norm": 0.5407091379165649,
      "learning_rate": 9.427210635995482e-07,
      "loss": 0.3988,
      "step": 138
    },
    {
      "epoch": 2.467455621301775,
      "grad_norm": 0.5298522114753723,
      "learning_rate": 8.828091557690288e-07,
      "loss": 0.4313,
      "step": 139
    },
    {
      "epoch": 2.485207100591716,
      "grad_norm": 0.5403129458427429,
      "learning_rate": 8.246793442995954e-07,
      "loss": 0.4222,
      "step": 140
    },
    {
      "epoch": 2.502958579881657,
      "grad_norm": 0.5571721792221069,
      "learning_rate": 7.68356790260873e-07,
      "loss": 0.4143,
      "step": 141
    },
    {
      "epoch": 2.5207100591715976,
      "grad_norm": 0.5758110284805298,
      "learning_rate": 7.138658724641417e-07,
      "loss": 0.4597,
      "step": 142
    },
    {
      "epoch": 2.5384615384615383,
      "grad_norm": 0.5439177751541138,
      "learning_rate": 6.612301769101464e-07,
      "loss": 0.4078,
      "step": 143
    },
    {
      "epoch": 2.556213017751479,
      "grad_norm": 0.6059474349021912,
      "learning_rate": 6.104724865800665e-07,
      "loss": 0.4234,
      "step": 144
    },
    {
      "epoch": 2.57396449704142,
      "grad_norm": 0.5578750371932983,
      "learning_rate": 5.616147715740611e-07,
      "loss": 0.3722,
      "step": 145
    },
    {
      "epoch": 2.591715976331361,
      "grad_norm": 0.5182917714118958,
      "learning_rate": 5.146781796016798e-07,
      "loss": 0.3592,
      "step": 146
    },
    {
      "epoch": 2.609467455621302,
      "grad_norm": 0.5251271724700928,
      "learning_rate": 4.696830268282204e-07,
      "loss": 0.4132,
      "step": 147
    },
    {
      "epoch": 2.6272189349112427,
      "grad_norm": 0.5910128951072693,
      "learning_rate": 4.2664878908102556e-07,
      "loss": 0.4721,
      "step": 148
    },
    {
      "epoch": 2.6449704142011834,
      "grad_norm": 0.5311455130577087,
      "learning_rate": 3.855940934195146e-07,
      "loss": 0.4011,
      "step": 149
    },
    {
      "epoch": 2.662721893491124,
      "grad_norm": 0.5455037951469421,
      "learning_rate": 3.4653671007259084e-07,
      "loss": 0.4317,
      "step": 150
    },
    {
      "epoch": 2.6804733727810652,
      "grad_norm": 0.521126389503479,
      "learning_rate": 3.0949354474692937e-07,
      "loss": 0.4071,
      "step": 151
    },
    {
      "epoch": 2.698224852071006,
      "grad_norm": 0.5334401726722717,
      "learning_rate": 2.7448063130946224e-07,
      "loss": 0.3797,
      "step": 152
    },
    {
      "epoch": 2.7159763313609466,
      "grad_norm": 0.6030508875846863,
      "learning_rate": 2.4151312484723465e-07,
      "loss": 0.4331,
      "step": 153
    },
    {
      "epoch": 2.7337278106508878,
      "grad_norm": 0.47127389907836914,
      "learning_rate": 2.106052951076365e-07,
      "loss": 0.3481,
      "step": 154
    },
    {
      "epoch": 2.7514792899408285,
      "grad_norm": 0.5236401557922363,
      "learning_rate": 1.8177052032184285e-07,
      "loss": 0.3702,
      "step": 155
    },
    {
      "epoch": 2.769230769230769,
      "grad_norm": 0.5896205902099609,
      "learning_rate": 1.5502128141414496e-07,
      "loss": 0.4371,
      "step": 156
    },
    {
      "epoch": 2.78698224852071,
      "grad_norm": 0.5283501148223877,
      "learning_rate": 1.303691565996712e-07,
      "loss": 0.3741,
      "step": 157
    },
    {
      "epoch": 2.804733727810651,
      "grad_norm": 0.5836403369903564,
      "learning_rate": 1.0782481637284014e-07,
      "loss": 0.4289,
      "step": 158
    },
    {
      "epoch": 2.8224852071005917,
      "grad_norm": 0.64845210313797,
      "learning_rate": 8.739801888871468e-08,
      "loss": 0.4927,
      "step": 159
    },
    {
      "epoch": 2.8402366863905324,
      "grad_norm": 0.5165106654167175,
      "learning_rate": 6.909760573925561e-08,
      "loss": 0.3382,
      "step": 160
    },
    {
      "epoch": 2.8579881656804735,
      "grad_norm": 0.5744462609291077,
      "learning_rate": 5.2931498126298495e-08,
      "loss": 0.4541,
      "step": 161
    },
    {
      "epoch": 2.8757396449704142,
      "grad_norm": 0.5389672517776489,
      "learning_rate": 3.890669343292464e-08,
      "loss": 0.4203,
      "step": 162
    },
    {
      "epoch": 2.893491124260355,
      "grad_norm": 0.43833690881729126,
      "learning_rate": 2.702926219468882e-08,
      "loss": 0.3555,
      "step": 163
    },
    {
      "epoch": 2.9112426035502956,
      "grad_norm": 0.48717668652534485,
      "learning_rate": 1.7304345472035634e-08,
      "loss": 0.4286,
      "step": 164
    },
    {
      "epoch": 2.9289940828402368,
      "grad_norm": 0.5516893267631531,
      "learning_rate": 9.73615262502503e-09,
      "loss": 0.4472,
      "step": 165
    },
    {
      "epoch": 2.9467455621301775,
      "grad_norm": 0.44991251826286316,
      "learning_rate": 4.327959491344791e-09,
      "loss": 0.3565,
      "step": 166
    },
    {
      "epoch": 2.9644970414201186,
      "grad_norm": 0.5328838229179382,
      "learning_rate": 1.082106968385288e-09,
      "loss": 0.45,
      "step": 167
    },
    {
      "epoch": 2.9822485207100593,
      "grad_norm": 0.5235728621482849,
      "learning_rate": 0.0,
      "loss": 0.3742,
      "step": 168
    },
    {
      "epoch": 2.9822485207100593,
      "step": 168,
      "total_flos": 6.46489842951127e+16,
      "train_loss": 0.5384191712808042,
      "train_runtime": 12683.2217,
      "train_samples_per_second": 0.32,
      "train_steps_per_second": 0.013
    }
  ],
  "logging_steps": 1,
  "max_steps": 168,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 600,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.46489842951127e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}