| params | data | task | step | seed | chinchilla | tokens | compute | metrics |
|---|---|---|---|---|---|---|---|---|
| 10M | DCLM-Baseline | mmlu_high_school_geography | 13,750 | small aux 3 | 5xC | 901,120,000 | 53,528,863,703,040,000 |
{'predicted_index_raw': 1.4191919191919191, 'predicted_index_per_token': 1.6161616161616161, 'predicted_index_per_char': 1.5757575757575757, 'predicted_index_per_byte': 1.5757575757575757, 'predicted_index_uncond': 1.5858585858585859, 'correct_choice': 1.7828282828282829, 'acc_raw': 0.20707070707070707, 'acc_per_token': 0.2777777777777778, 'acc_per_char': 0.25757575757575757, 'acc_per_byte': 0.25757575757575757, 'acc_uncond': 0.25757575757575757, 'no_answer': 0.0, 'sum_logits_corr': -27.15490455820103, 'logits_per_token_corr': -6.4302259619713436, 'logits_per_char_corr': -1.2350350807740307, 'bits_per_byte_corr': 1.775312897229918, 'correct_prob': 5.008141292489707e-05, 'correct_prob_per_token': 0.006907103254208915, 'correct_prob_per_char': 0.32483216572591395, 'margin': -1.3716074444931687e-05, 'margin_per_token': -0.003221833211324216, 'margin_per_char': -0.06074018221335966, 'total_prob': 0.00013585900001351518, 'total_prob_per_token': 0.023656152717701544, 'total_prob_per_char': 1.2513120570526732, 'uncond_correct_prob': 9.472635162740652e-06, 'uncond_correct_prob_per_token': 0.002870559161687233, 'uncond_correct_prob_per_char': 0.28084773352992826, 'uncond_total_prob': 2.7271256688734613e-05, 'norm_correct_prob': 0.22576898556105057, 'norm_correct_prob_per_token': 0.2755874661916957, 'norm_correct_prob_per_char': 0.26111198209920783, 'primary_metric': 0.20707070707070707}
| 10M | DCLM-Baseline | mmlu_high_school_geography | 13,750 | default | 5xC | 901,120,000 | 53,528,863,703,040,000 |
{'predicted_index_raw': 1.404040404040404, 'predicted_index_per_token': 1.5454545454545454, 'predicted_index_per_char': 1.5707070707070707, 'predicted_index_per_byte': 1.5707070707070707, 'predicted_index_uncond': 1.6666666666666667, 'correct_choice': 1.7828282828282829, 'acc_raw': 0.2777777777777778, 'acc_per_token': 0.2727272727272727, 'acc_per_char': 0.29292929292929293, 'acc_per_byte': 0.29292929292929293, 'acc_uncond': 0.31313131313131315, 'no_answer': 0.0, 'sum_logits_corr': -26.491325648144038, 'logits_per_token_corr': -6.136068714110205, 'logits_per_char_corr': -1.1888006148980337, 'bits_per_byte_corr': 1.709054697665682, 'correct_prob': 8.924801659309986e-05, 'correct_prob_per_token': 0.00691349705748757, 'correct_prob_per_char': 0.3372636803942704, 'margin': -3.0701128219192353e-05, 'margin_per_token': -0.0032750747926869562, 'margin_per_char': -0.05759712571911482, 'total_prob': 0.0002627738800252976, 'total_prob_per_token': 0.02415918073900293, 'total_prob_per_char': 1.3018642988648181, 'uncond_correct_prob': 8.66534146248293e-06, 'uncond_correct_prob_per_token': 0.0025438315279745537, 'uncond_correct_prob_per_char': 0.2726789863352295, 'uncond_total_prob': 2.3031705025392806e-05, 'norm_correct_prob': 0.24744620154059566, 'norm_correct_prob_per_token': 0.2681949890606498, 'norm_correct_prob_per_char': 0.2596333708929374, 'primary_metric': 0.2777777777777778}
| 10M | DCLM-Baseline | mmlu_high_school_geography | 15,000 | small aux 2 | 5xC | 983,040,000 | 58,395,124,039,680,000 |
{'predicted_index_raw': 1.4242424242424243, 'predicted_index_per_token': 1.5252525252525253, 'predicted_index_per_char': 1.5858585858585859, 'predicted_index_per_byte': 1.5858585858585859, 'predicted_index_uncond': 1.5656565656565657, 'correct_choice': 1.7828282828282829, 'acc_raw': 0.2828282828282828, 'acc_per_token': 0.2676767676767677, 'acc_per_char': 0.2878787878787879, 'acc_per_byte': 0.2878787878787879, 'acc_uncond': 0.2727272727272727, 'no_answer': 0.0, 'sum_logits_corr': -26.09727433474377, 'logits_per_token_corr': -6.052839095085247, 'logits_per_char_corr': -1.1688546977734544, 'bits_per_byte_corr': 1.6806337403778397, 'correct_prob': 7.501120071058675e-05, 'correct_prob_per_token': 0.008420313553075343, 'correct_prob_per_char': 0.3439621247611711, 'margin': -2.006897123298696e-05, 'margin_per_token': -0.0031848105959814883, 'margin_per_char': -0.055380111112951484, 'total_prob': 0.00020970919859804562, 'total_prob_per_token': 0.02809960612173881, 'total_prob_per_char': 1.3198102368744484, 'uncond_correct_prob': 6.880869475103804e-06, 'uncond_correct_prob_per_token': 0.0026388574479952577, 'uncond_correct_prob_per_char': 0.2761423168393784, 'uncond_total_prob': 2.4531267068908664e-05, 'norm_correct_prob': 0.2700748736610376, 'norm_correct_prob_per_token': 0.2767561916020531, 'norm_correct_prob_per_char': 0.26096193776413085, 'primary_metric': 0.2828282828282828}
| 10M | DCLM-Baseline | mmlu_high_school_geography | 15,000 | small aux 3 | 5xC | 983,040,000 | 58,395,124,039,680,000 |
{'predicted_index_raw': 1.378787878787879, 'predicted_index_per_token': 1.5808080808080809, 'predicted_index_per_char': 1.5757575757575757, 'predicted_index_per_byte': 1.5757575757575757, 'predicted_index_uncond': 1.606060606060606, 'correct_choice': 1.7828282828282829, 'acc_raw': 0.21717171717171718, 'acc_per_token': 0.2878787878787879, 'acc_per_char': 0.26262626262626265, 'acc_per_byte': 0.26262626262626265, 'acc_uncond': 0.2828282828282828, 'no_answer': 0.0, 'sum_logits_corr': -27.09944684096057, 'logits_per_token_corr': -6.407430085230439, 'logits_per_char_corr': -1.2321712922298647, 'bits_per_byte_corr': 1.7712234973791041, 'correct_prob': 5.154474034413847e-05, 'correct_prob_per_token': 0.006936097045109744, 'correct_prob_per_char': 0.3259167965436324, 'margin': -1.9421382544630382e-05, 'margin_per_token': -0.0033692438186455143, 'margin_per_char': -0.05968818575288301, 'total_prob': 0.00014345884957458905, 'total_prob_per_token': 0.02395891744880734, 'total_prob_per_char': 1.2537263225378892, 'uncond_correct_prob': 9.741661353656898e-06, 'uncond_correct_prob_per_token': 0.002802083342098787, 'uncond_correct_prob_per_char': 0.27902701954409925, 'uncond_total_prob': 2.766614865900174e-05, 'norm_correct_prob': 0.22610350386767708, 'norm_correct_prob_per_token': 0.27681977167938276, 'norm_correct_prob_per_char': 0.2611762642560091, 'primary_metric': 0.21717171717171718}
| 10M | DCLM-Baseline | mmlu_high_school_geography | 15,000 | default | 5xC | 983,040,000 | 58,395,124,039,680,000 |
{'predicted_index_raw': 1.3737373737373737, 'predicted_index_per_token': 1.5303030303030303, 'predicted_index_per_char': 1.5656565656565657, 'predicted_index_per_byte': 1.5656565656565657, 'predicted_index_uncond': 1.6616161616161615, 'correct_choice': 1.7828282828282829, 'acc_raw': 0.2777777777777778, 'acc_per_token': 0.26262626262626265, 'acc_per_char': 0.30303030303030304, 'acc_per_byte': 0.30303030303030304, 'acc_uncond': 0.30303030303030304, 'no_answer': 0.0, 'sum_logits_corr': -26.487519738650082, 'logits_per_token_corr': -6.125819386678257, 'logits_per_char_corr': -1.185868790424057, 'bits_per_byte_corr': 1.7048394808575964, 'correct_prob': 9.175016994668268e-05, 'correct_prob_per_token': 0.007114266712733918, 'correct_prob_per_char': 0.3378738239750369, 'margin': -9.39872044787964e-06, 'margin_per_token': -0.0032150623433306732, 'margin_per_char': -0.0568428192866271, 'total_prob': 0.0002358768768034621, 'total_prob_per_token': 0.02470325101959065, 'total_prob_per_char': 1.3033139226738788, 'uncond_correct_prob': 8.97935030698669e-06, 'uncond_correct_prob_per_token': 0.002568378172297205, 'uncond_correct_prob_per_char': 0.2745675913158424, 'uncond_total_prob': 2.3393392616857463e-05, 'norm_correct_prob': 0.2499353555119679, 'norm_correct_prob_per_token': 0.2698035503884174, 'norm_correct_prob_per_char': 0.2597786817647775, 'primary_metric': 0.2777777777777778}
| 10M | DCLM-Baseline | mmlu_high_school_geography | 15,117 | small aux 2 | 5xC | 990,707,712 | 58,850,606,007,189,500 |
{'predicted_index_raw': 1.4343434343434343, 'predicted_index_per_token': 1.5151515151515151, 'predicted_index_per_char': 1.5808080808080809, 'predicted_index_per_byte': 1.5808080808080809, 'predicted_index_uncond': 1.5454545454545454, 'correct_choice': 1.7828282828282829, 'acc_raw': 0.2727272727272727, 'acc_per_token': 0.25252525252525254, 'acc_per_char': 0.29292929292929293, 'acc_per_byte': 0.29292929292929293, 'acc_uncond': 0.2777777777777778, 'no_answer': 0.0, 'sum_logits_corr': -26.175271347315626, 'logits_per_token_corr': -6.07946834989949, 'logits_per_char_corr': -1.1738236935491002, 'bits_per_byte_corr': 1.6878063132000638, 'correct_prob': 7.130538669202188e-05, 'correct_prob_per_token': 0.00823357558244518, 'correct_prob_per_char': 0.34234385741840273, 'margin': -1.8057506517255832e-05, 'margin_per_token': -0.003202645432204083, 'margin_per_char': -0.05523003055376265, 'total_prob': 0.0001970583202492532, 'total_prob_per_token': 0.02757828025214538, 'total_prob_per_char': 1.313461307796071, 'uncond_correct_prob': 6.4361925822069105e-06, 'uncond_correct_prob_per_token': 0.002610354285264777, 'uncond_correct_prob_per_char': 0.2749930180211934, 'uncond_total_prob': 2.3226066137398605e-05, 'norm_correct_prob': 0.2700074946647177, 'norm_correct_prob_per_token': 0.2763097681250924, 'norm_correct_prob_per_char': 0.26094375691862093, 'primary_metric': 0.2727272727272727}
| 10M | DCLM-Baseline | mmlu_high_school_geography | 15,117 | small aux 3 | 5xC | 990,707,712 | 58,850,606,007,189,500 |
{'predicted_index_raw': 1.3686868686868687, 'predicted_index_per_token': 1.5808080808080809, 'predicted_index_per_char': 1.5757575757575757, 'predicted_index_per_byte': 1.5757575757575757, 'predicted_index_uncond': 1.5858585858585859, 'correct_choice': 1.7828282828282829, 'acc_raw': 0.21212121212121213, 'acc_per_token': 0.29797979797979796, 'acc_per_char': 0.25757575757575757, 'acc_per_byte': 0.25757575757575757, 'acc_uncond': 0.30303030303030304, 'no_answer': 0.0, 'sum_logits_corr': -27.015799312880546, 'logits_per_token_corr': -6.392699091076896, 'logits_per_char_corr': -1.2286041232998437, 'bits_per_byte_corr': 1.7661129726774003, 'correct_prob': 4.972068348396813e-05, 'correct_prob_per_token': 0.00704854247736914, 'correct_prob_per_char': 0.3268139843112598, 'margin': -1.8692276414764995e-05, 'margin_per_token': -0.0032972424920362843, 'margin_per_char': -0.05948125003060657, 'total_prob': 0.00013946699217029347, 'total_prob_per_token': 0.024242907953334832, 'total_prob_per_char': 1.258114499280203, 'uncond_correct_prob': 1.0053786006232186e-05, 'uncond_correct_prob_per_token': 0.0028378367467670447, 'uncond_correct_prob_per_char': 0.2798278828413361, 'uncond_total_prob': 2.831057979091474e-05, 'norm_correct_prob': 0.2248184762063894, 'norm_correct_prob_per_token': 0.27544848503249053, 'norm_correct_prob_per_char': 0.2608468739159721, 'primary_metric': 0.21212121212121213}
| 10M | DCLM-Baseline | mmlu_high_school_geography | 15,117 | default | 5xC | 990,707,712 | 58,850,606,007,189,500 |
{'predicted_index_raw': 1.3888888888888888, 'predicted_index_per_token': 1.5656565656565657, 'predicted_index_per_char': 1.5656565656565657, 'predicted_index_per_byte': 1.5656565656565657, 'predicted_index_uncond': 1.6565656565656566, 'correct_choice': 1.7828282828282829, 'acc_raw': 0.26262626262626265, 'acc_per_token': 0.25757575757575757, 'acc_per_char': 0.30303030303030304, 'acc_per_byte': 0.30303030303030304, 'acc_uncond': 0.3181818181818182, 'no_answer': 0.0, 'sum_logits_corr': -26.418640541307855, 'logits_per_token_corr': -6.110546721929337, 'logits_per_char_corr': -1.1834581619835958, 'bits_per_byte_corr': 1.7013820431160165, 'correct_prob': 9.235524613085369e-05, 'correct_prob_per_token': 0.007175950708109082, 'correct_prob_per_char': 0.33874369174795016, 'margin': -1.8357347503629293e-05, 'margin_per_token': -0.0032806985727444208, 'margin_per_char': -0.05691375858345177, 'total_prob': 0.00024947946941826347, 'total_prob_per_token': 0.024961199961348112, 'total_prob_per_char': 1.3062907754423136, 'uncond_correct_prob': 8.778697387968721e-06, 'uncond_correct_prob_per_token': 0.0025395227862732204, 'uncond_correct_prob_per_char': 0.2732611293216173, 'uncond_total_prob': 2.3026700223387326e-05, 'norm_correct_prob': 0.2503638596873766, 'norm_correct_prob_per_token': 0.2706580021192799, 'norm_correct_prob_per_char': 0.2598147407110491, 'primary_metric': 0.26262626262626265}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 0 | small aux 2 | 5xC | 0 | 0 |
{'predicted_index_raw': 1.4248704663212435, 'predicted_index_per_token': 1.4455958549222798, 'predicted_index_per_char': 1.5595854922279793, 'predicted_index_per_byte': 1.5595854922279793, 'predicted_index_uncond': 1.4870466321243523, 'correct_choice': 1.766839378238342, 'acc_raw': 0.16580310880829016, 'acc_per_token': 0.16580310880829016, 'acc_per_char': 0.23316062176165803, 'acc_per_byte': 0.23316062176165803, 'acc_uncond': 0.26424870466321243, 'no_answer': 0.0, 'sum_logits_corr': -107.7204640640496, 'logits_per_token_corr': -10.880523001815197, 'logits_per_char_corr': -1.919169388990236, 'bits_per_byte_corr': 2.7687761601241045, 'correct_prob': 1.9944376032167897e-07, 'correct_prob_per_token': 1.889371770399555e-05, 'correct_prob_per_char': 0.1629012976843229, 'margin': -6.036184939184662e-07, 'margin_per_token': -1.7318444433906849e-06, 'margin_per_char': -0.04315618093062075, 'total_prob': 1.159895955669897e-06, 'total_prob_per_token': 7.655675210392736e-05, 'total_prob_per_char': 0.651218277343619, 'uncond_correct_prob': 1.8591770252131182e-07, 'uncond_correct_prob_per_token': 1.8944303249966408e-05, 'uncond_correct_prob_per_char': 0.16292847467966518, 'uncond_total_prob': 1.1167308192093767e-06, 'norm_correct_prob': 0.18570614142688618, 'norm_correct_prob_per_token': 0.24692122883033626, 'norm_correct_prob_per_char': 0.24846953623235976, 'primary_metric': 0.16580310880829016}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 0 | small aux 3 | 5xC | 0 | 0 |
{'predicted_index_raw': 1.4663212435233162, 'predicted_index_per_token': 1.5906735751295338, 'predicted_index_per_char': 1.544041450777202, 'predicted_index_per_byte': 1.544041450777202, 'predicted_index_uncond': 1.6580310880829014, 'correct_choice': 1.766839378238342, 'acc_raw': 0.19170984455958548, 'acc_per_token': 0.24870466321243523, 'acc_per_char': 0.23316062176165803, 'acc_per_byte': 0.23316062176165803, 'acc_uncond': 0.22797927461139897, 'no_answer': 0.0, 'sum_logits_corr': -107.08992356710483, 'logits_per_token_corr': -10.81360749318345, 'logits_per_char_corr': -1.907944578688437, 'bits_per_byte_corr': 2.7525821819667677, 'correct_prob': 2.0156911893322872e-07, 'correct_prob_per_token': 2.0207239103513242e-05, 'correct_prob_per_char': 0.16471073602665273, 'margin': -7.005072220159825e-07, 'margin_per_token': -1.351356208766559e-06, 'margin_per_char': -0.04230631008899894, 'total_prob': 1.3065540578750753e-06, 'total_prob_per_token': 8.078848289653595e-05, 'total_prob_per_char': 0.65678848859991, 'uncond_correct_prob': 1.615722418222093e-07, 'uncond_correct_prob_per_token': 1.9581726789646228e-05, 'uncond_correct_prob_per_char': 0.16377697698210486, 'uncond_total_prob': 1.0386780589000243e-06, 'norm_correct_prob': 0.19454659443866834, 'norm_correct_prob_per_token': 0.2501450888141543, 'norm_correct_prob_per_char': 0.24906790460633974, 'primary_metric': 0.19170984455958548}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 0 | default | 5xC | 0 | 0 |
{'predicted_index_raw': 1.378238341968912, 'predicted_index_per_token': 1.4663212435233162, 'predicted_index_per_char': 1.544041450777202, 'predicted_index_per_byte': 1.544041450777202, 'predicted_index_uncond': 1.2953367875647668, 'correct_choice': 1.766839378238342, 'acc_raw': 0.21761658031088082, 'acc_per_token': 0.27461139896373055, 'acc_per_char': 0.23316062176165803, 'acc_per_byte': 0.23316062176165803, 'acc_uncond': 0.27461139896373055, 'no_answer': 0.0, 'sum_logits_corr': -107.6706942671939, 'logits_per_token_corr': -10.877527506368365, 'logits_per_char_corr': -1.917737547496426, 'bits_per_byte_corr': 2.7667104495016446, 'correct_prob': 1.6077870724866937e-07, 'correct_prob_per_token': 1.8990727383809065e-05, 'correct_prob_per_char': 0.16288419581990624, 'margin': -6.374367856806929e-07, 'margin_per_token': -1.39053792663973e-06, 'margin_per_char': -0.04233694193012815, 'total_prob': 1.16987314839813e-06, 'total_prob_per_token': 7.550986057568105e-05, 'total_prob_per_char': 0.6493801108492382, 'uncond_correct_prob': 2.164214307446655e-07, 'uncond_correct_prob_per_token': 2.004009861077277e-05, 'uncond_correct_prob_per_char': 0.16439610330369717, 'uncond_total_prob': 1.101554363127961e-06, 'norm_correct_prob': 0.20450269508096042, 'norm_correct_prob_per_token': 0.25135193441711395, 'norm_correct_prob_per_char': 0.24920600587945202, 'primary_metric': 0.21761658031088082}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 2,500 | small aux 2 | 5xC | 163,840,000 | 9,732,520,673,280,000 |
{'predicted_index_raw': 1.5854922279792747, 'predicted_index_per_token': 1.5336787564766838, 'predicted_index_per_char': 1.5647668393782384, 'predicted_index_per_byte': 1.5647668393782384, 'predicted_index_uncond': 1.4663212435233162, 'correct_choice': 1.766839378238342, 'acc_raw': 0.21243523316062177, 'acc_per_token': 0.2849740932642487, 'acc_per_char': 0.25906735751295334, 'acc_per_byte': 0.25906735751295334, 'acc_uncond': 0.3005181347150259, 'no_answer': 0.0, 'sum_logits_corr': -49.85155777115896, 'logits_per_token_corr': -5.645587766454699, 'logits_per_char_corr': -0.975437512591814, 'bits_per_byte_corr': 1.407258862114287, 'correct_prob': 1.3429516408307039e-06, 'correct_prob_per_token': 0.007959473626449085, 'correct_prob_per_char': 0.39559926187500705, 'margin': -1.802243567027195e-06, 'margin_per_token': -0.004649882508355285, 'margin_per_char': -0.05309127687802676, 'total_prob': 5.260222162041319e-06, 'total_prob_per_token': 0.030510070476592686, 'total_prob_per_char': 1.5635731123127525, 'uncond_correct_prob': 5.451359501125661e-07, 'uncond_correct_prob_per_token': 0.005381245922652826, 'uncond_correct_prob_per_char': 0.3655144934544783, 'uncond_total_prob': 2.0950439943917853e-06, 'norm_correct_prob': 0.21561820037246784, 'norm_correct_prob_per_token': 0.2614959452850456, 'norm_correct_prob_per_char': 0.25231537688527145, 'primary_metric': 0.21243523316062177}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 2,500 | small aux 3 | 5xC | 163,840,000 | 9,732,520,673,280,000 |
{'predicted_index_raw': 1.5958549222797926, 'predicted_index_per_token': 1.455958549222798, 'predicted_index_per_char': 1.528497409326425, 'predicted_index_per_byte': 1.528497409326425, 'predicted_index_uncond': 1.5595854922279793, 'correct_choice': 1.766839378238342, 'acc_raw': 0.21761658031088082, 'acc_per_token': 0.2538860103626943, 'acc_per_char': 0.24352331606217617, 'acc_per_byte': 0.24352331606217617, 'acc_uncond': 0.25906735751295334, 'no_answer': 0.0, 'sum_logits_corr': -49.643950294336506, 'logits_per_token_corr': -5.6710959117249296, 'logits_per_char_corr': -0.9723326683044125, 'bits_per_byte_corr': 1.4027795186581173, 'correct_prob': 1.957149848010716e-06, 'correct_prob_per_token': 0.007987660665641279, 'correct_prob_per_char': 0.3944557236848478, 'margin': -5.81335662066133e-06, 'margin_per_token': -0.004777360730387515, 'margin_per_char': -0.052605238519152754, 'total_prob': 1.0581640248879376e-05, 'total_prob_per_token': 0.030638778971432093, 'total_prob_per_char': 1.5643643960185445, 'uncond_correct_prob': 1.2271325091382245e-07, 'uncond_correct_prob_per_token': 0.0037043138357561794, 'uncond_correct_prob_per_char': 0.33465394428204376, 'uncond_total_prob': 6.619697404690109e-07, 'norm_correct_prob': 0.20854473134755327, 'norm_correct_prob_per_token': 0.2566453567111037, 'norm_correct_prob_per_char': 0.25212854637554927, 'primary_metric': 0.21761658031088082}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 2,500 | default | 5xC | 163,840,000 | 9,732,520,673,280,000 |
{'predicted_index_raw': 1.544041450777202, 'predicted_index_per_token': 1.5077720207253886, 'predicted_index_per_char': 1.461139896373057, 'predicted_index_per_byte': 1.461139896373057, 'predicted_index_uncond': 1.4248704663212435, 'correct_choice': 1.766839378238342, 'acc_raw': 0.21243523316062177, 'acc_per_token': 0.25906735751295334, 'acc_per_char': 0.18652849740932642, 'acc_per_byte': 0.18652849740932642, 'acc_uncond': 0.20725388601036268, 'no_answer': 0.0, 'sum_logits_corr': -50.853000077558924, 'logits_per_token_corr': -5.855873407451588, 'logits_per_char_corr': -1.0050139332581123, 'bits_per_byte_corr': 1.449928617536832, 'correct_prob': 1.0223997759349428e-06, 'correct_prob_per_token': 0.00741419372095719, 'correct_prob_per_char': 0.38382522131163205, 'margin': -1.5549730475464446e-06, 'margin_per_token': -0.005204891485445194, 'margin_per_char': -0.061508950243768463, 'total_prob': 4.499529682972078e-06, 'total_prob_per_token': 0.02998521987612756, 'total_prob_per_char': 1.537752733226202, 'uncond_correct_prob': 4.5773338228878254e-07, 'uncond_correct_prob_per_token': 0.004845258774784435, 'uncond_correct_prob_per_char': 0.35645420794999116, 'uncond_total_prob': 1.3332520668972508e-06, 'norm_correct_prob': 0.2051718312525143, 'norm_correct_prob_per_token': 0.24579809710934689, 'norm_correct_prob_per_char': 0.24929804808392647, 'primary_metric': 0.21243523316062177}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 3,750 | small aux 2 | 5xC | 245,760,000 | 14,598,781,009,920,000 |
{'predicted_index_raw': 1.5647668393782384, 'predicted_index_per_token': 1.5958549222797926, 'predicted_index_per_char': 1.4974093264248705, 'predicted_index_per_byte': 1.4974093264248705, 'predicted_index_uncond': 1.4248704663212435, 'correct_choice': 1.766839378238342, 'acc_raw': 0.23834196891191708, 'acc_per_token': 0.2694300518134715, 'acc_per_char': 0.2538860103626943, 'acc_per_byte': 0.2538860103626943, 'acc_uncond': 0.23834196891191708, 'no_answer': 0.0, 'sum_logits_corr': -48.700824485541624, 'logits_per_token_corr': -5.48720704769799, 'logits_per_char_corr': -0.9506490387117839, 'bits_per_byte_corr': 1.3714966537763362, 'correct_prob': 2.2410943678936316e-06, 'correct_prob_per_token': 0.008961632205344914, 'correct_prob_per_char': 0.4043554373471894, 'margin': -9.245120468131151e-06, 'margin_per_token': -0.004884491928351983, 'margin_per_char': -0.049663029527210814, 'total_prob': 1.8256931183430595e-05, 'total_prob_per_token': 0.03381799809815907, 'total_prob_per_char': 1.592401424877512, 'uncond_correct_prob': 8.843677781083209e-07, 'uncond_correct_prob_per_token': 0.005624916159779407, 'uncond_correct_prob_per_char': 0.36830381511511123, 'uncond_total_prob': 5.129739251376235e-06, 'norm_correct_prob': 0.2400168600739583, 'norm_correct_prob_per_token': 0.26586434763959726, 'norm_correct_prob_per_char': 0.2537941510138075, 'primary_metric': 0.23834196891191708}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 3,750 | small aux 3 | 5xC | 245,760,000 | 14,598,781,009,920,000 |
{'predicted_index_raw': 1.528497409326425, 'predicted_index_per_token': 1.5803108808290156, 'predicted_index_per_char': 1.455958549222798, 'predicted_index_per_byte': 1.455958549222798, 'predicted_index_uncond': 1.461139896373057, 'correct_choice': 1.766839378238342, 'acc_raw': 0.20725388601036268, 'acc_per_token': 0.22279792746113988, 'acc_per_char': 0.21243523316062177, 'acc_per_byte': 0.21243523316062177, 'acc_uncond': 0.25906735751295334, 'no_answer': 0.0, 'sum_logits_corr': -51.11831922975846, 'logits_per_token_corr': -5.931561988238743, 'logits_per_char_corr': -1.0119615778058149, 'bits_per_byte_corr': 1.4599519498716689, 'correct_prob': 1.0242166215311647e-06, 'correct_prob_per_token': 0.007392764509426715, 'correct_prob_per_char': 0.38061185298527594, 'margin': -1.9038932547857815e-07, 'margin_per_token': -0.004453883197602976, 'margin_per_char': -0.05263027015767747, 'total_prob': 2.411898054552963e-06, 'total_prob_per_token': 0.02856453296834797, 'total_prob_per_char': 1.512697954945264, 'uncond_correct_prob': 3.3446817855706955e-07, 'uncond_correct_prob_per_token': 0.00507674979739918, 'uncond_correct_prob_per_char': 0.36230151548385503, 'uncond_total_prob': 1.1964180687296304e-06, 'norm_correct_prob': 0.2052984656146511, 'norm_correct_prob_per_token': 0.25111581846340614, 'norm_correct_prob_per_char': 0.25151586037692447, 'primary_metric': 0.20725388601036268}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 3,750 | default | 5xC | 245,760,000 | 14,598,781,009,920,000 |
{'predicted_index_raw': 1.5958549222797926, 'predicted_index_per_token': 1.5129533678756477, 'predicted_index_per_char': 1.5181347150259068, 'predicted_index_per_byte': 1.5181347150259068, 'predicted_index_uncond': 1.616580310880829, 'correct_choice': 1.766839378238342, 'acc_raw': 0.22279792746113988, 'acc_per_token': 0.2694300518134715, 'acc_per_char': 0.22279792746113988, 'acc_per_byte': 0.22279792746113988, 'acc_uncond': 0.23316062176165803, 'no_answer': 0.0, 'sum_logits_corr': -49.80058636937117, 'logits_per_token_corr': -5.632075801554778, 'logits_per_char_corr': -0.9720094139247837, 'bits_per_byte_corr': 1.4023131611676805, 'correct_prob': 2.8589855892370576e-06, 'correct_prob_per_token': 0.008659041301434559, 'correct_prob_per_char': 0.3960973776097226, 'margin': -2.5859011659244136e-06, 'margin_per_token': -0.005239105493756476, 'margin_per_char': -0.05418312284422811, 'total_prob': 1.0403949794366512e-05, 'total_prob_per_token': 0.0332383458555919, 'total_prob_per_char': 1.5695906732258538, 'uncond_correct_prob': 1.2346037125611406e-07, 'uncond_correct_prob_per_token': 0.0047774611537331774, 'uncond_correct_prob_per_char': 0.3552558210026042, 'uncond_total_prob': 6.50468181479932e-07, 'norm_correct_prob': 0.2231476884350985, 'norm_correct_prob_per_token': 0.26237850657917, 'norm_correct_prob_per_char': 0.252382124601798, 'primary_metric': 0.22279792746113988}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 5,000 | small aux 2 | 5xC | 327,680,000 | 19,465,041,346,560,000 |
{'predicted_index_raw': 1.5647668393782384, 'predicted_index_per_token': 1.5233160621761659, 'predicted_index_per_char': 1.5803108808290156, 'predicted_index_per_byte': 1.5803108808290156, 'predicted_index_uncond': 1.3419689119170986, 'correct_choice': 1.766839378238342, 'acc_raw': 0.21243523316062177, 'acc_per_token': 0.3316062176165803, 'acc_per_char': 0.2694300518134715, 'acc_per_byte': 0.2694300518134715, 'acc_uncond': 0.2694300518134715, 'no_answer': 0.0, 'sum_logits_corr': -47.38568385524453, 'logits_per_token_corr': -5.376461227295373, 'logits_per_char_corr': -0.9301500720808134, 'bits_per_byte_corr': 1.3419228962744654, 'correct_prob': 8.33735857529421e-06, 'correct_prob_per_token': 0.010269278671691012, 'correct_prob_per_char': 0.4130432793447156, 'margin': -1.025152665607777e-05, 'margin_per_token': -0.005211489723841863, 'margin_per_char': -0.05057925903554958, 'total_prob': 3.014008516944628e-05, 'total_prob_per_token': 0.03831196658713562, 'total_prob_per_char': 1.6257782347710403, 'uncond_correct_prob': 2.1040777274156224e-07, 'uncond_correct_prob_per_token': 0.006081708548593019, 'uncond_correct_prob_per_char': 0.37493547917026576, 'uncond_total_prob': 2.397859995721948e-06, 'norm_correct_prob': 0.21616306575875346, 'norm_correct_prob_per_token': 0.2711512167305003, 'norm_correct_prob_per_char': 0.25419718500550714, 'primary_metric': 0.21243523316062177}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 5,000 | small aux 3 | 5xC | 327,680,000 | 19,465,041,346,560,000 |
{'predicted_index_raw': 1.6424870466321244, 'predicted_index_per_token': 1.5803108808290156, 'predicted_index_per_char': 1.4922279792746114, 'predicted_index_per_byte': 1.4922279792746114, 'predicted_index_uncond': 1.5854922279792747, 'correct_choice': 1.766839378238342, 'acc_raw': 0.20207253886010362, 'acc_per_token': 0.2694300518134715, 'acc_per_char': 0.25906735751295334, 'acc_per_byte': 0.25906735751295334, 'acc_uncond': 0.21243523316062177, 'no_answer': 0.0, 'sum_logits_corr': -48.143960223914426, 'logits_per_token_corr': -5.538279155736569, 'logits_per_char_corr': -0.9531069043725766, 'bits_per_byte_corr': 1.3750426043763357, 'correct_prob': 4.848564973115211e-06, 'correct_prob_per_token': 0.00970261921211545, 'correct_prob_per_char': 0.40451803071185954, 'margin': -9.262515986417334e-08, 'margin_per_token': -0.005784688799384335, 'margin_per_char': -0.05579119682405013, 'total_prob': 1.0538934864098762e-05, 'total_prob_per_token': 0.03734963158994975, 'total_prob_per_char': 1.6088499061702162, 'uncond_correct_prob': 3.7901875390547647e-07, 'uncond_correct_prob_per_token': 0.005762500033698966, 'uncond_correct_prob_per_char': 0.3703809193068455, 'uncond_total_prob': 1.7277737686551606e-06, 'norm_correct_prob': 0.20901797291094307, 'norm_correct_prob_per_token': 0.2554119977864547, 'norm_correct_prob_per_char': 0.25082825402075504, 'primary_metric': 0.20207253886010362}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 5,000 | default | 5xC | 327,680,000 | 19,465,041,346,560,000 |
{'predicted_index_raw': 1.528497409326425, 'predicted_index_per_token': 1.5336787564766838, 'predicted_index_per_char': 1.6269430051813472, 'predicted_index_per_byte': 1.6269430051813472, 'predicted_index_uncond': 1.6062176165803108, 'correct_choice': 1.766839378238342, 'acc_raw': 0.24352331606217617, 'acc_per_token': 0.2694300518134715, 'acc_per_char': 0.23834196891191708, 'acc_per_byte': 0.23834196891191708, 'acc_uncond': 0.25906735751295334, 'no_answer': 0.0, 'sum_logits_corr': -48.14195358197306, 'logits_per_token_corr': -5.46495800259727, 'logits_per_char_corr': -0.9416439118413144, 'bits_per_byte_corr': 1.3585050018977243, 'correct_prob': 3.6349977059175124e-06, 'correct_prob_per_token': 0.009719943952862357, 'correct_prob_per_char': 0.4070297075468417, 'margin': -1.7328362052314682e-06, 'margin_per_token': -0.005792754429276367, 'margin_per_char': -0.05357237341477525, 'total_prob': 1.079291938598558e-05, 'total_prob_per_token': 0.03703710951142996, 'total_prob_per_char': 1.6127594470077522, 'uncond_correct_prob': 2.7180431263757404e-07, 'uncond_correct_prob_per_token': 0.005442012893953207, 'uncond_correct_prob_per_char': 0.36532406915099197, 'uncond_total_prob': 1.4220914570052352e-06, 'norm_correct_prob': 0.23013317071769368, 'norm_correct_prob_per_token': 0.2581233977517459, 'norm_correct_prob_per_char': 0.25185796497306046, 'primary_metric': 0.24352331606217617}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 7,500 | small aux 2 | 5xC | 491,520,000 | 29,197,562,019,840,000 |
{'predicted_index_raw': 1.538860103626943, 'predicted_index_per_token': 1.528497409326425, 'predicted_index_per_char': 1.5751295336787565, 'predicted_index_per_byte': 1.5751295336787565, 'predicted_index_uncond': 1.4974093264248705, 'correct_choice': 1.766839378238342, 'acc_raw': 0.23316062176165803, 'acc_per_token': 0.27979274611398963, 'acc_per_char': 0.29533678756476683, 'acc_per_byte': 0.29533678756476683, 'acc_uncond': 0.2849740932642487, 'no_answer': 0.0, 'sum_logits_corr': -46.97466679805301, 'logits_per_token_corr': -5.350872318559728, 'logits_per_char_corr': -0.922641569286599, 'bits_per_byte_corr': 1.331090416528744, 'correct_prob': 3.715494678949933e-06, 'correct_prob_per_token': 0.010772731509672027, 'correct_prob_per_char': 0.4155718949929629, 'margin': -1.163146677712904e-05, 'margin_per_token': -0.005745271265859142, 'margin_per_char': -0.050649229269395475, 'total_prob': 2.6775569269487568e-05, 'total_prob_per_token': 0.04072916779895014, 'total_prob_per_char': 1.6378103514148634, 'uncond_correct_prob': 2.783302111890172e-07, 'uncond_correct_prob_per_token': 0.005987511667676404, 'uncond_correct_prob_per_char': 0.37253673865180437, 'uncond_total_prob': 1.2231656277984818e-06, 'norm_correct_prob': 0.22166671007095226, 'norm_correct_prob_per_token': 0.2681905816517469, 'norm_correct_prob_per_char': 0.2538532781783257, 'primary_metric': 0.23316062176165803}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 7,500 | small aux 3 | 5xC | 491,520,000 | 29,197,562,019,840,000 |
{'predicted_index_raw': 1.5906735751295338, 'predicted_index_per_token': 1.616580310880829, 'predicted_index_per_char': 1.5233160621761659, 'predicted_index_per_byte': 1.5233160621761659, 'predicted_index_uncond': 1.4870466321243523, 'correct_choice': 1.766839378238342, 'acc_raw': 0.20207253886010362, 'acc_per_token': 0.22797927461139897, 'acc_per_char': 0.23834196891191708, 'acc_per_byte': 0.23834196891191708, 'acc_uncond': 0.24870466321243523, 'no_answer': 0.0, 'sum_logits_corr': -48.72205456056743, 'logits_per_token_corr': -5.611913489070776, 'logits_per_char_corr': -0.9635135484395012, 'bits_per_byte_corr': 1.390056218163995, 'correct_prob': 7.834872792809076e-06, 'correct_prob_per_token': 0.00920653834144268, 'correct_prob_per_char': 0.4000317242831097, 'margin': -3.914142225924903e-06, 'margin_per_token': -0.006094453190280475, 'margin_per_char': -0.05733624580611921, 'total_prob': 2.2110466774439755e-05, 'total_prob_per_token': 0.03665928108087598, 'total_prob_per_char': 1.5920534857176951, 'uncond_correct_prob': 3.964322994102196e-07, 'uncond_correct_prob_per_token': 0.0058977044600090015, 'uncond_correct_prob_per_char': 0.371975885697561, 'uncond_total_prob': 2.201577205119429e-06, 'norm_correct_prob': 0.2051218485008673, 'norm_correct_prob_per_token': 0.25177122832924054, 'norm_correct_prob_per_char': 0.2514306138843167, 'primary_metric': 0.20207253886010362}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 7,500 | default | 5xC | 491,520,000 | 29,197,562,019,840,000 |
{'predicted_index_raw': 1.5595854922279793, 'predicted_index_per_token': 1.6373056994818653, 'predicted_index_per_char': 1.616580310880829, 'predicted_index_per_byte': 1.616580310880829, 'predicted_index_uncond': 1.616580310880829, 'correct_choice': 1.766839378238342, 'acc_raw': 0.24870466321243523, 'acc_per_token': 0.26424870466321243, 'acc_per_char': 0.22797927461139897, 'acc_per_byte': 0.22797927461139897, 'acc_uncond': 0.27979274611398963, 'no_answer': 0.0, 'sum_logits_corr': -47.60741433701985, 'logits_per_token_corr': -5.432925528526355, 'logits_per_char_corr': -0.9352603631691042, 'bits_per_byte_corr': 1.3492954878850472, 'correct_prob': 3.36623210843365e-06, 'correct_prob_per_token': 0.010238666506434712, 'correct_prob_per_char': 0.41017970168475926, 'margin': -3.720981299866937e-06, 'margin_per_token': -0.0060956335896220105, 'margin_per_char': -0.05216628466147129, 'total_prob': 1.103613392415138e-05, 'total_prob_per_token': 0.039546208070759335, 'total_prob_per_char': 1.6199311752263863, 'uncond_correct_prob': 6.645944231434406e-07, 'uncond_correct_prob_per_token': 0.005869850298123598, 'uncond_correct_prob_per_char': 0.3731322884869973, 'uncond_total_prob': 1.4487707110795889e-06, 'norm_correct_prob': 0.23389088563091553, 'norm_correct_prob_per_token': 0.26058541187651496, 'norm_correct_prob_per_char': 0.25303346175125846, 'primary_metric': 0.24870466321243523}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 8,750 | small aux 2 | 5xC | 573,440,000 | 34,063,822,356,480,000 |
{'predicted_index_raw': 1.6632124352331605, 'predicted_index_per_token': 1.5336787564766838, 'predicted_index_per_char': 1.6010362694300517, 'predicted_index_per_byte': 1.6010362694300517, 'predicted_index_uncond': 1.4974093264248705, 'correct_choice': 1.766839378238342, 'acc_raw': 0.2694300518134715, 'acc_per_token': 0.3005181347150259, 'acc_per_char': 0.27461139896373055, 'acc_per_byte': 0.27461139896373055, 'acc_uncond': 0.29015544041450775, 'no_answer': 0.0, 'sum_logits_corr': -46.46928213050328, 'logits_per_token_corr': -5.290726063838256, 'logits_per_char_corr': -0.909884507839658, 'bits_per_byte_corr': 1.312685867242913, 'correct_prob': 1.5537751049430064e-06, 'correct_prob_per_token': 0.01112151976915442, 'correct_prob_per_char': 0.41918177109773525, 'margin': -7.118772063016444e-06, 'margin_per_token': -0.006149562590628759, 'margin_per_char': -0.053465746754091825, 'total_prob': 1.2606558760223916e-05, 'total_prob_per_token': 0.04222782411359368, 'total_prob_per_char': 1.6555610685329405, 'uncond_correct_prob': 2.009775507701716e-07, 'uncond_correct_prob_per_token': 0.0059182479403429985, 'uncond_correct_prob_per_char': 0.36834902150296994, 'uncond_total_prob': 9.806215280564598e-07, 'norm_correct_prob': 0.25037090735224343, 'norm_correct_prob_per_token': 0.2647189180164948, 'norm_correct_prob_per_char': 0.25320629422473157, 'primary_metric': 0.2694300518134715}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 8,750 | small aux 3 | 5xC | 573,440,000 | 34,063,822,356,480,000 |
{'predicted_index_raw': 1.6010362694300517, 'predicted_index_per_token': 1.4870466321243523, 'predicted_index_per_char': 1.5699481865284974, 'predicted_index_per_byte': 1.5699481865284974, 'predicted_index_uncond': 1.461139896373057, 'correct_choice': 1.766839378238342, 'acc_raw': 0.21243523316062177, 'acc_per_token': 0.25906735751295334, 'acc_per_char': 0.22279792746113988, 'acc_per_byte': 0.22279792746113988, 'acc_uncond': 0.2538860103626943, 'no_answer': 0.0, 'sum_logits_corr': -46.94073277310386, 'logits_per_token_corr': -5.374369059918961, 'logits_per_char_corr': -0.925718201928179, 'bits_per_byte_corr': 1.335529059183391, 'correct_prob': 1.8363000391336452e-06, 'correct_prob_per_token': 0.010606199862937886, 'correct_prob_per_char': 0.4139971608066896, 'margin': -4.549051638636178e-06, 'margin_per_token': -0.006553629802620257, 'margin_per_char': -0.05355202539512587, 'total_prob': 1.1961124455033935e-05, 'total_prob_per_token': 0.04134318483052814, 'total_prob_per_char': 1.6396590550528511, 'uncond_correct_prob': 3.689257010481664e-07, 'uncond_correct_prob_per_token': 0.0063837329705419355, 'uncond_correct_prob_per_char': 0.37705140541707033, 'uncond_total_prob': 1.6594869602885838e-06, 'norm_correct_prob': 0.2140534259458155, 'norm_correct_prob_per_token': 0.2534221821795143, 'norm_correct_prob_per_char': 0.252440527084004, 'primary_metric': 0.21243523316062177}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 8,750 | default | 5xC | 573,440,000 | 34,063,822,356,480,000 |
{'predicted_index_raw': 1.538860103626943, 'predicted_index_per_token': 1.61139896373057, 'predicted_index_per_char': 1.616580310880829, 'predicted_index_per_byte': 1.616580310880829, 'predicted_index_uncond': 1.471502590673575, 'correct_choice': 1.766839378238342, 'acc_raw': 0.22797927461139897, 'acc_per_token': 0.2694300518134715, 'acc_per_char': 0.29533678756476683, 'acc_per_byte': 0.29533678756476683, 'acc_uncond': 0.23834196891191708, 'no_answer': 0.0, 'sum_logits_corr': -48.00012464968034, 'logits_per_token_corr': -5.532733253236012, 'logits_per_char_corr': -0.9494286850986997, 'bits_per_byte_corr': 1.3697360556706073, 'correct_prob': 2.505019982465144e-06, 'correct_prob_per_token': 0.009495379061174605, 'correct_prob_per_char': 0.4042846945433266, 'margin': -9.457986684105375e-08, 'margin_per_token': -0.0054946121751572995, 'margin_per_char': -0.05126922401011054, 'total_prob': 5.347846556966453e-06, 'total_prob_per_token': 0.036513031523075425, 'total_prob_per_char': 1.5976520236840046, 'uncond_correct_prob': 2.787862547503833e-07, 'uncond_correct_prob_per_token': 0.005647818162680822, 'uncond_correct_prob_per_char': 0.373143350017016, 'uncond_total_prob': 1.1433445640175234e-06, 'norm_correct_prob': 0.22267836724267437, 'norm_correct_prob_per_token': 0.25692363619843195, 'norm_correct_prob_per_char': 0.2529703220748636, 'primary_metric': 0.22797927461139897}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 10,000 | small aux 2 | 5xC | 655,360,000 | 38,930,082,693,120,000 |
{'predicted_index_raw': 1.5854922279792747, 'predicted_index_per_token': 1.4922279792746114, 'predicted_index_per_char': 1.6269430051813472, 'predicted_index_per_byte': 1.6269430051813472, 'predicted_index_uncond': 1.5233160621761659, 'correct_choice': 1.766839378238342, 'acc_raw': 0.24870466321243523, 'acc_per_token': 0.2694300518134715, 'acc_per_char': 0.2538860103626943, 'acc_per_byte': 0.2538860103626943, 'acc_uncond': 0.27979274611398963, 'no_answer': 0.0, 'sum_logits_corr': -45.8284110677057, 'logits_per_token_corr': -5.272495502214007, 'logits_per_char_corr': -0.9053738982524598, 'bits_per_byte_corr': 1.3061784331600712, 'correct_prob': 2.2469387233658484e-06, 'correct_prob_per_token': 0.011817920415226607, 'correct_prob_per_char': 0.4218209582627715, 'margin': -4.1401382578069985e-06, 'margin_per_token': -0.006301743168793653, 'margin_per_char': -0.053066037727510944, 'total_prob': 1.1298999361731141e-05, 'total_prob_per_token': 0.04452305395365268, 'total_prob_per_char': 1.666859085815034, 'uncond_correct_prob': 1.920630114796323e-07, 'uncond_correct_prob_per_token': 0.006267664558379319, 'uncond_correct_prob_per_char': 0.3715668181657472, 'uncond_total_prob': 2.073435455208104e-06, 'norm_correct_prob': 0.2416602518703838, 'norm_correct_prob_per_token': 0.2607452995646347, 'norm_correct_prob_per_char': 0.2532205460200737, 'primary_metric': 0.24870466321243523}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 10,000 | small aux 3 | 5xC | 655,360,000 | 38,930,082,693,120,000 |
{'predicted_index_raw': 1.616580310880829, 'predicted_index_per_token': 1.4455958549222798, 'predicted_index_per_char': 1.5129533678756477, 'predicted_index_per_byte': 1.5129533678756477, 'predicted_index_uncond': 1.455958549222798, 'correct_choice': 1.766839378238342, 'acc_raw': 0.24352331606217617, 'acc_per_token': 0.25906735751295334, 'acc_per_char': 0.29015544041450775, 'acc_per_byte': 0.29015544041450775, 'acc_uncond': 0.2694300518134715, 'no_answer': 0.0, 'sum_logits_corr': -46.673543010968615, 'logits_per_token_corr': -5.343333954329113, 'logits_per_char_corr': -0.9165342448870731, 'bits_per_byte_corr': 1.3222794099044413, 'correct_prob': 5.819159606055389e-06, 'correct_prob_per_token': 0.011498940694233345, 'correct_prob_per_char': 0.41748876808920216, 'margin': -8.205502577933037e-07, 'margin_per_token': -0.006958504131136344, 'margin_per_char': -0.055086184235633495, 'total_prob': 1.6161726428221592e-05, 'total_prob_per_token': 0.04418335644041256, 'total_prob_per_char': 1.656632928103345, 'uncond_correct_prob': 2.301689043443896e-07, 'uncond_correct_prob_per_token': 0.006027083275773026, 'uncond_correct_prob_per_char': 0.3701491340352952, 'uncond_total_prob': 1.3219238273714112e-06, 'norm_correct_prob': 0.22957438129599161, 'norm_correct_prob_per_token': 0.25508902091797286, 'norm_correct_prob_per_char': 0.25178498708208213, 'primary_metric': 0.24352331606217617}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 10,000 | default | 5xC | 655,360,000 | 38,930,082,693,120,000 |
{'predicted_index_raw': 1.621761658031088, 'predicted_index_per_token': 1.6735751295336787, 'predicted_index_per_char': 1.5854922279792747, 'predicted_index_per_byte': 1.5854922279792747, 'predicted_index_uncond': 1.5803108808290156, 'correct_choice': 1.766839378238342, 'acc_raw': 0.23316062176165803, 'acc_per_token': 0.24870466321243523, 'acc_per_char': 0.2538860103626943, 'acc_per_byte': 0.2538860103626943, 'acc_uncond': 0.2849740932642487, 'no_answer': 0.0, 'sum_logits_corr': -46.748695334004616, 'logits_per_token_corr': -5.312308836118689, 'logits_per_char_corr': -0.9143981646983806, 'bits_per_byte_corr': 1.3191976976092714, 'correct_prob': 4.887436799833909e-06, 'correct_prob_per_token': 0.010782414569304828, 'correct_prob_per_char': 0.4171921342005115, 'margin': 8.236419251938881e-07, 'margin_per_token': -0.006526928667497548, 'margin_per_char': -0.05266156271571799, 'total_prob': 9.601806586842618e-06, 'total_prob_per_token': 0.0418683398425195, 'total_prob_per_char': 1.646938632980476, 'uncond_correct_prob': 4.802544811584944e-07, 'uncond_correct_prob_per_token': 0.005612784884961094, 'uncond_correct_prob_per_char': 0.3643233945772548, 'uncond_total_prob': 9.01872903184704e-07, 'norm_correct_prob': 0.23328250438128764, 'norm_correct_prob_per_token': 0.2612103598393549, 'norm_correct_prob_per_char': 0.25348596958015285, 'primary_metric': 0.23316062176165803}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 11,250 | small aux 2 | 5xC | 737,280,000 | 43,796,343,029,760,000 |
{'predicted_index_raw': 1.5751295336787565, 'predicted_index_per_token': 1.6062176165803108, 'predicted_index_per_char': 1.6683937823834196, 'predicted_index_per_byte': 1.6683937823834196, 'predicted_index_uncond': 1.5336787564766838, 'correct_choice': 1.766839378238342, 'acc_raw': 0.23834196891191708, 'acc_per_token': 0.24352331606217617, 'acc_per_char': 0.27461139896373055, 'acc_per_byte': 0.27461139896373055, 'acc_uncond': 0.27461139896373055, 'no_answer': 0.0, 'sum_logits_corr': -44.60579251501844, 'logits_per_token_corr': -5.083188138075283, 'logits_per_char_corr': -0.8774800987799696, 'bits_per_byte_corr': 1.2659361869895294, 'correct_prob': 7.173345860109028e-06, 'correct_prob_per_token': 0.01364196898174654, 'correct_prob_per_char': 0.43386048777939556, 'margin': -1.3571176154118533e-05, 'margin_per_token': -0.0064589327317703185, 'margin_per_char': -0.052699308533613404, 'total_prob': 4.174665233649495e-05, 'total_prob_per_token': 0.0500074785153995, 'total_prob_per_char': 1.7109801006740815, 'uncond_correct_prob': 2.4870213012304794e-07, 'uncond_correct_prob_per_token': 0.005839837597547146, 'uncond_correct_prob_per_char': 0.3651752459482864, 'uncond_total_prob': 1.1974799461365136e-06, 'norm_correct_prob': 0.23683370523030708, 'norm_correct_prob_per_token': 0.2622037760578129, 'norm_correct_prob_per_char': 0.2532763692206453, 'primary_metric': 0.23834196891191708}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 11,250 | small aux 3 | 5xC | 737,280,000 | 43,796,343,029,760,000 |
{'predicted_index_raw': 1.616580310880829, 'predicted_index_per_token': 1.5181347150259068, 'predicted_index_per_char': 1.61139896373057, 'predicted_index_per_byte': 1.61139896373057, 'predicted_index_uncond': 1.4196891191709844, 'correct_choice': 1.766839378238342, 'acc_raw': 0.21761658031088082, 'acc_per_token': 0.24352331606217617, 'acc_per_char': 0.2694300518134715, 'acc_per_byte': 0.2694300518134715, 'acc_uncond': 0.27979274611398963, 'no_answer': 0.0, 'sum_logits_corr': -45.77186859704052, 'logits_per_token_corr': -5.214988180222067, 'logits_per_char_corr': -0.8951832882109181, 'bits_per_byte_corr': 1.2914764905894953, 'correct_prob': 7.3971583938871385e-06, 'correct_prob_per_token': 0.01179191113752155, 'correct_prob_per_char': 0.4238296225517631, 'margin': -3.8110386963892644e-06, 'margin_per_token': -0.006492992582025389, 'margin_per_char': -0.053168567256524094, 'total_prob': 2.1202304159865654e-05, 'total_prob_per_token': 0.0450203544705699, 'total_prob_per_char': 1.6739319692848043, 'uncond_correct_prob': 4.536939341492026e-07, 'uncond_correct_prob_per_token': 0.0065626409499004515, 'uncond_correct_prob_per_char': 0.3802913632799665, 'uncond_total_prob': 1.7103884357090244e-06, 'norm_correct_prob': 0.21123334140695596, 'norm_correct_prob_per_token': 0.2593840319322951, 'norm_correct_prob_per_char': 0.2536004150326505, 'primary_metric': 0.21761658031088082}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 11,250 | default | 5xC | 737,280,000 | 43,796,343,029,760,000 |
{'predicted_index_raw': 1.5647668393782384, 'predicted_index_per_token': 1.5854922279792747, 'predicted_index_per_char': 1.5699481865284974, 'predicted_index_per_byte': 1.5699481865284974, 'predicted_index_uncond': 1.5854922279792747, 'correct_choice': 1.766839378238342, 'acc_raw': 0.24870466321243523, 'acc_per_token': 0.24870466321243523, 'acc_per_char': 0.24352331606217617, 'acc_per_byte': 0.24352331606217617, 'acc_uncond': 0.29533678756476683, 'no_answer': 0.0, 'sum_logits_corr': -46.018712577424516, 'logits_per_token_corr': -5.281937809355196, 'logits_per_char_corr': -0.9041665321192561, 'bits_per_byte_corr': 1.3044365720271596, 'correct_prob': 1.9690594063050024e-06, 'correct_prob_per_token': 0.011693110430620103, 'correct_prob_per_char': 0.42010788577829267, 'margin': -2.9171740259548445e-06, 'margin_per_token': -0.007056301076387868, 'margin_per_char': -0.04971436236350686, 'total_prob': 8.445238961459035e-06, 'total_prob_per_token': 0.04509936254027673, 'total_prob_per_char': 1.6607786342768975, 'uncond_correct_prob': 1.9096970980193244e-07, 'uncond_correct_prob_per_token': 0.005810501597700772, 'uncond_correct_prob_per_char': 0.36520300569933933, 'uncond_total_prob': 1.4917907703255118e-06, 'norm_correct_prob': 0.22644986514102172, 'norm_correct_prob_per_token': 0.2561287592954111, 'norm_correct_prob_per_char': 0.25330072569954953, 'primary_metric': 0.24870466321243523}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 12,500 | small aux 2 | 5xC | 819,200,000 | 48,662,603,366,400,000 |
{'predicted_index_raw': 1.616580310880829, 'predicted_index_per_token': 1.528497409326425, 'predicted_index_per_char': 1.6476683937823835, 'predicted_index_per_byte': 1.6476683937823835, 'predicted_index_uncond': 1.5647668393782384, 'correct_choice': 1.766839378238342, 'acc_raw': 0.22797927461139897, 'acc_per_token': 0.2694300518134715, 'acc_per_char': 0.27979274611398963, 'acc_per_byte': 0.27979274611398963, 'acc_uncond': 0.2849740932642487, 'no_answer': 0.0, 'sum_logits_corr': -44.671099435480144, 'logits_per_token_corr': -5.12289368203984, 'logits_per_char_corr': -0.8801246471822333, 'bits_per_byte_corr': 1.2697514638548688, 'correct_prob': 3.7513249882536567e-06, 'correct_prob_per_token': 0.013741562863594799, 'correct_prob_per_char': 0.43227331056748475, 'margin': -1.7450611192539475e-05, 'margin_per_token': -0.006479631462664448, 'margin_per_char': -0.05277113075839895, 'total_prob': 3.581054775022489e-05, 'total_prob_per_token': 0.05051708684185091, 'total_prob_per_char': 1.7070181535708384, 'uncond_correct_prob': 1.4360224654328223e-07, 'uncond_correct_prob_per_token': 0.0063160887173594376, 'uncond_correct_prob_per_char': 0.37130264652752265, 'uncond_total_prob': 1.3835395811184716e-06, 'norm_correct_prob': 0.22372645128735605, 'norm_correct_prob_per_token': 0.2621740657624457, 'norm_correct_prob_per_char': 0.2532803359264068, 'primary_metric': 0.22797927461139897}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 12,500 | small aux 3 | 5xC | 819,200,000 | 48,662,603,366,400,000 |
{'predicted_index_raw': 1.5906735751295338, 'predicted_index_per_token': 1.538860103626943, 'predicted_index_per_char': 1.4922279792746114, 'predicted_index_per_byte': 1.4922279792746114, 'predicted_index_uncond': 1.4041450777202074, 'correct_choice': 1.766839378238342, 'acc_raw': 0.22797927461139897, 'acc_per_token': 0.2694300518134715, 'acc_per_char': 0.27461139896373055, 'acc_per_byte': 0.27461139896373055, 'acc_uncond': 0.22797927461139897, 'no_answer': 0.0, 'sum_logits_corr': -45.578145066691185, 'logits_per_token_corr': -5.217484640166679, 'logits_per_char_corr': -0.8954418625067843, 'bits_per_byte_corr': 1.2918495344438428, 'correct_prob': 4.054297839818812e-06, 'correct_prob_per_token': 0.012068233551121283, 'correct_prob_per_char': 0.42460571302157346, 'margin': -3.464699131406477e-07, 'margin_per_token': -0.006573046617409238, 'margin_per_char': -0.052474344877773096, 'total_prob': 1.1071118160976512e-05, 'total_prob_per_token': 0.045994132937510375, 'total_prob_per_char': 1.6783768063637987, 'uncond_correct_prob': 2.404137740957811e-07, 'uncond_correct_prob_per_token': 0.006522511201101652, 'uncond_correct_prob_per_char': 0.3747434781023501, 'uncond_total_prob': 1.1489438672250373e-06, 'norm_correct_prob': 0.21645086708386554, 'norm_correct_prob_per_token': 0.260470011033852, 'norm_correct_prob_per_char': 0.25344776017094783, 'primary_metric': 0.22797927461139897}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 12,500 | default | 5xC | 819,200,000 | 48,662,603,366,400,000 |
{'predicted_index_raw': 1.5906735751295338, 'predicted_index_per_token': 1.5699481865284974, 'predicted_index_per_char': 1.5647668393782384, 'predicted_index_per_byte': 1.5647668393782384, 'predicted_index_uncond': 1.7046632124352332, 'correct_choice': 1.766839378238342, 'acc_raw': 0.23834196891191708, 'acc_per_token': 0.2849740932642487, 'acc_per_char': 0.25906735751295334, 'acc_per_byte': 0.25906735751295334, 'acc_uncond': 0.27979274611398963, 'no_answer': 0.0, 'sum_logits_corr': -44.775783313988406, 'logits_per_token_corr': -5.090119934578788, 'logits_per_char_corr': -0.8744540615894495, 'bits_per_byte_corr': 1.2615705381412172, 'correct_prob': 8.559952601175175e-06, 'correct_prob_per_token': 0.013005266241306386, 'correct_prob_per_char': 0.4322435380262482, 'margin': 3.337448349882888e-07, 'margin_per_token': -0.007327696409509572, 'margin_per_char': -0.048763858085539395, 'total_prob': 1.8707946370833606e-05, 'total_prob_per_token': 0.049311126037567266, 'total_prob_per_char': 1.6962087289458156, 'uncond_correct_prob': 3.0658200623934955e-07, 'uncond_correct_prob_per_token': 0.0062560289148115185, 'uncond_correct_prob_per_char': 0.3724068067249732, 'uncond_total_prob': 1.0488132106013789e-06, 'norm_correct_prob': 0.2289454695350613, 'norm_correct_prob_per_token': 0.26882405330266423, 'norm_correct_prob_per_char': 0.25525984179471584, 'primary_metric': 0.23834196891191708}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 13,750 | small aux 2 | 5xC | 901,120,000 | 53,528,863,703,040,000 |
{'predicted_index_raw': 1.5906735751295338, 'predicted_index_per_token': 1.5181347150259068, 'predicted_index_per_char': 1.6528497409326426, 'predicted_index_per_byte': 1.6528497409326426, 'predicted_index_uncond': 1.5544041450777202, 'correct_choice': 1.766839378238342, 'acc_raw': 0.22797927461139897, 'acc_per_token': 0.24352331606217617, 'acc_per_char': 0.29015544041450775, 'acc_per_byte': 0.29015544041450775, 'acc_uncond': 0.3160621761658031, 'no_answer': 0.0, 'sum_logits_corr': -44.09893463312653, 'logits_per_token_corr': -5.043503925474347, 'logits_per_char_corr': -0.8647909870449026, 'bits_per_byte_corr': 1.2476296684160493, 'correct_prob': 4.121110153211667e-06, 'correct_prob_per_token': 0.014359172377552281, 'correct_prob_per_char': 0.4373765406845944, 'margin': -1.7405483687894245e-05, 'margin_per_token': -0.00707294145374131, 'margin_per_char': -0.05282548181439076, 'total_prob': 3.556766794518755e-05, 'total_prob_per_token': 0.05318822935404879, 'total_prob_per_char': 1.726079019222178, 'uncond_correct_prob': 3.221808862066564e-07, 'uncond_correct_prob_per_token': 0.006421338790679592, 'uncond_correct_prob_per_char': 0.37239758724050037, 'uncond_total_prob': 1.8370850488520316e-06, 'norm_correct_prob': 0.22814310419457162, 'norm_correct_prob_per_token': 0.2611318513908008, 'norm_correct_prob_per_char': 0.2536216232024791, 'primary_metric': 0.22797927461139897}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 13,750 | small aux 3 | 5xC | 901,120,000 | 53,528,863,703,040,000 |
{'predicted_index_raw': 1.5854922279792747, 'predicted_index_per_token': 1.4663212435233162, 'predicted_index_per_char': 1.5699481865284974, 'predicted_index_per_byte': 1.5699481865284974, 'predicted_index_uncond': 1.4041450777202074, 'correct_choice': 1.766839378238342, 'acc_raw': 0.22797927461139897, 'acc_per_token': 0.25906735751295334, 'acc_per_char': 0.27461139896373055, 'acc_per_byte': 0.27461139896373055, 'acc_uncond': 0.23316062176165803, 'no_answer': 0.0, 'sum_logits_corr': -44.67374500462428, 'logits_per_token_corr': -5.102558749442927, 'logits_per_char_corr': -0.8757025965938384, 'bits_per_byte_corr': 1.263371793400427, 'correct_prob': 6.640632943718286e-06, 'correct_prob_per_token': 0.01307825997165814, 'correct_prob_per_char': 0.4322602906128835, 'margin': -2.9104870768882975e-06, 'margin_per_token': -0.007440364725831821, 'margin_per_char': -0.052370402154267526, 'total_prob': 1.8397987311197154e-05, 'total_prob_per_token': 0.05023236468188593, 'total_prob_per_char': 1.708129647603007, 'uncond_correct_prob': 2.784988910587288e-07, 'uncond_correct_prob_per_token': 0.006774809556058113, 'uncond_correct_prob_per_char': 0.3784359794423126, 'uncond_total_prob': 1.3052609171987997e-06, 'norm_correct_prob': 0.21414701164834385, 'norm_correct_prob_per_token': 0.26089234791095933, 'norm_correct_prob_per_char': 0.25355038882467645, 'primary_metric': 0.22797927461139897}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 13,750 | default | 5xC | 901,120,000 | 53,528,863,703,040,000 |
{'predicted_index_raw': 1.6062176165803108, 'predicted_index_per_token': 1.5854922279792747, 'predicted_index_per_char': 1.6062176165803108, 'predicted_index_per_byte': 1.6062176165803108, 'predicted_index_uncond': 1.6010362694300517, 'correct_choice': 1.766839378238342, 'acc_raw': 0.24352331606217617, 'acc_per_token': 0.2694300518134715, 'acc_per_char': 0.2538860103626943, 'acc_per_byte': 0.2538860103626943, 'acc_uncond': 0.29015544041450775, 'no_answer': 0.0, 'sum_logits_corr': -44.57189053451459, 'logits_per_token_corr': -5.101174578281267, 'logits_per_char_corr': -0.8742350903872044, 'bits_per_byte_corr': 1.2612546294736404, 'correct_prob': 3.749181964177854e-06, 'correct_prob_per_token': 0.013385167287433187, 'correct_prob_per_char': 0.4322523497702621, 'margin': -3.83359391450674e-06, 'margin_per_token': -0.007344069910308628, 'margin_per_char': -0.04987742197974012, 'total_prob': 1.4177006499993608e-05, 'total_prob_per_token': 0.05094806598005999, 'total_prob_per_char': 1.7028256087566043, 'uncond_correct_prob': 3.339868477140503e-07, 'uncond_correct_prob_per_token': 0.006451697949864335, 'uncond_correct_prob_per_char': 0.3740657993690047, 'uncond_total_prob': 1.5860645570678133e-06, 'norm_correct_prob': 0.2377185764619079, 'norm_correct_prob_per_token': 0.26243632097667235, 'norm_correct_prob_per_char': 0.25442915627067575, 'primary_metric': 0.24352331606217617}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 15,000 | small aux 2 | 5xC | 983,040,000 | 58,395,124,039,680,000 |
{'predicted_index_raw': 1.549222797927461, 'predicted_index_per_token': 1.5647668393782384, 'predicted_index_per_char': 1.6580310880829014, 'predicted_index_per_byte': 1.6580310880829014, 'predicted_index_uncond': 1.538860103626943, 'correct_choice': 1.766839378238342, 'acc_raw': 0.23316062176165803, 'acc_per_token': 0.2694300518134715, 'acc_per_char': 0.2849740932642487, 'acc_per_byte': 0.2849740932642487, 'acc_uncond': 0.31088082901554404, 'no_answer': 0.0, 'sum_logits_corr': -43.944088491133456, 'logits_per_token_corr': -5.029559969796643, 'logits_per_char_corr': -0.8645718569298393, 'bits_per_byte_corr': 1.2473135304857377, 'correct_prob': 4.746373726725203e-06, 'correct_prob_per_token': 0.014568984046175654, 'correct_prob_per_char': 0.4386133610651618, 'margin': -1.211550506378537e-05, 'margin_per_token': -0.006913774595567022, 'margin_per_char': -0.05134727415986409, 'total_prob': 3.215712452913072e-05, 'total_prob_per_token': 0.05360931349238411, 'total_prob_per_char': 1.7278258991872364, 'uncond_correct_prob': 3.732809753935093e-07, 'uncond_correct_prob_per_token': 0.006333950653734794, 'uncond_correct_prob_per_char': 0.3720145739811925, 'uncond_total_prob': 1.2279684643588024e-06, 'norm_correct_prob': 0.22705746434835056, 'norm_correct_prob_per_token': 0.26456050361638594, 'norm_correct_prob_per_char': 0.2539213815540167, 'primary_metric': 0.23316062176165803}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 15,000 | small aux 3 | 5xC | 983,040,000 | 58,395,124,039,680,000 |
{'predicted_index_raw': 1.6062176165803108, 'predicted_index_per_token': 1.4974093264248705, 'predicted_index_per_char': 1.6010362694300517, 'predicted_index_per_byte': 1.6010362694300517, 'predicted_index_uncond': 1.4352331606217616, 'correct_choice': 1.766839378238342, 'acc_raw': 0.22279792746113988, 'acc_per_token': 0.25906735751295334, 'acc_per_char': 0.27979274611398963, 'acc_per_byte': 0.27979274611398963, 'acc_uncond': 0.24870466321243523, 'no_answer': 0.0, 'sum_logits_corr': -44.48412163146419, 'logits_per_token_corr': -5.094381446626864, 'logits_per_char_corr': -0.8733542905705085, 'bits_per_byte_corr': 1.2599839039460765, 'correct_prob': 7.221023075526232e-06, 'correct_prob_per_token': 0.013327335817207131, 'correct_prob_per_char': 0.4332888062960145, 'margin': -4.100330820030678e-07, 'margin_per_token': -0.007375440958301759, 'margin_per_char': -0.05266336977616793, 'total_prob': 1.7726196460618596e-05, 'total_prob_per_token': 0.05112444940694078, 'total_prob_per_char': 1.7139818989742506, 'uncond_correct_prob': 2.896428717662967e-07, 'uncond_correct_prob_per_token': 0.006850520914656768, 'uncond_correct_prob_per_char': 0.3782516842924639, 'uncond_total_prob': 1.5031570506978115e-06, 'norm_correct_prob': 0.21635223998715244, 'norm_correct_prob_per_token': 0.260019351351767, 'norm_correct_prob_per_char': 0.2533017591130451, 'primary_metric': 0.22279792746113988}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 15,000 | default | 5xC | 983,040,000 | 58,395,124,039,680,000 |
{'predicted_index_raw': 1.5751295336787565, 'predicted_index_per_token': 1.5595854922279793, 'predicted_index_per_char': 1.61139896373057, 'predicted_index_per_byte': 1.61139896373057, 'predicted_index_uncond': 1.6373056994818653, 'correct_choice': 1.766839378238342, 'acc_raw': 0.24352331606217617, 'acc_per_token': 0.27461139896373055, 'acc_per_char': 0.2538860103626943, 'acc_per_byte': 0.2538860103626943, 'acc_uncond': 0.2849740932642487, 'no_answer': 0.0, 'sum_logits_corr': -44.47246025263337, 'logits_per_token_corr': -5.082208429469514, 'logits_per_char_corr': -0.8717760763450924, 'bits_per_byte_corr': 1.2577070221096067, 'correct_prob': 4.149318670802984e-06, 'correct_prob_per_token': 0.013493743913588159, 'correct_prob_per_char': 0.43349630172398446, 'margin': -3.1925467507084655e-06, 'margin_per_token': -0.007428517615674964, 'margin_per_char': -0.049366281309708736, 'total_prob': 1.3870136976089399e-05, 'total_prob_per_token': 0.05116024612810566, 'total_prob_per_char': 1.7051025689942616, 'uncond_correct_prob': 2.533316653716964e-07, 'uncond_correct_prob_per_token': 0.0064665084928509945, 'uncond_correct_prob_per_char': 0.37414238641786896, 'uncond_total_prob': 1.7290101401043998e-06, 'norm_correct_prob': 0.23905497919144647, 'norm_correct_prob_per_token': 0.2637127900092652, 'norm_correct_prob_per_char': 0.2547158105437912, 'primary_metric': 0.24352331606217617}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 15,117 | small aux 2 | 5xC | 990,707,712 | 58,850,606,007,189,500 |
{'predicted_index_raw': 1.5647668393782384, 'predicted_index_per_token': 1.549222797927461, 'predicted_index_per_char': 1.6424870466321244, 'predicted_index_per_byte': 1.6424870466321244, 'predicted_index_uncond': 1.5233160621761659, 'correct_choice': 1.766839378238342, 'acc_raw': 0.24870466321243523, 'acc_per_token': 0.26424870466321243, 'acc_per_char': 0.2849740932642487, 'acc_per_byte': 0.2849740932642487, 'acc_uncond': 0.3160621761658031, 'no_answer': 0.0, 'sum_logits_corr': -44.0799823227324, 'logits_per_token_corr': -5.045822842457324, 'logits_per_char_corr': -0.8674828387899006, 'bits_per_byte_corr': 1.251513189579369, 'correct_prob': 4.7211792716516695e-06, 'correct_prob_per_token': 0.014329405016040353, 'correct_prob_per_char': 0.4374808161666555, 'margin': -1.2605249736027978e-05, 'margin_per_token': -0.006795834155156018, 'margin_per_char': -0.05153870311547928, 'total_prob': 3.15677291715946e-05, 'total_prob_per_token': 0.0527005089999503, 'total_prob_per_char': 1.723413796108303, 'uncond_correct_prob': 3.339747140811121e-07, 'uncond_correct_prob_per_token': 0.006329489351192725, 'uncond_correct_prob_per_char': 0.3722278787510198, 'uncond_total_prob': 1.1943235683464431e-06, 'norm_correct_prob': 0.2301646173500623, 'norm_correct_prob_per_token': 0.26457654154560784, 'norm_correct_prob_per_char': 0.25388830853243943, 'primary_metric': 0.24870466321243523}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 15,117 | small aux 3 | 5xC | 990,707,712 | 58,850,606,007,189,500 |
{'predicted_index_raw': 1.616580310880829, 'predicted_index_per_token': 1.5129533678756477, 'predicted_index_per_char': 1.6010362694300517, 'predicted_index_per_byte': 1.6010362694300517, 'predicted_index_uncond': 1.4455958549222798, 'correct_choice': 1.766839378238342, 'acc_raw': 0.22279792746113988, 'acc_per_token': 0.2694300518134715, 'acc_per_char': 0.2849740932642487, 'acc_per_byte': 0.2849740932642487, 'acc_uncond': 0.24352331606217617, 'no_answer': 0.0, 'sum_logits_corr': -44.362562636637314, 'logits_per_token_corr': -5.072088174987007, 'logits_per_char_corr': -0.8694656824753383, 'bits_per_byte_corr': 1.2543738283312098, 'correct_prob': 8.042002244908524e-06, 'correct_prob_per_token': 0.01342898201498358, 'correct_prob_per_char': 0.4346139228096714, 'margin': -4.174982440572434e-07, 'margin_per_token': -0.007434110627398679, 'margin_per_char': -0.05282851402358553, 'total_prob': 1.9423461132177934e-05, 'total_prob_per_token': 0.05146659340820681, 'total_prob_per_char': 1.718106300374346, 'uncond_correct_prob': 2.8411030856901707e-07, 'uncond_correct_prob_per_token': 0.0067977935977602, 'uncond_correct_prob_per_char': 0.3776166792734187, 'uncond_total_prob': 1.4316256947439596e-06, 'norm_correct_prob': 0.21584823466545777, 'norm_correct_prob_per_token': 0.261677913106305, 'norm_correct_prob_per_char': 0.25353551225271037, 'primary_metric': 0.22279792746113988}
| 10M | DCLM-Baseline | mmlu_high_school_government_and_politics | 15,117 | default | 5xC | 990,707,712 | 58,850,606,007,189,500 |
{'predicted_index_raw': 1.5699481865284974, 'predicted_index_per_token': 1.5336787564766838, 'predicted_index_per_char': 1.5854922279792747, 'predicted_index_per_byte': 1.5854922279792747, 'predicted_index_uncond': 1.621761658031088, 'correct_choice': 1.766839378238342, 'acc_raw': 0.24352331606217617, 'acc_per_token': 0.2694300518134715, 'acc_per_char': 0.2538860103626943, 'acc_per_byte': 0.2538860103626943, 'acc_uncond': 0.29533678756476683, 'no_answer': 0.0, 'sum_logits_corr': -44.50681402769731, 'logits_per_token_corr': -5.084048780052489, 'logits_per_char_corr': -0.8722721318003059, 'bits_per_byte_corr': 1.2584226788548494, 'correct_prob': 4.0413985549811585e-06, 'correct_prob_per_token': 0.013422677081844496, 'correct_prob_per_char': 0.4332849693264811, 'margin': -3.2235445549582884e-06, 'margin_per_token': -0.007471975953829293, 'margin_per_char': -0.049630969799128816, 'total_prob': 1.36799734415913e-05, 'total_prob_per_token': 0.05108005088235238, 'total_prob_per_char': 1.7051175992069814, 'uncond_correct_prob': 2.492362698541753e-07, 'uncond_correct_prob_per_token': 0.006443507469602167, 'uncond_correct_prob_per_char': 0.3737527360579285, 'uncond_total_prob': 1.552443623387442e-06, 'norm_correct_prob': 0.23956054004299332, 'norm_correct_prob_per_token': 0.2634692778349037, 'norm_correct_prob_per_char': 0.254552720717932, 'primary_metric': 0.24352331606217617}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 0 | small aux 2 | 5xC | 0 | 0 |
{'predicted_index_raw': 1.4820512820512821, 'predicted_index_per_token': 1.587179487179487, 'predicted_index_per_char': 1.6512820512820512, 'predicted_index_per_byte': 1.6487179487179486, 'predicted_index_uncond': 1.523076923076923, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2230769230769231, 'acc_per_token': 0.23076923076923078, 'acc_per_char': 0.2358974358974359, 'acc_per_byte': 0.2358974358974359, 'acc_uncond': 0.23846153846153847, 'no_answer': 0.0, 'sum_logits_corr': -92.96719007736597, 'logits_per_token_corr': -10.88412297538933, 'logits_per_char_corr': -2.2805770276209416, 'bits_per_byte_corr': 3.2523410779659785, 'correct_prob': 1.0942089111167587e-07, 'correct_prob_per_token': 1.8824930885168107e-05, 'correct_prob_per_char': 0.12851722649719444, 'margin': -2.2399533194182353e-07, 'margin_per_token': -1.1744927932252703e-06, 'margin_per_char': -0.02610514731501689, 'total_prob': 5.831652441883558e-07, 'total_prob_per_token': 7.543294430319752e-05, 'total_prob_per_char': 0.5162867033802377, 'uncond_correct_prob': 8.769820561318052e-08, 'uncond_correct_prob_per_token': 1.8291533779370745e-05, 'uncond_correct_prob_per_char': 0.12781502979739573, 'uncond_total_prob': 5.295910846939229e-07, 'norm_correct_prob': 0.22953715389283688, 'norm_correct_prob_per_token': 0.24968545610797555, 'norm_correct_prob_per_char': 0.24571096593221914, 'primary_metric': 0.2230769230769231}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 0 | small aux 3 | 5xC | 0 | 0 |
{'predicted_index_raw': 1.4846153846153847, 'predicted_index_per_token': 1.5025641025641026, 'predicted_index_per_char': 1.623076923076923, 'predicted_index_per_byte': 1.6128205128205129, 'predicted_index_uncond': 1.6, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.23846153846153847, 'acc_per_token': 0.25384615384615383, 'acc_per_char': 0.2641025641025641, 'acc_per_byte': 0.2641025641025641, 'acc_uncond': 0.27692307692307694, 'no_answer': 0.0, 'sum_logits_corr': -92.40438931538509, 'logits_per_token_corr': -10.810214679618275, 'logits_per_char_corr': -2.266801644907027, 'bits_per_byte_corr': 3.232368441744704, 'correct_prob': 7.76350979539318e-08, 'correct_prob_per_token': 2.0288852418971868e-05, 'correct_prob_per_char': 0.13028060971082164, 'margin': -1.7565483248541364e-07, 'margin_per_token': -1.14787792040439e-06, 'margin_per_char': -0.026118787282708618, 'total_prob': 4.690681514250132e-07, 'total_prob_per_token': 8.10047903797439e-05, 'total_prob_per_char': 0.5230218509540343, 'uncond_correct_prob': 1.0529550265117966e-07, 'uncond_correct_prob_per_token': 1.938786661291596e-05, 'uncond_correct_prob_per_char': 0.12908365496403787, 'uncond_total_prob': 6.383145516372811e-07, 'norm_correct_prob': 0.22883521273274185, 'norm_correct_prob_per_token': 0.250488781256816, 'norm_correct_prob_per_char': 0.2457197994341327, 'primary_metric': 0.23846153846153847}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 0 | default | 5xC | 0 | 0 |
{'predicted_index_raw': 1.441025641025641, 'predicted_index_per_token': 1.4487179487179487, 'predicted_index_per_char': 1.5897435897435896, 'predicted_index_per_byte': 1.582051282051282, 'predicted_index_uncond': 1.5692307692307692, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.21794871794871795, 'acc_per_token': 0.19487179487179487, 'acc_per_char': 0.23846153846153847, 'acc_per_byte': 0.23846153846153847, 'acc_uncond': 0.2282051282051282, 'no_answer': 0.0, 'sum_logits_corr': -92.77870278969789, 'logits_per_token_corr': -10.855419064437536, 'logits_per_char_corr': -2.2742391557017907, 'bits_per_byte_corr': 3.2435622224211214, 'correct_prob': 8.88291035527815e-08, 'correct_prob_per_token': 1.9375486853556642e-05, 'correct_prob_per_char': 0.1290730739458668, 'margin': -2.1193677719372325e-07, 'margin_per_token': -1.2653991270291675e-06, 'margin_per_char': -0.026156817315533272, 'total_prob': 5.223579176015366e-07, 'total_prob_per_token': 7.778844628244541e-05, 'total_prob_per_char': 0.5185991760904043, 'uncond_correct_prob': 9.700152145205823e-08, 'uncond_correct_prob_per_token': 1.9763403497245463e-05, 'uncond_correct_prob_per_char': 0.12947629687652423, 'uncond_total_prob': 5.120331699682994e-07, 'norm_correct_prob': 0.22876861862694184, 'norm_correct_prob_per_token': 0.24907065606867335, 'norm_correct_prob_per_char': 0.24545988339015773, 'primary_metric': 0.21794871794871795}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 2,500 | small aux 2 | 5xC | 163,840,000 | 9,732,520,673,280,000 |
{'predicted_index_raw': 1.382051282051282, 'predicted_index_per_token': 1.5205128205128204, 'predicted_index_per_char': 1.541025641025641, 'predicted_index_per_byte': 1.541025641025641, 'predicted_index_uncond': 1.535897435897436, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2205128205128205, 'acc_per_token': 0.28717948717948716, 'acc_per_char': 0.24358974358974358, 'acc_per_byte': 0.24358974358974358, 'acc_uncond': 0.258974358974359, 'no_answer': 0.0, 'sum_logits_corr': -44.09476955853976, 'logits_per_token_corr': -5.494862939725502, 'logits_per_char_corr': -1.1400906007454388, 'bits_per_byte_corr': 1.6260620537628199, 'correct_prob': 1.2010748848505102e-06, 'correct_prob_per_token': 0.008155232685604313, 'correct_prob_per_char': 0.3534289908748375, 'margin': -1.2876909416826568e-05, 'margin_per_token': -0.004240067213369022, 'margin_per_char': -0.050247695743980594, 'total_prob': 1.671258711359357e-05, 'total_prob_per_token': 0.03312048153017271, 'total_prob_per_char': 1.4203337732259074, 'uncond_correct_prob': 2.455909850559956e-07, 'uncond_correct_prob_per_token': 0.004663990694927716, 'uncond_correct_prob_per_char': 0.3174391770654952, 'uncond_total_prob': 1.6015213992155242e-06, 'norm_correct_prob': 0.21684254184899687, 'norm_correct_prob_per_token': 0.2593815674483825, 'norm_correct_prob_per_char': 0.24972713450478068, 'primary_metric': 0.2205128205128205}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 2,500 | small aux 3 | 5xC | 163,840,000 | 9,732,520,673,280,000 |
{'predicted_index_raw': 1.3717948717948718, 'predicted_index_per_token': 1.4923076923076923, 'predicted_index_per_char': 1.4538461538461538, 'predicted_index_per_byte': 1.4487179487179487, 'predicted_index_uncond': 1.5743589743589743, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.22564102564102564, 'acc_per_token': 0.28974358974358977, 'acc_per_char': 0.2641025641025641, 'acc_per_byte': 0.26153846153846155, 'acc_uncond': 0.2846153846153846, 'no_answer': 0.0, 'sum_logits_corr': -43.711224932548326, 'logits_per_token_corr': -5.433045701276156, 'logits_per_char_corr': -1.12848544067089, 'bits_per_byte_corr': 1.6071686942653225, 'correct_prob': 2.403218063247683e-06, 'correct_prob_per_token': 0.008807769230874339, 'correct_prob_per_char': 0.357728366266307, 'margin': -1.0786002723965194e-05, 'margin_per_token': -0.005411059426063764, 'margin_per_char': -0.05256282646974495, 'total_prob': 1.8765790219226115e-05, 'total_prob_per_token': 0.036847026307453554, 'total_prob_per_char': 1.4415689027736434, 'uncond_correct_prob': 1.1182444284288692e-07, 'uncond_correct_prob_per_token': 0.003808750070131991, 'uncond_correct_prob_per_char': 0.3037047604976892, 'uncond_total_prob': 6.9071337298316e-07, 'norm_correct_prob': 0.22425496741939754, 'norm_correct_prob_per_token': 0.2583864860402765, 'norm_correct_prob_per_char': 0.2487800692521497, 'primary_metric': 0.22564102564102564}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 2,500 | default | 5xC | 163,840,000 | 9,732,520,673,280,000 |
{'predicted_index_raw': 1.464102564102564, 'predicted_index_per_token': 1.6025641025641026, 'predicted_index_per_char': 1.5102564102564102, 'predicted_index_per_byte': 1.5102564102564102, 'predicted_index_uncond': 1.5846153846153845, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.22564102564102564, 'acc_per_token': 0.26666666666666666, 'acc_per_char': 0.2564102564102564, 'acc_per_byte': 0.2564102564102564, 'acc_uncond': 0.2641025641025641, 'no_answer': 0.0, 'sum_logits_corr': -44.88582579539372, 'logits_per_token_corr': -5.635768654963809, 'logits_per_char_corr': -1.1712977077477909, 'bits_per_byte_corr': 1.6696069006608334, 'correct_prob': 1.7811802572001067e-06, 'correct_prob_per_token': 0.0073897102072299535, 'correct_prob_per_char': 0.3453511901038496, 'margin': -4.991590562156445e-07, 'margin_per_token': -0.004233784316723351, 'margin_per_char': -0.052386136698397916, 'total_prob': 4.950554329555905e-06, 'total_prob_per_token': 0.03042158136993689, 'total_prob_per_char': 1.3916781213278653, 'uncond_correct_prob': 3.5270036568128065e-07, 'uncond_correct_prob_per_token': 0.0045600569349954385, 'uncond_correct_prob_per_char': 0.3156923438258951, 'uncond_total_prob': 1.2596692646188447e-06, 'norm_correct_prob': 0.21623843762307368, 'norm_correct_prob_per_token': 0.2562829277628221, 'norm_correct_prob_per_char': 0.24858637440507972, 'primary_metric': 0.22564102564102564}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 3,750 | small aux 2 | 5xC | 245,760,000 | 14,598,781,009,920,000 |
{'predicted_index_raw': 1.3846153846153846, 'predicted_index_per_token': 1.5435897435897437, 'predicted_index_per_char': 1.528205128205128, 'predicted_index_per_byte': 1.528205128205128, 'predicted_index_uncond': 1.5461538461538462, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.20256410256410257, 'acc_per_token': 0.2692307692307692, 'acc_per_char': 0.2205128205128205, 'acc_per_byte': 0.2205128205128205, 'acc_uncond': 0.27692307692307694, 'no_answer': 0.0, 'sum_logits_corr': -42.660849456298045, 'logits_per_token_corr': -5.358882291910425, 'logits_per_char_corr': -1.112466717783726, 'bits_per_byte_corr': 1.5864240283248918, 'correct_prob': 3.5257014377828516e-06, 'correct_prob_per_token': 0.00982449288028906, 'correct_prob_per_char': 0.36414117786660927, 'margin': -1.7465208946585565e-05, 'margin_per_token': -0.004847498586800009, 'margin_per_char': -0.05223464296586587, 'total_prob': 2.6220474095839958e-05, 'total_prob_per_token': 0.039952027625420904, 'total_prob_per_char': 1.4656586101221756, 'uncond_correct_prob': 3.664495620466988e-07, 'uncond_correct_prob_per_token': 0.004752673651243206, 'uncond_correct_prob_per_char': 0.3158379554385263, 'uncond_total_prob': 9.853197379501707e-07, 'norm_correct_prob': 0.20871753462329587, 'norm_correct_prob_per_token': 0.25488466900543316, 'norm_correct_prob_per_char': 0.2480683036440288, 'primary_metric': 0.20256410256410257}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 3,750 | small aux 3 | 5xC | 245,760,000 | 14,598,781,009,920,000 |
{'predicted_index_raw': 1.464102564102564, 'predicted_index_per_token': 1.5846153846153845, 'predicted_index_per_char': 1.523076923076923, 'predicted_index_per_byte': 1.5205128205128204, 'predicted_index_uncond': 1.4615384615384615, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.23076923076923078, 'acc_per_token': 0.2794871794871795, 'acc_per_char': 0.2717948717948718, 'acc_per_byte': 0.2717948717948718, 'acc_uncond': 0.24871794871794872, 'no_answer': 0.0, 'sum_logits_corr': -44.67419199087681, 'logits_per_token_corr': -5.607067351848634, 'logits_per_char_corr': -1.165486320387057, 'bits_per_byte_corr': 1.6610659810109711, 'correct_prob': 5.824025439335644e-07, 'correct_prob_per_token': 0.007556900893037109, 'correct_prob_per_char': 0.3472085533600843, 'margin': -4.646193023919805e-06, 'margin_per_token': -0.003666253902699694, 'margin_per_char': -0.04895924097995626, 'total_prob': 6.430648753146261e-06, 'total_prob_per_token': 0.030025832104410104, 'total_prob_per_char': 1.3933051817264466, 'uncond_correct_prob': 2.191270713045703e-07, 'uncond_correct_prob_per_token': 0.004752780358332473, 'uncond_correct_prob_per_char': 0.31929106908565175, 'uncond_total_prob': 1.6837594026063741e-06, 'norm_correct_prob': 0.22588717782298134, 'norm_correct_prob_per_token': 0.261349646265243, 'norm_correct_prob_per_char': 0.2499274450664923, 'primary_metric': 0.23076923076923078}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 3,750 | default | 5xC | 245,760,000 | 14,598,781,009,920,000 |
{'predicted_index_raw': 1.376923076923077, 'predicted_index_per_token': 1.5307692307692307, 'predicted_index_per_char': 1.5128205128205128, 'predicted_index_per_byte': 1.5102564102564102, 'predicted_index_uncond': 1.4897435897435898, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2230769230769231, 'acc_per_token': 0.28205128205128205, 'acc_per_char': 0.24358974358974358, 'acc_per_byte': 0.24358974358974358, 'acc_uncond': 0.2564102564102564, 'no_answer': 0.0, 'sum_logits_corr': -43.79290915758182, 'logits_per_token_corr': -5.520604189884078, 'logits_per_char_corr': -1.147218670950053, 'bits_per_byte_corr': 1.636035227721954, 'correct_prob': 1.7313319098435335e-06, 'correct_prob_per_token': 0.008439963566266466, 'correct_prob_per_char': 0.35308710581217584, 'margin': -4.67730738638411e-06, 'margin_per_token': -0.003475554113618146, 'margin_per_char': -0.046911722031639014, 'total_prob': 9.874411707909063e-06, 'total_prob_per_token': 0.032516732060865736, 'total_prob_per_char': 1.4169223334788128, 'uncond_correct_prob': 5.008238678234414e-08, 'uncond_correct_prob_per_token': 0.004535300423931782, 'uncond_correct_prob_per_char': 0.3139590043915427, 'uncond_total_prob': 3.929006540859729e-07, 'norm_correct_prob': 0.21681373976876928, 'norm_correct_prob_per_token': 0.2614006593367399, 'norm_correct_prob_per_char': 0.24910765335194565, 'primary_metric': 0.2230769230769231}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 5,000 | small aux 2 | 5xC | 327,680,000 | 19,465,041,346,560,000 |
{'predicted_index_raw': 1.435897435897436, 'predicted_index_per_token': 1.594871794871795, 'predicted_index_per_char': 1.5461538461538462, 'predicted_index_per_byte': 1.5435897435897437, 'predicted_index_uncond': 1.5076923076923077, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2076923076923077, 'acc_per_token': 0.26666666666666666, 'acc_per_char': 0.2282051282051282, 'acc_per_byte': 0.2282051282051282, 'acc_uncond': 0.23846153846153847, 'no_answer': 0.0, 'sum_logits_corr': -43.09286273320516, 'logits_per_token_corr': -5.371652805006425, 'logits_per_char_corr': -1.110594022794516, 'bits_per_byte_corr': 1.5843613697391523, 'correct_prob': 7.229175177589115e-06, 'correct_prob_per_token': 0.009306321363703567, 'correct_prob_per_char': 0.3613201852654562, 'margin': -5.521808050592712e-06, 'margin_per_token': -0.004436442764786365, 'margin_per_char': -0.05023860461154284, 'total_prob': 2.120159663095669e-05, 'total_prob_per_token': 0.03738194968794124, 'total_prob_per_char': 1.4494022053595923, 'uncond_correct_prob': 1.963671890573352e-07, 'uncond_correct_prob_per_token': 0.00497965368405746, 'uncond_correct_prob_per_char': 0.31984228398838, 'uncond_total_prob': 1.0404969327450048e-06, 'norm_correct_prob': 0.21109271203690158, 'norm_correct_prob_per_token': 0.25911074506391185, 'norm_correct_prob_per_char': 0.24966823003206615, 'primary_metric': 0.2076923076923077}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 5,000 | small aux 3 | 5xC | 327,680,000 | 19,465,041,346,560,000 |
{'predicted_index_raw': 1.4102564102564104, 'predicted_index_per_token': 1.5564102564102564, 'predicted_index_per_char': 1.4897435897435898, 'predicted_index_per_byte': 1.4897435897435898, 'predicted_index_uncond': 1.523076923076923, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2128205128205128, 'acc_per_token': 0.2846153846153846, 'acc_per_char': 0.23333333333333334, 'acc_per_byte': 0.23333333333333334, 'acc_uncond': 0.24358974358974358, 'no_answer': 0.0, 'sum_logits_corr': -43.8388699776087, 'logits_per_token_corr': -5.501892928035789, 'logits_per_char_corr': -1.1387208398665511, 'bits_per_byte_corr': 1.623077400187582, 'correct_prob': 1.1405155041433517e-06, 'correct_prob_per_token': 0.008603257900687106, 'correct_prob_per_char': 0.3541911191774834, 'margin': -3.5086268237380636e-06, 'margin_per_token': -0.004577528941827905, 'margin_per_char': -0.04998297698318006, 'total_prob': 7.746068901621125e-06, 'total_prob_per_token': 0.034841973777115565, 'total_prob_per_char': 1.4219463201857496, 'uncond_correct_prob': 2.820003146694344e-07, 'uncond_correct_prob_per_token': 0.005392957328774633, 'uncond_correct_prob_per_char': 0.32546335411610905, 'uncond_total_prob': 1.2016420406718753e-06, 'norm_correct_prob': 0.22578300069182794, 'norm_correct_prob_per_token': 0.2603310111805648, 'norm_correct_prob_per_char': 0.25013570644502736, 'primary_metric': 0.2128205128205128}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 5,000 | default | 5xC | 327,680,000 | 19,465,041,346,560,000 |
{'predicted_index_raw': 1.3717948717948718, 'predicted_index_per_token': 1.5307692307692307, 'predicted_index_per_char': 1.528205128205128, 'predicted_index_per_byte': 1.5205128205128204, 'predicted_index_uncond': 1.4692307692307693, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2205128205128205, 'acc_per_token': 0.26153846153846155, 'acc_per_char': 0.2564102564102564, 'acc_per_byte': 0.2564102564102564, 'acc_uncond': 0.258974358974359, 'no_answer': 0.0, 'sum_logits_corr': -42.90310163253393, 'logits_per_token_corr': -5.375222761234779, 'logits_per_char_corr': -1.1191813277381342, 'bits_per_byte_corr': 1.5950951222228118, 'correct_prob': 1.545554327270706e-06, 'correct_prob_per_token': 0.009185301469305294, 'correct_prob_per_char': 0.36151673213733165, 'margin': -7.544407771759308e-06, 'margin_per_token': -0.0049224249530080855, 'margin_per_char': -0.04927285910272458, 'total_prob': 1.340556977504928e-05, 'total_prob_per_token': 0.03696108748700084, 'total_prob_per_char': 1.450128248908867, 'uncond_correct_prob': 1.663517640214035e-07, 'uncond_correct_prob_per_token': 0.004662068036691068, 'uncond_correct_prob_per_char': 0.31684868149891127, 'uncond_total_prob': 7.086656431483584e-07, 'norm_correct_prob': 0.2281964704133354, 'norm_correct_prob_per_token': 0.2601827075921166, 'norm_correct_prob_per_char': 0.2492651277135518, 'primary_metric': 0.2205128205128205}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 7,500 | small aux 2 | 5xC | 491,520,000 | 29,197,562,019,840,000 |
{'predicted_index_raw': 1.3948717948717948, 'predicted_index_per_token': 1.541025641025641, 'predicted_index_per_char': 1.458974358974359, 'predicted_index_per_byte': 1.458974358974359, 'predicted_index_uncond': 1.5128205128205128, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2230769230769231, 'acc_per_token': 0.2948717948717949, 'acc_per_char': 0.25384615384615383, 'acc_per_byte': 0.25384615384615383, 'acc_uncond': 0.26666666666666666, 'no_answer': 0.0, 'sum_logits_corr': -41.59049229010557, 'logits_per_token_corr': -5.169162061530515, 'logits_per_char_corr': -1.0700700158413232, 'bits_per_byte_corr': 1.5252089987318131, 'correct_prob': 6.895770876468664e-06, 'correct_prob_per_token': 0.011285757519524462, 'correct_prob_per_char': 0.37508345831768736, 'margin': -2.7323297378539517e-05, 'margin_per_token': -0.00598750519474552, 'margin_per_char': -0.05138180155661557, 'total_prob': 4.340612896916639e-05, 'total_prob_per_token': 0.0454870331467737, 'total_prob_per_char': 1.502987304885723, 'uncond_correct_prob': 3.3891973805169487e-07, 'uncond_correct_prob_per_token': 0.005117583585322146, 'uncond_correct_prob_per_char': 0.32425505144293243, 'uncond_total_prob': 1.586695183244851e-06, 'norm_correct_prob': 0.22184905327192564, 'norm_correct_prob_per_token': 0.26115053345139666, 'norm_correct_prob_per_char': 0.2499231147328608, 'primary_metric': 0.2230769230769231}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 7,500 | small aux 3 | 5xC | 491,520,000 | 29,197,562,019,840,000 |
{'predicted_index_raw': 1.4205128205128206, 'predicted_index_per_token': 1.5769230769230769, 'predicted_index_per_char': 1.564102564102564, 'predicted_index_per_byte': 1.564102564102564, 'predicted_index_uncond': 1.5025641025641026, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.23333333333333334, 'acc_per_token': 0.27692307692307694, 'acc_per_char': 0.27692307692307694, 'acc_per_byte': 0.27692307692307694, 'acc_uncond': 0.2923076923076923, 'no_answer': 0.0, 'sum_logits_corr': -43.22788760600946, 'logits_per_token_corr': -5.438418279903599, 'logits_per_char_corr': -1.1268866809005988, 'bits_per_byte_corr': 1.607371301787815, 'correct_prob': 3.6976560667822708e-06, 'correct_prob_per_token': 0.009102902760784234, 'correct_prob_per_char': 0.3582260662759818, 'margin': -2.8668525931501954e-06, 'margin_per_token': -0.004233898364863442, 'margin_per_char': -0.049193387414293, 'total_prob': 1.222717990610126e-05, 'total_prob_per_token': 0.03599743290662339, 'total_prob_per_char': 1.436824342402911, 'uncond_correct_prob': 2.7072942598144386e-07, 'uncond_correct_prob_per_token': 0.00543575533188932, 'uncond_correct_prob_per_char': 0.3261283595506798, 'uncond_total_prob': 2.1112206662713483e-06, 'norm_correct_prob': 0.23323643893506837, 'norm_correct_prob_per_token': 0.2631239074373466, 'norm_correct_prob_per_char': 0.2499160070921472, 'primary_metric': 0.23333333333333334}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 7,500 | default | 5xC | 491,520,000 | 29,197,562,019,840,000 |
{'predicted_index_raw': 1.435897435897436, 'predicted_index_per_token': 1.5076923076923077, 'predicted_index_per_char': 1.5256410256410255, 'predicted_index_per_byte': 1.5179487179487179, 'predicted_index_uncond': 1.5435897435897437, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2358974358974359, 'acc_per_token': 0.25384615384615383, 'acc_per_char': 0.2358974358974359, 'acc_per_byte': 0.23333333333333334, 'acc_uncond': 0.2358974358974359, 'no_answer': 0.0, 'sum_logits_corr': -42.5422049400134, 'logits_per_token_corr': -5.341519521296668, 'logits_per_char_corr': -1.1110132205255585, 'bits_per_byte_corr': 1.5840991353901046, 'correct_prob': 4.089868377077834e-06, 'correct_prob_per_token': 0.009908679328157385, 'correct_prob_per_char': 0.36439850199323837, 'margin': -4.9833441196992704e-06, 'margin_per_token': -0.005134586779055128, 'margin_per_char': -0.05106179927589827, 'total_prob': 1.8610823229258257e-05, 'total_prob_per_token': 0.03980402151089232, 'total_prob_per_char': 1.4647263482073594, 'uncond_correct_prob': 1.623952013848411e-07, 'uncond_correct_prob_per_token': 0.005532307593383294, 'uncond_correct_prob_per_char': 0.3281407586590605, 'uncond_total_prob': 8.703907065538205e-07, 'norm_correct_prob': 0.2291330850532599, 'norm_correct_prob_per_token': 0.2565798119093235, 'norm_correct_prob_per_char': 0.2488289118463414, 'primary_metric': 0.2358974358974359}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 8,750 | small aux 2 | 5xC | 573,440,000 | 34,063,822,356,480,000 |
{'predicted_index_raw': 1.3794871794871795, 'predicted_index_per_token': 1.558974358974359, 'predicted_index_per_char': 1.5128205128205128, 'predicted_index_per_byte': 1.5128205128205128, 'predicted_index_uncond': 1.5615384615384615, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.21794871794871795, 'acc_per_token': 0.2846153846153846, 'acc_per_char': 0.2512820512820513, 'acc_per_byte': 0.2512820512820513, 'acc_uncond': 0.2692307692307692, 'no_answer': 0.0, 'sum_logits_corr': -41.52140740003341, 'logits_per_token_corr': -5.200566610637786, 'logits_per_char_corr': -1.0735512970811545, 'bits_per_byte_corr': 1.53042853897923, 'correct_prob': 7.378426470149043e-06, 'correct_prob_per_token': 0.01159780709166178, 'correct_prob_per_char': 0.3735281344843614, 'margin': -3.7425512638244065e-05, 'margin_per_token': -0.005941428416163787, 'margin_per_char': -0.0527659500968235, 'total_prob': 5.4453563015337004e-05, 'total_prob_per_token': 0.04656897114055758, 'total_prob_per_char': 1.49983415343914, 'uncond_correct_prob': 3.135903224464274e-07, 'uncond_correct_prob_per_token': 0.004779826462493944, 'uncond_correct_prob_per_char': 0.315456147385084, 'uncond_total_prob': 2.502186559171779e-06, 'norm_correct_prob': 0.2179162491203724, 'norm_correct_prob_per_token': 0.25850082853722517, 'norm_correct_prob_per_char': 0.249516580501608, 'primary_metric': 0.21794871794871795}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 8,750 | small aux 3 | 5xC | 573,440,000 | 34,063,822,356,480,000 |
{'predicted_index_raw': 1.4153846153846155, 'predicted_index_per_token': 1.594871794871795, 'predicted_index_per_char': 1.4692307692307693, 'predicted_index_per_byte': 1.4666666666666666, 'predicted_index_uncond': 1.5487179487179488, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.24358974358974358, 'acc_per_token': 0.26666666666666666, 'acc_per_char': 0.24871794871794872, 'acc_per_byte': 0.24871794871794872, 'acc_uncond': 0.26153846153846155, 'no_answer': 0.0, 'sum_logits_corr': -42.071004350368796, 'logits_per_token_corr': -5.280512560756395, 'logits_per_char_corr': -1.0976069791289456, 'bits_per_byte_corr': 1.5638363233981005, 'correct_prob': 5.463779395556408e-06, 'correct_prob_per_token': 0.010765546755126565, 'correct_prob_per_char': 0.36926301357641694, 'margin': -1.3175349910948827e-05, 'margin_per_token': -0.004966820348962, 'margin_per_char': -0.050143733683577045, 'total_prob': 3.35385815999716e-05, 'total_prob_per_token': 0.042489240939137835, 'total_prob_per_char': 1.4784134505297846, 'uncond_correct_prob': 2.4098081314079166e-07, 'uncond_correct_prob_per_token': 0.006195537536750959, 'uncond_correct_prob_per_char': 0.33391381764542943, 'uncond_total_prob': 1.8873613295774922e-06, 'norm_correct_prob': 0.23427882039071057, 'norm_correct_prob_per_token': 0.26239574089938095, 'norm_correct_prob_per_char': 0.2506873417574845, 'primary_metric': 0.24358974358974358}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 8,750 | default | 5xC | 573,440,000 | 34,063,822,356,480,000 |
{'predicted_index_raw': 1.4384615384615385, 'predicted_index_per_token': 1.582051282051282, 'predicted_index_per_char': 1.5333333333333334, 'predicted_index_per_byte': 1.535897435897436, 'predicted_index_uncond': 1.505128205128205, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2205128205128205, 'acc_per_token': 0.27692307692307694, 'acc_per_char': 0.23076923076923078, 'acc_per_byte': 0.23076923076923078, 'acc_uncond': 0.24871794871794872, 'no_answer': 0.0, 'sum_logits_corr': -42.35680595422402, 'logits_per_token_corr': -5.339825330084374, 'logits_per_char_corr': -1.114834753324305, 'bits_per_byte_corr': 1.5894403538446142, 'correct_prob': 3.843827060289542e-06, 'correct_prob_per_token': 0.010521298907211176, 'correct_prob_per_char': 0.36581597316352554, 'margin': -7.007691353841725e-06, 'margin_per_token': -0.0051189339633289565, 'margin_per_char': -0.049325771928971245, 'total_prob': 1.6990189256114695e-05, 'total_prob_per_token': 0.04232899683673899, 'total_prob_per_char': 1.4657592332809708, 'uncond_correct_prob': 1.6510790201877262e-07, 'uncond_correct_prob_per_token': 0.004945634423168056, 'uncond_correct_prob_per_char': 0.3228268605322392, 'uncond_total_prob': 6.906698768581676e-07, 'norm_correct_prob': 0.2213503869102082, 'norm_correct_prob_per_token': 0.2610069447821164, 'norm_correct_prob_per_char': 0.24907699921371287, 'primary_metric': 0.2205128205128205}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 10,000 | small aux 2 | 5xC | 655,360,000 | 38,930,082,693,120,000 |
{'predicted_index_raw': 1.4461538461538461, 'predicted_index_per_token': 1.5897435897435896, 'predicted_index_per_char': 1.5153846153846153, 'predicted_index_per_byte': 1.5153846153846153, 'predicted_index_uncond': 1.558974358974359, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2205128205128205, 'acc_per_token': 0.28205128205128205, 'acc_per_char': 0.24358974358974358, 'acc_per_byte': 0.24358974358974358, 'acc_uncond': 0.2692307692307692, 'no_answer': 0.0, 'sum_logits_corr': -41.23376272030366, 'logits_per_token_corr': -5.167867436001727, 'logits_per_char_corr': -1.0681923335954642, 'bits_per_byte_corr': 1.5222781266003493, 'correct_prob': 5.357325511981121e-06, 'correct_prob_per_token': 0.011901511052101563, 'correct_prob_per_char': 0.3765801909003047, 'margin': -1.9447759525111003e-05, 'margin_per_token': -0.005744719323812691, 'margin_per_char': -0.050839075685407985, 'total_prob': 3.3470815705419514e-05, 'total_prob_per_token': 0.04703243174975571, 'total_prob_per_char': 1.5083714739792744, 'uncond_correct_prob': 2.878378987746262e-07, 'uncond_correct_prob_per_token': 0.0049859041905767855, 'uncond_correct_prob_per_char': 0.31792343773461734, 'uncond_total_prob': 2.3454191559145408e-06, 'norm_correct_prob': 0.22550532195464615, 'norm_correct_prob_per_token': 0.259596591677852, 'norm_correct_prob_per_char': 0.25028010324318123, 'primary_metric': 0.2205128205128205}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 10,000 | small aux 3 | 5xC | 655,360,000 | 38,930,082,693,120,000 |
{'predicted_index_raw': 1.4487179487179487, 'predicted_index_per_token': 1.594871794871795, 'predicted_index_per_char': 1.5153846153846153, 'predicted_index_per_byte': 1.5128205128205128, 'predicted_index_uncond': 1.564102564102564, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2512820512820513, 'acc_per_token': 0.2923076923076923, 'acc_per_char': 0.2641025641025641, 'acc_per_byte': 0.2641025641025641, 'acc_uncond': 0.2743589743589744, 'no_answer': 0.0, 'sum_logits_corr': -41.778248981329114, 'logits_per_token_corr': -5.215378971920565, 'logits_per_char_corr': -1.0823420445638774, 'bits_per_byte_corr': 1.5416666623894408, 'correct_prob': 2.192968132147915e-05, 'correct_prob_per_token': 0.011793393310881852, 'correct_prob_per_char': 0.3732414565756611, 'margin': -3.5023605269106956e-05, 'margin_per_token': -0.005851142193921447, 'margin_per_char': -0.05140797056589478, 'total_prob': 9.935788214973812e-05, 'total_prob_per_token': 0.04655414507192846, 'total_prob_per_char': 1.4936161180947145, 'uncond_correct_prob': 3.2953192214223853e-07, 'uncond_correct_prob_per_token': 0.005480612661852979, 'uncond_correct_prob_per_char': 0.3246920320558372, 'uncond_total_prob': 2.0160314892157063e-06, 'norm_correct_prob': 0.23973633896068514, 'norm_correct_prob_per_token': 0.2620383840308748, 'norm_correct_prob_per_char': 0.24974804970012843, 'primary_metric': 0.2512820512820513}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 10,000 | default | 5xC | 655,360,000 | 38,930,082,693,120,000 |
{'predicted_index_raw': 1.4333333333333333, 'predicted_index_per_token': 1.5512820512820513, 'predicted_index_per_char': 1.541025641025641, 'predicted_index_per_byte': 1.5333333333333334, 'predicted_index_uncond': 1.5, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.22564102564102564, 'acc_per_token': 0.2923076923076923, 'acc_per_char': 0.23846153846153847, 'acc_per_byte': 0.2358974358974359, 'acc_uncond': 0.24871794871794872, 'no_answer': 0.0, 'sum_logits_corr': -42.03568570063664, 'logits_per_token_corr': -5.26777796763486, 'logits_per_char_corr': -1.0957589156737517, 'bits_per_byte_corr': 1.5622924139172383, 'correct_prob': 2.2031715278470436e-06, 'correct_prob_per_token': 0.010586354335902055, 'correct_prob_per_char': 0.36934331861252845, 'margin': -1.6237664371814437e-05, 'margin_per_token': -0.005422639014425948, 'margin_per_char': -0.04927119919822195, 'total_prob': 2.4012584151928912e-05, 'total_prob_per_token': 0.04280182941250167, 'total_prob_per_char': 1.4793108218945112, 'uncond_correct_prob': 8.922946748748905e-08, 'uncond_correct_prob_per_token': 0.005004971181080613, 'uncond_correct_prob_per_char': 0.31876341634201577, 'uncond_total_prob': 7.519319448873573e-07, 'norm_correct_prob': 0.2253946426883804, 'norm_correct_prob_per_token': 0.2605277176328143, 'norm_correct_prob_per_char': 0.2501102506030405, 'primary_metric': 0.22564102564102564}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 11,250 | small aux 2 | 5xC | 737,280,000 | 43,796,343,029,760,000 |
{'predicted_index_raw': 1.405128205128205, 'predicted_index_per_token': 1.582051282051282, 'predicted_index_per_char': 1.4923076923076923, 'predicted_index_per_byte': 1.494871794871795, 'predicted_index_uncond': 1.4846153846153847, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2205128205128205, 'acc_per_token': 0.2692307692307692, 'acc_per_char': 0.2230769230769231, 'acc_per_byte': 0.2230769230769231, 'acc_uncond': 0.2641025641025641, 'no_answer': 0.0, 'sum_logits_corr': -40.66753899623186, 'logits_per_token_corr': -5.09338799097442, 'logits_per_char_corr': -1.0577436077594669, 'bits_per_byte_corr': 1.5080630296176545, 'correct_prob': 1.0468473938438794e-05, 'correct_prob_per_token': 0.012729558117022935, 'correct_prob_per_char': 0.3816439920879239, 'margin': -4.1609929180538156e-05, 'margin_per_token': -0.006193646644433896, 'margin_per_char': -0.052289388768362116, 'total_prob': 6.963422851095699e-05, 'total_prob_per_token': 0.051030112429885165, 'total_prob_per_char': 1.5333596981892044, 'uncond_correct_prob': 5.817520578676018e-07, 'uncond_correct_prob_per_token': 0.0045055019028904806, 'uncond_correct_prob_per_char': 0.31059305036921764, 'uncond_total_prob': 2.588411841388804e-06, 'norm_correct_prob': 0.2143037082426452, 'norm_correct_prob_per_token': 0.2564705232411878, 'norm_correct_prob_per_char': 0.24854645823249216, 'primary_metric': 0.2205128205128205}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 11,250 | small aux 3 | 5xC | 737,280,000 | 43,796,343,029,760,000 |
{'predicted_index_raw': 1.382051282051282, 'predicted_index_per_token': 1.5487179487179488, 'predicted_index_per_char': 1.5461538461538462, 'predicted_index_per_byte': 1.541025641025641, 'predicted_index_uncond': 1.5666666666666667, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.24871794871794872, 'acc_per_token': 0.2794871794871795, 'acc_per_char': 0.2717948717948718, 'acc_per_byte': 0.2717948717948718, 'acc_uncond': 0.2846153846153846, 'no_answer': 0.0, 'sum_logits_corr': -41.53120554777292, 'logits_per_token_corr': -5.203892235847523, 'logits_per_char_corr': -1.0787724381675905, 'bits_per_byte_corr': 1.5374616968728672, 'correct_prob': 1.0489506918816403e-05, 'correct_prob_per_token': 0.01141802264231926, 'correct_prob_per_char': 0.3737878828609716, 'margin': -2.0315358244059276e-05, 'margin_per_token': -0.0053301746610312116, 'margin_per_char': -0.048746882071416364, 'total_prob': 5.429970969331396e-05, 'total_prob_per_token': 0.04513782229862176, 'total_prob_per_char': 1.4952495718017091, 'uncond_correct_prob': 3.474679306851995e-07, 'uncond_correct_prob_per_token': 0.005638525290894747, 'uncond_correct_prob_per_char': 0.3293816297156178, 'uncond_total_prob': 2.1347868099448396e-06, 'norm_correct_prob': 0.24333526879574846, 'norm_correct_prob_per_token': 0.264422001112072, 'norm_correct_prob_per_char': 0.25075501966472713, 'primary_metric': 0.24871794871794872}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 11,250 | default | 5xC | 737,280,000 | 43,796,343,029,760,000 |
{'predicted_index_raw': 1.4384615384615385, 'predicted_index_per_token': 1.6051282051282052, 'predicted_index_per_char': 1.558974358974359, 'predicted_index_per_byte': 1.558974358974359, 'predicted_index_uncond': 1.505128205128205, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.23846153846153847, 'acc_per_token': 0.29743589743589743, 'acc_per_char': 0.2358974358974359, 'acc_per_byte': 0.2358974358974359, 'acc_uncond': 0.25384615384615383, 'no_answer': 0.0, 'sum_logits_corr': -41.80250359681936, 'logits_per_token_corr': -5.285121904707382, 'logits_per_char_corr': -1.0997742082888395, 'bits_per_byte_corr': 1.5675646489307962, 'correct_prob': 6.40399166300543e-06, 'correct_prob_per_token': 0.010979501431948422, 'correct_prob_per_char': 0.36912076051796217, 'margin': -8.779200766009091e-06, 'margin_per_token': -0.005539948286993756, 'margin_per_char': -0.05016769009857199, 'total_prob': 2.7773016269270626e-05, 'total_prob_per_token': 0.04415949443328164, 'total_prob_per_char': 1.480974665371504, 'uncond_correct_prob': 1.728899528222311e-07, 'uncond_correct_prob_per_token': 0.004798932577116339, 'uncond_correct_prob_per_char': 0.31706935236023853, 'uncond_total_prob': 1.8288709267541937e-06, 'norm_correct_prob': 0.22996146803919398, 'norm_correct_prob_per_token': 0.2583617188217771, 'norm_correct_prob_per_char': 0.24908153745549955, 'primary_metric': 0.23846153846153847}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 12,500 | small aux 2 | 5xC | 819,200,000 | 48,662,603,366,400,000 |
{'predicted_index_raw': 1.3871794871794871, 'predicted_index_per_token': 1.5846153846153845, 'predicted_index_per_char': 1.464102564102564, 'predicted_index_per_byte': 1.464102564102564, 'predicted_index_uncond': 1.494871794871795, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2153846153846154, 'acc_per_token': 0.28205128205128205, 'acc_per_char': 0.23076923076923078, 'acc_per_byte': 0.23076923076923078, 'acc_uncond': 0.24615384615384617, 'no_answer': 0.0, 'sum_logits_corr': -40.37227325194921, 'logits_per_token_corr': -5.066563060366131, 'logits_per_char_corr': -1.0492629029247897, 'bits_per_byte_corr': 1.4960014384036882, 'correct_prob': 1.3606459896559794e-05, 'correct_prob_per_token': 0.01323345751667808, 'correct_prob_per_char': 0.3837350626685643, 'margin': -3.291766770715334e-05, 'margin_per_token': -0.006591878786719905, 'margin_per_char': -0.05281007988630285, 'total_prob': 6.816405853300163e-05, 'total_prob_per_token': 0.05341985418895411, 'total_prob_per_char': 1.5416106271841943, 'uncond_correct_prob': 3.175450739790255e-07, 'uncond_correct_prob_per_token': 0.005208806446630153, 'uncond_correct_prob_per_char': 0.3198444021527366, 'uncond_total_prob': 1.915965717381862e-06, 'norm_correct_prob': 0.21739170069140715, 'norm_correct_prob_per_token': 0.25657714779597207, 'norm_correct_prob_per_char': 0.24920735239542438, 'primary_metric': 0.2153846153846154}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 12,500 | small aux 3 | 5xC | 819,200,000 | 48,662,603,366,400,000 |
{'predicted_index_raw': 1.4461538461538461, 'predicted_index_per_token': 1.587179487179487, 'predicted_index_per_char': 1.5487179487179488, 'predicted_index_per_byte': 1.5384615384615385, 'predicted_index_uncond': 1.5179487179487179, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2358974358974359, 'acc_per_token': 0.2846153846153846, 'acc_per_char': 0.2641025641025641, 'acc_per_byte': 0.2641025641025641, 'acc_uncond': 0.2717948717948718, 'no_answer': 0.0, 'sum_logits_corr': -40.88617143019652, 'logits_per_token_corr': -5.128915276016336, 'logits_per_char_corr': -1.0677543520536066, 'bits_per_byte_corr': 1.5216949470583276, 'correct_prob': 1.0922535998484852e-05, 'correct_prob_per_token': 0.012432768778548283, 'correct_prob_per_char': 0.3792785771792356, 'margin': -1.790864067217942e-05, 'margin_per_token': -0.005322323017178807, 'margin_per_char': -0.048533311297616843, 'total_prob': 4.601580382619023e-05, 'total_prob_per_token': 0.04858746967725154, 'total_prob_per_char': 1.5169800638404365, 'uncond_correct_prob': 2.9016699415902155e-07, 'uncond_correct_prob_per_token': 0.005732788201504994, 'uncond_correct_prob_per_char': 0.3284617457380341, 'uncond_total_prob': 2.2623536293698662e-06, 'norm_correct_prob': 0.24234658744718893, 'norm_correct_prob_per_token': 0.26168667688115554, 'norm_correct_prob_per_char': 0.2505528350006668, 'primary_metric': 0.2358974358974359}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 12,500 | default | 5xC | 819,200,000 | 48,662,603,366,400,000 |
{'predicted_index_raw': 1.4512820512820512, 'predicted_index_per_token': 1.6128205128205129, 'predicted_index_per_char': 1.5923076923076922, 'predicted_index_per_byte': 1.594871794871795, 'predicted_index_uncond': 1.5794871794871794, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2358974358974359, 'acc_per_token': 0.28717948717948716, 'acc_per_char': 0.24615384615384617, 'acc_per_byte': 0.24615384615384617, 'acc_uncond': 0.2512820512820513, 'no_answer': 0.0, 'sum_logits_corr': -40.81587380140255, 'logits_per_token_corr': -5.146557820084594, 'logits_per_char_corr': -1.074442890875442, 'bits_per_byte_corr': 1.5322189183580137, 'correct_prob': 6.558208843890211e-06, 'correct_prob_per_token': 0.012104745354723365, 'correct_prob_per_char': 0.3784929176841586, 'margin': -1.934743121285548e-05, 'margin_per_token': -0.005889886291739562, 'margin_per_char': -0.050933041669684286, 'total_prob': 3.87031960388147e-05, 'total_prob_per_token': 0.04850207158593142, 'total_prob_per_char': 1.5165530094765496, 'uncond_correct_prob': 1.4385969330751432e-07, 'uncond_correct_prob_per_token': 0.005215222280876281, 'uncond_correct_prob_per_char': 0.3230626068597278, 'uncond_total_prob': 1.5618295867884113e-06, 'norm_correct_prob': 0.2303154469899722, 'norm_correct_prob_per_token': 0.2611820114811405, 'norm_correct_prob_per_char': 0.24991168352959378, 'primary_metric': 0.2358974358974359}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 13,750 | small aux 2 | 5xC | 901,120,000 | 53,528,863,703,040,000 |
{'predicted_index_raw': 1.3641025641025641, 'predicted_index_per_token': 1.5717948717948718, 'predicted_index_per_char': 1.4615384615384615, 'predicted_index_per_byte': 1.464102564102564, 'predicted_index_uncond': 1.476923076923077, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2, 'acc_per_token': 0.258974358974359, 'acc_per_char': 0.24358974358974358, 'acc_per_byte': 0.24358974358974358, 'acc_uncond': 0.24358974358974358, 'no_answer': 0.0, 'sum_logits_corr': -40.20638686938164, 'logits_per_token_corr': -5.0542019122195185, 'logits_per_char_corr': -1.0465236953882424, 'bits_per_byte_corr': 1.4928203497094372, 'correct_prob': 1.2726841179163367e-05, 'correct_prob_per_token': 0.012953696077084874, 'correct_prob_per_char': 0.38423944634142565, 'margin': -1.6587710819317488e-05, 'margin_per_token': -0.00617983882468833, 'margin_per_char': -0.0519925707276332, 'total_prob': 4.899836468091514e-05, 'total_prob_per_token': 0.05196945201784767, 'total_prob_per_char': 1.5431300462093072, 'uncond_correct_prob': 4.747805607713688e-07, 'uncond_correct_prob_per_token': 0.00537890977939415, 'uncond_correct_prob_per_char': 0.3220820936320211, 'uncond_total_prob': 3.1241512503309604e-06, 'norm_correct_prob': 0.21374896312857808, 'norm_correct_prob_per_token': 0.2562839796544521, 'norm_correct_prob_per_char': 0.248990771841607, 'primary_metric': 0.2}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 13,750 | small aux 3 | 5xC | 901,120,000 | 53,528,863,703,040,000 |
{'predicted_index_raw': 1.4153846153846155, 'predicted_index_per_token': 1.5743589743589743, 'predicted_index_per_char': 1.5435897435897437, 'predicted_index_per_byte': 1.5435897435897437, 'predicted_index_uncond': 1.541025641025641, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.26153846153846155, 'acc_per_token': 0.3, 'acc_per_char': 0.2641025641025641, 'acc_per_byte': 0.2641025641025641, 'acc_uncond': 0.28205128205128205, 'no_answer': 0.0, 'sum_logits_corr': -40.00446645174271, 'logits_per_token_corr': -5.016025929169455, 'logits_per_char_corr': -1.0445125288253552, 'bits_per_byte_corr': 1.4890173884351492, 'correct_prob': 1.060018091516331e-05, 'correct_prob_per_token': 0.013527196811533365, 'correct_prob_per_char': 0.38720351645019013, 'margin': -1.5543355761005572e-05, 'margin_per_token': -0.005857074417437584, 'margin_per_char': -0.048057646304437915, 'total_prob': 4.664835223972001e-05, 'total_prob_per_token': 0.053151538593449114, 'total_prob_per_char': 1.5469747383491093, 'uncond_correct_prob': 3.2239689043921673e-07, 'uncond_correct_prob_per_token': 0.006115650844345869, 'uncond_correct_prob_per_char': 0.332815074682383, 'uncond_total_prob': 1.8961852857501752e-06, 'norm_correct_prob': 0.25308151117032285, 'norm_correct_prob_per_token': 0.2641820913715981, 'norm_correct_prob_per_char': 0.25096229702552486, 'primary_metric': 0.26153846153846155}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 13,750 | default | 5xC | 901,120,000 | 53,528,863,703,040,000 |
{'predicted_index_raw': 1.4102564102564104, 'predicted_index_per_token': 1.5692307692307692, 'predicted_index_per_char': 1.5307692307692307, 'predicted_index_per_byte': 1.5333333333333334, 'predicted_index_uncond': 1.564102564102564, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2358974358974359, 'acc_per_token': 0.3, 'acc_per_char': 0.2564102564102564, 'acc_per_byte': 0.2564102564102564, 'acc_uncond': 0.2564102564102564, 'no_answer': 0.0, 'sum_logits_corr': -40.52844421435625, 'logits_per_token_corr': -5.117711391583144, 'logits_per_char_corr': -1.0707029991594916, 'bits_per_byte_corr': 1.5270639043936853, 'correct_prob': 4.437034321512446e-06, 'correct_prob_per_token': 0.012526567952367292, 'correct_prob_per_char': 0.3806101618620626, 'margin': -3.216747377268504e-05, 'margin_per_token': -0.006189649964025204, 'margin_per_char': -0.051301482933542725, 'total_prob': 4.873451370284784e-05, 'total_prob_per_token': 0.05081307595259053, 'total_prob_per_char': 1.5269029403869288, 'uncond_correct_prob': 1.3396484686778003e-07, 'uncond_correct_prob_per_token': 0.005494789664358043, 'uncond_correct_prob_per_char': 0.3264942938463881, 'uncond_total_prob': 1.8943254025681931e-06, 'norm_correct_prob': 0.22488959001207345, 'norm_correct_prob_per_token': 0.25951485833115406, 'norm_correct_prob_per_char': 0.24969517145578468, 'primary_metric': 0.2358974358974359}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 15,000 | small aux 2 | 5xC | 983,040,000 | 58,395,124,039,680,000 |
{'predicted_index_raw': 1.3666666666666667, 'predicted_index_per_token': 1.5435897435897437, 'predicted_index_per_char': 1.5153846153846153, 'predicted_index_per_byte': 1.5179487179487179, 'predicted_index_uncond': 1.5102564102564102, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2076923076923077, 'acc_per_token': 0.26153846153846155, 'acc_per_char': 0.24102564102564103, 'acc_per_byte': 0.24102564102564103, 'acc_uncond': 0.24871794871794872, 'no_answer': 0.0, 'sum_logits_corr': -40.0399983222668, 'logits_per_token_corr': -5.039732898554001, 'logits_per_char_corr': -1.0446241950167479, 'bits_per_byte_corr': 1.4898495046009115, 'correct_prob': 7.177737327952842e-06, 'correct_prob_per_token': 0.013410926447257695, 'correct_prob_per_char': 0.38564606071621227, 'margin': -2.3529689309414555e-05, 'margin_per_token': -0.006592227266229682, 'margin_per_char': -0.052742080814561054, 'total_prob': 4.457229555409794e-05, 'total_prob_per_token': 0.05428891421449655, 'total_prob_per_char': 1.5504114581695039, 'uncond_correct_prob': 4.6924163490836944e-07, 'uncond_correct_prob_per_token': 0.005311699761946465, 'uncond_correct_prob_per_char': 0.31996400711700085, 'uncond_total_prob': 2.857253590259394e-06, 'norm_correct_prob': 0.215186030989941, 'norm_correct_prob_per_token': 0.2554831084825123, 'norm_correct_prob_per_char': 0.24886319990184416, 'primary_metric': 0.2076923076923077}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 15,000 | small aux 3 | 5xC | 983,040,000 | 58,395,124,039,680,000 |
{'predicted_index_raw': 1.412820512820513, 'predicted_index_per_token': 1.5564102564102564, 'predicted_index_per_char': 1.5333333333333334, 'predicted_index_per_byte': 1.5307692307692307, 'predicted_index_uncond': 1.5205128205128204, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.26153846153846155, 'acc_per_token': 0.30256410256410254, 'acc_per_char': 0.2692307692307692, 'acc_per_byte': 0.2692307692307692, 'acc_uncond': 0.27692307692307694, 'no_answer': 0.0, 'sum_logits_corr': -40.1606077732184, 'logits_per_token_corr': -5.037078092989339, 'logits_per_char_corr': -1.0480778314514003, 'bits_per_byte_corr': 1.493794077012741, 'correct_prob': 1.0273630406401683e-05, 'correct_prob_per_token': 0.013247931198861139, 'correct_prob_per_char': 0.3855943985228079, 'margin': -1.662210046402282e-05, 'margin_per_token': -0.005915700186414004, 'margin_per_char': -0.049140539084062775, 'total_prob': 4.6900562115935025e-05, 'total_prob_per_token': 0.052261896085884954, 'total_prob_per_char': 1.5421245475997551, 'uncond_correct_prob': 3.4132590673576884e-07, 'uncond_correct_prob_per_token': 0.005958207068292162, 'uncond_correct_prob_per_char': 0.3313300109404032, 'uncond_total_prob': 2.132634881508234e-06, 'norm_correct_prob': 0.2452735702412262, 'norm_correct_prob_per_token': 0.2633015242342554, 'norm_correct_prob_per_char': 0.25085903856350805, 'primary_metric': 0.26153846153846155}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 15,000 | default | 5xC | 983,040,000 | 58,395,124,039,680,000 |
{'predicted_index_raw': 1.458974358974359, 'predicted_index_per_token': 1.6538461538461537, 'predicted_index_per_char': 1.541025641025641, 'predicted_index_per_byte': 1.5435897435897437, 'predicted_index_uncond': 1.5461538461538462, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2282051282051282, 'acc_per_token': 0.2948717948717949, 'acc_per_char': 0.23846153846153847, 'acc_per_byte': 0.23846153846153847, 'acc_uncond': 0.26666666666666666, 'no_answer': 0.0, 'sum_logits_corr': -40.55718162243183, 'logits_per_token_corr': -5.120133418692202, 'logits_per_char_corr': -1.0692524501934, 'bits_per_byte_corr': 1.5249301849015495, 'correct_prob': 5.795320417840119e-06, 'correct_prob_per_token': 0.01246259892937185, 'correct_prob_per_char': 0.38031182226099686, 'margin': -1.7216296983374964e-05, 'margin_per_token': -0.005868848864784719, 'margin_per_char': -0.050478281539073876, 'total_prob': 3.495703081285451e-05, 'total_prob_per_token': 0.05007970136898324, 'total_prob_per_char': 1.5243349102408836, 'uncond_correct_prob': 1.9081125266700966e-07, 'uncond_correct_prob_per_token': 0.005479627909664242, 'uncond_correct_prob_per_char': 0.3265923335173012, 'uncond_total_prob': 2.0269819862107307e-06, 'norm_correct_prob': 0.22666887517187767, 'norm_correct_prob_per_token': 0.26074526442850954, 'norm_correct_prob_per_char': 0.24996166957102783, 'primary_metric': 0.2282051282051282}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 15,117 | small aux 2 | 5xC | 990,707,712 | 58,850,606,007,189,500 |
{'predicted_index_raw': 1.376923076923077, 'predicted_index_per_token': 1.5846153846153845, 'predicted_index_per_char': 1.5102564102564102, 'predicted_index_per_byte': 1.5128205128205128, 'predicted_index_uncond': 1.5538461538461539, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2128205128205128, 'acc_per_token': 0.2641025641025641, 'acc_per_char': 0.24358974358974358, 'acc_per_byte': 0.24358974358974358, 'acc_uncond': 0.2564102564102564, 'no_answer': 0.0, 'sum_logits_corr': -40.11749047132639, 'logits_per_token_corr': -5.05126868911925, 'logits_per_char_corr': -1.0468123586824938, 'bits_per_byte_corr': 1.4928689102814081, 'correct_prob': 7.600587454821535e-06, 'correct_prob_per_token': 0.013304721930665267, 'correct_prob_per_char': 0.38477728185629695, 'margin': -2.4107374600370252e-05, 'margin_per_token': -0.0065127893795565855, 'margin_per_char': -0.052616083083870696, 'total_prob': 4.6276373721420186e-05, 'total_prob_per_token': 0.053826908516020906, 'total_prob_per_char': 1.5467986740834652, 'uncond_correct_prob': 4.778550007504943e-07, 'uncond_correct_prob_per_token': 0.005308536711746753, 'uncond_correct_prob_per_char': 0.3199771630495369, 'uncond_total_prob': 2.9357811977267985e-06, 'norm_correct_prob': 0.21492680984466014, 'norm_correct_prob_per_token': 0.2555876728054521, 'norm_correct_prob_per_char': 0.248886335916498, 'primary_metric': 0.2128205128205128}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 15,117 | small aux 3 | 5xC | 990,707,712 | 58,850,606,007,189,500 |
{'predicted_index_raw': 1.4102564102564104, 'predicted_index_per_token': 1.5615384615384615, 'predicted_index_per_char': 1.523076923076923, 'predicted_index_per_byte': 1.523076923076923, 'predicted_index_uncond': 1.505128205128205, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.2564102564102564, 'acc_per_token': 0.31025641025641026, 'acc_per_char': 0.2641025641025641, 'acc_per_byte': 0.2641025641025641, 'acc_uncond': 0.2743589743589744, 'no_answer': 0.0, 'sum_logits_corr': -39.96504401182517, 'logits_per_token_corr': -5.013126913138924, 'logits_per_char_corr': -1.0430228549747242, 'bits_per_byte_corr': 1.4865704046133497, 'correct_prob': 1.1083556252645513e-05, 'correct_prob_per_token': 0.013523785948962697, 'correct_prob_per_char': 0.38729627318971993, 'margin': -1.4385034166230175e-05, 'margin_per_token': -0.005981681897875313, 'margin_per_char': -0.04888550112284682, 'total_prob': 4.6250609576234215e-05, 'total_prob_per_token': 0.053311690461827675, 'total_prob_per_char': 1.5482198489478562, 'uncond_correct_prob': 3.2682930696993795e-07, 'uncond_correct_prob_per_token': 0.00594282464408946, 'uncond_correct_prob_per_char': 0.33077353365972173, 'uncond_total_prob': 2.0032347910320016e-06, 'norm_correct_prob': 0.2450956653289724, 'norm_correct_prob_per_token': 0.26400823732162426, 'norm_correct_prob_per_char': 0.2510132346685564, 'primary_metric': 0.2564102564102564}
| 10M | DCLM-Baseline | mmlu_high_school_macroeconomics | 15,117 | default | 5xC | 990,707,712 | 58,850,606,007,189,500 |
{'predicted_index_raw': 1.4487179487179487, 'predicted_index_per_token': 1.6307692307692307, 'predicted_index_per_char': 1.5333333333333334, 'predicted_index_per_byte': 1.535897435897436, 'predicted_index_uncond': 1.5512820512820513, 'correct_choice': 1.7384615384615385, 'acc_raw': 0.23333333333333334, 'acc_per_token': 0.3, 'acc_per_char': 0.23846153846153847, 'acc_per_byte': 0.23846153846153847, 'acc_uncond': 0.2564102564102564, 'no_answer': 0.0, 'sum_logits_corr': -40.47364354255872, 'logits_per_token_corr': -5.10436837232519, 'logits_per_char_corr': -1.066434145029227, 'bits_per_byte_corr': 1.52090721204058, 'correct_prob': 5.126690892231915e-06, 'correct_prob_per_token': 0.012551119671647603, 'correct_prob_per_char': 0.38118613864231454, 'margin': -1.919939838862679e-05, 'margin_per_token': -0.00602348963378426, 'margin_per_char': -0.05098233422668128, 'total_prob': 3.561565007489107e-05, 'total_prob_per_token': 0.05060309336086868, 'total_prob_per_char': 1.5285832380869244, 'uncond_correct_prob': 1.666536042752463e-07, 'uncond_correct_prob_per_token': 0.0054882778830381415, 'uncond_correct_prob_per_char': 0.3262636325153967, 'uncond_total_prob': 1.7749764804454254e-06, 'norm_correct_prob': 0.22436995777377072, 'norm_correct_prob_per_token': 0.2598006316227331, 'norm_correct_prob_per_char': 0.2497974461978145, 'primary_metric': 0.23333333333333334}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 0 | small aux 2 | 5xC | 0 | 0 |
{'predicted_index_raw': 1.5740740740740742, 'predicted_index_per_token': 1.6111111111111112, 'predicted_index_per_char': 1.5814814814814815, 'predicted_index_per_byte': 1.6, 'predicted_index_uncond': 1.4814814814814814, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.26666666666666666, 'acc_per_token': 0.2740740740740741, 'acc_per_char': 0.2851851851851852, 'acc_per_byte': 0.2814814814814815, 'acc_uncond': 0.22592592592592592, 'no_answer': 0.0, 'sum_logits_corr': -33.83039044274224, 'logits_per_token_corr': -10.91024202500625, 'logits_per_char_corr': -4.978418567411432, 'bits_per_byte_corr': 7.0859247202676015, 'correct_prob': 1.0019341555928345e-05, 'correct_prob_per_token': 1.858598413170255e-05, 'correct_prob_per_char': 0.019301476994763447, 'margin': -4.722034581678693e-06, 'margin_per_token': -2.597301278282986e-06, 'margin_per_char': -0.009832006450337365, 'total_prob': 4.312349837227965e-05, 'total_prob_per_token': 7.458783052977335e-05, 'total_prob_per_char': 0.07898309667289959, 'uncond_correct_prob': 1.0216230264045688e-05, 'uncond_correct_prob_per_token': 1.8809979639240085e-05, 'uncond_correct_prob_per_char': 0.019356766706948858, 'uncond_total_prob': 4.502237499562587e-05, 'norm_correct_prob': 0.21573370603304512, 'norm_correct_prob_per_token': 0.24940090652520533, 'norm_correct_prob_per_char': 0.2478460853207465, 'primary_metric': 0.26666666666666666}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 0 | small aux 3 | 5xC | 0 | 0 |
{'predicted_index_raw': 1.5555555555555556, 'predicted_index_per_token': 1.5185185185185186, 'predicted_index_per_char': 1.5851851851851853, 'predicted_index_per_byte': 1.6037037037037036, 'predicted_index_uncond': 1.5037037037037038, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.23703703703703705, 'acc_per_token': 0.3, 'acc_per_char': 0.27037037037037037, 'acc_per_byte': 0.26296296296296295, 'acc_uncond': 0.28888888888888886, 'no_answer': 0.0, 'sum_logits_corr': -33.5616830084059, 'logits_per_token_corr': -10.829459156356256, 'logits_per_char_corr': -4.938751909604259, 'bits_per_byte_corr': 7.029268511673241, 'correct_prob': 1.0407337619672618e-05, 'correct_prob_per_token': 2.0103955829357836e-05, 'correct_prob_per_char': 0.01969456845394622, 'margin': -4.4576646656074435e-06, 'margin_per_token': -2.091680838838453e-06, 'margin_per_char': -0.0095852762831555, 'total_prob': 4.3753331023168375e-05, 'total_prob_per_token': 7.914488174268246e-05, 'total_prob_per_char': 0.07993424249431907, 'uncond_correct_prob': 1.051143804377541e-05, 'uncond_correct_prob_per_token': 2.0052493330409944e-05, 'uncond_correct_prob_per_char': 0.019415487937079944, 'uncond_total_prob': 4.688827011002037e-05, 'norm_correct_prob': 0.21647037393926005, 'norm_correct_prob_per_token': 0.25427452777636994, 'norm_correct_prob_per_char': 0.24923613291171692, 'primary_metric': 0.23703703703703705}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 0 | default | 5xC | 0 | 0 |
{'predicted_index_raw': 1.4851851851851852, 'predicted_index_per_token': 1.462962962962963, 'predicted_index_per_char': 1.614814814814815, 'predicted_index_per_byte': 1.625925925925926, 'predicted_index_uncond': 1.5740740740740742, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.2518518518518518, 'acc_per_token': 0.2962962962962963, 'acc_per_char': 0.2814814814814815, 'acc_per_byte': 0.2814814814814815, 'acc_uncond': 0.2518518518518518, 'no_answer': 0.0, 'sum_logits_corr': -33.66232545993946, 'logits_per_token_corr': -10.840314769339162, 'logits_per_char_corr': -4.94207306436861, 'bits_per_byte_corr': 7.033949977136793, 'correct_prob': 1.0656281012563595e-05, 'correct_prob_per_token': 2.0077974990794664e-05, 'correct_prob_per_char': 0.019557522747272352, 'margin': -5.517723914923937e-06, 'margin_per_token': -2.9565998571609363e-06, 'margin_per_char': -0.00982179521999643, 'total_prob': 4.500730689358645e-05, 'total_prob_per_token': 7.889214177420302e-05, 'total_prob_per_char': 0.07964022604190828, 'uncond_correct_prob': 1.0376393935917561e-05, 'uncond_correct_prob_per_token': 2.052412991449017e-05, 'uncond_correct_prob_per_char': 0.01954150828926023, 'uncond_total_prob': 4.380157172020923e-05, 'norm_correct_prob': 0.2185543428325431, 'norm_correct_prob_per_token': 0.25440803801077844, 'norm_correct_prob_per_char': 0.25033542588017543, 'primary_metric': 0.2518518518518518}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 2,500 | small aux 2 | 5xC | 163,840,000 | 9,732,520,673,280,000 |
{'predicted_index_raw': 1.4555555555555555, 'predicted_index_per_token': 1.4481481481481482, 'predicted_index_per_char': 1.5444444444444445, 'predicted_index_per_byte': 1.537037037037037, 'predicted_index_uncond': 1.6185185185185185, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.17037037037037037, 'acc_per_token': 0.1925925925925926, 'acc_per_char': 0.1814814814814815, 'acc_per_byte': 0.18518518518518517, 'acc_uncond': 0.2111111111111111, 'no_answer': 0.0, 'sum_logits_corr': -16.182193249243277, 'logits_per_token_corr': -6.2690610177229855, 'logits_per_char_corr': -2.7028575374080623, 'bits_per_byte_corr': 3.835604359479644, 'correct_prob': 0.00236460753538292, 'correct_prob_per_token': 0.010646046682571682, 'correct_prob_per_char': 0.10188088926366672, 'margin': -0.0035797333674295567, 'margin_per_token': -0.008153174397848335, 'margin_per_char': -0.054112385377522595, 'total_prob': 0.010851159528004781, 'total_prob_per_token': 0.046118737397647665, 'total_prob_per_char': 0.4255620281313482, 'uncond_correct_prob': 0.00012441176288580005, 'uncond_correct_prob_per_token': 0.0034385271511626287, 'uncond_correct_prob_per_char': 0.04674707685063339, 'uncond_total_prob': 0.0008557798019469309, 'norm_correct_prob': 0.17853587283550296, 'norm_correct_prob_per_token': 0.21158191247869226, 'norm_correct_prob_per_char': 0.22611827624624853, 'primary_metric': 0.17037037037037037}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 2,500 | small aux 3 | 5xC | 163,840,000 | 9,732,520,673,280,000 |
{'predicted_index_raw': 1.3, 'predicted_index_per_token': 1.337037037037037, 'predicted_index_per_char': 1.4666666666666666, 'predicted_index_per_byte': 1.4481481481481482, 'predicted_index_uncond': 1.6185185185185185, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.17037037037037037, 'acc_per_token': 0.18888888888888888, 'acc_per_char': 0.2, 'acc_per_byte': 0.2, 'acc_uncond': 0.23703703703703705, 'no_answer': 0.0, 'sum_logits_corr': -15.903816126011035, 'logits_per_token_corr': -6.223431916414758, 'logits_per_char_corr': -2.642847936744722, 'bits_per_byte_corr': 3.7590109792644735, 'correct_prob': 0.0015898410283646189, 'correct_prob_per_token': 0.010866214502186337, 'correct_prob_per_char': 0.09589430880739801, 'margin': -0.0037728253526174513, 'margin_per_token': -0.007427695671942173, 'margin_per_char': -0.04454518568259043, 'total_prob': 0.009413618933419363, 'total_prob_per_token': 0.04579240473524237, 'total_prob_per_char': 0.40401846636032857, 'uncond_correct_prob': 0.00012493832681194402, 'uncond_correct_prob_per_token': 0.0023638547650001863, 'uncond_correct_prob_per_char': 0.04623083702450991, 'uncond_total_prob': 0.0008252238872377968, 'norm_correct_prob': 0.1872809521058057, 'norm_correct_prob_per_token': 0.21209239082064282, 'norm_correct_prob_per_char': 0.23081925598937275, 'primary_metric': 0.17037037037037037}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 2,500 | default | 5xC | 163,840,000 | 9,732,520,673,280,000 |
{'predicted_index_raw': 1.3, 'predicted_index_per_token': 1.3037037037037038, 'predicted_index_per_char': 1.4259259259259258, 'predicted_index_per_byte': 1.4185185185185185, 'predicted_index_uncond': 1.5925925925925926, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.13333333333333333, 'acc_per_token': 0.14814814814814814, 'acc_per_char': 0.1259259259259259, 'acc_per_byte': 0.1259259259259259, 'acc_uncond': 0.17407407407407408, 'no_answer': 0.0, 'sum_logits_corr': -14.995270746725577, 'logits_per_token_corr': -5.898422352926053, 'logits_per_char_corr': -2.4786113200806463, 'bits_per_byte_corr': 3.522532083995948, 'correct_prob': 0.002306083799102661, 'correct_prob_per_token': 0.016257541164051783, 'correct_prob_per_char': 0.10992460140818815, 'margin': -0.0040262455680977055, 'margin_per_token': -0.014526038977231447, 'margin_per_char': -0.0522378767819568, 'total_prob': 0.012475103785443415, 'total_prob_per_token': 0.07736064972393632, 'total_prob_per_char': 0.4832643091779567, 'uncond_correct_prob': 0.00017780532349783004, 'uncond_correct_prob_per_token': 0.0033028379711007067, 'uncond_correct_prob_per_char': 0.04867473535117993, 'uncond_total_prob': 0.0012045135237893137, 'norm_correct_prob': 0.16803952517963988, 'norm_correct_prob_per_token': 0.2050662938744577, 'norm_correct_prob_per_char': 0.22421713605331353, 'primary_metric': 0.13333333333333333}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 3,750 | small aux 2 | 5xC | 245,760,000 | 14,598,781,009,920,000 |
{'predicted_index_raw': 1.385185185185185, 'predicted_index_per_token': 1.4, 'predicted_index_per_char': 1.4703703703703703, 'predicted_index_per_byte': 1.474074074074074, 'predicted_index_uncond': 1.637037037037037, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.1814814814814815, 'acc_per_token': 0.1925925925925926, 'acc_per_char': 0.1962962962962963, 'acc_per_byte': 0.2, 'acc_uncond': 0.22962962962962963, 'no_answer': 0.0, 'sum_logits_corr': -16.141227037818343, 'logits_per_token_corr': -6.671316939200221, 'logits_per_char_corr': -2.7921345665513804, 'bits_per_byte_corr': 3.977550815978674, 'correct_prob': 0.0013874148119458513, 'correct_prob_per_token': 0.010525644941898405, 'correct_prob_per_char': 0.08562667804699678, 'margin': -0.003532560652881339, 'margin_per_token': -0.007786314689891544, 'margin_per_char': -0.0435874002279178, 'total_prob': 0.0077003767554414525, 'total_prob_per_token': 0.04503677480455862, 'total_prob_per_char': 0.3630011107795229, 'uncond_correct_prob': 0.00017642286865890067, 'uncond_correct_prob_per_token': 0.0025451292766964046, 'uncond_correct_prob_per_char': 0.041285750082521264, 'uncond_total_prob': 0.0013725062950618934, 'norm_correct_prob': 0.18255635555041608, 'norm_correct_prob_per_token': 0.21158909828727113, 'norm_correct_prob_per_char': 0.23092488416757076, 'primary_metric': 0.1814814814814815}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 3,750 | small aux 3 | 5xC | 245,760,000 | 14,598,781,009,920,000 |
{'predicted_index_raw': 1.5074074074074073, 'predicted_index_per_token': 1.5037037037037038, 'predicted_index_per_char': 1.625925925925926, 'predicted_index_per_byte': 1.6333333333333333, 'predicted_index_uncond': 1.6555555555555554, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.15925925925925927, 'acc_per_token': 0.18518518518518517, 'acc_per_char': 0.15925925925925927, 'acc_per_byte': 0.15555555555555556, 'acc_uncond': 0.26666666666666666, 'no_answer': 0.0, 'sum_logits_corr': -15.414935467861318, 'logits_per_token_corr': -5.76105997299079, 'logits_per_char_corr': -2.4530835664034947, 'bits_per_byte_corr': 3.483770270152739, 'correct_prob': 0.0039380723613752396, 'correct_prob_per_token': 0.012046172910950876, 'correct_prob_per_char': 0.11307072943082833, 'margin': -0.005161050085778729, 'margin_per_token': -0.009474548975812399, 'margin_per_char': -0.04988741306225464, 'total_prob': 0.018420435834214502, 'total_prob_per_token': 0.051995158650903685, 'total_prob_per_char': 0.47874910022753125, 'uncond_correct_prob': 0.00043209456740381686, 'uncond_correct_prob_per_token': 0.004372313276210439, 'uncond_correct_prob_per_char': 0.06025549737881299, 'uncond_total_prob': 0.0025251631661405514, 'norm_correct_prob': 0.18649003499930347, 'norm_correct_prob_per_token': 0.21487888943645908, 'norm_correct_prob_per_char': 0.22902399499753934, 'primary_metric': 0.15925925925925927}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 3,750 | default | 5xC | 245,760,000 | 14,598,781,009,920,000 |
{'predicted_index_raw': 1.3777777777777778, 'predicted_index_per_token': 1.3777777777777778, 'predicted_index_per_char': 1.4222222222222223, 'predicted_index_per_byte': 1.4185185185185185, 'predicted_index_uncond': 1.6111111111111112, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.1814814814814815, 'acc_per_token': 0.1962962962962963, 'acc_per_char': 0.17777777777777778, 'acc_per_byte': 0.17777777777777778, 'acc_uncond': 0.25555555555555554, 'no_answer': 0.0, 'sum_logits_corr': -15.958061997978776, 'logits_per_token_corr': -6.220631149417477, 'logits_per_char_corr': -2.6453786006936997, 'bits_per_byte_corr': 3.7630515114295147, 'correct_prob': 0.0017899166221964928, 'correct_prob_per_token': 0.009315997453381374, 'correct_prob_per_char': 0.09407239685019392, 'margin': -0.003401025964847359, 'margin_per_token': -0.0074298818632880315, 'margin_per_char': -0.04661067749880909, 'total_prob': 0.00950549619392883, 'total_prob_per_token': 0.041221116761715894, 'total_prob_per_char': 0.40220646600901605, 'uncond_correct_prob': 0.00012821782352972667, 'uncond_correct_prob_per_token': 0.0024226532653966837, 'uncond_correct_prob_per_char': 0.04193308205316233, 'uncond_total_prob': 0.0008863332342694146, 'norm_correct_prob': 0.1873603785746905, 'norm_correct_prob_per_token': 0.20789648463272203, 'norm_correct_prob_per_char': 0.2279406093112803, 'primary_metric': 0.1814814814814815}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 5,000 | small aux 2 | 5xC | 327,680,000 | 19,465,041,346,560,000 |
{'predicted_index_raw': 1.2925925925925925, 'predicted_index_per_token': 1.2814814814814814, 'predicted_index_per_char': 1.4777777777777779, 'predicted_index_per_byte': 1.5037037037037038, 'predicted_index_uncond': 1.6, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.1962962962962963, 'acc_per_token': 0.22962962962962963, 'acc_per_char': 0.2, 'acc_per_byte': 0.2074074074074074, 'acc_uncond': 0.26666666666666666, 'no_answer': 0.0, 'sum_logits_corr': -15.300711543471724, 'logits_per_token_corr': -6.429220638303944, 'logits_per_char_corr': -2.700363800592916, 'bits_per_byte_corr': 3.845523337054462, 'correct_prob': 0.0008600907377027629, 'correct_prob_per_token': 0.015950449374599194, 'correct_prob_per_char': 0.09664613034120288, 'margin': -0.0018776077977192467, 'margin_per_token': -0.00788048531147226, 'margin_per_char': -0.034467098176628484, 'total_prob': 0.004771401873917941, 'total_prob_per_token': 0.06598765671089886, 'total_prob_per_char': 0.39745920454031636, 'uncond_correct_prob': 0.0002432059904525484, 'uncond_correct_prob_per_token': 0.0036415021432308547, 'uncond_correct_prob_per_char': 0.050275438991052704, 'uncond_total_prob': 0.0017777943896021221, 'norm_correct_prob': 0.18764867224211734, 'norm_correct_prob_per_token': 0.22088299230240008, 'norm_correct_prob_per_char': 0.23569567915191034, 'primary_metric': 0.1962962962962963}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 5,000 | small aux 3 | 5xC | 327,680,000 | 19,465,041,346,560,000 |
{'predicted_index_raw': 1.3962962962962964, 'predicted_index_per_token': 1.3, 'predicted_index_per_char': 1.488888888888889, 'predicted_index_per_byte': 1.488888888888889, 'predicted_index_uncond': 1.6111111111111112, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.17777777777777778, 'acc_per_token': 0.18888888888888888, 'acc_per_char': 0.18888888888888888, 'acc_per_byte': 0.18518518518518517, 'acc_uncond': 0.25925925925925924, 'no_answer': 0.0, 'sum_logits_corr': -14.92155191898346, 'logits_per_token_corr': -5.771168421436227, 'logits_per_char_corr': -2.465859281960706, 'bits_per_byte_corr': 3.504347663094525, 'correct_prob': 0.002840287943793055, 'correct_prob_per_token': 0.01457327157551383, 'correct_prob_per_char': 0.115616397622298, 'margin': -0.004664217904830923, 'margin_per_token': -0.010548275078405214, 'margin_per_char': -0.051982459374324795, 'total_prob': 0.01428432433840911, 'total_prob_per_token': 0.06424406106871466, 'total_prob_per_char': 0.49026157124375097, 'uncond_correct_prob': 0.00043800178212887597, 'uncond_correct_prob_per_token': 0.0049801837009995405, 'uncond_correct_prob_per_char': 0.06591924217425908, 'uncond_total_prob': 0.0026098893490775527, 'norm_correct_prob': 0.1823650187868018, 'norm_correct_prob_per_token': 0.2127719448892786, 'norm_correct_prob_per_char': 0.2281377345851064, 'primary_metric': 0.17777777777777778}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 5,000 | default | 5xC | 327,680,000 | 19,465,041,346,560,000 |
{'predicted_index_raw': 1.4555555555555555, 'predicted_index_per_token': 1.4296296296296296, 'predicted_index_per_char': 1.5666666666666667, 'predicted_index_per_byte': 1.5518518518518518, 'predicted_index_uncond': 1.5814814814814815, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.16666666666666666, 'acc_per_token': 0.1814814814814815, 'acc_per_char': 0.16666666666666666, 'acc_per_byte': 0.17037037037037037, 'acc_uncond': 0.1925925925925926, 'no_answer': 0.0, 'sum_logits_corr': -14.619722436092518, 'logits_per_token_corr': -5.796285567056056, 'logits_per_char_corr': -2.480367690251922, 'bits_per_byte_corr': 3.522991112349, 'correct_prob': 0.002821501562745639, 'correct_prob_per_token': 0.01836149950996519, 'correct_prob_per_char': 0.1218529114834688, 'margin': -0.004212679881236718, 'margin_per_token': -0.012466843042821574, 'margin_per_char': -0.05838891357687622, 'total_prob': 0.01355230017662523, 'total_prob_per_token': 0.07888761217399556, 'total_prob_per_char': 0.5129240928331982, 'uncond_correct_prob': 0.0002208144101364467, 'uncond_correct_prob_per_token': 0.0032530912795082976, 'uncond_correct_prob_per_char': 0.05033643182967874, 'uncond_total_prob': 0.001455750260140772, 'norm_correct_prob': 0.18328049374712999, 'norm_correct_prob_per_token': 0.21139098567695364, 'norm_correct_prob_per_char': 0.22759211738233479, 'primary_metric': 0.16666666666666666}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 7,500 | small aux 2 | 5xC | 491,520,000 | 29,197,562,019,840,000 |
{'predicted_index_raw': 1.3962962962962964, 'predicted_index_per_token': 1.4259259259259258, 'predicted_index_per_char': 1.4703703703703703, 'predicted_index_per_byte': 1.4777777777777779, 'predicted_index_uncond': 1.5777777777777777, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.2, 'acc_per_token': 0.22592592592592592, 'acc_per_char': 0.17777777777777778, 'acc_per_byte': 0.1814814814814815, 'acc_uncond': 0.25925925925925924, 'no_answer': 0.0, 'sum_logits_corr': -14.187100488168221, 'logits_per_token_corr': -5.463891820459324, 'logits_per_char_corr': -2.3248681934115143, 'bits_per_byte_corr': 3.299653308731839, 'correct_prob': 0.00521917504259595, 'correct_prob_per_token': 0.017825237327659716, 'correct_prob_per_char': 0.13224441687699362, 'margin': -0.006656819091333214, 'margin_per_token': -0.011447071789830246, 'margin_per_char': -0.06012292760407834, 'total_prob': 0.02411238567825848, 'total_prob_per_token': 0.07564535596188808, 'total_prob_per_char': 0.5556437599025933, 'uncond_correct_prob': 0.0002564537453552442, 'uncond_correct_prob_per_token': 0.004790986304761806, 'uncond_correct_prob_per_char': 0.06074633565262383, 'uncond_total_prob': 0.0015102244433544596, 'norm_correct_prob': 0.1869456657292448, 'norm_correct_prob_per_token': 0.2182182501932542, 'norm_correct_prob_per_char': 0.22998914317990254, 'primary_metric': 0.2}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 7,500 | small aux 3 | 5xC | 491,520,000 | 29,197,562,019,840,000 |
{'predicted_index_raw': 1.5222222222222221, 'predicted_index_per_token': 1.462962962962963, 'predicted_index_per_char': 1.6296296296296295, 'predicted_index_per_byte': 1.6296296296296295, 'predicted_index_uncond': 1.5777777777777777, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.18888888888888888, 'acc_per_token': 0.2074074074074074, 'acc_per_char': 0.2037037037037037, 'acc_per_byte': 0.2037037037037037, 'acc_uncond': 0.26296296296296295, 'no_answer': 0.0, 'sum_logits_corr': -15.226666518493936, 'logits_per_token_corr': -5.939266655325705, 'logits_per_char_corr': -2.546912252168048, 'bits_per_byte_corr': 3.6213396602932235, 'correct_prob': 0.0026229913951452905, 'correct_prob_per_token': 0.014310969257618478, 'correct_prob_per_char': 0.1115745142220579, 'margin': -0.003385705042170627, 'margin_per_token': -0.00999446544325616, 'margin_per_char': -0.04902837223586526, 'total_prob': 0.010990945089258162, 'total_prob_per_token': 0.060649331473091, 'total_prob_per_char': 0.454128104052612, 'uncond_correct_prob': 0.00031240543951288233, 'uncond_correct_prob_per_token': 0.005562725554534709, 'uncond_correct_prob_per_char': 0.0662794008192567, 'uncond_total_prob': 0.0017829452274529616, 'norm_correct_prob': 0.1978446370802048, 'norm_correct_prob_per_token': 0.2345232951631007, 'norm_correct_prob_per_char': 0.23882526629403059, 'primary_metric': 0.18888888888888888}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 7,500 | default | 5xC | 491,520,000 | 29,197,562,019,840,000 |
{'predicted_index_raw': 1.3888888888888888, 'predicted_index_per_token': 1.3592592592592592, 'predicted_index_per_char': 1.4444444444444444, 'predicted_index_per_byte': 1.4444444444444444, 'predicted_index_uncond': 1.5962962962962963, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.1962962962962963, 'acc_per_token': 0.2, 'acc_per_char': 0.15925925925925927, 'acc_per_byte': 0.15555555555555556, 'acc_uncond': 0.25555555555555554, 'no_answer': 0.0, 'sum_logits_corr': -15.42722586525811, 'logits_per_token_corr': -6.28006984624327, 'logits_per_char_corr': -2.6605242991359352, 'bits_per_byte_corr': 3.7847619630414373, 'correct_prob': 0.0014671952563529783, 'correct_prob_per_token': 0.01311728775469019, 'correct_prob_per_char': 0.0996876066954402, 'margin': -0.0024946355316162144, 'margin_per_token': -0.00900093001368009, 'margin_per_char': -0.047345782633983986, 'total_prob': 0.007223586093920832, 'total_prob_per_token': 0.057946238587610444, 'total_prob_per_char': 0.4225277581624362, 'uncond_correct_prob': 0.00020669734327101683, 'uncond_correct_prob_per_token': 0.005509823956679889, 'uncond_correct_prob_per_char': 0.0582372037557639, 'uncond_total_prob': 0.0012962346999847367, 'norm_correct_prob': 0.18335112921412958, 'norm_correct_prob_per_token': 0.21447709869123957, 'norm_correct_prob_per_char': 0.2281799444796336, 'primary_metric': 0.1962962962962963}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 8,750 | small aux 2 | 5xC | 573,440,000 | 34,063,822,356,480,000 |
{'predicted_index_raw': 1.3037037037037038, 'predicted_index_per_token': 1.2962962962962963, 'predicted_index_per_char': 1.4222222222222223, 'predicted_index_per_byte': 1.4148148148148147, 'predicted_index_uncond': 1.5333333333333334, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.14814814814814814, 'acc_per_token': 0.16666666666666666, 'acc_per_char': 0.14444444444444443, 'acc_per_byte': 0.14444444444444443, 'acc_uncond': 0.25555555555555554, 'no_answer': 0.0, 'sum_logits_corr': -15.099157529407078, 'logits_per_token_corr': -6.320908819207477, 'logits_per_char_corr': -2.6377048404718955, 'bits_per_byte_corr': 3.75480275422742, 'correct_prob': 0.001178639365743944, 'correct_prob_per_token': 0.0152763754178207, 'correct_prob_per_char': 0.09829897270151283, 'margin': -0.003042566354188342, 'margin_per_token': -0.011285926130687553, 'margin_per_char': -0.046299139714733754, 'total_prob': 0.007223859068580867, 'total_prob_per_token': 0.06769576729810063, 'total_prob_per_char': 0.41948037919873127, 'uncond_correct_prob': 0.0009567497258353839, 'uncond_correct_prob_per_token': 0.006888025879851869, 'uncond_correct_prob_per_char': 0.07509718613782462, 'uncond_total_prob': 0.0055157420107914265, 'norm_correct_prob': 0.17455116100439727, 'norm_correct_prob_per_token': 0.21167975314388543, 'norm_correct_prob_per_char': 0.22909600262423419, 'primary_metric': 0.14814814814814814}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 8,750 | small aux 3 | 5xC | 573,440,000 | 34,063,822,356,480,000 |
{'predicted_index_raw': 1.3148148148148149, 'predicted_index_per_token': 1.3037037037037038, 'predicted_index_per_char': 1.4666666666666666, 'predicted_index_per_byte': 1.462962962962963, 'predicted_index_uncond': 1.5962962962962963, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.1814814814814815, 'acc_per_token': 0.1925925925925926, 'acc_per_char': 0.17037037037037037, 'acc_per_byte': 0.16296296296296298, 'acc_uncond': 0.26296296296296295, 'no_answer': 0.0, 'sum_logits_corr': -14.114560985565186, 'logits_per_token_corr': -5.682240913990096, 'logits_per_char_corr': -2.379080607877898, 'bits_per_byte_corr': 3.382919164787615, 'correct_prob': 0.0033150323130471636, 'correct_prob_per_token': 0.019266767290047886, 'correct_prob_per_char': 0.12059815093411398, 'margin': -0.010151497418431512, 'margin_per_token': -0.01796252770628033, 'margin_per_char': -0.05971469567273707, 'total_prob': 0.02069719605979182, 'total_prob_per_token': 0.08793814329198875, 'total_prob_per_char': 0.5188241246707989, 'uncond_correct_prob': 0.00047404760788064107, 'uncond_correct_prob_per_token': 0.006688387593508612, 'uncond_correct_prob_per_char': 0.06964610071596918, 'uncond_total_prob': 0.0029637530710921837, 'norm_correct_prob': 0.18178648033876071, 'norm_correct_prob_per_token': 0.21081786696361557, 'norm_correct_prob_per_char': 0.22888055116500886, 'primary_metric': 0.1814814814814815}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 8,750 | default | 5xC | 573,440,000 | 34,063,822,356,480,000 |
{'predicted_index_raw': 1.4703703703703703, 'predicted_index_per_token': 1.488888888888889, 'predicted_index_per_char': 1.5814814814814815, 'predicted_index_per_byte': 1.5703703703703704, 'predicted_index_uncond': 1.6592592592592592, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.17777777777777778, 'acc_per_token': 0.18518518518518517, 'acc_per_char': 0.1814814814814815, 'acc_per_byte': 0.1814814814814815, 'acc_uncond': 0.2222222222222222, 'no_answer': 0.0, 'sum_logits_corr': -14.931759865195662, 'logits_per_token_corr': -5.808809596422258, 'logits_per_char_corr': -2.486656972512496, 'bits_per_byte_corr': 3.5330522528404784, 'correct_prob': 0.0028792581204595187, 'correct_prob_per_token': 0.016090481663307732, 'correct_prob_per_char': 0.11712847953992336, 'margin': -0.0048586351528532076, 'margin_per_token': -0.012750179657281736, 'margin_per_char': -0.05746516191711285, 'total_prob': 0.014077165801506891, 'total_prob_per_token': 0.07045945295790546, 'total_prob_per_char': 0.49498961434410627, 'uncond_correct_prob': 9.814506575619776e-05, 'uncond_correct_prob_per_token': 0.004793634302620859, 'uncond_correct_prob_per_char': 0.05114267498116801, 'uncond_total_prob': 0.0006254761192949588, 'norm_correct_prob': 0.18774185316028516, 'norm_correct_prob_per_token': 0.21185658402863067, 'norm_correct_prob_per_char': 0.22813541842566817, 'primary_metric': 0.17777777777777778}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 10,000 | small aux 2 | 5xC | 655,360,000 | 38,930,082,693,120,000 |
{'predicted_index_raw': 1.325925925925926, 'predicted_index_per_token': 1.3518518518518519, 'predicted_index_per_char': 1.4222222222222223, 'predicted_index_per_byte': 1.4074074074074074, 'predicted_index_uncond': 1.4962962962962962, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.15555555555555556, 'acc_per_token': 0.1814814814814815, 'acc_per_char': 0.17037037037037037, 'acc_per_byte': 0.17777777777777778, 'acc_uncond': 0.2777777777777778, 'no_answer': 0.0, 'sum_logits_corr': -14.560970188070227, 'logits_per_token_corr': -6.1523801874606034, 'logits_per_char_corr': -2.5533259820673924, 'bits_per_byte_corr': 3.6338344858386598, 'correct_prob': 0.0015980390344575563, 'correct_prob_per_token': 0.01768149194824221, 'correct_prob_per_char': 0.10510201600550509, 'margin': -0.005099959630235297, 'margin_per_token': -0.012874674452836462, 'margin_per_char': -0.04746333533380712, 'total_prob': 0.010518269536077937, 'total_prob_per_token': 0.0776565135782328, 'total_prob_per_char': 0.44784298743243567, 'uncond_correct_prob': 0.0008578695905337305, 'uncond_correct_prob_per_token': 0.005733390269415213, 'uncond_correct_prob_per_char': 0.07022467666694013, 'uncond_total_prob': 0.005181689296822449, 'norm_correct_prob': 0.17385466547712686, 'norm_correct_prob_per_token': 0.21266857530380592, 'norm_correct_prob_per_char': 0.23222321455782755, 'primary_metric': 0.15555555555555556}
| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 10,000 | small aux 3 | 5xC | 655,360,000 | 38,930,082,693,120,000 |
{'predicted_index_raw': 1.2814814814814814, 'predicted_index_per_token': 1.2851851851851852, 'predicted_index_per_char': 1.4, 'predicted_index_per_byte': 1.4111111111111112, 'predicted_index_uncond': 1.5407407407407407, 'correct_choice': 1.5777777777777777, 'acc_raw': 0.1814814814814815, 'acc_per_token': 0.1962962962962963, 'acc_per_char': 0.18518518518518517, 'acc_per_byte': 0.18518518518518517, 'acc_uncond': 0.2518518518518518, 'no_answer': 0.0, 'sum_logits_corr': -13.768968832934345, 'logits_per_token_corr': -5.704946057273667, 'logits_per_char_corr': -2.3737887430083884, 'bits_per_byte_corr': 3.3783560132372905, 'correct_prob': 0.0035189672966777336, 'correct_prob_per_token': 0.02591850266272441, 'correct_prob_per_char': 0.1272775637306065, 'margin': -0.011132719318849665, 'margin_per_token': -0.021598971447085835, 'margin_per_char': -0.06490188976685486, 'total_prob': 0.022262936648598686, 'total_prob_per_token': 0.11513461069546653, 'total_prob_per_char': 0.5447589982030517, 'uncond_correct_prob': 0.00033683904522785783, 'uncond_correct_prob_per_token': 0.006547294889636637, 'uncond_correct_prob_per_char': 0.06720616355510733, 'uncond_total_prob': 0.00213177191138929, 'norm_correct_prob': 0.18120141214336735, 'norm_correct_prob_per_token': 0.21258841693191616, 'norm_correct_prob_per_char': 0.23213928378852267, 'primary_metric': 0.1814814814814815}
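Each reconstructed row above is a pipe-delimited metadata line followed by its metrics dict on the next line. Below is a minimal parsing sketch, assuming that two-line layout and the column order params, data, task, step, seed, chinchilla, tokens, compute, metrics; the variable names and the truncated metrics string are illustrative, not part of the dataset's tooling.

import ast

# Hypothetical example of one record from this table: the pipe-delimited metadata
# line and a truncated stand-in for the metrics dict that follows it.
metadata_line = "| 10M | DCLM-Baseline | mmlu_high_school_mathematics | 10,000 | small aux 3 | 5xC | 655,360,000 | 38,930,082,693,120,000 |"
metrics_line = "{'acc_raw': 0.1814814814814815, 'primary_metric': 0.1814814814814815}"

# Split the metadata cells and drop the empty strings created by the leading/trailing pipes.
cells = [c.strip() for c in metadata_line.split("|") if c.strip()]
params, data, task, step, seed, chinchilla, tokens, compute = cells

record = {
    "params": params,
    "data": data,
    "task": task,
    "step": int(step.replace(",", "")),       # "10,000" -> 10000
    "seed": seed,
    "chinchilla": chinchilla,
    "tokens": int(tokens.replace(",", "")),
    "compute": float(compute.replace(",", "")),
    # The metrics cell is a Python dict literal, so ast.literal_eval parses it safely.
    "metrics": ast.literal_eval(metrics_line),
}

print(record["task"], record["step"], record["metrics"]["primary_metric"])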