Modalities: Tabular, Text
Formats: Parquet
Languages: English
Libraries: Datasets, pandas
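Since the card lists the `datasets` and `pandas` libraries, a minimal loading sketch follows; note that `org/dataset-name` is a placeholder, as this excerpt of the card does not name the repository id.

```python
# Minimal loading sketch; "org/dataset-name" is a placeholder for the
# actual repository id, which this excerpt does not name.
from datasets import load_dataset

ds = load_dataset("org/dataset-name", split="train")
df = ds.to_pandas()  # the table is Parquet-backed, so DataFrame conversion is cheap

print(df.shape)
print(df[["task", "model", "primary_score"]].head())
```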
Dataset Viewer (auto-converted to Parquet)
The Parquet table exposes the following columns:

| Column | Type | Values / range |
| --- | --- | --- |
| task | stringclasses | 244 values |
| model | stringclasses | 372 values |
| model_type | stringclasses | 4 values |
| observational_model | bool | 2 classes |
| extracted_size | float64 | 4M to 110B |
| primary_score | float64 | 0 to 15 |
| logits_per_byte_corr | float64 | 0 to 20.7 |
| logits_per_char_corr | float64 | -14.31 to -0 |
| primary_metric | stringclasses | 10 values |
| task_category | stringclasses | 6 values |
| num_instances | int64 | 1 to 152k |
| num_tokens | float64 | 0 to 666 |
| processing_time | float64 | 0.02 to 143k |
| model_path | stringclasses | 365 values |
| model_revision | stringclasses | 528 values |
| model_params | float64 | 3.74M to 110B |
| model_tokens | float64 | 375M to 18,000B |
| flops | float64 | 8,430,222B to 7,776,000,000,000,000B |
| step | float64 | 0 to 1.91M |
| mix | stringclasses | 30 values |
| size | stringclasses | 14 values |
| token_ratio | stringclasses | 6 values |
| model_config | dict | per-run loading configuration |
| metrics | dict | full metric record (mostly null per row) |
| __index_level_0__ | int64 | 2 to 451k |
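Given that schema, a typical slice-and-aggregate over the table looks like the sketch below. Column semantics are inferred from the schema and the sample rows; the particular grouping is illustrative, not an official recipe.

```python
# Compare data mixes on one task, restricted to the "datadecide" ladder runs.
# Uses the `df` DataFrame from the loading sketch above.
aqua = df[df["task"] == "agi_eval_aqua-rat:mc"]
ladder = aqua[aqua["model_type"] == "datadecide"]

summary = (
    ladder.groupby(["mix", "size"])["primary_score"]
    .mean()
    .sort_values(ascending=False)
)
print(summary.head(10))
```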
agi_eval_aqua-rat:mc
AMD-Llama-135m
external
false
135,000,000
0.244094
1.801358
-1.248606
acc_raw
knowledge
254
0
13.340763
amd/AMD-Llama-135m
main
null
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "AMD-Llama-135m", "model_size": null }, "model": "amd/AMD-Llama-135m", "model_path": "amd/AMD-Llama-135m", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.2440944881889764, "acc_per_token": 0.2440944881889764, "acc_raw": 0.2440944881889764, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.8013582147420124, "logits_per_char_corr": -1.2486063677260255, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2440944881889764, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
2
agi_eval_aqua-rat:mc
Bielik-11B-v2
external
false
11,000,000,000
0.322835
1.110676
-0.769862
acc_raw
knowledge
254
0
42.810966
speakleash/Bielik-11B-v2
main
null
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "Bielik-11B-v2", "model_size": null }, "model": "speakleash/Bielik-11B-v2", "model_path": "speakleash/Bielik-11B-v2", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.3228346456692913, "acc_per_token": 0.3228346456692913, "acc_raw": 0.3228346456692913, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1106755818129639, "logits_per_char_corr": -0.7698616480498802, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.3228346456692913, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
3
agi_eval_aqua-rat:mc
CodeQwen1.5-7B
external
true
7,000,000,000
0.204724
1.169067
-0.810336
acc_raw
knowledge
254
0
0.022493
Qwen/CodeQwen1.5-7B
main
7,000,000,000
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": 0.7, "max_length": 2048, "metadata": { "alias": "codeqwen1.5-7b", "model_size": null }, "model": "Qwen/CodeQwen1.5-7B", "model_path": "Qwen/CodeQwen1.5-7B", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.2047244094488189, "acc_per_token": 0.2047244094488189, "acc_raw": 0.2047244094488189, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.169067260590209, "logits_per_char_corr": -0.8103356755624606, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2047244094488189, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
5
agi_eval_aqua-rat:mc
DCLM-baseline-150M-5xC-2
datadecide
false
150,000,000
0.216535
1.34401
-0.931596
acc_raw
knowledge
254
0
3.202815
allenai/DataDecide-dclm-baseline-150M
step33750-seed-default
151,898,880
15,003,942,912
13,674,492,743,500,431,000
33,750
DCLM-baseline
150M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-150M-5xC-2/step33750-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-150M-5xC-2/step33750-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.21653543307086615, "acc_per_token": 0.21653543307086615, "acc_raw": 0.21653543307086615, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.344009644851876, "logits_per_char_corr": -0.9315964959737822, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.21653543307086615, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
7
agi_eval_aqua-rat:mc
DCLM-baseline-150M-5xC-2
datadecide
false
150,000,000
0.200787
1.355991
-0.939901
acc_raw
knowledge
254
0
3.197284
allenai/DataDecide-dclm-baseline-150M
step35000-seed-default
151,898,880
15,003,942,912
13,674,492,743,500,431,000
35,000
DCLM-baseline
150M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-150M-5xC-2/step35000-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-150M-5xC-2/step35000-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.20078740157480315, "acc_per_token": 0.20078740157480315, "acc_raw": 0.20078740157480315, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.3559909294687242, "logits_per_char_corr": -0.9399012896254306, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.20078740157480315, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
8
agi_eval_aqua-rat:mc
DCLM-baseline-150M-5xC-2
datadecide
false
150,000,000
0.228346
1.381415
-0.957524
acc_raw
knowledge
254
0
0.020857
allenai/DataDecide-dclm-baseline-150M
step36250-seed-default
151,898,880
15,003,942,912
13,674,492,743,500,431,000
36,250
DCLM-baseline
150M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-150M-5xC-2/step36250-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-150M-5xC-2/step36250-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.2283464566929134, "acc_per_token": 0.2283464566929134, "acc_raw": 0.2283464566929134, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.381415227495271, "logits_per_char_corr": -0.9575240701202332, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2283464566929134, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
9
agi_eval_aqua-rat:mc
DCLM-baseline-150M-5xC-2
datadecide
false
150,000,000
0.216535
1.350656
-0.936204
acc_raw
knowledge
254
0
0.020318
allenai/DataDecide-dclm-baseline-150M
step37500-seed-default
151,898,880
15,003,942,912
13,674,492,743,500,431,000
37,500
DCLM-baseline
150M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-150M-5xC-2/step37500-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-150M-5xC-2/step37500-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.21653543307086615, "acc_per_token": 0.21653543307086615, "acc_raw": 0.21653543307086615, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.3506562272133114, "logits_per_char_corr": -0.9362035557979674, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.21653543307086615, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
10
agi_eval_aqua-rat:mc
DCLM-baseline-150M-5xC-2
datadecide
false
150,000,000
0.224409
1.335796
-0.925903
acc_raw
knowledge
254
0
0.019656
allenai/DataDecide-dclm-baseline-150M
step38157-seed-default
151,898,880
15,003,942,912
13,674,492,743,500,431,000
38,157
DCLM-baseline
150M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-150M-5xC-2/step38157-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-150M-5xC-2/step38157-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.22440944881889763, "acc_per_token": 0.22440944881889763, "acc_raw": 0.22440944881889763, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.3357956630542065, "logits_per_char_corr": -0.9259029976495607, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.22440944881889763, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
11
agi_eval_aqua-rat:mc
DCLM-baseline-1B-5xC-2
datadecide
false
1,000,000,000
0.212598
1.189306
-0.824364
acc_raw
knowledge
254
0
0.020006
allenai/DataDecide-dclm-baseline-1B
step60000-seed-default
1,176,832,000
100,015,669,248
706,209,840,434,774,000,000
60,000
DCLM-baseline
1B
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-1B-5xC-2/step60000-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-1B-5xC-2/step60000-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.2125984251968504, "acc_per_token": 0.2125984251968504, "acc_raw": 0.2125984251968504, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.189305730028414, "logits_per_char_corr": -0.8243639135923911, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2125984251968504, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
12
agi_eval_aqua-rat:mc
DCLM-baseline-1B-5xC-2
datadecide
false
1,000,000,000
0.216535
1.175305
-0.814659
acc_raw
knowledge
254
0
0.019589
allenai/DataDecide-dclm-baseline-1B
step62500-seed-default
1,176,832,000
100,015,669,248
706,209,840,434,774,000,000
62,500
DCLM-baseline
1B
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-1B-5xC-2/step62500-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-1B-5xC-2/step62500-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.21653543307086615, "acc_per_token": 0.21653543307086615, "acc_raw": 0.21653543307086615, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1753049453969573, "logits_per_char_corr": -0.8146593091994758, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.21653543307086615, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
13
agi_eval_aqua-rat:mc
DCLM-baseline-1B-5xC-2
datadecide
false
1,000,000,000
0.208661
1.196495
-0.829347
acc_raw
knowledge
254
0
9.293114
allenai/DataDecide-dclm-baseline-1B
step65000-seed-default
1,176,832,000
100,015,669,248
706,209,840,434,774,000,000
65,000
DCLM-baseline
1B
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-1B-5xC-2/step65000-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-1B-5xC-2/step65000-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.20866141732283464, "acc_per_token": 0.20866141732283464, "acc_raw": 0.20866141732283464, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1964953652144739, "logits_per_char_corr": -0.8293473889508586, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.20866141732283464, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
14
agi_eval_aqua-rat:mc
DCLM-baseline-1B-5xC-2
datadecide
false
1,000,000,000
0.204724
1.207278
-0.836821
acc_raw
knowledge
254
0
9.289337
allenai/DataDecide-dclm-baseline-1B
step67500-seed-default
1,176,832,000
100,015,669,248
706,209,840,434,774,000,000
67,500
DCLM-baseline
1B
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-1B-5xC-2/step67500-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-1B-5xC-2/step67500-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.2047244094488189, "acc_per_token": 0.2047244094488189, "acc_raw": 0.2047244094488189, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.207277737951421, "logits_per_char_corr": -0.8368211602132152, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2047244094488189, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
15
agi_eval_aqua-rat:mc
DCLM-baseline-1B-5xC-2
datadecide
false
1,000,000,000
0.19685
1.209813
-0.838578
acc_raw
knowledge
254
0
9.296574
allenai/DataDecide-dclm-baseline-1B
step69369-seed-default
1,176,832,000
100,015,669,248
706,209,840,434,774,000,000
69,369
DCLM-baseline
1B
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-1B-5xC-2/step69369-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-1B-5xC-2/step69369-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.1968503937007874, "acc_per_token": 0.1968503937007874, "acc_raw": 0.1968503937007874, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.2098128075906427, "logits_per_char_corr": -0.8385783365861638, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.1968503937007874, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
16
agi_eval_aqua-rat:mc
DCLM-baseline-20M-5xC
datadecide
false
20,000,000
0.228346
1.628618
-1.128872
acc_raw
knowledge
254
0
2.381514
allenai/DataDecide-dclm-baseline-20M
step11250-seed-default
19,101,888
1,911,554,048
219,085,747,985,055,740
11,250
DCLM-baseline
20M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-20M-5xC/step11250-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-20M-5xC/step11250-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.2283464566929134, "acc_per_token": 0.2283464566929134, "acc_raw": 0.2283464566929134, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.6286175106505871, "logits_per_char_corr": -1.1288716357172004, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2283464566929134, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
17
agi_eval_aqua-rat:mc
DCLM-baseline-20M-5xC
datadecide
false
20,000,000
0.228346
1.596814
-1.106827
acc_raw
knowledge
254
0
2.399529
allenai/DataDecide-dclm-baseline-20M
step12500-seed-default
19,101,888
1,911,554,048
219,085,747,985,055,740
12,500
DCLM-baseline
20M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-20M-5xC/step12500-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-20M-5xC/step12500-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.2283464566929134, "acc_per_token": 0.2283464566929134, "acc_raw": 0.2283464566929134, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.5968144227385952, "logits_per_char_corr": -1.1068274149979194, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2283464566929134, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
18
agi_eval_aqua-rat:mc
DCLM-baseline-20M-5xC
datadecide
false
20,000,000
0.220472
1.54546
-1.071231
acc_raw
knowledge
254
0
2.354517
allenai/DataDecide-dclm-baseline-20M
step13750-seed-default
19,101,888
1,911,554,048
219,085,747,985,055,740
13,750
DCLM-baseline
20M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-20M-5xC/step13750-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-20M-5xC/step13750-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.2204724409448819, "acc_per_token": 0.2204724409448819, "acc_raw": 0.2204724409448819, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.5454603568097767, "logits_per_char_corr": -1.0712314889890941, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2204724409448819, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
19
agi_eval_aqua-rat:mc
DCLM-baseline-20M-5xC
datadecide
false
20,000,000
0.224409
1.557237
-1.079394
acc_raw
knowledge
254
0
2.358639
allenai/DataDecide-dclm-baseline-20M
step14584-seed-default
19,101,888
1,911,554,048
219,085,747,985,055,740
14,584
DCLM-baseline
20M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-20M-5xC/step14584-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-20M-5xC/step14584-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.22440944881889763, "acc_per_token": 0.22440944881889763, "acc_raw": 0.22440944881889763, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.557236664682855, "logits_per_char_corr": -1.0793942035887185, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.22440944881889763, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
20
agi_eval_aqua-rat:mc
DCLM-baseline-20M-5xC
datadecide
false
20,000,000
0.224409
1.579323
-1.094703
acc_raw
knowledge
254
0
2.420793
allenai/DataDecide-dclm-baseline-20M
step14594-seed-default
19,101,888
1,911,554,048
219,085,747,985,055,740
14,594
DCLM-baseline
20M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-20M-5xC/step14594-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-20M-5xC/step14594-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.22440944881889763, "acc_per_token": 0.22440944881889763, "acc_raw": 0.22440944881889763, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.5793227738701003, "logits_per_char_corr": -1.0947031279013852, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.22440944881889763, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
21
agi_eval_aqua-rat:mc
DCLM-baseline-300M-5xC-2
datadecide
false
300,000,000
0.248031
1.351363
-0.936694
acc_raw
knowledge
254
0
0.019636
allenai/DataDecide-dclm-baseline-300M
step41250-seed-default
319,980,544
30,006,968,320
57,609,876,280,946,200,000
41,250
DCLM-baseline
300M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-300M-5xC-2/step41250-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-300M-5xC-2/step41250-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.24803149606299213, "acc_per_token": 0.24803149606299213, "acc_raw": 0.24803149606299213, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.3513633811098535, "logits_per_char_corr": -0.9366937175275772, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.24803149606299213, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
22
agi_eval_aqua-rat:mc
DCLM-baseline-300M-5xC-2
datadecide
false
300,000,000
0.185039
1.384101
-0.959386
acc_raw
knowledge
254
0
0.019714
allenai/DataDecide-dclm-baseline-300M
step42500-seed-default
319,980,544
30,006,968,320
57,609,876,280,946,200,000
42,500
DCLM-baseline
300M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-300M-5xC-2/step42500-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-300M-5xC-2/step42500-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.18503937007874016, "acc_per_token": 0.18503937007874016, "acc_raw": 0.18503937007874016, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.384100862528545, "logits_per_char_corr": -0.9593856104715602, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.18503937007874016, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
23
agi_eval_aqua-rat:mc
DCLM-baseline-300M-5xC-2
datadecide
false
300,000,000
0.200787
1.341672
-0.929976
acc_raw
knowledge
254
0
4.469081
allenai/DataDecide-dclm-baseline-300M
step43750-seed-default
319,980,544
30,006,968,320
57,609,876,280,946,200,000
43,750
DCLM-baseline
300M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-300M-5xC-2/step43750-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-300M-5xC-2/step43750-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.20078740157480315, "acc_per_token": 0.20078740157480315, "acc_raw": 0.20078740157480315, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.3416720644479636, "logits_per_char_corr": -0.929976208707479, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.20078740157480315, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
24
agi_eval_aqua-rat:mc
DCLM-baseline-300M-5xC-2
datadecide
false
300,000,000
0.240157
1.371095
-0.95037
acc_raw
knowledge
254
0
0.021196
allenai/DataDecide-dclm-baseline-300M
step45000-seed-default
319,980,544
30,006,968,320
57,609,876,280,946,200,000
45,000
DCLM-baseline
300M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-300M-5xC-2/step45000-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-300M-5xC-2/step45000-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.24015748031496062, "acc_per_token": 0.24015748031496062, "acc_raw": 0.24015748031496062, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.3710947911870033, "logits_per_char_corr": -0.9503704887910152, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.24015748031496062, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
25
agi_eval_aqua-rat:mc
DCLM-baseline-300M-5xC-2
datadecide
false
300,000,000
0.228346
1.380095
-0.956609
acc_raw
knowledge
254
0
0.019809
allenai/DataDecide-dclm-baseline-300M
step45787-seed-default
319,980,544
30,006,968,320
57,609,876,280,946,200,000
45,787
DCLM-baseline
300M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-300M-5xC-2/step45787-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-300M-5xC-2/step45787-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.2283464566929134, "acc_per_token": 0.2283464566929134, "acc_raw": 0.2283464566929134, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.3800945759355039, "logits_per_char_corr": -0.9566086642150804, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2283464566929134, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
26
agi_eval_aqua-rat:mc
DCLM-baseline-4M-5xC
datadecide
false
4,000,000
0.248031
4.348393
-3.014076
acc_raw
knowledge
254
0
2.186376
allenai/DataDecide-dclm-baseline-4M
step3750-seed-default
3,744,832
375,193,600
8,430,221,996,851,200
3,750
DCLM-baseline
4M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-4M-5xC/step3750-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-4M-5xC/step3750-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.24803149606299213, "acc_per_token": 0.24803149606299213, "acc_raw": 0.24803149606299213, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 4.348392720331589, "logits_per_char_corr": -3.014076154063067, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.24803149606299213, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
27
agi_eval_aqua-rat:mc
DCLM-baseline-4M-5xC
datadecide
false
4,000,000
0.248031
3.98533
-2.76242
acc_raw
knowledge
254
0
2.148501
allenai/DataDecide-dclm-baseline-4M
step5000-seed-default
3,744,832
375,193,600
8,430,221,996,851,200
5,000
DCLM-baseline
4M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-4M-5xC/step5000-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-4M-5xC/step5000-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.24803149606299213, "acc_per_token": 0.24803149606299213, "acc_raw": 0.24803149606299213, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 3.9853301359847357, "logits_per_char_corr": -2.7624203473564206, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.24803149606299213, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
28
agi_eval_aqua-rat:mc
DCLM-baseline-4M-5xC
datadecide
false
4,000,000
0.248031
4.09368
-2.837523
acc_raw
knowledge
254
0
2.169446
allenai/DataDecide-dclm-baseline-4M
step5725-seed-default
3,744,832
375,193,600
8,430,221,996,851,200
5,725
DCLM-baseline
4M
5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-4M-5xC/step5725-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-4M-5xC/step5725-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.24803149606299213, "acc_per_token": 0.24803149606299213, "acc_raw": 0.24803149606299213, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 4.093680416000813, "logits_per_char_corr": -2.837523038462391, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.24803149606299213, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 29
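
For the DataDecide ladder rows, the flops column is consistent with the standard 6ND training-compute approximation; for the record above, 6 × 3,744,832 params × 375,193,600 tokens reproduces the flops value exactly. A minimal check, with the values copied from the record above:

```python
# 6*N*D sanity check for the DCLM-baseline-4M-5xC record above.
params = 3_744_832                 # model_params
tokens = 375_193_600               # model_tokens
flops = 8_430_221_996_851_200     # flops column

assert 6 * params * tokens == flops  # holds exactly for this row
```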

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-4M-5xC | model_type: datadecide | observational_model: false | extracted_size: 4,000,000
primary_score: 0.248031 | logits_per_byte_corr: 4.104833 | logits_per_char_corr: -2.845253 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 2.16977
model_path: allenai/DataDecide-dclm-baseline-4M | model_revision: step5735-seed-default | model_params: 3,744,832 | model_tokens: 375,193,600 | flops: 8,430,221,996,851,200 | step: 5,735 | mix: DCLM-baseline | size: 4M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-4M-5xC/step5735-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-4M-5xC/step5735-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.24803149606299213, "acc_per_token": 0.24803149606299213, "acc_raw": 0.24803149606299213, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 4.104832800270284, "logits_per_char_corr": -2.8452532821752894, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.24803149606299213, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 30
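
The acc_raw scores are exact fractions over the 254 AQuA-RAT instances; for example, the full-precision 0.24803149606299213 in the metrics JSON above is 63/254. A minimal check:

```python
# acc_raw is a correct-answer count over num_instances = 254.
score, n = 0.24803149606299213, 254
correct = round(score * n)            # -> 63 correct answers
assert abs(correct / n - score) < 1e-12
```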

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-4M-5xC | model_type: datadecide | observational_model: false | extracted_size: 4,000,000
primary_score: 0.248031 | logits_per_byte_corr: 4.078714 | logits_per_char_corr: -2.827149 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 2.171196
model_path: allenai/DataDecide-dclm-baseline-4M | model_revision: step5745-seed-default | model_params: 3,744,832 | model_tokens: 375,193,600 | flops: 8,430,221,996,851,200 | step: 5,745 | mix: DCLM-baseline | size: 4M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-4M-5xC/step5745-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-4M-5xC/step5745-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.24803149606299213, "acc_per_token": 0.24803149606299213, "acc_raw": 0.24803149606299213, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 4.078713669143681, "logits_per_char_corr": -2.8271488800762206, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.24803149606299213, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 31

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-530M-5xC-2 | model_type: datadecide | observational_model: false | extracted_size: 530,000,000
primary_score: 0.19685 | logits_per_byte_corr: 1.182045 | logits_per_char_corr: -0.819331 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 0.019724
model_path: allenai/DataDecide-dclm-baseline-530M | model_revision: step53750-seed-default | model_params: 530,074,944 | model_tokens: 53,018,886,144 | flops: 168,623,898,622,339,060,000 | step: 53,750 | mix: DCLM-baseline | size: 530M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-530M-5xC-2/step53750-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-530M-5xC-2/step53750-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.1968503937007874, "acc_per_token": 0.1968503937007874, "acc_raw": 0.1968503937007874, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1820452874182867, "logits_per_char_corr": -0.8193313582675663, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.1968503937007874, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 32

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-530M-5xC-2 | model_type: datadecide | observational_model: false | extracted_size: 530,000,000
primary_score: 0.185039 | logits_per_byte_corr: 1.180775 | logits_per_char_corr: -0.818451 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 0.019744
model_path: allenai/DataDecide-dclm-baseline-530M | model_revision: step55000-seed-default | model_params: 530,074,944 | model_tokens: 53,018,886,144 | flops: 168,623,898,622,339,060,000 | step: 55,000 | mix: DCLM-baseline | size: 530M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-530M-5xC-2/step55000-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-530M-5xC-2/step55000-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.18503937007874016, "acc_per_token": 0.18503937007874016, "acc_raw": 0.18503937007874016, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.180775394608196, "logits_per_char_corr": -0.8184511356466398, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.18503937007874016, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 33

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-530M-5xC-2 | model_type: datadecide | observational_model: false | extracted_size: 530,000,000
primary_score: 0.181102 | logits_per_byte_corr: 1.172968 | logits_per_char_corr: -0.81304 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 0.01976
model_path: allenai/DataDecide-dclm-baseline-530M | model_revision: step56250-seed-default | model_params: 530,074,944 | model_tokens: 53,018,886,144 | flops: 168,623,898,622,339,060,000 | step: 56,250 | mix: DCLM-baseline | size: 530M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-530M-5xC-2/step56250-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-530M-5xC-2/step56250-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.18110236220472442, "acc_per_token": 0.18110236220472442, "acc_raw": 0.18110236220472442, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1729681165707704, "logits_per_char_corr": -0.8130395428871545, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.18110236220472442, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 34

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-530M-5xC-2 | model_type: datadecide | observational_model: false | extracted_size: 530,000,000
primary_score: 0.173228 | logits_per_byte_corr: 1.180914 | logits_per_char_corr: -0.818547 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 0.021867
model_path: allenai/DataDecide-dclm-baseline-530M | model_revision: step57500-seed-default | model_params: 530,074,944 | model_tokens: 53,018,886,144 | flops: 168,623,898,622,339,060,000 | step: 57,500 | mix: DCLM-baseline | size: 530M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-530M-5xC-2/step57500-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-530M-5xC-2/step57500-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.1732283464566929, "acc_per_token": 0.1732283464566929, "acc_raw": 0.1732283464566929, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1809135271462063, "logits_per_char_corr": -0.8185468818259052, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.1732283464566929, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 35

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-530M-5xC-2 | model_type: datadecide | observational_model: false | extracted_size: 530,000,000
primary_score: 0.177165 | logits_per_byte_corr: 1.17602 | logits_per_char_corr: -0.815155 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 0.020405
model_path: allenai/DataDecide-dclm-baseline-530M | model_revision: step57776-seed-default | model_params: 530,074,944 | model_tokens: 53,018,886,144 | flops: 168,623,898,622,339,060,000 | step: 57,776 | mix: DCLM-baseline | size: 530M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-530M-5xC-2/step57776-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-530M-5xC-2/step57776-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.17716535433070865, "acc_per_token": 0.17716535433070865, "acc_raw": 0.17716535433070865, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1760203014778967, "logits_per_char_corr": -0.8151551562500751, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.17716535433070865, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 36

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-60M-5xC | model_type: datadecide | observational_model: false | extracted_size: 60,000,000
primary_score: 0.181102 | logits_per_byte_corr: 1.616982 | logits_per_char_corr: -1.120806 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 2.961163
model_path: allenai/DataDecide-dclm-baseline-60M | model_revision: step26250-seed-default | model_params: 57,078,144 | model_tokens: 5,709,889,536 | flops: 1,955,459,382,959,407,000 | step: 26,250 | mix: DCLM-baseline | size: 60M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-60M-5xC/step26250-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-60M-5xC/step26250-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.18110236220472442, "acc_per_token": 0.18110236220472442, "acc_raw": 0.18110236220472442, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.61698179740967, "logits_per_char_corr": -1.1208063738904601, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.18110236220472442, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 37

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-60M-5xC | model_type: datadecide | observational_model: false | extracted_size: 60,000,000
primary_score: 0.181102 | logits_per_byte_corr: 1.54322 | logits_per_char_corr: -1.069678 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 2.921052
model_path: allenai/DataDecide-dclm-baseline-60M | model_revision: step27500-seed-default | model_params: 57,078,144 | model_tokens: 5,709,889,536 | flops: 1,955,459,382,959,407,000 | step: 27,500 | mix: DCLM-baseline | size: 60M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-60M-5xC/step27500-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-60M-5xC/step27500-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.18110236220472442, "acc_per_token": 0.18110236220472442, "acc_raw": 0.18110236220472442, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.5432198548569511, "logits_per_char_corr": -1.069678491377455, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.18110236220472442, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 38

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-60M-5xC | model_type: datadecide | observational_model: false | extracted_size: 60,000,000
primary_score: 0.185039 | logits_per_byte_corr: 1.533246 | logits_per_char_corr: -1.062765 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 2.948492
model_path: allenai/DataDecide-dclm-baseline-60M | model_revision: step28750-seed-default | model_params: 57,078,144 | model_tokens: 5,709,889,536 | flops: 1,955,459,382,959,407,000 | step: 28,750 | mix: DCLM-baseline | size: 60M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-60M-5xC/step28750-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-60M-5xC/step28750-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.18503937007874016, "acc_per_token": 0.18503937007874016, "acc_raw": 0.18503937007874016, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.533245851869161, "logits_per_char_corr": -1.0627650393275765, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.18503937007874016, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 39

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-60M-5xC | model_type: datadecide | observational_model: false | extracted_size: 60,000,000
primary_score: 0.185039 | logits_per_byte_corr: 1.538885 | logits_per_char_corr: -1.066674 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 2.936947
model_path: allenai/DataDecide-dclm-baseline-60M | model_revision: step29042-seed-default | model_params: 57,078,144 | model_tokens: 5,709,889,536 | flops: 1,955,459,382,959,407,000 | step: 29,042 | mix: DCLM-baseline | size: 60M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-60M-5xC/step29042-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-60M-5xC/step29042-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.18503937007874016, "acc_per_token": 0.18503937007874016, "acc_raw": 0.18503937007874016, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.5388849139038778, "logits_per_char_corr": -1.066673739277941, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.18503937007874016, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 40

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-60M-5xC | model_type: datadecide | observational_model: false | extracted_size: 60,000,000
primary_score: 0.185039 | logits_per_byte_corr: 1.540237 | logits_per_char_corr: -1.067611 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 0.019852
model_path: allenai/DataDecide-dclm-baseline-60M | model_revision: step29052-seed-default | model_params: 57,078,144 | model_tokens: 5,709,889,536 | flops: 1,955,459,382,959,407,000 | step: 29,052 | mix: DCLM-baseline | size: 60M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-60M-5xC/step29052-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-60M-5xC/step29052-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.18503937007874016, "acc_per_token": 0.18503937007874016, "acc_raw": 0.18503937007874016, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.540236868760577, "logits_per_char_corr": -1.067610842975106, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.18503937007874016, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 41

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-750M-5xC-2 | model_type: datadecide | observational_model: false | extracted_size: 750,000,000
primary_score: 0.188976 | logits_per_byte_corr: 1.173647 | logits_per_char_corr: -0.81351 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 6.665378
model_path: allenai/DataDecide-dclm-baseline-750M | model_revision: step58750-seed-default | model_params: 681,297,408 | model_tokens: 75,012,636,672 | flops: 306,635,489,591,276,100,000 | step: 58,750 | mix: DCLM-baseline | size: 750M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-750M-5xC-2/step58750-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-750M-5xC-2/step58750-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.1889763779527559, "acc_per_token": 0.1889763779527559, "acc_raw": 0.1889763779527559, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1736470627734574, "logits_per_char_corr": -0.8135101525332984, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.1889763779527559, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 42

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-750M-5xC-2 | model_type: datadecide | observational_model: false | extracted_size: 750,000,000
primary_score: 0.200787 | logits_per_byte_corr: 1.177345 | logits_per_char_corr: -0.816074 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 6.499924
model_path: allenai/DataDecide-dclm-baseline-750M | model_revision: step60000-seed-default | model_params: 681,297,408 | model_tokens: 75,012,636,672 | flops: 306,635,489,591,276,100,000 | step: 60,000 | mix: DCLM-baseline | size: 750M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-750M-5xC-2/step60000-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-750M-5xC-2/step60000-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.20078740157480315, "acc_per_token": 0.20078740157480315, "acc_raw": 0.20078740157480315, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1773453942864915, "logits_per_char_corr": -0.8160736405943322, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.20078740157480315, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 43

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-750M-5xC-2 | model_type: datadecide | observational_model: false | extracted_size: 750,000,000
primary_score: 0.200787 | logits_per_byte_corr: 1.17626 | logits_per_char_corr: -0.815321 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 6.415148
model_path: allenai/DataDecide-dclm-baseline-750M | model_revision: step61250-seed-default | model_params: 681,297,408 | model_tokens: 75,012,636,672 | flops: 306,635,489,591,276,100,000 | step: 61,250 | mix: DCLM-baseline | size: 750M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-750M-5xC-2/step61250-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-750M-5xC-2/step61250-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.20078740157480315, "acc_per_token": 0.20078740157480315, "acc_raw": 0.20078740157480315, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1762602470681205, "logits_per_char_corr": -0.8153214738594265, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.20078740157480315, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 44

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-750M-5xC-2 | model_type: datadecide | observational_model: false | extracted_size: 750,000,000
primary_score: 0.181102 | logits_per_byte_corr: 1.181854 | logits_per_char_corr: -0.819199 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 0.019875
model_path: allenai/DataDecide-dclm-baseline-750M | model_revision: step62500-seed-default | model_params: 681,297,408 | model_tokens: 75,012,636,672 | flops: 306,635,489,591,276,100,000 | step: 62,500 | mix: DCLM-baseline | size: 750M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-750M-5xC-2/step62500-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-750M-5xC-2/step62500-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.18110236220472442, "acc_per_token": 0.18110236220472442, "acc_raw": 0.18110236220472442, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1818543690713037, "logits_per_char_corr": -0.8191990237536393, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.18110236220472442, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 45

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-750M-5xC-2 | model_type: datadecide | observational_model: false | extracted_size: 750,000,000
primary_score: 0.188976 | logits_per_byte_corr: 1.178986 | logits_per_char_corr: -0.817211 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 6.547774
model_path: allenai/DataDecide-dclm-baseline-750M | model_revision: step63589-seed-default | model_params: 681,297,408 | model_tokens: 75,012,636,672 | flops: 306,635,489,591,276,100,000 | step: 63,589 | mix: DCLM-baseline | size: 750M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-750M-5xC-2/step63589-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/benb/DCLM-baseline-750M-5xC-2/step63589-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.1889763779527559, "acc_per_token": 0.1889763779527559, "acc_raw": 0.1889763779527559, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.178985787493173, "logits_per_char_corr": -0.8172106745205526, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.1889763779527559, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 46

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-90M-5xC | model_type: datadecide | observational_model: false | extracted_size: 90,000,000
primary_score: 0.19685 | logits_per_byte_corr: 1.42521 | logits_per_char_corr: -0.98788 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 0.019603
model_path: allenai/DataDecide-dclm-baseline-90M | model_revision: step25000-seed-default | model_params: 97,946,640 | model_tokens: 9,797,959,680 | flops: 5,758,063,377,068,851,000 | step: 25,000 | mix: DCLM-baseline | size: 90M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-90M-5xC/step25000-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-90M-5xC/step25000-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.1968503937007874, "acc_per_token": 0.1968503937007874, "acc_raw": 0.1968503937007874, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.4252096160668672, "logits_per_char_corr": -0.9878800270829614, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.1968503937007874, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 47

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-90M-5xC | model_type: datadecide | observational_model: false | extracted_size: 90,000,000
primary_score: 0.212598 | logits_per_byte_corr: 1.440884 | logits_per_char_corr: -0.998745 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 3.084481
model_path: allenai/DataDecide-dclm-baseline-90M | model_revision: step26250-seed-default | model_params: 97,946,640 | model_tokens: 9,797,959,680 | flops: 5,758,063,377,068,851,000 | step: 26,250 | mix: DCLM-baseline | size: 90M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-90M-5xC/step26250-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-90M-5xC/step26250-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.2125984251968504, "acc_per_token": 0.2125984251968504, "acc_raw": 0.2125984251968504, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.4408844564619765, "logits_per_char_corr": -0.998744998508551, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2125984251968504, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 48

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-90M-5xC | model_type: datadecide | observational_model: false | extracted_size: 90,000,000
primary_score: 0.188976 | logits_per_byte_corr: 1.393661 | logits_per_char_corr: -0.966012 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 3.051489
model_path: allenai/DataDecide-dclm-baseline-90M | model_revision: step27500-seed-default | model_params: 97,946,640 | model_tokens: 9,797,959,680 | flops: 5,758,063,377,068,851,000 | step: 27,500 | mix: DCLM-baseline | size: 90M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-90M-5xC/step27500-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-90M-5xC/step27500-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.1889763779527559, "acc_per_token": 0.1889763779527559, "acc_raw": 0.1889763779527559, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.3936606068764748, "logits_per_char_corr": -0.9660119203131968, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.1889763779527559, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 49

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-90M-5xC | model_type: datadecide | observational_model: false | extracted_size: 90,000,000
primary_score: 0.181102 | logits_per_byte_corr: 1.422052 | logits_per_char_corr: -0.985691 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 3.013278
model_path: allenai/DataDecide-dclm-baseline-90M | model_revision: step28750-seed-default | model_params: 97,946,640 | model_tokens: 9,797,959,680 | flops: 5,758,063,377,068,851,000 | step: 28,750 | mix: DCLM-baseline | size: 90M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-90M-5xC/step28750-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-90M-5xC/step28750-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.18110236220472442, "acc_per_token": 0.18110236220472442, "acc_raw": 0.18110236220472442, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.4220518514166365, "logits_per_char_corr": -0.9856912314187823, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.18110236220472442, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 50

task: agi_eval_aqua-rat:mc | model: DCLM-baseline-90M-5xC | model_type: datadecide | observational_model: false | extracted_size: 90,000,000
primary_score: 0.173228 | logits_per_byte_corr: 1.429273 | logits_per_char_corr: -0.990696 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 3.133163
model_path: allenai/DataDecide-dclm-baseline-90M | model_revision: step29901-seed-default | model_params: 97,946,640 | model_tokens: 9,797,959,680 | flops: 5,758,063,377,068,851,000 | step: 29,901 | mix: DCLM-baseline | size: 90M | token_ratio: 5xC
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": null, "model": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-90M-5xC/step29901-unsharded-hf", "model_path": "gs://ai2-llm/checkpoints/davidh/OLMo-ladder/davidh/DCLM-baseline-90M-5xC/step29901-unsharded-hf", "model_type": "hf", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.1732283464566929, "acc_per_token": 0.1732283464566929, "acc_raw": 0.1732283464566929, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.4292726708151824, "logits_per_char_corr": -0.9906963220262152, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.1732283464566929, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 51
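
Across the rows shown here, the two logit columns appear to be the same quantity in different units: logits_per_byte_corr matches -logits_per_char_corr / ln 2 to rounding precision, consistent with per-char values in nats, per-byte values in bits, and a near 1:1 char-to-byte ratio on this English-language task. A minimal consistency check on two records above:

```python
import math

# (logits_per_byte_corr, logits_per_char_corr) pairs copied from records above.
pairs = [
    (4.104833, -2.845253),  # DCLM-baseline-4M-5xC, step 5,735
    (1.429273, -0.990696),  # DCLM-baseline-90M-5xC, step 29,901
]
for per_byte, per_char in pairs:
    # per_byte should equal -per_char converted from nats to bits.
    assert abs(per_byte + per_char / math.log(2)) < 1e-4
```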

task: agi_eval_aqua-rat:mc | model: DeciLM-7B | model_type: external | observational_model: false | extracted_size: 7,000,000,000
primary_score: 0.291339 | logits_per_byte_corr: 1.138913 | logits_per_char_corr: -0.789434 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 0.025726
model_path: Deci/DeciLM-7B | model_revision: main | model_params: null | model_tokens: null | flops: null | step: null | mix: null | size: null | token_ratio: null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "DeciLM-7B", "model_size": null }, "model": "Deci/DeciLM-7B", "model_path": "Deci/DeciLM-7B", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.29133858267716534, "acc_per_token": 0.29133858267716534, "acc_raw": 0.29133858267716534, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1389127177102036, "logits_per_char_corr": -0.7894341391841257, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.29133858267716534, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 52

task: agi_eval_aqua-rat:mc | model: Falcon3-10B-Base | model_type: external | observational_model: false | extracted_size: 10,000,000,000
primary_score: 0.385827 | logits_per_byte_corr: 1.042124 | logits_per_char_corr: -0.722345 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 41.165283
model_path: tiiuae/Falcon3-10B-Base | model_revision: main | model_params: null | model_tokens: null | flops: null | step: null | mix: null | size: null | token_ratio: null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "Falcon3-10B-Base", "model_size": null }, "model": "tiiuae/Falcon3-10B-Base", "model_path": "tiiuae/Falcon3-10B-Base", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.3858267716535433, "acc_per_token": 0.3858267716535433, "acc_raw": 0.3858267716535433, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.0421237361600613, "logits_per_char_corr": -0.7223451295134238, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.3858267716535433, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 53

task: agi_eval_aqua-rat:mc | model: Falcon3-3B-Base | model_type: external | observational_model: false | extracted_size: 3,000,000,000
primary_score: 0.275591 | logits_per_byte_corr: 1.133731 | logits_per_char_corr: -0.785842 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 23.683381
model_path: tiiuae/Falcon3-3B-Base | model_revision: main | model_params: null | model_tokens: null | flops: null | step: null | mix: Falcon3 | size: 3B | token_ratio: null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "Falcon3-3B-Base", "model_size": null }, "model": "tiiuae/Falcon3-3B-Base", "model_path": "tiiuae/Falcon3-3B-Base", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.2755905511811024, "acc_per_token": 0.2755905511811024, "acc_raw": 0.2755905511811024, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1337309591099178, "logits_per_char_corr": -0.7858424178199974, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2755905511811024, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 55

task: agi_eval_aqua-rat:mc | model: Falcon3-Mamba-7B-Base | model_type: external | observational_model: false | extracted_size: 7,000,000,000
primary_score: 0.34252 | logits_per_byte_corr: 1.081798 | logits_per_char_corr: -0.749845 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 0.0285
model_path: tiiuae/Falcon3-Mamba-7B-Base | model_revision: main | model_params: null | model_tokens: null | flops: null | step: null | mix: null | size: null | token_ratio: null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "Falcon3-Mamba-7B-Base", "model_size": null }, "model": "tiiuae/Falcon3-Mamba-7B-Base", "model_path": "tiiuae/Falcon3-Mamba-7B-Base", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.3425196850393701, "acc_per_token": 0.3425196850393701, "acc_raw": 0.3425196850393701, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.0817978360699352, "logits_per_char_corr": -0.7498451200071868, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.3425196850393701, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
__index_level_0__: 57

task: agi_eval_aqua-rat:mc | model: Llama-2-13b-hf | model_type: external | observational_model: false | extracted_size: 13,000,000,000
primary_score: 0.200787 | logits_per_byte_corr: 1.1723 | logits_per_char_corr: -0.812576 | primary_metric: acc_raw | task_category: knowledge | num_instances: 254 | num_tokens: 0 | processing_time: 25.34047
model_path: meta-llama/Llama-2-13b-hf | model_revision: main | model_params: null | model_tokens: null | flops: null | step: null | mix: null | size: null | token_ratio: null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "llama2-13b", "model_size": null }, "model": "meta-llama/Llama-2-13b-hf", "model_path": "meta-llama/Llama-2-13b-hf", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.20078740157480315, "acc_per_token": 0.20078740157480315, "acc_raw": 0.20078740157480315, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": null, "logits_per_char_corr": null, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.20078740157480315, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
66
agi_eval_aqua-rat:mc
Llama-2-7b-hf
external
false
7,000,000,000
0.240157
1.158598
-0.803079
acc_raw
knowledge
254
0
29.294562
meta-llama/Llama-2-7b-hf
main
null
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "llama2-7b", "model_size": null }, "model": "meta-llama/Llama-2-7b-hf", "model_path": "meta-llama/Llama-2-7b-hf", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.24015748031496062, "acc_per_token": 0.24015748031496062, "acc_raw": 0.24015748031496062, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": null, "logits_per_char_corr": null, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.24015748031496062, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
67
agi_eval_aqua-rat:mc
Llama-3.1-70B
external
true
70,000,000,000
0.350394
1.063502
-0.737164
acc_raw
knowledge
254
0
67.410912
meta-llama/Llama-3.1-70B
main
70,000,000,000
15,000,000,000,000
6,300,000,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": 0.7, "max_length": 2048, "metadata": { "alias": "llama-3.1-70b", "model_size": null }, "model": "meta-llama/Llama-3.1-70B", "model_path": "meta-llama/Llama-3.1-70B", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.35039370078740156, "acc_per_token": 0.35039370078740156, "acc_raw": 0.35039370078740156, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.063502476440711, "logits_per_char_corr": -0.7371637430628688, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.35039370078740156, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
69
agi_eval_aqua-rat:mc
Llama-3.1-8B
external
true
8,000,000,000
0.287402
1.131831
-0.784525
acc_raw
knowledge
254
0
20.914992
meta-llama/Llama-3.1-8B
main
8,000,000,000
15,000,000,000,000
720,000,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": 0.7, "max_length": 2048, "metadata": { "alias": "llama-3.1-8b", "model_size": null }, "model": "meta-llama/Llama-3.1-8B", "model_path": "meta-llama/Llama-3.1-8B", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.2874015748031496, "acc_per_token": 0.2874015748031496, "acc_raw": 0.2874015748031496, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1318305141276859, "logits_per_char_corr": -0.7845251297387551, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2874015748031496, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
70
agi_eval_aqua-rat:mc
Llama-3.2-1B
external
true
1,000,000,000
0.188976
1.175692
-0.814928
acc_raw
knowledge
254
0
14.830505
meta-llama/Llama-3.2-1B
main
1,000,000,000
9,000,000,000,000
54,000,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "llama3.2-1b", "model_size": null }, "model": "meta-llama/Llama-3.2-1B", "model_path": "meta-llama/Llama-3.2-1B", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.1889763779527559, "acc_per_token": 0.1889763779527559, "acc_raw": 0.1889763779527559, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": null, "logits_per_char_corr": null, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.1889763779527559, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
71
agi_eval_aqua-rat:mc
Llama-3.2-3B
external
true
3,000,000,000
0.255906
1.158039
-0.802692
acc_raw
knowledge
254
0
20.144862
meta-llama/Llama-3.2-3B
main
3,000,000,000
9,000,000,000,000
162,000,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "llama3.2-3b", "model_size": null }, "model": "meta-llama/Llama-3.2-3B", "model_path": "meta-llama/Llama-3.2-3B", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.2559055118110236, "acc_per_token": 0.2559055118110236, "acc_raw": 0.2559055118110236, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": null, "logits_per_char_corr": null, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2559055118110236, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
72
agi_eval_aqua-rat:mc
Mathstral-7B-v0.1
external
true
7,000,000,000
0.251969
1.156793
-0.801828
acc_raw
knowledge
254
0
19.711897
mistralai/Mathstral-7B-v0.1
main
7,000,000,000
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": 0.7, "max_length": 2048, "metadata": { "alias": "mathstral-7b-v0.1", "model_size": null }, "model": "mistralai/Mathstral-7B-v0.1", "model_path": "mistralai/Mathstral-7B-v0.1", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.25196850393700787, "acc_per_token": 0.25196850393700787, "acc_raw": 0.25196850393700787, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1567932481289254, "logits_per_char_corr": -0.8018279784307705, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.25196850393700787, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
76
agi_eval_aqua-rat:mc
Meta-Llama-3-70B
external
false
70,000,000,000
0.38189
1.036605
-0.71852
acc_raw
knowledge
254
0
65.348104
meta-llama/Meta-Llama-3-70B
main
null
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "llama3-70b", "model_size": null }, "model": "meta-llama/Meta-Llama-3-70B", "model_path": "meta-llama/Meta-Llama-3-70B", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.38188976377952755, "acc_per_token": 0.38188976377952755, "acc_raw": 0.38188976377952755, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": null, "logits_per_char_corr": null, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.38188976377952755, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
77
agi_eval_aqua-rat:mc
Meta-Llama-3-8B
external
true
8,000,000,000
0.326772
1.128489
-0.782209
acc_raw
knowledge
254
0
29.683881
meta-llama/Meta-Llama-3-8B
main
8,000,000,000
15,000,000,000,000
720,000,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "llama3-8b", "model_size": null }, "model": "meta-llama/Meta-Llama-3-8B", "model_path": "meta-llama/Meta-Llama-3-8B", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.32677165354330706, "acc_per_token": 0.32677165354330706, "acc_raw": 0.32677165354330706, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": null, "logits_per_char_corr": null, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.32677165354330706, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
78
agi_eval_aqua-rat:mc
Meta-Llama-3.1-70B
external
false
70,000,000,000
0.350394
1.069659
-0.741431
acc_raw
knowledge
254
0
64.275595
meta-llama/Meta-Llama-3.1-70B
main
null
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "llama3.1-70b", "model_size": null }, "model": "meta-llama/Meta-Llama-3.1-70B", "model_path": "meta-llama/Meta-Llama-3.1-70B", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.35039370078740156, "acc_per_token": 0.35039370078740156, "acc_raw": 0.35039370078740156, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": null, "logits_per_char_corr": null, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.35039370078740156, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
79
agi_eval_aqua-rat:mc
Meta-Llama-3.1-8B
external
false
8,000,000,000
0.255906
1.132024
-0.784659
acc_raw
knowledge
254
0
29.450249
meta-llama/Meta-Llama-3.1-8B
main
null
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "llama3.1-8b", "model_size": null }, "model": "meta-llama/Meta-Llama-3.1-8B", "model_path": "meta-llama/Meta-Llama-3.1-8B", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.2559055118110236, "acc_per_token": 0.2559055118110236, "acc_raw": 0.2559055118110236, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": null, "logits_per_char_corr": null, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2559055118110236, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
80
agi_eval_aqua-rat:mc
Mixtral-8x22B-v0.1
external
true
22,000,000,000
0.362205
1.066826
-0.739467
acc_raw
knowledge
254
0
39.137972
mistralai/Mixtral-8x22B-v0.1
main
22,000,000,000
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": 0.7, "max_length": 2048, "metadata": { "alias": "mixtral-8x22b-v0.1", "model_size": null }, "model": "mistralai/Mixtral-8x22B-v0.1", "model_path": "mistralai/Mixtral-8x22B-v0.1", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.36220472440944884, "acc_per_token": 0.36220472440944884, "acc_raw": 0.36220472440944884, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.066826090702519, "logits_per_char_corr": -0.7394674969177077, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.36220472440944884, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
94
agi_eval_aqua-rat:mc
Mixtral-8x7B-v0.1
external
true
7,000,000,000
0.283465
1.143872
-0.792872
acc_raw
knowledge
254
0
26.864495
mistralai/Mixtral-8x7B-v0.1
main
7,000,000,000
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": 0.7, "max_length": 2048, "metadata": { "alias": "mixtral-8x7b-v0.1", "model_size": null }, "model": "mistralai/Mixtral-8x7B-v0.1", "model_path": "mistralai/Mixtral-8x7B-v0.1", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.28346456692913385, "acc_per_token": 0.28346456692913385, "acc_raw": 0.28346456692913385, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1438720165400074, "logits_per_char_corr": -0.7928716631855551, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.28346456692913385, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
95
agi_eval_aqua-rat:mc
MobileLLM-125M-HF
external
false
125,000,000
0.220472
1.255582
-0.870303
acc_raw
knowledge
254
0
13.451424
vonjack/MobileLLM-125M-HF
main
null
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "MobileLLM-125M-HF", "model_size": null }, "model": "vonjack/MobileLLM-125M-HF", "model_path": "vonjack/MobileLLM-125M-HF", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.2204724409448819, "acc_per_token": 0.2204724409448819, "acc_raw": 0.2204724409448819, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.255581671085001, "logits_per_char_corr": -0.870302895274688, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2204724409448819, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
96
agi_eval_aqua-rat:mc
Nemotron-4-Mini-Hindi-4B-Base
external
false
4,000,000,000
0.350394
1.08185
-0.749881
acc_raw
knowledge
254
0
27.281382
jebish7/Nemotron-4-Mini-Hindi-4B-Base
main
null
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "Nemotron-4-Mini-Hindi-4B-Base", "model_size": null }, "model": "jebish7/Nemotron-4-Mini-Hindi-4B-Base", "model_path": "jebish7/Nemotron-4-Mini-Hindi-4B-Base", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.35039370078740156, "acc_per_token": 0.35039370078740156, "acc_raw": 0.35039370078740156, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.0818498184344882, "logits_per_char_corr": -0.7498811514366154, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.35039370078740156, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
98
agi_eval_aqua-rat:mc
OLMo-1B-0724-hf
external
false
1,000,000,000
0.204724
2.178135
-1.509768
acc_raw
knowledge
254
0
12.661862
allenai/OLMo-1B-0724-hf
main
null
null
null
null
OLMo
1B
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "olmo-1b-0724", "model_size": null }, "model": "allenai/OLMo-1B-0724-hf", "model_path": "allenai/OLMo-1B-0724-hf", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.2047244094488189, "acc_per_token": 0.2047244094488189, "acc_raw": 0.2047244094488189, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": null, "logits_per_char_corr": null, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2047244094488189, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
101
agi_eval_aqua-rat:mc
OLMo-1B-hf
external
true
1,000,000,000
0.169291
1.317051
-0.91291
acc_raw
knowledge
254
0
12.289532
allenai/OLMo-1B-hf
main
1,000,000,000
3,000,000,000,000
18,000,000,000,000,000,000,000
null
OLMo
1B
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "olmo-1b", "model_size": null }, "model": "allenai/OLMo-1B-hf", "model_path": "allenai/OLMo-1B-hf", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.16929133858267717, "acc_per_token": 0.16929133858267717, "acc_raw": 0.16929133858267717, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": null, "logits_per_char_corr": null, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.16929133858267717, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
102
agi_eval_aqua-rat:mc
OLMo-2-1124-13B
external
true
13,000,000,000
0.299213
1.113017
-0.771485
acc_raw
knowledge
254
0
34.438979
allenai/OLMo-2-1124-13B
main
13,000,000,000
5,000,000,000,000
390,000,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": 0.7, "max_length": 2048, "metadata": { "alias": "olmo-2-1124-13b", "model_size": null }, "model": "allenai/OLMo-2-1124-13B", "model_path": "allenai/OLMo-2-1124-13B", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.2992125984251969, "acc_per_token": 0.2992125984251969, "acc_raw": 0.2992125984251969, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1130173651275446, "logits_per_char_corr": -0.7714848485518628, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2992125984251969, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
104
agi_eval_aqua-rat:mc
OLMo-2-1124-7B
external
true
7,000,000,000
0.251969
1.148454
-0.796048
acc_raw
knowledge
254
0
26.002349
allenai/OLMo-2-1124-7B
main
7,000,000,000
4,000,000,000,000
168,000,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": 0.7, "max_length": 2048, "metadata": { "alias": "olmo-2-1124-7b", "model_size": null }, "model": "allenai/OLMo-2-1124-7B", "model_path": "allenai/OLMo-2-1124-7B", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.25196850393700787, "acc_per_token": 0.25196850393700787, "acc_raw": 0.25196850393700787, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1484544279167384, "logits_per_char_corr": -0.7960479487115004, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.25196850393700787, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
105
agi_eval_aqua-rat:mc
OLMo-7B-0424-hf
external
true
7,000,000,000
0.224409
1.149859
-0.797021
acc_raw
knowledge
254
0
0.02205
allenai/OLMo-7B-0424-hf
main
7,000,000,000
2,500,000,000,000
105,000,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "olmo-7b-0424", "model_size": null }, "model": "allenai/OLMo-7B-0424-hf", "model_path": "allenai/OLMo-7B-0424-hf", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.22440944881889763, "acc_per_token": 0.22440944881889763, "acc_raw": 0.22440944881889763, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": null, "logits_per_char_corr": null, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.22440944881889763, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
106
agi_eval_aqua-rat:mc
OLMo-7B-0724-hf
external
true
7,000,000,000
0.248031
1.150291
-0.797321
acc_raw
knowledge
254
0
27.147031
allenai/OLMo-7B-0724-hf
main
7,000,000,000
2,750,000,000,000
115,500,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "olmo-7b-0724", "model_size": null }, "model": "allenai/OLMo-7B-0724-hf", "model_path": "allenai/OLMo-7B-0724-hf", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.24803149606299213, "acc_per_token": 0.24803149606299213, "acc_raw": 0.24803149606299213, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": null, "logits_per_char_corr": null, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.24803149606299213, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
107
agi_eval_aqua-rat:mc
OLMo-7B-Twin-2T-hf
external
true
7,000,000,000
0.149606
1.212002
-0.840096
acc_raw
knowledge
254
0
17.359545
allenai/OLMo-7B-Twin-2T-hf
main
7,000,000,000
2,000,000,000,000
84,000,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": 0.7, "max_length": 2048, "metadata": { "alias": "olmo-7b-twin-2t-hf", "model_size": null }, "model": "allenai/OLMo-7B-Twin-2T-hf", "model_path": "allenai/OLMo-7B-Twin-2T-hf", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.14960629921259844, "acc_per_token": 0.14960629921259844, "acc_raw": 0.14960629921259844, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.2120023234584594, "logits_per_char_corr": -0.8400959933367301, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.14960629921259844, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
108
agi_eval_aqua-rat:mc
OLMo-7B-hf
external
true
7,000,000,000
0.224409
1.170041
-0.811011
acc_raw
knowledge
254
0
25.759016
allenai/OLMo-7B-hf
main
7,000,000,000
2,500,000,000,000
105,000,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "olmo-7b", "model_size": null }, "model": "allenai/OLMo-7B-hf", "model_path": "allenai/OLMo-7B-hf", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.22440944881889763, "acc_per_token": 0.22440944881889763, "acc_raw": 0.22440944881889763, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1700414508992465, "logits_per_char_corr": -0.8110109328284977, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.22440944881889763, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
109
agi_eval_aqua-rat:mc
OLMoE-1B-7B-0924
external
false
1,000,000,000
0.220472
1.15726
-0.802152
acc_raw
knowledge
254
0
17.679828
allenai/OLMoE-1B-7B-0924
main
null
null
null
null
OLMoE
1B
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "olmoe-1b-7b-0924", "model_size": "7b" }, "model": "allenai/OLMoE-1B-7B-0924", "model_path": "allenai/OLMoE-1B-7B-0924", "model_type": "vllm", "revision": null, "trust_remote_code": null }
{ "acc_per_char": 0.2204724409448819, "acc_per_token": 0.2204724409448819, "acc_raw": 0.2204724409448819, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.1572604706542828, "logits_per_char_corr": -0.8021518324069151, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2204724409448819, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
110
agi_eval_aqua-rat:mc
Orca-2-13b
external
true
13,000,000,000
0.318898
1.429371
-0.990765
acc_raw
knowledge
254
0
25.17385
microsoft/Orca-2-13b
main
13,000,000,000
300,000,000,000
23,400,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": 0.7, "max_length": 2048, "metadata": { "alias": "orca-2-13b", "model_size": null }, "model": "microsoft/Orca-2-13b", "model_path": "microsoft/Orca-2-13b", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.3188976377952756, "acc_per_token": 0.3188976377952756, "acc_raw": 0.3188976377952756, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.4293711876227417, "logits_per_char_corr": -0.990764608673612, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.3188976377952756, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
111
agi_eval_aqua-rat:mc
Orca-2-7b
external
true
7,000,000,000
0.267717
1.298971
-0.900378
acc_raw
knowledge
254
0
19.41169
microsoft/Orca-2-7b
main
7,000,000,000
300,000,000,000
12,600,000,000,000,000,000,000
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": 0.7, "max_length": 2048, "metadata": { "alias": "orca-2-7b", "model_size": null }, "model": "microsoft/Orca-2-7b", "model_path": "microsoft/Orca-2-7b", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.2677165354330709, "acc_per_token": 0.2677165354330709, "acc_raw": 0.2677165354330709, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.2989707757318416, "logits_per_char_corr": -0.9003779308276442, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2677165354330709, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
112
agi_eval_aqua-rat:mc
PowerLM-3b
external
false
3,000,000,000
0.244094
1.156938
-0.801928
acc_raw
knowledge
254
0
24.813169
ibm/PowerLM-3b
main
null
null
null
null
null
null
null
{ "add_bos_token": null, "gpu_memory_utilization": null, "max_length": 2048, "metadata": { "alias": "PowerLM-3b", "model_size": null }, "model": "ibm/PowerLM-3b", "model_path": "ibm/PowerLM-3b", "model_type": "vllm", "revision": null, "trust_remote_code": true }
{ "acc_per_char": 0.2440944881889764, "acc_per_token": 0.2440944881889764, "acc_raw": 0.2440944881889764, "acc_uncond": null, "bits_per_byte": null, "exact_match": null, "exact_match_flex": null, "exact_match_flex_sub_2022": null, "exact_match_flex_sub_2023": null, "exact_match_flex_sub_2024": null, "exact_match_simple": null, "exact_match_sub_2022": null, "exact_match_sub_2023": null, "exact_match_sub_2024": null, "exact_match_sub_cnn": null, "exact_match_sub_gutenberg": null, "exact_match_sub_mctest": null, "exact_match_sub_race": null, "exact_match_sub_wikipedia": null, "extra_metrics": null, "f1": null, "f1_sub_cnn": null, "f1_sub_gutenberg": null, "f1_sub_mctest": null, "f1_sub_race": null, "f1_sub_wikipedia": null, "logits_per_byte_corr": 1.156937526338317, "logits_per_char_corr": -0.8019279844648256, "max_token_count": null, "num_instances": null, "pass_at_1": null, "pass_at_10": null, "ppl_byte": null, "ppl_char": null, "ppl_token": null, "ppl_word": null, "primary_score": 0.2440944881889764, "recall": null, "total_tokens": null, "twitterAAE_HELM_fixed__AA_bits_per_byte": null, "twitterAAE_HELM_fixed__AA_ppl_byte": null, "twitterAAE_HELM_fixed__AA_ppl_char": null, "twitterAAE_HELM_fixed__AA_ppl_token": null, "twitterAAE_HELM_fixed__AA_ppl_word": null, "twitterAAE_HELM_fixed__white_bits_per_byte": null, "twitterAAE_HELM_fixed__white_ppl_byte": null, "twitterAAE_HELM_fixed__white_ppl_char": null, "twitterAAE_HELM_fixed__white_ppl_token": null, "twitterAAE_HELM_fixed__white_ppl_word": null }
115
End of preview. Expand in Data Studio

Signal and Noise: A Framework for Reducing Uncertainty in Language Model Evaluation

Our work studies the ratio between signal (a benchmark's ability to separate models) and noise (a benchmark's sensitivity to random variability across training steps).
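
As a minimal sketch of this ratio (not the paper's exact estimators), one might take the spread of final model scores as signal and the step-to-step variability of a single training run as noise:

import numpy as np

# Toy numbers, for illustration only.
final_scores = np.array([0.24, 0.29, 0.33, 0.35, 0.38])      # one score per model
checkpoint_scores = np.array([0.30, 0.28, 0.31, 0.29, 0.30])  # one model across steps

signal = final_scores.max() - final_scores.min()  # how well the benchmark separates models
noise = checkpoint_scores.std()                   # sensitivity to training variability
print(f'SNR: {signal / noise:.1f}')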

This dataset contains evaluation results. For utilities to use this dataset and to reproduce the findings in our paper, please see our GitHub repository.

Main Eval Suite (375 models)

import pandas as pd
from snr.download.hf import pull_predictions_from_hf

local_path = pull_predictions_from_hf("allenai/signal-and-noise", split_name='core')
df = pd.read_parquet(local_path)

print(f'Loaded {len(df):,} model evaluations')
>>> Loaded 388,924 model evaluations
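
A possible next step is to reshape the long-format results into a model-by-task score matrix; the column names here ('model', 'task', 'primary_score') are assumptions based on the dataset schema, so verify them against df.columns:

pivot = df.pivot_table(index='model', columns='task', values='primary_score')  # assumed column names
print(pivot.head())
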
List of Included Tasks

agi_eval, arc_challenge, arc_challenge:mc, arc_easy, arc_easy:mc, autobencher, autobencher:mc, boolq, boolq:mc, codex_humaneval, codex_humanevalplus, copycolors:mc, csqa, csqa:mc, custom_loss_numia_math, custom_loss_sky_t1, custom_loss_tulu_if, drop, gsm8k, gsm_plus, gsm_symbolic_main, gsm_symbolic_p1, gsm_symbolic_p2, hellaswag, hellaswag:mc, jeopardy, mbpp, mbppplus, medmcqa, medmcqa:mc, minerva, minerva_math_500, mmlu, multitask_all, multitask_code, multitask_knowledge, multitask_math, openbookqa, openbookqa:mc, paloma_4chan_meta_sep, paloma_c4_100_domains, paloma_c4_en, paloma_dolma-v1_5, paloma_dolma_100_programing_languages, paloma_dolma_100_subreddits, paloma_falcon-refinedweb, paloma_gab, paloma_m2d2_s2orc_unsplit, paloma_m2d2_wikipedia_unsplit, paloma_manosphere_meta_sep, paloma_mc4, paloma_ptb, paloma_redpajama, paloma_twitterAAE_HELM_fixed, paloma_wikitext_103, piqa, piqa:mc, socialiqa, socialiqa:mc, squad, triviaqa, winogrande, winogrande:mc

List of Included Models
  • Intermediate checkpoint models (2): allenai/OLMo-2-1124-13B, allenai/OLMo-2-1124-7B
  • Ladder models (25): allenai/OLMo-Ladder-{190M|370M|760M|1B|3B}-{0.5xC|1xC|2xC|5xC|10xC} (see the expansion sketch after this list)
  • DataDecide models (225): allenai/DataDecide-{c4|dclm-baseline|dclm-baseline-25p-dolma1.7-75p|dclm-baseline-50p-dolma1.7-50p|dclm-baseline-75p-dolma1.7-25p|dclm-baseline-qc-10p|dclm-baseline-qc-20p|dclm-baseline-qc-7p-fw2|dclm-baseline-qc-7p-fw3|dclm-baseline-qc-fw-10p|dclm-baseline-qc-fw-3p|dolma1_6plus|dolma1_7|dolma1_7-no-code|dolma1_7-no-flan|dolma1_7-no-math-code|dolma1_7-no-reddit|falcon|falcon-and-cc|falcon-and-cc-qc-10p|falcon-and-cc-qc-20p|falcon-and-cc-qc-orig-10p|falcon-and-cc-qc-tulu-10p|fineweb-edu|fineweb-pro}-{4M|20M|60M|90M|150M|300M|530M|750M|1B}
  • External models (119): 01-ai/Yi-1.5-34B, 01-ai/Yi-1.5-6B, 01-ai/Yi-1.5-9B, 01-ai/Yi-1.5-9B-32K, 01-ai/Yi-34B, 01-ai/Yi-6B, 01-ai/Yi-6B-200K, 01-ai/Yi-9B, 01-ai/Yi-9B-200K, BEE-spoke-data/smol_llama-220M-GQA, BEE-spoke-data/smol_llama-220M-GQA-fineweb_edu, CortexLM/btlm-7b-base-v0.2, Deci/DeciLM-7B, EleutherAI/pythia-1.4b, EleutherAI/pythia-12b, EleutherAI/pythia-14m, EleutherAI/pythia-160m, EleutherAI/pythia-1b, EleutherAI/pythia-2.8b, EleutherAI/pythia-6.9b, EleutherAI/pythia-70m, HelpingAI/Priya-3B, HuggingFaceTB/SmolLM-1.7B, HuggingFaceTB/SmolLM-135M, HuggingFaceTB/SmolLM-360M, HuggingFaceTB/SmolLM2-1.7B, HuggingFaceTB/SmolLM2-135M, Qwen/CodeQwen1.5-7B, Qwen/Qwen1.5-1.8B, Qwen/Qwen1.5-110B, Qwen/Qwen1.5-14B, Qwen/Qwen1.5-32B, Qwen/Qwen1.5-4B, Qwen/Qwen1.5-72B, Qwen/Qwen1.5-7B, Qwen/Qwen2-0.5B, Qwen/Qwen2-1.5B, Qwen/Qwen2-72B, Qwen/Qwen2-7B, Qwen/Qwen2.5-0.5B, Qwen/Qwen2.5-1.5B, Qwen/Qwen2.5-14B, Qwen/Qwen2.5-32B, Qwen/Qwen2.5-3B, Qwen/Qwen2.5-72B, Qwen/Qwen2.5-7B, Qwen/Qwen2.5-Coder-14B, Qwen/Qwen2.5-Coder-7B, Qwen/Qwen2.5-Math-7B, TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T, TinyLlama/TinyLlama_v1.1, allenai/OLMo-1B-0724-hf, allenai/OLMo-1B-hf, allenai/OLMo-2-0325-32B, allenai/OLMo-2-0425-1B, allenai/OLMo-2-1124-13B, allenai/OLMo-2-1124-7B, allenai/OLMo-7B-0424-hf, allenai/OLMo-7B-0724-hf, allenai/OLMo-7B-Twin-2T-hf, allenai/OLMo-7B-hf, allenai/OLMoE-1B-7B-0924, amd/AMD-Llama-135m, beomi/gemma-mling-7b, bigcode/starcoder2-3b, bigcode/starcoder2-7b, databricks/dolly-v1-6b, deepseek-ai/deepseek-llm-67b-base, deepseek-ai/deepseek-llm-7b-base, deepseek-ai/deepseek-moe-16b-base, dicta-il/dictalm2.0, facebook/opt-1.3b, google/codegemma-1.1-2b, google/gemma-2-2b, google/gemma-2-9b, google/gemma-2b, google/gemma-7b, h2oai/h2o-danube3-4b-base, huggyllama/llama-13b, huggyllama/llama-30b, huggyllama/llama-65b, huggyllama/llama-7b, ibm/PowerLM-3b, jebish7/Nemotron-4-Mini-Hindi-4B-Base, m-a-p/neo_7b, meta-llama/Llama-2-13b-hf, meta-llama/Llama-2-7b-hf, meta-llama/Llama-3.1-70B, meta-llama/Llama-3.1-8B, meta-llama/Llama-3.2-1B, meta-llama/Llama-3.2-3B, meta-llama/Meta-Llama-3-70B, meta-llama/Meta-Llama-3-8B, meta-llama/Meta-Llama-3.1-70B, meta-llama/Meta-Llama-3.1-8B, microsoft/Orca-2-13b, microsoft/Orca-2-7b, microsoft/phi-1, microsoft/phi-1_5, microsoft/phi-2, microsoft/phi-4, mistralai/Mathstral-7B-v0.1, mistralai/Mixtral-8x22B-v0.1, mistralai/Mixtral-8x7B-v0.1, mosaicml/mpt-7b, princeton-nlp/Sheared-LLaMA-1.3B, princeton-nlp/Sheared-LLaMA-2.7B, qingy2024/Qwen2.5-4B, speakleash/Bielik-11B-v2, stabilityai/stablelm-2-1_6b, stabilityai/stablelm-3b-4e1t, tiiuae/Falcon3-10B-Base, tiiuae/Falcon3-3B-Base, tiiuae/Falcon3-Mamba-7B-Base, tiiuae/falcon-11B, tiiuae/falcon-7b, togethercomputer/RedPajama-INCITE-7B-Base, upstage/SOLAR-10.7B-v1.0, vonjack/MobileLLM-125M-HF
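
The brace notation above is shorthand for a Cartesian product of suffixes. A small sketch expanding the ladder pattern into its 25 concrete model IDs:

from itertools import product

# Suffixes copied from the ladder pattern above.
sizes = ['190M', '370M', '760M', '1B', '3B']
multipliers = ['0.5xC', '1xC', '2xC', '5xC', '10xC']
ladder_models = [f'allenai/OLMo-Ladder-{s}-{m}' for s, m in product(sizes, multipliers)]
print(len(ladder_models))  # 25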

DataDecide Eval Suite (225 models with 4M to 1B params)

import pandas as pd
from snr.download.hf import pull_predictions_from_hf

local_path = pull_predictions_from_hf("allenai/signal-and-noise", split_name='datadecide_intermediate')
df = pd.read_parquet(local_path)

print(f'Loaded {len(df):,} model evaluations')
>>> Loaded 212,047 model evaluations
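
The splits can also be loaded together for cross-suite analyses, using the split names shown in the snippets on this page:

import pandas as pd
from snr.download.hf import pull_predictions_from_hf

# Concatenate the main and DataDecide suites into one long-format frame.
frames = [pd.read_parquet(pull_predictions_from_hf('allenai/signal-and-noise', split_name=s))
          for s in ('core', 'datadecide_intermediate')]
combined = pd.concat(frames, ignore_index=True)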

Random Seed Eval Suite (20 models with 1B params)

import pandas as pd
from snr.download.hf import pull_predictions_from_hf

local_path = pull_predictions_from_hf("allenai/signal-and-noise", split_name='random_seeds')
df = pd.read_parquet(local_path)

print(f'Loaded {len(df):,} model evaluations')
>>> Loaded 296,358 model evaluations
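
Since these 20 models differ only in random seed, the per-task spread of their scores gives a direct noise estimate. A sketch, again assuming 'task' and 'primary_score' columns (a finer analysis would also group by training step):

# Per-task standard deviation across seed reruns (higher = noisier benchmark).
noise_by_task = df.groupby('task')['primary_score'].std().sort_values()
print(noise_by_task.tail())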

AutoBencher QA Benchmark

For the AutoBencher evaluation used in our work, please refer to huggingface.co/datasets/allenai/autobencher-qa-33k.

Citation

@article{heineman2025signal,
  title={Signal and Noise: A Framework for Reducing Uncertainty in Language Model Evaluation},
  author={Heineman, David and Hofmann, Valentin and Magnusson, Ian and Gu, Yuling and Smith, Noah A and Hajishirzi, Hannaneh and Lo, Kyle and Dodge, Jesse},
  journal={arXiv preprint arXiv:2508.13144},
  year={2025}
}