Tasks: Question Answering (multiple-choice-qa)
Modalities: Text
Formats: parquet
Languages: English
Size: 1M - 10M
Add 'high_school_world_history' config data files
README.md CHANGED

@@ -1018,6 +1018,8 @@ dataset_info:
   features:
   - name: question
     dtype: string
+  - name: subject
+    dtype: string
   - name: choices
     sequence: string
   - name: answer
@@ -1030,19 +1032,19 @@ dataset_info:
           '3': D
   splits:
   - name: auxiliary_train
-    num_bytes:
+    num_bytes: 161000625
     num_examples: 99842
   - name: test
-    num_bytes:
+    num_bytes: 385478
     num_examples: 237
   - name: validation
-    num_bytes:
+    num_bytes: 46243
     num_examples: 26
   - name: dev
-    num_bytes:
+    num_bytes: 5015
     num_examples: 5
-  download_size:
+  download_size: 47405363
-  dataset_size:
+  dataset_size: 161437361
 - config_name: human_aging
   features:
   - name: question
@@ -2089,6 +2091,16 @@ configs:
     path: high_school_us_history/validation-*
   - split: dev
     path: high_school_us_history/dev-*
+- config_name: high_school_world_history
+  data_files:
+  - split: auxiliary_train
+    path: high_school_world_history/auxiliary_train-*
+  - split: test
+    path: high_school_world_history/test-*
+  - split: validation
+    path: high_school_world_history/validation-*
+  - split: dev
+    path: high_school_world_history/dev-*
 ---

 # Dataset Card for MMLU
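The `configs:` entries added above are what `datasets.load_dataset` uses to resolve the parquet shards for each split. A minimal sketch of loading the new config (the repo id `cais/mmlu` is an assumption; this page only identifies the card as MMLU):

```python
from datasets import load_dataset

# Assumption: the repository id is "cais/mmlu"; substitute the repo
# this commit actually belongs to if it differs.
ds = load_dataset("cais/mmlu", "high_school_world_history")

print(ds["test"].num_rows)   # 237, per the split metadata above

example = ds["test"][0]
print(example["question"])   # question text
print(example["subject"])    # the newly added "subject" string column
print(example["choices"])    # list of four answer strings

# "answer" is stored as a ClassLabel integer; map it back to its letter.
print(ds["test"].features["answer"].int2str(example["answer"]))
```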
dataset_infos.json CHANGED

@@ -2177,39 +2177,34 @@
     "features": {
       "question": {
         "dtype": "string",
-        "id": null,
+        "_type": "Value"
+      },
+      "subject": {
+        "dtype": "string",
         "_type": "Value"
       },
       "choices": {
         "feature": {
           "dtype": "string",
-          "id": null,
           "_type": "Value"
         },
-        "length": -1,
-        "id": null,
         "_type": "Sequence"
       },
       "answer": {
-        "num_classes": 4,
         "names": [
           "A",
           "B",
           "C",
           "D"
         ],
-        "id": null,
         "_type": "ClassLabel"
       }
     },
-    "post_processed": null,
-    "supervised_keys": null,
-    "task_templates": null,
-    "builder_name": "mmlu",
+    "builder_name": "parquet",
+    "dataset_name": "mmlu",
     "config_name": "high_school_world_history",
     "version": {
       "version_str": "1.0.0",
-      "description": null,
       "major": 1,
       "minor": 0,
       "patch": 0
@@ -2217,39 +2212,32 @@
     "splits": {
       "auxiliary_train": {
         "name": "auxiliary_train",
-        "num_bytes":
+        "num_bytes": 161000625,
         "num_examples": 99842,
-        "dataset_name":
+        "dataset_name": null
       },
       "test": {
         "name": "test",
-        "num_bytes":
+        "num_bytes": 385478,
         "num_examples": 237,
-        "dataset_name":
+        "dataset_name": null
       },
       "validation": {
         "name": "validation",
-        "num_bytes":
+        "num_bytes": 46243,
         "num_examples": 26,
-        "dataset_name":
+        "dataset_name": null
      },
       "dev": {
         "name": "dev",
-        "num_bytes":
+        "num_bytes": 5015,
         "num_examples": 5,
-        "dataset_name":
-      }
-    },
-    "download_checksums": {
-      "data.tar": {
-        "num_bytes": 166184960,
-        "checksum": "bec563ba4bac1d6aaf04141cd7d1605d7a5ca833e38f994051e818489592989b"
+        "dataset_name": null
       }
     },
-    "download_size":
-    "post_processing_size": null,
-    "dataset_size":
-    "size_in_bytes": 327215181
+    "download_size": 47405363,
+    "dataset_size": 161437361,
+    "size_in_bytes": 208842724
   },
   "human_aging": {
     "description": "This is a massive multitask test consisting of multiple-choice questions from various branches of knowledge, covering 57 tasks including elementary mathematics, US history, computer science, law, and more.\n",
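The new size fields are mutually consistent: `dataset_size` is the sum of the four split `num_bytes` values, and `size_in_bytes` is `dataset_size` plus `download_size`. A quick check with the numbers copied from the diff above:

```python
# Split num_bytes values from the updated high_school_world_history entry.
split_num_bytes = {
    "auxiliary_train": 161000625,
    "test": 385478,
    "validation": 46243,
    "dev": 5015,
}

dataset_size = sum(split_num_bytes.values())
assert dataset_size == 161437361                    # "dataset_size" above

download_size = 47405363                            # "download_size" above
assert dataset_size + download_size == 208842724    # "size_in_bytes" above
```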
high_school_world_history/auxiliary_train-00000-of-00001.parquet ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2782fc860f57d9345a9233ab04f494b0af5ae85b893a27853f7014b14a3bd07
+size 47163955

high_school_world_history/dev-00000-of-00001.parquet ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1e315dc536ad6b2d445c3ed96bc2915d6c1a9c283aa86fd37cdfcc00464803af
+size 8481

high_school_world_history/test-00000-of-00001.parquet ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9688b239ea456500eca29784bd3c059b6105c9edb79c118b4ad61b1c6825ba08
+size 198753

high_school_world_history/validation-00000-of-00001.parquet ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f909ed4ebca35952fabd89826e0b5a3d1ce9057733fccc077ae0377093acada0
+size 34174
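Each file above is a Git LFS pointer (a version line, a sha256 oid, and a byte size), not the parquet data itself; the actual bytes live in LFS storage. A hedged sketch of verifying a downloaded shard against its pointer using `huggingface_hub` (the repo id is again an assumption):

```python
import hashlib
import os

from huggingface_hub import hf_hub_download

# Assumption: repo id "cais/mmlu". The oid and size are copied from the
# test shard's LFS pointer above.
path = hf_hub_download(
    repo_id="cais/mmlu",
    repo_type="dataset",
    filename="high_school_world_history/test-00000-of-00001.parquet",
)

expected_oid = "9688b239ea456500eca29784bd3c059b6105c9edb79c118b4ad61b1c6825ba08"
expected_size = 198753

assert os.path.getsize(path) == expected_size
with open(path, "rb") as f:
    assert hashlib.sha256(f.read()).hexdigest() == expected_oid
```

The four pointer sizes also sum to the `download_size` recorded in the metadata: 47163955 + 8481 + 198753 + 34174 = 47405363.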