{
"dataset_revision": "ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba",
"evaluation_time": 19.373476266860962,
"kg_co2_emissions": null,
"mteb_version": "1.14.5",
"scores": {
"test": [
{
"accuracy": 0.887733698130415,
"f1": 0.6446679683763925,
"f1_weighted": 0.8828001372200353,
"hf_subset": "en",
"languages": [
"eng-Latn"
],
"main_score": 0.887733698130415,
"scores_per_experiment": [
{
"accuracy": 0.8903328773369813,
"f1": 0.6443196100400796,
"f1_weighted": 0.8858075024576232
},
{
"accuracy": 0.8875968992248062,
"f1": 0.6353699249343289,
"f1_weighted": 0.87927962511134
},
{
"accuracy": 0.8901048791609667,
"f1": 0.6360768466219334,
"f1_weighted": 0.8860015331545291
},
{
"accuracy": 0.8866849065207478,
"f1": 0.6493641222457096,
"f1_weighted": 0.8839610419300968
},
{
"accuracy": 0.8885088919288646,
"f1": 0.639949142686732,
"f1_weighted": 0.884408678059172
},
{
"accuracy": 0.8919288645690835,
"f1": 0.6428687762213194,
"f1_weighted": 0.8848113821779731
},
{
"accuracy": 0.883264933880529,
"f1": 0.644593179250931,
"f1_weighted": 0.8809102398647856
},
{
"accuracy": 0.8850889192886456,
"f1": 0.65363821076355,
"f1_weighted": 0.880234303806651
},
{
"accuracy": 0.8903328773369813,
"f1": 0.6553213509453706,
"f1_weighted": 0.8849216102804892
},
{
"accuracy": 0.8834929320565436,
"f1": 0.6451785200539706,
"f1_weighted": 0.8776654553576927
}
]
}
]
},
"task_name": "MTOPIntentClassification"
}