cpu/cpu_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel/benchmark.json
{
    "config": {
        "name": "cpu_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel",
        "backend": {
            "name": "pytorch",
            "version": "2.8.0",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "model": "hf-internal-testing/tiny-random-BertModel",
            "processor": "hf-internal-testing/tiny-random-BertModel",
            "task": "fill-mask",
            "library": "transformers",
            "model_type": "bert",
            "device": "cpu",
            "device_ids": null,
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "tp_plan": null,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "allow_tf32": false,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 2,
                "sequence_length": 16,
                "num_choices": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": true,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": false,
            "device_isolation_action": null,
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 4,
            "cpu_ram_mb": 16772.579328,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-6.11.0-1018-azure-x86_64-with-glibc2.39",
            "processor": "x86_64",
            "python_version": "3.10.18",
            "optimum_benchmark_version": "0.7.0.dev0",
            "optimum_benchmark_commit": "b263af468a9e9a9a3d1449039849727352955646",
            "transformers_version": "4.56.2",
            "transformers_commit": "b263af468a9e9a9a3d1449039849727352955646",
            "accelerate_version": "1.10.1",
            "accelerate_commit": "b263af468a9e9a9a3d1449039849727352955646",
            "diffusers_version": "0.35.1",
            "diffusers_commit": "b263af468a9e9a9a3d1449039849727352955646",
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.20",
            "timm_commit": "b263af468a9e9a9a3d1449039849727352955646",
            "peft_version": "0.17.1",
            "peft_commit": "b263af468a9e9a9a3d1449039849727352955646"
        },
        "print_report": true,
        "log_report": true
    },
    "report": {
        "load_model": {
            "memory": {
                "unit": "MB",
                "max_ram": 920.24832,
                "max_global_vram": null,
                "max_process_vram": null,
                "max_reserved": null,
                "max_allocated": null
            },
            "latency": {
                "unit": "s",
                "values": [
                    0.13120866599999204
                ],
                "count": 1,
                "total": 0.13120866599999204,
                "mean": 0.13120866599999204,
                "p50": 0.13120866599999204,
                "p90": 0.13120866599999204,
                "p95": 0.13120866599999204,
                "p99": 0.13120866599999204,
                "stdev": 0,
                "stdev_": 0
            },
            "throughput": null,
            "energy": {
                "unit": "kWh",
                "cpu": 8.512076235693502e-05,
                "ram": 1.433861648611112e-05,
                "gpu": 0.0,
                "total": 9.945937884304614e-05
            },
            "efficiency": null
        },
        "first_forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 920.90368,
                "max_global_vram": null,
                "max_process_vram": null,
                "max_reserved": null,
                "max_allocated": null
            },
            "latency": {
                "unit": "s",
                "values": [
                    0.0049649750000071435
                ],
                "count": 1,
                "total": 0.0049649750000071435,
                "mean": 0.0049649750000071435,
                "p50": 0.0049649750000071435,
                "p90": 0.0049649750000071435,
                "p95": 0.0049649750000071435,
                "p99": 0.0049649750000071435,
                "stdev": 0,
                "stdev_": 0
            },
            "throughput": null,
            "energy": {
                "unit": "kWh",
                "cpu": 8.684955976370636e-05,
                "ram": 1.3958986002777765e-05,
                "gpu": 0.0,
                "total": 0.00010080854576648414
            },
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 926.932992,
                "max_global_vram": null,
                "max_process_vram": null,
                "max_reserved": null,
                "max_allocated": null
            },
            "latency": {
                "unit": "s",
                "values": [
                    0.0034489829999984067,
                    0.0029913660000033815,
                    0.002964044999998805,
                    0.0029707469999777913,
                    0.0029845729999919968,
                    0.002988790000017616,
                    0.002956259999990607,
                    0.0029471829999749843,
                    0.0029464710000013383,
                    0.002948575000004894,
                    0.0029574319999881027,
                    0.00294498900001372,
                    0.0029508799999860003,
                    0.0029402399999867157,
                    0.0029285280000124203,
                    0.0029178980000210686,
                    0.002898631999983081,
                    0.0029404400000032638,
                    0.0029437370000096053,
                    0.002923269000007167,
                    0.0029446880000136844,
                    0.002925081000000773,
                    0.002968803000015896,
                    0.002945038999996541,
                    0.00293180399998505,
                    0.002925032000007377,
                    0.0029241700000000037,
                    0.0029916270000001077,
                    0.0029406099999960134,
                    0.0029966459999855033,
                    0.0029504289999806588,
                    0.002935691000004681,
                    0.0029380759999924067,
                    0.002938948000007713,
                    0.0029487460000154897,
                    0.0029311830000153805,
                    0.0033496169999978065,
                    0.0029694849999941653,
                    0.0029499590000057196,
                    0.002947823999988941,
                    0.002978390999999192,
                    0.0029445580000242444,
                    0.0029531749999875956,
                    0.0029817879999995967,
                    0.00298868000001562,
                    0.0030057630000044355,
                    0.002978011000010383,
                    0.0029622319999873525,
                    0.0029673810000190315,
                    0.002933087000002388,
                    0.002944818999992549,
                    0.0029399199999886605,
                    0.0030841989999998987,
                    0.0029641739999988204,
                    0.0029585939999776656,
                    0.0029308929999842803,
                    0.002933306999977958,
                    0.002940289999997958,
                    0.0029476439999882587,
                    0.002945769999996628,
                    0.00295905499999094,
                    0.0029372839999837197,
                    0.002972541000019646,
                    0.0029475730000001477,
                    0.0029517320000138625,
                    0.0029928380000114885,
                    0.00293458899997745,
                    0.00294543999999064,
                    0.002928208000014365,
                    0.002923207999998567,
                    0.00294957700000964,
                    0.0029289990000052057,
                    0.0029595860000029006,
                    0.0030306289999941782,
                    0.0029401599999800965,
                    0.002996506000016552,
                    0.0029715979999878073,
                    0.0029614299999991545,
                    0.002934839999994665,
                    0.002963543999982221,
                    0.002942234000016697,
                    0.0029687929999795415,
                    0.002993108999987726,
                    0.0029610089999891898,
                    0.002933006000006344,
                    0.0029412719999868386,
                    0.0029504290000090805,
                    0.002967781999984709,
                    0.0029306429999849115,
                    0.0029273149999937687,
                    0.0029515710000111994,
                    0.0029708879999930105,
                    0.00294800500000747,
                    0.0029339990000210037,
                    0.0029360620000034032,
                    0.0029255830000067817,
                    0.0029498379999779445,
                    0.002956861000001254,
                    0.002934839999994665,
                    0.0029463920000125654,
                    0.002931023000002142,
                    0.00297804100000576,
                    0.002965015999990328,
                    0.002969855000003463,
                    0.002960768000008329,
                    0.0029303419999848757,
                    0.00293237500000032,
                    0.0029583839999816064,
                    0.0029857349999815597,
                    0.0029175979999820356,
                    0.00296529700000292,
                    0.002953966000006858,
                    0.0030354179999960706,
                    0.002940179999995962,
                    0.0029545159999884163,
                    0.002941371999980902,
                    0.002927034999999023,
                    0.0029631419999986974,
                    0.002946041000001287,
                    0.002932555999990427,
                    0.002939278000013701,
                    0.0029407000000105654,
                    0.002941953000004105,
                    0.0029350000000079035,
                    0.0029433059999917077,
                    0.002925621999992245,
                    0.0029378950000022996,
                    0.0029306520000034197,
                    0.002949456999999711,
                    0.0029276059999858717,
                    0.00292260699998792,
                    0.0029495269999983975,
                    0.002952993000008064,
                    0.0029110150000235535,
                    0.0029628730000013093,
                    0.0029328260000056616,
                    0.0029413620000013907,
                    0.00293865700001561,
                    0.00297105799999576,
                    0.002932055000002265,
                    0.0029282079999859434,
                    0.002927306000003682,
                    0.002928467999993245,
                    0.0029331169999977647,
                    0.0029489060000003064,
                    0.002948215000003529,
                    0.002947212999998783,
                    0.002928988999997273,
                    0.0029465310000205136,
                    0.002920582999990984,
                    0.0029424340000048232,
                    0.002939257999997835,
                    0.0029463109999881,
                    0.0029487560000234225,
                    0.002934309000011126,
                    0.002945168999985981,
                    0.0029302009999980783,
                    0.0029303309999875182,
                    0.002941953000004105,
                    0.0029210539999837692,
                    0.0029340979999972205,
                    0.002928607999990618,
                    0.002936312000002772,
                    0.0029353399999934027,
                    0.002928737999980058,
                    0.0029228169999839793,
                    0.0028964579999808393,
                    0.0029456000000038784,
                    0.0029400390000091647,
                    0.0029438970000228437,
                    0.0029219259999990754,
                    0.0029255930000147146,
                    0.002951921999994056,
                    0.002942733999987013,
                    0.0029578630000060002,
                    0.0029354810000086218,
                    0.002919220999984873,
                    0.0029134190000092985,
                    0.0029199119999816503,
                    0.0029353710000066258,
                    0.003932560000009744,
                    0.0029532940000081,
                    0.002938546999985192,
                    0.002945400000015752,
                    0.0029297600000006696,
                    0.002916725999995151,
                    0.0029167859999859047,
                    0.002932184999991705,
                    0.002944388000003073,
                    0.0029393179999885888,
                    0.0029354810000086218,
                    0.0029413019999822154,
                    0.002952141999998048,
                    0.0029348390000052405,
                    0.002953012999995508,
                    0.0029548079999983656,
                    0.0029320849999976417,
                    0.0029321049999850857,
                    0.002937405000011495,
                    0.0029550779999851784,
                    0.002948715999991691,
                    0.0029436959999884493,
                    0.0029660980000016934,
                    0.002942574000002196,
                    0.002937295000009499,
                    0.002927034999999023,
                    0.0029406409999808147,
                    0.002928797999999233,
                    0.002930722000002106,
                    0.0029304919999901813,
                    0.0029410420000033355,
                    0.0029324749999943833,
                    0.0029521720000218465,
                    0.0029424139999889576,
                    0.0029299100000059752,
                    0.0029188700000020162,
                    0.0029400299999906565,
                    0.0029648759999929553,
                    0.0029457299999933184,
                    0.002937294999981077,
                    0.00293609199999878,
                    0.002952322999988155,
                    0.0029391080000209513,
                    0.0029340490000038244,
                    0.0028989319999936924,
                    0.0029193809999981113,
                    0.0029319949999830897,
                    0.002913489000007985,
                    0.002934869999990042,
                    0.0029501189999905364,
                    0.0029304710000133127,
                    0.0029277970000123332,
                    0.0029298999999980424,
                    0.002939507999997204,
                    0.002930191000018567,
                    0.0029303209999795854,
                    0.0029361619999974664,
                    0.0029448789999833025,
                    0.0029145310000160407,
                    0.0029186790000039764,
                    0.002927807000020266,
                    0.0029090220000114186,
                    0.0029369439999982205,
                    0.002965536999994356,
                    0.0029368239999882917,
                    0.0029451990000097794,
                    0.0029547770000135642,
                    0.0029374849999896924,
                    0.0029504999999971915,
                    0.0029385670000010577,
                    0.002941603000010673,
                    0.0029460899999946832,
                    0.0029436260000181846,
                    0.0029667389999872285,
                    0.002929761000018516,
                    0.0029309219999902325,
                    0.002940309999985402,
                    0.0029422940000074504,
                    0.002920533000008163,
                    0.0029171370000256047,
                    0.0029124980000005962,
                    0.002916606000013644,
                    0.0029257019999988643,
                    0.0029426339999929496,
                    0.0029463410000118984,
                    0.002944919000015034,
                    0.002927435999993122,
                    0.00293685400001209,
                    0.002913430000006656,
                    0.0029270949999897766,
                    0.0029452500000104465,
                    0.002959446000005528,
                    0.0029332569999951374,
                    0.002937265000014122,
                    0.0029601870000135477,
                    0.00296007699998313,
                    0.002940080000001899,
                    0.0029478240000173628,
                    0.0029101540000056048,
                    0.0029285379999919314,
                    0.0029432059999976445,
                    0.002929209000001265,
                    0.0029312029999744027,
                    0.0029347800000039115,
                    0.0029559790000064368,
                    0.002934277999997903,
                    0.0029336080000064158,
                    0.0029307019999862405,
                    0.0029124980000005962,
                    0.002922587000000476,
                    0.00291697599999452,
                    0.002943506000008256,
                    0.0029355110000039986,
                    0.002935992000004717,
                    0.002938255999993089,
                    0.0029161650000162354,
                    0.0029260729999975865,
                    0.002959556000007524,
                    0.002950408999993215,
                    0.0029199619999928927,
                    0.002932585999985804,
                    0.0029832199999759723,
                    0.002929739999984804,
                    0.0029417629999954897,
                    0.0029495269999983975,
                    0.0029331660000195825,
                    0.0029543269999976474,
                    0.00293135300000813,
                    0.00294586000001118,
                    0.002928467999993245,
                    0.00292065299998967,
                    0.0029471729999954732,
                    0.0029415120000066963,
                    0.00292574300002002,
                    0.0029217059999950834,
                    0.002907378000003291,
                    0.0029276159999938045,
                    0.002930501999998114,
                    0.0029467420000059974,
                    0.0029366530000061175,
                    0.0029294599999900583,
                    0.0029496079999944413,
                    0.0029731320000223604,
                    0.0029271950000122615,
                    0.002935600999990129,
                    0.002937535000000935,
                    0.0029364230000226144,
                    0.002932074999989709,
                    0.0029431959999897117,
                    0.002932314999981145,
                    0.0029394480000064505,
                    0.0029473529999961556,
                    0.0029233980000071824,
                    0.002897018999988177,
                    0.002923008000010441,
                    0.0029397090000031767,
                    0.0029243200000053093,
                    0.0029252730000166594,
                    0.0029615599999885944
                ],
                "count": 339,
                "total": 0.999347844999761,
                "mean": 0.0029479287463119795,
                "p50": 0.0029399199999886605,
                "p90": 0.0029668673999935892,
                "p95": 0.0029833552999775745,
                "p99": 0.003065662219998444,
                "stdev": 6.725858630574176e-05,
                "stdev_": 2.281554002615767
            },
            "throughput": {
                "unit": "samples/s",
                "value": 678.4424496359044
            },
            "energy": {
                "unit": "kWh",
                "cpu": 5.321988356907658e-08,
                "ram": 1.6471020627876317e-08,
                "gpu": 0.0,
                "total": 6.969090419695288e-08
            },
            "efficiency": {
                "unit": "samples/kWh",
                "value": 28698149.680305723
            }
        }
    }
}