{
    "config": {
        "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "multiple-choice",
            "model": "FacebookAI/roberta-base",
            "library": "transformers",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "hub_kwargs": {
                "revision": "main",
                "force_download": false,
                "local_files_only": false,
                "trust_remote_code": false
            },
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.2.0",
            "optimum_benchmark_commit": "ebd20fcf042acf5db8d8956e7057fa93c82e14ab",
            "transformers_version": "4.40.2",
            "transformers_commit": null,
            "accelerate_version": "0.30.1",
            "accelerate_commit": null,
            "diffusers_version": "0.27.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "0.9.16",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1006.546944,
                "max_global_vram": 898.469888,
                "max_process_vram": 236416.294912,
                "max_reserved": 555.74528,
                "max_allocated": 499.507712
            },
            "latency": {
                "unit": "s",
                "count": 132,
                "total": 0.9964992427825929,
                "mean": 0.007549236687746916,
                "stdev": 0.0004251598062794823,
                "p50": 0.0072824816703796385,
                "p90": 0.0081010103225708,
                "p95": 0.008159554147720337,
                "p99": 0.009257718667984008,
                "values": [
                    0.00929824161529541,
                    0.009455041885375976,
                    0.008008001327514648,
                    0.00802848243713379,
                    0.007899840831756592,
                    0.007907201766967773,
                    0.007836321830749511,
                    0.007506721019744873,
                    0.007251041889190674,
                    0.007262081146240234,
                    0.007232000827789306,
                    0.00723872184753418,
                    0.007238881111145019,
                    0.007213602066040039,
                    0.007223361015319825,
                    0.00723104190826416,
                    0.007251040935516357,
                    0.007262241840362549,
                    0.007239840984344483,
                    0.007227042198181152,
                    0.007244640827178955,
                    0.007234401226043701,
                    0.007252322196960449,
                    0.007602880954742432,
                    0.007626242160797119,
                    0.007635361194610596,
                    0.007591842174530029,
                    0.00734912109375,
                    0.008022722244262695,
                    0.009167522430419921,
                    0.008101442337036132,
                    0.008074721336364747,
                    0.008103841781616212,
                    0.008073921203613282,
                    0.008097122192382813,
                    0.008123842239379883,
                    0.008085761070251465,
                    0.008103201866149903,
                    0.008103522300720215,
                    0.008065920829772949,
                    0.008149762153625489,
                    0.00805648136138916,
                    0.00769008207321167,
                    0.007865121841430664,
                    0.007795361042022705,
                    0.008024962425231933,
                    0.007694080829620361,
                    0.007764801979064941,
                    0.007459681034088134,
                    0.007646242141723633,
                    0.007608961105346679,
                    0.007506562232971191,
                    0.007235520839691162,
                    0.007227042198181152,
                    0.007243521213531494,
                    0.00721296215057373,
                    0.007234880924224854,
                    0.007266561985015869,
                    0.007251040935516357,
                    0.007202881813049317,
                    0.007246080875396728,
                    0.007244161128997803,
                    0.00726928186416626,
                    0.007250561237335205,
                    0.00722720193862915,
                    0.007222880840301513,
                    0.0072244820594787595,
                    0.0073214411735534664,
                    0.0072648019790649415,
                    0.007244640827178955,
                    0.007244162082672119,
                    0.0072377610206604,
                    0.007254880905151367,
                    0.0072526421546936035,
                    0.007266880989074707,
                    0.007251522064208984,
                    0.0072356810569763185,
                    0.007242722034454346,
                    0.007298241138458252,
                    0.007282882213592529,
                    0.007282081127166748,
                    0.007229762077331543,
                    0.007250561237335205,
                    0.0072721619606018065,
                    0.00723504114151001,
                    0.0072411208152770995,
                    0.007263842105865478,
                    0.007270720958709717,
                    0.007275042057037354,
                    0.007253760814666748,
                    0.007273441791534424,
                    0.007252800941467285,
                    0.007195042133331299,
                    0.007222880840301513,
                    0.007238081932067871,
                    0.007244161128997803,
                    0.007227200984954834,
                    0.007229762077331543,
                    0.007199201107025146,
                    0.007176161766052246,
                    0.007939520835876466,
                    0.008036481857299804,
                    0.00759968090057373,
                    0.007595042228698731,
                    0.007559361934661865,
                    0.00756256103515625,
                    0.0075352020263671875,
                    0.007578560829162598,
                    0.007578882217407226,
                    0.007845601081848145,
                    0.00817152214050293,
                    0.008226241111755371,
                    0.008213281631469727,
                    0.007631522178649902,
                    0.007635041236877442,
                    0.007659361839294433,
                    0.007664000988006592,
                    0.007651202201843262,
                    0.007583200931549072,
                    0.0076104021072387696,
                    0.0074228811264038085,
                    0.007220481872558594,
                    0.007220160961151123,
                    0.007221762180328369,
                    0.007217761039733887,
                    0.0072171220779418946,
                    0.0072377610206604,
                    0.007397602081298828,
                    0.008137600898742676,
                    0.00807984161376953,
                    0.008172640800476074,
                    0.0075536007881164555
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 132.46372333551142
            },
            "energy": null,
            "efficiency": null
        }
    }
}