rocm
/
cuda_inference_transformers_image-classification_google
/vit-base-patch16-224
/benchmark.json
IlyasMoutawwakil
HF staff
Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub
7785271
verified
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "6",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.256576,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 8,
            "gpu_vram_mb": 549621596160,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.44.2",
            "transformers_commit": null,
            "accelerate_version": "0.34.2",
            "accelerate_commit": null,
            "diffusers_version": "0.30.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.9",
            "timm_commit": null,
            "peft_version": "0.12.0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1036.935168,
                "max_global_vram": 68702.69952,
                "max_process_vram": 45451.751424,
                "max_reserved": 400.556032,
                "max_allocated": 346.271744
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 7.8229345703125,
                "mean": 7.8229345703125,
                "stdev": 0.0,
                "p50": 7.8229345703125,
                "p90": 7.8229345703125,
                "p95": 7.8229345703125,
                "p99": 7.8229345703125,
                "values": [
                    7.8229345703125
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null,
            "measures": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1208.889344,
                "max_global_vram": 68702.69952,
                "max_process_vram": 175945.883648,
                "max_reserved": 406.847488,
                "max_allocated": 355.303424
            },
            "latency": {
                "unit": "s",
                "count": 161,
                "total": 0.9966567282676697,
                "mean": 0.006190414461289874,
                "stdev": 0.00030420731962647256,
                "p50": 0.006035688877105713,
                "p90": 0.006761449813842774,
                "p95": 0.0067936110496521,
                "p99": 0.007031371021270754,
                "values": [
                    0.006441130161285401,
                    0.0065579299926757815,
                    0.006488649845123291,
                    0.006366889953613281,
                    0.006424970149993896,
                    0.006452010154724121,
                    0.0064300899505615235,
                    0.006394889831542969,
                    0.006338250160217285,
                    0.006353128910064698,
                    0.006334409236907959,
                    0.006303530216217041,
                    0.0062595300674438475,
                    0.006232809066772461,
                    0.006265289783477783,
                    0.006250569820404053,
                    0.006214249134063721,
                    0.006201290130615235,
                    0.006169929027557373,
                    0.006174729824066162,
                    0.006117448806762695,
                    0.006144649982452392,
                    0.006156168937683106,
                    0.006177608966827393,
                    0.0062086501121521,
                    0.006149448871612549,
                    0.006192490100860596,
                    0.006157128810882568,
                    0.008052332878112793,
                    0.0073131308555603024,
                    0.006021449089050293,
                    0.006036489963531494,
                    0.006017448902130127,
                    0.0059955291748046875,
                    0.005983688831329346,
                    0.006004169940948486,
                    0.006092968940734863,
                    0.0060052890777587895,
                    0.006030409812927246,
                    0.006007528781890869,
                    0.005962409019470215,
                    0.006005449771881104,
                    0.005987528800964355,
                    0.006002248764038086,
                    0.006025928974151612,
                    0.005999849796295166,
                    0.006000648975372314,
                    0.006044649124145508,
                    0.006013288974761963,
                    0.005992808818817139,
                    0.006021449089050293,
                    0.006017128944396973,
                    0.006020490169525146,
                    0.006016328811645508,
                    0.006017289161682129,
                    0.006011689186096191,
                    0.006024969100952148,
                    0.005993288993835449,
                    0.006004969120025635,
                    0.006018089771270752,
                    0.0060200090408325194,
                    0.006011369228363037,
                    0.006039848804473877,
                    0.006070728778839111,
                    0.00604944896697998,
                    0.006031848907470703,
                    0.0060169692039489745,
                    0.006000168800354004,
                    0.006027849197387696,
                    0.006063849925994873,
                    0.006039528846740722,
                    0.006033449172973633,
                    0.006041450023651123,
                    0.006005609035491943,
                    0.006264170169830322,
                    0.0060188889503479,
                    0.0059937691688537595,
                    0.006213930130004883,
                    0.00603856897354126,
                    0.006185609817504883,
                    0.00604896879196167,
                    0.006070249080657959,
                    0.006051209926605225,
                    0.006022569179534912,
                    0.006073129177093506,
                    0.006030570030212403,
                    0.0060315289497375486,
                    0.006009929180145264,
                    0.006060329914093018,
                    0.006033449172973633,
                    0.006223849773406983,
                    0.006075048923492431,
                    0.006035368919372558,
                    0.0060320100784301755,
                    0.006019049167633057,
                    0.006019689083099365,
                    0.006035688877105713,
                    0.006048809051513672,
                    0.006059208869934082,
                    0.006028328895568848,
                    0.006021608829498291,
                    0.006017609119415283,
                    0.006029448986053467,
                    0.006020649909973145,
                    0.006046409130096435,
                    0.006013449192047119,
                    0.006011690139770508,
                    0.006036808967590332,
                    0.006056488990783692,
                    0.006028170108795166,
                    0.006017289161682129,
                    0.006030889034271241,
                    0.0060160098075866695,
                    0.006035368919372558,
                    0.006021449089050293,
                    0.0060040087699890134,
                    0.006012010097503662,
                    0.005988968849182129,
                    0.005988968849182129,
                    0.006027369976043701,
                    0.006019049167633057,
                    0.006077128887176514,
                    0.006041450023651123,
                    0.006028489112854004,
                    0.0060028891563415525,
                    0.006028328895568848,
                    0.005976809978485108,
                    0.006002569198608398,
                    0.006015688896179199,
                    0.006023529052734375,
                    0.006011528968811035,
                    0.006005448818206787,
                    0.006007208824157715,
                    0.006007209777832031,
                    0.006033608913421631,
                    0.006008648872375488,
                    0.006003209114074707,
                    0.006027049064636231,
                    0.0065907301902771,
                    0.006744009971618652,
                    0.006764491081237793,
                    0.006761449813842774,
                    0.006752330780029297,
                    0.006800330162048339,
                    0.006776491165161133,
                    0.006798089981079102,
                    0.006512010097503662,
                    0.006614409923553467,
                    0.006771050930023194,
                    0.006788969993591309,
                    0.006832011222839355,
                    0.006806250095367432,
                    0.006765931129455566,
                    0.006800970077514648,
                    0.006843531131744384,
                    0.006774250030517578,
                    0.0067936110496521,
                    0.006782730102539062,
                    0.006358250141143799,
                    0.0064019298553466795,
                    0.006203528881072998
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 161.5400723575516
            },
            "energy": null,
            "efficiency": null,
            "measures": null
        }
    }
}