rocm / cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
IlyasMoutawwakil
HF staff
Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub
0c2f1d6
verified
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "6",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.256576,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 8,
            "gpu_vram_mb": 549621596160,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.44.2",
            "transformers_commit": null,
            "accelerate_version": "0.34.2",
            "accelerate_commit": null,
            "diffusers_version": "0.30.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.9",
            "timm_commit": null,
            "peft_version": "0.12.0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1037.930496,
                "max_global_vram": 68702.69952,
                "max_process_vram": 46118.453248,
                "max_reserved": 400.556032,
                "max_allocated": 346.271744
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 7.988341796875,
                "mean": 7.988341796875,
                "stdev": 0.0,
                "p50": 7.988341796875,
                "p90": 7.988341796875,
                "p95": 7.988341796875,
                "p99": 7.988341796875,
                "values": [
                    7.988341796875
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null,
            "measures": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1208.434688,
                "max_global_vram": 68702.69952,
                "max_process_vram": 175230.517248,
                "max_reserved": 406.847488,
                "max_allocated": 355.303424
            },
            "latency": {
                "unit": "s",
                "count": 161,
                "total": 0.997180332183838,
                "mean": 0.006193666659526944,
                "stdev": 0.0017113469679532047,
                "p50": 0.0059592070579528806,
                "p90": 0.006704808235168457,
                "p95": 0.006738409042358399,
                "p99": 0.007795338249206554,
                "values": [
                    0.0060836877822875975,
                    0.006191688060760498,
                    0.006202727794647217,
                    0.006100327968597412,
                    0.006038568019866943,
                    0.005984327793121338,
                    0.0059412879943847655,
                    0.0059193668365478515,
                    0.006020648002624512,
                    0.005975368022918701,
                    0.005856008052825928,
                    0.005787366867065429,
                    0.005857128143310547,
                    0.00581552791595459,
                    0.005708487033843994,
                    0.005727528095245361,
                    0.0057060871124267575,
                    0.005727047920227051,
                    0.00570384693145752,
                    0.005688648223876953,
                    0.0057060871124267575,
                    0.005745768070220947,
                    0.0057462468147277835,
                    0.005740968227386475,
                    0.005714087009429931,
                    0.00573248815536499,
                    0.005738886833190918,
                    0.0057118477821350095,
                    0.005745606899261475,
                    0.005727528095245361,
                    0.005728806972503662,
                    0.008907691955566406,
                    0.0069489688873291015,
                    0.006678248882293701,
                    0.006732328891754151,
                    0.00675872802734375,
                    0.006776487827301026,
                    0.006734728813171387,
                    0.006734728813171387,
                    0.006738409042358399,
                    0.0067214488983154295,
                    0.006762088775634766,
                    0.00672160816192627,
                    0.026635393142700196,
                    0.003991524934768677,
                    0.00397696590423584,
                    0.003979685068130493,
                    0.00397984504699707,
                    0.0039766449928283695,
                    0.004366566181182861,
                    0.006694728851318359,
                    0.006686247825622558,
                    0.006680007934570313,
                    0.006717928886413574,
                    0.006694088935852051,
                    0.006685608863830566,
                    0.006700168132781983,
                    0.0061443281173706055,
                    0.006113927841186523,
                    0.006099048137664795,
                    0.006116648197174072,
                    0.006118406772613525,
                    0.0062080078125,
                    0.006168968200683593,
                    0.00614080810546875,
                    0.005950247764587403,
                    0.005943847179412842,
                    0.005942408084869385,
                    0.005942408084869385,
                    0.005934728145599365,
                    0.005927847862243652,
                    0.00592816686630249,
                    0.005956168174743652,
                    0.0059486479759216304,
                    0.0059430480003356935,
                    0.005960328102111816,
                    0.00594784688949585,
                    0.006148647785186767,
                    0.00596832799911499,
                    0.005951208114624023,
                    0.0059372878074646,
                    0.0059449682235717775,
                    0.005965287208557129,
                    0.005975368022918701,
                    0.005958087921142578,
                    0.005932807922363281,
                    0.00594976806640625,
                    0.0059566469192504886,
                    0.005924807071685791,
                    0.0059430480003356935,
                    0.005965447902679443,
                    0.005971367835998535,
                    0.005940967082977295,
                    0.005978567123413086,
                    0.005919528007507324,
                    0.005942887783050537,
                    0.0059625678062438965,
                    0.005934566974639893,
                    0.00592592716217041,
                    0.006080647945404053,
                    0.005968808174133301,
                    0.005926087856292724,
                    0.005964168071746826,
                    0.005979046821594238,
                    0.005884807109832763,
                    0.005964168071746826,
                    0.005949448108673096,
                    0.005939688205718994,
                    0.0059456081390380855,
                    0.0059334468841552735,
                    0.005963367938995361,
                    0.005939208030700684,
                    0.005938568115234375,
                    0.005941768169403076,
                    0.005940486907958984,
                    0.005922887802124023,
                    0.0059382481575012205,
                    0.005945767879486084,
                    0.005915687084197998,
                    0.005963686943054199,
                    0.005961448192596436,
                    0.005952648162841797,
                    0.005959047794342041,
                    0.0059692878723144535,
                    0.00594832706451416,
                    0.00594544792175293,
                    0.005954247951507568,
                    0.005940328121185303,
                    0.0061779279708862305,
                    0.005972968101501465,
                    0.0059566469192504886,
                    0.0059592070579528806,
                    0.005924327850341797,
                    0.005956007957458496,
                    0.005930568218231201,
                    0.005953766822814941,
                    0.005934727191925049,
                    0.006099847793579102,
                    0.0070537691116333,
                    0.006648488044738769,
                    0.006693609237670898,
                    0.006695048809051514,
                    0.006691849231719971,
                    0.0065308880805969236,
                    0.0061356878280639645,
                    0.006214248180389404,
                    0.006207208156585694,
                    0.006343849182128906,
                    0.006295368194580078,
                    0.006369128227233887,
                    0.0066908888816833495,
                    0.006681128978729248,
                    0.006753129005432129,
                    0.006665928840637207,
                    0.006694888114929199,
                    0.006712489128112793,
                    0.00668480920791626,
                    0.0066267290115356444,
                    0.006529448032379151,
                    0.006525767803192139,
                    0.006704808235168457
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 161.45525017266223
            },
            "energy": null,
            "efficiency": null,
            "measures": null
        }
    }
}