rocm/cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
IlyasMoutawwakil (HF staff)
Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub
5f63f2a
verified
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "5",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.256576,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 8,
            "gpu_vram_mb": 549621596160,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.44.2",
            "transformers_commit": null,
            "accelerate_version": "0.34.2",
            "accelerate_commit": null,
            "diffusers_version": "0.30.3",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.9",
            "timm_commit": null,
            "peft_version": "0.12.0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1073.201152,
                "max_global_vram": 68702.69952,
                "max_process_vram": 44420.34176,
                "max_reserved": 400.556032,
                "max_allocated": 346.271744
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 8.243681640625,
                "mean": 8.243681640625,
                "stdev": 0.0,
                "p50": 8.243681640625,
                "p90": 8.243681640625,
                "p95": 8.243681640625,
                "p99": 8.243681640625,
                "values": [
                    8.243681640625
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1243.942912,
                "max_global_vram": 68702.69952,
                "max_process_vram": 185365.090304,
                "max_reserved": 406.847488,
                "max_allocated": 355.303424
            },
            "latency": {
                "unit": "s",
                "count": 164,
                "total": 0.995947322845459,
                "mean": 0.006072849529545482,
                "stdev": 0.0008661441315309034,
                "p50": 0.006013265609741211,
                "p90": 0.00646996831893921,
                "p95": 0.006736735153198242,
                "p99": 0.009253994913101197,
                "values": [
                    0.006568463802337646,
                    0.006268145084381104,
                    0.006515503883361817,
                    0.006641903877258301,
                    0.006503505229949951,
                    0.00641518497467041,
                    0.01490764331817627,
                    0.004011030197143555,
                    0.004000950813293457,
                    0.003996469974517822,
                    0.005838864803314209,
                    0.006157424926757813,
                    0.006114544868469238,
                    0.006089584827423096,
                    0.0060420651435852055,
                    0.006052946090698242,
                    0.00605694580078125,
                    0.006032145023345948,
                    0.006042385101318359,
                    0.00601886510848999,
                    0.006073585033416748,
                    0.006044944763183594,
                    0.006053585052490235,
                    0.006068305015563965,
                    0.006109425067901611,
                    0.0060718259811401365,
                    0.0060625457763671875,
                    0.006078704833984375,
                    0.006057905197143555,
                    0.006093585014343262,
                    0.006081745147705078,
                    0.0060622248649597164,
                    0.006065424919128418,
                    0.009440776824951171,
                    0.006078545093536377,
                    0.0057751860618591305,
                    0.0057606248855590824,
                    0.0057537460327148435,
                    0.005767506122589111,
                    0.005733905792236328,
                    0.0057732658386230465,
                    0.005746545791625976,
                    0.005738865852355957,
                    0.005762866020202637,
                    0.005753265857696533,
                    0.005805106163024903,
                    0.005739346027374268,
                    0.005757105827331543,
                    0.005744946002960205,
                    0.00574574613571167,
                    0.005750545978546143,
                    0.005735025882720947,
                    0.005761106014251709,
                    0.0057518258094787595,
                    0.005795185089111328,
                    0.005757266044616699,
                    0.005776465892791748,
                    0.005747025966644287,
                    0.005745585918426514,
                    0.00575774621963501,
                    0.0057463860511779786,
                    0.005756784915924072,
                    0.005848785877227783,
                    0.005808946132659912,
                    0.005794546127319336,
                    0.005769585132598877,
                    0.005756626129150391,
                    0.005760305881500244,
                    0.005752625942230224,
                    0.005773585796356201,
                    0.0057727861404418945,
                    0.005781264781951905,
                    0.005787826061248779,
                    0.0057427058219909665,
                    0.005775345802307129,
                    0.005754226207733154,
                    0.006001265048980713,
                    0.005795506000518799,
                    0.005777425765991211,
                    0.005775825977325439,
                    0.005787665843963623,
                    0.005768305778503418,
                    0.005769105911254882,
                    0.005780466079711914,
                    0.005740146160125733,
                    0.005782705783843994,
                    0.005791825771331787,
                    0.005830706119537354,
                    0.005773105144500732,
                    0.005768305778503418,
                    0.00576862621307373,
                    0.005787025928497314,
                    0.005777105808258056,
                    0.005755986213684082,
                    0.00914429759979248,
                    0.006080144882202148,
                    0.00600686502456665,
                    0.006029585838317871,
                    0.0060215859413146975,
                    0.00603310489654541,
                    0.006011024951934814,
                    0.005999344825744629,
                    0.005999986171722412,
                    0.006002225875854492,
                    0.005972785949707031,
                    0.005986064910888672,
                    0.0060303850173950195,
                    0.006004785060882568,
                    0.006018226146697998,
                    0.006035185813903808,
                    0.006034864902496338,
                    0.006023184776306152,
                    0.006005265235900879,
                    0.006017584800720215,
                    0.0059916658401489254,
                    0.0060182251930236816,
                    0.005998064994812011,
                    0.006109745025634766,
                    0.006034544944763184,
                    0.00604510498046875,
                    0.006003345966339111,
                    0.00600030517578125,
                    0.006018704891204834,
                    0.006040625095367432,
                    0.006003345012664795,
                    0.006021426200866699,
                    0.006036145210266113,
                    0.006027345180511475,
                    0.006029584884643555,
                    0.006001104831695556,
                    0.00601646614074707,
                    0.006029426097869873,
                    0.0060267047882080075,
                    0.006020464897155762,
                    0.006012145042419433,
                    0.0060143861770629884,
                    0.005974545955657959,
                    0.0061471848487854005,
                    0.006834702968597412,
                    0.0066902232170104985,
                    0.006755663871765137,
                    0.006744943141937256,
                    0.006817742824554444,
                    0.006484784126281738,
                    0.0062633447647094725,
                    0.006345903873443604,
                    0.006452945232391358,
                    0.006305264949798584,
                    0.006477263927459717,
                    0.0065681438446044925,
                    0.006755503177642822,
                    0.006751344203948974,
                    0.006242703914642334,
                    0.006388784885406494,
                    0.006323503971099854,
                    0.0061545448303222654,
                    0.0062934250831604005,
                    0.006161745071411133,
                    0.006236784934997559,
                    0.00625038480758667,
                    0.006189263820648193,
                    0.006133423805236816,
                    0.005984786033630371,
                    0.005996144771575927
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 164.66734358143142
            },
            "energy": null,
            "efficiency": null
        }
    }
}