rocm / cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
IlyasMoutawwakil
Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub (commit 14c8810)
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "6",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.256576,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 8,
            "gpu_vram_mb": 549621596160,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.44.2",
            "transformers_commit": null,
            "accelerate_version": "0.34.0",
            "accelerate_commit": null,
            "diffusers_version": "0.30.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.9",
            "timm_commit": null,
            "peft_version": "0.12.0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1036.795904,
                "max_global_vram": 68702.69952,
                "max_process_vram": 45934.333952,
                "max_reserved": 400.556032,
                "max_allocated": 346.271744
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 8.1544462890625,
                "mean": 8.1544462890625,
                "stdev": 0.0,
                "p50": 8.1544462890625,
                "p90": 8.1544462890625,
                "p95": 8.1544462890625,
                "p99": 8.1544462890625,
                "values": [
                    8.1544462890625
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1208.901632,
                "max_global_vram": 68702.69952,
                "max_process_vram": 183673.597952,
                "max_reserved": 406.847488,
                "max_allocated": 355.303424
            },
            "latency": {
                "unit": "s",
                "count": 161,
                "total": 0.9946027207374575,
                "mean": 0.006177656650543214,
                "stdev": 0.002513704838505632,
                "p50": 0.006032648086547851,
                "p90": 0.006373288154602051,
                "p95": 0.006740488052368164,
                "p99": 0.007602698040008546,
                "values": [
                    0.006159687995910644,
                    0.006200967788696289,
                    0.0061780881881713864,
                    0.006221447944641113,
                    0.006360328197479248,
                    0.00627728796005249,
                    0.006308488845825196,
                    0.006223688125610352,
                    0.006268167972564697,
                    0.006278567790985108,
                    0.006204967975616455,
                    0.0061761679649353024,
                    0.0063198480606079105,
                    0.006188807964324951,
                    0.006099207878112793,
                    0.006087207794189453,
                    0.006141608238220215,
                    0.006082087993621826,
                    0.006071047782897949,
                    0.006077447891235352,
                    0.006033448219299317,
                    0.006020486831665039,
                    0.005988167762756348,
                    0.0059766478538513185,
                    0.006048488140106201,
                    0.005992167949676513,
                    0.0061060872077941895,
                    0.006063366889953613,
                    0.006057127952575684,
                    0.005989287853240967,
                    0.006091847896575928,
                    0.006096327781677246,
                    0.006051847934722901,
                    0.006302248001098633,
                    0.006058407783508301,
                    0.006065767765045166,
                    0.00716736888885498,
                    0.0060500879287719725,
                    0.005984327793121338,
                    0.00618560791015625,
                    0.005930407047271729,
                    0.006500807762145996,
                    0.0060550479888916015,
                    0.005993927955627441,
                    0.006020806789398194,
                    0.006019847869873047,
                    0.0075420899391174315,
                    0.007693610191345215,
                    0.006344007968902588,
                    0.00621616792678833,
                    0.0060091280937194826,
                    0.006373288154602051,
                    0.006237607955932617,
                    0.006196008205413818,
                    0.036948848724365234,
                    0.004039525032043457,
                    0.004028165817260742,
                    0.004044644832611084,
                    0.004038084983825683,
                    0.004028164863586426,
                    0.004049604892730713,
                    0.004040966033935547,
                    0.004145764827728272,
                    0.0040745649337768555,
                    0.004035365104675293,
                    0.00412976598739624,
                    0.004066565036773682,
                    0.004033444881439209,
                    0.005209446907043457,
                    0.006006087779998779,
                    0.005980167865753174,
                    0.006073287010192871,
                    0.006093128204345703,
                    0.006012648105621338,
                    0.005992807865142822,
                    0.00600320816040039,
                    0.005988166809082031,
                    0.0061798481941223145,
                    0.006007526874542236,
                    0.006064807891845703,
                    0.006018248081207276,
                    0.005991528034210205,
                    0.006017286777496338,
                    0.0060283269882202145,
                    0.005999527931213379,
                    0.006038728237152099,
                    0.006028327941894531,
                    0.006040647983551025,
                    0.006016326904296875,
                    0.006032648086547851,
                    0.006007847785949707,
                    0.006011367797851563,
                    0.006017767906188965,
                    0.006008646965026856,
                    0.006015848159790039,
                    0.006012328147888183,
                    0.006004168033599854,
                    0.006020167827606201,
                    0.0059952068328857425,
                    0.005992968082427979,
                    0.006008808135986328,
                    0.006008808135986328,
                    0.006004487991333008,
                    0.006028646945953369,
                    0.0059568071365356445,
                    0.006025767803192138,
                    0.006016808032989502,
                    0.005992807865142822,
                    0.006003527164459228,
                    0.005965928077697754,
                    0.006017928123474121,
                    0.006012487888336182,
                    0.006043208122253418,
                    0.006007047176361084,
                    0.006004007816314697,
                    0.005992008209228516,
                    0.00603760814666748,
                    0.005988808155059815,
                    0.005998567104339599,
                    0.006053287982940674,
                    0.006010568141937256,
                    0.00602672815322876,
                    0.00600656795501709,
                    0.00601552677154541,
                    0.006041927814483643,
                    0.006018887996673584,
                    0.005999368190765381,
                    0.00595792818069458,
                    0.005984487056732178,
                    0.005975368022918701,
                    0.006000648021697998,
                    0.006020167827606201,
                    0.0065908880233764644,
                    0.006686728954315185,
                    0.006744489192962646,
                    0.006819529056549072,
                    0.006660647869110107,
                    0.006332327842712402,
                    0.006554568767547607,
                    0.006295048236846924,
                    0.006293928146362305,
                    0.006283527851104737,
                    0.00611024808883667,
                    0.0063033680915832516,
                    0.006270567893981934,
                    0.006239048004150391,
                    0.006263207912445068,
                    0.006223527908325195,
                    0.006165448188781738,
                    0.0059712080955505375,
                    0.006316487789154053,
                    0.006125768184661865,
                    0.005964968204498291,
                    0.005994407176971436,
                    0.00653040885925293,
                    0.006690728187561035,
                    0.006740488052368164,
                    0.006811529159545898,
                    0.006816328048706055,
                    0.0062580881118774415,
                    0.006320168018341064
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 161.87367744241143
            },
            "energy": null,
            "efficiency": null
        }
    }
}
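
For context, here is a minimal sketch of how a run like the one recorded above could be configured and launched from Python with optimum-benchmark (0.4.0 per the environment section). The class names (Benchmark, BenchmarkConfig, PyTorchConfig, ProcessConfig, InferenceConfig) follow the library's documented top-level API, but the exact constructor arguments shown here are an assumption and may differ between versions; the remaining fields in the config above are filled in by the library's defaults.

```python
# Minimal sketch, assuming the optimum-benchmark 0.4.0 Python API; constructor
# arguments may differ between versions, so treat this as an illustration rather
# than the script that produced this file.
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)

if __name__ == "__main__":
    # Backend: PyTorch; a ROCm build is still exposed through the "cuda" device type.
    backend_config = PyTorchConfig(
        model="google/vit-base-patch16-224",
        task="image-classification",
        device="cuda",
        device_ids="6",
        no_weights=True,  # benchmark a randomly initialized model of the same architecture
    )
    # Scenario: track latency and memory for the forward pass.
    scenario_config = InferenceConfig(
        memory=True,
        latency=True,
        input_shapes={"batch_size": 1},
    )
    # Launcher: run in an isolated child process, warn if other processes use the device.
    launcher_config = ProcessConfig(device_isolation=True, device_isolation_action="warn")

    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        backend=backend_config,
        scenario=scenario_config,
        launcher=launcher_config,
    )
    benchmark_report = Benchmark.launch(benchmark_config)
    benchmark_report.log()  # the report can also be serialized or pushed to the Hub
```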
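
The summary statistics under report.forward.latency are all derived from the 161 per-call latencies in values: for example, throughput is count / total, i.e. 161 / 0.9946 s ≈ 161.87 samples/s at batch_size 1. A small standard-library sketch to recompute them from a local copy of this file (the path benchmark.json is assumed) follows; the percentile and stdev conventions here mirror numpy defaults, which the library most likely uses, so small differences are possible.

```python
# Recompute the report.forward summary statistics from the raw latencies in this file.
import json
import statistics

with open("benchmark.json") as f:
    benchmark = json.load(f)

latency = benchmark["report"]["forward"]["latency"]
values = latency["values"]

total = sum(values)
throughput = len(values) / total  # samples/s at batch_size 1: one image per forward call


def percentile(sorted_values, q):
    """Linear-interpolation percentile over an already sorted list."""
    idx = (len(sorted_values) - 1) * q
    lo = int(idx)
    hi = min(lo + 1, len(sorted_values) - 1)
    frac = idx - lo
    return sorted_values[lo] * (1 - frac) + sorted_values[hi] * frac


ordered = sorted(values)
print(f"count      : {len(values)}")
print(f"mean       : {total / len(values):.9f} s")
print(f"stdev      : {statistics.pstdev(values):.9f} s")
print(f"p50 / p99  : {percentile(ordered, 0.50):.9f} / {percentile(ordered, 0.99):.9f} s")
print(f"throughput : {throughput:.2f} samples/s")
```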